Compare commits
829 Commits
*829 commits are listed in this comparison, identified only by abbreviated SHA (from `8f5d88156f` through `47b57c01f3`); the author, date, and commit-message columns were not captured here.*
**`.github/ISSUE_TEMPLATE/bug_report.md`** (vendored, 7 changed lines)

```diff
@@ -10,7 +10,12 @@ assignees: ''
 **Describe the bug**
 A clear and concise description of what the bug is.
 
-**To Reproduce**
+**Version**
+The CodeQL and VS Code version in which the bug occurs.
+<!-- To copy version information for the CodeQL extension, click "CodeQL CLI vX.X.X" in the status bar at the bottom of the screen.
+To copy detailed version information for VS Code itself, see https://code.visualstudio.com/docs/supporting/FAQ#_how-do-i-find-the-version. -->
+
+**To reproduce**
 Steps to reproduce the behavior.
 
 **Expected behavior**
```
**`.github/ISSUE_TEMPLATE/new-extension-release.md`** (vendored, 18 changed lines, file deleted)

```diff
@@ -1,18 +0,0 @@
----
-name: New extension release
-about: Create an issue with a checklist for the release steps (write access required
-  for the steps)
-title: Release Checklist for version xx.xx.xx
-labels: ''
-assignees: ''
-
----
-
-- [ ] Update this issue title to refer to the version of the release
-- [ ] Trigger a release build on Actions by adding a new tag on branch `main` of the format `vxx.xx.xx`
-- [ ] Monitor the status of the release build in the `Release` workflow in the Actions tab.
-- [ ] Download the VSIX from the draft GitHub release that is created when the release build finishes.
-- [ ] Log into the [Visual Studio Marketplace](https://marketplace.visualstudio.com/manage/publishers/github).
-- [ ] Click the `...` menu in the CodeQL row and click **Update**.
-- [ ] Drag the `.vsix` file you downloaded from the GitHub release into the Marketplace and click **Upload**.
-- [ ] Publish the draft GitHub release and confirm the new release is marked as the latest release at https://github.com/github/vscode-codeql/releases.
```
**`.github/dependabot.yml`** (vendored, new file, 22 lines)

```yaml
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "extensions/ql-vscode"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "github-actions"
    directory: ".github"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
```
**`.github/pull_request_template.md`** (vendored, 2 changed lines)

```diff
@@ -9,4 +9,4 @@ Replace this with a description of the changes your pull request makes.
 - [ ] [CHANGELOG.md](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/CHANGELOG.md) has been updated to incorporate all user visible changes made by this pull request.
 - [ ] Issues have been created for any UI or other user-facing changes made by this pull request.
-- [ ] `@github/docs-content-dsp` has been cc'd in all issues for UI or other user-facing changes made by this pull request.
+- [ ] _[Maintainers only]_ If this pull request makes user-facing changes that require documentation changes, open a corresponding docs pull request in the [github/codeql](https://github.com/github/codeql/tree/main/docs/codeql/codeql-for-visual-studio-code) repo and add the `ready-for-doc-review` label there.
 
```
**`.github/workflows/codeql.yml`** (vendored, 17 changed lines)

```diff
@@ -2,24 +2,31 @@ name: "Code Scanning - CodeQL"
 
 on:
   push:
     branches: [main]
   pull_request:
     branches: [main]
   schedule:
-    - cron: '0 0 * * 0'
+    - cron: '21 17 * * 0'
 
 jobs:
   codeql:
     strategy:
       fail-fast: false
     runs-on: ubuntu-latest
 
     permissions:
       contents: read
       security-events: write
       pull-requests: read
 
     steps:
       - name: Checkout repository
         uses: actions/checkout@v2
 
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v1
+        uses: github/codeql-action/init@main
         with:
           languages: javascript
           config-file: ./.github/codeql/codeql-config.yml
           tools: latest
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v1
+        uses: github/codeql-action/analyze@main
```
**`.github/workflows/dependency-review.yml`** (vendored, new file, 16 lines)

```yaml
name: 'Dependency Review'
on:
  - pull_request
  - workflow_dispatch

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v3
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v1
```
**`.github/workflows/main.yml`** (vendored, 58 changed lines)

```diff
@@ -1,6 +1,8 @@
 name: Build Extension
 on:
+  workflow_dispatch:
+  pull_request:
+    types: [opened, synchronize, reopened, ready_for_review]
   push:
     branches:
       - main
@@ -20,7 +22,7 @@
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.14.0'
+          node-version: '16.13.0'
 
       - name: Install dependencies
         working-directory: extensions/ql-vscode
@@ -49,9 +51,26 @@
           name: vscode-codeql-extension
           path: artifacts
 
+  find-nightly:
+    name: Find Nightly Release
+    runs-on: ubuntu-latest
+    outputs:
+      url: ${{ steps.get-url.outputs.nightly-url }}
+    steps:
+      - name: Get Nightly Release URL
+        id: get-url
+        env:
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
+        shell: bash
+        # This workflow step gets an unstable testing version of the CodeQL CLI. It should not be used outside of these tests.
+        run: |
+          LATEST=`gh api repos/dsp-testing/codeql-cli-nightlies/releases --jq '.[].tag_name' --method GET --raw-field 'per_page=1'`
+          echo "::set-output name=nightly-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST"
+
   test:
     name: Test
     runs-on: ${{ matrix.os }}
+    needs: [find-nightly]
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest]
@@ -63,7 +82,7 @@
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.14.0'
+          node-version: '16.13.0'
 
       - name: Install dependencies
         working-directory: extensions/ql-vscode
@@ -84,31 +103,23 @@
         run: |
           npm run lint
 
       - name: Install CodeQL
         run: |
           mkdir codeql-home
           curl -L --silent https://github.com/github/codeql-cli-binaries/releases/latest/download/codeql.zip -o codeql-home/codeql.zip
           unzip -q -o codeql-home/codeql.zip -d codeql-home
           unzip -q -o codeql-home/codeql.zip codeql/codeql.exe -d codeql-home
           rm codeql-home/codeql.zip
         shell: bash
 
       - name: Run unit tests (Linux)
         working-directory: extensions/ql-vscode
         if: matrix.os == 'ubuntu-latest'
         run: |
-          CODEQL_PATH=$GITHUB_WORKSPACE/codeql-home/codeql/codeql npm run test
+          npm run test
 
       - name: Run unit tests (Windows)
         if: matrix.os == 'windows-latest'
         working-directory: extensions/ql-vscode
         run: |
           $env:CODEQL_PATH=$(Join-Path $env:GITHUB_WORKSPACE -ChildPath 'codeql-home/codeql/codeql.exe')
           npm run test
 
       - name: Run integration tests (Linux)
         if: matrix.os == 'ubuntu-latest'
         working-directory: extensions/ql-vscode
         env:
           VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
         run: |
           sudo apt-get install xvfb
           /usr/bin/xvfb-run npm run integration
@@ -116,18 +127,22 @@
       - name: Run integration tests (Windows)
         if: matrix.os == 'windows-latest'
         working-directory: extensions/ql-vscode
         env:
           VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
         run: |
           npm run integration
 
   cli-test:
     name: CLI Test
     runs-on: ${{ matrix.os }}
+    needs: [find-nightly]
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest]
-        version: ['v2.2.6', 'v2.3.3', 'v2.4.2']
+        version: ['v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.0', 'nightly']
     env:
       CLI_VERSION: ${{ matrix.version }}
+      NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
+      TEST_CODEQL_PATH: '${{ github.workspace }}/codeql'
 
     steps:
@@ -136,7 +151,7 @@
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.14.0'
+          node-version: '16.13.0'
 
       - name: Install dependencies
         working-directory: extensions/ql-vscode
@@ -150,10 +165,23 @@
           npm run build
         shell: bash
 
+      - name: Decide on ref of CodeQL repo
+        id: choose-ref
+        shell: bash
+        run: |
+          if [[ "${{ matrix.version }}" == "nightly" ]]
+          then
+            REF="codeql-cli/latest"
+          else
+            REF="codeql-cli/${{ matrix.version }}"
+          fi
+          echo "::set-output name=ref::$REF"
+
       - name: Checkout QL
         uses: actions/checkout@v2
         with:
           repository: github/codeql
+          ref: ${{ steps.choose-ref.outputs.ref }}
           path: codeql
 
       - name: Run CLI tests (Linux)
```
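For reference, the nightly-URL lookup performed by the `find-nightly` job above can also be run from a local shell. This is only a sketch: it assumes an authenticated GitHub CLI (`gh`), and the `dsp-testing/codeql-cli-nightlies` repository may not be accessible to everyone.

```bash
# Same query the workflow step runs, adapted for local use
LATEST=$(gh api repos/dsp-testing/codeql-cli-nightlies/releases --jq '.[].tag_name' --method GET --raw-field 'per_page=1')
echo "Latest nightly: https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST"
```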
**`.github/workflows/release.yml`** (vendored, 7 changed lines)

```diff
@@ -6,10 +6,6 @@
 
 name: Release
 on:
-  pull_request:
-    paths:
-      - '**/workflows/release.yml'
-
   workflow_dispatch:
 
   push:
@@ -26,7 +22,7 @@
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '10.18.1'
+          node-version: '16.13.0'
 
       - name: Install dependencies
         run: |
@@ -129,6 +125,7 @@
           body: This PR was automatically generated by the GitHub Actions release workflow in this repository.
           branch: ${{ format('version/bump-to-{0}', steps.bump-patch-version.outputs.next_version) }}
           base: main
+          draft: true
 
   vscode-publish:
     name: Publish to VS Code Marketplace
```
**`.vscode/extensions.json`** (vendored, 4 changed lines)

```diff
@@ -1,9 +1,9 @@
 {
-  // See http://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
+  // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
   // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
   // List of extensions which should be recommended for users of this workspace.
   "recommendations": [
-    "eamodio.tsl-problem-matcher",
+    "amodio.tsl-problem-matcher",
     "dbaeumer.vscode-eslint",
     "eternalphane.tsfmt-vscode"
   ],
```
**`.vscode/launch.json`** (vendored, 40 changed lines)

```diff
@@ -12,7 +12,6 @@
         // Add a reference to a workspace to open. Eg-
         // "${workspaceRoot}/../vscode-codeql-starter/vscode-codeql-starter.code-workspace"
       ],
-      "stopOnEntry": false,
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -21,6 +20,9 @@
         // change to 'true' debug the IDE or Query servers
         "IDE_SERVER_JAVA_DEBUG": "false",
         "QUERY_SERVER_JAVA_DEBUG": "false",
+        "CLI_SERVER_JAVA_DEBUG": "false",
+        // Uncomment to set the JAVA_HOME for the codeql instance to use
+        // "CODEQL_JAVA_HOME": "/Library/Java/JavaVirtualMachines/jdk-12.0.1.jdk/Contents/Home"
       }
     },
     {
@@ -43,7 +45,6 @@
         "ts-node/register",
         "test/pure-tests/**/*.ts"
       ],
       "port": 9229,
-      "stopOnEntry": false,
       "sourceMaps": true,
       "console": "integratedTerminal",
@@ -56,9 +57,11 @@
       "runtimeExecutable": "${execPath}",
       "args": [
         "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
-        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index"
+        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index",
+        "--disable-workspace-trust",
+        "--disable-extensions",
+        "--disable-gpu"
       ],
-      "stopOnEntry": false,
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -72,9 +75,11 @@
       "args": [
         "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
         "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/minimal-workspace/index",
+        "--disable-workspace-trust",
+        "--disable-extensions",
+        "--disable-gpu",
         "${workspaceRoot}/extensions/ql-vscode/test/data"
       ],
-      "stopOnEntry": false,
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -88,12 +93,33 @@
       "args": [
         "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
         "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/cli-integration/index",
+        "--disable-workspace-trust",
+        "--disable-gpu",
+        "--disable-extension",
+        "eamodio.gitlens",
+        "--disable-extension",
+        "github.codespaces",
+        "--disable-extension",
+        "github.copilot",
         "${workspaceRoot}/extensions/ql-vscode/src/vscode-tests/cli-integration/data",
-        // Add a path to a checked out instance of the codeql repository so the libraries are
+        // Uncomment the last line and modify the path to a checked out
+        // instance of the codeql repository so the libraries are
         // available in the workspace for the tests.
         // "${workspaceRoot}/../codeql"
       ],
-      "stopOnEntry": false,
+      "env": {
+        // Optionally, set the version to use for the integration tests.
+        // Use "nightly" to use the latest nightly build.
+        // "CLI_VERSION": "2.7.0",
+
+        // If CLI_VERSION is set to nightly, set this to the url of the nightly build.
+        // "NIGHTLY_URL": "some url to grab the nightly build",
+
+        // Optionally, add a path to the codeql executable to be used during these tests.
+        // If not specified, one will be downloaded automatically.
+        // This option overrides the CLI_VERSION option.
+        // "CLI_PATH": "${workspaceRoot}/../semmle-code/target/intree/codeql/codeql",
+      },
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
```
**`.vscode/settings.json`** (vendored, 3 changed lines)

```diff
@@ -22,7 +22,8 @@
     "common/temp": true,
     "**/.vscode-test": true
   },
-  "typescript.tsdk": "./common/temp/node_modules/typescript/lib", // we want to use the TS server from our node_modules folder to control its version
+  "typescript.tsdk": "./extensions/ql-vscode/node_modules/typescript/lib", // we want to use the TS server from our node_modules folder to control its version
+  "typescript.enablePromptUseWorkspaceTsdk": true,
  "eslint.validate": [
    "javascript",
    "javascriptreact",
```
**`CODEOWNERS`** (new file, 2 lines)

```
**/* @github/codeql-vscode-reviewers
/extensions/ql-vscode/src/remote-queries/ @github/code-scanning-secexp-reviewers
```
**`CONTRIBUTING.md`** *(file name inferred from the content below)*

@@ -25,12 +25,13 @@ Here are a few things you can do that will increase the likelihood of your pull

* Follow the [style guide][style].
* Write tests. Tests that don't require the VS Code API are located [here](extensions/ql-vscode/test). Integration tests that do require the VS Code API are located [here](extensions/ql-vscode/src/vscode-tests).
* Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
* Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
* Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

## Setting up a local build

Make sure you have a fairly recent version of vscode (>1.32) and are using nodejs
version >=v10.13.0. (Tested on v10.15.1 and v10.16.0).
Make sure you have installed recent versions of vscode, node, and npm. Check the `engines` block in [`package.json`](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/package.json) file for compatible versions. Earlier versions may work, but we no longer test against them.

To automatically switch to the correct version of node, we recommend using [nvm](https://github.com/nvm-sh/nvm), which will pick-up the node version from `.nvmrc`.
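For example, a minimal sketch, assuming `nvm` is already installed and your shell is at the repository root:

```bash
cd extensions/ql-vscode   # the directory containing .nvmrc
nvm install               # installs the node version pinned in .nvmrc
nvm use                   # switches the current shell to that version
```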
### Installing all packages

@@ -57,7 +58,6 @@ We recommend that you keep `npm run watch` running in the backgound and you only

1. on first checkout
2. whenever any of the non-TypeScript resources have changed
3. on any change to files included in the webview

### Installing the extension

@@ -77,11 +77,11 @@ $ vscode/scripts/code-cli.sh --install-extension dist/vscode-codeql-*.vsix # if

You can use VS Code to debug the extension without explicitly installing it. Just open this directory as a workspace in VS Code, and hit `F5` to start a debugging session.

### Running the unit/integration tests
### Running the unit tests and integration tests that do not require a CLI instance

Ensure the `CODEQL_PATH` environment variable is set to point to the `codeql` cli executable.
Unit tests and many integration tests do not require a copy of the CodeQL CLI.

Outside of vscode, run:
Outside of vscode, in the `extensions/ql-vscode` directory, run:

```shell
npm run test && npm run integration
```

@@ -89,11 +89,27 @@ npm run test && npm run integration

Alternatively, you can run the tests inside of vscode. There are several vscode launch configurations defined that run the unit and integration tests. They can all be found in the debug view.

Only the _With CLI_ tests require a CLI instance to run. See below on how to do that.

Running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point to a checkout of the `github/codeql` repository. The appropriate CLI version will be downloaded as part of the test.
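For example, the variable could be set like this before launching the tests from a terminal (the sibling-directory location is an assumption; point it at wherever your `github/codeql` checkout lives):

```bash
# assumes github/codeql is cloned next to this repository
export TEST_CODEQL_PATH="$(pwd)/../codeql"
```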
### Running the integration tests

You will need to run CLI tests using a task from inside of VS Code called _Launch Integration Tests - With CLI_.

The CLI integration tests require the CodeQL standard libraries in order to run so you will need to clone a local copy of the `github/codeql` repository.

From inside of VSCode, open the `launch.json` file and in the _Launch Integration Tests - With CLI_ task, uncomment the `"${workspaceRoot}/../codeql"` line. If necessary, replace the value with a path to your checkout, and then run the task.

## Releasing (write access required)

1. Double-check the `CHANGELOG.md` contains all desired change comments and has the version to be released with date at the top.
   * Go through all recent PRs and make sure they are properly accounted for.
   * Make sure all changelog entries have links back to their PR(s) if appropriate.
1. Double-check that the node version we're using matches the one used for VS Code. If it doesn't, you will then need to update the node version in the following files:
   * `.nvmrc` - this will enable `nvm` to automatically switch to the correct node version when you're in the project folder
   * `.github/workflows/main.yml` - all the "node-version: <version>" settings
   * `.github/workflows/release.yml` - the "node-version: <version>" setting
1. Double-check that the extension `package.json` and `package-lock.json` have the version you intend to release. If you are doing a patch release (as opposed to minor or major version) this should already be correct.
1. Create a PR for this release:
   * This PR will contain any missing bits from steps 1 and 2. Most of the time, this will just be updating `CHANGELOG.md` with today's date.

@@ -101,19 +117,39 @@ Alternatively, you can run the tests inside of vscode. There are several vscode
   * Create a new commit with a message the same as the branch name.
   * Create a PR for this branch.
   * Wait for the PR to be merged into `main`
1. Trigger a release build on Actions by adding a new tag on branch `main` named after the release, as above. Note that when you push to upstream, you will need to fully qualify the ref. A command like this will work:
1. Switch to `main` and add a new tag on the `main` branch with your new version (named after the release), e.g.
   ```bash
   git checkout main
   git tag v1.3.6
   ```

   If you've accidentally created a badly named tag, you can delete it via
   ```bash
   git tag -d badly-named-tag
   ```
1. Push the new tag up:

   a. If you're using a fork of the repo:

   ```bash
   git push upstream refs/tags/v1.3.6
   ```

   b. If you're working straight in this repo:

   ```bash
   git push origin refs/tags/v1.3.6
   ```

   This will trigger [a release build](https://github.com/github/vscode-codeql/releases) on Actions.

   * **IMPORTANT** Make sure you are on the `main` branch and your local checkout is fully updated when you add the tag.
   * If you accidentally add the tag to the wrong ref, you can just force push it to the right one later (see the sketch below).
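   A minimal sketch of re-pointing a mis-placed tag (this reuses the example tag name `v1.3.6` from above; the target commit is a placeholder, and `origin` would be `upstream` when working from a fork):

   ```bash
   git tag -f v1.3.6 <correct-commit>          # move the tag locally (placeholder target)
   git push --force origin refs/tags/v1.3.6    # overwrite the tag on the remote
   ```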
1. Monitor the status of the release build in the `Release` workflow in the Actions tab.
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
   or look at the source if there's any doubt the right code is being shipped (one way to check the packaged version is sketched below).
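   A quick way to peek at the packaged version without fully unzipping (assuming the standard VSIX layout, where the manifest sits at `extension/package.json` inside the archive):

   ```bash
   unzip -p vscode-codeql-*.vsix extension/package.json | grep '"version"'
   ```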
1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
1. Go to the actions tab of the vscode-codeql repository and select the [Release workflow](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease).
   - If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
1. Approve the deployments of the correct Release workflow. This will automatically publish to Open VSX and VS Code Marketplace.

@@ -133,12 +169,7 @@ To regenerate the Open VSX token:

1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
1. Update the secret in the `publish-open-vsx` environment in the project settings.

To regenerate the VSCode Marketplace token:

1. Follow the instructions on [getting a PAT for Azure DevOps](https://code.visualstudio.com/api/working-with-extensions/publishing-extension#get-a-personal-access-token).
1. Update the secret in the `publish-vscode-marketplace` environment in the project settings.

Not that Azure DevOps PATs expire yearly and must be regenerated.
To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 90 days and must be regenerated.

## Resources
**`README.md`** *(file name inferred from the content below)*

@@ -16,7 +16,6 @@ To see what has changed in the last few versions of the extension, see the [Chan

* Provides an easy way to run queries from the large, open source repository of [CodeQL security queries](https://github.com/github/codeql).
* Adds IntelliSense to support you writing and editing your own CodeQL query and library files.

## Project goals and scope

This project will track new feature development in CodeQL and, whenever appropriate, bring that functionality to the Visual Studio Code experience.
**`.eslintrc.js`** *(file name inferred; likely `extensions/ql-vscode/.eslintrc.js`)*

@@ -22,8 +22,10 @@ module.exports = {

```js
      },
    ],
    "@typescript-eslint/explicit-function-return-type": "off",
    "@typescript-eslint/explicit-module-boundary-types": "off",
    "@typescript-eslint/no-non-null-assertion": "off",
    "@typescript-eslint/no-explicit-any": "off",
    "@typescript-eslint/no-floating-promises": [ "error", { ignoreVoid: true } ],
    "prefer-const": ["warn", { destructuring: "all" }],
    indent: "off",
    "@typescript-eslint/indent": "off",
```
**`extensions/ql-vscode/.nvmrc`** (new file, 1 line)

```
v16.13.0
```
@@ -1,10 +1,178 @@
|
||||
# CodeQL for Visual Studio Code: Changelog
|
||||
|
||||
## 1.6.8 - 29 June 2022
|
||||
|
||||
- Fix a bug where quick queries cannot be compiled if the core libraries are not in the workspace. [#1411](https://github.com/github/vscode-codeql/pull/1411)
|
||||
- Fix a bug where quick evaluation of library files would display an error message when using CodeQL CLI v2.10.0. [#1412](https://github.com/github/vscode-codeql/pull/1412)
|
||||
|
||||
## 1.6.7 - 15 June 2022
|
||||
|
||||
- Prints end-of-query evaluator log summaries to the Query Log. [#1349](https://github.com/github/vscode-codeql/pull/1349)
|
||||
- Be consistent about casing in Query History menu. [#1369](https://github.com/github/vscode-codeql/pull/1369)
|
||||
- Fix quoting string columns in exported CSV results. [#1379](https://github.com/github/vscode-codeql/pull/1379)
|
||||
|
||||
## 1.6.6 - 17 May 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.5 - 25 April 2022
|
||||
|
||||
- Re-enable publishing to open-vsx. [#1285](https://github.com/github/vscode-codeql/pull/1285)
|
||||
|
||||
## 1.6.4 - 6 April 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.3 - 4 April 2022
|
||||
|
||||
- Fix a bug where the AST viewer was not synchronizing its selected node when the editor selection changes. [#1230](https://github.com/github/vscode-codeql/pull/1230)
|
||||
- Avoid synchronizing the `codeQL.cli.executablePath` setting. [#1252](https://github.com/github/vscode-codeql/pull/1252)
|
||||
- Open the directory in the finder/explorer (instead of just highlighting it) when running the "Open query directory" command from the query history view. [#1235](https://github.com/github/vscode-codeql/pull/1235)
|
||||
- Ensure query label in the query history view changes are persisted across restarts. [#1235](https://github.com/github/vscode-codeql/pull/1235)
|
||||
- Prints end-of-query evaluator log summaries to the Query Server Console. [#1264](https://github.com/github/vscode-codeql/pull/1264)
|
||||
|
||||
## 1.6.1 - 17 March 2022
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.6.0 - 7 March 2022
|
||||
|
||||
- Fix a bug where database upgrades could not be resolved if some of the target pack's dependencies are outside of the workspace. [#1138](https://github.com/github/vscode-codeql/pull/1138)
|
||||
- Open the query server logs for query errors (instead of the extension log). This will make it easier to track down query errors. [#1158](https://github.com/github/vscode-codeql/pull/1158)
|
||||
- Fix a bug where queries took a long time to run if there are no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
|
||||
- [BREAKING CHANGE] The `codeQL.runningQueries.customLogDirectory` setting is deprecated and no longer has any function. Instead, all query log files will be stored in the query history directory, next to the query results. [#1178](https://github.com/github/vscode-codeql/pull/1178)
|
||||
- Add a _Open query directory_ command for query items. This command opens the directory containing all artifacts for a query. [#1179](https://github.com/github/vscode-codeql/pull/1179)
|
||||
- Add options to display evaluator logs for a given query run. Some information that was previously found in the query server output may now be found here. [#1186](https://github.com/github/vscode-codeql/pull/1186)
|
||||
|
||||
## 1.5.11 - 10 February 2022
|
||||
|
||||
- Fix a bug where invoking _View AST_ from the file explorer would not view the selected file. Instead it would view the active editor. Also, prevent the _View AST_ from appearing if the current selection includes a directory or multiple files. [#1113](https://github.com/github/vscode-codeql/pull/1113)
|
||||
- Add query history items as soon as a query is run, including new icons for each history item. [#1094](https://github.com/github/vscode-codeql/pull/1094)
|
||||
- Save query history items across restarts. Items will be saved for 30 days and can be overwritten by setting the `codeQL.queryHistory.ttl` configuration setting. [#1130](https://github.com/github/vscode-codeql/pull/1130)
|
||||
- Allow in-progress query items to be cancelled from the query history view. [#1105](https://github.com/github/vscode-codeql/pull/1105)
|
||||
|
||||
## 1.5.10 - 25 January 2022
|
||||
|
||||
- Fix a bug where the results view moved column even when it was already visible. [#1070](https://github.com/github/vscode-codeql/pull/1070)
|
||||
- Add packaging-related commands. _CodeQL: Download Packs_ downloads query packs from the package registry that can be run locally, and _CodeQL: Install Pack Dependencies_ installs dependencies for packs in your workspace. [#1076](https://github.com/github/vscode-codeql/pull/1076)
|
||||
|
||||
## 1.5.9 - 17 December 2021
|
||||
|
||||
- Avoid creating a third column when opening the results view. The results view will always open to the right of the active editor, unless the active editor is in the rightmost editor column. In that case open in the leftmost column. [#1037](https://github.com/github/vscode-codeql/pull/1037)
|
||||
- Add a CodeLens to make the Quick Evaluation command more accessible. Click the `Quick Evaluation` prompt above a predicate definition in the editor to evaluate that predicate on its own. You can enable/disable this feature in the `codeQL.runningQueries.quickEvalCodelens` setting. [#1035](https://github.com/github/vscode-codeql/pull/1035) & [#1052](https://github.com/github/vscode-codeql/pull/1052)
|
||||
- Fix a bug where the _Alerts_ option would show in the results view even if there is no alerts table available. [#1038](https://github.com/github/vscode-codeql/pull/1038)
|
||||
|
||||
## 1.5.8 - 2 December 2021
|
||||
|
||||
- Emit a more explicit error message when a user tries to add a database with an unzipped source folder to the workspace. [#1021](https://github.com/github/vscode-codeql/pull/1021)
|
||||
- Ensure `src.zip` archives are used as the canonical source instead of `src` folders when importing databases. [#1025](https://github.com/github/vscode-codeql/pull/1025)
|
||||
|
||||
## 1.5.7 - 23 November 2021
|
||||
|
||||
- Fix the _CodeQL: Open Referenced File_ command for Windows systems. [#979](https://github.com/github/vscode-codeql/pull/979)
|
||||
- Support large SARIF results files (>4GB) without crashing VS Code. [#1004](https://github.com/github/vscode-codeql/pull/1004)
|
||||
- Fix a bug that shows 'Set current database' when hovering over the currently selected database in the databases view. [#976](https://github.com/github/vscode-codeql/pull/976)
|
||||
- Fix a bug with importing large databases. Databases over 4GB can now be imported directly from LGTM or from a zip file. This functionality is only available when using CodeQL CLI version 2.6.0 or later. [#971](https://github.com/github/vscode-codeql/pull/971)
|
||||
- Replace certain control codes (`U+0000` - `U+001F`) with their corresponding control labels (`U+2400` - `U+241F`) in the results view. [#963](https://github.com/github/vscode-codeql/pull/963)
|
||||
- Allow case-insensitive project slugs for GitHub repositories when adding a CodeQL database from LGTM. [#978](https://github.com/github/vscode-codeql/pull/961)
|
||||
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See [the CodeQL CLI docs](https://codeql.github.com/docs/codeql-cli/testing-query-help-files) for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
|
||||
- Make "Open Referenced File" command accessible from the active editor menu. [#989](https://github.com/github/vscode-codeql/pull/989)
|
||||
- Fix a bug where result set names in the result set drop-down were disappearing when viewing a sorted table. [#1007](https://github.com/github/vscode-codeql/pull/1007)
|
||||
- Allow query result locations with 0 as the end column value. These are treated as the first column in the line. [#1002](https://github.com/github/vscode-codeql/pull/1002)
|
||||
|
||||
## 1.5.6 - 07 October 2021
|
||||
|
||||
- Add progress messages to LGTM download option. This makes the two-step process (selecting a project, then selecting a language) more clear. [#960](https://github.com/github/vscode-codeql/pull/960)
|
||||
- Remove line about selecting a language from the dropdown when downloading database from LGTM. This makes the download progress visible when the popup is not expanded. [#957](https://github.com/github/vscode-codeql/pull/957)
|
||||
- Fix a bug where copying the version information fails when a CodeQL CLI cannot be found. [#958](https://github.com/github/vscode-codeql/pull/958)
|
||||
- Avoid a race condition when deleting databases that can cause occasional errors. [#959](https://github.com/github/vscode-codeql/pull/959)
|
||||
- Update CodeQL logos. [#965](https://github.com/github/vscode-codeql/pull/965)
|
||||
|
||||
## 1.5.5 - 08 September 2021
|
||||
|
||||
- Fix bug where a query is sometimes run before the file is saved. [#947](https://github.com/github/vscode-codeql/pull/947)
|
||||
- Fix broken contextual queries, including _View AST_. [#949](https://github.com/github/vscode-codeql/pull/949)
|
||||
|
||||
## 1.5.4 - 02 September 2021
|
||||
|
||||
- Add support for filename pattern in history view. [#930](https://github.com/github/vscode-codeql/pull/930)
|
||||
- Add an option _View Results (CSV)_ to view the results of a non-alert query. The existing options for alert queries have been renamed to _View Alerts_ to avoid confusion. [#929](https://github.com/github/vscode-codeql/pull/929)
|
||||
- Allow users to specify the number of paths to display for each alert. [#931](https://github.com/github/vscode-codeql/pull/931)
|
||||
- Adjust pagination controls in _CodeQL Query Results_ to always be visible [#936](https://github.com/github/vscode-codeql/pull/936)
|
||||
- Fix bug where _View AST_ fails due to recent refactoring in the standard library and query packs. [#939](https://github.com/github/vscode-codeql/pull/939)
|
||||
|
||||
## 1.5.3 - 18 August 2021
|
||||
|
||||
- Add a command _CodeQL: Run Query on Multiple Databases_, which lets users select multiple databases to run a query on. [#898](https://github.com/github/vscode-codeql/pull/898)
|
||||
- Autodetect what language a query targets. This refines the _CodeQL: Run Query on Multiple Databases_ command to only show relevant databases. [#915](https://github.com/github/vscode-codeql/pull/915)
|
||||
- Adjust test log output to display diffs only when comparing failed test results with expected test results. [#920](https://github.com/github/vscode-codeql/pull/920)
|
||||
|
||||
## 1.5.2 - 13 July 2021
|
||||
|
||||
- Add the _Add Database Source to Workspace_ command to the right-click context menu in the databases view. This lets users re-add a database's source folder to the workspace and browse the source code. [#891](https://github.com/github/vscode-codeql/pull/891)
|
||||
- Fix markdown rendering in the description of the `codeQL.cli.executablePath` setting. [#908](https://github.com/github/vscode-codeql/pull/908)
|
||||
- Fix the _Open Query Results_ command in the query history view. [#909](https://github.com/github/vscode-codeql/pull/909)
|
||||
|
||||
## 1.5.1 - 23 June 2021
|
||||
|
||||
No user facing changes.
|
||||
|
||||
## 1.5.0 - 14 June 2021
|
||||
|
||||
- Display CodeQL CLI version being downloaded during an upgrade. [#862](https://github.com/github/vscode-codeql/pull/862)
|
||||
- Display a helpful message and link to documentation when a query produces no results. [#866](https://github.com/github/vscode-codeql/pull/866)
|
||||
- Refresh test databases automatically after a test run. [#868](https://github.com/github/vscode-codeql/pull/868)
|
||||
- Allow users to specify a custom directory for storing query server logs (`codeQL.runningQueries.customLogDirectory`). The extension will not delete these logs automatically. [#863](https://github.com/github/vscode-codeql/pull/863)
|
||||
- Support the VS Code [Workspace Trust feature](https://code.visualstudio.com/docs/editor/workspace-trust). This extension is now enabled in untrusted workspaces, but it restricts commands that contain arbitrary paths. [#861](https://github.com/github/vscode-codeql/pull/861)
|
||||
- Allow the `codeQL.cli.executablePath` configuration setting to be set in workspace-scoped configuration files. This means that each workspace can now specify its own CodeQL CLI compiler, a feature that is unblocked due to implementing Workspace Trust. [#861](https://github.com/github/vscode-codeql/pull/861)
|
||||
|
||||
## 1.4.8 - 05 May 2021
|
||||
|
||||
- Copy version information to the clipboard when a user clicks the CodeQL section of the status bar. [#845](https://github.com/github/vscode-codeql/pull/845)
|
||||
- Ensure changes in directories that contain tests will be properly updated in the test explorer. [#846](https://github.com/github/vscode-codeql/pull/846)
|
||||
- Remind users to choose a language when downloading a database from LGTM. [#852](https://github.com/github/vscode-codeql/pull/852)
|
||||
|
||||
## 1.4.7 - 23 April 2021
|
||||
|
||||
- Fix a bug that prevented the results view from being loaded. [#842](https://github.com/github/vscode-codeql/pull/842)
|
||||
|
||||
## 1.4.6 - 21 April 2021
|
||||
|
||||
- Avoid showing an error popup when running a query with `@kind table` metadata. [#814](https://github.com/github/vscode-codeql/pull/814)
|
||||
- Add an option to jump from a .qlref file to the .ql file it references. [#815](https://github.com/github/vscode-codeql/pull/815)
|
||||
- Avoid opening the results panel when a database is deleted. [#831](https://github.com/github/vscode-codeql/pull/831)
|
||||
- Forward all query metadata to the CLI when interpreting results. [#838](https://github.com/github/vscode-codeql/pull/838)
|
||||
|
||||
## 1.4.5 - 22 March 2021
|
||||
|
||||
- Avoid showing an error popup when user runs a query without `@kind` metadata. [#801](https://github.com/github/vscode-codeql/pull/801)
|
||||
- Fix running of tests when the `ms-python` extension is installed. [#803](https://github.com/github/vscode-codeql/pull/803)
|
||||
|
||||
## 1.4.4 - 19 March 2021

- Introduce evaluator options for saving intermediate results to the disk cache (`codeQL.runningQueries.saveCache`) and for limiting the size of this cache (`codeQL.runningQueries.cacheSize`); see the sample settings after this list. [#778](https://github.com/github/vscode-codeql/pull/778)
- Respect the `codeQL.runningQueries.numberOfThreads` setting when creating SARIF files during result interpretation. [#771](https://github.com/github/vscode-codeql/pull/771)
- Allow using raw LGTM project slugs for fetching LGTM databases. [#769](https://github.com/github/vscode-codeql/pull/769)
- Provide better error messages when BQRS interpretation fails to produce SARIF. [#770](https://github.com/github/vscode-codeql/pull/770)
- Implement sorting of the query history view by name, date, and results count. [#777](https://github.com/github/vscode-codeql/pull/777)
- Add a configuration option to pass additional arguments to the CLI when running tests. [#785](https://github.com/github/vscode-codeql/pull/785)
- Introduce an option to view query results as CSV. [#784](https://github.com/github/vscode-codeql/pull/784)
- Add snippets for commonly used QL statements. [#782](https://github.com/github/vscode-codeql/pull/782)
- Provide more descriptive error messages on QL test failures. [#788](https://github.com/github/vscode-codeql/pull/788)
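A minimal sketch of the two disk-cache settings introduced above; the cache size shown is a placeholder, not a recommended value:

```jsonc
// settings.json: hypothetical example values
{
  // Aggressively save intermediate results; increases disk usage and initial evaluation time.
  "codeQL.runningQueries.saveCache": true,
  // Cap the disk cache at roughly 10 GB (the value is in MB).
  "codeQL.runningQueries.cacheSize": 10240
}
```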
## 1.4.3 - 22 February 2021

- Avoid displaying an error when removing orphaned databases and the storage folder does not exist. [#748](https://github.com/github/vscode-codeql/pull/748)
- Add better error messages when the AST Viewer is unable to create an AST. [#753](https://github.com/github/vscode-codeql/pull/753)
- Cache AST viewing operations so that subsequent calls to view the AST of a single file will be extremely fast. [#753](https://github.com/github/vscode-codeql/pull/753)
- Ensure the CodeQL version in the status bar updates correctly when the version changes. [#754](https://github.com/github/vscode-codeql/pull/754)
- Avoid deleting the quick query file when it is re-opened. [#747](https://github.com/github/vscode-codeql/pull/747)

## 1.4.2 - 2 February 2021

- Add a status bar item for the CodeQL CLI to show the current version. [#741](https://github.com/github/vscode-codeql/pull/741)
- Fix the version constraint for flagging CLI support of non-destructive updates. [#744](https://github.com/github/vscode-codeql/pull/744)
- Add a _More Information_ button in the telemetry popup that opens [TELEMETRY.md](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/TELEMETRY.md) in a browser tab. [#742](https://github.com/github/vscode-codeql/pull/742)
- Add a _More Information_ button in the telemetry popup that opens the [telemetry documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/about-telemetry-in-codeql-for-visual-studio-code) in a browser tab. [#742](https://github.com/github/vscode-codeql/pull/742)

## 1.4.1 - 29 January 2021

@@ -14,7 +182,7 @@

- Fix a bug where databases are not reregistered when the query server restarts. [#734](https://github.com/github/vscode-codeql/pull/734)
- Fix a bug where upgrade requests were erroneously being marked as failed. [#734](https://github.com/github/vscode-codeql/pull/734)
- On a strictly opt-in basis, collect anonymized usage data from the VS Code extension, helping improve CodeQL's usability and performance. See [TELEMETRY.md](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/TELEMETRY.md) for more information on exactly what data is collected and what it is used for. [#611](https://github.com/github/vscode-codeql/pull/611)
- On a strictly opt-in basis, collect anonymized usage data from the VS Code extension, helping improve CodeQL's usability and performance. See the [telemetry documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/about-telemetry-in-codeql-for-visual-studio-code) for more information on exactly what data is collected and what it is used for. [#611](https://github.com/github/vscode-codeql/pull/611)

## 1.3.10 - 20 January 2021
@@ -1,6 +1,6 @@
# CodeQL extension for Visual Studio Code

This project is an extension for Visual Studio Code that adds rich language support for [CodeQL](https://help.semmle.com/codeql) and allows you to easily find problems in codebases. In particular, the extension:
This project is an extension for Visual Studio Code that adds rich language support for [CodeQL](https://codeql.github.com/docs/) and allows you to easily find problems in codebases. In particular, the extension:

- Enables you to use CodeQL to query databases generated from source code.
- Shows the flow of data through the results of path queries, which is essential for triaging security results.
@@ -12,7 +12,7 @@ To see what has changed in the last few versions of the extension, see the [Chan
## Quick start overview

The information in this `README` file describes the quickest way to start using CodeQL.
For information about other configurations, see the separate [CodeQL help](https://help.semmle.com/codeql/codeql-for-vscode.html).
For information about other configurations, see the separate [CodeQL help](https://codeql.github.com/docs/codeql-for-visual-studio-code/).

### Quick start: Installing and configuring the extension

@@ -40,9 +40,9 @@ The CodeQL extension requires a minimum of Visual Studio Code 1.39. Older versi

### Checking access to the CodeQL CLI

The extension uses the [CodeQL CLI](https://help.semmle.com/codeql/codeql-cli.html) to compile and run queries. The extension automatically manages access to the CLI for you by default (recommended). To check for updates to the CodeQL CLI, you can use the **CodeQL: Check for CLI Updates** command.
The extension uses the [CodeQL CLI](https://codeql.github.com/docs/codeql-cli/) to compile and run queries. The extension automatically manages access to the CLI for you by default (recommended). To check for updates to the CodeQL CLI, you can use the **CodeQL: Check for CLI Updates** command.

If you want to override the default behavior and use a CodeQL CLI that's already on your machine, see [Configuring access to the CodeQL CLI](https://help.semmle.com/codeql/codeql-for-vscode/procedures/setting-up.html#configuring-access-to-the-codeql-cli).
If you want to override the default behavior and use a CodeQL CLI that's already on your machine, see [Configuring access to the CodeQL CLI](https://codeql.github.com/docs/codeql-for-visual-studio-code/setting-up-codeql-in-visual-studio-code/#configuring-access-to-the-codeql-cli).

If you have any difficulty with CodeQL CLI access, see the **CodeQL Extension Log** in the **Output** view for any error messages.

@@ -52,7 +52,7 @@ When you're working with CodeQL, you need access to the standard CodeQL librari
Initially, we recommend that you clone and use the ready-to-use [starter workspace](https://github.com/github/vscode-codeql-starter/).
This includes libraries and queries for the main supported languages, with folders set up ready for your custom queries. After cloning the workspace (use `git clone --recursive`), you can use it in the same way as any other VS Code workspace—with the added advantage that you can easily update the CodeQL libraries.

For information about configuring an existing workspace for CodeQL, [see the documentation](https://help.semmle.com/codeql/codeql-for-vscode/procedures/setting-up.html#updating-an-existing-workspace-for-codeql).
For information about configuring an existing workspace for CodeQL, [see the documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/setting-up-codeql-in-visual-studio-code/#updating-an-existing-workspace-for-codeql).

## Upgrading CodeQL standard libraries

@@ -75,7 +75,7 @@ You can find all the commands contributed by the extension in the Command Palett

### Importing a database from LGTM

While you can use the [CodeQL CLI to create your own databases](https://help.semmle.com/codeql/codeql-cli/procedures/create-codeql-database.html), the simplest way to start is by downloading a database from LGTM.com.
While you can use the [CodeQL CLI to create your own databases](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/), the simplest way to start is by downloading a database from LGTM.com.

1. Open [LGTM.com](https://lgtm.com/#explore) in your browser.
1. Search for a project you're interested in, for example [Apache Kafka](https://lgtm.com/projects/g/apache/kafka).
@@ -100,11 +100,11 @@ If there are any problems running a query, a notification is displayed in the bo

## What next?

For more information about the CodeQL extension, [see the documentation](https://help.semmle.com/codeql/codeql-for-vscode.html). Otherwise, you could:
For more information about the CodeQL extension, [see the documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/). Otherwise, you could:

- [Create a database for a different codebase](https://help.semmle.com/codeql/codeql-cli/procedures/create-codeql-database.html).
- [Create a database for a different codebase](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/).
- [Try out variant analysis](https://help.semmle.com/QL/learn-ql/ql-training.html).
- [Learn more about CodeQL](https://help.semmle.com/QL/learn-ql/).
- [Learn more about CodeQL](https://codeql.github.com/docs/).
- [Read how security researchers use CodeQL to find CVEs](https://securitylab.github.com/research).

## License

@@ -113,4 +113,4 @@ The CodeQL extension for Visual Studio Code is [licensed](LICENSE.md) under the

## Data and Telemetry

If you specifically opt-in to permit GitHub to do so, GitHub will collect usage data and metrics for the purposes of helping the core developers to improve the CodeQL extension for VS Code. This data will not be shared with any parties outside of GitHub. IP addresses and installation IDs will be retained for a maximum of 30 days. Anonymous data will be retained for a maximum of 180 days. Please see [telemetry](TELEMETRY.md) for more information.
If you specifically opt-in to permit GitHub to do so, GitHub will collect usage data and metrics for the purposes of helping the core developers to improve the CodeQL extension for VS Code. This data will not be shared with any parties outside of GitHub. IP addresses and installation IDs will be retained for a maximum of 30 days. Anonymous data will be retained for a maximum of 180 days. For more information about telemetry, [see the documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/about-telemetry-in-codeql-for-visual-studio-code).
@@ -1,47 +0,0 @@
# Telemetry in the CodeQL extension for VS Code

If you specifically opt-in to permit GitHub to do so, GitHub will collect usage data and metrics for the purposes of helping the core developers to improve the CodeQL extension for VS Code. This data will not be shared with any parties outside of GitHub. IP addresses and installation IDs will be retained for a maximum of 30 days. Anonymous data will be retained for a maximum of 180 days.

## Why do you collect data?

GitHub collects aggregated, anonymous usage data and metrics to help us improve CodeQL for VS Code. IP addresses and installation IDs are collected only to ensure that anonymous data is not duplicated during aggregation.

## What data is collected

If you opt in, GitHub collects the following information related to the usage of the extension. The data collected are:

- The identifiers of any CodeQL-related [VS Code commands](https://code.visualstudio.com/docs/getstarted/tips-and-tricks#_command-palette) that are run
- For each command: the timestamp, time taken, and whether or not the command completed successfully
- VS Code and extension version
- Randomly generated GUID that uniquely identifies a CodeQL extension installation. (Discarded before aggregation.)
- IP address of the client sending the telemetry data. (Discarded before aggregation.)
- Whether or not the `codeQL.canary` setting is enabled and set to `true`

## How long will data be retained?

IP address and GUIDs will be retained for a maximum of 30 days. Anonymous, aggregated data that includes command identifiers, run times, and timestamps will be retained for a maximum of 180 days.

## Who will have access to this data?

IP address and GUIDs will only be available to the core developers of CodeQL. Aggregated data will be available to GitHub employees.

## What data is **NOT** collected?

We only collect the minimal amount of data we need to answer the questions about how our users are experiencing this product. To that end, we do not collect the following information:

- No GitHub user ID
- No CodeQL database names or contents
- No contents of CodeQL queries
- No filesystem paths

## How do I disable telemetry reporting?

When telemetry collection is disabled, no data will be sent to GitHub servers.

You can disable telemetry collection by setting `codeQL.telemetry.enableTelemetry` to `false` in [your settings](https://code.visualstudio.com/docs/getstarted/settings#_settings-editor). Telemetry collection is _disabled_ by default.

Additionally, telemetry collection will be disabled if the global `telemetry.enableTelemetry` setting is set to `false`. For more information on global telemetry collection, see [Microsoft's documentation](https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting).
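A minimal sketch of the two switches described above, as they might appear in a user `settings.json` (shown with the values that keep telemetry off):

```jsonc
// settings.json: both settings must be true for CodeQL telemetry to be sent
{
  // Extension-specific opt-in; false is the default.
  "codeQL.telemetry.enableTelemetry": false,
  // Global VS Code telemetry switch; if false, CodeQL telemetry is disabled regardless of the setting above.
  "telemetry.enableTelemetry": false
}
```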
## More information

See GitHub's [Privacy Statement](https://docs.github.com/en/free-pro-team@latest/github/site-policy/github-privacy-statement) and [Terms of Service](https://docs.github.com/en/free-pro-team@latest/github/site-policy/github-terms-of-service) for more information.
@@ -1,5 +1,6 @@
import * as gulp from 'gulp';
import * as replace from 'gulp-replace';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const replace = require('gulp-replace');

/** Inject the application insights key into the telemetry file */
export function injectAppInsightsKey() {
@@ -1,5 +1,4 @@
import * as fs from 'fs-extra';
import * as jsonc from 'jsonc-parser';
import * as path from 'path';

export interface DeployedPackage {
@@ -13,6 +12,7 @@ const packageFiles = [
  'CHANGELOG.md',
  'README.md',
  'language-configuration.json',
  'snippets.json',
  'media',
  'node_modules',
  'out'
@@ -27,7 +27,7 @@ async function copyPackage(sourcePath: string, destPath: string): Promise<void>

export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
  try {
    const packageJson: any = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));
    const packageJson: any = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

    // Default to development build; use flag --release to indicate release build.
    const isDevBuild = !process.argv.includes('--release');
@@ -1,15 +1,28 @@
import * as gulp from 'gulp';
import { compileTypeScript, watchTypeScript, copyViewCss } from './typescript';
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput, watchCss } from './typescript';
import { compileTextMateGrammar } from './textmate';
import { copyTestData } from './tests';
import { compileView } from './webpack';
import { compileView, watchView } from './webpack';
import { packageExtension } from './package';
import { injectAppInsightsKey } from './appInsights';

export const buildWithoutPackage =
  gulp.parallel(
    compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss
  gulp.series(
    cleanOutput,
    gulp.parallel(
      compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss
    )
  );

export { compileTextMateGrammar, watchTypeScript, compileTypeScript, copyTestData, injectAppInsightsKey };
export {
  cleanOutput,
  compileTextMateGrammar,
  watchTypeScript,
  watchView,
  compileTypeScript,
  copyTestData,
  injectAppInsightsKey,
  compileView,
  watchCss
};
export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
@@ -219,14 +219,14 @@ function transformFile(yaml: any) {
}

export function transpileTextMateGrammar() {
  return through.obj((file: Vinyl, _encoding: string, callback: Function): void => {
  return through.obj((file: Vinyl, _encoding: string, callback: (err: string | null, file: Vinyl | PluginError) => void): void => {
    if (file.isNull()) {
      callback(null, file);
    }
    else if (file.isBuffer()) {
      const buf: Buffer = file.contents;
      const yamlText: string = buf.toString('utf8');
      const jsonData: any = jsYaml.safeLoad(yamlText);
      const jsonData: any = jsYaml.load(yamlText);
      transformFile(jsonData);

      file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), 'utf8');
@@ -1,11 +1,11 @@
{
  "$schema": "http://json.schemastore.org/tsconfig",
  "$schema": "https://json.schemastore.org/tsconfig",
  "compilerOptions": {
    "declaration": true,
    "strict": true,
    "module": "commonjs",
    "target": "es2017",
    "lib": ["es6"],
    "lib": ["ES2021"],
    "moduleResolution": "node",
    "sourceMap": true,
    "rootDir": ".",
@@ -16,7 +16,8 @@
    "noImplicitReturns": true,
    "experimentalDecorators": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true
    "noUnusedParameters": true,
    "esModuleInterop": true
  },
  "include": ["*.ts"]
}
@@ -2,6 +2,7 @@ import * as colors from 'ansi-colors';
import * as gulp from 'gulp';
import * as sourcemaps from 'gulp-sourcemaps';
import * as ts from 'gulp-typescript';
import * as del from 'del';

function goodReporter(): ts.reporter.Reporter {
  return {
@@ -20,6 +21,10 @@ function goodReporter(): ts.reporter.Reporter {

const tsProject = ts.createProject('tsconfig.json');

export function cleanOutput() {
  return tsProject.projectDirectory ? del(tsProject.projectDirectory + '/out/*') : Promise.resolve();
}

export function compileTypeScript() {
  return tsProject.src()
    .pipe(sourcemaps.init())
@@ -35,8 +40,12 @@ export function watchTypeScript() {
  gulp.watch('src/**/*.ts', compileTypeScript);
}

export function watchCss() {
  gulp.watch('src/**/*.css', copyViewCss);
}

/** Copy CSS files for the results view into the output directory. */
export function copyViewCss() {
  return gulp.src('src/view/*.css')
  return gulp.src('src/**/view/*.css')
    .pipe(gulp.dest('out'));
}
@@ -6,6 +6,7 @@ export const config: webpack.Configuration = {
  entry: {
    resultsView: './src/view/results.tsx',
    compareView: './src/compare/view/Compare.tsx',
    remoteQueriesView: './src/remote-queries/view/RemoteQueries.tsx',
  },
  output: {
    path: path.resolve(__dirname, '..', 'out'),
@@ -13,7 +14,10 @@ export const config: webpack.Configuration = {
  },
  devtool: 'inline-source-map',
  resolve: {
    extensions: ['.js', '.ts', '.tsx', '.json']
    extensions: ['.js', '.ts', '.tsx', '.json'],
    fallback: {
      path: require.resolve('path-browserify')
    }
  },
  module: {
    rules: [
@@ -2,27 +2,50 @@ import * as webpack from 'webpack';
import { config } from './webpack.config';

export function compileView(cb: (err?: Error) => void) {
  webpack(config).run((error, stats) => {
  doWebpack(config, true, cb);
}

export function watchView(cb: (err?: Error) => void) {
  const watchConfig = {
    ...config,
    watch: true,
    watchOptions: {
      aggregateTimeout: 200,
      poll: 1000,
    }
  };
  doWebpack(watchConfig, false, cb);
}

function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
  const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
    if (error) {
      cb(error);
    }
    console.log(stats.toString({
      errorDetails: true,
      colors: true,
      assets: false,
      builtAt: false,
      version: false,
      hash: false,
      entrypoints: false,
      timings: false,
      modules: false,
      errors: true
    }));
    if (stats.hasErrors()) {
      cb(new Error('Compilation errors detected.'));
      return;
    if (stats) {
      console.log(stats.toString({
        errorDetails: true,
        colors: true,
        assets: false,
        builtAt: false,
        version: false,
        hash: false,
        entrypoints: false,
        timings: false,
        modules: false,
        errors: true
      }));
      if (stats.hasErrors()) {
        if (failOnError) {
          cb(new Error('Compilation errors detected.'));
          return;
        } else {
          console.error('Compilation errors detected.');
        }
      }
      cb();
    }
  };

    cb();
  });
  webpack(internalConfig, resultCb);
}
|
Before Width: | Height: | Size: 499 KiB After Width: | Height: | Size: 31 KiB |
4
extensions/ql-vscode/media/dark/github.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<!-- From https://github.com/microsoft/vscode-icons -->
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97553 0C3.57186 0 0 3.57186 0 7.97553C0 11.4985 2.29969 14.4832 5.43119 15.5596C5.82263 15.6086 5.96942 15.3639 5.96942 15.1682C5.96942 14.9725 5.96942 14.4832 5.96942 13.7982C3.76758 14.2875 3.27829 12.7217 3.27829 12.7217C2.93578 11.792 2.39755 11.5474 2.39755 11.5474C1.66361 11.0581 2.44648 11.0581 2.44648 11.0581C3.22936 11.107 3.66972 11.8899 3.66972 11.8899C4.40367 13.1131 5.52905 12.7706 5.96942 12.5749C6.01835 12.0367 6.263 11.6942 6.45872 11.4985C4.69725 11.3028 2.83792 10.6177 2.83792 7.53517C2.83792 6.65443 3.1315 5.96942 3.66972 5.38226C3.62079 5.23547 3.32722 4.40367 3.76758 3.32722C3.76758 3.32722 4.4526 3.1315 5.96942 4.15902C6.6055 3.9633 7.29052 3.91437 7.97553 3.91437C8.66055 3.91437 9.34557 4.01223 9.98165 4.15902C11.4985 3.1315 12.1835 3.32722 12.1835 3.32722C12.6239 4.40367 12.3303 5.23547 12.2813 5.43119C12.7706 5.96942 13.1131 6.70336 13.1131 7.5841C13.1131 10.6667 11.2538 11.3028 9.49235 11.4985C9.78593 11.7431 10.0306 12.2324 10.0306 12.9664C10.0306 14.0428 10.0306 14.8746 10.0306 15.1682C10.0306 15.3639 10.1774 15.6086 10.5688 15.5596C13.7492 14.4832 16 11.4985 16 7.97553C15.9511 3.57186 12.3792 0 7.97553 0Z" fill="#C5C5C5"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.3 KiB |
15
extensions/ql-vscode/media/dark/sort-num.svg
Normal file
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" fill="none"
|
||||
viewBox="0 0 432 432" style="enable-background:new 0 0 432 432;" xml:space="preserve">
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<polygon points="234.24,9.067 183.893,59.413 284.587,59.413" fill="#C5C5C5"/>
|
||||
<path d="m 259.24622,341.40906 v -32.34375 q 13.35937,6.32812 27.07031,9.66797 13.71094,3.33984 26.89453,3.33984 35.15625,0 53.61328,-23.55469 18.63282,-23.73047 21.26953,-71.89453 -10.19531,15.11719 -25.83984,23.20313 -15.64453,8.08593 -34.62891,8.08593 -39.375,0 -62.40234,-23.73046 -22.85156,-23.90625 -22.85156,-65.21485 0,-40.42969 23.90625,-64.86328 23.90625,-24.433594 63.63281,-24.433594 45.52734,0 69.43359,34.980474 24.08204,34.80468 24.08204,101.25 0,62.05078 -29.53125,99.14062 -29.35547,36.91406 -79.10157,36.91406 -13.35937,0 -27.07031,-2.63672 -13.71094,-2.63671 -28.47656,-7.91015 z m 70.66406,-111.26953 q 23.90625,0 37.79297,-16.34766 14.0625,-16.34766 14.0625,-44.82422 0,-28.30078 -14.0625,-44.64844 -13.88672,-16.52343 -37.79297,-16.52343 -23.90625,0 -37.96875,16.52343 -13.88672,16.34766 -13.88672,44.64844 0,28.47656 13.88672,44.82422 14.0625,16.34766 37.96875,16.34766 z" fill="#C5C5C5" />
|
||||
<polygon points="234.24,422.933 283.947,373.227 184.533,373.227" fill="#C5C5C5"/>
|
||||
<path d="M 35.300905,316.97546 H 93.308718 V 116.76062 L 30.203249,129.41687 V 97.07312 L 92.957155,84.41687 h 35.507815 v 232.55859 h 58.00781 v 29.88282 H 35.300905 Z" fill="#C5C5C5"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
7
extensions/ql-vscode/media/drive.svg
Normal file
@@ -0,0 +1,7 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M15.5 12.1952C15.5 12.9126 14.9137 13.4996 14.1957 13.4996H1.80435C1.08696 13.4996 0.5 12.9126 0.5 12.1952L0.5 9.80435C0.5 9.08696 1.08696 8.5 1.80435 8.5H14.1956C14.9137 8.5 15.5 9.08696 15.5 9.80435L15.5 12.1952Z" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M2.45654 11.5H13.5435" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.5 9.5C13.224 9.5 13 9.725 13 10C13 10.275 13.224 10.5 13.5 10.5C13.776 10.5 14 10.275 14 10C14 9.725 13.776 9.5 13.5 9.5" fill="#959DA5"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.5 9.5C11.224 9.5 11 9.725 11 10C11 10.275 11.224 10.5 11.5 10.5C11.776 10.5 12 10.275 12 10C12 9.725 11.776 9.5 11.5 9.5" fill="#959DA5"/>
|
||||
<path d="M15.5 9.81464L13.8728 2.76261C13.6922 2.06804 12.9572 1.5 12.2391 1.5H3.76087C3.04348 1.5 2.30848 2.06804 2.12783 2.76261L0.5 9.8" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
16
extensions/ql-vscode/media/globe.svg
Normal file
@@ -0,0 +1,16 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="7.5" cy="7.5" r="7" stroke="#959DA5"/>
|
||||
<mask id="mask0_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="15" height="15">
|
||||
<circle cx="7.5" cy="7.5" r="7.5" fill="#C4C4C4"/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_394_2982)">
|
||||
<path d="M14.5 7.5C14.5 9.42971 13.6822 11.1907 12.5493 12.4721C11.4035 13.7683 10.0054 14.5 8.90625 14.5C7.84644 14.5 6.81131 13.8113 6.01569 12.5383C5.22447 11.2724 4.71875 9.49235 4.71875 7.5C4.71875 5.50765 5.22447 3.72765 6.01569 2.4617C6.81131 1.1887 7.84644 0.5 8.90625 0.5C10.0054 0.5 11.4035 1.23172 12.5493 2.52786C13.6822 3.80934 14.5 5.57029 14.5 7.5Z" stroke="#959DA5"/>
|
||||
</g>
|
||||
<mask id="mask1_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="1" y="0" width="16" height="15">
|
||||
<circle cx="9.375" cy="7.5" r="7.5" fill="#C4C4C4"/>
|
||||
</mask>
|
||||
<g mask="url(#mask1_394_2982)">
|
||||
<path d="M10.2812 7.5C10.2812 9.49235 9.77553 11.2724 8.98431 12.5383C8.18869 13.8113 7.15356 14.5 6.09375 14.5C4.99456 14.5 3.5965 13.7683 2.45067 12.4721C1.31781 11.1907 0.5 9.42971 0.5 7.5C0.5 5.57029 1.31781 3.80934 2.45067 2.52786C3.5965 1.23172 4.99456 0.5 6.09375 0.5C7.15356 0.5 8.18869 1.1887 8.98431 2.4617C9.77553 3.72765 10.2812 5.50765 10.2812 7.5Z" stroke="#959DA5"/>
|
||||
</g>
|
||||
<line y1="7.5" x2="15" y2="7.5" stroke="#959DA5"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.4 KiB |
11
extensions/ql-vscode/media/light/github.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<!-- From https://github.com/microsoft/vscode-icons -->
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97578 0C3.57211 0 0.000244141 3.57186 0.000244141 7.97553C0.000244141 11.4985 2.29994 14.4832 5.43144 15.5596C5.82287 15.6086 5.96966 15.3639 5.96966 15.1682C5.96966 14.9725 5.96966 14.4832 5.96966 13.7982C3.76783 14.2875 3.27853 12.7217 3.27853 12.7217C2.93602 11.792 2.3978 11.5474 2.3978 11.5474C1.66385 11.0581 2.44673 11.0581 2.44673 11.0581C3.2296 11.107 3.66997 11.8899 3.66997 11.8899C4.40391 13.1131 5.5293 12.7706 5.96966 12.5749C6.01859 12.0367 6.26324 11.6942 6.45896 11.4985C4.69749 11.3028 2.83816 10.6177 2.83816 7.53517C2.83816 6.65443 3.13174 5.96942 3.66997 5.38226C3.62104 5.23547 3.32746 4.40367 3.76783 3.32722C3.76783 3.32722 4.45284 3.1315 5.96966 4.15902C6.60575 3.9633 7.29076 3.91437 7.97578 3.91437C8.66079 3.91437 9.34581 4.01223 9.98189 4.15902C11.4987 3.1315 12.1837 3.32722 12.1837 3.32722C12.6241 4.40367 12.3305 5.23547 12.2816 5.43119C12.7709 5.96942 13.1134 6.70336 13.1134 7.5841C13.1134 10.6667 11.2541 11.3028 9.4926 11.4985C9.78618 11.7431 10.0308 12.2324 10.0308 12.9664C10.0308 14.0428 10.0308 14.8746 10.0308 15.1682C10.0308 15.3639 10.1776 15.6086 10.5691 15.5596C13.7495 14.4832 16.0002 11.4985 16.0002 7.97553C15.9513 3.57186 12.3794 0 7.97578 0Z" fill="#424242"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0">
|
||||
<rect width="16" height="16" fill="white" transform="translate(0.000244141)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
15
extensions/ql-vscode/media/light/sort-num.svg
Normal file
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="iso-8859-1"?>
|
||||
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 432 432" style="enable-background:new 0 0 432 432;" xml:space="preserve">
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<polygon points="234.24,9.067 183.893,59.413 284.587,59.413" />
|
||||
<path d="m 259.24622,341.40906 v -32.34375 q 13.35937,6.32812 27.07031,9.66797 13.71094,3.33984 26.89453,3.33984 35.15625,0 53.61328,-23.55469 18.63282,-23.73047 21.26953,-71.89453 -10.19531,15.11719 -25.83984,23.20313 -15.64453,8.08593 -34.62891,8.08593 -39.375,0 -62.40234,-23.73046 -22.85156,-23.90625 -22.85156,-65.21485 0,-40.42969 23.90625,-64.86328 23.90625,-24.433594 63.63281,-24.433594 45.52734,0 69.43359,34.980474 24.08204,34.80468 24.08204,101.25 0,62.05078 -29.53125,99.14062 -29.35547,36.91406 -79.10157,36.91406 -13.35937,0 -27.07031,-2.63672 -13.71094,-2.63671 -28.47656,-7.91015 z m 70.66406,-111.26953 q 23.90625,0 37.79297,-16.34766 14.0625,-16.34766 14.0625,-44.82422 0,-28.30078 -14.0625,-44.64844 -13.88672,-16.52343 -37.79297,-16.52343 -23.90625,0 -37.96875,16.52343 -13.88672,16.34766 -13.88672,44.64844 0,28.47656 13.88672,44.82422 14.0625,16.34766 37.96875,16.34766 z" />
|
||||
<polygon points="234.24,422.933 283.947,373.227 184.533,373.227" />
|
||||
<path d="M 35.300905,316.97546 H 93.308718 V 116.76062 L 30.203249,129.41687 V 97.07312 L 92.957155,84.41687 h 35.507815 v 232.55859 h 58.00781 v 29.88282 H 35.300905 Z" />
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
@@ -1,14 +1,4 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="27px" height="16px" viewBox="0 0 27 16" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<!-- Generator: Sketch 59 (86127) - https://sketch.com -->
|
||||
<title>Slice</title>
|
||||
<desc>Created with Sketch.</desc>
|
||||
<g id="light" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
|
||||
<g id="QL" transform="translate(1.000000, 1.000000)">
|
||||
<rect id="Rectangle-41" stroke="#2088FF" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" x="0" y="0" width="25" height="14" rx="2"></rect>
|
||||
<line x1="17" y1="5" x2="19" y2="5" id="Stroke-15" stroke="#2088FF" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"></line>
|
||||
<line x1="17" y1="9" x2="21" y2="9" id="Stroke-15" stroke="#2088FF" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"></line>
|
||||
<path d="M8.85227273,7 C8.85227273,7.51894199 8.76988719,7.97537682 8.60511364,8.36931818 C8.44034009,8.76325955 8.21591051,9.08711994 7.93181818,9.34090909 L8.76420455,10.3863636 L7.61647727,10.3863636 L7.14772727,9.80965909 C6.83143781,9.92897787 6.49147909,9.98863636 6.12784091,9.98863636 C5.61079287,9.98863636 5.14678236,9.8712133 4.73579545,9.63636364 C4.32480855,9.40151398 4.00000119,9.06108178 3.76136364,8.61505682 C3.52272608,8.16903186 3.40340909,7.63068497 3.40340909,7 C3.40340909,6.36552713 3.52272608,5.8257598 3.76136364,5.38068182 C4.00000119,4.93560384 4.32480855,4.59611859 4.73579545,4.36221591 C5.14678236,4.12831322 5.61079287,4.01136364 6.12784091,4.01136364 C6.642995,4.01136364 7.10605855,4.12831322 7.51704545,4.36221591 C7.92803236,4.59611859 8.2533132,4.93560384 8.49289773,5.38068182 C8.73248226,5.8257598 8.85227273,6.36552713 8.85227273,7 Z M5.70170455,7.88636364 L6.74715909,7.88636364 L7.17897727,8.44034091 C7.31344764,8.27935526 7.41808675,8.07859969 7.49289773,7.83806818 C7.56770871,7.59753668 7.60511364,7.31818341 7.60511364,7 C7.60511364,6.38257267 7.47064528,5.91145996 7.20170455,5.58664773 C6.93276381,5.2618355 6.57481284,5.09943182 6.12784091,5.09943182 C5.68086898,5.09943182 5.32291801,5.2618355 5.05397727,5.58664773 C4.78503653,5.91145996 4.65056818,6.38257267 4.65056818,7 C4.65056818,7.61553338 4.78503653,8.08617261 5.05397727,8.41193182 C5.32291801,8.73769102 5.68086898,8.90056818 6.12784091,8.90056818 C6.23958389,8.90056818 6.34564344,8.89015162 6.44602273,8.86931818 L5.70170455,7.88636364 Z M10.1813315,10 L10.1813315,4 L11.4114451,4 L11.4114451,8.98579545 L13.9057633,8.98579545 L13.9057633,10 L10.1813315,10 Z" fill="#2088FF" fill-rule="nonzero"></path>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.19789 8C8.19789 8.51894 8.1155 8.97538 7.95073 9.36932C7.78595 9.76326 7.56152 10.0871 7.27743 10.3409L8.10982 11.3864H6.96209L6.49334 10.8097C6.17705 10.929 5.83709 10.9886 5.47346 10.9886C4.95641 10.9886 4.4924 10.8712 4.08141 10.6364C3.67042 10.4015 3.34562 10.0611 3.10698 9.61506C2.86834 9.16903 2.74902 8.63068 2.74902 8C2.74902 7.36553 2.86834 6.82576 3.10698 6.38068C3.34562 5.9356 3.67042 5.59612 4.08141 5.36222C4.4924 5.12831 4.95641 5.01136 5.47346 5.01136C5.98861 5.01136 6.45167 5.12831 6.86266 5.36222C7.27365 5.59612 7.59893 5.9356 7.83851 6.38068C8.0781 6.82576 8.19789 7.36553 8.19789 8ZM5.04732 8.88636H6.09277L6.52459 9.44034C6.65906 9.27936 6.7637 9.0786 6.83851 8.83807C6.91332 8.59754 6.95073 8.31818 6.95073 8C6.95073 7.38257 6.81626 6.91146 6.54732 6.58665C6.27838 6.26184 5.92043 6.09943 5.47346 6.09943C5.02648 6.09943 4.66853 6.26184 4.39959 6.58665C4.13065 6.91146 3.99618 7.38257 3.99618 8C3.99618 8.61553 4.13065 9.08617 4.39959 9.41193C4.66853 9.73769 5.02648 9.90057 5.47346 9.90057C5.5852 9.90057 5.69126 9.89015 5.79164 9.86932L5.04732 8.88636ZM9.52695 11V5H10.7571V9.9858H13.2514V11H9.52695Z" fill="#24292F"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M13 1.5H3C2.17157 1.5 1.5 2.17157 1.5 3V13C1.5 13.8284 2.17157 14.5 3 14.5H13C13.8284 14.5 14.5 13.8284 14.5 13V3C14.5 2.17157 13.8284 1.5 13 1.5ZM3 0C1.34315 0 0 1.34315 0 3V13C0 14.6569 1.34315 16 3 16H13C14.6569 16 16 14.6569 16 13V3C16 1.34315 14.6569 0 13 0H3Z" fill="#24292F"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 2.6 KiB After Width: | Height: | Size: 1.6 KiB |
20654
extensions/ql-vscode/package-lock.json
generated
@@ -4,7 +4,7 @@
|
||||
"description": "CodeQL for Visual Studio Code",
|
||||
"author": "GitHub",
|
||||
"private": true,
|
||||
"version": "1.4.2",
|
||||
"version": "1.6.8",
|
||||
"publisher": "GitHub",
|
||||
"license": "MIT",
|
||||
"icon": "media/VS-marketplace-CodeQL-icon.png",
|
||||
@@ -13,7 +13,9 @@
|
||||
"url": "https://github.com/github/vscode-codeql"
|
||||
},
|
||||
"engines": {
|
||||
"vscode": "^1.43.0"
|
||||
"vscode": "^1.59.0",
|
||||
"node": "^16.13.0",
|
||||
"npm": ">=7.20.6"
|
||||
},
|
||||
"categories": [
|
||||
"Programming Languages"
|
||||
@@ -21,6 +23,16 @@
|
||||
"extensionDependencies": [
|
||||
"hbenl.vscode-test-explorer"
|
||||
],
|
||||
"capabilities": {
|
||||
"untrustedWorkspaces": {
|
||||
"supported": "limited",
|
||||
"description": "Workspace trust is required to execute commands that can contain arbitrary paths.",
|
||||
"restrictedConfigurations": [
|
||||
"codeQL.cli.executablePath",
|
||||
"codeQL.runningTests.additionalTestArguments"
|
||||
]
|
||||
}
|
||||
},
|
||||
"activationEvents": [
|
||||
"onLanguage:ql",
|
||||
"onView:codeQLDatabases",
|
||||
@@ -28,15 +40,21 @@
|
||||
"onView:codeQLAstViewer",
|
||||
"onView:test-explorer",
|
||||
"onCommand:codeQL.checkForUpdatesToCLI",
|
||||
"onCommand:codeQL.authenticateToGitHub",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseFolder",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseArchive",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseInternet",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseGithub",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseLgtm",
|
||||
"onCommand:codeQL.setCurrentDatabase",
|
||||
"onCommand:codeQL.viewAst",
|
||||
"onCommand:codeQL.viewCfg",
|
||||
"onCommand:codeQL.openReferencedFile",
|
||||
"onCommand:codeQL.previewQueryHelp",
|
||||
"onCommand:codeQL.chooseDatabaseFolder",
|
||||
"onCommand:codeQL.chooseDatabaseArchive",
|
||||
"onCommand:codeQL.chooseDatabaseInternet",
|
||||
"onCommand:codeQL.chooseDatabaseGithub",
|
||||
"onCommand:codeQL.chooseDatabaseLgtm",
|
||||
"onCommand:codeQLDatabases.chooseDatabase",
|
||||
"onCommand:codeQLDatabases.setCurrentDatabase",
|
||||
@@ -106,15 +124,21 @@
|
||||
"path": "./out/syntaxes/dbscheme.tmLanguage.json"
|
||||
}
|
||||
],
|
||||
"snippets": [
|
||||
{
|
||||
"language": "ql",
|
||||
"path": "./snippets.json"
|
||||
}
|
||||
],
|
||||
"configuration": {
|
||||
"type": "object",
|
||||
"title": "CodeQL",
|
||||
"properties": {
|
||||
"codeQL.cli.executablePath": {
|
||||
"scope": "machine",
|
||||
"scope": "machine-overridable",
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"description": "Path to the CodeQL executable that should be used by the CodeQL extension. The executable is named `codeql` on Linux/Mac and `codeql.exe` on Windows. This overrides all other CodeQL CLI settings."
|
||||
"markdownDescription": "Path to the CodeQL executable that should be used by the CodeQL extension. The executable is named `codeql` on Linux/Mac and `codeql.exe` on Windows. If empty, the extension will look for a CodeQL executable on your shell PATH, or if CodeQL is not on your PATH, download and manage its own CodeQL executable."
|
||||
},
|
||||
"codeQL.runningQueries.numberOfThreads": {
|
||||
"type": "integer",
|
||||
@@ -123,6 +147,21 @@
|
||||
"maximum": 1024,
|
||||
"description": "Number of threads for running queries."
|
||||
},
|
||||
"codeQL.runningQueries.saveCache": {
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"scope": "window",
|
||||
"description": "Aggressively save intermediate results to the disk cache. This may speed up subsequent queries if they are similar. Be aware that using this option will greatly increase disk usage and initial evaluation time."
|
||||
},
|
||||
"codeQL.runningQueries.cacheSize": {
|
||||
"type": [
|
||||
"integer",
|
||||
"null"
|
||||
],
|
||||
"default": null,
|
||||
"minimum": 1024,
|
||||
"description": "Maximum size of the disk cache (in MB). Leave blank to allow the evaluator to automatically adjust the size of the disk cache based on the size of the codebase and the complexity of the queries being executed."
|
||||
},
|
||||
"codeQL.runningQueries.timeout": {
|
||||
"type": [
|
||||
"integer",
|
||||
@@ -147,6 +186,13 @@
|
||||
"default": false,
|
||||
"description": "Enable debug logging and tuple counting when running CodeQL queries. This information is useful for debugging query performance."
|
||||
},
|
||||
"codeQL.runningQueries.maxPaths": {
|
||||
"type": "integer",
|
||||
"default": 4,
|
||||
"minimum": 1,
|
||||
"maximum": 256,
|
||||
"markdownDescription": "Max number of paths to display for each alert found by a path query (`@kind path-problem`)."
|
||||
},
|
||||
"codeQL.runningQueries.autoSave": {
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
@@ -157,15 +203,41 @@
|
||||
"default": 20,
|
||||
"description": "Max number of simultaneous queries to run using the 'CodeQL: Run Queries' command."
|
||||
},
|
||||
"codeQL.runningQueries.customLogDirectory": {
|
||||
"type": [
|
||||
"string",
|
||||
null
|
||||
],
|
||||
"default": null,
|
||||
"description": "Path to a directory where the CodeQL extension should store query server logs. If empty, the extension stores logs in a temporary workspace folder and deletes the contents after each run.",
|
||||
"markdownDeprecationMessage": "This property is deprecated and no longer has any effect. All query logs are stored in the query history folder next to the query results."
|
||||
},
|
||||
"codeQL.runningQueries.quickEvalCodelens": {
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"description": "Enable the 'Quick Evaluation' CodeLens."
|
||||
},
|
||||
"codeQL.resultsDisplay.pageSize": {
|
||||
"type": "integer",
|
||||
"default": 200,
|
||||
"description": "Number of queries displayed per page of the results view."
|
||||
"description": "Max number of query results to display per page in the results view."
|
||||
},
|
||||
"codeQL.queryHistory.format": {
|
||||
"type": "string",
|
||||
"default": "[%t] %q on %d - %s",
|
||||
"description": "Default string for how to label query history items. %t is the time of the query, %q is the query name, %d is the database name, and %s is a status string."
|
||||
"default": "%q on %d - %s, %r [%t]",
|
||||
"markdownDescription": "Default string for how to label query history items.\n* %t is the time of the query\n* %q is the human-readable query name\n* %f is the query file name\n* %d is the database name\n* %r is the number of results\n* %s is a status string"
|
||||
},
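A sketch of how a user might override this label format in `settings.json`; the arrangement of placeholders below is only an illustration, not a recommended value:

```jsonc
// settings.json: hypothetical custom query history label
{
  // %q = query name, %d = database name, %r = result count, %t = query time, %s = status
  "codeQL.queryHistory.format": "%q on %d (%r results) [%t] - %s"
}
```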
|
||||
"codeQL.queryHistory.ttl": {
|
||||
"type": "number",
|
||||
"default": 30,
|
||||
"description": "Number of days to retain queries in the query history before being automatically deleted.",
|
||||
"scope": "machine"
|
||||
},
|
||||
"codeQL.runningTests.additionalTestArguments": {
|
||||
"scope": "window",
|
||||
"type": "array",
|
||||
"default": [],
|
||||
"markdownDescription": "Additional command line arguments to pass to the CLI when [running tests](https://codeql.github.com/docs/codeql-cli/manual/test-run/). This setting should be an array of strings, each containing an argument to be passed."
|
||||
},
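An illustrative value for this setting; the argument shown is only an example and the path is a placeholder:

```jsonc
// settings.json: hypothetical extra arguments passed to the CLI when running tests
{
  "codeQL.runningTests.additionalTestArguments": [
    "--additional-packs",
    "/path/to/extra/packs"
  ]
}
```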
|
||||
"codeQL.runningTests.numberOfThreads": {
|
||||
"scope": "window",
|
||||
@@ -179,21 +251,60 @@
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"scope": "application",
|
||||
"markdownDescription": "Specifies whether to send CodeQL usage telemetry. This setting AND the global `#telemetry.enableTelemetry#` setting must be checked for telemetry to be sent to GitHub. For more information, see [TELEMETRY.md](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/TELEMETRY.md)"
|
||||
"markdownDescription": "Specifies whether to send CodeQL usage telemetry. This setting AND the global `#telemetry.enableTelemetry#` setting must be checked for telemetry to be sent to GitHub. For more information, see the [telemetry documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/about-telemetry-in-codeql-for-visual-studio-code)"
|
||||
},
|
||||
"codeQL.telemetry.logTelemetry": {
|
||||
"type": "boolean",
|
||||
"default": false,
|
||||
"scope": "application",
|
||||
"description": "Specifies whether or not to write telemetry events to the extension log."
|
||||
},
|
||||
"codeQL.variantAnalysis.repositoryLists": {
|
||||
"type": [
|
||||
"object",
|
||||
null
|
||||
],
|
||||
"patternProperties": {
|
||||
".*": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"default": null,
|
||||
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to run variant analysis against. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
|
||||
},
|
||||
"codeQL.variantAnalysis.controllerRepo": {
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
|
||||
"patternErrorMessage": "Please enter a valid GitHub repository",
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
}
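A sketch of how these two variant analysis settings might be filled in; the list name and repositories below are hypothetical examples in the `<owner>/<repo>` form described above:

```jsonc
// settings.json: hypothetical repository list and controller repository
{
  "codeQL.variantAnalysis.repositoryLists": {
    "demo-projects": [
      "octo-org/example-app",
      "octo-org/example-library"
    ]
  },
  "codeQL.variantAnalysis.controllerRepo": "octo-org/controller-repo"
}
```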
|
||||
}
|
||||
},
|
||||
"commands": [
|
||||
{
|
||||
"command": "codeQL.authenticateToGitHub",
|
||||
"title": "CodeQL: Authenticate to GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQuery",
|
||||
"title": "CodeQL: Run Query"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueryOnMultipleDatabases",
|
||||
"title": "CodeQL: Run Query on Multiple Databases"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"title": "CodeQL: Run Variant Analysis"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"title": "CodeQL: Export Variant Analysis Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
"title": "CodeQL: Run Queries in Selected Files"
|
||||
@@ -202,6 +313,14 @@
|
||||
"command": "codeQL.quickEval",
|
||||
"title": "CodeQL: Quick Evaluation"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openReferencedFile",
|
||||
"title": "CodeQL: Open Referenced File"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"title": "CodeQL: Preview Query Help"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.quickQuery",
|
||||
"title": "CodeQL: Quick Query"
|
||||
@@ -210,6 +329,10 @@
|
||||
"command": "codeQL.openDocumentation",
|
||||
"title": "CodeQL: Open Documentation"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.copyVersion",
|
||||
"title": "CodeQL: Copy Version Information"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseFolder",
|
||||
"title": "Choose Database from Folder",
|
||||
@@ -238,6 +361,14 @@
|
||||
"dark": "media/dark/cloud-download.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"title": "Download Database from GitHub",
|
||||
"icon": {
|
||||
"light": "media/light/github.svg",
|
||||
"dark": "media/dark/github.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"title": "Download from LGTM",
|
||||
@@ -254,6 +385,10 @@
|
||||
"command": "codeQL.viewAst",
|
||||
"title": "CodeQL: View AST"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"title": "CodeQL: View CFG"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.upgradeCurrentDatabase",
|
||||
"title": "CodeQL: Upgrade Current Database"
|
||||
@@ -262,6 +397,14 @@
|
||||
"command": "codeQL.clearCache",
|
||||
"title": "CodeQL: Clear Cache"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.installPackDependencies",
|
||||
"title": "CodeQL: Install Pack Dependencies"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.downloadPacks",
|
||||
"title": "CodeQL: Download Packs"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.setCurrentDatabase",
|
||||
"title": "Set Current Database"
|
||||
@@ -282,6 +425,10 @@
|
||||
"command": "codeQLDatabases.openDatabaseFolder",
|
||||
"title": "Show Database Directory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.addDatabaseSource",
|
||||
"title": "Add Database Source to Workspace"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseFolder",
|
||||
"title": "CodeQL: Choose Database from Folder"
|
||||
@@ -294,6 +441,10 @@
|
||||
"command": "codeQL.chooseDatabaseInternet",
|
||||
"title": "CodeQL: Download Database"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"title": "CodeQL: Download Database from GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseLgtm",
|
||||
"title": "CodeQL: Download Database from LGTM"
|
||||
@@ -320,7 +471,7 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQuery",
|
||||
"title": "Open the query that produced these results",
|
||||
"title": "Open the Query that Produced these Results",
|
||||
"icon": {
|
||||
"light": "media/light/edit.svg",
|
||||
"dark": "media/dark/edit.svg"
|
||||
@@ -342,17 +493,69 @@
|
||||
"dark": "media/dark/trash.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByName",
|
||||
"title": "Sort by Name",
|
||||
"icon": {
|
||||
"light": "media/light/sort-alpha.svg",
|
||||
"dark": "media/dark/sort-alpha.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByDate",
|
||||
"title": "Sort by Query Date",
|
||||
"icon": {
|
||||
"light": "media/light/sort-date.svg",
|
||||
"dark": "media/dark/sort-date.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByCount",
|
||||
"title": "Sort by Results Count",
|
||||
"icon": {
|
||||
"light": "media/light/sort-num.svg",
|
||||
"dark": "media/dark/sort-num.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"title": "Show Query Log"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"title": "Open Query Directory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"title": "Show Evaluator Log (Raw)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"title": "Show Evaluator Log (Summary)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"title": "Cancel"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"title": "Show Query Text"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarif",
|
||||
"title": "View SARIF"
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"title": "Export Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"title": "View Results (CSV)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvAlerts",
|
||||
"title": "View Alerts (CSV)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarifAlerts",
|
||||
"title": "View Alerts (SARIF)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewDil",
|
||||
@@ -366,6 +569,14 @@
|
||||
"command": "codeQLQueryHistory.compareWith",
|
||||
"title": "Compare Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"title": "Open Variant Analysis on GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"title": "Copy Repository List"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.nextPathStep",
|
||||
"title": "CodeQL: Show Next Step on Path"
|
||||
@@ -426,6 +637,11 @@
|
||||
"when": "view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary && view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "view == codeQLDatabases",
|
||||
@@ -446,6 +662,21 @@
|
||||
"when": "view == codeQLQueryHistory",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByName",
|
||||
"when": "view == codeQLQueryHistory",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByDate",
|
||||
"when": "view == codeQLQueryHistory",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByCount",
|
||||
"when": "view == codeQLQueryHistory",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLAstViewer.clear",
|
||||
"when": "view == codeQLAstViewer",
|
||||
@@ -456,7 +687,7 @@
|
||||
{
|
||||
"command": "codeQLDatabases.setCurrentDatabase",
|
||||
"group": "inline",
|
||||
"when": "view == codeQLDatabases"
|
||||
"when": "view == codeQLDatabases && viewItem != currentDatabase"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.removeDatabase",
|
||||
@@ -478,6 +709,11 @@
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLDatabases"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.addDatabaseSource",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLDatabases"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQuery",
|
||||
"group": "9_qlCommands",
|
||||
@@ -486,7 +722,7 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.removeHistoryItem",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == remoteResultsItem || viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.setLabel",
|
||||
@@ -496,12 +732,27 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.compareWith",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && !hasRemoteServer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
@@ -509,24 +760,54 @@
|
||||
"when": "view == codeQLQueryHistory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarif",
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && viewItem == interpretedResultsItem"
|
||||
"when": "view == codeQLQueryHistory && viewItem == remoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == rawResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvAlerts",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarifAlerts",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewDil",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == inProgressResultsItem || viewItem == inProgressRemoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem || viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.showOutputDifferences",
|
||||
"group": "qltest@1",
|
||||
"when": "view == test-explorer && viewItem == testWithSource"
|
||||
"when": "viewItem == testWithSource"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.acceptOutput",
|
||||
"group": "qltest@2",
|
||||
"when": "view == test-explorer && viewItem == testWithSource"
|
||||
"when": "viewItem == testWithSource"
|
||||
}
|
||||
],
|
||||
"explorer/context": [
|
||||
@@ -538,19 +819,50 @@
|
||||
{
|
||||
"command": "codeQL.viewAst",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceScheme == codeql-zip-archive"
|
||||
"when": "resourceScheme == codeql-zip-archive && !explorerResourceIsFolder && !listMultiSelection"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceScheme != codeql-zip-archive"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openReferencedFile",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceExtname == .qlref"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceExtname == .qhelp && isWorkspaceTrusted"
|
||||
}
|
||||
],
|
||||
"commandPalette": [
|
||||
{
|
||||
"command": "codeQL.authenticateToGitHub",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQuery",
|
||||
"when": "resourceLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueryOnMultipleDatabases",
|
||||
"when": "resourceLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
"when": "false"
|
||||
@@ -559,6 +871,14 @@
|
||||
"command": "codeQL.quickEval",
|
||||
"when": "editorLangId == ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openReferencedFile",
|
||||
"when": "resourceExtname == .qlref"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"when": "resourceExtname == .qhelp && isWorkspaceTrusted"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.setCurrentDatabase",
|
||||
"when": "false"
|
||||
@@ -567,6 +887,14 @@
|
||||
"command": "codeQL.viewAst",
|
||||
"when": "resourceScheme == codeql-zip-archive"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.setCurrentDatabase",
|
||||
"when": "false"
|
||||
@@ -579,6 +907,10 @@
|
||||
"command": "codeQLDatabases.openDatabaseFolder",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.addDatabaseSource",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.sortByName",
|
||||
"when": "false"
|
||||
@@ -607,6 +939,10 @@
|
||||
"command": "codeQLDatabases.chooseDatabaseInternet",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "false"
|
||||
@@ -631,12 +967,48 @@
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarif",
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvAlerts",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarifAlerts",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
@@ -651,6 +1023,18 @@
|
||||
"command": "codeQLQueryHistory.compareWith",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByName",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByDate",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.sortByCount",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLAstViewer.gotoCode",
|
||||
"when": "false"
|
||||
@@ -673,13 +1057,33 @@
|
||||
"command": "codeQL.runQuery",
|
||||
"when": "editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueryOnMultipleDatabases",
|
||||
"when": "editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewAst",
|
||||
"when": "resourceScheme == codeql-zip-archive"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.quickEval",
|
||||
"when": "editorLangId == ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openReferencedFile",
|
||||
"when": "resourceExtname == .qlref"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"when": "resourceExtname == .qhelp && isWorkspaceTrusted"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -727,6 +1131,8 @@
|
||||
"build": "gulp",
|
||||
"watch": "npm-run-all -p watch:*",
|
||||
"watch:extension": "tsc --watch",
|
||||
"watch:webpack": "gulp watchView",
|
||||
"watch:css": "gulp watchCss",
|
||||
"test": "mocha --exit -r ts-node/register test/pure-tests/**/*.ts",
|
||||
"preintegration": "rm -rf ./out/vscode-tests && gulp",
|
||||
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
|
||||
@@ -737,16 +1143,30 @@
|
||||
"format-staged": "lint-staged"
|
||||
},
|
||||
"dependencies": {
|
||||
"@octokit/rest": "^18.5.6",
|
||||
"@primer/octicons-react": "^16.3.0",
|
||||
"@primer/react": "^35.0.0",
|
||||
"@vscode/webview-ui-toolkit": "^1.0.0",
|
||||
"child-process-promise": "^2.2.1",
|
||||
"classnames": "~2.2.6",
|
||||
"fs-extra": "^9.0.1",
|
||||
"glob-promise": "^3.4.0",
|
||||
"js-yaml": "^3.14.0",
|
||||
"minimist": "~1.2.5",
|
||||
"node-fetch": "~2.6.0",
|
||||
"react": "^16.8.6",
|
||||
"react-dom": "^16.8.6",
|
||||
"d3": "^6.3.1",
|
||||
"d3-graphviz": "^2.6.1",
|
||||
"fs-extra": "^10.0.1",
|
||||
"glob-promise": "^4.2.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"minimist": "~1.2.6",
|
||||
"nanoid": "^3.2.0",
|
||||
"node-fetch": "~2.6.7",
|
||||
"path-browserify": "^1.0.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"semver": "~7.3.2",
|
||||
"source-map-support": "^0.5.21",
|
||||
"source-map": "^0.7.4",
|
||||
"stream": "^0.0.2",
|
||||
"stream-chain": "~2.2.4",
|
||||
"stream-json": "~1.7.3",
|
||||
"styled-components": "^5.3.3",
|
||||
"tmp": "^0.1.0",
|
||||
"tmp-promise": "~3.0.2",
|
||||
"tree-kill": "~1.2.2",
|
||||
@@ -756,86 +1176,95 @@
|
||||
"vscode-languageclient": "^6.1.3",
|
||||
"vscode-test-adapter-api": "~1.7.0",
|
||||
"vscode-test-adapter-util": "~0.7.0",
|
||||
"zip-a-folder": "~0.0.12"
|
||||
"zip-a-folder": "~1.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/chai": "^4.1.7",
|
||||
"@types/chai-as-promised": "~7.1.2",
|
||||
"@types/child-process-promise": "^2.2.1",
|
||||
"@types/classnames": "~2.2.9",
|
||||
"@types/d3": "^6.2.0",
|
||||
"@types/d3-graphviz": "^2.6.6",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/google-protobuf": "^3.2.7",
|
||||
"@types/gulp": "^4.0.6",
|
||||
"@types/gulp-replace": "0.0.31",
|
||||
"@types/gulp": "^4.0.9",
|
||||
"@types/gulp-replace": "^1.1.0",
|
||||
"@types/gulp-sourcemaps": "0.0.32",
|
||||
"@types/js-yaml": "^3.12.5",
|
||||
"@types/jszip": "~3.1.6",
|
||||
"@types/mocha": "^8.0.4",
|
||||
"@types/node": "^12.14.1",
|
||||
"@types/mocha": "^9.0.0",
|
||||
"@types/nanoid": "^3.0.0",
|
||||
"@types/node": "^16.11.25",
|
||||
"@types/node-fetch": "~2.5.2",
|
||||
"@types/proxyquire": "~1.3.28",
|
||||
"@types/react": "^16.8.17",
|
||||
"@types/react-dom": "^16.8.4",
|
||||
"@types/react": "^17.0.2",
|
||||
"@types/react-dom": "^17.0.2",
|
||||
"@types/sarif": "~2.1.2",
|
||||
"@types/semver": "~7.2.0",
|
||||
"@types/sinon": "~7.5.2",
|
||||
"@types/sinon-chai": "~3.2.3",
|
||||
"@types/stream-chain": "~2.0.1",
|
||||
"@types/stream-json": "~1.7.1",
|
||||
"@types/through2": "^2.0.36",
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/unzipper": "~0.10.1",
|
||||
"@types/vscode": "^1.43.0",
|
||||
"@types/webpack": "^4.32.1",
|
||||
"@types/vscode": "^1.59.0",
|
||||
"@types/webpack": "^5.28.0",
|
||||
"@types/xml2js": "~0.4.4",
|
||||
"@typescript-eslint/eslint-plugin": "~2.23.0",
|
||||
"@typescript-eslint/parser": "~2.23.0",
|
||||
"@typescript-eslint/eslint-plugin": "^4.26.0",
|
||||
"@typescript-eslint/parser": "^4.26.0",
|
||||
"ansi-colors": "^4.1.1",
|
||||
"applicationinsights": "^1.8.7",
|
||||
"chai": "^4.2.0",
|
||||
"chai-as-promised": "~7.1.1",
|
||||
"css-loader": "~3.1.0",
|
||||
"del": "^6.0.0",
|
||||
"eslint": "~6.8.0",
|
||||
"eslint-plugin-react": "~7.19.0",
|
||||
"glob": "^7.1.4",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-replace": "^1.0.0",
|
||||
"gulp-sourcemaps": "^2.6.5",
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-sourcemaps": "^3.0.0",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "~4.2.5",
|
||||
"jsonc-parser": "^2.3.0",
|
||||
"lint-staged": "~10.2.2",
|
||||
"mocha": "^8.2.1",
|
||||
"mocha-sinon": "~2.1.0",
|
||||
"mocha": "^10.0.0",
|
||||
"mocha-sinon": "~2.1.2",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"prettier": "~2.0.5",
|
||||
"proxyquire": "~2.1.3",
|
||||
"sinon": "~9.0.0",
|
||||
"sinon": "~13.0.1",
|
||||
"sinon-chai": "~3.5.0",
|
||||
"style-loader": "~0.23.1",
|
||||
"through2": "^3.0.1",
|
||||
"ts-loader": "^5.4.5",
|
||||
"ts-node": "^8.3.0",
|
||||
"style-loader": "~3.3.1",
|
||||
"through2": "^4.0.2",
|
||||
"ts-loader": "^8.1.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-protoc-gen": "^0.9.0",
|
||||
"typescript": "~3.8.3",
|
||||
"typescript": "^4.5.5",
|
||||
"typescript-formatter": "^7.2.2",
|
||||
"vsce": "^1.65.0",
|
||||
"vsce": "^2.7.0",
|
||||
"vscode-test": "^1.4.0",
|
||||
"webpack": "^4.38.0",
|
||||
"webpack-cli": "^3.3.2"
|
||||
"webpack": "^5.62.2",
|
||||
"webpack-cli": "^4.6.0"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "npm run format-staged",
|
||||
"pre-push": "npm run lint"
|
||||
"pre-push": "npm run lint && scripts/forbid-mocha-only"
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"./**/*.{json,css,scss,md}": [
|
||||
"./**/*.{json,css,scss}": [
|
||||
"prettier --write"
|
||||
],
|
||||
"./**/*.{ts,tsx}": [
|
||||
"tsfmt -r",
|
||||
"eslint --fix"
|
||||
]
|
||||
},
|
||||
"resolutions": {
|
||||
"glob-parent": "6.0.0"
|
||||
}
|
||||
}
|
||||
|
||||
extensions/ql-vscode/scripts/forbid-mocha-only (new executable file, 6 lines)
@@ -0,0 +1,6 @@
if grep -rq --include '*.test.ts' 'it.only\|describe.only' './test' './src'; then
echo 'There is a .only() in the tests. Please remove it.'
exit 1;
else
exit 0;
fi
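The new pre-push guard is a plain shell one-liner; for readers who prefer the extension's main language, an equivalent check could be sketched in TypeScript as below. This is purely illustrative and not part of the change — the file name and any hook wiring are assumptions.

```ts
// Illustrative only, not part of this change: a cross-platform TypeScript variant
// of scripts/forbid-mocha-only. The file name and hook wiring are assumptions.
import * as fs from 'fs';
import * as path from 'path';

function findOnlyCalls(dir: string): string[] {
  if (!fs.existsSync(dir)) {
    return [];
  }
  const offenders: string[] = [];
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      offenders.push(...findOnlyCalls(full));
    } else if (entry.name.endsWith('.test.ts')) {
      const text = fs.readFileSync(full, 'utf8');
      if (text.includes('it.only') || text.includes('describe.only')) {
        offenders.push(full);
      }
    }
  }
  return offenders;
}

const offenders = ['./test', './src'].flatMap(findOnlyCalls);
if (offenders.length > 0) {
  console.error('There is a .only() in the tests. Please remove it.');
  console.error(offenders.join('\n'));
  process.exit(1);
}
```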
extensions/ql-vscode/snippets.json (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
{
|
||||
"Query Metadata": {
|
||||
"prefix": "querymetadata",
|
||||
"body": [
|
||||
"/**",
|
||||
" * @name $1",
|
||||
" * @description $2",
|
||||
" * @kind $3",
|
||||
" * @id $4",
|
||||
" * @tags $5",
|
||||
" */"
|
||||
],
|
||||
"description": "Metadata for a query"
|
||||
},
|
||||
"Class": {
|
||||
"prefix": "class",
|
||||
"body": ["class $1 extends $2 {", "\t$0", "}"],
|
||||
"description": "A class"
|
||||
},
|
||||
"From/Where/Select": {
|
||||
"prefix": "from",
|
||||
"body": ["from $1", "where $2", "select $3"],
|
||||
"description": "A from/where/select statement"
|
||||
},
|
||||
"Predicate": {
|
||||
"prefix": "predicate",
|
||||
"body": ["predicate $1($2) {", "\t$0", "}"],
|
||||
"description": "A predicate"
|
||||
},
|
||||
"Dataflow Tracking Class": {
|
||||
"prefix": "dataflowtracking",
|
||||
"body": [
|
||||
"class $1 extends DataFlow::Configuration {",
|
||||
"\t$1() { this = \"$1\" }",
|
||||
"\t",
|
||||
"\toverride predicate isSource(DataFlow::Node node) {",
|
||||
"\t\t${2:none()}",
|
||||
"\t}",
|
||||
"\t",
|
||||
"\toverride predicate isSink(DataFlow::Node node) {",
|
||||
"\t\t${3:none()}",
|
||||
"\t}",
|
||||
"}"
|
||||
],
|
||||
"description": "Boilerplate for a dataflow tracking class"
|
||||
},
|
||||
"Taint Tracking Class": {
|
||||
"prefix": "tainttracking",
|
||||
"body": [
|
||||
"class $1 extends TaintTracking::Configuration {",
|
||||
"\t$1() { this = \"$1\" }",
|
||||
"\t",
|
||||
"\toverride predicate isSource(DataFlow::Node node) {",
|
||||
"\t\t${2:none()}",
|
||||
"\t}",
|
||||
"\t",
|
||||
"\toverride predicate isSink(DataFlow::Node node) {",
|
||||
"\t\t${3:none()}",
|
||||
"\t}",
|
||||
"}"
|
||||
],
|
||||
"description": "Boilerplate for a taint tracking class"
|
||||
},
|
||||
"Count": {
|
||||
"prefix": "count",
|
||||
"body": ["count($1 | $2 | $3)"],
|
||||
"description": "A count aggregate"
|
||||
},
|
||||
"Max": {
|
||||
"prefix": "max",
|
||||
"body": ["max($1 | $2 | $3)"],
|
||||
"description": "A max aggregate"
|
||||
},
|
||||
"Min": {
|
||||
"prefix": "min",
|
||||
"body": ["min($1 | $2 | $3)"],
|
||||
"description": "A min aggregate"
|
||||
},
|
||||
"Average": {
|
||||
"prefix": "avg",
|
||||
"body": ["avg($1 | $2 | $3)"],
|
||||
"description": "An average aggregate"
|
||||
},
|
||||
"Sum": {
|
||||
"prefix": "sum",
|
||||
"body": ["sum($1 | $2 | $3)"],
|
||||
"description": "A sum aggregate"
|
||||
},
|
||||
"Concatenation": {
|
||||
"prefix": "concat",
|
||||
"body": ["concat($1 | $2 | $3)"],
|
||||
"description": "A concatenation aggregate"
|
||||
},
|
||||
"Rank": {
|
||||
"prefix": "rank",
|
||||
"body": ["rank[$1]($2 | $3 | $4)"],
|
||||
"description": "A rank aggregate"
|
||||
},
|
||||
"Strict Sum": {
|
||||
"prefix": "strictsum",
|
||||
"body": ["strictsum($1 | $2 | $3)"],
|
||||
"description": "A strict sum aggregate"
|
||||
},
|
||||
"Strict Concatenation": {
|
||||
"prefix": "strictconcat",
|
||||
"body": ["strictconcat($1 | $2 | $3)"],
|
||||
"description": "A strict concatenation aggregate"
|
||||
},
|
||||
"Strict Count": {
|
||||
"prefix": "strictcount",
|
||||
"body": ["strictcount($1 | $2 | $3)"],
|
||||
"description": "A strict count aggregate"
|
||||
},
|
||||
"Unique": {
|
||||
"prefix": "unique",
|
||||
"body": ["unique($1 | $2 | $3)"],
|
||||
"description": "A unique aggregate"
|
||||
},
|
||||
"Exists": {
|
||||
"prefix": "exists",
|
||||
"body": ["exists($1 | $2 | $3)"],
|
||||
"description": "An exists quantifier"
|
||||
},
|
||||
"For All": {
|
||||
"prefix": "forall",
|
||||
"body": ["forall($1 | $2 | $3)"],
|
||||
"description": "A for all quantifier"
|
||||
},
|
||||
"For All and Exists": {
|
||||
"prefix": "forex",
|
||||
"body": ["forex($1 | $2 | $3)"],
|
||||
"description": "A for all and exists quantifier"
|
||||
}
|
||||
}
|
||||
extensions/ql-vscode/src/additional-typings.d.ts (new vendored file, 15 lines)
@@ -0,0 +1,15 @@
/**
 * The d3 library is designed to work in both the browser and
 * node. Consequently their typings files refer to both node
 * types like `Buffer` (which don't exist in the browser), and browser
 * types like `Blob` (which don't exist in node). Instead of sticking
 * all of `dom` in `compilerOptions.lib`, it suffices just to put in a
 * stub definition of the affected types so that compilation
 * succeeds.
 */

declare type RequestInit = Record<string, unknown>;
declare type ElementTagNameMap = any;
declare type NodeListOf<T> = Record<string, T>;
declare type Node = Record<string, unknown>;
declare type XMLDocument = Record<string, unknown>;
@@ -115,7 +115,7 @@ class InvalidSourceArchiveUriError extends Error {
|
||||
export function decodeSourceArchiveUri(uri: vscode.Uri): ZipFileReference {
|
||||
if (!uri.authority) {
|
||||
// Uri is malformed, but this is recoverable
|
||||
logger.log(`Warning: ${new InvalidSourceArchiveUriError(uri).message}`);
|
||||
void logger.log(`Warning: ${new InvalidSourceArchiveUriError(uri).message}`);
|
||||
return {
|
||||
pathWithinSourceArchive: '/',
|
||||
sourceArchiveZipPath: uri.path
|
||||
@@ -141,7 +141,7 @@ function ensureFile(map: DirectoryHierarchyMap, file: string) {
|
||||
const dirname = path.dirname(file);
|
||||
if (dirname === '.') {
|
||||
const error = `Ill-formed path ${file} in zip archive (expected absolute path)`;
|
||||
logger.log(error);
|
||||
void logger.log(error);
|
||||
throw new Error(error);
|
||||
}
|
||||
ensureDir(map, dirname);
|
||||
|
||||
@@ -10,7 +10,8 @@ import {
|
||||
TextEditorSelectionChangeEvent,
|
||||
TextEditorSelectionChangeKind,
|
||||
Location,
|
||||
Range
|
||||
Range,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -19,7 +20,8 @@ import { UrlValue, BqrsId } from './pure/bqrs-cli-types';
|
||||
import { showLocation } from './interface-utils';
|
||||
import { isStringLoc, isWholeFileLoc, isLineColumnLoc } from './pure/bqrs-utils';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { DisposableObject } from './vscode-utils/disposable-object';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
|
||||
export interface AstItem {
|
||||
id: BqrsId;
|
||||
@@ -55,7 +57,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
|
||||
}
|
||||
|
||||
refresh(): void {
|
||||
this._onDidChangeTreeData.fire();
|
||||
this._onDidChangeTreeData.fire(undefined);
|
||||
}
|
||||
getChildren(item?: AstItem): ProviderResult<AstItem[]> {
|
||||
const children = item ? item.children : this.roots;
|
||||
@@ -103,7 +105,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
|
||||
export class AstViewer extends DisposableObject {
|
||||
private treeView: TreeView<AstItem>;
|
||||
private treeDataProvider: AstViewerDataProvider;
|
||||
private currentFile: string | undefined;
|
||||
private currentFileUri: Uri | undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
@@ -124,13 +126,18 @@ export class AstViewer extends DisposableObject {
|
||||
this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
|
||||
}
|
||||
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileName: string) {
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
|
||||
this.treeDataProvider.roots = roots;
|
||||
this.treeDataProvider.db = db;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = `AST for ${path.basename(fileName)}`;
|
||||
this.treeView.reveal(roots[0], { focus: false });
|
||||
this.currentFile = fileName;
|
||||
this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
|
||||
this.currentFileUri = fileUri;
|
||||
// Handle error on reveal. This could happen if
|
||||
// the tree view is disposed during the reveal.
|
||||
this.treeView.reveal(roots[0], { focus: false })?.then(
|
||||
() => { /**/ },
|
||||
err => showAndLogErrorMessage(err)
|
||||
);
|
||||
}
|
||||
|
||||
private updateTreeSelection(e: TextEditorSelectionChangeEvent) {
|
||||
@@ -168,7 +175,7 @@ export class AstViewer extends DisposableObject {
|
||||
|
||||
if (
|
||||
this.treeView.visible &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFile &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFileUri?.fsPath &&
|
||||
e.selections.length === 1
|
||||
) {
|
||||
const selection = e.selections[0];
|
||||
@@ -178,7 +185,12 @@ export class AstViewer extends DisposableObject {
|
||||
|
||||
const targetItem = findBest(range, this.treeDataProvider.roots);
|
||||
if (targetItem) {
|
||||
this.treeView.reveal(targetItem);
|
||||
// Handle error on reveal. This could happen if
|
||||
// the tree view is disposed during the reveal.
|
||||
this.treeView.reveal(targetItem)?.then(
|
||||
() => { /**/ },
|
||||
err => showAndLogErrorMessage(err)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -188,6 +200,6 @@ export class AstViewer extends DisposableObject {
|
||||
this.treeDataProvider.db = undefined;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = undefined;
|
||||
this.currentFile = undefined;
|
||||
this.currentFileUri = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
extensions/ql-vscode/src/authentication.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
|
||||
import * as vscode from 'vscode';
|
||||
import * as Octokit from '@octokit/rest';
|
||||
|
||||
const GITHUB_AUTH_PROVIDER_ID = 'github';
|
||||
|
||||
// We need 'repo' scope for triggering workflows and 'gist' scope for exporting results to Gist.
|
||||
// For a comprehensive list of scopes, see:
|
||||
// https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps
|
||||
const SCOPES = ['repo', 'gist'];
|
||||
|
||||
/**
|
||||
* Handles authentication to GitHub, using the VS Code [authentication API](https://code.visualstudio.com/api/references/vscode-api#authentication).
|
||||
*/
|
||||
export class Credentials {
|
||||
private octokit: Octokit.Octokit | undefined;
|
||||
|
||||
// Explicitly make the constructor private, so that we can't accidentally call the constructor from outside the class
|
||||
// without also initializing the class.
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
private constructor() { }
|
||||
|
||||
/**
|
||||
* Initializes an instance of credentials with an octokit instance.
|
||||
*
|
||||
* Do not call this method until you know you actually need an instance of credentials,
|
||||
* since calling this method will require the user to log in.
|
||||
*
|
||||
* @param context The extension context.
|
||||
* @returns An instance of credentials.
|
||||
*/
|
||||
static async initialize(context: vscode.ExtensionContext): Promise<Credentials> {
|
||||
const c = new Credentials();
|
||||
c.registerListeners(context);
|
||||
c.octokit = await c.createOctokit(false);
|
||||
return c;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes an instance of credentials with an octokit instance using
|
||||
* a token from the user's GitHub account. This method is meant to be
|
||||
* used in non-interactive environments such as tests.
|
||||
*
|
||||
* @param overrideToken The GitHub token to use for authentication.
|
||||
* @returns An instance of credentials.
|
||||
*/
|
||||
static async initializeWithToken(overrideToken: string) {
|
||||
const c = new Credentials();
|
||||
c.octokit = await c.createOctokit(false, overrideToken);
|
||||
return c;
|
||||
}
|
||||
|
||||
private async createOctokit(createIfNone: boolean, overrideToken?: string): Promise<Octokit.Octokit | undefined> {
|
||||
if (overrideToken) {
|
||||
return new Octokit.Octokit({ auth: overrideToken });
|
||||
}
|
||||
|
||||
const session = await vscode.authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, { createIfNone });
|
||||
|
||||
if (session) {
|
||||
return new Octokit.Octokit({
|
||||
auth: session.accessToken
|
||||
});
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
registerListeners(context: vscode.ExtensionContext): void {
|
||||
// Sessions are changed when a user logs in or logs out.
|
||||
context.subscriptions.push(vscode.authentication.onDidChangeSessions(async e => {
|
||||
if (e.provider.id === GITHUB_AUTH_PROVIDER_ID) {
|
||||
this.octokit = await this.createOctokit(false);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
async getOctokit(): Promise<Octokit.Octokit> {
|
||||
if (this.octokit) {
|
||||
return this.octokit;
|
||||
}
|
||||
|
||||
this.octokit = await this.createOctokit(true);
|
||||
// octokit shouldn't be undefined, since we've set "createIfNone: true".
|
||||
// The following block is mainly here to prevent a compiler error.
|
||||
if (!this.octokit) {
|
||||
throw new Error('Did not initialize Octokit.');
|
||||
}
|
||||
return this.octokit;
|
||||
}
|
||||
}
|
||||
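To make the intent of the new Credentials class concrete, here is a minimal usage sketch. Only the Credentials API and the codeQL.authenticateToGitHub command id come from this change; the command body, function name, and message are illustrative assumptions.

```ts
// Hypothetical usage sketch (not part of the diff): wiring the Credentials class
// into a command that reports who is signed in.
import * as vscode from 'vscode';
import { Credentials } from './authentication';

export async function registerAuthCommand(context: vscode.ExtensionContext): Promise<void> {
  // Does not prompt: initialize() reuses an existing GitHub session if one exists.
  const credentials = await Credentials.initialize(context);

  context.subscriptions.push(
    vscode.commands.registerCommand('codeQL.authenticateToGitHub', async () => {
      // getOctokit() asks the user to sign in only if no session exists yet (createIfNone: true).
      const octokit = await credentials.getOctokit();
      const { data } = await octokit.users.getAuthenticated();
      void vscode.window.showInformationMessage(`Authenticated to GitHub as ${data.login}`);
    })
  );
}
```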
extensions/ql-vscode/src/blob.d.ts (vendored file, 11 lines removed)
@@ -1,11 +0,0 @@
/**
 * The npm library jszip is designed to work in both the browser and
 * node. Consequently its typings @types/jszip refers to both node
 * types like `Buffer` (which don't exist in the browser), and browser
 * types like `Blob` (which don't exist in node). Instead of sticking
 * all of `dom` in `compilerOptions.lib`, it suffices just to put in a
 * stub definition of the type `Blob` here so that compilation
 * succeeds.
 */

declare type Blob = string;
@@ -1,6 +1,7 @@
|
||||
import * as semver from 'semver';
|
||||
import { runCodeQlCliCommand } from './cli';
|
||||
import { Logger } from './logging';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* Get the version of a CodeQL CLI.
|
||||
@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): P
|
||||
} catch (e) {
|
||||
// Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
|
||||
// Either way, we can't determine compatibility.
|
||||
logger.log(`Failed to run 'codeql version'. Reason: ${e.message}`);
|
||||
void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
/* eslint-disable @typescript-eslint/camelcase */
|
||||
import * as cpp from 'child-process-promise';
|
||||
import * as child_process from 'child_process';
|
||||
import * as fs from 'fs-extra';
|
||||
@@ -9,22 +8,28 @@ import { Readable } from 'stream';
|
||||
import { StringDecoder } from 'string_decoder';
|
||||
import * as tk from 'tree-kill';
|
||||
import { promisify } from 'util';
|
||||
import { CancellationToken, Disposable } from 'vscode';
|
||||
import { CancellationToken, commands, Disposable, Uri } from 'vscode';
|
||||
|
||||
import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import * as config from './config';
|
||||
import { CliConfig } from './config';
|
||||
import { DistributionProvider, FindDistributionResultKind } from './distribution';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { QueryMetadata, SortDirection } from './pure/interface-types';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { CompilationMessage } from './pure/messages';
|
||||
import { sarifParser } from './sarif-parser';
|
||||
import { dbSchemeToLanguage, walkDirectory } from './helpers';
|
||||
|
||||
/**
|
||||
* The version of the SARIF format that we are using.
|
||||
*/
|
||||
const SARIF_FORMAT = 'sarifv2.1.0';
|
||||
|
||||
/**
|
||||
* The string used to specify CSV format.
|
||||
*/
|
||||
const CSV_FORMAT = 'csv';
|
||||
|
||||
/**
|
||||
* Flags to pass to all cli commands.
|
||||
*/
|
||||
@@ -40,6 +45,16 @@ export interface QuerySetup {
|
||||
compilationCache?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve queries --format bylanguage`.
|
||||
*/
|
||||
export interface QueryInfoByLanguage {
|
||||
// Using `unknown` as a placeholder. For now, the value is only ever an empty object.
|
||||
byLanguage: Record<string, Record<string, unknown>>;
|
||||
noDeclaredLanguage: Record<string, unknown>;
|
||||
multipleDeclaredLanguages: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve database`.
|
||||
*/
|
||||
@@ -68,6 +83,25 @@ export interface UpgradesInfo {
|
||||
*/
|
||||
export type QlpacksInfo = { [name: string]: string[] };
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve languages`.
|
||||
*/
|
||||
export type LanguagesInfo = { [name: string]: string[] };
|
||||
|
||||
/** Information about an ML model, as resolved by `codeql resolve ml-models`. */
|
||||
export type MlModelInfo = {
|
||||
checksum: string;
|
||||
path: string;
|
||||
};
|
||||
|
||||
/** The expected output of `codeql resolve ml-models`. */
|
||||
export type MlModelsInfo = { models: MlModelInfo[] };
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve qlref`.
|
||||
*/
|
||||
export type QlrefInfo = { resolvedPath: string };
|
||||
|
||||
// `codeql bqrs interpret` requires both of these to be present or
|
||||
// both absent.
|
||||
export interface SourceInfo {
|
||||
@@ -99,6 +133,8 @@ export interface TestCompleted {
|
||||
evaluationMs: number;
|
||||
expected: string;
|
||||
diff: string[] | undefined;
|
||||
failureDescription?: string;
|
||||
failureStage?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -121,15 +157,6 @@ interface BqrsDecodeOptions {
|
||||
*/
|
||||
export class CodeQLCliServer implements Disposable {
|
||||
|
||||
/**
|
||||
* CLI version where --kind=DIL was introduced
|
||||
*/
|
||||
private static CLI_VERSION_WITH_DECOMPILE_KIND_DIL = new SemVer('2.3.0');
|
||||
|
||||
/**
|
||||
* CLI version where languages are exposed during a `codeql resolve database` command.
|
||||
*/
|
||||
private static CLI_VERSION_WITH_LANGUAGE = new SemVer('2.4.1');
|
||||
|
||||
/** The process for the cli server, or undefined if one doesn't exist yet */
|
||||
process?: child_process.ChildProcessWithoutNullStreams;
|
||||
@@ -143,9 +170,16 @@ export class CodeQLCliServer implements Disposable {
|
||||
/** Version of current cli, lazily computed by the `getVersion()` method */
|
||||
private _version: SemVer | undefined;
|
||||
|
||||
/**
|
||||
* The languages supported by the current version of the CLI, computed by `getSupportedLanguages()`.
|
||||
*/
|
||||
private _supportedLanguages: string[] | undefined;
|
||||
|
||||
/** Path to current codeQL executable, or undefined if not running yet. */
|
||||
codeQlPath: string | undefined;
|
||||
|
||||
cliConstraints = new CliVersionConstraint(this);
|
||||
|
||||
/**
|
||||
* When set to true, ignore some modal popups and assume user has clicked "yes".
|
||||
*/
|
||||
@@ -162,12 +196,15 @@ export class CodeQLCliServer implements Disposable {
|
||||
if (this.distributionProvider.onDidChangeDistribution) {
|
||||
this.distributionProvider.onDidChangeDistribution(() => {
|
||||
this.restartCliServer();
|
||||
this._version = undefined;
|
||||
this._supportedLanguages = undefined;
|
||||
});
|
||||
}
|
||||
if (this.cliConfig.onDidChangeConfiguration) {
|
||||
this.cliConfig.onDidChangeConfiguration(() => {
|
||||
this.restartCliServer();
|
||||
this._version = undefined;
|
||||
this._supportedLanguages = undefined;
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -179,15 +216,15 @@ export class CodeQLCliServer implements Disposable {
|
||||
killProcessIfRunning(): void {
|
||||
if (this.process) {
|
||||
// Tell the Java CLI server process to shut down.
|
||||
this.logger.log('Sending shutdown request');
|
||||
void this.logger.log('Sending shutdown request');
|
||||
try {
|
||||
this.process.stdin.write(JSON.stringify(['shutdown']), 'utf8');
|
||||
this.process.stdin.write(this.nullBuffer);
|
||||
this.logger.log('Sent shutdown request');
|
||||
void this.logger.log('Sent shutdown request');
|
||||
} catch (e) {
|
||||
// We are probably fine here, the process has already closed stdin.
|
||||
this.logger.log(`Shutdown request failed: process stdin may have already closed. The error was ${e}`);
|
||||
this.logger.log('Stopping the process anyway.');
|
||||
void this.logger.log(`Shutdown request failed: process stdin may have already closed. The error was ${e}`);
|
||||
void this.logger.log('Stopping the process anyway.');
|
||||
}
|
||||
// Close the stdin and stdout streams.
|
||||
// This is important on Windows where the child process may not die cleanly.
|
||||
@@ -238,11 +275,16 @@ export class CodeQLCliServer implements Disposable {
|
||||
*/
|
||||
private async launchProcess(): Promise<child_process.ChildProcessWithoutNullStreams> {
|
||||
const codeQlPath = await this.getCodeQlPath();
|
||||
const args = [];
|
||||
if (shouldDebugCliServer()) {
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9012,server=n,suspend=y,quiet=y');
|
||||
}
|
||||
|
||||
return await spawnServer(
|
||||
codeQlPath,
|
||||
'CodeQL CLI Server',
|
||||
['execute', 'cli-server'],
|
||||
[],
|
||||
args,
|
||||
this.logger,
|
||||
_data => { /**/ }
|
||||
);
|
||||
@@ -267,7 +309,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
// Compute the full args array
|
||||
const args = command.concat(LOGGING_FLAGS).concat(commandArgs);
|
||||
const argsString = args.join(' ');
|
||||
this.logger.log(`${description} using CodeQL CLI: ${argsString}...`);
|
||||
void this.logger.log(`${description} using CodeQL CLI: ${argsString}...`);
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
// Start listening to stdout
|
||||
@@ -294,7 +336,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
const fullBuffer = Buffer.concat(stdoutBuffers);
|
||||
// Make sure we remove the terminator;
|
||||
const data = fullBuffer.toString('utf8', 0, fullBuffer.length - 1);
|
||||
this.logger.log('CLI command succeeded.');
|
||||
void this.logger.log('CLI command succeeded.');
|
||||
return data;
|
||||
} catch (err) {
|
||||
// Kill the process if it isn't already dead.
|
||||
@@ -304,10 +346,10 @@ export class CodeQLCliServer implements Disposable {
|
||||
stderrBuffers.length == 0
|
||||
? new Error(`${description} failed: ${err}`)
|
||||
: new Error(`${description} failed: ${Buffer.concat(stderrBuffers).toString('utf8')}`);
|
||||
newError.stack += (err.stack || '');
|
||||
newError.stack += getErrorStack(err);
|
||||
throw newError;
|
||||
} finally {
|
||||
this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
|
||||
void this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
|
||||
// Remove the listeners we set up.
|
||||
process.stdout.removeAllListeners('data');
|
||||
process.stderr.removeAllListeners('data');
|
||||
@@ -362,12 +404,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
if (cancellationToken !== undefined) {
|
||||
cancellationRegistration = cancellationToken.onCancellationRequested(_e => {
|
||||
tk(child.pid);
|
||||
tk(child.pid || 0);
|
||||
});
|
||||
}
|
||||
if (logger !== undefined) {
|
||||
// The human-readable output goes to stderr.
|
||||
logStream(child.stderr!, logger);
|
||||
void logStream(child.stderr!, logger);
|
||||
}
|
||||
|
||||
for await (const event of await splitStreamAtSeparators(child.stdout!, ['\0'])) {
|
||||
@@ -406,7 +448,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
yield JSON.parse(event) as EventType;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -448,17 +490,20 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @param command The `codeql` command to be run, provided as an array of command/subcommand names.
|
||||
* @param commandArgs The arguments to pass to the `codeql` command.
|
||||
* @param description Description of the action being run, to be shown in log and error messages.
|
||||
* @param addFormat Whether or not to add commandline arguments to specify the format as JSON.
|
||||
* @param progressReporter Used to output progress messages, e.g. to the status bar.
|
||||
* @returns The contents of the command's stdout, if the command succeeded.
|
||||
*/
|
||||
async runJsonCodeQlCliCommand<OutputType>(command: string[], commandArgs: string[], description: string, progressReporter?: ProgressReporter): Promise<OutputType> {
|
||||
// Add format argument first, in case commandArgs contains positional parameters.
|
||||
const args = ['--format', 'json'].concat(commandArgs);
|
||||
async runJsonCodeQlCliCommand<OutputType>(command: string[], commandArgs: string[], description: string, addFormat = true, progressReporter?: ProgressReporter): Promise<OutputType> {
|
||||
let args: string[] = [];
|
||||
if (addFormat) // Add format argument first, in case commandArgs contains positional parameters.
|
||||
args = args.concat(['--format', 'json']);
|
||||
args = args.concat(commandArgs);
|
||||
const result = await this.runCodeQlCliCommand(command, args, description, progressReporter);
|
||||
try {
|
||||
return JSON.parse(result) as OutputType;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -470,12 +515,24 @@ export class CodeQLCliServer implements Disposable {
|
||||
async resolveLibraryPath(workspaces: string[], queryPath: string): Promise<QuerySetup> {
|
||||
const subcommandArgs = [
|
||||
'--query', queryPath,
|
||||
'--additional-packs',
|
||||
workspaces.join(path.delimiter)
|
||||
...this.getAdditionalPacksArg(workspaces)
|
||||
];
|
||||
return await this.runJsonCodeQlCliCommand<QuerySetup>(['resolve', 'library-path'], subcommandArgs, 'Resolving library paths');
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the language for a query.
|
||||
* @param queryUri The URI of the query
|
||||
*/
|
||||
async resolveQueryByLanguage(workspaces: string[], queryUri: Uri): Promise<QueryInfoByLanguage> {
|
||||
const subcommandArgs = [
|
||||
'--format', 'bylanguage',
|
||||
queryUri.fsPath,
|
||||
...this.getAdditionalPacksArg(workspaces)
|
||||
];
|
||||
return JSON.parse(await this.runCodeQlCliCommand(['resolve', 'queries'], subcommandArgs, 'Resolving query by language'));
|
||||
}
|
||||
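A minimal sketch of how a caller might consume this result, using the QueryInfoByLanguage shape declared earlier in the diff. The helper name and wiring are assumptions; only resolveQueryByLanguage itself is from this change.

```ts
// Illustrative sketch, not part of the diff: detecting a query's language via
// `codeql resolve queries --format bylanguage`. Assumes an existing CodeQLCliServer.
import { Uri } from 'vscode';
import { CodeQLCliServer } from './cli';

async function detectQueryLanguage(
  cliServer: CodeQLCliServer,
  workspaces: string[],
  queryUri: Uri
): Promise<string | undefined> {
  const info = await cliServer.resolveQueryByLanguage(workspaces, queryUri);
  // The keys of byLanguage are language names; the nested records are placeholders
  // (empty objects, per the interface comment above).
  const languages = Object.keys(info.byLanguage);
  return languages.length === 1 ? languages[0] : undefined;
}
```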
|
||||
/**
|
||||
* Finds all available QL tests in a given directory.
|
||||
* @param testPath Root of directory tree to search for tests.
|
||||
@@ -492,6 +549,29 @@ export class CodeQLCliServer implements Disposable {
|
||||
);
|
||||
}
|
||||
|
||||
public async resolveQlref(qlref: string): Promise<QlrefInfo> {
|
||||
const subcommandArgs = [
|
||||
qlref
|
||||
];
|
||||
return await this.runJsonCodeQlCliCommand<QlrefInfo>(
|
||||
['resolve', 'qlref'],
|
||||
subcommandArgs,
|
||||
'Resolving qlref',
|
||||
false
|
||||
);
|
||||
}
|
||||
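For illustration, a caller such as the codeQL.openReferencedFile command (declared in the package.json changes above) could use resolveQlref roughly as follows. The wiring is an assumption; only resolveQlref is part of this change.

```ts
// Illustrative sketch, not taken from the diff: jumping from a .qlref file to the
// query it references. Assumes an existing CodeQLCliServer instance.
import { Uri, window, workspace } from 'vscode';
import { CodeQLCliServer } from './cli';

async function openReferencedQuery(cliServer: CodeQLCliServer, qlrefUri: Uri): Promise<void> {
  // resolveQlref calls runJsonCodeQlCliCommand with addFormat=false, so no
  // '--format json' flag is prepended; the output is still parsed as JSON.
  const { resolvedPath } = await cliServer.resolveQlref(qlrefUri.fsPath);
  const doc = await workspace.openTextDocument(Uri.file(resolvedPath));
  await window.showTextDocument(doc);
}
```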
|
||||
/**
|
||||
* Issues an internal clear-cache command to the cli server. This
|
||||
* command is used to clear the qlpack cache of the server.
|
||||
*
|
||||
* This cache is generally cleared every 1s. This method is used
|
||||
* to force an early clearing of the cache.
|
||||
*/
|
||||
public async clearCache(): Promise<void> {
|
||||
await this.runCodeQlCliCommand(['clear-cache'], [], 'Clearing qlpack cache');
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs QL tests.
|
||||
* @param testPaths Full paths of the tests to run.
|
||||
@@ -502,12 +582,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
testPaths: string[], workspaces: string[], options: TestRunOptions
|
||||
): AsyncGenerator<TestCompleted, void, unknown> {
|
||||
|
||||
const subcommandArgs = [
|
||||
'--additional-packs', workspaces.join(path.delimiter),
|
||||
const subcommandArgs = this.cliConfig.additionalTestArguments.concat([
|
||||
...this.getAdditionalPacksArg(workspaces),
|
||||
'--threads',
|
||||
this.cliConfig.numberTestThreads.toString(),
|
||||
...testPaths
|
||||
];
|
||||
]);
|
||||
|
||||
for await (const event of await this.runAsyncCodeQlCliCommand<TestCompleted>(['test', 'run'],
|
||||
subcommandArgs, 'Run CodeQL Tests', options.cancellationToken, options.logger)) {
|
||||
@@ -523,6 +603,19 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runJsonCodeQlCliCommand<QueryMetadata>(['resolve', 'metadata'], [queryPath], 'Resolving query metadata');
|
||||
}
|
||||
|
||||
/** Resolves the ML models that should be available when evaluating a query. */
|
||||
async resolveMlModels(additionalPacks: string[], queryPath: string): Promise<MlModelsInfo> {
|
||||
const args = await this.cliConstraints.supportsPreciseResolveMlModels()
|
||||
? [...this.getAdditionalPacksArg(additionalPacks), queryPath]
|
||||
: this.getAdditionalPacksArg(additionalPacks);
|
||||
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
|
||||
['resolve', 'ml-models'],
|
||||
args,
|
||||
'Resolving ML models',
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the RAM setting for the query server.
|
||||
* @param queryMemoryMb The maximum amount of RAM to use, in MB.
|
||||
@@ -536,7 +629,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
if (queryMemoryMb !== undefined) {
|
||||
args.push('--ram', queryMemoryMb.toString());
|
||||
}
|
||||
return await this.runJsonCodeQlCliCommand<string[]>(['resolve', 'ram'], args, 'Resolving RAM settings', progressReporter);
|
||||
return await this.runJsonCodeQlCliCommand<string[]>(['resolve', 'ram'], args, 'Resolving RAM settings', true, progressReporter);
|
||||
}
|
||||
/**
|
||||
* Gets the headers (and optionally pagination info) of a bqrs.
|
||||
@@ -552,6 +645,49 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runJsonCodeQlCliCommand<BQRSInfo>(['bqrs', 'info'], subcommandArgs, 'Reading bqrs header');
|
||||
}
|
||||
|
||||
async databaseUnbundle(archivePath: string, target: string, name?: string): Promise<string> {
|
||||
const subcommandArgs = [];
|
||||
if (target) subcommandArgs.push('--target', target);
|
||||
if (name) subcommandArgs.push('--name', name);
|
||||
subcommandArgs.push(archivePath);
|
||||
|
||||
return await this.runCodeQlCliCommand(['database', 'unbundle'], subcommandArgs, `Extracting ${archivePath} to directory ${target}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a .qhelp file to generate Query Help documentation in a specified format.
|
||||
* @param pathToQhelp The path to the .qhelp file
|
||||
* @param format The format in which the query help should be generated {@link https://codeql.github.com/docs/codeql-cli/manual/generate-query-help/#cmdoption-codeql-generate-query-help-format}
|
||||
* @param outputDirectory The output directory for the generated file
|
||||
*/
|
||||
async generateQueryHelp(pathToQhelp: string, outputDirectory?: string): Promise<string> {
|
||||
const subcommandArgs = ['--format=markdown'];
|
||||
if (outputDirectory) subcommandArgs.push('--output', outputDirectory);
|
||||
subcommandArgs.push(pathToQhelp);
|
||||
|
||||
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a summary of an evaluation log.
|
||||
* @param endSummaryPath The path to write only the end of query part of the human-readable summary to.
|
||||
* @param inputPath The path of an evaluation event log.
|
||||
* @param outputPath The path to write a human-readable summary of it to.
|
||||
*/
|
||||
async generateLogSummary(
|
||||
inputPath: string,
|
||||
outputPath: string,
|
||||
endSummaryPath: string,
|
||||
): Promise<string> {
|
||||
const subcommandArgs = [
|
||||
'--format=text',
|
||||
`--end-summary=${endSummaryPath}`,
|
||||
inputPath,
|
||||
outputPath
|
||||
];
|
||||
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating log summary');
|
||||
}
|
||||
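The parameter docs above list endSummaryPath first while the signature takes it last, so a short illustrative call (with made-up paths) may help keep the argument order straight:

```ts
// Illustrative only; the paths are assumptions. Argument order follows the signature:
// input log, then full summary output, then end-of-query summary output.
import { CodeQLCliServer } from './cli';

async function summarizeEvalLog(cliServer: CodeQLCliServer): Promise<string> {
  return cliServer.generateLogSummary(
    '/tmp/query-run/evaluator-log.jsonl',       // inputPath: raw evaluation event log
    '/tmp/query-run/evaluator-log.summary',     // outputPath: human-readable summary
    '/tmp/query-run/evaluator-log-end.summary'  // endSummaryPath: end-of-query portion only
  );
}
```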
|
||||
/**
|
||||
* Gets the results from a bqrs.
|
||||
* @param bqrsPath The path to the bqrs.
|
||||
@@ -575,43 +711,76 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runJsonCodeQlCliCommand<DecodedBqrsChunk>(['bqrs', 'decode'], subcommandArgs, 'Reading bqrs data');
|
||||
}
|
||||
|
||||
async interpretBqrs(metadata: { kind: string; id: string; scored?: string }, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||
async runInterpretCommand(format: string, additonalArgs: string[], metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
||||
const args = [
|
||||
`-t=kind=${metadata.kind}`,
|
||||
`-t=id=${metadata.id}`,
|
||||
'--output', interpretedResultsPath,
|
||||
'--format', SARIF_FORMAT,
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
// interpretation with and without this flag, or do some
|
||||
// grouping client-side.
|
||||
'--no-group-results',
|
||||
];
|
||||
if (config.isCanary() && metadata.scored !== undefined) {
|
||||
args.push(`-t=scored=${metadata.scored}`);
|
||||
}
|
||||
'--format', format,
|
||||
// Forward all of the query metadata.
|
||||
...Object.entries(metadata).map(([key, value]) => `-t=${key}=${value}`)
|
||||
].concat(additonalArgs);
|
||||
if (sourceInfo !== undefined) {
|
||||
args.push(
|
||||
'--source-archive', sourceInfo.sourceArchive,
|
||||
'--source-location-prefix', sourceInfo.sourceLocationPrefix
|
||||
);
|
||||
}
|
||||
|
||||
args.push(
|
||||
'--threads',
|
||||
this.cliConfig.numberThreads.toString(),
|
||||
);
|
||||
|
||||
args.push(
|
||||
'--max-paths',
|
||||
this.cliConfig.maxPaths.toString(),
|
||||
);
|
||||
|
||||
args.push(resultsPath);
|
||||
await this.runCodeQlCliCommand(['bqrs', 'interpret'], args, 'Interpreting query results');
|
||||
}
|
||||
|
||||
let output: string;
|
||||
try {
|
||||
output = await fs.readFile(interpretedResultsPath, 'utf8');
|
||||
} catch (err) {
|
||||
throw new Error(`Reading output of interpretation failed: ${err.stderr || err}`);
|
||||
async interpretBqrsSarif(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||
const additionalArgs = [
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
// interpretation with and without this flag, or do some
|
||||
// grouping client-side.
|
||||
'--no-group-results'
|
||||
];
|
||||
|
||||
await this.runInterpretCommand(SARIF_FORMAT, additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return await sarifParser(interpretedResultsPath);
|
||||
}
|
||||
|
||||
// Warning: this function is untenable for large dot files.
|
||||
async readDotFiles(dir: string): Promise<string[]> {
|
||||
const dotFiles: Promise<string>[] = [];
|
||||
for await (const file of walkDirectory(dir)) {
|
||||
if (file.endsWith('.dot')) {
|
||||
dotFiles.push(fs.readFile(file, 'utf8'));
|
||||
}
|
||||
}
|
||||
return Promise.all(dotFiles);
|
||||
}
|
||||
|
||||
async interpretBqrsGraph(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<string[]> {
|
||||
const additionalArgs = sourceInfo
|
||||
? ['--dot-location-url-format', 'file://' + sourceInfo.sourceLocationPrefix + '{path}:{start:line}:{start:column}:{end:line}:{end:column}']
|
||||
: [];
|
||||
|
||||
await this.runInterpretCommand('dot', additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
|
||||
try {
|
||||
return JSON.parse(output) as sarif.Log;
|
||||
const dot = await this.readDotFiles(interpretedResultsPath);
|
||||
return dot;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of interpretation failed: ${err.stderr || err}`);
|
||||
throw new Error(`Reading output of interpretation failed: ${getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
async generateResultsCsv(metadata: QueryMetadata, resultsPath: string, csvPath: string, sourceInfo?: SourceInfo): Promise<void> {
|
||||
await this.runInterpretCommand(CSV_FORMAT, [], metadata, resultsPath, csvPath, sourceInfo);
|
||||
}
|
||||
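To make the refactoring concrete: both SARIF and CSV interpretation now funnel through runInterpretCommand, and a caller only picks the format-specific wrapper. The sketch below is illustrative; the surrounding function, paths, and logging are assumptions.

```ts
// Illustrative sketch of the refactored interpretation helpers; not part of the diff.
import { CodeQLCliServer, SourceInfo } from './cli';
import { QueryMetadata } from './pure/interface-types';

async function interpretBoth(
  cliServer: CodeQLCliServer,
  metadata: QueryMetadata,
  sourceInfo: SourceInfo
): Promise<void> {
  // SARIF: query metadata is forwarded as -t=key=value flags, plus --no-group-results.
  const sarifLog = await cliServer.interpretBqrsSarif(
    metadata, '/tmp/run/results.bqrs', '/tmp/run/results.sarif', sourceInfo);
  console.log(`Interpreted ${sarifLog.runs.length} SARIF run(s)`);

  // CSV alerts: the same underlying command, run with --format csv and no extra flags.
  await cliServer.generateResultsCsv(metadata, '/tmp/run/results.bqrs', '/tmp/run/alerts.csv', sourceInfo);
}
```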
|
||||
async sortBqrs(resultsPath: string, sortedResultsPath: string, resultSet: string, sortKeys: number[], sortDirections: SortDirection[]): Promise<void> {
|
||||
const sortDirectionStrings = sortDirections.map(direction => {
|
||||
@@ -651,15 +820,19 @@ export class CodeQLCliServer implements Disposable {
|
||||
* Gets information necessary for upgrading a database.
|
||||
* @param dbScheme the path to the dbscheme of the database to be upgraded.
|
||||
* @param searchPath A list of directories to search for upgrade scripts.
|
||||
* @param allowDowngradesIfPossible Whether we should try to include downgrades if we can.
|
||||
* @param targetDbScheme The dbscheme to try to upgrade to.
|
||||
* @returns A list of database upgrade script directories
|
||||
*/
|
||||
resolveUpgrades(dbScheme: string, searchPath: string[], targetDbScheme?: string): Promise<UpgradesInfo> {
|
||||
const args = ['--additional-packs', searchPath.join(path.delimiter), '--dbscheme', dbScheme];
|
||||
async resolveUpgrades(dbScheme: string, searchPath: string[], allowDowngradesIfPossible: boolean, targetDbScheme?: string): Promise<UpgradesInfo> {
|
||||
const args = [...this.getAdditionalPacksArg(searchPath), '--dbscheme', dbScheme];
|
||||
if (targetDbScheme) {
|
||||
args.push('--target-dbscheme', targetDbScheme);
|
||||
if (allowDowngradesIfPossible && await this.cliConstraints.supportsDowngrades()) {
|
||||
args.push('--allow-downgrades');
|
||||
}
|
||||
}
|
||||
return this.runJsonCodeQlCliCommand<UpgradesInfo>(
|
||||
return await this.runJsonCodeQlCliCommand<UpgradesInfo>(
|
||||
['resolve', 'upgrades'],
|
||||
args,
|
||||
'Resolving database upgrade scripts',
|
||||
@@ -674,7 +847,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A dictionary mapping qlpack name to the directory it comes from
|
||||
*/
|
||||
resolveQlpacks(additionalPacks: string[], searchPath?: string[]): Promise<QlpacksInfo> {
|
||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
||||
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||
if (searchPath?.length) {
|
||||
args.push('--search-path', path.join(...searchPath));
|
||||
}
|
||||
@@ -686,6 +859,31 @@ export class CodeQLCliServer implements Disposable {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets information about the available languages.
|
||||
* @returns A dictionary mapping language name to the directory it comes from
|
||||
*/
|
||||
async resolveLanguages(): Promise<LanguagesInfo> {
|
||||
return await this.runJsonCodeQlCliCommand<LanguagesInfo>(['resolve', 'languages'], [], 'Resolving languages');
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the list of available languages. Refines the result of `resolveLanguages()`, by excluding
|
||||
* extra things like "xml" and "properties".
|
||||
*
|
||||
* @returns An array of languages that are supported by the current version of the CodeQL CLI.
|
||||
*/
|
||||
public async getSupportedLanguages(): Promise<string[]> {
|
||||
if (!this._supportedLanguages) {
|
||||
// Get the intersection of resolveLanguages with the list of hardcoded languages in dbSchemeToLanguage.
|
||||
const resolvedLanguages = Object.keys(await this.resolveLanguages());
|
||||
const hardcodedLanguages = Object.values(dbSchemeToLanguage);
|
||||
|
||||
this._supportedLanguages = resolvedLanguages.filter(lang => hardcodedLanguages.includes(lang));
|
||||
}
|
||||
return this._supportedLanguages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets information about queries in a query suite.
|
||||
* @param suite The suite to resolve.
|
||||
@@ -694,11 +892,15 @@ export class CodeQLCliServer implements Disposable {
|
||||
* the default CLI search path is used.
|
||||
* @returns A list of query files found.
|
||||
*/
|
||||
resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
|
||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
||||
async resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
|
||||
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||
if (searchPath !== undefined) {
|
||||
args.push('--search-path', path.join(...searchPath));
|
||||
}
|
||||
if (await this.cliConstraints.supportsAllowLibraryPacksInResolveQueries()) {
|
||||
// All of our usage of `codeql resolve queries` needs to handle library packs.
|
||||
args.push('--allow-library-packs');
|
||||
}
|
||||
args.push(suite);
|
||||
return this.runJsonCodeQlCliCommand<string[]>(
|
||||
['resolve', 'queries'],
|
||||
@@ -707,8 +909,52 @@ export class CodeQLCliServer implements Disposable {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads a specified pack.
|
||||
* @param packs The `<package-scope/name[@version]>` of the packs to download.
|
||||
*/
|
||||
async packDownload(packs: string[]) {
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'download'], packs, 'Downloading packs');
|
||||
}
|
||||
|
||||
async packInstall(dir: string, forceUpdate = false) {
|
||||
const args = [dir];
|
||||
if (forceUpdate) {
|
||||
args.push('--mode', 'update');
|
||||
}
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'install'], args, 'Installing pack dependencies');
|
||||
}
|
||||
|
||||
async packBundle(dir: string, workspaceFolders: string[], outputPath: string, precompile = true): Promise<void> {
|
||||
const args = [
|
||||
'-o',
|
||||
outputPath,
|
||||
dir,
|
||||
...this.getAdditionalPacksArg(workspaceFolders)
|
||||
];
|
||||
if (!precompile && await this.cliConstraints.supportsNoPrecompile()) {
|
||||
args.push('--no-precompile');
|
||||
}
|
||||
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'bundle'], args, 'Bundling pack');
|
||||
}
|
||||
|
||||
async packPacklist(dir: string, includeQueries: boolean): Promise<string[]> {
|
||||
const args = includeQueries ? [dir] : ['--no-include-queries', dir];
|
||||
// since 2.7.1, packlist returns an object with a "paths" property that is a list of packs.
|
||||
// previous versions return a list of packs.
|
||||
const results: { paths: string[] } | string[] = await this.runJsonCodeQlCliCommand(['pack', 'packlist'], args, 'Generating the pack list');
|
||||
|
||||
// Once we no longer need to support 2.7.0 or earlier, we can remove this and assume all versions return an object.
|
||||
if ('paths' in results) {
|
||||
return results.paths;
|
||||
} else {
|
||||
return results;
|
||||
}
|
||||
}
|
||||
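The new pack helpers compose naturally. A hedged end-to-end sketch (the directory, output path, and function name are assumptions) might look like:

```ts
// Illustrative sketch, not part of the diff: install a pack's dependencies,
// list its files, and bundle it without precompilation.
import { CodeQLCliServer } from './cli';

async function publishLocalPack(
  cliServer: CodeQLCliServer,
  packDir: string,
  workspaceFolders: string[]
): Promise<void> {
  await cliServer.packInstall(packDir, /* forceUpdate */ true);
  const files = await cliServer.packPacklist(packDir, /* includeQueries */ true);
  console.log(`Pack contains ${files.length} file(s)`);
  await cliServer.packBundle(packDir, workspaceFolders, '/tmp/pack-bundle.tgz', /* precompile */ false);
}
```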
|
||||
async generateDil(qloFile: string, outFile: string): Promise<void> {
|
||||
const extraArgs = await this.supportsDecompileDil()
|
||||
const extraArgs = await this.cliConstraints.supportsDecompileDil()
|
||||
? ['--kind', 'dil', '-o', outFile, qloFile]
|
||||
: ['-o', outFile, qloFile];
|
||||
await this.runCodeQlCliCommand(
|
||||
@@ -721,18 +967,14 @@ export class CodeQLCliServer implements Disposable {
|
||||
public async getVersion() {
|
||||
if (!this._version) {
|
||||
this._version = await this.refreshVersion();
|
||||
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
|
||||
await commands.executeCommand(
|
||||
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
|
||||
);
|
||||
}
|
||||
return this._version;
|
||||
}
|
||||
|
||||
private async supportsDecompileDil() {
|
||||
return (await this.getVersion()).compare(CodeQLCliServer.CLI_VERSION_WITH_DECOMPILE_KIND_DIL) >= 0;
|
||||
}
|
||||
|
||||
public async supportsLanguageName() {
|
||||
return (await this.getVersion()).compare(CodeQLCliServer.CLI_VERSION_WITH_LANGUAGE) >= 0;
|
||||
}
|
||||
|
||||
private async refreshVersion() {
|
||||
const distribution = await this.distributionProvider.getDistribution();
|
||||
switch (distribution.kind) {
|
||||
@@ -747,6 +989,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
throw new Error('No distribution found');
|
||||
}
|
||||
}

private getAdditionalPacksArg(paths: string[]): string[] {
return paths.length
? ['--additional-packs', paths.join(path.delimiter)]
: [];
}
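path.delimiter is platform-specific, so the resulting flag value differs by OS; a quick illustration (paths are made up):

```ts
// getAdditionalPacksArg(['/w1', '/w2'])
//   -> ['--additional-packs', '/w1:/w2']       on Linux/macOS (path.delimiter === ':')
// getAdditionalPacksArg(['C:\\w1', 'D:\\w2'])
//   -> ['--additional-packs', 'C:\\w1;D:\\w2'] on Windows     (path.delimiter === ';')
// getAdditionalPacksArg([])
//   -> []                                      no flag when there are no workspace folders
```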
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -782,7 +1030,7 @@ export function spawnServer(
|
||||
if (progressReporter !== undefined) {
|
||||
progressReporter.report({ message: `Starting ${name}` });
|
||||
}
|
||||
logger.log(`Starting ${name} using CodeQL CLI: ${base} ${argsString}`);
|
||||
void logger.log(`Starting ${name} using CodeQL CLI: ${base} ${argsString}`);
|
||||
const child = child_process.spawn(base, args);
|
||||
if (!child || !child.pid) {
|
||||
throw new Error(`Failed to start ${name} using command ${base} ${argsString}.`);
|
||||
@@ -798,7 +1046,7 @@ export function spawnServer(
|
||||
if (progressReporter !== undefined) {
|
||||
progressReporter.report({ message: `Started ${name}` });
|
||||
}
|
||||
logger.log(`${name} started on PID: ${child.pid}`);
|
||||
void logger.log(`${name} started on PID: ${child.pid}`);
|
||||
return child;
|
||||
}
|
||||
|
||||
@@ -827,13 +1075,13 @@ export async function runCodeQlCliCommand(
|
||||
if (progressReporter !== undefined) {
|
||||
progressReporter.report({ message: description });
|
||||
}
|
||||
logger.log(`${description} using CodeQL CLI: ${codeQlPath} ${argsString}...`);
|
||||
void logger.log(`${description} using CodeQL CLI: ${codeQlPath} ${argsString}...`);
|
||||
const result = await promisify(child_process.execFile)(codeQlPath, args);
|
||||
logger.log(result.stderr);
|
||||
logger.log('CLI command succeeded.');
|
||||
void logger.log(result.stderr);
|
||||
void logger.log('CLI command succeeded.');
|
||||
return result.stdout;
|
||||
} catch (err) {
|
||||
throw new Error(`${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -866,6 +1114,20 @@ class SplitBuffer {
|
||||
this.buffer += this.separators[0]; // Append a separator to the end to ensure the last line is returned.
|
||||
}

/**
* A version of startsWith that isn't overridden by a broken version of ms-python.
*
* The definition comes from
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith
* which is CC0/public domain
*
* See https://github.com/github/vscode-codeql/issues/802 for more context as to why we need it.
*/
private static startsWith(s: string, searchString: string, position: number): boolean {
const pos = position > 0 ? position | 0 : 0;
return s.substring(pos, pos + searchString.length) === searchString;
}
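To make the check concrete, here is the same comparison written out for a small buffer (the values are illustrative):

```ts
// With buffer 'abc\r\ndef', a search index of 3 and separator '\r\n':
const buffer = 'abc\r\ndef';
const searchIndex = 3;
const separator = '\r\n';
const pos = searchIndex > 0 ? searchIndex | 0 : 0;            // 3
buffer.substring(pos, pos + separator.length) === separator;  // true: 'abc' can be split off
```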
|
||||
|
||||
/**
|
||||
* Extract the next full line from the buffer, if one is available.
|
||||
* @returns The text of the next available full line (without the separator), or `undefined` if no
|
||||
@@ -874,9 +1136,9 @@ class SplitBuffer {
|
||||
public getNextLine(): string | undefined {
|
||||
while (this.searchIndex <= (this.buffer.length - this.maxSeparatorLength)) {
|
||||
for (const separator of this.separators) {
|
||||
if (this.buffer.startsWith(separator, this.searchIndex)) {
|
||||
const line = this.buffer.substr(0, this.searchIndex);
|
||||
this.buffer = this.buffer.substr(this.searchIndex + separator.length);
|
||||
if (SplitBuffer.startsWith(this.buffer, separator, this.searchIndex)) {
|
||||
const line = this.buffer.slice(0, this.searchIndex);
|
||||
this.buffer = this.buffer.slice(this.searchIndex + separator.length);
|
||||
this.searchIndex = 0;
|
||||
return line;
|
||||
}
|
||||
@@ -931,7 +1193,8 @@ const lineEndings = ['\r\n', '\r', '\n'];
|
||||
*/
|
||||
async function logStream(stream: Readable, logger: Logger): Promise<void> {
|
||||
for await (const line of await splitStreamAtSeparators(stream, lineEndings)) {
|
||||
logger.log(line);
|
||||
// Await the result of log here in order to ensure the logs are written in the correct order.
|
||||
await logger.log(line);
|
||||
}
|
||||
}
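The await matters because Logger.log is asynchronous; without it, slow writes could complete out of order. A self-contained sketch of the difference, using a hypothetical async logger:

```ts
// Hypothetical async logger, for illustration only.
async function slowLog(line: string): Promise<void> {
  await new Promise(resolve => setTimeout(resolve, Math.random() * 10));
  console.log(line);
}

async function ordered(lines: string[]) {
  for (const line of lines) {
    await slowLog(line);  // output preserves input order
  }
}

function unordered(lines: string[]) {
  for (const line of lines) {
    void slowLog(line);   // output order depends on which timer fires first
  }
}
```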
|
||||
|
||||
@@ -947,3 +1210,164 @@ export function shouldDebugQueryServer() {
|
||||
&& process.env.QUERY_SERVER_JAVA_DEBUG !== '0'
|
||||
&& process.env.QUERY_SERVER_JAVA_DEBUG?.toLocaleLowerCase() !== 'false';
|
||||
}

export function shouldDebugCliServer() {
return 'CLI_SERVER_JAVA_DEBUG' in process.env
&& process.env.CLI_SERVER_JAVA_DEBUG !== '0'
&& process.env.CLI_SERVER_JAVA_DEBUG?.toLocaleLowerCase() !== 'false';
}
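Both debug helpers treat the variable as "present and not explicitly disabled"; for example:

```ts
// CLI_SERVER_JAVA_DEBUG unset     -> false (the 'in' check fails)
// CLI_SERVER_JAVA_DEBUG=''        -> true  (present, and neither '0' nor 'false')
// CLI_SERVER_JAVA_DEBUG='0'       -> false
// CLI_SERVER_JAVA_DEBUG='False'   -> false (comparison is case-insensitive)
// CLI_SERVER_JAVA_DEBUG='true'    -> true
```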
|
||||
|
||||
export class CliVersionConstraint {
|
||||
|
||||
/**
|
||||
* CLI version where --kind=DIL was introduced
|
||||
*/
|
||||
public static CLI_VERSION_WITH_DECOMPILE_KIND_DIL = new SemVer('2.3.0');
|
||||
|
||||
/**
|
||||
* CLI version where languages are exposed during a `codeql resolve database` command.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_LANGUAGE = new SemVer('2.4.1');
|
||||
|
||||
/**
|
||||
* CLI version where `codeql resolve upgrades` supports
|
||||
* the `--allow-downgrades` flag
|
||||
*/
|
||||
public static CLI_VERSION_WITH_DOWNGRADES = new SemVer('2.4.4');
|
||||
|
||||
/**
|
||||
* CLI version where the `codeql resolve qlref` command is available.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_RESOLVE_QLREF = new SemVer('2.5.1');
|
||||
|
||||
/**
|
||||
* CLI version where database registration was introduced
|
||||
*/
|
||||
public static CLI_VERSION_WITH_DB_REGISTRATION = new SemVer('2.4.1');
|
||||
|
||||
/**
|
||||
* CLI version where the `--allow-library-packs` option to `codeql resolve queries` was
|
||||
* introduced.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_ALLOW_LIBRARY_PACKS_IN_RESOLVE_QUERIES = new SemVer('2.6.1');
|
||||
|
||||
/**
|
||||
* CLI version where the `database unbundle` subcommand was introduced.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_DATABASE_UNBUNDLE = new SemVer('2.6.0');
|
||||
|
||||
/**
|
||||
* CLI version where the `--no-precompile` option for pack creation was introduced.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_NO_PRECOMPILE = new SemVer('2.7.1');
|
||||
|
||||
/**
|
||||
* CLI version where remote queries (variant analysis) are supported.
|
||||
*/
|
||||
public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');
|
||||
|
||||
/**
|
||||
* CLI version where the `resolve ml-models` subcommand was introduced.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_RESOLVE_ML_MODELS = new SemVer('2.7.3');
|
||||
|
||||
/**
|
||||
* CLI version where the `resolve ml-models` subcommand was enhanced to work with packaging.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS = new SemVer('2.10.0');
|
||||
|
||||
/**
|
||||
* CLI version where the `--old-eval-stats` option to the query server was introduced.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_OLD_EVAL_STATS = new SemVer('2.7.4');
|
||||
|
||||
/**
|
||||
* CLI version where packaging was introduced.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PACKAGING = new SemVer('2.6.0');
|
||||
|
||||
/**
|
||||
* CLI version where the `--evaluator-log` and related options to the query server were introduced,
|
||||
* on a per-query server basis.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
|
||||
|
||||
/**
|
||||
* CLI version that supports rotating structured logs to produce one per query.
|
||||
*
|
||||
* Note that 2.8.4 supports generating the evaluation logs and summaries,
|
||||
* but 2.9.0 includes a new option to produce the end-of-query summary logs to
|
||||
* the query server console. For simplicity we gate all features behind 2.9.0,
|
||||
* but if a user is tied to the 2.8 release, we can enable evaluator logs
|
||||
* and summaries for them.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');
|
||||
|
||||
constructor(private readonly cli: CodeQLCliServer) {
|
||||
/**/
|
||||
}
|
||||
|
||||
private async isVersionAtLeast(v: SemVer) {
|
||||
return (await this.cli.getVersion()).compare(v) >= 0;
|
||||
}
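SemVer.compare returns -1, 0 or 1, so the `>= 0` check reads as "the installed CLI is at or above the required version":

```ts
import { SemVer } from 'semver';

new SemVer('2.9.1').compare(new SemVer('2.9.0'));  // 1  -> at least 2.9.0
new SemVer('2.9.0').compare(new SemVer('2.9.0'));  // 0  -> at least 2.9.0
new SemVer('2.8.4').compare(new SemVer('2.9.0'));  // -1 -> too old
```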
|
||||
|
||||
public async supportsDecompileDil() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DECOMPILE_KIND_DIL);
|
||||
}
|
||||
|
||||
public async supportsLanguageName() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_LANGUAGE);
|
||||
}
|
||||
|
||||
public async supportsDowngrades() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DOWNGRADES);
|
||||
}
|
||||
|
||||
public async supportsResolveQlref() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_QLREF);
|
||||
}
|
||||
|
||||
public async supportsAllowLibraryPacksInResolveQueries() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_ALLOW_LIBRARY_PACKS_IN_RESOLVE_QUERIES);
|
||||
}
|
||||
|
||||
async supportsDatabaseRegistration() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DB_REGISTRATION);
|
||||
}
|
||||
|
||||
async supportsDatabaseUnbundle() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DATABASE_UNBUNDLE);
|
||||
}
|
||||
|
||||
async supportsNoPrecompile() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NO_PRECOMPILE);
|
||||
}
|
||||
|
||||
async supportsRemoteQueries() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES);
|
||||
}
|
||||
|
||||
async supportsResolveMlModels() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_ML_MODELS);
|
||||
}
|
||||
|
||||
async supportsPreciseResolveMlModels() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS);
|
||||
}
|
||||
|
||||
async supportsOldEvalStats() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_OLD_EVAL_STATS);
|
||||
}
|
||||
|
||||
async supportsPackaging() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PACKAGING);
|
||||
}
|
||||
|
||||
async supportsStructuredEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
|
||||
}
|
||||
|
||||
async supportsPerQueryEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
} from 'vscode';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
|
||||
import { logger } from './logging';
|
||||
import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { telemetryListener } from './telemetry';
|
||||
|
||||
export class UserCancellationException extends Error {
|
||||
@@ -121,21 +122,22 @@ export function commandRunner(
|
||||
try {
|
||||
return await task(...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
logger.log(errorMessage);
|
||||
void logger.log(errorMessage);
|
||||
} else {
|
||||
showAndLogWarningMessage(errorMessage);
|
||||
void showAndLogWarningMessage(errorMessage);
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
showAndLogErrorMessage(errorMessage, {
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
fullMessage
|
||||
});
|
||||
}
|
||||
@@ -160,7 +162,8 @@ export function commandRunner(
|
||||
export function commandRunnerWithProgress<R>(
|
||||
commandId: string,
|
||||
task: ProgressTask<R>,
|
||||
progressOptions: Partial<ProgressOptions>
|
||||
progressOptions: Partial<ProgressOptions>,
|
||||
outputLogger = logger
|
||||
): Disposable {
|
||||
return commands.registerCommand(commandId, async (...args: any[]) => {
|
||||
const startTime = Date.now();
|
||||
@@ -172,21 +175,23 @@ export function commandRunnerWithProgress<R>(
|
||||
try {
|
||||
return await withProgress(progressOptionsWithDefaults, task, ...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
logger.log(errorMessage);
|
||||
void outputLogger.log(errorMessage);
|
||||
} else {
|
||||
showAndLogWarningMessage(errorMessage);
|
||||
void showAndLogWarningMessage(errorMessage, { outputLogger });
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
showAndLogErrorMessage(errorMessage, {
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
outputLogger,
|
||||
fullMessage
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { DisposableObject } from '../vscode-utils/disposable-object';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
@@ -8,8 +8,7 @@ import {
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { tmpDir } from '../run-queries';
|
||||
import { CompletedQuery } from '../query-results';
|
||||
import { tmpDir } from '../helpers';
|
||||
import {
|
||||
FromCompareViewMessage,
|
||||
ToCompareViewMessage,
|
||||
@@ -21,10 +20,13 @@ import { DatabaseManager } from '../databases';
|
||||
import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
|
||||
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
|
||||
import resultsDiff from './resultsDiff';
|
||||
import { CompletedLocalQueryInfo } from '../query-results';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { HistoryItemLabelProvider } from '../history-item-label-provider';
|
||||
|
||||
interface ComparePair {
|
||||
from: CompletedQuery;
|
||||
to: CompletedQuery;
|
||||
from: CompletedLocalQueryInfo;
|
||||
to: CompletedLocalQueryInfo;
|
||||
}
|
||||
|
||||
export class CompareInterfaceManager extends DisposableObject {
|
||||
@@ -38,16 +40,17 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
private databaseManager: DatabaseManager,
|
||||
private cliServer: CodeQLCliServer,
|
||||
private logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider,
|
||||
private showQueryResultsCallback: (
|
||||
item: CompletedQuery
|
||||
item: CompletedLocalQueryInfo
|
||||
) => Promise<void>
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
async showResults(
|
||||
from: CompletedQuery,
|
||||
to: CompletedQuery,
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
selectedResultSetName?: string
|
||||
) {
|
||||
this.comparePair = { from, to };
|
||||
@@ -70,7 +73,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
try {
|
||||
rows = this.compareResults(fromResultSet, toResultSet);
|
||||
} catch (e) {
|
||||
message = e.message;
|
||||
message = getErrorMessage(e);
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
@@ -80,18 +83,14 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
// since we split the description into several rows
|
||||
// only run interpolation if the label is user-defined
|
||||
// otherwise we will wind up with duplicated rows
|
||||
name: from.options.label
|
||||
? from.interpolate(from.getLabel())
|
||||
: from.queryName,
|
||||
status: from.statusString,
|
||||
time: from.time,
|
||||
name: this.labelProvider.getShortLabel(from),
|
||||
status: from.completedQuery.statusString,
|
||||
time: from.startTime,
|
||||
},
|
||||
toQuery: {
|
||||
name: to.options.label
|
||||
? to.interpolate(to.getLabel())
|
||||
: to.queryName,
|
||||
status: to.statusString,
|
||||
time: to.time,
|
||||
name: this.labelProvider.getShortLabel(to),
|
||||
status: to.completedQuery.statusString,
|
||||
time: to.startTime,
|
||||
},
|
||||
},
|
||||
columns: fromResultSet.schema.columns,
|
||||
@@ -99,7 +98,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
currentResultSetName: currentResultSetName,
|
||||
rows,
|
||||
message,
|
||||
datebaseUri: to.database.databaseUri,
|
||||
databaseUri: to.initialInfo.databaseInfo.databaseUri,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -121,33 +120,34 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
],
|
||||
}
|
||||
));
|
||||
this.panel.onDidDispose(
|
||||
this.push(this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.comparePair = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
);
|
||||
));
|
||||
|
||||
const scriptPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/compareView.js')
|
||||
);
|
||||
|
||||
const stylesheetPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/resultsView.css')
|
||||
ctx.asAbsolutePath('out/view/resultsView.css')
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
stylesheetPathOnDisk
|
||||
[stylesheetPathOnDisk],
|
||||
false
|
||||
);
|
||||
panel.webview.onDidReceiveMessage(
|
||||
this.push(panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
);
|
||||
));
|
||||
}
|
||||
return this.panel;
|
||||
}
|
||||
@@ -173,7 +173,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
break;
|
||||
|
||||
case 'changeCompare':
|
||||
this.changeTable(msg.newResultSetName);
|
||||
await this.changeTable(msg.newResultSetName);
|
||||
break;
|
||||
|
||||
case 'viewSourceFile':
|
||||
@@ -191,15 +191,15 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
}
|
||||
|
||||
private async findCommonResultSetNames(
|
||||
from: CompletedQuery,
|
||||
to: CompletedQuery,
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
selectedResultSetName: string | undefined
|
||||
): Promise<[string[], string, RawResultSet, RawResultSet]> {
|
||||
const fromSchemas = await this.cliServer.bqrsInfo(
|
||||
from.query.resultsPaths.resultsPath
|
||||
from.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
const toSchemas = await this.cliServer.bqrsInfo(
|
||||
to.query.resultsPaths.resultsPath
|
||||
to.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
const fromSchemaNames = fromSchemas['result-sets'].map(
|
||||
(schema) => schema.name
|
||||
@@ -215,12 +215,12 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
const fromResultSet = await this.getResultSet(
|
||||
fromSchemas,
|
||||
currentResultSetName,
|
||||
from.query.resultsPaths.resultsPath
|
||||
from.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
const toResultSet = await this.getResultSet(
|
||||
toSchemas,
|
||||
currentResultSetName,
|
||||
to.query.resultsPaths.resultsPath
|
||||
to.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
return [
|
||||
commonResultSetNames,
|
||||
@@ -267,11 +267,11 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
return resultsDiff(fromResults, toResults);
|
||||
}
|
||||
|
||||
private openQuery(kind: 'from' | 'to') {
|
||||
private async openQuery(kind: 'from' | 'to') {
|
||||
const toOpen =
|
||||
kind === 'from' ? this.comparePair?.from : this.comparePair?.to;
|
||||
if (toOpen) {
|
||||
this.showQueryResultsCallback(toOpen);
|
||||
await this.showQueryResultsCallback(toOpen);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,11 +17,11 @@ const emptyComparison: SetComparisonsMessage = {
|
||||
columns: [],
|
||||
commonResultSetNames: [],
|
||||
currentResultSetName: '',
|
||||
datebaseUri: '',
|
||||
databaseUri: '',
|
||||
message: 'Empty comparison'
|
||||
};
|
||||
|
||||
export function Compare(_: {}): JSX.Element {
|
||||
export function Compare(_: Record<string, never>): JSX.Element {
|
||||
const [comparison, setComparison] = useState<SetComparisonsMessage>(
|
||||
emptyComparison
|
||||
);
|
||||
@@ -38,7 +38,9 @@ export function Compare(_: {}): JSX.Element {
|
||||
setComparison(msg);
|
||||
}
|
||||
} else {
|
||||
console.error(`Invalid event origin ${evt.origin}`);
|
||||
// sanitize origin
|
||||
const origin = evt.origin.replace(/\n|\r/g, '');
|
||||
console.error(`Invalid event origin ${origin}`);
|
||||
}
|
||||
});
|
||||
});
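Stripping carriage returns and newlines before logging keeps a hostile origin value from forging extra log lines; for instance:

```ts
const evil = 'https://attacker.example\nERROR: forged entry';
// Unsanitized, this would print two lines; after .replace(/\n|\r/g, '') it stays on one.
console.error(`Invalid event origin ${evil.replace(/\n|\r/g, '')}`);
```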
|
||||
@@ -64,8 +66,8 @@ export function Compare(_: {}): JSX.Element {
|
||||
{hasRows ? (
|
||||
<CompareTable comparison={comparison}></CompareTable>
|
||||
) : (
|
||||
<div className="vscode-codeql__compare-message">{message}</div>
|
||||
)}
|
||||
<div className="vscode-codeql__compare-message">{message}</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
} catch (err) {
|
||||
|
||||
@@ -76,7 +76,7 @@ export default function CompareTable(props: Props) {
|
||||
schemaName={comparison.currentResultSetName}
|
||||
preventSort={true}
|
||||
/>
|
||||
{createRows(rows.from, comparison.datebaseUri)}
|
||||
{createRows(rows.from, comparison.databaseUri)}
|
||||
</table>
|
||||
</td>
|
||||
<td>
|
||||
@@ -86,7 +86,7 @@ export default function CompareTable(props: Props) {
|
||||
schemaName={comparison.currentResultSetName}
|
||||
preventSort={true}
|
||||
/>
|
||||
{createRows(rows.to, comparison.datebaseUri)}
|
||||
{createRows(rows.to, comparison.databaseUri)}
|
||||
</table>
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
@@ -4,10 +4,7 @@
|
||||
"moduleResolution": "node",
|
||||
"target": "es6",
|
||||
"outDir": "out",
|
||||
"lib": [
|
||||
"es6",
|
||||
"dom"
|
||||
],
|
||||
"lib": ["ES2021", "dom"],
|
||||
"jsx": "react",
|
||||
"sourceMap": true,
|
||||
"rootDir": "..",
|
||||
@@ -17,7 +14,5 @@
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"experimentalDecorators": true
|
||||
},
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { DisposableObject } from './vscode-utils/disposable-object';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
|
||||
import { DistributionManager } from './distribution';
|
||||
import { logger } from './logging';
|
||||
import { ONE_DAY_IN_MS } from './pure/time';
|
||||
|
||||
/** Helper class to look up a labelled (and possibly nested) setting. */
|
||||
export class Setting {
|
||||
@@ -41,6 +42,7 @@ const ROOT_SETTING = new Setting('codeQL');
|
||||
|
||||
// Global configuration
|
||||
const TELEMETRY_SETTING = new Setting('telemetry', ROOT_SETTING);
|
||||
const AST_VIEWER_SETTING = new Setting('astViewer', ROOT_SETTING);
|
||||
const GLOBAL_TELEMETRY_SETTING = new Setting('telemetry');
|
||||
|
||||
export const LOG_TELEMETRY = new Setting('logTelemetry', TELEMETRY_SETTING);
|
||||
@@ -50,11 +52,14 @@ export const GLOBAL_ENABLE_TELEMETRY = new Setting('enableTelemetry', GLOBAL_TEL
|
||||
|
||||
// Distribution configuration
|
||||
const DISTRIBUTION_SETTING = new Setting('cli', ROOT_SETTING);
|
||||
const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
|
||||
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
|
||||
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
|
||||
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
|
||||
|
||||
// Query History configuration
|
||||
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
|
||||
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
|
||||
const QUERY_HISTORY_TTL = new Setting('format', QUERY_HISTORY_SETTING);
|
||||
|
||||
/** When these settings change, the distribution should be updated. */
|
||||
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
|
||||
@@ -70,44 +75,58 @@ export interface DistributionConfig {
|
||||
}
|
||||
|
||||
// Query server configuration
|
||||
|
||||
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
|
||||
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
|
||||
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
|
||||
const CACHE_SIZE_SETTING = new Setting('cacheSize', RUNNING_QUERIES_SETTING);
|
||||
const TIMEOUT_SETTING = new Setting('timeout', RUNNING_QUERIES_SETTING);
|
||||
const MEMORY_SETTING = new Setting('memory', RUNNING_QUERIES_SETTING);
|
||||
const DEBUG_SETTING = new Setting('debug', RUNNING_QUERIES_SETTING);
|
||||
const MAX_PATHS = new Setting('maxPaths', RUNNING_QUERIES_SETTING);
|
||||
const RUNNING_TESTS_SETTING = new Setting('runningTests', ROOT_SETTING);
|
||||
const RESULTS_DISPLAY_SETTING = new Setting('resultsDisplay', ROOT_SETTING);
|
||||
|
||||
export const ADDITIONAL_TEST_ARGUMENTS_SETTING = new Setting('additionalTestArguments', RUNNING_TESTS_SETTING);
|
||||
export const NUMBER_OF_TEST_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_TESTS_SETTING);
|
||||
export const MAX_QUERIES = new Setting('maxQueries', RUNNING_QUERIES_SETTING);
|
||||
export const AUTOSAVE_SETTING = new Setting('autoSave', RUNNING_QUERIES_SETTING);
|
||||
export const PAGE_SIZE = new Setting('pageSize', RESULTS_DISPLAY_SETTING);
|
||||
const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_QUERIES_SETTING);
|
||||
|
||||
/** When these settings change, the running query server should be restarted. */
|
||||
const QUERY_SERVER_RESTARTING_SETTINGS = [NUMBER_OF_THREADS_SETTING, MEMORY_SETTING, DEBUG_SETTING];
|
||||
const QUERY_SERVER_RESTARTING_SETTINGS = [
|
||||
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
|
||||
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
|
||||
];
|
||||
|
||||
export interface QueryServerConfig {
|
||||
codeQlPath: string;
|
||||
debug: boolean;
|
||||
numThreads: number;
|
||||
saveCache: boolean;
|
||||
cacheSize: number;
|
||||
queryMemoryMb?: number;
|
||||
timeoutSecs: number;
|
||||
customLogDirectory?: string;
|
||||
onDidChangeConfiguration?: Event<void>;
|
||||
}
|
||||
|
||||
/** When these settings change, the query history should be refreshed. */
|
||||
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING];
|
||||
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING, QUERY_HISTORY_TTL];
|
||||
|
||||
export interface QueryHistoryConfig {
|
||||
format: string;
|
||||
ttlInMillis: number;
|
||||
onDidChangeConfiguration: Event<void>;
|
||||
}
|
||||
|
||||
const CLI_SETTINGS = [NUMBER_OF_TEST_THREADS_SETTING];
|
||||
const CLI_SETTINGS = [ADDITIONAL_TEST_ARGUMENTS_SETTING, NUMBER_OF_TEST_THREADS_SETTING, NUMBER_OF_THREADS_SETTING, MAX_PATHS];
|
||||
|
||||
export interface CliConfig {
|
||||
additionalTestArguments: string[];
|
||||
numberTestThreads: number;
|
||||
numberThreads: number;
|
||||
maxPaths: number;
|
||||
onDidChangeConfiguration?: Event<void>;
|
||||
}
|
||||
|
||||
@@ -137,7 +156,7 @@ export abstract class ConfigListener extends DisposableObject {
|
||||
|
||||
protected abstract handleDidChangeConfiguration(e: ConfigurationChangeEvent): void;
|
||||
private updateConfiguration(): void {
|
||||
this._onDidChangeConfiguration.fire();
|
||||
this._onDidChangeConfiguration.fire(undefined);
|
||||
}
|
||||
|
||||
public get onDidChangeConfiguration(): Event<void> {
|
||||
@@ -179,7 +198,7 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
|
||||
config.push(distributionManager.onDidChangeDistribution(async () => {
|
||||
const codeQlPath = await distributionManager.getCodeQlPathWithoutVersionCheck();
|
||||
config._codeQlPath = codeQlPath!;
|
||||
config._onDidChangeConfiguration.fire();
|
||||
config._onDidChangeConfiguration.fire(undefined);
|
||||
}));
|
||||
}
|
||||
return config;
|
||||
@@ -189,10 +208,22 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
|
||||
return this._codeQlPath;
|
||||
}
|
||||
|
||||
public get customLogDirectory(): string | undefined {
|
||||
return CUSTOM_LOG_DIRECTORY_SETTING.getValue<string>() || undefined;
|
||||
}
|
||||
|
||||
public get numThreads(): number {
|
||||
return NUMBER_OF_THREADS_SETTING.getValue<number>();
|
||||
}
|
||||
|
||||
public get saveCache(): boolean {
|
||||
return SAVE_CACHE_SETTING.getValue<boolean>();
|
||||
}
|
||||
|
||||
public get cacheSize(): number {
|
||||
return CACHE_SIZE_SETTING.getValue<number | null>() || 0;
|
||||
}
|
||||
|
||||
/** Gets the configured query timeout, in seconds. This looks up the setting at the time of access. */
|
||||
public get timeoutSecs(): number {
|
||||
return TIMEOUT_SETTING.getValue<number | null>() || 0;
|
||||
@@ -204,7 +235,7 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
|
||||
return undefined;
|
||||
}
|
||||
if (memory == 0 || typeof (memory) !== 'number') {
|
||||
logger.log(`Ignoring value '${memory}' for setting ${MEMORY_SETTING.qualifiedName}`);
|
||||
void logger.log(`Ignoring value '${memory}' for setting ${MEMORY_SETTING.qualifiedName}`);
|
||||
return undefined;
|
||||
}
|
||||
return memory;
|
||||
@@ -227,19 +258,47 @@ export class QueryHistoryConfigListener extends ConfigListener implements QueryH
|
||||
public get format(): string {
|
||||
return QUERY_HISTORY_FORMAT_SETTING.getValue<string>();
|
||||
}
|
||||
|
||||
/**
|
||||
* The configuration value is in days, but return the value in milliseconds to make it easier to use.
|
||||
*/
|
||||
public get ttlInMillis(): number {
|
||||
return (QUERY_HISTORY_TTL.getValue<number>() || 30) * ONE_DAY_IN_MS;
|
||||
}
|
||||
}
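ONE_DAY_IN_MS is imported from './pure/time'; a plausible definition is shown below as an assumption, only so the arithmetic is explicit:

```ts
// Assumed definition of the imported constant:
export const ONE_DAY_IN_MS = 1000 * 60 * 60 * 24;  // 86_400_000

// With the default of 30 days:
// ttlInMillis === 30 * ONE_DAY_IN_MS === 2_592_000_000
```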
|
||||
|
||||
export class CliConfigListener extends ConfigListener implements CliConfig {
|
||||
public get additionalTestArguments(): string[] {
|
||||
return ADDITIONAL_TEST_ARGUMENTS_SETTING.getValue();
|
||||
}
|
||||
|
||||
public get numberTestThreads(): number {
|
||||
return NUMBER_OF_TEST_THREADS_SETTING.getValue();
|
||||
}
|
||||
|
||||
public get numberThreads(): number {
|
||||
return NUMBER_OF_THREADS_SETTING.getValue<number>();
|
||||
}
|
||||
|
||||
public get maxPaths(): number {
|
||||
return MAX_PATHS.getValue<number>();
|
||||
}
|
||||
|
||||
protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
|
||||
this.handleDidChangeConfigurationForRelevantSettings(CLI_SETTINGS, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to enable CodeLens for the 'Quick Evaluation' command.
|
||||
*/
|
||||
const QUICK_EVAL_CODELENS_SETTING = new Setting('quickEvalCodelens', RUNNING_QUERIES_SETTING);
|
||||
|
||||
export function isQuickEvalCodelensEnabled() {
|
||||
return QUICK_EVAL_CODELENS_SETTING.getValue<boolean>();
|
||||
}
|
||||
|
||||
|
||||
// Enable experimental features
|
||||
|
||||
/**
|
||||
@@ -257,3 +316,74 @@ export const CANARY_FEATURES = new Setting('canary', ROOT_SETTING);
|
||||
export function isCanary() {
|
||||
return !!CANARY_FEATURES.getValue<boolean>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Avoids caching in the AST viewer if the user is also a canary user.
|
||||
*/
|
||||
export const NO_CACHE_AST_VIEWER = new Setting('disableCache', AST_VIEWER_SETTING);
|
||||
|
||||
// Settings for variant analysis
|
||||
const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);

/**
* Lists of GitHub repositories that you want to query remotely via the "Run Variant Analysis" command.
* Note: This command is only available for internal users.
*
* This setting should be a JSON object where each key is a user-specified name (string),
* and the value is an array of GitHub repositories (of the form `<owner>/<repo>`).
*/
const REMOTE_REPO_LISTS = new Setting('repositoryLists', REMOTE_QUERIES_SETTING);

export function getRemoteRepositoryLists(): Record<string, string[]> | undefined {
return REMOTE_REPO_LISTS.getValue<Record<string, string[]>>() || undefined;
}
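Concretely, a value that satisfies the Record<string, string[]> shape (the list name and repositories are only examples):

```ts
const exampleRepositoryLists: Record<string, string[]> = {
  'my-oss-targets': ['github/codeql', 'rails/rails', 'angular/angular']
};
```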
|
||||
|
||||
export async function setRemoteRepositoryLists(lists: Record<string, string[]> | undefined) {
|
||||
await REMOTE_REPO_LISTS.updateValue(lists, ConfigurationTarget.Global);
|
||||
}
|
||||
|
||||
/**
|
||||
* Path to a file that contains lists of GitHub repositories that you want to query remotely via
|
||||
* the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a path to a JSON file that contains a JSON object where each key is a
|
||||
* user-specified name (string), and the value is an array of GitHub repositories
|
||||
* (of the form `<owner>/<repo>`).
|
||||
*/
|
||||
const REPO_LISTS_PATH = new Setting('repositoryListsPath', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function getRemoteRepositoryListsPath(): string | undefined {
|
||||
return REPO_LISTS_PATH.getValue<string>() || undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the "controller" repository that you want to use with the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a GitHub repository of the form `<owner>/<repo>`.
|
||||
*/
|
||||
const REMOTE_CONTROLLER_REPO = new Setting('controllerRepo', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function getRemoteControllerRepo(): string | undefined {
|
||||
return REMOTE_CONTROLLER_REPO.getValue<string>() || undefined;
|
||||
}
|
||||
|
||||
export async function setRemoteControllerRepo(repo: string | undefined) {
|
||||
await REMOTE_CONTROLLER_REPO.updateValue(repo, ConfigurationTarget.Global);
|
||||
}
|
||||
|
||||
/**
|
||||
* The branch of "github/codeql-variant-analysis-action" to use with the "Run Variant Analysis" command.
|
||||
* Default value is "main".
|
||||
* Note: This command is only available for internal users.
|
||||
*/
|
||||
const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function getActionBranch(): string {
|
||||
return ACTION_BRANCH.getValue<string>() || 'main';
|
||||
}
|
||||
|
||||
export function isIntegrationTestMode() {
|
||||
return process.env.INTEGRATION_TEST_MODE === 'true';
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { ChildAstItem, AstItem } from '../astViewer';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import { Uri } from 'vscode';
|
||||
|
||||
/**
|
||||
* A class that wraps a tree of QL results from a query that
|
||||
@@ -17,7 +18,7 @@ export default class AstBuilder {
|
||||
queryResults: QueryWithResults,
|
||||
private cli: CodeQLCliServer,
|
||||
public db: DatabaseItem,
|
||||
public fileName: string
|
||||
public fileName: Uri
|
||||
) {
|
||||
this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ export enum KeyType {
|
||||
DefinitionQuery = 'DefinitionQuery',
|
||||
ReferenceQuery = 'ReferenceQuery',
|
||||
PrintAstQuery = 'PrintAstQuery',
|
||||
PrintCfgQuery = 'PrintCfgQuery',
|
||||
}
|
||||
|
||||
export function tagOfKeyType(keyType: KeyType): string {
|
||||
@@ -12,6 +13,8 @@ export function tagOfKeyType(keyType: KeyType): string {
|
||||
return 'ide-contextual-queries/local-references';
|
||||
case KeyType.PrintAstQuery:
|
||||
return 'ide-contextual-queries/print-ast';
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'ide-contextual-queries/print-cfg';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +26,8 @@ export function nameOfKeyType(keyType: KeyType): string {
|
||||
return 'references';
|
||||
case KeyType.PrintAstQuery:
|
||||
return 'print AST';
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'print CFG';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +37,7 @@ export function kindOfKeyType(keyType: KeyType): string {
|
||||
case KeyType.ReferenceQuery:
|
||||
return 'definitions';
|
||||
case KeyType.PrintAstQuery:
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'graph';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import * as vscode from 'vscode';
|
||||
|
||||
import { decodeSourceArchiveUri, encodeArchiveBasePath } from '../archive-filesystem-provider';
|
||||
import { ColumnKindCode, EntityValue, getResultSetSchema, ResultSetSchema } from '../pure/bqrs-cli-types';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
@@ -7,16 +5,17 @@ import { DatabaseManager, DatabaseItem } from '../databases';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import * as messages from '../pure/messages';
|
||||
import { QueryServerClient } from '../queryserver-client';
|
||||
import { QueryWithResults, compileAndRunQueryAgainstDatabase } from '../run-queries';
|
||||
import { QueryWithResults, compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from '../run-queries';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { KeyType } from './keyType';
|
||||
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
import { CancellationToken, LocationLink, Uri } from 'vscode';
|
||||
|
||||
const SELECT_QUERY_NAME = '#select';
|
||||
export const SELECT_QUERY_NAME = '#select';
|
||||
export const TEMPLATE_NAME = 'selectedSourceFile';
|
||||
|
||||
export interface FullLocationLink extends vscode.LocationLink {
|
||||
originUri: vscode.Uri;
|
||||
export interface FullLocationLink extends LocationLink {
|
||||
originUri: Uri;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -29,6 +28,7 @@ export interface FullLocationLink extends vscode.LocationLink {
|
||||
* @param dbm The database manager
|
||||
* @param uriString The selected source file and location
|
||||
* @param keyType The contextual query type to run
|
||||
* @param queryStorageDir The directory to store the query results
|
||||
* @param progress A progress callback
|
||||
* @param token A CancellationToken
|
||||
* @param filter A function that will filter extraneous results
|
||||
@@ -39,11 +39,12 @@ export async function getLocationsForUriString(
|
||||
dbm: DatabaseManager,
|
||||
uriString: string,
|
||||
keyType: KeyType,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken,
|
||||
token: CancellationToken,
|
||||
filter: (src: string, dest: string) => boolean
|
||||
): Promise<FullLocationLink[]> {
|
||||
const uri = decodeSourceArchiveUri(vscode.Uri.parse(uriString, true));
|
||||
const uri = decodeSourceArchiveUri(Uri.parse(uriString, true));
|
||||
const sourceArchiveUri = encodeArchiveBasePath(uri.sourceArchiveZipPath);
|
||||
|
||||
const db = dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
|
||||
@@ -56,12 +57,21 @@ export async function getLocationsForUriString(
|
||||
|
||||
const links: FullLocationLink[] = [];
|
||||
for (const query of await resolveQueries(cli, qlpack, keyType)) {
|
||||
const initialInfo = await createInitialQueryInfo(
|
||||
Uri.file(query),
|
||||
{
|
||||
name: db.name,
|
||||
databaseUri: db.databaseUri.toString(),
|
||||
},
|
||||
false
|
||||
);
|
||||
|
||||
const results = await compileAndRunQueryAgainstDatabase(
|
||||
cli,
|
||||
qs,
|
||||
db,
|
||||
false,
|
||||
vscode.Uri.file(query),
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
|
||||
@@ -11,8 +11,9 @@ import {
|
||||
} from './keyType';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { QlPacksForLanguage } from '../helpers';
|
||||
|
||||
export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem): Promise<string> {
|
||||
export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem): Promise<QlPacksForLanguage> {
|
||||
if (db.contents === undefined) {
|
||||
throw new Error('Database is invalid and cannot infer QLPack.');
|
||||
}
|
||||
@@ -21,28 +22,85 @@ export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem):
|
||||
return await helpers.getQlPackForDbscheme(cli, dbscheme);
|
||||
}
|
||||
|
||||
|
||||
export async function resolveQueries(cli: CodeQLCliServer, qlpack: string, keyType: KeyType): Promise<string[]> {
|
||||
/**
|
||||
* Finds the contextual queries with the specified key in a list of CodeQL packs.
|
||||
*
|
||||
* @param cli The CLI instance to use.
|
||||
* @param qlpacks The list of packs to search.
|
||||
* @param keyType The contextual query key of the query to search for.
|
||||
* @returns The found queries from the first pack in which any matching queries were found.
|
||||
*/
|
||||
async function resolveQueriesFromPacks(cli: CodeQLCliServer, qlpacks: string[], keyType: KeyType): Promise<string[]> {
|
||||
const suiteFile = (await tmp.file({
|
||||
postfix: '.qls'
|
||||
})).path;
|
||||
const suiteYaml = {
|
||||
qlpack,
|
||||
include: {
|
||||
kind: kindOfKeyType(keyType),
|
||||
'tags contain': tagOfKeyType(keyType)
|
||||
}
|
||||
};
|
||||
await fs.writeFile(suiteFile, yaml.safeDump(suiteYaml), 'utf8');
|
||||
const suiteYaml = [];
|
||||
for (const qlpack of qlpacks) {
|
||||
suiteYaml.push({
|
||||
from: qlpack,
|
||||
queries: '.',
|
||||
include: {
|
||||
kind: kindOfKeyType(keyType),
|
||||
'tags contain': tagOfKeyType(keyType)
|
||||
}
|
||||
});
|
||||
}
|
||||
await fs.writeFile(suiteFile, yaml.dump(suiteYaml), 'utf8');

const queries = await cli.resolveQueriesInSuite(suiteFile, helpers.getOnDiskWorkspaceFolders());
if (queries.length === 0) {
helpers.showAndLogErrorMessage(
`No ${nameOfKeyType(keyType)} queries (tagged "${tagOfKeyType(keyType)}") could be found in the current library path. \
Try upgrading the CodeQL libraries. If that doesn't work, then ${nameOfKeyType(keyType)} queries are not yet available \
for this language.`
);
throw new Error(`Couldn't find any queries tagged ${tagOfKeyType(keyType)} for qlpack ${qlpack}`);
}
return queries;
}
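For a single pack and the print-AST key, the object dumped to the temporary .qls file would look roughly like this (the pack name is illustrative; kind and tag follow kindOfKeyType/tagOfKeyType above):

```ts
const exampleSuite = [{
  from: 'codeql/javascript-all',  // illustrative pack name
  queries: '.',
  include: {
    kind: 'graph',                                        // kindOfKeyType(KeyType.PrintAstQuery)
    'tags contain': 'ide-contextual-queries/print-ast'    // tagOfKeyType(KeyType.PrintAstQuery)
  }
}];
```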
|
||||
|
||||
export async function resolveQueries(cli: CodeQLCliServer, qlpacks: QlPacksForLanguage, keyType: KeyType): Promise<string[]> {
|
||||
const cliCanHandleLibraryPack = await cli.cliConstraints.supportsAllowLibraryPacksInResolveQueries();
|
||||
const packsToSearch: string[] = [];
|
||||
let blameCli: boolean;
|
||||
|
||||
if (cliCanHandleLibraryPack) {
|
||||
// The CLI can handle both library packs and query packs, so search both packs in order.
|
||||
packsToSearch.push(qlpacks.dbschemePack);
|
||||
if (qlpacks.queryPack !== undefined) {
|
||||
packsToSearch.push(qlpacks.queryPack);
|
||||
}
|
||||
// If we don't find the query, it's because it's not there, not because the CLI was unable to
|
||||
// search the pack.
|
||||
blameCli = false;
|
||||
} else {
|
||||
// Older CLIs can't handle `codeql resolve queries` with a suite that references a library pack.
|
||||
if (qlpacks.dbschemePackIsLibraryPack) {
|
||||
if (qlpacks.queryPack !== undefined) {
|
||||
// Just search the query pack, because some older library/query releases still had the
|
||||
// contextual queries in the query pack.
|
||||
packsToSearch.push(qlpacks.queryPack);
|
||||
}
|
||||
// If we don't find it, it's because the CLI was unable to search the library pack that
|
||||
// actually contains the query. Blame any failure on the CLI, not the packs.
|
||||
blameCli = true;
|
||||
} else {
|
||||
// We have an old CLI, but the dbscheme pack is old enough that it's still a unified pack with
|
||||
// both libraries and queries. Just search that pack.
|
||||
packsToSearch.push(qlpacks.dbschemePack);
|
||||
// Any CLI should be able to search the single query pack, so if we don't find it, it's
|
||||
// because the language doesn't support it.
|
||||
blameCli = false;
|
||||
}
|
||||
}
|
||||
|
||||
const queries = await resolveQueriesFromPacks(cli, packsToSearch, keyType);
|
||||
if (queries.length > 0) {
|
||||
return queries;
|
||||
}
|
||||
|
||||
// No queries found. Determine the correct error message for the various scenarios.
|
||||
const errorMessage = blameCli ?
|
||||
`Your current version of the CodeQL CLI, '${(await cli.getVersion()).version}', \
|
||||
is unable to use contextual queries from recent versions of the standard CodeQL libraries. \
|
||||
Please upgrade to the latest version of the CodeQL CLI.`
|
||||
:
|
||||
`No ${nameOfKeyType(keyType)} queries (tagged "${tagOfKeyType(keyType)}") could be found in the current library path. \
|
||||
Try upgrading the CodeQL libraries. If that doesn't work, then ${nameOfKeyType(keyType)} queries are not yet available \
|
||||
for this language.`;
|
||||
|
||||
void helpers.showAndLogErrorMessage(errorMessage);
|
||||
throw new Error(`Couldn't find any queries tagged ${tagOfKeyType(keyType)} in any of the following packs: ${packsToSearch.join(', ')}.`);
|
||||
}
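The branching above reduces to a small decision table:

```ts
// CLI >= 2.6.1 (handles library packs)        -> search dbscheme pack, then query pack; missing queries blame the packs
// older CLI, dbscheme pack is a library pack  -> search only the query pack;            missing queries blame the CLI
// older CLI, unified dbscheme pack            -> search the dbscheme pack;              missing queries blame the packs
```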
|
||||
|
||||
@@ -1,4 +1,15 @@
|
||||
import * as vscode from 'vscode';
|
||||
import {
|
||||
CancellationToken,
|
||||
DefinitionProvider,
|
||||
Location,
|
||||
LocationLink,
|
||||
Position,
|
||||
ProgressLocation,
|
||||
ReferenceContext,
|
||||
ReferenceProvider,
|
||||
TextDocument,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
|
||||
import { decodeSourceArchiveUri, encodeArchiveBasePath, zipArchiveScheme } from '../archive-filesystem-provider';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
@@ -7,13 +18,14 @@ import { CachedOperation } from '../helpers';
|
||||
import { ProgressCallback, withProgress } from '../commandRunner';
|
||||
import * as messages from '../pure/messages';
|
||||
import { QueryServerClient } from '../queryserver-client';
|
||||
import { compileAndRunQueryAgainstDatabase, QueryWithResults } from '../run-queries';
|
||||
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo, QueryWithResults } from '../run-queries';
|
||||
import AstBuilder from './astBuilder';
|
||||
import {
|
||||
KeyType,
|
||||
} from './keyType';
|
||||
import { FullLocationLink, getLocationsForUriString, TEMPLATE_NAME } from './locationFinder';
|
||||
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
import { isCanary, NO_CACHE_AST_VIEWER } from '../config';
|
||||
|
||||
/**
|
||||
* Run templated CodeQL queries to find definitions and references in
|
||||
@@ -22,20 +34,21 @@ import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
* or from a selected identifier.
|
||||
*/
|
||||
|
||||
export class TemplateQueryDefinitionProvider implements vscode.DefinitionProvider {
|
||||
private cache: CachedOperation<vscode.LocationLink[]>;
|
||||
export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
||||
private cache: CachedOperation<LocationLink[]>;
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<vscode.LocationLink[]>(this.getDefinitions.bind(this));
|
||||
this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
|
||||
}
|
||||
|
||||
async provideDefinition(document: vscode.TextDocument, position: vscode.Position, _token: vscode.CancellationToken): Promise<vscode.LocationLink[]> {
|
||||
async provideDefinition(document: TextDocument, position: Position, _token: CancellationToken): Promise<LocationLink[]> {
|
||||
const fileLinks = await this.cache.get(document.uri.toString());
|
||||
const locLinks: vscode.LocationLink[] = [];
|
||||
const locLinks: LocationLink[] = [];
|
||||
for (const link of fileLinks) {
|
||||
if (link.originSelectionRange!.contains(position)) {
|
||||
locLinks.push(link);
|
||||
@@ -44,9 +57,9 @@ export class TemplateQueryDefinitionProvider implements vscode.DefinitionProvide
|
||||
return locLinks;
|
||||
}
|
||||
|
||||
private async getDefinitions(uriString: string): Promise<vscode.LocationLink[]> {
|
||||
private async getDefinitions(uriString: string): Promise<LocationLink[]> {
|
||||
return withProgress({
|
||||
location: vscode.ProgressLocation.Notification,
|
||||
location: ProgressLocation.Notification,
|
||||
cancellable: true,
|
||||
title: 'Finding definitions'
|
||||
}, async (progress, token) => {
|
||||
@@ -56,6 +69,7 @@ export class TemplateQueryDefinitionProvider implements vscode.DefinitionProvide
|
||||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
@@ -64,25 +78,26 @@ export class TemplateQueryDefinitionProvider implements vscode.DefinitionProvide
|
||||
}
|
||||
}
|
||||
|
||||
export class TemplateQueryReferenceProvider implements vscode.ReferenceProvider {
|
||||
export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
private cache: CachedOperation<FullLocationLink[]>;
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
|
||||
}
|
||||
|
||||
async provideReferences(
|
||||
document: vscode.TextDocument,
|
||||
position: vscode.Position,
|
||||
_context: vscode.ReferenceContext,
|
||||
_token: vscode.CancellationToken
|
||||
): Promise<vscode.Location[]> {
|
||||
document: TextDocument,
|
||||
position: Position,
|
||||
_context: ReferenceContext,
|
||||
_token: CancellationToken
|
||||
): Promise<Location[]> {
|
||||
const fileLinks = await this.cache.get(document.uri.toString());
|
||||
const locLinks: vscode.Location[] = [];
|
||||
const locLinks: Location[] = [];
|
||||
for (const link of fileLinks) {
|
||||
if (link.targetRange!.contains(position)) {
|
||||
locLinks.push({ range: link.originSelectionRange!, uri: link.originUri });
|
||||
@@ -93,7 +108,7 @@ export class TemplateQueryReferenceProvider implements vscode.ReferenceProvider
|
||||
|
||||
private async getReferences(uriString: string): Promise<FullLocationLink[]> {
|
||||
return withProgress({
|
||||
location: vscode.ProgressLocation.Notification,
|
||||
location: ProgressLocation.Notification,
|
||||
cancellable: true,
|
||||
title: 'Finding references'
|
||||
}, async (progress, token) => {
|
||||
@@ -103,6 +118,7 @@ export class TemplateQueryReferenceProvider implements vscode.ReferenceProvider
|
||||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
@@ -111,41 +127,54 @@ export class TemplateQueryReferenceProvider implements vscode.ReferenceProvider
|
||||
}
|
||||
}
|
||||
|
||||
type QueryWithDb = {
|
||||
query: QueryWithResults,
|
||||
dbUri: Uri
|
||||
};
|
||||
|
||||
export class TemplatePrintAstProvider {
|
||||
private cache: CachedOperation<QueryWithResults | undefined>;
|
||||
private cache: CachedOperation<QueryWithDb>;
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private dbm: DatabaseManager,
|
||||
|
||||
// Note: progress and token are only used if a cached value is not available
|
||||
private progress: ProgressCallback,
|
||||
private token: vscode.CancellationToken
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<QueryWithResults | undefined>(this.getAst.bind(this));
|
||||
this.cache = new CachedOperation<QueryWithDb>(this.getAst.bind(this));
|
||||
}
|
||||
|
||||
async provideAst(document?: vscode.TextDocument): Promise<AstBuilder | undefined> {
|
||||
if (!document) {
|
||||
return;
|
||||
}
|
||||
const queryResults = await this.cache.get(document.uri.toString());
|
||||
if (!queryResults) {
|
||||
return;
|
||||
async provideAst(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
fileUri?: Uri
|
||||
): Promise<AstBuilder | undefined> {
|
||||
if (!fileUri) {
|
||||
throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
|
||||
}
|
||||
const { query, dbUri } = this.shouldCache()
|
||||
? await this.cache.get(fileUri.toString(), progress, token)
|
||||
      : await this.getAst(fileUri.toString(), progress, token);

    return new AstBuilder(
      queryResults, this.cli,
      this.dbm.findDatabaseItem(vscode.Uri.parse(queryResults.database.databaseUri!, true))!,
      document.fileName
      query, this.cli,
      this.dbm.findDatabaseItem(dbUri)!,
      fileUri,
    );
  }

  private async getAst(uriString: string): Promise<QueryWithResults> {
    const uri = vscode.Uri.parse(uriString, true);
  private shouldCache() {
    return !(isCanary() && NO_CACHE_AST_VIEWER.getValue<boolean>());
  }

  private async getAst(
    uriString: string,
    progress: ProgressCallback,
    token: CancellationToken
  ): Promise<QueryWithDb> {
    const uri = Uri.parse(uriString, true);
    if (uri.scheme !== zipArchiveScheme) {
      throw new Error('AST Viewing is only available for databases with zipped source archives.');
      throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
    }

    const zippedArchive = decodeSourceArchiveUri(uri);
@@ -156,8 +185,8 @@ export class TemplatePrintAstProvider {
      throw new Error('Can\'t infer database from the provided source.');
    }

    const qlpack = await qlpackOfDatabase(this.cli, db);
    const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintAstQuery);
    const qlpacks = await qlpackOfDatabase(this.cli, db);
    const queries = await resolveQueries(this.cli, qlpacks, KeyType.PrintAstQuery);
    if (queries.length > 1) {
      throw new Error('Found multiple Print AST queries. Can\'t continue');
    }
@@ -176,15 +205,86 @@ export class TemplatePrintAstProvider {
      }
    };

    return await compileAndRunQueryAgainstDatabase(
      this.cli,
      this.qs,
      db,
      false,
      vscode.Uri.file(query),
      this.progress,
      this.token,
      templates
    const initialInfo = await createInitialQueryInfo(
      Uri.file(query),
      {
        name: db.name,
        databaseUri: db.databaseUri.toString(),
      },
      false
    );

    return {
      query: await compileAndRunQueryAgainstDatabase(
        this.cli,
        this.qs,
        db,
        initialInfo,
        this.queryStorageDir,
        progress,
        token,
        templates
      ),
      dbUri: db.databaseUri
    };
  }
}

export class TemplatePrintCfgProvider {
  private cache: CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>;

  constructor(
    private cli: CodeQLCliServer,
    private dbm: DatabaseManager,
  ) {
    this.cache = new CachedOperation<[Uri, messages.TemplateDefinitions] | undefined>(this.getCfgUri.bind(this));
  }

  async provideCfgUri(document?: TextDocument): Promise<[Uri, messages.TemplateDefinitions] | undefined> {
    if (!document) {
      return;
    }
    return await this.cache.get(document.uri.toString());
  }

  private async getCfgUri(uriString: string): Promise<[Uri, messages.TemplateDefinitions]> {
    const uri = Uri.parse(uriString, true);
    if (uri.scheme !== zipArchiveScheme) {
      throw new Error('CFG Viewing is only available for databases with zipped source archives.');
    }

    const zippedArchive = decodeSourceArchiveUri(uri);
    const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
    const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);

    if (!db) {
      throw new Error('Can\'t infer database from the provided source.');
    }

    const qlpack = await qlpackOfDatabase(this.cli, db);
    if (!qlpack) {
      throw new Error('Can\'t infer qlpack from database source archive.');
    }
    const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintCfgQuery);
    if (queries.length > 1) {
      throw new Error(`Found multiple Print CFG queries. Can't continue. Make sure there is exacly one query with the tag ${KeyType.PrintCfgQuery}`);
    }
    if (queries.length === 0) {
      throw new Error(`Did not find any Print CFG queries. Can't continue. Make sure there is exacly one query with the tag ${KeyType.PrintCfgQuery}`);
    }

    const queryUri = Uri.file(queries[0]);

    const templates: messages.TemplateDefinitions = {
      [TEMPLATE_NAME]: {
        values: {
          tuples: [[{
            stringValue: zippedArchive.pathWithinSourceArchive
          }]]
        }
      }
    };

    return [queryUri, templates];
  }
}
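The `templates` object built in `getCfgUri` is how the selected source file reaches the Print CFG query. As a rough, illustrative sketch (not part of the change itself), a generic helper producing the same shape might look like this, assuming the `messages.TemplateDefinitions` type and a template key such as `TEMPLATE_NAME` from the code above:

```typescript
import type * as messages from './pure/messages';

// Hypothetical helper mirroring the shape used by getCfgUri above.
// `templateName` stands in for the extension's well-known template key.
function buildPathTemplate(
  templateName: string,
  pathWithinSourceArchive: string
): messages.TemplateDefinitions {
  return {
    [templateName]: {
      values: {
        // A single tuple with a single string column: the selected file's path.
        tuples: [[{ stringValue: pathWithinSourceArchive }]]
      }
    }
  };
}
```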
@@ -1,12 +1,13 @@
import fetch, { Response } from 'node-fetch';
import * as unzipper from 'unzipper';
import { zip } from 'zip-a-folder';
import * as unzipper from 'unzipper';
import {
  Uri,
  CancellationToken,
  commands,
  window,
} from 'vscode';
import { CodeQLCliServer } from './cli';
import * as fs from 'fs-extra';
import * as path from 'path';

@@ -19,7 +20,9 @@ import {
  ProgressCallback,
} from './commandRunner';
import { logger } from './logging';
import { tmpDir } from './run-queries';
import { tmpDir } from './helpers';
import { Credentials } from './authentication';
import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';

/**
 * Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
@@ -32,6 +35,7 @@ export async function promptImportInternetDatabase(
  storagePath: string,
  progress: ProgressCallback,
  token: CancellationToken,
  cli?: CodeQLCliServer
): Promise<DatabaseItem | undefined> {
  const databaseUrl = await window.showInputBox({
    prompt: 'Enter URL of zipfile of database to download',
@@ -44,20 +48,99 @@ export async function promptImportInternetDatabase(

  const item = await databaseArchiveFetcher(
    databaseUrl,
    {},
    databaseManager,
    storagePath,
    undefined,
    progress,
    token
    token,
    cli
  );

  if (item) {
    commands.executeCommand('codeQLDatabases.focus');
    showAndLogInformationMessage('Database downloaded and imported successfully.');
    await commands.executeCommand('codeQLDatabases.focus');
    void showAndLogInformationMessage('Database downloaded and imported successfully.');
  }
  return item;

}

/**
 * Prompts a user to fetch a database from GitHub.
 * User enters a GitHub repository and then the user is asked which language
 * to download (if there is more than one)
 *
 * @param databaseManager the DatabaseManager
 * @param storagePath where to store the unzipped database.
 */
export async function promptImportGithubDatabase(
  databaseManager: DatabaseManager,
  storagePath: string,
  credentials: Credentials,
  progress: ProgressCallback,
  token: CancellationToken,
  cli?: CodeQLCliServer
): Promise<DatabaseItem | undefined> {
  progress({
    message: 'Choose repository',
    step: 1,
    maxStep: 2
  });
  const githubRepo = await window.showInputBox({
    title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
    placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
    ignoreFocusOut: true,
  });
  if (!githubRepo) {
    return;
  }

  if (!looksLikeGithubRepo(githubRepo)) {
    throw new Error(`Invalid GitHub repository: ${githubRepo}`);
  }

  const result = await convertGithubNwoToDatabaseUrl(githubRepo, credentials, progress);
  if (!result) {
    return;
  }

  const { databaseUrl, name, owner } = result;

  const octokit = await credentials.getOctokit();
  /**
   * The 'token' property of the token object returned by `octokit.auth()`.
   * The object is undocumented, but looks something like this:
   * {
   *   token: 'xxxx',
   *   tokenType: 'oauth',
   *   type: 'token',
   * }
   * We only need the actual token string.
   */
  const octokitToken = (await octokit.auth() as { token: string })?.token;
  if (!octokitToken) {
    // Just print a generic error message for now. Ideally we could show more debugging info, like the
    // octokit object, but that would expose a user token.
    throw new Error('Unable to get GitHub token.');
  }
  const item = await databaseArchiveFetcher(
    databaseUrl,
    { 'Accept': 'application/zip', 'Authorization': `Bearer ${octokitToken}` },
    databaseManager,
    storagePath,
    `${owner}/${name}`,
    progress,
    token,
    cli
  );
  if (item) {
    await commands.executeCommand('codeQLDatabases.focus');
    void showAndLogInformationMessage('Database downloaded and imported successfully.');
    return item;
  }
  return;
}

/**
 * Prompts a user to fetch a database from lgtm.
 * User enters a project url and then the user is asked which language
@@ -70,29 +153,38 @@ export async function promptImportLgtmDatabase(
  databaseManager: DatabaseManager,
  storagePath: string,
  progress: ProgressCallback,
  token: CancellationToken
  token: CancellationToken,
  cli?: CodeQLCliServer
): Promise<DatabaseItem | undefined> {
  progress({
    message: 'Choose project',
    step: 1,
    maxStep: 2
  });
  const lgtmUrl = await window.showInputBox({
    prompt:
      'Enter the project URL on LGTM (e.g., https://lgtm.com/projects/g/github/codeql)',
      'Enter the project slug or URL on LGTM (e.g., g/github/codeql or https://lgtm.com/projects/g/github/codeql)',
  });
  if (!lgtmUrl) {
    return;
  }

  if (looksLikeLgtmUrl(lgtmUrl)) {
    const databaseUrl = await convertToDatabaseUrl(lgtmUrl);
    const databaseUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progress);
    if (databaseUrl) {
      const item = await databaseArchiveFetcher(
        databaseUrl,
        {},
        databaseManager,
        storagePath,
        undefined,
        progress,
        token
        token,
        cli
      );
      if (item) {
        commands.executeCommand('codeQLDatabases.focus');
        showAndLogInformationMessage('Database downloaded and imported successfully.');
        await commands.executeCommand('codeQLDatabases.focus');
        void showAndLogInformationMessage('Database downloaded and imported successfully.');
      }
      return item;
    }
@@ -102,6 +194,16 @@ export async function promptImportLgtmDatabase(
  return;
}

export async function retrieveCanonicalRepoName(lgtmUrl: string) {
  const givenRepoName = extractProjectSlug(lgtmUrl);
  const response = await checkForFailingResponse(await fetch(`https://api.github.com/repos/${givenRepoName}`), 'Failed to locate the repository on github');
  const repo = await response.json();
  if (!repo || !repo.full_name) {
    return;
  }
  return repo.full_name;
}

/**
 * Imports a database from a local archive.
 *
@@ -115,22 +217,26 @@ export async function importArchiveDatabase(
  storagePath: string,
  progress: ProgressCallback,
  token: CancellationToken,
  cli?: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
  try {
    const item = await databaseArchiveFetcher(
      databaseUrl,
      {},
      databaseManager,
      storagePath,
      undefined,
      progress,
      token
      token,
      cli
    );
    if (item) {
      commands.executeCommand('codeQLDatabases.focus');
      showAndLogInformationMessage('Database unzipped and imported successfully.');
      await commands.executeCommand('codeQLDatabases.focus');
      void showAndLogInformationMessage('Database unzipped and imported successfully.');
    }
    return item;
  } catch (e) {
    if (e.message.includes('unexpected end of file')) {
    if (getErrorMessage(e).includes('unexpected end of file')) {
      throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
    } else {
      // delegate
@@ -144,17 +250,22 @@ export async function importArchiveDatabase(
 * or in the local filesystem.
 *
 * @param databaseUrl URL from which to grab the database
 * @param requestHeaders Headers to send with the request
 * @param databaseManager the DatabaseManager
 * @param storagePath where to store the unzipped database.
 * @param nameOverride a name for the database that overrides the default
 * @param progress callback to send progress messages to
 * @param token cancellation token
 */
async function databaseArchiveFetcher(
  databaseUrl: string,
  requestHeaders: { [key: string]: string },
  databaseManager: DatabaseManager,
  storagePath: string,
  nameOverride: string | undefined,
  progress: ProgressCallback,
  token: CancellationToken
  token: CancellationToken,
  cli?: CodeQLCliServer,
): Promise<DatabaseItem> {
  progress({
    message: 'Getting database',
@@ -168,9 +279,9 @@ async function databaseArchiveFetcher(
  const unzipPath = await getStorageFolder(storagePath, databaseUrl);

  if (isFile(databaseUrl)) {
    await readAndUnzip(databaseUrl, unzipPath, progress);
    await readAndUnzip(databaseUrl, unzipPath, cli, progress);
  } else {
    await fetchAndUnzip(databaseUrl, unzipPath, progress);
    await fetchAndUnzip(databaseUrl, requestHeaders, unzipPath, cli, progress);
  }

  progress({
@@ -193,7 +304,7 @@ async function databaseArchiveFetcher(
    });
    await ensureZippedSourceLocation(dbPath);

    const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath));
    const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath), nameOverride);
    await databaseManager.setCurrentDatabaseItem(item);
    return item;
  } else {
@@ -244,6 +355,7 @@ function validateHttpsUrl(databaseUrl: string) {
async function readAndUnzip(
  zipUrl: string,
  unzipPath: string,
  cli?: CodeQLCliServer,
  progress?: ProgressCallback
) {
  // TODO: Providing progress as the file is unzipped is currently blocked
@@ -254,16 +366,23 @@ async function readAndUnzip(
    step: 9,
    message: `Unzipping into ${path.basename(unzipPath)}`
  });
  // Must get the zip central directory since streaming the
  // zip contents may not have correct local file headers.
  // Instead, we can only rely on the central directory.
  const directory = await unzipper.Open.file(zipFile);
  await directory.extract({ path: unzipPath });
  if (cli && await cli.cliConstraints.supportsDatabaseUnbundle()) {
    // Use the `database unbundle` command if the installed cli version supports it
    await cli.databaseUnbundle(zipFile, unzipPath);
  } else {
    // Must get the zip central directory since streaming the
    // zip contents may not have correct local file headers.
    // Instead, we can only rely on the central directory.
    const directory = await unzipper.Open.file(zipFile);
    await directory.extract({ path: unzipPath });
  }
}

async function fetchAndUnzip(
  databaseUrl: string,
  requestHeaders: { [key: string]: string },
  unzipPath: string,
  cli?: CodeQLCliServer,
  progress?: ProgressCallback
) {
  // Although it is possible to download and stream directly to an unzipped directory,
@@ -280,7 +399,10 @@ async function fetchAndUnzip(
    step: 1,
  });

  const response = await checkForFailingResponse(await fetch(databaseUrl));
  const response = await checkForFailingResponse(
    await fetch(databaseUrl, { headers: requestHeaders }),
    'Error downloading database'
  );
  const archiveFileStream = fs.createWriteStream(archivePath);

  const contentLength = response.headers.get('content-length');
@@ -293,13 +415,13 @@ async function fetchAndUnzip(
      .on('error', reject)
  );

  await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, progress);
  await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, cli, progress);

  // remove archivePath eagerly since these archives can be large.
  await fs.remove(archivePath);
}

async function checkForFailingResponse(response: Response): Promise<Response | never> {
async function checkForFailingResponse(response: Response, errorMessage: string): Promise<Response | never> {
  if (response.ok) {
    return response;
  }
@@ -313,7 +435,7 @@ async function checkForFailingResponse(response: Response): Promise<Response | n
  } catch (e) {
    msg = text;
  }
  throw new Error(`Error downloading database.\n\nReason: ${msg}`);
  throw new Error(`${errorMessage}.\n\nReason: ${msg}`);
}

function isFile(databaseUrl: string) {
@@ -350,15 +472,99 @@ export async function findDirWithFile(
  return;
}

/**
 * The URL pattern is https://github.com/{owner}/{name}/{subpages}.
 *
 * This function accepts any URL that matches the pattern above. It also accepts just the
 * name with owner (NWO): `<owner>/<repo>`.
 *
 * @param githubRepo The GitHub repository URL or NWO
 *
 * @return true if this looks like a valid GitHub repository URL or NWO
 */
export function looksLikeGithubRepo(
  githubRepo: string | undefined
): githubRepo is string {
  if (!githubRepo) {
    return false;
  }
  if (REPO_REGEX.test(githubRepo) || convertGitHubUrlToNwo(githubRepo)) {
    return true;
  }
  return false;
}

/**
 * Converts a GitHub repository URL to the corresponding NWO.
 * @param githubUrl The GitHub repository URL
 * @return The corresponding NWO, or undefined if the URL is not valid
 */
function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
  try {
    const uri = Uri.parse(githubUrl, true);
    if (uri.scheme !== 'https') {
      return;
    }
    if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
      return;
    }
    const paths = uri.path.split('/').filter((segment: string) => segment);
    const nwo = `${paths[0]}/${paths[1]}`;
    if (REPO_REGEX.test(nwo)) {
      return nwo;
    }
    return;
  } catch (e) {
    // Ignore the error here, since we catch failures at a higher level.
    // In particular: returning undefined leads to an error in 'promptImportGithubDatabase'.
    return;
  }
}
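For orientation, here is an illustrative set of inputs (hypothetical, not from the diff) and the results one would expect from `looksLikeGithubRepo` as defined above:

```typescript
// Hypothetical usage sketch, assuming looksLikeGithubRepo is exported as above.
const candidates = [
  'github/codeql',                                    // plain name-with-owner (NWO)
  'https://github.com/github/codeql',                 // full repository URL
  'https://github.com/github/codeql/tree/main/cpp',   // URL with extra subpages
  'ftp://github.com/github/codeql',                   // rejected: wrong scheme
  'not a repo',                                       // rejected: neither NWO nor URL
];
for (const input of candidates) {
  console.log(input, '=>', looksLikeGithubRepo(input));
}
```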
export async function convertGithubNwoToDatabaseUrl(
  githubRepo: string,
  credentials: Credentials,
  progress: ProgressCallback): Promise<{
    databaseUrl: string,
    owner: string,
    name: string
  } | undefined> {
  try {
    const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
    const [owner, repo] = nwo.split('/');

    const octokit = await credentials.getOctokit();
    const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });

    const languages = response.data.map((db: any) => db.language);

    const language = await promptForLanguage(languages, progress);
    if (!language) {
      return;
    }

    return {
      databaseUrl: `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`,
      owner,
      name: repo
    };

  } catch (e) {
    void logger.log(`Error: ${getErrorMessage(e)}`);
    throw new Error(`Unable to get database for '${githubRepo}'`);
  }
}

/**
 * The URL pattern is https://lgtm.com/projects/{provider}/{org}/{name}/{irrelevant-subpages}.
 * There are several possibilities for the provider: in addition to GitHub.com(g),
 * There are several possibilities for the provider: in addition to GitHub.com (g),
 * LGTM currently hosts projects from Bitbucket (b), GitLab (gl) and plain git (git).
 *
 * After the {provider}/{org}/{name} path components, there may be the components
 * related to sub pages.
 * This function accepts any url that matches the pattern above. It also accepts the
 * raw project slug, e.g., `g/myorg/myproject`
 *
 * This function accepts any url that matches the patter above
 * After the `{provider}/{org}/{name}` path components, there may be the components
 * related to sub pages.
 *
 * @param lgtmUrl The URL to the lgtm project
 *
@@ -370,6 +576,10 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
    return false;
  }

  if (convertRawLgtmSlug(lgtmUrl)) {
    return true;
  }

  try {
    const uri = Uri.parse(lgtmUrl, true);
    if (uri.scheme !== 'https') {
@@ -380,29 +590,60 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
      return false;
    }

    const paths = uri.path.split('/').filter((segment) => segment);
    const paths = uri.path.split('/').filter((segment: string) => segment);
    return paths.length >= 4 && paths[0] === 'projects';
  } catch (e) {
    return false;
  }
}

function convertRawLgtmSlug(maybeSlug: string): string | undefined {
  if (!maybeSlug) {
    return;
  }
  const segments = maybeSlug.split('/');
  const providers = ['g', 'gl', 'b', 'git'];
  if (segments.length === 3 && providers.includes(segments[0])) {
    return `https://lgtm.com/projects/${maybeSlug}`;
  }
  return;
}

function extractProjectSlug(lgtmUrl: string): string | undefined {
  // Only matches the '/g/' provider (github)
  const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');
  const match = lgtmUrl.match(re);
  if (!match) {
    return;
  }
  return match[1];
}
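To make the slug handling concrete, an illustrative run of `convertRawLgtmSlug` as defined above (expected results shown as comments, not taken from the diff):

```typescript
// Hypothetical examples, assuming convertRawLgtmSlug behaves as defined above.
convertRawLgtmSlug('g/github/codeql');
// => 'https://lgtm.com/projects/g/github/codeql'        (GitHub provider)
convertRawLgtmSlug('gl/my-group/my-project');
// => 'https://lgtm.com/projects/gl/my-group/my-project' (GitLab provider)
convertRawLgtmSlug('github/codeql');
// => undefined (only two segments, no provider prefix)
convertRawLgtmSlug('https://lgtm.com/projects/g/github/codeql');
// => undefined (already a full URL, not a raw slug)
```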
// exported for testing
export async function convertToDatabaseUrl(lgtmUrl: string) {
export async function convertLgtmUrlToDatabaseUrl(
  lgtmUrl: string,
  progress: ProgressCallback) {
  try {
    const uri = Uri.parse(lgtmUrl, true);
    const paths = ['api', 'v1.0'].concat(
      uri.path.split('/').filter((segment) => segment)
    ).slice(0, 6);
    const projectUrl = `https://lgtm.com/${paths.join('/')}`;
    const projectResponse = await fetch(projectUrl);
    const projectJson = await projectResponse.json();
    lgtmUrl = convertRawLgtmSlug(lgtmUrl) || lgtmUrl;
    let projectJson = await downloadLgtmProjectMetadata(lgtmUrl);

    if (projectJson.code === 404) {
      throw new Error();
      // fallback check for github repositories with same name but different case
      // will fail for other providers
      let canonicalName = await retrieveCanonicalRepoName(lgtmUrl);
      if (!canonicalName) {
        throw new Error(`Project was not found at ${lgtmUrl}.`);
      }
      canonicalName = convertRawLgtmSlug(`g/${canonicalName}`);
      projectJson = await downloadLgtmProjectMetadata(canonicalName);
      if (projectJson.code === 404) {
        throw new Error('Failed to download project from LGTM.');
      }
    }

    const language = await promptForLanguage(projectJson);
    const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];

    const language = await promptForLanguage(languages, progress);
    if (!language) {
      return;
    }
@@ -414,25 +655,43 @@ export async function convertToDatabaseUrl(lgtmUrl: string) {
      language,
    ].join('/')}`;
  } catch (e) {
    logger.log(`Error: ${e.message}`);
    void logger.log(`Error: ${getErrorMessage(e)}`);
    throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
  }
}

async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
  const uri = Uri.parse(lgtmUrl, true);
  const paths = ['api', 'v1.0'].concat(
    uri.path.split('/').filter((segment: string) => segment)
  ).slice(0, 6);
  const projectUrl = `https://lgtm.com/${paths.join('/')}`;
  const projectResponse = await fetch(projectUrl);
  return projectResponse.json();
}

async function promptForLanguage(
  projectJson: any
  languages: string[],
  progress: ProgressCallback
): Promise<string | undefined> {
  if (!projectJson?.languages?.length) {
    return;
  progress({
    message: 'Choose language',
    step: 2,
    maxStep: 2
  });
  if (!languages.length) {
    throw new Error('No databases found');
  }
  if (projectJson.languages.length === 1) {
    return projectJson.languages[0].language;
  if (languages.length === 1) {
    return languages[0];
  }

  return await window.showQuickPick(
    projectJson.languages.map((lang: { language: string }) => lang.language), {
      placeHolder: 'Select the database language to download:'
    }
    languages,
    {
      placeHolder: 'Select the database language to download:',
      ignoreFocusOut: true,
    }
  );
}
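For reference, the path manipulation in `downloadLgtmProjectMetadata` rewrites a project page URL into the corresponding v1.0 API endpoint. A rough stand-alone sketch of that rewrite (illustrative only), using the standard `URL` class in place of the VS Code `Uri` type and a hypothetical example project:

```typescript
// Hypothetical walk-through of the URL rewrite performed above.
const lgtmUrl = 'https://lgtm.com/projects/g/github/codeql/context:cpp';
const uri = new URL(lgtmUrl);
const paths = ['api', 'v1.0']
  .concat(uri.pathname.split('/').filter(segment => segment)) // drop empty segments
  .slice(0, 6); // keep api/v1.0/projects/{provider}/{org}/{name}, drop sub pages
console.log(`https://lgtm.com/${paths.join('/')}`);
// => 'https://lgtm.com/api/v1.0/projects/g/github/codeql'
```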
@@ -1,5 +1,5 @@
import * as path from 'path';
import { DisposableObject } from './vscode-utils/disposable-object';
import { DisposableObject } from './pure/disposable-object';
import {
  Event,
  EventEmitter,
@@ -33,11 +33,13 @@ import * as qsClient from './queryserver-client';
import { upgradeDatabaseExplicit } from './upgrades';
import {
  importArchiveDatabase,
  promptImportGithubDatabase,
  promptImportInternetDatabase,
  promptImportLgtmDatabase,
} from './databaseFetcher';
import { CancellationToken } from 'vscode';
import { asyncFilter } from './pure/helpers-pure';
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
import { Credentials } from './authentication';

type ThemableIconPath = { light: string; dark: string } | string;

@@ -108,7 +110,7 @@ class DatabaseTreeDataProvider extends DisposableObject
  }

  private handleDidChangeDatabaseItem = (event: DatabaseChangedEvent): void => {
    // Note that events from the databse manager are instances of DatabaseChangedEvent
    // Note that events from the database manager are instances of DatabaseChangedEvent
    // and events fired by the UI are instances of DatabaseItem

    // When event.item is undefined, then the entire tree is refreshed.
@@ -135,6 +137,7 @@ class DatabaseTreeDataProvider extends DisposableObject
        this.extensionPath,
        SELECTED_DATABASE_ICON
      );
      item.contextValue = 'currentDatabase';
    } else if (element.error !== undefined) {
      item.iconPath = joinThemableIconPath(
        this.extensionPath,
@@ -179,7 +182,7 @@ class DatabaseTreeDataProvider extends DisposableObject

  public set sortOrder(newSortOrder: SortOrder) {
    this._sortOrder = newSortOrder;
    this._onDidChangeTreeData.fire();
    this._onDidChangeTreeData.fire(undefined);
  }
}

@@ -218,7 +221,8 @@ export class DatabaseUI extends DisposableObject {
    private databaseManager: DatabaseManager,
    private readonly queryServer: qsClient.QueryServerClient | undefined,
    private readonly storagePath: string,
    readonly extensionPath: string
    readonly extensionPath: string,
    private readonly getCredentials: () => Promise<Credentials>
  ) {
    super();

@@ -234,7 +238,7 @@ export class DatabaseUI extends DisposableObject {
  }

  init() {
    logger.log('Registering database panel commands.');
    void logger.log('Registering database panel commands.');
    this.push(
      commandRunnerWithProgress(
        'codeQL.setCurrentDatabase',
@@ -290,6 +294,20 @@ export class DatabaseUI extends DisposableObject {
        }
      )
    );
    this.push(
      commandRunnerWithProgress(
        'codeQLDatabases.chooseDatabaseGithub',
        async (
          progress: ProgressCallback,
          token: CancellationToken
        ) => {
          const credentials = await this.getCredentials();
          await this.handleChooseDatabaseGithub(credentials, progress, token);
        },
        {
          title: 'Adding database from GitHub',
        })
    );
    this.push(
      commandRunnerWithProgress(
        'codeQLDatabases.chooseDatabaseLgtm',
@@ -348,6 +366,12 @@ export class DatabaseUI extends DisposableObject {
        this.handleOpenFolder
      )
    );
    this.push(
      commandRunner(
        'codeQLDatabases.addDatabaseSource',
        this.handleAddSource
      )
    );
    this.push(
      commandRunner(
        'codeQLDatabases.removeOrphanedDatabases',
@@ -369,20 +393,20 @@ export class DatabaseUI extends DisposableObject {
    try {
      return await this.chooseAndSetDatabase(true, progress, token);
    } catch (e) {
      showAndLogErrorMessage(e.message);
      void showAndLogErrorMessage(getErrorMessage(e));
      return undefined;
    }
  };

  handleRemoveOrphanedDatabases = async (): Promise<void> => {
    logger.log('Removing orphaned databases from workspace storage.');
    void logger.log('Removing orphaned databases from workspace storage.');
    let dbDirs = undefined;

    if (
      !(await fs.pathExists(this.storagePath) ||
        !(await fs.stat(this.storagePath)).isDirectory())
      !(await fs.pathExists(this.storagePath)) ||
      !(await fs.stat(this.storagePath)).isDirectory()
    ) {
      logger.log('Missing or invalid storage directory. Not trying to remove orphaned databases.');
      void logger.log('Missing or invalid storage directory. Not trying to remove orphaned databases.');
      return;
    }

@@ -403,7 +427,7 @@ export class DatabaseUI extends DisposableObject {
    dbDirs = await asyncFilter(dbDirs, isLikelyDatabaseRoot);

    if (!dbDirs.length) {
      logger.log('No orphaned databases found.');
      void logger.log('No orphaned databases found.');
      return;
    }

@@ -412,8 +436,8 @@ export class DatabaseUI extends DisposableObject {
    await Promise.all(
      dbDirs.map(async dbDir => {
        try {
          logger.log(`Deleting orphaned database '${dbDir}'.`);
          await fs.rmdir(dbDir, { recursive: true } as any); // typings doesn't recognize the options argument
          void logger.log(`Deleting orphaned database '${dbDir}'.`);
          await fs.remove(dbDir);
        } catch (e) {
          failures.push(`${path.basename(dbDir)}`);
        }
@@ -422,10 +446,9 @@ export class DatabaseUI extends DisposableObject {

    if (failures.length) {
      const dirname = path.dirname(failures[0]);
      showAndLogErrorMessage(
        `Failed to delete unused databases:\n ${
          failures.join('\n ')
        }\n. To delete unused databases, please remove them manually from the storage folder ${dirname}.`
      void showAndLogErrorMessage(
        `Failed to delete unused databases (${failures.join(', ')
        }).\nTo delete unused databases, please remove them manually from the storage folder ${dirname}.`
      );
    }
  };
@@ -438,7 +461,7 @@ export class DatabaseUI extends DisposableObject {
    try {
      return await this.chooseAndSetDatabase(false, progress, token);
    } catch (e) {
      showAndLogErrorMessage(e.message);
      void showAndLogErrorMessage(getErrorMessage(e));
      return undefined;
    }
  };
@@ -446,14 +469,28 @@ export class DatabaseUI extends DisposableObject {
  handleChooseDatabaseInternet = async (
    progress: ProgressCallback,
    token: CancellationToken
  ): Promise<
    DatabaseItem | undefined
  > => {
  ): Promise<DatabaseItem | undefined> => {
    return await promptImportInternetDatabase(
      this.databaseManager,
      this.storagePath,
      progress,
      token
      token,
      this.queryServer?.cliServer
    );
  };

  handleChooseDatabaseGithub = async (
    credentials: Credentials,
    progress: ProgressCallback,
    token: CancellationToken
  ): Promise<DatabaseItem | undefined> => {
    return await promptImportGithubDatabase(
      this.databaseManager,
      this.storagePath,
      credentials,
      progress,
      token,
      this.queryServer?.cliServer
    );
  };

@@ -465,7 +502,8 @@ export class DatabaseUI extends DisposableObject {
      this.databaseManager,
      this.storagePath,
      progress,
      token
      token,
      this.queryServer?.cliServer
    );
  };

@@ -575,7 +613,8 @@ export class DatabaseUI extends DisposableObject {
        this.databaseManager,
        this.storagePath,
        progress,
        token
        token,
        this.queryServer?.cliServer
      );
    } else {
      await this.setCurrentDatabase(progress, token, uri);
@@ -583,9 +622,7 @@ export class DatabaseUI extends DisposableObject {
    } catch (e) {
      // rethrow and let this be handled by default error handling.
      throw new Error(
        `Could not set database to ${path.basename(uri.fsPath)}. Reason: ${
          e.message
        }`
        `Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
      );
    }
  };
@@ -617,7 +654,7 @@ export class DatabaseUI extends DisposableObject {
    });

    if (newName) {
      this.databaseManager.renameDatabaseItem(databaseItem, newName);
      await this.databaseManager.renameDatabaseItem(databaseItem, newName);
    }
  };

@@ -634,6 +671,24 @@ export class DatabaseUI extends DisposableObject {
    }
  };

  /**
   * Adds the source folder of a CodeQL database to the workspace.
   * When a database is first added in the "Databases" view, its source folder is added to the workspace.
   * If the source folder is removed from the workspace for some reason, we want to be able to re-add it if need be.
   */
  private handleAddSource = async (
    databaseItem: DatabaseItem,
    multiSelect: DatabaseItem[] | undefined
  ): Promise<void> => {
    if (multiSelect?.length) {
      for (const dbItem of multiSelect) {
        await this.databaseManager.addDatabaseSourceArchiveFolder(dbItem);
      }
    } else {
      await this.databaseManager.addDatabaseSourceArchiveFolder(databaseItem);
    }
  };

  /**
   * Return the current database directory. If we don't already have a
   * current database, ask the user for one, and return that, or
@@ -674,7 +729,6 @@ export class DatabaseUI extends DisposableObject {
    token: CancellationToken,
  ): Promise<DatabaseItem | undefined> {
    const uri = await chooseDatabaseDir(byFolder);

    if (!uri) {
      return undefined;
    }
@@ -691,7 +745,8 @@ export class DatabaseUI extends DisposableObject {
        this.databaseManager,
        this.storagePath,
        progress,
        token
        token,
        this.queryServer?.cliServer
      );
    }
  }
@@ -703,7 +758,7 @@ export class DatabaseUI extends DisposableObject {
 * 2. If the selected URI is a directory matching db-*, choose the containing directory
 * 3. choose the current directory
 *
 * @param uri a URI that is a datbase folder or inside it
 * @param uri a URI that is a database folder or inside it
 *
 * @return the actual database folder found by using the heuristics above.
 */

@@ -15,10 +15,11 @@ import {
  withProgress
} from './commandRunner';
import { zipArchiveScheme, encodeArchiveBasePath, decodeSourceArchiveUri, encodeSourceArchiveUri } from './archive-filesystem-provider';
import { DisposableObject } from './vscode-utils/disposable-object';
import { DisposableObject } from './pure/disposable-object';
import { Logger, logger } from './logging';
import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
import { QueryServerClient } from './queryserver-client';
import { getErrorMessage } from './pure/helpers-pure';

/**
 * databases.ts
@@ -115,30 +116,31 @@ async function findDataset(parentDirectory: string): Promise<vscode.Uri> {

  const dbAbsolutePath = path.join(parentDirectory, dbRelativePaths[0]);
  if (dbRelativePaths.length > 1) {
    showAndLogWarningMessage(`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`);
    void showAndLogWarningMessage(`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`);
  }

  return vscode.Uri.file(dbAbsolutePath);
}

async function findSourceArchive(
// exported for testing
export async function findSourceArchive(
  databasePath: string, silent = false
): Promise<vscode.Uri | undefined> {

  const relativePaths = ['src', 'output/src_archive'];

  for (const relativePath of relativePaths) {
    const basePath = path.join(databasePath, relativePath);
    const zipPath = basePath + '.zip';

    if (await fs.pathExists(basePath)) {
      return vscode.Uri.file(basePath);
    } else if (await fs.pathExists(zipPath)) {
    // Prefer using a zip archive over a directory.
    if (await fs.pathExists(zipPath)) {
      return encodeArchiveBasePath(zipPath);
    } else if (await fs.pathExists(basePath)) {
      return vscode.Uri.file(basePath);
    }
  }
  if (!silent) {
    showAndLogInformationMessage(
    void showAndLogInformationMessage(
      `Could not find source archive for database '${databasePath}'. Assuming paths are absolute.`
    );
  }
@@ -146,7 +148,7 @@ async function findSourceArchive(
}

async function resolveDatabase(
  databasePath: string
  databasePath: string,
): Promise<DatabaseContents> {

  const name = path.basename(databasePath);
@@ -161,7 +163,6 @@ async function resolveDatabase(
    datasetUri,
    sourceArchiveUri
  };

}

/** Gets the relative paths of all `.dbscheme` files in the given directory. */
@@ -169,7 +170,9 @@ async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
  return await glob('*.dbscheme', { cwd: dbDirectory });
}

async function resolveDatabaseContents(uri: vscode.Uri): Promise<DatabaseContents> {
async function resolveDatabaseContents(
  uri: vscode.Uri,
): Promise<DatabaseContents> {
  if (uri.scheme !== 'file') {
    throw new Error(`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`);
  }
@@ -258,17 +261,27 @@ export interface DatabaseItem {
   * Returns the root uri of the virtual filesystem for this database's source archive,
   * as displayed in the filesystem explorer.
   */
  getSourceArchiveExplorerUri(): vscode.Uri | undefined;
  getSourceArchiveExplorerUri(): vscode.Uri;

  /**
   * Holds if `uri` belongs to this database's source archive.
   */
  belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean;

  /**
   * Whether the database may be affected by test execution for the given path.
   */
  isAffectedByTest(testPath: string): Promise<boolean>;

  /**
   * Gets the state of this database, to be persisted in the workspace state.
   */
  getPersistedState(): PersistedDatabaseItem;

  /**
   * Verifies that this database item has a zipped source folder. Returns an error message if it does not.
   */
  verifyZippedSources(): string | undefined;
}
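An illustrative caller (not part of the diff) showing how the two new `DatabaseItem` members are meant to be combined: `verifyZippedSources` is checked first, since `getSourceArchiveExplorerUri` now throws rather than returning `undefined`:

```typescript
// Hypothetical caller, assuming the DatabaseItem interface declared above.
function tryGetExplorerUri(item: DatabaseItem): vscode.Uri | undefined {
  const problem = item.verifyZippedSources();
  if (problem) {
    // e.g. "<name> has no source archive." or "<name> has a source folder that is unzipped."
    console.warn(problem);
    return undefined;
  }
  // Safe: verifyZippedSources() returned undefined, so this will not throw.
  return item.getSourceArchiveExplorerUri();
}
```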

export enum DatabaseEventKind {
@@ -349,7 +362,7 @@ export class DatabaseItemImpl implements DatabaseItem {
    }
    catch (e) {
      this._contents = undefined;
      this._error = e;
      this._error = e instanceof Error ? e : new Error(String(e));
      throw e;
    }
  }
@@ -454,13 +467,26 @@ export class DatabaseItemImpl implements DatabaseItem {
  /**
   * Returns the root uri of the virtual filesystem for this database's source archive.
   */
  public getSourceArchiveExplorerUri(): vscode.Uri | undefined {
  public getSourceArchiveExplorerUri(): vscode.Uri {
    const sourceArchive = this.sourceArchive;
    if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith('.zip'))
      return undefined;
    if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith('.zip')) {
      throw new Error(this.verifyZippedSources());
    }
    return encodeArchiveBasePath(sourceArchive.fsPath);
  }

  public verifyZippedSources(): string | undefined {
    const sourceArchive = this.sourceArchive;
    if (sourceArchive === undefined) {
      return `${this.name} has no source archive.`;
    }

    if (!sourceArchive.fsPath.endsWith('.zip')) {
      return `${this.name} has a source folder that is unzipped.`;
    }
    return;
  }

  /**
   * Holds if `uri` belongs to this database's source archive.
   */
@@ -470,6 +496,27 @@ export class DatabaseItemImpl implements DatabaseItem {
    return uri.scheme === zipArchiveScheme &&
      decodeSourceArchiveUri(uri).sourceArchiveZipPath === this.sourceArchive.fsPath;
  }

  public async isAffectedByTest(testPath: string): Promise<boolean> {
    const databasePath = this.databaseUri.fsPath;
    if (!databasePath.endsWith('.testproj')) {
      return false;
    }
    try {
      const stats = await fs.stat(testPath);
      if (stats.isDirectory()) {
        return !path.relative(testPath, databasePath).startsWith('..');
      } else {
        // database for /one/two/three/test.ql is at /one/two/three/three.testproj
        const testdir = path.dirname(testPath);
        const testdirbase = path.basename(testdir);
        return databasePath == path.join(testdir, testdirbase + '.testproj');
      }
    } catch {
      // No information available for test path - assume database is unaffected.
      return false;
    }
  }
}
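A short worked example of the path arithmetic used by `isAffectedByTest`, following the layout described in the inline comment (illustrative only):

```typescript
import * as path from 'path';

// Single-file case: the database is expected to sit next to the test,
// named after the test's directory.
const testPath = '/one/two/three/test.ql';
const testdir = path.dirname(testPath);       // '/one/two/three'
const testdirbase = path.basename(testdir);   // 'three'
const expectedDb = path.join(testdir, testdirbase + '.testproj');
// => '/one/two/three/three.testproj', compared against databaseUri.fsPath.

// Directory case: any .testproj database located underneath the given
// directory is considered affected, via path.relative(testPath, databasePath).
```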

/**
@@ -480,7 +527,7 @@ export class DatabaseItemImpl implements DatabaseItem {
function eventFired<T>(event: vscode.Event<T>, timeoutMs = 1000): Promise<T | undefined> {
  return new Promise((res, _rej) => {
    const timeout = setTimeout(() => {
      logger.log(`Waiting for event ${event} timed out after ${timeoutMs}ms`);
      void logger.log(`Waiting for event ${event} timed out after ${timeoutMs}ms`);
      res(undefined);
      dispose();
    }, timeoutMs);
@@ -517,21 +564,22 @@ export class DatabaseManager extends DisposableObject {
    qs.onDidStartQueryServer(this.reregisterDatabases.bind(this));

    // Let this run async.
    this.loadPersistedState();
    void this.loadPersistedState();
  }

  public async openDatabase(
    progress: ProgressCallback,
    token: vscode.CancellationToken,
    uri: vscode.Uri,
    displayName?: string
  ): Promise<DatabaseItem> {
    const contents = await resolveDatabaseContents(uri);
    // Ignore the source archive for QLTest databases by default.
    const isQLTestDatabase = path.extname(uri.fsPath) === '.testproj';
    const fullOptions: FullDatabaseOptions = {
      ignoreSourceArchive: isQLTestDatabase,
      // displayName is only set if a user explicitly renames a database
      displayName: undefined,
      // If a displayName is not passed in, the basename of folder containing the database is used.
      displayName,
      dateAdded: Date.now(),
      language: await this.getPrimaryLanguage(uri.fsPath)
    };
@@ -561,7 +609,7 @@ export class DatabaseManager extends DisposableObject {
    }));
  }

  private async addDatabaseSourceArchiveFolder(item: DatabaseItem) {
  public async addDatabaseSourceArchiveFolder(item: DatabaseItem) {
    // The folder may already be in workspace state from a previous
    // session. If not, add it.
    const index = this.getDatabaseWorkspaceFolderIndex(item);
@@ -577,26 +625,28 @@ export class DatabaseManager extends DisposableObject {
      // This is undesirable, as we might be adding and removing many
      // workspace folders as the user adds and removes databases.
      const end = (vscode.workspace.workspaceFolders || []).length;

      const msg = item.verifyZippedSources();
      if (msg) {
        void logger.log(`Could not add source folder because ${msg}`);
        return;
      }

      const uri = item.getSourceArchiveExplorerUri();
      if (uri === undefined) {
        logger.log(`Couldn't obtain file explorer uri for ${item.name}`);
      }
      else {
        logger.log(`Adding workspace folder for ${item.name} source archive at index ${end}`);
        if ((vscode.workspace.workspaceFolders || []).length < 2) {
          // Adding this workspace folder makes the workspace
          // multi-root, which may surprise the user. Let them know
          // we're doing this.
          vscode.window.showInformationMessage(`Adding workspace folder for source archive of database ${item.name}.`);
        }
        vscode.workspace.updateWorkspaceFolders(end, 0, {
          name: `[${item.name} source archive]`,
          uri,
        });
        // vscode api documentation says we must to wait for this event
        // between multiple `updateWorkspaceFolders` calls.
        await eventFired(vscode.workspace.onDidChangeWorkspaceFolders);
      void logger.log(`Adding workspace folder for ${item.name} source archive at index ${end}`);
      if ((vscode.workspace.workspaceFolders || []).length < 2) {
        // Adding this workspace folder makes the workspace
        // multi-root, which may surprise the user. Let them know
        // we're doing this.
        void vscode.window.showInformationMessage(`Adding workspace folder for source archive of database ${item.name}.`);
      }
      vscode.workspace.updateWorkspaceFolders(end, 0, {
        name: `[${item.name} source archive]`,
        uri,
      });
      // vscode api documentation says we must to wait for this event
      // between multiple `updateWorkspaceFolders` calls.
      await eventFired(vscode.workspace.onDidChangeWorkspaceFolders);
    }
  }

@@ -670,7 +720,7 @@ export class DatabaseManager extends DisposableObject {
          await databaseItem.refresh();
          await this.registerDatabase(progress, token, databaseItem);
          if (currentDatabaseUri === database.uri) {
            this.setCurrentDatabaseItem(databaseItem, true);
            await this.setCurrentDatabaseItem(databaseItem, true);
          }
        }
        catch (e) {
@@ -680,7 +730,7 @@ export class DatabaseManager extends DisposableObject {
        }
      } catch (e) {
        // database list had an unexpected type - nothing to be done?
        showAndLogErrorMessage(`Database list loading failed: ${e.message}`);
        void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
      }
    });
  }
@@ -705,6 +755,8 @@ export class DatabaseManager extends DisposableObject {
    this._currentDatabaseItem = item;
    this.updatePersistedCurrentDatabaseItem();

    await vscode.commands.executeCommand('setContext', 'codeQL.currentDatabaseItem', item?.name);

    this._onDidChangeCurrentDatabaseItem.fire({
      item,
      kind: DatabaseEventKind.Change
@@ -737,7 +789,7 @@ export class DatabaseManager extends DisposableObject {
    item: DatabaseItem
  ) {
    this._databaseItems.push(item);
    this.updatePersistedDatabaseList();
    await this.updatePersistedDatabaseList();

    // Add this database item to the allow-list
    // Database items reconstituted from persisted state
@@ -754,7 +806,7 @@ export class DatabaseManager extends DisposableObject {

  public async renameDatabaseItem(item: DatabaseItem, newName: string) {
    item.name = newName;
    this.updatePersistedDatabaseList();
    await this.updatePersistedDatabaseList();
    this._onDidChangeDatabaseItem.fire({
      // pass undefined so that the entire tree is rebuilt in order to re-sort
      item: undefined,
@@ -774,28 +826,28 @@ export class DatabaseManager extends DisposableObject {
    if (index >= 0) {
      this._databaseItems.splice(index, 1);
    }
    this.updatePersistedDatabaseList();
    await this.updatePersistedDatabaseList();

    // Delete folder from workspace, if it is still there
    const folderIndex = (vscode.workspace.workspaceFolders || []).findIndex(
      folder => item.belongsToSourceArchiveExplorerUri(folder.uri)
    );
    if (folderIndex >= 0) {
      logger.log(`Removing workspace folder at index ${folderIndex}`);
      void logger.log(`Removing workspace folder at index ${folderIndex}`);
      vscode.workspace.updateWorkspaceFolders(folderIndex, 1);
    }

    // Delete folder from file system only if it is controlled by the extension
    if (this.isExtensionControlledLocation(item.databaseUri)) {
      logger.log('Deleting database from filesystem.');
      fs.remove(item.databaseUri.fsPath).then(
        () => logger.log(`Deleted '${item.databaseUri.fsPath}'`),
        e => logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${e.message}`));
    }

    // Remove this database item from the allow-list
    await this.deregisterDatabase(progress, token, item);

    // Delete folder from file system only if it is controlled by the extension
    if (this.isExtensionControlledLocation(item.databaseUri)) {
      void logger.log('Deleting database from filesystem.');
      fs.remove(item.databaseUri.fsPath).then(
        () => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
        e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
    }

    // note that we use undefined as the item in order to reset the entire tree
    this._onDidChangeDatabaseItem.fire({
      item: undefined,
@@ -808,7 +860,7 @@ export class DatabaseManager extends DisposableObject {
    token: vscode.CancellationToken,
    dbItem: DatabaseItem,
  ) {
    if (dbItem.contents && (await this.qs.supportsDatabaseRegistration())) {
    if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
      const databases: Dataset[] = [{
        dbDir: dbItem.contents.datasetUri.fsPath,
        workingSet: 'default'
@@ -822,7 +874,7 @@ export class DatabaseManager extends DisposableObject {
    token: vscode.CancellationToken,
    dbItem: DatabaseItem,
  ) {
    if (dbItem.contents && (await this.qs.supportsDatabaseRegistration())) {
    if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
      const databases: Dataset[] = [{
        dbDir: dbItem.contents.datasetUri.fsPath,
        workingSet: 'default'
@@ -832,12 +884,12 @@ export class DatabaseManager extends DisposableObject {
  }

  private updatePersistedCurrentDatabaseItem(): void {
    this.ctx.workspaceState.update(CURRENT_DB, this._currentDatabaseItem ?
    void this.ctx.workspaceState.update(CURRENT_DB, this._currentDatabaseItem ?
      this._currentDatabaseItem.databaseUri.toString(true) : undefined);
  }

  private updatePersistedDatabaseList(): void {
    this.ctx.workspaceState.update(DB_LIST, this._databaseItems.map(item => item.getPersistedState()));
  private async updatePersistedDatabaseList(): Promise<void> {
    await this.ctx.workspaceState.update(DB_LIST, this._databaseItems.map(item => item.getPersistedState()));
  }

  private isExtensionControlledLocation(uri: vscode.Uri) {
@@ -852,7 +904,7 @@ export class DatabaseManager extends DisposableObject {
  }

  private async getPrimaryLanguage(dbPath: string) {
    if (!(await this.cli.supportsLanguageName())) {
    if (!(await this.cli.cliConstraints.supportsLanguageName())) {
      // return undefined so that we recalculate on restart until the cli is at a version that
      // supports this feature. This recalculation is cheap since we avoid calling into the cli
      // unless we know it can return the langauges property.
@@ -1,4 +1,4 @@
import { DisposableObject } from './vscode-utils/disposable-object';
import { DisposableObject } from './pure/disposable-object';
import { logger } from './logging';

/**
@@ -59,23 +59,23 @@ export abstract class Discovery<T> extends DisposableObject {
          this.discoveryInProgress = false;
          this.update(results);
        }
      });
      })

    discoveryPromise.catch(err => {
      logger.log(`${this.name} failed. Reason: ${err.message}`);
    });
      .catch(err => {
        void logger.log(`${this.name} failed. Reason: ${err.message}`);
      })

    discoveryPromise.finally(() => {
      if (this.retry) {
        // Another refresh request came in while we were still running a previous discovery
        // operation. Since the discovery results we just computed are now stale, we'll launch
        // another discovery operation instead of updating.
        // Note that by doing this inside of `finally`, we will relaunch discovery even if the
        // initial discovery operation failed.
        this.retry = false;
        this.launchDiscovery();
      }
    });
      .finally(() => {
        if (this.retry) {
          // Another refresh request came in while we were still running a previous discovery
          // operation. Since the discovery results we just computed are now stale, we'll launch
          // another discovery operation instead of updating.
          // Note that by doing this inside of `finally`, we will relaunch discovery even if the
          // initial discovery operation failed.
          this.retry = false;
          this.launchDiscovery();
        }
      });
  }

  /**
@@ -153,7 +153,7 @@ export class DistributionManager implements DistributionProvider {
    // Check config setting, then extension specific distribution, then PATH.
    if (this.config.customCodeQlPath) {
      if (!await fs.pathExists(this.config.customCodeQlPath)) {
        showAndLogErrorMessage(`The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
        void showAndLogErrorMessage(`The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
          'by a configuration setting, but a CodeQL executable could not be found at that path. Please check ' +
          'that a CodeQL executable exists at the specified path or remove the setting.');
        return undefined;
@@ -191,7 +191,7 @@ export class DistributionManager implements DistributionProvider {
        };
      }
    }
    logger.log('INFO: Could not find CodeQL on path.');
    void logger.log('INFO: Could not find CodeQL on path.');
  }

  return undefined;
@@ -276,7 +276,7 @@ class ExtensionSpecificDistributionManager {
      try {
        await this.removeDistribution();
      } catch (e) {
        logger.log('WARNING: Tried to remove corrupted CodeQL CLI at ' +
        void logger.log('WARNING: Tried to remove corrupted CodeQL CLI at ' +
          `${this.getDistributionStoragePath()} but encountered an error: ${e}.`);
      }
    }
@@ -313,7 +313,7 @@ class ExtensionSpecificDistributionManager {
    progressCallback?: ProgressCallback): Promise<void> {
    await this.downloadDistribution(release, progressCallback);
    // Store the installed release within the global extension state.
    this.storeInstalledRelease(release);
    await this.storeInstalledRelease(release);
  }

  private async downloadDistribution(release: Release,
@@ -321,7 +321,7 @@ class ExtensionSpecificDistributionManager {
    try {
      await this.removeDistribution();
    } catch (e) {
      logger.log(`Tried to clean up old version of CLI at ${this.getDistributionStoragePath()} ` +
      void logger.log(`Tried to clean up old version of CLI at ${this.getDistributionStoragePath()} ` +
        `but encountered an error: ${e}.`);
    }

@@ -332,7 +332,7 @@ class ExtensionSpecificDistributionManager {
      throw new Error(`Invariant violation: chose a release to install that didn't have ${requiredAssetName}`);
    }
    if (assets.length > 1) {
      logger.log('WARNING: chose a release with more than one asset to install, found ' +
      void logger.log('WARNING: chose a release with more than one asset to install, found ' +
        assets.map(asset => asset.name).join(', '));
    }

@@ -345,7 +345,7 @@ class ExtensionSpecificDistributionManager {

      const contentLength = assetStream.headers.get('content-length');
      const totalNumBytes = contentLength ? parseInt(contentLength, 10) : undefined;
      reportStreamProgress(assetStream.body, 'Downloading CodeQL CLI…', totalNumBytes, progressCallback);
      reportStreamProgress(assetStream.body, `Downloading CodeQL CLI ${release.name}…`, totalNumBytes, progressCallback);

      await new Promise((resolve, reject) =>
        assetStream.body.pipe(archiveFile)
@@ -355,7 +355,7 @@ class ExtensionSpecificDistributionManager {

      await this.bumpDistributionFolderIndex();

      logger.log(`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`);
      void logger.log(`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`);
      await extractZipArchive(archivePath, this.getDistributionStoragePath());
    } finally {
      await fs.remove(tmpDirectory);
@@ -368,7 +368,7 @@ class ExtensionSpecificDistributionManager {
   * This should not be called for a distribution that is currently in use, as remove may fail.
   */
  private async removeDistribution(): Promise<void> {
    this.storeInstalledRelease(undefined);
    await this.storeInstalledRelease(undefined);
    if (await fs.pathExists(this.getDistributionStoragePath())) {
      await fs.remove(this.getDistributionStoragePath());
    }
@@ -376,7 +376,7 @@ class ExtensionSpecificDistributionManager {

  private async getLatestRelease(): Promise<Release> {
    const requiredAssetName = DistributionManager.getRequiredAssetName();
    logger.log(`Searching for latest release including ${requiredAssetName}.`);
    void logger.log(`Searching for latest release including ${requiredAssetName}.`);
    return this.createReleasesApiConsumer().getLatestRelease(
      this.versionRange,
      this.config.includePrerelease,
@@ -384,11 +384,11 @@ class ExtensionSpecificDistributionManager {
        const matchingAssets = release.assets.filter(asset => asset.name === requiredAssetName);
        if (matchingAssets.length === 0) {
          // For example, this could be a release with no platform-specific assets.
          logger.log(`INFO: Ignoring a release with no assets named ${requiredAssetName}`);
          void logger.log(`INFO: Ignoring a release with no assets named ${requiredAssetName}`);
          return false;
        }
        if (matchingAssets.length > 1) {
          logger.log(`WARNING: Ignoring a release with more than one asset named ${requiredAssetName}`);
          void logger.log(`WARNING: Ignoring a release with more than one asset named ${requiredAssetName}`);
          return false;
        }
        return true;
@@ -707,16 +707,14 @@ export async function getExecutableFromDirectory(directory: string, warnWhenNotF
    return alternateExpectedLauncherPath;
  }
  if (warnWhenNotFound) {
    logger.log(`WARNING: Expected to find a CodeQL CLI executable at ${expectedLauncherPath} but one was not found. ` +
    void logger.log(`WARNING: Expected to find a CodeQL CLI executable at ${expectedLauncherPath} but one was not found. ` +
      'Will try PATH.');
  }
  return undefined;
}

function warnDeprecatedLauncher() {

  showAndLogWarningMessage(

  void showAndLogWarningMessage(
    `The "${deprecatedCodeQlLauncherName()!}" launcher has been deprecated and will be removed in a future version. ` +
    `Please use "${codeQlLauncherName()}" instead. It is recommended to update to the latest CodeQL binaries.`
  );
@@ -1,5 +1,7 @@
|
||||
import 'source-map-support/register';
|
||||
import {
|
||||
CancellationToken,
|
||||
CancellationTokenSource,
|
||||
commands,
|
||||
Disposable,
|
||||
ExtensionContext,
|
||||
@@ -10,29 +12,39 @@ import {
|
||||
Uri,
|
||||
window as Window,
|
||||
env,
|
||||
window
|
||||
window,
|
||||
QuickPickItem,
|
||||
Range,
|
||||
workspace,
|
||||
ProviderResult
|
||||
} from 'vscode';
|
||||
import { LanguageClient } from 'vscode-languageclient';
|
||||
import * as os from 'os';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import { testExplorerExtensionId, TestHub } from 'vscode-test-adapter-api';
|
||||
|
||||
import { AstViewer } from './astViewer';
|
||||
import * as archiveFilesystemProvider from './archive-filesystem-provider';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import QuickEvalCodeLensProvider from './quickEvalCodeLensProvider';
|
||||
import { CodeQLCliServer, CliVersionConstraint } from './cli';
|
||||
import {
|
||||
CliConfigListener,
|
||||
DistributionConfigListener,
|
||||
isCanary,
|
||||
MAX_QUERIES,
|
||||
QueryHistoryConfigListener,
|
||||
QueryServerConfigListener
|
||||
} from './config';
|
||||
import * as languageSupport from './languageSupport';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { DatabaseItem, DatabaseManager } from './databases';
|
||||
import { DatabaseUI } from './databases-ui';
|
||||
import {
|
||||
TemplateQueryDefinitionProvider,
|
||||
TemplateQueryReferenceProvider,
|
||||
TemplatePrintAstProvider
|
||||
TemplatePrintAstProvider,
|
||||
TemplatePrintCfgProvider
|
||||
} from './contextual/templateProvider';
|
||||
import {
|
||||
DEFAULT_DISTRIBUTION_VERSION_RANGE,
|
||||
@@ -44,17 +56,26 @@ import {
|
||||
GithubApiError,
|
||||
GithubRateLimitedError
|
||||
} from './distribution';
|
||||
import * as helpers from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import {
|
||||
findLanguage,
|
||||
tmpDirDisposal,
|
||||
showBinaryChoiceDialog,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
showAndLogInformationMessage,
|
||||
showInformationMessageWithAction,
|
||||
tmpDir
|
||||
} from './helpers';
|
||||
import { asError, assertNever, getErrorMessage } from './pure/helpers-pure';
|
||||
import { spawnIdeServer } from './ide-server';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
import { ideServerLogger, logger, queryServerLogger } from './logging';
|
||||
import { QueryHistoryManager } from './query-history';
|
||||
import { CompletedQuery } from './query-results';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { displayQuickQuery } from './quick-query';
|
||||
import { compileAndRunQueryAgainstDatabase, tmpDirDisposal } from './run-queries';
|
||||
import { compileAndRunQueryAgainstDatabase, createInitialQueryInfo } from './run-queries';
|
||||
import { QLTestAdapterFactory } from './test-adapter';
|
||||
import { TestUIService } from './test-ui';
|
||||
import { CompareInterfaceManager } from './compare/compare-interface';
|
||||
@@ -69,6 +90,15 @@ import {
|
||||
} from './commandRunner';
|
||||
import { CodeQlStatusBarHandler } from './status-bar';
|
||||
|
||||
import { Credentials } from './authentication';
|
||||
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
|
||||
import { RemoteQueryResult } from './remote-queries/remote-query-result';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { handleDownloadPacks, handleInstallPackDependencies } from './packaging';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
import { exportRemoteQueryResults } from './remote-queries/export-results';
|
||||
import { RemoteQuery } from './remote-queries/remote-query';
|
||||
|
||||
/**
|
||||
* extension.ts
|
||||
* ------------
|
||||
@@ -139,7 +169,7 @@ export interface CodeQLExtensionInterface {
|
||||
|
||||
/**
|
||||
* Returns the CodeQLExtensionInterface, or an empty object if the interface is not
|
||||
* available afer activation is complete. This will happen if there is no cli
|
||||
* available after activation is complete. This will happen if there is no cli
|
||||
* installed when the extension starts. Downloading and installing the cli
|
||||
* will happen at a later time.
|
||||
*
|
||||
@@ -147,17 +177,21 @@ export interface CodeQLExtensionInterface {
|
||||
*
|
||||
* @returns CodeQLExtensionInterface
|
||||
*/
|
||||
export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionInterface | {}> {
|
||||
logger.log(`Starting ${extensionId} extension`);
|
||||
export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionInterface | Record<string, never>> {
|
||||
|
||||
void logger.log(`Starting ${extensionId} extension`);
|
||||
if (extension === undefined) {
|
||||
throw new Error(`Can't find extension ${extensionId}`);
|
||||
}
|
||||
|
||||
const distributionConfigListener = new DistributionConfigListener();
|
||||
initializeLogging(ctx);
|
||||
await initializeLogging(ctx);
|
||||
await initializeTelemetry(extension, ctx);
|
||||
languageSupport.install();
|
||||
|
||||
const codelensProvider = new QuickEvalCodeLensProvider();
|
||||
languages.registerCodeLensProvider({ scheme: 'file', language: 'ql' }, codelensProvider);
|
||||
|
||||
ctx.subscriptions.push(distributionConfigListener);
|
||||
const codeQlVersionRange = DEFAULT_DISTRIBUTION_VERSION_RANGE;
|
||||
const distributionManager = new DistributionManager(distributionConfigListener, codeQlVersionRange, ctx);
|
||||
@@ -165,7 +199,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
const shouldUpdateOnNextActivationKey = 'shouldUpdateOnNextActivation';
|
||||
|
||||
registerErrorStubs([checkForUpdatesCommand], command => (async () => {
|
||||
helpers.showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
|
||||
void showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
|
||||
}));
|
||||
|
||||
interface DistributionUpdateConfig {
|
||||
@@ -177,7 +211,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
async function installOrUpdateDistributionWithProgressTitle(progressTitle: string, config: DistributionUpdateConfig): Promise<void> {
|
||||
const minSecondsSinceLastUpdateCheck = config.isUserInitiated ? 0 : 86400;
|
||||
const noUpdatesLoggingFunc = config.shouldDisplayMessageWhenNoUpdates ?
|
||||
helpers.showAndLogInformationMessage : async (message: string) => logger.log(message);
|
||||
showAndLogInformationMessage : async (message: string) => void logger.log(message);
|
||||
const result = await distributionManager.checkForUpdatesToExtensionManagedDistribution(minSecondsSinceLastUpdateCheck);
|
||||
|
||||
// We do want to auto update if there is no distribution at all
|
||||
@@ -185,7 +219,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
|
||||
switch (result.kind) {
|
||||
case DistributionUpdateCheckResultKind.AlreadyCheckedRecentlyResult:
|
||||
logger.log('Didn\'t perform CodeQL CLI update check since a check was already performed within the previous ' +
|
||||
void logger.log('Didn\'t perform CodeQL CLI update check since a check was already performed within the previous ' +
|
||||
`${minSecondsSinceLastUpdateCheck} seconds.`);
|
||||
break;
|
||||
case DistributionUpdateCheckResultKind.AlreadyUpToDate:
|
||||
@@ -199,7 +233,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
const updateAvailableMessage = `Version "${result.updatedRelease.name}" of the CodeQL CLI is now available. ` +
|
||||
'Do you wish to upgrade?';
|
||||
await ctx.globalState.update(shouldUpdateOnNextActivationKey, true);
|
||||
if (await helpers.showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
|
||||
if (await showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
|
||||
await commands.executeCommand('workbench.action.reloadWindow');
|
||||
}
|
||||
} else {
|
||||
@@ -212,7 +246,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
distributionManager.installExtensionManagedDistributionRelease(result.updatedRelease, progress));
|
||||
|
||||
await ctx.globalState.update(shouldUpdateOnNextActivationKey, false);
|
||||
helpers.showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
|
||||
void showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
@@ -239,17 +273,17 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
// Don't rethrow the exception, because if the config is changed, we want to be able to retry installing
|
||||
// or updating the distribution.
|
||||
const alertFunction = (codeQlInstalled && !config.isUserInitiated) ?
|
||||
helpers.showAndLogWarningMessage : helpers.showAndLogErrorMessage;
|
||||
showAndLogWarningMessage : showAndLogErrorMessage;
|
||||
const taskDescription = (willUpdateCodeQl ? 'update' :
|
||||
codeQlInstalled ? 'check for updates to' : 'install') + ' CodeQL CLI';
|
||||
|
||||
if (e instanceof GithubRateLimitedError) {
|
||||
alertFunction(`Rate limited while trying to ${taskDescription}. Please try again after ` +
|
||||
void alertFunction(`Rate limited while trying to ${taskDescription}. Please try again after ` +
|
||||
`your rate limit window resets at ${e.rateLimitResetDate.toLocaleString(env.language)}.`);
|
||||
} else if (e instanceof GithubApiError) {
|
||||
alertFunction(`Encountered GitHub API error while trying to ${taskDescription}. ` + e);
|
||||
void alertFunction(`Encountered GitHub API error while trying to ${taskDescription}. ` + e);
|
||||
}
|
||||
alertFunction(`Unable to ${taskDescription}. ` + e);
|
||||
void alertFunction(`Unable to ${taskDescription}. ` + e);
|
||||
} finally {
|
||||
isInstallingOrUpdatingDistribution = false;
|
||||
}
|
||||
@@ -259,7 +293,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
const result = await distributionManager.getDistribution();
|
||||
switch (result.kind) {
|
||||
case FindDistributionResultKind.CompatibleDistribution:
|
||||
logger.log(`Found compatible version of CodeQL CLI (version ${result.version.raw})`);
|
||||
void logger.log(`Found compatible version of CodeQL CLI (version ${result.version.raw})`);
|
||||
break;
|
||||
case FindDistributionResultKind.IncompatibleDistribution: {
|
||||
const fixGuidanceMessage = (() => {
|
||||
@@ -274,16 +308,20 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
}
|
||||
})();
|
||||
|
||||
helpers.showAndLogWarningMessage(`The current version of the CodeQL CLI (${result.version.raw}) ` +
|
||||
'is incompatible with this extension. ' + fixGuidanceMessage);
|
||||
void showAndLogWarningMessage(
|
||||
`The current version of the CodeQL CLI (${result.version.raw}) ` +
|
||||
`is incompatible with this extension. ${fixGuidanceMessage}`
|
||||
);
|
||||
break;
|
||||
}
|
||||
case FindDistributionResultKind.UnknownCompatibilityDistribution:
|
||||
helpers.showAndLogWarningMessage('Compatibility with the configured CodeQL CLI could not be determined. ' +
|
||||
'You may experience problems using the extension.');
|
||||
void showAndLogWarningMessage(
|
||||
'Compatibility with the configured CodeQL CLI could not be determined. ' +
|
||||
'You may experience problems using the extension.'
|
||||
);
|
||||
break;
|
||||
case FindDistributionResultKind.NoDistribution:
|
||||
helpers.showAndLogErrorMessage('The CodeQL CLI could not be found.');
|
||||
void showAndLogErrorMessage('The CodeQL CLI could not be found.');
|
||||
break;
|
||||
default:
|
||||
assertNever(result);
|
||||
@@ -293,13 +331,13 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
|
||||
async function installOrUpdateThenTryActivate(
|
||||
config: DistributionUpdateConfig
|
||||
): Promise<CodeQLExtensionInterface | {}> {
|
||||
): Promise<CodeQLExtensionInterface | Record<string, never>> {
|
||||
|
||||
await installOrUpdateDistribution(config);
|
||||
|
||||
// Display the warnings even if the extension has already activated.
|
||||
const distributionResult = await getDistributionDisplayingDistributionWarnings();
|
||||
let extensionInterface: CodeQLExtensionInterface | {} = {};
|
||||
let extensionInterface: CodeQLExtensionInterface | Record<string, never> = {};
|
||||
if (!beganMainExtensionActivation && distributionResult.kind !== FindDistributionResultKind.NoDistribution) {
|
||||
extensionInterface = await activateWithInstalledDistribution(
|
||||
ctx,
|
||||
@@ -310,7 +348,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
|
||||
} else if (distributionResult.kind === FindDistributionResultKind.NoDistribution) {
|
||||
registerErrorStubs([checkForUpdatesCommand], command => async () => {
|
||||
const installActionName = 'Install CodeQL CLI';
|
||||
const chosenAction = await helpers.showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
|
||||
const chosenAction = await void showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
|
||||
items: [installActionName]
|
||||
});
|
||||
if (chosenAction === installActionName) {
|
||||
@@ -356,13 +394,13 @@ async function activateWithInstalledDistribution(
|
||||
// of activation.
|
||||
errorStubs.forEach((stub) => stub.dispose());
|
||||
|
||||
logger.log('Initializing configuration listener...');
|
||||
void logger.log('Initializing configuration listener...');
|
||||
const qlConfigurationListener = await QueryServerConfigListener.createQueryServerConfigListener(
|
||||
distributionManager
|
||||
);
|
||||
ctx.subscriptions.push(qlConfigurationListener);
|
||||
|
||||
logger.log('Initializing CodeQL cli server...');
|
||||
void logger.log('Initializing CodeQL cli server...');
|
||||
const cliServer = new CodeQLCliServer(
|
||||
distributionManager,
|
||||
new CliConfigListener(),
|
||||
@@ -373,12 +411,13 @@ async function activateWithInstalledDistribution(
|
||||
const statusBar = new CodeQlStatusBarHandler(cliServer, distributionConfigListener);
|
||||
ctx.subscriptions.push(statusBar);
|
||||
|
||||
logger.log('Initializing query server client.');
|
||||
void logger.log('Initializing query server client.');
|
||||
const qs = new qsClient.QueryServerClient(
|
||||
qlConfigurationListener,
|
||||
cliServer,
|
||||
{
|
||||
logger: queryServerLogger,
|
||||
contextStoragePath: getContextStoragePath(ctx),
|
||||
},
|
||||
(task) =>
|
||||
Window.withProgress(
|
||||
@@ -389,64 +428,84 @@ async function activateWithInstalledDistribution(
|
||||
ctx.subscriptions.push(qs);
|
||||
await qs.startQueryServer();
|
||||
|
||||
logger.log('Initializing database manager.');
|
||||
void logger.log('Initializing database manager.');
|
||||
const dbm = new DatabaseManager(ctx, qs, cliServer, logger);
|
||||
ctx.subscriptions.push(dbm);
|
||||
logger.log('Initializing database panel.');
|
||||
void logger.log('Initializing database panel.');
|
||||
const databaseUI = new DatabaseUI(
|
||||
dbm,
|
||||
qs,
|
||||
getContextStoragePath(ctx),
|
||||
ctx.extensionPath
|
||||
ctx.extensionPath,
|
||||
() => Credentials.initialize(ctx),
|
||||
);
|
||||
databaseUI.init();
|
||||
ctx.subscriptions.push(databaseUI);
|
||||
|
||||
logger.log('Initializing query history manager.');
|
||||
void logger.log('Initializing query history manager.');
|
||||
const queryHistoryConfigurationListener = new QueryHistoryConfigListener();
|
||||
ctx.subscriptions.push(queryHistoryConfigurationListener);
|
||||
const showResults = async (item: CompletedQuery) =>
|
||||
const showResults = async (item: CompletedLocalQueryInfo) =>
|
||||
showResultsForCompletedQuery(item, WebviewReveal.Forced);
|
||||
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
|
||||
await fs.ensureDir(queryStorageDir);
|
||||
const labelProvider = new HistoryItemLabelProvider(queryHistoryConfigurationListener);
|
||||
|
||||
const qhm = new QueryHistoryManager(
|
||||
qs,
|
||||
ctx.extensionPath,
|
||||
queryHistoryConfigurationListener,
|
||||
showResults,
|
||||
async (from: CompletedQuery, to: CompletedQuery) =>
|
||||
showResultsForComparison(from, to),
|
||||
);
|
||||
ctx.subscriptions.push(qhm);
|
||||
logger.log('Initializing results panel interface.');
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger);
|
||||
void logger.log('Initializing results panel interface.');
|
||||
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger, labelProvider);
|
||||
ctx.subscriptions.push(intm);
|
||||
|
||||
logger.log('Initializing compare panel interface.');
|
||||
void logger.log('Initializing variant analysis manager.');
|
||||
const rqm = new RemoteQueriesManager(ctx, cliServer, queryStorageDir, logger);
|
||||
ctx.subscriptions.push(rqm);
|
||||
|
||||
void logger.log('Initializing query history.');
|
||||
const qhm = new QueryHistoryManager(
|
||||
qs,
|
||||
dbm,
|
||||
intm,
|
||||
rqm,
|
||||
queryStorageDir,
|
||||
ctx,
|
||||
queryHistoryConfigurationListener,
|
||||
labelProvider,
|
||||
async (from: CompletedLocalQueryInfo, to: CompletedLocalQueryInfo) =>
|
||||
showResultsForComparison(from, to),
|
||||
);
|
||||
|
||||
|
||||
ctx.subscriptions.push(qhm);
|
||||
|
||||
void logger.log('Reading query history');
|
||||
await qhm.readQueryHistory();
|
||||
|
||||
void logger.log('Initializing compare panel interface.');
|
||||
const cmpm = new CompareInterfaceManager(
|
||||
ctx,
|
||||
dbm,
|
||||
cliServer,
|
||||
queryServerLogger,
|
||||
labelProvider,
|
||||
showResults
|
||||
);
|
||||
ctx.subscriptions.push(cmpm);
|
||||
|
||||
logger.log('Initializing source archive filesystem provider.');
|
||||
void logger.log('Initializing source archive filesystem provider.');
|
||||
archiveFilesystemProvider.activate(ctx);
|
||||
|
||||
async function showResultsForComparison(
|
||||
from: CompletedQuery,
|
||||
to: CompletedQuery
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo
|
||||
): Promise<void> {
|
||||
try {
|
||||
await cmpm.showResults(from, to);
|
||||
} catch (e) {
|
||||
helpers.showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
|
||||
async function showResultsForCompletedQuery(
|
||||
query: CompletedQuery,
|
||||
query: CompletedLocalQueryInfo,
|
||||
forceReveal: WebviewReveal
|
||||
): Promise<void> {
|
||||
await intm.showResults(query, forceReveal, false);
|
||||
@@ -457,33 +516,104 @@ async function activateWithInstalledDistribution(
|
||||
selectedQuery: Uri | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
databaseItem: DatabaseItem | undefined,
|
||||
range?: Range
|
||||
): Promise<void> {
|
||||
if (qs !== undefined) {
|
||||
const dbItem = await databaseUI.getDatabaseItem(progress, token);
|
||||
if (dbItem === undefined) {
|
||||
// If no databaseItem is specified, use the database currently selected in the Databases UI
|
||||
databaseItem = databaseItem || await databaseUI.getDatabaseItem(progress, token);
|
||||
if (databaseItem === undefined) {
|
||||
throw new Error('Can\'t run query without a selected database');
|
||||
}
|
||||
const info = await compileAndRunQueryAgainstDatabase(
|
||||
cliServer,
|
||||
qs,
|
||||
dbItem,
|
||||
quickEval,
|
||||
selectedQuery,
|
||||
progress,
|
||||
token
|
||||
);
|
||||
const item = qhm.addQuery(info);
|
||||
await showResultsForCompletedQuery(item, WebviewReveal.NotForced);
|
||||
// The call to showResults potentially creates SARIF file;
|
||||
// Update the tree item context value to allow viewing that
|
||||
// SARIF file from context menu.
|
||||
await qhm.refreshTreeView(item);
|
||||
const databaseInfo = {
|
||||
name: databaseItem.name,
|
||||
databaseUri: databaseItem.databaseUri.toString(),
|
||||
};
|
||||
|
||||
// handle cancellation from the history view.
|
||||
const source = new CancellationTokenSource();
|
||||
token.onCancellationRequested(() => source.cancel());
|
||||
|
||||
const initialInfo = await createInitialQueryInfo(selectedQuery, databaseInfo, quickEval, range);
|
||||
const item = new LocalQueryInfo(initialInfo, source);
|
||||
qhm.addQuery(item);
|
||||
try {
|
||||
const completedQueryInfo = await compileAndRunQueryAgainstDatabase(
|
||||
cliServer,
|
||||
qs,
|
||||
databaseItem,
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
source.token,
|
||||
undefined,
|
||||
item,
|
||||
);
|
||||
item.completeThisQuery(completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.NotForced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
} catch (e) {
|
||||
const err = asError(e);
|
||||
err.message = `Error running query: ${err.message}`;
|
||||
item.failureReason = err.message;
|
||||
throw e;
|
||||
} finally {
|
||||
await qhm.refreshTreeView();
|
||||
source.dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const qhelpTmpDir = tmp.dirSync({ prefix: 'qhelp_', keep: false, unsafeCleanup: true });
|
||||
ctx.subscriptions.push({ dispose: qhelpTmpDir.removeCallback });
|
||||
|
||||
async function previewQueryHelp(
|
||||
selectedQuery: Uri
|
||||
): Promise<void> {
|
||||
// selectedQuery is unpopulated when executing through the command palette
|
||||
const pathToQhelp = selectedQuery ? selectedQuery.fsPath : window.activeTextEditor?.document.uri.fsPath;
|
||||
if (pathToQhelp) {
|
||||
// Create temporary directory
|
||||
const relativePathToMd = path.basename(pathToQhelp, '.qhelp') + '.md';
|
||||
const absolutePathToMd = path.join(qhelpTmpDir.name, relativePathToMd);
|
||||
const uri = Uri.file(absolutePathToMd);
|
||||
try {
|
||||
await cliServer.generateQueryHelp(pathToQhelp, absolutePathToMd);
|
||||
await commands.executeCommand('markdown.showPreviewToSide', uri);
|
||||
} catch (e) {
|
||||
const errorMessage = getErrorMessage(e).includes('Generating qhelp in markdown') ? (
|
||||
`Could not generate markdown from ${pathToQhelp}: Bad formatting in .qhelp file.`
|
||||
) : `Could not open a preview of the generated file (${absolutePathToMd}).`;
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${e}` });
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
async function openReferencedFile(
|
||||
selectedQuery: Uri
|
||||
): Promise<void> {
|
||||
// If no file is selected, the path of the file in the editor is selected
|
||||
const path = selectedQuery?.fsPath || window.activeTextEditor?.document.uri.fsPath;
|
||||
if (qs !== undefined && path) {
|
||||
if (await cliServer.cliConstraints.supportsResolveQlref()) {
|
||||
const resolved = await cliServer.resolveQlref(path);
|
||||
const uri = Uri.file(resolved.resolvedPath);
|
||||
await window.showTextDocument(uri, { preview: false });
|
||||
} else {
|
||||
void showAndLogErrorMessage(
|
||||
'Jumping from a .qlref file to the .ql file it references is not '
|
||||
+ 'supported with the CLI version you are running.\n'
|
||||
+ `Please upgrade your CLI to version ${CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_QLREF
|
||||
} or later to use this feature.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ctx.subscriptions.push(tmpDirDisposal);
|
||||
|
||||
logger.log('Initializing CodeQL language server.');
|
||||
void logger.log('Initializing CodeQL language server.');
|
||||
const client = new LanguageClient(
|
||||
'CodeQL Language Server',
|
||||
() => spawnIdeServer(qlConfigurationListener),
|
||||
@@ -501,20 +631,20 @@ async function activateWithInstalledDistribution(
|
||||
true
|
||||
);
|
||||
|
||||
logger.log('Initializing QLTest interface.');
|
||||
void logger.log('Initializing QLTest interface.');
|
||||
const testExplorerExtension = extensions.getExtension<TestHub>(
|
||||
testExplorerExtensionId
|
||||
);
|
||||
if (testExplorerExtension) {
|
||||
const testHub = testExplorerExtension.exports;
|
||||
const testAdapterFactory = new QLTestAdapterFactory(testHub, cliServer);
|
||||
const testAdapterFactory = new QLTestAdapterFactory(testHub, cliServer, dbm);
|
||||
ctx.subscriptions.push(testAdapterFactory);
|
||||
|
||||
const testUIService = new TestUIService(testHub);
|
||||
ctx.subscriptions.push(testUIService);
|
||||
}
|
||||
|
||||
logger.log('Registering top-level command palette commands.');
|
||||
void logger.log('Registering top-level command palette commands.');
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQL.runQuery',
|
||||
@@ -522,10 +652,80 @@ async function activateWithInstalledDistribution(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
) => await compileAndRunQuery(false, uri, progress, token),
|
||||
) => await compileAndRunQuery(false, uri, progress, token, undefined),
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
interface DatabaseQuickPickItem extends QuickPickItem {
|
||||
databaseItem: DatabaseItem;
|
||||
}
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQL.runQueryOnMultipleDatabases',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
) => {
|
||||
let filteredDBs = dbm.databaseItems;
|
||||
if (filteredDBs.length === 0) {
|
||||
void showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
|
||||
return;
|
||||
}
|
||||
// If possible, only show databases with the right language (otherwise show all databases).
|
||||
const queryLanguage = await findLanguage(cliServer, uri);
|
||||
if (queryLanguage) {
|
||||
filteredDBs = dbm.databaseItems.filter(db => db.language === queryLanguage);
|
||||
if (filteredDBs.length === 0) {
|
||||
void showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
const quickPickItems = filteredDBs.map<DatabaseQuickPickItem>(dbItem => (
|
||||
{
|
||||
databaseItem: dbItem,
|
||||
label: dbItem.name,
|
||||
description: dbItem.language,
|
||||
}
|
||||
));
|
||||
/**
|
||||
* Databases that were selected in the quick pick menu.
|
||||
*/
|
||||
const quickpick = await window.showQuickPick<DatabaseQuickPickItem>(
|
||||
quickPickItems,
|
||||
{ canPickMany: true, ignoreFocusOut: true }
|
||||
);
|
||||
if (quickpick !== undefined) {
|
||||
// Collect all skipped databases and display them at the end (instead of popping up individual errors)
|
||||
const skippedDatabases = [];
|
||||
const errors = [];
|
||||
for (const item of quickpick) {
|
||||
try {
|
||||
await compileAndRunQuery(false, uri, progress, token, item.databaseItem);
|
||||
} catch (e) {
|
||||
skippedDatabases.push(item.label);
|
||||
errors.push(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
if (skippedDatabases.length > 0) {
|
||||
void logger.log(`Errors:\n${errors.join('\n')}`);
|
||||
void showAndLogWarningMessage(
|
||||
`The following databases were skipped:\n${skippedDatabases.join('\n')}.\nFor details about the errors, see the logs.`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
void showAndLogErrorMessage('No databases selected.');
|
||||
}
|
||||
},
|
||||
{
|
||||
title: 'Running query on selected databases',
|
||||
cancellable: true
|
||||
}
|
||||
)
|
||||
);
|
||||
@@ -547,7 +747,7 @@ async function activateWithInstalledDistribution(
|
||||
// files may be hidden from the user.
|
||||
if (dirFound) {
|
||||
const fileString = files.map(file => path.basename(file)).join(', ');
|
||||
const res = await helpers.showBinaryChoiceDialog(
|
||||
const res = await showBinaryChoiceDialog(
|
||||
`You are about to run ${files.length} queries: ${fileString} Do you want to continue?`
|
||||
);
|
||||
if (!res) {
|
||||
@@ -584,14 +784,18 @@ async function activateWithInstalledDistribution(
|
||||
});
|
||||
|
||||
await Promise.all(queryUris.map(async uri =>
|
||||
compileAndRunQuery(false, uri, wrappedProgress, token)
|
||||
compileAndRunQuery(false, uri, wrappedProgress, token, undefined)
|
||||
.then(() => queriesRemaining--)
|
||||
));
|
||||
},
|
||||
{
|
||||
title: 'Running queries',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
@@ -600,12 +804,35 @@ async function activateWithInstalledDistribution(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
) => await compileAndRunQuery(true, uri, progress, token),
|
||||
) => await compileAndRunQuery(true, uri, progress, token, undefined),
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQL.codeLensQuickEval',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri,
|
||||
range: Range
|
||||
) => await compileAndRunQuery(true, uri, progress, token, undefined, range),
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.quickQuery', async (
|
||||
progress: ProgressCallback,
|
||||
@@ -614,7 +841,81 @@ async function activateWithInstalledDistribution(
|
||||
displayQuickQuery(ctx, cliServer, databaseUI, progress, token),
|
||||
{
|
||||
title: 'Run Quick Query'
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
|
||||
registerRemoteQueryTextProvider();
|
||||
|
||||
// The "runVariantAnalysis" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.runVariantAnalysis', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
) => {
|
||||
if (isCanary()) {
|
||||
progress({
|
||||
maxStep: 5,
|
||||
step: 0,
|
||||
message: 'Getting credentials'
|
||||
});
|
||||
await rqm.runRemoteQuery(
|
||||
uri || window.activeTextEditor?.document.uri,
|
||||
progress,
|
||||
token
|
||||
);
|
||||
} else {
|
||||
throw new Error('Variant analysis requires the CodeQL Canary version to run.');
|
||||
}
|
||||
}, {
|
||||
title: 'Run Variant Analysis',
|
||||
cancellable: true
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.monitorRemoteQuery', async (
|
||||
queryId: string,
|
||||
query: RemoteQuery,
|
||||
token: CancellationToken) => {
|
||||
await rqm.monitorRemoteQuery(queryId, query, token);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.copyRepoList', async (queryId: string) => {
|
||||
await rqm.copyRemoteQueryRepoListToClipboard(queryId);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.autoDownloadRemoteQueryResults', async (
|
||||
queryResult: RemoteQueryResult,
|
||||
token: CancellationToken) => {
|
||||
await rqm.autoDownloadRemoteQueryResults(queryResult, token);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.exportVariantAnalysisResults', async () => {
|
||||
await exportRemoteQueryResults(qhm, rqm, ctx);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.openReferencedFile',
|
||||
openReferencedFile
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.previewQueryHelp',
|
||||
previewQueryHelp
|
||||
)
|
||||
);
|
||||
|
||||
@@ -624,7 +925,7 @@ async function activateWithInstalledDistribution(
|
||||
token: CancellationToken
|
||||
) => {
|
||||
await qs.restartQueryServer(progress, token);
|
||||
helpers.showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
void showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
outputLogger: queryServerLogger,
|
||||
});
|
||||
}, {
|
||||
@@ -650,6 +951,18 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Choose a Database from an Archive'
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseGithub', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
await databaseUI.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseLgtm', (
|
||||
progress: ProgressCallback,
|
||||
@@ -676,29 +989,97 @@ async function activateWithInstalledDistribution(
|
||||
commandRunner('codeQL.openDocumentation', async () =>
|
||||
env.openExternal(Uri.parse('https://codeql.github.com/docs/'))));
|
||||
|
||||
logger.log('Starting language server.');
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.copyVersion', async () => {
|
||||
const text = `CodeQL extension version: ${extension?.packageJSON.version} \nCodeQL CLI version: ${await getCliVersion()} \nPlatform: ${os.platform()} ${os.arch()}`;
|
||||
await env.clipboard.writeText(text);
|
||||
void showAndLogInformationMessage(text);
|
||||
}));
|
||||
|
||||
const getCliVersion = async () => {
|
||||
try {
|
||||
return await cliServer.getVersion();
|
||||
} catch {
|
||||
return '<missing>';
|
||||
}
|
||||
};
|
||||
|
||||
// The "authenticateToGitHub" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.authenticateToGitHub', async () => {
|
||||
if (isCanary()) {
|
||||
/**
|
||||
* Credentials for authenticating to GitHub.
|
||||
* These are used when making API calls.
|
||||
*/
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const octokit = await credentials.getOctokit();
|
||||
const userInfo = await octokit.users.getAuthenticated();
|
||||
void showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
}
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.installPackDependencies', async (
|
||||
progress: ProgressCallback
|
||||
) =>
|
||||
await handleInstallPackDependencies(cliServer, progress),
|
||||
{
|
||||
title: 'Installing pack dependencies',
|
||||
}
|
||||
));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.downloadPacks', async (
|
||||
progress: ProgressCallback
|
||||
) =>
|
||||
await handleDownloadPacks(cliServer, progress),
|
||||
{
|
||||
title: 'Downloading packs',
|
||||
}
|
||||
));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.showLogs', async () => {
|
||||
logger.show();
|
||||
})
|
||||
);
|
||||
|
||||
void logger.log('Starting language server.');
|
||||
ctx.subscriptions.push(client.start());
|
||||
|
||||
// Jump-to-definition and find-references
|
||||
logger.log('Registering jump-to-definition handlers.');
|
||||
void logger.log('Registering jump-to-definition handlers.');
|
||||
|
||||
// Store contextual queries in a temporary folder so that they are removed
|
||||
// when the application closes. There is no need for the user to interact with them.
|
||||
const contextualQueryStorageDir = path.join(tmpDir.name, 'contextual-query-storage');
|
||||
await fs.ensureDir(contextualQueryStorageDir);
|
||||
languages.registerDefinitionProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm)
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm, contextualQueryStorageDir)
|
||||
);
|
||||
|
||||
languages.registerReferenceProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm)
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm, contextualQueryStorageDir)
|
||||
);
|
||||
|
||||
const astViewer = new AstViewer();
|
||||
const printAstTemplateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, contextualQueryStorageDir);
|
||||
const cfgTemplateProvider = new TemplatePrintCfgProvider(cliServer, dbm);
|
||||
|
||||
ctx.subscriptions.push(astViewer);
|
||||
ctx.subscriptions.push(commandRunnerWithProgress('codeQL.viewAst', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
token: CancellationToken,
|
||||
selectedFile: Uri
|
||||
) => {
|
||||
const ast = await new TemplatePrintAstProvider(cliServer, qs, dbm, progress, token)
|
||||
.provideAst(window.activeTextEditor?.document);
|
||||
const ast = await printAstTemplateProvider.provideAst(
|
||||
progress,
|
||||
token,
|
||||
selectedFile ?? window.activeTextEditor?.document.uri,
|
||||
);
|
||||
if (ast) {
|
||||
astViewer.updateRoots(await ast.getRoots(), ast.db, ast.fileName);
|
||||
}
|
||||
@@ -707,9 +1088,28 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Calculate AST'
|
||||
}));
|
||||
|
||||
commands.executeCommand('codeQLDatabases.removeOrphanedDatabases');
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQL.viewCfg',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const res = await cfgTemplateProvider.provideCfgUri(window.activeTextEditor?.document);
|
||||
if (res) {
|
||||
await compileAndRunQuery(false, res[0], progress, token, undefined);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: 'Calculating Control Flow Graph',
|
||||
cancellable: true
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
logger.log('Successfully finished extension initialization.');
|
||||
await commands.executeCommand('codeQLDatabases.removeOrphanedDatabases');
|
||||
|
||||
void logger.log('Successfully finished extension initialization.');
|
||||
|
||||
return {
|
||||
ctx,
|
||||
@@ -725,17 +1125,30 @@ async function activateWithInstalledDistribution(
|
||||
}
|
||||
|
||||
function getContextStoragePath(ctx: ExtensionContext) {
|
||||
return ctx.storagePath || ctx.globalStoragePath;
|
||||
return ctx.storageUri?.fsPath || ctx.globalStorageUri.fsPath;
|
||||
}
|
||||
|
||||
function initializeLogging(ctx: ExtensionContext): void {
|
||||
const storagePath = getContextStoragePath(ctx);
|
||||
logger.init(storagePath);
|
||||
queryServerLogger.init(storagePath);
|
||||
ideServerLogger.init(storagePath);
|
||||
async function initializeLogging(ctx: ExtensionContext): Promise<void> {
|
||||
ctx.subscriptions.push(logger);
|
||||
ctx.subscriptions.push(queryServerLogger);
|
||||
ctx.subscriptions.push(ideServerLogger);
|
||||
}
|
||||
|
||||
const checkForUpdatesCommand = 'codeQL.checkForUpdatesToCLI';
|
||||
|
||||
/**
|
||||
* This text provider lets us open readonly files in the editor.
|
||||
*
|
||||
* TODO: Consolidate this with the 'codeql' text provider in query-history.ts.
|
||||
*/
|
||||
function registerRemoteQueryTextProvider() {
|
||||
workspace.registerTextDocumentContentProvider('remote-query', {
|
||||
provideTextDocumentContent(
|
||||
uri: Uri
|
||||
): ProviderResult<string> {
|
||||
const params = new URLSearchParams(uri.query);
|
||||
|
||||
return params.get('queryText');
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import * as fs from 'fs-extra';
|
||||
import * as glob from 'glob-promise';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import {
|
||||
ExtensionContext,
|
||||
Uri,
|
||||
@@ -9,8 +10,21 @@ import {
|
||||
workspace,
|
||||
env
|
||||
} from 'vscode';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { CodeQLCliServer, QlpacksInfo } from './cli';
|
||||
import { UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import { QueryMetadata } from './pure/interface-types';
|
||||
|
||||
// Shared temporary folder for the extension.
|
||||
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
|
||||
export const upgradesTmpDir = path.join(tmpDir.name, 'upgrades');
|
||||
fs.ensureDirSync(upgradesTmpDir);
|
||||
|
||||
export const tmpDirDisposal = {
|
||||
dispose: () => {
|
||||
tmpDir.removeCallback();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Show an error message and log it to the console
|
||||
@@ -29,8 +43,13 @@ export async function showAndLogErrorMessage(message: string, {
|
||||
items = [] as string[],
|
||||
fullMessage = undefined as (string | undefined)
|
||||
} = {}): Promise<string | undefined> {
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showErrorMessage, fullMessage);
|
||||
return internalShowAndLog(dropLinesExceptInitial(message), items, outputLogger, Window.showErrorMessage, fullMessage);
|
||||
}
|
||||
|
||||
function dropLinesExceptInitial(message: string, n = 2) {
|
||||
return message.toString().split(/\r?\n/).slice(0, n).join('\n');
|
||||
}
|
||||
|
||||
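
A small worked example of the truncation helper above, with a made-up error message (the message text and line references are illustrative assumptions, not from this change):

const raw = 'Error: query compilation failed\n  at compile (cli.ts:123)\n  at run (run-queries.ts:45)';
dropLinesExceptInitial(raw);     // => 'Error: query compilation failed\n  at compile (cli.ts:123)'
dropLinesExceptInitial(raw, 1);  // => 'Error: query compilation failed'
// showAndLogErrorMessage now shows only these initial lines in the popup; the optional
// fullMessage, when provided, is still written in full to the log.
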
/**
|
||||
* Show a warning message and log it to the console
|
||||
*
|
||||
@@ -57,9 +76,10 @@ export async function showAndLogWarningMessage(message: string, {
|
||||
*/
|
||||
export async function showAndLogInformationMessage(message: string, {
|
||||
outputLogger = logger,
|
||||
items = [] as string[]
|
||||
items = [] as string[],
|
||||
fullMessage = ''
|
||||
} = {}): Promise<string | undefined> {
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage);
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage, fullMessage);
|
||||
}
|
||||
|
||||
type ShowMessageFn = (message: string, ...items: string[]) => Thenable<string | undefined>;
|
||||
@@ -72,7 +92,7 @@ async function internalShowAndLog(
|
||||
fullMessage?: string
|
||||
): Promise<string | undefined> {
|
||||
const label = 'Show Log';
|
||||
outputLogger.log(fullMessage || message);
|
||||
void outputLogger.log(fullMessage || message);
|
||||
const result = await fn(message, label, ...items);
|
||||
if (result === label) {
|
||||
outputLogger.show();
|
||||
@@ -249,31 +269,75 @@ function createRateLimitedResult(): RateLimitedResult {
|
||||
};
|
||||
}
|
||||
|
||||
export async function getQlPackForDbscheme(cliServer: CodeQLCliServer, dbschemePath: string): Promise<string> {
|
||||
export interface QlPacksForLanguage {
|
||||
/** The name of the pack containing the dbscheme. */
|
||||
dbschemePack: string;
|
||||
/** `true` if `dbschemePack` is a library pack. */
|
||||
dbschemePackIsLibraryPack: boolean;
|
||||
/**
|
||||
* The name of the corresponding standard query pack.
|
||||
* Only defined if `dbschemePack` is a library pack.
|
||||
*/
|
||||
queryPack?: string;
|
||||
}
|
||||
|
||||
interface QlPackWithPath {
|
||||
packName: string;
|
||||
packDir: string | undefined;
|
||||
}
|
||||
|
||||
async function findDbschemePack(packs: QlPackWithPath[], dbschemePath: string): Promise<{ name: string; isLibraryPack: boolean; }> {
|
||||
for (const { packDir, packName } of packs) {
|
||||
if (packDir !== undefined) {
|
||||
const qlpack = yaml.load(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
|
||||
if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
|
||||
return {
|
||||
name: packName,
|
||||
isLibraryPack: qlpack.library === true
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Error(`Could not find qlpack file for dbscheme ${dbschemePath}`);
|
||||
}
|
||||
|
||||
function findStandardQueryPack(qlpacks: QlpacksInfo, dbschemePackName: string): string | undefined {
|
||||
const matches = dbschemePackName.match(/^codeql\/(?<language>[a-z]+)-all$/);
|
||||
if (matches) {
|
||||
const queryPackName = `codeql/${matches.groups!.language}-queries`;
|
||||
if (qlpacks[queryPackName] !== undefined) {
|
||||
return queryPackName;
|
||||
}
|
||||
}
|
||||
|
||||
// Either the dbscheme pack didn't look like one where the queries might be in the query pack, or
|
||||
// no query pack was found in the search path. Either is OK.
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export async function getQlPackForDbscheme(cliServer: CodeQLCliServer, dbschemePath: string): Promise<QlPacksForLanguage> {
|
||||
const qlpacks = await cliServer.resolveQlpacks(getOnDiskWorkspaceFolders());
|
||||
const packs: { packDir: string | undefined; packName: string }[] =
|
||||
const packs: QlPackWithPath[] =
|
||||
Object.entries(qlpacks).map(([packName, dirs]) => {
|
||||
if (dirs.length < 1) {
|
||||
logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has no directories`);
|
||||
void logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has no directories`);
|
||||
return { packName, packDir: undefined };
|
||||
}
|
||||
if (dirs.length > 1) {
|
||||
logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has more than one directory; arbitrarily choosing the first`);
|
||||
void logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has more than one directory; arbitrarily choosing the first`);
|
||||
}
|
||||
return {
|
||||
packName,
|
||||
packDir: dirs[0]
|
||||
};
|
||||
});
|
||||
for (const { packDir, packName } of packs) {
|
||||
if (packDir !== undefined) {
|
||||
const qlpack = yaml.safeLoad(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme: string };
|
||||
if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
|
||||
return packName;
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Error(`Could not find qlpack file for dbscheme ${dbschemePath}`);
|
||||
const dbschemePack = await findDbschemePack(packs, dbschemePath);
|
||||
const queryPack = dbschemePack.isLibraryPack ? findStandardQueryPack(qlpacks, dbschemePack.name) : undefined;
|
||||
return {
|
||||
dbschemePack: dbschemePack.name,
|
||||
dbschemePackIsLibraryPack: dbschemePack.isLibraryPack,
|
||||
queryPack
|
||||
};
|
||||
}
|
||||
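
A minimal sketch of what the reworked resolution returns, given a CodeQLCliServer instance and assuming a dbscheme owned by the standard 'codeql/java-all' library pack with 'codeql/java-queries' on the search path (the path and pack names below are illustrative, not values from this change):

const packs = await getQlPackForDbscheme(cliServer, '/workspace/db/db-java/semmlecode.dbscheme');
// packs => {
//   dbschemePack: 'codeql/java-all',
//   dbschemePackIsLibraryPack: true,
//   queryPack: 'codeql/java-queries'   // derived by findStandardQueryPack from the '<language>-all' name
// }
// For a dbscheme owned by a non-library pack, dbschemePackIsLibraryPack is false and queryPack stays undefined.
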
|
||||
export async function getPrimaryDbscheme(datasetFolder: string): Promise<string> {
|
||||
@@ -287,7 +351,7 @@ export async function getPrimaryDbscheme(datasetFolder: string): Promise<string>
|
||||
const dbscheme = dbschemes[0];
|
||||
|
||||
if (dbschemes.length > 1) {
|
||||
Window.showErrorMessage(`Found multiple dbschemes in ${datasetFolder} during quick query; arbitrarily choosing the first, ${dbscheme}, to decide what library to use.`);
|
||||
void Window.showErrorMessage(`Found multiple dbschemes in ${datasetFolder} during quick query; arbitrarily choosing the first, ${dbscheme}, to decide what library to use.`);
|
||||
}
|
||||
return dbscheme;
|
||||
}
|
||||
@@ -296,19 +360,19 @@ export async function getPrimaryDbscheme(datasetFolder: string): Promise<string>
|
||||
* A cached mapping from strings to value of type U.
|
||||
*/
|
||||
export class CachedOperation<U> {
|
||||
private readonly operation: (t: string) => Promise<U>;
|
||||
private readonly operation: (t: string, ...args: any[]) => Promise<U>;
|
||||
private readonly cached: Map<string, U>;
|
||||
private readonly lru: string[];
|
||||
private readonly inProgressCallbacks: Map<string, [(u: U) => void, (reason?: any) => void][]>;
|
||||
|
||||
constructor(operation: (t: string) => Promise<U>, private cacheSize = 100) {
|
||||
constructor(operation: (t: string, ...args: any[]) => Promise<U>, private cacheSize = 100) {
|
||||
this.operation = operation;
|
||||
this.lru = [];
|
||||
this.inProgressCallbacks = new Map<string, [(u: U) => void, (reason?: any) => void][]>();
|
||||
this.cached = new Map<string, U>();
|
||||
}
|
||||
|
||||
async get(t: string): Promise<U> {
|
||||
async get(t: string, ...args: any[]): Promise<U> {
|
||||
// Try and retrieve from the cache
|
||||
const fromCache = this.cached.get(t);
|
||||
if (fromCache !== undefined) {
|
||||
@@ -329,7 +393,7 @@ export class CachedOperation<U> {
|
||||
const callbacks: [(u: U) => void, (reason?: any) => void][] = [];
|
||||
this.inProgressCallbacks.set(t, callbacks);
|
||||
try {
|
||||
const result = await this.operation(t);
|
||||
const result = await this.operation(t, ...args);
|
||||
callbacks.forEach(f => f[0](result));
|
||||
this.inProgressCallbacks.delete(t);
|
||||
if (this.lru.length > this.cacheSize) {
|
||||
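
A minimal usage sketch for the widened CachedOperation signature above; the wrapped operation and its arguments are assumptions for illustration, not code from this change:

const upgradeCache = new CachedOperation<string[]>(
  async (dbschemePath: string, targetDbscheme: string) =>
    resolveUpgradePath(dbschemePath, targetDbscheme)  // hypothetical async operation
);
const steps = await upgradeCache.get('/old/semmlecode.dbscheme', '/new/semmlecode.dbscheme');
// Extra arguments are forwarded to the operation, but only the first string keys the cache,
// so two calls that differ only in the extra arguments will share one cached result.
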
@@ -362,18 +426,25 @@ export class CachedOperation<U> {
|
||||
* `cli.CodeQLCliServer.resolveDatabase` and use the first entry in the
|
||||
* `languages` property.
|
||||
*
|
||||
* @see cli.CodeQLCliServer.supportsLanguageName
|
||||
* @see cli.CliVersionConstraint.supportsLanguageName
|
||||
* @see cli.CodeQLCliServer.resolveDatabase
|
||||
*/
|
||||
const dbSchemeToLanguage = {
|
||||
export const dbSchemeToLanguage = {
|
||||
'semmlecode.javascript.dbscheme': 'javascript',
|
||||
'semmlecode.cpp.dbscheme': 'cpp',
|
||||
'semmlecode.dbscheme': 'java',
|
||||
'semmlecode.python.dbscheme': 'python',
|
||||
'semmlecode.csharp.dbscheme': 'csharp',
|
||||
'go.dbscheme': 'go'
|
||||
'go.dbscheme': 'go',
|
||||
'ruby.dbscheme': 'ruby'
|
||||
};
|
||||
|
||||
export const languageToDbScheme = Object.entries(dbSchemeToLanguage).reduce((acc, [k, v]) => {
|
||||
acc[v] = k;
|
||||
return acc;
|
||||
}, {} as { [k: string]: string });
|
||||
|
||||
|
||||
/**
|
||||
* Returns the initial contents for an empty query, based on the language of the selected
|
||||
* databse.
|
||||
@@ -419,3 +490,94 @@ export async function isLikelyDatabaseRoot(maybeRoot: string) {
|
||||
export function isLikelyDbLanguageFolder(dbPath: string) {
|
||||
return !!path.basename(dbPath).startsWith('db-');
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the language that a query targets.
|
||||
* If it can't be autodetected, prompt the user to specify the language manually.
|
||||
*/
|
||||
export async function findLanguage(
|
||||
cliServer: CodeQLCliServer,
|
||||
queryUri: Uri | undefined
|
||||
): Promise<string | undefined> {
|
||||
const uri = queryUri || Window.activeTextEditor?.document.uri;
|
||||
if (uri !== undefined) {
|
||||
try {
|
||||
const queryInfo = await cliServer.resolveQueryByLanguage(getOnDiskWorkspaceFolders(), uri);
|
||||
const language = (Object.keys(queryInfo.byLanguage))[0];
|
||||
void logger.log(`Detected query language: ${language}`);
|
||||
return language;
|
||||
} catch (e) {
|
||||
void logger.log('Could not autodetect query language. Select language manually.');
|
||||
}
|
||||
}
|
||||
|
||||
// will be undefined if user cancels the quick pick.
|
||||
return await askForLanguage(cliServer, false);
|
||||
}
|
||||
|
||||
export async function askForLanguage(cliServer: CodeQLCliServer, throwOnEmpty = true): Promise<string | undefined> {
|
||||
const language = await Window.showQuickPick(
|
||||
await cliServer.getSupportedLanguages(),
|
||||
{ placeHolder: 'Select target language for your query', ignoreFocusOut: true }
|
||||
);
|
||||
if (!language) {
|
||||
// This only happens if the user cancels the quick pick.
|
||||
if (throwOnEmpty) {
|
||||
throw new UserCancellationException('Cancelled.');
|
||||
} else {
|
||||
void showAndLogErrorMessage('Language not found. Language must be specified manually.');
|
||||
}
|
||||
}
|
||||
return language;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets metadata for a query, if it exists.
|
||||
* @param cliServer The CLI server.
|
||||
* @param queryPath The path to the query.
|
||||
* @returns A promise that resolves to the query metadata, if available.
|
||||
*/
|
||||
export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath: string): Promise<QueryMetadata | undefined> {
|
||||
try {
|
||||
return await cliServer.resolveMetadata(queryPath);
|
||||
} catch (e) {
|
||||
// Ignore errors and provide no metadata.
|
||||
void logger.log(`Couldn't resolve metadata for ${queryPath}: ${e}`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file in the query directory that indicates when this query was created.
|
||||
* This is important for keeping track of when queries should be removed.
|
||||
*
|
||||
* @param storagePath The directory that will contain all files relevant to a query result.
|
||||
* It does not need to exist.
|
||||
*/
|
||||
export async function createTimestampFile(storagePath: string) {
|
||||
const timestampPath = path.join(storagePath, 'timestamp');
|
||||
await fs.ensureDir(storagePath);
|
||||
await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
|
||||
}
|
||||
|
||||
|
||||
/**
 * Recursively walk a directory and return the full path to all files found.
 * Symbolic links are ignored.
 *
 * @param dir the directory to walk
 *
 * @return An iterator of the full path to all files recursively found in the directory.
 */
export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
  const seenFiles = new Set<string>();
  for await (const d of await fs.opendir(dir)) {
    const entry = path.join(dir, d.name);
    seenFiles.add(entry);
    if (d.isDirectory()) {
      yield* walkDirectory(entry);
    } else if (d.isFile()) {
      yield entry;
    }
  }
}
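
A short usage sketch for the walkDirectory generator above; the directory and the '.ql' filter are illustrative assumptions:

for await (const file of walkDirectory('/path/to/workspace')) {
  if (file.endsWith('.ql')) {
    void logger.log(`Found query file: ${file}`);
  }
}
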
extensions/ql-vscode/src/history-item-label-provider.ts (new file, 82 lines added)
@@ -0,0 +1,82 @@
|
||||
import { env } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
|
||||
interface InterpolateReplacements {
|
||||
t: string; // Start time
|
||||
q: string; // Query name
|
||||
d: string; // Database/Controller repo name
|
||||
r: string; // Result count/Empty
|
||||
s: string; // Status
|
||||
f: string; // Query file name
|
||||
'%': '%'; // Percent sign
|
||||
}
|
||||
|
||||
export class HistoryItemLabelProvider {
|
||||
constructor(private config: QueryHistoryConfig) {
|
||||
/**/
|
||||
}
|
||||
|
||||
getLabel(item: QueryHistoryInfo) {
|
||||
const replacements = item.t === 'local'
|
||||
? this.getLocalInterpolateReplacements(item)
|
||||
: this.getRemoteInterpolateReplacements(item);
|
||||
|
||||
const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');
|
||||
|
||||
return this.interpolate(rawLabel, replacements);
|
||||
}
|
||||
|
||||
/**
|
||||
* If there is a user-specified label for this query, interpolate and use that.
|
||||
* Otherwise, use the raw name of this query.
|
||||
*
|
||||
* @returns the name of the query, unless there is a custom label for this query.
|
||||
*/
|
||||
getShortLabel(item: QueryHistoryInfo): string {
|
||||
return item.userSpecifiedLabel
|
||||
? this.getLabel(item)
|
||||
: item.t === 'local'
|
||||
? item.getQueryName()
|
||||
: item.remoteQuery.queryName;
|
||||
}
|
||||
|
||||
|
||||
private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
|
||||
return rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
|
||||
const replacement = replacements[key];
|
||||
return replacement !== undefined ? replacement : match;
|
||||
});
|
||||
}
|
||||
|
||||
private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
|
||||
const { resultCount = 0, statusString = 'in progress' } = item.completedQuery || {};
|
||||
return {
|
||||
t: item.startTime,
|
||||
q: item.getQueryName(),
|
||||
d: item.initialInfo.databaseInfo.name,
|
||||
r: `${resultCount} results`,
|
||||
s: statusString,
|
||||
f: item.getQueryFileName(),
|
||||
'%': '%',
|
||||
};
|
||||
}
|
||||
|
||||
private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
|
||||
return {
|
||||
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
|
||||
q: item.remoteQuery.queryName,
|
||||
|
||||
// There is no database name for remote queries. Instead use the controller repository name.
|
||||
d: `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`,
|
||||
|
||||
// There is no synchronous way to get the results count.
|
||||
r: '',
|
||||
s: item.status,
|
||||
f: path.basename(item.remoteQuery.queryFilePath),
|
||||
'%': '%'
|
||||
};
|
||||
}
|
||||
}
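To illustrate the `%`-interpolation above, a hedged sketch (the config format, history item, and values are invented for the example):

const labelProvider = new HistoryItemLabelProvider(queryHistoryConfig);
// Suppose queryHistoryConfig.format is '%q on %d (%s, %r)'. For a completed local
// query named 'FindFoo.ql' run against database 'my-db' with 12 results, getLabel
// interpolates each %-key from getLocalInterpolateReplacements and returns:
//   'FindFoo.ql on my-db (completed, 12 results)'
// A literal percent sign is written as '%%'; unknown keys are left unchanged by the
// replace callback.
const label = labelProvider.getLabel(localQueryHistoryItem);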
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as crypto from 'crypto';
|
||||
import * as os from 'os';
|
||||
import {
|
||||
Uri,
|
||||
Location,
|
||||
@@ -70,7 +71,7 @@ function resolveFivePartLocation(
|
||||
Math.max(0, loc.startLine - 1),
|
||||
Math.max(0, loc.startColumn - 1),
|
||||
Math.max(0, loc.endLine - 1),
|
||||
Math.max(0, loc.endColumn)
|
||||
Math.max(1, loc.endColumn)
|
||||
);
|
||||
|
||||
return new Location(databaseItem.resolveSourceFile(loc.uri), range);
|
||||
@@ -117,13 +118,25 @@ export function tryResolveLocation(
|
||||
export function getHtmlForWebview(
|
||||
webview: Webview,
|
||||
scriptUriOnDisk: Uri,
|
||||
stylesheetUriOnDisk: Uri
|
||||
stylesheetUrisOnDisk: Uri[],
|
||||
allowInlineStyles: boolean
|
||||
): string {
|
||||
// Convert the on-disk URIs into webview URIs.
|
||||
const scriptWebviewUri = webview.asWebviewUri(scriptUriOnDisk);
|
||||
const stylesheetWebviewUri = webview.asWebviewUri(stylesheetUriOnDisk);
|
||||
const stylesheetWebviewUris = stylesheetUrisOnDisk.map(stylesheetUriOnDisk =>
|
||||
webview.asWebviewUri(stylesheetUriOnDisk));
|
||||
|
||||
// Use a nonce in the content security policy to uniquely identify the above resources.
|
||||
const nonce = getNonce();
|
||||
|
||||
const stylesheetsHtmlLines = allowInlineStyles
|
||||
? stylesheetWebviewUris.map(uri => createStylesLinkWithoutNonce(uri))
|
||||
: stylesheetWebviewUris.map(uri => createStylesLinkWithNonce(nonce, uri));
|
||||
|
||||
const styleSrc = allowInlineStyles
|
||||
? `${webview.cspSource} vscode-file: 'unsafe-inline'`
|
||||
: `'nonce-${nonce}'`;
|
||||
|
||||
/*
|
||||
* Content security policy:
|
||||
* default-src: allow nothing by default.
|
||||
@@ -136,8 +149,8 @@ export function getHtmlForWebview(
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Security-Policy"
|
||||
content="default-src 'none'; script-src 'nonce-${nonce}'; style-src 'nonce-${nonce}'; connect-src ${webview.cspSource};">
|
||||
<link nonce="${nonce}" rel="stylesheet" href="${stylesheetWebviewUri}">
|
||||
content="default-src 'none'; script-src 'nonce-${nonce}'; style-src ${styleSrc}; connect-src ${webview.cspSource};">
|
||||
${stylesheetsHtmlLines.join(` ${os.EOL}`)}
|
||||
</head>
|
||||
<body>
|
||||
<div id=root>
|
||||
@@ -224,15 +237,23 @@ export async function jumpToLocation(
|
||||
} catch (e) {
|
||||
if (e instanceof Error) {
|
||||
if (e.message.match(/File not found/)) {
|
||||
Window.showErrorMessage(
|
||||
void Window.showErrorMessage(
|
||||
'Original file of this result is not in the database\'s source archive.'
|
||||
);
|
||||
} else {
|
||||
logger.log(`Unable to handleMsgFromView: ${e.message}`);
|
||||
void logger.log(`Unable to handleMsgFromView: ${e.message}`);
|
||||
}
|
||||
} else {
|
||||
logger.log(`Unable to handleMsgFromView: ${e}`);
|
||||
void logger.log(`Unable to handleMsgFromView: ${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createStylesLinkWithNonce(nonce: string, uri: Uri): string {
|
||||
return `<link nonce="${nonce}" rel="stylesheet" href="${uri}">`;
|
||||
}
|
||||
|
||||
function createStylesLinkWithoutNonce(uri: Uri): string {
|
||||
return `<link rel="stylesheet" href="${uri}">`;
|
||||
}
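A hedged sketch of a typical caller of `getHtmlForWebview` with the nonce-based content security policy (it mirrors the resultsView wiring later in this diff; `ctx` and `panel` come from the surrounding extension code):

const scriptPathOnDisk = Uri.file(ctx.asAbsolutePath('out/resultsView.js'));
const stylesheetPathOnDisk = Uri.file(ctx.asAbsolutePath('out/view/resultsView.css'));
// allowInlineStyles = false keeps the strict 'nonce-...' style-src policy.
panel.webview.html = getHtmlForWebview(
  panel.webview,
  scriptPathOnDisk,
  [stylesheetPathOnDisk],
  false
);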
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as path from 'path';
|
||||
import * as Sarif from 'sarif';
|
||||
import { DisposableObject } from './vscode-utils/disposable-object';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import * as vscode from 'vscode';
|
||||
import {
|
||||
Diagnostic,
|
||||
@@ -14,8 +14,8 @@ import {
|
||||
import * as cli from './cli';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { showAndLogErrorMessage, tmpDir } from './helpers';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import {
|
||||
FromResultsViewMsg,
|
||||
Interpretation,
|
||||
@@ -27,13 +27,14 @@ import {
|
||||
InterpretedResultsSortState,
|
||||
SortDirection,
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
RawResultsSortState,
|
||||
} from './pure/interface-types';
|
||||
import { Logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { CompletedQuery, interpretResults } from './query-results';
|
||||
import { QueryInfo, tmpDir } from './run-queries';
|
||||
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
|
||||
import {
|
||||
WebviewReveal,
|
||||
@@ -47,6 +48,8 @@ import {
|
||||
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
|
||||
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
|
||||
import { PAGE_SIZE } from './config';
|
||||
import { CompletedLocalQueryInfo } from './query-results';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
|
||||
/**
|
||||
* interface.ts
|
||||
@@ -87,16 +90,40 @@ function sortInterpretedResults(
|
||||
}
|
||||
}
|
||||
|
||||
function numPagesOfResultSet(resultSet: RawResultSet): number {
|
||||
return Math.ceil(resultSet.schema.rows / PAGE_SIZE.getValue<number>());
|
||||
function interpretedPageSize(interpretation: Interpretation | undefined): number {
|
||||
if (interpretation?.data.t == 'GraphInterpretationData') {
|
||||
// Graph views always have one result per page.
|
||||
return 1;
|
||||
}
|
||||
return PAGE_SIZE.getValue<number>();
|
||||
}
|
||||
|
||||
function numPagesOfResultSet(resultSet: RawResultSet, interpretation?: Interpretation): number {
|
||||
const pageSize = interpretedPageSize(interpretation);
|
||||
|
||||
const n = interpretation?.data.t == 'GraphInterpretationData'
|
||||
? interpretation.data.dot.length
|
||||
: resultSet.schema.rows;
|
||||
|
||||
return Math.ceil(n / pageSize);
|
||||
}
|
||||
|
||||
function numInterpretedPages(interpretation: Interpretation | undefined): number {
|
||||
return Math.ceil((interpretation?.sarif.runs[0].results?.length || 0) / PAGE_SIZE.getValue<number>());
|
||||
if (!interpretation) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const pageSize = interpretedPageSize(interpretation);
|
||||
|
||||
const n = interpretation.data.t == 'GraphInterpretationData'
|
||||
? interpretation.data.dot.length
|
||||
: interpretation.data.runs[0].results?.length || 0;
|
||||
|
||||
return Math.ceil(n / pageSize);
|
||||
}
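A worked illustration of the paging arithmetic above (the PAGE_SIZE value is only an example):

// With PAGE_SIZE = 200:
//  - a SARIF interpretation whose first run has 450 alerts needs Math.ceil(450 / 200) = 3 pages;
//  - a graph interpretation with 5 dot strings uses a page size of 1, so it needs 5 pages,
//    one rendered graph per page.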
|
||||
|
||||
export class InterfaceManager extends DisposableObject {
|
||||
private _displayedQuery?: CompletedQuery;
|
||||
private _displayedQuery?: CompletedLocalQueryInfo;
|
||||
private _interpretation?: Interpretation;
|
||||
private _panel: vscode.WebviewPanel | undefined;
|
||||
private _panelLoaded = false;
|
||||
@@ -110,7 +137,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
public ctx: vscode.ExtensionContext,
|
||||
private databaseManager: DatabaseManager,
|
||||
public cliServer: CodeQLCliServer,
|
||||
public logger: Logger
|
||||
public logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider
|
||||
) {
|
||||
super();
|
||||
this.push(this._diagnosticCollection);
|
||||
@@ -119,7 +147,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
this.handleSelectionChange.bind(this)
|
||||
)
|
||||
);
|
||||
logger.log('Registering path-step navigation commands.');
|
||||
void logger.log('Registering path-step navigation commands.');
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryResults.nextPathStep',
|
||||
@@ -137,16 +165,22 @@ export class InterfaceManager extends DisposableObject {
|
||||
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
|
||||
if (kind === DatabaseEventKind.Remove) {
|
||||
this._diagnosticCollection.clear();
|
||||
this.postMessage({
|
||||
t: 'untoggleShowProblems'
|
||||
});
|
||||
if (this.isShowingPanel()) {
|
||||
void this.postMessage({
|
||||
t: 'untoggleShowProblems'
|
||||
});
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
async navigatePathStep(direction: number): Promise<void> {
|
||||
this.postMessage({ t: 'navigatePath', direction });
|
||||
await this.postMessage({ t: 'navigatePath', direction });
|
||||
}
|
||||
|
||||
private isShowingPanel() {
|
||||
return !!this._panel;
|
||||
}
|
||||
|
||||
// Returns the webview panel, creating it if it doesn't already
|
||||
@@ -154,10 +188,11 @@ export class InterfaceManager extends DisposableObject {
|
||||
getPanel(): vscode.WebviewPanel {
|
||||
if (this._panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const webViewColumn = this.chooseColumnForWebview();
|
||||
const panel = (this._panel = Window.createWebviewPanel(
|
||||
'resultsView', // internal name
|
||||
'CodeQL Query Results', // user-visible name
|
||||
{ viewColumn: vscode.ViewColumn.Beside, preserveFocus: true },
|
||||
{ viewColumn: webViewColumn, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
@@ -168,46 +203,72 @@ export class InterfaceManager extends DisposableObject {
|
||||
]
|
||||
}
|
||||
));
|
||||
this._panel.onDidDispose(
|
||||
|
||||
this.push(this._panel.onDidDispose(
|
||||
() => {
|
||||
this._panel = undefined;
|
||||
this._displayedQuery = undefined;
|
||||
this._panelLoaded = false;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
);
|
||||
));
|
||||
const scriptPathOnDisk = vscode.Uri.file(
|
||||
ctx.asAbsolutePath('out/resultsView.js')
|
||||
);
|
||||
const stylesheetPathOnDisk = vscode.Uri.file(
|
||||
ctx.asAbsolutePath('out/resultsView.css')
|
||||
ctx.asAbsolutePath('out/view/resultsView.css')
|
||||
);
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
stylesheetPathOnDisk
|
||||
[stylesheetPathOnDisk],
|
||||
false
|
||||
);
|
||||
panel.webview.onDidReceiveMessage(
|
||||
this.push(panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
);
|
||||
));
|
||||
}
|
||||
return this._panel;
|
||||
}
|
||||
|
||||
/**
|
||||
* Choose where to open the webview.
|
||||
*
|
||||
* If there is a single view column, then open beside it.
|
||||
* If there are multiple view columns, then open beside the active column,
|
||||
* unless the active editor is the last column. In this case, open in the first column.
|
||||
*
|
||||
* The goal is to avoid opening new columns when there already are two columns open.
|
||||
*/
|
||||
private chooseColumnForWebview(): vscode.ViewColumn {
|
||||
// This is not a great way to determine the number of view columns, but I
|
||||
// can't find a vscode API that does it any better.
|
||||
// Here, iterate through all the visible editors and determine the max view column.
|
||||
// This won't work if the largest view column is empty.
|
||||
const colCount = Window.visibleTextEditors.reduce((maxVal, editor) =>
|
||||
Math.max(maxVal, Number.parseInt(editor.viewColumn?.toFixed() || '0', 10)), 0);
|
||||
if (colCount <= 1) {
|
||||
return vscode.ViewColumn.Beside;
|
||||
}
|
||||
const activeViewColumnNum = Number.parseInt(Window.activeTextEditor?.viewColumn?.toFixed() || '0', 10);
|
||||
return activeViewColumnNum === colCount ? vscode.ViewColumn.One : vscode.ViewColumn.Beside;
|
||||
}
|
||||
|
||||
private async changeInterpretedSortState(
|
||||
sortState: InterpretedResultsSortState | undefined
|
||||
): Promise<void> {
|
||||
if (this._displayedQuery === undefined) {
|
||||
showAndLogErrorMessage(
|
||||
void showAndLogErrorMessage(
|
||||
'Failed to sort results since evaluation info was unknown.'
|
||||
);
|
||||
return;
|
||||
}
|
||||
// Notify the webview that it should expect new results.
|
||||
await this.postMessage({ t: 'resultsUpdating' });
|
||||
this._displayedQuery.updateInterpretedSortState(sortState);
|
||||
await this._displayedQuery.completedQuery.updateInterpretedSortState(sortState);
|
||||
await this.showResults(this._displayedQuery, WebviewReveal.NotForced, true);
|
||||
}
|
||||
|
||||
@@ -216,14 +277,14 @@ export class InterfaceManager extends DisposableObject {
|
||||
sortState: RawResultsSortState | undefined
|
||||
): Promise<void> {
|
||||
if (this._displayedQuery === undefined) {
|
||||
showAndLogErrorMessage(
|
||||
void showAndLogErrorMessage(
|
||||
'Failed to sort results since evaluation info was unknown.'
|
||||
);
|
||||
return;
|
||||
}
|
||||
// Notify the webview that it should expect new results.
|
||||
await this.postMessage({ t: 'resultsUpdating' });
|
||||
await this._displayedQuery.updateSortState(
|
||||
await this._displayedQuery.completedQuery.updateSortState(
|
||||
this.cliServer,
|
||||
resultSetName,
|
||||
sortState
|
||||
@@ -236,61 +297,67 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
|
||||
private async handleMsgFromView(msg: FromResultsViewMsg): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'viewSourceFile': {
|
||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||
break;
|
||||
}
|
||||
case 'toggleDiagnostics': {
|
||||
if (msg.visible) {
|
||||
const databaseItem = this.databaseManager.findDatabaseItem(
|
||||
Uri.parse(msg.databaseUri)
|
||||
);
|
||||
if (databaseItem !== undefined) {
|
||||
await this.showResultsAsDiagnostics(
|
||||
msg.origResultsPaths,
|
||||
msg.metadata,
|
||||
databaseItem
|
||||
try {
|
||||
switch (msg.t) {
|
||||
case 'viewSourceFile': {
|
||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||
break;
|
||||
}
|
||||
case 'toggleDiagnostics': {
|
||||
if (msg.visible) {
|
||||
const databaseItem = this.databaseManager.findDatabaseItem(
|
||||
Uri.parse(msg.databaseUri)
|
||||
);
|
||||
if (databaseItem !== undefined) {
|
||||
await this.showResultsAsDiagnostics(
|
||||
msg.origResultsPaths,
|
||||
msg.metadata,
|
||||
databaseItem
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// TODO: Only clear diagnostics on the same database.
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'resultViewLoaded':
|
||||
this._panelLoaded = true;
|
||||
this._panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this._panelLoadedCallBacks = [];
|
||||
break;
|
||||
case 'changeSort':
|
||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||
break;
|
||||
case 'changeInterpretedSort':
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
await this.showPageOfRawResults(
|
||||
msg.selectedTable,
|
||||
msg.pageNumber,
|
||||
// When we are in an unsorted state, we guarantee that
|
||||
// sortedResultsInfo doesn't have an entry for the current
|
||||
// result set. Use this to determine whether or not we use
|
||||
// the sorted bqrs file.
|
||||
!!this._displayedQuery?.completedQuery.sortedResultsInfo[msg.selectedTable]
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// TODO: Only clear diagnostics on the same database.
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
break;
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
case 'resultViewLoaded':
|
||||
this._panelLoaded = true;
|
||||
this._panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this._panelLoadedCallBacks = [];
|
||||
break;
|
||||
case 'changeSort':
|
||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||
break;
|
||||
case 'changeInterpretedSort':
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
await this.showPageOfRawResults(
|
||||
msg.selectedTable,
|
||||
msg.pageNumber,
|
||||
// When we are in an unsorted state, we guarantee that
|
||||
// sortedResultsInfo doesn't have an entry for the current
|
||||
// result set. Use this to determine whether or not we use
|
||||
// the sorted bqrs file.
|
||||
this._displayedQuery?.sortedResultsInfo.has(msg.selectedTable) || false
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||
fullMessage: getErrorStack(e)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -310,7 +377,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
/**
|
||||
* Show query results in webview panel.
|
||||
* @param results Evaluation info for the executed query.
|
||||
* @param fullQuery Evaluation info for the executed query.
|
||||
* @param shouldKeepOldResultsWhileRendering Should keep old results while rendering.
|
||||
* @param forceReveal Force the webview panel to be visible and
|
||||
* Appropriate when the user has just performed an explicit
|
||||
@@ -318,58 +385,59 @@ export class InterfaceManager extends DisposableObject {
|
||||
* history entry.
|
||||
*/
|
||||
public async showResults(
|
||||
results: CompletedQuery,
|
||||
fullQuery: CompletedLocalQueryInfo,
|
||||
forceReveal: WebviewReveal,
|
||||
shouldKeepOldResultsWhileRendering = false
|
||||
): Promise<void> {
|
||||
if (results.result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
if (fullQuery.completedQuery.result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._interpretation = undefined;
|
||||
const interpretationPage = await this.interpretResultsInfo(
|
||||
results.query,
|
||||
results.interpretedResultsSortState
|
||||
fullQuery.completedQuery.query,
|
||||
fullQuery.completedQuery.interpretedResultsSortState
|
||||
);
|
||||
|
||||
const sortedResultsMap: SortedResultsMap = {};
|
||||
results.sortedResultsInfo.forEach(
|
||||
(v, k) =>
|
||||
Object.entries(fullQuery.completedQuery.sortedResultsInfo).forEach(
|
||||
([k, v]) =>
|
||||
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
|
||||
);
|
||||
|
||||
this._displayedQuery = results;
|
||||
this._displayedQuery = fullQuery;
|
||||
|
||||
const panel = this.getPanel();
|
||||
await this.waitForPanelLoaded();
|
||||
if (forceReveal === WebviewReveal.Forced) {
|
||||
panel.reveal(undefined, true);
|
||||
} else if (!panel.visible) {
|
||||
// The results panel exists, (`.getPanel()` guarantees it) but
|
||||
// is not visible; it's in a not-currently-viewed tab. Show a
|
||||
// more asynchronous message to not so abruptly interrupt
|
||||
// user's workflow by immediately revealing the panel.
|
||||
const showButton = 'View Results';
|
||||
const queryName = results.queryName;
|
||||
const resultPromise = vscode.window.showInformationMessage(
|
||||
`Finished running query ${
|
||||
queryName.length > 0 ? ` "${queryName}"` : ''
|
||||
}.`,
|
||||
showButton
|
||||
);
|
||||
// Address this click asynchronously so we still update the
|
||||
// query history immediately.
|
||||
resultPromise.then((result) => {
|
||||
if (result === showButton) {
|
||||
panel.reveal();
|
||||
}
|
||||
});
|
||||
if (!panel.visible) {
|
||||
if (forceReveal === WebviewReveal.Forced) {
|
||||
panel.reveal(undefined, true);
|
||||
} else {
|
||||
// The results panel exists (`.getPanel()` guarantees it) but is not
// visible; it's in a tab that is not currently viewed. Show a less
// intrusive information message rather than abruptly revealing the
// panel and interrupting the user's workflow.
|
||||
const showButton = 'View Results';
|
||||
const queryName = this.labelProvider.getShortLabel(fullQuery);
|
||||
const resultPromise = vscode.window.showInformationMessage(
|
||||
`Finished running query ${queryName.length > 0 ? ` "${queryName}"` : ''
|
||||
}.`,
|
||||
showButton
|
||||
);
|
||||
// Address this click asynchronously so we still update the
|
||||
// query history immediately.
|
||||
void resultPromise.then((result) => {
|
||||
if (result === showButton) {
|
||||
panel.reveal();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Note that the resultSetSchemas will return offsets for the default (unsorted) page,
|
||||
// which may not be correct. However, in this case, it doesn't matter since we only
|
||||
// need the first offset, which will be the same no matter which sorting we use.
|
||||
const resultSetSchemas = await this.getResultSetSchemas(results);
|
||||
const resultSetSchemas = await this.getResultSetSchemas(fullQuery.completedQuery);
|
||||
const resultSetNames = resultSetSchemas.map(schema => schema.name);
|
||||
|
||||
const selectedTable = getDefaultResultSetName(resultSetNames);
|
||||
@@ -379,7 +447,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
// Use sorted results path if it exists. This may happen if we are
|
||||
// reloading the results view after it has been sorted in the past.
|
||||
const resultsPath = results.getResultsPath(selectedTable);
|
||||
const resultsPath = fullQuery.completedQuery.getResultsPath(selectedTable);
|
||||
const pageSize = PAGE_SIZE.getValue<number>();
|
||||
const chunk = await this.cliServer.bqrsDecode(
|
||||
resultsPath,
|
||||
@@ -394,10 +462,11 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
);
|
||||
const resultSet = transformBqrsResultSet(schema, chunk);
|
||||
fullQuery.completedQuery.setResultCount(interpretationPage?.numTotalResults || resultSet.schema.rows);
|
||||
const parsedResultSets: ParsedResultSets = {
|
||||
pageNumber: 0,
|
||||
pageSize,
|
||||
numPages: numPagesOfResultSet(resultSet),
|
||||
numPages: numPagesOfResultSet(resultSet, this._interpretation),
|
||||
numInterpretedPages: numInterpretedPages(this._interpretation),
|
||||
resultSet: { ...resultSet, t: 'RawResultSet' },
|
||||
selectedTable: undefined,
|
||||
@@ -407,17 +476,17 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.postMessage({
|
||||
t: 'setState',
|
||||
interpretation: interpretationPage,
|
||||
origResultsPaths: results.query.resultsPaths,
|
||||
origResultsPaths: fullQuery.completedQuery.query.resultsPaths,
|
||||
resultsPath: this.convertPathToWebviewUri(
|
||||
results.query.resultsPaths.resultsPath
|
||||
fullQuery.completedQuery.query.resultsPaths.resultsPath
|
||||
),
|
||||
parsedResultSets,
|
||||
sortedResultsMap,
|
||||
database: results.database,
|
||||
database: fullQuery.initialInfo.databaseInfo,
|
||||
shouldKeepOldResultsWhileRendering,
|
||||
metadata: results.query.metadata,
|
||||
queryName: results.toString(),
|
||||
queryPath: results.query.program.queryPath
|
||||
metadata: fullQuery.completedQuery.query.metadata,
|
||||
queryName: this.labelProvider.getLabel(fullQuery),
|
||||
queryPath: fullQuery.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
|
||||
@@ -433,29 +502,29 @@ export class InterfaceManager extends DisposableObject {
|
||||
if (this._interpretation === undefined) {
|
||||
throw new Error('Trying to show interpreted results but interpretation was undefined');
|
||||
}
|
||||
if (this._interpretation.sarif.runs[0].results === undefined) {
|
||||
if (this._interpretation.data.t === 'SarifInterpretationData' && this._interpretation.data.runs[0].results === undefined) {
|
||||
throw new Error('Trying to show interpreted results but results were undefined');
|
||||
}
|
||||
|
||||
const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery);
|
||||
const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery.completedQuery);
|
||||
const resultSetNames = resultSetSchemas.map(schema => schema.name);
|
||||
|
||||
await this.postMessage({
|
||||
t: 'showInterpretedPage',
|
||||
interpretation: this.getPageOfInterpretedResults(pageNumber),
|
||||
database: this._displayedQuery.database,
|
||||
metadata: this._displayedQuery.query.metadata,
|
||||
database: this._displayedQuery.initialInfo.databaseInfo,
|
||||
metadata: this._displayedQuery.completedQuery.query.metadata,
|
||||
pageNumber,
|
||||
resultSetNames,
|
||||
pageSize: PAGE_SIZE.getValue(),
|
||||
pageSize: interpretedPageSize(this._interpretation),
|
||||
numPages: numInterpretedPages(this._interpretation),
|
||||
queryName: this._displayedQuery.toString(),
|
||||
queryPath: this._displayedQuery.query.program.queryPath
|
||||
queryName: this.labelProvider.getLabel(this._displayedQuery),
|
||||
queryPath: this._displayedQuery.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
|
||||
private async getResultSetSchemas(results: CompletedQuery, selectedTable = ''): Promise<ResultSetSchema[]> {
|
||||
const resultsPath = results.getResultsPath(selectedTable);
|
||||
private async getResultSetSchemas(completedQuery: CompletedQueryInfo, selectedTable = ''): Promise<ResultSetSchema[]> {
|
||||
const resultsPath = completedQuery.getResultsPath(selectedTable);
|
||||
const schemas = await this.cliServer.bqrsInfo(
|
||||
resultsPath,
|
||||
PAGE_SIZE.getValue()
|
||||
@@ -482,13 +551,18 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
|
||||
const sortedResultsMap: SortedResultsMap = {};
|
||||
results.sortedResultsInfo.forEach(
|
||||
(v, k) =>
|
||||
Object.entries(results.completedQuery.sortedResultsInfo).forEach(
|
||||
([k, v]) =>
|
||||
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
|
||||
);
|
||||
|
||||
const resultSetSchemas = await this.getResultSetSchemas(results, sorted ? selectedTable : '');
|
||||
const resultSetNames = resultSetSchemas.map(schema => schema.name);
|
||||
const resultSetSchemas = await this.getResultSetSchemas(results.completedQuery, sorted ? selectedTable : '');
|
||||
|
||||
// If there is a specific sorted table selected, a different bqrs file is loaded that doesn't have all the result set names.
|
||||
// Make sure that we load all result set names here.
|
||||
// See https://github.com/github/vscode-codeql/issues/1005
|
||||
const allResultSetSchemas = sorted ? await this.getResultSetSchemas(results.completedQuery, '') : resultSetSchemas;
|
||||
const resultSetNames = allResultSetSchemas.map(schema => schema.name);
|
||||
|
||||
const schema = resultSetSchemas.find(
|
||||
(resultSet) => resultSet.name == selectedTable
|
||||
@@ -498,7 +572,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
const pageSize = PAGE_SIZE.getValue<number>();
|
||||
const chunk = await this.cliServer.bqrsDecode(
|
||||
results.getResultsPath(selectedTable, sorted),
|
||||
results.completedQuery.getResultsPath(selectedTable, sorted),
|
||||
schema.name,
|
||||
{
|
||||
offset: schema.pagination?.offsets[pageNumber],
|
||||
@@ -520,17 +594,17 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.postMessage({
|
||||
t: 'setState',
|
||||
interpretation: this._interpretation,
|
||||
origResultsPaths: results.query.resultsPaths,
|
||||
origResultsPaths: results.completedQuery.query.resultsPaths,
|
||||
resultsPath: this.convertPathToWebviewUri(
|
||||
results.query.resultsPaths.resultsPath
|
||||
results.completedQuery.query.resultsPaths.resultsPath
|
||||
),
|
||||
parsedResultSets,
|
||||
sortedResultsMap,
|
||||
database: results.database,
|
||||
database: results.initialInfo.databaseInfo,
|
||||
shouldKeepOldResultsWhileRendering: false,
|
||||
metadata: results.query.metadata,
|
||||
queryName: results.toString(),
|
||||
queryPath: results.query.program.queryPath
|
||||
metadata: results.completedQuery.query.metadata,
|
||||
queryName: this.labelProvider.getLabel(results),
|
||||
queryPath: results.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
|
||||
@@ -540,31 +614,50 @@ export class InterfaceManager extends DisposableObject {
|
||||
sourceInfo: cli.SourceInfo | undefined,
|
||||
sourceLocationPrefix: string,
|
||||
sortState: InterpretedResultsSortState | undefined
|
||||
): Promise<Interpretation> {
|
||||
const sarif = await interpretResults(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
sarif.runs.forEach(run => {
|
||||
if (run.results !== undefined) {
|
||||
sortInterpretedResults(run.results, sortState);
|
||||
}
|
||||
});
|
||||
): Promise<Interpretation | undefined> {
|
||||
if (!resultsPaths) {
|
||||
void this.logger.log('No results path. Cannot display interpreted results.');
|
||||
return undefined;
|
||||
}
|
||||
let data;
|
||||
let numTotalResults;
|
||||
if (metadata?.kind === GRAPH_TABLE_NAME) {
|
||||
data = await interpretGraphResults(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
numTotalResults = data.dot.length;
|
||||
} else {
|
||||
const sarif = await interpretResultsSarif(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
|
||||
const numTotalResults = (() => {
|
||||
if (sarif.runs.length === 0) return 0;
|
||||
if (sarif.runs[0].results === undefined) return 0;
|
||||
return sarif.runs[0].results.length;
|
||||
})();
|
||||
sarif.runs.forEach(run => {
|
||||
if (run.results) {
|
||||
sortInterpretedResults(run.results, sortState);
|
||||
}
|
||||
});
|
||||
|
||||
sarif.sortState = sortState;
|
||||
data = sarif;
|
||||
|
||||
numTotalResults = (() => {
|
||||
return sarif.runs?.[0]?.results
|
||||
? sarif.runs[0].results.length
|
||||
: 0;
|
||||
})();
|
||||
}
|
||||
|
||||
const interpretation: Interpretation = {
|
||||
sarif,
|
||||
data,
|
||||
sourceLocationPrefix,
|
||||
numTruncatedResults: 0,
|
||||
numTotalResults,
|
||||
sortState,
|
||||
numTotalResults
|
||||
};
|
||||
this._interpretation = interpretation;
|
||||
return interpretation;
|
||||
@@ -573,7 +666,6 @@ export class InterfaceManager extends DisposableObject {
|
||||
private getPageOfInterpretedResults(
|
||||
pageNumber: number
|
||||
): Interpretation {
|
||||
|
||||
function getPageOfRun(run: Sarif.Run): Sarif.Run {
|
||||
return {
|
||||
...run, results: run.results?.slice(
|
||||
@@ -583,32 +675,44 @@ export class InterfaceManager extends DisposableObject {
|
||||
};
|
||||
}
|
||||
|
||||
if (this._interpretation === undefined) {
|
||||
const interp = this._interpretation;
|
||||
if (interp === undefined) {
|
||||
throw new Error('Tried to get interpreted results before interpretation finished');
|
||||
}
|
||||
if (this._interpretation.sarif.runs.length !== 1) {
|
||||
this.logger.log(`Warning: SARIF file had ${this._interpretation.sarif.runs.length} runs, expected 1`);
|
||||
|
||||
if (interp.data.t !== 'SarifInterpretationData')
|
||||
return interp;
|
||||
|
||||
if (interp.data.runs.length !== 1) {
|
||||
void this.logger.log(`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`);
|
||||
}
|
||||
const interp = this._interpretation;
|
||||
|
||||
return {
|
||||
...interp,
|
||||
sarif: { ...interp.sarif, runs: [getPageOfRun(interp.sarif.runs[0])] },
|
||||
data: {
|
||||
...interp.data,
|
||||
runs: [getPageOfRun(interp.data.runs[0])]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private async interpretResultsInfo(
|
||||
query: QueryInfo,
|
||||
query: QueryEvaluationInfo,
|
||||
sortState: InterpretedResultsSortState | undefined
|
||||
): Promise<Interpretation | undefined> {
|
||||
if (
|
||||
(await query.canHaveInterpretedResults()) &&
|
||||
query.canHaveInterpretedResults() &&
|
||||
query.quickEvalPosition === undefined // never do results interpretation if quickEval
|
||||
) {
|
||||
try {
|
||||
const sourceLocationPrefix = await query.dbItem.getSourceLocationPrefix(
|
||||
const dbItem = this.databaseManager.findDatabaseItem(Uri.file(query.dbItemPath));
|
||||
if (!dbItem) {
|
||||
throw new Error(`Could not find database item for ${query.dbItemPath}`);
|
||||
}
|
||||
const sourceLocationPrefix = await dbItem.getSourceLocationPrefix(
|
||||
this.cliServer
|
||||
);
|
||||
const sourceArchiveUri = query.dbItem.sourceArchive;
|
||||
const sourceArchiveUri = dbItem.sourceArchive;
|
||||
const sourceInfo =
|
||||
sourceArchiveUri === undefined
|
||||
? undefined
|
||||
@@ -626,8 +730,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
} catch (e) {
|
||||
// If interpretation fails, accept the error and continue
|
||||
// trying to render uninterpreted results anyway.
|
||||
this.logger.log(
|
||||
`Exception during results interpretation: ${e.message}. Will show raw results instead.`
|
||||
void showAndLogErrorMessage(
|
||||
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -659,12 +763,15 @@ export class InterfaceManager extends DisposableObject {
|
||||
undefined
|
||||
);
|
||||
|
||||
if (!interpretation) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.showProblemResultsAsDiagnostics(interpretation, database);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : e.toString();
|
||||
this.logger.log(
|
||||
`Exception while computing problem results as diagnostics: ${msg}`
|
||||
void this.logger.log(
|
||||
`Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
|
||||
);
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
@@ -674,10 +781,13 @@ export class InterfaceManager extends DisposableObject {
|
||||
interpretation: Interpretation,
|
||||
databaseItem: DatabaseItem
|
||||
): Promise<void> {
|
||||
const { sarif, sourceLocationPrefix } = interpretation;
|
||||
const { data, sourceLocationPrefix } = interpretation;
|
||||
|
||||
if (!sarif.runs || !sarif.runs[0].results) {
|
||||
this.logger.log(
|
||||
if (data.t !== 'SarifInterpretationData')
|
||||
return;
|
||||
|
||||
if (!data.runs || !data.runs[0].results) {
|
||||
void this.logger.log(
|
||||
'Didn\'t find a run in the sarif results. Error processing sarif?'
|
||||
);
|
||||
return;
|
||||
@@ -685,14 +795,14 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
const diagnostics: [Uri, ReadonlyArray<Diagnostic>][] = [];
|
||||
|
||||
for (const result of sarif.runs[0].results) {
|
||||
for (const result of data.runs[0].results) {
|
||||
const message = result.message.text;
|
||||
if (message === undefined) {
|
||||
this.logger.log('Sarif had result without plaintext message');
|
||||
void this.logger.log('Sarif had result without plaintext message');
|
||||
continue;
|
||||
}
|
||||
if (!result.locations) {
|
||||
this.logger.log('Sarif had result without location');
|
||||
void this.logger.log('Sarif had result without location');
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -705,7 +815,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
const resultLocation = tryResolveLocation(sarifLoc, databaseItem);
|
||||
if (!resultLocation) {
|
||||
this.logger.log('Sarif location was not resolvable ' + sarifLoc);
|
||||
void this.logger.log('Sarif location was not resolvable ' + sarifLoc);
|
||||
continue;
|
||||
}
|
||||
const parsedMessage = parseSarifPlainTextMessage(message);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { window as Window, OutputChannel, Progress, Disposable } from 'vscode';
|
||||
import { DisposableObject } from './vscode-utils/disposable-object';
|
||||
import { window as Window, OutputChannel, Progress } from 'vscode';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -26,11 +26,6 @@ export interface Logger {
|
||||
* @param location log to remove
|
||||
*/
|
||||
removeAdditionalLogLocation(location: string | undefined): void;
|
||||
|
||||
/**
|
||||
 * The base location where all side log files are stored.
|
||||
*/
|
||||
getBaseLocation(): string | undefined;
|
||||
}
|
||||
|
||||
export type ProgressReporter = Progress<{ message: string }>;
|
||||
@@ -39,19 +34,13 @@ export type ProgressReporter = Progress<{ message: string }>;
|
||||
export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
public readonly outputChannel: OutputChannel;
|
||||
private readonly additionalLocations = new Map<string, AdditionalLogLocation>();
|
||||
private additionalLogLocationPath: string | undefined;
|
||||
isCustomLogDirectory: boolean;
|
||||
|
||||
constructor(private title: string) {
|
||||
constructor(title: string) {
|
||||
super();
|
||||
this.outputChannel = Window.createOutputChannel(title);
|
||||
this.push(this.outputChannel);
|
||||
}
|
||||
|
||||
init(storagePath: string): void {
|
||||
this.additionalLogLocationPath = path.join(storagePath, this.title);
|
||||
|
||||
// clear out any old state from previous runs
|
||||
fs.remove(this.additionalLogLocationPath);
|
||||
this.isCustomLogDirectory = false;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -61,31 +50,41 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
* continuing.
|
||||
*/
|
||||
async log(message: string, options = {} as LogOptions): Promise<void> {
|
||||
if (options.trailingNewline === undefined) {
|
||||
options.trailingNewline = true;
|
||||
}
|
||||
|
||||
if (options.trailingNewline) {
|
||||
this.outputChannel.appendLine(message);
|
||||
} else {
|
||||
this.outputChannel.append(message);
|
||||
}
|
||||
|
||||
if (this.additionalLogLocationPath && options.additionalLogLocation) {
|
||||
const logPath = path.join(this.additionalLogLocationPath, options.additionalLogLocation);
|
||||
let additional = this.additionalLocations.get(logPath);
|
||||
if (!additional) {
|
||||
const msg = `| Log being saved to ${logPath} |`;
|
||||
const separator = new Array(msg.length).fill('-').join('');
|
||||
this.outputChannel.appendLine(separator);
|
||||
this.outputChannel.appendLine(msg);
|
||||
this.outputChannel.appendLine(separator);
|
||||
additional = new AdditionalLogLocation(logPath);
|
||||
this.additionalLocations.set(logPath, additional);
|
||||
this.track(additional);
|
||||
try {
|
||||
if (options.trailingNewline === undefined) {
|
||||
options.trailingNewline = true;
|
||||
}
|
||||
if (options.trailingNewline) {
|
||||
this.outputChannel.appendLine(message);
|
||||
} else {
|
||||
this.outputChannel.append(message);
|
||||
}
|
||||
|
||||
await additional.log(message, options);
|
||||
if (options.additionalLogLocation) {
|
||||
if (!path.isAbsolute(options.additionalLogLocation)) {
|
||||
throw new Error(`Additional Log Location must be an absolute path: ${options.additionalLogLocation}`);
|
||||
}
|
||||
const logPath = options.additionalLogLocation;
|
||||
let additional = this.additionalLocations.get(logPath);
|
||||
if (!additional) {
|
||||
const msg = `| Log being saved to ${logPath} |`;
|
||||
const separator = new Array(msg.length).fill('-').join('');
|
||||
this.outputChannel.appendLine(separator);
|
||||
this.outputChannel.appendLine(msg);
|
||||
this.outputChannel.appendLine(separator);
|
||||
additional = new AdditionalLogLocation(logPath);
|
||||
this.additionalLocations.set(logPath, additional);
|
||||
}
|
||||
|
||||
await additional.log(message, options);
|
||||
}
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message === 'Channel has been closed') {
|
||||
// The output channel has been closed; log to the console instead.
console.log('Output channel is closed, logging to console instead:', message);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,26 +93,15 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
}
|
||||
|
||||
removeAdditionalLogLocation(location: string | undefined): void {
|
||||
if (this.additionalLogLocationPath && location) {
|
||||
const logPath = location.startsWith(this.additionalLogLocationPath)
|
||||
? location
|
||||
: path.join(this.additionalLogLocationPath, location);
|
||||
const additional = this.additionalLocations.get(logPath);
|
||||
if (additional) {
|
||||
this.disposeAndStopTracking(additional);
|
||||
this.additionalLocations.delete(logPath);
|
||||
}
|
||||
if (location) {
|
||||
this.additionalLocations.delete(location);
|
||||
}
|
||||
}
|
||||
|
||||
getBaseLocation() {
|
||||
return this.additionalLogLocationPath;
|
||||
}
|
||||
}
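A hedged usage sketch for the additional log location handling above (the storage directory is illustrative; as enforced in `log`, the location must be an absolute path):

const queryLogPath = path.join(queryStorageDir, 'query.log');
await logger.log('Starting query evaluation...', {
  trailingNewline: true,
  additionalLogLocation: queryLogPath
});
// Later, once the query's side log is no longer needed:
logger.removeAdditionalLogLocation(queryLogPath);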
|
||||
|
||||
class AdditionalLogLocation extends Disposable {
|
||||
class AdditionalLogLocation {
|
||||
constructor(private location: string) {
|
||||
super(() => { /**/ });
|
||||
/**/
|
||||
}
|
||||
|
||||
async log(message: string, options = {} as LogOptions): Promise<void> {
|
||||
@@ -126,10 +114,6 @@ class AdditionalLogLocation extends Disposable {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
|
||||
async dispose(): Promise<void> {
|
||||
await fs.remove(this.location);
|
||||
}
|
||||
}
|
||||
|
||||
/** The global logger for the extension. */
|
||||
|
||||
extensions/ql-vscode/src/packaging.ts (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
import { CliVersionConstraint, CodeQLCliServer } from './cli';
|
||||
import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogInformationMessage,
|
||||
} from './helpers';
|
||||
import { QuickPickItem, window } from 'vscode';
|
||||
import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
|
||||
const QUERY_PACKS = [
|
||||
'codeql/cpp-queries',
|
||||
'codeql/csharp-queries',
|
||||
'codeql/go-queries',
|
||||
'codeql/java-queries',
|
||||
'codeql/javascript-queries',
|
||||
'codeql/python-queries',
|
||||
'codeql/ruby-queries',
|
||||
'codeql/csharp-solorigate-queries',
|
||||
'codeql/javascript-experimental-atm-queries',
|
||||
];
|
||||
|
||||
/**
|
||||
* Prompts user to choose packs to download, and downloads them.
|
||||
*
|
||||
* @param cliServer The CLI server.
|
||||
* @param progress A progress callback.
|
||||
*/
|
||||
export async function handleDownloadPacks(
|
||||
cliServer: CodeQLCliServer,
|
||||
progress: ProgressCallback,
|
||||
): Promise<void> {
|
||||
if (!(await cliServer.cliConstraints.supportsPackaging())) {
|
||||
throw new Error(`Packaging commands are not supported by this version of CodeQL. Please upgrade to v${CliVersionConstraint.CLI_VERSION_WITH_PACKAGING
|
||||
} or later.`);
|
||||
}
|
||||
progress({
|
||||
message: 'Choose packs to download',
|
||||
step: 1,
|
||||
maxStep: 2,
|
||||
});
|
||||
let packsToDownload: string[] = [];
|
||||
const queryPackOption = 'Download all core query packs';
|
||||
const customPackOption = 'Download custom specified pack';
|
||||
const quickpick = await window.showQuickPick(
|
||||
[queryPackOption, customPackOption],
|
||||
{ ignoreFocusOut: true }
|
||||
);
|
||||
if (quickpick === queryPackOption) {
|
||||
packsToDownload = QUERY_PACKS;
|
||||
} else if (quickpick === customPackOption) {
|
||||
const customPack = await window.showInputBox({
|
||||
prompt:
|
||||
'Enter the <package-scope/name[@version]> of the pack to download',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (customPack) {
|
||||
packsToDownload.push(customPack);
|
||||
} else {
|
||||
throw new UserCancellationException('No pack specified.');
|
||||
}
|
||||
}
|
||||
if (packsToDownload?.length > 0) {
|
||||
progress({
|
||||
message: 'Downloading packs. This may take a few minutes.',
|
||||
step: 2,
|
||||
maxStep: 2,
|
||||
});
|
||||
try {
|
||||
await cliServer.packDownload(packsToDownload);
|
||||
void showAndLogInformationMessage('Finished downloading packs.');
|
||||
} catch (error) {
|
||||
void showAndLogErrorMessage(
|
||||
'Unable to download all packs. See log for more details.'
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
interface QLPackQuickPickItem extends QuickPickItem {
|
||||
packRootDir: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts user to choose packs to install, and installs them.
|
||||
*
|
||||
* @param cliServer The CLI server.
|
||||
* @param progress A progress callback.
|
||||
*/
|
||||
export async function handleInstallPackDependencies(
|
||||
cliServer: CodeQLCliServer,
|
||||
progress: ProgressCallback,
|
||||
): Promise<void> {
|
||||
if (!(await cliServer.cliConstraints.supportsPackaging())) {
|
||||
throw new Error(`Packaging commands are not supported by this version of CodeQL. Please upgrade to v${CliVersionConstraint.CLI_VERSION_WITH_PACKAGING
|
||||
} or later.`);
|
||||
}
|
||||
progress({
|
||||
message: 'Choose packs to install dependencies for',
|
||||
step: 1,
|
||||
maxStep: 2,
|
||||
});
|
||||
const workspacePacks = await cliServer.resolveQlpacks(getOnDiskWorkspaceFolders());
|
||||
const quickPickItems = Object.entries(workspacePacks).map<QLPackQuickPickItem>(([key, value]) => ({
|
||||
label: key,
|
||||
packRootDir: value,
|
||||
}));
|
||||
const packsToInstall = await window.showQuickPick(quickPickItems, {
|
||||
placeHolder: 'Select packs to install dependencies for',
|
||||
canPickMany: true,
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
const numberOfPacks = packsToInstall?.length || 0;
|
||||
if (packsToInstall && numberOfPacks > 0) {
|
||||
const failedPacks = [];
|
||||
const errors = [];
|
||||
// Start at 1 because we already have the first step
|
||||
let count = 1;
|
||||
for (const pack of packsToInstall) {
|
||||
count++;
|
||||
progress({
|
||||
message: `Installing dependencies for ${pack.label}`,
|
||||
step: count,
|
||||
maxStep: numberOfPacks + 1,
|
||||
});
|
||||
try {
|
||||
for (const dir of pack.packRootDir) {
|
||||
await cliServer.packInstall(dir);
|
||||
}
|
||||
} catch (error) {
|
||||
failedPacks.push(pack.label);
|
||||
errors.push(error);
|
||||
}
|
||||
}
|
||||
if (failedPacks.length > 0) {
|
||||
void logger.log(`Errors:\n${errors.join('\n')}`);
|
||||
throw new Error(
|
||||
`Unable to install pack dependencies for: ${failedPacks.join(', ')}. See log for more details.`
|
||||
);
|
||||
} else {
|
||||
void showAndLogInformationMessage('Finished installing pack dependencies.');
|
||||
}
|
||||
} else {
|
||||
throw new UserCancellationException('No packs selected.');
|
||||
}
|
||||
}
|
||||
@@ -79,11 +79,11 @@ export interface WholeFileLocation {
|
||||
|
||||
export type ResolvableLocationValue = WholeFileLocation | LineColumnLocation;
|
||||
|
||||
export type UrlValue = ResolvableLocationValue | string;
|
||||
export type UrlValue = ResolvableLocationValue | string;
|
||||
|
||||
export type ColumnValue = EntityValue | number | string | boolean;
|
||||
export type CellValue = EntityValue | number | string | boolean;
|
||||
|
||||
export type ResultRow = ColumnValue[];
|
||||
export type ResultRow = CellValue[];
|
||||
|
||||
export interface RawResultSet {
|
||||
readonly schema: ResultSetSchema;
|
||||
@@ -103,7 +103,14 @@ export function transformBqrsResultSet(
|
||||
};
|
||||
}
|
||||
|
||||
export interface DecodedBqrsChunk {
|
||||
tuples: ColumnValue[][];
|
||||
next?: number;
|
||||
type BqrsKind = 'String' | 'Float' | 'Integer' | 'String' | 'Boolean' | 'Date' | 'Entity';
|
||||
|
||||
interface BqrsColumn {
|
||||
name: string;
|
||||
kind: BqrsKind;
|
||||
}
|
||||
export interface DecodedBqrsChunk {
|
||||
tuples: CellValue[][];
|
||||
next?: number;
|
||||
columns: BqrsColumn[];
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
LineColumnLocation,
|
||||
WholeFileLocation
|
||||
} from './bqrs-cli-types';
|
||||
import { createRemoteFileRef } from './location-link-utils';
|
||||
|
||||
/**
|
||||
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
|
||||
@@ -83,8 +84,7 @@ export function isLineColumnLoc(loc: UrlValue): loc is LineColumnLocation {
|
||||
&& 'startLine' in loc
|
||||
&& 'startColumn' in loc
|
||||
&& 'endLine' in loc
|
||||
&& 'endColumn' in loc
|
||||
&& loc.endColumn > 0;
|
||||
&& 'endColumn' in loc;
|
||||
}
|
||||
|
||||
export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
|
||||
@@ -94,3 +94,45 @@ export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
|
||||
export function isStringLoc(loc: UrlValue): loc is string {
|
||||
return typeof loc === 'string';
|
||||
}
|
||||
|
||||
export function tryGetRemoteLocation(
|
||||
loc: UrlValue | undefined,
|
||||
fileLinkPrefix: string,
|
||||
sourceLocationPrefix: string | undefined,
|
||||
): string | undefined {
|
||||
const resolvableLocation = tryGetResolvableLocation(loc);
|
||||
if (!resolvableLocation) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let trimmedLocation: string;
|
||||
|
||||
// Remote locations have the following format:
|
||||
// "file:${sourceLocationPrefix}/relative/path/to/file"
|
||||
// So we need to strip off the first part to get the relative path.
|
||||
if (sourceLocationPrefix) {
|
||||
if (!resolvableLocation.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
|
||||
return undefined;
|
||||
}
|
||||
trimmedLocation = resolvableLocation.uri.replace(`file:${sourceLocationPrefix}/`, '');
|
||||
} else {
|
||||
// If the source location prefix is empty (e.g. for older remote queries), we assume that the database
|
||||
// was created on a Linux actions runner and has the format:
|
||||
// "file:/home/runner/work/<repo>/<repo>/relative/path/to/file"
|
||||
// So we need to drop the first 6 parts of the path.
|
||||
if (!resolvableLocation.uri.startsWith('file:/home/runner/work/')) {
|
||||
return undefined;
|
||||
}
|
||||
const locationParts = resolvableLocation.uri.split('/');
|
||||
trimmedLocation = locationParts.slice(6, locationParts.length).join('/');
|
||||
}
|
||||
|
||||
const fileLink = {
|
||||
fileLinkPrefix,
|
||||
filePath: trimmedLocation,
|
||||
};
|
||||
return createRemoteFileRef(
|
||||
fileLink,
|
||||
resolvableLocation.startLine,
|
||||
resolvableLocation.endLine);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
import { Disposable } from 'vscode';
|
||||
|
||||
// Avoid explicitly referencing Disposable type in vscode.
|
||||
// This file cannot have dependencies on the vscode API.
|
||||
interface Disposable {
|
||||
dispose(): any;
|
||||
}
|
||||
|
||||
export type DisposeHandler = (disposable: Disposable) => void;
|
||||
|
||||
/**
|
||||
 * Base class to make it easier to implement a `Disposable` that owns other disposable objects.
|
||||
@@ -40,21 +47,39 @@ export abstract class DisposableObject implements Disposable {
|
||||
* @param obj The object to stop tracking.
|
||||
*/
|
||||
protected disposeAndStopTracking(obj: Disposable): void {
|
||||
if (obj !== undefined) {
|
||||
this.tracked!.delete(obj);
|
||||
if (obj && this.tracked) {
|
||||
this.tracked.delete(obj);
|
||||
obj.dispose();
|
||||
}
|
||||
}
|
||||
|
||||
public dispose() {
|
||||
/**
|
||||
* Dispose this object and all contained objects
|
||||
*
|
||||
* @param disposeHandler An optional dispose handler that gets
|
||||
* passed each element to dispose. The dispose handler
|
||||
* can choose how (and if) to dispose the object. The
|
||||
* primary usage is for tests that should not dispose
|
||||
* all items of a disposable.
|
||||
*/
|
||||
public dispose(disposeHandler?: DisposeHandler) {
|
||||
if (this.tracked !== undefined) {
|
||||
for (const trackedObject of this.tracked.values()) {
|
||||
trackedObject.dispose();
|
||||
if (disposeHandler) {
|
||||
disposeHandler(trackedObject);
|
||||
} else {
|
||||
trackedObject.dispose();
|
||||
}
|
||||
}
|
||||
this.tracked = undefined;
|
||||
}
|
||||
while (this.disposables.length > 0) {
|
||||
this.disposables.pop()!.dispose();
|
||||
const disposable = this.disposables.pop()!;
|
||||
if (disposeHandler) {
|
||||
disposeHandler(disposable);
|
||||
} else {
|
||||
disposable.dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
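A hedged sketch of the test-oriented `disposeHandler` described above (the objects involved are placeholders):

// In a test, dispose everything the manager tracks except one object the
// test still wants to inspect afterwards.
manager.dispose((tracked) => {
  if (tracked !== objectUnderTest) {
    tracked.dispose();
  }
});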
|
||||
@@ -1,3 +1,4 @@
|
||||
|
||||
/**
|
||||
* helpers-pure.ts
|
||||
* ------------
|
||||
@@ -29,3 +30,28 @@ export const asyncFilter = async function <T>(arr: T[], predicate: (arg0: T) =>
|
||||
const results = await Promise.all(arr.map(predicate));
|
||||
return arr.filter((_, index) => results[index]);
|
||||
};
|
||||
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters, hyphens, underscores, or periods
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, underscores, or periods
|
||||
*/
|
||||
export const REPO_REGEX = /^[a-zA-Z0-9-_\.]+\/[a-zA-Z0-9-_\.]+$/;
|
||||
|
||||
/**
|
||||
 * This regex matches GitHub organization and user strings. These are made up of alphanumeric
 * characters, hyphens, underscores, or periods.
|
||||
*/
|
||||
export const OWNER_REGEX = /^[a-zA-Z0-9-_\.]+$/;
|
||||
|
||||
export function getErrorMessage(e: any) {
|
||||
return e instanceof Error ? e.message : String(e);
|
||||
}
|
||||
|
||||
export function getErrorStack(e: any) {
|
||||
return e instanceof Error ? e.stack ?? '' : '';
|
||||
}
|
||||
|
||||
export function asError(e: any): Error {
|
||||
return e instanceof Error ? e : new Error(String(e));
|
||||
}
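A few illustrative uses of the new helpers (the values are invented for the example):

REPO_REGEX.test('github/vscode-codeql');   // true
REPO_REGEX.test('not-a-repo');             // false: no owner/repo separator
OWNER_REGEX.test('github');                // true

try {
  JSON.parse('not json');
} catch (e) {
  // Safe for both Error instances and arbitrary thrown values.
  console.log(getErrorMessage(e), getErrorStack(e));
}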
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import * as sarif from 'sarif';
|
||||
import { AnalysisResults } from '../remote-queries/shared/analysis-result';
|
||||
import { AnalysisSummary, RemoteQueryResult } from '../remote-queries/shared/remote-query-result';
|
||||
import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationValue } from './bqrs-cli-types';
|
||||
|
||||
/**
|
||||
@@ -8,15 +10,17 @@ import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationVal
|
||||
|
||||
export const SELECT_TABLE_NAME = '#select';
|
||||
export const ALERTS_TABLE_NAME = 'alerts';
|
||||
export const GRAPH_TABLE_NAME = 'graph';
|
||||
|
||||
export type RawTableResultSet = { t: 'RawResultSet' } & RawResultSet;
|
||||
export type PathTableResultSet = {
|
||||
t: 'SarifResultSet';
|
||||
export type InterpretedResultSet<T> = {
|
||||
t: 'InterpretedResultSet';
|
||||
readonly schema: ResultSetSchema;
|
||||
name: string;
|
||||
} & Interpretation;
|
||||
interpretation: InterpretationT<T>;
|
||||
};
|
||||
|
||||
export type ResultSet = RawTableResultSet | PathTableResultSet;
|
||||
export type ResultSet = RawTableResultSet | InterpretedResultSet<InterpretationData>;
|
||||
|
||||
/**
|
||||
* Only ever show this many rows in a raw result table.
|
||||
@@ -44,18 +48,31 @@ export interface PreviousExecution {
|
||||
durationSeconds: number;
|
||||
}
|
||||
|
||||
export interface Interpretation {
|
||||
sourceLocationPrefix: string;
|
||||
numTruncatedResults: number;
|
||||
numTotalResults: number;
|
||||
export type SarifInterpretationData = {
|
||||
t: 'SarifInterpretationData';
|
||||
/**
|
||||
* sortState being undefined means don't sort, just present results in the order
|
||||
* they appear in the SARIF file.
|
||||
*/
|
||||
sortState?: InterpretedResultsSortState;
|
||||
sarif: sarif.Log;
|
||||
} & sarif.Log;
|
||||
|
||||
export type GraphInterpretationData = {
|
||||
t: 'GraphInterpretationData';
|
||||
dot: string[];
|
||||
};
|
||||
|
||||
export type InterpretationData = SarifInterpretationData | GraphInterpretationData;
|
||||
|
||||
export interface InterpretationT<T> {
|
||||
sourceLocationPrefix: string;
|
||||
numTruncatedResults: number;
|
||||
numTotalResults: number;
|
||||
data: T;
|
||||
}
|
||||
|
||||
export type Interpretation = InterpretationT<InterpretationData>;
|
||||
|
||||
export interface ResultsPaths {
|
||||
resultsPath: string;
|
||||
interpretedResultsPath: string;
|
||||
@@ -180,6 +197,11 @@ export interface OpenFileMsg {
|
||||
filePath: string;
|
||||
}
|
||||
|
||||
export interface OpenVirtualFileMsg {
|
||||
t: 'openVirtualFile';
|
||||
queryText: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Message from the results view to toggle the display of
|
||||
* query diagnostics.
|
||||
@@ -309,7 +331,7 @@ export interface SetComparisonsMessage {
|
||||
readonly currentResultSetName: string;
|
||||
readonly rows: QueryCompareResult | undefined;
|
||||
readonly message: string | undefined;
|
||||
readonly datebaseUri: string;
|
||||
readonly databaseUri: string;
|
||||
}
|
||||
|
||||
export enum DiffKind {
|
||||
@@ -350,8 +372,9 @@ export function getDefaultResultSetName(
|
||||
// Choose first available result set from the array
|
||||
return [
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
SELECT_TABLE_NAME,
|
||||
resultSetNames[0],
|
||||
resultSetNames[0]
|
||||
].filter((resultSetName) => resultSetNames.includes(resultSetName))[0];
|
||||
}
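To make the precedence above concrete, here is a small restatement of the selection logic with hand-worked examples (a sketch, not the module's exported code):

```typescript
// Same precedence as getDefaultResultSetName: alerts, then graph,
// then #select, then whatever result set happens to come first.
function pickDefault(resultSetNames: string[]): string {
  return ['alerts', 'graph', '#select', resultSetNames[0]]
    .filter((name) => resultSetNames.includes(name))[0];
}

pickDefault(['#select', 'graph']); // => 'graph'
pickDefault(['edges', 'nodes']);   // => 'edges' (falls back to the first available set)
```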
|
||||
|
||||
@@ -364,3 +387,55 @@ export interface ParsedResultSets {
|
||||
resultSetNames: string[];
|
||||
resultSet: ResultSet;
|
||||
}
|
||||
|
||||
export type FromRemoteQueriesMessage =
|
||||
| RemoteQueryLoadedMessage
|
||||
| RemoteQueryErrorMessage
|
||||
| OpenFileMsg
|
||||
| OpenVirtualFileMsg
|
||||
| RemoteQueryDownloadAnalysisResultsMessage
|
||||
| RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
| RemoteQueryExportResultsMessage
|
||||
| CopyRepoListMessage;
|
||||
|
||||
export type ToRemoteQueriesMessage =
|
||||
| SetRemoteQueryResultMessage
|
||||
| SetAnalysesResultsMessage;
|
||||
|
||||
export interface RemoteQueryLoadedMessage {
|
||||
t: 'remoteQueryLoaded';
|
||||
}
|
||||
|
||||
export interface SetRemoteQueryResultMessage {
|
||||
t: 'setRemoteQueryResult';
|
||||
queryResult: RemoteQueryResult
|
||||
}
|
||||
|
||||
export interface SetAnalysesResultsMessage {
|
||||
t: 'setAnalysesResults';
|
||||
analysesResults: AnalysisResults[];
|
||||
}
|
||||
|
||||
export interface RemoteQueryErrorMessage {
|
||||
t: 'remoteQueryError';
|
||||
error: string;
|
||||
}
|
||||
|
||||
export interface RemoteQueryDownloadAnalysisResultsMessage {
|
||||
t: 'remoteQueryDownloadAnalysisResults';
|
||||
analysisSummary: AnalysisSummary
|
||||
}
|
||||
|
||||
export interface RemoteQueryDownloadAllAnalysesResultsMessage {
|
||||
t: 'remoteQueryDownloadAllAnalysesResults';
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
}
|
||||
|
||||
export interface RemoteQueryExportResultsMessage {
|
||||
t: 'remoteQueryExportResults';
|
||||
}
|
||||
|
||||
export interface CopyRepoListMessage {
|
||||
t: 'copyRepoList';
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
extensions/ql-vscode/src/pure/location-link-utils.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
import { FileLink } from '../remote-queries/shared/analysis-result';
|
||||
|
||||
export function createRemoteFileRef(
|
||||
fileLink: FileLink,
|
||||
startLine?: number,
|
||||
endLine?: number
|
||||
): string {
|
||||
if (startLine && endLine) {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}-L${endLine}`;
|
||||
} else if (startLine) {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}`;
|
||||
} else {
|
||||
return `${fileLink.fileLinkPrefix}/${fileLink.filePath}`;
|
||||
}
|
||||
}
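As an illustration of the three branches above (the `FileLink` values are invented):

```typescript
const fileLink = {
  fileLinkPrefix: 'https://github.com/github/codeql/blob/abc123',
  filePath: 'src/extension.ts'
};

createRemoteFileRef(fileLink, 10, 12);
// => 'https://github.com/github/codeql/blob/abc123/src/extension.ts#L10-L12'
createRemoteFileRef(fileLink, 10);
// => 'https://github.com/github/codeql/blob/abc123/src/extension.ts#L10'
createRemoteFileRef(fileLink);
// => 'https://github.com/github/codeql/blob/abc123/src/extension.ts'
```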
|
||||
@@ -262,7 +262,7 @@ export interface CompilationTarget {
|
||||
/**
|
||||
* Compile as a normal query
|
||||
*/
|
||||
query?: {};
|
||||
query?: Record<string, never>;
|
||||
/**
|
||||
* Compile as a quick evaluation
|
||||
*/
|
||||
@@ -646,6 +646,35 @@ export interface ClearCacheParams {
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to start a new structured log
|
||||
*/
|
||||
export interface StartLogParams {
|
||||
/**
|
||||
* The dataset for which we want to start a new structured log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path where we want to place the new structured log
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to terminate a structured log
|
||||
*/
|
||||
export interface EndLogParams {
|
||||
/**
|
||||
* The dataset for which we want to terminate the log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path of the log to terminate; this is a no-op if we aren't logging here
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
@@ -682,6 +711,26 @@ export interface ClearCacheResult {
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of starting a new structured log.
|
||||
*/
|
||||
export interface StartLogResult {
|
||||
/**
|
||||
* A user-friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of terminating a structured log.
|
||||
*/
|
||||
export interface EndLogResult {
|
||||
/**
|
||||
* A user-friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for running a set of queries
|
||||
*/
|
||||
@@ -711,6 +760,11 @@ export interface EvaluateQueriesParams {
|
||||
|
||||
export type TemplateDefinitions = { [key: string]: TemplateSource }
|
||||
|
||||
export interface MlModel {
|
||||
/** A URI pointing to the root directory of the model. */
|
||||
uri: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A single query that should be run
|
||||
*/
|
||||
@@ -744,6 +798,11 @@ export interface QueryToRun {
|
||||
* map should be set to the empty set or give an error.
|
||||
*/
|
||||
allowUnknownTemplates: boolean;
|
||||
/**
|
||||
* The list of ML models that should be made available
|
||||
* when evaluating the query.
|
||||
*/
|
||||
availableMlModels?: MlModel[];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -826,7 +885,7 @@ export interface ResultSet {
|
||||
/**
|
||||
* The type returned when the evaluation is complete
|
||||
*/
|
||||
export type EvaluationComplete = {};
|
||||
export type EvaluationComplete = Record<string, never>;
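The change from `{}` to `Record<string, never>` in this file is a type-safety tightening: `{}` is satisfied by almost any value, while `Record<string, never>` only admits an empty object. A short illustration (not from this diff):

```typescript
type Loose = {};
type Empty = Record<string, never>;

const a: Loose = { anything: 1 }; // compiles, which is rarely what was intended
const b: Empty = {};              // compiles
// const c: Empty = { anything: 1 }; // error: type 'number' is not assignable to type 'never'
```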
|
||||
|
||||
/**
|
||||
* The result of a single query
|
||||
@@ -1008,6 +1067,16 @@ export const compileUpgrade = new rpc.RequestType<WithProgressId<CompileUpgradeP
|
||||
*/
|
||||
export const compileUpgradeSequence = new rpc.RequestType<WithProgressId<CompileUpgradeSequenceParams>, CompileUpgradeSequenceResult, void, void>('compilation/compileUpgradeSequence');
|
||||
|
||||
/**
|
||||
* Start a new structured log in the evaluator, terminating the previous one if it exists
|
||||
*/
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
|
||||
/**
|
||||
* Terminate a structured log in the evaluator. This is a no-op if we aren't logging to the given location.
|
||||
*/
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
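A hedged sketch of how a client might drive these two requests; the `qs.sendRequest` helper matches the query server client elsewhere in this diff, but the `dataset`, `token`, `progress`, and `logger` values here are placeholders:

```typescript
const logPath = '/tmp/structured-evaluator-log.json'; // placeholder path

// Begin a structured evaluator log for the dataset, run queries, then end it.
const started = await qs.sendRequest(startLog, { db: dataset, logPath }, token, progress);
void logger.log(started.outcomeMessage);

// ... evaluate queries ...

const ended = await qs.sendRequest(endLog, { db: dataset, logPath }, token, progress);
void logger.log(ended.outcomeMessage);
```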
|
||||
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
*/
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as Sarif from 'sarif';
|
||||
import { HighlightedRegion } from '../remote-queries/shared/analysis-result';
|
||||
import { ResolvableLocationValue } from './bqrs-cli-types';
|
||||
|
||||
export interface SarifLink {
|
||||
@@ -127,35 +128,111 @@ export function parseSarifLocation(
|
||||
userVisibleFile
|
||||
} as ParsedSarifLocation;
|
||||
} else {
|
||||
const region = physicalLocation.region;
|
||||
// We assume that the SARIF we're given always has startLine
|
||||
// This is not mandated by the SARIF spec, but should be true of
|
||||
// SARIF output by our own tools.
|
||||
const startLine = region.startLine!;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// We also assume that our tools will always supply `endColumn` field, which is
|
||||
// fortunate, since the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = region.endColumn! - 1;
|
||||
const region = parseSarifRegion(physicalLocation.region);
|
||||
|
||||
return {
|
||||
uri: effectiveLocation,
|
||||
userVisibleFile,
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn,
|
||||
...region
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function parseSarifRegion(
|
||||
region: Sarif.Region
|
||||
): {
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
startColumn: number,
|
||||
endColumn: number
|
||||
} {
|
||||
// The SARIF we're given should have a startLine, but we
|
||||
// fall back to 1, just in case something has gone wrong.
|
||||
const startLine = region.startLine ?? 1;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// Our tools should always supply `endColumn` field, which is fortunate, since
|
||||
// the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code. We fall back to 1,
|
||||
// just in case something has gone wrong.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = (region.endColumn ?? 1) - 1;
|
||||
|
||||
return {
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn
|
||||
};
|
||||
}
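A quick illustration of the defaulting behaviour above (expected values worked out by hand, not test output from this repository):

```typescript
parseSarifRegion({ startLine: 5, endColumn: 12 });
// => { startLine: 5, startColumn: 1, endLine: 5, endColumn: 11 }

parseSarifRegion({});
// => { startLine: 1, startColumn: 1, endLine: 1, endColumn: 0 }
// Everything fell back to its default; endColumn is off by one on purpose,
// to match how VS Code counts columns in selections.
```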
|
||||
|
||||
export function isNoLocation(loc: ParsedSarifLocation): loc is NoLocation {
|
||||
return 'hint' in loc;
|
||||
}
|
||||
|
||||
// Some helpers for highlighting specific regions from a SARIF code snippet
|
||||
|
||||
/**
|
||||
* Checks whether a particular line (determined by its line number in the original file)
|
||||
* is part of the highlighted region of a SARIF code snippet.
|
||||
*/
|
||||
export function shouldHighlightLine(
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): boolean {
|
||||
if (lineNumber < highlightedRegion.startLine) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (highlightedRegion.endLine == undefined) {
|
||||
return lineNumber == highlightedRegion.startLine;
|
||||
}
|
||||
|
||||
return lineNumber <= highlightedRegion.endLine;
|
||||
}
|
||||
|
||||
/**
|
||||
* A line of code split into: plain text before the highlighted section, the highlighted
|
||||
* text itself, and plain text after the highlighted section.
|
||||
*/
|
||||
export interface PartiallyHighlightedLine {
|
||||
plainSection1: string;
|
||||
highlightedSection: string;
|
||||
plainSection2: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a line of code into the highlighted and non-highlighted sections.
|
||||
*/
|
||||
export function parseHighlightedLine(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): PartiallyHighlightedLine {
|
||||
const isSingleLineHighlight = highlightedRegion.endLine === undefined;
|
||||
const isFirstHighlightedLine = lineNumber === highlightedRegion.startLine;
|
||||
const isLastHighlightedLine = lineNumber === highlightedRegion.endLine;
|
||||
|
||||
const highlightStartColumn = isSingleLineHighlight
|
||||
? highlightedRegion.startColumn
|
||||
: isFirstHighlightedLine
|
||||
? highlightedRegion.startColumn
|
||||
: 0;
|
||||
|
||||
const highlightEndColumn = isSingleLineHighlight
|
||||
? highlightedRegion.endColumn
|
||||
: isLastHighlightedLine
|
||||
? highlightedRegion.endColumn
|
||||
: line.length + 1;
|
||||
|
||||
const plainSection1 = line.substring(0, highlightStartColumn - 1);
|
||||
const highlightedSection = line.substring(highlightStartColumn - 1, highlightEndColumn - 1);
|
||||
const plainSection2 = line.substring(highlightEndColumn - 1, line.length);
|
||||
|
||||
return { plainSection1, highlightedSection, plainSection2 };
|
||||
}
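For example, a single-line highlight over columns 7–12 splits a line like this (values worked out by hand; the region literal only uses the fields the function reads):

```typescript
parseHighlightedLine('const answer = 42;', 3, {
  startLine: 3,
  startColumn: 7,
  endColumn: 13
});
// => {
//   plainSection1: 'const ',
//   highlightedSection: 'answer',
//   plainSection2: ' = 42;'
// }
```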
|
||||
|
||||
extensions/ql-vscode/src/pure/time.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
/*
|
||||
* Contains an assortment of helper constants and functions for working with time, dates, and durations.
|
||||
*/
|
||||
|
||||
export const ONE_MINUTE_IN_MS = 1000 * 60;
|
||||
export const ONE_HOUR_IN_MS = ONE_MINUTE_IN_MS * 60;
|
||||
export const TWO_HOURS_IN_MS = ONE_HOUR_IN_MS * 2;
|
||||
export const THREE_HOURS_IN_MS = ONE_HOUR_IN_MS * 3;
|
||||
export const ONE_DAY_IN_MS = ONE_HOUR_IN_MS * 24;
|
||||
|
||||
// These are approximations
|
||||
export const ONE_MONTH_IN_MS = ONE_DAY_IN_MS * 30;
|
||||
export const ONE_YEAR_IN_MS = ONE_DAY_IN_MS * 365;
|
||||
|
||||
const durationFormatter = new Intl.RelativeTimeFormat('en', {
|
||||
numeric: 'auto',
|
||||
});
|
||||
|
||||
/**
|
||||
* Converts a number of milliseconds into a human-readable string with units, indicating a relative time in the past or future.
|
||||
*
|
||||
* @param relativeTimeMillis The duration in milliseconds. A negative number indicates a duration in the
* past, and a positive number a duration in the future.
|
||||
* @returns A humanized duration. For example, "in 2 minutes", "2 minutes ago", "yesterday", or "tomorrow".
|
||||
*/
|
||||
export function humanizeRelativeTime(relativeTimeMillis?: number) {
|
||||
if (relativeTimeMillis === undefined) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (Math.abs(relativeTimeMillis) < ONE_HOUR_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_MINUTE_IN_MS), 'minute');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_DAY_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_HOUR_IN_MS), 'hour');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_MONTH_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_DAY_IN_MS), 'day');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_YEAR_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_MONTH_IN_MS), 'month');
|
||||
} else {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_YEAR_IN_MS), 'year');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a number of milliseconds into a human-readable string with units, indicating an amount of time.
|
||||
* Negative numbers have no meaning and are considered to be "Less than a minute".
|
||||
*
|
||||
* @param millis The number of milliseconds to convert.
|
||||
* @returns A humanized duration. For example, "2 minutes", "2 hours", "2 days", or "2 months".
|
||||
*/
|
||||
export function humanizeUnit(millis?: number): string {
|
||||
// assume a blank or empty string is a zero
|
||||
// assume anything less than 0 is a zero
|
||||
if (!millis || millis < ONE_MINUTE_IN_MS) {
|
||||
return 'Less than a minute';
|
||||
}
|
||||
let unit: string;
|
||||
let unitDiff: number;
|
||||
if (millis < ONE_HOUR_IN_MS) {
|
||||
unit = 'minute';
|
||||
unitDiff = Math.floor(millis / ONE_MINUTE_IN_MS);
|
||||
} else if (millis < ONE_DAY_IN_MS) {
|
||||
unit = 'hour';
|
||||
unitDiff = Math.floor(millis / ONE_HOUR_IN_MS);
|
||||
} else if (millis < ONE_MONTH_IN_MS) {
|
||||
unit = 'day';
|
||||
unitDiff = Math.floor(millis / ONE_DAY_IN_MS);
|
||||
} else if (millis < ONE_YEAR_IN_MS) {
|
||||
unit = 'month';
|
||||
unitDiff = Math.floor(millis / ONE_MONTH_IN_MS);
|
||||
} else {
|
||||
unit = 'year';
|
||||
unitDiff = Math.floor(millis / ONE_YEAR_IN_MS);
|
||||
}
|
||||
|
||||
return createFormatter(unit).format(unitDiff);
|
||||
}
|
||||
|
||||
function createFormatter(unit: string) {
|
||||
return Intl.NumberFormat('en-US', {
|
||||
style: 'unit',
|
||||
unit,
|
||||
unitDisplay: 'long'
|
||||
});
|
||||
}
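A few sample inputs and outputs for the helpers above (worked out by hand from the thresholds; the exact wording comes from the `Intl` formatters at runtime):

```typescript
humanizeRelativeTime(-5 * ONE_MINUTE_IN_MS); // e.g. '5 minutes ago'
humanizeRelativeTime(2 * ONE_DAY_IN_MS);     // e.g. 'in 2 days'

humanizeUnit(30 * 1000);                     // 'Less than a minute'
humanizeUnit(90 * ONE_MINUTE_IN_MS);         // '1 hour'
humanizeUnit(3 * ONE_DAY_IN_MS);             // '3 days'
```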
|
||||
@@ -167,10 +167,12 @@ export class QLTestDiscovery extends Discovery<QLTestDiscoveryResults> {
|
||||
protected update(results: QLTestDiscoveryResults): void {
|
||||
this._testDirectory = results.testDirectory;
|
||||
|
||||
// Watch for changes to any `.ql` or `.qlref` file in any of the QL packs that contain tests.
|
||||
this.watcher.clear();
|
||||
// Watch for changes to any `.ql` or `.qlref` file in any of the QL packs that contain tests.
|
||||
this.watcher.addWatch(new RelativePattern(results.watchPath, '**/*.{ql,qlref}'));
|
||||
this._onDidChangeTests.fire();
|
||||
// need to explicitly watch for changes to directories themselves.
|
||||
this.watcher.addWatch(new RelativePattern(results.watchPath, '**/'));
|
||||
this._onDidChangeTests.fire(undefined);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
extensions/ql-vscode/src/query-history-scrubber.ts (new file, 135 lines)
@@ -0,0 +1,135 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { Disposable, ExtensionContext } from 'vscode';
|
||||
import { logger } from './logging';
|
||||
|
||||
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
|
||||
|
||||
type Counter = {
|
||||
increment: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers an interval timer that will periodically check for queries old enough
|
||||
* to be deleted.
|
||||
*
|
||||
* Note that this scrubber will clean all queries from all workspaces. It should not
|
||||
* run too often and it should only run from one workspace at a time.
|
||||
*
|
||||
* Generally, `wakeInterval` should be significantly shorter than `throttleTime`.
|
||||
*
|
||||
* @param wakeInterval How often to check to see if the job should run.
|
||||
* @param throttleTime How often to actually run the job.
|
||||
* @param maxQueryTime The maximum age of a query before it is ready for deletion.
|
||||
* @param queryDirectory The directory containing all queries.
|
||||
* @param ctx The extension context.
|
||||
*/
|
||||
export function registerQueryHistoryScubber(
|
||||
wakeInterval: number,
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
|
||||
// optional counter to keep track of how many times the scrubber has run
|
||||
counter?: Counter
|
||||
): Disposable {
|
||||
const deregister = setInterval(scrubQueries, wakeInterval, throttleTime, maxQueryTime, queryDirectory, ctx, counter);
|
||||
|
||||
return {
|
||||
dispose: () => {
|
||||
clearInterval(deregister);
|
||||
}
|
||||
};
|
||||
}
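A hedged example of wiring the scrubber up during extension activation; the interval choices and the `queryStorageDir` variable are assumptions, and only the parameter order comes from the signature above:

```typescript
// Check hourly, scrub at most once a day, delete queries older than 30 days.
const scrubber = registerQueryHistoryScubber(
  ONE_HOUR_IN_MS,      // wakeInterval: how often to wake up and check
  ONE_DAY_IN_MS,       // throttleTime: how often the scrub actually runs
  30 * ONE_DAY_IN_MS,  // maxQueryTime: queries older than this are deleted
  queryStorageDir,     // assumed variable holding the query directory
  ctx
);
ctx.subscriptions.push(scrubber);
```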
|
||||
|
||||
async function scrubQueries(
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
ctx: ExtensionContext,
|
||||
counter?: Counter
|
||||
) {
|
||||
const lastScrubTime = ctx.globalState.get<number>(LAST_SCRUB_TIME_KEY);
|
||||
const now = Date.now();
|
||||
|
||||
// If we have never scrubbed before, or if the last scrub was more than `throttleTime` ago,
|
||||
// then scrub again.
|
||||
if (lastScrubTime === undefined || now - lastScrubTime >= throttleTime) {
|
||||
await ctx.globalState.update(LAST_SCRUB_TIME_KEY, now);
|
||||
|
||||
let scrubCount = 0; // total number of directories deleted
|
||||
try {
|
||||
counter?.increment();
|
||||
void logger.log('Scrubbing query directory. Removing old queries.');
|
||||
if (!(await fs.pathExists(queryDirectory))) {
|
||||
void logger.log(`Cannot scrub. Query directory does not exist: ${queryDirectory}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const baseNames = await fs.readdir(queryDirectory);
|
||||
const errors: string[] = [];
|
||||
for (const baseName of baseNames) {
|
||||
const dir = path.join(queryDirectory, baseName);
|
||||
const scrubResult = await scrubDirectory(dir, now, maxQueryTime);
|
||||
if (scrubResult.errorMsg) {
|
||||
errors.push(scrubResult.errorMsg);
|
||||
}
|
||||
if (scrubResult.deleted) {
|
||||
scrubCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length) {
|
||||
throw new Error(os.EOL + errors.join(os.EOL));
|
||||
}
|
||||
} catch (e) {
|
||||
void logger.log(`Error while scrubbing queries: ${e}`);
|
||||
} finally {
|
||||
void logger.log(`Scrubbed ${scrubCount} old queries.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function scrubDirectory(dir: string, now: number, maxQueryTime: number): Promise<{
|
||||
errorMsg?: string,
|
||||
deleted: boolean
|
||||
}> {
|
||||
const timestampFile = path.join(dir, 'timestamp');
|
||||
try {
|
||||
let deleted = true;
|
||||
if (!(await fs.stat(dir)).isDirectory()) {
|
||||
void logger.log(` ${dir} is not a directory. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.pathExists(timestampFile))) {
|
||||
void logger.log(` ${dir} has no timestamp file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.stat(timestampFile)).isFile()) {
|
||||
void logger.log(` ${timestampFile} is not a file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
const timestampText = await fs.readFile(timestampFile, 'utf8');
|
||||
const timestamp = parseInt(timestampText, 10);
|
||||
|
||||
if (Number.isNaN(timestamp)) {
|
||||
void logger.log(` ${dir} has invalid timestamp '${timestampText}'. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (now - timestamp > maxQueryTime) {
|
||||
void logger.log(` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
void logger.log(` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`);
|
||||
deleted = false;
|
||||
}
|
||||
}
|
||||
return {
|
||||
deleted
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
errorMsg: ` Could not delete '${dir}': ${err}`,
|
||||
deleted: false
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,28 +1,62 @@
|
||||
import { env } from 'vscode';
|
||||
import { CancellationTokenSource, env } from 'vscode';
|
||||
|
||||
import { QueryWithResults, tmpDir, QueryInfo } from './run-queries';
|
||||
import { QueryWithResults, QueryEvaluationInfo } from './run-queries';
|
||||
import * as messages from './pure/messages';
|
||||
import * as cli from './cli';
|
||||
import * as sarif from 'sarif';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import { RawResultsSortState, SortedResultSetInfo, DatabaseInfo, QueryMetadata, InterpretedResultsSortState, ResultsPaths } from './pure/interface-types';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { QueryHistoryItemOptions } from './query-history';
|
||||
import {
|
||||
RawResultsSortState,
|
||||
SortedResultSetInfo,
|
||||
QueryMetadata,
|
||||
InterpretedResultsSortState,
|
||||
ResultsPaths,
|
||||
SarifInterpretationData,
|
||||
GraphInterpretationData
|
||||
} from './pure/interface-types';
|
||||
import { DatabaseInfo } from './pure/interface-types';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
|
||||
export class CompletedQuery implements QueryWithResults {
|
||||
readonly time: string;
|
||||
readonly query: QueryInfo;
|
||||
/**
|
||||
* query-results.ts
|
||||
* ----------------
|
||||
*
|
||||
* A collection of classes and functions that collectively
|
||||
* manage query results.
|
||||
*/
|
||||
|
||||
/**
|
||||
* A description of the information about a query
|
||||
* that is available before results are populated.
|
||||
*/
|
||||
export interface InitialQueryInfo {
|
||||
userSpecifiedLabel?: string; // if missing, use a default label
|
||||
readonly queryText: string; // text of the selected file, or the selected text when doing quick eval
|
||||
readonly isQuickQuery: boolean;
|
||||
readonly isQuickEval: boolean;
|
||||
readonly quickEvalPosition?: messages.Position;
|
||||
readonly queryPath: string;
|
||||
readonly databaseInfo: DatabaseInfo
|
||||
readonly start: Date;
|
||||
readonly id: string; // unique id for this query.
|
||||
}
|
||||
|
||||
export class CompletedQueryInfo implements QueryWithResults {
|
||||
readonly query: QueryEvaluationInfo;
|
||||
readonly result: messages.EvaluationResult;
|
||||
readonly database: DatabaseInfo;
|
||||
readonly logFileLocation?: string;
|
||||
options: QueryHistoryItemOptions;
|
||||
resultCount: number;
|
||||
|
||||
/**
|
||||
* This dispose method is called when the query is removed from the history view.
|
||||
*/
|
||||
dispose: () => void;
|
||||
|
||||
/**
|
||||
* Map from result set name to SortedResultSetInfo.
|
||||
*/
|
||||
sortedResultsInfo: Map<string, SortedResultSetInfo>;
|
||||
sortedResultsInfo: Record<string, SortedResultSetInfo>;
|
||||
|
||||
/**
|
||||
* How we're currently sorting alerts. This is not mere interface
|
||||
@@ -33,38 +67,40 @@ export class CompletedQuery implements QueryWithResults {
|
||||
*/
|
||||
interpretedResultsSortState: InterpretedResultsSortState | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link FullQueryInfo.slurp} method, we create a CompletedQueryInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
evaluation: QueryWithResults,
|
||||
public config: QueryHistoryConfig,
|
||||
) {
|
||||
this.query = evaluation.query;
|
||||
this.result = evaluation.result;
|
||||
this.database = evaluation.database;
|
||||
this.logFileLocation = evaluation.logFileLocation;
|
||||
this.options = evaluation.options;
|
||||
|
||||
// Use the dispose method from the evaluation.
|
||||
// The dispose will clean up any additional log locations that this
|
||||
// query may have created.
|
||||
this.dispose = evaluation.dispose;
|
||||
|
||||
this.time = new Date().toLocaleString(env.language);
|
||||
this.sortedResultsInfo = new Map();
|
||||
this.sortedResultsInfo = {};
|
||||
this.resultCount = 0;
|
||||
}
|
||||
|
||||
get databaseName(): string {
|
||||
return this.database.name;
|
||||
}
|
||||
get queryName(): string {
|
||||
return getQueryName(this.query);
|
||||
setResultCount(value: number) {
|
||||
this.resultCount = value;
|
||||
}
|
||||
|
||||
get statusString(): string {
|
||||
switch (this.result.resultType) {
|
||||
case messages.QueryResultType.CANCELLATION:
|
||||
return `cancelled after ${this.result.evaluationTime / 1000} seconds`;
|
||||
return `cancelled after ${Math.round(this.result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.OOM:
|
||||
return 'out of memory';
|
||||
case messages.QueryResultType.SUCCESS:
|
||||
return `finished in ${this.result.evaluationTime / 1000} seconds`;
|
||||
return `finished in ${Math.round(this.result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.TIMEOUT:
|
||||
return `timed out after ${this.result.evaluationTime / 1000} seconds`;
|
||||
return `timed out after ${Math.round(this.result.evaluationTime / 1000)} seconds`;
|
||||
case messages.QueryResultType.OTHER_ERROR:
|
||||
default:
|
||||
return this.result.message ? `failed: ${this.result.message}` : 'failed';
|
||||
@@ -75,50 +111,26 @@ export class CompletedQuery implements QueryWithResults {
|
||||
if (!useSorted) {
|
||||
return this.query.resultsPaths.resultsPath;
|
||||
}
|
||||
return this.sortedResultsInfo.get(selectedTable)?.resultsPath
|
||||
return this.sortedResultsInfo[selectedTable]?.resultsPath
|
||||
|| this.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
interpolate(template: string): string {
|
||||
const { databaseName, queryName, time, statusString } = this;
|
||||
const replacements: { [k: string]: string } = {
|
||||
t: time,
|
||||
q: queryName,
|
||||
d: databaseName,
|
||||
s: statusString,
|
||||
'%': '%',
|
||||
};
|
||||
return template.replace(/%(.)/g, (match, key) => {
|
||||
const replacement = replacements[key];
|
||||
return replacement !== undefined ? replacement : match;
|
||||
});
|
||||
}
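For instance, a history label format string using the `%t`/`%q`/`%d`/`%s` placeholders from the replacements map above would expand like this (the field values are invented for illustration):

```typescript
// With time = '1/1/2022, 10:00:00', queryName = 'Empty block',
// databaseName = 'my-db' and statusString = 'finished in 3 seconds':
completedQuery.interpolate('[%t] %q on %d (%s)');
// => '[1/1/2022, 10:00:00] Empty block on my-db (finished in 3 seconds)'
```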
|
||||
|
||||
getLabel(): string {
|
||||
return this.options?.label
|
||||
|| this.config.format;
|
||||
}
|
||||
|
||||
get didRunSuccessfully(): boolean {
|
||||
return this.result.resultType === messages.QueryResultType.SUCCESS;
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.interpolate(this.getLabel());
|
||||
}
|
||||
|
||||
async updateSortState(
|
||||
server: cli.CodeQLCliServer,
|
||||
resultSetName: string,
|
||||
sortState?: RawResultsSortState
|
||||
): Promise<void> {
|
||||
if (sortState === undefined) {
|
||||
this.sortedResultsInfo.delete(resultSetName);
|
||||
delete this.sortedResultsInfo[resultSetName];
|
||||
return;
|
||||
}
|
||||
|
||||
const sortedResultSetInfo: SortedResultSetInfo = {
|
||||
resultsPath: path.join(tmpDir.name, `sortedResults${this.query.queryID}-${resultSetName}.bqrs`),
|
||||
resultsPath: this.query.getSortedResultSetPath(resultSetName),
|
||||
sortState
|
||||
};
|
||||
|
||||
@@ -129,7 +141,7 @@ export class CompletedQuery implements QueryWithResults {
|
||||
[sortState.columnIndex],
|
||||
[sortState.sortDirection]
|
||||
);
|
||||
this.sortedResultsInfo.set(resultSetName, sortedResultSetInfo);
|
||||
this.sortedResultsInfo[resultSetName] = sortedResultSetInfo;
|
||||
}
|
||||
|
||||
async updateInterpretedSortState(sortState?: InterpretedResultsSortState): Promise<void> {
|
||||
@@ -139,48 +151,159 @@ export class CompletedQuery implements QueryWithResults {
|
||||
|
||||
|
||||
/**
|
||||
* Gets a human-readable name for an evaluated query.
|
||||
* Uses metadata if it exists, and defaults to the query file name.
|
||||
* Call cli command to interpret SARIF results.
|
||||
*/
|
||||
export function getQueryName(query: QueryInfo) {
|
||||
// Queries run through quick evaluation are not usually the entire query file.
|
||||
// Label them differently and include the line numbers.
|
||||
if (query.quickEvalPosition !== undefined) {
|
||||
const { line, endLine, fileName } = query.quickEvalPosition;
|
||||
const lineInfo = line === endLine ? `${line}` : `${line}-${endLine}`;
|
||||
return `Quick evaluation of ${path.basename(fileName)}:${lineInfo}`;
|
||||
} else if (query.metadata?.name) {
|
||||
return query.metadata.name;
|
||||
} else {
|
||||
return path.basename(query.program.queryPath);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Call cli command to interpret results.
|
||||
*/
|
||||
export async function interpretResults(
|
||||
server: cli.CodeQLCliServer,
|
||||
export async function interpretResultsSarif(
|
||||
cli: cli.CodeQLCliServer,
|
||||
metadata: QueryMetadata | undefined,
|
||||
resultsPaths: ResultsPaths,
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<sarif.Log> {
|
||||
): Promise<SarifInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
return JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8'));
|
||||
return { ...JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8')), t: 'SarifInterpretationData' };
|
||||
}
|
||||
const res = await cli.interpretBqrsSarif(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { ...res, t: 'SarifInterpretationData' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Call cli command to interpret graph results.
|
||||
*/
|
||||
export async function interpretGraphResults(
|
||||
cli: cli.CodeQLCliServer,
|
||||
metadata: QueryMetadata | undefined,
|
||||
resultsPaths: ResultsPaths,
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<GraphInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
const dot = await cli.readDotFiles(interpretedResultsPath);
|
||||
return { dot, t: 'GraphInterpretationData' };
|
||||
}
|
||||
|
||||
const dot = await cli.interpretBqrsGraph(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { dot, t: 'GraphInterpretationData' };
|
||||
}
|
||||
|
||||
export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
|
||||
if (metadata === undefined) {
|
||||
throw new Error('Can\'t interpret results without query metadata');
|
||||
}
|
||||
let { kind, id, scored } = metadata;
|
||||
if (kind === undefined) {
|
||||
if (metadata.kind === undefined) {
|
||||
throw new Error('Can\'t interpret results without query metadata including kind');
|
||||
}
|
||||
if (id === undefined) {
|
||||
if (metadata.id === undefined) {
|
||||
// Interpretation per se doesn't really require an id, but the
|
||||
// SARIF format does, so in the absence of one, we use a dummy id.
|
||||
id = 'dummy-id';
|
||||
metadata.id = 'dummy-id';
|
||||
}
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used in Interface and Compare-Interface for queries that we know have been completed.
|
||||
*/
|
||||
export type CompletedLocalQueryInfo = LocalQueryInfo & {
|
||||
completedQuery: CompletedQueryInfo
|
||||
};
|
||||
|
||||
export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem;
|
||||
|
||||
export class LocalQueryInfo {
|
||||
readonly t = 'local';
|
||||
|
||||
public failureReason: string | undefined;
|
||||
public completedQuery: CompletedQueryInfo | undefined;
|
||||
public evalLogLocation: string | undefined;
|
||||
public evalLogSummaryLocation: string | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a FullQueryInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
public readonly initialInfo: InitialQueryInfo,
|
||||
private cancellationSource?: CancellationTokenSource // used to cancel in progress queries
|
||||
) { /**/ }
|
||||
|
||||
cancel() {
|
||||
this.cancellationSource?.cancel();
|
||||
// query is no longer in progress, can delete the cancellation token source
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
get startTime() {
|
||||
return this.initialInfo.start.toLocaleString(env.language);
|
||||
}
|
||||
|
||||
get userSpecifiedLabel() {
|
||||
return this.initialInfo.userSpecifiedLabel;
|
||||
}
|
||||
|
||||
set userSpecifiedLabel(label: string | undefined) {
|
||||
this.initialInfo.userSpecifiedLabel = label;
|
||||
}
|
||||
|
||||
/**
|
||||
* The query's file name, unless it is a quick eval.
|
||||
* Queries run through quick evaluation are not usually the entire query file.
|
||||
* Label them differently and include the line numbers.
|
||||
*/
|
||||
getQueryFileName() {
|
||||
if (this.initialInfo.quickEvalPosition) {
|
||||
const { line, endLine, fileName } = this.initialInfo.quickEvalPosition;
|
||||
const lineInfo = line === endLine ? `${line}` : `${line}-${endLine}`;
|
||||
return `${path.basename(fileName)}:${lineInfo}`;
|
||||
}
|
||||
return path.basename(this.initialInfo.queryPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Three cases:
|
||||
*
|
||||
* - If this is a completed query, use the query name from the query metadata.
|
||||
* - If this is a quick eval, return the query name with a prefix
|
||||
* - Otherwise, return the query file name.
|
||||
*/
|
||||
getQueryName() {
|
||||
if (this.initialInfo.quickEvalPosition) {
|
||||
return 'Quick evaluation of ' + this.getQueryFileName();
|
||||
} else if (this.completedQuery?.query.metadata?.name) {
|
||||
return this.completedQuery?.query.metadata?.name;
|
||||
} else {
|
||||
return this.getQueryFileName();
|
||||
}
|
||||
}
|
||||
|
||||
get completed(): boolean {
|
||||
return !!this.completedQuery;
|
||||
}
|
||||
|
||||
completeThisQuery(info: QueryWithResults) {
|
||||
this.completedQuery = new CompletedQueryInfo(info);
|
||||
|
||||
// dispose of the cancellation token source and also ensure the source is not serialized as JSON
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* If there is a failure reason, then this query has failed.
|
||||
* If there is no completed query, then this query is still running.
|
||||
* If there is a completed query, then check if didRunSuccessfully.
|
||||
* If true, then this query has completed successfully, otherwise it has failed.
|
||||
*/
|
||||
get status(): QueryStatus {
|
||||
if (this.failureReason) {
|
||||
return QueryStatus.Failed;
|
||||
} else if (!this.completedQuery) {
|
||||
return QueryStatus.InProgress;
|
||||
} else if (this.completedQuery.didRunSuccessfully) {
|
||||
return QueryStatus.Completed;
|
||||
} else {
|
||||
return QueryStatus.Failed;
|
||||
}
|
||||
}
|
||||
return await server.interpretBqrs({ kind, id, scored }, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
}
|
||||
|
||||
extensions/ql-vscode/src/query-serialization.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { asyncFilter, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedQueryInfo, LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
|
||||
export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInfo[]> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = await fs.readFile(fsPath, 'utf8');
|
||||
const obj = JSON.parse(data);
|
||||
if (obj.version !== 1) {
|
||||
void showAndLogErrorMessage(`Unsupported query history format: v${obj.version}. `);
|
||||
return [];
|
||||
}
|
||||
|
||||
const queries = obj.queries;
|
||||
const parsedQueries = queries.map((q: QueryHistoryInfo) => {
|
||||
|
||||
// Need to explicitly set prototype since reading in from JSON will not
|
||||
// do this automatically. Note that we can't call the constructor here since
|
||||
// the constructor invokes extra logic that we don't want to do.
|
||||
if (q.t === 'local') {
|
||||
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
|
||||
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
if (q.completedQuery) {
|
||||
// Again, need to explicitly set prototypes.
|
||||
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
|
||||
Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
|
||||
// slurped queries do not need to be disposed
|
||||
q.completedQuery.dispose = () => { /**/ };
|
||||
}
|
||||
} else if (q.t === 'remote') {
|
||||
// noop
|
||||
}
|
||||
return q;
|
||||
});
|
||||
|
||||
// filter out queries that have been deleted on disk
|
||||
// most likely another workspace has deleted them because the
|
||||
// queries aged out.
|
||||
return asyncFilter(parsedQueries, async (q) => {
|
||||
if (q.t === 'remote') {
|
||||
// the slurper doesn't know where the remote queries are stored
|
||||
// so we need to assume here that they exist. Later, we check to
|
||||
// see if they exist on disk.
|
||||
return true;
|
||||
}
|
||||
const resultsPath = q.completedQuery?.query.resultsPaths.resultsPath;
|
||||
return !!resultsPath && await fs.pathExists(resultsPath);
|
||||
});
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage('Error loading query history.', {
|
||||
fullMessage: ['Error loading query history.', getErrorStack(e)].join('\n'),
|
||||
});
|
||||
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
|
||||
await fs.remove(fsPath);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the query history to disk. It is not necessary that the parent directory
|
||||
* exists, but if it does, it must be writable. An existing file will be overwritten.
|
||||
*
|
||||
* Any errors will be rethrown.
|
||||
*
|
||||
* @param queries the list of queries to save.
|
||||
* @param fsPath the path to save the queries to.
|
||||
*/
|
||||
export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: string): Promise<void> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
await fs.mkdir(path.dirname(fsPath), { recursive: true });
|
||||
}
|
||||
// remove incomplete local queries since they cannot be recreated on restart
|
||||
const filteredQueries = queries.filter(q => q.t === 'local' ? q.completedQuery !== undefined : true);
|
||||
const data = JSON.stringify({
|
||||
version: 1,
|
||||
queries: filteredQueries
|
||||
}, null, 2);
|
||||
await fs.writeFile(fsPath, data);
|
||||
} catch (e) {
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
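A sketch of how these two functions pair up across extension restarts; the storage path and the surrounding activation code are assumptions:

```typescript
import * as path from 'path';

const historyPath = path.join(ctx.globalStorageUri.fsPath, 'workspace-query-history.json');

// On activation, restore whatever history survived on disk.
const restored = await slurpQueryHistory(historyPath);

// Whenever the history changes, persist it again. Incomplete local queries
// are filtered out by splatQueryHistory itself, so they are never written.
await splatQueryHistory(restored, historyPath);
```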
|
||||
extensions/ql-vscode/src/query-status.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
export enum QueryStatus {
|
||||
InProgress = 'InProgress',
|
||||
Completed = 'Completed',
|
||||
Failed = 'Failed',
|
||||
}
|
||||
@@ -1,6 +1,8 @@
|
||||
import * as cp from 'child_process';
|
||||
import * as path from 'path';
|
||||
import { DisposableObject } from './vscode-utils/disposable-object';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { Disposable, CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, MessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from './cli';
|
||||
@@ -8,11 +10,11 @@ import { QueryServerConfig } from './config';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from './pure/messages';
|
||||
import * as messages from './pure/messages';
|
||||
import { SemVer } from 'semver';
|
||||
import { ProgressCallback, ProgressTask } from './commandRunner';
|
||||
|
||||
type ServerOpts = {
|
||||
logger: Logger;
|
||||
contextStoragePath: string;
|
||||
}
|
||||
|
||||
/** A running query server process and its associated message connection. */
|
||||
@@ -28,7 +30,7 @@ class ServerProcess implements Disposable {
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
this.logger.log('Stopping query server...');
|
||||
void this.logger.log('Stopping query server...');
|
||||
this.connection.dispose();
|
||||
this.child.stdin!.end();
|
||||
this.child.stderr!.destroy();
|
||||
@@ -36,7 +38,7 @@ class ServerProcess implements Disposable {
|
||||
|
||||
// On Windows, we usually have to terminate the process before closing its stdout.
|
||||
this.child.stdout!.destroy();
|
||||
this.logger.log('Stopped query server.');
|
||||
void this.logger.log('Stopped query server.');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,11 +52,6 @@ type WithProgressReporting = (task: (progress: ProgressReporter, token: Cancella
|
||||
*/
|
||||
export class QueryServerClient extends DisposableObject {
|
||||
|
||||
/**
|
||||
* Query Server version where database registration was introduced
|
||||
*/
|
||||
private static VERSION_WITH_DB_REGISTRATION = new SemVer('2.4.1');
|
||||
|
||||
serverProcess?: ServerProcess;
|
||||
evaluationResultCallbacks: { [key: number]: (res: EvaluationResult) => void };
|
||||
progressCallbacks: { [key: number]: ((res: ProgressMessage) => void) | undefined };
|
||||
@@ -71,7 +68,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.queryServerStartListeners.push(e);
|
||||
}
|
||||
|
||||
public activeQueryName: string | undefined;
|
||||
public activeQueryLogFile: string | undefined;
|
||||
|
||||
constructor(
|
||||
readonly config: QueryServerConfig,
|
||||
@@ -101,7 +98,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
if (this.serverProcess !== undefined) {
|
||||
this.disposeAndStopTracking(this.serverProcess);
|
||||
} else {
|
||||
this.logger.log('No server process to be stopped.');
|
||||
void this.logger.log('No server process to be stopped.');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -136,16 +133,42 @@ export class QueryServerClient extends DisposableObject {
|
||||
const ramArgs = await this.cliServer.resolveRam(this.config.queryMemoryMb, progressReporter);
|
||||
const args = ['--threads', this.config.numThreads.toString()].concat(ramArgs);
|
||||
|
||||
if (await this.supportsDatabaseRegistration()) {
|
||||
if (this.config.saveCache) {
|
||||
args.push('--save-cache');
|
||||
}
|
||||
|
||||
if (this.config.cacheSize > 0) {
|
||||
args.push('--max-disk-cache');
|
||||
args.push(this.config.cacheSize.toString());
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsDatabaseRegistration()) {
|
||||
args.push('--require-db-registration');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
|
||||
args.push('--old-eval-stats');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
|
||||
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
|
||||
await fs.ensureFile(structuredLogFile);
|
||||
|
||||
args.push('--evaluator-log');
|
||||
args.push(structuredLogFile);
|
||||
|
||||
// We hard-code the verbosity level to 5 and minify to false.
|
||||
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
|
||||
args.push('--evaluator-log-level');
|
||||
args.push('5');
|
||||
}
|
||||
|
||||
if (this.config.debug) {
|
||||
args.push('--debug', '--tuple-counting');
|
||||
}
|
||||
|
||||
if (cli.shouldDebugQueryServer()) {
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=n,quiet=y');
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y');
|
||||
}
|
||||
|
||||
const child = cli.spawnServer(
|
||||
@@ -156,7 +179,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.logger,
|
||||
data => this.logger.log(data.toString(), {
|
||||
trailingNewline: false,
|
||||
additionalLogLocation: this.activeQueryName
|
||||
additionalLogLocation: this.activeQueryLogFile
|
||||
}),
|
||||
undefined, // no listener for stdout
|
||||
progressReporter
|
||||
@@ -165,13 +188,8 @@ export class QueryServerClient extends DisposableObject {
|
||||
const connection = createMessageConnection(child.stdout, child.stdin);
|
||||
connection.onRequest(completeQuery, res => {
|
||||
if (!(res.runId in this.evaluationResultCallbacks)) {
|
||||
this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
|
||||
}
|
||||
else {
|
||||
const baseLocation = this.logger.getBaseLocation();
|
||||
if (baseLocation && this.activeQueryName) {
|
||||
res.logFileLocation = path.join(baseLocation, this.activeQueryName);
|
||||
}
|
||||
void this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
|
||||
} else {
|
||||
this.evaluationResultCallbacks[res.runId](res);
|
||||
}
|
||||
return {};
|
||||
@@ -182,7 +200,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
callback(res);
|
||||
}
|
||||
});
|
||||
this.serverProcess = new ServerProcess(child, connection, this.opts.logger);
|
||||
this.serverProcess = new ServerProcess(child, connection, this.logger);
|
||||
// Ensure the server process is disposed together with this client.
|
||||
this.track(this.serverProcess);
|
||||
connection.listen();
|
||||
@@ -193,10 +211,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.evaluationResultCallbacks = {};
|
||||
}
|
||||
|
||||
async supportsDatabaseRegistration() {
|
||||
return (await this.cliServer.getVersion()).compare(QueryServerClient.VERSION_WITH_DB_REGISTRATION) >= 0;
|
||||
}
|
||||
|
||||
registerCallback(callback: (res: EvaluationResult) => void): number {
|
||||
const id = this.nextCallback++;
|
||||
this.evaluationResultCallbacks[id] = callback;
|
||||
@@ -208,7 +222,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
}
|
||||
|
||||
get serverProcessPid(): number {
|
||||
return this.serverProcess!.child.pid;
|
||||
return this.serverProcess!.child.pid || 0;
|
||||
}
|
||||
|
||||
async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
|
||||
@@ -236,8 +250,23 @@ export class QueryServerClient extends DisposableObject {
|
||||
*/
|
||||
private updateActiveQuery(method: string, parameter: any): void {
|
||||
if (method === messages.compileQuery.method) {
|
||||
const queryPath = parameter?.queryToCheck?.queryPath || 'unknown';
|
||||
this.activeQueryName = `query-${path.basename(queryPath)}-${this.nextProgress}.log`;
|
||||
this.activeQueryLogFile = findQueryLogFile(path.dirname(parameter.resultPath));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function findQueryLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'query.log');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogEndSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log-end.summary');
|
||||
}
|
||||
@@ -1,43 +1,60 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
import { CancellationToken, ExtensionContext, window as Window, workspace, Uri } from 'vscode';
|
||||
import {
|
||||
CancellationToken,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
workspace,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { DatabaseUI } from './databases-ui';
|
||||
import { logger } from './logging';
|
||||
import {
|
||||
getInitialQueryContents,
|
||||
getPrimaryDbscheme,
|
||||
getQlPackForDbscheme,
|
||||
showAndLogErrorMessage,
|
||||
showBinaryChoiceDialog,
|
||||
} from './helpers';
|
||||
import {
|
||||
ProgressCallback,
|
||||
UserCancellationException
|
||||
} from './commandRunner';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
const QUICK_QUERIES_DIR_NAME = 'quick-queries';
|
||||
const QUICK_QUERY_QUERY_NAME = 'quick-query.ql';
|
||||
const QUICK_QUERY_WORKSPACE_FOLDER_NAME = 'Quick Queries';
|
||||
const QLPACK_FILE_HEADER = '# This is an automatically generated file.\n\n';
|
||||
|
||||
export function isQuickQueryPath(queryPath: string): boolean {
|
||||
return path.basename(queryPath) === QUICK_QUERY_QUERY_NAME;
|
||||
}
|
||||
|
||||
function getQuickQueriesDir(ctx: ExtensionContext): string {
|
||||
async function getQuickQueriesDir(ctx: ExtensionContext): Promise<string> {
|
||||
const storagePath = ctx.storagePath;
|
||||
if (storagePath === undefined) {
|
||||
throw new Error('Workspace storage path is undefined');
|
||||
}
|
||||
const queriesPath = path.join(storagePath, QUICK_QUERIES_DIR_NAME);
|
||||
fs.ensureDir(queriesPath, { mode: 0o700 });
|
||||
await fs.ensureDir(queriesPath, { mode: 0o700 });
|
||||
return queriesPath;
|
||||
}
|
||||
|
||||
function updateQuickQueryDir(queriesDir: string, index: number, len: number) {
|
||||
workspace.updateWorkspaceFolders(
|
||||
index,
|
||||
len,
|
||||
{ uri: Uri.file(queriesDir), name: QUICK_QUERY_WORKSPACE_FOLDER_NAME }
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
function findExistingQuickQueryEditor() {
|
||||
return Window.visibleTextEditors.find(editor =>
|
||||
path.basename(editor.document.uri.fsPath) === QUICK_QUERY_QUERY_NAME
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Show a buffer that the user can enter a simple query into.
|
||||
@@ -50,26 +67,18 @@ export async function displayQuickQuery(
|
||||
token: CancellationToken
|
||||
) {
|
||||
|
||||
function updateQuickQueryDir(queriesDir: string, index: number, len: number) {
|
||||
workspace.updateWorkspaceFolders(
|
||||
index,
|
||||
len,
|
||||
{ uri: Uri.file(queriesDir), name: QUICK_QUERY_WORKSPACE_FOLDER_NAME }
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const workspaceFolders = workspace.workspaceFolders || [];
|
||||
const queriesDir = await getQuickQueriesDir(ctx);
|
||||
|
||||
// If there is already a quick query open, don't clobber it, just
|
||||
// show it.
|
||||
const existing = workspace.textDocuments.find(doc => path.basename(doc.uri.fsPath) === QUICK_QUERY_QUERY_NAME);
|
||||
if (existing !== undefined) {
|
||||
Window.showTextDocument(existing);
|
||||
const existing = findExistingQuickQueryEditor();
|
||||
if (existing) {
|
||||
await Window.showTextDocument(existing.document);
|
||||
return;
|
||||
}
|
||||
|
||||
const workspaceFolders = workspace.workspaceFolders || [];
|
||||
const queriesDir = await getQuickQueriesDir(ctx);
|
||||
|
||||
// We need to have a multi-root workspace to make quick query work
|
||||
// at all. Changing the workspace from single-root to multi-root
|
||||
// causes a restart of the whole extension host environment, so we
|
||||
@@ -88,10 +97,11 @@ export async function displayQuickQuery(
|
||||
}
|
||||
|
||||
const index = workspaceFolders.findIndex(folder => folder.name === QUICK_QUERY_WORKSPACE_FOLDER_NAME);
|
||||
if (index === -1)
|
||||
if (index === -1) {
|
||||
updateQuickQueryDir(queriesDir, workspaceFolders.length, 0);
|
||||
else
|
||||
} else {
|
||||
updateQuickQueryDir(queriesDir, index, 1);
|
||||
}
|
||||
|
||||
// We're going to infer which qlpack to use from the current database
|
||||
const dbItem = await databaseUI.getDatabaseItem(progress, token);
|
||||
@@ -101,32 +111,46 @@ export async function displayQuickQuery(
|
||||
|
||||
const datasetFolder = await dbItem.getDatasetFolder(cliServer);
|
||||
const dbscheme = await getPrimaryDbscheme(datasetFolder);
|
||||
const qlpack = await getQlPackForDbscheme(cliServer, dbscheme);
|
||||
|
||||
const quickQueryQlpackYaml: any = {
|
||||
name: 'quick-query',
|
||||
version: '1.0.0',
|
||||
libraryPathDependencies: [qlpack]
|
||||
};
|
||||
|
||||
const qlFile = path.join(queriesDir, QUICK_QUERY_QUERY_NAME);
|
||||
const qlpack = (await getQlPackForDbscheme(cliServer, dbscheme)).dbschemePack;
|
||||
const qlPackFile = path.join(queriesDir, 'qlpack.yml');
|
||||
await fs.writeFile(qlFile, getInitialQueryContents(dbItem.language, dbscheme), 'utf8');
|
||||
await fs.writeFile(qlPackFile, yaml.safeDump(quickQueryQlpackYaml), 'utf8');
|
||||
Window.showTextDocument(await workspace.openTextDocument(qlFile));
|
||||
}
|
||||
const qlFile = path.join(queriesDir, QUICK_QUERY_QUERY_NAME);
|
||||
const shouldRewrite = await checkShouldRewrite(qlPackFile, qlpack);
|
||||
|
||||
// TODO: clean up error handling for top-level commands like this
|
||||
catch (e) {
|
||||
if (e instanceof UserCancellationException) {
|
||||
logger.log(e.message);
|
||||
// Only rewrite the qlpack file if the database has changed
|
||||
if (shouldRewrite) {
|
||||
const quickQueryQlpackYaml: any = {
|
||||
name: 'vscode/quick-query',
|
||||
version: '1.0.0',
|
||||
dependencies: {
|
||||
[qlpack]: '*'
|
||||
}
|
||||
};
|
||||
await fs.writeFile(qlPackFile, QLPACK_FILE_HEADER + yaml.dump(quickQueryQlpackYaml), 'utf8');
|
||||
}
|
||||
else if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
logger.log(e.message);
|
||||
|
||||
if (shouldRewrite || !(await fs.pathExists(qlFile))) {
|
||||
await fs.writeFile(qlFile, getInitialQueryContents(dbItem.language, dbscheme), 'utf8');
|
||||
}
|
||||
else if (e instanceof Error)
|
||||
showAndLogErrorMessage(e.message);
|
||||
else
|
||||
|
||||
if (shouldRewrite) {
|
||||
await cliServer.clearCache();
|
||||
await cliServer.packInstall(queriesDir, true);
|
||||
}
|
||||
|
||||
await Window.showTextDocument(await workspace.openTextDocument(qlFile));
|
||||
} catch (e) {
|
||||
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
throw new UserCancellationException(getErrorMessage(e));
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function checkShouldRewrite(qlPackFile: string, newDependency: string) {
|
||||
if (!(await fs.pathExists(qlPackFile))) {
|
||||
return true;
|
||||
}
|
||||
const qlPackContents: any = yaml.load(await fs.readFile(qlPackFile, 'utf8'));
|
||||
return !qlPackContents.dependencies?.[newDependency];
|
||||
}
|
||||
|
||||
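For illustration, a minimal sketch of what the dependencies block above produces when written to qlpack.yml. The pack name codeql/javascript-all is an assumed example; in practice it is whichever dbscheme pack getQlPackForDbscheme resolves for the current database.

import * as yaml from 'js-yaml';

// Assumed example input: the dbscheme pack resolved for the selected database.
const qlpack = 'codeql/javascript-all';

const quickQueryQlpackYaml = {
  name: 'vscode/quick-query',
  version: '1.0.0',
  dependencies: { [qlpack]: '*' }
};

// yaml.dump renders this roughly as:
//   name: vscode/quick-query
//   version: 1.0.0
//   dependencies:
//     codeql/javascript-all: '*'
console.log(yaml.dump(quickQueryQlpackYaml));

checkShouldRewrite compares the existing file's dependencies against this resolved pack, so switching to another database with the same dbscheme pack leaves qlpack.yml, the CLI cache, and the installed packs untouched.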
extensions/ql-vscode/src/quickEvalCodeLensProvider.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import {
  CodeLensProvider,
  TextDocument,
  CodeLens,
  Command,
  Range
} from 'vscode';
import { isQuickEvalCodelensEnabled } from './config';

class QuickEvalCodeLensProvider implements CodeLensProvider {
  async provideCodeLenses(document: TextDocument): Promise<CodeLens[]> {

    const codeLenses: CodeLens[] = [];

    if (isQuickEvalCodelensEnabled()) {
      for (let index = 0; index < document.lineCount; index++) {
        const textLine = document.lineAt(index);

        // Match a predicate signature, including predicate name, parameter list, and opening brace.
        // This currently does not match predicates that span multiple lines.
        const regex = new RegExp(/(\w+)\s*\([^()]*\)\s*\{/);

        const matches = textLine.text.match(regex);

        // Make sure that a code lens is not generated for any predicate that is commented out.
        if (matches && !(/^\s*\/\//).test(textLine.text)) {
          const range: Range = new Range(
            textLine.range.start.line, matches.index!,
            textLine.range.end.line, matches.index! + 1
          );

          const command: Command = {
            command: 'codeQL.codeLensQuickEval',
            title: `Quick Evaluation: ${matches[1]}`,
            arguments: [document.uri, range]
          };
          const codeLens = new CodeLens(range, command);
          codeLenses.push(codeLens);
        }
      }
    }
    return codeLenses;
  }
}

export default QuickEvalCodeLensProvider;
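The provider above only computes lenses; it takes effect once registered for QL documents. A minimal sketch of that wiring, where the document selector and the registration site are assumptions rather than part of this diff:

import { ExtensionContext, languages } from 'vscode';
import QuickEvalCodeLensProvider from './quickEvalCodeLensProvider';

export function registerQuickEvalCodeLens(ctx: ExtensionContext) {
  // Hypothetical wiring: surface the "Quick Evaluation: <predicate>" lens in QL editors.
  ctx.subscriptions.push(
    languages.registerCodeLensProvider(
      { language: 'ql' },
      new QuickEvalCodeLensProvider()
    )
  );
}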
extensions/ql-vscode/src/remote-queries/analyses-results-manager.ts (new file, 205 lines)
@@ -0,0 +1,205 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { CancellationToken, ExtensionContext } from 'vscode';
|
||||
|
||||
import { Credentials } from '../authentication';
|
||||
import { Logger } from '../logging';
|
||||
import { downloadArtifactFromLink } from './gh-actions-api-client';
|
||||
import { AnalysisSummary } from './shared/remote-query-result';
|
||||
import { AnalysisResults, AnalysisAlert, AnalysisRawResults } from './shared/analysis-result';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import { sarifParser } from '../sarif-parser';
|
||||
import { extractAnalysisAlerts } from './sarif-processing';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { extractRawResults } from './bqrs-processing';
|
||||
import { asyncFilter, getErrorMessage } from '../pure/helpers-pure';
|
||||
import { createDownloadPath } from './download-link';
|
||||
|
||||
export class AnalysesResultsManager {
|
||||
// Store for the results of various analyses for each remote query.
|
||||
// The key is the queryId and is also the name of the directory where results are stored.
|
||||
private readonly analysesResults: Map<string, AnalysisResults[]>;
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
readonly storagePath: string,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
this.analysesResults = new Map();
|
||||
}
|
||||
|
||||
public async downloadAnalysisResults(
|
||||
analysisSummary: AnalysisSummary,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
|
||||
): Promise<void> {
|
||||
if (this.isAnalysisInMemory(analysisSummary)) {
|
||||
// We already have the results for this analysis in memory, don't download again.
|
||||
return;
|
||||
}
|
||||
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
void this.logger.log(`Downloading and processing results for ${analysisSummary.nwo}`);
|
||||
|
||||
await this.downloadSingleAnalysisResults(analysisSummary, credentials, publishResults);
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads the given array of analysis results. For each analysis result, if it is not yet downloaded,
|
||||
* it will be downloaded. If it is already downloaded, it will be loaded into memory.
|
||||
* If it is already in memory, this will be a no-op.
|
||||
*
|
||||
* @param allAnalysesToLoad List of analyses to ensure are downloaded and in memory
|
||||
* @param token Optional cancellation token
|
||||
* @param publishResults Optional function to publish the results after loading
|
||||
*/
|
||||
public async loadAnalysesResults(
|
||||
allAnalysesToLoad: AnalysisSummary[],
|
||||
token?: CancellationToken,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void> = () => Promise.resolve()
|
||||
): Promise<void> {
|
||||
// Filter out analyses that we have already in memory.
|
||||
const analysesToDownload = allAnalysesToLoad.filter(x => !this.isAnalysisInMemory(x));
|
||||
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
void this.logger.log('Downloading and processing analyses results');
|
||||
|
||||
const batchSize = 3;
|
||||
const numOfBatches = Math.ceil(analysesToDownload.length / batchSize);
|
||||
const allFailures = [];
|
||||
|
||||
for (let i = 0; i < analysesToDownload.length; i += batchSize) {
|
||||
if (token?.isCancellationRequested) {
|
||||
throw new UserCancellationException('Downloading of analyses results has been cancelled', true);
|
||||
}
|
||||
|
||||
const batch = analysesToDownload.slice(i, i + batchSize);
|
||||
const batchTasks = batch.map(analysis => this.downloadSingleAnalysisResults(analysis, credentials, publishResults));
|
||||
|
||||
const nwos = batch.map(a => a.nwo).join(', ');
|
||||
void this.logger.log(`Downloading batch ${Math.floor(i / batchSize) + 1} of ${numOfBatches} (${nwos})`);
|
||||
|
||||
const taskResults = await Promise.allSettled(batchTasks);
|
||||
const failedTasks = taskResults.filter(x => x.status === 'rejected') as Array<PromiseRejectedResult>;
|
||||
if (failedTasks.length > 0) {
|
||||
const failures = failedTasks.map(t => t.reason.message);
|
||||
failures.forEach(f => void this.logger.log(f));
|
||||
allFailures.push(...failures);
|
||||
}
|
||||
}
|
||||
|
||||
if (allFailures.length > 0) {
|
||||
throw Error(allFailures.join(os.EOL));
|
||||
}
|
||||
}
|
||||
|
||||
public getAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return [...this.internalGetAnalysesResults(queryId)];
|
||||
}
|
||||
|
||||
private internalGetAnalysesResults(queryId: string): AnalysisResults[] {
|
||||
return this.analysesResults.get(queryId) || [];
|
||||
}
|
||||
|
||||
public removeAnalysesResults(queryId: string) {
|
||||
this.analysesResults.delete(queryId);
|
||||
}
|
||||
|
||||
private async downloadSingleAnalysisResults(
|
||||
analysis: AnalysisSummary,
|
||||
credentials: Credentials,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
|
||||
): Promise<void> {
|
||||
const analysisResults: AnalysisResults = {
|
||||
nwo: analysis.nwo,
|
||||
status: 'InProgress',
|
||||
interpretedResults: [],
|
||||
resultCount: analysis.resultCount,
|
||||
starCount: analysis.starCount,
|
||||
lastUpdated: analysis.lastUpdated,
|
||||
};
|
||||
const queryId = analysis.downloadLink.queryId;
|
||||
const resultsForQuery = this.internalGetAnalysesResults(queryId);
|
||||
resultsForQuery.push(analysisResults);
|
||||
this.analysesResults.set(queryId, resultsForQuery);
|
||||
void publishResults([...resultsForQuery]);
|
||||
const pos = resultsForQuery.length - 1;
|
||||
|
||||
let artifactPath;
|
||||
try {
|
||||
artifactPath = await downloadArtifactFromLink(credentials, this.storagePath, analysis.downloadLink);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
const fileLinkPrefix = this.createGitHubDotcomFileLinkPrefix(analysis.nwo, analysis.databaseSha);
|
||||
|
||||
let newAnalysisResults: AnalysisResults;
|
||||
const fileExtension = path.extname(artifactPath);
|
||||
if (fileExtension === '.sarif') {
|
||||
const queryResults = await this.readSarifResults(artifactPath, fileLinkPrefix);
|
||||
newAnalysisResults = {
|
||||
...analysisResults,
|
||||
interpretedResults: queryResults,
|
||||
status: 'Completed'
|
||||
};
|
||||
} else if (fileExtension === '.bqrs') {
|
||||
const queryResults = await this.readBqrsResults(artifactPath, fileLinkPrefix, analysis.sourceLocationPrefix);
|
||||
newAnalysisResults = {
|
||||
...analysisResults,
|
||||
rawResults: queryResults,
|
||||
status: 'Completed'
|
||||
};
|
||||
} else {
|
||||
void this.logger.log(`Cannot download results. File type '${fileExtension}' not supported.`);
|
||||
newAnalysisResults = {
|
||||
...analysisResults,
|
||||
status: 'Failed'
|
||||
};
|
||||
}
|
||||
resultsForQuery[pos] = newAnalysisResults;
|
||||
void publishResults([...resultsForQuery]);
|
||||
}
|
||||
|
||||
|
||||
public async loadDownloadedAnalyses(
|
||||
allAnalysesToCheck: AnalysisSummary[]
|
||||
) {
|
||||
|
||||
// Find all analyses that are already downloaded.
|
||||
const allDownloadedAnalyses = await asyncFilter(allAnalysesToCheck, x => this.isAnalysisDownloaded(x));
|
||||
// Now, ensure that all of these analyses are in memory. Some may already be in memory. These are ignored.
|
||||
await this.loadAnalysesResults(allDownloadedAnalyses);
|
||||
}
|
||||
|
||||
private async isAnalysisDownloaded(analysis: AnalysisSummary): Promise<boolean> {
|
||||
return await fs.pathExists(createDownloadPath(this.storagePath, analysis.downloadLink));
|
||||
}
|
||||
|
||||
private async readBqrsResults(filePath: string, fileLinkPrefix: string, sourceLocationPrefix: string): Promise<AnalysisRawResults> {
|
||||
return await extractRawResults(this.cliServer, this.logger, filePath, fileLinkPrefix, sourceLocationPrefix);
|
||||
}
|
||||
|
||||
private async readSarifResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisAlert[]> {
|
||||
const sarifLog = await sarifParser(filePath);
|
||||
|
||||
const processedSarif = extractAnalysisAlerts(sarifLog, fileLinkPrefix);
|
||||
if (processedSarif.errors.length) {
|
||||
void this.logger.log(`Error processing SARIF file: ${os.EOL}${processedSarif.errors.join(os.EOL)}`);
|
||||
}
|
||||
|
||||
return processedSarif.alerts;
|
||||
}
|
||||
|
||||
private isAnalysisInMemory(analysis: AnalysisSummary): boolean {
|
||||
return this.internalGetAnalysesResults(analysis.downloadLink.queryId).some(x => x.nwo === analysis.nwo);
|
||||
}
|
||||
|
||||
private createGitHubDotcomFileLinkPrefix(nwo: string, sha: string): string {
|
||||
return `https://github.com/${nwo}/blob/${sha}`;
|
||||
}
|
||||
}
|
||||
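loadAnalysesResults above downloads in batches of three and aggregates failures instead of aborting on the first rejected download. The same pattern, reduced to a standalone sketch; the helper name and task shape are illustrative, not part of this change:

// Illustrative helper: run async tasks in fixed-size batches, collecting every
// failure instead of stopping at the first one.
async function runInBatches<T>(tasks: (() => Promise<T>)[], batchSize = 3): Promise<void> {
  const failures: string[] = [];
  for (let i = 0; i < tasks.length; i += batchSize) {
    const batch = tasks.slice(i, i + batchSize).map(t => t());
    const settled = await Promise.allSettled(batch);
    for (const result of settled) {
      if (result.status === 'rejected') {
        failures.push(String(result.reason));
      }
    }
  }
  if (failures.length > 0) {
    throw new Error(failures.join('\n'));
  }
}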
extensions/ql-vscode/src/remote-queries/bqrs-processing.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import { CodeQLCliServer } from '../cli';
import { Logger } from '../logging';
import { transformBqrsResultSet } from '../pure/bqrs-cli-types';
import { AnalysisRawResults } from './shared/analysis-result';
import { MAX_RAW_RESULTS } from './shared/result-limits';

export async function extractRawResults(
  cliServer: CodeQLCliServer,
  logger: Logger,
  filePath: string,
  fileLinkPrefix: string,
  sourceLocationPrefix: string
): Promise<AnalysisRawResults> {
  const bqrsInfo = await cliServer.bqrsInfo(filePath);
  const resultSets = bqrsInfo['result-sets'];

  if (resultSets.length < 1) {
    throw new Error('No result sets found in results file.');
  }
  if (resultSets.length > 1) {
    void logger.log('Multiple result sets found in results file. Only the first one will be used.');
  }

  const schema = resultSets[0];

  const chunk = await cliServer.bqrsDecode(
    filePath,
    schema.name,
    { pageSize: MAX_RAW_RESULTS });

  const resultSet = transformBqrsResultSet(schema, chunk);

  const capped = !!chunk.next;

  return { schema, resultSet, fileLinkPrefix, sourceLocationPrefix, capped };
}
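The capped flag records that bqrsDecode was limited to a single page of MAX_RAW_RESULTS rows. A small sketch of how a consumer might surface that; the message wording here is illustrative:

import { AnalysisRawResults } from './shared/analysis-result';
import { MAX_RAW_RESULTS } from './shared/result-limits';

function describeRawResults(results: AnalysisRawResults): string {
  // capped is true when the BQRS file held more rows than the decoded page.
  return results.capped
    ? `Showing only the first ${MAX_RAW_RESULTS} results`
    : 'Showing all results';
}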
extensions/ql-vscode/src/remote-queries/download-link.ts (new file, 40 lines)
@@ -0,0 +1,40 @@
import * as path from 'path';

/**
 * Represents a link to an artifact to be downloaded.
 */
export interface DownloadLink {
  /**
   * A unique id of the artifact being downloaded.
   */
  id: string;

  /**
   * The URL path to use against the GitHub API to download the
   * linked artifact.
   */
  urlPath: string;

  /**
   * An optional path to follow inside the downloaded archive containing the artifact.
   */
  innerFilePath?: string;

  /**
   * A unique id of the remote query run. This is used to determine where to store artifacts and data from the run.
   */
  queryId: string;
}

/**
 * Converts a downloadLink to the path where the artifact should be stored.
 *
 * @param storagePath The base directory to store artifacts in.
 * @param downloadLink The DownloadLink
 * @param extension An optional file extension to append to the artifact (no `.`).
 *
 * @returns A full path to the download location of the artifact
 */
export function createDownloadPath(storagePath: string, downloadLink: DownloadLink, extension = '') {
  return path.join(storagePath, downloadLink.queryId, downloadLink.id + (extension ? `.${extension}` : ''));
}
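A worked example of the path layout createDownloadPath produces; every value below is invented for illustration:

import { createDownloadPath, DownloadLink } from './download-link';

// Hypothetical link to one repository's results artifact.
const link: DownloadLink = {
  id: '991',
  urlPath: '/repos/octo-org/controller-repo/actions/artifacts/991',
  innerFilePath: 'results.sarif',
  queryId: 'FindThings-V1StGXR8Z5jdHi6BmyT'
};

createDownloadPath('/home/user/.codeql-storage', link, 'zip');
// => '/home/user/.codeql-storage/FindThings-V1StGXR8Z5jdHi6BmyT/991.zip' (downloaded archive)
createDownloadPath('/home/user/.codeql-storage', link);
// => '/home/user/.codeql-storage/FindThings-V1StGXR8Z5jdHi6BmyT/991'     (extracted directory)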
extensions/ql-vscode/src/remote-queries/export-results.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { window, commands, Uri, ExtensionContext, QuickPickItem, workspace, ViewColumn } from 'vscode';
|
||||
import { Credentials } from '../authentication';
|
||||
import { UserCancellationException } from '../commandRunner';
|
||||
import { showInformationMessageWithAction } from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { createGist } from './gh-actions-api-client';
|
||||
import { RemoteQueriesManager } from './remote-queries-manager';
|
||||
import { generateMarkdown } from './remote-queries-markdown-generation';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
/**
|
||||
* Exports the results of the currently-selected remote query.
|
||||
* The user is prompted to select the export format.
|
||||
*/
|
||||
export async function exportRemoteQueryResults(
|
||||
queryHistoryManager: QueryHistoryManager,
|
||||
remoteQueriesManager: RemoteQueriesManager,
|
||||
ctx: ExtensionContext,
|
||||
): Promise<void> {
|
||||
const queryHistoryItem = queryHistoryManager.getCurrentQueryHistoryItem();
|
||||
if (!queryHistoryItem || queryHistoryItem.t !== 'remote') {
|
||||
throw new Error('No variant analysis results currently open. To open results, click an item in the query history view.');
|
||||
} else if (!queryHistoryItem.completed) {
|
||||
throw new Error('Variant analysis results are not yet available.');
|
||||
}
|
||||
const queryId = queryHistoryItem.queryId;
|
||||
void logger.log(`Exporting variant analysis results for query: ${queryId}`);
|
||||
const query = queryHistoryItem.remoteQuery;
|
||||
const analysesResults = remoteQueriesManager.getAnalysesResults(queryId);
|
||||
|
||||
const gistOption = {
|
||||
label: '$(ports-open-browser-icon) Create Gist (GitHub)',
|
||||
};
|
||||
const localMarkdownOption = {
|
||||
label: '$(markdown) Save as markdown',
|
||||
};
|
||||
const exportFormat = await determineExportFormat(gistOption, localMarkdownOption);
|
||||
|
||||
if (exportFormat === gistOption) {
|
||||
await exportResultsToGist(ctx, query, analysesResults);
|
||||
} else if (exportFormat === localMarkdownOption) {
|
||||
const queryDirectoryPath = await queryHistoryManager.getQueryHistoryItemDirectory(
|
||||
queryHistoryItem
|
||||
);
|
||||
await exportResultsToLocalMarkdown(queryDirectoryPath, query, analysesResults);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the format in which to export the results, from the given export options.
|
||||
*/
|
||||
async function determineExportFormat(
|
||||
...options: { label: string }[]
|
||||
): Promise<QuickPickItem> {
|
||||
const exportFormat = await window.showQuickPick(
|
||||
options,
|
||||
{
|
||||
placeHolder: 'Select export format',
|
||||
canPickMany: false,
|
||||
ignoreFocusOut: true,
|
||||
}
|
||||
);
|
||||
if (!exportFormat || !exportFormat.label) {
|
||||
throw new UserCancellationException('No export format selected', true);
|
||||
}
|
||||
return exportFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the results of a remote query to markdown and uploads the files as a secret gist.
|
||||
*/
|
||||
async function exportResultsToGist(
|
||||
ctx: ExtensionContext,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[]
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const description = 'CodeQL Variant Analysis Results';
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, 'gist');
|
||||
// Convert markdownFiles to the appropriate format for uploading to gist
|
||||
const gistFiles = markdownFiles.reduce((acc, cur) => {
|
||||
acc[`${cur.fileName}.md`] = { content: cur.content.join('\n') };
|
||||
return acc;
|
||||
}, {} as { [key: string]: { content: string } });
|
||||
|
||||
const gistUrl = await createGist(credentials, description, gistFiles);
|
||||
if (gistUrl) {
|
||||
const shouldOpenGist = await showInformationMessageWithAction(
|
||||
'Variant analysis results exported to gist.',
|
||||
'Open gist'
|
||||
);
|
||||
if (shouldOpenGist) {
|
||||
await commands.executeCommand('vscode.open', Uri.parse(gistUrl));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the results of a remote query to markdown and saves the files locally
|
||||
* in the query directory (where query results and metadata are also saved).
|
||||
*/
|
||||
async function exportResultsToLocalMarkdown(
|
||||
queryDirectoryPath: string,
|
||||
query: RemoteQuery,
|
||||
analysesResults: AnalysisResults[]
|
||||
) {
|
||||
const markdownFiles = generateMarkdown(query, analysesResults, 'local');
|
||||
const exportedResultsPath = path.join(queryDirectoryPath, 'exported-results');
|
||||
await fs.ensureDir(exportedResultsPath);
|
||||
for (const markdownFile of markdownFiles) {
|
||||
const filePath = path.join(exportedResultsPath, `${markdownFile.fileName}.md`);
|
||||
await fs.writeFile(filePath, markdownFile.content.join('\n'), 'utf8');
|
||||
}
|
||||
const shouldOpenExportedResults = await showInformationMessageWithAction(
|
||||
`Variant analysis results exported to \"${exportedResultsPath}\".`,
|
||||
'Open exported results'
|
||||
);
|
||||
if (shouldOpenExportedResults) {
|
||||
const summaryFilePath = path.join(exportedResultsPath, '_summary.md');
|
||||
const summaryFile = await workspace.openTextDocument(summaryFilePath);
|
||||
await window.showTextDocument(summaryFile, ViewColumn.One);
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(summaryFilePath));
|
||||
}
|
||||
}
|
||||
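The reduce in exportResultsToGist reshapes the generated markdown files into the map-of-files object that the gist upload expects. A small worked example with invented file names and contents:

// Hypothetical generateMarkdown output: a summary file plus one file per repository.
const markdownFiles = [
  { fileName: '_summary', content: ['## Summary', '3 results in 1 repository'] },
  { fileName: 'octo-org-octo-repo', content: ['## octo-org/octo-repo', '3 results'] },
];

// Same reduce as above: keys become '<fileName>.md', values hold the joined markdown body.
const gistFiles = markdownFiles.reduce((acc, cur) => {
  acc[`${cur.fileName}.md`] = { content: cur.content.join('\n') };
  return acc;
}, {} as { [key: string]: { content: string } });

// gistFiles:
// {
//   '_summary.md': { content: '## Summary\n3 results in 1 repository' },
//   'octo-org-octo-repo.md': { content: '## octo-org/octo-repo\n3 results' }
// }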
extensions/ql-vscode/src/remote-queries/gh-actions-api-client.ts (new file, 413 lines)
@@ -0,0 +1,413 @@
|
||||
import * as unzipper from 'unzipper';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage, tmpDir } from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import { logger } from '../logging';
|
||||
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
|
||||
import { DownloadLink, createDownloadPath } from './download-link';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
|
||||
interface ApiSuccessIndexItem {
|
||||
nwo: string;
|
||||
id: string;
|
||||
sha?: string;
|
||||
results_count: number;
|
||||
bqrs_file_size: number;
|
||||
sarif_file_size?: number;
|
||||
source_location_prefix: string;
|
||||
}
|
||||
|
||||
interface ApiFailureIndexItem {
|
||||
nwo: string;
|
||||
id: string;
|
||||
error: string;
|
||||
}
|
||||
|
||||
interface ApiResultIndex {
|
||||
successes: ApiSuccessIndexItem[];
|
||||
failures: ApiFailureIndexItem[];
|
||||
}
|
||||
|
||||
export async function getRemoteQueryIndex(
|
||||
credentials: Credentials,
|
||||
remoteQuery: RemoteQuery
|
||||
): Promise<RemoteQueryResultIndex | undefined> {
|
||||
const controllerRepo = remoteQuery.controllerRepository;
|
||||
const owner = controllerRepo.owner;
|
||||
const repoName = controllerRepo.name;
|
||||
const workflowRunId = remoteQuery.actionsWorkflowRunId;
|
||||
|
||||
const workflowUri = `https://github.com/${owner}/${repoName}/actions/runs/${workflowRunId}`;
|
||||
const artifactsUrlPath = `/repos/${owner}/${repoName}/actions/artifacts`;
|
||||
|
||||
const artifactList = await listWorkflowRunArtifacts(credentials, owner, repoName, workflowRunId);
|
||||
const resultIndexArtifactId = tryGetArtifactIDfromName('result-index', artifactList);
|
||||
if (!resultIndexArtifactId) {
|
||||
return undefined;
|
||||
}
|
||||
const resultIndex = await getResultIndex(credentials, owner, repoName, resultIndexArtifactId);
|
||||
|
||||
const successes = resultIndex?.successes.map(item => {
|
||||
const artifactId = getArtifactIDfromName(item.id, workflowUri, artifactList);
|
||||
|
||||
return {
|
||||
id: item.id.toString(),
|
||||
artifactId: artifactId,
|
||||
nwo: item.nwo,
|
||||
sha: item.sha,
|
||||
resultCount: item.results_count,
|
||||
bqrsFileSize: item.bqrs_file_size,
|
||||
sarifFileSize: item.sarif_file_size,
|
||||
sourceLocationPrefix: item.source_location_prefix
|
||||
} as RemoteQuerySuccessIndexItem;
|
||||
});
|
||||
|
||||
const failures = resultIndex?.failures.map(item => {
|
||||
return {
|
||||
id: item.id.toString(),
|
||||
nwo: item.nwo,
|
||||
error: item.error
|
||||
} as RemoteQueryFailureIndexItem;
|
||||
});
|
||||
|
||||
return {
|
||||
artifactsUrlPath,
|
||||
successes: successes || [],
|
||||
failures: failures || []
|
||||
};
|
||||
}
|
||||
|
||||
export async function cancelRemoteQuery(
|
||||
credentials: Credentials,
|
||||
remoteQuery: RemoteQuery
|
||||
): Promise<void> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const { actionsWorkflowRunId, controllerRepository: { owner, name } } = remoteQuery;
|
||||
const response = await octokit.request(`POST /repos/${owner}/${name}/actions/runs/${actionsWorkflowRunId}/cancel`);
|
||||
if (response.status >= 300) {
|
||||
throw new Error(`Error cancelling variant analysis: ${response.status} ${response?.data?.message || ''}`);
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadArtifactFromLink(
|
||||
credentials: Credentials,
|
||||
storagePath: string,
|
||||
downloadLink: DownloadLink
|
||||
): Promise<string> {
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const extractedPath = createDownloadPath(storagePath, downloadLink);
|
||||
|
||||
// first check if we already have the artifact
|
||||
if (!(await fs.pathExists(extractedPath))) {
|
||||
// Download the zipped artifact.
|
||||
const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
|
||||
|
||||
const zipFilePath = createDownloadPath(storagePath, downloadLink, 'zip');
|
||||
await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
|
||||
|
||||
// Extract the zipped artifact.
|
||||
await unzipFile(zipFilePath, extractedPath);
|
||||
}
|
||||
return path.join(extractedPath, downloadLink.innerFilePath || '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads the result index artifact and extracts the result index items.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to get the result index for.
|
||||
* @returns An object containing the result index.
|
||||
*/
|
||||
async function getResultIndex(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
artifactId: number
|
||||
): Promise<ApiResultIndex | undefined> {
|
||||
const artifactPath = await downloadArtifact(credentials, owner, repo, artifactId);
|
||||
const indexFilePath = path.join(artifactPath, 'index.json');
|
||||
if (!(await fs.pathExists(indexFilePath))) {
|
||||
void showAndLogWarningMessage('Could not find an `index.json` file in the result artifact.');
|
||||
return undefined;
|
||||
}
|
||||
const resultIndex = await fs.readFile(path.join(artifactPath, 'index.json'), 'utf8');
|
||||
|
||||
try {
|
||||
return JSON.parse(resultIndex);
|
||||
} catch (error) {
|
||||
throw new Error(`Invalid result index file: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the status of a workflow run.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to get the result index for.
|
||||
* @returns The workflow run status.
|
||||
*/
|
||||
export async function getWorkflowStatus(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
workflowRunId: number): Promise<RemoteQueryWorkflowResult> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const workflowRun = await octokit.rest.actions.getWorkflowRun({
|
||||
owner,
|
||||
repo,
|
||||
run_id: workflowRunId
|
||||
});
|
||||
|
||||
if (workflowRun.data.status === 'completed') {
|
||||
if (workflowRun.data.conclusion === 'success') {
|
||||
return { status: 'CompletedSuccessfully' };
|
||||
} else {
|
||||
const error = getWorkflowError(workflowRun.data.conclusion);
|
||||
return { status: 'CompletedUnsuccessfully', error };
|
||||
}
|
||||
}
|
||||
|
||||
return { status: 'InProgress' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists the workflow run artifacts for the given workflow run ID.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param workflowRunId The ID of the workflow run to list artifacts for.
|
||||
* @returns An array of artifact details (including artifact name and ID).
|
||||
*/
|
||||
async function listWorkflowRunArtifacts(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
workflowRunId: number
|
||||
) {
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
// There are limits on the number of artifacts that are returned by the API
|
||||
// so we use paging to make sure we retrieve all of them.
|
||||
let morePages = true;
|
||||
let pageNum = 1;
|
||||
const allArtifacts = [];
|
||||
|
||||
while (morePages) {
|
||||
const response = await octokit.rest.actions.listWorkflowRunArtifacts({
|
||||
owner,
|
||||
repo,
|
||||
run_id: workflowRunId,
|
||||
per_page: 100,
|
||||
page: pageNum
|
||||
});
|
||||
|
||||
allArtifacts.push(...response.data.artifacts);
|
||||
pageNum++;
|
||||
if (response.data.artifacts.length < 100) {
|
||||
morePages = false;
|
||||
}
|
||||
}
|
||||
|
||||
return allArtifacts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
|
||||
* @returns The artifact ID corresponding to the given artifact name.
|
||||
*/
|
||||
function getArtifactIDfromName(
|
||||
artifactName: string,
|
||||
workflowUri: string,
|
||||
artifacts: Array<{ id: number, name: string }>
|
||||
): number {
|
||||
const artifactId = tryGetArtifactIDfromName(artifactName, artifacts);
|
||||
|
||||
if (!artifactId) {
|
||||
const errorMessage =
|
||||
`Could not find artifact with name ${artifactName} in workflow ${workflowUri}.
|
||||
Please check whether the workflow run has successfully completed.`;
|
||||
throw Error(errorMessage);
|
||||
}
|
||||
|
||||
return artifactId;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param artifactName The artifact name, as a string.
|
||||
* @param artifacts An array of artifact details (from the "list workflow run artifacts" API response).
|
||||
* @returns The artifact ID corresponding to the given artifact name, if it exists.
|
||||
*/
|
||||
function tryGetArtifactIDfromName(
|
||||
artifactName: string,
|
||||
artifacts: Array<{ id: number, name: string }>
|
||||
): number | undefined {
|
||||
const artifact = artifacts.find(a => a.name === artifactName);
|
||||
|
||||
return artifact?.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Downloads an artifact from a workflow run.
|
||||
* @param credentials Credentials for authenticating to the GitHub API.
|
||||
* @param owner
|
||||
* @param repo
|
||||
* @param artifactId The ID of the artifact to download.
|
||||
* @returns The path to the enclosing directory of the unzipped artifact.
|
||||
*/
|
||||
async function downloadArtifact(
|
||||
credentials: Credentials,
|
||||
owner: string,
|
||||
repo: string,
|
||||
artifactId: number
|
||||
): Promise<string> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.rest.actions.downloadArtifact({
|
||||
owner,
|
||||
repo,
|
||||
artifact_id: artifactId,
|
||||
archive_format: 'zip',
|
||||
});
|
||||
const artifactPath = path.join(tmpDir.name, `${artifactId}`);
|
||||
await saveFile(`${artifactPath}.zip`, response.data as ArrayBuffer);
|
||||
await unzipFile(`${artifactPath}.zip`, artifactPath);
|
||||
return artifactPath;
|
||||
}
|
||||
|
||||
async function saveFile(filePath: string, data: ArrayBuffer): Promise<void> {
|
||||
void logger.log(`Saving file to ${filePath}`);
|
||||
await fs.writeFile(filePath, Buffer.from(data));
|
||||
}
|
||||
|
||||
async function unzipFile(sourcePath: string, destinationPath: string) {
|
||||
void logger.log(`Unzipping file to ${destinationPath}`);
|
||||
const file = await unzipper.Open.file(sourcePath);
|
||||
await file.extract({ path: destinationPath });
|
||||
}
|
||||
|
||||
function getWorkflowError(conclusion: string | null): string {
|
||||
if (!conclusion) {
|
||||
return 'Workflow finished without a conclusion';
|
||||
}
|
||||
|
||||
if (conclusion === 'cancelled') {
|
||||
return 'Variant analysis execution was cancelled.';
|
||||
}
|
||||
|
||||
if (conclusion === 'timed_out') {
|
||||
return 'Variant analysis execution timed out.';
|
||||
}
|
||||
|
||||
if (conclusion === 'failure') {
|
||||
// TODO: Get the actual error from the workflow or potentially
|
||||
// from an artifact from the action itself.
|
||||
return 'Variant analysis execution has failed.';
|
||||
}
|
||||
|
||||
return `Unexpected variant analysis execution conclusion: ${conclusion}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a gist with the given description and files.
|
||||
* Returns the URL of the created gist.
|
||||
*/
|
||||
export async function createGist(
|
||||
credentials: Credentials,
|
||||
description: string,
|
||||
files: { [key: string]: { content: string } }
|
||||
): Promise<string | undefined> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request('POST /gists', {
|
||||
description,
|
||||
files,
|
||||
public: false,
|
||||
});
|
||||
if (response.status >= 300) {
|
||||
throw new Error(`Error exporting variant analysis results: ${response.status} ${response?.data || ''}`);
|
||||
}
|
||||
return response.data.html_url;
|
||||
}
|
||||
|
||||
const repositoriesMetadataQuery = `query Stars($repos: String!, $pageSize: Int!, $cursor: String) {
|
||||
search(
|
||||
query: $repos
|
||||
type: REPOSITORY
|
||||
first: $pageSize
|
||||
after: $cursor
|
||||
) {
|
||||
edges {
|
||||
node {
|
||||
... on Repository {
|
||||
name
|
||||
owner {
|
||||
login
|
||||
}
|
||||
stargazerCount
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
cursor
|
||||
}
|
||||
}
|
||||
}`;
|
||||
|
||||
type RepositoriesMetadataQueryResponse = {
|
||||
search: {
|
||||
edges: {
|
||||
cursor: string;
|
||||
node: {
|
||||
name: string;
|
||||
owner: {
|
||||
login: string;
|
||||
};
|
||||
stargazerCount: number;
|
||||
updatedAt: string; // Actually an ISO date string
|
||||
}
|
||||
}[]
|
||||
}
|
||||
};
|
||||
|
||||
export type RepositoriesMetadata = Record<string, { starCount: number, lastUpdated: number }>
|
||||
|
||||
export async function getRepositoriesMetadata(credentials: Credentials, nwos: string[], pageSize = 100): Promise<RepositoriesMetadata> {
|
||||
const octokit = await credentials.getOctokit();
|
||||
const repos = `repo:${nwos.join(' repo:')} fork:true`;
|
||||
let cursor = null;
|
||||
const metadata: RepositoriesMetadata = {};
|
||||
try {
|
||||
do {
|
||||
const response: RepositoriesMetadataQueryResponse = await octokit.graphql({
|
||||
query: repositoriesMetadataQuery,
|
||||
repos,
|
||||
pageSize,
|
||||
cursor
|
||||
});
|
||||
cursor = response.search.edges.length === pageSize ? response.search.edges[pageSize - 1].cursor : null;
|
||||
|
||||
for (const edge of response.search.edges) {
|
||||
const node = edge.node;
|
||||
const owner = node.owner.login;
|
||||
const name = node.name;
|
||||
const starCount = node.stargazerCount;
|
||||
// lastUpdated is always negative since it happened in the past.
|
||||
const lastUpdated = new Date(node.updatedAt).getTime() - Date.now();
|
||||
metadata[`${owner}/${name}`] = {
|
||||
starCount, lastUpdated
|
||||
};
|
||||
}
|
||||
|
||||
} while (cursor);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(`Error retrieving repository metadata for variant analysis: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
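getRepositoriesMetadata pages through the GraphQL search connection by threading the last edge's cursor into the next request until a short page comes back. The same loop in isolation, where the fetchPage callback is an illustrative stand-in for the Octokit GraphQL call:

type Edge<T> = { cursor: string; node: T };

// Illustrative cursor-pagination skeleton: request pages until the server returns
// fewer edges than the page size, passing the last cursor into each new request.
async function collectAllNodes<T>(
  fetchPage: (cursor: string | null, pageSize: number) => Promise<Edge<T>[]>,
  pageSize = 100
): Promise<T[]> {
  const nodes: T[] = [];
  let cursor: string | null = null;
  do {
    const edges = await fetchPage(cursor, pageSize);
    nodes.push(...edges.map(e => e.node));
    cursor = edges.length === pageSize ? edges[pageSize - 1].cursor : null;
  } while (cursor);
  return nodes;
}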
extensions/ql-vscode/src/remote-queries/remote-queries-interface.ts (new file, 304 lines)
@@ -0,0 +1,304 @@
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
workspace,
|
||||
commands
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import {
|
||||
ToRemoteQueriesMessage,
|
||||
FromRemoteQueriesMessage,
|
||||
RemoteQueryDownloadAnalysisResultsMessage,
|
||||
RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
} from '../pure/interface-types';
|
||||
import { Logger } from '../logging';
|
||||
import { getHtmlForWebview } from '../interface-utils';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import { AnalysisSummary, RemoteQueryResult } from './remote-query-result';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryResult as RemoteQueryResultViewModel } from './shared/remote-query-result';
|
||||
import { AnalysisSummary as AnalysisResultViewModel } from './shared/remote-query-result';
|
||||
import { showAndLogWarningMessage } from '../helpers';
|
||||
import { URLSearchParams } from 'url';
|
||||
import { SHOW_QUERY_TEXT_MSG } from '../query-history';
|
||||
import { AnalysesResultsManager } from './analyses-results-manager';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
import { humanizeUnit } from '../pure/time';
|
||||
|
||||
export class RemoteQueriesInterfaceManager {
|
||||
private panel: WebviewPanel | undefined;
|
||||
private panelLoaded = false;
|
||||
private currentQueryId: string | undefined;
|
||||
private panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly logger: Logger,
|
||||
private readonly analysesResultsManager: AnalysesResultsManager
|
||||
) {
|
||||
this.panelLoadedCallBacks.push(() => {
|
||||
void logger.log('Variant analysis results view loaded');
|
||||
});
|
||||
}
|
||||
|
||||
async showResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
|
||||
this.getPanel().reveal(undefined, true);
|
||||
|
||||
await this.waitForPanelLoaded();
|
||||
const model = this.buildViewModel(query, queryResult);
|
||||
this.currentQueryId = queryResult.queryId;
|
||||
|
||||
await this.postMessage({
|
||||
t: 'setRemoteQueryResult',
|
||||
queryResult: model
|
||||
});
|
||||
|
||||
// Ensure all pre-downloaded artifacts are loaded into memory
|
||||
await this.analysesResultsManager.loadDownloadedAnalyses(model.analysisSummaries);
|
||||
|
||||
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId), queryResult.queryId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds up a model tailored to the view based on the query and result domain entities.
|
||||
* The data is cleaned up, sorted where necessary, and transformed to a format that
|
||||
* the view model can use.
|
||||
* @param query Information about the query that was run.
|
||||
* @param queryResult The result of the query.
|
||||
* @returns A fully created view model.
|
||||
*/
|
||||
private buildViewModel(query: RemoteQuery, queryResult: RemoteQueryResult): RemoteQueryResultViewModel {
|
||||
const queryFileName = path.basename(query.queryFilePath);
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const executionDuration = this.getDuration(queryResult.executionEndTime, query.executionStartTime);
|
||||
const analysisSummaries = this.buildAnalysisSummaries(queryResult.analysisSummaries);
|
||||
const totalRepositoryCount = queryResult.analysisSummaries.length;
|
||||
const affectedRepositories = queryResult.analysisSummaries.filter(r => r.resultCount > 0);
|
||||
|
||||
return {
|
||||
queryId: queryResult.queryId,
|
||||
queryTitle: query.queryName,
|
||||
queryFileName: queryFileName,
|
||||
queryFilePath: query.queryFilePath,
|
||||
queryText: query.queryText,
|
||||
language: query.language,
|
||||
workflowRunUrl: `https://github.com/${query.controllerRepository.owner}/${query.controllerRepository.name}/actions/runs/${query.actionsWorkflowRunId}`,
|
||||
totalRepositoryCount: totalRepositoryCount,
|
||||
affectedRepositoryCount: affectedRepositories.length,
|
||||
totalResultCount: totalResultCount,
|
||||
executionTimestamp: this.formatDate(query.executionStartTime),
|
||||
executionDuration: executionDuration,
|
||||
analysisSummaries: analysisSummaries,
|
||||
analysisFailures: queryResult.analysisFailures,
|
||||
};
|
||||
}
|
||||
|
||||
getPanel(): WebviewPanel {
|
||||
if (this.panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const panel = (this.panel = Window.createWebviewPanel(
|
||||
'remoteQueriesView',
|
||||
'CodeQL Query Results',
|
||||
{ viewColumn: ViewColumn.Active, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
Uri.file(this.analysesResultsManager.storagePath),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'out')),
|
||||
],
|
||||
}
|
||||
));
|
||||
this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.currentQueryId = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
);
|
||||
|
||||
const scriptPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/remoteQueriesView.js')
|
||||
);
|
||||
|
||||
const baseStylesheetUriOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/remote-queries/view/baseStyles.css')
|
||||
);
|
||||
|
||||
const stylesheetPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/remote-queries/view/remoteQueries.css')
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[baseStylesheetUriOnDisk, stylesheetPathOnDisk],
|
||||
true
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
)
|
||||
);
|
||||
}
|
||||
return this.panel;
|
||||
}
|
||||
|
||||
private waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this.panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this.panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private async openFile(filePath: string) {
|
||||
try {
|
||||
const textDocument = await workspace.openTextDocument(filePath);
|
||||
await Window.showTextDocument(textDocument, ViewColumn.One);
|
||||
} catch (error) {
|
||||
void showAndLogWarningMessage(`Could not open file: ${filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async openVirtualFile(text: string) {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
queryText: encodeURIComponent(SHOW_QUERY_TEXT_MSG + text)
|
||||
});
|
||||
const uri = Uri.parse(
|
||||
`remote-query:query-text.ql?${params.toString()}`,
|
||||
true
|
||||
);
|
||||
const doc = await workspace.openTextDocument(uri);
|
||||
await Window.showTextDocument(doc, { preview: false });
|
||||
} catch (error) {
|
||||
void showAndLogWarningMessage('Could not open query text');
|
||||
}
|
||||
}
|
||||
|
||||
private async handleMsgFromView(
|
||||
msg: FromRemoteQueriesMessage
|
||||
): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'remoteQueryLoaded':
|
||||
this.panelLoaded = true;
|
||||
this.panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this.panelLoadedCallBacks = [];
|
||||
break;
|
||||
case 'remoteQueryError':
|
||||
void this.logger.log(
|
||||
`Variant analysis error: ${msg.error}`
|
||||
);
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
case 'openVirtualFile':
|
||||
await this.openVirtualFile(msg.queryText);
|
||||
break;
|
||||
case 'copyRepoList':
|
||||
await commands.executeCommand('codeQL.copyRepoList', msg.queryId);
|
||||
break;
|
||||
case 'remoteQueryDownloadAnalysisResults':
|
||||
await this.downloadAnalysisResults(msg);
|
||||
break;
|
||||
case 'remoteQueryDownloadAllAnalysesResults':
|
||||
await this.downloadAllAnalysesResults(msg);
|
||||
break;
|
||||
case 'remoteQueryExportResults':
|
||||
await commands.executeCommand('codeQL.exportVariantAnalysisResults');
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
}
|
||||
|
||||
private async downloadAnalysisResults(msg: RemoteQueryDownloadAnalysisResultsMessage): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.downloadAnalysisResults(
|
||||
msg.analysisSummary,
|
||||
results => this.setAnalysisResults(results, queryId));
|
||||
}
|
||||
|
||||
private async downloadAllAnalysesResults(msg: RemoteQueryDownloadAllAnalysesResultsMessage): Promise<void> {
|
||||
const queryId = this.currentQueryId;
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
msg.analysisSummaries,
|
||||
undefined,
|
||||
results => this.setAnalysisResults(results, queryId));
|
||||
}
|
||||
|
||||
public async setAnalysisResults(analysesResults: AnalysisResults[], queryId: string | undefined): Promise<void> {
|
||||
if (this.panel?.active && this.currentQueryId === queryId) {
|
||||
await this.postMessage({
|
||||
t: 'setAnalysesResults',
|
||||
analysesResults
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private postMessage(msg: ToRemoteQueriesMessage): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private getDuration(startTime: number, endTime: number): string {
|
||||
const diffInMs = startTime - endTime;
|
||||
return humanizeUnit(diffInMs);
|
||||
}
|
||||
|
||||
private formatDate = (millis: number): string => {
|
||||
const d = new Date(millis);
|
||||
const datePart = d.toLocaleDateString(undefined, { day: 'numeric', month: 'short' });
|
||||
const timePart = d.toLocaleTimeString(undefined, { hour: 'numeric', minute: 'numeric', hour12: true });
|
||||
return `${datePart} at ${timePart}`;
|
||||
};
|
||||
|
||||
private formatFileSize(bytes: number): string {
|
||||
const kb = bytes / 1024;
|
||||
const mb = kb / 1024;
|
||||
const gb = mb / 1024;
|
||||
|
||||
if (bytes < 1024) {
|
||||
return `${bytes} bytes`;
|
||||
} else if (kb < 1024) {
|
||||
return `${kb.toFixed(2)} KB`;
|
||||
} else if (mb < 1024) {
|
||||
return `${mb.toFixed(2)} MB`;
|
||||
} else {
|
||||
return `${gb.toFixed(2)} GB`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds up a list of analysis summaries, in a data structure tailored to the view.
|
||||
* @param analysisSummaries The summaries of a specific analyses.
|
||||
* @returns A fully created view model.
|
||||
*/
|
||||
private buildAnalysisSummaries(analysisSummaries: AnalysisSummary[]): AnalysisResultViewModel[] {
|
||||
const filteredAnalysisSummaries = analysisSummaries.filter(r => r.resultCount > 0);
|
||||
|
||||
const sortedAnalysisSummaries = filteredAnalysisSummaries.sort((a, b) => b.resultCount - a.resultCount);
|
||||
|
||||
return sortedAnalysisSummaries.map((analysisResult) => ({
|
||||
nwo: analysisResult.nwo,
|
||||
databaseSha: analysisResult.databaseSha || 'HEAD',
|
||||
resultCount: analysisResult.resultCount,
|
||||
downloadLink: analysisResult.downloadLink,
|
||||
sourceLocationPrefix: analysisResult.sourceLocationPrefix,
|
||||
fileSize: this.formatFileSize(analysisResult.fileSizeInBytes),
|
||||
starCount: analysisResult.starCount,
|
||||
lastUpdated: analysisResult.lastUpdated
|
||||
}));
|
||||
}
|
||||
}
|
||||
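For reference, the display helpers above produce values like these; the byte counts are arbitrary examples:

// formatFileSize picks the largest unit that keeps the value under 1024:
//   formatFileSize(512)        -> '512 bytes'
//   formatFileSize(2560)       -> '2.50 KB'
//   formatFileSize(5242880)    -> '5.00 MB'
//   formatFileSize(2147483648) -> '2.00 GB'
//
// getDuration(executionEndTime, executionStartTime) humanizes the elapsed
// milliseconds via humanizeUnit; the exact rounding lives in pure/time.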
extensions/ql-vscode/src/remote-queries/remote-queries-manager.ts (new file, 353 lines)
@@ -0,0 +1,353 @@
|
||||
import { CancellationToken, commands, EventEmitter, ExtensionContext, Uri, env, window } from 'vscode';
|
||||
import { nanoid } from 'nanoid';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
|
||||
import { Credentials } from '../authentication';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { createTimestampFile, showAndLogErrorMessage, showAndLogInformationMessage, showInformationMessageWithAction } from '../helpers';
|
||||
import { Logger } from '../logging';
|
||||
import { runRemoteQuery } from './run-remote-query';
|
||||
import { RemoteQueriesInterfaceManager } from './remote-queries-interface';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueriesMonitor } from './remote-queries-monitor';
|
||||
import { getRemoteQueryIndex, getRepositoriesMetadata, RepositoriesMetadata } from './gh-actions-api-client';
|
||||
import { RemoteQueryResultIndex } from './remote-query-result-index';
|
||||
import { RemoteQueryResult } from './remote-query-result';
|
||||
import { DownloadLink } from './download-link';
|
||||
import { AnalysesResultsManager } from './analyses-results-manager';
|
||||
import { assertNever } from '../pure/helpers-pure';
|
||||
import { QueryStatus } from '../query-status';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { AnalysisResults } from './shared/analysis-result';
|
||||
|
||||
const autoDownloadMaxSize = 300 * 1024;
|
||||
const autoDownloadMaxCount = 100;
|
||||
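// Taken together these limits mean: when a variant analysis finishes, results are
// fetched automatically only for analyses whose artifact is smaller than 300 KB, and
// only for the first 100 such analyses; anything larger or later is downloaded on
// demand from the results view.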
|
||||
const noop = () => { /* do nothing */ };
|
||||
|
||||
export interface NewQueryEvent {
|
||||
queryId: string;
|
||||
query: RemoteQuery
|
||||
}
|
||||
|
||||
export interface RemovedQueryEvent {
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface UpdatedQueryStatusEvent {
|
||||
queryId: string;
|
||||
status: QueryStatus;
|
||||
failureReason?: string;
|
||||
}
|
||||
|
||||
export class RemoteQueriesManager extends DisposableObject {
|
||||
public readonly onRemoteQueryAdded;
|
||||
public readonly onRemoteQueryRemoved;
|
||||
public readonly onRemoteQueryStatusUpdate;
|
||||
|
||||
private readonly remoteQueryAddedEventEmitter;
|
||||
private readonly remoteQueryRemovedEventEmitter;
|
||||
private readonly remoteQueryStatusUpdateEventEmitter;
|
||||
|
||||
private readonly remoteQueriesMonitor: RemoteQueriesMonitor;
|
||||
private readonly analysesResultsManager: AnalysesResultsManager;
|
||||
private readonly interfaceManager: RemoteQueriesInterfaceManager;
|
||||
|
||||
constructor(
|
||||
private readonly ctx: ExtensionContext,
|
||||
private readonly cliServer: CodeQLCliServer,
|
||||
private readonly storagePath: string,
|
||||
logger: Logger,
|
||||
) {
|
||||
super();
|
||||
this.analysesResultsManager = new AnalysesResultsManager(ctx, cliServer, storagePath, logger);
|
||||
this.interfaceManager = new RemoteQueriesInterfaceManager(ctx, logger, this.analysesResultsManager);
|
||||
this.remoteQueriesMonitor = new RemoteQueriesMonitor(ctx, logger);
|
||||
|
||||
this.remoteQueryAddedEventEmitter = this.push(new EventEmitter<NewQueryEvent>());
|
||||
this.remoteQueryRemovedEventEmitter = this.push(new EventEmitter<RemovedQueryEvent>());
|
||||
this.remoteQueryStatusUpdateEventEmitter = this.push(new EventEmitter<UpdatedQueryStatusEvent>());
|
||||
this.onRemoteQueryAdded = this.remoteQueryAddedEventEmitter.event;
|
||||
this.onRemoteQueryRemoved = this.remoteQueryRemovedEventEmitter.event;
|
||||
this.onRemoteQueryStatusUpdate = this.remoteQueryStatusUpdateEventEmitter.event;
|
||||
}
|
||||
|
||||
public async rehydrateRemoteQuery(queryId: string, query: RemoteQuery, status: QueryStatus) {
|
||||
if (!(await this.queryRecordExists(queryId))) {
|
||||
// In this case, the query was deleted from disk, most likely because it was purged
|
||||
// by another workspace.
|
||||
this.remoteQueryRemovedEventEmitter.fire({ queryId });
|
||||
} else if (status === QueryStatus.InProgress) {
|
||||
// In this case, last time we checked, the query was still in progress.
|
||||
// We need to setup the monitor to check for completion.
|
||||
await commands.executeCommand('codeQL.monitorRemoteQuery', queryId, query);
|
||||
}
|
||||
}
|
||||
|
||||
public async removeRemoteQuery(queryId: string) {
|
||||
this.analysesResultsManager.removeAnalysesResults(queryId);
|
||||
await this.removeStorageDirectory(queryId);
|
||||
}
|
||||
|
||||
public async openRemoteQueryResults(queryId: string) {
|
||||
try {
|
||||
const remoteQuery = await this.retrieveJsonFile(queryId, 'query.json') as RemoteQuery;
|
||||
const remoteQueryResult = await this.retrieveJsonFile(queryId, 'query-result.json') as RemoteQueryResult;
|
||||
|
||||
// Open results in the background
|
||||
void this.openResults(remoteQuery, remoteQueryResult).then(
|
||||
noop,
|
||||
err => void showAndLogErrorMessage(err)
|
||||
);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(`Could not open query results. ${e}`);
|
||||
}
|
||||
}
|
||||
|
||||
public async runRemoteQuery(
|
||||
uri: Uri | undefined,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
const querySubmission = await runRemoteQuery(
|
||||
this.cliServer,
|
||||
credentials, uri || window.activeTextEditor?.document.uri,
|
||||
false,
|
||||
progress,
|
||||
token);
|
||||
|
||||
if (querySubmission?.query) {
|
||||
const query = querySubmission.query;
|
||||
const queryId = this.createQueryId(query.queryName);
|
||||
|
||||
await this.prepareStorageDirectory(queryId);
|
||||
await this.storeJsonFile(queryId, 'query.json', query);
|
||||
|
||||
this.remoteQueryAddedEventEmitter.fire({ queryId, query });
|
||||
void commands.executeCommand('codeQL.monitorRemoteQuery', queryId, query);
|
||||
}
|
||||
}
|
||||
|
||||
public async monitorRemoteQuery(
|
||||
queryId: string,
|
||||
remoteQuery: RemoteQuery,
|
||||
cancellationToken: CancellationToken
|
||||
): Promise<void> {
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
const queryWorkflowResult = await this.remoteQueriesMonitor.monitorQuery(remoteQuery, cancellationToken);
|
||||
|
||||
const executionEndTime = Date.now();
|
||||
|
||||
if (queryWorkflowResult.status === 'CompletedSuccessfully') {
|
||||
await this.downloadAvailableResults(queryId, remoteQuery, credentials, executionEndTime);
|
||||
} else if (queryWorkflowResult.status === 'CompletedUnsuccessfully') {
|
||||
if (queryWorkflowResult.error?.includes('cancelled')) {
|
||||
// Workflow was cancelled on the server
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed, failureReason: 'Cancelled' });
|
||||
await this.downloadAvailableResults(queryId, remoteQuery, credentials, executionEndTime);
|
||||
void showAndLogInformationMessage('Variant analysis was cancelled');
|
||||
} else {
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed, failureReason: queryWorkflowResult.error });
|
||||
void showAndLogErrorMessage(`Variant analysis execution failed. Error: ${queryWorkflowResult.error}`);
|
||||
}
|
||||
} else if (queryWorkflowResult.status === 'Cancelled') {
|
||||
this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed, failureReason: 'Cancelled' });
|
||||
await this.downloadAvailableResults(queryId, remoteQuery, credentials, executionEndTime);
|
||||
void showAndLogInformationMessage('Variant analysis was cancelled');
|
||||
} else if (queryWorkflowResult.status === 'InProgress') {
|
||||
// Should not get here. Only including this to ensure `assertNever` uses proper type checking.
|
||||
void showAndLogErrorMessage(`Unexpected status: ${queryWorkflowResult.status}`);
|
||||
} else {
|
||||
// Ensure all cases are covered
|
||||
assertNever(queryWorkflowResult.status);
|
||||
}
|
||||
}
|
||||
|
||||
public async autoDownloadRemoteQueryResults(
|
||||
queryResult: RemoteQueryResult,
|
||||
token: CancellationToken
|
||||
): Promise<void> {
|
||||
const analysesToDownload = queryResult.analysisSummaries
|
||||
.filter(a => a.fileSizeInBytes < autoDownloadMaxSize)
|
||||
.slice(0, autoDownloadMaxCount)
|
||||
.map(a => ({
|
||||
nwo: a.nwo,
|
||||
databaseSha: a.databaseSha,
|
||||
resultCount: a.resultCount,
|
||||
sourceLocationPrefix: a.sourceLocationPrefix,
|
||||
downloadLink: a.downloadLink,
|
||||
fileSize: String(a.fileSizeInBytes)
|
||||
}));
|
||||
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
analysesToDownload,
|
||||
token,
|
||||
results => this.interfaceManager.setAnalysisResults(results, queryResult.queryId));
|
||||
}
|
||||
|
||||
public async copyRemoteQueryRepoListToClipboard(queryId: string) {
|
||||
const queryResult = await this.getRemoteQueryResult(queryId);
|
||||
const repos = queryResult.analysisSummaries
|
||||
.filter(a => a.resultCount > 0)
|
||||
.map(a => a.nwo);
|
||||
|
||||
if (repos.length > 0) {
|
||||
const text = [
|
||||
'"new-repo-list": [',
|
||||
...repos.slice(0, -1).map(repo => ` "${repo}",`),
|
||||
` "${repos[repos.length - 1]}"`,
|
||||
']'
|
||||
];
|
||||
|
||||
await env.clipboard.writeText(text.join(os.EOL));
|
||||
}
|
||||
}
|
||||
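// For illustration (repository names invented), the clipboard text produced above
// for two affected repositories looks like:
//
//   "new-repo-list": [
//     "octo-org/octo-repo",
//     "octo-org/other-repo"
//   ]
//
// i.e. a fragment that can be pasted into a repository-list configuration.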
|
||||
private mapQueryResult(
|
||||
executionEndTime: number,
|
||||
resultIndex: RemoteQueryResultIndex,
|
||||
queryId: string,
|
||||
metadata: RepositoriesMetadata
|
||||
): RemoteQueryResult {
|
||||
const analysisSummaries = resultIndex.successes.map(item => ({
|
||||
nwo: item.nwo,
|
||||
databaseSha: item.sha || 'HEAD',
|
||||
resultCount: item.resultCount,
|
||||
sourceLocationPrefix: item.sourceLocationPrefix,
|
||||
fileSizeInBytes: item.sarifFileSize ? item.sarifFileSize : item.bqrsFileSize,
|
||||
starCount: metadata[item.nwo]?.starCount,
|
||||
lastUpdated: metadata[item.nwo]?.lastUpdated,
|
||||
downloadLink: {
|
||||
id: item.artifactId.toString(),
|
||||
urlPath: `${resultIndex.artifactsUrlPath}/${item.artifactId}`,
|
||||
innerFilePath: item.sarifFileSize ? 'results.sarif' : 'results.bqrs',
|
||||
queryId
|
||||
} as DownloadLink
|
||||
}));
|
||||
const analysisFailures = resultIndex.failures.map(item => ({
|
||||
nwo: item.nwo,
|
||||
error: item.error
|
||||
}));
|
||||
|
||||
return {
|
||||
executionEndTime,
|
||||
analysisSummaries,
|
||||
analysisFailures,
|
||||
queryId
|
||||
};
|
||||
}
|
||||
|
||||
  public async openResults(query: RemoteQuery, queryResult: RemoteQueryResult) {
    await this.interfaceManager.showResults(query, queryResult);
  }

  private async askToOpenResults(query: RemoteQuery, queryResult: RemoteQueryResult): Promise<void> {
    const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
    const totalRepoCount = queryResult.analysisSummaries.length;
    const message = `Query "${query.queryName}" ran on ${totalRepoCount} repositories and returned ${totalResultCount} results`;

    const shouldOpenView = await showInformationMessageWithAction(message, 'View');
    if (shouldOpenView) {
      await this.openResults(query, queryResult);
    }
  }

  /**
   * Generates a unique id for this query, suitable for determining the storage location
   * for the downloaded query artifacts.
   * @param queryName The name of the query.
   * @returns A string of the form `<queryName>-<nanoid>`.
   */
  private createQueryId(queryName: string): string {
    return `${queryName}-${nanoid()}`;
  }
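  // Illustrative only: for a query named 'empty-block', createQueryId might return
  // something like 'empty-block-V1StGXR8_Z5jdHi6B-myT'; the suffix is a random nanoid,
  // so this exact value is made up.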

  /**
   * Prepares a directory for storing analysis results for a single query run.
   * This directory contains a timestamp file, which will be
   * used by the query history manager to determine when the directory
   * should be deleted.
   */
  private async prepareStorageDirectory(queryId: string): Promise<void> {
    await createTimestampFile(path.join(this.storagePath, queryId));
  }

  private async getRemoteQueryResult(queryId: string): Promise<RemoteQueryResult> {
    return await this.retrieveJsonFile<RemoteQueryResult>(queryId, 'query-result.json');
  }

  private async storeJsonFile<T>(queryId: string, fileName: string, obj: T): Promise<void> {
    const filePath = path.join(this.storagePath, queryId, fileName);
    await fs.writeFile(filePath, JSON.stringify(obj, null, 2), 'utf8');
  }

  private async retrieveJsonFile<T>(queryId: string, fileName: string): Promise<T> {
    const filePath = path.join(this.storagePath, queryId, fileName);
    return JSON.parse(await fs.readFile(filePath, 'utf8'));
  }
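  // Taken together, the storage helpers above and below imply an on-disk layout per
  // query of roughly the following shape (the timestamp file's exact name is decided
  // by createTimestampFile, which is defined elsewhere):
  //
  //   <storagePath>/<queryId>/
  //     <timestamp file>     -- written by prepareStorageDirectory
  //     query-result.json    -- written by storeJsonFile, read by getRemoteQueryResult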

  private async removeStorageDirectory(queryId: string): Promise<void> {
    const filePath = path.join(this.storagePath, queryId);
    await fs.remove(filePath);
  }

  private async queryRecordExists(queryId: string): Promise<boolean> {
    const filePath = path.join(this.storagePath, queryId);
    return await fs.pathExists(filePath);
  }

  /**
   * Checks whether there's a result index artifact available for the given query.
   * If so, sets the query status to `Completed` and auto-downloads the results.
   */
  private async downloadAvailableResults(
    queryId: string,
    remoteQuery: RemoteQuery,
    credentials: Credentials,
    executionEndTime: number
  ): Promise<void> {
    const resultIndex = await getRemoteQueryIndex(credentials, remoteQuery);
    if (resultIndex) {
      this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Completed });
      const metadata = await this.getRepositoriesMetadata(resultIndex, credentials);
      const queryResult = this.mapQueryResult(executionEndTime, resultIndex, queryId, metadata);

      await this.storeJsonFile(queryId, 'query-result.json', queryResult);

      // Kick off auto-download of results in the background.
      void commands.executeCommand('codeQL.autoDownloadRemoteQueryResults', queryResult);

      // Ask, without awaiting, whether the user wants to open the results.
      void this.askToOpenResults(remoteQuery, queryResult).then(
        noop,
        err => {
          void showAndLogErrorMessage(err);
        }
      );
    } else {
      const controllerRepo = `${remoteQuery.controllerRepository.owner}/${remoteQuery.controllerRepository.name}`;
      const workflowRunUrl = `https://github.com/${controllerRepo}/actions/runs/${remoteQuery.actionsWorkflowRunId}`;
      void showAndLogErrorMessage(
        `There was an issue retrieving the result for the query [${remoteQuery.queryName}](${workflowRunUrl}).`
      );
      this.remoteQueryStatusUpdateEventEmitter.fire({ queryId, status: QueryStatus.Failed });
    }
  }

  private async getRepositoriesMetadata(resultIndex: RemoteQueryResultIndex, credentials: Credentials) {
    const nwos = resultIndex.successes.map(s => s.nwo);
    return await getRepositoriesMetadata(credentials, nwos);
  }

  // Pulled from the analysis results manager, so that we can get access to
  // analyses results from the "export results" command.
  public getAnalysesResults(queryId: string): AnalysisResults[] {
    return [...this.analysesResultsManager.getAnalysesResults(queryId)];
  }
}
@@ -0,0 +1,332 @@
import { CellValue } from '../pure/bqrs-cli-types';
import { tryGetRemoteLocation } from '../pure/bqrs-utils';
import { createRemoteFileRef } from '../pure/location-link-utils';
import { parseHighlightedLine, shouldHighlightLine } from '../pure/sarif-utils';
import { convertNonPrintableChars } from '../text-utils';
import { RemoteQuery } from './remote-query';
import { AnalysisAlert, AnalysisRawResults, AnalysisResults, CodeSnippet, FileLink, getAnalysisResultCount, HighlightedRegion } from './shared/analysis-result';

export type MarkdownLinkType = 'local' | 'gist';

export interface MarkdownFile {
  fileName: string;
  content: string[]; // Each array item is a line of the markdown file.
}

/**
 * Generates markdown files with variant analysis results.
 */
export function generateMarkdown(
  query: RemoteQuery,
  analysesResults: AnalysisResults[],
  linkType: MarkdownLinkType
): MarkdownFile[] {
  const resultsFiles: MarkdownFile[] = [];
  // Generate summary file with links to individual files
  const summaryFile: MarkdownFile = generateMarkdownSummary(query);
  for (const analysisResult of analysesResults) {
    const resultsCount = getAnalysisResultCount(analysisResult);
    if (resultsCount === 0) {
      continue;
    }

    // Append nwo and results count to the summary table
    const nwo = analysisResult.nwo;
    const fileName = createFileName(nwo);
    const link = createRelativeLink(fileName, linkType);
    summaryFile.content.push(`| ${nwo} | [${resultsCount} result(s)](${link}) |`);

    // Generate individual markdown file for each repository
    const resultsFileContent = [
      `### ${analysisResult.nwo}`,
      ''
    ];
    for (const interpretedResult of analysisResult.interpretedResults) {
      const individualResult = generateMarkdownForInterpretedResult(interpretedResult, query.language);
      resultsFileContent.push(...individualResult);
    }
    if (analysisResult.rawResults) {
      const rawResultTable = generateMarkdownForRawResults(analysisResult.rawResults);
      resultsFileContent.push(...rawResultTable);
    }
    resultsFiles.push({
      fileName: fileName,
      content: resultsFileContent,
    });
  }
  return [summaryFile, ...resultsFiles];
}
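// Illustrative only: for a query with results in the made-up repositories
// 'octocat/hello-world' and 'octocat/spoon-knife', generateMarkdown returns three
// MarkdownFile entries: '_summary', 'octocat-hello-world' and 'octocat-spoon-knife'.
// The fileName values carry no extension; '.md' is only appended by createRelativeLink
// when building links, and presumably by the caller when the files are written out.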

export function generateMarkdownSummary(query: RemoteQuery): MarkdownFile {
  const lines: string[] = [];
  // Title
  lines.push(
    `### Results for "${query.queryName}"`,
    ''
  );

  // Expandable section containing query text
  const queryCodeBlock = [
    '```ql',
    ...query.queryText.split('\n'),
    '```',
  ];
  lines.push(
    ...buildExpandableMarkdownSection('Query', queryCodeBlock)
  );

  // Padding between sections
  lines.push(
    '<br />',
    '',
  );

  // Summary table
  lines.push(
    '### Summary',
    '',
    '| Repository | Results |',
    '| --- | --- |',
  );
  // nwo and result count will be appended to this table
  return {
    fileName: '_summary',
    content: lines
  };
}
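// Illustrative only: for a hypothetical query named "Empty block", the lines built
// above render as markdown that starts roughly like this (repository rows are
// appended later by generateMarkdown):
//
//   ### Results for "Empty block"
//
//   <details>
//   <summary>Query</summary>
//   ...query text inside a ```ql code fence...
//   </details>
//
//   <br />
//
//   ### Summary
//
//   | Repository | Results |
//   | --- | --- |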

function generateMarkdownForInterpretedResult(interpretedResult: AnalysisAlert, language: string): string[] {
  const lines: string[] = [];
  lines.push(createMarkdownRemoteFileRef(
    interpretedResult.fileLink,
    interpretedResult.highlightedRegion?.startLine,
    interpretedResult.highlightedRegion?.endLine
  ));
  lines.push('');
  const codeSnippet = interpretedResult.codeSnippet;
  const highlightedRegion = interpretedResult.highlightedRegion;
  if (codeSnippet) {
    lines.push(
      ...generateMarkdownForCodeSnippet(codeSnippet, language, highlightedRegion),
    );
  }
  const alertMessage = generateMarkdownForAlertMessage(interpretedResult);
  lines.push(alertMessage, '');

  // If available, show paths
  const hasPathResults = interpretedResult.codeFlows.length > 0;
  if (hasPathResults) {
    const pathLines = generateMarkdownForPathResults(interpretedResult, language);
    lines.push(...pathLines);
  }

  // Padding between results
  lines.push(
    '----------------------------------------',
    '',
  );
  return lines;
}

function generateMarkdownForCodeSnippet(
  codeSnippet: CodeSnippet,
  language: string,
  highlightedRegion?: HighlightedRegion
): string[] {
  const lines: string[] = [];
  const snippetStartLine = codeSnippet.startLine || 0;
  const codeLines = codeSnippet.text
    .split('\n')
    .map((line, index) =>
      highlightCodeLines(line, index + snippetStartLine, highlightedRegion)
    );

  // Make sure there are no extra newlines before or after the <code> block:
  const codeLinesWrapped = [...codeLines];
  codeLinesWrapped[0] = `<pre><code class="${language}">${codeLinesWrapped[0]}`;
  codeLinesWrapped[codeLinesWrapped.length - 1] = `${codeLinesWrapped[codeLinesWrapped.length - 1]}</code></pre>`;

  lines.push(
    ...codeLinesWrapped,
    '',
  );
  return lines;
}

function highlightCodeLines(
  line: string,
  lineNumber: number,
  highlightedRegion?: HighlightedRegion
): string {
  if (!highlightedRegion || !shouldHighlightLine(lineNumber, highlightedRegion)) {
    return line;
  }
  const partiallyHighlightedLine = parseHighlightedLine(
    line,
    lineNumber,
    highlightedRegion
  );
  return `${partiallyHighlightedLine.plainSection1}<strong>${partiallyHighlightedLine.highlightedSection}</strong>${partiallyHighlightedLine.plainSection2}`;
}
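// Illustrative only: if the highlighted region covers the call on a line such as
//   sink(userInput);
// and parseHighlightedLine splits it so that 'sink(userInput)' is the highlighted
// section, the function above returns
//   <strong>sink(userInput)</strong>;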

function generateMarkdownForAlertMessage(
  interpretedResult: AnalysisAlert
): string {
  let alertMessage = '';
  for (const token of interpretedResult.message.tokens) {
    if (token.t === 'text') {
      alertMessage += token.text;
    } else if (token.t === 'location') {
      alertMessage += createMarkdownRemoteFileRef(
        token.location.fileLink,
        token.location.highlightedRegion?.startLine,
        token.location.highlightedRegion?.endLine,
        token.text
      );
    }
  }
  // Italicize the alert message
  return `*${alertMessage}*`;
}

function generateMarkdownForPathResults(
  interpretedResult: AnalysisAlert,
  language: string
): string[] {
  const lines: string[] = [];
  lines.push('#### Paths', '');
  for (const codeFlow of interpretedResult.codeFlows) {
    const pathLines: string[] = [];
    const stepCount = codeFlow.threadFlows.length;
    const title = `Path with ${stepCount} steps`;
    for (let i = 0; i < stepCount; i++) {
      const threadFlow = codeFlow.threadFlows[i];
      const link = createMarkdownRemoteFileRef(
        threadFlow.fileLink,
        threadFlow.highlightedRegion?.startLine,
        threadFlow.highlightedRegion?.endLine
      );
      const codeSnippet = generateMarkdownForCodeSnippet(
        threadFlow.codeSnippet,
        language,
        threadFlow.highlightedRegion
      );
      // Indent the snippet to fit with the numbered list.
      const codeSnippetIndented = codeSnippet.map((line) => ` ${line}`);
      pathLines.push(`${i + 1}. ${link}`, ...codeSnippetIndented);
    }
    lines.push(
      ...buildExpandableMarkdownSection(title, pathLines)
    );
  }
  return lines;
}

function generateMarkdownForRawResults(
  analysisRawResults: AnalysisRawResults
): string[] {
  const tableRows: string[] = [];
  const columnCount = analysisRawResults.schema.columns.length;
  // Table headers are the column names if they exist, and empty otherwise
  const headers = analysisRawResults.schema.columns.map(
    (column) => column.name || ''
  );
  const tableHeader = `| ${headers.join(' | ')} |`;

  tableRows.push(tableHeader);
  tableRows.push('|' + ' --- |'.repeat(columnCount));

  for (const row of analysisRawResults.resultSet.rows) {
    const cells = row.map((cell) =>
      generateMarkdownForRawTableCell(cell, analysisRawResults.fileLinkPrefix, analysisRawResults.sourceLocationPrefix)
    );
    tableRows.push(`| ${cells.join(' | ')} |`);
  }
  return tableRows;
}
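// Illustrative only: for a raw result set with two columns named 'col1' and 'col2'
// (a made-up schema) and one row of string values, the rows built above would be:
//
//   | col1 | col2 |
//   | --- | --- |
//   | `foo` | `bar` |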

function generateMarkdownForRawTableCell(
  value: CellValue,
  fileLinkPrefix: string,
  sourceLocationPrefix: string
) {
  let cellValue: string;
  switch (typeof value) {
    case 'string':
    case 'number':
    case 'boolean':
      cellValue = `\`${convertNonPrintableChars(value.toString())}\``;
      break;
    case 'object':
      {
        const url = tryGetRemoteLocation(value.url, fileLinkPrefix, sourceLocationPrefix);
        if (url) {
          cellValue = `[\`${convertNonPrintableChars(value.label)}\`](${url})`;
        } else {
          cellValue = `\`${convertNonPrintableChars(value.label)}\``;
        }
      }
      break;
  }
  // `|` characters break the table, so we need to escape them
  return cellValue.replaceAll('|', '\\|');
}
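// Illustrative only: a string cell containing 'a|b' comes back as '`a\|b`', so the
// pipe cannot terminate the markdown table cell prematurely.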


/**
 * Creates a markdown link to a remote file.
 * If the "link text" is not provided, we use the file path.
 */
export function createMarkdownRemoteFileRef(
  fileLink: FileLink,
  startLine?: number,
  endLine?: number,
  linkText?: string,
): string {
  const markdownLink = `[${linkText || fileLink.filePath}](${createRemoteFileRef(fileLink, startLine, endLine)})`;
  return markdownLink;
}
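// Illustrative only, assuming createRemoteFileRef (from '../pure/location-link-utils')
// produces a GitHub URL with line anchors: a fileLink whose filePath is 'src/app.ts',
// with startLine 10 and endLine 12 and no explicit linkText, would yield markdown like
//   [src/app.ts](https://github.com/<owner>/<repo>/blob/<sha>/src/app.ts#L10-L12)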

/**
 * Builds an expandable markdown section of the form:
 * <details>
 * <summary>title</summary>
 *
 * contents
 *
 * </details>
 */
function buildExpandableMarkdownSection(title: string, contents: string[]): string[] {
  const expandableLines: string[] = [];
  expandableLines.push(
    '<details>',
    `<summary>${title}</summary>`,
    '',
    ...contents,
    '',
    '</details>',
    ''
  );
  return expandableLines;
}

function createRelativeLink(fileName: string, linkType: MarkdownLinkType): string {
  switch (linkType) {
    case 'local':
      return `./${fileName}.md`;

    case 'gist':
      // Creates an anchor link to a file in the gist. This is of the form:
      // '#file-<name>-<file-extension>'
      return `#file-${fileName}-md`;
  }
}

/**
 * Creates the name of the markdown file for a given repository nwo.
 * This name doesn't include the file extension.
 */
function createFileName(nwo: string) {
  const [owner, repo] = nwo.split('/');
  return `${owner}-${repo}`;
}
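// Illustrative only: createFileName('github/codeql') returns 'github-codeql', and
// createRelativeLink('github-codeql', 'gist') then returns '#file-github-codeql-md',
// matching the anchor a gist generates for a file named 'github-codeql.md'.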
@@ -0,0 +1,61 @@
import * as vscode from 'vscode';
import { Credentials } from '../authentication';
import { Logger } from '../logging';
import { getWorkflowStatus } from './gh-actions-api-client';
import { RemoteQuery } from './remote-query';
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';

export class RemoteQueriesMonitor {
  // With a sleep of 5 seconds, the maximum number of attempts takes
  // us to about 24 hours' worth of monitoring.
  private static readonly maxAttemptCount = 17280;
  private static readonly sleepTime = 5000;
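  // Worked out: 17,280 attempts x 5,000 ms per sleep = 86,400,000 ms = 86,400 s,
  // i.e. roughly 24 hours of polling before the monitor gives up (see the timeout
  // handling at the end of monitorQuery below).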

  constructor(
    private readonly extensionContext: vscode.ExtensionContext,
    private readonly logger: Logger
  ) {
  }

  public async monitorQuery(
    remoteQuery: RemoteQuery,
    cancellationToken: vscode.CancellationToken
  ): Promise<RemoteQueryWorkflowResult> {
    const credentials = await Credentials.initialize(this.extensionContext);

    if (!credentials) {
      throw Error('Error authenticating with GitHub');
    }

    let attemptCount = 0;

    while (attemptCount <= RemoteQueriesMonitor.maxAttemptCount) {
      await this.sleep(RemoteQueriesMonitor.sleepTime);

      if (cancellationToken && cancellationToken.isCancellationRequested) {
        return { status: 'Cancelled' };
      }

      const workflowStatus = await getWorkflowStatus(
        credentials,
        remoteQuery.controllerRepository.owner,
        remoteQuery.controllerRepository.name,
        remoteQuery.actionsWorkflowRunId);

      if (workflowStatus.status !== 'InProgress') {
        return workflowStatus;
      }

      attemptCount++;
    }

    void this.logger.log('Variant analysis monitoring timed out after 24 hours');
    return { status: 'Cancelled' };
  }

  private async sleep(ms: number) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}
@@ -0,0 +1,15 @@
import { QueryStatus } from '../query-status';
import { RemoteQuery } from './remote-query';

/**
 * Information about a remote query.
 */
export interface RemoteQueryHistoryItem {
  readonly t: 'remote';
  failureReason?: string;
  status: QueryStatus;
  completed: boolean;
  readonly queryId: string;
  remoteQuery: RemoteQuery;
  userSpecifiedLabel?: string;
}
@@ -0,0 +1,23 @@
export interface RemoteQueryResultIndex {
  artifactsUrlPath: string;
  successes: RemoteQuerySuccessIndexItem[];
  failures: RemoteQueryFailureIndexItem[];
}

export interface RemoteQuerySuccessIndexItem {
  id: string;
  artifactId: number;
  nwo: string;
  sha?: string;
  resultCount: number;
  bqrsFileSize: number;
  sarifFileSize?: number;
  sourceLocationPrefix: string;
}

export interface RemoteQueryFailureIndexItem {
  id: string;
  artifactId: number;
  nwo: string;
  error: string;
}
@@ -0,0 +1,20 @@
import { DownloadLink } from './download-link';
import { AnalysisFailure } from './shared/analysis-failure';

export interface RemoteQueryResult {
  executionEndTime: number, // Can't use a Date here since it needs to be serialized and deserialized.
  analysisSummaries: AnalysisSummary[],
  analysisFailures: AnalysisFailure[],
  queryId: string,
}

export interface AnalysisSummary {
  nwo: string,
  databaseSha: string,
  resultCount: number,
  sourceLocationPrefix: string,
  downloadLink: DownloadLink,
  fileSizeInBytes: number,
  starCount?: number,
  lastUpdated?: number,
}
@@ -0,0 +1,6 @@
import { RemoteQuery } from './remote-query';

export interface RemoteQuerySubmissionResult {
  queryDirPath?: string;
  query?: RemoteQuery;
}