Compare commits
950 Commits
6  .gitattributes  vendored
@@ -18,4 +18,8 @@ yarn.lock merge=binary
# https://mirrors.edge.kernel.org/pub/software/scm/git/docs/gitattributes.html
# suggests that this might interleave lines arbitrarily, but empirically
# it keeps added chunks contiguous
CHANGELOG.md merge=union

# Mark some JSON files containing test data as generated so they are not included
# as part of diffs or language statistics.
extensions/ql-vscode/src/stories/remote-queries/data/*.json linguist-generated

22  .github/dependabot.yml  vendored  Normal file
@@ -0,0 +1,22 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "extensions/ql-vscode"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "github-actions"
    directory: ".github"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]

1  .github/workflows/codeql.yml  vendored
@@ -26,6 +26,7 @@ jobs:
      with:
        languages: javascript
        config-file: ./.github/codeql/codeql-config.yml
        tools: latest

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@main

16  .github/workflows/dependency-review.yml  vendored  Normal file
@@ -0,0 +1,16 @@
name: 'Dependency Review'
on:
  - pull_request
  - workflow_dispatch

permissions:
  contents: read

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v3
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v1

15  .github/workflows/main.yml  vendored
@@ -22,7 +22,7 @@ jobs:

    - uses: actions/setup-node@v1
      with:
        node-version: '14.14.0'
        node-version: '16.14.2'

    - name: Install dependencies
      working-directory: extensions/ql-vscode
@@ -82,7 +82,7 @@ jobs:

    - uses: actions/setup-node@v1
      with:
        node-version: '14.14.0'
        node-version: '16.14.0'

    - name: Install dependencies
      working-directory: extensions/ql-vscode
@@ -118,6 +118,8 @@ jobs:
    - name: Run integration tests (Linux)
      if: matrix.os == 'ubuntu-latest'
      working-directory: extensions/ql-vscode
      env:
        VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
      run: |
        sudo apt-get install xvfb
        /usr/bin/xvfb-run npm run integration
@@ -125,6 +127,8 @@ jobs:
    - name: Run integration tests (Windows)
      if: matrix.os == 'windows-latest'
      working-directory: extensions/ql-vscode
      env:
        VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
      run: |
        npm run integration

@@ -135,7 +139,7 @@ jobs:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
        version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'nightly']
        version: ['v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.1', 'nightly']
    env:
      CLI_VERSION: ${{ matrix.version }}
      NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
@@ -147,7 +151,7 @@ jobs:

    - uses: actions/setup-node@v1
      with:
        node-version: '14.14.0'
        node-version: '16.14.0'

    - name: Install dependencies
      working-directory: extensions/ql-vscode
@@ -168,9 +172,6 @@ jobs:
        if [[ "${{ matrix.version }}" == "nightly" ]]
        then
          REF="codeql-cli/latest"
        elif [[ "${{ matrix.version }}" == "v2.2.6" || "${{ matrix.version }}" == "v2.3.3" ]]
        then
          REF="codeql-cli/v2.4.5"
        else
          REF="codeql-cli/${{ matrix.version }}"
        fi

2  .github/workflows/release.yml  vendored
@@ -22,7 +22,7 @@ jobs:

    - uses: actions/setup-node@v1
      with:
        node-version: '10.18.1'
        node-version: '16.14.2'

    - name: Install dependencies
      run: |

33  .vscode/launch.json  vendored
@@ -12,7 +12,6 @@
        // Add a reference to a workspace to open. Eg-
        // "${workspaceRoot}/../vscode-codeql-starter/vscode-codeql-starter.code-workspace"
      ],
      "stopOnEntry": false,
      "sourceMaps": true,
      "outFiles": [
        "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -36,6 +35,9 @@
      "runtimeArgs": [
        "--inspect=9229"
      ],
      "env": {
        "LANG": "en-US"
      },
      "args": [
        "--exit",
        "-u",
@@ -44,9 +46,22 @@
        "--diff",
        "-r",
        "ts-node/register",
        "-r",
        "test/mocha.setup.js",
        "test/pure-tests/**/*.ts"
      ],
      "port": 9229,
      "stopOnEntry": false,
      "sourceMaps": true,
      "console": "integratedTerminal",
      "internalConsoleOptions": "neverOpen"
    },
    {
      "name": "Launch Unit Tests - React (vscode-codeql)",
      "type": "node",
      "request": "launch",
      "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
      "showAsyncStacks": true,
      "cwd": "${workspaceFolder}/extensions/ql-vscode",
      "stopOnEntry": false,
      "sourceMaps": true,
      "console": "integratedTerminal",
@@ -60,10 +75,10 @@
      "args": [
        "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index",
        "--disable-workspace-trust",
        "--disable-extensions",
        "--disable-gpu"
      ],
      "stopOnEntry": false,
      "sourceMaps": true,
      "outFiles": [
        "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -77,11 +92,11 @@
      "args": [
        "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/minimal-workspace/index",
        "--disable-workspace-trust",
        "--disable-extensions",
        "--disable-gpu",
        "${workspaceRoot}/extensions/ql-vscode/test/data"
      ],
      "stopOnEntry": false,
      "sourceMaps": true,
      "outFiles": [
        "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -95,6 +110,7 @@
      "args": [
        "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/cli-integration/index",
        "--disable-workspace-trust",
        "--disable-gpu",
        "--disable-extension",
        "eamodio.gitlens",
@@ -121,11 +137,18 @@
        // This option overrides the CLI_VERSION option.
        // "CLI_PATH": "${workspaceRoot}/../semmle-code/target/intree/codeql/codeql",
      },
      "stopOnEntry": false,
      "sourceMaps": true,
      "outFiles": [
        "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
      ],
    },
    {
      "name": "Launch Storybook",
      "type": "node",
      "request": "launch",
      "cwd": "${workspaceFolder}/extensions/ql-vscode",
      "runtimeExecutable": "npm",
      "runtimeArgs": ["run-script", "storybook"]
    }
  ]
}

9  .vscode/settings.json  vendored
@@ -30,12 +30,11 @@
    "typescript",
    "typescriptreact"
  ],
  "eslint.options": {
    // This is necessary so that eslint can properly resolve its plugins
    "resolvePluginsRelativeTo": "./extensions/ql-vscode"
  },
  // This is necessary to ensure that ESLint can find the correct configuration files and plugins.
  "eslint.workingDirectories": ["./extensions/ql-vscode"],
  "editor.formatOnSave": false,
  "typescript.preferences.quoteStyle": "single",
  "javascript.preferences.quoteStyle": "single",
  "editor.wordWrapColumn": 100
  "editor.wordWrapColumn": 100,
  "jest.rootPath": "./extensions/ql-vscode"
}

@@ -1 +1,3 @@
**/* @github/codeql-vscode-reviewers
**/remote-queries/ @github/code-scanning-secexp-reviewers
**/variant-analysis/ @github/code-scanning-secexp-reviewers

@@ -29,7 +29,9 @@ Here are a few things you can do that will increase the likelihood of your pull

## Setting up a local build

Make sure you have installed recent versions of vscode (>= v1.52), node (>=12.16), and npm (>= 7.5.2). Earlier versions will probably work, but we no longer test against them.
Make sure you have installed recent versions of vscode, node, and npm. Check the `engines` block in [`package.json`](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/package.json) file for compatible versions. Earlier versions may work, but we no longer test against them.

To automatically switch to the correct version of node, we recommend using [nvm](https://github.com/nvm-sh/nvm), which will pick-up the node version from `.nvmrc`.

### Installing all packages

@@ -56,8 +58,6 @@ We recommend that you keep `npm run watch` running in the backgound and you only

1. on first checkout
2. whenever any of the non-TypeScript resources have changed
3. on any change to files included in one of the webviews
- **Important**: This is easy to forget. You must explicitly run `npm run build` whenever one of the files in the webview is changed. These are the files in the `src/view` and `src/compare/view` folders.

### Installing the extension

@@ -77,6 +77,20 @@ $ vscode/scripts/code-cli.sh --install-extension dist/vscode-codeql-*.vsix # if

You can use VS Code to debug the extension without explicitly installing it. Just open this directory as a workspace in VS Code, and hit `F5` to start a debugging session.

### Storybook

You can use [Storybook](https://storybook.js.org/) to preview React components outside VSCode. Inside the `extensions/ql-vscode` directory, run:

```shell
npm run storybook
```

Your browser should automatically open to the Storybook UI. Stories live in the `src/stories` directory.

Alternatively, you can start Storybook inside of VSCode. There is a VSCode launch configuration for starting Storybook. It can be found in the debug view.

More information about Storybook can be found inside the **Overview** page once you have launched Storybook.

### Running the unit tests and integration tests that do not require a CLI instance

Unit tests and many integration tests do not require a copy of the CodeQL CLI.
@@ -95,15 +109,21 @@ Running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point

### Running the integration tests

The _Launch Integration Tests - With CLI_ tests require a CLI instance in order to run. There are several environment variables you can use to configure this.
You will need to run CLI tests using a task from inside of VS Code called _Launch Integration Tests - With CLI_.

From inside of VSCode, open the `launch.json` file and in the _Launch Integration Tests - With CLI_ uncomment and change the environment variables appropriate for your purpose.
The CLI integration tests require the CodeQL standard libraries in order to run so you will need to clone a local copy of the `github/codeql` repository.

From inside of VSCode, open the `launch.json` file and in the _Launch Integration Tests - With CLI_ task, uncomment the `"${workspaceRoot}/../codeql"` line. If necessary, replace value with a path to your checkout, and then run the task.

## Releasing (write access required)

1. Double-check the `CHANGELOG.md` contains all desired change comments and has the version to be released with date at the top.
   * Go through all recent PRs and make sure they are properly accounted for.
   * Make sure all changelog entries have links back to their PR(s) if appropriate.
1. Double-check that the node version we're using matches the one used for VS Code. If it doesn't, you will then need to update the node version in the following files:
   * `.nvmrc` - this will enable `nvm` to automatically switch to the correct node version when you're in the project folder
   * `.github/workflows/main.yml` - all the "node-version: <version>" settings
   * `.github/workflows/release.yml` - the "node-version: <version>" setting
1. Double-check that the extension `package.json` and `package-lock.json` have the version you intend to release. If you are doing a patch release (as opposed to minor or major version) this should already be correct.
1. Create a PR for this release:
   * This PR will contain any missing bits from steps 1 and 2. Most of the time, this will just be updating `CHANGELOG.md` with today's date.
@@ -111,19 +131,40 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
   * Create a new commit with a message the same as the branch name.
   * Create a PR for this branch.
   * Wait for the PR to be merged into `main`
1. Trigger a release build on Actions by adding a new tag on branch `main` named after the release, as above. Note that when you push to upstream, you will need to fully qualify the ref. A command like this will work:
1. Switch to `main` and add a new tag on the `main` branch with your new version (named after the release), e.g.
   ```bash
   git checkout main
   git tag v1.3.6
   ```

   If you've accidentally created a badly named tag, you can delete it via
   ```bash
   git tag -d badly-named-tag
   ```
1. Push the new tag up:

   a. If you're using a fork of the repo:

   ```bash
   git push upstream refs/tags/v1.3.6
   ```

   b. If you're working straight in this repo:

   ```bash
   git push origin refs/tags/v1.3.6
   ```

   This will trigger [a release build](https://github.com/github/vscode-codeql/releases) on Actions.

   * **IMPORTANT** Make sure you are on the `main` branch and your local checkout is fully updated when you add the tag.
   * If you accidentally add the tag to the wrong ref, you can just force push it to the right one later.

1. Monitor the status of the release build in the `Release` workflow in the Actions tab.
   * DO NOT approve the "publish" stages of the workflow yet.
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
   or look at the source if there's any doubt the right code is being shipped.
1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
1. Go to the actions tab of the vscode-codeql repository and select the [Release workflow](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease).
   - If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
1. Approve the deployments of the correct Release workflow. This will automatically publish to Open VSX and VS Code Marketplace.
@@ -143,12 +184,7 @@ To regenerate the Open VSX token:
1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
1. Update the secret in the `publish-open-vsx` environment in the project settings.

To regenerate the VSCode Marketplace token:

1. Follow the instructions on [getting a PAT for Azure DevOps](https://code.visualstudio.com/api/working-with-extensions/publishing-extension#get-a-personal-access-token).
1. Update the secret in the `publish-vscode-marketplace` environment in the project settings.

Not that Azure DevOps PATs expire yearly and must be regenerated.
To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 90 days and must be regenerated.

## Resources

@@ -10,7 +10,7 @@ module.exports = {
    node: true,
    es6: true,
  },
  extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
  extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:jest-dom/recommended"],
  rules: {
    "@typescript-eslint/no-use-before-define": 0,
    "@typescript-eslint/no-unused-vars": [

2  extensions/ql-vscode/.npmrc  Normal file
@@ -0,0 +1,2 @@
# Storybook requires this option to be set. See https://github.com/storybookjs/storybook/issues/18298
legacy-peer-deps=true

1  extensions/ql-vscode/.nvmrc  Normal file
@@ -0,0 +1 @@
v16.14.2

19  extensions/ql-vscode/.storybook/main.ts  Normal file
@@ -0,0 +1,19 @@
import type { StorybookConfig } from '@storybook/core-common';

const config: StorybookConfig = {
  stories: [
    '../src/**/*.stories.mdx',
    '../src/**/*.stories.@(js|jsx|ts|tsx)'
  ],
  addons: [
    '@storybook/addon-links',
    '@storybook/addon-essentials',
    '@storybook/addon-interactions'
  ],
  framework: '@storybook/react',
  core: {
    builder: '@storybook/builder-webpack5'
  }
};

module.exports = config;

7  extensions/ql-vscode/.storybook/manager.ts  Normal file
@@ -0,0 +1,7 @@
import { addons } from '@storybook/addons';
import { themes } from '@storybook/theming';

addons.setConfig({
  theme: themes.dark,
  enableShortcuts: false,
});

38  extensions/ql-vscode/.storybook/preview.ts  Normal file
@@ -0,0 +1,38 @@
import { themes } from '@storybook/theming';
import { action } from '@storybook/addon-actions';

// Allow all stories/components to use Codicons
import '@vscode/codicons/dist/codicon.css';

import '../src/stories/vscode-theme.css';

// https://storybook.js.org/docs/react/configure/overview#configure-story-rendering
export const parameters = {
  // All props starting with `on` will automatically receive an action as a prop
  actions: { argTypesRegex: "^on[A-Z].*" },
  // All props matching these names will automatically get the correct control
  controls: {
    matchers: {
      color: /(background|color)$/i,
      date: /Date$/,
    },
  },
  // Use a dark theme to be aligned with VSCode
  docs: {
    theme: themes.dark,
  },
  backgrounds: {
    default: 'dark',
    values: [
      {
        name: 'dark',
        value: '#1e1e1e',
      },
    ],
  }
};

(window as any).acquireVsCodeApi = () => ({
  postMessage: action('post-vscode-message'),
  setState: action('set-vscode-state'),
});

@@ -1,5 +1,89 @@
# CodeQL for Visual Studio Code: Changelog

## 1.7.2 - 14 October 2022

- Fix a bug where results created in older versions were thought to be unsuccessful. [#1605](https://github.com/github/vscode-codeql/pull/1605)

## 1.7.1 - 12 October 2022

- Fix a bug where it was not possible to add a database folder if the folder name starts with `db-`. [#1565](https://github.com/github/vscode-codeql/pull/1565)
- Ensure the results view opens in an editor column beside the currently active editor. [#1557](https://github.com/github/vscode-codeql/pull/1557)

## 1.7.0 - 20 September 2022

- Remove ability to download databases from LGTM. [#1467](https://github.com/github/vscode-codeql/pull/1467)
- Remove the ability to manually upgrade databases from the context menu on databases. Databases are non-destructively upgraded automatically so for most users this was not needed. For advanced users this is still available in the Command Palette. [#1501](https://github.com/github/vscode-codeql/pull/1501)
- Always restart the query server after a manual database upgrade. This avoids a bug in the query server where an invalid dbscheme was being retained in memory after an upgrade. [#1519](https://github.com/github/vscode-codeql/pull/1519)

## 1.6.12 - 1 September 2022

- Add ability for users to download databases directly from GitHub. [#1485](https://github.com/github/vscode-codeql/pull/1485)
- Fix a race condition that could cause a failure to open the evaluator log when running a query. [#1490](https://github.com/github/vscode-codeql/pull/1490)
- Fix an error when running a query with an older version of the CodeQL CLI. [#1490](https://github.com/github/vscode-codeql/pull/1490)

## 1.6.11 - 25 August 2022

No user facing changes.

## 1.6.10 - 9 August 2022

No user facing changes.

## 1.6.9 - 20 July 2022

No user facing changes.

## 1.6.8 - 29 June 2022

- Fix a bug where quick queries cannot be compiled if the core libraries are not in the workspace. [#1411](https://github.com/github/vscode-codeql/pull/1411)
- Fix a bug where quick evaluation of library files would display an error message when using CodeQL CLI v2.10.0. [#1412](https://github.com/github/vscode-codeql/pull/1412)

## 1.6.7 - 15 June 2022

- Prints end-of-query evaluator log summaries to the Query Log. [#1349](https://github.com/github/vscode-codeql/pull/1349)
- Be consistent about casing in Query History menu. [#1369](https://github.com/github/vscode-codeql/pull/1369)
- Fix quoting string columns in exported CSV results. [#1379](https://github.com/github/vscode-codeql/pull/1379)

## 1.6.6 - 17 May 2022

No user facing changes.

## 1.6.5 - 25 April 2022

- Re-enable publishing to open-vsx. [#1285](https://github.com/github/vscode-codeql/pull/1285)

## 1.6.4 - 6 April 2022

No user facing changes.

## 1.6.3 - 4 April 2022

- Fix a bug where the AST viewer was not synchronizing its selected node when the editor selection changes. [#1230](https://github.com/github/vscode-codeql/pull/1230)
- Avoid synchronizing the `codeQL.cli.executablePath` setting. [#1252](https://github.com/github/vscode-codeql/pull/1252)
- Open the directory in the finder/explorer (instead of just highlighting it) when running the "Open query directory" command from the query history view. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Ensure query label in the query history view changes are persisted across restarts. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Prints end-of-query evaluator log summaries to the Query Server Console. [#1264](https://github.com/github/vscode-codeql/pull/1264)

## 1.6.1 - 17 March 2022

No user facing changes.

## 1.6.0 - 7 March 2022

- Fix a bug where database upgrades could not be resolved if some of the target pack's dependencies are outside of the workspace. [#1138](https://github.com/github/vscode-codeql/pull/1138)
- Open the query server logs for query errors (instead of the extension log). This will make it easier to track down query errors. [#1158](https://github.com/github/vscode-codeql/pull/1158)
- Fix a bug where queries took a long time to run if there are no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
- [BREAKING CHANGE] The `codeQL.runningQueries.customLogDirectory` setting is deprecated and no longer has any function. Instead, all query log files will be stored in the query history directory, next to the query results. [#1178](https://github.com/github/vscode-codeql/pull/1178)
- Add a _Open query directory_ command for query items. This command opens the directory containing all artifacts for a query. [#1179](https://github.com/github/vscode-codeql/pull/1179)
- Add options to display evaluator logs for a given query run. Some information that was previously found in the query server output may now be found here. [#1186](https://github.com/github/vscode-codeql/pull/1186)

## 1.5.11 - 10 February 2022

- Fix a bug where invoking _View AST_ from the file explorer would not view the selected file. Instead it would view the active editor. Also, prevent the _View AST_ from appearing if the current selection includes a directory or multiple files. [#1113](https://github.com/github/vscode-codeql/pull/1113)
- Add query history items as soon as a query is run, including new icons for each history item. [#1094](https://github.com/github/vscode-codeql/pull/1094)
- Save query history items across restarts. Items will be saved for 30 days and can be overwritten by setting the `codeQL.queryHistory.ttl` configuration setting. [#1130](https://github.com/github/vscode-codeql/pull/1130)
- Allow in-progress query items to be cancelled from the query history view. [#1105](https://github.com/github/vscode-codeql/pull/1105)

## 1.5.10 - 25 January 2022

- Fix a bug where the results view moved column even when it was already visible. [#1070](https://github.com/github/vscode-codeql/pull/1070)
@@ -24,7 +108,7 @@
- Fix a bug with importing large databases. Databases over 4GB can now be imported directly from LGTM or from a zip file. This functionality is only available when using CodeQL CLI version 2.6.0 or later. [#971](https://github.com/github/vscode-codeql/pull/971)
- Replace certain control codes (`U+0000` - `U+001F`) with their corresponding control labels (`U+2400` - `U+241F`) in the results view. [#963](https://github.com/github/vscode-codeql/pull/963)
- Allow case-insensitive project slugs for GitHub repositories when adding a CodeQL database from LGTM. [#978](https://github.com/github/vscode-codeql/pull/961)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See https://codeql.github.com/docs/codeql-cli/testing-query-help-files for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See [the CodeQL CLI docs](https://codeql.github.com/docs/codeql-cli/testing-query-help-files) for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Make "Open Referenced File" command accessible from the active editor menu. [#989](https://github.com/github/vscode-codeql/pull/989)
- Fix a bug where result set names in the result set drop-down were disappearing when viewing a sorted table. [#1007](https://github.com/github/vscode-codeql/pull/1007)
- Allow query result locations with 0 as the end column value. These are treated as the first column in the line. [#1002](https://github.com/github/vscode-codeql/pull/1002)

@@ -22,7 +22,7 @@ For information about other configurations, see the separate [CodeQL help](https

### Quick start: Using CodeQL

1. [Import a database from LGTM](#importing-a-database-from-lgtm).
1. [Import a database from GitHub](#importing-a-database-from-github).
1. [Run a query](#running-a-query).

---
@@ -73,18 +73,19 @@ If you're using your own clone of the CodeQL standard libraries, you can do a `g

You can find all the commands contributed by the extension in the Command Palette (**Ctrl+Shift+P** or **Cmd+Shift+P**) by typing `CodeQL`, many of them are also accessible through the interface, and via keyboard shortcuts.

### Importing a database from LGTM
### Importing a database from GitHub

While you can use the [CodeQL CLI to create your own databases](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/), the simplest way to start is by downloading a database from LGTM.com.
While you can use the [CodeQL CLI to create your own databases](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/), the simplest way to start is by downloading a database from GitHub.com.

1. Open [LGTM.com](https://lgtm.com/#explore) in your browser.
1. Search for a project you're interested in, for example [Apache Kafka](https://lgtm.com/projects/g/apache/kafka).
1. Copy the link to that project, for example `https://lgtm.com/projects/g/apache/kafka`.
1. In VS Code, open the Command Palette and choose the **CodeQL: Download Database from LGTM** command.
1. Find a project that you're interested in on GitHub.com, for example [Apache Kafka](https://github.com/apache/kafka).
1. Copy the link to that project, for example `https://github.com/apache/kafka`.
1. In VS Code, open the Command Palette and choose the **CodeQL: Download Database from GitHub** command.
1. Paste the link you copied earlier.
1. Select the language for the database you want to download (only required if the project has databases for multiple languages).
1. Once the CodeQL database has been imported, it is displayed in the Databases view.

For more information, see [Choosing a database](https://codeql.github.com/docs/codeql-for-visual-studio-code/analyzing-your-projects/#choosing-a-database) on codeql.github.com.

### Running a query

The instructions below assume that you're using the CodeQL starter workspace, or that you've added the CodeQL libraries and queries repository to your workspace.

@@ -1,5 +1,6 @@
import * as gulp from 'gulp';
import * as replace from 'gulp-replace';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const replace = require('gulp-replace');

/** Inject the application insights key into the telemetry file */
export function injectAppInsightsKey() {

@@ -1,5 +1,4 @@
import * as fs from 'fs-extra';
import * as jsonc from 'jsonc-parser';
import * as path from 'path';

export interface DeployedPackage {
@@ -28,7 +27,7 @@ async function copyPackage(sourcePath: string, destPath: string): Promise<void>

export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
  try {
    const packageJson: any = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));
    const packageJson: any = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

    // Default to development build; use flag --release to indicate release build.
    const isDevBuild = !process.argv.includes('--release');

@@ -1,8 +1,8 @@
import * as gulp from 'gulp';
import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput } from './typescript';
import { compileTypeScript, watchTypeScript, cleanOutput } from './typescript';
import { compileTextMateGrammar } from './textmate';
import { copyTestData } from './tests';
import { compileView } from './webpack';
import { compileView, watchView } from './webpack';
import { packageExtension } from './package';
import { injectAppInsightsKey } from './appInsights';

@@ -10,9 +10,18 @@ export const buildWithoutPackage =
  gulp.series(
    cleanOutput,
    gulp.parallel(
      compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss
      compileTypeScript, compileTextMateGrammar, compileView, copyTestData
    )
  );

export { cleanOutput, compileTextMateGrammar, watchTypeScript, compileTypeScript, copyTestData, injectAppInsightsKey };
export {
  cleanOutput,
  compileTextMateGrammar,
  watchTypeScript,
  watchView,
  compileTypeScript,
  copyTestData,
  injectAppInsightsKey,
  compileView,
};
export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);

@@ -219,14 +219,14 @@ function transformFile(yaml: any) {
}

export function transpileTextMateGrammar() {
  return through.obj((file: Vinyl, _encoding: string, callback: Function): void => {
  return through.obj((file: Vinyl, _encoding: string, callback: (err: string | null, file: Vinyl | PluginError) => void): void => {
    if (file.isNull()) {
      callback(null, file);
    }
    else if (file.isBuffer()) {
      const buf: Buffer = file.contents;
      const yamlText: string = buf.toString('utf8');
      const jsonData: any = jsYaml.safeLoad(yamlText);
      const jsonData: any = jsYaml.load(yamlText);
      transformFile(jsonData);

      file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), 'utf8');

@@ -5,7 +5,7 @@
    "strict": true,
    "module": "commonjs",
    "target": "es2017",
    "lib": ["es6"],
    "lib": ["ES2021"],
    "moduleResolution": "node",
    "sourceMap": true,
    "rootDir": ".",
@@ -16,7 +16,8 @@
    "noImplicitReturns": true,
    "experimentalDecorators": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true
    "noUnusedParameters": true,
    "esModuleInterop": true
  },
  "include": ["*.ts"]
}

@@ -39,9 +39,3 @@ export function compileTypeScript() {
export function watchTypeScript() {
  gulp.watch('src/**/*.ts', compileTypeScript);
}

/** Copy CSS files for the results view into the output directory. */
export function copyViewCss() {
  return gulp.src('src/**/view/*.css')
    .pipe(gulp.dest('out'));
}

@@ -1,12 +1,11 @@
import * as path from 'path';
import * as webpack from 'webpack';
import * as MiniCssExtractPlugin from 'mini-css-extract-plugin';

export const config: webpack.Configuration = {
  mode: 'development',
  entry: {
    resultsView: './src/view/results.tsx',
    compareView: './src/compare/view/Compare.tsx',
    remoteQueriesView: './src/remote-queries/view/RemoteQueries.tsx',
    webview: './src/view/webview.tsx'
  },
  output: {
    path: path.resolve(__dirname, '..', 'out'),
@@ -31,9 +30,7 @@ export const config: webpack.Configuration = {
      {
        test: /\.less$/,
        use: [
          {
            loader: 'style-loader'
          },
          MiniCssExtractPlugin.loader,
          {
            loader: 'css-loader',
            options: {
@@ -53,17 +50,31 @@ export const config: webpack.Configuration = {
      {
        test: /\.css$/,
        use: [
          {
            loader: 'style-loader'
          },
          MiniCssExtractPlugin.loader,
          {
            loader: 'css-loader'
          }
        ]
      },
      {
        test: /\.(woff(2)?|ttf|eot)$/,
        use: [
          {
            loader: 'file-loader',
            options: {
              name: '[name].[ext]',
              outputPath: 'fonts/',
              // We need this to make Webpack use the correct path for the fonts.
              // Without this, the CSS file will use `url([object Module])`
              esModule: false
            }
          },
        ],
      }
    ]
  },
  performance: {
    hints: false
  }
  },
  plugins: [new MiniCssExtractPlugin()],
};

@@ -2,7 +2,23 @@ import * as webpack from 'webpack';
|
||||
import { config } from './webpack.config';
|
||||
|
||||
export function compileView(cb: (err?: Error) => void) {
|
||||
webpack(config).run((error, stats) => {
|
||||
doWebpack(config, true, cb);
|
||||
}
|
||||
|
||||
export function watchView(cb: (err?: Error) => void) {
|
||||
const watchConfig = {
|
||||
...config,
|
||||
watch: true,
|
||||
watchOptions: {
|
||||
aggregateTimeout: 200,
|
||||
poll: 1000,
|
||||
}
|
||||
};
|
||||
doWebpack(watchConfig, false, cb);
|
||||
}
|
||||
|
||||
function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
|
||||
const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
|
||||
if (error) {
|
||||
cb(error);
|
||||
}
|
||||
@@ -20,11 +36,16 @@ export function compileView(cb: (err?: Error) => void) {
|
||||
errors: true
|
||||
}));
|
||||
if (stats.hasErrors()) {
|
||||
cb(new Error('Compilation errors detected.'));
|
||||
return;
|
||||
if (failOnError) {
|
||||
cb(new Error('Compilation errors detected.'));
|
||||
return;
|
||||
} else {
|
||||
console.error('Compilation errors detected.');
|
||||
}
|
||||
}
|
||||
cb();
|
||||
}
|
||||
};
|
||||
|
||||
cb();
|
||||
});
|
||||
webpack(internalConfig, resultCb);
|
||||
}
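Note: the hunks above fold compileView and watchView into a shared doWebpack helper, where failOnError decides whether compilation errors fail the gulp callback (one-shot build) or are only logged (watch mode). A condensed sketch of the control flow, assuming webpack's standard Node callback API; stats printing is omitted:

import * as webpack from 'webpack';

function doWebpack(
  internalConfig: webpack.Configuration,
  failOnError: boolean,
  cb: (err?: Error) => void
) {
  webpack(internalConfig, (error, stats) => {
    if (error) {
      cb(error);
      return;
    }
    if (stats?.hasErrors()) {
      if (failOnError) {
        // One-shot builds should fail the gulp task.
        cb(new Error('Compilation errors detected.'));
        return;
      }
      // Watch mode keeps running and only reports the errors.
      console.error('Compilation errors detected.');
    }
    // Note: with `watch: true` webpack invokes this callback on every rebuild.
    cb();
  });
}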
|
||||
|
||||
214
extensions/ql-vscode/jest.config.js
Normal file
@@ -0,0 +1,214 @@
|
||||
/*
|
||||
* For a detailed explanation regarding each configuration property and type check, visit:
|
||||
* https://jestjs.io/docs/configuration
|
||||
*/
|
||||
|
||||
module.exports = {
|
||||
// All imported modules in your tests should be mocked automatically
|
||||
// automock: false,
|
||||
|
||||
// Stop running tests after `n` failures
|
||||
// bail: 0,
|
||||
|
||||
// The directory where Jest should store its cached dependency information
|
||||
// cacheDirectory: "/private/var/folders/6m/1394pht172qgd7dmw1fwjk100000gn/T/jest_dx",
|
||||
|
||||
// Automatically clear mock calls, instances, contexts and results before every test
|
||||
// clearMocks: true,
|
||||
|
||||
// Indicates whether the coverage information should be collected while executing the test
|
||||
// collectCoverage: false,
|
||||
|
||||
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||
// collectCoverageFrom: undefined,
|
||||
|
||||
// The directory where Jest should output its coverage files
|
||||
// coverageDirectory: undefined,
|
||||
|
||||
// An array of regexp pattern strings used to skip coverage collection
|
||||
// coveragePathIgnorePatterns: [
|
||||
// "/node_modules/"
|
||||
// ],
|
||||
|
||||
// Indicates which provider should be used to instrument code for coverage
|
||||
coverageProvider: 'v8',
|
||||
|
||||
// A list of reporter names that Jest uses when writing coverage reports
|
||||
// coverageReporters: [
|
||||
// "json",
|
||||
// "text",
|
||||
// "lcov",
|
||||
// "clover"
|
||||
// ],
|
||||
|
||||
// An object that configures minimum threshold enforcement for coverage results
|
||||
// coverageThreshold: undefined,
|
||||
|
||||
// A path to a custom dependency extractor
|
||||
// dependencyExtractor: undefined,
|
||||
|
||||
// Make calling deprecated APIs throw helpful error messages
|
||||
// errorOnDeprecated: false,
|
||||
|
||||
// The default configuration for fake timers
|
||||
// fakeTimers: {
|
||||
// "enableGlobally": false
|
||||
// },
|
||||
|
||||
// Force coverage collection from ignored files using an array of glob patterns
|
||||
// forceCoverageMatch: [],
|
||||
|
||||
// A path to a module which exports an async function that is triggered once before all test suites
|
||||
// globalSetup: undefined,
|
||||
|
||||
// A path to a module which exports an async function that is triggered once after all test suites
|
||||
// globalTeardown: undefined,
|
||||
|
||||
// A set of global variables that need to be available in all test environments
|
||||
// globals: {},
|
||||
|
||||
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
|
||||
// maxWorkers: "50%",
|
||||
|
||||
// An array of directory names to be searched recursively up from the requiring module's location
|
||||
// moduleDirectories: [
|
||||
// "node_modules"
|
||||
// ],
|
||||
|
||||
// An array of file extensions your modules use
|
||||
moduleFileExtensions: [
|
||||
'js',
|
||||
'mjs',
|
||||
'cjs',
|
||||
'jsx',
|
||||
'ts',
|
||||
'tsx',
|
||||
'json'
|
||||
],
|
||||
|
||||
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
|
||||
'moduleNameMapper': {
|
||||
'\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$': '<rootDir>/test/__mocks__/fileMock.ts',
|
||||
'\\.(css|less)$': '<rootDir>/test/__mocks__/styleMock.ts'
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||
// modulePathIgnorePatterns: [],
|
||||
|
||||
// Activates notifications for test results
|
||||
// notify: false,
|
||||
|
||||
// An enum that specifies notification mode. Requires { notify: true }
|
||||
// notifyMode: "failure-change",
|
||||
|
||||
// A preset that is used as a base for Jest's configuration
|
||||
preset: 'ts-jest',
|
||||
|
||||
// Run tests from one or more projects
|
||||
// projects: undefined,
|
||||
|
||||
// Use this configuration option to add custom reporters to Jest
|
||||
// reporters: undefined,
|
||||
|
||||
// Automatically reset mock state before every test
|
||||
// resetMocks: false,
|
||||
|
||||
// Reset the module registry before running each individual test
|
||||
// resetModules: false,
|
||||
|
||||
// A path to a custom resolver
|
||||
// resolver: undefined,
|
||||
|
||||
// Automatically restore mock state and implementation before every test
|
||||
// restoreMocks: false,
|
||||
|
||||
// The root directory that Jest should scan for tests and modules within
|
||||
// rootDir: undefined,
|
||||
|
||||
// A list of paths to directories that Jest should use to search for files in
|
||||
// roots: [
|
||||
// "<rootDir>"
|
||||
// ],
|
||||
|
||||
// Allows you to use a custom runner instead of Jest's default test runner
|
||||
// runner: "jest-runner",
|
||||
|
||||
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||
// setupFiles: [],
|
||||
|
||||
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||
setupFilesAfterEnv: ['<rootDir>/test/jest.setup.ts'],
|
||||
|
||||
// The number of seconds after which a test is considered as slow and reported as such in the results.
|
||||
// slowTestThreshold: 5,
|
||||
|
||||
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||
// snapshotSerializers: [],
|
||||
|
||||
// The test environment that will be used for testing
|
||||
testEnvironment: 'jsdom',
|
||||
|
||||
// Options that will be passed to the testEnvironment
|
||||
// testEnvironmentOptions: {},
|
||||
|
||||
// Adds a location field to test results
|
||||
// testLocationInResults: false,
|
||||
|
||||
// The glob patterns Jest uses to detect test files
|
||||
testMatch: [
|
||||
'**/__tests__/**/*.[jt]s?(x)'
|
||||
],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||
// testPathIgnorePatterns: [
|
||||
// "/node_modules/"
|
||||
// ],
|
||||
|
||||
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||
// testRegex: [],
|
||||
|
||||
// This option allows the use of a custom results processor
|
||||
// testResultsProcessor: undefined,
|
||||
|
||||
// This option allows use of a custom test runner
|
||||
// testRunner: "jest-circus/runner",
|
||||
|
||||
// A map from regular expressions to paths to transformers
|
||||
transform: {
|
||||
'^.+\\.tsx?$': [
|
||||
'ts-jest',
|
||||
{
|
||||
tsconfig: 'src/view/tsconfig.spec.json',
|
||||
},
|
||||
],
|
||||
'node_modules': [
|
||||
'babel-jest',
|
||||
{
|
||||
presets: [
|
||||
'@babel/preset-env'
|
||||
],
|
||||
plugins: [
|
||||
'@babel/plugin-transform-modules-commonjs',
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||
'transformIgnorePatterns': [
|
||||
// These use ES modules, so need to be transformed
|
||||
'node_modules/(?!(?:@vscode/webview-ui-toolkit|@microsoft/.+|exenv-es6)/.*)'
|
||||
],
|
||||
|
||||
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||
// unmockedModulePathPatterns: undefined,
|
||||
|
||||
// Indicates whether each individual test should be reported during the run
|
||||
// verbose: undefined,
|
||||
|
||||
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||
// watchPathIgnorePatterns: [],
|
||||
|
||||
// Whether to use watchman for file crawling
|
||||
// watchman: true,
|
||||
};
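Note: the moduleNameMapper above routes style and asset imports to two mock modules under test/__mocks__ that are not part of this diff. A plausible minimal shape for them (contents assumed, file names taken from the config):

// test/__mocks__/styleMock.ts, substituted for .css/.less imports
export default {};

// test/__mocks__/fileMock.ts, substituted for images, fonts and media
export default 'test-file-stub';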
|
||||
4
extensions/ql-vscode/media/dark/github.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<!-- From https://github.com/microsoft/vscode-icons -->
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97553 0C3.57186 0 0 3.57186 0 7.97553C0 11.4985 2.29969 14.4832 5.43119 15.5596C5.82263 15.6086 5.96942 15.3639 5.96942 15.1682C5.96942 14.9725 5.96942 14.4832 5.96942 13.7982C3.76758 14.2875 3.27829 12.7217 3.27829 12.7217C2.93578 11.792 2.39755 11.5474 2.39755 11.5474C1.66361 11.0581 2.44648 11.0581 2.44648 11.0581C3.22936 11.107 3.66972 11.8899 3.66972 11.8899C4.40367 13.1131 5.52905 12.7706 5.96942 12.5749C6.01835 12.0367 6.263 11.6942 6.45872 11.4985C4.69725 11.3028 2.83792 10.6177 2.83792 7.53517C2.83792 6.65443 3.1315 5.96942 3.66972 5.38226C3.62079 5.23547 3.32722 4.40367 3.76758 3.32722C3.76758 3.32722 4.4526 3.1315 5.96942 4.15902C6.6055 3.9633 7.29052 3.91437 7.97553 3.91437C8.66055 3.91437 9.34557 4.01223 9.98165 4.15902C11.4985 3.1315 12.1835 3.32722 12.1835 3.32722C12.6239 4.40367 12.3303 5.23547 12.2813 5.43119C12.7706 5.96942 13.1131 6.70336 13.1131 7.5841C13.1131 10.6667 11.2538 11.3028 9.49235 11.4985C9.78593 11.7431 10.0306 12.2324 10.0306 12.9664C10.0306 14.0428 10.0306 14.8746 10.0306 15.1682C10.0306 15.3639 10.1774 15.6086 10.5688 15.5596C13.7492 14.4832 16 11.4985 16 7.97553C15.9511 3.57186 12.3792 0 7.97553 0Z" fill="#C5C5C5"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.3 KiB |
7
extensions/ql-vscode/media/drive.svg
Normal file
@@ -0,0 +1,7 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M15.5 12.1952C15.5 12.9126 14.9137 13.4996 14.1957 13.4996H1.80435C1.08696 13.4996 0.5 12.9126 0.5 12.1952L0.5 9.80435C0.5 9.08696 1.08696 8.5 1.80435 8.5H14.1956C14.9137 8.5 15.5 9.08696 15.5 9.80435L15.5 12.1952Z" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M2.45654 11.5H13.5435" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.5 9.5C13.224 9.5 13 9.725 13 10C13 10.275 13.224 10.5 13.5 10.5C13.776 10.5 14 10.275 14 10C14 9.725 13.776 9.5 13.5 9.5" fill="#959DA5"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.5 9.5C11.224 9.5 11 9.725 11 10C11 10.275 11.224 10.5 11.5 10.5C11.776 10.5 12 10.275 12 10C12 9.725 11.776 9.5 11.5 9.5" fill="#959DA5"/>
|
||||
<path d="M15.5 9.81464L13.8728 2.76261C13.6922 2.06804 12.9572 1.5 12.2391 1.5H3.76087C3.04348 1.5 2.30848 2.06804 2.12783 2.76261L0.5 9.8" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
16
extensions/ql-vscode/media/globe.svg
Normal file
@@ -0,0 +1,16 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="7.5" cy="7.5" r="7" stroke="#959DA5"/>
|
||||
<mask id="mask0_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="15" height="15">
|
||||
<circle cx="7.5" cy="7.5" r="7.5" fill="#C4C4C4"/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_394_2982)">
|
||||
<path d="M14.5 7.5C14.5 9.42971 13.6822 11.1907 12.5493 12.4721C11.4035 13.7683 10.0054 14.5 8.90625 14.5C7.84644 14.5 6.81131 13.8113 6.01569 12.5383C5.22447 11.2724 4.71875 9.49235 4.71875 7.5C4.71875 5.50765 5.22447 3.72765 6.01569 2.4617C6.81131 1.1887 7.84644 0.5 8.90625 0.5C10.0054 0.5 11.4035 1.23172 12.5493 2.52786C13.6822 3.80934 14.5 5.57029 14.5 7.5Z" stroke="#959DA5"/>
|
||||
</g>
|
||||
<mask id="mask1_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="1" y="0" width="16" height="15">
|
||||
<circle cx="9.375" cy="7.5" r="7.5" fill="#C4C4C4"/>
|
||||
</mask>
|
||||
<g mask="url(#mask1_394_2982)">
|
||||
<path d="M10.2812 7.5C10.2812 9.49235 9.77553 11.2724 8.98431 12.5383C8.18869 13.8113 7.15356 14.5 6.09375 14.5C4.99456 14.5 3.5965 13.7683 2.45067 12.4721C1.31781 11.1907 0.5 9.42971 0.5 7.5C0.5 5.57029 1.31781 3.80934 2.45067 2.52786C3.5965 1.23172 4.99456 0.5 6.09375 0.5C7.15356 0.5 8.18869 1.1887 8.98431 2.4617C9.77553 3.72765 10.2812 5.50765 10.2812 7.5Z" stroke="#959DA5"/>
|
||||
</g>
|
||||
<line y1="7.5" x2="15" y2="7.5" stroke="#959DA5"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.4 KiB |
11
extensions/ql-vscode/media/light/github.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<!-- From https://github.com/microsoft/vscode-icons -->
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97578 0C3.57211 0 0.000244141 3.57186 0.000244141 7.97553C0.000244141 11.4985 2.29994 14.4832 5.43144 15.5596C5.82287 15.6086 5.96966 15.3639 5.96966 15.1682C5.96966 14.9725 5.96966 14.4832 5.96966 13.7982C3.76783 14.2875 3.27853 12.7217 3.27853 12.7217C2.93602 11.792 2.3978 11.5474 2.3978 11.5474C1.66385 11.0581 2.44673 11.0581 2.44673 11.0581C3.2296 11.107 3.66997 11.8899 3.66997 11.8899C4.40391 13.1131 5.5293 12.7706 5.96966 12.5749C6.01859 12.0367 6.26324 11.6942 6.45896 11.4985C4.69749 11.3028 2.83816 10.6177 2.83816 7.53517C2.83816 6.65443 3.13174 5.96942 3.66997 5.38226C3.62104 5.23547 3.32746 4.40367 3.76783 3.32722C3.76783 3.32722 4.45284 3.1315 5.96966 4.15902C6.60575 3.9633 7.29076 3.91437 7.97578 3.91437C8.66079 3.91437 9.34581 4.01223 9.98189 4.15902C11.4987 3.1315 12.1837 3.32722 12.1837 3.32722C12.6241 4.40367 12.3305 5.23547 12.2816 5.43119C12.7709 5.96942 13.1134 6.70336 13.1134 7.5841C13.1134 10.6667 11.2541 11.3028 9.4926 11.4985C9.78618 11.7431 10.0308 12.2324 10.0308 12.9664C10.0308 14.0428 10.0308 14.8746 10.0308 15.1682C10.0308 15.3639 10.1776 15.6086 10.5691 15.5596C13.7495 14.4832 16.0002 11.4985 16.0002 7.97553C15.9513 3.57186 12.3794 0 7.97578 0Z" fill="#424242"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0">
|
||||
<rect width="16" height="16" fill="white" transform="translate(0.000244141)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.5 KiB |
53505
extensions/ql-vscode/package-lock.json
generated
File diff suppressed because it is too large
@@ -4,7 +4,7 @@
|
||||
"description": "CodeQL for Visual Studio Code",
|
||||
"author": "GitHub",
|
||||
"private": true,
|
||||
"version": "1.5.10",
|
||||
"version": "1.7.2",
|
||||
"publisher": "GitHub",
|
||||
"license": "MIT",
|
||||
"icon": "media/VS-marketplace-CodeQL-icon.png",
|
||||
@@ -13,14 +13,15 @@
|
||||
"url": "https://github.com/github/vscode-codeql"
|
||||
},
|
||||
"engines": {
|
||||
"vscode": "^1.57.0"
|
||||
"vscode": "^1.59.0",
|
||||
"node": "^16.13.0",
|
||||
"npm": ">=7.20.6"
|
||||
},
|
||||
"categories": [
|
||||
"Programming Languages"
|
||||
],
|
||||
"extensionDependencies": [
|
||||
"hbenl.vscode-test-explorer",
|
||||
"ms-vscode.test-adapter-converter"
|
||||
"hbenl.vscode-test-explorer"
|
||||
],
|
||||
"capabilities": {
|
||||
"untrustedWorkspaces": {
|
||||
@@ -34,29 +35,35 @@
|
||||
},
|
||||
"activationEvents": [
|
||||
"onLanguage:ql",
|
||||
"onLanguage:ql-summary",
|
||||
"onView:codeQLDatabases",
|
||||
"onView:codeQLQueryHistory",
|
||||
"onView:codeQLAstViewer",
|
||||
"onView:codeQLEvalLogViewer",
|
||||
"onView:test-explorer",
|
||||
"onCommand:codeQL.checkForUpdatesToCLI",
|
||||
"onCommand:codeQL.authenticateToGitHub",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseFolder",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseArchive",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseInternet",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseGithub",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseLgtm",
|
||||
"onCommand:codeQL.setCurrentDatabase",
|
||||
"onCommand:codeQL.viewAst",
|
||||
"onCommand:codeQL.viewCfg",
|
||||
"onCommand:codeQL.openReferencedFile",
|
||||
"onCommand:codeQL.previewQueryHelp",
|
||||
"onCommand:codeQL.chooseDatabaseFolder",
|
||||
"onCommand:codeQL.chooseDatabaseArchive",
|
||||
"onCommand:codeQL.chooseDatabaseInternet",
|
||||
"onCommand:codeQL.chooseDatabaseGithub",
|
||||
"onCommand:codeQL.chooseDatabaseLgtm",
|
||||
"onCommand:codeQLDatabases.chooseDatabase",
|
||||
"onCommand:codeQLDatabases.setCurrentDatabase",
|
||||
"onCommand:codeQL.quickQuery",
|
||||
"onCommand:codeQL.restartQueryServer",
|
||||
"onWebviewPanel:resultsView",
|
||||
"onWebviewPanel:codeQL.variantAnalysis",
|
||||
"onFileSystem:codeql-zip-archive"
|
||||
],
|
||||
"main": "./out/extension",
|
||||
@@ -106,6 +113,12 @@
|
||||
"extensions": [
|
||||
".qhelp"
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "ql-summary",
|
||||
"filenames": [
|
||||
"evaluator-log.summary"
|
||||
]
|
||||
}
|
||||
],
|
||||
"grammars": [
|
||||
@@ -131,7 +144,7 @@
|
||||
"title": "CodeQL",
|
||||
"properties": {
|
||||
"codeQL.cli.executablePath": {
|
||||
"scope": "window",
|
||||
"scope": "machine-overridable",
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"markdownDescription": "Path to the CodeQL executable that should be used by the CodeQL extension. The executable is named `codeql` on Linux/Mac and `codeql.exe` on Windows. If empty, the extension will look for a CodeQL executable on your shell PATH, or if CodeQL is not on your PATH, download and manage its own CodeQL executable."
|
||||
@@ -205,7 +218,8 @@
|
||||
null
|
||||
],
|
||||
"default": null,
|
||||
"description": "Path to a directory where the CodeQL extension should store query server logs. If empty, the extension stores logs in a temporary workspace folder and deletes the contents after each run."
|
||||
"description": "Path to a directory where the CodeQL extension should store query server logs. If empty, the extension stores logs in a temporary workspace folder and deletes the contents after each run.",
|
||||
"markdownDeprecationMessage": "This property is deprecated and no longer has any effect. All query logs are stored in the query history folder next to the query results."
|
||||
},
|
||||
"codeQL.runningQueries.quickEvalCodelens": {
|
||||
"type": "boolean",
|
||||
@@ -219,9 +233,15 @@
|
||||
},
|
||||
"codeQL.queryHistory.format": {
|
||||
"type": "string",
|
||||
"default": "%q on %d - %s, %r result count [%t]",
|
||||
"default": "%q on %d - %s %r [%t]",
|
||||
"markdownDescription": "Default string for how to label query history items.\n* %t is the time of the query\n* %q is the human-readable query name\n* %f is the query file name\n* %d is the database name\n* %r is the number of results\n* %s is a status string"
|
||||
},
|
||||
"codeQL.queryHistory.ttl": {
|
||||
"type": "number",
|
||||
"default": 30,
|
||||
"description": "Number of days to retain queries in the query history before being automatically deleted.",
|
||||
"scope": "machine"
|
||||
},
|
||||
"codeQL.runningTests.additionalTestArguments": {
|
||||
"scope": "window",
|
||||
"type": "array",
|
||||
@@ -248,7 +268,7 @@
|
||||
"scope": "application",
|
||||
"description": "Specifies whether or not to write telemetry events to the extension log."
|
||||
},
|
||||
"codeQL.remoteQueries.repositoryLists": {
|
||||
"codeQL.variantAnalysis.repositoryLists": {
|
||||
"type": [
|
||||
"object",
|
||||
null
|
||||
@@ -262,14 +282,14 @@
|
||||
}
|
||||
},
|
||||
"default": null,
|
||||
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to query remotely. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
|
||||
"markdownDescription": "[For internal use only] Lists of GitHub repositories that you want to run variant analysis against. This should be a JSON object where each key is a user-specified name for this repository list, and the value is an array of GitHub repositories (of the form `<owner>/<repo>`)."
|
||||
},
|
||||
"codeQL.remoteQueries.controllerRepo": {
|
||||
"codeQL.variantAnalysis.controllerRepo": {
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
|
||||
"patternErrorMessage": "Please enter a valid GitHub repository",
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository where you can view the progress and results of the \"Run Remote query\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
"markdownDescription": "[For internal use only] The name of the GitHub repository in which the GitHub Actions workflow is run when using the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -287,12 +307,16 @@
|
||||
"title": "CodeQL: Run Query on Multiple Databases"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runRemoteQuery",
|
||||
"title": "CodeQL: Run Remote Query"
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"title": "CodeQL: Run Variant Analysis"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.showFakeRemoteQueryResults",
|
||||
"title": "CodeQL: [Internal] Show fake remote query results"
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"title": "CodeQL: Export Variant Analysis Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.openVariantAnalysis",
|
||||
"title": "CodeQL: Open Variant Analysis"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
@@ -350,6 +374,14 @@
|
||||
"dark": "media/dark/cloud-download.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"title": "Download Database from GitHub",
|
||||
"icon": {
|
||||
"light": "media/light/github.svg",
|
||||
"dark": "media/dark/github.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"title": "Download from LGTM",
|
||||
@@ -366,6 +398,10 @@
|
||||
"command": "codeQL.viewAst",
|
||||
"title": "CodeQL: View AST"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"title": "CodeQL: View CFG"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.upgradeCurrentDatabase",
|
||||
"title": "CodeQL: Upgrade Current Database"
|
||||
@@ -418,6 +454,10 @@
|
||||
"command": "codeQL.chooseDatabaseInternet",
|
||||
"title": "CodeQL: Download Database"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"title": "CodeQL: Download Database from GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseLgtm",
|
||||
"title": "CodeQL: Download Database from LGTM"
|
||||
@@ -444,7 +484,7 @@
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQuery",
|
||||
"title": "Open the query that produced these results",
|
||||
"title": "Open the Query that Produced these Results",
|
||||
"icon": {
|
||||
"light": "media/light/edit.svg",
|
||||
"dark": "media/dark/edit.svg"
|
||||
@@ -494,10 +534,34 @@
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"title": "Show Query Log"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"title": "Open Query Directory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"title": "Show Evaluator Log (Raw JSON)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"title": "Show Evaluator Log (Summary Text)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogViewer",
|
||||
"title": "Show Evaluator Log (UI)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"title": "Cancel"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"title": "Show Query Text"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"title": "Export Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"title": "View Results (CSV)"
|
||||
@@ -522,6 +586,14 @@
|
||||
"command": "codeQLQueryHistory.compareWith",
|
||||
"title": "Compare Results"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"title": "Open Variant Analysis on GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"title": "Copy Repository List"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryResults.nextPathStep",
|
||||
"title": "CodeQL: Show Next Step on Path"
|
||||
@@ -553,6 +625,19 @@
|
||||
"light": "media/light/clear-all.svg",
|
||||
"dark": "media/dark/clear-all.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLEvalLogViewer.clear",
|
||||
"title": "Clear Viewer",
|
||||
"icon": {
|
||||
"light": "media/light/clear-all.svg",
|
||||
"dark": "media/dark/clear-all.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQL.gotoQL",
|
||||
"title": "CodeQL: Go to QL Code",
|
||||
"enablement": "codeql.hasQLSource"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
@@ -583,10 +668,15 @@
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "config.codeQL.canary && view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQuery",
|
||||
"when": "view == codeQLQueryHistory",
|
||||
@@ -621,6 +711,11 @@
|
||||
"command": "codeQLAstViewer.clear",
|
||||
"when": "view == codeQLAstViewer",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLEvalLogViewer.clear",
|
||||
"when": "view == codeQLEvalLogViewer",
|
||||
"group": "navigation"
|
||||
}
|
||||
],
|
||||
"view/item/context": [
|
||||
@@ -634,11 +729,6 @@
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLDatabases"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.upgradeDatabase",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLDatabases"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.renameDatabase",
|
||||
"group": "9_qlCommands",
|
||||
@@ -662,7 +752,7 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.removeHistoryItem",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == interpretedResultsItem || viewItem == rawResultsItem || viewItem == remoteResultsItem || viewItem == cancelledResultsItem || viewItem == cancelledRemoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.setLabel",
|
||||
@@ -672,52 +762,87 @@
|
||||
{
|
||||
"command": "codeQLQueryHistory.compareWith",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && !hasRemoteServer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && viewItem == rawResultsItem || codeql.supportsEvalLog && viewItem == interpretedResultsItem || codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogViewer",
|
||||
"group": "9_qlCommands",
|
||||
"when": "config.codeQL.canary && codeql.supportsEvalLog && viewItem == rawResultsItem || config.codeQL.canary && codeql.supportsEvalLog && viewItem == interpretedResultsItem || config.codeQL.canary && codeql.supportsEvalLog && viewItem == cancelledResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && viewItem == remoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && viewItem != interpretedResultsItem"
|
||||
"when": "viewItem == rawResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvAlerts",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && viewItem == interpretedResultsItem"
|
||||
"when": "viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewSarifAlerts",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && viewItem == interpretedResultsItem"
|
||||
"when": "viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewDil",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory"
|
||||
"when": "viewItem == rawResultsItem || viewItem == interpretedResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && resourceScheme == .qhelp && isWorkspaceTrusted"
|
||||
"when": "viewItem == inProgressResultsItem || viewItem == inProgressRemoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem || viewItem == inProgressRemoteResultsItem || viewItem == cancelledRemoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"group": "9_qlCommands",
|
||||
"when": "viewItem == remoteResultsItem"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.showOutputDifferences",
|
||||
"group": "qltest@1",
|
||||
"when": "view == test-explorer && viewItem == testWithSource"
|
||||
"when": "viewItem == testWithSource"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.acceptOutput",
|
||||
"group": "qltest@2",
|
||||
"when": "view == test-explorer && viewItem == testWithSource"
|
||||
"when": "viewItem == testWithSource"
|
||||
}
|
||||
],
|
||||
"explorer/context": [
|
||||
@@ -729,7 +854,12 @@
|
||||
{
|
||||
"command": "codeQL.viewAst",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceScheme == codeql-zip-archive"
|
||||
"when": "resourceScheme == codeql-zip-archive && !explorerResourceIsFolder && !listMultiSelection"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"group": "9_qlCommands",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runQueries",
|
||||
@@ -761,11 +891,15 @@
|
||||
"when": "resourceLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runRemoteQuery",
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.showFakeRemoteQueryResults",
|
||||
"command": "codeQL.openVariantAnalysis",
|
||||
"when": "config.codeQL.canary && config.codeQL.variantAnalysis.liveResults"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.exportVariantAnalysisResults",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
@@ -792,6 +926,14 @@
|
||||
"command": "codeQL.viewAst",
|
||||
"when": "resourceScheme == codeql-zip-archive"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseLgtm",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.setCurrentDatabase",
|
||||
"when": "false"
|
||||
@@ -836,6 +978,10 @@
|
||||
"command": "codeQLDatabases.chooseDatabaseInternet",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "false"
|
||||
@@ -860,10 +1006,42 @@
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogViewer",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openOnGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.copyRepoList",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.exportResults",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.viewCsvResults",
|
||||
"when": "false"
|
||||
@@ -908,6 +1086,10 @@
|
||||
"command": "codeQLAstViewer.clear",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLEvalLogViewer.clear",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLTests.acceptOutput",
|
||||
"when": "false"
|
||||
@@ -927,13 +1109,17 @@
|
||||
"when": "editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.runRemoteQuery",
|
||||
"command": "codeQL.runVariantAnalysis",
|
||||
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewAst",
|
||||
"when": "resourceScheme == codeql-zip-archive"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.viewCfg",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.quickEval",
|
||||
"when": "editorLangId == ql"
|
||||
@@ -945,6 +1131,10 @@
|
||||
{
|
||||
"command": "codeQL.previewQueryHelp",
|
||||
"when": "resourceExtname == .qhelp && isWorkspaceTrusted"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.gotoQL",
|
||||
"when": "editorLangId == ql-summary && config.codeQL.canary"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -970,6 +1160,11 @@
|
||||
{
|
||||
"id": "codeQLAstViewer",
|
||||
"name": "AST Viewer"
|
||||
},
|
||||
{
|
||||
"id": "codeQLEvalLogViewer",
|
||||
"name": "Evaluator Log Viewer",
|
||||
"when": "config.codeQL.canary"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -984,7 +1179,11 @@
|
||||
},
|
||||
{
|
||||
"view": "codeQLDatabases",
|
||||
"contents": "Add a CodeQL database:\n[From a folder](command:codeQLDatabases.chooseDatabaseFolder)\n[From an archive](command:codeQLDatabases.chooseDatabaseArchive)\n[From a URL (as a zip file)](command:codeQLDatabases.chooseDatabaseInternet)\n[From LGTM](command:codeQLDatabases.chooseDatabaseLgtm)"
|
||||
"contents": "Add a CodeQL database:\n[From a folder](command:codeQLDatabases.chooseDatabaseFolder)\n[From an archive](command:codeQLDatabases.chooseDatabaseArchive)\n[From a URL (as a zip file)](command:codeQLDatabases.chooseDatabaseInternet)\n[From GitHub](command:codeQLDatabases.chooseDatabaseGithub)"
|
||||
},
|
||||
{
|
||||
"view": "codeQLEvalLogViewer",
|
||||
"contents": "Run the 'Show Evaluator Log (UI)' command on a CodeQL query run in the Query History view."
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -992,31 +1191,48 @@
|
||||
"build": "gulp",
|
||||
"watch": "npm-run-all -p watch:*",
|
||||
"watch:extension": "tsc --watch",
|
||||
"test": "mocha --exit -r ts-node/register test/pure-tests/**/*.ts",
|
||||
"watch:webpack": "gulp watchView",
|
||||
"test": "npm-run-all -p test:*",
|
||||
"test:unit": "mocha --exit -r ts-node/register -r test/mocha.setup.js test/pure-tests/**/*.ts",
|
||||
"test:view": "jest",
|
||||
"preintegration": "rm -rf ./out/vscode-tests && gulp",
|
||||
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
|
||||
"cli-integration": "npm run preintegration && node ./out/vscode-tests/run-integration-tests.js cli-integration",
|
||||
"update-vscode": "node ./node_modules/vscode/bin/install",
|
||||
"format": "tsfmt -r && eslint src test --ext .ts,.tsx --fix",
|
||||
"lint": "eslint src test --ext .ts,.tsx --max-warnings=0",
|
||||
"format-staged": "lint-staged"
|
||||
"format-staged": "lint-staged",
|
||||
"storybook": "start-storybook -p 6006",
|
||||
"build-storybook": "build-storybook"
|
||||
},
|
||||
"dependencies": {
|
||||
"@octokit/rest": "^18.5.6",
|
||||
"@octokit/plugin-retry": "^3.0.9",
|
||||
"@octokit/rest": "^19.0.4",
|
||||
"@primer/octicons-react": "^17.6.0",
|
||||
"@primer/react": "^35.0.0",
|
||||
"@vscode/codicons": "^0.0.31",
|
||||
"@vscode/webview-ui-toolkit": "^1.0.1",
|
||||
"child-process-promise": "^2.2.1",
|
||||
"classnames": "~2.2.6",
|
||||
"fs-extra": "^9.0.1",
|
||||
"glob-promise": "^3.4.0",
|
||||
"js-yaml": "^3.14.0",
|
||||
"minimist": "~1.2.5",
|
||||
"d3": "^7.6.1",
|
||||
"d3-graphviz": "^2.6.1",
|
||||
"fs-extra": "^10.0.1",
|
||||
"glob-promise": "^4.2.2",
|
||||
"immutable": "^4.0.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"minimist": "~1.2.6",
|
||||
"nanoid": "^3.2.0",
|
||||
"node-fetch": "~2.6.7",
|
||||
"path-browserify": "^1.0.1",
|
||||
"react": "^16.8.6",
|
||||
"react-dom": "^16.8.6",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"semver": "~7.3.2",
|
||||
"source-map": "^0.7.4",
|
||||
"source-map-support": "^0.5.21",
|
||||
"stream": "^0.0.2",
|
||||
"stream-chain": "~2.2.4",
|
||||
"stream-json": "~1.7.3",
|
||||
"styled-components": "^5.3.3",
|
||||
"tmp": "^0.1.0",
|
||||
"tmp-promise": "~3.0.2",
|
||||
"tree-kill": "~1.2.2",
|
||||
@@ -1026,28 +1242,46 @@
|
||||
"vscode-languageclient": "^6.1.3",
|
||||
"vscode-test-adapter-api": "~1.7.0",
|
||||
"vscode-test-adapter-util": "~0.7.0",
|
||||
"zip-a-folder": "~0.0.12"
|
||||
"zip-a-folder": "~1.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.18.13",
|
||||
"@babel/plugin-transform-modules-commonjs": "^7.18.6",
|
||||
"@faker-js/faker": "^7.5.0",
|
||||
"@storybook/addon-actions": "^6.5.10",
|
||||
"@storybook/addon-essentials": "^6.5.10",
|
||||
"@storybook/addon-interactions": "^6.5.10",
|
||||
"@storybook/addon-links": "^6.5.10",
|
||||
"@storybook/builder-webpack5": "^6.5.10",
|
||||
"@storybook/manager-webpack5": "^6.5.10",
|
||||
"@storybook/react": "^6.5.10",
|
||||
"@storybook/testing-library": "^0.0.13",
|
||||
"@testing-library/jest-dom": "^5.16.5",
|
||||
"@testing-library/react": "^12.1.5",
|
||||
"@testing-library/user-event": "^14.4.3",
|
||||
"@types/chai": "^4.1.7",
|
||||
"@types/chai-as-promised": "~7.1.2",
|
||||
"@types/child-process-promise": "^2.2.1",
|
||||
"@types/classnames": "~2.2.9",
|
||||
"@types/d3": "^7.4.0",
|
||||
"@types/d3-graphviz": "^2.6.6",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/google-protobuf": "^3.2.7",
|
||||
"@types/gulp": "^4.0.9",
|
||||
"@types/gulp-replace": "0.0.31",
|
||||
"@types/gulp-replace": "^1.1.0",
|
||||
"@types/gulp-sourcemaps": "0.0.32",
|
||||
"@types/jest": "^29.0.2",
|
||||
"@types/js-yaml": "^3.12.5",
|
||||
"@types/jszip": "~3.1.6",
|
||||
"@types/mocha": "^9.0.0",
|
||||
"@types/node": "^12.14.1",
|
||||
"@types/nanoid": "^3.0.0",
|
||||
"@types/node": "^16.11.25",
|
||||
"@types/node-fetch": "~2.5.2",
|
||||
"@types/proxyquire": "~1.3.28",
|
||||
"@types/react": "^16.8.17",
|
||||
"@types/react-dom": "^16.8.4",
|
||||
"@types/react": "^17.0.2",
|
||||
"@types/react-dom": "^17.0.2",
|
||||
"@types/sarif": "~2.1.2",
|
||||
"@types/semver": "~7.2.0",
|
||||
"@types/sinon": "~7.5.2",
|
||||
@@ -1057,54 +1291,62 @@
|
||||
"@types/through2": "^2.0.36",
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/unzipper": "~0.10.1",
|
||||
"@types/vscode": "^1.57.0",
|
||||
"@types/webpack": "^4.32.1",
|
||||
"@types/vscode": "^1.59.0",
|
||||
"@types/webpack": "^5.28.0",
|
||||
"@types/webpack-env": "^1.18.0",
|
||||
"@types/xml2js": "~0.4.4",
|
||||
"@typescript-eslint/eslint-plugin": "^4.26.0",
|
||||
"@typescript-eslint/parser": "^4.26.0",
|
||||
"@vscode/test-electron": "^2.1.5",
|
||||
"ansi-colors": "^4.1.1",
|
||||
"applicationinsights": "^1.8.7",
|
||||
"applicationinsights": "^2.3.5",
|
||||
"babel-loader": "^8.2.5",
|
||||
"chai": "^4.2.0",
|
||||
"chai-as-promised": "~7.1.1",
|
||||
"css-loader": "~3.1.0",
|
||||
"del": "^6.0.0",
|
||||
"eslint": "~6.8.0",
|
||||
"eslint-plugin-jest-dom": "^4.0.2",
|
||||
"eslint-plugin-react": "~7.19.0",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-storybook": "^0.6.4",
|
||||
"file-loader": "^6.2.0",
|
||||
"glob": "^7.1.4",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-replace": "^1.0.0",
|
||||
"gulp-sourcemaps": "^2.6.5",
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-sourcemaps": "^3.0.0",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "~4.2.5",
|
||||
"jsonc-parser": "^2.3.0",
|
||||
"husky": "~4.3.8",
|
||||
"jest": "^29.0.3",
|
||||
"jest-environment-jsdom": "^29.0.3",
|
||||
"lint-staged": "~10.2.2",
|
||||
"mocha": "^9.1.3",
|
||||
"mini-css-extract-plugin": "^2.6.1",
|
||||
"mocha": "^10.0.0",
|
||||
"mocha-sinon": "~2.1.2",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"prettier": "~2.0.5",
|
||||
"proxyquire": "~2.1.3",
|
||||
"sinon": "~9.0.0",
|
||||
"sinon": "~14.0.0",
|
||||
"sinon-chai": "~3.5.0",
|
||||
"style-loader": "~0.23.1",
|
||||
"through2": "^3.0.1",
|
||||
"through2": "^4.0.2",
|
||||
"ts-jest": "^29.0.1",
|
||||
"ts-loader": "^8.1.0",
|
||||
"ts-node": "^8.3.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-protoc-gen": "^0.9.0",
|
||||
"typescript": "^4.3.2",
|
||||
"typescript": "^4.5.5",
|
||||
"typescript-formatter": "^7.2.2",
|
||||
"vsce": "^1.65.0",
|
||||
"vscode-test": "^1.4.0",
|
||||
"webpack": "^5.28.0",
|
||||
"vsce": "^2.7.0",
|
||||
"webpack": "^5.62.2",
|
||||
"webpack-cli": "^4.6.0"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "npm run format-staged",
|
||||
"pre-push": "npm run lint"
|
||||
"pre-push": "npm run lint && scripts/forbid-mocha-only"
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"./**/*.{json,css,scss,md}": [
|
||||
"./**/*.{json,css,scss}": [
|
||||
"prettier --write"
|
||||
],
|
||||
"./**/*.{ts,tsx}": [
|
||||
@@ -1113,6 +1355,6 @@
|
||||
]
|
||||
},
|
||||
"resolutions": {
|
||||
"glob-parent": "~6.0.0"
|
||||
"glob-parent": "6.0.0"
|
||||
}
|
||||
}
|
||||
|
||||
6
extensions/ql-vscode/scripts/forbid-mocha-only
Executable file
@@ -0,0 +1,6 @@
if grep -rq --include '*.test.ts' 'it.only\|describe.only' './test' './src'; then
echo 'There is a .only() in the tests. Please remove it.'
exit 1;
else
exit 0;
fi
129
extensions/ql-vscode/src/abstract-webview.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
WebviewPanelOptions,
|
||||
WebviewOptions
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { tmpDir } from './helpers';
|
||||
import { getHtmlForWebview, WebviewMessage, WebviewView } from './interface-utils';
|
||||
|
||||
export type WebviewPanelConfig = {
|
||||
viewId: string;
|
||||
title: string;
|
||||
viewColumn: ViewColumn;
|
||||
view: WebviewView;
|
||||
preserveFocus?: boolean;
|
||||
additionalOptions?: WebviewPanelOptions & WebviewOptions;
|
||||
}
|
||||
|
||||
export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMessage extends WebviewMessage> extends DisposableObject {
|
||||
protected panel: WebviewPanel | undefined;
|
||||
protected panelLoaded = false;
|
||||
protected panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
protected readonly ctx: ExtensionContext
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
public async restoreView(panel: WebviewPanel): Promise<void> {
|
||||
this.panel = panel;
|
||||
this.setupPanel(panel);
|
||||
}
|
||||
|
||||
protected get isShowingPanel() {
|
||||
return !!this.panel;
|
||||
}
|
||||
|
||||
protected getPanel(): WebviewPanel {
|
||||
if (this.panel == undefined) {
|
||||
const { ctx } = this;
|
||||
|
||||
const config = this.getPanelConfig();
|
||||
|
||||
this.panel = Window.createWebviewPanel(
|
||||
config.viewId,
|
||||
config.title,
|
||||
{ viewColumn: config.viewColumn, preserveFocus: config.preserveFocus },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
...config.additionalOptions,
|
||||
localResourceRoots: [
|
||||
...(config.additionalOptions?.localResourceRoots ?? []),
|
||||
Uri.file(tmpDir.name),
|
||||
Uri.file(path.join(ctx.extensionPath, 'out'))
|
||||
],
|
||||
}
|
||||
);
|
||||
this.setupPanel(this.panel);
|
||||
}
|
||||
return this.panel;
|
||||
}
|
||||
|
||||
protected setupPanel(panel: WebviewPanel): void {
|
||||
const config = this.getPanelConfig();
|
||||
|
||||
this.push(
|
||||
panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.panelLoaded = false;
|
||||
this.onPanelDispose();
|
||||
},
|
||||
null,
|
||||
this.ctx.subscriptions
|
||||
)
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
this.ctx,
|
||||
panel.webview,
|
||||
config.view,
|
||||
{
|
||||
allowInlineStyles: true,
|
||||
}
|
||||
);
|
||||
this.push(
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.onMessage(e),
|
||||
undefined,
|
||||
this.ctx.subscriptions
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
protected abstract getPanelConfig(): WebviewPanelConfig;
|
||||
|
||||
protected abstract onPanelDispose(): void;
|
||||
|
||||
protected abstract onMessage(msg: FromMessage): Promise<void>;
|
||||
|
||||
protected waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this.panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this.panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
protected onWebViewLoaded(): void {
|
||||
this.panelLoaded = true;
|
||||
this.panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this.panelLoadedCallBacks = [];
|
||||
}
|
||||
|
||||
protected postMessage(msg: ToMessage): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
}
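Note: AbstractWebview above leaves getPanelConfig, onPanelDispose and onMessage to subclasses. A hedged sketch of a concrete view built on it; the view id, message shapes and the 'viewLoaded' message name are illustrative, not taken from the codebase:

import { ExtensionContext, ViewColumn } from 'vscode';
import { AbstractWebview, WebviewPanelConfig } from './abstract-webview';

interface ToExampleMessage { t: 'setState'; text: string }
interface FromExampleMessage { t: 'viewLoaded' | 'buttonClicked' }

export class ExampleView extends AbstractWebview<ToExampleMessage, FromExampleMessage> {
  protected getPanelConfig(): WebviewPanelConfig {
    return {
      viewId: 'example-view',          // assumed id
      title: 'Example',
      viewColumn: ViewColumn.Active,
      view: 'example' as any,          // the real WebviewView values live in interface-utils
    };
  }

  protected onPanelDispose(): void {
    // nothing to clean up in this sketch
  }

  protected async onMessage(msg: FromExampleMessage): Promise<void> {
    if (msg.t === 'viewLoaded') {
      this.onWebViewLoaded();
      await this.postMessage({ t: 'setState', text: 'hello' });
    }
  }
}

// Usage sketch: the inherited constructor takes the extension context, and the
// first postMessage lazily creates (or reuses) the underlying WebviewPanel.
export function showExampleView(ctx: ExtensionContext): ExampleView {
  return new ExampleView(ctx);
}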
|
||||
15
extensions/ql-vscode/src/additional-typings.d.ts
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
/**
|
||||
* The d3 library is designed to work in both the browser and
|
||||
* node. Consequently their typings files refer to both node
|
||||
* types like `Buffer` (which don't exist in the browser), and browser
|
||||
* types like `Blob` (which don't exist in node). Instead of sticking
|
||||
* all of `dom` in `compilerOptions.lib`, it suffices just to put in a
|
||||
* stub definition of the affected types so that compilation
|
||||
* succeeds.
|
||||
*/
|
||||
|
||||
declare type RequestInit = Record<string, unknown>;
|
||||
declare type ElementTagNameMap = any;
|
||||
declare type NodeListOf<T> = Record<string, T>;
|
||||
declare type Node = Record<string, unknown>;
|
||||
declare type XMLDocument = Record<string, unknown>;
|
||||
@@ -167,21 +167,26 @@ type Archive = {
|
||||
dirMap: DirectoryHierarchyMap;
|
||||
};
|
||||
|
||||
async function parse_zip(zipPath: string): Promise<Archive> {
|
||||
if (!await fs.pathExists(zipPath))
|
||||
throw vscode.FileSystemError.FileNotFound(zipPath);
|
||||
const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
|
||||
archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
|
||||
return archive;
|
||||
}
|
||||
|
||||
export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
|
||||
private readOnlyError = vscode.FileSystemError.NoPermissions('write operation attempted, but source archive filesystem is readonly');
|
||||
private archives: Map<string, Archive> = new Map;
|
||||
private archives: Map<string, Promise<Archive>> = new Map;
|
||||
|
||||
private async getArchive(zipPath: string): Promise<Archive> {
|
||||
if (!this.archives.has(zipPath)) {
|
||||
if (!await fs.pathExists(zipPath))
|
||||
throw vscode.FileSystemError.FileNotFound(zipPath);
|
||||
const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
|
||||
archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
|
||||
this.archives.set(zipPath, archive);
|
||||
this.archives.set(zipPath, parse_zip(zipPath));
|
||||
}
|
||||
return this.archives.get(zipPath)!;
|
||||
return await this.archives.get(zipPath)!;
|
||||
}
|
||||
|
||||
|
||||
root = new Directory('');
|
||||
|
||||
// metadata
|
||||
|
||||
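Note: the archive-filesystem-provider hunk above changes the cache from Map<string, Archive> to Map<string, Promise<Archive>> and stores the promise before it resolves, so concurrent reads of the same zip share one parse instead of racing. The same pattern in isolation, as a generic helper with assumed names:

// Memoize an async factory by key: the promise itself is cached immediately,
// so concurrent callers for the same key await the same in-flight work.
function memoizeAsync<K, V>(factory: (key: K) => Promise<V>): (key: K) => Promise<V> {
  const cache = new Map<K, Promise<V>>();
  return (key: K) => {
    let pending = cache.get(key);
    if (!pending) {
      pending = factory(key);
      cache.set(key, pending);
    }
    return pending;
  };
}

// Usage sketch mirroring getArchive:
//   const getArchive = memoizeAsync(parse_zip);
//   await Promise.all([getArchive(zipPath), getArchive(zipPath)]); // parses the zip once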
@@ -10,7 +10,8 @@ import {
|
||||
TextEditorSelectionChangeEvent,
|
||||
TextEditorSelectionChangeKind,
|
||||
Location,
|
||||
Range
|
||||
Range,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -104,7 +105,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
|
||||
export class AstViewer extends DisposableObject {
|
||||
private treeView: TreeView<AstItem>;
|
||||
private treeDataProvider: AstViewerDataProvider;
|
||||
private currentFile: string | undefined;
|
||||
private currentFileUri: Uri | undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
@@ -125,12 +126,12 @@ export class AstViewer extends DisposableObject {
|
||||
this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
|
||||
}
|
||||
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileName: string) {
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
|
||||
this.treeDataProvider.roots = roots;
|
||||
this.treeDataProvider.db = db;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = `AST for ${path.basename(fileName)}`;
|
||||
this.currentFile = fileName;
|
||||
this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
|
||||
this.currentFileUri = fileUri;
|
||||
// Handle error on reveal. This could happen if
|
||||
// the tree view is disposed during the reveal.
|
||||
this.treeView.reveal(roots[0], { focus: false })?.then(
|
||||
@@ -174,7 +175,7 @@ export class AstViewer extends DisposableObject {
|
||||
|
||||
if (
|
||||
this.treeView.visible &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFile &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFileUri?.fsPath &&
|
||||
e.selections.length === 1
|
||||
) {
|
||||
const selection = e.selections[0];
|
||||
@@ -199,6 +200,6 @@ export class AstViewer extends DisposableObject {
|
||||
this.treeDataProvider.db = undefined;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = undefined;
|
||||
this.currentFile = undefined;
|
||||
this.currentFileUri = undefined;
|
||||
}
|
||||
}
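Note: the AST viewer hunk above stores the viewed file as a vscode.Uri instead of a path string and compares editors via uri.fsPath on both sides, so the check no longer mixes two representations. A small illustration with a made-up path:

import * as path from 'path';
import { Uri } from 'vscode';

const currentFileUri = Uri.file('/workspace/src/main.ts'); // made-up example

// Both values used by the hunk come straight from the stored Uri:
const treeMessage = `AST for ${path.basename(currentFileUri.fsPath)}`; // "AST for main.ts"
const isSameFile = (editorUri: Uri) => editorUri.fsPath === currentFileUri.fsPath;

console.log(treeMessage, isSameFile(currentFileUri));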
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import * as vscode from 'vscode';
|
||||
import * as Octokit from '@octokit/rest';
|
||||
import { retry } from '@octokit/plugin-retry';
|
||||
|
||||
const GITHUB_AUTH_PROVIDER_ID = 'github';
|
||||
|
||||
// 'repo' scope should be enough for triggering workflows. For a comprehensive list, see:
|
||||
// We need 'repo' scope for triggering workflows and 'gist' scope for exporting results to Gist.
|
||||
// For a comprehensive list of scopes, see:
|
||||
// https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps
|
||||
const SCOPES = ['repo'];
|
||||
const SCOPES = ['repo', 'gist'];
|
||||
|
||||
/**
|
||||
/**
|
||||
* Handles authentication to GitHub, using the VS Code [authentication API](https://code.visualstudio.com/api/references/vscode-api#authentication).
|
||||
*/
|
||||
export class Credentials {
|
||||
@@ -18,6 +20,15 @@ export class Credentials {
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
private constructor() { }
|
||||
|
||||
/**
|
||||
* Initializes an instance of credentials with an octokit instance.
|
||||
*
|
||||
* Do not call this method until you know you actually need an instance of credentials,
* since calling this method will require the user to log in.
|
||||
*
|
||||
* @param context The extension context.
|
||||
* @returns An instance of credentials.
|
||||
*/
|
||||
static async initialize(context: vscode.ExtensionContext): Promise<Credentials> {
|
||||
const c = new Credentials();
|
||||
c.registerListeners(context);
|
||||
@@ -25,12 +36,31 @@ export class Credentials {
|
||||
return c;
|
||||
}
|
||||
|
||||
private async createOctokit(createIfNone: boolean): Promise<Octokit.Octokit | undefined> {
|
||||
/**
|
||||
* Initializes an instance of credentials with an octokit instance using
|
||||
* a token from the user's GitHub account. This method is meant to be
|
||||
* used in non-interactive environments such as tests.
|
||||
*
|
||||
* @param overrideToken The GitHub token to use for authentication.
|
||||
* @returns An instance of credentials.
|
||||
*/
|
||||
static async initializeWithToken(overrideToken: string) {
|
||||
const c = new Credentials();
|
||||
c.octokit = await c.createOctokit(false, overrideToken);
|
||||
return c;
|
||||
}
|
||||
|
||||
private async createOctokit(createIfNone: boolean, overrideToken?: string): Promise<Octokit.Octokit | undefined> {
|
||||
if (overrideToken) {
|
||||
return new Octokit.Octokit({ auth: overrideToken, retry });
|
||||
}
|
||||
|
||||
const session = await vscode.authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, { createIfNone });
|
||||
|
||||
if (session) {
|
||||
return new Octokit.Octokit({
|
||||
auth: session.accessToken
|
||||
auth: session.accessToken,
|
||||
retry
|
||||
});
|
||||
} else {
|
||||
return undefined;
|
||||
@@ -46,16 +76,27 @@ export class Credentials {
|
||||
}));
|
||||
}
|
||||
|
||||
async getOctokit(): Promise<Octokit.Octokit> {
|
||||
/**
|
||||
* Creates or returns an instance of Octokit.
|
||||
*
|
||||
* @param requireAuthentication Whether the Octokit instance needs to be authenticated as user.
|
||||
* @returns An instance of Octokit.
|
||||
*/
|
||||
async getOctokit(requireAuthentication = true): Promise<Octokit.Octokit> {
|
||||
if (this.octokit) {
|
||||
return this.octokit;
|
||||
}
|
||||
|
||||
this.octokit = await this.createOctokit(true);
|
||||
// octokit shouldn't be undefined, since we've set "createIfNone: true".
|
||||
// The following block is mainly here to prevent a compiler error.
|
||||
this.octokit = await this.createOctokit(requireAuthentication);
|
||||
|
||||
if (!this.octokit) {
|
||||
throw new Error('Did not initialize Octokit.');
|
||||
if (requireAuthentication) {
|
||||
throw new Error('Did not initialize Octokit.');
|
||||
}
|
||||
|
||||
// We don't want to set this in this.octokit because that would prevent
|
||||
// authenticating when requireAuthentication is true.
|
||||
return new Octokit.Octokit({ retry });
|
||||
}
|
||||
return this.octokit;
|
||||
}
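
As an illustrative sketch only: the new token-based path could be driven from a test roughly as below. The environment-variable name and import path are assumptions for the example, not part of the diff; `getOctokit(false)` and `initializeWithToken` are the APIs added above.

// Sketch: exercising Credentials without interactive sign-in (e.g. in tests).
import { Credentials } from '../../src/authentication';

async function fetchRepoMetadata(): Promise<void> {
  // TEST_GITHUB_TOKEN is a hypothetical variable name used only in this sketch.
  const token = process.env.TEST_GITHUB_TOKEN;
  if (!token) {
    return;
  }
  const credentials = await Credentials.initializeWithToken(token);
  // Passing `false` avoids prompting the user to log in.
  const octokit = await credentials.getOctokit(false);
  const response = await octokit.request('GET /repos/{owner}/{repo}', {
    owner: 'github',
    repo: 'vscode-codeql',
  });
  console.log(response.data.full_name);
}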

extensions/ql-vscode/src/blob.d.ts (vendored, 11 lines removed)
@@ -1,11 +0,0 @@
/**
* The npm library jszip is designed to work in both the browser and
* node. Consequently its typings @types/jszip refers to both node
* types like `Buffer` (which don't exist in the browser), and browser
* types like `Blob` (which don't exist in node). Instead of sticking
* all of `dom` in `compilerOptions.lib`, it suffices just to put in a
* stub definition of the type `Blob` here so that compilation
* succeeds.
*/

declare type Blob = string;
@@ -1,6 +1,7 @@
import * as semver from 'semver';
import { runCodeQlCliCommand } from './cli';
import { Logger } from './logging';
import { getErrorMessage } from './pure/helpers-pure';

/**
* Get the version of a CodeQL CLI.
@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): P
} catch (e) {
// Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
// Either way, we can't determine compatibility.
void logger.log(`Failed to run 'codeql version'. Reason: ${e.message}`);
void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
return undefined;
}
}
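The hunk above swaps direct access to `e.message` on an untyped catch variable for the `getErrorMessage` helper from `pure/helpers-pure`. The helper's body is not shown in this diff; a plausible shape, offered purely as a sketch and not as the extension's actual implementation, is:

// Sketch of unknown-narrowing helpers; the real helpers-pure.ts may differ.
export function getErrorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}

export function getErrorStack(e: unknown): string {
  return e instanceof Error ? e.stack ?? '' : '';
}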
@@ -1,5 +1,6 @@
|
||||
import * as cpp from 'child-process-promise';
|
||||
import * as child_process from 'child_process';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as sarif from 'sarif';
|
||||
import { SemVer } from 'semver';
|
||||
@@ -7,17 +8,17 @@ import { Readable } from 'stream';
|
||||
import { StringDecoder } from 'string_decoder';
|
||||
import * as tk from 'tree-kill';
|
||||
import { promisify } from 'util';
|
||||
import { CancellationToken, Disposable, Uri } from 'vscode';
|
||||
import { CancellationToken, commands, Disposable, Uri } from 'vscode';
|
||||
|
||||
import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import { CliConfig } from './config';
|
||||
import { allowCanaryQueryServer, CliConfig } from './config';
|
||||
import { DistributionProvider, FindDistributionResultKind } from './distribution';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { QueryMetadata, SortDirection } from './pure/interface-types';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { CompilationMessage } from './pure/messages';
|
||||
import { CompilationMessage } from './pure/legacy-messages';
|
||||
import { sarifParser } from './sarif-parser';
|
||||
import { dbSchemeToLanguage } from './helpers';
|
||||
import { dbSchemeToLanguage, walkDirectory } from './helpers';
|
||||
|
||||
/**
|
||||
* The version of the SARIF format that we are using.
|
||||
@@ -167,7 +168,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
nullBuffer: Buffer;
|
||||
|
||||
/** Version of current cli, lazily computed by the `getVersion()` method */
|
||||
private _version: SemVer | undefined;
|
||||
private _version: Promise<SemVer> | undefined;
|
||||
|
||||
/**
|
||||
* The languages supported by the current version of the CLI, computed by `getSupportedLanguages()`.
|
||||
@@ -239,7 +240,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
/**
|
||||
* Restart the server when the current command terminates
|
||||
*/
|
||||
private restartCliServer(): void {
|
||||
restartCliServer(): void {
|
||||
const callback = (): void => {
|
||||
try {
|
||||
this.killProcessIfRunning();
|
||||
@@ -345,7 +346,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
stderrBuffers.length == 0
|
||||
? new Error(`${description} failed: ${err}`)
|
||||
: new Error(`${description} failed: ${Buffer.concat(stderrBuffers).toString('utf8')}`);
|
||||
newError.stack += (err.stack || '');
|
||||
newError.stack += getErrorStack(err);
|
||||
throw newError;
|
||||
} finally {
|
||||
void this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
|
||||
@@ -403,7 +404,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
if (cancellationToken !== undefined) {
|
||||
cancellationRegistration = cancellationToken.onCancellationRequested(_e => {
|
||||
tk(child.pid);
|
||||
tk(child.pid || 0);
|
||||
});
|
||||
}
|
||||
if (logger !== undefined) {
|
||||
@@ -447,7 +448,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
yield JSON.parse(event) as EventType;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -502,7 +503,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
return JSON.parse(result) as OutputType;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -514,8 +515,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
async resolveLibraryPath(workspaces: string[], queryPath: string): Promise<QuerySetup> {
|
||||
const subcommandArgs = [
|
||||
'--query', queryPath,
|
||||
'--additional-packs',
|
||||
workspaces.join(path.delimiter)
|
||||
...this.getAdditionalPacksArg(workspaces)
|
||||
];
|
||||
return await this.runJsonCodeQlCliCommand<QuerySetup>(['resolve', 'library-path'], subcommandArgs, 'Resolving library paths');
|
||||
}
|
||||
@@ -528,8 +528,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
const subcommandArgs = [
|
||||
'--format', 'bylanguage',
|
||||
queryUri.fsPath,
|
||||
'--additional-packs',
|
||||
workspaces.join(path.delimiter)
|
||||
...this.getAdditionalPacksArg(workspaces)
|
||||
];
|
||||
return JSON.parse(await this.runCodeQlCliCommand(['resolve', 'queries'], subcommandArgs, 'Resolving query by language'));
|
||||
}
|
||||
@@ -562,6 +561,17 @@ export class CodeQLCliServer implements Disposable {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Issues an internal clear-cache command to the cli server. This
|
||||
* command is used to clear the qlpack cache of the server.
|
||||
*
|
||||
* This cache is generally cleared every 1s. This method is used
|
||||
* to force an early clearing of the cache.
|
||||
*/
|
||||
public async clearCache(): Promise<void> {
|
||||
await this.runCodeQlCliCommand(['clear-cache'], [], 'Clearing qlpack cache');
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs QL tests.
|
||||
* @param testPaths Full paths of the tests to run.
|
||||
@@ -573,7 +583,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
): AsyncGenerator<TestCompleted, void, unknown> {
|
||||
|
||||
const subcommandArgs = this.cliConfig.additionalTestArguments.concat([
|
||||
'--additional-packs', workspaces.join(path.delimiter),
|
||||
...this.getAdditionalPacksArg(workspaces),
|
||||
'--threads',
|
||||
this.cliConfig.numberTestThreads.toString(),
|
||||
...testPaths
|
||||
@@ -594,9 +604,17 @@ export class CodeQLCliServer implements Disposable {
|
||||
}
|
||||
|
||||
/** Resolves the ML models that should be available when evaluating a query. */
|
||||
async resolveMlModels(additionalPacks: string[]): Promise<MlModelsInfo> {
|
||||
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(['resolve', 'ml-models'], ['--additional-packs',
|
||||
additionalPacks.join(path.delimiter)], 'Resolving ML models', false);
|
||||
async resolveMlModels(additionalPacks: string[], queryPath: string): Promise<MlModelsInfo> {
|
||||
const args = await this.cliConstraints.supportsPreciseResolveMlModels()
|
||||
// use the dirname of the path so that we can handle query libraries
|
||||
? [...this.getAdditionalPacksArg(additionalPacks), path.dirname(queryPath)]
|
||||
: this.getAdditionalPacksArg(additionalPacks);
|
||||
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
|
||||
['resolve', 'ml-models'],
|
||||
args,
|
||||
'Resolving ML models',
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -651,6 +669,44 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a summary of an evaluation log.
|
||||
* @param endSummaryPath The path to write only the end of query part of the human-readable summary to.
|
||||
* @param inputPath The path of an evaluation event log.
|
||||
* @param outputPath The path to write a human-readable summary of it to.
|
||||
*/
|
||||
async generateLogSummary(
|
||||
inputPath: string,
|
||||
outputPath: string,
|
||||
endSummaryPath: string,
|
||||
): Promise<string> {
|
||||
const subcommandArgs = [
|
||||
'--format=text',
|
||||
`--end-summary=${endSummaryPath}`,
|
||||
...(await this.cliConstraints.supportsSourceMap() ? ['--sourcemap'] : []),
|
||||
inputPath,
|
||||
outputPath
|
||||
];
|
||||
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating log summary');
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a JSON summary of an evaluation log.
|
||||
* @param inputPath The path of an evaluation event log.
|
||||
* @param outputPath The path to write a JSON summary of it to.
|
||||
*/
|
||||
async generateJsonLogSummary(
|
||||
inputPath: string,
|
||||
outputPath: string,
|
||||
): Promise<string> {
|
||||
const subcommandArgs = [
|
||||
'--format=predicates',
|
||||
inputPath,
|
||||
outputPath
|
||||
];
|
||||
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating JSON log summary');
|
||||
}
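A rough usage sketch of the two generators above, assuming a `cliServer: CodeQLCliServer` supplied by the caller; the file names and the relative import path are illustrative, only the method signatures come from the hunks above.

import { CodeQLCliServer } from './cli';

async function summarizeEvaluatorLog(cliServer: CodeQLCliServer, evalLogPath: string): Promise<void> {
  // Human-readable summary plus the end-of-query portion written separately.
  await cliServer.generateLogSummary(
    evalLogPath,
    `${evalLogPath}.summary`,
    `${evalLogPath}.end-summary`
  );
  // Predicate-level JSON summary of the same evaluation log.
  await cliServer.generateJsonLogSummary(
    evalLogPath,
    `${evalLogPath}.summary.jsonl`
  );
}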
|
||||
|
||||
/**
|
||||
* Gets the results from a bqrs.
|
||||
* @param bqrsPath The path to the bqrs.
|
||||
@@ -674,20 +730,13 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runJsonCodeQlCliCommand<DecodedBqrsChunk>(['bqrs', 'decode'], subcommandArgs, 'Reading bqrs data');
|
||||
}
|
||||
|
||||
async runInterpretCommand(format: string, metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
||||
async runInterpretCommand(format: string, additionalArgs: string[], metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
||||
const args = [
|
||||
'--output', interpretedResultsPath,
|
||||
'--format', format,
|
||||
// Forward all of the query metadata.
|
||||
...Object.entries(metadata).map(([key, value]) => `-t=${key}=${value}`)
|
||||
];
|
||||
if (format == SARIF_FORMAT) {
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
// interpretation with and without this flag, or do some
|
||||
// grouping client-side.
|
||||
args.push('--no-group-results');
|
||||
}
|
||||
].concat(additionalArgs);
|
||||
if (sourceInfo !== undefined) {
|
||||
args.push(
|
||||
'--source-archive', sourceInfo.sourceArchive,
|
||||
@@ -709,13 +758,47 @@ export class CodeQLCliServer implements Disposable {
|
||||
await this.runCodeQlCliCommand(['bqrs', 'interpret'], args, 'Interpreting query results');
|
||||
}
|
||||
|
||||
async interpretBqrs(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||
await this.runInterpretCommand(SARIF_FORMAT, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
async interpretBqrsSarif(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||
const additionalArgs = [
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
// interpretation with and without this flag, or do some
|
||||
// grouping client-side.
|
||||
'--no-group-results'
|
||||
];
|
||||
|
||||
await this.runInterpretCommand(SARIF_FORMAT, additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return await sarifParser(interpretedResultsPath);
|
||||
}
|
||||
|
||||
// Warning: this function is untenable for large dot files.
|
||||
async readDotFiles(dir: string): Promise<string[]> {
|
||||
const dotFiles: Promise<string>[] = [];
|
||||
for await (const file of walkDirectory(dir)) {
|
||||
if (file.endsWith('.dot')) {
|
||||
dotFiles.push(fs.readFile(file, 'utf8'));
|
||||
}
|
||||
}
|
||||
return Promise.all(dotFiles);
|
||||
}
|
||||
|
||||
async interpretBqrsGraph(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<string[]> {
|
||||
const additionalArgs = sourceInfo
|
||||
? ['--dot-location-url-format', 'file://' + sourceInfo.sourceLocationPrefix + '{path}:{start:line}:{start:column}:{end:line}:{end:column}']
|
||||
: [];
|
||||
|
||||
await this.runInterpretCommand('dot', additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||
|
||||
try {
|
||||
const dot = await this.readDotFiles(interpretedResultsPath);
|
||||
return dot;
|
||||
} catch (err) {
|
||||
throw new Error(`Reading output of interpretation failed: ${getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
async generateResultsCsv(metadata: QueryMetadata, resultsPath: string, csvPath: string, sourceInfo?: SourceInfo): Promise<void> {
|
||||
await this.runInterpretCommand(CSV_FORMAT, metadata, resultsPath, csvPath, sourceInfo);
|
||||
await this.runInterpretCommand(CSV_FORMAT, [], metadata, resultsPath, csvPath, sourceInfo);
|
||||
}
|
||||
|
||||
async sortBqrs(resultsPath: string, sortedResultsPath: string, resultSet: string, sortKeys: number[], sortDirections: SortDirection[]): Promise<void> {
|
||||
@@ -761,7 +844,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A list of database upgrade script directories
|
||||
*/
|
||||
async resolveUpgrades(dbScheme: string, searchPath: string[], allowDowngradesIfPossible: boolean, targetDbScheme?: string): Promise<UpgradesInfo> {
|
||||
const args = ['--additional-packs', searchPath.join(path.delimiter), '--dbscheme', dbScheme];
|
||||
const args = [...this.getAdditionalPacksArg(searchPath), '--dbscheme', dbScheme];
|
||||
if (targetDbScheme) {
|
||||
args.push('--target-dbscheme', targetDbScheme);
|
||||
if (allowDowngradesIfPossible && await this.cliConstraints.supportsDowngrades()) {
|
||||
@@ -783,7 +866,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A dictionary mapping qlpack name to the directory it comes from
|
||||
*/
|
||||
resolveQlpacks(additionalPacks: string[], searchPath?: string[]): Promise<QlpacksInfo> {
|
||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
||||
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||
if (searchPath?.length) {
|
||||
args.push('--search-path', path.join(...searchPath));
|
||||
}
|
||||
@@ -829,7 +912,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @returns A list of query files found.
|
||||
*/
|
||||
async resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
|
||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
||||
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||
if (searchPath !== undefined) {
|
||||
args.push('--search-path', path.join(...searchPath));
|
||||
}
|
||||
@@ -853,8 +936,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'download'], packs, 'Downloading packs');
|
||||
}
|
||||
|
||||
async packInstall(dir: string) {
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'install'], [dir], 'Installing pack dependencies');
|
||||
async packInstall(dir: string, forceUpdate = false) {
|
||||
const args = [dir];
|
||||
if (forceUpdate) {
|
||||
args.push('--mode', 'update');
|
||||
}
|
||||
return this.runJsonCodeQlCliCommand(['pack', 'install'], args, 'Installing pack dependencies');
|
||||
}
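For illustration, a minimal sketch of the new `forceUpdate` flag in use; the pack directory and import path are assumed values, and the comments describe only what the hunk above adds (the `--mode update` argument).

import { CodeQLCliServer } from './cli';

async function installPackDeps(cliServer: CodeQLCliServer, packDir: string): Promise<void> {
  // Default behaviour: resolve dependencies as before.
  await cliServer.packInstall(packDir);
  // forceUpdate: passes '--mode update' so dependencies are re-resolved.
  await cliServer.packInstall(packDir, true);
}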
|
||||
|
||||
async packBundle(dir: string, workspaceFolders: string[], outputPath: string, precompile = true): Promise<void> {
|
||||
@@ -862,8 +949,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
'-o',
|
||||
outputPath,
|
||||
dir,
|
||||
'--additional-packs',
|
||||
workspaceFolders.join(path.delimiter)
|
||||
...this.getAdditionalPacksArg(workspaceFolders)
|
||||
];
|
||||
if (!precompile && await this.cliConstraints.supportsNoPrecompile()) {
|
||||
args.push('--no-precompile');
|
||||
@@ -899,9 +985,13 @@ export class CodeQLCliServer implements Disposable {
|
||||
|
||||
public async getVersion() {
|
||||
if (!this._version) {
|
||||
this._version = await this.refreshVersion();
|
||||
this._version = this.refreshVersion();
|
||||
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
|
||||
await commands.executeCommand(
|
||||
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
|
||||
);
|
||||
}
|
||||
return this._version;
|
||||
return await this._version;
|
||||
}
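The change above memoizes the pending `Promise<SemVer>` rather than the resolved value, so concurrent callers of `getVersion()` share a single `codeql version` invocation. In isolation the pattern looks roughly like this sketch (generic, not the extension's code):

// Generic promise memoization: the first caller starts the work,
// later callers await the same in-flight promise.
class Lazy<T> {
  private value: Promise<T> | undefined;

  constructor(private readonly compute: () => Promise<T>) {}

  get(): Promise<T> {
    if (!this.value) {
      this.value = this.compute();
    }
    return this.value;
  }
}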
|
||||
|
||||
private async refreshVersion() {
|
||||
@@ -918,6 +1008,12 @@ export class CodeQLCliServer implements Disposable {
|
||||
throw new Error('No distribution found');
|
||||
}
|
||||
}
|
||||
|
||||
private getAdditionalPacksArg(paths: string[]): string[] {
|
||||
return paths.length
|
||||
? ['--additional-packs', paths.join(path.delimiter)]
|
||||
: [];
|
||||
}
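For reference, the helper above centralizes the `--additional-packs` flag that the earlier hunks previously built inline; restated standalone for illustration, with example values that are not part of the diff:

import * as path from 'path';

// Standalone restatement of getAdditionalPacksArg for illustration.
function getAdditionalPacksArg(paths: string[]): string[] {
  return paths.length
    ? ['--additional-packs', paths.join(path.delimiter)]
    : [];
}

// On Linux/macOS (path.delimiter === ':'):
//   getAdditionalPacksArg(['/w/ql', '/w/custom'])  -> ['--additional-packs', '/w/ql:/w/custom']
//   getAdditionalPacksArg([])                      -> []   (no flag passed at all)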
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1004,7 +1100,7 @@ export async function runCodeQlCliCommand(
|
||||
void logger.log('CLI command succeeded.');
|
||||
return result.stdout;
|
||||
} catch (err) {
|
||||
throw new Error(`${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1060,8 +1156,8 @@ class SplitBuffer {
|
||||
while (this.searchIndex <= (this.buffer.length - this.maxSeparatorLength)) {
|
||||
for (const separator of this.separators) {
|
||||
if (SplitBuffer.startsWith(this.buffer, separator, this.searchIndex)) {
|
||||
const line = this.buffer.substr(0, this.searchIndex);
|
||||
this.buffer = this.buffer.substr(this.searchIndex + separator.length);
|
||||
const line = this.buffer.slice(0, this.searchIndex);
|
||||
this.buffer = this.buffer.slice(this.searchIndex + separator.length);
|
||||
this.searchIndex = 0;
|
||||
return line;
|
||||
}
|
||||
@@ -1152,6 +1248,9 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_WITH_LANGUAGE = new SemVer('2.4.1');
|
||||
|
||||
|
||||
public static CLI_VERSION_WITH_NONDESTRUCTIVE_UPGRADES = new SemVer('2.4.2');
|
||||
|
||||
/**
|
||||
* CLI version where `codeql resolve upgrades` supports
|
||||
* the `--allow-downgrades` flag
|
||||
@@ -1165,7 +1264,7 @@ export class CliVersionConstraint {
|
||||
|
||||
/**
|
||||
* CLI version where database registration was introduced
|
||||
*/
|
||||
*/
|
||||
public static CLI_VERSION_WITH_DB_REGISTRATION = new SemVer('2.4.1');
|
||||
|
||||
/**
|
||||
@@ -1185,7 +1284,7 @@ export class CliVersionConstraint {
|
||||
public static CLI_VERSION_WITH_NO_PRECOMPILE = new SemVer('2.7.1');
|
||||
|
||||
/**
|
||||
* CLI version where remote queries are supported.
|
||||
* CLI version where remote queries (variant analysis) are supported.
|
||||
*/
|
||||
public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');
|
||||
|
||||
@@ -1194,6 +1293,11 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_WITH_RESOLVE_ML_MODELS = new SemVer('2.7.3');
|
||||
|
||||
/**
|
||||
* CLI version where the `resolve ml-models` subcommand was enhanced to work with packaging.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS = new SemVer('2.10.0');
|
||||
|
||||
/**
|
||||
* CLI version where the `--old-eval-stats` option to the query server was introduced.
|
||||
*/
|
||||
@@ -1204,6 +1308,33 @@ export class CliVersionConstraint {
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PACKAGING = new SemVer('2.6.0');
|
||||
|
||||
/**
|
||||
* CLI version where the `--evaluator-log` and related options to the query server were introduced,
|
||||
* on a per-query server basis.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
|
||||
|
||||
/**
|
||||
* CLI version that supports rotating structured logs to produce one per query.
|
||||
*
|
||||
* Note that 2.8.4 supports generating the evaluation logs and summaries,
|
||||
* but 2.9.0 includes a new option to produce the end-of-query summary logs to
|
||||
* the query server console. For simplicity we gate all features behind 2.9.0,
|
||||
* but if a user is tied to the 2.8 release, we can enable evaluator logs
|
||||
* and summaries for them.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');
|
||||
|
||||
/**
|
||||
* CLI version that supports the `--sourcemap` option for log generation.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_SOURCEMAP = new SemVer('2.10.3');
|
||||
|
||||
/**
|
||||
* CLI version that supports the new query server.
|
||||
*/
|
||||
public static CLI_VERSION_WITH_NEW_QUERY_SERVER = new SemVer('2.11.0');
|
||||
|
||||
constructor(private readonly cli: CodeQLCliServer) {
|
||||
/**/
|
||||
}
|
||||
@@ -1220,6 +1351,10 @@ export class CliVersionConstraint {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_LANGUAGE);
|
||||
}
|
||||
|
||||
public async supportsNonDestructiveUpgrades() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NONDESTRUCTIVE_UPGRADES);
|
||||
}
|
||||
|
||||
public async supportsDowngrades() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DOWNGRADES);
|
||||
}
|
||||
@@ -1252,6 +1387,10 @@ export class CliVersionConstraint {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_ML_MODELS);
|
||||
}
|
||||
|
||||
async supportsPreciseResolveMlModels() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS);
|
||||
}
|
||||
|
||||
async supportsOldEvalStats() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_OLD_EVAL_STATS);
|
||||
}
|
||||
@@ -1259,4 +1398,24 @@ export class CliVersionConstraint {
|
||||
async supportsPackaging() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PACKAGING);
|
||||
}
|
||||
|
||||
async supportsStructuredEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
|
||||
}
|
||||
|
||||
async supportsPerQueryEvalLog() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
|
||||
}
|
||||
|
||||
async supportsSourceMap() {
|
||||
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_SOURCEMAP);
|
||||
}
|
||||
|
||||
async supportsNewQueryServer() {
|
||||
// TODO while under development, users _must_ opt-in to the new query server
|
||||
// by setting the `codeql.canaryQueryServer` setting to `true`.
|
||||
// Ignore the version check for now.
|
||||
return allowCanaryQueryServer();
|
||||
// return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
} from 'vscode';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
|
||||
import { logger } from './logging';
|
||||
import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { telemetryListener } from './telemetry';
|
||||
|
||||
export class UserCancellationException extends Error {
|
||||
@@ -121,8 +122,9 @@ export function commandRunner(
|
||||
try {
|
||||
return await task(...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
@@ -132,8 +134,8 @@ export function commandRunner(
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
fullMessage
|
||||
@@ -160,7 +162,8 @@ export function commandRunner(
|
||||
export function commandRunnerWithProgress<R>(
|
||||
commandId: string,
|
||||
task: ProgressTask<R>,
|
||||
progressOptions: Partial<ProgressOptions>
|
||||
progressOptions: Partial<ProgressOptions>,
|
||||
outputLogger = logger
|
||||
): Disposable {
|
||||
return commands.registerCommand(commandId, async (...args: any[]) => {
|
||||
const startTime = Date.now();
|
||||
@@ -172,21 +175,23 @@ export function commandRunnerWithProgress<R>(
|
||||
try {
|
||||
return await withProgress(progressOptionsWithDefaults, task, ...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
void logger.log(errorMessage);
|
||||
void outputLogger.log(errorMessage);
|
||||
} else {
|
||||
void showAndLogWarningMessage(errorMessage);
|
||||
void showAndLogWarningMessage(errorMessage, { outputLogger });
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
outputLogger,
|
||||
fullMessage
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import {
|
||||
WebviewPanel,
|
||||
ExtensionContext,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { tmpDir } from '../run-queries';
|
||||
import { CompletedQuery } from '../query-results';
|
||||
import {
|
||||
FromCompareViewMessage,
|
||||
ToCompareViewMessage,
|
||||
@@ -18,36 +11,38 @@ import {
|
||||
import { Logger } from '../logging';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseManager } from '../databases';
|
||||
import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
|
||||
import { jumpToLocation } from '../interface-utils';
|
||||
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
|
||||
import resultsDiff from './resultsDiff';
|
||||
import { CompletedLocalQueryInfo } from '../query-results';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { HistoryItemLabelProvider } from '../history-item-label-provider';
|
||||
import { AbstractWebview, WebviewPanelConfig } from '../abstract-webview';
|
||||
|
||||
interface ComparePair {
|
||||
from: CompletedQuery;
|
||||
to: CompletedQuery;
|
||||
from: CompletedLocalQueryInfo;
|
||||
to: CompletedLocalQueryInfo;
|
||||
}
|
||||
|
||||
export class CompareInterfaceManager extends DisposableObject {
|
||||
export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompareViewMessage> {
|
||||
private comparePair: ComparePair | undefined;
|
||||
private panel: WebviewPanel | undefined;
|
||||
private panelLoaded = false;
|
||||
private panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
constructor(
|
||||
private ctx: ExtensionContext,
|
||||
ctx: ExtensionContext,
|
||||
private databaseManager: DatabaseManager,
|
||||
private cliServer: CodeQLCliServer,
|
||||
private logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider,
|
||||
private showQueryResultsCallback: (
|
||||
item: CompletedQuery
|
||||
item: CompletedLocalQueryInfo
|
||||
) => Promise<void>
|
||||
) {
|
||||
super();
|
||||
super(ctx);
|
||||
}
|
||||
|
||||
async showResults(
|
||||
from: CompletedQuery,
|
||||
to: CompletedQuery,
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
selectedResultSetName?: string
|
||||
) {
|
||||
this.comparePair = { from, to };
|
||||
@@ -70,7 +65,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
try {
|
||||
rows = this.compareResults(fromResultSet, toResultSet);
|
||||
} catch (e) {
|
||||
message = e.message;
|
||||
message = getErrorMessage(e);
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
@@ -80,18 +75,14 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
// since we split the description into several rows
|
||||
// only run interpolation if the label is user-defined
|
||||
// otherwise we will wind up with duplicated rows
|
||||
name: from.options.label
|
||||
? from.interpolate(from.getLabel())
|
||||
: from.queryName,
|
||||
status: from.statusString,
|
||||
time: from.time,
|
||||
name: this.labelProvider.getShortLabel(from),
|
||||
status: from.completedQuery.statusString,
|
||||
time: from.startTime,
|
||||
},
|
||||
toQuery: {
|
||||
name: to.options.label
|
||||
? to.interpolate(to.getLabel())
|
||||
: to.queryName,
|
||||
status: to.statusString,
|
||||
time: to.time,
|
||||
name: this.labelProvider.getShortLabel(to),
|
||||
status: to.completedQuery.statusString,
|
||||
time: to.startTime,
|
||||
},
|
||||
},
|
||||
columns: fromResultSet.schema.columns,
|
||||
@@ -99,77 +90,29 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
currentResultSetName: currentResultSetName,
|
||||
rows,
|
||||
message,
|
||||
datebaseUri: to.database.databaseUri,
|
||||
databaseUri: to.initialInfo.databaseInfo.databaseUri,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
getPanel(): WebviewPanel {
|
||||
if (this.panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const panel = (this.panel = Window.createWebviewPanel(
|
||||
'compareView',
|
||||
'Compare CodeQL Query Results',
|
||||
{ viewColumn: ViewColumn.Active, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
Uri.file(tmpDir.name),
|
||||
Uri.file(path.join(this.ctx.extensionPath, 'out')),
|
||||
],
|
||||
}
|
||||
));
|
||||
this.panel.onDidDispose(
|
||||
() => {
|
||||
this.panel = undefined;
|
||||
this.comparePair = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
);
|
||||
|
||||
const scriptPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/compareView.js')
|
||||
);
|
||||
|
||||
const stylesheetPathOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/view/resultsView.css')
|
||||
);
|
||||
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[stylesheetPathOnDisk]
|
||||
);
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
);
|
||||
}
|
||||
return this.panel;
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: 'compareView',
|
||||
title: 'Compare CodeQL Query Results',
|
||||
viewColumn: ViewColumn.Active,
|
||||
preserveFocus: true,
|
||||
view: 'compare',
|
||||
};
|
||||
}
|
||||
|
||||
private waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this.panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this.panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
protected onPanelDispose(): void {
|
||||
this.comparePair = undefined;
|
||||
}
|
||||
|
||||
private async handleMsgFromView(
|
||||
msg: FromCompareViewMessage
|
||||
): Promise<void> {
|
||||
protected async onMessage(msg: FromCompareViewMessage): Promise<void> {
|
||||
switch (msg.t) {
|
||||
case 'compareViewLoaded':
|
||||
this.panelLoaded = true;
|
||||
this.panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this.panelLoadedCallBacks = [];
|
||||
case 'viewLoaded':
|
||||
this.onWebViewLoaded();
|
||||
break;
|
||||
|
||||
case 'changeCompare':
|
||||
@@ -186,20 +129,16 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
}
|
||||
}
|
||||
|
||||
private postMessage(msg: ToCompareViewMessage): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private async findCommonResultSetNames(
|
||||
from: CompletedQuery,
|
||||
to: CompletedQuery,
|
||||
from: CompletedLocalQueryInfo,
|
||||
to: CompletedLocalQueryInfo,
|
||||
selectedResultSetName: string | undefined
|
||||
): Promise<[string[], string, RawResultSet, RawResultSet]> {
|
||||
const fromSchemas = await this.cliServer.bqrsInfo(
|
||||
from.query.resultsPaths.resultsPath
|
||||
from.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
const toSchemas = await this.cliServer.bqrsInfo(
|
||||
to.query.resultsPaths.resultsPath
|
||||
to.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
const fromSchemaNames = fromSchemas['result-sets'].map(
|
||||
(schema) => schema.name
|
||||
@@ -215,12 +154,12 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
const fromResultSet = await this.getResultSet(
|
||||
fromSchemas,
|
||||
currentResultSetName,
|
||||
from.query.resultsPaths.resultsPath
|
||||
from.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
const toResultSet = await this.getResultSet(
|
||||
toSchemas,
|
||||
currentResultSetName,
|
||||
to.query.resultsPaths.resultsPath
|
||||
to.completedQuery.query.resultsPaths.resultsPath
|
||||
);
|
||||
return [
|
||||
commonResultSetNames,
|
||||
@@ -1,13 +0,0 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
browser: true
|
||||
},
|
||||
extends: [
|
||||
"plugin:react/recommended"
|
||||
],
|
||||
settings: {
|
||||
react: {
|
||||
version: 'detect'
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"target": "es6",
|
||||
"outDir": "out",
|
||||
"lib": [
|
||||
"es6",
|
||||
"dom"
|
||||
],
|
||||
"jsx": "react",
|
||||
"sourceMap": true,
|
||||
"rootDir": "..",
|
||||
"strict": true,
|
||||
"noUnusedLocals": true,
|
||||
"noImplicitReturns": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"experimentalDecorators": true
|
||||
},
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
@@ -2,6 +2,9 @@ import { DisposableObject } from './pure/disposable-object';
|
||||
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
|
||||
import { DistributionManager } from './distribution';
|
||||
import { logger } from './logging';
|
||||
import { ONE_DAY_IN_MS } from './pure/time';
|
||||
|
||||
export const ALL_SETTINGS: Setting[] = [];
|
||||
|
||||
/** Helper class to look up a labelled (and possibly nested) setting. */
|
||||
export class Setting {
|
||||
@@ -11,6 +14,7 @@ export class Setting {
|
||||
constructor(name: string, parent?: Setting) {
|
||||
this.name = name;
|
||||
this.parent = parent;
|
||||
ALL_SETTINGS.push(this);
|
||||
}
|
||||
|
||||
get qualifiedName(): string {
|
||||
@@ -35,6 +39,18 @@ export class Setting {
|
||||
return workspace.getConfiguration(this.parent.qualifiedName).update(this.name, value, target);
|
||||
}
|
||||
|
||||
inspect<T>(): InspectionResult<T> | undefined {
|
||||
if (this.parent === undefined) {
|
||||
throw new Error('Cannot update the value of a root setting.');
|
||||
}
|
||||
return workspace.getConfiguration(this.parent.qualifiedName).inspect(this.name);
|
||||
}
|
||||
}
|
||||
|
||||
export interface InspectionResult<T> {
|
||||
globalValue?: T;
|
||||
workspaceValue?: T,
|
||||
workspaceFolderValue?: T,
|
||||
}
|
||||
|
||||
const ROOT_SETTING = new Setting('codeQL');
|
||||
@@ -54,8 +70,11 @@ const DISTRIBUTION_SETTING = new Setting('cli', ROOT_SETTING);
|
||||
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
|
||||
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
|
||||
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
|
||||
|
||||
// Query History configuration
|
||||
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
|
||||
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
|
||||
const QUERY_HISTORY_TTL = new Setting('ttl', QUERY_HISTORY_SETTING);
|
||||
|
||||
/** When these settings change, the distribution should be updated. */
|
||||
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
|
||||
@@ -71,7 +90,6 @@ export interface DistributionConfig {
|
||||
}
|
||||
|
||||
// Query server configuration
|
||||
|
||||
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
|
||||
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
|
||||
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
|
||||
@@ -91,7 +109,10 @@ export const PAGE_SIZE = new Setting('pageSize', RESULTS_DISPLAY_SETTING);
|
||||
const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_QUERIES_SETTING);
|
||||
|
||||
/** When these settings change, the running query server should be restarted. */
|
||||
const QUERY_SERVER_RESTARTING_SETTINGS = [NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING, DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING];
|
||||
const QUERY_SERVER_RESTARTING_SETTINGS = [
|
||||
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
|
||||
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
|
||||
];
|
||||
|
||||
export interface QueryServerConfig {
|
||||
codeQlPath: string;
|
||||
@@ -106,10 +127,11 @@ export interface QueryServerConfig {
|
||||
}
|
||||
|
||||
/** When these settings change, the query history should be refreshed. */
|
||||
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING];
|
||||
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING, QUERY_HISTORY_TTL];
|
||||
|
||||
export interface QueryHistoryConfig {
|
||||
format: string;
|
||||
ttlInMillis: number;
|
||||
onDidChangeConfiguration: Event<void>;
|
||||
}
|
||||
|
||||
@@ -251,6 +273,13 @@ export class QueryHistoryConfigListener extends ConfigListener implements QueryH
|
||||
public get format(): string {
|
||||
return QUERY_HISTORY_FORMAT_SETTING.getValue<string>();
|
||||
}
|
||||
|
||||
/**
|
||||
* The configuration value is in days, but return the value in milliseconds to make it easier to use.
|
||||
*/
|
||||
public get ttlInMillis(): number {
|
||||
return (QUERY_HISTORY_TTL.getValue<number>() || 30) * ONE_DAY_IN_MS;
|
||||
}
|
||||
}
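The getter above converts the day-based `codeQL.queryHistory.ttl` setting into milliseconds, defaulting to 30 days. As a sketch of how a consumer might use it (the item shape with a numeric `startTime` is an assumption for the example, not taken from this diff):

// Hypothetical pruning helper: drops history items older than the configured TTL.
function pruneHistory<T extends { startTime: number }>(items: T[], ttlInMillis: number): T[] {
  const cutoff = Date.now() - ttlInMillis;
  return items.filter(item => item.startTime >= cutoff);
}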
|
||||
|
||||
export class CliConfigListener extends ConfigListener implements CliConfig {
|
||||
@@ -303,16 +332,27 @@ export function isCanary() {
|
||||
return !!CANARY_FEATURES.getValue<boolean>();
|
||||
}
|
||||
|
||||
/**
|
||||
* Enables the experimental query server
|
||||
*/
|
||||
export const CANARY_QUERY_SERVER = new Setting('canaryQueryServer', ROOT_SETTING);
|
||||
|
||||
|
||||
export function allowCanaryQueryServer() {
|
||||
return !!CANARY_QUERY_SERVER.getValue<boolean>();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Avoids caching in the AST viewer if the user is also a canary user.
|
||||
*/
|
||||
export const NO_CACHE_AST_VIEWER = new Setting('disableCache', AST_VIEWER_SETTING);
|
||||
|
||||
// Settings for remote queries
|
||||
const REMOTE_QUERIES_SETTING = new Setting('remoteQueries', ROOT_SETTING);
|
||||
// Settings for variant analysis
|
||||
const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);
|
||||
|
||||
/**
|
||||
* Lists of GitHub repositories that you want to query remotely via the "Run Remote query" command.
|
||||
* Lists of GitHub repositories that you want to query remotely via the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a JSON object where each key is a user-specified name (string),
|
||||
@@ -329,7 +369,22 @@ export async function setRemoteRepositoryLists(lists: Record<string, string[]> |
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the "controller" repository that you want to use with the "Run Remote query" command.
|
||||
* Path to a file that contains lists of GitHub repositories that you want to query remotely via
|
||||
* the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a path to a JSON file that contains a JSON object where each key is a
|
||||
* user-specified name (string), and the value is an array of GitHub repositories
|
||||
* (of the form `<owner>/<repo>`).
|
||||
*/
|
||||
const REPO_LISTS_PATH = new Setting('repositoryListsPath', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function getRemoteRepositoryListsPath(): string | undefined {
|
||||
return REPO_LISTS_PATH.getValue<string>() || undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* The name of the "controller" repository that you want to use with the "Run Variant Analysis" command.
|
||||
* Note: This command is only available for internal users.
|
||||
*
|
||||
* This setting should be a GitHub repository of the form `<owner>/<repo>`.
|
||||
@@ -345,13 +400,26 @@ export async function setRemoteControllerRepo(repo: string | undefined) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether to insecurely load ML models from CodeQL packs.
|
||||
*
|
||||
* This setting is for internal users only.
|
||||
* The branch of "github/codeql-variant-analysis-action" to use with the "Run Variant Analysis" command.
|
||||
* Default value is "main".
|
||||
* Note: This command is only available for internal users.
|
||||
*/
|
||||
const SHOULD_INSECURELY_LOAD_MODELS_FROM_PACKS =
|
||||
new Setting('shouldInsecurelyLoadModelsFromPacks', RUNNING_QUERIES_SETTING);
|
||||
const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function shouldInsecurelyLoadMlModelsFromPacks(): boolean {
|
||||
return SHOULD_INSECURELY_LOAD_MODELS_FROM_PACKS.getValue<boolean>();
|
||||
export function getActionBranch(): string {
|
||||
return ACTION_BRANCH.getValue<string>() || 'main';
|
||||
}
|
||||
|
||||
export function isIntegrationTestMode() {
|
||||
return process.env.INTEGRATION_TEST_MODE === 'true';
|
||||
}
|
||||
|
||||
/**
|
||||
* A flag indicating whether to enable the experimental "live results" feature
|
||||
* for multi-repo variant analyses.
|
||||
*/
|
||||
const LIVE_RESULTS = new Setting('liveResults', REMOTE_QUERIES_SETTING);
|
||||
|
||||
export function isVariantAnalysisLiveResultsEnabled(): boolean {
|
||||
return !!LIVE_RESULTS.getValue<boolean>();
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { QueryWithResults } from '../run-queries';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { ChildAstItem, AstItem } from '../astViewer';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import { Uri } from 'vscode';
|
||||
import { QueryWithResults } from '../run-queries-shared';
|
||||
|
||||
/**
|
||||
* A class that wraps a tree of QL results from a query that
|
||||
@@ -17,7 +18,7 @@ export default class AstBuilder {
|
||||
queryResults: QueryWithResults,
|
||||
private cli: CodeQLCliServer,
|
||||
public db: DatabaseItem,
|
||||
public fileName: string
|
||||
public fileName: Uri
|
||||
) {
|
||||
this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ export enum KeyType {
|
||||
DefinitionQuery = 'DefinitionQuery',
|
||||
ReferenceQuery = 'ReferenceQuery',
|
||||
PrintAstQuery = 'PrintAstQuery',
|
||||
PrintCfgQuery = 'PrintCfgQuery',
|
||||
}
|
||||
|
||||
export function tagOfKeyType(keyType: KeyType): string {
|
||||
@@ -12,6 +13,8 @@ export function tagOfKeyType(keyType: KeyType): string {
|
||||
return 'ide-contextual-queries/local-references';
|
||||
case KeyType.PrintAstQuery:
|
||||
return 'ide-contextual-queries/print-ast';
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'ide-contextual-queries/print-cfg';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +26,8 @@ export function nameOfKeyType(keyType: KeyType): string {
|
||||
return 'references';
|
||||
case KeyType.PrintAstQuery:
|
||||
return 'print AST';
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'print CFG';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +37,7 @@ export function kindOfKeyType(keyType: KeyType): string {
|
||||
case KeyType.ReferenceQuery:
|
||||
return 'definitions';
|
||||
case KeyType.PrintAstQuery:
|
||||
case KeyType.PrintCfgQuery:
|
||||
return 'graph';
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,22 +1,20 @@
|
||||
import * as vscode from 'vscode';
|
||||
|
||||
import { decodeSourceArchiveUri, encodeArchiveBasePath } from '../archive-filesystem-provider';
|
||||
import { ColumnKindCode, EntityValue, getResultSetSchema, ResultSetSchema } from '../pure/bqrs-cli-types';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseManager, DatabaseItem } from '../databases';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import * as messages from '../pure/messages';
|
||||
import { QueryServerClient } from '../queryserver-client';
|
||||
import { QueryWithResults, compileAndRunQueryAgainstDatabase } from '../run-queries';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { KeyType } from './keyType';
|
||||
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
import { CancellationToken, LocationLink, Uri } from 'vscode';
|
||||
import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
|
||||
export const SELECT_QUERY_NAME = '#select';
|
||||
export const TEMPLATE_NAME = 'selectedSourceFile';
|
||||
|
||||
export interface FullLocationLink extends vscode.LocationLink {
|
||||
originUri: vscode.Uri;
|
||||
export interface FullLocationLink extends LocationLink {
|
||||
originUri: Uri;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -29,21 +27,23 @@ export interface FullLocationLink extends vscode.LocationLink {
|
||||
* @param dbm The database manager
|
||||
* @param uriString The selected source file and location
|
||||
* @param keyType The contextual query type to run
|
||||
* @param queryStorageDir The directory to store the query results
|
||||
* @param progress A progress callback
|
||||
* @param token A CancellationToken
|
||||
* @param filter A function that will filter extraneous results
|
||||
*/
|
||||
export async function getLocationsForUriString(
|
||||
cli: CodeQLCliServer,
|
||||
qs: QueryServerClient,
|
||||
qs: QueryRunner,
|
||||
dbm: DatabaseManager,
|
||||
uriString: string,
|
||||
keyType: KeyType,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken,
|
||||
token: CancellationToken,
|
||||
filter: (src: string, dest: string) => boolean
|
||||
): Promise<FullLocationLink[]> {
|
||||
const uri = decodeSourceArchiveUri(vscode.Uri.parse(uriString, true));
|
||||
const uri = decodeSourceArchiveUri(Uri.parse(uriString, true));
|
||||
const sourceArchiveUri = encodeArchiveBasePath(uri.sourceArchiveZipPath);
|
||||
|
||||
const db = dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
|
||||
@@ -56,18 +56,16 @@ export async function getLocationsForUriString(
|
||||
|
||||
const links: FullLocationLink[] = [];
|
||||
for (const query of await resolveQueries(cli, qlpack, keyType)) {
|
||||
const results = await compileAndRunQueryAgainstDatabase(
|
||||
cli,
|
||||
qs,
|
||||
db,
|
||||
false,
|
||||
vscode.Uri.file(query),
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
const initialInfo = await createInitialQueryInfo(
|
||||
Uri.file(query),
|
||||
{
|
||||
name: db.name,
|
||||
databaseUri: db.databaseUri.toString(),
|
||||
},
|
||||
false
|
||||
);
|
||||
|
||||
if (results.result.resultType == messages.QueryResultType.SUCCESS) {
|
||||
const results = await qs.compileAndRunQueryAgainstDatabase(db, initialInfo, queryStorageDir, progress, token, templates);
|
||||
if (results.successful) {
|
||||
links.push(...await getLinksFromResults(results, cli, db, filter));
|
||||
}
|
||||
}
|
||||
@@ -104,15 +102,9 @@ async function getLinksFromResults(
|
||||
return localLinks;
|
||||
}
|
||||
|
||||
function createTemplates(path: string): messages.TemplateDefinitions {
|
||||
function createTemplates(path: string): Record<string, string> {
|
||||
return {
|
||||
[TEMPLATE_NAME]: {
|
||||
values: {
|
||||
tuples: [[{
|
||||
stringValue: path
|
||||
}]]
|
||||
}
|
||||
}
|
||||
[TEMPLATE_NAME]: path
|
||||
};
|
||||
}
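Side by side, the template payload shrinks from the legacy nested shape to a flat string map; the old shape below is reconstructed from the removed lines above, and the file path is an illustrative value only.

// Old (legacy query server messages): nested tuple structure.
const legacyTemplates = {
  selectedSourceFile: { values: { tuples: [[{ stringValue: '/src/app.ts' }]] } }
};

// New (Record<string, string> accepted by the new query runner).
const templates: Record<string, string> = {
  selectedSourceFile: '/src/app.ts'
};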
|
||||
|
||||
|
||||
@@ -45,7 +45,7 @@ async function resolveQueriesFromPacks(cli: CodeQLCliServer, qlpacks: string[],
|
||||
}
|
||||
});
|
||||
}
|
||||
await fs.writeFile(suiteFile, yaml.safeDump(suiteYaml), 'utf8');
|
||||
await fs.writeFile(suiteFile, yaml.dump(suiteYaml), 'utf8');
|
||||
|
||||
const queries = await cli.resolveQueriesInSuite(suiteFile, helpers.getOnDiskWorkspaceFolders());
|
||||
return queries;
|
||||
|
||||
@@ -16,9 +16,6 @@ import { CodeQLCliServer } from '../cli';
|
||||
import { DatabaseManager } from '../databases';
|
||||
import { CachedOperation } from '../helpers';
|
||||
import { ProgressCallback, withProgress } from '../commandRunner';
|
||||
import * as messages from '../pure/messages';
|
||||
import { QueryServerClient } from '../queryserver-client';
|
||||
import { compileAndRunQueryAgainstDatabase, QueryWithResults } from '../run-queries';
|
||||
import AstBuilder from './astBuilder';
|
||||
import {
|
||||
KeyType,
|
||||
@@ -26,6 +23,8 @@ import {
|
||||
import { FullLocationLink, getLocationsForUriString, TEMPLATE_NAME } from './locationFinder';
|
||||
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
|
||||
import { isCanary, NO_CACHE_AST_VIEWER } from '../config';
|
||||
import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
|
||||
/**
|
||||
* Run templated CodeQL queries to find definitions and references in
|
||||
@@ -39,8 +38,9 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private qs: QueryRunner,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
|
||||
}
|
||||
@@ -68,6 +68,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
|
||||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
@@ -81,8 +82,9 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private qs: QueryRunner,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
|
||||
}
|
||||
@@ -115,6 +117,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
this.dbm,
|
||||
uriString,
|
||||
KeyType.DefinitionQuery,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
(src, _dest) => src === uriString
|
||||
@@ -123,33 +126,39 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
|
||||
}
|
||||
}
|
||||
|
||||
type QueryWithDb = {
|
||||
query: QueryWithResults,
|
||||
dbUri: Uri
|
||||
};
|
||||
|
||||
export class TemplatePrintAstProvider {
|
||||
private cache: CachedOperation<QueryWithResults>;
|
||||
private cache: CachedOperation<QueryWithDb>;
|
||||
|
||||
constructor(
|
||||
private cli: CodeQLCliServer,
|
||||
private qs: QueryServerClient,
|
||||
private qs: QueryRunner,
|
||||
private dbm: DatabaseManager,
|
||||
private queryStorageDir: string,
|
||||
) {
|
||||
this.cache = new CachedOperation<QueryWithResults>(this.getAst.bind(this));
|
||||
this.cache = new CachedOperation<QueryWithDb>(this.getAst.bind(this));
|
||||
}
|
||||
|
||||
async provideAst(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
document?: TextDocument
|
||||
fileUri?: Uri
|
||||
): Promise<AstBuilder | undefined> {
|
||||
if (!document) {
|
||||
if (!fileUri) {
|
||||
throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
|
||||
}
|
||||
const queryResults = this.shouldCache()
|
||||
? await this.cache.get(document.uri.toString(), progress, token)
|
||||
: await this.getAst(document.uri.toString(), progress, token);
|
||||
const { query, dbUri } = this.shouldCache()
|
||||
? await this.cache.get(fileUri.toString(), progress, token)
|
||||
: await this.getAst(fileUri.toString(), progress, token);
|
||||
|
||||
return new AstBuilder(
|
||||
queryResults, this.cli,
|
||||
this.dbm.findDatabaseItem(Uri.parse(queryResults.database.databaseUri!, true))!,
|
||||
document.fileName
|
||||
query, this.cli,
|
||||
this.dbm.findDatabaseItem(dbUri)!,
|
||||
fileUri,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -161,7 +170,7 @@ export class TemplatePrintAstProvider {
|
||||
uriString: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<QueryWithResults> {
|
||||
): Promise<QueryWithDb> {
|
||||
const uri = Uri.parse(uriString, true);
|
||||
if (uri.scheme !== zipArchiveScheme) {
|
||||
throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
|
||||
@@ -185,25 +194,83 @@ export class TemplatePrintAstProvider {
|
||||
}
|
||||
|
||||
const query = queries[0];
|
||||
const templates: messages.TemplateDefinitions = {
|
||||
[TEMPLATE_NAME]: {
|
||||
values: {
|
||||
tuples: [[{
|
||||
stringValue: zippedArchive.pathWithinSourceArchive
|
||||
}]]
|
||||
}
|
||||
}
|
||||
const templates: Record<string, string> = {
|
||||
[TEMPLATE_NAME]:
|
||||
zippedArchive.pathWithinSourceArchive
|
||||
};
|
||||
|
||||
return await compileAndRunQueryAgainstDatabase(
|
||||
this.cli,
|
||||
this.qs,
|
||||
db,
|
||||
false,
|
||||
const initialInfo = await createInitialQueryInfo(
|
||||
Uri.file(query),
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
{
|
||||
name: db.name,
|
||||
databaseUri: db.databaseUri.toString(),
|
||||
},
|
||||
false
|
||||
);
|
||||
|
||||
return {
|
||||
query: await this.qs.compileAndRunQueryAgainstDatabase(
|
||||
db,
|
||||
initialInfo,
|
||||
this.queryStorageDir,
|
||||
progress,
|
||||
token,
|
||||
templates
|
||||
),
|
||||
dbUri: db.databaseUri
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export class TemplatePrintCfgProvider {
private cache: CachedOperation<[Uri, Record<string, string>] | undefined>;

constructor(
private cli: CodeQLCliServer,
private dbm: DatabaseManager,
) {
this.cache = new CachedOperation<[Uri, Record<string, string>] | undefined>(this.getCfgUri.bind(this));
}

async provideCfgUri(document?: TextDocument): Promise<[Uri, Record<string, string>] | undefined> {
if (!document) {
return;
}
return await this.cache.get(document.uri.toString());
}

private async getCfgUri(uriString: string): Promise<[Uri, Record<string, string>]> {
const uri = Uri.parse(uriString, true);
if (uri.scheme !== zipArchiveScheme) {
throw new Error('CFG Viewing is only available for databases with zipped source archives.');
}

const zippedArchive = decodeSourceArchiveUri(uri);
const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);

if (!db) {
throw new Error('Can\'t infer database from the provided source.');
}

const qlpack = await qlpackOfDatabase(this.cli, db);
if (!qlpack) {
throw new Error('Can\'t infer qlpack from database source archive.');
}
const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintCfgQuery);
if (queries.length > 1) {
throw new Error(`Found multiple Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`);
}
if (queries.length === 0) {
throw new Error(`Did not find any Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`);
}

const queryUri = Uri.file(queries[0]);

const templates: Record<string, string> = {
[TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive
};

return [queryUri, templates];
}
}

@@ -10,6 +10,8 @@ import {
import { CodeQLCliServer } from './cli';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as Octokit from '@octokit/rest';
import { retry } from '@octokit/plugin-retry';

import { DatabaseManager, DatabaseItem } from './databases';
import {
@@ -20,7 +22,9 @@ import {
ProgressCallback,
} from './commandRunner';
import { logger } from './logging';
import { tmpDir } from './run-queries';
import { tmpDir } from './helpers';
import { Credentials } from './authentication';
import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';

/**
* Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
@@ -46,8 +50,10 @@ export async function promptImportInternetDatabase(

const item = await databaseArchiveFetcher(
databaseUrl,
{},
databaseManager,
storagePath,
undefined,
progress,
token,
cli
@@ -61,6 +67,78 @@ export async function promptImportInternetDatabase(

}

/**
* Prompts a user to fetch a database from GitHub.
* User enters a GitHub repository and then the user is asked which language
* to download (if there is more than one)
*
* @param databaseManager the DatabaseManager
* @param storagePath where to store the unzipped database.
*/
export async function promptImportGithubDatabase(
databaseManager: DatabaseManager,
storagePath: string,
credentials: Credentials | undefined,
progress: ProgressCallback,
token: CancellationToken,
cli?: CodeQLCliServer
): Promise<DatabaseItem | undefined> {
progress({
message: 'Choose repository',
step: 1,
maxStep: 2
});
const githubRepo = await window.showInputBox({
title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
ignoreFocusOut: true,
});
if (!githubRepo) {
return;
}

if (!looksLikeGithubRepo(githubRepo)) {
throw new Error(`Invalid GitHub repository: ${githubRepo}`);
}

const octokit = credentials ? await credentials.getOctokit(true) : new Octokit.Octokit({ retry });

const result = await convertGithubNwoToDatabaseUrl(githubRepo, octokit, progress);
if (!result) {
return;
}

const { databaseUrl, name, owner } = result;

/**
* The 'token' property of the token object returned by `octokit.auth()`.
* The object is undocumented, but looks something like this:
* {
*   token: 'xxxx',
*   tokenType: 'oauth',
*   type: 'token',
* }
* We only need the actual token string.
*/
const octokitToken = (await octokit.auth() as { token: string })?.token;
const item = await databaseArchiveFetcher(
databaseUrl,
{ 'Accept': 'application/zip', 'Authorization': octokitToken ? `Bearer ${octokitToken}` : '' },
databaseManager,
storagePath,
`${owner}/${name}`,
progress,
token,
cli
);
if (item) {
await commands.executeCommand('codeQLDatabases.focus');
void showAndLogInformationMessage('Database downloaded and imported successfully.');
return item;
}
return;
}

/**
* Prompts a user to fetch a database from lgtm.
* User enters a project url and then the user is asked which language
@@ -90,12 +168,14 @@ export async function promptImportLgtmDatabase(
}

if (looksLikeLgtmUrl(lgtmUrl)) {
const databaseUrl = await convertToDatabaseUrl(lgtmUrl, progress);
const databaseUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progress);
if (databaseUrl) {
const item = await databaseArchiveFetcher(
databaseUrl,
{},
databaseManager,
storagePath,
undefined,
progress,
token,
cli
@@ -140,8 +220,10 @@ export async function importArchiveDatabase(
try {
const item = await databaseArchiveFetcher(
databaseUrl,
{},
databaseManager,
storagePath,
undefined,
progress,
token,
cli
@@ -152,7 +234,7 @@ export async function importArchiveDatabase(
}
return item;
} catch (e) {
if (e.message.includes('unexpected end of file')) {
if (getErrorMessage(e).includes('unexpected end of file')) {
throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
} else {
// delegate
@@ -166,15 +248,19 @@ export async function importArchiveDatabase(
* or in the local filesystem.
*
* @param databaseUrl URL from which to grab the database
* @param requestHeaders Headers to send with the request
* @param databaseManager the DatabaseManager
* @param storagePath where to store the unzipped database.
* @param nameOverride a name for the database that overrides the default
* @param progress callback to send progress messages to
* @param token cancellation token
*/
async function databaseArchiveFetcher(
databaseUrl: string,
requestHeaders: { [key: string]: string },
databaseManager: DatabaseManager,
storagePath: string,
nameOverride: string | undefined,
progress: ProgressCallback,
token: CancellationToken,
cli?: CodeQLCliServer,
@@ -193,7 +279,7 @@ async function databaseArchiveFetcher(
if (isFile(databaseUrl)) {
await readAndUnzip(databaseUrl, unzipPath, cli, progress);
} else {
await fetchAndUnzip(databaseUrl, unzipPath, cli, progress);
await fetchAndUnzip(databaseUrl, requestHeaders, unzipPath, cli, progress);
}

progress({
@@ -216,7 +302,7 @@ async function databaseArchiveFetcher(
});
await ensureZippedSourceLocation(dbPath);

const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath));
const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath), nameOverride);
await databaseManager.setCurrentDatabaseItem(item);
return item;
} else {
@@ -292,6 +378,7 @@ async function readAndUnzip(

async function fetchAndUnzip(
databaseUrl: string,
requestHeaders: { [key: string]: string },
unzipPath: string,
cli?: CodeQLCliServer,
progress?: ProgressCallback
@@ -310,7 +397,10 @@ async function fetchAndUnzip(
step: 1,
});

const response = await checkForFailingResponse(await fetch(databaseUrl), 'Error downloading database');
const response = await checkForFailingResponse(
await fetch(databaseUrl, { headers: requestHeaders }),
'Error downloading database'
);
const archiveFileStream = fs.createWriteStream(archivePath);

const contentLength = response.headers.get('content-length');
@@ -325,7 +415,6 @@ async function fetchAndUnzip(

await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, cli, progress);

// remove archivePath eagerly since these archives can be large.
await fs.remove(archivePath);
}
@@ -381,6 +470,88 @@ export async function findDirWithFile(
return;
}

/**
* The URL pattern is https://github.com/{owner}/{name}/{subpages}.
*
* This function accepts any URL that matches the pattern above. It also accepts just the
* name with owner (NWO): `<owner>/<repo>`.
*
* @param githubRepo The GitHub repository URL or NWO
*
* @return true if this looks like a valid GitHub repository URL or NWO
*/
export function looksLikeGithubRepo(
githubRepo: string | undefined
): githubRepo is string {
if (!githubRepo) {
return false;
}
if (REPO_REGEX.test(githubRepo) || convertGitHubUrlToNwo(githubRepo)) {
return true;
}
return false;
}

/**
* Converts a GitHub repository URL to the corresponding NWO.
* @param githubUrl The GitHub repository URL
* @return The corresponding NWO, or undefined if the URL is not valid
*/
function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
try {
const uri = Uri.parse(githubUrl, true);
if (uri.scheme !== 'https') {
return;
}
if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
return;
}
const paths = uri.path.split('/').filter((segment: string) => segment);
const nwo = `${paths[0]}/${paths[1]}`;
if (REPO_REGEX.test(nwo)) {
return nwo;
}
return;
} catch (e) {
// Ignore the error here, since we catch failures at a higher level.
// In particular: returning undefined leads to an error in 'promptImportGithubDatabase'.
return;
}
}

export async function convertGithubNwoToDatabaseUrl(
githubRepo: string,
octokit: Octokit.Octokit,
progress: ProgressCallback): Promise<{
databaseUrl: string,
owner: string,
name: string
} | undefined> {
try {
const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
const [owner, repo] = nwo.split('/');

const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });

const languages = response.data.map((db: any) => db.language);

const language = await promptForLanguage(languages, progress);
if (!language) {
return;
}

return {
databaseUrl: `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`,
owner,
name: repo
};

} catch (e) {
void logger.log(`Error: ${getErrorMessage(e)}`);
throw new Error(`Unable to get database for '${githubRepo}'`);
}
}

/**
* The URL pattern is https://lgtm.com/projects/{provider}/{org}/{name}/{irrelevant-subpages}.
* There are several possibilities for the provider: in addition to GitHub.com (g),
@@ -416,7 +587,7 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
return false;
}

const paths = uri.path.split('/').filter((segment) => segment);
const paths = uri.path.split('/').filter((segment: string) => segment);
return paths.length >= 4 && paths[0] === 'projects';
} catch (e) {
return false;
@@ -434,7 +605,7 @@ function convertRawLgtmSlug(maybeSlug: string): string | undefined {
}
return;
}

function extractProjectSlug(lgtmUrl: string): string | undefined {
// Only matches the '/g/' provider (github)
const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');
@@ -446,7 +617,7 @@ function extractProjectSlug(lgtmUrl: string): string | undefined {
}

// exported for testing
export async function convertToDatabaseUrl(
export async function convertLgtmUrlToDatabaseUrl(
lgtmUrl: string,
progress: ProgressCallback) {
try {
@@ -467,7 +638,9 @@ export async function convertToDatabaseUrl(
}
}

const language = await promptForLanguage(projectJson, progress);
const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];

const language = await promptForLanguage(languages, progress);
if (!language) {
return;
}
@@ -479,7 +652,7 @@ export async function convertToDatabaseUrl(
language,
].join('/')}`;
} catch (e) {
void logger.log(`Error: ${e.message}`);
void logger.log(`Error: ${getErrorMessage(e)}`);
throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
}
}
@@ -487,7 +660,7 @@ export async function convertToDatabaseUrl(
async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
const uri = Uri.parse(lgtmUrl, true);
const paths = ['api', 'v1.0'].concat(
uri.path.split('/').filter((segment) => segment)
uri.path.split('/').filter((segment: string) => segment)
).slice(0, 6);
const projectUrl = `https://lgtm.com/${paths.join('/')}`;
const projectResponse = await fetch(projectUrl);
@@ -495,7 +668,7 @@ async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
}

async function promptForLanguage(
projectJson: any,
languages: string[],
progress: ProgressCallback
): Promise<string | undefined> {
progress({
@@ -503,17 +676,19 @@ async function promptForLanguage(
step: 2,
maxStep: 2
});
if (!projectJson?.languages?.length) {
return;
if (!languages.length) {
throw new Error('No databases found');
}
if (projectJson.languages.length === 1) {
return projectJson.languages[0].language;
if (languages.length === 1) {
return languages[0];
}

return await window.showQuickPick(
projectJson.languages.map((lang: { language: string }) => lang.language), {
placeHolder: 'Select the database language to download:'
}
languages,
{
placeHolder: 'Select the database language to download:',
ignoreFocusOut: true,
}
);
}

@@ -28,16 +28,17 @@ import {
showAndLogErrorMessage
} from './helpers';
import { logger } from './logging';
import { clearCacheInDatabase } from './run-queries';
import * as qsClient from './queryserver-client';
import { upgradeDatabaseExplicit } from './upgrades';
import {
importArchiveDatabase,
promptImportGithubDatabase,
promptImportInternetDatabase,
promptImportLgtmDatabase,
} from './databaseFetcher';
import { CancellationToken } from 'vscode';
import { asyncFilter } from './pure/helpers-pure';
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
import { Credentials } from './authentication';
import { QueryRunner } from './queryRunner';
import { isCanary } from './config';

type ThemableIconPath = { light: string; dark: string } | string;

@@ -217,9 +218,10 @@ export class DatabaseUI extends DisposableObject {

public constructor(
private databaseManager: DatabaseManager,
private readonly queryServer: qsClient.QueryServerClient | undefined,
private readonly queryServer: QueryRunner | undefined,
private readonly storagePath: string,
readonly extensionPath: string
readonly extensionPath: string,
private readonly getCredentials: () => Promise<Credentials>
) {
super();

@@ -291,6 +293,20 @@ export class DatabaseUI extends DisposableObject {
}
)
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseGithub',
async (
progress: ProgressCallback,
token: CancellationToken
) => {
const credentials = isCanary() ? await this.getCredentials() : undefined;
await this.handleChooseDatabaseGithub(credentials, progress, token);
},
{
title: 'Adding database from GitHub',
})
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseLgtm',
@@ -372,12 +388,11 @@ export class DatabaseUI extends DisposableObject {
handleChooseDatabaseFolder = async (
progress: ProgressCallback,
token: CancellationToken
): Promise<DatabaseItem | undefined> => {
): Promise<void> => {
try {
return await this.chooseAndSetDatabase(true, progress, token);
await this.chooseAndSetDatabase(true, progress, token);
} catch (e) {
void showAndLogErrorMessage(e.message);
return undefined;
void showAndLogErrorMessage(getErrorMessage(e));
}
};

@@ -440,12 +455,11 @@ export class DatabaseUI extends DisposableObject {
handleChooseDatabaseArchive = async (
progress: ProgressCallback,
token: CancellationToken
): Promise<DatabaseItem | undefined> => {
): Promise<void> => {
try {
return await this.chooseAndSetDatabase(false, progress, token);
await this.chooseAndSetDatabase(false, progress, token);
} catch (e) {
void showAndLogErrorMessage(e.message);
return undefined;
void showAndLogErrorMessage(getErrorMessage(e));
}
};

@@ -462,6 +476,21 @@ export class DatabaseUI extends DisposableObject {
);
};

handleChooseDatabaseGithub = async (
credentials: Credentials | undefined,
progress: ProgressCallback,
token: CancellationToken
): Promise<DatabaseItem | undefined> => {
return await promptImportGithubDatabase(
this.databaseManager,
this.storagePath,
credentials,
progress,
token,
this.queryServer?.cliServer
);
};

handleChooseDatabaseLgtm = async (
progress: ProgressCallback,
token: CancellationToken
@@ -543,8 +572,7 @@ export class DatabaseUI extends DisposableObject {

// Search for upgrade scripts in any workspace folders available

await upgradeDatabaseExplicit(
this.queryServer,
await this.queryServer.upgradeDatabaseExplicit(
databaseItem,
progress,
token
@@ -559,8 +587,7 @@ export class DatabaseUI extends DisposableObject {
this.queryServer !== undefined &&
this.databaseManager.currentDatabaseItem !== undefined
) {
await clearCacheInDatabase(
this.queryServer,
await this.queryServer.clearCacheInDatabase(
this.databaseManager.currentDatabaseItem,
progress,
token
@@ -590,8 +617,7 @@ export class DatabaseUI extends DisposableObject {
} catch (e) {
// rethrow and let this be handled by default error handling.
throw new Error(
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${e.message
}`
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
);
}
};
@@ -724,7 +750,7 @@ export class DatabaseUI extends DisposableObject {
* Perform some heuristics to ensure a proper database location is chosen.
*
* 1. If the selected URI to add is a file, choose the containing directory
* 2. If the selected URI is a directory matching db-*, choose the containing directory
* 2. If the selected URI appears to be a db language folder, choose the containing directory
* 3. choose the current directory
*
* @param uri a URI that is a database folder or inside it
@@ -737,7 +763,7 @@ export class DatabaseUI extends DisposableObject {
dbPath = path.dirname(dbPath);
}

if (isLikelyDbLanguageFolder(dbPath)) {
if (await isLikelyDbLanguageFolder(dbPath)) {
dbPath = path.dirname(dbPath);
}
return Uri.file(dbPath);

@@ -17,8 +17,8 @@ import {
import { zipArchiveScheme, encodeArchiveBasePath, decodeSourceArchiveUri, encodeSourceArchiveUri } from './archive-filesystem-provider';
import { DisposableObject } from './pure/disposable-object';
import { Logger, logger } from './logging';
import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
import { QueryServerClient } from './queryserver-client';
import { getErrorMessage } from './pure/helpers-pure';
import { QueryRunner } from './queryRunner';

/**
* databases.ts
@@ -147,7 +147,7 @@ export async function findSourceArchive(
}

async function resolveDatabase(
databasePath: string
databasePath: string,
): Promise<DatabaseContents> {

const name = path.basename(databasePath);
@@ -169,7 +169,9 @@ async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
return await glob('*.dbscheme', { cwd: dbDirectory });
}

async function resolveDatabaseContents(uri: vscode.Uri): Promise<DatabaseContents> {
async function resolveDatabaseContents(
uri: vscode.Uri,
): Promise<DatabaseContents> {
if (uri.scheme !== 'file') {
throw new Error(`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`);
}
@@ -356,14 +358,12 @@ export class DatabaseItemImpl implements DatabaseItem {
try {
this._contents = await resolveDatabaseContents(this.databaseUri);
this._error = undefined;
}
catch (e) {
} catch (e) {
this._contents = undefined;
this._error = e;
this._error = e instanceof Error ? e : new Error(String(e));
throw e;
}
}
finally {
} finally {
this.onChanged({
kind: DatabaseEventKind.Refresh,
item: this
@@ -552,30 +552,28 @@ export class DatabaseManager extends DisposableObject {

constructor(
private readonly ctx: ExtensionContext,
private readonly qs: QueryServerClient,
private readonly qs: QueryRunner,
private readonly cli: cli.CodeQLCliServer,
public logger: Logger
) {
super();

qs.onDidStartQueryServer(this.reregisterDatabases.bind(this));

// Let this run async.
void this.loadPersistedState();
qs.onStart(this.reregisterDatabases.bind(this));
}

public async openDatabase(
progress: ProgressCallback,
token: vscode.CancellationToken,
uri: vscode.Uri,
displayName?: string
): Promise<DatabaseItem> {
const contents = await resolveDatabaseContents(uri);
// Ignore the source archive for QLTest databases by default.
const isQLTestDatabase = path.extname(uri.fsPath) === '.testproj';
const fullOptions: FullDatabaseOptions = {
ignoreSourceArchive: isQLTestDatabase,
// displayName is only set if a user explicitly renames a database
displayName: undefined,
// If a displayName is not passed in, the basename of folder containing the database is used.
displayName,
dateAdded: Date.now(),
language: await this.getPrimaryLanguage(uri.fsPath)
};
@@ -690,7 +688,7 @@ export class DatabaseManager extends DisposableObject {
return item;
}

private async loadPersistedState(): Promise<void> {
public async loadPersistedState(): Promise<void> {
return withProgress({
location: vscode.ProgressLocation.Notification
},
@@ -704,6 +702,7 @@ export class DatabaseManager extends DisposableObject {
step
});
try {
void this.logger.log(`Found ${databases.length} persisted databases: ${databases.map(db => db.uri).join(', ')}`);
for (const database of databases) {
progress({
maxStep: databases.length,
@@ -718,16 +717,19 @@ export class DatabaseManager extends DisposableObject {
if (currentDatabaseUri === database.uri) {
await this.setCurrentDatabaseItem(databaseItem, true);
}
}
catch (e) {
void this.logger.log(`Loaded database ${databaseItem.name} at URI ${database.uri}.`);
} catch (e) {
// When loading from persisted state, leave invalid databases in the list. They will be
// marked as invalid, and cannot be set as the current database.
void this.logger.log(`Error loading database ${database.uri}: ${e}.`);
}
}
} catch (e) {
// database list had an unexpected type - nothing to be done?
void showAndLogErrorMessage(`Database list loading failed: ${e.message}`);
void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
}

void this.logger.log('Finished loading persisted databases.');
});
}

@@ -841,7 +843,7 @@ export class DatabaseManager extends DisposableObject {
void logger.log('Deleting database from filesystem.');
fs.remove(item.databaseUri.fsPath).then(
() => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${e.message}`));
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
}

// note that we use undefined as the item in order to reset the entire tree
@@ -856,27 +858,14 @@ export class DatabaseManager extends DisposableObject {
token: vscode.CancellationToken,
dbItem: DatabaseItem,
) {
if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
const databases: Dataset[] = [{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
}];
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
}
await this.qs.deregisterDatabase(progress, token, dbItem);
}

private async registerDatabase(
progress: ProgressCallback,
token: vscode.CancellationToken,
dbItem: DatabaseItem,
) {
if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
const databases: Dataset[] = [{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
}];
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
}
await this.qs.registerDatabase(progress, token, dbItem);
}

private updatePersistedCurrentDatabaseItem(): void {

extensions/ql-vscode/src/eval-log-tree-builder.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { ChildEvalLogTreeItem, EvalLogTreeItem } from './eval-log-viewer';
import { EvalLogData as EvalLogData } from './pure/log-summary-parser';

/** Builds the tree data for the evaluator log viewer for a single query run. */
export default class EvalLogTreeBuilder {
private queryName: string;
private evalLogDataItems: EvalLogData[];

constructor(queryName: string, evaluatorLogDataItems: EvalLogData[]) {
this.queryName = queryName;
this.evalLogDataItems = evaluatorLogDataItems;
}

async getRoots(): Promise<EvalLogTreeItem[]> {
return await this.parseRoots();
}

private async parseRoots(): Promise<EvalLogTreeItem[]> {
const roots: EvalLogTreeItem[] = [];

// Once the viewer can show logs for multiple queries, there will be more than 1 item at the root
// level. For now, there will always be one root (the one query being shown).
const queryItem: EvalLogTreeItem = {
label: this.queryName,
children: [] // Will assign predicate items as children shortly.
};

// Display descriptive message when no data exists
if (this.evalLogDataItems.length === 0) {
const noResultsItem: ChildEvalLogTreeItem = {
label: 'No predicates evaluated in this query run.',
parent: queryItem,
children: [],
};
queryItem.children.push(noResultsItem);
}

// For each predicate, create a TreeItem object with appropriate parents/children
this.evalLogDataItems.forEach(logDataItem => {
const predicateLabel = `${logDataItem.predicateName} (${logDataItem.resultSize} tuples, ${logDataItem.millis} ms)`;
const predicateItem: ChildEvalLogTreeItem = {
label: predicateLabel,
parent: queryItem,
children: [] // Will assign pipeline items as children shortly.
};
for (const [pipelineName, steps] of Object.entries(logDataItem.ra)) {
const pipelineLabel = `Pipeline: ${pipelineName}`;
const pipelineItem: ChildEvalLogTreeItem = {
label: pipelineLabel,
parent: predicateItem,
children: [] // Will assign step items as children shortly.
};
predicateItem.children.push(pipelineItem);

pipelineItem.children = steps.map((step: string) => ({
label: step,
parent: pipelineItem,
children: []
}));
}
queryItem.children.push(predicateItem);
});

roots.push(queryItem);
return roots;
}
}

extensions/ql-vscode/src/eval-log-viewer.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { window, TreeDataProvider, TreeView, TreeItem, ProviderResult, Event, EventEmitter, TreeItemCollapsibleState } from 'vscode';
import { commandRunner } from './commandRunner';
import { DisposableObject } from './pure/disposable-object';
import { showAndLogErrorMessage } from './helpers';

export interface EvalLogTreeItem {
label?: string;
children: ChildEvalLogTreeItem[];
}

export interface ChildEvalLogTreeItem extends EvalLogTreeItem {
parent: ChildEvalLogTreeItem | EvalLogTreeItem;
}

/** Provides data from parsed CodeQL evaluator logs to be rendered in a tree view. */
class EvalLogDataProvider extends DisposableObject implements TreeDataProvider<EvalLogTreeItem> {
public roots: EvalLogTreeItem[] = [];

private _onDidChangeTreeData: EventEmitter<EvalLogTreeItem | undefined | null | void> = new EventEmitter<EvalLogTreeItem | undefined | null | void>();
readonly onDidChangeTreeData: Event<EvalLogTreeItem | undefined | null | void> = this._onDidChangeTreeData.event;

refresh(): void {
this._onDidChangeTreeData.fire();
}

getTreeItem(element: EvalLogTreeItem): TreeItem | Thenable<TreeItem> {
const state = element.children.length
? TreeItemCollapsibleState.Collapsed
: TreeItemCollapsibleState.None;
const treeItem = new TreeItem(element.label || '', state);
treeItem.tooltip = `${treeItem.label || ''}`;
return treeItem;
}

getChildren(element?: EvalLogTreeItem): ProviderResult<EvalLogTreeItem[]> {
// If no item is passed, return the root.
if (!element) {
return this.roots || [];
}
// Otherwise it is called with an existing item, to load its children.
return element.children;
}

getParent(element: ChildEvalLogTreeItem): ProviderResult<EvalLogTreeItem> {
return element.parent;
}
}

/** Manages a tree viewer of structured evaluator logs. */
export class EvalLogViewer extends DisposableObject {
private treeView: TreeView<EvalLogTreeItem>;
private treeDataProvider: EvalLogDataProvider;

constructor() {
super();

this.treeDataProvider = new EvalLogDataProvider();
this.treeView = window.createTreeView('codeQLEvalLogViewer', {
treeDataProvider: this.treeDataProvider,
showCollapseAll: true
});

this.push(this.treeView);
this.push(this.treeDataProvider);
this.push(
commandRunner('codeQLEvalLogViewer.clear', async () => {
this.clear();
})
);
}

private clear(): void {
this.treeDataProvider.roots = [];
this.treeDataProvider.refresh();
this.treeView.message = undefined;
}

// Called when the Show Evaluator Log (UI) command is run on a new query.
updateRoots(roots: EvalLogTreeItem[]): void {
this.treeDataProvider.roots = roots;
this.treeDataProvider.refresh();

this.treeView.message = 'Viewer for query run:'; // Currently only one query supported at a time.

// Handle error on reveal. This could happen if
// the tree view is disposed during the reveal.
this.treeView.reveal(roots[0], { focus: false })?.then(
() => { /**/ },
err => showAndLogErrorMessage(err)
);
}
}

@@ -1,5 +1,7 @@
import 'source-map-support/register';
import {
CancellationToken,
CancellationTokenSource,
commands,
Disposable,
ExtensionContext,
@@ -18,6 +20,7 @@ import {
} from 'vscode';
import { LanguageClient } from 'vscode-languageclient';
import * as os from 'os';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as tmp from 'tmp-promise';
import { testExplorerExtensionId, TestHub } from 'vscode-test-adapter-api';
@@ -40,7 +43,8 @@ import { DatabaseUI } from './databases-ui';
import {
TemplateQueryDefinitionProvider,
TemplateQueryReferenceProvider,
TemplatePrintAstProvider
TemplatePrintAstProvider,
TemplatePrintCfgProvider
} from './contextual/templateProvider';
import {
DEFAULT_DISTRIBUTION_VERSION_RANGE,
@@ -52,20 +56,29 @@ import {
GithubApiError,
GithubRateLimitedError
} from './distribution';
import * as helpers from './helpers';
import { assertNever } from './pure/helpers-pure';
import {
findLanguage,
tmpDirDisposal,
showBinaryChoiceDialog,
showAndLogErrorMessage,
showAndLogWarningMessage,
showAndLogInformationMessage,
showInformationMessageWithAction,
tmpDir
} from './helpers';
import { asError, assertNever, getErrorMessage } from './pure/helpers-pure';
import { spawnIdeServer } from './ide-server';
import { InterfaceManager } from './interface';
import { ResultsView } from './interface';
import { WebviewReveal } from './interface-utils';
import { ideServerLogger, logger, queryServerLogger } from './logging';
import { ideServerLogger, logger, ProgressReporter, queryServerLogger } from './logging';
import { QueryHistoryManager } from './query-history';
import { CompletedQuery } from './query-results';
import * as qsClient from './queryserver-client';
import { CompletedLocalQueryInfo, LocalQueryInfo } from './query-results';
import * as legacyQueryServer from './legacy-query-server/queryserver-client';
import * as newQueryServer from './query-server/queryserver-client';
import { displayQuickQuery } from './quick-query';
import { compileAndRunQueryAgainstDatabase, tmpDirDisposal } from './run-queries';
import { QLTestAdapterFactory } from './test-adapter';
import { TestUIService } from './test-ui';
import { CompareInterfaceManager } from './compare/compare-interface';
import { CompareView } from './compare/compare-view';
import { gatherQlFiles } from './pure/files';
import { initializeTelemetry } from './telemetry';
import {
@@ -79,12 +92,29 @@ import { CodeQlStatusBarHandler } from './status-bar';

import { Credentials } from './authentication';
import { RemoteQueriesManager } from './remote-queries/remote-queries-manager';
import { RemoteQuery } from './remote-queries/remote-query';
import { RemoteQueryResult } from './remote-queries/remote-query-result';
import { URLSearchParams } from 'url';
import { RemoteQueriesInterfaceManager } from './remote-queries/remote-queries-interface';
import { sampleRemoteQuery, sampleRemoteQueryResult } from './remote-queries/sample-data';
import { handleDownloadPacks, handleInstallPackDependencies } from './packaging';
import { AnalysesResultsManager } from './remote-queries/analyses-results-manager';
import { HistoryItemLabelProvider } from './history-item-label-provider';
import { exportRemoteQueryResults } from './remote-queries/export-results';
import { RemoteQuery } from './remote-queries/remote-query';
import { EvalLogViewer } from './eval-log-viewer';
import { SummaryLanguageSupport } from './log-insights/summary-language-support';
import { JoinOrderScannerProvider } from './log-insights/join-order';
import { LogScannerService } from './log-insights/log-scanner-service';
import { createInitialQueryInfo } from './run-queries-shared';
import { LegacyQueryRunner } from './legacy-query-server/legacyRunner';
import { NewQueryRunner } from './query-server/query-runner';
import { QueryRunner } from './queryRunner';
import { VariantAnalysisView } from './remote-queries/variant-analysis-view';
import { VariantAnalysisViewSerializer } from './remote-queries/variant-analysis-view-serializer';
import { VariantAnalysis } from './remote-queries/shared/variant-analysis';
import {
VariantAnalysis as VariantAnalysisApiResponse,
VariantAnalysisScannedRepository as ApiVariantAnalysisScannedRepository
} from './remote-queries/gh-api/variant-analysis';
import { VariantAnalysisManager } from './remote-queries/variant-analysis-manager';
import { createVariantAnalysisContentProvider } from './remote-queries/variant-analysis-content-provider';

/**
* extension.ts
@@ -147,10 +177,11 @@ function registerErrorStubs(excludedCommands: string[], stubGenerator: (command:
export interface CodeQLExtensionInterface {
readonly ctx: ExtensionContext;
readonly cliServer: CodeQLCliServer;
readonly qs: qsClient.QueryServerClient;
readonly qs: QueryRunner;
readonly distributionManager: DistributionManager;
readonly databaseManager: DatabaseManager;
readonly databaseUI: DatabaseUI;
readonly variantAnalysisManager: VariantAnalysisManager;
readonly dispose: () => void;
}

@@ -186,7 +217,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
const shouldUpdateOnNextActivationKey = 'shouldUpdateOnNextActivation';

registerErrorStubs([checkForUpdatesCommand], command => (async () => {
void helpers.showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
void showAndLogErrorMessage(`Can't execute ${command}: waiting to finish loading CodeQL CLI.`);
}));

interface DistributionUpdateConfig {
@@ -198,7 +229,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
async function installOrUpdateDistributionWithProgressTitle(progressTitle: string, config: DistributionUpdateConfig): Promise<void> {
const minSecondsSinceLastUpdateCheck = config.isUserInitiated ? 0 : 86400;
const noUpdatesLoggingFunc = config.shouldDisplayMessageWhenNoUpdates ?
helpers.showAndLogInformationMessage : async (message: string) => void logger.log(message);
showAndLogInformationMessage : async (message: string) => void logger.log(message);
const result = await distributionManager.checkForUpdatesToExtensionManagedDistribution(minSecondsSinceLastUpdateCheck);

// We do want to auto update if there is no distribution at all
@@ -220,7 +251,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
const updateAvailableMessage = `Version "${result.updatedRelease.name}" of the CodeQL CLI is now available. ` +
'Do you wish to upgrade?';
await ctx.globalState.update(shouldUpdateOnNextActivationKey, true);
if (await helpers.showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
if (await showInformationMessageWithAction(updateAvailableMessage, 'Restart and Upgrade')) {
await commands.executeCommand('workbench.action.reloadWindow');
}
} else {
@@ -233,7 +264,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
distributionManager.installExtensionManagedDistributionRelease(result.updatedRelease, progress));

await ctx.globalState.update(shouldUpdateOnNextActivationKey, false);
void helpers.showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
void showAndLogInformationMessage(`CodeQL CLI updated to version "${result.updatedRelease.name}".`);
}
break;
default:
@@ -260,7 +291,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
// Don't rethrow the exception, because if the config is changed, we want to be able to retry installing
// or updating the distribution.
const alertFunction = (codeQlInstalled && !config.isUserInitiated) ?
helpers.showAndLogWarningMessage : helpers.showAndLogErrorMessage;
showAndLogWarningMessage : showAndLogErrorMessage;
const taskDescription = (willUpdateCodeQl ? 'update' :
codeQlInstalled ? 'check for updates to' : 'install') + ' CodeQL CLI';

@@ -295,20 +326,20 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
}
})();

void helpers.showAndLogWarningMessage(
void showAndLogWarningMessage(
`The current version of the CodeQL CLI (${result.version.raw}) ` +
`is incompatible with this extension. ${fixGuidanceMessage}`
);
break;
}
case FindDistributionResultKind.UnknownCompatibilityDistribution:
void helpers.showAndLogWarningMessage(
void showAndLogWarningMessage(
'Compatibility with the configured CodeQL CLI could not be determined. ' +
'You may experience problems using the extension.'
);
break;
case FindDistributionResultKind.NoDistribution:
void helpers.showAndLogErrorMessage('The CodeQL CLI could not be found.');
void showAndLogErrorMessage('The CodeQL CLI could not be found.');
break;
default:
assertNever(result);
@@ -335,7 +366,7 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
} else if (distributionResult.kind === FindDistributionResultKind.NoDistribution) {
registerErrorStubs([checkForUpdatesCommand], command => async () => {
const installActionName = 'Install CodeQL CLI';
const chosenAction = await void helpers.showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
const chosenAction = await void showAndLogErrorMessage(`Can't execute ${command}: missing CodeQL CLI.`, {
items: [installActionName]
});
if (chosenAction === installActionName) {
@@ -361,7 +392,10 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
allowAutoUpdating: true
})));

return await installOrUpdateThenTryActivate({
const variantAnalysisViewSerializer = new VariantAnalysisViewSerializer(ctx);
Window.registerWebviewPanelSerializer(VariantAnalysisView.viewType, variantAnalysisViewSerializer);

const codeQlExtension = await installOrUpdateThenTryActivate({
isUserInitiated: !!ctx.globalState.get(shouldUpdateOnNextActivationKey),
shouldDisplayMessageWhenNoUpdates: false,

@@ -369,8 +403,14 @@ export async function activate(ctx: ExtensionContext): Promise<CodeQLExtensionIn
// otherwise, ask user to accept the update
allowAutoUpdating: !!ctx.globalState.get(shouldUpdateOnNextActivationKey)
});

variantAnalysisViewSerializer.onExtensionLoaded(codeQlExtension.variantAnalysisManager);

return codeQlExtension;
}

const PACK_GLOBS = ['**/codeql-pack.yml', '**/qlpack.yml', '**/queries.xml', '**/codeql-pack.lock.yml', '**/qlpack.lock.yml', '.codeqlmanifest.json', 'codeql-workspace.yml'];

async function activateWithInstalledDistribution(
ctx: ExtensionContext,
distributionManager: DistributionManager,
@@ -399,83 +439,120 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(statusBar);

void logger.log('Initializing query server client.');
const qs = new qsClient.QueryServerClient(
qlConfigurationListener,
cliServer,
{
logger: queryServerLogger,
contextStoragePath: getContextStoragePath(ctx),
},
(task) =>
Window.withProgress(
{ title: 'CodeQL query server', location: ProgressLocation.Window },
task
)
);
ctx.subscriptions.push(qs);
await qs.startQueryServer();
const qs = await createQueryServer(qlConfigurationListener, cliServer, ctx);

for (const glob of PACK_GLOBS) {
const fsWatcher = workspace.createFileSystemWatcher(glob);
ctx.subscriptions.push(fsWatcher);
fsWatcher.onDidChange(async (_uri) => {
await qs.clearPackCache();
});
}

void logger.log('Initializing database manager.');
const dbm = new DatabaseManager(ctx, qs, cliServer, logger);

// Let this run async.
void dbm.loadPersistedState();

ctx.subscriptions.push(dbm);
void logger.log('Initializing database panel.');
const databaseUI = new DatabaseUI(
dbm,
qs,
getContextStoragePath(ctx),
ctx.extensionPath
ctx.extensionPath,
() => Credentials.initialize(ctx),
);
databaseUI.init();
ctx.subscriptions.push(databaseUI);

void logger.log('Initializing evaluator log viewer.');
const evalLogViewer = new EvalLogViewer();
ctx.subscriptions.push(evalLogViewer);

void logger.log('Initializing query history manager.');
const queryHistoryConfigurationListener = new QueryHistoryConfigListener();
ctx.subscriptions.push(queryHistoryConfigurationListener);
const showResults = async (item: CompletedQuery) =>
const showResults = async (item: CompletedLocalQueryInfo) =>
showResultsForCompletedQuery(item, WebviewReveal.Forced);
const queryStorageDir = path.join(ctx.globalStorageUri.fsPath, 'queries');
await fs.ensureDir(queryStorageDir);
const labelProvider = new HistoryItemLabelProvider(queryHistoryConfigurationListener);

void logger.log('Initializing results panel interface.');
const localQueryResultsView = new ResultsView(ctx, dbm, cliServer, queryServerLogger, labelProvider);
ctx.subscriptions.push(localQueryResultsView);

void logger.log('Initializing variant analysis manager.');
const variantAnalysisStorageDir = path.join(ctx.globalStorageUri.fsPath, 'variant-analyses');
await fs.ensureDir(variantAnalysisStorageDir);
const variantAnalysisManager = new VariantAnalysisManager(ctx, cliServer, variantAnalysisStorageDir, logger);
ctx.subscriptions.push(variantAnalysisManager);
ctx.subscriptions.push(workspace.registerTextDocumentContentProvider('codeql-variant-analysis', createVariantAnalysisContentProvider(variantAnalysisManager)));

void logger.log('Initializing remote queries manager.');
const rqm = new RemoteQueriesManager(ctx, cliServer, queryStorageDir, logger, variantAnalysisManager);
ctx.subscriptions.push(rqm);

void logger.log('Initializing query history.');
const qhm = new QueryHistoryManager(
qs,
ctx.extensionPath,
dbm,
localQueryResultsView,
rqm,
variantAnalysisManager,
evalLogViewer,
queryStorageDir,
ctx,
queryHistoryConfigurationListener,
showResults,
async (from: CompletedQuery, to: CompletedQuery) =>
labelProvider,
async (from: CompletedLocalQueryInfo, to: CompletedLocalQueryInfo) =>
showResultsForComparison(from, to),
);
ctx.subscriptions.push(qhm);
void logger.log('Initializing results panel interface.');
const intm = new InterfaceManager(ctx, dbm, cliServer, queryServerLogger);
ctx.subscriptions.push(intm);

void logger.log('Initializing compare panel interface.');
const cmpm = new CompareInterfaceManager(

ctx.subscriptions.push(qhm);

void logger.log('Initializing evaluation log scanners.');
const logScannerService = new LogScannerService(qhm);
ctx.subscriptions.push(logScannerService);
ctx.subscriptions.push(logScannerService.scanners.registerLogScannerProvider(new JoinOrderScannerProvider()));

void logger.log('Reading query history');
await qhm.readQueryHistory();

void logger.log('Initializing compare view.');
const compareView = new CompareView(
ctx,
dbm,
cliServer,
queryServerLogger,
labelProvider,
showResults
);
ctx.subscriptions.push(cmpm);
ctx.subscriptions.push(compareView);

void logger.log('Initializing source archive filesystem provider.');
archiveFilesystemProvider.activate(ctx);

async function showResultsForComparison(
from: CompletedQuery,
to: CompletedQuery
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo
): Promise<void> {
try {
await cmpm.showResults(from, to);
await compareView.showResults(from, to);
} catch (e) {
void helpers.showAndLogErrorMessage(e.message);
void showAndLogErrorMessage(getErrorMessage(e));
}
}

async function showResultsForCompletedQuery(
query: CompletedQuery,
query: CompletedLocalQueryInfo,
forceReveal: WebviewReveal
): Promise<void> {
await intm.showResults(query, forceReveal, false);
await localQueryResultsView.showResults(query, forceReveal, false);
}

async function compileAndRunQuery(
@@ -492,22 +569,41 @@ async function activateWithInstalledDistribution(
|
||||
if (databaseItem === undefined) {
|
||||
throw new Error('Can\'t run query without a selected database');
|
||||
}
|
||||
const info = await compileAndRunQueryAgainstDatabase(
|
||||
cliServer,
|
||||
qs,
|
||||
databaseItem,
|
||||
quickEval,
|
||||
selectedQuery,
|
||||
progress,
|
||||
token,
|
||||
undefined,
|
||||
range
|
||||
);
|
||||
const item = qhm.buildCompletedQuery(info);
|
||||
await showResultsForCompletedQuery(item, WebviewReveal.NotForced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
await qhm.addCompletedQuery(item);
|
||||
const databaseInfo = {
|
||||
name: databaseItem.name,
|
||||
databaseUri: databaseItem.databaseUri.toString(),
|
||||
};
|
||||
|
||||
// handle cancellation from the history view.
|
||||
const source = new CancellationTokenSource();
|
||||
token.onCancellationRequested(() => source.cancel());
|
||||
|
||||
const initialInfo = await createInitialQueryInfo(selectedQuery, databaseInfo, quickEval, range);
|
||||
const item = new LocalQueryInfo(initialInfo, source);
|
||||
qhm.addQuery(item);
|
||||
try {
|
||||
const completedQueryInfo = await qs.compileAndRunQueryAgainstDatabase(
|
||||
databaseItem,
|
||||
initialInfo,
|
||||
queryStorageDir,
|
||||
progress,
|
||||
source.token,
|
||||
undefined,
|
||||
item,
|
||||
);
|
||||
qhm.completeQuery(item, completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.Forced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
} catch (e) {
|
||||
const err = asError(e);
|
||||
err.message = `Error running query: ${err.message}`;
|
||||
item.failureReason = err.message;
|
||||
throw e;
|
||||
} finally {
|
||||
await qhm.refreshTreeView();
|
||||
source.dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -527,11 +623,11 @@ async function activateWithInstalledDistribution(
|
||||
try {
|
||||
await cliServer.generateQueryHelp(pathToQhelp, absolutePathToMd);
|
||||
await commands.executeCommand('markdown.showPreviewToSide', uri);
|
||||
} catch (err) {
|
||||
const errorMessage = err.message.includes('Generating qhelp in markdown') ? (
|
||||
} catch (e) {
|
||||
const errorMessage = getErrorMessage(e).includes('Generating qhelp in markdown') ? (
|
||||
`Could not generate markdown from ${pathToQhelp}: Bad formatting in .qhelp file.`
|
||||
) : `Could not open a preview of the generated file (${absolutePathToMd}).`;
|
||||
void helpers.showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${err}` });
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${e}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -548,7 +644,7 @@ async function activateWithInstalledDistribution(
|
||||
const uri = Uri.file(resolved.resolvedPath);
|
||||
await window.showTextDocument(uri, { preview: false });
|
||||
} else {
|
||||
void helpers.showAndLogErrorMessage(
|
||||
void showAndLogErrorMessage(
|
||||
'Jumping from a .qlref file to the .ql file it references is not '
|
||||
+ 'supported with the CLI version you are running.\n'
|
||||
+ `Please upgrade your CLI to version ${CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_QLREF
|
||||
@@ -602,7 +698,10 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
}
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
interface DatabaseQuickPickItem extends QuickPickItem {
|
||||
@@ -618,15 +717,15 @@ async function activateWithInstalledDistribution(
|
||||
) => {
|
||||
let filteredDBs = dbm.databaseItems;
|
||||
if (filteredDBs.length === 0) {
|
||||
void helpers.showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
|
||||
void showAndLogErrorMessage('No databases found. Please add a suitable database to your workspace.');
|
||||
return;
|
||||
}
|
||||
// If possible, only show databases with the right language (otherwise show all databases).
|
||||
const queryLanguage = await helpers.findLanguage(cliServer, uri);
|
||||
const queryLanguage = await findLanguage(cliServer, uri);
|
||||
if (queryLanguage) {
|
||||
filteredDBs = dbm.databaseItems.filter(db => db.language === queryLanguage);
|
||||
if (filteredDBs.length === 0) {
|
||||
void helpers.showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
|
||||
void showAndLogErrorMessage(`No databases found for language ${queryLanguage}. Please add a suitable database to your workspace.`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -651,19 +750,19 @@ async function activateWithInstalledDistribution(
|
||||
for (const item of quickpick) {
|
||||
try {
|
||||
await compileAndRunQuery(false, uri, progress, token, item.databaseItem);
|
||||
} catch (error) {
|
||||
} catch (e) {
|
||||
skippedDatabases.push(item.label);
|
||||
errors.push(error.message);
|
||||
errors.push(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
if (skippedDatabases.length > 0) {
|
||||
void logger.log(`Errors:\n${errors.join('\n')}`);
|
||||
void helpers.showAndLogWarningMessage(
|
||||
void showAndLogWarningMessage(
|
||||
`The following databases were skipped:\n${skippedDatabases.join('\n')}.\nFor details about the errors, see the logs.`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
void helpers.showAndLogErrorMessage('No databases selected.');
|
||||
void showAndLogErrorMessage('No databases selected.');
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -690,7 +789,7 @@ async function activateWithInstalledDistribution(
|
||||
// files may be hidden from the user.
|
||||
if (dirFound) {
|
||||
const fileString = files.map(file => path.basename(file)).join(', ');
|
||||
const res = await helpers.showBinaryChoiceDialog(
|
||||
const res = await showBinaryChoiceDialog(
|
||||
`You are about to run ${files.length} queries: ${fileString} Do you want to continue?`
|
||||
);
|
||||
if (!res) {
|
||||
@@ -711,12 +810,13 @@ async function activateWithInstalledDistribution(
|
||||
});
|
||||
}
|
||||
|
||||
if (queryUris.length > 1) {
|
||||
if (queryUris.length > 1 && !await cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||
// Try to upgrade the current database before running any queries
|
||||
// so that the user isn't confronted with multiple upgrade
|
||||
// requests for each query to run.
|
||||
// Only do it if running multiple queries since this check is
|
||||
// performed on each query run anyway.
|
||||
// Don't do this with non-destructive upgrades, as the user won't see anything anyway.
|
||||
await databaseUI.tryUpgradeCurrentDatabase(progress, token);
|
||||
}
|
||||
|
||||
@@ -734,7 +834,11 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running queries',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
@@ -747,7 +851,10 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -762,7 +869,11 @@ async function activateWithInstalledDistribution(
|
||||
{
|
||||
title: 'Running query',
|
||||
cancellable: true
|
||||
})
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -773,18 +884,19 @@ async function activateWithInstalledDistribution(
|
||||
displayQuickQuery(ctx, cliServer, databaseUI, progress, token),
|
||||
{
|
||||
title: 'Run Quick Query'
|
||||
}
|
||||
},
|
||||
|
||||
// Open the query server logger on error since that's usually where the interesting errors appear.
|
||||
queryServerLogger
|
||||
)
|
||||
);
|
||||
|
||||
void logger.log('Initializing remote queries interface.');
|
||||
const rqm = new RemoteQueriesManager(ctx, logger, cliServer);
|
||||
|
||||
registerRemoteQueryTextProvider();
|
||||
|
||||
// The "runRemoteQuery" command is internal-only.
|
||||
// The "runVariantAnalysis" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.runRemoteQuery', async (
|
||||
commandRunnerWithProgress('codeQL.runVariantAnalysis', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
uri: Uri | undefined
|
||||
@@ -801,28 +913,79 @@ async function activateWithInstalledDistribution(
|
||||
token
|
||||
);
|
||||
} else {
|
||||
throw new Error('Remote queries require the CodeQL Canary version to run.');
|
||||
throw new Error('Variant analysis requires the CodeQL Canary version to run.');
|
||||
}
|
||||
}, {
|
||||
title: 'Run Remote Query',
|
||||
title: 'Run Variant Analysis',
|
||||
cancellable: true
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.monitorRemoteQuery', async (
|
||||
queryId: string,
|
||||
query: RemoteQuery,
|
||||
token: CancellationToken) => {
|
||||
await rqm.monitorRemoteQuery(query, token);
|
||||
await rqm.monitorRemoteQuery(queryId, query, token);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.showFakeRemoteQueryResults', async () => {
|
||||
const analysisResultsManager = new AnalysesResultsManager(ctx, logger);
|
||||
const rqim = new RemoteQueriesInterfaceManager(ctx, logger, analysisResultsManager);
|
||||
await rqim.showResults(sampleRemoteQuery, sampleRemoteQueryResult);
|
||||
commandRunner('codeQL.copyRepoList', async (queryId: string) => {
|
||||
await rqm.copyRemoteQueryRepoListToClipboard(queryId);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.monitorVariantAnalysis', async (
|
||||
variantAnalysis: VariantAnalysis,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
await variantAnalysisManager.monitorVariantAnalysis(variantAnalysis, token);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.autoDownloadVariantAnalysisResult', async (
|
||||
scannedRepo: ApiVariantAnalysisScannedRepository,
|
||||
variantAnalysisSummary: VariantAnalysisApiResponse,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
await variantAnalysisManager.autoDownloadVariantAnalysisResult(scannedRepo, variantAnalysisSummary, token);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.openVariantAnalysis', async () => {
|
||||
await variantAnalysisManager.promptOpenVariantAnalysis();
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.autoDownloadRemoteQueryResults', async (
|
||||
queryResult: RemoteQueryResult,
|
||||
token: CancellationToken) => {
|
||||
await rqm.autoDownloadRemoteQueryResults(queryResult, token);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.exportVariantAnalysisResults', async (queryId?: string) => {
|
||||
await exportRemoteQueryResults(qhm, rqm, ctx, queryId);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.loadVariantAnalysisRepoResults', async (variantAnalysisId: number, repositoryFullName: string) => {
|
||||
await variantAnalysisManager.loadResults(variantAnalysisId, repositoryFullName);
|
||||
})
|
||||
);
|
||||
|
||||
// The "openVariantAnalysisView" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.openVariantAnalysisView', async (variantAnalysisId: number) => {
|
||||
await variantAnalysisManager.showView(variantAnalysisId);
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunner(
|
||||
'codeQL.openReferencedFile',
|
||||
@@ -842,8 +1005,10 @@ async function activateWithInstalledDistribution(
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
// We restart the CLI server too, to ensure they are the same version
|
||||
cliServer.restartCliServer();
|
||||
await qs.restartQueryServer(progress, token);
|
||||
void helpers.showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
void showAndLogInformationMessage('CodeQL Query Server restarted.', {
|
||||
outputLogger: queryServerLogger,
|
||||
});
|
||||
}, {
|
||||
@@ -869,6 +1034,18 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Choose a Database from an Archive'
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseGithub', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = isCanary() ? await Credentials.initialize(ctx) : undefined;
|
||||
await databaseUI.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseLgtm', (
|
||||
progress: ProgressCallback,
|
||||
@@ -899,7 +1076,7 @@ async function activateWithInstalledDistribution(
|
||||
commandRunner('codeQL.copyVersion', async () => {
|
||||
const text = `CodeQL extension version: ${extension?.packageJSON.version} \nCodeQL CLI version: ${await getCliVersion()} \nPlatform: ${os.platform()} ${os.arch()}`;
|
||||
await env.clipboard.writeText(text);
|
||||
void helpers.showAndLogInformationMessage(text);
|
||||
void showAndLogInformationMessage(text);
|
||||
}));
|
||||
|
||||
const getCliVersion = async () => {
|
||||
@@ -910,19 +1087,16 @@ async function activateWithInstalledDistribution(
|
||||
}
|
||||
};
|
||||
|
||||
// The "authenticateToGitHub" command is internal-only.
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.authenticateToGitHub', async () => {
|
||||
if (isCanary()) {
|
||||
/**
|
||||
* Credentials for authenticating to GitHub.
|
||||
* These are used when making API calls.
|
||||
*/
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const octokit = await credentials.getOctokit();
|
||||
const userInfo = await octokit.users.getAuthenticated();
|
||||
void helpers.showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
}
|
||||
/**
|
||||
* Credentials for authenticating to GitHub.
|
||||
* These are used when making API calls.
|
||||
*/
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
const octokit = await credentials.getOctokit();
|
||||
const userInfo = await octokit.users.getAuthenticated();
|
||||
void showAndLogInformationMessage(`Authenticated to GitHub as user: ${userInfo.data.login}`);
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
@@ -945,37 +1119,48 @@ async function activateWithInstalledDistribution(
|
||||
}
|
||||
));
|
||||
|
||||
commands.registerCommand('codeQL.showLogs', () => {
|
||||
logger.show();
|
||||
});
|
||||
ctx.subscriptions.push(
|
||||
commandRunner('codeQL.showLogs', async () => {
|
||||
logger.show();
|
||||
})
|
||||
);
|
||||
|
||||
ctx.subscriptions.push(new SummaryLanguageSupport());
|
||||
|
||||
void logger.log('Starting language server.');
|
||||
ctx.subscriptions.push(client.start());
|
||||
|
||||
// Jump-to-definition and find-references
|
||||
void logger.log('Registering jump-to-definition handlers.');
|
||||
|
||||
// Store contextual queries in a temporary folder so that they are removed
|
||||
// when the application closes. There is no need for the user to interact with them.
|
||||
const contextualQueryStorageDir = path.join(tmpDir.name, 'contextual-query-storage');
|
||||
await fs.ensureDir(contextualQueryStorageDir);
|
||||
languages.registerDefinitionProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm)
|
||||
new TemplateQueryDefinitionProvider(cliServer, qs, dbm, contextualQueryStorageDir)
|
||||
);
|
||||
|
||||
languages.registerReferenceProvider(
|
||||
{ scheme: archiveFilesystemProvider.zipArchiveScheme },
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm)
|
||||
new TemplateQueryReferenceProvider(cliServer, qs, dbm, contextualQueryStorageDir)
|
||||
);
|
||||
|
||||
const astViewer = new AstViewer();
|
||||
const templateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm);
|
||||
const printAstTemplateProvider = new TemplatePrintAstProvider(cliServer, qs, dbm, contextualQueryStorageDir);
|
||||
const cfgTemplateProvider = new TemplatePrintCfgProvider(cliServer, dbm);
|
||||
|
||||
ctx.subscriptions.push(astViewer);
|
||||
ctx.subscriptions.push(commandRunnerWithProgress('codeQL.viewAst', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
token: CancellationToken,
|
||||
selectedFile: Uri
|
||||
) => {
|
||||
const ast = await templateProvider.provideAst(
|
||||
const ast = await printAstTemplateProvider.provideAst(
|
||||
progress,
|
||||
token,
|
||||
window.activeTextEditor?.document,
|
||||
selectedFile ?? window.activeTextEditor?.document.uri,
|
||||
);
|
||||
if (ast) {
|
||||
astViewer.updateRoots(await ast.getRoots(), ast.db, ast.fileName);
|
||||
@@ -985,6 +1170,25 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Calculate AST'
|
||||
}));
|
||||
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQL.viewCfg',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const res = await cfgTemplateProvider.provideCfgUri(window.activeTextEditor?.document);
|
||||
if (res) {
|
||||
await compileAndRunQuery(false, res[0], progress, token, undefined);
|
||||
}
|
||||
},
|
||||
{
|
||||
title: 'Calculating Control Flow Graph',
|
||||
cancellable: true
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
await commands.executeCommand('codeQLDatabases.removeOrphanedDatabases');
|
||||
|
||||
void logger.log('Successfully finished extension initialization.');
|
||||
@@ -996,20 +1200,51 @@ async function activateWithInstalledDistribution(
|
||||
distributionManager,
|
||||
databaseManager: dbm,
|
||||
databaseUI,
|
||||
variantAnalysisManager,
|
||||
dispose: () => {
|
||||
ctx.subscriptions.forEach(d => d.dispose());
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function createQueryServer(qlConfigurationListener: QueryServerConfigListener, cliServer: CodeQLCliServer, ctx: ExtensionContext): Promise<QueryRunner> {
|
||||
const qsOpts = {
|
||||
logger: queryServerLogger,
|
||||
contextStoragePath: getContextStoragePath(ctx),
|
||||
};
|
||||
const progressCallback = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Window.withProgress(
|
||||
{ title: 'CodeQL query server', location: ProgressLocation.Window },
|
||||
task
|
||||
);
|
||||
if (await cliServer.cliConstraints.supportsNewQueryServer()) {
|
||||
const qs = new newQueryServer.QueryServerClient(
|
||||
qlConfigurationListener,
|
||||
cliServer,
|
||||
qsOpts,
|
||||
progressCallback
|
||||
);
|
||||
ctx.subscriptions.push(qs);
|
||||
await qs.startQueryServer();
|
||||
return new NewQueryRunner(qs);
|
||||
|
||||
} else {
|
||||
const qs = new legacyQueryServer.QueryServerClient(
|
||||
qlConfigurationListener,
|
||||
cliServer,
|
||||
qsOpts,
|
||||
progressCallback
|
||||
);
|
||||
ctx.subscriptions.push(qs);
|
||||
await qs.startQueryServer();
|
||||
return new LegacyQueryRunner(qs);
|
||||
}
|
||||
}
|
||||
|
||||
function getContextStoragePath(ctx: ExtensionContext) {
|
||||
return ctx.storagePath || ctx.globalStoragePath;
|
||||
return ctx.storageUri?.fsPath || ctx.globalStorageUri.fsPath;
|
||||
}
|
||||
|
||||
async function initializeLogging(ctx: ExtensionContext): Promise<void> {
|
||||
const storagePath = getContextStoragePath(ctx);
|
||||
await logger.setLogStoragePath(storagePath, false);
|
||||
await ideServerLogger.setLogStoragePath(storagePath, false);
|
||||
ctx.subscriptions.push(logger);
|
||||
ctx.subscriptions.push(queryServerLogger);
|
||||
ctx.subscriptions.push(ideServerLogger);
|
||||
@@ -1019,7 +1254,7 @@ const checkForUpdatesCommand = 'codeQL.checkForUpdatesToCLI';
|
||||
|
||||
/**
|
||||
* This text provider lets us open readonly files in the editor.
|
||||
*
|
||||
*
|
||||
* TODO: Consolidate this with the 'codeql' text provider in query-history.ts.
|
||||
*/
|
||||
function registerRemoteQueryTextProvider() {
|
||||
|
||||
@@ -2,6 +2,7 @@ import * as fs from 'fs-extra';
|
||||
import * as glob from 'glob-promise';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import {
|
||||
ExtensionContext,
|
||||
Uri,
|
||||
@@ -14,6 +15,17 @@ import { UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import { QueryMetadata } from './pure/interface-types';
|
||||
|
||||
// Shared temporary folder for the extension.
|
||||
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
|
||||
export const upgradesTmpDir = path.join(tmpDir.name, 'upgrades');
|
||||
fs.ensureDirSync(upgradesTmpDir);
|
||||
|
||||
export const tmpDirDisposal = {
|
||||
dispose: () => {
|
||||
tmpDir.removeCallback();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Show an error message and log it to the console
|
||||
*
|
||||
@@ -64,9 +76,10 @@ export async function showAndLogWarningMessage(message: string, {
|
||||
*/
|
||||
export async function showAndLogInformationMessage(message: string, {
|
||||
outputLogger = logger,
|
||||
items = [] as string[]
|
||||
items = [] as string[],
|
||||
fullMessage = ''
|
||||
} = {}): Promise<string | undefined> {
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage);
|
||||
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage, fullMessage);
|
||||
}
|
||||
|
||||
type ShowMessageFn = (message: string, ...items: string[]) => Thenable<string | undefined>;
|
||||
@@ -276,7 +289,7 @@ interface QlPackWithPath {
|
||||
async function findDbschemePack(packs: QlPackWithPath[], dbschemePath: string): Promise<{ name: string; isLibraryPack: boolean; }> {
|
||||
for (const { packDir, packName } of packs) {
|
||||
if (packDir !== undefined) {
|
||||
const qlpack = yaml.safeLoad(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
|
||||
const qlpack = yaml.load(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
|
||||
if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
|
||||
return {
|
||||
name: packName,
|
||||
@@ -457,9 +470,9 @@ export function getInitialQueryContents(language: string, dbscheme: string) {

/**
 * Heuristically determines if the directory passed in corresponds
 * to a database root.
 *
 * @param maybeRoot
 * to a database root. A database root is a directory that contains
 * a codeql-database.yml or (historically) a .dbinfo file. It also
 * contains a folder starting with `db-`.
 */
export async function isLikelyDatabaseRoot(maybeRoot: string) {
  const [a, b, c] = (await Promise.all([
@@ -471,11 +484,14 @@ export async function isLikelyDatabaseRoot(maybeRoot: string) {
    glob('db-*/', { cwd: maybeRoot })
  ]));

  return !!((a || b) && c);
  return ((a || b) && c.length > 0);
}

export function isLikelyDbLanguageFolder(dbPath: string) {
  return !!path.basename(dbPath).startsWith('db-');
/**
 * A language folder is any folder starting with `db-` that is itself not a database root.
 */
export async function isLikelyDbLanguageFolder(dbPath: string) {
  return path.basename(dbPath).startsWith('db-') && !(await isLikelyDatabaseRoot(dbPath));
}

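As a rough illustration of the heuristic (the paths below are invented, and the calls would sit inside some async context):

// Hypothetical layout: /work/my-database contains codeql-database.yml, src.zip and a db-javascript/ folder.
const isRoot = await isLikelyDatabaseRoot('/work/my-database');                     // true: has the yml (or .dbinfo) plus a db-* folder
const isLang = await isLikelyDbLanguageFolder('/work/my-database/db-javascript');   // true: starts with 'db-' and is not itself a root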
/**
@@ -533,3 +549,46 @@ export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath:
    return;
  }
}

/**
 * Creates a file in the query directory that indicates when this query was created.
 * This is important for keeping track of when queries should be removed.
 *
 * @param storagePath The directory that will contain all files relevant to a query result.
 * It does not need to exist.
 */
export async function createTimestampFile(storagePath: string) {
  const timestampPath = path.join(storagePath, 'timestamp');
  await fs.ensureDir(storagePath);
  await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
}

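A minimal sketch of how the timestamp written above could later be read back to decide whether a stored query result is old enough to clean up; the helper name and the two-week threshold are assumptions for illustration, not the extension's actual retention policy.

// Hypothetical cleanup check based on the 'timestamp' file written by createTimestampFile.
async function isQueryStorageExpired(storagePath: string, maxAgeMs = 14 * 24 * 60 * 60 * 1000): Promise<boolean> {
  const raw = await fs.readFile(path.join(storagePath, 'timestamp'), 'utf8');
  const createdAt = parseInt(raw, 10);
  return Number.isFinite(createdAt) && Date.now() - createdAt > maxAgeMs;
}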
/**
 * Recursively walk a directory and return the full path to all files found.
 * Symbolic links are ignored.
 *
 * @param dir the directory to walk
 *
 * @return An iterator of the full path to all files recursively found in the directory.
 */
export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
  const seenFiles = new Set<string>();
  for await (const d of await fs.opendir(dir)) {
    const entry = path.join(dir, d.name);
    seenFiles.add(entry);
    if (d.isDirectory()) {
      yield* walkDirectory(entry);
    } else if (d.isFile()) {
      yield entry;
    }
  }
}

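Since walkDirectory is an async generator, callers stream entries with for await; a small sketch (the directory name and the .bqrs filter are invented for illustration):

// Collect all .bqrs files under a hypothetical storage directory.
async function findBqrsFiles(queryStorageDir: string): Promise<string[]> {
  const found: string[] = [];
  for await (const file of walkDirectory(queryStorageDir)) {
    if (file.endsWith('.bqrs')) {
      found.push(file);
    }
  }
  return found;
}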
/**
 * Pluralizes a word.
 * Example: Returns "N repository" if N is one, "N repositories" otherwise.
 */
export function pluralize(numItems: number | undefined, singular: string, plural: string): string {
  return numItems ? `${numItems} ${numItems === 1 ? singular : plural}` : '';
}

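For example (note that both 0 and undefined fall through to the empty string):

pluralize(1, 'repository', 'repositories');         // '1 repository'
pluralize(12, 'repository', 'repositories');        // '12 repositories'
pluralize(0, 'repository', 'repositories');         // ''
pluralize(undefined, 'repository', 'repositories'); // ''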
118 extensions/ql-vscode/src/history-item-label-provider.ts Normal file
@@ -0,0 +1,118 @@
import { env } from 'vscode';
import * as path from 'path';
import { QueryHistoryConfig } from './config';
import { LocalQueryInfo } from './query-results';
import { getRawQueryName, QueryHistoryInfo } from './query-history-info';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
import { pluralize } from './helpers';
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
import { assertNever } from './pure/helpers-pure';

interface InterpolateReplacements {
  t: string; // Start time
  q: string; // Query name
  d: string; // Database/Controller repo name
  r: string; // Result count/Empty
  s: string; // Status
  f: string; // Query file name
  '%': '%'; // Percent sign
}

export class HistoryItemLabelProvider {
  constructor(private config: QueryHistoryConfig) {
    /**/
  }

  getLabel(item: QueryHistoryInfo) {
    let replacements: InterpolateReplacements;
    switch (item.t) {
      case 'local':
        replacements = this.getLocalInterpolateReplacements(item);
        break;
      case 'remote':
        replacements = this.getRemoteInterpolateReplacements(item);
        break;
      case 'variant-analysis':
        replacements = this.getVariantAnalysisInterpolateReplacements(item);
        break;
      default:
        assertNever(item);
    }

    const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');

    return this.interpolate(rawLabel, replacements);
  }

  /**
   * If there is a user-specified label for this query, interpolate and use that.
   * Otherwise, use the raw name of this query.
   *
   * @returns the name of the query, unless there is a custom label for this query.
   */
  getShortLabel(item: QueryHistoryInfo): string {
    return item.userSpecifiedLabel
      ? this.getLabel(item)
      : getRawQueryName(item);
  }


  private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
    const label = rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
      const replacement = replacements[key];
      return replacement !== undefined ? replacement : match;
    });

    return label.replace(/\s+/g, ' ');
  }

  private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
    const { resultCount = 0, statusString = 'in progress' } = item.completedQuery || {};
    return {
      t: item.startTime,
      q: item.getQueryName(),
      d: item.initialInfo.databaseInfo.name,
      r: `(${resultCount} results)`,
      s: statusString,
      f: item.getQueryFileName(),
      '%': '%',
    };
  }

  // Return the number of repositories queried if available. Otherwise, use the controller repository name.
  private buildRepoLabel(item: RemoteQueryHistoryItem): string {
    const repositoryCount = item.remoteQuery.repositoryCount;

    if (repositoryCount) {
      return pluralize(repositoryCount, 'repository', 'repositories');
    }

    return `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
  }

  private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
    const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
    return {
      t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
      q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
      d: this.buildRepoLabel(item),
      r: resultCount,
      s: item.status,
      f: path.basename(item.remoteQuery.queryFilePath),
      '%': '%'
    };
  }

  private getVariantAnalysisInterpolateReplacements(item: VariantAnalysisHistoryItem): InterpolateReplacements {
    const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
    return {
      t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(env.language),
      q: `${item.variantAnalysis.query.name} (${item.variantAnalysis.query.language})`,
      d: 'TODO',
      r: resultCount,
      s: item.status,
      f: path.basename(item.variantAnalysis.query.filePath),
      '%': '%',
    };
  }
}
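To make the interpolation concrete, a hedged example for a local query, assuming a HistoryItemLabelProvider instance named labelProvider, a local history item, and a format string such as '%q on %d %r - %s' (all concrete values are invented):

// replacements: { t: '2:15 PM', q: 'UnsafeDeserialization.ql', d: 'octo-db',
//                 r: '(12 results)', s: 'completed', f: 'UnsafeDeserialization.ql', '%': '%' }
labelProvider.getLabel(item);
// '%q on %d %r - %s'  ->  'UnsafeDeserialization.ql on octo-db (12 results) - completed'
// An unknown placeholder such as '%x' is left untouched, '%%' renders a literal '%',
// and runs of whitespace collapse to a single space.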
@@ -4,6 +4,7 @@ import {
|
||||
Uri,
|
||||
Location,
|
||||
Range,
|
||||
ExtensionContext,
|
||||
WebviewPanel,
|
||||
Webview,
|
||||
workspace,
|
||||
@@ -111,15 +112,36 @@ export function tryResolveLocation(
|
||||
}
|
||||
}
|
||||
|
||||
export type WebviewView = 'results' | 'compare' | 'remote-queries' | 'variant-analysis';
|
||||
|
||||
export interface WebviewMessage {
|
||||
t: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns HTML to populate the given webview.
|
||||
* Uses a content security policy that only loads the given script.
|
||||
*/
|
||||
export function getHtmlForWebview(
|
||||
ctx: ExtensionContext,
|
||||
webview: Webview,
|
||||
scriptUriOnDisk: Uri,
|
||||
stylesheetUrisOnDisk: Uri[],
|
||||
view: WebviewView,
|
||||
{
|
||||
allowInlineStyles,
|
||||
}: {
|
||||
allowInlineStyles?: boolean;
|
||||
} = {
|
||||
allowInlineStyles: false,
|
||||
}
|
||||
): string {
|
||||
const scriptUriOnDisk = Uri.file(
|
||||
ctx.asAbsolutePath('out/webview.js')
|
||||
);
|
||||
|
||||
const stylesheetUrisOnDisk = [
|
||||
Uri.file(ctx.asAbsolutePath('out/webview.css'))
|
||||
];
|
||||
|
||||
// Convert the on-disk URIs into webview URIs.
|
||||
const scriptWebviewUri = webview.asWebviewUri(scriptUriOnDisk);
|
||||
const stylesheetWebviewUris = stylesheetUrisOnDisk.map(stylesheetUriOnDisk =>
|
||||
@@ -128,8 +150,15 @@ export function getHtmlForWebview(
  // Use a nonce in the content security policy to uniquely identify the above resources.
  const nonce = getNonce();

  const stylesheetsHtmlLines = stylesheetWebviewUris.map(stylesheetWebviewUri =>
    `<link nonce="${nonce}" rel="stylesheet" href="${stylesheetWebviewUri}">`);
  const stylesheetsHtmlLines = allowInlineStyles
    ? stylesheetWebviewUris.map(uri => createStylesLinkWithoutNonce(uri))
    : stylesheetWebviewUris.map(uri => createStylesLinkWithNonce(nonce, uri));

  const styleSrc = allowInlineStyles
    ? `${webview.cspSource} vscode-file: 'unsafe-inline'`
    : `'nonce-${nonce}'`;

  const fontSrc = webview.cspSource;

  /*
   * Content security policy:
@@ -143,11 +172,11 @@ export function getHtmlForWebview(
  <html>
    <head>
      <meta http-equiv="Content-Security-Policy"
        content="default-src 'none'; script-src 'nonce-${nonce}'; style-src 'nonce-${nonce}'; connect-src ${webview.cspSource};">
        content="default-src 'none'; script-src 'nonce-${nonce}'; font-src ${fontSrc}; style-src ${styleSrc}; connect-src ${webview.cspSource};">
      ${stylesheetsHtmlLines.join(` ${os.EOL}`)}
    </head>
    <body>
      <div id=root>
      <div id=root data-view="${view}">
      </div>
      <script nonce="${nonce}" src="${scriptWebviewUri}">
      </script>
@@ -243,3 +272,11 @@ export async function jumpToLocation(
    }
  }
}

function createStylesLinkWithNonce(nonce: string, uri: Uri): string {
  return `<link nonce="${nonce}" rel="stylesheet" href="${uri}">`;
}

function createStylesLinkWithoutNonce(uri: Uri): string {
  return `<link rel="stylesheet" href="${uri}">`;
}

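The effect of allowInlineStyles on the resulting style-src directive, sketched with illustrative values (the nonce and cspSource below are made up, not what the webview actually reports):

const nonce = 'AbC123';                        // assumed nonce for the example
const cspSource = 'https://*.vscode-cdn.net';  // assumed webview.cspSource
const allowInlineStyles = false;
const styleSrc = allowInlineStyles
  ? `${cspSource} vscode-file: 'unsafe-inline'`  // inline styles permitted, e.g. for views that inject styles at runtime
  : `'nonce-${nonce}'`;                          // only nonce-tagged stylesheets may load
// Here styleSrc === "'nonce-AbC123'".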
@@ -1,6 +1,4 @@
|
||||
import * as path from 'path';
|
||||
import * as Sarif from 'sarif';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import * as vscode from 'vscode';
|
||||
import {
|
||||
Diagnostic,
|
||||
@@ -15,7 +13,7 @@ import * as cli from './cli';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import {
|
||||
FromResultsViewMsg,
|
||||
Interpretation,
|
||||
@@ -27,26 +25,28 @@ import {
|
||||
InterpretedResultsSortState,
|
||||
SortDirection,
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
RawResultsSortState,
|
||||
} from './pure/interface-types';
|
||||
import { Logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { CompletedQuery, interpretResults } from './query-results';
|
||||
import { QueryInfo, tmpDir } from './run-queries';
|
||||
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries-shared';
|
||||
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
|
||||
import {
|
||||
WebviewReveal,
|
||||
fileUriToWebviewUri,
|
||||
tryResolveLocation,
|
||||
getHtmlForWebview,
|
||||
shownLocationDecoration,
|
||||
shownLocationLineDecoration,
|
||||
jumpToLocation,
|
||||
} from './interface-utils';
|
||||
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
|
||||
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
|
||||
import { AbstractWebview, WebviewPanelConfig } from './abstract-webview';
|
||||
import { PAGE_SIZE } from './config';
|
||||
import { CompletedLocalQueryInfo } from './query-results';
|
||||
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||
|
||||
/**
|
||||
* interface.ts
|
||||
@@ -87,20 +87,41 @@ function sortInterpretedResults(
  }
}

function numPagesOfResultSet(resultSet: RawResultSet): number {
  return Math.ceil(resultSet.schema.rows / PAGE_SIZE.getValue<number>());
function interpretedPageSize(interpretation: Interpretation | undefined): number {
  if (interpretation?.data.t == 'GraphInterpretationData') {
    // Graph views always have one result per page.
    return 1;
  }
  return PAGE_SIZE.getValue<number>();
}

function numPagesOfResultSet(resultSet: RawResultSet, interpretation?: Interpretation): number {
  const pageSize = interpretedPageSize(interpretation);

  const n = interpretation?.data.t == 'GraphInterpretationData'
    ? interpretation.data.dot.length
    : resultSet.schema.rows;

  return Math.ceil(n / pageSize);
}

function numInterpretedPages(interpretation: Interpretation | undefined): number {
  return Math.ceil((interpretation?.sarif.runs[0].results?.length || 0) / PAGE_SIZE.getValue<number>());
  if (!interpretation) {
    return 0;
  }

  const pageSize = interpretedPageSize(interpretation);

  const n = interpretation.data.t == 'GraphInterpretationData'
    ? interpretation.data.dot.length
    : interpretation.data.runs[0].results?.length || 0;

  return Math.ceil(n / pageSize);
}

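A quick worked example of the paging arithmetic, assuming a page size of 200: a SARIF interpretation with 450 alerts yields Math.ceil(450 / 200) = 3 interpreted pages, while a graph interpretation with 5 DOT graphs uses a page size of 1 and therefore gives 5 pages, one graph per page.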
export class InterfaceManager extends DisposableObject {
|
||||
private _displayedQuery?: CompletedQuery;
|
||||
export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResultsViewMsg> {
|
||||
private _displayedQuery?: CompletedLocalQueryInfo;
|
||||
private _interpretation?: Interpretation;
|
||||
private _panel: vscode.WebviewPanel | undefined;
|
||||
private _panelLoaded = false;
|
||||
private _panelLoadedCallBacks: (() => void)[] = [];
|
||||
|
||||
private readonly _diagnosticCollection = languages.createDiagnosticCollection(
|
||||
'codeql-query-results'
|
||||
@@ -110,9 +131,10 @@ export class InterfaceManager extends DisposableObject {
|
||||
public ctx: vscode.ExtensionContext,
|
||||
private databaseManager: DatabaseManager,
|
||||
public cliServer: CodeQLCliServer,
|
||||
public logger: Logger
|
||||
public logger: Logger,
|
||||
private labelProvider: HistoryItemLabelProvider
|
||||
) {
|
||||
super();
|
||||
super(ctx);
|
||||
this.push(this._diagnosticCollection);
|
||||
this.push(
|
||||
vscode.window.onDidChangeTextEditorSelection(
|
||||
@@ -137,7 +159,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
|
||||
if (kind === DatabaseEventKind.Remove) {
|
||||
this._diagnosticCollection.clear();
|
||||
if (this.isShowingPanel()) {
|
||||
if (this.isShowingPanel) {
|
||||
void this.postMessage({
|
||||
t: 'untoggleShowProblems'
|
||||
});
|
||||
@@ -151,57 +173,81 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.postMessage({ t: 'navigatePath', direction });
|
||||
}
|
||||
|
||||
private isShowingPanel() {
|
||||
return !!this._panel;
|
||||
protected getPanelConfig(): WebviewPanelConfig {
|
||||
return {
|
||||
viewId: 'resultsView',
|
||||
title: 'CodeQL Query Results',
|
||||
viewColumn: this.chooseColumnForWebview(),
|
||||
preserveFocus: true,
|
||||
view: 'results',
|
||||
};
|
||||
}
|
||||
|
||||
// Returns the webview panel, creating it if it doesn't already
|
||||
// exist.
|
||||
getPanel(): vscode.WebviewPanel {
|
||||
if (this._panel == undefined) {
|
||||
const { ctx } = this;
|
||||
const webViewColumn = this.chooseColumnForWebview();
|
||||
const panel = (this._panel = Window.createWebviewPanel(
|
||||
'resultsView', // internal name
|
||||
'CodeQL Query Results', // user-visible name
|
||||
{ viewColumn: webViewColumn, preserveFocus: true },
|
||||
{
|
||||
enableScripts: true,
|
||||
enableFindWidget: true,
|
||||
retainContextWhenHidden: true,
|
||||
localResourceRoots: [
|
||||
vscode.Uri.file(tmpDir.name),
|
||||
vscode.Uri.file(path.join(this.ctx.extensionPath, 'out'))
|
||||
]
|
||||
}
|
||||
));
|
||||
protected onPanelDispose(): void {
|
||||
this._displayedQuery = undefined;
|
||||
}
|
||||
|
||||
this._panel.onDidDispose(
|
||||
() => {
|
||||
this._panel = undefined;
|
||||
this._displayedQuery = undefined;
|
||||
},
|
||||
null,
|
||||
ctx.subscriptions
|
||||
);
|
||||
const scriptPathOnDisk = vscode.Uri.file(
|
||||
ctx.asAbsolutePath('out/resultsView.js')
|
||||
);
|
||||
const stylesheetPathOnDisk = vscode.Uri.file(
|
||||
ctx.asAbsolutePath('out/view/resultsView.css')
|
||||
);
|
||||
panel.webview.html = getHtmlForWebview(
|
||||
panel.webview,
|
||||
scriptPathOnDisk,
|
||||
[stylesheetPathOnDisk]
|
||||
);
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async (e) => this.handleMsgFromView(e),
|
||||
undefined,
|
||||
ctx.subscriptions
|
||||
);
|
||||
protected async onMessage(msg: FromResultsViewMsg): Promise<void> {
|
||||
try {
|
||||
switch (msg.t) {
|
||||
case 'viewLoaded':
|
||||
this.onWebViewLoaded();
|
||||
break;
|
||||
case 'viewSourceFile': {
|
||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||
break;
|
||||
}
|
||||
case 'toggleDiagnostics': {
|
||||
if (msg.visible) {
|
||||
const databaseItem = this.databaseManager.findDatabaseItem(
|
||||
Uri.parse(msg.databaseUri)
|
||||
);
|
||||
if (databaseItem !== undefined) {
|
||||
await this.showResultsAsDiagnostics(
|
||||
msg.origResultsPaths,
|
||||
msg.metadata,
|
||||
databaseItem
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// TODO: Only clear diagnostics on the same database.
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'changeSort':
|
||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||
break;
|
||||
case 'changeInterpretedSort':
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
await this.showPageOfRawResults(
|
||||
msg.selectedTable,
|
||||
msg.pageNumber,
|
||||
// When we are in an unsorted state, we guarantee that
|
||||
// sortedResultsInfo doesn't have an entry for the current
|
||||
// result set. Use this to determine whether or not we use
|
||||
// the sorted bqrs file.
|
||||
!!this._displayedQuery?.completedQuery.sortedResultsInfo[msg.selectedTable]
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||
fullMessage: getErrorStack(e)
|
||||
});
|
||||
}
|
||||
return this._panel;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -238,7 +284,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
// Notify the webview that it should expect new results.
|
||||
await this.postMessage({ t: 'resultsUpdating' });
|
||||
await this._displayedQuery.updateInterpretedSortState(sortState);
|
||||
await this._displayedQuery.completedQuery.updateInterpretedSortState(sortState);
|
||||
await this.showResults(this._displayedQuery, WebviewReveal.NotForced, true);
|
||||
}
|
||||
|
||||
@@ -254,7 +300,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
// Notify the webview that it should expect new results.
|
||||
await this.postMessage({ t: 'resultsUpdating' });
|
||||
await this._displayedQuery.updateSortState(
|
||||
await this._displayedQuery.completedQuery.updateSortState(
|
||||
this.cliServer,
|
||||
resultSetName,
|
||||
sortState
|
||||
@@ -266,88 +312,9 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.showPageOfRawResults(resultSetName, 0, true);
|
||||
}
|
||||
|
||||
private async handleMsgFromView(msg: FromResultsViewMsg): Promise<void> {
|
||||
try {
|
||||
switch (msg.t) {
|
||||
case 'viewSourceFile': {
|
||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||
break;
|
||||
}
|
||||
case 'toggleDiagnostics': {
|
||||
if (msg.visible) {
|
||||
const databaseItem = this.databaseManager.findDatabaseItem(
|
||||
Uri.parse(msg.databaseUri)
|
||||
);
|
||||
if (databaseItem !== undefined) {
|
||||
await this.showResultsAsDiagnostics(
|
||||
msg.origResultsPaths,
|
||||
msg.metadata,
|
||||
databaseItem
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// TODO: Only clear diagnostics on the same database.
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'resultViewLoaded':
|
||||
this._panelLoaded = true;
|
||||
this._panelLoadedCallBacks.forEach((cb) => cb());
|
||||
this._panelLoadedCallBacks = [];
|
||||
break;
|
||||
case 'changeSort':
|
||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||
break;
|
||||
case 'changeInterpretedSort':
|
||||
await this.changeInterpretedSortState(msg.sortState);
|
||||
break;
|
||||
case 'changePage':
|
||||
if (msg.selectedTable === ALERTS_TABLE_NAME) {
|
||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||
}
|
||||
else {
|
||||
await this.showPageOfRawResults(
|
||||
msg.selectedTable,
|
||||
msg.pageNumber,
|
||||
// When we are in an unsorted state, we guarantee that
|
||||
// sortedResultsInfo doesn't have an entry for the current
|
||||
// result set. Use this to determine whether or not we use
|
||||
// the sorted bqrs file.
|
||||
this._displayedQuery?.sortedResultsInfo.has(msg.selectedTable) || false
|
||||
);
|
||||
}
|
||||
break;
|
||||
case 'openFile':
|
||||
await this.openFile(msg.filePath);
|
||||
break;
|
||||
default:
|
||||
assertNever(msg);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message, {
|
||||
fullMessage: e.stack
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
postMessage(msg: IntoResultsViewMsg): Thenable<boolean> {
|
||||
return this.getPanel().webview.postMessage(msg);
|
||||
}
|
||||
|
||||
private waitForPanelLoaded(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
if (this._panelLoaded) {
|
||||
resolve();
|
||||
} else {
|
||||
this._panelLoadedCallBacks.push(resolve);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Show query results in webview panel.
|
||||
* @param results Evaluation info for the executed query.
|
||||
* @param fullQuery Evaluation info for the executed query.
|
||||
* @param shouldKeepOldResultsWhileRendering Should keep old results while rendering.
|
||||
* @param forceReveal Force the webview panel to be visible and
|
||||
* Appropriate when the user has just performed an explicit
|
||||
@@ -355,27 +322,27 @@ export class InterfaceManager extends DisposableObject {
|
||||
* history entry.
|
||||
*/
|
||||
public async showResults(
|
||||
results: CompletedQuery,
|
||||
fullQuery: CompletedLocalQueryInfo,
|
||||
forceReveal: WebviewReveal,
|
||||
shouldKeepOldResultsWhileRendering = false
|
||||
): Promise<void> {
|
||||
if (results.result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
if (!fullQuery.completedQuery.successful) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._interpretation = undefined;
|
||||
const interpretationPage = await this.interpretResultsInfo(
|
||||
results.query,
|
||||
results.interpretedResultsSortState
|
||||
fullQuery.completedQuery.query,
|
||||
fullQuery.completedQuery.interpretedResultsSortState
|
||||
);
|
||||
|
||||
const sortedResultsMap: SortedResultsMap = {};
|
||||
results.sortedResultsInfo.forEach(
|
||||
(v, k) =>
|
||||
Object.entries(fullQuery.completedQuery.sortedResultsInfo).forEach(
|
||||
([k, v]) =>
|
||||
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
|
||||
);
|
||||
|
||||
this._displayedQuery = results;
|
||||
this._displayedQuery = fullQuery;
|
||||
|
||||
const panel = this.getPanel();
|
||||
await this.waitForPanelLoaded();
|
||||
@@ -388,7 +355,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
// more asynchronous message to not so abruptly interrupt
|
||||
// user's workflow by immediately revealing the panel.
|
||||
const showButton = 'View Results';
|
||||
const queryName = results.queryName;
|
||||
const queryName = this.labelProvider.getShortLabel(fullQuery);
|
||||
const resultPromise = vscode.window.showInformationMessage(
|
||||
`Finished running query ${queryName.length > 0 ? ` "${queryName}"` : ''
|
||||
}.`,
|
||||
@@ -407,7 +374,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
// Note that the resultSetSchemas will return offsets for the default (unsorted) page,
|
||||
// which may not be correct. However, in this case, it doesn't matter since we only
|
||||
// need the first offset, which will be the same no matter which sorting we use.
|
||||
const resultSetSchemas = await this.getResultSetSchemas(results);
|
||||
const resultSetSchemas = await this.getResultSetSchemas(fullQuery.completedQuery);
|
||||
const resultSetNames = resultSetSchemas.map(schema => schema.name);
|
||||
|
||||
const selectedTable = getDefaultResultSetName(resultSetNames);
|
||||
@@ -417,7 +384,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
// Use sorted results path if it exists. This may happen if we are
|
||||
// reloading the results view after it has been sorted in the past.
|
||||
const resultsPath = results.getResultsPath(selectedTable);
|
||||
const resultsPath = fullQuery.completedQuery.getResultsPath(selectedTable);
|
||||
const pageSize = PAGE_SIZE.getValue<number>();
|
||||
const chunk = await this.cliServer.bqrsDecode(
|
||||
resultsPath,
|
||||
@@ -432,11 +399,11 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
);
|
||||
const resultSet = transformBqrsResultSet(schema, chunk);
|
||||
results.setResultCount(interpretationPage?.numTotalResults || resultSet.schema.rows);
|
||||
fullQuery.completedQuery.setResultCount(interpretationPage?.numTotalResults || resultSet.schema.rows);
|
||||
const parsedResultSets: ParsedResultSets = {
|
||||
pageNumber: 0,
|
||||
pageSize,
|
||||
numPages: numPagesOfResultSet(resultSet),
|
||||
numPages: numPagesOfResultSet(resultSet, this._interpretation),
|
||||
numInterpretedPages: numInterpretedPages(this._interpretation),
|
||||
resultSet: { ...resultSet, t: 'RawResultSet' },
|
||||
selectedTable: undefined,
|
||||
@@ -446,17 +413,17 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.postMessage({
|
||||
t: 'setState',
|
||||
interpretation: interpretationPage,
|
||||
origResultsPaths: results.query.resultsPaths,
|
||||
origResultsPaths: fullQuery.completedQuery.query.resultsPaths,
|
||||
resultsPath: this.convertPathToWebviewUri(
|
||||
results.query.resultsPaths.resultsPath
|
||||
fullQuery.completedQuery.query.resultsPaths.resultsPath
|
||||
),
|
||||
parsedResultSets,
|
||||
sortedResultsMap,
|
||||
database: results.database,
|
||||
database: fullQuery.initialInfo.databaseInfo,
|
||||
shouldKeepOldResultsWhileRendering,
|
||||
metadata: results.query.metadata,
|
||||
queryName: results.toString(),
|
||||
queryPath: results.query.program.queryPath
|
||||
metadata: fullQuery.completedQuery.query.metadata,
|
||||
queryName: this.labelProvider.getLabel(fullQuery),
|
||||
queryPath: fullQuery.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
|
||||
@@ -472,29 +439,29 @@ export class InterfaceManager extends DisposableObject {
|
||||
if (this._interpretation === undefined) {
|
||||
throw new Error('Trying to show interpreted results but interpretation was undefined');
|
||||
}
|
||||
if (this._interpretation.sarif.runs[0].results === undefined) {
|
||||
if (this._interpretation.data.t === 'SarifInterpretationData' && this._interpretation.data.runs[0].results === undefined) {
|
||||
throw new Error('Trying to show interpreted results but results were undefined');
|
||||
}
|
||||
|
||||
const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery);
|
||||
const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery.completedQuery);
|
||||
const resultSetNames = resultSetSchemas.map(schema => schema.name);
|
||||
|
||||
await this.postMessage({
|
||||
t: 'showInterpretedPage',
|
||||
interpretation: this.getPageOfInterpretedResults(pageNumber),
|
||||
database: this._displayedQuery.database,
|
||||
metadata: this._displayedQuery.query.metadata,
|
||||
database: this._displayedQuery.initialInfo.databaseInfo,
|
||||
metadata: this._displayedQuery.completedQuery.query.metadata,
|
||||
pageNumber,
|
||||
resultSetNames,
|
||||
pageSize: PAGE_SIZE.getValue(),
|
||||
pageSize: interpretedPageSize(this._interpretation),
|
||||
numPages: numInterpretedPages(this._interpretation),
|
||||
queryName: this._displayedQuery.toString(),
|
||||
queryPath: this._displayedQuery.query.program.queryPath
|
||||
queryName: this.labelProvider.getLabel(this._displayedQuery),
|
||||
queryPath: this._displayedQuery.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
|
||||
private async getResultSetSchemas(results: CompletedQuery, selectedTable = ''): Promise<ResultSetSchema[]> {
|
||||
const resultsPath = results.getResultsPath(selectedTable);
|
||||
private async getResultSetSchemas(completedQuery: CompletedQueryInfo, selectedTable = ''): Promise<ResultSetSchema[]> {
|
||||
const resultsPath = completedQuery.getResultsPath(selectedTable);
|
||||
const schemas = await this.cliServer.bqrsInfo(
|
||||
resultsPath,
|
||||
PAGE_SIZE.getValue()
|
||||
@@ -521,17 +488,17 @@ export class InterfaceManager extends DisposableObject {
|
||||
}
|
||||
|
||||
const sortedResultsMap: SortedResultsMap = {};
|
||||
results.sortedResultsInfo.forEach(
|
||||
(v, k) =>
|
||||
Object.entries(results.completedQuery.sortedResultsInfo).forEach(
|
||||
([k, v]) =>
|
||||
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
|
||||
);
|
||||
|
||||
const resultSetSchemas = await this.getResultSetSchemas(results, sorted ? selectedTable : '');
|
||||
const resultSetSchemas = await this.getResultSetSchemas(results.completedQuery, sorted ? selectedTable : '');
|
||||
|
||||
// If there is a specific sorted table selected, a different bqrs file is loaded that doesn't have all the result set names.
|
||||
// Make sure that we load all result set names here.
|
||||
// See https://github.com/github/vscode-codeql/issues/1005
|
||||
const allResultSetSchemas = sorted ? await this.getResultSetSchemas(results, '') : resultSetSchemas;
|
||||
const allResultSetSchemas = sorted ? await this.getResultSetSchemas(results.completedQuery, '') : resultSetSchemas;
|
||||
const resultSetNames = allResultSetSchemas.map(schema => schema.name);
|
||||
|
||||
const schema = resultSetSchemas.find(
|
||||
@@ -542,7 +509,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
const pageSize = PAGE_SIZE.getValue<number>();
|
||||
const chunk = await this.cliServer.bqrsDecode(
|
||||
results.getResultsPath(selectedTable, sorted),
|
||||
results.completedQuery.getResultsPath(selectedTable, sorted),
|
||||
schema.name,
|
||||
{
|
||||
offset: schema.pagination?.offsets[pageNumber],
|
||||
@@ -564,17 +531,17 @@ export class InterfaceManager extends DisposableObject {
|
||||
await this.postMessage({
|
||||
t: 'setState',
|
||||
interpretation: this._interpretation,
|
||||
origResultsPaths: results.query.resultsPaths,
|
||||
origResultsPaths: results.completedQuery.query.resultsPaths,
|
||||
resultsPath: this.convertPathToWebviewUri(
|
||||
results.query.resultsPaths.resultsPath
|
||||
results.completedQuery.query.resultsPaths.resultsPath
|
||||
),
|
||||
parsedResultSets,
|
||||
sortedResultsMap,
|
||||
database: results.database,
|
||||
database: results.initialInfo.databaseInfo,
|
||||
shouldKeepOldResultsWhileRendering: false,
|
||||
metadata: results.query.metadata,
|
||||
queryName: results.toString(),
|
||||
queryPath: results.query.program.queryPath
|
||||
metadata: results.completedQuery.query.metadata,
|
||||
queryName: this.labelProvider.getLabel(results),
|
||||
queryPath: results.initialInfo.queryPath
|
||||
});
|
||||
}
|
||||
|
||||
@@ -589,28 +556,45 @@ export class InterfaceManager extends DisposableObject {
|
||||
void this.logger.log('No results path. Cannot display interpreted results.');
|
||||
return undefined;
|
||||
}
|
||||
let data;
|
||||
let numTotalResults;
|
||||
if (metadata?.kind === GRAPH_TABLE_NAME) {
|
||||
data = await interpretGraphResults(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
numTotalResults = data.dot.length;
|
||||
} else {
|
||||
const sarif = await interpretResultsSarif(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
|
||||
const sarif = await interpretResults(
|
||||
this.cliServer,
|
||||
metadata,
|
||||
resultsPaths,
|
||||
sourceInfo
|
||||
);
|
||||
sarif.runs.forEach(run => {
|
||||
if (run.results) {
|
||||
sortInterpretedResults(run.results, sortState);
|
||||
}
|
||||
});
|
||||
|
||||
sarif.runs.forEach(run => {
|
||||
if (run.results !== undefined) {
|
||||
sortInterpretedResults(run.results, sortState);
|
||||
}
|
||||
});
|
||||
sarif.sortState = sortState;
|
||||
data = sarif;
|
||||
|
||||
const numTotalResults = sarif.runs[0]?.results?.length || 0;
|
||||
numTotalResults = (() => {
|
||||
return sarif.runs?.[0]?.results
|
||||
? sarif.runs[0].results.length
|
||||
: 0;
|
||||
})();
|
||||
}
|
||||
|
||||
const interpretation: Interpretation = {
|
||||
sarif,
|
||||
data,
|
||||
sourceLocationPrefix,
|
||||
numTruncatedResults: 0,
|
||||
numTotalResults,
|
||||
sortState,
|
||||
numTotalResults
|
||||
};
|
||||
this._interpretation = interpretation;
|
||||
return interpretation;
|
||||
@@ -619,7 +603,6 @@ export class InterfaceManager extends DisposableObject {
|
||||
private getPageOfInterpretedResults(
|
||||
pageNumber: number
|
||||
): Interpretation {
|
||||
|
||||
function getPageOfRun(run: Sarif.Run): Sarif.Run {
|
||||
return {
|
||||
...run, results: run.results?.slice(
|
||||
@@ -629,32 +612,44 @@ export class InterfaceManager extends DisposableObject {
|
||||
};
|
||||
}
|
||||
|
||||
if (this._interpretation === undefined) {
|
||||
const interp = this._interpretation;
|
||||
if (interp === undefined) {
|
||||
throw new Error('Tried to get interpreted results before interpretation finished');
|
||||
}
|
||||
if (this._interpretation.sarif.runs.length !== 1) {
|
||||
void this.logger.log(`Warning: SARIF file had ${this._interpretation.sarif.runs.length} runs, expected 1`);
|
||||
|
||||
if (interp.data.t !== 'SarifInterpretationData')
|
||||
return interp;
|
||||
|
||||
if (interp.data.runs.length !== 1) {
|
||||
void this.logger.log(`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`);
|
||||
}
|
||||
const interp = this._interpretation;
|
||||
|
||||
return {
|
||||
...interp,
|
||||
sarif: { ...interp.sarif, runs: [getPageOfRun(interp.sarif.runs[0])] },
|
||||
data: {
|
||||
...interp.data,
|
||||
runs: [getPageOfRun(interp.data.runs[0])]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private async interpretResultsInfo(
|
||||
query: QueryInfo,
|
||||
query: QueryEvaluationInfo,
|
||||
sortState: InterpretedResultsSortState | undefined
|
||||
): Promise<Interpretation | undefined> {
|
||||
if (
|
||||
(await query.canHaveInterpretedResults()) &&
|
||||
query.canHaveInterpretedResults() &&
|
||||
query.quickEvalPosition === undefined // never do results interpretation if quickEval
|
||||
) {
|
||||
try {
|
||||
const sourceLocationPrefix = await query.dbItem.getSourceLocationPrefix(
|
||||
const dbItem = this.databaseManager.findDatabaseItem(Uri.file(query.dbItemPath));
|
||||
if (!dbItem) {
|
||||
throw new Error(`Could not find database item for ${query.dbItemPath}`);
|
||||
}
|
||||
const sourceLocationPrefix = await dbItem.getSourceLocationPrefix(
|
||||
this.cliServer
|
||||
);
|
||||
const sourceArchiveUri = query.dbItem.sourceArchive;
|
||||
const sourceArchiveUri = dbItem.sourceArchive;
|
||||
const sourceInfo =
|
||||
sourceArchiveUri === undefined
|
||||
? undefined
|
||||
@@ -673,7 +668,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
// If interpretation fails, accept the error and continue
|
||||
// trying to render uninterpreted results anyway.
|
||||
void showAndLogErrorMessage(
|
||||
`Showing raw results instead of interpreted ones due to an error. ${e.message}`
|
||||
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -712,9 +707,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
try {
|
||||
await this.showProblemResultsAsDiagnostics(interpretation, database);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : e.toString();
|
||||
void this.logger.log(
|
||||
`Exception while computing problem results as diagnostics: ${msg}`
|
||||
`Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
|
||||
);
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
@@ -724,9 +718,12 @@ export class InterfaceManager extends DisposableObject {
|
||||
interpretation: Interpretation,
|
||||
databaseItem: DatabaseItem
|
||||
): Promise<void> {
|
||||
const { sarif, sourceLocationPrefix } = interpretation;
|
||||
const { data, sourceLocationPrefix } = interpretation;
|
||||
|
||||
if (!sarif.runs || !sarif.runs[0].results) {
|
||||
if (data.t !== 'SarifInterpretationData')
|
||||
return;
|
||||
|
||||
if (!data.runs || !data.runs[0].results) {
|
||||
void this.logger.log(
|
||||
'Didn\'t find a run in the sarif results. Error processing sarif?'
|
||||
);
|
||||
@@ -735,7 +732,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
|
||||
const diagnostics: [Uri, ReadonlyArray<Diagnostic>][] = [];
|
||||
|
||||
for (const result of sarif.runs[0].results) {
|
||||
for (const result of data.runs[0].results) {
|
||||
const message = result.message.text;
|
||||
if (message === undefined) {
|
||||
void this.logger.log('Sarif had result without plaintext message');
|
||||
|
||||
30
extensions/ql-vscode/src/json-rpc-server.ts
Normal file
@@ -0,0 +1,30 @@
import { Logger } from './logging';
import * as cp from 'child_process';
import { Disposable } from 'vscode';
import { MessageConnection } from 'vscode-jsonrpc';

/** A running query server process and its associated message connection. */
export class ServerProcess implements Disposable {
  child: cp.ChildProcess;
  connection: MessageConnection;
  logger: Logger;

  constructor(child: cp.ChildProcess, connection: MessageConnection, private name: string, logger: Logger) {
    this.child = child;
    this.connection = connection;
    this.logger = logger;
  }

  dispose(): void {
    void this.logger.log(`Stopping ${this.name}...`);
    this.connection.dispose();
    this.child.stdin!.end();
    this.child.stderr!.destroy();
    // TODO kill the process if it doesn't terminate after a certain time limit.

    // On Windows, we usually have to terminate the process before closing its stdout.
    this.child.stdout!.destroy();
    void this.logger.log(`Stopped ${this.name}.`);
  }
}
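// Usage sketch (assumptions marked): the query server client further down this diff spawns
// the CLI process with cli.spawnServer, builds a vscode-jsonrpc MessageConnection over the
// child's stdio (the exact createMessageConnection overload is assumed here), and wraps both
// in a ServerProcess so that disposing the client also stops the server:
//
//   const connection = createMessageConnection(child.stdout, child.stdin);
//   this.serverProcess = new ServerProcess(child, connection, 'Query server', this.logger);
//   this.track(this.serverProcess);  // disposed together with the owning client
//   connection.listen();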
65
extensions/ql-vscode/src/legacy-query-server/legacyRunner.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { CancellationToken } from 'vscode';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { Dataset, deregisterDatabases, registerDatabases } from '../pure/legacy-messages';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
import { QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { clearCacheInDatabase, compileAndRunQueryAgainstDatabase } from './run-queries';
|
||||
import { upgradeDatabaseExplicit } from './upgrades';
|
||||
|
||||
export class LegacyQueryRunner extends QueryRunner {
|
||||
|
||||
|
||||
constructor(public readonly qs: QueryServerClient) {
|
||||
super();
|
||||
}
|
||||
|
||||
get cliServer() {
|
||||
return this.qs.cliServer;
|
||||
}
|
||||
|
||||
async restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await this.qs.restartQueryServer(progress, token);
|
||||
}
|
||||
|
||||
onStart(callBack: (progress: ProgressCallback, token: CancellationToken) => Promise<void>) {
|
||||
this.qs.onDidStartQueryServer(callBack);
|
||||
}
|
||||
async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await clearCacheInDatabase(this.qs, dbItem, progress, token);
|
||||
}
|
||||
async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem, initialInfo: InitialQueryInfo, queryStorageDir: string, progress: ProgressCallback, token: CancellationToken, templates?: Record<string, string>, queryInfo?: LocalQueryInfo): Promise<QueryWithResults> {
|
||||
return await compileAndRunQueryAgainstDatabase(this.qs.cliServer, this.qs, dbItem, initialInfo, queryStorageDir, progress, token, templates, queryInfo);
|
||||
}
|
||||
|
||||
async deregisterDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
|
||||
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: Dataset[] = [{
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
}];
|
||||
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
|
||||
}
|
||||
}
|
||||
async registerDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
|
||||
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: Dataset[] = [{
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
}];
|
||||
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
|
||||
}
|
||||
}
|
||||
|
||||
async upgradeDatabaseExplicit(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await upgradeDatabaseExplicit(this.qs, dbItem, progress, token);
|
||||
}
|
||||
|
||||
async clearPackCache(): Promise<void> {
|
||||
/**
|
||||
* Nothing needs to be done
|
||||
*/
|
||||
}
|
||||
}
|
||||
@@ -1,49 +1,25 @@
|
||||
import * as cp from 'child_process';
|
||||
import * as path from 'path';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { Disposable, CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, MessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from './cli';
|
||||
import { QueryServerConfig } from './config';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from './pure/messages';
|
||||
import * as messages from './pure/messages';
|
||||
import { ProgressCallback, ProgressTask } from './commandRunner';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as helpers from './helpers';
|
||||
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from '../cli';
|
||||
import { QueryServerConfig } from '../config';
|
||||
import { Logger, ProgressReporter } from '../logging';
|
||||
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from '../pure/legacy-messages';
|
||||
import * as messages from '../pure/legacy-messages';
|
||||
import { ProgressCallback, ProgressTask } from '../commandRunner';
|
||||
import { findQueryLogFile } from '../run-queries-shared';
|
||||
import { ServerProcess } from '../json-rpc-server';
|
||||
|
||||
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
|
||||
|
||||
type ServerOpts = {
|
||||
logger: Logger;
|
||||
contextStoragePath: string;
|
||||
}
|
||||
|
||||
/** A running query server process and its associated message connection. */
|
||||
class ServerProcess implements Disposable {
|
||||
child: cp.ChildProcess;
|
||||
connection: MessageConnection;
|
||||
logger: Logger;
|
||||
|
||||
constructor(child: cp.ChildProcess, connection: MessageConnection, logger: Logger) {
|
||||
this.child = child;
|
||||
this.connection = connection;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
void this.logger.log('Stopping query server...');
|
||||
this.connection.dispose();
|
||||
this.child.stdin!.end();
|
||||
this.child.stderr!.destroy();
|
||||
// TODO kill the process if it doesn't terminate after a certain time limit.
|
||||
|
||||
// On Windows, we usually have to terminate the process before closing its stdout.
|
||||
this.child.stdout!.destroy();
|
||||
void this.logger.log('Stopped query server.');
|
||||
}
|
||||
}
|
||||
|
||||
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
|
||||
|
||||
/**
|
||||
* Client that manages a query server process.
|
||||
* The server process is started upon initialization and tracked during its lifetime.
|
||||
@@ -68,7 +44,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.queryServerStartListeners.push(e);
|
||||
}
|
||||
|
||||
public activeQueryName: string | undefined;
|
||||
public activeQueryLogFile: string | undefined;
|
||||
|
||||
constructor(
|
||||
readonly config: QueryServerConfig,
|
||||
@@ -89,26 +65,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.evaluationResultCallbacks = {};
|
||||
}
|
||||
|
||||
async initLogger() {
|
||||
let storagePath = this.opts.contextStoragePath;
|
||||
let isCustomLogDirectory = false;
|
||||
if (this.config.customLogDirectory) {
|
||||
try {
|
||||
if (!(await fs.pathExists(this.config.customLogDirectory))) {
|
||||
await fs.mkdir(this.config.customLogDirectory);
|
||||
}
|
||||
void this.logger.log(`Saving query server logs to user-specified directory: ${this.config.customLogDirectory}.`);
|
||||
storagePath = this.config.customLogDirectory;
|
||||
isCustomLogDirectory = true;
|
||||
} catch (e) {
|
||||
void helpers.showAndLogErrorMessage(`${this.config.customLogDirectory} is not a valid directory. Logs will be stored in a temporary workspace directory instead.`);
|
||||
}
|
||||
}
|
||||
|
||||
await this.logger.setLogStoragePath(storagePath, isCustomLogDirectory);
|
||||
|
||||
}
|
||||
|
||||
get logger(): Logger {
|
||||
return this.opts.logger;
|
||||
}
|
||||
@@ -150,7 +106,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
|
||||
/** Starts a new query server process, sending progress messages to the given reporter. */
|
||||
private async startQueryServerImpl(progressReporter: ProgressReporter): Promise<void> {
|
||||
await this.initLogger();
|
||||
const ramArgs = await this.cliServer.resolveRam(this.config.queryMemoryMb, progressReporter);
|
||||
const args = ['--threads', this.config.numThreads.toString()].concat(ramArgs);
|
||||
|
||||
@@ -167,16 +122,29 @@ export class QueryServerClient extends DisposableObject {
|
||||
args.push('--require-db-registration');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats()) {
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
|
||||
args.push('--old-eval-stats');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
|
||||
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
|
||||
await fs.ensureFile(structuredLogFile);
|
||||
|
||||
args.push('--evaluator-log');
|
||||
args.push(structuredLogFile);
|
||||
|
||||
// We hard-code the verbosity level to 5 and minify to false.
|
||||
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
|
||||
args.push('--evaluator-log-level');
|
||||
args.push('5');
|
||||
}
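// (Illustrative only; the exact contents depend on the CLI version checks above and on what
//  resolveRam returns.) On a CLI new enough for structured logs, the argument list built here
//  ends up roughly as
//    ['--threads', '<numThreads>', ...ramArgs, '--require-db-registration',
//     '--evaluator-log', '<contextStoragePath>/structured-evaluator-log.json',
//     '--evaluator-log-level', '5']
//  with '--debug' and '--tuple-counting' appended only when debug mode is configured.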
|
||||
|
||||
if (this.config.debug) {
|
||||
args.push('--debug', '--tuple-counting');
|
||||
}
|
||||
|
||||
if (cli.shouldDebugQueryServer()) {
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=n,quiet=y');
|
||||
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y');
|
||||
}
|
||||
|
||||
const child = cli.spawnServer(
|
||||
@@ -187,7 +155,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
this.logger,
|
||||
data => this.logger.log(data.toString(), {
|
||||
trailingNewline: false,
|
||||
additionalLogLocation: this.activeQueryName
|
||||
additionalLogLocation: this.activeQueryLogFile
|
||||
}),
|
||||
undefined, // no listener for stdout
|
||||
progressReporter
|
||||
@@ -198,10 +166,6 @@ export class QueryServerClient extends DisposableObject {
|
||||
if (!(res.runId in this.evaluationResultCallbacks)) {
|
||||
void this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
|
||||
} else {
|
||||
const baseLocation = this.logger.getBaseLocation();
|
||||
if (baseLocation && this.activeQueryName) {
|
||||
res.logFileLocation = path.join(baseLocation, this.activeQueryName);
|
||||
}
|
||||
this.evaluationResultCallbacks[res.runId](res);
|
||||
}
|
||||
return {};
|
||||
@@ -212,7 +176,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
callback(res);
|
||||
}
|
||||
});
|
||||
this.serverProcess = new ServerProcess(child, connection, this.logger);
|
||||
this.serverProcess = new ServerProcess(child, connection, 'Query server', this.logger);
|
||||
// Ensure the server process is disposed together with this client.
|
||||
this.track(this.serverProcess);
|
||||
connection.listen();
|
||||
@@ -234,7 +198,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
}
|
||||
|
||||
get serverProcessPid(): number {
|
||||
return this.serverProcess!.child.pid;
|
||||
return this.serverProcess!.child.pid || 0;
|
||||
}
|
||||
|
||||
async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
|
||||
@@ -262,8 +226,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
*/
|
||||
private updateActiveQuery(method: string, parameter: any): void {
|
||||
if (method === messages.compileQuery.method) {
|
||||
const queryPath = parameter?.queryToCheck?.queryPath || 'unknown';
|
||||
this.activeQueryName = `query-${path.basename(queryPath)}-${this.nextProgress}.log`;
|
||||
this.activeQueryLogFile = findQueryLogFile(path.dirname(parameter.resultPath));
|
||||
}
|
||||
}
|
||||
}
|
||||
526
extensions/ql-vscode/src/legacy-query-server/run-queries.ts
Normal file
@@ -0,0 +1,526 @@
|
||||
import * as crypto from 'crypto';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import * as path from 'path';
|
||||
import {
|
||||
CancellationToken,
|
||||
Uri,
|
||||
} from 'vscode';
|
||||
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
|
||||
|
||||
import * as cli from '../cli';
|
||||
import { DatabaseItem, } from '../databases';
|
||||
import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
tryGetQueryMetadata,
|
||||
upgradesTmpDir
|
||||
} from '../helpers';
|
||||
import { ProgressCallback } from '../commandRunner';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
import { logger } from '../logging';
|
||||
import * as messages from '../pure/legacy-messages';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
import { compileDatabaseUpgradeSequence, upgradeDatabaseExplicit } from './upgrades';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
|
||||
|
||||
/**
|
||||
* A collection of evaluation-time information about a query,
|
||||
* including the query itself, and where we have decided to put
|
||||
* temporary files associated with it, such as the compiled query
|
||||
* output and results.
|
||||
*/
|
||||
export class QueryInProgress {
|
||||
|
||||
public queryEvalInfo: QueryEvaluationInfo;
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a QueryEvaluationInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
readonly querySaveDir: string,
|
||||
readonly dbItemPath: string,
|
||||
databaseHasMetadataFile: boolean,
|
||||
readonly queryDbscheme: string, // the dbscheme file the query expects, based on library path resolution
|
||||
readonly quickEvalPosition?: messages.Position,
|
||||
readonly metadata?: QueryMetadata,
|
||||
readonly templates?: Record<string, string>,
|
||||
) {
|
||||
this.queryEvalInfo = new QueryEvaluationInfo(querySaveDir, dbItemPath, databaseHasMetadataFile, quickEvalPosition, metadata);
|
||||
/**/
|
||||
}
|
||||
|
||||
get compiledQueryPath() {
|
||||
return path.join(this.querySaveDir, 'compiledQuery.qlo');
|
||||
}
|
||||
|
||||
|
||||
async run(
|
||||
qs: qsClient.QueryServerClient,
|
||||
upgradeQlo: string | undefined,
|
||||
availableMlModels: cli.MlModelInfo[],
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
queryInfo?: LocalQueryInfo,
|
||||
): Promise<messages.EvaluationResult> {
|
||||
if (!dbItem.contents || dbItem.error) {
|
||||
throw new Error('Can\'t run query on invalid database.');
|
||||
}
|
||||
|
||||
let result: messages.EvaluationResult | null = null;
|
||||
|
||||
const callbackId = qs.registerCallback(res => {
|
||||
result = {
|
||||
...res,
|
||||
logFileLocation: this.queryEvalInfo.logPath
|
||||
};
|
||||
});
|
||||
|
||||
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(model => ({ uri: Uri.file(model.path).toString(true) }));
|
||||
|
||||
const queryToRun: messages.QueryToRun = {
|
||||
resultsPath: this.queryEvalInfo.resultsPaths.resultsPath,
|
||||
qlo: Uri.file(this.compiledQueryPath).toString(),
|
||||
compiledUpgrade: upgradeQlo && Uri.file(upgradeQlo).toString(),
|
||||
allowUnknownTemplates: true,
|
||||
templateValues: createSimpleTemplates(this.templates),
|
||||
availableMlModels: availableMlModelUris,
|
||||
id: callbackId,
|
||||
timeoutSecs: qs.config.timeoutSecs,
|
||||
};
|
||||
|
||||
const dataset: messages.Dataset = {
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default'
|
||||
};
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.startLog, {
|
||||
db: dataset,
|
||||
logPath: this.queryEvalInfo.evalLogPath,
|
||||
});
|
||||
|
||||
}
|
||||
const params: messages.EvaluateQueriesParams = {
|
||||
db: dataset,
|
||||
evaluateId: callbackId,
|
||||
queries: [queryToRun],
|
||||
stopOnError: false,
|
||||
useSequenceHint: false
|
||||
};
|
||||
try {
|
||||
await qs.sendRequest(messages.runQueries, params, token, progress);
|
||||
if (qs.config.customLogDirectory) {
|
||||
void showAndLogWarningMessage(
|
||||
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${this.queryEvalInfo.logPath}.`
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
qs.unRegisterCallback(callbackId);
|
||||
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||
await qs.sendRequest(messages.endLog, {
|
||||
db: dataset,
|
||||
logPath: this.queryEvalInfo.evalLogPath,
|
||||
});
|
||||
if (await this.queryEvalInfo.hasEvalLog()) {
|
||||
await this.queryEvalInfo.addQueryLogs(queryInfo, qs.cliServer, qs.logger);
|
||||
} else {
|
||||
void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.queryEvalInfo.evalLogPath}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result || {
|
||||
evaluationTime: 0,
|
||||
message: 'No result from server',
|
||||
queryId: -1,
|
||||
runId: callbackId,
|
||||
resultType: messages.QueryResultType.OTHER_ERROR
|
||||
};
|
||||
}
|
||||
|
||||
async compile(
|
||||
qs: qsClient.QueryServerClient,
|
||||
program: messages.QlProgram,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<messages.CompilationMessage[]> {
|
||||
let compiled: messages.CheckQueryResult | undefined;
|
||||
try {
|
||||
const target = this.quickEvalPosition ? {
|
||||
quickEval: { quickEvalPos: this.quickEvalPosition }
|
||||
} : { query: {} };
|
||||
const params: messages.CompileQueryParams = {
|
||||
compilationOptions: {
|
||||
computeNoLocationUrls: true,
|
||||
failOnWarnings: false,
|
||||
fastCompilation: false,
|
||||
includeDilInQlo: true,
|
||||
localChecking: false,
|
||||
noComputeGetUrl: false,
|
||||
noComputeToString: false,
|
||||
computeDefaultStrings: true,
|
||||
emitDebugInfo: true
|
||||
},
|
||||
extraOptions: {
|
||||
timeoutSecs: qs.config.timeoutSecs
|
||||
},
|
||||
queryToCheck: program,
|
||||
resultPath: this.compiledQueryPath,
|
||||
target,
|
||||
};
|
||||
|
||||
compiled = await qs.sendRequest(messages.compileQuery, params, token, progress);
|
||||
} finally {
|
||||
void qs.logger.log(' - - - COMPILATION DONE - - - ', { additionalLogLocation: this.queryEvalInfo.logPath });
|
||||
}
|
||||
return (compiled?.messages || []).filter(msg => msg.severity === messages.Severity.ERROR);
|
||||
}
|
||||
}
|
||||
|
||||
export async function clearCacheInDatabase(
|
||||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<messages.ClearCacheResult> {
|
||||
if (dbItem.contents === undefined) {
|
||||
throw new Error('Can\'t clear the cache in an invalid database.');
|
||||
}
|
||||
|
||||
const db: messages.Dataset = {
|
||||
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||
workingSet: 'default',
|
||||
};
|
||||
|
||||
const params: messages.ClearCacheParams = {
|
||||
dryRun: false,
|
||||
db,
|
||||
};
|
||||
|
||||
return qs.sendRequest(messages.clearCache, params, token, progress);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Compare the dbscheme implied by the query `query` and that of the current database.
|
||||
* - If they are compatible, do nothing.
|
||||
* - If they are incompatible but the database can be upgraded, suggest that upgrade.
|
||||
* - If they are incompatible and the database cannot be upgraded, throw an error.
|
||||
*/
|
||||
async function checkDbschemeCompatibility(
|
||||
cliServer: cli.CodeQLCliServer,
|
||||
qs: qsClient.QueryServerClient,
|
||||
query: QueryInProgress,
|
||||
qlProgram: messages.QlProgram,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<void> {
|
||||
const searchPath = getOnDiskWorkspaceFolders();
|
||||
|
||||
if (dbItem.contents?.dbSchemeUri !== undefined) {
|
||||
const { finalDbscheme } = await cliServer.resolveUpgrades(dbItem.contents.dbSchemeUri.fsPath, searchPath, false);
|
||||
const hash = async function(filename: string): Promise<string> {
|
||||
return crypto.createHash('sha256').update(await fs.readFile(filename)).digest('hex');
|
||||
};
|
||||
|
||||
// At this point, we have learned about three dbschemes:
|
||||
|
||||
// the dbscheme of the actual database we're querying.
|
||||
const dbschemeOfDb = await hash(dbItem.contents.dbSchemeUri.fsPath);
|
||||
|
||||
// the dbscheme of the query we're running, including the library we've resolved it to use.
|
||||
const dbschemeOfLib = await hash(query.queryDbscheme);
|
||||
|
||||
// the database we're able to upgrade to
|
||||
const upgradableTo = await hash(finalDbscheme);
|
||||
|
||||
if (upgradableTo != dbschemeOfLib) {
|
||||
reportNoUpgradePath(qlProgram, query);
|
||||
}
|
||||
|
||||
if (upgradableTo == dbschemeOfLib &&
|
||||
dbschemeOfDb != dbschemeOfLib) {
|
||||
// Try to upgrade the database
|
||||
await upgradeDatabaseExplicit(
|
||||
qs,
|
||||
dbItem,
|
||||
progress,
|
||||
token
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function reportNoUpgradePath(qlProgram: messages.QlProgram, query: QueryInProgress): void {
|
||||
throw new Error(
|
||||
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compile a non-destructive upgrade.
|
||||
*/
|
||||
async function compileNonDestructiveUpgrade(
|
||||
qs: qsClient.QueryServerClient,
|
||||
upgradeTemp: tmp.DirectoryResult,
|
||||
query: QueryInProgress,
|
||||
qlProgram: messages.QlProgram,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
): Promise<string> {
|
||||
|
||||
if (!dbItem?.contents?.dbSchemeUri) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
|
||||
// When packaging is used, dependencies may exist outside of the workspace and they are always on the resolved search path.
|
||||
// When packaging is not used, all dependencies are in the workspace.
|
||||
const upgradesPath = (await qs.cliServer.cliConstraints.supportsPackaging())
|
||||
? qlProgram.libraryPath
|
||||
: getOnDiskWorkspaceFolders();
|
||||
|
||||
const { scripts, matchesTarget } = await qs.cliServer.resolveUpgrades(
|
||||
dbItem.contents.dbSchemeUri.fsPath,
|
||||
upgradesPath,
|
||||
true,
|
||||
query.queryDbscheme
|
||||
);
|
||||
|
||||
if (!matchesTarget) {
|
||||
reportNoUpgradePath(qlProgram, query);
|
||||
}
|
||||
const result = await compileDatabaseUpgradeSequence(qs, dbItem, scripts, upgradeTemp, progress, token);
|
||||
if (result.compiledUpgrade === undefined) {
|
||||
const error = result.error || '[no error message available]';
|
||||
throw new Error(error);
|
||||
}
|
||||
// We can upgrade to the actual target
|
||||
qlProgram.dbschemePath = query.queryDbscheme;
|
||||
// We are new enough that we will always support single file upgrades.
|
||||
return result.compiledUpgrade;
|
||||
}
|
||||
|
||||
|
||||
|
||||
export async function compileAndRunQueryAgainstDatabase(
|
||||
cliServer: cli.CodeQLCliServer,
|
||||
qs: qsClient.QueryServerClient,
|
||||
dbItem: DatabaseItem,
|
||||
initialInfo: InitialQueryInfo,
|
||||
queryStorageDir: string,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
templates?: Record<string, string>,
|
||||
queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
|
||||
): Promise<QueryWithResults> {
|
||||
if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
|
||||
throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
|
||||
}
|
||||
|
||||
// Get the workspace folder paths.
|
||||
const diskWorkspaceFolders = getOnDiskWorkspaceFolders();
|
||||
// Figure out the library path for the query.
|
||||
const packConfig = await cliServer.resolveLibraryPath(diskWorkspaceFolders, initialInfo.queryPath);
|
||||
|
||||
if (!packConfig.dbscheme) {
|
||||
throw new Error('Could not find a database scheme for this query. Please check that you have a valid qlpack.yml file for this query, which refers to a database scheme either in the `dbscheme` field or through one of its dependencies.');
|
||||
}
|
||||
|
||||
// Check whether the query has an entirely different schema from the
|
||||
// database. (Queries that merely need the database to be upgraded
|
||||
// won't trigger this check)
|
||||
// This test will produce confusing results if we ever change the name of the database schema files.
|
||||
const querySchemaName = path.basename(packConfig.dbscheme);
|
||||
const dbSchemaName = path.basename(dbItem.contents.dbSchemeUri.fsPath);
|
||||
if (querySchemaName != dbSchemaName) {
|
||||
void logger.log(`Query schema was ${querySchemaName}, but database schema was ${dbSchemaName}.`);
|
||||
throw new Error(`The query ${path.basename(initialInfo.queryPath)} cannot be run against the selected database (${dbItem.name}): their target languages are different. Please select a different database and try again.`);
|
||||
}
|
||||
|
||||
const qlProgram: messages.QlProgram = {
|
||||
// The project of the current document determines which library path
|
||||
// we use. The `libraryPath` field in this server message is relative
|
||||
// to the workspace root, not to the project root.
|
||||
libraryPath: packConfig.libraryPath,
|
||||
// Since we are compiling and running a query against a database,
|
||||
// we use the database's DB scheme here instead of the DB scheme
|
||||
// from the current document's project.
|
||||
dbschemePath: dbItem.contents.dbSchemeUri.fsPath,
|
||||
queryPath: initialInfo.queryPath
|
||||
};
|
||||
|
||||
// Read the query metadata if possible, to use in the UI.
|
||||
const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);
|
||||
|
||||
let availableMlModels: cli.MlModelInfo[] = [];
|
||||
if (!await cliServer.cliConstraints.supportsResolveMlModels()) {
|
||||
void logger.log('Resolving ML models is unsupported by this version of the CLI. Running the query without any ML models.');
|
||||
} else {
|
||||
try {
|
||||
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders, initialInfo.queryPath)).models;
|
||||
if (availableMlModels.length) {
|
||||
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
|
||||
} else {
|
||||
void logger.log('Did not find any available ML models.');
|
||||
}
|
||||
} catch (e) {
|
||||
const message = `Couldn't resolve available ML models for ${qlProgram.queryPath}. Running the ` +
|
||||
`query without any ML models: ${e}.`;
|
||||
void showAndLogErrorMessage(message);
|
||||
}
|
||||
}
|
||||
|
||||
const hasMetadataFile = (await dbItem.hasMetadataFile());
|
||||
const query = new QueryInProgress(
|
||||
path.join(queryStorageDir, initialInfo.id),
|
||||
dbItem.databaseUri.fsPath,
|
||||
hasMetadataFile,
|
||||
packConfig.dbscheme,
|
||||
initialInfo.quickEvalPosition,
|
||||
metadata,
|
||||
templates
|
||||
);
|
||||
await query.queryEvalInfo.createTimestampFile();
|
||||
|
||||
let upgradeDir: tmp.DirectoryResult | undefined;
|
||||
try {
|
||||
let upgradeQlo;
|
||||
if (await cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||
upgradeDir = await tmp.dir({ dir: upgradesTmpDir, unsafeCleanup: true });
|
||||
upgradeQlo = await compileNonDestructiveUpgrade(qs, upgradeDir, query, qlProgram, dbItem, progress, token);
|
||||
} else {
|
||||
await checkDbschemeCompatibility(cliServer, qs, query, qlProgram, dbItem, progress, token);
|
||||
}
|
||||
let errors;
|
||||
try {
|
||||
errors = await query.compile(qs, qlProgram, progress, token);
|
||||
} catch (e) {
|
||||
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
return createSyntheticResult(query, 'Query cancelled');
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length === 0) {
|
||||
const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
|
||||
if (result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||
const message = result.message || 'Failed to run query';
|
||||
void logger.log(message);
|
||||
void showAndLogErrorMessage(message);
|
||||
}
|
||||
const message = formatLegacyMessage(result);
|
||||
|
||||
return {
|
||||
query: query.queryEvalInfo,
|
||||
message,
|
||||
result,
|
||||
successful: result.resultType == messages.QueryResultType.SUCCESS,
|
||||
logFileLocation: result.logFileLocation,
|
||||
dispose: () => {
|
||||
qs.logger.removeAdditionalLogLocation(result.logFileLocation);
|
||||
}
|
||||
};
|
||||
} else {
|
||||
// Error dialogs are limited in size and scrollability,
|
||||
// so we include a general description of the problem,
|
||||
// and direct the user to the output window for the detailed compilation messages.
|
||||
// However we don't show quick eval errors there so we need to display them anyway.
|
||||
void qs.logger.log(
|
||||
`Failed to compile query ${initialInfo.queryPath} against database scheme ${qlProgram.dbschemePath}:`,
|
||||
{ additionalLogLocation: query.queryEvalInfo.logPath }
|
||||
);
|
||||
|
||||
const formattedMessages: string[] = [];
|
||||
|
||||
for (const error of errors) {
|
||||
const message = error.message || '[no error message available]';
|
||||
const formatted = `ERROR: ${message} (${error.position.fileName}:${error.position.line}:${error.position.column}:${error.position.endLine}:${error.position.endColumn})`;
|
||||
formattedMessages.push(formatted);
|
||||
void qs.logger.log(formatted, { additionalLogLocation: query.queryEvalInfo.logPath });
|
||||
}
|
||||
if (initialInfo.isQuickEval && formattedMessages.length <= 2) {
|
||||
// If there are more than 2 error messages, they will not be displayed well in a popup
|
||||
// and will be trimmed by the function displaying the error popup. Accordingly, we only
|
||||
// try to show the errors if there are 2 or less, otherwise we direct the user to the log.
|
||||
void showAndLogErrorMessage('Quick evaluation compilation failed: ' + formattedMessages.join('\n'));
|
||||
} else {
|
||||
void showAndLogErrorMessage((initialInfo.isQuickEval ? 'Quick evaluation' : 'Query') + compilationFailedErrorTail);
|
||||
}
|
||||
return createSyntheticResult(query, 'Query had compilation errors');
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
await upgradeDir?.cleanup();
|
||||
} catch (e) {
|
||||
void qs.logger.log(
|
||||
`Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
|
||||
{ additionalLogLocation: query.queryEvalInfo.logPath }
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const compilationFailedErrorTail = ' compilation failed. Please make sure there are no errors in the query, the database is up to date,' +
|
||||
' and the query and database use the same target language. For more details on the error, go to View > Output,' +
|
||||
' and choose CodeQL Query Server from the dropdown.';
|
||||
|
||||
export function formatLegacyMessage(result: messages.EvaluationResult) {
  switch (result.resultType) {
    case messages.QueryResultType.CANCELLATION:
      return `cancelled after ${Math.round(result.evaluationTime / 1000)} seconds`;
    case messages.QueryResultType.OOM:
      return 'out of memory';
    case messages.QueryResultType.SUCCESS:
      return `finished in ${Math.round(result.evaluationTime / 1000)} seconds`;
    case messages.QueryResultType.TIMEOUT:
      return `timed out after ${Math.round(result.evaluationTime / 1000)} seconds`;
    case messages.QueryResultType.OTHER_ERROR:
    default:
      return result.message ? `failed: ${result.message}` : 'failed';
  }
}
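// For example (field values are illustrative):
//   formatLegacyMessage({ resultType: QueryResultType.SUCCESS, evaluationTime: 4300, ... })    -> 'finished in 4 seconds'
//   formatLegacyMessage({ resultType: QueryResultType.OTHER_ERROR, message: 'no space left' }) -> 'failed: no space left'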
|
||||
|
||||
/**
|
||||
* Create a synthetic result for a query that failed to compile.
|
||||
*/
|
||||
function createSyntheticResult(
|
||||
query: QueryInProgress,
|
||||
message: string,
|
||||
): QueryWithResults {
|
||||
return {
|
||||
query: query.queryEvalInfo,
|
||||
message,
|
||||
result: {
|
||||
evaluationTime: 0,
|
||||
queryId: 0,
|
||||
resultType: messages.QueryResultType.OTHER_ERROR,
|
||||
message,
|
||||
runId: 0,
|
||||
},
|
||||
successful: false,
|
||||
dispose: () => { /**/ },
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
function createSimpleTemplates(templates: Record<string, string> | undefined): messages.TemplateDefinitions | undefined {
  if (!templates) {
    return undefined;
  }
  const result: messages.TemplateDefinitions = {};
  for (const key of Object.keys(templates)) {
    result[key] = {
      values: {
        tuples: [[{ stringValue: templates[key] }]]
      }
    };
  }
  return result;
}
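// Each template string becomes a single-row, single-column relation, e.g. (input is illustrative):
//   createSimpleTemplates({ selectedSourceFile: '/src/app.ts' })
//     -> { selectedSourceFile: { values: { tuples: [[{ stringValue: '/src/app.ts' }]] } } }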
|
||||
@@ -1,14 +1,12 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage } from './helpers';
|
||||
import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from '../helpers';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { logger } from '../logging';
|
||||
import * as messages from '../pure/legacy-messages';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { upgradesTmpDir } from './run-queries';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import * as path from 'path';
|
||||
import * as semver from 'semver';
|
||||
import { DatabaseItem } from './databases';
|
||||
import { DatabaseItem } from '../databases';
|
||||
|
||||
/**
|
||||
* Maximum number of lines to include from database upgrade message,
|
||||
@@ -17,17 +15,6 @@ import { DatabaseItem } from './databases';
|
||||
*/
|
||||
const MAX_UPGRADE_MESSAGE_LINES = 10;
|
||||
|
||||
/**
|
||||
* Check that we support non-destructive upgrades.
|
||||
*
|
||||
* This requires 3 features. The ability to compile an upgrade sequence; The ability to
|
||||
* run a non-destructive upgrades as a query; the ability to specify a target when
|
||||
* resolving upgrades. We check for a version of codeql that has all three features.
|
||||
*/
|
||||
export async function hasNondestructiveUpgradeCapabilities(qs: qsClient.QueryServerClient): Promise<boolean> {
|
||||
return semver.gte(await qs.cliServer.getVersion(), '2.4.2');
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Compile a database upgrade sequence.
|
||||
@@ -35,16 +22,16 @@ export async function hasNondestructiveUpgradeCapabilities(qs: qsClient.QuerySer
|
||||
*/
|
||||
export async function compileDatabaseUpgradeSequence(
|
||||
qs: qsClient.QueryServerClient,
|
||||
db: DatabaseItem,
|
||||
dbItem: DatabaseItem,
|
||||
resolvedSequence: string[],
|
||||
currentUpgradeTmp: tmp.DirectoryResult,
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken
|
||||
): Promise<messages.CompileUpgradeSequenceResult> {
|
||||
if (db.contents === undefined || db.contents.dbSchemeUri === undefined) {
|
||||
if (dbItem.contents === undefined || dbItem.contents.dbSchemeUri === undefined) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
if (!await hasNondestructiveUpgradeCapabilities(qs)) {
|
||||
if (!await qs.cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||
throw new Error('The version of codeql is too old to run non-destructive upgrades.');
|
||||
}
|
||||
// If possible just compile the upgrade sequence
|
||||
@@ -56,14 +43,14 @@ export async function compileDatabaseUpgradeSequence(
|
||||
|
||||
async function compileDatabaseUpgrade(
|
||||
qs: qsClient.QueryServerClient,
|
||||
db: DatabaseItem,
|
||||
dbItem: DatabaseItem,
|
||||
targetDbScheme: string,
|
||||
resolvedSequence: string[],
|
||||
currentUpgradeTmp: tmp.DirectoryResult,
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken
|
||||
): Promise<messages.CompileUpgradeResult> {
|
||||
if (!db.contents?.dbSchemeUri) {
|
||||
if (!dbItem.contents?.dbSchemeUri) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
// We have the upgrades we want but compileUpgrade
|
||||
@@ -78,7 +65,7 @@ async function compileDatabaseUpgrade(
|
||||
});
|
||||
return qs.sendRequest(messages.compileUpgrade, {
|
||||
upgrade: {
|
||||
fromDbscheme: db.contents.dbSchemeUri.fsPath,
|
||||
fromDbscheme: dbItem.contents.dbSchemeUri.fsPath,
|
||||
toDbscheme: targetDbScheme,
|
||||
additionalUpgrades: Array.from(uniqueParentDirs)
|
||||
},
|
||||
@@ -159,18 +146,18 @@ function getUpgradeDescriptions(compiled: messages.CompiledUpgrades): messages.U
|
||||
*/
|
||||
export async function upgradeDatabaseExplicit(
|
||||
qs: qsClient.QueryServerClient,
|
||||
db: DatabaseItem,
|
||||
dbItem: DatabaseItem,
|
||||
progress: ProgressCallback,
|
||||
token: vscode.CancellationToken,
|
||||
): Promise<messages.RunUpgradeResult | undefined> {
|
||||
|
||||
const searchPath: string[] = getOnDiskWorkspaceFolders();
|
||||
|
||||
if (!db?.contents?.dbSchemeUri) {
|
||||
if (!dbItem?.contents?.dbSchemeUri) {
|
||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||
}
|
||||
const upgradeInfo = await qs.cliServer.resolveUpgrades(
|
||||
db.contents.dbSchemeUri.fsPath,
|
||||
dbItem.contents.dbSchemeUri.fsPath,
|
||||
searchPath,
|
||||
false
|
||||
);
|
||||
@@ -180,11 +167,11 @@ export async function upgradeDatabaseExplicit(
|
||||
if (finalDbscheme === undefined) {
|
||||
throw new Error('Could not determine target dbscheme to upgrade to.');
|
||||
}
|
||||
const currentUpgradeTmp = await tmp.dir({ dir: upgradesTmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
|
||||
const currentUpgradeTmp = await tmp.dir({ dir: tmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
|
||||
try {
|
||||
let compileUpgradeResult: messages.CompileUpgradeResult;
|
||||
try {
|
||||
compileUpgradeResult = await compileDatabaseUpgrade(qs, db, finalDbscheme, scripts, currentUpgradeTmp, progress, token);
|
||||
compileUpgradeResult = await compileDatabaseUpgrade(qs, dbItem, finalDbscheme, scripts, currentUpgradeTmp, progress, token);
|
||||
}
|
||||
catch (e) {
|
||||
void showAndLogErrorMessage(`Compilation of database upgrades failed: ${e}`);
|
||||
@@ -200,13 +187,20 @@ export async function upgradeDatabaseExplicit(
|
||||
return;
|
||||
}
|
||||
|
||||
await checkAndConfirmDatabaseUpgrade(compileUpgradeResult.compiledUpgrades, db, qs.cliServer.quiet);
|
||||
await checkAndConfirmDatabaseUpgrade(compileUpgradeResult.compiledUpgrades, dbItem, qs.cliServer.quiet);
|
||||
|
||||
try {
|
||||
void qs.logger.log('Running the following database upgrade:');
|
||||
|
||||
getUpgradeDescriptions(compileUpgradeResult.compiledUpgrades).map(s => s.description).join('\n');
|
||||
return await runDatabaseUpgrade(qs, db, compileUpgradeResult.compiledUpgrades, progress, token);
|
||||
const result = await runDatabaseUpgrade(qs, dbItem, compileUpgradeResult.compiledUpgrades, progress, token);
|
||||
|
||||
// TODO Can remove the next lines when https://github.com/github/codeql-team/issues/1241 is fixed
|
||||
// restart the query server to avoid a bug in the CLI where the upgrade is applied, but the old dbscheme
|
||||
// is still cached in memory.
|
||||
|
||||
await qs.restartQueryServer(progress, token);
|
||||
return result;
|
||||
}
|
||||
catch (e) {
|
||||
void showAndLogErrorMessage(`Database upgrade failed: ${e}`);
|
||||
460
extensions/ql-vscode/src/log-insights/join-order.ts
Normal file
@@ -0,0 +1,460 @@
|
||||
import * as I from 'immutable';
|
||||
import { EvaluationLogProblemReporter, EvaluationLogScanner, EvaluationLogScannerProvider } from './log-scanner';
|
||||
import { InLayer, ComputeRecursive, SummaryEvent, PipelineRun, ComputeSimple } from './log-summary';
|
||||
|
||||
const DEFAULT_WARNING_THRESHOLD = 50;
|
||||
|
||||
/**
 * Like `max`, but returns 0 if no meaningful maximum can be computed.
 */
function safeMax(it?: Iterable<number>) {
  const m = Math.max(...(it || []));
  return Number.isFinite(m) ? m : 0;
}
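// For example (Math.max() with no arguments is -Infinity, which would otherwise leak into the metric):
//   safeMax()          === 0
//   safeMax([])        === 0
//   safeMax([3, 7, 5]) === 7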
|
||||
|
||||
/**
 * Compute a key for the maps that are sent to report generation.
 * Should only be used on events that are known to define queryCausingWork.
 */
function makeKey(
  queryCausingWork: string | undefined,
  predicate: string,
  suffix = ''
): string {
  if (queryCausingWork === undefined) {
    throw new Error(
      'queryCausingWork was not defined on an event we expected it to be defined for!'
    );
  }
  return `${queryCausingWork}:${predicate}${suffix ? ' ' + suffix : ''}`;
}
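// Example keys (names are illustrative): a non-recursive predicate is keyed as
//   makeKey('Foo.ql', 'Expr::getType')                    === 'Foo.ql:Expr::getType'
// while a recursive predicate gets one key per ordering, e.g.
//   makeKey('Foo.ql', 'Expr::getType', '(order_500000)')  === 'Foo.ql:Expr::getType (order_500000)'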
|
||||
|
||||
const DEPENDENT_PREDICATES_REGEXP = (() => {
|
||||
const regexps = [
|
||||
// SCAN id
|
||||
String.raw`SCAN\s+([0-9a-zA-Z:#_]+)\s`,
|
||||
// JOIN id WITH id
|
||||
String.raw`JOIN\s+([0-9a-zA-Z:#_]+)\s+WITH\s+([0-9a-zA-Z:#_]+)\s`,
|
||||
// AGGREGATE id, id
|
||||
String.raw`AGGREGATE\s+([0-9a-zA-Z:#_]+)\s*,\s+([0-9a-zA-Z:#_]+)`,
|
||||
// id AND NOT id
|
||||
String.raw`([0-9a-zA-Z:#_]+)\s+AND\s+NOT\s+([0-9a-zA-Z:#_]+)`,
|
||||
// INVOKE HIGHER-ORDER RELATION rel ON <id, ..., id>
|
||||
String.raw`INVOKE\s+HIGHER-ORDER\s+RELATION\s[^\s]+\sON\s+<([0-9a-zA-Z:#_<>]+)((?:,[0-9a-zA-Z:#_<>]+)*)>`,
|
||||
// SELECT id
|
||||
String.raw`SELECT\s+([0-9a-zA-Z:#_]+)`
|
||||
];
|
||||
return new RegExp(
|
||||
`${String.raw`\{[0-9]+\}\s+[0-9a-zA-Z]+\s=\s(?:` + regexps.join('|')})`
|
||||
);
|
||||
})();
|
||||
|
||||
function getDependentPredicates(operations: string[]): I.List<string> {
|
||||
return I.List(operations).flatMap(operation => {
|
||||
const matches = DEPENDENT_PREDICATES_REGEXP.exec(operation.trim());
|
||||
if (matches !== null) {
|
||||
return I.List(matches)
|
||||
.rest() // Skip the first group as it's just the entire string
|
||||
.filter(x => !!x && !x.match('r[0-9]+|PRIMITIVE')) // Only keep the references to predicates.
|
||||
.flatMap(x => x.split(',')) // Group 2 in the INVOKE HIGHER_ORDER RELATION case is a comma-separated list of identifiers.
|
||||
.filter(x => !!x); // Remove empty strings
|
||||
} else {
|
||||
return I.List();
|
||||
}
|
||||
});
|
||||
}
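// Rough illustration (the RA line is made up, but shaped like the lines the regexp targets):
//   getDependentPredicates(['{2} r7 = JOIN Callable::getACallee#prev WITH Call::getTarget ON FIRST 1 OUTPUT ...'])
// yields List ['Callable::getACallee#prev', 'Call::getTarget']; intermediate relations such as
// r1, r2, ... and PRIMITIVE operations are filtered out.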
|
||||
|
||||
function getMainHash(event: InLayer | ComputeRecursive): string {
|
||||
switch (event.evaluationStrategy) {
|
||||
case 'IN_LAYER':
|
||||
return event.mainHash;
|
||||
case 'COMPUTE_RECURSIVE':
|
||||
return event.raHash;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sum arrays a and b element-wise. The shorter array is padded with 0s if the arrays are not the same length.
|
||||
*/
|
||||
function pointwiseSum(a: Int32Array, b: Int32Array, problemReporter: EvaluationLogProblemReporter): Int32Array {
|
||||
function reportIfInconsistent(ai: number, bi: number) {
|
||||
if (ai === -1 && bi !== -1) {
|
||||
problemReporter.log(
|
||||
`Operation was not evaluated in the first pipeline, but it was evaluated in the accumulated pipeline (with tuple count ${bi}).`
|
||||
);
|
||||
}
|
||||
if (ai !== -1 && bi === -1) {
|
||||
problemReporter.log(
|
||||
`Operation was evaluated in the first pipeline (with tuple count ${ai}), but it was not evaluated in the accumulated pipeline.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const length = Math.max(a.length, b.length);
|
||||
const result = new Int32Array(length);
|
||||
for (let i = 0; i < length; i++) {
|
||||
const ai = a[i] || 0;
|
||||
const bi = b[i] || 0;
|
||||
// -1 is used to represent the absence of a tuple count for a line in the pretty-printed RA (e.g. an empty line), so we ignore those.
|
||||
if (i < a.length && i < b.length && (ai === -1 || bi === -1)) {
|
||||
result[i] = -1;
|
||||
reportIfInconsistent(ai, bi);
|
||||
} else {
|
||||
result[i] = ai + bi;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
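// Worked example (Int32Array arguments shown as plain arrays):
//   pointwiseSum([1, 2], [10, 20, 30], reporter) -> [11, 22, 30]   (shorter side padded with 0s)
//   pointwiseSum([-1, 2], [5, 2], reporter)      -> [-1, 4]        (index 0 stays "absent" and the mismatch is reported)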
|
||||
|
||||
function pushValue<K, V>(m: Map<K, V[]>, k: K, v: V) {
  if (!m.has(k)) {
    m.set(k, []);
  }
  m.get(k)!.push(v);
  return m;
}
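// In other words, a multimap append:
//   const m = new Map<string, number[]>();
//   pushValue(m, 'k', 1);
//   pushValue(m, 'k', 2);   // m.get('k') -> [1, 2]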
|
||||
|
||||
function computeJoinOrderBadness(
  maxTupleCount: number,
  maxDependentPredicateSize: number,
  resultSize: number
): number {
  return maxTupleCount / Math.max(maxDependentPredicateSize, resultSize);
}
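// Worked example with made-up sizes: a pipeline peaking at 10,000,000 tuples whose largest
// dependent predicate has 40,000 rows and whose result has 25,000 rows scores
//   computeJoinOrderBadness(10_000_000, 40_000, 25_000) === 250
// which is well above DEFAULT_WARNING_THRESHOLD (50); a pipeline whose peak stays close to
// its input or output size scores near 1 and is not reported.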
|
||||
|
||||
/**
|
||||
* A bucket contains the pointwise sum of the tuple counts, result sizes and dependent predicate sizes
|
||||
* For each (predicate, order) in an SCC, we will compute a bucket.
|
||||
*/
|
||||
interface Bucket {
|
||||
tupleCounts: Int32Array;
|
||||
resultSize: number;
|
||||
dependentPredicateSizes: I.Map<string, number>;
|
||||
}
|
||||
|
||||
class JoinOrderScanner implements EvaluationLogScanner {
|
||||
// Map a predicate hash to its result size
|
||||
private readonly predicateSizes = new Map<string, number>();
|
||||
private readonly layerEvents = new Map<string, (ComputeRecursive | InLayer)[]>();
|
||||
// Map a key of the form 'query-with-demand : predicate name' to its badness input.
|
||||
private readonly maxTupleCountMap = new Map<string, number[]>();
|
||||
private readonly resultSizeMap = new Map<string, number[]>();
|
||||
private readonly maxDependentPredicateSizeMap = new Map<string, number[]>();
|
||||
private readonly joinOrderMetricMap = new Map<string, number>();
|
||||
|
||||
constructor(
|
||||
private readonly problemReporter: EvaluationLogProblemReporter,
|
||||
private readonly warningThreshold: number) {
|
||||
}
|
||||
|
||||
public onEvent(event: SummaryEvent): void {
|
||||
if (
|
||||
event.completionType !== undefined &&
|
||||
event.completionType !== 'SUCCESS'
|
||||
) {
|
||||
return; // Skip any evaluation that wasn't successful
|
||||
}
|
||||
|
||||
this.recordPredicateSizes(event);
|
||||
this.computeBadnessMetric(event);
|
||||
}
|
||||
|
||||
public onDone(): void {
|
||||
void this;
|
||||
}
|
||||
|
||||
private recordPredicateSizes(event: SummaryEvent): void {
|
||||
switch (event.evaluationStrategy) {
|
||||
case 'EXTENSIONAL':
|
||||
case 'COMPUTED_EXTENSIONAL':
|
||||
case 'COMPUTE_SIMPLE':
|
||||
case 'CACHACA':
|
||||
case 'CACHE_HIT': {
|
||||
this.predicateSizes.set(event.raHash, event.resultSize);
|
||||
break;
|
||||
}
|
||||
case 'SENTINEL_EMPTY': {
|
||||
this.predicateSizes.set(event.raHash, 0);
|
||||
break;
|
||||
}
|
||||
case 'COMPUTE_RECURSIVE':
|
||||
case 'IN_LAYER': {
|
||||
this.predicateSizes.set(event.raHash, event.resultSize);
|
||||
// layerEvents are indexed by the mainHash.
|
||||
const hash = getMainHash(event);
|
||||
if (!this.layerEvents.has(hash)) {
|
||||
this.layerEvents.set(hash, []);
|
||||
}
|
||||
this.layerEvents.get(hash)!.push(event);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private reportProblemIfNecessary(event: SummaryEvent, iteration: number, metric: number): void {
|
||||
if (metric >= this.warningThreshold) {
|
||||
this.problemReporter.reportProblem(event.predicateName, event.raHash, iteration,
|
||||
`Relation '${event.predicateName}' has an inefficient join order. Its join order metric is ${metric.toFixed(2)}, which is larger than the threshold of ${this.warningThreshold.toFixed(2)}.`);
|
||||
}
|
||||
}
|
||||
|
||||
private computeBadnessMetric(event: SummaryEvent): void {
|
||||
if (
|
||||
event.completionType !== undefined &&
|
||||
event.completionType !== 'SUCCESS'
|
||||
) {
|
||||
return; // Skip any evaluation that wasn't successful
|
||||
}
|
||||
switch (event.evaluationStrategy) {
|
||||
case 'COMPUTE_SIMPLE': {
|
||||
if (!event.pipelineRuns) {
|
||||
// skip if the optional pipelineRuns field is not present.
|
||||
break;
|
||||
}
|
||||
// Compute the badness metric for a non-recursive predicate. The metric in this case is defined as:
|
||||
// badness = (max tuple count in the pipeline) / (largest predicate this pipeline depends on)
|
||||
const key = makeKey(event.queryCausingWork, event.predicateName);
|
||||
const resultSize = event.resultSize;
|
||||
|
||||
// There is only one entry in `pipelineRuns` if it's a non-recursive predicate.
|
||||
const { maxTupleCount, maxDependentPredicateSize } =
|
||||
this.badnessInputsForNonRecursiveDelta(event.pipelineRuns[0], event);
|
||||
|
||||
if (maxDependentPredicateSize > 0) {
|
||||
pushValue(this.maxTupleCountMap, key, maxTupleCount);
|
||||
pushValue(this.resultSizeMap, key, resultSize);
|
||||
pushValue(
|
||||
this.maxDependentPredicateSizeMap,
|
||||
key,
|
||||
maxDependentPredicateSize
|
||||
);
|
||||
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize!);
|
||||
this.joinOrderMetricMap.set(key, metric);
|
||||
this.reportProblemIfNecessary(event, 0, metric);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case 'COMPUTE_RECURSIVE': {
|
||||
// Compute the badness metric for a recursive predicate for each ordering.
|
||||
const sccMetricInput = this.badnessInputsForRecursiveDelta(event);
|
||||
// Loop through each predicate in the SCC
|
||||
sccMetricInput.forEach((buckets, predicate) => {
|
||||
// Loop through each ordering of the predicate
|
||||
buckets.forEach((bucket, raReference) => {
|
||||
// Format the key as demanding-query:name (ordering)
|
||||
const key = makeKey(
|
||||
event.queryCausingWork,
|
||||
predicate,
|
||||
`(${raReference})`
|
||||
);
|
||||
const maxTupleCount = Math.max(...bucket.tupleCounts);
|
||||
const resultSize = bucket.resultSize;
|
||||
const maxDependentPredicateSize = Math.max(
|
||||
...bucket.dependentPredicateSizes.values()
|
||||
);
|
||||
|
||||
if (maxDependentPredicateSize > 0) {
|
||||
pushValue(this.maxTupleCountMap, key, maxTupleCount);
|
||||
pushValue(this.resultSizeMap, key, resultSize);
|
||||
pushValue(
|
||||
this.maxDependentPredicateSizeMap,
|
||||
key,
|
||||
maxDependentPredicateSize
|
||||
);
|
||||
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize);
|
||||
const oldMetric = this.joinOrderMetricMap.get(key);
|
||||
if ((oldMetric === undefined) || (metric > oldMetric)) {
|
||||
this.joinOrderMetricMap.set(key, metric);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate through an SCC with main node `event`.
|
||||
*/
|
||||
private iterateSCC(
|
||||
event: ComputeRecursive,
|
||||
func: (
|
||||
inLayerEvent: ComputeRecursive | InLayer,
|
||||
run: PipelineRun,
|
||||
iteration: number
|
||||
) => void
|
||||
): void {
|
||||
const sccEvents = this.layerEvents.get(event.raHash)!;
|
||||
const nextPipeline: number[] = new Array(sccEvents.length).fill(0);
|
||||
|
||||
const maxIteration = Math.max(
|
||||
...sccEvents.map(e => e.predicateIterationMillis.length)
|
||||
);
|
||||
|
||||
for (let iteration = 0; iteration < maxIteration; ++iteration) {
|
||||
// Loop through each predicate in this iteration
|
||||
for (let predicate = 0; predicate < sccEvents.length; ++predicate) {
|
||||
const inLayerEvent = sccEvents[predicate];
|
||||
const iterationTime =
|
||||
inLayerEvent.predicateIterationMillis.length <= iteration
|
||||
? -1
|
||||
: inLayerEvent.predicateIterationMillis[iteration];
|
||||
if (iterationTime != -1) {
|
||||
const run: PipelineRun =
|
||||
inLayerEvent.pipelineRuns[nextPipeline[predicate]++];
|
||||
func(inLayerEvent, run, iteration);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the maximum tuple count and maximum dependent predicate size for a non-recursive pipeline
|
||||
*/
|
||||
private badnessInputsForNonRecursiveDelta(
|
||||
pipelineRun: PipelineRun,
|
||||
event: ComputeSimple
|
||||
): { maxTupleCount: number; maxDependentPredicateSize: number } {
|
||||
const dependentPredicateSizes = Object.values(event.dependencies).map(hash =>
|
||||
this.predicateSizes.get(hash) ?? 0 // Should always be present, but zero is a safe default.
|
||||
);
|
||||
const maxDependentPredicateSize = safeMax(dependentPredicateSizes);
|
||||
return {
|
||||
maxTupleCount: safeMax(pipelineRun.counts),
|
||||
maxDependentPredicateSize: maxDependentPredicateSize
|
||||
};
|
||||
}
|
||||
|
||||
private prevDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
|
||||
// If an iteration isn't present in the map it means it was skipped because the optimizer
|
||||
// inferred that it was empty. So its size is 0.
|
||||
return this.curDeltaSizes(event, predicate, i - 1);
|
||||
}
|
||||
|
||||
private curDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
|
||||
// If an iteration isn't present in the map it means it was skipped because the optimizer
|
||||
// inferred that it was empty. So its size is 0.
|
||||
return (
|
||||
this.layerEvents.get(event.raHash)?.find(x => x.predicateName === predicate)?.deltaSizes[i] ?? 0
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the metric dependent predicate sizes and the result size for a predicate in an SCC.
|
||||
*/
|
||||
private badnessInputsForLayer(
|
||||
event: ComputeRecursive,
|
||||
inLayerEvent: InLayer | ComputeRecursive,
|
||||
raReference: string,
|
||||
iteration: number
|
||||
) {
|
||||
const dependentPredicates = getDependentPredicates(
|
||||
inLayerEvent.ra[raReference]
|
||||
);
|
||||
let dependentPredicateSizes: I.Map<string, number>;
|
||||
// We treat the base case as a non-recursive pipeline. In that case, the dependent predicates are
|
||||
// the dependencies of the base case and the cur_deltas.
|
||||
if (raReference === 'base') {
|
||||
dependentPredicateSizes = I.Map(
|
||||
dependentPredicates.map((pred): [string, number] => {
|
||||
// A base case cannot contain a `prev_delta`, but it can contain a `cur_delta`.
|
||||
let size = 0;
|
||||
if (pred.endsWith('#cur_delta')) {
|
||||
size = this.curDeltaSizes(
|
||||
event,
|
||||
pred.slice(0, -'#cur_delta'.length),
|
||||
iteration
|
||||
);
|
||||
} else {
|
||||
const hash = event.dependencies[pred];
|
||||
size = this.predicateSizes.get(hash)!;
|
||||
}
|
||||
return [pred, size];
|
||||
})
|
||||
);
|
||||
} else {
|
||||
// It's a non-base case in a recursive pipeline. In that case, the dependent predicates are
|
||||
// only the prev_deltas.
|
||||
dependentPredicateSizes = I.Map(
|
||||
dependentPredicates
|
||||
.flatMap(pred => {
|
||||
// If it's actually a prev_delta
|
||||
if (pred.endsWith('#prev_delta')) {
|
||||
// Return the predicate without the #prev_delta suffix.
|
||||
return [pred.slice(0, -'#prev_delta'.length)];
|
||||
} else {
|
||||
// Not a recursive delta. Skip it.
|
||||
return [];
|
||||
}
|
||||
})
|
||||
.map((prev): [string, number] => {
|
||||
const size = this.prevDeltaSizes(event, prev, iteration);
|
||||
return [prev, size];
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const deltaSize = inLayerEvent.deltaSizes[iteration];
|
||||
return { dependentPredicateSizes, deltaSize };
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the metric input for all the events in an SCC that starts with main node `event`.
|
||||
*/
|
||||
private badnessInputsForRecursiveDelta(event: ComputeRecursive): Map<string, Map<string, Bucket>> {
|
||||
// nameToOrderToBucket : predicate name -> ordering (i.e., standard, order_500000, etc.) -> bucket
|
||||
const nameToOrderToBucket = new Map<string, Map<string, Bucket>>();
|
||||
|
||||
// Iterate through the SCC and compute the metric inputs
|
||||
this.iterateSCC(event, (inLayerEvent, run, iteration) => {
|
||||
const raReference = run.raReference;
|
||||
const predicateName = inLayerEvent.predicateName;
|
||||
if (!nameToOrderToBucket.has(predicateName)) {
|
||||
nameToOrderToBucket.set(predicateName, new Map());
|
||||
}
|
||||
const orderTobucket = nameToOrderToBucket.get(predicateName)!;
|
||||
if (!orderTobucket.has(raReference)) {
|
||||
orderTobucket.set(raReference, {
|
||||
tupleCounts: new Int32Array(0),
|
||||
resultSize: 0,
|
||||
dependentPredicateSizes: I.Map()
|
||||
});
|
||||
}
|
||||
|
||||
const { dependentPredicateSizes, deltaSize } = this.badnessInputsForLayer(
|
||||
event,
|
||||
inLayerEvent,
|
||||
raReference,
|
||||
iteration
|
||||
);
|
||||
|
||||
const bucket = orderTobucket.get(raReference)!;
|
||||
// Pointwise sum the tuple counts
|
||||
const newTupleCounts = pointwiseSum(
|
||||
bucket.tupleCounts,
|
||||
new Int32Array(run.counts),
|
||||
this.problemReporter
|
||||
);
|
||||
const resultSize = bucket.resultSize + deltaSize;
|
||||
// Pointwise sum the deltas.
|
||||
const newDependentPredicateSizes = bucket.dependentPredicateSizes.mergeWith(
|
||||
(oldSize, newSize) => oldSize + newSize,
|
||||
dependentPredicateSizes
|
||||
);
|
||||
orderTobucket.set(raReference, {
|
||||
tupleCounts: newTupleCounts,
|
||||
resultSize: resultSize,
|
||||
dependentPredicateSizes: newDependentPredicateSizes
|
||||
});
|
||||
});
|
||||
return nameToOrderToBucket;
|
||||
}
|
||||
}
|
||||
|
||||
export class JoinOrderScannerProvider implements EvaluationLogScannerProvider {
|
||||
public createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner {
|
||||
return new JoinOrderScanner(problemReporter, DEFAULT_WARNING_THRESHOLD);
|
||||
}
|
||||
}
|
||||
23  extensions/ql-vscode/src/log-insights/jsonl-reader.ts  Normal file
@@ -0,0 +1,23 @@
import * as fs from 'fs-extra';

/**
 * Read a file consisting of multiple JSON objects. Each object is separated from the previous one
 * by a double newline sequence. This is basically a more human-readable form of JSONL.
 *
 * The current implementation reads the entire text of the document into memory, but in the future
 * it will stream the document to improve the performance with large documents.
 *
 * @param path The path to the file.
 * @param handler Callback to be invoked for each top-level JSON object in order.
 */
export async function readJsonlFile(path: string, handler: (value: any) => Promise<void>): Promise<void> {
  const logSummary = await fs.readFile(path, 'utf-8');

  // Split on the blank-line delimiters that separate the top-level JSON objects in the summary.
  const jsonSummaryObjects: string[] = logSummary.split(/\r?\n\r?\n/g);

  for (const obj of jsonSummaryObjects) {
    const jsonObj = JSON.parse(obj);
    await handler(jsonObj);
  }
}
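A minimal usage sketch, assuming a summary file at a hypothetical path; the handler just counts the top-level objects:

// Hypothetical path; readJsonlFile invokes the handler once per top-level JSON object.
let count = 0;
await readJsonlFile('/tmp/evaluator-log.summary.jsonl', async () => {
  count++;
});
console.log(`Summary contains ${count} events.`);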
109
extensions/ql-vscode/src/log-insights/log-scanner-service.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { Diagnostic, DiagnosticSeverity, languages, Range, Uri } from 'vscode';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { QueryHistoryManager } from '../query-history';
|
||||
import { QueryHistoryInfo } from '../query-history-info';
|
||||
import { EvaluationLogProblemReporter, EvaluationLogScannerSet } from './log-scanner';
|
||||
import { PipelineInfo, SummarySymbols } from './summary-parser';
|
||||
import * as fs from 'fs-extra';
|
||||
import { logger } from '../logging';
|
||||
|
||||
/**
|
||||
* Compute the key used to find a predicate in the summary symbols.
|
||||
* @param name The name of the predicate.
|
||||
* @param raHash The RA hash of the predicate.
|
||||
* @returns The key of the predicate, consisting of `name@shortHash`, where `shortHash` is the first
|
||||
* eight characters of `raHash`.
|
||||
*/
|
||||
function predicateSymbolKey(name: string, raHash: string): string {
|
||||
return `${name}@${raHash.substring(0, 8)}`;
|
||||
}
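For example (values invented), the key combines the predicate name with the first eight characters of its RA hash:

// Illustrative only: the predicate name and hash below are made up.
predicateSymbolKey('Expr::getParent#dispred#f0820431#ff', '76d6745o1234abcd');
// => 'Expr::getParent#dispred#f0820431#ff@76d6745o'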
|
||||
|
||||
/**
|
||||
* Implementation of `EvaluationLogProblemReporter` that generates `Diagnostic` objects to display
|
||||
* in the VS Code "Problems" view.
|
||||
*/
|
||||
class ProblemReporter implements EvaluationLogProblemReporter {
|
||||
public readonly diagnostics: Diagnostic[] = [];
|
||||
|
||||
constructor(private readonly symbols: SummarySymbols | undefined) {
|
||||
}
|
||||
|
||||
public reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void {
|
||||
const nameWithHash = predicateSymbolKey(predicateName, raHash);
|
||||
const predicateSymbol = this.symbols?.predicates[nameWithHash];
|
||||
let predicateInfo: PipelineInfo | undefined = undefined;
|
||||
if (predicateSymbol !== undefined) {
|
||||
predicateInfo = predicateSymbol.iterations[iteration];
|
||||
}
|
||||
if (predicateInfo !== undefined) {
|
||||
const range = new Range(predicateInfo.raStartLine, 0, predicateInfo.raEndLine + 1, 0);
|
||||
this.diagnostics.push(new Diagnostic(range, message, DiagnosticSeverity.Error));
|
||||
}
|
||||
}
|
||||
|
||||
public log(message: string): void {
|
||||
void logger.log(message);
|
||||
}
|
||||
}
|
||||
|
||||
export class LogScannerService extends DisposableObject {
|
||||
public readonly scanners = new EvaluationLogScannerSet();
|
||||
private readonly diagnosticCollection = this.push(languages.createDiagnosticCollection('ql-eval-log'));
|
||||
private currentItem: QueryHistoryInfo | undefined = undefined;
|
||||
|
||||
constructor(qhm: QueryHistoryManager) {
|
||||
super();
|
||||
|
||||
this.push(qhm.onDidChangeCurrentQueryItem(async (item) => {
|
||||
if (item !== this.currentItem) {
|
||||
this.currentItem = item;
|
||||
await this.scanEvalLog(item);
|
||||
}
|
||||
}));
|
||||
|
||||
this.push(qhm.onDidCompleteQuery(async (item) => {
|
||||
if (item === this.currentItem) {
|
||||
await this.scanEvalLog(item);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the evaluation log for a query, and report any diagnostics.
|
||||
*
|
||||
* @param query The query whose log is to be scanned.
|
||||
*/
|
||||
public async scanEvalLog(
|
||||
query: QueryHistoryInfo | undefined
|
||||
): Promise<void> {
|
||||
this.diagnosticCollection.clear();
|
||||
|
||||
if ((query?.t !== 'local')
|
||||
|| (query.evalLogSummaryLocation === undefined)
|
||||
|| (query.jsonEvalLogSummaryLocation === undefined)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const diagnostics = await this.scanLog(query.jsonEvalLogSummaryLocation, query.evalLogSummarySymbolsLocation);
|
||||
const uri = Uri.file(query.evalLogSummaryLocation);
|
||||
this.diagnosticCollection.set(uri, diagnostics);
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the evaluator summary log for problems, using the scanners for all registered providers.
|
||||
* @param jsonSummaryLocation The file path of the JSON summary log.
|
||||
* @param symbolsLocation The file path of the symbols file for the human-readable log summary.
|
||||
* @returns An array of `Diagnostic`s representing the problems found by scanners.
|
||||
*/
|
||||
private async scanLog(jsonSummaryLocation: string, symbolsLocation: string | undefined): Promise<Diagnostic[]> {
|
||||
let symbols: SummarySymbols | undefined = undefined;
|
||||
if (symbolsLocation !== undefined) {
|
||||
symbols = JSON.parse(await fs.readFile(symbolsLocation, { encoding: 'utf-8' }));
|
||||
}
|
||||
const problemReporter = new ProblemReporter(symbols);
|
||||
|
||||
await this.scanners.scanLog(jsonSummaryLocation, problemReporter);
|
||||
|
||||
return problemReporter.diagnostics;
|
||||
}
|
||||
}
|
||||
103
extensions/ql-vscode/src/log-insights/log-scanner.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import { SummaryEvent } from './log-summary';
|
||||
import { readJsonlFile } from './jsonl-reader';
|
||||
|
||||
/**
|
||||
* Callback interface used to report diagnostics from a log scanner.
|
||||
*/
|
||||
export interface EvaluationLogProblemReporter {
|
||||
/**
|
||||
* Report a potential problem detected in the evaluation log.
|
||||
*
|
||||
* @param predicateName The mangled name of the predicate with the problem.
|
||||
* @param raHash The RA hash of the predicate with the problem.
|
||||
* @param iteration The iteration number with the problem. For a non-recursive predicate, this
|
||||
* must be zero.
|
||||
* @param message The problem message.
|
||||
*/
|
||||
reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void;
|
||||
|
||||
/**
|
||||
* Log a message about a problem in the implementation of the scanner. These will typically be
|
||||
* displayed separate from any problems reported via `reportProblem()`.
|
||||
*/
|
||||
log(message: string): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface implemented by a log scanner. Instances are created via
|
||||
* `EvaluationLogScannerProvider.createScanner()`.
|
||||
*/
|
||||
export interface EvaluationLogScanner {
|
||||
/**
|
||||
* Called for each event in the log summary, in order. The implementation can report problems via
|
||||
* the `EvaluationLogProblemReporter` interface that was supplied to `createScanner()`.
|
||||
* @param event The log summary event.
|
||||
*/
|
||||
onEvent(event: SummaryEvent): void;
|
||||
/**
|
||||
* Called after all events in the log summary have been processed. The implementation can report
|
||||
* problems via the `EvaluationLogProblemReporter` interface that was supplied to
|
||||
* `createScanner()`.
|
||||
*/
|
||||
onDone(): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* A factory for log scanners. When a log is to be scanned, all registered
|
||||
* `EvaluationLogScannerProviders` will be asked to create a new instance of `EvaluationLogScanner`
|
||||
* to do the scanning.
|
||||
*/
|
||||
export interface EvaluationLogScannerProvider {
|
||||
/**
|
||||
* Create a new instance of `EvaluationLogScanner` to scan a single summary log.
|
||||
* @param problemReporter Callback interface for reporting any problems discovered.
|
||||
*/
|
||||
createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as VSCode's `Disposable`, but avoids a dependency on VS Code.
|
||||
*/
|
||||
export interface Disposable {
|
||||
dispose(): void;
|
||||
}
|
||||
|
||||
export class EvaluationLogScannerSet {
|
||||
private readonly scannerProviders = new Map<number, EvaluationLogScannerProvider>();
|
||||
private nextScannerProviderId = 0;
|
||||
|
||||
/**
|
||||
* Register a provider that can create instances of `EvaluationLogScanner` to scan evaluation logs
|
||||
* for problems.
|
||||
* @param provider The provider.
|
||||
* @returns A `Disposable` that, when disposed, will unregister the provider.
|
||||
*/
|
||||
public registerLogScannerProvider(provider: EvaluationLogScannerProvider): Disposable {
|
||||
const id = this.nextScannerProviderId;
|
||||
this.nextScannerProviderId++;
|
||||
|
||||
this.scannerProviders.set(id, provider);
|
||||
return {
|
||||
dispose: () => {
|
||||
this.scannerProviders.delete(id);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan the evaluator summary log for problems, using the scanners for all registered providers.
|
||||
* @param jsonSummaryLocation The file path of the JSON summary log.
|
||||
* @param problemReporter Callback interface for reporting any problems discovered.
|
||||
*/
|
||||
public async scanLog(jsonSummaryLocation: string, problemReporter: EvaluationLogProblemReporter): Promise<void> {
|
||||
const scanners = [...this.scannerProviders.values()].map(p => p.createScanner(problemReporter));
|
||||
|
||||
await readJsonlFile(jsonSummaryLocation, async obj => {
|
||||
scanners.forEach(scanner => {
|
||||
scanner.onEvent(obj);
|
||||
});
|
||||
});
|
||||
|
||||
scanners.forEach(scanner => scanner.onDone());
|
||||
}
|
||||
}
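As a sketch of how the registration API fits together (the counting scanner below is invented for illustration, not part of the extension):

import { EvaluationLogProblemReporter, EvaluationLogScanner, EvaluationLogScannerSet } from './log-scanner';
import { SummaryEvent } from './log-summary';

// Hypothetical provider that just counts events and logs the total.
const scannerSet = new EvaluationLogScannerSet();
const registration = scannerSet.registerLogScannerProvider({
  createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner {
    let eventCount = 0;
    return {
      onEvent(_event: SummaryEvent): void {
        eventCount++;
      },
      onDone(): void {
        problemReporter.log(`Scanned ${eventCount} events.`);
      }
    };
  }
});

// await scannerSet.scanLog(jsonSummaryPath, problemReporter);
// registration.dispose(); // unregister when no longer needed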
|
||||
93  extensions/ql-vscode/src/log-insights/log-summary.ts  Normal file
@@ -0,0 +1,93 @@
export interface PipelineRun {
  raReference: string;
  counts: number[];
  duplicationPercentages: number[];
}

export interface Ra {
  [key: string]: string[];
}

export type EvaluationStrategy =
  'COMPUTE_SIMPLE' |
  'COMPUTE_RECURSIVE' |
  'IN_LAYER' |
  'COMPUTED_EXTENSIONAL' |
  'EXTENSIONAL' |
  'SENTINEL_EMPTY' |
  'CACHACA' |
  'CACHE_HIT';

interface SummaryEventBase {
  evaluationStrategy: EvaluationStrategy;
  predicateName: string;
  raHash: string;
  appearsAs: { [key: string]: { [key: string]: number[] } };
  completionType?: string;
}

interface ResultEventBase extends SummaryEventBase {
  resultSize: number;
}

export interface ComputeSimple extends ResultEventBase {
  evaluationStrategy: 'COMPUTE_SIMPLE';
  ra: Ra;
  pipelineRuns?: [PipelineRun];
  queryCausingWork?: string;
  dependencies: { [key: string]: string };
}

export interface ComputeRecursive extends ResultEventBase {
  evaluationStrategy: 'COMPUTE_RECURSIVE';
  deltaSizes: number[];
  ra: Ra;
  pipelineRuns: PipelineRun[];
  queryCausingWork?: string;
  dependencies: { [key: string]: string };
  predicateIterationMillis: number[];
}

export interface InLayer extends ResultEventBase {
  evaluationStrategy: 'IN_LAYER';
  deltaSizes: number[];
  ra: Ra;
  pipelineRuns: PipelineRun[];
  queryCausingWork?: string;
  mainHash: string;
  predicateIterationMillis: number[];
}

export interface ComputedExtensional extends ResultEventBase {
  evaluationStrategy: 'COMPUTED_EXTENSIONAL';
  queryCausingWork?: string;
}

export interface NonComputedExtensional extends ResultEventBase {
  evaluationStrategy: 'EXTENSIONAL';
  queryCausingWork?: string;
}

export interface SentinelEmpty extends SummaryEventBase {
  evaluationStrategy: 'SENTINEL_EMPTY';
  sentinelRaHash: string;
}

export interface Cachaca extends ResultEventBase {
  evaluationStrategy: 'CACHACA';
}

export interface CacheHit extends ResultEventBase {
  evaluationStrategy: 'CACHE_HIT';
}

export type Extensional = ComputedExtensional | NonComputedExtensional;

export type SummaryEvent =
  | ComputeSimple
  | ComputeRecursive
  | InLayer
  | Extensional
  | SentinelEmpty
  | Cachaca
  | CacheHit;
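SummaryEvent is a discriminated union keyed on `evaluationStrategy`, so scanners can narrow an event with a switch; a small illustrative sketch (not the extension's actual handling):

import { SummaryEvent } from './log-summary';

// Illustrative narrowing on the evaluationStrategy discriminant.
function describeEvent(event: SummaryEvent): string {
  switch (event.evaluationStrategy) {
    case 'COMPUTE_RECURSIVE':
    case 'IN_LAYER':
      return `${event.predicateName}: recursive, ${event.deltaSizes.length} iterations`;
    case 'COMPUTE_SIMPLE':
      return `${event.predicateName}: ${event.resultSize} rows`;
    default:
      return event.predicateName;
  }
}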
@@ -0,0 +1,154 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import { RawSourceMap, SourceMapConsumer } from 'source-map';
|
||||
import { commands, Position, Selection, TextDocument, TextEditor, TextEditorRevealType, TextEditorSelectionChangeEvent, ViewColumn, window, workspace } from 'vscode';
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { commandRunner } from '../commandRunner';
|
||||
import { logger } from '../logging';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
|
||||
/** A `Position` within a specified file on disk. */
|
||||
interface PositionInFile {
|
||||
filePath: string;
|
||||
position: Position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the specified source location in a text editor.
|
||||
* @param position The position (including file path) to show.
|
||||
*/
|
||||
async function showSourceLocation(position: PositionInFile): Promise<void> {
|
||||
const document = await workspace.openTextDocument(position.filePath);
|
||||
const editor = await window.showTextDocument(document, ViewColumn.Active);
|
||||
editor.selection = new Selection(position.position, position.position);
|
||||
editor.revealRange(editor.selection, TextEditorRevealType.InCenterIfOutsideViewport);
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple language support for human-readable evaluator log summaries.
|
||||
*
|
||||
* This class implements the `codeQL.gotoQL` command, which jumps from RA code to the corresponding
|
||||
* QL code that generated it. It also tracks the current selection and active editor to enable and
|
||||
* disable that command based on whether there is a QL mapping for the current selection.
|
||||
*/
|
||||
export class SummaryLanguageSupport extends DisposableObject {
|
||||
/**
|
||||
* The last `TextDocument` (with language `ql-summary`) for which we tried to find a sourcemap, or
|
||||
* `undefined` if we have not seen such a document yet.
|
||||
*/
|
||||
private lastDocument: TextDocument | undefined = undefined;
|
||||
/**
|
||||
* The sourcemap for `lastDocument`, or `undefined` if there was no such sourcemap or document.
|
||||
*/
|
||||
private sourceMap: SourceMapConsumer | undefined = undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
this.push(window.onDidChangeActiveTextEditor(this.handleDidChangeActiveTextEditor));
|
||||
this.push(window.onDidChangeTextEditorSelection(this.handleDidChangeTextEditorSelection));
|
||||
this.push(workspace.onDidCloseTextDocument(this.handleDidCloseTextDocument));
|
||||
|
||||
this.push(commandRunner('codeQL.gotoQL', this.handleGotoQL));
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the location of the QL code that generated the RA at the current selection in the active
|
||||
* editor, or `undefined` if there is no mapping.
|
||||
*/
|
||||
private async getQLSourceLocation(): Promise<PositionInFile | undefined> {
|
||||
const editor = window.activeTextEditor;
|
||||
if (editor === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const document = editor.document;
|
||||
if (document.languageId !== 'ql-summary') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (document.uri.scheme !== 'file') {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (this.lastDocument !== document) {
|
||||
this.clearCache();
|
||||
|
||||
const mapPath = document.uri.fsPath + '.map';
|
||||
|
||||
try {
|
||||
const sourceMapText = await fs.readFile(mapPath, 'utf-8');
|
||||
const rawMap: RawSourceMap = JSON.parse(sourceMapText);
|
||||
this.sourceMap = await new SourceMapConsumer(rawMap);
|
||||
} catch (e: unknown) {
|
||||
// Error reading sourcemap. Pretend there was no sourcemap.
|
||||
void logger.log(`Error reading sourcemap file '${mapPath}': ${getErrorMessage(e)}`);
|
||||
this.sourceMap = undefined;
|
||||
}
|
||||
this.lastDocument = document;
|
||||
}
|
||||
|
||||
if (this.sourceMap === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const qlPosition = this.sourceMap.originalPositionFor({
|
||||
line: editor.selection.start.line + 1,
|
||||
column: editor.selection.start.character,
|
||||
bias: SourceMapConsumer.GREATEST_LOWER_BOUND
|
||||
});
|
||||
|
||||
if ((qlPosition.source === null) || (qlPosition.line === null)) {
|
||||
// No position found.
|
||||
return undefined;
|
||||
}
|
||||
const line = qlPosition.line - 1; // In `source-map`, lines are 1-based...
|
||||
const column = qlPosition.column ?? 0; // ...but columns are 0-based :(
|
||||
|
||||
return {
|
||||
filePath: qlPosition.source,
|
||||
position: new Position(line, column)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears the cached sourcemap and its corresponding `TextDocument`.
|
||||
*/
|
||||
private clearCache(): void {
|
||||
if (this.sourceMap !== undefined) {
|
||||
this.sourceMap.destroy();
|
||||
this.sourceMap = undefined;
|
||||
this.lastDocument = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the `codeql.hasQLSource` context variable based on the current selection. This variable
|
||||
* controls whether or not the `codeQL.gotoQL` command is enabled.
|
||||
*/
|
||||
private async updateContext(): Promise<void> {
|
||||
const position = await this.getQLSourceLocation();
|
||||
|
||||
await commands.executeCommand('setContext', 'codeql.hasQLSource', position !== undefined);
|
||||
}
|
||||
|
||||
handleDidChangeActiveTextEditor = async (_editor: TextEditor | undefined): Promise<void> => {
|
||||
await this.updateContext();
|
||||
}
|
||||
|
||||
handleDidChangeTextEditorSelection = async (_e: TextEditorSelectionChangeEvent): Promise<void> => {
|
||||
await this.updateContext();
|
||||
}
|
||||
|
||||
handleDidCloseTextDocument = (document: TextDocument): void => {
|
||||
if (this.lastDocument === document) {
|
||||
this.clearCache();
|
||||
}
|
||||
}
|
||||
|
||||
handleGotoQL = async (): Promise<void> => {
|
||||
const position = await this.getQLSourceLocation();
|
||||
if (position !== undefined) {
|
||||
await showSourceLocation(position);
|
||||
}
|
||||
};
|
||||
}
|
||||
113
extensions/ql-vscode/src/log-insights/summary-parser.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
/**
|
||||
* Location information for a single pipeline invocation in the RA.
|
||||
*/
|
||||
export interface PipelineInfo {
|
||||
startLine: number;
|
||||
raStartLine: number;
|
||||
raEndLine: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Location information for a single predicate in the RA.
|
||||
*/
|
||||
export interface PredicateSymbol {
|
||||
/**
|
||||
* `PipelineInfo` for each iteration. A non-recursive predicate will have a single iteration `0`.
|
||||
*/
|
||||
iterations: Record<number, PipelineInfo>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Location information for the RA from an evaluation log. Line numbers point into the
|
||||
* human-readable log summary.
|
||||
*/
|
||||
export interface SummarySymbols {
|
||||
predicates: Record<string, PredicateSymbol>;
|
||||
}
|
||||
|
||||
// Tuple counts for Expr::Expr::getParent#dispred#f0820431#ff@76d6745o:
|
||||
const NON_RECURSIVE_TUPLE_COUNT_REGEXP = /^Evaluated relational algebra for predicate (?<predicateName>\S+) with tuple counts:$/;
|
||||
// Tuple counts for Expr::Expr::getEnclosingStmt#f0820431#bf@923ddwj9 on iteration 0 running pipeline base:
|
||||
const RECURSIVE_TUPLE_COUNT_REGEXP = /^Evaluated relational algebra for predicate (?<predicateName>\S+) on iteration (?<iteration>\d+) running pipeline (?<pipeline>\S+) with tuple counts:$/;
|
||||
const RETURN_REGEXP = /^\s*return /;
|
||||
|
||||
/**
|
||||
* Parse a human-readable evaluation log summary to find the location of the RA for each pipeline
|
||||
* run.
|
||||
*
|
||||
* TODO: Once we're more certain about the symbol format, we should have the CLI generate this as it
|
||||
* generates the human-readable summary to avoid having to rely on regular expression matching of the
|
||||
* human-readable text.
|
||||
*
|
||||
* @param summaryPath The path to the summary file.
|
||||
* @param symbolsPath The path to the symbols file to generate.
|
||||
*/
|
||||
export async function generateSummarySymbolsFile(summaryPath: string, symbolsPath: string): Promise<void> {
|
||||
const symbols = await generateSummarySymbols(summaryPath);
|
||||
await fs.writeFile(symbolsPath, JSON.stringify(symbols));
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a human-readable evaluation log summary to find the location of the RA for each pipeline
|
||||
* run.
|
||||
*
|
||||
* @param summaryPath The path to the summary file.
|
||||
* @returns Symbol information for the summary file.
|
||||
*/
|
||||
async function generateSummarySymbols(summaryPath: string): Promise<SummarySymbols> {
|
||||
const summary = await fs.promises.readFile(summaryPath, { encoding: 'utf-8' });
|
||||
const symbols: SummarySymbols = {
|
||||
predicates: {}
|
||||
};
|
||||
|
||||
const lines = summary.split(/\r?\n/);
|
||||
let lineNumber = 0;
|
||||
while (lineNumber < lines.length) {
|
||||
const startLineNumber = lineNumber;
|
||||
lineNumber++;
|
||||
const startLine = lines[startLineNumber];
|
||||
const nonRecursiveMatch = startLine.match(NON_RECURSIVE_TUPLE_COUNT_REGEXP);
|
||||
let predicateName: string | undefined = undefined;
|
||||
let iteration = 0;
|
||||
if (nonRecursiveMatch) {
|
||||
predicateName = nonRecursiveMatch.groups!.predicateName;
|
||||
} else {
|
||||
const recursiveMatch = startLine.match(RECURSIVE_TUPLE_COUNT_REGEXP);
|
||||
if (recursiveMatch?.groups) {
|
||||
predicateName = recursiveMatch.groups.predicateName;
|
||||
iteration = parseInt(recursiveMatch.groups.iteration);
|
||||
}
|
||||
}
|
||||
|
||||
if (predicateName !== undefined) {
|
||||
const raStartLine = lineNumber;
|
||||
let raEndLine: number | undefined = undefined;
|
||||
while ((lineNumber < lines.length) && (raEndLine === undefined)) {
|
||||
const raLine = lines[lineNumber];
|
||||
const returnMatch = raLine.match(RETURN_REGEXP);
|
||||
if (returnMatch) {
|
||||
raEndLine = lineNumber;
|
||||
}
|
||||
lineNumber++;
|
||||
}
|
||||
if (raEndLine !== undefined) {
|
||||
let symbol = symbols.predicates[predicateName];
|
||||
if (symbol === undefined) {
|
||||
symbol = {
|
||||
iterations: {}
|
||||
};
|
||||
symbols.predicates[predicateName] = symbol;
|
||||
}
|
||||
symbol.iterations[iteration] = {
|
||||
startLine: lineNumber,
|
||||
raStartLine: raStartLine,
|
||||
raEndLine: raEndLine
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return symbols;
|
||||
}
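Assuming a summary that contains a header like the recursive example quoted in the regex comments above, the resulting symbols object would look roughly like this (line numbers invented):

// Roughly the shape produced by generateSummarySymbols; values are invented.
const example: SummarySymbols = {
  predicates: {
    'Expr::Expr::getEnclosingStmt#f0820431#bf@923ddwj9': {
      iterations: {
        0: { startLine: 1242, raStartLine: 1210, raEndLine: 1241 }
      }
    }
  }
};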
|
||||
@@ -1,4 +1,4 @@
|
||||
import { window as Window, OutputChannel, Progress, Disposable } from 'vscode';
|
||||
import { window as Window, OutputChannel, Progress } from 'vscode';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
@@ -26,18 +26,6 @@ export interface Logger {
|
||||
* @param location log to remove
|
||||
*/
|
||||
removeAdditionalLogLocation(location: string | undefined): void;
|
||||
|
||||
/**
|
||||
* The base location where all side log files are stored.
|
||||
*/
|
||||
getBaseLocation(): string | undefined;
|
||||
|
||||
/**
|
||||
* Sets the location where logs are stored.
|
||||
* @param storagePath The path where logs are stored.
|
||||
* @param isCustomLogDirectory Whether the logs are stored in a custom, user-specified directory.
|
||||
*/
|
||||
setLogStoragePath(storagePath: string, isCustomLogDirectory: boolean): Promise<void>;
|
||||
}
|
||||
|
||||
export type ProgressReporter = Progress<{ message: string }>;
|
||||
@@ -46,27 +34,15 @@ export type ProgressReporter = Progress<{ message: string }>;
|
||||
export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
public readonly outputChannel: OutputChannel;
|
||||
private readonly additionalLocations = new Map<string, AdditionalLogLocation>();
|
||||
private additionalLogLocationPath: string | undefined;
|
||||
isCustomLogDirectory: boolean;
|
||||
|
||||
constructor(private title: string) {
|
||||
constructor(title: string) {
|
||||
super();
|
||||
this.outputChannel = Window.createOutputChannel(title);
|
||||
this.push(this.outputChannel);
|
||||
this.isCustomLogDirectory = false;
|
||||
}
|
||||
|
||||
async setLogStoragePath(storagePath: string, isCustomLogDirectory: boolean): Promise<void> {
|
||||
this.additionalLogLocationPath = path.join(storagePath, this.title);
|
||||
|
||||
this.isCustomLogDirectory = isCustomLogDirectory;
|
||||
|
||||
if (!this.isCustomLogDirectory) {
|
||||
// clear out any old state from previous runs
|
||||
await fs.remove(this.additionalLogLocationPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is asynchronous and will only resolve once the message is written
|
||||
* to the side log (if required). It is not necessary to await the results of this
|
||||
@@ -74,31 +50,41 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
* continuing.
|
||||
*/
|
||||
async log(message: string, options = {} as LogOptions): Promise<void> {
|
||||
if (options.trailingNewline === undefined) {
|
||||
options.trailingNewline = true;
|
||||
}
|
||||
|
||||
if (options.trailingNewline) {
|
||||
this.outputChannel.appendLine(message);
|
||||
} else {
|
||||
this.outputChannel.append(message);
|
||||
}
|
||||
|
||||
if (this.additionalLogLocationPath && options.additionalLogLocation) {
|
||||
const logPath = path.join(this.additionalLogLocationPath, options.additionalLogLocation);
|
||||
let additional = this.additionalLocations.get(logPath);
|
||||
if (!additional) {
|
||||
const msg = `| Log being saved to ${logPath} |`;
|
||||
const separator = new Array(msg.length).fill('-').join('');
|
||||
this.outputChannel.appendLine(separator);
|
||||
this.outputChannel.appendLine(msg);
|
||||
this.outputChannel.appendLine(separator);
|
||||
additional = new AdditionalLogLocation(logPath, !this.isCustomLogDirectory);
|
||||
this.additionalLocations.set(logPath, additional);
|
||||
this.track(additional);
|
||||
try {
|
||||
if (options.trailingNewline === undefined) {
|
||||
options.trailingNewline = true;
|
||||
}
|
||||
if (options.trailingNewline) {
|
||||
this.outputChannel.appendLine(message);
|
||||
} else {
|
||||
this.outputChannel.append(message);
|
||||
}
|
||||
|
||||
await additional.log(message, options);
|
||||
if (options.additionalLogLocation) {
|
||||
if (!path.isAbsolute(options.additionalLogLocation)) {
|
||||
throw new Error(`Additional Log Location must be an absolute path: ${options.additionalLogLocation}`);
|
||||
}
|
||||
const logPath = options.additionalLogLocation;
|
||||
let additional = this.additionalLocations.get(logPath);
|
||||
if (!additional) {
|
||||
const msg = `| Log being saved to ${logPath} |`;
|
||||
const separator = new Array(msg.length).fill('-').join('');
|
||||
this.outputChannel.appendLine(separator);
|
||||
this.outputChannel.appendLine(msg);
|
||||
this.outputChannel.appendLine(separator);
|
||||
additional = new AdditionalLogLocation(logPath);
|
||||
this.additionalLocations.set(logPath, additional);
|
||||
}
|
||||
|
||||
await additional.log(message, options);
|
||||
}
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message === 'Channel has been closed') {
|
||||
// Output channel is closed; log to console instead.
|
||||
console.log('Output channel is closed logging to console instead:', message);
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -107,26 +93,15 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
|
||||
}
|
||||
|
||||
removeAdditionalLogLocation(location: string | undefined): void {
|
||||
if (this.additionalLogLocationPath && location) {
|
||||
const logPath = location.startsWith(this.additionalLogLocationPath)
|
||||
? location
|
||||
: path.join(this.additionalLogLocationPath, location);
|
||||
const additional = this.additionalLocations.get(logPath);
|
||||
if (additional) {
|
||||
this.disposeAndStopTracking(additional);
|
||||
this.additionalLocations.delete(logPath);
|
||||
}
|
||||
if (location) {
|
||||
this.additionalLocations.delete(location);
|
||||
}
|
||||
}
|
||||
|
||||
getBaseLocation() {
|
||||
return this.additionalLogLocationPath;
|
||||
}
|
||||
}
|
||||
|
||||
class AdditionalLogLocation extends Disposable {
|
||||
constructor(private location: string, private shouldDeleteLogs: boolean) {
|
||||
super(() => { /**/ });
|
||||
class AdditionalLogLocation {
|
||||
constructor(private location: string) {
|
||||
/**/
|
||||
}
|
||||
|
||||
async log(message: string, options = {} as LogOptions): Promise<void> {
|
||||
@@ -139,12 +114,6 @@ class AdditionalLogLocation extends Disposable {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
|
||||
async dispose(): Promise<void> {
|
||||
if (this.shouldDeleteLogs) {
|
||||
await fs.remove(this.location);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** The global logger for the extension. */
|
||||
|
||||
@@ -79,11 +79,11 @@ export interface WholeFileLocation {

export type ResolvableLocationValue = WholeFileLocation | LineColumnLocation;

export type UrlValue = ResolvableLocationValue | string;

export type ColumnValue = EntityValue | number | string | boolean;
export type CellValue = EntityValue | number | string | boolean;

export type ResultRow = ColumnValue[];
export type ResultRow = CellValue[];

export interface RawResultSet {
  readonly schema: ResultSetSchema;
@@ -103,7 +103,14 @@ export function transformBqrsResultSet(
  };
}

export interface DecodedBqrsChunk {
  tuples: ColumnValue[][];
  next?: number;
type BqrsKind = 'String' | 'Float' | 'Integer' | 'String' | 'Boolean' | 'Date' | 'Entity';

interface BqrsColumn {
  name: string;
  kind: BqrsKind;
}
export interface DecodedBqrsChunk {
  tuples: CellValue[][];
  next?: number;
  columns: BqrsColumn[];
}
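A sketch of a chunk under the updated shape, with invented data:

// Invented example conforming to the updated DecodedBqrsChunk.
const chunk: DecodedBqrsChunk = {
  columns: [
    { name: 'name', kind: 'String' },
    { name: 'line', kind: 'Integer' }
  ],
  tuples: [
    ['getParent', 42],
    ['getChild', 97]
  ]
};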
@@ -4,6 +4,7 @@ import {
|
||||
LineColumnLocation,
|
||||
WholeFileLocation
|
||||
} from './bqrs-cli-types';
|
||||
import { createRemoteFileRef } from './location-link-utils';
|
||||
|
||||
/**
|
||||
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
|
||||
@@ -93,3 +94,45 @@ export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
|
||||
export function isStringLoc(loc: UrlValue): loc is string {
|
||||
return typeof loc === 'string';
|
||||
}
|
||||
|
||||
export function tryGetRemoteLocation(
|
||||
loc: UrlValue | undefined,
|
||||
fileLinkPrefix: string,
|
||||
sourceLocationPrefix: string | undefined,
|
||||
): string | undefined {
|
||||
const resolvableLocation = tryGetResolvableLocation(loc);
|
||||
if (!resolvableLocation) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
let trimmedLocation: string;
|
||||
|
||||
// Remote locations have the following format:
|
||||
// "file:${sourceLocationPrefix}/relative/path/to/file"
|
||||
// So we need to strip off the first part to get the relative path.
|
||||
if (sourceLocationPrefix) {
|
||||
if (!resolvableLocation.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
|
||||
return undefined;
|
||||
}
|
||||
trimmedLocation = resolvableLocation.uri.replace(`file:${sourceLocationPrefix}/`, '');
|
||||
} else {
|
||||
// If the source location prefix is empty (e.g. for older remote queries), we assume that the database
|
||||
// was created on a Linux actions runner and has the format:
|
||||
// "file:/home/runner/work/<repo>/<repo>/relative/path/to/file"
|
||||
// So we need to drop the first 6 parts of the path.
|
||||
if (!resolvableLocation.uri.startsWith('file:/home/runner/work/')) {
|
||||
return undefined;
|
||||
}
|
||||
const locationParts = resolvableLocation.uri.split('/');
|
||||
trimmedLocation = locationParts.slice(6, locationParts.length).join('/');
|
||||
}
|
||||
|
||||
const fileLink = {
|
||||
fileLinkPrefix,
|
||||
filePath: trimmedLocation,
|
||||
};
|
||||
return createRemoteFileRef(
|
||||
fileLink,
|
||||
resolvableLocation.startLine,
|
||||
resolvableLocation.endLine);
|
||||
}
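For instance, assuming tryGetResolvableLocation passes the location through unchanged (the values below are invented), a location under the source prefix maps to a repository-relative link:

// Invented values; the location object is simplified for illustration.
const loc: any = { uri: 'file:/home/runner/work/foo/foo/src/main.c', startLine: 10, endLine: 12 };
const link = tryGetRemoteLocation(loc, 'https://github.com/octo-org/foo/blob/main', '/home/runner/work/foo/foo');
// Expected: 'https://github.com/octo-org/foo/blob/main/src/main.c#L10-L12'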
|
||||
|
||||
39  extensions/ql-vscode/src/pure/date.ts  Normal file
@@ -0,0 +1,39 @@
/*
 * Contains an assortment of helper constants and functions for working with dates.
 */

const dateWithoutYearFormatter = new Intl.DateTimeFormat(undefined, {
  month: 'short',
  day: 'numeric',
  hour: 'numeric',
  minute: '2-digit',
});

const dateFormatter = new Intl.DateTimeFormat(undefined, {
  year: 'numeric',
  month: 'short',
  day: 'numeric',
  hour: 'numeric',
  minute: '2-digit',
});

export function formatDate(value: Date): string {
  if (value.getFullYear() === new Date().getFullYear()) {
    return dateWithoutYearFormatter.format(value);
  }

  return dateFormatter.format(value);
}

// These are overloads for the function that allow us to not add an extra
// type check when the value is definitely not undefined.
export function parseDate(value: string): Date;
export function parseDate(value: string | undefined | null): Date | undefined;

export function parseDate(value: string | undefined | null): Date | undefined {
  if (value === undefined || value === null) {
    return undefined;
  }

  return new Date(value);
}
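A brief usage sketch; the exact strings depend on the runtime locale and time zone, so the outputs shown are only indicative:

import { formatDate, parseDate } from './date';

// A date in the current year is shown without the year; older dates include it.
formatDate(parseDate('2022-06-15T14:30:00')); // e.g. 'Jun 15, 2:30 PM' (if the current year is 2022)
formatDate(parseDate('2020-06-15T14:30:00')); // e.g. 'Jun 15, 2020, 2:30 PM'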
@@ -1,3 +1,4 @@

/**
 * helpers-pure.ts
 * ------------
@@ -29,3 +30,28 @@ export const asyncFilter = async function <T>(arr: T[], predicate: (arg0: T) =>
  const results = await Promise.all(arr.map(predicate));
  return arr.filter((_, index) => results[index]);
};

/**
 * This regex matches strings of the form `owner/repo` where:
 * - `owner` is made up of alphanumeric characters, hyphens, underscores, or periods
 * - `repo` is made up of alphanumeric characters, hyphens, underscores, or periods
 */
export const REPO_REGEX = /^[a-zA-Z0-9-_\.]+\/[a-zA-Z0-9-_\.]+$/;

/**
 * This regex matches GitHub organization and user strings. These are made up of alphanumeric
 * characters, hyphens, underscores, or periods.
 */
export const OWNER_REGEX = /^[a-zA-Z0-9-_\.]+$/;

export function getErrorMessage(e: any) {
  return e instanceof Error ? e.message : String(e);
}

export function getErrorStack(e: any) {
  return e instanceof Error ? e.stack ?? '' : '';
}

export function asError(e: any): Error {
  return e instanceof Error ? e : new Error(String(e));
}
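A few examples of what the two regexes accept and reject:

REPO_REGEX.test('github/codeql');       // true
REPO_REGEX.test('github/codeql/extra'); // false: only a single owner/repo pair is allowed
OWNER_REGEX.test('my-org.name_1');      // true
OWNER_REGEX.test('my org');             // false: spaces are not allowed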
|
||||
@@ -2,6 +2,11 @@ import * as sarif from 'sarif';
|
||||
import { AnalysisResults } from '../remote-queries/shared/analysis-result';
|
||||
import { AnalysisSummary, RemoteQueryResult } from '../remote-queries/shared/remote-query-result';
|
||||
import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationValue } from './bqrs-cli-types';
|
||||
import {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisScannedRepositoryResult,
|
||||
VariantAnalysisScannedRepositoryState,
|
||||
} from '../remote-queries/shared/variant-analysis';
|
||||
|
||||
/**
|
||||
* This module contains types and code that are shared between
|
||||
@@ -10,15 +15,17 @@ import { RawResultSet, ResultRow, ResultSetSchema, Column, ResolvableLocationVal
|
||||
|
||||
export const SELECT_TABLE_NAME = '#select';
|
||||
export const ALERTS_TABLE_NAME = 'alerts';
|
||||
export const GRAPH_TABLE_NAME = 'graph';
|
||||
|
||||
export type RawTableResultSet = { t: 'RawResultSet' } & RawResultSet;
|
||||
export type PathTableResultSet = {
|
||||
t: 'SarifResultSet';
|
||||
export type InterpretedResultSet<T> = {
|
||||
t: 'InterpretedResultSet';
|
||||
readonly schema: ResultSetSchema;
|
||||
name: string;
|
||||
} & Interpretation;
|
||||
interpretation: InterpretationT<T>;
|
||||
};
|
||||
|
||||
export type ResultSet = RawTableResultSet | PathTableResultSet;
|
||||
export type ResultSet = RawTableResultSet | InterpretedResultSet<InterpretationData>;
|
||||
|
||||
/**
|
||||
* Only ever show this many rows in a raw result table.
|
||||
@@ -46,18 +53,31 @@ export interface PreviousExecution {
|
||||
durationSeconds: number;
|
||||
}
|
||||
|
||||
export interface Interpretation {
|
||||
sourceLocationPrefix: string;
|
||||
numTruncatedResults: number;
|
||||
numTotalResults: number;
|
||||
export type SarifInterpretationData = {
|
||||
t: 'SarifInterpretationData';
|
||||
/**
|
||||
* sortState being undefined means don't sort, just present results in the order
|
||||
* they appear in the sarif file.
|
||||
*/
|
||||
sortState?: InterpretedResultsSortState;
|
||||
sarif: sarif.Log;
|
||||
} & sarif.Log;
|
||||
|
||||
export type GraphInterpretationData = {
|
||||
t: 'GraphInterpretationData';
|
||||
dot: string[];
|
||||
};
|
||||
|
||||
export type InterpretationData = SarifInterpretationData | GraphInterpretationData;
|
||||
|
||||
export interface InterpretationT<T> {
|
||||
sourceLocationPrefix: string;
|
||||
numTruncatedResults: number;
|
||||
numTotalResults: number;
|
||||
data: T;
|
||||
}
|
||||
|
||||
export type Interpretation = InterpretationT<InterpretationData>;
|
||||
|
||||
export interface ResultsPaths {
|
||||
resultsPath: string;
|
||||
interpretedResultsPath: string;
|
||||
@@ -159,7 +179,7 @@ export type FromResultsViewMsg =
|
||||
| ToggleDiagnostics
|
||||
| ChangeRawResultsSortMsg
|
||||
| ChangeInterpretedResultsSortMsg
|
||||
| ResultViewLoaded
|
||||
| ViewLoadedMsg
|
||||
| ChangePage
|
||||
| OpenFileMsg;
|
||||
|
||||
@@ -201,11 +221,11 @@ interface ToggleDiagnostics {
|
||||
}
|
||||
|
||||
/**
|
||||
* Message from the results view to signal that loading the results
|
||||
* is complete.
|
||||
* Message from a view to signal that loading is complete.
|
||||
*/
|
||||
interface ResultViewLoaded {
|
||||
t: 'resultViewLoaded';
|
||||
interface ViewLoadedMsg {
|
||||
t: 'viewLoaded';
|
||||
viewName: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -264,18 +284,11 @@ interface ChangeInterpretedResultsSortMsg {
|
||||
* Message from the compare view to the extension.
|
||||
*/
|
||||
export type FromCompareViewMessage =
|
||||
| CompareViewLoadedMessage
|
||||
| ViewLoadedMsg
|
||||
| ChangeCompareMessage
|
||||
| ViewSourceFileMsg
|
||||
| OpenQueryMessage;
|
||||
|
||||
/**
|
||||
* Message from the compare view to signal the completion of loading results.
|
||||
*/
|
||||
interface CompareViewLoadedMessage {
|
||||
t: 'compareViewLoaded';
|
||||
}
|
||||
|
||||
/**
|
||||
* Message from the compare view to request opening a query.
|
||||
*/
|
||||
@@ -316,7 +329,7 @@ export interface SetComparisonsMessage {
|
||||
readonly currentResultSetName: string;
|
||||
readonly rows: QueryCompareResult | undefined;
|
||||
readonly message: string | undefined;
|
||||
readonly datebaseUri: string;
|
||||
readonly databaseUri: string;
|
||||
}
|
||||
|
||||
export enum DiffKind {
|
||||
@@ -357,8 +370,9 @@ export function getDefaultResultSetName(
|
||||
// Choose first available result set from the array
|
||||
return [
|
||||
ALERTS_TABLE_NAME,
|
||||
GRAPH_TABLE_NAME,
|
||||
SELECT_TABLE_NAME,
|
||||
resultSetNames[0],
|
||||
resultSetNames[0]
|
||||
].filter((resultSetName) => resultSetNames.includes(resultSetName))[0];
|
||||
}
|
||||
|
||||
@@ -373,21 +387,19 @@ export interface ParsedResultSets {
|
||||
}
|
||||
|
||||
export type FromRemoteQueriesMessage =
|
||||
| RemoteQueryLoadedMessage
|
||||
| ViewLoadedMsg
|
||||
| RemoteQueryErrorMessage
|
||||
| OpenFileMsg
|
||||
| OpenVirtualFileMsg
|
||||
| RemoteQueryDownloadAnalysisResultsMessage
|
||||
| RemoteQueryDownloadAllAnalysesResultsMessage;
|
||||
| RemoteQueryDownloadAllAnalysesResultsMessage
|
||||
| RemoteQueryExportResultsMessage
|
||||
| CopyRepoListMessage;
|
||||
|
||||
export type ToRemoteQueriesMessage =
|
||||
| SetRemoteQueryResultMessage
|
||||
| SetAnalysesResultsMessage;
|
||||
|
||||
export interface RemoteQueryLoadedMessage {
|
||||
t: 'remoteQueryLoaded';
|
||||
}
|
||||
|
||||
export interface SetRemoteQueryResultMessage {
|
||||
t: 'setRemoteQueryResult';
|
||||
queryResult: RemoteQueryResult
|
||||
@@ -412,3 +424,62 @@ export interface RemoteQueryDownloadAllAnalysesResultsMessage {
|
||||
t: 'remoteQueryDownloadAllAnalysesResults';
|
||||
analysisSummaries: AnalysisSummary[];
|
||||
}
|
||||
|
||||
export interface RemoteQueryExportResultsMessage {
|
||||
t: 'remoteQueryExportResults';
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface CopyRepoListMessage {
|
||||
t: 'copyRepoList';
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
export interface SetVariantAnalysisMessage {
|
||||
t: 'setVariantAnalysis';
|
||||
variantAnalysis: VariantAnalysis;
|
||||
}
|
||||
|
||||
export type StopVariantAnalysisMessage = {
|
||||
t: 'stopVariantAnalysis';
|
||||
variantAnalysisId: number;
|
||||
}
|
||||
|
||||
export type VariantAnalysisState = {
|
||||
variantAnalysisId: number;
|
||||
}
|
||||
|
||||
export interface SetRepoResultsMessage {
|
||||
t: 'setRepoResults';
|
||||
repoResults: VariantAnalysisScannedRepositoryResult[];
|
||||
}
|
||||
|
||||
export interface SetRepoStatesMessage {
|
||||
t: 'setRepoStates';
|
||||
repoStates: VariantAnalysisScannedRepositoryState[];
|
||||
}
|
||||
|
||||
export interface RequestRepositoryResultsMessage {
|
||||
t: 'requestRepositoryResults';
|
||||
repositoryFullName: string;
|
||||
}
|
||||
|
||||
export interface OpenQueryFileMessage {
|
||||
t: 'openQueryFile';
|
||||
}
|
||||
|
||||
export interface OpenQueryTextMessage {
|
||||
t: 'openQueryText';
|
||||
}
|
||||
|
||||
export type ToVariantAnalysisMessage =
|
||||
| SetVariantAnalysisMessage
|
||||
| SetRepoResultsMessage
|
||||
| SetRepoStatesMessage;
|
||||
|
||||
export type FromVariantAnalysisMessage =
|
||||
| ViewLoadedMsg
|
||||
| StopVariantAnalysisMessage
|
||||
| RequestRepositoryResultsMessage
|
||||
| OpenQueryFileMessage
|
||||
| OpenQueryTextMessage;
|
||||
|
||||
@@ -15,38 +15,7 @@
|
||||
*/
|
||||
|
||||
import * as rpc from 'vscode-jsonrpc';
|
||||
|
||||
/**
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export interface Position {
|
||||
/**
|
||||
* The one-based index of the start line
|
||||
*/
|
||||
line: number;
|
||||
/**
|
||||
* The one-based offset of the start column within
|
||||
* the start line in UTF-16 code-units
|
||||
*/
|
||||
column: number;
|
||||
/**
|
||||
* The one-based index of the end line line
|
||||
*/
|
||||
endLine: number;
|
||||
|
||||
/**
|
||||
* The one-based offset of the end column within
|
||||
* the end line in UTF-16 code-units
|
||||
*/
|
||||
endColumn: number;
|
||||
/**
|
||||
* The path of the file.
|
||||
* If the file name is "Compiler Generated" the
|
||||
* the position is not a real position but
|
||||
* arises from compiler generated code.
|
||||
*/
|
||||
fileName: string;
|
||||
}
|
||||
import * as shared from './messages-shared';
|
||||
|
||||
/**
|
||||
* A query that should be checked for any errors or warnings
|
||||
@@ -155,6 +124,10 @@ export interface CompilationOptions {
|
||||
* get reported anyway. Useful for universal compilation options.
|
||||
*/
|
||||
computeDefaultStrings: boolean;
|
||||
/**
|
||||
* Emit debug information in compiled query.
|
||||
*/
|
||||
emitDebugInfo: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -254,28 +227,6 @@ export interface DILQuery {
|
||||
dilSource: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export interface CompilationTarget {
|
||||
/**
|
||||
* Compile as a normal query
|
||||
*/
|
||||
query?: Record<string, never>;
|
||||
/**
|
||||
* Compile as a quick evaluation
|
||||
*/
|
||||
quickEval?: QuickEvalOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for quick evaluation
|
||||
*/
|
||||
export interface QuickEvalOptions {
|
||||
quickEvalPos?: Position;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of checking a query.
|
||||
*/
|
||||
@@ -646,6 +597,35 @@ export interface ClearCacheParams {
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to start a new structured log
|
||||
*/
|
||||
export interface StartLogParams {
|
||||
/**
|
||||
* The dataset for which we want to start a new structured log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path where we want to place the new structured log
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to terminate a structured log
|
||||
*/
|
||||
export interface EndLogParams {
|
||||
/**
|
||||
* The dataset for which we want to terminate the log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path of the log to terminate. This is a no-op if we aren't logging there.
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
@@ -682,6 +662,26 @@ export interface ClearCacheResult {
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of starting a new structured log.
|
||||
*/
|
||||
export interface StartLogResult {
|
||||
/**
|
||||
* A user friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of terminating a structured log.
|
||||
*/
|
||||
export interface EndLogResult {
|
||||
/**
|
||||
* A user friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for running a set of queries
|
||||
*/
|
||||
@@ -959,37 +959,20 @@ export type DeregisterDatabasesResult = {
|
||||
};
|
||||
|
||||
/**
|
||||
* Type for any action that could have progress messages.
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export interface WithProgressId<T> {
|
||||
/**
|
||||
* The main body
|
||||
*/
|
||||
body: T;
|
||||
/**
|
||||
* The id used to report progress updates
|
||||
*/
|
||||
progressId: number;
|
||||
}
|
||||
export type Position = shared.Position;
|
||||
|
||||
export interface ProgressMessage {
|
||||
/**
|
||||
* The id of the operation that is running
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* The current step
|
||||
*/
|
||||
step: number;
|
||||
/**
|
||||
* The maximum step. This *should* be constant for a single job.
|
||||
*/
|
||||
maxStep: number;
|
||||
/**
|
||||
* The current progress message
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export type CompilationTarget = shared.CompilationTarget;
|
||||
|
||||
export type QuickEvalOptions = shared.QuickEvalOptions;
|
||||
|
||||
export type WithProgressId<T> = shared.WithProgressId<T>;
|
||||
export type ProgressMessage = shared.ProgressMessage;
|
||||
|
||||
/**
|
||||
* Check a QL query for errors without compiling it
|
||||
@@ -1018,6 +1001,16 @@ export const compileUpgrade = new rpc.RequestType<WithProgressId<CompileUpgradeP
|
||||
*/
|
||||
export const compileUpgradeSequence = new rpc.RequestType<WithProgressId<CompileUpgradeSequenceParams>, CompileUpgradeSequenceResult, void, void>('compilation/compileUpgradeSequence');
|
||||
|
||||
/**
|
||||
* Start a new structured log in the evaluator, terminating the previous one if it exists
|
||||
*/
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
|
||||
/**
|
||||
* Terminate a structured log in the evaluator. This is a no-op if we aren't logging to the given location.
|
||||
*/
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
|
||||
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
*/
|
||||
@@ -1057,7 +1050,4 @@ export const deregisterDatabases = new rpc.RequestType<
|
||||
*/
|
||||
export const completeQuery = new rpc.RequestType<EvaluationResult, Record<string, any>, void, void>('evaluation/queryCompleted');
|
||||
|
||||
/**
|
||||
* A notification that the progress has been changed.
|
||||
*/
|
||||
export const progress = new rpc.NotificationType<ProgressMessage, void>('ql/progressUpdated');
|
||||
export const progress = shared.progress;
|
||||
15  extensions/ql-vscode/src/pure/location-link-utils.ts  Normal file
@@ -0,0 +1,15 @@
import { FileLink } from '../remote-queries/shared/analysis-result';

export function createRemoteFileRef(
  fileLink: FileLink,
  startLine?: number,
  endLine?: number
): string {
  if (startLine && endLine) {
    return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}-L${endLine}`;
  } else if (startLine) {
    return `${fileLink.fileLinkPrefix}/${fileLink.filePath}#L${startLine}`;
  } else {
    return `${fileLink.fileLinkPrefix}/${fileLink.filePath}`;
  }
}
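The three output shapes, with an invented file link:

const fileLink = { fileLinkPrefix: 'https://github.com/octo-org/foo/blob/main', filePath: 'src/main.c' };
createRemoteFileRef(fileLink, 10, 12); // 'https://github.com/octo-org/foo/blob/main/src/main.c#L10-L12'
createRemoteFileRef(fileLink, 10);     // 'https://github.com/octo-org/foo/blob/main/src/main.c#L10'
createRemoteFileRef(fileLink);         // 'https://github.com/octo-org/foo/blob/main/src/main.c'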
34  extensions/ql-vscode/src/pure/log-summary-parser.ts  Normal file
@@ -0,0 +1,34 @@
import { readJsonlFile } from '../log-insights/jsonl-reader';

// TODO(angelapwen): Only load in necessary information and
// location in bytes for this log to save memory.
export interface EvalLogData {
  predicateName: string;
  millis: number;
  resultSize: number;
  // Key: pipeline identifier; Value: array of pipeline steps
  ra: Record<string, string[]>;
}

/**
 * Parses a JSON evaluator log summary file into an array of EvalLogData objects.
 */
export async function parseViewerData(jsonSummaryPath: string): Promise<EvalLogData[]> {
  const viewerData: EvalLogData[] = [];

  await readJsonlFile(jsonSummaryPath, async jsonObj => {
    // Only convert log items that have an RA and millis field
    if (jsonObj.ra !== undefined && jsonObj.millis !== undefined) {
      const newLogData: EvalLogData = {
        predicateName: jsonObj.predicateName,
        millis: jsonObj.millis,
        resultSize: jsonObj.resultSize,
        ra: jsonObj.ra
      };
      viewerData.push(newLogData);
    }
  });

  return viewerData;
}
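A usage sketch with a hypothetical summary path; only events that carry both `ra` and `millis` end up in the result:

// Hypothetical path, for illustration.
const rows = await parseViewerData('/tmp/evaluator-log.summary.jsonl');
for (const row of rows) {
  console.log(`${row.predicateName}: ${row.millis} ms, ${row.resultSize} rows`);
}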
110
extensions/ql-vscode/src/pure/messages-shared.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
/**
|
||||
* Types for messages exchanged during jsonrpc communication with the
|
||||
* CodeQL query server.
|
||||
*
|
||||
* This file exists in the queryserver and in the vscode extension, and
|
||||
* should be kept in sync between them.
|
||||
*
|
||||
* A note about the namespaces below, which look like they are
|
||||
* essentially enums, namely Severity, ResultColumnKind, and
|
||||
* QueryResultType. By design, for the sake of extensibility, clients
|
||||
* receiving messages of this protocol are supposed to accept any
|
||||
* number for any of these types. We commit to the given meaning of
|
||||
* the numbers listed in constants in the namespaces, and we commit to
|
||||
* the fact that any unknown QueryResultType value counts as an error.
|
||||
*/
|
||||
|
||||
import * as rpc from 'vscode-jsonrpc';
|
||||
|
||||
/**
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export interface Position {
|
||||
/**
|
||||
* The one-based index of the start line
|
||||
*/
|
||||
line: number;
|
||||
/**
|
||||
* The one-based offset of the start column within
|
||||
* the start line in UTF-16 code-units
|
||||
*/
|
||||
column: number;
|
||||
/**
|
||||
* The one-based index of the end line line
|
||||
*/
|
||||
endLine: number;
|
||||
|
||||
/**
|
||||
* The one-based offset of the end column within
|
||||
* the end line in UTF-16 code-units
|
||||
*/
|
||||
endColumn: number;
|
||||
/**
|
||||
* The path of the file.
|
||||
* If the file name is "Compiler Generated" the
|
||||
* the position is not a real position but
|
||||
* arises from compiler generated code.
|
||||
*/
|
||||
fileName: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export interface CompilationTarget {
|
||||
/**
|
||||
* Compile as a normal query
|
||||
*/
|
||||
query?: Record<string, never>;
|
||||
/**
|
||||
* Compile as a quick evaluation
|
||||
*/
|
||||
quickEval?: QuickEvalOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for quick evaluation
|
||||
*/
|
||||
export interface QuickEvalOptions {
|
||||
quickEvalPos?: Position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Type for any action that could have progress messages.
|
||||
*/
|
||||
export interface WithProgressId<T> {
|
||||
/**
|
||||
* The main body
|
||||
*/
|
||||
body: T;
|
||||
/**
|
||||
* The id used to report progress updates
|
||||
*/
|
||||
progressId: number;
|
||||
}
|
||||
|
||||
export interface ProgressMessage {
|
||||
/**
|
||||
* The id of the operation that is running
|
||||
*/
|
||||
id: number;
|
||||
/**
|
||||
* The current step
|
||||
*/
|
||||
step: number;
|
||||
/**
|
||||
* The maximum step. This *should* be constant for a single job.
|
||||
*/
|
||||
maxStep: number;
|
||||
/**
|
||||
* The current progress message
|
||||
*/
|
||||
message: string;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A notification that the progress has been changed.
|
||||
*/
|
||||
export const progress = new rpc.NotificationType<ProgressMessage, void>('ql/progressUpdated');
|
||||
215
extensions/ql-vscode/src/pure/new-messages.ts
Normal file
215
extensions/ql-vscode/src/pure/new-messages.ts
Normal file
@@ -0,0 +1,215 @@
|
||||
/**
|
||||
* Types for messages exchanged during jsonrpc communication with the
|
||||
* the CodeQL query server.
|
||||
*
|
||||
* This file exists in the queryserver and in the vscode extension, and
|
||||
* should be kept in sync between them.
|
||||
*
|
||||
* A note about the namespaces below, which look like they are
|
||||
* essentially enums, namely Severity, ResultColumnKind, and
|
||||
* QueryResultType. By design, for the sake of extensibility, clients
|
||||
* receiving messages of this protocol are supposed to accept any
|
||||
* number for any of these types. We commit to the given meaning of
|
||||
* the numbers listed in constants in the namespaces, and we commit to
|
||||
* the fact that any unknown QueryResultType value counts as an error.
|
||||
*/
|
||||
|
||||
import * as rpc from 'vscode-jsonrpc';
|
||||
import * as shared from './messages-shared';
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Parameters to clear the cache
|
||||
*/
|
||||
export interface ClearCacheParams {
|
||||
/**
|
||||
* The dataset for which we want to clear the cache
|
||||
*/
|
||||
db: string;
|
||||
/**
|
||||
* Whether the cache should actually be cleared.
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
export interface TrimCacheParams {
|
||||
/**
|
||||
* The dataset that we want to trim the cache of.
|
||||
*/
|
||||
db: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of trimming or clearing the cache.
|
||||
*/
|
||||
export interface ClearCacheResult {
|
||||
/**
|
||||
* A user friendly message saying what was or would be
|
||||
* deleted.
|
||||
*/
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
|
||||
export type QueryResultType = number;
|
||||
/**
|
||||
* The result of running a query. This namespace is intentionally not
|
||||
* an enum, see "for the sake of extensibility" comment above.
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
export namespace QueryResultType {
|
||||
/**
|
||||
* The query ran successfully
|
||||
*/
|
||||
export const SUCCESS = 0;
|
||||
/**
|
||||
* The query failed due to an reason
|
||||
* that isn't listed
|
||||
*/
|
||||
export const OTHER_ERROR = 1;
|
||||
/**
|
||||
* The query failed do to compilation erorrs
|
||||
*/
|
||||
export const COMPILATION_ERROR = 2;
|
||||
/**
|
||||
* The query failed due to running out of
|
||||
* memory
|
||||
*/
|
||||
export const OOM = 3;
|
||||
/**
|
||||
* The query failed because it was cancelled.
|
||||
*/
|
||||
export const CANCELLATION = 4;
|
||||
/**
|
||||
* The dbscheme basename was not the same
|
||||
*/
|
||||
export const DBSCHEME_MISMATCH_NAME = 5;
|
||||
/**
|
||||
* No upgrade was found
|
||||
*/
|
||||
export const DBSCHEME_NO_UPGRADE = 6;
|
||||
}
|
||||
|
||||
|
||||
export interface RegisterDatabasesParams {
|
||||
databases: string[];
|
||||
}
|
||||
|
||||
export interface DeregisterDatabasesParams {
|
||||
databases: string[];
|
||||
}
|
||||
|
||||
export type RegisterDatabasesResult = {
|
||||
registeredDatabases: string[];
|
||||
};
|
||||
|
||||
export type DeregisterDatabasesResult = {
|
||||
registeredDatabases: string[];
|
||||
};
|
||||
|
||||
|
||||
export interface RunQueryParams {
|
||||
/**
|
||||
* The path of the query
|
||||
*/
|
||||
queryPath: string,
|
||||
/**
|
||||
* The output path
|
||||
*/
|
||||
outputPath: string,
|
||||
/**
|
||||
* The database path
|
||||
*/
|
||||
db: string,
|
||||
additionalPacks: string[],
|
||||
target: CompilationTarget,
|
||||
externalInputs: Record<string, string>,
|
||||
singletonExternalInputs: Record<string, string>,
|
||||
dilPath?: string,
|
||||
logPath?: string
|
||||
}
|
||||
|
||||
export interface RunQueryResult {
|
||||
resultType: QueryResultType,
|
||||
message?: string,
|
||||
expectedDbschemeName?: string,
|
||||
evaluationTime: number;
|
||||
}
|
||||
|
||||
|
||||
|
||||
export interface UpgradeParams {
|
||||
db: string,
|
||||
additionalPacks: string[],
|
||||
}
|
||||
|
||||
export type UpgradeResult = Record<string, unknown>;
|
||||
|
||||
export type ClearPackCacheParams = Record<string, unknown>;
|
||||
export type ClearPackCacheResult = Record<string, unknown>;
|
||||
|
||||
/**
|
||||
* A position within a QL file.
|
||||
*/
|
||||
export type Position = shared.Position;
|
||||
|
||||
/**
|
||||
* The way of compiling the query, as a normal query
|
||||
* or a subset of it. Note that precisely one of the two options should be set.
|
||||
*/
|
||||
export type CompilationTarget = shared.CompilationTarget;
|
||||
|
||||
export type QuickEvalOptions = shared.QuickEvalOptions;
|
||||
|
||||
export type WithProgressId<T> = shared.WithProgressId<T>;
|
||||
export type ProgressMessage = shared.ProgressMessage;
|
||||
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
*/
|
||||
export const clearCache = new rpc.RequestType<WithProgressId<ClearCacheParams>, ClearCacheResult, void, void>('evaluation/clearCache');
|
||||
/**
|
||||
* Trim the cache of a dataset
|
||||
*/
|
||||
export const trimCache = new rpc.RequestType<WithProgressId<TrimCacheParams>, ClearCacheResult, void, void>('evaluation/trimCache');
|
||||
|
||||
/**
|
||||
* Clear the pack cache
|
||||
*/
|
||||
export const clearPackCache = new rpc.RequestType<WithProgressId<ClearPackCacheParams>, ClearPackCacheResult, void, void>('evaluation/clearPackCache');
|
||||
|
||||
/**
|
||||
* Run a query on a database
|
||||
*/
|
||||
export const runQuery = new rpc.RequestType<WithProgressId<RunQueryParams>, RunQueryResult, void, void>('evaluation/runQuery');
|
||||
|
||||
export const registerDatabases = new rpc.RequestType<
|
||||
WithProgressId<RegisterDatabasesParams>,
|
||||
RegisterDatabasesResult,
|
||||
void,
|
||||
void
|
||||
>('evaluation/registerDatabases');
|
||||
|
||||
export const deregisterDatabases = new rpc.RequestType<
|
||||
WithProgressId<DeregisterDatabasesParams>,
|
||||
DeregisterDatabasesResult,
|
||||
void,
|
||||
void
|
||||
>('evaluation/deregisterDatabases');
|
||||
|
||||
|
||||
export const upgradeDatabase = new rpc.RequestType<
|
||||
WithProgressId<UpgradeParams>,
|
||||
UpgradeResult,
|
||||
void,
|
||||
void
|
||||
>('evaluation/runUpgrade');
|
||||
|
||||
/**
|
||||
* A notification that the progress has been changed.
|
||||
*/
|
||||
export const progress = shared.progress;
|
||||
15
extensions/ql-vscode/src/pure/number.ts
Normal file
15
extensions/ql-vscode/src/pure/number.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
/*
|
||||
* Contains an assortment of helper constants and functions for working with numbers.
|
||||
*/
|
||||
|
||||
const numberFormatter = new Intl.NumberFormat('en-US');
|
||||
|
||||
/**
|
||||
* Formats a number to be human-readable with decimal places and thousands separators.
|
||||
*
|
||||
* @param value The number to format.
|
||||
* @returns The formatted number. For example, "10,000", "1,000,000", or "1,000,000,000".
|
||||
*/
|
||||
export function formatDecimal(value: number): string {
|
||||
return numberFormatter.format(value);
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import * as Sarif from 'sarif';
|
||||
import { HighlightedRegion } from '../remote-queries/shared/analysis-result';
|
||||
import { ResolvableLocationValue } from './bqrs-cli-types';
|
||||
|
||||
export interface SarifLink {
|
||||
@@ -127,35 +128,111 @@ export function parseSarifLocation(
|
||||
userVisibleFile
|
||||
} as ParsedSarifLocation;
|
||||
} else {
|
||||
const region = physicalLocation.region;
|
||||
// We assume that the SARIF we're given always has startLine
|
||||
// This is not mandated by the SARIF spec, but should be true of
|
||||
// SARIF output by our own tools.
|
||||
const startLine = region.startLine!;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// We also assume that our tools will always supply `endColumn` field, which is
|
||||
// fortunate, since the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = region.endColumn! - 1;
|
||||
const region = parseSarifRegion(physicalLocation.region);
|
||||
|
||||
return {
|
||||
uri: effectiveLocation,
|
||||
userVisibleFile,
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn,
|
||||
...region
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export function parseSarifRegion(
|
||||
region: Sarif.Region
|
||||
): {
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
startColumn: number,
|
||||
endColumn: number
|
||||
} {
|
||||
// The SARIF we're given should have a startLine, but we
|
||||
// fall back to 1, just in case something has gone wrong.
|
||||
const startLine = region.startLine ?? 1;
|
||||
|
||||
// These defaults are from SARIF 2.1.0 spec, section 3.30.2, "Text Regions"
|
||||
// https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Ref493492556
|
||||
const endLine = region.endLine === undefined ? startLine : region.endLine;
|
||||
const startColumn = region.startColumn === undefined ? 1 : region.startColumn;
|
||||
|
||||
// Our tools should always supply `endColumn` field, which is fortunate, since
|
||||
// the SARIF spec says that it defaults to the end of the line, whose
|
||||
// length we don't know at this point in the code. We fall back to 1,
|
||||
// just in case something has gone wrong.
|
||||
//
|
||||
// It is off by one with respect to the way vscode counts columns in selections.
|
||||
const endColumn = (region.endColumn ?? 1) - 1;
|
||||
|
||||
return {
|
||||
startLine,
|
||||
startColumn,
|
||||
endLine,
|
||||
endColumn
|
||||
};
|
||||
}
|
||||
|
||||
export function isNoLocation(loc: ParsedSarifLocation): loc is NoLocation {
|
||||
return 'hint' in loc;
|
||||
}
|
||||
|
||||
// Some helpers for highlighting specific regions from a SARIF code snippet
|
||||
|
||||
/**
|
||||
* Checks whether a particular line (determined by its line number in the original file)
|
||||
* is part of the highlighted region of a SARIF code snippet.
|
||||
*/
|
||||
export function shouldHighlightLine(
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): boolean {
|
||||
if (lineNumber < highlightedRegion.startLine) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (highlightedRegion.endLine == undefined) {
|
||||
return lineNumber == highlightedRegion.startLine;
|
||||
}
|
||||
|
||||
return lineNumber <= highlightedRegion.endLine;
|
||||
}
|
||||
|
||||
/**
|
||||
* A line of code split into: plain text before the highlighted section, the highlighted
|
||||
* text itself, and plain text after the highlighted section.
|
||||
*/
|
||||
export interface PartiallyHighlightedLine {
|
||||
plainSection1: string;
|
||||
highlightedSection: string;
|
||||
plainSection2: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits a line of code into the highlighted and non-highlighted sections.
|
||||
*/
|
||||
export function parseHighlightedLine(
|
||||
line: string,
|
||||
lineNumber: number,
|
||||
highlightedRegion: HighlightedRegion
|
||||
): PartiallyHighlightedLine {
|
||||
const isSingleLineHighlight = highlightedRegion.endLine === undefined;
|
||||
const isFirstHighlightedLine = lineNumber === highlightedRegion.startLine;
|
||||
const isLastHighlightedLine = lineNumber === highlightedRegion.endLine;
|
||||
|
||||
const highlightStartColumn = isSingleLineHighlight
|
||||
? highlightedRegion.startColumn
|
||||
: isFirstHighlightedLine
|
||||
? highlightedRegion.startColumn
|
||||
: 0;
|
||||
|
||||
const highlightEndColumn = isSingleLineHighlight
|
||||
? highlightedRegion.endColumn
|
||||
: isLastHighlightedLine
|
||||
? highlightedRegion.endColumn
|
||||
: line.length + 1;
|
||||
|
||||
const plainSection1 = line.substring(0, highlightStartColumn - 1);
|
||||
const highlightedSection = line.substring(highlightStartColumn - 1, highlightEndColumn - 1);
|
||||
const plainSection2 = line.substring(highlightEndColumn - 1, line.length);
|
||||
|
||||
return { plainSection1, highlightedSection, plainSection2 };
|
||||
}
|
||||
|
||||
89
extensions/ql-vscode/src/pure/time.ts
Normal file
89
extensions/ql-vscode/src/pure/time.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Contains an assortment of helper constants and functions for working with time, dates, and durations.
|
||||
*/
|
||||
|
||||
export const ONE_SECOND_IN_MS = 1000;
|
||||
export const ONE_MINUTE_IN_MS = ONE_SECOND_IN_MS * 60;
|
||||
export const ONE_HOUR_IN_MS = ONE_MINUTE_IN_MS * 60;
|
||||
export const TWO_HOURS_IN_MS = ONE_HOUR_IN_MS * 2;
|
||||
export const THREE_HOURS_IN_MS = ONE_HOUR_IN_MS * 3;
|
||||
export const ONE_DAY_IN_MS = ONE_HOUR_IN_MS * 24;
|
||||
|
||||
// These are approximations
|
||||
export const ONE_MONTH_IN_MS = ONE_DAY_IN_MS * 30;
|
||||
export const ONE_YEAR_IN_MS = ONE_DAY_IN_MS * 365;
|
||||
|
||||
const durationFormatter = new Intl.RelativeTimeFormat('en', {
|
||||
numeric: 'auto',
|
||||
});
|
||||
|
||||
/**
|
||||
* Converts a number of milliseconds into a human-readable string with units, indicating a relative time in the past or future.
|
||||
*
|
||||
* @param relativeTimeMillis The duration in milliseconds. A negative number indicates a duration in the past. And a positive number is
|
||||
* the future.
|
||||
* @returns A humanized duration. For example, "in 2 minutes", "2 minutes ago", "yesterday", or "tomorrow".
|
||||
*/
|
||||
export function humanizeRelativeTime(relativeTimeMillis?: number) {
|
||||
if (relativeTimeMillis === undefined) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (Math.abs(relativeTimeMillis) < ONE_HOUR_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_MINUTE_IN_MS), 'minute');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_DAY_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_HOUR_IN_MS), 'hour');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_MONTH_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_DAY_IN_MS), 'day');
|
||||
} else if (Math.abs(relativeTimeMillis) < ONE_YEAR_IN_MS) {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_MONTH_IN_MS), 'month');
|
||||
} else {
|
||||
return durationFormatter.format(Math.floor(relativeTimeMillis / ONE_YEAR_IN_MS), 'year');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a number of milliseconds into a human-readable string with units, indicating an amount of time.
|
||||
* Negative numbers have no meaning and are considered to be "Less than a second".
|
||||
*
|
||||
* @param millis The number of milliseconds to convert.
|
||||
* @returns A humanized duration. For example, "2 seconds", "2 minutes", "2 hours", "2 days", or "2 months".
|
||||
*/
|
||||
export function humanizeUnit(millis?: number): string {
|
||||
// assume a blank or empty string is a zero
|
||||
// assume anything less than 0 is a zero
|
||||
if (!millis || millis < ONE_SECOND_IN_MS) {
|
||||
return 'Less than a second';
|
||||
}
|
||||
let unit: string;
|
||||
let unitDiff: number;
|
||||
if (millis < ONE_MINUTE_IN_MS) {
|
||||
unit = 'second';
|
||||
unitDiff = Math.floor(millis / ONE_SECOND_IN_MS);
|
||||
} else if (millis < ONE_HOUR_IN_MS) {
|
||||
unit = 'minute';
|
||||
unitDiff = Math.floor(millis / ONE_MINUTE_IN_MS);
|
||||
} else if (millis < ONE_DAY_IN_MS) {
|
||||
unit = 'hour';
|
||||
unitDiff = Math.floor(millis / ONE_HOUR_IN_MS);
|
||||
} else if (millis < ONE_MONTH_IN_MS) {
|
||||
unit = 'day';
|
||||
unitDiff = Math.floor(millis / ONE_DAY_IN_MS);
|
||||
} else if (millis < ONE_YEAR_IN_MS) {
|
||||
unit = 'month';
|
||||
unitDiff = Math.floor(millis / ONE_MONTH_IN_MS);
|
||||
} else {
|
||||
unit = 'year';
|
||||
unitDiff = Math.floor(millis / ONE_YEAR_IN_MS);
|
||||
}
|
||||
|
||||
return createFormatter(unit).format(unitDiff);
|
||||
}
|
||||
|
||||
function createFormatter(unit: string) {
|
||||
return Intl.NumberFormat('en-US', {
|
||||
style: 'unit',
|
||||
unit,
|
||||
unitDisplay: 'long'
|
||||
});
|
||||
}
|
||||
11
extensions/ql-vscode/src/pure/zip.ts
Normal file
11
extensions/ql-vscode/src/pure/zip.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import * as unzipper from 'unzipper';
|
||||
|
||||
/**
|
||||
* Unzips a zip file to a directory.
|
||||
* @param sourcePath The path to the zip file.
|
||||
* @param destinationPath The path to the directory to unzip to.
|
||||
*/
|
||||
export async function unzipFile(sourcePath: string, destinationPath: string) {
|
||||
const file = await unzipper.Open.file(sourcePath);
|
||||
await file.extract({ path: destinationPath });
|
||||
}
|
||||
19
extensions/ql-vscode/src/query-history-info.ts
Normal file
19
extensions/ql-vscode/src/query-history-info.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
|
||||
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
|
||||
import { LocalQueryInfo } from './query-results';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
|
||||
export type QueryHistoryInfo = LocalQueryInfo | RemoteQueryHistoryItem | VariantAnalysisHistoryItem;
|
||||
|
||||
export function getRawQueryName(item: QueryHistoryInfo): string {
|
||||
switch (item.t) {
|
||||
case 'local':
|
||||
return item.getQueryName();
|
||||
case 'remote':
|
||||
return item.remoteQuery.queryName;
|
||||
case 'variant-analysis':
|
||||
return item.variantAnalysis.query.name;
|
||||
default:
|
||||
assertNever(item);
|
||||
}
|
||||
}
|
||||
139
extensions/ql-vscode/src/query-history-scrubber.ts
Normal file
139
extensions/ql-vscode/src/query-history-scrubber.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { Disposable, ExtensionContext } from 'vscode';
|
||||
import { logger } from './logging';
|
||||
import { QueryHistoryManager } from './query-history';
|
||||
|
||||
const LAST_SCRUB_TIME_KEY = 'lastScrubTime';
|
||||
|
||||
type Counter = {
|
||||
increment: () => void;
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers an interval timer that will periodically check for queries old enought
|
||||
* to be deleted.
|
||||
*
|
||||
* Note that this scrubber will clean all queries from all workspaces. It should not
|
||||
* run too often and it should only run from one workspace at a time.
|
||||
*
|
||||
* Generally, `wakeInterval` should be significantly shorter than `throttleTime`.
|
||||
*
|
||||
* @param wakeInterval How often to check to see if the job should run.
|
||||
* @param throttleTime How often to actually run the job.
|
||||
* @param maxQueryTime The maximum age of a query before is ready for deletion.
|
||||
* @param queryDirectory The directory containing all queries.
|
||||
* @param ctx The extension context.
|
||||
*/
|
||||
export function registerQueryHistoryScrubber(
|
||||
wakeInterval: number,
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
qhm: QueryHistoryManager,
|
||||
ctx: ExtensionContext,
|
||||
|
||||
// optional counter to keep track of how many times the scrubber has run
|
||||
counter?: Counter
|
||||
): Disposable {
|
||||
const deregister = setInterval(scrubQueries, wakeInterval, throttleTime, maxQueryTime, queryDirectory, qhm, ctx, counter);
|
||||
|
||||
return {
|
||||
dispose: () => {
|
||||
clearInterval(deregister);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function scrubQueries(
|
||||
throttleTime: number,
|
||||
maxQueryTime: number,
|
||||
queryDirectory: string,
|
||||
qhm: QueryHistoryManager,
|
||||
ctx: ExtensionContext,
|
||||
counter?: Counter
|
||||
) {
|
||||
const lastScrubTime = ctx.globalState.get<number>(LAST_SCRUB_TIME_KEY);
|
||||
const now = Date.now();
|
||||
|
||||
// If we have never scrubbed before, or if the last scrub was more than `throttleTime` ago,
|
||||
// then scrub again.
|
||||
if (lastScrubTime === undefined || now - lastScrubTime >= throttleTime) {
|
||||
await ctx.globalState.update(LAST_SCRUB_TIME_KEY, now);
|
||||
|
||||
let scrubCount = 0; // total number of directories deleted
|
||||
try {
|
||||
counter?.increment();
|
||||
void logger.log('Scrubbing query directory. Removing old queries.');
|
||||
if (!(await fs.pathExists(queryDirectory))) {
|
||||
void logger.log(`Cannot scrub. Query directory does not exist: ${queryDirectory}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const baseNames = await fs.readdir(queryDirectory);
|
||||
const errors: string[] = [];
|
||||
for (const baseName of baseNames) {
|
||||
const dir = path.join(queryDirectory, baseName);
|
||||
const scrubResult = await scrubDirectory(dir, now, maxQueryTime);
|
||||
if (scrubResult.errorMsg) {
|
||||
errors.push(scrubResult.errorMsg);
|
||||
}
|
||||
if (scrubResult.deleted) {
|
||||
scrubCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length) {
|
||||
throw new Error(os.EOL + errors.join(os.EOL));
|
||||
}
|
||||
} catch (e) {
|
||||
void logger.log(`Error while scrubbing queries: ${e}`);
|
||||
} finally {
|
||||
void logger.log(`Scrubbed ${scrubCount} old queries.`);
|
||||
}
|
||||
await qhm.removeDeletedQueries();
|
||||
}
|
||||
}
|
||||
|
||||
async function scrubDirectory(dir: string, now: number, maxQueryTime: number): Promise<{
|
||||
errorMsg?: string,
|
||||
deleted: boolean
|
||||
}> {
|
||||
const timestampFile = path.join(dir, 'timestamp');
|
||||
try {
|
||||
let deleted = true;
|
||||
if (!(await fs.stat(dir)).isDirectory()) {
|
||||
void logger.log(` ${dir} is not a directory. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.pathExists(timestampFile))) {
|
||||
void logger.log(` ${dir} has no timestamp file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (!(await fs.stat(timestampFile)).isFile()) {
|
||||
void logger.log(` ${timestampFile} is not a file. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
const timestampText = await fs.readFile(timestampFile, 'utf8');
|
||||
const timestamp = parseInt(timestampText, 10);
|
||||
|
||||
if (Number.isNaN(timestamp)) {
|
||||
void logger.log(` ${dir} has invalid timestamp '${timestampText}'. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else if (now - timestamp > maxQueryTime) {
|
||||
void logger.log(` ${dir} is older than ${maxQueryTime / 1000} seconds. Deleting.`);
|
||||
await fs.remove(dir);
|
||||
} else {
|
||||
void logger.log(` ${dir} is not older than ${maxQueryTime / 1000} seconds. Keeping.`);
|
||||
deleted = false;
|
||||
}
|
||||
}
|
||||
return {
|
||||
deleted
|
||||
};
|
||||
} catch (err) {
|
||||
return {
|
||||
errorMsg: ` Could not delete '${dir}': ${err}`,
|
||||
deleted: false
|
||||
};
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,30 +1,68 @@
|
||||
import { env } from 'vscode';
|
||||
import { CancellationTokenSource, env } from 'vscode';
|
||||
|
||||
import { QueryWithResults, tmpDir, QueryInfo } from './run-queries';
|
||||
import * as messages from './pure/messages';
|
||||
import * as messages from './pure/messages-shared';
|
||||
import * as legacyMessages from './pure/legacy-messages';
|
||||
import * as cli from './cli';
|
||||
import * as sarif from 'sarif';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import { RawResultsSortState, SortedResultSetInfo, DatabaseInfo, QueryMetadata, InterpretedResultsSortState, ResultsPaths } from './pure/interface-types';
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { QueryHistoryItemOptions } from './query-history';
|
||||
import {
|
||||
RawResultsSortState,
|
||||
SortedResultSetInfo,
|
||||
QueryMetadata,
|
||||
InterpretedResultsSortState,
|
||||
ResultsPaths,
|
||||
SarifInterpretationData,
|
||||
GraphInterpretationData
|
||||
} from './pure/interface-types';
|
||||
import { DatabaseInfo } from './pure/interface-types';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from './run-queries-shared';
|
||||
import { formatLegacyMessage } from './legacy-query-server/run-queries';
|
||||
|
||||
export class CompletedQuery implements QueryWithResults {
|
||||
readonly date: Date;
|
||||
readonly time: string;
|
||||
readonly query: QueryInfo;
|
||||
readonly result: messages.EvaluationResult;
|
||||
readonly database: DatabaseInfo;
|
||||
/**
|
||||
* query-results.ts
|
||||
* ----------------
|
||||
*
|
||||
* A collection of classes and functions that collectively
|
||||
* manage query results.
|
||||
*/
|
||||
|
||||
/**
|
||||
* A description of the information about a query
|
||||
* that is available before results are populated.
|
||||
*/
|
||||
export interface InitialQueryInfo {
|
||||
userSpecifiedLabel?: string; // if missing, use a default label
|
||||
readonly queryText: string; // text of the selected file, or the selected text when doing quick eval
|
||||
readonly isQuickQuery: boolean;
|
||||
readonly isQuickEval: boolean;
|
||||
readonly quickEvalPosition?: messages.Position;
|
||||
readonly queryPath: string;
|
||||
readonly databaseInfo: DatabaseInfo
|
||||
readonly start: Date;
|
||||
readonly id: string; // unique id for this query.
|
||||
}
|
||||
|
||||
export class CompletedQueryInfo implements QueryWithResults {
|
||||
readonly query: QueryEvaluationInfo;
|
||||
readonly message?: string;
|
||||
readonly successful?: boolean;
|
||||
/**
|
||||
* The legacy result. This is only set when loading from the query history.
|
||||
*/
|
||||
readonly result: legacyMessages.EvaluationResult;
|
||||
readonly logFileLocation?: string;
|
||||
options: QueryHistoryItemOptions;
|
||||
resultCount: number;
|
||||
|
||||
/**
|
||||
* This dispose method is called when the query is removed from the history view.
|
||||
*/
|
||||
dispose: () => void;
|
||||
|
||||
/**
|
||||
* Map from result set name to SortedResultSetInfo.
|
||||
*/
|
||||
sortedResultsInfo: Map<string, SortedResultSetInfo>;
|
||||
sortedResultsInfo: Record<string, SortedResultSetInfo>;
|
||||
|
||||
/**
|
||||
* How we're currently sorting alerts. This is not mere interface
|
||||
@@ -35,20 +73,25 @@ export class CompletedQuery implements QueryWithResults {
|
||||
*/
|
||||
interpretedResultsSortState: InterpretedResultsSortState | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a CompletedQueryInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
evaluation: QueryWithResults,
|
||||
public config: QueryHistoryConfig,
|
||||
) {
|
||||
this.query = evaluation.query;
|
||||
this.result = evaluation.result;
|
||||
this.database = evaluation.database;
|
||||
this.logFileLocation = evaluation.logFileLocation;
|
||||
this.options = evaluation.options;
|
||||
this.result = evaluation.result;
|
||||
|
||||
this.message = evaluation.message;
|
||||
this.successful = evaluation.successful;
|
||||
// Use the dispose method from the evaluation.
|
||||
// The dispose will clean up any additional log locations that this
|
||||
// query may have created.
|
||||
this.dispose = evaluation.dispose;
|
||||
|
||||
this.date = new Date();
|
||||
this.time = this.date.toLocaleString(env.language);
|
||||
this.sortedResultsInfo = new Map();
|
||||
this.sortedResultsInfo = {};
|
||||
this.resultCount = 0;
|
||||
}
|
||||
|
||||
@@ -56,29 +99,13 @@ export class CompletedQuery implements QueryWithResults {
|
||||
this.resultCount = value;
|
||||
}
|
||||
|
||||
get databaseName(): string {
|
||||
return this.database.name;
|
||||
}
|
||||
get queryName(): string {
|
||||
return getQueryName(this.query);
|
||||
}
|
||||
get queryFileName(): string {
|
||||
return getQueryFileName(this.query);
|
||||
}
|
||||
|
||||
get statusString(): string {
|
||||
switch (this.result.resultType) {
|
||||
case messages.QueryResultType.CANCELLATION:
|
||||
return `cancelled after ${this.result.evaluationTime / 1000} seconds`;
|
||||
case messages.QueryResultType.OOM:
|
||||
return 'out of memory';
|
||||
case messages.QueryResultType.SUCCESS:
|
||||
return `finished in ${this.result.evaluationTime / 1000} seconds`;
|
||||
case messages.QueryResultType.TIMEOUT:
|
||||
return `timed out after ${this.result.evaluationTime / 1000} seconds`;
|
||||
case messages.QueryResultType.OTHER_ERROR:
|
||||
default:
|
||||
return this.result.message ? `failed: ${this.result.message}` : 'failed';
|
||||
if (this.message) {
|
||||
return this.message;
|
||||
} else if (this.result) {
|
||||
return formatLegacyMessage(this.result);
|
||||
} else {
|
||||
throw new Error('No status available');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,52 +113,22 @@ export class CompletedQuery implements QueryWithResults {
|
||||
if (!useSorted) {
|
||||
return this.query.resultsPaths.resultsPath;
|
||||
}
|
||||
return this.sortedResultsInfo.get(selectedTable)?.resultsPath
|
||||
return this.sortedResultsInfo[selectedTable]?.resultsPath
|
||||
|| this.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
interpolate(template: string): string {
|
||||
const { databaseName, queryName, time, resultCount, statusString, queryFileName } = this;
|
||||
const replacements: { [k: string]: string } = {
|
||||
t: time,
|
||||
q: queryName,
|
||||
d: databaseName,
|
||||
r: resultCount.toString(),
|
||||
s: statusString,
|
||||
f: queryFileName,
|
||||
'%': '%',
|
||||
};
|
||||
return template.replace(/%(.)/g, (match, key) => {
|
||||
const replacement = replacements[key];
|
||||
return replacement !== undefined ? replacement : match;
|
||||
});
|
||||
}
|
||||
|
||||
getLabel(): string {
|
||||
return this.options?.label
|
||||
|| this.config.format;
|
||||
}
|
||||
|
||||
get didRunSuccessfully(): boolean {
|
||||
return this.result.resultType === messages.QueryResultType.SUCCESS;
|
||||
}
|
||||
|
||||
toString(): string {
|
||||
return this.interpolate(this.getLabel());
|
||||
}
|
||||
|
||||
async updateSortState(
|
||||
server: cli.CodeQLCliServer,
|
||||
resultSetName: string,
|
||||
sortState?: RawResultsSortState
|
||||
): Promise<void> {
|
||||
if (sortState === undefined) {
|
||||
this.sortedResultsInfo.delete(resultSetName);
|
||||
delete this.sortedResultsInfo[resultSetName];
|
||||
return;
|
||||
}
|
||||
|
||||
const sortedResultSetInfo: SortedResultSetInfo = {
|
||||
resultsPath: path.join(tmpDir.name, `sortedResults${this.query.queryID}-${resultSetName}.bqrs`),
|
||||
resultsPath: this.query.getSortedResultSetPath(resultSetName),
|
||||
sortState
|
||||
};
|
||||
|
||||
@@ -142,7 +139,7 @@ export class CompletedQuery implements QueryWithResults {
|
||||
[sortState.columnIndex],
|
||||
[sortState.sortDirection]
|
||||
);
|
||||
this.sortedResultsInfo.set(resultSetName, sortedResultSetInfo);
|
||||
this.sortedResultsInfo[resultSetName] = sortedResultSetInfo;
|
||||
}
|
||||
|
||||
async updateInterpretedSortState(sortState?: InterpretedResultsSortState): Promise<void> {
|
||||
@@ -152,49 +149,39 @@ export class CompletedQuery implements QueryWithResults {
|
||||
|
||||
|
||||
/**
|
||||
* Gets a human-readable name for an evaluated query.
|
||||
* Uses metadata if it exists, and defaults to the query file name.
|
||||
* Call cli command to interpret SARIF results.
|
||||
*/
|
||||
export function getQueryName(query: QueryInfo) {
|
||||
if (query.quickEvalPosition !== undefined) {
|
||||
return 'Quick evaluation of ' + getQueryFileName(query);
|
||||
} else if (query.metadata?.name) {
|
||||
return query.metadata.name;
|
||||
} else {
|
||||
return getQueryFileName(query);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the file name for an evaluated query.
|
||||
* Defaults to the query file name and may contain position information for quick eval queries.
|
||||
*/
|
||||
export function getQueryFileName(query: QueryInfo) {
|
||||
// Queries run through quick evaluation are not usually the entire query file.
|
||||
// Label them differently and include the line numbers.
|
||||
if (query.quickEvalPosition !== undefined) {
|
||||
const { line, endLine, fileName } = query.quickEvalPosition;
|
||||
const lineInfo = line === endLine ? `${line}` : `${line}-${endLine}`;
|
||||
return `${path.basename(fileName)}:${lineInfo}`;
|
||||
}
|
||||
return path.basename(query.program.queryPath);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Call cli command to interpret results.
|
||||
*/
|
||||
export async function interpretResults(
|
||||
server: cli.CodeQLCliServer,
|
||||
export async function interpretResultsSarif(
|
||||
cli: cli.CodeQLCliServer,
|
||||
metadata: QueryMetadata | undefined,
|
||||
resultsPaths: ResultsPaths,
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<sarif.Log> {
|
||||
): Promise<SarifInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
return JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8'));
|
||||
return { ...JSON.parse(await fs.readFile(interpretedResultsPath, 'utf8')), t: 'SarifInterpretationData' };
|
||||
}
|
||||
return await server.interpretBqrs(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
const res = await cli.interpretBqrsSarif(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { ...res, t: 'SarifInterpretationData' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Call cli command to interpret graph results.
|
||||
*/
|
||||
export async function interpretGraphResults(
|
||||
cli: cli.CodeQLCliServer,
|
||||
metadata: QueryMetadata | undefined,
|
||||
resultsPaths: ResultsPaths,
|
||||
sourceInfo?: cli.SourceInfo
|
||||
): Promise<GraphInterpretationData> {
|
||||
const { resultsPath, interpretedResultsPath } = resultsPaths;
|
||||
if (await fs.pathExists(interpretedResultsPath)) {
|
||||
const dot = await cli.readDotFiles(interpretedResultsPath);
|
||||
return { dot, t: 'GraphInterpretationData' };
|
||||
}
|
||||
|
||||
const dot = await cli.interpretBqrsGraph(ensureMetadataIsComplete(metadata), resultsPath, interpretedResultsPath, sourceInfo);
|
||||
return { dot, t: 'GraphInterpretationData' };
|
||||
}
|
||||
|
||||
export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
|
||||
@@ -211,3 +198,110 @@ export function ensureMetadataIsComplete(metadata: QueryMetadata | undefined) {
|
||||
}
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Used in Interface and Compare-Interface for queries that we know have been completed.
|
||||
*/
|
||||
export type CompletedLocalQueryInfo = LocalQueryInfo & {
|
||||
completedQuery: CompletedQueryInfo
|
||||
};
|
||||
|
||||
export class LocalQueryInfo {
|
||||
readonly t = 'local';
|
||||
|
||||
public failureReason: string | undefined;
|
||||
public completedQuery: CompletedQueryInfo | undefined;
|
||||
public evalLogLocation: string | undefined;
|
||||
public evalLogSummaryLocation: string | undefined;
|
||||
public jsonEvalLogSummaryLocation: string | undefined;
|
||||
public evalLogSummarySymbolsLocation: string | undefined;
|
||||
|
||||
/**
|
||||
* Note that in the {@link slurpQueryHistory} method, we create a FullQueryInfo instance
|
||||
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||
*/
|
||||
constructor(
|
||||
public readonly initialInfo: InitialQueryInfo,
|
||||
private cancellationSource?: CancellationTokenSource // used to cancel in progress queries
|
||||
) { /**/ }
|
||||
|
||||
cancel() {
|
||||
this.cancellationSource?.cancel();
|
||||
// query is no longer in progress, can delete the cancellation token source
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
get startTime() {
|
||||
return this.initialInfo.start.toLocaleString(env.language);
|
||||
}
|
||||
|
||||
get userSpecifiedLabel() {
|
||||
return this.initialInfo.userSpecifiedLabel;
|
||||
}
|
||||
|
||||
set userSpecifiedLabel(label: string | undefined) {
|
||||
this.initialInfo.userSpecifiedLabel = label;
|
||||
}
|
||||
|
||||
/**
|
||||
* The query's file name, unless it is a quick eval.
|
||||
* Queries run through quick evaluation are not usually the entire query file.
|
||||
* Label them differently and include the line numbers.
|
||||
*/
|
||||
getQueryFileName() {
|
||||
if (this.initialInfo.quickEvalPosition) {
|
||||
const { line, endLine, fileName } = this.initialInfo.quickEvalPosition;
|
||||
const lineInfo = line === endLine ? `${line}` : `${line}-${endLine}`;
|
||||
return `${path.basename(fileName)}:${lineInfo}`;
|
||||
}
|
||||
return path.basename(this.initialInfo.queryPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Three cases:
|
||||
*
|
||||
* - If this is a completed query, use the query name from the query metadata.
|
||||
* - If this is a quick eval, return the query name with a prefix
|
||||
* - Otherwise, return the query file name.
|
||||
*/
|
||||
getQueryName() {
|
||||
if (this.initialInfo.quickEvalPosition) {
|
||||
return 'Quick evaluation of ' + this.getQueryFileName();
|
||||
} else if (this.completedQuery?.query.metadata?.name) {
|
||||
return this.completedQuery?.query.metadata?.name;
|
||||
} else {
|
||||
return this.getQueryFileName();
|
||||
}
|
||||
}
|
||||
|
||||
get completed(): boolean {
|
||||
return !!this.completedQuery;
|
||||
}
|
||||
|
||||
completeThisQuery(info: QueryWithResults): void {
|
||||
this.completedQuery = new CompletedQueryInfo(info);
|
||||
|
||||
// dispose of the cancellation token source and also ensure the source is not serialized as JSON
|
||||
this.cancellationSource?.dispose();
|
||||
delete this.cancellationSource;
|
||||
}
|
||||
|
||||
/**
|
||||
* If there is a failure reason, then this query has failed.
|
||||
* If there is no completed query, then this query is still running.
|
||||
* If there is a completed query, then check if didRunSuccessfully.
|
||||
* If true, then this query has completed successfully, otherwise it has failed.
|
||||
*/
|
||||
get status(): QueryStatus {
|
||||
if (this.failureReason) {
|
||||
return QueryStatus.Failed;
|
||||
} else if (!this.completedQuery) {
|
||||
return QueryStatus.InProgress;
|
||||
} else if (this.completedQuery.successful) {
|
||||
return QueryStatus.Completed;
|
||||
} else {
|
||||
return QueryStatus.Failed;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
116
extensions/ql-vscode/src/query-serialization.ts
Normal file
116
extensions/ql-vscode/src/query-serialization.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { asyncFilter, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import { QueryHistoryInfo } from './query-history-info';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { QueryEvaluationInfo } from './run-queries-shared';
|
||||
import { QueryResultType } from './pure/legacy-messages';
|
||||
|
||||
export async function slurpQueryHistory(fsPath: string): Promise<QueryHistoryInfo[]> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = await fs.readFile(fsPath, 'utf8');
|
||||
const obj = JSON.parse(data);
|
||||
if (![1, 2].includes(obj.version)) {
|
||||
void showAndLogErrorMessage(`Can't parse query history. Unsupported query history format: v${obj.version}. `);
|
||||
return [];
|
||||
}
|
||||
|
||||
const queries = obj.queries;
|
||||
const parsedQueries = queries.map((q: QueryHistoryInfo) => {
|
||||
|
||||
// Need to explicitly set prototype since reading in from JSON will not
|
||||
// do this automatically. Note that we can't call the constructor here since
|
||||
// the constructor invokes extra logic that we don't want to do.
|
||||
if (q.t === 'local') {
|
||||
Object.setPrototypeOf(q, LocalQueryInfo.prototype);
|
||||
|
||||
// Date instances are serialized as strings. Need to
|
||||
// convert them back to Date instances.
|
||||
(q.initialInfo as any).start = new Date(q.initialInfo.start);
|
||||
if (q.completedQuery) {
|
||||
// Again, need to explicitly set prototypes.
|
||||
Object.setPrototypeOf(q.completedQuery, CompletedQueryInfo.prototype);
|
||||
Object.setPrototypeOf(q.completedQuery.query, QueryEvaluationInfo.prototype);
|
||||
// slurped queries do not need to be disposed
|
||||
q.completedQuery.dispose = () => { /**/ };
|
||||
|
||||
// Previously, there was a typo in the completedQuery type. There was a field
|
||||
// `sucessful` and it was renamed to `successful`. We need to handle this case.
|
||||
if ('sucessful' in q.completedQuery) {
|
||||
(q.completedQuery as any).successful = (q.completedQuery as any).sucessful;
|
||||
delete (q.completedQuery as any).sucessful;
|
||||
}
|
||||
|
||||
if (!('successful' in q.completedQuery)) {
|
||||
(q.completedQuery as any).successful = q.completedQuery.result?.resultType === QueryResultType.SUCCESS;
|
||||
}
|
||||
}
|
||||
} else if (q.t === 'remote') {
|
||||
// A bug was introduced that didn't set the completed flag in query history
|
||||
// items. The following code makes sure that the flag is set in order to
|
||||
// "patch" older query history items.
|
||||
if (q.status === QueryStatus.Completed) {
|
||||
q.completed = true;
|
||||
}
|
||||
}
|
||||
return q;
|
||||
});
|
||||
|
||||
// filter out queries that have been deleted on disk
|
||||
// most likely another workspace has deleted them because the
|
||||
// queries aged out.
|
||||
return asyncFilter(parsedQueries, async (q) => {
|
||||
if (q.t === 'remote' || q.t === 'variant-analysis') {
|
||||
// the slurper doesn't know where the remote queries are stored
|
||||
// so we need to assume here that they exist. Later, we check to
|
||||
// see if they exist on disk.
|
||||
return true;
|
||||
}
|
||||
const resultsPath = q.completedQuery?.query.resultsPaths.resultsPath;
|
||||
return !!resultsPath && await fs.pathExists(resultsPath);
|
||||
});
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage('Error loading query history.', {
|
||||
fullMessage: ['Error loading query history.', getErrorStack(e)].join('\n'),
|
||||
});
|
||||
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
|
||||
await fs.remove(fsPath);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the query history to disk. It is not necessary that the parent directory
|
||||
* exists, but if it does, it must be writable. An existing file will be overwritten.
|
||||
*
|
||||
* Any errors will be rethrown.
|
||||
*
|
||||
* @param queries the list of queries to save.
|
||||
* @param fsPath the path to save the queries to.
|
||||
*/
|
||||
export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: string): Promise<void> {
|
||||
try {
|
||||
if (!(await fs.pathExists(fsPath))) {
|
||||
await fs.mkdir(path.dirname(fsPath), { recursive: true });
|
||||
}
|
||||
// remove incomplete local queries since they cannot be recreated on restart
|
||||
const filteredQueries = queries.filter(q => q.t === 'local' ? q.completedQuery !== undefined : true);
|
||||
const data = JSON.stringify({
|
||||
// version 2:
|
||||
// - adds the `variant-analysis` type
|
||||
// - ensures a `successful` property exists on completedQuery
|
||||
version: 2,
|
||||
queries: filteredQueries
|
||||
}, null, 2);
|
||||
await fs.writeFile(fsPath, data);
|
||||
} catch (e) {
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
81
extensions/ql-vscode/src/query-server/query-runner.ts
Normal file
81
extensions/ql-vscode/src/query-server/query-runner.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { CancellationToken } from 'vscode';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { clearCache, ClearCacheParams, clearPackCache, deregisterDatabases, registerDatabases, upgradeDatabase } from '../pure/new-messages';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||
import { QueryRunner } from '../queryRunner';
|
||||
import { QueryWithResults } from '../run-queries-shared';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { compileAndRunQueryAgainstDatabase } from './run-queries';
|
||||
import * as vscode from 'vscode';
|
||||
import { getOnDiskWorkspaceFolders } from '../helpers';
|
||||
export class NewQueryRunner extends QueryRunner {
|
||||
|
||||
|
||||
constructor(public readonly qs: QueryServerClient) {
|
||||
super();
|
||||
}
|
||||
|
||||
get cliServer() {
|
||||
return this.qs.cliServer;
|
||||
}
|
||||
|
||||
async restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
await this.qs.restartQueryServer(progress, token);
|
||||
}
|
||||
|
||||
onStart(callBack: (progress: ProgressCallback, token: CancellationToken) => Promise<void>) {
|
||||
this.qs.onDidStartQueryServer(callBack);
|
||||
}
|
||||
|
||||
async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
if (dbItem.contents === undefined) {
|
||||
throw new Error('Can\'t clear the cache in an invalid database.');
|
||||
}
|
||||
|
||||
const db = dbItem.databaseUri.fsPath;
|
||||
const params: ClearCacheParams = {
|
||||
dryRun: false,
|
||||
db,
|
||||
};
|
||||
await this.qs.sendRequest(clearCache, params, token, progress);
|
||||
}
|
||||
async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem, initialInfo: InitialQueryInfo, queryStorageDir: string, progress: ProgressCallback, token: CancellationToken, templates?: Record<string, string>, queryInfo?: LocalQueryInfo): Promise<QueryWithResults> {
|
||||
return await compileAndRunQueryAgainstDatabase(this.qs.cliServer, this.qs, dbItem, initialInfo, queryStorageDir, progress, token, templates, queryInfo);
|
||||
}
|
||||
|
||||
async deregisterDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
|
||||
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: string[] = [dbItem.databaseUri.fsPath];
|
||||
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
|
||||
}
|
||||
}
|
||||
async registerDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
|
||||
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
|
||||
const databases: string[] = [dbItem.databaseUri.fsPath];
|
||||
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
|
||||
}
|
||||
}
|
||||
|
||||
async clearPackCache(): Promise<void> {
|
||||
await this.qs.sendRequest(clearPackCache, {});
|
||||
}
|
||||
|
||||
async upgradeDatabaseExplicit(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
|
||||
|
||||
const yesItem = { title: 'Yes', isCloseAffordance: false };
|
||||
const noItem = { title: 'No', isCloseAffordance: true };
|
||||
const dialogOptions: vscode.MessageItem[] = [yesItem, noItem];
|
||||
|
||||
|
||||
|
||||
const message = `Should the database ${dbItem.databaseUri.fsPath} be destructively upgraded?\n\nThis should not be necessary to run queries
|
||||
as we will non-destructively update it anyway.`;
|
||||
const chosenItem = await vscode.window.showInformationMessage(message, { modal: true }, ...dialogOptions);
|
||||
|
||||
if (chosenItem !== yesItem) {
|
||||
throw new UserCancellationException('User cancelled the database upgrade.');
|
||||
}
|
||||
await this.qs.sendRequest(upgradeDatabase, { db: dbItem.databaseUri.fsPath, additionalPacks: getOnDiskWorkspaceFolders() }, token, progress);
|
||||
}
|
||||
}
|
||||
205
extensions/ql-vscode/src/query-server/queryserver-client.ts
Normal file
205
extensions/ql-vscode/src/query-server/queryserver-client.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
import { DisposableObject } from '../pure/disposable-object';
|
||||
import { CancellationToken, commands } from 'vscode';
|
||||
import { createMessageConnection, RequestType } from 'vscode-jsonrpc';
|
||||
import * as cli from '../cli';
|
||||
import { QueryServerConfig } from '../config';
|
||||
import { Logger, ProgressReporter } from '../logging';
|
||||
import { progress, ProgressMessage, WithProgressId } from '../pure/new-messages';
|
||||
import * as messages from '../pure/new-messages';
|
||||
import { ProgressCallback, ProgressTask } from '../commandRunner';
|
||||
import { findQueryLogFile } from '../run-queries-shared';
|
||||
import { ServerProcess } from '../json-rpc-server';
|
||||
|
||||
type ServerOpts = {
|
||||
logger: Logger;
|
||||
contextStoragePath: string;
|
||||
}
|
||||
|
||||
|
||||
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
|
||||
|
||||
/**
|
||||
* Client that manages a query server process.
|
||||
* The server process is started upon initialization and tracked during its lifetime.
|
||||
* The server process is disposed when the client is disposed, or if the client asks
|
||||
* to restart it (which disposes the existing process and starts a new one).
|
||||
*/
|
||||
export class QueryServerClient extends DisposableObject {
|
||||
|
||||
serverProcess?: ServerProcess;
|
||||
progressCallbacks: { [key: number]: ((res: ProgressMessage) => void) | undefined };
|
||||
nextCallback: number;
|
||||
nextProgress: number;
|
||||
withProgressReporting: WithProgressReporting;
|
||||
|
||||
private readonly queryServerStartListeners = [] as ProgressTask<void>[];
|
||||
|
||||
// Can't use standard vscode EventEmitter here since they do not cause the calling
|
||||
// function to fail if one of the event handlers fail. This is something that
|
||||
// we need here.
|
||||
readonly onDidStartQueryServer = (e: ProgressTask<void>) => {
|
||||
    this.queryServerStartListeners.push(e);
  }

  public activeQueryLogFile: string | undefined;

  constructor(
    readonly config: QueryServerConfig,
    readonly cliServer: cli.CodeQLCliServer,
    readonly opts: ServerOpts,
    withProgressReporting: WithProgressReporting
  ) {
    super();
    // When the query server configuration changes, restart the query server.
    if (config.onDidChangeConfiguration !== undefined) {
      this.push(config.onDidChangeConfiguration(() =>
        commands.executeCommand('codeQL.restartQueryServer')));
    }
    this.withProgressReporting = withProgressReporting;
    this.nextCallback = 0;
    this.nextProgress = 0;
    this.progressCallbacks = {};
  }

  get logger(): Logger {
    return this.opts.logger;
  }

  /** Stops the query server by disposing of the current server process. */
  private stopQueryServer(): void {
    if (this.serverProcess !== undefined) {
      this.disposeAndStopTracking(this.serverProcess);
    } else {
      void this.logger.log('No server process to be stopped.');
    }
  }

  /** Restarts the query server by disposing of the current server process and then starting a new one. */
  async restartQueryServer(
    progress: ProgressCallback,
    token: CancellationToken
  ): Promise<void> {
    this.stopQueryServer();
    await this.startQueryServer();

    // Ensure we await all responses from event handlers so that
    // errors can be properly reported to the user.
    await Promise.all(this.queryServerStartListeners.map(handler => handler(
      progress,
      token
    )));
  }

  showLog(): void {
    this.logger.show();
  }

  /** Starts a new query server process, sending progress messages to the status bar. */
  async startQueryServer(): Promise<void> {
    // Use an arrow function to preserve the value of `this`.
    return this.withProgressReporting((progress, _) => this.startQueryServerImpl(progress));
  }

  /** Starts a new query server process, sending progress messages to the given reporter. */
  private async startQueryServerImpl(progressReporter: ProgressReporter): Promise<void> {
    void this.logger.log('Starting NEW query server.');

    const ramArgs = await this.cliServer.resolveRam(this.config.queryMemoryMb, progressReporter);
    const args = ['--threads', this.config.numThreads.toString()].concat(ramArgs);

    if (this.config.saveCache) {
      args.push('--save-cache');
    }

    if (this.config.cacheSize > 0) {
      args.push('--max-disk-cache');
      args.push(this.config.cacheSize.toString());
    }

    const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
    await fs.ensureFile(structuredLogFile);

    args.push('--evaluator-log');
    args.push(structuredLogFile);

    // We hard-code the verbosity level to 5 and minify to false.
    // This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
    args.push('--evaluator-log-level');
    args.push('5');

    if (this.config.debug) {
      args.push('--debug', '--tuple-counting');
    }

    if (cli.shouldDebugQueryServer()) {
      args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=y,quiet=y');
    }

    const child = cli.spawnServer(
      this.config.codeQlPath,
      'CodeQL query server',
      ['execute', 'query-server2'],
      args,
      this.logger,
      data => this.logger.log(data.toString(), {
        trailingNewline: false,
        additionalLogLocation: this.activeQueryLogFile
      }),
      undefined, // no listener for stdout
      progressReporter
    );
    progressReporter.report({ message: 'Connecting to CodeQL query server' });
    const connection = createMessageConnection(child.stdout, child.stdin);
    connection.onNotification(progress, res => {
      const callback = this.progressCallbacks[res.id];
      if (callback) {
        callback(res);
      }
    });
    this.serverProcess = new ServerProcess(child, connection, 'Query Server 2', this.logger);
    // Ensure the server process is disposed together with this client.
    this.track(this.serverProcess);
    connection.listen();
    progressReporter.report({ message: 'Connected to CodeQL query server v2' });
    this.nextCallback = 0;
    this.nextProgress = 0;
    this.progressCallbacks = {};
  }

  get serverProcessPid(): number {
    return this.serverProcess!.child.pid || 0;
  }

  async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
    const id = this.nextProgress++;
    this.progressCallbacks[id] = progress;

    this.updateActiveQuery(type.method, parameter);
    try {
      if (this.serverProcess === undefined) {
        throw new Error('No query server process found.');
      }
      return await this.serverProcess.connection.sendRequest(type, { body: parameter, progressId: id }, token);
    } finally {
      delete this.progressCallbacks[id];
    }
  }

  /**
   * Updates the active query every time there is a new request to compile.
   * The active query is used to specify the side log.
   *
   * This isn't ideal because in situations where there are queries running
   * in parallel, each query's log messages are interleaved. Fixing this
   * properly will require a change in the query server.
   */
  private updateActiveQuery(method: string, parameter: any): void {
    if (method === messages.runQuery.method) {
      this.activeQueryLogFile = findQueryLogFile(path.dirname(path.dirname((parameter as messages.RunQueryParams).outputPath)));
    }
  }
}
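The sketch below is editorial, not part of the diff: it shows how a caller that already holds a QueryServerClient instance might restart the server and send a run-query request. The surrounding values (`qs`, `params`, `progress`, `token`) are assumed to be supplied by the caller, and the ProgressMessage fields follow the usage in sendRequest above.

// Illustrative sketch only, under the assumptions stated above.
async function restartAndRun(
  qs: QueryServerClient,
  params: messages.RunQueryParams,
  progress: ProgressCallback,
  token: CancellationToken
): Promise<messages.RunQueryResult> {
  // Tears down the current server process, starts a new one, and awaits
  // all registered start listeners so their errors surface here.
  await qs.restartQueryServer(progress, token);
  void qs.logger.log(`Query server pid: ${qs.serverProcessPid}`);
  // sendRequest assigns a fresh progress id and routes matching progress
  // notifications back to the callback passed as the last argument.
  return qs.sendRequest(messages.runQuery, params, token, msg =>
    void qs.logger.log(`[query-server progress] ${msg.message}`)
  );
}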
extensions/ql-vscode/src/query-server/run-queries.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
import * as path from 'path';
import {
  CancellationToken
} from 'vscode';
import * as cli from '../cli';
import { ProgressCallback } from '../commandRunner';
import { DatabaseItem } from '../databases';
import {
  getOnDiskWorkspaceFolders,
  showAndLogErrorMessage,
  showAndLogWarningMessage,
  tryGetQueryMetadata
} from '../helpers';
import { logger } from '../logging';
import * as messages from '../pure/new-messages';
import * as legacyMessages from '../pure/legacy-messages';
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
import * as qsClient from './queryserver-client';

/**
 * run-queries.ts
 * --------------
 *
 * Compiling and running QL queries.
 */

/**
 * A collection of evaluation-time information about a query,
 * including the query itself, and where we have decided to put
 * temporary files associated with it, such as the compiled query
 * output and results.
 */
export async function compileAndRunQueryAgainstDatabase(
  cliServer: cli.CodeQLCliServer,
  qs: qsClient.QueryServerClient,
  dbItem: DatabaseItem,
  initialInfo: InitialQueryInfo,
  queryStorageDir: string,
  progress: ProgressCallback,
  token: CancellationToken,
  templates?: Record<string, string>,
  queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
): Promise<QueryWithResults> {
  if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
    throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
  }

  // Read the query metadata if possible, to use in the UI.
  const metadata = await tryGetQueryMetadata(cliServer, initialInfo.queryPath);

  const hasMetadataFile = (await dbItem.hasMetadataFile());
  const query = new QueryEvaluationInfo(
    path.join(queryStorageDir, initialInfo.id),
    dbItem.databaseUri.fsPath,
    hasMetadataFile,
    initialInfo.quickEvalPosition,
    metadata,
  );

  if (!dbItem.contents || dbItem.error) {
    throw new Error('Can\'t run query on invalid database.');
  }
  const target = query.quickEvalPosition ? {
    quickEval: { quickEvalPos: query.quickEvalPosition }
  } : { query: {} };

  const diskWorkspaceFolders = getOnDiskWorkspaceFolders();
  const db = dbItem.databaseUri.fsPath;
  const logPath = queryInfo ? query.evalLogPath : undefined;
  const queryToRun: messages.RunQueryParams = {
    db,
    additionalPacks: diskWorkspaceFolders,
    externalInputs: {},
    singletonExternalInputs: templates || {},
    outputPath: query.resultsPaths.resultsPath,
    queryPath: initialInfo.queryPath,
    logPath,
    target,
  };
  await query.createTimestampFile();
  let result: messages.RunQueryResult | undefined;
  try {
    result = await qs.sendRequest(messages.runQuery, queryToRun, token, progress);
    if (qs.config.customLogDirectory) {
      void showAndLogWarningMessage(
        `Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${query.logPath}.`
      );
    }
  } finally {
    if (queryInfo) {
      if (await query.hasEvalLog()) {
        await query.addQueryLogs(queryInfo, qs.cliServer, qs.logger);
      } else {
        void showAndLogWarningMessage(`Failed to write structured evaluator log to ${query.evalLogPath}.`);
      }
    }
  }

  if (result.resultType !== messages.QueryResultType.SUCCESS) {
    const message = result.message || 'Failed to run query';
    void logger.log(message);
    void showAndLogErrorMessage(message);
  }
  let message;
  switch (result.resultType) {
    case messages.QueryResultType.CANCELLATION:
      message = `cancelled after ${Math.round(result.evaluationTime / 1000)} seconds`;
      break;
    case messages.QueryResultType.OOM:
      message = 'out of memory';
      break;
    case messages.QueryResultType.SUCCESS:
      message = `finished in ${Math.round(result.evaluationTime / 1000)} seconds`;
      break;
    case messages.QueryResultType.COMPILATION_ERROR:
      message = `compilation failed: ${result.message}`;
      break;
    case messages.QueryResultType.OTHER_ERROR:
    default:
      message = result.message ? `failed: ${result.message}` : 'failed';
      break;
  }
  const successful = result.resultType === messages.QueryResultType.SUCCESS;
  return {
    query,
    result: {
      evaluationTime: result.evaluationTime,
      queryId: 0,
      resultType: successful ? legacyMessages.QueryResultType.SUCCESS : legacyMessages.QueryResultType.OTHER_ERROR,
      runId: 0,
      message
    },
    message,
    successful,
    dispose: () => {
      qs.logger.removeAdditionalLogLocation(undefined);
    }
  };
}
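As an editorial sketch (not part of the diff), a caller might consume the returned QueryWithResults as below; all arguments are assumed to already be in scope at the call site, and only names imported by this file are used.

// Illustrative sketch only, under the assumptions stated above.
async function runAndReport(
  cliServer: cli.CodeQLCliServer,
  qs: qsClient.QueryServerClient,
  dbItem: DatabaseItem,
  initialInfo: InitialQueryInfo,
  queryStorageDir: string,
  progress: ProgressCallback,
  token: CancellationToken
): Promise<void> {
  const completed = await compileAndRunQueryAgainstDatabase(
    cliServer, qs, dbItem, initialInfo, queryStorageDir, progress, token
  );
  // `message` carries the human-readable outcome built in the switch above,
  // e.g. "finished in 3 seconds" or "compilation failed: ...".
  void logger.log(`Query ${initialInfo.queryPath}: ${completed.message ?? 'no message'}`);
  if (!completed.successful) {
    void logger.log('Query did not complete successfully.');
  }
  completed.dispose();
}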
extensions/ql-vscode/src/query-status.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
export enum QueryStatus {
  InProgress = 'InProgress',
  Completed = 'Completed',
  Failed = 'Failed',
}
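Editorial sketch (not in the diff): one plausible way to derive a QueryStatus from a finished evaluation. The helper name is hypothetical.

// Illustrative sketch only; `statusFromOutcome` is a hypothetical helper.
export function statusFromOutcome(successful: boolean | undefined): QueryStatus {
  if (successful === undefined) {
    return QueryStatus.InProgress; // evaluation still running or outcome unknown
  }
  return successful ? QueryStatus.Completed : QueryStatus.Failed;
}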
extensions/ql-vscode/src/queryRunner.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { CancellationToken } from 'vscode';
import { CodeQLCliServer } from './cli';
import { ProgressCallback } from './commandRunner';
import { DatabaseItem } from './databases';
import { InitialQueryInfo, LocalQueryInfo } from './query-results';
import { QueryWithResults } from './run-queries-shared';

export abstract class QueryRunner {
  abstract restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void>;

  abstract cliServer: CodeQLCliServer;

  abstract onStart(arg0: (progress: ProgressCallback, token: CancellationToken) => Promise<void>): void;
  abstract clearCacheInDatabase(
    dbItem: DatabaseItem,
    progress: ProgressCallback,
    token: CancellationToken): Promise<void>;

  abstract compileAndRunQueryAgainstDatabase(
    dbItem: DatabaseItem,
    initialInfo: InitialQueryInfo,
    queryStorageDir: string,
    progress: ProgressCallback,
    token: CancellationToken,
    templates?: Record<string, string>,
    queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
  ): Promise<QueryWithResults>;

  abstract deregisterDatabase(
    progress: ProgressCallback,
    token: CancellationToken,
    dbItem: DatabaseItem,
  ): Promise<void>;

  abstract registerDatabase(
    progress: ProgressCallback,
    token: CancellationToken,
    dbItem: DatabaseItem,
  ): Promise<void>;

  abstract upgradeDatabaseExplicit(
    dbItem: DatabaseItem,
    progress: ProgressCallback,
    token: CancellationToken,
  ): Promise<void>;

  abstract clearPackCache(): Promise<void>;
}
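Editorial sketch (not in the diff): code written against the QueryRunner abstraction stays independent of the concrete query server client. The helper below and its argument list are hypothetical; it uses only types imported by queryRunner.ts.

// Illustrative sketch only, under the assumptions stated above.
async function runAgainstEachDatabase(
  runner: QueryRunner,
  dbItems: DatabaseItem[],
  initialInfo: InitialQueryInfo,
  queryStorageDir: string,
  progress: ProgressCallback,
  token: CancellationToken
): Promise<QueryWithResults[]> {
  const results: QueryWithResults[] = [];
  for (const dbItem of dbItems) {
    // The same call shape works for any concrete QueryRunner implementation.
    results.push(await runner.compileAndRunQueryAgainstDatabase(
      dbItem, initialInfo, queryStorageDir, progress, token
    ));
  }
  return results;
}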
@@ -21,6 +21,7 @@ import {
   ProgressCallback,
   UserCancellationException
 } from './commandRunner';
+import { getErrorMessage } from './pure/helpers-pure';

 const QUICK_QUERIES_DIR_NAME = 'quick-queries';
 const QUICK_QUERY_QUERY_NAME = 'quick-query.ql';
@@ -120,19 +121,26 @@ export async function displayQuickQuery(
     const quickQueryQlpackYaml: any = {
       name: 'vscode/quick-query',
       version: '1.0.0',
-      libraryPathDependencies: [qlpack]
+      dependencies: {
+        [qlpack]: '*'
+      }
     };
-    await fs.writeFile(qlPackFile, QLPACK_FILE_HEADER + yaml.safeDump(quickQueryQlpackYaml), 'utf8');
+    await fs.writeFile(qlPackFile, QLPACK_FILE_HEADER + yaml.dump(quickQueryQlpackYaml), 'utf8');
   }

   if (shouldRewrite || !(await fs.pathExists(qlFile))) {
     await fs.writeFile(qlFile, getInitialQueryContents(dbItem.language, dbscheme), 'utf8');
   }

   if (shouldRewrite) {
     await cliServer.clearCache();
     await cliServer.packInstall(queriesDir, true);
   }

   await Window.showTextDocument(await workspace.openTextDocument(qlFile));
 } catch (e) {
   if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
-    throw new UserCancellationException(e.message);
+    throw new UserCancellationException(getErrorMessage(e));
   } else {
     throw e;
   }

@@ -143,6 +151,6 @@ async function checkShouldRewrite(qlPackFile: string, newDependency: string) {
   if (!(await fs.pathExists(qlPackFile))) {
     return true;
   }
-  const qlPackContents: any = yaml.safeLoad(await fs.readFile(qlPackFile, 'utf8'));
-  return qlPackContents.libraryPathDependencies?.[0] !== newDependency;
+  const qlPackContents: any = yaml.load(await fs.readFile(qlPackFile, 'utf8'));
+  return !qlPackContents.dependencies?.[newDependency];
 }
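Editorial sketch (not in the diff): with the updated check, the quick-query pack file is rewritten whenever the expected pack is missing from its `dependencies` map, rather than being compared against the old `libraryPathDependencies` list. The wrapper name and the pack name below are example values only.

// Illustrative sketch only; 'codeql/javascript-all' is an example value.
async function quickQueryPackNeedsRewrite(qlPackFile: string): Promise<boolean> {
  // True when the file is missing or its `dependencies` map does not
  // contain the expected pack.
  return checkShouldRewrite(qlPackFile, 'codeql/javascript-all');
}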
Some files were not shown because too many files have changed in this diff.