Compare commits
1421 Commits
**`.gitattributes`** (vendored, 6 lines changed)

```diff
@@ -18,4 +18,8 @@ yarn.lock merge=binary
 # https://mirrors.edge.kernel.org/pub/software/scm/git/docs/gitattributes.html
 # suggests that this might interleave lines arbitrarily, but empirically
 # it keeps added chunks contiguous
 CHANGELOG.md merge=union
+
+# Mark some JSON files containing test data as generated so they are not included
+# as part of diffs or language statistics.
+extensions/ql-vscode/src/stories/remote-queries/data/*.json linguist-generated
```
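A quick way to confirm that attributes like these actually apply is `git check-attr`. The sketch below assumes you run it from the repository root; the JSON filename is a hypothetical example that matches the new glob, not a file from this change.

```shell
# Check which merge driver and linguist attributes apply to given paths.
git check-attr merge CHANGELOG.md
git check-attr linguist-generated extensions/ql-vscode/src/stories/remote-queries/data/example.json
```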
**`.github/ISSUE_TEMPLATE/new-extension-release.md`** (vendored, deleted, 18 lines)

```diff
@@ -1,18 +0,0 @@
----
-name: New extension release
-about: Create an issue with a checklist for the release steps (write access required
-  for the steps)
-title: Release Checklist for version xx.xx.xx
-labels: ''
-assignees: ''
-
----
-
-- [ ] Update this issue title to refer to the version of the release
-- [ ] Trigger a release build on Actions by adding a new tag on branch `main` of the format `vxx.xx.xx`
-- [ ] Monitor the status of the release build in the `Release` workflow in the Actions tab.
-- [ ] Download the VSIX from the draft GitHub release that is created when the release build finishes.
-- [ ] Log into the [Visual Studio Marketplace](https://marketplace.visualstudio.com/manage/publishers/github).
-- [ ] Click the `...` menu in the CodeQL row and click **Update**.
-- [ ] Drag the `.vsix` file you downloaded from the GitHub release into the Marketplace and click **Upload**.
-- [ ] Publish the draft GitHub release and confirm the new release is marked as the latest release at https://github.com/github/vscode-codeql/releases.
```
**`.github/dependabot.yml`** (vendored, new file, 22 lines)

```diff
@@ -0,0 +1,22 @@
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "extensions/ql-vscode"
+    schedule:
+      interval: "weekly"
+      day: "thursday" # Thursday is arbitrary
+    labels:
+      - "Update dependencies"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
+  - package-ecosystem: "github-actions"
+    directory: ".github"
+    schedule:
+      interval: "weekly"
+      day: "thursday" # Thursday is arbitrary
+    labels:
+      - "Update dependencies"
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
```
**`.github/pull_request_template.md`** (vendored, 2 lines changed)

```diff
@@ -9,4 +9,4 @@ Replace this with a description of the changes your pull request makes.
 
 - [ ] [CHANGELOG.md](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/CHANGELOG.md) has been updated to incorporate all user visible changes made by this pull request.
 - [ ] Issues have been created for any UI or other user-facing changes made by this pull request.
-- [ ] `@github/docs-content-codeql` has been cc'd in all issues for UI or other user-facing changes made by this pull request.
+- [ ] _[Maintainers only]_ If this pull request makes user-facing changes that require documentation changes, open a corresponding docs pull request in the [github/codeql](https://github.com/github/codeql/tree/main/docs/codeql/codeql-for-visual-studio-code) repo and add the `ready-for-doc-review` label there.
```
**`.github/workflows/codeql.yml`** (vendored, 1 line changed)

```diff
@@ -26,6 +26,7 @@ jobs:
         with:
           languages: javascript
           config-file: ./.github/codeql/codeql-config.yml
+          tools: latest
 
       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@main
```
**`.github/workflows/dependency-review.yml`** (vendored, new file, 16 lines)

```diff
@@ -0,0 +1,16 @@
+name: 'Dependency Review'
+on:
+  - pull_request
+  - workflow_dispatch
+
+permissions:
+  contents: read
+
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Checkout Repository'
+        uses: actions/checkout@v3
+      - name: 'Dependency Review'
+        uses: actions/dependency-review-action@v1
```
**`.github/workflows/main.yml`** (vendored, 61 lines changed)

```diff
@@ -2,6 +2,7 @@ name: Build Extension
 on:
   workflow_dispatch:
   pull_request:
+    types: [opened, synchronize, reopened, ready_for_review]
   push:
     branches:
       - main
@@ -21,7 +22,7 @@ jobs:
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.14.0'
+          node-version: '16.14.2'
 
       - name: Install dependencies
         working-directory: extensions/ql-vscode
@@ -50,9 +51,26 @@ jobs:
           name: vscode-codeql-extension
           path: artifacts
 
+  find-nightly:
+    name: Find Nightly Release
+    runs-on: ubuntu-latest
+    outputs:
+      url: ${{ steps.get-url.outputs.nightly-url }}
+    steps:
+      - name: Get Nightly Release URL
+        id: get-url
+        env:
+          GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
+        shell: bash
+        # This workflow step gets an unstable testing version of the CodeQL CLI. It should not be used outside of these tests.
+        run: |
+          LATEST=`gh api repos/dsp-testing/codeql-cli-nightlies/releases --jq '.[].tag_name' --method GET --raw-field 'per_page=1'`
+          echo "::set-output name=nightly-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST"
+
   test:
     name: Test
     runs-on: ${{ matrix.os }}
+    needs: [find-nightly]
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest]
@@ -64,7 +82,7 @@ jobs:
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.14.0'
+          node-version: '16.14.2'
 
       - name: Install dependencies
         working-directory: extensions/ql-vscode
@@ -85,50 +103,51 @@ jobs:
         run: |
           npm run lint
 
-      - name: Install CodeQL
+      - name: Lint scenarios
+        working-directory: extensions/ql-vscode
         run: |
-          mkdir codeql-home
-          curl -L --silent https://github.com/github/codeql-cli-binaries/releases/latest/download/codeql.zip -o codeql-home/codeql.zip
-          unzip -q -o codeql-home/codeql.zip -d codeql-home
-          unzip -q -o codeql-home/codeql.zip codeql/codeql.exe -d codeql-home
-          rm codeql-home/codeql.zip
-        shell: bash
+          npm run lint:scenarios
 
       - name: Run unit tests (Linux)
         working-directory: extensions/ql-vscode
         if: matrix.os == 'ubuntu-latest'
         run: |
-          CODEQL_PATH=$GITHUB_WORKSPACE/codeql-home/codeql/codeql npm run test
+          npm run test
 
       - name: Run unit tests (Windows)
         if: matrix.os == 'windows-latest'
         working-directory: extensions/ql-vscode
         run: |
-          $env:CODEQL_PATH=$(Join-Path $env:GITHUB_WORKSPACE -ChildPath 'codeql-home/codeql/codeql.exe')
           npm run test
 
       - name: Run integration tests (Linux)
         if: matrix.os == 'ubuntu-latest'
         working-directory: extensions/ql-vscode
+        env:
+          VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
         run: |
-          sudo apt-get install xvfb
+          unset DBUS_SESSION_BUS_ADDRESS
           /usr/bin/xvfb-run npm run integration
 
       - name: Run integration tests (Windows)
         if: matrix.os == 'windows-latest'
         working-directory: extensions/ql-vscode
+        env:
+          VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
         run: |
           npm run integration
 
   cli-test:
     name: CLI Test
     runs-on: ${{ matrix.os }}
+    needs: [find-nightly]
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest]
-        version: ['v2.2.6', 'v2.3.3', 'v2.4.5', 'v2.4.6', 'v2.5.3']
+        version: ['v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.2', 'nightly']
     env:
       CLI_VERSION: ${{ matrix.version }}
+      NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
       TEST_CODEQL_PATH: '${{ github.workspace }}/codeql'
 
     steps:
@@ -137,7 +156,7 @@ jobs:
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.14.0'
+          node-version: '16.14.2'
 
       - name: Install dependencies
         working-directory: extensions/ql-vscode
@@ -151,16 +170,30 @@ jobs:
           npm run build
         shell: bash
 
+      - name: Decide on ref of CodeQL repo
+        id: choose-ref
+        shell: bash
+        run: |
+          if [[ "${{ matrix.version }}" == "nightly" ]]
+          then
+            REF="codeql-cli/latest"
+          else
+            REF="codeql-cli/${{ matrix.version }}"
+          fi
+          echo "::set-output name=ref::$REF"
+
       - name: Checkout QL
         uses: actions/checkout@v2
         with:
           repository: github/codeql
+          ref: ${{ steps.choose-ref.outputs.ref }}
           path: codeql
 
       - name: Run CLI tests (Linux)
         working-directory: extensions/ql-vscode
         if: matrix.os == 'ubuntu-latest'
         run: |
+          unset DBUS_SESSION_BUS_ADDRESS
           /usr/bin/xvfb-run npm run cli-integration
 
       - name: Run CLI tests (Windows)
```
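A side note on the two `run` steps added above: they write their results with the `::set-output` workflow command, which GitHub has since deprecated in favour of appending to the `$GITHUB_OUTPUT` file. This is not part of the change being compared, but as a hedged sketch, the nightly-URL step would translate to something like the following (same `gh api` call taken from the diff; only the output mechanism differs).

```shell
# Sketch: the ::set-output line rewritten for the newer $GITHUB_OUTPUT mechanism.
LATEST=$(gh api repos/dsp-testing/codeql-cli-nightlies/releases --jq '.[].tag_name' --method GET --raw-field 'per_page=1')
echo "nightly-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST" >> "$GITHUB_OUTPUT"
```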
**`.github/workflows/release.yml`** (vendored, 7 lines changed)

```diff
@@ -6,10 +6,6 @@
 
 name: Release
 on:
-  pull_request:
-    paths:
-      - '**/workflows/release.yml'
-
   workflow_dispatch:
 
   push:
@@ -26,7 +22,7 @@ jobs:
 
       - uses: actions/setup-node@v1
         with:
-          node-version: '10.18.1'
+          node-version: '16.14.2'
 
       - name: Install dependencies
         run: |
@@ -129,6 +125,7 @@ jobs:
           body: This PR was automatically generated by the GitHub Actions release workflow in this repository.
           branch: ${{ format('version/bump-to-{0}', steps.bump-patch-version.outputs.next_version) }}
           base: main
+          draft: true
 
   vscode-publish:
     name: Publish to VS Code Marketplace
```
**`.vscode/extensions.json`** (vendored, 2 lines changed)

```diff
@@ -3,7 +3,7 @@
   // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
   // List of extensions which should be recommended for users of this workspace.
   "recommendations": [
-    "eamodio.tsl-problem-matcher",
+    "amodio.tsl-problem-matcher",
    "dbaeumer.vscode-eslint",
    "eternalphane.tsfmt-vscode"
  ],
```
**`.vscode/launch.json`** (vendored, 67 lines changed)

```diff
@@ -12,7 +12,6 @@
         // Add a reference to a workspace to open. Eg-
         // "${workspaceRoot}/../vscode-codeql-starter/vscode-codeql-starter.code-workspace"
       ],
-      "stopOnEntry": false,
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -21,6 +20,9 @@
         // change to 'true' debug the IDE or Query servers
         "IDE_SERVER_JAVA_DEBUG": "false",
         "QUERY_SERVER_JAVA_DEBUG": "false",
+        "CLI_SERVER_JAVA_DEBUG": "false",
+        // Uncomment to set the JAVA_HOME for the codeql instance to use
+        // "CODEQL_JAVA_HOME": "/Library/Java/JavaVirtualMachines/jdk-12.0.1.jdk/Contents/Home"
       }
     },
     {
@@ -33,17 +35,31 @@
       "runtimeArgs": [
         "--inspect=9229"
       ],
+      "env": {
+        "LANG": "en-US"
+      },
       "args": [
         "--exit",
         "-u",
         "bdd",
         "--colors",
         "--diff",
-        "-r",
-        "ts-node/register",
+        "--config",
+        ".mocharc.json",
         "test/pure-tests/**/*.ts"
       ],
-      "port": 9229,
+      "stopOnEntry": false,
+      "sourceMaps": true,
+      "console": "integratedTerminal",
+      "internalConsoleOptions": "neverOpen"
+    },
+    {
+      "name": "Launch Unit Tests - React (vscode-codeql)",
+      "type": "node",
+      "request": "launch",
+      "program": "${workspaceFolder}/extensions/ql-vscode/node_modules/jest/bin/jest.js",
+      "showAsyncStacks": true,
+      "cwd": "${workspaceFolder}/extensions/ql-vscode",
       "stopOnEntry": false,
       "sourceMaps": true,
       "console": "integratedTerminal",
@@ -56,9 +72,11 @@
       "runtimeExecutable": "${execPath}",
       "args": [
         "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
-        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index"
+        "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index",
+        "--disable-workspace-trust",
+        "--disable-extensions",
+        "--disable-gpu"
       ],
-      "stopOnEntry": false,
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -72,9 +90,11 @@
       "args": [
         "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
         "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/minimal-workspace/index",
+        "--disable-workspace-trust",
+        "--disable-extensions",
+        "--disable-gpu",
         "${workspaceRoot}/extensions/ql-vscode/test/data"
       ],
-      "stopOnEntry": false,
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
@@ -88,16 +108,45 @@
       "args": [
         "--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode",
         "--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/cli-integration/index",
+        "--disable-workspace-trust",
+        "--disable-gpu",
+        "--disable-extension",
+        "eamodio.gitlens",
+        "--disable-extension",
+        "github.codespaces",
+        "--disable-extension",
+        "github.copilot",
         "${workspaceRoot}/extensions/ql-vscode/src/vscode-tests/cli-integration/data",
-        // Add a path to a checked out instance of the codeql repository so the libraries are
+        // Uncomment the last line and modify the path to a checked out
+        // instance of the codeql repository so the libraries are
         // available in the workspace for the tests.
         // "${workspaceRoot}/../codeql"
       ],
-      "stopOnEntry": false,
+      "env": {
+        // Optionally, set the version to use for the integration tests.
+        // Use "nightly" to use the latest nightly build.
+        // "CLI_VERSION": "2.7.0",
+
+        // If CLI_VERSION is set to nightly, set this to the url of the nightly build.
+        // "NIGHTLY_URL": "some url to grab the nightly build",
+
+        // Optionally, add a path to the codeql executable to be used during these tests.
+        // If not specified, one will be downloaded automatically.
+        // This option overrides the CLI_VERSION option.
+        // "CLI_PATH": "${workspaceRoot}/../semmle-code/target/intree/codeql/codeql",
+      },
       "sourceMaps": true,
       "outFiles": [
         "${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
       ],
+    },
+    {
+      "name": "Launch Storybook",
+      "type": "node",
+      "request": "launch",
+      "cwd": "${workspaceFolder}/extensions/ql-vscode",
+      "runtimeExecutable": "npm",
+      "runtimeArgs": ["run-script", "storybook"]
     }
   ]
 }
```
**`.vscode/settings.json`** (vendored, 12 lines changed)

```diff
@@ -22,19 +22,19 @@
     "common/temp": true,
     "**/.vscode-test": true
   },
-  "typescript.tsdk": "./common/temp/node_modules/typescript/lib", // we want to use the TS server from our node_modules folder to control its version
+  "typescript.tsdk": "./extensions/ql-vscode/node_modules/typescript/lib", // we want to use the TS server from our node_modules folder to control its version
+  "typescript.enablePromptUseWorkspaceTsdk": true,
   "eslint.validate": [
     "javascript",
     "javascriptreact",
     "typescript",
     "typescriptreact"
   ],
-  "eslint.options": {
-    // This is necessary so that eslint can properly resolve its plugins
-    "resolvePluginsRelativeTo": "./extensions/ql-vscode"
-  },
+  // This is necessary to ensure that ESLint can find the correct configuration files and plugins.
+  "eslint.workingDirectories": ["./extensions/ql-vscode"],
   "editor.formatOnSave": false,
   "typescript.preferences.quoteStyle": "single",
   "javascript.preferences.quoteStyle": "single",
-  "editor.wordWrapColumn": 100
+  "editor.wordWrapColumn": 100,
+  "jest.rootPath": "./extensions/ql-vscode"
 }
```
**`CODEOWNERS`** (new file, 3 lines)

```diff
@@ -0,0 +1,3 @@
+**/* @github/codeql-vscode-reviewers
+**/remote-queries/ @github/code-scanning-secexp-reviewers
+**/variant-analysis/ @github/code-scanning-secexp-reviewers
```
151
CONTRIBUTING.md
@@ -29,7 +29,9 @@ Here are a few things you can do that will increase the likelihood of your pull
|
|||||||
|
|
||||||
## Setting up a local build
|
## Setting up a local build
|
||||||
|
|
||||||
Make sure you have installed recent versions of vscode (>= v1.52), node (>=12.16), and npm (>= 7.5.2). Earlier versions will probably work, but we no longer test against them.
|
Make sure you have installed recent versions of vscode, node, and npm. Check the `engines` block in [`package.json`](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/package.json) file for compatible versions. Earlier versions may work, but we no longer test against them.
|
||||||
|
|
||||||
|
To automatically switch to the correct version of node, we recommend using [nvm](https://github.com/nvm-sh/nvm), which will pick-up the node version from `.nvmrc`.
|
||||||
|
|
||||||
### Installing all packages
|
### Installing all packages
|
||||||
|
|
||||||
@@ -56,7 +58,6 @@ We recommend that you keep `npm run watch` running in the backgound and you only
|
|||||||
|
|
||||||
1. on first checkout
|
1. on first checkout
|
||||||
2. whenever any of the non-TypeScript resources have changed
|
2. whenever any of the non-TypeScript resources have changed
|
||||||
3. on any change to files included in the webview
|
|
||||||
|
|
||||||
### Installing the extension
|
### Installing the extension
|
||||||
|
|
||||||
@@ -76,23 +77,125 @@ $ vscode/scripts/code-cli.sh --install-extension dist/vscode-codeql-*.vsix # if
|
|||||||
|
|
||||||
You can use VS Code to debug the extension without explicitly installing it. Just open this directory as a workspace in VS Code, and hit `F5` to start a debugging session.
|
You can use VS Code to debug the extension without explicitly installing it. Just open this directory as a workspace in VS Code, and hit `F5` to start a debugging session.
|
||||||
|
|
||||||
### Running the unit/integration tests
|
### Storybook
|
||||||
|
|
||||||
Ensure the `CODEQL_PATH` environment variable is set to point to the `codeql` cli executable.
|
You can use [Storybook](https://storybook.js.org/) to preview React components outside VSCode. Inside the `extensions/ql-vscode` directory, run:
|
||||||
|
|
||||||
Outside of vscode, run:
|
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
npm run test && npm run integration
|
npm run storybook
|
||||||
```
|
```
|
||||||
|
|
||||||
Alternatively, you can run the tests inside of vscode. There are several vscode launch configurations defined that run the unit and integration tests. They can all be found in the debug view.
|
Your browser should automatically open to the Storybook UI. Stories live in the `src/stories` directory.
|
||||||
|
|
||||||
|
Alternatively, you can start Storybook inside of VSCode. There is a VSCode launch configuration for starting Storybook. It can be found in the debug view.
|
||||||
|
|
||||||
|
More information about Storybook can be found inside the **Overview** page once you have launched Storybook.
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
|
||||||
|
We have several types of tests:
|
||||||
|
|
||||||
|
* Unit tests: these live in the `tests/pure-tests/` directory
|
||||||
|
* View tests: these live in `src/view/variant-analysis/__tests__/`
|
||||||
|
* VSCode integration tests: these live in `src/vscode-tests/no-workspace` and `src/vscode-tests/minimal-workspace`
|
||||||
|
* CLI integration tests: these live in `src/vscode-tests/cli-integration`
|
||||||
|
|
||||||
|
The CLI integration tests require an instance of the CodeQL CLI to run so they will require some extra setup steps. When adding new tests to our test suite, please be mindful of whether they need to be in the cli-integration folder. If the tests don't depend on the CLI, they are better suited to being a VSCode integration test.
|
||||||
|
|
||||||
|
#### Running the tests

##### 1. From the terminal

First move into the `extensions/ql-vscode` directory. Then, depending on which tests you want to run, use the appropriate command:

* Unit tests: `npm run test:unit`
* View tests: `npm run test:view`
* VSCode integration tests: `npm run integration`

###### CLI integration tests

The CLI integration tests require the CodeQL standard libraries in order to run, so you will need to clone a local copy of the `github/codeql` repository.

1. Set the `TEST_CODEQL_PATH` environment variable: running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point to a checkout of the `github/codeql` repository. The appropriate CLI version will be downloaded as part of the test.

2. Run your test command:

    ```shell
    cd extensions/ql-vscode && npm run cli-integration
    ```

##### 2. From VSCode

Alternatively, you can run the tests inside of VSCode. There are several VSCode launch configurations defined that run the unit and integration tests.

You will need to run tests using a task from inside of VS Code, under the "Run and Debug" view:

* Unit tests: run the _Launch Unit Tests - React_ task
* View tests: run the _Launch Unit Tests_ task
* VSCode integration tests: run the _Launch Unit Tests - No Workspace_ and _Launch Unit Tests - Minimal Workspace_ tasks

###### CLI integration tests

The CLI integration tests require the CodeQL standard libraries in order to run, so you will need to clone a local copy of the `github/codeql` repository.

1. Set the `TEST_CODEQL_PATH` environment variable: running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point to a checkout of the `github/codeql` repository. The appropriate CLI version will be downloaded as part of the test.

2. Set the CodeQL path in VSCode's launch configuration: open `launch.json` and under the _Launch Integration Tests - With CLI_ section, uncomment the `"${workspaceRoot}/../codeql"` line. If you've cloned the `github/codeql` repo to a different path, replace the value with the correct path.

3. Run the VSCode task from the "Run and Debug" view called _Launch Integration Tests - With CLI_.
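As a rough illustration, a VSCode integration test runs inside an Extension Development Host and can use the `vscode` API directly. The Mocha-style sketch below is illustrative only; the extension ID and the assertion are assumptions for the example, not code taken from this repository.

```ts
import * as assert from 'assert';
import * as vscode from 'vscode';

describe('extension activation', function () {
  // Integration tests can be slow while the Extension Development Host starts up.
  this.timeout(60_000);

  it('registers the CodeQL extension', async () => {
    // The extension ID is an assumption for this sketch; check package.json for the real publisher/name.
    const extension = vscode.extensions.getExtension('GitHub.vscode-codeql');
    assert.ok(extension, 'expected the CodeQL extension to be present');
  });
});
```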
#### Using a mock GitHub API server

Multi-Repo Variant Analyses (MRVA) rely on the GitHub API. In order to make development and testing easy, we have functionality that allows us to intercept requests to the GitHub API and provide mock responses.

##### Using a pre-recorded test scenario

To run a mock MRVA scenario, follow these steps:

1. Enable the mock GitHub API server by adding the following in your VS Code user settings (which can be found by running the `Preferences: Open User Settings (JSON)` VS Code command):

    ```json
    "codeQL.mockGitHubApiServer": {
      "enabled": true
    }
    ```

1. Run the `CodeQL: Mock GitHub API Server: Load Scenario` command from the Command Palette, and choose one of the scenarios to load.
1. Execute a normal MRVA. At this point you should see the scenario being played out, rather than an actual MRVA running.
1. Once you're done, you can stop using the mock scenario with `CodeQL: Mock GitHub API Server: Unload Scenario`.

If you want to replay the same scenario you should unload and reload it so requests are replayed from the start.

##### Recording a new test scenario

To record a new mock MRVA scenario, follow these steps:

1. Enable the mock GitHub API server by adding the following in your VS Code user settings (which can be found by running the `Preferences: Open User Settings (JSON)` VS Code command):

    ```json
    "codeQL.mockGitHubApiServer": {
      "enabled": true
    }
    ```

1. Run the `CodeQL: Mock GitHub API Server: Start Scenario Recording` VS Code command from the Command Palette.
1. Execute a normal MRVA.
1. Once what you wanted to record is done (e.g. the MRVA has finished), run the `CodeQL: Mock GitHub API Server: Save Scenario` command from the Command Palette.
1. The scenario should then be available for replaying.

If you want to cancel recording, run the `CodeQL: Mock GitHub API Server: Cancel Scenario Recording` command.

Once the scenario has been recorded, it's often useful to remove some of the requests to speed up the replay, particularly ones that fetch the variant analysis status. Once some of the request files have been manually removed, the [fix-scenario-file-numbering script](./extensions/ql-vscode/scripts/fix-scenario-file-numbering.ts) can be used to update the numbering of the files. See the script file for details on how to use it.

#### Scenario data location

Pre-recorded scenarios are stored in `./src/mocks/scenarios`. However, it's possible to configure the location by setting the `codeQL.mockGitHubApiServer.scenariosPath` configuration property in the VS Code user settings.
## Releasing (write access required)

1. Double-check the `CHANGELOG.md` contains all desired change comments and has the version to be released with date at the top.
    * Go through all recent PRs and make sure they are properly accounted for.
    * Make sure all changelog entries have links back to their PR(s) if appropriate.
1. Double-check that the node version we're using matches the one used for VS Code. If it doesn't, you will then need to update the node version in the following files:
    * `.nvmrc` - this will enable `nvm` to automatically switch to the correct node version when you're in the project folder
    * `.github/workflows/main.yml` - all the "node-version: <version>" settings
    * `.github/workflows/release.yml` - the "node-version: <version>" setting
1. Double-check that the extension `package.json` and `package-lock.json` have the version you intend to release. If you are doing a patch release (as opposed to minor or major version) this should already be correct.
1. Create a PR for this release:
    * This PR will contain any missing bits from steps 1 and 2. Most of the time, this will just be updating `CHANGELOG.md` with today's date.

@@ -100,19 +203,40 @@ Alternatively, you can run the tests inside of vscode. There are several vscode

    * Create a new commit with a message the same as the branch name.
    * Create a PR for this branch.
    * Wait for the PR to be merged into `main`
1. Switch to `main` and add a new tag on the `main` branch with your new version (named after the release), e.g.

    ```bash
    git checkout main
    git tag v1.3.6
    ```

    If you've accidentally created a badly named tag, you can delete it via

    ```bash
    git tag -d badly-named-tag
    ```

1. Push the new tag up:

    a. If you're using a fork of the repo:

    ```bash
    git push upstream refs/tags/v1.3.6
    ```

    b. If you're working straight in this repo:

    ```bash
    git push origin refs/tags/v1.3.6
    ```

    This will trigger [a release build](https://github.com/github/vscode-codeql/releases) on Actions.

    * **IMPORTANT** Make sure you are on the `main` branch and your local checkout is fully updated when you add the tag.
    * If you accidentally add the tag to the wrong ref, you can just force push it to the right one later.
1. Monitor the status of the release build in the `Release` workflow in the Actions tab.
    * DO NOT approve the "publish" stages of the workflow yet.
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect, or look at the source if there's any doubt the right code is being shipped.
1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
1. Go to the actions tab of the vscode-codeql repository and select the [Release workflow](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease).
    - If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
1. Approve the deployments of the correct Release workflow. This will automatically publish to Open VSX and VS Code Marketplace.

@@ -132,12 +256,7 @@ To regenerate the Open VSX token:

1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
1. Update the secret in the `publish-open-vsx` environment in the project settings.

To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 90 days and must be regenerated.
## Resources

@@ -10,7 +10,7 @@ module.exports = {
     node: true,
     es6: true,
   },
-  extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
+  extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:jest-dom/recommended"],
   rules: {
     "@typescript-eslint/no-use-before-define": 0,
     "@typescript-eslint/no-unused-vars": [
@@ -22,8 +22,10 @@ module.exports = {
     },
   ],
   "@typescript-eslint/explicit-function-return-type": "off",
+  "@typescript-eslint/explicit-module-boundary-types": "off",
   "@typescript-eslint/no-non-null-assertion": "off",
   "@typescript-eslint/no-explicit-any": "off",
+  "@typescript-eslint/no-floating-promises": [ "error", { ignoreVoid: true } ],
   "prefer-const": ["warn", { destructuring: "all" }],
   indent: "off",
   "@typescript-eslint/indent": "off",
extensions/ql-vscode/.mocharc.json (new file)

{
  "exit": true,
  "require": [
    "test/mocha.setup.js"
  ]
}
extensions/ql-vscode/.npmrc (new file)

# Storybook requires this option to be set. See https://github.com/storybookjs/storybook/issues/18298
legacy-peer-deps=true
extensions/ql-vscode/.nvmrc (new file)

v16.14.2
extensions/ql-vscode/.storybook/main.ts (new file)

import type { StorybookConfig } from '@storybook/core-common';

const config: StorybookConfig = {
  stories: [
    '../src/**/*.stories.mdx',
    '../src/**/*.stories.@(js|jsx|ts|tsx)'
  ],
  addons: [
    '@storybook/addon-links',
    '@storybook/addon-essentials',
    '@storybook/addon-interactions',
    './vscode-theme-addon/preset.ts',
  ],
  framework: '@storybook/react',
  core: {
    builder: '@storybook/builder-webpack5'
  }
};

module.exports = config;
extensions/ql-vscode/.storybook/manager.ts (new file)

import { addons } from '@storybook/addons';
import { themes } from '@storybook/theming';

addons.setConfig({
  theme: themes.dark,
  enableShortcuts: false,
});
extensions/ql-vscode/.storybook/preview.ts (new file)

import { themes } from '@storybook/theming';
import { action } from '@storybook/addon-actions';

// Allow all stories/components to use Codicons
import '@vscode/codicons/dist/codicon.css';

// https://storybook.js.org/docs/react/configure/overview#configure-story-rendering
export const parameters = {
  // All props starting with `on` will automatically receive an action as a prop
  actions: { argTypesRegex: "^on[A-Z].*" },
  // All props matching these names will automatically get the correct control
  controls: {
    matchers: {
      color: /(background|color)$/i,
      date: /Date$/,
    },
  },
  // Use a dark theme to be aligned with VSCode
  docs: {
    theme: themes.dark,
  },
  backgrounds: {
    // The background is injected by our theme CSS files
    disable: true,
  }
};

(window as any).acquireVsCodeApi = () => ({
  postMessage: action('post-vscode-message'),
  setState: action('set-vscode-state'),
});
@@ -4,10 +4,7 @@
     "moduleResolution": "node",
     "target": "es6",
     "outDir": "out",
-    "lib": [
-      "es6",
-      "dom"
-    ],
+    "lib": ["ES2021", "dom"],
     "jsx": "react",
     "sourceMap": true,
     "rootDir": "..",
@@ -15,9 +12,8 @@
     "noUnusedLocals": true,
     "noImplicitReturns": true,
     "noFallthroughCasesInSwitch": true,
-    "experimentalDecorators": true
+    "experimentalDecorators": true,
+    "skipLibCheck": true
   },
-  "exclude": [
-    "node_modules"
-  ]
-}
+  "exclude": ["node_modules"]
+}
@@ -0,0 +1,49 @@ (new file)

import * as React from 'react';
import { FunctionComponent, useCallback } from 'react';

import { useGlobals } from '@storybook/api';
import { IconButton, Icons, WithTooltip, TooltipLinkList, Link, WithHideFn } from '@storybook/components';

import { themeNames, VSCodeTheme } from './theme';

export const ThemeSelector: FunctionComponent = () => {
  const [{ vscodeTheme }, updateGlobals] = useGlobals();

  const changeTheme = useCallback((theme: VSCodeTheme) => {
    updateGlobals({
      vscodeTheme: theme,
    });
  }, [updateGlobals]);

  const createLinks = useCallback((onHide: () => void): Link[] => Object.values(VSCodeTheme).map((theme) => ({
    id: theme,
    onClick() {
      changeTheme(theme);
      onHide();
    },
    title: themeNames[theme],
    value: theme,
    active: vscodeTheme === theme,
  })), [vscodeTheme, changeTheme]);

  return (
    <WithTooltip
      placement="top"
      trigger="click"
      closeOnClick
      tooltip={({ onHide }: WithHideFn) => (
        <TooltipLinkList
          links={createLinks(onHide)}
        />
      )}
    >
      <IconButton
        key="theme"
        title="Change the theme of the preview"
        active={vscodeTheme !== VSCodeTheme.Dark}
      >
        <Icons icon="dashboard" />
      </IconButton>
    </WithTooltip>
  );
};
@@ -0,0 +1,14 @@ (new file)

import * as React from 'react';
import { addons, types } from '@storybook/addons';
import { ThemeSelector } from './ThemeSelector';

const ADDON_ID = 'vscode-theme-addon';

addons.register(ADDON_ID, () => {
  addons.add(ADDON_ID, {
    title: 'VSCode Themes',
    type: types.TOOL,
    match: ({ viewMode }) => !!(viewMode && viewMode.match(/^(story|docs)$/)),
    render: () => <ThemeSelector />,
  });
});
@@ -0,0 +1,7 @@ (new file)

export function config(entry = []) {
  return [...entry, require.resolve("./preview.ts")];
}

export function managerEntries(entry = []) {
  return [...entry, require.resolve("./manager.tsx")];
}
@@ -0,0 +1,8 @@ (new file)

import { withTheme } from './withTheme';
import { VSCodeTheme } from './theme';

export const decorators = [withTheme];

export const globals = {
  vscodeTheme: VSCodeTheme.Dark,
};
@@ -0,0 +1,9 @@ (new file)

export enum VSCodeTheme {
  Dark = 'dark',
  Light = 'light',
}

export const themeNames: { [key in VSCodeTheme]: string } = {
  [VSCodeTheme.Dark]: 'Dark+',
  [VSCodeTheme.Light]: 'Light+',
}
@@ -0,0 +1,36 @@ (new file)

import { useEffect, useGlobals } from '@storybook/addons';
import type { AnyFramework, PartialStoryFn as StoryFunction, StoryContext } from '@storybook/csf';

import { VSCodeTheme } from './theme';

const themeFiles: { [key in VSCodeTheme]: string } = {
  [VSCodeTheme.Dark]: require('!file-loader?modules!../../src/stories/vscode-theme-dark.css').default,
  [VSCodeTheme.Light]: require('!file-loader?modules!../../src/stories/vscode-theme-light.css').default,
}

export const withTheme = (
  StoryFn: StoryFunction<AnyFramework>,
  context: StoryContext<AnyFramework>
) => {
  const [{ vscodeTheme }] = useGlobals();

  useEffect(() => {
    const styleSelectorId =
      context.viewMode === 'docs'
        ? `addon-vscode-theme-docs-${context.id}`
        : `addon-vscode-theme-theme`;

    const theme = Object.values(VSCodeTheme).includes(vscodeTheme) ? vscodeTheme as VSCodeTheme : VSCodeTheme.Dark;

    document.getElementById(styleSelectorId)?.remove();

    const link = document.createElement('link');
    link.id = styleSelectorId;
    link.href = themeFiles[theme];
    link.rel = 'stylesheet';

    document.head.appendChild(link);
  }, [vscodeTheme]);

  return StoryFn();
};
@@ -1,5 +1,173 @@

# CodeQL for Visual Studio Code: Changelog

## 1.7.4 - 29 October 2022

No user facing changes.

## 1.7.3 - 28 October 2022

- Fix a bug where databases may be lost if VS Code is restarted while the extension is being started up. [#1638](https://github.com/github/vscode-codeql/pull/1638)
- Add commands for navigating up, down, left, or right in the result viewer. Previously there were only commands for moving up and down the currently-selected path. We suggest binding keyboard shortcuts to these commands, for navigating the result viewer using the keyboard. [#1568](https://github.com/github/vscode-codeql/pull/1568)

## 1.7.2 - 14 October 2022

- Fix a bug where results created in older versions were thought to be unsuccessful. [#1605](https://github.com/github/vscode-codeql/pull/1605)

## 1.7.1 - 12 October 2022

- Fix a bug where it was not possible to add a database folder if the folder name starts with `db-`. [#1565](https://github.com/github/vscode-codeql/pull/1565)
- Ensure the results view opens in an editor column beside the currently active editor. [#1557](https://github.com/github/vscode-codeql/pull/1557)

## 1.7.0 - 20 September 2022

- Remove ability to download databases from LGTM. [#1467](https://github.com/github/vscode-codeql/pull/1467)
- Remove the ability to manually upgrade databases from the context menu on databases. Databases are non-destructively upgraded automatically so for most users this was not needed. For advanced users this is still available in the Command Palette. [#1501](https://github.com/github/vscode-codeql/pull/1501)
- Always restart the query server after a manual database upgrade. This avoids a bug in the query server where an invalid dbscheme was being retained in memory after an upgrade. [#1519](https://github.com/github/vscode-codeql/pull/1519)

## 1.6.12 - 1 September 2022

- Add ability for users to download databases directly from GitHub. [#1485](https://github.com/github/vscode-codeql/pull/1485)
- Fix a race condition that could cause a failure to open the evaluator log when running a query. [#1490](https://github.com/github/vscode-codeql/pull/1490)
- Fix an error when running a query with an older version of the CodeQL CLI. [#1490](https://github.com/github/vscode-codeql/pull/1490)

## 1.6.11 - 25 August 2022

No user facing changes.

## 1.6.10 - 9 August 2022

No user facing changes.

## 1.6.9 - 20 July 2022

No user facing changes.

## 1.6.8 - 29 June 2022

- Fix a bug where quick queries cannot be compiled if the core libraries are not in the workspace. [#1411](https://github.com/github/vscode-codeql/pull/1411)
- Fix a bug where quick evaluation of library files would display an error message when using CodeQL CLI v2.10.0. [#1412](https://github.com/github/vscode-codeql/pull/1412)

## 1.6.7 - 15 June 2022

- Prints end-of-query evaluator log summaries to the Query Log. [#1349](https://github.com/github/vscode-codeql/pull/1349)
- Be consistent about casing in Query History menu. [#1369](https://github.com/github/vscode-codeql/pull/1369)
- Fix quoting string columns in exported CSV results. [#1379](https://github.com/github/vscode-codeql/pull/1379)

## 1.6.6 - 17 May 2022

No user facing changes.

## 1.6.5 - 25 April 2022

- Re-enable publishing to open-vsx. [#1285](https://github.com/github/vscode-codeql/pull/1285)

## 1.6.4 - 6 April 2022

No user facing changes.

## 1.6.3 - 4 April 2022

- Fix a bug where the AST viewer was not synchronizing its selected node when the editor selection changes. [#1230](https://github.com/github/vscode-codeql/pull/1230)
- Avoid synchronizing the `codeQL.cli.executablePath` setting. [#1252](https://github.com/github/vscode-codeql/pull/1252)
- Open the directory in the finder/explorer (instead of just highlighting it) when running the "Open query directory" command from the query history view. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Ensure query label changes in the query history view are persisted across restarts. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Prints end-of-query evaluator log summaries to the Query Server Console. [#1264](https://github.com/github/vscode-codeql/pull/1264)

## 1.6.1 - 17 March 2022

No user facing changes.

## 1.6.0 - 7 March 2022

- Fix a bug where database upgrades could not be resolved if some of the target pack's dependencies are outside of the workspace. [#1138](https://github.com/github/vscode-codeql/pull/1138)
- Open the query server logs for query errors (instead of the extension log). This will make it easier to track down query errors. [#1158](https://github.com/github/vscode-codeql/pull/1158)
- Fix a bug where queries took a long time to run if there are no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
- [BREAKING CHANGE] The `codeQL.runningQueries.customLogDirectory` setting is deprecated and no longer has any function. Instead, all query log files will be stored in the query history directory, next to the query results. [#1178](https://github.com/github/vscode-codeql/pull/1178)
- Add an _Open query directory_ command for query items. This command opens the directory containing all artifacts for a query. [#1179](https://github.com/github/vscode-codeql/pull/1179)
- Add options to display evaluator logs for a given query run. Some information that was previously found in the query server output may now be found here. [#1186](https://github.com/github/vscode-codeql/pull/1186)

## 1.5.11 - 10 February 2022

- Fix a bug where invoking _View AST_ from the file explorer would not view the selected file. Instead it would view the active editor. Also, prevent the _View AST_ from appearing if the current selection includes a directory or multiple files. [#1113](https://github.com/github/vscode-codeql/pull/1113)
- Add query history items as soon as a query is run, including new icons for each history item. [#1094](https://github.com/github/vscode-codeql/pull/1094)
- Save query history items across restarts. Items will be saved for 30 days and can be overwritten by setting the `codeQL.queryHistory.ttl` configuration setting. [#1130](https://github.com/github/vscode-codeql/pull/1130)
- Allow in-progress query items to be cancelled from the query history view. [#1105](https://github.com/github/vscode-codeql/pull/1105)

## 1.5.10 - 25 January 2022

- Fix a bug where the results view moved column even when it was already visible. [#1070](https://github.com/github/vscode-codeql/pull/1070)
- Add packaging-related commands. _CodeQL: Download Packs_ downloads query packs from the package registry that can be run locally, and _CodeQL: Install Pack Dependencies_ installs dependencies for packs in your workspace. [#1076](https://github.com/github/vscode-codeql/pull/1076)

## 1.5.9 - 17 December 2021

- Avoid creating a third column when opening the results view. The results view will always open to the right of the active editor, unless the active editor is in the rightmost editor column. In that case open in the leftmost column. [#1037](https://github.com/github/vscode-codeql/pull/1037)
- Add a CodeLens to make the Quick Evaluation command more accessible. Click the `Quick Evaluation` prompt above a predicate definition in the editor to evaluate that predicate on its own. You can enable/disable this feature in the `codeQL.runningQueries.quickEvalCodelens` setting. [#1035](https://github.com/github/vscode-codeql/pull/1035) & [#1052](https://github.com/github/vscode-codeql/pull/1052)
- Fix a bug where the _Alerts_ option would show in the results view even if there is no alerts table available. [#1038](https://github.com/github/vscode-codeql/pull/1038)

## 1.5.8 - 2 December 2021

- Emit a more explicit error message when a user tries to add a database with an unzipped source folder to the workspace. [#1021](https://github.com/github/vscode-codeql/pull/1021)
- Ensure `src.zip` archives are used as the canonical source instead of `src` folders when importing databases. [#1025](https://github.com/github/vscode-codeql/pull/1025)

## 1.5.7 - 23 November 2021

- Fix the _CodeQL: Open Referenced File_ command for Windows systems. [#979](https://github.com/github/vscode-codeql/pull/979)
- Support large SARIF results files (>4GB) without crashing VS Code. [#1004](https://github.com/github/vscode-codeql/pull/1004)
- Fix a bug that shows 'Set current database' when hovering over the currently selected database in the databases view. [#976](https://github.com/github/vscode-codeql/pull/976)
- Fix a bug with importing large databases. Databases over 4GB can now be imported directly from LGTM or from a zip file. This functionality is only available when using CodeQL CLI version 2.6.0 or later. [#971](https://github.com/github/vscode-codeql/pull/971)
- Replace certain control codes (`U+0000` - `U+001F`) with their corresponding control labels (`U+2400` - `U+241F`) in the results view. [#963](https://github.com/github/vscode-codeql/pull/963)
- Allow case-insensitive project slugs for GitHub repositories when adding a CodeQL database from LGTM. [#978](https://github.com/github/vscode-codeql/pull/961)
- Add a _CodeQL: Preview Query Help_ command to generate Markdown previews of `.qhelp` query help files. This command should only be run in trusted workspaces. See [the CodeQL CLI docs](https://codeql.github.com/docs/codeql-cli/testing-query-help-files) for more information about query help. [#988](https://github.com/github/vscode-codeql/pull/988)
- Make "Open Referenced File" command accessible from the active editor menu. [#989](https://github.com/github/vscode-codeql/pull/989)
- Fix a bug where result set names in the result set drop-down were disappearing when viewing a sorted table. [#1007](https://github.com/github/vscode-codeql/pull/1007)
- Allow query result locations with 0 as the end column value. These are treated as the first column in the line. [#1002](https://github.com/github/vscode-codeql/pull/1002)

## 1.5.6 - 07 October 2021

- Add progress messages to LGTM download option. This makes the two-step process (selecting a project, then selecting a language) more clear. [#960](https://github.com/github/vscode-codeql/pull/960)
- Remove line about selecting a language from the dropdown when downloading database from LGTM. This makes the download progress visible when the popup is not expanded. [#957](https://github.com/github/vscode-codeql/pull/957)
- Fix a bug where copying the version information fails when a CodeQL CLI cannot be found. [#958](https://github.com/github/vscode-codeql/pull/958)
- Avoid a race condition when deleting databases that can cause occasional errors. [#959](https://github.com/github/vscode-codeql/pull/959)
- Update CodeQL logos. [#965](https://github.com/github/vscode-codeql/pull/965)

## 1.5.5 - 08 September 2021

- Fix bug where a query is sometimes run before the file is saved. [#947](https://github.com/github/vscode-codeql/pull/947)
- Fix broken contextual queries, including _View AST_. [#949](https://github.com/github/vscode-codeql/pull/949)

## 1.5.4 - 02 September 2021

- Add support for filename pattern in history view. [#930](https://github.com/github/vscode-codeql/pull/930)
- Add an option _View Results (CSV)_ to view the results of a non-alert query. The existing options for alert queries have been renamed to _View Alerts_ to avoid confusion. [#929](https://github.com/github/vscode-codeql/pull/929)
- Allow users to specify the number of paths to display for each alert. [#931](https://github.com/github/vscode-codeql/pull/931)
- Adjust pagination controls in _CodeQL Query Results_ to always be visible [#936](https://github.com/github/vscode-codeql/pull/936)
- Fix bug where _View AST_ fails due to recent refactoring in the standard library and query packs. [#939](https://github.com/github/vscode-codeql/pull/939)

## 1.5.3 - 18 August 2021

- Add a command _CodeQL: Run Query on Multiple Databases_, which lets users select multiple databases to run a query on. [#898](https://github.com/github/vscode-codeql/pull/898)
- Autodetect what language a query targets. This refines the _CodeQL: Run Query on Multiple Databases_ command to only show relevant databases. [#915](https://github.com/github/vscode-codeql/pull/915)
- Adjust test log output to display diffs only when comparing failed test results with expected test results. [#920](https://github.com/github/vscode-codeql/pull/920)

## 1.5.2 - 13 July 2021

- Add the _Add Database Source to Workspace_ command to the right-click context menu in the databases view. This lets users re-add a database's source folder to the workspace and browse the source code. [#891](https://github.com/github/vscode-codeql/pull/891)
- Fix markdown rendering in the description of the `codeQL.cli.executablePath` setting. [#908](https://github.com/github/vscode-codeql/pull/908)
- Fix the _Open Query Results_ command in the query history view. [#909](https://github.com/github/vscode-codeql/pull/909)

## 1.5.1 - 23 June 2021

No user facing changes.

## 1.5.0 - 14 June 2021

- Display CodeQL CLI version being downloaded during an upgrade. [#862](https://github.com/github/vscode-codeql/pull/862)
- Display a helpful message and link to documentation when a query produces no results. [#866](https://github.com/github/vscode-codeql/pull/866)
- Refresh test databases automatically after a test run. [#868](https://github.com/github/vscode-codeql/pull/868)
- Allow users to specify a custom directory for storing query server logs (`codeQL.runningQueries.customLogDirectory`). The extension will not delete these logs automatically. [#863](https://github.com/github/vscode-codeql/pull/863)
- Support the VS Code [Workspace Trust feature](https://code.visualstudio.com/docs/editor/workspace-trust). This extension is now enabled in untrusted workspaces, but it restricts commands that contain arbitrary paths. [#861](https://github.com/github/vscode-codeql/pull/861)
- Allow the `codeQL.cli.executablePath` configuration setting to be set in workspace-scoped configuration files. This means that each workspace can now specify its own CodeQL CLI compiler, a feature that is unblocked due to implementing Workspace Trust. [#861](https://github.com/github/vscode-codeql/pull/861)

## 1.4.8 - 05 May 2021

- Copy version information to the clipboard when a user clicks the CodeQL section of the status bar. [#845](https://github.com/github/vscode-codeql/pull/845)
@@ -22,7 +22,7 @@ For information about other configurations, see the separate [CodeQL help](https

### Quick start: Using CodeQL

1. [Import a database from GitHub](#importing-a-database-from-github).
1. [Run a query](#running-a-query).

---

@@ -73,18 +73,19 @@ If you're using your own clone of the CodeQL standard libraries, you can do a `g

You can find all the commands contributed by the extension in the Command Palette (**Ctrl+Shift+P** or **Cmd+Shift+P**) by typing `CodeQL`; many of them are also accessible through the interface, and via keyboard shortcuts.

### Importing a database from GitHub

While you can use the [CodeQL CLI to create your own databases](https://codeql.github.com/docs/codeql-cli/creating-codeql-databases/), the simplest way to start is by downloading a database from GitHub.com.

1. Find a project that you're interested in on GitHub.com, for example [Apache Kafka](https://github.com/apache/kafka).
1. Copy the link to that project, for example `https://github.com/apache/kafka`.
1. In VS Code, open the Command Palette and choose the **CodeQL: Download Database from GitHub** command.
1. Paste the link you copied earlier.
1. Select the language for the database you want to download (only required if the project has databases for multiple languages).
1. Once the CodeQL database has been imported, it is displayed in the Databases view.

For more information, see [Choosing a database](https://codeql.github.com/docs/codeql-for-visual-studio-code/analyzing-your-projects/#choosing-a-database) on codeql.github.com.

### Running a query

The instructions below assume that you're using the CodeQL starter workspace, or that you've added the CodeQL libraries and queries repository to your workspace.

@@ -98,6 +99,10 @@ When the results are ready, they're displayed in the CodeQL Query Results view.

If there are any problems running a query, a notification is displayed in the bottom right corner of the application. In addition to the error message, the notification includes details of how to fix the problem.

### Keyboard navigation

If you wish to navigate the query results from your keyboard, you can bind shortcuts to the **CodeQL: Navigate Up/Down/Left/Right in Result Viewer** commands.

## What next?

For more information about the CodeQL extension, [see the documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/). Otherwise, you could:
@@ -1,5 +1,6 @@
 import * as gulp from 'gulp';
-import * as replace from 'gulp-replace';
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const replace = require('gulp-replace');
 
 /** Inject the application insights key into the telemetry file */
 export function injectAppInsightsKey() {
@@ -1,5 +1,4 @@
 import * as fs from 'fs-extra';
-import * as jsonc from 'jsonc-parser';
 import * as path from 'path';
 
 export interface DeployedPackage {
@@ -28,7 +27,7 @@ async function copyPackage(sourcePath: string, destPath: string): Promise<void>
 
 export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
   try {
-    const packageJson: any = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));
+    const packageJson: any = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
 
     // Default to development build; use flag --release to indicate release build.
     const isDevBuild = !process.argv.includes('--release');
@@ -1,15 +1,27 @@
 import * as gulp from 'gulp';
-import { compileTypeScript, watchTypeScript, copyViewCss } from './typescript';
+import { compileTypeScript, watchTypeScript, cleanOutput } from './typescript';
 import { compileTextMateGrammar } from './textmate';
 import { copyTestData } from './tests';
-import { compileView } from './webpack';
+import { compileView, watchView } from './webpack';
 import { packageExtension } from './package';
 import { injectAppInsightsKey } from './appInsights';
 
 export const buildWithoutPackage =
-  gulp.parallel(
-    compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss
+  gulp.series(
+    cleanOutput,
+    gulp.parallel(
+      compileTypeScript, compileTextMateGrammar, compileView, copyTestData
+    )
   );
 
-export { compileTextMateGrammar, watchTypeScript, compileTypeScript, copyTestData, injectAppInsightsKey };
+export {
+  cleanOutput,
+  compileTextMateGrammar,
+  watchTypeScript,
+  watchView,
+  compileTypeScript,
+  copyTestData,
+  injectAppInsightsKey,
+  compileView,
+};
 export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
@@ -219,14 +219,14 @@ function transformFile(yaml: any) {
 }
 
 export function transpileTextMateGrammar() {
-  return through.obj((file: Vinyl, _encoding: string, callback: Function): void => {
+  return through.obj((file: Vinyl, _encoding: string, callback: (err: string | null, file: Vinyl | PluginError) => void): void => {
     if (file.isNull()) {
       callback(null, file);
     }
     else if (file.isBuffer()) {
       const buf: Buffer = file.contents;
       const yamlText: string = buf.toString('utf8');
-      const jsonData: any = jsYaml.safeLoad(yamlText);
+      const jsonData: any = jsYaml.load(yamlText);
       transformFile(jsonData);
 
       file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), 'utf8');
@@ -5,7 +5,7 @@
     "strict": true,
     "module": "commonjs",
     "target": "es2017",
-    "lib": ["es6"],
+    "lib": ["ES2021"],
     "moduleResolution": "node",
     "sourceMap": true,
     "rootDir": ".",
@@ -16,7 +16,8 @@
     "noImplicitReturns": true,
     "experimentalDecorators": true,
     "noUnusedLocals": true,
-    "noUnusedParameters": true
+    "noUnusedParameters": true,
+    "esModuleInterop": true
   },
   "include": ["*.ts"]
 }
@@ -2,6 +2,7 @@ import * as colors from 'ansi-colors';
 import * as gulp from 'gulp';
 import * as sourcemaps from 'gulp-sourcemaps';
 import * as ts from 'gulp-typescript';
+import * as del from 'del';
 
 function goodReporter(): ts.reporter.Reporter {
   return {
@@ -20,6 +21,10 @@ function goodReporter(): ts.reporter.Reporter {
 
 const tsProject = ts.createProject('tsconfig.json');
 
+export function cleanOutput() {
+  return tsProject.projectDirectory ? del(tsProject.projectDirectory + '/out/*') : Promise.resolve();
+}
+
 export function compileTypeScript() {
   return tsProject.src()
     .pipe(sourcemaps.init())
@@ -34,9 +39,3 @@ export function compileTypeScript() {
 export function watchTypeScript() {
   gulp.watch('src/**/*.ts', compileTypeScript);
 }
-
-/** Copy CSS files for the results view into the output directory. */
-export function copyViewCss() {
-  return gulp.src('src/view/*.css')
-    .pipe(gulp.dest('out'));
-}
@@ -1,11 +1,11 @@
 import * as path from 'path';
 import * as webpack from 'webpack';
+import * as MiniCssExtractPlugin from 'mini-css-extract-plugin';
 
 export const config: webpack.Configuration = {
   mode: 'development',
   entry: {
-    resultsView: './src/view/results.tsx',
-    compareView: './src/compare/view/Compare.tsx',
+    webview: './src/view/webview.tsx'
   },
   output: {
     path: path.resolve(__dirname, '..', 'out'),
@@ -30,9 +30,7 @@ export const config: webpack.Configuration = {
       {
         test: /\.less$/,
         use: [
-          {
-            loader: 'style-loader'
-          },
+          MiniCssExtractPlugin.loader,
           {
             loader: 'css-loader',
             options: {
@@ -52,17 +50,31 @@ export const config: webpack.Configuration = {
       {
         test: /\.css$/,
         use: [
-          {
-            loader: 'style-loader'
-          },
+          MiniCssExtractPlugin.loader,
           {
             loader: 'css-loader'
           }
         ]
+      },
+      {
+        test: /\.(woff(2)?|ttf|eot)$/,
+        use: [
+          {
+            loader: 'file-loader',
+            options: {
+              name: '[name].[ext]',
+              outputPath: 'fonts/',
+              // We need this to make Webpack use the correct path for the fonts.
+              // Without this, the CSS file will use `url([object Module])`
+              esModule: false
+            }
+          },
+        ],
       }
     ]
   },
   performance: {
     hints: false
-  }
+  },
+  plugins: [new MiniCssExtractPlugin()],
 };
@@ -2,7 +2,23 @@ import * as webpack from 'webpack';
 import { config } from './webpack.config';
 
 export function compileView(cb: (err?: Error) => void) {
-  webpack(config).run((error, stats) => {
+  doWebpack(config, true, cb);
+}
+
+export function watchView(cb: (err?: Error) => void) {
+  const watchConfig = {
+    ...config,
+    watch: true,
+    watchOptions: {
+      aggregateTimeout: 200,
+      poll: 1000,
+    }
+  };
+  doWebpack(watchConfig, false, cb);
+}
+
+function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
+  const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
     if (error) {
       cb(error);
     }
@@ -20,11 +36,16 @@ export function compileView(cb: (err?: Error) => void) {
         errors: true
       }));
       if (stats.hasErrors()) {
-        cb(new Error('Compilation errors detected.'));
-        return;
+        if (failOnError) {
+          cb(new Error('Compilation errors detected.'));
+          return;
+        } else {
+          console.error('Compilation errors detected.');
+        }
       }
+      cb();
     }
+  };
 
-    cb();
-  });
+  webpack(internalConfig, resultCb);
 }
extensions/ql-vscode/jest.config.js (new file)

/*
 * For a detailed explanation regarding each configuration property and type check, visit:
 * https://jestjs.io/docs/configuration
 */

module.exports = {
  // All imported modules in your tests should be mocked automatically
  // automock: false,

  // Stop running tests after `n` failures
  // bail: 0,

  // The directory where Jest should store its cached dependency information
  // cacheDirectory: "/private/var/folders/6m/1394pht172qgd7dmw1fwjk100000gn/T/jest_dx",

  // Automatically clear mock calls, instances, contexts and results before every test
  // clearMocks: true,

  // Indicates whether the coverage information should be collected while executing the test
  // collectCoverage: false,

  // An array of glob patterns indicating a set of files for which coverage information should be collected
  // collectCoverageFrom: undefined,

  // The directory where Jest should output its coverage files
  // coverageDirectory: undefined,

  // An array of regexp pattern strings used to skip coverage collection
  // coveragePathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // Indicates which provider should be used to instrument code for coverage
  coverageProvider: 'v8',

  // A list of reporter names that Jest uses when writing coverage reports
  // coverageReporters: [
  //   "json",
  //   "text",
  //   "lcov",
  //   "clover"
  // ],

  // An object that configures minimum threshold enforcement for coverage results
  // coverageThreshold: undefined,

  // A path to a custom dependency extractor
  // dependencyExtractor: undefined,

  // Make calling deprecated APIs throw helpful error messages
  // errorOnDeprecated: false,

  // The default configuration for fake timers
  // fakeTimers: {
  //   "enableGlobally": false
  // },

  // Force coverage collection from ignored files using an array of glob patterns
  // forceCoverageMatch: [],

  // A path to a module which exports an async function that is triggered once before all test suites
  // globalSetup: undefined,

  // A path to a module which exports an async function that is triggered once after all test suites
  // globalTeardown: undefined,

  // A set of global variables that need to be available in all test environments
  // globals: {},

  // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
  // maxWorkers: "50%",

  // An array of directory names to be searched recursively up from the requiring module's location
  // moduleDirectories: [
  //   "node_modules"
  // ],

  // An array of file extensions your modules use
  moduleFileExtensions: [
    'js',
    'mjs',
    'cjs',
    'jsx',
    'ts',
    'tsx',
    'json'
  ],

  // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
  'moduleNameMapper': {
    '\\.(jpg|jpeg|png|gif|eot|otf|webp|svg|ttf|woff|woff2|mp4|webm|wav|mp3|m4a|aac|oga)$': '<rootDir>/test/__mocks__/fileMock.ts',
    '\\.(css|less)$': '<rootDir>/test/__mocks__/styleMock.ts'
  },

  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
  // modulePathIgnorePatterns: [],

  // Activates notifications for test results
  // notify: false,

  // An enum that specifies notification mode. Requires { notify: true }
  // notifyMode: "failure-change",

  // A preset that is used as a base for Jest's configuration
  preset: 'ts-jest',

  // Run tests from one or more projects
  // projects: undefined,

  // Use this configuration option to add custom reporters to Jest
  // reporters: undefined,

  // Automatically reset mock state before every test
  // resetMocks: false,

  // Reset the module registry before running each individual test
  // resetModules: false,

  // A path to a custom resolver
  // resolver: undefined,

  // Automatically restore mock state and implementation before every test
  // restoreMocks: false,

  // The root directory that Jest should scan for tests and modules within
  // rootDir: undefined,

  // A list of paths to directories that Jest should use to search for files in
  // roots: [
  //   "<rootDir>"
  // ],

  // Allows you to use a custom runner instead of Jest's default test runner
  // runner: "jest-runner",

  // The paths to modules that run some code to configure or set up the testing environment before each test
  // setupFiles: [],

  // A list of paths to modules that run some code to configure or set up the testing framework before each test
  setupFilesAfterEnv: ['<rootDir>/test/jest.setup.ts'],

  // The number of seconds after which a test is considered as slow and reported as such in the results.
  // slowTestThreshold: 5,

  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
  // snapshotSerializers: [],

  // The test environment that will be used for testing
  testEnvironment: 'jsdom',

  // Options that will be passed to the testEnvironment
  // testEnvironmentOptions: {},

  // Adds a location field to test results
  // testLocationInResults: false,

  // The glob patterns Jest uses to detect test files
  testMatch: [
    '**/__tests__/**/*.[jt]s?(x)'
  ],

  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
  // testPathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // The regexp pattern or array of patterns that Jest uses to detect test files
  // testRegex: [],

  // This option allows the use of a custom results processor
  // testResultsProcessor: undefined,

  // This option allows use of a custom test runner
  // testRunner: "jest-circus/runner",

  // A map from regular expressions to paths to transformers
  transform: {
    '^.+\\.tsx?$': [
      'ts-jest',
      {
        tsconfig: 'src/view/tsconfig.spec.json',
      },
    ],
    'node_modules': [
      'babel-jest',
      {
        presets: [
          '@babel/preset-env'
        ],
|
plugins: [
|
||||||
|
'@babel/plugin-transform-modules-commonjs',
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||||
|
'transformIgnorePatterns': [
|
||||||
|
// These use ES modules, so need to be transformed
|
||||||
|
'node_modules/(?!(?:@vscode/webview-ui-toolkit|@microsoft/.+|exenv-es6)/.*)'
|
||||||
|
],
|
||||||
|
|
||||||
|
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||||
|
// unmockedModulePathPatterns: undefined,
|
||||||
|
|
||||||
|
// Indicates whether each individual test should be reported during the run
|
||||||
|
// verbose: undefined,
|
||||||
|
|
||||||
|
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||||
|
// watchPathIgnorePatterns: [],
|
||||||
|
|
||||||
|
// Whether to use watchman for file crawling
|
||||||
|
// watchman: true,
|
||||||
|
};
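The two moduleNameMapper targets referenced above are not shown in this diff. A minimal sketch of what such stub modules typically look like; the file paths come from the config, the bodies are an assumption:

// test/__mocks__/fileMock.ts: stands in for image, font and media imports (assumed implementation)
export default 'test-file-stub';

// test/__mocks__/styleMock.ts: stands in for .css/.less imports (assumed implementation)
export default {};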
Image preview: Before 499 KiB, After 31 KiB
4
extensions/ql-vscode/media/dark/github.svg
Normal file
@@ -0,0 +1,4 @@
<!-- From https://github.com/microsoft/vscode-icons -->
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97553 0C3.57186 0 0 3.57186 0 7.97553C0 11.4985 2.29969 14.4832 5.43119 15.5596C5.82263 15.6086 5.96942 15.3639 5.96942 15.1682C5.96942 14.9725 5.96942 14.4832 5.96942 13.7982C3.76758 14.2875 3.27829 12.7217 3.27829 12.7217C2.93578 11.792 2.39755 11.5474 2.39755 11.5474C1.66361 11.0581 2.44648 11.0581 2.44648 11.0581C3.22936 11.107 3.66972 11.8899 3.66972 11.8899C4.40367 13.1131 5.52905 12.7706 5.96942 12.5749C6.01835 12.0367 6.263 11.6942 6.45872 11.4985C4.69725 11.3028 2.83792 10.6177 2.83792 7.53517C2.83792 6.65443 3.1315 5.96942 3.66972 5.38226C3.62079 5.23547 3.32722 4.40367 3.76758 3.32722C3.76758 3.32722 4.4526 3.1315 5.96942 4.15902C6.6055 3.9633 7.29052 3.91437 7.97553 3.91437C8.66055 3.91437 9.34557 4.01223 9.98165 4.15902C11.4985 3.1315 12.1835 3.32722 12.1835 3.32722C12.6239 4.40367 12.3303 5.23547 12.2813 5.43119C12.7706 5.96942 13.1131 6.70336 13.1131 7.5841C13.1131 10.6667 11.2538 11.3028 9.49235 11.4985C9.78593 11.7431 10.0306 12.2324 10.0306 12.9664C10.0306 14.0428 10.0306 14.8746 10.0306 15.1682C10.0306 15.3639 10.1774 15.6086 10.5688 15.5596C13.7492 14.4832 16 11.4985 16 7.97553C15.9511 3.57186 12.3792 0 7.97553 0Z" fill="#C5C5C5"/>
</svg>
Image preview: After 1.3 KiB
7
extensions/ql-vscode/media/drive.svg
Normal file
@@ -0,0 +1,7 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M15.5 12.1952C15.5 12.9126 14.9137 13.4996 14.1957 13.4996H1.80435C1.08696 13.4996 0.5 12.9126 0.5 12.1952L0.5 9.80435C0.5 9.08696 1.08696 8.5 1.80435 8.5H14.1956C14.9137 8.5 15.5 9.08696 15.5 9.80435L15.5 12.1952Z" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2.45654 11.5H13.5435" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.5 9.5C13.224 9.5 13 9.725 13 10C13 10.275 13.224 10.5 13.5 10.5C13.776 10.5 14 10.275 14 10C14 9.725 13.776 9.5 13.5 9.5" fill="#959DA5"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.5 9.5C11.224 9.5 11 9.725 11 10C11 10.275 11.224 10.5 11.5 10.5C11.776 10.5 12 10.275 12 10C12 9.725 11.776 9.5 11.5 9.5" fill="#959DA5"/>
<path d="M15.5 9.81464L13.8728 2.76261C13.6922 2.06804 12.9572 1.5 12.2391 1.5H3.76087C3.04348 1.5 2.30848 2.06804 2.12783 2.76261L0.5 9.8" stroke="#959DA5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Image preview: After 1.1 KiB
16
extensions/ql-vscode/media/globe.svg
Normal file
@@ -0,0 +1,16 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<circle cx="7.5" cy="7.5" r="7" stroke="#959DA5"/>
<mask id="mask0_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="0" y="0" width="15" height="15">
<circle cx="7.5" cy="7.5" r="7.5" fill="#C4C4C4"/>
</mask>
<g mask="url(#mask0_394_2982)">
<path d="M14.5 7.5C14.5 9.42971 13.6822 11.1907 12.5493 12.4721C11.4035 13.7683 10.0054 14.5 8.90625 14.5C7.84644 14.5 6.81131 13.8113 6.01569 12.5383C5.22447 11.2724 4.71875 9.49235 4.71875 7.5C4.71875 5.50765 5.22447 3.72765 6.01569 2.4617C6.81131 1.1887 7.84644 0.5 8.90625 0.5C10.0054 0.5 11.4035 1.23172 12.5493 2.52786C13.6822 3.80934 14.5 5.57029 14.5 7.5Z" stroke="#959DA5"/>
</g>
<mask id="mask1_394_2982" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="1" y="0" width="16" height="15">
<circle cx="9.375" cy="7.5" r="7.5" fill="#C4C4C4"/>
</mask>
<g mask="url(#mask1_394_2982)">
<path d="M10.2812 7.5C10.2812 9.49235 9.77553 11.2724 8.98431 12.5383C8.18869 13.8113 7.15356 14.5 6.09375 14.5C4.99456 14.5 3.5965 13.7683 2.45067 12.4721C1.31781 11.1907 0.5 9.42971 0.5 7.5C0.5 5.57029 1.31781 3.80934 2.45067 2.52786C3.5965 1.23172 4.99456 0.5 6.09375 0.5C7.15356 0.5 8.18869 1.1887 8.98431 2.4617C9.77553 3.72765 10.2812 5.50765 10.2812 7.5Z" stroke="#959DA5"/>
</g>
<line y1="7.5" x2="15" y2="7.5" stroke="#959DA5"/>
</svg>
Image preview: After 1.4 KiB
11
extensions/ql-vscode/media/light/github.svg
Normal file
@@ -0,0 +1,11 @@
<!-- From https://github.com/microsoft/vscode-icons -->
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97578 0C3.57211 0 0.000244141 3.57186 0.000244141 7.97553C0.000244141 11.4985 2.29994 14.4832 5.43144 15.5596C5.82287 15.6086 5.96966 15.3639 5.96966 15.1682C5.96966 14.9725 5.96966 14.4832 5.96966 13.7982C3.76783 14.2875 3.27853 12.7217 3.27853 12.7217C2.93602 11.792 2.3978 11.5474 2.3978 11.5474C1.66385 11.0581 2.44673 11.0581 2.44673 11.0581C3.2296 11.107 3.66997 11.8899 3.66997 11.8899C4.40391 13.1131 5.5293 12.7706 5.96966 12.5749C6.01859 12.0367 6.26324 11.6942 6.45896 11.4985C4.69749 11.3028 2.83816 10.6177 2.83816 7.53517C2.83816 6.65443 3.13174 5.96942 3.66997 5.38226C3.62104 5.23547 3.32746 4.40367 3.76783 3.32722C3.76783 3.32722 4.45284 3.1315 5.96966 4.15902C6.60575 3.9633 7.29076 3.91437 7.97578 3.91437C8.66079 3.91437 9.34581 4.01223 9.98189 4.15902C11.4987 3.1315 12.1837 3.32722 12.1837 3.32722C12.6241 4.40367 12.3305 5.23547 12.2816 5.43119C12.7709 5.96942 13.1134 6.70336 13.1134 7.5841C13.1134 10.6667 11.2541 11.3028 9.4926 11.4985C9.78618 11.7431 10.0308 12.2324 10.0308 12.9664C10.0308 14.0428 10.0308 14.8746 10.0308 15.1682C10.0308 15.3639 10.1776 15.6086 10.5691 15.5596C13.7495 14.4832 16.0002 11.4985 16.0002 7.97553C15.9513 3.57186 12.3794 0 7.97578 0Z" fill="#424242"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="16" height="16" fill="white" transform="translate(0.000244141)"/>
</clipPath>
</defs>
</svg>
Image preview: After 1.5 KiB
@@ -1,14 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<svg width="27px" height="16px" viewBox="0 0 27 16" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<path d="M8.19789 8C8.19789 8.51894 8.1155 8.97538 7.95073 9.36932C7.78595 9.76326 7.56152 10.0871 7.27743 10.3409L8.10982 11.3864H6.96209L6.49334 10.8097C6.17705 10.929 5.83709 10.9886 5.47346 10.9886C4.95641 10.9886 4.4924 10.8712 4.08141 10.6364C3.67042 10.4015 3.34562 10.0611 3.10698 9.61506C2.86834 9.16903 2.74902 8.63068 2.74902 8C2.74902 7.36553 2.86834 6.82576 3.10698 6.38068C3.34562 5.9356 3.67042 5.59612 4.08141 5.36222C4.4924 5.12831 4.95641 5.01136 5.47346 5.01136C5.98861 5.01136 6.45167 5.12831 6.86266 5.36222C7.27365 5.59612 7.59893 5.9356 7.83851 6.38068C8.0781 6.82576 8.19789 7.36553 8.19789 8ZM5.04732 8.88636H6.09277L6.52459 9.44034C6.65906 9.27936 6.7637 9.0786 6.83851 8.83807C6.91332 8.59754 6.95073 8.31818 6.95073 8C6.95073 7.38257 6.81626 6.91146 6.54732 6.58665C6.27838 6.26184 5.92043 6.09943 5.47346 6.09943C5.02648 6.09943 4.66853 6.26184 4.39959 6.58665C4.13065 6.91146 3.99618 7.38257 3.99618 8C3.99618 8.61553 4.13065 9.08617 4.39959 9.41193C4.66853 9.73769 5.02648 9.90057 5.47346 9.90057C5.5852 9.90057 5.69126 9.89015 5.79164 9.86932L5.04732 8.88636ZM9.52695 11V5H10.7571V9.9858H13.2514V11H9.52695Z" fill="#24292F"/>
<!-- Generator: Sketch 59 (86127) - https://sketch.com -->
<path fill-rule="evenodd" clip-rule="evenodd" d="M13 1.5H3C2.17157 1.5 1.5 2.17157 1.5 3V13C1.5 13.8284 2.17157 14.5 3 14.5H13C13.8284 14.5 14.5 13.8284 14.5 13V3C14.5 2.17157 13.8284 1.5 13 1.5ZM3 0C1.34315 0 0 1.34315 0 3V13C0 14.6569 1.34315 16 3 16H13C14.6569 16 16 14.6569 16 13V3C16 1.34315 14.6569 0 13 0H3Z" fill="#24292F"/>
<title>Slice</title>
</svg>
<desc>Created with Sketch.</desc>
<g id="light" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g id="QL" transform="translate(1.000000, 1.000000)">
<rect id="Rectangle-41" stroke="#2088FF" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" x="0" y="0" width="25" height="14" rx="2"></rect>
<line x1="17" y1="5" x2="19" y2="5" id="Stroke-15" stroke="#2088FF" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"></line>
<line x1="17" y1="9" x2="21" y2="9" id="Stroke-15" stroke="#2088FF" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"></line>
<path d="M8.85227273,7 C8.85227273,7.51894199 8.76988719,7.97537682 8.60511364,8.36931818 C8.44034009,8.76325955 8.21591051,9.08711994 7.93181818,9.34090909 L8.76420455,10.3863636 L7.61647727,10.3863636 L7.14772727,9.80965909 C6.83143781,9.92897787 6.49147909,9.98863636 6.12784091,9.98863636 C5.61079287,9.98863636 5.14678236,9.8712133 4.73579545,9.63636364 C4.32480855,9.40151398 4.00000119,9.06108178 3.76136364,8.61505682 C3.52272608,8.16903186 3.40340909,7.63068497 3.40340909,7 C3.40340909,6.36552713 3.52272608,5.8257598 3.76136364,5.38068182 C4.00000119,4.93560384 4.32480855,4.59611859 4.73579545,4.36221591 C5.14678236,4.12831322 5.61079287,4.01136364 6.12784091,4.01136364 C6.642995,4.01136364 7.10605855,4.12831322 7.51704545,4.36221591 C7.92803236,4.59611859 8.2533132,4.93560384 8.49289773,5.38068182 C8.73248226,5.8257598 8.85227273,6.36552713 8.85227273,7 Z M5.70170455,7.88636364 L6.74715909,7.88636364 L7.17897727,8.44034091 C7.31344764,8.27935526 7.41808675,8.07859969 7.49289773,7.83806818 C7.56770871,7.59753668 7.60511364,7.31818341 7.60511364,7 C7.60511364,6.38257267 7.47064528,5.91145996 7.20170455,5.58664773 C6.93276381,5.2618355 6.57481284,5.09943182 6.12784091,5.09943182 C5.68086898,5.09943182 5.32291801,5.2618355 5.05397727,5.58664773 C4.78503653,5.91145996 4.65056818,6.38257267 4.65056818,7 C4.65056818,7.61553338 4.78503653,8.08617261 5.05397727,8.41193182 C5.32291801,8.73769102 5.68086898,8.90056818 6.12784091,8.90056818 C6.23958389,8.90056818 6.34564344,8.89015162 6.44602273,8.86931818 L5.70170455,7.88636364 Z M10.1813315,10 L10.1813315,4 L11.4114451,4 L11.4114451,8.98579545 L13.9057633,8.98579545 L13.9057633,10 L10.1813315,10 Z" fill="#2088FF" fill-rule="nonzero"></path>
</g>
</g>
</svg>
Image preview: Before 2.6 KiB, After 1.6 KiB
58200
extensions/ql-vscode/package-lock.json
generated
78
extensions/ql-vscode/scripts/fix-scenario-file-numbering.ts
Normal file
@@ -0,0 +1,78 @@
/**
 * This script helps after recording a scenario to be used for replaying
 * with the mock GitHub API server.
 *
 * Once the scenario has been recorded, it's often useful to remove some of
 * the requests to speed up the replay, particularly ones that fetch the
 * variant analysis status. Once some of the requests have manually been
 * removed, this script can be used to update the numbering of the files.
 *
 * Usage: npx ts-node scripts/fix-scenario-file-numbering.ts <scenario-name>
 */

import * as fs from 'fs-extra';
import * as path from 'path';

if (process.argv.length !== 3) {
  console.error('Expected 1 argument - the scenario name');
}

const scenarioName = process.argv[2];

const extensionDirectory = path.resolve(__dirname, '..');
const scenariosDirectory = path.resolve(extensionDirectory, 'src/mocks/scenarios');
const scenarioDirectory = path.resolve(scenariosDirectory, scenarioName);

async function fixScenarioFiles() {
  console.log(scenarioDirectory);
  if (!(await fs.pathExists(scenarioDirectory))) {
    console.error('Scenario directory does not exist: ' + scenarioDirectory);
    return;
  }

  const files = await fs.readdir(scenarioDirectory);

  const orderedFiles = files.sort((a, b) => {
    const aNum = parseInt(a.split('-')[0]);
    const bNum = parseInt(b.split('-')[0]);
    return aNum - bNum;
  });

  let index = 0;
  for (const file of orderedFiles) {
    const ext = path.extname(file);
    if (ext === '.json') {
      const fileName = path.basename(file, ext);
      const fileCurrentIndex = parseInt(fileName.split('-')[0]);
      const fileNameWithoutIndex = fileName.split('-')[1];
      if (fileCurrentIndex !== index) {
        const newFileName = `${index}-${fileNameWithoutIndex}${ext}`;
        const oldFilePath = path.join(scenarioDirectory, file);
        const newFilePath = path.join(scenarioDirectory, newFileName);
        console.log(`Rename: ${oldFilePath} -> ${newFilePath}`);
        await fs.rename(oldFilePath, newFilePath);

        if (fileNameWithoutIndex === 'getVariantAnalysisRepoResult') {
          const oldZipFileName = `${fileCurrentIndex}-getVariantAnalysisRepoResult.body.zip`;
          const newZipFileName = `${index}-getVariantAnalysisRepoResult.body.zip`;
          const oldZipFilePath = path.join(scenarioDirectory, oldZipFileName);
          const newZipFilePath = path.join(scenarioDirectory, newZipFileName);
          console.log(`Rename: ${oldZipFilePath} -> ${newZipFilePath}`);
          await fs.rename(oldZipFilePath, newZipFilePath);

          const json = await fs.readJson(newFilePath);
          json.response.body = `file:${newZipFileName}`;
          console.log(`Response.body change to ${json.response.body}`);
          await fs.writeJSON(newFilePath, json);
        }
      }

      index++;
    }
  }
}

fixScenarioFiles().catch(e => {
  console.error(e);
  process.exit(2);
});
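For illustration, the renumbering rule above can be exercised on its own; the file names below are hypothetical and only mimic the `<index>-<requestName>.json` convention the script relies on:

// Standalone sketch of the renumbering rule, using made-up scenario file names.
const remaining = ['0-getRepo.json', '3-submitVariantAnalysis.json', '5-getVariantAnalysisRepoResult.json'];

const renamed = remaining
  .sort((a, b) => parseInt(a.split('-')[0]) - parseInt(b.split('-')[0]))
  .map((file, index) => `${index}-${file.split('-')[1]}`);

console.log(renamed);
// [ '0-getRepo.json', '1-submitVariantAnalysis.json', '2-getVariantAnalysisRepoResult.json' ]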
6
extensions/ql-vscode/scripts/forbid-mocha-only
Executable file
@@ -0,0 +1,6 @@
if grep -rq --include '*.test.ts' 'it.only\|describe.only' './test' './src'; then
  echo 'There is a .only() in the tests. Please remove it.'
  exit 1;
else
  exit 0;
fi
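The same check, sketched in TypeScript for contributors without grep available; this is an illustrative equivalent, not part of the repository:

// Hypothetical cross-platform variant of the grep check above.
import { promises as fs } from 'fs';
import * as path from 'path';

async function hasMochaOnly(dir: string): Promise<boolean> {
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      if (await hasMochaOnly(full)) return true;
    } else if (entry.name.endsWith('.test.ts')) {
      const text = await fs.readFile(full, 'utf8');
      if (text.includes('it.only') || text.includes('describe.only')) return true;
    }
  }
  return false;
}

void Promise.all(['./test', './src'].map(hasMochaOnly)).then(results => {
  if (results.some(Boolean)) {
    console.error('There is a .only() in the tests. Please remove it.');
    process.exit(1);
  }
});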
79
extensions/ql-vscode/scripts/lint-scenarios.ts
Normal file
@@ -0,0 +1,79 @@
import * as fs from 'fs-extra';
import * as path from 'path';

import Ajv from 'ajv';
import * as tsj from 'ts-json-schema-generator';

const extensionDirectory = path.resolve(__dirname, '..');
const rootDirectory = path.resolve(extensionDirectory, '../..');
const scenariosDirectory = path.resolve(extensionDirectory, 'src/mocks/scenarios');

const debug = process.env.RUNNER_DEBUG || process.argv.includes('--debug');

async function lintScenarios() {
  const schema = tsj.createGenerator({
    path: path.resolve(extensionDirectory, 'src/mocks/gh-api-request.ts'),
    tsconfig: path.resolve(extensionDirectory, 'tsconfig.json'),
    type: 'GitHubApiRequest',
    skipTypeCheck: true,
    topRef: true,
    additionalProperties: true,
  }).createSchema('GitHubApiRequest');

  const ajv = new Ajv();

  if (!ajv.validateSchema(schema)) {
    throw new Error('Invalid schema: ' + ajv.errorsText());
  }

  const validate = await ajv.compile(schema);

  let invalidFiles = 0;

  if (!(await fs.pathExists(scenariosDirectory))) {
    console.error('Scenarios directory does not exist: ' + scenariosDirectory);
    // Do not exit with a non-zero status code, as this is not a fatal error.
    return;
  }

  for await (const file of getFiles(scenariosDirectory)) {
    if (!file.endsWith('.json')) {
      continue;
    }

    const contents = await fs.readFile(file, 'utf8');
    const data = JSON.parse(contents);

    if (!validate(data)) {
      validate.errors?.forEach(error => {
        // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message
        console.log(`::error file=${path.relative(rootDirectory, file)}::${error.instancePath}: ${error.message}`);
      });
      invalidFiles++;
    } else if (debug) {
      console.log(`File '${path.relative(rootDirectory, file)}' is valid`);
    }
  }

  if (invalidFiles > 0) {
    process.exit(1);
  }
}

// https://stackoverflow.com/a/45130990
async function* getFiles(dir: string): AsyncGenerator<string> {
  const dirents = await fs.readdir(dir, { withFileTypes: true });
  for (const dirent of dirents) {
    const res = path.resolve(dir, dirent.name);
    if (dirent.isDirectory()) {
      yield* getFiles(res);
    } else {
      yield res;
    }
  }
}

lintScenarios().catch(e => {
  console.error(e);
  process.exit(2);
});
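As a usage sketch, the `getFiles` generator above can be reused (by copying it into another script, since it is not exported) to walk any directory tree; the directory name here is hypothetical:

// Illustrative consumer of the getFiles async generator shown above.
import * as fs from 'fs-extra';

async function printSizes() {
  for await (const file of getFiles('src/mocks/scenarios')) {
    const { size } = await fs.stat(file);
    console.log(`${file}: ${size} bytes`);
  }
}

void printSizes();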
129
extensions/ql-vscode/src/abstract-webview.ts
Normal file
@@ -0,0 +1,129 @@
import {
  WebviewPanel,
  ExtensionContext,
  window as Window,
  ViewColumn,
  Uri,
  WebviewPanelOptions,
  WebviewOptions
} from 'vscode';
import * as path from 'path';

import { DisposableObject } from './pure/disposable-object';
import { tmpDir } from './helpers';
import { getHtmlForWebview, WebviewMessage, WebviewView } from './interface-utils';

export type WebviewPanelConfig = {
  viewId: string;
  title: string;
  viewColumn: ViewColumn;
  view: WebviewView;
  preserveFocus?: boolean;
  additionalOptions?: WebviewPanelOptions & WebviewOptions;
}

export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMessage extends WebviewMessage> extends DisposableObject {
  protected panel: WebviewPanel | undefined;
  protected panelLoaded = false;
  protected panelLoadedCallBacks: (() => void)[] = [];

  constructor(
    protected readonly ctx: ExtensionContext
  ) {
    super();
  }

  public async restoreView(panel: WebviewPanel): Promise<void> {
    this.panel = panel;
    this.setupPanel(panel);
  }

  protected get isShowingPanel() {
    return !!this.panel;
  }

  protected getPanel(): WebviewPanel {
    if (this.panel == undefined) {
      const { ctx } = this;

      const config = this.getPanelConfig();

      this.panel = Window.createWebviewPanel(
        config.viewId,
        config.title,
        { viewColumn: config.viewColumn, preserveFocus: config.preserveFocus },
        {
          enableScripts: true,
          enableFindWidget: true,
          retainContextWhenHidden: true,
          ...config.additionalOptions,
          localResourceRoots: [
            ...(config.additionalOptions?.localResourceRoots ?? []),
            Uri.file(tmpDir.name),
            Uri.file(path.join(ctx.extensionPath, 'out'))
          ],
        }
      );
      this.setupPanel(this.panel);
    }
    return this.panel;
  }

  protected setupPanel(panel: WebviewPanel): void {
    const config = this.getPanelConfig();

    this.push(
      panel.onDidDispose(
        () => {
          this.panel = undefined;
          this.panelLoaded = false;
          this.onPanelDispose();
        },
        null,
        this.ctx.subscriptions
      )
    );

    panel.webview.html = getHtmlForWebview(
      this.ctx,
      panel.webview,
      config.view,
      {
        allowInlineStyles: true,
      }
    );
    this.push(
      panel.webview.onDidReceiveMessage(
        async (e) => this.onMessage(e),
        undefined,
        this.ctx.subscriptions
      )
    );
  }

  protected abstract getPanelConfig(): WebviewPanelConfig;

  protected abstract onPanelDispose(): void;

  protected abstract onMessage(msg: FromMessage): Promise<void>;

  protected waitForPanelLoaded(): Promise<void> {
    return new Promise((resolve) => {
      if (this.panelLoaded) {
        resolve();
      } else {
        this.panelLoadedCallBacks.push(resolve);
      }
    });
  }

  protected onWebViewLoaded(): void {
    this.panelLoaded = true;
    this.panelLoadedCallBacks.forEach((cb) => cb());
    this.panelLoadedCallBacks = [];
  }

  protected postMessage(msg: ToMessage): Thenable<boolean> {
    return this.getPanel().webview.postMessage(msg);
  }
}
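To make the abstract contract concrete, here is a minimal sketch of a subclass. The view id, title and message handling are hypothetical and not taken from the repository; real panels in the extension define their own config and message types:

// Illustrative subclass only.
class ExampleWebview extends AbstractWebview<WebviewMessage, WebviewMessage> {
  protected getPanelConfig(): WebviewPanelConfig {
    return {
      viewId: 'codeQL.exampleView',                 // hypothetical view id
      title: 'Example panel',
      viewColumn: ViewColumn.Active,
      view: 'results' as unknown as WebviewView,    // assumes 'results' is a valid WebviewView value
    };
  }

  protected onPanelDispose(): void {
    // No extra state to clean up in this sketch.
  }

  protected async onMessage(msg: WebviewMessage): Promise<void> {
    // Treat the first message from the view as "loaded", then echo it back.
    this.onWebViewLoaded();
    await this.postMessage(msg);
  }
}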
15
extensions/ql-vscode/src/additional-typings.d.ts
vendored
Normal file
@@ -0,0 +1,15 @@
/**
 * The d3 library is designed to work in both the browser and
 * node. Consequently their typings files refer to both node
 * types like `Buffer` (which don't exist in the browser), and browser
 * types like `Blob` (which don't exist in node). Instead of sticking
 * all of `dom` in `compilerOptions.lib`, it suffices just to put in a
 * stub definition of the affected types so that compilation
 * succeeds.
 */

declare type RequestInit = Record<string, unknown>;
declare type ElementTagNameMap = any;
declare type NodeListOf<T> = Record<string, T>;
declare type Node = Record<string, unknown>;
declare type XMLDocument = Record<string, unknown>;
@@ -115,7 +115,7 @@ class InvalidSourceArchiveUriError extends Error {
 export function decodeSourceArchiveUri(uri: vscode.Uri): ZipFileReference {
   if (!uri.authority) {
     // Uri is malformed, but this is recoverable
-    logger.log(`Warning: ${new InvalidSourceArchiveUriError(uri).message}`);
+    void logger.log(`Warning: ${new InvalidSourceArchiveUriError(uri).message}`);
     return {
       pathWithinSourceArchive: '/',
       sourceArchiveZipPath: uri.path
@@ -141,7 +141,7 @@ function ensureFile(map: DirectoryHierarchyMap, file: string) {
   const dirname = path.dirname(file);
   if (dirname === '.') {
     const error = `Ill-formed path ${file} in zip archive (expected absolute path)`;
-    logger.log(error);
+    void logger.log(error);
     throw new Error(error);
   }
   ensureDir(map, dirname);
@@ -167,21 +167,26 @@ type Archive = {
   dirMap: DirectoryHierarchyMap;
 };
 
+async function parse_zip(zipPath: string): Promise<Archive> {
+  if (!await fs.pathExists(zipPath))
+    throw vscode.FileSystemError.FileNotFound(zipPath);
+  const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
+  archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
+  return archive;
+}
+
 export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
   private readOnlyError = vscode.FileSystemError.NoPermissions('write operation attempted, but source archive filesystem is readonly');
-  private archives: Map<string, Archive> = new Map;
+  private archives: Map<string, Promise<Archive>> = new Map;
 
   private async getArchive(zipPath: string): Promise<Archive> {
     if (!this.archives.has(zipPath)) {
-      if (!await fs.pathExists(zipPath))
-        throw vscode.FileSystemError.FileNotFound(zipPath);
-      const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
-      archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
-      this.archives.set(zipPath, archive);
+      this.archives.set(zipPath, parse_zip(zipPath));
     }
-    return this.archives.get(zipPath)!;
+    return await this.archives.get(zipPath)!;
   }
 
 
   root = new Directory('');
 
   // metadata
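The switch to `Map<string, Promise<Archive>>` means concurrent callers share one in-flight `parse_zip` instead of racing to parse the same zip twice. A self-contained sketch of the pattern; the names below are illustrative and independent of the provider above:

// Minimal promise-memoization sketch.
const cache = new Map<string, Promise<string>>();

async function expensiveParse(key: string): Promise<string> {
  // Stands in for opening and indexing a zip archive.
  return `parsed:${key}`;
}

function getParsed(key: string): Promise<string> {
  if (!cache.has(key)) {
    cache.set(key, expensiveParse(key));
  }
  return cache.get(key)!;
}

// Two concurrent calls resolve from the same underlying promise.
void Promise.all([getParsed('a.zip'), getParsed('a.zip')]).then(([x, y]) => {
  console.log(x === y); // true
});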
@@ -10,7 +10,8 @@ import {
   TextEditorSelectionChangeEvent,
   TextEditorSelectionChangeKind,
   Location,
-  Range
+  Range,
+  Uri
 } from 'vscode';
 import * as path from 'path';
 
@@ -56,7 +57,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
   }
 
   refresh(): void {
-    this._onDidChangeTreeData.fire();
+    this._onDidChangeTreeData.fire(undefined);
   }
   getChildren(item?: AstItem): ProviderResult<AstItem[]> {
     const children = item ? item.children : this.roots;
@@ -104,7 +105,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
 export class AstViewer extends DisposableObject {
   private treeView: TreeView<AstItem>;
   private treeDataProvider: AstViewerDataProvider;
-  private currentFile: string | undefined;
+  private currentFileUri: Uri | undefined;
 
   constructor() {
     super();
@@ -125,12 +126,12 @@ export class AstViewer extends DisposableObject {
     this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
   }
 
-  updateRoots(roots: AstItem[], db: DatabaseItem, fileName: string) {
+  updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
     this.treeDataProvider.roots = roots;
     this.treeDataProvider.db = db;
     this.treeDataProvider.refresh();
-    this.treeView.message = `AST for ${path.basename(fileName)}`;
-    this.currentFile = fileName;
+    this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
+    this.currentFileUri = fileUri;
     // Handle error on reveal. This could happen if
     // the tree view is disposed during the reveal.
     this.treeView.reveal(roots[0], { focus: false })?.then(
@@ -174,7 +175,7 @@ export class AstViewer extends DisposableObject {
 
     if (
       this.treeView.visible &&
-      e.textEditor.document.uri.fsPath === this.currentFile &&
+      e.textEditor.document.uri.fsPath === this.currentFileUri?.fsPath &&
       e.selections.length === 1
     ) {
       const selection = e.selections[0];
@@ -199,6 +200,6 @@ export class AstViewer extends DisposableObject {
     this.treeDataProvider.db = undefined;
     this.treeDataProvider.refresh();
     this.treeView.message = undefined;
-    this.currentFile = undefined;
+    this.currentFileUri = undefined;
   }
 }
103
extensions/ql-vscode/src/authentication.ts
Normal file
@@ -0,0 +1,103 @@
import * as vscode from 'vscode';
import * as Octokit from '@octokit/rest';
import { retry } from '@octokit/plugin-retry';

const GITHUB_AUTH_PROVIDER_ID = 'github';

// We need 'repo' scope for triggering workflows and 'gist' scope for exporting results to Gist.
// For a comprehensive list of scopes, see:
// https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps
const SCOPES = ['repo', 'gist'];

/**
 * Handles authentication to GitHub, using the VS Code [authentication API](https://code.visualstudio.com/api/references/vscode-api#authentication).
 */
export class Credentials {
  private octokit: Octokit.Octokit | undefined;

  // Explicitly make the constructor private, so that we can't accidentally call the constructor from outside the class
  // without also initializing the class.
  // eslint-disable-next-line @typescript-eslint/no-empty-function
  private constructor() { }

  /**
   * Initializes an instance of credentials with an octokit instance.
   *
   * Do not call this method until you know you actually need an instance of credentials,
   * since calling this method will require the user to log in.
   *
   * @param context The extension context.
   * @returns An instance of credentials.
   */
  static async initialize(context: vscode.ExtensionContext): Promise<Credentials> {
    const c = new Credentials();
    c.registerListeners(context);
    c.octokit = await c.createOctokit(false);
    return c;
  }

  /**
   * Initializes an instance of credentials with an octokit instance using
   * a token from the user's GitHub account. This method is meant to be
   * used in non-interactive environments such as tests.
   *
   * @param overrideToken The GitHub token to use for authentication.
   * @returns An instance of credentials.
   */
  static async initializeWithToken(overrideToken: string) {
    const c = new Credentials();
    c.octokit = await c.createOctokit(false, overrideToken);
    return c;
  }

  private async createOctokit(createIfNone: boolean, overrideToken?: string): Promise<Octokit.Octokit | undefined> {
    if (overrideToken) {
      return new Octokit.Octokit({ auth: overrideToken, retry });
    }

    const session = await vscode.authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, { createIfNone });

    if (session) {
      return new Octokit.Octokit({
        auth: session.accessToken,
        retry
      });
    } else {
      return undefined;
    }
  }

  registerListeners(context: vscode.ExtensionContext): void {
    // Sessions are changed when a user logs in or logs out.
    context.subscriptions.push(vscode.authentication.onDidChangeSessions(async e => {
      if (e.provider.id === GITHUB_AUTH_PROVIDER_ID) {
        this.octokit = await this.createOctokit(false);
      }
    }));
  }

  /**
   * Creates or returns an instance of Octokit.
   *
   * @param requireAuthentication Whether the Octokit instance needs to be authenticated as user.
   * @returns An instance of Octokit.
   */
  async getOctokit(requireAuthentication = true): Promise<Octokit.Octokit> {
    if (this.octokit) {
      return this.octokit;
    }

    this.octokit = await this.createOctokit(requireAuthentication);

    if (!this.octokit) {
      if (requireAuthentication) {
        throw new Error('Did not initialize Octokit.');
      }

      // We don't want to set this in this.octokit because that would prevent
      // authenticating when requireCredentials is true.
      return new Octokit.Octokit({ retry });
    }
    return this.octokit;
  }
}
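A usage sketch based only on the public API above; the call site itself is hypothetical and not part of this diff:

// Illustrative call site: obtain an authenticated Octokit and list a few repositories.
async function listMyRepos(ctx: vscode.ExtensionContext): Promise<void> {
  const credentials = await Credentials.initialize(ctx);
  const octokit = await credentials.getOctokit();   // prompts the user to sign in if there is no session yet
  const { data } = await octokit.rest.repos.listForAuthenticatedUser({ per_page: 5 });
  data.forEach(repo => console.log(repo.full_name));
}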
11
extensions/ql-vscode/src/blob.d.ts
vendored
@@ -1,11 +0,0 @@
-/**
- * The npm library jszip is designed to work in both the browser and
- * node. Consequently its typings @types/jszip refers to both node
- * types like `Buffer` (which don't exist in the browser), and browser
- * types like `Blob` (which don't exist in node). Instead of sticking
- * all of `dom` in `compilerOptions.lib`, it suffices just to put in a
- * stub definition of the type `Blob` here so that compilation
- * succeeds.
- */
-
-declare type Blob = string;
@@ -1,6 +1,7 @@
 import * as semver from 'semver';
 import { runCodeQlCliCommand } from './cli';
 import { Logger } from './logging';
+import { getErrorMessage } from './pure/helpers-pure';
 
 /**
  * Get the version of a CodeQL CLI.
@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): P
   } catch (e) {
     // Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
     // Either way, we can't determine compatibility.
-    logger.log(`Failed to run 'codeql version'. Reason: ${e.message}`);
+    void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
     return undefined;
   }
 }
@@ -1,4 +1,3 @@
-/* eslint-disable @typescript-eslint/camelcase */
 import * as cpp from 'child-process-promise';
 import * as child_process from 'child_process';
 import * as fs from 'fs-extra';
@@ -9,15 +8,17 @@ import { Readable } from 'stream';
 import { StringDecoder } from 'string_decoder';
 import * as tk from 'tree-kill';
 import { promisify } from 'util';
-import { CancellationToken, Disposable } from 'vscode';
+import { CancellationToken, commands, Disposable, Uri } from 'vscode';
 
 import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
-import { CliConfig } from './config';
+import { allowCanaryQueryServer, CliConfig } from './config';
 import { DistributionProvider, FindDistributionResultKind } from './distribution';
-import { assertNever } from './pure/helpers-pure';
+import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
 import { QueryMetadata, SortDirection } from './pure/interface-types';
 import { Logger, ProgressReporter } from './logging';
-import { CompilationMessage } from './pure/messages';
+import { CompilationMessage } from './pure/legacy-messages';
+import { sarifParser } from './sarif-parser';
+import { dbSchemeToLanguage, walkDirectory } from './helpers';
 
 /**
  * The version of the SARIF format that we are using.
@@ -44,6 +45,16 @@ export interface QuerySetup {
   compilationCache?: string;
 }
 
+/**
+ * The expected output of `codeql resolve queries --format bylanguage`.
+ */
+export interface QueryInfoByLanguage {
+  // Using `unknown` as a placeholder. For now, the value is only ever an empty object.
+  byLanguage: Record<string, Record<string, unknown>>;
+  noDeclaredLanguage: Record<string, unknown>;
+  multipleDeclaredLanguages: Record<string, unknown>;
+}
+
 /**
  * The expected output of `codeql resolve database`.
  */
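For orientation, a value matching the interface added above might look as follows; the exact keys the CLI emits are an assumption here, only the overall shape is taken from the interface:

// Hedged illustration of a QueryInfoByLanguage value (hypothetical paths and keys).
const example: QueryInfoByLanguage = {
  byLanguage: {
    javascript: { '/workspace/my-query.ql': {} },
  },
  noDeclaredLanguage: {},
  multipleDeclaredLanguages: {},
};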
@@ -72,6 +83,20 @@ export interface UpgradesInfo {
  */
 export type QlpacksInfo = { [name: string]: string[] };
 
+/**
+ * The expected output of `codeql resolve languages`.
+ */
+export type LanguagesInfo = { [name: string]: string[] };
+
+/** Information about an ML model, as resolved by `codeql resolve ml-models`. */
+export type MlModelInfo = {
+  checksum: string;
+  path: string;
+};
+
+/** The expected output of `codeql resolve ml-models`. */
+export type MlModelsInfo = { models: MlModelInfo[] };
+
 /**
  * The expected output of `codeql resolve qlref`.
  */
@@ -109,6 +134,7 @@ export interface TestCompleted {
   expected: string;
   diff: string[] | undefined;
   failureDescription?: string;
+  failureStage?: string;
 }
 
 /**
@@ -142,7 +168,12 @@ export class CodeQLCliServer implements Disposable {
   nullBuffer: Buffer;
 
   /** Version of current cli, lazily computed by the `getVersion()` method */
-  private _version: SemVer | undefined;
+  private _version: Promise<SemVer> | undefined;
 
+  /**
+   * The languages supported by the current version of the CLI, computed by `getSupportedLanguages()`.
+   */
+  private _supportedLanguages: string[] | undefined;
+
   /** Path to current codeQL executable, or undefined if not running yet. */
   codeQlPath: string | undefined;
@@ -166,12 +197,14 @@ export class CodeQLCliServer implements Disposable {
       this.distributionProvider.onDidChangeDistribution(() => {
         this.restartCliServer();
         this._version = undefined;
+        this._supportedLanguages = undefined;
       });
     }
     if (this.cliConfig.onDidChangeConfiguration) {
       this.cliConfig.onDidChangeConfiguration(() => {
         this.restartCliServer();
         this._version = undefined;
+        this._supportedLanguages = undefined;
       });
     }
   }
@@ -183,15 +216,15 @@ export class CodeQLCliServer implements Disposable {
   killProcessIfRunning(): void {
     if (this.process) {
       // Tell the Java CLI server process to shut down.
-      this.logger.log('Sending shutdown request');
+      void this.logger.log('Sending shutdown request');
       try {
         this.process.stdin.write(JSON.stringify(['shutdown']), 'utf8');
         this.process.stdin.write(this.nullBuffer);
-        this.logger.log('Sent shutdown request');
+        void this.logger.log('Sent shutdown request');
       } catch (e) {
         // We are probably fine here, the process has already closed stdin.
-        this.logger.log(`Shutdown request failed: process stdin may have already closed. The error was ${e}`);
-        this.logger.log('Stopping the process anyway.');
+        void this.logger.log(`Shutdown request failed: process stdin may have already closed. The error was ${e}`);
+        void this.logger.log('Stopping the process anyway.');
       }
       // Close the stdin and stdout streams.
       // This is important on Windows where the child process may not die cleanly.
@@ -207,7 +240,7 @@ export class CodeQLCliServer implements Disposable {
   /**
    * Restart the server when the current command terminates
    */
-  private restartCliServer(): void {
+  restartCliServer(): void {
     const callback = (): void => {
       try {
         this.killProcessIfRunning();
@@ -242,11 +275,16 @@ export class CodeQLCliServer implements Disposable {
    */
   private async launchProcess(): Promise<child_process.ChildProcessWithoutNullStreams> {
     const codeQlPath = await this.getCodeQlPath();
+    const args = [];
+    if (shouldDebugCliServer()) {
+      args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9012,server=n,suspend=y,quiet=y');
+    }
+
     return await spawnServer(
       codeQlPath,
       'CodeQL CLI Server',
       ['execute', 'cli-server'],
-      [],
+      args,
       this.logger,
       _data => { /**/ }
     );
@@ -271,7 +309,7 @@ export class CodeQLCliServer implements Disposable {
     // Compute the full args array
     const args = command.concat(LOGGING_FLAGS).concat(commandArgs);
     const argsString = args.join(' ');
-    this.logger.log(`${description} using CodeQL CLI: ${argsString}...`);
+    void this.logger.log(`${description} using CodeQL CLI: ${argsString}...`);
     try {
       await new Promise<void>((resolve, reject) => {
         // Start listening to stdout
@@ -298,7 +336,7 @@ export class CodeQLCliServer implements Disposable {
       const fullBuffer = Buffer.concat(stdoutBuffers);
       // Make sure we remove the terminator;
       const data = fullBuffer.toString('utf8', 0, fullBuffer.length - 1);
-      this.logger.log('CLI command succeeded.');
+      void this.logger.log('CLI command succeeded.');
       return data;
     } catch (err) {
       // Kill the process if it isn't already dead.
@@ -308,10 +346,10 @@ export class CodeQLCliServer implements Disposable {
         stderrBuffers.length == 0
           ? new Error(`${description} failed: ${err}`)
           : new Error(`${description} failed: ${Buffer.concat(stderrBuffers).toString('utf8')}`);
-      newError.stack += (err.stack || '');
+      newError.stack += getErrorStack(err);
       throw newError;
     } finally {
-      this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
+      void this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
       // Remove the listeners we set up.
       process.stdout.removeAllListeners('data');
       process.stderr.removeAllListeners('data');
@@ -366,12 +404,12 @@ export class CodeQLCliServer implements Disposable {
     try {
       if (cancellationToken !== undefined) {
         cancellationRegistration = cancellationToken.onCancellationRequested(_e => {
-          tk(child.pid);
+          tk(child.pid || 0);
         });
       }
       if (logger !== undefined) {
         // The human-readable output goes to stderr.
-        logStream(child.stderr!, logger);
+        void logStream(child.stderr!, logger);
       }
 
       for await (const event of await splitStreamAtSeparators(child.stdout!, ['\0'])) {
@@ -410,7 +448,7 @@ export class CodeQLCliServer implements Disposable {
         try {
           yield JSON.parse(event) as EventType;
         } catch (err) {
-          throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
+          throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
         }
       }
     }
@@ -465,7 +503,7 @@ export class CodeQLCliServer implements Disposable {
     try {
       return JSON.parse(result) as OutputType;
     } catch (err) {
-      throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
+      throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
     }
   }
 
@@ -477,12 +515,24 @@
   async resolveLibraryPath(workspaces: string[], queryPath: string): Promise<QuerySetup> {
     const subcommandArgs = [
       '--query', queryPath,
-      '--additional-packs',
-      workspaces.join(path.delimiter)
+      ...this.getAdditionalPacksArg(workspaces)
     ];
     return await this.runJsonCodeQlCliCommand<QuerySetup>(['resolve', 'library-path'], subcommandArgs, 'Resolving library paths');
   }
 
+  /**
+   * Resolves the language for a query.
+   * @param queryUri The URI of the query
+   */
+  async resolveQueryByLanguage(workspaces: string[], queryUri: Uri): Promise<QueryInfoByLanguage> {
+    const subcommandArgs = [
+      '--format', 'bylanguage',
+      queryUri.fsPath,
+      ...this.getAdditionalPacksArg(workspaces)
+    ];
+    return JSON.parse(await this.runCodeQlCliCommand(['resolve', 'queries'], subcommandArgs, 'Resolving query by language'));
+  }
+
   /**
    * Finds all available QL tests in a given directory.
    * @param testPath Root of directory tree to search for tests.
|
||||||
@@ -511,6 +561,17 @@
     );
   }
 
+  /**
+   * Issues an internal clear-cache command to the cli server. This
+   * command is used to clear the qlpack cache of the server.
+   *
+   * This cache is generally cleared every 1s. This method is used
+   * to force an early clearing of the cache.
+   */
+  public async clearCache(): Promise<void> {
+    await this.runCodeQlCliCommand(['clear-cache'], [], 'Clearing qlpack cache');
+  }
+
   /**
    * Runs QL tests.
    * @param testPaths Full paths of the tests to run.
||||||
@@ -522,7 +583,7 @@
   ): AsyncGenerator<TestCompleted, void, unknown> {
 
     const subcommandArgs = this.cliConfig.additionalTestArguments.concat([
-      '--additional-packs', workspaces.join(path.delimiter),
+      ...this.getAdditionalPacksArg(workspaces),
       '--threads',
       this.cliConfig.numberTestThreads.toString(),
       ...testPaths
||||||
@@ -542,6 +603,20 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
return await this.runJsonCodeQlCliCommand<QueryMetadata>(['resolve', 'metadata'], [queryPath], 'Resolving query metadata');
|
return await this.runJsonCodeQlCliCommand<QueryMetadata>(['resolve', 'metadata'], [queryPath], 'Resolving query metadata');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Resolves the ML models that should be available when evaluating a query. */
|
||||||
|
async resolveMlModels(additionalPacks: string[], queryPath: string): Promise<MlModelsInfo> {
|
||||||
|
const args = await this.cliConstraints.supportsPreciseResolveMlModels()
|
||||||
|
// use the dirname of the path so that we can handle query libraries
|
||||||
|
? [...this.getAdditionalPacksArg(additionalPacks), path.dirname(queryPath)]
|
||||||
|
: this.getAdditionalPacksArg(additionalPacks);
|
||||||
|
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
|
||||||
|
['resolve', 'ml-models'],
|
||||||
|
args,
|
||||||
|
'Resolving ML models',
|
||||||
|
false
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the RAM setting for the query server.
|
* Gets the RAM setting for the query server.
|
||||||
* @param queryMemoryMb The maximum amount of RAM to use, in MB.
|
* @param queryMemoryMb The maximum amount of RAM to use, in MB.
|
||||||
@@ -571,6 +646,67 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
return await this.runJsonCodeQlCliCommand<BQRSInfo>(['bqrs', 'info'], subcommandArgs, 'Reading bqrs header');
|
return await this.runJsonCodeQlCliCommand<BQRSInfo>(['bqrs', 'info'], subcommandArgs, 'Reading bqrs header');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async databaseUnbundle(archivePath: string, target: string, name?: string): Promise<string> {
|
||||||
|
const subcommandArgs = [];
|
||||||
|
if (target) subcommandArgs.push('--target', target);
|
||||||
|
if (name) subcommandArgs.push('--name', name);
|
||||||
|
subcommandArgs.push(archivePath);
|
||||||
|
|
||||||
|
return await this.runCodeQlCliCommand(['database', 'unbundle'], subcommandArgs, `Extracting ${archivePath} to directory ${target}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Uses a .qhelp file to generate Query Help documentation in a specified format.
|
||||||
|
* @param pathToQhelp The path to the .qhelp file
|
||||||
|
* @param format The format in which the query help should be generated {@link https://codeql.github.com/docs/codeql-cli/manual/generate-query-help/#cmdoption-codeql-generate-query-help-format}
|
||||||
|
* @param outputDirectory The output directory for the generated file
|
||||||
|
*/
|
||||||
|
async generateQueryHelp(pathToQhelp: string, outputDirectory?: string): Promise<string> {
|
||||||
|
const subcommandArgs = ['--format=markdown'];
|
||||||
|
if (outputDirectory) subcommandArgs.push('--output', outputDirectory);
|
||||||
|
subcommandArgs.push(pathToQhelp);
|
||||||
|
|
||||||
|
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate a summary of an evaluation log.
|
||||||
|
* @param endSummaryPath The path to write only the end of query part of the human-readable summary to.
|
||||||
|
* @param inputPath The path of an evaluation event log.
|
||||||
|
* @param outputPath The path to write a human-readable summary of it to.
|
||||||
|
*/
|
||||||
|
async generateLogSummary(
|
||||||
|
inputPath: string,
|
||||||
|
outputPath: string,
|
||||||
|
endSummaryPath: string,
|
||||||
|
): Promise<string> {
|
||||||
|
const subcommandArgs = [
|
||||||
|
'--format=text',
|
||||||
|
`--end-summary=${endSummaryPath}`,
|
||||||
|
...(await this.cliConstraints.supportsSourceMap() ? ['--sourcemap'] : []),
|
||||||
|
inputPath,
|
||||||
|
outputPath
|
||||||
|
];
|
||||||
|
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating log summary');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate a JSON summary of an evaluation log.
|
||||||
|
* @param inputPath The path of an evaluation event log.
|
||||||
|
* @param outputPath The path to write a JSON summary of it to.
|
||||||
|
*/
|
||||||
|
async generateJsonLogSummary(
|
||||||
|
inputPath: string,
|
||||||
|
outputPath: string,
|
||||||
|
): Promise<string> {
|
||||||
|
const subcommandArgs = [
|
||||||
|
'--format=predicates',
|
||||||
|
inputPath,
|
||||||
|
outputPath
|
||||||
|
];
|
||||||
|
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating JSON log summary');
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets the results from a bqrs.
|
* Gets the results from a bqrs.
|
||||||
* @param bqrsPath The path to the bqrs.
|
* @param bqrsPath The path to the bqrs.
|
||||||
@@ -594,20 +730,13 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
return await this.runJsonCodeQlCliCommand<DecodedBqrsChunk>(['bqrs', 'decode'], subcommandArgs, 'Reading bqrs data');
|
return await this.runJsonCodeQlCliCommand<DecodedBqrsChunk>(['bqrs', 'decode'], subcommandArgs, 'Reading bqrs data');
|
||||||
}
|
}
|
||||||
|
|
||||||
async runInterpretCommand(format: string, metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
async runInterpretCommand(format: string, additonalArgs: string[], metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo) {
|
||||||
const args = [
|
const args = [
|
||||||
'--output', interpretedResultsPath,
|
'--output', interpretedResultsPath,
|
||||||
'--format', format,
|
'--format', format,
|
||||||
// Forward all of the query metadata.
|
// Forward all of the query metadata.
|
||||||
...Object.entries(metadata).map(([key, value]) => `-t=${key}=${value}`)
|
...Object.entries(metadata).map(([key, value]) => `-t=${key}=${value}`)
|
||||||
];
|
].concat(additonalArgs);
|
||||||
if (format == SARIF_FORMAT) {
|
|
||||||
// TODO: This flag means that we don't group interpreted results
|
|
||||||
// by primary location. We may want to revisit whether we call
|
|
||||||
// interpretation with and without this flag, or do some
|
|
||||||
// grouping client-side.
|
|
||||||
args.push('--no-group-results');
|
|
||||||
}
|
|
||||||
if (sourceInfo !== undefined) {
|
if (sourceInfo !== undefined) {
|
||||||
args.push(
|
args.push(
|
||||||
'--source-archive', sourceInfo.sourceArchive,
|
'--source-archive', sourceInfo.sourceArchive,
|
||||||
@@ -620,32 +749,56 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
this.cliConfig.numberThreads.toString(),
|
this.cliConfig.numberThreads.toString(),
|
||||||
);
|
);
|
||||||
|
|
||||||
|
args.push(
|
||||||
|
'--max-paths',
|
||||||
|
this.cliConfig.maxPaths.toString(),
|
||||||
|
);
|
||||||
|
|
||||||
args.push(resultsPath);
|
args.push(resultsPath);
|
||||||
await this.runCodeQlCliCommand(['bqrs', 'interpret'], args, 'Interpreting query results');
|
await this.runCodeQlCliCommand(['bqrs', 'interpret'], args, 'Interpreting query results');
|
||||||
}
|
}
|
||||||
|
|
||||||
async interpretBqrs(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
async interpretBqrsSarif(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<sarif.Log> {
|
||||||
await this.runInterpretCommand(SARIF_FORMAT, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
const additionalArgs = [
|
||||||
|
// TODO: This flag means that we don't group interpreted results
|
||||||
|
// by primary location. We may want to revisit whether we call
|
||||||
|
// interpretation with and without this flag, or do some
|
||||||
|
// grouping client-side.
|
||||||
|
'--no-group-results'
|
||||||
|
];
|
||||||
|
|
||||||
let output: string;
|
await this.runInterpretCommand(SARIF_FORMAT, additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||||
try {
|
return await sarifParser(interpretedResultsPath);
|
||||||
output = await fs.readFile(interpretedResultsPath, 'utf8');
|
}
|
||||||
} catch (e) {
|
|
||||||
const rawMessage = e.stderr || e.message;
|
// Warning: this function is untenable for large dot files,
|
||||||
const errorMessage = rawMessage.startsWith('Cannot create a string')
|
async readDotFiles(dir: string): Promise<string[]> {
|
||||||
? `SARIF too large. ${rawMessage}`
|
const dotFiles: Promise<string>[] = [];
|
||||||
: rawMessage;
|
for await (const file of walkDirectory(dir)) {
|
||||||
throw new Error(`Reading output of interpretation failed: ${errorMessage}`);
|
if (file.endsWith('.dot')) {
|
||||||
|
dotFiles.push(fs.readFile(file, 'utf8'));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
return Promise.all(dotFiles);
|
||||||
|
}
|
||||||
|
|
||||||
|
async interpretBqrsGraph(metadata: QueryMetadata, resultsPath: string, interpretedResultsPath: string, sourceInfo?: SourceInfo): Promise<string[]> {
|
||||||
|
const additionalArgs = sourceInfo
|
||||||
|
? ['--dot-location-url-format', 'file://' + sourceInfo.sourceLocationPrefix + '{path}:{start:line}:{start:column}:{end:line}:{end:column}']
|
||||||
|
: [];
|
||||||
|
|
||||||
|
await this.runInterpretCommand('dot', additionalArgs, metadata, resultsPath, interpretedResultsPath, sourceInfo);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
return JSON.parse(output) as sarif.Log;
|
const dot = await this.readDotFiles(interpretedResultsPath);
|
||||||
|
return dot;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
throw new Error(`Parsing output of interpretation failed: ${err.stderr || err}`);
|
throw new Error(`Reading output of interpretation failed: ${getErrorMessage(err)}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async generateResultsCsv(metadata: QueryMetadata, resultsPath: string, csvPath: string, sourceInfo?: SourceInfo): Promise<void> {
|
async generateResultsCsv(metadata: QueryMetadata, resultsPath: string, csvPath: string, sourceInfo?: SourceInfo): Promise<void> {
|
||||||
await this.runInterpretCommand(CSV_FORMAT, metadata, resultsPath, csvPath, sourceInfo);
|
await this.runInterpretCommand(CSV_FORMAT, [], metadata, resultsPath, csvPath, sourceInfo);
|
||||||
}
|
}
|
||||||
|
|
||||||
async sortBqrs(resultsPath: string, sortedResultsPath: string, resultSet: string, sortKeys: number[], sortDirections: SortDirection[]): Promise<void> {
|
async sortBqrs(resultsPath: string, sortedResultsPath: string, resultSet: string, sortKeys: number[], sortDirections: SortDirection[]): Promise<void> {
|
||||||
@@ -691,7 +844,7 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
* @returns A list of database upgrade script directories
|
* @returns A list of database upgrade script directories
|
||||||
*/
|
*/
|
||||||
async resolveUpgrades(dbScheme: string, searchPath: string[], allowDowngradesIfPossible: boolean, targetDbScheme?: string): Promise<UpgradesInfo> {
|
async resolveUpgrades(dbScheme: string, searchPath: string[], allowDowngradesIfPossible: boolean, targetDbScheme?: string): Promise<UpgradesInfo> {
|
||||||
const args = ['--additional-packs', searchPath.join(path.delimiter), '--dbscheme', dbScheme];
|
const args = [...this.getAdditionalPacksArg(searchPath), '--dbscheme', dbScheme];
|
||||||
if (targetDbScheme) {
|
if (targetDbScheme) {
|
||||||
args.push('--target-dbscheme', targetDbScheme);
|
args.push('--target-dbscheme', targetDbScheme);
|
||||||
if (allowDowngradesIfPossible && await this.cliConstraints.supportsDowngrades()) {
|
if (allowDowngradesIfPossible && await this.cliConstraints.supportsDowngrades()) {
|
||||||
@@ -713,7 +866,7 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
* @returns A dictionary mapping qlpack name to the directory it comes from
|
* @returns A dictionary mapping qlpack name to the directory it comes from
|
||||||
*/
|
*/
|
||||||
resolveQlpacks(additionalPacks: string[], searchPath?: string[]): Promise<QlpacksInfo> {
|
resolveQlpacks(additionalPacks: string[], searchPath?: string[]): Promise<QlpacksInfo> {
|
||||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||||
if (searchPath?.length) {
|
if (searchPath?.length) {
|
||||||
args.push('--search-path', path.join(...searchPath));
|
args.push('--search-path', path.join(...searchPath));
|
||||||
}
|
}
|
||||||
@@ -725,6 +878,31 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets information about the available languages.
|
||||||
|
* @returns A dictionary mapping language name to the directory it comes from
|
||||||
|
*/
|
||||||
|
async resolveLanguages(): Promise<LanguagesInfo> {
|
||||||
|
return await this.runJsonCodeQlCliCommand<LanguagesInfo>(['resolve', 'languages'], [], 'Resolving languages');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the list of available languages. Refines the result of `resolveLanguages()`, by excluding
|
||||||
|
* extra things like "xml" and "properties".
|
||||||
|
*
|
||||||
|
* @returns An array of languages that are supported by the current version of the CodeQL CLI.
|
||||||
|
*/
|
||||||
|
public async getSupportedLanguages(): Promise<string[]> {
|
||||||
|
if (!this._supportedLanguages) {
|
||||||
|
// Get the intersection of resolveLanguages with the list of hardcoded languages in dbSchemeToLanguage.
|
||||||
|
const resolvedLanguages = Object.keys(await this.resolveLanguages());
|
||||||
|
const hardcodedLanguages = Object.values(dbSchemeToLanguage);
|
||||||
|
|
||||||
|
this._supportedLanguages = resolvedLanguages.filter(lang => hardcodedLanguages.includes(lang));
|
||||||
|
}
|
||||||
|
return this._supportedLanguages;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets information about queries in a query suite.
|
* Gets information about queries in a query suite.
|
||||||
* @param suite The suite to resolve.
|
* @param suite The suite to resolve.
|
||||||
@@ -733,11 +911,15 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
* the default CLI search path is used.
|
* the default CLI search path is used.
|
||||||
* @returns A list of query files found.
|
* @returns A list of query files found.
|
||||||
*/
|
*/
|
||||||
resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
|
async resolveQueriesInSuite(suite: string, additionalPacks: string[], searchPath?: string[]): Promise<string[]> {
|
||||||
const args = ['--additional-packs', additionalPacks.join(path.delimiter)];
|
const args = this.getAdditionalPacksArg(additionalPacks);
|
||||||
if (searchPath !== undefined) {
|
if (searchPath !== undefined) {
|
||||||
args.push('--search-path', path.join(...searchPath));
|
args.push('--search-path', path.join(...searchPath));
|
||||||
}
|
}
|
||||||
|
if (await this.cliConstraints.supportsAllowLibraryPacksInResolveQueries()) {
|
||||||
|
// All of our usage of `codeql resolve queries` needs to handle library packs.
|
||||||
|
args.push('--allow-library-packs');
|
||||||
|
}
|
||||||
args.push(suite);
|
args.push(suite);
|
||||||
return this.runJsonCodeQlCliCommand<string[]>(
|
return this.runJsonCodeQlCliCommand<string[]>(
|
||||||
['resolve', 'queries'],
|
['resolve', 'queries'],
|
||||||
@@ -746,6 +928,48 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Downloads a specified pack.
|
||||||
|
* @param packs The `<package-scope/name[@version]>` of the packs to download.
|
||||||
|
*/
|
||||||
|
async packDownload(packs: string[]) {
|
||||||
|
return this.runJsonCodeQlCliCommand(['pack', 'download'], packs, 'Downloading packs');
|
||||||
|
}
|
||||||
|
|
||||||
|
async packInstall(dir: string, forceUpdate = false) {
|
||||||
|
const args = [dir];
|
||||||
|
if (forceUpdate) {
|
||||||
|
args.push('--mode', 'update');
|
||||||
|
}
|
||||||
|
return this.runJsonCodeQlCliCommand(['pack', 'install'], args, 'Installing pack dependencies');
|
||||||
|
}
|
||||||
|
|
||||||
|
async packBundle(dir: string, workspaceFolders: string[], outputPath: string, moreOptions: string[]): Promise<void> {
|
||||||
|
const args = [
|
||||||
|
'-o',
|
||||||
|
outputPath,
|
||||||
|
dir,
|
||||||
|
...moreOptions,
|
||||||
|
...this.getAdditionalPacksArg(workspaceFolders)
|
||||||
|
];
|
||||||
|
|
||||||
|
return this.runJsonCodeQlCliCommand(['pack', 'bundle'], args, 'Bundling pack');
|
||||||
|
}
|
||||||
|
|
||||||
|
async packPacklist(dir: string, includeQueries: boolean): Promise<string[]> {
|
||||||
|
const args = includeQueries ? [dir] : ['--no-include-queries', dir];
|
||||||
|
// since 2.7.1, packlist returns an object with a "paths" property that is a list of packs.
|
||||||
|
// previous versions return a list of packs.
|
||||||
|
const results: { paths: string[] } | string[] = await this.runJsonCodeQlCliCommand(['pack', 'packlist'], args, 'Generating the pack list');
|
||||||
|
|
||||||
|
// Once we no longer need to support 2.7.0 or earlier, we can remove this and assume all versions return an object.
|
||||||
|
if ('paths' in results) {
|
||||||
|
return results.paths;
|
||||||
|
} else {
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async generateDil(qloFile: string, outFile: string): Promise<void> {
|
async generateDil(qloFile: string, outFile: string): Promise<void> {
|
||||||
const extraArgs = await this.cliConstraints.supportsDecompileDil()
|
const extraArgs = await this.cliConstraints.supportsDecompileDil()
|
||||||
? ['--kind', 'dil', '-o', outFile, qloFile]
|
? ['--kind', 'dil', '-o', outFile, qloFile]
|
||||||
@@ -759,9 +983,13 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
|
|
||||||
public async getVersion() {
|
public async getVersion() {
|
||||||
if (!this._version) {
|
if (!this._version) {
|
||||||
this._version = await this.refreshVersion();
|
this._version = this.refreshVersion();
|
||||||
|
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
|
||||||
|
await commands.executeCommand(
|
||||||
|
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
|
||||||
|
);
|
||||||
}
|
}
|
||||||
return this._version;
|
return await this._version;
|
||||||
}
|
}
|
||||||
|
|
||||||
private async refreshVersion() {
|
private async refreshVersion() {
|
||||||
@@ -778,6 +1006,12 @@ export class CodeQLCliServer implements Disposable {
|
|||||||
throw new Error('No distribution found');
|
throw new Error('No distribution found');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private getAdditionalPacksArg(paths: string[]): string[] {
|
||||||
|
return paths.length
|
||||||
|
? ['--additional-packs', paths.join(path.delimiter)]
|
||||||
|
: [];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
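
Aside (not part of the diff): several hunks above replace an inline `'--additional-packs', xs.join(path.delimiter)` pair with `...this.getAdditionalPacksArg(xs)`. A minimal standalone sketch of that helper, with the surrounding CLI-server plumbing omitted, shows why spreading the result is convenient: an empty path list contributes no flag at all.

```typescript
import * as path from 'path';

// Standalone sketch of the refactor: build the '--additional-packs' pair
// only when there are paths to pass, so callers can safely spread the
// result into any argument list.
function getAdditionalPacksArg(paths: string[]): string[] {
  return paths.length
    ? ['--additional-packs', paths.join(path.delimiter)]
    : [];
}

// Example usage: with no workspaces, no extra arguments are added.
const args = ['--query', 'example.ql', ...getAdditionalPacksArg([])];
console.log(args); // ['--query', 'example.ql']
```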

/**
@@ -813,7 +1047,7 @@ export function spawnServer(
if (progressReporter !== undefined) {
progressReporter.report({ message: `Starting ${name}` });
}
-logger.log(`Starting ${name} using CodeQL CLI: ${base} ${argsString}`);
+void logger.log(`Starting ${name} using CodeQL CLI: ${base} ${argsString}`);
const child = child_process.spawn(base, args);
if (!child || !child.pid) {
throw new Error(`Failed to start ${name} using command ${base} ${argsString}.`);
@@ -829,7 +1063,7 @@ export function spawnServer(
if (progressReporter !== undefined) {
progressReporter.report({ message: `Started ${name}` });
}
-logger.log(`${name} started on PID: ${child.pid}`);
+void logger.log(`${name} started on PID: ${child.pid}`);
return child;
}

@@ -858,13 +1092,13 @@ export async function runCodeQlCliCommand(
if (progressReporter !== undefined) {
progressReporter.report({ message: description });
}
-logger.log(`${description} using CodeQL CLI: ${codeQlPath} ${argsString}...`);
+void logger.log(`${description} using CodeQL CLI: ${codeQlPath} ${argsString}...`);
const result = await promisify(child_process.execFile)(codeQlPath, args);
-logger.log(result.stderr);
+void logger.log(result.stderr);
-logger.log('CLI command succeeded.');
+void logger.log('CLI command succeeded.');
return result.stdout;
} catch (err) {
-throw new Error(`${description} failed: ${err.stderr || err}`);
+throw new Error(`${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
}
}

@@ -920,8 +1154,8 @@ class SplitBuffer {
while (this.searchIndex <= (this.buffer.length - this.maxSeparatorLength)) {
for (const separator of this.separators) {
if (SplitBuffer.startsWith(this.buffer, separator, this.searchIndex)) {
-const line = this.buffer.substr(0, this.searchIndex);
+const line = this.buffer.slice(0, this.searchIndex);
-this.buffer = this.buffer.substr(this.searchIndex + separator.length);
+this.buffer = this.buffer.slice(this.searchIndex + separator.length);
this.searchIndex = 0;
return line;
}
@@ -976,7 +1210,8 @@ const lineEndings = ['\r\n', '\r', '\n'];
*/
async function logStream(stream: Readable, logger: Logger): Promise<void> {
for await (const line of await splitStreamAtSeparators(stream, lineEndings)) {
-logger.log(line);
+// Await the result of log here in order to ensure the logs are written in the correct order.
+await logger.log(line);
}
}

@@ -993,6 +1228,12 @@ export function shouldDebugQueryServer() {
&& process.env.QUERY_SERVER_JAVA_DEBUG?.toLocaleLowerCase() !== 'false';
}

+export function shouldDebugCliServer() {
+return 'CLI_SERVER_JAVA_DEBUG' in process.env
+&& process.env.CLI_SERVER_JAVA_DEBUG !== '0'
+&& process.env.CLI_SERVER_JAVA_DEBUG?.toLocaleLowerCase() !== 'false';
+}
+
export class CliVersionConstraint {

/**
@@ -1005,6 +1246,9 @@ export class CliVersionConstraint {
*/
public static CLI_VERSION_WITH_LANGUAGE = new SemVer('2.4.1');

+
+public static CLI_VERSION_WITH_NONDESTURCTIVE_UPGRADES = new SemVer('2.4.2');
+
/**
* CLI version where `codeql resolve upgrades` supports
* the `--allow-downgrades` flag
@@ -1021,6 +1265,81 @@ export class CliVersionConstraint {
*/
public static CLI_VERSION_WITH_DB_REGISTRATION = new SemVer('2.4.1');

+/**
+* CLI version where the `--allow-library-packs` option to `codeql resolve queries` was
+* introduced.
+*/
+public static CLI_VERSION_WITH_ALLOW_LIBRARY_PACKS_IN_RESOLVE_QUERIES = new SemVer('2.6.1');
+
+/**
+* CLI version where the `database unbundle` subcommand was introduced.
+*/
+public static CLI_VERSION_WITH_DATABASE_UNBUNDLE = new SemVer('2.6.0');
+
+/**
+* CLI version where the `--no-precompile` option for pack creation was introduced.
+*/
+public static CLI_VERSION_WITH_NO_PRECOMPILE = new SemVer('2.7.1');
+
+/**
+* CLI version where remote queries (variant analysis) are supported.
+*/
+public static CLI_VERSION_REMOTE_QUERIES = new SemVer('2.6.3');
+
+/**
+* CLI version where building QLX packs for remote queries is supported.
+* (The options were _accepted_ by a few earlier versions, but only from
+* 2.11.3 will it actually use the existing compilation cache correctly).
+*/
+public static CLI_VERSION_QLX_REMOTE = new SemVer('2.11.3');
+
+/**
+* CLI version where the `resolve ml-models` subcommand was introduced.
+*/
+public static CLI_VERSION_WITH_RESOLVE_ML_MODELS = new SemVer('2.7.3');
+
+/**
+* CLI version where the `resolve ml-models` subcommand was enhanced to work with packaging.
+*/
+public static CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS = new SemVer('2.10.0');
+
+/**
+* CLI version where the `--old-eval-stats` option to the query server was introduced.
+*/
+public static CLI_VERSION_WITH_OLD_EVAL_STATS = new SemVer('2.7.4');
+
+/**
+* CLI version where packaging was introduced.
+*/
+public static CLI_VERSION_WITH_PACKAGING = new SemVer('2.6.0');
+
+/**
+* CLI version where the `--evaluator-log` and related options to the query server were introduced,
+* on a per-query server basis.
+*/
+public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');
+
+/**
+* CLI version that supports rotating structured logs to produce one per query.
+*
+* Note that 2.8.4 supports generating the evaluation logs and summaries,
+* but 2.9.0 includes a new option to produce the end-of-query summary logs to
+* the query server console. For simplicity we gate all features behind 2.9.0,
+* but if a user is tied to the 2.8 release, we can enable evaluator logs
+* and summaries for them.
+*/
+public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');
+
+/**
+* CLI version that supports the `--sourcemap` option for log generation.
+*/
+public static CLI_VERSION_WITH_SOURCEMAP = new SemVer('2.10.3');
+
+/**
+* CLI version that supports the new query server.
+*/
+public static CLI_VERSION_WITH_NEW_QUERY_SERVER = new SemVer('2.11.1');
+
constructor(private readonly cli: CodeQLCliServer) {
/**/
}
@@ -1037,6 +1356,10 @@ export class CliVersionConstraint {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_LANGUAGE);
}

+public async supportsNonDestructiveUpgrades() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NONDESTURCTIVE_UPGRADES);
+}
+
public async supportsDowngrades() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DOWNGRADES);
}
@@ -1045,7 +1368,66 @@ export class CliVersionConstraint {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_QLREF);
}

+public async supportsAllowLibraryPacksInResolveQueries() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_ALLOW_LIBRARY_PACKS_IN_RESOLVE_QUERIES);
+}
+
async supportsDatabaseRegistration() {
return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DB_REGISTRATION);
}

+async supportsDatabaseUnbundle() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_DATABASE_UNBUNDLE);
+}
+
+async supportsNoPrecompile() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NO_PRECOMPILE);
+}
+
+async supportsRemoteQueries() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_REMOTE_QUERIES);
+}
+
+async supportsQlxRemote() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_QLX_REMOTE);
+}
+
+async supportsResolveMlModels() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_RESOLVE_ML_MODELS);
+}
+
+async supportsPreciseResolveMlModels() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PRECISE_RESOLVE_ML_MODELS);
+}
+
+async supportsOldEvalStats() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_OLD_EVAL_STATS);
+}
+
+async supportsPackaging() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PACKAGING);
+}
+
+async supportsStructuredEvalLog() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
+}
+
+async supportsPerQueryEvalLog() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
+}
+
+async supportsSourceMap() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_SOURCEMAP);
+}
+
+async supportsNewQueryServer() {
+// TODO while under development, users _must_ opt-in to the new query server
+// by setting the `codeql.canaryQueryServer` setting to `true`.
+return allowCanaryQueryServer() &&
+this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
+}
+
+async supportsNewQueryServerForTests() {
+return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_NEW_QUERY_SERVER);
+}
}
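
Aside (not part of the diff): the new `CLI_VERSION_WITH_*` constants and `supportsX()` methods all follow one pattern, comparing the detected CLI version against a minimum SemVer. A simplified sketch of that pattern using the `semver` package follows; `getCliVersion` and its return value are placeholders, not the extension's real implementation.

```typescript
import { SemVer, gte } from 'semver';

// Each feature gate is a minimum CLI version...
const CLI_VERSION_WITH_SOURCEMAP = new SemVer('2.10.3');

// ...and a check compares the discovered version against it.
async function getCliVersion(): Promise<SemVer> {
  // Placeholder: the real code parses `codeql version` output.
  return new SemVer('2.11.0');
}

async function supportsSourceMap(): Promise<boolean> {
  return gte(await getCliVersion(), CLI_VERSION_WITH_SOURCEMAP);
}
```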
@@ -8,6 +8,7 @@ import {
} from 'vscode';
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
import { logger } from './logging';
+import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
import { telemetryListener } from './telemetry';

export class UserCancellationException extends Error {
@@ -121,21 +122,22 @@ export function commandRunner(
try {
return await task(...args);
} catch (e) {
-error = e;
+const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
-const errorMessage = `${e.message || e} (${commandId})`;
+error = e instanceof Error ? e : new Error(errorMessage);
+const errorStack = getErrorStack(e);
if (e instanceof UserCancellationException) {
// User has cancelled this action manually
if (e.silent) {
-logger.log(errorMessage);
+void logger.log(errorMessage);
} else {
-showAndLogWarningMessage(errorMessage);
+void showAndLogWarningMessage(errorMessage);
}
} else {
// Include the full stack in the error log only.
-const fullMessage = e.stack
+const fullMessage = errorStack
-? `${errorMessage}\n${e.stack}`
+? `${errorMessage}\n${errorStack}`
: errorMessage;
-showAndLogErrorMessage(errorMessage, {
+void showAndLogErrorMessage(errorMessage, {
fullMessage
});
}
@@ -160,7 +162,8 @@
export function commandRunnerWithProgress<R>(
commandId: string,
task: ProgressTask<R>,
-progressOptions: Partial<ProgressOptions>
+progressOptions: Partial<ProgressOptions>,
+outputLogger = logger
): Disposable {
return commands.registerCommand(commandId, async (...args: any[]) => {
const startTime = Date.now();
@@ -172,21 +175,23 @@ export function commandRunnerWithProgress<R>(
try {
return await withProgress(progressOptionsWithDefaults, task, ...args);
} catch (e) {
-error = e;
+const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
-const errorMessage = `${e.message || e} (${commandId})`;
+error = e instanceof Error ? e : new Error(errorMessage);
+const errorStack = getErrorStack(e);
if (e instanceof UserCancellationException) {
// User has cancelled this action manually
if (e.silent) {
-logger.log(errorMessage);
+void outputLogger.log(errorMessage);
} else {
-showAndLogWarningMessage(errorMessage);
+void showAndLogWarningMessage(errorMessage, { outputLogger });
}
} else {
// Include the full stack in the error log only.
-const fullMessage = e.stack
+const fullMessage = errorStack
-? `${errorMessage}\n${e.stack}`
+? `${errorMessage}\n${errorStack}`
: errorMessage;
-showAndLogErrorMessage(errorMessage, {
+void showAndLogErrorMessage(errorMessage, {
+outputLogger,
fullMessage
});
}
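
Aside (not part of the diff): the command-runner hunks stop reading `e.message` and `e.stack` directly and instead route the caught value through `getErrorMessage` / `getErrorStack` from `./pure/helpers-pure`, which works even when the thrown value is not an `Error`. The diff only shows the call sites, so the implementations below are an assumption about what such helpers typically look like.

```typescript
// Assumed shape of the helpers; only the call sites appear in the diff.
function getErrorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}

function getErrorStack(e: unknown): string {
  return e instanceof Error ? e.stack ?? '' : '';
}

// A `catch (e)` block can then treat `e` as `unknown` safely:
try {
  throw new Error('boom');
} catch (e) {
  console.log(`command failed: ${getErrorMessage(e)}`);
}
```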
@@ -1,15 +1,8 @@
-import { DisposableObject } from '../pure/disposable-object';
import {
-WebviewPanel,
ExtensionContext,
-window as Window,
ViewColumn,
-Uri,
} from 'vscode';
-import * as path from 'path';

-import { tmpDir } from '../run-queries';
-import { CompletedQuery } from '../query-results';
import {
FromCompareViewMessage,
ToCompareViewMessage,
@@ -18,36 +11,38 @@ import {
import { Logger } from '../logging';
import { CodeQLCliServer } from '../cli';
import { DatabaseManager } from '../databases';
-import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
+import { jumpToLocation } from '../interface-utils';
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
import resultsDiff from './resultsDiff';
+import { CompletedLocalQueryInfo } from '../query-results';
+import { getErrorMessage } from '../pure/helpers-pure';
+import { HistoryItemLabelProvider } from '../history-item-label-provider';
+import { AbstractWebview, WebviewPanelConfig } from '../abstract-webview';

interface ComparePair {
-from: CompletedQuery;
+from: CompletedLocalQueryInfo;
-to: CompletedQuery;
+to: CompletedLocalQueryInfo;
}

-export class CompareInterfaceManager extends DisposableObject {
+export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompareViewMessage> {
private comparePair: ComparePair | undefined;
-private panel: WebviewPanel | undefined;
-private panelLoaded = false;
-private panelLoadedCallBacks: (() => void)[] = [];

constructor(
-private ctx: ExtensionContext,
+ctx: ExtensionContext,
private databaseManager: DatabaseManager,
private cliServer: CodeQLCliServer,
private logger: Logger,
+private labelProvider: HistoryItemLabelProvider,
private showQueryResultsCallback: (
-item: CompletedQuery
+item: CompletedLocalQueryInfo
) => Promise<void>
) {
-super();
+super(ctx);
}

async showResults(
-from: CompletedQuery,
+from: CompletedLocalQueryInfo,
-to: CompletedQuery,
+to: CompletedLocalQueryInfo,
selectedResultSetName?: string
) {
this.comparePair = { from, to };
@@ -70,7 +65,7 @@ export class CompareInterfaceManager extends DisposableObject {
try {
rows = this.compareResults(fromResultSet, toResultSet);
} catch (e) {
-message = e.message;
+message = getErrorMessage(e);
}

await this.postMessage({
@@ -80,18 +75,14 @@ export class CompareInterfaceManager extends DisposableObject {
// since we split the description into several rows
// only run interpolation if the label is user-defined
// otherwise we will wind up with duplicated rows
-name: from.options.label
+name: this.labelProvider.getShortLabel(from),
-? from.interpolate(from.getLabel())
+status: from.completedQuery.statusString,
-: from.queryName,
+time: from.startTime,
-status: from.statusString,
-time: from.time,
},
toQuery: {
-name: to.options.label
+name: this.labelProvider.getShortLabel(to),
-? to.interpolate(to.getLabel())
+status: to.completedQuery.statusString,
-: to.queryName,
+time: to.startTime,
-status: to.statusString,
-time: to.time,
},
},
columns: fromResultSet.schema.columns,
@@ -99,81 +90,33 @@ export class CompareInterfaceManager extends DisposableObject {
currentResultSetName: currentResultSetName,
rows,
message,
-datebaseUri: to.database.databaseUri,
+databaseUri: to.initialInfo.databaseInfo.databaseUri,
});
}
}

-getPanel(): WebviewPanel {
+protected getPanelConfig(): WebviewPanelConfig {
-if (this.panel == undefined) {
+return {
-const { ctx } = this;
+viewId: 'compareView',
-const panel = (this.panel = Window.createWebviewPanel(
+title: 'Compare CodeQL Query Results',
-'compareView',
+viewColumn: ViewColumn.Active,
-'Compare CodeQL Query Results',
+preserveFocus: true,
-{ viewColumn: ViewColumn.Active, preserveFocus: true },
+view: 'compare',
-{
+};
-enableScripts: true,
-enableFindWidget: true,
-retainContextWhenHidden: true,
-localResourceRoots: [
-Uri.file(tmpDir.name),
-Uri.file(path.join(this.ctx.extensionPath, 'out')),
-],
-}
-));
-this.panel.onDidDispose(
-() => {
-this.panel = undefined;
-this.comparePair = undefined;
-},
-null,
-ctx.subscriptions
-);
-
-const scriptPathOnDisk = Uri.file(
-ctx.asAbsolutePath('out/compareView.js')
-);
-
-const stylesheetPathOnDisk = Uri.file(
-ctx.asAbsolutePath('out/resultsView.css')
-);
-
-panel.webview.html = getHtmlForWebview(
-panel.webview,
-scriptPathOnDisk,
-stylesheetPathOnDisk
-);
-panel.webview.onDidReceiveMessage(
-async (e) => this.handleMsgFromView(e),
-undefined,
-ctx.subscriptions
-);
-}
-return this.panel;
}

-private waitForPanelLoaded(): Promise<void> {
+protected onPanelDispose(): void {
-return new Promise((resolve) => {
+this.comparePair = undefined;
-if (this.panelLoaded) {
-resolve();
-} else {
-this.panelLoadedCallBacks.push(resolve);
-}
-});
}

-private async handleMsgFromView(
+protected async onMessage(msg: FromCompareViewMessage): Promise<void> {
-msg: FromCompareViewMessage
-): Promise<void> {
switch (msg.t) {
-case 'compareViewLoaded':
+case 'viewLoaded':
-this.panelLoaded = true;
+this.onWebViewLoaded();
-this.panelLoadedCallBacks.forEach((cb) => cb());
-this.panelLoadedCallBacks = [];
break;

case 'changeCompare':
-this.changeTable(msg.newResultSetName);
+await this.changeTable(msg.newResultSetName);
break;

case 'viewSourceFile':
@@ -186,20 +129,16 @@ export class CompareInterfaceManager extends DisposableObject {
}
}

-private postMessage(msg: ToCompareViewMessage): Thenable<boolean> {
-return this.getPanel().webview.postMessage(msg);
-}
-
private async findCommonResultSetNames(
-from: CompletedQuery,
+from: CompletedLocalQueryInfo,
-to: CompletedQuery,
+to: CompletedLocalQueryInfo,
selectedResultSetName: string | undefined
): Promise<[string[], string, RawResultSet, RawResultSet]> {
const fromSchemas = await this.cliServer.bqrsInfo(
-from.query.resultsPaths.resultsPath
+from.completedQuery.query.resultsPaths.resultsPath
);
const toSchemas = await this.cliServer.bqrsInfo(
-to.query.resultsPaths.resultsPath
+to.completedQuery.query.resultsPaths.resultsPath
);
const fromSchemaNames = fromSchemas['result-sets'].map(
(schema) => schema.name
@@ -215,12 +154,12 @@ export class CompareInterfaceManager extends DisposableObject {
const fromResultSet = await this.getResultSet(
fromSchemas,
currentResultSetName,
-from.query.resultsPaths.resultsPath
+from.completedQuery.query.resultsPaths.resultsPath
);
const toResultSet = await this.getResultSet(
toSchemas,
currentResultSetName,
-to.query.resultsPaths.resultsPath
+to.completedQuery.query.resultsPaths.resultsPath
);
return [
commonResultSetNames,
@@ -267,11 +206,11 @@ export class CompareInterfaceManager extends DisposableObject {
return resultsDiff(fromResults, toResults);
}

-private openQuery(kind: 'from' | 'to') {
+private async openQuery(kind: 'from' | 'to') {
const toOpen =
kind === 'from' ? this.comparePair?.from : this.comparePair?.to;
if (toOpen) {
-this.showQueryResultsCallback(toOpen);
+await this.showQueryResultsCallback(toOpen);
}
}
}
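
Aside (not part of the diff): the compare view drops its hand-rolled panel management and subclasses `AbstractWebview`, overriding `getPanelConfig`, `onMessage`, and `onPanelDispose`. A minimal sketch of the template-method shape this implies is below; the base-class internals are an assumption, since the diff only shows the subclass side.

```typescript
// Assumed shape of the base class; only the overrides appear in the diff.
interface WebviewPanelConfig {
  viewId: string;
  title: string;
}

abstract class AbstractWebview<ToMsg, FromMsg> {
  // Subclasses describe the panel instead of creating it themselves.
  protected abstract getPanelConfig(): WebviewPanelConfig;
  // Subclasses react to messages; panel wiring lives in the base class.
  protected abstract onMessage(msg: FromMsg): Promise<void>;
  // Subclasses clean up their own state when the panel is closed.
  protected abstract onPanelDispose(): void;

  protected async postMessage(_msg: ToMsg): Promise<void> {
    // The real implementation would forward to panel.webview.postMessage(...).
  }
}
```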
@@ -2,15 +2,27 @@ import { DisposableObject } from './pure/disposable-object';
|
|||||||
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
|
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
|
||||||
import { DistributionManager } from './distribution';
|
import { DistributionManager } from './distribution';
|
||||||
import { logger } from './logging';
|
import { logger } from './logging';
|
||||||
|
import { ONE_DAY_IN_MS } from './pure/time';
|
||||||
|
|
||||||
|
export const ALL_SETTINGS: Setting[] = [];
|
||||||
|
|
||||||
/** Helper class to look up a labelled (and possibly nested) setting. */
|
/** Helper class to look up a labelled (and possibly nested) setting. */
|
||||||
export class Setting {
|
export class Setting {
|
||||||
name: string;
|
name: string;
|
||||||
parent?: Setting;
|
parent?: Setting;
|
||||||
|
private _hasChildren = false;
|
||||||
|
|
||||||
constructor(name: string, parent?: Setting) {
|
constructor(name: string, parent?: Setting) {
|
||||||
this.name = name;
|
this.name = name;
|
||||||
this.parent = parent;
|
this.parent = parent;
|
||||||
|
if (parent !== undefined) {
|
||||||
|
parent._hasChildren = true;
|
||||||
|
}
|
||||||
|
ALL_SETTINGS.push(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
get hasChildren() {
|
||||||
|
return this._hasChildren;
|
||||||
}
|
}
|
||||||
|
|
||||||
get qualifiedName(): string {
|
get qualifiedName(): string {
|
||||||
@@ -35,6 +47,18 @@ export class Setting {
|
|||||||
return workspace.getConfiguration(this.parent.qualifiedName).update(this.name, value, target);
|
return workspace.getConfiguration(this.parent.qualifiedName).update(this.name, value, target);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inspect<T>(): InspectionResult<T> | undefined {
|
||||||
|
if (this.parent === undefined) {
|
||||||
|
throw new Error('Cannot update the value of a root setting.');
|
||||||
|
}
|
||||||
|
return workspace.getConfiguration(this.parent.qualifiedName).inspect(this.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface InspectionResult<T> {
|
||||||
|
globalValue?: T;
|
||||||
|
workspaceValue?: T,
|
||||||
|
workspaceFolderValue?: T,
|
||||||
}
|
}
|
||||||
|
|
||||||
const ROOT_SETTING = new Setting('codeQL');
|
const ROOT_SETTING = new Setting('codeQL');
|
||||||
@@ -43,6 +67,7 @@ const ROOT_SETTING = new Setting('codeQL');
|
|||||||
const TELEMETRY_SETTING = new Setting('telemetry', ROOT_SETTING);
|
const TELEMETRY_SETTING = new Setting('telemetry', ROOT_SETTING);
|
||||||
const AST_VIEWER_SETTING = new Setting('astViewer', ROOT_SETTING);
|
const AST_VIEWER_SETTING = new Setting('astViewer', ROOT_SETTING);
|
||||||
const GLOBAL_TELEMETRY_SETTING = new Setting('telemetry');
|
const GLOBAL_TELEMETRY_SETTING = new Setting('telemetry');
|
||||||
|
const LOG_INSIGHTS_SETTING = new Setting('logInsights', ROOT_SETTING);
|
||||||
|
|
||||||
export const LOG_TELEMETRY = new Setting('logTelemetry', TELEMETRY_SETTING);
|
export const LOG_TELEMETRY = new Setting('logTelemetry', TELEMETRY_SETTING);
|
||||||
export const ENABLE_TELEMETRY = new Setting('enableTelemetry', TELEMETRY_SETTING);
|
export const ENABLE_TELEMETRY = new Setting('enableTelemetry', TELEMETRY_SETTING);
|
||||||
@@ -54,8 +79,11 @@ const DISTRIBUTION_SETTING = new Setting('cli', ROOT_SETTING);
|
|||||||
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
|
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
|
||||||
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
|
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
|
||||||
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
|
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
|
||||||
|
|
||||||
|
// Query History configuration
|
||||||
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
|
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
|
||||||
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
|
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
|
||||||
|
const QUERY_HISTORY_TTL = new Setting('ttl', QUERY_HISTORY_SETTING);
|
||||||
|
|
||||||
/** When these settings change, the distribution should be updated. */
|
/** When these settings change, the distribution should be updated. */
|
||||||
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
|
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
|
||||||
@@ -71,7 +99,6 @@ export interface DistributionConfig {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Query server configuration
|
// Query server configuration
|
||||||
|
|
||||||
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
|
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
|
||||||
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
|
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
|
||||||
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
|
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
|
||||||
@@ -79,6 +106,7 @@ const CACHE_SIZE_SETTING = new Setting('cacheSize', RUNNING_QUERIES_SETTING);
|
|||||||
const TIMEOUT_SETTING = new Setting('timeout', RUNNING_QUERIES_SETTING);
|
const TIMEOUT_SETTING = new Setting('timeout', RUNNING_QUERIES_SETTING);
|
||||||
const MEMORY_SETTING = new Setting('memory', RUNNING_QUERIES_SETTING);
|
const MEMORY_SETTING = new Setting('memory', RUNNING_QUERIES_SETTING);
|
||||||
const DEBUG_SETTING = new Setting('debug', RUNNING_QUERIES_SETTING);
|
const DEBUG_SETTING = new Setting('debug', RUNNING_QUERIES_SETTING);
|
||||||
|
+const MAX_PATHS = new Setting('maxPaths', RUNNING_QUERIES_SETTING);
const RUNNING_TESTS_SETTING = new Setting('runningTests', ROOT_SETTING);
const RESULTS_DISPLAY_SETTING = new Setting('resultsDisplay', ROOT_SETTING);

@@ -87,9 +115,13 @@ export const NUMBER_OF_TEST_THREADS_SETTING = new Setting('numberOfThreads', RUN
export const MAX_QUERIES = new Setting('maxQueries', RUNNING_QUERIES_SETTING);
export const AUTOSAVE_SETTING = new Setting('autoSave', RUNNING_QUERIES_SETTING);
export const PAGE_SIZE = new Setting('pageSize', RESULTS_DISPLAY_SETTING);
+const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_QUERIES_SETTING);

/** When these settings change, the running query server should be restarted. */
-const QUERY_SERVER_RESTARTING_SETTINGS = [NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING, DEBUG_SETTING];
+const QUERY_SERVER_RESTARTING_SETTINGS = [
+  NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
+  DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
+];

export interface QueryServerConfig {
  codeQlPath: string;
@@ -99,23 +131,26 @@ export interface QueryServerConfig {
  cacheSize: number;
  queryMemoryMb?: number;
  timeoutSecs: number;
+  customLogDirectory?: string;
  onDidChangeConfiguration?: Event<void>;
}

/** When these settings change, the query history should be refreshed. */
-const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING];
+const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING, QUERY_HISTORY_TTL];

export interface QueryHistoryConfig {
  format: string;
+  ttlInMillis: number;
  onDidChangeConfiguration: Event<void>;
}

-const CLI_SETTINGS = [ADDITIONAL_TEST_ARGUMENTS_SETTING, NUMBER_OF_TEST_THREADS_SETTING, NUMBER_OF_THREADS_SETTING];
+const CLI_SETTINGS = [ADDITIONAL_TEST_ARGUMENTS_SETTING, NUMBER_OF_TEST_THREADS_SETTING, NUMBER_OF_THREADS_SETTING, MAX_PATHS];

export interface CliConfig {
  additionalTestArguments: string[];
  numberTestThreads: number;
  numberThreads: number;
+  maxPaths: number;
  onDidChangeConfiguration?: Event<void>;
}

@@ -145,7 +180,7 @@ export abstract class ConfigListener extends DisposableObject {

  protected abstract handleDidChangeConfiguration(e: ConfigurationChangeEvent): void;
  private updateConfiguration(): void {
-    this._onDidChangeConfiguration.fire();
+    this._onDidChangeConfiguration.fire(undefined);
  }

  public get onDidChangeConfiguration(): Event<void> {
@@ -187,7 +222,7 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
    config.push(distributionManager.onDidChangeDistribution(async () => {
      const codeQlPath = await distributionManager.getCodeQlPathWithoutVersionCheck();
      config._codeQlPath = codeQlPath!;
-      config._onDidChangeConfiguration.fire();
+      config._onDidChangeConfiguration.fire(undefined);
    }));
  }
  return config;
@@ -197,6 +232,10 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
    return this._codeQlPath;
  }

+  public get customLogDirectory(): string | undefined {
+    return CUSTOM_LOG_DIRECTORY_SETTING.getValue<string>() || undefined;
+  }
+
  public get numThreads(): number {
    return NUMBER_OF_THREADS_SETTING.getValue<number>();
  }
@@ -220,7 +259,7 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
      return undefined;
    }
    if (memory == 0 || typeof (memory) !== 'number') {
-      logger.log(`Ignoring value '${memory}' for setting ${MEMORY_SETTING.qualifiedName}`);
+      void logger.log(`Ignoring value '${memory}' for setting ${MEMORY_SETTING.qualifiedName}`);
      return undefined;
    }
    return memory;
@@ -243,6 +282,13 @@ export class QueryHistoryConfigListener extends ConfigListener implements QueryH
  public get format(): string {
    return QUERY_HISTORY_FORMAT_SETTING.getValue<string>();
  }

+  /**
+   * The configuration value is in days, but return the value in milliseconds to make it easier to use.
+   */
+  public get ttlInMillis(): number {
+    return (QUERY_HISTORY_TTL.getValue<number>() || 30) * ONE_DAY_IN_MS;
+  }
}

export class CliConfigListener extends ConfigListener implements CliConfig {
@@ -258,11 +304,25 @@ export class CliConfigListener extends ConfigListener implements CliConfig {
    return NUMBER_OF_THREADS_SETTING.getValue<number>();
  }

+  public get maxPaths(): number {
+    return MAX_PATHS.getValue<number>();
+  }
+
  protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
    this.handleDidChangeConfigurationForRelevantSettings(CLI_SETTINGS, e);
  }
}

+/**
+ * Whether to enable CodeLens for the 'Quick Evaluation' command.
+ */
+const QUICK_EVAL_CODELENS_SETTING = new Setting('quickEvalCodelens', RUNNING_QUERIES_SETTING);
+
+export function isQuickEvalCodelensEnabled() {
+  return QUICK_EVAL_CODELENS_SETTING.getValue<boolean>();
+}
+

// Enable experimental features

/**
@@ -281,7 +341,138 @@ export function isCanary() {
  return !!CANARY_FEATURES.getValue<boolean>();
}

+/**
+ * Enables the experimental query server
+ */
+export const CANARY_QUERY_SERVER = new Setting('canaryQueryServer', ROOT_SETTING);
+
+
+export function allowCanaryQueryServer() {
+  return !!CANARY_QUERY_SERVER.getValue<boolean>();
+}
+
+export const JOIN_ORDER_WARNING_THRESHOLD = new Setting('joinOrderWarningThreshold', LOG_INSIGHTS_SETTING);
+
+export function joinOrderWarningThreshold(): number {
+  return JOIN_ORDER_WARNING_THRESHOLD.getValue<number>();
+}
+
/**
 * Avoids caching in the AST viewer if the user is also a canary user.
 */
export const NO_CACHE_AST_VIEWER = new Setting('disableCache', AST_VIEWER_SETTING);
+
+// Settings for variant analysis
+const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);
+
+/**
+ * Lists of GitHub repositories that you want to query remotely via the "Run Variant Analysis" command.
+ * Note: This command is only available for internal users.
+ *
+ * This setting should be a JSON object where each key is a user-specified name (string),
+ * and the value is an array of GitHub repositories (of the form `<owner>/<repo>`).
+ */
+const REMOTE_REPO_LISTS = new Setting('repositoryLists', REMOTE_QUERIES_SETTING);
+
+export function getRemoteRepositoryLists(): Record<string, string[]> | undefined {
+  return REMOTE_REPO_LISTS.getValue<Record<string, string[]>>() || undefined;
+}
+
+export async function setRemoteRepositoryLists(lists: Record<string, string[]> | undefined) {
+  await REMOTE_REPO_LISTS.updateValue(lists, ConfigurationTarget.Global);
+}
+
+/**
+ * Path to a file that contains lists of GitHub repositories that you want to query remotely via
+ * the "Run Variant Analysis" command.
+ * Note: This command is only available for internal users.
+ *
+ * This setting should be a path to a JSON file that contains a JSON object where each key is a
+ * user-specified name (string), and the value is an array of GitHub repositories
+ * (of the form `<owner>/<repo>`).
+ */
+const REPO_LISTS_PATH = new Setting('repositoryListsPath', REMOTE_QUERIES_SETTING);
+
+export function getRemoteRepositoryListsPath(): string | undefined {
+  return REPO_LISTS_PATH.getValue<string>() || undefined;
+}
+
+/**
+ * The name of the "controller" repository that you want to use with the "Run Variant Analysis" command.
+ * Note: This command is only available for internal users.
+ *
+ * This setting should be a GitHub repository of the form `<owner>/<repo>`.
+ */
+const REMOTE_CONTROLLER_REPO = new Setting('controllerRepo', REMOTE_QUERIES_SETTING);
+
+export function getRemoteControllerRepo(): string | undefined {
+  return REMOTE_CONTROLLER_REPO.getValue<string>() || undefined;
+}
+
+export async function setRemoteControllerRepo(repo: string | undefined) {
+  await REMOTE_CONTROLLER_REPO.updateValue(repo, ConfigurationTarget.Global);
+}
+
+/**
+ * The branch of "github/codeql-variant-analysis-action" to use with the "Run Variant Analysis" command.
+ * Default value is "main".
+ * Note: This command is only available for internal users.
+ */
+const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);
+
+export function getActionBranch(): string {
+  return ACTION_BRANCH.getValue<string>() || 'main';
+}
+
+export function isIntegrationTestMode() {
+  return process.env.INTEGRATION_TEST_MODE === 'true';
+}
+
+/**
+ * A flag indicating whether to enable the experimental "live results" feature
+ * for multi-repo variant analyses.
+ */
+const LIVE_RESULTS = new Setting('liveResults', REMOTE_QUERIES_SETTING);
+
+export function isVariantAnalysisLiveResultsEnabled(): boolean {
+  return !!LIVE_RESULTS.getValue<boolean>();
+}
+
+// Settings for mocking the GitHub API.
+const MOCK_GH_API_SERVER = new Setting('mockGitHubApiServer', ROOT_SETTING);
+
+/**
+ * A flag indicating whether to enable a mock GitHub API server.
+ */
+const MOCK_GH_API_SERVER_ENABLED = new Setting('enabled', MOCK_GH_API_SERVER);
+
+/**
+ * A path to a directory containing test scenarios. If this setting is not set,
+ * the mock server will a default location for test scenarios in dev mode, and
+ * will show a menu to select a directory in production mode.
+ */
+const MOCK_GH_API_SERVER_SCENARIOS_PATH = new Setting('scenariosPath', MOCK_GH_API_SERVER);
+
+export interface MockGitHubApiConfig {
+  mockServerEnabled: boolean;
+  mockScenariosPath: string;
+  onDidChangeConfiguration: Event<void>;
+}
+
+export class MockGitHubApiConfigListener extends ConfigListener implements MockGitHubApiConfig {
+  protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
+    this.handleDidChangeConfigurationForRelevantSettings([MOCK_GH_API_SERVER], e);
+  }
+
+  public get mockServerEnabled(): boolean {
+    return !!MOCK_GH_API_SERVER_ENABLED.getValue<boolean>();
+  }
+
+  public get mockScenariosPath(): string {
+    return MOCK_GH_API_SERVER_SCENARIOS_PATH.getValue<string>();
+  }
+}
+
+export function getMockGitHubApiServerScenariosPath(): string | undefined {
+  return MOCK_GH_API_SERVER_SCENARIOS_PATH.getValue<string>();
+}
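The settings diff above hangs every new option (`maxPaths`, `customLogDirectory`, `quickEvalCodelens`, the variant-analysis family) off a parent `Setting`, and exposes the query-history TTL in milliseconds even though the user configures days. A minimal, self-contained sketch of that pattern follows; it is an illustration only, not the extension's actual `Setting` class, and the key names, the in-memory store, and `ONE_DAY_IN_MS` are assumptions.

```typescript
// Simplified stand-in for the extension's Setting helper (illustration only).
class SimpleSetting {
  constructor(private name: string, private parent?: SimpleSetting) {}

  // A child setting's key is its parent's key plus its own name, e.g. 'codeQL.queryHistory.ttl'.
  get qualifiedName(): string {
    return this.parent ? `${this.parent.qualifiedName}.${this.name}` : this.name;
  }

  // The real extension reads from the VS Code workspace configuration instead of a plain object.
  getValue<T>(store: Record<string, unknown>): T | undefined {
    return store[this.qualifiedName] as T | undefined;
  }
}

const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;
const root = new SimpleSetting('codeQL');
const queryHistory = new SimpleSetting('queryHistory', root);
const ttl = new SimpleSetting('ttl', queryHistory);

// A TTL configured in days is surfaced in milliseconds, defaulting to 30 days when unset.
const store = { 'codeQL.queryHistory.ttl': 7 };
const ttlInMillis = (ttl.getValue<number>(store) ?? 30) * ONE_DAY_IN_MS;
console.log(ttlInMillis); // 604800000
```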
@@ -1,9 +1,10 @@
-import { QueryWithResults } from '../run-queries';
import { CodeQLCliServer } from '../cli';
import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
import { DatabaseItem } from '../databases';
import { ChildAstItem, AstItem } from '../astViewer';
import fileRangeFromURI from './fileRangeFromURI';
+import { Uri } from 'vscode';
+import { QueryWithResults } from '../run-queries-shared';

/**
 * A class that wraps a tree of QL results from a query that
@@ -17,7 +18,7 @@ export default class AstBuilder {
    queryResults: QueryWithResults,
    private cli: CodeQLCliServer,
    public db: DatabaseItem,
-    public fileName: string
+    public fileName: Uri
  ) {
    this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
  }
@@ -2,6 +2,7 @@ export enum KeyType {
  DefinitionQuery = 'DefinitionQuery',
  ReferenceQuery = 'ReferenceQuery',
  PrintAstQuery = 'PrintAstQuery',
+  PrintCfgQuery = 'PrintCfgQuery',
}

export function tagOfKeyType(keyType: KeyType): string {
@@ -12,6 +13,8 @@ export function tagOfKeyType(keyType: KeyType): string {
      return 'ide-contextual-queries/local-references';
    case KeyType.PrintAstQuery:
      return 'ide-contextual-queries/print-ast';
+    case KeyType.PrintCfgQuery:
+      return 'ide-contextual-queries/print-cfg';
  }
}

@@ -23,6 +26,8 @@ export function nameOfKeyType(keyType: KeyType): string {
      return 'references';
    case KeyType.PrintAstQuery:
      return 'print AST';
+    case KeyType.PrintCfgQuery:
+      return 'print CFG';
  }
}

@@ -32,6 +37,7 @@ export function kindOfKeyType(keyType: KeyType): string {
    case KeyType.ReferenceQuery:
      return 'definitions';
    case KeyType.PrintAstQuery:
+    case KeyType.PrintCfgQuery:
      return 'graph';
  }
}
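A short usage sketch of the extended key type (the call sites are hypothetical; the return values follow directly from the switch statements in the hunks above):

```typescript
import { KeyType, tagOfKeyType, nameOfKeyType, kindOfKeyType } from './keyType';

// The new PrintCfgQuery key resolves through the same helpers as PrintAstQuery.
const key = KeyType.PrintCfgQuery;
console.log(tagOfKeyType(key));  // 'ide-contextual-queries/print-cfg'
console.log(nameOfKeyType(key)); // 'print CFG'
console.log(kindOfKeyType(key)); // 'graph' (shares the PrintAstQuery branch)
```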
@@ -1,22 +1,20 @@
-import * as vscode from 'vscode';

import { decodeSourceArchiveUri, encodeArchiveBasePath } from '../archive-filesystem-provider';
import { ColumnKindCode, EntityValue, getResultSetSchema, ResultSetSchema } from '../pure/bqrs-cli-types';
import { CodeQLCliServer } from '../cli';
import { DatabaseManager, DatabaseItem } from '../databases';
import fileRangeFromURI from './fileRangeFromURI';
-import * as messages from '../pure/messages';
-import { QueryServerClient } from '../queryserver-client';
-import { QueryWithResults, compileAndRunQueryAgainstDatabase } from '../run-queries';
import { ProgressCallback } from '../commandRunner';
import { KeyType } from './keyType';
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
+import { CancellationToken, LocationLink, Uri } from 'vscode';
+import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
+import { QueryRunner } from '../queryRunner';

-const SELECT_QUERY_NAME = '#select';
+export const SELECT_QUERY_NAME = '#select';
export const TEMPLATE_NAME = 'selectedSourceFile';

-export interface FullLocationLink extends vscode.LocationLink {
-  originUri: vscode.Uri;
+export interface FullLocationLink extends LocationLink {
+  originUri: Uri;
}

/**
@@ -29,21 +27,23 @@ export interface FullLocationLink extends vscode.LocationLink {
 * @param dbm The database manager
 * @param uriString The selected source file and location
 * @param keyType The contextual query type to run
+ * @param queryStorageDir The directory to store the query results
 * @param progress A progress callback
 * @param token A CancellationToken
 * @param filter A function that will filter extraneous results
 */
export async function getLocationsForUriString(
  cli: CodeQLCliServer,
-  qs: QueryServerClient,
+  qs: QueryRunner,
  dbm: DatabaseManager,
  uriString: string,
  keyType: KeyType,
+  queryStorageDir: string,
  progress: ProgressCallback,
-  token: vscode.CancellationToken,
+  token: CancellationToken,
  filter: (src: string, dest: string) => boolean
): Promise<FullLocationLink[]> {
-  const uri = decodeSourceArchiveUri(vscode.Uri.parse(uriString, true));
+  const uri = decodeSourceArchiveUri(Uri.parse(uriString, true));
  const sourceArchiveUri = encodeArchiveBasePath(uri.sourceArchiveZipPath);

  const db = dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
@@ -56,18 +56,16 @@ export async function getLocationsForUriString(

  const links: FullLocationLink[] = [];
  for (const query of await resolveQueries(cli, qlpack, keyType)) {
-    const results = await compileAndRunQueryAgainstDatabase(
-      cli,
-      qs,
-      db,
-      false,
-      vscode.Uri.file(query),
-      progress,
-      token,
-      templates
+    const initialInfo = await createInitialQueryInfo(
+      Uri.file(query),
+      {
+        name: db.name,
+        databaseUri: db.databaseUri.toString(),
+      },
+      false
    );
+    const results = await qs.compileAndRunQueryAgainstDatabase(db, initialInfo, queryStorageDir, progress, token, templates);
-    if (results.result.resultType == messages.QueryResultType.SUCCESS) {
+    if (results.successful) {
      links.push(...await getLinksFromResults(results, cli, db, filter));
    }
  }
@@ -104,15 +102,9 @@ async function getLinksFromResults(
  return localLinks;
}

-function createTemplates(path: string): messages.TemplateDefinitions {
+function createTemplates(path: string): Record<string, string> {
  return {
-    [TEMPLATE_NAME]: {
-      values: {
-        tuples: [[{
-          stringValue: path
-        }]]
-      }
-    }
+    [TEMPLATE_NAME]: path
  };
}
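The template payload changes shape in this file: the old `messages.TemplateDefinitions` wrapped the selected path in a `values.tuples` structure, while the new query runner accepts a flat `Record<string, string>`. A hedged sketch of the two shapes side by side (the field names come from the diff; the example path is made up):

```typescript
const TEMPLATE_NAME = 'selectedSourceFile';
const selectedPath = 'src/example/Example.java'; // placeholder path

// Old shape: nested tuples, as expected by the legacy query server messages.
const oldTemplates = {
  [TEMPLATE_NAME]: { values: { tuples: [[{ stringValue: selectedPath }]] } },
};

// New shape: a flat map from template name to value.
const newTemplates: Record<string, string> = {
  [TEMPLATE_NAME]: selectedPath,
};

console.log(oldTemplates, newTemplates);
```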
@@ -11,8 +11,9 @@ import {
} from './keyType';
import { CodeQLCliServer } from '../cli';
import { DatabaseItem } from '../databases';
+import { QlPacksForLanguage } from '../helpers';

-export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem): Promise<string> {
+export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem): Promise<QlPacksForLanguage> {
  if (db.contents === undefined) {
    throw new Error('Database is invalid and cannot infer QLPack.');
  }
@@ -21,28 +22,85 @@ export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem):
  return await helpers.getQlPackForDbscheme(cli, dbscheme);
}

-export async function resolveQueries(cli: CodeQLCliServer, qlpack: string, keyType: KeyType): Promise<string[]> {
+/**
+ * Finds the contextual queries with the specified key in a list of CodeQL packs.
+ *
+ * @param cli The CLI instance to use.
+ * @param qlpacks The list of packs to search.
+ * @param keyType The contextual query key of the query to search for.
+ * @returns The found queries from the first pack in which any matching queries were found.
+ */
+async function resolveQueriesFromPacks(cli: CodeQLCliServer, qlpacks: string[], keyType: KeyType): Promise<string[]> {
  const suiteFile = (await tmp.file({
    postfix: '.qls'
  })).path;
-  const suiteYaml = {
-    qlpack,
-    include: {
-      kind: kindOfKeyType(keyType),
-      'tags contain': tagOfKeyType(keyType)
-    }
-  };
-  await fs.writeFile(suiteFile, yaml.safeDump(suiteYaml), 'utf8');
+  const suiteYaml = [];
+  for (const qlpack of qlpacks) {
+    suiteYaml.push({
+      from: qlpack,
+      queries: '.',
+      include: {
+        kind: kindOfKeyType(keyType),
+        'tags contain': tagOfKeyType(keyType)
+      }
+    });
+  }
+  await fs.writeFile(suiteFile, yaml.dump(suiteYaml), 'utf8');

  const queries = await cli.resolveQueriesInSuite(suiteFile, helpers.getOnDiskWorkspaceFolders());
-  if (queries.length === 0) {
-    helpers.showAndLogErrorMessage(
-      `No ${nameOfKeyType(keyType)} queries (tagged "${tagOfKeyType(keyType)}") could be found in the current library path. \
-Try upgrading the CodeQL libraries. If that doesn't work, then ${nameOfKeyType(keyType)} queries are not yet available \
-for this language.`
-    );
-    throw new Error(`Couldn't find any queries tagged ${tagOfKeyType(keyType)} for qlpack ${qlpack}`);
-  }
  return queries;
}

+export async function resolveQueries(cli: CodeQLCliServer, qlpacks: QlPacksForLanguage, keyType: KeyType): Promise<string[]> {
+  const cliCanHandleLibraryPack = await cli.cliConstraints.supportsAllowLibraryPacksInResolveQueries();
+  const packsToSearch: string[] = [];
+  let blameCli: boolean;
+
+  if (cliCanHandleLibraryPack) {
+    // The CLI can handle both library packs and query packs, so search both packs in order.
+    packsToSearch.push(qlpacks.dbschemePack);
+    if (qlpacks.queryPack !== undefined) {
+      packsToSearch.push(qlpacks.queryPack);
+    }
+    // If we don't find the query, it's because it's not there, not because the CLI was unable to
+    // search the pack.
+    blameCli = false;
+  } else {
+    // Older CLIs can't handle `codeql resolve queries` with a suite that references a library pack.
+    if (qlpacks.dbschemePackIsLibraryPack) {
+      if (qlpacks.queryPack !== undefined) {
+        // Just search the query pack, because some older library/query releases still had the
+        // contextual queries in the query pack.
+        packsToSearch.push(qlpacks.queryPack);
+      }
+      // If we don't find it, it's because the CLI was unable to search the library pack that
+      // actually contains the query. Blame any failure on the CLI, not the packs.
+      blameCli = true;
+    } else {
+      // We have an old CLI, but the dbscheme pack is old enough that it's still a unified pack with
+      // both libraries and queries. Just search that pack.
+      packsToSearch.push(qlpacks.dbschemePack);
+      // Any CLI should be able to search the single query pack, so if we don't find it, it's
+      // because the language doesn't support it.
+      blameCli = false;
+    }
+  }
+
+  const queries = await resolveQueriesFromPacks(cli, packsToSearch, keyType);
+  if (queries.length > 0) {
+    return queries;
+  }
+
+  // No queries found. Determine the correct error message for the various scenarios.
+  const errorMessage = blameCli ?
+    `Your current version of the CodeQL CLI, '${(await cli.getVersion()).version}', \
+is unable to use contextual queries from recent versions of the standard CodeQL libraries. \
+Please upgrade to the latest version of the CodeQL CLI.`
+    :
+    `No ${nameOfKeyType(keyType)} queries (tagged "${tagOfKeyType(keyType)}") could be found in the current library path. \
+Try upgrading the CodeQL libraries. If that doesn't work, then ${nameOfKeyType(keyType)} queries are not yet available \
+for this language.`;
+
+  void helpers.showAndLogErrorMessage(errorMessage);
+  throw new Error(`Couldn't find any queries tagged ${tagOfKeyType(keyType)} in any of the following packs: ${packsToSearch.join(', ')}.`);
+}
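The rewritten resolver builds one suite entry per pack instead of a single `qlpack:` entry. As a rough sketch of what the temporary `.qls` file would contain for two packs (the pack names and key-type strings are placeholders, and `js-yaml` is assumed, matching the `yaml.dump` call in the diff):

```typescript
import * as yaml from 'js-yaml';

// Placeholder pack names; a real run would use the dbscheme pack and query pack for the language.
const packsToSearch = ['codeql/java-all', 'codeql/java-queries'];
const suite = packsToSearch.map((pack) => ({
  from: pack,
  queries: '.',
  include: {
    kind: 'graph',
    'tags contain': 'ide-contextual-queries/print-ast',
  },
}));

// Approximately what gets written to the temporary .qls suite file.
console.log(yaml.dump(suite));
```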
@@ -16,9 +16,6 @@ import { CodeQLCliServer } from '../cli';
import { DatabaseManager } from '../databases';
import { CachedOperation } from '../helpers';
import { ProgressCallback, withProgress } from '../commandRunner';
-import * as messages from '../pure/messages';
-import { QueryServerClient } from '../queryserver-client';
-import { compileAndRunQueryAgainstDatabase, QueryWithResults } from '../run-queries';
import AstBuilder from './astBuilder';
import {
  KeyType,
@@ -26,6 +23,8 @@ import {
import { FullLocationLink, getLocationsForUriString, TEMPLATE_NAME } from './locationFinder';
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
import { isCanary, NO_CACHE_AST_VIEWER } from '../config';
+import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
+import { QueryRunner } from '../queryRunner';

/**
 * Run templated CodeQL queries to find definitions and references in
@@ -39,8 +38,9 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {

  constructor(
    private cli: CodeQLCliServer,
-    private qs: QueryServerClient,
+    private qs: QueryRunner,
    private dbm: DatabaseManager,
+    private queryStorageDir: string,
  ) {
    this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
  }
@@ -68,6 +68,7 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
      this.dbm,
      uriString,
      KeyType.DefinitionQuery,
+      this.queryStorageDir,
      progress,
      token,
      (src, _dest) => src === uriString
@@ -81,8 +82,9 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {

  constructor(
    private cli: CodeQLCliServer,
-    private qs: QueryServerClient,
+    private qs: QueryRunner,
    private dbm: DatabaseManager,
+    private queryStorageDir: string,
  ) {
    this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
  }
@@ -115,6 +117,7 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
      this.dbm,
      uriString,
      KeyType.DefinitionQuery,
+      this.queryStorageDir,
      progress,
      token,
      (src, _dest) => src === uriString
@@ -123,33 +126,39 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
  }
}

+type QueryWithDb = {
+  query: QueryWithResults,
+  dbUri: Uri
+};
+
export class TemplatePrintAstProvider {
-  private cache: CachedOperation<QueryWithResults>;
+  private cache: CachedOperation<QueryWithDb>;

  constructor(
    private cli: CodeQLCliServer,
-    private qs: QueryServerClient,
+    private qs: QueryRunner,
    private dbm: DatabaseManager,
+    private queryStorageDir: string,
  ) {
-    this.cache = new CachedOperation<QueryWithResults>(this.getAst.bind(this));
+    this.cache = new CachedOperation<QueryWithDb>(this.getAst.bind(this));
  }

  async provideAst(
    progress: ProgressCallback,
    token: CancellationToken,
-    document?: TextDocument
+    fileUri?: Uri
  ): Promise<AstBuilder | undefined> {
-    if (!document) {
+    if (!fileUri) {
      throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
    }
-    const queryResults = this.shouldCache()
-      ? await this.cache.get(document.uri.toString(), progress, token)
-      : await this.getAst(document.uri.toString(), progress, token);
+    const { query, dbUri } = this.shouldCache()
+      ? await this.cache.get(fileUri.toString(), progress, token)
+      : await this.getAst(fileUri.toString(), progress, token);

    return new AstBuilder(
-      queryResults, this.cli,
-      this.dbm.findDatabaseItem(Uri.parse(queryResults.database.databaseUri!, true))!,
-      document.fileName
+      query, this.cli,
+      this.dbm.findDatabaseItem(dbUri)!,
+      fileUri,
    );
  }

@@ -161,7 +170,7 @@ export class TemplatePrintAstProvider {
    uriString: string,
    progress: ProgressCallback,
    token: CancellationToken
-  ): Promise<QueryWithResults> {
+  ): Promise<QueryWithDb> {
    const uri = Uri.parse(uriString, true);
    if (uri.scheme !== zipArchiveScheme) {
      throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
@@ -175,8 +184,8 @@ export class TemplatePrintAstProvider {
      throw new Error('Can\'t infer database from the provided source.');
    }

-    const qlpack = await qlpackOfDatabase(this.cli, db);
-    const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintAstQuery);
+    const qlpacks = await qlpackOfDatabase(this.cli, db);
+    const queries = await resolveQueries(this.cli, qlpacks, KeyType.PrintAstQuery);
    if (queries.length > 1) {
      throw new Error('Found multiple Print AST queries. Can\'t continue');
    }
@@ -185,25 +194,83 @@ export class TemplatePrintAstProvider {
    }

    const query = queries[0];
-    const templates: messages.TemplateDefinitions = {
-      [TEMPLATE_NAME]: {
-        values: {
-          tuples: [[{
-            stringValue: zippedArchive.pathWithinSourceArchive
-          }]]
-        }
-      }
+    const templates: Record<string, string> = {
+      [TEMPLATE_NAME]:
+        zippedArchive.pathWithinSourceArchive
    };

-    return await compileAndRunQueryAgainstDatabase(
-      this.cli,
-      this.qs,
-      db,
-      false,
+    const initialInfo = await createInitialQueryInfo(
      Uri.file(query),
-      progress,
-      token,
-      templates
+      {
+        name: db.name,
+        databaseUri: db.databaseUri.toString(),
+      },
+      false
    );
+
+    return {
+      query: await this.qs.compileAndRunQueryAgainstDatabase(
+        db,
+        initialInfo,
+        this.queryStorageDir,
+        progress,
+        token,
+        templates
+      ),
+      dbUri: db.databaseUri
+    };
+  }
+}
+
+export class TemplatePrintCfgProvider {
+  private cache: CachedOperation<[Uri, Record<string, string>] | undefined>;
+
+  constructor(
+    private cli: CodeQLCliServer,
+    private dbm: DatabaseManager,
+  ) {
+    this.cache = new CachedOperation<[Uri, Record<string, string>] | undefined>(this.getCfgUri.bind(this));
+  }
+
+  async provideCfgUri(document?: TextDocument): Promise<[Uri, Record<string, string>] | undefined> {
+    if (!document) {
+      return;
+    }
+    return await this.cache.get(document.uri.toString());
+  }
+
+  private async getCfgUri(uriString: string): Promise<[Uri, Record<string, string>]> {
+    const uri = Uri.parse(uriString, true);
+    if (uri.scheme !== zipArchiveScheme) {
+      throw new Error('CFG Viewing is only available for databases with zipped source archives.');
+    }
+
+    const zippedArchive = decodeSourceArchiveUri(uri);
+    const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
+    const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
+
+    if (!db) {
+      throw new Error('Can\'t infer database from the provided source.');
+    }
+
+    const qlpack = await qlpackOfDatabase(this.cli, db);
+    if (!qlpack) {
+      throw new Error('Can\'t infer qlpack from database source archive.');
+    }
+    const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintCfgQuery);
+    if (queries.length > 1) {
+      throw new Error(`Found multiple Print CFG queries. Can't continue. Make sure there is exacly one query with the tag ${KeyType.PrintCfgQuery}`);
+    }
+    if (queries.length === 0) {
+      throw new Error(`Did not find any Print CFG queries. Can't continue. Make sure there is exacly one query with the tag ${KeyType.PrintCfgQuery}`);
+    }
+
+    const queryUri = Uri.file(queries[0]);
+
+    const templates: Record<string, string> = {
+      [TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive
+    };
+
+    return [queryUri, templates];
  }
}
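All of the providers in this file memoize their results per source-file URI through `CachedOperation`. As a rough stand-in for that pattern (not the extension's `CachedOperation` class, which also takes progress and cancellation arguments), the core idea is a URI-keyed async memo:

```typescript
// Minimal URI-keyed async memoization, illustrating the caching pattern only.
class UriKeyedCache<T> {
  private cache = new Map<string, Promise<T>>();
  constructor(private operation: (uri: string) => Promise<T>) {}

  get(uri: string): Promise<T> {
    let pending = this.cache.get(uri);
    if (!pending) {
      pending = this.operation(uri);
      this.cache.set(uri, pending);
    }
    return pending;
  }
}

// Usage: repeated requests for the same file URI reuse the first computation.
const astCache = new UriKeyedCache(async (uri) => `AST for ${uri}`);
void astCache.get('file:///example.ts');
```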
@@ -1,14 +1,17 @@
import fetch, { Response } from 'node-fetch';
-import * as unzipper from 'unzipper';
import { zip } from 'zip-a-folder';
+import * as unzipper from 'unzipper';
import {
  Uri,
  CancellationToken,
  commands,
  window,
} from 'vscode';
+import { CodeQLCliServer } from './cli';
import * as fs from 'fs-extra';
import * as path from 'path';
+import * as Octokit from '@octokit/rest';
+import { retry } from '@octokit/plugin-retry';

import { DatabaseManager, DatabaseItem } from './databases';
import {
@@ -19,7 +22,9 @@ import {
  ProgressCallback,
} from './commandRunner';
import { logger } from './logging';
-import { tmpDir } from './run-queries';
+import { tmpDir } from './helpers';
+import { Credentials } from './authentication';
+import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';

/**
 * Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
@@ -32,6 +37,7 @@ export async function promptImportInternetDatabase(
  storagePath: string,
  progress: ProgressCallback,
  token: CancellationToken,
+  cli?: CodeQLCliServer
): Promise<DatabaseItem | undefined> {
  const databaseUrl = await window.showInputBox({
    prompt: 'Enter URL of zipfile of database to download',
@@ -44,20 +50,95 @@ export async function promptImportInternetDatabase(

  const item = await databaseArchiveFetcher(
    databaseUrl,
+    {},
    databaseManager,
    storagePath,
+    undefined,
    progress,
-    token
+    token,
+    cli
  );

  if (item) {
-    commands.executeCommand('codeQLDatabases.focus');
-    showAndLogInformationMessage('Database downloaded and imported successfully.');
+    await commands.executeCommand('codeQLDatabases.focus');
+    void showAndLogInformationMessage('Database downloaded and imported successfully.');
  }
  return item;

}

+/**
+ * Prompts a user to fetch a database from GitHub.
+ * User enters a GitHub repository and then the user is asked which language
+ * to download (if there is more than one)
+ *
+ * @param databaseManager the DatabaseManager
+ * @param storagePath where to store the unzipped database.
+ */
+export async function promptImportGithubDatabase(
+  databaseManager: DatabaseManager,
+  storagePath: string,
+  credentials: Credentials | undefined,
+  progress: ProgressCallback,
+  token: CancellationToken,
+  cli?: CodeQLCliServer
+): Promise<DatabaseItem | undefined> {
+  progress({
+    message: 'Choose repository',
+    step: 1,
+    maxStep: 2
+  });
+  const githubRepo = await window.showInputBox({
+    title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
+    placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
+    ignoreFocusOut: true,
+  });
+  if (!githubRepo) {
+    return;
+  }
+
+  if (!looksLikeGithubRepo(githubRepo)) {
+    throw new Error(`Invalid GitHub repository: ${githubRepo}`);
+  }
+
+  const octokit = credentials ? await credentials.getOctokit(true) : new Octokit.Octokit({ retry });
+
+  const result = await convertGithubNwoToDatabaseUrl(githubRepo, octokit, progress);
+  if (!result) {
+    return;
+  }
+
+  const { databaseUrl, name, owner } = result;
+
+  /**
+   * The 'token' property of the token object returned by `octokit.auth()`.
+   * The object is undocumented, but looks something like this:
+   * {
+   *   token: 'xxxx',
+   *   tokenType: 'oauth',
+   *   type: 'token',
+   * }
+   * We only need the actual token string.
+   */
+  const octokitToken = (await octokit.auth() as { token: string })?.token;
+  const item = await databaseArchiveFetcher(
+    databaseUrl,
+    { 'Accept': 'application/zip', 'Authorization': octokitToken ? `Bearer ${octokitToken}` : '' },
+    databaseManager,
+    storagePath,
+    `${owner}/${name}`,
+    progress,
+    token,
+    cli
+  );
+  if (item) {
+    await commands.executeCommand('codeQLDatabases.focus');
+    void showAndLogInformationMessage('Database downloaded and imported successfully.');
+    return item;
+  }
+  return;
+}
+
/**
 * Prompts a user to fetch a database from lgtm.
 * User enters a project url and then the user is asked which language
@@ -70,8 +151,14 @@ export async function promptImportLgtmDatabase(
  databaseManager: DatabaseManager,
  storagePath: string,
  progress: ProgressCallback,
-  token: CancellationToken
+  token: CancellationToken,
+  cli?: CodeQLCliServer
): Promise<DatabaseItem | undefined> {
+  progress({
+    message: 'Choose project',
+    step: 1,
+    maxStep: 2
+  });
  const lgtmUrl = await window.showInputBox({
    prompt:
      'Enter the project slug or URL on LGTM (e.g., g/github/codeql or https://lgtm.com/projects/g/github/codeql)',
@@ -81,18 +168,21 @@ export async function promptImportLgtmDatabase(
  }

  if (looksLikeLgtmUrl(lgtmUrl)) {
-    const databaseUrl = await convertToDatabaseUrl(lgtmUrl);
+    const databaseUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progress);
    if (databaseUrl) {
      const item = await databaseArchiveFetcher(
        databaseUrl,
+        {},
        databaseManager,
        storagePath,
+        undefined,
        progress,
-        token
+        token,
+        cli
      );
      if (item) {
-        commands.executeCommand('codeQLDatabases.focus');
-        showAndLogInformationMessage('Database downloaded and imported successfully.');
+        await commands.executeCommand('codeQLDatabases.focus');
+        void showAndLogInformationMessage('Database downloaded and imported successfully.');
      }
      return item;
    }
@@ -102,6 +192,16 @@ export async function promptImportLgtmDatabase(
  return;
}

+export async function retrieveCanonicalRepoName(lgtmUrl: string) {
+  const givenRepoName = extractProjectSlug(lgtmUrl);
+  const response = await checkForFailingResponse(await fetch(`https://api.github.com/repos/${givenRepoName}`), 'Failed to locate the repository on github');
+  const repo = await response.json();
+  if (!repo || !repo.full_name) {
+    return;
+  }
+  return repo.full_name;
+}
+
/**
 * Imports a database from a local archive.
 *
@@ -115,22 +215,26 @@ export async function importArchiveDatabase(
  storagePath: string,
  progress: ProgressCallback,
  token: CancellationToken,
+  cli?: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
  try {
    const item = await databaseArchiveFetcher(
      databaseUrl,
+      {},
      databaseManager,
      storagePath,
+      undefined,
      progress,
-      token
+      token,
+      cli
    );
    if (item) {
-      commands.executeCommand('codeQLDatabases.focus');
-      showAndLogInformationMessage('Database unzipped and imported successfully.');
+      await commands.executeCommand('codeQLDatabases.focus');
+      void showAndLogInformationMessage('Database unzipped and imported successfully.');
    }
    return item;
  } catch (e) {
-    if (e.message.includes('unexpected end of file')) {
+    if (getErrorMessage(e).includes('unexpected end of file')) {
      throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
    } else {
      // delegate
@@ -144,17 +248,22 @@ export async function importArchiveDatabase(
 * or in the local filesystem.
 *
 * @param databaseUrl URL from which to grab the database
+ * @param requestHeaders Headers to send with the request
 * @param databaseManager the DatabaseManager
 * @param storagePath where to store the unzipped database.
+ * @param nameOverride a name for the database that overrides the default
 * @param progress callback to send progress messages to
 * @param token cancellation token
 */
async function databaseArchiveFetcher(
  databaseUrl: string,
+  requestHeaders: { [key: string]: string },
  databaseManager: DatabaseManager,
  storagePath: string,
+  nameOverride: string | undefined,
  progress: ProgressCallback,
-  token: CancellationToken
+  token: CancellationToken,
+  cli?: CodeQLCliServer,
): Promise<DatabaseItem> {
  progress({
    message: 'Getting database',
@@ -168,9 +277,9 @@ async function databaseArchiveFetcher(
  const unzipPath = await getStorageFolder(storagePath, databaseUrl);

  if (isFile(databaseUrl)) {
-    await readAndUnzip(databaseUrl, unzipPath, progress);
+    await readAndUnzip(databaseUrl, unzipPath, cli, progress);
  } else {
-    await fetchAndUnzip(databaseUrl, unzipPath, progress);
+    await fetchAndUnzip(databaseUrl, requestHeaders, unzipPath, cli, progress);
  }

  progress({
@@ -193,7 +302,7 @@ async function databaseArchiveFetcher(
    });
    await ensureZippedSourceLocation(dbPath);

-    const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath));
+    const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath), nameOverride);
    await databaseManager.setCurrentDatabaseItem(item);
    return item;
  } else {
@@ -244,6 +353,7 @@ function validateHttpsUrl(databaseUrl: string) {
|
|||||||
async function readAndUnzip(
|
async function readAndUnzip(
|
||||||
zipUrl: string,
|
zipUrl: string,
|
||||||
unzipPath: string,
|
unzipPath: string,
|
||||||
|
cli?: CodeQLCliServer,
|
||||||
progress?: ProgressCallback
|
progress?: ProgressCallback
|
||||||
) {
|
) {
|
||||||
// TODO: Providing progress as the file is unzipped is currently blocked
|
// TODO: Providing progress as the file is unzipped is currently blocked
|
||||||
@@ -254,16 +364,23 @@ async function readAndUnzip(
|
|||||||
step: 9,
|
step: 9,
|
||||||
message: `Unzipping into ${path.basename(unzipPath)}`
|
message: `Unzipping into ${path.basename(unzipPath)}`
|
||||||
});
|
});
|
||||||
// Must get the zip central directory since streaming the
|
if (cli && await cli.cliConstraints.supportsDatabaseUnbundle()) {
|
||||||
// zip contents may not have correct local file headers.
|
// Use the `database unbundle` command if the installed cli version supports it
|
||||||
// Instead, we can only rely on the central directory.
|
await cli.databaseUnbundle(zipFile, unzipPath);
|
||||||
const directory = await unzipper.Open.file(zipFile);
|
} else {
|
||||||
await directory.extract({ path: unzipPath });
|
// Must get the zip central directory since streaming the
|
||||||
|
// zip contents may not have correct local file headers.
|
||||||
|
// Instead, we can only rely on the central directory.
|
||||||
|
const directory = await unzipper.Open.file(zipFile);
|
||||||
|
await directory.extract({ path: unzipPath });
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function fetchAndUnzip(
|
async function fetchAndUnzip(
|
||||||
databaseUrl: string,
|
databaseUrl: string,
|
||||||
|
requestHeaders: { [key: string]: string },
|
||||||
unzipPath: string,
|
unzipPath: string,
|
||||||
|
cli?: CodeQLCliServer,
|
||||||
progress?: ProgressCallback
|
progress?: ProgressCallback
|
||||||
) {
|
) {
|
||||||
// Although it is possible to download and stream directly to an unzipped directory,
|
// Although it is possible to download and stream directly to an unzipped directory,
|
||||||
@@ -280,7 +397,10 @@ async function fetchAndUnzip(
|
|||||||
step: 1,
|
step: 1,
|
||||||
});
|
});
|
||||||
|
|
||||||
const response = await checkForFailingResponse(await fetch(databaseUrl));
|
const response = await checkForFailingResponse(
|
||||||
|
await fetch(databaseUrl, { headers: requestHeaders }),
|
||||||
|
'Error downloading database'
|
||||||
|
);
|
||||||
const archiveFileStream = fs.createWriteStream(archivePath);
|
const archiveFileStream = fs.createWriteStream(archivePath);
|
||||||
|
|
||||||
const contentLength = response.headers.get('content-length');
|
const contentLength = response.headers.get('content-length');
|
||||||
@@ -293,13 +413,13 @@ async function fetchAndUnzip(
|
|||||||
.on('error', reject)
|
.on('error', reject)
|
||||||
);
|
);
|
||||||
|
|
||||||
await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, progress);
|
await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, cli, progress);
|
||||||
|
|
||||||
// remove archivePath eagerly since these archives can be large.
|
// remove archivePath eagerly since these archives can be large.
|
||||||
await fs.remove(archivePath);
|
await fs.remove(archivePath);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function checkForFailingResponse(response: Response): Promise<Response | never> {
|
async function checkForFailingResponse(response: Response, errorMessage: string): Promise<Response | never> {
|
||||||
if (response.ok) {
|
if (response.ok) {
|
||||||
return response;
|
return response;
|
||||||
}
|
}
|
||||||
@@ -313,7 +433,7 @@ async function checkForFailingResponse(response: Response): Promise<Response | n
|
|||||||
} catch (e) {
|
} catch (e) {
|
||||||
msg = text;
|
msg = text;
|
||||||
}
|
}
|
||||||
throw new Error(`Error downloading database.\n\nReason: ${msg}`);
|
throw new Error(`${errorMessage}.\n\nReason: ${msg}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
function isFile(databaseUrl: string) {
|
function isFile(databaseUrl: string) {
|
||||||
@@ -350,6 +470,88 @@ export async function findDirWithFile(
  return;
}

+ /**
+  * The URL pattern is https://github.com/{owner}/{name}/{subpages}.
+  *
+  * This function accepts any URL that matches the pattern above. It also accepts just the
+  * name with owner (NWO): `<owner>/<repo>`.
+  *
+  * @param githubRepo The GitHub repository URL or NWO
+  *
+  * @return true if this looks like a valid GitHub repository URL or NWO
+  */
+ export function looksLikeGithubRepo(
+   githubRepo: string | undefined
+ ): githubRepo is string {
+   if (!githubRepo) {
+     return false;
+   }
+   if (REPO_REGEX.test(githubRepo) || convertGitHubUrlToNwo(githubRepo)) {
+     return true;
+   }
+   return false;
+ }
+
+ /**
+  * Converts a GitHub repository URL to the corresponding NWO.
+  * @param githubUrl The GitHub repository URL
+  * @return The corresponding NWO, or undefined if the URL is not valid
+  */
+ function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
+   try {
+     const uri = Uri.parse(githubUrl, true);
+     if (uri.scheme !== 'https') {
+       return;
+     }
+     if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
+       return;
+     }
+     const paths = uri.path.split('/').filter((segment: string) => segment);
+     const nwo = `${paths[0]}/${paths[1]}`;
+     if (REPO_REGEX.test(nwo)) {
+       return nwo;
+     }
+     return;
+   } catch (e) {
+     // Ignore the error here, since we catch failures at a higher level.
+     // In particular: returning undefined leads to an error in 'promptImportGithubDatabase'.
+     return;
+   }
+ }
+
+ export async function convertGithubNwoToDatabaseUrl(
+   githubRepo: string,
+   octokit: Octokit.Octokit,
+   progress: ProgressCallback): Promise<{
+     databaseUrl: string,
+     owner: string,
+     name: string
+   } | undefined> {
+   try {
+     const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
+     const [owner, repo] = nwo.split('/');
+
+     const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });
+
+     const languages = response.data.map((db: any) => db.language);
+
+     const language = await promptForLanguage(languages, progress);
+     if (!language) {
+       return;
+     }
+
+     return {
+       databaseUrl: `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`,
+       owner,
+       name: repo
+     };
+
+   } catch (e) {
+     void logger.log(`Error: ${getErrorMessage(e)}`);
+     throw new Error(`Unable to get database for '${githubRepo}'`);
+   }
+ }
+
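For orientation, an illustrative sketch of how the new helpers classify input (not part of the diff; it assumes REPO_REGEX only matches plain `owner/name` strings, as the code above implies):

looksLikeGithubRepo('github/codeql');                    // true  – NWO form
looksLikeGithubRepo('https://github.com/github/codeql'); // true  – URL form, via convertGitHubUrlToNwo
looksLikeGithubRepo('http://github.com/github/codeql');  // false – only the https scheme is accepted
looksLikeGithubRepo(undefined);                          // false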
/**
 * The URL pattern is https://lgtm.com/projects/{provider}/{org}/{name}/{irrelevant-subpages}.
 * There are several possibilities for the provider: in addition to GitHub.com (g),
@@ -385,7 +587,7 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
    return false;
  }

- const paths = uri.path.split('/').filter((segment) => segment);
+ const paths = uri.path.split('/').filter((segment: string) => segment);
  return paths.length >= 4 && paths[0] === 'projects';
} catch (e) {
  return false;

@@ -404,24 +606,41 @@ function convertRawLgtmSlug(maybeSlug: string): string | undefined {
  return;
}

+ function extractProjectSlug(lgtmUrl: string): string | undefined {
+   // Only matches the '/g/' provider (github)
+   const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');
+   const match = lgtmUrl.match(re);
+   if (!match) {
+     return;
+   }
+   return match[1];
+ }
+
// exported for testing
- export async function convertToDatabaseUrl(lgtmUrl: string) {
+ export async function convertLgtmUrlToDatabaseUrl(
+   lgtmUrl: string,
+   progress: ProgressCallback) {
  try {
    lgtmUrl = convertRawLgtmSlug(lgtmUrl) || lgtmUrl;
+   let projectJson = await downloadLgtmProjectMetadata(lgtmUrl);
-   const uri = Uri.parse(lgtmUrl, true);
-   const paths = ['api', 'v1.0'].concat(
-     uri.path.split('/').filter((segment) => segment)
-   ).slice(0, 6);
-   const projectUrl = `https://lgtm.com/${paths.join('/')}`;
-   const projectResponse = await fetch(projectUrl);
-   const projectJson = await projectResponse.json();

    if (projectJson.code === 404) {
-     throw new Error();
+     // fallback check for github repositories with same name but different case
+     // will fail for other providers
+     let canonicalName = await retrieveCanonicalRepoName(lgtmUrl);
+     if (!canonicalName) {
+       throw new Error(`Project was not found at ${lgtmUrl}.`);
+     }
+     canonicalName = convertRawLgtmSlug(`g/${canonicalName}`);
+     projectJson = await downloadLgtmProjectMetadata(canonicalName);
+     if (projectJson.code === 404) {
+       throw new Error('Failed to download project from LGTM.');
+     }
    }

-   const language = await promptForLanguage(projectJson);
+   const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];
+
+   const language = await promptForLanguage(languages, progress);
    if (!language) {
      return;
    }
@@ -433,25 +652,43 @@ export async function convertToDatabaseUrl(lgtmUrl: string) {
      language,
    ].join('/')}`;
  } catch (e) {
-   logger.log(`Error: ${e.message}`);
+   void logger.log(`Error: ${getErrorMessage(e)}`);
    throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
  }
}

+ async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
+   const uri = Uri.parse(lgtmUrl, true);
+   const paths = ['api', 'v1.0'].concat(
+     uri.path.split('/').filter((segment: string) => segment)
+   ).slice(0, 6);
+   const projectUrl = `https://lgtm.com/${paths.join('/')}`;
+   const projectResponse = await fetch(projectUrl);
+   return projectResponse.json();
+ }
+
async function promptForLanguage(
- projectJson: any
+ languages: string[],
+ progress: ProgressCallback
): Promise<string | undefined> {
- if (!projectJson?.languages?.length) {
-   return;
+ progress({
+   message: 'Choose language',
+   step: 2,
+   maxStep: 2
+ });
+ if (!languages.length) {
+   throw new Error('No databases found');
  }
- if (projectJson.languages.length === 1) {
-   return projectJson.languages[0].language;
+ if (languages.length === 1) {
+   return languages[0];
  }

  return await window.showQuickPick(
-   projectJson.languages.map((lang: { language: string }) => lang.language), {
-     placeHolder: 'Select the database language to download:'
-   }
+   languages,
+   {
+     placeHolder: 'Select the database language to download:',
+     ignoreFocusOut: true,
+   }
  );
}
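A small illustrative sketch of the reworked promptForLanguage (not part of the diff; it assumes a ProgressCallback named progress is in scope, as in the surrounding code):

// Zero languages  -> throws 'No databases found'.
// One language    -> returned immediately, no quick pick is shown.
// Many languages  -> the user picks from a quick pick that stays open on focus loss.
const language = await promptForLanguage(['cpp', 'javascript'], progress);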
@@ -28,16 +28,17 @@ import {
  showAndLogErrorMessage
} from './helpers';
import { logger } from './logging';
- import { clearCacheInDatabase } from './run-queries';
- import * as qsClient from './queryserver-client';
- import { upgradeDatabaseExplicit } from './upgrades';
import {
  importArchiveDatabase,
+ promptImportGithubDatabase,
  promptImportInternetDatabase,
  promptImportLgtmDatabase,
} from './databaseFetcher';
import { CancellationToken } from 'vscode';
- import { asyncFilter } from './pure/helpers-pure';
+ import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
+ import { Credentials } from './authentication';
+ import { QueryRunner } from './queryRunner';
+ import { isCanary } from './config';

type ThemableIconPath = { light: string; dark: string } | string;

@@ -135,6 +136,7 @@ class DatabaseTreeDataProvider extends DisposableObject
      this.extensionPath,
      SELECTED_DATABASE_ICON
    );
+   item.contextValue = 'currentDatabase';
  } else if (element.error !== undefined) {
    item.iconPath = joinThemableIconPath(
      this.extensionPath,

@@ -179,7 +181,7 @@ class DatabaseTreeDataProvider extends DisposableObject

  public set sortOrder(newSortOrder: SortOrder) {
    this._sortOrder = newSortOrder;
-   this._onDidChangeTreeData.fire();
+   this._onDidChangeTreeData.fire(undefined);
  }
}

@@ -216,9 +218,10 @@ export class DatabaseUI extends DisposableObject {

  public constructor(
    private databaseManager: DatabaseManager,
-   private readonly queryServer: qsClient.QueryServerClient | undefined,
+   private readonly queryServer: QueryRunner | undefined,
    private readonly storagePath: string,
-   readonly extensionPath: string
+   readonly extensionPath: string,
+   private readonly getCredentials: () => Promise<Credentials>
  ) {
    super();

@@ -234,7 +237,7 @@ export class DatabaseUI extends DisposableObject {
  }

  init() {
-   logger.log('Registering database panel commands.');
+   void logger.log('Registering database panel commands.');
    this.push(
      commandRunnerWithProgress(
        'codeQL.setCurrentDatabase',
@@ -290,12 +293,26 @@ export class DatabaseUI extends DisposableObject {
        }
      )
    );
+   this.push(
+     commandRunnerWithProgress(
+       'codeQLDatabases.chooseDatabaseGithub',
+       async (
+         progress: ProgressCallback,
+         token: CancellationToken
+       ) => {
+         const credentials = isCanary() ? await this.getCredentials() : undefined;
+         await this.handleChooseDatabaseGithub(credentials, progress, token);
+       },
+       {
+         title: 'Adding database from GitHub',
+       })
+   );
    this.push(
      commandRunnerWithProgress(
        'codeQLDatabases.chooseDatabaseLgtm',
        this.handleChooseDatabaseLgtm,
        {
-         title: 'Adding database from LGTM. Choose a language from the dropdown, if requested.',
+         title: 'Adding database from LGTM',
        })
    );
    this.push(

@@ -348,6 +365,12 @@ export class DatabaseUI extends DisposableObject {
        this.handleOpenFolder
      )
    );
+   this.push(
+     commandRunner(
+       'codeQLDatabases.addDatabaseSource',
+       this.handleAddSource
+     )
+   );
    this.push(
      commandRunner(
        'codeQLDatabases.removeOrphanedDatabases',

@@ -365,24 +388,23 @@ export class DatabaseUI extends DisposableObject {
  handleChooseDatabaseFolder = async (
    progress: ProgressCallback,
    token: CancellationToken
- ): Promise<DatabaseItem | undefined> => {
+ ): Promise<void> => {
    try {
-     return await this.chooseAndSetDatabase(true, progress, token);
+     await this.chooseAndSetDatabase(true, progress, token);
    } catch (e) {
-     showAndLogErrorMessage(e.message);
-     return undefined;
+     void showAndLogErrorMessage(getErrorMessage(e));
    }
  };

  handleRemoveOrphanedDatabases = async (): Promise<void> => {
-   logger.log('Removing orphaned databases from workspace storage.');
+   void logger.log('Removing orphaned databases from workspace storage.');
    let dbDirs = undefined;

    if (
      !(await fs.pathExists(this.storagePath)) ||
      !(await fs.stat(this.storagePath)).isDirectory()
    ) {
-     logger.log('Missing or invalid storage directory. Not trying to remove orphaned databases.');
+     void logger.log('Missing or invalid storage directory. Not trying to remove orphaned databases.');
      return;
    }

@@ -403,7 +425,7 @@ export class DatabaseUI extends DisposableObject {
    dbDirs = await asyncFilter(dbDirs, isLikelyDatabaseRoot);

    if (!dbDirs.length) {
-     logger.log('No orphaned databases found.');
+     void logger.log('No orphaned databases found.');
      return;
    }
@@ -412,8 +434,8 @@ export class DatabaseUI extends DisposableObject {
    await Promise.all(
      dbDirs.map(async dbDir => {
        try {
-         logger.log(`Deleting orphaned database '${dbDir}'.`);
-         await fs.rmdir(dbDir, { recursive: true } as any); // typings doesn't recognize the options argument
+         void logger.log(`Deleting orphaned database '${dbDir}'.`);
+         await fs.remove(dbDir);
        } catch (e) {
          failures.push(`${path.basename(dbDir)}`);
        }

@@ -422,9 +444,8 @@ export class DatabaseUI extends DisposableObject {

    if (failures.length) {
      const dirname = path.dirname(failures[0]);
-     showAndLogErrorMessage(
-       `Failed to delete unused databases (${
-         failures.join(', ')
+     void showAndLogErrorMessage(
+       `Failed to delete unused databases (${failures.join(', ')
        }).\nTo delete unused databases, please remove them manually from the storage folder ${dirname}.`
      );
    }

@@ -434,26 +455,39 @@ export class DatabaseUI extends DisposableObject {
  handleChooseDatabaseArchive = async (
    progress: ProgressCallback,
    token: CancellationToken
- ): Promise<DatabaseItem | undefined> => {
+ ): Promise<void> => {
    try {
-     return await this.chooseAndSetDatabase(false, progress, token);
+     await this.chooseAndSetDatabase(false, progress, token);
    } catch (e) {
-     showAndLogErrorMessage(e.message);
-     return undefined;
+     void showAndLogErrorMessage(getErrorMessage(e));
    }
  };

  handleChooseDatabaseInternet = async (
    progress: ProgressCallback,
    token: CancellationToken
- ): Promise<
-   DatabaseItem | undefined
- > => {
+ ): Promise<DatabaseItem | undefined> => {
    return await promptImportInternetDatabase(
      this.databaseManager,
      this.storagePath,
      progress,
-     token
+     token,
+     this.queryServer?.cliServer
+   );
+ };
+
+ handleChooseDatabaseGithub = async (
+   credentials: Credentials | undefined,
+   progress: ProgressCallback,
+   token: CancellationToken
+ ): Promise<DatabaseItem | undefined> => {
+   return await promptImportGithubDatabase(
+     this.databaseManager,
+     this.storagePath,
+     credentials,
+     progress,
+     token,
+     this.queryServer?.cliServer
    );
  };

@@ -465,7 +499,8 @@ export class DatabaseUI extends DisposableObject {
      this.databaseManager,
      this.storagePath,
      progress,
-     token
+     token,
+     this.queryServer?.cliServer
    );
  };
@@ -537,8 +572,7 @@ export class DatabaseUI extends DisposableObject {

    // Search for upgrade scripts in any workspace folders available

-   await upgradeDatabaseExplicit(
-     this.queryServer,
+   await this.queryServer.upgradeDatabaseExplicit(
      databaseItem,
      progress,
      token

@@ -553,8 +587,7 @@ export class DatabaseUI extends DisposableObject {
      this.queryServer !== undefined &&
      this.databaseManager.currentDatabaseItem !== undefined
    ) {
-     await clearCacheInDatabase(
-       this.queryServer,
+     await this.queryServer.clearCacheInDatabase(
        this.databaseManager.currentDatabaseItem,
        progress,
        token

@@ -575,7 +608,8 @@ export class DatabaseUI extends DisposableObject {
        this.databaseManager,
        this.storagePath,
        progress,
-       token
+       token,
+       this.queryServer?.cliServer
      );
    } else {
      await this.setCurrentDatabase(progress, token, uri);

@@ -583,9 +617,7 @@ export class DatabaseUI extends DisposableObject {
    } catch (e) {
      // rethrow and let this be handled by default error handling.
      throw new Error(
-       `Could not set database to ${path.basename(uri.fsPath)}. Reason: ${
-         e.message
-       }`
+       `Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
      );
    }
  };

@@ -617,7 +649,7 @@ export class DatabaseUI extends DisposableObject {
    });

    if (newName) {
-     this.databaseManager.renameDatabaseItem(databaseItem, newName);
+     await this.databaseManager.renameDatabaseItem(databaseItem, newName);
    }
  };

@@ -634,6 +666,24 @@ export class DatabaseUI extends DisposableObject {
    }
  };

+ /**
+  * Adds the source folder of a CodeQL database to the workspace.
+  * When a database is first added in the "Databases" view, its source folder is added to the workspace.
+  * If the source folder is removed from the workspace for some reason, we want to be able to re-add it if need be.
+  */
+ private handleAddSource = async (
+   databaseItem: DatabaseItem,
+   multiSelect: DatabaseItem[] | undefined
+ ): Promise<void> => {
+   if (multiSelect?.length) {
+     for (const dbItem of multiSelect) {
+       await this.databaseManager.addDatabaseSourceArchiveFolder(dbItem);
+     }
+   } else {
+     await this.databaseManager.addDatabaseSourceArchiveFolder(databaseItem);
+   }
+ };
+
  /**
   * Return the current database directory. If we don't already have a
   * current database, ask the user for one, and return that, or
@@ -674,7 +724,6 @@ export class DatabaseUI extends DisposableObject {
    token: CancellationToken,
  ): Promise<DatabaseItem | undefined> {
    const uri = await chooseDatabaseDir(byFolder);

    if (!uri) {
      return undefined;
    }

@@ -691,7 +740,8 @@ export class DatabaseUI extends DisposableObject {
        this.databaseManager,
        this.storagePath,
        progress,
-       token
+       token,
+       this.queryServer?.cliServer
      );
    }
  }

@@ -700,7 +750,7 @@ export class DatabaseUI extends DisposableObject {
   * Perform some heuristics to ensure a proper database location is chosen.
   *
   * 1. If the selected URI to add is a file, choose the containing directory
-  * 2. If the selected URI is a directory matching db-*, choose the containing directory
+  * 2. If the selected URI appears to be a db language folder, choose the containing directory
   * 3. choose the current directory
   *
   * @param uri a URI that is a database folder or inside it

@@ -713,7 +763,7 @@ export class DatabaseUI extends DisposableObject {
      dbPath = path.dirname(dbPath);
    }

-   if (isLikelyDbLanguageFolder(dbPath)) {
+   if (await isLikelyDbLanguageFolder(dbPath)) {
      dbPath = path.dirname(dbPath);
    }
    return Uri.file(dbPath);

@@ -17,8 +17,8 @@ import {
import { zipArchiveScheme, encodeArchiveBasePath, decodeSourceArchiveUri, encodeSourceArchiveUri } from './archive-filesystem-provider';
import { DisposableObject } from './pure/disposable-object';
import { Logger, logger } from './logging';
- import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
- import { QueryServerClient } from './queryserver-client';
+ import { getErrorMessage } from './pure/helpers-pure';
+ import { QueryRunner } from './queryRunner';

/**
 * databases.ts
@@ -115,30 +115,31 @@ async function findDataset(parentDirectory: string): Promise<vscode.Uri> {

  const dbAbsolutePath = path.join(parentDirectory, dbRelativePaths[0]);
  if (dbRelativePaths.length > 1) {
-   showAndLogWarningMessage(`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`);
+   void showAndLogWarningMessage(`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`);
  }

  return vscode.Uri.file(dbAbsolutePath);
}

- async function findSourceArchive(
+ // exported for testing
+ export async function findSourceArchive(
  databasePath: string, silent = false
): Promise<vscode.Uri | undefined> {

  const relativePaths = ['src', 'output/src_archive'];

  for (const relativePath of relativePaths) {
    const basePath = path.join(databasePath, relativePath);
    const zipPath = basePath + '.zip';

-   if (await fs.pathExists(basePath)) {
-     return vscode.Uri.file(basePath);
-   } else if (await fs.pathExists(zipPath)) {
+   // Prefer using a zip archive over a directory.
+   if (await fs.pathExists(zipPath)) {
      return encodeArchiveBasePath(zipPath);
+   } else if (await fs.pathExists(basePath)) {
+     return vscode.Uri.file(basePath);
    }
  }
  if (!silent) {
-   showAndLogInformationMessage(
+   void showAndLogInformationMessage(
      `Could not find source archive for database '${databasePath}'. Assuming paths are absolute.`
    );
  }

@@ -146,7 +147,7 @@ async function findSourceArchive(
}

async function resolveDatabase(
- databasePath: string
+ databasePath: string,
): Promise<DatabaseContents> {

  const name = path.basename(databasePath);

@@ -161,7 +162,6 @@ async function resolveDatabase(
    datasetUri,
    sourceArchiveUri
  };

}

/** Gets the relative paths of all `.dbscheme` files in the given directory. */

@@ -169,7 +169,9 @@ async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
  return await glob('*.dbscheme', { cwd: dbDirectory });
}

- async function resolveDatabaseContents(uri: vscode.Uri): Promise<DatabaseContents> {
+ async function resolveDatabaseContents(
+   uri: vscode.Uri,
+ ): Promise<DatabaseContents> {
  if (uri.scheme !== 'file') {
    throw new Error(`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`);
  }
@@ -258,17 +260,27 @@ export interface DatabaseItem {
   * Returns the root uri of the virtual filesystem for this database's source archive,
   * as displayed in the filesystem explorer.
   */
- getSourceArchiveExplorerUri(): vscode.Uri | undefined;
+ getSourceArchiveExplorerUri(): vscode.Uri;

  /**
   * Holds if `uri` belongs to this database's source archive.
   */
  belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean;

+ /**
+  * Whether the database may be affected by test execution for the given path.
+  */
+ isAffectedByTest(testPath: string): Promise<boolean>;
+
  /**
   * Gets the state of this database, to be persisted in the workspace state.
   */
  getPersistedState(): PersistedDatabaseItem;
+
+ /**
+  * Verifies that this database item has a zipped source folder. Returns an error message if it does not.
+  */
+ verifyZippedSources(): string | undefined;
}

export enum DatabaseEventKind {

@@ -346,14 +358,12 @@ export class DatabaseItemImpl implements DatabaseItem {
    try {
      this._contents = await resolveDatabaseContents(this.databaseUri);
      this._error = undefined;
-   }
-   catch (e) {
+   } catch (e) {
      this._contents = undefined;
-     this._error = e;
+     this._error = e instanceof Error ? e : new Error(String(e));
      throw e;
    }
-   }
-   finally {
+   } finally {
      this.onChanged({
        kind: DatabaseEventKind.Refresh,
        item: this

@@ -454,13 +464,26 @@ export class DatabaseItemImpl implements DatabaseItem {
  /**
   * Returns the root uri of the virtual filesystem for this database's source archive.
   */
- public getSourceArchiveExplorerUri(): vscode.Uri | undefined {
+ public getSourceArchiveExplorerUri(): vscode.Uri {
    const sourceArchive = this.sourceArchive;
-   if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith('.zip'))
-     return undefined;
+   if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith('.zip')) {
+     throw new Error(this.verifyZippedSources());
+   }
    return encodeArchiveBasePath(sourceArchive.fsPath);
  }

+ public verifyZippedSources(): string | undefined {
+   const sourceArchive = this.sourceArchive;
+   if (sourceArchive === undefined) {
+     return `${this.name} has no source archive.`;
+   }
+
+   if (!sourceArchive.fsPath.endsWith('.zip')) {
+     return `${this.name} has a source folder that is unzipped.`;
+   }
+   return;
+ }
+
|
/**
|
||||||
* Holds if `uri` belongs to this database's source archive.
|
* Holds if `uri` belongs to this database's source archive.
|
||||||
*/
|
*/
|
||||||
@@ -470,6 +493,27 @@ export class DatabaseItemImpl implements DatabaseItem {
    return uri.scheme === zipArchiveScheme &&
      decodeSourceArchiveUri(uri).sourceArchiveZipPath === this.sourceArchive.fsPath;
  }

+ public async isAffectedByTest(testPath: string): Promise<boolean> {
+   const databasePath = this.databaseUri.fsPath;
+   if (!databasePath.endsWith('.testproj')) {
+     return false;
+   }
+   try {
+     const stats = await fs.stat(testPath);
+     if (stats.isDirectory()) {
+       return !path.relative(testPath, databasePath).startsWith('..');
+     } else {
+       // database for /one/two/three/test.ql is at /one/two/three/three.testproj
+       const testdir = path.dirname(testPath);
+       const testdirbase = path.basename(testdir);
+       return databasePath == path.join(testdir, testdirbase + '.testproj');
+     }
+   } catch {
+     // No information available for test path - assume database is unaffected.
+     return false;
+   }
+ }
}
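To make the path logic above concrete (an illustrative sketch, not part of the diff; the paths are hypothetical):

// Only .testproj databases can be affected at all.
// For a directory test path, any database located under it counts:
//   '/one/two' affects '/one/two/three/three.testproj'              -> true
// For a single test file, only the database named after its directory counts:
//   '/one/two/three/test.ql' affects '/one/two/three/three.testproj' -> true
//   '/one/two/other/test.ql' affects '/one/two/three/three.testproj' -> false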

/**

@@ -480,7 +524,7 @@ export class DatabaseItemImpl implements DatabaseItem {
function eventFired<T>(event: vscode.Event<T>, timeoutMs = 1000): Promise<T | undefined> {
  return new Promise((res, _rej) => {
    const timeout = setTimeout(() => {
-     logger.log(`Waiting for event ${event} timed out after ${timeoutMs}ms`);
+     void logger.log(`Waiting for event ${event} timed out after ${timeoutMs}ms`);
      res(undefined);
      dispose();
    }, timeoutMs);
@@ -508,30 +552,28 @@ export class DatabaseManager extends DisposableObject {

  constructor(
    private readonly ctx: ExtensionContext,
-   private readonly qs: QueryServerClient,
+   private readonly qs: QueryRunner,
    private readonly cli: cli.CodeQLCliServer,
    public logger: Logger
  ) {
    super();

-   qs.onDidStartQueryServer(this.reregisterDatabases.bind(this));
+   qs.onStart(this.reregisterDatabases.bind(this));

-   // Let this run async.
-   this.loadPersistedState();
  }

  public async openDatabase(
    progress: ProgressCallback,
    token: vscode.CancellationToken,
    uri: vscode.Uri,
+   displayName?: string
  ): Promise<DatabaseItem> {
    const contents = await resolveDatabaseContents(uri);
    // Ignore the source archive for QLTest databases by default.
    const isQLTestDatabase = path.extname(uri.fsPath) === '.testproj';
    const fullOptions: FullDatabaseOptions = {
      ignoreSourceArchive: isQLTestDatabase,
-     // displayName is only set if a user explicitly renames a database
-     displayName: undefined,
+     // If a displayName is not passed in, the basename of folder containing the database is used.
+     displayName,
      dateAdded: Date.now(),
      language: await this.getPrimaryLanguage(uri.fsPath)
    };

@@ -561,7 +603,7 @@ export class DatabaseManager extends DisposableObject {
    }));
  }

- private async addDatabaseSourceArchiveFolder(item: DatabaseItem) {
+ public async addDatabaseSourceArchiveFolder(item: DatabaseItem) {
    // The folder may already be in workspace state from a previous
    // session. If not, add it.
    const index = this.getDatabaseWorkspaceFolderIndex(item);

@@ -577,26 +619,28 @@ export class DatabaseManager extends DisposableObject {
    // This is undesirable, as we might be adding and removing many
    // workspace folders as the user adds and removes databases.
    const end = (vscode.workspace.workspaceFolders || []).length;

+   const msg = item.verifyZippedSources();
+   if (msg) {
+     void logger.log(`Could not add source folder because ${msg}`);
+     return;
+   }
+
    const uri = item.getSourceArchiveExplorerUri();
-   if (uri === undefined) {
-     logger.log(`Couldn't obtain file explorer uri for ${item.name}`);
-   }
-   else {
-     logger.log(`Adding workspace folder for ${item.name} source archive at index ${end}`);
-     if ((vscode.workspace.workspaceFolders || []).length < 2) {
-       // Adding this workspace folder makes the workspace
-       // multi-root, which may surprise the user. Let them know
-       // we're doing this.
-       vscode.window.showInformationMessage(`Adding workspace folder for source archive of database ${item.name}.`);
-     }
-     vscode.workspace.updateWorkspaceFolders(end, 0, {
-       name: `[${item.name} source archive]`,
-       uri,
-     });
-     // vscode api documentation says we must to wait for this event
-     // between multiple `updateWorkspaceFolders` calls.
-     await eventFired(vscode.workspace.onDidChangeWorkspaceFolders);
+   void logger.log(`Adding workspace folder for ${item.name} source archive at index ${end}`);
+   if ((vscode.workspace.workspaceFolders || []).length < 2) {
+     // Adding this workspace folder makes the workspace
+     // multi-root, which may surprise the user. Let them know
+     // we're doing this.
+     void vscode.window.showInformationMessage(`Adding workspace folder for source archive of database ${item.name}.`);
    }
+   vscode.workspace.updateWorkspaceFolders(end, 0, {
+     name: `[${item.name} source archive]`,
+     uri,
+   });
+   // vscode api documentation says we must to wait for this event
+   // between multiple `updateWorkspaceFolders` calls.
+   await eventFired(vscode.workspace.onDidChangeWorkspaceFolders);
  }
}

@@ -640,11 +684,13 @@ export class DatabaseManager extends DisposableObject {
      this._onDidChangeDatabaseItem.fire(event);
    });

-   await this.addDatabaseItem(progress, token, item);
+   // Avoid persisting the database state after adding since that should happen only after
+   // all databases have been added.
+   await this.addDatabaseItem(progress, token, item, false);
    return item;
  }

- private async loadPersistedState(): Promise<void> {
+ public async loadPersistedState(): Promise<void> {
    return withProgress({
      location: vscode.ProgressLocation.Notification
    },

@@ -658,6 +704,7 @@ export class DatabaseManager extends DisposableObject {
        step
      });
      try {
+       void this.logger.log(`Found ${databases.length} persisted databases: ${databases.map(db => db.uri).join(', ')}`);
        for (const database of databases) {
          progress({
            maxStep: databases.length,

@@ -670,18 +717,22 @@ export class DatabaseManager extends DisposableObject {
          await databaseItem.refresh();
          await this.registerDatabase(progress, token, databaseItem);
          if (currentDatabaseUri === database.uri) {
-           this.setCurrentDatabaseItem(databaseItem, true);
+           await this.setCurrentDatabaseItem(databaseItem, true);
          }
-       }
-       catch (e) {
+         void this.logger.log(`Loaded database ${databaseItem.name} at URI ${database.uri}.`);
+       } catch (e) {
          // When loading from persisted state, leave invalid databases in the list. They will be
          // marked as invalid, and cannot be set as the current database.
+         void this.logger.log(`Error loading database ${database.uri}: ${e}.`);
        }
      }
+     await this.updatePersistedDatabaseList();
    } catch (e) {
      // database list had an unexpected type - nothing to be done?
-     showAndLogErrorMessage(`Database list loading failed: ${e.message}`);
+     void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
    }

+   void this.logger.log('Finished loading persisted databases.');
  });
}

@@ -705,6 +756,8 @@ export class DatabaseManager extends DisposableObject {
    this._currentDatabaseItem = item;
    this.updatePersistedCurrentDatabaseItem();

+   await vscode.commands.executeCommand('setContext', 'codeQL.currentDatabaseItem', item?.name);
+
    this._onDidChangeCurrentDatabaseItem.fire({
      item,
      kind: DatabaseEventKind.Change

@@ -734,10 +787,14 @@ export class DatabaseManager extends DisposableObject {
  private async addDatabaseItem(
    progress: ProgressCallback,
    token: vscode.CancellationToken,
-   item: DatabaseItem
+   item: DatabaseItem,
+   updatePersistedState = true
  ) {
    this._databaseItems.push(item);
-   this.updatePersistedDatabaseList();
+   if (updatePersistedState) {
+     await this.updatePersistedDatabaseList();
+   }

    // Add this database item to the allow-list
    // Database items reconstituted from persisted state

@@ -754,7 +811,7 @@ export class DatabaseManager extends DisposableObject {

  public async renameDatabaseItem(item: DatabaseItem, newName: string) {
    item.name = newName;
-   this.updatePersistedDatabaseList();
+   await this.updatePersistedDatabaseList();
    this._onDidChangeDatabaseItem.fire({
      // pass undefined so that the entire tree is rebuilt in order to re-sort
      item: undefined,

@@ -774,28 +831,28 @@ export class DatabaseManager extends DisposableObject {
    if (index >= 0) {
      this._databaseItems.splice(index, 1);
    }
-   this.updatePersistedDatabaseList();
+   await this.updatePersistedDatabaseList();

    // Delete folder from workspace, if it is still there
    const folderIndex = (vscode.workspace.workspaceFolders || []).findIndex(
      folder => item.belongsToSourceArchiveExplorerUri(folder.uri)
    );
    if (folderIndex >= 0) {
-     logger.log(`Removing workspace folder at index ${folderIndex}`);
+     void logger.log(`Removing workspace folder at index ${folderIndex}`);
      vscode.workspace.updateWorkspaceFolders(folderIndex, 1);
    }

-   // Delete folder from file system only if it is controlled by the extension
-   if (this.isExtensionControlledLocation(item.databaseUri)) {
-     logger.log('Deleting database from filesystem.');
-     fs.remove(item.databaseUri.fsPath).then(
-       () => logger.log(`Deleted '${item.databaseUri.fsPath}'`),
-       e => logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${e.message}`));
-   }

    // Remove this database item from the allow-list
    await this.deregisterDatabase(progress, token, item);

+   // Delete folder from file system only if it is controlled by the extension
+   if (this.isExtensionControlledLocation(item.databaseUri)) {
+     void logger.log('Deleting database from filesystem.');
+     fs.remove(item.databaseUri.fsPath).then(
+       () => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
+       e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
+   }
+
    // note that we use undefined as the item in order to reset the entire tree
    this._onDidChangeDatabaseItem.fire({
      item: undefined,
@@ -808,36 +865,23 @@ export class DatabaseManager extends DisposableObject {
    token: vscode.CancellationToken,
    dbItem: DatabaseItem,
  ) {
-   if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
-     const databases: Dataset[] = [{
-       dbDir: dbItem.contents.datasetUri.fsPath,
-       workingSet: 'default'
-     }];
-     await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
-   }
+   await this.qs.deregisterDatabase(progress, token, dbItem);
  }

  private async registerDatabase(
    progress: ProgressCallback,
    token: vscode.CancellationToken,
    dbItem: DatabaseItem,
  ) {
-   if (dbItem.contents && (await this.cli.cliConstraints.supportsDatabaseRegistration())) {
-     const databases: Dataset[] = [{
-       dbDir: dbItem.contents.datasetUri.fsPath,
-       workingSet: 'default'
-     }];
-     await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
-   }
+   await this.qs.registerDatabase(progress, token, dbItem);
  }

  private updatePersistedCurrentDatabaseItem(): void {
-   this.ctx.workspaceState.update(CURRENT_DB, this._currentDatabaseItem ?
+   void this.ctx.workspaceState.update(CURRENT_DB, this._currentDatabaseItem ?
      this._currentDatabaseItem.databaseUri.toString(true) : undefined);
  }

- private updatePersistedDatabaseList(): void {
-   this.ctx.workspaceState.update(DB_LIST, this._databaseItems.map(item => item.getPersistedState()));
+ private async updatePersistedDatabaseList(): Promise<void> {
+   await this.ctx.workspaceState.update(DB_LIST, this._databaseItems.map(item => item.getPersistedState()));
  }
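Many lines in these hunks only gain a `void` prefix. A minimal sketch of the idiom (illustrative, assuming the project lints against un-awaited promises):

logger.log('message');        // returns a promise nobody awaits – flagged as a floating promise
void logger.log('message');   // explicitly discards the promise, signalling the omission is intentional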

  private isExtensionControlledLocation(uri: vscode.Uri) {
@@ -59,23 +59,23 @@ export abstract class Discovery<T> extends DisposableObject {
        this.discoveryInProgress = false;
        this.update(results);
      }
-   });
+   })

-   discoveryPromise.catch(err => {
-     logger.log(`${this.name} failed. Reason: ${err.message}`);
-   });
+     .catch(err => {
+       void logger.log(`${this.name} failed. Reason: ${err.message}`);
+     })

-   discoveryPromise.finally(() => {
+     .finally(() => {
      if (this.retry) {
        // Another refresh request came in while we were still running a previous discovery
        // operation. Since the discovery results we just computed are now stale, we'll launch
        // another discovery operation instead of updating.
        // Note that by doing this inside of `finally`, we will relaunch discovery even if the
        // initial discovery operation failed.
        this.retry = false;
        this.launchDiscovery();
      }
    });
}
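The reshaped chain above matters for error handling; a generic illustration (not from this repository; doWork, log and cleanup are hypothetical names):

const p = doWork();
p.catch(e => log(e));    // handles rejections of p itself...
p.finally(cleanup);      // ...but this separately attached branch re-rejects with nothing to handle it

doWork()
  .catch(e => log(e))
  .finally(cleanup);     // single chain: the rejection is handled before cleanup runs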

/**
@@ -153,7 +153,7 @@ export class DistributionManager implements DistributionProvider {
    // Check config setting, then extension specific distribution, then PATH.
    if (this.config.customCodeQlPath) {
      if (!await fs.pathExists(this.config.customCodeQlPath)) {
-       showAndLogErrorMessage(`The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
+       void showAndLogErrorMessage(`The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
          'by a configuration setting, but a CodeQL executable could not be found at that path. Please check ' +
          'that a CodeQL executable exists at the specified path or remove the setting.');
        return undefined;

@@ -191,7 +191,7 @@ export class DistributionManager implements DistributionProvider {
      };
    }
  }
- logger.log('INFO: Could not find CodeQL on path.');
+ void logger.log('INFO: Could not find CodeQL on path.');
}

return undefined;

@@ -276,7 +276,7 @@ class ExtensionSpecificDistributionManager {
  try {
    await this.removeDistribution();
  } catch (e) {
-   logger.log('WARNING: Tried to remove corrupted CodeQL CLI at ' +
+   void logger.log('WARNING: Tried to remove corrupted CodeQL CLI at ' +
      `${this.getDistributionStoragePath()} but encountered an error: ${e}.`);
  }
}

@@ -313,7 +313,7 @@ class ExtensionSpecificDistributionManager {
  progressCallback?: ProgressCallback): Promise<void> {
  await this.downloadDistribution(release, progressCallback);
  // Store the installed release within the global extension state.
- this.storeInstalledRelease(release);
+ await this.storeInstalledRelease(release);
}

private async downloadDistribution(release: Release,

@@ -321,7 +321,7 @@ class ExtensionSpecificDistributionManager {
  try {
    await this.removeDistribution();
  } catch (e) {
-   logger.log(`Tried to clean up old version of CLI at ${this.getDistributionStoragePath()} ` +
+   void logger.log(`Tried to clean up old version of CLI at ${this.getDistributionStoragePath()} ` +
      `but encountered an error: ${e}.`);
  }

@@ -332,7 +332,7 @@ class ExtensionSpecificDistributionManager {
  throw new Error(`Invariant violation: chose a release to install that didn't have ${requiredAssetName}`);
}
if (assets.length > 1) {
- logger.log('WARNING: chose a release with more than one asset to install, found ' +
+ void logger.log('WARNING: chose a release with more than one asset to install, found ' +
    assets.map(asset => asset.name).join(', '));
}
@@ -345,7 +345,7 @@ class ExtensionSpecificDistributionManager {

  const contentLength = assetStream.headers.get('content-length');
  const totalNumBytes = contentLength ? parseInt(contentLength, 10) : undefined;
- reportStreamProgress(assetStream.body, 'Downloading CodeQL CLI…', totalNumBytes, progressCallback);
+ reportStreamProgress(assetStream.body, `Downloading CodeQL CLI ${release.name}…`, totalNumBytes, progressCallback);

  await new Promise((resolve, reject) =>
    assetStream.body.pipe(archiveFile)

@@ -355,7 +355,7 @@ class ExtensionSpecificDistributionManager {

  await this.bumpDistributionFolderIndex();

- logger.log(`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`);
+ void logger.log(`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`);
  await extractZipArchive(archivePath, this.getDistributionStoragePath());
} finally {
  await fs.remove(tmpDirectory);

@@ -368,7 +368,7 @@ class ExtensionSpecificDistributionManager {
 * This should not be called for a distribution that is currently in use, as remove may fail.
 */
private async removeDistribution(): Promise<void> {
- this.storeInstalledRelease(undefined);
+ await this.storeInstalledRelease(undefined);
  if (await fs.pathExists(this.getDistributionStoragePath())) {
    await fs.remove(this.getDistributionStoragePath());
  }

@@ -376,7 +376,7 @@ class ExtensionSpecificDistributionManager {

private async getLatestRelease(): Promise<Release> {
  const requiredAssetName = DistributionManager.getRequiredAssetName();
- logger.log(`Searching for latest release including ${requiredAssetName}.`);
+ void logger.log(`Searching for latest release including ${requiredAssetName}.`);
  return this.createReleasesApiConsumer().getLatestRelease(
    this.versionRange,
    this.config.includePrerelease,

@@ -384,11 +384,11 @@ class ExtensionSpecificDistributionManager {
  const matchingAssets = release.assets.filter(asset => asset.name === requiredAssetName);
  if (matchingAssets.length === 0) {
    // For example, this could be a release with no platform-specific assets.
-   logger.log(`INFO: Ignoring a release with no assets named ${requiredAssetName}`);
+   void logger.log(`INFO: Ignoring a release with no assets named ${requiredAssetName}`);
    return false;
  }
  if (matchingAssets.length > 1) {
-   logger.log(`WARNING: Ignoring a release with more than one asset named ${requiredAssetName}`);
+   void logger.log(`WARNING: Ignoring a release with more than one asset named ${requiredAssetName}`);
    return false;
  }
  return true;

@@ -707,16 +707,14 @@ export async function getExecutableFromDirectory(directory: string, warnWhenNotF
  return alternateExpectedLauncherPath;
}
if (warnWhenNotFound) {
- logger.log(`WARNING: Expected to find a CodeQL CLI executable at ${expectedLauncherPath} but one was not found. ` +
+ void logger.log(`WARNING: Expected to find a CodeQL CLI executable at ${expectedLauncherPath} but one was not found. ` +
    'Will try PATH.');
}
return undefined;
}

function warnDeprecatedLauncher() {
- showAndLogWarningMessage(
+ void showAndLogWarningMessage(
    `The "${deprecatedCodeQlLauncherName()!}" launcher has been deprecated and will be removed in a future version. ` +
    `Please use "${codeQlLauncherName()}" instead. It is recommended to update to the latest CodeQL binaries.`
  );
extensions/ql-vscode/src/eval-log-tree-builder.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { ChildEvalLogTreeItem, EvalLogTreeItem } from './eval-log-viewer';
import { EvalLogData as EvalLogData } from './pure/log-summary-parser';

/** Builds the tree data for the evaluator log viewer for a single query run. */
export default class EvalLogTreeBuilder {
  private queryName: string;
  private evalLogDataItems: EvalLogData[];

  constructor(queryName: string, evaluatorLogDataItems: EvalLogData[]) {
    this.queryName = queryName;
    this.evalLogDataItems = evaluatorLogDataItems;
  }

  async getRoots(): Promise<EvalLogTreeItem[]> {
    return await this.parseRoots();
  }

  private async parseRoots(): Promise<EvalLogTreeItem[]> {
    const roots: EvalLogTreeItem[] = [];

    // Once the viewer can show logs for multiple queries, there will be more than 1 item at the root
    // level. For now, there will always be one root (the one query being shown).
    const queryItem: EvalLogTreeItem = {
      label: this.queryName,
      children: [] // Will assign predicate items as children shortly.
    };

    // Display descriptive message when no data exists
    if (this.evalLogDataItems.length === 0) {
      const noResultsItem: ChildEvalLogTreeItem = {
        label: 'No predicates evaluated in this query run.',
        parent: queryItem,
        children: [],
      };
      queryItem.children.push(noResultsItem);
    }

    // For each predicate, create a TreeItem object with appropriate parents/children
    this.evalLogDataItems.forEach(logDataItem => {
      const predicateLabel = `${logDataItem.predicateName} (${logDataItem.resultSize} tuples, ${logDataItem.millis} ms)`;
      const predicateItem: ChildEvalLogTreeItem = {
        label: predicateLabel,
        parent: queryItem,
        children: [] // Will assign pipeline items as children shortly.
      };
      for (const [pipelineName, steps] of Object.entries(logDataItem.ra)) {
        const pipelineLabel = `Pipeline: ${pipelineName}`;
        const pipelineItem: ChildEvalLogTreeItem = {
          label: pipelineLabel,
          parent: predicateItem,
          children: [] // Will assign step items as children shortly.
        };
        predicateItem.children.push(pipelineItem);

        pipelineItem.children = steps.map((step: string) => ({
          label: step,
          parent: pipelineItem,
          children: []
        }));
      }
      queryItem.children.push(predicateItem);
    });

    roots.push(queryItem);
    return roots;
  }
}
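A minimal usage sketch of the builder above (illustrative only, not part of the changeset). It assumes an EvalLogData shape with predicateName, resultSize, millis and an ra map from pipeline names to RA step strings, as consumed by parseRoots(); the query name and log values are made up.

import EvalLogTreeBuilder from './eval-log-tree-builder';
import { EvalLogData } from './pure/log-summary-parser';

// Hypothetical parsed log entry; real values would come from the evaluator log summary parser.
const data = [{
  predicateName: 'Expr::Expr#getParent',
  resultSize: 1234,
  millis: 56,
  ra: { pipeline: ['{1} r1 = SCAN ...', '{2} r2 = JOIN r1 WITH ...'] }
}] as EvalLogData[];

const builder = new EvalLogTreeBuilder('FindProblem.ql', data);
void builder.getRoots().then(roots => {
  // One root per query; its children are predicates, whose children are pipelines, whose children are RA steps.
  console.log(roots[0].label, roots[0].children.length);
});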
extensions/ql-vscode/src/eval-log-viewer.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { window, TreeDataProvider, TreeView, TreeItem, ProviderResult, Event, EventEmitter, TreeItemCollapsibleState } from 'vscode';
import { commandRunner } from './commandRunner';
import { DisposableObject } from './pure/disposable-object';
import { showAndLogErrorMessage } from './helpers';

export interface EvalLogTreeItem {
  label?: string;
  children: ChildEvalLogTreeItem[];
}

export interface ChildEvalLogTreeItem extends EvalLogTreeItem {
  parent: ChildEvalLogTreeItem | EvalLogTreeItem;
}

/** Provides data from parsed CodeQL evaluator logs to be rendered in a tree view. */
class EvalLogDataProvider extends DisposableObject implements TreeDataProvider<EvalLogTreeItem> {
  public roots: EvalLogTreeItem[] = [];

  private _onDidChangeTreeData: EventEmitter<EvalLogTreeItem | undefined | null | void> = new EventEmitter<EvalLogTreeItem | undefined | null | void>();
  readonly onDidChangeTreeData: Event<EvalLogTreeItem | undefined | null | void> = this._onDidChangeTreeData.event;

  refresh(): void {
    this._onDidChangeTreeData.fire();
  }

  getTreeItem(element: EvalLogTreeItem): TreeItem | Thenable<TreeItem> {
    const state = element.children.length
      ? TreeItemCollapsibleState.Collapsed
      : TreeItemCollapsibleState.None;
    const treeItem = new TreeItem(element.label || '', state);
    treeItem.tooltip = `${treeItem.label} || ''}`;
    return treeItem;
  }

  getChildren(element?: EvalLogTreeItem): ProviderResult<EvalLogTreeItem[]> {
    // If no item is passed, return the root.
    if (!element) {
      return this.roots || [];
    }
    // Otherwise it is called with an existing item, to load its children.
    return element.children;
  }

  getParent(element: ChildEvalLogTreeItem): ProviderResult<EvalLogTreeItem> {
    return element.parent;
  }
}

/** Manages a tree viewer of structured evaluator logs. */
export class EvalLogViewer extends DisposableObject {
  private treeView: TreeView<EvalLogTreeItem>;
  private treeDataProvider: EvalLogDataProvider;

  constructor() {
    super();

    this.treeDataProvider = new EvalLogDataProvider();
    this.treeView = window.createTreeView('codeQLEvalLogViewer', {
      treeDataProvider: this.treeDataProvider,
      showCollapseAll: true
    });

    this.push(this.treeView);
    this.push(this.treeDataProvider);
    this.push(
      commandRunner('codeQLEvalLogViewer.clear', async () => {
        this.clear();
      })
    );
  }

  private clear(): void {
    this.treeDataProvider.roots = [];
    this.treeDataProvider.refresh();
    this.treeView.message = undefined;
  }

  // Called when the Show Evaluator Log (UI) command is run on a new query.
  updateRoots(roots: EvalLogTreeItem[]): void {
    this.treeDataProvider.roots = roots;
    this.treeDataProvider.refresh();

    this.treeView.message = 'Viewer for query run:'; // Currently only one query supported at a time.

    // Handle error on reveal. This could happen if
    // the tree view is disposed during the reveal.
    this.treeView.reveal(roots[0], { focus: false })?.then(
      () => { /**/ },
      err => showAndLogErrorMessage(err)
    );
  }
}
@@ -2,6 +2,7 @@ import * as fs from 'fs-extra';
 import * as glob from 'glob-promise';
 import * as yaml from 'js-yaml';
 import * as path from 'path';
+import * as tmp from 'tmp-promise';
 import {
   ExtensionContext,
   Uri,
@@ -9,8 +10,25 @@ import {
   workspace,
   env
 } from 'vscode';
-import { CodeQLCliServer } from './cli';
+import { CodeQLCliServer, QlpacksInfo } from './cli';
+import { UserCancellationException } from './commandRunner';
 import { logger } from './logging';
+import { QueryMetadata } from './pure/interface-types';
+
+// Shared temporary folder for the extension.
+export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
+export const upgradesTmpDir = path.join(tmpDir.name, 'upgrades');
+fs.ensureDirSync(upgradesTmpDir);
+
+export const tmpDirDisposal = {
+  dispose: () => {
+    try {
+      tmpDir.removeCallback();
+    } catch (e) {
+      void logger.log(`Failed to remove temporary directory ${tmpDir.name}: ${e}`);
+    }
+  }
+};
+
 /**
  * Show an error message and log it to the console
@@ -62,9 +80,10 @@ export async function showAndLogWarningMessage(message: string, {
  */
 export async function showAndLogInformationMessage(message: string, {
   outputLogger = logger,
-  items = [] as string[]
+  items = [] as string[],
+  fullMessage = ''
 } = {}): Promise<string | undefined> {
-  return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage);
+  return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage, fullMessage);
 }

 type ShowMessageFn = (message: string, ...items: string[]) => Thenable<string | undefined>;
@@ -77,7 +96,7 @@ async function internalShowAndLog(
   fullMessage?: string
 ): Promise<string | undefined> {
   const label = 'Show Log';
-  outputLogger.log(fullMessage || message);
+  void outputLogger.log(fullMessage || message);
   const result = await fn(message, label, ...items);
   if (result === label) {
     outputLogger.show();
@@ -254,31 +273,75 @@ function createRateLimitedResult(): RateLimitedResult {
   };
 }

-export async function getQlPackForDbscheme(cliServer: CodeQLCliServer, dbschemePath: string): Promise<string> {
+export interface QlPacksForLanguage {
+  /** The name of the pack containing the dbscheme. */
+  dbschemePack: string;
+  /** `true` if `dbschemePack` is a library pack. */
+  dbschemePackIsLibraryPack: boolean;
+  /**
+   * The name of the corresponding standard query pack.
+   * Only defined if `dbschemePack` is a library pack.
+   */
+  queryPack?: string;
+}
+
+interface QlPackWithPath {
+  packName: string;
+  packDir: string | undefined;
+}
+
+async function findDbschemePack(packs: QlPackWithPath[], dbschemePath: string): Promise<{ name: string; isLibraryPack: boolean; }> {
+  for (const { packDir, packName } of packs) {
+    if (packDir !== undefined) {
+      const qlpack = yaml.load(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
+      if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
+        return {
+          name: packName,
+          isLibraryPack: qlpack.library === true
+        };
+      }
+    }
+  }
+  throw new Error(`Could not find qlpack file for dbscheme ${dbschemePath}`);
+}
+
+function findStandardQueryPack(qlpacks: QlpacksInfo, dbschemePackName: string): string | undefined {
+  const matches = dbschemePackName.match(/^codeql\/(?<language>[a-z]+)-all$/);
+  if (matches) {
+    const queryPackName = `codeql/${matches.groups!.language}-queries`;
+    if (qlpacks[queryPackName] !== undefined) {
+      return queryPackName;
+    }
+  }
+
+  // Either the dbscheme pack didn't look like one where the queries might be in the query pack, or
+  // no query pack was found in the search path. Either is OK.
+  return undefined;
+}
+
+export async function getQlPackForDbscheme(cliServer: CodeQLCliServer, dbschemePath: string): Promise<QlPacksForLanguage> {
   const qlpacks = await cliServer.resolveQlpacks(getOnDiskWorkspaceFolders());
-  const packs: { packDir: string | undefined; packName: string }[] =
+  const packs: QlPackWithPath[] =
     Object.entries(qlpacks).map(([packName, dirs]) => {
       if (dirs.length < 1) {
-        logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has no directories`);
+        void logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has no directories`);
         return { packName, packDir: undefined };
       }
       if (dirs.length > 1) {
-        logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has more than one directory; arbitrarily choosing the first`);
+        void logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has more than one directory; arbitrarily choosing the first`);
       }
       return {
         packName,
         packDir: dirs[0]
       };
     });
-  for (const { packDir, packName } of packs) {
-    if (packDir !== undefined) {
-      const qlpack = yaml.safeLoad(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme: string };
-      if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
-        return packName;
-      }
-    }
-  }
-  throw new Error(`Could not find qlpack file for dbscheme ${dbschemePath}`);
+  const dbschemePack = await findDbschemePack(packs, dbschemePath);
+  const queryPack = dbschemePack.isLibraryPack ? findStandardQueryPack(qlpacks, dbschemePack.name) : undefined;
+  return {
+    dbschemePack: dbschemePack.name,
+    dbschemePackIsLibraryPack: dbschemePack.isLibraryPack,
+    queryPack
+  };
 }

 export async function getPrimaryDbscheme(datasetFolder: string): Promise<string> {
@@ -292,7 +355,7 @@ export async function getPrimaryDbscheme(datasetFolder: string): Promise<string>
   const dbscheme = dbschemes[0];

   if (dbschemes.length > 1) {
-    Window.showErrorMessage(`Found multiple dbschemes in ${datasetFolder} during quick query; arbitrarily choosing the first, ${dbscheme}, to decide what library to use.`);
+    void Window.showErrorMessage(`Found multiple dbschemes in ${datasetFolder} during quick query; arbitrarily choosing the first, ${dbscheme}, to decide what library to use.`);
   }
   return dbscheme;
 }
@@ -370,15 +433,22 @@ export class CachedOperation<U> {
  * @see cli.CliVersionConstraint.supportsLanguageName
  * @see cli.CodeQLCliServer.resolveDatabase
  */
-const dbSchemeToLanguage = {
+export const dbSchemeToLanguage = {
   'semmlecode.javascript.dbscheme': 'javascript',
   'semmlecode.cpp.dbscheme': 'cpp',
   'semmlecode.dbscheme': 'java',
   'semmlecode.python.dbscheme': 'python',
   'semmlecode.csharp.dbscheme': 'csharp',
-  'go.dbscheme': 'go'
+  'go.dbscheme': 'go',
+  'ruby.dbscheme': 'ruby'
 };

+export const languageToDbScheme = Object.entries(dbSchemeToLanguage).reduce((acc, [k, v]) => {
+  acc[v] = k;
+  return acc;
+}, {} as { [k: string]: string });
+
 /**
  * Returns the initial contents for an empty query, based on the language of the selected
  * databse.
@@ -404,9 +474,9 @@ export function getInitialQueryContents(language: string, dbscheme: string) {

 /**
  * Heuristically determines if the directory passed in corresponds
- * to a database root.
- *
- * @param maybeRoot
+ * to a database root. A database root is a directory that contains
+ * a codeql-database.yml or (historically) a .dbinfo file. It also
+ * contains a folder starting with `db-`.
  */
 export async function isLikelyDatabaseRoot(maybeRoot: string) {
   const [a, b, c] = (await Promise.all([
@@ -418,9 +488,103 @@ export async function isLikelyDatabaseRoot(maybeRoot: string) {
     glob('db-*/', { cwd: maybeRoot })
   ]));

-  return !!((a || b) && c);
+  return ((a || b) && c.length > 0);
 }

-export function isLikelyDbLanguageFolder(dbPath: string) {
-  return !!path.basename(dbPath).startsWith('db-');
+/**
+ * A language folder is any folder starting with `db-` that is itself not a database root.
+ */
+export async function isLikelyDbLanguageFolder(dbPath: string) {
+  return path.basename(dbPath).startsWith('db-') && !(await isLikelyDatabaseRoot(dbPath));
+}
+
+/**
+ * Finds the language that a query targets.
+ * If it can't be autodetected, prompt the user to specify the language manually.
+ */
+export async function findLanguage(
+  cliServer: CodeQLCliServer,
+  queryUri: Uri | undefined
+): Promise<string | undefined> {
+  const uri = queryUri || Window.activeTextEditor?.document.uri;
+  if (uri !== undefined) {
+    try {
+      const queryInfo = await cliServer.resolveQueryByLanguage(getOnDiskWorkspaceFolders(), uri);
+      const language = (Object.keys(queryInfo.byLanguage))[0];
+      void logger.log(`Detected query language: ${language}`);
+      return language;
+    } catch (e) {
+      void logger.log('Could not autodetect query language. Select language manually.');
+    }
+  }
+
+  // will be undefined if user cancels the quick pick.
+  return await askForLanguage(cliServer, false);
+}
+
+export async function askForLanguage(cliServer: CodeQLCliServer, throwOnEmpty = true): Promise<string | undefined> {
+  const language = await Window.showQuickPick(
+    await cliServer.getSupportedLanguages(),
+    { placeHolder: 'Select target language for your query', ignoreFocusOut: true }
+  );
+  if (!language) {
+    // This only happens if the user cancels the quick pick.
+    if (throwOnEmpty) {
+      throw new UserCancellationException('Cancelled.');
+    } else {
+      void showAndLogErrorMessage('Language not found. Language must be specified manually.');
+    }
+  }
+  return language;
+}
+
+/**
+ * Gets metadata for a query, if it exists.
+ * @param cliServer The CLI server.
+ * @param queryPath The path to the query.
+ * @returns A promise that resolves to the query metadata, if available.
+ */
+export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath: string): Promise<QueryMetadata | undefined> {
+  try {
+    return await cliServer.resolveMetadata(queryPath);
+  } catch (e) {
+    // Ignore errors and provide no metadata.
+    void logger.log(`Couldn't resolve metadata for ${queryPath}: ${e}`);
+    return;
+  }
+}
+
+/**
+ * Creates a file in the query directory that indicates when this query was created.
+ * This is important for keeping track of when queries should be removed.
+ *
+ * @param queryPath The directory that will containt all files relevant to a query result.
+ * It does not need to exist.
+ */
+export async function createTimestampFile(storagePath: string) {
+  const timestampPath = path.join(storagePath, 'timestamp');
+  await fs.ensureDir(storagePath);
+  await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
+}
+
+/**
+ * Recursively walk a directory and return the full path to all files found.
+ * Symbolic links are ignored.
+ *
+ * @param dir the directory to walk
+ *
+ * @return An iterator of the full path to all files recursively found in the directory.
+ */
+export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
+  const seenFiles = new Set<string>();
+  for await (const d of await fs.opendir(dir)) {
+    const entry = path.join(dir, d.name);
+    seenFiles.add(entry);
+    if (d.isDirectory()) {
+      yield* walkDirectory(entry);
+    } else if (d.isFile()) {
+      yield entry;
+    }
+  }
 }
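A small usage sketch for the walkDirectory helper added above (illustrative only, not part of the changeset). It assumes these helpers live in the module imported elsewhere in this diff as './helpers'; the directory path is made up.

import { walkDirectory } from './helpers';

async function listQlFiles(dir: string): Promise<string[]> {
  const qlFiles: string[] = [];
  // walkDirectory is an async generator, so consume it with `for await`.
  for await (const file of walkDirectory(dir)) {
    if (file.endsWith('.ql')) {
      qlFiles.push(file);
    }
  }
  return qlFiles;
}

void listQlFiles('/path/to/workspace').then(files => console.log(files.length, 'queries found'));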
extensions/ql-vscode/src/history-item-label-provider.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
import { env } from 'vscode';
import * as path from 'path';
import { QueryHistoryConfig } from './config';
import { LocalQueryInfo } from './query-results';
import { buildRepoLabel, getRawQueryName, QueryHistoryInfo } from './query-history-info';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
import { assertNever } from './pure/helpers-pure';
import { pluralize } from './pure/word';

interface InterpolateReplacements {
  t: string; // Start time
  q: string; // Query name
  d: string; // Database/Controller repo name
  r: string; // Result count/Empty
  s: string; // Status
  f: string; // Query file name
  '%': '%'; // Percent sign
}

export class HistoryItemLabelProvider {
  constructor(private config: QueryHistoryConfig) {
    /**/
  }

  getLabel(item: QueryHistoryInfo) {
    let replacements: InterpolateReplacements;
    switch (item.t) {
      case 'local':
        replacements = this.getLocalInterpolateReplacements(item);
        break;
      case 'remote':
        replacements = this.getRemoteInterpolateReplacements(item);
        break;
      case 'variant-analysis':
        replacements = this.getVariantAnalysisInterpolateReplacements(item);
        break;
      default:
        assertNever(item);
    }

    const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');

    return this.interpolate(rawLabel, replacements);
  }

  /**
   * If there is a user-specified label for this query, interpolate and use that.
   * Otherwise, use the raw name of this query.
   *
   * @returns the name of the query, unless there is a custom label for this query.
   */
  getShortLabel(item: QueryHistoryInfo): string {
    return item.userSpecifiedLabel
      ? this.getLabel(item)
      : getRawQueryName(item);
  }


  private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
    const label = rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
      const replacement = replacements[key];
      return replacement !== undefined ? replacement : match;
    });

    return label.replace(/\s+/g, ' ');
  }

  private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
    const { resultCount = 0, statusString = 'in progress' } = item.completedQuery || {};
    return {
      t: item.startTime,
      q: item.getQueryName(),
      d: item.initialInfo.databaseInfo.name,
      r: `(${resultCount} results)`,
      s: statusString,
      f: item.getQueryFileName(),
      '%': '%',
    };
  }

  private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
    const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
    return {
      t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
      q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
      d: buildRepoLabel(item),
      r: resultCount,
      s: item.status,
      f: path.basename(item.remoteQuery.queryFilePath),
      '%': '%'
    };
  }

  private getVariantAnalysisInterpolateReplacements(item: VariantAnalysisHistoryItem): InterpolateReplacements {
    const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
    return {
      t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(env.language),
      q: `${item.variantAnalysis.query.name} (${item.variantAnalysis.query.language})`,
      d: buildRepoLabel(item),
      r: resultCount,
      s: item.status,
      f: path.basename(item.variantAnalysis.query.filePath),
      '%': '%',
    };
  }
}
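To illustrate the %-placeholders handled by interpolate() above (illustrative only, not part of the changeset; the format string and values are made up):

import { HistoryItemLabelProvider } from './history-item-label-provider';
import { QueryHistoryConfig } from './config';

// Hypothetical config; only the `format` field matters for label rendering here.
const labelProvider = new HistoryItemLabelProvider({ format: '[%t] %q on %d %r (%s)' } as QueryHistoryConfig);

// For a local query "FindProblem.ql" run against database "my-db" with 12 results and status "completed",
// labelProvider.getLabel(item) would render roughly:
//   "[8/16/2022, 10:00:00 AM] FindProblem.ql on my-db (12 results) (completed)"
// Unknown placeholders are left untouched, and runs of whitespace collapse to a single space.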
@@ -1,8 +1,10 @@
 import * as crypto from 'crypto';
+import * as os from 'os';
 import {
   Uri,
   Location,
   Range,
+  ExtensionContext,
   WebviewPanel,
   Webview,
   workspace,
@@ -70,7 +72,7 @@ function resolveFivePartLocation(
     Math.max(0, loc.startLine - 1),
     Math.max(0, loc.startColumn - 1),
     Math.max(0, loc.endLine - 1),
-    Math.max(0, loc.endColumn)
+    Math.max(1, loc.endColumn)
   );

   return new Location(databaseItem.resolveSourceFile(loc.uri), range);
@@ -110,20 +112,54 @@ export function tryResolveLocation(
   }
 }

+export type WebviewView = 'results' | 'compare' | 'remote-queries' | 'variant-analysis';
+
+export interface WebviewMessage {
+  t: string;
+}
+
 /**
  * Returns HTML to populate the given webview.
  * Uses a content security policy that only loads the given script.
  */
 export function getHtmlForWebview(
+  ctx: ExtensionContext,
   webview: Webview,
-  scriptUriOnDisk: Uri,
-  stylesheetUriOnDisk: Uri
+  view: WebviewView,
+  {
+    allowInlineStyles,
+  }: {
+    allowInlineStyles?: boolean;
+  } = {
+    allowInlineStyles: false,
+  }
 ): string {
+  const scriptUriOnDisk = Uri.file(
+    ctx.asAbsolutePath('out/webview.js')
+  );
+
+  const stylesheetUrisOnDisk = [
+    Uri.file(ctx.asAbsolutePath('out/webview.css'))
+  ];
+
   // Convert the on-disk URIs into webview URIs.
   const scriptWebviewUri = webview.asWebviewUri(scriptUriOnDisk);
-  const stylesheetWebviewUri = webview.asWebviewUri(stylesheetUriOnDisk);
+  const stylesheetWebviewUris = stylesheetUrisOnDisk.map(stylesheetUriOnDisk =>
+    webview.asWebviewUri(stylesheetUriOnDisk));
+
   // Use a nonce in the content security policy to uniquely identify the above resources.
   const nonce = getNonce();
+
+  const stylesheetsHtmlLines = allowInlineStyles
+    ? stylesheetWebviewUris.map(uri => createStylesLinkWithoutNonce(uri))
+    : stylesheetWebviewUris.map(uri => createStylesLinkWithNonce(nonce, uri));
+
+  const styleSrc = allowInlineStyles
+    ? `${webview.cspSource} vscode-file: 'unsafe-inline'`
+    : `'nonce-${nonce}'`;
+
+  const fontSrc = webview.cspSource;
+
   /*
    * Content security policy:
    * default-src: allow nothing by default.
@@ -136,11 +172,11 @@ export function getHtmlForWebview(
 <html>
   <head>
     <meta http-equiv="Content-Security-Policy"
-      content="default-src 'none'; script-src 'nonce-${nonce}'; style-src 'nonce-${nonce}'; connect-src ${webview.cspSource};">
-    <link nonce="${nonce}" rel="stylesheet" href="${stylesheetWebviewUri}">
+      content="default-src 'none'; script-src 'nonce-${nonce}'; font-src ${fontSrc}; style-src ${styleSrc}; connect-src ${webview.cspSource};">
+    ${stylesheetsHtmlLines.join(` ${os.EOL}`)}
   </head>
   <body>
-    <div id=root>
+    <div id=root data-view="${view}">
     </div>
     <script nonce="${nonce}" src="${scriptWebviewUri}">
     </script>
@@ -224,15 +260,23 @@ export async function jumpToLocation(
   } catch (e) {
     if (e instanceof Error) {
       if (e.message.match(/File not found/)) {
-        Window.showErrorMessage(
+        void Window.showErrorMessage(
           'Original file of this result is not in the database\'s source archive.'
         );
       } else {
-        logger.log(`Unable to handleMsgFromView: ${e.message}`);
+        void logger.log(`Unable to handleMsgFromView: ${e.message}`);
       }
     } else {
-      logger.log(`Unable to handleMsgFromView: ${e}`);
+      void logger.log(`Unable to handleMsgFromView: ${e}`);
     }
   }
 }

+function createStylesLinkWithNonce(nonce: string, uri: Uri): string {
+  return `<link nonce="${nonce}" rel="stylesheet" href="${uri}">`;
+}
+
+function createStylesLinkWithoutNonce(uri: Uri): string {
+  return `<link rel="stylesheet" href="${uri}">`;
+}
@@ -1,6 +1,4 @@
|
|||||||
import * as path from 'path';
|
|
||||||
import * as Sarif from 'sarif';
|
import * as Sarif from 'sarif';
|
||||||
import { DisposableObject } from './pure/disposable-object';
|
|
||||||
import * as vscode from 'vscode';
|
import * as vscode from 'vscode';
|
||||||
import {
|
import {
|
||||||
Diagnostic,
|
Diagnostic,
|
||||||
@@ -15,7 +13,7 @@ import * as cli from './cli';
|
|||||||
import { CodeQLCliServer } from './cli';
|
import { CodeQLCliServer } from './cli';
|
||||||
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
||||||
import { showAndLogErrorMessage } from './helpers';
|
import { showAndLogErrorMessage } from './helpers';
|
||||||
import { assertNever } from './pure/helpers-pure';
|
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||||
import {
|
import {
|
||||||
FromResultsViewMsg,
|
FromResultsViewMsg,
|
||||||
Interpretation,
|
Interpretation,
|
||||||
@@ -27,26 +25,29 @@ import {
|
|||||||
InterpretedResultsSortState,
|
InterpretedResultsSortState,
|
||||||
SortDirection,
|
SortDirection,
|
||||||
ALERTS_TABLE_NAME,
|
ALERTS_TABLE_NAME,
|
||||||
|
GRAPH_TABLE_NAME,
|
||||||
RawResultsSortState,
|
RawResultsSortState,
|
||||||
|
NavigationDirection,
|
||||||
} from './pure/interface-types';
|
} from './pure/interface-types';
|
||||||
import { Logger } from './logging';
|
import { Logger } from './logging';
|
||||||
import * as messages from './pure/messages';
|
|
||||||
import { commandRunner } from './commandRunner';
|
import { commandRunner } from './commandRunner';
|
||||||
import { CompletedQuery, interpretResults } from './query-results';
|
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
|
||||||
import { QueryInfo, tmpDir } from './run-queries';
|
import { QueryEvaluationInfo } from './run-queries-shared';
|
||||||
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
|
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
|
||||||
import {
|
import {
|
||||||
WebviewReveal,
|
WebviewReveal,
|
||||||
fileUriToWebviewUri,
|
fileUriToWebviewUri,
|
||||||
tryResolveLocation,
|
tryResolveLocation,
|
||||||
getHtmlForWebview,
|
|
||||||
shownLocationDecoration,
|
shownLocationDecoration,
|
||||||
shownLocationLineDecoration,
|
shownLocationLineDecoration,
|
||||||
jumpToLocation,
|
jumpToLocation,
|
||||||
} from './interface-utils';
|
} from './interface-utils';
|
||||||
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
|
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
|
||||||
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
|
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
|
||||||
|
import { AbstractWebview, WebviewPanelConfig } from './abstract-webview';
|
||||||
import { PAGE_SIZE } from './config';
|
import { PAGE_SIZE } from './config';
|
||||||
|
import { CompletedLocalQueryInfo } from './query-results';
|
||||||
|
import { HistoryItemLabelProvider } from './history-item-label-provider';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* interface.ts
|
* interface.ts
|
||||||
@@ -87,20 +88,41 @@ function sortInterpretedResults(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function numPagesOfResultSet(resultSet: RawResultSet): number {
|
function interpretedPageSize(interpretation: Interpretation | undefined): number {
|
||||||
return Math.ceil(resultSet.schema.rows / PAGE_SIZE.getValue<number>());
|
if (interpretation?.data.t == 'GraphInterpretationData') {
|
||||||
|
// Graph views always have one result per page.
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
return PAGE_SIZE.getValue<number>();
|
||||||
|
}
|
||||||
|
|
||||||
|
function numPagesOfResultSet(resultSet: RawResultSet, interpretation?: Interpretation): number {
|
||||||
|
const pageSize = interpretedPageSize(interpretation);
|
||||||
|
|
||||||
|
const n = interpretation?.data.t == 'GraphInterpretationData'
|
||||||
|
? interpretation.data.dot.length
|
||||||
|
: resultSet.schema.rows;
|
||||||
|
|
||||||
|
return Math.ceil(n / pageSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
function numInterpretedPages(interpretation: Interpretation | undefined): number {
|
function numInterpretedPages(interpretation: Interpretation | undefined): number {
|
||||||
return Math.ceil((interpretation?.sarif.runs[0].results?.length || 0) / PAGE_SIZE.getValue<number>());
|
if (!interpretation) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
const pageSize = interpretedPageSize(interpretation);
|
||||||
|
|
||||||
|
const n = interpretation.data.t == 'GraphInterpretationData'
|
||||||
|
? interpretation.data.dot.length
|
||||||
|
: interpretation.data.runs[0].results?.length || 0;
|
||||||
|
|
||||||
|
return Math.ceil(n / pageSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
export class InterfaceManager extends DisposableObject {
|
export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResultsViewMsg> {
|
||||||
private _displayedQuery?: CompletedQuery;
|
private _displayedQuery?: CompletedLocalQueryInfo;
|
||||||
private _interpretation?: Interpretation;
|
private _interpretation?: Interpretation;
|
||||||
private _panel: vscode.WebviewPanel | undefined;
|
|
||||||
private _panelLoaded = false;
|
|
||||||
private _panelLoadedCallBacks: (() => void)[] = [];
|
|
||||||
|
|
||||||
private readonly _diagnosticCollection = languages.createDiagnosticCollection(
|
private readonly _diagnosticCollection = languages.createDiagnosticCollection(
|
||||||
'codeql-query-results'
|
'codeql-query-results'
|
||||||
@@ -110,35 +132,41 @@ export class InterfaceManager extends DisposableObject {
|
|||||||
public ctx: vscode.ExtensionContext,
|
public ctx: vscode.ExtensionContext,
|
||||||
private databaseManager: DatabaseManager,
|
private databaseManager: DatabaseManager,
|
||||||
public cliServer: CodeQLCliServer,
|
public cliServer: CodeQLCliServer,
|
||||||
public logger: Logger
|
public logger: Logger,
|
||||||
|
private labelProvider: HistoryItemLabelProvider
|
||||||
) {
|
) {
|
||||||
super();
|
super(ctx);
|
||||||
this.push(this._diagnosticCollection);
|
this.push(this._diagnosticCollection);
|
||||||
this.push(
|
this.push(
|
||||||
vscode.window.onDidChangeTextEditorSelection(
|
vscode.window.onDidChangeTextEditorSelection(
|
||||||
this.handleSelectionChange.bind(this)
|
this.handleSelectionChange.bind(this)
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
logger.log('Registering path-step navigation commands.');
|
const navigationCommands = {
|
||||||
this.push(
|
'codeQLQueryResults.up': NavigationDirection.up,
|
||||||
commandRunner(
|
'codeQLQueryResults.down': NavigationDirection.down,
|
||||||
'codeQLQueryResults.nextPathStep',
|
'codeQLQueryResults.left': NavigationDirection.left,
|
||||||
this.navigatePathStep.bind(this, 1)
|
'codeQLQueryResults.right': NavigationDirection.right,
|
||||||
)
|
// For backwards compatibility with keybindings set using an earlier version of the extension.
|
||||||
);
|
'codeQLQueryResults.nextPathStep': NavigationDirection.down,
|
||||||
this.push(
|
'codeQLQueryResults.previousPathStep': NavigationDirection.up,
|
||||||
commandRunner(
|
};
|
||||||
'codeQLQueryResults.previousPathStep',
|
void logger.log('Registering result view navigation commands.');
|
||||||
this.navigatePathStep.bind(this, -1)
|
for (const [commandId, direction] of Object.entries(navigationCommands)) {
|
||||||
)
|
this.push(
|
||||||
);
|
commandRunner(
|
||||||
|
commandId,
|
||||||
|
this.navigateResultView.bind(this, direction)
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
this.push(
|
this.push(
|
||||||
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
|
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
|
||||||
if (kind === DatabaseEventKind.Remove) {
|
if (kind === DatabaseEventKind.Remove) {
|
||||||
this._diagnosticCollection.clear();
|
this._diagnosticCollection.clear();
|
||||||
if (this.isShowingPanel()) {
|
if (this.isShowingPanel) {
|
||||||
this.postMessage({
|
void this.postMessage({
|
||||||
t: 'untoggleShowProblems'
|
t: 'untoggleShowProblems'
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -147,104 +175,35 @@ export class InterfaceManager extends DisposableObject {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
async navigatePathStep(direction: number): Promise<void> {
|
async navigateResultView(direction: NavigationDirection): Promise<void> {
|
||||||
this.postMessage({ t: 'navigatePath', direction });
|
if (!this.panel?.visible) {
|
||||||
}
|
|
||||||
|
|
||||||
private isShowingPanel() {
|
|
||||||
return !!this._panel;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns the webview panel, creating it if it doesn't already
|
|
||||||
// exist.
|
|
||||||
getPanel(): vscode.WebviewPanel {
|
|
||||||
if (this._panel == undefined) {
|
|
||||||
const { ctx } = this;
|
|
||||||
const panel = (this._panel = Window.createWebviewPanel(
|
|
||||||
'resultsView', // internal name
|
|
||||||
'CodeQL Query Results', // user-visible name
|
|
||||||
{ viewColumn: vscode.ViewColumn.Beside, preserveFocus: true },
|
|
||||||
{
|
|
||||||
enableScripts: true,
|
|
||||||
enableFindWidget: true,
|
|
||||||
retainContextWhenHidden: true,
|
|
||||||
localResourceRoots: [
|
|
||||||
vscode.Uri.file(tmpDir.name),
|
|
||||||
vscode.Uri.file(path.join(this.ctx.extensionPath, 'out'))
|
|
||||||
]
|
|
||||||
}
|
|
||||||
));
|
|
||||||
|
|
||||||
this._panel.onDidDispose(
|
|
||||||
() => {
|
|
||||||
this._panel = undefined;
|
|
||||||
this._displayedQuery = undefined;
|
|
||||||
},
|
|
||||||
null,
|
|
||||||
ctx.subscriptions
|
|
||||||
);
|
|
||||||
const scriptPathOnDisk = vscode.Uri.file(
|
|
||||||
ctx.asAbsolutePath('out/resultsView.js')
|
|
||||||
);
|
|
||||||
const stylesheetPathOnDisk = vscode.Uri.file(
|
|
||||||
ctx.asAbsolutePath('out/resultsView.css')
|
|
||||||
);
|
|
||||||
panel.webview.html = getHtmlForWebview(
|
|
||||||
panel.webview,
|
|
||||||
scriptPathOnDisk,
|
|
||||||
stylesheetPathOnDisk
|
|
||||||
);
|
|
||||||
panel.webview.onDidReceiveMessage(
|
|
||||||
async (e) => this.handleMsgFromView(e),
|
|
||||||
undefined,
|
|
||||||
ctx.subscriptions
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return this._panel;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async changeInterpretedSortState(
|
|
||||||
sortState: InterpretedResultsSortState | undefined
|
|
||||||
): Promise<void> {
|
|
||||||
if (this._displayedQuery === undefined) {
|
|
||||||
showAndLogErrorMessage(
|
|
||||||
'Failed to sort results since evaluation info was unknown.'
|
|
||||||
);
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// Notify the webview that it should expect new results.
|
// Reveal the panel now as the subsequent call to 'Window.showTextEditor' in 'showLocation' may destroy the webview otherwise.
|
||||||
await this.postMessage({ t: 'resultsUpdating' });
|
this.panel.reveal();
|
||||||
this._displayedQuery.updateInterpretedSortState(sortState);
|
await this.postMessage({ t: 'navigate', direction });
|
||||||
await this.showResults(this._displayedQuery, WebviewReveal.NotForced, true);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private async changeRawSortState(
|
protected getPanelConfig(): WebviewPanelConfig {
|
||||||
resultSetName: string,
|
return {
|
||||||
sortState: RawResultsSortState | undefined
|
viewId: 'resultsView',
|
||||||
): Promise<void> {
|
title: 'CodeQL Query Results',
|
||||||
if (this._displayedQuery === undefined) {
|
viewColumn: this.chooseColumnForWebview(),
|
||||||
showAndLogErrorMessage(
|
preserveFocus: true,
|
||||||
'Failed to sort results since evaluation info was unknown.'
|
view: 'results',
|
||||||
);
|
};
|
||||||
return;
|
|
||||||
}
|
|
||||||
// Notify the webview that it should expect new results.
|
|
||||||
await this.postMessage({ t: 'resultsUpdating' });
|
|
||||||
await this._displayedQuery.updateSortState(
|
|
||||||
this.cliServer,
|
|
||||||
resultSetName,
|
|
||||||
sortState
|
|
||||||
);
|
|
||||||
// Sorting resets to first page, as there is arguably no particular
|
|
||||||
// correlation between the results on the nth page that the user
|
|
||||||
// was previously viewing and the contents of the nth page in a
|
|
||||||
// new sorted order.
|
|
||||||
await this.showPageOfRawResults(resultSetName, 0, true);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private async handleMsgFromView(msg: FromResultsViewMsg): Promise<void> {
|
protected onPanelDispose(): void {
|
||||||
|
this._displayedQuery = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected async onMessage(msg: FromResultsViewMsg): Promise<void> {
|
||||||
try {
|
try {
|
||||||
switch (msg.t) {
|
switch (msg.t) {
|
||||||
|
case 'viewLoaded':
|
||||||
|
this.onWebViewLoaded();
|
||||||
|
break;
|
||||||
case 'viewSourceFile': {
|
case 'viewSourceFile': {
|
||||||
await jumpToLocation(msg, this.databaseManager, this.logger);
|
await jumpToLocation(msg, this.databaseManager, this.logger);
|
||||||
break;
|
break;
|
||||||
@@ -267,11 +226,6 @@ export class InterfaceManager extends DisposableObject {
|
|||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case 'resultViewLoaded':
|
|
||||||
this._panelLoaded = true;
|
|
||||||
this._panelLoadedCallBacks.forEach((cb) => cb());
|
|
||||||
this._panelLoadedCallBacks = [];
|
|
||||||
break;
|
|
||||||
case 'changeSort':
|
case 'changeSort':
|
||||||
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
await this.changeRawSortState(msg.resultSetName, msg.sortState);
|
||||||
break;
|
break;
|
||||||
@@ -279,7 +233,7 @@ export class InterfaceManager extends DisposableObject {
|
|||||||
await this.changeInterpretedSortState(msg.sortState);
|
await this.changeInterpretedSortState(msg.sortState);
|
||||||
break;
|
break;
|
||||||
case 'changePage':
|
case 'changePage':
|
||||||
if (msg.selectedTable === ALERTS_TABLE_NAME) {
|
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
|
||||||
await this.showPageOfInterpretedResults(msg.pageNumber);
|
await this.showPageOfInterpretedResults(msg.pageNumber);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -290,7 +244,7 @@ export class InterfaceManager extends DisposableObject {
|
|||||||
// sortedResultsInfo doesn't have an entry for the current
|
// sortedResultsInfo doesn't have an entry for the current
|
||||||
// result set. Use this to determine whether or not we use
|
// result set. Use this to determine whether or not we use
|
||||||
// the sorted bqrs file.
|
// the sorted bqrs file.
|
||||||
this._displayedQuery?.sortedResultsInfo.has(msg.selectedTable) || false
|
!!this._displayedQuery?.completedQuery.sortedResultsInfo[msg.selectedTable]
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
@@ -301,29 +255,77 @@ export class InterfaceManager extends DisposableObject {
|
|||||||
assertNever(msg);
|
assertNever(msg);
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
showAndLogErrorMessage(e.message, {
|
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||||
fullMessage: e.stack
|
fullMessage: getErrorStack(e)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
postMessage(msg: IntoResultsViewMsg): Thenable<boolean> {
|
/**
|
||||||
return this.getPanel().webview.postMessage(msg);
|
* Choose where to open the webview.
|
||||||
|
*
|
||||||
|
* If there is a single view column, then open beside it.
|
||||||
|
* If there are multiple view columns, then open beside the active column,
|
||||||
|
* unless the active editor is the last column. In this case, open in the first column.
|
||||||
|
*
|
||||||
|
* The goal is to avoid opening new columns when there already are two columns open.
|
||||||
|
*/
|
||||||
|
private chooseColumnForWebview(): vscode.ViewColumn {
|
||||||
|
// This is not a great way to determine the number of view columns, but I
|
||||||
|
// can't find a vscode API that does it any better.
|
||||||
|
// Here, iterate through all the visible editors and determine the max view column.
|
||||||
|
// This won't work if the largest view column is empty.
|
||||||
|
const colCount = Window.visibleTextEditors.reduce((maxVal, editor) =>
|
||||||
|
Math.max(maxVal, Number.parseInt(editor.viewColumn?.toFixed() || '0', 10)), 0);
|
||||||
|
if (colCount <= 1) {
|
||||||
|
return vscode.ViewColumn.Beside;
|
||||||
|
}
|
||||||
|
const activeViewColumnNum = Number.parseInt(Window.activeTextEditor?.viewColumn?.toFixed() || '0', 10);
|
||||||
|
return activeViewColumnNum === colCount ? vscode.ViewColumn.One : vscode.ViewColumn.Beside;
|
||||||
}
|
}
|
||||||
|
|
||||||
private waitForPanelLoaded(): Promise<void> {
|
private async changeInterpretedSortState(
|
||||||
return new Promise((resolve) => {
|
sortState: InterpretedResultsSortState | undefined
|
||||||
if (this._panelLoaded) {
|
): Promise<void> {
|
||||||
resolve();
|
if (this._displayedQuery === undefined) {
|
||||||
} else {
|
void showAndLogErrorMessage(
|
||||||
this._panelLoadedCallBacks.push(resolve);
|
'Failed to sort results since evaluation info was unknown.'
|
||||||
}
|
);
|
||||||
});
|
return;
|
||||||
|
}
|
||||||
|
// Notify the webview that it should expect new results.
|
||||||
|
await this.postMessage({ t: 'resultsUpdating' });
|
||||||
|
await this._displayedQuery.completedQuery.updateInterpretedSortState(sortState);
|
||||||
|
await this.showResults(this._displayedQuery, WebviewReveal.NotForced, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async changeRawSortState(
|
||||||
|
resultSetName: string,
|
||||||
|
sortState: RawResultsSortState | undefined
|
||||||
|
): Promise<void> {
|
||||||
|
if (this._displayedQuery === undefined) {
|
||||||
|
void showAndLogErrorMessage(
|
||||||
|
'Failed to sort results since evaluation info was unknown.'
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Notify the webview that it should expect new results.
|
||||||
|
await this.postMessage({ t: 'resultsUpdating' });
|
||||||
|
await this._displayedQuery.completedQuery.updateSortState(
|
||||||
|
this.cliServer,
|
||||||
|
resultSetName,
|
||||||
|
sortState
|
||||||
|
);
|
||||||
|
// Sorting resets to first page, as there is arguably no particular
|
||||||
|
+ // correlation between the results on the nth page that the user
+ // was previously viewing and the contents of the nth page in a
+ // new sorted order.
+ await this.showPageOfRawResults(resultSetName, 0, true);
}

/**
* Show query results in webview panel.
- * @param results Evaluation info for the executed query.
+ * @param fullQuery Evaluation info for the executed query.
* @param shouldKeepOldResultsWhileRendering Should keep old results while rendering.
* @param forceReveal Force the webview panel to be visible and
* Appropriate when the user has just performed an explicit
@@ -331,58 +333,59 @@ export class InterfaceManager extends DisposableObject {
* history entry.
*/
public async showResults(
- results: CompletedQuery,
+ fullQuery: CompletedLocalQueryInfo,
forceReveal: WebviewReveal,
shouldKeepOldResultsWhileRendering = false
): Promise<void> {
- if (results.result.resultType !== messages.QueryResultType.SUCCESS) {
+ if (!fullQuery.completedQuery.successful) {
return;
}

this._interpretation = undefined;
const interpretationPage = await this.interpretResultsInfo(
- results.query,
+ fullQuery.completedQuery.query,
- results.interpretedResultsSortState
+ fullQuery.completedQuery.interpretedResultsSortState
);

const sortedResultsMap: SortedResultsMap = {};
- results.sortedResultsInfo.forEach(
+ Object.entries(fullQuery.completedQuery.sortedResultsInfo).forEach(
- (v, k) =>
+ ([k, v]) =>
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
);

- this._displayedQuery = results;
+ this._displayedQuery = fullQuery;

const panel = this.getPanel();
await this.waitForPanelLoaded();
- if (forceReveal === WebviewReveal.Forced) {
+ if (!panel.visible) {
- panel.reveal(undefined, true);
+ if (forceReveal === WebviewReveal.Forced) {
- } else if (!panel.visible) {
+ panel.reveal(undefined, true);
- // The results panel exists, (`.getPanel()` guarantees it) but
+ } else {
- // is not visible; it's in a not-currently-viewed tab. Show a
+ // The results panel exists, (`.getPanel()` guarantees it) but
- // more asynchronous message to not so abruptly interrupt
+ // is not visible; it's in a not-currently-viewed tab. Show a
- // user's workflow by immediately revealing the panel.
+ // more asynchronous message to not so abruptly interrupt
- const showButton = 'View Results';
+ // user's workflow by immediately revealing the panel.
- const queryName = results.queryName;
+ const showButton = 'View Results';
- const resultPromise = vscode.window.showInformationMessage(
+ const queryName = this.labelProvider.getShortLabel(fullQuery);
- `Finished running query ${
+ const resultPromise = vscode.window.showInformationMessage(
- queryName.length > 0 ? ` "${queryName}"` : ''
+ `Finished running query ${queryName.length > 0 ? ` "${queryName}"` : ''
}.`,
showButton
);
// Address this click asynchronously so we still update the
// query history immediately.
- resultPromise.then((result) => {
+ void resultPromise.then((result) => {
if (result === showButton) {
panel.reveal();
}
});
+ }
}

// Note that the resultSetSchemas will return offsets for the default (unsorted) page,
// which may not be correct. However, in this case, it doesn't matter since we only
// need the first offset, which will be the same no matter which sorting we use.
- const resultSetSchemas = await this.getResultSetSchemas(results);
+ const resultSetSchemas = await this.getResultSetSchemas(fullQuery.completedQuery);
const resultSetNames = resultSetSchemas.map(schema => schema.name);

const selectedTable = getDefaultResultSetName(resultSetNames);
@@ -392,7 +395,7 @@ export class InterfaceManager extends DisposableObject {

// Use sorted results path if it exists. This may happen if we are
// reloading the results view after it has been sorted in the past.
- const resultsPath = results.getResultsPath(selectedTable);
+ const resultsPath = fullQuery.completedQuery.getResultsPath(selectedTable);
const pageSize = PAGE_SIZE.getValue<number>();
const chunk = await this.cliServer.bqrsDecode(
resultsPath,
@@ -407,11 +410,11 @@ export class InterfaceManager extends DisposableObject {
}
);
const resultSet = transformBqrsResultSet(schema, chunk);
- results.setResultCount(interpretationPage?.numTotalResults || resultSet.schema.rows);
+ fullQuery.completedQuery.setResultCount(interpretationPage?.numTotalResults || resultSet.schema.rows);
const parsedResultSets: ParsedResultSets = {
pageNumber: 0,
pageSize,
- numPages: numPagesOfResultSet(resultSet),
+ numPages: numPagesOfResultSet(resultSet, this._interpretation),
numInterpretedPages: numInterpretedPages(this._interpretation),
resultSet: { ...resultSet, t: 'RawResultSet' },
selectedTable: undefined,
@@ -421,17 +424,17 @@ export class InterfaceManager extends DisposableObject {
await this.postMessage({
t: 'setState',
interpretation: interpretationPage,
- origResultsPaths: results.query.resultsPaths,
+ origResultsPaths: fullQuery.completedQuery.query.resultsPaths,
resultsPath: this.convertPathToWebviewUri(
- results.query.resultsPaths.resultsPath
+ fullQuery.completedQuery.query.resultsPaths.resultsPath
),
parsedResultSets,
sortedResultsMap,
- database: results.database,
+ database: fullQuery.initialInfo.databaseInfo,
shouldKeepOldResultsWhileRendering,
- metadata: results.query.metadata,
+ metadata: fullQuery.completedQuery.query.metadata,
- queryName: results.toString(),
+ queryName: this.labelProvider.getLabel(fullQuery),
- queryPath: results.query.program.queryPath
+ queryPath: fullQuery.initialInfo.queryPath
});
}

@@ -447,29 +450,29 @@ export class InterfaceManager extends DisposableObject {
if (this._interpretation === undefined) {
throw new Error('Trying to show interpreted results but interpretation was undefined');
}
- if (this._interpretation.sarif.runs[0].results === undefined) {
+ if (this._interpretation.data.t === 'SarifInterpretationData' && this._interpretation.data.runs[0].results === undefined) {
throw new Error('Trying to show interpreted results but results were undefined');
}

- const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery);
+ const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery.completedQuery);
const resultSetNames = resultSetSchemas.map(schema => schema.name);

await this.postMessage({
t: 'showInterpretedPage',
interpretation: this.getPageOfInterpretedResults(pageNumber),
- database: this._displayedQuery.database,
+ database: this._displayedQuery.initialInfo.databaseInfo,
- metadata: this._displayedQuery.query.metadata,
+ metadata: this._displayedQuery.completedQuery.query.metadata,
pageNumber,
resultSetNames,
- pageSize: PAGE_SIZE.getValue(),
+ pageSize: interpretedPageSize(this._interpretation),
numPages: numInterpretedPages(this._interpretation),
- queryName: this._displayedQuery.toString(),
+ queryName: this.labelProvider.getLabel(this._displayedQuery),
- queryPath: this._displayedQuery.query.program.queryPath
+ queryPath: this._displayedQuery.initialInfo.queryPath
});
}

- private async getResultSetSchemas(results: CompletedQuery, selectedTable = ''): Promise<ResultSetSchema[]> {
+ private async getResultSetSchemas(completedQuery: CompletedQueryInfo, selectedTable = ''): Promise<ResultSetSchema[]> {
- const resultsPath = results.getResultsPath(selectedTable);
+ const resultsPath = completedQuery.getResultsPath(selectedTable);
const schemas = await this.cliServer.bqrsInfo(
resultsPath,
PAGE_SIZE.getValue()
@@ -496,13 +499,18 @@ export class InterfaceManager extends DisposableObject {
}

const sortedResultsMap: SortedResultsMap = {};
- results.sortedResultsInfo.forEach(
+ Object.entries(results.completedQuery.sortedResultsInfo).forEach(
- (v, k) =>
+ ([k, v]) =>
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
);

- const resultSetSchemas = await this.getResultSetSchemas(results, sorted ? selectedTable : '');
+ const resultSetSchemas = await this.getResultSetSchemas(results.completedQuery, sorted ? selectedTable : '');
- const resultSetNames = resultSetSchemas.map(schema => schema.name);
+ // If there is a specific sorted table selected, a different bqrs file is loaded that doesn't have all the result set names.
+ // Make sure that we load all result set names here.
+ // See https://github.com/github/vscode-codeql/issues/1005
+ const allResultSetSchemas = sorted ? await this.getResultSetSchemas(results.completedQuery, '') : resultSetSchemas;
+ const resultSetNames = allResultSetSchemas.map(schema => schema.name);

const schema = resultSetSchemas.find(
(resultSet) => resultSet.name == selectedTable
@@ -512,7 +520,7 @@ export class InterfaceManager extends DisposableObject {

const pageSize = PAGE_SIZE.getValue<number>();
const chunk = await this.cliServer.bqrsDecode(
- results.getResultsPath(selectedTable, sorted),
+ results.completedQuery.getResultsPath(selectedTable, sorted),
schema.name,
{
offset: schema.pagination?.offsets[pageNumber],
@@ -534,17 +542,17 @@ export class InterfaceManager extends DisposableObject {
await this.postMessage({
t: 'setState',
interpretation: this._interpretation,
- origResultsPaths: results.query.resultsPaths,
+ origResultsPaths: results.completedQuery.query.resultsPaths,
resultsPath: this.convertPathToWebviewUri(
- results.query.resultsPaths.resultsPath
+ results.completedQuery.query.resultsPaths.resultsPath
),
parsedResultSets,
sortedResultsMap,
- database: results.database,
+ database: results.initialInfo.databaseInfo,
shouldKeepOldResultsWhileRendering: false,
- metadata: results.query.metadata,
+ metadata: results.completedQuery.query.metadata,
- queryName: results.toString(),
+ queryName: this.labelProvider.getLabel(results),
- queryPath: results.query.program.queryPath
+ queryPath: results.initialInfo.queryPath
});
}

@@ -556,31 +564,48 @@ export class InterfaceManager extends DisposableObject {
sortState: InterpretedResultsSortState | undefined
): Promise<Interpretation | undefined> {
if (!resultsPaths) {
- this.logger.log('No results path. Cannot display interpreted results.');
+ void this.logger.log('No results path. Cannot display interpreted results.');
return undefined;
}
+ let data;
+ let numTotalResults;
+ if (metadata?.kind === GRAPH_TABLE_NAME) {
+ data = await interpretGraphResults(
+ this.cliServer,
+ metadata,
+ resultsPaths,
+ sourceInfo
+ );
+ numTotalResults = data.dot.length;
+ } else {
+ const sarif = await interpretResultsSarif(
+ this.cliServer,
+ metadata,
+ resultsPaths,
+ sourceInfo
+ );

- const sarif = await interpretResults(
+ sarif.runs.forEach(run => {
- this.cliServer,
+ if (run.results) {
- metadata,
+ sortInterpretedResults(run.results, sortState);
- resultsPaths,
+ }
- sourceInfo
+ });
- );

- sarif.runs.forEach(run => {
+ sarif.sortState = sortState;
- if (run.results !== undefined) {
+ data = sarif;
- sortInterpretedResults(run.results, sortState);
- }
- });

- const numTotalResults = sarif.runs[0]?.results?.length || 0;
+ numTotalResults = (() => {
+ return sarif.runs?.[0]?.results
+ ? sarif.runs[0].results.length
+ : 0;
+ })();
+ }

const interpretation: Interpretation = {
- sarif,
+ data,
sourceLocationPrefix,
numTruncatedResults: 0,
- numTotalResults,
+ numTotalResults
- sortState,
};
this._interpretation = interpretation;
return interpretation;
@@ -589,7 +614,6 @@ export class InterfaceManager extends DisposableObject {
private getPageOfInterpretedResults(
pageNumber: number
): Interpretation {

function getPageOfRun(run: Sarif.Run): Sarif.Run {
return {
...run, results: run.results?.slice(
@@ -599,32 +623,44 @@ export class InterfaceManager extends DisposableObject {
};
}

- if (this._interpretation === undefined) {
+ const interp = this._interpretation;
+ if (interp === undefined) {
throw new Error('Tried to get interpreted results before interpretation finished');
}
- if (this._interpretation.sarif.runs.length !== 1) {
- this.logger.log(`Warning: SARIF file had ${this._interpretation.sarif.runs.length} runs, expected 1`);
+ if (interp.data.t !== 'SarifInterpretationData')
+ return interp;

+ if (interp.data.runs.length !== 1) {
+ void this.logger.log(`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`);
}
- const interp = this._interpretation;
return {
...interp,
- sarif: { ...interp.sarif, runs: [getPageOfRun(interp.sarif.runs[0])] },
+ data: {
+ ...interp.data,
+ runs: [getPageOfRun(interp.data.runs[0])]
+ }
};
}

private async interpretResultsInfo(
- query: QueryInfo,
+ query: QueryEvaluationInfo,
sortState: InterpretedResultsSortState | undefined
): Promise<Interpretation | undefined> {
if (
- (await query.canHaveInterpretedResults()) &&
+ query.canHaveInterpretedResults() &&
query.quickEvalPosition === undefined // never do results interpretation if quickEval
) {
try {
- const sourceLocationPrefix = await query.dbItem.getSourceLocationPrefix(
+ const dbItem = this.databaseManager.findDatabaseItem(Uri.file(query.dbItemPath));
+ if (!dbItem) {
+ throw new Error(`Could not find database item for ${query.dbItemPath}`);
+ }
+ const sourceLocationPrefix = await dbItem.getSourceLocationPrefix(
this.cliServer
);
- const sourceArchiveUri = query.dbItem.sourceArchive;
+ const sourceArchiveUri = dbItem.sourceArchive;
const sourceInfo =
sourceArchiveUri === undefined
? undefined
@@ -642,8 +678,8 @@ export class InterfaceManager extends DisposableObject {
} catch (e) {
// If interpretation fails, accept the error and continue
// trying to render uninterpreted results anyway.
- showAndLogErrorMessage(
+ void showAndLogErrorMessage(
- `Showing raw results instead of interpreted ones due to an error. ${e.message}`
+ `Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
);
}
}
@@ -682,9 +718,8 @@ export class InterfaceManager extends DisposableObject {
try {
await this.showProblemResultsAsDiagnostics(interpretation, database);
} catch (e) {
- const msg = e instanceof Error ? e.message : e.toString();
+ void this.logger.log(
- this.logger.log(
+ `Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
- `Exception while computing problem results as diagnostics: ${msg}`
);
this._diagnosticCollection.clear();
}
@@ -694,10 +729,13 @@ export class InterfaceManager extends DisposableObject {
interpretation: Interpretation,
databaseItem: DatabaseItem
): Promise<void> {
- const { sarif, sourceLocationPrefix } = interpretation;
+ const { data, sourceLocationPrefix } = interpretation;

- if (!sarif.runs || !sarif.runs[0].results) {
+ if (data.t !== 'SarifInterpretationData')
- this.logger.log(
+ return;

+ if (!data.runs || !data.runs[0].results) {
+ void this.logger.log(
'Didn\'t find a run in the sarif results. Error processing sarif?'
);
return;
@@ -705,14 +743,14 @@ export class InterfaceManager extends DisposableObject {

const diagnostics: [Uri, ReadonlyArray<Diagnostic>][] = [];

- for (const result of sarif.runs[0].results) {
+ for (const result of data.runs[0].results) {
const message = result.message.text;
if (message === undefined) {
- this.logger.log('Sarif had result without plaintext message');
+ void this.logger.log('Sarif had result without plaintext message');
continue;
}
if (!result.locations) {
- this.logger.log('Sarif had result without location');
+ void this.logger.log('Sarif had result without location');
continue;
}

@@ -725,7 +763,7 @@ export class InterfaceManager extends DisposableObject {
}
const resultLocation = tryResolveLocation(sarifLoc, databaseItem);
if (!resultLocation) {
- this.logger.log('Sarif location was not resolvable ' + sarifLoc);
+ void this.logger.log('Sarif location was not resolvable ' + sarifLoc);
continue;
}
const parsedMessage = parseSarifPlainTextMessage(message);
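Note: the recurring `data.t === 'SarifInterpretationData'` checks above suggest that `Interpretation.data` is now a tagged union of SARIF data and graph (DOT) data. The following is a minimal, self-contained TypeScript sketch of that shape, written only to clarify the narrowing logic; the type and field names are assumptions for illustration, not the extension's actual declarations.

type SarifRun = { results?: Array<{ message: { text?: string } }> };

interface SarifInterpretationData {
  t: 'SarifInterpretationData';
  runs: SarifRun[];        // paged with getPageOfRun(...) above
  sortState?: unknown;     // assumed from the `sarif.sortState = sortState` assignment
}

interface GraphInterpretationData {
  t: 'GraphInterpretationData';
  dot: string[];           // assumed from `numTotalResults = data.dot.length`
}

type InterpretationData = SarifInterpretationData | GraphInterpretationData;

// Narrowing on the `t` tag is what lets the code above touch `data.runs` only for SARIF data.
function countResults(data: InterpretationData): number {
  return data.t === 'SarifInterpretationData'
    ? data.runs[0]?.results?.length ?? 0
    : data.dot.length;
}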
extensions/ql-vscode/src/json-rpc-server.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { Logger } from './logging';
import * as cp from 'child_process';
import { Disposable } from 'vscode';
import { MessageConnection } from 'vscode-jsonrpc';


/** A running query server process and its associated message connection. */
export class ServerProcess implements Disposable {
child: cp.ChildProcess;
connection: MessageConnection;
logger: Logger;

constructor(child: cp.ChildProcess, connection: MessageConnection, private name: string, logger: Logger) {
this.child = child;
this.connection = connection;
this.logger = logger;
}

dispose(): void {
void this.logger.log(`Stopping ${this.name}...`);
this.connection.dispose();
this.child.stdin!.end();
this.child.stderr!.destroy();
// TODO kill the process if it doesn't terminate after a certain time limit.

// On Windows, we usually have to terminate the process before closing its stdout.
this.child.stdout!.destroy();
void this.logger.log(`Stopped ${this.name}.`);
}
}
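For context, a rough usage sketch of the new ServerProcess class, modelled on how the query server client constructs it later in this diff; the spawn command and logger wiring here are placeholders, not the extension's real configuration.

import * as cp from 'child_process';
import { createMessageConnection } from 'vscode-jsonrpc';
import { ServerProcess } from './json-rpc-server';
import { Logger } from './logging';

function startServer(logger: Logger): ServerProcess {
  // Placeholder command: the real client spawns the CodeQL CLI through cli.spawnServer.
  const child = cp.spawn('codeql', ['execute', 'query-server']);
  const connection = createMessageConnection(child.stdout!, child.stdin!);
  const server = new ServerProcess(child, connection, 'Query server', logger);
  connection.listen();
  // server.dispose() later closes the connection and tears down the child's stdio streams.
  return server;
}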
extensions/ql-vscode/src/legacy-query-server/legacyRunner.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
import { CancellationToken } from 'vscode';
import { ProgressCallback } from '../commandRunner';
import { DatabaseItem } from '../databases';
import { Dataset, deregisterDatabases, registerDatabases } from '../pure/legacy-messages';
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
import { QueryRunner } from '../queryRunner';
import { QueryWithResults } from '../run-queries-shared';
import { QueryServerClient } from './queryserver-client';
import { clearCacheInDatabase, compileAndRunQueryAgainstDatabase } from './run-queries';
import { upgradeDatabaseExplicit } from './upgrades';

export class LegacyQueryRunner extends QueryRunner {


constructor(public readonly qs: QueryServerClient) {
super();
}

get cliServer() {
return this.qs.cliServer;
}

async restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void> {
await this.qs.restartQueryServer(progress, token);
}

onStart(callBack: (progress: ProgressCallback, token: CancellationToken) => Promise<void>) {
this.qs.onDidStartQueryServer(callBack);
}
async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
await clearCacheInDatabase(this.qs, dbItem, progress, token);
}
async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem, initialInfo: InitialQueryInfo, queryStorageDir: string, progress: ProgressCallback, token: CancellationToken, templates?: Record<string, string>, queryInfo?: LocalQueryInfo): Promise<QueryWithResults> {
return await compileAndRunQueryAgainstDatabase(this.qs.cliServer, this.qs, dbItem, initialInfo, queryStorageDir, progress, token, templates, queryInfo);
}

async deregisterDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
const databases: Dataset[] = [{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
}];
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
}
}
async registerDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
const databases: Dataset[] = [{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
}];
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
}
}

async upgradeDatabaseExplicit(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
await upgradeDatabaseExplicit(this.qs, dbItem, progress, token);
}

async clearPackCache(): Promise<void> {
/**
* Nothing needs to be done
*/
}
}
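An illustrative (hypothetical) call sequence for the QueryRunner abstraction above, using only the method signatures shown; the database item, query info, and storage directory are placeholders rather than real values.

import { CancellationTokenSource } from 'vscode';
import { DatabaseItem } from '../databases';
import { InitialQueryInfo } from '../query-results';
import { LegacyQueryRunner } from './legacyRunner';

async function runOneQuery(runner: LegacyQueryRunner, dbItem: DatabaseItem, initialInfo: InitialQueryInfo) {
  const tokenSource = new CancellationTokenSource();
  const progress = () => { /* forward progress updates to the UI */ };
  await runner.registerDatabase(progress, tokenSource.token, dbItem);
  return runner.compileAndRunQueryAgainstDatabase(
    dbItem,
    initialInfo,
    '/tmp/query-storage', // placeholder storage directory
    progress,
    tokenSource.token
  );
}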
@@ -1,46 +1,25 @@
- import * as cp from 'child_process';
import * as path from 'path';
- import { DisposableObject } from './pure/disposable-object';
+ import * as fs from 'fs-extra';
- import { Disposable, CancellationToken, commands } from 'vscode';
- import { createMessageConnection, MessageConnection, RequestType } from 'vscode-jsonrpc';
+ import { DisposableObject } from '../pure/disposable-object';
- import * as cli from './cli';
+ import { CancellationToken, commands } from 'vscode';
- import { QueryServerConfig } from './config';
+ import { createMessageConnection, RequestType } from 'vscode-jsonrpc';
- import { Logger, ProgressReporter } from './logging';
+ import * as cli from '../cli';
- import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from './pure/messages';
+ import { QueryServerConfig } from '../config';
- import * as messages from './pure/messages';
+ import { Logger, ProgressReporter } from '../logging';
- import { ProgressCallback, ProgressTask } from './commandRunner';
+ import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from '../pure/legacy-messages';
+ import * as messages from '../pure/legacy-messages';
+ import { ProgressCallback, ProgressTask } from '../commandRunner';
+ import { findQueryLogFile } from '../run-queries-shared';
+ import { ServerProcess } from '../json-rpc-server';

+ type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;

type ServerOpts = {
logger: Logger;
+ contextStoragePath: string;
}

- /** A running query server process and its associated message connection. */
- class ServerProcess implements Disposable {
- child: cp.ChildProcess;
- connection: MessageConnection;
- logger: Logger;
-
- constructor(child: cp.ChildProcess, connection: MessageConnection, logger: Logger) {
- this.child = child;
- this.connection = connection;
- this.logger = logger;
- }
-
- dispose(): void {
- this.logger.log('Stopping query server...');
- this.connection.dispose();
- this.child.stdin!.end();
- this.child.stderr!.destroy();
- // TODO kill the process if it doesn't terminate after a certain time limit.
-
- // On Windows, we usually have to terminate the process before closing its stdout.
- this.child.stdout!.destroy();
- this.logger.log('Stopped query server.');
- }
- }
-
- type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;

/**
* Client that manages a query server process.
* The server process is started upon initialization and tracked during its lifetime.
@@ -65,7 +44,7 @@ export class QueryServerClient extends DisposableObject {
this.queryServerStartListeners.push(e);
}

- public activeQueryName: string | undefined;
+ public activeQueryLogFile: string | undefined;

constructor(
readonly config: QueryServerConfig,
@@ -95,7 +74,7 @@ export class QueryServerClient extends DisposableObject {
if (this.serverProcess !== undefined) {
this.disposeAndStopTracking(this.serverProcess);
} else {
- this.logger.log('No server process to be stopped.');
+ void this.logger.log('No server process to be stopped.');
}
}

@@ -143,12 +122,29 @@ export class QueryServerClient extends DisposableObject {
args.push('--require-db-registration');
}

+ if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
+ args.push('--old-eval-stats');
+ }
+
+ if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
+ const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
+ await fs.ensureFile(structuredLogFile);
+
+ args.push('--evaluator-log');
+ args.push(structuredLogFile);
+
+ // We hard-code the verbosity level to 5 and minify to false.
+ // This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
+ args.push('--evaluator-log-level');
+ args.push('5');
+ }

if (this.config.debug) {
args.push('--debug', '--tuple-counting');
}

if (cli.shouldDebugQueryServer()) {
- args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=n,quiet=y');
+ args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y');
}

const child = cli.spawnServer(
@@ -159,7 +155,7 @@ export class QueryServerClient extends DisposableObject {
this.logger,
data => this.logger.log(data.toString(), {
trailingNewline: false,
- additionalLogLocation: this.activeQueryName
+ additionalLogLocation: this.activeQueryLogFile
}),
undefined, // no listener for stdout
progressReporter
@@ -168,13 +164,8 @@ export class QueryServerClient extends DisposableObject {
const connection = createMessageConnection(child.stdout, child.stdin);
connection.onRequest(completeQuery, res => {
if (!(res.runId in this.evaluationResultCallbacks)) {
- this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
+ void this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
- }
+ } else {
- else {
- const baseLocation = this.logger.getBaseLocation();
- if (baseLocation && this.activeQueryName) {
- res.logFileLocation = path.join(baseLocation, this.activeQueryName);
- }
this.evaluationResultCallbacks[res.runId](res);
}
return {};
@@ -185,7 +176,7 @@ export class QueryServerClient extends DisposableObject {
callback(res);
}
});
- this.serverProcess = new ServerProcess(child, connection, this.opts.logger);
+ this.serverProcess = new ServerProcess(child, connection, 'Query server', this.logger);
// Ensure the server process is disposed together with this client.
this.track(this.serverProcess);
connection.listen();
@@ -207,7 +198,7 @@ export class QueryServerClient extends DisposableObject {
}

get serverProcessPid(): number {
- return this.serverProcess!.child.pid;
+ return this.serverProcess!.child.pid || 0;
}

async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
@@ -235,8 +226,7 @@ export class QueryServerClient extends DisposableObject {
*/
private updateActiveQuery(method: string, parameter: any): void {
if (method === messages.compileQuery.method) {
- const queryPath = parameter?.queryToCheck?.queryPath || 'unknown';
+ this.activeQueryLogFile = findQueryLogFile(path.dirname(parameter.resultPath));
- this.activeQueryName = `query-${path.basename(queryPath)}-${this.nextProgress}.log`;
}
}
}
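One behavioral detail worth calling out from the query server client changes above is the debug flag: the JDWP agent string switches from `server=y,suspend=n` to `server=n,suspend=y`. With `server=n` the query server's JVM connects out to a debugger that is already listening on localhost:9010, and with `suspend=y` it waits for that debugger before executing anything. A small annotated sketch of the two configurations (everything outside the agent strings themselves is commentary, not code from the extension):

// Old behaviour: the JVM listens on :9010 and keeps running whether or not a debugger ever attaches.
const listenFlag = '-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=y,suspend=n,quiet=y';

// New behaviour: the JVM attaches to a debugger already listening on :9010 and suspends until it connects.
const attachFlag = '-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y';

// Only one of these is pushed onto the spawn args, gated by cli.shouldDebugQueryServer().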
526
extensions/ql-vscode/src/legacy-query-server/run-queries.ts
Normal file
@@ -0,0 +1,526 @@
|
|||||||
|
import * as crypto from 'crypto';
|
||||||
|
import * as fs from 'fs-extra';
|
||||||
|
import * as tmp from 'tmp-promise';
|
||||||
|
import * as path from 'path';
|
||||||
|
import {
|
||||||
|
CancellationToken,
|
||||||
|
Uri,
|
||||||
|
} from 'vscode';
|
||||||
|
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
|
||||||
|
|
||||||
|
import * as cli from '../cli';
|
||||||
|
import { DatabaseItem, } from '../databases';
|
||||||
|
import {
|
||||||
|
getOnDiskWorkspaceFolders,
|
||||||
|
showAndLogErrorMessage,
|
||||||
|
showAndLogWarningMessage,
|
||||||
|
tryGetQueryMetadata,
|
||||||
|
upgradesTmpDir
|
||||||
|
} from '../helpers';
|
||||||
|
import { ProgressCallback } from '../commandRunner';
|
||||||
|
import { QueryMetadata } from '../pure/interface-types';
|
||||||
|
import { logger } from '../logging';
|
||||||
|
import * as messages from '../pure/legacy-messages';
|
||||||
|
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
|
||||||
|
import * as qsClient from './queryserver-client';
|
||||||
|
import { getErrorMessage } from '../pure/helpers-pure';
|
||||||
|
import { compileDatabaseUpgradeSequence, upgradeDatabaseExplicit } from './upgrades';
|
||||||
|
import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A collection of evaluation-time information about a query,
|
||||||
|
* including the query itself, and where we have decided to put
|
||||||
|
* temporary files associated with it, such as the compiled query
|
||||||
|
* output and results.
|
||||||
|
*/
|
||||||
|
export class QueryInProgress {
|
||||||
|
|
||||||
|
public queryEvalInfo: QueryEvaluationInfo;
|
||||||
|
/**
|
||||||
|
* Note that in the {@link slurpQueryHistory} method, we create a QueryEvaluationInfo instance
|
||||||
|
* by explicitly setting the prototype in order to avoid calling this constructor.
|
||||||
|
*/
|
||||||
|
constructor(
|
||||||
|
readonly querySaveDir: string,
|
||||||
|
readonly dbItemPath: string,
|
||||||
|
databaseHasMetadataFile: boolean,
|
||||||
|
readonly queryDbscheme: string, // the dbscheme file the query expects, based on library path resolution
|
||||||
|
readonly quickEvalPosition?: messages.Position,
|
||||||
|
readonly metadata?: QueryMetadata,
|
||||||
|
readonly templates?: Record<string, string>,
|
||||||
|
) {
|
||||||
|
this.queryEvalInfo = new QueryEvaluationInfo(querySaveDir, dbItemPath, databaseHasMetadataFile, quickEvalPosition, metadata);
|
||||||
|
/**/
|
||||||
|
}
|
||||||
|
|
||||||
|
get compiledQueryPath() {
|
||||||
|
return this.queryEvalInfo.compileQueryPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
async run(
|
||||||
|
qs: qsClient.QueryServerClient,
|
||||||
|
upgradeQlo: string | undefined,
|
||||||
|
availableMlModels: cli.MlModelInfo[],
|
||||||
|
dbItem: DatabaseItem,
|
||||||
|
progress: ProgressCallback,
|
||||||
|
token: CancellationToken,
|
||||||
|
queryInfo?: LocalQueryInfo,
|
||||||
|
): Promise<messages.EvaluationResult> {
|
||||||
|
if (!dbItem.contents || dbItem.error) {
|
||||||
|
throw new Error('Can\'t run query on invalid database.');
|
||||||
|
}
|
||||||
|
|
||||||
|
let result: messages.EvaluationResult | null = null;
|
||||||
|
|
||||||
|
const callbackId = qs.registerCallback(res => {
|
||||||
|
result = {
|
||||||
|
...res,
|
||||||
|
logFileLocation: this.queryEvalInfo.logPath
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(model => ({ uri: Uri.file(model.path).toString(true) }));
|
||||||
|
|
||||||
|
const queryToRun: messages.QueryToRun = {
|
||||||
|
resultsPath: this.queryEvalInfo.resultsPaths.resultsPath,
|
||||||
|
qlo: Uri.file(this.compiledQueryPath).toString(),
|
||||||
|
compiledUpgrade: upgradeQlo && Uri.file(upgradeQlo).toString(),
|
||||||
|
allowUnknownTemplates: true,
|
||||||
|
templateValues: createSimpleTemplates(this.templates),
|
||||||
|
availableMlModels: availableMlModelUris,
|
||||||
|
id: callbackId,
|
||||||
|
timeoutSecs: qs.config.timeoutSecs,
|
||||||
|
};
|
||||||
|
|
||||||
|
const dataset: messages.Dataset = {
|
||||||
|
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||||
|
workingSet: 'default'
|
||||||
|
};
|
||||||
|
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||||
|
await qs.sendRequest(messages.startLog, {
|
||||||
|
db: dataset,
|
||||||
|
logPath: this.queryEvalInfo.evalLogPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
|
const params: messages.EvaluateQueriesParams = {
|
||||||
|
db: dataset,
|
||||||
|
evaluateId: callbackId,
|
||||||
|
queries: [queryToRun],
|
||||||
|
stopOnError: false,
|
||||||
|
useSequenceHint: false
|
||||||
|
};
|
||||||
|
try {
|
||||||
|
await qs.sendRequest(messages.runQueries, params, token, progress);
|
||||||
|
if (qs.config.customLogDirectory) {
|
||||||
|
void showAndLogWarningMessage(
|
||||||
|
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${this.queryEvalInfo.logPath}.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
qs.unRegisterCallback(callbackId);
|
||||||
|
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
|
||||||
|
await qs.sendRequest(messages.endLog, {
|
||||||
|
db: dataset,
|
||||||
|
logPath: this.queryEvalInfo.evalLogPath,
|
||||||
|
});
|
||||||
|
if (await this.queryEvalInfo.hasEvalLog()) {
|
||||||
|
await this.queryEvalInfo.addQueryLogs(queryInfo, qs.cliServer, qs.logger);
|
||||||
|
} else {
|
||||||
|
void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.queryEvalInfo.evalLogPath}.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result || {
|
||||||
|
evaluationTime: 0,
|
||||||
|
message: 'No result from server',
|
||||||
|
queryId: -1,
|
||||||
|
runId: callbackId,
|
||||||
|
resultType: messages.QueryResultType.OTHER_ERROR
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async compile(
|
||||||
|
qs: qsClient.QueryServerClient,
|
||||||
|
program: messages.QlProgram,
|
||||||
|
progress: ProgressCallback,
|
||||||
|
token: CancellationToken,
|
||||||
|
): Promise<messages.CompilationMessage[]> {
|
||||||
|
let compiled: messages.CheckQueryResult | undefined;
|
||||||
|
try {
|
||||||
|
const target = this.quickEvalPosition ? {
|
||||||
|
quickEval: { quickEvalPos: this.quickEvalPosition }
|
||||||
|
} : { query: {} };
|
||||||
|
const params: messages.CompileQueryParams = {
|
||||||
|
compilationOptions: {
|
||||||
|
computeNoLocationUrls: true,
|
||||||
|
failOnWarnings: false,
|
||||||
|
fastCompilation: false,
|
||||||
|
includeDilInQlo: true,
|
||||||
|
localChecking: false,
|
||||||
|
noComputeGetUrl: false,
|
||||||
|
noComputeToString: false,
|
||||||
|
computeDefaultStrings: true,
|
||||||
|
emitDebugInfo: true
|
||||||
|
},
|
||||||
|
extraOptions: {
|
||||||
|
timeoutSecs: qs.config.timeoutSecs
|
||||||
|
},
|
||||||
|
queryToCheck: program,
|
||||||
|
resultPath: this.compiledQueryPath,
|
||||||
|
target,
|
||||||
|
};
|
||||||
|
|
||||||
|
compiled = await qs.sendRequest(messages.compileQuery, params, token, progress);
|
||||||
|
} finally {
|
||||||
|
void qs.logger.log(' - - - COMPILATION DONE - - - ', { additionalLogLocation: this.queryEvalInfo.logPath });
|
||||||
|
}
|
||||||
|
return (compiled?.messages || []).filter(msg => msg.severity === messages.Severity.ERROR);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function clearCacheInDatabase(
|
||||||
|
qs: qsClient.QueryServerClient,
|
||||||
|
dbItem: DatabaseItem,
|
||||||
|
progress: ProgressCallback,
|
||||||
|
token: CancellationToken,
|
||||||
|
): Promise<messages.ClearCacheResult> {
|
||||||
|
if (dbItem.contents === undefined) {
|
||||||
|
throw new Error('Can\'t clear the cache in an invalid database.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const db: messages.Dataset = {
|
||||||
|
dbDir: dbItem.contents.datasetUri.fsPath,
|
||||||
|
workingSet: 'default',
|
||||||
|
};
|
||||||
|
|
||||||
|
const params: messages.ClearCacheParams = {
|
||||||
|
dryRun: false,
|
||||||
|
db,
|
||||||
|
};
|
||||||
|
|
||||||
|
return qs.sendRequest(messages.clearCache, params, token, progress);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compare the dbscheme implied by the query `query` and that of the current database.
|
||||||
|
* - If they are compatible, do nothing.
|
||||||
|
* - If they are incompatible but the database can be upgraded, suggest that upgrade.
|
||||||
|
* - If they are incompatible and the database cannot be upgraded, throw an error.
|
||||||
|
*/
|
||||||
|
async function checkDbschemeCompatibility(
|
||||||
|
cliServer: cli.CodeQLCliServer,
|
||||||
|
qs: qsClient.QueryServerClient,
|
||||||
|
query: QueryInProgress,
|
||||||
|
qlProgram: messages.QlProgram,
|
||||||
|
dbItem: DatabaseItem,
|
||||||
|
progress: ProgressCallback,
|
||||||
|
token: CancellationToken,
|
||||||
|
): Promise<void> {
|
||||||
|
const searchPath = getOnDiskWorkspaceFolders();
|
||||||
|
|
||||||
|
if (dbItem.contents?.dbSchemeUri !== undefined) {
|
||||||
|
const { finalDbscheme } = await cliServer.resolveUpgrades(dbItem.contents.dbSchemeUri.fsPath, searchPath, false);
|
||||||
|
const hash = async function(filename: string): Promise<string> {
|
||||||
|
return crypto.createHash('sha256').update(await fs.readFile(filename)).digest('hex');
|
||||||
|
};
|
||||||
|
|
||||||
|
// At this point, we have learned about three dbschemes:
|
||||||
|
|
||||||
|
// the dbscheme of the actual database we're querying.
|
||||||
|
const dbschemeOfDb = await hash(dbItem.contents.dbSchemeUri.fsPath);
|
||||||
|
|
||||||
|
// the dbscheme of the query we're running, including the library we've resolved it to use.
|
||||||
|
const dbschemeOfLib = await hash(query.queryDbscheme);
|
||||||
|
|
||||||
|
// the database we're able to upgrade to
|
||||||
|
const upgradableTo = await hash(finalDbscheme);
|
||||||
|
|
||||||
|
if (upgradableTo != dbschemeOfLib) {
|
||||||
|
reportNoUpgradePath(qlProgram, query);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (upgradableTo == dbschemeOfLib &&
|
||||||
|
dbschemeOfDb != dbschemeOfLib) {
|
||||||
|
// Try to upgrade the database
|
||||||
|
await upgradeDatabaseExplicit(
|
||||||
|
qs,
|
||||||
|
dbItem,
|
||||||
|
progress,
|
||||||
|
token
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function reportNoUpgradePath(qlProgram: messages.QlProgram, query: QueryInProgress): void {
|
||||||
|
throw new Error(
|
||||||
|
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compile a non-destructive upgrade.
|
||||||
|
*/
|
||||||
|
async function compileNonDestructiveUpgrade(
|
||||||
|
qs: qsClient.QueryServerClient,
|
||||||
|
upgradeTemp: tmp.DirectoryResult,
|
||||||
|
query: QueryInProgress,
|
||||||
|
qlProgram: messages.QlProgram,
|
||||||
|
dbItem: DatabaseItem,
|
||||||
|
progress: ProgressCallback,
|
||||||
|
token: CancellationToken,
|
||||||
|
): Promise<string> {
|
||||||
|
|
||||||
|
if (!dbItem?.contents?.dbSchemeUri) {
|
||||||
|
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// When packaging is used, dependencies may exist outside of the workspace and they are always on the resolved search path.
|
||||||
|
// When packaging is not used, all dependencies are in the workspace.
|
||||||
|
const upgradesPath = (await qs.cliServer.cliConstraints.supportsPackaging())
|
||||||
|
? qlProgram.libraryPath
|
||||||
|
: getOnDiskWorkspaceFolders();
|
||||||
|
|
||||||
|
const { scripts, matchesTarget } = await qs.cliServer.resolveUpgrades(
|
||||||
|
dbItem.contents.dbSchemeUri.fsPath,
|
||||||
|
upgradesPath,
|
||||||
|
true,
|
||||||
|
query.queryDbscheme
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!matchesTarget) {
|
||||||
|
reportNoUpgradePath(qlProgram, query);
|
||||||
|
}
|
||||||
|
const result = await compileDatabaseUpgradeSequence(qs, dbItem, scripts, upgradeTemp, progress, token);
|
||||||
|
if (result.compiledUpgrade === undefined) {
|
||||||
|
const error = result.error || '[no error message available]';
|
||||||
|
throw new Error(error);
|
||||||
|
}
|
||||||
|
// We can upgrade to the actual target
|
||||||
|
qlProgram.dbschemePath = query.queryDbscheme;
|
||||||
|
// We are new enough that we will always support single file upgrades.
|
||||||
|
return result.compiledUpgrade;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
export async function compileAndRunQueryAgainstDatabase(
|
||||||
|
cliServer: cli.CodeQLCliServer,
|
||||||
|
qs: qsClient.QueryServerClient,
|
||||||
|
dbItem: DatabaseItem,
|
||||||
|
initialInfo: InitialQueryInfo,
|
||||||
|
queryStorageDir: string,
|
||||||
|
progress: ProgressCallback,
|
||||||
|
token: CancellationToken,
|
||||||
|
templates?: Record<string, string>,
|
||||||
|
queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
|
||||||
|
): Promise<QueryWithResults> {
|
||||||
|
if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
|
||||||
|
throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the workspace folder paths.
|
||||||
|
const diskWorkspaceFolders = getOnDiskWorkspaceFolders();
|
||||||
|
// Figure out the library path for the query.
|
||||||
|
const packConfig = await cliServer.resolveLibraryPath(diskWorkspaceFolders, initialInfo.queryPath);
|
||||||
|
|
||||||
|
if (!packConfig.dbscheme) {
|
||||||
|
throw new Error('Could not find a database scheme for this query. Please check that you have a valid qlpack.yml file for this query, which refers to a database scheme either in the `dbscheme` field or through one of its dependencies.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check whether the query has an entirely different schema from the
|
||||||
|
// database. (Queries that merely need the database to be upgraded
|
||||||
|
// won't trigger this check)
|
||||||
|
// This test will produce confusing results if we ever change the name of the database schema files.
|
||||||
|
const querySchemaName = path.basename(packConfig.dbscheme);
|
||||||
|
const dbSchemaName = path.basename(dbItem.contents.dbSchemeUri.fsPath);
|
||||||
|
if (querySchemaName != dbSchemaName) {
|
||||||
|
void logger.log(`Query schema was ${querySchemaName}, but database schema was ${dbSchemaName}.`);
|
||||||
|
throw new Error(`The query ${path.basename(initialInfo.queryPath)} cannot be run against the selected database (${dbItem.name}): their target languages are different. Please select a different database and try again.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const qlProgram: messages.QlProgram = {
|
||||||
|
// The project of the current document determines which library path
|
||||||
|
// we use. The `libraryPath` field in this server message is relative
|
||||||
|
// to the workspace root, not to the project root.
|
||||||
|
libraryPath: packConfig.libraryPath,
|
||||||
|
// Since we are compiling and running a query against a database,
|
||||||
|
// we use the database's DB scheme here instead of the DB scheme
|
||||||
|
// from the current document's project.
|
||||||
|
dbschemePath: dbItem.contents.dbSchemeUri.fsPath,
|
||||||
|
queryPath: initialInfo.queryPath
|
||||||
|
};
|
||||||
|
|
||||||
|
// Read the query metadata if possible, to use in the UI.
|
||||||
|
const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);
|
||||||
|
|
||||||
|
let availableMlModels: cli.MlModelInfo[] = [];
|
||||||
|
if (!await cliServer.cliConstraints.supportsResolveMlModels()) {
|
||||||
|
void logger.log('Resolving ML models is unsupported by this version of the CLI. Running the query without any ML models.');
|
||||||
|
} else {
|
||||||
|
try {
|
||||||
|
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders, initialInfo.queryPath)).models;
|
||||||
|
if (availableMlModels.length) {
|
||||||
|
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
|
||||||
|
} else {
|
||||||
|
void logger.log('Did not find any available ML models.');
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
const message = `Couldn't resolve available ML models for ${qlProgram.queryPath}. Running the ` +
|
||||||
|
`query without any ML models: ${e}.`;
|
||||||
|
void showAndLogErrorMessage(message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const hasMetadataFile = (await dbItem.hasMetadataFile());
|
||||||
|
const query = new QueryInProgress(
|
||||||
|
path.join(queryStorageDir, initialInfo.id),
|
||||||
|
dbItem.databaseUri.fsPath,
|
||||||
|
hasMetadataFile,
|
||||||
|
packConfig.dbscheme,
|
||||||
|
initialInfo.quickEvalPosition,
|
||||||
|
metadata,
|
||||||
|
templates
|
||||||
|
);
|
||||||
|
await query.queryEvalInfo.createTimestampFile();
|
||||||
|
|
||||||
|
let upgradeDir: tmp.DirectoryResult | undefined;
|
||||||
|
try {
|
||||||
|
let upgradeQlo;
|
||||||
|
if (await cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||||
|
upgradeDir = await tmp.dir({ dir: upgradesTmpDir, unsafeCleanup: true });
|
||||||
|
upgradeQlo = await compileNonDestructiveUpgrade(qs, upgradeDir, query, qlProgram, dbItem, progress, token);
|
||||||
|
} else {
|
||||||
|
await checkDbschemeCompatibility(cliServer, qs, query, qlProgram, dbItem, progress, token);
|
||||||
|
}
|
||||||
|
let errors;
|
||||||
|
try {
|
||||||
|
errors = await query.compile(qs, qlProgram, progress, token);
|
||||||
|
} catch (e) {
|
||||||
|
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||||
|
return createSyntheticResult(query, 'Query cancelled');
|
||||||
|
} else {
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (errors.length === 0) {
|
||||||
|
const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
|
||||||
|
if (result.resultType !== messages.QueryResultType.SUCCESS) {
|
||||||
|
const message = result.message || 'Failed to run query';
|
||||||
|
void logger.log(message);
|
||||||
|
void showAndLogErrorMessage(message);
|
||||||
|
}
|
||||||
|
const message = formatLegacyMessage(result);
|
||||||
|
|
||||||
|
return {
|
||||||
|
query: query.queryEvalInfo,
|
||||||
|
message,
|
||||||
|
result,
|
||||||
|
successful: result.resultType == messages.QueryResultType.SUCCESS,
|
||||||
|
logFileLocation: result.logFileLocation,
|
||||||
|
dispose: () => {
|
||||||
|
qs.logger.removeAdditionalLogLocation(result.logFileLocation);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
// Error dialogs are limited in size and scrollability,
|
||||||
|
// so we include a general description of the problem,
|
||||||
|
// and direct the user to the output window for the detailed compilation messages.
|
||||||
|
// However we don't show quick eval errors there so we need to display them anyway.
|
||||||
|
void qs.logger.log(
|
||||||
|
`Failed to compile query ${initialInfo.queryPath} against database scheme ${qlProgram.dbschemePath}:`,
|
||||||
|
{ additionalLogLocation: query.queryEvalInfo.logPath }
|
||||||
|
);
|
||||||
|
|
||||||
|
const formattedMessages: string[] = [];
|
||||||
|
|
||||||
|
for (const error of errors) {
|
||||||
|
const message = error.message || '[no error message available]';
|
||||||
|
const formatted = `ERROR: ${message} (${error.position.fileName}:${error.position.line}:${error.position.column}:${error.position.endLine}:${error.position.endColumn})`;
|
||||||
|
formattedMessages.push(formatted);
|
||||||
|
void qs.logger.log(formatted, { additionalLogLocation: query.queryEvalInfo.logPath });
|
||||||
|
}
|
||||||
|
if (initialInfo.isQuickEval && formattedMessages.length <= 2) {
|
||||||
|
// If there are more than 2 error messages, they will not be displayed well in a popup
|
||||||
|
// and will be trimmed by the function displaying the error popup. Accordingly, we only
|
||||||
|
// try to show the errors if there are 2 or less, otherwise we direct the user to the log.
|
||||||
|
void showAndLogErrorMessage('Quick evaluation compilation failed: ' + formattedMessages.join('\n'));
|
||||||
|
} else {
|
||||||
|
void showAndLogErrorMessage((initialInfo.isQuickEval ? 'Quick evaluation' : 'Query') + compilationFailedErrorTail);
|
||||||
|
}
|
||||||
|
return createSyntheticResult(query, 'Query had compilation errors');
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
await upgradeDir?.cleanup();
|
||||||
|
} catch (e) {
|
||||||
|
void qs.logger.log(
|
||||||
|
`Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
|
||||||
|
{ additionalLogLocation: query.queryEvalInfo.logPath }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}

const compilationFailedErrorTail = ' compilation failed. Please make sure there are no errors in the query, the database is up to date,' +
  ' and the query and database use the same target language. For more details on the error, go to View > Output,' +
  ' and choose CodeQL Query Server from the dropdown.';

export function formatLegacyMessage(result: messages.EvaluationResult) {
  switch (result.resultType) {
    case messages.QueryResultType.CANCELLATION:
      return `cancelled after ${Math.round(result.evaluationTime / 1000)} seconds`;
    case messages.QueryResultType.OOM:
      return 'out of memory';
    case messages.QueryResultType.SUCCESS:
      return `finished in ${Math.round(result.evaluationTime / 1000)} seconds`;
    case messages.QueryResultType.TIMEOUT:
      return `timed out after ${Math.round(result.evaluationTime / 1000)} seconds`;
    case messages.QueryResultType.OTHER_ERROR:
    default:
      return result.message ? `failed: ${result.message}` : 'failed';
  }
}
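
As a quick illustration of the mapping above, here is a hedged sketch of calling formatLegacyMessage with made-up results; only the fields the function actually reads are filled in, which is why the literals are cast rather than being complete messages.EvaluationResult values.

const oomResult = {
  resultType: messages.QueryResultType.OOM,
  evaluationTime: 0
} as messages.EvaluationResult;
const timeoutResult = {
  resultType: messages.QueryResultType.TIMEOUT,
  evaluationTime: 4500
} as messages.EvaluationResult;

formatLegacyMessage(oomResult);      // 'out of memory'
formatLegacyMessage(timeoutResult);  // 'timed out after 5 seconds' (4500 ms rounds to 5)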

/**
 * Create a synthetic result for a query that failed to compile.
 */
function createSyntheticResult(
  query: QueryInProgress,
  message: string,
): QueryWithResults {
  return {
    query: query.queryEvalInfo,
    message,
    result: {
      evaluationTime: 0,
      queryId: 0,
      resultType: messages.QueryResultType.OTHER_ERROR,
      message,
      runId: 0,
    },
    successful: false,
    dispose: () => { /**/ },
  };
}

function createSimpleTemplates(templates: Record<string, string> | undefined): messages.TemplateDefinitions | undefined {
  if (!templates) {
    return undefined;
  }
  const result: messages.TemplateDefinitions = {};
  for (const key of Object.keys(templates)) {
    result[key] = {
      values: {
        tuples: [[{ stringValue: templates[key] }]]
      }
    };
  }
  return result;
}
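
To show the shape createSimpleTemplates produces, a small sketch with an invented template key and value; the function simply wraps each string in a single one-column tuple.

const templates = createSimpleTemplates({ selectedSourceFile: '/workspace/project/src/main.cpp' });
// templates is now equivalent to:
// {
//   selectedSourceFile: {
//     values: { tuples: [[{ stringValue: '/workspace/project/src/main.cpp' }]] }
//   }
// }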
@@ -1,14 +1,12 @@
|
|||||||
import * as vscode from 'vscode';
|
import * as vscode from 'vscode';
|
||||||
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage } from './helpers';
|
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from '../helpers';
|
||||||
import { ProgressCallback, UserCancellationException } from './commandRunner';
|
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||||
import { logger } from './logging';
|
import { logger } from '../logging';
|
||||||
import * as messages from './pure/messages';
|
import * as messages from '../pure/legacy-messages';
|
||||||
import * as qsClient from './queryserver-client';
|
import * as qsClient from './queryserver-client';
|
||||||
import { upgradesTmpDir } from './run-queries';
|
|
||||||
import * as tmp from 'tmp-promise';
|
import * as tmp from 'tmp-promise';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as semver from 'semver';
|
import { DatabaseItem } from '../databases';
|
||||||
import { DatabaseItem } from './databases';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Maximum number of lines to include from database upgrade message,
|
* Maximum number of lines to include from database upgrade message,
|
||||||
@@ -17,17 +15,6 @@ import { DatabaseItem } from './databases';
|
|||||||
*/
|
*/
|
||||||
const MAX_UPGRADE_MESSAGE_LINES = 10;
|
const MAX_UPGRADE_MESSAGE_LINES = 10;
|
||||||
|
|
||||||
/**
|
|
||||||
* Check that we support non-destructive upgrades.
|
|
||||||
*
|
|
||||||
* This requires 3 features. The ability to compile an upgrade sequence; The ability to
|
|
||||||
* run a non-destructive upgrades as a query; the ability to specify a target when
|
|
||||||
* resolving upgrades. We check for a version of codeql that has all three features.
|
|
||||||
*/
|
|
||||||
export async function hasNondestructiveUpgradeCapabilities(qs: qsClient.QueryServerClient): Promise<boolean> {
|
|
||||||
return semver.gte(await qs.cliServer.getVersion(), '2.4.2');
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Compile a database upgrade sequence.
|
* Compile a database upgrade sequence.
|
||||||
@@ -35,16 +22,16 @@ export async function hasNondestructiveUpgradeCapabilities(qs: qsClient.QuerySer
|
|||||||
*/
|
*/
|
||||||
export async function compileDatabaseUpgradeSequence(
|
export async function compileDatabaseUpgradeSequence(
|
||||||
qs: qsClient.QueryServerClient,
|
qs: qsClient.QueryServerClient,
|
||||||
db: DatabaseItem,
|
dbItem: DatabaseItem,
|
||||||
resolvedSequence: string[],
|
resolvedSequence: string[],
|
||||||
currentUpgradeTmp: tmp.DirectoryResult,
|
currentUpgradeTmp: tmp.DirectoryResult,
|
||||||
progress: ProgressCallback,
|
progress: ProgressCallback,
|
||||||
token: vscode.CancellationToken
|
token: vscode.CancellationToken
|
||||||
): Promise<messages.CompileUpgradeSequenceResult> {
|
): Promise<messages.CompileUpgradeSequenceResult> {
|
||||||
if (db.contents === undefined || db.contents.dbSchemeUri === undefined) {
|
if (dbItem.contents === undefined || dbItem.contents.dbSchemeUri === undefined) {
|
||||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||||
}
|
}
|
||||||
if (!await hasNondestructiveUpgradeCapabilities(qs)) {
|
if (!await qs.cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
|
||||||
throw new Error('The version of codeql is too old to run non-destructive upgrades.');
|
throw new Error('The version of codeql is too old to run non-destructive upgrades.');
|
||||||
}
|
}
|
||||||
// If possible just compile the upgrade sequence
|
// If possible just compile the upgrade sequence
|
||||||
@@ -56,14 +43,14 @@ export async function compileDatabaseUpgradeSequence(
|
|||||||
|
|
||||||
async function compileDatabaseUpgrade(
|
async function compileDatabaseUpgrade(
|
||||||
qs: qsClient.QueryServerClient,
|
qs: qsClient.QueryServerClient,
|
||||||
db: DatabaseItem,
|
dbItem: DatabaseItem,
|
||||||
targetDbScheme: string,
|
targetDbScheme: string,
|
||||||
resolvedSequence: string[],
|
resolvedSequence: string[],
|
||||||
currentUpgradeTmp: tmp.DirectoryResult,
|
currentUpgradeTmp: tmp.DirectoryResult,
|
||||||
progress: ProgressCallback,
|
progress: ProgressCallback,
|
||||||
token: vscode.CancellationToken
|
token: vscode.CancellationToken
|
||||||
): Promise<messages.CompileUpgradeResult> {
|
): Promise<messages.CompileUpgradeResult> {
|
||||||
if (!db.contents?.dbSchemeUri) {
|
if (!dbItem.contents?.dbSchemeUri) {
|
||||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||||
}
|
}
|
||||||
// We have the upgrades we want but compileUpgrade
|
// We have the upgrades we want but compileUpgrade
|
||||||
@@ -78,7 +65,7 @@ async function compileDatabaseUpgrade(
|
|||||||
});
|
});
|
||||||
return qs.sendRequest(messages.compileUpgrade, {
|
return qs.sendRequest(messages.compileUpgrade, {
|
||||||
upgrade: {
|
upgrade: {
|
||||||
fromDbscheme: db.contents.dbSchemeUri.fsPath,
|
fromDbscheme: dbItem.contents.dbSchemeUri.fsPath,
|
||||||
toDbscheme: targetDbScheme,
|
toDbscheme: targetDbScheme,
|
||||||
additionalUpgrades: Array.from(uniqueParentDirs)
|
additionalUpgrades: Array.from(uniqueParentDirs)
|
||||||
},
|
},
|
||||||
@@ -103,7 +90,7 @@ async function checkAndConfirmDatabaseUpgrade(
|
|||||||
descriptionMessage += `Would perform upgrade: ${script.description}\n`;
|
descriptionMessage += `Would perform upgrade: ${script.description}\n`;
|
||||||
descriptionMessage += `\t-> Compatibility: ${script.compatibility}\n`;
|
descriptionMessage += `\t-> Compatibility: ${script.compatibility}\n`;
|
||||||
}
|
}
|
||||||
logger.log(descriptionMessage);
|
void logger.log(descriptionMessage);
|
||||||
|
|
||||||
|
|
||||||
// If the quiet flag is set, do the upgrade without a popup.
|
// If the quiet flag is set, do the upgrade without a popup.
|
||||||
@@ -159,18 +146,18 @@ function getUpgradeDescriptions(compiled: messages.CompiledUpgrades): messages.U
|
|||||||
*/
|
*/
|
||||||
export async function upgradeDatabaseExplicit(
|
export async function upgradeDatabaseExplicit(
|
||||||
qs: qsClient.QueryServerClient,
|
qs: qsClient.QueryServerClient,
|
||||||
db: DatabaseItem,
|
dbItem: DatabaseItem,
|
||||||
progress: ProgressCallback,
|
progress: ProgressCallback,
|
||||||
token: vscode.CancellationToken,
|
token: vscode.CancellationToken,
|
||||||
): Promise<messages.RunUpgradeResult | undefined> {
|
): Promise<messages.RunUpgradeResult | undefined> {
|
||||||
|
|
||||||
const searchPath: string[] = getOnDiskWorkspaceFolders();
|
const searchPath: string[] = getOnDiskWorkspaceFolders();
|
||||||
|
|
||||||
if (!db?.contents?.dbSchemeUri) {
|
if (!dbItem?.contents?.dbSchemeUri) {
|
||||||
throw new Error('Database is invalid, and cannot be upgraded.');
|
throw new Error('Database is invalid, and cannot be upgraded.');
|
||||||
}
|
}
|
||||||
const upgradeInfo = await qs.cliServer.resolveUpgrades(
|
const upgradeInfo = await qs.cliServer.resolveUpgrades(
|
||||||
db.contents.dbSchemeUri.fsPath,
|
dbItem.contents.dbSchemeUri.fsPath,
|
||||||
searchPath,
|
searchPath,
|
||||||
false
|
false
|
||||||
);
|
);
|
||||||
@@ -180,42 +167,49 @@ export async function upgradeDatabaseExplicit(
|
|||||||
if (finalDbscheme === undefined) {
|
if (finalDbscheme === undefined) {
|
||||||
throw new Error('Could not determine target dbscheme to upgrade to.');
|
throw new Error('Could not determine target dbscheme to upgrade to.');
|
||||||
}
|
}
|
||||||
const currentUpgradeTmp = await tmp.dir({ dir: upgradesTmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
|
const currentUpgradeTmp = await tmp.dir({ dir: tmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
|
||||||
try {
|
try {
|
||||||
let compileUpgradeResult: messages.CompileUpgradeResult;
|
let compileUpgradeResult: messages.CompileUpgradeResult;
|
||||||
try {
|
try {
|
||||||
compileUpgradeResult = await compileDatabaseUpgrade(qs, db, finalDbscheme, scripts, currentUpgradeTmp, progress, token);
|
compileUpgradeResult = await compileDatabaseUpgrade(qs, dbItem, finalDbscheme, scripts, currentUpgradeTmp, progress, token);
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
showAndLogErrorMessage(`Compilation of database upgrades failed: ${e}`);
|
void showAndLogErrorMessage(`Compilation of database upgrades failed: ${e}`);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
finally {
|
finally {
|
||||||
qs.logger.log('Done compiling database upgrade.');
|
void qs.logger.log('Done compiling database upgrade.');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!compileUpgradeResult.compiledUpgrades) {
|
if (!compileUpgradeResult.compiledUpgrades) {
|
||||||
const error = compileUpgradeResult.error || '[no error message available]';
|
const error = compileUpgradeResult.error || '[no error message available]';
|
||||||
showAndLogErrorMessage(`Compilation of database upgrades failed: ${error}`);
|
void showAndLogErrorMessage(`Compilation of database upgrades failed: ${error}`);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
await checkAndConfirmDatabaseUpgrade(compileUpgradeResult.compiledUpgrades, db, qs.cliServer.quiet);
|
await checkAndConfirmDatabaseUpgrade(compileUpgradeResult.compiledUpgrades, dbItem, qs.cliServer.quiet);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
qs.logger.log('Running the following database upgrade:');
|
void qs.logger.log('Running the following database upgrade:');
|
||||||
|
|
||||||
getUpgradeDescriptions(compileUpgradeResult.compiledUpgrades).map(s => s.description).join('\n');
|
getUpgradeDescriptions(compileUpgradeResult.compiledUpgrades).map(s => s.description).join('\n');
|
||||||
return await runDatabaseUpgrade(qs, db, compileUpgradeResult.compiledUpgrades, progress, token);
|
const result = await runDatabaseUpgrade(qs, dbItem, compileUpgradeResult.compiledUpgrades, progress, token);
|
||||||
|
|
||||||
|
// TODO Can remove the next lines when https://github.com/github/codeql-team/issues/1241 is fixed
|
||||||
|
// restart the query server to avoid a bug in the CLI where the upgrade is applied, but the old dbscheme
|
||||||
|
// is still cached in memory.
|
||||||
|
|
||||||
|
await qs.restartQueryServer(progress, token);
|
||||||
|
return result;
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
showAndLogErrorMessage(`Database upgrade failed: ${e}`);
|
void showAndLogErrorMessage(`Database upgrade failed: ${e}`);
|
||||||
return;
|
return;
|
||||||
} finally {
|
} finally {
|
||||||
qs.logger.log('Done running database upgrade.');
|
void qs.logger.log('Done running database upgrade.');
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
currentUpgradeTmp.cleanup();
|
await currentUpgradeTmp.cleanup();
|
||||||
}
|
}
|
||||||
}
|
}
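
The explicit-upgrade path above stages its compiled upgrades in a scratch directory created with tmp-promise and cleans it up in a finally block. Below is a minimal, standalone sketch of that pattern; the 'upgrade_' prefix mirrors the code above, while the helper name is invented and the extension's real parent directory is omitted for simplicity.

import * as tmp from 'tmp-promise';

// Create a scratch directory, run some work in it, and always clean it up afterwards.
async function withScratchDir<T>(work: (dirPath: string) => Promise<T>): Promise<T> {
  // keep: false and unsafeCleanup: true let cleanup() delete the directory even if it is not empty.
  const scratch = await tmp.dir({ prefix: 'upgrade_', keep: false, unsafeCleanup: true });
  try {
    return await work(scratch.path);
  } finally {
    await scratch.cleanup();
  }
}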
|
||||||
|
|
||||||
462
extensions/ql-vscode/src/log-insights/join-order.ts
Normal file
@@ -0,0 +1,462 @@
|
|||||||
|
import * as I from 'immutable';
|
||||||
|
import { EvaluationLogProblemReporter, EvaluationLogScanner, EvaluationLogScannerProvider } from './log-scanner';
|
||||||
|
import { InLayer, ComputeRecursive, SummaryEvent, PipelineRun, ComputeSimple } from './log-summary';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Like `max`, but returns 0 if no meaningful maximum can be computed.
|
||||||
|
*/
|
||||||
|
function safeMax(it?: Iterable<number>) {
|
||||||
|
const m = Math.max(...(it || []));
|
||||||
|
return Number.isFinite(m) ? m : 0;
|
||||||
|
}
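
A few data points for the edge cases handled by the safeMax helper defined above (Math.max over an empty spread is -Infinity, which is why the Number.isFinite guard is needed):

safeMax([3, 7, 5]);  // 7
safeMax([]);         // 0 (Math.max() with no arguments is -Infinity)
safeMax(undefined);  // 0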
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute a key for the maps that are sent to report generation.
|
||||||
|
* Should only be used on events that are known to define queryCausingWork.
|
||||||
|
*/
|
||||||
|
function makeKey(
|
||||||
|
queryCausingWork: string | undefined,
|
||||||
|
predicate: string,
|
||||||
|
suffix = ''
|
||||||
|
): string {
|
||||||
|
if (queryCausingWork === undefined) {
|
||||||
|
throw new Error(
|
||||||
|
'queryCausingWork was not defined on an event we expected it to be defined for!'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return `${queryCausingWork}:${predicate}${suffix ? ' ' + suffix : ''}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEPENDENT_PREDICATES_REGEXP = (() => {
|
||||||
|
const regexps = [
|
||||||
|
// SCAN id
|
||||||
|
String.raw`SCAN\s+([0-9a-zA-Z:#_]+)\s`,
|
||||||
|
// JOIN id WITH id
|
||||||
|
String.raw`JOIN\s+([0-9a-zA-Z:#_]+)\s+WITH\s+([0-9a-zA-Z:#_]+)\s`,
|
||||||
|
// AGGREGATE id, id
|
||||||
|
String.raw`AGGREGATE\s+([0-9a-zA-Z:#_]+)\s*,\s+([0-9a-zA-Z:#_]+)`,
|
||||||
|
// id AND NOT id
|
||||||
|
String.raw`([0-9a-zA-Z:#_]+)\s+AND\s+NOT\s+([0-9a-zA-Z:#_]+)`,
|
||||||
|
// INVOKE HIGHER-ORDER RELATION rel ON <id, ..., id>
|
||||||
|
String.raw`INVOKE\s+HIGHER-ORDER\s+RELATION\s[^\s]+\sON\s+<([0-9a-zA-Z:#_<>]+)((?:,[0-9a-zA-Z:#_<>]+)*)>`,
|
||||||
|
// SELECT id
|
||||||
|
String.raw`SELECT\s+([0-9a-zA-Z:#_]+)`
|
||||||
|
];
|
||||||
|
return new RegExp(
|
||||||
|
`${String.raw`\{[0-9]+\}\s+[0-9a-zA-Z]+\s=\s(?:` + regexps.join('|')})`
|
||||||
|
);
|
||||||
|
})();
|
||||||
|
|
||||||
|
function getDependentPredicates(operations: string[]): I.List<string> {
|
||||||
|
return I.List(operations).flatMap(operation => {
|
||||||
|
const matches = DEPENDENT_PREDICATES_REGEXP.exec(operation.trim());
|
||||||
|
if (matches !== null) {
|
||||||
|
return I.List(matches)
|
||||||
|
.rest() // Skip the first group as it's just the entire string
|
||||||
|
.filter(x => !!x && !x.match('r[0-9]+|PRIMITIVE')) // Only keep the references to predicates.
|
||||||
|
.flatMap(x => x.split(',')) // Group 2 in the INVOKE HIGHER_ORDER RELATION case is a comma-separated list of identifiers.
|
||||||
|
.filter(x => !!x); // Remove empty strings
|
||||||
|
} else {
|
||||||
|
return I.List();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function getMainHash(event: InLayer | ComputeRecursive): string {
|
||||||
|
switch (event.evaluationStrategy) {
|
||||||
|
case 'IN_LAYER':
|
||||||
|
return event.mainHash;
|
||||||
|
case 'COMPUTE_RECURSIVE':
|
||||||
|
return event.raHash;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sum arrays a and b element-wise. The shorter array is padded with 0s if the arrays are not the same length.
|
||||||
|
*/
|
||||||
|
function pointwiseSum(a: Int32Array, b: Int32Array, problemReporter: EvaluationLogProblemReporter): Int32Array {
|
||||||
|
function reportIfInconsistent(ai: number, bi: number) {
|
||||||
|
if (ai === -1 && bi !== -1) {
|
||||||
|
problemReporter.log(
|
||||||
|
`Operation was not evaluated in the first pipeline, but it was evaluated in the accumulated pipeline (with tuple count ${bi}).`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (ai !== -1 && bi === -1) {
|
||||||
|
problemReporter.log(
|
||||||
|
`Operation was evaluated in the first pipeline (with tuple count ${ai}), but it was not evaluated in the accumulated pipeline.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const length = Math.max(a.length, b.length);
|
||||||
|
const result = new Int32Array(length);
|
||||||
|
for (let i = 0; i < length; i++) {
|
||||||
|
const ai = a[i] || 0;
|
||||||
|
const bi = b[i] || 0;
|
||||||
|
// -1 is used to represent the absence of a tuple count for a line in the pretty-printed RA (e.g. an empty line), so we ignore those.
|
||||||
|
if (i < a.length && i < b.length && (ai === -1 || bi === -1)) {
|
||||||
|
result[i] = -1;
|
||||||
|
reportIfInconsistent(ai, bi);
|
||||||
|
} else {
|
||||||
|
result[i] = ai + bi;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
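
To make the padding and the -1 convention concrete, a sketch of calling pointwiseSum (defined above) with small inputs; the no-op problem reporter below is a stand-in for illustration only.

const quietReporter: EvaluationLogProblemReporter = {
  reportProblem: () => { /**/ },
  log: () => { /**/ }
};

pointwiseSum(new Int32Array([1, 2, 3]), new Int32Array([10, 20]), quietReporter);
// -> Int32Array [11, 22, 3]   (the shorter array is padded with 0s)
pointwiseSum(new Int32Array([-1, 2]), new Int32Array([5, 2]), quietReporter);
// -> Int32Array [-1, 4]       (a -1 on either side is preserved and the inconsistency is reported via log())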
|
||||||
|
|
||||||
|
function pushValue<K, V>(m: Map<K, V[]>, k: K, v: V) {
|
||||||
|
if (!m.has(k)) {
|
||||||
|
m.set(k, []);
|
||||||
|
}
|
||||||
|
m.get(k)!.push(v);
|
||||||
|
return m;
|
||||||
|
}
|
||||||
|
|
||||||
|
function computeJoinOrderBadness(
|
||||||
|
maxTupleCount: number,
|
||||||
|
maxDependentPredicateSize: number,
|
||||||
|
resultSize: number
|
||||||
|
): number {
|
||||||
|
return maxTupleCount / Math.max(maxDependentPredicateSize, resultSize);
|
||||||
|
}
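
A worked example of the metric with invented numbers: a pipeline whose peak tuple count dwarfs both its largest input and its output scores high, while one whose peak matches its output stays near 1.

computeJoinOrderBadness(1_000_000, 2_000, 500);  // 500: the pipeline peaks at 500x its largest relation
computeJoinOrderBadness(10_000, 8_000, 10_000);  // 1: the peak tuple count matches the result size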
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A bucket contains the pointwise sum of the tuple counts, result sizes and dependent predicate sizes
|
||||||
|
* For each (predicate, order) in an SCC, we will compute a bucket.
|
||||||
|
*/
|
||||||
|
interface Bucket {
|
||||||
|
tupleCounts: Int32Array;
|
||||||
|
resultSize: number;
|
||||||
|
dependentPredicateSizes: I.Map<string, number>;
|
||||||
|
}
|
||||||
|
|
||||||
|
class JoinOrderScanner implements EvaluationLogScanner {
|
||||||
|
// Map a predicate hash to its result size
|
||||||
|
private readonly predicateSizes = new Map<string, number>();
|
||||||
|
private readonly layerEvents = new Map<string, (ComputeRecursive | InLayer)[]>();
|
||||||
|
// Map a key of the form 'query-with-demand : predicate name' to its badness input.
|
||||||
|
private readonly maxTupleCountMap = new Map<string, number[]>();
|
||||||
|
private readonly resultSizeMap = new Map<string, number[]>();
|
||||||
|
private readonly maxDependentPredicateSizeMap = new Map<string, number[]>();
|
||||||
|
private readonly joinOrderMetricMap = new Map<string, number>();
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private readonly problemReporter: EvaluationLogProblemReporter,
|
||||||
|
private readonly warningThreshold: number) {
|
||||||
|
}
|
||||||
|
|
||||||
|
public onEvent(event: SummaryEvent): void {
|
||||||
|
if (
|
||||||
|
event.completionType !== undefined &&
|
||||||
|
event.completionType !== 'SUCCESS'
|
||||||
|
) {
|
||||||
|
return; // Skip any evaluation that wasn't successful
|
||||||
|
}
|
||||||
|
|
||||||
|
this.recordPredicateSizes(event);
|
||||||
|
this.computeBadnessMetric(event);
|
||||||
|
}
|
||||||
|
|
||||||
|
public onDone(): void {
|
||||||
|
void this;
|
||||||
|
}
|
||||||
|
|
||||||
|
private recordPredicateSizes(event: SummaryEvent): void {
|
||||||
|
switch (event.evaluationStrategy) {
|
||||||
|
case 'EXTENSIONAL':
|
||||||
|
case 'COMPUTED_EXTENSIONAL':
|
||||||
|
case 'COMPUTE_SIMPLE':
|
||||||
|
case 'CACHACA':
|
||||||
|
case 'CACHE_HIT': {
|
||||||
|
this.predicateSizes.set(event.raHash, event.resultSize);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case 'SENTINEL_EMPTY': {
|
||||||
|
this.predicateSizes.set(event.raHash, 0);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case 'COMPUTE_RECURSIVE':
|
||||||
|
case 'IN_LAYER': {
|
||||||
|
this.predicateSizes.set(event.raHash, event.resultSize);
|
||||||
|
// layerEvents are indexed by the mainHash.
|
||||||
|
const hash = getMainHash(event);
|
||||||
|
if (!this.layerEvents.has(hash)) {
|
||||||
|
this.layerEvents.set(hash, []);
|
||||||
|
}
|
||||||
|
this.layerEvents.get(hash)!.push(event);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private reportProblemIfNecessary(event: SummaryEvent, iteration: number, metric: number): void {
|
||||||
|
if (metric >= this.warningThreshold) {
|
||||||
|
this.problemReporter.reportProblem(event.predicateName, event.raHash, iteration,
|
||||||
|
`Relation '${event.predicateName}' has an inefficient join order. Its join order metric is ${metric.toFixed(2)}, which is larger than the threshold of ${this.warningThreshold.toFixed(2)}.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private computeBadnessMetric(event: SummaryEvent): void {
|
||||||
|
if (
|
||||||
|
event.completionType !== undefined &&
|
||||||
|
event.completionType !== 'SUCCESS'
|
||||||
|
) {
|
||||||
|
return; // Skip any evaluation that wasn't successful
|
||||||
|
}
|
||||||
|
switch (event.evaluationStrategy) {
|
||||||
|
case 'COMPUTE_SIMPLE': {
|
||||||
|
if (!event.pipelineRuns) {
|
||||||
|
// skip if the optional pipelineRuns field is not present.
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
// Compute the badness metric for a non-recursive predicate. The metric in this case is defined as:
|
||||||
|
// badness = (max tuple count in the pipeline) / (largest predicate this pipeline depends on)
|
||||||
|
const key = makeKey(event.queryCausingWork, event.predicateName);
|
||||||
|
const resultSize = event.resultSize;
|
||||||
|
|
||||||
|
// There is only one entry in `pipelineRuns` if it's a non-recursive predicate.
|
||||||
|
const { maxTupleCount, maxDependentPredicateSize } =
|
||||||
|
this.badnessInputsForNonRecursiveDelta(event.pipelineRuns[0], event);
|
||||||
|
|
||||||
|
if (maxDependentPredicateSize > 0) {
|
||||||
|
pushValue(this.maxTupleCountMap, key, maxTupleCount);
|
||||||
|
pushValue(this.resultSizeMap, key, resultSize);
|
||||||
|
pushValue(
|
||||||
|
this.maxDependentPredicateSizeMap,
|
||||||
|
key,
|
||||||
|
maxDependentPredicateSize
|
||||||
|
);
|
||||||
|
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize!);
|
||||||
|
this.joinOrderMetricMap.set(key, metric);
|
||||||
|
this.reportProblemIfNecessary(event, 0, metric);
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'COMPUTE_RECURSIVE': {
|
||||||
|
// Compute the badness metric for a recursive predicate for each ordering.
|
||||||
|
const sccMetricInput = this.badnessInputsForRecursiveDelta(event);
|
||||||
|
// Loop through each predicate in the SCC
|
||||||
|
sccMetricInput.forEach((buckets, predicate) => {
|
||||||
|
// Loop through each ordering of the predicate
|
||||||
|
buckets.forEach((bucket, raReference) => {
|
||||||
|
// Format the key as demanding-query:name (ordering)
|
||||||
|
const key = makeKey(
|
||||||
|
event.queryCausingWork,
|
||||||
|
predicate,
|
||||||
|
`(${raReference})`
|
||||||
|
);
|
||||||
|
const maxTupleCount = Math.max(...bucket.tupleCounts);
|
||||||
|
const resultSize = bucket.resultSize;
|
||||||
|
const maxDependentPredicateSize = Math.max(
|
||||||
|
...bucket.dependentPredicateSizes.values()
|
||||||
|
);
|
||||||
|
|
||||||
|
if (maxDependentPredicateSize > 0) {
|
||||||
|
pushValue(this.maxTupleCountMap, key, maxTupleCount);
|
||||||
|
pushValue(this.resultSizeMap, key, resultSize);
|
||||||
|
pushValue(
|
||||||
|
this.maxDependentPredicateSizeMap,
|
||||||
|
key,
|
||||||
|
maxDependentPredicateSize
|
||||||
|
);
|
||||||
|
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize);
|
||||||
|
const oldMetric = this.joinOrderMetricMap.get(key);
|
||||||
|
if ((oldMetric === undefined) || (metric > oldMetric)) {
|
||||||
|
this.joinOrderMetricMap.set(key, metric);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Iterate through an SCC with main node `event`.
|
||||||
|
*/
|
||||||
|
private iterateSCC(
|
||||||
|
event: ComputeRecursive,
|
||||||
|
func: (
|
||||||
|
inLayerEvent: ComputeRecursive | InLayer,
|
||||||
|
run: PipelineRun,
|
||||||
|
iteration: number
|
||||||
|
) => void
|
||||||
|
): void {
|
||||||
|
const sccEvents = this.layerEvents.get(event.raHash)!;
|
||||||
|
const nextPipeline: number[] = new Array(sccEvents.length).fill(0);
|
||||||
|
|
||||||
|
const maxIteration = Math.max(
|
||||||
|
...sccEvents.map(e => e.predicateIterationMillis.length)
|
||||||
|
);
|
||||||
|
|
||||||
|
for (let iteration = 0; iteration < maxIteration; ++iteration) {
|
||||||
|
// Loop through each predicate in this iteration
|
||||||
|
for (let predicate = 0; predicate < sccEvents.length; ++predicate) {
|
||||||
|
const inLayerEvent = sccEvents[predicate];
|
||||||
|
const iterationTime =
|
||||||
|
inLayerEvent.predicateIterationMillis.length <= iteration
|
||||||
|
? -1
|
||||||
|
: inLayerEvent.predicateIterationMillis[iteration];
|
||||||
|
if (iterationTime != -1) {
|
||||||
|
const run: PipelineRun =
|
||||||
|
inLayerEvent.pipelineRuns[nextPipeline[predicate]++];
|
||||||
|
func(inLayerEvent, run, iteration);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute the maximum tuple count and maximum dependent predicate size for a non-recursive pipeline
|
||||||
|
*/
|
||||||
|
private badnessInputsForNonRecursiveDelta(
|
||||||
|
pipelineRun: PipelineRun,
|
||||||
|
event: ComputeSimple
|
||||||
|
): { maxTupleCount: number; maxDependentPredicateSize: number } {
|
||||||
|
const dependentPredicateSizes = Object.values(event.dependencies).map(hash =>
|
||||||
|
this.predicateSizes.get(hash) ?? 0 // Should always be present, but zero is a safe default.
|
||||||
|
);
|
||||||
|
const maxDependentPredicateSize = safeMax(dependentPredicateSizes);
|
||||||
|
return {
|
||||||
|
maxTupleCount: safeMax(pipelineRun.counts),
|
||||||
|
maxDependentPredicateSize: maxDependentPredicateSize
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private prevDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
|
||||||
|
// If an iteration isn't present in the map it means it was skipped because the optimizer
|
||||||
|
// inferred that it was empty. So its size is 0.
|
||||||
|
return this.curDeltaSizes(event, predicate, i - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
private curDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
|
||||||
|
// If an iteration isn't present in the map it means it was skipped because the optimizer
|
||||||
|
// inferred that it was empty. So its size is 0.
|
||||||
|
return (
|
||||||
|
this.layerEvents.get(event.raHash)?.find(x => x.predicateName === predicate)?.deltaSizes[i] ?? 0
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute the dependent predicate sizes and result size (the inputs to the badness metric) for a predicate in an SCC.
|
||||||
|
*/
|
||||||
|
private badnessInputsForLayer(
|
||||||
|
event: ComputeRecursive,
|
||||||
|
inLayerEvent: InLayer | ComputeRecursive,
|
||||||
|
raReference: string,
|
||||||
|
iteration: number
|
||||||
|
) {
|
||||||
|
const dependentPredicates = getDependentPredicates(
|
||||||
|
inLayerEvent.ra[raReference]
|
||||||
|
);
|
||||||
|
let dependentPredicateSizes: I.Map<string, number>;
|
||||||
|
// We treat the base case as a non-recursive pipeline. In that case, the dependent predicates are
|
||||||
|
// the dependencies of the base case and the cur_deltas.
|
||||||
|
if (raReference === 'base') {
|
||||||
|
dependentPredicateSizes = I.Map(
|
||||||
|
dependentPredicates.map((pred): [string, number] => {
|
||||||
|
// A base case cannot contain a `prev_delta`, but it can contain a `cur_delta`.
|
||||||
|
let size = 0;
|
||||||
|
if (pred.endsWith('#cur_delta')) {
|
||||||
|
size = this.curDeltaSizes(
|
||||||
|
event,
|
||||||
|
pred.slice(0, -'#cur_delta'.length),
|
||||||
|
iteration
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
const hash = event.dependencies[pred];
|
||||||
|
size = this.predicateSizes.get(hash)!;
|
||||||
|
}
|
||||||
|
return [pred, size];
|
||||||
|
})
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// It's a non-base case in a recursive pipeline. In that case, the dependent predicates are
|
||||||
|
// only the prev_deltas.
|
||||||
|
dependentPredicateSizes = I.Map(
|
||||||
|
dependentPredicates
|
||||||
|
.flatMap(pred => {
|
||||||
|
// If it's actually a prev_delta
|
||||||
|
if (pred.endsWith('#prev_delta')) {
|
||||||
|
// Return the predicate without the #prev_delta suffix.
|
||||||
|
return [pred.slice(0, -'#prev_delta'.length)];
|
||||||
|
} else {
|
||||||
|
// Not a recursive delta. Skip it.
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.map((prev): [string, number] => {
|
||||||
|
const size = this.prevDeltaSizes(event, prev, iteration);
|
||||||
|
return [prev, size];
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const deltaSize = inLayerEvent.deltaSizes[iteration];
|
||||||
|
return { dependentPredicateSizes, deltaSize };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute the metric input for all the events in an SCC that starts with main node `event`.
|
||||||
|
*/
|
||||||
|
private badnessInputsForRecursiveDelta(event: ComputeRecursive): Map<string, Map<string, Bucket>> {
|
||||||
|
// nameToOrderToBucket : predicate name -> ordering (e.g., standard, order_500000) -> bucket
|
||||||
|
const nameToOrderToBucket = new Map<string, Map<string, Bucket>>();
|
||||||
|
|
||||||
|
// Iterate through the SCC and compute the metric inputs
|
||||||
|
this.iterateSCC(event, (inLayerEvent, run, iteration) => {
|
||||||
|
const raReference = run.raReference;
|
||||||
|
const predicateName = inLayerEvent.predicateName;
|
||||||
|
if (!nameToOrderToBucket.has(predicateName)) {
|
||||||
|
nameToOrderToBucket.set(predicateName, new Map());
|
||||||
|
}
|
||||||
|
const orderTobucket = nameToOrderToBucket.get(predicateName)!;
|
||||||
|
if (!orderTobucket.has(raReference)) {
|
||||||
|
orderTobucket.set(raReference, {
|
||||||
|
tupleCounts: new Int32Array(0),
|
||||||
|
resultSize: 0,
|
||||||
|
dependentPredicateSizes: I.Map()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const { dependentPredicateSizes, deltaSize } = this.badnessInputsForLayer(
|
||||||
|
event,
|
||||||
|
inLayerEvent,
|
||||||
|
raReference,
|
||||||
|
iteration
|
||||||
|
);
|
||||||
|
|
||||||
|
const bucket = orderTobucket.get(raReference)!;
|
||||||
|
// Pointwise sum the tuple counts
|
||||||
|
const newTupleCounts = pointwiseSum(
|
||||||
|
bucket.tupleCounts,
|
||||||
|
new Int32Array(run.counts),
|
||||||
|
this.problemReporter
|
||||||
|
);
|
||||||
|
const resultSize = bucket.resultSize + deltaSize;
|
||||||
|
// Pointwise sum the deltas.
|
||||||
|
const newDependentPredicateSizes = bucket.dependentPredicateSizes.mergeWith(
|
||||||
|
(oldSize, newSize) => oldSize + newSize,
|
||||||
|
dependentPredicateSizes
|
||||||
|
);
|
||||||
|
orderTobucket.set(raReference, {
|
||||||
|
tupleCounts: newTupleCounts,
|
||||||
|
resultSize: resultSize,
|
||||||
|
dependentPredicateSizes: newDependentPredicateSizes
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return nameToOrderToBucket;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class JoinOrderScannerProvider implements EvaluationLogScannerProvider {
|
||||||
|
constructor(private readonly getThreshdold: () => number) {
|
||||||
|
}
|
||||||
|
|
||||||
|
public createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner {
|
||||||
|
const threshold = this.getThreshdold();
|
||||||
|
return new JoinOrderScanner(problemReporter, threshold);
|
||||||
|
}
|
||||||
|
}
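
A hedged sketch of wiring this provider into the scanner set from log-scanner.ts; the relative import paths and the threshold of 50 are illustrative assumptions, not values taken from the extension.

import { EvaluationLogScannerSet } from './log-scanner';
import { JoinOrderScannerProvider } from './join-order';

const scanners = new EvaluationLogScannerSet();
// The callback lets the warning threshold be re-read each time a scanner is created.
const registration = scanners.registerLogScannerProvider(new JoinOrderScannerProvider(() => 50));
// ...later, when the provider is no longer needed:
registration.dispose();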
|
||||||
23
extensions/ql-vscode/src/log-insights/jsonl-reader.ts
Normal file
@@ -0,0 +1,23 @@
import * as fs from 'fs-extra';

/**
 * Read a file consisting of multiple JSON objects. Each object is separated from the previous one
 * by a double newline sequence. This is basically a more human-readable form of JSONL.
 *
 * The current implementation reads the entire text of the document into memory, but in the future
 * it will stream the document to improve performance on large documents.
 *
 * @param path The path to the file.
 * @param handler Callback to be invoked for each top-level JSON object in order.
 */
export async function readJsonlFile(path: string, handler: (value: any) => Promise<void>): Promise<void> {
  const logSummary = await fs.readFile(path, 'utf-8');

  // Split on the double-newline delimiters that separate the top-level objects of the summary.
  const jsonSummaryObjects: string[] = logSummary.split(/\r?\n\r?\n/g);

  for (const obj of jsonSummaryObjects) {
    const jsonObj = JSON.parse(obj);
    await handler(jsonObj);
  }
}
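
A small usage sketch of readJsonlFile: counting the top-level objects in a summary file. The helper name is invented and the path comes from the caller.

async function countSummaryObjects(summaryPath: string): Promise<number> {
  let count = 0;
  await readJsonlFile(summaryPath, async () => {
    count++;
  });
  return count;
}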
109
extensions/ql-vscode/src/log-insights/log-scanner-service.ts
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
import { Diagnostic, DiagnosticSeverity, languages, Range, Uri } from 'vscode';
|
||||||
|
import { DisposableObject } from '../pure/disposable-object';
|
||||||
|
import { QueryHistoryManager } from '../query-history';
|
||||||
|
import { QueryHistoryInfo } from '../query-history-info';
|
||||||
|
import { EvaluationLogProblemReporter, EvaluationLogScannerSet } from './log-scanner';
|
||||||
|
import { PipelineInfo, SummarySymbols } from './summary-parser';
|
||||||
|
import * as fs from 'fs-extra';
|
||||||
|
import { logger } from '../logging';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute the key used to find a predicate in the summary symbols.
|
||||||
|
* @param name The name of the predicate.
|
||||||
|
* @param raHash The RA hash of the predicate.
|
||||||
|
* @returns The key of the predicate, consisting of `name@shortHash`, where `shortHash` is the first
|
||||||
|
* eight characters of `raHash`.
|
||||||
|
*/
|
||||||
|
function predicateSymbolKey(name: string, raHash: string): string {
|
||||||
|
return `${name}@${raHash.substring(0, 8)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Implementation of `EvaluationLogProblemReporter` that generates `Diagnostic` objects to display
|
||||||
|
* in the VS Code "Problems" view.
|
||||||
|
*/
|
||||||
|
class ProblemReporter implements EvaluationLogProblemReporter {
|
||||||
|
public readonly diagnostics: Diagnostic[] = [];
|
||||||
|
|
||||||
|
constructor(private readonly symbols: SummarySymbols | undefined) {
|
||||||
|
}
|
||||||
|
|
||||||
|
public reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void {
|
||||||
|
const nameWithHash = predicateSymbolKey(predicateName, raHash);
|
||||||
|
const predicateSymbol = this.symbols?.predicates[nameWithHash];
|
||||||
|
let predicateInfo: PipelineInfo | undefined = undefined;
|
||||||
|
if (predicateSymbol !== undefined) {
|
||||||
|
predicateInfo = predicateSymbol.iterations[iteration];
|
||||||
|
}
|
||||||
|
if (predicateInfo !== undefined) {
|
||||||
|
const range = new Range(predicateInfo.raStartLine, 0, predicateInfo.raEndLine + 1, 0);
|
||||||
|
this.diagnostics.push(new Diagnostic(range, message, DiagnosticSeverity.Error));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public log(message: string): void {
|
||||||
|
void logger.log(message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export class LogScannerService extends DisposableObject {
|
||||||
|
public readonly scanners = new EvaluationLogScannerSet();
|
||||||
|
private readonly diagnosticCollection = this.push(languages.createDiagnosticCollection('ql-eval-log'));
|
||||||
|
private currentItem: QueryHistoryInfo | undefined = undefined;
|
||||||
|
|
||||||
|
constructor(qhm: QueryHistoryManager) {
|
||||||
|
super();
|
||||||
|
|
||||||
|
this.push(qhm.onDidChangeCurrentQueryItem(async (item) => {
|
||||||
|
if (item !== this.currentItem) {
|
||||||
|
this.currentItem = item;
|
||||||
|
await this.scanEvalLog(item);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
|
this.push(qhm.onDidCompleteQuery(async (item) => {
|
||||||
|
if (item === this.currentItem) {
|
||||||
|
await this.scanEvalLog(item);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan the evaluation log for a query, and report any diagnostics.
|
||||||
|
*
|
||||||
|
* @param query The query whose log is to be scanned.
|
||||||
|
*/
|
||||||
|
public async scanEvalLog(
|
||||||
|
query: QueryHistoryInfo | undefined
|
||||||
|
): Promise<void> {
|
||||||
|
this.diagnosticCollection.clear();
|
||||||
|
|
||||||
|
if ((query?.t !== 'local')
|
||||||
|
|| (query.evalLogSummaryLocation === undefined)
|
||||||
|
|| (query.jsonEvalLogSummaryLocation === undefined)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const diagnostics = await this.scanLog(query.jsonEvalLogSummaryLocation, query.evalLogSummarySymbolsLocation);
|
||||||
|
const uri = Uri.file(query.evalLogSummaryLocation);
|
||||||
|
this.diagnosticCollection.set(uri, diagnostics);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan the evaluator summary log for problems, using the scanners for all registered providers.
|
||||||
|
* @param jsonSummaryLocation The file path of the JSON summary log.
|
||||||
|
* @param symbolsLocation The file path of the symbols file for the human-readable log summary.
|
||||||
|
* @returns An array of `Diagnostic`s representing the problems found by scanners.
|
||||||
|
*/
|
||||||
|
private async scanLog(jsonSummaryLocation: string, symbolsLocation: string | undefined): Promise<Diagnostic[]> {
|
||||||
|
let symbols: SummarySymbols | undefined = undefined;
|
||||||
|
if (symbolsLocation !== undefined) {
|
||||||
|
symbols = JSON.parse(await fs.readFile(symbolsLocation, { encoding: 'utf-8' }));
|
||||||
|
}
|
||||||
|
const problemReporter = new ProblemReporter(symbols);
|
||||||
|
|
||||||
|
await this.scanners.scanLog(jsonSummaryLocation, problemReporter);
|
||||||
|
|
||||||
|
return problemReporter.diagnostics;
|
||||||
|
}
|
||||||
|
}
|
||||||
103
extensions/ql-vscode/src/log-insights/log-scanner.ts
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
import { SummaryEvent } from './log-summary';
|
||||||
|
import { readJsonlFile } from './jsonl-reader';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Callback interface used to report diagnostics from a log scanner.
|
||||||
|
*/
|
||||||
|
export interface EvaluationLogProblemReporter {
|
||||||
|
/**
|
||||||
|
* Report a potential problem detected in the evaluation log.
|
||||||
|
*
|
||||||
|
* @param predicateName The mangled name of the predicate with the problem.
|
||||||
|
* @param raHash The RA hash of the predicate with the problem.
|
||||||
|
* @param iteration The iteration number with the problem. For a non-recursive predicate, this
|
||||||
|
* must be zero.
|
||||||
|
* @param message The problem message.
|
||||||
|
*/
|
||||||
|
reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Log a message about a problem in the implementation of the scanner. These will typically be
|
||||||
|
* displayed separate from any problems reported via `reportProblem()`.
|
||||||
|
*/
|
||||||
|
log(message: string): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Interface implemented by a log scanner. Instances are created via
|
||||||
|
* `EvaluationLogScannerProvider.createScanner()`.
|
||||||
|
*/
|
||||||
|
export interface EvaluationLogScanner {
|
||||||
|
/**
|
||||||
|
* Called for each event in the log summary, in order. The implementation can report problems via
|
||||||
|
* the `EvaluationLogProblemReporter` interface that was supplied to `createScanner()`.
|
||||||
|
* @param event The log summary event.
|
||||||
|
*/
|
||||||
|
onEvent(event: SummaryEvent): void;
|
||||||
|
/**
|
||||||
|
* Called after all events in the log summary have been processed. The implementation can report
|
||||||
|
* problems via the `EvaluationLogProblemReporter` interface that was supplied to
|
||||||
|
* `createScanner()`.
|
||||||
|
*/
|
||||||
|
onDone(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A factory for log scanners. When a log is to be scanned, all registered
|
||||||
|
* `EvaluationLogScannerProviders` will be asked to create a new instance of `EvaluationLogScanner`
|
||||||
|
* to do the scanning.
|
||||||
|
*/
|
||||||
|
export interface EvaluationLogScannerProvider {
|
||||||
|
/**
|
||||||
|
* Create a new instance of `EvaluationLogScanner` to scan a single summary log.
|
||||||
|
* @param problemReporter Callback interface for reporting any problems discovered.
|
||||||
|
*/
|
||||||
|
createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Same as VSCode's `Disposable`, but avoids a dependency on VS Code.
|
||||||
|
*/
|
||||||
|
export interface Disposable {
|
||||||
|
dispose(): void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class EvaluationLogScannerSet {
|
||||||
|
private readonly scannerProviders = new Map<number, EvaluationLogScannerProvider>();
|
||||||
|
private nextScannerProviderId = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a provider that can create instances of `EvaluationLogScanner` to scan evaluation logs
|
||||||
|
* for problems.
|
||||||
|
* @param provider The provider.
|
||||||
|
* @returns A `Disposable` that, when disposed, will unregister the provider.
|
||||||
|
*/
|
||||||
|
public registerLogScannerProvider(provider: EvaluationLogScannerProvider): Disposable {
|
||||||
|
const id = this.nextScannerProviderId;
|
||||||
|
this.nextScannerProviderId++;
|
||||||
|
|
||||||
|
this.scannerProviders.set(id, provider);
|
||||||
|
return {
|
||||||
|
dispose: () => {
|
||||||
|
this.scannerProviders.delete(id);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan the evaluator summary log for problems, using the scanners for all registered providers.
|
||||||
|
* @param jsonSummaryLocation The file path of the JSON summary log.
|
||||||
|
* @param problemReporter Callback interface for reporting any problems discovered.
|
||||||
|
*/
|
||||||
|
public async scanLog(jsonSummaryLocation: string, problemReporter: EvaluationLogProblemReporter): Promise<void> {
|
||||||
|
const scanners = [...this.scannerProviders.values()].map(p => p.createScanner(problemReporter));
|
||||||
|
|
||||||
|
await readJsonlFile(jsonSummaryLocation, async obj => {
|
||||||
|
scanners.forEach(scanner => {
|
||||||
|
scanner.onEvent(obj);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
scanners.forEach(scanner => scanner.onDone());
|
||||||
|
}
|
||||||
|
}
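
To show how these interfaces fit together, a minimal sketch of a provider that just counts events, registered and run against a summary; the console-based problem reporter and the helper name are illustrative, not what the extension itself does.

const countingProvider: EvaluationLogScannerProvider = {
  createScanner: (problemReporter) => {
    let events = 0;
    return {
      onEvent: () => { events++; },
      onDone: () => problemReporter.log(`Scanned ${events} events.`)
    };
  }
};

async function scanSummary(jsonSummaryPath: string): Promise<void> {
  const scanners = new EvaluationLogScannerSet();
  scanners.registerLogScannerProvider(countingProvider);
  await scanners.scanLog(jsonSummaryPath, {
    reportProblem: (predicateName, _raHash, _iteration, message) =>
      console.log(`${predicateName}: ${message}`),
    log: (message) => console.log(message)
  });
}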
|
||||||
93
extensions/ql-vscode/src/log-insights/log-summary.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
export interface PipelineRun {
|
||||||
|
raReference: string;
|
||||||
|
counts: number[];
|
||||||
|
duplicationPercentages: number[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Ra {
|
||||||
|
[key: string]: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export type EvaluationStrategy =
|
||||||
|
'COMPUTE_SIMPLE' |
|
||||||
|
'COMPUTE_RECURSIVE' |
|
||||||
|
'IN_LAYER' |
|
||||||
|
'COMPUTED_EXTENSIONAL' |
|
||||||
|
'EXTENSIONAL' |
|
||||||
|
'SENTINEL_EMPTY' |
|
||||||
|
'CACHACA' |
|
||||||
|
'CACHE_HIT';
|
||||||
|
|
||||||
|
interface SummaryEventBase {
|
||||||
|
evaluationStrategy: EvaluationStrategy;
|
||||||
|
predicateName: string;
|
||||||
|
raHash: string;
|
||||||
|
appearsAs: { [key: string]: { [key: string]: number[] } };
|
||||||
|
completionType?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ResultEventBase extends SummaryEventBase {
|
||||||
|
resultSize: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ComputeSimple extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'COMPUTE_SIMPLE';
|
||||||
|
ra: Ra;
|
||||||
|
pipelineRuns?: [PipelineRun];
|
||||||
|
queryCausingWork?: string;
|
||||||
|
dependencies: { [key: string]: string };
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ComputeRecursive extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'COMPUTE_RECURSIVE';
|
||||||
|
deltaSizes: number[];
|
||||||
|
ra: Ra;
|
||||||
|
pipelineRuns: PipelineRun[];
|
||||||
|
queryCausingWork?: string;
|
||||||
|
dependencies: { [key: string]: string };
|
||||||
|
predicateIterationMillis: number[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface InLayer extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'IN_LAYER';
|
||||||
|
deltaSizes: number[];
|
||||||
|
ra: Ra;
|
||||||
|
pipelineRuns: PipelineRun[];
|
||||||
|
queryCausingWork?: string;
|
||||||
|
mainHash: string;
|
||||||
|
predicateIterationMillis: number[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ComputedExtensional extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'COMPUTED_EXTENSIONAL';
|
||||||
|
queryCausingWork?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NonComputedExtensional extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'EXTENSIONAL';
|
||||||
|
queryCausingWork?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SentinelEmpty extends SummaryEventBase {
|
||||||
|
evaluationStrategy: 'SENTINEL_EMPTY';
|
||||||
|
sentinelRaHash: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Cachaca extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'CACHACA';
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CacheHit extends ResultEventBase {
|
||||||
|
evaluationStrategy: 'CACHE_HIT';
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Extensional = ComputedExtensional | NonComputedExtensional;
|
||||||
|
|
||||||
|
export type SummaryEvent =
|
||||||
|
| ComputeSimple
|
||||||
|
| ComputeRecursive
|
||||||
|
| InLayer
|
||||||
|
| Extensional
|
||||||
|
| SentinelEmpty
|
||||||
|
| Cachaca
|
||||||
|
| CacheHit;
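
For orientation, a sketch of a COMPUTE_SIMPLE event typed against these interfaces; the predicate name, hashes, counts and RA text are invented, and real summaries may carry additional fields.

const sampleEvent: ComputeSimple = {
  evaluationStrategy: 'COMPUTE_SIMPLE',
  predicateName: 'Expr::Expr::getParent#dispred#f0820431',
  raHash: '76d6745o1234abcd',
  appearsAs: { 'Expr::Expr::getParent#dispred#f0820431': { 'my-query.ql': [0] } },
  completionType: 'SUCCESS',
  resultSize: 12,
  ra: { pipeline: ['{1} r1 = SCAN Expr::Expr ...', 'return r1'] },
  pipelineRuns: [{ raReference: 'pipeline', counts: [12, 12], duplicationPercentages: [0, 0] }],
  queryCausingWork: 'my-query.ql',
  dependencies: { 'Expr::Expr': 'abcdef0123456789' }
};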
|
||||||
@@ -0,0 +1,154 @@
|
|||||||
|
import * as fs from 'fs-extra';
|
||||||
|
import { RawSourceMap, SourceMapConsumer } from 'source-map';
|
||||||
|
import { commands, Position, Selection, TextDocument, TextEditor, TextEditorRevealType, TextEditorSelectionChangeEvent, ViewColumn, window, workspace } from 'vscode';
|
||||||
|
import { DisposableObject } from '../pure/disposable-object';
|
||||||
|
import { commandRunner } from '../commandRunner';
|
||||||
|
import { logger } from '../logging';
|
||||||
|
import { getErrorMessage } from '../pure/helpers-pure';
|
||||||
|
|
||||||
|
/** A `Position` within a specified file on disk. */
|
||||||
|
interface PositionInFile {
|
||||||
|
filePath: string;
|
||||||
|
position: Position;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Opens the specified source location in a text editor.
|
||||||
|
* @param position The position (including file path) to show.
|
||||||
|
*/
|
||||||
|
async function showSourceLocation(position: PositionInFile): Promise<void> {
|
||||||
|
const document = await workspace.openTextDocument(position.filePath);
|
||||||
|
const editor = await window.showTextDocument(document, ViewColumn.Active);
|
||||||
|
editor.selection = new Selection(position.position, position.position);
|
||||||
|
editor.revealRange(editor.selection, TextEditorRevealType.InCenterIfOutsideViewport);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple language support for human-readable evaluator log summaries.
|
||||||
|
*
|
||||||
|
* This class implements the `codeQL.gotoQL` command, which jumps from RA code to the corresponding
|
||||||
|
* QL code that generated it. It also tracks the current selection and active editor to enable and
|
||||||
|
* disable that command based on whether there is a QL mapping for the current selection.
|
||||||
|
*/
|
||||||
|
export class SummaryLanguageSupport extends DisposableObject {
|
||||||
|
/**
|
||||||
|
* The last `TextDocument` (with language `ql-summary`) for which we tried to find a sourcemap, or
|
||||||
|
* `undefined` if we have not seen such a document yet.
|
||||||
|
*/
|
||||||
|
private lastDocument: TextDocument | undefined = undefined;
|
||||||
|
/**
|
||||||
|
* The sourcemap for `lastDocument`, or `undefined` if there was no such sourcemap or document.
|
||||||
|
*/
|
||||||
|
private sourceMap: SourceMapConsumer | undefined = undefined;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
super();
|
||||||
|
|
||||||
|
this.push(window.onDidChangeActiveTextEditor(this.handleDidChangeActiveTextEditor));
|
||||||
|
this.push(window.onDidChangeTextEditorSelection(this.handleDidChangeTextEditorSelection));
|
||||||
|
this.push(workspace.onDidCloseTextDocument(this.handleDidCloseTextDocument));
|
||||||
|
|
||||||
|
this.push(commandRunner('codeQL.gotoQL', this.handleGotoQL));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the location of the QL code that generated the RA at the current selection in the active
|
||||||
|
* editor, or `undefined` if there is no mapping.
|
||||||
|
*/
|
||||||
|
private async getQLSourceLocation(): Promise<PositionInFile | undefined> {
|
||||||
|
const editor = window.activeTextEditor;
|
||||||
|
if (editor === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const document = editor.document;
|
||||||
|
if (document.languageId !== 'ql-summary') {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (document.uri.scheme !== 'file') {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.lastDocument !== document) {
|
||||||
|
this.clearCache();
|
||||||
|
|
||||||
|
const mapPath = document.uri.fsPath + '.map';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const sourceMapText = await fs.readFile(mapPath, 'utf-8');
|
||||||
|
const rawMap: RawSourceMap = JSON.parse(sourceMapText);
|
||||||
|
this.sourceMap = await new SourceMapConsumer(rawMap);
|
||||||
|
} catch (e: unknown) {
|
||||||
|
// Error reading sourcemap. Pretend there was no sourcemap.
|
||||||
|
void logger.log(`Error reading sourcemap file '${mapPath}': ${getErrorMessage(e)}`);
|
||||||
|
this.sourceMap = undefined;
|
||||||
|
}
|
||||||
|
this.lastDocument = document;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.sourceMap === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const qlPosition = this.sourceMap.originalPositionFor({
|
||||||
|
line: editor.selection.start.line + 1,
|
||||||
|
column: editor.selection.start.character,
|
||||||
|
bias: SourceMapConsumer.GREATEST_LOWER_BOUND
|
||||||
|
});
|
||||||
|
|
||||||
|
if ((qlPosition.source === null) || (qlPosition.line === null)) {
|
||||||
|
// No position found.
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const line = qlPosition.line - 1; // In `source-map`, lines are 1-based...
|
||||||
|
const column = qlPosition.column ?? 0; // ...but columns are 0-based :(
|
||||||
|
|
||||||
|
return {
|
||||||
|
filePath: qlPosition.source,
|
||||||
|
position: new Position(line, column)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clears the cached sourcemap and its corresponding `TextDocument`.
|
||||||
|
*/
|
||||||
|
private clearCache(): void {
|
||||||
|
if (this.sourceMap !== undefined) {
|
||||||
|
this.sourceMap.destroy();
|
||||||
|
this.sourceMap = undefined;
|
||||||
|
this.lastDocument = undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates the `codeql.hasQLSource` context variable based on the current selection. This variable
|
||||||
|
* controls whether or not the `codeQL.gotoQL` command is enabled.
|
||||||
|
*/
|
||||||
|
private async updateContext(): Promise<void> {
|
||||||
|
const position = await this.getQLSourceLocation();
|
||||||
|
|
||||||
|
await commands.executeCommand('setContext', 'codeql.hasQLSource', position !== undefined);
|
||||||
|
}
|
||||||
|
|
||||||
|
handleDidChangeActiveTextEditor = async (_editor: TextEditor | undefined): Promise<void> => {
|
||||||
|
await this.updateContext();
|
||||||
|
}
|
||||||
|
|
||||||
|
handleDidChangeTextEditorSelection = async (_e: TextEditorSelectionChangeEvent): Promise<void> => {
|
||||||
|
await this.updateContext();
|
||||||
|
}
|
||||||
|
|
||||||
|
handleDidCloseTextDocument = (document: TextDocument): void => {
|
||||||
|
if (this.lastDocument === document) {
|
||||||
|
this.clearCache();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
handleGotoQL = async (): Promise<void> => {
|
||||||
|
const position = await this.getQLSourceLocation();
|
||||||
|
if (position !== undefined) {
|
||||||
|
await showSourceLocation(position);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
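
The fiddly part of the lookup above is the source-map package's convention of 1-based lines but 0-based columns, in contrast to VS Code's fully 0-based positions. Here is a standalone, hedged sketch of the same lookup; the helper name is invented and the map path comes from the caller.

import * as fs from 'fs-extra';
import { RawSourceMap, SourceMapConsumer } from 'source-map';

async function qlPositionAt(mapPath: string, zeroBasedLine: number, zeroBasedColumn: number) {
  const rawMap: RawSourceMap = JSON.parse(await fs.readFile(mapPath, 'utf-8'));
  const consumer = await new SourceMapConsumer(rawMap);
  try {
    return consumer.originalPositionFor({
      line: zeroBasedLine + 1,   // source-map lines are 1-based...
      column: zeroBasedColumn,   // ...but columns are 0-based
      bias: SourceMapConsumer.GREATEST_LOWER_BOUND
    });
  } finally {
    consumer.destroy();
  }
}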
|
||||||
113
extensions/ql-vscode/src/log-insights/summary-parser.ts
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
import * as fs from 'fs-extra';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Location information for a single pipeline invocation in the RA.
|
||||||
|
*/
|
||||||
|
export interface PipelineInfo {
|
||||||
|
startLine: number;
|
||||||
|
raStartLine: number;
|
||||||
|
raEndLine: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Location information for a single predicate in the RA.
|
||||||
|
*/
|
||||||
|
export interface PredicateSymbol {
|
||||||
|
/**
|
||||||
|
* `PipelineInfo` for each iteration. A non-recursive predicate will have a single iteration `0`.
|
||||||
|
*/
|
||||||
|
iterations: Record<number, PipelineInfo>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Location information for the RA from an evaluation log. Line numbers point into the
|
||||||
|
* human-readable log summary.
|
||||||
|
*/
|
||||||
|
export interface SummarySymbols {
|
||||||
|
predicates: Record<string, PredicateSymbol>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tuple counts for Expr::Expr::getParent#dispred#f0820431#ff@76d6745o:
|
||||||
|
const NON_RECURSIVE_TUPLE_COUNT_REGEXP = /^Evaluated relational algebra for predicate (?<predicateName>\S+) with tuple counts:$/;
|
||||||
|
// Tuple counts for Expr::Expr::getEnclosingStmt#f0820431#bf@923ddwj9 on iteration 0 running pipeline base:
|
||||||
|
const RECURSIVE_TUPLE_COUNT_REGEXP = /^Evaluated relational algebra for predicate (?<predicateName>\S+) on iteration (?<iteration>\d+) running pipeline (?<pipeline>\S+) with tuple counts:$/;
|
||||||
|
const RETURN_REGEXP = /^\s*return /;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a human-readable evaluation log summary to find the location of the RA for each pipeline
|
||||||
|
* run.
|
||||||
|
*
|
||||||
|
* TODO: Once we're more certain about the symbol format, we should have the CLI generate this as it
|
||||||
|
* generates the human-readabe summary to avoid having to rely on regular expression matching of the
|
||||||
|
* human-readable text.
|
||||||
|
*
|
||||||
|
* @param summaryPath The path to the summary file.
|
||||||
|
* @param symbolsPath The path to the symbols file to generate.
|
||||||
|
*/
|
||||||
|
export async function generateSummarySymbolsFile(summaryPath: string, symbolsPath: string): Promise<void> {
|
||||||
|
const symbols = await generateSummarySymbols(summaryPath);
|
||||||
|
await fs.writeFile(symbolsPath, JSON.stringify(symbols));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a human-readable evaluation log summary to find the location of the RA for each pipeline
|
||||||
|
* run.
|
||||||
|
*
|
||||||
|
* @param fileLocation The path to the summary file.
|
||||||
|
* @returns Symbol information for the summary file.
|
||||||
|
*/
|
||||||
|
async function generateSummarySymbols(summaryPath: string): Promise<SummarySymbols> {
|
||||||
|
const summary = await fs.promises.readFile(summaryPath, { encoding: 'utf-8' });
|
||||||
|
const symbols: SummarySymbols = {
|
||||||
|
predicates: {}
|
||||||
|
};
|
||||||
|
|
||||||
|
const lines = summary.split(/\r?\n/);
|
||||||
|
let lineNumber = 0;
|
||||||
|
while (lineNumber < lines.length) {
|
||||||
|
const startLineNumber = lineNumber;
|
||||||
|
lineNumber++;
|
||||||
|
const startLine = lines[startLineNumber];
|
||||||
|
const nonRecursiveMatch = startLine.match(NON_RECURSIVE_TUPLE_COUNT_REGEXP);
|
||||||
|
let predicateName: string | undefined = undefined;
|
||||||
|
let iteration = 0;
|
||||||
|
if (nonRecursiveMatch) {
|
||||||
|
predicateName = nonRecursiveMatch.groups!.predicateName;
|
||||||
|
} else {
|
||||||
|
const recursiveMatch = startLine.match(RECURSIVE_TUPLE_COUNT_REGEXP);
|
||||||
|
if (recursiveMatch?.groups) {
|
||||||
|
predicateName = recursiveMatch.groups.predicateName;
|
||||||
|
iteration = parseInt(recursiveMatch.groups.iteration);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (predicateName !== undefined) {
|
||||||
|
const raStartLine = lineNumber;
|
||||||
|
let raEndLine: number | undefined = undefined;
|
||||||
|
while ((lineNumber < lines.length) && (raEndLine === undefined)) {
|
||||||
|
const raLine = lines[lineNumber];
|
||||||
|
const returnMatch = raLine.match(RETURN_REGEXP);
|
||||||
|
if (returnMatch) {
|
||||||
|
raEndLine = lineNumber;
|
||||||
|
}
|
||||||
|
lineNumber++;
|
||||||
|
}
|
||||||
|
if (raEndLine !== undefined) {
|
||||||
|
let symbol = symbols.predicates[predicateName];
|
||||||
|
if (symbol === undefined) {
|
||||||
|
symbol = {
|
||||||
|
iterations: {}
|
||||||
|
};
|
||||||
|
symbols.predicates[predicateName] = symbol;
|
||||||
|
}
|
||||||
|
symbol.iterations[iteration] = {
|
||||||
|
startLine: lineNumber,
|
||||||
|
raStartLine: raStartLine,
|
||||||
|
raEndLine: raEndLine
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return symbols;
|
||||||
|
}
|
||||||
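As a quick illustration of how the generated symbols file is meant to be consumed (a sketch only; the file name and predicate name below are hypothetical, and SummarySymbols is the interface defined above):

import * as fs from 'fs-extra';

// Look up the RA location recorded for one predicate/iteration in a generated symbols file.
async function findRaLocation(symbolsPath: string, predicateName: string, iteration = 0) {
  const symbols: SummarySymbols = await fs.readJson(symbolsPath);
  return symbols.predicates[predicateName]?.iterations[iteration]; // { startLine, raStartLine, raEndLine } or undefined
}

// e.g. findRaLocation('evaluator-log.summary.symbols.json', 'Expr::Expr::getParent#dispred#f0820431#ff');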
@@ -1,4 +1,4 @@
-import { window as Window, OutputChannel, Progress, Disposable } from 'vscode';
+import { window as Window, OutputChannel, Progress } from 'vscode';
 import { DisposableObject } from './pure/disposable-object';
 import * as fs from 'fs-extra';
 import * as path from 'path';
@@ -26,11 +26,6 @@ export interface Logger {
    * @param location log to remove
    */
   removeAdditionalLogLocation(location: string | undefined): void;
-
-  /**
-   * The base location location where all side log files are stored.
-   */
-  getBaseLocation(): string | undefined;
 }
 
 export type ProgressReporter = Progress<{ message: string }>;
@@ -39,19 +34,13 @@ export type ProgressReporter = Progress<{ message: string }>;
 export class OutputChannelLogger extends DisposableObject implements Logger {
   public readonly outputChannel: OutputChannel;
   private readonly additionalLocations = new Map<string, AdditionalLogLocation>();
-  private additionalLogLocationPath: string | undefined;
+  isCustomLogDirectory: boolean;
 
-  constructor(private title: string) {
+  constructor(title: string) {
     super();
     this.outputChannel = Window.createOutputChannel(title);
     this.push(this.outputChannel);
-  }
-
-  init(storagePath: string): void {
-    this.additionalLogLocationPath = path.join(storagePath, this.title);
-
-    // clear out any old state from previous runs
-    fs.remove(this.additionalLogLocationPath);
+    this.isCustomLogDirectory = false;
   }
 
   /**
@@ -61,31 +50,41 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
    * continuing.
    */
   async log(message: string, options = {} as LogOptions): Promise<void> {
-    if (options.trailingNewline === undefined) {
-      options.trailingNewline = true;
-    }
-
-    if (options.trailingNewline) {
-      this.outputChannel.appendLine(message);
-    } else {
-      this.outputChannel.append(message);
-    }
-
-    if (this.additionalLogLocationPath && options.additionalLogLocation) {
-      const logPath = path.join(this.additionalLogLocationPath, options.additionalLogLocation);
-      let additional = this.additionalLocations.get(logPath);
-      if (!additional) {
-        const msg = `| Log being saved to ${logPath} |`;
-        const separator = new Array(msg.length).fill('-').join('');
-        this.outputChannel.appendLine(separator);
-        this.outputChannel.appendLine(msg);
-        this.outputChannel.appendLine(separator);
-        additional = new AdditionalLogLocation(logPath);
-        this.additionalLocations.set(logPath, additional);
-        this.track(additional);
+    try {
+      if (options.trailingNewline === undefined) {
+        options.trailingNewline = true;
       }
-
-      await additional.log(message, options);
+      if (options.trailingNewline) {
+        this.outputChannel.appendLine(message);
+      } else {
+        this.outputChannel.append(message);
+      }
+
+      if (options.additionalLogLocation) {
+        if (!path.isAbsolute(options.additionalLogLocation)) {
+          throw new Error(`Additional Log Location must be an absolute path: ${options.additionalLogLocation}`);
+        }
+        const logPath = options.additionalLogLocation;
+        let additional = this.additionalLocations.get(logPath);
+        if (!additional) {
+          const msg = `| Log being saved to ${logPath} |`;
+          const separator = new Array(msg.length).fill('-').join('');
+          this.outputChannel.appendLine(separator);
+          this.outputChannel.appendLine(msg);
+          this.outputChannel.appendLine(separator);
+          additional = new AdditionalLogLocation(logPath);
+          this.additionalLocations.set(logPath, additional);
+        }
+
+        await additional.log(message, options);
+      }
+    } catch (e) {
+      if (e instanceof Error && e.message === 'Channel has been closed') {
+        // Output channel is closed logging to console instead
+        console.log('Output channel is closed logging to console instead:', message);
+      } else {
+        throw e;
+      }
     }
   }
 
@@ -94,26 +93,15 @@ export class OutputChannelLogger extends DisposableObject implements Logger {
   }
 
   removeAdditionalLogLocation(location: string | undefined): void {
-    if (this.additionalLogLocationPath && location) {
-      const logPath = location.startsWith(this.additionalLogLocationPath)
-        ? location
-        : path.join(this.additionalLogLocationPath, location);
-      const additional = this.additionalLocations.get(logPath);
-      if (additional) {
-        this.disposeAndStopTracking(additional);
-        this.additionalLocations.delete(logPath);
-      }
+    if (location) {
+      this.additionalLocations.delete(location);
     }
   }
-
-  getBaseLocation() {
-    return this.additionalLogLocationPath;
-  }
 }
 
-class AdditionalLogLocation extends Disposable {
+class AdditionalLogLocation {
   constructor(private location: string) {
-    super(() => { /**/ });
+    /**/
   }
 
   async log(message: string, options = {} as LogOptions): Promise<void> {
@@ -126,10 +114,6 @@ class AdditionalLogLocation extends Disposable {
       encoding: 'utf8'
     });
   }
-
-  async dispose(): Promise<void> {
-    await fs.remove(this.location);
-  }
 }
 
 /** The global logger for the extension. */
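For reference, a minimal sketch of how the reworked logger is called after this change; `logger` stands in for an existing OutputChannelLogger and the path is made up. The key behavioural difference is that `additionalLogLocation` must now be an absolute path, otherwise `log` throws.

import * as path from 'path';

declare const logger: OutputChannelLogger; // assumed to exist elsewhere in the extension

const queryLogPath = path.join('/tmp', 'codeql-queries', 'query-1.log'); // hypothetical absolute location
await logger.log('Starting query evaluation', {
  trailingNewline: true,
  additionalLogLocation: queryLogPath,
});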
extensions/ql-vscode/src/mocks/gh-api-request.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import { Repository } from '../remote-queries/gh-api/repository';
import { VariantAnalysis, VariantAnalysisRepoTask } from '../remote-queries/gh-api/variant-analysis';

// Types that represent requests/responses from the GitHub API
// that we need to mock.

export enum RequestKind {
  GetRepo = 'getRepo',
  SubmitVariantAnalysis = 'submitVariantAnalysis',
  GetVariantAnalysis = 'getVariantAnalysis',
  GetVariantAnalysisRepo = 'getVariantAnalysisRepo',
  GetVariantAnalysisRepoResult = 'getVariantAnalysisRepoResult',
}

export interface BasicErorResponse {
  message: string;
}

export interface GetRepoRequest {
  request: {
    kind: RequestKind.GetRepo
  },
  response: {
    status: number,
    body: Repository | BasicErorResponse | undefined
  }
}

export interface SubmitVariantAnalysisRequest {
  request: {
    kind: RequestKind.SubmitVariantAnalysis
  },
  response: {
    status: number,
    body?: VariantAnalysis | BasicErorResponse
  }
}

export interface GetVariantAnalysisRequest {
  request: {
    kind: RequestKind.GetVariantAnalysis
  },
  response: {
    status: number,
    body?: VariantAnalysis | BasicErorResponse
  }
}

export interface GetVariantAnalysisRepoRequest {
  request: {
    kind: RequestKind.GetVariantAnalysisRepo,
    repositoryId: number
  },
  response: {
    status: number,
    body?: VariantAnalysisRepoTask | BasicErorResponse
  }
}

export interface GetVariantAnalysisRepoResultRequest {
  request: {
    kind: RequestKind.GetVariantAnalysisRepoResult,
    repositoryId: number
  },
  response: {
    status: number,
    body?: Buffer | string,
    contentType: string,
  }
}

export type GitHubApiRequest =
  | GetRepoRequest
  | SubmitVariantAnalysisRequest
  | GetVariantAnalysisRequest
  | GetVariantAnalysisRepoRequest
  | GetVariantAnalysisRepoResultRequest;

export const isGetRepoRequest = (
  request: GitHubApiRequest
): request is GetRepoRequest =>
  request.request.kind === RequestKind.GetRepo;

export const isSubmitVariantAnalysisRequest = (
  request: GitHubApiRequest
): request is SubmitVariantAnalysisRequest =>
  request.request.kind === RequestKind.SubmitVariantAnalysis;

export const isGetVariantAnalysisRequest = (
  request: GitHubApiRequest
): request is GetVariantAnalysisRequest =>
  request.request.kind === RequestKind.GetVariantAnalysis;

export const isGetVariantAnalysisRepoRequest = (
  request: GitHubApiRequest
): request is GetVariantAnalysisRepoRequest =>
  request.request.kind === RequestKind.GetVariantAnalysisRepo;

export const isGetVariantAnalysisRepoResultRequest = (
  request: GitHubApiRequest
): request is GetVariantAnalysisRepoResultRequest =>
  request.request.kind === RequestKind.GetVariantAnalysisRepoResult;
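A small sketch of how this discriminated union and its type guards are meant to be used together (the helper below is illustrative, not part of the change):

// Narrow a GitHubApiRequest to access kind-specific fields such as repositoryId.
function describeRequest(request: GitHubApiRequest): string {
  if (isGetVariantAnalysisRepoRequest(request) || isGetVariantAnalysisRepoResultRequest(request)) {
    return `${request.request.kind} for repository ${request.request.repositoryId}`;
  }
  return `${request.request.kind} (status ${request.response.status})`;
}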
extensions/ql-vscode/src/mocks/mock-gh-api-server.ts (new file, 139 lines)
@@ -0,0 +1,139 @@
import * as path from 'path';
import * as fs from 'fs-extra';
import { setupServer, SetupServerApi } from 'msw/node';

import { DisposableObject } from '../pure/disposable-object';

import { Recorder } from './recorder';
import { createRequestHandlers } from './request-handlers';
import { getDirectoryNamesInsidePath } from '../pure/files';

/**
 * Enables mocking of the GitHub API server via HTTP interception, using msw.
 */
export class MockGitHubApiServer extends DisposableObject {
  private _isListening: boolean;

  private readonly server: SetupServerApi;
  private readonly recorder: Recorder;

  constructor() {
    super();
    this._isListening = false;

    this.server = setupServer();
    this.recorder = this.push(new Recorder(this.server));
  }

  public startServer(): void {
    if (this._isListening) {
      return;
    }

    this.server.listen();
    this._isListening = true;
  }

  public stopServer(): void {
    this.server.close();
    this._isListening = false;
  }

  public async loadScenario(scenarioName: string, scenariosPath?: string): Promise<void> {
    if (!scenariosPath) {
      scenariosPath = await this.getDefaultScenariosPath();
      if (!scenariosPath) {
        return;
      }
    }

    const scenarioPath = path.join(scenariosPath, scenarioName);

    const handlers = await createRequestHandlers(scenarioPath);
    this.server.resetHandlers();
    this.server.use(...handlers);
  }

  public async saveScenario(scenarioName: string, scenariosPath?: string): Promise<string> {
    if (!scenariosPath) {
      scenariosPath = await this.getDefaultScenariosPath();
      if (!scenariosPath) {
        throw new Error('Could not find scenarios path');
      }
    }

    const filePath = await this.recorder.save(scenariosPath, scenarioName);

    await this.stopRecording();

    return filePath;
  }

  public async unloadScenario(): Promise<void> {
    if (!this.isScenarioLoaded) {
      return;
    }

    await this.unloadAllScenarios();
  }

  public async startRecording(): Promise<void> {
    if (this.recorder.isRecording) {
      return;
    }

    if (this.isScenarioLoaded) {
      await this.unloadAllScenarios();
    }

    this.recorder.start();
  }

  public async stopRecording(): Promise<void> {
    await this.recorder.stop();
    await this.recorder.clear();
  }

  public async getScenarioNames(scenariosPath?: string): Promise<string[]> {
    if (!scenariosPath) {
      scenariosPath = await this.getDefaultScenariosPath();
      if (!scenariosPath) {
        return [];
      }
    }

    return await getDirectoryNamesInsidePath(scenariosPath);
  }

  public get isListening(): boolean {
    return this._isListening;
  }

  public get isRecording(): boolean {
    return this.recorder.isRecording;
  }

  public get anyRequestsRecorded(): boolean {
    return this.recorder.anyRequestsRecorded;
  }

  public get isScenarioLoaded(): boolean {
    return this.server.listHandlers().length > 0;
  }

  public async getDefaultScenariosPath(): Promise<string | undefined> {
    // This should be the directory where package.json is located
    const rootDirectory = path.resolve(__dirname, '../..');

    const scenariosPath = path.resolve(rootDirectory, 'src/mocks/scenarios');
    if (await fs.pathExists(scenariosPath)) {
      return scenariosPath;
    }

    return undefined;
  }

  private async unloadAllScenarios(): Promise<void> {
    this.server.resetHandlers();
  }
}
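A rough sketch of how this mock server might be driven from a test or a command (the scenario name is hypothetical):

// Intercept GitHub API traffic and replay a pre-recorded scenario.
const mockServer = new MockGitHubApiServer();
mockServer.startServer();
await mockServer.loadScenario('my-scenario'); // hypothetical directory under src/mocks/scenarios

// ... exercise code that calls api.github.com; msw answers with the recorded responses ...

await mockServer.unloadScenario();
mockServer.stopServer();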
extensions/ql-vscode/src/mocks/recorder.ts (new file, 220 lines)
@@ -0,0 +1,220 @@
import * as fs from 'fs-extra';
import * as path from 'path';

import { MockedRequest } from 'msw';
import { SetupServerApi } from 'msw/node';
import { IsomorphicResponse } from '@mswjs/interceptors';

import { Headers } from 'headers-polyfill';
import fetch from 'node-fetch';

import { DisposableObject } from '../pure/disposable-object';

import { GetVariantAnalysisRepoResultRequest, GitHubApiRequest, RequestKind } from './gh-api-request';

export class Recorder extends DisposableObject {
  private readonly allRequests = new Map<string, MockedRequest>();
  private currentRecordedScenario: GitHubApiRequest[] = [];

  private _isRecording = false;

  constructor(
    private readonly server: SetupServerApi,
  ) {
    super();
    this.onRequestStart = this.onRequestStart.bind(this);
    this.onResponseBypass = this.onResponseBypass.bind(this);
  }

  public get isRecording(): boolean {
    return this._isRecording;
  }

  public get anyRequestsRecorded(): boolean {
    return this.currentRecordedScenario.length > 0;
  }

  public start(): void {
    if (this._isRecording) {
      return;
    }

    this._isRecording = true;

    this.clear();

    this.server.events.on('request:start', this.onRequestStart);
    this.server.events.on('response:bypass', this.onResponseBypass);
  }

  public stop(): void {
    if (!this._isRecording) {
      return;
    }

    this._isRecording = false;

    this.server.events.removeListener('request:start', this.onRequestStart);
    this.server.events.removeListener('response:bypass', this.onResponseBypass);
  }

  public clear() {
    this.currentRecordedScenario = [];
    this.allRequests.clear();
  }

  public async save(scenariosPath: string, name: string): Promise<string> {
    const scenarioDirectory = path.join(scenariosPath, name);

    await fs.ensureDir(scenarioDirectory);

    for (let i = 0; i < this.currentRecordedScenario.length; i++) {
      const request = this.currentRecordedScenario[i];

      const fileName = `${i}-${request.request.kind}.json`;
      const filePath = path.join(scenarioDirectory, fileName);

      let writtenRequest = {
        ...request
      };

      if (shouldWriteBodyToFile(writtenRequest)) {
        const extension = writtenRequest.response.contentType === 'application/zip' ? 'zip' : 'bin';

        const bodyFileName = `${i}-${writtenRequest.request.kind}.body.${extension}`;
        const bodyFilePath = path.join(scenarioDirectory, bodyFileName);
        await fs.writeFile(bodyFilePath, writtenRequest.response.body);

        writtenRequest = {
          ...writtenRequest,
          response: {
            ...writtenRequest.response,
            body: `file:${bodyFileName}`,
          },
        };
      }

      await fs.writeFile(filePath, JSON.stringify(writtenRequest, null, 2));
    }

    this.stop();

    return scenarioDirectory;
  }

  private onRequestStart(request: MockedRequest): void {
    if (request.headers.has('x-vscode-codeql-msw-bypass')) {
      return;
    }

    this.allRequests.set(request.id, request);
  }

  private async onResponseBypass(response: IsomorphicResponse, requestId: string): Promise<void> {
    const request = this.allRequests.get(requestId);
    this.allRequests.delete(requestId);
    if (!request) {
      return;
    }

    if (response.body === undefined) {
      return;
    }

    const gitHubApiRequest = await createGitHubApiRequest(request.url.toString(), response.status, response.body, response.headers);
    if (!gitHubApiRequest) {
      return;
    }

    this.currentRecordedScenario.push(gitHubApiRequest);
  }
}

async function createGitHubApiRequest(url: string, status: number, body: string, headers: Headers): Promise<GitHubApiRequest | undefined> {
  if (!url) {
    return undefined;
  }

  if (url.match(/\/repos\/[a-zA-Z0-9-_.]+\/[a-zA-Z0-9-_.]+$/)) {
    return {
      request: {
        kind: RequestKind.GetRepo,
      },
      response: {
        status,
        body: JSON.parse(body),
      },
    };
  }

  if (url.match(/\/repositories\/\d+\/code-scanning\/codeql\/variant-analyses$/)) {
    return {
      request: {
        kind: RequestKind.SubmitVariantAnalysis,
      },
      response: {
        status,
        body: JSON.parse(body),
      },
    };
  }

  if (url.match(/\/repositories\/\d+\/code-scanning\/codeql\/variant-analyses\/\d+$/)) {
    return {
      request: {
        kind: RequestKind.GetVariantAnalysis,
      },
      response: {
        status,
        body: JSON.parse(body),
      },
    };
  }

  const repoTaskMatch = url.match(/\/repositories\/\d+\/code-scanning\/codeql\/variant-analyses\/\d+\/repositories\/(?<repositoryId>\d+)$/);
  if (repoTaskMatch?.groups?.repositoryId) {
    return {
      request: {
        kind: RequestKind.GetVariantAnalysisRepo,
        repositoryId: parseInt(repoTaskMatch.groups.repositoryId, 10),
      },
      response: {
        status,
        body: JSON.parse(body),
      },
    };
  }

  // If the URL is a download URL for a variant analysis result, then it's a GetVariantAnalysisRepoResult request.
  const repoDownloadMatch = url.match(/objects-origin\.githubusercontent\.com\/codeql-query-console\/codeql-variant-analysis-repo-tasks\/\d+\/(?<repositoryId>\d+)/);
  if (repoDownloadMatch?.groups?.repositoryId) {
    // msw currently doesn't support binary response bodies, so we need to download this separately
    // see https://github.com/mswjs/interceptors/blob/15eafa6215a328219999403e3ff110e71699b016/src/interceptors/ClientRequest/utils/getIncomingMessageBody.ts#L24-L33
    // Essentially, msw is trying to decode a ZIP file as UTF-8, which changes the bytes and corrupts the file.
    const response = await fetch(url, {
      headers: {
        // We need to ensure we don't end up in an infinite loop, since this request will also be intercepted
        'x-vscode-codeql-msw-bypass': 'true',
      },
    });
    const responseBuffer = await response.buffer();

    return {
      request: {
        kind: RequestKind.GetVariantAnalysisRepoResult,
        repositoryId: parseInt(repoDownloadMatch.groups.repositoryId, 10),
      },
      response: {
        status,
        body: responseBuffer,
        contentType: headers.get('content-type') ?? 'application/octet-stream',
      }
    };
  }

  return undefined;
}

function shouldWriteBodyToFile(request: GitHubApiRequest): request is GetVariantAnalysisRepoResultRequest {
  return request.response.body instanceof Buffer;
}
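Roughly how recording a new scenario fits together (a sketch only; the output path is made up, and note that save() also stops the recorder):

import { setupServer } from 'msw/node';

const server = setupServer();
server.listen();

const recorder = new Recorder(server);
recorder.start();

// ... perform real GitHub API requests here; bypassed responses are captured by the recorder ...

const scenarioDir = await recorder.save('/tmp/scenarios', 'my-scenario'); // hypothetical path
console.log(`Scenario written to ${scenarioDir}`);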
extensions/ql-vscode/src/mocks/request-handlers.ts (new file, 156 lines)
@@ -0,0 +1,156 @@
import * as path from 'path';
import * as fs from 'fs-extra';
import { DefaultBodyType, MockedRequest, rest, RestHandler } from 'msw';
import {
  GitHubApiRequest,
  isGetRepoRequest,
  isGetVariantAnalysisRepoRequest,
  isGetVariantAnalysisRepoResultRequest,
  isGetVariantAnalysisRequest,
  isSubmitVariantAnalysisRequest
} from './gh-api-request';

const baseUrl = 'https://api.github.com';

export type RequestHandler = RestHandler<MockedRequest<DefaultBodyType>>;

export async function createRequestHandlers(scenarioDirPath: string): Promise<RequestHandler[]> {
  const requests = await readRequestFiles(scenarioDirPath);

  const handlers = [
    createGetRepoRequestHandler(requests),
    createSubmitVariantAnalysisRequestHandler(requests),
    createGetVariantAnalysisRequestHandler(requests),
    createGetVariantAnalysisRepoRequestHandler(requests),
    createGetVariantAnalysisRepoResultRequestHandler(requests),
  ];

  return handlers;
}

async function readRequestFiles(scenarioDirPath: string): Promise<GitHubApiRequest[]> {
  const files = await fs.readdir(scenarioDirPath);

  const orderedFiles = files.sort((a, b) => {
    const aNum = parseInt(a.split('-')[0]);
    const bNum = parseInt(b.split('-')[0]);
    return aNum - bNum;
  });

  const requests: GitHubApiRequest[] = [];
  for (const file of orderedFiles) {
    if (!file.endsWith('.json')) {
      continue;
    }

    const filePath = path.join(scenarioDirPath, file);
    const request: GitHubApiRequest = await fs.readJson(filePath, { encoding: 'utf8' });

    if (typeof request.response.body === 'string' && request.response.body.startsWith('file:')) {
      request.response.body = await fs.readFile(path.join(scenarioDirPath, request.response.body.substring(5)));
    }

    requests.push(request);
  }

  return requests;
}

function createGetRepoRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
  const getRepoRequests = requests.filter(isGetRepoRequest);

  if (getRepoRequests.length > 1) {
    throw Error('More than one get repo request found');
  }

  const getRepoRequest = getRepoRequests[0];

  return rest.get(`${baseUrl}/repos/:owner/:name`, (_req, res, ctx) => {
    return res(
      ctx.status(getRepoRequest.response.status),
      ctx.json(getRepoRequest.response.body),
    );
  });
}

function createSubmitVariantAnalysisRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
  const submitVariantAnalysisRequests = requests.filter(isSubmitVariantAnalysisRequest);

  if (submitVariantAnalysisRequests.length > 1) {
    throw Error('More than one submit variant analysis request found');
  }

  const getRepoRequest = submitVariantAnalysisRequests[0];

  return rest.post(`${baseUrl}/repositories/:controllerRepoId/code-scanning/codeql/variant-analyses`, (_req, res, ctx) => {
    return res(
      ctx.status(getRepoRequest.response.status),
      ctx.json(getRepoRequest.response.body),
    );
  });
}

function createGetVariantAnalysisRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
  const getVariantAnalysisRequests = requests.filter(isGetVariantAnalysisRequest);
  let requestIndex = 0;

  // During the lifetime of a variant analysis run, there are multiple requests
  // to get the variant analysis. We need to return different responses for each
  // request, so keep an index of the request and return the appropriate response.
  return rest.get(`${baseUrl}/repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId`, (_req, res, ctx) => {
    const request = getVariantAnalysisRequests[requestIndex];

    if (requestIndex < getVariantAnalysisRequests.length - 1) {
      // If there are more requests to come, increment the index.
      requestIndex++;
    }

    return res(
      ctx.status(request.response.status),
      ctx.json(request.response.body),
    );
  });
}

function createGetVariantAnalysisRepoRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
  const getVariantAnalysisRepoRequests = requests.filter(isGetVariantAnalysisRepoRequest);

  return rest.get(
    `${baseUrl}/repositories/:controllerRepoId/code-scanning/codeql/variant-analyses/:variantAnalysisId/repositories/:repoId`,
    (req, res, ctx) => {
      const scenarioRequest = getVariantAnalysisRepoRequests.find(r => r.request.repositoryId.toString() === req.params.repoId);
      if (!scenarioRequest) {
        throw Error(`No scenario request found for ${req.url}`);
      }

      return res(
        ctx.status(scenarioRequest.response.status),
        ctx.json(scenarioRequest.response.body),
      );
    });
}

function createGetVariantAnalysisRepoResultRequestHandler(requests: GitHubApiRequest[]): RequestHandler {
  const getVariantAnalysisRepoResultRequests = requests.filter(isGetVariantAnalysisRepoResultRequest);

  return rest.get(
    'https://objects-origin.githubusercontent.com/codeql-query-console/codeql-variant-analysis-repo-tasks/:variantAnalysisId/:repoId/*',
    (req, res, ctx) => {
      const scenarioRequest = getVariantAnalysisRepoResultRequests.find(r => r.request.repositoryId.toString() === req.params.repoId);
      if (!scenarioRequest) {
        throw Error(`No scenario request found for ${req.url}`);
      }

      if (scenarioRequest.response.body) {
        return res(
          ctx.status(scenarioRequest.response.status),
          ctx.set('Content-Type', scenarioRequest.response.contentType),
          ctx.body(scenarioRequest.response.body),
        );
      } else {
        return res(
          ctx.status(scenarioRequest.response.status),
        );
      }
    });
}
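A small sketch of wiring the generated handlers into an msw server by hand (the scenario path is hypothetical; MockGitHubApiServer.loadScenario above does essentially this via resetHandlers/use):

import { setupServer } from 'msw/node';

const handlers = await createRequestHandlers('/path/to/scenarios/my-scenario'); // hypothetical path
const server = setupServer(...handlers);
server.listen();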
@@ -0,0 +1,11 @@
{
  "request": {
    "kind": "getRepo"
  },
  "response": {
    "status": 404,
    "body": {
      "message": "Repository not found"
    }
  }
}
@@ -0,0 +1,159 @@
|
|||||||
|
{
|
||||||
|
"request": {
|
||||||
|
"kind": "getRepo"
|
||||||
|
},
|
||||||
|
"response": {
|
||||||
|
"status": 200,
|
||||||
|
"body": {
|
||||||
|
"id": 557804416,
|
||||||
|
"node_id": "R_kgDOIT9rgA",
|
||||||
|
"name": "mrva-demo-controller-repo",
|
||||||
|
"full_name": "github/mrva-demo-controller-repo",
|
||||||
|
"private": true,
|
||||||
|
"owner": {
|
||||||
|
"login": "github",
|
||||||
|
"id": 9919,
|
||||||
|
"node_id": "MDEyOk9yZ2FuaXphdGlvbjk5MTk=",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/9919?v=4",
|
||||||
|
"gravatar_id": "",
|
||||||
|
"url": "https://api.github.com/users/github",
|
||||||
|
"html_url": "https://github.com/github",
|
||||||
|
"followers_url": "https://api.github.com/users/github/followers",
|
||||||
|
"following_url": "https://api.github.com/users/github/following{/other_user}",
|
||||||
|
"gists_url": "https://api.github.com/users/github/gists{/gist_id}",
|
||||||
|
"starred_url": "https://api.github.com/users/github/starred{/owner}{/repo}",
|
||||||
|
"subscriptions_url": "https://api.github.com/users/github/subscriptions",
|
||||||
|
"organizations_url": "https://api.github.com/users/github/orgs",
|
||||||
|
"repos_url": "https://api.github.com/users/github/repos",
|
||||||
|
"events_url": "https://api.github.com/users/github/events{/privacy}",
|
||||||
|
"received_events_url": "https://api.github.com/users/github/received_events",
|
||||||
|
"type": "Organization",
|
||||||
|
"site_admin": false
|
||||||
|
},
|
||||||
|
"html_url": "https://github.com/github/mrva-demo-controller-repo",
|
||||||
|
"description": null,
|
||||||
|
"fork": false,
|
||||||
|
"url": "https://api.github.com/repos/github/mrva-demo-controller-repo",
|
||||||
|
"forks_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/forks",
|
||||||
|
"keys_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/keys{/key_id}",
|
||||||
|
"collaborators_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/collaborators{/collaborator}",
|
||||||
|
"teams_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/teams",
|
||||||
|
"hooks_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/hooks",
|
||||||
|
"issue_events_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues/events{/number}",
|
||||||
|
"events_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/events",
|
||||||
|
"assignees_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/assignees{/user}",
|
||||||
|
"branches_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/branches{/branch}",
|
||||||
|
"tags_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/tags",
|
||||||
|
"blobs_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/blobs{/sha}",
|
||||||
|
"git_tags_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/tags{/sha}",
|
||||||
|
"git_refs_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/refs{/sha}",
|
||||||
|
"trees_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/trees{/sha}",
|
||||||
|
"statuses_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/statuses/{sha}",
|
||||||
|
"languages_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/languages",
|
||||||
|
"stargazers_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/stargazers",
|
||||||
|
"contributors_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/contributors",
|
||||||
|
"subscribers_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/subscribers",
|
||||||
|
"subscription_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/subscription",
|
||||||
|
"commits_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/commits{/sha}",
|
||||||
|
"git_commits_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/commits{/sha}",
|
||||||
|
"comments_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/comments{/number}",
|
||||||
|
"issue_comment_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues/comments{/number}",
|
||||||
|
"contents_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/contents/{+path}",
|
||||||
|
"compare_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/compare/{base}...{head}",
|
||||||
|
"merges_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/merges",
|
||||||
|
"archive_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/{archive_format}{/ref}",
|
||||||
|
"downloads_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/downloads",
|
||||||
|
"issues_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues{/number}",
|
||||||
|
"pulls_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/pulls{/number}",
|
||||||
|
"milestones_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/milestones{/number}",
|
||||||
|
"notifications_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/notifications{?since,all,participating}",
|
||||||
|
"labels_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/labels{/name}",
|
||||||
|
"releases_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/releases{/id}",
|
||||||
|
"deployments_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/deployments",
|
||||||
|
"created_at": "2022-10-26T10:37:59Z",
|
||||||
|
"updated_at": "2022-10-26T10:37:59Z",
|
||||||
|
"pushed_at": "2022-10-26T10:38:02Z",
|
||||||
|
"git_url": "git://github.com/github/mrva-demo-controller-repo.git",
|
||||||
|
"ssh_url": "git@github.com:github/mrva-demo-controller-repo.git",
|
||||||
|
"clone_url": "https://github.com/github/mrva-demo-controller-repo.git",
|
||||||
|
"svn_url": "https://github.com/github/mrva-demo-controller-repo",
|
||||||
|
"homepage": null,
|
||||||
|
"size": 0,
|
||||||
|
"stargazers_count": 0,
|
||||||
|
"watchers_count": 0,
|
||||||
|
"language": null,
|
||||||
|
"has_issues": true,
|
||||||
|
"has_projects": true,
|
||||||
|
"has_downloads": true,
|
||||||
|
"has_wiki": true,
|
||||||
|
"has_pages": false,
|
||||||
|
"forks_count": 0,
|
||||||
|
"mirror_url": null,
|
||||||
|
"archived": false,
|
||||||
|
"disabled": false,
|
||||||
|
"open_issues_count": 0,
|
||||||
|
"license": null,
|
||||||
|
"allow_forking": false,
|
||||||
|
"is_template": false,
|
||||||
|
"web_commit_signoff_required": false,
|
||||||
|
"topics": [],
|
||||||
|
"visibility": "private",
|
||||||
|
"forks": 0,
|
||||||
|
"open_issues": 0,
|
||||||
|
"watchers": 0,
|
||||||
|
"default_branch": "main",
|
||||||
|
"permissions": {
|
||||||
|
"admin": true,
|
||||||
|
"maintain": true,
|
||||||
|
"push": true,
|
||||||
|
"triage": true,
|
||||||
|
"pull": true
|
||||||
|
},
|
||||||
|
"temp_clone_token": "AACMDDJSXFX6QQXTSB4YQCDDLEWP4",
|
||||||
|
"allow_squash_merge": true,
|
||||||
|
"allow_merge_commit": true,
|
||||||
|
"allow_rebase_merge": true,
|
||||||
|
"allow_auto_merge": false,
|
||||||
|
"delete_branch_on_merge": false,
|
||||||
|
"allow_update_branch": false,
|
||||||
|
"use_squash_pr_title_as_default": false,
|
||||||
|
"squash_merge_commit_message": "COMMIT_MESSAGES",
|
||||||
|
"squash_merge_commit_title": "COMMIT_OR_PR_TITLE",
|
||||||
|
"merge_commit_message": "PR_TITLE",
|
||||||
|
"merge_commit_title": "MERGE_MESSAGE",
|
||||||
|
"organization": {
|
||||||
|
"login": "github",
|
||||||
|
"id": 9919,
|
||||||
|
"node_id": "MDEyOk9yZ2FuaXphdGlvbjk5MTk=",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/9919?v=4",
|
||||||
|
"gravatar_id": "",
|
||||||
|
"url": "https://api.github.com/users/github",
|
||||||
|
"html_url": "https://github.com/github",
|
||||||
|
"followers_url": "https://api.github.com/users/github/followers",
|
||||||
|
"following_url": "https://api.github.com/users/github/following{/other_user}",
|
||||||
|
"gists_url": "https://api.github.com/users/github/gists{/gist_id}",
|
||||||
|
"starred_url": "https://api.github.com/users/github/starred{/owner}{/repo}",
|
||||||
|
"subscriptions_url": "https://api.github.com/users/github/subscriptions",
|
||||||
|
"organizations_url": "https://api.github.com/users/github/orgs",
|
||||||
|
"repos_url": "https://api.github.com/users/github/repos",
|
||||||
|
"events_url": "https://api.github.com/users/github/events{/privacy}",
|
||||||
|
"received_events_url": "https://api.github.com/users/github/received_events",
|
||||||
|
"type": "Organization",
|
||||||
|
"site_admin": false
|
||||||
|
},
|
||||||
|
"security_and_analysis": {
|
||||||
|
"advanced_security": {
|
||||||
|
"status": "enabled"
|
||||||
|
},
|
||||||
|
"secret_scanning": {
|
||||||
|
"status": "enabled"
|
||||||
|
},
|
||||||
|
"secret_scanning_push_protection": {
|
||||||
|
"status": "enabled"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"network_count": 0,
|
||||||
|
"subscribers_count": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,104 @@
|
|||||||
|
{
|
||||||
|
"request": {
|
||||||
|
"kind": "submitVariantAnalysis"
|
||||||
|
},
|
||||||
|
"response": {
|
||||||
|
"status": 201,
|
||||||
|
"body": {
|
||||||
|
"id": 146,
|
||||||
|
"controller_repo": {
|
||||||
|
"id": 557804416,
|
||||||
|
"node_id": "R_kgDOIT9rgA",
|
||||||
|
"name": "mrva-demo-controller-repo",
|
||||||
|
"full_name": "github/mrva-demo-controller-repo",
|
||||||
|
"private": true,
|
||||||
|
"owner": {
|
||||||
|
"login": "github",
|
||||||
|
"id": 9919,
|
||||||
|
"node_id": "MDEyOk9yZ2FuaXphdGlvbjk5MTk=",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/9919?v=4",
|
||||||
|
"gravatar_id": "",
|
||||||
|
"url": "https://api.github.com/users/github",
|
||||||
|
"html_url": "https://github.com/github",
|
||||||
|
"followers_url": "https://api.github.com/users/github/followers",
|
||||||
|
"following_url": "https://api.github.com/users/github/following{/other_user}",
|
||||||
|
"gists_url": "https://api.github.com/users/github/gists{/gist_id}",
|
||||||
|
"starred_url": "https://api.github.com/users/github/starred{/owner}{/repo}",
|
||||||
|
"subscriptions_url": "https://api.github.com/users/github/subscriptions",
|
||||||
|
"organizations_url": "https://api.github.com/users/github/orgs",
|
||||||
|
"repos_url": "https://api.github.com/users/github/repos",
|
||||||
|
"events_url": "https://api.github.com/users/github/events{/privacy}",
|
||||||
|
"received_events_url": "https://api.github.com/users/github/received_events",
|
||||||
|
"type": "Organization",
|
||||||
|
"site_admin": false
|
||||||
|
},
|
||||||
|
"html_url": "https://github.com/github/mrva-demo-controller-repo",
|
||||||
|
"description": null,
|
||||||
|
"fork": false,
|
||||||
|
"url": "https://api.github.com/repos/github/mrva-demo-controller-repo",
|
||||||
|
"forks_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/forks",
|
||||||
|
"keys_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/keys{/key_id}",
|
||||||
|
"collaborators_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/collaborators{/collaborator}",
|
||||||
|
"teams_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/teams",
|
||||||
|
"hooks_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/hooks",
|
||||||
|
"issue_events_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues/events{/number}",
|
||||||
|
"events_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/events",
|
||||||
|
"assignees_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/assignees{/user}",
|
||||||
|
"branches_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/branches{/branch}",
|
||||||
|
"tags_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/tags",
|
||||||
|
"blobs_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/blobs{/sha}",
|
||||||
|
"git_tags_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/tags{/sha}",
|
||||||
|
"git_refs_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/refs{/sha}",
|
||||||
|
"trees_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/trees{/sha}",
|
||||||
|
"statuses_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/statuses/{sha}",
|
||||||
|
"languages_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/languages",
|
||||||
|
"stargazers_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/stargazers",
|
||||||
|
"contributors_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/contributors",
|
||||||
|
"subscribers_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/subscribers",
|
||||||
|
"subscription_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/subscription",
|
||||||
|
"commits_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/commits{/sha}",
|
||||||
|
"git_commits_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/commits{/sha}",
|
||||||
|
"comments_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/comments{/number}",
|
||||||
|
"issue_comment_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues/comments{/number}",
|
||||||
|
"contents_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/contents/{+path}",
|
||||||
|
"compare_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/compare/{base}...{head}",
|
||||||
|
"merges_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/merges",
|
||||||
|
"archive_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/{archive_format}{/ref}",
|
||||||
|
"downloads_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/downloads",
|
||||||
|
"issues_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues{/number}",
|
||||||
|
"pulls_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/pulls{/number}",
|
||||||
|
"milestones_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/milestones{/number}",
|
||||||
|
"notifications_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/notifications{?since,all,participating}",
|
||||||
|
"labels_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/labels{/name}",
|
||||||
|
"releases_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/releases{/id}",
|
||||||
|
"deployments_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/deployments"
|
||||||
|
},
|
||||||
|
"actor": {
|
||||||
|
"login": "charisk",
|
||||||
|
"id": 311693,
|
||||||
|
"node_id": "MDQ6VXNlcjMxMTY5Mw==",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/311693?v=4",
|
||||||
|
"gravatar_id": "",
|
||||||
|
"url": "https://api.github.com/users/charisk",
|
||||||
|
"html_url": "https://github.com/charisk",
|
||||||
|
"followers_url": "https://api.github.com/users/charisk/followers",
|
||||||
|
"following_url": "https://api.github.com/users/charisk/following{/other_user}",
|
||||||
|
"gists_url": "https://api.github.com/users/charisk/gists{/gist_id}",
|
||||||
|
"starred_url": "https://api.github.com/users/charisk/starred{/owner}{/repo}",
|
||||||
|
"subscriptions_url": "https://api.github.com/users/charisk/subscriptions",
|
||||||
|
"organizations_url": "https://api.github.com/users/charisk/orgs",
|
||||||
|
"repos_url": "https://api.github.com/users/charisk/repos",
|
||||||
|
"events_url": "https://api.github.com/users/charisk/events{/privacy}",
|
||||||
|
"received_events_url": "https://api.github.com/users/charisk/received_events",
|
||||||
|
"type": "User",
|
||||||
|
"site_admin": true
|
||||||
|
},
|
||||||
|
"query_language": "javascript",
|
||||||
|
"query_pack_url": "https://objects-origin.githubusercontent.com/codeql-query-console/variant_analyses/146/query_pack?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=queryconsoleprod%2F20221026%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20221026T124513Z&X-Amz-Expires=3600&X-Amz-Signature=0f5f84090c84c1b915e47960bcbc6f66433cd345cdc81cc08669920b48f6b622&X-Amz-SignedHeaders=host",
|
||||||
|
"created_at": "2022-10-26T12:45:12Z",
|
||||||
|
"updated_at": "2022-10-26T12:45:13Z",
|
||||||
|
"status": "in_progress",
|
||||||
|
"skipped_repositories": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,11 @@
{
  "request": {
    "kind": "getVariantAnalysisRepoResult",
    "repositoryId": 206444
  },
  "response": {
    "status": 200,
    "body": "file:17-getVariantAnalysisRepoResult.body.zip",
    "contentType": "application/zip"
  }
}
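The "file:" reference in the body above is the hand-off between Recorder.save (which writes the binary artifact next to the JSON and records only its file name) and readRequestFiles (which loads it back into a Buffer). A minimal sketch of that resolution, using the file name from this fixture:

import * as path from 'path';
import * as fs from 'fs-extra';

// Resolve a "file:<name>" body reference relative to the scenario directory.
async function resolveBody(scenarioDirPath: string, body: string): Promise<Buffer | string> {
  return body.startsWith('file:')
    ? await fs.readFile(path.join(scenarioDirPath, body.substring(5))) // e.g. 17-getVariantAnalysisRepoResult.body.zip
    : body;
}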
@@ -0,0 +1,171 @@
|
|||||||
|
{
|
||||||
|
"request": {
|
||||||
|
"kind": "getVariantAnalysis"
|
||||||
|
},
|
||||||
|
"response": {
|
||||||
|
"status": 200,
|
||||||
|
"body": {
|
||||||
|
"id": 146,
|
||||||
|
"controller_repo": {
|
||||||
|
"id": 557804416,
|
||||||
|
"node_id": "R_kgDOIT9rgA",
|
||||||
|
"name": "mrva-demo-controller-repo",
|
||||||
|
"full_name": "github/mrva-demo-controller-repo",
|
||||||
|
"private": true,
|
||||||
|
"owner": {
|
||||||
|
"login": "github",
|
||||||
|
"id": 9919,
|
||||||
|
"node_id": "MDEyOk9yZ2FuaXphdGlvbjk5MTk=",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/9919?v=4",
|
||||||
|
"gravatar_id": "",
|
||||||
|
"url": "https://api.github.com/users/github",
|
||||||
|
"html_url": "https://github.com/github",
|
||||||
|
"followers_url": "https://api.github.com/users/github/followers",
|
||||||
|
"following_url": "https://api.github.com/users/github/following{/other_user}",
|
||||||
|
"gists_url": "https://api.github.com/users/github/gists{/gist_id}",
|
||||||
|
"starred_url": "https://api.github.com/users/github/starred{/owner}{/repo}",
|
||||||
|
"subscriptions_url": "https://api.github.com/users/github/subscriptions",
|
||||||
|
"organizations_url": "https://api.github.com/users/github/orgs",
|
||||||
|
"repos_url": "https://api.github.com/users/github/repos",
|
||||||
|
"events_url": "https://api.github.com/users/github/events{/privacy}",
|
||||||
|
"received_events_url": "https://api.github.com/users/github/received_events",
|
||||||
|
"type": "Organization",
|
||||||
|
"site_admin": false
|
||||||
|
},
|
||||||
|
"html_url": "https://github.com/github/mrva-demo-controller-repo",
|
||||||
|
"description": null,
|
||||||
|
"fork": false,
|
||||||
|
"url": "https://api.github.com/repos/github/mrva-demo-controller-repo",
|
||||||
|
"forks_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/forks",
|
||||||
|
"keys_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/keys{/key_id}",
|
||||||
|
"collaborators_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/collaborators{/collaborator}",
|
||||||
|
"teams_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/teams",
|
||||||
|
"hooks_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/hooks",
|
||||||
|
"issue_events_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues/events{/number}",
|
||||||
|
"events_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/events",
|
||||||
|
"assignees_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/assignees{/user}",
|
||||||
|
"branches_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/branches{/branch}",
|
||||||
|
"tags_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/tags",
|
||||||
|
"blobs_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/blobs{/sha}",
|
||||||
|
"git_tags_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/tags{/sha}",
|
||||||
|
"git_refs_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/refs{/sha}",
|
||||||
|
"trees_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/trees{/sha}",
|
||||||
|
"statuses_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/statuses/{sha}",
|
||||||
|
"languages_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/languages",
|
||||||
|
"stargazers_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/stargazers",
|
||||||
|
"contributors_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/contributors",
|
||||||
|
"subscribers_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/subscribers",
|
||||||
|
"subscription_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/subscription",
|
||||||
|
"commits_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/commits{/sha}",
|
||||||
|
"git_commits_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/git/commits{/sha}",
|
||||||
|
"comments_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/comments{/number}",
|
||||||
|
"issue_comment_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues/comments{/number}",
|
||||||
|
"contents_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/contents/{+path}",
|
||||||
|
"compare_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/compare/{base}...{head}",
|
||||||
|
"merges_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/merges",
|
||||||
|
"archive_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/{archive_format}{/ref}",
|
||||||
|
"downloads_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/downloads",
|
||||||
|
"issues_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/issues{/number}",
|
||||||
|
"pulls_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/pulls{/number}",
|
||||||
|
"milestones_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/milestones{/number}",
|
||||||
|
"notifications_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/notifications{?since,all,participating}",
|
||||||
|
"labels_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/labels{/name}",
|
||||||
|
"releases_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/releases{/id}",
|
||||||
|
"deployments_url": "https://api.github.com/repos/github/mrva-demo-controller-repo/deployments"
|
||||||
|
},
|
||||||
|
"actor": {
|
||||||
|
"login": "charisk",
|
||||||
|
"id": 311693,
|
||||||
|
"node_id": "MDQ6VXNlcjMxMTY5Mw==",
|
||||||
|
"avatar_url": "https://avatars.githubusercontent.com/u/311693?v=4",
|
||||||
|
"gravatar_id": "",
|
||||||
|
"url": "https://api.github.com/users/charisk",
|
||||||
|
"html_url": "https://github.com/charisk",
|
||||||
|
"followers_url": "https://api.github.com/users/charisk/followers",
|
||||||
|
"following_url": "https://api.github.com/users/charisk/following{/other_user}",
|
||||||
|
"gists_url": "https://api.github.com/users/charisk/gists{/gist_id}",
|
||||||
|
"starred_url": "https://api.github.com/users/charisk/starred{/owner}{/repo}",
|
||||||
|
"subscriptions_url": "https://api.github.com/users/charisk/subscriptions",
|
||||||
|
"organizations_url": "https://api.github.com/users/charisk/orgs",
|
||||||
|
"repos_url": "https://api.github.com/users/charisk/repos",
|
||||||
|
"events_url": "https://api.github.com/users/charisk/events{/privacy}",
|
||||||
|
"received_events_url": "https://api.github.com/users/charisk/received_events",
|
||||||
|
"type": "User",
|
||||||
|
"site_admin": true
|
||||||
|
},
|
||||||
|
"query_language": "javascript",
|
||||||
|
"query_pack_url": "https://objects-origin.githubusercontent.com/codeql-query-console/variant_analyses/146/query_pack?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=queryconsoleprod%2F20221026%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20221026T124653Z&X-Amz-Expires=3600&X-Amz-Signature=2147ea8461603acdb32fc38544dffb62e74db12fbbe5a32f269420d4945841a1&X-Amz-SignedHeaders=host",
|
||||||
|
"created_at": "2022-10-26T12:45:12Z",
|
||||||
|
"updated_at": "2022-10-26T12:45:15Z",
|
||||||
|
"actions_workflow_run_id": 3329095282,
|
||||||
|
"status": "in_progress",
|
||||||
|
"scanned_repositories": [
|
||||||
|
{
|
||||||
|
"repository": {
|
||||||
|
"id": 206444,
|
||||||
|
"name": "hive",
|
||||||
|
"full_name": "apache/hive",
|
||||||
|
"private": false
|
||||||
|
},
|
||||||
|
"analysis_status": "succeeded",
|
||||||
|
"artifact_size_in_bytes": 81841,
|
||||||
|
"result_count": 4
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"repository": {
|
||||||
|
"id": 20753500,
|
||||||
|
"name": "ng-nice",
|
||||||
|
"full_name": "angular-cn/ng-nice",
|
||||||
|
"private": false
|
||||||
|
},
|
||||||
|
"analysis_status": "in_progress"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"repository": {
|
||||||
|
"id": 23418517,
|
||||||
|
"name": "hadoop",
|
||||||
|
"full_name": "apache/hadoop",
|
||||||
|
"private": false
|
||||||
|
},
|
||||||
|
"analysis_status": "succeeded",
|
||||||
|
"artifact_size_in_bytes": 66895,
|
||||||
|
"result_count": 3
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"repository": {
|
||||||
|
"id": 236095576,
|
||||||
|
"name": "backstage",
|
||||||
|
"full_name": "backstage/backstage",
|
||||||
|
"private": false
|
||||||
|
},
|
||||||
|
"analysis_status": "in_progress"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"repository": {
|
||||||
|
"id": 257485422,
|
||||||
|
"name": "vite",
|
||||||
|
"full_name": "vitejs/vite",
|
||||||
|
"private": false
|
||||||
|
},
|
||||||
|
"analysis_status": "succeeded",
|
||||||
|
"artifact_size_in_bytes": 702,
|
||||||
|
"result_count": 0
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"skipped_repositories": {
|
||||||
|
"access_mismatch_repos": {
|
||||||
|
"repository_count": 0,
|
||||||
|
"repositories": []
|
||||||
|
},
|
||||||
|
"no_codeql_db_repos": {
|
||||||
|
"repository_count": 0,
|
||||||
|
"repositories": []
|
||||||
|
},
|
||||||
|
"over_limit_repos": {
|
||||||
|
"repository_count": 0,
|
||||||
|
"repositories": []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
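The object above is a recorded `getVariantAnalysis` response used as a test fixture: each entry in `scanned_repositories` pairs a minimal repository record with its per-repository `analysis_status`, and `artifact_size_in_bytes` / `result_count` only appear once an analysis has finished and produced an artifact. A minimal TypeScript sketch of that shape, with hypothetical type and function names (not the extension's real types):

```typescript
// Hypothetical types mirroring the "scanned_repositories" entries in the fixture above.
interface ScannedRepository {
  repository: {
    id: number;
    name: string;
    full_name: string;
    private: boolean;
  };
  // Only "succeeded" and "in_progress" appear in this fixture; other states may exist.
  analysis_status: string;
  // Present only for entries whose analysis has finished and produced an artifact.
  artifact_size_in_bytes?: number;
  result_count?: number;
}

// Example: total results across the repositories that finished successfully.
function totalResults(repos: ScannedRepository[]): number {
  return repos
    .filter((r) => r.analysis_status === "succeeded")
    .reduce((sum, r) => sum + (r.result_count ?? 0), 0);
}
```

For the fixture above, `totalResults` would return 7 (4 from apache/hive, 3 from apache/hadoop, 0 from vitejs/vite).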
@@ -0,0 +1,84 @@
{
  "request": {
    "kind": "getVariantAnalysisRepo",
    "repositoryId": 23418517
  },
  "response": {
    "status": 200,
    "body": {
      "repository": {
        "id": 23418517,
        "node_id": "MDEwOlJlcG9zaXRvcnkyMzQxODUxNw==",
        "name": "hadoop",
        "full_name": "apache/hadoop",
        "private": false,
        "owner": {
          "login": "apache",
          "id": 47359,
          "node_id": "MDEyOk9yZ2FuaXphdGlvbjQ3MzU5",
          "avatar_url": "https://avatars.githubusercontent.com/u/47359?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/apache",
          "html_url": "https://github.com/apache",
          "followers_url": "https://api.github.com/users/apache/followers",
          "following_url": "https://api.github.com/users/apache/following{/other_user}",
          "gists_url": "https://api.github.com/users/apache/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/apache/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/apache/subscriptions",
          "organizations_url": "https://api.github.com/users/apache/orgs",
          "repos_url": "https://api.github.com/users/apache/repos",
          "events_url": "https://api.github.com/users/apache/events{/privacy}",
          "received_events_url": "https://api.github.com/users/apache/received_events",
          "type": "Organization",
          "site_admin": false
        },
        "html_url": "https://github.com/apache/hadoop",
        "description": "Apache Hadoop",
        "fork": false,
        "url": "https://api.github.com/repos/apache/hadoop",
        "forks_url": "https://api.github.com/repos/apache/hadoop/forks",
        "keys_url": "https://api.github.com/repos/apache/hadoop/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/apache/hadoop/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/apache/hadoop/teams",
        "hooks_url": "https://api.github.com/repos/apache/hadoop/hooks",
        "issue_events_url": "https://api.github.com/repos/apache/hadoop/issues/events{/number}",
        "events_url": "https://api.github.com/repos/apache/hadoop/events",
        "assignees_url": "https://api.github.com/repos/apache/hadoop/assignees{/user}",
        "branches_url": "https://api.github.com/repos/apache/hadoop/branches{/branch}",
        "tags_url": "https://api.github.com/repos/apache/hadoop/tags",
        "blobs_url": "https://api.github.com/repos/apache/hadoop/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/apache/hadoop/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/apache/hadoop/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/apache/hadoop/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/apache/hadoop/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/apache/hadoop/languages",
        "stargazers_url": "https://api.github.com/repos/apache/hadoop/stargazers",
        "contributors_url": "https://api.github.com/repos/apache/hadoop/contributors",
        "subscribers_url": "https://api.github.com/repos/apache/hadoop/subscribers",
        "subscription_url": "https://api.github.com/repos/apache/hadoop/subscription",
        "commits_url": "https://api.github.com/repos/apache/hadoop/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/apache/hadoop/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/apache/hadoop/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/apache/hadoop/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/apache/hadoop/contents/{+path}",
        "compare_url": "https://api.github.com/repos/apache/hadoop/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/apache/hadoop/merges",
        "archive_url": "https://api.github.com/repos/apache/hadoop/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/apache/hadoop/downloads",
        "issues_url": "https://api.github.com/repos/apache/hadoop/issues{/number}",
        "pulls_url": "https://api.github.com/repos/apache/hadoop/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/apache/hadoop/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/apache/hadoop/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/apache/hadoop/labels{/name}",
        "releases_url": "https://api.github.com/repos/apache/hadoop/releases{/id}",
        "deployments_url": "https://api.github.com/repos/apache/hadoop/deployments"
      },
      "analysis_status": "succeeded",
      "artifact_size_in_bytes": 66895,
      "result_count": 3,
      "database_commit_sha": "aac87ffe76451c2fd535350b7aefb384e2be6241",
      "source_location_prefix": "/home/runner/work/bulk-builder/bulk-builder",
      "artifact_url": "https://objects-origin.githubusercontent.com/codeql-query-console/codeql-variant-analysis-repo-tasks/146/23418517/425ed1e9-c214-4f71-832d-798da3ed7452?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=queryconsoleprod%2F20221026%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20221026T124654Z&X-Amz-Expires=300&X-Amz-Signature=98dc5dfcc4c70c4cc40fb62fb87a21671b1ee26266e8ade3109d6f39cfceef5a&X-Amz-SignedHeaders=host&actor_id=311693&key_id=0&repo_id=557804416"
    }
  }
}
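Both fixture files follow the same envelope: a recorded request (identified by its `kind`, plus a `repositoryId` for per-repository calls) and the captured response with its HTTP status and body. A minimal sketch, assuming that layout, of how such a fixture might be loaded and replayed in a test; the type name, function name, and file path are hypothetical, not the extension's real API:

```typescript
import { readFileSync } from "node:fs";

// Hypothetical envelope matching the { request, response } layout of the fixtures above.
interface RecordedExchange<TBody = unknown> {
  request: {
    kind: string;          // e.g. "getVariantAnalysisRepo"
    repositoryId?: number; // present for per-repository requests
  };
  response: {
    status: number; // e.g. 200
    body: TBody;
  };
}

// Read one recorded exchange from disk so a test double can return response.body
// whenever the code under test issues a request of the matching kind.
function loadExchange<TBody>(path: string): RecordedExchange<TBody> {
  return JSON.parse(readFileSync(path, "utf8")) as RecordedExchange<TBody>;
}

// Usage (hypothetical path): the apache/hadoop exchange above reports 3 results.
// const hadoop = loadExchange<{ result_count: number }>("fixtures/getVariantAnalysisRepo.json");
// console.log(hadoop.response.body.result_count); // 3
```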