Compare commits

94 Commits (SHA1):

426cc95e9f, 9e40043fe0, 22ed090685, 2ca4097daf, f1d16015bf, 9a81ad05ed, 76e983d19c, a3015c0fa3, 88d0bda049, d2ec54e89e,
4559c5a38d, 16bd106abc, e5dcec8d8e, ad3565d3ad, 5fe12ecd74, 318214642f, 227fe3ee6b, 978a82dd1a, 04f72a7da9, a0954a1dc0,
cc1bf74370, 2f7908773a, 0efd02979e, bd9776c4b7, 35e9da83ec, 4f5ca0bca9, 43f314b2b5, 4bdf579ce2, aba3039eef, bbff791c65,
1ed50b3081, 67336a24e7, 48174c327d, 43f2539b42, 462a7a722a, 4101bb252e, 4ff4e4827e, 8daa92ad49, 371e83bff9, 6fa0227a1e,
c38e4ce265, de06ed148d, 21bcd62ba8, 76c034f79a, d8d394ce40, 213f4ce92f, 2d1726763f, abfd9b3cbd, 6114f6a7fd, 61e674e9f6,
006cc8c52a, ffe7fdcb46, 49cceffe1b, 011782395a, 558009543f, aaef5bde2c, f52f595d56, 50196d8430, 2ecfbfbb42, 9508dffe6d,
b4a72bbcab, 4ceaaf92cc, ef28c9531b, c86c602e39, 3bee2905e5, 9ac8a15cd5, 81b8104064, 65f58b1f98, 7e872aa6d6, 0383a91a68,
bb6ebe5750, 71aa3d145f, 2f1f80029b, ad18cfa284, 92ed1c6ac9, e71e04a8f1, ef127c279c, 4afac5fa4d, 29ae97aa82, 9319d7e8ef,
689db3713b, 0b9fcb884b, 23e29a1fdc, 90d636a026, 3e3e12afb9, 421f5d23ec, 0fa91f32cb, 3d21b203be, 3972b8f4c1, 2d1707db00,
72aa4f0561, fd57cc95e9, 04c392be7e, 38da598214
.github/dependabot.yml (vendored, new file, 22 lines)

@@ -0,0 +1,22 @@
version: 2
updates:
  - package-ecosystem: "npm"
    directory: "extensions/ql-vscode"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
  - package-ecosystem: "github-actions"
    directory: ".github"
    schedule:
      interval: "weekly"
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
.github/workflows/main.yml (vendored, 2 changed lines)

@@ -135,7 +135,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest]
-        version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.3', 'nightly']
+        version: ['v2.3.3', 'v2.4.6', 'v2.5.9', 'v2.6.3', 'v2.7.6', 'v2.8.4', 'nightly']
     env:
       CLI_VERSION: ${{ matrix.version }}
       NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
@@ -124,6 +124,7 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
 1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
 1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
    or look at the source if there's any doubt the right code is being shipped.
 1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
 1. Go to the actions tab of the vscode-codeql repository and select the [Release workflow](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease).
+   - If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
 1. Approve the deployments of the correct Release workflow. This will automatically publish to Open VSX and VS Code Marketplace.

@@ -143,12 +144,7 @@ To regenerate the Open VSX token:
 1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
 1. Update the secret in the `publish-open-vsx` environment in the project settings.

-To regenerate the VSCode Marketplace token:
-
-1. Follow the instructions on [getting a PAT for Azure DevOps](https://code.visualstudio.com/api/working-with-extensions/publishing-extension#get-a-personal-access-token).
-1. Update the secret in the `publish-vscode-marketplace` environment in the project settings.
-
-Not that Azure DevOps PATs expire yearly and must be regenerated.
+To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 90 days and must be regenerated.

 ## Resources
@@ -1,5 +1,17 @@
# CodeQL for Visual Studio Code: Changelog

## 1.6.4 - 6 April 2022

No user facing changes.

## 1.6.3 - 4 April 2022

- Fix a bug where the AST viewer was not synchronizing its selected node when the editor selection changes. [#1230](https://github.com/github/vscode-codeql/pull/1230)
- Avoid synchronizing the `codeQL.cli.executablePath` setting. [#1252](https://github.com/github/vscode-codeql/pull/1252)
- Open the directory in the finder/explorer (instead of just highlighting it) when running the "Open query directory" command from the query history view. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Ensure query label in the query history view changes are persisted across restarts. [#1235](https://github.com/github/vscode-codeql/pull/1235)
- Prints end-of-query evaluator log summaries to the Query Server Console. [#1264](https://github.com/github/vscode-codeql/pull/1264)

## 1.6.1 - 17 March 2022

No user facing changes.

@@ -11,6 +23,7 @@ No user facing changes.
- Fix a bug where queries took a long time to run if there are no folders in the workspace. [#1157](https://github.com/github/vscode-codeql/pull/1157)
- [BREAKING CHANGE] The `codeQL.runningQueries.customLogDirectory` setting is deprecated and no longer has any function. Instead, all query log files will be stored in the query history directory, next to the query results. [#1178](https://github.com/github/vscode-codeql/pull/1178)
- Add a _Open query directory_ command for query items. This command opens the directory containing all artifacts for a query. [#1179](https://github.com/github/vscode-codeql/pull/1179)
- Add options to display evaluator logs for a given query run. Some information that was previously found in the query server output may now be found here. [#1186](https://github.com/github/vscode-codeql/pull/1186)

## 1.5.11 - 10 February 2022
@@ -1,5 +1,6 @@
 import * as gulp from 'gulp';
-import * as replace from 'gulp-replace';
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const replace = require('gulp-replace');

 /** Inject the application insights key into the telemetry file */
 export function injectAppInsightsKey() {
@@ -1,8 +1,8 @@
 import * as gulp from 'gulp';
-import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput } from './typescript';
+import { compileTypeScript, watchTypeScript, copyViewCss, cleanOutput, watchCss } from './typescript';
 import { compileTextMateGrammar } from './textmate';
 import { copyTestData } from './tests';
-import { compileView } from './webpack';
+import { compileView, watchView } from './webpack';
 import { packageExtension } from './package';
 import { injectAppInsightsKey } from './appInsights';

@@ -14,5 +14,15 @@ export const buildWithoutPackage =
   )
 );

-export { cleanOutput, compileTextMateGrammar, watchTypeScript, compileTypeScript, copyTestData, injectAppInsightsKey };
+export {
+  cleanOutput,
+  compileTextMateGrammar,
+  watchTypeScript,
+  watchView,
+  compileTypeScript,
+  copyTestData,
+  injectAppInsightsKey,
+  compileView,
+  watchCss
+};
 export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
@@ -16,7 +16,8 @@
     "noImplicitReturns": true,
     "experimentalDecorators": true,
     "noUnusedLocals": true,
-    "noUnusedParameters": true
+    "noUnusedParameters": true,
+    "esModuleInterop": true
   },
   "include": ["*.ts"]
 }
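For context (this explanatory sketch is not part of the diff): enabling `esModuleInterop` in the gulpfile's tsconfig changes how CommonJS packages such as `gulp-replace` may be imported by the gulp build scripts. A minimal illustration, assuming a CommonJS module:

```typescript
// With "esModuleInterop": true, a CommonJS export can be consumed as a default import:
import replace from 'gulp-replace';

// Without the flag, only the namespace form type-checks:
// import * as replace from 'gulp-replace';
```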
@@ -40,6 +40,10 @@ export function watchTypeScript() {
   gulp.watch('src/**/*.ts', compileTypeScript);
 }

+export function watchCss() {
+  gulp.watch('src/**/*.css', copyViewCss);
+}
+
 /** Copy CSS files for the results view into the output directory. */
 export function copyViewCss() {
   return gulp.src('src/**/view/*.css')
@@ -2,7 +2,23 @@ import * as webpack from 'webpack';
 import { config } from './webpack.config';

 export function compileView(cb: (err?: Error) => void) {
-  webpack(config).run((error, stats) => {
+  doWebpack(config, true, cb);
+}
+
+export function watchView(cb: (err?: Error) => void) {
+  const watchConfig = {
+    ...config,
+    watch: true,
+    watchOptions: {
+      aggregateTimeout: 200,
+      poll: 1000,
+    }
+  };
+  doWebpack(watchConfig, false, cb);
+}
+
+function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
+  const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
     if (error) {
       cb(error);
     }

@@ -20,11 +36,16 @@ export function compileView(cb: (err?: Error) => void) {
       errors: true
     }));
     if (stats.hasErrors()) {
-      cb(new Error('Compilation errors detected.'));
-      return;
+      if (failOnError) {
+        cb(new Error('Compilation errors detected.'));
+        return;
+      } else {
+        console.error('Compilation errors detected.');
+      }
     }
     cb();
   }
   };

   cb();
   });
   webpack(internalConfig, resultCb);
 }
extensions/ql-vscode/media/dark/github.svg (new file, 4 lines)

@@ -0,0 +1,4 @@
<!-- From https://github.com/microsoft/vscode-icons -->
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97553 0C3.57186 0 0 3.57186 0 7.97553C0 11.4985 2.29969 14.4832 5.43119 15.5596C5.82263 15.6086 5.96942 15.3639 5.96942 15.1682C5.96942 14.9725 5.96942 14.4832 5.96942 13.7982C3.76758 14.2875 3.27829 12.7217 3.27829 12.7217C2.93578 11.792 2.39755 11.5474 2.39755 11.5474C1.66361 11.0581 2.44648 11.0581 2.44648 11.0581C3.22936 11.107 3.66972 11.8899 3.66972 11.8899C4.40367 13.1131 5.52905 12.7706 5.96942 12.5749C6.01835 12.0367 6.263 11.6942 6.45872 11.4985C4.69725 11.3028 2.83792 10.6177 2.83792 7.53517C2.83792 6.65443 3.1315 5.96942 3.66972 5.38226C3.62079 5.23547 3.32722 4.40367 3.76758 3.32722C3.76758 3.32722 4.4526 3.1315 5.96942 4.15902C6.6055 3.9633 7.29052 3.91437 7.97553 3.91437C8.66055 3.91437 9.34557 4.01223 9.98165 4.15902C11.4985 3.1315 12.1835 3.32722 12.1835 3.32722C12.6239 4.40367 12.3303 5.23547 12.2813 5.43119C12.7706 5.96942 13.1131 6.70336 13.1131 7.5841C13.1131 10.6667 11.2538 11.3028 9.49235 11.4985C9.78593 11.7431 10.0306 12.2324 10.0306 12.9664C10.0306 14.0428 10.0306 14.8746 10.0306 15.1682C10.0306 15.3639 10.1774 15.6086 10.5688 15.5596C13.7492 14.4832 16 11.4985 16 7.97553C15.9511 3.57186 12.3792 0 7.97553 0Z" fill="#C5C5C5"/>
</svg>
extensions/ql-vscode/media/light/github.svg (new file, 11 lines)

@@ -0,0 +1,11 @@
<!-- From https://github.com/microsoft/vscode-icons -->
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97578 0C3.57211 0 0.000244141 3.57186 0.000244141 7.97553C0.000244141 11.4985 2.29994 14.4832 5.43144 15.5596C5.82287 15.6086 5.96966 15.3639 5.96966 15.1682C5.96966 14.9725 5.96966 14.4832 5.96966 13.7982C3.76783 14.2875 3.27853 12.7217 3.27853 12.7217C2.93602 11.792 2.3978 11.5474 2.3978 11.5474C1.66385 11.0581 2.44673 11.0581 2.44673 11.0581C3.2296 11.107 3.66997 11.8899 3.66997 11.8899C4.40391 13.1131 5.5293 12.7706 5.96966 12.5749C6.01859 12.0367 6.26324 11.6942 6.45896 11.4985C4.69749 11.3028 2.83816 10.6177 2.83816 7.53517C2.83816 6.65443 3.13174 5.96942 3.66997 5.38226C3.62104 5.23547 3.32746 4.40367 3.76783 3.32722C3.76783 3.32722 4.45284 3.1315 5.96966 4.15902C6.60575 3.9633 7.29076 3.91437 7.97578 3.91437C8.66079 3.91437 9.34581 4.01223 9.98189 4.15902C11.4987 3.1315 12.1837 3.32722 12.1837 3.32722C12.6241 4.40367 12.3305 5.23547 12.2816 5.43119C12.7709 5.96942 13.1134 6.70336 13.1134 7.5841C13.1134 10.6667 11.2541 11.3028 9.4926 11.4985C9.78618 11.7431 10.0308 12.2324 10.0308 12.9664C10.0308 14.0428 10.0308 14.8746 10.0308 15.1682C10.0308 15.3639 10.1776 15.6086 10.5691 15.5596C13.7495 14.4832 16.0002 11.4985 16.0002 7.97553C15.9513 3.57186 12.3794 0 7.97578 0Z" fill="#424242"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="16" height="16" fill="white" transform="translate(0.000244141)"/>
</clipPath>
</defs>
</svg>
extensions/ql-vscode/package-lock.json (generated, 2052 changed lines): diff not shown because it is too large.
@@ -4,7 +4,7 @@
   "description": "CodeQL for Visual Studio Code",
   "author": "GitHub",
   "private": true,
-  "version": "1.6.1",
+  "version": "1.6.4",
   "publisher": "GitHub",
   "license": "MIT",
   "icon": "media/VS-marketplace-CodeQL-icon.png",

@@ -14,8 +14,8 @@
   },
   "engines": {
     "vscode": "^1.59.0",
-    "node": "^14.17.1",
-    "npm": "^7.20.6"
+    "node": ">=14.17.1",
+    "npm": ">=7.20.6"
   },
   "categories": [
     "Programming Languages"
@@ -45,6 +45,7 @@
|
||||
"onCommand:codeQLDatabases.chooseDatabaseFolder",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseArchive",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseInternet",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseGithub",
|
||||
"onCommand:codeQLDatabases.chooseDatabaseLgtm",
|
||||
"onCommand:codeQL.setCurrentDatabase",
|
||||
"onCommand:codeQL.viewAst",
|
||||
@@ -54,6 +55,7 @@
|
||||
"onCommand:codeQL.chooseDatabaseFolder",
|
||||
"onCommand:codeQL.chooseDatabaseArchive",
|
||||
"onCommand:codeQL.chooseDatabaseInternet",
|
||||
"onCommand:codeQL.chooseDatabaseGithub",
|
||||
"onCommand:codeQL.chooseDatabaseLgtm",
|
||||
"onCommand:codeQLDatabases.chooseDatabase",
|
||||
"onCommand:codeQLDatabases.setCurrentDatabase",
|
||||
@@ -134,7 +136,7 @@
|
||||
"title": "CodeQL",
|
||||
"properties": {
|
||||
"codeQL.cli.executablePath": {
|
||||
"scope": "window",
|
||||
"scope": "machine-overridable",
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"markdownDescription": "Path to the CodeQL executable that should be used by the CodeQL extension. The executable is named `codeql` on Linux/Mac and `codeql.exe` on Windows. If empty, the extension will look for a CodeQL executable on your shell PATH, or if CodeQL is not on your PATH, download and manage its own CodeQL executable."
|
||||
@@ -356,6 +358,14 @@
|
||||
"dark": "media/dark/cloud-download.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"title": "Download Database from GitHub",
|
||||
"icon": {
|
||||
"light": "media/light/github.svg",
|
||||
"dark": "media/dark/github.svg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"title": "Download from LGTM",
|
||||
@@ -428,6 +438,10 @@
|
||||
"command": "codeQL.chooseDatabaseInternet",
|
||||
"title": "CodeQL: Download Database"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"title": "CodeQL: Download Database from GitHub"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseLgtm",
|
||||
"title": "CodeQL: Download Database from LGTM"
|
||||
@@ -508,6 +522,14 @@
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"title": "Open query directory"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"title": "Show Evaluator Log (Raw)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"title": "Show Evaluator Log (Summary)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.cancel",
|
||||
"title": "Cancel"
|
||||
@@ -604,6 +626,11 @@
|
||||
"when": "view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary && view == codeQLDatabases",
|
||||
"group": "navigation"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "view == codeQLDatabases",
|
||||
@@ -706,6 +733,16 @@
|
||||
"group": "9_qlCommands",
|
||||
"when": "view == codeQLQueryHistory && !hasRemoteServer"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && (viewItem == rawResultsItem || viewItem == interpretedResultsItem || viewItem == cancelledResultsItem)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"group": "9_qlCommands",
|
||||
"when": "codeql.supportsEvalLog && (viewItem == rawResultsItem || viewItem == interpretedResultsItem || viewItem == cancelledResultsItem)"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showQueryText",
|
||||
"group": "9_qlCommands",
|
||||
@@ -829,6 +866,10 @@
|
||||
"command": "codeQL.viewCfg",
|
||||
"when": "resourceScheme == codeql-zip-archive && config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQL.chooseDatabaseGithub",
|
||||
"when": "config.codeQL.canary"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.setCurrentDatabase",
|
||||
"when": "false"
|
||||
@@ -873,6 +914,10 @@
|
||||
"command": "codeQLDatabases.chooseDatabaseInternet",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseGithub",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLDatabases.chooseDatabaseLgtm",
|
||||
"when": "false"
|
||||
@@ -897,6 +942,14 @@
|
||||
"command": "codeQLQueryHistory.showQueryLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLog",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.showEvalLogSummary",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "codeQLQueryHistory.openQueryDirectory",
|
||||
"when": "false"
|
||||
@@ -1045,6 +1098,8 @@
|
||||
"build": "gulp",
|
||||
"watch": "npm-run-all -p watch:*",
|
||||
"watch:extension": "tsc --watch",
|
||||
"watch:webpack": "gulp watchView",
|
||||
"watch:css": "gulp watchCss",
|
||||
"test": "mocha --exit -r ts-node/register test/pure-tests/**/*.ts",
|
||||
"preintegration": "rm -rf ./out/vscode-tests && gulp",
|
||||
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
|
||||
@@ -1062,16 +1117,17 @@
|
||||
"classnames": "~2.2.6",
|
||||
"d3": "^6.3.1",
|
||||
"d3-graphviz": "^2.6.1",
|
||||
"fs-extra": "^9.0.1",
|
||||
"fs-extra": "^10.0.1",
|
||||
"glob-promise": "^3.4.0",
|
||||
"js-yaml": "^3.14.0",
|
||||
"minimist": "~1.2.5",
|
||||
"minimist": "~1.2.6",
|
||||
"nanoid": "^3.2.0",
|
||||
"node-fetch": "~2.6.7",
|
||||
"path-browserify": "^1.0.1",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2",
|
||||
"semver": "~7.3.2",
|
||||
"source-map-support": "^0.5.21",
|
||||
"stream": "^0.0.2",
|
||||
"stream-chain": "~2.2.4",
|
||||
"stream-json": "~1.7.3",
|
||||
@@ -1092,14 +1148,14 @@
|
||||
"@types/chai-as-promised": "~7.1.2",
|
||||
"@types/child-process-promise": "^2.2.1",
|
||||
"@types/classnames": "~2.2.9",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/d3": "^6.2.0",
|
||||
"@types/d3-graphviz": "^2.6.6",
|
||||
"@types/del": "^4.0.0",
|
||||
"@types/fs-extra": "^9.0.6",
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/google-protobuf": "^3.2.7",
|
||||
"@types/gulp": "^4.0.9",
|
||||
"@types/gulp-replace": "0.0.31",
|
||||
"@types/gulp-replace": "^1.1.0",
|
||||
"@types/gulp-sourcemaps": "0.0.32",
|
||||
"@types/js-yaml": "^3.12.5",
|
||||
"@types/jszip": "~3.1.6",
|
||||
@@ -1120,7 +1176,7 @@
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/unzipper": "~0.10.1",
|
||||
"@types/vscode": "^1.59.0",
|
||||
"@types/webpack": "^4.32.1",
|
||||
"@types/webpack": "^5.28.0",
|
||||
"@types/xml2js": "~0.4.4",
|
||||
"@typescript-eslint/eslint-plugin": "^4.26.0",
|
||||
"@typescript-eslint/parser": "^4.26.0",
|
||||
@@ -1134,8 +1190,8 @@
|
||||
"eslint-plugin-react": "~7.19.0",
|
||||
"glob": "^7.1.4",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-replace": "^1.0.0",
|
||||
"gulp-sourcemaps": "^2.6.5",
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-sourcemaps": "^3.0.0",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "~4.2.5",
|
||||
"jsonc-parser": "^2.3.0",
|
||||
@@ -1145,16 +1201,16 @@
|
||||
"npm-run-all": "^4.1.5",
|
||||
"prettier": "~2.0.5",
|
||||
"proxyquire": "~2.1.3",
|
||||
"sinon": "~9.0.0",
|
||||
"sinon": "~13.0.1",
|
||||
"sinon-chai": "~3.5.0",
|
||||
"style-loader": "~0.23.1",
|
||||
"through2": "^3.0.1",
|
||||
"through2": "^4.0.2",
|
||||
"ts-loader": "^8.1.0",
|
||||
"ts-node": "^8.3.0",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-protoc-gen": "^0.9.0",
|
||||
"typescript": "^4.3.2",
|
||||
"typescript": "^4.5.5",
|
||||
"typescript-formatter": "^7.2.2",
|
||||
"vsce": "^1.65.0",
|
||||
"vsce": "^2.7.0",
|
||||
"vscode-test": "^1.4.0",
|
||||
"webpack": "^5.28.0",
|
||||
"webpack-cli": "^4.6.0"
|
||||
|
||||
@@ -10,7 +10,8 @@ import {
|
||||
TextEditorSelectionChangeEvent,
|
||||
TextEditorSelectionChangeKind,
|
||||
Location,
|
||||
Range
|
||||
Range,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -104,7 +105,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
|
||||
export class AstViewer extends DisposableObject {
|
||||
private treeView: TreeView<AstItem>;
|
||||
private treeDataProvider: AstViewerDataProvider;
|
||||
private currentFile: string | undefined;
|
||||
private currentFileUri: Uri | undefined;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
@@ -125,12 +126,12 @@ export class AstViewer extends DisposableObject {
|
||||
this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
|
||||
}
|
||||
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileName: string) {
|
||||
updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
|
||||
this.treeDataProvider.roots = roots;
|
||||
this.treeDataProvider.db = db;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = `AST for ${path.basename(fileName)}`;
|
||||
this.currentFile = fileName;
|
||||
this.treeView.message = `AST for ${path.basename(fileUri.fsPath)}`;
|
||||
this.currentFileUri = fileUri;
|
||||
// Handle error on reveal. This could happen if
|
||||
// the tree view is disposed during the reveal.
|
||||
this.treeView.reveal(roots[0], { focus: false })?.then(
|
||||
@@ -174,7 +175,7 @@ export class AstViewer extends DisposableObject {
|
||||
|
||||
if (
|
||||
this.treeView.visible &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFile &&
|
||||
e.textEditor.document.uri.fsPath === this.currentFileUri?.fsPath &&
|
||||
e.selections.length === 1
|
||||
) {
|
||||
const selection = e.selections[0];
|
||||
@@ -199,6 +200,6 @@ export class AstViewer extends DisposableObject {
|
||||
this.treeDataProvider.db = undefined;
|
||||
this.treeDataProvider.refresh();
|
||||
this.treeView.message = undefined;
|
||||
this.currentFile = undefined;
|
||||
this.currentFileUri = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
 import * as semver from 'semver';
 import { runCodeQlCliCommand } from './cli';
 import { Logger } from './logging';
+import { getErrorMessage } from './pure/helpers-pure';

 /**
  * Get the version of a CodeQL CLI.

@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): P
   } catch (e) {
     // Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
     // Either way, we can't determine compatibility.
-    void logger.log(`Failed to run 'codeql version'. Reason: ${e.message}`);
+    void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
     return undefined;
   }
 }
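Many hunks in this comparison replace direct uses of `e.message` and `e.stack` with `getErrorMessage(e)`, `getErrorStack(e)` and `asError(e)` from `pure/helpers-pure.ts`; this is the usual pattern for handling `catch` variables that TypeScript types as `unknown`. The helper implementations themselves are not captured in this diff view, so the following is only a plausible sketch of what they might look like, not the actual code:

```typescript
// Hypothetical sketch of the error helpers referenced throughout this PR.
// Assumption: they narrow an unknown catch value to something printable.
export function asError(e: unknown): Error {
  return e instanceof Error ? e : new Error(String(e));
}

export function getErrorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}

export function getErrorStack(e: unknown): string {
  return e instanceof Error ? e.stack ?? '' : '';
}
```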
@@ -8,12 +8,12 @@ import { Readable } from 'stream';
|
||||
import { StringDecoder } from 'string_decoder';
|
||||
import * as tk from 'tree-kill';
|
||||
import { promisify } from 'util';
|
||||
import { CancellationToken, Disposable, Uri } from 'vscode';
|
||||
import { CancellationToken, commands, Disposable, Uri } from 'vscode';
|
||||
|
||||
import { BQRSInfo, DecodedBqrsChunk } from './pure/bqrs-cli-types';
|
||||
import { CliConfig } from './config';
|
||||
import { DistributionProvider, FindDistributionResultKind } from './distribution';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { QueryMetadata, SortDirection } from './pure/interface-types';
|
||||
import { Logger, ProgressReporter } from './logging';
|
||||
import { CompilationMessage } from './pure/messages';
|
||||
@@ -346,7 +346,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
stderrBuffers.length == 0
|
||||
? new Error(`${description} failed: ${err}`)
|
||||
: new Error(`${description} failed: ${Buffer.concat(stderrBuffers).toString('utf8')}`);
|
||||
newError.stack += (err.stack || '');
|
||||
newError.stack += getErrorStack(err);
|
||||
throw newError;
|
||||
} finally {
|
||||
void this.logger.log(Buffer.concat(stderrBuffers).toString('utf8'));
|
||||
@@ -448,7 +448,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
yield JSON.parse(event) as EventType;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -503,7 +503,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
try {
|
||||
return JSON.parse(result) as OutputType;
|
||||
} catch (err) {
|
||||
throw new Error(`Parsing output of ${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`Parsing output of ${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -665,6 +665,26 @@ export class CodeQLCliServer implements Disposable {
|
||||
return await this.runCodeQlCliCommand(['generate', 'query-help'], subcommandArgs, `Generating qhelp in markdown format at ${outputDirectory}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a summary of an evaluation log.
|
||||
* @param endSummaryPath The path to write only the end of query part of the human-readable summary to.
|
||||
* @param inputPath The path of an evaluation event log.
|
||||
* @param outputPath The path to write a human-readable summary of it to.
|
||||
*/
|
||||
async generateLogSummary(
|
||||
inputPath: string,
|
||||
outputPath: string,
|
||||
endSummaryPath: string,
|
||||
): Promise<string> {
|
||||
const subcommandArgs = [
|
||||
'--format=text',
|
||||
`--end-summary=${endSummaryPath}`,
|
||||
inputPath,
|
||||
outputPath
|
||||
];
|
||||
return await this.runCodeQlCliCommand(['generate', 'log-summary'], subcommandArgs, 'Generating log summary');
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the results from a bqrs.
|
||||
* @param bqrsPath The path to the bqrs.
|
||||
@@ -751,7 +771,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
const dot = await this.readDotFiles(interpretedResultsPath);
|
||||
return dot;
|
||||
} catch (err) {
|
||||
throw new Error(`Reading output of interpretation failed: ${err.stderr || err}`);
|
||||
throw new Error(`Reading output of interpretation failed: ${getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -940,6 +960,10 @@ export class CodeQLCliServer implements Disposable {
|
||||
public async getVersion() {
|
||||
if (!this._version) {
|
||||
this._version = await this.refreshVersion();
|
||||
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
|
||||
await commands.executeCommand(
|
||||
'setContext', 'codeql.supportsEvalLog', await this.cliConstraints.supportsPerQueryEvalLog()
|
||||
);
|
||||
}
|
||||
return this._version;
|
||||
}
|
||||
@@ -1050,7 +1074,7 @@ export async function runCodeQlCliCommand(
|
||||
void logger.log('CLI command succeeded.');
|
||||
return result.stdout;
|
||||
} catch (err) {
|
||||
throw new Error(`${description} failed: ${err.stderr || err}`);
|
||||
throw new Error(`${description} failed: ${(err as any).stderr || getErrorMessage(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1106,8 +1130,8 @@ class SplitBuffer {
|
||||
while (this.searchIndex <= (this.buffer.length - this.maxSeparatorLength)) {
|
||||
for (const separator of this.separators) {
|
||||
if (SplitBuffer.startsWith(this.buffer, separator, this.searchIndex)) {
|
||||
const line = this.buffer.substr(0, this.searchIndex);
|
||||
this.buffer = this.buffer.substr(this.searchIndex + separator.length);
|
||||
const line = this.buffer.slice(0, this.searchIndex);
|
||||
this.buffer = this.buffer.slice(this.searchIndex + separator.length);
|
||||
this.searchIndex = 0;
|
||||
return line;
|
||||
}
|
||||
@@ -1256,6 +1280,17 @@ export class CliVersionConstraint {
    */
   public static CLI_VERSION_WITH_STRUCTURED_EVAL_LOG = new SemVer('2.8.2');

+  /**
+   * CLI version that supports rotating structured logs to produce one per query.
+   *
+   * Note that 2.8.4 supports generating the evaluation logs and summaries,
+   * but 2.9.0 includes a new option to produce the end-of-query summary logs to
+   * the query server console. For simplicity we gate all features behind 2.9.0,
+   * but if a user is tied to the 2.8 release, we can enable evaluator logs
+   * and summaries for them.
+   */
+  public static CLI_VERSION_WITH_PER_QUERY_EVAL_LOG = new SemVer('2.9.0');
+
   constructor(private readonly cli: CodeQLCliServer) {
     /**/
   }

@@ -1315,4 +1350,8 @@ export class CliVersionConstraint {
   async supportsStructuredEvalLog() {
     return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_STRUCTURED_EVAL_LOG);
   }
+
+  async supportsPerQueryEvalLog() {
+    return this.isVersionAtLeast(CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG);
+  }
 }
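The new `supportsPerQueryEvalLog()` check is what the `codeql.supportsEvalLog` context key (set in the `getVersion()` hunk earlier in this file's diff) is based on. `isVersionAtLeast` itself is not shown in this comparison; a minimal sketch of how such a gate is typically implemented against the CLI's reported version, under that assumption:

```typescript
import { SemVer } from 'semver';

// Hypothetical sketch: compare the CLI's reported version against a feature threshold.
async function isVersionAtLeast(getVersion: () => Promise<SemVer>, threshold: SemVer): Promise<boolean> {
  const current = await getVersion();
  return current.compare(threshold) >= 0; // compare() returns -1, 0 or 1
}
```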
@@ -8,6 +8,7 @@ import {
|
||||
} from 'vscode';
|
||||
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
|
||||
import { logger } from './logging';
|
||||
import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { telemetryListener } from './telemetry';
|
||||
|
||||
export class UserCancellationException extends Error {
|
||||
@@ -121,8 +122,9 @@ export function commandRunner(
|
||||
try {
|
||||
return await task(...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
@@ -132,8 +134,8 @@ export function commandRunner(
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
fullMessage
|
||||
@@ -173,8 +175,9 @@ export function commandRunnerWithProgress<R>(
|
||||
try {
|
||||
return await withProgress(progressOptionsWithDefaults, task, ...args);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
const errorMessage = `${e.message || e} (${commandId})`;
|
||||
const errorMessage = `${getErrorMessage(e) || e} (${commandId})`;
|
||||
error = e instanceof Error ? e : new Error(errorMessage);
|
||||
const errorStack = getErrorStack(e);
|
||||
if (e instanceof UserCancellationException) {
|
||||
// User has cancelled this action manually
|
||||
if (e.silent) {
|
||||
@@ -184,8 +187,8 @@ export function commandRunnerWithProgress<R>(
|
||||
}
|
||||
} else {
|
||||
// Include the full stack in the error log only.
|
||||
const fullMessage = e.stack
|
||||
? `${errorMessage}\n${e.stack}`
|
||||
const fullMessage = errorStack
|
||||
? `${errorMessage}\n${errorStack}`
|
||||
: errorMessage;
|
||||
void showAndLogErrorMessage(errorMessage, {
|
||||
outputLogger,
|
||||
|
||||
@@ -21,6 +21,7 @@ import { getHtmlForWebview, jumpToLocation } from '../interface-utils';
|
||||
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
|
||||
import resultsDiff from './resultsDiff';
|
||||
import { CompletedLocalQueryInfo } from '../query-results';
|
||||
import { getErrorMessage } from '../pure/helpers-pure';
|
||||
|
||||
interface ComparePair {
|
||||
from: CompletedLocalQueryInfo;
|
||||
@@ -70,7 +71,7 @@ export class CompareInterfaceManager extends DisposableObject {
|
||||
try {
|
||||
rows = this.compareResults(fromResultSet, toResultSet);
|
||||
} catch (e) {
|
||||
message = e.message;
|
||||
message = getErrorMessage(e);
|
||||
}
|
||||
|
||||
await this.postMessage({
|
||||
|
||||
@@ -357,3 +357,14 @@ export function getRemoteControllerRepo(): string | undefined {
 export async function setRemoteControllerRepo(repo: string | undefined) {
   await REMOTE_CONTROLLER_REPO.updateValue(repo, ConfigurationTarget.Global);
 }
+
+/**
+ * The branch of "github/codeql-variant-analysis-action" to use with the "Run Variant Analysis" command.
+ * Default value is "main".
+ * Note: This command is only available for internal users.
+ */
+const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);
+
+export function getActionBranch(): string {
+  return ACTION_BRANCH.getValue<string>() || 'main';
+}
@@ -4,6 +4,7 @@ import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
|
||||
import { DatabaseItem } from '../databases';
|
||||
import { ChildAstItem, AstItem } from '../astViewer';
|
||||
import fileRangeFromURI from './fileRangeFromURI';
|
||||
import { Uri } from 'vscode';
|
||||
|
||||
/**
|
||||
* A class that wraps a tree of QL results from a query that
|
||||
@@ -17,7 +18,7 @@ export default class AstBuilder {
|
||||
queryResults: QueryWithResults,
|
||||
private cli: CodeQLCliServer,
|
||||
public db: DatabaseItem,
|
||||
public fileName: string
|
||||
public fileName: Uri
|
||||
) {
|
||||
this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
|
||||
}
|
||||
|
||||
@@ -10,7 +10,6 @@ import {
|
||||
TextDocument,
|
||||
Uri
|
||||
} from 'vscode';
|
||||
import * as path from 'path';
|
||||
|
||||
import { decodeSourceArchiveUri, encodeArchiveBasePath, zipArchiveScheme } from '../archive-filesystem-provider';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
@@ -160,7 +159,7 @@ export class TemplatePrintAstProvider {
|
||||
return new AstBuilder(
|
||||
query, this.cli,
|
||||
this.dbm.findDatabaseItem(dbUri)!,
|
||||
path.basename(fileUri.fsPath),
|
||||
fileUri,
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,8 @@ import {
|
||||
} from './commandRunner';
|
||||
import { logger } from './logging';
|
||||
import { tmpDir } from './helpers';
|
||||
import { Credentials } from './authentication';
|
||||
import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
|
||||
@@ -46,6 +48,7 @@ export async function promptImportInternetDatabase(
|
||||
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
progress,
|
||||
@@ -61,6 +64,79 @@ export async function promptImportInternetDatabase(
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from GitHub.
|
||||
* User enters a GitHub repository and then the user is asked which language
|
||||
* to download (if there is more than one)
|
||||
*
|
||||
* @param databaseManager the DatabaseManager
|
||||
* @param storagePath where to store the unzipped database.
|
||||
*/
|
||||
export async function promptImportGithubDatabase(
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken,
|
||||
cli?: CodeQLCliServer
|
||||
): Promise<DatabaseItem | undefined> {
|
||||
progress({
|
||||
message: 'Choose repository',
|
||||
step: 1,
|
||||
maxStep: 2
|
||||
});
|
||||
const githubRepo = await window.showInputBox({
|
||||
title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
|
||||
placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!githubRepo) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!looksLikeGithubRepo(githubRepo)) {
|
||||
throw new Error(`Invalid GitHub repository: ${githubRepo}`);
|
||||
}
|
||||
|
||||
const databaseUrl = await convertGithubNwoToDatabaseUrl(githubRepo, credentials, progress);
|
||||
if (!databaseUrl) {
|
||||
return;
|
||||
}
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
/**
|
||||
* The 'token' property of the token object returned by `octokit.auth()`.
|
||||
* The object is undocumented, but looks something like this:
|
||||
* {
|
||||
* token: 'xxxx',
|
||||
* tokenType: 'oauth',
|
||||
* type: 'token',
|
||||
* }
|
||||
* We only need the actual token string.
|
||||
*/
|
||||
const octokitToken = (await octokit.auth() as { token: string })?.token;
|
||||
if (!octokitToken) {
|
||||
// Just print a generic error message for now. Ideally we could show more debugging info, like the
|
||||
// octokit object, but that would expose a user token.
|
||||
throw new Error('Unable to get GitHub token.');
|
||||
}
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{ 'Accept': 'application/zip', 'Authorization': `Bearer ${octokitToken}` },
|
||||
databaseManager,
|
||||
storagePath,
|
||||
progress,
|
||||
token,
|
||||
cli
|
||||
);
|
||||
if (item) {
|
||||
await commands.executeCommand('codeQLDatabases.focus');
|
||||
void showAndLogInformationMessage('Database downloaded and imported successfully.');
|
||||
return item;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts a user to fetch a database from lgtm.
|
||||
* User enters a project url and then the user is asked which language
|
||||
@@ -90,10 +166,11 @@ export async function promptImportLgtmDatabase(
|
||||
}
|
||||
|
||||
if (looksLikeLgtmUrl(lgtmUrl)) {
|
||||
const databaseUrl = await convertToDatabaseUrl(lgtmUrl, progress);
|
||||
const databaseUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progress);
|
||||
if (databaseUrl) {
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
progress,
|
||||
@@ -140,6 +217,7 @@ export async function importArchiveDatabase(
|
||||
try {
|
||||
const item = await databaseArchiveFetcher(
|
||||
databaseUrl,
|
||||
{},
|
||||
databaseManager,
|
||||
storagePath,
|
||||
progress,
|
||||
@@ -152,7 +230,7 @@ export async function importArchiveDatabase(
|
||||
}
|
||||
return item;
|
||||
} catch (e) {
|
||||
if (e.message.includes('unexpected end of file')) {
|
||||
if (getErrorMessage(e).includes('unexpected end of file')) {
|
||||
throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
|
||||
} else {
|
||||
// delegate
|
||||
@@ -166,6 +244,7 @@ export async function importArchiveDatabase(
|
||||
* or in the local filesystem.
|
||||
*
|
||||
* @param databaseUrl URL from which to grab the database
|
||||
* @param requestHeaders Headers to send with the request
|
||||
* @param databaseManager the DatabaseManager
|
||||
* @param storagePath where to store the unzipped database.
|
||||
* @param progress callback to send progress messages to
|
||||
@@ -173,6 +252,7 @@ export async function importArchiveDatabase(
|
||||
*/
|
||||
async function databaseArchiveFetcher(
|
||||
databaseUrl: string,
|
||||
requestHeaders: { [key: string]: string },
|
||||
databaseManager: DatabaseManager,
|
||||
storagePath: string,
|
||||
progress: ProgressCallback,
|
||||
@@ -193,7 +273,7 @@ async function databaseArchiveFetcher(
|
||||
if (isFile(databaseUrl)) {
|
||||
await readAndUnzip(databaseUrl, unzipPath, cli, progress);
|
||||
} else {
|
||||
await fetchAndUnzip(databaseUrl, unzipPath, cli, progress);
|
||||
await fetchAndUnzip(databaseUrl, requestHeaders, unzipPath, cli, progress);
|
||||
}
|
||||
|
||||
progress({
|
||||
@@ -292,6 +372,7 @@ async function readAndUnzip(
|
||||
|
||||
async function fetchAndUnzip(
|
||||
databaseUrl: string,
|
||||
requestHeaders: { [key: string]: string },
|
||||
unzipPath: string,
|
||||
cli?: CodeQLCliServer,
|
||||
progress?: ProgressCallback
|
||||
@@ -310,7 +391,10 @@ async function fetchAndUnzip(
|
||||
step: 1,
|
||||
});
|
||||
|
||||
const response = await checkForFailingResponse(await fetch(databaseUrl), 'Error downloading database');
|
||||
const response = await checkForFailingResponse(
|
||||
await fetch(databaseUrl, { headers: requestHeaders }),
|
||||
'Error downloading database'
|
||||
);
|
||||
const archiveFileStream = fs.createWriteStream(archivePath);
|
||||
|
||||
const contentLength = response.headers.get('content-length');
|
||||
@@ -381,6 +465,81 @@ export async function findDirWithFile(
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL pattern is https://github.com/{owner}/{name}/{subpages}.
|
||||
*
|
||||
* This function accepts any URL that matches the pattern above. It also accepts just the
|
||||
* name with owner (NWO): `<owner>/<repo>`.
|
||||
*
|
||||
* @param githubRepo The GitHub repository URL or NWO
|
||||
*
|
||||
* @return true if this looks like a valid GitHub repository URL or NWO
|
||||
*/
|
||||
export function looksLikeGithubRepo(
|
||||
githubRepo: string | undefined
|
||||
): githubRepo is string {
|
||||
if (!githubRepo) {
|
||||
return false;
|
||||
}
|
||||
if (REPO_REGEX.test(githubRepo) || convertGitHubUrlToNwo(githubRepo)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a GitHub repository URL to the corresponding NWO.
|
||||
* @param githubUrl The GitHub repository URL
|
||||
* @return The corresponding NWO, or undefined if the URL is not valid
|
||||
*/
|
||||
function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
|
||||
try {
|
||||
const uri = Uri.parse(githubUrl, true);
|
||||
if (uri.scheme !== 'https') {
|
||||
return;
|
||||
}
|
||||
if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
|
||||
return;
|
||||
}
|
||||
const paths = uri.path.split('/').filter((segment: string) => segment);
|
||||
const nwo = `${paths[0]}/${paths[1]}`;
|
||||
if (REPO_REGEX.test(nwo)) {
|
||||
return nwo;
|
||||
}
|
||||
return;
|
||||
} catch (e) {
|
||||
// Ignore the error here, since we catch failures at a higher level.
|
||||
// In particular: returning undefined leads to an error in 'promptImportGithubDatabase'.
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
export async function convertGithubNwoToDatabaseUrl(
|
||||
githubRepo: string,
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback): Promise<string | undefined> {
|
||||
try {
|
||||
const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
|
||||
const [owner, repo] = nwo.split('/');
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });
|
||||
|
||||
const languages = response.data.map((db: any) => db.language);
|
||||
|
||||
const language = await promptForLanguage(languages, progress);
|
||||
if (!language) {
|
||||
return;
|
||||
}
|
||||
|
||||
return `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`;
|
||||
|
||||
} catch (e) {
|
||||
void logger.log(`Error: ${getErrorMessage(e)}`);
|
||||
throw new Error(`Unable to get database for '${githubRepo}'`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL pattern is https://lgtm.com/projects/{provider}/{org}/{name}/{irrelevant-subpages}.
|
||||
* There are several possibilities for the provider: in addition to GitHub.com (g),
|
||||
@@ -416,7 +575,7 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
|
||||
return false;
|
||||
}
|
||||
|
||||
const paths = uri.path.split('/').filter((segment) => segment);
|
||||
const paths = uri.path.split('/').filter((segment: string) => segment);
|
||||
return paths.length >= 4 && paths[0] === 'projects';
|
||||
} catch (e) {
|
||||
return false;
|
||||
@@ -446,7 +605,7 @@ function extractProjectSlug(lgtmUrl: string): string | undefined {
|
||||
}
|
||||
|
||||
// exported for testing
|
||||
export async function convertToDatabaseUrl(
|
||||
export async function convertLgtmUrlToDatabaseUrl(
|
||||
lgtmUrl: string,
|
||||
progress: ProgressCallback) {
|
||||
try {
|
||||
@@ -467,7 +626,9 @@ export async function convertToDatabaseUrl(
|
||||
}
|
||||
}
|
||||
|
||||
const language = await promptForLanguage(projectJson, progress);
|
||||
const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];
|
||||
|
||||
const language = await promptForLanguage(languages, progress);
|
||||
if (!language) {
|
||||
return;
|
||||
}
|
||||
@@ -479,7 +640,7 @@ export async function convertToDatabaseUrl(
|
||||
language,
|
||||
].join('/')}`;
|
||||
} catch (e) {
|
||||
void logger.log(`Error: ${e.message}`);
|
||||
void logger.log(`Error: ${getErrorMessage(e)}`);
|
||||
throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
|
||||
}
|
||||
}
|
||||
@@ -487,7 +648,7 @@ export async function convertToDatabaseUrl(
|
||||
async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
|
||||
const uri = Uri.parse(lgtmUrl, true);
|
||||
const paths = ['api', 'v1.0'].concat(
|
||||
uri.path.split('/').filter((segment) => segment)
|
||||
uri.path.split('/').filter((segment: string) => segment)
|
||||
).slice(0, 6);
|
||||
const projectUrl = `https://lgtm.com/${paths.join('/')}`;
|
||||
const projectResponse = await fetch(projectUrl);
|
||||
@@ -495,7 +656,7 @@ async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
|
||||
}
|
||||
|
||||
async function promptForLanguage(
|
||||
projectJson: any,
|
||||
languages: string[],
|
||||
progress: ProgressCallback
|
||||
): Promise<string | undefined> {
|
||||
progress({
|
||||
@@ -503,17 +664,19 @@ async function promptForLanguage(
|
||||
step: 2,
|
||||
maxStep: 2
|
||||
});
|
||||
if (!projectJson?.languages?.length) {
|
||||
return;
|
||||
if (!languages.length) {
|
||||
throw new Error('No databases found');
|
||||
}
|
||||
if (projectJson.languages.length === 1) {
|
||||
return projectJson.languages[0].language;
|
||||
if (languages.length === 1) {
|
||||
return languages[0];
|
||||
}
|
||||
|
||||
return await window.showQuickPick(
|
||||
projectJson.languages.map((lang: { language: string }) => lang.language), {
|
||||
placeHolder: 'Select the database language to download:'
|
||||
}
|
||||
languages,
|
||||
{
|
||||
placeHolder: 'Select the database language to download:',
|
||||
ignoreFocusOut: true,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -33,11 +33,13 @@ import * as qsClient from './queryserver-client';
|
||||
import { upgradeDatabaseExplicit } from './upgrades';
|
||||
import {
|
||||
importArchiveDatabase,
|
||||
promptImportGithubDatabase,
|
||||
promptImportInternetDatabase,
|
||||
promptImportLgtmDatabase,
|
||||
} from './databaseFetcher';
|
||||
import { CancellationToken } from 'vscode';
|
||||
import { asyncFilter } from './pure/helpers-pure';
|
||||
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
|
||||
import { Credentials } from './authentication';
|
||||
|
||||
type ThemableIconPath = { light: string; dark: string } | string;
|
||||
|
||||
@@ -219,7 +221,8 @@ export class DatabaseUI extends DisposableObject {
|
||||
private databaseManager: DatabaseManager,
|
||||
private readonly queryServer: qsClient.QueryServerClient | undefined,
|
||||
private readonly storagePath: string,
|
||||
readonly extensionPath: string
|
||||
readonly extensionPath: string,
|
||||
private readonly getCredentials: () => Promise<Credentials>
|
||||
) {
|
||||
super();
|
||||
|
||||
@@ -291,6 +294,20 @@ export class DatabaseUI extends DisposableObject {
|
||||
}
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQLDatabases.chooseDatabaseGithub',
|
||||
async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await this.getCredentials();
|
||||
await this.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
this.push(
|
||||
commandRunnerWithProgress(
|
||||
'codeQLDatabases.chooseDatabaseLgtm',
|
||||
@@ -376,7 +393,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
try {
|
||||
return await this.chooseAndSetDatabase(true, progress, token);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
@@ -444,7 +461,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
try {
|
||||
return await this.chooseAndSetDatabase(false, progress, token);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
@@ -462,6 +479,21 @@ export class DatabaseUI extends DisposableObject {
|
||||
);
|
||||
};
|
||||
|
||||
handleChooseDatabaseGithub = async (
|
||||
credentials: Credentials,
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
): Promise<DatabaseItem | undefined> => {
|
||||
return await promptImportGithubDatabase(
|
||||
this.databaseManager,
|
||||
this.storagePath,
|
||||
credentials,
|
||||
progress,
|
||||
token,
|
||||
this.queryServer?.cliServer
|
||||
);
|
||||
};
|
||||
|
||||
handleChooseDatabaseLgtm = async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
@@ -590,8 +622,7 @@ export class DatabaseUI extends DisposableObject {
|
||||
} catch (e) {
|
||||
// rethrow and let this be handled by default error handling.
|
||||
throw new Error(
|
||||
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${e.message
|
||||
}`
|
||||
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -19,6 +19,7 @@ import { DisposableObject } from './pure/disposable-object';
|
||||
import { Logger, logger } from './logging';
|
||||
import { registerDatabases, Dataset, deregisterDatabases } from './pure/messages';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* databases.ts
|
||||
@@ -359,7 +360,7 @@ export class DatabaseItemImpl implements DatabaseItem {
|
||||
}
|
||||
catch (e) {
|
||||
this._contents = undefined;
|
||||
this._error = e;
|
||||
this._error = e instanceof Error ? e : new Error(String(e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@@ -726,7 +727,7 @@ export class DatabaseManager extends DisposableObject {
|
||||
}
|
||||
} catch (e) {
|
||||
// database list had an unexpected type - nothing to be done?
|
||||
void showAndLogErrorMessage(`Database list loading failed: ${e.message}`);
|
||||
void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -841,7 +842,7 @@ export class DatabaseManager extends DisposableObject {
|
||||
void logger.log('Deleting database from filesystem.');
|
||||
fs.remove(item.databaseUri.fsPath).then(
|
||||
() => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
|
||||
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${e.message}`));
|
||||
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
|
||||
}
|
||||
|
||||
// note that we use undefined as the item in order to reset the entire tree
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import 'source-map-support/register';
|
||||
import {
|
||||
CancellationToken,
|
||||
CancellationTokenSource,
|
||||
@@ -65,7 +66,7 @@ import {
|
||||
showInformationMessageWithAction,
|
||||
tmpDir
|
||||
} from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { asError, assertNever, getErrorMessage } from './pure/helpers-pure';
|
||||
import { spawnIdeServer } from './ide-server';
|
||||
import { InterfaceManager } from './interface';
|
||||
import { WebviewReveal } from './interface-utils';
|
||||
@@ -433,7 +434,8 @@ async function activateWithInstalledDistribution(
|
||||
dbm,
|
||||
qs,
|
||||
getContextStoragePath(ctx),
|
||||
ctx.extensionPath
|
||||
ctx.extensionPath,
|
||||
() => Credentials.initialize(ctx),
|
||||
);
|
||||
databaseUI.init();
|
||||
ctx.subscriptions.push(databaseUI);
|
||||
@@ -488,7 +490,7 @@ async function activateWithInstalledDistribution(
|
||||
try {
|
||||
await cmpm.showResults(from, to);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -534,14 +536,17 @@ async function activateWithInstalledDistribution(
|
||||
queryStorageDir,
|
||||
progress,
|
||||
source.token,
|
||||
undefined,
|
||||
item,
|
||||
);
|
||||
item.completeThisQuery(completedQueryInfo);
|
||||
await showResultsForCompletedQuery(item as CompletedLocalQueryInfo, WebviewReveal.NotForced);
|
||||
// Note we must update the query history view after showing results as the
|
||||
// display and sorting might depend on the number of results
|
||||
} catch (e) {
|
||||
e.message = `Error running query: ${e.message}`;
|
||||
item.failureReason = e.message;
|
||||
const err = asError(e);
|
||||
err.message = `Error running query: ${err.message}`;
|
||||
item.failureReason = err.message;
|
||||
throw e;
|
||||
} finally {
|
||||
await qhm.refreshTreeView();
|
||||
@@ -566,11 +571,11 @@ async function activateWithInstalledDistribution(
|
||||
try {
|
||||
await cliServer.generateQueryHelp(pathToQhelp, absolutePathToMd);
|
||||
await commands.executeCommand('markdown.showPreviewToSide', uri);
|
||||
} catch (err) {
|
||||
const errorMessage = err.message.includes('Generating qhelp in markdown') ? (
|
||||
} catch (e) {
|
||||
const errorMessage = getErrorMessage(e).includes('Generating qhelp in markdown') ? (
|
||||
`Could not generate markdown from ${pathToQhelp}: Bad formatting in .qhelp file.`
|
||||
) : `Could not open a preview of the generated file (${absolutePathToMd}).`;
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${err}` });
|
||||
void showAndLogErrorMessage(errorMessage, { fullMessage: `${errorMessage}\n${e}` });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -693,9 +698,9 @@ async function activateWithInstalledDistribution(
|
||||
for (const item of quickpick) {
|
||||
try {
|
||||
await compileAndRunQuery(false, uri, progress, token, item.databaseItem);
|
||||
} catch (error) {
|
||||
} catch (e) {
|
||||
skippedDatabases.push(item.label);
|
||||
errors.push(error.message);
|
||||
errors.push(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
if (skippedDatabases.length > 0) {
|
||||
@@ -931,6 +936,18 @@ async function activateWithInstalledDistribution(
|
||||
title: 'Choose a Database from an Archive'
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseGithub', async (
|
||||
progress: ProgressCallback,
|
||||
token: CancellationToken
|
||||
) => {
|
||||
const credentials = await Credentials.initialize(ctx);
|
||||
await databaseUI.handleChooseDatabaseGithub(credentials, progress, token);
|
||||
},
|
||||
{
|
||||
title: 'Adding database from GitHub',
|
||||
})
|
||||
);
|
||||
ctx.subscriptions.push(
|
||||
commandRunnerWithProgress('codeQL.chooseDatabaseLgtm', (
|
||||
progress: ProgressCallback,
|
||||
|
||||
@@ -15,7 +15,7 @@ import * as cli from './cli';
|
||||
import { CodeQLCliServer } from './cli';
|
||||
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
|
||||
import { showAndLogErrorMessage, tmpDir } from './helpers';
|
||||
import { assertNever } from './pure/helpers-pure';
|
||||
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import {
|
||||
FromResultsViewMsg,
|
||||
Interpretation,
|
||||
@@ -353,8 +353,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
assertNever(msg);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message, {
|
||||
fullMessage: e.stack
|
||||
void showAndLogErrorMessage(getErrorMessage(e), {
|
||||
fullMessage: getErrorStack(e)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -729,7 +729,7 @@ export class InterfaceManager extends DisposableObject {
|
||||
// If interpretation fails, accept the error and continue
|
||||
// trying to render uninterpreted results anyway.
|
||||
void showAndLogErrorMessage(
|
||||
`Showing raw results instead of interpreted ones due to an error. ${e.message}`
|
||||
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -768,9 +768,8 @@ export class InterfaceManager extends DisposableObject {
|
||||
try {
|
||||
await this.showProblemResultsAsDiagnostics(interpretation, database);
|
||||
} catch (e) {
|
||||
const msg = e instanceof Error ? e.message : e.toString();
|
||||
void this.logger.log(
|
||||
`Exception while computing problem results as diagnostics: ${msg}`
|
||||
`Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
|
||||
);
|
||||
this._diagnosticCollection.clear();
|
||||
}
|
||||
|
||||
@@ -35,3 +35,22 @@ export const ONE_DAY_IN_MS = 24 * 60 * 60 * 1000;
|
||||
export const ONE_HOUR_IN_MS = 1000 * 60 * 60;
|
||||
export const TWO_HOURS_IN_MS = 1000 * 60 * 60 * 2;
|
||||
export const THREE_HOURS_IN_MS = 1000 * 60 * 60 * 3;
|
||||
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters or single hyphens, starting and ending in an alphanumeric character
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, or underscores
|
||||
*/
|
||||
export const REPO_REGEX = /^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+\/[a-zA-Z0-9-_]+$/;
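For reference, a minimal sketch (not part of the diff; the sample strings are illustrative only) of what this pattern accepts and rejects:

import { REPO_REGEX } from './pure/helpers-pure';

REPO_REGEX.test('github/codeql');      // true
REPO_REGEX.test('my-org/some_repo-2'); // true
REPO_REGEX.test('-org/repo');          // false: owner may not start or end with a hyphen
REPO_REGEX.test('owner/repo/extra');   // false: exactly one owner/repo pair is allowed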
|
||||
|
||||
export function getErrorMessage(e: any) {
|
||||
return e instanceof Error ? e.message : String(e);
|
||||
}
|
||||
|
||||
export function getErrorStack(e: any) {
|
||||
return e instanceof Error ? e.stack ?? '' : '';
|
||||
}
|
||||
|
||||
export function asError(e: any): Error {
|
||||
return e instanceof Error ? e : new Error(String(e));
|
||||
}
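Taken together, these helpers support the catch-block pattern applied throughout this diff, where the caught value has an unknown type. A hedged sketch (the awaited operation and the logger are placeholders, not part of the change):

try {
  await doSomething();
} catch (e) {
  void logger.log(getErrorMessage(e)); // safe even when `e` is not an Error
  void logger.log(getErrorStack(e));   // empty string when no stack is available
  throw asError(e);                    // rethrow as a genuine Error instance
}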
|
||||
|
||||
@@ -646,6 +646,35 @@ export interface ClearCacheParams {
|
||||
*/
|
||||
dryRun: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to start a new structured log
|
||||
*/
|
||||
export interface StartLogParams {
|
||||
/**
|
||||
* The dataset for which we want to start a new structured log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path where we want to place the new structured log
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters to terminate a structured log
|
||||
*/
|
||||
export interface EndLogParams {
|
||||
/**
|
||||
* The dataset for which we want to terminate the log
|
||||
*/
|
||||
db: Dataset;
|
||||
/**
|
||||
* The path of the log to terminate; this is a no-op if we aren't logging to that location
|
||||
*/
|
||||
logPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for trimming the cache of a dataset
|
||||
*/
|
||||
@@ -682,6 +711,26 @@ export interface ClearCacheResult {
|
||||
deletionMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of starting a new structured log.
|
||||
*/
|
||||
export interface StartLogResult {
|
||||
/**
|
||||
* A user friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The result of terminating a structured log.
|
||||
*/
|
||||
export interface EndLogResult {
|
||||
/**
|
||||
* A user friendly message saying what happened.
|
||||
*/
|
||||
outcomeMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parameters for running a set of queries
|
||||
*/
|
||||
@@ -1018,6 +1067,16 @@ export const compileUpgrade = new rpc.RequestType<WithProgressId<CompileUpgradeP
|
||||
*/
|
||||
export const compileUpgradeSequence = new rpc.RequestType<WithProgressId<CompileUpgradeSequenceParams>, CompileUpgradeSequenceResult, void, void>('compilation/compileUpgradeSequence');
|
||||
|
||||
/**
|
||||
* Start a new structured log in the evaluator, terminating the previous one if it exists
|
||||
*/
|
||||
export const startLog = new rpc.RequestType<WithProgressId<StartLogParams>, StartLogResult, void, void>('evaluation/startLog');
|
||||
|
||||
/**
|
||||
* Terminate a structured log in the evaluator. This is a no-op if we aren't logging to the given location
|
||||
*/
|
||||
export const endLog = new rpc.RequestType<WithProgressId<EndLogParams>, EndLogResult, void, void>('evaluation/endLog');
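A sketch of how these two request types are intended to be paired around a query evaluation (the real call sites appear later in run-queries.ts; `qs`, `dataset`, and `logPath` stand in for the query server client, the dataset, and the chosen log location):

await qs.sendRequest(startLog, { db: dataset, logPath });
try {
  // ... evaluate queries against the dataset ...
} finally {
  await qs.sendRequest(endLog, { db: dataset, logPath });
}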
|
||||
|
||||
/**
|
||||
* Clear the cache of a dataset
|
||||
*/
|
||||
|
||||
@@ -28,12 +28,14 @@ import { URLSearchParams } from 'url';
|
||||
import { QueryServerClient } from './queryserver-client';
|
||||
import { DisposableObject } from './pure/disposable-object';
|
||||
import { commandRunner } from './commandRunner';
|
||||
import { assertNever, ONE_HOUR_IN_MS, TWO_HOURS_IN_MS } from './pure/helpers-pure';
|
||||
import { assertNever, ONE_HOUR_IN_MS, TWO_HOURS_IN_MS, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedLocalQueryInfo, LocalQueryInfo as LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { DatabaseManager } from './databases';
|
||||
import { registerQueryHistoryScubber } from './query-history-scrubber';
|
||||
import { QueryStatus } from './query-status';
|
||||
import { slurpQueryHistory, splatQueryHistory } from './query-serialization';
|
||||
import * as fs from 'fs-extra';
|
||||
import { CliVersionConstraint } from './cli';
|
||||
|
||||
/**
|
||||
* query-history.ts
|
||||
@@ -181,38 +183,48 @@ export class HistoryTreeDataProvider extends DisposableObject {
|
||||
): ProviderResult<QueryHistoryInfo[]> {
|
||||
return element ? [] : this.history.sort((h1, h2) => {
|
||||
|
||||
// TODO remote queries are not implemented yet.
|
||||
if (h1.t !== 'local' && h2.t !== 'local') {
|
||||
return 0;
|
||||
}
|
||||
if (h1.t !== 'local') {
|
||||
return -1;
|
||||
}
|
||||
if (h2.t !== 'local') {
|
||||
return 1;
|
||||
}
|
||||
const h1Label = h1.label.toLowerCase();
|
||||
const h2Label = h2.label.toLowerCase();
|
||||
|
||||
const resultCount1 = h1.completedQuery?.resultCount ?? -1;
|
||||
const resultCount2 = h2.completedQuery?.resultCount ?? -1;
|
||||
const h1Date = h1.t === 'local'
|
||||
? h1.initialInfo.start.getTime()
|
||||
: h1.remoteQuery?.executionStartTime;
|
||||
|
||||
const h2Date = h2.t === 'local'
|
||||
? h2.initialInfo.start.getTime()
|
||||
: h2.remoteQuery?.executionStartTime;
|
||||
|
||||
// result count for remote queries is not available here.
|
||||
const resultCount1 = h1.t === 'local'
|
||||
? h1.completedQuery?.resultCount ?? -1
|
||||
: -1;
|
||||
const resultCount2 = h2.t === 'local'
|
||||
? h2.completedQuery?.resultCount ?? -1
|
||||
: -1;
|
||||
|
||||
switch (this.sortOrder) {
|
||||
case SortOrder.NameAsc:
|
||||
return h1.label.localeCompare(h2.label, env.language);
|
||||
return h1Label.localeCompare(h2Label, env.language);
|
||||
|
||||
case SortOrder.NameDesc:
|
||||
return h2.label.localeCompare(h1.label, env.language);
|
||||
return h2Label.localeCompare(h1Label, env.language);
|
||||
|
||||
case SortOrder.DateAsc:
|
||||
return h1.initialInfo.start.getTime() - h2.initialInfo.start.getTime();
|
||||
return h1Date - h2Date;
|
||||
|
||||
case SortOrder.DateDesc:
|
||||
return h2.initialInfo.start.getTime() - h1.initialInfo.start.getTime();
|
||||
return h2Date - h1Date;
|
||||
|
||||
case SortOrder.CountAsc:
|
||||
// If the result counts are equal, sort by name.
|
||||
return resultCount1 - resultCount2 === 0
|
||||
? h1.label.localeCompare(h2.label, env.language)
|
||||
? h1Label.localeCompare(h2Label, env.language)
|
||||
: resultCount1 - resultCount2;
|
||||
|
||||
case SortOrder.CountDesc:
|
||||
// If the result counts are equal, sort by name.
|
||||
return resultCount2 - resultCount1 === 0
|
||||
? h2.label.localeCompare(h1.label, env.language)
|
||||
? h2Label.localeCompare(h1Label, env.language)
|
||||
: resultCount2 - resultCount1;
|
||||
default:
|
||||
assertNever(this.sortOrder);
|
||||
@@ -406,6 +418,18 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
this.handleOpenQueryDirectory.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.showEvalLog',
|
||||
this.handleShowEvalLog.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.showEvalLogSummary',
|
||||
this.handleShowEvalLogSummary.bind(this)
|
||||
)
|
||||
);
|
||||
this.push(
|
||||
commandRunner(
|
||||
'codeQLQueryHistory.cancel',
|
||||
@@ -636,7 +660,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
if (response !== undefined) {
|
||||
// Interpret empty string response as 'go back to using default'
|
||||
finalSingleItem.initialInfo.userSpecifiedLabel = response === '' ? undefined : response;
|
||||
this.treeDataProvider.refresh();
|
||||
await this.refreshTreeView();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -663,7 +687,7 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
await this.doCompareCallback(from as CompletedLocalQueryInfo, to as CompletedLocalQueryInfo);
|
||||
}
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -727,21 +751,74 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
return;
|
||||
}
|
||||
|
||||
let p: string | undefined;
|
||||
let externalFilePath: string | undefined;
|
||||
if (finalSingleItem.t === 'local') {
|
||||
if (finalSingleItem.completedQuery) {
|
||||
p = finalSingleItem.completedQuery.query.querySaveDir;
|
||||
externalFilePath = path.join(finalSingleItem.completedQuery.query.querySaveDir, 'timestamp');
|
||||
}
|
||||
} else if (finalSingleItem.t === 'remote') {
|
||||
p = path.join(this.queryStorageDir, finalSingleItem.queryId);
|
||||
externalFilePath = path.join(this.queryStorageDir, finalSingleItem.queryId, 'timestamp');
|
||||
}
|
||||
|
||||
if (p) {
|
||||
try {
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(p));
|
||||
} catch (e) {
|
||||
throw new Error(`Failed to open ${p}: ${e.message}`);
|
||||
if (externalFilePath) {
|
||||
if (!(await fs.pathExists(externalFilePath))) {
|
||||
// The timestamp file is missing (manually deleted?). Try selecting the parent folder instead.
|
||||
// It's less nice, but at least it will work.
|
||||
externalFilePath = path.dirname(externalFilePath);
|
||||
if (!(await fs.pathExists(externalFilePath))) {
|
||||
throw new Error(`Query directory does not exist: ${externalFilePath}`);
|
||||
}
|
||||
}
|
||||
try {
|
||||
await commands.executeCommand('revealFileInOS', Uri.file(externalFilePath));
|
||||
} catch (e) {
|
||||
throw new Error(`Failed to open ${externalFilePath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private warnNoEvalLog() {
|
||||
void showAndLogWarningMessage('No evaluator log is available for this run. Perhaps it failed before evaluation, or you are running with a version of CodeQL before ' + CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG + '?');
|
||||
}
|
||||
|
||||
private warnNoEvalLogSummary() {
|
||||
void showAndLogWarningMessage(`No evaluator log summary is available for this run. Perhaps it failed before evaluation, or you are running with a version of CodeQL before ${CliVersionConstraint.CLI_VERSION_WITH_PER_QUERY_EVAL_LOG}?`);
|
||||
}
|
||||
|
||||
|
||||
async handleShowEvalLog(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[]
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
// Only applicable to an individual local query
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalSingleItem.evalLogLocation) {
|
||||
await this.tryOpenExternalFile(finalSingleItem.evalLogLocation);
|
||||
} else {
|
||||
this.warnNoEvalLog();
|
||||
}
|
||||
}
|
||||
|
||||
async handleShowEvalLogSummary(
|
||||
singleItem: QueryHistoryInfo,
|
||||
multiSelect: QueryHistoryInfo[]
|
||||
) {
|
||||
const { finalSingleItem, finalMultiSelect } = this.determineSelection(singleItem, multiSelect);
|
||||
|
||||
// Only applicable to an individual local query
|
||||
if (!this.assertSingleQuery(finalMultiSelect) || !finalSingleItem || finalSingleItem.t !== 'local') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalSingleItem.evalLogSummaryLocation) {
|
||||
await this.tryOpenExternalFile(finalSingleItem.evalLogSummaryLocation);
|
||||
} else {
|
||||
this.warnNoEvalLogSummary();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -920,11 +997,12 @@ export class QueryHistoryManager extends DisposableObject {
|
||||
try {
|
||||
await window.showTextDocument(uri, { preview: false });
|
||||
} catch (e) {
|
||||
const msg = getErrorMessage(e);
|
||||
if (
|
||||
e.message.includes(
|
||||
msg.includes(
|
||||
'Files above 50MB cannot be synchronized with extensions'
|
||||
) ||
|
||||
e.message.includes('too large to open')
|
||||
msg.includes('too large to open')
|
||||
) {
|
||||
const res = await showBinaryChoiceDialog(
|
||||
`VS Code does not allow extensions to open files >50MB. This file
|
||||
@@ -937,13 +1015,13 @@ the file in the file explorer and dragging it into the workspace.`
|
||||
try {
|
||||
await commands.executeCommand('revealFileInOS', uri);
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage(e.message);
|
||||
void showAndLogErrorMessage(getErrorMessage(e));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
void showAndLogErrorMessage(`Could not open file ${fileLocation}`);
|
||||
void logger.log(e.message);
|
||||
void logger.log(e.stack);
|
||||
void logger.log(getErrorMessage(e));
|
||||
void logger.log(getErrorStack(e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -216,6 +216,8 @@ export class LocalQueryInfo {
|
||||
|
||||
public failureReason: string | undefined;
|
||||
public completedQuery: CompletedQueryInfo | undefined;
|
||||
public evalLogLocation: string | undefined;
|
||||
public evalLogSummaryLocation: string | undefined;
|
||||
private config: QueryHistoryConfig | undefined;
|
||||
|
||||
/**
|
||||
|
||||
@@ -3,7 +3,7 @@ import * as path from 'path';
|
||||
|
||||
import { QueryHistoryConfig } from './config';
|
||||
import { showAndLogErrorMessage } from './helpers';
|
||||
import { asyncFilter } from './pure/helpers-pure';
|
||||
import { asyncFilter, getErrorMessage, getErrorStack } from './pure/helpers-pure';
|
||||
import { CompletedQueryInfo, LocalQueryInfo, QueryHistoryInfo } from './query-results';
|
||||
import { QueryEvaluationInfo } from './run-queries';
|
||||
|
||||
@@ -64,7 +64,7 @@ export async function slurpQueryHistory(fsPath: string, config: QueryHistoryConf
|
||||
});
|
||||
} catch (e) {
|
||||
void showAndLogErrorMessage('Error loading query history.', {
|
||||
fullMessage: ['Error loading query history.', e.stack].join('\n'),
|
||||
fullMessage: ['Error loading query history.', getErrorStack(e)].join('\n'),
|
||||
});
|
||||
// since the query history is invalid, it should be deleted so this error does not happen on next startup.
|
||||
await fs.remove(fsPath);
|
||||
@@ -94,6 +94,6 @@ export async function splatQueryHistory(queries: QueryHistoryInfo[], fsPath: str
|
||||
}, null, 2);
|
||||
await fs.writeFile(fsPath, data);
|
||||
} catch (e) {
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${e.message}`);
|
||||
throw new Error(`Error saving query history to ${fsPath}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,7 +146,7 @@ export class QueryServerClient extends DisposableObject {
|
||||
args.push('--require-db-registration');
|
||||
}
|
||||
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats()) {
|
||||
if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
|
||||
args.push('--old-eval-stats');
|
||||
}
|
||||
|
||||
@@ -258,3 +258,15 @@ export class QueryServerClient extends DisposableObject {
|
||||
export function findQueryLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'query.log');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.jsonl');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log.summary');
|
||||
}
|
||||
|
||||
export function findQueryEvalLogEndSummaryFile(resultPath: string): string {
|
||||
return path.join(resultPath, 'evaluator-log-end.summary');
|
||||
}
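Assuming a query result directory of '/home/user/queries/query-123' (illustrative only), these helpers resolve to sibling files alongside the existing query.log:

findQueryEvalLogFile(resultPath);           // .../query-123/evaluator-log.jsonl
findQueryEvalLogSummaryFile(resultPath);    // .../query-123/evaluator-log.summary
findQueryEvalLogEndSummaryFile(resultPath); // .../query-123/evaluator-log-end.summary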
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
ProgressCallback,
|
||||
UserCancellationException
|
||||
} from './commandRunner';
|
||||
import { getErrorMessage } from './pure/helpers-pure';
|
||||
|
||||
const QUICK_QUERIES_DIR_NAME = 'quick-queries';
|
||||
const QUICK_QUERY_QUERY_NAME = 'quick-query.ql';
|
||||
@@ -132,7 +133,7 @@ export async function displayQuickQuery(
|
||||
await Window.showTextDocument(await workspace.openTextDocument(qlFile));
|
||||
} catch (e) {
|
||||
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
|
||||
throw new UserCancellationException(e.message);
|
||||
throw new UserCancellationException(getErrorMessage(e));
|
||||
} else {
|
||||
throw e;
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import { CancellationToken, ExtensionContext } from 'vscode';
|
||||
@@ -12,6 +13,8 @@ import { sarifParser } from '../sarif-parser';
|
||||
import { extractAnalysisAlerts } from './sarif-processing';
|
||||
import { CodeQLCliServer } from '../cli';
|
||||
import { extractRawResults } from './bqrs-processing';
|
||||
import { asyncFilter, getErrorMessage } from '../pure/helpers-pure';
|
||||
import { createDownloadPath } from './download-link';
|
||||
|
||||
export class AnalysesResultsManager {
|
||||
// Store for the results of various analyses for each remote query.
|
||||
@@ -43,13 +46,22 @@ export class AnalysesResultsManager {
|
||||
await this.downloadSingleAnalysisResults(analysisSummary, credentials, publishResults);
|
||||
}
|
||||
|
||||
public async downloadAnalysesResults(
|
||||
allAnalysesToDownload: AnalysisSummary[],
|
||||
token: CancellationToken | undefined,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void>
|
||||
/**
|
||||
* Loads the given array of analysis results. For each analysis result, if it is not downloaded yet,
|
||||
* it will be downloaded. If it is already downloaded, it will be loaded into memory.
|
||||
* If it is already in memory, this will be a no-op.
|
||||
*
|
||||
* @param allAnalysesToLoad List of analyses to ensure are downloaded and in memory
|
||||
* @param token Optional cancellation token
|
||||
* @param publishResults Optional function to publish the results after loading
|
||||
*/
|
||||
public async loadAnalysesResults(
|
||||
allAnalysesToLoad: AnalysisSummary[],
|
||||
token?: CancellationToken,
|
||||
publishResults: (analysesResults: AnalysisResults[]) => Promise<void> = () => Promise.resolve()
|
||||
): Promise<void> {
|
||||
// Filter out analyses that we have already in memory.
|
||||
const analysesToDownload = allAnalysesToDownload.filter(x => !this.isAnalysisInMemory(x));
|
||||
const analysesToDownload = allAnalysesToLoad.filter(x => !this.isAnalysisInMemory(x));
|
||||
|
||||
const credentials = await Credentials.initialize(this.ctx);
|
||||
|
||||
@@ -118,7 +130,7 @@ export class AnalysesResultsManager {
|
||||
artifactPath = await downloadArtifactFromLink(credentials, this.storagePath, analysis.downloadLink);
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${e.message}`);
|
||||
throw new Error(`Could not download the analysis results for ${analysis.nwo}: ${getErrorMessage(e)}`);
|
||||
}
|
||||
|
||||
const fileLinkPrefix = this.createGitHubDotcomFileLinkPrefix(analysis.nwo, analysis.databaseSha);
|
||||
@@ -150,6 +162,21 @@ export class AnalysesResultsManager {
|
||||
void publishResults([...resultsForQuery]);
|
||||
}
|
||||
|
||||
|
||||
public async loadDownloadedAnalyses(
|
||||
allAnalysesToCheck: AnalysisSummary[]
|
||||
) {
|
||||
|
||||
// Find all analyses that are already downloaded.
|
||||
const allDownloadedAnalyses = await asyncFilter(allAnalysesToCheck, x => this.isAnalysisDownloaded(x));
|
||||
// Now, ensure that all of these analyses are in memory. Some may already be in memory. These are ignored.
|
||||
await this.loadAnalysesResults(allDownloadedAnalyses);
|
||||
}
|
||||
|
||||
private async isAnalysisDownloaded(analysis: AnalysisSummary): Promise<boolean> {
|
||||
return await fs.pathExists(createDownloadPath(this.storagePath, analysis.downloadLink));
|
||||
}
|
||||
|
||||
private async readBqrsResults(filePath: string, fileLinkPrefix: string): Promise<AnalysisRawResults> {
|
||||
return await extractRawResults(this.cliServer, this.logger, filePath, fileLinkPrefix);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import * as path from 'path';
|
||||
|
||||
/**
|
||||
* Represents a link to an artifact to be downloaded.
|
||||
*/
|
||||
@@ -23,3 +25,16 @@ export interface DownloadLink {
|
||||
*/
|
||||
queryId: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a downloadLink to the path where the artifact should be stored.
|
||||
*
|
||||
* @param storagePath The base directory to store artifacts in.
|
||||
* @param downloadLink The DownloadLink
|
||||
* @param extension An optional file extension to append to the artifact (no `.`).
|
||||
*
|
||||
* @returns A full path to the download location of the artifact
|
||||
*/
|
||||
export function createDownloadPath(storagePath: string, downloadLink: DownloadLink, extension = '') {
|
||||
return path.join(storagePath, downloadLink.queryId, downloadLink.id + (extension ? `.${extension}` : ''));
|
||||
}
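Assuming a download link with queryId 'query-abc' and id '42' (illustrative values), the helper resolves paths like this; the optional extension matches the zip handling shown below in downloadArtifactFromLink:

createDownloadPath('/tmp/storage', downloadLink);        // '/tmp/storage/query-abc/42'
createDownloadPath('/tmp/storage', downloadLink, 'zip'); // '/tmp/storage/query-abc/42.zip'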
|
||||
|
||||
@@ -5,7 +5,7 @@ import { showAndLogWarningMessage, tmpDir } from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import { logger } from '../logging';
|
||||
import { RemoteQueryWorkflowResult } from './remote-query-workflow-result';
|
||||
import { DownloadLink } from './download-link';
|
||||
import { DownloadLink, createDownloadPath } from './download-link';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQueryFailureIndexItem, RemoteQueryResultIndex, RemoteQuerySuccessIndexItem } from './remote-query-result-index';
|
||||
|
||||
@@ -82,14 +82,14 @@ export async function downloadArtifactFromLink(
|
||||
|
||||
const octokit = await credentials.getOctokit();
|
||||
|
||||
const extractedPath = path.join(storagePath, downloadLink.queryId, downloadLink.id);
|
||||
const extractedPath = createDownloadPath(storagePath, downloadLink);
|
||||
|
||||
// first check if we already have the artifact
|
||||
if (!(await fs.pathExists(extractedPath))) {
|
||||
// Download the zipped artifact.
|
||||
const response = await octokit.request(`GET ${downloadLink.urlPath}/zip`, {});
|
||||
|
||||
const zipFilePath = path.join(storagePath, downloadLink.queryId, `${downloadLink.id}.zip`);
|
||||
const zipFilePath = createDownloadPath(storagePath, downloadLink, 'zip');
|
||||
await saveFile(`${zipFilePath}`, response.data as ArrayBuffer);
|
||||
|
||||
// Extract the zipped artifact.
|
||||
|
||||
@@ -46,11 +46,15 @@ export class RemoteQueriesInterfaceManager {
|
||||
this.getPanel().reveal(undefined, true);
|
||||
|
||||
await this.waitForPanelLoaded();
|
||||
const model = this.buildViewModel(query, queryResult);
|
||||
await this.postMessage({
|
||||
t: 'setRemoteQueryResult',
|
||||
queryResult: this.buildViewModel(query, queryResult)
|
||||
queryResult: model
|
||||
});
|
||||
|
||||
// Ensure all pre-downloaded artifacts are loaded into memory
|
||||
await this.analysesResultsManager.loadDownloadedAnalyses(model.analysisSummaries);
|
||||
|
||||
await this.setAnalysisResults(this.analysesResultsManager.getAnalysesResults(queryResult.queryId));
|
||||
}
|
||||
|
||||
@@ -67,6 +71,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const executionDuration = this.getDuration(queryResult.executionEndTime, query.executionStartTime);
|
||||
const analysisSummaries = this.buildAnalysisSummaries(queryResult.analysisSummaries);
|
||||
const totalRepositoryCount = queryResult.analysisSummaries.length;
|
||||
const affectedRepositories = queryResult.analysisSummaries.filter(r => r.resultCount > 0);
|
||||
|
||||
return {
|
||||
@@ -76,7 +81,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
queryText: query.queryText,
|
||||
language: query.language,
|
||||
workflowRunUrl: `https://github.com/${query.controllerRepository.owner}/${query.controllerRepository.name}/actions/runs/${query.actionsWorkflowRunId}`,
|
||||
totalRepositoryCount: query.repositories.length,
|
||||
totalRepositoryCount: totalRepositoryCount,
|
||||
affectedRepositoryCount: affectedRepositories.length,
|
||||
totalResultCount: totalResultCount,
|
||||
executionTimestamp: this.formatDate(query.executionStartTime),
|
||||
@@ -213,7 +218,7 @@ export class RemoteQueriesInterfaceManager {
|
||||
}
|
||||
|
||||
private async downloadAllAnalysesResults(msg: RemoteQueryDownloadAllAnalysesResultsMessage): Promise<void> {
|
||||
await this.analysesResultsManager.downloadAnalysesResults(
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
msg.analysisSummaries,
|
||||
undefined,
|
||||
results => this.setAnalysisResults(results));
|
||||
|
||||
@@ -187,7 +187,7 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
fileSize: String(a.fileSizeInBytes)
|
||||
}));
|
||||
|
||||
await this.analysesResultsManager.downloadAnalysesResults(
|
||||
await this.analysesResultsManager.loadAnalysesResults(
|
||||
analysesToDownload,
|
||||
token,
|
||||
results => this.interfaceManager.setAnalysisResults(results));
|
||||
@@ -226,7 +226,8 @@ export class RemoteQueriesManager extends DisposableObject {
|
||||
|
||||
private async askToOpenResults(query: RemoteQuery, queryResult: RemoteQueryResult): Promise<void> {
|
||||
const totalResultCount = queryResult.analysisSummaries.reduce((acc, cur) => acc + cur.resultCount, 0);
|
||||
const message = `Query "${query.queryName}" run on ${query.repositories.length} repositories and returned ${totalResultCount} results`;
|
||||
const totalRepoCount = queryResult.analysisSummaries.length;
|
||||
const message = `Query "${query.queryName}" run on ${totalRepoCount} repositories and returned ${totalResultCount} results`;
|
||||
|
||||
const shouldOpenView = await showInformationMessageWithAction(message, 'View');
|
||||
if (shouldOpenView) {
|
||||
|
||||
@@ -6,7 +6,6 @@ export interface RemoteQuery {
|
||||
queryText: string;
|
||||
language: string;
|
||||
controllerRepository: Repository;
|
||||
repositories: Repository[];
|
||||
executionStartTime: number; // Use number here since it needs to be serialized and deserialized.
|
||||
actionsWorkflowRunId: number;
|
||||
}
|
||||
|
||||
116
extensions/ql-vscode/src/remote-queries/repository-selection.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { QuickPickItem, window } from 'vscode';
|
||||
import { showAndLogErrorMessage } from '../helpers';
|
||||
import { logger } from '../logging';
|
||||
import { getRemoteRepositoryLists } from '../config';
|
||||
import { REPO_REGEX } from '../pure/helpers-pure';
|
||||
|
||||
export interface RepositorySelection {
|
||||
repositories?: string[];
|
||||
repositoryLists?: string[]
|
||||
}
|
||||
|
||||
interface RepoListQuickPickItem extends QuickPickItem {
|
||||
repositories?: string[];
|
||||
repositoryList?: string;
|
||||
useCustomRepository?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the repositories or repository lists to run the query against.
|
||||
* @returns The user selection.
|
||||
*/
|
||||
export async function getRepositorySelection(): Promise<RepositorySelection> {
|
||||
const quickPickItems = [
|
||||
createCustomRepoQuickPickItem(),
|
||||
...createSystemDefinedRepoListsQuickPickItems(),
|
||||
...createUserDefinedRepoListsQuickPickItems(),
|
||||
];
|
||||
|
||||
const options = {
|
||||
placeHolder: 'Select a repository list. You can define repository lists in the `codeQL.variantAnalysis.repositoryLists` setting.',
|
||||
ignoreFocusOut: true,
|
||||
};
|
||||
|
||||
const quickpick = await window.showQuickPick<RepoListQuickPickItem>(
|
||||
quickPickItems,
|
||||
options);
|
||||
|
||||
if (quickpick?.repositories?.length) {
|
||||
void logger.log(`Selected repositories: ${quickpick.repositories.join(', ')}`);
|
||||
return { repositories: quickpick.repositories };
|
||||
} else if (quickpick?.repositoryList) {
|
||||
void logger.log(`Selected repository list: ${quickpick.repositoryList}`);
|
||||
return { repositoryLists: [quickpick.repositoryList] };
|
||||
} else if (quickpick?.useCustomRepository) {
|
||||
const customRepo = await getCustomRepo();
|
||||
if (!customRepo || !REPO_REGEX.test(customRepo)) {
|
||||
void showAndLogErrorMessage('Invalid repository format. Please enter a valid repository in the format <owner>/<repo> (e.g. github/codeql)');
|
||||
return {};
|
||||
}
|
||||
void logger.log(`Entered repository: ${customRepo}`);
|
||||
return { repositories: [customRepo] };
|
||||
} else {
|
||||
void showAndLogErrorMessage('No repositories selected.');
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the selection is valid or not.
|
||||
* @param repoSelection The selection to check.
|
||||
* @returns A boolean flag indicating if the selection is valid or not.
|
||||
*/
|
||||
export function isValidSelection(repoSelection: RepositorySelection): boolean {
|
||||
if (repoSelection.repositories === undefined && repoSelection.repositoryLists === undefined) {
|
||||
return false;
|
||||
}
|
||||
if (repoSelection.repositories !== undefined && repoSelection.repositories.length === 0) {
|
||||
return false;
|
||||
}
|
||||
if (repoSelection.repositoryLists?.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
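A few illustrative calls showing how this guard behaves (the values are examples, not from the diff):

isValidSelection({});                                   // false: nothing selected
isValidSelection({ repositories: [] });                 // false: empty repository list
isValidSelection({ repositories: ['github/codeql'] });  // true
isValidSelection({ repositoryLists: ['top_100'] });     // true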
|
||||
|
||||
function createSystemDefinedRepoListsQuickPickItems(): RepoListQuickPickItem[] {
|
||||
const topNs = [10, 100, 1000];
|
||||
|
||||
return topNs.map(n => ({
|
||||
label: '$(star) Top ' + n,
|
||||
repositoryList: `top_${n}`,
|
||||
alwaysShow: true
|
||||
} as RepoListQuickPickItem));
|
||||
}
|
||||
|
||||
function createUserDefinedRepoListsQuickPickItems(): RepoListQuickPickItem[] {
|
||||
const repoLists = getRemoteRepositoryLists();
|
||||
if (!repoLists) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return Object.entries(repoLists).map<RepoListQuickPickItem>(([label, repositories]) => (
|
||||
{
|
||||
label, // the name of the repository list
|
||||
repositories // the actual array of repositories
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
function createCustomRepoQuickPickItem(): RepoListQuickPickItem {
|
||||
return {
|
||||
label: '$(edit) Enter a GitHub repository',
|
||||
useCustomRepository: true,
|
||||
alwaysShow: true,
|
||||
};
|
||||
}
|
||||
|
||||
async function getCustomRepo(): Promise<string | undefined> {
|
||||
return await window.showInputBox({
|
||||
title: 'Enter a GitHub repository in the format <owner>/<repo> (e.g. github/codeql)',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Tip: you can save frequently used repositories in the `codeQL.variantAnalysis.repositoryLists` setting',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { CancellationToken, QuickPickItem, Uri, window } from 'vscode';
|
||||
import { CancellationToken, Uri, window } from 'vscode';
|
||||
import * as path from 'path';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as fs from 'fs-extra';
|
||||
@@ -9,19 +9,20 @@ import {
|
||||
getOnDiskWorkspaceFolders,
|
||||
showAndLogErrorMessage,
|
||||
showAndLogInformationMessage,
|
||||
showInformationMessageWithAction,
|
||||
tryGetQueryMetadata,
|
||||
tmpDir
|
||||
} from '../helpers';
|
||||
import { Credentials } from '../authentication';
|
||||
import * as cli from '../cli';
|
||||
import { logger } from '../logging';
|
||||
import { getRemoteControllerRepo, getRemoteRepositoryLists, setRemoteControllerRepo } from '../config';
|
||||
import { getActionBranch, getRemoteControllerRepo, setRemoteControllerRepo } from '../config';
|
||||
import { ProgressCallback, UserCancellationException } from '../commandRunner';
|
||||
import { OctokitResponse } from '@octokit/types/dist-types';
|
||||
import { RemoteQuery } from './remote-query';
|
||||
import { RemoteQuerySubmissionResult } from './remote-query-submission-result';
|
||||
import { QueryMetadata } from '../pure/interface-types';
|
||||
import { getErrorMessage, REPO_REGEX } from '../pure/helpers-pure';
|
||||
import { getRepositorySelection, isValidSelection, RepositorySelection } from './repository-selection';
|
||||
|
||||
export interface QlPack {
|
||||
name: string;
|
||||
@@ -30,71 +31,16 @@ export interface QlPack {
|
||||
defaultSuite?: Record<string, unknown>[];
|
||||
defaultSuiteFile?: string;
|
||||
}
|
||||
interface RepoListQuickPickItem extends QuickPickItem {
|
||||
repoList: string[];
|
||||
}
|
||||
|
||||
interface QueriesResponse {
|
||||
workflow_run_id: number
|
||||
}
|
||||
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters or single hyphens, starting and ending in an alphanumeric character
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, or underscores
|
||||
*/
|
||||
const REPO_REGEX = /^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+\/[a-zA-Z0-9-_]+$/;
|
||||
|
||||
/**
|
||||
* Well-known names for the query pack used by the server.
|
||||
*/
|
||||
const QUERY_PACK_NAME = 'codeql-remote/query';
|
||||
|
||||
/**
|
||||
* Gets the repositories to run the query against.
|
||||
*/
|
||||
export async function getRepositories(): Promise<string[] | undefined> {
|
||||
const repoLists = getRemoteRepositoryLists();
|
||||
if (repoLists && Object.keys(repoLists).length) {
|
||||
const quickPickItems = Object.entries(repoLists).map<RepoListQuickPickItem>(([key, value]) => (
|
||||
{
|
||||
label: key, // the name of the repository list
|
||||
repoList: value, // the actual array of repositories
|
||||
}
|
||||
));
|
||||
const quickpick = await window.showQuickPick<RepoListQuickPickItem>(
|
||||
quickPickItems,
|
||||
{
|
||||
placeHolder: 'Select a repository list. You can define repository lists in the `codeQL.variantAnalysis.repositoryLists` setting.',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (quickpick?.repoList.length) {
|
||||
void logger.log(`Selected repositories: ${quickpick.repoList.join(', ')}`);
|
||||
return quickpick.repoList;
|
||||
} else {
|
||||
void showAndLogErrorMessage('No repositories selected.');
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
void logger.log('No repository lists defined. Displaying text input box.');
|
||||
const remoteRepo = await window.showInputBox({
|
||||
title: 'Enter a GitHub repository in the format <owner>/<repo> (e.g. github/codeql)',
|
||||
placeHolder: '<owner>/<repo>',
|
||||
prompt: 'Tip: you can save frequently used repositories in the `codeQL.variantAnalysis.repositoryLists` setting',
|
||||
ignoreFocusOut: true,
|
||||
});
|
||||
if (!remoteRepo) {
|
||||
void showAndLogErrorMessage('No repositories entered.');
|
||||
return;
|
||||
} else if (!REPO_REGEX.test(remoteRepo)) { // Check if user entered invalid input
|
||||
void showAndLogErrorMessage('Invalid repository format. Must be in the format <owner>/<repo> (e.g. github/codeql)');
|
||||
return;
|
||||
}
|
||||
void logger.log(`Entered repository: ${remoteRepo}`);
|
||||
return [remoteRepo];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Two possibilities:
|
||||
* 1. There is no qlpack.yml in this directory. Assume this is a lone query and generate a synthetic qlpack for it.
|
||||
@@ -243,8 +189,8 @@ export async function runRemoteQuery(
|
||||
message: 'Determining query target language'
|
||||
});
|
||||
|
||||
const repositories = await getRepositories();
|
||||
if (!repositories || repositories.length === 0) {
|
||||
const repoSelection = await getRepositorySelection();
|
||||
if (!isValidSelection(repoSelection)) {
|
||||
throw new UserCancellationException('No repositories to query.');
|
||||
}
|
||||
|
||||
@@ -302,7 +248,8 @@ export async function runRemoteQuery(
|
||||
message: 'Sending request'
|
||||
});
|
||||
|
||||
const workflowRunId = await runRemoteQueriesApiRequest(credentials, 'main', language, repositories, owner, repo, base64Pack, dryRun);
|
||||
const actionBranch = getActionBranch();
|
||||
const workflowRunId = await runRemoteQueriesApiRequest(credentials, actionBranch, language, repoSelection, owner, repo, base64Pack, dryRun);
|
||||
const queryStartTime = Date.now();
|
||||
const queryMetadata = await tryGetQueryMetadata(cliServer, queryFile);
|
||||
|
||||
@@ -314,7 +261,6 @@ export async function runRemoteQuery(
|
||||
}
|
||||
|
||||
const remoteQuery = await buildRemoteQueryEntity(
|
||||
repositories,
|
||||
queryFile,
|
||||
queryMetadata,
|
||||
owner,
|
||||
@@ -341,15 +287,30 @@ async function runRemoteQueriesApiRequest(
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repositories: string[],
|
||||
repoSelection: RepositorySelection,
|
||||
owner: string,
|
||||
repo: string,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
): Promise<void | number> {
|
||||
const data = {
|
||||
ref,
|
||||
language,
|
||||
repositories: repoSelection.repositories ?? undefined,
|
||||
repository_lists: repoSelection.repositoryLists ?? undefined,
|
||||
query_pack: queryPackBase64,
|
||||
};
|
||||
|
||||
if (dryRun) {
|
||||
void showAndLogInformationMessage('[DRY RUN] Would have sent request. See extension log for the payload.');
|
||||
void logger.log(JSON.stringify({ ref, language, repositories, owner, repo, queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes' }));
|
||||
void logger.log(JSON.stringify({
|
||||
owner,
|
||||
repo,
|
||||
data: {
|
||||
...data,
|
||||
queryPackBase64: queryPackBase64.substring(0, 100) + '... ' + queryPackBase64.length + ' bytes'
|
||||
}
|
||||
}));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -360,56 +321,14 @@ async function runRemoteQueriesApiRequest(
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
data: {
|
||||
ref,
|
||||
language,
|
||||
repositories,
|
||||
query_pack: queryPackBase64,
|
||||
}
|
||||
data
|
||||
}
|
||||
);
|
||||
const workflowRunId = response.data.workflow_run_id;
|
||||
void showAndLogInformationMessage(`Successfully scheduled runs. [Click here to see the progress](https://github.com/${owner}/${repo}/actions/runs/${workflowRunId}).`);
|
||||
return workflowRunId;
|
||||
} catch (error) {
|
||||
return await attemptRerun(error, credentials, ref, language, repositories, owner, repo, queryPackBase64, dryRun);
|
||||
}
|
||||
}
|
||||
|
||||
/** Attempts to rerun the query on only the valid repositories */
|
||||
export async function attemptRerun(
|
||||
error: any,
|
||||
credentials: Credentials,
|
||||
ref: string,
|
||||
language: string,
|
||||
repositories: string[],
|
||||
owner: string,
|
||||
repo: string,
|
||||
queryPackBase64: string,
|
||||
dryRun = false
|
||||
) {
|
||||
if (typeof error.message === 'string' && error.message.includes('Some repositories were invalid')) {
|
||||
const invalidRepos = error?.response?.data?.invalid_repos || [];
|
||||
void logger.log('Unable to run query on some of the specified repositories');
|
||||
if (invalidRepos.length > 0) {
|
||||
void logger.log(`Invalid repos: ${invalidRepos.join(', ')}`);
|
||||
}
|
||||
|
||||
if (invalidRepos.length === repositories.length) {
|
||||
// Every repo is invalid in some way
|
||||
void showAndLogErrorMessage('Unable to run query on any of the specified repositories.');
|
||||
return;
|
||||
}
|
||||
|
||||
const popupMessage = 'Unable to run query on some of the specified repositories. [See logs for more details](command:codeQL.showLogs).';
|
||||
const rerunQuery = await showInformationMessageWithAction(popupMessage, 'Rerun on the valid repositories only');
|
||||
if (rerunQuery) {
|
||||
const validRepositories = repositories.filter(r => !invalidRepos.includes(r));
|
||||
void logger.log(`Rerunning query on set of valid repositories: ${JSON.stringify(validRepositories)}`);
|
||||
return await runRemoteQueriesApiRequest(credentials, ref, language, validRepositories, owner, repo, queryPackBase64, dryRun);
|
||||
}
|
||||
} else {
|
||||
void showAndLogErrorMessage(error);
|
||||
void showAndLogErrorMessage(getErrorMessage(error));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -438,7 +357,6 @@ async function ensureNameAndSuite(queryPackDir: string, packRelativePath: string
|
||||
}
|
||||
|
||||
async function buildRemoteQueryEntity(
|
||||
repositories: string[],
|
||||
queryFilePath: string,
|
||||
queryMetadata: QueryMetadata | undefined,
|
||||
controllerRepoOwner: string,
|
||||
@@ -450,11 +368,6 @@ async function buildRemoteQueryEntity(
|
||||
// The query name is either the name as specified in the query metadata, or the file name.
|
||||
const queryName = queryMetadata?.name ?? path.basename(queryFilePath);
|
||||
|
||||
const queryRepos = repositories.map(r => {
|
||||
const [owner, repo] = r.split('/');
|
||||
return { owner: owner, name: repo };
|
||||
});
|
||||
|
||||
const queryText = await fs.readFile(queryFilePath, 'utf8');
|
||||
|
||||
return {
|
||||
@@ -466,7 +379,6 @@ async function buildRemoteQueryEntity(
|
||||
owner: controllerRepoOwner,
|
||||
name: controllerRepoName,
|
||||
},
|
||||
repositories: queryRepos,
|
||||
executionStartTime: queryStartTime,
|
||||
actionsWorkflowRunId: workflowRunId
|
||||
};
|
||||
|
||||
@@ -54,9 +54,9 @@ function extractResultAlerts(
|
||||
for (const location of result.locations ?? []) {
|
||||
const physicalLocation = location.physicalLocation!;
|
||||
const filePath = physicalLocation.artifactLocation!.uri!;
|
||||
const codeSnippet = getCodeSnippet(physicalLocation.contextRegion!);
|
||||
const codeSnippet = getCodeSnippet(physicalLocation.contextRegion, physicalLocation.region);
|
||||
const highlightedRegion = physicalLocation.region
|
||||
? getHighlightedRegion(physicalLocation.region!)
|
||||
? getHighlightedRegion(physicalLocation.region)
|
||||
: undefined;
|
||||
|
||||
const analysisAlert: AnalysisAlert = {
|
||||
@@ -156,15 +156,21 @@ export function tryGetRule(
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getCodeSnippet(region: sarif.Region): CodeSnippet {
|
||||
const text = region.snippet!.text!;
|
||||
function getCodeSnippet(region?: sarif.Region, alternateRegion?: sarif.Region): CodeSnippet | undefined {
|
||||
region = region ?? alternateRegion;
|
||||
|
||||
if (!region) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const text = region.snippet?.text || '';
|
||||
const { startLine, endLine } = parseSarifRegion(region);
|
||||
|
||||
return {
|
||||
startLine,
|
||||
endLine,
|
||||
text
|
||||
} as CodeSnippet;
|
||||
};
|
||||
}
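A sketch of the new fallback behaviour (the region values are illustrative):

getCodeSnippet(undefined, undefined);                    // undefined: no snippet can be built
getCodeSnippet(undefined, { startLine: 5, endLine: 7 }); // falls back to the alternate region;
                                                         // the snippet text is '' when the region carries none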
|
||||
|
||||
function getHighlightedRegion(region: sarif.Region): HighlightedRegion {
|
||||
@@ -175,7 +181,7 @@ function getHighlightedRegion(region: sarif.Region): HighlightedRegion {
|
||||
startColumn,
|
||||
endLine,
|
||||
|
||||
// parseSarifRegion currently shifts the end column by 1 to account
|
||||
// parseSarifRegion currently shifts the end column by 1 to account
|
||||
// for the way vscode counts columns so we need to shift it back.
|
||||
endColumn: endColumn + 1
|
||||
};
|
||||
@@ -195,7 +201,7 @@ function getCodeFlows(
|
||||
for (const threadFlowLocation of threadFlow.locations) {
|
||||
const physicalLocation = threadFlowLocation!.location!.physicalLocation!;
|
||||
const filePath = physicalLocation!.artifactLocation!.uri!;
|
||||
const codeSnippet = getCodeSnippet(physicalLocation.contextRegion!);
|
||||
const codeSnippet = getCodeSnippet(physicalLocation.contextRegion, physicalLocation.region);
|
||||
const highlightedRegion = physicalLocation.region
|
||||
? getHighlightedRegion(physicalLocation.region)
|
||||
: undefined;
|
||||
|
||||
@@ -21,7 +21,7 @@ export interface AnalysisAlert {
|
||||
shortDescription: string;
|
||||
severity: ResultSeverity;
|
||||
fileLink: FileLink;
|
||||
codeSnippet: CodeSnippet;
|
||||
codeSnippet?: CodeSnippet;
|
||||
highlightedRegion?: HighlightedRegion;
|
||||
codeFlows: CodeFlow[];
|
||||
}
|
||||
|
||||
@@ -37,7 +37,6 @@ const shouldHighlightLine = (lineNumber: number, highlightedRegion: HighlightedR
|
||||
const Container = styled.div`
|
||||
font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas, Liberation Mono, monospace;
|
||||
font-size: x-small;
|
||||
width: 55em;
|
||||
`;
|
||||
|
||||
const TitleContainer = styled.div`
|
||||
@@ -181,23 +180,33 @@ const FileCodeSnippet = ({
|
||||
messageChildren,
|
||||
}: {
|
||||
fileLink: FileLink,
|
||||
codeSnippet: CodeSnippet,
|
||||
codeSnippet?: CodeSnippet,
|
||||
highlightedRegion?: HighlightedRegion,
|
||||
severity?: ResultSeverity,
|
||||
message?: AnalysisMessage,
|
||||
messageChildren?: React.ReactNode,
|
||||
}) => {
|
||||
|
||||
const code = codeSnippet.text.split('\n');
|
||||
|
||||
const startingLine = codeSnippet.startLine;
|
||||
const endingLine = codeSnippet.endLine;
|
||||
const startingLine = codeSnippet?.startLine || 0;
|
||||
const endingLine = codeSnippet?.endLine || 0;
|
||||
|
||||
const titleFileUri = createRemoteFileRef(
|
||||
fileLink,
|
||||
startingLine,
|
||||
endingLine);
|
||||
|
||||
if (!codeSnippet) {
|
||||
return (
|
||||
<Container>
|
||||
<TitleContainer>
|
||||
<Link href={titleFileUri}>{fileLink.filePath}</Link>
|
||||
</TitleContainer>
|
||||
</Container>
|
||||
);
|
||||
}
|
||||
|
||||
const code = codeSnippet.text.split('\n');
|
||||
|
||||
return (
|
||||
<Container>
|
||||
<TitleContainer>
|
||||
|
||||
@@ -19,6 +19,7 @@ import CollapsibleItem from './CollapsibleItem';
|
||||
import { AlertIcon, CodeSquareIcon, FileCodeIcon, RepoIcon, TerminalIcon } from '@primer/octicons-react';
|
||||
import AnalysisAlertResult from './AnalysisAlertResult';
|
||||
import RawResultsTable from './RawResultsTable';
|
||||
import RepositoriesSearch from './RepositoriesSearch';
|
||||
|
||||
const numOfReposInContractedMode = 10;
|
||||
|
||||
@@ -308,6 +309,7 @@ const AnalysesResults = ({
|
||||
totalResults: number
|
||||
}) => {
|
||||
const totalAnalysesResults = sumAnalysesResults(analysesResults);
|
||||
const [filterValue, setFilterValue] = React.useState('');
|
||||
|
||||
if (totalResults === 0) {
|
||||
return <></>;
|
||||
@@ -322,11 +324,20 @@ const AnalysesResults = ({
|
||||
<AnalysesResultsDescription
|
||||
queryResult={queryResult}
|
||||
analysesResults={analysesResults} />
|
||||
|
||||
<VerticalSpace size={2} />
|
||||
<RepositoriesSearch
|
||||
filterValue={filterValue}
|
||||
setFilterValue={setFilterValue} />
|
||||
|
||||
<ul className="vscode-codeql__flat-list">
|
||||
{analysesResults.filter(a => a.interpretedResults.length > 0 || a.rawResults).map(r =>
|
||||
<li key={r.nwo} className="vscode-codeql__analyses-results-list-item">
|
||||
<RepoAnalysisResults {...r} />
|
||||
</li>)}
|
||||
{analysesResults
|
||||
.filter(a => a.interpretedResults.length > 0 || a.rawResults)
|
||||
.filter(a => a.nwo.toLowerCase().includes(filterValue.toLowerCase()))
|
||||
.map(r =>
|
||||
<li key={r.nwo} className="vscode-codeql__analyses-results-list-item">
|
||||
<RepoAnalysisResults {...r} />
|
||||
</li>)}
|
||||
</ul>
|
||||
</>
|
||||
);
|
||||
@@ -358,18 +369,20 @@ export function RemoteQueries(): JSX.Element {
|
||||
}
|
||||
|
||||
try {
|
||||
return <div>
|
||||
<ThemeProvider colorMode="auto">
|
||||
<ViewTitle>{queryResult.queryTitle}</ViewTitle>
|
||||
<QueryInfo {...queryResult} />
|
||||
<Failures {...queryResult} />
|
||||
<Summary queryResult={queryResult} analysesResults={analysesResults} />
|
||||
<AnalysesResults
|
||||
queryResult={queryResult}
|
||||
analysesResults={analysesResults}
|
||||
totalResults={queryResult.totalResultCount} />
|
||||
</ThemeProvider>
|
||||
</div>;
|
||||
return (
|
||||
<div className="vscode-codeql__remote-queries">
|
||||
<ThemeProvider colorMode="auto">
|
||||
<ViewTitle>{queryResult.queryTitle}</ViewTitle>
|
||||
<QueryInfo {...queryResult} />
|
||||
<Failures {...queryResult} />
|
||||
<Summary queryResult={queryResult} analysesResults={analysesResults} />
|
||||
<AnalysesResults
|
||||
queryResult={queryResult}
|
||||
analysesResults={analysesResults}
|
||||
totalResults={queryResult.totalResultCount} />
|
||||
</ThemeProvider>
|
||||
</div>
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
return <div>There was an error displaying the view.</div>;
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
import * as React from 'react';
|
||||
import { ChangeEvent } from 'react';
|
||||
import { TextInput } from '@primer/react';
|
||||
import { SearchIcon } from '@primer/octicons-react';
|
||||
|
||||
interface RepositoriesSearchProps {
|
||||
filterValue: string;
|
||||
setFilterValue: (value: string) => void;
|
||||
}
|
||||
|
||||
const RepositoriesSearch = ({ filterValue, setFilterValue }: RepositoriesSearchProps) => {
|
||||
return <>
|
||||
<TextInput
|
||||
block
|
||||
sx={{
|
||||
backgroundColor: 'var(--vscode-editor-background);',
|
||||
color: 'var(--vscode-editor-foreground);',
|
||||
width: 'calc(100% - 14px)',
|
||||
}}
|
||||
leadingVisual={SearchIcon}
|
||||
aria-label="Repository search"
|
||||
name="repository-search"
|
||||
placeholder="Filter by repository owner/name"
|
||||
value={filterValue}
|
||||
onChange={(e: ChangeEvent) => setFilterValue((e.target as HTMLInputElement).value)}
|
||||
/>
|
||||
</>;
|
||||
};
|
||||
|
||||
export default RepositoriesSearch;
|
||||
@@ -1,3 +1,7 @@
|
||||
.vscode-codeql__remote-queries {
|
||||
max-width: 55em;
|
||||
}
|
||||
|
||||
.vscode-codeql__query-info-link {
|
||||
text-decoration: none;
|
||||
padding-right: 1em;
|
||||
|
||||
@@ -29,13 +29,14 @@ import { ProgressCallback, UserCancellationException } from './commandRunner';
|
||||
import { DatabaseInfo, QueryMetadata } from './pure/interface-types';
|
||||
import { logger } from './logging';
|
||||
import * as messages from './pure/messages';
|
||||
import { InitialQueryInfo } from './query-results';
|
||||
import { InitialQueryInfo, LocalQueryInfo } from './query-results';
|
||||
import * as qsClient from './queryserver-client';
|
||||
import { isQuickQueryPath } from './quick-query';
|
||||
import { compileDatabaseUpgradeSequence, hasNondestructiveUpgradeCapabilities, upgradeDatabaseExplicit } from './upgrades';
import { ensureMetadataIsComplete } from './query-results';
import { SELECT_QUERY_NAME } from './contextual/locationFinder';
import { DecodedBqrsChunk } from './pure/bqrs-cli-types';
import { getErrorMessage } from './pure/helpers-pure';

/**
 * run-queries.ts
@@ -94,6 +95,18 @@ export class QueryEvaluationInfo {
    return qsClient.findQueryLogFile(this.querySaveDir);
  }

  get evalLogPath() {
    return qsClient.findQueryEvalLogFile(this.querySaveDir);
  }

  get evalLogSummaryPath() {
    return qsClient.findQueryEvalLogSummaryFile(this.querySaveDir);
  }

  get evalLogEndSummaryPath() {
    return qsClient.findQueryEvalLogEndSummaryFile(this.querySaveDir);
  }

  get resultsPaths() {
    return {
      resultsPath: path.join(this.querySaveDir, 'results.bqrs'),
@@ -124,6 +137,7 @@ export class QueryEvaluationInfo {
    dbItem: DatabaseItem,
    progress: ProgressCallback,
    token: CancellationToken,
    queryInfo?: LocalQueryInfo,
  ): Promise<messages.EvaluationResult> {
    if (!dbItem.contents || dbItem.error) {
      throw new Error('Can\'t run query on invalid database.');
@@ -155,6 +169,13 @@ export class QueryEvaluationInfo {
      dbDir: dbItem.contents.datasetUri.fsPath,
      workingSet: 'default'
    };
    if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
      await qs.sendRequest(messages.startLog, {
        db: dataset,
        logPath: this.evalLogPath,
      });

    }
    const params: messages.EvaluateQueriesParams = {
      db: dataset,
      evaluateId: callbackId,
@@ -171,6 +192,26 @@ export class QueryEvaluationInfo {
      }
    } finally {
      qs.unRegisterCallback(callbackId);
      if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
        await qs.sendRequest(messages.endLog, {
          db: dataset,
          logPath: this.evalLogPath,
        });
        if (await this.hasEvalLog()) {
          queryInfo.evalLogLocation = this.evalLogPath;
          await qs.cliServer.generateLogSummary(this.evalLogPath, this.evalLogSummaryPath, this.evalLogEndSummaryPath);
          queryInfo.evalLogSummaryLocation = this.evalLogSummaryPath;
          fs.readFile(this.evalLogEndSummaryPath, (err, buffer) => {
            if (err) {
              throw new Error(`Could not read structured evaluator log end of summary file at ${this.evalLogEndSummaryPath}.`);
            }
            void qs.logger.log(' --- Evaluator Log Summary --- ');
            void qs.logger.log(buffer.toString());
          });
        } else {
          void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.evalLogPath}.`);
        }
      }
    }
    return result || {
      evaluationTime: 0,
@@ -284,6 +325,13 @@ export class QueryEvaluationInfo {
    return this.dilPath;
  }

  /**
   * Holds if this query already has a completed structured evaluator log
   */
  async hasEvalLog(): Promise<boolean> {
    return fs.pathExists(this.evalLogPath);
  }

  /**
   * Creates the CSV file containing the results of this query. This will only be called if the query
   * does not have interpreted results and the CSV file does not already exist.
@@ -657,6 +705,7 @@ export async function compileAndRunQueryAgainstDatabase(
  progress: ProgressCallback,
  token: CancellationToken,
  templates?: messages.TemplateDefinitions,
  queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
): Promise<QueryWithResults> {
  if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
    throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
@@ -742,7 +791,7 @@ export async function compileAndRunQueryAgainstDatabase(
  }

  if (errors.length === 0) {
    const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token);
    const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
    if (result.resultType !== messages.QueryResultType.SUCCESS) {
      const message = result.message || 'Failed to run query';
      void logger.log(message);
@@ -790,7 +839,7 @@ export async function compileAndRunQueryAgainstDatabase(
    await upgradeDir?.cleanup();
  } catch (e) {
    void qs.logger.log(
      `Could not clean up the upgrades dir. Reason: ${e.message || e}`,
      `Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
      { additionalLogLocation: query.logPath }
    );
  }

@@ -4,17 +4,18 @@ import { parser } from 'stream-json';
import { pick } from 'stream-json/filters/Pick';
import Assembler = require('stream-json/Assembler');
import { chain } from 'stream-chain';
import { getErrorMessage } from './pure/helpers-pure';

const DUMMY_TOOL : Sarif.Tool = {driver: {name: ''}};
const DUMMY_TOOL: Sarif.Tool = { driver: { name: '' } };

export async function sarifParser(interpretedResultsPath: string) : Promise<Sarif.Log> {
export async function sarifParser(interpretedResultsPath: string): Promise<Sarif.Log> {
  try {
    // Parse the SARIF file into token streams, filtering out only the results array.
    const p = parser();
    const pipeline = chain([
      fs.createReadStream(interpretedResultsPath),
      p,
      pick({filter: 'runs.0.results'})
      pick({ filter: 'runs.0.results' })
    ]);

    // Creates JavaScript objects from the token stream
@@ -26,23 +27,23 @@ export async function sarifParser(interpretedResultsPath: string) : Promise<Sari
      pipeline.on('error', (error) => {
        reject(error);
      });

      asm.on('done', (asm) => {

        const log : Sarif.Log = {
          version: '2.1.0',
        const log: Sarif.Log = {
          version: '2.1.0',
          runs: [
            {
              tool: DUMMY_TOOL,
            {
              tool: DUMMY_TOOL,
              results: asm.current ?? []
            }
          ]
        };

        resolve(log);
      });
    });
  } catch (err) {
    throw new Error(`Parsing output of interpretation failed: ${err.stderr || err}`);
  } catch (e) {
    throw new Error(`Parsing output of interpretation failed: ${(e as any).stderr || getErrorMessage(e)}`);
  }
}
}

@@ -76,7 +76,7 @@ export class QLTestAdapterFactory extends DisposableObject {
 * @param ext The new extension, including the `.`.
 */
function changeExtension(p: string, ext: string): string {
  return p.substr(0, p.length - path.extname(p).length) + ext;
  return p.slice(0, -path.extname(p).length) + ext;
}

/**

@@ -18,7 +18,7 @@ describe('Databases', function() {
  this.timeout(60000);

  const LGTM_URL = 'https://lgtm.com/projects/g/aeisenberg/angular-bind-notifier/';

  let databaseManager: DatabaseManager;
  let sandbox: sinon.SinonSandbox;
  let inputBoxStub: sinon.SinonStub;
@@ -40,7 +40,7 @@ describe('Databases', function() {
      progressCallback = sandbox.spy();
      inputBoxStub = sandbox.stub(window, 'showInputBox');
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });

@@ -48,7 +48,7 @@ describe('Databases', function() {
    try {
      sandbox.restore();
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });

@@ -1,6 +1,5 @@
import * as path from 'path';
import { extensions } from 'vscode';
import 'mocha';

import { CodeQLCliServer } from '../../cli';
import { CodeQLExtensionInterface } from '../../extension';

@@ -1,7 +1,9 @@
import 'source-map-support/register';
import { runTestsInDirectory } from '../index-template';
import 'mocha';
import * as sinonChai from 'sinon-chai';
import * as chai from 'chai';
import 'chai/register-should';
import * as chaiAsPromised from 'chai-as-promised';
chai.use(chaiAsPromised);
chai.use(sinonChai);

@@ -1,6 +1,5 @@
|
||||
import * as sinon from 'sinon';
|
||||
import { extensions, window } from 'vscode';
|
||||
import 'mocha';
|
||||
import * as path from 'path';
|
||||
|
||||
import * as pq from 'proxyquire';
|
||||
@@ -8,6 +7,7 @@ import * as pq from 'proxyquire';
|
||||
import { CliVersionConstraint, CodeQLCliServer } from '../../cli';
|
||||
import { CodeQLExtensionInterface } from '../../extension';
|
||||
import { expect } from 'chai';
|
||||
import { getErrorMessage } from '../../pure/helpers-pure';
|
||||
|
||||
const proxyquire = pq.noPreserveCache();
|
||||
|
||||
@@ -121,8 +121,8 @@ describe('Packaging commands', function() {
|
||||
await mod.handleInstallPackDependencies(cli, progress);
|
||||
// This line should not be reached
|
||||
expect(true).to.be.false;
|
||||
} catch (error) {
|
||||
expect(error.message).to.contain('Unable to install pack dependencies');
|
||||
} catch (e) {
|
||||
expect(getErrorMessage(e)).to.contain('Unable to install pack dependencies');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,7 +3,6 @@ import { CancellationToken, commands, ExtensionContext, extensions, Uri } from '
|
||||
import * as sinon from 'sinon';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import 'mocha';
|
||||
import { expect } from 'chai';
|
||||
import * as yaml from 'js-yaml';
|
||||
|
||||
@@ -78,7 +77,7 @@ describe('Queries', function() {
|
||||
}
|
||||
dbItem = maybeDbItem;
|
||||
} catch (e) {
|
||||
fail(e);
|
||||
fail(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -86,7 +85,7 @@ describe('Queries', function() {
|
||||
try {
|
||||
sandbox.restore();
|
||||
} catch (e) {
|
||||
fail(e);
|
||||
fail(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -107,7 +106,7 @@ describe('Queries', function() {
|
||||
expect(result.result.resultType).to.eq(QueryResultType.SUCCESS);
|
||||
} catch (e) {
|
||||
console.error('Test Failed');
|
||||
fail(e);
|
||||
fail(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -131,7 +130,7 @@ describe('Queries', function() {
|
||||
expect(result.result.resultType).to.eq(QueryResultType.SUCCESS);
|
||||
} catch (e) {
|
||||
console.error('Test Failed');
|
||||
fail(e);
|
||||
fail(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { expect } from 'chai';
|
||||
import * as fs from 'fs-extra';
|
||||
import 'mocha';
|
||||
import * as path from 'path';
|
||||
import * as tmp from 'tmp';
|
||||
import * as url from 'url';
|
||||
@@ -113,7 +112,7 @@ describe('using the query server', function() {
|
||||
throw new Error('Extension not initialized. Make sure cli is downloaded and installed properly.');
|
||||
}
|
||||
} catch (e) {
|
||||
fail(e);
|
||||
fail(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -163,7 +162,7 @@ describe('using the query server', function() {
|
||||
await compilationSucceeded.resolve();
|
||||
}
|
||||
catch (e) {
|
||||
await compilationSucceeded.reject(e);
|
||||
await compilationSucceeded.reject(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -190,7 +189,7 @@ describe('using the query server', function() {
|
||||
await qs.sendRequest(messages.runQueries, params, token, () => { /**/ });
|
||||
}
|
||||
catch (e) {
|
||||
await evaluationSucceeded.reject(e);
|
||||
await evaluationSucceeded.reject(e as Error);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -2,21 +2,20 @@ import { assert, expect } from 'chai';
|
||||
import * as path from 'path';
|
||||
import * as sinon from 'sinon';
|
||||
import { CancellationToken, extensions, QuickPickItem, Uri, window } from 'vscode';
|
||||
import 'mocha';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as yaml from 'js-yaml';
|
||||
|
||||
import { QlPack, runRemoteQuery } from '../../remote-queries/run-remote-query';
|
||||
import { Credentials } from '../../authentication';
|
||||
import { CliVersionConstraint, CodeQLCliServer } from '../../cli';
|
||||
import { CodeQLExtensionInterface } from '../../extension';
|
||||
import { setRemoteControllerRepo, setRemoteRepositoryLists } from '../../config';
|
||||
import { UserCancellationException } from '../../commandRunner';
|
||||
import { QlPack, runRemoteQuery } from '../../../remote-queries/run-remote-query';
|
||||
import { Credentials } from '../../../authentication';
|
||||
import { CliVersionConstraint, CodeQLCliServer } from '../../../cli';
|
||||
import { CodeQLExtensionInterface } from '../../../extension';
|
||||
import { setRemoteControllerRepo, setRemoteRepositoryLists } from '../../../config';
|
||||
import { UserCancellationException } from '../../../commandRunner';
|
||||
import { lte } from 'semver';
|
||||
|
||||
describe('Remote queries', function() {
|
||||
const baseDir = path.join(__dirname, '../../../src/vscode-tests/cli-integration');
|
||||
const baseDir = path.join(__dirname, '../../../../src/vscode-tests/cli-integration');
|
||||
|
||||
let sandbox: sinon.SinonSandbox;
|
||||
|
||||
@@ -53,7 +52,7 @@ describe('Remote queries', function() {
|
||||
progress = sandbox.spy();
|
||||
// Should not have asked for a language
|
||||
showQuickPickSpy = sandbox.stub(window, 'showQuickPick')
|
||||
.onFirstCall().resolves({ repoList: ['github/vscode-codeql'] } as unknown as QuickPickItem)
|
||||
.onFirstCall().resolves({ repositories: ['github/vscode-codeql'] } as unknown as QuickPickItem)
|
||||
.onSecondCall().resolves('javascript' as unknown as QuickPickItem);
|
||||
|
||||
// always run in the vscode-codeql repo
|
||||
@@ -44,7 +44,7 @@ const _10MB = _1MB * 10;

// CLI version to test. Hard code the latest as default. And be sure
// to update the env if it is not otherwise set.
const CLI_VERSION = process.env.CLI_VERSION || 'v2.8.3';
const CLI_VERSION = process.env.CLI_VERSION || 'v2.8.4';
process.env.CLI_VERSION = CLI_VERSION;

// Base dir where CLIs will be downloaded into

@@ -1,13 +1,8 @@
|
||||
import * as assert from 'assert';
|
||||
import * as chai from 'chai';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import 'mocha';
|
||||
import * as path from 'path';
|
||||
import * as vscode from 'vscode';
|
||||
import * as determiningSelectedQueryTest from './determining-selected-query-test';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
|
||||
describe('launching with a minimal workspace', async () => {
|
||||
|
||||
const ext = vscode.extensions.getExtension('GitHub.vscode-codeql');
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import 'sinon-chai';
|
||||
import * as Sinon from 'sinon';
|
||||
import * as chai from 'chai';
|
||||
import { expect } from 'chai';
|
||||
import { workspace } from 'vscode';
|
||||
|
||||
import {
|
||||
@@ -12,9 +9,6 @@ import {
|
||||
QueryServerConfigListener
|
||||
} from '../../config';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('config listeners', function() {
|
||||
// Because we are adding some extra waiting, need to bump the test timeouts.
|
||||
this.timeout(5000);
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import * as sinon from 'sinon';
|
||||
import * as tmp from 'tmp';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
import 'source-map-support/register';
|
||||
import { runTestsInDirectory } from '../index-template';
|
||||
|
||||
import * as sinonChai from 'sinon-chai';
|
||||
import * as chai from 'chai';
|
||||
import 'chai/register-should';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
chai.use(chaiAsPromised);
|
||||
chai.use(sinonChai);
|
||||
|
||||
|
||||
export function run(): Promise<void> {
|
||||
return runTestsInDirectory(__dirname);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import { Uri, WorkspaceFolder } from 'vscode';
|
||||
import { expect } from 'chai';
|
||||
import * as fs from 'fs-extra';
|
||||
|
||||
@@ -1,19 +1,13 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as chai from 'chai';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { expect } from 'chai';
|
||||
import * as sinon from 'sinon';
|
||||
import * as yaml from 'js-yaml';
|
||||
|
||||
import { AstViewer, AstItem } from '../../astViewer';
|
||||
import { commands, Range } from 'vscode';
|
||||
import { commands, Range, Uri } from 'vscode';
|
||||
import { DatabaseItem } from '../../databases';
|
||||
import { testDisposeHandler } from '../test-dispose-handler';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
|
||||
|
||||
|
||||
describe('AstViewer', () => {
|
||||
let astRoots: AstItem[];
|
||||
let viewer: AstViewer | undefined;
|
||||
@@ -40,7 +34,7 @@ describe('AstViewer', () => {
|
||||
it('should update the viewer roots', () => {
|
||||
const item = {} as DatabaseItem;
|
||||
viewer = new AstViewer();
|
||||
viewer.updateRoots(astRoots, item, 'def/abc');
|
||||
viewer.updateRoots(astRoots, item, Uri.file('def/abc'));
|
||||
|
||||
expect((viewer as any).treeDataProvider.roots).to.eq(astRoots);
|
||||
expect((viewer as any).treeDataProvider.db).to.eq(item);
|
||||
@@ -59,25 +53,31 @@ describe('AstViewer', () => {
|
||||
doSelectionTest(expr, expr.fileLocation?.range);
|
||||
});
|
||||
|
||||
it('should select nothing', () => {
|
||||
it('should select nothing because of no overlap in range', () => {
|
||||
doSelectionTest(undefined, new Range(2, 3, 4, 5));
|
||||
});
|
||||
|
||||
it('should select nothing because of different file', () => {
|
||||
doSelectionTest(undefined, astRoots[0].fileLocation?.range, Uri.file('def'));
|
||||
});
|
||||
|
||||
const defaultUri = Uri.file('def/abc');
|
||||
|
||||
function doSelectionTest(
|
||||
expectedSelection: any,
|
||||
selectionRange: Range | undefined,
|
||||
fsPath = 'def/abc',
|
||||
fileUri = defaultUri
|
||||
) {
|
||||
const item = {} as DatabaseItem;
|
||||
viewer = new AstViewer();
|
||||
viewer.updateRoots(astRoots, item, fsPath);
|
||||
viewer.updateRoots(astRoots, item, defaultUri);
|
||||
const spy = sandbox.spy();
|
||||
(viewer as any).treeView.reveal = spy;
|
||||
Object.defineProperty((viewer as any).treeView, 'visible', {
|
||||
value: true
|
||||
});
|
||||
|
||||
const mockEvent = createMockEvent(selectionRange, fsPath);
|
||||
const mockEvent = createMockEvent(selectionRange, fileUri);
|
||||
(viewer as any).updateTreeSelection(mockEvent);
|
||||
if (expectedSelection) {
|
||||
expect(spy).to.have.been.calledWith(expectedSelection);
|
||||
@@ -88,7 +88,7 @@ describe('AstViewer', () => {
|
||||
|
||||
function createMockEvent(
|
||||
selectionRange: Range | undefined,
|
||||
fsPath: string,
|
||||
uri: Uri,
|
||||
) {
|
||||
return {
|
||||
selections: [{
|
||||
@@ -98,7 +98,7 @@ describe('AstViewer', () => {
|
||||
textEditor: {
|
||||
document: {
|
||||
uri: {
|
||||
fsPath
|
||||
fsPath: uri.fsPath
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as chai from 'chai';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { expect } from 'chai';
|
||||
import * as sinon from 'sinon';
|
||||
|
||||
import AstBuilder from '../../../contextual/astBuilder';
|
||||
import { QueryWithResults } from '../../../run-queries';
|
||||
import { CodeQLCliServer } from '../../../cli';
|
||||
import { DatabaseItem } from '../../../databases';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
import { Uri } from 'vscode';
|
||||
|
||||
/**
|
||||
*
|
||||
@@ -145,7 +142,7 @@ describe('AstBuilder', () => {
|
||||
resultsPath: '/a/b/c'
|
||||
}
|
||||
}
|
||||
} as QueryWithResults, mockCli, {} as DatabaseItem, '');
|
||||
} as QueryWithResults, mockCli, {} as DatabaseItem, Uri.file(''));
|
||||
}
|
||||
|
||||
function mockDecode(resultSet: 'nodes' | 'edges' | 'graphProperties') {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import { expect } from 'chai';
|
||||
import { Uri, Range } from 'vscode';
|
||||
|
||||
|
||||
@@ -1,17 +1,12 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import * as yaml from 'js-yaml';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import * as sinon from 'sinon';
|
||||
import * as chai from 'chai';
|
||||
import * as sinonChai from 'sinon-chai';
|
||||
import { expect } from 'chai';
|
||||
import * as pq from 'proxyquire';
|
||||
import { KeyType } from '../../../contextual/keyType';
|
||||
import { getErrorMessage } from '../../../pure/helpers-pure';
|
||||
|
||||
const proxyquire = pq.noPreserveCache().noCallThru();
|
||||
chai.use(chaiAsPromised);
|
||||
chai.use(sinonChai);
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('queryResolver', () => {
|
||||
let module: Record<string, Function>;
|
||||
@@ -70,7 +65,7 @@ describe('queryResolver', () => {
|
||||
// should reject
|
||||
expect(true).to.be.false;
|
||||
} catch (e) {
|
||||
expect(e.message).to.eq(
|
||||
expect(getErrorMessage(e)).to.eq(
|
||||
'Couldn\'t find any queries tagged ide-contextual-queries/local-definitions in any of the following packs: my-qlpack.'
|
||||
);
|
||||
}
|
||||
|
||||
@@ -22,6 +22,17 @@
|
||||
"innerFilePath": "results.sarif",
|
||||
"queryId": "MRVA Integration test 1-6sBi6oaky_fxqXW2NA4bx"
|
||||
}
|
||||
},
|
||||
{
|
||||
"nwo": "hucairz/i-dont-exist",
|
||||
"resultCount": 5,
|
||||
"fileSizeInBytes": 81237,
|
||||
"downloadLink": {
|
||||
"id": "999999",
|
||||
"urlPath": "/these/results/will/never/be/downloaded/999999",
|
||||
"innerFilePath": "results.sarif",
|
||||
"queryId": "MRVA Integration test 2-UL-vbKAjP8ffObxjsp7hN"
|
||||
}
|
||||
}
|
||||
],
|
||||
"analysisFailures": [],
|
||||
|
||||
@@ -1,27 +1,153 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import * as sinon from 'sinon';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as tmp from 'tmp';
|
||||
import * as chai from 'chai';
|
||||
import { expect } from 'chai';
|
||||
import { window } from 'vscode';
|
||||
|
||||
import {
|
||||
convertToDatabaseUrl,
|
||||
convertLgtmUrlToDatabaseUrl,
|
||||
looksLikeLgtmUrl,
|
||||
findDirWithFile,
|
||||
looksLikeGithubRepo,
|
||||
} from '../../databaseFetcher';
|
||||
import { ProgressCallback } from '../../commandRunner';
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
import * as pq from 'proxyquire';
|
||||
|
||||
describe('databaseFetcher', function () {
|
||||
const proxyquire = pq.noPreserveCache();
|
||||
|
||||
describe('databaseFetcher', function() {
|
||||
// These tests make API calls and may need extra time to complete.
|
||||
this.timeout(10000);
|
||||
|
||||
describe('convertToDatabaseUrl', () => {
|
||||
describe('convertGithubNwoToDatabaseUrl', () => {
|
||||
let sandbox: sinon.SinonSandbox;
|
||||
let quickPickSpy: sinon.SinonStub;
|
||||
let progressSpy: ProgressCallback;
|
||||
let mockRequest: sinon.SinonStub;
|
||||
let mod: any;
|
||||
|
||||
const credentials = getMockCredentials(0);
|
||||
|
||||
beforeEach(() => {
|
||||
sandbox = sinon.createSandbox();
|
||||
quickPickSpy = sandbox.stub(window, 'showQuickPick');
|
||||
progressSpy = sandbox.spy();
|
||||
mockRequest = sandbox.stub();
|
||||
mod = proxyquire('../../databaseFetcher', {
|
||||
'./authentication': {
|
||||
Credentials: credentials,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should convert a GitHub nwo to a database url', async () => {
|
||||
// We can't make the real octokit request (since we need credentials), so we mock the response.
|
||||
const mockApiResponse = {
|
||||
data: [
|
||||
{
|
||||
id: 1495869,
|
||||
name: 'csharp-database',
|
||||
language: 'csharp',
|
||||
uploader: {},
|
||||
content_type: 'application/zip',
|
||||
state: 'uploaded',
|
||||
size: 55599715,
|
||||
created_at: '2022-03-24T10:46:24Z',
|
||||
updated_at: '2022-03-24T10:46:27Z',
|
||||
url: 'https://api.github.com/repositories/143040428/code-scanning/codeql/databases/csharp',
|
||||
},
|
||||
{
|
||||
id: 1100671,
|
||||
name: 'database.zip',
|
||||
language: 'javascript',
|
||||
uploader: {},
|
||||
content_type: 'application/zip',
|
||||
state: 'uploaded',
|
||||
size: 29294434,
|
||||
created_at: '2022-03-01T16:00:04Z',
|
||||
updated_at: '2022-03-01T16:00:06Z',
|
||||
url: 'https://api.github.com/repositories/143040428/code-scanning/codeql/databases/javascript',
|
||||
},
|
||||
{
|
||||
id: 648738,
|
||||
name: 'ql-database',
|
||||
language: 'ql',
|
||||
uploader: {},
|
||||
content_type: 'application/json; charset=utf-8',
|
||||
state: 'uploaded',
|
||||
size: 39735500,
|
||||
created_at: '2022-02-02T09:38:50Z',
|
||||
updated_at: '2022-02-02T09:38:51Z',
|
||||
url: 'https://api.github.com/repositories/143040428/code-scanning/codeql/databases/ql',
|
||||
},
|
||||
],
|
||||
};
|
||||
mockRequest.resolves(mockApiResponse);
|
||||
quickPickSpy.resolves('javascript');
|
||||
const githubRepo = 'github/codeql';
|
||||
const dbUrl = await mod.convertGithubNwoToDatabaseUrl(
|
||||
githubRepo,
|
||||
credentials,
|
||||
progressSpy
|
||||
);
|
||||
|
||||
expect(dbUrl).to.equal(
|
||||
'https://api.github.com/repos/github/codeql/code-scanning/codeql/databases/javascript'
|
||||
);
|
||||
expect(quickPickSpy.firstCall.args[0]).to.deep.equal([
|
||||
'csharp',
|
||||
'javascript',
|
||||
'ql',
|
||||
]);
|
||||
});
|
||||
|
||||
// Repository doesn't exist, or the user has no access to the repository.
|
||||
it('should fail on an invalid/inaccessible repository', async () => {
|
||||
const mockApiResponse = {
|
||||
data: {
|
||||
message: 'Not Found',
|
||||
},
|
||||
status: 404,
|
||||
};
|
||||
mockRequest.resolves(mockApiResponse);
|
||||
const githubRepo = 'foo/bar-not-real';
|
||||
await expect(
|
||||
mod.convertGithubNwoToDatabaseUrl(githubRepo, credentials, progressSpy)
|
||||
).to.be.rejectedWith(/Unable to get database/);
|
||||
expect(progressSpy).to.have.callCount(0);
|
||||
});
|
||||
|
||||
// User has access to the repository, but there are no databases for any language.
|
||||
it('should fail on a repository with no databases', async () => {
|
||||
const mockApiResponse = {
|
||||
data: [],
|
||||
};
|
||||
|
||||
mockRequest.resolves(mockApiResponse);
|
||||
const githubRepo = 'foo/bar-with-no-dbs';
|
||||
await expect(
|
||||
mod.convertGithubNwoToDatabaseUrl(githubRepo, credentials, progressSpy)
|
||||
).to.be.rejectedWith(/Unable to get database/);
|
||||
expect(progressSpy).to.have.been.calledOnce;
|
||||
});
|
||||
|
||||
function getMockCredentials(response: any) {
|
||||
mockRequest = sinon.stub().resolves(response);
|
||||
return {
|
||||
getOctokit: () => ({
|
||||
request: mockRequest,
|
||||
}),
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
describe('convertLgtmUrlToDatabaseUrl', () => {
|
||||
let sandbox: sinon.SinonSandbox;
|
||||
let quickPickSpy: sinon.SinonStub;
|
||||
let progressSpy: ProgressCallback;
|
||||
@@ -39,7 +165,7 @@ describe('databaseFetcher', function () {
|
||||
it('should convert a project url to a database url', async () => {
|
||||
quickPickSpy.resolves('javascript');
|
||||
const lgtmUrl = 'https://lgtm.com/projects/g/github/codeql';
|
||||
const dbUrl = await convertToDatabaseUrl(lgtmUrl, progressSpy);
|
||||
const dbUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progressSpy);
|
||||
|
||||
expect(dbUrl).to.equal(
|
||||
'https://lgtm.com/api/v1.0/snapshots/1506465042581/javascript'
|
||||
@@ -52,7 +178,7 @@ describe('databaseFetcher', function () {
|
||||
quickPickSpy.resolves('python');
|
||||
const lgtmUrl =
|
||||
'https://lgtm.com/projects/g/github/codeql/subpage/subpage2?query=xxx';
|
||||
const dbUrl = await convertToDatabaseUrl(lgtmUrl, progressSpy);
|
||||
const dbUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progressSpy);
|
||||
|
||||
expect(dbUrl).to.equal(
|
||||
'https://lgtm.com/api/v1.0/snapshots/1506465042581/python'
|
||||
@@ -64,7 +190,7 @@ describe('databaseFetcher', function () {
|
||||
quickPickSpy.resolves('python');
|
||||
const lgtmUrl =
|
||||
'g/github/codeql';
|
||||
const dbUrl = await convertToDatabaseUrl(lgtmUrl, progressSpy);
|
||||
const dbUrl = await convertLgtmUrlToDatabaseUrl(lgtmUrl, progressSpy);
|
||||
|
||||
expect(dbUrl).to.equal(
|
||||
'https://lgtm.com/api/v1.0/snapshots/1506465042581/python'
|
||||
@@ -75,11 +201,37 @@ describe('databaseFetcher', function () {
|
||||
it('should fail on a nonexistent project', async () => {
|
||||
quickPickSpy.resolves('javascript');
|
||||
const lgtmUrl = 'https://lgtm.com/projects/g/github/hucairz';
|
||||
await expect(convertToDatabaseUrl(lgtmUrl, progressSpy)).to.rejectedWith(/Invalid LGTM URL/);
|
||||
await expect(convertLgtmUrlToDatabaseUrl(lgtmUrl, progressSpy)).to.rejectedWith(/Invalid LGTM URL/);
|
||||
expect(progressSpy).to.have.callCount(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('looksLikeGithubRepo', () => {
|
||||
it('should handle invalid urls', () => {
|
||||
expect(looksLikeGithubRepo(''))
|
||||
.to.be.false;
|
||||
expect(looksLikeGithubRepo('http://github.com/foo/bar'))
|
||||
.to.be.false;
|
||||
expect(looksLikeGithubRepo('https://ww.github.com/foo/bar'))
|
||||
.to.be.false;
|
||||
expect(looksLikeGithubRepo('https://ww.github.com/foo'))
|
||||
.to.be.false;
|
||||
expect(looksLikeGithubRepo('foo'))
|
||||
.to.be.false;
|
||||
});
|
||||
|
||||
it('should handle valid urls', () => {
|
||||
expect(looksLikeGithubRepo('https://github.com/foo/bar'))
|
||||
.to.be.true;
|
||||
expect(looksLikeGithubRepo('https://www.github.com/foo/bar'))
|
||||
.to.be.true;
|
||||
expect(looksLikeGithubRepo('https://github.com/foo/bar/sub/pages'))
|
||||
.to.be.true;
|
||||
expect(looksLikeGithubRepo('foo/bar'))
|
||||
.to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('looksLikeLgtmUrl', () => {
|
||||
it('should handle invalid urls', () => {
|
||||
expect(looksLikeLgtmUrl('')).to.be.false;
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import * as tmp from 'tmp';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
@@ -8,6 +7,7 @@ import { Uri } from 'vscode';
|
||||
|
||||
import { DatabaseUI } from '../../databases-ui';
|
||||
import { testDisposeHandler } from '../test-dispose-handler';
|
||||
import { Credentials } from '../../authentication';
|
||||
|
||||
describe('databases-ui', () => {
|
||||
describe('fixDbUri', () => {
|
||||
@@ -78,7 +78,8 @@ describe('databases-ui', () => {
|
||||
} as any,
|
||||
{} as any,
|
||||
storageDir,
|
||||
storageDir
|
||||
storageDir,
|
||||
() => Promise.resolve({} as Credentials),
|
||||
);
|
||||
|
||||
await databaseUI.handleRemoveOrphanedDatabases();
|
||||
|
||||
@@ -1,18 +1,13 @@
|
||||
import * as chai from 'chai';
|
||||
import { expect } from 'chai';
|
||||
import * as path from 'path';
|
||||
import * as fetch from 'node-fetch';
|
||||
import 'chai/register-should';
|
||||
import * as semver from 'semver';
|
||||
import * as sinonChai from 'sinon-chai';
|
||||
import * as sinon from 'sinon';
|
||||
import * as pq from 'proxyquire';
|
||||
import 'mocha';
|
||||
|
||||
import { GithubRelease, GithubReleaseAsset, ReleasesApiConsumer } from '../../distribution';
|
||||
|
||||
const proxyquire = pq.noPreserveCache();
|
||||
chai.use(sinonChai);
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('Releases API consumer', () => {
|
||||
const owner = 'someowner';
|
||||
@@ -95,7 +90,7 @@ describe('Releases API consumer', () => {
|
||||
it('fails if none of the releases are within the version range', async () => {
|
||||
const consumer = new MockReleasesApiConsumer(owner, repo);
|
||||
|
||||
await chai.expect(
|
||||
await expect(
|
||||
consumer.getLatestRelease(new semver.Range('5.*.*'))
|
||||
).to.be.rejectedWith(Error);
|
||||
});
|
||||
@@ -114,7 +109,7 @@ describe('Releases API consumer', () => {
|
||||
it('fails if none of the releases pass the additional compatibility test', async () => {
|
||||
const consumer = new MockReleasesApiConsumer(owner, repo);
|
||||
|
||||
await chai.expect(consumer.getLatestRelease(
|
||||
await expect(consumer.getLatestRelease(
|
||||
new semver.Range('2.*.*'),
|
||||
true,
|
||||
release => release.assets.some(asset => asset.name === 'otherExampleAsset.txt')
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
import { expect } from 'chai';
|
||||
import 'mocha';
|
||||
import * as path from 'path';
|
||||
|
||||
import { DownloadLink, createDownloadPath } from '../../remote-queries/download-link';
|
||||
|
||||
describe('createDownloadPath', () => {
|
||||
it('should return the correct path', () => {
|
||||
const downloadLink: DownloadLink = {
|
||||
id: 'abc',
|
||||
urlPath: '',
|
||||
innerFilePath: '',
|
||||
queryId: 'def'
|
||||
};
|
||||
const expectedPath = path.join('storage', 'def', 'abc');
|
||||
|
||||
const actualPath = createDownloadPath('storage', downloadLink);
|
||||
|
||||
expect(actualPath).to.equal(expectedPath);
|
||||
});
|
||||
|
||||
it('should return the correct path with extension', () => {
|
||||
const downloadLink: DownloadLink = {
|
||||
id: 'abc',
|
||||
urlPath: '',
|
||||
innerFilePath: '',
|
||||
queryId: 'def'
|
||||
};
|
||||
|
||||
const expectedPath = path.join('storage', 'def', 'abc.zip');
|
||||
|
||||
const actualPath = createDownloadPath('storage', downloadLink, 'zip');
|
||||
|
||||
expect(actualPath).to.equal(expectedPath);
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,4 @@
|
||||
import { expect } from 'chai';
|
||||
import 'mocha';
|
||||
import {
|
||||
EnvironmentVariableCollection,
|
||||
EnvironmentVariableMutator,
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
import 'source-map-support/register';
|
||||
import { runTestsInDirectory } from '../index-template';
|
||||
import * as sinonChai from 'sinon-chai';
|
||||
import * as chai from 'chai';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import 'chai/register-should';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
chai.use(sinonChai);
|
||||
|
||||
export function run(): Promise<void> {
|
||||
return runTestsInDirectory(__dirname);
|
||||
}
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as chai from 'chai';
|
||||
import 'mocha';
|
||||
import 'sinon-chai';
|
||||
import { expect, assert } from 'chai';
|
||||
import * as vscode from 'vscode';
|
||||
import * as sinon from 'sinon';
|
||||
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { logger } from '../../logging';
|
||||
import { registerQueryHistoryScubber } from '../../query-history-scrubber';
|
||||
import { QueryHistoryManager, HistoryTreeDataProvider, SortOrder } from '../../query-history';
|
||||
@@ -19,10 +16,7 @@ import { DatabaseManager } from '../../databases';
|
||||
import * as tmp from 'tmp-promise';
|
||||
import { ONE_DAY_IN_MS, ONE_HOUR_IN_MS, TWO_HOURS_IN_MS, THREE_HOURS_IN_MS } from '../../pure/helpers-pure';
|
||||
import { tmpDir } from '../../helpers';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
const assert = chai.assert;
|
||||
import { getErrorMessage } from '../../pure/helpers-pure';
|
||||
|
||||
describe('query-history', () => {
|
||||
const mockExtensionLocation = path.join(tmpDir.name, 'mock-extension-location');
|
||||
@@ -162,7 +156,7 @@ describe('query-history', () => {
|
||||
await (queryHistoryManager as any).findOtherQueryToCompare(thisQuery, [thisQuery, allHistory[0]]);
|
||||
assert(false, 'Should have thrown');
|
||||
} catch (e) {
|
||||
expect(e.message).to.eq('Please select a successful query.');
|
||||
expect(getErrorMessage(e)).to.eq('Please select a successful query.');
|
||||
}
|
||||
});
|
||||
|
||||
@@ -175,7 +169,7 @@ describe('query-history', () => {
|
||||
await (queryHistoryManager as any).findOtherQueryToCompare(allHistory[0], [allHistory[0], allHistory[1]]);
|
||||
assert(false, 'Should have thrown');
|
||||
} catch (e) {
|
||||
expect(e.message).to.eq('Query databases must be the same.');
|
||||
expect(getErrorMessage(e)).to.eq('Query databases must be the same.');
|
||||
}
|
||||
});
|
||||
|
||||
@@ -187,7 +181,7 @@ describe('query-history', () => {
|
||||
await (queryHistoryManager as any).findOtherQueryToCompare(thisQuery, [thisQuery, allHistory[0], allHistory[1]]);
|
||||
assert(false, 'Should have thrown');
|
||||
} catch (e) {
|
||||
expect(e.message).to.eq('Please select no more than 2 queries.');
|
||||
expect(getErrorMessage(e)).to.eq('Please select no more than 2 queries.');
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -426,9 +420,11 @@ describe('query-history', () => {
|
||||
|
||||
describe('getChildren', () => {
|
||||
const history = [
|
||||
item('a', 10, 20),
|
||||
item('b', 5, 30),
|
||||
item('c', 1, 25),
|
||||
item('a', 2, 'remote'),
|
||||
item('b', 10, 'local', 20),
|
||||
item('c', 5, 'local', 30),
|
||||
item('d', 1, 'local', 25),
|
||||
item('e', 6, 'remote'),
|
||||
];
|
||||
let treeDataProvider: HistoryTreeDataProvider;
|
||||
|
||||
@@ -455,7 +451,7 @@ describe('query-history', () => {
|
||||
});
|
||||
|
||||
it('should get children for date ascending', async () => {
|
||||
const expected = [history[2], history[1], history[0]];
|
||||
const expected = [history[3], history[0], history[2], history[4], history[1]];
|
||||
treeDataProvider.sortOrder = SortOrder.DateAsc;
|
||||
|
||||
const children = await treeDataProvider.getChildren();
|
||||
@@ -463,7 +459,7 @@ describe('query-history', () => {
|
||||
});
|
||||
|
||||
it('should get children for date descending', async () => {
|
||||
const expected = [history[0], history[1], history[2]];
|
||||
const expected = [history[3], history[0], history[2], history[4], history[1]].reverse();
|
||||
treeDataProvider.sortOrder = SortOrder.DateDesc;
|
||||
|
||||
const children = await treeDataProvider.getChildren();
|
||||
@@ -471,7 +467,7 @@ describe('query-history', () => {
|
||||
});
|
||||
|
||||
it('should get children for result count ascending', async () => {
|
||||
const expected = [history[0], history[2], history[1]];
|
||||
const expected = [history[0], history[4], history[1], history[3], history[2]];
|
||||
treeDataProvider.sortOrder = SortOrder.CountAsc;
|
||||
|
||||
const children = await treeDataProvider.getChildren();
|
||||
@@ -479,7 +475,7 @@ describe('query-history', () => {
|
||||
});
|
||||
|
||||
it('should get children for result count descending', async () => {
|
||||
const expected = [history[1], history[2], history[0]];
|
||||
const expected = [history[0], history[4], history[1], history[3], history[2]].reverse();
|
||||
treeDataProvider.sortOrder = SortOrder.CountDesc;
|
||||
|
||||
const children = await treeDataProvider.getChildren();
|
||||
@@ -508,17 +504,27 @@ describe('query-history', () => {
|
||||
expect(children).to.deep.eq(expected);
|
||||
});
|
||||
|
||||
function item(label: string, start: number, resultCount?: number) {
|
||||
return {
|
||||
label,
|
||||
initialInfo: {
|
||||
start: new Date(start),
|
||||
},
|
||||
completedQuery: {
|
||||
resultCount,
|
||||
},
|
||||
t: 'local'
|
||||
};
|
||||
function item(label: string, start: number, t = 'local', resultCount?: number) {
|
||||
if (t === 'local') {
|
||||
return {
|
||||
label,
|
||||
initialInfo: {
|
||||
start: new Date(start),
|
||||
},
|
||||
completedQuery: {
|
||||
resultCount,
|
||||
},
|
||||
t
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
label,
|
||||
remoteQuery: {
|
||||
executionStartTime: start,
|
||||
},
|
||||
t
|
||||
};
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import * as chai from 'chai';
|
||||
import { expect } from 'chai';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs-extra';
|
||||
import 'mocha';
|
||||
import 'sinon-chai';
|
||||
import * as sinon from 'sinon';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { LocalQueryInfo, InitialQueryInfo, interpretResultsSarif } from '../../query-results';
|
||||
import { QueryEvaluationInfo, QueryWithResults } from '../../run-queries';
|
||||
import { QueryHistoryConfig } from '../../config';
|
||||
@@ -15,9 +12,6 @@ import { CancellationTokenSource, Uri, env } from 'vscode';
|
||||
import { tmpDir } from '../../helpers';
|
||||
import { slurpQueryHistory, splatQueryHistory } from '../../query-serialization';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('query-results', () => {
|
||||
let disposeSpy: sinon.SinonSpy;
|
||||
let onDidChangeQueryHistoryConfigurationSpy: sinon.SinonSpy;
|
||||
|
||||
@@ -1,26 +1,21 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as sinon from 'sinon';
|
||||
import * as chai from 'chai';
|
||||
import 'mocha';
|
||||
import 'sinon-chai';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { expect } from 'chai';
|
||||
|
||||
import { CancellationToken, ExtensionContext, Uri, window, workspace } from 'vscode';
|
||||
import { QueryHistoryConfig } from '../../config';
|
||||
import { DatabaseManager } from '../../databases';
|
||||
import { tmpDir } from '../../helpers';
|
||||
import { QueryHistoryManager } from '../../query-history';
|
||||
import { QueryServerClient } from '../../queryserver-client';
|
||||
import { Credentials } from '../../authentication';
|
||||
import { AnalysesResultsManager } from '../../remote-queries/analyses-results-manager';
|
||||
import { RemoteQueryResult } from '../../remote-queries/shared/remote-query-result';
|
||||
import { DisposableBucket } from '../disposable-bucket';
|
||||
import { testDisposeHandler } from '../test-dispose-handler';
|
||||
import { walkDirectory } from '../../helpers';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
import { QueryHistoryConfig } from '../../../config';
|
||||
import { DatabaseManager } from '../../../databases';
|
||||
import { tmpDir } from '../../../helpers';
|
||||
import { QueryHistoryManager } from '../../../query-history';
|
||||
import { QueryServerClient } from '../../../queryserver-client';
|
||||
import { Credentials } from '../../../authentication';
|
||||
import { AnalysesResultsManager } from '../../../remote-queries/analyses-results-manager';
|
||||
import { RemoteQueryResult } from '../../../remote-queries/shared/remote-query-result';
|
||||
import { DisposableBucket } from '../../disposable-bucket';
|
||||
import { testDisposeHandler } from '../../test-dispose-handler';
|
||||
import { walkDirectory } from '../../../helpers';
|
||||
import { getErrorMessage } from '../../../pure/helpers-pure';
|
||||
|
||||
/**
|
||||
* Tests for remote queries and how they interact with the query history manager.
|
||||
@@ -28,7 +23,7 @@ const expect = chai.expect;
|
||||
|
||||
describe('Remote queries and query history manager', function() {
|
||||
|
||||
const EXTENSION_PATH = path.join(__dirname, '../../../');
|
||||
const EXTENSION_PATH = path.join(__dirname, '../../../../');
|
||||
const STORAGE_DIR = Uri.file(path.join(tmpDir.name, 'remote-queries')).fsPath;
|
||||
const asyncNoop = async () => { /** noop */ };
|
||||
|
||||
@@ -244,8 +239,8 @@ describe('Remote queries and query history manager', function() {
|
||||
|
||||
it('should download two artifacts at once', async () => {
|
||||
const publisher = sandbox.spy();
|
||||
const analysisSummaries = [...remoteQueryResult0.analysisSummaries];
|
||||
await arm.downloadAnalysesResults(analysisSummaries, undefined, publisher);
|
||||
const analysisSummaries = [remoteQueryResult0.analysisSummaries[0], remoteQueryResult0.analysisSummaries[1]];
|
||||
await arm.loadAnalysesResults(analysisSummaries, undefined, publisher);
|
||||
|
||||
const trimmed = publisher.getCalls().map(call => call.args[0]).map(args => {
|
||||
args.forEach((analysisResult: any) => delete analysisResult.interpretedResults);
|
||||
@@ -286,12 +281,12 @@ describe('Remote queries and query history manager', function() {
|
||||
const analysisSummaries = [...remoteQueryResult0.analysisSummaries];
|
||||
|
||||
try {
|
||||
await arm.downloadAnalysesResults(analysisSummaries, {
|
||||
await arm.loadAnalysesResults(analysisSummaries, {
|
||||
isCancellationRequested: true
|
||||
} as CancellationToken, publisher);
|
||||
expect.fail('Should have thrown');
|
||||
} catch (e) {
|
||||
expect(e.message).to.contain('cancelled');
|
||||
expect(getErrorMessage(e)).to.contain('cancelled');
|
||||
}
|
||||
|
||||
expect(publisher).not.to.have.been.called;
|
||||
@@ -299,11 +294,11 @@ describe('Remote queries and query history manager', function() {
|
||||
|
||||
it('should get the analysis results', async () => {
|
||||
const publisher = sandbox.spy();
|
||||
const analysisSummaries0 = [...remoteQueryResult0.analysisSummaries];
|
||||
const analysisSummaries0 = [remoteQueryResult0.analysisSummaries[0], remoteQueryResult0.analysisSummaries[1]];
|
||||
const analysisSummaries1 = [...remoteQueryResult1.analysisSummaries];
|
||||
|
||||
await arm.downloadAnalysesResults(analysisSummaries0, undefined, publisher);
|
||||
await arm.downloadAnalysesResults(analysisSummaries1, undefined, publisher);
|
||||
await arm.loadAnalysesResults(analysisSummaries0, undefined, publisher);
|
||||
await arm.loadAnalysesResults(analysisSummaries1, undefined, publisher);
|
||||
|
||||
const result0 = arm.getAnalysesResults(rawQueryHistory[0].queryId);
|
||||
const result0Again = arm.getAnalysesResults(rawQueryHistory[0].queryId);
|
||||
@@ -322,7 +317,7 @@ describe('Remote queries and query history manager', function() {
|
||||
it.skip('should read sarif', async () => {
|
||||
const publisher = sandbox.spy();
|
||||
const analysisSummaries0 = [remoteQueryResult0.analysisSummaries[0]];
|
||||
await arm.downloadAnalysesResults(analysisSummaries0, undefined, publisher);
|
||||
await arm.loadAnalysesResults(analysisSummaries0, undefined, publisher);
|
||||
|
||||
const sarif = fs.readJSONSync(path.join(STORAGE_DIR, 'queries', rawQueryHistory[0].queryId, '171543249', 'results.sarif'));
|
||||
const queryResults = sarif.runs
|
||||
@@ -331,11 +326,34 @@ describe('Remote queries and query history manager', function() {
|
||||
|
||||
expect(publisher.getCall(1).args[0][0].results).to.deep.eq(queryResults);
|
||||
});
|
||||
|
||||
it('should check if an artifact is downloaded and not in memory', async () => {
|
||||
// Load remoteQueryResult0.analysisSummaries[1] into memory
|
||||
await arm.downloadAnalysisResults(remoteQueryResult0.analysisSummaries[1], () => Promise.resolve());
|
||||
|
||||
// on disk
|
||||
expect(await (arm as any).isAnalysisDownloaded(remoteQueryResult0.analysisSummaries[0])).to.be.true;
|
||||
|
||||
// in memory
|
||||
expect(await (arm as any).isAnalysisDownloaded(remoteQueryResult0.analysisSummaries[1])).to.be.true;
|
||||
|
||||
// not downloaded
|
||||
expect(await (arm as any).isAnalysisDownloaded(remoteQueryResult0.analysisSummaries[2])).to.be.false;
|
||||
});
|
||||
|
||||
it('should load downloaded artifacts', async () => {
|
||||
await arm.loadDownloadedAnalyses(remoteQueryResult0.analysisSummaries);
|
||||
const queryId = rawQueryHistory[0].queryId;
|
||||
const analysesResultsNwos = arm.getAnalysesResults(queryId).map(ar => ar.nwo).sort();
|
||||
expect(analysesResultsNwos[0]).to.eq('github/vscode-codeql');
|
||||
expect(analysesResultsNwos[1]).to.eq('other/hucairz');
|
||||
expect(analysesResultsNwos.length).to.eq(2);
|
||||
});
|
||||
});
|
||||
|
||||
async function copyHistoryState() {
|
||||
fs.ensureDirSync(STORAGE_DIR);
|
||||
fs.copySync(path.join(__dirname, 'data/remote-queries/'), path.join(tmpDir.name, 'remote-queries'));
|
||||
fs.copySync(path.join(__dirname, '../data/remote-queries/'), path.join(tmpDir.name, 'remote-queries'));
|
||||
|
||||
// also, replace the files with "PLACEHOLDER" so that they have the correct directory
|
||||
for await (const p of walkDirectory(STORAGE_DIR)) {
|
||||
@@ -0,0 +1,132 @@
|
||||
import 'vscode-test';
|
||||
import * as sinon from 'sinon';
|
||||
import { expect } from 'chai';
|
||||
import { window } from 'vscode';
|
||||
import * as pq from 'proxyquire';
|
||||
|
||||
const proxyquire = pq.noPreserveCache();
|
||||
|
||||
describe('repository-selection', function() {
|
||||
|
||||
describe('getRepositorySelection', () => {
|
||||
let sandbox: sinon.SinonSandbox;
|
||||
let quickPickSpy: sinon.SinonStub;
|
||||
let showInputBoxSpy: sinon.SinonStub;
|
||||
let getRemoteRepositoryListsSpy: sinon.SinonStub;
|
||||
let showAndLogErrorMessageSpy: sinon.SinonStub;
|
||||
let mod: any;
|
||||
beforeEach(() => {
|
||||
sandbox = sinon.createSandbox();
|
||||
quickPickSpy = sandbox.stub(window, 'showQuickPick');
|
||||
showInputBoxSpy = sandbox.stub(window, 'showInputBox');
|
||||
getRemoteRepositoryListsSpy = sandbox.stub();
|
||||
showAndLogErrorMessageSpy = sandbox.stub();
|
||||
mod = proxyquire('../../../remote-queries/repository-selection', {
|
||||
'../config': {
|
||||
getRemoteRepositoryLists: getRemoteRepositoryListsSpy
|
||||
},
|
||||
'../helpers': {
|
||||
showAndLogErrorMessage: showAndLogErrorMessageSpy
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should allow selection from repo lists from your pre-defined config', async () => {
|
||||
// fake return values
|
||||
quickPickSpy.resolves(
|
||||
{ repositories: ['foo/bar', 'foo/baz'] }
|
||||
);
|
||||
getRemoteRepositoryListsSpy.returns(
|
||||
{
|
||||
'list1': ['foo/bar', 'foo/baz'],
|
||||
'list2': [],
|
||||
}
|
||||
);
|
||||
|
||||
// make the function call
|
||||
const repoSelection = await mod.getRepositorySelection();
|
||||
|
||||
// Check that the return value is correct
|
||||
expect(repoSelection.repositoryLists).to.be.undefined;
|
||||
expect(repoSelection.repositories).to.deep.eq(
|
||||
['foo/bar', 'foo/baz']
|
||||
);
|
||||
});
|
||||
|
||||
it('should allow selection from repo lists defined at the system level', async () => {
|
||||
// fake return values
|
||||
quickPickSpy.resolves(
|
||||
{ repositoryList: 'top_100' }
|
||||
);
|
||||
getRemoteRepositoryListsSpy.returns(
|
||||
{
|
||||
'list1': ['foo/bar', 'foo/baz'],
|
||||
'list2': [],
|
||||
}
|
||||
);
|
||||
|
||||
// make the function call
|
||||
const repoSelection = await mod.getRepositorySelection();
|
||||
|
||||
// Check that the return value is correct
|
||||
expect(repoSelection.repositories).to.be.undefined;
|
||||
expect(repoSelection.repositoryLists).to.deep.eq(
|
||||
['top_100']
|
||||
);
|
||||
});
|
||||
|
||||
// Test the regex in various "good" cases
|
||||
const goodRepos = [
|
||||
'owner/repo',
|
||||
'owner-with-hyphens/repo-with-hyphens_and_underscores',
|
||||
'ownerWithNumbers58/repoWithNumbers37'
|
||||
];
|
||||
goodRepos.forEach(repo => {
|
||||
it(`should run on a valid repo that you enter in the text box: ${repo}`, async () => {
|
||||
// fake return values
|
||||
quickPickSpy.resolves(
|
||||
{ useCustomRepository: true }
|
||||
);
|
||||
getRemoteRepositoryListsSpy.returns({}); // no pre-defined repo lists
|
||||
showInputBoxSpy.resolves(repo);
|
||||
|
||||
// make the function call
|
||||
const repoSelection = await mod.getRepositorySelection();
|
||||
|
||||
// Check that the return value is correct
|
||||
expect(repoSelection.repositories).to.deep.equal(
|
||||
[repo]
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// Test the regex in various "bad" cases
|
||||
const badRepos = [
|
||||
'invalid_owner/repo',
|
||||
'owner/repo+some&invalid&stuff',
|
||||
'owner-with-no-repo/',
|
||||
'/repo-with-no-owner'
|
||||
];
|
||||
badRepos.forEach(repo => {
|
||||
it(`should show an error message if you enter an invalid repo in the text box: ${repo}`, async () => {
|
||||
// fake return values
|
||||
quickPickSpy.resolves(
|
||||
{ useCustomRepository: true }
|
||||
);
|
||||
getRemoteRepositoryListsSpy.returns({}); // no pre-defined repo lists
|
||||
showInputBoxSpy.resolves(repo);
|
||||
|
||||
// make the function call
|
||||
await mod.getRepositorySelection();
|
||||
|
||||
// check that we get the right error message
|
||||
expect(showAndLogErrorMessageSpy.firstCall.args[0]).to.contain('Invalid repository format');
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
@@ -1,18 +1,12 @@
|
||||
import * as chai from 'chai';
|
||||
import { expect } from 'chai';
|
||||
import * as path from 'path';
|
||||
import 'mocha';
|
||||
import 'sinon-chai';
|
||||
import * as sinon from 'sinon';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import { Uri } from 'vscode';
|
||||
|
||||
import { QueryEvaluationInfo } from '../../run-queries';
|
||||
import { Severity, compileQuery } from '../../pure/messages';
|
||||
import * as config from '../../config';
|
||||
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('run-queries', () => {
|
||||
let sandbox: sinon.SinonSandbox;
|
||||
beforeEach(() => {
|
||||
|
||||
@@ -1,198 +0,0 @@
|
||||
import 'vscode-test';
|
||||
import 'mocha';
|
||||
import * as chaiAsPromised from 'chai-as-promised';
|
||||
import * as sinon from 'sinon';
|
||||
import * as chai from 'chai';
|
||||
import { window } from 'vscode';
|
||||
import * as pq from 'proxyquire';
|
||||
|
||||
const proxyquire = pq.noPreserveCache();
|
||||
chai.use(chaiAsPromised);
|
||||
const expect = chai.expect;
|
||||
|
||||
describe('run-remote-query', function() {
|
||||
|
||||
describe('getRepositories', () => {
|
||||
let sandbox: sinon.SinonSandbox;
|
||||
let quickPickSpy: sinon.SinonStub;
|
||||
let showInputBoxSpy: sinon.SinonStub;
|
||||
let getRemoteRepositoryListsSpy: sinon.SinonStub;
|
||||
let showAndLogErrorMessageSpy: sinon.SinonStub;
|
||||
let mod: any;
|
||||
beforeEach(() => {
|
||||
sandbox = sinon.createSandbox();
|
||||
quickPickSpy = sandbox.stub(window, 'showQuickPick');
|
||||
showInputBoxSpy = sandbox.stub(window, 'showInputBox');
|
||||
getRemoteRepositoryListsSpy = sandbox.stub();
|
||||
showAndLogErrorMessageSpy = sandbox.stub();
|
||||
mod = proxyquire('../../remote-queries/run-remote-query', {
|
||||
'../config': {
|
||||
getRemoteRepositoryLists: getRemoteRepositoryListsSpy
|
||||
},
|
||||
'../helpers': {
|
||||
showAndLogErrorMessage: showAndLogErrorMessageSpy
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
sandbox.restore();
|
||||
});
|
||||
|
||||
it('should run on a repo list that you chose from your pre-defined config', async () => {
|
||||
// fake return values
|
||||
quickPickSpy.resolves(
|
||||
{ repoList: ['foo/bar', 'foo/baz'] }
|
||||
);
|
||||
getRemoteRepositoryListsSpy.returns(
|
||||
{
|
||||
'list1': ['foo/bar', 'foo/baz'],
|
||||
'list2': [],
|
||||
}
|
||||
);
|
||||
|
||||
// make the function call
|
||||
const repoList = await mod.getRepositories();
|
||||
|
||||
// Check that the return value is correct
|
||||
expect(repoList).to.deep.eq(
|
||||
['foo/bar', 'foo/baz']
|
||||
);
|
||||
});
|
||||
|
||||
// Test the regex in various "good" cases
|
||||
const goodRepos = [
|
||||
'owner/repo',
|
||||
'owner-with-hyphens/repo-with-hyphens_and_underscores',
|
||||
'ownerWithNumbers58/repoWithNumbers37'
|
||||
];
|
||||
goodRepos.forEach(repo => {
|
||||
it(`should run on a valid repo that you enter in the text box: ${repo}`, async () => {
|
||||
// fake return values
|
||||
getRemoteRepositoryListsSpy.returns({}); // no pre-defined repo lists
|
||||
showInputBoxSpy.resolves(repo);
|
||||
|
||||
// make the function call
|
||||
const repoList = await mod.getRepositories();
|
||||
|
||||
// Check that the return value is correct
|
||||
expect(repoList).to.deep.equal(
|
||||
[repo]
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// Test the regex in various "bad" cases
|
||||
const badRepos = [
|
||||
'invalid_owner/repo',
|
||||
'owner/repo+some&invalid&stuff',
|
||||
'owner-with-no-repo/',
|
||||
'/repo-with-no-owner'
|
||||
];
|
||||
badRepos.forEach(repo => {
|
||||
it(`should show an error message if you enter an invalid repo in the text box: ${repo}`, async () => {
|
||||
// fake return values
|
||||
getRemoteRepositoryListsSpy.returns({}); // no pre-defined repo lists
|
||||
showInputBoxSpy.resolves(repo);
|
||||
|
||||
// make the function call
|
||||
await mod.getRepositories();
|
||||
|
||||
// check that we get the right error message
|
||||
expect(showAndLogErrorMessageSpy.firstCall.args[0]).to.contain('Invalid repository format');
|
||||
});
|
||||
});
|
||||
|
||||
});

  describe('attemptRerun', () => {
    let sandbox: sinon.SinonSandbox;
    let showAndLogErrorMessageSpy: sinon.SinonStub;
    let showInformationMessageWithActionSpy: sinon.SinonStub;
    let mockRequest: sinon.SinonStub;
    let logSpy: sinon.SinonStub;
    let mod: any;

    const error = {
      message: 'Unable to run query on the specified repositories. Some repositories were invalid.',
      response: {
        data: {
          invalid_repos: ['abc/def', 'ghi/jkl']
        }
      }
    };
    const ref = 'main';
    const language = 'javascript';
    const credentials = getMockCredentials(0);
    const query = 'select 1';
    const owner = 'owner';
    const repo = 'repo';

    beforeEach(() => {
      sandbox = sinon.createSandbox();
      logSpy = sandbox.stub();
      showAndLogErrorMessageSpy = sandbox.stub();
      showInformationMessageWithActionSpy = sandbox.stub();
      mod = proxyquire('../../remote-queries/run-remote-query', {
        '../helpers': {
          showAndLogErrorMessage: showAndLogErrorMessageSpy,
          showInformationMessageWithAction: showInformationMessageWithActionSpy
        },
        '../logging': {
          'logger': {
            log: logSpy
          }
        },
      });
    });
    afterEach(() => {
      sandbox.restore();
    });

    it('should return and log error if it can\'t run on any repos', async () => {
      const repositories = ['abc/def', 'ghi/jkl'];

      // make the function call
      await mod.attemptRerun(error, credentials, ref, language, repositories, query, owner, repo);

      // check logging output
      expect(logSpy.firstCall.args[0]).to.contain('Unable to run query');
      expect(logSpy.secondCall.args[0]).to.contain('Invalid repos: abc/def, ghi/jkl');
      expect(showAndLogErrorMessageSpy.firstCall.args[0]).to.contain('Unable to run query on any');
    });

    it('should list invalid repos and rerun on valid ones', async () => {
      const repositories = ['foo/bar', 'abc/def', 'ghi/jkl', 'foo/baz'];

      // fake return values
      showInformationMessageWithActionSpy.resolves(true);

      // make the function call
      await mod.attemptRerun(error, credentials, ref, language, repositories, query, owner, repo);

      // check logging output
      expect(logSpy.firstCall.args[0]).to.contain('Unable to run query');
      expect(logSpy.secondCall.args[0]).to.contain('Invalid repos: abc/def, ghi/jkl');

      // check that the correct information message is displayed
      expect(showInformationMessageWithActionSpy.firstCall.args[0]).to.contain('Unable to run query on some');
      expect(showInformationMessageWithActionSpy.firstCall.args[1]).to.contain('Rerun');

      // check that API request is made again, with only valid repos
      expect(logSpy.lastCall.args[0]).to.contain('valid repositories: ["foo/bar","foo/baz"]');
      // test a few values in the octokit request
      expect(mockRequest.firstCall.args[1].data.language).to.eq('javascript');
      expect(mockRequest.firstCall.args[1].data.repositories).to.deep.eq(['foo/bar', 'foo/baz']);

    });

    function getMockCredentials(response: any) {
      mockRequest = sinon.stub().resolves(response);
      return {
        getOctokit: () => ({
          request: mockRequest
        })
      };
    }
  });
});
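
The rerun tests above exercise logic that drops the repositories the API reported as invalid and resubmits the query with the remainder. A rough sketch of that filtering step (the helper name is hypothetical; `invalid_repos` mirrors the mocked error response above):

```ts
// Sketch: keep only the repositories the API did not flag as invalid.
function getValidRepositories(requested: string[], invalidRepos: string[]): string[] {
  const invalid = new Set(invalidRepos);
  return requested.filter(repo => !invalid.has(repo));
}

// With the fixtures above:
// getValidRepositories(['foo/bar', 'abc/def', 'ghi/jkl', 'foo/baz'], ['abc/def', 'ghi/jkl'])
// -> ['foo/bar', 'foo/baz'], matching the repositories asserted in the resubmitted request.
```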

@@ -1,8 +1,5 @@
import * as chai from 'chai';
import 'mocha';
import 'sinon-chai';
import { expect } from 'chai';
import * as sinon from 'sinon';
import * as chaiAsPromised from 'chai-as-promised';
import TelemetryReporter from 'vscode-extension-telemetry';
import { ExtensionContext, workspace, ConfigurationTarget, window } from 'vscode';
import { TelemetryListener, telemetryListener as globalTelemetryListener } from '../../telemetry';
@@ -10,9 +7,6 @@ import { UserCancellationException } from '../../commandRunner';
import { fail } from 'assert';
import { ENABLE_TELEMETRY } from '../../config';

chai.use(chaiAsPromised);
const expect = chai.expect;

const sandbox = sinon.createSandbox();

describe('telemetry reporting', function() {
@@ -87,7 +81,7 @@ describe('telemetry reporting', function() {
      const reporter: any = telemetryListener._reporter;
      expect(reporter.userOptIn).to.eq(false); // disabled
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });

@@ -98,7 +92,7 @@ describe('telemetry reporting', function() {

      expect(telemetryListener._reporter).to.be.undefined;
    } catch (e) {
      fail(e);
      fail(e as Error);
    }
  });
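
Both hunks replace `fail(e)` with `fail(e as Error)`, presumably because catch-clause variables are typed `unknown` once TypeScript 4.4's `useUnknownInCatchVariables` is in effect, and Node's `assert.fail` only accepts `string | Error`. A small illustration of the same pattern (the awaited call is a placeholder):

```ts
import { fail } from 'assert';

async function example(doSomething: () => Promise<void>) {
  try {
    await doSomething();
  } catch (e) {
    // `e` is `unknown` here, so narrow (or cast) before handing it to fail().
    fail(e instanceof Error ? e : new Error(String(e)));
  }
}
```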

@@ -1,5 +1,4 @@
import 'vscode-test';
import 'mocha';
import * as sinon from 'sinon';
import * as fs from 'fs-extra';
import { Uri, WorkspaceFolder } from 'vscode';
@@ -138,7 +137,7 @@ describe('test-adapter', () => {
    // However, we can pretend the same thing by just returning an async array.
    runTestsSpy = sandox.stub();
    runTestsSpy.returns(
      (async function* () {
      (async function*() {
        yield Promise.resolve({
          test: Uri.parse('file:/ab/c/d.ql').fsPath,
          pass: true,
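
The hunk above stubs the test-running call with an immediately invoked async generator, so the adapter under test can iterate results with `for await` without spawning the CodeQL CLI. A standalone sketch of the same trick (the result shape is simplified):

```ts
import * as sinon from 'sinon';

// Sketch: a stub whose return value is an async iterable of fake test results.
const runTestsSpy = sinon.stub().returns(
  (async function* () {
    yield { test: '/ab/c/d.ql', pass: true };
  })()
);

// A consumer can then do: for await (const result of runTestsSpy()) { ... }
```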

@@ -1,7 +1,7 @@
import { fail } from 'assert';
import { expect } from 'chai';

import { asyncFilter } from '../../src/pure/helpers-pure';
import { asyncFilter, getErrorMessage } from '../../src/pure/helpers-pure';

describe('helpers-pure', () => {
  it('should filter asynchronously', async () => {
@@ -17,7 +17,7 @@ describe('helpers-pure', () => {
      await asyncFilter([1, 2, 3], rejects);
      fail('Should have thrown');
    } catch (e) {
      expect(e.message).to.eq('opps');
      expect(getErrorMessage(e)).to.eq('opps');
    }
  });
});
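
The updated assertion goes through a `getErrorMessage` helper instead of reading `e.message` directly, which no longer type-checks once the caught value is `unknown`. One plausible shape for such a helper (a sketch, not necessarily the extension's implementation):

```ts
// Sketch: safely extract a human-readable message from an unknown caught value.
function getErrorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}
```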

@@ -419,15 +419,42 @@ describe('SARIF processing', () => {
    expectResultParsingError(result.errors[0]);
  });

  it('should return errors for result locations with no context region', () => {
  it('should not return errors for result locations with no snippet', () => {
    const sarif = buildValidSarifLog();
    sarif.runs![0]!.results![0]!.locations![0]!.physicalLocation!.contextRegion!.snippet = undefined;

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    const expectedCodeSnippet = {
      startLine: result.alerts[0].codeSnippet!.startLine,
      endLine: result.alerts[0].codeSnippet!.endLine,
      text: ''
    };

    const actualCodeSnippet = result.alerts[0].codeSnippet;

    expect(result).to.be.ok;
    expectNoParsingError(result);
    expect(actualCodeSnippet).to.deep.equal(expectedCodeSnippet);
  });

  it('should use highlightedRegion for result locations with no contextRegion', () => {
    const sarif = buildValidSarifLog();
    sarif.runs![0]!.results![0]!.locations![0]!.physicalLocation!.contextRegion = undefined;

    const result = extractAnalysisAlerts(sarif, fakefileLinkPrefix);

    const expectedCodeSnippet = {
      startLine: result.alerts[0].highlightedRegion!.startLine,
      endLine: result.alerts[0].highlightedRegion!.endLine,
      text: ''
    };

    const actualCodeSnippet = result.alerts[0].codeSnippet;

    expect(result).to.be.ok;
    expect(result.errors.length).to.equal(1);
    expectResultParsingError(result.errors[0]);
    expectNoParsingError(result);
    expect(actualCodeSnippet).to.deep.equal(expectedCodeSnippet);
  });
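
Taken together, these two tests suggest that snippet extraction prefers the SARIF `contextRegion` and, when it is missing or has no snippet text, falls back to the highlighted region's line range with empty text rather than reporting an error. A rough sketch of that fallback (types simplified; not the extension's actual code):

```ts
interface Region {
  startLine: number;
  endLine: number;
  text?: string;
}

// Sketch: build a code snippet from the context region when available,
// otherwise fall back to the highlighted region, defaulting to empty text.
function buildCodeSnippet(contextRegion: Region | undefined, highlightedRegion: Region) {
  const region = contextRegion ?? highlightedRegion;
  return {
    startLine: region.startLine,
    endLine: region.endLine,
    text: region.text ?? ''
  };
}
```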

  it('should not return errors for result locations with no region', () => {
@@ -438,6 +465,7 @@ describe('SARIF processing', () => {

    expect(result).to.be.ok;
    expect(result.alerts.length).to.equal(1);
    expectNoParsingError(result);
  });

  it('should return errors for result locations with no physical location', () => {
@@ -537,9 +565,9 @@ describe('SARIF processing', () => {
    expect(result).to.be.ok;
    expect(result.errors.length).to.equal(0);
    expect(result.alerts.length).to.equal(3);
    expect(result.alerts.find(a => getMessageText(a.message) === 'msg1' && a.codeSnippet.text === 'foo')).to.be.ok;
    expect(result.alerts.find(a => getMessageText(a.message) === 'msg1' && a.codeSnippet.text === 'bar')).to.be.ok;
    expect(result.alerts.find(a => getMessageText(a.message) === 'msg2' && a.codeSnippet.text === 'baz')).to.be.ok;
    expect(result.alerts.find(a => getMessageText(a.message) === 'msg1' && a.codeSnippet!.text === 'foo')).to.be.ok;
    expect(result.alerts.find(a => getMessageText(a.message) === 'msg1' && a.codeSnippet!.text === 'bar')).to.be.ok;
    expect(result.alerts.find(a => getMessageText(a.message) === 'msg2' && a.codeSnippet!.text === 'baz')).to.be.ok;
    expect(result.alerts.every(a => a.severity === 'Warning')).to.be.true;
  });

@@ -595,9 +623,14 @@ describe('SARIF processing', () => {
    expect(msg.startsWith('Error when processing SARIF result')).to.be.true;
  }

  function expectNoParsingError(result: { errors: string[] }) {
    const array = result.errors;
    expect(array.length, array.join()).to.equal(0);
  }

  function buildValidSarifLog(): sarif.Log {
    return {
      version: '0.0.1' as sarif.Log.version,
      version: '2.1.0',
      runs: [
        {
          results: [