Compare commits

...

472 Commits

Author SHA1 Message Date
Andrew Eisenberg
f36ab18310 Merge pull request #1778 from github/v1.7.6
v1.7.6
2022-11-21 09:42:39 -08:00
Andrew Eisenberg
663b26328b v1.7.6 2022-11-21 09:01:47 -08:00
Andrew Eisenberg
e93d8393ca Merge pull request #1774 from github/aeisenberg/fix-mrva-packs
Allow synthetic variant analysis packs to handle `${workspace}`
2022-11-18 15:00:05 -08:00
Andrew Eisenberg
24bbd5153c Allow synthetic variant analysis packs to handle ${workspace}
`${workspace}` references are new in CLI version 2.11.3. They mean that
the version a pack depends on must be the version available in the
current CodeQL workspace.

When generating a variant analysis pack, however, we copy the target
query and generate a synthetic pack with the original dependencies.
This breaks workspace references since the synthetic pack is no longer
in the same workspace.

A simple workaround is to replace `${workspace}` with `*` references.
2022-11-18 14:47:07 -08:00
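
As a rough illustration of the workaround above (not the extension's actual code), the rewrite amounts to walking the synthetic pack's dependency map and swapping `${workspace}` for the wildcard range:

```typescript
// Minimal sketch, assuming the dependency map is the plain object parsed
// from the synthetic pack's qlpack.yml; names here are illustrative.
type DependencyMap = Record<string, string>;

function replaceWorkspaceReferences(dependencies: DependencyMap): DependencyMap {
  const fixed: DependencyMap = {};
  for (const [packName, versionRange] of Object.entries(dependencies)) {
    // `${workspace}` only resolves inside the original CodeQL workspace,
    // so the synthetic pack falls back to the wildcard range `*`.
    fixed[packName] = versionRange === "${workspace}" ? "*" : versionRange;
  }
  return fixed;
}
```
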
Andrew Eisenberg
4eb465277a Update gitignore to avoid codeql metadata files
Also, we're no longer using the rush package manager, so delete that
from gitignore.
2022-11-18 11:33:53 -08:00
Koen Vlaswinkel
3e7e4b86bd Merge pull request #1525 from github/koesie10/prettier
Add Prettier
2022-11-17 09:58:47 +01:00
Koen Vlaswinkel
f515663640 Add .git-blame-ignore-revs file for previous commit
See https://www.stefanjudis.com/today-i-learned/how-to-exclude-commits-from-git-blame/
2022-11-16 19:07:01 +01:00
Koen Vlaswinkel
ebcdf8ad0b Run Prettier on all files
This will change all existing files to match Prettier formatting.

The command used is `npm run format`.
2022-11-16 19:06:13 +01:00
Koen Vlaswinkel
f41ca1a330 Add Prettier
This adds Prettier and makes it replace tsfmt. VSCode is set to use
Prettier for formatting TypeScript/TSX files and format on save since
Prettier is very fast and does not cause any noticeable delay.
2022-11-16 19:04:17 +01:00
Koen Vlaswinkel
6219b43051 Merge pull request #1766 from github/koesie10/export-results-sorting-filtering
Add filtering and sorting to exported results
2022-11-16 19:01:36 +01:00
Koen Vlaswinkel
47297800ab Merge remote-tracking branch 'origin/main' into koesie10/export-results-sorting-filtering 2022-11-16 18:44:36 +01:00
Koen Vlaswinkel
5838f1b6a3 Merge pull request #1765 from github/koesie10/selected-copy
Use selected repositories for copying repo lists
2022-11-16 18:41:46 +01:00
Koen Vlaswinkel
bbaa27a1f0 Split filter and sort methods and simplify type checks 2022-11-16 16:32:17 +01:00
dependabot[bot]
d7fd84c8e1 Bump loader-utils from 1.4.1 to 1.4.2 in /extensions/ql-vscode (#1772)
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.1 to 1.4.2.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.2/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v1.4.1...v1.4.2)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-11-16 15:03:23 +00:00
Koen Vlaswinkel
6aaa7d63a7 Simplify type checks for repositoryIds 2022-11-16 13:02:55 +01:00
Robert
ed0868ba26 Merge pull request #1768 from github/robertbrignull/openVariantAnalysis
Remove the openVariantAnalysis command
2022-11-16 11:04:02 +00:00
Shati Patel
b2a3703d3a Render nodes for local database items (#1759) 2022-11-16 11:03:49 +00:00
Nora
539dd1f0f3 Merge pull request #1771 from github/nora/move-db-panel-files
New DB Panel: Move config files to config folder
2022-11-16 10:55:30 +01:00
Koen Vlaswinkel
2a30c669e1 Merge pull request #1760 from github/koesie10/filter-export-copy
Add filtering and sorting to exported repo list
2022-11-16 10:31:52 +01:00
Nora
bd6c3add1b Move files to config folder 2022-11-16 10:22:54 +01:00
Charis Kyriakou
435fc0a327 Fix rootDir used in test tsconfig (#1770) 2022-11-16 09:01:55 +00:00
Koen Vlaswinkel
53c1a585c1 Simplify filtering of repositories
Co-authored-by: Elena Tanasoiu <elenatanasoiu@github.com>
2022-11-16 09:53:37 +01:00
Andrew Eisenberg
ede534cf5f Merge pull request #1744 from github/aeisenberg/new-query-server
Enable the new query server by default
2022-11-15 13:24:01 -08:00
Andrew Eisenberg
9a7489f6cd Merge pull request #1674 from github/aeisenberg/assert-version
Assert VSCode version is high enough
2022-11-15 10:51:42 -08:00
Robert
2f5f09550d Remove openVariantAnalysis command 2022-11-15 16:49:49 +00:00
Koen Vlaswinkel
1793963f06 Merge branch 'koesie10/selected-copy' into koesie10/export-results-sorting-filtering 2022-11-15 16:14:45 +01:00
Koen Vlaswinkel
e0230375e3 Merge branch 'koesie10/filter-export-copy' into koesie10/selected-copy 2022-11-15 16:13:29 +01:00
Koen Vlaswinkel
b8a0ed4ec3 Merge remote-tracking branch 'origin/main' into koesie10/filter-export-copy 2022-11-15 16:12:13 +01:00
Nora
846eda5428 Merge pull request #1761 from github/nora/add-selected-state-to-db-config
Add selected to config
2022-11-15 16:09:06 +01:00
Koen Vlaswinkel
796611d3bc Merge pull request #1758 from github/koesie10/export-results-from-view
Add exporting of results to view
2022-11-15 16:02:23 +01:00
Koen Vlaswinkel
c88b32091b Merge pull request #1750 from github/koesie10/export-results
Add exporting of results from the context menu
2022-11-15 16:01:50 +01:00
Nora
a012d80341 Merge comments 2022-11-15 15:48:28 +01:00
Charis Kyriakou
542a78e4ef Stop showing warnings for unhandled requests by the mock API server (#1767) 2022-11-15 14:45:21 +00:00
Nora
cf7e33363a Resolve conflicts 2022-11-15 14:19:51 +01:00
Nora
0505d5a0ab Merge comments 2022-11-15 14:19:51 +01:00
Nora
27f62bc1ff Add selected to config 2022-11-15 14:19:51 +01:00
Koen Vlaswinkel
5ed4981673 Add tests for handleCopyRepoList and handleExportResults 2022-11-15 14:13:35 +01:00
Koen Vlaswinkel
e9681bc546 Restore private visibility of loadResultsFromStorage 2022-11-15 13:20:44 +01:00
Koen Vlaswinkel
7486431bea Use options instead of separate method for skipping cache store
This adds a new options argument to the `loadResults` method which
allows the caller to specify that the results should not be saved to the
cache. This exposes a smaller API surface and makes it harder to misuse
the methods.
2022-11-15 13:16:59 +01:00
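
The shape of such an options argument might look roughly like this; the names below are illustrative assumptions, not the extension's actual signatures:

```typescript
// Hypothetical sketch of an options bag replacing a separate method.
interface LoadResultsOptions {
  // When true, the loaded results are not written to the in-memory cache.
  skipCacheStore?: boolean;
}

const cache = new Map<number, unknown>();

async function loadResultsFromStorage(variantAnalysisId: number): Promise<unknown> {
  // Stand-in for reading the stored results from disk.
  return { variantAnalysisId };
}

async function loadResults(
  variantAnalysisId: number,
  options: LoadResultsOptions = {},
): Promise<unknown> {
  const results = await loadResultsFromStorage(variantAnalysisId);
  if (!options.skipCacheStore) {
    cache.set(variantAnalysisId, results);
  }
  return results;
}
```
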
Koen Vlaswinkel
95e073b9a4 Use separate directory for variant analysis exports
Each variant analysis export can be different due to different filters,
so there are two options:
- We need to clean up the directory before each export to ensure no old
  files are left
- We need to use a separate directory for each export

This implements the second option, which is more flexible and allows the
user to retain different result exports.
2022-11-15 13:05:37 +01:00
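
A minimal sketch of the second option, assuming a base export directory; the directory naming scheme here is an assumption, not the extension's actual layout:

```typescript
import { mkdir } from "fs/promises";
import { join } from "path";

// Give every export its own directory so earlier exports are retained
// instead of being overwritten or cleaned up.
async function createExportDirectory(
  exportBaseDir: string,
  queryName: string,
): Promise<string> {
  const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
  const exportDir = join(exportBaseDir, `${queryName}-${timestamp}`);
  await mkdir(exportDir, { recursive: true });
  return exportDir;
}
```
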
Koen Vlaswinkel
f94837ebca Add filtering and sorting to exported results
This adds filtering (based on search and selected repositories) and
sorting to exporting results. This is done in the same way as for
copying the repository list, so the changes are fairly minimal.
2022-11-15 12:06:06 +01:00
Koen Vlaswinkel
dd1ced9ba3 Merge branch 'koesie10/selected-copy' into koesie10/export-results-sorting-filtering 2022-11-15 12:02:21 +01:00
Charis Kyriakou
f1b24987eb Re-render db panel on config update (#1763) 2022-11-15 10:36:38 +00:00
Koen Vlaswinkel
4ed409d91a Use selected repositories for copying repo lists
This will use the selected repositories to limit which repositories are
included in the copied repo list. If there are both selected
repositories and a search filter (on the full name), the search filter
will be ignored and the selected repositories will be used in full.
2022-11-15 11:23:01 +01:00
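
In other words, the selection takes precedence over the search filter. A hedged sketch of that precedence (illustrative names only):

```typescript
interface Repository {
  fullName: string;
}

// If any repositories are selected, the selection wins outright and the
// full-name search filter is ignored; otherwise the filter applies.
function reposForRepoList(
  allRepositories: Repository[],
  selectedRepositories: Repository[],
  applySearchFilter: (repos: Repository[]) => Repository[],
): Repository[] {
  return selectedRepositories.length > 0
    ? selectedRepositories
    : applySearchFilter(allRepositories);
}
```
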
Charis Kyriakou
39ae3cd7f4 Add mode and subscriptions to App container (#1762) 2022-11-15 10:03:10 +00:00
Koen Vlaswinkel
73ff057df0 Merge remote-tracking branch 'origin/main' into koesie10/filter-export-copy 2022-11-15 10:53:58 +01:00
Andrew Eisenberg
2dd482a8a4 Enable the new query server by default 2022-11-14 17:36:31 -08:00
Andrew Eisenberg
93cc4082dc Merge branch 'main' into aeisenberg/assert-version 2022-11-14 17:35:25 -08:00
Charis Kyriakou
345125994a Revert "Rename iconPath to icon" (#1764)
* Revert "Rename iconPath to icon (#1742)"

This reverts commit 93b6abeeb4.

* Add comment to explain things
2022-11-14 16:38:59 +00:00
Koen Vlaswinkel
9b0fe4ddc7 Merge pull request #1755 from github/koesie10/checkbox-state
Keep track of checkbox state in view
2022-11-14 17:18:30 +01:00
Charis Kyriakou
fca2faf021 Add extension and storage paths to App container (#1756) 2022-11-14 15:47:58 +00:00
Koen Vlaswinkel
30988993be Simplify canSelect method
Co-authored-by: Robert <robertbrignull@github.com>
2022-11-14 16:33:08 +01:00
Koen Vlaswinkel
bb4307ea3e Add filtering and sorting to exported repo list
This will pass the filter and sort parameters in the export repo list
message so they can be used by the command to filter and sort the
repositories which are placed in the repo list.
2022-11-14 14:50:52 +01:00
Koen Vlaswinkel
82a7fc5070 Move filter and sort functions to pure
These functions can be re-used by the sorting and filtering code for
exporting results and copying repository lists, so these should not be
in the view directory.

The tests have been kept in the same place for now, but they should be
moved to the pure tests directory once those have been switched to Jest.
I figured it wasn't worth it to convert these to Mocha, and convert them
back to Jest in a week.
2022-11-14 14:10:40 +01:00
Shati Patel
3fd9fd449c Add support for local dbs in config (#1751) 2022-11-14 12:45:18 +00:00
Koen Vlaswinkel
d6ae5b1359 Add exporting of results to view
This will allow a user to export the results from the view by clicking
the "Export results" button.
2022-11-14 12:03:30 +01:00
Koen Vlaswinkel
922d2122a7 Merge remote-tracking branch 'origin/main' into koesie10/export-results 2022-11-14 12:01:23 +01:00
Koen Vlaswinkel
f48567101a Merge pull request #1757 from github/koesie10/copy-repo-list-from-view
Add copying of repository list to view
2022-11-14 11:55:59 +01:00
Koen Vlaswinkel
96c24ccd5b Add copying of repository list to view
This will allow a user to use the "Copy repository list" button in the
view to copy a repository list.
2022-11-14 11:26:04 +01:00
Koen Vlaswinkel
6168145116 Merge pull request #1753 from github/koesie10/copy-repo-list
Add copying of repository list for variant analyses
2022-11-14 09:48:43 +01:00
Charis Kyriakou
a05313733f Add event emitter abstraction (#1754) 2022-11-11 16:28:20 +00:00
Koen Vlaswinkel
4caa1e2d82 Merge remote-tracking branch 'origin/main' into koesie10/export-results 2022-11-11 16:49:35 +01:00
Koen Vlaswinkel
e90f3d0b46 Keep track of checkbox state in view
This will add a new `useState` call on the top-level to keep track of
the checkbox state. It will allow all downloaded repositories to be
selected. This will allow us to make the copy repository list and export
results button dependent on the selected repositories.
2022-11-11 16:33:20 +01:00
Shati Patel
7e8fa5c5fe Bump CLI version to 2.11.3 for integration tests (#1752) 2022-11-11 14:35:37 +00:00
Koen Vlaswinkel
baae8d5710 Merge remote-tracking branch 'origin/main' into koesie10/copy-repo-list 2022-11-11 15:23:42 +01:00
Koen Vlaswinkel
0d15768093 Merge pull request #1749 from github/koesie10/split-remote-query-flows
Split flows for running remote queries
2022-11-11 15:15:26 +01:00
Koen Vlaswinkel
022d5c564f Add copying of repository list for variant analyses
This adds the ability to copy the repository list for variant analyses
from the context menu in the query history.
2022-11-11 15:14:28 +01:00
Koen Vlaswinkel
47ee82fc03 Merge remote-tracking branch 'origin/main' into koesie10/split-remote-query-flows 2022-11-11 15:00:02 +01:00
Koen Vlaswinkel
4eb8c55045 Merge pull request #1748 from github/koesie10/reduce-nesting-remote-query
Reduce nesting in `runRemoteQuery`
2022-11-11 14:46:05 +01:00
Koen Vlaswinkel
175f8eadc5 Merge remote-tracking branch 'origin/main' into koesie10/split-remote-query-flows 2022-11-11 14:37:51 +01:00
Koen Vlaswinkel
65641e3776 Move remote queries specific code out of run-remote-query
This moves some of the code that is specific to remote queries out of
the `run-remote-query.ts` file and instead places it in separate files
that only deal with remote queries, rather than also dealing with
variant analyses.
2022-11-11 14:35:02 +01:00
Koen Vlaswinkel
7649f20d4d Remove unused return values for run methods
The `runRemoteQuery` and `runVariantAnalysis` were returning values
which were only used in tests. This removes them and replaces the tests
by expectations on the commands called by the methods.
2022-11-11 14:28:09 +01:00
Koen Vlaswinkel
09120a2328 Declare type explicitly instead of inferring it 2022-11-11 14:12:55 +01:00
Koen Vlaswinkel
42c642df25 Add exporting of variant analysis results
This adds the export of variant analysis results. This is unfortunately
a larger change than I would have liked because there are many
differences in the types, and I think further unification of the code
might make it less clear and would actually make this code harder to
read once the remote queries code is removed.

In general, the idea for the export of a variant analysis follows the
same process as the export of remote queries, with the difference being
that variant analysis results are loaded on-the-fly from disk, rather
than only loading from memory. This means it should use less memory, but
it also means that the export is slower.
2022-11-11 14:01:34 +01:00
Koen Vlaswinkel
62453d12c6 Split commands for exporting results
There was only a single command for exporting variant analysis results,
which would either export the selected result or a given result. From
the query history, the command was always calculating the exported
result, while we can just give a query ID to export.

This will create two separate commands for exporting results, one for
exporting the selected results (user-visible) and one for exporting a
specific remote query result. This will make it easier to add support
for exporting variant analysis results.

I'm not sure if there will be impact from renaming the command. I expect
the only impact to be that the command history might not show the
command in the correct place (i.e. it disappears from recently used
commands), but please check if that is the only impact.
2022-11-11 14:01:34 +01:00
Koen Vlaswinkel
8b360f3004 Merge pull request #1747 from github/koesie10/remove-dry-run-parameter
Remove `dryRun` parameter from `runRemoteQuery`
2022-11-11 11:38:25 +01:00
Koen Vlaswinkel
9894eda60c Merge pull request #1746 from github/koesie10/remote-queries-tests-tar
Change remote queries to test against submitted data
2022-11-11 11:36:06 +01:00
Shati Patel
1241ce985a Fix date format to 'en-US' (#1741) 2022-11-11 10:35:24 +00:00
Koen Vlaswinkel
a4402cb68b Add tar-stream as dev dependency 2022-11-11 11:24:22 +01:00
Koen Vlaswinkel
21eabc2a93 Merge pull request #1745 from github/koesie10/extract-remote-query-preparation
Extract remote query preparation to separate method
2022-11-11 11:23:07 +01:00
Koen Vlaswinkel
c089135626 Change arrow function to exported function 2022-11-11 11:23:03 +01:00
Koen Vlaswinkel
00905a9be3 Add return type to prepareRemoteQueryRun 2022-11-11 11:07:11 +01:00
Charis Kyriakou
93b6abeeb4 Rename iconPath to icon (#1742) 2022-11-11 10:56:31 +01:00
Koen Vlaswinkel
b1a5f14471 Split flows for running remote queries
This removes the `runRemoteQuery` method and instead moves all logic
specific to remote queries/variant analysis to the remote queries
manager and variant analysis manager respectively. This will make it
easier to completely remove the remote queries manager in the future.
2022-11-11 10:48:43 +01:00
Koen Vlaswinkel
082fea8587 Reduce nesting in runRemoteQuery
Now that we do not have a dry run mode, we can create and clean up the
temporary directory in the same function. This allows us to remove the
complete try..finally block inside `runRemoteQuery` and move it to a
much more local spot.
2022-11-11 10:45:14 +01:00
Koen Vlaswinkel
cca13fde48 Remove dryRun parameter from runRemoteQuery 2022-11-11 10:43:11 +01:00
Koen Vlaswinkel
2dad33f2ba Change remote queries to test against submitted data
The remote queries tests were testing the data on the filesystem, rather
than the data submitted to the server. This required using a `dryRun`
parameter to prevent deleting the temporary directory, while we can
actually just test against the submitted data.

This will create an in-memory filesystem of the submitted query pack by
un-tar-gz'ing the query pack into memory and using that to test the
existence of certain files.
2022-11-11 10:40:04 +01:00
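
A minimal sketch of how such an in-memory extraction could look with the `tar-stream` package (added as a dev dependency elsewhere in this list); this is an assumption about the shape of the test helper, not the actual code:

```typescript
import { createGunzip } from "zlib";
import { extract } from "tar-stream";

// Un-tar-gz a submitted query pack into a map of file path -> contents,
// so tests can assert that certain files exist without touching disk.
async function untarToMemory(tarGz: Buffer): Promise<Map<string, Buffer>> {
  const files = new Map<string, Buffer>();
  const extractor = extract();

  return new Promise((resolve, reject) => {
    extractor.on("entry", (header, stream, next) => {
      const chunks: Buffer[] = [];
      stream.on("data", (chunk) => chunks.push(chunk));
      stream.on("end", () => {
        files.set(header.name, Buffer.concat(chunks));
        next();
      });
      stream.on("error", reject);
    });
    extractor.on("finish", () => resolve(files));
    extractor.on("error", reject);

    const gunzip = createGunzip();
    gunzip.on("error", reject);
    gunzip.pipe(extractor);
    gunzip.end(tarGz);
  });
}
```
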
Koen Vlaswinkel
5905cf8811 Extract remote query preparation to separate method
There is some common logic between remote queries and the variant
analysis flows which deals with parsing the query and asking the user
how to run the query. This extracts that part of the logic to a separate
method such that the only logic left in the actual `runRemoteQuery`
method is related to submitting the query.
2022-11-11 10:38:41 +01:00
Koen Vlaswinkel
616a2693ef Merge pull request #1726 from github/koesie10/new-variant-analysis-statuses
Use new variant analysis statuses
2022-11-11 10:25:10 +01:00
Koen Vlaswinkel
a80ace5334 Add alert message for failed Actions workflow
This also changes the failure reason alert component to remove the logs
button since it's not used by any failure reason. Instead, the message
for a failed Actions workflow now includes a link that opens the
Actions workflow run.
2022-11-10 15:07:58 +01:00
Koen Vlaswinkel
b333a16570 Merge remote-tracking branch 'origin/main' into koesie10/new-variant-analysis-statuses 2022-11-10 14:58:37 +01:00
Koen Vlaswinkel
03e48b7ae0 Merge pull request #1728 from github/koesie10/failed-view
Handle failed status in variant analysis view
2022-11-10 11:22:24 +01:00
Charis Kyriakou
71297e360b Remove unused method argument (#1740) 2022-11-10 08:18:04 +00:00
Charis Kyriakou
47af1f8c05 Add some integration tests for the db panel (#1739) 2022-11-09 15:37:47 +00:00
Koen Vlaswinkel
a2398990bc Merge pull request #1729 from github/koesie10/failed-download-view
Handle failed result download in view
2022-11-09 12:43:51 +01:00
Charis Kyriakou
8183c3108c Surface db config errors (#1730) 2022-11-09 11:37:22 +00:00
Koen Vlaswinkel
5390a11dc7 Merge pull request #1738 from github/koesie10/update-mrva-messages
Update messages for variant analysis warnings
2022-11-09 11:54:05 +01:00
Koen Vlaswinkel
bac39630d6 Update messages for variant analysis warnings
This uses the correct title and message for all warnings currently
supported.
2022-11-09 10:16:18 +01:00
Koen Vlaswinkel
c2baa4557f Merge remote-tracking branch 'origin/main' into koesie10/failed-view 2022-11-09 10:05:14 +01:00
Andrew Eisenberg
96d99a71a7 Merge pull request #1737 from github/version/bump-to-v1.7.6
Bump version to v1.7.6
2022-11-08 20:08:54 -08:00
aeisenberg
632a675b13 Bump version to v1.7.6 2022-11-09 02:04:18 +00:00
Andrew Eisenberg
acc37fb4de Merge pull request #1736 from github/v1.7.5
v1.7.5
2022-11-08 18:00:33 -08:00
Andrew Eisenberg
05951b6c74 v1.7.5 2022-11-08 17:24:55 -08:00
Andrew Eisenberg
473569df5b Merge pull request #1735 from adityasharad/ast/synthetic-query-pack
Contextual queries: Support running when the library pack is in the package cache
2022-11-08 17:15:52 -08:00
Aditya Sharad
97c985b716 Contextual queries: Record whether a temp lock file was created 2022-11-08 16:37:30 -08:00
Aditya Sharad
84b9d9c994 Contextual queries: Factor common code for resolving and running
Shared by the AST viewer, jump to def, and find references
contextual queries.

This allows contextual queries to have their dependencies
resolved and be run whether the library pack is in the
workspace or in the package cache.
2022-11-08 16:06:27 -08:00
Aditya Sharad
639487be0a AST viewer: Address review comments
Clear the CLI server's pack cache before installing packs,
to avoid race conditions where the new lock file is not
detected during query running.

Adjust some helper methods.
2022-11-08 15:33:04 -08:00
Aditya Sharad
f9a19b6a4a AST viewer: Support running when the library pack is in the package cache
If the library pack containing the AST query does not have
a lock file, it is likely to be in the package cache, not
a checkout of the CodeQL repo.
In this case, use `codeql pack resolve-dependencies`
to create a temporary lock file, and `codeql pack install`
to install the dependencies of this library pack.

This allows the CLI to resolve the library path and
dependencies for the AST query before running it.
2022-11-08 14:16:18 -08:00
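
A rough sketch of that flow, shelling out to the CodeQL CLI directly; the extension actually goes through its CLI server, so the invocation below is an assumption for illustration:

```typescript
import { execFile } from "child_process";
import { existsSync } from "fs";
import { join } from "path";
import { promisify } from "util";

const execFileAsync = promisify(execFile);

// If the library pack has no lock file, create a temporary one with
// `codeql pack resolve-dependencies`, then install its dependencies.
// Returns whether a temporary lock file was created so the caller can
// clean it up afterwards.
async function ensurePackDependencies(
  codeqlPath: string,
  packDir: string,
): Promise<boolean> {
  const lockFile = join(packDir, "codeql-pack.lock.yml");
  const createdTempLockFile = !existsSync(lockFile);

  if (createdTempLockFile) {
    await execFileAsync(codeqlPath, ["pack", "resolve-dependencies", packDir]);
  }
  await execFileAsync(codeqlPath, ["pack", "install", packDir]);

  return createdTempLockFile;
}
```
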
Koen Vlaswinkel
ab18a74b73 Update error message for no repos queried 2022-11-08 16:56:37 +01:00
Andrew Eisenberg
ab0e8a0ff2 Merge pull request #1722 from github/aeisenberg/bump-timeouts-in-test 2022-11-08 07:35:43 -08:00
Nora
03f58593ce Merge pull request #1717 from github/norascheuch/add-config-validation
Add config validation
2022-11-08 16:11:15 +01:00
Koen Vlaswinkel
a24e7c6172 Merge pull request #1727 from github/koesie10/canceled-view
Handle canceled status in variant analysis view
2022-11-08 14:10:43 +01:00
Koen Vlaswinkel
bf52c7105d Handle failed result download in view
We were not yet showing any errors when a result download had failed.
This adds a warning icon to any repositories for which the download has
failed and allows expanding the item to show an alert.
2022-11-08 13:40:22 +01:00
Koen Vlaswinkel
8e6cfa8551 Handle failed status in variant analysis view
This will show a message for the failure reason in the variant analysis
view when the variant analysis has failed. There don't seem to be
designs for these alerts, but we will need to do a full design review of
the view at some point anyway, so I don't think the exact text is
important.
2022-11-08 13:22:53 +01:00
Koen Vlaswinkel
d711d80cfb Use new variant analysis statuses
The `status` on the variant analysis has changed to have `succeeded`,
`failed` and `cancelled` instead of `completed`.
2022-11-08 13:13:55 +01:00
Koen Vlaswinkel
e164bbb79e Handle canceled status in variant analysis view
The variant analysis view was missing an alert when the variant
analysis was canceled. This adds it, and also adds a story for checking
what the view of a canceled variant analysis looks like.
2022-11-08 13:11:50 +01:00
Koen Vlaswinkel
bbdad0afc4 Merge pull request #1720 from github/koesie10/stop-query-button
Add cancelling of variant analysis to view
2022-11-08 11:05:48 +01:00
Koen Vlaswinkel
89359e30bf Use ID instead of query name in error message
Co-authored-by: Robert <robertbrignull@github.com>
2022-11-08 10:36:42 +01:00
Koen Vlaswinkel
4c0c93d120 Merge pull request #1721 from github/koesie10/run-all-unit-tests
Run all unit tests on Linux/macOS
2022-11-08 09:53:40 +01:00
Andrew Eisenberg
94b411e3b2 Merge pull request #1724 from github/aeisenberg/fix-actions-warnings
Uses newer actions versions and avoids `set-output`
2022-11-07 20:02:38 -08:00
Andrew Eisenberg
2baf99b38a Merge pull request #1723 from github/dependabot/npm_and_yarn/extensions/ql-vscode/loader-utils-1.4.1
Bump loader-utils from 1.4.0 to 1.4.1 in /extensions/ql-vscode
2022-11-07 19:53:55 -08:00
Andrew Eisenberg
082d4b8c78 Uses newer actions versions and avoids set-output
`set-output` is deprecated:
https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/
2022-11-07 16:20:29 -08:00
dependabot[bot]
66cb7dc92f Bump loader-utils from 1.4.0 to 1.4.1 in /extensions/ql-vscode
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.0 to 1.4.1.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.1/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v1.4.0...v1.4.1)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-07 23:29:11 +00:00
Andrew Eisenberg
62ed83b655 Add CHANGELOG entry 2022-11-07 15:23:24 -08:00
Andrew Eisenberg
767a5cf809 Merge branch 'main' into aeisenberg/assert-version 2022-11-07 15:21:57 -08:00
Andrew Eisenberg
9519efe525 Merge pull request #1679 from github/version/bump-to-v1.7.5
Bump version to v1.7.5
2022-11-07 15:21:25 -08:00
Andrew Eisenberg
71daf0b8c6 Bumps timeouts in tests
This test occasionally times out during setup. This change attempts to
avoid the problem by increasing the test's timeout.
2022-11-07 15:19:58 -08:00
Andrew Eisenberg
67d2fe5332 Add a way to disable version checks
Version checks are re-enabled whenever the version of vscode changes.
This is because the user would have needed to manually update their
vscode version in order to get this new version. And another failing
version check would mean there is a newer version that needs to be
downloaded.
2022-11-07 14:25:45 -08:00
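
A hedged sketch of that behaviour using VS Code's global state; the memento keys and helper name are assumptions, not the extension's own:

```typescript
import * as vscode from "vscode";

const VERSION_CHECKS_DISABLED_KEY = "versionChecksDisabled";
const LAST_SEEN_VSCODE_VERSION_KEY = "lastSeenVsCodeVersion";

// Version checks stay disabled only for the vscode version on which they
// were disabled; once vscode itself is updated, re-enable them.
function shouldRunVersionChecks(context: vscode.ExtensionContext): boolean {
  const lastSeenVersion = context.globalState.get<string>(
    LAST_SEEN_VSCODE_VERSION_KEY,
  );

  if (lastSeenVersion !== vscode.version) {
    void context.globalState.update(VERSION_CHECKS_DISABLED_KEY, false);
    void context.globalState.update(LAST_SEEN_VSCODE_VERSION_KEY, vscode.version);
    return true;
  }

  return !context.globalState.get<boolean>(VERSION_CHECKS_DISABLED_KEY, false);
}
```
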
Aditya Sharad
b526ff4d3a Contextual queries: update doc comments 2022-11-07 14:02:50 -08:00
Charis Kyriakou
03bc63c689 Read schema from file 2022-11-07 16:07:10 +00:00
Charis Kyriakou
916b840407 Use new function in db config store 2022-11-07 16:57:00 +01:00
Nora
4858be6ac8 Extract validation to its own file 2022-11-07 16:56:48 +01:00
Nora
daec8b691d Extract validation to its own file 2022-11-07 16:56:48 +01:00
Nora
3df94b92cd Add basic public config validation 2022-11-07 16:56:23 +01:00
Koen Vlaswinkel
5f1431ffc2 Run all unit tests on Linux/macOS
It seems like the expansion of the test files pattern is different
between Windows and Linux/macOS. This fixes it by allowing Mocha to
expand the glob pattern rather than the shell, which should fix the
inconsistency.
2022-11-07 16:41:52 +01:00
Shati Patel
3bb10d8c6e Add button to open database config from the new databases UI (#1719) 2022-11-07 13:24:15 +00:00
Charis Kyriakou
3f001c981d Create remote db tree view items based on the user's db config (#1714) 2022-11-07 11:26:33 +00:00
Koen Vlaswinkel
09bae13732 Add cancelling of variant analysis to view
This implements the "Stop query" button on the view. It moves some of
the logic of actually cancelling the variant analysis to the manager
instead of being in the query history to allow better re-use of the
code.
2022-11-07 11:21:24 +01:00
Koen Vlaswinkel
93054e14a2 Merge pull request #1683 from github/koesie10/fix-duplicate-downloads
Fix duplicate variant analysis results downloads
2022-11-07 10:37:51 +01:00
Koen Vlaswinkel
a661daaa37 Merge remote-tracking branch 'origin/main' into koesie10/fix-duplicate-downloads 2022-11-07 09:43:31 +01:00
Koen Vlaswinkel
b062f61544 Merge pull request #1716 from github/koesie10/use-shared-types-in-variant-analysis
Remove `gh-api` usage from variant analysis code
2022-11-04 17:07:54 +01:00
Elena Tanasoiu
1fdb1e23f4 Merge pull request #1715 from github/elena/cancel-variant-analysis
Implement query history "cancel" option
2022-11-04 15:19:20 +00:00
Elena Tanasoiu
145f0bf1f2 Be slightly less verbose when mocking a variant analysis 2022-11-04 14:13:18 +00:00
Elena Tanasoiu
2ca15f3665 Make it possible to cancel a variant analysis from Query History
This also adds tests for cancelling a local query and a remote query.

NB: We only cancel queries that are in progress, so the tests check
the behaviour both for in progress and not in progress items.
2022-11-04 14:13:17 +00:00
Elena Tanasoiu
20b127c693 Make it possible to cancel a mock local query
We're going to add tests to check that we cancel a remote query.

We'll need a `.cancel` method to be present on our mock local queries for this.
2022-11-04 14:13:17 +00:00
Elena Tanasoiu
620cc39e63 Introduce createVariantAnalysis method in actions API
This will allow us to make an API call to cancel a variant analysis.
2022-11-04 14:13:17 +00:00
Koen Vlaswinkel
8345284327 Merge pull request #1712 from github/koesie10/filter-sort-fixes
Add missing margin to repositories sort/filter row
2022-11-04 14:54:12 +01:00
Koen Vlaswinkel
ae3c0888e8 Remove gh-api usage from variant analysis code
This removes all usages of the `gh-api` types from the variant analysis
code by replacing them with the same types defined in `shared`.

This is a breaking change for the query history since the files
serialized to disk now also change. However, since this is still behind
a feature flag the change should be safe to make now.
2022-11-04 14:44:37 +01:00
Nora
4d00c4abfc Merge pull request #1713 from github/charis-nora/introduce-error-view-in-db-panel
Introduce error in new db panel
2022-11-04 13:41:27 +01:00
Nora
99e523f645 Update extensions/ql-vscode/src/databases/ui/db-tree-data-provider.ts
Co-authored-by: Charis Kyriakou <charisk@users.noreply.github.com>
2022-11-04 13:23:29 +01:00
Nora
6e19b5200c Update extensions/ql-vscode/src/databases/ui/db-tree-data-provider.ts
Co-authored-by: Charis Kyriakou <charisk@users.noreply.github.com>
2022-11-04 13:23:23 +01:00
Nora
45bb643a8a Update extensions/ql-vscode/src/databases/ui/db-tree-view-item.ts
Co-authored-by: Charis Kyriakou <charisk@users.noreply.github.com>
2022-11-04 13:23:17 +01:00
Koen Vlaswinkel
c6548cbe41 Add missing margin to repositories sort/filter row 2022-11-04 13:08:43 +01:00
Nora
3cc7d3ce8a Add sample error
Co-authored-by: Charis Kyriakou <charisk@users.noreply.github.com>
2022-11-04 13:06:31 +01:00
Koen Vlaswinkel
3dbd071570 Merge pull request #1711 from github/koesie10/sort-repositories
Add sorting to variant analysis repositories
2022-11-04 12:27:43 +01:00
Nora
fe90f38ad4 Merge pull request #1709 from github/charis-nora/validate-db-config-file
Validate workspace-databases.json
2022-11-04 12:20:20 +01:00
Koen Vlaswinkel
588351b101 Merge remote-tracking branch 'origin/main' into koesie10/sort-repositories 2022-11-04 11:30:24 +01:00
Koen Vlaswinkel
c0ba9c5f6e Merge pull request #1705 from github/koesie10/filter-repositories-by-name
Add repository filter by full name
2022-11-04 11:28:58 +01:00
Koen Vlaswinkel
18e1cfa8aa Merge remote-tracking branch 'origin/main' into koesie10/fix-duplicate-downloads 2022-11-04 11:27:50 +01:00
Koen Vlaswinkel
49f97e1bcc Add tests for repo states 2022-11-04 11:24:52 +01:00
Nora
f6b50bb3d6 Use contributes jsonValidation instead 2022-11-04 11:24:46 +01:00
Nora
544ff89bf8 Merge comments 2022-11-04 10:20:47 +01:00
Elena Tanasoiu
e7a0c7e652 Merge pull request #1710 from github/elena/sort-sorting
Make sorting tests work with remote queries & variant analysis history items
2022-11-03 16:57:14 +00:00
Koen Vlaswinkel
5ff7b8a1c1 Add sorting to variant analysis repositories
This adds sorting to the variant analysis repositories on the outcome
panels. The sort state is shared between all panels, so unlike the
design this doesn't disable the sort when you are on e.g. the no access
panel.
2022-11-03 16:18:22 +01:00
Elena Tanasoiu
f0aa0a5ef6 Get rid of old item factory method
We don't use it anymore.
2022-11-03 14:34:27 +00:00
Elena Tanasoiu
52ee4563dc Make sorting tests work with remote queries and variant analysis history items
We can now, finally, test sorting works, with REAL objects.
2022-11-03 14:34:27 +00:00
Elena Tanasoiu
a7431bdc1d Use new factory methods in query history tests 2022-11-03 14:18:52 +00:00
Elena Tanasoiu
a1d3c612ad Use new createMockLocalQueryInfo method in query-history-info tests 2022-11-03 14:16:43 +00:00
Elena Tanasoiu
2fe0c7925d Adapt tests for query history labels to use new LocalQueryInfo factory method
We're making a number of changes:

1. We're changing the userSpecifiedLabel value to be
`user-specified-name` instead of `xxx`

2. For local queries, we're changing `in progress` to `finished in 0
seconds` when the query has results. The previous version was
contradictory because any query still in progress wouldn't have results.

3. Similarly, for remote queries, we're changing `in progress` to
`completed` when the query has results. Here we actually set a `status`
property which means `in progress` becomes `completed`.
2022-11-03 14:08:16 +00:00
Elena Tanasoiu
028cc79bb9 Combine createMockLocalQueryInfo and createMockLocalQuery
One factory method to rule them all!

There were a number of problems with these methods:

1. We were previously using two different factory methods to generate
fake local queries. Ideally we'd just have one.

2. We weren't really creating a real LocalQueryInfo object, which
blocked us [1] from being able to correctly understand which fields we
need in our tests and how they interact together.

3. We stubbed a bunch of methods on the original object to get our tests
to work. We can now use a real object with all the trimmings.

[1]: https://github.com/github/vscode-codeql/pull/1697#discussion_r1011990685
2022-11-03 14:02:14 +00:00
Elena Tanasoiu
0639c66bb0 Change createMockRemoteQueryHistoryItem to receive resultCount and startTime
Again, we'll need these for sorting.

We also want to be able to set/unset a userSpecifiedLabel. Since this factory
method is used in `history-item-label-provider.test.ts`, we have tests there
that count on this custom label being defined/undefined.
2022-11-03 13:58:09 +00:00
Nora
e6d0646786 Fix rebase conflict changes 2022-11-03 14:56:33 +01:00
Nora
fcc814c0c9 Call db config and initialize with extension path 2022-11-03 14:56:33 +01:00
Nora
ea0887622e Copy schema to extension files 2022-11-03 14:56:33 +01:00
Nora
d4fb0fa177 Add workspace databases schema 2022-11-03 14:56:33 +01:00
Elena Tanasoiu
44aeaf7d32 Allow createMockVariantAnalysisHistoryItem to receive named params
Similarly, we want to provide params selectively to test sorting.

We're also setting some defaults to play nicely with our current
tests.
2022-11-03 13:56:09 +00:00
Elena Tanasoiu
dd55b7e03e Change createMockVariantAnalysis calls to use named params 2022-11-03 13:55:20 +00:00
Elena Tanasoiu
304c9fb12d Change createMockVariantAnalysis to receive named params
So that we can set them selectively.

For example, we'd like to set executionStartTime to test sorting by date.
2022-11-03 13:55:20 +00:00
Shati Patel
4e3b201054 Rename dbconfig file to workspace-databases.json (#1707) 2022-11-03 13:38:32 +00:00
Elena Tanasoiu
4fa5355239 Merge pull request #1702 from github/elena/de-gulp
Don't run gulp for integration test commands
2022-11-03 13:02:28 +00:00
Charis Kyriakou
7cff1fb84d Add core classes and models for the new (experimental) databases panel (#1704) 2022-11-03 12:47:23 +00:00
Shati Patel
684c492a43 Set up a watcher that checks for changes to the new db config (#1699)
Co-authored-by: Nora <norascheuch@users.noreply.github.com>
Co-authored-by: Charis Kyriakou <charisk@users.noreply.github.com>
2022-11-03 12:26:30 +00:00
Elena Tanasoiu
20e7432ca0 Mention npm run build in the docs 2022-11-03 12:22:24 +00:00
Koen Vlaswinkel
26e2021551 Merge pull request #1692 from github/koesie10/add-metadata-to-repo-row
Add repository metadata to row
2022-11-03 11:55:03 +01:00
Koen Vlaswinkel
18111ff4bf Add repository filter by full name
This adds a new textbox to the outcome panels that allows filtering by
the repository full name (e.g. `github/vscode-codeql`). The filtering
uses the same logic as the existing remote queries filter, i.e. by
converting the input and the repository full name to lower case and
checking that the latter includes the former.
2022-11-03 11:12:29 +01:00
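
As described, the matching is a case-insensitive substring check on the full name; a minimal sketch (illustrative names only):

```typescript
interface Repository {
  fullName: string; // e.g. "github/vscode-codeql"
}

// Lower-case both sides and keep repositories whose full name contains
// the search text.
function filterRepositoriesByFullName(
  repositories: Repository[],
  searchText: string,
): Repository[] {
  const needle = searchText.toLowerCase();
  return repositories.filter((repo) =>
    repo.fullName.toLowerCase().includes(needle),
  );
}
```
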
Elena Tanasoiu
75ed7d20dc Await file operations when copying test data
Both `copyNoWorkspaceData` and `copyCliIntegrationData` return
promises. Since file copying is quite fast at the moment, this
hasn't been a problem, but it might become a problem in the future
if we start copying larger files.

Let's wait for the operations to finish.
2022-11-03 09:49:57 +00:00
Elena Tanasoiu
abfd1e2e83 Make it possible to have test data in cli-integration
This will now reload any files that live in `src/vscode-tests/*/data`.
2022-11-03 09:49:56 +00:00
Elena Tanasoiu
8e7fa4f1d6 Update docs to add reminder about using test data 2022-11-03 09:49:52 +00:00
Robert
afc0d4e6db Merge pull request #1697 from github/robertbrignull/open_pending_analysis
Don't show "open on github" link when we don't yet have anything to show
2022-11-02 16:03:41 +00:00
Elena Tanasoiu
aba2277053 Stop removing files during test setup
Now that we have a watch command to check when our test files
need updating, we don't need to do this step during the setup.

Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-11-02 15:51:35 +00:00
Elena Tanasoiu
cf6a10ccb2 Introduce watch method for test files
Because we're no longer running `gulp` when we run our test command,
we're going to need a way to update our test files when they change.

This will watch for any changes in our test files and copy the new
version over.

Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-11-02 15:50:59 +00:00
Shati Patel
7296c645b9 Add database configuration store (#1691)
This "config store" creates a `dbconfig.json` file (if it doesn't yet exist),
and reads the file to load the database panel state.

Only the database config store should be able to modify the config
— the config cannot be modified externally.
2022-11-02 15:07:23 +00:00
Elena Tanasoiu
d8a3e49d19 Update contribution docs 2022-11-02 14:09:53 +00:00
Elena Tanasoiu
b639f62cfd Don't run gulp for integration tests
We previously attempted to speed up no-workspace tests [1] but realised
we still needed to run some setup steps to get the latest files [2].

Given that we already have `npm run watch` running in the background
when we run our tests, we should be able to regenerate files on the fly.

This means we can drop `gulp` from our setup steps when running integration
tests.

While there's still a danger that you forget to run `npm run watch` in
the background, we think the massive speed up (10s -> 1s) is worth it
as we add more and more tests to this extension.

[1]: https://github.com/github/vscode-codeql/pull/1694
[2]: https://github.com/github/vscode-codeql/pull/1696
2022-11-02 14:02:19 +00:00
Koen Vlaswinkel
1487ff5e0e Merge remote-tracking branch 'origin/main' into koesie10/add-metadata-to-repo-row 2022-11-02 14:05:08 +01:00
Elena Tanasoiu
0965448c02 Merge pull request #1688 from github/elena/query-history-testing-with-different-items
Make query history tests work with remote queries & variant analyses
2022-11-02 12:47:32 +00:00
Koen Vlaswinkel
cb1233d018 Improve humanizeRelativeTime tests
The tests were expecting the wrong results, except for the case where
the time was less than a second. For less than a second ago, it makes
sense to return "this minute". For times that are 2.001 minutes ago, it
makes sense to return "2 minutes ago" rather then the previous behaviour
of "3 minutes ago".
2022-11-02 13:45:44 +01:00
Koen Vlaswinkel
3c860cfcff Merge pull request #1701 from github/koesie10/rename-not-found-repo-nwos
Rename `not_found_repo_nwos` to `not_found_repos`
2022-11-02 13:18:02 +01:00
Shati Patel
4064da640b Refactor: Move sleep function to shared module and import it (#1700) 2022-11-02 12:01:14 +00:00
Robert
30dd163265 Merge pull request #1698 from github/robertbrignull/await_monitor
Don't await the monitoring command
2022-11-02 12:00:57 +00:00
Shati Patel
bd1f6cf30e Fix indentation in exported markdown results (#1693) 2022-11-02 11:51:43 +00:00
Koen Vlaswinkel
25fec684e2 Rename not_found_repo_nwos to not_found_repos
The `not_found_repo_nwos` field doesn't actually exist (anymore?) on the
GitHub API. The correct name is `not_found_repos`, so this renames the
field on the type and in the scenarios.
2022-11-02 12:49:05 +01:00
Koen Vlaswinkel
06fd3a58a7 Add new GitHub API fields to scenario files
This uses a script to add the new `stargazers_count` and `updated_at` to
the scenario files. This is done by using the GitHub API to get the
information for each repo and then updating the scenario file.

The `updated_at` values are not completely representative since they are
the `updated_at` at time of running the script, rather than at the time
the variant analysis was run. However, this should not really matter in
practice. An alternative for scanned repositories might be getting the
creation time of the `database_commit_sha` commit.
2022-11-02 12:39:30 +01:00
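
The script itself isn't shown here, but a rough sketch of the per-repository lookup it describes, assuming Octokit is used (an illustration, not the actual script):

```typescript
import { Octokit } from "@octokit/rest";

// Look up the current star count and updated_at timestamp for one
// repository; writing the values back into the scenario file is omitted.
async function fetchRepoMetadata(
  octokit: Octokit,
  fullName: string,
): Promise<{ stargazers_count: number; updated_at: string }> {
  const [owner, repo] = fullName.split("/");
  const { data } = await octokit.rest.repos.get({ owner, repo });
  return {
    stargazers_count: data.stargazers_count,
    updated_at: data.updated_at,
  };
}
```
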
Robert
f6702844b3 Don't await the monitoring command 2022-11-02 11:20:44 +00:00
Robert
aac9971e2e Differentiate pending vs in-progress states in query history 2022-11-02 11:03:02 +00:00
Robert
7835ba45a4 Pull out getIconPath and getContextValue methods 2022-11-02 11:00:11 +00:00
Elena Tanasoiu
841c66c7f8 Remove typecasting for factories
To produce valid history items and have them blow up when we need to
add new fields.
2022-11-02 10:36:46 +00:00
Robert
78f091729f Merge pull request #1696 from github/robertbrignull/integration-tests
Run test setup before integration tests
2022-11-02 10:25:22 +00:00
Robert
aff369ad7f Run test setup before integration tests 2022-11-02 10:00:21 +00:00
Elena Tanasoiu
d610701d45 Use factory method to generate variant analysis 2022-11-02 09:44:25 +00:00
Robert
36754a8a10 Merge pull request #1690 from github/robertbrignull/handle_states_monitoring
Make the monitoring command slightly simpler and handle being called on any variant analysis
2022-11-02 09:33:37 +00:00
Elena Tanasoiu
4cc423217d Merge pull request #1694 from github/robert-elena/extract-no-workspace-command
Introduce command to run no-workspace tests separately
2022-11-02 09:16:26 +00:00
Elena Tanasoiu
5a4015900f Only test removal for local queries for now
Paired with @robertbrignull on debugging why having all types of
query history items isn't playing nicely when we try to remove an item.

We've tracked down the issue to the handleRemoveHistoryItem method
not correctly setting the `current` item after a deletion.

However, it's unclear whether the test setup is to blame or this is a
real bug.

I'm going to leave the tests for `handleRemoveHistoryItem` to test just
local queries for now (as they were originally) and will come back to
this in a different PR.
2022-11-01 17:31:41 +00:00
Elena Tanasoiu
e0a0051f70 Introduce command to run no-workspace tests separately
This takes down the feedback loop from 5-10 seconds to half a second
since we're not running through the setup for minimal workspace tests.
2022-11-01 17:21:31 +00:00
Koen Vlaswinkel
be62bd3b25 Add metadata to repository row
This will add the star count and last updated fields to the repository
row. We are able to re-use some components from remote queries, but we
cannot re-use `LastUpdated` since it requires a numeric duration, while
we are dealing with an ISO8601 date.
2022-11-01 16:56:11 +01:00
Robert
ae31a17344 Merge pull request #1672 from github/robertbrignull/always_trigger_monitoring
When rehydrating, always trigger a monitoring command unless the variant analysis is fully complete
2022-11-01 13:57:02 +00:00
Robert
9359d5de24 Merge pull request #1687 from github/robertbrignull/load_query_history_later
Move loading query history to after registering commands
2022-11-01 13:56:41 +00:00
Koen Vlaswinkel
fcb1ef4fd7 Fix typings on Storybook stories
It seems like the Storybook stories were not being type-checked by CI
and got out-of-sync with the required types. This fixes the types and
also uses the factories to reduce the chance of this happening with
future changes.
2022-11-01 14:25:13 +01:00
Elena Tanasoiu
a1daa91502 Break down handleItemClicked tests per history item type
We were expecting all three types to behave the same when clicked /
double clicked.

In fact local & remote queries only allow you to open the results view
when they're complete, while variant analyses always allow you to open
the results view no matter what their status.

Let's break down these tests per history item type and test the
expected behaviour more granularly.

NB: I tried moving some of the setup in beforeEach blocks, but alas
queryHistoryManager can be undefined so rather than adding `?` to
every method call, I'm just gonna leave the setup inside the tests.

In an ideal world, we'd stop declaring `queryHistoryManager` as
`undefined`:

```
let queryHistoryManager: QueryHistoryManager | undefined;
```

Baby steps!
2022-11-01 11:39:53 +00:00
Elena Tanasoiu
a21dec7461 Fix label text
In [1] we changed our factory methods to actually use QueryStatus when
creating remote query & variant analysis history items.

Previously we were just setting the value to `in progress`...

... which made the tests for history-item-label-provider.test.ts pass...

... but that value did not reflect reality ...

What we actually need to do is introduce a method to map different
query statuses to human readable strings, e.g.

QueryStatus.InProgress becomes 'in progress'

[1]: 4b9db6a298 (diff-217b085c45cd008d938c3da4714b5782db6ad31438b27b07e969254feec8298aL28)
2022-11-01 11:39:53 +00:00
Elena Tanasoiu
8cfa82f334 Move query-history-info tests next to query-history tests
We've introduced a new `local-query-history-item.ts` factory method [1]
which includes a cancellation token. The factory will need to import the
CancellationTokenSource from `vscode`.

We already had a factory method but it didn't quite map with the setup
we needed. For example we need to call `.completeQuery` rather than
providing a dummy `completedQuery` object.

The previous factory method was used in the tests for
`query-history-info.test.ts`. Because that factory omitted the
cancellation token, we could get away with having these tests in the
`tests/pure-tests` folder.

With the addition of the second factory method, the tests for
`query-history-info` blow up because they can't find `vscode`.

Now that we need to add more fields to local query history items, it's
becoming clearer that these `query-history-info` tests should live next
to the `query-history` tests in `vscode-tests/no-workspace`.

Granted, in an ideal situation we'd only have one factory method to
generate a local query history item, but combining these two methods
is actually quite painful. So for now let's at least have the query
history tests next to each other and appease Typescript.
2022-11-01 11:36:25 +00:00
Robert
cc955c70f9 Slim down VariantAnalysisMonitorResult 2022-11-01 11:15:54 +00:00
Robert
594d879640 Try to download results even if there's a failure reason 2022-11-01 11:15:54 +00:00
Robert
5174afed00 Only fire _onVariantAnalysisChange from one place 2022-11-01 11:15:54 +00:00
Koen Vlaswinkel
5a2a9fc4bf Add repository metadata to types
This adds the new `stargazers_count` and `updated_at` fields in the
repositories to the appropriate `gh-api` and `shared` types.

To make testing easier this also moves the
`variant-analysis-processor.test.ts` to the pure tests since it doesn't
and shouldn't depend on any `vscode` APIs.
2022-11-01 12:05:50 +01:00
Koen Vlaswinkel
be2ca6c368 Merge pull request #1689 from github/koesie10/move-create-gist-to-gh-api-client
Move `createGist` to `gh-api-client`
2022-11-01 11:03:00 +01:00
Elena Tanasoiu
99af9ca47d Combine getChildren tests together
And rename original `getChildren` describe block to `sorting` since
that's what the tests are checking.
2022-11-01 09:39:52 +00:00
Elena Tanasoiu
b3c8ef115d Move getChildren tests into HistoryTreeDataProvider describe block 2022-11-01 09:39:52 +00:00
Elena Tanasoiu
661638517f Move tests for getTreeItem into getTreeItem describe block 2022-11-01 09:39:52 +00:00
Elena Tanasoiu
6d3f7e3fdb Group dangling tests into a QueryHistoryManager describe block 2022-11-01 09:39:52 +00:00
Elena Tanasoiu
8550778609 Wrap tests for removal in handleRemoveHistoryItem describe block
Instead of having them dangle around.
2022-11-01 09:39:52 +00:00
Elena Tanasoiu
f837508d86 Combine beforeEach blocks
I don't know why there are two.
2022-11-01 09:39:52 +00:00
Elena Tanasoiu
4b9db6a298 Make query history tests work with remote / variant analysis queries
We're adding both remote query history items and variant analysis history
items to the query history.

We've introduced a little method to shuffle the query history list
before we run our tests so that we don't accidentally write tests that
depend on a fixed order.

The query history now has increased test coverage for:
- handling an item being clicked
- removing and selecting the next item in query history
- handling single / multi selection
- showing the item results

While we're here we're also:
1. Adding a factory to generate variant analysis history items
2. Providing all fields for remote query history items and ordering them
according to their type definition order. At least one field (`queryId`)
was missing from our factory, which we will need to make the tests work
with remote queries.
2022-11-01 09:39:51 +00:00
Elena Tanasoiu
6289411e08 Move local query tests into their own describe block
There are a couple of tests that check whether we can correctly
compare two local queries.

These shouldn't be applied to remote queries [1] so let's just
make that a bit clearer by moving them into a local queries describe
block and using the `localHistory` array to choose items to compare
instead of the `allHistory` array.

[1]: bf1e3c10db/extensions/ql-vscode/src/query-history.ts (L1311-L1314)
2022-11-01 09:39:51 +00:00
Elena Tanasoiu
0164d1094c Move factory for creating query results into shared folder 2022-11-01 09:39:51 +00:00
Elena Tanasoiu
b0ba1e0576 Move factory for creating local query into shared folder
There's a lot of clean-up in these tests so I'm making one change per commit.

Let's move out the utility methods so we can focus on just our tests.
2022-11-01 09:39:51 +00:00
Elena Tanasoiu
718a6d6948 Allow testing for multiple types of history items
At the moment our query history tests are set up to only check
local queries.

Let's prepare the ground to introduce remote query history items
and variant analysis history items.

This will allow us to expand test coverage for these other types
of items.
2022-11-01 09:39:50 +00:00
Koen Vlaswinkel
6ccaf5302f Move createGist to gh-api-client
The `createGist` function was part of `gh-actions-api-client`, but it
didn't actually involve anything related to the GitHub Actions API. This
moves it to the non-Actions-specific `gh-api-client` module.

Another candidate for moving to `gh-api-client` is
`getRepositoriesMetadata`, but that one is a bit more involved since it
uses `showAndLogErrorMessage`, so depends on the `vscode` module. This
means it would not be possible to test in the "pure" tests and we would
need to move all our `gh-actions-api` tests to the integration tests. It
will not be used for variant analysis queries anymore, so I don't think
it's worth moving or refactoring to not depend on `vscode`.
2022-11-01 10:38:05 +01:00
Robert
952f033377 Merge branch 'main' into robertbrignull/always_trigger_monitoring 2022-10-31 15:42:52 +00:00
Robert
12d5255073 Fix rehydrateVariantAnalysis integration tests
The method no longer accepts a second argument
2022-10-31 15:41:28 +00:00
Koen Vlaswinkel
bf1e3c10db Merge pull request #1686 from github/koesie10/live-results-view-logs
Open Actions workflow run when clicking on "View logs"
2022-10-31 16:21:21 +01:00
Koen Vlaswinkel
453cc77c3e Merge pull request #1685 from github/koesie10/open-live-results-on-github
Add opening on GitHub of live results variant analyses
2022-10-31 16:20:56 +01:00
Robert
6dc684f2b6 Fix unit test expected results 🤦 2022-10-31 15:14:52 +00:00
Robert
6b578c830d Move loading query history to after registering commands 2022-10-31 14:54:45 +00:00
Charis Kyriakou
76119e40fb A new experimental database panel (#1682)
* Add new config setting
* Add a new database panel behind experimental config
2022-10-31 14:53:16 +00:00
Robert
c9038f5334 Add awaits 2022-10-31 14:29:26 +00:00
Koen Vlaswinkel
45764d9bff Clean up opening variant analysis on GitHub
This removes a comment and makes the test lines shorter.
2022-10-31 15:18:17 +01:00
Koen Vlaswinkel
804632274a Open Actions workflow run when clicking on "View logs"
This will hook up the "View logs" link to make it open the variant
analysis actions workflow run. The method for creating the actions
workflow run URL has been extracted from the query history to make it
callable without a history item.
2022-10-31 14:46:32 +01:00
Koen Vlaswinkel
847cb13694 Add opening on GitHub of live results variant analyses
This implements the "Open on GitHub" context menu item for live results
variant analyses.
2022-10-31 14:46:01 +01:00
Koen Vlaswinkel
dad6467599 Add controller repo to VariantAnalysis type
This adds the `controllerRepo` field to the `VariantAnalysis` shared
type. This is technically a breaking change since the old history won't
have this field and all calls on this will fail. However, the feature
is not available so this should be fine.
2022-10-31 14:19:12 +01:00
Koen Vlaswinkel
b6410073d4 Merge pull request #1681 from github/koesie10/variant-analysis-view-title
Set variant analysis view title to query name
2022-10-31 14:14:52 +01:00
Koen Vlaswinkel
1e1c7d4a5f Merge pull request #1684 from github/koesie10/fix-eslint
Fix ESLint configuration for `scripts` directory
2022-10-31 14:05:58 +01:00
Koen Vlaswinkel
31a28e70b2 Remove unnecessary await in restoreView 2022-10-31 13:57:49 +01:00
Koen Vlaswinkel
ccf03cbcff Fix view not taking into account download status
The variant analysis view would allow expanding the results when the
repo task was completed. However, it did not take into account whether
the results were actually downloaded. This fixes that by using the
download status when the repo task has succeeded and sending the repo
states to the view on load.
2022-10-31 13:51:32 +01:00
Koen Vlaswinkel
47045f23c3 Fix duplicate variant analysis results downloads
This adds a new file `repo_states.json` which tracks the download status
of all repositories of a variant analysis. We will write this file when
a download has completed and skip a repository download if the repo
state is marked as `succeeded`. This should prevent duplicate downloads.

This will still queue all repositories, even those which have already
been downloaded. However, I expect the actual cost in the download
method to be negligible since it's just an in-memory check.
2022-10-31 13:51:32 +01:00
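
A minimal sketch of that bookkeeping; the file shape and helper names are assumptions, not the extension's actual implementation:

```typescript
import { readFile, writeFile } from "fs/promises";

type DownloadStatus = "pending" | "inProgress" | "succeeded" | "failed";
type RepoStates = Record<number, { downloadStatus: DownloadStatus }>;

// repo_states.json tracks the download status of every repository in a
// variant analysis; a missing file means nothing has been downloaded yet.
async function readRepoStates(path: string): Promise<RepoStates> {
  try {
    return JSON.parse(await readFile(path, "utf8")) as RepoStates;
  } catch {
    return {};
  }
}

async function markDownloadSucceeded(path: string, repoId: number): Promise<void> {
  const states = await readRepoStates(path);
  states[repoId] = { downloadStatus: "succeeded" };
  await writeFile(path, JSON.stringify(states));
}

// Skip repositories whose download already succeeded to avoid duplicates.
function shouldSkipDownload(states: RepoStates, repoId: number): boolean {
  return states[repoId]?.downloadStatus === "succeeded";
}
```
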
Koen Vlaswinkel
c1f24142c9 Run ESLint on all files
This will remove the discrepancy between the files on which ESLint is run
when `lint-staged` is used and the files that are checked using
`npm run lint` and `npm run format`.

It will now also include the `.storybook` directory which was previously
excluded from the ESLint configuration.
2022-10-31 13:22:27 +01:00
Koen Vlaswinkel
762288b57f Add tsconfig.json file for scripts directory
This will fix linting errors when `lint-staged` is run and there are
changed files in the `scripts` directory.
2022-10-31 13:21:56 +01:00
Robert
c36fa0f5d3 Merge branch 'main' into robertbrignull/always_trigger_monitoring 2022-10-31 11:53:20 +00:00
Robert
b751cee618 Check directory exists before deleting 2022-10-31 11:39:05 +00:00
Robert
b497c4fa00 make public prepareStorageDirectory private 2022-10-31 11:34:06 +00:00
Robert
b53366f277 Move isVariantAnalysisComplete implementation out of variant analysis manager 2022-10-31 11:34:03 +00:00
Koen Vlaswinkel
1cce7ae0e2 Merge pull request #1680 from github/koesie10/close-webview-on-history-item-delete
Close view when variant analysis is deleted from query history
2022-10-31 12:03:08 +01:00
Koen Vlaswinkel
9b59b65591 Set variant analysis view title to query name 2022-10-31 11:41:54 +01:00
Robert
7e59d4c736 Convert to using sinon spies 2022-10-31 10:38:44 +00:00
Koen Vlaswinkel
cdbdf62612 Make webview panel creation async
This will make the creation of a webview panel async to allow the
`getPanelConfig` method to be an async function. This will allow us to
do some work (like retrieving the variant analysis) in the
`getPanelConfig` method.
2022-10-31 11:34:58 +01:00
Elena Tanasoiu
c36ce4867e Merge pull request #1650 from github/nora-elena/integration-tests
Add tests for variant analysis history item
2022-10-31 10:33:59 +00:00
Robert
7748f82c96 Stop checking result count and rename repoScanHasResults 2022-10-31 10:24:34 +00:00
Koen Vlaswinkel
5b217220fa Close view when variant analysis is deleted from query history
This will close the variant analysis view when the corresponding variant
analysis history item is deleted from the query history. This required
some extra code to handle `dispose` being called on the view to ensure
this actually disposes the panel, but we can now call `dispose()` on the
view to close it.
2022-10-31 11:18:00 +01:00
Elena Tanasoiu
7685d7651e Don't test rehydration twice 2022-10-31 10:06:27 +00:00
Koen Vlaswinkel
25a9ee1606 Merge pull request #1665 from github/koesie10/msw-integration-test
Add some basic integration tests for MRVA
2022-10-31 09:22:37 +01:00
aeisenberg
12e2eb2d8b Bump version to v1.7.5 2022-10-29 20:41:07 +00:00
Andrew Eisenberg
84b48f416f Merge pull request #1678 from github/v1.7.4
Some checks failed
Release / Release (push) Has been cancelled
Release / Publish to VS Code Marketplace (push) Has been cancelled
Release / Publish to Open VSX Registry (push) Has been cancelled
v1.7.4
2022-10-29 13:36:36 -07:00
Andrew Eisenberg
606ffe41b0 v1.7.4 2022-10-29 13:17:19 -07:00
github-actions[bot]
75b4934d97 Bump version to v1.7.4 (#1676)
Co-authored-by: angelapwen <angelapwen@users.noreply.github.com>
2022-10-29 00:31:47 +00:00
Angela P Wen
77c28e0453 v1.7.3 (#1675)
Some checks failed
Release / Release (push) Has been cancelled
Release / Publish to VS Code Marketplace (push) Has been cancelled
Release / Publish to Open VSX Registry (push) Has been cancelled
2022-10-28 23:11:00 +00:00
Andrew Eisenberg
47ae6e2b41 Assert minimum version of vscode 2022-10-28 09:58:12 -07:00
Andrew Eisenberg
9f03db2f28 Fix typos 2022-10-28 09:54:33 -07:00
Koen Vlaswinkel
72b335649c Use new mock API server and rename integration test 2022-10-28 17:22:34 +02:00
Koen Vlaswinkel
2a437c0d1a Merge remote-tracking branch 'origin/main' into koesie10/msw-integration-test 2022-10-28 17:20:48 +02:00
Koen Vlaswinkel
bc10a7a38a Merge pull request #1663 from github/koesie10/gh-api-client-msw-test
Add msw tests for gh-api-client
2022-10-28 17:20:34 +02:00
Koen Vlaswinkel
562986546d Use scenario JSON files in tests
This will check that the data returned matches the data in the JSON
files, rather than checking against constants/magic values.
2022-10-28 16:36:03 +02:00
Koen Vlaswinkel
c4d9ef26a8 Use correct tsconfig.json in pure tests
This will change the pure tests Mocha setup to actually use the
`tsconfig.json` located in the `test` directory. Before, it was using
the root-level `tsconfig.json`. To ensure we are still using mostly the
same settings, this will extend the `test/tsconfig.json` from the
root-level `tsconfig.json`.
2022-10-28 16:34:36 +02:00
Koen Vlaswinkel
a9e49f2d72 Split mock GitHub API server into VSCode and non-VSCode
This splits the mock GitHub API server class into two parts: one for the
interactive, VSCode parts and one for the non-VSCode parts. This allows
us to use the non-VSCode part in tests.
2022-10-28 14:59:18 +02:00
Charis Kyriakou
b15626853b Add script to help with scenario recording (#1671) 2022-10-28 09:22:11 +00:00
Charis Kyriakou
f04c34629e Set in memory variant analysis on submission (#1673) 2022-10-28 10:07:14 +01:00
Charis Kyriakou
a217b53bf3 Add some more mock scenarios (#1667) 2022-10-28 09:44:14 +01:00
Nora
626592ee79 Fix tests 2022-10-28 10:22:29 +02:00
Elena Tanasoiu
4826a7e91f Merge pull request #1658 from github/elena/add-tests-for-removal
Add test for `removeVariantAnalysis`
2022-10-28 09:11:17 +01:00
Koen Vlaswinkel
d12cdbe679 Merge pull request #1664 from github/koesie10/upgrade-vscode-test
Upgrade from vscode-test to @vscode/test-electron
2022-10-28 09:41:46 +02:00
Robert
599a9ed5d9 When rehydrating, always trigger a monitoring command if variant analysis is not complete 2022-10-27 17:05:32 +01:00
Robert
caeaba2f2f Make isVariantAnalysisRepoDownloaded public 2022-10-27 17:04:50 +01:00
Robert
51589e953e Move test fixtures earlier in file 2022-10-27 17:02:36 +01:00
Robert
c4b1134903 Rename variantAnalysis to be more specific 2022-10-27 16:58:43 +01:00
Koen Vlaswinkel
1ec3d044cf Merge pull request #1668 from github/koesie10/dangling-foreach-promises
Fix dangling promises in `forEach` calls
2022-10-27 15:17:28 +02:00
Charis Kyriakou
dbb549d5fb Add mock GitHub API server docs (#1661) 2022-10-27 12:08:08 +00:00
Koen Vlaswinkel
4c469e7386 Fix dangling promises in forEach calls
This ensures that we are awaiting all calls by using `Promise.all`.
2022-10-27 14:06:48 +02:00
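The underlying pattern, sketched with placeholder names: an async callback passed to `forEach` returns promises that nothing awaits, whereas `Promise.all` over `map` awaits every call.

```typescript
async function processItem(item: string): Promise<void> {
  // Placeholder for the real per-item work.
  console.log(item);
}

async function processAll(items: string[]): Promise<void> {
  // Dangling: forEach ignores the returned promises, so rejections go unhandled.
  // items.forEach(async (item) => { await processItem(item); });

  // Awaited: every call is collected and awaited together.
  await Promise.all(items.map((item) => processItem(item)));
}
```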
Shati Patel
2ccdb9e577 Correctly rehydrate variant analyses (#1666) 2022-10-27 12:30:36 +01:00
Koen Vlaswinkel
00145bbfd4 Add some basic integration tests for MRVA
This adds some basic integration tests for MRVA using the GitHub mock
API server. It only does basic assertions and still needs to stub some
things because it is quite hard to properly test things since VSCode
does not expose an API to e.g. answer quick pick pop-ups.

I'm not sure how useful these integration tests will actually be in
practice, but they do at least ensure that we are able to successfully
submit a variant analysis.
2022-10-27 12:24:22 +02:00
Koen Vlaswinkel
123219aace Upgrade from vscode-test to @vscode/test-electron 2022-10-27 10:51:30 +02:00
Koen Vlaswinkel
ecdc485e79 Add msw tests for gh-api-client
This adds some really simple tests for the `gh-api-client` file to
ensure that we can use msw mocks in pure tests.
2022-10-27 10:29:23 +02:00
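A small sketch of what a pure (non-VSCode) msw test can look like; the endpoint, payload, and assertion are made up for illustration, and a runtime with a global `fetch` (Node 18+) is assumed rather than whatever HTTP client the real gh-api-client uses:

```typescript
import { setupServer } from "msw/node";
import { rest } from "msw";

// Illustrative handler; the real tests mock the variant analysis endpoints.
const server = setupServer(
  rest.get("https://api.github.com/repos/:owner/:repo", (req, res, ctx) =>
    res(ctx.status(200), ctx.json({ id: 123 })),
  ),
);

before(() => server.listen());
after(() => server.close());

it("uses the mocked response", async () => {
  const response = await fetch("https://api.github.com/repos/github/codeql");
  const body = await response.json();
  if (body.id !== 123) {
    throw new Error("expected the mocked repository id");
  }
});
```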
Charis Kyriakou
3812e3dcb0 Add some mock scenarios (#1660) 2022-10-27 09:23:26 +01:00
Elena Tanasoiu
eb09a0db8a Use getter method 2022-10-26 16:55:53 +01:00
Elena Tanasoiu
ad7a04e385 Update tests to account for item removal and showView
We've merged https://github.com/github/vscode-codeql/pull/1656
which actually implements item removal. We'll need to change our
tests to account for this.

We've also merged https://github.com/github/vscode-codeql/pull/1654
which implements opening the view when we click on a variant analysis
history item. So we've changed our tests to take into account that
there's now a `showView` method being called.
2022-10-26 16:52:10 +01:00
Charis Kyriakou
158bebd03f Remove incorrect and unnecessary field from data model (#1659) 2022-10-26 15:46:42 +00:00
Nora
c4b4cee057 Adjust handling of historyItem 2022-10-26 15:59:26 +01:00
Nora
7cffb997a7 Add query text test 2022-10-26 15:52:17 +01:00
Nora
83291c5ceb Add should handle click test 2022-10-26 15:52:17 +01:00
Nora
3c870a10e2 Add test to remove 2 entries from query history 2022-10-26 15:52:17 +01:00
Elena Tanasoiu
2a722ba264 Add test for removing a variant analysis history item
Co-authored-by: Nora Scheuch <norascheuch@github.com>
2022-10-26 15:52:17 +01:00
Elena Tanasoiu
da754a23e4 Create basic test set up and first test
Co-authored-by: Nora Scheuch <norascheuch@github.com>
2022-10-26 15:52:16 +01:00
Elena Tanasoiu
4a237ba019 Add test data for variant analysis
We will need to set up some VariantAnalysisHistoryItem types in order
to use them in our tests.

We're repeating what we've done for RemoteQueryHistoryItem for now.

Separately we'll think about setting up tests that check for both
remote queries and variant analysis in the query history.

At the moment we'd like to focus on just adding some test coverage
for variant analysis history items.

Co-authored-by: Nora Scheuch <norascheuch@github.com>
2022-10-26 15:52:16 +01:00
Nora
319d8ce0f5 Combine beforeEach 2022-10-26 15:52:16 +01:00
Nora
f313648ab7 Combine afterEach calls 2022-10-26 15:52:16 +01:00
Elena Tanasoiu
1a3fecd3e8 Add test for removeVariantAnalysis 2022-10-26 15:15:12 +01:00
Elena Tanasoiu
1348de5a5f Surround tests with describe blocks
To better indicate which method they're testing.
2022-10-26 15:06:46 +01:00
Elena Tanasoiu
8521138bce Merge pull request #1657 from github/elena/decouple-manager-from-results
Decouple VariantAnalysisManager from VariantAnalysisResultsManager
2022-10-26 15:04:10 +01:00
Elena Tanasoiu
8569fa7399 Don't track results manager as a disposable object 2022-10-26 14:36:51 +01:00
Elena Tanasoiu
ee37fbff63 Merge pull request #1656 from github/nora-charis-elena/handle-item-removal-take-two
Implement `handleRemoveHistoryItem` for variant analysis history items - take two
2022-10-26 14:16:10 +01:00
Elena Tanasoiu
d49bffe98e Decouple VariantAnalysisManager from VariantAnalysisResultsManager
At the moment we create the results manager as a private property on the `VariantAnalysisManager`.

If we instead created it at the extension level and passed it to the `VariantAnalysisManager`, we would have more freedom to write unit tests for the `VariantAnalysisManager` without needing to reach into a private results manager property.
2022-10-26 13:47:52 +01:00
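A sketch of the constructor injection being described; the constructor signatures and method names are hypothetical, as the real classes take more arguments:

```typescript
class VariantAnalysisResultsManager {
  removeAllResults(variantAnalysisId: number): void {
    console.log(`removing cached results for variant analysis ${variantAnalysisId}`);
  }
}

class VariantAnalysisManager {
  // The results manager is injected rather than constructed internally, so unit
  // tests can substitute a stub without reaching into a private property.
  constructor(private readonly resultsManager: VariantAnalysisResultsManager) {}

  removeVariantAnalysis(variantAnalysisId: number): void {
    this.resultsManager.removeAllResults(variantAnalysisId);
  }
}

// Wired up at the extension level:
const manager = new VariantAnalysisManager(new VariantAnalysisResultsManager());
manager.removeVariantAnalysis(42);
```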
Nora
832211d789 Adjust comment 2022-10-26 14:33:47 +02:00
Nora
f9553d7033 Rename method 2022-10-26 14:29:50 +02:00
Elena Tanasoiu
f18f1b0ca7 Implement handleRemoveHistoryItem for variant analysis history items
We had previously added a no-op placeholder for when we attempt
to remove a variant analysis from our query history.

This adds the implementation:
- removes the item from the query history
- cleans up any existing result files attached to the variant analysis

NB: The remote queries would store all their results in a single folder.
For variant analysis, we store results per repo. The folder names are built
using a cache key and are stored in `cachedResults`. The cache key is
built from the variant analysis id and the repo name.

In order to delete the results, we've had to pass in the full variant analysis
object to the manager and call `cacheResults.delete()` for each of its scanned
repos.

Co-authored-by: Charis Kyriakou <charisk@github.com>
Co-authored-by: Nora Scheuch <norascheuch@github.com>
2022-10-26 12:21:46 +01:00
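A sketch of the per-repo clean-up described above, with an assumed cache-key format (the real key derivation and cache type live in the results manager):

```typescript
interface ScannedRepo {
  fullName: string;
}

interface VariantAnalysis {
  id: number;
  scannedRepos: ScannedRepo[];
}

// Assumed key format: variant analysis id plus repo name.
function cacheKey(variantAnalysisId: number, repoFullName: string): string {
  return `${variantAnalysisId}/${repoFullName}`;
}

function removeCachedResults(
  variantAnalysis: VariantAnalysis,
  cachedResults: Map<string, unknown>,
): void {
  // Results are stored per repo, so delete one cache entry per scanned repo.
  for (const repo of variantAnalysis.scannedRepos) {
    cachedResults.delete(cacheKey(variantAnalysis.id, repo.fullName));
  }
}
```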
Shati Patel
50ec71893c Implement openQueryResults for variant analysis items (#1654) 2022-10-26 10:20:23 +01:00
Andrew Eisenberg
56af69e58d Merge pull request #1638 from github/aeisenberg/persist-dbs
Fix bug where dbs are lost on restart
2022-10-25 10:28:58 -07:00
Andrew Eisenberg
d209e52a0b Merge branch 'main' into aeisenberg/persist-dbs 2022-10-25 08:54:31 -07:00
Andrew Eisenberg
09b30fe5a3 Merge pull request #1568 from asgerf/asgerf/navigate-alerts
Add commands for navigation of alerts
2022-10-25 08:51:38 -07:00
Andrew Eisenberg
c6d54de748 Update extensions/ql-vscode/CHANGELOG.md
Co-authored-by: Edoardo Pirovano <6748066+edoardopirovano@users.noreply.github.com>
2022-10-25 08:42:27 -07:00
Andrew Eisenberg
a3fafc8e59 Merge pull request #1611 from github/aeisenberg/fix-flakes
Test cleanups
2022-10-25 08:26:26 -07:00
Charis Kyriakou
6a636baa21 Remove historyItemId for variant analyses (#1651) 2022-10-25 12:57:48 +00:00
Charis Kyriakou
9e92d0c4a7 Allow multiple query history items for a single variant analysis (#1653) 2022-10-25 13:47:53 +01:00
Shati Patel
78a0a4e580 Bump CLI version to 2.11.2 for integration tests 2022-10-25 11:30:04 +01:00
Koen Vlaswinkel
f0f01720f1 Merge pull request #1648 from github/koesie10/store-scenario-bodies-as-files
Store binary scenario bodies as files
2022-10-25 11:18:38 +02:00
Elena Tanasoiu
c8b0461f7f Merge pull request #1620 from github/mob/consume-update-event
QueryHistory: Consume event when variant analysis status is updated
2022-10-25 09:20:31 +01:00
Koen Vlaswinkel
00de0820fb Add proper handling of binary responses
msw doesn't seem to support binary responses because it decodes them to
a UTF-8 string. To work around that, we will do a separate fetch of the
file and save that.
2022-10-25 10:07:43 +02:00
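A sketch of that workaround, assuming a runtime with a global `fetch` (the actual recorder may use a different HTTP client):

```typescript
import { promises as fs } from "fs";

// msw hands us the body as a UTF-8 decoded string, which corrupts ZIP data,
// so fetch the artifact again directly and write the raw bytes to disk.
async function saveBinaryBody(url: string, targetPath: string): Promise<void> {
  const response = await fetch(url);
  const bytes = Buffer.from(await response.arrayBuffer());
  await fs.writeFile(targetPath, bytes);
}
```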
Koen Vlaswinkel
5a76df8489 Load response bodies from files 2022-10-25 10:07:16 +02:00
Koen Vlaswinkel
9764a93900 Store ZIP bodies as files 2022-10-25 10:06:50 +02:00
Charis Kyriakou
130d8efe35 Tidy up msw handlers used for scenario replay (#1649) 2022-10-25 08:44:29 +01:00
Angela P Wen
63a5021e5e Use sarif parser for reopened results (#1457) 2022-10-24 12:31:35 -07:00
Edoardo Pirovano
e891169ca3 MRVA: Use QLX to precompile queries
Co-authored-by: Henning Makholm <hmakholm@github.com>
2022-10-24 17:33:25 +01:00
Charis Kyriakou
98284d9b2c Add loading of mock scenarios (#1641) 2022-10-24 16:27:37 +01:00
Asger F
b480f8f375 Fix incorrect merge resolution in changelog 2022-10-24 13:20:42 +02:00
Asger F
ead1fb4cd9 Merge branch 'main' into asgerf/navigate-alerts 2022-10-24 13:19:05 +02:00
Asger F
0acf9f7b66 Fix bad suggestion merge in package.json 2022-10-24 13:18:33 +02:00
Asger F
9cb4b9d372 Update extensions/ql-vscode/package.json
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-10-24 13:13:57 +02:00
Shati Patel
8a10a49f66 Merge pull request #1639 from github/shati-patel/repo-count
Implement query history label for variant analysis items
2022-10-24 11:29:09 +01:00
Koen Vlaswinkel
40d281aa3f Merge pull request #1646 from github/koesie10/fix-command-visibility
Fix scenario commands visibility
2022-10-24 12:17:42 +02:00
Elena Tanasoiu
b25cb8adbe Merge branch 'main' into mob/consume-update-event 2022-10-24 11:13:41 +01:00
Nora
88edcaf067 Merge pull request #1621 from github/norascheuch/variant-analysis-rehydrate-and-remove-event
Implement rehydration for variant analysis
2022-10-24 11:47:44 +02:00
shati-patel
8737cfde0b Add undefined case to pluralize unit tests 2022-10-24 10:44:16 +01:00
shati-patel
593ca57497 Cover more cases in buildRepoLabel unit tests 2022-10-24 10:30:56 +01:00
Koen Vlaswinkel
471ead37c0 Fix scenario commands visibility
When the mock GitHub API server setting was moved to the top-level, we
forgot the commands in the `package.json`. This updates the commands to
have the correct visibility.

See: https://github.com/github/vscode-codeql/pull/1643
2022-10-24 11:30:09 +02:00
shati-patel
436af066fc Add unit tests for pluralize 2022-10-24 10:17:03 +01:00
shati-patel
c85338d11a refactor: move pluralize into its own module 2022-10-24 10:08:49 +01:00
Nora
1523babcb3 Implement new filePath method suggestion
Co-authored-by: Robert <robertbrignull@github.com>
2022-10-24 10:40:03 +02:00
Nora
4ed0e0fa09 Implement find-method suggestion
Co-authored-by: Robert <robertbrignull@github.com>
2022-10-24 10:40:03 +02:00
Nora
79bb894a7d Implement rehydration for variant analysis and introduce onRemoveVariantAnalysis event 2022-10-24 10:40:03 +02:00
Charis Kyriakou
da63b99a94 Update mock GitHub API request models to support failures (#1644) 2022-10-24 08:02:36 +00:00
Koen Vlaswinkel
c325a725ea Merge pull request #1642 from github/koesie10/validate-mocks
Add linter for scenario files
2022-10-24 10:01:44 +02:00
Charis Kyriakou
6c8c15155b Restructure configuration around mock GitHub API (#1643) 2022-10-24 08:44:32 +01:00
shati-patel
3cd025f879 Add a unit test for building repo labels 2022-10-21 17:01:46 +01:00
Charis Kyriakou
6b1fce9cd5 Revert to previously used version of husky (#1640) 2022-10-21 14:51:56 +00:00
Koen Vlaswinkel
104055e703 Add linter for scenario files
This adds a linter for JSON scenario files which will validate the JSON
files in the scenarios directory against the TypeScript types. It will
convert the TypeScript types to JSON schema to simplify this process.

Unfortunately, this will not currently allow adding scenarios with
failing requests since the types do not allow this. Rather than removing
this validation, we should fix the types. This can be done in a follow-up
PR.
2022-10-21 16:41:01 +02:00
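One possible shape for such a linter, using `typescript-json-schema` to derive a schema and `ajv` to validate against it; the type name, source path, and tooling choice here are assumptions, not necessarily what the actual script uses:

```typescript
import { resolve } from "path";
import Ajv from "ajv";
import * as TJS from "typescript-json-schema";

// Derive a JSON schema from the TypeScript request types once, at startup.
// "GitHubApiRequest" and the source path are illustrative.
const program = TJS.getProgramFromFiles([resolve("src/mocks/gh-api-request.ts")]);
const schema = TJS.generateSchema(program, "GitHubApiRequest", { required: true });

const ajv = new Ajv();
const validateRequest = ajv.compile(schema as object);

// Returns a list of human-readable validation errors for one scenario file.
export function lintScenarioFile(contents: unknown): string[] {
  if (validateRequest(contents)) {
    return [];
  }
  return (validateRequest.errors ?? []).map(
    (error) => `${error.instancePath} ${error.message}`,
  );
}
```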
shati-patel
add5417a42 Move pluralize method into "pure" helpers file (since we don't require vscode) 2022-10-21 15:04:01 +01:00
Shati Patel
31ef6aef29 Implement buildRepoLabel for variant analysis items 2022-10-21 14:18:53 +01:00
Shati Patel
bd81d3c4a7 Fix bug in pluralize helper method
Correctly handle the zero case
2022-10-21 14:17:40 +01:00
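For reference, a minimal pluralize helper that treats zero correctly; this is a sketch, not the extension's exact signature, though the related commits in this range suggest it also handles an `undefined` count:

```typescript
function pluralize(
  count: number | undefined,
  singular: string,
  plural: string,
): string {
  if (count === undefined) {
    return "";
  }
  // Only a count of exactly 1 is singular; 0 must use the plural form.
  return `${count} ${count === 1 ? singular : plural}`;
}

pluralize(0, "repository", "repositories"); // "0 repositories"
pluralize(1, "repository", "repositories"); // "1 repository"
```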
Koen Vlaswinkel
4e5abee2ea Merge pull request #1634 from github/koesie10/record-scenario
Add recording of mock scenarios
2022-10-21 14:52:29 +02:00
Asger F
bdf7208476 Mention keyboard navigation in README 2022-10-21 14:26:54 +02:00
Asger F
e1a56dd91d Update a few more nullish checks 2022-10-21 14:26:37 +02:00
Asger F
d4a58a64ee Consistently check for undefined rather than nullish 2022-10-21 14:17:06 +02:00
Koen Vlaswinkel
71b1b49502 Fix incorrect development scenarios path
Co-authored-by: Charis Kyriakou <charisk@users.noreply.github.com>
2022-10-21 14:12:35 +02:00
Asger F
65777b5e60 Use null-aware accessors in getResult 2022-10-21 13:56:30 +02:00
Asger F
53bb9d797b Title-case command names, like other commands 2022-10-21 13:55:07 +02:00
Asger F
cbf15e6d02 Update extensions/ql-vscode/src/view/results/alert-table.tsx
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-10-21 13:54:45 +02:00
Asger F
ecc07a50be Update extensions/ql-vscode/CHANGELOG.md
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-10-21 13:54:33 +02:00
Koen Vlaswinkel
7288712e47 Unscope mock commands
This reverts commit 57ba12db8b.
2022-10-21 13:12:38 +02:00
Koen Vlaswinkel
74ae5a7fdc Start mock server on startup
The mock server wasn't being started on extension activation when the
config setting was already set.
2022-10-21 11:39:29 +02:00
Koen Vlaswinkel
302722b982 Fix isRecording variable name 2022-10-21 11:32:13 +02:00
Koen Vlaswinkel
22f28fa6ff Hard-code scenarios path in development
The scenarios path can still be overridden via the config setting.
2022-10-21 11:18:55 +02:00
Koen Vlaswinkel
9ba06ef562 Merge pull request #1627 from github/koesie10/storybook-vscode-theme-addon
Create Storybook add-on for switching VSCode themes
2022-10-21 10:53:14 +02:00
Koen Vlaswinkel
cff56b7e7b Change title of cancelRecording command 2022-10-21 10:45:57 +02:00
Koen Vlaswinkel
ad41a043a7 Add comment for setContext call 2022-10-21 10:45:18 +02:00
Koen Vlaswinkel
66c6bf5e86 Rename some variables 2022-10-21 10:43:32 +02:00
Koen Vlaswinkel
463633334c Rename recordScenario to startRecording 2022-10-21 10:41:38 +02:00
Koen Vlaswinkel
9278422406 Ensure save and cancel commands are only visible with feature flag 2022-10-21 10:41:03 +02:00
Koen Vlaswinkel
57ba12db8b Scope mock commands
The command lint expects all command palette commands to have a common
prefix, which these violated. So I've moved them to being scoped
commands so that we can apply different lints.
2022-10-21 10:39:47 +02:00
Andrew Eisenberg
ccdffc296c Merge branch 'main' into aeisenberg/fix-flakes 2022-10-20 17:07:39 -07:00
Andrew Eisenberg
24e9fbe8ca Update changelog 2022-10-20 16:25:14 -07:00
Andrew Eisenberg
6e33b3c032 Fix bug where dbs are lost on restart
If the workspace is restarted while databases are being loaded, this
change prevents any from being lost.

The bug was that each time a database was added when rehydrating a db
from persisted state on startup, the persisted db list
was being updated. Now, instead of updating the list each time we add a
db on restart, we update the persisted list only after all are added.

Note that we need to update the persisted list after reading it in since
the act of rehydrating a database _may_ change its persisted state.
For example, the primary language of the database may be initialized
if it was not able to be determined originally.
2022-10-20 16:20:06 -07:00
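A sketch of the ordering fix described here, with illustrative function names rather than the extension's actual database manager API:

```typescript
async function rehydrateDatabases(
  persistedUris: string[],
  addDatabase: (uri: string) => Promise<void>,
  persistList: (uris: string[]) => Promise<void>,
): Promise<void> {
  for (const uri of persistedUris) {
    // Do not persist inside the loop: a restart mid-way would truncate the list.
    await addDatabase(uri);
  }
  // Persist once, after every database has been re-added, because rehydration
  // may have changed state (e.g. a primary language detected for the first time).
  await persistList(persistedUris);
}
```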
Andrew Eisenberg
a625a39999 Merge pull request #1625 from github/aeisenberg/dil-fixes
A couple of small changes around the new query server
2022-10-20 10:38:20 -07:00
Andrew Eisenberg
94ef752c0b A couple of small changes around the new query server 2022-10-20 10:21:05 -07:00
Alexander Eyers-Taylor
9957b211e0 Fix missing DIL for new query server (#1623)
* Fix missing DIL for new query server

* Fix DIL error message when QLO was not expected.

* Update extensions/ql-vscode/src/run-queries-shared.ts

Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>

Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2022-10-20 14:36:31 +01:00
Elena Tanasoiu
38f22b65ef Merge pull request #1635 from github/elenatanasoiu/update-readme
Clarify the roles of different types of tests
2022-10-20 11:28:23 +01:00
Shati Patel
4eecdbfada Merge pull request #1622 from github/shati-patel/open-query-dir
Implement "open query directory" for variant analysis history items
2022-10-20 10:30:03 +01:00
Andrew Eisenberg
7637f9428a Calculate hidden configuration keys using package.json 2022-10-19 14:57:38 -07:00
Asger F
0e3679d186 Scroll selected item into view 2022-10-19 19:49:12 +02:00
Elena Tanasoiu
e5dcffc04b Clarify the roles of different types of tests
And also clean up the explanation on how to run each type of test.
2022-10-19 18:38:27 +01:00
dependabot[bot]
0ce25eef63 Bump ansi-regex from 4.1.0 to 5.0.1 in /extensions/ql-vscode (#1613)
Bumps [ansi-regex](https://github.com/chalk/ansi-regex) from 4.1.0 to 5.0.1.
- [Release notes](https://github.com/chalk/ansi-regex/releases)
- [Commits](https://github.com/chalk/ansi-regex/compare/v4.1.0...v5.0.1)

---
updated-dependencies:
- dependency-name: ansi-regex
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-10-19 08:49:22 -07:00
dependabot[bot]
2ca4fb052e Bump husky from 4.3.8 to 8.0.1 in /extensions/ql-vscode (#1555)
Bumps [husky](https://github.com/typicode/husky) from 4.3.8 to 8.0.1.
- [Release notes](https://github.com/typicode/husky/releases)
- [Commits](https://github.com/typicode/husky/compare/v4.3.8...v8.0.1)

---
updated-dependencies:
- dependency-name: husky
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2022-10-19 08:48:35 -07:00
shati-patel
dab67f148e Implement "open query directory" for variant analysis history items 2022-10-19 16:26:34 +01:00
Koen Vlaswinkel
a032678f24 Add commands for recording of scenario
This will add the commands and the implementation in the
`MockGitHubApiServer` for the recording of a scenario.
2022-10-19 17:22:36 +02:00
Koen Vlaswinkel
dcac6f56da Add scenario recorder
This adds a new class which will set up the MSW server to record requests,
keep them in memory, and save them to files when a separate save method
is called.
2022-10-19 17:22:36 +02:00
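A rough sketch of such a recorder class; the recorded-request shape and the one-JSON-file-per-request layout are assumptions for illustration, not the extension's actual scenario format:

```typescript
import { promises as fs } from "fs";
import { join } from "path";

// Hypothetical recorded-request shape.
interface RecordedRequest {
  request: { url: string; method: string };
  response: { status: number; body: unknown };
}

class ScenarioRecorder {
  private readonly recorded: RecordedRequest[] = [];

  // Called from the mock server's request/response hook while recording.
  record(entry: RecordedRequest): void {
    this.recorded.push(entry);
  }

  // Write one numbered JSON file per request when the scenario is saved.
  async save(scenarioDirectory: string): Promise<void> {
    await fs.mkdir(scenarioDirectory, { recursive: true });
    await Promise.all(
      this.recorded.map((entry, index) =>
        fs.writeFile(
          join(scenarioDirectory, `${index}.json`),
          JSON.stringify(entry, null, 2),
        ),
      ),
    );
  }
}
```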
Shati Patel
57ee00efd0 Merge pull request #1631 from github/shati-patel/storage-dir
Get `variantAnalysisStoragePath` from the variant analysis manager + create `timestamp` file
2022-10-19 16:17:54 +01:00
Koen Vlaswinkel
4bc799246f Merge pull request #1619 from github/koesie10/vscode-light-theme
Add VSCode light theme to Storybook
2022-10-19 17:15:03 +02:00
Charis Kyriakou
6a7856052c Add mock GitHub API config listener (#1632) 2022-10-19 15:01:29 +00:00
Koen Vlaswinkel
fe31730dd1 Merge pull request #1633 from github/koesie10/add-missing-mock-type
Add missing request type for mock GitHub API
2022-10-19 16:59:38 +02:00
shati-patel
cc74533267 Increase timeout so tests are less flaky 2022-10-19 15:40:13 +01:00
Elena Tanasoiu
1a219af821 Update all history items that are connected to the same variant analysis 2022-10-19 15:33:03 +01:00
shati-patel
43de90f03d Pass variantAnalysisStorageLocation to the results manager 2022-10-19 15:26:16 +01:00
Koen Vlaswinkel
9624858335 Add missing request type for mock GitHub API
We were still missing the `SubmitVariantAnalysisRequest` type and a type
to represent the union of all request types. This adds both of them.
2022-10-19 16:25:32 +02:00
Elena Tanasoiu
3d4cdb69b1 Check variant analysis id instead of history item ID
Since we have no way to find the query item via the historyItemId.
2022-10-19 14:49:30 +01:00
shati-patel
6b7ebf543c Create timestamp inside onVariantAnalysisSubmitted 2022-10-19 14:43:02 +01:00
Charis Kyriakou
7e8782723d Add mock GitHub API server setting (#1630) 2022-10-19 13:23:39 +00:00
shati-patel
55fb0b7078 Create timestamp file in variant analysis storage directory 2022-10-19 13:12:20 +01:00
Asger F
45b6288363 Reveal panel on navigate, to prevent webview destruction 2022-10-19 14:09:38 +02:00
Charis Kyriakou
424520613e Initial setup around GitHub API mock server (#1629) 2022-10-19 12:05:28 +00:00
shati-patel
304a96cb25 Expose variant analysis storage location in variant analysis manager 2022-10-19 13:05:25 +01:00
Elena Tanasoiu
32dbc87049 Also store variantAnalysis on the query history item
Since this is expected in the `VariantAnalysisQueryHistoryItem` type,
let's also store the variant analysis object.
2022-10-19 12:19:45 +01:00
Charis Kyriakou
baf1b70460 Add GitHub API requests that will be mocked (#1626) 2022-10-19 11:38:41 +01:00
Koen Vlaswinkel
a254ceaa59 Create Storybook add-on for switching VSCode themes
This adds a Storybook add-on that allows you to switch between VSCode
themes. It follows the pattern of the [outline](https://github.com/storybookjs/storybook/tree/v6.5.12/addons/outline/src)
and [backgrounds](https://github.com/storybookjs/storybook/tree/v6.5.12/addons/backgrounds)
add-ons.

Unfortunately, it doesn't apply the CSS to just the elements it should
be applied to, but globally to the complete preview. This is a limitation
of using CSS files rather than setting inline styles on the elements. We
might be able to resolve this in the future by extracting the CSS
variables from the CSS files, but this is somewhat more involved.
2022-10-19 11:01:25 +02:00
Elena Tanasoiu
7fd5999ead QueryHistory: Consume event when variant analysis status is updated
Update the state of a query history item when the status of the variant analysis has changed.
2022-10-19 09:18:15 +01:00
Andrew Eisenberg
c85ef15d9e Test cleanups
- Avoid installing `xvfb` since it is already available.
- Ensure `supportsNewQueryServer()` takes the CLI version into account
- Always run the new query server tests on v2.11.1 and later
- Avoid printing directory contents in `run-remote-query-tests`
- Run tests with `--disable-workspace-trust` to avoid a non-fatal error
  being thrown from the dialog service.
- Ensure the exit code of the extension host while running integration
  tests is the exit code of the actual process. Otherwise, there is
  a possibility that an error exit code is swallowed up and ignored.
- Remove a duplicate unhandledRejection handler.
- Handle exit code 7 from Windows. This appears to be a failure on
  exiting and unrelated to the tests.
- Fix handling of configuration in tests:
    1. It is not possible to update a configuration setting for internal
       settings like `codeql.canary`.
    2. On Windows CI, configuration cannot reliably be updated after
       global teardown. So, in tests, avoid resetting test configuration
       when tests are over.

Also, I tried to remove all those pesky errors in the logs like:

> [2094:1017/235357.424002:ERROR:bus.cc(398)] Failed to connect to the bus: Could not parse server address: Unknown address type (examples of valid types are "tcp" and on UNIX "unix")

I was following advice from here, but I can't get it working.

- https://github.com/microsoft/vscode-test/issues/127
- https://github.com/electron/electron/issues/31981
2022-10-18 16:25:35 -07:00
shati-patel
986749b40f Open query text as a .ql file
This fixes a minor bug (introduced in https://github.com/github/vscode-codeql/pull/1115/files#diff-cf43a74569f5e6eea483a9178f5de8fc372f8ca652017d3f1451dd2640876874L603) where we accidentally removed the `.ql` file extension. I've hard-coded it here, and tested it works for the local, remote, and variant-analysis cases.
2022-10-18 17:10:08 +01:00
shati-patel
9c2821a418 Unit tests for getQueryText 2022-10-18 17:10:08 +01:00
shati-patel
93a6f50c0e Create query history items at start of test 2022-10-18 17:10:08 +01:00
shati-patel
0413b01990 Implement getQueryText for variant analysis items
Also moved this to the `query-history-info` helper file
2022-10-18 17:10:08 +01:00
Koen Vlaswinkel
157a5d6afd Add VSCode light theme to Storybook
This adds a documented way to change the theme in Storybook from the
VSCode Dark+ theme to the VSCode Light+ theme. It requires multiple
changes to two files, but these are all quite simple and it has been
documented on the "Overview" page.
2022-10-18 16:25:53 +02:00
Shati Patel
6b27a4209a Merge pull request #1614 from github/shati-patel/query-history-get-id
Get query ID for query history items (incl `VariantAnalysisHistoryItem`)
2022-10-18 12:40:04 +01:00
shati-patel
1bb68d65f9 Merge branch 'main' into shati-patel/query-history-get-id 2022-10-18 11:59:22 +01:00
shati-patel
fd13c35b5d Rename queryId -> historyItemId 2022-10-18 11:58:55 +01:00
shati-patel
77deea77fc Rename: queryId -> historyItemId 2022-10-18 11:39:16 +01:00
shati-patel
2eaa923019 Rename function: getQueryId -> getQueryHistoryItemId 2022-10-18 11:31:46 +01:00
shati-patel
ad9b46e494 Use a generated queryId on VariantAnalysisHistoryItem 2022-10-18 11:29:17 +01:00
Elena Tanasoiu
fa4766fe91 Merge pull request #1599 from github/elenatanasoiu/add-batching-to-download-take-two
Download variant analysis results in batches - take two
2022-10-18 10:32:18 +01:00
Shati Patel
28eb9ead01 Merge pull request #1606 from github/shati-patel/query-history-info-tests
Add unit tests for `query-history-info`
2022-10-18 10:17:33 +01:00
Shati Patel
0013a0f1b2 Clean-up: Remove unsupported CLI version from testing matrix
We officially only support the previous 4 minor versions of the CLI, so v2.6.3 can go.
2022-10-18 09:50:13 +01:00
Elena Tanasoiu
25b71e8651 Merge branch 'main' into elenatanasoiu/add-batching-to-download-take-two 2022-10-17 16:20:31 +01:00
Elena Tanasoiu
31a97897c8 Use fewer async/awaits 2022-10-17 16:13:46 +01:00
Elena Tanasoiu
bf7509e3df Add test to check results are being downloaded
Previously we were only checking whether we're triggering the download
command in the extension.

Now we're mocking `autoDownloadVariantAnalysisResult` on the
variantAnalysisManager and checking that it's being called for all repos
that have available results.
2022-10-17 15:58:01 +01:00
Charis Kyriakou
4fd9b54c58 Remove debug log entry (#1616) 2022-10-17 14:52:26 +00:00
Elena Tanasoiu
7d2bae1f1b Remove extra parentheses 2022-10-17 14:55:09 +01:00
Elena Tanasoiu
1f4e1f27ae Make queue private and one-line initialization for it 2022-10-17 14:51:06 +01:00
Elena Tanasoiu
3a1800319a Move guts for adding task to a queue into a method
So that the extension command doesn't need to know how to push to a queue.
2022-10-17 14:51:06 +01:00
Elena Tanasoiu
f2fe1063d9 Rename maxConcurrentTasks to maxConcurrentDownloads 2022-10-17 14:51:06 +01:00
Elena Tanasoiu
7e1b35eae4 Don't use Promise.all
Since we're only adding one task at a time.
2022-10-17 14:51:05 +01:00
Elena Tanasoiu
3283b68ff9 Download results in batches
This makes use of the p-queue package to download our variant analysis
results in batches of 3 at a time.
2022-10-17 14:51:05 +01:00
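The gist of the batching, sketched with `p-queue`; the concurrency value of 3 comes from the commit message, while the surrounding names are illustrative:

```typescript
import PQueue from "p-queue";

// Run at most 3 result downloads concurrently.
const downloadQueue = new PQueue({ concurrency: 3 });

function scheduleForDownload(
  repo: string,
  download: (repo: string) => Promise<void>,
): void {
  // Each repo is enqueued individually; p-queue throttles how many run at once.
  void downloadQueue.add(() => download(repo));
}
```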
Elena Tanasoiu
aaf21d35f6 Install p-queue 2022-10-17 14:51:05 +01:00
Elena Tanasoiu
67a6ab5c8e Test that we only download results once per repo
To make us confident that we're not repeatedly downloading results.
2022-10-17 14:51:05 +01:00
Elena Tanasoiu
64994d7c03 Extract downloadVariantAnalysisResults method
In the next commit we'll start changing this method to support batching.
2022-10-17 14:51:05 +01:00
Elena Tanasoiu
362094b8de Extract getReposToDownload method 2022-10-17 14:51:04 +01:00
Elena Tanasoiu
e791e77ef9 Extract shouldDownload method 2022-10-17 14:51:04 +01:00
Elena Tanasoiu
0009114f7b Extract scheduleForDownload method
Before we make any changes, let's extract some of the monitor code into
smaller methods.

Since we have test coverage, we're able to do this quite comfortably.
2022-10-17 14:51:04 +01:00
aeisenberg
3b644fea7b Bump version to v1.7.3 2022-10-17 10:03:25 +01:00
shati-patel
04c9f17398 Get query ID for query history items (incl VariantAnalysisHistoryItem) 2022-10-17 09:53:34 +01:00
shati-patel
60e9f552db Add unit tests for query-history-info 2022-10-17 09:51:56 +01:00
Dave Bartolomeo
38caad032b Merge pull request #1604 from github/dbartol/join-order-threshold
Make bad join order warning threshold configurable
2022-10-14 18:20:42 -04:00
Andrew Eisenberg
7c1a8b3bc9 Merge pull request #1612 from github/revert-1586-koesie10/upgrade-vscode-test
Revert "Upgrade from vscode-test to @vscode/test-electron"
2022-10-14 14:50:51 -07:00
Andrew Eisenberg
c7c709b366 Revert "Upgrade from vscode-test to @vscode/test-electron" 2022-10-14 14:15:18 -07:00
Dave Bartolomeo
131e72b162 Decouple join-order.ts from VS Code for testing purposes 2022-10-14 15:32:03 -04:00
Dave Bartolomeo
4b875e7e42 Merge remote-tracking branch 'origin/main' into dbartol/join-order-threshold 2022-10-14 15:01:41 -04:00
Dave Bartolomeo
9b0d4bd7b8 Make bad join order warning threshold configurable
The threshold at which the bad join order detection reports a warning was previously hard-coded to 50. Initial feedback from internal QL developers suggests that this is too high, and should be configurable in any case. I've made it configurable via the `codeQL.logInsights.joinOrderWarningThreshold` setting, leaving the default at 50. Once we get more feedback about what a better default value is, I'll update the default.
2022-10-14 10:36:58 -04:00
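Reading such a setting from the VS Code configuration typically looks like the following sketch (the extension's own config helpers wrap this differently):

```typescript
import * as vscode from "vscode";

// Read the configurable threshold, defaulting to 50 as described above.
function getJoinOrderWarningThreshold(): number {
  return vscode.workspace
    .getConfiguration("codeQL.logInsights")
    .get<number>("joinOrderWarningThreshold", 50);
}
```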
Asger F
d08e005b46 When stepping up or down, collapse the previous node 2022-10-11 11:09:45 +02:00
Asger F
4871728216 Added change note 2022-10-10 15:36:36 +02:00
Asger F
f759eed0f5 Remove unused parts of result-keys.ts 2022-10-07 16:26:56 +02:00
Asger F
5a694653d7 Rename command IDs.
We register a handler for the old command ID, but do not mention it in package.json.
This seems to be backward compatible without polluting the command palette.
2022-10-07 10:35:41 +02:00
Asger F
0f6100cc42 Bugfix in getPathNode 2022-10-07 09:22:07 +02:00
Asger F
88bfd19c91 Switch commands to up/down/left/right semantics 2022-10-06 15:06:08 +02:00
Asger F
125f63887a Make raw result view respond to navigation events 2022-10-05 22:57:32 +02:00
Asger F
20dea5ea46 Also show selection in raw result view 2022-10-05 22:57:32 +02:00
Asger F
3c4682e556 Ensure nodes are expanded 2022-10-05 22:57:32 +02:00
Asger F
bb61b5ea25 Replace the expansion index with the result key 2022-10-05 22:57:29 +02:00
Asger F
2949fc33d1 Replace 'expanded' with a Set<number> 2022-10-05 22:56:58 +02:00
Asger F
ab933fcb81 Add 'show next/previous alert' commands 2022-10-05 22:56:26 +02:00
541 changed files with 147019 additions and 17719 deletions

3
.gitattributes vendored

@@ -23,3 +23,6 @@ CHANGELOG.md merge=union
# Mark some JSON files containing test data as generated so they are not included
# as part of diffs or language statistics.
extensions/ql-vscode/src/stories/remote-queries/data/*.json linguist-generated
# Always use LF line endings, also on Windows
* text=auto eol=lf


@@ -19,7 +19,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Initialize CodeQL
uses: github/codeql-action/init@main


@@ -16,11 +16,11 @@ jobs:
os: [ubuntu-latest, windows-latest]
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-node@v1
- uses: actions/setup-node@v3
with:
node-version: '16.14.2'
@@ -65,7 +65,7 @@ jobs:
# This workflow step gets an unstable testing version of the CodeQL CLI. It should not be used outside of these tests.
run: |
LATEST=`gh api repos/dsp-testing/codeql-cli-nightlies/releases --jq '.[].tag_name' --method GET --raw-field 'per_page=1'`
echo "::set-output name=nightly-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST"
echo "nightly-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST" >> "$GITHUB_OUTPUT"
test:
name: Test
@@ -76,13 +76,13 @@ jobs:
os: [ubuntu-latest, windows-latest]
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
fetch-depth: 1
- uses: actions/setup-node@v1
- uses: actions/setup-node@v3
with:
node-version: '16.14.0'
node-version: '16.14.2'
- name: Install dependencies
working-directory: extensions/ql-vscode
@@ -103,6 +103,11 @@ jobs:
run: |
npm run lint
- name: Lint scenarios
working-directory: extensions/ql-vscode
run: |
npm run lint:scenarios
- name: Run unit tests (Linux)
working-directory: extensions/ql-vscode
if: matrix.os == 'ubuntu-latest'
@@ -121,7 +126,7 @@ jobs:
env:
VSCODE_CODEQL_GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
run: |
sudo apt-get install xvfb
unset DBUS_SESSION_BUS_ADDRESS
/usr/bin/xvfb-run npm run integration
- name: Run integration tests (Windows)
@@ -139,7 +144,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
version: ['v2.6.3', 'v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.1', 'nightly']
version: ['v2.7.6', 'v2.8.5', 'v2.9.4', 'v2.10.5', 'v2.11.3', 'nightly']
env:
CLI_VERSION: ${{ matrix.version }}
NIGHTLY_URL: ${{ needs.find-nightly.outputs.url }}
@@ -147,11 +152,11 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- uses: actions/setup-node@v1
- uses: actions/setup-node@v3
with:
node-version: '16.14.0'
node-version: '16.14.2'
- name: Install dependencies
working-directory: extensions/ql-vscode
@@ -175,10 +180,10 @@ jobs:
else
REF="codeql-cli/${{ matrix.version }}"
fi
echo "::set-output name=ref::$REF"
echo "ref=$REF" >> "$GITHUB_OUTPUT"
- name: Checkout QL
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
repository: github/codeql
ref: ${{ steps.choose-ref.outputs.ref }}
@@ -188,6 +193,7 @@ jobs:
working-directory: extensions/ql-vscode
if: matrix.os == 'ubuntu-latest'
run: |
unset DBUS_SESSION_BUS_ADDRESS
/usr/bin/xvfb-run npm run cli-integration
- name: Run CLI tests (Windows)


@@ -18,9 +18,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
- uses: actions/setup-node@v1
- uses: actions/setup-node@v3
with:
node-version: '16.14.2'
@@ -47,11 +47,11 @@ jobs:
# Record the VSIX path as an output of this step.
# This will be used later when uploading a release asset.
VSIX_PATH="$(ls dist/*.vsix)"
echo "::set-output name=vsix_path::$VSIX_PATH"
echo "vsix_path=$VSIX_PATH" >> "$GITHUB_OUTPUT"
# Transform the GitHub ref so it can be used in a filename.
# The last sed invocation is used for testing branches that modify this workflow.
REF_NAME="$(echo ${{ github.ref }} | sed -e 's:^refs/tags/::' | sed -e 's:/:-:g')"
echo "::set-output name=ref_name::$REF_NAME"
echo "ref_name=$REF_NAME" >> "$GITHUB_OUTPUT"
- name: Upload artifacts
uses: actions/upload-artifact@v2
@@ -107,7 +107,7 @@ jobs:
# Bump to the next patch version. Major or minor version bumps will have to be done manually.
# Record the next version number as an output of this step.
NEXT_VERSION="$(npm version patch)"
echo "::set-output name=next_version::$NEXT_VERSION"
echo "next_version=$NEXT_VERSION" >> "$GITHUB_OUTPUT"
- name: Add changelog for next release
if: success()
@@ -136,7 +136,7 @@ jobs:
VSCE_TOKEN: ${{ secrets.VSCE_TOKEN }}
steps:
- name: Download artifact
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: vscode-codeql-extension
@@ -156,7 +156,7 @@ jobs:
OPEN_VSX_TOKEN: ${{ secrets.OPEN_VSX_TOKEN }}
steps:
- name: Download artifact
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: vscode-codeql-extension

7
.gitignore vendored

@@ -16,7 +16,6 @@ artifacts/
# Visual Studio workspace state
.vs/
# Rush files
/common/temp/**
package-deps.json
**/.rush/temp
# CodeQL metadata
.cache/
.codeql/


@@ -5,7 +5,7 @@
"recommendations": [
"amodio.tsl-problem-matcher",
"dbaeumer.vscode-eslint",
"eternalphane.tsfmt-vscode"
"esbenp.prettier-vscode"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []

6
.vscode/launch.json vendored

@@ -44,10 +44,8 @@
"bdd",
"--colors",
"--diff",
"-r",
"ts-node/register",
"-r",
"test/mocha.setup.js",
"--config",
".mocharc.json",
"test/pure-tests/**/*.ts"
],
"stopOnEntry": false,

10
.vscode/settings.json vendored

@@ -36,5 +36,13 @@
"typescript.preferences.quoteStyle": "single",
"javascript.preferences.quoteStyle": "single",
"editor.wordWrapColumn": 100,
"jest.rootPath": "./extensions/ql-vscode"
"jest.rootPath": "./extensions/ql-vscode",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
},
}


@@ -1,3 +1,4 @@
**/* @github/codeql-vscode-reviewers
**/remote-queries/ @github/code-scanning-secexp-reviewers
**/variant-analysis/ @github/code-scanning-secexp-reviewers
**/databases/ @github/code-scanning-secexp-reviewers


@@ -54,7 +54,7 @@ Alternatively, you can build the extension within VS Code via `Terminal > Run Bu
Before running any of the launch commands, be sure to have run the `build` command to ensure that the JavaScript is compiled and the resources are copied to the proper location.
We recommend that you keep `npm run watch` running in the backgound and you only need to re-run `npm run build` in the following situations:
We recommend that you keep `npm run watch` running in the background and you only need to re-run `npm run build` in the following situations:
1. on first checkout
2. whenever any of the non-TypeScript resources have changed
@@ -91,29 +91,107 @@ Alternatively, you can start Storybook inside of VSCode. There is a VSCode launc
More information about Storybook can be found inside the **Overview** page once you have launched Storybook.
### Running the unit tests and integration tests that do not require a CLI instance
### Testing
Unit tests and many integration tests do not require a copy of the CodeQL CLI.
We have several types of tests:
Outside of vscode, in the `extensions/ql-vscode` directory, run:
* Unit tests: these live in the `tests/pure-tests/` directory
* View tests: these live in `src/view/variant-analysis/__tests__/`
* VSCode integration tests: these live in `src/vscode-tests/no-workspace` and `src/vscode-tests/minimal-workspace`
* CLI integration tests: these live in `src/vscode-tests/cli-integration`
```shell
npm run test && npm run integration
```
The CLI integration tests require an instance of the CodeQL CLI to run so they will require some extra setup steps. When adding new tests to our test suite, please be mindful of whether they need to be in the cli-integration folder. If the tests don't depend on the CLI, they are better suited to being a VSCode integration test.
Alternatively, you can run the tests inside of vscode. There are several vscode launch configurations defined that run the unit and integration tests. They can all be found in the debug view.
Any test data you're using (sample projects, config files, etc.) must go in a `src/vscode-tests/*/data` directory. When you run the tests, the test runner will copy the data directory to `out/vscode-tests/*/data`.
Only the _With CLI_ tests require a CLI instance to run. See below on how to do that.
#### Running the tests
Running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point to a checkout of the `github/codeql` repository. The appropriate CLI version will be downloaded as part of the test.
Pre-requisites:
1. Run `npm run build`.
2. You will need to have `npm run watch` running in the background.
### Running the integration tests
##### 1. From the terminal
You will need to run CLI tests using a task from inside of VS Code called _Launch Integration Tests - With CLI_.
Then, from the `extensions/ql-vscode` directory, use the appropriate command to run the tests:
* Unit tests: `npm run test:unit`
* View Tests: `npm run test:view`
* VSCode integration tests: `npm run integration`
###### CLI integration tests
The CLI integration tests require the CodeQL standard libraries in order to run so you will need to clone a local copy of the `github/codeql` repository.
From inside of VSCode, open the `launch.json` file and in the _Launch Integration Tests - With CLI_ task, uncomment the `"${workspaceRoot}/../codeql"` line. If necessary, replace value with a path to your checkout, and then run the task.
1. Set the `TEST_CODEQL_PATH` environment variable: running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point to a checkout of the `github/codeql` repository. The appropriate CLI version will be downloaded as part of the test.
2. Run your test command:
```shell
cd extensions/ql-vscode && npm run cli-integration
```
##### 2. From VSCode
Alternatively, you can run the tests inside of VSCode. There are several VSCode launch configurations defined that run the unit and integration tests.
You will need to run tests using a task from inside of VS Code, under the "Run and Debug" view:
* Unit tests: run the _Launch Unit Tests - React_ task
* View Tests: run the _Launch Unit Tests_ task
* VSCode integration tests: run the _Launch Unit Tests - No Workspace_ and _Launch Unit Tests - Minimal Workspace_ tasks
###### CLI integration tests
The CLI integration tests require the CodeQL standard libraries in order to run so you will need to clone a local copy of the `github/codeql` repository.
1. Set the `TEST_CODEQL_PATH` environment variable: running from a terminal, you _must_ set the `TEST_CODEQL_PATH` variable to point to a checkout of the `github/codeql` repository. The appropriate CLI version will be downloaded as part of the test.
2. Set the codeql path in VSCode's launch configuration: open `launch.json` and under the _Launch Integration Tests - With CLI_ section, uncomment the `"${workspaceRoot}/../codeql"` line. If you've cloned the `github/codeql` repo to a different path, replace the value with the correct path.
3. Run the VSCode task from the "Run and Debug" view called _Launch Integration Tests - With CLI_.
#### Using a mock GitHub API server
Multi-Repo Variant Analyses (MRVA) rely on the GitHub API. In order to make development and testing easy, we have functionality that allows us to intercept requests to the GitHub API and provide mock responses.
##### Using a pre-recorded test scenario
To run a mock MRVA scenario, follow these steps:
1. Enable the mock GitHub API server by adding the following in your VS Code user settings (which can be found by running the `Preferences: Open User Settings (JSON)` VS Code command):
```json
"codeQL.mockGitHubApiServer": {
"enabled": true
}
```
1. Run the `CodeQL: Mock GitHub API Server: Load Scenario` command from the command palette, and choose one of the scenarios to load.
1. Execute a normal MRVA. At this point you should see the scenario being played out, rather than an actual MRVA running.
1. Once you're done, you can stop using the mock scenario with `CodeQL: Mock GitHub API Server: Unload Scenario`
If you want to replay the same scenario you should unload and reload it so requests are replayed from the start.
##### Recording a new test scenario
To record a new mock MRVA scenario, follow these steps:
1. Enable the mock GitHub API server by adding the following in your VS Code user settings (which can be found by running the `Preferences: Open User Settings (JSON)` VS Code command):
```json
"codeQL.mockGitHubApiServer": {
"enabled": true
}
```
1. Run the `CodeQL: Mock GitHub API Server: Start Scenario Recording` VS Code command from the command palette.
1. Execute a normal MRVA.
1. Once everything you wanted to record is done (e.g. the MRVA has finished), run the `CodeQL: Mock GitHub API Server: Save Scenario` command from the command palette.
1. The scenario should then be available for replaying.
If you want to cancel recording, run the `CodeQL: Mock GitHub API Server: Cancel Scenario Recording` command.
Once the scenario has been recorded, it's often useful to remove some of the requests to speed up the replay, particularly ones that fetch the variant analysis status. Once some of the request files have been manually removed, the [fix-scenario-file-numbering script](./extensions/ql-vscode/scripts/fix-scenario-file-numbering.ts) can be used to update the numbering of the files. See the script file for details on how to use it.
#### Scenario data location
Pre-recorded scenarios are stored in `./src/mocks/scenarios`. However, it's possible to configure the location, by setting the `codeQL.mockGitHubApiServer.scenariosPath` configuration property in the VS Code user settings.
## Releasing (write access required)
@@ -137,7 +215,7 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
git tag v1.3.6
```
If you've accidentally created a badly named tag, you can delete it via
If you've accidentally created a badly named tag, you can delete it via
```bash
git tag -d badly-named-tag
```
@@ -148,13 +226,13 @@ From inside of VSCode, open the `launch.json` file and in the _Launch Integratio
```bash
git push upstream refs/tags/v1.3.6
```
b. If you're working straight in this repo:
```bash
git push origin refs/tags/v1.3.6
```
```
This will trigger [a release build](https://github.com/github/vscode-codeql/releases) on Actions.
* **IMPORTANT** Make sure you are on the `main` branch and your local checkout is fully updated when you add the tag.


@@ -0,0 +1,6 @@
.vscode-test/
node_modules/
out/
# Include the Storybook config
!.storybook


@@ -3,14 +3,19 @@ module.exports = {
parserOptions: {
ecmaVersion: 2018,
sourceType: "module",
project: ["tsconfig.json", "./src/**/tsconfig.json", "./gulpfile.ts/tsconfig.json"],
project: ["tsconfig.json", "./src/**/tsconfig.json", "./gulpfile.ts/tsconfig.json", "./scripts/tsconfig.json", "./.storybook/tsconfig.json"],
},
plugins: ["@typescript-eslint"],
env: {
node: true,
es6: true,
},
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended", "plugin:jest-dom/recommended"],
extends: [
"eslint:recommended",
"plugin:@typescript-eslint/recommended",
"plugin:jest-dom/recommended",
"plugin:prettier/recommended"
],
rules: {
"@typescript-eslint/no-use-before-define": 0,
"@typescript-eslint/no-unused-vars": [
@@ -27,11 +32,7 @@ module.exports = {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-floating-promises": [ "error", { ignoreVoid: true } ],
"prefer-const": ["warn", { destructuring: "all" }],
indent: "off",
"@typescript-eslint/indent": "off",
"@typescript-eslint/no-throw-literal": "error",
"no-useless-escape": 0,
semi: 2,
quotes: ["warn", "single"]
},
};


@@ -0,0 +1,5 @@
# Run this command to always ignore formatting commits in `git blame`
# git config blame.ignoreRevsFile .git-blame-ignore-revs
# Formatted all code using Prettier instead of tsfmt
ebcdf8ad0bb5bcb3efa679211709671716b892ba


@@ -0,0 +1,4 @@
{
"exit": true,
"require": ["test/mocha.setup.js"]
}


@@ -0,0 +1,6 @@
.vscode-test/
node_modules/
out/
# Include the Storybook config
!.storybook


@@ -0,0 +1,3 @@
{
"trailingComma": "all"
}


@@ -1,19 +1,17 @@
import type { StorybookConfig } from '@storybook/core-common';
import type { StorybookConfig } from "@storybook/core-common";
const config: StorybookConfig = {
stories: [
'../src/**/*.stories.mdx',
'../src/**/*.stories.@(js|jsx|ts|tsx)'
],
stories: ["../src/**/*.stories.mdx", "../src/**/*.stories.@(js|jsx|ts|tsx)"],
addons: [
'@storybook/addon-links',
'@storybook/addon-essentials',
'@storybook/addon-interactions'
"@storybook/addon-links",
"@storybook/addon-essentials",
"@storybook/addon-interactions",
"./vscode-theme-addon/preset.ts",
],
framework: '@storybook/react',
framework: "@storybook/react",
core: {
builder: '@storybook/builder-webpack5'
}
builder: "@storybook/builder-webpack5",
},
};
module.exports = config;


@@ -1,5 +1,5 @@
import { addons } from '@storybook/addons';
import { themes } from '@storybook/theming';
import { addons } from "@storybook/addons";
import { themes } from "@storybook/theming";
addons.setConfig({
theme: themes.dark,


@@ -1,10 +1,8 @@
import { themes } from '@storybook/theming';
import { action } from '@storybook/addon-actions';
import { themes } from "@storybook/theming";
import { action } from "@storybook/addon-actions";
// Allow all stories/components to use Codicons
import '@vscode/codicons/dist/codicon.css';
import '../src/stories/vscode-theme.css';
import "@vscode/codicons/dist/codicon.css";
// https://storybook.js.org/docs/react/configure/overview#configure-story-rendering
export const parameters = {
@@ -22,17 +20,12 @@ export const parameters = {
theme: themes.dark,
},
backgrounds: {
default: 'dark',
values: [
{
name: 'dark',
value: '#1e1e1e',
},
],
}
// The background is injected by our theme CSS files
disable: true,
},
};
(window as any).acquireVsCodeApi = () => ({
postMessage: action('post-vscode-message'),
setState: action('set-vscode-state'),
postMessage: action("post-vscode-message"),
setState: action("set-vscode-state"),
});


@@ -0,0 +1,19 @@
{
"compilerOptions": {
"module": "esnext",
"moduleResolution": "node",
"target": "es6",
"outDir": "out",
"lib": ["ES2021", "dom"],
"jsx": "react",
"sourceMap": true,
"rootDir": "..",
"strict": true,
"noUnusedLocals": true,
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true,
"experimentalDecorators": true,
"skipLibCheck": true
},
"exclude": ["node_modules"]
}


@@ -0,0 +1,61 @@
import * as React from "react";
import { FunctionComponent, useCallback } from "react";
import { useGlobals } from "@storybook/api";
import {
IconButton,
Icons,
WithTooltip,
TooltipLinkList,
Link,
WithHideFn,
} from "@storybook/components";
import { themeNames, VSCodeTheme } from "./theme";
export const ThemeSelector: FunctionComponent = () => {
const [{ vscodeTheme }, updateGlobals] = useGlobals();
const changeTheme = useCallback(
(theme: VSCodeTheme) => {
updateGlobals({
vscodeTheme: theme,
});
},
[updateGlobals],
);
const createLinks = useCallback(
(onHide: () => void): Link[] =>
Object.values(VSCodeTheme).map((theme) => ({
id: theme,
onClick() {
changeTheme(theme);
onHide();
},
title: themeNames[theme],
value: theme,
active: vscodeTheme === theme,
})),
[vscodeTheme, changeTheme],
);
return (
<WithTooltip
placement="top"
trigger="click"
closeOnClick
tooltip={({ onHide }: WithHideFn) => (
<TooltipLinkList links={createLinks(onHide)} />
)}
>
<IconButton
key="theme"
title="Change the theme of the preview"
active={vscodeTheme !== VSCodeTheme.Dark}
>
<Icons icon="dashboard" />
</IconButton>
</WithTooltip>
);
};


@@ -0,0 +1,14 @@
import * as React from "react";
import { addons, types } from "@storybook/addons";
import { ThemeSelector } from "./ThemeSelector";
const ADDON_ID = "vscode-theme-addon";
addons.register(ADDON_ID, () => {
addons.add(ADDON_ID, {
title: "VSCode Themes",
type: types.TOOL,
match: ({ viewMode }) => !!(viewMode && viewMode.match(/^(story|docs)$/)),
render: () => <ThemeSelector />,
});
});


@@ -0,0 +1,7 @@
export function config(entry = []) {
return [...entry, require.resolve("./preview.ts")];
}
export function managerEntries(entry = []) {
return [...entry, require.resolve("./manager.tsx")];
}


@@ -0,0 +1,8 @@
import { withTheme } from "./withTheme";
import { VSCodeTheme } from "./theme";
export const decorators = [withTheme];
export const globals = {
vscodeTheme: VSCodeTheme.Dark,
};


@@ -0,0 +1,9 @@
export enum VSCodeTheme {
Dark = "dark",
Light = "light",
}
export const themeNames: { [key in VSCodeTheme]: string } = {
[VSCodeTheme.Dark]: "Dark+",
[VSCodeTheme.Light]: "Light+",
};


@@ -0,0 +1,48 @@
import { useEffect, useGlobals } from "@storybook/addons";
import type {
AnyFramework,
PartialStoryFn as StoryFunction,
StoryContext,
} from "@storybook/csf";
import { VSCodeTheme } from "./theme";
const themeFiles: { [key in VSCodeTheme]: string } = {
[VSCodeTheme.Dark]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
require("!file-loader?modules!../../src/stories/vscode-theme-dark.css")
.default,
[VSCodeTheme.Light]:
// eslint-disable-next-line @typescript-eslint/no-var-requires
require("!file-loader?modules!../../src/stories/vscode-theme-light.css")
.default,
};
export const withTheme = (
StoryFn: StoryFunction<AnyFramework>,
context: StoryContext<AnyFramework>,
) => {
const [{ vscodeTheme }] = useGlobals();
useEffect(() => {
const styleSelectorId =
context.viewMode === "docs"
? `addon-vscode-theme-docs-${context.id}`
: "addon-vscode-theme-theme";
const theme = Object.values(VSCodeTheme).includes(vscodeTheme)
? (vscodeTheme as VSCodeTheme)
: VSCodeTheme.Dark;
document.getElementById(styleSelectorId)?.remove();
const link = document.createElement("link");
link.id = styleSelectorId;
link.href = themeFiles[theme];
link.rel = "stylesheet";
document.head.appendChild(link);
}, [vscodeTheme]);
return StoryFn();
};


@@ -12,5 +12,5 @@ src/**
.gitignore
gulpfile.js/**
tsconfig.json
tsfmt.json
.prettierrc
vsc-extension-quickstart.md


@@ -1,5 +1,22 @@
# CodeQL for Visual Studio Code: Changelog
## 1.7.6 - 21 November 2022
- Warn users when their VS Code version is too old to support all features in the vscode-codeql extension. [#1674](https://github.com/github/vscode-codeql/pull/1674)
## 1.7.5 - 8 November 2022
- Fix a bug where the AST Viewer was not working unless the associated CodeQL library pack is in the workspace. [#1735](https://github.com/github/vscode-codeql/pull/1735)
## 1.7.4 - 29 October 2022
No user facing changes.
## 1.7.3 - 28 October 2022
- Fix a bug where databases may be lost if VS Code is restarted while the extension is being started up. [#1638](https://github.com/github/vscode-codeql/pull/1638)
- Add commands for navigating up, down, left, or right in the result viewer. Previously there were only commands for moving up and down the currently-selected path. We suggest binding keyboard shortcuts to these commands, for navigating the result viewer using the keyboard. [#1568](https://github.com/github/vscode-codeql/pull/1568)
## 1.7.2 - 14 October 2022
- Fix a bug where results created in older versions were thought to be unsuccessful. [#1605](https://github.com/github/vscode-codeql/pull/1605)

View File

@@ -99,6 +99,10 @@ When the results are ready, they're displayed in the CodeQL Query Results view.
If there are any problems running a query, a notification is displayed in the bottom right corner of the application. In addition to the error message, the notification includes details of how to fix the problem.
### Keyboard navigation
If you wish to navigate the query results from your keyboard, you can bind shortcuts to the **CodeQL: Navigate Up/Down/Left/Right in Result Viewer** commands.
## What next?
For more information about the CodeQL extension, [see the documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/). Otherwise, you could:

View File

@@ -1,17 +1,20 @@
import * as gulp from 'gulp';
import * as gulp from "gulp";
// eslint-disable-next-line @typescript-eslint/no-var-requires
const replace = require('gulp-replace');
const replace = require("gulp-replace");
/** Inject the application insights key into the telemetry file */
export function injectAppInsightsKey() {
if (!process.env.APP_INSIGHTS_KEY) {
// noop
console.log('APP_INSIGHTS_KEY environment variable is not set. So, cannot inject it into the application.');
console.log(
"APP_INSIGHTS_KEY environment variable is not set. So, cannot inject it into the application.",
);
return Promise.resolve();
}
// replace the key
return gulp.src(['out/telemetry.js'])
return gulp
.src(["out/telemetry.js"])
.pipe(replace(/REPLACE-APP-INSIGHTS-KEY/, process.env.APP_INSIGHTS_KEY))
.pipe(gulp.dest('out/'));
.pipe(gulp.dest("out/"));
}

View File

@@ -1,5 +1,5 @@
import * as fs from 'fs-extra';
import * as path from 'path';
import * as fs from "fs-extra";
import * as path from "path";
export interface DeployedPackage {
distPath: string;
@@ -8,44 +8,64 @@ export interface DeployedPackage {
}
const packageFiles = [
'.vscodeignore',
'CHANGELOG.md',
'README.md',
'language-configuration.json',
'snippets.json',
'media',
'node_modules',
'out'
".vscodeignore",
"CHANGELOG.md",
"README.md",
"language-configuration.json",
"snippets.json",
"media",
"node_modules",
"out",
"workspace-databases-schema.json",
];
async function copyPackage(sourcePath: string, destPath: string): Promise<void> {
async function copyPackage(
sourcePath: string,
destPath: string,
): Promise<void> {
for (const file of packageFiles) {
console.log(`copying ${path.resolve(sourcePath, file)} to ${path.resolve(destPath, file)}`);
console.log(
`copying ${path.resolve(sourcePath, file)} to ${path.resolve(
destPath,
file,
)}`,
);
await fs.copy(path.resolve(sourcePath, file), path.resolve(destPath, file));
}
}
export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
export async function deployPackage(
packageJsonPath: string,
): Promise<DeployedPackage> {
try {
const packageJson: any = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
const packageJson: any = JSON.parse(
await fs.readFile(packageJsonPath, "utf8"),
);
// Default to development build; use flag --release to indicate release build.
const isDevBuild = !process.argv.includes('--release');
const distDir = path.join(__dirname, '../../../dist');
const isDevBuild = !process.argv.includes("--release");
const distDir = path.join(__dirname, "../../../dist");
await fs.mkdirs(distDir);
if (isDevBuild) {
// NOTE: rootPackage.name had better not have any regex metacharacters
const oldDevBuildPattern = new RegExp('^' + packageJson.name + '[^/]+-dev[0-9.]+\\.vsix$');
const oldDevBuildPattern = new RegExp(
"^" + packageJson.name + "[^/]+-dev[0-9.]+\\.vsix$",
);
// Dev package filenames are of the form
// vscode-codeql-0.0.1-dev.2019.9.27.19.55.20.vsix
(await fs.readdir(distDir)).filter(name => name.match(oldDevBuildPattern)).map(build => {
console.log(`Deleting old dev build ${build}...`);
fs.unlinkSync(path.join(distDir, build));
});
(await fs.readdir(distDir))
.filter((name) => name.match(oldDevBuildPattern))
.map((build) => {
console.log(`Deleting old dev build ${build}...`);
fs.unlinkSync(path.join(distDir, build));
});
const now = new Date();
packageJson.version = packageJson.version +
`-dev.${now.getUTCFullYear()}.${now.getUTCMonth() + 1}.${now.getUTCDate()}` +
packageJson.version =
packageJson.version +
`-dev.${now.getUTCFullYear()}.${
now.getUTCMonth() + 1
}.${now.getUTCDate()}` +
`.${now.getUTCHours()}.${now.getUTCMinutes()}.${now.getUTCSeconds()}`;
}
@@ -53,19 +73,23 @@ export async function deployPackage(packageJsonPath: string): Promise<DeployedPa
await fs.remove(distPath);
await fs.mkdirs(distPath);
await fs.writeFile(path.join(distPath, 'package.json'), JSON.stringify(packageJson, null, 2));
await fs.writeFile(
path.join(distPath, "package.json"),
JSON.stringify(packageJson, null, 2),
);
const sourcePath = path.join(__dirname, '..');
console.log(`Copying package '${packageJson.name}' and its dependencies to '${distPath}'...`);
const sourcePath = path.join(__dirname, "..");
console.log(
`Copying package '${packageJson.name}' and its dependencies to '${distPath}'...`,
);
await copyPackage(sourcePath, distPath);
return {
distPath: distPath,
name: packageJson.name,
version: packageJson.version
version: packageJson.version,
};
}
catch (e) {
} catch (e) {
console.error(e);
throw e;
}

View File

@@ -1,18 +1,20 @@
import * as gulp from 'gulp';
import { compileTypeScript, watchTypeScript, cleanOutput } from './typescript';
import { compileTextMateGrammar } from './textmate';
import { copyTestData } from './tests';
import { compileView, watchView } from './webpack';
import { packageExtension } from './package';
import { injectAppInsightsKey } from './appInsights';
import * as gulp from "gulp";
import { compileTypeScript, watchTypeScript, cleanOutput } from "./typescript";
import { compileTextMateGrammar } from "./textmate";
import { copyTestData, watchTestData } from "./tests";
import { compileView, watchView } from "./webpack";
import { packageExtension } from "./package";
import { injectAppInsightsKey } from "./appInsights";
export const buildWithoutPackage =
gulp.series(
cleanOutput,
gulp.parallel(
compileTypeScript, compileTextMateGrammar, compileView, copyTestData
)
);
export const buildWithoutPackage = gulp.series(
cleanOutput,
gulp.parallel(
compileTypeScript,
compileTextMateGrammar,
compileView,
copyTestData,
),
);
export {
cleanOutput,
@@ -21,7 +23,12 @@ export {
watchView,
compileTypeScript,
copyTestData,
watchTestData,
injectAppInsightsKey,
compileView,
};
export default gulp.series(buildWithoutPackage, injectAppInsightsKey, packageExtension);
export default gulp.series(
buildWithoutPackage,
injectAppInsightsKey,
packageExtension,
);

View File

@@ -1,21 +1,28 @@
import * as path from 'path';
import { deployPackage } from './deploy';
import * as childProcess from 'child-process-promise';
import * as path from "path";
import { deployPackage } from "./deploy";
import * as childProcess from "child-process-promise";
export async function packageExtension(): Promise<void> {
const deployedPackage = await deployPackage(path.resolve('package.json'));
console.log(`Packaging extension '${deployedPackage.name}@${deployedPackage.version}'...`);
const deployedPackage = await deployPackage(path.resolve("package.json"));
console.log(
`Packaging extension '${deployedPackage.name}@${deployedPackage.version}'...`,
);
const args = [
'package',
'--out', path.resolve(deployedPackage.distPath, '..', `${deployedPackage.name}-${deployedPackage.version}.vsix`)
"package",
"--out",
path.resolve(
deployedPackage.distPath,
"..",
`${deployedPackage.name}-${deployedPackage.version}.vsix`,
),
];
const proc = childProcess.spawn('./node_modules/.bin/vsce', args, {
cwd: deployedPackage.distPath
const proc = childProcess.spawn("./node_modules/.bin/vsce", args, {
cwd: deployedPackage.distPath,
});
proc.childProcess.stdout!.on('data', (data) => {
proc.childProcess.stdout!.on("data", (data) => {
console.log(data.toString());
});
proc.childProcess.stderr!.on('data', (data) => {
proc.childProcess.stderr!.on("data", (data) => {
console.error(data.toString());
});

View File

@@ -1,17 +1,21 @@
import * as gulp from 'gulp';
import * as gulp from "gulp";
export function copyTestData() {
copyNoWorkspaceData();
copyCliIntegrationData();
return Promise.resolve();
return Promise.all([copyNoWorkspaceData(), copyCliIntegrationData()]);
}
export function watchTestData() {
return gulp.watch(["src/vscode-tests/*/data/**/*"], copyTestData);
}
function copyNoWorkspaceData() {
return gulp.src('src/vscode-tests/no-workspace/data/**/*')
.pipe(gulp.dest('out/vscode-tests/no-workspace/data'));
return gulp
.src("src/vscode-tests/no-workspace/data/**/*")
.pipe(gulp.dest("out/vscode-tests/no-workspace/data"));
}
function copyCliIntegrationData() {
return gulp.src('src/vscode-tests/cli-integration/data/**/*')
.pipe(gulp.dest('out/vscode-tests/cli-integration/data'));
return gulp
.src("src/vscode-tests/cli-integration/data/**/*")
.pipe(gulp.dest("out/vscode-tests/cli-integration/data"));
}

View File

@@ -1,8 +1,8 @@
import * as gulp from 'gulp';
import * as jsYaml from 'js-yaml';
import * as through from 'through2';
import * as PluginError from 'plugin-error';
import * as Vinyl from 'vinyl';
import * as gulp from "gulp";
import * as jsYaml from "js-yaml";
import * as through from "through2";
import * as PluginError from "plugin-error";
import * as Vinyl from "vinyl";
/**
* Replaces all rule references with the match pattern of the referenced rule.
@@ -11,7 +11,10 @@ import * as Vinyl from 'vinyl';
* @param replacements Map from rule name to match text.
* @returns The new regex after replacement.
*/
function replaceReferencesWithStrings(value: string, replacements: Map<string, string>): string {
function replaceReferencesWithStrings(
value: string,
replacements: Map<string, string>,
): string {
let result = value;
// eslint-disable-next-line no-constant-condition
while (true) {
@@ -52,21 +55,19 @@ function getNodeMatchText(rule: any): string {
if (rule.match !== undefined) {
// For a match string, just use that string as the replacement.
return rule.match;
}
else if (rule.patterns !== undefined) {
} else if (rule.patterns !== undefined) {
const patterns: string[] = [];
// For a list of patterns, use the disjunction of those patterns.
for (const patternIndex in rule.patterns) {
const pattern = rule.patterns[patternIndex];
if (pattern.include !== null) {
patterns.push('(?' + pattern.include + ')');
patterns.push("(?" + pattern.include + ")");
}
}
return '(?:' + patterns.join('|') + ')';
}
else {
return '';
return "(?:" + patterns.join("|") + ")";
} else {
return "";
}
}
@@ -109,7 +110,7 @@ function visitAllRulesInFile(yaml: any, action: (rule: any) => void) {
function visitAllRulesInRuleMap(ruleMap: any, action: (rule: any) => void) {
for (const key in ruleMap) {
const rule = ruleMap[key];
if ((typeof rule) === 'object') {
if (typeof rule === "object") {
action(rule);
if (rule.patterns !== undefined) {
visitAllRulesInRuleMap(rule.patterns, action);
@@ -127,10 +128,10 @@ function visitAllRulesInRuleMap(ruleMap: any, action: (rule: any) => void) {
function visitAllMatchesInRule(rule: any, action: (match: any) => any) {
for (const key in rule) {
switch (key) {
case 'begin':
case 'end':
case 'match':
case 'while':
case "begin":
case "end":
case "match":
case "while":
rule[key] = action(rule[key]);
break;
@@ -147,21 +148,21 @@ function visitAllMatchesInRule(rule: any, action: (match: any) => any) {
* @param rule Rule to be transformed.
* @param key Base key of the property to be transformed.
*/
function expandPatternMatchProperties(rule: any, key: 'begin' | 'end') {
const patternKey = key + 'Pattern';
const capturesKey = key + 'Captures';
function expandPatternMatchProperties(rule: any, key: "begin" | "end") {
const patternKey = key + "Pattern";
const capturesKey = key + "Captures";
const pattern = rule[patternKey];
if (pattern !== undefined) {
const patterns: string[] = Array.isArray(pattern) ? pattern : [pattern];
rule[key] = patterns.map(p => `((?${p}))`).join('|');
rule[key] = patterns.map((p) => `((?${p}))`).join("|");
const captures: { [index: string]: any } = {};
for (const patternIndex in patterns) {
captures[(Number(patternIndex) + 1).toString()] = {
patterns: [
{
include: patterns[patternIndex]
}
]
include: patterns[patternIndex],
},
],
};
}
rule[capturesKey] = captures;
@@ -177,20 +178,19 @@ function expandPatternMatchProperties(rule: any, key: 'begin' | 'end') {
function transformFile(yaml: any) {
const macros = gatherMacros(yaml);
visitAllRulesInFile(yaml, (rule) => {
expandPatternMatchProperties(rule, 'begin');
expandPatternMatchProperties(rule, 'end');
expandPatternMatchProperties(rule, "begin");
expandPatternMatchProperties(rule, "end");
});
// Expand macros in matches.
visitAllRulesInFile(yaml, (rule) => {
visitAllMatchesInRule(rule, (match) => {
if ((typeof match) === 'object') {
if (typeof match === "object") {
for (const key in match) {
return macros.get(key)!.replace('(?#)', `(?:${match[key]})`);
return macros.get(key)!.replace("(?#)", `(?:${match[key]})`);
}
throw new Error('No key in macro map.');
}
else {
throw new Error("No key in macro map.");
} else {
return match;
}
});
@@ -207,7 +207,7 @@ function transformFile(yaml: any) {
});
if (yaml.regexOptions !== undefined) {
const regexOptions = '(?' + yaml.regexOptions + ')';
const regexOptions = "(?" + yaml.regexOptions + ")";
visitAllRulesInFile(yaml, (rule) => {
visitAllMatchesInRule(rule, (match) => {
return regexOptions + match;
@@ -219,28 +219,36 @@ function transformFile(yaml: any) {
}
export function transpileTextMateGrammar() {
return through.obj((file: Vinyl, _encoding: string, callback: (err: string | null, file: Vinyl | PluginError) => void): void => {
if (file.isNull()) {
callback(null, file);
}
else if (file.isBuffer()) {
const buf: Buffer = file.contents;
const yamlText: string = buf.toString('utf8');
const jsonData: any = jsYaml.load(yamlText);
transformFile(jsonData);
return through.obj(
(
file: Vinyl,
_encoding: string,
callback: (err: string | null, file: Vinyl | PluginError) => void,
): void => {
if (file.isNull()) {
callback(null, file);
} else if (file.isBuffer()) {
const buf: Buffer = file.contents;
const yamlText: string = buf.toString("utf8");
const jsonData: any = jsYaml.load(yamlText);
transformFile(jsonData);
file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), 'utf8');
file.extname = '.json';
callback(null, file);
}
else {
callback('error', new PluginError('transpileTextMateGrammar', 'Format not supported.'));
}
});
file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), "utf8");
file.extname = ".json";
callback(null, file);
} else {
callback(
"error",
new PluginError("transpileTextMateGrammar", "Format not supported."),
);
}
},
);
}
export function compileTextMateGrammar() {
return gulp.src('syntaxes/*.tmLanguage.yml')
return gulp
.src("syntaxes/*.tmLanguage.yml")
.pipe(transpileTextMateGrammar())
.pipe(gulp.dest('out/syntaxes'));
.pipe(gulp.dest("out/syntaxes"));
}

View File

@@ -1,41 +1,62 @@
import * as colors from 'ansi-colors';
import * as gulp from 'gulp';
import * as sourcemaps from 'gulp-sourcemaps';
import * as ts from 'gulp-typescript';
import * as del from 'del';
import * as colors from "ansi-colors";
import * as gulp from "gulp";
import * as sourcemaps from "gulp-sourcemaps";
import * as ts from "gulp-typescript";
import * as del from "del";
function goodReporter(): ts.reporter.Reporter {
return {
error: (error, typescript) => {
if (error.tsFile) {
console.log('[' + colors.gray('gulp-typescript') + '] ' + colors.red(error.fullFilename
+ '(' + (error.startPosition!.line + 1) + ',' + error.startPosition!.character + '): ')
+ 'error TS' + error.diagnostic.code + ': ' + typescript.flattenDiagnosticMessageText(error.diagnostic.messageText, '\n'));
}
else {
console.log(
"[" +
colors.gray("gulp-typescript") +
"] " +
colors.red(
error.fullFilename +
"(" +
(error.startPosition!.line + 1) +
"," +
error.startPosition!.character +
"): ",
) +
"error TS" +
error.diagnostic.code +
": " +
typescript.flattenDiagnosticMessageText(
error.diagnostic.messageText,
"\n",
),
);
} else {
console.log(error.message);
}
},
};
}
const tsProject = ts.createProject('tsconfig.json');
const tsProject = ts.createProject("tsconfig.json");
export function cleanOutput() {
return tsProject.projectDirectory ? del(tsProject.projectDirectory + '/out/*') : Promise.resolve();
return tsProject.projectDirectory
? del(tsProject.projectDirectory + "/out/*")
: Promise.resolve();
}
export function compileTypeScript() {
return tsProject.src()
return tsProject
.src()
.pipe(sourcemaps.init())
.pipe(tsProject(goodReporter()))
.pipe(sourcemaps.write('.', {
includeContent: false,
sourceRoot: '.',
}))
.pipe(gulp.dest('out'));
.pipe(
sourcemaps.write(".", {
includeContent: false,
sourceRoot: ".",
}),
)
.pipe(gulp.dest("out"));
}
export function watchTypeScript() {
gulp.watch('src/**/*.ts', compileTypeScript);
gulp.watch("src/**/*.ts", compileTypeScript);
}

View File

@@ -1,80 +1,80 @@
import * as path from 'path';
import * as webpack from 'webpack';
import * as MiniCssExtractPlugin from 'mini-css-extract-plugin';
import * as path from "path";
import * as webpack from "webpack";
import * as MiniCssExtractPlugin from "mini-css-extract-plugin";
export const config: webpack.Configuration = {
mode: 'development',
mode: "development",
entry: {
webview: './src/view/webview.tsx'
webview: "./src/view/webview.tsx",
},
output: {
path: path.resolve(__dirname, '..', 'out'),
filename: '[name].js'
path: path.resolve(__dirname, "..", "out"),
filename: "[name].js",
},
devtool: 'inline-source-map',
devtool: "inline-source-map",
resolve: {
extensions: ['.js', '.ts', '.tsx', '.json'],
extensions: [".js", ".ts", ".tsx", ".json"],
fallback: {
path: require.resolve('path-browserify')
}
path: require.resolve("path-browserify"),
},
},
module: {
rules: [
{
test: /\.(ts|tsx)$/,
loader: 'ts-loader',
loader: "ts-loader",
options: {
configFile: 'src/view/tsconfig.json',
}
configFile: "src/view/tsconfig.json",
},
},
{
test: /\.less$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: 'css-loader',
loader: "css-loader",
options: {
importLoaders: 1,
sourceMap: true
}
sourceMap: true,
},
},
{
loader: 'less-loader',
loader: "less-loader",
options: {
javascriptEnabled: true,
sourceMap: true
}
}
]
sourceMap: true,
},
},
],
},
{
test: /\.css$/,
use: [
MiniCssExtractPlugin.loader,
{
loader: 'css-loader'
}
]
loader: "css-loader",
},
],
},
{
test: /\.(woff(2)?|ttf|eot)$/,
use: [
{
loader: 'file-loader',
loader: "file-loader",
options: {
name: '[name].[ext]',
outputPath: 'fonts/',
name: "[name].[ext]",
outputPath: "fonts/",
// We need this to make Webpack use the correct path for the fonts.
// Without this, the CSS file will use `url([object Module])`
esModule: false
}
esModule: false,
},
},
],
}
]
},
],
},
performance: {
hints: false
hints: false,
},
plugins: [new MiniCssExtractPlugin()],
};

View File

@@ -1,5 +1,5 @@
import * as webpack from 'webpack';
import { config } from './webpack.config';
import * as webpack from "webpack";
import { config } from "./webpack.config";
export function compileView(cb: (err?: Error) => void) {
doWebpack(config, true, cb);
@@ -12,35 +12,41 @@ export function watchView(cb: (err?: Error) => void) {
watchOptions: {
aggregateTimeout: 200,
poll: 1000,
}
},
};
doWebpack(watchConfig, false, cb);
}
function doWebpack(internalConfig: webpack.Configuration, failOnError: boolean, cb: (err?: Error) => void) {
function doWebpack(
internalConfig: webpack.Configuration,
failOnError: boolean,
cb: (err?: Error) => void,
) {
const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
if (error) {
cb(error);
}
if (stats) {
console.log(stats.toString({
errorDetails: true,
colors: true,
assets: false,
builtAt: false,
version: false,
hash: false,
entrypoints: false,
timings: false,
modules: false,
errors: true
}));
console.log(
stats.toString({
errorDetails: true,
colors: true,
assets: false,
builtAt: false,
version: false,
hash: false,
entrypoints: false,
timings: false,
modules: false,
errors: true,
}),
);
if (stats.hasErrors()) {
if (failOnError) {
cb(new Error('Compilation errors detected.'));
cb(new Error("Compilation errors detected."));
return;
} else {
console.error('Compilation errors detected.');
console.error("Compilation errors detected.");
}
}
cb();

File diff suppressed because it is too large

View File

@@ -4,7 +4,7 @@
"description": "CodeQL for Visual Studio Code",
"author": "GitHub",
"private": true,
"version": "1.7.2",
"version": "1.7.6",
"publisher": "GitHub",
"license": "MIT",
"icon": "media/VS-marketplace-CodeQL-icon.png",
@@ -37,6 +37,7 @@
"onLanguage:ql",
"onLanguage:ql-summary",
"onView:codeQLDatabases",
"onView:codeQLDatabasesExperimental",
"onView:codeQLQueryHistory",
"onView:codeQLAstViewer",
"onView:codeQLEvalLogViewer",
@@ -83,6 +84,12 @@
"editor.wordBasedSuggestions": false
}
},
"jsonValidation": [
{
"fileMatch": "workspace-databases.json",
"url": "./workspace-databases-schema.json"
}
],
"languages": [
{
"id": "ql",
@@ -290,6 +297,13 @@
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
"patternErrorMessage": "Please enter a valid GitHub repository",
"markdownDescription": "[For internal use only] The name of the GitHub repository in which the GitHub Actions workflow is run when using the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
},
"codeQL.logInsights.joinOrderWarningThreshold": {
"type": "number",
"default": 50,
"scope": "window",
"minimum": 0,
"description": "Report a warning for any join order whose metric exceeds this value."
}
}
},
@@ -311,13 +325,9 @@
"title": "CodeQL: Run Variant Analysis"
},
{
"command": "codeQL.exportVariantAnalysisResults",
"command": "codeQL.exportSelectedVariantAnalysisResults",
"title": "CodeQL: Export Variant Analysis Results"
},
{
"command": "codeQL.openVariantAnalysis",
"title": "CodeQL: Open Variant Analysis"
},
{
"command": "codeQL.runQueries",
"title": "CodeQL: Run Queries in Selected Files"
@@ -346,6 +356,14 @@
"command": "codeQL.copyVersion",
"title": "CodeQL: Copy Version Information"
},
{
"command": "codeQLDatabasesExperimental.openConfigFile",
"title": "Open Database Configuration File",
"icon": {
"light": "media/light/edit.svg",
"dark": "media/dark/edit.svg"
}
},
{
"command": "codeQLDatabases.chooseDatabaseFolder",
"title": "Choose Database from Folder",
@@ -595,12 +613,20 @@
"title": "Copy Repository List"
},
{
"command": "codeQLQueryResults.nextPathStep",
"title": "CodeQL: Show Next Step on Path"
"command": "codeQLQueryResults.down",
"title": "CodeQL: Navigate Down in Local Result Viewer"
},
{
"command": "codeQLQueryResults.previousPathStep",
"title": "CodeQL: Show Previous Step on Path"
"command": "codeQLQueryResults.up",
"title": "CodeQL: Navigate Up in Local Result Viewer"
},
{
"command": "codeQLQueryResults.right",
"title": "CodeQL: Navigate Right in Local Result Viewer"
},
{
"command": "codeQLQueryResults.left",
"title": "CodeQL: Navigate Left in Local Result Viewer"
},
{
"command": "codeQL.restartQueryServer",
@@ -638,6 +664,26 @@
"command": "codeQL.gotoQL",
"title": "CodeQL: Go to QL Code",
"enablement": "codeql.hasQLSource"
},
{
"command": "codeQL.mockGitHubApiServer.startRecording",
"title": "CodeQL: Mock GitHub API Server: Start Scenario Recording"
},
{
"command": "codeQL.mockGitHubApiServer.saveScenario",
"title": "CodeQL: Mock GitHub API Server: Save Scenario"
},
{
"command": "codeQL.mockGitHubApiServer.cancelRecording",
"title": "CodeQL: Mock GitHub API Server: Cancel Scenario Recording"
},
{
"command": "codeQL.mockGitHubApiServer.loadScenario",
"title": "CodeQL: Mock GitHub API Server: Load Scenario"
},
{
"command": "codeQL.mockGitHubApiServer.unloadScenario",
"title": "CodeQL: Mock GitHub API Server: Unload Scenario"
}
],
"menus": {
@@ -716,6 +762,11 @@
"command": "codeQLEvalLogViewer.clear",
"when": "view == codeQLEvalLogViewer",
"group": "navigation"
},
{
"command": "codeQLDatabasesExperimental.openConfigFile",
"when": "view == codeQLDatabasesExperimental",
"group": "navigation"
}
],
"view/item/context": [
@@ -895,11 +946,7 @@
"when": "config.codeQL.canary && editorLangId == ql && resourceExtname == .ql"
},
{
"command": "codeQL.openVariantAnalysis",
"when": "config.codeQL.canary && config.codeQL.variantAnalysis.liveResults"
},
{
"command": "codeQL.exportVariantAnalysisResults",
"command": "codeQL.exportSelectedVariantAnalysisResults",
"when": "config.codeQL.canary"
},
{
@@ -934,6 +981,10 @@
"command": "codeQL.chooseDatabaseLgtm",
"when": "config.codeQL.canary"
},
{
"command": "codeQLDatabasesExperimental.openConfigFile",
"when": "false"
},
{
"command": "codeQLDatabases.setCurrentDatabase",
"when": "false"
@@ -1097,6 +1148,26 @@
{
"command": "codeQLTests.showOutputDifferences",
"when": "false"
},
{
"command": "codeQL.mockGitHubApiServer.startRecording",
"when": "config.codeQL.mockGitHubApiServer.enabled && !codeQL.mockGitHubApiServer.recording"
},
{
"command": "codeQL.mockGitHubApiServer.saveScenario",
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.recording"
},
{
"command": "codeQL.mockGitHubApiServer.cancelRecording",
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.recording"
},
{
"command": "codeQL.mockGitHubApiServer.loadScenario",
"when": "config.codeQL.mockGitHubApiServer.enabled && !codeQL.mockGitHubApiServer.recording"
},
{
"command": "codeQL.mockGitHubApiServer.unloadScenario",
"when": "config.codeQL.mockGitHubApiServer.enabled && codeQL.mockGitHubApiServer.scenarioLoaded"
}
],
"editor/context": [
@@ -1153,6 +1224,11 @@
"id": "codeQLDatabases",
"name": "Databases"
},
{
"id": "codeQLDatabasesExperimental",
"name": "Databases",
"when": "config.codeQL.canary && config.codeQL.newQueryRunExperience"
},
{
"id": "codeQLQueryHistory",
"name": "Query History"
@@ -1192,18 +1268,21 @@
"watch": "npm-run-all -p watch:*",
"watch:extension": "tsc --watch",
"watch:webpack": "gulp watchView",
"watch:files": "gulp watchTestData",
"test": "npm-run-all -p test:*",
"test:unit": "mocha --exit -r ts-node/register -r test/mocha.setup.js test/pure-tests/**/*.ts",
"test:unit": "mocha --config .mocharc.json 'test/pure-tests/**/*.ts'",
"test:view": "jest",
"preintegration": "rm -rf ./out/vscode-tests && gulp",
"integration": "node ./out/vscode-tests/run-integration-tests.js no-workspace,minimal-workspace",
"cli-integration": "npm run preintegration && node ./out/vscode-tests/run-integration-tests.js cli-integration",
"integration:no-workspace": "node ./out/vscode-tests/run-integration-tests.js no-workspace",
"integration:minimal-workspace": "node ./out/vscode-tests/run-integration-tests.js minimal-workspace",
"cli-integration": "node ./out/vscode-tests/run-integration-tests.js cli-integration",
"update-vscode": "node ./node_modules/vscode/bin/install",
"format": "tsfmt -r && eslint src test --ext .ts,.tsx --fix",
"lint": "eslint src test --ext .ts,.tsx --max-warnings=0",
"format": "prettier --write **/*.{ts,tsx} && eslint . --ext .ts,.tsx --fix",
"lint": "eslint . --ext .ts,.tsx --max-warnings=0",
"format-staged": "lint-staged",
"storybook": "start-storybook -p 6006",
"build-storybook": "build-storybook"
"build-storybook": "build-storybook",
"lint:scenarios": "ts-node scripts/lint-scenarios.ts"
},
"dependencies": {
"@octokit/plugin-retry": "^3.0.9",
@@ -1212,7 +1291,9 @@
"@primer/react": "^35.0.0",
"@vscode/codicons": "^0.0.31",
"@vscode/webview-ui-toolkit": "^1.0.1",
"ajv": "^8.11.0",
"child-process-promise": "^2.2.1",
"chokidar": "^3.5.3",
"classnames": "~2.2.6",
"d3": "^7.6.1",
"d3-graphviz": "^2.6.1",
@@ -1221,8 +1302,10 @@
"immutable": "^4.0.0",
"js-yaml": "^4.1.0",
"minimist": "~1.2.6",
"msw": "^0.47.4",
"nanoid": "^3.2.0",
"node-fetch": "~2.6.7",
"p-queue": "^6.0.0",
"path-browserify": "^1.0.1",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@@ -1248,6 +1331,7 @@
"@babel/core": "^7.18.13",
"@babel/plugin-transform-modules-commonjs": "^7.18.6",
"@faker-js/faker": "^7.5.0",
"@octokit/plugin-throttling": "^4.3.2",
"@storybook/addon-actions": "^6.5.10",
"@storybook/addon-essentials": "^6.5.10",
"@storybook/addon-interactions": "^6.5.10",
@@ -1288,6 +1372,7 @@
"@types/sinon-chai": "~3.2.3",
"@types/stream-chain": "~2.0.1",
"@types/stream-json": "~1.7.1",
"@types/tar-stream": "^2.2.2",
"@types/through2": "^2.0.36",
"@types/tmp": "^0.1.0",
"@types/unzipper": "~0.10.1",
@@ -1295,9 +1380,9 @@
"@types/webpack": "^5.28.0",
"@types/webpack-env": "^1.18.0",
"@types/xml2js": "~0.4.4",
"@typescript-eslint/eslint-plugin": "^4.26.0",
"@typescript-eslint/parser": "^4.26.0",
"@vscode/test-electron": "^2.1.5",
"@typescript-eslint/eslint-plugin": "^5.38.0",
"@typescript-eslint/parser": "^5.38.0",
"@vscode/test-electron": "^2.2.0",
"ansi-colors": "^4.1.1",
"applicationinsights": "^2.3.5",
"babel-loader": "^8.2.5",
@@ -1305,9 +1390,11 @@
"chai-as-promised": "~7.1.1",
"css-loader": "~3.1.0",
"del": "^6.0.0",
"eslint": "~6.8.0",
"eslint": "^8.23.1",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-jest-dom": "^4.0.2",
"eslint-plugin-react": "~7.19.0",
"eslint-plugin-prettier": "^4.2.1",
"eslint-plugin-react": "^7.31.8",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-storybook": "^0.6.4",
"file-loader": "^6.2.0",
@@ -1324,17 +1411,18 @@
"mocha": "^10.0.0",
"mocha-sinon": "~2.1.2",
"npm-run-all": "^4.1.5",
"prettier": "~2.0.5",
"prettier": "^2.7.1",
"proxyquire": "~2.1.3",
"sinon": "~14.0.0",
"sinon-chai": "~3.5.0",
"tar-stream": "^2.2.0",
"through2": "^4.0.2",
"ts-jest": "^29.0.1",
"ts-json-schema-generator": "^1.1.2",
"ts-loader": "^8.1.0",
"ts-node": "^10.7.0",
"ts-protoc-gen": "^0.9.0",
"typescript": "^4.5.5",
"typescript-formatter": "^7.2.2",
"vsce": "^2.7.0",
"webpack": "^5.62.2",
"webpack-cli": "^4.6.0"
@@ -1350,7 +1438,7 @@
"prettier --write"
],
"./**/*.{ts,tsx}": [
"tsfmt -r",
"prettier --write",
"eslint --fix"
]
},

View File

@@ -0,0 +1,147 @@
/**
* This script helps after adding a new field to the GitHub API. You will
* need to modify this script to add the new field to the scenarios. This
* is just a template and should not be used as-is since it has already been
* applied.
*
* Depending on the actual implementation of the script, you might run into
* rate limits. If that happens, you can set a `GITHUB_TOKEN` environment
* variable. For example, use: ``export GITHUB_TOKEN=`gh auth token```.
*
* Usage: npx ts-node scripts/add-fields-to-scenarios.ts
*/
import * as fs from "fs-extra";
import * as path from "path";
import { Octokit, type RestEndpointMethodTypes } from "@octokit/rest";
import { throttling } from "@octokit/plugin-throttling";
import { getFiles } from "./util/files";
import type { GitHubApiRequest } from "../src/mocks/gh-api-request";
import { isGetVariantAnalysisRequest } from "../src/mocks/gh-api-request";
import { VariantAnalysis } from "../src/remote-queries/gh-api/variant-analysis";
import { RepositoryWithMetadata } from "../src/remote-queries/gh-api/repository";
const extensionDirectory = path.resolve(__dirname, "..");
const scenariosDirectory = path.resolve(
extensionDirectory,
"src/mocks/scenarios",
);
// Make sure we don't run into rate limits by automatically waiting until we can
// make another request.
const MyOctokit = Octokit.plugin(throttling);
const auth = process.env.GITHUB_TOKEN;
const octokit = new MyOctokit({
auth,
throttle: {
onRateLimit: (
retryAfter: number,
options: any,
octokit: Octokit,
): boolean => {
octokit.log.warn(
`Request quota exhausted for request ${options.method} ${options.url}. Retrying after ${retryAfter} seconds!`,
);
return true;
},
onSecondaryRateLimit: (
_retryAfter: number,
options: any,
octokit: Octokit,
): void => {
octokit.log.warn(
`SecondaryRateLimit detected for request ${options.method} ${options.url}`,
);
},
},
});
const repositories = new Map<
number,
RestEndpointMethodTypes["repos"]["get"]["response"]["data"]
>();
async function addFieldsToRepository(repository: RepositoryWithMetadata) {
if (!repositories.has(repository.id)) {
const [owner, repo] = repository.full_name.split("/");
const apiRepository = await octokit.repos.get({
owner,
repo,
});
repositories.set(repository.id, apiRepository.data);
}
const apiRepository = repositories.get(repository.id)!;
repository.stargazers_count = apiRepository.stargazers_count;
repository.updated_at = apiRepository.updated_at;
}
async function addFieldsToScenarios() {
if (!(await fs.pathExists(scenariosDirectory))) {
console.error("Scenarios directory does not exist: " + scenariosDirectory);
return;
}
for await (const file of getFiles(scenariosDirectory)) {
if (!file.endsWith(".json")) {
continue;
}
const data: GitHubApiRequest = await fs.readJson(file);
if (!isGetVariantAnalysisRequest(data)) {
continue;
}
if (!data.response.body || !("controller_repo" in data.response.body)) {
continue;
}
console.log(
`Adding fields to '${path.relative(scenariosDirectory, file)}'`,
);
const variantAnalysis = data.response.body as VariantAnalysis;
if (variantAnalysis.scanned_repositories) {
for (const item of variantAnalysis.scanned_repositories) {
await addFieldsToRepository(item.repository);
}
}
if (variantAnalysis.skipped_repositories?.access_mismatch_repos) {
for (const item of variantAnalysis.skipped_repositories
.access_mismatch_repos.repositories) {
await addFieldsToRepository(item);
}
}
if (variantAnalysis.skipped_repositories?.no_codeql_db_repos) {
for (const item of variantAnalysis.skipped_repositories.no_codeql_db_repos
.repositories) {
await addFieldsToRepository(item);
}
}
if (variantAnalysis.skipped_repositories?.over_limit_repos) {
for (const item of variantAnalysis.skipped_repositories.over_limit_repos
.repositories) {
await addFieldsToRepository(item);
}
}
await fs.writeJson(file, data, { spaces: 2 });
}
}
addFieldsToScenarios().catch((e) => {
console.error(e);
process.exit(2);
});

View File

@@ -0,0 +1,81 @@
/**
* This script helps after recording a scenario to be used for replaying
* with the mock GitHub API server.
*
* Once the scenario has been recorded, it's often useful to remove some of
* the requests to speed up the replay, particularly ones that fetch the
* variant analysis status. Once some of the requests have manually been
* removed, this script can be used to update the numbering of the files.
*
* Usage: npx ts-node scripts/fix-scenario-file-numbering.ts <scenario-name>
*/
import * as fs from "fs-extra";
import * as path from "path";
if (process.argv.length !== 3) {
console.error("Expected 1 argument - the scenario name");
}
const scenarioName = process.argv[2];
const extensionDirectory = path.resolve(__dirname, "..");
const scenariosDirectory = path.resolve(
extensionDirectory,
"src/mocks/scenarios",
);
const scenarioDirectory = path.resolve(scenariosDirectory, scenarioName);
async function fixScenarioFiles() {
console.log(scenarioDirectory);
if (!(await fs.pathExists(scenarioDirectory))) {
console.error("Scenario directory does not exist: " + scenarioDirectory);
return;
}
const files = await fs.readdir(scenarioDirectory);
const orderedFiles = files.sort((a, b) => {
const aNum = parseInt(a.split("-")[0]);
const bNum = parseInt(b.split("-")[0]);
return aNum - bNum;
});
let index = 0;
for (const file of orderedFiles) {
const ext = path.extname(file);
if (ext === ".json") {
const fileName = path.basename(file, ext);
const fileCurrentIndex = parseInt(fileName.split("-")[0]);
const fileNameWithoutIndex = fileName.split("-")[1];
if (fileCurrentIndex !== index) {
const newFileName = `${index}-${fileNameWithoutIndex}${ext}`;
const oldFilePath = path.join(scenarioDirectory, file);
const newFilePath = path.join(scenarioDirectory, newFileName);
console.log(`Rename: ${oldFilePath} -> ${newFilePath}`);
await fs.rename(oldFilePath, newFilePath);
if (fileNameWithoutIndex === "getVariantAnalysisRepoResult") {
const oldZipFileName = `${fileCurrentIndex}-getVariantAnalysisRepoResult.body.zip`;
const newZipFileName = `${index}-getVariantAnalysisRepoResult.body.zip`;
const oldZipFilePath = path.join(scenarioDirectory, oldZipFileName);
const newZipFilePath = path.join(scenarioDirectory, newZipFileName);
console.log(`Rename: ${oldZipFilePath} -> ${newZipFilePath}`);
await fs.rename(oldZipFilePath, newZipFilePath);
const json = await fs.readJson(newFilePath);
json.response.body = `file:${newZipFileName}`;
console.log(`Response.body change to ${json.response.body}`);
await fs.writeJSON(newFilePath, json);
}
}
index++;
}
}
}
fixScenarioFiles().catch((e) => {
console.error(e);
process.exit(2);
});

View File

@@ -0,0 +1,77 @@
import * as fs from "fs-extra";
import * as path from "path";
import Ajv from "ajv";
import * as tsj from "ts-json-schema-generator";
import { getFiles } from "./util/files";
const extensionDirectory = path.resolve(__dirname, "..");
const rootDirectory = path.resolve(extensionDirectory, "../..");
const scenariosDirectory = path.resolve(
extensionDirectory,
"src/mocks/scenarios",
);
const debug = process.env.RUNNER_DEBUG || process.argv.includes("--debug");
async function lintScenarios() {
const schema = tsj
.createGenerator({
path: path.resolve(extensionDirectory, "src/mocks/gh-api-request.ts"),
tsconfig: path.resolve(extensionDirectory, "tsconfig.json"),
type: "GitHubApiRequest",
skipTypeCheck: true,
topRef: true,
additionalProperties: true,
})
.createSchema("GitHubApiRequest");
const ajv = new Ajv();
if (!ajv.validateSchema(schema)) {
throw new Error("Invalid schema: " + ajv.errorsText());
}
const validate = await ajv.compile(schema);
let invalidFiles = 0;
if (!(await fs.pathExists(scenariosDirectory))) {
console.error("Scenarios directory does not exist: " + scenariosDirectory);
// Do not exit with a non-zero status code, as this is not a fatal error.
return;
}
for await (const file of getFiles(scenariosDirectory)) {
if (!file.endsWith(".json")) {
continue;
}
const contents = await fs.readFile(file, "utf8");
const data = JSON.parse(contents);
if (!validate(data)) {
validate.errors?.forEach((error) => {
// https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message
console.log(
`::error file=${path.relative(rootDirectory, file)}::${
error.instancePath
}: ${error.message}`,
);
});
invalidFiles++;
} else if (debug) {
console.log(`File '${path.relative(rootDirectory, file)}' is valid`);
}
}
if (invalidFiles > 0) {
process.exit(1);
}
}
lintScenarios().catch((e) => {
console.error(e);
process.exit(2);
});

View File

@@ -0,0 +1,10 @@
{
"$schema": "https://json.schemastore.org/tsconfig",
"extends": "../tsconfig.json",
"include": ["**/*.ts"],
"exclude": [],
"compilerOptions": {
"rootDir": "..",
"noEmit": true
}
}

View File

@@ -0,0 +1,15 @@
import * as fs from "fs-extra";
import * as path from "path";
// https://stackoverflow.com/a/45130990
export async function* getFiles(dir: string): AsyncGenerator<string> {
const dirents = await fs.readdir(dir, { withFileTypes: true });
for (const dirent of dirents) {
const res = path.resolve(dir, dirent.name);
if (dirent.isDirectory()) {
yield* getFiles(res);
} else {
yield res;
}
}
}
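The recursive generator above yields absolute file paths depth-first; both scripts above consume it in the same way. A minimal usage sketch, assuming it is run from the scripts directory (the directory literal is illustrative only):

import { getFiles } from "./util/files";

async function listScenarioJsonFiles(): Promise<void> {
  // Walk the (assumed) scenarios directory and print every JSON file found.
  for await (const file of getFiles("src/mocks/scenarios")) {
    if (file.endsWith(".json")) {
      console.log(file);
    }
  }
}

listScenarioJsonFiles().catch((e) => {
  console.error(e);
  process.exit(2);
});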

View File

@@ -5,13 +5,17 @@ import {
ViewColumn,
Uri,
WebviewPanelOptions,
WebviewOptions
} from 'vscode';
import * as path from 'path';
WebviewOptions,
} from "vscode";
import * as path from "path";
import { DisposableObject } from './pure/disposable-object';
import { tmpDir } from './helpers';
import { getHtmlForWebview, WebviewMessage, WebviewView } from './interface-utils';
import { DisposableObject, DisposeHandler } from "./pure/disposable-object";
import { tmpDir } from "./helpers";
import {
getHtmlForWebview,
WebviewMessage,
WebviewView,
} from "./interface-utils";
export type WebviewPanelConfig = {
viewId: string;
@@ -20,35 +24,54 @@ export type WebviewPanelConfig = {
view: WebviewView;
preserveFocus?: boolean;
additionalOptions?: WebviewPanelOptions & WebviewOptions;
}
};
export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMessage extends WebviewMessage> extends DisposableObject {
export abstract class AbstractWebview<
ToMessage extends WebviewMessage,
FromMessage extends WebviewMessage,
> extends DisposableObject {
protected panel: WebviewPanel | undefined;
protected panelLoaded = false;
protected panelLoadedCallBacks: (() => void)[] = [];
constructor(
protected readonly ctx: ExtensionContext
) {
private panelResolves?: Array<(panel: WebviewPanel) => void>;
constructor(protected readonly ctx: ExtensionContext) {
super();
}
public async restoreView(panel: WebviewPanel): Promise<void> {
this.panel = panel;
this.setupPanel(panel);
const config = await this.getPanelConfig();
this.setupPanel(panel, config);
}
protected get isShowingPanel() {
return !!this.panel;
}
protected getPanel(): WebviewPanel {
protected async getPanel(): Promise<WebviewPanel> {
if (this.panel == undefined) {
const { ctx } = this;
const config = this.getPanelConfig();
// This is an async method, so in theory this method can be called concurrently. To ensure that we don't
// create two panels, we use a promise that resolves when the panel is created. This way, if the panel is
// being created, the promise will resolve when it is done.
if (this.panelResolves !== undefined) {
return new Promise((resolve) => {
if (this.panel !== undefined) {
resolve(this.panel);
return;
}
this.panel = Window.createWebviewPanel(
this.panelResolves?.push(resolve);
});
}
this.panelResolves = [];
const config = await this.getPanelConfig();
const panel = Window.createWebviewPanel(
config.viewId,
config.title,
{ viewColumn: config.viewColumn, preserveFocus: config.preserveFocus },
@@ -60,18 +83,21 @@ export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMess
localResourceRoots: [
...(config.additionalOptions?.localResourceRoots ?? []),
Uri.file(tmpDir.name),
Uri.file(path.join(ctx.extensionPath, 'out'))
Uri.file(path.join(ctx.extensionPath, "out")),
],
}
},
);
this.setupPanel(this.panel);
this.panel = panel;
this.setupPanel(panel, config);
this.panelResolves.forEach((resolve) => resolve(panel));
this.panelResolves = undefined;
}
return this.panel;
}
protected setupPanel(panel: WebviewPanel): void {
const config = this.getPanelConfig();
protected setupPanel(panel: WebviewPanel, config: WebviewPanelConfig): void {
this.push(
panel.onDidDispose(
() => {
@@ -80,8 +106,8 @@ export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMess
this.onPanelDispose();
},
null,
this.ctx.subscriptions
)
this.ctx.subscriptions,
),
);
panel.webview.html = getHtmlForWebview(
@@ -90,18 +116,20 @@ export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMess
config.view,
{
allowInlineStyles: true,
}
},
);
this.push(
panel.webview.onDidReceiveMessage(
async (e) => this.onMessage(e),
undefined,
this.ctx.subscriptions
)
this.ctx.subscriptions,
),
);
}
protected abstract getPanelConfig(): WebviewPanelConfig;
protected abstract getPanelConfig():
| WebviewPanelConfig
| Promise<WebviewPanelConfig>;
protected abstract onPanelDispose(): void;
@@ -123,7 +151,13 @@ export abstract class AbstractWebview<ToMessage extends WebviewMessage, FromMess
this.panelLoadedCallBacks = [];
}
protected postMessage(msg: ToMessage): Thenable<boolean> {
return this.getPanel().webview.postMessage(msg);
protected async postMessage(msg: ToMessage): Promise<boolean> {
const panel = await this.getPanel();
return panel.webview.postMessage(msg);
}
public dispose(disposeHandler?: DisposeHandler) {
this.panel?.dispose();
super.dispose(disposeHandler);
}
}
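The inline comment in the diff above explains that getPanel may now be called concurrently, so panel creation is guarded by a list of pending resolvers. A minimal sketch of that single-creation pattern, separated from the webview specifics (the class and method names here are illustrative, not taken from the extension):

class SingleCreation<T> {
  private value: T | undefined;
  private pendingResolves?: Array<(value: T) => void>;

  constructor(private readonly create: () => Promise<T>) {}

  // Returns the existing value, or creates it exactly once even under concurrent calls.
  async get(): Promise<T> {
    if (this.value !== undefined) {
      return this.value;
    }
    if (this.pendingResolves !== undefined) {
      // Creation is already in flight; queue this caller until it finishes.
      return new Promise((resolve) => this.pendingResolves?.push(resolve));
    }
    this.pendingResolves = [];
    const value = await this.create();
    this.value = value;
    this.pendingResolves.forEach((resolve) => resolve(value));
    this.pendingResolves = undefined;
    return value;
  }
}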

View File

@@ -1,11 +1,11 @@
import * as fs from 'fs-extra';
import * as unzipper from 'unzipper';
import * as vscode from 'vscode';
import { logger } from './logging';
import * as fs from "fs-extra";
import * as unzipper from "unzipper";
import * as vscode from "vscode";
import { logger } from "./logging";
// All path operations in this file must be on paths *within* the zip
// archive.
import * as _path from 'path';
import * as _path from "path";
const path = _path.posix;
export class File implements vscode.FileStat {
@@ -72,19 +72,20 @@ export function encodeSourceArchiveUri(ref: ZipFileReference): vscode.Uri {
// Since we will use an authority component, we add a leading slash if necessary
// (paths on Windows usually start with the drive letter).
let sourceArchiveZipPathStartIndex: number;
if (encodedPath.startsWith('/')) {
if (encodedPath.startsWith("/")) {
sourceArchiveZipPathStartIndex = 0;
} else {
encodedPath = '/' + encodedPath;
encodedPath = "/" + encodedPath;
sourceArchiveZipPathStartIndex = 1;
}
// The authority component of the URI records the 0-based inclusive start and exclusive end index
// of the source archive zip path within the path component of the resulting URI.
// This lets us separate the paths, ignoring the leading slash if we added one.
const sourceArchiveZipPathEndIndex = sourceArchiveZipPathStartIndex + sourceArchiveZipPath.length;
const sourceArchiveZipPathEndIndex =
sourceArchiveZipPathStartIndex + sourceArchiveZipPath.length;
const authority = `${sourceArchiveZipPathStartIndex}-${sourceArchiveZipPathEndIndex}`;
return vscode.Uri.parse(zipArchiveScheme + ':/', true).with({
return vscode.Uri.parse(zipArchiveScheme + ":/", true).with({
path: encodedPath,
authority,
});
@@ -99,7 +100,7 @@ export function encodeSourceArchiveUri(ref: ZipFileReference): vscode.Uri {
export function encodeArchiveBasePath(sourceArchiveZipPath: string) {
return encodeSourceArchiveUri({
sourceArchiveZipPath,
pathWithinSourceArchive: ''
pathWithinSourceArchive: "",
});
}
@@ -107,7 +108,9 @@ const sourceArchiveUriAuthorityPattern = /^(\d+)-(\d+)$/;
class InvalidSourceArchiveUriError extends Error {
constructor(uri: vscode.Uri) {
super(`Can't decode uri ${uri}: authority should be of the form startIndex-endIndex (where both indices are integers).`);
super(
`Can't decode uri ${uri}: authority should be of the form startIndex-endIndex (where both indices are integers).`,
);
}
}
@@ -115,22 +118,26 @@ class InvalidSourceArchiveUriError extends Error {
export function decodeSourceArchiveUri(uri: vscode.Uri): ZipFileReference {
if (!uri.authority) {
// Uri is malformed, but this is recoverable
void logger.log(`Warning: ${new InvalidSourceArchiveUriError(uri).message}`);
void logger.log(
`Warning: ${new InvalidSourceArchiveUriError(uri).message}`,
);
return {
pathWithinSourceArchive: '/',
sourceArchiveZipPath: uri.path
pathWithinSourceArchive: "/",
sourceArchiveZipPath: uri.path,
};
}
const match = sourceArchiveUriAuthorityPattern.exec(uri.authority);
if (match === null)
throw new InvalidSourceArchiveUriError(uri);
if (match === null) throw new InvalidSourceArchiveUriError(uri);
const zipPathStartIndex = parseInt(match[1]);
const zipPathEndIndex = parseInt(match[2]);
if (isNaN(zipPathStartIndex) || isNaN(zipPathEndIndex))
throw new InvalidSourceArchiveUriError(uri);
return {
pathWithinSourceArchive: uri.path.substring(zipPathEndIndex) || '/',
sourceArchiveZipPath: uri.path.substring(zipPathStartIndex, zipPathEndIndex),
pathWithinSourceArchive: uri.path.substring(zipPathEndIndex) || "/",
sourceArchiveZipPath: uri.path.substring(
zipPathStartIndex,
zipPathEndIndex,
),
};
}
@@ -139,7 +146,7 @@ export function decodeSourceArchiveUri(uri: vscode.Uri): ZipFileReference {
*/
function ensureFile(map: DirectoryHierarchyMap, file: string) {
const dirname = path.dirname(file);
if (dirname === '.') {
if (dirname === ".") {
const error = `Ill-formed path ${file} in zip archive (expected absolute path)`;
void logger.log(error);
throw new Error(error);
@@ -154,8 +161,9 @@ function ensureFile(map: DirectoryHierarchyMap, file: string) {
function ensureDir(map: DirectoryHierarchyMap, dir: string) {
const parent = path.dirname(dir);
if (!map.has(dir)) {
map.set(dir, new Map);
if (dir !== parent) { // not the root directory
map.set(dir, new Map());
if (dir !== parent) {
// not the root directory
ensureDir(map, parent);
map.get(parent)!.set(path.basename(dir), vscode.FileType.Directory);
}
@@ -168,16 +176,23 @@ type Archive = {
};
async function parse_zip(zipPath: string): Promise<Archive> {
if (!await fs.pathExists(zipPath))
if (!(await fs.pathExists(zipPath)))
throw vscode.FileSystemError.FileNotFound(zipPath);
const archive: Archive = { unzipped: await unzipper.Open.file(zipPath), dirMap: new Map };
archive.unzipped.files.forEach(f => { ensureFile(archive.dirMap, path.resolve('/', f.path)); });
const archive: Archive = {
unzipped: await unzipper.Open.file(zipPath),
dirMap: new Map(),
};
archive.unzipped.files.forEach((f) => {
ensureFile(archive.dirMap, path.resolve("/", f.path));
});
return archive;
}
export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
private readOnlyError = vscode.FileSystemError.NoPermissions('write operation attempted, but source archive filesystem is readonly');
private archives: Map<string, Promise<Archive>> = new Map;
private readOnlyError = vscode.FileSystemError.NoPermissions(
"write operation attempted, but source archive filesystem is readonly",
);
private archives: Map<string, Promise<Archive>> = new Map();
private async getArchive(zipPath: string): Promise<Archive> {
if (!this.archives.has(zipPath)) {
@@ -186,8 +201,7 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
return await this.archives.get(zipPath)!;
}
root = new Directory('');
root = new Directory("");
// metadata
@@ -199,7 +213,8 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
const ref = decodeSourceArchiveUri(uri);
const archive = await this.getArchive(ref.sourceArchiveZipPath);
const contents = archive.dirMap.get(ref.pathWithinSourceArchive);
const result = contents === undefined ? undefined : Array.from(contents.entries());
const result =
contents === undefined ? undefined : Array.from(contents.entries());
if (result === undefined) {
throw vscode.FileSystemError.FileNotFound(uri);
}
@@ -218,11 +233,19 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
// write operations, all disabled
writeFile(_uri: vscode.Uri, _content: Uint8Array, _options: { create: boolean; overwrite: boolean }): void {
writeFile(
_uri: vscode.Uri,
_content: Uint8Array,
_options: { create: boolean; overwrite: boolean },
): void {
throw this.readOnlyError;
}
rename(_oldUri: vscode.Uri, _newUri: vscode.Uri, _options: { overwrite: boolean }): void {
rename(
_oldUri: vscode.Uri,
_newUri: vscode.Uri,
_options: { overwrite: boolean },
): void {
throw this.readOnlyError;
}
@@ -244,18 +267,18 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
// use '/' as path separator throughout
const reqPath = ref.pathWithinSourceArchive;
const file = archive.unzipped.files.find(
f => {
const absolutePath = path.resolve('/', f.path);
return absolutePath === reqPath
|| absolutePath === path.join('/src_archive', reqPath);
}
);
const file = archive.unzipped.files.find((f) => {
const absolutePath = path.resolve("/", f.path);
return (
absolutePath === reqPath ||
absolutePath === path.join("/src_archive", reqPath)
);
});
if (file !== undefined) {
if (file.type === 'File') {
if (file.type === "File") {
return new File(reqPath, await file.buffer());
}
else { // file.type === 'Directory'
} else {
// file.type === 'Directory'
// I haven't observed this case in practice. Could it happen
// with a zip file that contains empty directories?
return new Directory(reqPath);
@@ -264,7 +287,11 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
if (archive.dirMap.has(reqPath)) {
return new Directory(reqPath);
}
throw vscode.FileSystemError.FileNotFound(`uri '${uri.toString()}', interpreted as '${reqPath}' in archive '${ref.sourceArchiveZipPath}'`);
throw vscode.FileSystemError.FileNotFound(
`uri '${uri.toString()}', interpreted as '${reqPath}' in archive '${
ref.sourceArchiveZipPath
}'`,
);
}
private async _lookupAsFile(uri: vscode.Uri): Promise<File> {
@@ -279,11 +306,14 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
private _emitter = new vscode.EventEmitter<vscode.FileChangeEvent[]>();
readonly onDidChangeFile: vscode.Event<vscode.FileChangeEvent[]> = this._emitter.event;
readonly onDidChangeFile: vscode.Event<vscode.FileChangeEvent[]> =
this._emitter.event;
watch(_resource: vscode.Uri): vscode.Disposable {
// ignore, fires for all changes...
return new vscode.Disposable(() => { /**/ });
return new vscode.Disposable(() => {
/**/
});
}
}
@@ -295,15 +325,17 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
* (cf. https://www.ietf.org/rfc/rfc2396.txt (Appendix A, page 26) for
* the fact that hyphens are allowed in uri schemes)
*/
export const zipArchiveScheme = 'codeql-zip-archive';
export const zipArchiveScheme = "codeql-zip-archive";
export function activate(ctx: vscode.ExtensionContext) {
ctx.subscriptions.push(vscode.workspace.registerFileSystemProvider(
zipArchiveScheme,
new ArchiveFileSystemProvider(),
{
isCaseSensitive: true,
isReadonly: true,
}
));
ctx.subscriptions.push(
vscode.workspace.registerFileSystemProvider(
zipArchiveScheme,
new ArchiveFileSystemProvider(),
{
isCaseSensitive: true,
isReadonly: true,
},
),
);
}
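To make the source-archive URI encoding above concrete, here is a hedged round-trip sketch using the encodeSourceArchiveUri and decodeSourceArchiveUri functions from the diff above (the zip path and file path are invented; the expected URI shape is inferred from the encoding logic shown in the diff, where the authority stores the start and end indices of the zip path within the URI path):

// "/home/user/src.zip" is 18 characters long, so the authority is "0-18".
const uri = encodeSourceArchiveUri({
  sourceArchiveZipPath: "/home/user/src.zip",
  pathWithinSourceArchive: "/main.c",
});
// Expected shape: codeql-zip-archive://0-18/home/user/src.zip/main.c

const ref = decodeSourceArchiveUri(uri);
// ref.sourceArchiveZipPath === "/home/user/src.zip"
// ref.pathWithinSourceArchive === "/main.c"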

View File

@@ -11,17 +11,21 @@ import {
TextEditorSelectionChangeKind,
Location,
Range,
Uri
} from 'vscode';
import * as path from 'path';
Uri,
} from "vscode";
import * as path from "path";
import { DatabaseItem } from './databases';
import { UrlValue, BqrsId } from './pure/bqrs-cli-types';
import { showLocation } from './interface-utils';
import { isStringLoc, isWholeFileLoc, isLineColumnLoc } from './pure/bqrs-utils';
import { commandRunner } from './commandRunner';
import { DisposableObject } from './pure/disposable-object';
import { showAndLogErrorMessage } from './helpers';
import { DatabaseItem } from "./databases";
import { UrlValue, BqrsId } from "./pure/bqrs-cli-types";
import { showLocation } from "./interface-utils";
import {
isStringLoc,
isWholeFileLoc,
isLineColumnLoc,
} from "./pure/bqrs-utils";
import { commandRunner } from "./commandRunner";
import { DisposableObject } from "./pure/disposable-object";
import { showAndLogErrorMessage } from "./helpers";
export interface AstItem {
id: BqrsId;
@@ -36,23 +40,25 @@ export interface ChildAstItem extends AstItem {
parent: ChildAstItem | AstItem;
}
class AstViewerDataProvider extends DisposableObject implements TreeDataProvider<AstItem> {
class AstViewerDataProvider
extends DisposableObject
implements TreeDataProvider<AstItem>
{
public roots: AstItem[] = [];
public db: DatabaseItem | undefined;
private _onDidChangeTreeData =
this.push(new EventEmitter<AstItem | undefined>());
private _onDidChangeTreeData = this.push(
new EventEmitter<AstItem | undefined>(),
);
readonly onDidChangeTreeData: Event<AstItem | undefined> =
this._onDidChangeTreeData.event;
constructor() {
super();
this.push(
commandRunner('codeQLAstViewer.gotoCode',
async (item: AstItem) => {
await showLocation(item.fileLocation);
})
commandRunner("codeQLAstViewer.gotoCode", async (item: AstItem) => {
await showLocation(item.fileLocation);
}),
);
}
@@ -61,7 +67,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
}
getChildren(item?: AstItem): ProviderResult<AstItem[]> {
const children = item ? item.children : this.roots;
return children.sort((c1, c2) => (c1.order - c2.order));
return children.sort((c1, c2) => c1.order - c2.order);
}
getParent(item: ChildAstItem): ProviderResult<AstItem> {
@@ -74,22 +80,22 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
const state = item.children.length
? TreeItemCollapsibleState.Collapsed
: TreeItemCollapsibleState.None;
const treeItem = new TreeItem(item.label || '', state);
treeItem.description = line ? `Line ${line}` : '';
const treeItem = new TreeItem(item.label || "", state);
treeItem.description = line ? `Line ${line}` : "";
treeItem.id = String(item.id);
treeItem.tooltip = `${treeItem.description} ${treeItem.label}`;
treeItem.command = {
command: 'codeQLAstViewer.gotoCode',
title: 'Go To Code',
command: "codeQLAstViewer.gotoCode",
title: "Go To Code",
tooltip: `Go To ${item.location}`,
arguments: [item]
arguments: [item],
};
return treeItem;
}
private extractLineInfo(loc?: UrlValue) {
if (!loc) {
return '';
return "";
} else if (isStringLoc(loc)) {
return loc;
} else if (isWholeFileLoc(loc)) {
@@ -97,7 +103,7 @@ class AstViewerDataProvider extends DisposableObject implements TreeDataProvider
} else if (isLineColumnLoc(loc)) {
return loc.startLine;
} else {
return '';
return "";
}
}
}
@@ -111,19 +117,21 @@ export class AstViewer extends DisposableObject {
super();
this.treeDataProvider = new AstViewerDataProvider();
this.treeView = window.createTreeView('codeQLAstViewer', {
this.treeView = window.createTreeView("codeQLAstViewer", {
treeDataProvider: this.treeDataProvider,
showCollapseAll: true
showCollapseAll: true,
});
this.push(this.treeView);
this.push(this.treeDataProvider);
this.push(
commandRunner('codeQLAstViewer.clear', async () => {
commandRunner("codeQLAstViewer.clear", async () => {
this.clear();
})
}),
);
this.push(
window.onDidChangeTextEditorSelection(this.updateTreeSelection, this),
);
this.push(window.onDidChangeTextEditorSelection(this.updateTreeSelection, this));
}
updateRoots(roots: AstItem[], db: DatabaseItem, fileUri: Uri) {
@@ -135,8 +143,10 @@ export class AstViewer extends DisposableObject {
// Handle error on reveal. This could happen if
// the tree view is disposed during the reveal.
this.treeView.reveal(roots[0], { focus: false })?.then(
() => { /**/ },
err => showAndLogErrorMessage(err)
() => {
/**/
},
(err) => showAndLogErrorMessage(err),
);
}
@@ -149,7 +159,10 @@ export class AstViewer extends DisposableObject {
// range that contains the selection.
// Some nodes do not have a location, but their children might, so we must
// recurse through location-less AST nodes to see if children are correct.
function findBest(selectedRange: Range, items?: AstItem[]): AstItem | undefined {
function findBest(
selectedRange: Range,
items?: AstItem[],
): AstItem | undefined {
if (!items || !items.length) {
return;
}
@@ -188,8 +201,10 @@ export class AstViewer extends DisposableObject {
// Handle error on reveal. This could happen if
// the tree view is disposed during the reveal.
this.treeView.reveal(targetItem)?.then(
() => { /**/ },
err => showAndLogErrorMessage(err)
() => {
/**/
},
(err) => showAndLogErrorMessage(err),
);
}
}

View File

@@ -1,13 +1,13 @@
import * as vscode from 'vscode';
import * as Octokit from '@octokit/rest';
import { retry } from '@octokit/plugin-retry';
import * as vscode from "vscode";
import * as Octokit from "@octokit/rest";
import { retry } from "@octokit/plugin-retry";
const GITHUB_AUTH_PROVIDER_ID = 'github';
const GITHUB_AUTH_PROVIDER_ID = "github";
// We need 'repo' scope for triggering workflows and 'gist' scope for exporting results to Gist.
// For a comprehensive list of scopes, see:
// https://docs.github.com/apps/building-oauth-apps/understanding-scopes-for-oauth-apps
const SCOPES = ['repo', 'gist'];
const SCOPES = ["repo", "gist"];
/**
* Handles authentication to GitHub, using the VS Code [authentication API](https://code.visualstudio.com/api/references/vscode-api#authentication).
@@ -18,7 +18,7 @@ export class Credentials {
// Explicitly make the constructor private, so that we can't accidentally call the constructor from outside the class
// without also initializing the class.
// eslint-disable-next-line @typescript-eslint/no-empty-function
private constructor() { }
private constructor() {}
/**
* Initializes an instance of credentials with an octokit instance.
@@ -29,7 +29,9 @@ export class Credentials {
* @param context The extension context.
* @returns An instance of credentials.
*/
static async initialize(context: vscode.ExtensionContext): Promise<Credentials> {
static async initialize(
context: vscode.ExtensionContext,
): Promise<Credentials> {
const c = new Credentials();
c.registerListeners(context);
c.octokit = await c.createOctokit(false);
@@ -50,17 +52,24 @@ export class Credentials {
return c;
}
private async createOctokit(createIfNone: boolean, overrideToken?: string): Promise<Octokit.Octokit | undefined> {
private async createOctokit(
createIfNone: boolean,
overrideToken?: string,
): Promise<Octokit.Octokit | undefined> {
if (overrideToken) {
return new Octokit.Octokit({ auth: overrideToken, retry });
}
const session = await vscode.authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, { createIfNone });
const session = await vscode.authentication.getSession(
GITHUB_AUTH_PROVIDER_ID,
SCOPES,
{ createIfNone },
);
if (session) {
return new Octokit.Octokit({
auth: session.accessToken,
retry
retry,
});
} else {
return undefined;
@@ -69,11 +78,13 @@ export class Credentials {
registerListeners(context: vscode.ExtensionContext): void {
// Sessions are changed when a user logs in or logs out.
context.subscriptions.push(vscode.authentication.onDidChangeSessions(async e => {
if (e.provider.id === GITHUB_AUTH_PROVIDER_ID) {
this.octokit = await this.createOctokit(false);
}
}));
context.subscriptions.push(
vscode.authentication.onDidChangeSessions(async (e) => {
if (e.provider.id === GITHUB_AUTH_PROVIDER_ID) {
this.octokit = await this.createOctokit(false);
}
}),
);
}
/**
@@ -91,7 +102,7 @@ export class Credentials {
if (!this.octokit) {
if (requireAuthentication) {
throw new Error('Did not initialize Octokit.');
throw new Error("Did not initialize Octokit.");
}
// We don't want to set this in this.octokit because that would prevent

View File

@@ -1,25 +1,30 @@
import * as semver from 'semver';
import { runCodeQlCliCommand } from './cli';
import { Logger } from './logging';
import { getErrorMessage } from './pure/helpers-pure';
import * as semver from "semver";
import { runCodeQlCliCommand } from "./cli";
import { Logger } from "./logging";
import { getErrorMessage } from "./pure/helpers-pure";
/**
* Get the version of a CodeQL CLI.
*/
export async function getCodeQlCliVersion(codeQlPath: string, logger: Logger): Promise<semver.SemVer | undefined> {
export async function getCodeQlCliVersion(
codeQlPath: string,
logger: Logger,
): Promise<semver.SemVer | undefined> {
try {
const output: string = await runCodeQlCliCommand(
codeQlPath,
['version'],
['--format=terse'],
'Checking CodeQL version',
logger
["version"],
["--format=terse"],
"Checking CodeQL version",
logger,
);
return semver.parse(output.trim()) || undefined;
} catch (e) {
// Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
// Either way, we can't determine compatibility.
void logger.log(`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`);
void logger.log(
`Failed to run 'codeql version'. Reason: ${getErrorMessage(e)}`,
);
return undefined;
}
}
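To illustrate how this helper is meant to be called, a minimal sketch (the CLI path is hypothetical and not part of this diff; `logger` is the extension's logger instance):

const version = await getCodeQlCliVersion("/usr/local/bin/codeql", logger);
if (version) {
  // semver.SemVer exposes the parsed version string.
  void logger.log(`Found CodeQL CLI ${version.version}`);
}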

File diff suppressed because it is too large

View File

@@ -4,12 +4,12 @@ import {
window as Window,
commands,
Disposable,
ProgressLocation
} from 'vscode';
import { showAndLogErrorMessage, showAndLogWarningMessage } from './helpers';
import { logger } from './logging';
import { getErrorMessage, getErrorStack } from './pure/helpers-pure';
import { telemetryListener } from './telemetry';
ProgressLocation,
} from "vscode";
import { showAndLogErrorMessage, showAndLogWarningMessage } from "./helpers";
import { logger } from "./logging";
import { getErrorMessage, getErrorStack } from "./pure/helpers-pure";
import { telemetryListener } from "./telemetry";
export class UserCancellationException extends Error {
/**
@@ -67,7 +67,7 @@ export type ProgressTask<R> = (
* @param args arguments passed to this task passed on from
* `commands.registerCommand`.
*/
type NoProgressTask = ((...args: any[]) => Promise<any>);
type NoProgressTask = (...args: any[]) => Promise<any>;
/**
* This mediates between the kind of progress callbacks we want to
@@ -91,15 +91,18 @@ export function withProgress<R>(
...args: any[]
): Thenable<R> {
let progressAchieved = 0;
return Window.withProgress(options,
(progress, token) => {
return task(p => {
return Window.withProgress(options, (progress, token) => {
return task(
(p) => {
const { message, step, maxStep } = p;
const increment = 100 * (step - progressAchieved) / maxStep;
const increment = (100 * (step - progressAchieved)) / maxStep;
progressAchieved = step;
progress.report({ message, increment });
}, token, ...args);
});
},
token,
...args,
);
});
}
/**
@@ -138,7 +141,7 @@ export function commandRunner(
? `${errorMessage}\n${errorStack}`
: errorMessage;
void showAndLogErrorMessage(errorMessage, {
fullMessage
fullMessage,
});
}
return undefined;
@@ -163,14 +166,14 @@ export function commandRunnerWithProgress<R>(
commandId: string,
task: ProgressTask<R>,
progressOptions: Partial<ProgressOptions>,
outputLogger = logger
outputLogger = logger,
): Disposable {
return commands.registerCommand(commandId, async (...args: any[]) => {
const startTime = Date.now();
let error: Error | undefined;
const progressOptionsWithDefaults = {
location: ProgressLocation.Notification,
...progressOptions
...progressOptions,
};
try {
return await withProgress(progressOptionsWithDefaults, task, ...args);
@@ -192,7 +195,7 @@ export function commandRunnerWithProgress<R>(
: errorMessage;
void showAndLogErrorMessage(errorMessage, {
outputLogger,
fullMessage
fullMessage,
});
}
return undefined;
@@ -216,23 +219,26 @@ export function reportStreamProgress(
readable: NodeJS.ReadableStream,
messagePrefix: string,
totalNumBytes?: number,
progress?: ProgressCallback
progress?: ProgressCallback,
) {
if (progress && totalNumBytes) {
let numBytesDownloaded = 0;
const bytesToDisplayMB = (numBytes: number): string => `${(numBytes / (1024 * 1024)).toFixed(1)} MB`;
const bytesToDisplayMB = (numBytes: number): string =>
`${(numBytes / (1024 * 1024)).toFixed(1)} MB`;
const updateProgress = () => {
progress({
step: numBytesDownloaded,
maxStep: totalNumBytes,
message: `${messagePrefix} [${bytesToDisplayMB(numBytesDownloaded)} of ${bytesToDisplayMB(totalNumBytes)}]`,
message: `${messagePrefix} [${bytesToDisplayMB(
numBytesDownloaded,
)} of ${bytesToDisplayMB(totalNumBytes)}]`,
});
};
// Display the progress straight away rather than waiting for the first chunk.
updateProgress();
readable.on('data', data => {
readable.on("data", (data) => {
numBytesDownloaded += data.length;
updateProgress();
});
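For orientation, a minimal sketch of how these progress helpers fit together (the command id, title, and task body are hypothetical, not part of this diff):

const disposable = commandRunnerWithProgress(
  "codeQL.exampleCommand",
  async (progress, token) => {
    progress({ step: 1, maxStep: 2, message: "Preparing" });
    // ... long-running work, checking token.isCancellationRequested ...
    progress({ step: 2, maxStep: 2, message: "Finishing" });
  },
  { title: "Running example command", cancellable: true },
);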

View File

@@ -0,0 +1,17 @@
import { Disposable } from "../pure/disposable-object";
import { AppEventEmitter } from "./events";
export interface App {
createEventEmitter<T>(): AppEventEmitter<T>;
mode: AppMode;
subscriptions: Disposable[];
extensionPath: string;
globalStoragePath: string;
workspaceStoragePath?: string;
}
export enum AppMode {
Production = 1,
Development = 2,
Test = 3,
}

View File

@@ -0,0 +1,10 @@
import { Disposable } from "../pure/disposable-object";
export interface AppEvent<T> {
(listener: (event: T) => void): Disposable;
}
export interface AppEventEmitter<T> {
event: AppEvent<T>;
fire(data: T): void;
}
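A brief sketch of how the new App and AppEventEmitter abstractions are intended to be consumed (hypothetical caller code, not part of this diff):

function watchMessages(app: App): void {
  const emitter = app.createEventEmitter<string>();
  // Calling the event with a listener returns a Disposable that unsubscribes it.
  app.subscriptions.push(emitter.event((message) => console.log(message)));
  emitter.fire("hello from the App abstraction");
}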

View File

@@ -0,0 +1,51 @@
/**
* Represents a result that can be either a value or some errors.
*/
export class ValueResult<TValue> {
private constructor(
private readonly errorMsgs: string[],
private readonly val?: TValue,
) {}
public static ok<TValue>(value: TValue): ValueResult<TValue> {
if (value === undefined) {
throw new Error("Value must be set for successful result");
}
return new ValueResult([], value);
}
public static fail<TValue>(errorMsgs: string[]): ValueResult<TValue> {
if (errorMsgs.length === 0) {
throw new Error(
"At least one error message must be set for a failed result",
);
}
return new ValueResult<TValue>(errorMsgs, undefined);
}
public get isOk(): boolean {
return this.errorMsgs.length === 0;
}
public get isFailure(): boolean {
return this.errorMsgs.length > 0;
}
public get errors(): string[] {
if (!this.errorMsgs) {
throw new Error("Cannot get error for successful result");
}
return this.errorMsgs;
}
public get value(): TValue {
if (this.val === undefined) {
throw new Error("Cannot get value for unsuccessful result");
}
return this.val;
}
}
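A small usage sketch of ValueResult (the values and error message are hypothetical, not part of this diff):

const parsed = ValueResult.ok(42);
if (parsed.isOk) {
  console.log(parsed.value); // 42
}

const failed = ValueResult.fail<number>(["could not parse input"]);
if (failed.isFailure) {
  console.log(failed.errors); // ["could not parse input"]
}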

View File

@@ -0,0 +1,6 @@
import * as vscode from "vscode";
import { AppEventEmitter } from "../events";
export class VSCodeAppEventEmitter<T>
extends vscode.EventEmitter<T>
implements AppEventEmitter<T> {}

View File

@@ -0,0 +1,42 @@
import * as vscode from "vscode";
import { Disposable } from "../../pure/disposable-object";
import { App, AppMode } from "../app";
import { AppEventEmitter } from "../events";
import { VSCodeAppEventEmitter } from "./events";
export class ExtensionApp implements App {
public constructor(
public readonly extensionContext: vscode.ExtensionContext,
) {}
public get extensionPath(): string {
return this.extensionContext.extensionPath;
}
public get globalStoragePath(): string {
return this.extensionContext.globalStorageUri.fsPath;
}
public get workspaceStoragePath(): string | undefined {
return this.extensionContext.storageUri?.fsPath;
}
public get subscriptions(): Disposable[] {
return this.extensionContext.subscriptions;
}
public get mode(): AppMode {
switch (this.extensionContext.extensionMode) {
case vscode.ExtensionMode.Development:
return AppMode.Development;
case vscode.ExtensionMode.Test:
return AppMode.Test;
default:
return AppMode.Production;
}
}
public createEventEmitter<T>(): AppEventEmitter<T> {
return new VSCodeAppEventEmitter<T>();
}
}

View File

@@ -1,30 +1,34 @@
import {
ExtensionContext,
ViewColumn,
} from 'vscode';
import { ExtensionContext, ViewColumn } from "vscode";
import {
FromCompareViewMessage,
ToCompareViewMessage,
QueryCompareResult,
} from '../pure/interface-types';
import { Logger } from '../logging';
import { CodeQLCliServer } from '../cli';
import { DatabaseManager } from '../databases';
import { jumpToLocation } from '../interface-utils';
import { transformBqrsResultSet, RawResultSet, BQRSInfo } from '../pure/bqrs-cli-types';
import resultsDiff from './resultsDiff';
import { CompletedLocalQueryInfo } from '../query-results';
import { getErrorMessage } from '../pure/helpers-pure';
import { HistoryItemLabelProvider } from '../history-item-label-provider';
import { AbstractWebview, WebviewPanelConfig } from '../abstract-webview';
} from "../pure/interface-types";
import { Logger } from "../logging";
import { CodeQLCliServer } from "../cli";
import { DatabaseManager } from "../databases";
import { jumpToLocation } from "../interface-utils";
import {
transformBqrsResultSet,
RawResultSet,
BQRSInfo,
} from "../pure/bqrs-cli-types";
import resultsDiff from "./resultsDiff";
import { CompletedLocalQueryInfo } from "../query-results";
import { getErrorMessage } from "../pure/helpers-pure";
import { HistoryItemLabelProvider } from "../history-item-label-provider";
import { AbstractWebview, WebviewPanelConfig } from "../abstract-webview";
interface ComparePair {
from: CompletedLocalQueryInfo;
to: CompletedLocalQueryInfo;
}
export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompareViewMessage> {
export class CompareView extends AbstractWebview<
ToCompareViewMessage,
FromCompareViewMessage
> {
private comparePair: ComparePair | undefined;
constructor(
@@ -34,8 +38,8 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
private logger: Logger,
private labelProvider: HistoryItemLabelProvider,
private showQueryResultsCallback: (
item: CompletedLocalQueryInfo
) => Promise<void>
item: CompletedLocalQueryInfo,
) => Promise<void>,
) {
super(ctx);
}
@@ -43,10 +47,11 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
async showResults(
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo,
selectedResultSetName?: string
selectedResultSetName?: string,
) {
this.comparePair = { from, to };
this.getPanel().reveal(undefined, true);
const panel = await this.getPanel();
panel.reveal(undefined, true);
await this.waitForPanelLoaded();
const [
@@ -54,11 +59,7 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
currentResultSetName,
fromResultSet,
toResultSet,
] = await this.findCommonResultSetNames(
from,
to,
selectedResultSetName
);
] = await this.findCommonResultSetNames(from, to, selectedResultSetName);
if (currentResultSetName) {
let rows: QueryCompareResult | undefined;
let message: string | undefined;
@@ -69,7 +70,7 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
}
await this.postMessage({
t: 'setComparisons',
t: "setComparisons",
stats: {
fromQuery: {
// since we split the description into several rows
@@ -97,11 +98,11 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
protected getPanelConfig(): WebviewPanelConfig {
return {
viewId: 'compareView',
title: 'Compare CodeQL Query Results',
viewId: "compareView",
title: "Compare CodeQL Query Results",
viewColumn: ViewColumn.Active,
preserveFocus: true,
view: 'compare',
view: "compare",
};
}
@@ -111,19 +112,19 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
protected async onMessage(msg: FromCompareViewMessage): Promise<void> {
switch (msg.t) {
case 'viewLoaded':
case "viewLoaded":
this.onWebViewLoaded();
break;
case 'changeCompare':
case "changeCompare":
await this.changeTable(msg.newResultSetName);
break;
case 'viewSourceFile':
case "viewSourceFile":
await jumpToLocation(msg, this.databaseManager, this.logger);
break;
case 'openQuery':
case "openQuery":
await this.openQuery(msg.kind);
break;
}
@@ -132,34 +133,32 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
private async findCommonResultSetNames(
from: CompletedLocalQueryInfo,
to: CompletedLocalQueryInfo,
selectedResultSetName: string | undefined
selectedResultSetName: string | undefined,
): Promise<[string[], string, RawResultSet, RawResultSet]> {
const fromSchemas = await this.cliServer.bqrsInfo(
from.completedQuery.query.resultsPaths.resultsPath
from.completedQuery.query.resultsPaths.resultsPath,
);
const toSchemas = await this.cliServer.bqrsInfo(
to.completedQuery.query.resultsPaths.resultsPath
to.completedQuery.query.resultsPaths.resultsPath,
);
const fromSchemaNames = fromSchemas['result-sets'].map(
(schema) => schema.name
);
const toSchemaNames = toSchemas['result-sets'].map(
(schema) => schema.name
const fromSchemaNames = fromSchemas["result-sets"].map(
(schema) => schema.name,
);
const toSchemaNames = toSchemas["result-sets"].map((schema) => schema.name);
const commonResultSetNames = fromSchemaNames.filter((name) =>
toSchemaNames.includes(name)
toSchemaNames.includes(name),
);
const currentResultSetName =
selectedResultSetName || commonResultSetNames[0];
const fromResultSet = await this.getResultSet(
fromSchemas,
currentResultSetName,
from.completedQuery.query.resultsPaths.resultsPath
from.completedQuery.query.resultsPaths.resultsPath,
);
const toResultSet = await this.getResultSet(
toSchemas,
currentResultSetName,
to.completedQuery.query.resultsPaths.resultsPath
to.completedQuery.query.resultsPaths.resultsPath,
);
return [
commonResultSetNames,
@@ -176,39 +175,36 @@ export class CompareView extends AbstractWebview<ToCompareViewMessage, FromCompa
await this.showResults(
this.comparePair.from,
this.comparePair.to,
newResultSetName
newResultSetName,
);
}
private async getResultSet(
bqrsInfo: BQRSInfo,
resultSetName: string,
resultsPath: string
resultsPath: string,
): Promise<RawResultSet> {
const schema = bqrsInfo['result-sets'].find(
(schema) => schema.name === resultSetName
const schema = bqrsInfo["result-sets"].find(
(schema) => schema.name === resultSetName,
);
if (!schema) {
throw new Error(`Schema ${resultSetName} not found.`);
}
const chunk = await this.cliServer.bqrsDecode(
resultsPath,
resultSetName
);
const chunk = await this.cliServer.bqrsDecode(resultsPath, resultSetName);
return transformBqrsResultSet(schema, chunk);
}
private compareResults(
fromResults: RawResultSet,
toResults: RawResultSet
toResults: RawResultSet,
): QueryCompareResult {
// Only compare columns that have the same name
return resultsDiff(fromResults, toResults);
}
private async openQuery(kind: 'from' | 'to') {
private async openQuery(kind: "from" | "to") {
const toOpen =
kind === 'from' ? this.comparePair?.from : this.comparePair?.to;
kind === "from" ? this.comparePair?.from : this.comparePair?.to;
if (toOpen) {
await this.showQueryResultsCallback(toOpen);
}

View File

@@ -1,5 +1,5 @@
import { RawResultSet } from '../pure/bqrs-cli-types';
import { QueryCompareResult } from '../pure/interface-types';
import { RawResultSet } from "../pure/bqrs-cli-types";
import { QueryCompareResult } from "../pure/interface-types";
/**
* Compare the rows of two queries. Use deep equality to determine if
@@ -21,19 +21,18 @@ import { QueryCompareResult } from '../pure/interface-types';
*/
export default function resultsDiff(
fromResults: RawResultSet,
toResults: RawResultSet
toResults: RawResultSet,
): QueryCompareResult {
if (fromResults.schema.columns.length !== toResults.schema.columns.length) {
throw new Error('CodeQL Compare: Columns do not match.');
throw new Error("CodeQL Compare: Columns do not match.");
}
if (!fromResults.rows.length) {
throw new Error('CodeQL Compare: Source query has no results.');
throw new Error("CodeQL Compare: Source query has no results.");
}
if (!toResults.rows.length) {
throw new Error('CodeQL Compare: Target query has no results.');
throw new Error("CodeQL Compare: Target query has no results.");
}
const results = {
@@ -45,7 +44,7 @@ export default function resultsDiff(
fromResults.rows.length === results.from.length &&
toResults.rows.length === results.to.length
) {
throw new Error('CodeQL Compare: No overlap between the selected queries.');
throw new Error("CodeQL Compare: No overlap between the selected queries.");
}
return results;

View File

@@ -1,8 +1,14 @@
import { DisposableObject } from './pure/disposable-object';
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
import { DistributionManager } from './distribution';
import { logger } from './logging';
import { ONE_DAY_IN_MS } from './pure/time';
import { DisposableObject } from "./pure/disposable-object";
import {
workspace,
Event,
EventEmitter,
ConfigurationChangeEvent,
ConfigurationTarget,
} from "vscode";
import { DistributionManager } from "./distribution";
import { logger } from "./logging";
import { ONE_DAY_IN_MS } from "./pure/time";
export const ALL_SETTINGS: Setting[] = [];
@@ -10,13 +16,21 @@ export const ALL_SETTINGS: Setting[] = [];
export class Setting {
name: string;
parent?: Setting;
private _hasChildren = false;
constructor(name: string, parent?: Setting) {
this.name = name;
this.parent = parent;
if (parent !== undefined) {
parent._hasChildren = true;
}
ALL_SETTINGS.push(this);
}
get hasChildren() {
return this._hasChildren;
}
get qualifiedName(): string {
if (this.parent === undefined) {
return this.name;
@@ -27,57 +41,86 @@ export class Setting {
getValue<T>(): T {
if (this.parent === undefined) {
throw new Error('Cannot get the value of a root setting.');
throw new Error("Cannot get the value of a root setting.");
}
return workspace.getConfiguration(this.parent.qualifiedName).get<T>(this.name)!;
return workspace
.getConfiguration(this.parent.qualifiedName)
.get<T>(this.name)!;
}
updateValue<T>(value: T, target: ConfigurationTarget): Thenable<void> {
if (this.parent === undefined) {
throw new Error('Cannot update the value of a root setting.');
throw new Error("Cannot update the value of a root setting.");
}
return workspace.getConfiguration(this.parent.qualifiedName).update(this.name, value, target);
return workspace
.getConfiguration(this.parent.qualifiedName)
.update(this.name, value, target);
}
inspect<T>(): InspectionResult<T> | undefined {
if (this.parent === undefined) {
throw new Error('Cannot update the value of a root setting.');
throw new Error("Cannot update the value of a root setting.");
}
return workspace.getConfiguration(this.parent.qualifiedName).inspect(this.name);
return workspace
.getConfiguration(this.parent.qualifiedName)
.inspect(this.name);
}
}
export interface InspectionResult<T> {
globalValue?: T;
workspaceValue?: T,
workspaceFolderValue?: T,
workspaceValue?: T;
workspaceFolderValue?: T;
}
const ROOT_SETTING = new Setting('codeQL');
const ROOT_SETTING = new Setting("codeQL");
// Global configuration
const TELEMETRY_SETTING = new Setting('telemetry', ROOT_SETTING);
const AST_VIEWER_SETTING = new Setting('astViewer', ROOT_SETTING);
const GLOBAL_TELEMETRY_SETTING = new Setting('telemetry');
const TELEMETRY_SETTING = new Setting("telemetry", ROOT_SETTING);
const AST_VIEWER_SETTING = new Setting("astViewer", ROOT_SETTING);
const GLOBAL_TELEMETRY_SETTING = new Setting("telemetry");
const LOG_INSIGHTS_SETTING = new Setting("logInsights", ROOT_SETTING);
export const LOG_TELEMETRY = new Setting('logTelemetry', TELEMETRY_SETTING);
export const ENABLE_TELEMETRY = new Setting('enableTelemetry', TELEMETRY_SETTING);
export const LOG_TELEMETRY = new Setting("logTelemetry", TELEMETRY_SETTING);
export const ENABLE_TELEMETRY = new Setting(
"enableTelemetry",
TELEMETRY_SETTING,
);
export const GLOBAL_ENABLE_TELEMETRY = new Setting('enableTelemetry', GLOBAL_TELEMETRY_SETTING);
export const GLOBAL_ENABLE_TELEMETRY = new Setting(
"enableTelemetry",
GLOBAL_TELEMETRY_SETTING,
);
// Distribution configuration
const DISTRIBUTION_SETTING = new Setting('cli', ROOT_SETTING);
export const CUSTOM_CODEQL_PATH_SETTING = new Setting('executablePath', DISTRIBUTION_SETTING);
const INCLUDE_PRERELEASE_SETTING = new Setting('includePrerelease', DISTRIBUTION_SETTING);
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting('personalAccessToken', DISTRIBUTION_SETTING);
const DISTRIBUTION_SETTING = new Setting("cli", ROOT_SETTING);
export const CUSTOM_CODEQL_PATH_SETTING = new Setting(
"executablePath",
DISTRIBUTION_SETTING,
);
const INCLUDE_PRERELEASE_SETTING = new Setting(
"includePrerelease",
DISTRIBUTION_SETTING,
);
const PERSONAL_ACCESS_TOKEN_SETTING = new Setting(
"personalAccessToken",
DISTRIBUTION_SETTING,
);
// Query History configuration
const QUERY_HISTORY_SETTING = new Setting('queryHistory', ROOT_SETTING);
const QUERY_HISTORY_FORMAT_SETTING = new Setting('format', QUERY_HISTORY_SETTING);
const QUERY_HISTORY_TTL = new Setting('ttl', QUERY_HISTORY_SETTING);
const QUERY_HISTORY_SETTING = new Setting("queryHistory", ROOT_SETTING);
const QUERY_HISTORY_FORMAT_SETTING = new Setting(
"format",
QUERY_HISTORY_SETTING,
);
const QUERY_HISTORY_TTL = new Setting("ttl", QUERY_HISTORY_SETTING);
/** When these settings change, the distribution should be updated. */
const DISTRIBUTION_CHANGE_SETTINGS = [CUSTOM_CODEQL_PATH_SETTING, INCLUDE_PRERELEASE_SETTING, PERSONAL_ACCESS_TOKEN_SETTING];
const DISTRIBUTION_CHANGE_SETTINGS = [
CUSTOM_CODEQL_PATH_SETTING,
INCLUDE_PRERELEASE_SETTING,
PERSONAL_ACCESS_TOKEN_SETTING,
];
export interface DistributionConfig {
readonly customCodeQlPath?: string;
@@ -90,28 +133,47 @@ export interface DistributionConfig {
}
// Query server configuration
const RUNNING_QUERIES_SETTING = new Setting('runningQueries', ROOT_SETTING);
const NUMBER_OF_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_QUERIES_SETTING);
const SAVE_CACHE_SETTING = new Setting('saveCache', RUNNING_QUERIES_SETTING);
const CACHE_SIZE_SETTING = new Setting('cacheSize', RUNNING_QUERIES_SETTING);
const TIMEOUT_SETTING = new Setting('timeout', RUNNING_QUERIES_SETTING);
const MEMORY_SETTING = new Setting('memory', RUNNING_QUERIES_SETTING);
const DEBUG_SETTING = new Setting('debug', RUNNING_QUERIES_SETTING);
const MAX_PATHS = new Setting('maxPaths', RUNNING_QUERIES_SETTING);
const RUNNING_TESTS_SETTING = new Setting('runningTests', ROOT_SETTING);
const RESULTS_DISPLAY_SETTING = new Setting('resultsDisplay', ROOT_SETTING);
const RUNNING_QUERIES_SETTING = new Setting("runningQueries", ROOT_SETTING);
const NUMBER_OF_THREADS_SETTING = new Setting(
"numberOfThreads",
RUNNING_QUERIES_SETTING,
);
const SAVE_CACHE_SETTING = new Setting("saveCache", RUNNING_QUERIES_SETTING);
const CACHE_SIZE_SETTING = new Setting("cacheSize", RUNNING_QUERIES_SETTING);
const TIMEOUT_SETTING = new Setting("timeout", RUNNING_QUERIES_SETTING);
const MEMORY_SETTING = new Setting("memory", RUNNING_QUERIES_SETTING);
const DEBUG_SETTING = new Setting("debug", RUNNING_QUERIES_SETTING);
const MAX_PATHS = new Setting("maxPaths", RUNNING_QUERIES_SETTING);
const RUNNING_TESTS_SETTING = new Setting("runningTests", ROOT_SETTING);
const RESULTS_DISPLAY_SETTING = new Setting("resultsDisplay", ROOT_SETTING);
export const ADDITIONAL_TEST_ARGUMENTS_SETTING = new Setting('additionalTestArguments', RUNNING_TESTS_SETTING);
export const NUMBER_OF_TEST_THREADS_SETTING = new Setting('numberOfThreads', RUNNING_TESTS_SETTING);
export const MAX_QUERIES = new Setting('maxQueries', RUNNING_QUERIES_SETTING);
export const AUTOSAVE_SETTING = new Setting('autoSave', RUNNING_QUERIES_SETTING);
export const PAGE_SIZE = new Setting('pageSize', RESULTS_DISPLAY_SETTING);
const CUSTOM_LOG_DIRECTORY_SETTING = new Setting('customLogDirectory', RUNNING_QUERIES_SETTING);
export const ADDITIONAL_TEST_ARGUMENTS_SETTING = new Setting(
"additionalTestArguments",
RUNNING_TESTS_SETTING,
);
export const NUMBER_OF_TEST_THREADS_SETTING = new Setting(
"numberOfThreads",
RUNNING_TESTS_SETTING,
);
export const MAX_QUERIES = new Setting("maxQueries", RUNNING_QUERIES_SETTING);
export const AUTOSAVE_SETTING = new Setting(
"autoSave",
RUNNING_QUERIES_SETTING,
);
export const PAGE_SIZE = new Setting("pageSize", RESULTS_DISPLAY_SETTING);
const CUSTOM_LOG_DIRECTORY_SETTING = new Setting(
"customLogDirectory",
RUNNING_QUERIES_SETTING,
);
/** When these settings change, the running query server should be restarted. */
const QUERY_SERVER_RESTARTING_SETTINGS = [
NUMBER_OF_THREADS_SETTING, SAVE_CACHE_SETTING, CACHE_SIZE_SETTING, MEMORY_SETTING,
DEBUG_SETTING, CUSTOM_LOG_DIRECTORY_SETTING,
NUMBER_OF_THREADS_SETTING,
SAVE_CACHE_SETTING,
CACHE_SIZE_SETTING,
MEMORY_SETTING,
DEBUG_SETTING,
CUSTOM_LOG_DIRECTORY_SETTING,
];
export interface QueryServerConfig {
@@ -127,7 +189,10 @@ export interface QueryServerConfig {
}
/** When these settings change, the query history should be refreshed. */
const QUERY_HISTORY_SETTINGS = [QUERY_HISTORY_FORMAT_SETTING, QUERY_HISTORY_TTL];
const QUERY_HISTORY_SETTINGS = [
QUERY_HISTORY_FORMAT_SETTING,
QUERY_HISTORY_TTL,
];
export interface QueryHistoryConfig {
format: string;
@@ -135,7 +200,12 @@ export interface QueryHistoryConfig {
onDidChangeConfiguration: Event<void>;
}
const CLI_SETTINGS = [ADDITIONAL_TEST_ARGUMENTS_SETTING, NUMBER_OF_TEST_THREADS_SETTING, NUMBER_OF_THREADS_SETTING, MAX_PATHS];
const CLI_SETTINGS = [
ADDITIONAL_TEST_ARGUMENTS_SETTING,
NUMBER_OF_TEST_THREADS_SETTING,
NUMBER_OF_THREADS_SETTING,
MAX_PATHS,
];
export interface CliConfig {
additionalTestArguments: string[];
@@ -145,20 +215,29 @@ export interface CliConfig {
onDidChangeConfiguration?: Event<void>;
}
export abstract class ConfigListener extends DisposableObject {
protected readonly _onDidChangeConfiguration = this.push(new EventEmitter<void>());
protected readonly _onDidChangeConfiguration = this.push(
new EventEmitter<void>(),
);
constructor() {
super();
this.updateConfiguration();
this.push(workspace.onDidChangeConfiguration(this.handleDidChangeConfiguration, this));
this.push(
workspace.onDidChangeConfiguration(
this.handleDidChangeConfiguration,
this,
),
);
}
/**
* Calls `updateConfiguration` if any of the `relevantSettings` have changed.
*/
protected handleDidChangeConfigurationForRelevantSettings(relevantSettings: Setting[], e: ConfigurationChangeEvent): void {
protected handleDidChangeConfigurationForRelevantSettings(
relevantSettings: Setting[],
e: ConfigurationChangeEvent,
): void {
// Check whether any options that affect query running were changed.
for (const option of relevantSettings) {
// TODO: compare old and new values, only update if there was actually a change?
@@ -169,7 +248,9 @@ export abstract class ConfigListener extends DisposableObject {
}
}
protected abstract handleDidChangeConfiguration(e: ConfigurationChangeEvent): void;
protected abstract handleDidChangeConfiguration(
e: ConfigurationChangeEvent,
): void;
private updateConfiguration(): void {
this._onDidChangeConfiguration.fire(undefined);
}
@@ -179,7 +260,10 @@ export abstract class ConfigListener extends DisposableObject {
}
}
export class DistributionConfigListener extends ConfigListener implements DistributionConfig {
export class DistributionConfigListener
extends ConfigListener
implements DistributionConfig
{
public get customCodeQlPath(): string | undefined {
return CUSTOM_CODEQL_PATH_SETTING.getValue() || undefined;
}
@@ -193,28 +277,43 @@ export class DistributionConfigListener extends ConfigListener implements Distri
}
public async updateCustomCodeQlPath(newPath: string | undefined) {
await CUSTOM_CODEQL_PATH_SETTING.updateValue(newPath, ConfigurationTarget.Global);
await CUSTOM_CODEQL_PATH_SETTING.updateValue(
newPath,
ConfigurationTarget.Global,
);
}
protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
this.handleDidChangeConfigurationForRelevantSettings(DISTRIBUTION_CHANGE_SETTINGS, e);
this.handleDidChangeConfigurationForRelevantSettings(
DISTRIBUTION_CHANGE_SETTINGS,
e,
);
}
}
export class QueryServerConfigListener extends ConfigListener implements QueryServerConfig {
public constructor(private _codeQlPath = '') {
export class QueryServerConfigListener
extends ConfigListener
implements QueryServerConfig
{
public constructor(private _codeQlPath = "") {
super();
}
public static async createQueryServerConfigListener(distributionManager: DistributionManager): Promise<QueryServerConfigListener> {
const codeQlPath = await distributionManager.getCodeQlPathWithoutVersionCheck();
public static async createQueryServerConfigListener(
distributionManager: DistributionManager,
): Promise<QueryServerConfigListener> {
const codeQlPath =
await distributionManager.getCodeQlPathWithoutVersionCheck();
const config = new QueryServerConfigListener(codeQlPath!);
if (distributionManager.onDidChangeDistribution) {
config.push(distributionManager.onDidChangeDistribution(async () => {
const codeQlPath = await distributionManager.getCodeQlPathWithoutVersionCheck();
config._codeQlPath = codeQlPath!;
config._onDidChangeConfiguration.fire(undefined);
}));
config.push(
distributionManager.onDidChangeDistribution(async () => {
const codeQlPath =
await distributionManager.getCodeQlPathWithoutVersionCheck();
config._codeQlPath = codeQlPath!;
config._onDidChangeConfiguration.fire(undefined);
}),
);
}
return config;
}
@@ -249,8 +348,10 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
if (memory === null) {
return undefined;
}
if (memory == 0 || typeof (memory) !== 'number') {
void logger.log(`Ignoring value '${memory}' for setting ${MEMORY_SETTING.qualifiedName}`);
if (memory == 0 || typeof memory !== "number") {
void logger.log(
`Ignoring value '${memory}' for setting ${MEMORY_SETTING.qualifiedName}`,
);
return undefined;
}
return memory;
@@ -261,13 +362,22 @@ export class QueryServerConfigListener extends ConfigListener implements QuerySe
}
protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
this.handleDidChangeConfigurationForRelevantSettings(QUERY_SERVER_RESTARTING_SETTINGS, e);
this.handleDidChangeConfigurationForRelevantSettings(
QUERY_SERVER_RESTARTING_SETTINGS,
e,
);
}
}
export class QueryHistoryConfigListener extends ConfigListener implements QueryHistoryConfig {
export class QueryHistoryConfigListener
extends ConfigListener
implements QueryHistoryConfig
{
protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
this.handleDidChangeConfigurationForRelevantSettings(QUERY_HISTORY_SETTINGS, e);
this.handleDidChangeConfigurationForRelevantSettings(
QUERY_HISTORY_SETTINGS,
e,
);
}
public get format(): string {
@@ -307,13 +417,15 @@ export class CliConfigListener extends ConfigListener implements CliConfig {
/**
* Whether to enable CodeLens for the 'Quick Evaluation' command.
*/
const QUICK_EVAL_CODELENS_SETTING = new Setting('quickEvalCodelens', RUNNING_QUERIES_SETTING);
const QUICK_EVAL_CODELENS_SETTING = new Setting(
"quickEvalCodelens",
RUNNING_QUERIES_SETTING,
);
export function isQuickEvalCodelensEnabled() {
return QUICK_EVAL_CODELENS_SETTING.getValue<boolean>();
}
// Enable experimental features
/**
@@ -326,7 +438,7 @@ export function isQuickEvalCodelensEnabled() {
/**
* Enables canary features of this extension. Recommended for all internal users.
*/
export const CANARY_FEATURES = new Setting('canary', ROOT_SETTING);
export const CANARY_FEATURES = new Setting("canary", ROOT_SETTING);
export function isCanary() {
return !!CANARY_FEATURES.getValue<boolean>();
@@ -335,21 +447,36 @@ export function isCanary() {
/**
* Enables the experimental query server
*/
export const CANARY_QUERY_SERVER = new Setting('canaryQueryServer', ROOT_SETTING);
export const CANARY_QUERY_SERVER = new Setting(
"canaryQueryServer",
ROOT_SETTING,
);
// The default value for this setting is now `true`
export function allowCanaryQueryServer() {
return !!CANARY_QUERY_SERVER.getValue<boolean>();
const value = CANARY_QUERY_SERVER.getValue<boolean>();
return value === undefined ? true : !!value;
}
export const JOIN_ORDER_WARNING_THRESHOLD = new Setting(
"joinOrderWarningThreshold",
LOG_INSIGHTS_SETTING,
);
export function joinOrderWarningThreshold(): number {
return JOIN_ORDER_WARNING_THRESHOLD.getValue<number>();
}
/**
* Avoids caching in the AST viewer if the user is also a canary user.
*/
export const NO_CACHE_AST_VIEWER = new Setting('disableCache', AST_VIEWER_SETTING);
export const NO_CACHE_AST_VIEWER = new Setting(
"disableCache",
AST_VIEWER_SETTING,
);
// Settings for variant analysis
const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);
const REMOTE_QUERIES_SETTING = new Setting("variantAnalysis", ROOT_SETTING);
/**
* Lists of GitHub repositories that you want to query remotely via the "Run Variant Analysis" command.
@@ -358,13 +485,20 @@ const REMOTE_QUERIES_SETTING = new Setting('variantAnalysis', ROOT_SETTING);
* This setting should be a JSON object where each key is a user-specified name (string),
* and the value is an array of GitHub repositories (of the form `<owner>/<repo>`).
*/
const REMOTE_REPO_LISTS = new Setting('repositoryLists', REMOTE_QUERIES_SETTING);
const REMOTE_REPO_LISTS = new Setting(
"repositoryLists",
REMOTE_QUERIES_SETTING,
);
export function getRemoteRepositoryLists(): Record<string, string[]> | undefined {
export function getRemoteRepositoryLists():
| Record<string, string[]>
| undefined {
return REMOTE_REPO_LISTS.getValue<Record<string, string[]>>() || undefined;
}
export async function setRemoteRepositoryLists(lists: Record<string, string[]> | undefined) {
export async function setRemoteRepositoryLists(
lists: Record<string, string[]> | undefined,
) {
await REMOTE_REPO_LISTS.updateValue(lists, ConfigurationTarget.Global);
}
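To make the expected shape concrete, a hypothetical call (inside an async function; the list name and repositories are invented):

await setRemoteRepositoryLists({
  "my-favourite-repos": ["octo-org/example-one", "octo-org/example-two"],
});

This corresponds to the codeQL.variantAnalysis.repositoryLists setting, where each key names a list and each value is an array of <owner>/<repo> strings.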
@@ -377,7 +511,10 @@ export async function setRemoteRepositoryLists(lists: Record<string, string[]> |
* user-specified name (string), and the value is an array of GitHub repositories
* (of the form `<owner>/<repo>`).
*/
const REPO_LISTS_PATH = new Setting('repositoryListsPath', REMOTE_QUERIES_SETTING);
const REPO_LISTS_PATH = new Setting(
"repositoryListsPath",
REMOTE_QUERIES_SETTING,
);
export function getRemoteRepositoryListsPath(): string | undefined {
return REPO_LISTS_PATH.getValue<string>() || undefined;
@@ -389,7 +526,10 @@ export function getRemoteRepositoryListsPath(): string | undefined {
*
* This setting should be a GitHub repository of the form `<owner>/<repo>`.
*/
const REMOTE_CONTROLLER_REPO = new Setting('controllerRepo', REMOTE_QUERIES_SETTING);
const REMOTE_CONTROLLER_REPO = new Setting(
"controllerRepo",
REMOTE_QUERIES_SETTING,
);
export function getRemoteControllerRepo(): string | undefined {
return REMOTE_CONTROLLER_REPO.getValue<string>() || undefined;
@@ -404,22 +544,83 @@ export async function setRemoteControllerRepo(repo: string | undefined) {
* Default value is "main".
* Note: This command is only available for internal users.
*/
const ACTION_BRANCH = new Setting('actionBranch', REMOTE_QUERIES_SETTING);
const ACTION_BRANCH = new Setting("actionBranch", REMOTE_QUERIES_SETTING);
export function getActionBranch(): string {
return ACTION_BRANCH.getValue<string>() || 'main';
return ACTION_BRANCH.getValue<string>() || "main";
}
export function isIntegrationTestMode() {
return process.env.INTEGRATION_TEST_MODE === 'true';
return process.env.INTEGRATION_TEST_MODE === "true";
}
/**
* A flag indicating whether to enable the experimental "live results" feature
* for multi-repo variant analyses.
*/
const LIVE_RESULTS = new Setting('liveResults', REMOTE_QUERIES_SETTING);
const LIVE_RESULTS = new Setting("liveResults", REMOTE_QUERIES_SETTING);
export function isVariantAnalysisLiveResultsEnabled(): boolean {
return !!LIVE_RESULTS.getValue<boolean>();
}
/**
* A flag indicating whether to use the new query run experience which involves
* using a new database panel.
*/
const NEW_QUERY_RUN_EXPERIENCE = new Setting(
"newQueryRunExperience",
ROOT_SETTING,
);
export function isNewQueryRunExperienceEnabled(): boolean {
return !!NEW_QUERY_RUN_EXPERIENCE.getValue<boolean>();
}
// Settings for mocking the GitHub API.
const MOCK_GH_API_SERVER = new Setting("mockGitHubApiServer", ROOT_SETTING);
/**
* A flag indicating whether to enable a mock GitHub API server.
*/
const MOCK_GH_API_SERVER_ENABLED = new Setting("enabled", MOCK_GH_API_SERVER);
/**
* A path to a directory containing test scenarios. If this setting is not set,
* the mock server will use a default location for test scenarios in dev mode, and
* will show a menu to select a directory in production mode.
*/
const MOCK_GH_API_SERVER_SCENARIOS_PATH = new Setting(
"scenariosPath",
MOCK_GH_API_SERVER,
);
export interface MockGitHubApiConfig {
mockServerEnabled: boolean;
mockScenariosPath: string;
onDidChangeConfiguration: Event<void>;
}
export class MockGitHubApiConfigListener
extends ConfigListener
implements MockGitHubApiConfig
{
protected handleDidChangeConfiguration(e: ConfigurationChangeEvent): void {
this.handleDidChangeConfigurationForRelevantSettings(
[MOCK_GH_API_SERVER],
e,
);
}
public get mockServerEnabled(): boolean {
return !!MOCK_GH_API_SERVER_ENABLED.getValue<boolean>();
}
public get mockScenariosPath(): string {
return MOCK_GH_API_SERVER_SCENARIOS_PATH.getValue<string>();
}
}
export function getMockGitHubApiServerScenariosPath(): string | undefined {
return MOCK_GH_API_SERVER_SCENARIOS_PATH.getValue<string>();
}

View File

@@ -1,24 +1,23 @@
import { CodeQLCliServer } from '../cli';
import { DecodedBqrsChunk, BqrsId, EntityValue } from '../pure/bqrs-cli-types';
import { DatabaseItem } from '../databases';
import { ChildAstItem, AstItem } from '../astViewer';
import fileRangeFromURI from './fileRangeFromURI';
import { Uri } from 'vscode';
import { QueryWithResults } from '../run-queries-shared';
import { CodeQLCliServer } from "../cli";
import { DecodedBqrsChunk, BqrsId, EntityValue } from "../pure/bqrs-cli-types";
import { DatabaseItem } from "../databases";
import { ChildAstItem, AstItem } from "../astViewer";
import fileRangeFromURI from "./fileRangeFromURI";
import { Uri } from "vscode";
import { QueryWithResults } from "../run-queries-shared";
/**
* A class that wraps a tree of QL results from a query that
* has an @kind of graph
*/
export default class AstBuilder {
private roots: AstItem[] | undefined;
private bqrsPath: string;
constructor(
queryResults: QueryWithResults,
private cli: CodeQLCliServer,
public db: DatabaseItem,
public fileName: Uri
public fileName: Uri,
) {
this.bqrsPath = queryResults.query.resultsPaths.resultsPath;
}
@@ -31,15 +30,15 @@ export default class AstBuilder {
}
private async parseRoots(): Promise<AstItem[]> {
const options = { entities: ['id', 'url', 'string'] };
const options = { entities: ["id", "url", "string"] };
const [nodeTuples, edgeTuples, graphProperties] = await Promise.all([
await this.cli.bqrsDecode(this.bqrsPath, 'nodes', options),
await this.cli.bqrsDecode(this.bqrsPath, 'edges', options),
await this.cli.bqrsDecode(this.bqrsPath, 'graphProperties', options),
await this.cli.bqrsDecode(this.bqrsPath, "nodes", options),
await this.cli.bqrsDecode(this.bqrsPath, "edges", options),
await this.cli.bqrsDecode(this.bqrsPath, "graphProperties", options),
]);
if (!this.isValidGraph(graphProperties)) {
throw new Error('AST is invalid');
throw new Error("AST is invalid");
}
const idToItem = new Map<BqrsId, AstItem>();
@@ -50,21 +49,26 @@ export default class AstBuilder {
const roots = [];
// Build up the parent-child relationships
edgeTuples.tuples.forEach(tuple => {
const [source, target, tupleType, value] = tuple as [EntityValue, EntityValue, string, string];
edgeTuples.tuples.forEach((tuple) => {
const [source, target, tupleType, value] = tuple as [
EntityValue,
EntityValue,
string,
string,
];
const sourceId = source.id!;
const targetId = target.id!;
switch (tupleType) {
case 'semmle.order':
case "semmle.order":
astOrder.set(targetId, Number(value));
break;
case 'semmle.label': {
case "semmle.label": {
childToParent.set(targetId, sourceId);
let children = parentToChildren.get(sourceId);
if (!children) {
parentToChildren.set(sourceId, children = []);
parentToChildren.set(sourceId, (children = []));
}
children.push(targetId);
@@ -81,39 +85,43 @@ export default class AstBuilder {
});
// populate parents and children
nodeTuples.tuples.forEach(tuple => {
nodeTuples.tuples.forEach((tuple) => {
const [entity, tupleType, value] = tuple as [EntityValue, string, string];
const id = entity.id!;
switch (tupleType) {
case 'semmle.order':
case "semmle.order":
astOrder.set(id, Number(value));
break;
case 'semmle.label': {
case "semmle.label": {
// If an edge label exists, include it and separate it from the node label using ':'
const nodeLabel = value ?? entity.label;
const edgeLabel = edgeLabels.get(id);
const label = [edgeLabel, nodeLabel].filter(e => e).join(': ');
const label = [edgeLabel, nodeLabel].filter((e) => e).join(": ");
const item = {
id,
label,
location: entity.url,
fileLocation: fileRangeFromURI(entity.url, this.db),
children: [] as ChildAstItem[],
order: Number.MAX_SAFE_INTEGER
order: Number.MAX_SAFE_INTEGER,
};
idToItem.set(id, item);
const parent = idToItem.get(childToParent.has(id) ? childToParent.get(id)! : -1);
const parent = idToItem.get(
childToParent.has(id) ? childToParent.get(id)! : -1,
);
if (parent) {
const astItem = item as ChildAstItem;
astItem.parent = parent;
parent.children.push(astItem);
}
const children = parentToChildren.has(id) ? parentToChildren.get(id)! : [];
children.forEach(childId => {
const children = parentToChildren.has(id)
? parentToChildren.get(id)!
: [];
children.forEach((childId) => {
const child = idToItem.get(childId) as ChildAstItem | undefined;
if (child) {
child.parent = item;
@@ -134,7 +142,7 @@ export default class AstBuilder {
? astOrder.get(item.id)!
: Number.MAX_SAFE_INTEGER;
if (!('parent' in item)) {
if (!("parent" in item)) {
roots.push(item);
}
}
@@ -142,7 +150,9 @@ export default class AstBuilder {
}
private isValidGraph(graphProperties: DecodedBqrsChunk) {
const tuple = graphProperties?.tuples?.find(t => t[0] === 'semmle.graphKind');
return tuple?.[1] === 'tree';
const tuple = graphProperties?.tuples?.find(
(t) => t[0] === "semmle.graphKind",
);
return tuple?.[1] === "tree";
}
}

View File

@@ -1,26 +1,30 @@
import * as vscode from 'vscode';
import * as vscode from "vscode";
import { UrlValue, LineColumnLocation } from '../pure/bqrs-cli-types';
import { isEmptyPath } from '../pure/bqrs-utils';
import { DatabaseItem } from '../databases';
import { UrlValue, LineColumnLocation } from "../pure/bqrs-cli-types";
import { isEmptyPath } from "../pure/bqrs-utils";
import { DatabaseItem } from "../databases";
export default function fileRangeFromURI(uri: UrlValue | undefined, db: DatabaseItem): vscode.Location | undefined {
if (!uri || typeof uri === 'string') {
export default function fileRangeFromURI(
uri: UrlValue | undefined,
db: DatabaseItem,
): vscode.Location | undefined {
if (!uri || typeof uri === "string") {
return undefined;
} else if ('startOffset' in uri) {
} else if ("startOffset" in uri) {
return undefined;
} else {
const loc = uri as LineColumnLocation;
if (isEmptyPath(loc.uri)) {
return undefined;
}
const range = new vscode.Range(Math.max(0, (loc.startLine || 0) - 1),
const range = new vscode.Range(
Math.max(0, (loc.startLine || 0) - 1),
Math.max(0, (loc.startColumn || 0) - 1),
Math.max(0, (loc.endLine || 0) - 1),
Math.max(0, (loc.endColumn || 0)));
Math.max(0, loc.endColumn || 0),
);
try {
if (uri.uri.startsWith('file:')) {
if (uri.uri.startsWith("file:")) {
return new vscode.Location(db.resolveSourceFile(uri.uri), range);
}
return undefined;
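Worth noting about the range construction above: BQRS locations are 1-based, while vscode.Range is 0-based, so lines and start columns are shifted down by one and the end column is left as-is. For example (hypothetical values), startLine 2, startColumn 5, endLine 2, endColumn 9 becomes new vscode.Range(1, 4, 1, 9).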

View File

@@ -1,33 +1,33 @@
export enum KeyType {
DefinitionQuery = 'DefinitionQuery',
ReferenceQuery = 'ReferenceQuery',
PrintAstQuery = 'PrintAstQuery',
PrintCfgQuery = 'PrintCfgQuery',
DefinitionQuery = "DefinitionQuery",
ReferenceQuery = "ReferenceQuery",
PrintAstQuery = "PrintAstQuery",
PrintCfgQuery = "PrintCfgQuery",
}
export function tagOfKeyType(keyType: KeyType): string {
switch (keyType) {
case KeyType.DefinitionQuery:
return 'ide-contextual-queries/local-definitions';
return "ide-contextual-queries/local-definitions";
case KeyType.ReferenceQuery:
return 'ide-contextual-queries/local-references';
return "ide-contextual-queries/local-references";
case KeyType.PrintAstQuery:
return 'ide-contextual-queries/print-ast';
return "ide-contextual-queries/print-ast";
case KeyType.PrintCfgQuery:
return 'ide-contextual-queries/print-cfg';
return "ide-contextual-queries/print-cfg";
}
}
export function nameOfKeyType(keyType: KeyType): string {
switch (keyType) {
case KeyType.DefinitionQuery:
return 'definitions';
return "definitions";
case KeyType.ReferenceQuery:
return 'references';
return "references";
case KeyType.PrintAstQuery:
return 'print AST';
return "print AST";
case KeyType.PrintCfgQuery:
return 'print CFG';
return "print CFG";
}
}
@@ -35,9 +35,9 @@ export function kindOfKeyType(keyType: KeyType): string {
switch (keyType) {
case KeyType.DefinitionQuery:
case KeyType.ReferenceQuery:
return 'definitions';
return "definitions";
case KeyType.PrintAstQuery:
case KeyType.PrintCfgQuery:
return 'graph';
return "graph";
}
}
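For reference, a quick sketch of what these mappings return for one key type (directly readable from the switch statements above):

tagOfKeyType(KeyType.PrintAstQuery); // "ide-contextual-queries/print-ast"
nameOfKeyType(KeyType.PrintAstQuery); // "print AST"
kindOfKeyType(KeyType.PrintAstQuery); // "graph"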

View File

@@ -1,17 +1,29 @@
import { decodeSourceArchiveUri, encodeArchiveBasePath } from '../archive-filesystem-provider';
import { ColumnKindCode, EntityValue, getResultSetSchema, ResultSetSchema } from '../pure/bqrs-cli-types';
import { CodeQLCliServer } from '../cli';
import { DatabaseManager, DatabaseItem } from '../databases';
import fileRangeFromURI from './fileRangeFromURI';
import { ProgressCallback } from '../commandRunner';
import { KeyType } from './keyType';
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
import { CancellationToken, LocationLink, Uri } from 'vscode';
import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
import { QueryRunner } from '../queryRunner';
import {
decodeSourceArchiveUri,
encodeArchiveBasePath,
} from "../archive-filesystem-provider";
import {
ColumnKindCode,
EntityValue,
getResultSetSchema,
ResultSetSchema,
} from "../pure/bqrs-cli-types";
import { CodeQLCliServer } from "../cli";
import { DatabaseManager, DatabaseItem } from "../databases";
import fileRangeFromURI from "./fileRangeFromURI";
import { ProgressCallback } from "../commandRunner";
import { KeyType } from "./keyType";
import {
qlpackOfDatabase,
resolveQueries,
runContextualQuery,
} from "./queryResolver";
import { CancellationToken, LocationLink, Uri } from "vscode";
import { QueryWithResults } from "../run-queries-shared";
import { QueryRunner } from "../queryRunner";
export const SELECT_QUERY_NAME = '#select';
export const TEMPLATE_NAME = 'selectedSourceFile';
export const SELECT_QUERY_NAME = "#select";
export const TEMPLATE_NAME = "selectedSourceFile";
export interface FullLocationLink extends LocationLink {
originUri: Uri;
@@ -41,7 +53,7 @@ export async function getLocationsForUriString(
queryStorageDir: string,
progress: ProgressCallback,
token: CancellationToken,
filter: (src: string, dest: string) => boolean
filter: (src: string, dest: string) => boolean,
): Promise<FullLocationLink[]> {
const uri = decodeSourceArchiveUri(Uri.parse(uriString, true));
const sourceArchiveUri = encodeArchiveBasePath(uri.sourceArchiveZipPath);
@@ -56,17 +68,18 @@ export async function getLocationsForUriString(
const links: FullLocationLink[] = [];
for (const query of await resolveQueries(cli, qlpack, keyType)) {
const initialInfo = await createInitialQueryInfo(
Uri.file(query),
{
name: db.name,
databaseUri: db.databaseUri.toString(),
},
false
const results = await runContextualQuery(
query,
db,
queryStorageDir,
qs,
cli,
progress,
token,
templates,
);
const results = await qs.compileAndRunQueryAgainstDatabase(db, initialInfo, queryStorageDir, progress, token, templates);
if (results.successful) {
links.push(...await getLinksFromResults(results, cli, db, filter));
links.push(...(await getLinksFromResults(results, cli, db, filter)));
}
}
return links;
@@ -76,7 +89,7 @@ async function getLinksFromResults(
results: QueryWithResults,
cli: CodeQLCliServer,
db: DatabaseItem,
filter: (srcFile: string, destFile: string) => boolean
filter: (srcFile: string, destFile: string) => boolean,
): Promise<FullLocationLink[]> {
const localLinks: FullLocationLink[] = [];
const bqrsPath = results.query.resultsPaths.resultsPath;
@@ -89,12 +102,16 @@ async function getLinksFromResults(
const [src, dest] = tuple as [EntityValue, EntityValue];
const srcFile = src.url && fileRangeFromURI(src.url, db);
const destFile = dest.url && fileRangeFromURI(dest.url, db);
if (srcFile && destFile && filter(srcFile.uri.toString(), destFile.uri.toString())) {
if (
srcFile &&
destFile &&
filter(srcFile.uri.toString(), destFile.uri.toString())
) {
localLinks.push({
targetRange: destFile.range,
targetUri: destFile.uri,
originSelectionRange: srcFile.range,
originUri: srcFile.uri
originUri: srcFile.uri,
});
}
}
@@ -104,13 +121,16 @@ async function getLinksFromResults(
function createTemplates(path: string): Record<string, string> {
return {
[TEMPLATE_NAME]: path
[TEMPLATE_NAME]: path,
};
}
function isValidSelect(selectInfo: ResultSetSchema | undefined) {
return selectInfo && selectInfo.columns.length == 3
&& selectInfo.columns[0].kind == ColumnKindCode.ENTITY
&& selectInfo.columns[1].kind == ColumnKindCode.ENTITY
&& selectInfo.columns[2].kind == ColumnKindCode.STRING;
return (
selectInfo &&
selectInfo.columns.length == 3 &&
selectInfo.columns[0].kind == ColumnKindCode.ENTITY &&
selectInfo.columns[1].kind == ColumnKindCode.ENTITY &&
selectInfo.columns[2].kind == ColumnKindCode.STRING
);
}

View File

@@ -1,21 +1,25 @@
import * as fs from 'fs-extra';
import * as yaml from 'js-yaml';
import * as tmp from 'tmp-promise';
import * as fs from "fs-extra";
import * as yaml from "js-yaml";
import * as tmp from "tmp-promise";
import * as path from "path";
import * as helpers from '../helpers';
import {
KeyType,
kindOfKeyType,
nameOfKeyType,
tagOfKeyType
} from './keyType';
import { CodeQLCliServer } from '../cli';
import { DatabaseItem } from '../databases';
import { QlPacksForLanguage } from '../helpers';
import * as helpers from "../helpers";
import { KeyType, kindOfKeyType, nameOfKeyType, tagOfKeyType } from "./keyType";
import { CodeQLCliServer } from "../cli";
import { DatabaseItem } from "../databases";
import { QlPacksForLanguage } from "../helpers";
import { logger } from "../logging";
import { createInitialQueryInfo } from "../run-queries-shared";
import { CancellationToken, Uri } from "vscode";
import { ProgressCallback } from "../commandRunner";
import { QueryRunner } from "../queryRunner";
export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem): Promise<QlPacksForLanguage> {
export async function qlpackOfDatabase(
cli: CodeQLCliServer,
db: DatabaseItem,
): Promise<QlPacksForLanguage> {
if (db.contents === undefined) {
throw new Error('Database is invalid and cannot infer QLPack.');
throw new Error("Database is invalid and cannot infer QLPack.");
}
const datasetPath = db.contents.datasetUri.fsPath;
const dbscheme = await helpers.getPrimaryDbscheme(datasetPath);
@@ -30,29 +34,43 @@ export async function qlpackOfDatabase(cli: CodeQLCliServer, db: DatabaseItem):
* @param keyType The contextual query key of the query to search for.
* @returns The found queries from the first pack in which any matching queries were found.
*/
async function resolveQueriesFromPacks(cli: CodeQLCliServer, qlpacks: string[], keyType: KeyType): Promise<string[]> {
const suiteFile = (await tmp.file({
postfix: '.qls'
})).path;
async function resolveQueriesFromPacks(
cli: CodeQLCliServer,
qlpacks: string[],
keyType: KeyType,
): Promise<string[]> {
const suiteFile = (
await tmp.file({
postfix: ".qls",
})
).path;
const suiteYaml = [];
for (const qlpack of qlpacks) {
suiteYaml.push({
from: qlpack,
queries: '.',
queries: ".",
include: {
kind: kindOfKeyType(keyType),
'tags contain': tagOfKeyType(keyType)
}
"tags contain": tagOfKeyType(keyType),
},
});
}
await fs.writeFile(suiteFile, yaml.dump(suiteYaml), 'utf8');
await fs.writeFile(suiteFile, yaml.dump(suiteYaml), "utf8");
const queries = await cli.resolveQueriesInSuite(suiteFile, helpers.getOnDiskWorkspaceFolders());
const queries = await cli.resolveQueriesInSuite(
suiteFile,
helpers.getOnDiskWorkspaceFolders(),
);
return queries;
}
export async function resolveQueries(cli: CodeQLCliServer, qlpacks: QlPacksForLanguage, keyType: KeyType): Promise<string[]> {
const cliCanHandleLibraryPack = await cli.cliConstraints.supportsAllowLibraryPacksInResolveQueries();
export async function resolveQueries(
cli: CodeQLCliServer,
qlpacks: QlPacksForLanguage,
keyType: KeyType,
): Promise<string[]> {
const cliCanHandleLibraryPack =
await cli.cliConstraints.supportsAllowLibraryPacksInResolveQueries();
const packsToSearch: string[] = [];
let blameCli: boolean;
@@ -92,15 +110,121 @@ export async function resolveQueries(cli: CodeQLCliServer, qlpacks: QlPacksForLa
}
// No queries found. Determine the correct error message for the various scenarios.
const errorMessage = blameCli ?
`Your current version of the CodeQL CLI, '${(await cli.getVersion()).version}', \
const errorMessage = blameCli
? `Your current version of the CodeQL CLI, '${
(await cli.getVersion()).version
}', \
is unable to use contextual queries from recent versions of the standard CodeQL libraries. \
Please upgrade to the latest version of the CodeQL CLI.`
:
`No ${nameOfKeyType(keyType)} queries (tagged "${tagOfKeyType(keyType)}") could be found in the current library path. \
Try upgrading the CodeQL libraries. If that doesn't work, then ${nameOfKeyType(keyType)} queries are not yet available \
: `No ${nameOfKeyType(keyType)} queries (tagged "${tagOfKeyType(
keyType,
)}") could be found in the current library path. \
Try upgrading the CodeQL libraries. If that doesn't work, then ${nameOfKeyType(
keyType,
)} queries are not yet available \
for this language.`;
void helpers.showAndLogErrorMessage(errorMessage);
throw new Error(`Couldn't find any queries tagged ${tagOfKeyType(keyType)} in any of the following packs: ${packsToSearch.join(', ')}.`);
throw new Error(
`Couldn't find any queries tagged ${tagOfKeyType(
keyType,
)} in any of the following packs: ${packsToSearch.join(", ")}.`,
);
}
async function resolveContextualQuery(
cli: CodeQLCliServer,
query: string,
): Promise<{ packPath: string; createdTempLockFile: boolean }> {
// Contextual queries now live within the standard library packs.
// This simplifies distribution (you don't need the standard query pack to use the AST viewer),
// but if the library pack doesn't have a lockfile, we won't be able to find
// other pack dependencies of the library pack.
// Work out the enclosing pack.
const packContents = await cli.packPacklist(query, false);
const packFilePath = packContents.find((p) =>
["codeql-pack.yml", "qlpack.yml"].includes(path.basename(p)),
);
if (packFilePath === undefined) {
// Should not happen; we already resolved this query.
throw new Error(
`Could not find a CodeQL pack file for the pack enclosing the contextual query ${query}`,
);
}
const packPath = path.dirname(packFilePath);
const lockFilePath = packContents.find((p) =>
["codeql-pack.lock.yml", "qlpack.lock.yml"].includes(path.basename(p)),
);
let createdTempLockFile = false;
if (!lockFilePath) {
// No lock file, likely because this library pack is in the package cache.
// Create a lock file so that we can resolve dependencies and library path
// for the contextual query.
void logger.log(
`Library pack ${packPath} is missing a lock file; creating a temporary lock file`,
);
await cli.packResolveDependencies(packPath);
createdTempLockFile = true;
// Clear CLI server pack cache before installing dependencies,
// so that it picks up the new lock file, not the previously cached pack.
void logger.log("Clearing the CodeQL CLI server's pack cache");
await cli.clearCache();
// Install dependencies.
void logger.log(
`Installing package dependencies for library pack ${packPath}`,
);
await cli.packInstall(packPath);
}
return { packPath, createdTempLockFile };
}
async function removeTemporaryLockFile(packPath: string) {
const tempLockFilePath = path.resolve(packPath, "codeql-pack.lock.yml");
void logger.log(
`Deleting temporary package lock file at ${tempLockFilePath}`,
);
// It's fine if the file doesn't exist.
await fs.promises.rm(path.resolve(packPath, "codeql-pack.lock.yml"), {
force: true,
});
}
export async function runContextualQuery(
query: string,
db: DatabaseItem,
queryStorageDir: string,
qs: QueryRunner,
cli: CodeQLCliServer,
progress: ProgressCallback,
token: CancellationToken,
templates: Record<string, string>,
) {
const { packPath, createdTempLockFile } = await resolveContextualQuery(
cli,
query,
);
const initialInfo = await createInitialQueryInfo(
Uri.file(query),
{
name: db.name,
databaseUri: db.databaseUri.toString(),
},
false,
);
void logger.log(
`Running contextual query ${query}; results will be stored in ${queryStorageDir}`,
);
const queryResult = await qs.compileAndRunQueryAgainstDatabase(
db,
initialInfo,
queryStorageDir,
progress,
token,
templates,
);
if (createdTempLockFile) {
await removeTemporaryLockFile(packPath);
}
return queryResult;
}
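A minimal sketch of how a caller can drive the new `runContextualQuery` helper, assuming the query path, database item, CLI server, query runner, progress callback, and cancellation token already exist. The wrapper function and its name are illustrative, not part of the extension; `TEMPLATE_NAME` comes from `locationFinder`, as used further down this diff.

// Illustrative only: the call shape of runContextualQuery as added above.
// It resolves the enclosing pack, creates (and later removes) a temporary
// lock file when the library pack has none, then runs the query.
async function runPrintAstQuery(
  queryPath: string,
  db: DatabaseItem,
  queryStorageDir: string,
  qs: QueryRunner,
  cli: CodeQLCliServer,
  progress: ProgressCallback,
  token: CancellationToken,
  pathWithinSourceArchive: string,
) {
  const templates: Record<string, string> = {
    // TEMPLATE_NAME tells the contextual query which source file to analyse.
    [TEMPLATE_NAME]: pathWithinSourceArchive,
  };
  return runContextualQuery(
    queryPath,
    db,
    queryStorageDir,
    qs,
    cli,
    progress,
    token,
    templates,
  );
}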

View File

@@ -8,31 +8,40 @@ import {
ReferenceContext,
ReferenceProvider,
TextDocument,
Uri
} from 'vscode';
Uri,
} from "vscode";
import { decodeSourceArchiveUri, encodeArchiveBasePath, zipArchiveScheme } from '../archive-filesystem-provider';
import { CodeQLCliServer } from '../cli';
import { DatabaseManager } from '../databases';
import { CachedOperation } from '../helpers';
import { ProgressCallback, withProgress } from '../commandRunner';
import AstBuilder from './astBuilder';
import {
KeyType,
} from './keyType';
import { FullLocationLink, getLocationsForUriString, TEMPLATE_NAME } from './locationFinder';
import { qlpackOfDatabase, resolveQueries } from './queryResolver';
import { isCanary, NO_CACHE_AST_VIEWER } from '../config';
import { createInitialQueryInfo, QueryWithResults } from '../run-queries-shared';
import { QueryRunner } from '../queryRunner';
decodeSourceArchiveUri,
encodeArchiveBasePath,
zipArchiveScheme,
} from "../archive-filesystem-provider";
import { CodeQLCliServer } from "../cli";
import { DatabaseManager } from "../databases";
import { CachedOperation } from "../helpers";
import { ProgressCallback, withProgress } from "../commandRunner";
import AstBuilder from "./astBuilder";
import { KeyType } from "./keyType";
import {
FullLocationLink,
getLocationsForUriString,
TEMPLATE_NAME,
} from "./locationFinder";
import {
qlpackOfDatabase,
resolveQueries,
runContextualQuery,
} from "./queryResolver";
import { isCanary, NO_CACHE_AST_VIEWER } from "../config";
import { QueryWithResults } from "../run-queries-shared";
import { QueryRunner } from "../queryRunner";
/**
* Run templated CodeQL queries to find definitions and references in
* Runs templated CodeQL queries to find definitions in
* source-language files. We may eventually want to find a way to
* generalize this to other custom queries, e.g. showing dataflow to
* or from a selected identifier.
*/
export class TemplateQueryDefinitionProvider implements DefinitionProvider {
private cache: CachedOperation<LocationLink[]>;
@@ -42,10 +51,16 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
private dbm: DatabaseManager,
private queryStorageDir: string,
) {
this.cache = new CachedOperation<LocationLink[]>(this.getDefinitions.bind(this));
this.cache = new CachedOperation<LocationLink[]>(
this.getDefinitions.bind(this),
);
}
async provideDefinition(document: TextDocument, position: Position, _token: CancellationToken): Promise<LocationLink[]> {
async provideDefinition(
document: TextDocument,
position: Position,
_token: CancellationToken,
): Promise<LocationLink[]> {
const fileLinks = await this.cache.get(document.uri.toString());
const locLinks: LocationLink[] = [];
for (const link of fileLinks) {
@@ -57,26 +72,35 @@ export class TemplateQueryDefinitionProvider implements DefinitionProvider {
}
private async getDefinitions(uriString: string): Promise<LocationLink[]> {
return withProgress({
location: ProgressLocation.Notification,
cancellable: true,
title: 'Finding definitions'
}, async (progress, token) => {
return getLocationsForUriString(
this.cli,
this.qs,
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
token,
(src, _dest) => src === uriString
);
});
return withProgress(
{
location: ProgressLocation.Notification,
cancellable: true,
title: "Finding definitions",
},
async (progress, token) => {
return getLocationsForUriString(
this.cli,
this.qs,
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
token,
(src, _dest) => src === uriString,
);
},
);
}
}
/**
* Runs templated CodeQL queries to find references in
* source-language files. We may eventually want to find a way to
* generalize this to other custom queries, e.g. showing dataflow to
* or from a selected identifier.
*/
export class TemplateQueryReferenceProvider implements ReferenceProvider {
private cache: CachedOperation<FullLocationLink[]>;
@@ -86,51 +110,63 @@ export class TemplateQueryReferenceProvider implements ReferenceProvider {
private dbm: DatabaseManager,
private queryStorageDir: string,
) {
this.cache = new CachedOperation<FullLocationLink[]>(this.getReferences.bind(this));
this.cache = new CachedOperation<FullLocationLink[]>(
this.getReferences.bind(this),
);
}
async provideReferences(
document: TextDocument,
position: Position,
_context: ReferenceContext,
_token: CancellationToken
_token: CancellationToken,
): Promise<Location[]> {
const fileLinks = await this.cache.get(document.uri.toString());
const locLinks: Location[] = [];
for (const link of fileLinks) {
if (link.targetRange!.contains(position)) {
locLinks.push({ range: link.originSelectionRange!, uri: link.originUri });
locLinks.push({
range: link.originSelectionRange!,
uri: link.originUri,
});
}
}
return locLinks;
}
private async getReferences(uriString: string): Promise<FullLocationLink[]> {
return withProgress({
location: ProgressLocation.Notification,
cancellable: true,
title: 'Finding references'
}, async (progress, token) => {
return getLocationsForUriString(
this.cli,
this.qs,
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
token,
(src, _dest) => src === uriString
);
});
return withProgress(
{
location: ProgressLocation.Notification,
cancellable: true,
title: "Finding references",
},
async (progress, token) => {
return getLocationsForUriString(
this.cli,
this.qs,
this.dbm,
uriString,
KeyType.DefinitionQuery,
this.queryStorageDir,
progress,
token,
(src, _dest) => src === uriString,
);
},
);
}
}
type QueryWithDb = {
query: QueryWithResults,
dbUri: Uri
query: QueryWithResults;
dbUri: Uri;
};
/**
* Run templated CodeQL queries to produce AST information for
* source-language files.
*/
export class TemplatePrintAstProvider {
private cache: CachedOperation<QueryWithDb>;
@@ -146,17 +182,20 @@ export class TemplatePrintAstProvider {
async provideAst(
progress: ProgressCallback,
token: CancellationToken,
fileUri?: Uri
fileUri?: Uri,
): Promise<AstBuilder | undefined> {
if (!fileUri) {
throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
throw new Error(
"Cannot view the AST. Please select a valid source file inside a CodeQL database.",
);
}
const { query, dbUri } = this.shouldCache()
? await this.cache.get(fileUri.toString(), progress, token)
: await this.getAst(fileUri.toString(), progress, token);
return new AstBuilder(
query, this.cli,
query,
this.cli,
this.dbm.findDatabaseItem(dbUri)!,
fileUri,
);
@@ -169,106 +208,126 @@ export class TemplatePrintAstProvider {
private async getAst(
uriString: string,
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<QueryWithDb> {
const uri = Uri.parse(uriString, true);
if (uri.scheme !== zipArchiveScheme) {
throw new Error('Cannot view the AST. Please select a valid source file inside a CodeQL database.');
throw new Error(
"Cannot view the AST. Please select a valid source file inside a CodeQL database.",
);
}
const zippedArchive = decodeSourceArchiveUri(uri);
const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
const sourceArchiveUri = encodeArchiveBasePath(
zippedArchive.sourceArchiveZipPath,
);
const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
if (!db) {
throw new Error('Can\'t infer database from the provided source.');
throw new Error("Can't infer database from the provided source.");
}
const qlpacks = await qlpackOfDatabase(this.cli, db);
const queries = await resolveQueries(this.cli, qlpacks, KeyType.PrintAstQuery);
const queries = await resolveQueries(
this.cli,
qlpacks,
KeyType.PrintAstQuery,
);
if (queries.length > 1) {
throw new Error('Found multiple Print AST queries. Can\'t continue');
throw new Error("Found multiple Print AST queries. Can't continue");
}
if (queries.length === 0) {
throw new Error('Did not find any Print AST queries. Can\'t continue');
throw new Error("Did not find any Print AST queries. Can't continue");
}
const query = queries[0];
const templates: Record<string, string> = {
[TEMPLATE_NAME]:
zippedArchive.pathWithinSourceArchive
[TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive,
};
const initialInfo = await createInitialQueryInfo(
Uri.file(query),
{
name: db.name,
databaseUri: db.databaseUri.toString(),
},
false
const queryResult = await runContextualQuery(
query,
db,
this.queryStorageDir,
this.qs,
this.cli,
progress,
token,
templates,
);
return {
query: await this.qs.compileAndRunQueryAgainstDatabase(
db,
initialInfo,
this.queryStorageDir,
progress,
token,
templates
),
dbUri: db.databaseUri
query: queryResult,
dbUri: db.databaseUri,
};
}
}
/**
* Run templated CodeQL queries to produce CFG information for
* source-language files.
*/
export class TemplatePrintCfgProvider {
private cache: CachedOperation<[Uri, Record<string, string>] | undefined>;
constructor(
private cli: CodeQLCliServer,
private dbm: DatabaseManager,
) {
this.cache = new CachedOperation<[Uri, Record<string, string>] | undefined>(this.getCfgUri.bind(this));
constructor(private cli: CodeQLCliServer, private dbm: DatabaseManager) {
this.cache = new CachedOperation<[Uri, Record<string, string>] | undefined>(
this.getCfgUri.bind(this),
);
}
async provideCfgUri(document?: TextDocument): Promise<[Uri, Record<string, string>] | undefined> {
async provideCfgUri(
document?: TextDocument,
): Promise<[Uri, Record<string, string>] | undefined> {
if (!document) {
return;
}
return await this.cache.get(document.uri.toString());
}
private async getCfgUri(uriString: string): Promise<[Uri, Record<string, string>]> {
private async getCfgUri(
uriString: string,
): Promise<[Uri, Record<string, string>]> {
const uri = Uri.parse(uriString, true);
if (uri.scheme !== zipArchiveScheme) {
throw new Error('CFG Viewing is only available for databases with zipped source archives.');
throw new Error(
"CFG Viewing is only available for databases with zipped source archives.",
);
}
const zippedArchive = decodeSourceArchiveUri(uri);
const sourceArchiveUri = encodeArchiveBasePath(zippedArchive.sourceArchiveZipPath);
const sourceArchiveUri = encodeArchiveBasePath(
zippedArchive.sourceArchiveZipPath,
);
const db = this.dbm.findDatabaseItemBySourceArchive(sourceArchiveUri);
if (!db) {
throw new Error('Can\'t infer database from the provided source.');
throw new Error("Can't infer database from the provided source.");
}
const qlpack = await qlpackOfDatabase(this.cli, db);
if (!qlpack) {
throw new Error('Can\'t infer qlpack from database source archive.');
throw new Error("Can't infer qlpack from database source archive.");
}
const queries = await resolveQueries(this.cli, qlpack, KeyType.PrintCfgQuery);
const queries = await resolveQueries(
this.cli,
qlpack,
KeyType.PrintCfgQuery,
);
if (queries.length > 1) {
throw new Error(`Found multiple Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`);
throw new Error(
`Found multiple Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`,
);
}
if (queries.length === 0) {
throw new Error(`Did not find any Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`);
throw new Error(
`Did not find any Print CFG queries. Can't continue. Make sure there is exactly one query with the tag ${KeyType.PrintCfgQuery}`,
);
}
const queryUri = Uri.file(queries[0]);
const templates: Record<string, string> = {
[TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive
[TEMPLATE_NAME]: zippedArchive.pathWithinSourceArchive,
};
return [queryUri, templates];
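The providers in this file are plain `DefinitionProvider`/`ReferenceProvider` implementations; the diff does not show where they are registered, so the following is only a sketch of the usual VS Code wiring, with the constructor argument order assumed from the fields the classes use (`cli`, `qs`, `dbm`, `queryStorageDir`).

// Sketch of typical registration; the selector, argument order, and the fact
// that this happens in the extension's activate() are assumptions, not shown
// in this diff.
import { languages, DocumentSelector, Disposable } from "vscode";

function registerTemplateProviders(
  cli: CodeQLCliServer,
  qs: QueryRunner,
  dbm: DatabaseManager,
  queryStorageDir: string,
): Disposable[] {
  // Only documents inside a CodeQL source archive are candidates.
  const selector: DocumentSelector = { scheme: zipArchiveScheme };
  return [
    languages.registerDefinitionProvider(
      selector,
      new TemplateQueryDefinitionProvider(cli, qs, dbm, queryStorageDir),
    ),
    languages.registerReferenceProvider(
      selector,
      new TemplateQueryReferenceProvider(cli, qs, dbm, queryStorageDir),
    ),
  ];
}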

View File

@@ -1,30 +1,20 @@
import fetch, { Response } from 'node-fetch';
import { zip } from 'zip-a-folder';
import * as unzipper from 'unzipper';
import {
Uri,
CancellationToken,
commands,
window,
} from 'vscode';
import { CodeQLCliServer } from './cli';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as Octokit from '@octokit/rest';
import { retry } from '@octokit/plugin-retry';
import fetch, { Response } from "node-fetch";
import { zip } from "zip-a-folder";
import * as unzipper from "unzipper";
import { Uri, CancellationToken, commands, window } from "vscode";
import { CodeQLCliServer } from "./cli";
import * as fs from "fs-extra";
import * as path from "path";
import * as Octokit from "@octokit/rest";
import { retry } from "@octokit/plugin-retry";
import { DatabaseManager, DatabaseItem } from './databases';
import {
showAndLogInformationMessage,
} from './helpers';
import {
reportStreamProgress,
ProgressCallback,
} from './commandRunner';
import { logger } from './logging';
import { tmpDir } from './helpers';
import { Credentials } from './authentication';
import { REPO_REGEX, getErrorMessage } from './pure/helpers-pure';
import { DatabaseManager, DatabaseItem } from "./databases";
import { showAndLogInformationMessage } from "./helpers";
import { reportStreamProgress, ProgressCallback } from "./commandRunner";
import { logger } from "./logging";
import { tmpDir } from "./helpers";
import { Credentials } from "./authentication";
import { REPO_REGEX, getErrorMessage } from "./pure/helpers-pure";
/**
* Prompts a user to fetch a database from a remote location. Database is assumed to be an archive file.
@@ -37,10 +27,10 @@ export async function promptImportInternetDatabase(
storagePath: string,
progress: ProgressCallback,
token: CancellationToken,
cli?: CodeQLCliServer
cli?: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
const databaseUrl = await window.showInputBox({
prompt: 'Enter URL of zipfile of database to download',
prompt: "Enter URL of zipfile of database to download",
});
if (!databaseUrl) {
return;
@@ -56,15 +46,16 @@ export async function promptImportInternetDatabase(
undefined,
progress,
token,
cli
cli,
);
if (item) {
await commands.executeCommand('codeQLDatabases.focus');
void showAndLogInformationMessage('Database downloaded and imported successfully.');
await commands.executeCommand("codeQLDatabases.focus");
void showAndLogInformationMessage(
"Database downloaded and imported successfully.",
);
}
return item;
}
/**
@@ -81,16 +72,17 @@ export async function promptImportGithubDatabase(
credentials: Credentials | undefined,
progress: ProgressCallback,
token: CancellationToken,
cli?: CodeQLCliServer
cli?: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
progress({
message: 'Choose repository',
message: "Choose repository",
step: 1,
maxStep: 2
maxStep: 2,
});
const githubRepo = await window.showInputBox({
title: 'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
placeHolder: 'https://github.com/<owner>/<repo> or <owner>/<repo>',
title:
'Enter a GitHub repository URL or "name with owner" (e.g. https://github.com/github/codeql or github/codeql)',
placeHolder: "https://github.com/<owner>/<repo> or <owner>/<repo>",
ignoreFocusOut: true,
});
if (!githubRepo) {
@@ -101,9 +93,15 @@ export async function promptImportGithubDatabase(
throw new Error(`Invalid GitHub repository: ${githubRepo}`);
}
const octokit = credentials ? await credentials.getOctokit(true) : new Octokit.Octokit({ retry });
const octokit = credentials
? await credentials.getOctokit(true)
: new Octokit.Octokit({ retry });
const result = await convertGithubNwoToDatabaseUrl(githubRepo, octokit, progress);
const result = await convertGithubNwoToDatabaseUrl(
githubRepo,
octokit,
progress,
);
if (!result) {
return;
}
@@ -120,20 +118,25 @@ export async function promptImportGithubDatabase(
* }
* We only need the actual token string.
*/
const octokitToken = (await octokit.auth() as { token: string })?.token;
const octokitToken = ((await octokit.auth()) as { token: string })?.token;
const item = await databaseArchiveFetcher(
databaseUrl,
{ 'Accept': 'application/zip', 'Authorization': octokitToken ? `Bearer ${octokitToken}` : '' },
{
Accept: "application/zip",
Authorization: octokitToken ? `Bearer ${octokitToken}` : "",
},
databaseManager,
storagePath,
`${owner}/${name}`,
progress,
token,
cli
cli,
);
if (item) {
await commands.executeCommand('codeQLDatabases.focus');
void showAndLogInformationMessage('Database downloaded and imported successfully.');
await commands.executeCommand("codeQLDatabases.focus");
void showAndLogInformationMessage(
"Database downloaded and imported successfully.",
);
return item;
}
return;
@@ -152,16 +155,16 @@ export async function promptImportLgtmDatabase(
storagePath: string,
progress: ProgressCallback,
token: CancellationToken,
cli?: CodeQLCliServer
cli?: CodeQLCliServer,
): Promise<DatabaseItem | undefined> {
progress({
message: 'Choose project',
message: "Choose project",
step: 1,
maxStep: 2
maxStep: 2,
});
const lgtmUrl = await window.showInputBox({
prompt:
'Enter the project slug or URL on LGTM (e.g., g/github/codeql or https://lgtm.com/projects/g/github/codeql)',
"Enter the project slug or URL on LGTM (e.g., g/github/codeql or https://lgtm.com/projects/g/github/codeql)",
});
if (!lgtmUrl) {
return;
@@ -178,11 +181,13 @@ export async function promptImportLgtmDatabase(
undefined,
progress,
token,
cli
cli,
);
if (item) {
await commands.executeCommand('codeQLDatabases.focus');
void showAndLogInformationMessage('Database downloaded and imported successfully.');
await commands.executeCommand("codeQLDatabases.focus");
void showAndLogInformationMessage(
"Database downloaded and imported successfully.",
);
}
return item;
}
@@ -194,7 +199,10 @@ export async function promptImportLgtmDatabase(
export async function retrieveCanonicalRepoName(lgtmUrl: string) {
const givenRepoName = extractProjectSlug(lgtmUrl);
const response = await checkForFailingResponse(await fetch(`https://api.github.com/repos/${givenRepoName}`), 'Failed to locate the repository on github');
const response = await checkForFailingResponse(
await fetch(`https://api.github.com/repos/${givenRepoName}`),
"Failed to locate the repository on github",
);
const repo = await response.json();
if (!repo || !repo.full_name) {
return;
@@ -226,16 +234,20 @@ export async function importArchiveDatabase(
undefined,
progress,
token,
cli
cli,
);
if (item) {
await commands.executeCommand('codeQLDatabases.focus');
void showAndLogInformationMessage('Database unzipped and imported successfully.');
await commands.executeCommand("codeQLDatabases.focus");
void showAndLogInformationMessage(
"Database unzipped and imported successfully.",
);
}
return item;
} catch (e) {
if (getErrorMessage(e).includes('unexpected end of file')) {
throw new Error('Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.');
if (getErrorMessage(e).includes("unexpected end of file")) {
throw new Error(
"Database is corrupt or too large. Try unzipping outside of VS Code and importing the unzipped folder instead.",
);
} else {
// delegate
throw e;
@@ -266,12 +278,12 @@ async function databaseArchiveFetcher(
cli?: CodeQLCliServer,
): Promise<DatabaseItem> {
progress({
message: 'Getting database',
message: "Getting database",
step: 1,
maxStep: 4,
});
if (!storagePath) {
throw new Error('No storage path specified.');
throw new Error("No storage path specified.");
}
await fs.ensureDir(storagePath);
const unzipPath = await getStorageFolder(storagePath, databaseUrl);
@@ -283,7 +295,7 @@ async function databaseArchiveFetcher(
}
progress({
message: 'Opening database',
message: "Opening database",
step: 3,
maxStep: 4,
});
@@ -291,22 +303,27 @@ async function databaseArchiveFetcher(
// find the path to the database. The actual database might be in a sub-folder
const dbPath = await findDirWithFile(
unzipPath,
'.dbinfo',
'codeql-database.yml'
".dbinfo",
"codeql-database.yml",
);
if (dbPath) {
progress({
message: 'Validating and fixing source location',
message: "Validating and fixing source location",
step: 4,
maxStep: 4,
});
await ensureZippedSourceLocation(dbPath);
const item = await databaseManager.openDatabase(progress, token, Uri.file(dbPath), nameOverride);
const item = await databaseManager.openDatabase(
progress,
token,
Uri.file(dbPath),
nameOverride,
);
await databaseManager.setCurrentDatabaseItem(item);
return item;
} else {
throw new Error('Database not found in archive.');
throw new Error("Database not found in archive.");
}
}
@@ -318,7 +335,7 @@ async function getStorageFolder(storagePath: string, urlStr: string) {
// MacOS has a max filename length of 255
// and remove a few extra chars in case we need to add a counter at the end.
let lastName = path.basename(url.path).substring(0, 250);
if (lastName.endsWith('.zip')) {
if (lastName.endsWith(".zip")) {
lastName = lastName.substring(0, lastName.length - 4);
}
@@ -331,7 +348,7 @@ async function getStorageFolder(storagePath: string, urlStr: string) {
counter++;
folderName = path.join(realpath, `${lastName}-${counter}`);
if (counter > 100) {
throw new Error('Could not find a unique name for downloaded database.');
throw new Error("Could not find a unique name for downloaded database.");
}
}
return folderName;
@@ -345,8 +362,8 @@ function validateHttpsUrl(databaseUrl: string) {
throw new Error(`Invalid url: ${databaseUrl}`);
}
if (uri.scheme !== 'https') {
throw new Error('Must use https for downloading a database.');
if (uri.scheme !== "https") {
throw new Error("Must use https for downloading a database.");
}
}
@@ -354,7 +371,7 @@ async function readAndUnzip(
zipUrl: string,
unzipPath: string,
cli?: CodeQLCliServer,
progress?: ProgressCallback
progress?: ProgressCallback,
) {
// TODO: Providing progress as the file is unzipped is currently blocked
// on https://github.com/ZJONSSON/node-unzipper/issues/222
@@ -362,9 +379,9 @@ async function readAndUnzip(
progress?.({
maxStep: 10,
step: 9,
message: `Unzipping into ${path.basename(unzipPath)}`
message: `Unzipping into ${path.basename(unzipPath)}`,
});
if (cli && await cli.cliConstraints.supportsDatabaseUnbundle()) {
if (cli && (await cli.cliConstraints.supportsDatabaseUnbundle())) {
// Use the `database unbundle` command if the installed cli version supports it
await cli.databaseUnbundle(zipFile, unzipPath);
} else {
@@ -381,7 +398,7 @@ async function fetchAndUnzip(
requestHeaders: { [key: string]: string },
unzipPath: string,
cli?: CodeQLCliServer,
progress?: ProgressCallback
progress?: ProgressCallback,
) {
// Although it is possible to download and stream directly to an unzipped directory,
// we need to avoid this for two reasons. The central directory is located at the
@@ -393,33 +410,47 @@ async function fetchAndUnzip(
progress?.({
maxStep: 3,
message: 'Downloading database',
message: "Downloading database",
step: 1,
});
const response = await checkForFailingResponse(
await fetch(databaseUrl, { headers: requestHeaders }),
'Error downloading database'
"Error downloading database",
);
const archiveFileStream = fs.createWriteStream(archivePath);
const contentLength = response.headers.get('content-length');
const contentLength = response.headers.get("content-length");
const totalNumBytes = contentLength ? parseInt(contentLength, 10) : undefined;
reportStreamProgress(response.body, 'Downloading database', totalNumBytes, progress);
await new Promise((resolve, reject) =>
response.body.pipe(archiveFileStream)
.on('finish', resolve)
.on('error', reject)
reportStreamProgress(
response.body,
"Downloading database",
totalNumBytes,
progress,
);
await readAndUnzip(Uri.file(archivePath).toString(true), unzipPath, cli, progress);
await new Promise((resolve, reject) =>
response.body
.pipe(archiveFileStream)
.on("finish", resolve)
.on("error", reject),
);
await readAndUnzip(
Uri.file(archivePath).toString(true),
unzipPath,
cli,
progress,
);
// remove archivePath eagerly since these archives can be large.
await fs.remove(archivePath);
}
async function checkForFailingResponse(response: Response, errorMessage: string): Promise<Response | never> {
async function checkForFailingResponse(
response: Response,
errorMessage: string,
): Promise<Response | never> {
if (response.ok) {
return response;
}
@@ -429,7 +460,8 @@ async function checkForFailingResponse(response: Response, errorMessage: string)
let msg: string;
try {
const obj = JSON.parse(text);
msg = obj.error || obj.message || obj.reason || JSON.stringify(obj, null, 2);
msg =
obj.error || obj.message || obj.reason || JSON.stringify(obj, null, 2);
} catch (e) {
msg = text;
}
@@ -437,7 +469,7 @@ async function checkForFailingResponse(response: Response, errorMessage: string)
}
function isFile(databaseUrl: string) {
return Uri.parse(databaseUrl).scheme === 'file';
return Uri.parse(databaseUrl).scheme === "file";
}
/**
@@ -481,7 +513,7 @@ export async function findDirWithFile(
* @return true if this looks like a valid GitHub repository URL or NWO
*/
export function looksLikeGithubRepo(
githubRepo: string | undefined
githubRepo: string | undefined,
): githubRepo is string {
if (!githubRepo) {
return false;
@@ -500,13 +532,13 @@ export function looksLikeGithubRepo(
function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
try {
const uri = Uri.parse(githubUrl, true);
if (uri.scheme !== 'https') {
if (uri.scheme !== "https") {
return;
}
if (uri.authority !== 'github.com' && uri.authority !== 'www.github.com') {
if (uri.authority !== "github.com" && uri.authority !== "www.github.com") {
return;
}
const paths = uri.path.split('/').filter((segment: string) => segment);
const paths = uri.path.split("/").filter((segment: string) => segment);
const nwo = `${paths[0]}/${paths[1]}`;
if (REPO_REGEX.test(nwo)) {
return nwo;
@@ -522,16 +554,23 @@ function convertGitHubUrlToNwo(githubUrl: string): string | undefined {
export async function convertGithubNwoToDatabaseUrl(
githubRepo: string,
octokit: Octokit.Octokit,
progress: ProgressCallback): Promise<{
databaseUrl: string,
owner: string,
name: string
} | undefined> {
progress: ProgressCallback,
): Promise<
| {
databaseUrl: string;
owner: string;
name: string;
}
| undefined
> {
try {
const nwo = convertGitHubUrlToNwo(githubRepo) || githubRepo;
const [owner, repo] = nwo.split('/');
const [owner, repo] = nwo.split("/");
const response = await octokit.request('GET /repos/:owner/:repo/code-scanning/codeql/databases', { owner, repo });
const response = await octokit.request(
"GET /repos/:owner/:repo/code-scanning/codeql/databases",
{ owner, repo },
);
const languages = response.data.map((db: any) => db.language);
@@ -543,9 +582,8 @@ export async function convertGithubNwoToDatabaseUrl(
return {
databaseUrl: `https://api.github.com/repos/${owner}/${repo}/code-scanning/codeql/databases/${language}`,
owner,
name: repo
name: repo,
};
} catch (e) {
void logger.log(`Error: ${getErrorMessage(e)}`);
throw new Error(`Unable to get database for '${githubRepo}'`);
@@ -568,7 +606,9 @@ export async function convertGithubNwoToDatabaseUrl(
* @return true if this looks like an LGTM project url
*/
// exported for testing
export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string {
export function looksLikeLgtmUrl(
lgtmUrl: string | undefined,
): lgtmUrl is string {
if (!lgtmUrl) {
return false;
}
@@ -579,16 +619,16 @@ export function looksLikeLgtmUrl(lgtmUrl: string | undefined): lgtmUrl is string
try {
const uri = Uri.parse(lgtmUrl, true);
if (uri.scheme !== 'https') {
if (uri.scheme !== "https") {
return false;
}
if (uri.authority !== 'lgtm.com' && uri.authority !== 'www.lgtm.com') {
if (uri.authority !== "lgtm.com" && uri.authority !== "www.lgtm.com") {
return false;
}
const paths = uri.path.split('/').filter((segment: string) => segment);
return paths.length >= 4 && paths[0] === 'projects';
const paths = uri.path.split("/").filter((segment: string) => segment);
return paths.length >= 4 && paths[0] === "projects";
} catch (e) {
return false;
}
@@ -598,8 +638,8 @@ function convertRawLgtmSlug(maybeSlug: string): string | undefined {
if (!maybeSlug) {
return;
}
const segments = maybeSlug.split('/');
const providers = ['g', 'gl', 'b', 'git'];
const segments = maybeSlug.split("/");
const providers = ["g", "gl", "b", "git"];
if (segments.length === 3 && providers.includes(segments[0])) {
return `https://lgtm.com/projects/${maybeSlug}`;
}
@@ -608,7 +648,7 @@ function convertRawLgtmSlug(maybeSlug: string): string | undefined {
function extractProjectSlug(lgtmUrl: string): string | undefined {
// Only matches the '/g/' provider (github)
const re = new RegExp('https://lgtm.com/projects/g/(.*[^/])');
const re = new RegExp("https://lgtm.com/projects/g/(.*[^/])");
const match = lgtmUrl.match(re);
if (!match) {
return;
@@ -619,7 +659,8 @@ function extractProjectSlug(lgtmUrl: string): string | undefined {
// exported for testing
export async function convertLgtmUrlToDatabaseUrl(
lgtmUrl: string,
progress: ProgressCallback) {
progress: ProgressCallback,
) {
try {
lgtmUrl = convertRawLgtmSlug(lgtmUrl) || lgtmUrl;
let projectJson = await downloadLgtmProjectMetadata(lgtmUrl);
@@ -634,23 +675,26 @@ export async function convertLgtmUrlToDatabaseUrl(
canonicalName = convertRawLgtmSlug(`g/${canonicalName}`);
projectJson = await downloadLgtmProjectMetadata(canonicalName);
if (projectJson.code === 404) {
throw new Error('Failed to download project from LGTM.');
throw new Error("Failed to download project from LGTM.");
}
}
const languages = projectJson?.languages?.map((lang: { language: string }) => lang.language) || [];
const languages =
projectJson?.languages?.map(
(lang: { language: string }) => lang.language,
) || [];
const language = await promptForLanguage(languages, progress);
if (!language) {
return;
}
return `https://lgtm.com/${[
'api',
'v1.0',
'snapshots',
"api",
"v1.0",
"snapshots",
projectJson.id,
language,
].join('/')}`;
].join("/")}`;
} catch (e) {
void logger.log(`Error: ${getErrorMessage(e)}`);
throw new Error(`Invalid LGTM URL: ${lgtmUrl}`);
@@ -659,37 +703,34 @@ export async function convertLgtmUrlToDatabaseUrl(
async function downloadLgtmProjectMetadata(lgtmUrl: string): Promise<any> {
const uri = Uri.parse(lgtmUrl, true);
const paths = ['api', 'v1.0'].concat(
uri.path.split('/').filter((segment: string) => segment)
).slice(0, 6);
const projectUrl = `https://lgtm.com/${paths.join('/')}`;
const paths = ["api", "v1.0"]
.concat(uri.path.split("/").filter((segment: string) => segment))
.slice(0, 6);
const projectUrl = `https://lgtm.com/${paths.join("/")}`;
const projectResponse = await fetch(projectUrl);
return projectResponse.json();
}
async function promptForLanguage(
languages: string[],
progress: ProgressCallback
progress: ProgressCallback,
): Promise<string | undefined> {
progress({
message: 'Choose language',
message: "Choose language",
step: 2,
maxStep: 2
maxStep: 2,
});
if (!languages.length) {
throw new Error('No databases found');
throw new Error("No databases found");
}
if (languages.length === 1) {
return languages[0];
}
return await window.showQuickPick(
languages,
{
placeHolder: 'Select the database language to download:',
ignoreFocusOut: true,
}
);
return await window.showQuickPick(languages, {
placeHolder: "Select the database language to download:",
ignoreFocusOut: true,
});
}
/**
@@ -704,10 +745,13 @@ async function promptForLanguage(
* @param databasePath The full path to the unzipped database
*/
async function ensureZippedSourceLocation(databasePath: string): Promise<void> {
const srcFolderPath = path.join(databasePath, 'src');
const srcZipPath = srcFolderPath + '.zip';
const srcFolderPath = path.join(databasePath, "src");
const srcZipPath = srcFolderPath + ".zip";
if ((await fs.pathExists(srcFolderPath)) && !(await fs.pathExists(srcZipPath))) {
if (
(await fs.pathExists(srcFolderPath)) &&
!(await fs.pathExists(srcZipPath))
) {
await zip(srcFolderPath, srcZipPath);
await fs.remove(srcFolderPath);
}
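For illustration, a couple of expectations for the exported URL helpers above, written as a Jest-style test (the project's actual test framework and setup are not shown in this diff); the behaviour is inferred from the visible checks on scheme, host, and path and from the doc comments.

// Sketch only: expectations inferred from the logic visible above.
describe("databaseFetcher URL helpers", () => {
  it("accepts LGTM project URLs and rejects other hosts", () => {
    expect(
      looksLikeLgtmUrl("https://lgtm.com/projects/g/github/codeql"),
    ).toBe(true);
    expect(
      looksLikeLgtmUrl("https://example.com/projects/g/github/codeql"),
    ).toBe(false);
  });

  it("recognises GitHub repository URLs and rejects missing input", () => {
    expect(looksLikeGithubRepo("https://github.com/github/codeql")).toBe(true);
    expect(looksLikeGithubRepo(undefined)).toBe(false);
  });
});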

View File

@@ -1,5 +1,5 @@
import * as path from 'path';
import { DisposableObject } from './pure/disposable-object';
import * as path from "path";
import { DisposableObject } from "./pure/disposable-object";
import {
Event,
EventEmitter,
@@ -9,36 +9,36 @@ import {
Uri,
window,
env,
} from 'vscode';
import * as fs from 'fs-extra';
} from "vscode";
import * as fs from "fs-extra";
import {
DatabaseChangedEvent,
DatabaseItem,
DatabaseManager,
} from './databases';
} from "./databases";
import {
commandRunner,
commandRunnerWithProgress,
ProgressCallback,
} from './commandRunner';
} from "./commandRunner";
import {
isLikelyDatabaseRoot,
isLikelyDbLanguageFolder,
showAndLogErrorMessage
} from './helpers';
import { logger } from './logging';
showAndLogErrorMessage,
} from "./helpers";
import { logger } from "./logging";
import {
importArchiveDatabase,
promptImportGithubDatabase,
promptImportInternetDatabase,
promptImportLgtmDatabase,
} from './databaseFetcher';
import { CancellationToken } from 'vscode';
import { asyncFilter, getErrorMessage } from './pure/helpers-pure';
import { Credentials } from './authentication';
import { QueryRunner } from './queryRunner';
import { isCanary } from './config';
} from "./databaseFetcher";
import { CancellationToken } from "vscode";
import { asyncFilter, getErrorMessage } from "./pure/helpers-pure";
import { Credentials } from "./authentication";
import { QueryRunner } from "./queryRunner";
import { isCanary } from "./config";
type ThemableIconPath = { light: string; dark: string } | string;
@@ -46,20 +46,20 @@ type ThemableIconPath = { light: string; dark: string } | string;
* Path to icons to display next to currently selected database.
*/
const SELECTED_DATABASE_ICON: ThemableIconPath = {
light: 'media/light/check.svg',
dark: 'media/dark/check.svg',
light: "media/light/check.svg",
dark: "media/dark/check.svg",
};
/**
* Path to icon to display next to an invalid database.
*/
const INVALID_DATABASE_ICON: ThemableIconPath = 'media/red-x.svg';
const INVALID_DATABASE_ICON: ThemableIconPath = "media/red-x.svg";
function joinThemableIconPath(
base: string,
iconPath: ThemableIconPath
iconPath: ThemableIconPath,
): ThemableIconPath {
if (typeof iconPath == 'object')
if (typeof iconPath == "object")
return {
light: path.join(base, iconPath.light),
dark: path.join(base, iconPath.dark),
@@ -68,25 +68,29 @@ function joinThemableIconPath(
}
enum SortOrder {
NameAsc = 'NameAsc',
NameDesc = 'NameDesc',
DateAddedAsc = 'DateAddedAsc',
DateAddedDesc = 'DateAddedDesc',
NameAsc = "NameAsc",
NameDesc = "NameDesc",
DateAddedAsc = "DateAddedAsc",
DateAddedDesc = "DateAddedDesc",
}
/**
* Tree data provider for the databases view.
*/
class DatabaseTreeDataProvider extends DisposableObject
implements TreeDataProvider<DatabaseItem> {
class DatabaseTreeDataProvider
extends DisposableObject
implements TreeDataProvider<DatabaseItem>
{
private _sortOrder = SortOrder.NameAsc;
private readonly _onDidChangeTreeData = this.push(new EventEmitter<DatabaseItem | undefined>());
private readonly _onDidChangeTreeData = this.push(
new EventEmitter<DatabaseItem | undefined>(),
);
private currentDatabaseItem: DatabaseItem | undefined;
constructor(
private databaseManager: DatabaseManager,
private readonly extensionPath: string
private readonly extensionPath: string,
) {
super();
@@ -94,13 +98,13 @@ class DatabaseTreeDataProvider extends DisposableObject
this.push(
this.databaseManager.onDidChangeDatabaseItem(
this.handleDidChangeDatabaseItem
)
this.handleDidChangeDatabaseItem,
),
);
this.push(
this.databaseManager.onDidChangeCurrentDatabaseItem(
this.handleDidChangeCurrentDatabaseItem
)
this.handleDidChangeCurrentDatabaseItem,
),
);
}
@@ -118,7 +122,7 @@ class DatabaseTreeDataProvider extends DisposableObject
};
private handleDidChangeCurrentDatabaseItem = (
event: DatabaseChangedEvent
event: DatabaseChangedEvent,
): void => {
if (this.currentDatabaseItem) {
this._onDidChangeTreeData.fire(this.currentDatabaseItem);
@@ -134,13 +138,13 @@ class DatabaseTreeDataProvider extends DisposableObject
if (element === this.currentDatabaseItem) {
item.iconPath = joinThemableIconPath(
this.extensionPath,
SELECTED_DATABASE_ICON
SELECTED_DATABASE_ICON,
);
item.contextValue = 'currentDatabase';
item.contextValue = "currentDatabase";
} else if (element.error !== undefined) {
item.iconPath = joinThemableIconPath(
this.extensionPath,
INVALID_DATABASE_ICON
INVALID_DATABASE_ICON,
);
}
item.tooltip = element.databaseUri.fsPath;
@@ -204,11 +208,11 @@ function getFirst(list: Uri[] | undefined): Uri | undefined {
*/
async function chooseDatabaseDir(byFolder: boolean): Promise<Uri | undefined> {
const chosen = await window.showOpenDialog({
openLabel: byFolder ? 'Choose Database folder' : 'Choose Database archive',
openLabel: byFolder ? "Choose Database folder" : "Choose Database archive",
canSelectFiles: !byFolder,
canSelectFolders: byFolder,
canSelectMany: false,
filters: byFolder ? {} : { Archives: ['zip'] },
filters: byFolder ? {} : { Archives: ["zip"] },
});
return getFirst(chosen);
}
@@ -221,173 +225,165 @@ export class DatabaseUI extends DisposableObject {
private readonly queryServer: QueryRunner | undefined,
private readonly storagePath: string,
readonly extensionPath: string,
private readonly getCredentials: () => Promise<Credentials>
private readonly getCredentials: () => Promise<Credentials>,
) {
super();
this.treeDataProvider = this.push(
new DatabaseTreeDataProvider(databaseManager, extensionPath)
new DatabaseTreeDataProvider(databaseManager, extensionPath),
);
this.push(
window.createTreeView('codeQLDatabases', {
window.createTreeView("codeQLDatabases", {
treeDataProvider: this.treeDataProvider,
canSelectMany: true,
})
}),
);
}
init() {
void logger.log('Registering database panel commands.');
void logger.log("Registering database panel commands.");
this.push(
commandRunnerWithProgress(
'codeQL.setCurrentDatabase',
"codeQL.setCurrentDatabase",
this.handleSetCurrentDatabase,
{
title: 'Importing database from archive',
}
)
title: "Importing database from archive",
},
),
);
this.push(
commandRunnerWithProgress(
'codeQL.upgradeCurrentDatabase',
"codeQL.upgradeCurrentDatabase",
this.handleUpgradeCurrentDatabase,
{
title: 'Upgrading current database',
title: "Upgrading current database",
cancellable: true,
}
)
},
),
);
this.push(
commandRunnerWithProgress(
'codeQL.clearCache',
this.handleClearCache,
{
title: 'Clearing Cache',
})
commandRunnerWithProgress("codeQL.clearCache", this.handleClearCache, {
title: "Clearing Cache",
}),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseFolder',
"codeQLDatabases.chooseDatabaseFolder",
this.handleChooseDatabaseFolder,
{
title: 'Adding database from folder',
}
)
title: "Adding database from folder",
},
),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseArchive',
"codeQLDatabases.chooseDatabaseArchive",
this.handleChooseDatabaseArchive,
{
title: 'Adding database from archive',
}
)
title: "Adding database from archive",
},
),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseInternet',
"codeQLDatabases.chooseDatabaseInternet",
this.handleChooseDatabaseInternet,
{
title: 'Adding database from URL',
}
)
title: "Adding database from URL",
},
),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseGithub',
async (
progress: ProgressCallback,
token: CancellationToken
) => {
const credentials = isCanary() ? await this.getCredentials() : undefined;
"codeQLDatabases.chooseDatabaseGithub",
async (progress: ProgressCallback, token: CancellationToken) => {
const credentials = isCanary()
? await this.getCredentials()
: undefined;
await this.handleChooseDatabaseGithub(credentials, progress, token);
},
{
title: 'Adding database from GitHub',
})
title: "Adding database from GitHub",
},
),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.chooseDatabaseLgtm',
"codeQLDatabases.chooseDatabaseLgtm",
this.handleChooseDatabaseLgtm,
{
title: 'Adding database from LGTM',
})
title: "Adding database from LGTM",
},
),
);
this.push(
commandRunner(
'codeQLDatabases.setCurrentDatabase',
this.handleMakeCurrentDatabase
)
"codeQLDatabases.setCurrentDatabase",
this.handleMakeCurrentDatabase,
),
);
this.push(
commandRunner("codeQLDatabases.sortByName", this.handleSortByName),
);
this.push(
commandRunner(
'codeQLDatabases.sortByName',
this.handleSortByName
)
);
this.push(
commandRunner(
'codeQLDatabases.sortByDateAdded',
this.handleSortByDateAdded
)
"codeQLDatabases.sortByDateAdded",
this.handleSortByDateAdded,
),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.removeDatabase',
"codeQLDatabases.removeDatabase",
this.handleRemoveDatabase,
{
title: 'Removing database',
cancellable: false
}
)
title: "Removing database",
cancellable: false,
},
),
);
this.push(
commandRunnerWithProgress(
'codeQLDatabases.upgradeDatabase',
"codeQLDatabases.upgradeDatabase",
this.handleUpgradeDatabase,
{
title: 'Upgrading database',
title: "Upgrading database",
cancellable: true,
}
)
},
),
);
this.push(
commandRunner(
'codeQLDatabases.renameDatabase',
this.handleRenameDatabase
)
"codeQLDatabases.renameDatabase",
this.handleRenameDatabase,
),
);
this.push(
commandRunner(
'codeQLDatabases.openDatabaseFolder',
this.handleOpenFolder
)
"codeQLDatabases.openDatabaseFolder",
this.handleOpenFolder,
),
);
this.push(
commandRunner("codeQLDatabases.addDatabaseSource", this.handleAddSource),
);
this.push(
commandRunner(
'codeQLDatabases.addDatabaseSource',
this.handleAddSource
)
);
this.push(
commandRunner(
'codeQLDatabases.removeOrphanedDatabases',
this.handleRemoveOrphanedDatabases
)
"codeQLDatabases.removeOrphanedDatabases",
this.handleRemoveOrphanedDatabases,
),
);
}
private handleMakeCurrentDatabase = async (
databaseItem: DatabaseItem
databaseItem: DatabaseItem,
): Promise<void> => {
await this.databaseManager.setCurrentDatabaseItem(databaseItem);
};
handleChooseDatabaseFolder = async (
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<void> => {
try {
await this.chooseAndSetDatabase(true, progress, token);
@@ -397,14 +393,16 @@ export class DatabaseUI extends DisposableObject {
};
handleRemoveOrphanedDatabases = async (): Promise<void> => {
void logger.log('Removing orphaned databases from workspace storage.');
void logger.log("Removing orphaned databases from workspace storage.");
let dbDirs = undefined;
if (
!(await fs.pathExists(this.storagePath)) ||
!(await fs.stat(this.storagePath)).isDirectory()
) {
void logger.log('Missing or invalid storage directory. Not trying to remove orphaned databases.');
void logger.log(
"Missing or invalid storage directory. Not trying to remove orphaned databases.",
);
return;
}
@@ -412,49 +410,51 @@ export class DatabaseUI extends DisposableObject {
// read directory
(await fs.readdir(this.storagePath, { withFileTypes: true }))
// remove non-directories
.filter(dirent => dirent.isDirectory())
.filter((dirent) => dirent.isDirectory())
// get the full path
.map(dirent => path.join(this.storagePath, dirent.name))
.map((dirent) => path.join(this.storagePath, dirent.name))
// remove databases still in workspace
.filter(dbDir => {
.filter((dbDir) => {
const dbUri = Uri.file(dbDir);
return this.databaseManager.databaseItems.every(item => item.databaseUri.fsPath !== dbUri.fsPath);
return this.databaseManager.databaseItems.every(
(item) => item.databaseUri.fsPath !== dbUri.fsPath,
);
});
// remove non-databases
dbDirs = await asyncFilter(dbDirs, isLikelyDatabaseRoot);
if (!dbDirs.length) {
void logger.log('No orphaned databases found.');
void logger.log("No orphaned databases found.");
return;
}
// delete
const failures = [] as string[];
await Promise.all(
dbDirs.map(async dbDir => {
dbDirs.map(async (dbDir) => {
try {
void logger.log(`Deleting orphaned database '${dbDir}'.`);
await fs.remove(dbDir);
} catch (e) {
failures.push(`${path.basename(dbDir)}`);
}
})
}),
);
if (failures.length) {
const dirname = path.dirname(failures[0]);
void showAndLogErrorMessage(
`Failed to delete unused databases (${failures.join(', ')
}).\nTo delete unused databases, please remove them manually from the storage folder ${dirname}.`
`Failed to delete unused databases (${failures.join(
", ",
)}).\nTo delete unused databases, please remove them manually from the storage folder ${dirname}.`,
);
}
};
handleChooseDatabaseArchive = async (
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<void> => {
try {
await this.chooseAndSetDatabase(false, progress, token);
@@ -465,21 +465,21 @@ export class DatabaseUI extends DisposableObject {
handleChooseDatabaseInternet = async (
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<DatabaseItem | undefined> => {
return await promptImportInternetDatabase(
this.databaseManager,
this.storagePath,
progress,
token,
this.queryServer?.cliServer
this.queryServer?.cliServer,
);
};
handleChooseDatabaseGithub = async (
credentials: Credentials | undefined,
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<DatabaseItem | undefined> => {
return await promptImportGithubDatabase(
this.databaseManager,
@@ -487,26 +487,26 @@ export class DatabaseUI extends DisposableObject {
credentials,
progress,
token,
this.queryServer?.cliServer
this.queryServer?.cliServer,
);
};
handleChooseDatabaseLgtm = async (
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<DatabaseItem | undefined> => {
return await promptImportLgtmDatabase(
this.databaseManager,
this.storagePath,
progress,
token,
this.queryServer?.cliServer
this.queryServer?.cliServer,
);
};
async tryUpgradeCurrentDatabase(
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
) {
await this.handleUpgradeCurrentDatabase(progress, token);
}
@@ -532,9 +532,10 @@ export class DatabaseUI extends DisposableObject {
token: CancellationToken,
): Promise<void> => {
await this.handleUpgradeDatabase(
progress, token,
progress,
token,
this.databaseManager.currentDatabaseItem,
[]
[],
);
};
@@ -546,27 +547,29 @@ export class DatabaseUI extends DisposableObject {
): Promise<void> => {
if (multiSelect?.length) {
await Promise.all(
multiSelect.map((dbItem) => this.handleUpgradeDatabase(progress, token, dbItem, []))
multiSelect.map((dbItem) =>
this.handleUpgradeDatabase(progress, token, dbItem, []),
),
);
}
if (this.queryServer === undefined) {
throw new Error(
'Received request to upgrade database, but there is no running query server.'
"Received request to upgrade database, but there is no running query server.",
);
}
if (databaseItem === undefined) {
throw new Error(
'Received request to upgrade database, but no database was provided.'
"Received request to upgrade database, but no database was provided.",
);
}
if (databaseItem.contents === undefined) {
throw new Error(
'Received request to upgrade database, but database contents could not be found.'
"Received request to upgrade database, but database contents could not be found.",
);
}
if (databaseItem.contents.dbSchemeUri === undefined) {
throw new Error(
'Received request to upgrade database, but database has no schema.'
"Received request to upgrade database, but database has no schema.",
);
}
@@ -575,7 +578,7 @@ export class DatabaseUI extends DisposableObject {
await this.queryServer.upgradeDatabaseExplicit(
databaseItem,
progress,
token
token,
);
};
@@ -590,7 +593,7 @@ export class DatabaseUI extends DisposableObject {
await this.queryServer.clearCacheInDatabase(
this.databaseManager.currentDatabaseItem,
progress,
token
token,
);
}
};
@@ -602,14 +605,14 @@ export class DatabaseUI extends DisposableObject {
): Promise<void> => {
try {
// Assume user has selected an archive if the file has a .zip extension
if (uri.path.endsWith('.zip')) {
if (uri.path.endsWith(".zip")) {
await importArchiveDatabase(
uri.toString(true),
this.databaseManager,
this.storagePath,
progress,
token,
this.queryServer?.cliServer
this.queryServer?.cliServer,
);
} else {
await this.setCurrentDatabase(progress, token, uri);
@@ -617,7 +620,9 @@ export class DatabaseUI extends DisposableObject {
} catch (e) {
// rethrow and let this be handled by default error handling.
throw new Error(
`Could not set database to ${path.basename(uri.fsPath)}. Reason: ${getErrorMessage(e)}`
`Could not set database to ${path.basename(
uri.fsPath,
)}. Reason: ${getErrorMessage(e)}`,
);
}
};
@@ -626,25 +631,31 @@ export class DatabaseUI extends DisposableObject {
progress: ProgressCallback,
token: CancellationToken,
databaseItem: DatabaseItem,
multiSelect: DatabaseItem[] | undefined
multiSelect: DatabaseItem[] | undefined,
): Promise<void> => {
if (multiSelect?.length) {
await Promise.all(multiSelect.map((dbItem) =>
this.databaseManager.removeDatabaseItem(progress, token, dbItem)
));
await Promise.all(
multiSelect.map((dbItem) =>
this.databaseManager.removeDatabaseItem(progress, token, dbItem),
),
);
} else {
await this.databaseManager.removeDatabaseItem(progress, token, databaseItem);
await this.databaseManager.removeDatabaseItem(
progress,
token,
databaseItem,
);
}
};
private handleRenameDatabase = async (
databaseItem: DatabaseItem,
multiSelect: DatabaseItem[] | undefined
multiSelect: DatabaseItem[] | undefined,
): Promise<void> => {
this.assertSingleDatabase(multiSelect);
const newName = await window.showInputBox({
prompt: 'Choose new database name',
prompt: "Choose new database name",
value: databaseItem.name,
});
@@ -655,11 +666,11 @@ export class DatabaseUI extends DisposableObject {
private handleOpenFolder = async (
databaseItem: DatabaseItem,
multiSelect: DatabaseItem[] | undefined
multiSelect: DatabaseItem[] | undefined,
): Promise<void> => {
if (multiSelect?.length) {
await Promise.all(
multiSelect.map((dbItem) => env.openExternal(dbItem.databaseUri))
multiSelect.map((dbItem) => env.openExternal(dbItem.databaseUri)),
);
} else {
await env.openExternal(databaseItem.databaseUri);
@@ -673,7 +684,7 @@ export class DatabaseUI extends DisposableObject {
*/
private handleAddSource = async (
databaseItem: DatabaseItem,
multiSelect: DatabaseItem[] | undefined
multiSelect: DatabaseItem[] | undefined,
): Promise<void> => {
if (multiSelect?.length) {
for (const dbItem of multiSelect) {
@@ -691,7 +702,7 @@ export class DatabaseUI extends DisposableObject {
*/
public async getDatabaseItem(
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<DatabaseItem | undefined> {
if (this.databaseManager.currentDatabaseItem === undefined) {
await this.chooseAndSetDatabase(false, progress, token);
@@ -703,11 +714,15 @@ export class DatabaseUI extends DisposableObject {
private async setCurrentDatabase(
progress: ProgressCallback,
token: CancellationToken,
uri: Uri
uri: Uri,
): Promise<DatabaseItem | undefined> {
let databaseItem = this.databaseManager.findDatabaseItem(uri);
if (databaseItem === undefined) {
databaseItem = await this.databaseManager.openDatabase(progress, token, uri);
databaseItem = await this.databaseManager.openDatabase(
progress,
token,
uri,
);
}
await this.databaseManager.setCurrentDatabaseItem(databaseItem);
@@ -741,7 +756,7 @@ export class DatabaseUI extends DisposableObject {
this.storagePath,
progress,
token,
this.queryServer?.cliServer
this.queryServer?.cliServer,
);
}
}
@@ -771,7 +786,7 @@ export class DatabaseUI extends DisposableObject {
private assertSingleDatabase(
multiSelect: DatabaseItem[] = [],
message = 'Please select a single database.'
message = "Please select a single database.",
) {
if (multiSelect.length > 1) {
throw new Error(message);
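Every entry in `init()` above follows the same shape. As a sketch, registering one more (hypothetical) command with `commandRunnerWithProgress` inside `init()` would look like the fragment below; the command ID, title, and handler body are placeholders, not part of the extension.

// Hypothetical example of the registration pattern used throughout init().
// "codeQLDatabases.exampleCommand" does not exist in the extension.
this.push(
  commandRunnerWithProgress(
    "codeQLDatabases.exampleCommand",
    async (progress: ProgressCallback, token: CancellationToken) => {
      progress({ message: "Doing example work", step: 1, maxStep: 1 });
      // ... command body, honouring the cancellation token ...
    },
    {
      title: "Running example command",
      cancellable: true,
    },
  ),
);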

View File

@@ -1,24 +1,26 @@
import * as fs from 'fs-extra';
import * as glob from 'glob-promise';
import * as path from 'path';
import * as vscode from 'vscode';
import * as cli from './cli';
import { ExtensionContext } from 'vscode';
import * as fs from "fs-extra";
import * as glob from "glob-promise";
import * as path from "path";
import * as vscode from "vscode";
import * as cli from "./cli";
import { ExtensionContext } from "vscode";
import {
showAndLogErrorMessage,
showAndLogWarningMessage,
showAndLogInformationMessage,
isLikelyDatabaseRoot
} from './helpers';
isLikelyDatabaseRoot,
} from "./helpers";
import { ProgressCallback, withProgress } from "./commandRunner";
import {
ProgressCallback,
withProgress
} from './commandRunner';
import { zipArchiveScheme, encodeArchiveBasePath, decodeSourceArchiveUri, encodeSourceArchiveUri } from './archive-filesystem-provider';
import { DisposableObject } from './pure/disposable-object';
import { Logger, logger } from './logging';
import { getErrorMessage } from './pure/helpers-pure';
import { QueryRunner } from './queryRunner';
zipArchiveScheme,
encodeArchiveBasePath,
decodeSourceArchiveUri,
encodeSourceArchiveUri,
} from "./archive-filesystem-provider";
import { DisposableObject } from "./pure/disposable-object";
import { Logger, logger } from "./logging";
import { getErrorMessage } from "./pure/helpers-pure";
import { QueryRunner } from "./queryRunner";
/**
* databases.ts
@@ -34,13 +36,13 @@ import { QueryRunner } from './queryRunner';
* The name of the key in the workspaceState dictionary in which we
* persist the current database across sessions.
*/
const CURRENT_DB = 'currentDatabase';
const CURRENT_DB = "currentDatabase";
/**
* The name of the key in the workspaceState dictionary in which we
* persist the list of databases across sessions.
*/
const DB_LIST = 'databaseList';
const DB_LIST = "databaseList";
export interface DatabaseOptions {
displayName?: string;
@@ -67,7 +69,7 @@ export enum DatabaseKind {
/** A CodeQL database */
Database,
/** A raw QL dataset */
RawDataset
RawDataset,
}
export interface DatabaseContents {
@@ -89,33 +91,35 @@ export interface DatabaseContents {
* An error thrown when we cannot find a valid database in a putative
* database directory.
*/
class InvalidDatabaseError extends Error {
}
class InvalidDatabaseError extends Error {}
async function findDataset(parentDirectory: string): Promise<vscode.Uri> {
/*
* Look directly in the root
*/
let dbRelativePaths = await glob('db-*/', {
cwd: parentDirectory
let dbRelativePaths = await glob("db-*/", {
cwd: parentDirectory,
});
if (dbRelativePaths.length === 0) {
/*
* Check if they are in the old location
*/
dbRelativePaths = await glob('working/db-*/', {
cwd: parentDirectory
dbRelativePaths = await glob("working/db-*/", {
cwd: parentDirectory,
});
}
if (dbRelativePaths.length === 0) {
throw new InvalidDatabaseError(`'${parentDirectory}' does not contain a dataset directory.`);
throw new InvalidDatabaseError(
`'${parentDirectory}' does not contain a dataset directory.`,
);
}
const dbAbsolutePath = path.join(parentDirectory, dbRelativePaths[0]);
if (dbRelativePaths.length > 1) {
void showAndLogWarningMessage(`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`);
void showAndLogWarningMessage(
`Found multiple dataset directories in database, using '${dbAbsolutePath}'.`,
);
}
return vscode.Uri.file(dbAbsolutePath);
@@ -123,13 +127,13 @@ async function findDataset(parentDirectory: string): Promise<vscode.Uri> {
// exported for testing
export async function findSourceArchive(
databasePath: string, silent = false
databasePath: string,
): Promise<vscode.Uri | undefined> {
const relativePaths = ['src', 'output/src_archive'];
const relativePaths = ["src", "output/src_archive"];
for (const relativePath of relativePaths) {
const basePath = path.join(databasePath, relativePath);
const zipPath = basePath + '.zip';
const zipPath = basePath + ".zip";
// Prefer using a zip archive over a directory.
if (await fs.pathExists(zipPath)) {
@@ -138,18 +142,16 @@ export async function findSourceArchive(
return vscode.Uri.file(basePath);
}
}
if (!silent) {
void showAndLogInformationMessage(
`Could not find source archive for database '${databasePath}'. Assuming paths are absolute.`
);
}
void showAndLogInformationMessage(
`Could not find source archive for database '${databasePath}'. Assuming paths are absolute.`,
);
return undefined;
}
async function resolveDatabase(
databasePath: string,
): Promise<DatabaseContents> {
const name = path.basename(databasePath);
// Look for dataset and source archive.
@@ -160,30 +162,36 @@ async function resolveDatabase(
kind: DatabaseKind.Database,
name,
datasetUri,
sourceArchiveUri
sourceArchiveUri,
};
}
/** Gets the relative paths of all `.dbscheme` files in the given directory. */
async function getDbSchemeFiles(dbDirectory: string): Promise<string[]> {
return await glob('*.dbscheme', { cwd: dbDirectory });
return await glob("*.dbscheme", { cwd: dbDirectory });
}
async function resolveDatabaseContents(
uri: vscode.Uri,
): Promise<DatabaseContents> {
if (uri.scheme !== 'file') {
throw new Error(`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`);
if (uri.scheme !== "file") {
throw new Error(
`Database URI scheme '${uri.scheme}' not supported; only 'file' URIs are supported.`,
);
}
const databasePath = uri.fsPath;
if (!await fs.pathExists(databasePath)) {
throw new InvalidDatabaseError(`Database '${databasePath}' does not exist.`);
if (!(await fs.pathExists(databasePath))) {
throw new InvalidDatabaseError(
`Database '${databasePath}' does not exist.`,
);
}
const contents = await resolveDatabase(databasePath);
if (contents === undefined) {
throw new InvalidDatabaseError(`'${databasePath}' is not a valid database.`);
throw new InvalidDatabaseError(
`'${databasePath}' is not a valid database.`,
);
}
// Look for a single dbscheme file within the database.
@@ -191,12 +199,17 @@ async function resolveDatabaseContents(
const dbPath = contents.datasetUri.fsPath;
const dbSchemeFiles = await getDbSchemeFiles(dbPath);
if (dbSchemeFiles.length === 0) {
throw new InvalidDatabaseError(`Database '${databasePath}' does not contain a CodeQL dbscheme under '${dbPath}'.`);
}
else if (dbSchemeFiles.length > 1) {
throw new InvalidDatabaseError(`Database '${databasePath}' contains multiple CodeQL dbschemes under '${dbPath}'.`);
throw new InvalidDatabaseError(
`Database '${databasePath}' does not contain a CodeQL dbscheme under '${dbPath}'.`,
);
} else if (dbSchemeFiles.length > 1) {
throw new InvalidDatabaseError(
`Database '${databasePath}' contains multiple CodeQL dbschemes under '${dbPath}'.`,
);
} else {
contents.dbSchemeUri = vscode.Uri.file(path.resolve(dbPath, dbSchemeFiles[0]));
contents.dbSchemeUri = vscode.Uri.file(
path.resolve(dbPath, dbSchemeFiles[0]),
);
}
return contents;
}
@@ -284,16 +297,16 @@ export interface DatabaseItem {
}
export enum DatabaseEventKind {
Add = 'Add',
Remove = 'Remove',
Add = "Add",
Remove = "Remove",
// Fired when databases are refreshed from persisted state
Refresh = 'Refresh',
Refresh = "Refresh",
// Fired when the current database changes
Change = 'Change',
Change = "Change",
Rename = 'Rename'
Rename = "Rename",
}
export interface DatabaseChangedEvent {
@@ -312,7 +325,7 @@ export class DatabaseItemImpl implements DatabaseItem {
public readonly databaseUri: vscode.Uri,
contents: DatabaseContents | undefined,
private options: FullDatabaseOptions,
private readonly onChanged: (event: DatabaseChangedEvent) => void
private readonly onChanged: (event: DatabaseChangedEvent) => void,
) {
this._contents = contents;
}
@@ -320,11 +333,9 @@ export class DatabaseItemImpl implements DatabaseItem {
public get name(): string {
if (this.options.displayName) {
return this.options.displayName;
}
else if (this._contents) {
} else if (this._contents) {
return this._contents.name;
}
else {
} else {
return path.basename(this.databaseUri.fsPath);
}
}
@@ -334,7 +345,7 @@ export class DatabaseItemImpl implements DatabaseItem {
}
public get sourceArchive(): vscode.Uri | undefined {
if (this.options.ignoreSourceArchive || (this._contents === undefined)) {
if (this.options.ignoreSourceArchive || this._contents === undefined) {
return undefined;
} else {
return this._contents.sourceArchiveUri;
@@ -366,7 +377,7 @@ export class DatabaseItemImpl implements DatabaseItem {
} finally {
this.onChanged({
kind: DatabaseEventKind.Refresh,
item: this
item: this,
});
}
}
@@ -374,8 +385,10 @@ export class DatabaseItemImpl implements DatabaseItem {
public resolveSourceFile(uriStr: string | undefined): vscode.Uri {
const sourceArchive = this.sourceArchive;
const uri = uriStr ? vscode.Uri.parse(uriStr, true) : undefined;
if (uri && uri.scheme !== 'file') {
throw new Error(`Invalid uri scheme in ${uriStr}. Only 'file' is allowed.`);
if (uri && uri.scheme !== "file") {
throw new Error(
`Invalid uri scheme in ${uriStr}. Only 'file' is allowed.`,
);
}
if (!sourceArchive) {
if (uri) {
@@ -386,28 +399,29 @@ export class DatabaseItemImpl implements DatabaseItem {
}
if (uri) {
const relativeFilePath = decodeURI(uri.path).replace(':', '_').replace(/^\/*/, '');
const relativeFilePath = decodeURI(uri.path)
.replace(":", "_")
.replace(/^\/*/, "");
if (sourceArchive.scheme === zipArchiveScheme) {
const zipRef = decodeSourceArchiveUri(sourceArchive);
const pathWithinSourceArchive = zipRef.pathWithinSourceArchive === '/'
? relativeFilePath
: zipRef.pathWithinSourceArchive + '/' + relativeFilePath;
const pathWithinSourceArchive =
zipRef.pathWithinSourceArchive === "/"
? relativeFilePath
: zipRef.pathWithinSourceArchive + "/" + relativeFilePath;
return encodeSourceArchiveUri({
pathWithinSourceArchive,
sourceArchiveZipPath: zipRef.sourceArchiveZipPath,
});
} else {
let newPath = sourceArchive.path;
if (!newPath.endsWith('/')) {
if (!newPath.endsWith("/")) {
// Ensure a trailing slash.
newPath += '/';
newPath += "/";
}
newPath += relativeFilePath;
return sourceArchive.with({ path: newPath });
}
} else {
return sourceArchive;
}
@@ -419,7 +433,7 @@ export class DatabaseItemImpl implements DatabaseItem {
public getPersistedState(): PersistedDatabaseItem {
return {
uri: this.databaseUri.toString(true),
options: this.options
options: this.options,
};
}
@@ -444,7 +458,9 @@ export class DatabaseItemImpl implements DatabaseItem {
* Returns the `sourceLocationPrefix` of the database. Requires that the database
* has a `.dbinfo` file, which is the source of the prefix.
*/
public async getSourceLocationPrefix(server: cli.CodeQLCliServer): Promise<string> {
public async getSourceLocationPrefix(
server: cli.CodeQLCliServer,
): Promise<string> {
const dbInfo = await this.getDbInfo(server);
return dbInfo.sourceLocationPrefix;
}
@@ -458,7 +474,7 @@ export class DatabaseItemImpl implements DatabaseItem {
}
public get language() {
return this.options.language || '';
return this.options.language || "";
}
/**
@@ -466,7 +482,7 @@ export class DatabaseItemImpl implements DatabaseItem {
*/
public getSourceArchiveExplorerUri(): vscode.Uri {
const sourceArchive = this.sourceArchive;
if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith('.zip')) {
if (sourceArchive === undefined || !sourceArchive.fsPath.endsWith(".zip")) {
throw new Error(this.verifyZippedSources());
}
return encodeArchiveBasePath(sourceArchive.fsPath);
@@ -478,7 +494,7 @@ export class DatabaseItemImpl implements DatabaseItem {
return `${this.name} has no source archive.`;
}
if (!sourceArchive.fsPath.endsWith('.zip')) {
if (!sourceArchive.fsPath.endsWith(".zip")) {
return `${this.name} has a source folder that is unzipped.`;
}
return;
@@ -488,26 +504,28 @@ export class DatabaseItemImpl implements DatabaseItem {
* Holds if `uri` belongs to this database's source archive.
*/
public belongsToSourceArchiveExplorerUri(uri: vscode.Uri): boolean {
if (this.sourceArchive === undefined)
return false;
return uri.scheme === zipArchiveScheme &&
decodeSourceArchiveUri(uri).sourceArchiveZipPath === this.sourceArchive.fsPath;
if (this.sourceArchive === undefined) return false;
return (
uri.scheme === zipArchiveScheme &&
decodeSourceArchiveUri(uri).sourceArchiveZipPath ===
this.sourceArchive.fsPath
);
}
public async isAffectedByTest(testPath: string): Promise<boolean> {
const databasePath = this.databaseUri.fsPath;
if (!databasePath.endsWith('.testproj')) {
if (!databasePath.endsWith(".testproj")) {
return false;
}
try {
const stats = await fs.stat(testPath);
if (stats.isDirectory()) {
return !path.relative(testPath, databasePath).startsWith('..');
return !path.relative(testPath, databasePath).startsWith("..");
} else {
// database for /one/two/three/test.ql is at /one/two/three/three.testproj
const testdir = path.dirname(testPath);
const testdirbase = path.basename(testdir);
return databasePath == path.join(testdir, testdirbase + '.testproj');
return databasePath == path.join(testdir, testdirbase + ".testproj");
}
} catch {
// No information available for test path - assume database is unaffected.
@@ -521,14 +539,19 @@ export class DatabaseItemImpl implements DatabaseItem {
* `event` fires. If waiting for the event takes too long (by default
* >1000ms) log a warning, and resolve to undefined.
*/
function eventFired<T>(event: vscode.Event<T>, timeoutMs = 1000): Promise<T | undefined> {
function eventFired<T>(
event: vscode.Event<T>,
timeoutMs = 1000,
): Promise<T | undefined> {
return new Promise((res, _rej) => {
const timeout = setTimeout(() => {
void logger.log(`Waiting for event ${event} timed out after ${timeoutMs}ms`);
void logger.log(
`Waiting for event ${event} timed out after ${timeoutMs}ms`,
);
res(undefined);
dispose();
}, timeoutMs);
const disposable = event(e => {
const disposable = event((e) => {
res(e);
dispose();
});
@@ -540,12 +563,17 @@ function eventFired<T>(event: vscode.Event<T>, timeoutMs = 1000): Promise<T | un
}
export class DatabaseManager extends DisposableObject {
private readonly _onDidChangeDatabaseItem = this.push(new vscode.EventEmitter<DatabaseChangedEvent>());
private readonly _onDidChangeDatabaseItem = this.push(
new vscode.EventEmitter<DatabaseChangedEvent>(),
);
readonly onDidChangeDatabaseItem = this._onDidChangeDatabaseItem.event;
private readonly _onDidChangeCurrentDatabaseItem = this.push(new vscode.EventEmitter<DatabaseChangedEvent>());
readonly onDidChangeCurrentDatabaseItem = this._onDidChangeCurrentDatabaseItem.event;
private readonly _onDidChangeCurrentDatabaseItem = this.push(
new vscode.EventEmitter<DatabaseChangedEvent>(),
);
readonly onDidChangeCurrentDatabaseItem =
this._onDidChangeCurrentDatabaseItem.event;
private readonly _databaseItems: DatabaseItem[] = [];
private _currentDatabaseItem: DatabaseItem | undefined = undefined;
@@ -554,7 +582,7 @@ export class DatabaseManager extends DisposableObject {
private readonly ctx: ExtensionContext,
private readonly qs: QueryRunner,
private readonly cli: cli.CodeQLCliServer,
public logger: Logger
public logger: Logger,
) {
super();
@@ -565,21 +593,26 @@ export class DatabaseManager extends DisposableObject {
progress: ProgressCallback,
token: vscode.CancellationToken,
uri: vscode.Uri,
displayName?: string
displayName?: string,
): Promise<DatabaseItem> {
const contents = await resolveDatabaseContents(uri);
// Ignore the source archive for QLTest databases by default.
const isQLTestDatabase = path.extname(uri.fsPath) === '.testproj';
const isQLTestDatabase = path.extname(uri.fsPath) === ".testproj";
const fullOptions: FullDatabaseOptions = {
ignoreSourceArchive: isQLTestDatabase,
// If a displayName is not passed in, the basename of the folder containing the database is used.
displayName,
dateAdded: Date.now(),
language: await this.getPrimaryLanguage(uri.fsPath)
language: await this.getPrimaryLanguage(uri.fsPath),
};
const databaseItem = new DatabaseItemImpl(uri, contents, fullOptions, (event) => {
this._onDidChangeDatabaseItem.fire(event);
});
const databaseItem = new DatabaseItemImpl(
uri,
contents,
fullOptions,
(event) => {
this._onDidChangeDatabaseItem.fire(event);
},
);
await this.addDatabaseItem(progress, token, databaseItem);
await this.addDatabaseSourceArchiveFolder(databaseItem);
@@ -589,18 +622,20 @@ export class DatabaseManager extends DisposableObject {
private async reregisterDatabases(
progress: ProgressCallback,
token: vscode.CancellationToken
token: vscode.CancellationToken,
) {
let completed = 0;
await Promise.all(this._databaseItems.map(async (databaseItem) => {
await this.registerDatabase(progress, token, databaseItem);
completed++;
progress({
maxStep: this._databaseItems.length,
step: completed,
message: 'Re-registering databases'
});
}));
await Promise.all(
this._databaseItems.map(async (databaseItem) => {
await this.registerDatabase(progress, token, databaseItem);
completed++;
progress({
maxStep: this._databaseItems.length,
step: completed,
message: "Re-registering databases",
});
}),
);
}
public async addDatabaseSourceArchiveFolder(item: DatabaseItem) {
@@ -627,12 +662,16 @@ export class DatabaseManager extends DisposableObject {
}
const uri = item.getSourceArchiveExplorerUri();
void logger.log(`Adding workspace folder for ${item.name} source archive at index ${end}`);
void logger.log(
`Adding workspace folder for ${item.name} source archive at index ${end}`,
);
if ((vscode.workspace.workspaceFolders || []).length < 2) {
// Adding this workspace folder makes the workspace
// multi-root, which may surprise the user. Let them know
// we're doing this.
void vscode.window.showInformationMessage(`Adding workspace folder for source archive of database ${item.name}.`);
void vscode.window.showInformationMessage(
`Adding workspace folder for source archive of database ${item.name}.`,
);
}
vscode.workspace.updateWorkspaceFolders(end, 0, {
name: `[${item.name} source archive]`,
@@ -647,21 +686,20 @@ export class DatabaseManager extends DisposableObject {
private async createDatabaseItemFromPersistedState(
progress: ProgressCallback,
token: vscode.CancellationToken,
state: PersistedDatabaseItem
state: PersistedDatabaseItem,
): Promise<DatabaseItem> {
let displayName: string | undefined = undefined;
let ignoreSourceArchive = false;
let dateAdded = undefined;
let language = undefined;
if (state.options) {
if (typeof state.options.displayName === 'string') {
if (typeof state.options.displayName === "string") {
displayName = state.options.displayName;
}
if (typeof state.options.ignoreSourceArchive === 'boolean') {
if (typeof state.options.ignoreSourceArchive === "boolean") {
ignoreSourceArchive = state.options.ignoreSourceArchive;
}
if (typeof state.options.dateAdded === 'number') {
if (typeof state.options.dateAdded === "number") {
dateAdded = state.options.dateAdded;
}
language = state.options.language;
@@ -677,60 +715,90 @@ export class DatabaseManager extends DisposableObject {
ignoreSourceArchive,
displayName,
dateAdded,
language
language,
};
const item = new DatabaseItemImpl(dbBaseUri, undefined, fullOptions,
const item = new DatabaseItemImpl(
dbBaseUri,
undefined,
fullOptions,
(event) => {
this._onDidChangeDatabaseItem.fire(event);
});
},
);
await this.addDatabaseItem(progress, token, item);
// Avoid persisting the database state after adding since that should happen only after
// all databases have been added.
await this.addDatabaseItem(progress, token, item, false);
return item;
}
public async loadPersistedState(): Promise<void> {
return withProgress({
location: vscode.ProgressLocation.Notification
},
return withProgress(
{
location: vscode.ProgressLocation.Notification,
},
async (progress, token) => {
const currentDatabaseUri = this.ctx.workspaceState.get<string>(CURRENT_DB);
const databases = this.ctx.workspaceState.get<PersistedDatabaseItem[]>(DB_LIST, []);
const currentDatabaseUri =
this.ctx.workspaceState.get<string>(CURRENT_DB);
const databases = this.ctx.workspaceState.get<PersistedDatabaseItem[]>(
DB_LIST,
[],
);
let step = 0;
progress({
maxStep: databases.length,
message: 'Loading persisted databases',
step
message: "Loading persisted databases",
step,
});
try {
void this.logger.log(`Found ${databases.length} persisted databases: ${databases.map(db => db.uri).join(', ')}`);
void this.logger.log(
`Found ${databases.length} persisted databases: ${databases
.map((db) => db.uri)
.join(", ")}`,
);
for (const database of databases) {
progress({
maxStep: databases.length,
message: `Loading ${database.options?.displayName || 'databases'}`,
step: ++step
message: `Loading ${
database.options?.displayName || "databases"
}`,
step: ++step,
});
const databaseItem = await this.createDatabaseItemFromPersistedState(progress, token, database);
const databaseItem =
await this.createDatabaseItemFromPersistedState(
progress,
token,
database,
);
try {
await databaseItem.refresh();
await this.registerDatabase(progress, token, databaseItem);
if (currentDatabaseUri === database.uri) {
await this.setCurrentDatabaseItem(databaseItem, true);
}
void this.logger.log(`Loaded database ${databaseItem.name} at URI ${database.uri}.`);
void this.logger.log(
`Loaded database ${databaseItem.name} at URI ${database.uri}.`,
);
} catch (e) {
// When loading from persisted state, leave invalid databases in the list. They will be
// marked as invalid, and cannot be set as the current database.
void this.logger.log(`Error loading database ${database.uri}: ${e}.`);
void this.logger.log(
`Error loading database ${database.uri}: ${e}.`,
);
}
}
await this.updatePersistedDatabaseList();
} catch (e) {
// database list had an unexpected type - nothing to be done?
void showAndLogErrorMessage(`Database list loading failed: ${getErrorMessage(e)}`);
void showAndLogErrorMessage(
`Database list loading failed: ${getErrorMessage(e)}`,
);
}
void this.logger.log('Finished loading persisted databases.');
});
void this.logger.log("Finished loading persisted databases.");
},
);
}
public get databaseItems(): readonly DatabaseItem[] {
@@ -743,21 +811,24 @@ export class DatabaseManager extends DisposableObject {
public async setCurrentDatabaseItem(
item: DatabaseItem | undefined,
skipRefresh = false
skipRefresh = false,
): Promise<void> {
if (!skipRefresh && (item !== undefined)) {
await item.refresh(); // Will throw on invalid database.
if (!skipRefresh && item !== undefined) {
await item.refresh(); // Will throw on invalid database.
}
if (this._currentDatabaseItem !== item) {
this._currentDatabaseItem = item;
this.updatePersistedCurrentDatabaseItem();
await vscode.commands.executeCommand('setContext', 'codeQL.currentDatabaseItem', item?.name);
await vscode.commands.executeCommand(
"setContext",
"codeQL.currentDatabaseItem",
item?.name,
);
this._onDidChangeCurrentDatabaseItem.fire({
item,
kind: DatabaseEventKind.Change
kind: DatabaseEventKind.Change,
});
}
}
@@ -767,27 +838,39 @@ export class DatabaseManager extends DisposableObject {
* if there is one, and -1 otherwise.
*/
private getDatabaseWorkspaceFolderIndex(item: DatabaseItem): number {
return (vscode.workspace.workspaceFolders || [])
.findIndex(folder => item.belongsToSourceArchiveExplorerUri(folder.uri));
return (vscode.workspace.workspaceFolders || []).findIndex((folder) =>
item.belongsToSourceArchiveExplorerUri(folder.uri),
);
}
public findDatabaseItem(uri: vscode.Uri): DatabaseItem | undefined {
const uriString = uri.toString(true);
return this._databaseItems.find(item => item.databaseUri.toString(true) === uriString);
return this._databaseItems.find(
(item) => item.databaseUri.toString(true) === uriString,
);
}
public findDatabaseItemBySourceArchive(uri: vscode.Uri): DatabaseItem | undefined {
public findDatabaseItemBySourceArchive(
uri: vscode.Uri,
): DatabaseItem | undefined {
const uriString = uri.toString(true);
return this._databaseItems.find(item => item.sourceArchive && item.sourceArchive.toString(true) === uriString);
return this._databaseItems.find(
(item) =>
item.sourceArchive && item.sourceArchive.toString(true) === uriString,
);
}
private async addDatabaseItem(
progress: ProgressCallback,
token: vscode.CancellationToken,
item: DatabaseItem
item: DatabaseItem,
updatePersistedState = true,
) {
this._databaseItems.push(item);
await this.updatePersistedDatabaseList();
if (updatePersistedState) {
await this.updatePersistedDatabaseList();
}
// Add this database item to the allow-list
// Database items reconstituted from persisted state
@@ -798,7 +881,7 @@ export class DatabaseManager extends DisposableObject {
// note that we use undefined as the item in order to reset the entire tree
this._onDidChangeDatabaseItem.fire({
item: undefined,
kind: DatabaseEventKind.Add
kind: DatabaseEventKind.Add,
});
}
@@ -808,19 +891,21 @@ export class DatabaseManager extends DisposableObject {
this._onDidChangeDatabaseItem.fire({
// pass undefined so that the entire tree is rebuilt in order to re-sort
item: undefined,
kind: DatabaseEventKind.Rename
kind: DatabaseEventKind.Rename,
});
}
public async removeDatabaseItem(
progress: ProgressCallback,
token: vscode.CancellationToken,
item: DatabaseItem
item: DatabaseItem,
) {
if (this._currentDatabaseItem == item) {
this._currentDatabaseItem = undefined;
}
const index = this.databaseItems.findIndex(searchItem => searchItem === item);
const index = this.databaseItems.findIndex(
(searchItem) => searchItem === item,
);
if (index >= 0) {
this._databaseItems.splice(index, 1);
}
@@ -828,7 +913,7 @@ export class DatabaseManager extends DisposableObject {
// Delete folder from workspace, if it is still there
const folderIndex = (vscode.workspace.workspaceFolders || []).findIndex(
folder => item.belongsToSourceArchiveExplorerUri(folder.uri)
(folder) => item.belongsToSourceArchiveExplorerUri(folder.uri),
);
if (folderIndex >= 0) {
void logger.log(`Removing workspace folder at index ${folderIndex}`);
@@ -840,16 +925,22 @@ export class DatabaseManager extends DisposableObject {
// Delete folder from file system only if it is controlled by the extension
if (this.isExtensionControlledLocation(item.databaseUri)) {
void logger.log('Deleting database from filesystem.');
void logger.log("Deleting database from filesystem.");
fs.remove(item.databaseUri.fsPath).then(
() => void logger.log(`Deleted '${item.databaseUri.fsPath}'`),
e => void logger.log(`Failed to delete '${item.databaseUri.fsPath}'. Reason: ${getErrorMessage(e)}`));
(e) =>
void logger.log(
`Failed to delete '${
item.databaseUri.fsPath
}'. Reason: ${getErrorMessage(e)}`,
),
);
}
// note that we use undefined as the item in order to reset the entire tree
this._onDidChangeDatabaseItem.fire({
item: undefined,
kind: DatabaseEventKind.Remove
kind: DatabaseEventKind.Remove,
});
}
@@ -869,12 +960,19 @@ export class DatabaseManager extends DisposableObject {
}
private updatePersistedCurrentDatabaseItem(): void {
void this.ctx.workspaceState.update(CURRENT_DB, this._currentDatabaseItem ?
this._currentDatabaseItem.databaseUri.toString(true) : undefined);
void this.ctx.workspaceState.update(
CURRENT_DB,
this._currentDatabaseItem
? this._currentDatabaseItem.databaseUri.toString(true)
: undefined,
);
}
private async updatePersistedDatabaseList(): Promise<void> {
await this.ctx.workspaceState.update(DB_LIST, this._databaseItems.map(item => item.getPersistedState()));
await this.ctx.workspaceState.update(
DB_LIST,
this._databaseItems.map((item) => item.getPersistedState()),
);
}
private isExtensionControlledLocation(uri: vscode.Uri) {
@@ -896,7 +994,7 @@ export class DatabaseManager extends DisposableObject {
return undefined;
}
const dbInfo = await this.cli.resolveDatabase(dbPath);
return dbInfo.languages?.[0] || '';
return dbInfo.languages?.[0] || "";
}
}
@@ -905,7 +1003,9 @@ export class DatabaseManager extends DisposableObject {
* scripts returned by the cli's upgrade resolution.
*/
export function getUpgradesDirectories(scripts: string[]): vscode.Uri[] {
const parentDirs = scripts.map(dir => path.dirname(dir));
const parentDirs = scripts.map((dir) => path.dirname(dir));
const uniqueParentDirs = new Set(parentDirs);
return Array.from(uniqueParentDirs).map(filePath => vscode.Uri.file(filePath));
return Array.from(uniqueParentDirs).map((filePath) =>
vscode.Uri.file(filePath),
);
}


@@ -0,0 +1,3 @@
### Databases
This folder contains code for the new experimental databases panel and new query run experience.


@@ -0,0 +1,121 @@
import * as fs from "fs-extra";
import * as path from "path";
import { cloneDbConfig, DbConfig } from "./db-config";
import * as chokidar from "chokidar";
import { DisposableObject } from "../../pure/disposable-object";
import { DbConfigValidator } from "./db-config-validator";
import { ValueResult } from "../../common/value-result";
import { App } from "../../common/app";
import { AppEvent, AppEventEmitter } from "../../common/events";
export class DbConfigStore extends DisposableObject {
public readonly onDidChangeConfig: AppEvent<void>;
private readonly onDidChangeConfigEventEmitter: AppEventEmitter<void>;
private readonly configPath: string;
private readonly configValidator: DbConfigValidator;
private config: DbConfig | undefined;
private configErrors: string[];
private configWatcher: chokidar.FSWatcher | undefined;
public constructor(app: App) {
super();
const storagePath = app.workspaceStoragePath || app.globalStoragePath;
this.configPath = path.join(storagePath, "workspace-databases.json");
this.config = this.createEmptyConfig();
this.configErrors = [];
this.configWatcher = undefined;
this.configValidator = new DbConfigValidator(app.extensionPath);
this.onDidChangeConfigEventEmitter = app.createEventEmitter<void>();
this.onDidChangeConfig = this.onDidChangeConfigEventEmitter.event;
}
public async initialize(): Promise<void> {
await this.loadConfig();
this.watchConfig();
}
public dispose(): void {
this.configWatcher?.unwatch(this.configPath);
}
public getConfig(): ValueResult<DbConfig> {
if (this.config) {
// Clone the config so that it's not modified outside of this class.
return ValueResult.ok(cloneDbConfig(this.config));
} else {
return ValueResult.fail(this.configErrors);
}
}
public getConfigPath(): string {
return this.configPath;
}
private async loadConfig(): Promise<void> {
if (!(await fs.pathExists(this.configPath))) {
await fs.writeJSON(this.configPath, this.createEmptyConfig(), {
spaces: 2,
});
}
await this.readConfig();
}
private async readConfig(): Promise<void> {
let newConfig: DbConfig | undefined = undefined;
try {
newConfig = await fs.readJSON(this.configPath);
} catch (e) {
this.configErrors = [`Failed to read config file: ${this.configPath}`];
}
if (newConfig) {
this.configErrors = this.configValidator.validate(newConfig);
}
this.config = this.configErrors.length === 0 ? newConfig : undefined;
}
private readConfigSync(): void {
let newConfig: DbConfig | undefined = undefined;
try {
newConfig = fs.readJSONSync(this.configPath);
} catch (e) {
this.configErrors = [`Failed to read config file: ${this.configPath}`];
}
if (newConfig) {
this.configErrors = this.configValidator.validate(newConfig);
}
this.config = this.configErrors.length === 0 ? newConfig : undefined;
this.onDidChangeConfigEventEmitter.fire();
}
private watchConfig(): void {
this.configWatcher = chokidar.watch(this.configPath).on("change", () => {
this.readConfigSync();
});
}
private createEmptyConfig(): DbConfig {
return {
databases: {
remote: {
repositoryLists: [],
owners: [],
repositories: [],
},
local: {
lists: [],
databases: [],
},
},
};
}
}
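
For orientation, a minimal usage sketch of the store (the function name and call site are hypothetical; in the extension the real wiring sits in the db module shown further down):

import { App } from "../../common/app";
import { DbConfigStore } from "./db-config-store";

// Hypothetical call site, assuming a sibling file in the same config folder.
async function loadWorkspaceDatabasesConfig(app: App): Promise<void> {
  const configStore = new DbConfigStore(app);
  await configStore.initialize();

  const result = configStore.getConfig();
  if (result.isFailure) {
    // Read and schema-validation problems are reported as plain strings.
    result.errors.forEach((e) => console.warn(e));
    return;
  }
  // result.value is a defensive clone of the on-disk config.
  console.log(`Config file lives at ${configStore.getConfigPath()}`);
}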


@@ -0,0 +1,29 @@
import * as fs from "fs-extra";
import * as path from "path";
import Ajv from "ajv";
import { DbConfig } from "./db-config";
export class DbConfigValidator {
private readonly schema: any;
constructor(extensionPath: string) {
const schemaPath = path.resolve(
extensionPath,
"workspace-databases-schema.json",
);
this.schema = fs.readJsonSync(schemaPath);
}
public validate(dbConfig: DbConfig): string[] {
const ajv = new Ajv({ allErrors: true });
ajv.validate(this.schema, dbConfig);
if (ajv.errors) {
return ajv.errors.map(
(error) => `${error.instancePath} ${error.message}`,
);
}
return [];
}
}


@@ -0,0 +1,79 @@
// Contains models for the data we want to store in the database config
export interface DbConfig {
databases: DbConfigDatabases;
selected?: SelectedDbItem;
}
export interface DbConfigDatabases {
remote: RemoteDbConfig;
local: LocalDbConfig;
}
export interface SelectedDbItem {
kind: SelectedDbItemKind;
value: string;
}
export enum SelectedDbItemKind {
ConfigDefined = "configDefined",
RemoteSystemDefinedList = "remoteSystemDefinedList",
}
export interface RemoteDbConfig {
repositoryLists: RemoteRepositoryList[];
owners: string[];
repositories: string[];
}
export interface RemoteRepositoryList {
name: string;
repositories: string[];
}
export interface LocalDbConfig {
lists: LocalList[];
databases: LocalDatabase[];
}
export interface LocalList {
name: string;
databases: LocalDatabase[];
}
export interface LocalDatabase {
name: string;
dateAdded: number;
language: string;
storagePath: string;
}
export function cloneDbConfig(config: DbConfig): DbConfig {
return {
databases: {
remote: {
repositoryLists: config.databases.remote.repositoryLists.map(
(list) => ({
name: list.name,
repositories: [...list.repositories],
}),
),
owners: [...config.databases.remote.owners],
repositories: [...config.databases.remote.repositories],
},
local: {
lists: config.databases.local.lists.map((list) => ({
name: list.name,
databases: list.databases.map((db) => ({ ...db })),
})),
databases: config.databases.local.databases.map((db) => ({ ...db })),
},
},
selected: config.selected
? {
kind: config.selected.kind,
value: config.selected.value,
}
: undefined,
};
}
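
As a concrete illustration of the shape these interfaces describe (all names and values below are made up), a conforming config object might look like this:

import { DbConfig, SelectedDbItemKind } from "./db-config";

// Illustrative only; the field names come from the interfaces above.
export const exampleConfig: DbConfig = {
  databases: {
    remote: {
      repositoryLists: [
        { name: "my-list", repositories: ["octocat/hello-world"] },
      ],
      owners: ["octocat"],
      repositories: ["github/codeql"],
    },
    local: {
      lists: [],
      databases: [
        {
          name: "my-local-db",
          dateAdded: 1668600000000,
          language: "javascript",
          storagePath: "/path/to/my-local-db",
        },
      ],
    },
  },
  selected: {
    kind: SelectedDbItemKind.RemoteSystemDefinedList,
    value: "top_10",
  },
};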


@@ -0,0 +1,73 @@
// This file contains models that are used to represent the databases.
export enum DbItemKind {
RootLocal = "RootLocal",
LocalList = "LocalList",
LocalDatabase = "LocalDatabase",
RootRemote = "RootRemote",
RemoteSystemDefinedList = "RemoteSystemDefinedList",
RemoteUserDefinedList = "RemoteUserDefinedList",
RemoteOwner = "RemoteOwner",
RemoteRepo = "RemoteRepo",
}
export interface RootLocalDbItem {
kind: DbItemKind.RootLocal;
children: LocalDbItem[];
}
export type LocalDbItem = LocalListDbItem | LocalDatabaseDbItem;
export interface LocalListDbItem {
kind: DbItemKind.LocalList;
listName: string;
databases: LocalDatabaseDbItem[];
}
export interface LocalDatabaseDbItem {
kind: DbItemKind.LocalDatabase;
databaseName: string;
dateAdded: number;
language: string;
storagePath: string;
}
export interface RootRemoteDbItem {
kind: DbItemKind.RootRemote;
children: RemoteDbItem[];
}
export type DbItem =
| RootLocalDbItem
| RootRemoteDbItem
| RemoteDbItem
| LocalDbItem;
export type RemoteDbItem =
| RemoteSystemDefinedListDbItem
| RemoteUserDefinedListDbItem
| RemoteOwnerDbItem
| RemoteRepoDbItem;
export interface RemoteSystemDefinedListDbItem {
kind: DbItemKind.RemoteSystemDefinedList;
listName: string;
listDisplayName: string;
listDescription: string;
}
export interface RemoteUserDefinedListDbItem {
kind: DbItemKind.RemoteUserDefinedList;
listName: string;
repos: RemoteRepoDbItem[];
}
export interface RemoteOwnerDbItem {
kind: DbItemKind.RemoteOwner;
ownerName: string;
}
export interface RemoteRepoDbItem {
kind: DbItemKind.RemoteRepo;
repoFullName: string;
}


@@ -0,0 +1,36 @@
import { App } from "../common/app";
import { AppEvent, AppEventEmitter } from "../common/events";
import { ValueResult } from "../common/value-result";
import { DbConfigStore } from "./config/db-config-store";
import { DbItem } from "./db-item";
import { createLocalTree, createRemoteTree } from "./db-tree-creator";
export class DbManager {
public readonly onDbItemsChanged: AppEvent<void>;
private readonly onDbItemsChangesEventEmitter: AppEventEmitter<void>;
constructor(app: App, private readonly dbConfigStore: DbConfigStore) {
this.onDbItemsChangesEventEmitter = app.createEventEmitter<void>();
this.onDbItemsChanged = this.onDbItemsChangesEventEmitter.event;
this.dbConfigStore.onDidChangeConfig(() => {
this.onDbItemsChangesEventEmitter.fire();
});
}
public getDbItems(): ValueResult<DbItem[]> {
const configResult = this.dbConfigStore.getConfig();
if (configResult.isFailure) {
return ValueResult.fail(configResult.errors);
}
return ValueResult.ok([
createRemoteTree(configResult.value),
createLocalTree(configResult.value),
]);
}
public getConfigPath(): string {
return this.dbConfigStore.getConfigPath();
}
}


@@ -0,0 +1,40 @@
import { App, AppMode } from "../common/app";
import { isCanary, isNewQueryRunExperienceEnabled } from "../config";
import { logger } from "../logging";
import { DisposableObject } from "../pure/disposable-object";
import { DbConfigStore } from "./config/db-config-store";
import { DbManager } from "./db-manager";
import { DbPanel } from "./ui/db-panel";
export class DbModule extends DisposableObject {
public async initialize(app: App): Promise<void> {
if (
app.mode !== AppMode.Development ||
!isCanary() ||
!isNewQueryRunExperienceEnabled()
) {
// Currently, we only want to expose the new database panel when we
// are in development and canary mode and the developer has enabled the
// new query run experience.
return;
}
void logger.log("Initializing database module");
const dbConfigStore = new DbConfigStore(app);
await dbConfigStore.initialize();
const dbManager = new DbManager(app, dbConfigStore);
const dbPanel = new DbPanel(dbManager);
await dbPanel.initialize();
this.push(dbPanel);
this.push(dbConfigStore);
}
}
export async function initializeDbModule(app: App): Promise<DbModule> {
const dbModule = new DbModule();
await dbModule.initialize(app);
return dbModule;
}


@@ -0,0 +1,102 @@
import {
DbConfig,
LocalDatabase,
LocalList,
RemoteRepositoryList,
} from "./config/db-config";
import {
DbItemKind,
LocalDatabaseDbItem,
LocalListDbItem,
RemoteOwnerDbItem,
RemoteRepoDbItem,
RemoteSystemDefinedListDbItem,
RemoteUserDefinedListDbItem,
RootLocalDbItem,
RootRemoteDbItem,
} from "./db-item";
export function createRemoteTree(dbConfig: DbConfig): RootRemoteDbItem {
const systemDefinedLists = [
createSystemDefinedList(10),
createSystemDefinedList(100),
createSystemDefinedList(1000),
];
const userDefinedRepoLists = dbConfig.databases.remote.repositoryLists.map(
createUserDefinedList,
);
const owners = dbConfig.databases.remote.owners.map(createOwnerItem);
const repos = dbConfig.databases.remote.repositories.map(createRepoItem);
return {
kind: DbItemKind.RootRemote,
children: [
...systemDefinedLists,
...owners,
...userDefinedRepoLists,
...repos,
],
};
}
export function createLocalTree(dbConfig: DbConfig): RootLocalDbItem {
const localLists = dbConfig.databases.local.lists.map(createLocalList);
const localDbs = dbConfig.databases.local.databases.map(createLocalDb);
return {
kind: DbItemKind.RootLocal,
children: [...localLists, ...localDbs],
};
}
function createSystemDefinedList(n: number): RemoteSystemDefinedListDbItem {
return {
kind: DbItemKind.RemoteSystemDefinedList,
listName: `top_${n}`,
listDisplayName: `Top ${n} repositories`,
listDescription: `Top ${n} repositories of a language`,
};
}
function createUserDefinedList(
list: RemoteRepositoryList,
): RemoteUserDefinedListDbItem {
return {
kind: DbItemKind.RemoteUserDefinedList,
listName: list.name,
repos: list.repositories.map((r) => createRepoItem(r)),
};
}
function createOwnerItem(owner: string): RemoteOwnerDbItem {
return {
kind: DbItemKind.RemoteOwner,
ownerName: owner,
};
}
function createRepoItem(repo: string): RemoteRepoDbItem {
return {
kind: DbItemKind.RemoteRepo,
repoFullName: repo,
};
}
function createLocalList(list: LocalList): LocalListDbItem {
return {
kind: DbItemKind.LocalList,
listName: list.name,
databases: list.databases.map(createLocalDb),
};
}
function createLocalDb(db: LocalDatabase): LocalDatabaseDbItem {
return {
kind: DbItemKind.LocalDatabase,
databaseName: db.name,
dateAdded: db.dateAdded,
language: db.language,
storagePath: db.storagePath,
};
}
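
To make the mapping concrete, a rough sketch that drives these creators with the illustrative exampleConfig from earlier (the real call site is DbManager.getDbItems; the import path for the example is hypothetical):

import { createLocalTree, createRemoteTree } from "./db-tree-creator";
import { exampleConfig } from "./example-config"; // hypothetical module holding the sketch above

// Sketch only: turn a DbConfig into the two root DbItems shown in the panel.
const remoteRoot = createRemoteTree(exampleConfig);
const localRoot = createLocalTree(exampleConfig);

// remoteRoot.children is ordered as: the three system-defined lists
// (top_10, top_100, top_1000), then owners, then user-defined lists,
// then individual repositories; localRoot.children lists local lists
// followed by standalone local databases.
console.log(remoteRoot.children.length, localRoot.children.length);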


@@ -0,0 +1,64 @@
import { DbItem, DbItemKind } from "../db-item";
import {
createDbTreeViewItemLocalDatabase,
createDbTreeViewItemOwner,
createDbTreeViewItemRepo,
createDbTreeViewItemRoot,
createDbTreeViewItemSystemDefinedList,
createDbTreeViewItemUserDefinedList,
DbTreeViewItem,
} from "./db-tree-view-item";
export function mapDbItemToTreeViewItem(dbItem: DbItem): DbTreeViewItem {
switch (dbItem.kind) {
case DbItemKind.RootLocal:
return createDbTreeViewItemRoot(
dbItem,
"local",
"Local databases",
dbItem.children.map((c) => mapDbItemToTreeViewItem(c)),
);
case DbItemKind.RootRemote:
return createDbTreeViewItemRoot(
dbItem,
"remote",
"Remote databases",
dbItem.children.map((c) => mapDbItemToTreeViewItem(c)),
);
case DbItemKind.RemoteSystemDefinedList:
return createDbTreeViewItemSystemDefinedList(
dbItem,
dbItem.listDisplayName,
dbItem.listDescription,
);
case DbItemKind.RemoteUserDefinedList:
return createDbTreeViewItemUserDefinedList(
dbItem,
dbItem.listName,
dbItem.repos.map(mapDbItemToTreeViewItem),
);
case DbItemKind.RemoteOwner:
return createDbTreeViewItemOwner(dbItem, dbItem.ownerName);
case DbItemKind.RemoteRepo:
return createDbTreeViewItemRepo(dbItem, dbItem.repoFullName);
case DbItemKind.LocalList:
return createDbTreeViewItemUserDefinedList(
dbItem,
dbItem.listName,
dbItem.databases.map(mapDbItemToTreeViewItem),
);
case DbItemKind.LocalDatabase:
return createDbTreeViewItemLocalDatabase(
dbItem,
dbItem.databaseName,
dbItem.language,
);
}
}


@@ -0,0 +1,39 @@
import * as vscode from "vscode";
import { commandRunner } from "../../commandRunner";
import { DisposableObject } from "../../pure/disposable-object";
import { DbManager } from "../db-manager";
import { DbTreeDataProvider } from "./db-tree-data-provider";
export class DbPanel extends DisposableObject {
private readonly dataProvider: DbTreeDataProvider;
public constructor(private readonly dbManager: DbManager) {
super();
this.dataProvider = new DbTreeDataProvider(dbManager);
const treeView = vscode.window.createTreeView(
"codeQLDatabasesExperimental",
{
treeDataProvider: this.dataProvider,
canSelectMany: false,
},
);
this.push(treeView);
}
public async initialize(): Promise<void> {
this.push(
commandRunner("codeQLDatabasesExperimental.openConfigFile", () =>
this.openConfigFile(),
),
);
}
private async openConfigFile(): Promise<void> {
const configPath = this.dbManager.getConfigPath();
const document = await vscode.workspace.openTextDocument(configPath);
await vscode.window.showTextDocument(document);
}
}


@@ -0,0 +1,74 @@
import {
Event,
EventEmitter,
ProviderResult,
TreeDataProvider,
TreeItem,
} from "vscode";
import { createDbTreeViewItemError, DbTreeViewItem } from "./db-tree-view-item";
import { DbManager } from "../db-manager";
import { mapDbItemToTreeViewItem } from "./db-item-mapper";
import { DisposableObject } from "../../pure/disposable-object";
export class DbTreeDataProvider
extends DisposableObject
implements TreeDataProvider<DbTreeViewItem>
{
// This is an event to signal that there's been a change in the tree which
// will cause the view to refresh. It is part of the TreeDataProvider interface.
public readonly onDidChangeTreeData: Event<DbTreeViewItem | undefined>;
private _onDidChangeTreeData = this.push(
new EventEmitter<DbTreeViewItem | undefined>(),
);
private dbTreeItems: DbTreeViewItem[];
public constructor(private readonly dbManager: DbManager) {
super();
this.dbTreeItems = this.createTree();
this.onDidChangeTreeData = this._onDidChangeTreeData.event;
dbManager.onDbItemsChanged(() => {
this.dbTreeItems = this.createTree();
this._onDidChangeTreeData.fire(undefined);
});
}
/**
* Called when expanding a node (including the root node).
* @param node The node to expand.
* @returns The children of the node.
*/
public getChildren(node?: DbTreeViewItem): ProviderResult<DbTreeViewItem[]> {
if (!node) {
// We're at the root.
return Promise.resolve(this.dbTreeItems);
} else {
return Promise.resolve(node.children);
}
}
/**
* Returns the UI presentation of the element that gets displayed in the view.
* @param node The node to represent.
* @returns The UI presentation of the node.
*/
public getTreeItem(node: DbTreeViewItem): TreeItem | Thenable<TreeItem> {
return node;
}
private createTree(): DbTreeViewItem[] {
const dbItemsResult = this.dbManager.getDbItems();
if (dbItemsResult.isFailure) {
const errorTreeViewItem = createDbTreeViewItemError(
"Error when reading databases config",
"Please open your databases config and address errors",
);
return [errorTreeViewItem];
}
return dbItemsResult.value.map(mapDbItemToTreeViewItem);
}
}


@@ -0,0 +1,138 @@
import * as vscode from "vscode";
import {
DbItem,
LocalDatabaseDbItem,
LocalListDbItem,
RemoteOwnerDbItem,
RemoteRepoDbItem,
RemoteSystemDefinedListDbItem,
RemoteUserDefinedListDbItem,
RootLocalDbItem,
RootRemoteDbItem,
} from "../db-item";
/**
* Represents an item in the database tree view. An item can represent
* either an actual database item or a warning.
*/
export class DbTreeViewItem extends vscode.TreeItem {
constructor(
// iconPath and tooltip must have those names because
// they are part of the vscode.TreeItem interface
public readonly dbItem: DbItem | undefined,
public readonly iconPath: vscode.ThemeIcon | undefined,
public readonly label: string,
public readonly tooltip: string | undefined,
public readonly collapsibleState: vscode.TreeItemCollapsibleState,
public readonly children: DbTreeViewItem[],
) {
super(label, collapsibleState);
}
}
export function createDbTreeViewItemError(
label: string,
tooltip: string,
): DbTreeViewItem {
return new DbTreeViewItem(
undefined,
new vscode.ThemeIcon(
"error",
new vscode.ThemeColor("problemsErrorIcon.foreground"),
),
label,
tooltip,
vscode.TreeItemCollapsibleState.None,
[],
);
}
export function createDbTreeViewItemRoot(
dbItem: RootLocalDbItem | RootRemoteDbItem,
label: string,
tooltip: string,
children: DbTreeViewItem[],
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
undefined,
label,
tooltip,
vscode.TreeItemCollapsibleState.Collapsed,
children,
);
}
export function createDbTreeViewItemSystemDefinedList(
dbItem: RemoteSystemDefinedListDbItem,
label: string,
tooltip: string,
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
new vscode.ThemeIcon("github"),
label,
tooltip,
vscode.TreeItemCollapsibleState.None,
[],
);
}
export function createDbTreeViewItemUserDefinedList(
dbItem: LocalListDbItem | RemoteUserDefinedListDbItem,
listName: string,
children: DbTreeViewItem[],
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
undefined,
listName,
undefined,
vscode.TreeItemCollapsibleState.Collapsed,
children,
);
}
export function createDbTreeViewItemOwner(
dbItem: RemoteOwnerDbItem,
ownerName: string,
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
new vscode.ThemeIcon("organization"),
ownerName,
undefined,
vscode.TreeItemCollapsibleState.None,
[],
);
}
export function createDbTreeViewItemRepo(
dbItem: RemoteRepoDbItem,
repoName: string,
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
new vscode.ThemeIcon("database"),
repoName,
undefined,
vscode.TreeItemCollapsibleState.None,
[],
);
}
export function createDbTreeViewItemLocalDatabase(
dbItem: LocalDatabaseDbItem,
databaseName: string,
language: string,
): DbTreeViewItem {
return new DbTreeViewItem(
dbItem,
new vscode.ThemeIcon("database"),
databaseName,
`Language: ${language}`,
vscode.TreeItemCollapsibleState.None,
[],
);
}


@@ -1,5 +1,5 @@
import { DisposableObject } from './pure/disposable-object';
import { logger } from './logging';
import { DisposableObject } from "./pure/disposable-object";
import { logger } from "./logging";
/**
* Base class for "discovery" operations, which scan the file system to find specific kinds of
@@ -38,8 +38,7 @@ export abstract class Discovery<T> extends DisposableObject {
if (this.discoveryInProgress) {
// There's already a discovery operation in progress. Tell it to restart when it's done.
this.retry = true;
}
else {
} else {
// No discovery in progress, so start one now.
this.discoveryInProgress = true;
this.launchDiscovery();
@@ -53,15 +52,16 @@ export abstract class Discovery<T> extends DisposableObject {
*/
private launchDiscovery(): void {
const discoveryPromise = this.discover();
discoveryPromise.then(results => {
if (!this.retry) {
// Update any listeners with the results of the discovery.
this.discoveryInProgress = false;
this.update(results);
}
})
discoveryPromise
.then((results) => {
if (!this.retry) {
// Update any listeners with the results of the discovery.
this.discoveryInProgress = false;
this.update(results);
}
})
.catch(err => {
.catch((err) => {
void logger.log(`${this.name} failed. Reason: ${err.message}`);
})


@@ -1,21 +1,21 @@
import * as fetch from 'node-fetch';
import * as fs from 'fs-extra';
import * as os from 'os';
import * as path from 'path';
import * as semver from 'semver';
import * as unzipper from 'unzipper';
import * as url from 'url';
import { ExtensionContext, Event } from 'vscode';
import { DistributionConfig } from './config';
import * as fetch from "node-fetch";
import * as fs from "fs-extra";
import * as os from "os";
import * as path from "path";
import * as semver from "semver";
import * as unzipper from "unzipper";
import * as url from "url";
import { ExtensionContext, Event } from "vscode";
import { DistributionConfig } from "./config";
import {
InvocationRateLimiter,
InvocationRateLimiterResultKind,
showAndLogErrorMessage,
showAndLogWarningMessage
} from './helpers';
import { logger } from './logging';
import { getCodeQlCliVersion } from './cli-version';
import { ProgressCallback, reportStreamProgress } from './commandRunner';
showAndLogWarningMessage,
} from "./helpers";
import { logger } from "./logging";
import { getCodeQlCliVersion } from "./cli-version";
import { ProgressCallback, reportStreamProgress } from "./commandRunner";
/**
* distribution.ts
@@ -30,7 +30,7 @@ import { ProgressCallback, reportStreamProgress } from './commandRunner';
* We set the default here rather than as a default config value so that this default is invoked
* upon blanking the setting.
*/
const DEFAULT_DISTRIBUTION_OWNER_NAME = 'github';
const DEFAULT_DISTRIBUTION_OWNER_NAME = "github";
/**
* Default value for the repository name of the extension-managed distribution on GitHub.
@@ -38,14 +38,15 @@ const DEFAULT_DISTRIBUTION_OWNER_NAME = 'github';
* We set the default here rather than as a default config value so that this default is invoked
* upon blanking the setting.
*/
const DEFAULT_DISTRIBUTION_REPOSITORY_NAME = 'codeql-cli-binaries';
const DEFAULT_DISTRIBUTION_REPOSITORY_NAME = "codeql-cli-binaries";
/**
* Range of versions of the CLI that are compatible with the extension.
*
* This applies to both extension-managed and CLI distributions.
*/
export const DEFAULT_DISTRIBUTION_VERSION_RANGE: semver.Range = new semver.Range('2.x');
export const DEFAULT_DISTRIBUTION_VERSION_RANGE: semver.Range =
new semver.Range("2.x");
export interface DistributionProvider {
getCodeQlPathWithoutVersionCheck(): Promise<string | undefined>;
@@ -54,35 +55,39 @@ export interface DistributionProvider {
}
export class DistributionManager implements DistributionProvider {
/**
* Get the name of the codeql cli installation we prefer to install, based on our current platform.
*/
public static getRequiredAssetName(): string {
switch (os.platform()) {
case 'linux':
return 'codeql-linux64.zip';
case 'darwin':
return 'codeql-osx64.zip';
case 'win32':
return 'codeql-win64.zip';
case "linux":
return "codeql-linux64.zip";
case "darwin":
return "codeql-osx64.zip";
case "win32":
return "codeql-win64.zip";
default:
return 'codeql.zip';
return "codeql.zip";
}
}
constructor(
public readonly config: DistributionConfig,
private readonly versionRange: semver.Range,
extensionContext: ExtensionContext
extensionContext: ExtensionContext,
) {
this._onDidChangeDistribution = config.onDidChangeConfiguration;
this.extensionSpecificDistributionManager =
new ExtensionSpecificDistributionManager(config, versionRange, extensionContext);
new ExtensionSpecificDistributionManager(
config,
versionRange,
extensionContext,
);
this.updateCheckRateLimiter = new InvocationRateLimiter(
extensionContext,
'extensionSpecificDistributionUpdateCheck',
() => this.extensionSpecificDistributionManager.checkForUpdatesToDistribution()
"extensionSpecificDistributionUpdateCheck",
() =>
this.extensionSpecificDistributionManager.checkForUpdatesToDistribution(),
);
}
@@ -120,7 +125,9 @@ export class DistributionManager implements DistributionProvider {
* - If the user is using an extension-managed CLI, then prereleases are only accepted when the
* includePrerelease config option is set.
*/
const includePrerelease = distribution.kind !== DistributionKind.ExtensionManaged || this.config.includePrerelease;
const includePrerelease =
distribution.kind !== DistributionKind.ExtensionManaged ||
this.config.includePrerelease;
if (!semver.satisfies(version, this.versionRange, { includePrerelease })) {
return {
@@ -132,7 +139,7 @@ export class DistributionManager implements DistributionProvider {
return {
distribution,
kind: FindDistributionResultKind.CompatibleDistribution,
version
version,
};
}
@@ -149,49 +156,58 @@ export class DistributionManager implements DistributionProvider {
/**
* Returns the path to a possibly-compatible CodeQL launcher binary, or undefined if a binary cannot be found.
*/
async getDistributionWithoutVersionCheck(): Promise<Distribution | undefined> {
async getDistributionWithoutVersionCheck(): Promise<
Distribution | undefined
> {
// Check config setting, then extension specific distribution, then PATH.
if (this.config.customCodeQlPath) {
if (!await fs.pathExists(this.config.customCodeQlPath)) {
void showAndLogErrorMessage(`The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
'by a configuration setting, but a CodeQL executable could not be found at that path. Please check ' +
'that a CodeQL executable exists at the specified path or remove the setting.');
if (!(await fs.pathExists(this.config.customCodeQlPath))) {
void showAndLogErrorMessage(
`The CodeQL executable path is specified as "${this.config.customCodeQlPath}" ` +
"by a configuration setting, but a CodeQL executable could not be found at that path. Please check " +
"that a CodeQL executable exists at the specified path or remove the setting.",
);
return undefined;
}
// emit a warning if using a deprecated launcher and a non-deprecated launcher exists
if (
deprecatedCodeQlLauncherName() &&
this.config.customCodeQlPath.endsWith(deprecatedCodeQlLauncherName()!) &&
await this.hasNewLauncherName()
this.config.customCodeQlPath.endsWith(
deprecatedCodeQlLauncherName()!,
) &&
(await this.hasNewLauncherName())
) {
warnDeprecatedLauncher();
}
return {
codeQlPath: this.config.customCodeQlPath,
kind: DistributionKind.CustomPathConfig
kind: DistributionKind.CustomPathConfig,
};
}
const extensionSpecificCodeQlPath = await this.extensionSpecificDistributionManager.getCodeQlPathWithoutVersionCheck();
const extensionSpecificCodeQlPath =
await this.extensionSpecificDistributionManager.getCodeQlPathWithoutVersionCheck();
if (extensionSpecificCodeQlPath !== undefined) {
return {
codeQlPath: extensionSpecificCodeQlPath,
kind: DistributionKind.ExtensionManaged
kind: DistributionKind.ExtensionManaged,
};
}
if (process.env.PATH) {
for (const searchDirectory of process.env.PATH.split(path.delimiter)) {
const expectedLauncherPath = await getExecutableFromDirectory(searchDirectory);
const expectedLauncherPath = await getExecutableFromDirectory(
searchDirectory,
);
if (expectedLauncherPath) {
return {
codeQlPath: expectedLauncherPath,
kind: DistributionKind.PathEnvironmentVariable
kind: DistributionKind.PathEnvironmentVariable,
};
}
}
void logger.log('INFO: Could not find CodeQL on path.');
void logger.log("INFO: Could not find CodeQL on path.");
}
return undefined;
@@ -204,14 +220,19 @@ export class DistributionManager implements DistributionProvider {
* Returns a failed promise if an unexpected error occurs during installation.
*/
public async checkForUpdatesToExtensionManagedDistribution(
minSecondsSinceLastUpdateCheck: number): Promise<DistributionUpdateCheckResult> {
minSecondsSinceLastUpdateCheck: number,
): Promise<DistributionUpdateCheckResult> {
const distribution = await this.getDistributionWithoutVersionCheck();
const extensionManagedCodeQlPath = await this.extensionSpecificDistributionManager.getCodeQlPathWithoutVersionCheck();
const extensionManagedCodeQlPath =
await this.extensionSpecificDistributionManager.getCodeQlPathWithoutVersionCheck();
if (distribution?.codeQlPath !== extensionManagedCodeQlPath) {
// A distribution is present but it isn't managed by the extension.
return createInvalidLocationResult();
}
const updateCheckResult = await this.updateCheckRateLimiter.invokeFunctionIfIntervalElapsed(minSecondsSinceLastUpdateCheck);
const updateCheckResult =
await this.updateCheckRateLimiter.invokeFunctionIfIntervalElapsed(
minSecondsSinceLastUpdateCheck,
);
switch (updateCheckResult.kind) {
case InvocationRateLimiterResultKind.Invoked:
return updateCheckResult.result;
@@ -227,9 +248,12 @@ export class DistributionManager implements DistributionProvider {
*/
public installExtensionManagedDistributionRelease(
release: Release,
progressCallback?: ProgressCallback
progressCallback?: ProgressCallback,
): Promise<void> {
return this.extensionSpecificDistributionManager.installDistributionRelease(release, progressCallback);
return this.extensionSpecificDistributionManager.installDistributionRelease(
release,
progressCallback,
);
}
public get onDidChangeDistribution(): Event<void> | undefined {
@@ -260,7 +284,7 @@ class ExtensionSpecificDistributionManager {
constructor(
private readonly config: DistributionConfig,
private readonly versionRange: semver.Range,
private readonly extensionContext: ExtensionContext
private readonly extensionContext: ExtensionContext,
) {
/**/
}
@@ -268,7 +292,10 @@ class ExtensionSpecificDistributionManager {
public async getCodeQlPathWithoutVersionCheck(): Promise<string | undefined> {
if (this.getInstalledRelease() !== undefined) {
// An extension specific distribution has been installed.
const expectedLauncherPath = await getExecutableFromDirectory(this.getDistributionRootPath(), true);
const expectedLauncherPath = await getExecutableFromDirectory(
this.getDistributionRootPath(),
true,
);
if (expectedLauncherPath) {
return expectedLauncherPath;
}
@@ -276,8 +303,10 @@ class ExtensionSpecificDistributionManager {
try {
await this.removeDistribution();
} catch (e) {
void logger.log('WARNING: Tried to remove corrupted CodeQL CLI at ' +
`${this.getDistributionStoragePath()} but encountered an error: ${e}.`);
void logger.log(
"WARNING: Tried to remove corrupted CodeQL CLI at " +
`${this.getDistributionStoragePath()} but encountered an error: ${e}.`,
);
}
}
return undefined;
@@ -309,53 +338,80 @@ class ExtensionSpecificDistributionManager {
*
* Returns a failed promise if an unexpected error occurs during installation.
*/
public async installDistributionRelease(release: Release,
progressCallback?: ProgressCallback): Promise<void> {
public async installDistributionRelease(
release: Release,
progressCallback?: ProgressCallback,
): Promise<void> {
await this.downloadDistribution(release, progressCallback);
// Store the installed release within the global extension state.
await this.storeInstalledRelease(release);
}
private async downloadDistribution(release: Release,
progressCallback?: ProgressCallback): Promise<void> {
private async downloadDistribution(
release: Release,
progressCallback?: ProgressCallback,
): Promise<void> {
try {
await this.removeDistribution();
} catch (e) {
void logger.log(`Tried to clean up old version of CLI at ${this.getDistributionStoragePath()} ` +
`but encountered an error: ${e}.`);
void logger.log(
`Tried to clean up old version of CLI at ${this.getDistributionStoragePath()} ` +
`but encountered an error: ${e}.`,
);
}
// Filter assets to the unique one that we require.
const requiredAssetName = DistributionManager.getRequiredAssetName();
const assets = release.assets.filter(asset => asset.name === requiredAssetName);
const assets = release.assets.filter(
(asset) => asset.name === requiredAssetName,
);
if (assets.length === 0) {
throw new Error(`Invariant violation: chose a release to install that didn't have ${requiredAssetName}`);
throw new Error(
`Invariant violation: chose a release to install that didn't have ${requiredAssetName}`,
);
}
if (assets.length > 1) {
void logger.log('WARNING: chose a release with more than one asset to install, found ' +
assets.map(asset => asset.name).join(', '));
void logger.log(
"WARNING: chose a release with more than one asset to install, found " +
assets.map((asset) => asset.name).join(", "),
);
}
const assetStream = await this.createReleasesApiConsumer().streamBinaryContentOfAsset(assets[0]);
const tmpDirectory = await fs.mkdtemp(path.join(os.tmpdir(), 'vscode-codeql'));
const assetStream =
await this.createReleasesApiConsumer().streamBinaryContentOfAsset(
assets[0],
);
const tmpDirectory = await fs.mkdtemp(
path.join(os.tmpdir(), "vscode-codeql"),
);
try {
const archivePath = path.join(tmpDirectory, 'distributionDownload.zip');
const archivePath = path.join(tmpDirectory, "distributionDownload.zip");
const archiveFile = fs.createWriteStream(archivePath);
const contentLength = assetStream.headers.get('content-length');
const totalNumBytes = contentLength ? parseInt(contentLength, 10) : undefined;
reportStreamProgress(assetStream.body, `Downloading CodeQL CLI ${release.name}`, totalNumBytes, progressCallback);
const contentLength = assetStream.headers.get("content-length");
const totalNumBytes = contentLength
? parseInt(contentLength, 10)
: undefined;
reportStreamProgress(
assetStream.body,
`Downloading CodeQL CLI ${release.name}`,
totalNumBytes,
progressCallback,
);
await new Promise((resolve, reject) =>
assetStream.body.pipe(archiveFile)
.on('finish', resolve)
.on('error', reject)
assetStream.body
.pipe(archiveFile)
.on("finish", resolve)
.on("error", reject),
);
await this.bumpDistributionFolderIndex();
void logger.log(`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`);
void logger.log(
`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`,
);
await extractZipArchive(archivePath, this.getDistributionStoragePath());
} finally {
await fs.remove(tmpDirectory);
@@ -376,111 +432,167 @@ class ExtensionSpecificDistributionManager {
private async getLatestRelease(): Promise<Release> {
const requiredAssetName = DistributionManager.getRequiredAssetName();
void logger.log(`Searching for latest release including ${requiredAssetName}.`);
void logger.log(
`Searching for latest release including ${requiredAssetName}.`,
);
return this.createReleasesApiConsumer().getLatestRelease(
this.versionRange,
this.config.includePrerelease,
release => {
const matchingAssets = release.assets.filter(asset => asset.name === requiredAssetName);
(release) => {
const matchingAssets = release.assets.filter(
(asset) => asset.name === requiredAssetName,
);
if (matchingAssets.length === 0) {
// For example, this could be a release with no platform-specific assets.
void logger.log(`INFO: Ignoring a release with no assets named ${requiredAssetName}`);
void logger.log(
`INFO: Ignoring a release with no assets named ${requiredAssetName}`,
);
return false;
}
if (matchingAssets.length > 1) {
void logger.log(`WARNING: Ignoring a release with more than one asset named ${requiredAssetName}`);
void logger.log(
`WARNING: Ignoring a release with more than one asset named ${requiredAssetName}`,
);
return false;
}
return true;
}
},
);
}
private createReleasesApiConsumer(): ReleasesApiConsumer {
const ownerName = this.config.ownerName ? this.config.ownerName : DEFAULT_DISTRIBUTION_OWNER_NAME;
const repositoryName = this.config.repositoryName ? this.config.repositoryName : DEFAULT_DISTRIBUTION_REPOSITORY_NAME;
return new ReleasesApiConsumer(ownerName, repositoryName, this.config.personalAccessToken);
const ownerName = this.config.ownerName
? this.config.ownerName
: DEFAULT_DISTRIBUTION_OWNER_NAME;
const repositoryName = this.config.repositoryName
? this.config.repositoryName
: DEFAULT_DISTRIBUTION_REPOSITORY_NAME;
return new ReleasesApiConsumer(
ownerName,
repositoryName,
this.config.personalAccessToken,
);
}
private async bumpDistributionFolderIndex(): Promise<void> {
const index = this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey, 0);
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
0,
);
await this.extensionContext.globalState.update(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey, index + 1);
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
index + 1,
);
}
private getDistributionStoragePath(): string {
// Use an empty string for the initial distribution for backwards compatibility.
const distributionFolderIndex = this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey, 0) || '';
return path.join(this.extensionContext.globalStoragePath,
ExtensionSpecificDistributionManager._currentDistributionFolderBaseName + distributionFolderIndex);
const distributionFolderIndex =
this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
0,
) || "";
return path.join(
this.extensionContext.globalStoragePath,
ExtensionSpecificDistributionManager._currentDistributionFolderBaseName +
distributionFolderIndex,
);
}
private getDistributionRootPath(): string {
return path.join(this.getDistributionStoragePath(),
ExtensionSpecificDistributionManager._codeQlExtractedFolderName);
return path.join(
this.getDistributionStoragePath(),
ExtensionSpecificDistributionManager._codeQlExtractedFolderName,
);
}
private getInstalledRelease(): Release | undefined {
return this.extensionContext.globalState.get(ExtensionSpecificDistributionManager._installedReleaseStateKey);
return this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._installedReleaseStateKey,
);
}
private async storeInstalledRelease(release: Release | undefined): Promise<void> {
await this.extensionContext.globalState.update(ExtensionSpecificDistributionManager._installedReleaseStateKey, release);
private async storeInstalledRelease(
release: Release | undefined,
): Promise<void> {
await this.extensionContext.globalState.update(
ExtensionSpecificDistributionManager._installedReleaseStateKey,
release,
);
}
private static readonly _currentDistributionFolderBaseName = 'distribution';
private static readonly _currentDistributionFolderIndexStateKey = 'distributionFolderIndex';
private static readonly _installedReleaseStateKey = 'distributionRelease';
private static readonly _codeQlExtractedFolderName = 'codeql';
private static readonly _currentDistributionFolderBaseName = "distribution";
private static readonly _currentDistributionFolderIndexStateKey =
"distributionFolderIndex";
private static readonly _installedReleaseStateKey = "distributionRelease";
private static readonly _codeQlExtractedFolderName = "codeql";
}
export class ReleasesApiConsumer {
constructor(ownerName: string, repoName: string, personalAccessToken?: string) {
constructor(
ownerName: string,
repoName: string,
personalAccessToken?: string,
) {
// Specify version of the GitHub API
this._defaultHeaders['accept'] = 'application/vnd.github.v3+json';
this._defaultHeaders["accept"] = "application/vnd.github.v3+json";
if (personalAccessToken) {
this._defaultHeaders['authorization'] = `token ${personalAccessToken}`;
this._defaultHeaders["authorization"] = `token ${personalAccessToken}`;
}
this._ownerName = ownerName;
this._repoName = repoName;
}
public async getLatestRelease(versionRange: semver.Range, includePrerelease = false, additionalCompatibilityCheck?: (release: GithubRelease) => boolean): Promise<Release> {
public async getLatestRelease(
versionRange: semver.Range,
includePrerelease = false,
additionalCompatibilityCheck?: (release: GithubRelease) => boolean,
): Promise<Release> {
const apiPath = `/repos/${this._ownerName}/${this._repoName}/releases`;
const allReleases: GithubRelease[] = await (await this.makeApiCall(apiPath)).json();
const compatibleReleases = allReleases.filter(release => {
const allReleases: GithubRelease[] = await (
await this.makeApiCall(apiPath)
).json();
const compatibleReleases = allReleases.filter((release) => {
if (release.prerelease && !includePrerelease) {
return false;
}
const version = semver.parse(release.tag_name);
if (version === null || !semver.satisfies(version, versionRange, { includePrerelease })) {
if (
version === null ||
!semver.satisfies(version, versionRange, { includePrerelease })
) {
return false;
}
return !additionalCompatibilityCheck || additionalCompatibilityCheck(release);
return (
!additionalCompatibilityCheck || additionalCompatibilityCheck(release)
);
});
// Tag names must all be parsable to semvers due to the previous filtering step.
const latestRelease = compatibleReleases.sort((a, b) => {
const versionComparison = semver.compare(semver.parse(b.tag_name)!, semver.parse(a.tag_name)!);
const versionComparison = semver.compare(
semver.parse(b.tag_name)!,
semver.parse(a.tag_name)!,
);
if (versionComparison !== 0) {
return versionComparison;
}
return b.created_at.localeCompare(a.created_at, 'en-US');
return b.created_at.localeCompare(a.created_at, "en-US");
})[0];
if (latestRelease === undefined) {
throw new Error('No compatible CodeQL CLI releases were found. ' +
'Please check that the CodeQL extension is up to date.');
throw new Error(
"No compatible CodeQL CLI releases were found. " +
"Please check that the CodeQL extension is up to date.",
);
}
const assets: ReleaseAsset[] = latestRelease.assets.map(asset => {
const assets: ReleaseAsset[] = latestRelease.assets.map((asset) => {
return {
id: asset.id,
name: asset.name,
size: asset.size
size: asset.size,
};
});
@@ -488,29 +600,42 @@ export class ReleasesApiConsumer {
assets,
createdAt: latestRelease.created_at,
id: latestRelease.id,
name: latestRelease.name
name: latestRelease.name,
};
}
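getLatestRelease filters the GitHub releases down to those whose tag parses as a semver version inside the requested range, then sorts them newest-first. A reduced sketch of that selection step, assuming a simplified release shape (TaggedRelease and pickLatest are illustrative names):

import * as semver from "semver";

interface TaggedRelease {
  tag_name: string;
  prerelease: boolean;
  created_at: string;
}

// Keep releases whose tag is a semver version inside `range`, then pick the
// newest by version, falling back to creation date for equal versions.
function pickLatest(
  releases: TaggedRelease[],
  range: semver.Range,
  includePrerelease = false,
): TaggedRelease | undefined {
  return releases
    .filter((release) => {
      if (release.prerelease && !includePrerelease) {
        return false;
      }
      const version = semver.parse(release.tag_name);
      return (
        version !== null &&
        semver.satisfies(version, range, { includePrerelease })
      );
    })
    .sort(
      (a, b) =>
        semver.compare(semver.parse(b.tag_name)!, semver.parse(a.tag_name)!) ||
        b.created_at.localeCompare(a.created_at, "en-US"),
    )[0];
}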
public async streamBinaryContentOfAsset(asset: ReleaseAsset): Promise<fetch.Response> {
public async streamBinaryContentOfAsset(
asset: ReleaseAsset,
): Promise<fetch.Response> {
const apiPath = `/repos/${this._ownerName}/${this._repoName}/releases/assets/${asset.id}`;
return await this.makeApiCall(apiPath, {
'accept': 'application/octet-stream'
accept: "application/octet-stream",
});
}
protected async makeApiCall(apiPath: string, additionalHeaders: { [key: string]: string } = {}): Promise<fetch.Response> {
const response = await this.makeRawRequest(ReleasesApiConsumer._apiBase + apiPath,
Object.assign({}, this._defaultHeaders, additionalHeaders));
protected async makeApiCall(
apiPath: string,
additionalHeaders: { [key: string]: string } = {},
): Promise<fetch.Response> {
const response = await this.makeRawRequest(
ReleasesApiConsumer._apiBase + apiPath,
Object.assign({}, this._defaultHeaders, additionalHeaders),
);
if (!response.ok) {
// Check for rate limiting
const rateLimitResetValue = response.headers.get('X-RateLimit-Reset');
const rateLimitResetValue = response.headers.get("X-RateLimit-Reset");
if (response.status === 403 && rateLimitResetValue) {
const secondsToMillisecondsFactor = 1000;
const rateLimitResetDate = new Date(parseInt(rateLimitResetValue, 10) * secondsToMillisecondsFactor);
throw new GithubRateLimitedError(response.status, await response.text(), rateLimitResetDate);
const rateLimitResetDate = new Date(
parseInt(rateLimitResetValue, 10) * secondsToMillisecondsFactor,
);
throw new GithubRateLimitedError(
response.status,
await response.text(),
rateLimitResetDate,
);
}
throw new GithubApiError(response.status, await response.text());
}
@@ -520,24 +645,29 @@ export class ReleasesApiConsumer {
private async makeRawRequest(
requestUrl: string,
headers: { [key: string]: string },
redirectCount = 0): Promise<fetch.Response> {
redirectCount = 0,
): Promise<fetch.Response> {
const response = await fetch.default(requestUrl, {
headers,
redirect: 'manual'
redirect: "manual",
});
const redirectUrl = response.headers.get('location');
if (isRedirectStatusCode(response.status) && redirectUrl && redirectCount < ReleasesApiConsumer._maxRedirects) {
const redirectUrl = response.headers.get("location");
if (
isRedirectStatusCode(response.status) &&
redirectUrl &&
redirectCount < ReleasesApiConsumer._maxRedirects
) {
const parsedRedirectUrl = url.parse(redirectUrl);
if (parsedRedirectUrl.protocol != 'https:') {
throw new Error('Encountered a non-https redirect, rejecting');
if (parsedRedirectUrl.protocol != "https:") {
throw new Error("Encountered a non-https redirect, rejecting");
}
if (parsedRedirectUrl.host != 'api.github.com') {
if (parsedRedirectUrl.host != "api.github.com") {
// Remove authorization header if we are redirected outside of the GitHub API.
//
// This is necessary to stream release assets since AWS fails if more than one auth
// mechanism is provided.
delete headers['authorization'];
delete headers["authorization"];
}
return await this.makeRawRequest(redirectUrl, headers, redirectCount + 1);
}
@@ -549,37 +679,51 @@ export class ReleasesApiConsumer {
private readonly _ownerName: string;
private readonly _repoName: string;
private static readonly _apiBase = 'https://api.github.com';
private static readonly _apiBase = "https://api.github.com";
private static readonly _maxRedirects = 20;
}
export async function extractZipArchive(archivePath: string, outPath: string): Promise<void> {
export async function extractZipArchive(
archivePath: string,
outPath: string,
): Promise<void> {
const archive = await unzipper.Open.file(archivePath);
await archive.extract({
concurrency: 4,
path: outPath
path: outPath,
});
// Set file permissions for extracted files
await Promise.all(archive.files.map(async file => {
// Only change file permissions if within outPath (path.join normalises the path)
const extractedPath = path.join(outPath, file.path);
if (extractedPath.indexOf(outPath) !== 0 || !(await fs.pathExists(extractedPath))) {
return Promise.resolve();
}
return fs.chmod(extractedPath, file.externalFileAttributes >>> 16);
}));
await Promise.all(
archive.files.map(async (file) => {
// Only change file permissions if within outPath (path.join normalises the path)
const extractedPath = path.join(outPath, file.path);
if (
extractedPath.indexOf(outPath) !== 0 ||
!(await fs.pathExists(extractedPath))
) {
return Promise.resolve();
}
return fs.chmod(extractedPath, file.externalFileAttributes >>> 16);
}),
);
}
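The chmod call above relies on the zip format storing the Unix file mode in the upper 16 bits of an entry's external file attributes. A tiny illustration of that bit layout (the helper name is hypothetical):

// The upper 16 bits of a zip entry's "external file attributes" hold the
// Unix mode when the archive was created on a Unix-like system.
function unixModeFromZipAttributes(externalFileAttributes: number): number {
  return externalFileAttributes >>> 16;
}

// An entry written with mode 0o755 round-trips:
//   unixModeFromZipAttributes(0o755 << 16) === 0o755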
export function codeQlLauncherName(): string {
return (os.platform() === 'win32') ? 'codeql.exe' : 'codeql';
return os.platform() === "win32" ? "codeql.exe" : "codeql";
}
function deprecatedCodeQlLauncherName(): string | undefined {
return (os.platform() === 'win32') ? 'codeql.cmd' : undefined;
return os.platform() === "win32" ? "codeql.cmd" : undefined;
}
function isRedirectStatusCode(statusCode: number): boolean {
return statusCode === 301 || statusCode === 302 || statusCode === 303 || statusCode === 307 || statusCode === 308;
return (
statusCode === 301 ||
statusCode === 302 ||
statusCode === 303 ||
statusCode === 307 ||
statusCode === 308
);
}
/*
@@ -589,7 +733,7 @@ function isRedirectStatusCode(statusCode: number): boolean {
export enum DistributionKind {
CustomPathConfig,
ExtensionManaged,
PathEnvironmentVariable
PathEnvironmentVariable,
}
export interface Distribution {
@@ -601,7 +745,7 @@ export enum FindDistributionResultKind {
CompatibleDistribution,
UnknownCompatibilityDistribution,
IncompatibleDistribution,
NoDistribution
NoDistribution,
}
export type FindDistributionResult =
@@ -641,7 +785,7 @@ export enum DistributionUpdateCheckResultKind {
AlreadyCheckedRecentlyResult,
AlreadyUpToDate,
InvalidLocation,
UpdateAvailable
UpdateAvailable,
}
type DistributionUpdateCheckResult =
@@ -672,43 +816,55 @@ export interface UpdateAvailableResult {
function createAlreadyCheckedRecentlyResult(): AlreadyCheckedRecentlyResult {
return {
kind: DistributionUpdateCheckResultKind.AlreadyCheckedRecentlyResult
kind: DistributionUpdateCheckResultKind.AlreadyCheckedRecentlyResult,
};
}
function createAlreadyUpToDateResult(): AlreadyUpToDateResult {
return {
kind: DistributionUpdateCheckResultKind.AlreadyUpToDate
kind: DistributionUpdateCheckResultKind.AlreadyUpToDate,
};
}
function createInvalidLocationResult(): InvalidLocationResult {
return {
kind: DistributionUpdateCheckResultKind.InvalidLocation
kind: DistributionUpdateCheckResultKind.InvalidLocation,
};
}
function createUpdateAvailableResult(updatedRelease: Release): UpdateAvailableResult {
function createUpdateAvailableResult(
updatedRelease: Release,
): UpdateAvailableResult {
return {
kind: DistributionUpdateCheckResultKind.UpdateAvailable,
updatedRelease
updatedRelease,
};
}
// Exported for testing
export async function getExecutableFromDirectory(directory: string, warnWhenNotFound = false): Promise<string | undefined> {
export async function getExecutableFromDirectory(
directory: string,
warnWhenNotFound = false,
): Promise<string | undefined> {
const expectedLauncherPath = path.join(directory, codeQlLauncherName());
const deprecatedLauncherName = deprecatedCodeQlLauncherName();
const alternateExpectedLauncherPath = deprecatedLauncherName ? path.join(directory, deprecatedLauncherName) : undefined;
const alternateExpectedLauncherPath = deprecatedLauncherName
? path.join(directory, deprecatedLauncherName)
: undefined;
if (await fs.pathExists(expectedLauncherPath)) {
return expectedLauncherPath;
} else if (alternateExpectedLauncherPath && (await fs.pathExists(alternateExpectedLauncherPath))) {
} else if (
alternateExpectedLauncherPath &&
(await fs.pathExists(alternateExpectedLauncherPath))
) {
warnDeprecatedLauncher();
return alternateExpectedLauncherPath;
}
if (warnWhenNotFound) {
void logger.log(`WARNING: Expected to find a CodeQL CLI executable at ${expectedLauncherPath} but one was not found. ` +
'Will try PATH.');
void logger.log(
`WARNING: Expected to find a CodeQL CLI executable at ${expectedLauncherPath} but one was not found. ` +
"Will try PATH.",
);
}
return undefined;
}
@@ -716,7 +872,7 @@ export async function getExecutableFromDirectory(directory: string, warnWhenNotF
function warnDeprecatedLauncher() {
void showAndLogWarningMessage(
`The "${deprecatedCodeQlLauncherName()!}" launcher has been deprecated and will be removed in a future version. ` +
`Please use "${codeQlLauncherName()}" instead. It is recommended to update to the latest CodeQL binaries.`
`Please use "${codeQlLauncherName()}" instead. It is recommended to update to the latest CodeQL binaries.`,
);
}
@@ -762,7 +918,6 @@ export interface ReleaseAsset {
size: number;
}
/**
* The json returned from github for a release.
*/
@@ -822,7 +977,11 @@ export class GithubApiError extends Error {
}
export class GithubRateLimitedError extends GithubApiError {
constructor(public status: number, public body: string, public rateLimitResetDate: Date) {
constructor(
public status: number,
public body: string,
public rateLimitResetDate: Date,
) {
super(status, body);
}
}


@@ -1,5 +1,5 @@
import { ChildEvalLogTreeItem, EvalLogTreeItem } from './eval-log-viewer';
import { EvalLogData as EvalLogData } from './pure/log-summary-parser';
import { ChildEvalLogTreeItem, EvalLogTreeItem } from "./eval-log-viewer";
import { EvalLogData as EvalLogData } from "./pure/log-summary-parser";
/** Builds the tree data for the evaluator log viewer for a single query run. */
export default class EvalLogTreeBuilder {
@@ -22,40 +22,40 @@ export default class EvalLogTreeBuilder {
// level. For now, there will always be one root (the one query being shown).
const queryItem: EvalLogTreeItem = {
label: this.queryName,
children: [] // Will assign predicate items as children shortly.
children: [], // Will assign predicate items as children shortly.
};
// Display descriptive message when no data exists
// Display descriptive message when no data exists
if (this.evalLogDataItems.length === 0) {
const noResultsItem: ChildEvalLogTreeItem = {
label: 'No predicates evaluated in this query run.',
label: "No predicates evaluated in this query run.",
parent: queryItem,
children: [],
};
queryItem.children.push(noResultsItem);
}
// For each predicate, create a TreeItem object with appropriate parents/children
this.evalLogDataItems.forEach(logDataItem => {
// For each predicate, create a TreeItem object with appropriate parents/children
this.evalLogDataItems.forEach((logDataItem) => {
const predicateLabel = `${logDataItem.predicateName} (${logDataItem.resultSize} tuples, ${logDataItem.millis} ms)`;
const predicateItem: ChildEvalLogTreeItem = {
label: predicateLabel,
parent: queryItem,
children: [] // Will assign pipeline items as children shortly.
children: [], // Will assign pipeline items as children shortly.
};
for (const [pipelineName, steps] of Object.entries(logDataItem.ra)) {
const pipelineLabel = `Pipeline: ${pipelineName}`;
const pipelineItem: ChildEvalLogTreeItem = {
label: pipelineLabel,
parent: predicateItem,
children: [] // Will assign step items as children shortly.
children: [], // Will assign step items as children shortly.
};
predicateItem.children.push(pipelineItem);
pipelineItem.children = steps.map((step: string) => ({
label: step,
parent: pipelineItem,
children: []
children: [],
}));
}
queryItem.children.push(predicateItem);


@@ -1,7 +1,16 @@
import { window, TreeDataProvider, TreeView, TreeItem, ProviderResult, Event, EventEmitter, TreeItemCollapsibleState } from 'vscode';
import { commandRunner } from './commandRunner';
import { DisposableObject } from './pure/disposable-object';
import { showAndLogErrorMessage } from './helpers';
import {
window,
TreeDataProvider,
TreeView,
TreeItem,
ProviderResult,
Event,
EventEmitter,
TreeItemCollapsibleState,
} from "vscode";
import { commandRunner } from "./commandRunner";
import { DisposableObject } from "./pure/disposable-object";
import { showAndLogErrorMessage } from "./helpers";
export interface EvalLogTreeItem {
label?: string;
@@ -13,11 +22,18 @@ export interface ChildEvalLogTreeItem extends EvalLogTreeItem {
}
/** Provides data from parsed CodeQL evaluator logs to be rendered in a tree view. */
class EvalLogDataProvider extends DisposableObject implements TreeDataProvider<EvalLogTreeItem> {
class EvalLogDataProvider
extends DisposableObject
implements TreeDataProvider<EvalLogTreeItem>
{
public roots: EvalLogTreeItem[] = [];
private _onDidChangeTreeData: EventEmitter<EvalLogTreeItem | undefined | null | void> = new EventEmitter<EvalLogTreeItem | undefined | null | void>();
readonly onDidChangeTreeData: Event<EvalLogTreeItem | undefined | null | void> = this._onDidChangeTreeData.event;
private _onDidChangeTreeData: EventEmitter<
EvalLogTreeItem | undefined | null | void
> = new EventEmitter<EvalLogTreeItem | undefined | null | void>();
readonly onDidChangeTreeData: Event<
EvalLogTreeItem | undefined | null | void
> = this._onDidChangeTreeData.event;
refresh(): void {
this._onDidChangeTreeData.fire();
@@ -27,7 +43,7 @@ class EvalLogDataProvider extends DisposableObject implements TreeDataProvider<E
const state = element.children.length
? TreeItemCollapsibleState.Collapsed
: TreeItemCollapsibleState.None;
const treeItem = new TreeItem(element.label || '', state);
const treeItem = new TreeItem(element.label || "", state);
treeItem.tooltip = treeItem.label || "";
return treeItem;
}
@@ -55,17 +71,17 @@ export class EvalLogViewer extends DisposableObject {
super();
this.treeDataProvider = new EvalLogDataProvider();
this.treeView = window.createTreeView('codeQLEvalLogViewer', {
this.treeView = window.createTreeView("codeQLEvalLogViewer", {
treeDataProvider: this.treeDataProvider,
showCollapseAll: true
showCollapseAll: true,
});
this.push(this.treeView);
this.push(this.treeDataProvider);
this.push(
commandRunner('codeQLEvalLogViewer.clear', async () => {
commandRunner("codeQLEvalLogViewer.clear", async () => {
this.clear();
})
}),
);
}
@@ -80,13 +96,15 @@ export class EvalLogViewer extends DisposableObject {
this.treeDataProvider.roots = roots;
this.treeDataProvider.refresh();
this.treeView.message = 'Viewer for query run:'; // Currently only one query supported at a time.
this.treeView.message = "Viewer for query run:"; // Currently only one query supported at a time.
// Handle error on reveal. This could happen if
// the tree view is disposed during the reveal.
this.treeView.reveal(roots[0], { focus: false })?.then(
() => { /**/ },
err => showAndLogErrorMessage(err)
() => {
/**/
},
(err) => showAndLogErrorMessage(err),
);
}
}

File diff suppressed because it is too large.


@@ -1,29 +1,39 @@
import * as fs from 'fs-extra';
import * as glob from 'glob-promise';
import * as yaml from 'js-yaml';
import * as path from 'path';
import * as tmp from 'tmp-promise';
import * as fs from "fs-extra";
import * as glob from "glob-promise";
import * as yaml from "js-yaml";
import * as path from "path";
import * as tmp from "tmp-promise";
import {
ExtensionContext,
Uri,
window as Window,
workspace,
env
} from 'vscode';
import { CodeQLCliServer, QlpacksInfo } from './cli';
import { UserCancellationException } from './commandRunner';
import { logger } from './logging';
import { QueryMetadata } from './pure/interface-types';
env,
} from "vscode";
import { CodeQLCliServer, QlpacksInfo } from "./cli";
import { UserCancellationException } from "./commandRunner";
import { logger } from "./logging";
import { QueryMetadata } from "./pure/interface-types";
// Shared temporary folder for the extension.
export const tmpDir = tmp.dirSync({ prefix: 'queries_', keep: false, unsafeCleanup: true });
export const upgradesTmpDir = path.join(tmpDir.name, 'upgrades');
export const tmpDir = tmp.dirSync({
prefix: "queries_",
keep: false,
unsafeCleanup: true,
});
export const upgradesTmpDir = path.join(tmpDir.name, "upgrades");
fs.ensureDirSync(upgradesTmpDir);
export const tmpDirDisposal = {
dispose: () => {
tmpDir.removeCallback();
}
try {
tmpDir.removeCallback();
} catch (e) {
void logger.log(
`Failed to remove temporary directory ${tmpDir.name}: ${e}`,
);
}
},
};
/**
@@ -38,16 +48,25 @@ export const tmpDirDisposal = {
*
* @return A promise that resolves to the selected item or undefined when being dismissed.
*/
export async function showAndLogErrorMessage(message: string, {
outputLogger = logger,
items = [] as string[],
fullMessage = undefined as (string | undefined)
} = {}): Promise<string | undefined> {
return internalShowAndLog(dropLinesExceptInitial(message), items, outputLogger, Window.showErrorMessage, fullMessage);
export async function showAndLogErrorMessage(
message: string,
{
outputLogger = logger,
items = [] as string[],
fullMessage = undefined as string | undefined,
} = {},
): Promise<string | undefined> {
return internalShowAndLog(
dropLinesExceptInitial(message),
items,
outputLogger,
Window.showErrorMessage,
fullMessage,
);
}
function dropLinesExceptInitial(message: string, n = 2) {
return message.toString().split(/\r?\n/).slice(0, n).join('\n');
return message.toString().split(/\r?\n/).slice(0, n).join("\n");
}
/**
@@ -59,11 +78,16 @@ function dropLinesExceptInitial(message: string, n = 2) {
*
* @return A promise that resolves to the selected item or undefined when being dismissed.
*/
export async function showAndLogWarningMessage(message: string, {
outputLogger = logger,
items = [] as string[]
} = {}): Promise<string | undefined> {
return internalShowAndLog(message, items, outputLogger, Window.showWarningMessage);
export async function showAndLogWarningMessage(
message: string,
{ outputLogger = logger, items = [] as string[] } = {},
): Promise<string | undefined> {
return internalShowAndLog(
message,
items,
outputLogger,
Window.showWarningMessage,
);
}
/**
* Show an information message and log it to the console
@@ -74,24 +98,32 @@ export async function showAndLogWarningMessage(message: string, {
*
* @return A promise that resolves to the selected item or undefined when being dismissed.
*/
export async function showAndLogInformationMessage(message: string, {
outputLogger = logger,
items = [] as string[],
fullMessage = ''
} = {}): Promise<string | undefined> {
return internalShowAndLog(message, items, outputLogger, Window.showInformationMessage, fullMessage);
export async function showAndLogInformationMessage(
message: string,
{ outputLogger = logger, items = [] as string[], fullMessage = "" } = {},
): Promise<string | undefined> {
return internalShowAndLog(
message,
items,
outputLogger,
Window.showInformationMessage,
fullMessage,
);
}
type ShowMessageFn = (message: string, ...items: string[]) => Thenable<string | undefined>;
type ShowMessageFn = (
message: string,
...items: string[]
) => Thenable<string | undefined>;
async function internalShowAndLog(
message: string,
items: string[],
outputLogger = logger,
fn: ShowMessageFn,
fullMessage?: string
fullMessage?: string,
): Promise<string | undefined> {
const label = 'Show Log';
const label = "Show Log";
void outputLogger.log(fullMessage || message);
const result = await fn(message, label, ...items);
if (result === label) {
@@ -106,16 +138,28 @@ async function internalShowAndLog(
* @param message The message to show.
* @param modal If true (the default), show a modal dialog box, otherwise dialog is non-modal and can
* be closed even if the user does not make a choice.
* @param yesTitle The text in the box indicating the affirmative choice.
* @param noTitle The text in the box indicating the negative choice.
*
* @return
* `true` if the user clicks 'Yes',
* `false` if the user clicks 'No' or cancels the dialog,
* `undefined` if the dialog is closed without the user making a choice.
*/
export async function showBinaryChoiceDialog(message: string, modal = true): Promise<boolean | undefined> {
const yesItem = { title: 'Yes', isCloseAffordance: false };
const noItem = { title: 'No', isCloseAffordance: true };
const chosenItem = await Window.showInformationMessage(message, { modal }, yesItem, noItem);
export async function showBinaryChoiceDialog(
message: string,
modal = true,
yesTitle = "Yes",
noTitle = "No",
): Promise<boolean | undefined> {
const yesItem = { title: yesTitle, isCloseAffordance: false };
const noItem = { title: noTitle, isCloseAffordance: true };
const chosenItem = await Window.showInformationMessage(
message,
{ modal },
yesItem,
noItem,
);
if (!chosenItem) {
return undefined;
}
@@ -134,17 +178,26 @@ export async function showBinaryChoiceDialog(message: string, modal = true): Pro
* `false` if the user clicks 'No' or cancels the dialog,
* `undefined` if the dialog is closed without the user making a choice.
*/
export async function showBinaryChoiceWithUrlDialog(message: string, url: string): Promise<boolean | undefined> {
const urlItem = { title: 'More Information', isCloseAffordance: false };
const yesItem = { title: 'Yes', isCloseAffordance: false };
const noItem = { title: 'No', isCloseAffordance: true };
export async function showBinaryChoiceWithUrlDialog(
message: string,
url: string,
): Promise<boolean | undefined> {
const urlItem = { title: "More Information", isCloseAffordance: false };
const yesItem = { title: "Yes", isCloseAffordance: false };
const noItem = { title: "No", isCloseAffordance: true };
let chosenItem;
// Keep the dialog open as long as the user is clicking the 'more information' option.
// To prevent an infinite loop, if the user clicks 'more information' 5 times, close the dialog and return cancelled
let count = 0;
do {
chosenItem = await Window.showInformationMessage(message, { modal: true }, urlItem, yesItem, noItem);
chosenItem = await Window.showInformationMessage(
message,
{ modal: true },
urlItem,
yesItem,
noItem,
);
if (chosenItem === urlItem) {
await env.openExternal(Uri.parse(url, true));
}
@@ -164,7 +217,10 @@ export async function showBinaryChoiceWithUrlDialog(message: string, url: string
*
* @return `true` if the user clicks the action, `false` if the user cancels the dialog.
*/
export async function showInformationMessageWithAction(message: string, actionMessage: string): Promise<boolean> {
export async function showInformationMessageWithAction(
message: string,
actionMessage: string,
): Promise<boolean> {
const actionItem = { title: actionMessage, isCloseAffordance: false };
const chosenItem = await Window.showInformationMessage(message, actionItem);
return chosenItem === actionItem;
@@ -175,7 +231,7 @@ export function getOnDiskWorkspaceFolders() {
const workspaceFolders = workspace.workspaceFolders || [];
const diskWorkspaceFolders: string[] = [];
for (const workspaceFolder of workspaceFolders) {
if (workspaceFolder.uri.scheme === 'file')
if (workspaceFolder.uri.scheme === "file")
diskWorkspaceFolders.push(workspaceFolder.uri.fsPath);
}
return diskWorkspaceFolders;
@@ -190,7 +246,9 @@ export class InvocationRateLimiter<T> {
extensionContext: ExtensionContext,
funcIdentifier: string,
func: () => Promise<T>,
createDate: (dateString?: string) => Date = s => s ? new Date(s) : new Date()) {
createDate: (dateString?: string) => Date = (s) =>
s ? new Date(s) : new Date(),
) {
this._createDate = createDate;
this._extensionContext = extensionContext;
this._func = func;
@@ -200,14 +258,17 @@ export class InvocationRateLimiter<T> {
/**
* Invoke the function if `minSecondsSinceLastInvocation` seconds have elapsed since the last invocation.
*/
public async invokeFunctionIfIntervalElapsed(minSecondsSinceLastInvocation: number): Promise<InvocationRateLimiterResult<T>> {
public async invokeFunctionIfIntervalElapsed(
minSecondsSinceLastInvocation: number,
): Promise<InvocationRateLimiterResult<T>> {
const updateCheckStartDate = this._createDate();
const lastInvocationDate = this.getLastInvocationDate();
if (
minSecondsSinceLastInvocation &&
lastInvocationDate &&
lastInvocationDate <= updateCheckStartDate &&
lastInvocationDate.getTime() + minSecondsSinceLastInvocation * 1000 > updateCheckStartDate.getTime()
lastInvocationDate.getTime() + minSecondsSinceLastInvocation * 1000 >
updateCheckStartDate.getTime()
) {
return createRateLimitedResult();
}
@@ -218,12 +279,18 @@ export class InvocationRateLimiter<T> {
private getLastInvocationDate(): Date | undefined {
const maybeDateString: string | undefined =
this._extensionContext.globalState.get(InvocationRateLimiter._invocationRateLimiterPrefix + this._funcIdentifier);
this._extensionContext.globalState.get(
InvocationRateLimiter._invocationRateLimiterPrefix +
this._funcIdentifier,
);
return maybeDateString ? this._createDate(maybeDateString) : undefined;
}
private async setLastInvocationDate(date: Date): Promise<void> {
return await this._extensionContext.globalState.update(InvocationRateLimiter._invocationRateLimiterPrefix + this._funcIdentifier, date);
return await this._extensionContext.globalState.update(
InvocationRateLimiter._invocationRateLimiterPrefix + this._funcIdentifier,
date,
);
}
private readonly _createDate: (dateString?: string) => Date;
@@ -231,12 +298,13 @@ export class InvocationRateLimiter<T> {
private readonly _func: () => Promise<T>;
private readonly _funcIdentifier: string;
private static readonly _invocationRateLimiterPrefix = 'invocationRateLimiter_lastInvocationDate_';
private static readonly _invocationRateLimiterPrefix =
"invocationRateLimiter_lastInvocationDate_";
}
export enum InvocationRateLimiterResultKind {
Invoked,
RateLimited
RateLimited,
}
/**
@@ -259,13 +327,13 @@ type InvocationRateLimiterResult<T> = InvokedResult<T> | RateLimitedResult;
function createInvokedResult<T>(result: T): InvokedResult<T> {
return {
kind: InvocationRateLimiterResultKind.Invoked,
result
result,
};
}
function createRateLimitedResult(): RateLimitedResult {
return {
kind: InvocationRateLimiterResultKind.RateLimited
kind: InvocationRateLimiterResultKind.RateLimited,
};
}
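InvocationRateLimiter persists the last invocation date in the extension's global state so update checks survive restarts. The same idea, reduced to an in-memory sketch (the class name and result shape are illustrative):

// Minimal in-memory sketch of the rate-limiting idea: run the wrapped
// function only when `minSeconds` have elapsed since the last run.
class SimpleRateLimiter<T> {
  private lastInvocation: Date | undefined;

  constructor(private readonly func: () => Promise<T>) {}

  async invokeIfIntervalElapsed(
    minSeconds: number,
  ): Promise<{ invoked: true; result: T } | { invoked: false }> {
    const now = new Date();
    if (
      this.lastInvocation &&
      this.lastInvocation.getTime() + minSeconds * 1000 > now.getTime()
    ) {
      return { invoked: false };
    }
    const result = await this.func();
    this.lastInvocation = now;
    return { invoked: true, result };
  }
}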
@@ -286,14 +354,22 @@ interface QlPackWithPath {
packDir: string | undefined;
}
async function findDbschemePack(packs: QlPackWithPath[], dbschemePath: string): Promise<{ name: string; isLibraryPack: boolean; }> {
async function findDbschemePack(
packs: QlPackWithPath[],
dbschemePath: string,
): Promise<{ name: string; isLibraryPack: boolean }> {
for (const { packDir, packName } of packs) {
if (packDir !== undefined) {
const qlpack = yaml.load(await fs.readFile(path.join(packDir, 'qlpack.yml'), 'utf8')) as { dbscheme?: string; library?: boolean; };
if (qlpack.dbscheme !== undefined && path.basename(qlpack.dbscheme) === path.basename(dbschemePath)) {
const qlpack = yaml.load(
await fs.readFile(path.join(packDir, "qlpack.yml"), "utf8"),
) as { dbscheme?: string; library?: boolean };
if (
qlpack.dbscheme !== undefined &&
path.basename(qlpack.dbscheme) === path.basename(dbschemePath)
) {
return {
name: packName,
isLibraryPack: qlpack.library === true
isLibraryPack: qlpack.library === true,
};
}
}
@@ -301,7 +377,10 @@ async function findDbschemePack(packs: QlPackWithPath[], dbschemePath: string):
throw new Error(`Could not find qlpack file for dbscheme ${dbschemePath}`);
}
function findStandardQueryPack(qlpacks: QlpacksInfo, dbschemePackName: string): string | undefined {
function findStandardQueryPack(
qlpacks: QlpacksInfo,
dbschemePackName: string,
): string | undefined {
const matches = dbschemePackName.match(/^codeql\/(?<language>[a-z]+)-all$/);
if (matches) {
const queryPackName = `codeql/${matches.groups!.language}-queries`;
@@ -315,43 +394,59 @@ function findStandardQueryPack(qlpacks: QlpacksInfo, dbschemePackName: string):
return undefined;
}
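findStandardQueryPack maps a standard library pack such as codeql/javascript-all to its query pack codeql/javascript-queries, and only returns it when that pack is actually resolvable in the workspace. The naming step in isolation (queryPackFor is a hypothetical helper):

// Derive the standard query pack name from a standard library pack name.
function queryPackFor(dbschemePackName: string): string | undefined {
  const matches = dbschemePackName.match(/^codeql\/(?<language>[a-z]+)-all$/);
  return matches ? `codeql/${matches.groups!.language}-queries` : undefined;
}

// queryPackFor("codeql/javascript-all") === "codeql/javascript-queries"
// queryPackFor("my-org/custom-pack")    === undefined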
export async function getQlPackForDbscheme(cliServer: CodeQLCliServer, dbschemePath: string): Promise<QlPacksForLanguage> {
export async function getQlPackForDbscheme(
cliServer: CodeQLCliServer,
dbschemePath: string,
): Promise<QlPacksForLanguage> {
const qlpacks = await cliServer.resolveQlpacks(getOnDiskWorkspaceFolders());
const packs: QlPackWithPath[] =
Object.entries(qlpacks).map(([packName, dirs]) => {
const packs: QlPackWithPath[] = Object.entries(qlpacks).map(
([packName, dirs]) => {
if (dirs.length < 1) {
void logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has no directories`);
void logger.log(
`In getQlPackFor ${dbschemePath}, qlpack ${packName} has no directories`,
);
return { packName, packDir: undefined };
}
if (dirs.length > 1) {
void logger.log(`In getQlPackFor ${dbschemePath}, qlpack ${packName} has more than one directory; arbitrarily choosing the first`);
void logger.log(
`In getQlPackFor ${dbschemePath}, qlpack ${packName} has more than one directory; arbitrarily choosing the first`,
);
}
return {
packName,
packDir: dirs[0]
packDir: dirs[0],
};
});
},
);
const dbschemePack = await findDbschemePack(packs, dbschemePath);
const queryPack = dbschemePack.isLibraryPack ? findStandardQueryPack(qlpacks, dbschemePack.name) : undefined;
const queryPack = dbschemePack.isLibraryPack
? findStandardQueryPack(qlpacks, dbschemePack.name)
: undefined;
return {
dbschemePack: dbschemePack.name,
dbschemePackIsLibraryPack: dbschemePack.isLibraryPack,
queryPack
queryPack,
};
}
export async function getPrimaryDbscheme(datasetFolder: string): Promise<string> {
const dbschemes = await glob(path.join(datasetFolder, '*.dbscheme'));
export async function getPrimaryDbscheme(
datasetFolder: string,
): Promise<string> {
const dbschemes = await glob(path.join(datasetFolder, "*.dbscheme"));
if (dbschemes.length < 1) {
throw new Error(`Can't find dbscheme for current database in ${datasetFolder}`);
throw new Error(
`Can't find dbscheme for current database in ${datasetFolder}`,
);
}
dbschemes.sort();
const dbscheme = dbschemes[0];
if (dbschemes.length > 1) {
void Window.showErrorMessage(`Found multiple dbschemes in ${datasetFolder} during quick query; arbitrarily choosing the first, ${dbscheme}, to decide what library to use.`);
void Window.showErrorMessage(
`Found multiple dbschemes in ${datasetFolder} during quick query; arbitrarily choosing the first, ${dbscheme}, to decide what library to use.`,
);
}
return dbscheme;
}
@@ -363,12 +458,21 @@ export class CachedOperation<U> {
private readonly operation: (t: string, ...args: any[]) => Promise<U>;
private readonly cached: Map<string, U>;
private readonly lru: string[];
private readonly inProgressCallbacks: Map<string, [(u: U) => void, (reason?: any) => void][]>;
private readonly inProgressCallbacks: Map<
string,
[(u: U) => void, (reason?: any) => void][]
>;
constructor(operation: (t: string, ...args: any[]) => Promise<U>, private cacheSize = 100) {
constructor(
operation: (t: string, ...args: any[]) => Promise<U>,
private cacheSize = 100,
) {
this.operation = operation;
this.lru = [];
this.inProgressCallbacks = new Map<string, [(u: U) => void, (reason?: any) => void][]>();
this.inProgressCallbacks = new Map<
string,
[(u: U) => void, (reason?: any) => void][]
>();
this.cached = new Map<string, U>();
}
@@ -377,7 +481,12 @@ export class CachedOperation<U> {
const fromCache = this.cached.get(t);
if (fromCache !== undefined) {
// Move to end of lru list
this.lru.push(this.lru.splice(this.lru.findIndex(v => v === t), 1)[0]);
this.lru.push(
this.lru.splice(
this.lru.findIndex((v) => v === t),
1,
)[0],
);
return fromCache;
}
// Otherwise check if in progress
@@ -394,7 +503,7 @@ export class CachedOperation<U> {
this.inProgressCallbacks.set(t, callbacks);
try {
const result = await this.operation(t, ...args);
callbacks.forEach(f => f[0](result));
callbacks.forEach((f) => f[0](result));
this.inProgressCallbacks.delete(t);
if (this.lru.length > this.cacheSize) {
const toRemove = this.lru.shift()!;
@@ -405,7 +514,7 @@ export class CachedOperation<U> {
return result;
} catch (e) {
// Rethrow error on all callbacks
callbacks.forEach(f => f[1](e));
callbacks.forEach((f) => f[1](e));
throw e;
} finally {
this.inProgressCallbacks.delete(t);
@@ -413,8 +522,6 @@ export class CachedOperation<U> {
}
}
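CachedOperation memoizes an async operation keyed by a string, shares in-flight calls through callbacks, and evicts the least-recently-used key once the cache is full. A reduced sketch of just the caching and eviction part (TinyCache is illustrative and omits the in-flight de-duplication):

// Cache results by key and evict the least-recently-used entry once the
// cache exceeds a fixed size. Map preserves insertion order, so deleting and
// re-inserting a key refreshes its recency.
class TinyCache<U> {
  private readonly cached = new Map<string, U>();

  constructor(
    private readonly operation: (key: string) => Promise<U>,
    private readonly cacheSize = 100,
  ) {}

  async get(key: string): Promise<U> {
    const hit = this.cached.get(key);
    if (hit !== undefined) {
      this.cached.delete(key);
      this.cached.set(key, hit);
      return hit;
    }
    const result = await this.operation(key);
    this.cached.set(key, result);
    if (this.cached.size > this.cacheSize) {
      // The oldest key is the first in insertion order.
      const oldest = this.cached.keys().next().value as string;
      this.cached.delete(oldest);
    }
    return result;
  }
}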
/**
* The following functions all heuristically determine metadata about databases.
*/
@@ -430,20 +537,22 @@ export class CachedOperation<U> {
* @see cli.CodeQLCliServer.resolveDatabase
*/
export const dbSchemeToLanguage = {
'semmlecode.javascript.dbscheme': 'javascript',
'semmlecode.cpp.dbscheme': 'cpp',
'semmlecode.dbscheme': 'java',
'semmlecode.python.dbscheme': 'python',
'semmlecode.csharp.dbscheme': 'csharp',
'go.dbscheme': 'go',
'ruby.dbscheme': 'ruby'
"semmlecode.javascript.dbscheme": "javascript",
"semmlecode.cpp.dbscheme": "cpp",
"semmlecode.dbscheme": "java",
"semmlecode.python.dbscheme": "python",
"semmlecode.csharp.dbscheme": "csharp",
"go.dbscheme": "go",
"ruby.dbscheme": "ruby",
};
export const languageToDbScheme = Object.entries(dbSchemeToLanguage).reduce((acc, [k, v]) => {
acc[v] = k;
return acc;
}, {} as { [k: string]: string });
export const languageToDbScheme = Object.entries(dbSchemeToLanguage).reduce(
(acc, [k, v]) => {
acc[v] = k;
return acc;
},
{} as { [k: string]: string },
);
/**
* Returns the initial contents for an empty query, based on the language of the selected
@@ -459,13 +568,13 @@ export const languageToDbScheme = Object.entries(dbSchemeToLanguage).reduce((acc
*/
export function getInitialQueryContents(language: string, dbscheme: string) {
if (!language) {
const dbschemeBase = path.basename(dbscheme) as keyof typeof dbSchemeToLanguage;
const dbschemeBase = path.basename(
dbscheme,
) as keyof typeof dbSchemeToLanguage;
language = dbSchemeToLanguage[dbschemeBase];
}
return language
? `import ${language}\n\nselect ""`
: 'select ""';
return language ? `import ${language}\n\nselect ""` : 'select ""';
}
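For reference, the stub contents produced by the function above for a couple of inputs (the values follow directly from the code):

// getInitialQueryContents("javascript", "semmlecode.javascript.dbscheme")
//   returns 'import javascript\n\nselect ""'
// getInitialQueryContents("", "unknown.dbscheme")
//   returns 'select ""'   (no language could be inferred from the dbscheme)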
/**
@@ -475,23 +584,26 @@ export function getInitialQueryContents(language: string, dbscheme: string) {
* contains a folder starting with `db-`.
*/
export async function isLikelyDatabaseRoot(maybeRoot: string) {
const [a, b, c] = (await Promise.all([
const [a, b, c] = await Promise.all([
// databases can have either .dbinfo or codeql-database.yml.
fs.pathExists(path.join(maybeRoot, '.dbinfo')),
fs.pathExists(path.join(maybeRoot, 'codeql-database.yml')),
fs.pathExists(path.join(maybeRoot, ".dbinfo")),
fs.pathExists(path.join(maybeRoot, "codeql-database.yml")),
// they *must* have a db-{language} folder
glob('db-*/', { cwd: maybeRoot })
]));
glob("db-*/", { cwd: maybeRoot }),
]);
return ((a || b) && c.length > 0);
return (a || b) && c.length > 0;
}
/**
* A language folder is any folder starting with `db-` that is itself not a database root.
*/
export async function isLikelyDbLanguageFolder(dbPath: string) {
return path.basename(dbPath).startsWith('db-') && !(await isLikelyDatabaseRoot(dbPath));
return (
path.basename(dbPath).startsWith("db-") &&
!(await isLikelyDatabaseRoot(dbPath))
);
}
/**
@@ -500,17 +612,22 @@ export async function isLikelyDbLanguageFolder(dbPath: string) {
*/
export async function findLanguage(
cliServer: CodeQLCliServer,
queryUri: Uri | undefined
queryUri: Uri | undefined,
): Promise<string | undefined> {
const uri = queryUri || Window.activeTextEditor?.document.uri;
if (uri !== undefined) {
try {
const queryInfo = await cliServer.resolveQueryByLanguage(getOnDiskWorkspaceFolders(), uri);
const language = (Object.keys(queryInfo.byLanguage))[0];
const queryInfo = await cliServer.resolveQueryByLanguage(
getOnDiskWorkspaceFolders(),
uri,
);
const language = Object.keys(queryInfo.byLanguage)[0];
void logger.log(`Detected query language: ${language}`);
return language;
} catch (e) {
void logger.log('Could not autodetect query language. Select language manually.');
void logger.log(
"Could not autodetect query language. Select language manually.",
);
}
}
@@ -518,17 +635,25 @@ export async function findLanguage(
return await askForLanguage(cliServer, false);
}
export async function askForLanguage(cliServer: CodeQLCliServer, throwOnEmpty = true): Promise<string | undefined> {
export async function askForLanguage(
cliServer: CodeQLCliServer,
throwOnEmpty = true,
): Promise<string | undefined> {
const language = await Window.showQuickPick(
await cliServer.getSupportedLanguages(),
{ placeHolder: 'Select target language for your query', ignoreFocusOut: true }
{
placeHolder: "Select target language for your query",
ignoreFocusOut: true,
},
);
if (!language) {
// This only happens if the user cancels the quick pick.
if (throwOnEmpty) {
throw new UserCancellationException('Cancelled.');
throw new UserCancellationException("Cancelled.");
} else {
void showAndLogErrorMessage('Language not found. Language must be specified manually.');
void showAndLogErrorMessage(
"Language not found. Language must be specified manually.",
);
}
}
return language;
@@ -540,7 +665,10 @@ export async function askForLanguage(cliServer: CodeQLCliServer, throwOnEmpty =
* @param queryPath The path to the query.
* @returns A promise that resolves to the query metadata, if available.
*/
export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath: string): Promise<QueryMetadata | undefined> {
export async function tryGetQueryMetadata(
cliServer: CodeQLCliServer,
queryPath: string,
): Promise<QueryMetadata | undefined> {
try {
return await cliServer.resolveMetadata(queryPath);
} catch (e) {
@@ -558,12 +686,11 @@ export async function tryGetQueryMetadata(cliServer: CodeQLCliServer, queryPath:
* It does not need to exist.
*/
export async function createTimestampFile(storagePath: string) {
const timestampPath = path.join(storagePath, 'timestamp');
const timestampPath = path.join(storagePath, "timestamp");
await fs.ensureDir(storagePath);
await fs.writeFile(timestampPath, Date.now().toString(), 'utf8');
await fs.writeFile(timestampPath, Date.now().toString(), "utf8");
}
/**
* Recursively walk a directory and return the full path to all files found.
* Symbolic links are ignored.
@@ -572,7 +699,9 @@ export async function createTimestampFile(storagePath: string) {
*
* @return An iterator of the full path to all files recursively found in the directory.
*/
export async function* walkDirectory(dir: string): AsyncIterableIterator<string> {
export async function* walkDirectory(
dir: string,
): AsyncIterableIterator<string> {
const seenFiles = new Set<string>();
for await (const d of await fs.opendir(dir)) {
const entry = path.join(dir, d.name);
@@ -584,11 +713,3 @@ export async function* walkDirectory(dir: string): AsyncIterableIterator<string>
}
}
}
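A short consumer of the async generator above, assuming walkDirectory with the signature shown (findQueryFiles is an illustrative helper, not part of the extension):

// Collect all .ql files under a directory using the walkDirectory generator.
async function findQueryFiles(dir: string): Promise<string[]> {
  const queries: string[] = [];
  for await (const file of walkDirectory(dir)) {
    if (file.endsWith(".ql")) {
      queries.push(file);
    }
  }
  return queries;
}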
/**
* Pluralizes a word.
* Example: Returns "N repository" if N is one, "N repositories" otherwise.
*/
export function pluralize(numItems: number | undefined, singular: string, plural: string): string {
return numItems ? `${numItems} ${numItems === 1 ? singular : plural}` : '';
}
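Example outputs of the pluralize helper (which this diff moves to pure/word.ts), following directly from its implementation:

// pluralize(1, "repository", "repositories")   => "1 repository"
// pluralize(3, "repository", "repositories")   => "3 repositories"
// pluralize(0, "repository", "repositories")   => ""   (falsy counts yield the empty string)
// pluralize(undefined, "result", "results")    => ""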


@@ -1,12 +1,17 @@
import { env } from 'vscode';
import * as path from 'path';
import { QueryHistoryConfig } from './config';
import { LocalQueryInfo } from './query-results';
import { getRawQueryName, QueryHistoryInfo } from './query-history-info';
import { RemoteQueryHistoryItem } from './remote-queries/remote-query-history-item';
import { pluralize } from './helpers';
import { VariantAnalysisHistoryItem } from './remote-queries/variant-analysis-history-item';
import { assertNever } from './pure/helpers-pure';
import { env } from "vscode";
import * as path from "path";
import { QueryHistoryConfig } from "./config";
import { LocalQueryInfo } from "./query-results";
import {
buildRepoLabel,
getRawQueryName,
QueryHistoryInfo,
} from "./query-history-info";
import { RemoteQueryHistoryItem } from "./remote-queries/remote-query-history-item";
import { VariantAnalysisHistoryItem } from "./remote-queries/variant-analysis-history-item";
import { assertNever } from "./pure/helpers-pure";
import { pluralize } from "./pure/word";
import { humanizeQueryStatus } from "./query-status";
interface InterpolateReplacements {
t: string; // Start time
@@ -15,7 +20,7 @@ interface InterpolateReplacements {
r: string; // Result count/Empty
s: string; // Status
f: string; // Query file name
'%': '%'; // Percent sign
"%": "%"; // Percent sign
}
export class HistoryItemLabelProvider {
@@ -26,20 +31,20 @@ export class HistoryItemLabelProvider {
getLabel(item: QueryHistoryInfo) {
let replacements: InterpolateReplacements;
switch (item.t) {
case 'local':
case "local":
replacements = this.getLocalInterpolateReplacements(item);
break;
case 'remote':
case "remote":
replacements = this.getRemoteInterpolateReplacements(item);
break;
case 'variant-analysis':
case "variant-analysis":
replacements = this.getVariantAnalysisInterpolateReplacements(item);
break;
default:
assertNever(item);
}
const rawLabel = item.userSpecifiedLabel ?? (this.config.format || '%q');
const rawLabel = item.userSpecifiedLabel ?? (this.config.format || "%q");
return this.interpolate(rawLabel, replacements);
}
@@ -56,18 +61,26 @@ export class HistoryItemLabelProvider {
: getRawQueryName(item);
}
private interpolate(
rawLabel: string,
replacements: InterpolateReplacements,
): string {
const label = rawLabel.replace(
/%(.)/g,
(match, key: keyof InterpolateReplacements) => {
const replacement = replacements[key];
return replacement !== undefined ? replacement : match;
},
);
private interpolate(rawLabel: string, replacements: InterpolateReplacements): string {
const label = rawLabel.replace(/%(.)/g, (match, key: keyof InterpolateReplacements) => {
const replacement = replacements[key];
return replacement !== undefined ? replacement : match;
});
return label.replace(/\s+/g, ' ');
return label.replace(/\s+/g, " ");
}
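To illustrate the interpolation above with hypothetical replacement values for a remote query history item:

// Given replacements such as:
//   { t: "11/16/2022, 6:00 PM", q: "UnsafeJQueryPlugin.ql (javascript)",
//     d: "3 repositories", r: "(12 results)", s: "completed",
//     f: "UnsafeJQueryPlugin.ql", "%": "%" }
// the format "%q on %d - %s %r" becomes:
//   "UnsafeJQueryPlugin.ql (javascript) on 3 repositories - completed (12 results)"
// Unrecognized %-keys are left as-is, and runs of whitespace collapse to one space.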
private getLocalInterpolateReplacements(item: LocalQueryInfo): InterpolateReplacements {
const { resultCount = 0, statusString = 'in progress' } = item.completedQuery || {};
private getLocalInterpolateReplacements(
item: LocalQueryInfo,
): InterpolateReplacements {
const { resultCount = 0, statusString = "in progress" } =
item.completedQuery || {};
return {
t: item.startTime,
q: item.getQueryName(),
@@ -75,44 +88,45 @@ export class HistoryItemLabelProvider {
r: `(${resultCount} results)`,
s: statusString,
f: item.getQueryFileName(),
'%': '%',
"%": "%",
};
}
// Return the number of repositories queried if available. Otherwise, use the controller repository name.
private buildRepoLabel(item: RemoteQueryHistoryItem): string {
const repositoryCount = item.remoteQuery.repositoryCount;
if (repositoryCount) {
return pluralize(repositoryCount, 'repository', 'repositories');
}
return `${item.remoteQuery.controllerRepository.owner}/${item.remoteQuery.controllerRepository.name}`;
}
private getRemoteInterpolateReplacements(item: RemoteQueryHistoryItem): InterpolateReplacements {
const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
private getRemoteInterpolateReplacements(
item: RemoteQueryHistoryItem,
): InterpolateReplacements {
const resultCount = item.resultCount
? `(${pluralize(item.resultCount, "result", "results")})`
: "";
return {
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(env.language),
t: new Date(item.remoteQuery.executionStartTime).toLocaleString(
env.language,
),
q: `${item.remoteQuery.queryName} (${item.remoteQuery.language})`,
d: this.buildRepoLabel(item),
d: buildRepoLabel(item),
r: resultCount,
s: item.status,
s: humanizeQueryStatus(item.status),
f: path.basename(item.remoteQuery.queryFilePath),
'%': '%'
"%": "%",
};
}
private getVariantAnalysisInterpolateReplacements(item: VariantAnalysisHistoryItem): InterpolateReplacements {
const resultCount = item.resultCount ? `(${pluralize(item.resultCount, 'result', 'results')})` : '';
private getVariantAnalysisInterpolateReplacements(
item: VariantAnalysisHistoryItem,
): InterpolateReplacements {
const resultCount = item.resultCount
? `(${pluralize(item.resultCount, "result", "results")})`
: "";
return {
t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(env.language),
t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(
env.language,
),
q: `${item.variantAnalysis.query.name} (${item.variantAnalysis.query.language})`,
d: 'TODO',
d: buildRepoLabel(item),
r: resultCount,
s: item.status,
s: humanizeQueryStatus(item.status),
f: path.basename(item.variantAnalysis.query.filePath),
'%': '%',
"%": "%",
};
}
}


@@ -1,30 +1,39 @@
import { ProgressLocation, window } from 'vscode';
import { StreamInfo } from 'vscode-languageclient';
import * as cli from './cli';
import { QueryServerConfig } from './config';
import { ideServerLogger } from './logging';
import { ProgressLocation, window } from "vscode";
import { StreamInfo } from "vscode-languageclient";
import * as cli from "./cli";
import { QueryServerConfig } from "./config";
import { ideServerLogger } from "./logging";
/**
* Managing the language server for CodeQL.
*/
/** Starts a new CodeQL language server process, sending progress messages to the status bar. */
export async function spawnIdeServer(config: QueryServerConfig): Promise<StreamInfo> {
return window.withProgress({ title: 'CodeQL language server', location: ProgressLocation.Window }, async (progressReporter, _) => {
const args = ['--check-errors', 'ON_CHANGE'];
if (cli.shouldDebugIdeServer()) {
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9009,server=y,suspend=n,quiet=y');
}
const child = cli.spawnServer(
config.codeQlPath,
'CodeQL language server',
['execute', 'language-server'],
args,
ideServerLogger,
data => ideServerLogger.log(data.toString(), { trailingNewline: false }),
data => ideServerLogger.log(data.toString(), { trailingNewline: false }),
progressReporter
);
return { writer: child.stdin!, reader: child.stdout! };
});
export async function spawnIdeServer(
config: QueryServerConfig,
): Promise<StreamInfo> {
return window.withProgress(
{ title: "CodeQL language server", location: ProgressLocation.Window },
async (progressReporter, _) => {
const args = ["--check-errors", "ON_CHANGE"];
if (cli.shouldDebugIdeServer()) {
args.push(
"-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9009,server=y,suspend=n,quiet=y",
);
}
const child = cli.spawnServer(
config.codeQlPath,
"CodeQL language server",
["execute", "language-server"],
args,
ideServerLogger,
(data) =>
ideServerLogger.log(data.toString(), { trailingNewline: false }),
(data) =>
ideServerLogger.log(data.toString(), { trailingNewline: false }),
progressReporter,
);
return { writer: child.stdin!, reader: child.stdout! };
},
);
}
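For context, the StreamInfo returned here (the child process's stdin and stdout) is the shape vscode-languageclient accepts from a server-options factory. A hedged sketch of how such a factory is typically handed to a LanguageClient; the client id and document selector below are illustrative assumptions, not taken from this diff:

import {
  LanguageClient,
  LanguageClientOptions,
  StreamInfo,
} from "vscode-languageclient";

// Assumed to be in scope, as in the module above.
declare function spawnIdeServer(config: unknown): Promise<StreamInfo>;
declare const config: unknown;

const clientOptions: LanguageClientOptions = {
  // Illustrative selector: attach the language server to QL documents.
  documentSelector: [{ language: "ql" }, { language: "qll" }],
};

const client = new LanguageClient(
  "codeQL.lsp", // illustrative id
  "CodeQL Language Server",
  () => spawnIdeServer(config), // server options: a factory resolving to StreamInfo
  clientOptions,
);

void client.start();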


@@ -1,5 +1,5 @@
import * as crypto from 'crypto';
import * as os from 'os';
import * as crypto from "crypto";
import * as os from "os";
import {
Uri,
Location,
@@ -13,20 +13,17 @@ import {
Selection,
TextEditorRevealType,
ThemeColor,
} from 'vscode';
import {
tryGetResolvableLocation,
isLineColumnLoc
} from './pure/bqrs-utils';
import { DatabaseItem, DatabaseManager } from './databases';
import { ViewSourceFileMsg } from './pure/interface-types';
import { Logger } from './logging';
} from "vscode";
import { tryGetResolvableLocation, isLineColumnLoc } from "./pure/bqrs-utils";
import { DatabaseItem, DatabaseManager } from "./databases";
import { ViewSourceFileMsg } from "./pure/interface-types";
import { Logger } from "./logging";
import {
LineColumnLocation,
WholeFileLocation,
UrlValue,
ResolvableLocationValue
} from './pure/bqrs-cli-types';
ResolvableLocationValue,
} from "./pure/bqrs-cli-types";
/**
* This module contains functions and types that are shared between
@@ -35,7 +32,7 @@ import {
/** Gets a nonce string created with 128 bits of entropy. */
export function getNonce(): string {
return crypto.randomBytes(16).toString('base64');
return crypto.randomBytes(16).toString("base64");
}
/**
@@ -52,7 +49,7 @@ export enum WebviewReveal {
*/
export function fileUriToWebviewUri(
panel: WebviewPanel,
fileUriOnDisk: Uri
fileUriOnDisk: Uri,
): string {
return panel.webview.asWebviewUri(fileUriOnDisk).toString();
}
@@ -64,7 +61,7 @@ export function fileUriToWebviewUri(
*/
function resolveFivePartLocation(
loc: LineColumnLocation,
databaseItem: DatabaseItem
databaseItem: DatabaseItem,
): Location {
// `Range` is a half-open interval, and is zero-based. CodeQL locations are closed intervals, and
// are one-based. Adjust accordingly.
@@ -72,7 +69,7 @@ function resolveFivePartLocation(
Math.max(0, loc.startLine - 1),
Math.max(0, loc.startColumn - 1),
Math.max(0, loc.endLine - 1),
Math.max(1, loc.endColumn)
Math.max(1, loc.endColumn),
);
return new Location(databaseItem.resolveSourceFile(loc.uri), range);
@@ -85,7 +82,7 @@ function resolveFivePartLocation(
*/
function resolveWholeFileLocation(
loc: WholeFileLocation,
databaseItem: DatabaseItem
databaseItem: DatabaseItem,
): Location {
// A location corresponding to the start of the file.
const range = new Range(0, 0, 0, 0);
@@ -100,10 +97,10 @@ function resolveWholeFileLocation(
*/
export function tryResolveLocation(
loc: UrlValue | undefined,
databaseItem: DatabaseItem
databaseItem: DatabaseItem,
): Location | undefined {
const resolvableLoc = tryGetResolvableLocation(loc);
if (!resolvableLoc || typeof resolvableLoc === 'string') {
if (!resolvableLoc || typeof resolvableLoc === "string") {
return;
} else if (isLineColumnLoc(resolvableLoc)) {
return resolveFivePartLocation(resolvableLoc, databaseItem);
@@ -112,7 +109,11 @@ export function tryResolveLocation(
}
}
export type WebviewView = 'results' | 'compare' | 'remote-queries' | 'variant-analysis';
export type WebviewView =
| "results"
| "compare"
| "remote-queries"
| "variant-analysis";
export interface WebviewMessage {
t: string;
@@ -131,28 +132,27 @@ export function getHtmlForWebview(
}: {
allowInlineStyles?: boolean;
} = {
allowInlineStyles: false,
}
allowInlineStyles: false,
},
): string {
const scriptUriOnDisk = Uri.file(
ctx.asAbsolutePath('out/webview.js')
);
const scriptUriOnDisk = Uri.file(ctx.asAbsolutePath("out/webview.js"));
const stylesheetUrisOnDisk = [
Uri.file(ctx.asAbsolutePath('out/webview.css'))
Uri.file(ctx.asAbsolutePath("out/webview.css")),
];
// Convert the on-disk URIs into webview URIs.
const scriptWebviewUri = webview.asWebviewUri(scriptUriOnDisk);
const stylesheetWebviewUris = stylesheetUrisOnDisk.map(stylesheetUriOnDisk =>
webview.asWebviewUri(stylesheetUriOnDisk));
const stylesheetWebviewUris = stylesheetUrisOnDisk.map(
(stylesheetUriOnDisk) => webview.asWebviewUri(stylesheetUriOnDisk),
);
// Use a nonce in the content security policy to uniquely identify the above resources.
const nonce = getNonce();
const stylesheetsHtmlLines = allowInlineStyles
? stylesheetWebviewUris.map(uri => createStylesLinkWithoutNonce(uri))
: stylesheetWebviewUris.map(uri => createStylesLinkWithNonce(nonce, uri));
? stylesheetWebviewUris.map((uri) => createStylesLinkWithoutNonce(uri))
: stylesheetWebviewUris.map((uri) => createStylesLinkWithNonce(nonce, uri));
const styleSrc = allowInlineStyles
? `${webview.cspSource} vscode-file: 'unsafe-inline'`
@@ -172,7 +172,9 @@ export function getHtmlForWebview(
<html>
<head>
<meta http-equiv="Content-Security-Policy"
content="default-src 'none'; script-src 'nonce-${nonce}'; font-src ${fontSrc}; style-src ${styleSrc}; connect-src ${webview.cspSource};">
content="default-src 'none'; script-src 'nonce-${nonce}'; font-src ${fontSrc}; style-src ${styleSrc}; connect-src ${
webview.cspSource
};">
${stylesheetsHtmlLines.join(` ${os.EOL}`)}
</head>
<body>
@@ -186,7 +188,7 @@ export function getHtmlForWebview(
export async function showResolvableLocation(
loc: ResolvableLocationValue,
databaseItem: DatabaseItem
databaseItem: DatabaseItem,
): Promise<void> {
await showLocation(tryResolveLocation(loc, databaseItem));
}
@@ -198,17 +200,16 @@ export async function showLocation(location?: Location) {
const doc = await workspace.openTextDocument(location.uri);
const editorsWithDoc = Window.visibleTextEditors.filter(
(e) => e.document === doc
(e) => e.document === doc,
);
const editor =
editorsWithDoc.length > 0
? editorsWithDoc[0]
: await Window.showTextDocument(
doc, {
// avoid preview mode so editor is sticky and will be added to navigation and search histories.
preview: false,
viewColumn: ViewColumn.One,
});
: await Window.showTextDocument(doc, {
// avoid preview mode so editor is sticky and will be added to navigation and search histories.
preview: false,
viewColumn: ViewColumn.One,
});
const range = location.range;
// When highlighting the range, vscode's occurrence-match and bracket-match highlighting will
@@ -229,30 +230,28 @@ export async function showLocation(location?: Location) {
editor.setDecorations(shownLocationLineDecoration, [range]);
}
const findMatchBackground = new ThemeColor('editor.findMatchBackground');
const findMatchBackground = new ThemeColor("editor.findMatchBackground");
const findRangeHighlightBackground = new ThemeColor(
'editor.findRangeHighlightBackground'
"editor.findRangeHighlightBackground",
);
export const shownLocationDecoration = Window.createTextEditorDecorationType({
backgroundColor: findMatchBackground,
});
export const shownLocationLineDecoration = Window.createTextEditorDecorationType(
{
export const shownLocationLineDecoration =
Window.createTextEditorDecorationType({
backgroundColor: findRangeHighlightBackground,
isWholeLine: true,
}
);
});
export async function jumpToLocation(
msg: ViewSourceFileMsg,
databaseManager: DatabaseManager,
logger: Logger
logger: Logger,
) {
const databaseItem = databaseManager.findDatabaseItem(
Uri.parse(msg.databaseUri)
Uri.parse(msg.databaseUri),
);
if (databaseItem !== undefined) {
try {
@@ -261,7 +260,7 @@ export async function jumpToLocation(
if (e instanceof Error) {
if (e.message.match(/File not found/)) {
void Window.showErrorMessage(
'Original file of this result is not in the database\'s source archive.'
"Original file of this result is not in the database's source archive.",
);
} else {
void logger.log(`Unable to handleMsgFromView: ${e.message}`);
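One detail worth spelling out from getNonce and the Content-Security-Policy above: the policy only allows scripts whose nonce attribute matches the 'nonce-...' value baked into script-src, so the same string must appear in both places. A simplified sketch of that pairing; the HTML skeleton and script URI are made up, and only the nonce generation matches the code above:

import * as crypto from "crypto";

// Same 128-bit, base64-encoded nonce as getNonce above.
function getNonce(): string {
  return crypto.randomBytes(16).toString("base64");
}

// Simplified, hypothetical HTML assembly: the nonce in the CSP meta tag and
// the nonce attribute on the script tag must match for the script to load.
function renderWebviewHtml(scriptUri: string): string {
  const nonce = getNonce();
  return `<html>
<head>
  <meta http-equiv="Content-Security-Policy"
        content="default-src 'none'; script-src 'nonce-${nonce}';">
</head>
<body>
  <div id="root"></div>
  <script nonce="${nonce}" src="${scriptUri}"></script>
</body>
</html>`;
}

console.log(renderWebviewHtml("vscode-resource://out/webview.js"));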


@@ -1,5 +1,5 @@
import * as Sarif from 'sarif';
import * as vscode from 'vscode';
import * as Sarif from "sarif";
import * as vscode from "vscode";
import {
Diagnostic,
DiagnosticRelatedInformation,
@@ -7,13 +7,18 @@ import {
languages,
Uri,
window as Window,
env
} from 'vscode';
import * as cli from './cli';
import { CodeQLCliServer } from './cli';
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from './databases';
import { showAndLogErrorMessage } from './helpers';
import { assertNever, getErrorMessage, getErrorStack } from './pure/helpers-pure';
env,
WebviewPanel,
} from "vscode";
import * as cli from "./cli";
import { CodeQLCliServer } from "./cli";
import { DatabaseEventKind, DatabaseItem, DatabaseManager } from "./databases";
import { showAndLogErrorMessage } from "./helpers";
import {
assertNever,
getErrorMessage,
getErrorStack,
} from "./pure/helpers-pure";
import {
FromResultsViewMsg,
Interpretation,
@@ -27,12 +32,20 @@ import {
ALERTS_TABLE_NAME,
GRAPH_TABLE_NAME,
RawResultsSortState,
} from './pure/interface-types';
import { Logger } from './logging';
import { commandRunner } from './commandRunner';
import { CompletedQueryInfo, interpretResultsSarif, interpretGraphResults } from './query-results';
import { QueryEvaluationInfo } from './run-queries-shared';
import { parseSarifLocation, parseSarifPlainTextMessage } from './pure/sarif-utils';
NavigationDirection,
} from "./pure/interface-types";
import { Logger } from "./logging";
import { commandRunner } from "./commandRunner";
import {
CompletedQueryInfo,
interpretResultsSarif,
interpretGraphResults,
} from "./query-results";
import { QueryEvaluationInfo } from "./run-queries-shared";
import {
parseSarifLocation,
parseSarifPlainTextMessage,
} from "./pure/sarif-utils";
import {
WebviewReveal,
fileUriToWebviewUri,
@@ -40,13 +53,20 @@ import {
shownLocationDecoration,
shownLocationLineDecoration,
jumpToLocation,
} from './interface-utils';
import { getDefaultResultSetName, ParsedResultSets } from './pure/interface-types';
import { RawResultSet, transformBqrsResultSet, ResultSetSchema } from './pure/bqrs-cli-types';
import { AbstractWebview, WebviewPanelConfig } from './abstract-webview';
import { PAGE_SIZE } from './config';
import { CompletedLocalQueryInfo } from './query-results';
import { HistoryItemLabelProvider } from './history-item-label-provider';
} from "./interface-utils";
import {
getDefaultResultSetName,
ParsedResultSets,
} from "./pure/interface-types";
import {
RawResultSet,
transformBqrsResultSet,
ResultSetSchema,
} from "./pure/bqrs-cli-types";
import { AbstractWebview, WebviewPanelConfig } from "./abstract-webview";
import { PAGE_SIZE } from "./config";
import { CompletedLocalQueryInfo } from "./query-results";
import { HistoryItemLabelProvider } from "./history-item-label-provider";
/**
* interface.ts
@@ -67,18 +87,19 @@ function sortMultiplier(sortDirection: SortDirection): number {
function sortInterpretedResults(
results: Sarif.Result[],
sortState: InterpretedResultsSortState | undefined
sortState: InterpretedResultsSortState | undefined,
): void {
if (sortState !== undefined) {
const multiplier = sortMultiplier(sortState.sortDirection);
switch (sortState.sortBy) {
case 'alert-message':
case "alert-message":
results.sort((a, b) =>
a.message.text === undefined
? 0
: b.message.text === undefined
? 0
: multiplier * a.message.text?.localeCompare(b.message.text, env.language)
? 0
: multiplier *
a.message.text?.localeCompare(b.message.text, env.language),
);
break;
default:
@@ -87,44 +108,56 @@ function sortInterpretedResults(
}
}
function interpretedPageSize(interpretation: Interpretation | undefined): number {
if (interpretation?.data.t == 'GraphInterpretationData') {
function interpretedPageSize(
interpretation: Interpretation | undefined,
): number {
if (interpretation?.data.t == "GraphInterpretationData") {
// Graph views always have one result per page.
return 1;
}
return PAGE_SIZE.getValue<number>();
}
function numPagesOfResultSet(resultSet: RawResultSet, interpretation?: Interpretation): number {
function numPagesOfResultSet(
resultSet: RawResultSet,
interpretation?: Interpretation,
): number {
const pageSize = interpretedPageSize(interpretation);
const n = interpretation?.data.t == 'GraphInterpretationData'
? interpretation.data.dot.length
: resultSet.schema.rows;
const n =
interpretation?.data.t == "GraphInterpretationData"
? interpretation.data.dot.length
: resultSet.schema.rows;
return Math.ceil(n / pageSize);
}
function numInterpretedPages(interpretation: Interpretation | undefined): number {
function numInterpretedPages(
interpretation: Interpretation | undefined,
): number {
if (!interpretation) {
return 0;
}
const pageSize = interpretedPageSize(interpretation);
const n = interpretation.data.t == 'GraphInterpretationData'
? interpretation.data.dot.length
: interpretation.data.runs[0].results?.length || 0;
const n =
interpretation.data.t == "GraphInterpretationData"
? interpretation.data.dot.length
: interpretation.data.runs[0].results?.length || 0;
return Math.ceil(n / pageSize);
}
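To make the paging arithmetic above concrete: raw tables use the PAGE_SIZE setting, graph interpretations are pinned to one result per page, and both page counts come from the same Math.ceil. A tiny worked sketch with hypothetical numbers:

// Hypothetical values, purely to illustrate the arithmetic above.
const pageSize = 200; // e.g. what PAGE_SIZE.getValue<number>() might return
const rawRows = 450; // resultSet.schema.rows
console.log(Math.ceil(rawRows / pageSize)); // 3 pages of raw results

const graphPageSize = 1; // graph views always show one result per page
const dotGraphs = 7; // interpretation.data.dot.length
console.log(Math.ceil(dotGraphs / graphPageSize)); // 7 interpreted pages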
export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResultsViewMsg> {
export class ResultsView extends AbstractWebview<
IntoResultsViewMsg,
FromResultsViewMsg
> {
private _displayedQuery?: CompletedLocalQueryInfo;
private _interpretation?: Interpretation;
private readonly _diagnosticCollection = languages.createDiagnosticCollection(
'codeql-query-results'
"codeql-query-results",
);
constructor(
@@ -132,28 +165,30 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
private databaseManager: DatabaseManager,
public cliServer: CodeQLCliServer,
public logger: Logger,
private labelProvider: HistoryItemLabelProvider
private labelProvider: HistoryItemLabelProvider,
) {
super(ctx);
this.push(this._diagnosticCollection);
this.push(
vscode.window.onDidChangeTextEditorSelection(
this.handleSelectionChange.bind(this)
)
);
void logger.log('Registering path-step navigation commands.');
this.push(
commandRunner(
'codeQLQueryResults.nextPathStep',
this.navigatePathStep.bind(this, 1)
)
);
this.push(
commandRunner(
'codeQLQueryResults.previousPathStep',
this.navigatePathStep.bind(this, -1)
)
this.handleSelectionChange.bind(this),
),
);
const navigationCommands = {
"codeQLQueryResults.up": NavigationDirection.up,
"codeQLQueryResults.down": NavigationDirection.down,
"codeQLQueryResults.left": NavigationDirection.left,
"codeQLQueryResults.right": NavigationDirection.right,
// For backwards compatibility with keybindings set using an earlier version of the extension.
"codeQLQueryResults.nextPathStep": NavigationDirection.down,
"codeQLQueryResults.previousPathStep": NavigationDirection.up,
};
void logger.log("Registering result view navigation commands.");
for (const [commandId, direction] of Object.entries(navigationCommands)) {
this.push(
commandRunner(commandId, this.navigateResultView.bind(this, direction)),
);
}
this.push(
this.databaseManager.onDidChangeDatabaseItem(({ kind }) => {
@@ -161,25 +196,30 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
this._diagnosticCollection.clear();
if (this.isShowingPanel) {
void this.postMessage({
t: 'untoggleShowProblems'
t: "untoggleShowProblems",
});
}
}
})
}),
);
}
async navigatePathStep(direction: number): Promise<void> {
await this.postMessage({ t: 'navigatePath', direction });
async navigateResultView(direction: NavigationDirection): Promise<void> {
if (!this.panel?.visible) {
return;
}
// Reveal the panel now as the subsequent call to 'Window.showTextEditor' in 'showLocation' may destroy the webview otherwise.
this.panel.reveal();
await this.postMessage({ t: "navigate", direction });
}
protected getPanelConfig(): WebviewPanelConfig {
return {
viewId: 'resultsView',
title: 'CodeQL Query Results',
viewId: "resultsView",
title: "CodeQL Query Results",
viewColumn: this.chooseColumnForWebview(),
preserveFocus: true,
view: 'results',
view: "results",
};
}
@@ -190,23 +230,23 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
protected async onMessage(msg: FromResultsViewMsg): Promise<void> {
try {
switch (msg.t) {
case 'viewLoaded':
case "viewLoaded":
this.onWebViewLoaded();
break;
case 'viewSourceFile': {
case "viewSourceFile": {
await jumpToLocation(msg, this.databaseManager, this.logger);
break;
}
case 'toggleDiagnostics': {
case "toggleDiagnostics": {
if (msg.visible) {
const databaseItem = this.databaseManager.findDatabaseItem(
Uri.parse(msg.databaseUri)
Uri.parse(msg.databaseUri),
);
if (databaseItem !== undefined) {
await this.showResultsAsDiagnostics(
msg.origResultsPaths,
msg.metadata,
databaseItem
databaseItem,
);
}
} else {
@@ -215,17 +255,19 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
}
break;
}
case 'changeSort':
case "changeSort":
await this.changeRawSortState(msg.resultSetName, msg.sortState);
break;
case 'changeInterpretedSort':
case "changeInterpretedSort":
await this.changeInterpretedSortState(msg.sortState);
break;
case 'changePage':
if (msg.selectedTable === ALERTS_TABLE_NAME || msg.selectedTable === GRAPH_TABLE_NAME) {
case "changePage":
if (
msg.selectedTable === ALERTS_TABLE_NAME ||
msg.selectedTable === GRAPH_TABLE_NAME
) {
await this.showPageOfInterpretedResults(msg.pageNumber);
}
else {
} else {
await this.showPageOfRawResults(
msg.selectedTable,
msg.pageNumber,
@@ -233,11 +275,13 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
// sortedResultsInfo doesn't have an entry for the current
// result set. Use this to determine whether or not we use
// the sorted bqrs file.
!!this._displayedQuery?.completedQuery.sortedResultsInfo[msg.selectedTable]
!!this._displayedQuery?.completedQuery.sortedResultsInfo[
msg.selectedTable
],
);
}
break;
case 'openFile':
case "openFile":
await this.openFile(msg.filePath);
break;
default:
@@ -245,7 +289,7 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
}
} catch (e) {
void showAndLogErrorMessage(getErrorMessage(e), {
fullMessage: getErrorStack(e)
fullMessage: getErrorStack(e),
});
}
}
@@ -264,46 +308,59 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
// can't find a vscode API that does it any better.
// Here, iterate through all the visible editors and determine the max view column.
// This won't work if the largest view column is empty.
const colCount = Window.visibleTextEditors.reduce((maxVal, editor) =>
Math.max(maxVal, Number.parseInt(editor.viewColumn?.toFixed() || '0', 10)), 0);
const colCount = Window.visibleTextEditors.reduce(
(maxVal, editor) =>
Math.max(
maxVal,
Number.parseInt(editor.viewColumn?.toFixed() || "0", 10),
),
0,
);
if (colCount <= 1) {
return vscode.ViewColumn.Beside;
}
const activeViewColumnNum = Number.parseInt(Window.activeTextEditor?.viewColumn?.toFixed() || '0', 10);
return activeViewColumnNum === colCount ? vscode.ViewColumn.One : vscode.ViewColumn.Beside;
const activeViewColumnNum = Number.parseInt(
Window.activeTextEditor?.viewColumn?.toFixed() || "0",
10,
);
return activeViewColumnNum === colCount
? vscode.ViewColumn.One
: vscode.ViewColumn.Beside;
}
private async changeInterpretedSortState(
sortState: InterpretedResultsSortState | undefined
sortState: InterpretedResultsSortState | undefined,
): Promise<void> {
if (this._displayedQuery === undefined) {
void showAndLogErrorMessage(
'Failed to sort results since evaluation info was unknown.'
"Failed to sort results since evaluation info was unknown.",
);
return;
}
// Notify the webview that it should expect new results.
await this.postMessage({ t: 'resultsUpdating' });
await this._displayedQuery.completedQuery.updateInterpretedSortState(sortState);
await this.postMessage({ t: "resultsUpdating" });
await this._displayedQuery.completedQuery.updateInterpretedSortState(
sortState,
);
await this.showResults(this._displayedQuery, WebviewReveal.NotForced, true);
}
private async changeRawSortState(
resultSetName: string,
sortState: RawResultsSortState | undefined
sortState: RawResultsSortState | undefined,
): Promise<void> {
if (this._displayedQuery === undefined) {
void showAndLogErrorMessage(
'Failed to sort results since evaluation info was unknown.'
"Failed to sort results since evaluation info was unknown.",
);
return;
}
// Notify the webview that it should expect new results.
await this.postMessage({ t: 'resultsUpdating' });
await this.postMessage({ t: "resultsUpdating" });
await this._displayedQuery.completedQuery.updateSortState(
this.cliServer,
resultSetName,
sortState
sortState,
);
// Sorting resets to first page, as there is arguably no particular
// correlation between the results on the nth page that the user
@@ -324,27 +381,31 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
public async showResults(
fullQuery: CompletedLocalQueryInfo,
forceReveal: WebviewReveal,
shouldKeepOldResultsWhileRendering = false
shouldKeepOldResultsWhileRendering = false,
): Promise<void> {
if (!fullQuery.completedQuery.successful) {
return;
}
const panel = await this.getPanel();
this._interpretation = undefined;
const interpretationPage = await this.interpretResultsInfo(
fullQuery.completedQuery.query,
fullQuery.completedQuery.interpretedResultsSortState
fullQuery.completedQuery.interpretedResultsSortState,
);
const sortedResultsMap: SortedResultsMap = {};
Object.entries(fullQuery.completedQuery.sortedResultsInfo).forEach(
([k, v]) =>
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(
panel,
v,
)),
);
this._displayedQuery = fullQuery;
const panel = this.getPanel();
await this.waitForPanelLoaded();
if (!panel.visible) {
if (forceReveal === WebviewReveal.Forced) {
@@ -354,12 +415,13 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
// is not visible; it's in a not-currently-viewed tab. Show a
// more asynchronous message to not so abruptly interrupt
// user's workflow by immediately revealing the panel.
const showButton = 'View Results';
const showButton = "View Results";
const queryName = this.labelProvider.getShortLabel(fullQuery);
const resultPromise = vscode.window.showInformationMessage(
`Finished running query ${queryName.length > 0 ? ` "${queryName}"` : ''
`Finished running query ${
queryName.length > 0 ? ` "${queryName}"` : ""
}.`,
showButton
showButton,
);
// Address this click asynchronously so we still update the
// query history immediately.
@@ -374,48 +436,49 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
// Note that the resultSetSchemas will return offsets for the default (unsorted) page,
// which may not be correct. However, in this case, it doesn't matter since we only
// need the first offset, which will be the same no matter which sorting we use.
const resultSetSchemas = await this.getResultSetSchemas(fullQuery.completedQuery);
const resultSetNames = resultSetSchemas.map(schema => schema.name);
const resultSetSchemas = await this.getResultSetSchemas(
fullQuery.completedQuery,
);
const resultSetNames = resultSetSchemas.map((schema) => schema.name);
const selectedTable = getDefaultResultSetName(resultSetNames);
const schema = resultSetSchemas.find(
(resultSet) => resultSet.name == selectedTable
(resultSet) => resultSet.name == selectedTable,
)!;
// Use sorted results path if it exists. This may happen if we are
// reloading the results view after it has been sorted in the past.
const resultsPath = fullQuery.completedQuery.getResultsPath(selectedTable);
const pageSize = PAGE_SIZE.getValue<number>();
const chunk = await this.cliServer.bqrsDecode(
resultsPath,
schema.name,
{
// Always send the first page.
// It may not wind up being the page we actually show,
// if there are interpreted results, but speculatively
// send anyway.
offset: schema.pagination?.offsets[0],
pageSize
}
);
const chunk = await this.cliServer.bqrsDecode(resultsPath, schema.name, {
// Always send the first page.
// It may not wind up being the page we actually show,
// if there are interpreted results, but speculatively
// send anyway.
offset: schema.pagination?.offsets[0],
pageSize,
});
const resultSet = transformBqrsResultSet(schema, chunk);
fullQuery.completedQuery.setResultCount(interpretationPage?.numTotalResults || resultSet.schema.rows);
fullQuery.completedQuery.setResultCount(
interpretationPage?.numTotalResults || resultSet.schema.rows,
);
const parsedResultSets: ParsedResultSets = {
pageNumber: 0,
pageSize,
numPages: numPagesOfResultSet(resultSet, this._interpretation),
numInterpretedPages: numInterpretedPages(this._interpretation),
resultSet: { ...resultSet, t: 'RawResultSet' },
resultSet: { ...resultSet, t: "RawResultSet" },
selectedTable: undefined,
resultSetNames,
};
await this.postMessage({
t: 'setState',
t: "setState",
interpretation: interpretationPage,
origResultsPaths: fullQuery.completedQuery.query.resultsPaths,
resultsPath: this.convertPathToWebviewUri(
fullQuery.completedQuery.query.resultsPaths.resultsPath
panel,
fullQuery.completedQuery.query.resultsPaths.resultsPath,
),
parsedResultSets,
sortedResultsMap,
@@ -423,31 +486,40 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
shouldKeepOldResultsWhileRendering,
metadata: fullQuery.completedQuery.query.metadata,
queryName: this.labelProvider.getLabel(fullQuery),
queryPath: fullQuery.initialInfo.queryPath
queryPath: fullQuery.initialInfo.queryPath,
});
}
/**
* Show a page of interpreted results
*/
public async showPageOfInterpretedResults(
pageNumber: number
): Promise<void> {
public async showPageOfInterpretedResults(pageNumber: number): Promise<void> {
if (this._displayedQuery === undefined) {
throw new Error('Trying to show interpreted results but displayed query was undefined');
throw new Error(
"Trying to show interpreted results but displayed query was undefined",
);
}
if (this._interpretation === undefined) {
throw new Error('Trying to show interpreted results but interpretation was undefined');
throw new Error(
"Trying to show interpreted results but interpretation was undefined",
);
}
if (this._interpretation.data.t === 'SarifInterpretationData' && this._interpretation.data.runs[0].results === undefined) {
throw new Error('Trying to show interpreted results but results were undefined');
if (
this._interpretation.data.t === "SarifInterpretationData" &&
this._interpretation.data.runs[0].results === undefined
) {
throw new Error(
"Trying to show interpreted results but results were undefined",
);
}
const resultSetSchemas = await this.getResultSetSchemas(this._displayedQuery.completedQuery);
const resultSetNames = resultSetSchemas.map(schema => schema.name);
const resultSetSchemas = await this.getResultSetSchemas(
this._displayedQuery.completedQuery,
);
const resultSetNames = resultSetSchemas.map((schema) => schema.name);
await this.postMessage({
t: 'showInterpretedPage',
t: "showInterpretedPage",
interpretation: this.getPageOfInterpretedResults(pageNumber),
database: this._displayedQuery.initialInfo.databaseInfo,
metadata: this._displayedQuery.completedQuery.query.metadata,
@@ -456,17 +528,20 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
pageSize: interpretedPageSize(this._interpretation),
numPages: numInterpretedPages(this._interpretation),
queryName: this.labelProvider.getLabel(this._displayedQuery),
queryPath: this._displayedQuery.initialInfo.queryPath
queryPath: this._displayedQuery.initialInfo.queryPath,
});
}
private async getResultSetSchemas(completedQuery: CompletedQueryInfo, selectedTable = ''): Promise<ResultSetSchema[]> {
private async getResultSetSchemas(
completedQuery: CompletedQueryInfo,
selectedTable = "",
): Promise<ResultSetSchema[]> {
const resultsPath = completedQuery.getResultsPath(selectedTable);
const schemas = await this.cliServer.bqrsInfo(
resultsPath,
PAGE_SIZE.getValue()
PAGE_SIZE.getValue(),
);
return schemas['result-sets'];
return schemas["result-sets"];
}
public async openFile(filePath: string) {
@@ -480,29 +555,39 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
public async showPageOfRawResults(
selectedTable: string,
pageNumber: number,
sorted = false
sorted = false,
): Promise<void> {
const results = this._displayedQuery;
if (results === undefined) {
throw new Error('trying to view a page of a query that is not loaded');
throw new Error("trying to view a page of a query that is not loaded");
}
const panel = await this.getPanel();
const sortedResultsMap: SortedResultsMap = {};
Object.entries(results.completedQuery.sortedResultsInfo).forEach(
([k, v]) =>
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(v))
(sortedResultsMap[k] = this.convertPathPropertiesToWebviewUris(
panel,
v,
)),
);
const resultSetSchemas = await this.getResultSetSchemas(results.completedQuery, sorted ? selectedTable : '');
const resultSetSchemas = await this.getResultSetSchemas(
results.completedQuery,
sorted ? selectedTable : "",
);
// If there is a specific sorted table selected, a different bqrs file is loaded that doesn't have all the result set names.
// Make sure that we load all result set names here.
// See https://github.com/github/vscode-codeql/issues/1005
const allResultSetSchemas = sorted ? await this.getResultSetSchemas(results.completedQuery, '') : resultSetSchemas;
const resultSetNames = allResultSetSchemas.map(schema => schema.name);
const allResultSetSchemas = sorted
? await this.getResultSetSchemas(results.completedQuery, "")
: resultSetSchemas;
const resultSetNames = allResultSetSchemas.map((schema) => schema.name);
const schema = resultSetSchemas.find(
(resultSet) => resultSet.name == selectedTable
(resultSet) => resultSet.name == selectedTable,
)!;
if (schema === undefined)
throw new Error(`Query result set '${selectedTable}' not found.`);
@@ -513,15 +598,15 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
schema.name,
{
offset: schema.pagination?.offsets[pageNumber],
pageSize
}
pageSize,
},
);
const resultSet = transformBqrsResultSet(schema, chunk);
const parsedResultSets: ParsedResultSets = {
pageNumber,
pageSize,
resultSet: { t: 'RawResultSet', ...resultSet },
resultSet: { t: "RawResultSet", ...resultSet },
numPages: numPagesOfResultSet(resultSet),
numInterpretedPages: numInterpretedPages(this._interpretation),
selectedTable: selectedTable,
@@ -529,11 +614,12 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
};
await this.postMessage({
t: 'setState',
t: "setState",
interpretation: this._interpretation,
origResultsPaths: results.completedQuery.query.resultsPaths,
resultsPath: this.convertPathToWebviewUri(
results.completedQuery.query.resultsPaths.resultsPath
panel,
results.completedQuery.query.resultsPaths.resultsPath,
),
parsedResultSets,
sortedResultsMap,
@@ -541,7 +627,7 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
shouldKeepOldResultsWhileRendering: false,
metadata: results.completedQuery.query.metadata,
queryName: this.labelProvider.getLabel(results),
queryPath: results.initialInfo.queryPath
queryPath: results.initialInfo.queryPath,
});
}
@@ -550,10 +636,12 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
resultsPaths: ResultsPaths,
sourceInfo: cli.SourceInfo | undefined,
sourceLocationPrefix: string,
sortState: InterpretedResultsSortState | undefined
sortState: InterpretedResultsSortState | undefined,
): Promise<Interpretation | undefined> {
if (!resultsPaths) {
void this.logger.log('No results path. Cannot display interpreted results.');
void this.logger.log(
"No results path. Cannot display interpreted results.",
);
return undefined;
}
let data;
@@ -563,7 +651,7 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
this.cliServer,
metadata,
resultsPaths,
sourceInfo
sourceInfo,
);
numTotalResults = data.dot.length;
} else {
@@ -571,10 +659,10 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
this.cliServer,
metadata,
resultsPaths,
sourceInfo
sourceInfo,
);
sarif.runs.forEach(run => {
sarif.runs.forEach((run) => {
if (run.results) {
sortInterpretedResults(run.results, sortState);
}
@@ -584,9 +672,7 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
data = sarif;
numTotalResults = (() => {
return sarif.runs?.[0]?.results
? sarif.runs[0].results.length
: 0;
return sarif.runs?.[0]?.results ? sarif.runs[0].results.length : 0;
})();
}
@@ -594,81 +680,89 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
data,
sourceLocationPrefix,
numTruncatedResults: 0,
numTotalResults
numTotalResults,
};
this._interpretation = interpretation;
return interpretation;
}
private getPageOfInterpretedResults(
pageNumber: number
): Interpretation {
private getPageOfInterpretedResults(pageNumber: number): Interpretation {
function getPageOfRun(run: Sarif.Run): Sarif.Run {
return {
...run, results: run.results?.slice(
...run,
results: run.results?.slice(
PAGE_SIZE.getValue<number>() * pageNumber,
PAGE_SIZE.getValue<number>() * (pageNumber + 1)
)
PAGE_SIZE.getValue<number>() * (pageNumber + 1),
),
};
}
const interp = this._interpretation;
if (interp === undefined) {
throw new Error('Tried to get interpreted results before interpretation finished');
throw new Error(
"Tried to get interpreted results before interpretation finished",
);
}
if (interp.data.t !== 'SarifInterpretationData')
return interp;
if (interp.data.t !== "SarifInterpretationData") return interp;
if (interp.data.runs.length !== 1) {
void this.logger.log(`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`);
void this.logger.log(
`Warning: SARIF file had ${interp.data.runs.length} runs, expected 1`,
);
}
return {
...interp,
data: {
...interp.data,
runs: [getPageOfRun(interp.data.runs[0])]
}
runs: [getPageOfRun(interp.data.runs[0])],
},
};
}
private async interpretResultsInfo(
query: QueryEvaluationInfo,
sortState: InterpretedResultsSortState | undefined
sortState: InterpretedResultsSortState | undefined,
): Promise<Interpretation | undefined> {
if (
query.canHaveInterpretedResults() &&
query.quickEvalPosition === undefined // never do results interpretation if quickEval
) {
try {
const dbItem = this.databaseManager.findDatabaseItem(Uri.file(query.dbItemPath));
const dbItem = this.databaseManager.findDatabaseItem(
Uri.file(query.dbItemPath),
);
if (!dbItem) {
throw new Error(`Could not find database item for ${query.dbItemPath}`);
throw new Error(
`Could not find database item for ${query.dbItemPath}`,
);
}
const sourceLocationPrefix = await dbItem.getSourceLocationPrefix(
this.cliServer
this.cliServer,
);
const sourceArchiveUri = dbItem.sourceArchive;
const sourceInfo =
sourceArchiveUri === undefined
? undefined
: {
sourceArchive: sourceArchiveUri.fsPath,
sourceLocationPrefix,
};
sourceArchive: sourceArchiveUri.fsPath,
sourceLocationPrefix,
};
await this._getInterpretedResults(
query.metadata,
query.resultsPaths,
sourceInfo,
sourceLocationPrefix,
sortState
sortState,
);
} catch (e) {
// If interpretation fails, accept the error and continue
// trying to render uninterpreted results anyway.
void showAndLogErrorMessage(
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(e)}`
`Showing raw results instead of interpreted ones due to an error. ${getErrorMessage(
e,
)}`,
);
}
}
@@ -678,26 +772,26 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
private async showResultsAsDiagnostics(
resultsInfo: ResultsPaths,
metadata: QueryMetadata | undefined,
database: DatabaseItem
database: DatabaseItem,
): Promise<void> {
const sourceLocationPrefix = await database.getSourceLocationPrefix(
this.cliServer
this.cliServer,
);
const sourceArchiveUri = database.sourceArchive;
const sourceInfo =
sourceArchiveUri === undefined
? undefined
: {
sourceArchive: sourceArchiveUri.fsPath,
sourceLocationPrefix,
};
sourceArchive: sourceArchiveUri.fsPath,
sourceLocationPrefix,
};
// TODO: Performance-testing to determine whether this truncation is necessary.
const interpretation = await this._getInterpretedResults(
metadata,
resultsInfo,
sourceInfo,
sourceLocationPrefix,
undefined
undefined,
);
if (!interpretation) {
@@ -708,7 +802,9 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
await this.showProblemResultsAsDiagnostics(interpretation, database);
} catch (e) {
void this.logger.log(
`Exception while computing problem results as diagnostics: ${getErrorMessage(e)}`
`Exception while computing problem results as diagnostics: ${getErrorMessage(
e,
)}`,
);
this._diagnosticCollection.clear();
}
@@ -716,16 +812,15 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
private async showProblemResultsAsDiagnostics(
interpretation: Interpretation,
databaseItem: DatabaseItem
databaseItem: DatabaseItem,
): Promise<void> {
const { data, sourceLocationPrefix } = interpretation;
if (data.t !== 'SarifInterpretationData')
return;
if (data.t !== "SarifInterpretationData") return;
if (!data.runs || !data.runs[0].results) {
void this.logger.log(
'Didn\'t find a run in the sarif results. Error processing sarif?'
"Didn't find a run in the sarif results. Error processing sarif?",
);
return;
}
@@ -735,24 +830,24 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
for (const result of data.runs[0].results) {
const message = result.message.text;
if (message === undefined) {
void this.logger.log('Sarif had result without plaintext message');
void this.logger.log("Sarif had result without plaintext message");
continue;
}
if (!result.locations) {
void this.logger.log('Sarif had result without location');
void this.logger.log("Sarif had result without location");
continue;
}
const sarifLoc = parseSarifLocation(
result.locations[0],
sourceLocationPrefix
sourceLocationPrefix,
);
if ('hint' in sarifLoc) {
if ("hint" in sarifLoc) {
continue;
}
const resultLocation = tryResolveLocation(sarifLoc, databaseItem);
if (!resultLocation) {
void this.logger.log('Sarif location was not resolvable ' + sarifLoc);
void this.logger.log("Sarif location was not resolvable " + sarifLoc);
continue;
}
const parsedMessage = parseSarifPlainTextMessage(message);
@@ -764,26 +859,26 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
}
const resultMessageChunks: string[] = [];
for (const section of parsedMessage) {
if (typeof section === 'string') {
if (typeof section === "string") {
resultMessageChunks.push(section);
} else {
resultMessageChunks.push(section.text);
const sarifChunkLoc = parseSarifLocation(
relatedLocationsById[section.dest],
sourceLocationPrefix
sourceLocationPrefix,
);
if ('hint' in sarifChunkLoc) {
if ("hint" in sarifChunkLoc) {
continue;
}
const referenceLocation = tryResolveLocation(
sarifChunkLoc,
databaseItem
databaseItem,
);
if (referenceLocation) {
const related = new DiagnosticRelatedInformation(
referenceLocation,
section.text
section.text,
);
relatedInformation.push(related);
}
@@ -791,8 +886,8 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
}
const diagnostic = new Diagnostic(
resultLocation.range,
resultMessageChunks.join(''),
DiagnosticSeverity.Warning
resultMessageChunks.join(""),
DiagnosticSeverity.Warning,
);
diagnostic.relatedInformation = relatedInformation;
@@ -801,21 +896,22 @@ export class ResultsView extends AbstractWebview<IntoResultsViewMsg, FromResults
this._diagnosticCollection.set(diagnostics);
}
private convertPathToWebviewUri(path: string): string {
return fileUriToWebviewUri(this.getPanel(), Uri.file(path));
private convertPathToWebviewUri(panel: WebviewPanel, path: string): string {
return fileUriToWebviewUri(panel, Uri.file(path));
}
private convertPathPropertiesToWebviewUris(
info: SortedResultSetInfo
panel: WebviewPanel,
info: SortedResultSetInfo,
): SortedResultSetInfo {
return {
resultsPath: this.convertPathToWebviewUri(info.resultsPath),
resultsPath: this.convertPathToWebviewUri(panel, info.resultsPath),
sortState: info.sortState,
};
}
private handleSelectionChange(
event: vscode.TextEditorSelectionChangeEvent
event: vscode.TextEditorSelectionChangeEvent,
): void {
if (event.kind === vscode.TextEditorSelectionChangeKind.Command) {
return; // Ignore selection events we caused ourselves.


@@ -1,8 +1,7 @@
import { Logger } from './logging';
import * as cp from 'child_process';
import { Disposable } from 'vscode';
import { MessageConnection } from 'vscode-jsonrpc';
import { Logger } from "./logging";
import * as cp from "child_process";
import { Disposable } from "vscode";
import { MessageConnection } from "vscode-jsonrpc";
/** A running query server process and its associated message connection. */
export class ServerProcess implements Disposable {
@@ -10,7 +9,12 @@ export class ServerProcess implements Disposable {
connection: MessageConnection;
logger: Logger;
constructor(child: cp.ChildProcess, connection: MessageConnection, private name: string, logger: Logger) {
constructor(
child: cp.ChildProcess,
connection: MessageConnection,
private name: string,
logger: Logger,
) {
this.child = child;
this.connection = connection;
this.logger = logger;


@@ -1,4 +1,4 @@
import { languages, IndentAction, OnEnterRule } from 'vscode';
import { languages, IndentAction, OnEnterRule } from "vscode";
/**
* OnEnterRules are available in language-configurations, but you cannot specify them in the language-configuration.json.
@@ -12,18 +12,18 @@ import { languages, IndentAction, OnEnterRule } from 'vscode';
*/
export function install() {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const langConfig = require('../language-configuration.json');
const langConfig = require("../language-configuration.json");
// setLanguageConfiguration requires a regexp for the wordpattern, not a string
langConfig.wordPattern = new RegExp(langConfig.wordPattern);
langConfig.onEnterRules = onEnterRules;
langConfig.indentationRules = {
decreaseIndentPattern: /^((?!.*?\/\*).*\*\/)?\s*[\}\]].*$/,
increaseIndentPattern: /^((?!\/\/).)*(\{[^}"'`]*|\([^)"'`]*|\[[^\]"'`]*)$/
increaseIndentPattern: /^((?!\/\/).)*(\{[^}"'`]*|\([^)"'`]*|\[[^\]"'`]*)$/,
};
languages.setLanguageConfiguration('ql', langConfig);
languages.setLanguageConfiguration('qll', langConfig);
languages.setLanguageConfiguration('dbscheme', langConfig);
languages.setLanguageConfiguration("ql", langConfig);
languages.setLanguageConfiguration("qll", langConfig);
languages.setLanguageConfiguration("dbscheme", langConfig);
}
const onEnterRules: OnEnterRule[] = [
@@ -31,18 +31,18 @@ const onEnterRules: OnEnterRule[] = [
// e.g. /** | */
beforeText: /^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/,
afterText: /^\s*\*\/$/,
action: { indentAction: IndentAction.IndentOutdent, appendText: ' * ' },
action: { indentAction: IndentAction.IndentOutdent, appendText: " * " },
},
{
// e.g. /** ...|
beforeText: /^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/,
action: { indentAction: IndentAction.None, appendText: ' * ' },
action: { indentAction: IndentAction.None, appendText: " * " },
},
{
// e.g. * ...|
beforeText: /^(\t|[ ])*[ ]\*([ ]([^\*]|\*(?!\/))*)?$/,
// oneLineAboveText: /^(\s*(\/\*\*|\*)).*/,
action: { indentAction: IndentAction.None, appendText: '* ' },
action: { indentAction: IndentAction.None, appendText: "* " },
},
{
// e.g. */|

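The beforeText regexes above are dense, so here is a small sketch of which doc-comment lines they match. The regexes are copied from the first and third rules; the sample lines are hypothetical QL comments, used only to show which rule would fire on Enter:

// Copied from the onEnterRules above; the sample lines are made up.
const docCommentStart = /^\s*\/\*\*(?!\/)([^\*]|\*(?!\/))*$/;
const docCommentBody = /^(\t|[ ])*[ ]\*([ ]([^\*]|\*(?!\/))*)?$/;

console.log(docCommentStart.test("  /** Holds if this expression is a source"));
// true  -> pressing Enter appends " * " on the new line
console.log(docCommentStart.test("  /** Holds if ... */"));
// false -> the comment is already closed, so no continuation is added
console.log(docCommentBody.test("   * and flows to a sink."));
// true  -> pressing Enter appends "* ", continuing the comment body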

@@ -1,17 +1,22 @@
import { CancellationToken } from 'vscode';
import { ProgressCallback } from '../commandRunner';
import { DatabaseItem } from '../databases';
import { Dataset, deregisterDatabases, registerDatabases } from '../pure/legacy-messages';
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
import { QueryRunner } from '../queryRunner';
import { QueryWithResults } from '../run-queries-shared';
import { QueryServerClient } from './queryserver-client';
import { clearCacheInDatabase, compileAndRunQueryAgainstDatabase } from './run-queries';
import { upgradeDatabaseExplicit } from './upgrades';
import { CancellationToken } from "vscode";
import { ProgressCallback } from "../commandRunner";
import { DatabaseItem } from "../databases";
import {
Dataset,
deregisterDatabases,
registerDatabases,
} from "../pure/legacy-messages";
import { InitialQueryInfo, LocalQueryInfo } from "../query-results";
import { QueryRunner } from "../queryRunner";
import { QueryWithResults } from "../run-queries-shared";
import { QueryServerClient } from "./queryserver-client";
import {
clearCacheInDatabase,
compileAndRunQueryAgainstDatabase,
} from "./run-queries";
import { upgradeDatabaseExplicit } from "./upgrades";
export class LegacyQueryRunner extends QueryRunner {
constructor(public readonly qs: QueryServerClient) {
super();
}
@@ -20,40 +25,102 @@ export class LegacyQueryRunner extends QueryRunner {
return this.qs.cliServer;
}
async restartQueryServer(progress: ProgressCallback, token: CancellationToken): Promise<void> {
async restartQueryServer(
progress: ProgressCallback,
token: CancellationToken,
): Promise<void> {
await this.qs.restartQueryServer(progress, token);
}
onStart(callBack: (progress: ProgressCallback, token: CancellationToken) => Promise<void>) {
onStart(
callBack: (
progress: ProgressCallback,
token: CancellationToken,
) => Promise<void>,
) {
this.qs.onDidStartQueryServer(callBack);
}
async clearCacheInDatabase(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
async clearCacheInDatabase(
dbItem: DatabaseItem,
progress: ProgressCallback,
token: CancellationToken,
): Promise<void> {
await clearCacheInDatabase(this.qs, dbItem, progress, token);
}
async compileAndRunQueryAgainstDatabase(dbItem: DatabaseItem, initialInfo: InitialQueryInfo, queryStorageDir: string, progress: ProgressCallback, token: CancellationToken, templates?: Record<string, string>, queryInfo?: LocalQueryInfo): Promise<QueryWithResults> {
return await compileAndRunQueryAgainstDatabase(this.qs.cliServer, this.qs, dbItem, initialInfo, queryStorageDir, progress, token, templates, queryInfo);
async compileAndRunQueryAgainstDatabase(
dbItem: DatabaseItem,
initialInfo: InitialQueryInfo,
queryStorageDir: string,
progress: ProgressCallback,
token: CancellationToken,
templates?: Record<string, string>,
queryInfo?: LocalQueryInfo,
): Promise<QueryWithResults> {
return await compileAndRunQueryAgainstDatabase(
this.qs.cliServer,
this.qs,
dbItem,
initialInfo,
queryStorageDir,
progress,
token,
templates,
queryInfo,
);
}
async deregisterDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
const databases: Dataset[] = [{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
}];
await this.qs.sendRequest(deregisterDatabases, { databases }, token, progress);
async deregisterDatabase(
progress: ProgressCallback,
token: CancellationToken,
dbItem: DatabaseItem,
): Promise<void> {
if (
dbItem.contents &&
(await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())
) {
const databases: Dataset[] = [
{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: "default",
},
];
await this.qs.sendRequest(
deregisterDatabases,
{ databases },
token,
progress,
);
}
}
async registerDatabase(progress: ProgressCallback, token: CancellationToken, dbItem: DatabaseItem): Promise<void> {
if (dbItem.contents && (await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())) {
const databases: Dataset[] = [{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
}];
await this.qs.sendRequest(registerDatabases, { databases }, token, progress);
async registerDatabase(
progress: ProgressCallback,
token: CancellationToken,
dbItem: DatabaseItem,
): Promise<void> {
if (
dbItem.contents &&
(await this.qs.cliServer.cliConstraints.supportsDatabaseRegistration())
) {
const databases: Dataset[] = [
{
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: "default",
},
];
await this.qs.sendRequest(
registerDatabases,
{ databases },
token,
progress,
);
}
}
async upgradeDatabaseExplicit(dbItem: DatabaseItem, progress: ProgressCallback, token: CancellationToken): Promise<void> {
async upgradeDatabaseExplicit(
dbItem: DatabaseItem,
progress: ProgressCallback,
token: CancellationToken,
): Promise<void> {
await upgradeDatabaseExplicit(this.qs, dbItem, progress, token);
}


@@ -1,24 +1,35 @@
import * as path from 'path';
import * as fs from 'fs-extra';
import * as path from "path";
import * as fs from "fs-extra";
import { DisposableObject } from '../pure/disposable-object';
import { CancellationToken, commands } from 'vscode';
import { createMessageConnection, RequestType } from 'vscode-jsonrpc';
import * as cli from '../cli';
import { QueryServerConfig } from '../config';
import { Logger, ProgressReporter } from '../logging';
import { completeQuery, EvaluationResult, progress, ProgressMessage, WithProgressId } from '../pure/legacy-messages';
import * as messages from '../pure/legacy-messages';
import { ProgressCallback, ProgressTask } from '../commandRunner';
import { findQueryLogFile } from '../run-queries-shared';
import { ServerProcess } from '../json-rpc-server';
import { DisposableObject } from "../pure/disposable-object";
import { CancellationToken, commands } from "vscode";
import { createMessageConnection, RequestType } from "vscode-jsonrpc";
import * as cli from "../cli";
import { QueryServerConfig } from "../config";
import { Logger, ProgressReporter } from "../logging";
import {
completeQuery,
EvaluationResult,
progress,
ProgressMessage,
WithProgressId,
} from "../pure/legacy-messages";
import * as messages from "../pure/legacy-messages";
import { ProgressCallback, ProgressTask } from "../commandRunner";
import { findQueryLogFile } from "../run-queries-shared";
import { ServerProcess } from "../json-rpc-server";
type WithProgressReporting = (task: (progress: ProgressReporter, token: CancellationToken) => Thenable<void>) => Thenable<void>;
type WithProgressReporting = (
task: (
progress: ProgressReporter,
token: CancellationToken,
) => Thenable<void>,
) => Thenable<void>;
type ServerOpts = {
logger: Logger;
contextStoragePath: string;
}
};
/**
* Client that manages a query server process.
@@ -27,10 +38,11 @@ type ServerOpts = {
* to restart it (which disposes the existing process and starts a new one).
*/
export class QueryServerClient extends DisposableObject {
serverProcess?: ServerProcess;
evaluationResultCallbacks: { [key: number]: (res: EvaluationResult) => void };
progressCallbacks: { [key: number]: ((res: ProgressMessage) => void) | undefined };
progressCallbacks: {
[key: number]: ((res: ProgressMessage) => void) | undefined;
};
nextCallback: number;
nextProgress: number;
withProgressReporting: WithProgressReporting;
@@ -42,7 +54,7 @@ export class QueryServerClient extends DisposableObject {
// we need here.
readonly onDidStartQueryServer = (e: ProgressTask<void>) => {
this.queryServerStartListeners.push(e);
}
};
public activeQueryLogFile: string | undefined;
@@ -50,13 +62,16 @@ export class QueryServerClient extends DisposableObject {
readonly config: QueryServerConfig,
readonly cliServer: cli.CodeQLCliServer,
readonly opts: ServerOpts,
withProgressReporting: WithProgressReporting
withProgressReporting: WithProgressReporting,
) {
super();
// When the query server configuration changes, restart the query server.
if (config.onDidChangeConfiguration !== undefined) {
this.push(config.onDidChangeConfiguration(() =>
commands.executeCommand('codeQL.restartQueryServer')));
this.push(
config.onDidChangeConfiguration(() =>
commands.executeCommand("codeQL.restartQueryServer"),
),
);
}
this.withProgressReporting = withProgressReporting;
this.nextCallback = 0;
@@ -74,24 +89,23 @@ export class QueryServerClient extends DisposableObject {
if (this.serverProcess !== undefined) {
this.disposeAndStopTracking(this.serverProcess);
} else {
void this.logger.log('No server process to be stopped.');
void this.logger.log("No server process to be stopped.");
}
}
/** Restarts the query server by disposing of the current server process and then starting a new one. */
async restartQueryServer(
progress: ProgressCallback,
token: CancellationToken
token: CancellationToken,
): Promise<void> {
this.stopQueryServer();
await this.startQueryServer();
// Ensure we await all responses from event handlers so that
// errors can be properly reported to the user.
await Promise.all(this.queryServerStartListeners.map(handler => handler(
progress,
token
)));
await Promise.all(
this.queryServerStartListeners.map((handler) => handler(progress, token)),
);
}
showLog(): void {
@@ -101,86 +115,108 @@ export class QueryServerClient extends DisposableObject {
/** Starts a new query server process, sending progress messages to the status bar. */
async startQueryServer(): Promise<void> {
// Use an arrow function to preserve the value of `this`.
return this.withProgressReporting((progress, _) => this.startQueryServerImpl(progress));
return this.withProgressReporting((progress, _) =>
this.startQueryServerImpl(progress),
);
}
/** Starts a new query server process, sending progress messages to the given reporter. */
private async startQueryServerImpl(progressReporter: ProgressReporter): Promise<void> {
const ramArgs = await this.cliServer.resolveRam(this.config.queryMemoryMb, progressReporter);
const args = ['--threads', this.config.numThreads.toString()].concat(ramArgs);
private async startQueryServerImpl(
progressReporter: ProgressReporter,
): Promise<void> {
const ramArgs = await this.cliServer.resolveRam(
this.config.queryMemoryMb,
progressReporter,
);
const args = ["--threads", this.config.numThreads.toString()].concat(
ramArgs,
);
if (this.config.saveCache) {
args.push('--save-cache');
args.push("--save-cache");
}
if (this.config.cacheSize > 0) {
args.push('--max-disk-cache');
args.push("--max-disk-cache");
args.push(this.config.cacheSize.toString());
}
if (await this.cliServer.cliConstraints.supportsDatabaseRegistration()) {
args.push('--require-db-registration');
args.push("--require-db-registration");
}
if (await this.cliServer.cliConstraints.supportsOldEvalStats() && !(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())) {
args.push('--old-eval-stats');
if (
(await this.cliServer.cliConstraints.supportsOldEvalStats()) &&
!(await this.cliServer.cliConstraints.supportsPerQueryEvalLog())
) {
args.push("--old-eval-stats");
}
if (await this.cliServer.cliConstraints.supportsStructuredEvalLog()) {
const structuredLogFile = `${this.opts.contextStoragePath}/structured-evaluator-log.json`;
await fs.ensureFile(structuredLogFile);
args.push('--evaluator-log');
args.push("--evaluator-log");
args.push(structuredLogFile);
// We hard-code the verbosity level to 5 and minify to false.
// This will be the behavior of the per-query structured logging in the CLI after 2.8.3.
args.push('--evaluator-log-level');
args.push('5');
args.push("--evaluator-log-level");
args.push("5");
}
if (this.config.debug) {
args.push('--debug', '--tuple-counting');
args.push("--debug", "--tuple-counting");
}
if (cli.shouldDebugQueryServer()) {
args.push('-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y');
args.push(
"-J=-agentlib:jdwp=transport=dt_socket,address=localhost:9010,server=n,suspend=y,quiet=y",
);
}
const child = cli.spawnServer(
this.config.codeQlPath,
'CodeQL query server',
['execute', 'query-server'],
"CodeQL query server",
["execute", "query-server"],
args,
this.logger,
data => this.logger.log(data.toString(), {
trailingNewline: false,
additionalLogLocation: this.activeQueryLogFile
}),
(data) =>
this.logger.log(data.toString(), {
trailingNewline: false,
additionalLogLocation: this.activeQueryLogFile,
}),
undefined, // no listener for stdout
progressReporter
progressReporter,
);
progressReporter.report({ message: 'Connecting to CodeQL query server' });
progressReporter.report({ message: "Connecting to CodeQL query server" });
const connection = createMessageConnection(child.stdout, child.stdin);
connection.onRequest(completeQuery, res => {
connection.onRequest(completeQuery, (res) => {
if (!(res.runId in this.evaluationResultCallbacks)) {
void this.logger.log(`No callback associated with run id ${res.runId}, continuing without executing any callback`);
void this.logger.log(
`No callback associated with run id ${res.runId}, continuing without executing any callback`,
);
} else {
this.evaluationResultCallbacks[res.runId](res);
}
return {};
});
connection.onNotification(progress, res => {
connection.onNotification(progress, (res) => {
const callback = this.progressCallbacks[res.id];
if (callback) {
callback(res);
}
});
this.serverProcess = new ServerProcess(child, connection, 'Query server', this.logger);
this.serverProcess = new ServerProcess(
child,
connection,
"Query server",
this.logger,
);
// Ensure the server process is disposed together with this client.
this.track(this.serverProcess);
connection.listen();
progressReporter.report({ message: 'Connected to CodeQL query server' });
progressReporter.report({ message: "Connected to CodeQL query server" });
this.nextCallback = 0;
this.nextProgress = 0;
this.progressCallbacks = {};
@@ -201,16 +237,25 @@ export class QueryServerClient extends DisposableObject {
return this.serverProcess!.child.pid || 0;
}
async sendRequest<P, R, E, RO>(type: RequestType<WithProgressId<P>, R, E, RO>, parameter: P, token?: CancellationToken, progress?: (res: ProgressMessage) => void): Promise<R> {
async sendRequest<P, R, E, RO>(
type: RequestType<WithProgressId<P>, R, E, RO>,
parameter: P,
token?: CancellationToken,
progress?: (res: ProgressMessage) => void,
): Promise<R> {
const id = this.nextProgress++;
this.progressCallbacks[id] = progress;
this.updateActiveQuery(type.method, parameter);
try {
if (this.serverProcess === undefined) {
throw new Error('No query server process found.');
throw new Error("No query server process found.");
}
return await this.serverProcess.connection.sendRequest(type, { body: parameter, progressId: id }, token);
return await this.serverProcess.connection.sendRequest(
type,
{ body: parameter, progressId: id },
token,
);
} finally {
delete this.progressCallbacks[id];
}
@@ -226,7 +271,9 @@ export class QueryServerClient extends DisposableObject {
*/
private updateActiveQuery(method: string, parameter: any): void {
if (method === messages.compileQuery.method) {
this.activeQueryLogFile = findQueryLogFile(path.dirname(parameter.resultPath));
this.activeQueryLogFile = findQueryLogFile(
path.dirname(parameter.resultPath),
);
}
}
}


@@ -1,31 +1,31 @@
import * as crypto from 'crypto';
import * as fs from 'fs-extra';
import * as tmp from 'tmp-promise';
import * as path from 'path';
import {
CancellationToken,
Uri,
} from 'vscode';
import { ErrorCodes, ResponseError } from 'vscode-languageclient';
import * as crypto from "crypto";
import * as fs from "fs-extra";
import * as tmp from "tmp-promise";
import * as path from "path";
import { CancellationToken, Uri } from "vscode";
import { ErrorCodes, ResponseError } from "vscode-languageclient";
import * as cli from '../cli';
import { DatabaseItem, } from '../databases';
import * as cli from "../cli";
import { DatabaseItem } from "../databases";
import {
getOnDiskWorkspaceFolders,
showAndLogErrorMessage,
showAndLogWarningMessage,
tryGetQueryMetadata,
upgradesTmpDir
} from '../helpers';
import { ProgressCallback } from '../commandRunner';
import { QueryMetadata } from '../pure/interface-types';
import { logger } from '../logging';
import * as messages from '../pure/legacy-messages';
import { InitialQueryInfo, LocalQueryInfo } from '../query-results';
import * as qsClient from './queryserver-client';
import { getErrorMessage } from '../pure/helpers-pure';
import { compileDatabaseUpgradeSequence, upgradeDatabaseExplicit } from './upgrades';
import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
upgradesTmpDir,
} from "../helpers";
import { ProgressCallback } from "../commandRunner";
import { QueryMetadata } from "../pure/interface-types";
import { logger } from "../logging";
import * as messages from "../pure/legacy-messages";
import { InitialQueryInfo, LocalQueryInfo } from "../query-results";
import * as qsClient from "./queryserver-client";
import { getErrorMessage } from "../pure/helpers-pure";
import {
compileDatabaseUpgradeSequence,
upgradeDatabaseExplicit,
} from "./upgrades";
import { QueryEvaluationInfo, QueryWithResults } from "../run-queries-shared";
/**
* A collection of evaluation-time information about a query,
@@ -34,7 +34,6 @@ import { QueryEvaluationInfo, QueryWithResults } from '../run-queries-shared';
* output and results.
*/
export class QueryInProgress {
public queryEvalInfo: QueryEvaluationInfo;
/**
* Note that in the {@link slurpQueryHistory} method, we create a QueryEvaluationInfo instance
@@ -49,15 +48,20 @@ export class QueryInProgress {
readonly metadata?: QueryMetadata,
readonly templates?: Record<string, string>,
) {
this.queryEvalInfo = new QueryEvaluationInfo(querySaveDir, dbItemPath, databaseHasMetadataFile, quickEvalPosition, metadata);
this.queryEvalInfo = new QueryEvaluationInfo(
querySaveDir,
dbItemPath,
databaseHasMetadataFile,
quickEvalPosition,
metadata,
);
/**/
}
get compiledQueryPath() {
return path.join(this.querySaveDir, 'compiledQuery.qlo');
return this.queryEvalInfo.compileQueryPath;
}
async run(
qs: qsClient.QueryServerClient,
upgradeQlo: string | undefined,
@@ -68,19 +72,21 @@ export class QueryInProgress {
queryInfo?: LocalQueryInfo,
): Promise<messages.EvaluationResult> {
if (!dbItem.contents || dbItem.error) {
throw new Error('Can\'t run query on invalid database.');
throw new Error("Can't run query on invalid database.");
}
let result: messages.EvaluationResult | null = null;
const callbackId = qs.registerCallback(res => {
const callbackId = qs.registerCallback((res) => {
result = {
...res,
logFileLocation: this.queryEvalInfo.logPath
logFileLocation: this.queryEvalInfo.logPath,
};
});
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(model => ({ uri: Uri.file(model.path).toString(true) }));
const availableMlModelUris: messages.MlModel[] = availableMlModels.map(
(model) => ({ uri: Uri.file(model.path).toString(true) }),
);
const queryToRun: messages.QueryToRun = {
resultsPath: this.queryEvalInfo.resultsPaths.resultsPath,
@@ -95,50 +101,63 @@ export class QueryInProgress {
const dataset: messages.Dataset = {
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default'
workingSet: "default",
};
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
if (
queryInfo &&
(await qs.cliServer.cliConstraints.supportsPerQueryEvalLog())
) {
await qs.sendRequest(messages.startLog, {
db: dataset,
logPath: this.queryEvalInfo.evalLogPath,
});
}
const params: messages.EvaluateQueriesParams = {
db: dataset,
evaluateId: callbackId,
queries: [queryToRun],
stopOnError: false,
useSequenceHint: false
useSequenceHint: false,
};
try {
await qs.sendRequest(messages.runQueries, params, token, progress);
if (qs.config.customLogDirectory) {
void showAndLogWarningMessage(
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${this.queryEvalInfo.logPath}.`
`Custom log directories are no longer supported. The "codeQL.runningQueries.customLogDirectory" setting is deprecated. Unset the setting to stop seeing this message. Query logs saved to ${this.queryEvalInfo.logPath}.`,
);
}
} finally {
qs.unRegisterCallback(callbackId);
if (queryInfo && await qs.cliServer.cliConstraints.supportsPerQueryEvalLog()) {
if (
queryInfo &&
(await qs.cliServer.cliConstraints.supportsPerQueryEvalLog())
) {
await qs.sendRequest(messages.endLog, {
db: dataset,
logPath: this.queryEvalInfo.evalLogPath,
});
if (await this.queryEvalInfo.hasEvalLog()) {
await this.queryEvalInfo.addQueryLogs(queryInfo, qs.cliServer, qs.logger);
await this.queryEvalInfo.addQueryLogs(
queryInfo,
qs.cliServer,
qs.logger,
);
} else {
void showAndLogWarningMessage(`Failed to write structured evaluator log to ${this.queryEvalInfo.evalLogPath}.`);
void showAndLogWarningMessage(
`Failed to write structured evaluator log to ${this.queryEvalInfo.evalLogPath}.`,
);
}
}
}
return result || {
evaluationTime: 0,
message: 'No result from server',
queryId: -1,
runId: callbackId,
resultType: messages.QueryResultType.OTHER_ERROR
};
return (
result || {
evaluationTime: 0,
message: "No result from server",
queryId: -1,
runId: callbackId,
resultType: messages.QueryResultType.OTHER_ERROR,
}
);
}
async compile(
@@ -149,9 +168,11 @@ export class QueryInProgress {
): Promise<messages.CompilationMessage[]> {
let compiled: messages.CheckQueryResult | undefined;
try {
const target = this.quickEvalPosition ? {
quickEval: { quickEvalPos: this.quickEvalPosition }
} : { query: {} };
const target = this.quickEvalPosition
? {
quickEval: { quickEvalPos: this.quickEvalPosition },
}
: { query: {} };
const params: messages.CompileQueryParams = {
compilationOptions: {
computeNoLocationUrls: true,
@@ -162,21 +183,30 @@ export class QueryInProgress {
noComputeGetUrl: false,
noComputeToString: false,
computeDefaultStrings: true,
emitDebugInfo: true
emitDebugInfo: true,
},
extraOptions: {
timeoutSecs: qs.config.timeoutSecs
timeoutSecs: qs.config.timeoutSecs,
},
queryToCheck: program,
resultPath: this.compiledQueryPath,
target,
};
compiled = await qs.sendRequest(messages.compileQuery, params, token, progress);
compiled = await qs.sendRequest(
messages.compileQuery,
params,
token,
progress,
);
} finally {
void qs.logger.log(' - - - COMPILATION DONE - - - ', { additionalLogLocation: this.queryEvalInfo.logPath });
void qs.logger.log(" - - - COMPILATION DONE - - - ", {
additionalLogLocation: this.queryEvalInfo.logPath,
});
}
return (compiled?.messages || []).filter(msg => msg.severity === messages.Severity.ERROR);
return (compiled?.messages || []).filter(
(msg) => msg.severity === messages.Severity.ERROR,
);
}
}
@@ -187,12 +217,12 @@ export async function clearCacheInDatabase(
token: CancellationToken,
): Promise<messages.ClearCacheResult> {
if (dbItem.contents === undefined) {
throw new Error('Can\'t clear the cache in an invalid database.');
throw new Error("Can't clear the cache in an invalid database.");
}
const db: messages.Dataset = {
dbDir: dbItem.contents.datasetUri.fsPath,
workingSet: 'default',
workingSet: "default",
};
const params: messages.ClearCacheParams = {
@@ -203,7 +233,6 @@ export async function clearCacheInDatabase(
return qs.sendRequest(messages.clearCache, params, token, progress);
}
/**
* Compare the dbscheme implied by the query `query` and that of the current database.
* - If they are compatible, do nothing.
@@ -222,9 +251,16 @@ async function checkDbschemeCompatibility(
const searchPath = getOnDiskWorkspaceFolders();
if (dbItem.contents?.dbSchemeUri !== undefined) {
const { finalDbscheme } = await cliServer.resolveUpgrades(dbItem.contents.dbSchemeUri.fsPath, searchPath, false);
const hash = async function(filename: string): Promise<string> {
return crypto.createHash('sha256').update(await fs.readFile(filename)).digest('hex');
const { finalDbscheme } = await cliServer.resolveUpgrades(
dbItem.contents.dbSchemeUri.fsPath,
searchPath,
false,
);
const hash = async function (filename: string): Promise<string> {
return crypto
.createHash("sha256")
.update(await fs.readFile(filename))
.digest("hex");
};
// At this point, we have learned about three dbschemes:
@@ -242,22 +278,19 @@ async function checkDbschemeCompatibility(
reportNoUpgradePath(qlProgram, query);
}
if (upgradableTo == dbschemeOfLib &&
dbschemeOfDb != dbschemeOfLib) {
if (upgradableTo == dbschemeOfLib && dbschemeOfDb != dbschemeOfLib) {
// Try to upgrade the database
await upgradeDatabaseExplicit(
qs,
dbItem,
progress,
token
);
await upgradeDatabaseExplicit(qs, dbItem, progress, token);
}
}
}
function reportNoUpgradePath(qlProgram: messages.QlProgram, query: QueryInProgress): void {
function reportNoUpgradePath(
qlProgram: messages.QlProgram,
query: QueryInProgress,
): void {
throw new Error(
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`
`Query ${qlProgram.queryPath} expects database scheme ${query.queryDbscheme}, but the current database has a different scheme, and no database upgrades are available. The current database scheme may be newer than the CodeQL query libraries in your workspace.\n\nPlease try using a newer version of the query libraries.`,
);
}
@@ -273,9 +306,8 @@ async function compileNonDestructiveUpgrade(
progress: ProgressCallback,
token: CancellationToken,
): Promise<string> {
if (!dbItem?.contents?.dbSchemeUri) {
throw new Error('Database is invalid, and cannot be upgraded.');
throw new Error("Database is invalid, and cannot be upgraded.");
}
// When packaging is used, dependencies may exist outside of the workspace and they are always on the resolved search path.
@@ -288,15 +320,22 @@ async function compileNonDestructiveUpgrade(
dbItem.contents.dbSchemeUri.fsPath,
upgradesPath,
true,
query.queryDbscheme
query.queryDbscheme,
);
if (!matchesTarget) {
reportNoUpgradePath(qlProgram, query);
}
const result = await compileDatabaseUpgradeSequence(qs, dbItem, scripts, upgradeTemp, progress, token);
const result = await compileDatabaseUpgradeSequence(
qs,
dbItem,
scripts,
upgradeTemp,
progress,
token,
);
if (result.compiledUpgrade === undefined) {
const error = result.error || '[no error message available]';
const error = result.error || "[no error message available]";
throw new Error(error);
}
// We can upgrade to the actual target
@@ -305,8 +344,6 @@ async function compileNonDestructiveUpgrade(
return result.compiledUpgrade;
}
export async function compileAndRunQueryAgainstDatabase(
cliServer: cli.CodeQLCliServer,
qs: qsClient.QueryServerClient,
@@ -319,16 +356,23 @@ export async function compileAndRunQueryAgainstDatabase(
queryInfo?: LocalQueryInfo, // May be omitted for queries not initiated by the user. If omitted we won't create a structured log for the query.
): Promise<QueryWithResults> {
if (!dbItem.contents || !dbItem.contents.dbSchemeUri) {
throw new Error(`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`);
throw new Error(
`Database ${dbItem.databaseUri} does not have a CodeQL database scheme.`,
);
}
// Get the workspace folder paths.
const diskWorkspaceFolders = getOnDiskWorkspaceFolders();
// Figure out the library path for the query.
const packConfig = await cliServer.resolveLibraryPath(diskWorkspaceFolders, initialInfo.queryPath);
const packConfig = await cliServer.resolveLibraryPath(
diskWorkspaceFolders,
initialInfo.queryPath,
);
if (!packConfig.dbscheme) {
throw new Error('Could not find a database scheme for this query. Please check that you have a valid qlpack.yml file for this query, which refers to a database scheme either in the `dbscheme` field or through one of its dependencies.');
throw new Error(
"Could not find a database scheme for this query. Please check that you have a valid qlpack.yml file for this query, which refers to a database scheme either in the `dbscheme` field or through one of its dependencies.",
);
}
// Check whether the query has an entirely different schema from the
@@ -338,8 +382,16 @@ export async function compileAndRunQueryAgainstDatabase(
const querySchemaName = path.basename(packConfig.dbscheme);
const dbSchemaName = path.basename(dbItem.contents.dbSchemeUri.fsPath);
if (querySchemaName != dbSchemaName) {
void logger.log(`Query schema was ${querySchemaName}, but database schema was ${dbSchemaName}.`);
throw new Error(`The query ${path.basename(initialInfo.queryPath)} cannot be run against the selected database (${dbItem.name}): their target languages are different. Please select a different database and try again.`);
void logger.log(
`Query schema was ${querySchemaName}, but database schema was ${dbSchemaName}.`,
);
throw new Error(
`The query ${path.basename(
initialInfo.queryPath,
)} cannot be run against the selected database (${
dbItem.name
}): their target languages are different. Please select a different database and try again.`,
);
}
const qlProgram: messages.QlProgram = {
@@ -351,31 +403,43 @@ export async function compileAndRunQueryAgainstDatabase(
// we use the database's DB scheme here instead of the DB scheme
// from the current document's project.
dbschemePath: dbItem.contents.dbSchemeUri.fsPath,
queryPath: initialInfo.queryPath
queryPath: initialInfo.queryPath,
};
// Read the query metadata if possible, to use in the UI.
const metadata = await tryGetQueryMetadata(cliServer, qlProgram.queryPath);
let availableMlModels: cli.MlModelInfo[] = [];
if (!await cliServer.cliConstraints.supportsResolveMlModels()) {
void logger.log('Resolving ML models is unsupported by this version of the CLI. Running the query without any ML models.');
if (!(await cliServer.cliConstraints.supportsResolveMlModels())) {
void logger.log(
"Resolving ML models is unsupported by this version of the CLI. Running the query without any ML models.",
);
} else {
try {
availableMlModels = (await cliServer.resolveMlModels(diskWorkspaceFolders, initialInfo.queryPath)).models;
availableMlModels = (
await cliServer.resolveMlModels(
diskWorkspaceFolders,
initialInfo.queryPath,
)
).models;
if (availableMlModels.length) {
void logger.log(`Found available ML models at the following paths: ${availableMlModels.map(x => `'${x.path}'`).join(', ')}.`);
void logger.log(
`Found available ML models at the following paths: ${availableMlModels
.map((x) => `'${x.path}'`)
.join(", ")}.`,
);
} else {
void logger.log('Did not find any available ML models.');
void logger.log("Did not find any available ML models.");
}
} catch (e) {
const message = `Couldn't resolve available ML models for ${qlProgram.queryPath}. Running the ` +
const message =
`Couldn't resolve available ML models for ${qlProgram.queryPath}. Running the ` +
`query without any ML models: ${e}.`;
void showAndLogErrorMessage(message);
}
}
const hasMetadataFile = (await dbItem.hasMetadataFile());
const hasMetadataFile = await dbItem.hasMetadataFile();
const query = new QueryInProgress(
path.join(queryStorageDir, initialInfo.id),
dbItem.databaseUri.fsPath,
@@ -383,7 +447,7 @@ export async function compileAndRunQueryAgainstDatabase(
packConfig.dbscheme,
initialInfo.quickEvalPosition,
metadata,
templates
templates,
);
await query.queryEvalInfo.createTimestampFile();
@@ -392,25 +456,49 @@ export async function compileAndRunQueryAgainstDatabase(
let upgradeQlo;
if (await cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
upgradeDir = await tmp.dir({ dir: upgradesTmpDir, unsafeCleanup: true });
upgradeQlo = await compileNonDestructiveUpgrade(qs, upgradeDir, query, qlProgram, dbItem, progress, token);
upgradeQlo = await compileNonDestructiveUpgrade(
qs,
upgradeDir,
query,
qlProgram,
dbItem,
progress,
token,
);
} else {
await checkDbschemeCompatibility(cliServer, qs, query, qlProgram, dbItem, progress, token);
await checkDbschemeCompatibility(
cliServer,
qs,
query,
qlProgram,
dbItem,
progress,
token,
);
}
let errors;
try {
errors = await query.compile(qs, qlProgram, progress, token);
} catch (e) {
if (e instanceof ResponseError && e.code == ErrorCodes.RequestCancelled) {
return createSyntheticResult(query, 'Query cancelled');
return createSyntheticResult(query, "Query cancelled");
} else {
throw e;
}
}
if (errors.length === 0) {
const result = await query.run(qs, upgradeQlo, availableMlModels, dbItem, progress, token, queryInfo);
const result = await query.run(
qs,
upgradeQlo,
availableMlModels,
dbItem,
progress,
token,
queryInfo,
);
if (result.resultType !== messages.QueryResultType.SUCCESS) {
const message = result.message || 'Failed to run query';
const message = result.message || "Failed to run query";
void logger.log(message);
void showAndLogErrorMessage(message);
}
@@ -424,7 +512,7 @@ export async function compileAndRunQueryAgainstDatabase(
logFileLocation: result.logFileLocation,
dispose: () => {
qs.logger.removeAdditionalLogLocation(result.logFileLocation);
}
},
};
} else {
// Error dialogs are limited in size and scrollability,
@@ -433,26 +521,34 @@ export async function compileAndRunQueryAgainstDatabase(
// However we don't show quick eval errors there so we need to display them anyway.
void qs.logger.log(
`Failed to compile query ${initialInfo.queryPath} against database scheme ${qlProgram.dbschemePath}:`,
{ additionalLogLocation: query.queryEvalInfo.logPath }
{ additionalLogLocation: query.queryEvalInfo.logPath },
);
const formattedMessages: string[] = [];
for (const error of errors) {
const message = error.message || '[no error message available]';
const message = error.message || "[no error message available]";
const formatted = `ERROR: ${message} (${error.position.fileName}:${error.position.line}:${error.position.column}:${error.position.endLine}:${error.position.endColumn})`;
formattedMessages.push(formatted);
void qs.logger.log(formatted, { additionalLogLocation: query.queryEvalInfo.logPath });
void qs.logger.log(formatted, {
additionalLogLocation: query.queryEvalInfo.logPath,
});
}
if (initialInfo.isQuickEval && formattedMessages.length <= 2) {
// If there are more than 2 error messages, they will not be displayed well in a popup
// and will be trimmed by the function displaying the error popup. Accordingly, we only
// try to show the errors if there are 2 or less, otherwise we direct the user to the log.
void showAndLogErrorMessage('Quick evaluation compilation failed: ' + formattedMessages.join('\n'));
void showAndLogErrorMessage(
"Quick evaluation compilation failed: " +
formattedMessages.join("\n"),
);
} else {
void showAndLogErrorMessage((initialInfo.isQuickEval ? 'Quick evaluation' : 'Query') + compilationFailedErrorTail);
void showAndLogErrorMessage(
(initialInfo.isQuickEval ? "Quick evaluation" : "Query") +
compilationFailedErrorTail,
);
}
return createSyntheticResult(query, 'Query had compilation errors');
return createSyntheticResult(query, "Query had compilation errors");
}
} finally {
try {
@@ -460,30 +556,34 @@ export async function compileAndRunQueryAgainstDatabase(
} catch (e) {
void qs.logger.log(
`Could not clean up the upgrades dir. Reason: ${getErrorMessage(e)}`,
{ additionalLogLocation: query.queryEvalInfo.logPath }
{ additionalLogLocation: query.queryEvalInfo.logPath },
);
}
}
}
const compilationFailedErrorTail = ' compilation failed. Please make sure there are no errors in the query, the database is up to date,' +
' and the query and database use the same target language. For more details on the error, go to View > Output,' +
' and choose CodeQL Query Server from the dropdown.';
const compilationFailedErrorTail =
" compilation failed. Please make sure there are no errors in the query, the database is up to date," +
" and the query and database use the same target language. For more details on the error, go to View > Output," +
" and choose CodeQL Query Server from the dropdown.";
export function formatLegacyMessage(result: messages.EvaluationResult) {
switch (result.resultType) {
case messages.QueryResultType.CANCELLATION:
return `cancelled after ${Math.round(result.evaluationTime / 1000)} seconds`;
return `cancelled after ${Math.round(
result.evaluationTime / 1000,
)} seconds`;
case messages.QueryResultType.OOM:
return 'out of memory';
return "out of memory";
case messages.QueryResultType.SUCCESS:
return `finished in ${Math.round(result.evaluationTime / 1000)} seconds`;
case messages.QueryResultType.TIMEOUT:
return `timed out after ${Math.round(result.evaluationTime / 1000)} seconds`;
return `timed out after ${Math.round(
result.evaluationTime / 1000,
)} seconds`;
case messages.QueryResultType.OTHER_ERROR:
default:
return result.message ? `failed: ${result.message}` : 'failed';
return result.message ? `failed: ${result.message}` : "failed";
}
}
@@ -505,12 +605,15 @@ function createSyntheticResult(
runId: 0,
},
successful: false,
dispose: () => { /**/ },
dispose: () => {
/**/
},
};
}
function createSimpleTemplates(templates: Record<string, string> | undefined): messages.TemplateDefinitions | undefined {
function createSimpleTemplates(
templates: Record<string, string> | undefined,
): messages.TemplateDefinitions | undefined {
if (!templates) {
return undefined;
}
@@ -518,8 +621,8 @@ function createSimpleTemplates(templates: Record<string, string> | undefined): m
for (const key of Object.keys(templates)) {
result[key] = {
values: {
tuples: [[{ stringValue: templates[key] }]]
}
tuples: [[{ stringValue: templates[key] }]],
},
};
}
return result;


@@ -1,12 +1,16 @@
import * as vscode from 'vscode';
import { getOnDiskWorkspaceFolders, showAndLogErrorMessage, tmpDir } from '../helpers';
import { ProgressCallback, UserCancellationException } from '../commandRunner';
import { logger } from '../logging';
import * as messages from '../pure/legacy-messages';
import * as qsClient from './queryserver-client';
import * as tmp from 'tmp-promise';
import * as path from 'path';
import { DatabaseItem } from '../databases';
import * as vscode from "vscode";
import {
getOnDiskWorkspaceFolders,
showAndLogErrorMessage,
tmpDir,
} from "../helpers";
import { ProgressCallback, UserCancellationException } from "../commandRunner";
import { logger } from "../logging";
import * as messages from "../pure/legacy-messages";
import * as qsClient from "./queryserver-client";
import * as tmp from "tmp-promise";
import * as path from "path";
import { DatabaseItem } from "../databases";
/**
* Maximum number of lines to include from database upgrade message,
@@ -15,7 +19,6 @@ import { DatabaseItem } from '../databases';
*/
const MAX_UPGRADE_MESSAGE_LINES = 10;
/**
* Compile a database upgrade sequence.
* Callers must check that this is valid with the current queryserver first.
@@ -26,19 +29,29 @@ export async function compileDatabaseUpgradeSequence(
resolvedSequence: string[],
currentUpgradeTmp: tmp.DirectoryResult,
progress: ProgressCallback,
token: vscode.CancellationToken
token: vscode.CancellationToken,
): Promise<messages.CompileUpgradeSequenceResult> {
if (dbItem.contents === undefined || dbItem.contents.dbSchemeUri === undefined) {
throw new Error('Database is invalid, and cannot be upgraded.');
if (
dbItem.contents === undefined ||
dbItem.contents.dbSchemeUri === undefined
) {
throw new Error("Database is invalid, and cannot be upgraded.");
}
if (!await qs.cliServer.cliConstraints.supportsNonDestructiveUpgrades()) {
throw new Error('The version of codeql is too old to run non-destructive upgrades.');
if (!(await qs.cliServer.cliConstraints.supportsNonDestructiveUpgrades())) {
throw new Error(
"The version of codeql is too old to run non-destructive upgrades.",
);
}
// If possible just compile the upgrade sequence
return await qs.sendRequest(messages.compileUpgradeSequence, {
upgradeTempDir: currentUpgradeTmp.path,
upgradePaths: resolvedSequence
}, token, progress);
return await qs.sendRequest(
messages.compileUpgradeSequence,
{
upgradeTempDir: currentUpgradeTmp.path,
upgradePaths: resolvedSequence,
},
token,
progress,
);
}
async function compileDatabaseUpgrade(
@@ -48,30 +61,35 @@ async function compileDatabaseUpgrade(
resolvedSequence: string[],
currentUpgradeTmp: tmp.DirectoryResult,
progress: ProgressCallback,
token: vscode.CancellationToken
token: vscode.CancellationToken,
): Promise<messages.CompileUpgradeResult> {
if (!dbItem.contents?.dbSchemeUri) {
throw new Error('Database is invalid, and cannot be upgraded.');
throw new Error("Database is invalid, and cannot be upgraded.");
}
// We have the upgrades we want but compileUpgrade
// requires searching for them. So we use the parent directories of the upgrades
// as the upgrade path.
const parentDirs = resolvedSequence.map(dir => path.dirname(dir));
const parentDirs = resolvedSequence.map((dir) => path.dirname(dir));
const uniqueParentDirs = new Set(parentDirs);
progress({
step: 1,
maxStep: 3,
message: 'Checking for database upgrades'
message: "Checking for database upgrades",
});
return qs.sendRequest(messages.compileUpgrade, {
upgrade: {
fromDbscheme: dbItem.contents.dbSchemeUri.fsPath,
toDbscheme: targetDbScheme,
additionalUpgrades: Array.from(uniqueParentDirs)
return qs.sendRequest(
messages.compileUpgrade,
{
upgrade: {
fromDbscheme: dbItem.contents.dbSchemeUri.fsPath,
toDbscheme: targetDbScheme,
additionalUpgrades: Array.from(uniqueParentDirs),
},
upgradeTempDir: currentUpgradeTmp.path,
singleFileUpgrades: true,
},
upgradeTempDir: currentUpgradeTmp.path,
singleFileUpgrades: true,
}, token, progress);
token,
progress,
);
}
/**
@@ -81,10 +99,9 @@ async function compileDatabaseUpgrade(
async function checkAndConfirmDatabaseUpgrade(
compiled: messages.CompiledUpgrades,
db: DatabaseItem,
quiet: boolean
quiet: boolean,
): Promise<void> {
let descriptionMessage = '';
let descriptionMessage = "";
const descriptions = getUpgradeDescriptions(compiled);
for (const script of descriptions) {
descriptionMessage += `Would perform upgrade: ${script.description}\n`;
@@ -92,7 +109,6 @@ async function checkAndConfirmDatabaseUpgrade(
}
void logger.log(descriptionMessage);
// If the quiet flag is set, do the upgrade without a popup.
if (quiet) {
return;
@@ -100,39 +116,52 @@ async function checkAndConfirmDatabaseUpgrade(
// Ask the user to confirm the upgrade.
const showLogItem: vscode.MessageItem = { title: 'No, Show Changes', isCloseAffordance: true };
const yesItem = { title: 'Yes', isCloseAffordance: false };
const noItem = { title: 'No', isCloseAffordance: true };
const showLogItem: vscode.MessageItem = {
title: "No, Show Changes",
isCloseAffordance: true,
};
const yesItem = { title: "Yes", isCloseAffordance: false };
const noItem = { title: "No", isCloseAffordance: true };
const dialogOptions: vscode.MessageItem[] = [yesItem, noItem];
let messageLines = descriptionMessage.split('\n');
let messageLines = descriptionMessage.split("\n");
if (messageLines.length > MAX_UPGRADE_MESSAGE_LINES) {
messageLines = messageLines.slice(0, MAX_UPGRADE_MESSAGE_LINES);
messageLines.push('The list of upgrades was truncated, click "No, Show Changes" to see the full list.');
messageLines.push(
'The list of upgrades was truncated, click "No, Show Changes" to see the full list.',
);
dialogOptions.push(showLogItem);
}
const message = `Should the database ${db.databaseUri.fsPath} be upgraded?\n\n${messageLines.join('\n')}`;
const chosenItem = await vscode.window.showInformationMessage(message, { modal: true }, ...dialogOptions);
const message = `Should the database ${
db.databaseUri.fsPath
} be upgraded?\n\n${messageLines.join("\n")}`;
const chosenItem = await vscode.window.showInformationMessage(
message,
{ modal: true },
...dialogOptions,
);
if (chosenItem === showLogItem) {
logger.outputChannel.show();
}
if (chosenItem !== yesItem) {
throw new UserCancellationException('User cancelled the database upgrade.');
throw new UserCancellationException("User cancelled the database upgrade.");
}
}
/**
* Get the descriptions from a compiled upgrade
*/
function getUpgradeDescriptions(compiled: messages.CompiledUpgrades): messages.UpgradeDescription[] {
function getUpgradeDescriptions(
compiled: messages.CompiledUpgrades,
): messages.UpgradeDescription[] {
// We use the presence of compiledUpgradeFile to check
// if it is multifile or not. We need to explicitly check undefined
// as the types claim the empty string is a valid value
if (compiled.compiledUpgradeFile === undefined) {
return compiled.scripts.map(script => script.description);
return compiled.scripts.map((script) => script.description);
} else {
return compiled.descriptions;
}
@@ -150,50 +179,77 @@ export async function upgradeDatabaseExplicit(
progress: ProgressCallback,
token: vscode.CancellationToken,
): Promise<messages.RunUpgradeResult | undefined> {
const searchPath: string[] = getOnDiskWorkspaceFolders();
if (!dbItem?.contents?.dbSchemeUri) {
throw new Error('Database is invalid, and cannot be upgraded.');
throw new Error("Database is invalid, and cannot be upgraded.");
}
const upgradeInfo = await qs.cliServer.resolveUpgrades(
dbItem.contents.dbSchemeUri.fsPath,
searchPath,
false
false,
);
const { scripts, finalDbscheme } = upgradeInfo;
if (finalDbscheme === undefined) {
throw new Error('Could not determine target dbscheme to upgrade to.');
throw new Error("Could not determine target dbscheme to upgrade to.");
}
const currentUpgradeTmp = await tmp.dir({ dir: tmpDir.name, prefix: 'upgrade_', keep: false, unsafeCleanup: true });
const currentUpgradeTmp = await tmp.dir({
dir: tmpDir.name,
prefix: "upgrade_",
keep: false,
unsafeCleanup: true,
});
try {
let compileUpgradeResult: messages.CompileUpgradeResult;
try {
compileUpgradeResult = await compileDatabaseUpgrade(qs, dbItem, finalDbscheme, scripts, currentUpgradeTmp, progress, token);
}
catch (e) {
void showAndLogErrorMessage(`Compilation of database upgrades failed: ${e}`);
compileUpgradeResult = await compileDatabaseUpgrade(
qs,
dbItem,
finalDbscheme,
scripts,
currentUpgradeTmp,
progress,
token,
);
} catch (e) {
void showAndLogErrorMessage(
`Compilation of database upgrades failed: ${e}`,
);
return;
}
finally {
void qs.logger.log('Done compiling database upgrade.');
} finally {
void qs.logger.log("Done compiling database upgrade.");
}
if (!compileUpgradeResult.compiledUpgrades) {
const error = compileUpgradeResult.error || '[no error message available]';
void showAndLogErrorMessage(`Compilation of database upgrades failed: ${error}`);
const error =
compileUpgradeResult.error || "[no error message available]";
void showAndLogErrorMessage(
`Compilation of database upgrades failed: ${error}`,
);
return;
}
await checkAndConfirmDatabaseUpgrade(compileUpgradeResult.compiledUpgrades, dbItem, qs.cliServer.quiet);
await checkAndConfirmDatabaseUpgrade(
compileUpgradeResult.compiledUpgrades,
dbItem,
qs.cliServer.quiet,
);
try {
void qs.logger.log('Running the following database upgrade:');
void qs.logger.log("Running the following database upgrade:");
getUpgradeDescriptions(compileUpgradeResult.compiledUpgrades).map(s => s.description).join('\n');
const result = await runDatabaseUpgrade(qs, dbItem, compileUpgradeResult.compiledUpgrades, progress, token);
getUpgradeDescriptions(compileUpgradeResult.compiledUpgrades)
.map((s) => s.description)
.join("\n");
const result = await runDatabaseUpgrade(
qs,
dbItem,
compileUpgradeResult.compiledUpgrades,
progress,
token,
);
// TODO Can remove the next lines when https://github.com/github/codeql-team/issues/1241 is fixed
// restart the query server to avoid a bug in the CLI where the upgrade is applied, but the old dbscheme
@@ -201,12 +257,11 @@ export async function upgradeDatabaseExplicit(
await qs.restartQueryServer(progress, token);
return result;
}
catch (e) {
} catch (e) {
void showAndLogErrorMessage(`Database upgrade failed: ${e}`);
return;
} finally {
void qs.logger.log('Done running database upgrade.');
void qs.logger.log("Done running database upgrade.");
}
} finally {
await currentUpgradeTmp.cleanup();
@@ -220,19 +275,18 @@ async function runDatabaseUpgrade(
progress: ProgressCallback,
token: vscode.CancellationToken,
): Promise<messages.RunUpgradeResult> {
if (db.contents === undefined || db.contents.datasetUri === undefined) {
throw new Error('Can\'t upgrade an invalid database.');
throw new Error("Can't upgrade an invalid database.");
}
const database: messages.Dataset = {
dbDir: db.contents.datasetUri.fsPath,
workingSet: 'default'
workingSet: "default",
};
const params: messages.RunUpgradeParams = {
db: database,
timeoutSecs: qs.config.timeoutSecs,
toRun: upgrades
toRun: upgrades,
};
return qs.sendRequest(messages.runUpgrade, params, token, progress);


@@ -1,8 +1,16 @@
import * as I from 'immutable';
import { EvaluationLogProblemReporter, EvaluationLogScanner, EvaluationLogScannerProvider } from './log-scanner';
import { InLayer, ComputeRecursive, SummaryEvent, PipelineRun, ComputeSimple } from './log-summary';
const DEFAULT_WARNING_THRESHOLD = 50;
import * as I from "immutable";
import {
EvaluationLogProblemReporter,
EvaluationLogScanner,
EvaluationLogScannerProvider,
} from "./log-scanner";
import {
InLayer,
ComputeRecursive,
SummaryEvent,
PipelineRun,
ComputeSimple,
} from "./log-summary";
/**
* Like `max`, but returns 0 if no meaningful maximum can be computed.
@@ -19,14 +27,14 @@ function safeMax(it?: Iterable<number>) {
function makeKey(
queryCausingWork: string | undefined,
predicate: string,
suffix = ''
suffix = "",
): string {
if (queryCausingWork === undefined) {
throw new Error(
'queryCausingWork was not defined on an event we expected it to be defined for!'
"queryCausingWork was not defined on an event we expected it to be defined for!",
);
}
return `${queryCausingWork}:${predicate}${suffix ? ' ' + suffix : ''}`;
return `${queryCausingWork}:${predicate}${suffix ? " " + suffix : ""}`;
}
const DEPENDENT_PREDICATES_REGEXP = (() => {
@@ -42,22 +50,22 @@ const DEPENDENT_PREDICATES_REGEXP = (() => {
// INVOKE HIGHER-ORDER RELATION rel ON <id, ..., id>
String.raw`INVOKE\s+HIGHER-ORDER\s+RELATION\s[^\s]+\sON\s+<([0-9a-zA-Z:#_<>]+)((?:,[0-9a-zA-Z:#_<>]+)*)>`,
// SELECT id
String.raw`SELECT\s+([0-9a-zA-Z:#_]+)`
String.raw`SELECT\s+([0-9a-zA-Z:#_]+)`,
];
return new RegExp(
`${String.raw`\{[0-9]+\}\s+[0-9a-zA-Z]+\s=\s(?:` + regexps.join('|')})`
`${String.raw`\{[0-9]+\}\s+[0-9a-zA-Z]+\s=\s(?:` + regexps.join("|")})`,
);
})();
function getDependentPredicates(operations: string[]): I.List<string> {
return I.List(operations).flatMap(operation => {
return I.List(operations).flatMap((operation) => {
const matches = DEPENDENT_PREDICATES_REGEXP.exec(operation.trim());
if (matches !== null) {
return I.List(matches)
.rest() // Skip the first group as it's just the entire string
.filter(x => !!x && !x.match('r[0-9]+|PRIMITIVE')) // Only keep the references to predicates.
.flatMap(x => x.split(',')) // Group 2 in the INVOKE HIGHER_ORDER RELATION case is a comma-separated list of identifiers.
.filter(x => !!x); // Remove empty strings
.filter((x) => !!x && !x.match("r[0-9]+|PRIMITIVE")) // Only keep the references to predicates.
.flatMap((x) => x.split(",")) // Group 2 in the INVOKE HIGHER_ORDER RELATION case is a comma-separated list of identifiers.
.filter((x) => !!x); // Remove empty strings
} else {
return I.List();
}
@@ -66,9 +74,9 @@ function getDependentPredicates(operations: string[]): I.List<string> {
function getMainHash(event: InLayer | ComputeRecursive): string {
switch (event.evaluationStrategy) {
case 'IN_LAYER':
case "IN_LAYER":
return event.mainHash;
case 'COMPUTE_RECURSIVE':
case "COMPUTE_RECURSIVE":
return event.raHash;
}
}
@@ -76,16 +84,20 @@ function getMainHash(event: InLayer | ComputeRecursive): string {
/**
* Sum arrays a and b element-wise. The shorter array is padded with 0s if the arrays are not the same length.
*/
function pointwiseSum(a: Int32Array, b: Int32Array, problemReporter: EvaluationLogProblemReporter): Int32Array {
function pointwiseSum(
a: Int32Array,
b: Int32Array,
problemReporter: EvaluationLogProblemReporter,
): Int32Array {
function reportIfInconsistent(ai: number, bi: number) {
if (ai === -1 && bi !== -1) {
problemReporter.log(
`Operation was not evaluated in the first pipeline, but it was evaluated in the accumulated pipeline (with tuple count ${bi}).`
`Operation was not evaluated in the first pipeline, but it was evaluated in the accumulated pipeline (with tuple count ${bi}).`,
);
}
if (ai !== -1 && bi === -1) {
problemReporter.log(
`Operation was evaluated in the first pipeline (with tuple count ${ai}), but it was not evaluated in the accumulated pipeline.`
`Operation was evaluated in the first pipeline (with tuple count ${ai}), but it was not evaluated in the accumulated pipeline.`,
);
}
}
@@ -117,7 +129,7 @@ function pushValue<K, V>(m: Map<K, V[]>, k: K, v: V) {
function computeJoinOrderBadness(
maxTupleCount: number,
maxDependentPredicateSize: number,
resultSize: number
resultSize: number,
): number {
return maxTupleCount / Math.max(maxDependentPredicateSize, resultSize);
}
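To make the badness metric above concrete, here is a tiny worked example with invented numbers (not taken from any real evaluator log): a pipeline step that produced 1,000,000 tuples over a largest dependent predicate of 20,000 rows and a result of 5,000 rows scores 1,000,000 / max(20,000, 5,000) = 50, i.e. exactly the value previously hard-coded as DEFAULT_WARNING_THRESHOLD and now supplied by the scanner provider below.
// Illustration only; the inputs are made up.
const exampleMetric = computeJoinOrderBadness(1_000_000, 20_000, 5_000);
// exampleMetric === 50, so with a warning threshold of 50 this relation would be reported.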
@@ -135,7 +147,10 @@ interface Bucket {
class JoinOrderScanner implements EvaluationLogScanner {
// Map a predicate hash to its result size
private readonly predicateSizes = new Map<string, number>();
private readonly layerEvents = new Map<string, (ComputeRecursive | InLayer)[]>();
private readonly layerEvents = new Map<
string,
(ComputeRecursive | InLayer)[]
>();
// Map a key of the form 'query-with-demand : predicate name' to its badness input.
private readonly maxTupleCountMap = new Map<string, number[]>();
private readonly resultSizeMap = new Map<string, number[]>();
@@ -144,13 +159,13 @@ class JoinOrderScanner implements EvaluationLogScanner {
constructor(
private readonly problemReporter: EvaluationLogProblemReporter,
private readonly warningThreshold: number) {
}
private readonly warningThreshold: number,
) {}
public onEvent(event: SummaryEvent): void {
if (
event.completionType !== undefined &&
event.completionType !== 'SUCCESS'
event.completionType !== "SUCCESS"
) {
return; // Skip any evaluation that wasn't successful
}
@@ -165,20 +180,20 @@ class JoinOrderScanner implements EvaluationLogScanner {
private recordPredicateSizes(event: SummaryEvent): void {
switch (event.evaluationStrategy) {
case 'EXTENSIONAL':
case 'COMPUTED_EXTENSIONAL':
case 'COMPUTE_SIMPLE':
case 'CACHACA':
case 'CACHE_HIT': {
case "EXTENSIONAL":
case "COMPUTED_EXTENSIONAL":
case "COMPUTE_SIMPLE":
case "CACHACA":
case "CACHE_HIT": {
this.predicateSizes.set(event.raHash, event.resultSize);
break;
}
case 'SENTINEL_EMPTY': {
case "SENTINEL_EMPTY": {
this.predicateSizes.set(event.raHash, 0);
break;
}
case 'COMPUTE_RECURSIVE':
case 'IN_LAYER': {
case "COMPUTE_RECURSIVE":
case "IN_LAYER": {
this.predicateSizes.set(event.raHash, event.resultSize);
// layerEvents are indexed by the mainHash.
const hash = getMainHash(event);
@@ -191,22 +206,36 @@ class JoinOrderScanner implements EvaluationLogScanner {
}
}
private reportProblemIfNecessary(event: SummaryEvent, iteration: number, metric: number): void {
private reportProblemIfNecessary(
event: SummaryEvent,
iteration: number,
metric: number,
): void {
if (metric >= this.warningThreshold) {
this.problemReporter.reportProblem(event.predicateName, event.raHash, iteration,
`Relation '${event.predicateName}' has an inefficient join order. Its join order metric is ${metric.toFixed(2)}, which is larger than the threshold of ${this.warningThreshold.toFixed(2)}.`);
this.problemReporter.reportProblem(
event.predicateName,
event.raHash,
iteration,
`Relation '${
event.predicateName
}' has an inefficient join order. Its join order metric is ${metric.toFixed(
2,
)}, which is larger than the threshold of ${this.warningThreshold.toFixed(
2,
)}.`,
);
}
}
private computeBadnessMetric(event: SummaryEvent): void {
if (
event.completionType !== undefined &&
event.completionType !== 'SUCCESS'
event.completionType !== "SUCCESS"
) {
return; // Skip any evaluation that wasn't successful
}
switch (event.evaluationStrategy) {
case 'COMPUTE_SIMPLE': {
case "COMPUTE_SIMPLE": {
if (!event.pipelineRuns) {
// skip if the optional pipelineRuns field is not present.
break;
@@ -226,16 +255,20 @@ class JoinOrderScanner implements EvaluationLogScanner {
pushValue(
this.maxDependentPredicateSizeMap,
key,
maxDependentPredicateSize
maxDependentPredicateSize,
);
const metric = computeJoinOrderBadness(
maxTupleCount,
maxDependentPredicateSize,
resultSize!,
);
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize!);
this.joinOrderMetricMap.set(key, metric);
this.reportProblemIfNecessary(event, 0, metric);
}
break;
}
case 'COMPUTE_RECURSIVE': {
case "COMPUTE_RECURSIVE": {
// Compute the badness metric for a recursive predicate for each ordering.
const sccMetricInput = this.badnessInputsForRecursiveDelta(event);
// Loop through each predicate in the SCC
@@ -246,12 +279,12 @@ class JoinOrderScanner implements EvaluationLogScanner {
const key = makeKey(
event.queryCausingWork,
predicate,
`(${raReference})`
`(${raReference})`,
);
const maxTupleCount = Math.max(...bucket.tupleCounts);
const resultSize = bucket.resultSize;
const maxDependentPredicateSize = Math.max(
...bucket.dependentPredicateSizes.values()
...bucket.dependentPredicateSizes.values(),
);
if (maxDependentPredicateSize > 0) {
@@ -260,11 +293,15 @@ class JoinOrderScanner implements EvaluationLogScanner {
pushValue(
this.maxDependentPredicateSizeMap,
key,
maxDependentPredicateSize
maxDependentPredicateSize,
);
const metric = computeJoinOrderBadness(
maxTupleCount,
maxDependentPredicateSize,
resultSize,
);
const metric = computeJoinOrderBadness(maxTupleCount, maxDependentPredicateSize, resultSize);
const oldMetric = this.joinOrderMetricMap.get(key);
if ((oldMetric === undefined) || (metric > oldMetric)) {
if (oldMetric === undefined || metric > oldMetric) {
this.joinOrderMetricMap.set(key, metric);
}
}
@@ -283,14 +320,14 @@ class JoinOrderScanner implements EvaluationLogScanner {
func: (
inLayerEvent: ComputeRecursive | InLayer,
run: PipelineRun,
iteration: number
) => void
iteration: number,
) => void,
): void {
const sccEvents = this.layerEvents.get(event.raHash)!;
const nextPipeline: number[] = new Array(sccEvents.length).fill(0);
const maxIteration = Math.max(
...sccEvents.map(e => e.predicateIterationMillis.length)
...sccEvents.map((e) => e.predicateIterationMillis.length),
);
for (let iteration = 0; iteration < maxIteration; ++iteration) {
@@ -315,19 +352,23 @@ class JoinOrderScanner implements EvaluationLogScanner {
*/
private badnessInputsForNonRecursiveDelta(
pipelineRun: PipelineRun,
event: ComputeSimple
event: ComputeSimple,
): { maxTupleCount: number; maxDependentPredicateSize: number } {
const dependentPredicateSizes = Object.values(event.dependencies).map(hash =>
this.predicateSizes.get(hash) ?? 0 // Should always be present, but zero is a safe default.
const dependentPredicateSizes = Object.values(event.dependencies).map(
(hash) => this.predicateSizes.get(hash) ?? 0, // Should always be present, but zero is a safe default.
);
const maxDependentPredicateSize = safeMax(dependentPredicateSizes);
return {
maxTupleCount: safeMax(pipelineRun.counts),
maxDependentPredicateSize: maxDependentPredicateSize
maxDependentPredicateSize: maxDependentPredicateSize,
};
}
private prevDeltaSizes(event: ComputeRecursive, predicate: string, i: number) {
private prevDeltaSizes(
event: ComputeRecursive,
predicate: string,
i: number,
) {
// If an iteration isn't present in the map it means it was skipped because the optimizer
// inferred that it was empty. So its size is 0.
return this.curDeltaSizes(event, predicate, i - 1);
@@ -337,7 +378,9 @@ class JoinOrderScanner implements EvaluationLogScanner {
// If an iteration isn't present in the map it means it was skipped because the optimizer
// inferred that it was empty. So its size is 0.
return (
this.layerEvents.get(event.raHash)?.find(x => x.predicateName === predicate)?.deltaSizes[i] ?? 0
this.layerEvents
.get(event.raHash)
?.find((x) => x.predicateName === predicate)?.deltaSizes[i] ?? 0
);
}
@@ -348,42 +391,42 @@ class JoinOrderScanner implements EvaluationLogScanner {
event: ComputeRecursive,
inLayerEvent: InLayer | ComputeRecursive,
raReference: string,
iteration: number
iteration: number,
) {
const dependentPredicates = getDependentPredicates(
inLayerEvent.ra[raReference]
inLayerEvent.ra[raReference],
);
let dependentPredicateSizes: I.Map<string, number>;
// We treat the base case as a non-recursive pipeline. In that case, the dependent predicates are
// the dependencies of the base case and the cur_deltas.
if (raReference === 'base') {
if (raReference === "base") {
dependentPredicateSizes = I.Map(
dependentPredicates.map((pred): [string, number] => {
// A base case cannot contain a `prev_delta`, but it can contain a `cur_delta`.
let size = 0;
if (pred.endsWith('#cur_delta')) {
if (pred.endsWith("#cur_delta")) {
size = this.curDeltaSizes(
event,
pred.slice(0, -'#cur_delta'.length),
iteration
pred.slice(0, -"#cur_delta".length),
iteration,
);
} else {
const hash = event.dependencies[pred];
size = this.predicateSizes.get(hash)!;
}
return [pred, size];
})
}),
);
} else {
// It's a non-base case in a recursive pipeline. In that case, the dependent predicates are
// only the prev_deltas.
dependentPredicateSizes = I.Map(
dependentPredicates
.flatMap(pred => {
.flatMap((pred) => {
// If it's actually a prev_delta
if (pred.endsWith('#prev_delta')) {
if (pred.endsWith("#prev_delta")) {
// Return the predicate without the #prev_delta suffix.
return [pred.slice(0, -'#prev_delta'.length)];
return [pred.slice(0, -"#prev_delta".length)];
} else {
// Not a recursive delta. Skip it.
return [];
@@ -392,7 +435,7 @@ class JoinOrderScanner implements EvaluationLogScanner {
.map((prev): [string, number] => {
const size = this.prevDeltaSizes(event, prev, iteration);
return [prev, size];
})
}),
);
}
@@ -403,7 +446,9 @@ class JoinOrderScanner implements EvaluationLogScanner {
/**
* Compute the metric input for all the events in a SCC that starts with main node `event`
*/
private badnessInputsForRecursiveDelta(event: ComputeRecursive): Map<string, Map<string, Bucket>> {
private badnessInputsForRecursiveDelta(
event: ComputeRecursive,
): Map<string, Map<string, Bucket>> {
// nameToOrderToBucket : predicate name -> ordering (i.e., standard, order_500000, etc.) -> bucket
const nameToOrderToBucket = new Map<string, Map<string, Bucket>>();
@@ -419,7 +464,7 @@ class JoinOrderScanner implements EvaluationLogScanner {
orderTobucket.set(raReference, {
tupleCounts: new Int32Array(0),
resultSize: 0,
dependentPredicateSizes: I.Map()
dependentPredicateSizes: I.Map(),
});
}
@@ -427,7 +472,7 @@ class JoinOrderScanner implements EvaluationLogScanner {
event,
inLayerEvent,
raReference,
iteration
iteration,
);
const bucket = orderTobucket.get(raReference)!;
@@ -435,18 +480,19 @@ class JoinOrderScanner implements EvaluationLogScanner {
const newTupleCounts = pointwiseSum(
bucket.tupleCounts,
new Int32Array(run.counts),
this.problemReporter
this.problemReporter,
);
const resultSize = bucket.resultSize + deltaSize;
// Pointwise sum the deltas.
const newDependentPredicateSizes = bucket.dependentPredicateSizes.mergeWith(
(oldSize, newSize) => oldSize + newSize,
dependentPredicateSizes
);
const newDependentPredicateSizes =
bucket.dependentPredicateSizes.mergeWith(
(oldSize, newSize) => oldSize + newSize,
dependentPredicateSizes,
);
orderTobucket.set(raReference, {
tupleCounts: newTupleCounts,
resultSize: resultSize,
dependentPredicateSizes: newDependentPredicateSizes
dependentPredicateSizes: newDependentPredicateSizes,
});
});
return nameToOrderToBucket;
@@ -454,7 +500,12 @@ class JoinOrderScanner implements EvaluationLogScanner {
}
export class JoinOrderScannerProvider implements EvaluationLogScannerProvider {
public createScanner(problemReporter: EvaluationLogProblemReporter): EvaluationLogScanner {
return new JoinOrderScanner(problemReporter, DEFAULT_WARNING_THRESHOLD);
constructor(private readonly getThreshold: () => number) {}
public createScanner(
problemReporter: EvaluationLogProblemReporter,
): EvaluationLogScanner {
const threshold = this.getThreshold();
return new JoinOrderScanner(problemReporter, threshold);
}
}


@@ -1,4 +1,4 @@
import * as fs from 'fs-extra';
import * as fs from "fs-extra";
/**
* Read a file consisting of multiple JSON objects. Each object is separated from the previous one
@@ -10,8 +10,11 @@ import * as fs from 'fs-extra';
* @param path The path to the file.
* @param handler Callback to be invoked for each top-level JSON object in order.
*/
export async function readJsonlFile(path: string, handler: (value: any) => Promise<void>): Promise<void> {
const logSummary = await fs.readFile(path, 'utf-8');
export async function readJsonlFile(
path: string,
handler: (value: any) => Promise<void>,
): Promise<void> {
const logSummary = await fs.readFile(path, "utf-8");
// Remove newline delimiters because summary is in .jsonl format.
const jsonSummaryObjects: string[] = logSummary.split(/\r?\n\r?\n/g);

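A minimal usage sketch for readJsonlFile above (hypothetical caller; the module path is an assumption, since the file name is not shown in this view). The handler is awaited once per top-level JSON object in the blank-line-separated summary:
import { readJsonlFile } from "./jsonl-reader"; // assumed path
// Count the top-level JSON objects in an evaluator log summary.
async function countSummaryObjects(summaryPath: string): Promise<number> {
  let count = 0;
  await readJsonlFile(summaryPath, async (value) => {
    // `value` is one parsed top-level object from the summary file.
    if (value !== undefined) {
      count++;
    }
  });
  return count;
}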

@@ -1,11 +1,14 @@
import { Diagnostic, DiagnosticSeverity, languages, Range, Uri } from 'vscode';
import { DisposableObject } from '../pure/disposable-object';
import { QueryHistoryManager } from '../query-history';
import { QueryHistoryInfo } from '../query-history-info';
import { EvaluationLogProblemReporter, EvaluationLogScannerSet } from './log-scanner';
import { PipelineInfo, SummarySymbols } from './summary-parser';
import * as fs from 'fs-extra';
import { logger } from '../logging';
import { Diagnostic, DiagnosticSeverity, languages, Range, Uri } from "vscode";
import { DisposableObject } from "../pure/disposable-object";
import { QueryHistoryManager } from "../query-history";
import { QueryHistoryInfo } from "../query-history-info";
import {
EvaluationLogProblemReporter,
EvaluationLogScannerSet,
} from "./log-scanner";
import { PipelineInfo, SummarySymbols } from "./summary-parser";
import * as fs from "fs-extra";
import { logger } from "../logging";
/**
* Compute the key used to find a predicate in the summary symbols.
@@ -25,10 +28,14 @@ function predicateSymbolKey(name: string, raHash: string): string {
class ProblemReporter implements EvaluationLogProblemReporter {
public readonly diagnostics: Diagnostic[] = [];
constructor(private readonly symbols: SummarySymbols | undefined) {
}
constructor(private readonly symbols: SummarySymbols | undefined) {}
public reportProblem(predicateName: string, raHash: string, iteration: number, message: string): void {
public reportProblem(
predicateName: string,
raHash: string,
iteration: number,
message: string,
): void {
const nameWithHash = predicateSymbolKey(predicateName, raHash);
const predicateSymbol = this.symbols?.predicates[nameWithHash];
let predicateInfo: PipelineInfo | undefined = undefined;
@@ -36,8 +43,15 @@ class ProblemReporter implements EvaluationLogProblemReporter {
predicateInfo = predicateSymbol.iterations[iteration];
}
if (predicateInfo !== undefined) {
const range = new Range(predicateInfo.raStartLine, 0, predicateInfo.raEndLine + 1, 0);
this.diagnostics.push(new Diagnostic(range, message, DiagnosticSeverity.Error));
const range = new Range(
predicateInfo.raStartLine,
0,
predicateInfo.raEndLine + 1,
0,
);
this.diagnostics.push(
new Diagnostic(range, message, DiagnosticSeverity.Error),
);
}
}
@@ -48,24 +62,30 @@ class ProblemReporter implements EvaluationLogProblemReporter {
export class LogScannerService extends DisposableObject {
public readonly scanners = new EvaluationLogScannerSet();
private readonly diagnosticCollection = this.push(languages.createDiagnosticCollection('ql-eval-log'));
private readonly diagnosticCollection = this.push(
languages.createDiagnosticCollection("ql-eval-log"),
);
private currentItem: QueryHistoryInfo | undefined = undefined;
constructor(qhm: QueryHistoryManager) {
super();
this.push(qhm.onDidChangeCurrentQueryItem(async (item) => {
if (item !== this.currentItem) {
this.currentItem = item;
await this.scanEvalLog(item);
}
}));
this.push(
qhm.onDidChangeCurrentQueryItem(async (item) => {
if (item !== this.currentItem) {
this.currentItem = item;
await this.scanEvalLog(item);
}
}),
);
this.push(qhm.onDidCompleteQuery(async (item) => {
if (item === this.currentItem) {
await this.scanEvalLog(item);
}
}));
this.push(
qhm.onDidCompleteQuery(async (item) => {
if (item === this.currentItem) {
await this.scanEvalLog(item);
}
}),
);
}
/**
@@ -73,18 +93,21 @@ export class LogScannerService extends DisposableObject {
*
* @param query The query whose log is to be scanned.
*/
public async scanEvalLog(
query: QueryHistoryInfo | undefined
): Promise<void> {
public async scanEvalLog(query: QueryHistoryInfo | undefined): Promise<void> {
this.diagnosticCollection.clear();
if ((query?.t !== 'local')
|| (query.evalLogSummaryLocation === undefined)
|| (query.jsonEvalLogSummaryLocation === undefined)) {
if (
query?.t !== "local" ||
query.evalLogSummaryLocation === undefined ||
query.jsonEvalLogSummaryLocation === undefined
) {
return;
}
const diagnostics = await this.scanLog(query.jsonEvalLogSummaryLocation, query.evalLogSummarySymbolsLocation);
const diagnostics = await this.scanLog(
query.jsonEvalLogSummaryLocation,
query.evalLogSummarySymbolsLocation,
);
const uri = Uri.file(query.evalLogSummaryLocation);
this.diagnosticCollection.set(uri, diagnostics);
}
@@ -95,10 +118,15 @@ export class LogScannerService extends DisposableObject {
* @param symbolsLocation The file path of the symbols file for the human-readable log summary.
* @returns An array of `Diagnostic`s representing the problems found by scanners.
*/
private async scanLog(jsonSummaryLocation: string, symbolsLocation: string | undefined): Promise<Diagnostic[]> {
private async scanLog(
jsonSummaryLocation: string,
symbolsLocation: string | undefined,
): Promise<Diagnostic[]> {
let symbols: SummarySymbols | undefined = undefined;
if (symbolsLocation !== undefined) {
symbols = JSON.parse(await fs.readFile(symbolsLocation, { encoding: 'utf-8' }));
symbols = JSON.parse(
await fs.readFile(symbolsLocation, { encoding: "utf-8" }),
);
}
const problemReporter = new ProblemReporter(symbols);
