Compare commits
275 Commits
SHA1:
8834111f57, 3d5ef60fe0, 6ab1f4c7ee, d0a12753d8, ac745a6955, 1b737678ba, 19b24f87ec, b2bdcdbcb8, 37c3b03e9a, c7c79175d2,
01e7b3fc0e, 58ea742e85, 188bc53761, 26e619ba71, 00707fca4b, 51627ad971, f21ee5173c, 397693a2cb, 0648ce8ae6, 183de7b6a1,
b6f32ec5d7, 70c2beb510, 31c90a5068, 2ce6fd3bb1, da5852cfbb, 5c83c94335, 21faed0191, 9d6962ede2, d6d5d27d70, 5c7efe5116,
1d42d4848b, 745544cd9c, 38a3778fe1, 50df8cd376, 034bfc230c, d342b060ed, 45fe4aa1d4, 902074f7a5, fd8f88890e, db842dea7c,
eb59ead817, d0e81124ac, 8697dcc80d, ad8dc1e906, ceb8b1f9c1, b77acc5468, 555fa6cf78, f7de8e5d09, 299280cd08, 92bebef49d,
1ce2b365a6, 47929943ba, 0b27912314, 6a65094720, 6746b07879, 593c01388b, dbee8dd57a, 426ab98623, 9fd8c1c3e0, b8cbd75737,
c575edec71, 337e5f81ea, 6d1cf0887a, bc7c956d6d, 90d72cec78, 2f9c8d1bfa, 4b49a8f9bb, 9718aa5806, 7444605970, cce0e146ff,
de24e25486, 8daf3374bf, b27d8a1000, 1ad20fa354, b082455604, ed4b296e41, cf00f8f191, dc3e1ce096, e5ffdd0fbc, 2c7b67e2a4,
4129962fa0, e79f732b84, 49deb2e756, 0be60f3e8f, e294dfdf4a, 870874d7f3, 856f97f859, cfbc44311b, 27a326ab3e, 91ab97e923,
fa7ecb782e, f5dbcc8cc1, c423505c04, e7eb33e0cf, 528f8b951b, 74568b86b1, 039077467a, ef28154b95, 47cabbb331, c1a3c2f6e5,
c210a7fdf4, 184a98531f, 9a6d44b86a, d0df81c524, ba8788e0a5, 767c3cc35c, a6d9b6bbec, f139f1ebf8, d227af4c7b, 2b625c85e8,
5ea1438036, 7a7092de0d, 2ba39647f2, 48ff62f474, 82c7dd52be, d0f122ad87, ef575acd59, 785a5fa48a, 43ea7eb41d, 4b27d0d59c,
1df2e54074, b7eba71ab6, 7375fce82d, eac3836535, e8f750776b, 91a542dce6, b902bfd71d, 6dd6460b33, 36d23e7eec, b65f0490f2,
a8dd368578, 4d130bc2fa, 1b526783e2, 4e4345f0c2, b2df8b6971, 91ea2f089e, fbaf3d1cdf, 7bcf62c6fe, f7c0d4dc91, e81f1a1fde,
c14fa63321, 7554c415af, bbdad0fe2c, b3da120d70, 3689f84612, f99432153b, 704895ba6c, f3780c6305, bb3ec207e3, e56503249d,
37b2e422cd, 4140303b5c, 218be87e88, c7e2d69daa, ba2f44b8ae, 2267e9d4db, 6f85894a11, 8cbd77cf65, 641f714fa4, 216f10d327,
5045b2df60, 857a997037, 262744e6e5, 4444951093, e824fda9e7, ef8bf9fd1b, 69dea08c47, 0360bf294f, 996e8036c1, 2e51c1a657,
f1fe4a20f2, be4059864e, a80098d7df, f45b790591, 7a1f157225, ba9b284606, 1055515d59, 4eee14be61, 7c00768c90, 9038586aab,
800890443e, 0dcc814953, 64ad40e369, f736adc4f1, e89a04d442, 2e6eb1c7bc, fdc36ad36b, 3f6c1055e4, b833591d1e, 82f17c4e53,
790503bd60, 770834756a, bc0506d058, 14b2282015, 12ea1b9598, 6cfa0a93c9, ef2f9d9c90, 1891763d64, 0b4fc76fdb, d036e8144d,
4f2d768e07, 60d777abf1, 34bd7c21a6, 6cf8d0be4f, b340b1a740, ce83d1df23, d63b756520, 47fbf3428f, 889055ee19, c7f08e328d,
0273be9710, e94da0e765, cb15c8dbeb, c7173e86a2, b3c8c25572, 2bad58be71, fe4116fe05, b9b16a8beb, c3b30a30e8, 04e4efb26b,
8d1c8bc725, eb1bf8fd69, 9a77748a59, d77a08fc37, cb2e5026a1, d4a4536d40, ab1966abf1, 6de954143c, 109766d411, 4bd1981a34,
0415ac539c, cf50a71a16, 598f2eb3f2, 480f90ac2f, e15f15358b, 8516940e64, ef69a51741, 9594a5e951, 7b9c7f4e21, 25ff95a2f3,
25779b4db1, 611f55b9cd, b8558a67a3, 3d65ecaac9, 61c9503d52, 01f5a65f7d, 612499916d, a2294bb5cc, 0b7b6f1b13, 8b2fd1a630,
4bb25a7d69, 83bdb54047, 0d380736b3, 54a03c8839, bc1c08c653, 66c0714606, 3a8beb9ab9, afdd20a957, bf5398bfc9, 01085e75ba,
7f332e3374, 39229c26d4, 49114cc143, b8ec847253, a0c85b78c5, f0a354917e, 082c5e4a31, 439cfcc023, 297f5a9f67, e7c79f04c6,
76529572f5, 63d6793795, fc2e6d0432, cb707aea50, cd26f27e6e
.github/dependabot.yml (vendored, 10 changed lines)

@@ -8,8 +8,11 @@ updates:
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
      # @types/node is related to the version of VS Code we're supporting and should
      # not be updated to a newer version of Node automatically. However, patch versions
      # are unrelated to the Node version, so we allow those.
      - dependency-name: "@types/node"
        update-types: ["version-update:semver-major", "version-update:semver-minor"]
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
@@ -17,6 +20,3 @@ updates:
      day: "thursday" # Thursday is arbitrary
    labels:
      - "Update dependencies"
    ignore:
      - dependency-name: "*"
        update-types: ["version-update:semver-minor", "version-update:semver-patch"]
.github/workflows/cli-test.yml (vendored, 40 changed lines)

@@ -109,3 +109,43 @@ jobs:
      if: matrix.os == 'windows-latest'
      run: |
        npm run test:cli-integration

  report-failure:
    name: Report failure on the default branch
    runs-on: ubuntu-latest
    needs: [cli-test]
    if: failure() && github.ref == 'refs/heads/main'
    permissions:
      contents: read
      issues: write
    env:
      GH_TOKEN: ${{ github.token }}
    steps:
      - name: Create GitHub issue
        run: |
          # Set -eu so that we fail if the gh command fails.
          set -eu

          # Try to find an existing open issue if there is one
          ISSUE="$(gh issue list --repo "$GITHUB_REPOSITORY" --label "cli-test-failure" --state "open" --limit 1 --json number -q '.[0].number')"

          if [[ -n "$ISSUE" ]]; then
            echo "Found open issue number $ISSUE ($GITHUB_SERVER_URL/$GITHUB_REPOSITORY/issues/$ISSUE)"
          else
            echo "Did not find an open tracking issue. Creating one."

            ISSUE_BODY="issue-body.md"
            printf "CLI tests have failed on the default branch.\n\n@github/code-scanning-secexp-reviewers" > "$ISSUE_BODY"

            ISSUE="$(gh issue create --repo "$GITHUB_REPOSITORY" --label "cli-test-failure" --title "CLI test failure" --body-file "$ISSUE_BODY")"
            # `gh issue create` returns the full issue URL, not just the number.
            echo "Created issue with URL $ISSUE"
          fi

          COMMENT_FILE="comment.md"
          RUN_URL=$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID
          printf 'CLI test [%s](%s) failed on ref `%s`' "$GITHUB_RUN_ID" "$RUN_URL" "$GITHUB_REF" > "$COMMENT_FILE"

          # `gh issue create` returns an issue URL, and `gh issue list | cut -f 1` returns an issue number.
          # Both are accepted here.
          gh issue comment "$ISSUE" --repo "$GITHUB_REPOSITORY" --body-file "$COMMENT_FILE"
.github/workflows/main.yml (vendored, 2 changed lines)

@@ -47,7 +47,7 @@ jobs:
          cp dist/*.vsix artifacts

      - name: Upload artifacts
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        if: matrix.os == 'ubuntu-latest'
        with:
          name: vscode-codeql-extension
.github/workflows/release.yml (vendored, 8 changed lines)

@@ -54,13 +54,13 @@ jobs:
          echo "ref_name=$REF_NAME" >> "$GITHUB_OUTPUT"

      - name: Upload artifacts
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: vscode-codeql-extension
          path: artifacts

      - name: Upload source maps
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v4
        with:
          name: vscode-codeql-sourcemaps
          path: dist/vscode-codeql/out/*.map
@@ -128,7 +128,7 @@ jobs:
      VSCE_TOKEN: ${{ secrets.VSCE_TOKEN }}
    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3
        uses: actions/download-artifact@v4
        with:
          name: vscode-codeql-extension

@@ -145,7 +145,7 @@ jobs:
      OPEN_VSX_TOKEN: ${{ secrets.OPEN_VSX_TOKEN }}
    steps:
      - name: Download artifact
        uses: actions/download-artifact@v3
        uses: actions/download-artifact@v4
        with:
          name: vscode-codeql-extension
.github/workflows/update-node-version.yml (vendored, new file, 58 lines)

@@ -0,0 +1,58 @@
name: Update Node version
on:
  workflow_dispatch:
  schedule:
    - cron: '15 12 * * *' # At 12:15 PM UTC every day

permissions:
  contents: write
  pull-requests: write

jobs:
  create-pr:
    name: Create PR
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version-file: extensions/ql-vscode/.nvmrc
          cache: 'npm'
          cache-dependency-path: extensions/ql-vscode/package-lock.json
      - name: Install dependencies
        working-directory: extensions/ql-vscode
        run: |
          npm ci
        shell: bash
      - name: Get current Node version
        working-directory: extensions/ql-vscode
        id: get-current-node-version
        run: |
          echo "version=$(cat .nvmrc)" >> $GITHUB_OUTPUT
        shell: bash
      - name: Update Node version
        working-directory: extensions/ql-vscode
        run: |
          npx ts-node scripts/update-node-version.ts
        shell: bash
      - name: Get current Node version
        working-directory: extensions/ql-vscode
        id: get-new-node-version
        run: |
          echo "version=$(cat .nvmrc)" >> $GITHUB_OUTPUT
        shell: bash
      - name: Commit, Push and Open a PR
        uses: ./.github/actions/create-pr
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          base-branch: main
          head-branch: github-action/bump-node-version
          commit-message: Bump Node version to ${{ steps.get-new-node-version.outputs.version }}
          title: Bump Node version to ${{ steps.get-new-node-version.outputs.version }}
          body: >
            The Node version used in the latest version of VS Code has been updated. This PR updates the Node version
            used for integration tests to match.

            The previous Node version was ${{ steps.get-current-node-version.outputs.version }}. This PR updates the
            Node version to ${{ steps.get-new-node-version.outputs.version }}.
@@ -1,4 +1,7 @@
{
  "ul-style": {
    "style": "dash"
  },
  "MD013": false,
  "MD041": false
}
@@ -14,21 +14,21 @@ appearance, race, religion, or sexual identity and orientation.
Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
- The use of sexualized language or imagery and unwelcome sexual attention or
  advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
  address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
- Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities

@@ -55,7 +55,7 @@ a project may be further defined and clarified by project maintainers.
## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at opensource@github.com. All
reported by contacting the project team at <opensource@github.com>. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
@@ -22,12 +22,12 @@ Please note that this project is released with a [Contributor Code of Conduct][c

Here are a few things you can do that will increase the likelihood of your pull request being accepted:

* Follow the [style guide][style].
* Write tests:
  * [Tests that don't require the VS Code API are located here](extensions/ql-vscode/test).
  * [Integration tests that do require the VS Code API are located here](extensions/ql-vscode/src/vscode-tests).
* Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
* Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
- Follow the [style guide][style].
- Write tests:
  - [Tests that don't require the VS Code API are located here](extensions/ql-vscode/test).
  - [Integration tests that do require the VS Code API are located here](extensions/ql-vscode/src/vscode-tests).
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
- Write a [good commit message](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

## Setting up a local build

@@ -99,6 +99,6 @@ More information about Storybook can be found inside the **Overview** page once

## Resources

* [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
* [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
* [GitHub Help](https://help.github.com)
- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
- [GitHub Help](https://help.github.com)
README.md (16 changed lines)

@@ -6,16 +6,16 @@ The extension is released. You can download it from the [Visual Studio Marketpla

To see what has changed in the last few versions of the extension, see the [Changelog](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/CHANGELOG.md).

[](https://github.com/github/vscode-codeql/actions?query=workflow%3A%22Build+Extension%22+branch%3Amaster)
[](https://github.com/github/vscode-codeql/actions?query=workflow%3A%22Build+Extension%22+branch%3Amain)
[](https://marketplace.visualstudio.com/items?itemName=github.vscode-codeql)

## Features

* Enables you to use CodeQL to query databases and discover problems in codebases.
* Shows the flow of data through the results of path queries, which is essential for triaging security results.
* Provides an easy way to run queries from the large, open source repository of [CodeQL security queries](https://github.com/github/codeql).
* Adds IntelliSense to support you writing and editing your own CodeQL query and library files.
* Supports you running CodeQL queries against thousands of repositories on GitHub using multi-repository variant analysis.
- Enables you to use CodeQL to query databases and discover problems in codebases.
- Shows the flow of data through the results of path queries, which is essential for triaging security results.
- Provides an easy way to run queries from the large, open source repository of [CodeQL security queries](https://github.com/github/codeql).
- Adds IntelliSense to support you writing and editing your own CodeQL query and library files.
- Supports you running CodeQL queries against thousands of repositories on GitHub using multi-repository variant analysis.

## Project goals and scope

@@ -25,8 +25,8 @@ This project will track new feature development in CodeQL and, whenever appropri

This extension depends on the following two extensions for required functionality. They will be installed automatically when you install VS Code CodeQL.

* [Test Adapter Converter](https://marketplace.visualstudio.com/items?itemName=ms-vscode.test-adapter-converter)
* [Test Explorer UI](https://marketplace.visualstudio.com/items?itemName=hbenl.vscode-test-explorer)
- [Test Adapter Converter](https://marketplace.visualstudio.com/items?itemName=ms-vscode.test-adapter-converter)
- [Test Explorer UI](https://marketplace.visualstudio.com/items?itemName=hbenl.vscode-test-explorer)

## Contributing
docs/images/about-vscode-chromium.png (new binary file, 86 KiB, not shown)
docs/images/electron-chromium-version.png (new binary file, 10 KiB, not shown)
docs/images/electron-version.png (new binary file, 10 KiB, not shown)
docs/images/github-database-download-prompt.png (new binary file, 19 KiB, not shown)
@@ -7,24 +7,20 @@ We should make sure the CodeQL for VS Code extension works with the Node.js vers

## Checking the version of Node.js supplied by VS Code

You can find this info by seleting "About Visual Studio Code" from the top menu.
You can find this info by selecting "About Visual Studio Code" from the top menu.

## Updating the Node.js version

The following files will need to be updated:
To update the Node.js version, run:

- `extensions/ql-vscode/.nvmrc` - this will enable nvm to automatically switch to the correct Node
  version when you're in the project folder. It will also change the Node version the GitHub Actions
  workflows use.
- `extensions/ql-vscode/package.json` - the "engines.node: '[VERSION]'" setting
- `extensions/ql-vscode/package.json` - the "@types/node: '[VERSION]'" dependency

Then run `npm install` to update the `extensions/ql-vscode/package-lock.json` file.
```bash
npx ts-node scripts/update-node-version.ts
```

## Node.js version used in tests

Unit tests will use whatever version of Node.js is installed locally. In CI this will be the version specified in the workflow.

Integration tests download a copy of VS Code and then will use whatever version of Node.js is provided by VS Code. Our integration tests are currently pinned to an older version of VS Code. See [VS Code version used in tests](./vscode-version.md#vs-code-version-used-in-tests) for more information.
Integration tests download a copy of VS Code and then will use whatever version of Node.js is provided by VS Code. See [VS Code version used in tests](./vscode-version.md#vs-code-version-used-in-tests) for more information.
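The `scripts/update-node-version.ts` script referenced above (and invoked by the new `update-node-version.yml` workflow) is not included in this diff. As a rough, hypothetical sketch of the kind of update it performs, assuming for illustration that the target Node version is passed on the command line (the real script presumably derives it from the Node runtime shipped with the latest VS Code), it would rewrite the same files the old instructions listed:

```typescript
// Hypothetical sketch only; the real scripts/update-node-version.ts is not part of this diff.
import { readFileSync, writeFileSync } from "fs";
import { resolve } from "path";

const newVersion = process.argv[2]; // e.g. "18.17.1" (assumed CLI argument for illustration)
if (!newVersion) {
  throw new Error("Usage: ts-node update-node-version.ts <node-version>");
}

// Assumes the script lives in extensions/ql-vscode/scripts.
const extensionRoot = resolve(__dirname, "..");

// .nvmrc drives nvm locally and the setup-node steps in the GitHub Actions workflows.
writeFileSync(resolve(extensionRoot, ".nvmrc"), `${newVersion}\n`);

// package.json: keep engines.node and the @types/node range in sync with the new version.
const packageJsonPath = resolve(extensionRoot, "package.json");
const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf8"));
packageJson.engines.node = `^${newVersion}`;
const [major, minor] = newVersion.split(".");
packageJson.devDependencies["@types/node"] = `${major}.${minor}.*`;
writeFileSync(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`);

console.log(`Updated Node version to ${newVersion}; run npm install to refresh package-lock.json.`);
```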
@@ -1,33 +1,33 @@

# Releasing (write access required)

1. Determine the new version number. We default to increasing the patch version number, but make our own judgement about whether a change is big enough to warrant a minor version bump. Common reasons for a minor bump could include:
   * Making substantial new features available to all users. This can include lifting a feature flag.
   * Breakage in compatibility with recent versions of the CLI.
   * Minimum required version of VS Code is increased.
   * New telemetry events are added.
   * Deprecation or removal of commands.
   * Accumulation of many changes, none of which are individually big enough to warrant a minor bump, but which together are. This does not include changes which are purely internal to the extension, such as refactoring, or which are only available behind a feature flag.
   - Making substantial new features available to all users. This can include lifting a feature flag.
   - Breakage in compatibility with recent versions of the CLI.
   - Minimum required version of VS Code is increased.
   - New telemetry events are added.
   - Deprecation or removal of commands.
   - Accumulation of many changes, none of which are individually big enough to warrant a minor bump, but which together are. This does not include changes which are purely internal to the extension, such as refactoring, or which are only available behind a feature flag.
1. Create a release branch named after the new version (e.g. `v1.3.6`):
   * For a regular scheduled release this branch will be based on latest `main`.
   * Make sure your local copy of `main` is up to date so you are including all changes.
   * To do a minimal bug-fix release, base the release branch on the tag from the most recent release and then add only the changes you want to release.
   * Choose this option if you want to release a specific set of changes (e.g. a bug fix) and don't want to incur extra risk by including other changes that have been merged to the `main` branch.
   - For a regular scheduled release this branch will be based on latest `main`.
   - Make sure your local copy of `main` is up to date so you are including all changes.
   - To do a minimal bug-fix release, base the release branch on the tag from the most recent release and then add only the changes you want to release.
   - Choose this option if you want to release a specific set of changes (e.g. a bug fix) and don't want to incur extra risk by including other changes that have been merged to the `main` branch.

```bash
git checkout -b <new_release_branch> <previous_release_tag>
```

1. Run the ["Run CLI tests" workflow](https://github.com/github/vscode-codeql/actions/workflows/cli-test.yml) and make sure the tests are green.
   * You can skip this step if you are releasing from `main` and there were no merges since the most recent daily scheduled run of this workflow.
   - You can skip this step if you are releasing from `main` and there were no merges since the most recent daily scheduled run of this workflow.
1. Double-check the `CHANGELOG.md` contains all desired change comments and has the version to be released with date at the top.
   * Go through PRs that have been merged since the previous release and make sure they are properly accounted for.
   * Make sure all changelog entries have links back to their PR(s) if appropriate.
   - Go through PRs that have been merged since the previous release and make sure they are properly accounted for.
   - Make sure all changelog entries have links back to their PR(s) if appropriate.
1. Double-check that the extension `package.json` and `package-lock.json` have the version you intend to release. If you are doing a patch release (as opposed to minor or major version) this should already be correct.
1. Commit any changes made during steps 4 and 5 with a commit message the same as the branch name (e.g. `v1.3.6`).
1. Open a PR for this release.
   * The PR diff should contain:
   * Any missing bits from steps 4 and 5. Most of the time, this will just be updating `CHANGELOG.md` with today's date.
   * If releasing from a branch other than `main`, this PR will also contain the extension changes being released.
   - The PR diff should contain:
   - Any missing bits from steps 4 and 5. Most of the time, this will just be updating `CHANGELOG.md` with today's date.
   - If releasing from a branch other than `main`, this PR will also contain the extension changes being released.
1. Build the extension using `npm run build` and install it on your VS Code using "Install from VSIX".
1. Go through [our test plan](./test-plan.md) to ensure that the extension is working as expected.
1. Create a new tag on the release branch with your new version (named after the release), e.g.

@@ -37,8 +37,8 @@

```

1. Merge the release PR into `main`.
   * If there are conflicts in the changelog, make sure to place any new changelog entries at the top, above the section for the current release, as these new entries are not part of the current release and should be placed in the "unreleased" section.
   * The release PR must be merged before pushing the tag to ensure that we always release a commit that is present on the `main` branch. It's not required that the commit is the head of the `main` branch, but there should be no chance of a future release accidentally not including changes from this release.
   - If there are conflicts in the changelog, make sure to place any new changelog entries at the top, above the section for the current release, as these new entries are not part of the current release and should be placed in the "unreleased" section.
   - The release PR must be merged before pushing the tag to ensure that we always release a commit that is present on the `main` branch. It's not required that the commit is the head of the `main` branch, but there should be no chance of a future release accidentally not including changes from this release.
1. Push the new tag up:

```bash

@@ -46,13 +46,13 @@

```

1. Find the [Release](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease) workflow run that was just triggered by pushing the tag, and monitor the status of the release build.
   * DO NOT approve the "publish" stages of the workflow yet.
   - DO NOT approve the "publish" stages of the workflow yet.
1. Download the VSIX from the draft GitHub release at the top of [the releases page](https://github.com/github/vscode-codeql/releases) that is created when the release build finishes.
1. Unzip the `.vsix` and inspect its `package.json` to make sure the version is what you expect,
   or look at the source if there's any doubt the right code is being shipped.
1. Install the `.vsix` file into your vscode IDE and ensure the extension can load properly. Run a single command (like run query, or add database).
1. Approve the deployments of the [Release](https://github.com/github/vscode-codeql/actions?query=workflow%3ARelease) workflow run. This will automatically publish to Open VSX and VS Code Marketplace.
   * If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
   - If there is an authentication failure when publishing, be sure to check that the authentication keys haven't expired. See below.
1. Go to the draft GitHub release in [the releases page](https://github.com/github/vscode-codeql/releases), click 'Edit', add some summary description, and publish it.
1. Confirm the new release is marked as the latest release.
1. If documentation changes need to be published, notify documentation team that release has been made.
@@ -185,6 +185,24 @@ Note that this test requires the feature flag: `codeQL.model.flowGeneration`
2. Click "Generate".
   - Check that rows are filled out.

### GitHub database download

#### Test case 1: Download a database

Open a clone of the [`github/codeql`](https://github.com/github/codeql) repository as a folder.

1. Wait a few seconds until the CodeQL extension is fully initialized.
   - Check that the following prompt appears:

   - If the prompt does not appear, ensure that the `codeQL.githubDatabase.download` setting is not set in workspace or user settings.

2. Click "Download".
3. Select the "C#" and "JavaScript" databases.
   - Check that there are separate notifications for both downloads.
   - Check that both databases are added when the downloads are complete.

### General

#### Test case 1: Change to a different colour theme
@@ -2,14 +2,14 @@

We have several types of tests:

* Unit tests: these live in the `tests/unit-tests/` directory
* View tests: these live in `src/view/variant-analysis/__tests__/`
* VSCode integration tests:
  * `test/vscode-tests/activated-extension` tests: These are intended to cover functionality that require the full extension to be activated but don't require the CLI. This suite is not run against multiple versions of the CLI in CI.
  * `test/vscode-tests/no-workspace` tests: These are intended to cover functionality around not having a workspace. The extension is not activated in these tests.
  * `test/vscode-tests/minimal-workspace` tests: These are intended to cover functionality that need a workspace but don't require the full extension to be activated.
* CLI integration tests: these live in `test/vscode-tests/cli-integration`
  * These tests are intended to cover functionality that is related to the integration between the CodeQL CLI and the extension. These tests are run against each supported versions of the CLI in CI.
- Unit tests: these live in the `tests/unit-tests/` directory
- View tests: these live in `src/view/variant-analysis/__tests__/`
- VSCode integration tests:
  - `test/vscode-tests/activated-extension` tests: These are intended to cover functionality that require the full extension to be activated but don't require the CLI. This suite is not run against multiple versions of the CLI in CI.
  - `test/vscode-tests/no-workspace` tests: These are intended to cover functionality around not having a workspace. The extension is not activated in these tests.
  - `test/vscode-tests/minimal-workspace` tests: These are intended to cover functionality that need a workspace but don't require the full extension to be activated.
- CLI integration tests: these live in `test/vscode-tests/cli-integration`
  - These tests are intended to cover functionality that is related to the integration between the CodeQL CLI and the extension. These tests are run against each supported versions of the CLI in CI.

The CLI integration tests require an instance of the CodeQL CLI to run so they will require some extra setup steps. When adding new tests to our test suite, please be mindful of whether they need to be in the cli-integration folder. If the tests don't depend on the CLI, they are better suited to being a VSCode integration test.

@@ -26,9 +26,9 @@ Pre-requisites:

Then, from the `extensions/ql-vscode` directory, use the appropriate command to run the tests:

* Unit tests: `npm run test:unit`
* View Tests: `npm run test:view`
* VSCode integration tests: `npm run test:vscode-integration`
- Unit tests: `npm run test:unit`
- View Tests: `npm run test:view`
- VSCode integration tests: `npm run test:vscode-integration`

#### Running CLI integration tests from the terminal

@@ -48,9 +48,9 @@ Alternatively, you can run the tests inside of VSCode. There are several VSCode

You will need to run tests using a task from inside of VS Code, under the "Run and Debug" view:

* Unit tests: run the _Launch Unit Tests_ task
* View Tests: run the _Launch Unit Tests - React_ task
* VSCode integration tests: run the _Launch Unit Tests - No Workspace_ and _Launch Unit Tests - Minimal Workspace_ tasks
- Unit tests: run the _Launch Unit Tests_ task
- View Tests: run the _Launch Unit Tests - React_ task
- VSCode integration tests: run the _Launch Unit Tests - No Workspace_ and _Launch Unit Tests - Minimal Workspace_ tasks

#### Running CLI integration tests from VSCode
@@ -24,10 +24,18 @@ Also consider what percentage of our users are using each VS Code version. This

## How to update the VS Code version

To provide a good experience to users, it is recommented to update the `MIN_VERSION` in `extension.ts` first and release, and then update the `vscode` version in `package.json` and release again. By stagging this update across two releases it gives users on older VS Code versions a chance to upgrade before it silently refuses to upgrade them.
To provide a good experience to users, it is recommented to update the `MIN_VERSION` in `extension.ts` first and release, and then update the `vscode` version in `package.json` and release again.
By staggering this update across two releases it gives users on older VS Code versions a chance to upgrade before it silently refuses to upgrade them.

After updating the minimum version in `package.json`, make sure to also run the following command to update any generated
files dependent on this version:

```bash
npm run generate
```

## VS Code version used in tests

Our integration tests are currently pinned to use an older version of VS Code due to <https://github.com/github/vscode-codeql/issues/2402>.
This version is specified in [`jest-runner-vscode.config.base.js`](https://github.com/github/vscode-codeql/blob/d93f2b67c84e79737b0ce4bb74e31558b5f5166e/extensions/ql-vscode/test/vscode-tests/jest-runner-vscode.config.base.js#L17).
Until this is resolved this will limit us updating our minimum supported version of VS Code.
The integration tests use the latest stable version of VS Code. This is specified in
the [`test/vscode-tests/jest-runner-vscode.config.base.js`](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/test/vscode-tests/jest-runner-vscode.config.base.js#L15)
file. This shouldn't need to be updated unless there is a breaking change in VS Code that prevents the tests from running.
@@ -21,16 +21,16 @@ const baseConfig = {
  },
  extends: [
    "eslint:recommended",
    "plugin:github/react",
    "plugin:github/recommended",
    "plugin:github/typescript",
    "plugin:jest-dom/recommended",
    "plugin:prettier/recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:import/recommended",
    "plugin:import/typescript",
  ],
  rules: {
    "@typescript-eslint/await-thenable": "error",
    "@typescript-eslint/no-use-before-define": 0,
    "@typescript-eslint/no-unused-vars": [
      "warn",
      {
@@ -39,40 +39,37 @@ const baseConfig = {
        ignoreRestSiblings: false,
      },
    ],
    "@typescript-eslint/explicit-function-return-type": "off",
    "@typescript-eslint/explicit-module-boundary-types": "off",
    "@typescript-eslint/no-non-null-assertion": "off",
    "@typescript-eslint/no-explicit-any": "off",
    "@typescript-eslint/no-floating-promises": ["error", { ignoreVoid: true }],
    "@typescript-eslint/no-invalid-this": "off",
    "@typescript-eslint/no-shadow": "off",
    "prefer-const": ["warn", { destructuring: "all" }],
    "@typescript-eslint/no-throw-literal": "error",
    "no-useless-escape": 0,
    camelcase: "off",
    "@typescript-eslint/consistent-type-imports": "error",
    "import/consistent-type-specifier-style": ["error", "prefer-top-level"],
    curly: ["error", "all"],
    "escompat/no-regexp-lookbehind": "off",
    "etc/no-implicit-any-catch": "error",
    "filenames/match-regex": "off",
    "filenames/match-regexp": "off",
    "func-style": "off",
    "i18n-text/no-en": "off",
    "import/named": "off",
    "import/no-dynamic-require": "off",
    "import/no-dynamic-required": "off",
    "import/no-anonymous-default-export": "off",
    "import/no-commonjs": "off",
    "import/no-mutable-exports": "off",
    "import/no-namespace": "off",
    "import/no-unresolved": "off",
    "import/no-webpack-loader-syntax": "off",
    "no-invalid-this": "off",
    "no-fallthrough": "off",
    "no-console": "off",
    "no-shadow": "off",
    "github/array-foreach": "off",
    "github/no-then": "off",
    "react/jsx-key": ["error", { checkFragmentShorthand: true }],
    "import/no-cycle": "error",
    // Never allow extensions in import paths, except for JSON files where they are required.
    "import/extensions": ["error", "never", { json: "always" }],
  },
  settings: {
    "import/resolver": {
      typescript: true,
      node: true,
    },
    "import/extensions": [".js", ".jsx", ".ts", ".tsx", ".json"],
    // vscode and sarif don't exist on-disk, but only provide types.
    "import/core-modules": ["vscode", "sarif"],
  },
};

@@ -88,8 +85,10 @@ module.exports = {
    extends: [
      ...baseConfig.extends,
      "plugin:react/recommended",
      "plugin:react/jsx-runtime",
      "plugin:react-hooks/recommended",
      "plugin:storybook/recommended",
      "plugin:github/react",
    ],
    rules: {
      ...baseConfig.rules,
@@ -108,7 +107,9 @@ module.exports = {
    extends: [
      ...baseConfig.extends,
      "plugin:react/recommended",
      "plugin:react/jsx-runtime",
      "plugin:react-hooks/recommended",
      "plugin:github/react",
    ],
    rules: {
      ...baseConfig.rules,
@@ -138,6 +139,8 @@ module.exports = {
    },
    rules: {
      ...baseConfig.rules,
      // We want to allow mocking of functions in modules, so we need to allow namespace imports.
      "import/no-namespace": "off",
      "@typescript-eslint/ban-types": [
        "error",
        {
@@ -172,5 +175,17 @@ module.exports = {
      "@typescript-eslint/no-var-requires": "off",
    },
  },
  {
    files: [".storybook/**/*.tsx"],
    parserOptions: {
      project: resolve(__dirname, ".storybook/tsconfig.json"),
    },
    rules: {
      ...baseConfig.rules,
      // Storybook doesn't use the automatic JSX runtime in the addon yet, so we need to allow
      // `React` to be imported.
      "import/no-namespace": ["error", { ignore: ["react"] }],
    },
  },
  ],
};
@@ -1,4 +1,4 @@
import { Preview } from "@storybook/react";
import type { Preview } from "@storybook/react";
import { themes } from "@storybook/theming";
import { action } from "@storybook/addon-actions";
@@ -1,5 +1,6 @@
import * as React from "react";
import { FunctionComponent, useCallback } from "react";
import type { FunctionComponent } from "react";
import { useCallback } from "react";

import { useGlobals } from "@storybook/manager-api";
import {
@@ -8,27 +8,27 @@ import { VSCodeTheme } from "./theme";

const themeFiles: { [key in VSCodeTheme]: string } = {
  [VSCodeTheme.Dark]:
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
    require("!file-loader?modules!../../src/stories/vscode-theme-dark.css")
      .default,
  [VSCodeTheme.Light]:
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
    require("!file-loader?modules!../../src/stories/vscode-theme-light.css")
      .default,
  [VSCodeTheme.LightHighContrast]:
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
    require("!file-loader?modules!../../src/stories/vscode-theme-light-high-contrast.css")
      .default,
  [VSCodeTheme.DarkHighContrast]:
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
    require("!file-loader?modules!../../src/stories/vscode-theme-dark-high-contrast.css")
      .default,
  [VSCodeTheme.GitHubLightDefault]:
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
    require("!file-loader?modules!../../src/stories/vscode-theme-github-light-default.css")
      .default,
  [VSCodeTheme.GitHubDarkDefault]:
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    // eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs,import/no-webpack-loader-syntax
    require("!file-loader?modules!../../src/stories/vscode-theme-github-dark-default.css")
      .default,
};
@@ -1,5 +1,14 @@
# CodeQL for Visual Studio Code: Changelog

## 1.12.0 - 11 January 2024

- Add a prompt for downloading a GitHub database when opening a GitHub repository. [#3138](https://github.com/github/vscode-codeql/pull/3138)
- Avoid showing a popup when hovering over source elements in database source files. [#3125](https://github.com/github/vscode-codeql/pull/3125)
- Add comparison of alerts when comparing query results. This allows viewing path explanations for differences in alerts. [#3113](https://github.com/github/vscode-codeql/pull/3113)
- Fix a bug where the CodeQL CLI and variant analysis results were corrupted after extraction in VS Code Insiders. [#3151](https://github.com/github/vscode-codeql/pull/3151) & [#3152](https://github.com/github/vscode-codeql/pull/3152)
- Show progress when extracting the CodeQL CLI distribution during installation. [#3157](https://github.com/github/vscode-codeql/pull/3157)
- Add option to cancel opening the model editor. [#3189](https://github.com/github/vscode-codeql/pull/3189)

## 1.11.0 - 13 December 2023

- Add a new method modeling panel to classify methods as sources/sinks/summaries while in the context of the source code. [#3128](https://github.com/github/vscode-codeql/pull/3128)
@@ -17,7 +26,7 @@
- Add new CodeQL views for managing databases and queries:
  1. A queries panel that shows all queries in your workspace. It allows you to view, create, and run queries in one place.
  2. A language selector, which allows you to quickly filter databases and queries by language.

  For more information, see the [documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/analyzing-your-projects/#filtering-databases-and-queries-by-language).
- When adding a CodeQL database, we no longer add the database source folder to the workspace by default (since this caused bugs in single-folder workspaces). [#3047](https://github.com/github/vscode-codeql/pull/3047)
  - You can manually add individual database source folders to the workspace with the "Add Database Source to Workspace" right-click command in the databases view.
@@ -1,5 +1,5 @@
import { src, dest } from "gulp";
// eslint-disable-next-line @typescript-eslint/no-var-requires
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs
const replace = require("gulp-replace");

/** Inject the application insights key into the telemetry file */
extensions/ql-vscode/gulpfile.ts/chromium-version.json (new file, 4 lines)

@@ -0,0 +1,4 @@
{
  "chromiumVersion": "114",
  "electronVersion": "25.8.0"
}
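The generated `chromium-version.json` above records the Electron runtime and its Chromium major version; it is produced by the `generate:chromium-version` npm script added further down in `package.json` and consumed by the new `gulpfile.ts/view.ts` to pin the esbuild browser target for webview code. A minimal illustration of that consumption (mirroring `view.ts` below, not the generation script, which is not shown in this diff):

```typescript
// Illustration of how the generated JSON pins the esbuild target for webview bundles.
import chromiumVersion from "./chromium-version.json";

// With the file above this evaluates to "chrome114", matching the Chromium
// version bundled with Electron 25.8.0.
const esbuildTarget = `chrome${chromiumVersion.chromiumVersion}`;

console.log(esbuildTarget);
```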
@@ -9,7 +9,7 @@ import {
} from "fs-extra";
import { resolve, join } from "path";
import { isDevBuild } from "./dev";
import type * as packageJsonType from "../package.json";
import type packageJsonType from "../package.json";

export interface DeployedPackage {
  distPath: string;
@@ -8,9 +8,14 @@ import {
  copyWasmFiles,
} from "./typescript";
import { compileTextMateGrammar } from "./textmate";
import { compileView, watchView } from "./webpack";
import { packageExtension } from "./package";
import { injectAppInsightsKey } from "./appInsights";
import {
  checkViewTypeScript,
  compileViewEsbuild,
  watchViewCheckTypeScript,
  watchViewEsbuild,
} from "./view";

export const buildWithoutPackage = series(
  cleanOutput,
@@ -19,23 +24,33 @@ export const buildWithoutPackage = series(
    copyWasmFiles,
    checkTypeScript,
    compileTextMateGrammar,
    compileView,
    compileViewEsbuild,
    checkViewTypeScript,
  ),
);

export const watch = parallel(watchEsbuild, watchCheckTypeScript, watchView);
export const watch = parallel(
  // Always build first, so that we don't have to run build manually
  compileEsbuild,
  compileViewEsbuild,
  watchEsbuild,
  watchCheckTypeScript,
  watchViewEsbuild,
  watchViewCheckTypeScript,
);

export {
  cleanOutput,
  compileTextMateGrammar,
  watchEsbuild,
  watchCheckTypeScript,
  watchView,
  watchViewEsbuild,
  compileEsbuild,
  copyWasmFiles,
  checkTypeScript,
  injectAppInsightsKey,
  compileView,
  compileViewEsbuild,
  checkViewTypeScript,
};
export default series(
  buildWithoutPackage,
@@ -2,7 +2,7 @@ import { dest, src } from "gulp";
import { load } from "js-yaml";
import { obj } from "through2";
import PluginError from "plugin-error";
import * as Vinyl from "vinyl";
import type Vinyl from "vinyl";

/**
 * Replaces all rule references with the match pattern of the referenced rule.
@@ -1,10 +1,11 @@
import { gray, red } from "ansi-colors";
import { dest, src, watch } from "gulp";
import esbuild from "gulp-esbuild";
import ts from "gulp-typescript";
import type { reporter } from "gulp-typescript";
import { createProject } from "gulp-typescript";
import del from "del";

function goodReporter(): ts.reporter.Reporter {
export function goodReporter(): reporter.Reporter {
  return {
    error: (error, typescript) => {
      if (error.tsFile) {
@@ -27,7 +28,7 @@ function goodReporter(): ts.reporter.Reporter {
  };
}

const tsProject = ts.createProject("tsconfig.json");
const tsProject = createProject("tsconfig.json");

export function cleanOutput() {
  return tsProject.projectDirectory
@@ -56,7 +57,7 @@ export function compileEsbuild() {
}

export function watchEsbuild() {
  watch("src/**/*.ts", compileEsbuild);
  watch(["src/**/*.ts", "!src/view/**/*.ts"], compileEsbuild);
}

export function checkTypeScript() {
@@ -66,7 +67,7 @@ export function checkTypeScript() {
}

export function watchCheckTypeScript() {
  watch("src/**/*.ts", checkTypeScript);
  watch(["src/**/*.ts", "!src/view/**/*.ts"], checkTypeScript);
}

export function copyWasmFiles() {
extensions/ql-vscode/gulpfile.ts/view.ts (new file, 42 lines)

@@ -0,0 +1,42 @@
import { dest, src, watch } from "gulp";
import esbuild from "gulp-esbuild";
import { createProject } from "gulp-typescript";
import { goodReporter } from "./typescript";

import chromiumVersion from "./chromium-version.json";

const tsProject = createProject("src/view/tsconfig.json");

export function compileViewEsbuild() {
  return src("./src/view/webview.tsx")
    .pipe(
      esbuild({
        outfile: "webview.js",
        bundle: true,
        format: "iife",
        platform: "browser",
        target: `chrome${chromiumVersion.chromiumVersion}`,
        jsx: "automatic",
        sourcemap: "linked",
        sourceRoot: "..",
        loader: {
          ".ttf": "file",
        },
      }),
    )
    .pipe(dest("out"));
}

export function watchViewEsbuild() {
  watch(["src/view/**/*.{ts,tsx}"], compileViewEsbuild);
}

export function checkViewTypeScript() {
  // This doesn't actually output the TypeScript files, it just
  // runs the TypeScript compiler and reports any errors.
  return tsProject.src().pipe(tsProject(goodReporter()));
}

export function watchViewCheckTypeScript() {
  watch(["src/view/**/*.{ts,tsx}"], checkViewTypeScript);
}
@@ -1,73 +0,0 @@
import { resolve } from "path";
import * as webpack from "webpack";
import MiniCssExtractPlugin from "mini-css-extract-plugin";
import { isDevBuild } from "./dev";

export const config: webpack.Configuration = {
  mode: isDevBuild ? "development" : "production",
  entry: {
    webview: "./src/view/webview.tsx",
  },
  output: {
    path: resolve(__dirname, "..", "out"),
    filename: "[name].js",
  },
  devtool: isDevBuild ? "inline-source-map" : "source-map",
  resolve: {
    extensions: [".js", ".ts", ".tsx", ".json"],
  },
  module: {
    rules: [
      {
        test: /\.(ts|tsx)$/,
        loader: "ts-loader",
        options: {
          configFile: "src/view/tsconfig.json",
        },
      },
      {
        test: /\.less$/,
        use: [
          MiniCssExtractPlugin.loader,
          {
            loader: "css-loader",
            options: {
              importLoaders: 1,
              sourceMap: true,
            },
          },
          {
            loader: "less-loader",
            options: {
              javascriptEnabled: true,
              sourceMap: true,
            },
          },
        ],
      },
      {
        test: /\.css$/,
        use: [
          MiniCssExtractPlugin.loader,
          {
            loader: "css-loader",
            options: {
              sourceMap: true,
            },
          },
        ],
      },
      {
        test: /\.(woff(2)?|ttf|eot)$/,
        type: "asset/resource",
        generator: {
          filename: "fonts/[hash][ext][query]",
        },
      },
    ],
  },
  performance: {
    hints: false,
  },
  plugins: [new MiniCssExtractPlugin()],
};
@@ -1,57 +0,0 @@
import webpack from "webpack";
import { config } from "./webpack.config";

export function compileView(cb: (err?: Error) => void) {
  doWebpack(config, true, cb);
}

export function watchView(cb: (err?: Error) => void) {
  const watchConfig = {
    ...config,
    watch: true,
    watchOptions: {
      aggregateTimeout: 200,
      poll: 1000,
    },
  };
  doWebpack(watchConfig, false, cb);
}

function doWebpack(
  internalConfig: webpack.Configuration,
  failOnError: boolean,
  cb: (err?: Error) => void,
) {
  const resultCb = (error: Error | undefined, stats?: webpack.Stats) => {
    if (error) {
      cb(error);
    }
    if (stats) {
      console.log(
        stats.toString({
          errorDetails: true,
          colors: true,
          assets: false,
          builtAt: false,
          version: false,
          hash: false,
          entrypoints: false,
          timings: false,
          modules: false,
          errors: true,
        }),
      );
      if (stats.hasErrors()) {
        if (failOnError) {
          cb(new Error("Compilation errors detected."));
          return;
        } else {
          console.error("Compilation errors detected.");
        }
      }
      cb();
    }
  };

  webpack(internalConfig, resultCb);
}
@@ -4,6 +4,7 @@
 */

/** @type {import('@jest/types').Config.InitialOptions} */
// eslint-disable-next-line import/no-commonjs
module.exports = {
  projects: [
    "<rootDir>/src/view",
extensions/ql-vscode/package-lock.json (generated, 46111 changed lines): file diff suppressed because it is too large.
@@ -4,7 +4,7 @@
  "description": "CodeQL for Visual Studio Code",
  "author": "GitHub",
  "private": true,
  "version": "1.11.0",
  "version": "1.12.0",
  "publisher": "GitHub",
  "license": "MIT",
  "icon": "media/VS-marketplace-CodeQL-icon.png",
@@ -424,11 +424,6 @@
        "title": "GitHub Databases",
        "order": 8,
        "properties": {
          "codeQL.githubDatabase.enable": {
            "type": "boolean",
            "default": false,
            "markdownDescription": "Enable automatic detection of GitHub databases."
          },
          "codeQL.githubDatabase.download": {
            "type": "string",
            "default": "ask",
@@ -1907,6 +1902,7 @@
    "lint:scenarios": "ts-node scripts/lint-scenarios.ts",
    "generate": "npm-run-all -p generate:*",
    "generate:schemas": "ts-node scripts/generate-schemas.ts",
    "generate:chromium-version": "ts-node scripts/generate-chromium-version.ts",
    "check-types": "find . -type f -name \"tsconfig.json\" -not -path \"./node_modules/*\" | sed -r 's|/[^/]+$||' | sort | uniq | xargs -I {} sh -c \"echo Checking types in {} && cd {} && npx tsc --noEmit\"",
    "postinstall": "patch-package",
    "prepare": "cd ../.. && husky install"
@@ -1914,23 +1910,21 @@
  "dependencies": {
    "@octokit/plugin-retry": "^6.0.1",
    "@octokit/rest": "^20.0.2",
    "@vscode/codicons": "^0.0.31",
    "@vscode/codicons": "^0.0.35",
    "@vscode/debugadapter": "^1.59.0",
    "@vscode/debugprotocol": "^1.59.0",
    "@vscode/webview-ui-toolkit": "^1.0.1",
    "ajv": "^8.11.0",
    "child-process-promise": "^2.2.1",
    "chokidar": "^3.5.3",
    "classnames": "^2.2.6",
    "d3": "^7.6.1",
    "d3-graphviz": "^5.0.2",
    "fs-extra": "^11.1.1",
    "immutable": "^4.0.0",
    "js-yaml": "^4.1.0",
    "msw": "^2.0.0",
    "msw": "^2.0.11",
    "nanoid": "^5.0.1",
    "node-fetch": "^2.6.7",
    "p-queue": "^7.4.1",
    "p-queue": "^8.0.1",
    "react": "^18.2.0",
    "react-dom": "^18.2.0",
    "semver": "^7.5.2",
@@ -1938,43 +1932,42 @@
    "source-map-support": "^0.5.21",
    "stream-json": "^1.7.3",
    "styled-components": "^6.0.2",
    "tmp": "^0.1.0",
    "tmp": "^0.2.1",
    "tmp-promise": "^3.0.2",
    "tree-kill": "^1.2.2",
    "unzipper": "^0.10.5",
    "vscode-extension-telemetry": "^0.1.6",
    "vscode-jsonrpc": "^8.0.2",
    "vscode-languageclient": "^8.0.2",
    "vscode-test-adapter-api": "^1.7.0",
    "vscode-test-adapter-util": "^0.7.0",
    "yauzl": "^2.10.0",
    "zip-a-folder": "^3.1.3"
  },
  "devDependencies": {
    "@babel/core": "^7.18.13",
    "@babel/plugin-transform-modules-commonjs": "^7.18.6",
    "@babel/preset-env": "^7.21.4",
    "@babel/preset-env": "^7.23.7",
    "@babel/preset-react": "^7.18.6",
    "@babel/preset-typescript": "^7.21.4",
    "@faker-js/faker": "^8.0.2",
    "@github/markdownlint-github": "^0.3.0",
    "@github/markdownlint-github": "^0.6.0",
    "@octokit/plugin-throttling": "^8.0.0",
    "@storybook/addon-a11y": "^7.4.6",
    "@storybook/addon-actions": "^7.1.0",
    "@storybook/addon-essentials": "^7.1.0",
    "@storybook/addon-interactions": "^7.1.0",
    "@storybook/addon-links": "^7.1.0",
    "@storybook/components": "^7.1.0",
    "@storybook/components": "^7.6.7",
    "@storybook/csf": "^0.1.1",
    "@storybook/manager-api": "^7.1.0",
    "@storybook/manager-api": "^7.6.6",
    "@storybook/react": "^7.1.0",
    "@storybook/react-webpack5": "^7.1.0",
    "@storybook/theming": "^7.1.0",
    "@storybook/react-webpack5": "^7.6.7",
    "@storybook/theming": "^7.6.7",
    "@testing-library/dom": "^9.3.0",
    "@testing-library/jest-dom": "^6.0.0",
    "@testing-library/jest-dom": "^6.2.0",
    "@testing-library/react": "^14.0.0",
    "@testing-library/user-event": "^14.4.3",
    "@testing-library/user-event": "^14.5.2",
    "@types/child-process-promise": "^2.2.1",
    "@types/classnames": "^2.2.9",
    "@types/d3": "^7.4.0",
    "@types/d3-graphviz": "^2.6.6",
    "@types/del": "^4.0.0",
@@ -1984,7 +1977,7 @@
    "@types/jest": "^29.0.2",
    "@types/js-yaml": "^4.0.6",
    "@types/nanoid": "^3.0.0",
    "@types/node": "18.15.0",
    "@types/node": "18.15.*",
    "@types/node-fetch": "^2.5.2",
    "@types/react": "^18.0.28",
    "@types/react-dom": "^18.0.11",
@@ -1992,15 +1985,14 @@
    "@types/semver": "^7.2.0",
    "@types/stream-json": "^1.7.1",
    "@types/styled-components": "^5.1.11",
    "@types/tar-stream": "^2.2.2",
    "@types/tar-stream": "^3.1.3",
    "@types/through2": "^2.0.36",
    "@types/tmp": "^0.1.0",
|
||||
"@types/tmp": "^0.2.6",
|
||||
"@types/unzipper": "^0.10.1",
|
||||
"@types/vscode": "^1.82.0",
|
||||
"@types/webpack": "^5.28.0",
|
||||
"@types/webpack-env": "^1.18.0",
|
||||
"@typescript-eslint/eslint-plugin": "^6.2.1",
|
||||
"@typescript-eslint/parser": "^6.2.1",
|
||||
"@types/yauzl": "^2.10.3",
|
||||
"@typescript-eslint/eslint-plugin": "^6.18.0",
|
||||
"@typescript-eslint/parser": "^6.16.0",
|
||||
"@vscode/test-electron": "^2.2.0",
|
||||
"@vscode/vsce": "^2.19.0",
|
||||
"ansi-colors": "^4.1.1",
|
||||
@@ -2009,11 +2001,12 @@
|
||||
"cross-env": "^7.0.3",
|
||||
"css-loader": "^6.8.1",
|
||||
"del": "^6.0.0",
|
||||
"esbuild": "^0.15.15",
|
||||
"eslint": "^8.23.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-prettier": "^9.0.0",
|
||||
"eslint-import-resolver-typescript": "^3.6.1",
|
||||
"eslint-plugin-etc": "^2.0.2",
|
||||
"eslint-plugin-github": "^4.4.1",
|
||||
"eslint-plugin-import": "^2.29.1",
|
||||
"eslint-plugin-jest-dom": "^5.0.1",
|
||||
"eslint-plugin-prettier": "^5.0.0",
|
||||
"eslint-plugin-react": "^7.31.8",
|
||||
@@ -2022,7 +2015,7 @@
|
||||
"file-loader": "^6.2.0",
|
||||
"glob": "^10.0.0",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-esbuild": "^0.10.5",
|
||||
"gulp-esbuild": "^0.12.0",
|
||||
"gulp-replace": "^1.1.3",
|
||||
"gulp-typescript": "^5.0.1",
|
||||
"husky": "^8.0.0",
|
||||
@@ -2030,13 +2023,13 @@
|
||||
"jest-environment-jsdom": "^29.0.3",
|
||||
"jest-runner-vscode": "^3.0.1",
|
||||
"lint-staged": "^15.0.2",
|
||||
"markdownlint-cli2": "^0.6.0",
|
||||
"markdownlint-cli2-formatter-pretty": "^0.0.4",
|
||||
"markdownlint-cli2": "^0.11.0",
|
||||
"markdownlint-cli2-formatter-pretty": "^0.0.5",
|
||||
"mini-css-extract-plugin": "^2.6.1",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"patch-package": "^8.0.0",
|
||||
"prettier": "^3.0.0",
|
||||
"storybook": "^7.1.0",
|
||||
"storybook": "^7.6.7",
|
||||
"tar-stream": "^3.0.0",
|
||||
"through2": "^4.0.2",
|
||||
"ts-jest": "^29.0.1",
|
||||
@@ -2044,9 +2037,7 @@
|
||||
"ts-loader": "^9.4.2",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-unused-exports": "^10.0.0",
|
||||
"typescript": "^5.0.2",
|
||||
"webpack": "^5.76.0",
|
||||
"webpack-cli": "^5.0.1"
|
||||
"typescript": "^5.0.2"
|
||||
},
|
||||
"lint-staged": {
|
||||
"./**/*.{json,css,scss}": [
|
||||
|
||||
@@ -14,15 +14,15 @@
|
||||
import { pathExists, readJson, writeJson } from "fs-extra";
|
||||
import { resolve, relative } from "path";
|
||||
|
||||
import { Octokit } from "@octokit/core";
|
||||
import { type RestEndpointMethodTypes } from "@octokit/rest";
|
||||
import type { Octokit } from "@octokit/core";
|
||||
import type { RestEndpointMethodTypes } from "@octokit/rest";
|
||||
import { throttling } from "@octokit/plugin-throttling";
|
||||
|
||||
import { getFiles } from "./util/files";
|
||||
import type { GitHubApiRequest } from "../src/common/mock-gh-api/gh-api-request";
|
||||
import { isGetVariantAnalysisRequest } from "../src/common/mock-gh-api/gh-api-request";
|
||||
import { VariantAnalysis } from "../src/variant-analysis/gh-api/variant-analysis";
|
||||
import { RepositoryWithMetadata } from "../src/variant-analysis/gh-api/repository";
|
||||
import type { VariantAnalysis } from "../src/variant-analysis/gh-api/variant-analysis";
|
||||
import type { RepositoryWithMetadata } from "../src/variant-analysis/gh-api/repository";
|
||||
import { AppOctokit } from "../src/common/octokit";
|
||||
|
||||
const extensionDirectory = resolve(__dirname, "..");
|
||||
|
||||
extensions/ql-vscode/scripts/generate-chromium-version.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
|
||||
import { join, resolve } from "path";
|
||||
import { outputFile, readJSON } from "fs-extra";
|
||||
import { minVersion } from "semver";
|
||||
import { getVersionInformation } from "./util/vscode-versions";
|
||||
|
||||
const extensionDirectory = resolve(__dirname, "..");
|
||||
|
||||
async function generateChromiumVersion() {
|
||||
const packageJson = await readJSON(
|
||||
resolve(extensionDirectory, "package.json"),
|
||||
);
|
||||
|
||||
const minimumVsCodeVersion = minVersion(packageJson.engines.vscode)?.version;
|
||||
if (!minimumVsCodeVersion) {
|
||||
throw new Error("Could not find minimum VS Code version");
|
||||
}
|
||||
|
||||
const versionInformation = await getVersionInformation(minimumVsCodeVersion);
|
||||
|
||||
const chromiumMajorVersion = versionInformation.chromiumVersion.split(".")[0];
|
||||
|
||||
console.log(
|
||||
`VS Code ${minimumVsCodeVersion} uses Chromium ${chromiumMajorVersion}`,
|
||||
);
|
||||
|
||||
await outputFile(
|
||||
join(extensionDirectory, "gulpfile.ts", "chromium-version.json"),
|
||||
`${JSON.stringify(
|
||||
{
|
||||
chromiumVersion: chromiumMajorVersion,
|
||||
electronVersion: versionInformation.electronVersion,
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
);
|
||||
}
|
||||
|
||||
generateChromiumVersion().catch((e: unknown) => {
|
||||
console.error(e);
|
||||
process.exit(2);
|
||||
});
|
||||
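For context, a minimal sketch of the two key steps in the generate-chromium-version.ts script above: resolving a concrete version from a semver range with `minVersion`, and the shape of the JSON file it writes for the gulpfile. The engines range and the version values below are illustrative, not taken from the repository.

```typescript
import { minVersion } from "semver";

// Illustrative engines range; the real value is read from package.json.
const enginesVscode = "^1.82.0";

// minVersion picks the lowest version satisfying the range, e.g. "1.82.0".
const minimumVsCodeVersion = minVersion(enginesVscode)?.version;

// The script then writes a small JSON file of this shape (values made up here)
// into gulpfile.ts/chromium-version.json for the build to consume.
const chromiumVersionJson = {
  chromiumVersion: "114", // Chromium major version only
  electronVersion: "25.8.4",
};

console.log(minimumVsCodeVersion, JSON.stringify(chromiumVersionJson, null, 2));
```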
@@ -6,6 +6,16 @@ import { format, resolveConfig } from "prettier";
|
||||
const extensionDirectory = resolve(__dirname, "..");
|
||||
|
||||
const schemas = [
|
||||
{
|
||||
path: join(extensionDirectory, "src", "packaging", "qlpack-file.ts"),
|
||||
type: "QlPackFile",
|
||||
schemaPath: join(
|
||||
extensionDirectory,
|
||||
"src",
|
||||
"packaging",
|
||||
"qlpack-file.schema.json",
|
||||
),
|
||||
},
|
||||
{
|
||||
path: join(
|
||||
extensionDirectory,
|
||||
|
||||
@@ -19,8 +19,9 @@
|
||||
import { spawnSync } from "child_process";
|
||||
import { basename, resolve } from "path";
|
||||
import { pathExists, readJSON } from "fs-extra";
|
||||
import { RawSourceMap, SourceMapConsumer } from "source-map";
|
||||
import { Open } from "unzipper";
|
||||
import type { RawSourceMap } from "source-map";
|
||||
import { SourceMapConsumer } from "source-map";
|
||||
import { unzipToDirectorySequentially } from "../src/common/unzip";
|
||||
|
||||
if (process.argv.length !== 4) {
|
||||
console.error(
|
||||
@@ -78,10 +79,10 @@ async function extractSourceMap() {
|
||||
releaseAssetsDirectory,
|
||||
]);
|
||||
|
||||
const file = await Open.file(
|
||||
await unzipToDirectorySequentially(
|
||||
resolve(releaseAssetsDirectory, sourcemapAsset.name),
|
||||
sourceMapsDirectory,
|
||||
);
|
||||
await file.extract({ path: sourceMapsDirectory });
|
||||
} else {
|
||||
const workflowRuns = runGhJSON<WorkflowRunListItem[]>([
|
||||
"run",
|
||||
|
||||
extensions/ql-vscode/scripts/update-node-version.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
import { join, resolve } from "path";
|
||||
import { execSync } from "child_process";
|
||||
import { outputFile, readFile, readJSON } from "fs-extra";
|
||||
import { getVersionInformation } from "./util/vscode-versions";
|
||||
import { fetchJson } from "./util/fetch";
|
||||
|
||||
const extensionDirectory = resolve(__dirname, "..");
|
||||
|
||||
interface Release {
|
||||
tag_name: string;
|
||||
}
|
||||
|
||||
async function updateNodeVersion() {
|
||||
const latestVsCodeRelease = await fetchJson<Release>(
|
||||
"https://api.github.com/repos/microsoft/vscode/releases/latest",
|
||||
);
|
||||
const latestVsCodeVersion = latestVsCodeRelease.tag_name;
|
||||
|
||||
console.log(`Latest VS Code version is ${latestVsCodeVersion}`);
|
||||
|
||||
const versionInformation = await getVersionInformation(latestVsCodeVersion);
|
||||
console.log(
|
||||
`VS Code ${versionInformation.vscodeVersion} uses Electron ${versionInformation.electronVersion} and Node ${versionInformation.nodeVersion}`,
|
||||
);
|
||||
|
||||
let currentNodeVersion = (
|
||||
await readFile(join(extensionDirectory, ".nvmrc"), "utf8")
|
||||
).trim();
|
||||
if (currentNodeVersion.startsWith("v")) {
|
||||
currentNodeVersion = currentNodeVersion.slice(1);
|
||||
}
|
||||
|
||||
if (currentNodeVersion === versionInformation.nodeVersion) {
|
||||
console.log("Node version is already up to date");
|
||||
return;
|
||||
}
|
||||
|
||||
console.log("Node version needs to be updated, updating now");
|
||||
|
||||
await outputFile(
|
||||
join(extensionDirectory, ".nvmrc"),
|
||||
`v${versionInformation.nodeVersion}\n`,
|
||||
);
|
||||
|
||||
console.log("Updated .nvmrc");
|
||||
|
||||
const packageJson = await readJSON(
|
||||
join(extensionDirectory, "package.json"),
|
||||
"utf8",
|
||||
);
|
||||
|
||||
// The @types/node version needs to match the first two parts of the Node
|
||||
// version, e.g. if the Node version is 18.17.3, the @types/node version
|
||||
// should be 18.17.*. This corresponds with the documentation at
|
||||
// https://github.com/definitelytyped/definitelytyped#how-do-definitely-typed-package-versions-relate-to-versions-of-the-corresponding-library
|
||||
// "The patch version of the type declaration package is unrelated to the library patch version. This allows
|
||||
// Definitely Typed to safely update type declarations for the same major/minor version of a library."
|
||||
// 18.17.* is equivalent to >=18.17.0 <18.18.0
|
||||
const typesNodeVersion = versionInformation.nodeVersion
|
||||
.split(".")
|
||||
.slice(0, 2)
|
||||
.join(".");
|
||||
|
||||
packageJson.engines.node = `^${versionInformation.nodeVersion}`;
|
||||
packageJson.devDependencies["@types/node"] = `${typesNodeVersion}.*`;
|
||||
|
||||
await outputFile(
|
||||
join(extensionDirectory, "package.json"),
|
||||
`${JSON.stringify(packageJson, null, 2)}\n`,
|
||||
);
|
||||
|
||||
console.log("Updated package.json, now running npm install");
|
||||
|
||||
execSync("npm install", { cwd: extensionDirectory, stdio: "inherit" });
|
||||
// Always use the latest patch version of @types/node
|
||||
execSync("npm upgrade @types/node", {
|
||||
cwd: extensionDirectory,
|
||||
stdio: "inherit",
|
||||
});
|
||||
|
||||
console.log("Node version updated successfully");
|
||||
}
|
||||
|
||||
updateNodeVersion().catch((e: unknown) => {
|
||||
console.error(e);
|
||||
process.exit(2);
|
||||
});
|
||||
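To make the @types/node convention described in the comment above concrete, here is the same derivation applied to an illustrative Node version (18.17.3 is a made-up value, not necessarily what the script would find):

```typescript
// Worked example of the version mapping used above (values are illustrative).
const nodeVersion = "18.17.3";

// Keep only major.minor for @types/node, per the DefinitelyTyped convention.
const typesNodeVersion = nodeVersion.split(".").slice(0, 2).join("."); // "18.17"

const engines = { node: `^${nodeVersion}` }; // "^18.17.3"
const devDependencies = { "@types/node": `${typesNodeVersion}.*` }; // "18.17.*"

console.log(engines, devDependencies);
```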
extensions/ql-vscode/scripts/util/fetch.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
export async function fetchJson<T>(url: string): Promise<T> {
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(
      `Could not fetch ${url}: ${response.status} ${response.statusText}`,
    );
  }

  return (await response.json()) as T;
}
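A possible call site for the fetchJson helper above, typing the response from the Electron releases feed that the sibling vscode-versions.ts script queries. The interface shown here is trimmed to two fields for illustration; the real ElectronVersion type has many more.

```typescript
import { fetchJson } from "./fetch";

// Trimmed-down response shape for illustration only.
interface ElectronReleaseSummary {
  version: string;
  node: string;
}

async function main() {
  const releases = await fetchJson<ElectronReleaseSummary[]>(
    "https://releases.electronjs.org/releases.json",
  );
  console.log(`Fetched ${releases.length} Electron releases`);
}

main().catch((e: unknown) => {
  console.error(e);
  process.exit(1);
});
```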
extensions/ql-vscode/scripts/util/vscode-versions.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
import { minVersion } from "semver";
|
||||
import { fetchJson } from "./fetch";
|
||||
|
||||
type VsCodePackageJson = {
|
||||
devDependencies: {
|
||||
electron: string;
|
||||
};
|
||||
};
|
||||
|
||||
async function getVsCodePackageJson(
|
||||
version: string,
|
||||
): Promise<VsCodePackageJson> {
|
||||
return await fetchJson(
|
||||
`https://raw.githubusercontent.com/microsoft/vscode/${version}/package.json`,
|
||||
);
|
||||
}
|
||||
|
||||
interface ElectronVersion {
|
||||
version: string;
|
||||
date: string;
|
||||
node: string;
|
||||
v8: string;
|
||||
uv: string;
|
||||
zlib: string;
|
||||
openssl: string;
|
||||
modules: string;
|
||||
chrome: string;
|
||||
files: string[];
|
||||
body?: string;
|
||||
apm?: string;
|
||||
}
|
||||
|
||||
async function getElectronReleases(): Promise<ElectronVersion[]> {
|
||||
return await fetchJson("https://releases.electronjs.org/releases.json");
|
||||
}
|
||||
|
||||
type VersionInformation = {
|
||||
vscodeVersion: string;
|
||||
electronVersion: string;
|
||||
nodeVersion: string;
|
||||
chromiumVersion: string;
|
||||
};
|
||||
|
||||
export async function getVersionInformation(
|
||||
vscodeVersion: string,
|
||||
): Promise<VersionInformation> {
|
||||
const vsCodePackageJson = await getVsCodePackageJson(vscodeVersion);
|
||||
const electronVersion = minVersion(vsCodePackageJson.devDependencies.electron)
|
||||
?.version;
|
||||
if (!electronVersion) {
|
||||
throw new Error("Could not find Electron version");
|
||||
}
|
||||
|
||||
const electronReleases = await getElectronReleases();
|
||||
|
||||
const electronRelease = electronReleases.find(
|
||||
(release) => release.version === electronVersion,
|
||||
);
|
||||
if (!electronRelease) {
|
||||
throw new Error(`Could not find Electron release ${electronVersion}`);
|
||||
}
|
||||
|
||||
return {
|
||||
vscodeVersion,
|
||||
electronVersion,
|
||||
nodeVersion: electronRelease.node,
|
||||
chromiumVersion: electronRelease.chrome,
|
||||
};
|
||||
}
|
||||
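Both scripts above drive this helper the same way; a minimal standalone sketch follows. The VS Code version string is illustrative, whereas the callers derive it from package.json or from the latest VS Code release.

```typescript
import { getVersionInformation } from "./vscode-versions";

async function main() {
  // Illustrative version tag.
  const info = await getVersionInformation("1.85.0");
  console.log(
    `VS Code ${info.vscodeVersion} ships Electron ${info.electronVersion}, ` +
      `Node ${info.nodeVersion} and Chromium ${info.chromiumVersion}`,
  );
}

main().catch((e: unknown) => {
  console.error(e);
  process.exit(1);
});
```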
@@ -1,4 +1,4 @@
|
||||
import { AppCommandManager } from "../common/commands";
|
||||
import type { AppCommandManager } from "../common/commands";
|
||||
import { Uri, workspace } from "vscode";
|
||||
import { join } from "path";
|
||||
import { pathExists } from "fs-extra";
|
||||
|
||||
extensions/ql-vscode/src/codeql-cli/cli-command.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
import { execFile } from "child_process";
|
||||
import { promisify } from "util";
|
||||
|
||||
import type { BaseLogger } from "../common/logging";
|
||||
import type { ProgressReporter } from "../common/logging/vscode";
|
||||
import { getChildProcessErrorMessage } from "../common/helpers-pure";
|
||||
|
||||
/**
|
||||
* Flags to pass to all cli commands.
|
||||
*/
|
||||
export const LOGGING_FLAGS = ["-v", "--log-to-stderr"];
|
||||
|
||||
/**
|
||||
* Runs a CodeQL CLI command without invoking the CLI server, returning the output as a string.
|
||||
* @param codeQlPath The path to the CLI.
|
||||
* @param command The `codeql` command to be run, provided as an array of command/subcommand names.
|
||||
* @param commandArgs The arguments to pass to the `codeql` command.
|
||||
* @param description Description of the action being run, to be shown in log and error messages.
|
||||
* @param logger Logger to write command log messages, e.g. to an output channel.
|
||||
* @param progressReporter Used to output progress messages, e.g. to the status bar.
|
||||
* @returns The contents of the command's stdout, if the command succeeded.
|
||||
*/
|
||||
export async function runCodeQlCliCommand(
|
||||
codeQlPath: string,
|
||||
command: string[],
|
||||
commandArgs: string[],
|
||||
description: string,
|
||||
logger: BaseLogger,
|
||||
progressReporter?: ProgressReporter,
|
||||
): Promise<string> {
|
||||
// Add logging arguments first, in case commandArgs contains positional parameters.
|
||||
const args = command.concat(LOGGING_FLAGS).concat(commandArgs);
|
||||
const argsString = args.join(" ");
|
||||
try {
|
||||
if (progressReporter !== undefined) {
|
||||
progressReporter.report({ message: description });
|
||||
}
|
||||
void logger.log(
|
||||
`${description} using CodeQL CLI: ${codeQlPath} ${argsString}...`,
|
||||
);
|
||||
const result = await promisify(execFile)(codeQlPath, args);
|
||||
void logger.log(result.stderr);
|
||||
void logger.log("CLI command succeeded.");
|
||||
return result.stdout;
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`${description} failed: ${getChildProcessErrorMessage(err)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
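A usage sketch of the extracted helper above, assuming it is imported from a module next to cli-command.ts. The "version" subcommand and "--format=terse" flag are placeholders for illustration, not a guaranteed CodeQL CLI invocation.

```typescript
import type { BaseLogger } from "../common/logging";
import { runCodeQlCliCommand } from "./cli-command";

// Illustrative wrapper around the helper; arguments are placeholders.
export async function printCliVersion(codeQlPath: string, logger: BaseLogger) {
  const output = await runCodeQlCliCommand(
    codeQlPath,
    ["version"],
    ["--format=terse"],
    "Checking CodeQL version",
    logger,
  );
  console.log(`CodeQL CLI reported: ${output.trim()}`);
}
```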
@@ -1,6 +1,7 @@
|
||||
import * as semver from "semver";
|
||||
import { runCodeQlCliCommand } from "./cli";
|
||||
import { Logger } from "../common/logging";
|
||||
import type { SemVer } from "semver";
|
||||
import { parse } from "semver";
|
||||
import { runCodeQlCliCommand } from "./cli-command";
|
||||
import type { Logger } from "../common/logging";
|
||||
import { getErrorMessage } from "../common/helpers-pure";
|
||||
|
||||
/**
|
||||
@@ -9,7 +10,7 @@ import { getErrorMessage } from "../common/helpers-pure";
|
||||
export async function getCodeQlCliVersion(
|
||||
codeQlPath: string,
|
||||
logger: Logger,
|
||||
): Promise<semver.SemVer | undefined> {
|
||||
): Promise<SemVer | undefined> {
|
||||
try {
|
||||
const output: string = await runCodeQlCliCommand(
|
||||
codeQlPath,
|
||||
@@ -18,7 +19,7 @@ export async function getCodeQlCliVersion(
|
||||
"Checking CodeQL version",
|
||||
logger,
|
||||
);
|
||||
return semver.parse(output.trim()) || undefined;
|
||||
return parse(output.trim()) || undefined;
|
||||
} catch (e) {
|
||||
// Failed to run the version command. This might happen if the cli version is _really_ old, or it is corrupted.
|
||||
// Either way, we can't determine compatibility.
|
||||
|
||||
@@ -1,40 +1,39 @@
|
||||
import { EOL } from "os";
|
||||
import { spawn } from "child-process-promise";
|
||||
import * as child_process from "child_process";
|
||||
import type { ChildProcessWithoutNullStreams } from "child_process";
|
||||
import { spawn as spawnChildProcess } from "child_process";
|
||||
import { readFile } from "fs-extra";
|
||||
import { delimiter, dirname, join } from "path";
|
||||
import * as sarif from "sarif";
|
||||
import type { Log } from "sarif";
|
||||
import { SemVer } from "semver";
|
||||
import { Readable } from "stream";
|
||||
import type { Readable } from "stream";
|
||||
import tk from "tree-kill";
|
||||
import { promisify } from "util";
|
||||
import { CancellationToken, Disposable, Uri } from "vscode";
|
||||
import type { CancellationToken, Disposable, Uri } from "vscode";
|
||||
|
||||
import {
|
||||
BQRSInfo,
|
||||
import type {
|
||||
BqrsInfo,
|
||||
DecodedBqrs,
|
||||
DecodedBqrsChunk,
|
||||
} from "../common/bqrs-cli-types";
|
||||
import { CliConfig } from "../config";
|
||||
import {
|
||||
DistributionProvider,
|
||||
FindDistributionResultKind,
|
||||
} from "./distribution";
|
||||
import type { CliConfig } from "../config";
|
||||
import type { DistributionProvider } from "./distribution";
|
||||
import { FindDistributionResultKind } from "./distribution";
|
||||
import {
|
||||
assertNever,
|
||||
getChildProcessErrorMessage,
|
||||
getErrorMessage,
|
||||
getErrorStack,
|
||||
} from "../common/helpers-pure";
|
||||
import { walkDirectory } from "../common/files";
|
||||
import { QueryMetadata, SortDirection } from "../common/interface-types";
|
||||
import { BaseLogger, Logger } from "../common/logging";
|
||||
import { ProgressReporter } from "../common/logging/vscode";
|
||||
import { CompilationMessage } from "../query-server/legacy-messages";
|
||||
import type { QueryMetadata } from "../common/interface-types";
|
||||
import { SortDirection } from "../common/interface-types";
|
||||
import type { BaseLogger, Logger } from "../common/logging";
|
||||
import type { ProgressReporter } from "../common/logging/vscode";
|
||||
import { sarifParser } from "../common/sarif-parser";
|
||||
import { App } from "../common/app";
|
||||
import type { App } from "../common/app";
|
||||
import { QueryLanguage } from "../common/query-language";
|
||||
import { LINE_ENDINGS, splitStreamAtSeparators } from "../common/split-stream";
|
||||
import type { Position } from "../query-server/messages";
|
||||
import { LOGGING_FLAGS } from "./cli-command";
|
||||
|
||||
/**
|
||||
* The version of the SARIF format that we are using.
|
||||
@@ -46,21 +45,6 @@ const SARIF_FORMAT = "sarifv2.1.0";
|
||||
*/
|
||||
const CSV_FORMAT = "csv";
|
||||
|
||||
/**
|
||||
* Flags to pass to all cli commands.
|
||||
*/
|
||||
const LOGGING_FLAGS = ["-v", "--log-to-stderr"];
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve library-path`.
|
||||
*/
|
||||
export interface QuerySetup {
|
||||
libraryPath: string[];
|
||||
dbscheme: string;
|
||||
relativeName?: string;
|
||||
compilationCache?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve queries --format bylanguage`.
|
||||
*/
|
||||
@@ -88,7 +72,7 @@ export interface DbInfo {
|
||||
/**
|
||||
* The expected output of `codeql resolve upgrades`.
|
||||
*/
|
||||
export interface UpgradesInfo {
|
||||
interface UpgradesInfo {
|
||||
scripts: string[];
|
||||
finalDbscheme: string;
|
||||
matchesTarget?: boolean;
|
||||
@@ -102,33 +86,33 @@ export type QlpacksInfo = { [name: string]: string[] };
|
||||
/**
|
||||
* The expected output of `codeql resolve languages`.
|
||||
*/
|
||||
export type LanguagesInfo = { [name: string]: string[] };
|
||||
type LanguagesInfo = { [name: string]: string[] };
|
||||
|
||||
/** Information about an ML model, as resolved by `codeql resolve ml-models`. */
|
||||
export type MlModelInfo = {
|
||||
type MlModelInfo = {
|
||||
checksum: string;
|
||||
path: string;
|
||||
};
|
||||
|
||||
/** The expected output of `codeql resolve ml-models`. */
|
||||
export type MlModelsInfo = { models: MlModelInfo[] };
|
||||
type MlModelsInfo = { models: MlModelInfo[] };
|
||||
|
||||
/** Information about a data extension predicate, as resolved by `codeql resolve extensions`. */
|
||||
export type DataExtensionResult = {
|
||||
type DataExtensionResult = {
|
||||
predicate: string;
|
||||
file: string;
|
||||
index: number;
|
||||
};
|
||||
|
||||
/** The expected output of `codeql resolve extensions`. */
|
||||
export type ResolveExtensionsResult = {
|
||||
type ResolveExtensionsResult = {
|
||||
models: MlModelInfo[];
|
||||
data: {
|
||||
[path: string]: DataExtensionResult[];
|
||||
};
|
||||
};
|
||||
|
||||
export type GenerateExtensiblePredicateMetadataResult = {
|
||||
type GenerateExtensiblePredicateMetadataResult = {
|
||||
// There are other properties in this object, but they are
|
||||
// not relevant for its use in the extension, so we omit them.
|
||||
extensible_predicates: Array<{
|
||||
@@ -140,7 +124,7 @@ export type GenerateExtensiblePredicateMetadataResult = {
|
||||
/**
|
||||
* The expected output of `codeql resolve qlref`.
|
||||
*/
|
||||
export type QlrefInfo = { resolvedPath: string };
|
||||
type QlrefInfo = { resolvedPath: string };
|
||||
|
||||
// `codeql bqrs interpret` requires both of these to be present or
|
||||
// both absent.
|
||||
@@ -152,12 +136,30 @@ export interface SourceInfo {
|
||||
/**
|
||||
* The expected output of `codeql resolve queries`.
|
||||
*/
|
||||
export type ResolvedQueries = string[];
|
||||
type ResolvedQueries = string[];
|
||||
|
||||
/**
|
||||
* The expected output of `codeql resolve tests`.
|
||||
*/
|
||||
export type ResolvedTests = string[];
|
||||
type ResolvedTests = string[];
|
||||
|
||||
/**
|
||||
* A compilation message for a test message (either an error or a warning)
|
||||
*/
|
||||
interface CompilationMessage {
|
||||
/**
|
||||
* The text of the message
|
||||
*/
|
||||
message: string;
|
||||
/**
|
||||
* The source position associated with the message
|
||||
*/
|
||||
position: Position;
|
||||
/**
|
||||
* The severity of the message
|
||||
*/
|
||||
severity: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Event fired by `codeql test run`.
|
||||
@@ -187,7 +189,7 @@ interface BqrsDecodeOptions {
|
||||
entities?: string[];
|
||||
}
|
||||
|
||||
export type OnLineCallback = (
|
||||
type OnLineCallback = (
|
||||
line: string,
|
||||
) => Promise<string | undefined> | string | undefined;
|
||||
|
||||
@@ -201,7 +203,7 @@ type VersionChangedListener = (newVersion: SemVer | undefined) => void;
|
||||
*/
|
||||
export class CodeQLCliServer implements Disposable {
|
||||
/** The process for the cli server, or undefined if one doesn't exist yet */
|
||||
process?: child_process.ChildProcessWithoutNullStreams;
|
||||
process?: ChildProcessWithoutNullStreams;
|
||||
/** Queue of future commands*/
|
||||
commandQueue: Array<() => void>;
|
||||
/** Whether a command is running */
|
||||
@@ -317,7 +319,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
/**
|
||||
* Launch the cli server
|
||||
*/
|
||||
private async launchProcess(): Promise<child_process.ChildProcessWithoutNullStreams> {
|
||||
private async launchProcess(): Promise<ChildProcessWithoutNullStreams> {
|
||||
const codeQlPath = await this.getCodeQlPath();
|
||||
const args = [];
|
||||
if (shouldDebugCliServer()) {
|
||||
@@ -720,29 +722,6 @@ export class CodeQLCliServer implements Disposable {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the library path and dbscheme for a query.
|
||||
* @param workspaces The current open workspaces
|
||||
* @param queryPath The path to the query
|
||||
*/
|
||||
async resolveLibraryPath(
|
||||
workspaces: string[],
|
||||
queryPath: string,
|
||||
silent = false,
|
||||
): Promise<QuerySetup> {
|
||||
const subcommandArgs = [
|
||||
"--query",
|
||||
queryPath,
|
||||
...this.getAdditionalPacksArg(workspaces),
|
||||
];
|
||||
return await this.runJsonCodeQlCliCommand<QuerySetup>(
|
||||
["resolve", "library-path"],
|
||||
subcommandArgs,
|
||||
"Resolving library paths",
|
||||
{ silent },
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the language for a query.
|
||||
* @param queryUri The URI of the query
|
||||
@@ -928,11 +907,11 @@ export class CodeQLCliServer implements Disposable {
|
||||
* @param bqrsPath The path to the bqrs.
|
||||
* @param pageSize The page size to precompute offsets into the binary file for.
|
||||
*/
|
||||
async bqrsInfo(bqrsPath: string, pageSize?: number): Promise<BQRSInfo> {
|
||||
async bqrsInfo(bqrsPath: string, pageSize?: number): Promise<BqrsInfo> {
|
||||
const subcommandArgs = (
|
||||
pageSize ? ["--paginate-rows", pageSize.toString()] : []
|
||||
).concat(bqrsPath);
|
||||
return await this.runJsonCodeQlCliCommand<BQRSInfo>(
|
||||
return await this.runJsonCodeQlCliCommand<BqrsInfo>(
|
||||
["bqrs", "info"],
|
||||
subcommandArgs,
|
||||
"Reading bqrs header",
|
||||
@@ -1106,7 +1085,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
interpretedResultsPath: string,
|
||||
sourceInfo?: SourceInfo,
|
||||
args?: string[],
|
||||
): Promise<sarif.Log> {
|
||||
): Promise<Log> {
|
||||
const additionalArgs = [
|
||||
// TODO: This flag means that we don't group interpreted results
|
||||
// by primary location. We may want to revisit whether we call
|
||||
@@ -1547,7 +1526,7 @@ export class CodeQLCliServer implements Disposable {
|
||||
const distribution = await this.distributionProvider.getDistribution();
|
||||
switch (distribution.kind) {
|
||||
case FindDistributionResultKind.CompatibleDistribution:
|
||||
|
||||
// eslint-disable-next-line no-fallthrough -- Intentional fallthrough
|
||||
case FindDistributionResultKind.IncompatibleDistribution:
|
||||
return distribution.version;
|
||||
|
||||
@@ -1597,7 +1576,7 @@ export function spawnServer(
|
||||
stderrListener: (data: any) => void,
|
||||
stdoutListener?: (data: any) => void,
|
||||
progressReporter?: ProgressReporter,
|
||||
): child_process.ChildProcessWithoutNullStreams {
|
||||
): ChildProcessWithoutNullStreams {
|
||||
// Enable verbose logging.
|
||||
const args = command.concat(commandArgs).concat(LOGGING_FLAGS);
|
||||
|
||||
@@ -1608,7 +1587,7 @@ export function spawnServer(
|
||||
progressReporter.report({ message: `Starting ${name}` });
|
||||
}
|
||||
void logger.log(`Starting ${name} using CodeQL CLI: ${base} ${argsString}`);
|
||||
const child = child_process.spawn(base, args);
|
||||
const child = spawnChildProcess(base, args);
|
||||
if (!child || !child.pid) {
|
||||
throw new Error(
|
||||
`Failed to start ${name} using command ${base} ${argsString}.`,
|
||||
@@ -1647,45 +1626,6 @@ export function spawnServer(
|
||||
return child;
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs a CodeQL CLI command without invoking the CLI server, returning the output as a string.
|
||||
* @param codeQlPath The path to the CLI.
|
||||
* @param command The `codeql` command to be run, provided as an array of command/subcommand names.
|
||||
* @param commandArgs The arguments to pass to the `codeql` command.
|
||||
* @param description Description of the action being run, to be shown in log and error messages.
|
||||
* @param logger Logger to write command log messages, e.g. to an output channel.
|
||||
* @param progressReporter Used to output progress messages, e.g. to the status bar.
|
||||
* @returns The contents of the command's stdout, if the command succeeded.
|
||||
*/
|
||||
export async function runCodeQlCliCommand(
|
||||
codeQlPath: string,
|
||||
command: string[],
|
||||
commandArgs: string[],
|
||||
description: string,
|
||||
logger: Logger,
|
||||
progressReporter?: ProgressReporter,
|
||||
): Promise<string> {
|
||||
// Add logging arguments first, in case commandArgs contains positional parameters.
|
||||
const args = command.concat(LOGGING_FLAGS).concat(commandArgs);
|
||||
const argsString = args.join(" ");
|
||||
try {
|
||||
if (progressReporter !== undefined) {
|
||||
progressReporter.report({ message: description });
|
||||
}
|
||||
void logger.log(
|
||||
`${description} using CodeQL CLI: ${codeQlPath} ${argsString}...`,
|
||||
);
|
||||
const result = await promisify(child_process.execFile)(codeQlPath, args);
|
||||
void logger.log(result.stderr);
|
||||
void logger.log("CLI command succeeded.");
|
||||
return result.stdout;
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`${description} failed: ${getChildProcessErrorMessage(err)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log a text stream to a `Logger` interface.
|
||||
* @param stream The stream to log.
|
||||
@@ -1715,7 +1655,7 @@ export function shouldDebugQueryServer() {
|
||||
return isEnvTrue("QUERY_SERVER_JAVA_DEBUG");
|
||||
}
|
||||
|
||||
export function shouldDebugCliServer() {
|
||||
function shouldDebugCliServer() {
|
||||
return isEnvTrue("CLI_SERVER_JAVA_DEBUG");
|
||||
}
|
||||
|
||||
|
||||
@@ -1,21 +1,17 @@
|
||||
import * as fetch from "node-fetch";
|
||||
import { pathExists, mkdtemp, createWriteStream, remove } from "fs-extra";
|
||||
import { createWriteStream, mkdtemp, pathExists, remove } from "fs-extra";
|
||||
import { tmpdir } from "os";
|
||||
import { delimiter, dirname, join } from "path";
|
||||
import * as semver from "semver";
|
||||
import { URL } from "url";
|
||||
import { ExtensionContext, Event } from "vscode";
|
||||
import { DistributionConfig } from "../config";
|
||||
import type { SemVer } from "semver";
|
||||
import { Range, satisfies } from "semver";
|
||||
import type { Event, ExtensionContext } from "vscode";
|
||||
import type { DistributionConfig } from "../config";
|
||||
import { extLogger } from "../common/logging/vscode";
|
||||
import { getCodeQlCliVersion } from "./cli-version";
|
||||
import {
|
||||
ProgressCallback,
|
||||
reportStreamProgress,
|
||||
} from "../common/vscode/progress";
|
||||
import type { ProgressCallback } from "../common/vscode/progress";
|
||||
import { reportStreamProgress } from "../common/vscode/progress";
|
||||
import {
|
||||
codeQlLauncherName,
|
||||
deprecatedCodeQlLauncherName,
|
||||
extractZipArchive,
|
||||
getRequiredAssetName,
|
||||
} from "../common/distribution";
|
||||
import {
|
||||
@@ -26,6 +22,10 @@ import {
|
||||
showAndLogErrorMessage,
|
||||
showAndLogWarningMessage,
|
||||
} from "../common/logging";
|
||||
import { unzipToDirectoryConcurrently } from "../common/unzip-concurrently";
|
||||
import { reportUnzipProgress } from "../common/vscode/unzip-progress";
|
||||
import type { Release } from "./distribution/release";
|
||||
import { ReleasesApiConsumer } from "./distribution/releases-api-consumer";
|
||||
|
||||
/**
|
||||
* distribution.ts
|
||||
@@ -35,28 +35,21 @@ import {
|
||||
*/
|
||||
|
||||
/**
|
||||
* Default value for the owner name of the extension-managed distribution on GitHub.
|
||||
*
|
||||
* We set the default here rather than as a default config value so that this default is invoked
|
||||
* upon blanking the setting.
|
||||
* Repository name with owner of the stable version of the extension-managed distribution on GitHub.
|
||||
*/
|
||||
const DEFAULT_DISTRIBUTION_OWNER_NAME = "github";
|
||||
const STABLE_DISTRIBUTION_REPOSITORY_NWO = "github/codeql-cli-binaries";
|
||||
|
||||
/**
|
||||
* Default value for the repository name of the extension-managed distribution on GitHub.
|
||||
*
|
||||
* We set the default here rather than as a default config value so that this default is invoked
|
||||
* upon blanking the setting.
|
||||
* Repository name with owner of the nightly version of the extension-managed distribution on GitHub.
|
||||
*/
|
||||
const DEFAULT_DISTRIBUTION_REPOSITORY_NAME = "codeql-cli-binaries";
|
||||
const NIGHTLY_DISTRIBUTION_REPOSITORY_NWO = "dsp-testing/codeql-cli-nightlies";
|
||||
|
||||
/**
|
||||
* Range of versions of the CLI that are compatible with the extension.
|
||||
*
|
||||
* This applies to both extension-managed and CLI distributions.
|
||||
*/
|
||||
export const DEFAULT_DISTRIBUTION_VERSION_RANGE: semver.Range =
|
||||
new semver.Range("2.x");
|
||||
export const DEFAULT_DISTRIBUTION_VERSION_RANGE: Range = new Range("2.x");
|
||||
|
||||
export interface DistributionProvider {
|
||||
getCodeQlPathWithoutVersionCheck(): Promise<string | undefined>;
|
||||
@@ -67,7 +60,7 @@ export interface DistributionProvider {
|
||||
export class DistributionManager implements DistributionProvider {
|
||||
constructor(
|
||||
public readonly config: DistributionConfig,
|
||||
private readonly versionRange: semver.Range,
|
||||
private readonly versionRange: Range,
|
||||
extensionContext: ExtensionContext,
|
||||
) {
|
||||
this._onDidChangeDistribution = config.onDidChangeConfiguration;
|
||||
@@ -126,7 +119,7 @@ export class DistributionManager implements DistributionProvider {
|
||||
distribution.kind !== DistributionKind.ExtensionManaged ||
|
||||
this.config.includePrerelease;
|
||||
|
||||
if (!semver.satisfies(version, this.versionRange, { includePrerelease })) {
|
||||
if (!satisfies(version, this.versionRange, { includePrerelease })) {
|
||||
return {
|
||||
distribution,
|
||||
kind: FindDistributionResultKind.IncompatibleDistribution,
|
||||
@@ -195,9 +188,8 @@ export class DistributionManager implements DistributionProvider {
|
||||
|
||||
if (process.env.PATH) {
|
||||
for (const searchDirectory of process.env.PATH.split(delimiter)) {
|
||||
const expectedLauncherPath = await getExecutableFromDirectory(
|
||||
searchDirectory,
|
||||
);
|
||||
const expectedLauncherPath =
|
||||
await getExecutableFromDirectory(searchDirectory);
|
||||
if (expectedLauncherPath) {
|
||||
return {
|
||||
codeQlPath: expectedLauncherPath,
|
||||
@@ -284,7 +276,7 @@ export class DistributionManager implements DistributionProvider {
|
||||
class ExtensionSpecificDistributionManager {
|
||||
constructor(
|
||||
private readonly config: DistributionConfig,
|
||||
private readonly versionRange: semver.Range,
|
||||
private readonly versionRange: Range,
|
||||
private readonly extensionContext: ExtensionContext,
|
||||
) {
|
||||
/**/
|
||||
@@ -421,7 +413,16 @@ class ExtensionSpecificDistributionManager {
|
||||
void extLogger.log(
|
||||
`Extracting CodeQL CLI to ${this.getDistributionStoragePath()}`,
|
||||
);
|
||||
await extractZipArchive(archivePath, this.getDistributionStoragePath());
|
||||
await unzipToDirectoryConcurrently(
|
||||
archivePath,
|
||||
this.getDistributionStoragePath(),
|
||||
progressCallback
|
||||
? reportUnzipProgress(
|
||||
`Extracting CodeQL CLI ${release.name}…`,
|
||||
progressCallback,
|
||||
)
|
||||
: undefined,
|
||||
);
|
||||
} finally {
|
||||
await remove(tmpDirectory);
|
||||
}
|
||||
@@ -444,9 +445,18 @@ class ExtensionSpecificDistributionManager {
|
||||
void extLogger.log(
|
||||
`Searching for latest release including ${requiredAssetName}.`,
|
||||
);
|
||||
|
||||
const versionRange = this.usingNightlyReleases
|
||||
? undefined
|
||||
: this.versionRange;
|
||||
const orderBySemver = !this.usingNightlyReleases;
|
||||
const includePrerelease =
|
||||
this.usingNightlyReleases || this.config.includePrerelease;
|
||||
|
||||
return this.createReleasesApiConsumer().getLatestRelease(
|
||||
this.versionRange,
|
||||
this.config.includePrerelease,
|
||||
versionRange,
|
||||
orderBySemver,
|
||||
includePrerelease,
|
||||
(release) => {
|
||||
// v2.12.3 was released with a bug that causes the extension to fail
|
||||
// so we force the extension to ignore it.
|
||||
@@ -476,19 +486,26 @@ class ExtensionSpecificDistributionManager {
|
||||
}
|
||||
|
||||
private createReleasesApiConsumer(): ReleasesApiConsumer {
|
||||
const ownerName = this.config.ownerName
|
||||
? this.config.ownerName
|
||||
: DEFAULT_DISTRIBUTION_OWNER_NAME;
|
||||
const repositoryName = this.config.repositoryName
|
||||
? this.config.repositoryName
|
||||
: DEFAULT_DISTRIBUTION_REPOSITORY_NAME;
|
||||
return new ReleasesApiConsumer(
|
||||
ownerName,
|
||||
repositoryName,
|
||||
this.distributionRepositoryNwo,
|
||||
this.config.personalAccessToken,
|
||||
);
|
||||
}
|
||||
|
||||
private get distributionRepositoryNwo(): string {
|
||||
if (this.config.channel === "nightly") {
|
||||
return NIGHTLY_DISTRIBUTION_REPOSITORY_NWO;
|
||||
} else {
|
||||
return STABLE_DISTRIBUTION_REPOSITORY_NWO;
|
||||
}
|
||||
}
|
||||
|
||||
private get usingNightlyReleases(): boolean {
|
||||
return (
|
||||
this.distributionRepositoryNwo === NIGHTLY_DISTRIBUTION_REPOSITORY_NWO
|
||||
);
|
||||
}
|
||||
|
||||
private async bumpDistributionFolderIndex(): Promise<void> {
|
||||
const index = this.extensionContext.globalState.get(
|
||||
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
|
||||
@@ -543,171 +560,6 @@ class ExtensionSpecificDistributionManager {
|
||||
private static readonly _codeQlExtractedFolderName = "codeql";
|
||||
}
|
||||
|
||||
export class ReleasesApiConsumer {
|
||||
constructor(
|
||||
ownerName: string,
|
||||
repoName: string,
|
||||
personalAccessToken?: string,
|
||||
) {
|
||||
// Specify version of the GitHub API
|
||||
this._defaultHeaders["accept"] = "application/vnd.github.v3+json";
|
||||
|
||||
if (personalAccessToken) {
|
||||
this._defaultHeaders["authorization"] = `token ${personalAccessToken}`;
|
||||
}
|
||||
|
||||
this._ownerName = ownerName;
|
||||
this._repoName = repoName;
|
||||
}
|
||||
|
||||
public async getLatestRelease(
|
||||
versionRange: semver.Range,
|
||||
includePrerelease = false,
|
||||
additionalCompatibilityCheck?: (release: GithubRelease) => boolean,
|
||||
): Promise<Release> {
|
||||
const apiPath = `/repos/${this._ownerName}/${this._repoName}/releases`;
|
||||
const allReleases: GithubRelease[] = await (
|
||||
await this.makeApiCall(apiPath)
|
||||
).json();
|
||||
const compatibleReleases = allReleases.filter((release) => {
|
||||
if (release.prerelease && !includePrerelease) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const version = semver.parse(release.tag_name);
|
||||
if (
|
||||
version === null ||
|
||||
!semver.satisfies(version, versionRange, { includePrerelease })
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
!additionalCompatibilityCheck || additionalCompatibilityCheck(release)
|
||||
);
|
||||
});
|
||||
// Tag names must all be parsable to semvers due to the previous filtering step.
|
||||
const latestRelease = compatibleReleases.sort((a, b) => {
|
||||
const versionComparison = semver.compare(
|
||||
semver.parse(b.tag_name)!,
|
||||
semver.parse(a.tag_name)!,
|
||||
);
|
||||
if (versionComparison !== 0) {
|
||||
return versionComparison;
|
||||
}
|
||||
return b.created_at.localeCompare(a.created_at, "en-US");
|
||||
})[0];
|
||||
if (latestRelease === undefined) {
|
||||
throw new Error(
|
||||
"No compatible CodeQL CLI releases were found. " +
|
||||
"Please check that the CodeQL extension is up to date.",
|
||||
);
|
||||
}
|
||||
const assets: ReleaseAsset[] = latestRelease.assets.map((asset) => {
|
||||
return {
|
||||
id: asset.id,
|
||||
name: asset.name,
|
||||
size: asset.size,
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
assets,
|
||||
createdAt: latestRelease.created_at,
|
||||
id: latestRelease.id,
|
||||
name: latestRelease.name,
|
||||
};
|
||||
}
|
||||
|
||||
public async streamBinaryContentOfAsset(
|
||||
asset: ReleaseAsset,
|
||||
): Promise<fetch.Response> {
|
||||
const apiPath = `/repos/${this._ownerName}/${this._repoName}/releases/assets/${asset.id}`;
|
||||
|
||||
return await this.makeApiCall(apiPath, {
|
||||
accept: "application/octet-stream",
|
||||
});
|
||||
}
|
||||
|
||||
protected async makeApiCall(
|
||||
apiPath: string,
|
||||
additionalHeaders: { [key: string]: string } = {},
|
||||
): Promise<fetch.Response> {
|
||||
const response = await this.makeRawRequest(
|
||||
ReleasesApiConsumer._apiBase + apiPath,
|
||||
Object.assign({}, this._defaultHeaders, additionalHeaders),
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
// Check for rate limiting
|
||||
const rateLimitResetValue = response.headers.get("X-RateLimit-Reset");
|
||||
if (response.status === 403 && rateLimitResetValue) {
|
||||
const secondsToMillisecondsFactor = 1000;
|
||||
const rateLimitResetDate = new Date(
|
||||
parseInt(rateLimitResetValue, 10) * secondsToMillisecondsFactor,
|
||||
);
|
||||
throw new GithubRateLimitedError(
|
||||
response.status,
|
||||
await response.text(),
|
||||
rateLimitResetDate,
|
||||
);
|
||||
}
|
||||
throw new GithubApiError(response.status, await response.text());
|
||||
}
|
||||
return response;
|
||||
}
|
||||
|
||||
private async makeRawRequest(
|
||||
requestUrl: string,
|
||||
headers: { [key: string]: string },
|
||||
redirectCount = 0,
|
||||
): Promise<fetch.Response> {
|
||||
const response = await fetch.default(requestUrl, {
|
||||
headers,
|
||||
redirect: "manual",
|
||||
});
|
||||
|
||||
const redirectUrl = response.headers.get("location");
|
||||
if (
|
||||
isRedirectStatusCode(response.status) &&
|
||||
redirectUrl &&
|
||||
redirectCount < ReleasesApiConsumer._maxRedirects
|
||||
) {
|
||||
const parsedRedirectUrl = new URL(redirectUrl);
|
||||
if (parsedRedirectUrl.protocol !== "https:") {
|
||||
throw new Error("Encountered a non-https redirect, rejecting");
|
||||
}
|
||||
if (parsedRedirectUrl.host !== "api.github.com") {
|
||||
// Remove authorization header if we are redirected outside of the GitHub API.
|
||||
//
|
||||
// This is necessary to stream release assets since AWS fails if more than one auth
|
||||
// mechanism is provided.
|
||||
delete headers["authorization"];
|
||||
}
|
||||
return await this.makeRawRequest(redirectUrl, headers, redirectCount + 1);
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
private readonly _defaultHeaders: { [key: string]: string } = {};
|
||||
private readonly _ownerName: string;
|
||||
private readonly _repoName: string;
|
||||
|
||||
private static readonly _apiBase = "https://api.github.com";
|
||||
private static readonly _maxRedirects = 20;
|
||||
}
|
||||
|
||||
function isRedirectStatusCode(statusCode: number): boolean {
|
||||
return (
|
||||
statusCode === 301 ||
|
||||
statusCode === 302 ||
|
||||
statusCode === 303 ||
|
||||
statusCode === 307 ||
|
||||
statusCode === 308
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
* Types and helper functions relating to those types.
|
||||
*/
|
||||
@@ -747,7 +599,7 @@ interface DistributionResult {
|
||||
|
||||
interface CompatibleDistributionResult extends DistributionResult {
|
||||
kind: FindDistributionResultKind.CompatibleDistribution;
|
||||
version: semver.SemVer;
|
||||
version: SemVer;
|
||||
}
|
||||
|
||||
interface UnknownCompatibilityDistributionResult extends DistributionResult {
|
||||
@@ -756,7 +608,7 @@ interface UnknownCompatibilityDistributionResult extends DistributionResult {
|
||||
|
||||
interface IncompatibleDistributionResult extends DistributionResult {
|
||||
kind: FindDistributionResultKind.IncompatibleDistribution;
|
||||
version: semver.SemVer;
|
||||
version: SemVer;
|
||||
}
|
||||
|
||||
interface NoDistributionResult {
|
||||
@@ -858,116 +710,3 @@ function warnDeprecatedLauncher() {
|
||||
`Please use "${codeQlLauncherName()}" instead. It is recommended to update to the latest CodeQL binaries.`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* A release on GitHub.
|
||||
*/
|
||||
interface Release {
|
||||
assets: ReleaseAsset[];
|
||||
|
||||
/**
|
||||
* The creation date of the release on GitHub.
|
||||
*/
|
||||
createdAt: string;
|
||||
|
||||
/**
|
||||
* The id associated with the release on GitHub.
|
||||
*/
|
||||
id: number;
|
||||
|
||||
/**
|
||||
* The name associated with the release on GitHub.
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* An asset corresponding to a release on GitHub.
|
||||
*/
|
||||
interface ReleaseAsset {
|
||||
/**
|
||||
* The id associated with the asset on GitHub.
|
||||
*/
|
||||
id: number;
|
||||
|
||||
/**
|
||||
* The name associated with the asset on GitHub.
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* The size of the asset in bytes.
|
||||
*/
|
||||
size: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* The json returned from github for a release.
|
||||
*/
|
||||
export interface GithubRelease {
|
||||
assets: GithubReleaseAsset[];
|
||||
|
||||
/**
|
||||
* The creation date of the release on GitHub, in ISO 8601 format.
|
||||
*/
|
||||
created_at: string;
|
||||
|
||||
/**
|
||||
* The id associated with the release on GitHub.
|
||||
*/
|
||||
id: number;
|
||||
|
||||
/**
|
||||
* The name associated with the release on GitHub.
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* Whether the release is a prerelease.
|
||||
*/
|
||||
prerelease: boolean;
|
||||
|
||||
/**
|
||||
* The tag name. This should be the version.
|
||||
*/
|
||||
tag_name: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* The json returned by github for an asset in a release.
|
||||
*/
|
||||
export interface GithubReleaseAsset {
|
||||
/**
|
||||
* The id associated with the asset on GitHub.
|
||||
*/
|
||||
id: number;
|
||||
|
||||
/**
|
||||
* The name associated with the asset on GitHub.
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* The size of the asset in bytes.
|
||||
*/
|
||||
size: number;
|
||||
}
|
||||
|
||||
export class GithubApiError extends Error {
|
||||
constructor(
|
||||
public status: number,
|
||||
public body: string,
|
||||
) {
|
||||
super(`API call failed with status code ${status}, body: ${body}`);
|
||||
}
|
||||
}
|
||||
|
||||
export class GithubRateLimitedError extends GithubApiError {
|
||||
constructor(
|
||||
public status: number,
|
||||
public body: string,
|
||||
public rateLimitResetDate: Date,
|
||||
) {
|
||||
super(status, body);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
export class GithubApiError extends Error {
|
||||
constructor(
|
||||
public status: number,
|
||||
public body: string,
|
||||
) {
|
||||
super(`API call failed with status code ${status}, body: ${body}`);
|
||||
}
|
||||
}
|
||||
|
||||
export class GithubRateLimitedError extends GithubApiError {
|
||||
constructor(
|
||||
public status: number,
|
||||
public body: string,
|
||||
public rateLimitResetDate: Date,
|
||||
) {
|
||||
super(status, body);
|
||||
}
|
||||
}
|
||||
extensions/ql-vscode/src/codeql-cli/distribution/release.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* A release of the CodeQL CLI hosted on GitHub.
|
||||
*/
|
||||
export interface Release {
|
||||
/**
|
||||
* The assets associated with the release on GitHub.
|
||||
*/
|
||||
assets: ReleaseAsset[];
|
||||
|
||||
/**
|
||||
* The creation date of the release on GitHub.
|
||||
*
|
||||
* This is the date that the release was uploaded to GitHub, and not the date
|
||||
* when we downloaded it or the date when we fetched the data from the GitHub API.
|
||||
*/
|
||||
createdAt: string;
|
||||
|
||||
/**
|
||||
* The id associated with the release on GitHub.
|
||||
*/
|
||||
id: number;
|
||||
|
||||
/**
|
||||
* The name associated with the release on GitHub.
|
||||
*/
|
||||
name: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* An asset attached to a release on GitHub.
|
||||
* Each release may have multiple assets, and each asset can be downloaded independently.
|
||||
*/
|
||||
export interface ReleaseAsset {
|
||||
/**
|
||||
* The id associated with the asset on GitHub.
|
||||
*/
|
||||
id: number;
|
||||
|
||||
/**
|
||||
* The name associated with the asset on GitHub.
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* The size of the asset in bytes.
|
||||
*/
|
||||
size: number;
|
||||
}
|
||||
@@ -0,0 +1,198 @@
|
||||
import type { Response } from "node-fetch";
|
||||
import { default as fetch } from "node-fetch";
|
||||
import type { Range } from "semver";
|
||||
import { compare, parse, satisfies } from "semver";
|
||||
import { URL } from "url";
|
||||
import type { Release, ReleaseAsset } from "./release";
|
||||
import { GithubApiError, GithubRateLimitedError } from "./github-api-error";
|
||||
|
||||
/**
|
||||
* Communicates with the GitHub API to determine the latest compatible release and download assets.
|
||||
*/
|
||||
export class ReleasesApiConsumer {
|
||||
private static readonly apiBase = "https://api.github.com";
|
||||
private static readonly maxRedirects = 20;
|
||||
|
||||
private readonly defaultHeaders: { [key: string]: string } = {};
|
||||
|
||||
constructor(
|
||||
private readonly repositoryNwo: string,
|
||||
personalAccessToken?: string,
|
||||
) {
|
||||
// Specify version of the GitHub API
|
||||
this.defaultHeaders["accept"] = "application/vnd.github.v3+json";
|
||||
|
||||
if (personalAccessToken) {
|
||||
this.defaultHeaders["authorization"] = `token ${personalAccessToken}`;
|
||||
}
|
||||
}
|
||||
|
||||
public async getLatestRelease(
|
||||
versionRange: Range | undefined,
|
||||
orderBySemver = true,
|
||||
includePrerelease = false,
|
||||
additionalCompatibilityCheck?: (release: GithubRelease) => boolean,
|
||||
): Promise<Release> {
|
||||
    const apiPath = `/repos/${this.repositoryNwo}/releases`;
    const allReleases: GithubRelease[] = await (
      await this.makeApiCall(apiPath)
    ).json();
    const compatibleReleases = allReleases.filter((release) => {
      if (release.prerelease && !includePrerelease) {
        return false;
      }

      if (versionRange !== undefined) {
        const version = parse(release.tag_name);
        if (
          version === null ||
          !satisfies(version, versionRange, { includePrerelease })
        ) {
          return false;
        }
      }

      return (
        !additionalCompatibilityCheck || additionalCompatibilityCheck(release)
      );
    });
    // Tag names must all be parsable to semvers due to the previous filtering step.
    const latestRelease = compatibleReleases.sort((a, b) => {
      const versionComparison = orderBySemver
        ? compare(parse(b.tag_name)!, parse(a.tag_name)!)
        : b.id - a.id;
      if (versionComparison !== 0) {
        return versionComparison;
      }
      return b.created_at.localeCompare(a.created_at, "en-US");
    })[0];
    if (latestRelease === undefined) {
      throw new Error(
        "No compatible CodeQL CLI releases were found. " +
          "Please check that the CodeQL extension is up to date.",
      );
    }
    const assets: ReleaseAsset[] = latestRelease.assets.map((asset) => {
      return {
        id: asset.id,
        name: asset.name,
        size: asset.size,
      };
    });

    return {
      assets,
      createdAt: latestRelease.created_at,
      id: latestRelease.id,
      name: latestRelease.name,
    };
  }

  public async streamBinaryContentOfAsset(
    asset: ReleaseAsset,
  ): Promise<Response> {
    const apiPath = `/repos/${this.repositoryNwo}/releases/assets/${asset.id}`;

    return await this.makeApiCall(apiPath, {
      accept: "application/octet-stream",
    });
  }

  protected async makeApiCall(
    apiPath: string,
    additionalHeaders: { [key: string]: string } = {},
  ): Promise<Response> {
    const response = await this.makeRawRequest(
      ReleasesApiConsumer.apiBase + apiPath,
      Object.assign({}, this.defaultHeaders, additionalHeaders),
    );

    if (!response.ok) {
      // Check for rate limiting
      const rateLimitResetValue = response.headers.get("X-RateLimit-Reset");
      if (response.status === 403 && rateLimitResetValue) {
        const secondsToMillisecondsFactor = 1000;
        const rateLimitResetDate = new Date(
          parseInt(rateLimitResetValue, 10) * secondsToMillisecondsFactor,
        );
        throw new GithubRateLimitedError(
          response.status,
          await response.text(),
          rateLimitResetDate,
        );
      }
      throw new GithubApiError(response.status, await response.text());
    }
    return response;
  }

  private async makeRawRequest(
    requestUrl: string,
    headers: { [key: string]: string },
    redirectCount = 0,
  ): Promise<Response> {
    const response = await fetch(requestUrl, {
      headers,
      redirect: "manual",
    });

    const redirectUrl = response.headers.get("location");
    if (
      isRedirectStatusCode(response.status) &&
      redirectUrl &&
      redirectCount < ReleasesApiConsumer.maxRedirects
    ) {
      const parsedRedirectUrl = new URL(redirectUrl);
      if (parsedRedirectUrl.protocol !== "https:") {
        throw new Error("Encountered a non-https redirect, rejecting");
      }
      if (parsedRedirectUrl.host !== "api.github.com") {
        // Remove authorization header if we are redirected outside of the GitHub API.
        //
        // This is necessary to stream release assets since AWS fails if more than one auth
        // mechanism is provided.
        delete headers["authorization"];
      }
      return await this.makeRawRequest(redirectUrl, headers, redirectCount + 1);
    }

    return response;
  }
}

function isRedirectStatusCode(statusCode: number): boolean {
  return (
    statusCode === 301 ||
    statusCode === 302 ||
    statusCode === 303 ||
    statusCode === 307 ||
    statusCode === 308
  );
}

/**
 * The json returned from github for a release.
 * See https://docs.github.com/en/rest/releases/releases#get-a-release for example response and response schema.
 *
 * This type must match the format of the GitHub API and is not intended to be used outside of this file except for tests. Please use the `Release` type instead.
 */
export interface GithubRelease {
  assets: GithubReleaseAsset[];
  created_at: string;
  id: number;
  name: string;
  prerelease: boolean;
  tag_name: string;
}

/**
 * The json returned by github for an asset in a release.
 * See https://docs.github.com/en/rest/releases/releases#get-a-release for example response and response schema.
 *
 * This type must match the format of the GitHub API and is not intended to be used outside of this file except for tests. Please use the `ReleaseAsset` type instead.
 */
export interface GithubReleaseAsset {
  id: number;
  name: string;
  size: number;
}
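A minimal usage sketch of the consumer above (not part of the diff): it assumes an already-constructed ReleasesApiConsumer and a release value shaped like the object returned by the filtering logic; how that release is obtained is left out.

async function streamFirstAsset(
  consumer: ReleasesApiConsumer,
  release: { assets: ReleaseAsset[] },
): Promise<Response> {
  const asset = release.assets[0];
  if (asset === undefined) {
    throw new Error("The release has no assets to download");
  }
  // Hits /repos/<nwo>/releases/assets/<id> with an application/octet-stream
  // Accept header, following redirects as implemented in makeRawRequest above.
  return consumer.streamBinaryContentOfAsset(asset);
}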
@@ -1,5 +1,6 @@
import { CodeQLCliServer } from "./cli";
import { Uri, window } from "vscode";
import type { CodeQLCliServer } from "./cli";
import type { Uri } from "vscode";
import { window } from "vscode";
import {
  getLanguageDisplayName,
  isQueryLanguage,

@@ -1,5 +1,5 @@
import { CodeQLCliServer } from "./cli";
import { QueryMetadata } from "../common/interface-types";
import type { CodeQLCliServer } from "./cli";
import type { QueryMetadata } from "../common/interface-types";
import { extLogger } from "../common/logging/vscode";

/**

@@ -1,10 +1,10 @@
import { Credentials } from "./authentication";
import { Disposable } from "./disposable-object";
import { AppEventEmitter } from "./events";
import { NotificationLogger } from "./logging";
import { Memento } from "./memento";
import { AppCommandManager } from "./commands";
import { AppTelemetry } from "./telemetry";
import type { Credentials } from "./authentication";
import type { Disposable } from "./disposable-object";
import type { AppEventEmitter } from "./events";
import type { NotificationLogger } from "./logging";
import type { Memento } from "./memento";
import type { AppCommandManager } from "./commands";
import type { AppTelemetry } from "./telemetry";

export interface App {
  createEventEmitter<T>(): AppEventEmitter<T>;

@@ -1,4 +1,4 @@
import * as Octokit from "@octokit/rest";
import type { Octokit } from "@octokit/rest";

/**
 * An interface providing methods for obtaining access tokens

@@ -12,7 +12,7 @@ export interface Credentials {
   *
   * @returns An instance of Octokit.
   */
  getOctokit(): Promise<Octokit.Octokit>;
  getOctokit(): Promise<Octokit>;

  /**
   * Returns an OAuth access token.

@@ -4,7 +4,7 @@
 * the "for the sake of extensibility" comment in messages.ts.
 */
// eslint-disable-next-line @typescript-eslint/no-namespace
export namespace ColumnKindCode {
export namespace BqrsColumnKindCode {
  export const FLOAT = "f";
  export const INTEGER = "i";
  export const STRING = "s";

@@ -13,55 +13,44 @@ export namespace ColumnKindCode {
  export const ENTITY = "e";
}

type ColumnKind =
  | typeof ColumnKindCode.FLOAT
  | typeof ColumnKindCode.INTEGER
  | typeof ColumnKindCode.STRING
  | typeof ColumnKindCode.BOOLEAN
  | typeof ColumnKindCode.DATE
  | typeof ColumnKindCode.ENTITY;
export type BqrsColumnKind =
  | typeof BqrsColumnKindCode.FLOAT
  | typeof BqrsColumnKindCode.INTEGER
  | typeof BqrsColumnKindCode.STRING
  | typeof BqrsColumnKindCode.BOOLEAN
  | typeof BqrsColumnKindCode.DATE
  | typeof BqrsColumnKindCode.ENTITY;

interface Column {
export interface BqrsSchemaColumn {
  name?: string;
  kind: ColumnKind;
  kind: BqrsColumnKind;
}

export interface ResultSetSchema {
export interface BqrsResultSetSchema {
  name: string;
  rows: number;
  columns: Column[];
  pagination?: PaginationInfo;
  columns: BqrsSchemaColumn[];
  pagination?: BqrsPaginationInfo;
}

export function getResultSetSchema(
  resultSetName: string,
  resultSets: BQRSInfo,
): ResultSetSchema | undefined {
  for (const schema of resultSets["result-sets"]) {
    if (schema.name === resultSetName) {
      return schema;
    }
  }
  return undefined;
}
interface PaginationInfo {
interface BqrsPaginationInfo {
  "step-size": number;
  offsets: number[];
}

export interface BQRSInfo {
  "result-sets": ResultSetSchema[];
export interface BqrsInfo {
  "result-sets": BqrsResultSetSchema[];
}

export type BqrsId = number;

export interface EntityValue {
  url?: UrlValue;
export interface BqrsEntityValue {
  url?: BqrsUrlValue;
  label?: string;
  id?: BqrsId;
}

export interface LineColumnLocation {
export interface BqrsLineColumnLocation {
  uri: string;
  startLine: number;
  startColumn: number;

@@ -69,7 +58,7 @@ export interface LineColumnLocation {
  endColumn: number;
}

export interface WholeFileLocation {
export interface BqrsWholeFileLocation {
  uri: string;
  startLine: never;
  startColumn: never;

@@ -77,47 +66,28 @@ export interface WholeFileLocation {
  endColumn: never;
}

export type ResolvableLocationValue = WholeFileLocation | LineColumnLocation;
export type BqrsUrlValue =
  | BqrsWholeFileLocation
  | BqrsLineColumnLocation
  | string;

export type UrlValue = ResolvableLocationValue | string;

export type CellValue = EntityValue | number | string | boolean;

export type ResultRow = CellValue[];

export interface RawResultSet {
  readonly schema: ResultSetSchema;
  readonly rows: readonly ResultRow[];
}

// TODO: This function is not necessary. It generates a tuple that is slightly easier
// to handle than the ResultSetSchema and DecodedBqrsChunk. But perhaps it is unnecessary
// boilerplate.
export function transformBqrsResultSet(
  schema: ResultSetSchema,
  page: DecodedBqrsChunk,
): RawResultSet {
  return {
    schema,
    rows: Array.from(page.tuples),
  };
}
export type BqrsCellValue = BqrsEntityValue | number | string | boolean;

export type BqrsKind =
  | "String"
  | "Float"
  | "Integer"
  | "String"
  | "Boolean"
  | "Date"
  | "Entity";

export interface BqrsColumn {
interface BqrsColumn {
  name?: string;
  kind: BqrsKind;
}

export interface DecodedBqrsChunk {
  tuples: CellValue[][];
  tuples: BqrsCellValue[][];
  next?: number;
  columns: BqrsColumn[];
}

extensions/ql-vscode/src/common/bqrs-raw-results-mapper.ts (new file, 216 lines)
@@ -0,0 +1,216 @@
import type {
  BqrsCellValue as BqrsCellValue,
  BqrsColumnKind as BqrsColumnKind,
  DecodedBqrsChunk,
  BqrsEntityValue as BqrsEntityValue,
  BqrsLineColumnLocation,
  BqrsResultSetSchema,
  BqrsUrlValue as BqrsUrlValue,
  BqrsWholeFileLocation,
  BqrsSchemaColumn,
} from "./bqrs-cli-types";
import { BqrsColumnKindCode } from "./bqrs-cli-types";
import type {
  CellValue,
  Column,
  EntityValue,
  RawResultSet,
  Row,
  UrlValue,
  UrlValueResolvable,
} from "./raw-result-types";
import { ColumnKind } from "./raw-result-types";
import { assertNever } from "./helpers-pure";
import { isEmptyPath } from "./bqrs-utils";

export function bqrsToResultSet(
  schema: BqrsResultSetSchema,
  chunk: DecodedBqrsChunk,
): RawResultSet {
  const name = schema.name;
  const totalRowCount = schema.rows;

  const columns = schema.columns.map(mapColumn);

  const rows = chunk.tuples.map(
    (tuple): Row => tuple.map((cell): CellValue => mapCellValue(cell)),
  );

  const resultSet: RawResultSet = {
    name,
    totalRowCount,
    columns,
    rows,
  };

  if (chunk.next) {
    resultSet.nextPageOffset = chunk.next;
  }

  return resultSet;
}

function mapColumn(column: BqrsSchemaColumn): Column {
  const result: Column = {
    kind: mapColumnKind(column.kind),
  };

  if (column.name) {
    result.name = column.name;
  }

  return result;
}

function mapColumnKind(kind: BqrsColumnKind): ColumnKind {
  switch (kind) {
    case BqrsColumnKindCode.STRING:
      return ColumnKind.String;
    case BqrsColumnKindCode.FLOAT:
      return ColumnKind.Float;
    case BqrsColumnKindCode.INTEGER:
      return ColumnKind.Integer;
    case BqrsColumnKindCode.BOOLEAN:
      return ColumnKind.Boolean;
    case BqrsColumnKindCode.DATE:
      return ColumnKind.Date;
    case BqrsColumnKindCode.ENTITY:
      return ColumnKind.Entity;
    default:
      assertNever(kind);
  }
}

function mapCellValue(cellValue: BqrsCellValue): CellValue {
  switch (typeof cellValue) {
    case "string":
      return {
        type: "string",
        value: cellValue,
      };
    case "number":
      return {
        type: "number",
        value: cellValue,
      };
    case "boolean":
      return {
        type: "boolean",
        value: cellValue,
      };
    case "object":
      return {
        type: "entity",
        value: mapEntityValue(cellValue),
      };
  }
}

function mapEntityValue(cellValue: BqrsEntityValue): EntityValue {
  const result: EntityValue = {};

  if (cellValue.id) {
    result.id = cellValue.id;
  }
  if (cellValue.label) {
    result.label = cellValue.label;
  }
  if (cellValue.url) {
    result.url = mapUrlValue(cellValue.url);
  }

  return result;
}

export function mapUrlValue(urlValue: BqrsUrlValue): UrlValue | undefined {
  if (typeof urlValue === "string") {
    const location = tryGetLocationFromString(urlValue);
    if (location !== undefined) {
      return location;
    }

    return {
      type: "string",
      value: urlValue,
    };
  }

  if (isWholeFileLoc(urlValue)) {
    return {
      type: "wholeFileLocation",
      uri: urlValue.uri,
    };
  }

  if (isLineColumnLoc(urlValue)) {
    return {
      type: "lineColumnLocation",
      uri: urlValue.uri,
      startLine: urlValue.startLine,
      startColumn: urlValue.startColumn,
      endLine: urlValue.endLine,
      endColumn: urlValue.endColumn,
    };
  }

  return undefined;
}

function isLineColumnLoc(loc: BqrsUrlValue): loc is BqrsLineColumnLocation {
  return (
    typeof loc !== "string" &&
    !isEmptyPath(loc.uri) &&
    "startLine" in loc &&
    "startColumn" in loc &&
    "endLine" in loc &&
    "endColumn" in loc
  );
}

function isWholeFileLoc(loc: BqrsUrlValue): loc is BqrsWholeFileLocation {
  return (
    typeof loc !== "string" && !isEmptyPath(loc.uri) && !isLineColumnLoc(loc)
  );
}

/**
 * The CodeQL filesystem libraries use this pattern in `getURL()` predicates
 * to describe the location of an entire filesystem resource.
 * Such locations appear as `StringLocation`s instead of `FivePartLocation`s.
 *
 * Folder resources also get similar URLs, but with the `folder` scheme.
 * They are deliberately ignored here, since there is no suitable location to show the user.
 */
const FILE_LOCATION_REGEX = /file:\/\/(.+):([0-9]+):([0-9]+):([0-9]+):([0-9]+)/;

function tryGetLocationFromString(loc: string): UrlValueResolvable | undefined {
  const matches = FILE_LOCATION_REGEX.exec(loc);
  if (matches && matches.length > 1 && matches[1]) {
    if (isWholeFileMatch(matches)) {
      return {
        type: "wholeFileLocation",
        uri: matches[1],
      };
    } else {
      return {
        type: "lineColumnLocation",
        uri: matches[1],
        startLine: Number(matches[2]),
        startColumn: Number(matches[3]),
        endLine: Number(matches[4]),
        endColumn: Number(matches[5]),
      };
    }
  }

  return undefined;
}

function isWholeFileMatch(matches: RegExpExecArray): boolean {
  return (
    matches[2] === "0" &&
    matches[3] === "0" &&
    matches[4] === "0" &&
    matches[5] === "0"
  );
}
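A small worked example of the mapper above (illustrative only; the literal schema and chunk are made up, not real CLI output):

const exampleSchema: BqrsResultSetSchema = {
  name: "#select",
  rows: 2,
  columns: [{ name: "name", kind: "s" }], // "s" is BqrsColumnKindCode.STRING
};
const exampleChunk: DecodedBqrsChunk = {
  columns: [{ name: "name", kind: "String" }],
  tuples: [["main"], ["helper"]],
};
const mapped = bqrsToResultSet(exampleSchema, exampleChunk);
// mapped.columns[0].kind === ColumnKind.String
// mapped.rows[0][0] is { type: "string", value: "main" }
// mapped.totalRowCount === 2, and nextPageOffset stays unset because chunk.next is absent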
@@ -1,111 +1,21 @@
|
||||
import {
|
||||
UrlValue,
|
||||
ResolvableLocationValue,
|
||||
LineColumnLocation,
|
||||
WholeFileLocation,
|
||||
} from "./bqrs-cli-types";
|
||||
import { createRemoteFileRef } from "../common/location-link-utils";
|
||||
|
||||
/**
|
||||
* The CodeQL filesystem libraries use this pattern in `getURL()` predicates
|
||||
* to describe the location of an entire filesystem resource.
|
||||
* Such locations appear as `StringLocation`s instead of `FivePartLocation`s.
|
||||
*
|
||||
* Folder resources also get similar URLs, but with the `folder` scheme.
|
||||
* They are deliberately ignored here, since there is no suitable location to show the user.
|
||||
*/
|
||||
const FILE_LOCATION_REGEX = /file:\/\/(.+):([0-9]+):([0-9]+):([0-9]+):([0-9]+)/;
|
||||
/**
|
||||
* Gets a resolvable source file location for the specified `LocationValue`, if possible.
|
||||
* @param loc The location to test.
|
||||
*/
|
||||
export function tryGetResolvableLocation(
|
||||
loc: UrlValue | undefined,
|
||||
): ResolvableLocationValue | undefined {
|
||||
let resolvedLoc;
|
||||
if (loc === undefined) {
|
||||
resolvedLoc = undefined;
|
||||
} else if (isWholeFileLoc(loc) || isLineColumnLoc(loc)) {
|
||||
resolvedLoc = loc as ResolvableLocationValue;
|
||||
} else if (isStringLoc(loc)) {
|
||||
resolvedLoc = tryGetLocationFromString(loc);
|
||||
} else {
|
||||
resolvedLoc = undefined;
|
||||
}
|
||||
|
||||
return resolvedLoc;
|
||||
}
|
||||
|
||||
export function tryGetLocationFromString(
|
||||
loc: string,
|
||||
): ResolvableLocationValue | undefined {
|
||||
const matches = FILE_LOCATION_REGEX.exec(loc);
|
||||
if (matches && matches.length > 1 && matches[1]) {
|
||||
if (isWholeFileMatch(matches)) {
|
||||
return {
|
||||
uri: matches[1],
|
||||
} as WholeFileLocation;
|
||||
} else {
|
||||
return {
|
||||
uri: matches[1],
|
||||
startLine: Number(matches[2]),
|
||||
startColumn: Number(matches[3]),
|
||||
endLine: Number(matches[4]),
|
||||
endColumn: Number(matches[5]),
|
||||
};
|
||||
}
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function isWholeFileMatch(matches: RegExpExecArray): boolean {
|
||||
return (
|
||||
matches[2] === "0" &&
|
||||
matches[3] === "0" &&
|
||||
matches[4] === "0" &&
|
||||
matches[5] === "0"
|
||||
);
|
||||
}
|
||||
import type { UrlValue } from "./raw-result-types";
|
||||
import { isUrlValueResolvable } from "./raw-result-types";
|
||||
|
||||
/**
|
||||
* Checks whether the file path is empty. If so, we do not want to render this location
|
||||
* as a link.
|
||||
*
|
||||
* @param uri A file uri
|
||||
*/
|
||||
export function isEmptyPath(uriStr: string) {
|
||||
return !uriStr || uriStr === "file:/";
|
||||
}
|
||||
|
||||
export function isLineColumnLoc(loc: UrlValue): loc is LineColumnLocation {
|
||||
return (
|
||||
typeof loc !== "string" &&
|
||||
!isEmptyPath(loc.uri) &&
|
||||
"startLine" in loc &&
|
||||
"startColumn" in loc &&
|
||||
"endLine" in loc &&
|
||||
"endColumn" in loc
|
||||
);
|
||||
}
|
||||
|
||||
export function isWholeFileLoc(loc: UrlValue): loc is WholeFileLocation {
|
||||
return (
|
||||
typeof loc !== "string" && !isEmptyPath(loc.uri) && !isLineColumnLoc(loc)
|
||||
);
|
||||
}
|
||||
|
||||
export function isStringLoc(loc: UrlValue): loc is string {
|
||||
return typeof loc === "string";
|
||||
}
|
||||
|
||||
export function tryGetRemoteLocation(
|
||||
loc: UrlValue | undefined,
|
||||
fileLinkPrefix: string,
|
||||
sourceLocationPrefix: string | undefined,
|
||||
): string | undefined {
|
||||
const resolvableLocation = tryGetResolvableLocation(loc);
|
||||
if (!resolvableLocation) {
|
||||
if (!loc || !isUrlValueResolvable(loc)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -115,22 +25,19 @@ export function tryGetRemoteLocation(
|
||||
// "file:${sourceLocationPrefix}/relative/path/to/file"
|
||||
// So we need to strip off the first part to get the relative path.
|
||||
if (sourceLocationPrefix) {
|
||||
if (!resolvableLocation.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
|
||||
if (!loc.uri.startsWith(`file:${sourceLocationPrefix}/`)) {
|
||||
return undefined;
|
||||
}
|
||||
trimmedLocation = resolvableLocation.uri.replace(
|
||||
`file:${sourceLocationPrefix}/`,
|
||||
"",
|
||||
);
|
||||
trimmedLocation = loc.uri.replace(`file:${sourceLocationPrefix}/`, "");
|
||||
} else {
|
||||
// If the source location prefix is empty (e.g. for older remote queries), we assume that the database
|
||||
// was created on a Linux actions runner and has the format:
|
||||
// "file:/home/runner/work/<repo>/<repo>/relative/path/to/file"
|
||||
// So we need to drop the first 6 parts of the path.
|
||||
if (!resolvableLocation.uri.startsWith("file:/home/runner/work/")) {
|
||||
if (!loc.uri.startsWith("file:/home/runner/work/")) {
|
||||
return undefined;
|
||||
}
|
||||
const locationParts = resolvableLocation.uri.split("/");
|
||||
const locationParts = loc.uri.split("/");
|
||||
trimmedLocation = locationParts.slice(6, locationParts.length).join("/");
|
||||
}
|
||||
|
||||
@@ -138,11 +45,16 @@ export function tryGetRemoteLocation(
|
||||
fileLinkPrefix,
|
||||
filePath: trimmedLocation,
|
||||
};
|
||||
|
||||
if (loc.type === "wholeFileLocation") {
|
||||
return createRemoteFileRef(fileLink);
|
||||
}
|
||||
|
||||
return createRemoteFileRef(
|
||||
fileLink,
|
||||
resolvableLocation.startLine,
|
||||
resolvableLocation.endLine,
|
||||
resolvableLocation.startColumn,
|
||||
resolvableLocation.endColumn,
|
||||
loc.startLine,
|
||||
loc.endLine,
|
||||
loc.startColumn,
|
||||
loc.endColumn,
|
||||
);
|
||||
}
|
||||
|
||||
extensions/ql-vscode/src/common/bytes.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export function readableBytesMb(numBytes: number): string {
  return `${(numBytes / (1024 * 1024)).toFixed(1)} MB`;
}
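A quick arithmetic check of the helper above (illustrative only):

// The helper divides by 1024 * 1024 and keeps one decimal place.
readableBytesMb(5 * 1024 * 1024); // "5.0 MB"
readableBytesMb(1_572_864); // "1.5 MB" (1.5 * 1024 * 1024 bytes)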
@@ -1,6 +1,6 @@
|
||||
import { DisposableObject } from "./disposable-object";
|
||||
import { getErrorMessage } from "./helpers-pure";
|
||||
import { Logger } from "./logging";
|
||||
import type { BaseLogger } from "./logging";
|
||||
|
||||
/**
|
||||
* Base class for "discovery" operations, which scan the file system to find specific kinds of
|
||||
@@ -13,7 +13,7 @@ export abstract class Discovery extends DisposableObject {
|
||||
|
||||
constructor(
|
||||
protected readonly name: string,
|
||||
private readonly logger: Logger,
|
||||
protected readonly logger: BaseLogger,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
import { platform } from "os";
|
||||
import { Open } from "unzipper";
|
||||
import { join } from "path";
|
||||
import { pathExists, chmod } from "fs-extra";
|
||||
|
||||
/**
|
||||
* Get the name of the codeql cli installation we prefer to install, based on our current platform.
|
||||
@@ -19,31 +16,6 @@ export function getRequiredAssetName(): string {
|
||||
}
|
||||
}
|
||||
|
||||
export async function extractZipArchive(
|
||||
archivePath: string,
|
||||
outPath: string,
|
||||
): Promise<void> {
|
||||
const archive = await Open.file(archivePath);
|
||||
await archive.extract({
|
||||
concurrency: 4,
|
||||
path: outPath,
|
||||
});
|
||||
// Set file permissions for extracted files
|
||||
await Promise.all(
|
||||
archive.files.map(async (file) => {
|
||||
// Only change file permissions if within outPath (path.join normalises the path)
|
||||
const extractedPath = join(outPath, file.path);
|
||||
if (
|
||||
extractedPath.indexOf(outPath) !== 0 ||
|
||||
!(await pathExists(extractedPath))
|
||||
) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
return chmod(extractedPath, file.externalFileAttributes >>> 16);
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
export function codeQlLauncherName(): string {
|
||||
return platform() === "win32" ? "codeql.exe" : "codeql";
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Disposable } from "./disposable-object";
|
||||
import type { Disposable } from "./disposable-object";
|
||||
|
||||
export interface AppEvent<T> {
|
||||
(listener: (event: T) => void): Disposable;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { basename, dirname, join } from "path";
|
||||
import { EnvironmentContext } from "./app";
|
||||
import type { EnvironmentContext } from "./app";
|
||||
|
||||
/**
|
||||
* A node in the tree of files. This will be either a `FileTreeDirectory` or a `FileTreeLeaf`.
|
||||
|
||||
@@ -91,18 +91,23 @@ export async function readDirFullPaths(path: string): Promise<string[]> {
|
||||
* Symbolic links are ignored.
|
||||
*
|
||||
* @param dir the directory to walk
|
||||
* @param includeDirectories whether to include directories in the results
|
||||
*
|
||||
* @return An iterator of the full path to all files recursively found in the directory.
|
||||
*/
|
||||
export async function* walkDirectory(
|
||||
dir: string,
|
||||
includeDirectories = false,
|
||||
): AsyncIterableIterator<string> {
|
||||
const seenFiles = new Set<string>();
|
||||
for await (const d of await opendir(dir)) {
|
||||
const entry = join(dir, d.name);
|
||||
seenFiles.add(entry);
|
||||
if (d.isDirectory()) {
|
||||
yield* walkDirectory(entry);
|
||||
if (includeDirectories) {
|
||||
yield entry;
|
||||
}
|
||||
yield* walkDirectory(entry, includeDirectories);
|
||||
} else if (d.isFile()) {
|
||||
yield entry;
|
||||
}
|
||||
|
||||
@@ -27,26 +27,26 @@ export function assertNever(value: never): never {
|
||||
/**
|
||||
* Use to perform array filters where the predicate is asynchronous.
|
||||
*/
|
||||
export const asyncFilter = async function <T>(
|
||||
export async function asyncFilter<T>(
|
||||
arr: T[],
|
||||
predicate: (arg0: T) => Promise<boolean>,
|
||||
) {
|
||||
const results = await Promise.all(arr.map(predicate));
|
||||
return arr.filter((_, index) => results[index]);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* This regex matches strings of the form `owner/repo` where:
|
||||
* - `owner` is made up of alphanumeric characters, hyphens, underscores, or periods
|
||||
* - `repo` is made up of alphanumeric characters, hyphens, underscores, or periods
|
||||
*/
|
||||
export const REPO_REGEX = /^[a-zA-Z0-9-_\.]+\/[a-zA-Z0-9-_\.]+$/;
|
||||
export const REPO_REGEX = /^[a-zA-Z0-9-_.]+\/[a-zA-Z0-9-_.]+$/;
|
||||
|
||||
/**
|
||||
* This regex matches GiHub organization and user strings. These are made up for alphanumeric
|
||||
* characters, hyphens, underscores or periods.
|
||||
*/
|
||||
export const OWNER_REGEX = /^[a-zA-Z0-9-_\.]+$/;
|
||||
export const OWNER_REGEX = /^[a-zA-Z0-9-_.]+$/;
|
||||
|
||||
export function getErrorMessage(e: unknown): string {
|
||||
if (e instanceof RedactableError) {
|
||||
|
||||
@@ -1,30 +1,29 @@
|
||||
import * as sarif from "sarif";
|
||||
import {
|
||||
RawResultSet,
|
||||
ResultRow,
|
||||
ResultSetSchema,
|
||||
ResolvableLocationValue,
|
||||
BqrsColumn,
|
||||
} from "../common/bqrs-cli-types";
|
||||
import {
|
||||
import type { Log, Result } from "sarif";
|
||||
import type {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisScannedRepositoryResult,
|
||||
VariantAnalysisScannedRepositoryState,
|
||||
} from "../variant-analysis/shared/variant-analysis";
|
||||
import {
|
||||
import type {
|
||||
RepositoriesFilterSortState,
|
||||
RepositoriesFilterSortStateWithIds,
|
||||
} from "../variant-analysis/shared/variant-analysis-filter-sort";
|
||||
import { ErrorLike } from "../common/errors";
|
||||
import { DataFlowPaths } from "../variant-analysis/shared/data-flow-paths";
|
||||
import { Method } from "../model-editor/method";
|
||||
import { ModeledMethod } from "../model-editor/modeled-method";
|
||||
import {
|
||||
import type { ErrorLike } from "../common/errors";
|
||||
import type { DataFlowPaths } from "../variant-analysis/shared/data-flow-paths";
|
||||
import type { Method } from "../model-editor/method";
|
||||
import type { ModeledMethod } from "../model-editor/modeled-method";
|
||||
import type {
|
||||
MethodModelingPanelViewState,
|
||||
ModelEditorViewState,
|
||||
} from "../model-editor/shared/view-state";
|
||||
import { Mode } from "../model-editor/shared/mode";
|
||||
import { QueryLanguage } from "./query-language";
|
||||
import type { Mode } from "../model-editor/shared/mode";
|
||||
import type { QueryLanguage } from "./query-language";
|
||||
import type {
|
||||
Column,
|
||||
RawResultSet,
|
||||
Row,
|
||||
UrlValueResolvable,
|
||||
} from "./raw-result-types";
|
||||
|
||||
/**
|
||||
* This module contains types and code that are shared between
|
||||
@@ -35,10 +34,13 @@ export const SELECT_TABLE_NAME = "#select";
|
||||
export const ALERTS_TABLE_NAME = "alerts";
|
||||
export const GRAPH_TABLE_NAME = "graph";
|
||||
|
||||
export type RawTableResultSet = { t: "RawResultSet" } & RawResultSet;
|
||||
export type InterpretedResultSet<T> = {
|
||||
type RawTableResultSet = {
|
||||
t: "RawResultSet";
|
||||
resultSet: RawResultSet;
|
||||
};
|
||||
|
||||
type InterpretedResultSet<T> = {
|
||||
t: "InterpretedResultSet";
|
||||
readonly schema: ResultSetSchema;
|
||||
name: string;
|
||||
interpretation: InterpretationT<T>;
|
||||
};
|
||||
@@ -74,7 +76,7 @@ export type SarifInterpretationData = {
|
||||
* they appear in the sarif file.
|
||||
*/
|
||||
sortState?: InterpretedResultsSortState;
|
||||
} & sarif.Log;
|
||||
} & Log;
|
||||
|
||||
export type GraphInterpretationData = {
|
||||
t: "GraphInterpretationData";
|
||||
@@ -208,7 +210,7 @@ export type FromResultsViewMsg =
|
||||
*/
|
||||
interface ViewSourceFileMsg {
|
||||
t: "viewSourceFile";
|
||||
loc: ResolvableLocationValue;
|
||||
loc: UrlValueResolvable;
|
||||
databaseUri: string;
|
||||
}
|
||||
|
||||
@@ -369,7 +371,9 @@ export interface SetComparisonsMessage {
|
||||
readonly message: string | undefined;
|
||||
}
|
||||
|
||||
type QueryCompareResult = RawQueryCompareResult | InterpretedQueryCompareResult;
|
||||
export type QueryCompareResult =
|
||||
| RawQueryCompareResult
|
||||
| InterpretedQueryCompareResult;
|
||||
|
||||
/**
|
||||
* from is the set of rows that have changes in the "from" query.
|
||||
@@ -377,20 +381,20 @@ type QueryCompareResult = RawQueryCompareResult | InterpretedQueryCompareResult;
|
||||
*/
|
||||
export type RawQueryCompareResult = {
|
||||
kind: "raw";
|
||||
columns: readonly BqrsColumn[];
|
||||
from: ResultRow[];
|
||||
to: ResultRow[];
|
||||
columns: readonly Column[];
|
||||
from: Row[];
|
||||
to: Row[];
|
||||
};
|
||||
|
||||
/**
|
||||
* from is the set of results that have changes in the "from" query.
|
||||
* to is the set of results that have changes in the "to" query.
|
||||
*/
|
||||
type InterpretedQueryCompareResult = {
|
||||
export type InterpretedQueryCompareResult = {
|
||||
kind: "interpreted";
|
||||
sourceLocationPrefix: string;
|
||||
from: sarif.Result[];
|
||||
to: sarif.Result[];
|
||||
from: Result[];
|
||||
to: Result[];
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Memento } from "./memento";
|
||||
import type { Memento } from "./memento";
|
||||
|
||||
/**
|
||||
* Provides a utility method to invoke a function only if a minimum time interval has elapsed since
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { FileLink } from "../variant-analysis/shared/analysis-result";
|
||||
import type { FileLink } from "../variant-analysis/shared/analysis-result";
|
||||
|
||||
export function createRemoteFileRef(
|
||||
fileLink: FileLink,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Logger } from "./logger";
|
||||
import type { Logger } from "./logger";
|
||||
|
||||
export interface NotificationLogger extends Logger {
|
||||
showErrorMessage(message: string): Promise<void>;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { NotificationLogger } from "./notification-logger";
|
||||
import { AppTelemetry } from "../telemetry";
|
||||
import { RedactableError } from "../errors";
|
||||
import type { NotificationLogger } from "./notification-logger";
|
||||
import type { AppTelemetry } from "../telemetry";
|
||||
import type { RedactableError } from "../errors";
|
||||
|
||||
interface ShowAndLogOptions {
|
||||
/**
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { appendFile, ensureFile } from "fs-extra";
|
||||
import { isAbsolute } from "path";
|
||||
import { getErrorMessage } from "../helpers-pure";
|
||||
import { Logger, LogOptions } from "./logger";
|
||||
import type { Logger, LogOptions } from "./logger";
|
||||
|
||||
/**
|
||||
* An implementation of {@link Logger} that sends the output both to another {@link Logger}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { window as Window, OutputChannel, Progress } from "vscode";
|
||||
import { Logger, LogOptions } from "../logger";
|
||||
import type { OutputChannel, Progress } from "vscode";
|
||||
import { window as Window } from "vscode";
|
||||
import type { Logger, LogOptions } from "../logger";
|
||||
import { DisposableObject } from "../../disposable-object";
|
||||
import { NotificationLogger } from "../notification-logger";
|
||||
import type { NotificationLogger } from "../notification-logger";
|
||||
|
||||
/**
|
||||
* A logger that writes messages to an output channel in the VS Code Output tab.
|
||||
@@ -63,7 +64,7 @@ export class OutputChannelLogger
|
||||
message: string,
|
||||
show: (message: string, ...items: string[]) => Thenable<string | undefined>,
|
||||
): Promise<void> {
|
||||
const label = "Show Log";
|
||||
const label = "View extension logs";
|
||||
const result = await show(message, label);
|
||||
|
||||
if (result === label) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Repository } from "../../variant-analysis/gh-api/repository";
|
||||
import {
|
||||
import type { Repository } from "../../variant-analysis/gh-api/repository";
|
||||
import type {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisRepoTask,
|
||||
} from "../../variant-analysis/gh-api/variant-analysis";
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { join, resolve } from "path";
|
||||
import { pathExists } from "fs-extra";
|
||||
import { setupServer, SetupServer } from "msw/node";
|
||||
import type { SetupServer } from "msw/node";
|
||||
import { setupServer } from "msw/node";
|
||||
|
||||
import { DisposableObject } from "../disposable-object";
|
||||
|
||||
|
||||
@@ -2,24 +2,24 @@ import { ensureDir, writeFile } from "fs-extra";
|
||||
import { join } from "path";
|
||||
|
||||
import fetch from "node-fetch";
|
||||
import { SetupServer } from "msw/node";
|
||||
import type { SetupServer } from "msw/node";
|
||||
|
||||
import { DisposableObject } from "../disposable-object";
|
||||
import { gzipDecode } from "../zlib";
|
||||
|
||||
import {
|
||||
import type {
|
||||
AutoModelResponse,
|
||||
BasicErrorResponse,
|
||||
CodeSearchResponse,
|
||||
GetVariantAnalysisRepoResultRequest,
|
||||
GitHubApiRequest,
|
||||
RequestKind,
|
||||
} from "./gh-api-request";
|
||||
import {
|
||||
import { RequestKind } from "./gh-api-request";
|
||||
import type {
|
||||
VariantAnalysis,
|
||||
VariantAnalysisRepoTask,
|
||||
} from "../../variant-analysis/gh-api/variant-analysis";
|
||||
import { Repository } from "../../variant-analysis/gh-api/repository";
|
||||
import type { Repository } from "../../variant-analysis/gh-api/repository";
|
||||
|
||||
export class Recorder extends DisposableObject {
|
||||
private currentRecordedScenario: GitHubApiRequest[] = [];
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { join } from "path";
|
||||
import { readdir, readJson, readFile } from "fs-extra";
|
||||
import { http, RequestHandler } from "msw";
|
||||
import type { RequestHandler } from "msw";
|
||||
import { http } from "msw";
|
||||
import type { GitHubApiRequest } from "./gh-api-request";
|
||||
import {
|
||||
GitHubApiRequest,
|
||||
isAutoModelRequest,
|
||||
isCodeSearchRequest,
|
||||
isGetRepoRequest,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { pathExists } from "fs-extra";
|
||||
import { env, QuickPickItem, Uri, window } from "vscode";
|
||||
import type { QuickPickItem } from "vscode";
|
||||
import { env, Uri, window } from "vscode";
|
||||
|
||||
import {
|
||||
getMockGitHubApiServerScenariosPath,
|
||||
@@ -7,8 +8,9 @@ import {
|
||||
} from "../../../config";
|
||||
import { DisposableObject } from "../../disposable-object";
|
||||
import { MockGitHubApiServer } from "../mock-gh-api-server";
|
||||
import { MockGitHubApiServerCommands } from "../../commands";
|
||||
import { App, AppMode } from "../../app";
|
||||
import type { MockGitHubApiServerCommands } from "../../commands";
|
||||
import type { App } from "../../app";
|
||||
import { AppMode } from "../../app";
|
||||
import path from "path";
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import * as Octokit from "@octokit/rest";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { retry } from "@octokit/plugin-retry";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
export const AppOctokit = Octokit.Octokit.defaults({
|
||||
export const AppOctokit = Octokit.defaults({
|
||||
request: {
|
||||
fetch,
|
||||
},
|
||||
|
||||
extensions/ql-vscode/src/common/qlpack-language.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
import { QueryLanguage } from "./query-language";
import { loadQlpackFile } from "../packaging/qlpack-file-loader";

/**
 * @param qlpackPath The path to the `qlpack.yml` or `codeql-pack.yml` file.
 * @return the language of the given qlpack file, or undefined if the file is
 * not a valid qlpack file or does not contain exactly one language.
 */
export async function getQlPackLanguage(
  qlpackPath: string,
): Promise<QueryLanguage | undefined> {
  const qlPack = await loadQlpackFile(qlpackPath);
  const dependencies = qlPack?.dependencies;
  if (!dependencies) {
    return;
  }

  const matchingLanguages = Object.values(QueryLanguage).filter(
    (language) => `codeql/${language}-all` in dependencies,
  );
  if (matchingLanguages.length !== 1) {
    return undefined;
  }

  return matchingLanguages[0];
}
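A usage sketch (not part of the diff); the pack path is a placeholder and the exact enum members come from query-language.ts:

async function logPackLanguage(qlpackPath: string): Promise<void> {
  const language = await getQlPackLanguage(qlpackPath);
  if (language !== undefined) {
    // A pack depending on codeql/javascript-all (and on no other codeql/<lang>-all
    // pack) resolves to the JavaScript member of QueryLanguage.
    console.log(`Pack targets ${language}`);
  }
}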
extensions/ql-vscode/src/common/raw-result-types.ts (new file, 90 lines)
@@ -0,0 +1,90 @@
export enum ColumnKind {
  String = "string",
  Float = "float",
  Integer = "integer",
  Boolean = "boolean",
  Date = "date",
  Entity = "entity",
}

export type Column = {
  name?: string;
  kind: ColumnKind;
};

type UrlValueString = {
  type: "string";
  value: string;
};

export type UrlValueWholeFileLocation = {
  type: "wholeFileLocation";
  uri: string;
};

export type UrlValueLineColumnLocation = {
  type: "lineColumnLocation";
  uri: string;
  startLine: number;
  startColumn: number;
  endLine: number;
  endColumn: number;
};

export type UrlValueResolvable =
  | UrlValueWholeFileLocation
  | UrlValueLineColumnLocation;

export function isUrlValueResolvable(
  value: UrlValue,
): value is UrlValueResolvable {
  return (
    value.type === "wholeFileLocation" || value.type === "lineColumnLocation"
  );
}

export type UrlValue = UrlValueString | UrlValueResolvable;

export type EntityValue = {
  url?: UrlValue;
  label?: string;
  id?: number;
};

type CellValueEntity = {
  type: "entity";
  value: EntityValue;
};

type CellValueNumber = {
  type: "number";
  value: number;
};

type CellValueString = {
  type: "string";
  value: string;
};

type CellValueBoolean = {
  type: "boolean";
  value: boolean;
};

export type CellValue =
  | CellValueEntity
  | CellValueNumber
  | CellValueString
  | CellValueBoolean;

export type Row = CellValue[];

export type RawResultSet = {
  name: string;
  totalRowCount: number;

  columns: Column[];
  rows: Row[];

  nextPageOffset?: number;
};
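Illustrative values (not part of the diff) showing how the discriminated unions above fit together; the file path and label are made up:

const cell: CellValue = {
  type: "entity",
  value: {
    label: "call to malloc",
    url: {
      type: "lineColumnLocation",
      uri: "file:/src/main.c",
      startLine: 10,
      startColumn: 3,
      endLine: 10,
      endColumn: 9,
    },
  },
};
// isUrlValueResolvable narrows the union, so accessing uri is type-safe here:
if (cell.value.url !== undefined && isUrlValueResolvable(cell.value.url)) {
  console.log(cell.value.url.uri);
}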
@@ -1,11 +1,11 @@
|
||||
export type DeepReadonly<T> = T extends Array<infer R>
|
||||
? DeepReadonlyArray<R>
|
||||
: // eslint-disable-next-line @typescript-eslint/ban-types
|
||||
T extends Function
|
||||
? T
|
||||
: T extends object
|
||||
? DeepReadonlyObject<T>
|
||||
: T;
|
||||
T extends Function
|
||||
? T
|
||||
: T extends object
|
||||
? DeepReadonlyObject<T>
|
||||
: T;
|
||||
|
||||
interface DeepReadonlyArray<T> extends ReadonlyArray<DeepReadonly<T>> {}
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import * as Sarif from "sarif";
|
||||
import type { Log, Tool } from "sarif";
|
||||
import { createReadStream } from "fs-extra";
|
||||
import { connectTo } from "stream-json/Assembler";
|
||||
import { getErrorMessage } from "./helpers-pure";
|
||||
import { withParser } from "stream-json/filters/Pick";
|
||||
|
||||
const DUMMY_TOOL: Sarif.Tool = { driver: { name: "" } };
|
||||
const DUMMY_TOOL: Tool = { driver: { name: "" } };
|
||||
|
||||
export async function sarifParser(
|
||||
interpretedResultsPath: string,
|
||||
): Promise<Sarif.Log> {
|
||||
): Promise<Log> {
|
||||
try {
|
||||
// Parse the SARIF file into token streams, filtering out only the results array.
|
||||
const pipeline = createReadStream(interpretedResultsPath).pipe(
|
||||
@@ -38,7 +38,7 @@ export async function sarifParser(
|
||||
});
|
||||
|
||||
asm.on("done", (asm) => {
|
||||
const log: Sarif.Log = {
|
||||
const log: Log = {
|
||||
version: "2.1.0",
|
||||
runs: [
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as Sarif from "sarif";
|
||||
import type { Location, Region } from "sarif";
|
||||
import type { HighlightedRegion } from "../variant-analysis/shared/analysis-result";
|
||||
import { ResolvableLocationValue } from "../common/bqrs-cli-types";
|
||||
import type { UrlValueResolvable } from "./raw-result-types";
|
||||
import { isEmptyPath } from "./bqrs-utils";
|
||||
|
||||
export interface SarifLink {
|
||||
@@ -16,7 +16,7 @@ interface NoLocation {
|
||||
}
|
||||
|
||||
type ParsedSarifLocation =
|
||||
| (ResolvableLocationValue & {
|
||||
| (UrlValueResolvable & {
|
||||
userVisibleFile: string;
|
||||
})
|
||||
// Resolvable locations have a `uri` field, but it will sometimes include
|
||||
@@ -47,7 +47,7 @@ export function parseSarifPlainTextMessage(
|
||||
// Technically we could have any uri in the target but we don't output that yet.
|
||||
// The possibility of escaping outside the link is not mentioned in the sarif spec but we always output sartif this way.
|
||||
const linkRegex =
|
||||
/(?<=(?<!\\)(\\\\)*)\[(?<linkText>([^\\\]\[]|\\\\|\\\]|\\\[)*)\]\((?<linkTarget>[0-9]+)\)/g;
|
||||
/(?<=(?<!\\)(\\\\)*)\[(?<linkText>([^\\\][]|\\\\|\\\]|\\\[)*)\]\((?<linkTarget>[0-9]+)\)/g;
|
||||
let result: RegExpExecArray | null;
|
||||
let curIndex = 0;
|
||||
while ((result = linkRegex.exec(message)) !== null) {
|
||||
@@ -103,7 +103,7 @@ export function getPathRelativeToSourceLocationPrefix(
|
||||
* @param sourceLocationPrefix a file path (usually a full path) to the database containing the source location.
|
||||
*/
|
||||
export function parseSarifLocation(
|
||||
loc: Sarif.Location,
|
||||
loc: Location,
|
||||
sourceLocationPrefix: string,
|
||||
): ParsedSarifLocation {
|
||||
const physicalLocation = loc.physicalLocation;
|
||||
@@ -137,6 +137,7 @@ export function parseSarifLocation(
|
||||
// If the region property is absent, the physicalLocation object refers to the entire file.
|
||||
// Source: https://docs.oasis-open.org/sarif/sarif/v2.1.0/cs01/sarif-v2.1.0-cs01.html#_Toc16012638.
|
||||
return {
|
||||
type: "wholeFileLocation",
|
||||
uri: effectiveLocation,
|
||||
userVisibleFile,
|
||||
} as ParsedSarifLocation;
|
||||
@@ -144,6 +145,7 @@ export function parseSarifLocation(
|
||||
const region = parseSarifRegion(physicalLocation.region);
|
||||
|
||||
return {
|
||||
type: "lineColumnLocation",
|
||||
uri: effectiveLocation,
|
||||
userVisibleFile,
|
||||
...region,
|
||||
@@ -151,7 +153,7 @@ export function parseSarifLocation(
|
||||
}
|
||||
}
|
||||
|
||||
export function parseSarifRegion(region: Sarif.Region): {
|
||||
export function parseSarifRegion(region: Region): {
|
||||
startLine: number;
|
||||
endLine: number;
|
||||
startColumn: number;
|
||||
@@ -232,14 +234,14 @@ export function parseHighlightedLine(
|
||||
const highlightStartColumn = isSingleLineHighlight
|
||||
? highlightedRegion.startColumn
|
||||
: isFirstHighlightedLine
|
||||
? highlightedRegion.startColumn
|
||||
: 0;
|
||||
? highlightedRegion.startColumn
|
||||
: 0;
|
||||
|
||||
const highlightEndColumn = isSingleLineHighlight
|
||||
? highlightedRegion.endColumn
|
||||
: isLastHighlightedLine
|
||||
? highlightedRegion.endColumn
|
||||
: line.length + 1;
|
||||
? highlightedRegion.endColumn
|
||||
: line.length + 1;
|
||||
|
||||
const plainSection1 = line.substring(0, highlightStartColumn - 1);
|
||||
const highlightedSection = line.substring(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Readable } from "stream";
|
||||
import type { Readable } from "stream";
|
||||
import { StringDecoder } from "string_decoder";
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { RedactableError } from "./errors";
|
||||
import type { RedactableError } from "./errors";
|
||||
|
||||
export interface AppTelemetry {
|
||||
sendCommandUsage(name: string, executionTime: number, error?: Error): void;
|
||||
|
||||
extensions/ql-vscode/src/common/unzip-concurrently.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { availableParallelism } from "os";
import type { UnzipProgressCallback } from "./unzip";
import { unzipToDirectory } from "./unzip";
import PQueue from "p-queue";

export async function unzipToDirectoryConcurrently(
  archivePath: string,
  destinationPath: string,
  progress?: UnzipProgressCallback,
): Promise<void> {
  const queue = new PQueue({
    concurrency: availableParallelism(),
  });

  return unzipToDirectory(
    archivePath,
    destinationPath,
    progress,
    async (tasks) => {
      await queue.addAll(tasks);
    },
  );
}
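A usage sketch (not part of the diff); the paths are placeholders and readableBytesMb is the helper from bytes.ts above:

async function extractDatabase(): Promise<void> {
  await unzipToDirectoryConcurrently(
    "/tmp/codeql-db.zip",
    "/tmp/codeql-db",
    ({ filesExtracted, totalFiles, bytesExtracted, totalBytes }) => {
      // Progress is reported incrementally as bytes are copied out of the archive.
      console.log(
        `${filesExtracted}/${totalFiles} files, ` +
          `${readableBytesMb(bytesExtracted)} of ${readableBytesMb(totalBytes)}`,
      );
    },
  );
}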
extensions/ql-vscode/src/common/unzip.ts (new file, 264 lines)
@@ -0,0 +1,264 @@
import type { Entry as ZipEntry, Options as ZipOptions, ZipFile } from "yauzl";
import { open } from "yauzl";
import type { Readable } from "stream";
import { Transform } from "stream";
import { dirname, join } from "path";
import type { WriteStream } from "fs";
import { createWriteStream, ensureDir } from "fs-extra";

// We can't use promisify because it picks up the wrong overload.
export function openZip(
  path: string,
  options: ZipOptions = {},
): Promise<ZipFile> {
  return new Promise((resolve, reject) => {
    open(path, options, (err, zipFile) => {
      if (err) {
        reject(err);
        return;
      }

      resolve(zipFile);
    });
  });
}

export function excludeDirectories(entries: ZipEntry[]): ZipEntry[] {
  return entries.filter((entry) => !/\/$/.test(entry.fileName));
}

function calculateTotalUncompressedByteSize(entries: ZipEntry[]): number {
  return entries.reduce((total, entry) => total + entry.uncompressedSize, 0);
}

export function readZipEntries(zipFile: ZipFile): Promise<ZipEntry[]> {
  return new Promise((resolve, reject) => {
    const files: ZipEntry[] = [];

    zipFile.readEntry();
    zipFile.on("entry", (entry: ZipEntry) => {
      files.push(entry);

      zipFile.readEntry();
    });

    zipFile.on("end", () => {
      resolve(files);
    });

    zipFile.on("error", (err) => {
      reject(err);
    });
  });
}

function openZipReadStream(
  zipFile: ZipFile,
  entry: ZipEntry,
): Promise<Readable> {
  return new Promise((resolve, reject) => {
    zipFile.openReadStream(entry, (err, readStream) => {
      if (err) {
        reject(err);
        return;
      }

      resolve(readStream);
    });
  });
}

export async function openZipBuffer(
  zipFile: ZipFile,
  entry: ZipEntry,
): Promise<Buffer> {
  const readable = await openZipReadStream(zipFile, entry);
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    readable.on("data", (chunk) => {
      chunks.push(chunk);
    });
    readable.on("error", (err) => {
      reject(err);
    });
    readable.on("end", () => {
      resolve(Buffer.concat(chunks));
    });
  });
}

async function copyStream(
  readable: Readable,
  writeStream: WriteStream,
  bytesExtractedCallback?: (bytesExtracted: number) => void,
): Promise<void> {
  return new Promise((resolve, reject) => {
    readable.on("error", (err) => {
      reject(err);
    });
    readable.on("end", () => {
      resolve();
    });

    readable
      .pipe(
        new Transform({
          transform(chunk, _encoding, callback) {
            bytesExtractedCallback?.(chunk.length);
            this.push(chunk);
            callback();
          },
        }),
      )
      .pipe(writeStream);
  });
}

type UnzipProgress = {
  filesExtracted: number;
  totalFiles: number;

  bytesExtracted: number;
  totalBytes: number;
};

export type UnzipProgressCallback = (progress: UnzipProgress) => void;

/**
 * Unzips a single file from a zip archive.
 *
 * @param zipFile
 * @param entry
 * @param rootDestinationPath
 * @param bytesExtractedCallback Called when bytes are extracted.
 * @return The number of bytes extracted.
 */
async function unzipFile(
  zipFile: ZipFile,
  entry: ZipEntry,
  rootDestinationPath: string,
  bytesExtractedCallback?: (bytesExtracted: number) => void,
): Promise<number> {
  const path = join(rootDestinationPath, entry.fileName);

  if (/\/$/.test(entry.fileName)) {
    // Directory file names end with '/'

    await ensureDir(path);

    return 0;
  } else {
    // Ensure the directory exists
    await ensureDir(dirname(path));

    const readable = await openZipReadStream(zipFile, entry);

    let mode: number | undefined = entry.externalFileAttributes >>> 16;
    if (mode <= 0) {
      mode = undefined;
    }

    const writeStream = createWriteStream(path, {
      autoClose: true,
      mode,
    });

    await copyStream(readable, writeStream, bytesExtractedCallback);

    return entry.uncompressedSize;
  }
}

/**
 * Unzips all files from a zip archive. Please use
 * `unzipToDirectoryConcurrently` or `unzipToDirectorySequentially` instead
 * of this function.
 *
 * @param archivePath
 * @param destinationPath
 * @param taskRunner A function that runs the tasks (either sequentially or concurrently).
 * @param progress
 */
export async function unzipToDirectory(
  archivePath: string,
  destinationPath: string,
  progress: UnzipProgressCallback | undefined,
  taskRunner: (tasks: Array<() => Promise<void>>) => Promise<void>,
): Promise<void> {
  const zipFile = await openZip(archivePath, {
    autoClose: false,
    strictFileNames: true,
    lazyEntries: true,
  });

  try {
    const entries = await readZipEntries(zipFile);

    let filesExtracted = 0;
    const totalFiles = entries.length;
    let bytesExtracted = 0;
    const totalBytes = calculateTotalUncompressedByteSize(entries);

    const reportProgress = () => {
      progress?.({
        filesExtracted,
        totalFiles,
        bytesExtracted,
        totalBytes,
      });
    };

    reportProgress();

    await taskRunner(
      entries.map((entry) => async () => {
        let entryBytesExtracted = 0;

        const totalEntryBytesExtracted = await unzipFile(
          zipFile,
          entry,
          destinationPath,
          (thisBytesExtracted) => {
            entryBytesExtracted += thisBytesExtracted;
            bytesExtracted += thisBytesExtracted;
            reportProgress();
          },
        );

        // Should be 0, but just in case.
        bytesExtracted += -entryBytesExtracted + totalEntryBytesExtracted;

        filesExtracted++;
        reportProgress();
      }),
    );
  } finally {
    zipFile.close();
  }
}

/**
 * Sequentially unzips all files from a zip archive. Please use
 * `unzipToDirectoryConcurrently` if you can. This function is only
 * provided because Jest cannot import `p-queue`.
 *
 * @param archivePath
 * @param destinationPath
 * @param progress
 */
export async function unzipToDirectorySequentially(
  archivePath: string,
  destinationPath: string,
  progress?: UnzipProgressCallback,
): Promise<void> {
  return unzipToDirectory(
    archivePath,
    destinationPath,
    progress,
    async (tasks) => {
      for (const task of tasks) {
        await task();
      }
    },
  );
}
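A sketch (not part of the diff): unzipToDirectory takes the task runner as a parameter, so callers can pick their own scheduling policy. This hypothetical runner extracts entries two at a time without pulling in p-queue.

async function unzipInPairs(
  archivePath: string,
  destinationPath: string,
): Promise<void> {
  await unzipToDirectory(archivePath, destinationPath, undefined, async (tasks) => {
    // Run the extraction tasks in pairs; each task unzips one entry.
    for (let i = 0; i < tasks.length; i += 2) {
      await Promise.all(tasks.slice(i, i + 2).map((task) => task()));
    }
  });
}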
@@ -1,16 +1,22 @@
|
||||
import * as vscode from "vscode";
|
||||
import { Uri, WebviewViewProvider } from "vscode";
|
||||
import { WebviewKind, WebviewMessage, getHtmlForWebview } from "./webview-html";
|
||||
import { Disposable } from "../disposable-object";
|
||||
import { App } from "../app";
|
||||
import { DeepReadonly } from "../readonly";
|
||||
import type {
|
||||
CancellationToken,
|
||||
WebviewView,
|
||||
WebviewViewProvider,
|
||||
WebviewViewResolveContext,
|
||||
} from "vscode";
|
||||
import { Uri } from "vscode";
|
||||
import type { WebviewKind, WebviewMessage } from "./webview-html";
|
||||
import { getHtmlForWebview } from "./webview-html";
|
||||
import type { Disposable } from "../disposable-object";
|
||||
import type { App } from "../app";
|
||||
import type { DeepReadonly } from "../readonly";
|
||||
|
||||
export abstract class AbstractWebviewViewProvider<
|
||||
ToMessage extends WebviewMessage,
|
||||
FromMessage extends WebviewMessage,
|
||||
> implements WebviewViewProvider
|
||||
{
|
||||
protected webviewView: vscode.WebviewView | undefined = undefined;
|
||||
protected webviewView: WebviewView | undefined = undefined;
|
||||
private disposables: Disposable[] = [];
|
||||
|
||||
constructor(
|
||||
@@ -23,9 +29,9 @@ export abstract class AbstractWebviewViewProvider<
|
||||
* first loaded or when the user hides and then shows a view again.
|
||||
*/
|
||||
public resolveWebviewView(
|
||||
webviewView: vscode.WebviewView,
|
||||
_context: vscode.WebviewViewResolveContext,
|
||||
_token: vscode.CancellationToken,
|
||||
webviewView: WebviewView,
|
||||
_context: WebviewViewResolveContext,
|
||||
_token: CancellationToken,
|
||||
) {
|
||||
webviewView.webview.options = {
|
||||
enableScripts: true,
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import {
|
||||
import type {
|
||||
WebviewPanel,
|
||||
window as Window,
|
||||
ViewColumn,
|
||||
Uri,
|
||||
WebviewPanelOptions,
|
||||
WebviewOptions,
|
||||
} from "vscode";
|
||||
import { window as Window, Uri } from "vscode";
|
||||
import { join } from "path";
|
||||
|
||||
import { App } from "../app";
|
||||
import { Disposable } from "../disposable-object";
|
||||
import type { App } from "../app";
|
||||
import type { Disposable } from "../disposable-object";
|
||||
import { tmpDir } from "../../tmp-dir";
|
||||
import { getHtmlForWebview, WebviewMessage, WebviewKind } from "./webview-html";
|
||||
import { DeepReadonly } from "../readonly";
|
||||
import type { WebviewMessage, WebviewKind } from "./webview-html";
|
||||
import { getHtmlForWebview } from "./webview-html";
|
||||
import type { DeepReadonly } from "../readonly";
|
||||
|
||||
export type WebviewPanelConfig = {
|
||||
viewId: string;
|
||||
|
||||
@@ -1,15 +1,36 @@
 import { pathExists } from "fs-extra";
-import * as unzipper from "unzipper";
-import * as vscode from "vscode";
+import type { Entry as ZipEntry, ZipFile } from "yauzl";
+import type {
+  Event,
+  ExtensionContext,
+  FileChangeEvent,
+  FileStat,
+  FileSystemProvider,
+} from "vscode";
+import {
+  Disposable,
+  EventEmitter,
+  FileSystemError,
+  FileType,
+  Uri,
+  workspace,
+} from "vscode";
 import { extLogger } from "../logging/vscode";
+import {
+  excludeDirectories,
+  openZip,
+  openZipBuffer,
+  readZipEntries,
+} from "../unzip";

 // All path operations in this file must be on paths *within* the zip
 // archive.
 import { posix } from "path";

 const path = posix;

-class File implements vscode.FileStat {
-  type: vscode.FileType;
+class File implements FileStat {
+  type: FileType;
   ctime: number;
   mtime: number;
   size: number;
@@ -18,7 +39,7 @@ class File implements vscode.FileStat {
     public name: string,
     public data: Uint8Array,
   ) {
-    this.type = vscode.FileType.File;
+    this.type = FileType.File;
     this.ctime = Date.now();
     this.mtime = Date.now();
     this.size = data.length;
@@ -26,15 +47,15 @@ class File implements vscode.FileStat {
   }
 }

-class Directory implements vscode.FileStat {
-  type: vscode.FileType;
+class Directory implements FileStat {
+  type: FileType;
   ctime: number;
   mtime: number;
   size: number;
   entries: Map<string, Entry> = new Map();

   constructor(public name: string) {
-    this.type = vscode.FileType.Directory;
+    this.type = FileType.Directory;
     this.ctime = Date.now();
     this.mtime = Date.now();
     this.size = 0;
@@ -52,7 +73,7 @@ type Entry = File | Directory;
  * dirMap['/foo'] = {'bar': vscode.FileType.Directory}
  * dirMap['/foo/bar'] = {'baz': vscode.FileType.File}
  */
-type DirectoryHierarchyMap = Map<string, Map<string, vscode.FileType>>;
+type DirectoryHierarchyMap = Map<string, Map<string, FileType>>;

 export type ZipFileReference = {
   sourceArchiveZipPath: string;
@@ -60,7 +81,7 @@ export type ZipFileReference = {
 };

 /** Encodes a reference to a source file within a zipped source archive into a single URI. */
-export function encodeSourceArchiveUri(ref: ZipFileReference): vscode.Uri {
+export function encodeSourceArchiveUri(ref: ZipFileReference): Uri {
   const { sourceArchiveZipPath, pathWithinSourceArchive } = ref;

   // These two paths are put into a single URI with a custom scheme.
@@ -88,7 +109,7 @@ export function encodeSourceArchiveUri(ref: ZipFileReference): vscode.Uri {
   const sourceArchiveZipPathEndIndex =
     sourceArchiveZipPathStartIndex + sourceArchiveZipPath.length;
   const authority = `${sourceArchiveZipPathStartIndex}-${sourceArchiveZipPathEndIndex}`;
-  return vscode.Uri.parse(`${zipArchiveScheme}:/`, true).with({
+  return Uri.parse(`${zipArchiveScheme}:/`, true).with({
     path: encodedPath,
     authority,
   });
@@ -110,7 +131,7 @@ export function encodeArchiveBasePath(sourceArchiveZipPath: string) {
 const sourceArchiveUriAuthorityPattern = /^(\d+)-(\d+)$/;

 class InvalidSourceArchiveUriError extends Error {
-  constructor(uri: vscode.Uri) {
+  constructor(uri: Uri) {
     super(
       `Can't decode uri ${uri}: authority should be of the form startIndex-endIndex (where both indices are integers).`,
     );
@@ -118,7 +139,7 @@ class InvalidSourceArchiveUriError extends Error {
 }

 /** Decodes an encoded source archive URI into its corresponding paths. Inverse of `encodeSourceArchiveUri`. */
-export function decodeSourceArchiveUri(uri: vscode.Uri): ZipFileReference {
+export function decodeSourceArchiveUri(uri: Uri): ZipFileReference {
   if (!uri.authority) {
     // Uri is malformed, but this is recoverable
     void extLogger.log(
@@ -158,7 +179,7 @@ function ensureFile(map: DirectoryHierarchyMap, file: string) {
     throw new Error(error);
   }
   ensureDir(map, dirname);
-  map.get(dirname)!.set(path.basename(file), vscode.FileType.File);
+  map.get(dirname)!.set(path.basename(file), FileType.File);
 }

 /**
@@ -171,32 +192,43 @@ function ensureDir(map: DirectoryHierarchyMap, dir: string) {
     if (dir !== parent) {
       // not the root directory
       ensureDir(map, parent);
-      map.get(parent)!.set(path.basename(dir), vscode.FileType.Directory);
+      map.get(parent)!.set(path.basename(dir), FileType.Directory);
     }
   }
 }

 type Archive = {
-  unzipped: unzipper.CentralDirectory;
+  zipFile: ZipFile;
+  entries: ZipEntry[];
   dirMap: DirectoryHierarchyMap;
 };

 async function parse_zip(zipPath: string): Promise<Archive> {
   if (!(await pathExists(zipPath))) {
-    throw vscode.FileSystemError.FileNotFound(zipPath);
+    throw FileSystemError.FileNotFound(zipPath);
   }
+  const zipFile = await openZip(zipPath, {
+    lazyEntries: true,
+    autoClose: false,
+    strictFileNames: true,
+  });
+
+  const entries = excludeDirectories(await readZipEntries(zipFile));
+
   const archive: Archive = {
-    unzipped: await unzipper.Open.file(zipPath),
+    zipFile,
+    entries,
     dirMap: new Map(),
   };
-  archive.unzipped.files.forEach((f) => {
-    ensureFile(archive.dirMap, path.resolve("/", f.path));
+
+  entries.forEach((f) => {
+    ensureFile(archive.dirMap, path.resolve("/", f.fileName));
   });
   return archive;
 }

-export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
-  private readOnlyError = vscode.FileSystemError.NoPermissions(
+export class ArchiveFileSystemProvider implements FileSystemProvider {
+  private readOnlyError = FileSystemError.NoPermissions(
     "write operation attempted, but source archive filesystem is readonly",
   );
   private archives: Map<string, Promise<Archive>> = new Map();
@@ -212,63 +244,57 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {

   // metadata

-  async stat(uri: vscode.Uri): Promise<vscode.FileStat> {
+  async stat(uri: Uri): Promise<FileStat> {
     return await this._lookup(uri);
   }

-  async readDirectory(
-    uri: vscode.Uri,
-  ): Promise<Array<[string, vscode.FileType]>> {
+  async readDirectory(uri: Uri): Promise<Array<[string, FileType]>> {
     const ref = decodeSourceArchiveUri(uri);
     const archive = await this.getArchive(ref.sourceArchiveZipPath);
     const contents = archive.dirMap.get(ref.pathWithinSourceArchive);
     const result =
       contents === undefined ? undefined : Array.from(contents.entries());
     if (result === undefined) {
-      throw vscode.FileSystemError.FileNotFound(uri);
+      throw FileSystemError.FileNotFound(uri);
     }
     return result;
   }

   // file contents

-  async readFile(uri: vscode.Uri): Promise<Uint8Array> {
+  async readFile(uri: Uri): Promise<Uint8Array> {
     const data = (await this._lookupAsFile(uri)).data;
     if (data) {
       return data;
     }
-    throw vscode.FileSystemError.FileNotFound();
+    throw FileSystemError.FileNotFound();
   }

   // write operations, all disabled

   writeFile(
-    _uri: vscode.Uri,
+    _uri: Uri,
     _content: Uint8Array,
     _options: { create: boolean; overwrite: boolean },
   ): void {
     throw this.readOnlyError;
   }

-  rename(
-    _oldUri: vscode.Uri,
-    _newUri: vscode.Uri,
-    _options: { overwrite: boolean },
-  ): void {
+  rename(_oldUri: Uri, _newUri: Uri, _options: { overwrite: boolean }): void {
     throw this.readOnlyError;
   }

-  delete(_uri: vscode.Uri): void {
+  delete(_uri: Uri): void {
     throw this.readOnlyError;
   }

-  createDirectory(_uri: vscode.Uri): void {
+  createDirectory(_uri: Uri): void {
     throw this.readOnlyError;
   }

   // content lookup

-  private async _lookup(uri: vscode.Uri): Promise<Entry> {
+  private async _lookup(uri: Uri): Promise<Entry> {
     const ref = decodeSourceArchiveUri(uri);
     const archive = await this.getArchive(ref.sourceArchiveZipPath);

@@ -276,51 +302,44 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
     // use '/' as path separator throughout
     const reqPath = ref.pathWithinSourceArchive;

-    const file = archive.unzipped.files.find((f) => {
-      const absolutePath = path.resolve("/", f.path);
+    const file = archive.entries.find((f) => {
+      const absolutePath = path.resolve("/", f.fileName);
       return (
         absolutePath === reqPath ||
         absolutePath === path.join("/src_archive", reqPath)
       );
     });
     if (file !== undefined) {
-      if (file.type === "File") {
-        return new File(reqPath, await file.buffer());
-      } else {
-        // file.type === 'Directory'
-        // I haven't observed this case in practice. Could it happen
-        // with a zip file that contains empty directories?
-        return new Directory(reqPath);
-      }
+      const buffer = await openZipBuffer(archive.zipFile, file);
+      return new File(reqPath, buffer);
     }
     if (archive.dirMap.has(reqPath)) {
       return new Directory(reqPath);
     }
-    throw vscode.FileSystemError.FileNotFound(
+    throw FileSystemError.FileNotFound(
       `uri '${uri.toString()}', interpreted as '${reqPath}' in archive '${
         ref.sourceArchiveZipPath
       }'`,
     );
   }

-  private async _lookupAsFile(uri: vscode.Uri): Promise<File> {
+  private async _lookupAsFile(uri: Uri): Promise<File> {
     const entry = await this._lookup(uri);
     if (entry instanceof File) {
       return entry;
     }
-    throw vscode.FileSystemError.FileIsADirectory(uri);
+    throw FileSystemError.FileIsADirectory(uri);
   }

   // file events

-  private _emitter = new vscode.EventEmitter<vscode.FileChangeEvent[]>();
+  private _emitter = new EventEmitter<FileChangeEvent[]>();

-  readonly onDidChangeFile: vscode.Event<vscode.FileChangeEvent[]> =
-    this._emitter.event;
+  readonly onDidChangeFile: Event<FileChangeEvent[]> = this._emitter.event;

-  watch(_resource: vscode.Uri): vscode.Disposable {
+  watch(_resource: Uri): Disposable {
     // ignore, fires for all changes...
-    return new vscode.Disposable(() => {
+    return new Disposable(() => {
       /**/
     });
   }
@@ -336,9 +355,9 @@ export class ArchiveFileSystemProvider implements vscode.FileSystemProvider {
  */
 export const zipArchiveScheme = "codeql-zip-archive";

-export function activate(ctx: vscode.ExtensionContext) {
+export function activate(ctx: ExtensionContext) {
   ctx.subscriptions.push(
-    vscode.workspace.registerFileSystemProvider(
+    workspace.registerFileSystemProvider(
       zipArchiveScheme,
       new ArchiveFileSystemProvider(),
       {
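For orientation, the two functions touched above form a round trip: `encodeSourceArchiveUri` packs a zip path plus a path inside the archive into one `codeql-zip-archive:` URI, `decodeSourceArchiveUri` recovers both, and `activate` registers the provider so ordinary filesystem reads work on such URIs. A hedged usage sketch; the import path and the example paths are made up, only the function names and the `ZipFileReference` shape come from the diff:

```typescript
import { workspace } from "vscode";
// Hypothetical import path; in the real extension these live in the module shown above.
import {
  encodeSourceArchiveUri,
  decodeSourceArchiveUri,
} from "./archive-filesystem-provider";

async function readFromArchive(): Promise<Uint8Array> {
  const uri = encodeSourceArchiveUri({
    sourceArchiveZipPath: "/tmp/database/src.zip", // made-up path
    pathWithinSourceArchive: "/src_archive/main.cpp", // made-up path
  });

  // The URI authority records where the zip path ends, so decoding recovers both parts.
  const ref = decodeSourceArchiveUri(uri);
  console.log(ref.sourceArchiveZipPath, ref.pathWithinSourceArchive);

  // Because ArchiveFileSystemProvider is registered for the "codeql-zip-archive"
  // scheme in activate(), the workspace API can read through the encoded URI.
  return workspace.fs.readFile(uri);
}
```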
@@ -1,6 +1,6 @@
-import * as vscode from "vscode";
-import * as Octokit from "@octokit/rest";
-import { Credentials } from "../authentication";
+import { authentication } from "vscode";
+import type { Octokit } from "@octokit/rest";
+import type { Credentials } from "../authentication";
 import { AppOctokit } from "../octokit";

 export const GITHUB_AUTH_PROVIDER_ID = "github";
@@ -18,14 +18,14 @@ export class VSCodeCredentials implements Credentials {
   /**
    * A specific octokit to return, otherwise a new authenticated octokit will be created when needed.
    */
-  private octokit: Octokit.Octokit | undefined;
+  private octokit: Octokit | undefined;

   /**
    * Creates or returns an instance of Octokit.
    *
    * @returns An instance of Octokit.
    */
-  async getOctokit(): Promise<Octokit.Octokit> {
+  async getOctokit(): Promise<Octokit> {
     if (this.octokit) {
       return this.octokit;
     }
@@ -38,7 +38,7 @@ export class VSCodeCredentials implements Credentials {
   }

   async getAccessToken(): Promise<string> {
-    const session = await vscode.authentication.getSession(
+    const session = await authentication.getSession(
       GITHUB_AUTH_PROVIDER_ID,
       SCOPES,
       { createIfNone: true },
@@ -48,7 +48,7 @@ export class VSCodeCredentials implements Credentials {
   }

   async getExistingAccessToken(): Promise<string | undefined> {
-    const session = await vscode.authentication.getSession(
+    const session = await authentication.getSession(
       GITHUB_AUTH_PROVIDER_ID,
       SCOPES,
       { createIfNone: false },
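The behavioural difference between the two methods above comes from the `createIfNone` flag passed to VS Code's built-in `authentication.getSession`: with `true` the editor prompts the user to sign in when no GitHub session exists, with `false` it resolves to `undefined` instead of prompting. A standalone sketch of that distinction; the scopes are placeholders, not necessarily what this extension requests:

```typescript
import { authentication } from "vscode";

const GITHUB_AUTH_PROVIDER_ID = "github";
const SCOPES = ["repo"]; // placeholder scopes

// Prompts the user to sign in if there is no existing session.
async function requireAccessToken(): Promise<string> {
  const session = await authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, {
    createIfNone: true,
  });
  return session.accessToken;
}

// Never prompts; resolves to undefined when the user is not signed in.
async function existingAccessToken(): Promise<string | undefined> {
  const session = await authentication.getSession(GITHUB_AUTH_PROVIDER_ID, SCOPES, {
    createIfNone: false,
  });
  return session?.accessToken;
}
```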
@@ -1,7 +1,9 @@
-import { commands, Disposable } from "vscode";
-import { CommandFunction, CommandManager } from "../../packages/commands";
+import type { Disposable } from "vscode";
+import { commands } from "vscode";
+import type { CommandFunction } from "../../packages/commands";
+import { CommandManager } from "../../packages/commands";
+import type { NotificationLogger } from "../logging";
 import {
-  NotificationLogger,
   showAndLogWarningMessage,
   showAndLogExceptionWithTelemetry,
 } from "../logging";
@@ -10,7 +12,7 @@ import { asError, getErrorMessage } from "../../common/helpers-pure";
 import { redactableError } from "../../common/errors";
 import { UserCancellationException } from "./progress";
 import { telemetryListener } from "./telemetry";
-import { AppTelemetry } from "../telemetry";
+import type { AppTelemetry } from "../telemetry";

 /**
  * Create a command manager for VSCode, wrapping registerCommandWithErrorHandling
@@ -1,5 +1,5 @@
 import { env } from "vscode";
-import { EnvironmentContext } from "../app";
+import type { EnvironmentContext } from "../app";

 export class AppEnvironmentContext implements EnvironmentContext {
   public get language(): string {
@@ -1,5 +1,5 @@
 import { EventEmitter } from "vscode";
-import { AppEventEmitter } from "../events";
+import type { AppEventEmitter } from "../events";

 export class VSCodeAppEventEmitter<T>
   extends EventEmitter<T>
@@ -5,7 +5,7 @@
  * Licensed under the MIT License. See License.txt in the project root for license information.
  *--------------------------------------------------------------------------------------------*/

-import {
+import type {
   Uri,
   Event,
   Disposable,
@@ -1,5 +1,5 @@
 import { Uri, window } from "vscode";
-import { AppCommandManager } from "../commands";
+import type { AppCommandManager } from "../commands";
 import { showBinaryChoiceDialog } from "./dialog";
 import { redactableError } from "../../common/errors";
 import {
@@ -1,14 +1,8 @@
 import { Discovery } from "../discovery";
-import {
-  Event,
-  EventEmitter,
-  RelativePattern,
-  Uri,
-  WorkspaceFoldersChangeEvent,
-  workspace,
-} from "vscode";
+import type { Event, Uri, WorkspaceFoldersChangeEvent } from "vscode";
+import { EventEmitter, RelativePattern, workspace } from "vscode";
 import { MultiFileSystemWatcher } from "./multi-file-system-watcher";
-import { AppEventEmitter } from "../events";
+import type { AppEventEmitter } from "../events";
 import { extLogger } from "../logging/vscode";
 import { lstat } from "fs-extra";
 import { containsPath, isIOError } from "../files";
@@ -1,4 +1,4 @@
-import { CancellationToken, Disposable } from "vscode";
+import type { CancellationToken, Disposable } from "vscode";
 import { DisposableObject } from "../disposable-object";

 /**
@@ -1,5 +1,6 @@
 import { DisposableObject } from "../disposable-object";
-import { EventEmitter, Event, Uri, GlobPattern, workspace } from "vscode";
+import type { Event, Uri, GlobPattern } from "vscode";
+import { EventEmitter, workspace } from "vscode";

 /**
  * A collection of `FileSystemWatcher` objects. Disposing this object disposes all of the individual
@@ -1,9 +1,9 @@
-import {
+import type {
   CancellationToken,
-  ProgressLocation,
   ProgressOptions as VSCodeProgressOptions,
-  window as Window,
 } from "vscode";
+import { ProgressLocation, window as Window } from "vscode";
+import { readableBytesMb } from "../bytes";

 export class UserCancellationException extends Error {
   /**
@@ -125,15 +125,13 @@ export function reportStreamProgress(
 ) {
   if (progress && totalNumBytes) {
     let numBytesDownloaded = 0;
-    const bytesToDisplayMB = (numBytes: number): string =>
-      `${(numBytes / (1024 * 1024)).toFixed(1)} MB`;
     const updateProgress = () => {
       progress({
         step: numBytesDownloaded,
         maxStep: totalNumBytes,
-        message: `${messagePrefix} [${bytesToDisplayMB(
+        message: `${messagePrefix} [${readableBytesMb(
           numBytesDownloaded,
-        )} of ${bytesToDisplayMB(totalNumBytes)}]`,
+        )} of ${readableBytesMb(totalNumBytes)}]`,
       });
     };
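`readableBytesMb` is not defined in this diff; judging from the inline helper it replaces, it presumably formats a byte count as megabytes with one decimal place. A guess at its shape, inferred from the removed code rather than the actual implementation in `../bytes`:

```typescript
// Inferred from the removed bytesToDisplayMB helper; the real function may differ.
export function readableBytesMb(numBytes: number): string {
  return `${(numBytes / (1024 * 1024)).toFixed(1)} MB`;
}

// Example: readableBytesMb(5 * 1024 * 1024) === "5.0 MB"
```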
@@ -1,9 +1,10 @@
-import {
+import type {
   ExplorerSelectionCommandFunction,
   TreeViewContextMultiSelectionCommandFunction,
   TreeViewContextSingleSelectionCommandFunction,
 } from "../commands";
-import { showAndLogErrorMessage, NotificationLogger } from "../logging";
+import type { NotificationLogger } from "../logging";
+import { showAndLogErrorMessage } from "../logging";

 // A hack to match types that are not an array, which is useful to help avoid
 // misusing createSingleSelectionCommand, e.g. where T accidentally gets instantiated
Some files were not shown because too many files have changed in this diff.