Compare commits

..

4 Commits

Author        SHA1        Message                                                           Date
Michael Hohn  495b52661e  add viewer configuration                                          2025-08-19 11:24:15 -07:00
Michael Hohn  9a6aa52a40  fix: reconciled status names between server/agent/vscode-codeql  2025-03-14 12:45:36 -07:00
Michael Hohn  c106903f01  wip: update settings, push new images                             2025-02-20 10:30:22 -08:00
Nicolas Will  d40cda150c  WIP: standalone MRVA                                              2024-07-01 18:20:23 +02:00

All CI check runs on 495b52661e and 9a6aa52a40 (Run CLI tests, Update Node version, Release, Bump CLI version, Code Scanning - CodeQL) were cancelled.
227 changed files with 13986 additions and 45419 deletions

View File

@@ -38,7 +38,7 @@ updates:
labels:
- "Update dependencies"
- package-ecosystem: docker
directory: "extensions/ql-vscode/test/e2e/docker"
directory: "extensions/ql-vscode/test/e2e"
schedule:
interval: "weekly"
day: "thursday" # Thursday is arbitrary

View File

@@ -26,7 +26,7 @@ jobs:
- name: Start containers
working-directory: extensions/ql-vscode/test/e2e
run: docker compose -f "docker-compose.yml" up -d --build
run: docker-compose -f "docker-compose.yml" up -d --build
- name: Install Playwright Browsers
working-directory: extensions/ql-vscode
@@ -43,4 +43,4 @@ jobs:
- name: Stop containers
working-directory: extensions/ql-vscode/test/e2e
if: always()
run: docker compose -f "docker-compose.yml" down -v
run: docker-compose -f "docker-compose.yml" down -v

View File

@@ -79,6 +79,13 @@ jobs:
run: |
npm run check-types
- name: Lint
working-directory: extensions/ql-vscode
env:
NODE_OPTIONS: '--max-old-space-size=4096'
run: |
npm run lint
- name: Lint Markdown
working-directory: extensions/ql-vscode
run: |
@@ -94,21 +101,6 @@ jobs:
run: |
npm run find-deadcode
- name: Lint
if: "${{ !cancelled() }}"
working-directory: extensions/ql-vscode
env:
NODE_OPTIONS: '--max-old-space-size=4096'
run: |
npm run lint-ci
- name: Upload ESLint results to Code Scanning
if: "${{ !cancelled() && !startsWith(github.head_ref, 'dependabot/')}}"
uses: github/codeql-action/upload-sarif@main
with:
sarif_file: extensions/ql-vscode/build/eslint.sarif
category: eslint
generated:
name: Check generated code
runs-on: ubuntu-latest

View File

@@ -124,35 +124,17 @@ jobs:
needs: build
environment: publish-vscode-marketplace
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
env:
VSCE_TOKEN: ${{ secrets.VSCE_TOKEN }}
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: extensions/ql-vscode/.nvmrc
- name: Download artifact
uses: actions/download-artifact@v4
with:
name: vscode-codeql-extension
- name: Azure User-assigned managed identity login
uses: azure/login@v2
with:
client-id: ${{ secrets.AZURE_CLIENT_ID }}
tenant-id: ${{ secrets.AZURE_TENANT_ID }}
allow-no-subscriptions: true
- name: Publish to Registry
uses: azure/cli@v2
with:
azcliversion: latest
inlineScript: |
npx @vscode/vsce publish --azure-credential --packagePath *.vsix
run: |
npx @vscode/vsce publish -p $VSCE_TOKEN --packagePath *.vsix
open-vsx-publish:
name: Publish to Open VSX Registry
@@ -162,13 +144,6 @@ jobs:
env:
OPEN_VSX_TOKEN: ${{ secrets.OPEN_VSX_TOKEN }}
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version-file: extensions/ql-vscode/.nvmrc
- name: Download artifact
uses: actions/download-artifact@v4
with:

View File

@@ -34,7 +34,7 @@ jobs:
- name: Update Node version
working-directory: extensions/ql-vscode
run: |
npx vite-node scripts/update-node-version.ts
npx ts-node scripts/update-node-version.ts
shell: bash
- name: Get current Node version
working-directory: extensions/ql-vscode

View File

@@ -1 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
cd extensions/ql-vscode && npm run format-staged

View File

@@ -1 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
cd extensions/ql-vscode && ./scripts/forbid-test-only

View File

@@ -111,4 +111,10 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
},
"github.copilot.advanced": {
},
"codeQL.variantAnalysis.enableGhecDr": true,
"github-enterprise.uri": "http://server:8080/"
}
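
The two keys added to this settings file point the extension at a GitHub Enterprise (GHEC-DR) server. As an illustrative sketch only, not part of the diff, the snippet below shows how an extension could read those settings through the standard VS Code configuration API; the setting names come from the diff above, while the function name readGhecDrSettings and the return shape are hypothetical.

    // Illustrative sketch: reading the settings added above via the VS Code API.
    import * as vscode from "vscode";

    export function readGhecDrSettings(): {
      enableGhecDr: boolean;
      enterpriseUri: string | undefined;
    } {
      // "codeQL.variantAnalysis.enableGhecDr" -> section "codeQL.variantAnalysis", key "enableGhecDr"
      const variantAnalysis = vscode.workspace.getConfiguration("codeQL.variantAnalysis");
      // "github-enterprise.uri" -> section "github-enterprise", key "uri"
      const githubEnterprise = vscode.workspace.getConfiguration("github-enterprise");

      return {
        enableGhecDr: variantAnalysis.get<boolean>("enableGhecDr", false),
        enterpriseUri: githubEnterprise.get<string>("uri"),
      };
    }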

View File

@@ -78,7 +78,7 @@ $ vscode/scripts/code-cli.sh --install-extension dist/vscode-codeql-*.vsix # if
### Debugging
You can use VS Code to debug the extension without explicitly installing it. Just open this repository's root directory as a workspace in VS Code, and hit `F5` to start a debugging session.
You can use VS Code to debug the extension without explicitly installing it. Just open this directory as a workspace in VS Code, and hit `F5` to start a debugging session.
### Storybook

View File

@@ -6,7 +6,7 @@ The extension is released. You can download it from the [Visual Studio Marketpla
To see what has changed in the last few versions of the extension, see the [Changelog](https://github.com/github/vscode-codeql/blob/main/extensions/ql-vscode/CHANGELOG.md).
[![CI status badge](https://github.com/github/vscode-codeql/actions/workflows/main.yml/badge.svg?branch=main)](https://github.com/github/vscode-codeql/actions?query=workflow%3A%22Build+Extension%22+branch%3Amain)
[![CI status badge](https://github.com/github/vscode-codeql/workflows/Build%20Extension/badge.svg)](https://github.com/github/vscode-codeql/actions?query=workflow%3A%22Build+Extension%22+branch%3Amain)
[![VS Marketplace badge](https://vsmarketplacebadges.dev/version/github.vscode-codeql.svg)](https://marketplace.visualstudio.com/items?itemName=github.vscode-codeql)
## Features

View File

@@ -70,4 +70,4 @@ To regenerate the Open VSX token:
1. Go to the [Access Tokens](https://open-vsx.org/user-settings/tokens) page and generate a new token.
1. Update the secret in the `publish-open-vsx` environment in the project settings.
Publishing to the VS Code Marketplace is done using a user-assigned managed identity and should not require the token to be manually updated.
To regenerate the VSCode Marketplace token, please see our internal documentation. Note that Azure DevOps PATs expire every 7 days and must be regenerated.

View File

@@ -3,14 +3,5 @@ node_modules/
out/
build/
# Ignore js files
.eslintrc.js
jest.config.js
test/vscode-tests/activated-extension/jest-runner-vscode.config.js
test/vscode-tests/cli-integration/jest-runner-vscode.config.js
test/vscode-tests/jest-runner-vscode.config.base.js
test/vscode-tests/minimal-workspace/jest-runner-vscode.config.js
test/vscode-tests/no-workspace/jest-runner-vscode.config.js
# Include the Storybook config
!.storybook

View File

@@ -45,7 +45,7 @@ const baseConfig = {
"@typescript-eslint/no-invalid-this": "off",
"@typescript-eslint/no-shadow": "off",
"prefer-const": ["warn", { destructuring: "all" }],
"@typescript-eslint/only-throw-error": "error",
"@typescript-eslint/no-throw-literal": "error",
"@typescript-eslint/consistent-type-imports": "error",
"import/consistent-type-specifier-style": ["error", "prefer-top-level"],
curly: ["error", "all"],
@@ -133,7 +133,18 @@ module.exports = {
...baseConfig.rules,
// We want to allow mocking of functions in modules, so we need to allow namespace imports.
"import/no-namespace": "off",
"@typescript-eslint/no-unsafe-function-type": "off",
"@typescript-eslint/ban-types": [
"error",
{
// For a full list of the default banned types, see:
// https://github.com/typescript-eslint/typescript-eslint/blob/master/packages/eslint-plugin/docs/rules/ban-types.md
extendDefaults: true,
types: {
// Don't complain about the `Function` type in test files. (Default is `true`.)
Function: false,
},
},
],
},
},
{

View File

@@ -1 +1 @@
v20.18.1
v20.9.0

View File

@@ -2,7 +2,7 @@
"compilerOptions": {
"module": "esnext",
"moduleResolution": "node",
"target": "es2021",
"target": "es6",
"outDir": "out",
"lib": ["ES2021", "dom"],
"jsx": "react",

View File

@@ -1,6 +1,6 @@
import * as React from "react";
import { addons } from "@storybook/manager-api";
import { Addon_TypesEnum } from "storybook/internal/types";
import { Addon_TypesEnum } from "@storybook/types";
import { ThemeSelector } from "./ThemeSelector";
const ADDON_ID = "vscode-theme-addon";

View File

@@ -1,33 +1,6 @@
# CodeQL for Visual Studio Code: Changelog
## 1.16.2 - 19 December 2024
- Add a palette command that allows importing all databases directly inside of a parent folder. [#3797](https://github.com/github/vscode-codeql/pull/3797)
- Only use VS Code telemetry settings instead of using `codeQL.telemetry.enableTelemetry` [#3853](https://github.com/github/vscode-codeql/pull/3853)
- Improve the performance of the results view with large numbers of results. [#3862](https://github.com/github/vscode-codeql/pull/3862)
## 1.16.1 - 6 November 2024
- Support result columns of type `QlBuiltins::BigInt` in quick evaluations. [#3647](https://github.com/github/vscode-codeql/pull/3647)
- Fix a bug where the CodeQL CLI would be re-downloaded if you switched to a different filesystem (for example Codespaces or a remote SSH host). [#3762](https://github.com/github/vscode-codeql/pull/3762)
- Clean up old extension-managed CodeQL CLI distributions. [#3763](https://github.com/github/vscode-codeql/pull/3763)
- Only compare the source and sink of a path when comparing alerts of local queries. [#3772](https://github.com/github/vscode-codeql/pull/3772)
## 1.16.0 - 10 October 2024
- Increase the required version of VS Code to 1.90.0. [#3737](https://github.com/github/vscode-codeql/pull/3737)
- Fix a bug where some variant analysis results failed to download. [#3750](https://github.com/github/vscode-codeql/pull/3750)
## 1.15.0 - 26 September 2024
- Update results view to display the length of the shortest path for path queries. [#3687](https://github.com/github/vscode-codeql/pull/3687)
- Remove support for CodeQL CLI versions older than 2.16.6. [#3728](https://github.com/github/vscode-codeql/pull/3728)
## 1.14.0 - 7 August 2024
- Add Python support to the CodeQL Model Editor. [#3676](https://github.com/github/vscode-codeql/pull/3676)
- Update variant analysis view to display the length of the shortest path for path queries. [#3671](https://github.com/github/vscode-codeql/pull/3671)
- Remove support for CodeQL CLI versions older than 2.15.5. [#3681](https://github.com/github/vscode-codeql/pull/3681)
## [UNRELEASED]
## 1.13.1 - 29 May 2024

View File

@@ -1,5 +1,5 @@
import { src, dest } from "gulp";
// eslint-disable-next-line @typescript-eslint/no-require-imports,import/no-commonjs
// eslint-disable-next-line @typescript-eslint/no-var-requires,import/no-commonjs
const replace = require("gulp-replace");
/** Inject the application insights key into the telemetry file */

View File

@@ -1,4 +1,4 @@
{
"chromiumVersion": "122",
"electronVersion": "29.4.0"
"chromiumVersion": "114",
"electronVersion": "25.8.0"
}

View File

@@ -1,6 +1,6 @@
import { resolve } from "path";
import { deployPackage } from "./deploy";
import { spawn } from "cross-spawn";
import { spawn } from "child-process-promise";
export async function packageExtension(): Promise<void> {
const deployedPackage = await deployPackage();
@@ -16,22 +16,16 @@ export async function packageExtension(): Promise<void> {
`${deployedPackage.name}-${deployedPackage.version}.vsix`,
),
"--no-dependencies",
"--skip-license",
];
const proc = spawn(resolve(__dirname, "../node_modules/.bin/vsce"), args, {
cwd: deployedPackage.distPath,
stdio: ["ignore", "inherit", "inherit"],
});
proc.childProcess.stdout!.on("data", (data) => {
console.log(data.toString());
});
proc.childProcess.stderr!.on("data", (data) => {
console.error(data.toString());
});
await new Promise((resolve, reject) => {
proc.on("error", reject);
proc.on("close", (code) => {
if (code === 0) {
resolve(undefined);
} else {
reject(new Error(`Failed to package extension with code ${code}`));
}
});
});
await proc;
}

View File

@@ -77,8 +77,5 @@ export function copyWasmFiles() {
// to configure the path to the WASM file. So, source-map will always load the file from `__dirname/mappings.wasm`.
// In version 0.8.0, it may be possible to do this properly by calling SourceMapConsumer.initialize by
// using the "browser" field in source-map's package.json to load the WASM file from a given file path.
return src("node_modules/source-map/lib/mappings.wasm", {
// WASM is a binary format, so don't try to re-encode it as text.
encoding: false,
}).pipe(dest("out"));
return src("node_modules/source-map/lib/mappings.wasm").pipe(dest("out"));
}

File diff suppressed because it is too large.

View File

@@ -4,7 +4,7 @@
"description": "CodeQL for Visual Studio Code",
"author": "GitHub",
"private": true,
"version": "1.16.2",
"version": "1.13.2",
"publisher": "GitHub",
"license": "MIT",
"icon": "media/VS-marketplace-CodeQL-icon.png",
@@ -13,8 +13,8 @@
"url": "https://github.com/github/vscode-codeql"
},
"engines": {
"vscode": "^1.90.0",
"node": "^20.18.1",
"vscode": "^1.82.0",
"node": "^20.9.0",
"npm": ">=7.20.6"
},
"categories": [
@@ -302,8 +302,8 @@
"properties": {
"codeQL.queryHistory.format": {
"type": "string",
"default": "${queryName} on ${databaseName} - ${status} ${resultCount} [${startTime}]",
"markdownDescription": "Default string for how to label query history items.\n\nThe following variables are supported:\n* **${startTime}** - the time of the query\n* **${queryName}** - the human-readable query name\n* **${queryFileBasename}** - the query file's base name\n* **${queryLanguage}** - the query language\n* **${databaseName}** - the database name\n* **${resultCount}** - the number of results\n* **${status}** - a status string"
"default": "%q on %d - %s %r [%t]",
"markdownDescription": "Default string for how to label query history items.\n* %t is the time of the query\n* %q is the human-readable query name\n* %f is the query file name\n* %d is the database name\n* %r is the number of results\n* %s is a status string"
},
"codeQL.queryHistory.ttl": {
"type": "number",
@@ -339,13 +339,6 @@
"title": "Variant analysis",
"order": 5,
"properties": {
"codeQL.variantAnalysis.controllerRepo": {
"type": "string",
"default": "",
"pattern": "^$|^(?:[a-zA-Z0-9]+-)*[a-zA-Z0-9]+/[a-zA-Z0-9-_]+$",
"patternErrorMessage": "Please enter a valid GitHub repository",
"markdownDescription": "[For internal use only] The name of the GitHub repository in which the GitHub Actions workflow is run when using the \"Run Variant Analysis\" command. The repository should be of the form `<owner>/<repo>`)."
},
"codeQL.variantAnalysis.defaultResultsFilter": {
"type": "string",
"default": "all",
@@ -497,6 +490,16 @@
"title": "Telemetry",
"order": 11,
"properties": {
"codeQL.telemetry.enableTelemetry": {
"type": "boolean",
"default": false,
"scope": "application",
"markdownDescription": "Specifies whether to send CodeQL usage telemetry. This setting AND the one of the global telemetry settings (`#telemetry.enableTelemetry#` or `#telemetry.telemetryLevel#`) must be enabled for telemetry to be sent to GitHub. For more information, see the [telemetry documentation](https://codeql.github.com/docs/codeql-for-visual-studio-code/about-telemetry-in-codeql-for-visual-studio-code)",
"tags": [
"telemetry",
"usesOnlineServices"
]
},
"codeQL.telemetry.logTelemetry": {
"type": "boolean",
"default": false,
@@ -829,10 +832,6 @@
"command": "codeQL.chooseDatabaseFolder",
"title": "CodeQL: Choose Database from Folder"
},
{
"command": "codeQL.chooseDatabaseFoldersParent",
"title": "CodeQL: Import All Databases Directly Contained in a Parent Folder"
},
{
"command": "codeQL.chooseDatabaseArchive",
"title": "CodeQL: Choose Database from Archive"
@@ -1784,7 +1783,8 @@
"when": "false"
},
{
"command": "codeQL.trimCache"
"command": "codeQL.trimCache",
"when": "codeql.supportsTrimCache"
}
],
"editor/context": [
@@ -1924,11 +1924,6 @@
{
"view": "codeQLEvalLogViewer",
"contents": "Run the 'Show Evaluator Log (UI)' command on a CodeQL query run in the Query History view."
},
{
"view": "codeQLVariantAnalysisRepositories",
"contents": "Set up a controller repository to start using variant analysis. [Learn more](https://codeql.github.com/docs/codeql-for-visual-studio-code/running-codeql-queries-at-scale-with-mrva#controller-repository) about controller repositories. \n[Set up controller repository](command:codeQLVariantAnalysisRepositories.setupControllerRepository)",
"when": "!config.codeQL.variantAnalysis.controllerRepo"
}
]
},
@@ -1947,7 +1942,6 @@
"update-vscode": "node ./node_modules/vscode/bin/install",
"format": "prettier --write **/*.{ts,tsx} && eslint . --ext .ts,.tsx --fix",
"lint": "eslint . --ext .js,.ts,.tsx --max-warnings=0",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint . --ext .js,.ts,.tsx --max-warnings=0 --format @microsoft/eslint-formatter-sarif --output-file=build/eslint.sarif",
"lint:markdown": "markdownlint-cli2 \"../../**/*.{md,mdx}\" \"!**/node_modules/**\" \"!**/.vscode-test/**\" \"!**/build/cli/v*/**\"",
"find-deadcode": "vite-node scripts/find-deadcode.ts",
"format-staged": "lint-staged",
@@ -1957,73 +1951,73 @@
"generate": "npm-run-all -p generate:*",
"generate:schemas": "vite-node scripts/generate-schemas.ts",
"generate:chromium-version": "vite-node scripts/generate-chromium-version.ts",
"check-types": "find . -type f -name \"tsconfig.json\" -not -path \"./node_modules/*\" -not -path \"*/.vscode-test/*\" | sed -r 's|/[^/]+$||' | sort | uniq | xargs -I {} sh -c \"echo Checking types in {} && cd {} && npx tsc --noEmit\"",
"check-types": "find . -type f -name \"tsconfig.json\" -not -path \"./node_modules/*\" | sed -r 's|/[^/]+$||' | sort | uniq | xargs -I {} sh -c \"echo Checking types in {} && cd {} && npx tsc --noEmit\"",
"postinstall": "patch-package",
"prepare": "cd ../.. && husky"
"prepare": "cd ../.. && husky install"
},
"dependencies": {
"@floating-ui/react": "^0.27.0",
"@octokit/plugin-retry": "^7.1.2",
"@octokit/plugin-throttling": "^9.3.2",
"@octokit/rest": "^21.0.2",
"@vscode/codicons": "^0.0.36",
"@floating-ui/react": "^0.26.12",
"@octokit/plugin-retry": "^6.0.1",
"@octokit/plugin-throttling": "^8.0.0",
"@octokit/rest": "^20.0.2",
"@vscode/codicons": "^0.0.35",
"@vscode/debugadapter": "^1.59.0",
"@vscode/debugprotocol": "^1.68.0",
"@vscode/debugprotocol": "^1.65.0",
"@vscode/webview-ui-toolkit": "^1.0.1",
"ajv": "^8.11.0",
"child-process-promise": "^2.2.1",
"chokidar": "^3.6.0",
"d3": "^7.9.0",
"d3-graphviz": "^5.0.2",
"fs-extra": "^11.1.1",
"js-yaml": "^4.1.0",
"msw": "^2.6.8",
"msw": "^2.2.13",
"nanoid": "^5.0.7",
"node-fetch": "^2.6.7",
"p-queue": "^8.0.1",
"proper-lockfile": "^4.1.2",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"semver": "^7.6.2",
"source-map": "^0.7.4",
"source-map-support": "^0.5.21",
"stream-json": "^1.7.3",
"styled-components": "^6.1.13",
"styled-components": "^6.1.9",
"tmp": "^0.2.1",
"tmp-promise": "^3.0.2",
"tree-kill": "^1.2.2",
"vscode-extension-telemetry": "^0.1.6",
"vscode-jsonrpc": "^8.2.1",
"vscode-jsonrpc": "^8.0.2",
"vscode-languageclient": "^8.0.2",
"yauzl": "^2.10.0",
"zip-a-folder": "^3.1.6"
},
"devDependencies": {
"@babel/core": "^7.24.6",
"@babel/plugin-transform-modules-commonjs": "^7.26.3",
"@babel/plugin-transform-modules-commonjs": "^7.18.6",
"@babel/preset-env": "^7.24.4",
"@babel/preset-react": "^7.25.9",
"@babel/preset-typescript": "^7.26.0",
"@faker-js/faker": "^9.0.3",
"@github/markdownlint-github": "^0.6.3",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@playwright/test": "^1.49.0",
"@storybook/addon-a11y": "^8.4.7",
"@storybook/addon-actions": "^8.4.7",
"@storybook/addon-essentials": "^8.4.7",
"@storybook/addon-interactions": "^8.4.7",
"@storybook/addon-links": "^8.4.7",
"@babel/preset-react": "^7.24.1",
"@babel/preset-typescript": "^7.21.4",
"@faker-js/faker": "^8.4.1",
"@github/markdownlint-github": "^0.6.2",
"@playwright/test": "^1.40.1",
"@storybook/addon-a11y": "^8.1.10",
"@storybook/addon-actions": "^8.1.10",
"@storybook/addon-essentials": "^8.1.10",
"@storybook/addon-interactions": "^8.1.10",
"@storybook/addon-links": "^8.1.10",
"@storybook/blocks": "^8.0.2",
"@storybook/components": "^8.4.7",
"@storybook/csf": "^0.1.12",
"@storybook/icons": "^1.3.0",
"@storybook/manager-api": "^8.4.7",
"@storybook/react": "^8.4.7",
"@storybook/react-vite": "^8.4.7",
"@storybook/theming": "^8.2.4",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.1.0",
"@storybook/components": "^8.0.2",
"@storybook/csf": "^0.1.8",
"@storybook/icons": "^1.2.9",
"@storybook/manager-api": "^8.1.10",
"@storybook/react": "^8.1.10",
"@storybook/react-vite": "^8.1.10",
"@storybook/theming": "^8.1.10",
"@testing-library/dom": "^10.1.0",
"@testing-library/jest-dom": "^6.4.6",
"@testing-library/react": "^16.0.0",
"@testing-library/user-event": "^14.5.2",
"@types/cross-spawn": "^6.0.6",
"@types/child-process-promise": "^2.2.1",
"@types/d3": "^7.4.0",
"@types/d3-graphviz": "^2.6.6",
"@types/del": "^4.0.0",
@@ -2032,10 +2026,11 @@
"@types/gulp-replace": "^1.1.0",
"@types/jest": "^29.5.12",
"@types/js-yaml": "^4.0.6",
"@types/node": "20.17.*",
"@types/proper-lockfile": "^4.1.4",
"@types/react": "^18.3.12",
"@types/react-dom": "^18.3.1",
"@types/nanoid": "^3.0.0",
"@types/node": "20.9.*",
"@types/node-fetch": "^2.5.2",
"@types/react": "^18.3.1",
"@types/react-dom": "^18.3.0",
"@types/sarif": "^2.1.2",
"@types/semver": "^7.5.8",
"@types/stream-json": "^1.7.1",
@@ -2043,55 +2038,54 @@
"@types/tar-stream": "^3.1.3",
"@types/through2": "^2.0.36",
"@types/tmp": "^0.2.6",
"@types/vscode": "1.90.0",
"@types/vscode": "^1.82.0",
"@types/yauzl": "^2.10.3",
"@typescript-eslint/eslint-plugin": "^8.18.0",
"@typescript-eslint/parser": "^8.18.0",
"@typescript-eslint/eslint-plugin": "^7.5.0",
"@typescript-eslint/parser": "^7.5.0",
"@vscode/test-electron": "^2.3.9",
"@vscode/vsce": "^2.24.0",
"ansi-colors": "^4.1.1",
"applicationinsights": "^2.9.5",
"cosmiconfig": "^9.0.0",
"cross-env": "^7.0.3",
"cross-spawn": "^7.0.6",
"del": "^6.0.0",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.0.0",
"eslint-import-resolver-typescript": "^3.6.3",
"eslint-plugin-deprecation": "^3.0.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-deprecation": "^2.0.0",
"eslint-plugin-etc": "^2.0.2",
"eslint-plugin-github": "^5.0.1",
"eslint-plugin-import": "^2.31.0",
"eslint-plugin-jest-dom": "^5.4.0",
"eslint-plugin-github": "^4.10.2",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-jest-dom": "^5.2.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-react": "^7.37.2",
"eslint-plugin-react-hooks": "^4.6.2",
"eslint-plugin-react": "^7.34.1",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-storybook": "^0.8.0",
"glob": "^11.0.0",
"gulp": "^5.0.0",
"gulp-esbuild": "^0.12.1",
"glob": "^10.0.0",
"gulp": "^4.0.2",
"gulp-esbuild": "^0.12.0",
"gulp-replace": "^1.1.3",
"gulp-typescript": "^5.0.1",
"husky": "^9.1.5",
"husky": "^9.1.7",
"jest": "^29.0.3",
"jest-environment-jsdom": "^29.0.3",
"jest-runner-vscode": "^3.0.1",
"lint-staged": "^15.2.10",
"lint-staged": "^15.2.2",
"markdownlint-cli2": "^0.13.0",
"markdownlint-cli2-formatter-pretty": "^0.0.7",
"markdownlint-cli2-formatter-pretty": "^0.0.6",
"npm-run-all": "^4.1.5",
"patch-package": "^8.0.0",
"prettier": "^3.2.5",
"storybook": "^8.4.7",
"storybook": "^8.1.10",
"tar-stream": "^3.1.7",
"through2": "^4.0.2",
"ts-jest": "^29.2.5",
"ts-json-schema-generator": "^2.3.0",
"ts-jest": "^29.1.4",
"ts-json-schema-generator": "^2.1.1",
"ts-node": "^10.9.2",
"ts-unused-exports": "^10.1.0",
"typescript": "^5.6.2",
"vite": "^6.0.1",
"vite-node": "^2.0.5"
"typescript": "^5.0.2",
"vite": "^5.2.11",
"vite-node": "^1.5.3"
},
"lint-staged": {
"./**/*.{json,css,scss}": [

View File

@@ -1,3 +1,29 @@
diff --git a/node_modules/jest-runner-vscode/dist/child/environment.js b/node_modules/jest-runner-vscode/dist/child/environment.js
index 1ac28d5..f91f216 100644
--- a/node_modules/jest-runner-vscode/dist/child/environment.js
+++ b/node_modules/jest-runner-vscode/dist/child/environment.js
@@ -10,6 +10,21 @@ const wrap_io_1 = __importDefault(require("./wrap-io"));
const load_pnp_1 = __importDefault(require("./load-pnp"));
const ipc = new ipc_client_1.default('env');
class VSCodeEnvironment extends jest_environment_node_1.default {
+ constructor(config, context) {
+ super(config, context);
+ // The _VSCODE_NODE_MODULES is a proxy which will require a module if any property
+ // on it is accessed. This is a workaround for the fact that jest will call
+ // _isMockFunction on the module, which will cause that function to be required.
+ this.global._VSCODE_NODE_MODULES = new Proxy(this.global._VSCODE_NODE_MODULES, {
+ get(target, prop) {
+ if (prop === '_isMockFunction') {
+ return undefined;
+ }
+ return target[prop];
+ },
+ });
+ }
+
async setup() {
await super.setup();
await (0, load_pnp_1.default)();
diff --git a/node_modules/jest-runner-vscode/dist/child/runner.js b/node_modules/jest-runner-vscode/dist/child/runner.js
index 0663c5c..bdf4a8b 100644
--- a/node_modules/jest-runner-vscode/dist/child/runner.js

View File

@@ -41,7 +41,6 @@ async function extractSourceMap() {
const releaseAssetsDirectory = resolve(
__dirname,
"..",
"artifacts",
"release-assets",
versionNumber,
);
@@ -65,9 +64,7 @@ async function extractSourceMap() {
]);
const sourcemapAsset = release.assets.find(
(asset) =>
asset.label === `vscode-codeql-sourcemaps-${versionNumber}.zip` ||
asset.name === "vscode-codeql-sourcemaps.zip",
(asset) => asset.name === `vscode-codeql-sourcemaps-${versionNumber}.zip`,
);
if (sourcemapAsset) {
@@ -216,7 +213,9 @@ extractSourceMap().catch((e: unknown) => {
function runGh(args: readonly string[]): string {
const gh = spawnSync("gh", args);
if (gh.status !== 0) {
throw new Error(`Failed to run gh ${args.join(" ")}: ${gh.stderr}`);
throw new Error(
`Failed to get the source map for ${versionNumber}: ${gh.stderr}`,
);
}
return gh.stdout.toString("utf-8");
}
@@ -228,7 +227,6 @@ function runGhJSON<T>(args: readonly string[]): T {
type ReleaseAsset = {
id: string;
name: string;
label: string;
};
type Release = {

View File

@@ -1,9 +1,8 @@
import { join, resolve } from "path";
import { execSync } from "child_process";
import { outputFile, readJSON } from "fs-extra";
import { outputFile, readFile, readJSON } from "fs-extra";
import { getVersionInformation } from "./util/vscode-versions";
import { fetchJson } from "./util/fetch";
import { SemVer } from "semver";
const extensionDirectory = resolve(__dirname, "..");
@@ -11,29 +10,6 @@ interface Release {
tag_name: string;
}
interface NpmViewError {
error: {
code: string;
summary: string;
detail: string;
};
}
interface ExecError extends Error {
status: number;
stdout: string;
}
function isExecError(e: unknown): e is ExecError {
return (
e instanceof Error &&
"status" in e &&
typeof e.status === "number" &&
"stdout" in e &&
typeof e.stdout === "string"
);
}
async function updateNodeVersion() {
const latestVsCodeRelease = await fetchJson<Release>(
"https://api.github.com/repos/microsoft/vscode/releases/latest",
@@ -47,7 +23,19 @@ async function updateNodeVersion() {
`VS Code ${versionInformation.vscodeVersion} uses Electron ${versionInformation.electronVersion} and Node ${versionInformation.nodeVersion}`,
);
console.log("Updating files related to the Node version");
let currentNodeVersion = (
await readFile(join(extensionDirectory, ".nvmrc"), "utf8")
).trim();
if (currentNodeVersion.startsWith("v")) {
currentNodeVersion = currentNodeVersion.slice(1);
}
if (currentNodeVersion === versionInformation.nodeVersion) {
console.log("Node version is already up to date");
return;
}
console.log("Node version needs to be updated, updating now");
await outputFile(
join(extensionDirectory, ".nvmrc"),
@@ -61,8 +49,6 @@ async function updateNodeVersion() {
"utf8",
);
const nodeVersion = new SemVer(versionInformation.nodeVersion);
// The @types/node version needs to match the first two parts of the Node
// version, e.g. if the Node version is 18.17.3, the @types/node version
// should be 18.17.*. This corresponds with the documentation at
@@ -70,56 +56,13 @@ async function updateNodeVersion() {
// "The patch version of the type declaration package is unrelated to the library patch version. This allows
// Definitely Typed to safely update type declarations for the same major/minor version of a library."
// 18.17.* is equivalent to >=18.17.0 <18.18.0
// In some cases, the @types/node version matching the exact Node version may not exist, in which case we'll try
// the next lower minor version, and so on, until we find a version that exists.
const typesNodeSemver = new SemVer(nodeVersion);
typesNodeSemver.patch = 0;
// eslint-disable-next-line no-constant-condition
while (true) {
const typesNodeVersion = `${typesNodeSemver.major}.${typesNodeSemver.minor}.*`;
try {
// Check that this version actually exists
console.log(`Checking if @types/node@${typesNodeVersion} exists`);
execSync(`npm view --json "@types/node@${typesNodeVersion}"`, {
encoding: "utf-8",
stdio: "pipe",
maxBuffer: 10 * 1024 * 1024,
});
console.log(`@types/node@${typesNodeVersion} exists`);
// If it exists, we can break out of this loop
break;
} catch (e: unknown) {
if (!isExecError(e)) {
throw e;
}
const error = JSON.parse(e.stdout) as NpmViewError;
if (error.error.code !== "E404") {
throw new Error(error.error.detail);
}
console.log(
`@types/node package doesn't exist for ${typesNodeVersion}, trying a lower version (${error.error.summary})`,
);
// This means the version doesn't exist, so we'll try decrementing the minor version
typesNodeSemver.minor -= 1;
if (typesNodeSemver.minor < 0) {
throw new Error(
`Could not find a suitable @types/node version for Node ${nodeVersion.format()}`,
);
}
}
}
const typesNodeVersion = versionInformation.nodeVersion
.split(".")
.slice(0, 2)
.join(".");
packageJson.engines.node = `^${versionInformation.nodeVersion}`;
packageJson.devDependencies["@types/node"] =
`${typesNodeSemver.major}.${typesNodeSemver.minor}.*`;
packageJson.devDependencies["@types/node"] = `${typesNodeVersion}.*`;
await outputFile(
join(extensionDirectory, "package.json"),

View File

@@ -12,7 +12,6 @@ interface VersionResult {
export interface CliFeatures {
featuresInVersionResult?: boolean;
mrvaPackCreate?: boolean;
generateSummarySymbolMap?: boolean;
}
export interface VersionAndFeatures {

View File

@@ -1,9 +1,9 @@
import { EOL } from "os";
import { spawn } from "cross-spawn";
import { spawn } from "child-process-promise";
import type { ChildProcessWithoutNullStreams } from "child_process";
import { spawn as spawnChildProcess } from "child_process";
import { readFile } from "fs-extra";
import { delimiter, join } from "path";
import { delimiter, dirname, join } from "path";
import type { Log } from "sarif";
import { SemVer } from "semver";
import type { Readable } from "stream";
@@ -37,7 +37,6 @@ import { LOGGING_FLAGS } from "./cli-command";
import type { CliFeatures, VersionAndFeatures } from "./cli-version";
import { ExitCodeError, getCliError } from "./cli-errors";
import { UserCancellationException } from "../common/vscode/progress";
import type { LanguageClient } from "vscode-languageclient/node";
/**
* The version of the SARIF format that we are using.
@@ -92,6 +91,15 @@ export type QlpacksInfo = { [name: string]: string[] };
*/
type LanguagesInfo = { [name: string]: string[] };
/** Information about an ML model, as resolved by `codeql resolve ml-models`. */
type MlModelInfo = {
checksum: string;
path: string;
};
/** The expected output of `codeql resolve ml-models`. */
type MlModelsInfo = { models: MlModelInfo[] };
/** Information about a data extension predicate, as resolved by `codeql resolve extensions`. */
type DataExtensionResult = {
predicate: string;
@@ -101,6 +109,7 @@ type DataExtensionResult = {
/** The expected output of `codeql resolve extensions`. */
type ResolveExtensionsResult = {
models: MlModelInfo[];
data: {
[path: string]: DataExtensionResult[];
};
@@ -278,7 +287,6 @@ export class CodeQLCliServer implements Disposable {
constructor(
private readonly app: App,
private readonly languageClient: LanguageClient,
private distributionProvider: DistributionProvider,
private cliConfig: CliConfig,
public readonly logger: Logger,
@@ -718,7 +726,13 @@ export class CodeQLCliServer implements Disposable {
// Spawn the CodeQL process
const codeqlPath = await this.getCodeQlPath();
const child = spawn(codeqlPath, args);
const childPromise = spawn(codeqlPath, args);
// Avoid a runtime message about unhandled rejection.
childPromise.catch(() => {
/**/
});
const child = childPromise.childProcess;
let cancellationRegistration: Disposable | undefined = undefined;
try {
@@ -731,28 +745,16 @@ export class CodeQLCliServer implements Disposable {
}
if (logger !== undefined) {
// The human-readable output goes to stderr.
void logStream(child.stderr, logger);
void logStream(child.stderr!, logger);
}
for await (const event of splitStreamAtSeparators(child.stdout, ["\0"])) {
for await (const event of splitStreamAtSeparators(child.stdout!, [
"\0",
])) {
yield event;
}
await new Promise((resolve, reject) => {
child.on("error", reject);
child.on("close", (code) => {
if (code === 0) {
resolve(undefined);
} else {
reject(
new Error(
`${command} ${commandArgs.join(" ")} failed with code ${code}`,
),
);
}
});
});
await childPromise;
} finally {
if (cancellationRegistration !== undefined) {
cancellationRegistration.dispose();
@@ -941,7 +943,7 @@ export class CodeQLCliServer implements Disposable {
if (line.startsWith("Enter value for --github-auth-stdin")) {
try {
return await this.app.credentials.getAccessToken();
} catch {
} catch (e) {
// If the user cancels the authentication prompt, we still need to give a value to the CLI.
// By giving a potentially invalid value, the user will just get a 401/403 when they try to access a
// private package and the access token is invalid.
@@ -1095,6 +1097,24 @@ export class CodeQLCliServer implements Disposable {
);
}
/** Resolves the ML models that should be available when evaluating a query. */
async resolveMlModels(
additionalPacks: string[],
queryPath: string,
): Promise<MlModelsInfo> {
const args =
// use the dirname of the path so that we can handle query libraries
[...this.getAdditionalPacksArg(additionalPacks), dirname(queryPath)];
return await this.runJsonCodeQlCliCommand<MlModelsInfo>(
["resolve", "ml-models"],
args,
"Resolving ML models",
{
addFormat: false,
},
);
}
/**
* Gets the RAM setting for the query server.
* @param queryMemoryMb The maximum amount of RAM to use, in MB.
@@ -1191,15 +1211,10 @@ export class CodeQLCliServer implements Disposable {
outputPath: string,
endSummaryPath: string,
): Promise<string> {
const supportsGenerateSummarySymbolMap =
await this.cliConstraints.supportsGenerateSummarySymbolMap();
const subcommandArgs = [
"--format=text",
`--end-summary=${endSummaryPath}`,
"--sourcemap",
...(supportsGenerateSummarySymbolMap
? ["--summary-symbol-map", "--minify-output"]
: []),
inputPath,
outputPath,
];
@@ -1586,13 +1601,11 @@ export class CodeQLCliServer implements Disposable {
async packAdd(dir: string, queryLanguage: QueryLanguage) {
const args = ["--dir", dir];
args.push(`codeql/${queryLanguage}-all`);
const ret = await this.runCodeQlCliCommand(
return this.runCodeQlCliCommand(
["pack", "add"],
args,
`Adding and installing ${queryLanguage} pack dependency.`,
);
await this.notifyPackInstalled();
return ret;
}
/**
@@ -1627,18 +1640,16 @@ export class CodeQLCliServer implements Disposable {
args.push(
// Allow prerelease packs from the ql submodule.
"--allow-prerelease",
// Allow the use of --additional-packs argument without issuing a warning
// Allow the use of --additional-packs argument without issueing a warning
"--no-strict-mode",
...this.getAdditionalPacksArg(workspaceFolders),
);
}
const ret = await this.runJsonCodeQlCliCommandWithAuthentication(
return this.runJsonCodeQlCliCommandWithAuthentication(
["pack", "install"],
args,
"Installing pack dependencies",
);
await this.notifyPackInstalled();
return ret;
}
/**
@@ -1739,6 +1750,14 @@ export class CodeQLCliServer implements Disposable {
this._versionChangedListeners.forEach((listener) =>
listener(newVersionAndFeatures),
);
// this._version is only undefined upon config change, so we reset CLI-based context key only when necessary.
await this.app.commands.execute(
"setContext",
"codeql.supportsTrimCache",
newVersionAndFeatures.version.compare(
CliVersionConstraint.CLI_VERSION_WITH_TRIM_CACHE,
) >= 0,
);
} catch (e) {
this._versionChangedListeners.forEach((listener) =>
listener(undefined),
@@ -1756,17 +1775,6 @@ export class CodeQLCliServer implements Disposable {
this._versionChangedListeners.push(listener);
}
/**
* This method should be called after a pack has been installed.
*
* This restarts the language client. Restarting the language client has the
* effect of removing compilation errors in open ql/qll files that are caused
* by the pack not having been installed previously.
*/
private async notifyPackInstalled() {
await this.languageClient.restart();
}
private async refreshVersion(): Promise<VersionAndFeatures> {
const distribution = await this.distributionProvider.getDistribution();
switch (distribution.kind) {
@@ -1904,17 +1912,45 @@ function shouldDebugCliServer() {
export class CliVersionConstraint {
// The oldest version of the CLI that we support. This is used to determine
// whether to show a warning about the CLI being too old on startup.
public static OLDEST_SUPPORTED_CLI_VERSION = new SemVer("2.16.6");
public static OLDEST_SUPPORTED_CLI_VERSION = new SemVer("2.14.6");
/**
* CLI version where the query server supports the `evaluation/trimCache` method
* with `codeql database cleanup --mode=trim` semantics.
*/
public static CLI_VERSION_WITH_TRIM_CACHE = new SemVer("2.15.1");
public static CLI_VERSION_WITHOUT_MRVA_EXTENSIBLE_PREDICATE_HACK = new SemVer(
"2.16.1",
);
/**
* CLI version where there is support for multiple queries on the pack create command.
*/
public static CLI_VERSION_WITH_MULTI_QUERY_PACK_CREATE = new SemVer("2.16.1");
constructor(private readonly cli: CodeQLCliServer) {
/**/
}
private async isVersionAtLeast(v: SemVer) {
return (await this.cli.getVersion()).compare(v) >= 0;
}
async preservesExtensiblePredicatesInMrvaPack() {
// Negated, because we _stopped_ preserving these in 2.16.1.
return !(await this.isVersionAtLeast(
CliVersionConstraint.CLI_VERSION_WITHOUT_MRVA_EXTENSIBLE_PREDICATE_HACK,
));
}
async supportsPackCreateWithMultipleQueries() {
return this.isVersionAtLeast(
CliVersionConstraint.CLI_VERSION_WITH_MULTI_QUERY_PACK_CREATE,
);
}
async supportsMrvaPackCreate(): Promise<boolean> {
return (await this.cli.getFeatures()).mrvaPackCreate === true;
}
async supportsGenerateSummarySymbolMap(): Promise<boolean> {
return (await this.cli.getFeatures()).generateSummarySymbolMap === true;
}
}

View File

@@ -1,12 +1,5 @@
import type { WriteStream } from "fs";
import {
createWriteStream,
mkdtemp,
outputJson,
pathExists,
readJson,
remove,
} from "fs-extra";
import { createWriteStream, mkdtemp, pathExists, remove } from "fs-extra";
import { tmpdir } from "os";
import { delimiter, dirname, join } from "path";
import { Range, satisfies } from "semver";
@@ -26,9 +19,7 @@ import {
InvocationRateLimiter,
InvocationRateLimiterResultKind,
} from "../common/invocation-rate-limiter";
import type { NotificationLogger } from "../common/logging";
import {
showAndLogExceptionWithTelemetry,
showAndLogErrorMessage,
showAndLogWarningMessage,
} from "../common/logging";
@@ -37,12 +28,7 @@ import { reportUnzipProgress } from "../common/vscode/unzip-progress";
import type { Release } from "./distribution/release";
import { ReleasesApiConsumer } from "./distribution/releases-api-consumer";
import { createTimeoutSignal } from "../common/fetch-stream";
import { withDistributionUpdateLock } from "./lock";
import { asError, getErrorMessage } from "../common/helpers-pure";
import { isIOError } from "../common/files";
import { telemetryListener } from "../common/vscode/telemetry";
import { redactableError } from "../common/errors";
import { ExtensionManagedDistributionCleaner } from "./distribution/cleaner";
import { AbortError } from "node-fetch";
/**
* distribution.ts
@@ -68,11 +54,6 @@ const NIGHTLY_DISTRIBUTION_REPOSITORY_NWO = "dsp-testing/codeql-cli-nightlies";
*/
export const DEFAULT_DISTRIBUTION_VERSION_RANGE: Range = new Range("2.x");
export interface DistributionState {
folderIndex: number;
release: Release | null;
}
export interface DistributionProvider {
getCodeQlPathWithoutVersionCheck(): Promise<string | undefined>;
onDidChangeDistribution?: Event<void>;
@@ -84,7 +65,6 @@ export class DistributionManager implements DistributionProvider {
public readonly config: DistributionConfig,
private readonly versionRange: Range,
extensionContext: ExtensionContext,
logger: NotificationLogger,
) {
this._onDidChangeDistribution = config.onDidChangeConfiguration;
this.extensionSpecificDistributionManager =
@@ -92,7 +72,6 @@ export class DistributionManager implements DistributionProvider {
config,
versionRange,
extensionContext,
logger,
);
this.updateCheckRateLimiter = new InvocationRateLimiter(
extensionContext.globalState,
@@ -100,16 +79,6 @@ export class DistributionManager implements DistributionProvider {
() =>
this.extensionSpecificDistributionManager.checkForUpdatesToDistribution(),
);
this.extensionManagedDistributionCleaner =
new ExtensionManagedDistributionCleaner(
extensionContext,
logger,
this.extensionSpecificDistributionManager,
);
}
public async initialize(): Promise<void> {
await this.extensionSpecificDistributionManager.initialize();
}
/**
@@ -287,10 +256,6 @@ export class DistributionManager implements DistributionProvider {
);
}
public startCleanup() {
this.extensionManagedDistributionCleaner.start();
}
public get onDidChangeDistribution(): Event<void> | undefined {
return this._onDidChangeDistribution;
}
@@ -312,63 +277,18 @@ export class DistributionManager implements DistributionProvider {
private readonly extensionSpecificDistributionManager: ExtensionSpecificDistributionManager;
private readonly updateCheckRateLimiter: InvocationRateLimiter<DistributionUpdateCheckResult>;
private readonly extensionManagedDistributionCleaner: ExtensionManagedDistributionCleaner;
private readonly _onDidChangeDistribution: Event<void> | undefined;
}
class ExtensionSpecificDistributionManager {
private distributionState: DistributionState | undefined;
constructor(
private readonly config: DistributionConfig,
private readonly versionRange: Range,
private readonly extensionContext: ExtensionContext,
private readonly logger: NotificationLogger,
) {
/**/
}
public async initialize() {
await this.ensureDistributionStateExists();
}
private async ensureDistributionStateExists() {
const distributionStatePath = this.getDistributionStatePath();
try {
this.distributionState = await readJson(distributionStatePath);
} catch (e: unknown) {
if (isIOError(e) && e.code === "ENOENT") {
// If the file doesn't exist, that just means we need to create it
this.distributionState = {
folderIndex:
this.extensionContext.globalState.get(
"distributionFolderIndex",
0,
) ?? 0,
release: (this.extensionContext.globalState.get(
"distributionRelease",
) ?? null) as Release | null,
};
// This may result in a race condition, but when this happens both processes should write the same file.
await outputJson(distributionStatePath, this.distributionState);
} else {
void showAndLogExceptionWithTelemetry(
this.logger,
telemetryListener,
redactableError(
asError(e),
)`Failed to read distribution state from ${distributionStatePath}: ${getErrorMessage(e)}`,
);
this.distributionState = {
folderIndex: 0,
release: null,
};
}
}
}
public async getCodeQlPathWithoutVersionCheck(): Promise<string | undefined> {
if (this.getInstalledRelease() !== undefined) {
// An extension specific distribution has been installed.
@@ -431,21 +351,9 @@ class ExtensionSpecificDistributionManager {
release: Release,
progressCallback?: ProgressCallback,
): Promise<void> {
if (!this.distributionState) {
await this.ensureDistributionStateExists();
}
const distributionStatePath = this.getDistributionStatePath();
await withDistributionUpdateLock(
// .lock will be appended to this filename
distributionStatePath,
async () => {
await this.downloadDistribution(release, progressCallback);
// Store the installed release within the global extension state.
await this.storeInstalledRelease(release);
},
);
await this.downloadDistribution(release, progressCallback);
// Store the installed release within the global extension state.
await this.storeInstalledRelease(release);
}
private async downloadDistribution(
@@ -496,11 +404,6 @@ class ExtensionSpecificDistributionManager {
signal,
);
const body = assetStream.body;
if (!body) {
throw new Error("No body in asset stream");
}
const archivePath = join(tmpDirectory, "distributionDownload.zip");
archiveFile = createWriteStream(archivePath);
@@ -508,40 +411,27 @@ class ExtensionSpecificDistributionManager {
const totalNumBytes = contentLength
? parseInt(contentLength, 10)
: undefined;
const reportProgress = reportStreamProgress(
reportStreamProgress(
assetStream.body,
`Downloading CodeQL CLI ${release.name}`,
totalNumBytes,
progressCallback,
);
const reader = body.getReader();
for (;;) {
const { done, value } = await reader.read();
if (done) {
break;
}
onData();
reportProgress(value?.length ?? 0);
await new Promise((resolve, reject) => {
archiveFile?.write(value, (err) => {
if (err) {
reject(err);
}
resolve(undefined);
});
});
}
assetStream.body.on("data", onData);
await new Promise((resolve, reject) => {
archiveFile?.close((err) => {
if (err) {
reject(err);
}
resolve(undefined);
});
if (!archiveFile) {
throw new Error("Invariant violation: archiveFile not set");
}
assetStream.body
.pipe(archiveFile)
.on("finish", resolve)
.on("error", reject);
// If an error occurs on the body, we also want to reject the promise (e.g. during a timeout error).
assetStream.body.on("error", reject);
});
disposeTimeout();
@@ -562,8 +452,8 @@ class ExtensionSpecificDistributionManager {
: undefined,
);
} catch (e) {
if (e instanceof DOMException && e.name === "AbortError") {
const thrownError = new Error("The download timed out.");
if (e instanceof AbortError) {
const thrownError = new AbortError("The download timed out.");
thrownError.stack = e.stack;
throw thrownError;
}
@@ -657,19 +547,23 @@ class ExtensionSpecificDistributionManager {
}
private async bumpDistributionFolderIndex(): Promise<void> {
await this.updateState((oldState) => {
return {
...oldState,
folderIndex: (oldState.folderIndex ?? 0) + 1,
};
});
const index = this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
0,
);
await this.extensionContext.globalState.update(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
index + 1,
);
}
private getDistributionStoragePath(): string {
const distributionState = this.getDistributionState();
// Use an empty string for the initial distribution for backwards compatibility.
const distributionFolderIndex = distributionState.folderIndex || "";
const distributionFolderIndex =
this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._currentDistributionFolderIndexStateKey,
0,
) || "";
return join(
this.extensionContext.globalStorageUri.fsPath,
ExtensionSpecificDistributionManager._currentDistributionFolderBaseName +
@@ -684,65 +578,26 @@ class ExtensionSpecificDistributionManager {
);
}
private getDistributionStatePath(): string {
return join(
this.extensionContext.globalStorageUri.fsPath,
ExtensionSpecificDistributionManager._distributionStateFilename,
);
}
private getInstalledRelease(): Release | undefined {
return this.getDistributionState().release ?? undefined;
return this.extensionContext.globalState.get(
ExtensionSpecificDistributionManager._installedReleaseStateKey,
);
}
private async storeInstalledRelease(
release: Release | undefined,
): Promise<void> {
await this.updateState((oldState) => ({
...oldState,
release: release ?? null,
}));
}
private getDistributionState(): DistributionState {
const distributionState = this.distributionState;
if (distributionState === undefined) {
throw new Error(
"Invariant violation: distribution state not initialized",
);
}
return distributionState;
}
private async updateState(
f: (oldState: DistributionState) => DistributionState,
) {
const oldState = this.distributionState;
if (oldState === undefined) {
throw new Error(
"Invariant violation: distribution state not initialized",
);
}
const newState = f(oldState);
this.distributionState = newState;
const distributionStatePath = this.getDistributionStatePath();
await outputJson(distributionStatePath, newState);
}
public get folderIndex() {
const distributionState = this.getDistributionState();
return distributionState.folderIndex;
}
public get distributionFolderPrefix() {
return ExtensionSpecificDistributionManager._currentDistributionFolderBaseName;
await this.extensionContext.globalState.update(
ExtensionSpecificDistributionManager._installedReleaseStateKey,
release,
);
}
private static readonly _currentDistributionFolderBaseName = "distribution";
private static readonly _currentDistributionFolderIndexStateKey =
"distributionFolderIndex";
private static readonly _installedReleaseStateKey = "distributionRelease";
private static readonly _codeQlExtractedFolderName = "codeql";
private static readonly _distributionStateFilename = "distribution.json";
}
/*

View File

@@ -1,127 +0,0 @@
import type { ExtensionContext } from "vscode";
import { getDirectoryNamesInsidePath, isIOError } from "../../common/files";
import { sleep } from "../../common/time";
import type { BaseLogger } from "../../common/logging";
import { join } from "path";
import { getErrorMessage } from "../../common/helpers-pure";
import { pathExists, remove } from "fs-extra";
interface ExtensionManagedDistributionManager {
folderIndex: number;
distributionFolderPrefix: string;
}
interface DistributionDirectory {
directoryName: string;
folderIndex: number;
}
/**
* This class is responsible for cleaning up old distributions that are no longer needed. In normal operation, this
* should not be necessary as the old distribution is deleted when the distribution is updated. However, in some cases
* the extension may leave behind old distribution which can result in a significant amount of space (> 100 GB) being
* taking up by unused distributions.
*/
export class ExtensionManagedDistributionCleaner {
constructor(
private readonly extensionContext: ExtensionContext,
private readonly logger: BaseLogger,
private readonly manager: ExtensionManagedDistributionManager,
) {}
public start() {
// Intentionally starting this without waiting for it
void this.cleanup().catch((e: unknown) => {
void this.logger.log(
`Failed to clean up old versions of the CLI: ${getErrorMessage(e)}`,
);
});
}
public async cleanup() {
if (!(await pathExists(this.extensionContext.globalStorageUri.fsPath))) {
return;
}
const currentFolderIndex = this.manager.folderIndex;
const distributionDirectoryRegex = new RegExp(
`^${this.manager.distributionFolderPrefix}(\\d+)$`,
);
const existingDirectories = await getDirectoryNamesInsidePath(
this.extensionContext.globalStorageUri.fsPath,
);
const distributionDirectories = existingDirectories
.map((dir): DistributionDirectory | null => {
const match = dir.match(distributionDirectoryRegex);
if (!match) {
// When the folderIndex is 0, the distributionFolderPrefix is used as the directory name
if (dir === this.manager.distributionFolderPrefix) {
return {
directoryName: dir,
folderIndex: 0,
};
}
return null;
}
return {
directoryName: dir,
folderIndex: parseInt(match[1]),
};
})
.filter((dir) => dir !== null);
// Clean up all directories that are older than the current one
const cleanableDirectories = distributionDirectories.filter(
(dir) => dir.folderIndex < currentFolderIndex,
);
if (cleanableDirectories.length === 0) {
return;
}
// Shuffle the array so that multiple VS Code processes don't all try to clean up the same directory at the same time
for (let i = cleanableDirectories.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[cleanableDirectories[i], cleanableDirectories[j]] = [
cleanableDirectories[j],
cleanableDirectories[i],
];
}
void this.logger.log(
`Cleaning up ${cleanableDirectories.length} old versions of the CLI.`,
);
for (const cleanableDirectory of cleanableDirectories) {
// Wait 10 seconds between each cleanup to avoid overloading the system (even though the remove call should be async)
await sleep(10_000);
const path = join(
this.extensionContext.globalStorageUri.fsPath,
cleanableDirectory.directoryName,
);
// Delete this directory
try {
await remove(path);
} catch (e) {
if (isIOError(e) && e.code === "ENOENT") {
// If the directory doesn't exist, that's fine
continue;
}
void this.logger.log(
`Tried to clean up an old version of the CLI at ${path} but encountered an error: ${getErrorMessage(e)}.`,
);
}
}
void this.logger.log(
`Cleaned up ${cleanableDirectories.length} old versions of the CLI.`,
);
}
}

View File

@@ -1,3 +1,5 @@
import type { Response } from "node-fetch";
import { default as fetch } from "node-fetch";
import type { Range } from "semver";
import { compare, parse, satisfies } from "semver";
import { URL } from "url";
@@ -32,9 +34,9 @@ export class ReleasesApiConsumer {
additionalCompatibilityCheck?: (release: GithubRelease) => boolean,
): Promise<Release> {
const apiPath = `/repos/${this.repositoryNwo}/releases`;
const allReleases = (await (
const allReleases: GithubRelease[] = await (
await this.makeApiCall(apiPath)
).json()) as GithubRelease[];
).json();
const compatibleReleases = allReleases.filter((release) => {
if (release.prerelease && !includePrerelease) {
return false;

View File

@@ -1,22 +0,0 @@
import { lock } from "proper-lockfile";
export async function withDistributionUpdateLock(
lockFile: string,
f: () => Promise<void>,
) {
const release = await lock(lockFile, {
stale: 60_000, // 1 minute. We can take the lock longer than this because that's based on the update interval.
update: 10_000, // 10 seconds
retries: {
minTimeout: 10_000,
maxTimeout: 60_000,
retries: 100,
},
});
try {
await f();
} finally {
await release();
}
}

View File

@@ -36,7 +36,7 @@ export async function findLanguage(
void extLogger.log(
"Query language is unsupported. Select language manually.",
);
} catch {
} catch (e) {
void extLogger.log(
"Could not autodetect query language. Select language manually.",
);

View File

@@ -11,7 +11,6 @@ export namespace BqrsColumnKindCode {
export const BOOLEAN = "b";
export const DATE = "d";
export const ENTITY = "e";
export const BIGINT = "z";
}
export type BqrsColumnKind =
@@ -20,8 +19,7 @@ export type BqrsColumnKind =
| typeof BqrsColumnKindCode.STRING
| typeof BqrsColumnKindCode.BOOLEAN
| typeof BqrsColumnKindCode.DATE
| typeof BqrsColumnKindCode.ENTITY
| typeof BqrsColumnKindCode.BIGINT;
| typeof BqrsColumnKindCode.ENTITY;
export interface BqrsSchemaColumn {
name?: string;
@@ -81,8 +79,7 @@ export type BqrsKind =
| "Integer"
| "Boolean"
| "Date"
| "Entity"
| "BigInt";
| "Entity";
interface BqrsColumn {
name?: string;

View File

@@ -76,8 +76,6 @@ function mapColumnKind(kind: BqrsColumnKind): ColumnKind {
return ColumnKind.Date;
case BqrsColumnKindCode.ENTITY:
return ColumnKind.Entity;
case BqrsColumnKindCode.BIGINT:
return ColumnKind.BigInt;
default:
assertNever(kind);
}

View File

@@ -211,7 +211,6 @@ export type LanguageSelectionCommands = {
export type LocalDatabasesCommands = {
// Command palette commands
"codeQL.chooseDatabaseFolder": () => Promise<void>;
"codeQL.chooseDatabaseFoldersParent": () => Promise<void>;
"codeQL.chooseDatabaseArchive": () => Promise<void>;
"codeQL.chooseDatabaseInternet": () => Promise<void>;
"codeQL.chooseDatabaseGithub": () => Promise<void>;
@@ -291,7 +290,6 @@ export type DatabasePanelCommands = {
"codeQLVariantAnalysisRepositories.openConfigFile": () => Promise<void>;
"codeQLVariantAnalysisRepositories.addNewDatabase": () => Promise<void>;
"codeQLVariantAnalysisRepositories.addNewList": () => Promise<void>;
"codeQLVariantAnalysisRepositories.setupControllerRepository": () => Promise<void>;
"codeQLVariantAnalysisRepositories.setSelectedItem": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
"codeQLVariantAnalysisRepositories.setSelectedItemContextMenu": TreeViewContextSingleSelectionCommandFunction<DbTreeViewItem>;
@@ -349,9 +347,7 @@ export type MockGitHubApiServerCommands = {
"codeQL.mockGitHubApiServer.startRecording": () => Promise<void>;
"codeQL.mockGitHubApiServer.saveScenario": () => Promise<void>;
"codeQL.mockGitHubApiServer.cancelRecording": () => Promise<void>;
"codeQL.mockGitHubApiServer.loadScenario": (
scenario?: string,
) => Promise<void>;
"codeQL.mockGitHubApiServer.loadScenario": () => Promise<void>;
"codeQL.mockGitHubApiServer.unloadScenario": () => Promise<void>;
};

View File

@@ -78,7 +78,7 @@ function getNwoOrOwnerFromGitHubUrl(
}
const nwo = `${paths[0]}/${paths[1]}`;
return paths[1] ? nwo : undefined;
} catch {
} catch (e) {
// Ignore the error here, since we catch failures at a higher level.
return;
}

View File

@@ -147,21 +147,6 @@ interface SetStateMsg {
parsedResultSets: ParsedResultSets;
}
export interface UserSettings {
/** Whether to display links to the dataflow models that generated particular nodes in a flow path. */
shouldShowProvenance: boolean;
}
export const DEFAULT_USER_SETTINGS: UserSettings = {
shouldShowProvenance: false,
};
/** Message indicating that the user's configuration settings have changed. */
interface SetUserSettingsMsg {
t: "setUserSettings";
userSettings: UserSettings;
}
/**
* Message indicating that the results view should display interpreted
* results.
@@ -206,7 +191,6 @@ interface UntoggleShowProblemsMsg {
export type IntoResultsViewMsg =
| ResultsUpdatingMsg
| SetStateMsg
| SetUserSettingsMsg
| ShowInterpretedPageMsg
| NavigateMsg
| UntoggleShowProblemsMsg;
@@ -224,15 +208,13 @@ export type FromResultsViewMsg =
| OpenFileMsg;
/**
* Message from the results view to open a source
* Message from the results view to open a database source
* file at the provided location.
*/
interface ViewSourceFileMsg {
t: "viewSourceFile";
loc: UrlValueResolvable;
/** URI of the database whose source archive contains the file, or `undefined` to open a file from
* the local disk. The latter case is used for opening links to data extension model files. */
databaseUri: string | undefined;
databaseUri: string;
}
/**
@@ -359,11 +341,7 @@ interface ChangeCompareMessage {
export type ToCompareViewMessage =
| SetComparisonQueryInfoMessage
| SetComparisonsMessage
| StreamingComparisonSetupMessage
| StreamingComparisonAddResultsMessage
| StreamingComparisonCompleteMessage
| SetUserSettingsMsg;
| SetComparisonsMessage;
/**
* Message to the compare view that sets the metadata of the compared queries.
@@ -422,28 +400,6 @@ export type InterpretedQueryCompareResult = {
to: Result[];
};
export interface StreamingComparisonSetupMessage {
readonly t: "streamingComparisonSetup";
// The id of this streaming comparison
readonly id: string;
readonly currentResultSetName: string;
readonly message: string | undefined;
// The from and to fields will only contain a chunk of the results
readonly result: QueryCompareResult;
}
interface StreamingComparisonAddResultsMessage {
readonly t: "streamingComparisonAddResults";
readonly id: string;
// The from and to fields will only contain a chunk of the results
readonly result: QueryCompareResult;
}
interface StreamingComparisonCompleteMessage {
readonly t: "streamingComparisonComplete";
readonly id: string;
}
/**
* Extract the name of the default result. Prefer returning
* 'alerts', or '#select'. Otherwise return the first in the list.
@@ -585,6 +541,16 @@ interface SetModifiedMethodsMessage {
methodSignatures: string[];
}
interface SetInProgressMethodsMessage {
t: "setInProgressMethods";
methods: string[];
}
interface SetProcessedByAutoModelMethodsMessage {
t: "setProcessedByAutoModelMethods";
methods: string[];
}
interface SwitchModeMessage {
t: "switchMode";
mode: Mode;
@@ -616,6 +582,17 @@ interface GenerateMethodMessage {
t: "generateMethod";
}
interface GenerateMethodsFromLlmMessage {
t: "generateMethodsFromLlm";
packageName: string;
methodSignatures: string[];
}
interface StopGeneratingMethodsFromLlmMessage {
t: "stopGeneratingMethodsFromLlm";
packageName: string;
}
interface StartModelEvaluationMessage {
t: "startModelEvaluation";
}
@@ -653,6 +630,16 @@ interface SetInModelingModeMessage {
inModelingMode: boolean;
}
interface SetInProgressMessage {
t: "setInProgress";
inProgress: boolean;
}
interface SetProcessedByAutoModelMessage {
t: "setProcessedByAutoModel";
processedByAutoModel: boolean;
}
interface RevealMethodMessage {
t: "revealMethod";
methodSignature: string;
@@ -673,6 +660,8 @@ export type ToModelEditorMessage =
| SetMethodsMessage
| SetModeledAndModifiedMethodsMessage
| SetModifiedMethodsMessage
| SetInProgressMethodsMessage
| SetProcessedByAutoModelMethodsMessage
| RevealMethodMessage
| SetAccessPathSuggestionsMessage
| SetModelEvaluationRunMessage;
@@ -686,6 +675,8 @@ export type FromModelEditorMessage =
| JumpToMethodMessage
| SaveModeledMethods
| GenerateMethodMessage
| GenerateMethodsFromLlmMessage
| StopGeneratingMethodsFromLlmMessage
| ModelDependencyMessage
| HideModeledMethodsMessage
| SetMultipleModeledMethodsMessage
@@ -728,6 +719,8 @@ interface SetSelectedMethodMessage {
method: Method;
modeledMethods: ModeledMethod[];
isModified: boolean;
isInProgress: boolean;
processedByAutoModel: boolean;
}
export type ToMethodModelingMessage =
@@ -736,7 +729,9 @@ export type ToMethodModelingMessage =
| SetMethodModifiedMessage
| SetNoMethodSelectedMessage
| SetSelectedMethodMessage
| SetInModelingModeMessage;
| SetInModelingModeMessage
| SetInProgressMessage
| SetProcessedByAutoModelMessage;
interface SetModelAlertsViewStateMessage {
t: "setModelAlertsViewState";

View File

@@ -1,56 +1,26 @@
import { stat } from "fs/promises";
import { createReadStream } from "fs-extra";
import type { BaseLogger } from "./logging";
const doubleLineBreakRegexp = /\n\r?\n/;
import { readFile } from "fs-extra";
/**
* Read a file consisting of multiple JSON objects. Each object is separated from the previous one
* by a double newline sequence. This is basically a more human-readable form of JSONL.
*
* The current implementation reads the entire text of the document into memory, but in the future
* it will stream the document to improve the performance with large documents.
*
* @param path The path to the file.
* @param handler Callback to be invoked for each top-level JSON object in order.
*/
export async function readJsonlFile<T>(
path: string,
handler: (value: T) => Promise<void>,
logger?: BaseLogger,
): Promise<void> {
// Stream the data as large evaluator logs won't fit in memory.
// Also avoid using 'readline' as it is slower than our manual line splitting.
void logger?.log(
`Parsing ${path} (${(await stat(path)).size / 1024 / 1024} MB)...`,
);
return new Promise((resolve, reject) => {
const stream = createReadStream(path, { encoding: "utf8" });
let buffer = "";
stream.on("data", async (chunk: string) => {
const parts = (buffer + chunk).split(doubleLineBreakRegexp);
buffer = parts.pop()!;
if (parts.length > 0) {
try {
stream.pause();
for (const part of parts) {
await handler(JSON.parse(part));
}
stream.resume();
} catch (e) {
stream.destroy();
reject(e);
}
}
});
stream.on("end", async () => {
try {
if (buffer.trim().length > 0) {
await handler(JSON.parse(buffer));
}
void logger?.log(`Finished parsing ${path}`);
resolve();
} catch (e) {
reject(e);
}
});
stream.on("error", reject);
});
const logSummary = await readFile(path, "utf-8");
// Remove newline delimiters because summary is in .jsonl format.
const jsonSummaryObjects: string[] = logSummary.split(/\r?\n\r?\n/g);
for (const obj of jsonSummaryObjects) {
const jsonObj = JSON.parse(obj) as T;
await handler(jsonObj);
}
}
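
A small usage sketch of readJsonlFile as declared above. Only the function's signature comes from this file; the event shape and the aggregation are hypothetical.

// Hypothetical event type; real evaluator log entries are richer than this.
interface SummaryEvent {
  predicateName?: string;
  millis?: number;
}

async function sumEvaluationTime(summaryPath: string): Promise<number> {
  let total = 0;
  // Each double-newline-separated JSON object is passed to the handler in order.
  await readJsonlFile<SummaryEvent>(summaryPath, async (event) => {
    total += event.millis ?? 0;
  });
  return total;
}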

View File

@@ -14,6 +14,7 @@ export enum RequestKind {
GetVariantAnalysisRepo = "getVariantAnalysisRepo",
GetVariantAnalysisRepoResult = "getVariantAnalysisRepoResult",
CodeSearch = "codeSearch",
AutoModel = "autoModel",
}
export interface BasicErrorResponse {
@@ -68,7 +69,7 @@ export interface GetVariantAnalysisRepoResultRequest {
};
response: {
status: number;
body?: ArrayBuffer | string;
body?: Buffer | string;
contentType: string;
};
}
@@ -91,13 +92,31 @@ interface CodeSearchRequest {
};
}
export interface AutoModelResponse {
models: string;
}
interface AutoModelRequest {
request: {
kind: RequestKind.AutoModel;
body?: {
candidates: string;
};
};
response: {
status: number;
body?: AutoModelResponse | BasicErrorResponse;
};
}
export type GitHubApiRequest =
| GetRepoRequest
| SubmitVariantAnalysisRequest
| GetVariantAnalysisRequest
| GetVariantAnalysisRepoRequest
| GetVariantAnalysisRepoResultRequest
| CodeSearchRequest;
| CodeSearchRequest
| AutoModelRequest;
export const isGetRepoRequest = (
request: GitHubApiRequest,
@@ -127,3 +146,8 @@ export const isCodeSearchRequest = (
request: GitHubApiRequest,
): request is CodeSearchRequest =>
request.request.kind === RequestKind.CodeSearch;
export const isAutoModelRequest = (
request: GitHubApiRequest,
): request is AutoModelRequest =>
request.request.kind === RequestKind.AutoModel;

View File

@@ -2,7 +2,6 @@ import { join, resolve } from "path";
import { pathExists } from "fs-extra";
import type { SetupServer } from "msw/node";
import { setupServer } from "msw/node";
import type { UnhandledRequestStrategy } from "msw/lib/core/utils/request/onUnhandledRequest";
import { DisposableObject } from "../disposable-object";
@@ -27,14 +26,12 @@ export class MockGitHubApiServer extends DisposableObject {
this.recorder = this.push(new Recorder(this.server));
}
public startServer(
onUnhandledRequest: UnhandledRequestStrategy = "bypass",
): void {
public startServer(): void {
if (this._isListening) {
return;
}
this.server.listen({ onUnhandledRequest });
this.server.listen({ onUnhandledRequest: "bypass" });
this._isListening = true;
}
@@ -57,7 +54,8 @@ export class MockGitHubApiServer extends DisposableObject {
const scenarioPath = join(scenariosPath, scenarioName);
const handlers = await createRequestHandlers(scenarioPath);
this.server.resetHandlers(...handlers);
this.server.resetHandlers();
this.server.use(...handlers);
}
public async saveScenario(

View File

@@ -1,12 +1,14 @@
import { ensureDir, writeFile } from "fs-extra";
import { join } from "path";
import fetch from "node-fetch";
import type { SetupServer } from "msw/node";
import { DisposableObject } from "../disposable-object";
import { gzipDecode } from "../zlib";
import type {
AutoModelResponse,
BasicErrorResponse,
CodeSearchResponse,
GetVariantAnalysisRepoResultRequest,
@@ -89,14 +91,7 @@ export class Recorder extends DisposableObject {
let bodyFileLink = undefined;
if (writtenRequest.response.body) {
if (typeof writtenRequest.response.body === "string") {
await writeFile(bodyFilePath, writtenRequest.response.body);
} else {
await writeFile(
bodyFilePath,
Buffer.from(writtenRequest.response.body),
);
}
await writeFile(bodyFilePath, writtenRequest.response.body);
bodyFileLink = `file:${bodyFileName}`;
}
@@ -231,7 +226,7 @@ async function createGitHubApiRequest(
"x-vscode-codeql-msw-bypass": "true",
},
});
const responseBuffer = await response.arrayBuffer();
const responseBuffer = await response.buffer();
return {
request: {
@@ -263,6 +258,23 @@ async function createGitHubApiRequest(
};
}
const autoModelMatch = url.match(
/\/repos\/github\/codeql\/code-scanning\/codeql\/auto-model/,
);
if (autoModelMatch) {
return {
request: {
kind: RequestKind.AutoModel,
},
response: {
status,
body: await jsonResponseBody<
BasicErrorResponse | AutoModelResponse | undefined
>(response),
},
};
}
return undefined;
}

View File

@@ -4,6 +4,7 @@ import type { RequestHandler } from "msw";
import { http } from "msw";
import type { GitHubApiRequest } from "./gh-api-request";
import {
isAutoModelRequest,
isCodeSearchRequest,
isGetRepoRequest,
isGetVariantAnalysisRepoRequest,
@@ -40,6 +41,7 @@ export async function createRequestHandlers(
createGetVariantAnalysisRepoRequestHandler(requests),
createGetVariantAnalysisRepoResultRequestHandler(requests),
createCodeSearchRequestHandler(requests),
createAutoModelRequestHandler(requests),
];
return handlers;
@@ -228,3 +230,29 @@ function createCodeSearchRequestHandler(
});
});
}
function createAutoModelRequestHandler(
requests: GitHubApiRequest[],
): RequestHandler {
const autoModelRequests = requests.filter(isAutoModelRequest);
let requestIndex = 0;
// During automodeling there can be multiple API requests for each batch
// of candidates we want to model. We need to return different responses for each request,
// so keep an index of the request and return the appropriate response.
return http.post(
`${baseUrl}/repos/github/codeql/code-scanning/codeql/auto-model`,
() => {
const request = autoModelRequests[requestIndex];
if (requestIndex < autoModelRequests.length - 1) {
// If there are more requests to come, increment the index.
requestIndex++;
}
return jsonResponse(request.response.body, {
status: request.response.status,
});
},
);
}
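
The indexed-response pattern used above can be shown in isolation. The sketch below uses plain data rather than the extension's msw helpers, and the response bodies are only assumed to mirror the recorded scenario files.

// Standalone sketch: return a different recorded response on each call,
// then stay on the last one once the recording is exhausted.
function makeSequentialResponder<T>(recorded: T[]): () => T {
  let index = 0;
  return () => {
    const response = recorded[index];
    if (index < recorded.length - 1) {
      index++;
    }
    return response;
  };
}

// Usage: each call yields the next scenario's body, e.g. one per auto-model batch.
const nextAutoModelResponse = makeSequentialResponder([
  { models: "extensions: ... # first batch" },
  { models: "extensions: ... # second batch" },
]);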

View File

@@ -0,0 +1,11 @@
{
"request": {
"kind": "autoModel"
},
"response": {
"status": 200,
"body": {
"models": "extensions:\n- addsTo: {extensible: sinkModel, pack: codeql/java-all}\n data:\n - [javax.servlet.http, HttpServletResponse, true, sendRedirect, (String), '', 'Argument[this]',\n request-forgery, ai-generated]\n - [javax.servlet.http, HttpServletResponse, true, sendRedirect, (String), '', 'Argument[0]',\n request-forgery, ai-generated]\n"
}
}
}

View File

@@ -0,0 +1,11 @@
{
"request": {
"kind": "autoModel"
},
"response": {
"status": 200,
"body": {
"models": "extensions:\n- addsTo: {extensible: sinkModel, pack: codeql/java-all}\n data:\n - [javax.servlet, MultipartConfigElement, true, MultipartConfigElement, (String),\n '', 'Argument[0]', request-forgery, ai-generated]\n"
}
}
}

View File

@@ -0,0 +1 @@
This scenario is best when modeling the `javax.servlet-api` package.

View File

@@ -63,33 +63,26 @@ export class VSCodeMockGitHubApiServer extends DisposableObject {
);
}
public async loadScenario(scenario?: string): Promise<void> {
public async loadScenario(): Promise<void> {
const scenariosPath = await this.getScenariosPath();
if (!scenariosPath) {
return;
}
let scenarioName = scenario;
if (!scenarioName) {
const scenarioNames = await this.server.getScenarioNames(scenariosPath);
const scenarioQuickPickItems = scenarioNames.map((s) => ({ label: s }));
const quickPickOptions = {
placeHolder: "Select a scenario to load",
};
const selectedScenario = await window.showQuickPick<QuickPickItem>(
scenarioQuickPickItems,
quickPickOptions,
);
if (!selectedScenario) {
return;
}
scenarioName = selectedScenario.label;
const scenarioNames = await this.server.getScenarioNames(scenariosPath);
const scenarioQuickPickItems = scenarioNames.map((s) => ({ label: s }));
const quickPickOptions = {
placeHolder: "Select a scenario to load",
};
const selectedScenario = await window.showQuickPick<QuickPickItem>(
scenarioQuickPickItems,
quickPickOptions,
);
if (!selectedScenario) {
return;
}
if (!this.server.isListening && this.app.mode === AppMode.Test) {
await this.startServer();
}
const scenarioName = selectedScenario.label;
await this.server.loadScenario(scenarioName, scenariosPath);
@@ -101,12 +94,12 @@ export class VSCodeMockGitHubApiServer extends DisposableObject {
true,
);
void window.showInformationMessage(`Loaded scenario '${scenarioName}'`);
await window.showInformationMessage(`Loaded scenario '${scenarioName}'`);
}
public async unloadScenario(): Promise<void> {
if (!this.server.isScenarioLoaded) {
void window.showInformationMessage("No scenario currently loaded");
await window.showInformationMessage("No scenario currently loaded");
} else {
await this.server.unloadScenario();
await this.app.commands.execute(
@@ -114,11 +107,7 @@ export class VSCodeMockGitHubApiServer extends DisposableObject {
"codeQL.mockGitHubApiServer.scenarioLoaded",
false,
);
void window.showInformationMessage("Unloaded scenario");
}
if (this.server.isListening && this.app.mode === AppMode.Test) {
await this.stopServer();
await window.showInformationMessage("Unloaded scenario");
}
}
@@ -150,7 +139,7 @@ export class VSCodeMockGitHubApiServer extends DisposableObject {
true,
);
void window.showInformationMessage(
await window.showInformationMessage(
'Recording scenario. To save the scenario, use the "CodeQL Mock GitHub API Server: Save Scenario" command.',
);
}
@@ -232,10 +221,7 @@ export class VSCodeMockGitHubApiServer extends DisposableObject {
return scenariosPath;
}
if (
this.app.mode === AppMode.Development ||
this.app.mode === AppMode.Test
) {
if (this.app.mode === AppMode.Development) {
const developmentScenariosPath = path.join(
this.app.extensionPath,
"src/common/mock-gh-api/scenarios",

View File

@@ -1,13 +1,10 @@
import { Octokit } from "@octokit/rest";
import { retry } from "@octokit/plugin-retry";
import fetch from "node-fetch";
export const AppOctokit = Octokit.defaults({
request: {
// MSW replaces the global fetch object, so we can't just pass a reference to the
// fetch object at initialization time. Instead, we pass a function that will
// always call the global fetch object.
fetch: (input: string | URL | Request, init?: RequestInit) =>
fetch(input, init),
fetch,
},
retry,
});
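
For reference, a minimal sketch of constructing a client from the AppOctokit defaults above; the token and repository names are placeholders.

async function fetchRepoMetadata(auth: string): Promise<string> {
  // `auth` is a placeholder token; the extension obtains credentials elsewhere.
  const octokit = new AppOctokit({ auth });

  // Requests go through the retry plugin and the fetch wiring configured above.
  const { data } = await octokit.rest.repos.get({
    owner: "github",
    repo: "vscode-codeql",
  });
  return data.full_name;
}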

View File

@@ -5,7 +5,6 @@ export enum ColumnKind {
Boolean = "boolean",
Date = "date",
Entity = "entity",
BigInt = "bigint",
}
export type Column = {
@@ -62,11 +61,6 @@ type CellValueNumber = {
value: number;
};
type CellValueBigInt = {
type: "number";
value: number;
};
type CellValueString = {
type: "string";
value: string;
@@ -81,8 +75,7 @@ export type CellValue =
| CellValueEntity
| CellValueNumber
| CellValueString
| CellValueBoolean
| CellValueBigInt;
| CellValueBoolean;
export type Row = CellValue[];

View File

@@ -1,14 +1,14 @@
export type DeepReadonly<T> =
T extends Array<infer R>
? DeepReadonlyArray<R>
: // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
: // eslint-disable-next-line @typescript-eslint/ban-types
T extends Function
? T
: T extends object
? DeepReadonlyObject<T>
: T;
type DeepReadonlyArray<T> = ReadonlyArray<DeepReadonly<T>>;
interface DeepReadonlyArray<T> extends ReadonlyArray<DeepReadonly<T>> {}
type DeepReadonlyObject<T> = {
readonly [P in keyof T]: DeepReadonly<T[P]>;
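
A brief usage sketch of DeepReadonly with a made-up nested type, to show what the mapped type produces; only DeepReadonly itself comes from this file.

// Hypothetical shape for illustration.
interface QueryRun {
  name: string;
  results: Array<{ message: string }>;
}

const run: DeepReadonly<QueryRun> = {
  name: "example",
  results: [{ message: "hello" }],
};

// Both of these are now compile errors, because every property (including
// array elements) is readonly:
// run.name = "other";
// run.results[0].message = "changed";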

View File

@@ -1,20 +1,18 @@
import type { Log } from "sarif";
import type { Log, Tool } from "sarif";
import { createReadStream } from "fs-extra";
import { connectTo } from "stream-json/Assembler";
import { getErrorMessage } from "./helpers-pure";
import { withParser } from "stream-json/filters/Ignore";
import { withParser } from "stream-json/filters/Pick";
const DUMMY_TOOL: Tool = { driver: { name: "" } };
export async function sarifParser(
interpretedResultsPath: string,
): Promise<Log> {
try {
// Parse the SARIF file into token streams, filtering out some of the larger subtrees that we
// don't need.
// Parse the SARIF file into token streams, filtering out only the results array.
const pipeline = createReadStream(interpretedResultsPath).pipe(
withParser({
// We don't need the run's `artifacts` property, nor the driver's `notifications` property.
filter: /^runs\.\d+\.(artifacts|tool\.driver\.notifications)/,
}),
withParser({ filter: "runs.0.results" }),
);
// Creates JavaScript objects from the token stream
@@ -40,17 +38,15 @@ export async function sarifParser(
});
asm.on("done", (asm) => {
const log = asm.current;
// Do some trivial validation. This isn't a full validation of the SARIF file, but it's at
// least enough to ensure that we're not trying to parse complete garbage later.
if (log.runs === undefined || log.runs.length < 1) {
reject(
new Error(
"Invalid SARIF file: expecting at least one run with result.",
),
);
}
const log: Log = {
version: "2.1.0",
runs: [
{
tool: DUMMY_TOOL,
results: asm.current ?? [],
},
],
};
resolve(log);
alreadyDone = true;

View File

@@ -1,4 +1,4 @@
import { window } from "vscode";
import { env, Uri, window } from "vscode";
/**
* Opens a modal dialog for the user to make a yes/no choice.
@@ -34,6 +34,50 @@ export async function showBinaryChoiceDialog(
return chosenItem?.title === yesItem.title;
}
/**
* Opens a modal dialog for the user to make a yes/no choice.
*
* @param message The message to show.
* @param url The URL to open when the user selects 'More Information'.
*
* @return
* `true` if the user clicks 'Yes',
* `false` if the user clicks 'No' or cancels the dialog,
* `undefined` if the dialog is closed without the user making a choice.
*/
export async function showBinaryChoiceWithUrlDialog(
message: string,
url: string,
): Promise<boolean | undefined> {
const urlItem = { title: "More Information", isCloseAffordance: false };
const yesItem = { title: "Yes", isCloseAffordance: false };
const noItem = { title: "No", isCloseAffordance: true };
let chosenItem;
// Keep the dialog open as long as the user is clicking the 'more information' option.
// To prevent an infinite loop, close the dialog and treat it as cancelled if the user clicks 'More Information' 5 times.
let count = 0;
do {
chosenItem = await window.showInformationMessage(
message,
{ modal: true },
urlItem,
yesItem,
noItem,
);
if (chosenItem === urlItem) {
await env.openExternal(Uri.parse(url, true));
}
count++;
} while (chosenItem === urlItem && count < 5);
if (!chosenItem || chosenItem.title === urlItem.title) {
return undefined;
}
return chosenItem.title === yesItem.title;
}
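
A hedged usage sketch of the dialog helper above. The message and URL are placeholders; the three-way result handling follows the doc comment.

// Illustrative only: handle the yes / no / closed-without-choice outcomes.
async function confirmWithDocs(): Promise<void> {
  const choice = await showBinaryChoiceWithUrlDialog(
    "Enable the example feature?", // placeholder message
    "https://example.com/docs", // placeholder URL
  );
  if (choice === undefined) {
    return; // dialog dismissed without a choice, treat as cancelled
  }
  if (choice) {
    // user clicked Yes
  } else {
    // user clicked No
  }
}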
/**
* Show an information message with a customisable action.
* @param message The message to show.

View File

@@ -97,15 +97,17 @@ export function withProgress<R>(
* Displays a progress monitor that indicates how much progress has been made
* reading from a stream.
*
* @param readable The stream to read progress from
* @param messagePrefix A prefix for displaying the message
* @param totalNumBytes Total number of bytes in this stream
* @param progress The progress callback used to set messages
*/
export function reportStreamProgress(
readable: NodeJS.ReadableStream,
messagePrefix: string,
totalNumBytes?: number,
progress?: ProgressCallback,
): (bytesRead: number) => void {
) {
if (progress && totalNumBytes) {
let numBytesDownloaded = 0;
const updateProgress = () => {
@@ -121,10 +123,10 @@ export function reportStreamProgress(
// Display the progress straight away rather than waiting for the first chunk.
updateProgress();
return (bytesRead: number) => {
numBytesDownloaded += bytesRead;
readable.on("data", (data) => {
numBytesDownloaded += data.length;
updateProgress();
};
});
} else if (progress) {
progress({
step: 1,
@@ -132,6 +134,4 @@ export function reportStreamProgress(
message: `${messagePrefix} (Size unknown)`,
});
}
return () => {};
}
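
A sketch of consuming the returned-callback shape of reportStreamProgress shown in this hunk, assuming a hypothetical download loop over a WHATWG readable stream; only the function's parameters are taken from this file.

// Illustrative only: feed byte counts into the progress callback while copying a body.
async function consumeWithProgress(
  body: ReadableStream<Uint8Array>,
  totalNumBytes: number | undefined,
  progress: ProgressCallback,
): Promise<void> {
  const reportProgress = reportStreamProgress(
    "Downloading database",
    totalNumBytes,
    progress,
  );
  const reader = body.getReader();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) {
      break;
    }
    reportProgress(value?.length ?? 0);
  }
}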

View File

@@ -1,15 +1,26 @@
import type { Extension, ExtensionContext } from "vscode";
import { ConfigurationTarget, env, Uri, window } from "vscode";
import type {
Extension,
ExtensionContext,
ConfigurationChangeEvent,
} from "vscode";
import { ConfigurationTarget, env } from "vscode";
import TelemetryReporter from "vscode-extension-telemetry";
import { ENABLE_TELEMETRY, isCanary, LOG_TELEMETRY } from "../../config";
import {
ConfigListener,
CANARY_FEATURES,
ENABLE_TELEMETRY,
LOG_TELEMETRY,
isIntegrationTestMode,
isCanary,
} from "../../config";
import type { TelemetryClient } from "applicationinsights";
import { extLogger } from "../logging/vscode";
import { UserCancellationException } from "./progress";
import { showBinaryChoiceWithUrlDialog } from "./dialog";
import type { RedactableError } from "../errors";
import type { SemVer } from "semver";
import type { AppTelemetry } from "../telemetry";
import type { EnvelopeTelemetry } from "applicationinsights/out/Declarations/Contracts";
import type { Disposable } from "../disposable-object";
// Key is injected at build time through the APP_INSIGHTS_KEY environment variable.
const key = "REPLACE-APP-INSIGHTS-KEY";
@@ -44,25 +55,80 @@ const baseDataPropertiesToRemove = [
const NOT_SET_CLI_VERSION = "not-set";
export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
private readonly reporter: TelemetryReporter;
export class ExtensionTelemetryListener
extends ConfigListener
implements AppTelemetry
{
private reporter?: TelemetryReporter;
private cliVersionStr = NOT_SET_CLI_VERSION;
constructor(id: string, version: string, key: string) {
// We can always initialize this and send events using it because the vscode-extension-telemetry will check
// whether the `telemetry.telemetryLevel` setting is enabled.
this.reporter = new TelemetryReporter(
id,
version,
key,
/* anonymize stack traces */ true,
);
constructor(
private readonly id: string,
private readonly version: string,
private readonly key: string,
private readonly ctx: ExtensionContext,
) {
super();
this.addTelemetryProcessor();
env.onDidChangeTelemetryEnabled(async () => {
await this.initialize();
});
}
private addTelemetryProcessor() {
/**
* This function handles changes to relevant configuration elements. There are 2 configuration
* ids that this function cares about:
*
* * `codeQL.telemetry.enableTelemetry`: If this one has changed, then we need to re-initialize
* the reporter and the reporter may wind up being removed.
* * `codeQL.canary`: A change here could possibly re-trigger a dialog popup.
*
* Note that the global telemetry setting also gate-keeps whether or not to send telemetry events
* to Application Insights. However, this gatekeeping happens inside of the vscode-extension-telemetry
* package. So, this does not need to be handled here.
*
* @param e the configuration change event
*/
async handleDidChangeConfiguration(
e: ConfigurationChangeEvent,
): Promise<void> {
if (e.affectsConfiguration(ENABLE_TELEMETRY.qualifiedName)) {
await this.initialize();
}
// Re-request telemetry so that users can see the dialog again.
// Re-request if codeQL.canary is being set to `true` and telemetry
// is not currently enabled.
if (
e.affectsConfiguration(CANARY_FEATURES.qualifiedName) &&
CANARY_FEATURES.getValue() &&
!ENABLE_TELEMETRY.getValue()
) {
await this.setTelemetryRequested(false);
await this.requestTelemetryPermission();
}
}
async initialize() {
await this.requestTelemetryPermission();
this.disposeReporter();
if (ENABLE_TELEMETRY.getValue<boolean>()) {
this.createReporter();
}
}
private createReporter() {
this.reporter = new TelemetryReporter(
this.id,
this.version,
this.key,
/* anonymize stack traces */ true,
);
this.push(this.reporter);
// The appInsightsClient field is private but we want to access it anyway
const client = this.reporter["appInsightsClient"] as TelemetryClient;
if (client) {
@@ -85,10 +151,14 @@ export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
}
dispose() {
void this.reporter.dispose();
super.dispose();
void this.reporter?.dispose();
}
sendCommandUsage(name: string, executionTime: number, error?: Error): void {
if (!this.reporter) {
return;
}
const status = !error
? CommandCompletion.Success
: error instanceof UserCancellationException
@@ -108,6 +178,10 @@ export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
}
sendUIInteraction(name: string): void {
if (!this.reporter) {
return;
}
this.reporter.sendTelemetryEvent(
"ui-interaction",
{
@@ -123,6 +197,10 @@ export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
error: RedactableError,
extraProperties?: { [key: string]: string },
): void {
if (!this.reporter) {
return;
}
const properties: { [key: string]: string } = {
isCanary: isCanary().toString(),
cliVersion: this.cliVersionStr,
@@ -137,6 +215,10 @@ export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
}
sendConfigInformation(config: Record<string, string>): void {
if (!this.reporter) {
return;
}
this.reporter.sendTelemetryEvent(
"config",
{
@@ -148,6 +230,37 @@ export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
);
}
/**
* Displays a popup asking the user if they want to enable telemetry
* for this extension.
*/
async requestTelemetryPermission() {
if (!this.wasTelemetryRequested()) {
// if global telemetry is disabled, avoid showing the dialog or making any changes
let result = undefined;
if (
env.isTelemetryEnabled &&
// Avoid showing the dialog if we are in integration test mode.
!isIntegrationTestMode()
) {
// Extension won't start until this completes.
result = await showBinaryChoiceWithUrlDialog(
"Does the CodeQL Extension by GitHub have your permission to collect usage data and metrics to help us improve CodeQL for VSCode?",
"https://codeql.github.com/docs/codeql-for-visual-studio-code/about-telemetry-in-codeql-for-visual-studio-code",
);
}
if (result !== undefined) {
await Promise.all([
this.setTelemetryRequested(true),
ENABLE_TELEMETRY.updateValue<boolean>(
result,
ConfigurationTarget.Global,
),
]);
}
}
}
/**
* Exposed for testing
*/
@@ -158,45 +271,21 @@ export class ExtensionTelemetryListener implements AppTelemetry, Disposable {
set cliVersion(version: SemVer | undefined) {
this.cliVersionStr = version ? version.toString() : NOT_SET_CLI_VERSION;
}
}
async function notifyTelemetryChange() {
const continueItem = { title: "Continue", isCloseAffordance: false };
const vsCodeTelemetryItem = {
title: "More Information about VS Code Telemetry",
isCloseAffordance: false,
};
const codeqlTelemetryItem = {
title: "More Information about CodeQL Telemetry",
isCloseAffordance: false,
};
let chosenItem;
private disposeReporter() {
if (this.reporter) {
void this.reporter.dispose();
this.reporter = undefined;
}
}
do {
chosenItem = await window.showInformationMessage(
"The CodeQL extension now follows VS Code's telemetry settings. VS Code telemetry is currently enabled. Learn how to update your telemetry settings by clicking the links below.",
{ modal: true },
continueItem,
vsCodeTelemetryItem,
codeqlTelemetryItem,
);
if (chosenItem === vsCodeTelemetryItem) {
await env.openExternal(
Uri.parse(
"https://code.visualstudio.com/docs/getstarted/telemetry",
true,
),
);
}
if (chosenItem === codeqlTelemetryItem) {
await env.openExternal(
Uri.parse(
"https://docs.github.com/en/code-security/codeql-for-vs-code/using-the-advanced-functionality-of-the-codeql-for-vs-code-extension/telemetry-in-codeql-for-visual-studio-code",
true,
),
);
}
} while (chosenItem !== continueItem);
private wasTelemetryRequested(): boolean {
return !!this.ctx.globalState.get<boolean>("telemetry-request-viewed");
}
private async setTelemetryRequested(newValue: boolean): Promise<void> {
await this.ctx.globalState.update("telemetry-request-viewed", newValue);
}
}
/**
@@ -212,28 +301,15 @@ export async function initializeTelemetry(
if (telemetryListener !== undefined) {
throw new Error("Telemetry is already initialized");
}
if (ENABLE_TELEMETRY.getValue<boolean | undefined>() === false) {
if (env.isTelemetryEnabled) {
// Await this so that the user is notified before any telemetry is sent
await notifyTelemetryChange();
}
// Remove the deprecated telemetry setting
ENABLE_TELEMETRY.updateValue(undefined, ConfigurationTarget.Global);
ENABLE_TELEMETRY.updateValue(undefined, ConfigurationTarget.Workspace);
ENABLE_TELEMETRY.updateValue(
undefined,
ConfigurationTarget.WorkspaceFolder,
);
}
telemetryListener = new ExtensionTelemetryListener(
extension.id,
extension.packageJSON.version,
key,
ctx,
);
// do not await initialization, since doing so will sometimes cause a modal popup.
// this is a particular problem during integration tests, which will hang if a modal popup is displayed.
void telemetryListener.initialize();
ctx.subscriptions.push(telemetryListener);
return telemetryListener;
}

View File

@@ -1,5 +1,10 @@
import { promisify } from "util";
import { gunzip } from "zlib";
import { gzip, gunzip } from "zlib";
/**
* Promisified version of zlib.gzip
*/
export const gzipEncode = promisify(gzip);
/**
* Promisified version of zlib.gunzip

View File

@@ -33,8 +33,6 @@ import {
getResultSetNames,
} from "./result-set-names";
import { compareInterpretedResults } from "./interpreted-results";
import { isCanary } from "../config";
import { nanoid } from "nanoid";
interface ComparePair {
from: CompletedLocalQueryInfo;
@@ -118,13 +116,6 @@ export class CompareView extends AbstractWebview<
panel.reveal(undefined, true);
await this.waitForPanelLoaded();
await this.postMessage({
t: "setUserSettings",
userSettings: {
shouldShowProvenance: isCanary(),
},
});
await this.postMessage({
t: "setComparisonQueryInfo",
stats: {
@@ -184,97 +175,13 @@ export class CompareView extends AbstractWebview<
message = getErrorMessage(e);
}
await this.streamResults(result, currentResultSetDisplayName, message);
}
}
private async streamResults(
result: QueryCompareResult | undefined,
currentResultSetName: string,
message: string | undefined,
) {
// Since the comparison is sent to the webview as a JSON.stringified string and Node.js limits strings to 1GB,
// some comparisons may be larger than that, so we sometimes need to stream results. This uses a heuristic of
// 2,000 results to determine whether we should stream.
if (!this.shouldStreamResults(result)) {
await this.postMessage({
t: "setComparisons",
result,
currentResultSetName,
currentResultSetName: currentResultSetDisplayName,
message,
});
return;
}
const id = nanoid();
// Streaming itself is implemented like this:
// - 1 setup message which contains the first 1,000 results
// - n "add results" messages which contain 1,000 results each
// - 1 complete message which just tells the webview that we're done
await this.postMessage({
t: "streamingComparisonSetup",
id,
result: this.chunkResults(result, 0, 1000),
currentResultSetName,
message,
});
const { from, to } = result;
const maxResults = Math.max(from.length, to.length);
for (let i = 1000; i < maxResults; i += 1000) {
const chunk = this.chunkResults(result, i, i + 1000);
await this.postMessage({
t: "streamingComparisonAddResults",
id,
result: chunk,
});
}
await this.postMessage({
t: "streamingComparisonComplete",
id,
});
}
private shouldStreamResults(
result: QueryCompareResult | undefined,
): result is QueryCompareResult {
if (result === undefined) {
return false;
}
// We probably won't run into limits if we have less than 2,000 total results
const totalResults = result.from.length + result.to.length;
return totalResults > 2000;
}
private chunkResults(
result: QueryCompareResult,
start: number,
end: number,
): QueryCompareResult {
if (result.kind === "raw") {
return {
...result,
from: result.from.slice(start, end),
to: result.to.slice(start, end),
};
}
if (result.kind === "interpreted") {
return {
...result,
from: result.from.slice(start, end),
to: result.to.slice(start, end),
};
}
assertNever(result);
}
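
For clarity, a rough sketch of how a webview receiver might reassemble the streamed comparison messages described above, assuming the variant of ToCompareViewMessage that includes the streaming message types; the accumulator itself is hypothetical and not part of the extension.

// Hypothetical receiver-side accumulator: collect chunks by stream id and only
// hand the merged result to the table component once the stream completes.
const pendingChunks = new Map<string, QueryCompareResult[]>();

function onCompareViewMessage(msg: ToCompareViewMessage): void {
  switch (msg.t) {
    case "streamingComparisonSetup":
      pendingChunks.set(msg.id, [msg.result]);
      break;
    case "streamingComparisonAddResults":
      pendingChunks.get(msg.id)?.push(msg.result);
      break;
    case "streamingComparisonComplete":
      // A real implementation would concatenate the chunks' from/to arrays
      // (they all share the same kind) and render the merged comparison.
      pendingChunks.delete(msg.id);
      break;
  }
}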
protected getPanelConfig(): WebviewPanelConfig {

View File

@@ -1,4 +1,4 @@
import type { Location, Result, ThreadFlowLocation } from "sarif";
import type { Location, Result } from "sarif";
function toCanonicalLocation(location: Location): Location {
if (location.physicalLocation?.artifactLocation?.index !== undefined) {
@@ -25,19 +25,6 @@ function toCanonicalLocation(location: Location): Location {
return location;
}
function toCanonicalThreadFlowLocation(
threadFlowLocation: ThreadFlowLocation,
): ThreadFlowLocation {
if (threadFlowLocation.location) {
return {
...threadFlowLocation,
location: toCanonicalLocation(threadFlowLocation.location),
};
}
return threadFlowLocation;
}
function toCanonicalResult(result: Result): Result {
const canonicalResult = {
...result,
@@ -53,30 +40,37 @@ function toCanonicalResult(result: Result): Result {
canonicalResult.relatedLocations.map(toCanonicalLocation);
}
if (canonicalResult.codeFlows && canonicalResult.codeFlows.length > 0) {
// If there are codeFlows, we don't want to compare the full codeFlows. Instead, we just want to compare the
// source and the sink (i.e. the first and last item). CodeQL should guarantee that the first and last threadFlow
// of every codeFlow is the same (i.e. every codeFlow has the same source and sink). Therefore, we just compare the
// first codeFlow and ignore the other codeFlows completely.
// If the codeFlow has a length of 1, this doesn't change the result.
if (canonicalResult.codeFlows) {
canonicalResult.codeFlows = canonicalResult.codeFlows.map((codeFlow) => {
if (codeFlow.threadFlows) {
return {
...codeFlow,
threadFlows: codeFlow.threadFlows.map((threadFlow) => {
if (threadFlow.locations) {
return {
...threadFlow,
locations: threadFlow.locations.map((threadFlowLocation) => {
if (threadFlowLocation.location) {
return {
...threadFlowLocation,
location: toCanonicalLocation(
threadFlowLocation.location,
),
};
}
const source = {
...canonicalResult.codeFlows[0].threadFlows[0],
};
const sink = {
...canonicalResult.codeFlows[0].threadFlows[
canonicalResult.codeFlows[0].threadFlows.length - 1
],
};
source.locations = source.locations.map(toCanonicalThreadFlowLocation);
sink.locations = sink.locations.map(toCanonicalThreadFlowLocation);
return threadFlowLocation;
}),
};
}
canonicalResult.codeFlows = [
{
...canonicalResult.codeFlows[0],
threadFlows: [source, sink],
},
];
return threadFlow;
}),
};
}
return codeFlow;
});
}
return canonicalResult;
@@ -85,9 +79,11 @@ function toCanonicalResult(result: Result): Result {
/**
* Compare the alerts of two queries. Use deep equality to determine if
* results have been added or removed across two invocations of a query.
* It first canonicalizes the results to ensure that when small changes
* to the query are made, the results are still considered the same. This
* includes the removal of all paths except for the source and sink.
*
* Assumptions:
*
* 1. Queries have the same sort order
* 2. Results are not changed or re-ordered, they are only added or removed
*
* @param fromResults the source query
* @param toResults the target query
@@ -108,30 +104,19 @@ export function sarifDiff(fromResults: Result[], toResults: Result[]) {
const canonicalFromResults = fromResults.map(toCanonicalResult);
const canonicalToResults = toResults.map(toCanonicalResult);
const diffResults = {
const results = {
from: arrayDiff(canonicalFromResults, canonicalToResults),
to: arrayDiff(canonicalToResults, canonicalFromResults),
};
if (
fromResults.length === diffResults.from.length &&
toResults.length === diffResults.to.length
fromResults.length === results.from.length &&
toResults.length === results.to.length
) {
throw new Error("CodeQL Compare: No overlap between the selected queries.");
}
// We don't want to return the canonical results; we want to return the original results.
// We can retrieve this by finding the index of the canonical result in the canonical results
// and then using that index to find the original result. This is possible because we know that
// we did a 1-to-1 map between the canonical results and the original results.
return {
from: diffResults.from.map(
(result) => fromResults[canonicalFromResults.indexOf(result)],
),
to: diffResults.to.map(
(result) => toResults[canonicalToResults.indexOf(result)],
),
};
return results;
}
function arrayDiff<T>(source: readonly T[], toRemove: readonly T[]): T[] {

View File

@@ -112,7 +112,9 @@ export function hasEnterpriseUri(): boolean {
* Does the uri look like GHEC-DR?
*/
function isGhecDrUri(uri: Uri | undefined): boolean {
return uri !== undefined && uri.authority.toLowerCase().endsWith(".ghe.com");
return (
uri !== undefined && !uri.authority.toLowerCase().endsWith("github.com")
);
}
/**
@@ -165,8 +167,6 @@ const ROOT_SETTING = new Setting("codeQL");
const TELEMETRY_SETTING = new Setting("telemetry", ROOT_SETTING);
export const LOG_TELEMETRY = new Setting("logTelemetry", TELEMETRY_SETTING);
// Legacy setting that is no longer used, but is used for showing a message when the user upgrades.
export const ENABLE_TELEMETRY = new Setting(
"enableTelemetry",
TELEMETRY_SETTING,
@@ -593,27 +593,7 @@ export const NO_CACHE_CONTEXTUAL_QUERIES = new Setting(
// Settings for variant analysis
const VARIANT_ANALYSIS_SETTING = new Setting("variantAnalysis", ROOT_SETTING);
/**
* The name of the "controller" repository that you want to use with the "Run Variant Analysis" command.
* Note: This command is only available for internal users.
*
* This setting should be a GitHub repository of the form `<owner>/<repo>`.
*/
const REMOTE_CONTROLLER_REPO = new Setting(
"controllerRepo",
VARIANT_ANALYSIS_SETTING,
);
export function getRemoteControllerRepo(): string | undefined {
return REMOTE_CONTROLLER_REPO.getValue<string>() || undefined;
}
export async function setRemoteControllerRepo(repo: string | undefined) {
await REMOTE_CONTROLLER_REPO.updateValue(repo, ConfigurationTarget.Global);
}
export interface VariantAnalysisConfig {
controllerRepo: string | undefined;
showSystemDefinedRepositoryLists: boolean;
/**
* This uses a URL instead of a URI because the URL class is available in
@@ -634,10 +614,6 @@ export class VariantAnalysisConfigListener
);
}
public get controllerRepo(): string | undefined {
return getRemoteControllerRepo();
}
public get showSystemDefinedRepositoryLists(): boolean {
return !hasEnterpriseUri();
}
@@ -830,9 +806,19 @@ export async function setAutogenerateQlPacks(choice: AutogenerateQLPacks) {
const MODEL_SETTING = new Setting("model", ROOT_SETTING);
const FLOW_GENERATION = new Setting("flowGeneration", MODEL_SETTING);
const LLM_GENERATION = new Setting("llmGeneration", MODEL_SETTING);
const LLM_GENERATION_BATCH_SIZE = new Setting(
"llmGenerationBatchSize",
MODEL_SETTING,
);
const LLM_GENERATION_DEV_ENDPOINT = new Setting(
"llmGenerationDevEndpoint",
MODEL_SETTING,
);
const MODEL_EVALUATION = new Setting("evaluation", MODEL_SETTING);
const MODEL_PACK_LOCATION = new Setting("packLocation", MODEL_SETTING);
const MODEL_PACK_NAME = new Setting("packName", MODEL_SETTING);
const ENABLE_PYTHON = new Setting("enablePython", MODEL_SETTING);
export type ModelConfigPackVariables = {
database: string;
@@ -843,11 +829,13 @@ export type ModelConfigPackVariables = {
export interface ModelConfig {
flowGeneration: boolean;
llmGeneration: boolean;
getPackLocation(
languageId: string,
variables: ModelConfigPackVariables,
): string;
getPackName(languageId: string, variables: ModelConfigPackVariables): string;
enablePython: boolean;
}
export class ModelConfigListener extends ConfigListener implements ModelConfig {
@@ -862,6 +850,26 @@ export class ModelConfigListener extends ConfigListener implements ModelConfig {
return !!FLOW_GENERATION.getValue<boolean>();
}
public get llmGeneration(): boolean {
return !!LLM_GENERATION.getValue<boolean>() && !hasEnterpriseUri();
}
/**
* Limits the number of candidates we send to the model in each request to avoid long requests.
* Note that the model may return fewer than this number of candidates.
*/
public get llmGenerationBatchSize(): number {
return LLM_GENERATION_BATCH_SIZE.getValue<number | null>() || 5;
}
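
A small sketch of how a batch-size setting like the one above might be applied when sending candidates; the chunking helper and the caller-supplied request function are illustrative, not the extension's actual auto-model client.

// Generic chunking helper; only the batch-size idea comes from the setting above.
function chunk<T>(items: readonly T[], size: number): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < items.length; i += size) {
    batches.push(items.slice(i, i + size));
  }
  return batches;
}

// Hypothetical usage: pass modelConfig.llmGenerationBatchSize as `batchSize`.
async function sendCandidatesInBatches(
  candidates: string[],
  batchSize: number,
  sendBatch: (batch: string[]) => Promise<void>, // caller-supplied request function
): Promise<void> {
  for (const batch of chunk(candidates, batchSize)) {
    await sendBatch(batch);
  }
}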
/**
* The URL of the endpoint to use for LLM generation. This should only be set
* if you want to test against a dev server.
*/
public get llmGenerationDevEndpoint(): string | undefined {
return LLM_GENERATION_DEV_ENDPOINT.getValue<string | undefined>();
}
public get modelEvaluation(): boolean {
return !!MODEL_EVALUATION.getValue<boolean>();
}
@@ -889,6 +897,10 @@ export class ModelConfigListener extends ConfigListener implements ModelConfig {
variables,
);
}
public get enablePython(): boolean {
return !!ENABLE_PYTHON.getValue<boolean>();
}
}
const GITHUB_DATABASE_SETTING = new Setting("githubDatabase", ROOT_SETTING);

View File

@@ -298,7 +298,7 @@ export class DbConfigStore extends DisposableObject {
let newConfig: DbConfig | undefined = undefined;
try {
newConfig = await readJSON(this.configPath);
} catch {
} catch (e) {
this.configErrors = [
{
kind: DbConfigValidationErrorKind.InvalidJson,
@@ -332,7 +332,7 @@ export class DbConfigStore extends DisposableObject {
let newConfig: DbConfig | undefined = undefined;
try {
newConfig = readJSONSync(this.configPath);
} catch {
} catch (e) {
this.configErrors = [
{
kind: DbConfigValidationErrorKind.InvalidJson,

View File

@@ -1,3 +1,5 @@
import type { Response } from "node-fetch";
import fetch, { AbortError } from "node-fetch";
import type { InputBoxOptions } from "vscode";
import { Uri, window } from "vscode";
import type { CodeQLCliServer } from "../codeql-cli/cli";
@@ -452,7 +454,7 @@ export class DatabaseFetcher {
let uri;
try {
uri = Uri.parse(databaseUrl, true);
} catch {
} catch (e) {
throw new Error(`Invalid url: ${databaseUrl}`);
}
@@ -534,8 +536,8 @@ export class DatabaseFetcher {
} catch (e) {
disposeTimeout();
if (e instanceof DOMException && e.name === "AbortError") {
const thrownError = new Error("The request timed out.");
if (e instanceof AbortError) {
const thrownError = new AbortError("The request timed out.");
thrownError.stack = e.stack;
throw thrownError;
}
@@ -543,52 +545,30 @@ export class DatabaseFetcher {
throw e;
}
const body = response.body;
if (!body) {
throw new Error("No response body found");
}
const archiveFileStream = createWriteStream(archivePath);
const contentLength = response.headers.get("content-length");
const totalNumBytes = contentLength
? parseInt(contentLength, 10)
: undefined;
const reportProgress = reportStreamProgress(
reportStreamProgress(
response.body,
"Downloading database",
totalNumBytes,
progress,
);
response.body.on("data", onData);
try {
const reader = body.getReader();
for (;;) {
const { done, value } = await reader.read();
if (done) {
break;
}
onData();
reportProgress(value?.length ?? 0);
await new Promise((resolve, reject) => {
archiveFileStream.write(value, (err) => {
if (err) {
reject(err);
}
resolve(undefined);
});
});
}
await new Promise((resolve, reject) => {
archiveFileStream.close((err) => {
if (err) {
reject(err);
}
resolve(undefined);
});
response.body
.pipe(archiveFileStream)
.on("finish", resolve)
.on("error", reject);
// If an error occurs on the body, we also want to reject the promise (e.g. during a timeout error).
response.body.on("error", reject);
});
} catch (e) {
// Close and remove the file if an error occurs
@@ -596,8 +576,8 @@ export class DatabaseFetcher {
void remove(archivePath);
});
if (e instanceof DOMException && e.name === "AbortError") {
const thrownError = new Error("The download timed out.");
if (e instanceof AbortError) {
const thrownError = new AbortError("The download timed out.");
thrownError.stack = e.stack;
throw thrownError;
}
@@ -632,7 +612,7 @@ export class DatabaseFetcher {
const obj = JSON.parse(text);
msg =
obj.error || obj.message || obj.reason || JSON.stringify(obj, null, 2);
} catch {
} catch (e) {
msg = text;
}
throw new Error(`${errorMessage}.\n\nReason: ${msg}`);

View File

@@ -16,7 +16,6 @@ import {
ThemeIcon,
ThemeColor,
workspace,
FileType,
} from "vscode";
import { pathExists, stat, readdir, remove } from "fs-extra";
@@ -37,7 +36,6 @@ import {
import {
showAndLogExceptionWithTelemetry,
showAndLogErrorMessage,
showAndLogInformationMessage,
} from "../common/logging";
import type { DatabaseFetcher } from "./database-fetcher";
import { asError, asyncFilter, getErrorMessage } from "../common/helpers-pure";
@@ -269,8 +267,6 @@ export class DatabaseUI extends DisposableObject {
"codeQL.getCurrentDatabase": this.handleGetCurrentDatabase.bind(this),
"codeQL.chooseDatabaseFolder":
this.handleChooseDatabaseFolderFromPalette.bind(this),
"codeQL.chooseDatabaseFoldersParent":
this.handleChooseDatabaseFoldersParentFromPalette.bind(this),
"codeQL.chooseDatabaseArchive":
this.handleChooseDatabaseArchiveFromPalette.bind(this),
"codeQL.chooseDatabaseInternet":
@@ -363,12 +359,6 @@ export class DatabaseUI extends DisposableObject {
);
}
private async handleChooseDatabaseFoldersParentFromPalette(): Promise<void> {
return withProgress(async (progress) => {
await this.chooseDatabasesParentFolder(progress);
});
}
private async handleSetDefaultTourDatabase(): Promise<void> {
return withProgress(
async () => {
@@ -966,32 +956,6 @@ export class DatabaseUI extends DisposableObject {
}
}
/**
* Import database from uri. Returns the imported database, or `undefined` if the
* operation was unsuccessful or canceled.
*/
private async importDatabase(
uri: Uri,
byFolder: boolean,
progress: ProgressCallback,
): Promise<DatabaseItem | undefined> {
if (byFolder && !uri.fsPath.endsWith(".testproj")) {
const fixedUri = await this.fixDbUri(uri);
// we are selecting a database folder
return await this.databaseManager.openDatabase(fixedUri, {
type: "folder",
});
} else {
// we are selecting a database archive or a .testproj.
// Unzip archives (if an archive) and copy into a workspace-controlled area
// before importing.
return await this.databaseFetcher.importLocalDatabase(
uri.toString(true),
progress,
);
}
}
/**
* Ask the user for a database directory. Returns the chosen database, or `undefined` if the
* operation was canceled.
@@ -1005,89 +969,21 @@ export class DatabaseUI extends DisposableObject {
return undefined;
}
return await this.importDatabase(uri, byFolder, progress);
}
/**
* Ask the user for a parent directory that contains all databases.
* Returns all valid databases, or `undefined` if the operation was canceled.
*/
private async chooseDatabasesParentFolder(
progress: ProgressCallback,
): Promise<DatabaseItem[] | undefined> {
const uri = await chooseDatabaseDir(true);
if (!uri) {
return undefined;
}
const databases: DatabaseItem[] = [];
const failures: string[] = [];
const entries = await workspace.fs.readDirectory(uri);
const validFileTypes = [FileType.File, FileType.Directory];
for (const [index, entry] of entries.entries()) {
progress({
step: index + 1,
maxStep: entries.length,
message: `Importing '${entry[0]}'`,
if (byFolder && !uri.fsPath.endsWith("testproj")) {
const fixedUri = await this.fixDbUri(uri);
// we are selecting a database folder
return await this.databaseManager.openDatabase(fixedUri, {
type: "folder",
});
const subProgress: ProgressCallback = (p) => {
progress({
step: index + 1,
maxStep: entries.length,
message: `Importing '${entry[0]}': (${p.step}/${p.maxStep}) ${p.message}`,
});
};
if (!validFileTypes.includes(entry[1])) {
void this.app.logger.log(
`Skipping import for '${entry}', invalid file type: ${entry[1]}`,
);
continue;
}
try {
const databaseUri = Uri.joinPath(uri, entry[0]);
void this.app.logger.log(`Importing from ${databaseUri}`);
const database = await this.importDatabase(
databaseUri,
entry[1] === FileType.Directory,
subProgress,
);
if (database) {
databases.push(database);
} else {
failures.push(entry[0]);
}
} catch (e) {
failures.push(`${entry[0]}: ${getErrorMessage(e)}`.trim());
}
}
if (failures.length) {
void showAndLogErrorMessage(
this.app.logger,
`Failed to import ${failures.length} database(s), successfully imported ${databases.length} database(s).`,
{
fullMessage: `Failed to import ${failures.length} database(s), successfully imported ${databases.length} database(s).\nFailed databases:\n - ${failures.join("\n - ")}`,
},
);
} else if (databases.length === 0) {
void showAndLogErrorMessage(
this.app.logger,
`No database folder to import.`,
);
return undefined;
} else {
void showAndLogInformationMessage(
this.app.logger,
`Successfully imported ${databases.length} database(s).`,
// we are selecting a database archive or a testproj.
// Unzip archives (if an archive) and copy into a workspace-controlled area
// before importing.
return await this.databaseFetcher.importLocalDatabase(
uri.toString(true),
progress,
);
}
return databases;
}
/**

View File

@@ -231,7 +231,7 @@ export class DatabaseManager extends DisposableObject {
let originStat;
try {
originStat = await stat(originDbYml);
} catch {
} catch (e) {
// if there is an error here, assume that the origin database
// is no longer available. Safely ignore and do not try to re-import.
return false;
@@ -240,7 +240,7 @@ export class DatabaseManager extends DisposableObject {
try {
const importedStat = await stat(importedDbYml);
return originStat.mtimeMs > importedStat.mtimeMs;
} catch {
} catch (e) {
// If either of the files does not exist, we assume the origin is newer.
// This shouldn't happen unless the user manually deleted one of the files.
return true;

View File

@@ -37,12 +37,11 @@ export const shownLocationLineDecoration =
/**
* Resolves the specified CodeQL location to a URI into the source archive.
* @param loc CodeQL location to resolve. Must have a non-empty value for `loc.file`.
* @param databaseItem Database in which to resolve the file location, or `undefined` to resolve
* from the local file system.
* @param databaseItem Database in which to resolve the file location.
*/
function resolveFivePartLocation(
loc: UrlValueLineColumnLocation,
databaseItem: DatabaseItem | undefined,
databaseItem: DatabaseItem,
): Location {
// `Range` is a half-open interval, and is zero-based. CodeQL locations are closed intervals, and
// are one-based. Adjust accordingly.
@@ -53,10 +52,7 @@ function resolveFivePartLocation(
Math.max(1, loc.endColumn),
);
return new Location(
databaseItem?.resolveSourceFile(loc.uri) ?? Uri.parse(loc.uri),
range,
);
return new Location(databaseItem.resolveSourceFile(loc.uri), range);
}
/**
@@ -66,26 +62,22 @@ function resolveFivePartLocation(
*/
function resolveWholeFileLocation(
loc: UrlValueWholeFileLocation,
databaseItem: DatabaseItem | undefined,
databaseItem: DatabaseItem,
): Location {
// A location corresponding to the start of the file.
const range = new Range(0, 0, 0, 0);
return new Location(
databaseItem?.resolveSourceFile(loc.uri) ?? Uri.parse(loc.uri),
range,
);
return new Location(databaseItem.resolveSourceFile(loc.uri), range);
}
/**
* Try to resolve the specified CodeQL location to a URI into the source archive. If no exact location
* can be resolved, returns `undefined`.
* @param loc CodeQL location to resolve
* @param databaseItem Database in which to resolve the file location, or `undefined` to resolve
* from the local file system.
* @param databaseItem Database in which to resolve the file location.
*/
export function tryResolveLocation(
loc: UrlValueResolvable | undefined,
databaseItem: DatabaseItem | undefined,
databaseItem: DatabaseItem,
): Location | undefined {
if (!loc) {
return;
@@ -103,7 +95,7 @@ export function tryResolveLocation(
export async function showResolvableLocation(
loc: UrlValueResolvable,
databaseItem: DatabaseItem | undefined,
databaseItem: DatabaseItem,
logger: Logger,
): Promise<void> {
try {
@@ -159,14 +151,13 @@ export async function showLocation(location?: Location) {
}
export async function jumpToLocation(
databaseUri: string | undefined,
databaseUri: string,
loc: UrlValueResolvable,
databaseManager: DatabaseManager,
logger: Logger,
) {
const databaseItem =
databaseUri !== undefined
? databaseManager.findDatabaseItem(Uri.parse(databaseUri))
: undefined;
await showResolvableLocation(loc, databaseItem, logger);
const databaseItem = databaseManager.findDatabaseItem(Uri.parse(databaseUri));
if (databaseItem !== undefined) {
await showResolvableLocation(loc, databaseItem, logger);
}
}

View File

@@ -18,8 +18,6 @@ import type { DbManager } from "../db-manager";
import { DbTreeDataProvider } from "./db-tree-data-provider";
import type { DbTreeViewItem } from "./db-tree-view-item";
import { getGitHubUrl } from "./db-tree-view-item-action";
import { getControllerRepo } from "../../variant-analysis/run-remote-query";
import { getErrorMessage } from "../../common/helpers-pure";
import type { DatabasePanelCommands } from "../../common/commands";
import type { App } from "../../common/app";
import { QueryLanguage } from "../../common/query-language";
@@ -74,9 +72,6 @@ export class DbPanel extends DisposableObject {
this.addNewRemoteDatabase.bind(this),
"codeQLVariantAnalysisRepositories.addNewList":
this.addNewList.bind(this),
"codeQLVariantAnalysisRepositories.setupControllerRepository":
this.setupControllerRepository.bind(this),
"codeQLVariantAnalysisRepositories.setSelectedItem":
this.setSelectedItem.bind(this),
"codeQLVariantAnalysisRepositories.setSelectedItemContextMenu":
@@ -427,22 +422,4 @@ export class DbPanel extends DisposableObject {
await this.app.commands.execute("vscode.open", Uri.parse(githubUrl));
}
private async setupControllerRepository(): Promise<void> {
try {
// This will also validate that the controller repository is valid
await getControllerRepo(this.app.credentials);
} catch (e: unknown) {
if (e instanceof UserCancellationException) {
return;
}
void showAndLogErrorMessage(
this.app.logger,
`An error occurred while setting up the controller repository: ${getErrorMessage(
e,
)}`,
);
}
}
}

View File

@@ -83,11 +83,6 @@ export class DbTreeDataProvider
}
private createTree(): DbTreeViewItem[] {
// Returning an empty tree here will show the welcome view
if (!this.variantAnalysisConfig.controllerRepo) {
return [];
}
const dbItemsResult = this.dbManager.getDbItems();
if (dbItemsResult.isFailure) {

View File

@@ -96,7 +96,7 @@ export type Response = DebugProtocol.Response & { type: "response" };
export type InitializeResponse = DebugProtocol.InitializeResponse &
Response & { command: "initialize" };
export type QuickEvalResponse = Response;
export interface QuickEvalResponse extends Response {}
export type AnyResponse = InitializeResponse | QuickEvalResponse;

View File

@@ -300,12 +300,12 @@ const shouldUpdateOnNextActivationKey = "shouldUpdateOnNextActivation";
const codeQlVersionRange = DEFAULT_DISTRIBUTION_VERSION_RANGE;
// This is the minimum version of vscode that we _want_ to support. We want to update to Node 20, but that
// requires 1.90 or later. If we change the minimum version in the package.json, then anyone on an older version of vscode will
// This is the minimum version of vscode that we _want_ to support. We want to update to Node 18, but that
// requires 1.82 or later. If we change the minimum version in the package.json, then anyone on an older version of vscode will
// silently be unable to upgrade. So, the solution is to first bump the minimum version here and release. Then
// bump the version in the package.json and release again. This way, anyone on an older version of vscode will get a warning
// rather than silently being unable to upgrade.
const MIN_VERSION = "1.90.0";
const MIN_VERSION = "1.82.0";
function sendConfigTelemetryData() {
const config: Record<string, string> = {};
@@ -362,9 +362,7 @@ export async function activate(
distributionConfigListener,
codeQlVersionRange,
ctx,
app.logger,
);
await distributionManager.initialize();
registerErrorStubs([checkForUpdatesCommand], (command) => async () => {
void showAndLogErrorMessage(
@@ -748,13 +746,9 @@ async function activateWithInstalledDistribution(
);
ctx.subscriptions.push(qlConfigurationListener);
void extLogger.log("Initializing CodeQL language server.");
const languageClient = createLanguageClient(qlConfigurationListener);
void extLogger.log("Initializing CodeQL cli server...");
const cliServer = new CodeQLCliServer(
app,
languageClient,
distributionManager,
new CliConfigListener(),
extLogger,
@@ -965,6 +959,9 @@ async function activateWithInstalledDistribution(
ctx.subscriptions.push(tmpDirDisposal);
void extLogger.log("Initializing CodeQL language server.");
const languageClient = createLanguageClient(qlConfigurationListener);
const localQueries = new LocalQueries(
app,
qs,
@@ -1126,8 +1123,6 @@ async function activateWithInstalledDistribution(
void extLogger.log("Reading query history");
await qhm.readQueryHistory();
distributionManager.startCleanup();
void extLogger.log("Successfully finished extension initialization.");
return {

View File

@@ -7,7 +7,6 @@ export class CachedOperation<S extends unknown[], U> {
private readonly operation: (t: string, ...args: S) => Promise<U>;
private readonly cached: Map<string, U>;
private readonly lru: string[];
private generation: number;
private readonly inProgressCallbacks: Map<
string,
Array<[(u: U) => void, (reason?: Error) => void]>
@@ -18,7 +17,6 @@ export class CachedOperation<S extends unknown[], U> {
private cacheSize = 100,
) {
this.operation = operation;
this.generation = 0;
this.lru = [];
this.inProgressCallbacks = new Map<
string,
@@ -48,7 +46,7 @@ export class CachedOperation<S extends unknown[], U> {
inProgressCallback.push([resolve, reject]);
});
}
const origGeneration = this.generation;
// Otherwise compute the new value, but leave a callback to allow sharing work
const callbacks: Array<[(u: U) => void, (reason?: Error) => void]> = [];
this.inProgressCallbacks.set(t, callbacks);
@@ -56,11 +54,6 @@ export class CachedOperation<S extends unknown[], U> {
const result = await this.operation(t, ...args);
callbacks.forEach((f) => f[0](result));
this.inProgressCallbacks.delete(t);
if (this.generation !== origGeneration) {
// Cache was reset in the meantime so don't trust this
// result enough to cache it.
return result;
}
if (this.lru.length > this.cacheSize) {
const toRemove = this.lru.shift()!;
this.cached.delete(toRemove);
@@ -76,11 +69,4 @@ export class CachedOperation<S extends unknown[], U> {
this.inProgressCallbacks.delete(t);
}
}
reset() {
this.cached.clear();
this.lru.length = 0;
this.generation++;
this.inProgressCallbacks.clear();
}
}

View File

@@ -31,7 +31,7 @@ export function fileRangeFromURI(
return new Location(db.resolveSourceFile(uri.uri), range);
}
return undefined;
} catch {
} catch (e) {
return undefined;
}
}

View File

@@ -28,7 +28,7 @@ export async function resolveContextualQlPacksForDatabase(
): Promise<QlPacksForLanguage> {
try {
return await qlpackOfDatabase(cli, databaseItem);
} catch {
} catch (e) {
// If we can't find the qlpacks for the database, use the defaults instead
}

View File

@@ -12,7 +12,7 @@ import { languages, IndentAction } from "vscode";
* See https://github.com/microsoft/vscode/blob/master/src/vs/editor/test/common/modes/supports/javascriptOnEnterRules.ts
*/
export function install() {
// eslint-disable-next-line @typescript-eslint/no-require-imports
// eslint-disable-next-line @typescript-eslint/no-var-requires
const langConfig = require("../../language-configuration.json");
// setLanguageConfiguration requires a regexp for the wordpattern, not a string
langConfig.wordPattern = new RegExp(langConfig.wordPattern);

View File

@@ -75,7 +75,6 @@ import type { App } from "../common/app";
import type { Disposable } from "../common/disposable-object";
import type { RawResultSet } from "../common/raw-result-types";
import type { BqrsResultSetSchema } from "../common/bqrs-cli-types";
import { CachedOperation } from "../language-support/contextual/cached-operation";
/**
* results-view.ts
@@ -178,8 +177,6 @@ export class ResultsView extends AbstractWebview<
// Event listeners that should be disposed of when the view is disposed.
private disposableEventListeners: Disposable[] = [];
private schemaCache: CachedOperation<[], BqrsResultSetSchema[]>;
constructor(
app: App,
private databaseManager: DatabaseManager,
@@ -209,10 +206,6 @@ export class ResultsView extends AbstractWebview<
}
}),
);
this.schemaCache = new CachedOperation(
this.getResultSetSchemasImpl.bind(this),
);
}
public getCommands(): ResultsViewCommands {
@@ -427,7 +420,6 @@ export class ResultsView extends AbstractWebview<
);
return;
}
this.schemaCache.reset();
// Notify the webview that it should expect new results.
await this.postMessage({ t: "resultsUpdating" });
await this._displayedQuery.completedQuery.updateSortState(
@@ -545,14 +537,6 @@ export class ResultsView extends AbstractWebview<
resultSetNames,
};
await this.postMessage({
t: "setUserSettings",
userSettings: {
// Only show provenance info in canary mode for now.
shouldShowProvenance: isCanary(),
},
});
await this.postMessage({
t: "setState",
interpretation: interpretationPage,
@@ -618,12 +602,6 @@ export class ResultsView extends AbstractWebview<
selectedTable = "",
): Promise<BqrsResultSetSchema[]> {
const resultsPath = completedQuery.getResultsPath(selectedTable);
return this.schemaCache.get(resultsPath);
}
private async getResultSetSchemasImpl(
resultsPath: string,
): Promise<BqrsResultSetSchema[]> {
const schemas = await this.cliServer.bqrsInfo(
resultsPath,
PAGE_SIZE.getValue(),

View File

@@ -94,19 +94,19 @@ export class LogScannerService extends DisposableObject {
public async scanEvalLog(query: QueryHistoryInfo | undefined): Promise<void> {
this.diagnosticCollection.clear();
if (query?.t !== "local" || query.evaluatorLogPaths === undefined) {
if (
query?.t !== "local" ||
query.evalLogSummaryLocation === undefined ||
query.jsonEvalLogSummaryLocation === undefined
) {
return;
}
const { summarySymbols, jsonSummary, humanReadableSummary } =
query.evaluatorLogPaths;
if (jsonSummary === undefined || humanReadableSummary === undefined) {
return;
}
const diagnostics = await this.scanLog(jsonSummary, summarySymbols);
const uri = Uri.file(humanReadableSummary);
const diagnostics = await this.scanLog(
query.jsonEvalLogSummaryLocation,
query.evalLogSummarySymbolsLocation,
);
const uri = Uri.file(query.evalLogSummaryLocation);
this.diagnosticCollection.set(uri, diagnostics);
}

View File

@@ -0,0 +1,65 @@
import type { Credentials } from "../common/authentication";
import type { OctokitResponse } from "@octokit/types";
import fetch from "node-fetch";
import type { ModelConfigListener } from "../config";
export enum AutomodelMode {
Unspecified = "AUTOMODEL_MODE_UNSPECIFIED",
Framework = "AUTOMODEL_MODE_FRAMEWORK",
Application = "AUTOMODEL_MODE_APPLICATION",
}
export interface ModelRequest {
mode: AutomodelMode;
// Base64-encoded GZIP-compressed SARIF log
candidates: string;
}
export interface ModelResponse {
models: string;
}
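// Illustrative request/response shapes (editor's note, not part of this changeset;
// the values are hypothetical and truncated):
//   request:  { mode: "AUTOMODEL_MODE_FRAMEWORK", candidates: "H4sIAAAA..." }
//   response: { models: "extensions:\n  - addsTo: ...\n    data: ..." }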
export async function autoModel(
credentials: Credentials,
request: ModelRequest,
modelingConfig: ModelConfigListener,
): Promise<ModelResponse> {
const devEndpoint = modelingConfig.llmGenerationDevEndpoint;
if (devEndpoint) {
return callAutoModelDevEndpoint(devEndpoint, request);
} else {
const octokit = await credentials.getOctokit();
const response: OctokitResponse<ModelResponse> = await octokit.request(
"POST /repos/github/codeql/code-scanning/codeql/auto-model",
{
data: request,
},
);
return response.data;
}
}
async function callAutoModelDevEndpoint(
endpoint: string,
request: ModelRequest,
): Promise<ModelResponse> {
const json = JSON.stringify(request);
const response = await fetch(endpoint, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: json,
});
if (!response.ok) {
throw new Error(
`Error calling auto-model API: ${response.status} ${response.statusText}`,
);
}
const data = await response.json();
return data as ModelResponse;
}

View File

@@ -0,0 +1,233 @@
import type { CodeQLCliServer, SourceInfo } from "../codeql-cli/cli";
import type { CoreCompletedQuery, QueryRunner } from "../query-server";
import type { DatabaseItem } from "../databases/local-databases";
import type { ProgressCallback } from "../common/vscode/progress";
import type { Log } from "sarif";
import type { Mode } from "./shared/mode";
import { getOnDiskWorkspaceFolders } from "../common/vscode/workspace-folders";
import { interpretResultsSarif } from "../query-results";
import { join } from "path";
import { dir } from "tmp-promise";
import { writeFile, outputFile } from "fs-extra";
import { dump as dumpYaml } from "js-yaml";
import type { MethodSignature } from "./method";
import { runQuery } from "../local-queries/run-query";
import type { QueryMetadata } from "../common/interface-types";
import type { CancellationTokenSource } from "vscode";
import { resolveQueries } from "../local-queries";
import { modeTag } from "./mode-tag";
type AutoModelQueriesOptions = {
mode: Mode;
candidateMethods: MethodSignature[];
cliServer: CodeQLCliServer;
queryRunner: QueryRunner;
databaseItem: DatabaseItem;
queryStorageDir: string;
progress: ProgressCallback;
cancellationTokenSource: CancellationTokenSource;
};
export type AutoModelQueriesResult = {
candidates: Log;
};
export async function runAutoModelQueries({
mode,
candidateMethods,
cliServer,
queryRunner,
databaseItem,
queryStorageDir,
progress,
cancellationTokenSource,
}: AutoModelQueriesOptions): Promise<AutoModelQueriesResult | undefined> {
// First, resolve the query that we want to run.
const queryPath = await resolveAutomodelQuery(
cliServer,
databaseItem,
"candidates",
mode,
);
// Generate a pack containing the candidate filters
const { packDir: filterPackDir, cleanup: cleanupFilterPack } =
await generateCandidateFilterPack(databaseItem.language, candidateMethods);
const additionalPacks = [...getOnDiskWorkspaceFolders(), filterPackDir];
const extensionPacks = Object.keys(
await cliServer.resolveQlpacks(additionalPacks, true),
);
// Run the actual query
const completedQuery = await runQuery({
queryRunner,
databaseItem,
queryPath,
queryStorageDir,
additionalPacks,
extensionPacks,
progress,
token: cancellationTokenSource.token,
});
await cleanupFilterPack();
if (!completedQuery) {
return undefined;
}
// Get metadata for the query. This is required to interpret the results. We already know the kind is problem
// (because of the constraint in resolveQueries), so we don't need any more checks on the metadata.
const metadata = await cliServer.resolveMetadata(queryPath);
// CodeQL needs to have access to the database to be able to retrieve the
// snippets from it. The source location prefix is used to determine the
// base path of the database.
const sourceLocationPrefix =
await databaseItem.getSourceLocationPrefix(cliServer);
const sourceArchiveUri = databaseItem.sourceArchive;
const sourceInfo =
sourceArchiveUri === undefined
? undefined
: {
sourceArchive: sourceArchiveUri.fsPath,
sourceLocationPrefix,
};
const candidates = await interpretAutomodelResults(
cliServer,
completedQuery,
metadata,
sourceInfo,
);
return {
candidates,
};
}
async function resolveAutomodelQuery(
cliServer: CodeQLCliServer,
databaseItem: DatabaseItem,
queryTag: string,
mode: Mode,
): Promise<string> {
const packsToSearch = [`codeql/${databaseItem.language}-automodel-queries`];
// First, resolve the query that we want to run.
// All queries are tagged like this:
// internal extract automodel <mode> <queryTag>
// Example: internal extract automodel framework-mode candidates
const queries = await resolveQueries(
cliServer,
packsToSearch,
`Extract automodel ${queryTag}`,
{
kind: "problem",
"tags contain all": ["automodel", modeTag(mode), ...queryTag.split(" ")],
},
);
if (queries.length > 1) {
throw new Error(
`Found multiple auto model queries for ${mode} ${queryTag}. Can't continue`,
);
}
if (queries.length === 0) {
throw new Error(
`Did not find any auto model queries for ${mode} ${queryTag}. Can't continue`,
);
}
return queries[0];
}
type CandidateFilterPackResult = {
packDir: string;
cleanup: () => Promise<void>;
};
/**
* generateCandidateFilterPack will create a temporary extension pack.
* This pack will contain a filter that will restrict the automodel queries
* to the specified candidate methods only.
* This is done using the `extensible` predicate "automodelCandidateFilter".
* @param language The language of the database; used to name the filter pack's extension target.
* @param candidateMethods The candidate methods to include in the filter.
* @returns The directory of the generated pack and a cleanup function that removes it.
*/
export async function generateCandidateFilterPack(
language: string,
candidateMethods: MethodSignature[],
): Promise<CandidateFilterPackResult> {
// Pack resides in a temporary directory, to not pollute the workspace.
const { path: packDir, cleanup } = await dir({ unsafeCleanup: true });
const syntheticConfigPack = {
name: "codeql/automodel-filter",
version: "0.0.0",
library: true,
extensionTargets: {
[`codeql/${language}-automodel-queries`]: "*",
},
dataExtensions: ["filter.yml"],
};
const qlpackFile = join(packDir, "codeql-pack.yml");
await outputFile(qlpackFile, dumpYaml(syntheticConfigPack), "utf8");
// The predicate has the following definition:
// extensible predicate automodelCandidateFilter(string package, string type, string name, string signature)
const dataRows = candidateMethods.map((method) => [
method.packageName,
method.typeName,
method.methodName,
method.methodParameters,
]);
const filter = {
extensions: [
{
addsTo: {
pack: `codeql/${language}-automodel-queries`,
extensible: "automodelCandidateFilter",
},
data: dataRows,
},
],
};
const filterFile = join(packDir, "filter.yml");
await writeFile(filterFile, dumpYaml(filter), "utf8");
return {
packDir,
cleanup,
};
}
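// Illustrative output (editor's note, not part of this changeset): for language
// "java" and a single hypothetical candidate, the two files written above would
// look roughly like this.
//
// codeql-pack.yml:
//   name: codeql/automodel-filter
//   version: 0.0.0
//   library: true
//   extensionTargets:
//     codeql/java-automodel-queries: "*"
//   dataExtensions:
//     - filter.yml
//
// filter.yml:
//   extensions:
//     - addsTo:
//         pack: codeql/java-automodel-queries
//         extensible: automodelCandidateFilter
//       data:
//         - ["org.example", "Widget", "render", "(String)"]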
async function interpretAutomodelResults(
cliServer: CodeQLCliServer,
completedQuery: CoreCompletedQuery,
metadata: QueryMetadata,
sourceInfo: SourceInfo | undefined,
): Promise<Log> {
const interpretedResultsPath = join(
completedQuery.outputDir.querySaveDir,
"results.sarif",
);
const { ...sarif } = await interpretResultsSarif(
cliServer,
metadata,
{
resultsPath: completedQuery.outputDir.bqrsPath,
interpretedResultsPath,
},
sourceInfo,
["--sarif-add-snippets"],
);
return sarif;
}

View File

@@ -0,0 +1,41 @@
import type { ModelRequest } from "./auto-model-api";
import { AutomodelMode } from "./auto-model-api";
import { Mode } from "./shared/mode";
import type { AutoModelQueriesResult } from "./auto-model-codeml-queries";
import { assertNever } from "../common/helpers-pure";
import type { Log } from "sarif";
import { gzipEncode } from "../common/zlib";
/**
* Encode a SARIF log to the format expected by the server: JSON, GZIP-compressed, base64-encoded
* @param log SARIF log to encode
* @returns base64-encoded GZIP-compressed SARIF log
*/
export async function encodeSarif(log: Log): Promise<string> {
const json = JSON.stringify(log);
const buffer = Buffer.from(json, "utf-8");
const compressed = await gzipEncode(buffer);
return compressed.toString("base64");
}
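// Editor's sketch (not part of this changeset): the inverse of encodeSarif, shown
// only to make the wire format concrete. It assumes Node's built-in zlib rather
// than the extension's own gzip helpers.
//
// import { gunzipSync } from "zlib";
//
// function decodeSarif(candidates: string): Log {
//   const compressed = Buffer.from(candidates, "base64");
//   const json = gunzipSync(compressed).toString("utf-8");
//   return JSON.parse(json) as Log;
// }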
export async function createAutoModelRequest(
mode: Mode,
result: AutoModelQueriesResult,
): Promise<ModelRequest> {
let requestMode: AutomodelMode;
switch (mode) {
case Mode.Application:
requestMode = AutomodelMode.Application;
break;
case Mode.Framework:
requestMode = AutomodelMode.Framework;
break;
default:
assertNever(mode);
}
return {
mode: requestMode,
candidates: await encodeSarif(result.candidates),
};
}

View File

@@ -0,0 +1,249 @@
import type { Method, MethodSignature } from "./method";
import type { ModeledMethod } from "./modeled-method";
import { load as loadYaml } from "js-yaml";
import type { ProgressCallback } from "../common/vscode/progress";
import { withProgress } from "../common/vscode/progress";
import { createAutoModelRequest } from "./auto-model";
import { getCandidates } from "./shared/auto-model-candidates";
import { runAutoModelQueries } from "./auto-model-codeml-queries";
import { loadDataExtensionYaml } from "./yaml";
import type { ModelRequest, ModelResponse } from "./auto-model-api";
import { autoModel } from "./auto-model-api";
import { RequestError } from "@octokit/request-error";
import { showAndLogExceptionWithTelemetry } from "../common/logging";
import { redactableError } from "../common/errors";
import type { App } from "../common/app";
import type { CodeQLCliServer } from "../codeql-cli/cli";
import type { QueryRunner } from "../query-server";
import type { DatabaseItem } from "../databases/local-databases";
import type { Mode } from "./shared/mode";
import { CancellationTokenSource } from "vscode";
import type { ModelingStore } from "./modeling-store";
import type { ModelConfigListener } from "../config";
import type { QueryLanguage } from "../common/query-language";
/**
* The auto-modeler holds state around auto-modeling jobs and allows
* starting and stopping them.
*/
export class AutoModeler {
// Keep track of auto-modeling jobs that are in progress
// so that we can stop them.
private readonly jobs: Map<string, CancellationTokenSource>;
constructor(
private readonly app: App,
private readonly cliServer: CodeQLCliServer,
private readonly queryRunner: QueryRunner,
private readonly modelConfig: ModelConfigListener,
private readonly modelingStore: ModelingStore,
private readonly queryStorageDir: string,
private readonly databaseItem: DatabaseItem,
private readonly language: QueryLanguage,
private readonly addModeledMethods: (
modeledMethods: Record<string, ModeledMethod[]>,
) => Promise<void>,
) {
this.jobs = new Map<string, CancellationTokenSource>();
}
/**
* Models the given package's external API usages, except
* the ones that are already modeled.
* @param packageName The name of the package to model.
* @param methods The methods.
* @param modeledMethods The currently modeled methods.
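* @param processedByAutoModelMethods The signatures of methods that have already been sent to the auto-model.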
* @param mode The mode we are modeling in.
*/
public async startModeling(
packageName: string,
methods: readonly Method[],
modeledMethods: Record<string, readonly ModeledMethod[]>,
processedByAutoModelMethods: Set<string>,
mode: Mode,
): Promise<void> {
if (this.jobs.has(packageName)) {
return;
}
const cancellationTokenSource = new CancellationTokenSource();
this.jobs.set(packageName, cancellationTokenSource);
try {
await this.modelPackage(
packageName,
methods,
modeledMethods,
processedByAutoModelMethods,
mode,
cancellationTokenSource,
);
} finally {
this.jobs.delete(packageName);
}
}
/**
* Stops modeling the given package.
* @param packageName The name of the package to stop modeling.
*/
public async stopModeling(packageName: string): Promise<void> {
void this.app.logger.log(`Stopping modeling for package ${packageName}`);
const cancellationTokenSource = this.jobs.get(packageName);
if (cancellationTokenSource) {
cancellationTokenSource.cancel();
}
}
/**
* Stops all in-progress modeling jobs.
*/
public async stopAllModeling(): Promise<void> {
for (const cancellationTokenSource of this.jobs.values()) {
cancellationTokenSource.cancel();
}
}
private async modelPackage(
packageName: string,
methods: readonly Method[],
modeledMethods: Record<string, readonly ModeledMethod[]>,
processedByAutoModelMethods: Set<string>,
mode: Mode,
cancellationTokenSource: CancellationTokenSource,
): Promise<void> {
void this.app.logger.log(`Modeling package ${packageName}`);
const candidateBatchSize = this.modelConfig.llmGenerationBatchSize;
await withProgress(async (progress) => {
// Fetch the candidates to send to the model
const allCandidateMethods = getCandidates(
mode,
methods,
modeledMethods,
processedByAutoModelMethods,
);
// If there are no candidates, there is nothing to model and we just return
if (allCandidateMethods.length === 0) {
void this.app.logger.log("No candidates to model. Stopping.");
return;
}
// Find the number of batches to process
const batchNumber = Math.ceil(
allCandidateMethods.length / candidateBatchSize,
);
try {
for (let i = 0; i < batchNumber; i++) {
// Check if we should stop
if (cancellationTokenSource.token.isCancellationRequested) {
break;
}
const start = i * candidateBatchSize;
const end = start + candidateBatchSize;
const candidatesToProcess = allCandidateMethods.slice(start, end);
const candidateSignatures = candidatesToProcess.map(
(c) => c.signature,
);
// Let the UI know which candidates we are modeling
this.modelingStore.addInProgressMethods(
this.databaseItem,
candidateSignatures,
);
// Kick off the process to model the slice of candidates
await this.modelCandidates(
candidatesToProcess,
mode,
progress,
cancellationTokenSource,
);
// Let the UI know which candidates we are done modeling
this.modelingStore.removeInProgressMethods(
this.databaseItem,
candidateSignatures,
);
// Let the UI know which methods have been sent to the LLM
this.modelingStore.addProcessedByAutoModelMethods(
this.databaseItem,
candidateSignatures,
);
}
} finally {
// Clear out in progress methods in case anything went wrong
this.modelingStore.removeInProgressMethods(
this.databaseItem,
allCandidateMethods.map((c) => c.signature),
);
}
});
}
private async modelCandidates(
candidateMethods: MethodSignature[],
mode: Mode,
progress: ProgressCallback,
cancellationTokenSource: CancellationTokenSource,
): Promise<void> {
void this.app.logger.log("Executing auto-model queries");
const usages = await runAutoModelQueries({
mode,
candidateMethods,
cliServer: this.cliServer,
queryRunner: this.queryRunner,
queryStorageDir: this.queryStorageDir,
databaseItem: this.databaseItem,
progress: (update) => progress({ ...update }),
cancellationTokenSource,
});
if (!usages) {
return;
}
const request = await createAutoModelRequest(mode, usages);
void this.app.logger.log("Calling auto-model API");
const response = await this.callAutoModelApi(request);
if (!response) {
return;
}
const models = loadYaml(response.models, {
filename: "auto-model.yml",
});
const loadedMethods = loadDataExtensionYaml(models, this.language);
if (!loadedMethods) {
return;
}
await this.addModeledMethods(loadedMethods);
}
private async callAutoModelApi(
request: ModelRequest,
): Promise<ModelResponse | null> {
try {
return await autoModel(this.app.credentials, request, this.modelConfig);
} catch (e) {
if (e instanceof RequestError && e.status === 429) {
void showAndLogExceptionWithTelemetry(
this.app.logger,
this.app.telemetry,
redactableError`Rate limit hit, please try again soon.`,
);
return null;
} else {
throw e;
}
}
}
}

View File

@@ -33,7 +33,6 @@ export function decodeBqrsToMethods(
let libraryVersion: string | undefined;
let type: ModeledMethodType;
let classification: CallClassification;
let endpointKindColumn: string | BqrsEntityValue | undefined;
let endpointType: EndpointType | undefined = undefined;
if (mode === Mode.Application) {
@@ -48,7 +47,6 @@ export function decodeBqrsToMethods(
libraryVersion,
type,
classification,
endpointKindColumn,
] = tuple as ApplicationModeTuple;
} else {
[
@@ -60,7 +58,6 @@ export function decodeBqrsToMethods(
supported,
library,
type,
endpointKindColumn,
] = tuple as FrameworkModeTuple;
classification = CallClassification.Unknown;
@@ -71,18 +68,13 @@ export function decodeBqrsToMethods(
}
if (definition.endpointTypeForEndpoint) {
endpointType = definition.endpointTypeForEndpoint(
{
endpointType,
packageName,
typeName,
methodName,
methodParameters,
},
typeof endpointKindColumn === "object"
? endpointKindColumn.label
: endpointKindColumn,
);
endpointType = definition.endpointTypeForEndpoint({
endpointType,
packageName,
typeName,
methodName,
methodParameters,
});
}
if (endpointType === undefined) {

View File

@@ -133,7 +133,7 @@ async function findGitFolder(
const stat = await workspace.fs.stat(gitFolder);
// Check whether it's a directory
return (stat.type & FileType.Directory) !== 0;
} catch {
} catch (e) {
return false;
}
}),

View File

@@ -174,14 +174,11 @@ export type ModelsAsDataLanguage = {
* be determined by heuristics.
* @param method The method to get the endpoint type for. The endpoint type can be undefined if the
* query does not return an endpoint type.
* @param endpointKind An optional column that may be provided by the query to help determine the
* endpoint type.
*/
endpointTypeForEndpoint?: (
method: Omit<MethodDefinition, "endpointType"> & {
endpointType: EndpointType | undefined;
},
endpointKind: string | undefined,
) => EndpointType | undefined;
predicates: ModelsAsDataLanguagePredicates;
modelGeneration?: ModelsAsDataLanguageModelGeneration;

View File

@@ -4,26 +4,7 @@ import { EndpointType } from "../../method";
const memberTokenRegex = /^Member\[(.+)]$/;
// In Python, the type can contain both the package name and the type name.
export function parsePythonType(type: string) {
// The first part is always the package name. All remaining parts are the type
// name.
const parts = type.split(".");
const packageName = parts.shift() ?? "";
return {
packageName,
typeName: parts.join("."),
};
}
// The type name can also be specified in the type, so this will combine
// the already parsed type name and the type name from the access path.
export function parsePythonAccessPath(
path: string,
shortTypeName: string,
): {
export function parsePythonAccessPath(path: string): {
typeName: string;
methodName: string;
endpointType: EndpointType;
@@ -32,12 +13,8 @@ export function parsePythonAccessPath(
const tokens = parseAccessPathTokens(path);
if (tokens.length === 0) {
const typeName = shortTypeName.endsWith("!")
? shortTypeName.slice(0, -1)
: shortTypeName;
return {
typeName,
typeName: "",
methodName: "",
endpointType: EndpointType.Method,
path: "",
@@ -46,10 +23,6 @@ export function parsePythonAccessPath(
const typeParts = [];
let endpointType = EndpointType.Function;
// If a short type name was given and it doesn't end in a `!`, then this refers to a method.
if (shortTypeName !== "" && !shortTypeName.endsWith("!")) {
endpointType = EndpointType.Method;
}
let remainingTokens: typeof tokens = [];
@@ -59,7 +32,6 @@ export function parsePythonAccessPath(
if (memberMatch) {
typeParts.push(memberMatch[1]);
} else if (token.text === "Instance") {
// Alternative way of specifying that this refers to a method.
endpointType = EndpointType.Method;
} else {
remainingTokens = tokens.slice(i);
@@ -68,22 +40,9 @@ export function parsePythonAccessPath(
}
const methodName = typeParts.pop() ?? "";
let typeName = typeParts.join(".");
const typeName = typeParts.join(".");
const remainingPath = remainingTokens.map((token) => token.text).join(".");
if (shortTypeName !== "") {
if (shortTypeName.endsWith("!")) {
// The actual type name is the name without the `!`.
shortTypeName = shortTypeName.slice(0, -1);
}
if (typeName !== "") {
typeName = `${shortTypeName}.${typeName}`;
} else {
typeName = shortTypeName;
}
}
return {
methodName,
typeName,
@@ -92,59 +51,53 @@ export function parsePythonAccessPath(
};
}
export function parsePythonTypeAndPath(
type: string,
path: string,
): {
packageName: string;
typeName: string;
methodName: string;
endpointType: EndpointType;
path: string;
} {
const { packageName, typeName: shortTypeName } = parsePythonType(type);
const {
typeName,
methodName,
endpointType,
path: remainingPath,
} = parsePythonAccessPath(path, shortTypeName);
return {
packageName,
typeName,
methodName,
endpointType,
path: remainingPath,
};
}
export function pythonMethodSignature(typeName: string, methodName: string) {
return `${typeName}#${methodName}`;
}
export function pythonType(
packageName: string,
typeName: string,
endpointType: EndpointType,
) {
if (typeName !== "" && packageName !== "") {
return `${packageName}.${typeName}${endpointType === EndpointType.Function ? "!" : ""}`;
}
return `${packageName}${typeName}`;
}
export function pythonMethodPath(methodName: string) {
if (methodName === "") {
function pythonTypePath(typeName: string) {
if (typeName === "") {
return "";
}
return `Member[${methodName}]`;
return typeName
.split(".")
.map((part) => `Member[${part}]`)
.join(".");
}
export function pythonPath(methodName: string, path: string) {
const methodPath = pythonMethodPath(methodName);
export function pythonMethodPath(
typeName: string,
methodName: string,
endpointType: EndpointType,
) {
if (methodName === "") {
return pythonTypePath(typeName);
}
const typePath = pythonTypePath(typeName);
let result = typePath;
if (typePath !== "" && endpointType === EndpointType.Method) {
result += ".Instance";
}
if (result !== "") {
result += ".";
}
result += `Member[${methodName}]`;
return result;
}
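// Worked example (editor's note, based on the implementation above; names are
// hypothetical):
//   pythonMethodPath("requests.Session", "get", EndpointType.Method)
//     => "Member[requests].Member[Session].Instance.Member[get]"
//   pythonMethodPath("", "get", EndpointType.Function)
//     => "Member[get]"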
export function pythonPath(
typeName: string,
methodName: string,
endpointType: EndpointType,
path: string,
) {
const methodPath = pythonMethodPath(typeName, methodName, endpointType);
if (methodPath === "") {
return path;
}
@@ -158,24 +111,7 @@ export function pythonPath(methodName: string, path: string) {
export function pythonEndpointType(
method: Omit<MethodDefinition, "endpointType">,
endpointKind: string | undefined,
): EndpointType {
switch (endpointKind) {
case "Function":
return EndpointType.Function;
case "InstanceMethod":
return EndpointType.Method;
case "ClassMethod":
return EndpointType.ClassMethod;
case "StaticMethod":
return EndpointType.StaticMethod;
case "InitMethod":
return EndpointType.Constructor;
case "Class":
return EndpointType.Class;
}
// Legacy behavior for when the kind column is missing.
if (
method.methodParameters.startsWith("(self,") ||
method.methodParameters === "(self)"
@@ -184,12 +120,3 @@ export function pythonEndpointType(
}
return EndpointType.Function;
}
export function hasPythonSelfArgument(endpointType: EndpointType): boolean {
// Instance methods and class methods both use `Argument[self]` for the first parameter. The first
// parameter after self is called `Argument[0]`.
return (
endpointType === EndpointType.Method ||
endpointType === EndpointType.ClassMethod
);
}

View File

@@ -4,48 +4,44 @@ import { Mode } from "../../shared/mode";
import type { MethodArgument } from "../../method";
import { EndpointType, getArgumentsList } from "../../method";
import {
hasPythonSelfArgument,
parsePythonTypeAndPath,
parsePythonAccessPath,
pythonEndpointType,
pythonMethodPath,
pythonMethodSignature,
pythonPath,
pythonType,
} from "./access-paths";
export const python: ModelsAsDataLanguage = {
availableModes: [Mode.Framework],
createMethodSignature: ({ typeName, methodName }) =>
`${typeName}#${methodName}`,
endpointTypeForEndpoint: (method, endpointKind) =>
pythonEndpointType(method, endpointKind),
endpointTypeForEndpoint: (method) => pythonEndpointType(method),
predicates: {
source: {
extensiblePredicate: sharedExtensiblePredicates.source,
supportedKinds: sharedKinds.source,
supportedEndpointTypes: [
EndpointType.Method,
EndpointType.Function,
EndpointType.Constructor,
EndpointType.ClassMethod,
EndpointType.StaticMethod,
],
supportedEndpointTypes: [EndpointType.Method, EndpointType.Function],
// extensible predicate sourceModel(
// string type, string path, string kind
// );
generateMethodDefinition: (method) => [
pythonType(method.packageName, method.typeName, method.endpointType),
pythonPath(method.methodName, method.output),
method.packageName,
pythonPath(
method.typeName,
method.methodName,
method.endpointType,
method.output,
),
method.kind,
],
readModeledMethod: (row) => {
const packageName = row[0] as string;
const {
packageName,
typeName,
methodName,
endpointType,
path: output,
} = parsePythonTypeAndPath(row[0] as string, row[1] as string);
} = parsePythonAccessPath(row[1] as string);
return {
type: "source",
output,
@@ -63,31 +59,30 @@ export const python: ModelsAsDataLanguage = {
sink: {
extensiblePredicate: sharedExtensiblePredicates.sink,
supportedKinds: sharedKinds.sink,
supportedEndpointTypes: [
EndpointType.Method,
EndpointType.Function,
EndpointType.Constructor,
EndpointType.ClassMethod,
EndpointType.StaticMethod,
],
supportedEndpointTypes: [EndpointType.Method, EndpointType.Function],
// extensible predicate sinkModel(
// string type, string path, string kind
// );
generateMethodDefinition: (method) => {
return [
pythonType(method.packageName, method.typeName, method.endpointType),
pythonPath(method.methodName, method.input),
method.packageName,
pythonPath(
method.typeName,
method.methodName,
method.endpointType,
method.input,
),
method.kind,
];
},
readModeledMethod: (row) => {
const packageName = row[0] as string;
const {
packageName,
typeName,
methodName,
endpointType,
path: input,
} = parsePythonTypeAndPath(row[0] as string, row[1] as string);
} = parsePythonAccessPath(row[1] as string);
return {
type: "sink",
input,
@@ -105,26 +100,25 @@ export const python: ModelsAsDataLanguage = {
summary: {
extensiblePredicate: sharedExtensiblePredicates.summary,
supportedKinds: sharedKinds.summary,
supportedEndpointTypes: [
EndpointType.Method,
EndpointType.Function,
EndpointType.Constructor,
EndpointType.ClassMethod,
EndpointType.StaticMethod,
],
supportedEndpointTypes: [EndpointType.Method, EndpointType.Function],
// extensible predicate summaryModel(
// string type, string path, string input, string output, string kind
// );
generateMethodDefinition: (method) => [
pythonType(method.packageName, method.typeName, method.endpointType),
pythonMethodPath(method.methodName),
method.packageName,
pythonMethodPath(
method.typeName,
method.methodName,
method.endpointType,
),
method.input,
method.output,
method.kind,
],
readModeledMethod: (row) => {
const { packageName, typeName, methodName, endpointType, path } =
parsePythonTypeAndPath(row[0] as string, row[1] as string);
const packageName = row[0] as string;
const { typeName, methodName, endpointType, path } =
parsePythonAccessPath(row[1] as string);
if (path !== "") {
throw new Error("Summary path must be a method");
}
@@ -150,13 +144,18 @@ export const python: ModelsAsDataLanguage = {
// string type, string path, string kind
// );
generateMethodDefinition: (method) => [
pythonType(method.packageName, method.typeName, method.endpointType),
pythonMethodPath(method.methodName),
method.packageName,
pythonMethodPath(
method.typeName,
method.methodName,
method.endpointType,
),
method.kind,
],
readModeledMethod: (row) => {
const { packageName, typeName, methodName, endpointType, path } =
parsePythonTypeAndPath(row[0] as string, row[1] as string);
const packageName = row[0] as string;
const { typeName, methodName, endpointType, path } =
parsePythonAccessPath(row[1] as string);
if (path !== "") {
throw new Error("Neutral path must be a method");
}
@@ -173,46 +172,25 @@ export const python: ModelsAsDataLanguage = {
};
},
},
type: {
extensiblePredicate: "typeModel",
// extensible predicate typeModel(string type1, string type2, string path);
generateMethodDefinition: (method) => [
method.relatedTypeName,
pythonType(method.packageName, method.typeName, method.endpointType),
pythonPath(method.methodName, method.path),
],
readModeledMethod: (row) => {
const { packageName, typeName, methodName, endpointType, path } =
parsePythonTypeAndPath(row[1] as string, row[2] as string);
return {
type: "type",
relatedTypeName: row[0] as string,
path,
signature: pythonMethodSignature(typeName, methodName),
endpointType,
packageName,
typeName,
methodName,
methodParameters: "",
};
},
},
},
getArgumentOptions: (method) => {
// Argument and Parameter are equivalent in Python, but we'll use Argument in the model editor
const argumentsList = getArgumentsList(method.methodParameters).map(
(argument, index): MethodArgument => {
if (hasPythonSelfArgument(method.endpointType) && index === 0) {
if (
method.endpointType === EndpointType.Method &&
argument === "self" &&
index === 0
) {
return {
path: "Argument[self]",
label: `Argument[self]: ${argument}`,
label: "Argument[self]: self",
};
}
// If this endpoint has a self argument, self does not count as an argument index so we
// If this is a method, self does not count as an argument index, so we
// should start at 0 for the second argument
if (hasPythonSelfArgument(method.endpointType)) {
if (method.endpointType === EndpointType.Method) {
index -= 1;
}

View File

@@ -70,6 +70,8 @@ export class MethodModelingViewProvider extends AbstractWebviewViewProvider<
method: Method,
modeledMethods: readonly ModeledMethod[],
isModified: boolean,
isInProgress: boolean,
processedByAutoModel: boolean,
): Promise<void> {
this.method = method;
this.databaseItem = databaseItem;
@@ -80,6 +82,8 @@ export class MethodModelingViewProvider extends AbstractWebviewViewProvider<
method,
modeledMethods,
isModified,
isInProgress,
processedByAutoModel,
});
}
@@ -100,6 +104,8 @@ export class MethodModelingViewProvider extends AbstractWebviewViewProvider<
selectedMethod.method,
selectedMethod.modeledMethods,
selectedMethod.isModified,
selectedMethod.isInProgress,
selectedMethod.processedByAutoModel,
);
}
}
@@ -197,6 +203,8 @@ export class MethodModelingViewProvider extends AbstractWebviewViewProvider<
e.method,
e.modeledMethods,
e.isModified,
e.isInProgress,
e.processedByAutoModel,
);
}
}),
@@ -224,6 +232,36 @@ export class MethodModelingViewProvider extends AbstractWebviewViewProvider<
}
}),
);
this.push(
this.modelingEvents.onInProgressMethodsChanged(async (e) => {
if (this.method && this.databaseItem) {
const dbUri = this.databaseItem.databaseUri.toString();
if (e.dbUri === dbUri) {
const inProgress = e.methods.has(this.method.signature);
await this.postMessage({
t: "setInProgress",
inProgress,
});
}
}
}),
);
this.push(
this.modelingEvents.onProcessedByAutoModelMethodsChanged(async (e) => {
if (this.method && this.databaseItem) {
const dbUri = this.databaseItem.databaseUri.toString();
if (e.dbUri === dbUri) {
const processedByAutoModel = e.methods.has(this.method.signature);
await this.postMessage({
t: "setProcessedByAutoModel",
processedByAutoModel,
});
}
}
}),
);
}
private registerToModelConfigEvents(): void {

View File

@@ -29,8 +29,6 @@ export enum EndpointType {
Method = "method",
Constructor = "constructor",
Function = "function",
StaticMethod = "staticMethod",
ClassMethod = "classMethod",
}
export interface MethodDefinition {

View File

@@ -204,6 +204,7 @@ export class ModelEditorModule extends DisposableObject {
this.app.logger,
queryDir,
language,
this.modelConfig,
initialMode,
);
if (!success) {

View File

@@ -8,6 +8,7 @@ import {
syntheticQueryPackName,
} from "./model-editor-queries";
import type { CodeQLCliServer } from "../codeql-cli/cli";
import type { ModelConfig } from "../config";
import type { Mode } from "./shared/mode";
import type { NotificationLogger } from "../common/logging";
@@ -29,6 +30,7 @@ import type { NotificationLogger } from "../common/logging";
* @param logger The logger to use.
* @param queryDir The directory to set up.
* @param language The language to use for the queries.
* @param modelConfig The model config to use.
* @param initialMode The initial mode to use to check the existence of the queries.
* @returns true if the setup was successful, false otherwise.
*/
@@ -37,6 +39,7 @@ export async function setUpPack(
logger: NotificationLogger,
queryDir: string,
language: QueryLanguage,
modelConfig: ModelConfig,
initialMode: Mode,
): Promise<boolean> {
// Download the required query packs
@@ -88,5 +91,10 @@ export async function setUpPack(
await cliServer.packInstall(queryDir);
}
// Download any other required packs
if (language === "java" && modelConfig.llmGeneration) {
await cliServer.packDownload([`codeql/${language}-automodel-queries`]);
}
return true;
}

View File

@@ -49,6 +49,7 @@ import {
import { pickExtensionPack } from "./extension-pack-picker";
import type { QueryLanguage } from "../common/query-language";
import { getLanguageDisplayName } from "../common/query-language";
import { AutoModeler } from "./auto-modeler";
import { telemetryListener } from "../common/vscode/telemetry";
import type { ModelingStore } from "./modeling-store";
import type { ModelingEvents } from "./modeling-events";
@@ -76,6 +77,7 @@ export class ModelEditorView extends AbstractWebview<
ToModelEditorMessage,
FromModelEditorMessage
> {
private readonly autoModeler: AutoModeler;
private readonly modelEvaluator: ModelEvaluator;
private readonly languageDefinition: ModelsAsDataLanguage;
// Cancellation token source that can be used for passing into long-running operations. Should only
@@ -112,6 +114,19 @@ export class ModelEditorView extends AbstractWebview<
this.registerToModelingEvents();
this.registerToModelConfigEvents();
this.autoModeler = new AutoModeler(
app,
cliServer,
queryRunner,
this.modelConfig,
modelingStore,
queryStorageDir,
databaseItem,
language,
async (modeledMethods) => {
this.addModeledMethods(modeledMethods);
},
);
this.languageDefinition = getModelsAsDataLanguage(language);
this.modelEvaluator = new ModelEvaluator(
@@ -302,6 +317,21 @@ export class ModelEditorView extends AbstractWebview<
"model-editor-generate-modeled-methods",
);
break;
case "generateMethodsFromLlm":
await this.generateModeledMethodsFromLlm(
msg.packageName,
msg.methodSignatures,
);
void telemetryListener?.sendUIInteraction(
"model-editor-generate-methods-from-llm",
);
break;
case "stopGeneratingMethodsFromLlm":
await this.autoModeler.stopModeling(msg.packageName);
void telemetryListener?.sendUIInteraction(
"model-editor-stop-generating-methods-from-llm",
);
break;
case "modelDependency":
await this.modelDependency();
@@ -408,6 +438,9 @@ export class ModelEditorView extends AbstractWebview<
this.modelConfig.flowGeneration) &&
!!modelsAsDataLanguage.modelGeneration;
const showLlmButton =
this.databaseItem.language === "java" && this.modelConfig.llmGeneration;
const showEvaluationUi = this.modelConfig.modelEvaluation;
const sourceArchiveAvailable =
@@ -423,6 +456,7 @@ export class ModelEditorView extends AbstractWebview<
extensionPack: this.extensionPack,
language: this.language,
showGenerateButton,
showLlmButton,
showEvaluationUi,
mode: this.modelingStore.getMode(this.databaseItem),
showModeSwitchButton,
@@ -771,6 +805,33 @@ export class ModelEditorView extends AbstractWebview<
);
}
private async generateModeledMethodsFromLlm(
packageName: string,
methodSignatures: string[],
): Promise<void> {
const methods = this.modelingStore.getMethods(
this.databaseItem,
methodSignatures,
);
const modeledMethods = this.modelingStore.getModeledMethods(
this.databaseItem,
methodSignatures,
);
const processedByAutoModelMethods =
this.modelingStore.getProcessedByAutoModelMethods(
this.databaseItem,
methodSignatures,
);
const mode = this.modelingStore.getMode(this.databaseItem);
await this.autoModeler.startModeling(
packageName,
methods,
modeledMethods,
processedByAutoModelMethods,
mode,
);
}
private async modelDependency(): Promise<void> {
return withProgress(
async (progress, token) => {
@@ -922,6 +983,30 @@ export class ModelEditorView extends AbstractWebview<
}),
);
this.push(
this.modelingEvents.onInProgressMethodsChanged(async (event) => {
if (event.dbUri === this.databaseItem.databaseUri.toString()) {
await this.postMessage({
t: "setInProgressMethods",
methods: Array.from(event.methods),
});
}
}),
);
this.push(
this.modelingEvents.onProcessedByAutoModelMethodsChanged(
async (event) => {
if (event.dbUri === this.databaseItem.databaseUri.toString()) {
await this.postMessage({
t: "setProcessedByAutoModelMethods",
methods: Array.from(event.methods),
});
}
},
),
);
this.push(
this.modelingEvents.onRevealInModelEditor(async (event) => {
if (event.dbUri === this.databaseItem.databaseUri.toString()) {

View File

@@ -1,5 +1,6 @@
import { assertNever } from "../common/helpers-pure";
import type { MethodSignature } from "./method";
import type { ModelingStatus } from "./shared/modeling-status";
export type ModeledMethodType =
| "none"
@@ -14,6 +15,10 @@ export type Provenance =
| "df-generated"
// Generated by the dataflow model and manually edited
| "df-manual"
// Generated by the auto-model
| "ai-generated"
// Generated by the auto-model and manually edited
| "ai-manual"
// Entered by the user in the editor manually
| "manual";
@@ -107,6 +112,30 @@ export function modeledMethodSupportsProvenance(
);
}
export function isModelPending(
modeledMethod: ModeledMethod | undefined,
modelingStatus: ModelingStatus,
processedByAutoModel: boolean,
): boolean {
if (
(!modeledMethod || modeledMethod.type === "none") &&
processedByAutoModel
) {
return true;
}
if (!modeledMethod) {
return false;
}
return (
modelingStatus === "unsaved" &&
modeledMethod.type !== "none" &&
modeledMethodSupportsProvenance(modeledMethod) &&
modeledMethod.provenance === "ai-generated"
);
}
/**
* Calculates the new provenance for a modeled method based on the current provenance.
* @param modeledMethod The modeled method if there is one.
@@ -129,6 +158,13 @@ export function calculateNewProvenance(
case "df-manual":
// If the method has had manual edits, we want the provenance to stay the same.
return "df-manual";
case "ai-generated":
// If the method has been generated and there has been a change, we assume
// that the user has manually edited it.
return "ai-manual";
case "ai-manual":
// If the method has had manual edits, we want the provenance to stay the same.
return "ai-manual";
default:
// The method has been modeled manually.
return "manual";

View File

@@ -37,6 +37,18 @@ interface SelectedMethodChangedEvent {
readonly usage: Usage;
readonly modeledMethods: readonly ModeledMethod[];
readonly isModified: boolean;
readonly isInProgress: boolean;
readonly processedByAutoModel: boolean;
}
interface InProgressMethodsChangedEvent {
readonly dbUri: string;
readonly methods: ReadonlySet<string>;
}
interface ProcessedByAutoModelMethodsChangedEvent {
readonly dbUri: string;
readonly methods: ReadonlySet<string>;
}
interface ModelEvaluationRunChangedEvent {
@@ -71,6 +83,8 @@ export class ModelingEvents extends DisposableObject {
public readonly onModeChanged: AppEvent<ModeChangedEvent>;
public readonly onModeledAndModifiedMethodsChanged: AppEvent<ModeledAndModifiedMethodsChangedEvent>;
public readonly onSelectedMethodChanged: AppEvent<SelectedMethodChangedEvent>;
public readonly onInProgressMethodsChanged: AppEvent<InProgressMethodsChangedEvent>;
public readonly onProcessedByAutoModelMethodsChanged: AppEvent<ProcessedByAutoModelMethodsChangedEvent>;
public readonly onModelEvaluationRunChanged: AppEvent<ModelEvaluationRunChangedEvent>;
public readonly onRevealInModelEditor: AppEvent<RevealInModelEditorEvent>;
public readonly onFocusModelEditor: AppEvent<FocusModelEditorEvent>;
@@ -85,6 +99,8 @@ export class ModelingEvents extends DisposableObject {
private readonly onModeChangedEventEmitter: AppEventEmitter<ModeChangedEvent>;
private readonly onModeledAndModifiedMethodsChangedEventEmitter: AppEventEmitter<ModeledAndModifiedMethodsChangedEvent>;
private readonly onSelectedMethodChangedEventEmitter: AppEventEmitter<SelectedMethodChangedEvent>;
private readonly onInProgressMethodsChangedEventEmitter: AppEventEmitter<InProgressMethodsChangedEvent>;
private readonly onProcessedByAutoModelMethodsChangedEventEmitter: AppEventEmitter<ProcessedByAutoModelMethodsChangedEvent>;
private readonly onModelEvaluationRunChangedEventEmitter: AppEventEmitter<ModelEvaluationRunChangedEvent>;
private readonly onRevealInModelEditorEventEmitter: AppEventEmitter<RevealInModelEditorEvent>;
private readonly onFocusModelEditorEventEmitter: AppEventEmitter<FocusModelEditorEvent>;
@@ -135,6 +151,18 @@ export class ModelingEvents extends DisposableObject {
this.onSelectedMethodChanged =
this.onSelectedMethodChangedEventEmitter.event;
this.onInProgressMethodsChangedEventEmitter = this.push(
app.createEventEmitter<InProgressMethodsChangedEvent>(),
);
this.onInProgressMethodsChanged =
this.onInProgressMethodsChangedEventEmitter.event;
this.onProcessedByAutoModelMethodsChangedEventEmitter = this.push(
app.createEventEmitter<ProcessedByAutoModelMethodsChangedEvent>(),
);
this.onProcessedByAutoModelMethodsChanged =
this.onProcessedByAutoModelMethodsChangedEventEmitter.event;
this.onModelEvaluationRunChangedEventEmitter = this.push(
app.createEventEmitter<ModelEvaluationRunChangedEvent>(),
);
@@ -226,6 +254,8 @@ export class ModelingEvents extends DisposableObject {
usage: Usage,
modeledMethods: ModeledMethod[],
isModified: boolean,
isInProgress: boolean,
processedByAutoModel: boolean,
) {
this.onSelectedMethodChangedEventEmitter.fire({
databaseItem,
@@ -233,6 +263,28 @@ export class ModelingEvents extends DisposableObject {
usage,
modeledMethods,
isModified,
isInProgress,
processedByAutoModel,
});
}
public fireInProgressMethodsChangedEvent(
dbUri: string,
methods: ReadonlySet<string>,
) {
this.onInProgressMethodsChangedEventEmitter.fire({
dbUri,
methods,
});
}
public fireProcessedByAutoModelMethodsChangedEvent(
dbUri: string,
methods: ReadonlySet<string>,
) {
this.onProcessedByAutoModelMethodsChangedEventEmitter.fire({
dbUri,
methods,
});
}

View File

@@ -15,6 +15,8 @@ interface InternalDbModelingState {
mode: Mode;
modeledMethods: Record<string, ModeledMethod[]>;
modifiedMethodSignatures: Set<string>;
inProgressMethods: Set<string>;
processedByAutoModelMethods: Set<string>;
selectedMethod: Method | undefined;
selectedUsage: Usage | undefined;
modelEvaluationRun: ModelEvaluationRun | undefined;
@@ -28,6 +30,8 @@ export interface DbModelingState {
readonly mode: Mode;
readonly modeledMethods: Readonly<Record<string, readonly ModeledMethod[]>>;
readonly modifiedMethodSignatures: ReadonlySet<string>;
readonly inProgressMethods: ReadonlySet<string>;
readonly processedByAutoModelMethods: ReadonlySet<string>;
readonly selectedMethod: Method | undefined;
readonly selectedUsage: Usage | undefined;
readonly modelEvaluationRun: ModelEvaluationRun | undefined;
@@ -40,6 +44,8 @@ export interface SelectedMethodDetails {
readonly usage: Usage | undefined;
readonly modeledMethods: readonly ModeledMethod[];
readonly isModified: boolean;
readonly isInProgress: boolean;
readonly processedByAutoModel: boolean;
}
export class ModelingStore extends DisposableObject {
@@ -62,8 +68,10 @@ export class ModelingStore extends DisposableObject {
mode,
modeledMethods: {},
modifiedMethodSignatures: new Set(),
processedByAutoModelMethods: new Set(),
selectedMethod: undefined,
selectedUsage: undefined,
inProgressMethods: new Set(),
modelEvaluationRun: undefined,
isModelAlertsViewOpen: false,
});
@@ -152,6 +160,7 @@ export class ModelingStore extends DisposableObject {
methods,
state.modeledMethods,
state.modifiedMethodSignatures,
state.processedByAutoModelMethods,
);
});
}
@@ -162,6 +171,7 @@ export class ModelingStore extends DisposableObject {
state.methods,
state.modeledMethods,
state.modifiedMethodSignatures,
state.processedByAutoModelMethods,
);
});
}
@@ -298,15 +308,75 @@ export class ModelingStore extends DisposableObject {
const modeledMethods = dbState.modeledMethods[method.signature] ?? [];
const isModified = dbState.modifiedMethodSignatures.has(method.signature);
const isInProgress = dbState.inProgressMethods.has(method.signature);
const processedByAutoModel = dbState.processedByAutoModelMethods.has(
method.signature,
);
this.modelingEvents.fireSelectedMethodChangedEvent(
dbItem,
method,
usage,
modeledMethods,
isModified,
isInProgress,
processedByAutoModel,
);
}
public addInProgressMethods(
dbItem: DatabaseItem,
inProgressMethods: string[],
) {
this.changeInProgressMethods(dbItem, (state) => {
state.inProgressMethods = new Set([
...state.inProgressMethods,
...inProgressMethods,
]);
});
}
public removeInProgressMethods(
dbItem: DatabaseItem,
methodSignatures: string[],
) {
this.changeInProgressMethods(dbItem, (state) => {
state.inProgressMethods = new Set(
Array.from(state.inProgressMethods).filter(
(s) => !methodSignatures.includes(s),
),
);
});
}
public getProcessedByAutoModelMethods(
dbItem: DatabaseItem,
methodSignatures?: string[],
): Set<string> {
const processedByAutoModelMethods =
this.getState(dbItem).processedByAutoModelMethods;
if (!methodSignatures) {
return processedByAutoModelMethods;
}
return new Set(
Array.from(processedByAutoModelMethods).filter((x) =>
methodSignatures.includes(x),
),
);
}
public addProcessedByAutoModelMethods(
dbItem: DatabaseItem,
processedByAutoModelMethods: string[],
) {
this.changeProcessedByAutoModelMethods(dbItem, (state) => {
state.processedByAutoModelMethods = new Set([
...state.processedByAutoModelMethods,
...processedByAutoModelMethods,
]);
});
this.updateMethodSorting(dbItem);
}
public updateModelEvaluationRun(
dbItem: DatabaseItem,
evaluationRun: ModelEvaluationRun | undefined,
@@ -335,6 +405,10 @@ export class ModelingStore extends DisposableObject {
isModified: dbState.modifiedMethodSignatures.has(
selectedMethod.signature,
),
isInProgress: dbState.inProgressMethods.has(selectedMethod.signature),
processedByAutoModel: dbState.processedByAutoModelMethods.has(
selectedMethod.signature,
),
};
}
@@ -386,6 +460,34 @@ export class ModelingStore extends DisposableObject {
);
}
private changeInProgressMethods(
dbItem: DatabaseItem,
updateState: (state: InternalDbModelingState) => void,
) {
const state = this.getState(dbItem);
updateState(state);
this.modelingEvents.fireInProgressMethodsChangedEvent(
dbItem.databaseUri.toString(),
state.inProgressMethods,
);
}
private changeProcessedByAutoModelMethods(
dbItem: DatabaseItem,
updateState: (state: InternalDbModelingState) => void,
) {
const state = this.getState(dbItem);
updateState(state);
this.modelingEvents.fireProcessedByAutoModelMethodsChangedEvent(
dbItem.databaseUri.toString(),
state.processedByAutoModelMethods,
);
}
private changeModelEvaluationRun(
dbItem: DatabaseItem,
updateState: (state: InternalDbModelingState) => void,

View File

@@ -17,7 +17,6 @@ export type Query = {
* - libraryVersion: the version of the library that contains the external API. This is a string and can be empty if the version cannot be determined.
* - type: the modeled kind of the method, either "sink", "source", "summary", or "neutral"
* - classification: the classification of the use of the method, either "source", "test", "generated", or "unknown"
* - kind: the kind of the endpoint, language-specific, e.g. "method" or "function"
*/
applicationModeQuery: string;
/**
@@ -33,7 +32,6 @@ export type Query = {
* - supported: whether this method is modeled. This should be a string representation of a boolean to satisfy the result pattern for a problem query.
* - libraryName: the name of the file or library that contains the method. This is a string and usually the basename of a file.
* - type: the modeled kind of the method, either "sink", "source", "summary", or "neutral"
* - kind: the kind of the endpoint, language-specific, e.g. "method" or "function"
*/
frameworkModeQuery: string;
dependencies?: {
@@ -52,7 +50,6 @@ export type ApplicationModeTuple = [
string,
ModeledMethodType,
CallClassification,
string | BqrsEntityValue | undefined,
];
export type FrameworkModeTuple = [
@@ -64,5 +61,4 @@ export type FrameworkModeTuple = [
boolean,
string,
ModeledMethodType,
string | BqrsEntityValue | undefined,
];

View File

@@ -0,0 +1,48 @@
import type { Method, MethodSignature } from "../method";
import type { ModeledMethod } from "../modeled-method";
import type { Mode } from "./mode";
import { groupMethods, sortGroupNames } from "./sorting";
/**
* Return the candidates that the model should be run on: methods that are not already modeled,
* not already processed by the auto-model, and not supported outside the model file, returned in
* the same order as they are shown in the UI.
* @param mode Whether it is application or framework mode.
* @param methods all methods.
* @param modeledMethodsBySignature the currently modeled methods.
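* @param processedByAutoModelMethods the signatures of methods that have already been sent to the auto-model.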
* @returns the method signatures that are candidates for modeling.
*/
export function getCandidates(
mode: Mode,
methods: readonly Method[],
modeledMethodsBySignature: Record<string, readonly ModeledMethod[]>,
processedByAutoModelMethods: Set<string>,
): MethodSignature[] {
const candidateMethods = methods.filter((method) => {
// Filter out any methods already processed by auto-model
if (processedByAutoModelMethods.has(method.signature)) {
return false;
}
const modeledMethods: ModeledMethod[] = [
...(modeledMethodsBySignature[method.signature] ?? []),
];
// Anything that is modeled is not a candidate
if (modeledMethods.some((m) => m.type !== "none")) {
return false;
}
// A method that is supported is modeled outside of the model file, so it is not a candidate.
if (method.supported) {
return false;
}
return true;
});
// Sort the same way as the UI so that the methods listed first in the UI are sent first
const grouped = groupMethods(candidateMethods, mode);
return sortGroupNames(grouped).flatMap((name) => grouped[name]);
}
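
A usage sketch for getCandidates. The Method values are declared rather than constructed, since the filter only reads their signature and supported fields; the imports, the Mode.Application member name, and the partial ModeledMethod are assumptions, not confirmed by this diff:

// Imports of getCandidates, Mode, Method, and ModeledMethod are omitted because
// their file paths are not visible in this diff.
declare const methods: readonly Method[];
declare const savedSinkModel: ModeledMethod; // assume an existing "sink" model

const candidates = getCandidates(
  Mode.Application, // enum member name assumed
  methods,
  { "org.example.Foo#bar()": [savedSinkModel] }, // already modeled, so excluded
  new Set(["org.example.Baz#qux()"]), // already processed by auto-model, so excluded
);
// `candidates` holds the signatures of the remaining methods: not modeled, not
// already supported outside the model file, and not yet processed by auto-model,
// in the same order the model editor lists them.
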

View File

@@ -47,6 +47,7 @@ export function sortMethods(
methods: readonly Method[],
modeledMethodsMap: Record<string, readonly ModeledMethod[]>,
modifiedSignatures: ReadonlySet<string>,
processedByAutoModelMethods: ReadonlySet<string>,
): Method[] {
const sortedMethods = [...methods];
sortedMethods.sort((a, b) => {
@@ -55,11 +56,13 @@ export function sortMethods(
a,
modeledMethodsMap[a.signature] ?? [],
modifiedSignatures.has(a.signature),
processedByAutoModelMethods.has(a.signature),
);
const methodBPrimarySortOrdinal = getMethodPrimarySortOrdinal(
b,
modeledMethodsMap[b.signature] ?? [],
modifiedSignatures.has(b.signature),
processedByAutoModelMethods.has(b.signature),
);
if (methodAPrimarySortOrdinal !== methodBPrimarySortOrdinal) {
return methodAPrimarySortOrdinal - methodBPrimarySortOrdinal;
@@ -79,25 +82,32 @@ export function sortMethods(
/**
* Assigns numbers to the following classes of methods:
* - Unsaved manual models + unmodeled methods => 0
* - Saved models from this model pack (AutoModel and manual) => 1
* - Methods not modelable in this model pack => 2
* - Unsaved positive AutoModel predictions => 0
* - Negative AutoModel predictions => 1
* - Unsaved manual models + unmodeled methods => 2
* - Saved models from this model pack (AutoModel and manual) => 3
* - Methods not modelable in this model pack => 4
*/
function getMethodPrimarySortOrdinal(
method: Method,
modeledMethods: readonly ModeledMethod[],
isUnsaved: boolean,
isProcessedByAutoModel: boolean,
): number {
const canBeModeled = canMethodBeModeled(method, modeledMethods, isUnsaved);
const isModeled = modeledMethods.length > 0;
if (canBeModeled) {
if ((isModeled && isUnsaved) || !isModeled) {
if (isModeled && isUnsaved && isProcessedByAutoModel) {
return 0;
} else {
} else if (!isModeled && isProcessedByAutoModel) {
return 1;
} else if ((isModeled && isUnsaved) || !isModeled) {
return 2;
} else {
return 3;
}
} else {
return 2;
return 4;
}
}
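
To make the new ordering concrete: unsaved positive auto-model predictions now sort ahead of everything else, negative predictions come next, and methods that cannot be modeled in this pack sink to the bottom. A small self-contained illustration of how the primary ordinals drive the comparator (signatures invented):

// Ordinals as assigned by getMethodPrimarySortOrdinal:
// 0 unsaved positive auto-model prediction
// 1 negative auto-model prediction (processed but left unmodeled)
// 2 unsaved manual model or unmodeled method
// 3 saved model from this model pack
// 4 not modelable in this model pack
const ordinals = new Map<string, number>([
  ["Foo#a()", 2], // unmodeled, untouched by auto-model
  ["Foo#b()", 0], // auto-model suggested a model that is not saved yet
  ["Foo#c()", 4], // cannot be modeled in this pack
]);
const sorted = [...ordinals.entries()].sort(([, a], [, b]) => a - b);
// => [["Foo#b()", 0], ["Foo#a()", 2], ["Foo#c()", 4]]
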

View File

@@ -7,6 +7,7 @@ export interface ModelEditorViewState {
extensionPack: ExtensionPack;
language: QueryLanguage;
showGenerateButton: boolean;
showLlmButton: boolean;
showEvaluationUi: boolean;
mode: Mode;
showModeSwitchButton: boolean;

View File

@@ -9,16 +9,20 @@ export const SUPPORTED_LANGUAGES: QueryLanguage[] = [
QueryLanguage.Java,
QueryLanguage.CSharp,
QueryLanguage.Ruby,
QueryLanguage.Python,
];
export function isSupportedLanguage(
language: QueryLanguage,
_modelConfig: ModelConfig,
modelConfig: ModelConfig,
) {
if (SUPPORTED_LANGUAGES.includes(language)) {
return true;
}
if (language === QueryLanguage.Python) {
// Python is only enabled when the config setting is set
return modelConfig.enablePython;
}
return false;
}
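
A usage sketch for the Python gate above; the imports are omitted, and ModelConfig is assumed to expose an enablePython boolean, as the modelConfig.enablePython access suggests:

// Imports of isSupportedLanguage, QueryLanguage, and ModelConfig omitted.
declare const modelConfig: ModelConfig;

isSupportedLanguage(QueryLanguage.Java, modelConfig); // => true, regardless of config
isSupportedLanguage(QueryLanguage.Python, { ...modelConfig, enablePython: true }); // => true
isSupportedLanguage(QueryLanguage.Python, { ...modelConfig, enablePython: false }); // => false
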

View File

@@ -12,36 +12,17 @@ import type { VariantAnalysisHistoryItem } from "./variant-analysis-history-item
import { assertNever } from "../common/helpers-pure";
import { pluralize } from "../common/word";
import { humanizeQueryStatus } from "./query-status";
import { substituteConfigVariables } from "../common/config-template";
type LabelVariables = {
startTime: string;
queryName: string;
databaseName: string;
resultCount: string;
status: string;
queryFileBasename: string;
queryLanguage: string;
};
const legacyVariableInterpolateReplacements: Record<
keyof LabelVariables,
string
> = {
startTime: "t",
queryName: "q",
databaseName: "d",
resultCount: "r",
status: "s",
queryFileBasename: "f",
queryLanguage: "l",
};
// If any of the "legacy" variables are used, we need to use legacy interpolation.
const legacyLabelRegex = new RegExp(
`%([${Object.values(legacyVariableInterpolateReplacements).join("")}%])`,
"g",
);
interface InterpolateReplacements {
t: string; // Start time
q: string; // Query name
d: string; // Database/repositories count
r: string; // Result count/Empty
s: string; // Status
f: string; // Query file name
l: string; // Query language
"%": "%"; // Percent sign
}
export class HistoryItemLabelProvider {
constructor(private config: QueryHistoryConfig) {
@@ -49,26 +30,21 @@ export class HistoryItemLabelProvider {
}
getLabel(item: QueryHistoryInfo) {
let variables: LabelVariables;
let replacements: InterpolateReplacements;
switch (item.t) {
case "local":
variables = this.getLocalVariables(item);
replacements = this.getLocalInterpolateReplacements(item);
break;
case "variant-analysis":
variables = this.getVariantAnalysisVariables(item);
replacements = this.getVariantAnalysisInterpolateReplacements(item);
break;
default:
assertNever(item);
}
const rawLabel =
item.userSpecifiedLabel ?? (this.config.format || "${queryName}");
const rawLabel = item.userSpecifiedLabel ?? (this.config.format || "%q");
if (legacyLabelRegex.test(rawLabel)) {
return this.legacyInterpolate(rawLabel, variables);
}
return substituteConfigVariables(rawLabel, variables).replace(/\s+/g, " ");
return this.interpolate(rawLabel, replacements);
}
/**
@@ -83,60 +59,55 @@ export class HistoryItemLabelProvider {
: getRawQueryName(item);
}
private legacyInterpolate(
private interpolate(
rawLabel: string,
variables: LabelVariables,
replacements: InterpolateReplacements,
): string {
const replacements = Object.entries(variables).reduce(
(acc, [key, value]) => {
acc[
legacyVariableInterpolateReplacements[key as keyof LabelVariables]
] = value;
return acc;
const label = rawLabel.replace(
/%(.)/g,
(match, key: keyof InterpolateReplacements) => {
const replacement = replacements[key];
return replacement !== undefined ? replacement : match;
},
{
"%": "%",
} as Record<string, string>,
);
const label = rawLabel.replace(/%(.)/g, (match, key: string) => {
const replacement = replacements[key];
return replacement !== undefined ? replacement : match;
});
return label.replace(/\s+/g, " ");
}
private getLocalVariables(item: LocalQueryInfo): LabelVariables {
private getLocalInterpolateReplacements(
item: LocalQueryInfo,
): InterpolateReplacements {
const { resultCount = 0, message = "in progress" } =
item.completedQuery || {};
return {
startTime: item.startTime,
queryName: item.getQueryName(),
databaseName: item.databaseName,
resultCount: `(${resultCount} results)`,
status: message,
queryFileBasename: item.getQueryFileName(),
queryLanguage: this.getLanguageLabel(item),
t: item.startTime,
q: item.getQueryName(),
d: item.databaseName,
r: `(${resultCount} results)`,
s: message,
f: item.getQueryFileName(),
l: this.getLanguageLabel(item),
"%": "%",
};
}
private getVariantAnalysisVariables(
private getVariantAnalysisInterpolateReplacements(
item: VariantAnalysisHistoryItem,
): LabelVariables {
): InterpolateReplacements {
const resultCount = item.resultCount
? `(${pluralize(item.resultCount, "result", "results")})`
: "";
return {
startTime: new Date(
item.variantAnalysis.executionStartTime,
).toLocaleString(env.language),
queryName: `${item.variantAnalysis.query.name} (${item.variantAnalysis.language})`,
databaseName: buildRepoLabel(item),
resultCount,
status: humanizeQueryStatus(item.status),
queryFileBasename: basename(item.variantAnalysis.query.filePath),
queryLanguage: this.getLanguageLabel(item),
t: new Date(item.variantAnalysis.executionStartTime).toLocaleString(
env.language,
),
q: `${item.variantAnalysis.query.name} (${item.variantAnalysis.language})`,
d: buildRepoLabel(item),
r: resultCount,
s: humanizeQueryStatus(item.status),
f: basename(item.variantAnalysis.query.filePath),
l: this.getLanguageLabel(item),
"%": "%",
};
}
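
To illustrate the %-style interpolation this change returns to: each %x token is looked up in the replacements map, %% escapes a literal percent sign, unknown tokens are left untouched, and the provider collapses runs of whitespace afterwards. A self-contained sketch with invented replacement values (the default format is "%q", per the fallback above):

const replacements: Record<string, string> = {
  t: "2024-07-01 18:20",
  q: "UnsafeDeserialization.ql",
  d: "octo-org/octo-repo",
  r: "(3 results)",
  s: "completed",
  f: "UnsafeDeserialization.ql",
  l: "java",
  "%": "%",
};

const format = "[%t] %q on %d %r  %s (100%%)";
const label = format
  .replace(/%(.)/g, (match, key) => replacements[key] ?? match)
  .replace(/\s+/g, " "); // the provider also collapses runs of whitespace
// => "[2024-07-01 18:20] UnsafeDeserialization.ql on octo-org/octo-repo (3 results) completed (100%)"
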

View File

@@ -781,7 +781,7 @@ export class QueryHistoryManager extends DisposableObject {
private async warnNoEvalLogSummary(item: LocalQueryInfo) {
const evalLogLocation =
item.evaluatorLogPaths?.log ?? item.initialInfo.outputDir?.evalLogPath;
item.evalLogLocation ?? item.initialInfo.outputDir?.evalLogPath;
// Summary log file doesn't exist.
if (evalLogLocation && (await pathExists(evalLogLocation))) {
@@ -801,7 +801,7 @@ export class QueryHistoryManager extends DisposableObject {
}
const evalLogLocation =
item.evaluatorLogPaths?.log ?? item.initialInfo.outputDir?.evalLogPath;
item.evalLogLocation ?? item.initialInfo.outputDir?.evalLogPath;
if (evalLogLocation && (await pathExists(evalLogLocation))) {
await tryOpenExternalFile(this.app.commands, evalLogLocation);
@@ -816,15 +816,12 @@ export class QueryHistoryManager extends DisposableObject {
}
// If the summary file location wasn't saved, display error
if (!item.evaluatorLogPaths?.humanReadableSummary) {
if (!item.evalLogSummaryLocation) {
await this.warnNoEvalLogSummary(item);
return;
}
await tryOpenExternalFile(
this.app.commands,
item.evaluatorLogPaths.humanReadableSummary,
);
await tryOpenExternalFile(this.app.commands, item.evalLogSummaryLocation);
}
async handleShowEvalLogViewer(item: QueryHistoryInfo) {
@@ -833,7 +830,7 @@ export class QueryHistoryManager extends DisposableObject {
}
// If the JSON summary file location wasn't saved, display error
if (item.evaluatorLogPaths?.jsonSummary === undefined) {
if (item.jsonEvalLogSummaryLocation === undefined) {
await this.warnNoEvalLogSummary(item);
return;
}
@@ -841,16 +838,16 @@ export class QueryHistoryManager extends DisposableObject {
// TODO(angelapwen): Stream the file in.
try {
const evalLogData: EvalLogData[] = await parseViewerData(
item.evaluatorLogPaths.jsonSummary,
item.jsonEvalLogSummaryLocation,
);
const evalLogTreeBuilder = new EvalLogTreeBuilder(
item.getQueryName(),
evalLogData,
);
this.evalLogViewer.updateRoots(await evalLogTreeBuilder.getRoots());
} catch {
} catch (e) {
throw new Error(
`Could not read evaluator log summary JSON file to generate viewer data at ${item.evaluatorLogPaths.jsonSummary}.`,
`Could not read evaluator log summary JSON file to generate viewer data at ${item.jsonEvalLogSummaryLocation}.`,
);
}
}
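
The viewer handler above follows the same guard-then-parse shape as the other eval-log commands: warn and bail out if the location was never recorded, otherwise parse the summary and hand the data to the tree builder. A reduced, self-contained sketch of that control flow, with the real parser, tree builder, warning, and viewer injected as parameters rather than imported:

async function showEvalLogViewerSketch(
  item: { jsonEvalLogSummaryLocation?: string; getQueryName(): string },
  deps: {
    warnNoSummary: () => Promise<void>;
    parseViewerData: (path: string) => Promise<unknown[]>;
    buildRoots: (queryName: string, data: unknown[]) => Promise<unknown[]>;
    updateRoots: (roots: unknown[]) => void;
  },
) {
  // If the JSON summary file location wasn't saved, display the warning and stop.
  if (item.jsonEvalLogSummaryLocation === undefined) {
    await deps.warnNoSummary();
    return;
  }
  try {
    const data = await deps.parseViewerData(item.jsonEvalLogSummaryLocation);
    deps.updateRoots(await deps.buildRoots(item.getQueryName(), data));
  } catch {
    // Mirror the handler above: replace the parse error with a clearer message.
    throw new Error(
      `Could not read evaluator log summary JSON file at ${item.jsonEvalLogSummaryLocation}.`,
    );
  }
}
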

View File

@@ -25,10 +25,10 @@ export function mapLocalQueryInfoToDto(
return {
initialInfo: mapInitialQueryInfoToDto(query.initialInfo),
t: "local",
evalLogLocation: query.evaluatorLogPaths?.log,
evalLogSummaryLocation: query.evaluatorLogPaths?.humanReadableSummary,
jsonEvalLogSummaryLocation: query.evaluatorLogPaths?.jsonSummary,
evalLogSummarySymbolsLocation: query.evaluatorLogPaths?.summarySymbols,
evalLogLocation: query.evalLogLocation,
evalLogSummaryLocation: query.evalLogSummaryLocation,
jsonEvalLogSummaryLocation: query.jsonEvalLogSummaryLocation,
evalLogSummarySymbolsLocation: query.evalLogSummarySymbolsLocation,
failureReason: query.failureReason,
completedQuery:
query.completedQuery && mapCompletedQueryToDto(query.completedQuery),

View File

@@ -32,15 +32,10 @@ export function mapLocalQueryItemToDomainModel(
localQuery.failureReason,
localQuery.completedQuery &&
mapCompletedQueryInfoToDomainModel(localQuery.completedQuery),
localQuery.evalLogLocation
? {
log: localQuery.evalLogLocation,
humanReadableSummary: localQuery.evalLogSummaryLocation,
jsonSummary: localQuery.jsonEvalLogSummaryLocation,
summarySymbols: localQuery.evalLogSummarySymbolsLocation,
endSummary: undefined,
}
: undefined,
localQuery.evalLogLocation,
localQuery.evalLogSummaryLocation,
localQuery.jsonEvalLogSummaryLocation,
localQuery.evalLogSummarySymbolsLocation,
);
}
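
For context on the two mappers above: the evaluator-log locations are carried again as four flat optional strings, copied across under the same names on the DTO and passed positionally to the domain constructor, so the argument order matters there. A trimmed sketch of the copy-through mapping (only these fields shown; the surrounding DTO has many more):

type EvalLogLocations = {
  evalLogLocation?: string;
  evalLogSummaryLocation?: string;
  jsonEvalLogSummaryLocation?: string;
  evalLogSummarySymbolsLocation?: string;
};

// DTO mapping: each location is copied straight across under the same name.
function mapEvalLogLocationsToDto(query: EvalLogLocations): EvalLogLocations {
  return {
    evalLogLocation: query.evalLogLocation,
    evalLogSummaryLocation: query.evalLogSummaryLocation,
    jsonEvalLogSummaryLocation: query.jsonEvalLogSummaryLocation,
    evalLogSummarySymbolsLocation: query.evalLogSummarySymbolsLocation,
  };
}
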

View File

@@ -48,11 +48,6 @@ function mapVariantAnalysisDtoToDto(
): VariantAnalysisDto {
return {
id: variantAnalysis.id,
controllerRepo: {
id: variantAnalysis.controllerRepo.id,
fullName: variantAnalysis.controllerRepo.fullName,
private: variantAnalysis.controllerRepo.private,
},
query: {
name: variantAnalysis.query.name,
filePath: variantAnalysis.query.filePath,

Some files were not shown because too many files have changed in this diff.