Merge pull request #503 from jcreedcmu/jcreed/untangle2

Try moving build to just gulp
This commit is contained in:
jcreedcmu
2020-07-21 12:56:34 -04:00
committed by GitHub
87 changed files with 10280 additions and 11488 deletions

View File

@@ -19,11 +19,15 @@ jobs:
node-version: '10.18.1'
- name: Install dependencies
run: node common/scripts/install-run-rush.js install
run: |
cd extensions/ql-vscode
npm install
shell: bash
- name: Build
run: node common/scripts/install-run-rush.js build
run: |
cd extensions/ql-vscode
npm run build
shell: bash
- name: Prepare artifacts
@@ -57,11 +61,15 @@ jobs:
# We have to build the dependencies in `lib` before running any tests.
- name: Install dependencies
run: node common/scripts/install-run-rush.js install
run: |
cd extensions/ql-vscode
npm install
shell: bash
- name: Build
run: node common/scripts/install-run-rush.js build
run: |
cd extensions/ql-vscode
npm run build
shell: bash
- name: Lint

View File

@@ -34,11 +34,15 @@ jobs:
node-version: '10.18.1'
- name: Install dependencies
run: node common/scripts/install-run-rush.js install
run: |
cd extensions/ql-vscode
npm install
shell: bash
- name: Build
run: node common/scripts/install-run-rush.js build --release
run: |
cd extensions/ql-vscode
npm run build -- --release
shell: bash
- name: Prepare artifacts

16
.vscode/launch.json vendored
View File

@@ -8,16 +8,12 @@
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/dist/vscode-codeql"
"--extensionDevelopmentPath=${workspaceRoot}/extensions/ql-vscode"
],
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/dist/vscode-codeql/out/**/*.js",
"${workspaceRoot}/dist/vscode-codeql/node_modules/semmle-bqrs/out/**/*.js",
"${workspaceRoot}/dist/vscode-codeql/node_modules/semmle-io/out/**/*.js",
"${workspaceRoot}/dist/vscode-codeql/node_modules/semmle-io-node/out/**/*.js",
"${workspaceRoot}/dist/vscode-codeql/node_modules/@github/codeql-vscode-utils/out/**/*.js"
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
],
"preLaunchTask": "Build"
},
@@ -54,13 +50,13 @@
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/dist/vscode-codeql",
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/no-workspace/index"
],
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/dist/vscode-codeql/out/**/*.js",
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
"${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/**/*.js"
],
"preLaunchTask": "Build"
@@ -71,14 +67,14 @@
"request": "launch",
"runtimeExecutable": "${execPath}",
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/dist/vscode-codeql",
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
"--extensionTestsPath=${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/minimal-workspace/index",
"${workspaceRoot}/extensions/ql-vscode/test/data"
],
"stopOnEntry": false,
"sourceMaps": true,
"outFiles": [
"${workspaceRoot}/dist/vscode-codeql/out/**/*.js",
"${workspaceRoot}/extensions/ql-vscode/out/**/*.js",
"${workspaceRoot}/extensions/ql-vscode/out/vscode-tests/**/*.js"
],
"preLaunchTask": "Build"

66
.vscode/tasks.json vendored
View File

@@ -10,7 +10,10 @@
"kind": "build",
"isDefault": true
},
"command": "node common/scripts/install-run-rush.js build --verbose",
"command": "npx gulp buildWithoutPackage --verbose",
"options": {
"cwd": "extensions/ql-vscode/"
},
"presentation": {
"echo": true,
"reveal": "always",
@@ -33,64 +36,13 @@
"$ts-webpack"
]
},
{
"label": "Rebuild",
"type": "shell",
"group": "build",
"command": "node common/scripts/install-run-rush.js rebuild --verbose",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "shared",
"showReuseMessage": true,
"clear": true
},
"problemMatcher": [
{
"owner": "typescript",
"fileLocation": "absolute",
"pattern": {
"regexp": "^\\[gulp-typescript\\] ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): error TS\\d+: (.*)$",
"file": 1,
"location": 2,
"message": 3
}
}
]
},
{
"label": "Update",
"type": "shell",
"command": "node common/scripts/install-run-rush.js update",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "shared",
"showReuseMessage": true,
"clear": true
},
"problemMatcher": []
},
{
"label": "Update (full)",
"type": "shell",
"command": "node common/scripts/install-run-rush.js update --full",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "shared",
"showReuseMessage": true,
"clear": true
},
"problemMatcher": []
},
{
"label": "Format",
"type": "shell",
"command": "node common/scripts/install-run-rush.js format",
"command": "npm run format",
"options": {
"cwd": "extensions/ql-vscode/"
},
"presentation": {
"echo": true,
"reveal": "always",
@@ -111,4 +63,4 @@
"group": "build"
}
]
}
}

View File

@@ -1,28 +0,0 @@
This directory contains content from https://github.com/microsoft/rushstack,
used under the MIT license as follows.
See https://github.com/microsoft/rushstack/blob/master/stack/rush-stack/LICENSE.
@microsoft/rush-stack
Copyright (c) Microsoft Corporation. All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -1,12 +0,0 @@
# Rush uses this file to configure the package registry, regardless of whether the
# package manager is PNPM, NPM, or Yarn. Prior to invoking the package manager,
# Rush will always copy this file to the folder where installation is performed.
# When NPM is the package manager, Rush works around NPM's processing of
# undefined environment variables by deleting any lines that reference undefined
# environment variables.
#
# DO NOT SPECIFY AUTHENTICATION CREDENTIALS IN THIS FILE. It should only be used
# to configure registry sources.
registry=https://registry.npmjs.org/
always-auth=false

View File

@@ -1,32 +0,0 @@
/**
* This configuration file defines custom commands for the "rush" command-line.
* For full documentation, please see https://rushjs.io/pages/configs/command_line_json/
*/
{
"$schema": "https://developer.microsoft.com/json-schemas/rush/v5/command-line.schema.json",
"commands": [
{
"commandKind": "bulk",
"name": "format",
"summary": "Reformat source code in all projects",
"description": "Runs the `format` npm task in each project, if present.",
"safeForSimultaneousRushProcesses": false,
"enableParallelism": true,
"ignoreDependencyOrder": true,
"ignoreMissingScript": true,
"allowWarningsInSuccessfulBuild": false
}
],
"parameters": [
{
"parameterKind": "flag",
"longName": "--release",
"shortName": "-r",
"description": "Perform a release build",
"associatedCommands": [
"build",
"rebuild"
],
}
]
}

View File

@@ -1,43 +0,0 @@
/**
* This configuration file specifies NPM dependency version selections that affect all projects
* in a Rush repo. For full documentation, please see https://rushjs.io
*/
{
"$schema": "https://developer.microsoft.com/json-schemas/rush/v5/common-versions.schema.json",
/**
* A table that specifies a "preferred version" for a dependency package. The "preferred version"
* is typically used to hold an indirect dependency back to a specific version, however generally
* it can be any SemVer range specifier (e.g. "~1.2.3"), and it will narrow any (compatible)
* SemVer range specifier. See the Rush documentation for details about this feature.
*/
"preferredVersions": {
/**
* When someone asks for "^1.0.0" make sure they get "1.2.3" when working in this repo,
* instead of the latest version.
*/
// "some-library": "1.2.3"
},
/**
* The "rush check" command can be used to enforce that every project in the repo must specify
* the same SemVer range for a given dependency. However, sometimes exceptions are needed.
* The allowedAlternativeVersions table allows you to list other SemVer ranges that will be
* accepted by "rush check" for a given dependency.
*
* IMPORTANT: THIS TABLE IS FOR *ADDITIONAL* VERSION RANGES THAT ARE ALTERNATIVES TO THE
* USUAL VERSION (WHICH IS INFERRED BY LOOKING AT ALL PROJECTS IN THE REPO).
* This design avoids unnecessary churn in this file.
*/
"allowedAlternativeVersions": {
/**
* For example, allow some projects to use an older TypeScript compiler
* (in addition to whatever "usual" version is being used by other projects in the repo):
*/
// "typescript": [
// "~2.4.0"
// ]
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,32 +0,0 @@
"use strict";
/**
* When using the PNPM package manager, you can use pnpmfile.js to workaround
* dependencies that have mistakes in their package.json file. (This feature is
* functionally similar to Yarn's "resolutions".)
*
* For details, see the PNPM documentation:
* https://pnpm.js.org/docs/en/hooks.html
*
* IMPORTANT: SINCE THIS FILE CONTAINS EXECUTABLE CODE, MODIFYING IT IS LIKELY
* TO INVALIDATE ANY CACHED DEPENDENCY ANALYSIS. We recommend to run "rush update --full"
* after any modification to pnpmfile.js.
*
*/
// Register the readPackage hook with PNPM.
module.exports = {
hooks: {
readPackage
}
};
/**
 * This hook is invoked during installation before a package's dependencies
 * are selected.
 * The `packageJson` parameter is the deserialized package.json
 * contents for the package that is about to be installed.
 * The `context` parameter provides a log() function.
 * The return value is the updated object.
 */
function readPackage(packageJson, context) {
// No fixups are currently needed, so the manifest is returned unmodified.
return packageJson;
}

View File

@@ -1,10 +0,0 @@
/**
 * This configuration file is used for advanced publishing configurations with Rush.
* For full documentation, please see https://rushjs.io/pages/configs/version_policies_json/
*/
[
{
"definitionName": "individualVersion",
"policyName": "utilities"
}
]

View File

@@ -1,67 +0,0 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See the @microsoft/rush package's LICENSE file for license information.
Object.defineProperty(exports, "__esModule", { value: true });
// THIS FILE WAS GENERATED BY A TOOL. ANY MANUAL MODIFICATIONS WILL GET OVERWRITTEN WHENEVER RUSH IS UPGRADED.
//
// This script is intended for usage in an automated build environment where the Rush command may not have
// been preinstalled, or may have an unpredictable version. This script will automatically install the version of Rush
// specified in the rush.json configuration file (if not already installed), and then pass a command-line to it.
// An example usage would be:
//
// node common/scripts/install-run-rush.js install
//
// For more information, see: https://rushjs.io/pages/maintainer/setup_new_repo/
const path = require("path");
const fs = require("fs");
const install_run_1 = require("./install-run");
const PACKAGE_NAME = '@microsoft/rush';
const RUSH_PREVIEW_VERSION = 'RUSH_PREVIEW_VERSION';
/**
 * Determine which version of Rush to install and run.
 * Precedence: the RUSH_PREVIEW_VERSION environment variable (if set) wins;
 * otherwise the "rushVersion" field is read out of rush.json.
 * Throws if rush.json cannot be read or the field cannot be located.
 */
function _getRushVersion() {
const rushPreviewVersion = process.env[RUSH_PREVIEW_VERSION];
if (rushPreviewVersion !== undefined) {
console.log(`Using Rush version from environment variable ${RUSH_PREVIEW_VERSION}=${rushPreviewVersion}`);
return rushPreviewVersion;
}
const rushJsonFolder = install_run_1.findRushJsonFolder();
const rushJsonPath = path.join(rushJsonFolder, install_run_1.RUSH_JSON_FILENAME);
try {
const rushJsonContents = fs.readFileSync(rushJsonPath, 'utf-8');
// Use a regular expression to parse out the rushVersion value because rush.json supports comments,
// but JSON.parse does not and we don't want to pull in more dependencies than we need to in this script.
const rushJsonMatches = rushJsonContents.match(/\"rushVersion\"\s*\:\s*\"([0-9a-zA-Z.+\-]+)\"/);
return rushJsonMatches[1];
}
catch (e) {
// NOTE: a failed match yields null above, so the [1] access throws and lands
// here along with genuine I/O failures; both become this friendlier error.
throw new Error(`Unable to determine the required version of Rush from rush.json (${rushJsonFolder}). ` +
'The \'rushVersion\' field is either not assigned in rush.json or was specified ' +
'using an unexpected syntax.');
}
}
function _run() {
const [nodePath, /* Ex: /bin/node */ scriptPath, /* /repo/common/scripts/install-run-rush.js */ ...packageBinArgs /* [build, --to, myproject] */] = process.argv;
// Detect if this script was directly invoked, or if the install-run-rushx script was invokved to select the
// appropriate binary inside the rush package to run
const scriptName = path.basename(scriptPath);
const bin = scriptName.toLowerCase() === 'install-run-rushx.js' ? 'rushx' : 'rush';
if (!nodePath || !scriptPath) {
throw new Error('Unexpected exception: could not detect node path or script path');
}
if (process.argv.length < 3) {
console.log(`Usage: ${scriptName} <command> [args...]`);
if (scriptName === 'install-run-rush.js') {
console.log(`Example: ${scriptName} build --to myproject`);
}
else {
console.log(`Example: ${scriptName} custom-command`);
}
process.exit(1);
}
install_run_1.runWithErrorAndStatusCode(() => {
const version = _getRushVersion();
console.log(`The rush.json configuration requests Rush version ${version}`);
return install_run_1.installAndRun(PACKAGE_NAME, version, bin, packageBinArgs);
});
}
_run();
//# sourceMappingURL=install-run-rush.js.map

View File

@@ -1,18 +0,0 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See the @microsoft/rush package's LICENSE file for license information.
Object.defineProperty(exports, "__esModule", { value: true });
// THIS FILE WAS GENERATED BY A TOOL. ANY MANUAL MODIFICATIONS WILL GET OVERWRITTEN WHENEVER RUSH IS UPGRADED.
//
// This script is intended for usage in an automated build environment where the Rush command may not have
// been preinstalled, or may have an unpredictable version. This script will automatically install the version of Rush
// specified in the rush.json configuration file (if not already installed), and then pass a command-line to the
// rushx command.
//
// An example usage would be:
//
// node common/scripts/install-run-rushx.js custom-command
//
// For more information, see: https://rushjs.io/pages/maintainer/setup_new_repo/
require("./install-run-rush");
//# sourceMappingURL=install-run-rushx.js.map

View File

@@ -1,433 +0,0 @@
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See the @microsoft/rush package's LICENSE file for license information.
Object.defineProperty(exports, "__esModule", { value: true });
// THIS FILE WAS GENERATED BY A TOOL. ANY MANUAL MODIFICATIONS WILL GET OVERWRITTEN WHENEVER RUSH IS UPGRADED.
//
// This script is intended for usage in an automated build environment where a Node tool may not have
// been preinstalled, or may have an unpredictable version. This script will automatically install the specified
// version of the specified tool (if not already installed), and then pass a command-line to it.
// An example usage would be:
//
// node common/scripts/install-run.js qrcode@1.2.2 qrcode https://rushjs.io
//
// For more information, see: https://rushjs.io/pages/maintainer/setup_new_repo/
const childProcess = require("child_process");
const fs = require("fs");
const os = require("os");
const path = require("path");
exports.RUSH_JSON_FILENAME = 'rush.json';
const RUSH_TEMP_FOLDER_ENV_VARIABLE_NAME = 'RUSH_TEMP_FOLDER';
const INSTALLED_FLAG_FILENAME = 'installed.flag';
const NODE_MODULES_FOLDER_NAME = 'node_modules';
const PACKAGE_JSON_FILENAME = 'package.json';
/**
 * Parse a package specifier (in the form of name\@version) into name and version parts.
 * A leading '@' (an npm scope marker) is never treated as the version
 * separator; a specifier with no separator yields an undefined version.
 */
function _parsePackageSpecifier(rawPackageSpecifier) {
  const specifier = (rawPackageSpecifier || '').trim();
  const atIndex = specifier.lastIndexOf('@');
  // atIndex === 0 means the '@' starts a scope (e.g. "@scope/name");
  // atIndex === -1 means no version portion was supplied at all.
  const hasVersion = atIndex > 0;
  const name = hasVersion ? specifier.substring(0, atIndex) : specifier;
  const version = hasVersion ? specifier.substring(atIndex + 1) : undefined;
  if (!name) {
    throw new Error(`Invalid package specifier: ${specifier}`);
  }
  return { name, version };
}
/**
 * As a workaround, copyAndTrimNpmrcFile() copies the .npmrc file to the target folder, and also trims
 * unusable lines from the .npmrc file.
 *
 * Why trim? NPM allows environment variable tokens such as "${TOKEN}" in .npmrc
 * (e.g. for per-registry authentication). When such a variable is undefined it
 * would expand to an empty string and produce a valid-looking mapping with an
 * invalid URL, so any line referencing an undefined variable is written out as
 * a "; MISSING ENVIRONMENT VARIABLE:" comment instead of being copied verbatim.
 *
 * IMPORTANT: THIS CODE SHOULD BE KEPT UP TO DATE WITH Utilities._copyNpmrcFile()
 */
function _copyAndTrimNpmrcFile(sourceNpmrcPath, targetNpmrcPath) {
  console.log(`Copying ${sourceNpmrcPath} --> ${targetNpmrcPath}`); // Verbose
  const trimmedLines = fs
    .readFileSync(sourceNpmrcPath)
    .toString()
    .split('\n')
    .map((line) => (line || '').trim());
  const resultLines = trimmedLines.map((line) => {
    // Tokens that look like "${VAR_NAME}"
    const tokens = line.match(/\$\{([^\}]+)\}/g) || [];
    const referencesUndefinedVariable = tokens.some((token) => {
      // Strip the leading "${" and the trailing "}" from the token
      const variableName = token.substring(2, token.length - 1);
      return !process.env[variableName];
    });
    return referencesUndefinedVariable
      ? '; MISSING ENVIRONMENT VARIABLE: ' + line
      : line;
  });
  fs.writeFileSync(targetNpmrcPath, resultLines.join(os.EOL));
}
/**
 * syncNpmrc() copies the .npmrc file to the target folder, and also trims unusable lines from the .npmrc file.
 * If the source .npmrc file does not exist, then syncNpmrc() will delete an .npmrc that is found in the target folder.
 *
 * When `useNpmrcPublish` is truthy, '.npmrc-publish' is used as the source file name instead of '.npmrc'.
 *
 * IMPORTANT: THIS CODE SHOULD BE KEPT UP TO DATE WITH Utilities._syncNpmrc()
 */
function _syncNpmrc(sourceNpmrcFolder, targetNpmrcFolder, useNpmrcPublish) {
const sourceNpmrcPath = path.join(sourceNpmrcFolder, !useNpmrcPublish ? '.npmrc' : '.npmrc-publish');
const targetNpmrcPath = path.join(targetNpmrcFolder, '.npmrc');
try {
if (fs.existsSync(sourceNpmrcPath)) {
_copyAndTrimNpmrcFile(sourceNpmrcPath, targetNpmrcPath);
}
else if (fs.existsSync(targetNpmrcPath)) {
// If the source .npmrc doesn't exist and there is one in the target, delete the one in the target
console.log(`Deleting ${targetNpmrcPath}`); // Verbose
fs.unlinkSync(targetNpmrcPath);
}
}
catch (e) {
throw new Error(`Error syncing .npmrc file: ${e}`);
}
}
// Cache for the resolved npm path; populated on the first getNpmPath() call.
let _npmPath = undefined;
/**
 * Get the absolute path to the npm executable.
 * Shells out to `where npm` on Windows and `which npm` elsewhere; the result
 * is trimmed, verified to exist on disk, and cached in `_npmPath`.
 */
function getNpmPath() {
if (!_npmPath) {
try {
if (os.platform() === 'win32') {
// We're on Windows
const whereOutput = childProcess.execSync('where npm', { stdio: [] }).toString();
const lines = whereOutput.split(os.EOL).filter((line) => !!line);
// take the last result, we are looking for a .cmd command
// see https://github.com/microsoft/rushstack/issues/759
_npmPath = lines[lines.length - 1];
}
else {
// We aren't on Windows - assume we're on *NIX or Darwin
_npmPath = childProcess.execSync('which npm', { stdio: [] }).toString();
}
}
catch (e) {
throw new Error(`Unable to determine the path to the NPM tool: ${e}`);
}
_npmPath = _npmPath.trim();
if (!fs.existsSync(_npmPath)) {
throw new Error('The NPM executable does not exist');
}
}
return _npmPath;
}
exports.getNpmPath = getNpmPath;
/**
 * Recursively create `folderPath` and any missing ancestor directories.
 * No-op when the folder already exists.
 */
function _ensureFolder(folderPath) {
  if (fs.existsSync(folderPath)) {
    return;
  }
  _ensureFolder(path.dirname(folderPath));
  fs.mkdirSync(folderPath);
}
/**
 * Create missing directories under the specified base directory, and return the resolved directory.
 *
 * Each path segment has any '/' or '\' characters replaced with '+' before it
 * is joined, so "." / ".." traversal within a segment is not supported.
 * Assumes the baseFolder exists.
 */
function _ensureAndJoinPath(baseFolder, ...pathSegments) {
  let resolvedPath = baseFolder;
  try {
    for (const segment of pathSegments) {
      const safeSegment = segment.replace(/[\\\/]/g, '+');
      resolvedPath = path.join(resolvedPath, safeSegment);
      if (!fs.existsSync(resolvedPath)) {
        fs.mkdirSync(resolvedPath);
      }
    }
  }
  catch (e) {
    throw new Error(`Error building local installation folder (${path.join(baseFolder, ...pathSegments)}): ${e}`);
  }
  return resolvedPath;
}
/**
 * Return the Rush temp folder: the RUSH_TEMP_FOLDER environment variable if
 * set (creating that folder if necessary), otherwise "<rushCommonFolder>/temp".
 */
function _getRushTempFolder(rushCommonFolder) {
const rushTempFolder = process.env[RUSH_TEMP_FOLDER_ENV_VARIABLE_NAME];
if (rushTempFolder !== undefined) {
_ensureFolder(rushTempFolder);
return rushTempFolder;
}
else {
return _ensureAndJoinPath(rushCommonFolder, 'temp');
}
}
/**
 * Resolve a package specifier to a static version.
 * A missing version defaults to '*'; a version made up only of static-specifier
 * characters is returned as-is; anything else is resolved by querying the npm
 * registry via "npm view".
 */
function _resolvePackageVersion(rushCommonFolder, { name, version }) {
if (!version) {
version = '*'; // If no version is specified, use the latest version
}
if (version.match(/^[a-zA-Z0-9\-\+\.]+$/)) {
// If the version contains only characters that we recognize to be used in static version specifiers,
// pass the version through
return version;
}
else {
// The version is a range (or tag), so ask the npm registry to resolve it to a concrete version
try {
const rushTempFolder = _getRushTempFolder(rushCommonFolder);
const sourceNpmrcFolder = path.join(rushCommonFolder, 'config', 'rush');
_syncNpmrc(sourceNpmrcFolder, rushTempFolder);
const npmPath = getNpmPath();
// This returns something that looks like:
// @microsoft/rush@3.0.0 '3.0.0'
// @microsoft/rush@3.0.1 '3.0.1'
// ...
// @microsoft/rush@3.0.20 '3.0.20'
// <blank line>
const npmVersionSpawnResult = childProcess.spawnSync(npmPath, ['view', `${name}@${version}`, 'version', '--no-update-notifier'], {
cwd: rushTempFolder,
stdio: []
});
if (npmVersionSpawnResult.status !== 0) {
throw new Error(`"npm view" returned error code ${npmVersionSpawnResult.status}`);
}
const npmViewVersionOutput = npmVersionSpawnResult.stdout.toString();
// The highest matching version is the last non-empty line of the output.
const versionLines = npmViewVersionOutput.split('\n').filter((line) => !!line);
const latestVersion = versionLines[versionLines.length - 1];
if (!latestVersion) {
throw new Error('No versions found for the specified version range.');
}
// Extract the quoted version from a line like "@microsoft/rush@3.0.20 '3.0.20'"
const versionMatches = latestVersion.match(/^.+\s\'(.+)\'$/);
if (!versionMatches) {
throw new Error(`Invalid npm output ${latestVersion}`);
}
return versionMatches[1];
}
catch (e) {
throw new Error(`Unable to resolve version ${version} of package ${name}: ${e}`);
}
}
}
// Cache for the folder containing rush.json; populated on first lookup.
let _rushJsonFolder;
/**
 * Find the absolute path to the folder containing rush.json.
 * Walks upward from this script's directory toward the filesystem root,
 * caching the first folder in which rush.json is found; throws if the root is
 * reached without finding it.
 */
function findRushJsonFolder() {
if (!_rushJsonFolder) {
let basePath = __dirname;
let tempPath = __dirname;
do {
const testRushJsonPath = path.join(basePath, exports.RUSH_JSON_FILENAME);
if (fs.existsSync(testRushJsonPath)) {
_rushJsonFolder = basePath;
break;
}
else {
basePath = tempPath;
}
} while (basePath !== (tempPath = path.dirname(basePath))); // Exit the loop when we hit the disk root
if (!_rushJsonFolder) {
throw new Error('Unable to find rush.json.');
}
}
return _rushJsonFolder;
}
exports.findRushJsonFolder = findRushJsonFolder;
/**
 * Detects if the package in the specified directory is installed.
 * An install counts only if the installed.flag file exists AND records the
 * exact Node.js version currently running (see _writeFlagFile); any
 * filesystem error is treated as "not installed".
 */
function _isPackageAlreadyInstalled(packageInstallFolder) {
try {
const flagFilePath = path.join(packageInstallFolder, INSTALLED_FLAG_FILENAME);
if (!fs.existsSync(flagFilePath)) {
return false;
}
const fileContents = fs.readFileSync(flagFilePath).toString();
return fileContents.trim() === process.version;
}
catch (e) {
return false;
}
}
/**
 * Removes the following files and directories under the specified folder path:
 * - installed.flag
 * - package-lock.json
 * - node_modules
 */
function _cleanInstallFolder(rushTempFolder, packageInstallFolder) {
try {
const flagFile = path.resolve(packageInstallFolder, INSTALLED_FLAG_FILENAME);
if (fs.existsSync(flagFile)) {
fs.unlinkSync(flagFile);
}
const packageLockFile = path.resolve(packageInstallFolder, 'package-lock.json');
if (fs.existsSync(packageLockFile)) {
fs.unlinkSync(packageLockFile);
}
// node_modules is moved aside into a timestamped "rush-recycler" folder
// rather than being deleted in place.
const nodeModulesFolder = path.resolve(packageInstallFolder, NODE_MODULES_FOLDER_NAME);
if (fs.existsSync(nodeModulesFolder)) {
const rushRecyclerFolder = _ensureAndJoinPath(rushTempFolder, 'rush-recycler', `install-run-${Date.now().toString()}`);
fs.renameSync(nodeModulesFolder, rushRecyclerFolder);
}
}
catch (e) {
throw new Error(`Error cleaning the package install folder (${packageInstallFolder}): ${e}`);
}
}
/**
 * Write a minimal package.json into the install folder whose sole dependency
 * is the requested package at the requested version. The placeholder
 * description/repository fields ("DON'T WARN") appear intended to suppress
 * npm warnings about missing metadata.
 */
function _createPackageJson(packageInstallFolder, name, version) {
try {
const packageJsonContents = {
'name': 'ci-rush',
'version': '0.0.0',
'dependencies': {
[name]: version
},
'description': 'DON\'T WARN',
'repository': 'DON\'T WARN',
'license': 'MIT'
};
const packageJsonPath = path.join(packageInstallFolder, PACKAGE_JSON_FILENAME);
fs.writeFileSync(packageJsonPath, JSON.stringify(packageJsonContents, undefined, 2));
}
catch (e) {
throw new Error(`Unable to create package.json: ${e}`);
}
}
/**
 * Run "npm install" in the package install folder.
 * stdio is inherited so npm's own progress/errors are shown directly; a
 * non-zero npm exit status is converted into a thrown Error.
 */
function _installPackage(packageInstallFolder, name, version) {
try {
console.log(`Installing ${name}...`);
const npmPath = getNpmPath();
const result = childProcess.spawnSync(npmPath, ['install'], {
stdio: 'inherit',
cwd: packageInstallFolder,
env: process.env
});
if (result.status !== 0) {
throw new Error('"npm install" encountered an error');
}
console.log(`Successfully installed ${name}@${version}`);
}
catch (e) {
throw new Error(`Unable to install package: ${e}`);
}
}
/**
 * Get the ".bin" path for the package.
 * On Windows the ".cmd" shim is resolved; elsewhere the plain binary name is used.
 */
function _getBinPath(packageInstallFolder, binName) {
const binFolderPath = path.resolve(packageInstallFolder, NODE_MODULES_FOLDER_NAME, '.bin');
const resolvedBinName = (os.platform() === 'win32') ? `${binName}.cmd` : binName;
return path.resolve(binFolderPath, resolvedBinName);
}
/**
 * Write a flag file to the package's install directory, signifying that the install was successful.
 * The file's content is the running Node.js version (process.version), which
 * _isPackageAlreadyInstalled() later compares against.
 */
function _writeFlagFile(packageInstallFolder) {
try {
const flagFilePath = path.join(packageInstallFolder, INSTALLED_FLAG_FILENAME);
fs.writeFileSync(flagFilePath, process.version);
}
catch (e) {
throw new Error(`Unable to create installed.flag file in ${packageInstallFolder}`);
}
}
/**
 * Ensure packageName@packageVersion is installed under the Rush temp folder
 * (reinstalling from a clean folder when the installed.flag check fails), then
 * spawn the named bin with the given arguments, inheriting stdio.
 * Returns the child process's exit status; throws when no status is available
 * (e.g. the spawn itself failed).
 */
function installAndRun(packageName, packageVersion, packageBinName, packageBinArgs) {
const rushJsonFolder = findRushJsonFolder();
const rushCommonFolder = path.join(rushJsonFolder, 'common');
const rushTempFolder = _getRushTempFolder(rushCommonFolder);
const packageInstallFolder = _ensureAndJoinPath(rushTempFolder, 'install-run', `${packageName}@${packageVersion}`);
if (!_isPackageAlreadyInstalled(packageInstallFolder)) {
// The package isn't already installed
_cleanInstallFolder(rushTempFolder, packageInstallFolder);
const sourceNpmrcFolder = path.join(rushCommonFolder, 'config', 'rush');
_syncNpmrc(sourceNpmrcFolder, packageInstallFolder);
_createPackageJson(packageInstallFolder, packageName, packageVersion);
_installPackage(packageInstallFolder, packageName, packageVersion);
_writeFlagFile(packageInstallFolder);
}
// Banner showing exactly what is about to be invoked.
const statusMessage = `Invoking "${packageBinName} ${packageBinArgs.join(' ')}"`;
const statusMessageLine = new Array(statusMessage.length + 1).join('-');
console.log(os.EOL + statusMessage + os.EOL + statusMessageLine + os.EOL);
const binPath = _getBinPath(packageInstallFolder, packageBinName);
const result = childProcess.spawnSync(binPath, packageBinArgs, {
stdio: 'inherit',
cwd: process.cwd(),
env: process.env
});
if (result.status !== null) {
return result.status;
}
else {
throw result.error || new Error('An unknown error occurred.');
}
}
exports.installAndRun = installAndRun;
/**
 * Run `fn` and store its numeric return value in process.exitCode.
 * The exit code is pessimistically preset to 1 so that if `fn` throws, the
 * failure status stays in place; the error itself is printed to stderr
 * rather than rethrown.
 */
function runWithErrorAndStatusCode(fn) {
  process.exitCode = 1;
  try {
    process.exitCode = fn();
  }
  catch (e) {
    console.error(os.EOL + os.EOL + e.toString() + os.EOL + os.EOL);
  }
}
exports.runWithErrorAndStatusCode = runWithErrorAndStatusCode;
function _run() {
const [nodePath, /* Ex: /bin/node */ scriptPath, /* /repo/common/scripts/install-run-rush.js */ rawPackageSpecifier, /* qrcode@^1.2.0 */ packageBinName, /* qrcode */ ...packageBinArgs /* [-f, myproject/lib] */] = process.argv;
if (!nodePath) {
throw new Error('Unexpected exception: could not detect node path');
}
if (path.basename(scriptPath).toLowerCase() !== 'install-run.js') {
// If install-run.js wasn't directly invoked, don't execute the rest of this function. Return control
// to the script that (presumably) imported this file
return;
}
if (process.argv.length < 4) {
console.log('Usage: install-run.js <package>@<version> <command> [args...]');
console.log('Example: install-run.js qrcode@1.2.2 qrcode https://rushjs.io');
process.exit(1);
}
runWithErrorAndStatusCode(() => {
const rushJsonFolder = findRushJsonFolder();
const rushCommonFolder = _ensureAndJoinPath(rushJsonFolder, 'common');
const packageSpecifier = _parsePackageSpecifier(rawPackageSpecifier);
const name = packageSpecifier.name;
const version = _resolvePackageVersion(rushCommonFolder, packageSpecifier);
if (packageSpecifier.version !== version) {
console.log(`Resolved to ${name}@${version}`);
}
return installAndRun(name, version, packageBinName, packageBinArgs);
});
}
_run();
//# sourceMappingURL=install-run.js.map

View File

@@ -1,8 +0,0 @@
{
"$schema": "http://json.schemastore.org/tsconfig",
"extends": "./common.tsconfig.json",
"compilerOptions": {
"declaration": false,
"strict": true
}
}

View File

@@ -1,4 +0,0 @@
{
"$schema": "http://json.schemastore.org/tsconfig",
"extends": "./common.tsconfig.json"
}

View File

@@ -1,18 +0,0 @@
{
"name": "typescript-config",
"description": "TypeScript configurations",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"scripts": {
"build": "",
"format": ""
},
"devDependencies": {},
"dependencies": {}
}

View File

@@ -3,7 +3,7 @@ module.exports = {
parserOptions: {
ecmaVersion: 2018,
sourceType: "module",
project: ["tsconfig.json", "./src/**/tsconfig.json"],
project: ["tsconfig.json", "./src/**/tsconfig.json", "./gulpfile.ts/tsconfig.json"],
},
plugins: ["@typescript-eslint"],
env: {

View File

@@ -1,19 +0,0 @@
'use strict';
// Register ts-node so TypeScript task sources (e.g. ./webpack) can be
// require()d directly without a separate compile step.
require('ts-node').register({});
const gulp = require('gulp');
const {
compileTypeScript,
watchTypeScript,
packageExtension,
compileTextMateGrammar,
copyTestData,
copyViewCss
} = require('@github/codeql-gulp-tasks');
const { compileView } = require('./webpack');
// "buildWithoutPackage" runs all compile/copy steps in parallel; the default
// task additionally packages the extension once they have completed.
exports.buildWithoutPackage = gulp.parallel(compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss);
exports.compileTextMateGrammar = compileTextMateGrammar;
exports.default = gulp.series(exports.buildWithoutPackage, packageExtension);
exports.watchTypeScript = watchTypeScript;
exports.compileTypeScript = compileTypeScript;

View File

@@ -0,0 +1,72 @@
import * as fs from 'fs-extra';
import * as jsonc from 'jsonc-parser';
import * as path from 'path';
export interface DeployedPackage {
distPath: string;
name: string;
version: string;
}
// Files and directories (relative to the extension root) that make up the
// published package; each entry is copied verbatim into the dist directory.
const packageFiles = [
  '.vscodeignore',
  'CHANGELOG.md',
  'README.md',
  'language-configuration.json',
  'media',
  'node_modules',
  'out'
];
/**
 * Copies the files and directories listed in `packageFiles` from `sourcePath`
 * into `destPath`, preserving their relative layout.
 */
async function copyPackage(sourcePath: string, destPath: string): Promise<void> {
  for (const file of packageFiles) {
    const from = path.resolve(sourcePath, file);
    const to = path.resolve(destPath, file);
    console.log(`copying ${from} to ${to}`);
    await fs.copy(from, to);
  }
}
/**
 * Creates a deployable copy of the extension package under the `dist` directory.
 *
 * Reads the extension's `package.json`, stamps a timestamped dev-build version number
 * (unless `--release` was passed on the command line), removes any previous dev builds
 * and any previous copy of the package, and copies the package files into `dist/<name>`.
 *
 * @param packageJsonPath Path to the extension's `package.json`.
 * @returns The location, name, and (possibly dev-stamped) version of the deployed package.
 * @throws Rethrows any filesystem or parse error after logging it.
 */
export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
  try {
    const packageJson: any = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));

    // Default to development build; use flag --release to indicate release build.
    const isDevBuild = !process.argv.includes('--release');
    const distDir = path.join(__dirname, '../../../dist');
    await fs.mkdirs(distDir);

    if (isDevBuild) {
      // NOTE: packageJson.name had better not have any regex metacharacters
      const oldDevBuildPattern = new RegExp('^' + packageJson.name + '[^/]+-dev[0-9.]+\\.vsix$');
      // Dev package filenames are of the form
      //    vscode-codeql-0.0.1-dev.2019.9.27.19.55.20.vsix
      // Fixed: delete old builds in a plain loop using the async `fs.unlink`, instead of
      // abusing `.map()` for side effects with the synchronous `fs.unlinkSync`.
      const oldDevBuilds = (await fs.readdir(distDir)).filter(name => oldDevBuildPattern.test(name));
      for (const build of oldDevBuilds) {
        console.log(`Deleting old dev build ${build}...`);
        await fs.unlink(path.join(distDir, build));
      }

      // Stamp the version with a UTC timestamp so successive dev builds sort correctly.
      const now = new Date();
      packageJson.version +=
        `-dev.${now.getUTCFullYear()}.${now.getUTCMonth() + 1}.${now.getUTCDate()}` +
        `.${now.getUTCHours()}.${now.getUTCMinutes()}.${now.getUTCSeconds()}`;
    }

    // Start from a clean destination directory for this package.
    const distPath = path.join(distDir, packageJson.name);
    await fs.remove(distPath);
    await fs.mkdirs(distPath);

    // Write the (possibly version-stamped) manifest, then copy the package contents.
    await fs.writeFile(path.join(distPath, 'package.json'), JSON.stringify(packageJson, null, 2));

    const sourcePath = path.join(__dirname, '..');
    console.log(`Copying package '${packageJson.name}' and its dependencies to '${distPath}'...`);
    await copyPackage(sourcePath, distPath);

    return {
      distPath: distPath,
      name: packageJson.name,
      version: packageJson.version
    };
  }
  catch (e) {
    // Log before rethrowing so gulp's terse error summary doesn't hide the details.
    console.error(e);
    throw e;
  }
}

View File

@@ -0,0 +1,10 @@
import * as gulp from 'gulp';
import { compileTypeScript, watchTypeScript, copyViewCss } from './typescript';
import { compileTextMateGrammar } from './textmate';
import { copyTestData } from './tests';
import { compileView } from './webpack';
import { packageExtension } from './package';

/** Builds all of the extension's outputs in parallel, without packaging the .vsix. */
export const buildWithoutPackage = gulp.parallel(compileTypeScript, compileTextMateGrammar, compileView, copyTestData, copyViewCss);

export { compileTextMateGrammar, watchTypeScript, compileTypeScript };

// Default task: build everything, then package the extension.
// Fixed: use an ES `export default` instead of assigning to the CommonJS `exports`
// object, which mixed module systems and referenced `exports.buildWithoutPackage`
// outside TypeScript's checking.
export default gulp.series(buildWithoutPackage, packageExtension);

View File

@@ -1,6 +1,6 @@
import * as path from 'path';
import { deployPackage } from './deploy';
import * as child_process from 'child-process-promise';
import * as childProcess from 'child-process-promise';
export async function packageExtension(): Promise<void> {
const deployedPackage = await deployPackage(path.resolve('package.json'));
@@ -9,7 +9,7 @@ export async function packageExtension(): Promise<void> {
'package',
'--out', path.resolve(deployedPackage.distPath, '..', `${deployedPackage.name}-${deployedPackage.version}.vsix`)
];
const proc = child_process.spawn('vsce', args, {
const proc = childProcess.spawn('./node_modules/.bin/vsce', args, {
cwd: deployedPackage.distPath
});
proc.childProcess.stdout!.on('data', (data) => {

View File

@@ -1,5 +1,5 @@
import * as gulp from 'gulp';
import * as js_yaml from 'js-yaml';
import * as jsYaml from 'js-yaml';
import * as through from 'through2';
import * as PluginError from 'plugin-error';
import * as Vinyl from 'vinyl';
@@ -13,9 +13,10 @@ import * as Vinyl from 'vinyl';
*/
function replaceReferencesWithStrings(value: string, replacements: Map<string, string>): string {
let result = value;
// eslint-disable-next-line no-constant-condition
while (true) {
const original = result;
for (const key of replacements.keys()) {
for (const key of Array.from(replacements.keys())) {
result = result.replace(`(?#${key})`, `(?:${replacements.get(key)})`);
}
if (result === original) {
@@ -32,7 +33,7 @@ function replaceReferencesWithStrings(value: string, replacements: Map<string, s
*/
function gatherMacros(yaml: any): Map<string, string> {
const macros = new Map<string, string>();
for (var key in yaml.macros) {
for (const key in yaml.macros) {
macros.set(key, yaml.macros[key]);
}
@@ -55,7 +56,7 @@ function getNodeMatchText(rule: any): string {
else if (rule.patterns !== undefined) {
const patterns: string[] = [];
// For a list of patterns, use the disjunction of those patterns.
for (var patternIndex in rule.patterns) {
for (const patternIndex in rule.patterns) {
const pattern = rule.patterns[patternIndex];
if (pattern.include !== null) {
patterns.push('(?' + pattern.include + ')');
@@ -65,7 +66,7 @@ function getNodeMatchText(rule: any): string {
return '(?:' + patterns.join('|') + ')';
}
else {
return ''
return '';
}
}
@@ -78,7 +79,7 @@ function getNodeMatchText(rule: any): string {
*/
function gatherMatchTextForRules(yaml: any): Map<string, string> {
const replacements = new Map<string, string>();
for (var key in yaml.repository) {
for (const key in yaml.repository) {
const node = yaml.repository[key];
replacements.set(key, getNodeMatchText(node));
}
@@ -106,7 +107,7 @@ function visitAllRulesInFile(yaml: any, action: (rule: any) => void) {
* @param action Callback to invoke on each rule.
*/
function visitAllRulesInRuleMap(ruleMap: any, action: (rule: any) => void) {
for (var key in ruleMap) {
for (const key in ruleMap) {
const rule = ruleMap[key];
if ((typeof rule) === 'object') {
action(rule);
@@ -124,7 +125,7 @@ function visitAllRulesInRuleMap(ruleMap: any, action: (rule: any) => void) {
* @param action The transformation to make on each match pattern.
*/
function visitAllMatchesInRule(rule: any, action: (match: any) => any) {
for (var key in rule) {
for (const key in rule) {
switch (key) {
case 'begin':
case 'end':
@@ -184,10 +185,10 @@ function transformFile(yaml: any) {
visitAllRulesInFile(yaml, (rule) => {
visitAllMatchesInRule(rule, (match) => {
if ((typeof match) === 'object') {
for (var key in match) {
for (const key in match) {
return macros.get(key)!.replace('(?#)', `(?:${match[key]})`);
}
throw new Error("No key in macro map.")
throw new Error('No key in macro map.');
}
else {
return match;
@@ -225,7 +226,7 @@ export function transpileTextMateGrammar() {
else if (file.isBuffer()) {
const buf: Buffer = file.contents;
const yamlText: string = buf.toString('utf8');
const jsonData: any = js_yaml.safeLoad(yamlText);
const jsonData: any = jsYaml.safeLoad(yamlText);
transformFile(jsonData);
file.contents = Buffer.from(JSON.stringify(jsonData, null, 2), 'utf8');

View File

@@ -1,15 +1,14 @@
{
"$schema": "http://json.schemastore.org/tsconfig",
"compilerOptions": {
"declaration": true,
"strict": true,
"module": "commonjs",
"target": "es2017",
"outDir": "out",
"lib": [
"es6"
],
"lib": ["es6"],
"moduleResolution": "node",
"sourceMap": true,
"rootDir": "../../src",
"rootDir": ".",
"strictNullChecks": true,
"noFallthroughCasesInSwitch": true,
"preserveWatchOutput": true,
@@ -19,12 +18,5 @@
"noUnusedLocals": true,
"noUnusedParameters": true
},
"include": [
"../../src/**/*.ts"
],
"exclude": [
"../../node_modules",
"../../test",
"../../**/view"
]
"include": ["*.ts"]
}

View File

@@ -0,0 +1,42 @@
import * as colors from 'ansi-colors';
import * as gulp from 'gulp';
import * as sourcemaps from 'gulp-sourcemaps';
import * as ts from 'gulp-typescript';
/**
 * A gulp-typescript reporter that prints diagnostics in a compact, colorized,
 * MSBuild-style `file(line,col): error TSxxxx: message` format.
 */
function goodReporter(): ts.reporter.Reporter {
  return {
    error: (error, typescript) => {
      if (!error.tsFile) {
        console.log(error.message);
        return;
      }
      const position = `${error.fullFilename}(${error.startPosition!.line + 1},${error.startPosition!.character}): `;
      const message = typescript.flattenDiagnosticMessageText(error.diagnostic.messageText, '\n');
      console.log(`[${colors.gray('gulp-typescript')}] ${colors.red(position)}error TS${error.diagnostic.code}: ${message}`);
    },
  };
}
// Shared gulp-typescript project configured from the extension's tsconfig.json.
const tsProject = ts.createProject('tsconfig.json');

/**
 * Compiles the TypeScript sources selected by tsconfig.json into `out`,
 * writing external source maps alongside the generated JavaScript.
 */
export function compileTypeScript() {
  return tsProject.src()
    .pipe(sourcemaps.init())
    .pipe(tsProject(goodReporter()))
    .pipe(sourcemaps.write('.', {
      // Don't embed the original sources in the maps; reference them by path instead.
      includeContent: false,
      sourceRoot: '.',
    }))
    .pipe(gulp.dest('out'));
}
/** Recompiles the TypeScript sources whenever a file under `src` changes. */
export function watchTypeScript() {
  // NOTE(review): the watcher is not returned, so gulp cannot signal task
  // completion — presumably intentional for a run-forever watch task; confirm.
  gulp.watch('src/**/*.ts', compileTypeScript);
}
/** Copies the CSS files for the results view into the output directory. */
export function copyViewCss() {
  const cssFiles = gulp.src('src/view/*.css');
  return cssFiles.pipe(gulp.dest('out'));
}

View File

@@ -9,9 +9,9 @@ export const config: webpack.Configuration = {
},
output: {
path: path.resolve(__dirname, '..', 'out'),
filename: "[name].js"
filename: '[name].js'
},
devtool: "inline-source-map",
devtool: 'inline-source-map',
resolve: {
extensions: ['.js', '.ts', '.tsx', '.json']
},

10029
extensions/ql-vscode/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -13,7 +13,7 @@
"url": "https://github.com/github/vscode-codeql"
},
"engines": {
"vscode": "^1.39.0"
"vscode": "^1.43.0"
},
"categories": [
"Programming Languages"
@@ -586,7 +586,6 @@
"preintegration": "rm -rf ./out/vscode-tests && gulp",
"integration": "node ./out/vscode-tests/run-integration-tests.js",
"update-vscode": "node ./node_modules/vscode/bin/install",
"postinstall": "npm rebuild && node ./node_modules/vscode/bin/install",
"format": "tsfmt -r && eslint src test --ext .ts,.tsx --fix",
"lint": "eslint src test --ext .ts,.tsx --max-warnings=0",
"format-staged": "lint-staged"
@@ -596,82 +595,83 @@
"classnames": "~2.2.6",
"fs-extra": "^8.1.0",
"glob-promise": "^3.4.0",
"js-yaml": "^3.12.0",
"js-yaml": "^3.14.0",
"minimist": "~1.2.5",
"node-fetch": "~2.6.0",
"react": "^16.8.6",
"react-dom": "^16.8.6",
"@github/codeql-vscode-utils": "^0.0.4",
"semver": "~7.3.2",
"tmp": "^0.1.0",
"tmp-promise": "~3.0.2",
"tree-kill": "~1.2.2",
"unzipper": "~0.10.5",
"vscode-jsonrpc": "^5.0.1",
"vscode-languageclient": "^6.1.3",
"vscode-test-adapter-api": "~1.7.0",
"vscode-test-adapter-util": "~0.7.0",
"minimist": "~1.2.5",
"semver": "~7.3.2",
"@types/semver": "~7.2.0",
"tmp-promise": "~3.0.2",
"zip-a-folder": "~0.0.12"
},
"devDependencies": {
"@types/semver": "~7.2.0",
"@types/chai": "^4.1.7",
"@types/chai-as-promised": "~7.1.2",
"@types/child-process-promise": "^2.2.1",
"@types/classnames": "~2.2.9",
"@types/fs-extra": "^8.0.0",
"@types/glob": "^7.1.1",
"@types/google-protobuf": "^3.2.7",
"@types/gulp": "^4.0.6",
"@types/js-yaml": "~3.12.1",
"@types/gulp-sourcemaps": "0.0.32",
"@types/js-yaml": "~3.12.2",
"@types/jszip": "~3.1.6",
"@types/mocha": "~5.2.7",
"@types/node": "^12.0.8",
"@types/node-fetch": "~2.5.2",
"@types/proxyquire": "~1.3.28",
"@types/react": "^16.8.17",
"@types/react-dom": "^16.8.4",
"@types/sarif": "~2.1.2",
"@types/sinon": "~7.5.2",
"@types/sinon-chai": "~3.2.3",
"@types/through2": "^2.0.36",
"@types/tmp": "^0.1.0",
"@types/unzipper": "~0.10.1",
"@types/vscode": "^1.39.0",
"@types/vscode": "^1.43.0",
"@types/webpack": "^4.32.1",
"@types/xml2js": "~0.4.4",
"@github/codeql-gulp-tasks": "^0.0.4",
"@typescript-eslint/eslint-plugin": "~2.23.0",
"@typescript-eslint/parser": "~2.23.0",
"ansi-colors": "^4.1.1",
"chai": "^4.2.0",
"chai-as-promised": "~7.1.1",
"css-loader": "~3.1.0",
"eslint": "~6.8.0",
"eslint-plugin-react": "~7.19.0",
"glob": "^7.1.4",
"gulp": "^4.0.2",
"gulp-sourcemaps": "^2.6.5",
"gulp-typescript": "^5.0.1",
"husky": "~4.2.5",
"jsonc-parser": "^2.3.0",
"lint-staged": "~10.2.2",
"mocha": "~6.2.1",
"mocha-sinon": "~2.1.0",
"npm-run-all": "^4.1.5",
"prettier": "~2.0.5",
"proxyquire": "~2.1.3",
"sinon": "~9.0.0",
"sinon-chai": "~3.5.0",
"style-loader": "~0.23.1",
"through2": "^3.0.1",
"ts-loader": "^5.4.5",
"ts-node": "^8.3.0",
"ts-protoc-gen": "^0.9.0",
"typescript": "^3.7.2",
"typescript-config": "^0.0.1",
"typescript": "~3.8.3",
"typescript-formatter": "^7.2.2",
"vsce": "^1.65.0",
"vscode-test": "^1.4.0",
"webpack": "^4.38.0",
"webpack-cli": "^3.3.2",
"eslint": "~6.8.0",
"@typescript-eslint/eslint-plugin": "~2.23.0",
"@typescript-eslint/parser": "~2.23.0",
"chai-as-promised": "~7.1.1",
"@types/chai-as-promised": "~7.1.2",
"@types/sinon": "~7.5.2",
"sinon-chai": "~3.5.0",
"@types/sinon-chai": "~3.2.3",
"proxyquire": "~2.1.3",
"@types/proxyquire": "~1.3.28",
"eslint-plugin-react": "~7.19.0",
"husky": "~4.2.5",
"lint-staged": "~10.2.2",
"prettier": "~2.0.5"
"webpack-cli": "^3.3.2"
},
"husky": {
"hooks": {

View File

@@ -1,4 +1,4 @@
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from '../vscode-utils/disposable-object';
import {
WebviewPanel,
ExtensionContext,

View File

@@ -1,4 +1,4 @@
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import { workspace, Event, EventEmitter, ConfigurationChangeEvent, ConfigurationTarget } from 'vscode';
import { DistributionManager } from './distribution';
import { logger } from './logging';

View File

@@ -1,5 +1,5 @@
import * as path from 'path';
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import {
commands,
Event,

View File

@@ -6,7 +6,7 @@ import * as cli from './cli';
import { ExtensionContext } from 'vscode';
import { showAndLogErrorMessage, showAndLogWarningMessage, showAndLogInformationMessage } from './helpers';
import { zipArchiveScheme, encodeSourceArchiveUri, decodeSourceArchiveUri } from './archive-filesystem-provider';
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import { QueryServerConfig } from './config';
import { Logger, logger } from './logging';

View File

@@ -1,4 +1,4 @@
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
/**
* Base class for "discovery" operations, which scan the file system to find specific kinds of

View File

@@ -1,6 +1,6 @@
import * as path from 'path';
import * as Sarif from 'sarif';
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import * as vscode from 'vscode';
import {
Diagnostic,

View File

@@ -1,5 +1,5 @@
import { window as Window, OutputChannel, Progress, Disposable } from 'vscode';
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import * as fs from 'fs-extra';
import * as path from 'path';

View File

@@ -1,5 +1,5 @@
import { EventEmitter, Event, Uri, WorkspaceFolder, RelativePattern } from 'vscode';
import { MultiFileSystemWatcher } from '@github/codeql-vscode-utils';
import { MultiFileSystemWatcher } from './vscode-utils/multi-file-system-watcher';
import { CodeQLCliServer, QlpacksInfo } from './cli';
import { Discovery } from './discovery';

View File

@@ -2,7 +2,7 @@ import * as path from 'path';
import { QLPackDiscovery } from './qlpack-discovery';
import { Discovery } from './discovery';
import { EventEmitter, Event, Uri, RelativePattern, env } from 'vscode';
import { MultiFileSystemWatcher } from '@github/codeql-vscode-utils';
import { MultiFileSystemWatcher } from './vscode-utils/multi-file-system-watcher';
import { CodeQLCliServer } from './cli';
/**

View File

@@ -1,8 +1,6 @@
import * as cp from 'child_process';
import * as path from 'path';
// Import from the specific module within `semmle-vscode-utils`, rather than via `index.ts`, because
// we avoid taking an accidental runtime dependency on `vscode` this way.
import { DisposableObject } from '@github/codeql-vscode-utils/out/disposable-object';
import { DisposableObject } from './vscode-utils/disposable-object';
import { Disposable } from 'vscode';
import { CancellationToken, createMessageConnection, MessageConnection, RequestType } from 'vscode-jsonrpc';
import * as cli from './cli';

View File

@@ -15,7 +15,7 @@ import {
import { TestAdapterRegistrar } from 'vscode-test-adapter-util';
import { QLTestFile, QLTestNode, QLTestDirectory, QLTestDiscovery } from './qltest-discovery';
import { Event, EventEmitter, CancellationTokenSource, CancellationToken } from 'vscode';
import { DisposableObject } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import { QLPackDiscovery } from './qlpack-discovery';
import { CodeQLCliServer } from './cli';
import { getOnDiskWorkspaceFolders } from './helpers';

View File

@@ -2,7 +2,8 @@ import * as fs from 'fs-extra';
import * as path from 'path';
import { Uri, TextDocumentShowOptions, commands, window } from 'vscode';
import { TestTreeNode } from './test-tree-node';
import { DisposableObject, UIService } from '@github/codeql-vscode-utils';
import { DisposableObject } from './vscode-utils/disposable-object';
import { UIService } from './vscode-utils/ui-service';
import { TestHub, TestController, TestAdapter, TestRunStartedEvent, TestRunFinishedEvent, TestEvent, TestSuiteEvent } from 'vscode-test-adapter-api';
import { QLTestAdapter, getExpectedFile, getActualFile } from './test-adapter';
import { logger } from './logging';

View File

@@ -1,4 +1,4 @@
import { Disposable } from "vscode";
import { Disposable } from 'vscode';
/**
* Base class to make it easier to implement a `Disposable` that owns other disposable object.
@@ -7,9 +7,6 @@ export abstract class DisposableObject implements Disposable {
private disposables: Disposable[] = [];
private tracked?: Set<Disposable> = undefined;
constructor() {
}
/**
* Adds `obj` to a list of objects to dispose when `this` is disposed. Objects added by `push` are
* disposed in reverse order of being added.

View File

@@ -1,3 +1,32 @@
{
"extends": "./node_modules/typescript-config/extension.tsconfig.json"
"$schema": "http://json.schemastore.org/tsconfig",
"compilerOptions": {
"declaration": true,
"strict": true,
"module": "commonjs",
"target": "es2017",
"outDir": "out",
"lib": [
"es6"
],
"moduleResolution": "node",
"sourceMap": true,
"rootDir": "src",
"strictNullChecks": true,
"noFallthroughCasesInSwitch": true,
"preserveWatchOutput": true,
"newLine": "lf",
"noImplicitReturns": true,
"experimentalDecorators": true,
"noUnusedLocals": true,
"noUnusedParameters": true
},
"include": [
"src/**/*.ts"
],
"exclude": [
"node_modules",
"test",
"**/view"
]
}

View File

@@ -1,7 +0,0 @@
'use strict';
require('ts-node').register({});
const { compileTypeScript, watchTypeScript } = require('@github/codeql-gulp-tasks');
exports.default = compileTypeScript;
exports.watchTypeScript = watchTypeScript;

View File

@@ -1,32 +0,0 @@
{
"name": "semmle-bqrs",
"description": "Parses Binary Query Result Sets generated by CodeQL",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"main": "./out/index",
"files": [
"out/**",
"package.json"
],
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"leb": "^0.3.0",
"reflect-metadata": "~0.1.13",
"semmle-io": "^0.0.1"
},
"devDependencies": {
"@types/node": "^12.0.8",
"@github/codeql-gulp-tasks": "^0.0.4",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,407 +0,0 @@
import { ResultSetSchema, LocationStyle, ColumnTypeKind } from "./bqrs-schema";
import { ResultSetsReader, ResultSetReader } from "./bqrs-file";
import { ElementBase, ColumnValue } from "./bqrs-results";
/**
* Represents a binding to all remaining columns, starting at the column index specified by
* `startColumn`.
*/
export interface RestColumnIndex {
startColumn: number
}
/**
* Indentifies the result column to which a property is bound. May be the index of a specific
* column, or an instance of `RestColumnIndex` to bind to all remaining columns.
*/
export type ColumnIndex = number | RestColumnIndex;
/**
* Options that can be specified for a `@qlTable` attribute.
*/
export interface TableOptions {
/**
* The name of the table to bind to. If multiple values are specified, the property is bound to
* the the table whose name is earliest in the list.
*/
name?: string | string[];
}
export enum QLOption {
Required = 'required',
Optional = 'optional',
Forbidden = 'forbidden'
}
/**
* Options that can be specified for a `@qlElement` attribute.
*/
export interface ElementOptions {
label?: QLOption;
location?: QLOption;
}
/**
 * An attribute that binds the target property to a result column representing a QL element.
 * Label and location default to `QLOption.Required` when not specified.
 * @param index Index of the column to be bound.
 * @param options Binding options.
 */
export function qlElement(index: ColumnIndex, options: ElementOptions = {}): PropertyDecorator {
  const label = options.label ? options.label : QLOption.Required;
  const location = options.location ? options.location : QLOption.Required;
  return (proto: any, key: PropertyKey): void => {
    column(proto, {
      key,
      index,
      type: 'e',
      options: { label, location }
    });
  };
}
/**
 * An attribute that binds the target property to a result column containing a QL string.
 * @param index Index of the column to be bound.
 */
export function qlString(index: ColumnIndex): PropertyDecorator {
  return (proto: any, key: PropertyKey): void => {
    column(proto, {
      key,
      index,
      type: 's'
    });
  };
}
/**
 * An attribute that binds the target property to a set of result columns. The individual
 * columns are bound to the decorated properties of `type`.
 * @param index Index of the first column to be bound.
 * @param type The type of the property.
 */
export function qlTuple(index: ColumnIndex, type: { new(): any }): PropertyDecorator {
  return (proto: any, key: PropertyKey): void => {
    column(proto, {
      key,
      index,
      type
    });
  };
}
type PropertyKey = string | symbol;
interface ColumnProperty {
key: PropertyKey;
index: ColumnIndex;
type: ColumnTypeKind | { new(): any };
}
interface ElementProperty extends ColumnProperty {
type: 'e';
options: Required<ElementOptions>;
}
/** Type guard: true when `property` is bound to a QL element column (`'e'`). */
function isElement(property: ColumnProperty): property is ElementProperty {
  return property.type === 'e';
}
const columnPropertiesSymbol = Symbol('columnProperties');
type PropertyDecorator = (proto: any, key: PropertyKey) => void;
/**
 * Records a column binding for one decorated property, appending it to the
 * per-prototype list stored under `columnPropertiesSymbol` (creating the list
 * on first use).
 */
function column<T extends ColumnProperty>(proto: any, property: T): void {
  let columnProperties: ColumnProperty[] | undefined = Reflect.getMetadata(columnPropertiesSymbol, proto);
  if (columnProperties === undefined) {
    columnProperties = [];
    Reflect.defineMetadata(columnPropertiesSymbol, columnProperties, proto);
  }
  columnProperties.push(property);
}
interface TableProperty {
key: PropertyKey;
tableNames: string[];
rowType: any;
}
const tablePropertiesSymbol = Symbol('tableProperties');
/**
 * An attribute that binds the target property to the contents of a result table.
 * When no name is given, the property's own name is used as the table name.
 * @param rowType The type representing a single row in the bound table. The type of the target
 * property must be an array of this type.
 * @param options Binding options.
 */
export function qlTable(rowType: any, options?: TableOptions): any {
  return (proto, key: PropertyKey) => {
    const realOptions = options || {};
    // Normalize the name option to a list of candidate table names.
    let names: string[];
    if (realOptions.name === undefined) {
      names = [key.toString()];  // Fixed: statement previously relied on ASI (no semicolon).
    }
    else if (typeof realOptions.name === 'string') {
      names = [realOptions.name];
    }
    else {
      names = realOptions.name;
    }

    // Append to the per-prototype table list, creating it on first use.
    let tableProperties: TableProperty[] | undefined = Reflect.getMetadata(tablePropertiesSymbol, proto);
    if (tableProperties === undefined) {
      tableProperties = [];
      Reflect.defineMetadata(tablePropertiesSymbol, tableProperties, proto);
    }
    tableProperties.push({
      key,
      tableNames: names,
      rowType
    });
  };
}
type ParseTupleAction = (src: readonly ColumnValue[], dest: any) => void;
type TupleParser<T> = (src: readonly ColumnValue[]) => T;
/**
 * A result set whose raw tuples are converted into instances of a caller-supplied
 * row type by a pre-built `tupleParser`.
 */
export class CustomResultSet<TTuple> {
  public constructor(private reader: ResultSetReader,
    private readonly tupleParser: TupleParser<TTuple>) {
  }

  /** Streams each raw tuple from the underlying reader, parsed into a `TTuple`. */
  public async* readTuples(): AsyncIterableIterator<TTuple> {
    for await (const tuple of this.reader.readTuples()) {
      yield this.tupleParser(tuple);
    }
  }
}
/**
 * Binds the `@ql*`-decorated properties of a row type to the columns of a single result
 * set, producing a `TupleParser` that converts each raw tuple into an instance of that type.
 *
 * Binding validates that every column of the result set is consumed by exactly one
 * property, and that element columns provide exactly the label/location data their
 * bindings demand.
 */
class CustomResultSetBinder {
  /** For each column of the schema, whether it has been bound to a property yet. */
  private readonly boundColumns: boolean[];

  private constructor(private readonly rowType: { new(): any },
    private readonly schema: ResultSetSchema) {

    this.boundColumns = Array(schema.columns.length).fill(false);
  }

  /**
   * Creates a `CustomResultSet` whose tuples are parsed into instances of `rowType`.
   * @param reader Reader for the result set to bind.
   * @param rowType Type representing a single row of the result set.
   * @throws Error if the decorated properties do not exactly cover the schema's columns.
   */
  public static bind<TTuple>(reader: ResultSetReader, rowType: { new(): TTuple }):
    CustomResultSet<TTuple> {

    const binder = new CustomResultSetBinder(rowType, reader.schema);
    const tupleParser = binder.bindRoot<TTuple>();
    return new CustomResultSet<TTuple>(reader, tupleParser);
  }

  /** Binds the root row type and verifies that no column of the schema was left unbound. */
  private bindRoot<TTuple>(): TupleParser<TTuple> {
    const { action } = this.bindObject(this.rowType, 0, true);
    const unboundColumnIndex = this.boundColumns.indexOf(false);
    if (unboundColumnIndex >= 0) {
      throw new Error(`Column '${this.schema.name}[${unboundColumnIndex}]' is not bound to a property.`);
    }

    return tuple => {
      const result = new this.rowType();
      action(tuple, result);
      return result;
    };
  }

  /**
   * Verifies that an element column's optional data (label or location) matches what the
   * property's binding options require.
   * @param index Index of the column being checked (for error messages).
   * @param propertyName Which piece of element data is being checked.
   * @param hasProperty Whether the column actually provides that data.
   * @param expectsProperty Whether the binding requires, permits, or forbids that data.
   */
  private checkElementProperty(index: ColumnIndex, propertyName: 'location' | 'label',
    hasProperty: boolean, expectsProperty: QLOption): void {

    switch (expectsProperty) {
      case QLOption.Required:
        if (!hasProperty) {
          throw new Error(`Element column '${this.schema.name}[${index}]' does not have the required '${propertyName}' property.`);
        }
        break;

      case QLOption.Forbidden:
        // Fixed: this branch previously tested `!hasProperty`, throwing when the property
        // was *missing* — the inverse of "Forbidden", and contradicting the message below.
        if (hasProperty) {
          throw new Error(`Element column '${this.schema.name}[${index}]' has unexpected '${propertyName}' property.`);
        }
        break;

      case QLOption.Optional:
        break;
    }
  }

  /**
   * Binds all decorated properties of `type` to columns starting at `startIndex`.
   * At most one property may be bound to the "rest" of the columns (`...`), and only
   * on the root type; it consumes every column from its start index to the end of
   * the schema.
   * @returns The combined parse action and the index of the last column bound.
   */
  private bindObject(type: { new(): any }, startIndex: number, isRoot: boolean): {
    action: ParseTupleAction,
    lastColumn: number
  } {
    const columnProperties: ColumnProperty[] | undefined =
      Reflect.getMetadata(columnPropertiesSymbol, type.prototype);
    if (columnProperties === undefined) {
      throw new Error(`Type '${type.toString()}' does not have any properties decorated with '@column'.`);
    }

    const actions: ParseTupleAction[] = [];
    let restProperty: ColumnProperty | undefined = undefined;
    let lastColumn = startIndex;
    for (const property of columnProperties) {
      if (typeof property.index === 'object') {
        // A `RestColumnIndex` binding; defer it until the fixed-index bindings are done.
        if (!isRoot) {
          throw new Error(`Type '${type.toString()}' has a property bound to '...', but is not the root type.`);
        }
        if (restProperty !== undefined) {
          throw new Error(`Type '${type.toString()}' has multiple properties bound to '...'.`);
        }
        restProperty = property;
      }
      else {
        const index = property.index + startIndex;
        const { action, lastColumn: lastChildColumn } = this.bindColumn(index, type, property,
          property.key);
        actions.push(action);
        lastColumn = Math.max(lastColumn, lastChildColumn);
      }
    }

    if (restProperty !== undefined) {
      // Bind every remaining column from the rest binding's start to the end of the
      // schema, collecting the parsed values into an array on the target property.
      const restStart = (<RestColumnIndex>restProperty.index).startColumn;
      let index = restStart;
      let elementIndex = 0;
      const elementActions: ParseTupleAction[] = [];
      while (index < this.schema.columns.length) {
        const { action, lastColumn: lastChildColumn } = this.bindColumn(index, type, restProperty, elementIndex);
        elementActions.push(action);
        index = lastChildColumn + 1;
        elementIndex++;
      }
      const key = restProperty.key;
      actions.push((src, dest) => {
        const destArray = Array(elementActions.length);
        elementActions.forEach(action => action(src, destArray));
        dest[key] = destArray;
      });
    }

    return {
      action: (src, dest) => actions.forEach(action => action(src, dest)),
      lastColumn: lastColumn
    };
  }

  /**
   * Binds a single property (or rest-binding element) at column `index`. A property
   * with a scalar column type binds one column; a property with a tuple type binds
   * a run of columns via its own decorated properties.
   * @returns The parse action and the index of the last column consumed.
   */
  private bindColumn(index: number, type: new () => any, property: ColumnProperty,
    key: PropertyKey | number): {
    action: ParseTupleAction,
    lastColumn: number
  } {
    if ((index < 0) || (index >= this.schema.columns.length)) {
      throw new Error(`No matching column '${index}' found for property '${type.toString()}.${property.key.toString()}' when binding root type '${this.rowType.toString()}'.`);
    }
    if (typeof property.type === 'string') {
      // This property is bound to a single column
      return {
        action: this.bindSingleColumn(index, property, type, key),
        lastColumn: index
      };
    }
    else {
      // This property is a tuple that has properties that are bound to columns.
      const propertyType = property.type;
      const { action: objectParser, lastColumn: lastChildColumn } = this.bindObject(propertyType, index, false);
      return {
        action: (src, dest) => {
          const destObject = new propertyType();
          objectParser(src, destObject);
          dest[key] = destObject;
        },
        lastColumn: lastChildColumn
      };
    }
  }

  /**
   * Binds a scalar property to the single column at `index`, validating the column's
   * type against the property's declared type and, for element columns, checking the
   * label/location requirements.
   * @throws Error if the column is already bound or its type does not match.
   */
  private bindSingleColumn(index: number, property: ColumnProperty, type: new () => any,
    key: PropertyKey | number): ParseTupleAction {

    if (this.boundColumns[index]) {
      // Fixed message: the *column* was bound to multiple *properties*, not the reverse.
      throw new Error(`Column '${this.schema.name}[${index}]' is bound to multiple properties in root type '${this.rowType.toString()}'.`);
    }
    const column = this.schema.columns[index];
    if (column.type.type !== property.type) {
      throw new Error(`Column '${this.schema.name}[${index}]' has type '${column.type.type}', but property '${type.toString()}.${property.key.toString()}' expected type '${property.type}'.`);
    }
    this.boundColumns[index] = true;

    if (isElement(property) && (column.type.type === 'e')) {
      const hasLabel = column.type.hasLabel;
      this.checkElementProperty(index, 'label', hasLabel, property.options.label);
      const hasLocation = column.type.locationStyle !== LocationStyle.None;
      this.checkElementProperty(index, 'location', hasLocation, property.options.location);

      // Copy only the element fields the column actually provides.
      return (src, dest) => {
        const srcElement = <ElementBase>src[index];
        const destElement: ElementBase = {
          id: srcElement.id
        };
        if (hasLabel) {
          destElement.label = srcElement.label;
        }
        if (hasLocation) {
          destElement.location = srcElement.location;
        }
        dest[key] = destElement;
      };
    }
    else {
      // Scalar column: copy the raw value through unchanged.
      return (src, dest) => {
        dest[key] = src[index];
      };
    }
  }
}
type ArrayElementType<T> = T extends Array<infer U> ? U : never;
export type CustomResultSets<T> = {
[P in keyof T]: CustomResultSet<ArrayElementType<T[P]>>;
}
/**
 * Binds every result set in `reader` to the matching `@qlTable`-decorated property of
 * `type`, returning one `CustomResultSet` per property.
 * @throws Error when a result set has no matching property, when two result sets match
 * the same property, or when a decorated property matches no result set.
 */
export function createCustomResultSets<T>(reader: ResultSetsReader, type: { new(): T }):
  CustomResultSets<T> {

  const tableProperties: TableProperty[] | undefined = Reflect.getMetadata(tablePropertiesSymbol, type.prototype);
  if (tableProperties === undefined) {
    throw new Error(`Type '${type.toString()}' does not have any properties decorated with '@table'.`);
  }

  const customResultSets: Partial<CustomResultSets<T>> = {};
  const boundProperties = new Set<PropertyKey>();
  for (const resultSet of reader.resultSets) {
    const matchingProperty = findPropertyForTable(resultSet.schema, tableProperties);
    if (matchingProperty === undefined) {
      throw new Error(`No matching property found for result set '${resultSet.schema.name}'.`);
    }
    if (boundProperties.has(matchingProperty.key)) {
      throw new Error(`Multiple result sets bound to property '${matchingProperty.key.toString()}'.`);
    }
    boundProperties.add(matchingProperty.key);
    customResultSets[matchingProperty.key] = CustomResultSetBinder.bind(resultSet,
      matchingProperty.rowType);
  }

  // Every decorated property must have found a table.
  const unbound = tableProperties.find(tableProperty => !boundProperties.has(tableProperty.key));
  if (unbound !== undefined) {
    throw new Error(`No matching table found for property '${unbound.key.toString()}'.`);
  }

  return <CustomResultSets<T>>customResultSets;
}
/**
 * Finds the table property whose candidate names include the given result set's name.
 * The compiler's `#select` result set is matched under the name `select`.
 * @returns The matching property, or `undefined` if none matches.
 */
function findPropertyForTable(resultSet: ResultSetSchema, tableProperties: TableProperty[]):
  TableProperty | undefined {
  const tableName = resultSet.name === '#select' ? 'select' : resultSet.name;
  // Fixed: use `.some` for the membership test instead of `.find`, whose returned
  // element was only being used for its truthiness (and would misbehave for any
  // falsy name such as the empty string).
  return tableProperties.find(tableProperty => tableProperty.tableNames.some(name => name === tableName));
}

View File

@@ -1,191 +0,0 @@
import { RandomAccessReader, StreamDigester } from 'semmle-io';
import { parseResultSetsHeader, StringPool, parseResultSetSchema, readTuples } from './bqrs-parse';
import { ResultSetsSchema, ResultSetSchema } from './bqrs-schema';
import { ColumnValue } from './bqrs-results';
/**
* The result of parsing data from a specific file region.
*/
interface RegionResult<T> {
/** The parsed data. */
result: T,
/** The exclusive end position of the parsed data in the file. */
finalOffset: number
}
/** Reads data from the specified region of the file, and parses it using the given function. */
async function inFileRegion<T>(
  file: RandomAccessReader,
  start: number,
  end: number | undefined,
  parse: (d: StreamDigester) => Promise<T>
): Promise<RegionResult<T>> {
  const stream = file.readStream(start, end);
  try {
    const digester = StreamDigester.fromChunkIterator(stream);
    const parsed = await parse(digester);
    return {
      result: parsed,
      finalOffset: start + digester.position
    };
  }
  finally {
    // Always release the stream, even when parsing throws.
    stream.dispose();
  }
}
/**
 * A single result set in a BQRS file.
 */
export interface ResultSetReader {
  /**
   * The schema that describes the result set.
   */
  readonly schema: ResultSetSchema;
  /**
   * Reads all of the tuples in the result set.
   * Tuples are parsed on demand as the returned iterator is consumed.
   */
  readTuples(): AsyncIterableIterator<ColumnValue[]>;
}
/**
 * A Binary Query Result Sets ("BQRS") file.
 *
 * @remarks
 * Allows independent access to individual tables without having to parse the entire file up front.
 */
export interface ResultSetsReader {
  /** Schema describing every result set in the file. */
  readonly schema: ResultSetsSchema;
  /** Readers for each result set, in file order. */
  readonly resultSets: readonly ResultSetReader[];
  /** Finds the result set with the given name, or `undefined` if there is none. */
  findResultSetByName(name: string): ResultSetReader | undefined;
}
/**
 * Metadata for a single `ResultSet` in a BQRS file.
 * Does not contain the result tuples themselves.
 * Includes the offset and length of the tuple data in the file,
 * which can be used to read the tuples.
 */
interface ResultSetInfo {
  schema: ResultSetSchema;
  /** Absolute file offset of the start of the tuple data. */
  rowsOffset: number;
  /** Length in bytes of the tuple data. */
  rowsLength: number;
}
class ResultSetReaderImpl implements ResultSetReader {
  public readonly schema: ResultSetSchema;
  private readonly rowsOffset: number;
  private readonly rowsLength: number;

  public constructor(private readonly resultSets: ResultSetsReaderImpl, info: ResultSetInfo) {
    this.schema = info.schema;
    this.rowsOffset = info.rowsOffset;
    this.rowsLength = info.rowsLength;
  }

  /**
   * Streams the tuple region of this result set from the file and parses it
   * lazily, one tuple per iteration. The underlying stream is disposed when
   * iteration finishes or fails.
   */
  public async* readTuples(): AsyncIterableIterator<ColumnValue[]> {
    const regionEnd = this.rowsOffset + this.rowsLength;
    const stream = this.resultSets.file.readStream(this.rowsOffset, regionEnd);
    try {
      const digester = StreamDigester.fromChunkIterator(stream);
      const stringPool = await this.resultSets.getStringPool();
      yield* readTuples(digester, this.schema, stringPool);
    }
    finally {
      stream.dispose();
    }
  }
}
class ResultSetsReaderImpl implements ResultSetsReader {
  // Lazily-loaded shared string pool; populated on first call to `getStringPool()`.
  private stringPool?: StringPool = undefined;
  private readonly _resultSets: ResultSetReaderImpl[];
  private constructor(public readonly file: RandomAccessReader,
    public readonly schema: ResultSetsSchema, resultSets: ResultSetInfo[],
    private readonly stringPoolOffset: number) {
    this._resultSets = resultSets.map((info) => {
      return new ResultSetReaderImpl(this, info);
    });
  }
  /** Readers for each result set, in file order. */
  public get resultSets(): readonly ResultSetReader[] {
    return this._resultSets;
  }
  /** Finds the result set with the given name, or `undefined` if there is none. */
  public findResultSetByName(name: string): ResultSetReader | undefined {
    return this._resultSets.find((resultSet) => resultSet.schema.name === name);
  }
  /**
   * Reads the shared string pool from the file.
   * Safe to call repeatedly; the pool is read once and cached.
   */
  public async getStringPool(): Promise<StringPool> {
    if (this.stringPool === undefined) {
      const { result: stringPoolBuffer } = await inFileRegion(this.file, this.stringPoolOffset,
        this.stringPoolOffset + this.schema.stringPoolSize,
        async d => await d.read(this.schema.stringPoolSize));
      this.stringPool = new StringPool(stringPoolBuffer);
    }
    return this.stringPool;
  }
  /**
   * Parses the header and per-result-set schemas of a BQRS file, recording the
   * offsets of the string pool and of each result set's tuple data so they can
   * be read on demand later.
   */
  public static async open(file: RandomAccessReader): Promise<ResultSetsReader> {
    // Parse the header of the entire BQRS file.
    const { result: header, finalOffset: stringPoolOffset } =
      await inFileRegion(file, 0, undefined, d => parseResultSetsHeader(d));
    // The header is followed by a shared string pool.
    // We have saved the offset and length of the string pool within the file,
    // so we can read it later when needed.
    // For now, skip over the string pool to reach the starting point of the first result set.
    let currentResultSetOffset = stringPoolOffset + header.stringPoolSize;
    // Parse information about each result set within the file.
    const resultSets: ResultSetInfo[] = [];
    for (let resultSetIndex = 0; resultSetIndex < header.resultSetCount; resultSetIndex++) {
      // Read the length of this result set (LEB128-encoded).
      // Note: reading length and schema together from a file region may be more efficient.
      // Reading them separately just makes it easier to compute the
      // starting offset and length of the schema.
      const { result: resultSetLength, finalOffset: resultSetSchemaOffset } =
        await inFileRegion(file, currentResultSetOffset, undefined, d => d.readLEB128UInt32());
      // Read the schema of this result set.
      const { result: resultSetSchema, finalOffset: resultSetRowsOffset } =
        await inFileRegion(file, resultSetSchemaOffset, undefined, d => parseResultSetSchema(d));
      const resultSetSchemaLength = resultSetRowsOffset - resultSetSchemaOffset;
      // The schema is followed by the tuple/row data for the result set.
      // We save the offset and length of the tuple data within the file,
      // so we can read it later when needed.
      const info: ResultSetInfo = {
        // length of result set = length of schema + length of tuple data
        // The bytes that encode the length itself are not counted.
        rowsLength: resultSetLength - resultSetSchemaLength,
        rowsOffset: resultSetRowsOffset,
        schema: resultSetSchema,
      };
      resultSets.push(info);
      // Skip over the tuple data of the current result set,
      // to reach the starting offset of the next result set.
      currentResultSetOffset = info.rowsOffset + info.rowsLength;
    }
    const schema: ResultSetsSchema = {
      version: header.version,
      stringPoolSize: header.stringPoolSize,
      resultSets: resultSets.map(resultSet => resultSet.schema)
    };
    const reader = new ResultSetsReaderImpl(file, schema, resultSets, stringPoolOffset);
    return reader;
  }
}
/** Opens a BQRS file for reading through the given random-access reader. */
export function open(file: RandomAccessReader): Promise<ResultSetsReader> {
  const pendingReader = ResultSetsReaderImpl.open(file);
  return pendingReader;
}

View File

@@ -1,209 +0,0 @@
import { decodeUInt32 } from 'leb';
import { StreamDigester } from 'semmle-io';
import { ColumnValue, RawLocationValue } from './bqrs-results';
import { ColumnSchema, ColumnType, LocationStyle, PrimitiveTypeKind, ResultSetSchema } from './bqrs-schema';
/**
* bqrs-parse.ts
* -------
*
* Parsing Binary Query Result Set files.
* See [[https://git.semmle.com/Semmle/code/tree/master/queryserver-client/src/com/semmle/api/result/BinaryQueryResultSets.java]].
*/
// Format version expected for each individual result set's schema.
const RESULT_SET_VERSION = 1;
// Format version expected for the result sets file as a whole.
const RESULT_SETS_VERSION = 2;
/** Callback invoked once per parsed tuple by `parseTuples`. */
export type TupleParser = (tuple: readonly ColumnValue[]) => void;
/** Fields of the header at the start of a BQRS file. */
export interface ResultSetsHeader {
  version: number,
  /** Number of result sets stored in the file. */
  resultSetCount: number,
  /** Size in bytes of the shared string pool. */
  stringPoolSize: number
}
/**
 * Parses the type descriptor of a single column.
 * Element columns ('e') carry extra metadata; primitives are a single type tag.
 */
async function parseResultColumnType(d: StreamDigester): Promise<ColumnType> {
  const kindChar = await d.readASCIIChar();
  if (kindChar !== 'e') {
    // A plain primitive column: the tag character is the whole descriptor.
    return { type: <PrimitiveTypeKind>kindChar };
  }
  // Element columns encode the primitive type of the id, a label flag,
  // and a location style, in that order.
  const primitiveType = (await d.readASCIIChar()) as PrimitiveTypeKind;
  const hasLabel = (await d.readByte()) !== 0;
  const locationStyle = await d.readByte();
  return { type: 'e', locationStyle, hasLabel, primitiveType };
}
/** Parses the column-count-prefixed list of column descriptors for a result set. */
async function parseColumnSchema(d: StreamDigester): Promise<ColumnSchema[]> {
  const columnCount = await d.readLEB128UInt32();
  const columns: ColumnSchema[] = [];
  while (columns.length < columnCount) {
    const name = await readLengthPrefixedString(d);
    const type = await parseResultColumnType(d);
    columns.push({ name, type });
  }
  return columns;
}
/**
 * Converts an encoded string length to the actual byte length.
 * String lengths are stored off-by-one: an encoded value of N means N - 1 bytes.
 *
 * XXX why is a '(-1)-length' string (a single 0x00 byte) a possibility? Does it
 * mean something different from a 0-length string (a single 0x01 byte)? Either
 * way, clamp it to 0. (The original cast the already-number parameter with
 * `as number`, which was redundant and has been removed.)
 */
function getTrueStringLength(encodedLength: number): number {
  return Math.max(0, encodedLength - 1);
}
/** Read-only view of the BQRS shared string pool. */
export class StringPool {
  public constructor(private readonly buffer: Buffer) {
  }

  /** Decodes the length-prefixed UTF-8 string stored at `offset` in the pool. */
  public getString(offset: number): string {
    //TODO: Memoize?
    const decoded = decodeUInt32(this.buffer, offset);
    const length = getTrueStringLength(decoded.value);
    const start = decoded.nextIndex;
    return this.buffer.toString('utf8', start, start + length);
  }
}
/**
 * Parses the file-level BQRS header: version, result set count, string pool size.
 * @throws if the file's version does not match `RESULT_SETS_VERSION`.
 */
export async function parseResultSetsHeader(d: StreamDigester): Promise<ResultSetsHeader> {
  const version = await d.readLEB128UInt32();
  if (version !== RESULT_SETS_VERSION) {
    throw new Error(`Mismatched binary query results version. Got '${version}', but expected '${RESULT_SETS_VERSION}'.`);
  }
  const resultSetCount = await d.readLEB128UInt32();
  const stringPoolSize = await d.readLEB128UInt32();
  return { version, stringPoolSize, resultSetCount };
}
/** Reads a string stored inline as a LEB128 length prefix followed by UTF-8 bytes. */
async function readLengthPrefixedString(d: StreamDigester): Promise<string> {
  const byteLength = getTrueStringLength(await d.readLEB128UInt32());
  return d.readUTF8String(byteLength);
}
/**
 * Parses one result set's schema: version, name, tuple count, and columns.
 * @throws if the result set's version does not match `RESULT_SET_VERSION`.
 */
export async function parseResultSetSchema(d: StreamDigester): Promise<ResultSetSchema> {
  const version = await d.readLEB128UInt32();
  if (version !== RESULT_SET_VERSION) {
    throw new Error(`Mismatched binary query result version. Got '${version}', but expected '${RESULT_SET_VERSION}'.`);
  }
  const name = await readLengthPrefixedString(d);
  const tupleCount = await d.readLEB128UInt32();
  const columns = await parseColumnSchema(d);
  return { version, name, tupleCount, columns };
}
/** Reads a string column value, stored as an offset into the shared string pool. */
async function parseString(d: StreamDigester, pool: StringPool): Promise<string> {
  const poolOffset = await d.readLEB128UInt32();
  return pool.getString(poolOffset);
}
/**
 * Reads an element's location data according to its declared `LocationStyle`.
 * Returns `undefined` for `None`; throws for `WholeFile`, which never appears
 * in BQRS files directly.
 */
async function parseLocation(d: StreamDigester, t: LocationStyle, pool: StringPool):
  Promise<RawLocationValue | undefined> {
  if (t === LocationStyle.None) {
    return undefined;
  }
  if (t === LocationStyle.String) {
    return { t, loc: await parseString(d, pool) };
  }
  if (t === LocationStyle.FivePart) {
    const file = await parseString(d, pool);
    const lineStart = await d.readLEB128UInt32();
    const colStart = await d.readLEB128UInt32();
    const lineEnd = await d.readLEB128UInt32();
    const colEnd = await d.readLEB128UInt32();
    return { t, file, lineStart, colStart, lineEnd, colEnd };
  }
  if (t === LocationStyle.WholeFile) {
    throw new Error('Whole-file locations should appear as string locations in BQRS files.');
  }
  throw new Error(`Unknown Location Style ${t}`);
}
/** Reads a single primitive column value according to its type tag. */
async function parsePrimitiveColumn(d: StreamDigester, type: PrimitiveTypeKind,
  pool: StringPool): Promise<ColumnValue> {
  switch (type) {
    case 's':
    case 'u':
      // Strings and URLs are both stored as string-pool references.
      return parseString(d, pool);
    case 'b':
      return (await d.readByte()) !== 0;
    case 'i': {
      // `int` column values are encoded as 32-bit unsigned LEB128, but are really 32-bit two's
      // complement signed integers. The easiest way to reinterpret from an unsigned int32 to a
      // signed int32 in JavaScript is to use a bitwise operator, which does this coercion on its
      // operands automatically.
      return (await d.readLEB128UInt32()) | 0;
    }
    case 'f':
      return d.readDoubleLE();
    case 'd':
      return d.readDate();
    default:
      throw new Error(`Unknown primitive column type '${type}'.`);
  }
}
/**
 * Reads a single column value of any type.
 * For element columns, the primitive id is followed by an optional label and
 * optional location, as dictated by the column type's flags.
 */
export async function parseColumn(d: StreamDigester, t: ColumnType, pool: StringPool):
  Promise<ColumnValue> {
  if (t.type === 'e') {
    // `const` rather than `let`: the value is never reassigned.
    const primitive = await parsePrimitiveColumn(d, t.primitiveType, pool);
    const label = t.hasLabel ? await parseString(d, pool) : undefined;
    const loc = await parseLocation(d, t.locationStyle, pool);
    return {
      id: <number | string>primitive,
      label: label,
      location: loc
    };
  }
  else {
    return parsePrimitiveColumn(d, t.type, pool);
  }
}
/**
 * Lazily yields every tuple of a result set, parsing each row as it is requested.
 * Each yielded array is freshly allocated and safe for the caller to retain.
 */
export async function* readTuples(d: StreamDigester, schema: ResultSetSchema,
  stringPool: StringPool): AsyncIterableIterator<ColumnValue[]> {
  for (let rowIndex = 0; rowIndex < schema.tupleCount; rowIndex++) {
    const tuple: ColumnValue[] = [];
    for (const column of schema.columns) {
      tuple.push(await parseColumn(d, column.type, stringPool));
    }
    yield tuple;
  }
}
/**
 * Parses every tuple of a result set, invoking `tupleParser` once per row.
 * The same scratch array is reused for every invocation, so the callback must
 * copy any values it wants to keep.
 */
export async function parseTuples(d: StreamDigester, schema: ResultSetSchema,
  stringPool: StringPool, tupleParser: TupleParser): Promise<void> {
  const { columns, tupleCount } = schema;
  // Pre-fill the scratch tuple with zeros so its element type stays
  // `ColumnValue` rather than `ColumnValue | undefined`.
  const scratch: ColumnValue[] = new Array(columns.length).fill(0);
  for (let row = 0; row < tupleCount; row++) {
    for (let col = 0; col < columns.length; col++) {
      scratch[col] = await parseColumn(d, columns[col].type, stringPool);
    }
    tupleParser(scratch);
  }
}

View File

@@ -1,114 +0,0 @@
import { LocationStyle } from "./bqrs-schema";
// See https://help.semmle.com/QL/learn-ql/ql/locations.html for how these are used.
/** A five-part (file, lineStart, colStart, lineEnd, colEnd) source location. */
export interface FivePartLocation {
  t: LocationStyle.FivePart;
  file: string;
  lineStart: number;
  colStart: number;
  lineEnd: number;
  colEnd: number;
}
/** An opaque string location, e.g. a `file://...` URL produced by `getURL()` predicates. */
export interface StringLocation {
  t: LocationStyle.String;
  loc: string;
}
/**
 * A location representing an entire filesystem resource.
 * This is usually derived from a `StringLocation` with the entire filesystem URL.
 */
export interface WholeFileLocation {
  t: LocationStyle.WholeFile;
  file: string;
}
/** Location forms that can appear directly in a BQRS file. */
export type RawLocationValue = FivePartLocation | StringLocation;
/** All location forms, including the client-side-only `WholeFileLocation`. */
export type LocationValue = RawLocationValue | WholeFileLocation;
/** A location that may be resolved to a source code element. */
export type ResolvableLocationValue = FivePartLocation | WholeFileLocation;
/**
 * The CodeQL filesystem libraries use this pattern in `getURL()` predicates
 * to describe the location of an entire filesystem resource.
 * Such locations appear as `StringLocation`s instead of `FivePartLocation`s.
 *
 * Folder resources also get similar URLs, but with the `folder` scheme.
 * They are deliberately ignored here, since there is no suitable location to show the user.
 *
 * Capture groups: 1 = file path; 2-5 = lineStart, colStart, lineEnd, colEnd.
 */
const FILE_LOCATION_REGEX = /file:\/\/(.+):([0-9]+):([0-9]+):([0-9]+):([0-9]+)/;
/**
 * Gets a resolvable source file location for the specified `LocationValue`, if possible.
 * @param loc The location to test.
 * @returns The resolvable location, or `undefined` when the value is missing,
 * has no file, or cannot be interpreted.
 */
export function tryGetResolvableLocation(
  loc: LocationValue | undefined
): ResolvableLocationValue | undefined {
  if (loc === undefined) {
    return undefined;
  }
  switch (loc.t) {
    case LocationStyle.FivePart:
    case LocationStyle.WholeFile:
      // Resolvable only when a file path is present.
      return loc.file ? loc : undefined;
    case LocationStyle.String:
      // String locations may encode a file URL; try to decode it.
      return loc.loc ? tryGetLocationFromString(loc) : undefined;
    default:
      return undefined;
  }
}
/**
 * Attempts to decode a `file://path:a:b:c:d` string location into a resolvable
 * location. An all-zero position is treated as a whole-file location.
 */
export function tryGetLocationFromString(
  loc: StringLocation
): ResolvableLocationValue | undefined {
  const matches = FILE_LOCATION_REGEX.exec(loc.loc);
  if (!matches || !matches[1]) {
    return undefined;
  }
  if (isWholeFileMatch(matches)) {
    return {
      t: LocationStyle.WholeFile,
      file: matches[1],
    };
  }
  return {
    t: LocationStyle.FivePart,
    file: matches[1],
    lineStart: Number(matches[2]),
    colStart: Number(matches[3]),
    lineEnd: Number(matches[4]),
    colEnd: Number(matches[5]),
  };
}
/** A `0:0:0:0` position is the convention for "the entire file". */
function isWholeFileMatch(matches: RegExpExecArray): boolean {
  return matches.slice(2, 6).every(part => part === "0");
}
/** Common shape of an element (entity) column value. */
export interface ElementBase {
  /** The element's primitive id value. */
  id: PrimitiveColumnValue;
  /** Display label; present only when the column schema declares labels. */
  label?: string;
  /** Source location; present only when the column schema declares locations. */
  location?: LocationValue;
}
/** An element whose label is known to be present. */
export interface ElementWithLabel extends ElementBase {
  label: string;
}
/** An element whose location is known to be present. */
export interface ElementWithLocation extends ElementBase {
  location: LocationValue;
}
/** An element with both a label and a location. */
export interface Element extends Required<ElementBase> {}
/** Values of primitive (non-element) columns. */
export type PrimitiveColumnValue = string | boolean | number | Date;
/** Any single column value in a tuple. */
export type ColumnValue = PrimitiveColumnValue | ElementBase;

View File

@@ -1,66 +0,0 @@
/** How an element column's location data is encoded. */
export enum LocationStyle {
  /** No location stored. */
  None = 0,
  /** Location stored as a single string. */
  String,
  /** Location stored as file/lineStart/colStart/lineEnd/colEnd. */
  FivePart,
  /** Does not occur in BQRS files. Used only to distinguish whole-file locations in client code. */
  WholeFile
}
/**
 * A primitive type (any type other than an element).
 */
export type PrimitiveTypeKind = 's' | 'b' | 'i' | 'f' | 'd' | 'u';
/**
 * A kind of type that a column may have.
 */
export type ColumnTypeKind = PrimitiveTypeKind | 'e';
/**
 * A column type that is a primitive type.
 */
export interface PrimitiveColumnType {
  type: PrimitiveTypeKind;
}
/**
 * A column type that is an element type.
 */
export interface ElementColumnType {
  type: 'e';
  /** The primitive type of the element's id value. */
  primitiveType: PrimitiveTypeKind;
  /** How the element's location, if any, is encoded. */
  locationStyle: LocationStyle;
  /** Whether each value carries a display label. */
  hasLabel: boolean;
}
/**
 * The type of a column.
 */
export type ColumnType = PrimitiveColumnType | ElementColumnType;
/**
 * The schema describing a single column in a `ResultSet`.
 */
export interface ColumnSchema {
  readonly name: string;
  readonly type: ColumnType;
}
/**
 * The schema of a single `ResultSet` in a BQRS file.
 */
export interface ResultSetSchema {
  readonly version: number;
  readonly name: string;
  readonly tupleCount: number;
  readonly columns: readonly ColumnSchema[];
}
/**
 * The schema describing the contents of a BQRS file.
 */
export interface ResultSetsSchema {
  readonly version: number,
  readonly stringPoolSize: number,
  readonly resultSets: readonly ResultSetSchema[]
}

View File

@@ -1,18 +0,0 @@
import { ResultSetSchema } from './bqrs-schema';
import { StreamDigester, ChunkIterator } from 'semmle-io';
import { parseResultSetsHeader, StringPool, parseResultSetSchema, parseTuples, TupleParser } from './bqrs-parse';
export async function parse(rs: ChunkIterator,
resultSetHandler: (resultSet: ResultSetSchema) => TupleParser): Promise<void> {
const d = StreamDigester.fromChunkIterator(rs);
const header = await parseResultSetsHeader(d);
const stringPool = new StringPool(await d.read(header.stringPoolSize));
for (let resultSetIndex = 0; resultSetIndex < header.resultSetCount; resultSetIndex++) {
await d.readLEB128UInt32(); // Length of result set. Unused.
const resultSetSchema = await parseResultSetSchema(d);
const tupleParser = resultSetHandler(resultSetSchema);
await parseTuples(d, resultSetSchema, stringPool, tupleParser);
}
}

View File

@@ -1,7 +0,0 @@
export * from './bqrs';
export * from './bqrs-custom';
export * from './bqrs-file';
export * from './bqrs-results';
export * from './bqrs-schema';
export * from './path-problem-query-results';
export * from './problem-query-results';

View File

@@ -1,49 +0,0 @@
import 'reflect-metadata';
import { Element } from './bqrs-results';
import { qlElement, qlString, qlTuple, qlTable } from './bqrs-custom';
import { ElementReference } from './problem-query-results';
/** One row of a path-problem query's alerts table (`select`/`problems`). */
export class PathProblemAlert {
  @qlElement(0)
  element: Element;
  @qlElement(1)
  source: Element;
  @qlElement(2)
  sink: Element;
  @qlString(3)
  message: string;
  // Trailing reference columns — presumably placeholders used by `message`; confirm.
  @qlTuple({ startColumn: 4 }, ElementReference)
  references?: ElementReference[];
}
/** One row of the `edges` table: a single step along a path. */
export class PathProblemEdge {
  @qlElement(0)
  predecessor: Element;
  @qlElement(1)
  successor: Element;
}
/** A key/value attribute attached to a path graph node. */
export class GraphProperty {
  @qlString(0)
  key: string;
  @qlString(1)
  value: string;
}
/** One row of the `nodes` table. */
export class PathProblemNode {
  @qlElement(0)
  node: Element;
  // There can really only be zero or one of these, but until we support optional columns, we'll
  // model it as a "rest" property.
  @qlTuple({ startColumn: 1 }, GraphProperty)
  properties?: GraphProperty[];
}
/** Typed view of the result sets produced by a path-problem query. */
export class PathProblemQueryResults {
  @qlTable(PathProblemAlert, { name: ['select', 'problems'] })
  problems: PathProblemAlert[];
  @qlTable(PathProblemNode)
  nodes: PathProblemNode[];
  @qlTable(PathProblemEdge)
  edges: PathProblemEdge[];
}

View File

@@ -1,24 +0,0 @@
import 'reflect-metadata';
import { Element } from './bqrs-results';
import { qlElement, qlString, qlTuple, qlTable } from './bqrs-custom';
/** An element together with the text that refers to it. */
export class ElementReference {
  @qlElement(0)
  element: Element;
  @qlString(1)
  text: string;
}
/** One row of a problem query's alerts table (`select`/`problems`). */
export class ProblemAlert {
  @qlElement(0)
  element: Element;
  @qlString(1)
  message: string;
  // Trailing reference columns — presumably placeholders used by `message`; confirm.
  @qlTuple({ startColumn: 2 }, ElementReference)
  references?: ElementReference[];
}
/** Typed view of the result sets produced by a problem (alert) query. */
export class ProblemQueryResults {
  @qlTable(ProblemAlert, { name: ['select', 'problems'] })
  problems: ProblemAlert[];
}

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1,7 +0,0 @@
'use strict';
// Register ts-node so the shared gulp tasks (authored in TypeScript) can be
// require()d without a separate compile step.
require('ts-node').register({});
const { compileTypeScript, watchTypeScript } = require('@github/codeql-gulp-tasks');
// Expose the shared tasks as this package's gulp entry points.
exports.default = compileTypeScript;
exports.watchTypeScript = watchTypeScript;

View File

@@ -1,32 +0,0 @@
{
"name": "semmle-io-node",
"description": "I/O utilities for the Node.js runtime",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"main": "./out/index",
"files": [
"out/**",
"package.json"
],
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"fs-extra": "^8.1.0",
"semmle-io": "^0.0.1"
},
"devDependencies": {
"@types/fs-extra": "^8.0.0",
"@types/node": "^12.0.8",
"@github/codeql-gulp-tasks": "^0.0.4",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,66 +0,0 @@
import * as fs from 'fs-extra';
import { ReadStream } from 'fs-extra';
import { RandomAccessReader, StreamReader } from 'semmle-io';
/**
 * A `RandomAccessReader` backed by a file descriptor.
 * Owns the descriptor: `dispose()` closes it.
 */
export class FileReader implements RandomAccessReader {
  private _fd?: number;

  private constructor(fd: number) {
    this._fd = fd;
  }

  /** Closes the underlying file descriptor. Safe to call more than once. */
  public dispose(): void {
    if (this._fd !== undefined) {
      fs.closeSync(this._fd);
      this._fd = undefined;
    }
  }

  /** The underlying file descriptor. Throws once the reader has been disposed. */
  public get fd(): number {
    if (this._fd === undefined) {
      throw new Error('Object disposed.');
    }
    return this._fd;
  }

  /**
   * Opens a read stream over the given byte range. `autoClose: false` keeps
   * the shared descriptor open; this reader remains responsible for closing it.
   */
  public readStream(start?: number, end?: number): StreamReader {
    const stream = fs.createReadStream('', {
      fd: this.fd,
      start,
      end,
      autoClose: false
    });
    return new FileStreamReader(stream);
  }

  /** Opens `file` for reading. The returned reader owns the descriptor. */
  public static async open(file: string): Promise<FileReader> {
    const fd: number = await fs.open(file, 'r');
    return new FileReader(fd); // Take ownership
  }
}
/**
 * Wraps a `ReadStream` as a disposable async byte iterator.
 */
class FileStreamReader implements StreamReader {
  private _stream?: ReadStream;

  public constructor(stream: ReadStream) {
    this._stream = stream;
  }

  public [Symbol.asyncIterator](): AsyncIterator<Uint8Array> {
    return this.stream[Symbol.asyncIterator]();
  }

  /**
   * Tears down the stream. The original implementation only dropped the
   * reference, leaving the stream reading and buffering in the background;
   * `destroy()` stops it and releases its buffers. The underlying fd stays
   * open because the stream was created with `autoClose: false` — the owning
   * `FileReader` is responsible for closing it.
   */
  public dispose(): void {
    if (this._stream !== undefined) {
      this._stream.destroy();
      this._stream = undefined;
    }
  }

  private get stream(): ReadStream {
    if (this._stream === undefined) {
      throw new Error('Object disposed.');
    }
    return this._stream;
  }
}

View File

@@ -1 +0,0 @@
export * from './file-reader';

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1,7 +0,0 @@
'use strict';
// Register ts-node so the shared gulp tasks (authored in TypeScript) can be
// require()d without a separate compile step.
require('ts-node').register({});
const { compileTypeScript, watchTypeScript } = require('@github/codeql-gulp-tasks');
// Expose the shared tasks as this package's gulp entry points.
exports.default = compileTypeScript;
exports.watchTypeScript = watchTypeScript;

View File

@@ -1,30 +0,0 @@
{
"name": "semmle-io",
"description": "I/O utilities",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"main": "./out/index",
"files": [
"out/**",
"package.json"
],
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"leb": "^0.3.0"
},
"devDependencies": {
"@types/node": "^12.0.8",
"@github/codeql-gulp-tasks": "^0.0.4",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,303 +0,0 @@
import * as leb from 'leb';
/**
* digester.ts
* -----------
*
* A wrapper around node's stream and buffer types to make reading the
* binary formats used by the QL query server a little more uniform
* and convenient.
*
* This works around limitations in using Node streams (whether 'paused' or 'flowing')
* with async/await. This code can be simplified if there is a convenient library for doing this.
*/
/** The shape of the chunk sources this digester consumes. */
export type ChunkIterator = AsyncIterable<Uint8Array>;
/** Creates the error thrown when a read is attempted past the end of the stream. */
function endOfStreamError(): Error {
  return new Error('Attempt to read past end of stream.');
}
// Shared zero-length buffer used as the initial chunk/seam placeholder.
const emptyBuffer = Buffer.alloc(0);
/**
 * A class to read and decode bytes out of a sequence of `Buffer`s provided by an async iterator.
 */
export class StreamDigester {
  // Minimum allocation for the scratch buffer used to stitch together reads
  // that span a chunk boundary.
  private static readonly MIN_SEAM_BUFFER_LENGTH = 256;
  // The chunk currently being consumed.
  private currentChunk = emptyBuffer;
  // Scratch buffer holding bytes copied across a chunk boundary (the "seam").
  private seamBuffer = emptyBuffer;
  // True once the underlying iterator has reported completion.
  private done = false;
  // Absolute stream position of the first byte of `currentChunk`.
  private positionOfCurrentChunk = 0;
  // Read cursor within `currentChunk`.
  private offsetInCurrentChunk = 0;
  private readonly chunks: AsyncIterator<Uint8Array>;
  private constructor(chunks: ChunkIterator) {
    this.chunks = chunks[Symbol.asyncIterator]();
  }
  /**
   * Create a `StreamDigester`.
   *
   * @param chunks An async iterator that provides the sequence of buffers from which to read.
   */
  public static fromChunkIterator(chunks: ChunkIterator): StreamDigester {
    return new StreamDigester(chunks);
  }
  /** Create a `StreamDigester` that reads from a single in-memory buffer. */
  public static fromBuffer(buffer: Buffer): StreamDigester {
    return new StreamDigester(StreamDigester.singleChunkIterator(buffer));
  }
  /** The absolute stream position of the next byte to be read. */
  public get position(): number {
    return this.positionOfCurrentChunk + this.offsetInCurrentChunk;
  }
  /** Wraps a single buffer as a one-element async chunk iterator. */
  private static async* singleChunkIterator(chunk: Buffer): AsyncIterableIterator<Buffer> {
    yield chunk;
  }
  /**
   * Gets the next chunk from the iterator, throwing an exception if there are no more chunks
   * available.
   */
  private async readNextChunk(): Promise<void> {
    if (this.done) {
      throw endOfStreamError();
    }
    const { value, done } = await this.chunks.next();
    if (done) {
      this.done = true;
      throw endOfStreamError();
    }
    this.positionOfCurrentChunk += this.currentChunk.length;
    this.currentChunk = Buffer.from(value);
    this.offsetInCurrentChunk = 0;
  }
  /** Number of bytes of `currentChunk` that have not yet been consumed. */
  private get bytesLeftInCurrentChunk(): number {
    return this.currentChunk.length - this.offsetInCurrentChunk;
  }
  /**
   * Returns a seam buffer of at least `byteCount` bytes, with the
   * `previousByteCount` pending bytes from `previousBuffer` (starting at
   * `previousOffset`) moved to its beginning.
   */
  private getSeamBuffer(byteCount: number, previousBuffer: Buffer, previousOffset: number,
    previousByteCount: number): Buffer {
    if (this.seamBuffer.length < byteCount) {
      // Start at double the current length, or `MIN_SEAM_BUFFER_LENGTH`, whichever is larger.
      let newSeamBufferLength = Math.max(this.seamBuffer.length * 2,
        StreamDigester.MIN_SEAM_BUFFER_LENGTH);
      while (newSeamBufferLength < byteCount) {
        newSeamBufferLength *= 2;
      }
      this.seamBuffer = Buffer.alloc(newSeamBufferLength);
    }
    if (previousByteCount > 0) {
      if (previousBuffer === this.seamBuffer) {
        // Pending bytes already live in the seam buffer; just slide them to the front.
        if (previousOffset !== 0) {
          previousBuffer.copyWithin(0, previousOffset, previousOffset + previousByteCount);
        }
      }
      else {
        previousBuffer.copy(this.seamBuffer, 0, previousOffset, previousOffset + previousByteCount);
      }
    }
    return this.seamBuffer;
  }
  /** Copies stream bytes into `buffer[start..end)`, pulling new chunks as needed. */
  private async fillBuffer(buffer: Buffer, start: number, end: number): Promise<void> {
    let destOffset = start;
    do {
      const bytesToCopy = Math.min(end - destOffset, this.bytesLeftInCurrentChunk);
      this.currentChunk.copy(buffer, destOffset, this.offsetInCurrentChunk,
        this.offsetInCurrentChunk + bytesToCopy);
      this.offsetInCurrentChunk += bytesToCopy;
      destOffset += bytesToCopy;
      if (destOffset < end) {
        await this.readNextChunk();
      }
    } while (destOffset < end);
  }
  /**
   * Implements an async read that spans multiple buffers.
   *
   * @param canReadFunc Callback function to determine how many bytes are required to complete the
   * read operation.
   * @param readFunc Callback function to read the requested data from a `Buffer`.
   */
  private async readAcrossSeam<T>(
    canReadFunc: (buffer: Buffer, start: number, byteCount: number) => number,
    readFunc: (buffer: Buffer, offset: number) => T): Promise<T> {
    // We'll copy the leftover bytes from the current chunk, plus whatever bytes we need from
    // subsequent chunks, into a "seam buffer", and read the value from there.
    let buffer = this.currentChunk;
    let offsetInBuffer = this.offsetInCurrentChunk;
    let discardedBytes = 0;
    let bytesInBuffer = this.bytesLeftInCurrentChunk;
    while (true) {
      // Ask how many bytes we need to complete the read.
      const requestedBytes = canReadFunc(buffer, offsetInBuffer, bytesInBuffer);
      if (requestedBytes <= bytesInBuffer) {
        // We have enough bytes. Do the read.
        const value = readFunc(buffer, offsetInBuffer);
        this.offsetInCurrentChunk += requestedBytes - discardedBytes;
        return value;
      }
      // We've already copied all the bytes from our current chunk to the seam buffer. We're
      // guaranteed to wind up reading all of those bytes, and will need at least one more byte, so
      // get the next chunk.
      await this.readNextChunk();
      // Create or extend our seam buffer to hold the additional bytes we're about to read.
      const bytesToCopy = Math.min(requestedBytes - bytesInBuffer, this.bytesLeftInCurrentChunk);
      buffer = this.getSeamBuffer(bytesInBuffer + bytesToCopy, buffer, offsetInBuffer, bytesInBuffer);
      discardedBytes = bytesInBuffer;
      offsetInBuffer = 0;
      // Append the new bytes to our seam buffer.
      this.currentChunk.copy(buffer, bytesInBuffer, 0, bytesToCopy);
      bytesInBuffer += bytesToCopy;
    }
  }
  /**
   * Reads a variable-length value: `canReadFunc` reports how many bytes the
   * value needs; the value is read directly from the current chunk when
   * possible, falling back to the seam path when it straddles a boundary.
   */
  private readVariableSize<T>(
    canReadFunc: (buffer: Buffer, start: number, byteCount: number) => number,
    readFunc: (buffer: Buffer, offset: number) => T): Promise<T> {
    const requestedBytes = canReadFunc(this.currentChunk, this.offsetInCurrentChunk,
      this.bytesLeftInCurrentChunk);
    if (requestedBytes <= this.bytesLeftInCurrentChunk) {
      const value = readFunc(this.currentChunk, this.offsetInCurrentChunk);
      this.offsetInCurrentChunk += requestedBytes;
      return Promise.resolve(value);
    }
    else {
      return this.readAcrossSeam(canReadFunc, readFunc);
    }
  }
  /** Seam-path read of a value whose byte length is known up front. */
  private readKnownSizeAcrossSeam<T>(byteCount: number,
    readFunc: (buffer: Buffer, offset: number) => T): Promise<T> {
    return this.readAcrossSeam((_buffer, _offset, _availableByteCount) => byteCount, readFunc);
  }
  /** Reads a fixed-size value, using the seam path only when necessary. */
  private readKnownSize<T>(byteCount: number, readFunc: (buffer: Buffer, offset: number) => T):
    Promise<T> {
    if (this.bytesLeftInCurrentChunk >= byteCount) {
      // We have enough data. Just read it directly.
      const value = readFunc(this.currentChunk, this.offsetInCurrentChunk);
      this.offsetInCurrentChunk += byteCount;
      return Promise.resolve(value);
    }
    else {
      return this.readKnownSizeAcrossSeam(byteCount, readFunc);
    }
  }
  /**
   * Read a leb128-encoded unsigned 32-bit number
   * [https://en.wikipedia.org/wiki/LEB128]
   */
  public readLEB128UInt32(): Promise<number> {
    return this.readVariableSize(canDecodeLEB128UInt32, decodeLEB128UInt32);
  }
  /**
   * Read a single byte.
   */
  public readByte(): Promise<number> {
    return this.readKnownSize(1, (buffer, offset) => buffer[offset]);
  }
  /**
   * Read a single ASCII character as a string.
   */
  public async readASCIIChar(): Promise<string> {
    return String.fromCodePoint(await this.readByte());
  }
  /**
   * Read the specified number of bytes.
   *
   * @param byteCount Number of bytes to read.
   */
  public async read(byteCount: number): Promise<Buffer> {
    const buffer = Buffer.alloc(byteCount);
    await this.fillBuffer(buffer, 0, byteCount);
    return buffer;
  }
  /**
   * Read a `Date` encoded as an 8-byte sequence.
   */
  public readDate(): Promise<Date> {
    return this.readKnownSize(8, decodeDate);
  }
  /**
   * Read a little-endian 64-bit IEEE floating-point number.
   */
  public readDoubleLE(): Promise<number> {
    return this.readKnownSize(8, (buffer, offset) => buffer.readDoubleLE(offset));
  }
  /**
   * Read a UTF-8 encoded string.
   * @param byteCount Length of encoded string in bytes.
   */
  public readUTF8String(byteCount: number): Promise<string> {
    return this.readKnownSize(byteCount, (buffer, offset) =>
      buffer.toString('utf8', offset, offset + byteCount));
  }
}
/**
 * Decodes a `Date` packed into two little-endian 32-bit words:
 *   high word: bits 4-28 = year, bits 0-3 = month
 *   low word:  bits 27-31 = day, 22-26 = hours, 16-21 = minutes,
 *              10-15 = seconds, 0-9 = milliseconds
 * NOTE(review): `month` is passed straight to the `Date` constructor, which is
 * 0-indexed — this assumes the writer also stores months 0-based; confirm.
 */
function decodeDate(buffer: Buffer, offset: number): Date {
  const low = buffer.readUInt32LE(offset);
  const high = buffer.readUInt32LE(offset + 4);
  return new Date(
    (high & 0x1ffffff0) >> 4,    // year
    high & 0x0000000f,           // month
    (low & 0xf8000000) >>> 27,   // day
    (low & 0x07c00000) >> 22,    // hours
    (low & 0x003f0000) >> 16,    // minutes
    (low & 0x0000fc00) >> 10,    // seconds
    low & 0x000003ff             // milliseconds
  );
}
/**
 * The longest possible byte length of a correctly encoded LEB128 UInt32:
 * `0xff 0xff 0xff 0xff 0x0f` (5 bytes; the final byte must have its
 * continuation bit clear, so the original example `0x8f` was incorrect)
 */
const MAX_ENCODED_UINT32_LENGTH = 5;

/**
 * Reports how many bytes are needed to decode the LEB128 UInt32 at `offset`.
 * Returns the exact encoded length when the terminator is visible within the
 * available bytes, or `MAX_ENCODED_UINT32_LENGTH` to request more bytes.
 * @throws when a maximum-length window contains no terminating byte.
 */
function canDecodeLEB128UInt32(buffer: Buffer, offset: number, byteCount: number): number {
  // Scan for the terminating byte (high bit clear), never looking beyond the
  // longest valid encoding.
  const endOffset = offset + Math.min(byteCount, MAX_ENCODED_UINT32_LENGTH);
  for (let byteOffset = offset; byteOffset < endOffset; byteOffset++) {
    if ((buffer[byteOffset] & 0x80) === 0) {
      return (byteOffset - offset) + 1;
    }
  }
  // If a full maximum-length window was available and no terminator was found,
  // the encoding is invalid. The original guard compared
  // `endOffset - offset > MAX_ENCODED_UINT32_LENGTH`, which can never be true
  // (the scan length is clamped to the maximum), so malformed input would
  // request more bytes forever instead of failing.
  if (byteCount >= MAX_ENCODED_UINT32_LENGTH) {
    throw new Error('Invalid LEB128 encoding.');
  }
  // Not enough data yet: ask for a full maximum-length window.
  return MAX_ENCODED_UINT32_LENGTH;
}
/** Decodes the LEB128 UInt32 at `offset`, discarding the end-index bookkeeping. */
function decodeLEB128UInt32(buffer: Buffer, offset: number): number {
  return leb.decodeUInt32(buffer, offset).value;
}

View File

@@ -1,2 +0,0 @@
export * from './digester';
export * from './random-access-reader';

View File

@@ -1,8 +0,0 @@
/**
 * A disposable, asynchronously iterable stream of byte chunks.
 */
export interface StreamReader extends AsyncIterable<Uint8Array> {
  /** Releases any resources held by the stream. */
  dispose(): void;
}

/**
 * A source of bytes that supports reading arbitrary ranges.
 */
export interface RandomAccessReader {
  /**
   * Opens a stream over the given byte range.
   * NOTE(review): whether `end` is inclusive or exclusive is not visible
   * here — confirm against implementations.
   */
  readStream(start?: number, end?: number): StreamReader;
  /** Releases any resources held by the reader. */
  dispose(): void;
}

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1,56 +0,0 @@
{
"name": "@github/codeql-vscode-utils",
"entries": [
{
"version": "0.0.4",
"tag": "@github/codeql-vscode-utils_v0.0.4",
"date": "Tue, 09 Jun 2020 13:58:46 GMT",
"comments": {
"patch": [
{
"comment": "Update dependencies."
}
],
"dependency": [
{
"comment": "Updating dependency \"@github/codeql-gulp-tasks\" from `^0.0.3` to `^0.0.4`"
}
]
}
},
{
"version": "0.0.3",
"tag": "@github/codeql-vscode-utils_v0.0.3",
"date": "Sat, 06 Jun 2020 06:54:45 GMT",
"comments": {
"patch": [
{
"comment": "Removed unnecessary dependency on `typescript`"
}
],
"dependency": [
{
"comment": "Updating dependency \"@github/codeql-build-tasks\" from `^0.0.2` to `^0.0.3`"
}
]
}
},
{
"version": "0.0.2",
"tag": "@github/codeql-vscode-utils_v0.0.2",
"date": "Fri, 05 Jun 2020 21:52:18 GMT",
"comments": {
"patch": [
{
"comment": "Initial configuration for publishing"
}
],
"dependency": [
{
"comment": "Updating dependency \"@github/codeql-build-tasks\" from `^0.0.1` to `^0.0.2`"
}
]
}
}
]
}

View File

@@ -1,25 +0,0 @@
# Change Log - @github/codeql-vscode-utils
This log was last generated on Tue, 09 Jun 2020 13:58:46 GMT and should not be manually modified.
## 0.0.4
Tue, 09 Jun 2020 13:58:46 GMT
### Patches
- Update dependencies.
## 0.0.3
Sat, 06 Jun 2020 06:54:45 GMT
### Patches
- Removed unnecessary dependency on `typescript`
## 0.0.2
Fri, 05 Jun 2020 21:52:18 GMT
### Patches
- Initial configuration for publishing

View File

@@ -1,7 +0,0 @@
'use strict';
// Enable on-the-fly TypeScript compilation for required task modules.
require('ts-node').register({});
const tasks = require('@github/codeql-gulp-tasks');
exports.default = tasks.compileTypeScript;
exports.watchTypeScript = tasks.watchTypeScript;

View File

@@ -1,27 +0,0 @@
{
"name": "@github/codeql-vscode-utils",
"description": "Shared utilities for writing Visual Studio Code extensions",
"author": "GitHub",
"private": false,
"version": "0.0.4",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/codeql-coreql-team.git"
},
"publishConfig": {
"registry": "https://npm.pkg.github.com/"
},
"main": "./out/index",
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"devDependencies": {
"@github/codeql-gulp-tasks": "^0.0.4",
"@types/node": "^12.0.8",
"@types/vscode": "^1.39.0",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,4 +0,0 @@
// Barrel file: re-export the package's public API.
export * from './disposable-object';
export * from './multi-file-system-watcher';
export * from './ui-service';

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1,60 +0,0 @@
/**
* This is the main configuration file for Rush.
* For full documentation, please see https://rushjs.io/pages/configs/rush_json/
*/
{
"$schema": "https://developer.microsoft.com/json-schemas/rush/v5/rush.schema.json",
"rushVersion": "5.20.0",
"pnpmVersion": "4.8.0",
"pnpmOptions": {
"strictPeerDependencies": true
},
"nodeSupportedVersionRange": ">=10.13.0 <15.0.0",
"suppressNodeLtsWarning": true,
"ensureConsistentVersions": true,
"projectFolderMinDepth": 2,
"projectFolderMaxDepth": 2,
"gitPolicy": {},
"repository": {
"url": "https://github.com/github/vscode-codeql.git"
},
"eventHooks": {
"preRushInstall": [],
"postRushInstall": [],
"preRushBuild": [],
"postRushBuild": []
},
"variants": [],
"projects": [
{
"packageName": "typescript-config",
"projectFolder": "configs/typescript-config"
},
{
"packageName": "@github/codeql-gulp-tasks",
"projectFolder": "tools/build-tasks",
"versionPolicyName": "utilities"
},
{
"packageName": "semmle-bqrs",
"projectFolder": "lib/semmle-bqrs"
},
{
"packageName": "semmle-io",
"projectFolder": "lib/semmle-io"
},
{
"packageName": "semmle-io-node",
"projectFolder": "lib/semmle-io-node"
},
{
"packageName": "@github/codeql-vscode-utils",
"projectFolder": "lib/semmle-vscode-utils",
"versionPolicyName": "utilities"
},
{
"packageName": "vscode-codeql",
"projectFolder": "extensions/ql-vscode"
}
]
}

View File

@@ -1,41 +0,0 @@
{
"name": "@github/codeql-gulp-tasks",
"entries": [
{
"version": "0.0.4",
"tag": "@github/codeql-gulp-tasks_v0.0.4",
"date": "Tue, 09 Jun 2020 13:58:46 GMT",
"comments": {
"patch": [
{
"comment": "Support packages from multiple registries when packaging an extension."
}
]
}
},
{
"version": "0.0.3",
"tag": "@github/codeql-build-tasks_v0.0.3",
"date": "Sat, 06 Jun 2020 06:54:45 GMT",
"comments": {
"patch": [
{
"comment": "Added missing dependency on `glob`"
}
]
}
},
{
"version": "0.0.2",
"tag": "@github/codeql-build-tasks_v0.0.2",
"date": "Fri, 05 Jun 2020 21:52:18 GMT",
"comments": {
"patch": [
{
"comment": "Initial configuration for publishing"
}
]
}
}
]
}

View File

@@ -1,25 +0,0 @@
# Change Log - @github/codeql-gulp-tasks
This log was last generated on Tue, 09 Jun 2020 13:58:46 GMT and should not be manually modified.
## 0.0.4
Tue, 09 Jun 2020 13:58:46 GMT
### Patches
- Support packages from multiple registries when packaging an extension.
## 0.0.3
Sat, 06 Jun 2020 06:54:45 GMT
### Patches
- Added missing dependency on `glob`
## 0.0.2
Fri, 05 Jun 2020 21:52:18 GMT
### Patches
- Initial configuration for publishing

View File

@@ -1,7 +0,0 @@
'use strict';
// Register ts-node so the task sources in `../src` can be required directly.
require('ts-node').register({});
const tasks = require('../src/index');
exports.default = tasks.compileTypeScript;
exports.watchTypeScript = tasks.watchTypeScript;

View File

@@ -1,51 +0,0 @@
{
"name": "@github/codeql-gulp-tasks",
"description": "Internal Gulp tasks",
"author": "GitHub",
"private": false,
"version": "0.0.4",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/codeql-coreql-team.git"
},
"publishConfig": {
"registry": "https://npm.pkg.github.com/"
},
"main": "./out/index",
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"@microsoft/node-core-library": "~3.13.0",
"@microsoft/rush-lib": "~5.20.0",
"ansi-colors": "^4.0.1",
"child-process-promise": "^2.2.1",
"fs-extra": "^8.1.0",
"glob": "^7.1.4",
"glob-promise": "^3.4.0",
"gulp": "^4.0.2",
"gulp-sourcemaps": "^2.6.5",
"gulp-typescript": "^5.0.1",
"js-yaml": "^3.12.0",
"jsonc-parser": "~2.1.0",
"npm-packlist": "~1.4.4",
"plugin-error": "^1.0.1",
"through2": "^3.0.1",
"vinyl": "^2.2.0"
},
"devDependencies": {
"@types/child-process-promise": "^2.2.1",
"@types/fs-extra": "^8.0.0",
"@types/gulp": "^4.0.6",
"@types/js-yaml": "~3.12.1",
"@types/node": "^12.0.8",
"@types/npm-packlist": "~1.1.1",
"@types/through2": "~2.0.34",
"@types/vinyl": "~2.0.3",
"typescript": "^3.7.2",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,204 +0,0 @@
import * as fs from 'fs-extra';
import * as jsonc from 'jsonc-parser';
import { IPackageJson } from '@microsoft/node-core-library';
import * as path from 'path';
import { getRushContext, RushContext } from './rush';
import * as packlist from 'npm-packlist';
import * as glob from 'glob-promise';
import * as cpp from 'child-process-promise';
/**
 * Metadata about a single package to be copied into the deployment tree.
 */
interface IPackageInfo {
  // Package name as it appears in `package.json`.
  name: string;
  // Resolved version of the package.
  version: string;
  // Path of the package's source directory.
  sourcePath: string;
  // Files to copy, relative to `sourcePath`.
  files: string[];
  // Direct dependencies of this package.
  dependencies: IPackageInfo[];
  // True only for the root package being deployed.
  isRoot?: boolean;
  // Set once the package has been copied to the shared `node_modules`.
  copied?: boolean;
}
/**
 * Copies a package's files into `destPath`.
 *
 * The root package's `package.json` is rewritten instead of copied so that
 * its `version` field matches `packageFiles.version` (which may carry a
 * prerelease suffix for dev builds).
 *
 * @param packageFiles Package metadata, including the file list to copy.
 * @param destPath Destination directory for the package contents.
 */
async function copyPackage(packageFiles: IPackageInfo, destPath: string): Promise<void> {
  for (const file of packageFiles.files) {
    const sourceFilePath = path.resolve(packageFiles.sourcePath, file);
    const destFilePath = path.resolve(destPath, file);
    if (packageFiles.isRoot && (file === 'package.json')) {
      // For non-release builds, we tweak the version number of the extension to add a prerelease
      // suffix. Rather than just copying `package.json`, we'll parse the original copy, update the
      // `version` property, and write it out to the new location.
      const packageJson = jsonc.parse((await fs.readFile(sourceFilePath)).toString());
      packageJson.version = packageFiles.version;
      await fs.writeFile(destFilePath, JSON.stringify(packageJson));
    }
    else {
      await fs.copy(sourceFilePath, destFilePath);
    }
  }
}
/** Result of `deployPackage`: where the package was placed and its identity. */
export interface DeployedPackage {
  // Directory the deployable copy of the package was written to.
  distPath: string;
  // Package name from `package.json`.
  name: string;
  // Deployed version (possibly with a `-dev.*` suffix for dev builds).
  version: string;
}
/**
 * An index of package metadata keyed by package name, then by version.
 */
class PackageMap {
  private map = new Map<string, Map<string, IPackageInfo>>();

  constructor() {
  }

  /**
   * Looks up a package by name and version.
   * @returns The package's info, or `undefined` if it has not been added.
   */
  public getPackageInfo(name: string, version: string): IPackageInfo | undefined {
    const versionMap = this.map.get(name);
    if (versionMap === undefined) {
      return undefined;
    }
    return versionMap.get(version);
  }

  /**
   * Registers a package.
   * @throws If a package with the same name and version was already added.
   */
  public addPackageInfo(pkg: IPackageInfo): void {
    if (this.getPackageInfo(pkg.name, pkg.version)) {
      throw new Error(`Attempt to add duplicate package '${pkg.name}@${pkg.version}'.`);
    }
    let versionMap = this.map.get(pkg.name);
    if (versionMap === undefined) {
      versionMap = new Map<string, IPackageInfo>();
      this.map.set(pkg.name, versionMap);
    }
    versionMap.set(pkg.version, pkg);
  }

  /**
   * Reports whether more than one version of the named package was added.
   *
   * Previously this dereferenced `this.map.get(name)!` unconditionally, so an
   * unknown name threw a `TypeError`; an unknown package now reports `false`.
   */
  public hasMultipleVersions(name: string): boolean {
    const versionMap = this.map.get(name);
    return versionMap !== undefined && versionMap.size > 1;
  }
}
/**
 * Recursively gathers `IPackageInfo` for the named package and all of its
 * transitive dependencies, memoizing results in `pkgs` so each
 * name/version pair is resolved only once.
 *
 * @param context Rush context used to resolve package locations.
 * @param name Package name to collect.
 * @param version Resolved version of the package.
 * @param pkgs Accumulator; packages already present are reused as-is.
 * @returns The info record for the requested package.
 */
async function collectPackages(context: RushContext, name: string, version: string,
  pkgs: PackageMap): Promise<IPackageInfo> {
  let pkg = pkgs.getPackageInfo(name, version);
  if (!pkg) {
    const info = await context.getPackageInfo(name, version);
    let files: string[];
    if (info.isLocal) {
      // For local packages, use `packlist` to get the list of files that npm would have packed
      // into the tarball.
      files = packlist.sync({ path: info.path });
    }
    else {
      // For non-local packages, just copy everything.
      files = await glob('**/*', {
        nodir: true,
        cwd: info.path
      });
    }
    pkg = {
      name: name,
      version: version,
      sourcePath: info.path,
      files: files,
      dependencies: []
    };
    // Register before recursing; the recursion below only touches other
    // name/version pairs, so this entry is safe to publish first.
    pkgs.addPackageInfo(pkg);
    for (const dependencyName of info.dependencies.keys()) {
      const dependencyVersion = info.dependencies.get(dependencyName)!;
      const dependencyPackage = await collectPackages(context, dependencyName, dependencyVersion, pkgs);
      pkg.dependencies.push(dependencyPackage);
    }
  }
  return pkg;
}
/**
 * Copies `pkg` and, recursively, its dependencies into the deployment tree.
 *
 * Packages with a single version are hoisted to the root `node_modules`
 * directory (and copied at most once, tracked via `pkg.copied`); packages
 * with multiple versions, and the root package itself, are copied as nested
 * packages so a later `npm dedupe` can flatten them where possible.
 *
 * @param pkg Package to copy.
 * @param pkgs Index used to detect multi-version packages.
 * @param destPath Directory receiving this package's nested copy.
 * @param rootNodeModulesPath The shared root `node_modules` directory.
 */
async function copyPackageAndModules(pkg: IPackageInfo, pkgs: PackageMap, destPath: string,
  rootNodeModulesPath: string): Promise<void> {
  let destPackagePath: string;
  if (pkgs.hasMultipleVersions(pkg.name) || pkg.isRoot) {
    // Copy as a nested package, and let `npm dedupe` fix it up later if possible.
    destPackagePath = path.join(destPath, pkg.name);
  }
  else {
    // Copy to the root `node_modules` directory.
    if (pkg.copied) {
      return;
    }
    pkg.copied = true;
    destPackagePath = path.join(rootNodeModulesPath, pkg.name);
  }
  await copyPackage(pkg, destPackagePath);
  const nodeModulesPath = path.join(destPackagePath, 'node_modules');
  for (const dependencyPkg of pkg.dependencies) {
    await copyPackageAndModules(dependencyPkg, pkgs, nodeModulesPath, rootNodeModulesPath);
  }
}
/**
 * Builds a self-contained deployable copy of the package described by
 * `packageJsonPath` under `<rush root>/dist/<package name>`: gathers the
 * package and its transitive dependencies, copies them into a private
 * `node_modules` tree, and runs `npm dedupe` over the result.
 *
 * For development builds (no `--release` flag on the command line) the
 * version gets a timestamp-based `-dev.*` suffix and older dev builds are
 * deleted from `dist` first.
 *
 * @param packageJsonPath Path to the root package's `package.json`.
 * @returns Location, name, and (possibly suffixed) version of the deployed
 *   package.
 */
export async function deployPackage(packageJsonPath: string): Promise<DeployedPackage> {
  try {
    const context = await getRushContext(path.dirname(packageJsonPath));
    const rootPackage: IPackageJson = jsonc.parse(await fs.readFile(packageJsonPath, 'utf8'));
    // Default to development build; use flag --release to indicate release build.
    const isDevBuild = !process.argv.includes('--release');
    const distDir = path.join(context.rushConfig.rushJsonFolder, 'dist');
    await fs.mkdirs(distDir);
    if (isDevBuild) {
      // NOTE: rootPackage.name had better not have any regex metacharacters
      const oldDevBuildPattern = new RegExp('^' + rootPackage.name + '[^/]+-dev[0-9.]+\\.vsix$');
      // Dev package filenames are of the form
      //    vscode-codeql-0.0.1-dev.2019.9.27.19.55.20.vsix
      (await fs.readdir(distDir)).filter(name => name.match(oldDevBuildPattern)).map(build => {
        console.log(`Deleting old dev build ${build}...`);
        fs.unlinkSync(path.join(distDir, build));
      });
      const now = new Date();
      // getUTCMonth() is 0-based, hence the +1.
      rootPackage.version = rootPackage.version +
        `-dev.${now.getUTCFullYear()}.${now.getUTCMonth() + 1}.${now.getUTCDate()}` +
        `.${now.getUTCHours()}.${now.getUTCMinutes()}.${now.getUTCSeconds()}`;
    }
    const distPath = path.join(distDir, rootPackage.name);
    await fs.remove(distPath);
    await fs.mkdirs(distPath);
    console.log(`Gathering transitive dependencies of package '${rootPackage.name}'...`);
    const pkgs = new PackageMap();
    const rootPkg = await collectPackages(context, rootPackage.name, rootPackage.version, pkgs);
    rootPkg.isRoot = true;
    console.log(`Copying package '${rootPackage.name}' and its dependencies to '${distPath}'...`);
    await copyPackageAndModules(rootPkg, pkgs, path.dirname(distPath), path.join(distPath, 'node_modules'));
    await fs.copy(path.resolve(rootPkg.sourcePath, ".vscodeignore"), path.resolve(distPath, ".vscodeignore"));
    console.log(`Deduplicating dependencies of package '${rootPackage.name}'...`);
    // We create a temporary `package-lock.json` file just to prevent `npm ls` from printing out the
    // message that it created a package-lock.json.
    const packageLockPath = path.join(distPath, 'package-lock.json');
    await fs.writeFile(packageLockPath, '{}');
    await cpp.spawn('npm', ['dedupe'], {
      cwd: distPath,
      stdio: 'inherit'
    });
    await fs.unlink(packageLockPath);
    return {
      distPath: distPath,
      name: rootPackage.name,
      version: rootPackage.version
    };
  }
  catch (e) {
    // Log before rethrowing so the failure is visible even if the caller
    // swallows the rejection.
    console.error(e);
    throw e;
  }
}

View File

@@ -1,4 +0,0 @@
// Barrel file: re-export the package's public API.
export * from './package';
export * from './textmate';
export * from './typescript';
export * from './tests';

View File

@@ -1,17 +0,0 @@
/** Map from package name to version specifier. */
export interface PackageDependencies {
  [key: string]: string;
}

/** A single package entry in the pnpm shrinkwrap (lock) file. */
export interface ShrinkwrapPackage {
  // Resolved dependencies of this package.
  dependencies?: PackageDependencies;
  // True when the package is only a dev dependency.
  dev?: boolean;
  // Present for entries keyed by something other than `/<name>/<version>`.
  name?: string;
  version?: string;
}

/** The parsed pnpm shrinkwrap (lock) file. */
export interface Shrinkwrap {
  // Top-level dependency map of the workspace.
  dependencies: PackageDependencies;
  // All installed packages, keyed by `/<name>/<version>` or a custom key.
  packages: {
    [key: string]: ShrinkwrapPackage;
  }
}

View File

@@ -1,166 +0,0 @@
import * as fs from 'fs-extra';
import * as glob from 'glob-promise';
import * as jsonc from 'jsonc-parser';
import { Shrinkwrap, ShrinkwrapPackage } from './pnpm';
import * as path from 'path';
import { IPackageJson } from '@microsoft/node-core-library';
import { RushConfiguration } from '@microsoft/rush-lib';
import * as yaml from 'js-yaml';
/** `package.json` contents extended with npm's optional `files` whitelist. */
export interface IPackageJsonWithFiles extends IPackageJson {
  files?: string[]
}
/** Resolved location and dependency information for a single package. */
interface PackageInfo {
  // Path to the package's directory on disk.
  path: string;
  // Map from dependency name to resolved version.
  dependencies: Map<string, string>;
  // Parsed `package.json` of the package.
  config: IPackageJsonWithFiles;
  // True when the package is a local Rush project rather than a store package.
  isLocal: boolean;
}
// Matches pnpm peer-dependency version specifiers of the form
// `/<name>/<version>/...`, capturing the package name and the real version.
const peerDependencyVersionPattern = /^\/((?:@(?:[^\/]+)\/)?[^\/]+)\/([^\/]+)\//;
/**
 * Provides access to package metadata for a Rush monorepo: local projects,
 * the pnpm shrinkwrap file, and the pnpm package store.
 */
export class RushContext {
  // Lazily-loaded parsed shrinkwrap (pnpm lockfile) contents.
  private shrinkwrap?: Shrinkwrap;
  // Lazily-built index of shrinkwrap packages keyed by `/<name>/<version>`.
  private shrinkwrapPackages?: Map<string, ShrinkwrapPackage>;
  // Root of the pnpm store directory where downloaded package contents live.
  private readonly packageStore: string;

  constructor(public readonly rushConfig: RushConfiguration) {
    this.packageStore = path.join(rushConfig.pnpmStoreFolder, '2');
  }

  /**
   * Locates the extracted contents of `name@version` in the pnpm store.
   * @throws If the package is found in zero or in multiple registry
   *   directories.
   */
  private async findPackageInRepository(name: string, version: string): Promise<string> {
    // Packages may be pulled from multiple registries, each of which has its own directory in the
    // pnpm store. Search for the package name in any of these directories. We use `*.*` to match
    // the directory name to avoid searching the `local` directory, which does not represent a
    // package registry.
    const results = await glob(`*.*/${name}/${version}/package`, {
      absolute: true,
      cwd: this.packageStore
    });
    if (results.length === 0) {
      throw new Error(`Package '${name}:${version}' not found in package repository.`);
    }
    else if (results.length > 1) {
      throw new Error(`Multiple copies of package '${name}:${version}' found in package repository.`);
    }
    else {
      return results[0];
    }
  }

  /** Returns the project folder of a local Rush project, if `name` is one. */
  private getRushProjectPath(name: string): string | undefined {
    const project = this.rushConfig.getProjectByName(name);
    if (project) {
      return project.projectFolder;
    }
    else {
      return undefined;
    }
  }

  /** Loads and caches the committed shrinkwrap (pnpm lock) file. */
  private async getShrinkwrap(): Promise<Shrinkwrap> {
    if (!this.shrinkwrap) {
      this.shrinkwrap = yaml.safeLoad(await fs.readFile(this.rushConfig.getCommittedShrinkwrapFilename(), 'utf8'));
    }
    return this.shrinkwrap!;
  }

  /**
   * Finds the shrinkwrap entry for `name@version`, building the lookup index
   * on first use.
   * @throws If the package has no entry in the shrinkwrap file.
   */
  private async getShrinkwrapPackage(name: string, version: string): Promise<ShrinkwrapPackage> {
    const shrinkwrap = await this.getShrinkwrap();
    if (!this.shrinkwrapPackages) {
      this.shrinkwrapPackages = new Map<string, ShrinkwrapPackage>();
      for (const name in shrinkwrap.packages) {
        const pkg = shrinkwrap.packages[name];
        let packageKey: string;
        if (pkg.name) {
          // Entries that carry their own name/version are keyed by those
          // instead of the map key.
          packageKey = makePackageKey(pkg.name, pkg.version!);
        }
        else {
          packageKey = name;
        }
        this.shrinkwrapPackages.set(packageKey, pkg);
      }
    }
    const packageKey = makePackageKey(name, version);
    const shrinkwrapPackage = this.shrinkwrapPackages.get(packageKey);
    if (!shrinkwrapPackage) {
      throw new Error(`Package '${packageKey}' not found in shrinkwrap file.`);
    }
    return shrinkwrapPackage;
  }

  /**
   * Resolves the on-disk location, dependency map, and `package.json` of
   * `name@version`, whether it is a local Rush project or a store package.
   * @throws If the package or one of its dependencies cannot be resolved.
   */
  public async getPackageInfo(name: string, version: string): Promise<PackageInfo> {
    let pkg: ShrinkwrapPackage;
    const rushProject = this.rushConfig.getProjectByName(name);
    let packagePath: string;
    let config: IPackageJsonWithFiles;
    if (rushProject) {
      packagePath = rushProject.projectFolder;
      pkg = await this.getShrinkwrapPackage(rushProject.tempProjectName, '0.0.0');
      config = rushProject.packageJson;
    }
    else {
      pkg = await this.getShrinkwrapPackage(name, version);
      // Ensure a proper version number. pnpm uses syntax like 3.4.0_glob@7.1.6 for peer dependencies
      version = version.split('_')[0];
      packagePath = await this.findPackageInRepository(name, version);
      packagePath = await fs.realpath(packagePath);
      config = jsonc.parse(await fs.readFile(path.join(packagePath, 'package.json'), 'utf8'));
    }
    const dependencies = new Map<string, string>();
    if (config.dependencies) {
      for (const dependencyName in config.dependencies) {
        let dependencyVersion: string;
        if (await this.getRushProjectPath(dependencyName)) {
          // Local Rush projects are tracked under version 0.0.0.
          dependencyVersion = '0.0.0';
        }
        else {
          dependencyVersion = pkg.dependencies![dependencyName];
          if (!dependencyVersion) {
            throw new Error(`Package '${name}' depends on unresolved package '${dependencyName}'.`);
          }
          if (dependencyVersion.startsWith('/')) {
            // This is a package with a peer dependency. We need to extract the actual package
            // version.
            const match = dependencyVersion.match(peerDependencyVersionPattern);
            if (match) {
              if (match[1] !== dependencyName) {
                throw new Error(`Mismatch between package name '${dependencyName}' and peer dependency specifier '${dependencyVersion}'.`);
              }
              dependencyVersion = match[2];
            }
            else {
              throw new Error(`Invalid peer dependency specifier '${dependencyVersion}'.`);
            }
          }
        }
        dependencies.set(dependencyName, dependencyVersion);
      }
    }
    return {
      path: packagePath,
      dependencies: dependencies,
      config: config,
      isLocal: rushProject !== undefined
    };
  }
}
/** Builds the shrinkwrap lookup key for a package name/version pair. */
function makePackageKey(name: string, version: string): string {
  return '/' + name + '/' + version;
}
/**
 * Loads the Rush configuration governing `startingFolder` (or the current
 * directory when omitted) and wraps it in a `RushContext`.
 */
export async function getRushContext(startingFolder?: string): Promise<RushContext> {
  const rushConfig = RushConfiguration.loadFromDefaultLocation({ startingFolder });
  return new RushContext(rushConfig);
}

View File

@@ -1,68 +0,0 @@
import * as colors from 'ansi-colors';
import * as gulp from 'gulp';
import * as path from 'path';
import * as sourcemaps from 'gulp-sourcemaps';
import * as ts from 'gulp-typescript';
import { RushConfiguration } from '@microsoft/rush-lib';
/**
 * A gulp-typescript reporter that prints compiler diagnostics in a
 * `file(line,column): error TSnnnn: message` format, converting the line to
 * 1-based.
 */
function goodReporter(): ts.reporter.Reporter {
  return {
    error: (error, typescript) => {
      if (error.tsFile) {
        // Diagnostic is tied to a source file: include file and position.
        console.log('[' + colors.gray('gulp-typescript') + '] ' + colors.red(error.fullFilename
          + '(' + (error.startPosition!.line + 1) + ',' + error.startPosition!.character + '): ')
          + 'error TS' + error.diagnostic.code + ': ' + typescript.flattenDiagnosticMessageText(error.diagnostic.messageText, '\n'));
      }
      else {
        // Global diagnostic with no file attribution.
        console.log(error.message);
      }
    },
  };
}
// Shared gulp-typescript project loaded from this package's `tsconfig.json`.
const tsProject = ts.createProject('tsconfig.json');
/**
 * Gulp task: compiles this project's TypeScript sources to `out`, writing
 * source maps whose `sourceRoot` is valid from the deployed `dist` layout.
 */
export function compileTypeScript() {
  // Find this project's relative directory. Rush already knows this, so just ask.
  const packageDir = path.resolve('.');
  const rushConfig = RushConfiguration.loadFromDefaultLocation({
    startingFolder: packageDir
  });
  const project = rushConfig.tryGetProjectForPath(packageDir);
  if (!project) {
    console.error(`Unable to find project for '${packageDir}' in 'rush.json'.`);
    throw Error();
  }
  //REVIEW: Better way to detect deployable projects?
  // Since extension .js files are deployed to 'dist/<package>/out', and libraries are deployed to
  // 'dist/<app package>/node_modules/<package>/out'.
  const pathToRoot = (path.dirname(project.projectRelativeFolder) === 'extensions') ?
    '../../..' : '../../../../..';
  return tsProject.src()
    .pipe(sourcemaps.init())
    .pipe(tsProject(goodReporter()))
    .pipe(sourcemaps.mapSources((sourcePath, _file) => {
      // The source path is kind of odd, because it's relative to the `tsconfig.json` file in the
      // `typescript-config` package, which lives in the `node_modules` directory of the package
      // that is being built. It starts out as something like '../../../src/foo.ts', and we need to
      // strip out the leading '../../../'.
      // Joining onto a three-segment dummy path cancels exactly those three
      // '..' segments, e.g. join('a/b/c', '../../../src/foo.ts') === 'src/foo.ts'.
      return path.join('a/b/c', sourcePath);
    }))
    .pipe(sourcemaps.write('.', {
      includeContent: false,
      sourceRoot: path.join(pathToRoot, project.projectRelativeFolder)
    }))
    .pipe(gulp.dest('out'));
}
/** Recompiles the TypeScript sources whenever a file under `src` changes. */
export function watchTypeScript() {
  const sources = 'src/**/*.ts';
  gulp.watch(sources, compileTypeScript);
}
/** Copy CSS files for the results view into the output directory. */
export function copyViewCss() {
  const cssFiles = gulp.src('src/view/*.css');
  return cssFiles.pipe(gulp.dest('out'));
}

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1 +0,0 @@
{}