Remove now-unused libraries.

This commit is contained in:
Jason Reed
2020-07-14 08:00:11 -04:00
parent 868b356588
commit e5e854822d
25 changed files with 4 additions and 1670 deletions

View File

@@ -3,9 +3,6 @@ dependencies:
'@microsoft/rush-lib': 5.20.0 '@microsoft/rush-lib': 5.20.0
'@rush-temp/codeql-gulp-tasks': 'file:projects/codeql-gulp-tasks.tgz' '@rush-temp/codeql-gulp-tasks': 'file:projects/codeql-gulp-tasks.tgz'
'@rush-temp/codeql-vscode-utils': 'file:projects/codeql-vscode-utils.tgz_typescript@3.8.3' '@rush-temp/codeql-vscode-utils': 'file:projects/codeql-vscode-utils.tgz_typescript@3.8.3'
'@rush-temp/semmle-bqrs': 'file:projects/semmle-bqrs.tgz_typescript@3.8.3'
'@rush-temp/semmle-io': 'file:projects/semmle-io.tgz_typescript@3.8.3'
'@rush-temp/semmle-io-node': 'file:projects/semmle-io-node.tgz_typescript@3.8.3'
'@rush-temp/typescript-config': 'file:projects/typescript-config.tgz' '@rush-temp/typescript-config': 'file:projects/typescript-config.tgz'
'@rush-temp/vscode-codeql': 'file:projects/vscode-codeql.tgz' '@rush-temp/vscode-codeql': 'file:projects/vscode-codeql.tgz'
'@types/chai': 4.2.11 '@types/chai': 4.2.11
@@ -55,7 +52,6 @@ dependencies:
husky: 4.2.5 husky: 4.2.5
js-yaml: 3.13.1 js-yaml: 3.13.1
jsonc-parser: 2.1.1 jsonc-parser: 2.1.1
leb: 0.3.0
lint-staged: 10.2.2 lint-staged: 10.2.2
minimist: 1.2.5 minimist: 1.2.5
mocha: 6.2.2 mocha: 6.2.2
@@ -68,7 +64,6 @@ dependencies:
proxyquire: 2.1.3 proxyquire: 2.1.3
react: 16.13.0 react: 16.13.0
react-dom: 16.13.0_react@16.13.0 react-dom: 16.13.0_react@16.13.0
reflect-metadata: 0.1.13
semver: 7.3.2 semver: 7.3.2
sinon: 9.0.1 sinon: 9.0.1
sinon-chai: 3.5.0_chai@4.2.0+sinon@9.0.1 sinon-chai: 3.5.0_chai@4.2.0+sinon@9.0.1
@@ -4264,10 +4259,6 @@ packages:
node: '>= 0.10' node: '>= 0.10'
resolution: resolution:
integrity: sha1-bxT5mje+Op3XhPVJVpDlkDRm7kI= integrity: sha1-bxT5mje+Op3XhPVJVpDlkDRm7kI=
/leb/0.3.0:
dev: false
resolution:
integrity: sha1-Mr7p+tFoMo1q6oUi2DP0GA7tHaM=
/levn/0.3.0: /levn/0.3.0:
dependencies: dependencies:
prelude-ls: 1.1.2 prelude-ls: 1.1.2
@@ -6032,10 +6023,6 @@ packages:
node: '>= 0.10' node: '>= 0.10'
resolution: resolution:
integrity: sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q= integrity: sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=
/reflect-metadata/0.1.13:
dev: false
resolution:
integrity: sha512-Ts1Y/anZELhSsjMcU605fU9RE4Oi3p5ORujwbIKXfWa+0Zxs510Qrmrce5/Jowq3cHSZSJqBjypxmHarc+vEWg==
/regenerator-runtime/0.13.5: /regenerator-runtime/0.13.5:
dev: false dev: false
resolution: resolution:
@@ -8004,7 +7991,7 @@ packages:
dev: false dev: false
name: '@rush-temp/codeql-gulp-tasks' name: '@rush-temp/codeql-gulp-tasks'
resolution: resolution:
integrity: sha512-fqE1VOiN1SmB17tolFDAlApTd/oXFfl1wh+WkQfajdOsHLxDSMPYAcWnt7FdSaQSp/EgIIZ3lNnDBTXWlPkE/g== integrity: sha512-VdaBVV0JiXUYF9fe/mQ5WdbkIAjDfp1/cf35hhmNvfuSwplDtHwvx9bCrbZZ6yElnXSE3jJ04Q4HFEAzxxNYPw==
tarball: 'file:projects/codeql-gulp-tasks.tgz' tarball: 'file:projects/codeql-gulp-tasks.tgz'
version: 0.0.0 version: 0.0.0
'file:projects/codeql-vscode-utils.tgz_typescript@3.8.3': 'file:projects/codeql-vscode-utils.tgz_typescript@3.8.3':
@@ -8018,61 +8005,14 @@ packages:
peerDependencies: peerDependencies:
typescript: '*' typescript: '*'
resolution: resolution:
integrity: sha512-UdaU/jwMCYYysISj/rwLWXxZKhSYo/uIkUBM5LpQhm4vy4z/0FPrEqqZWOKk+ldxUPMi8lx+8U7WkqXnTY5PFg== integrity: sha512-BfGUKj2eXAnv7+rYYMRAJdj6MGNCD4a+wwemyf9tfE8kXO2ghWbey/fqA8dy6qT8KE8tAokEVkuN6WjPLdMHqg==
tarball: 'file:projects/codeql-vscode-utils.tgz' tarball: 'file:projects/codeql-vscode-utils.tgz'
version: 0.0.0 version: 0.0.0
'file:projects/semmle-bqrs.tgz_typescript@3.8.3':
dependencies:
'@types/node': 12.12.30
build-tasks: 0.0.1
leb: 0.3.0
reflect-metadata: 0.1.13
typescript-formatter: 7.2.2_typescript@3.8.3
dev: false
id: 'file:projects/semmle-bqrs.tgz'
name: '@rush-temp/semmle-bqrs'
peerDependencies:
typescript: '*'
resolution:
integrity: sha512-xXKv5YnDihfs9L8fWDfHY4IndM8A/cievN+teT/bStvPQ53P3RJikeF2R11+ovg/UNg8YaCQYAHovi4z5KEx4g==
tarball: 'file:projects/semmle-bqrs.tgz'
version: 0.0.0
'file:projects/semmle-io-node.tgz_typescript@3.8.3':
dependencies:
'@types/fs-extra': 8.1.0
'@types/node': 12.12.30
build-tasks: 0.0.1
fs-extra: 8.1.0
typescript-formatter: 7.2.2_typescript@3.8.3
dev: false
id: 'file:projects/semmle-io-node.tgz'
name: '@rush-temp/semmle-io-node'
peerDependencies:
typescript: '*'
resolution:
integrity: sha512-sB1AHo/3SHXobocIOTOOF4A6RFD7PPr8n8CSv6Qq4ZiOP/8ULNmwgoxEAksp6x4Uf6OSgJ9Puawk/v0DeeK1UQ==
tarball: 'file:projects/semmle-io-node.tgz'
version: 0.0.0
'file:projects/semmle-io.tgz_typescript@3.8.3':
dependencies:
'@types/node': 12.12.30
build-tasks: 0.0.1
leb: 0.3.0
typescript-formatter: 7.2.2_typescript@3.8.3
dev: false
id: 'file:projects/semmle-io.tgz'
name: '@rush-temp/semmle-io'
peerDependencies:
typescript: '*'
resolution:
integrity: sha512-LmAIuLFIfrPg81Hv9UZ7VxfiM3jpXJ7HkH8w2L1t8jUZt6piibQbwMo3R8oz1v1F21QnL8DxBG4v7PCHgbf72w==
tarball: 'file:projects/semmle-io.tgz'
version: 0.0.0
'file:projects/typescript-config.tgz': 'file:projects/typescript-config.tgz':
dev: false dev: false
name: '@rush-temp/typescript-config' name: '@rush-temp/typescript-config'
resolution: resolution:
integrity: sha512-XuUIySaNoooIduvehnlKYaHqZJmmQoCqB1RtKhNszjCYZaSSJAnKVucViWBf5oNLKSNP7NchrD7gcoBlQ3xYvw== integrity: sha512-qJbtY2jvt6LKkmUt/seiYyXSEB6Oip/rW+SxofQEnpyplgIQv7whTZb6g5pwlSLGl8goTaQFm4NfazKhFmxXvQ==
tarball: 'file:projects/typescript-config.tgz' tarball: 'file:projects/typescript-config.tgz'
version: 0.0.0 version: 0.0.0
'file:projects/vscode-codeql.tgz': 'file:projects/vscode-codeql.tgz':
@@ -8156,7 +8096,7 @@ packages:
dev: false dev: false
name: '@rush-temp/vscode-codeql' name: '@rush-temp/vscode-codeql'
resolution: resolution:
integrity: sha512-0Y23lFB67k0+FFH/+TkoZDMBgeScx4E73Q/j+oDIsu7G+Dsx0SLp3Lzb/1nKJma9i2t/OFVYbTZiRYPUisIBmA== integrity: sha512-pzIN497d6Ht6VQHUdB90HFRVzzgFNQ71WLudM/L7P0QgsQtRuaLk9WVEcWMz/3124F7NiFa3SpC/THbN5Cc9aw==
tarball: 'file:projects/vscode-codeql.tgz' tarball: 'file:projects/vscode-codeql.tgz'
version: 0.0.0 version: 0.0.0
registry: '' registry: ''
@@ -8165,9 +8105,6 @@ specifiers:
'@microsoft/rush-lib': ~5.20.0 '@microsoft/rush-lib': ~5.20.0
'@rush-temp/codeql-gulp-tasks': 'file:./projects/codeql-gulp-tasks.tgz' '@rush-temp/codeql-gulp-tasks': 'file:./projects/codeql-gulp-tasks.tgz'
'@rush-temp/codeql-vscode-utils': 'file:./projects/codeql-vscode-utils.tgz' '@rush-temp/codeql-vscode-utils': 'file:./projects/codeql-vscode-utils.tgz'
'@rush-temp/semmle-bqrs': 'file:./projects/semmle-bqrs.tgz'
'@rush-temp/semmle-io': 'file:./projects/semmle-io.tgz'
'@rush-temp/semmle-io-node': 'file:./projects/semmle-io-node.tgz'
'@rush-temp/typescript-config': 'file:./projects/typescript-config.tgz' '@rush-temp/typescript-config': 'file:./projects/typescript-config.tgz'
'@rush-temp/vscode-codeql': 'file:./projects/vscode-codeql.tgz' '@rush-temp/vscode-codeql': 'file:./projects/vscode-codeql.tgz'
'@types/chai': ^4.1.7 '@types/chai': ^4.1.7
@@ -8217,7 +8154,6 @@ specifiers:
husky: ~4.2.5 husky: ~4.2.5
js-yaml: ^3.12.0 js-yaml: ^3.12.0
jsonc-parser: ~2.1.0 jsonc-parser: ~2.1.0
leb: ^0.3.0
lint-staged: ~10.2.2 lint-staged: ~10.2.2
minimist: ~1.2.5 minimist: ~1.2.5
mocha: ~6.2.1 mocha: ~6.2.1
@@ -8230,7 +8166,6 @@ specifiers:
proxyquire: ~2.1.3 proxyquire: ~2.1.3
react: ^16.8.6 react: ^16.8.6
react-dom: ^16.8.6 react-dom: ^16.8.6
reflect-metadata: ~0.1.13
semver: ~7.3.2 semver: ~7.3.2
sinon: ~9.0.0 sinon: ~9.0.0
sinon-chai: ~3.5.0 sinon-chai: ~3.5.0

View File

@@ -1,7 +0,0 @@
// Gulp entry point: registers ts-node so the shared TypeScript gulp tasks can
// be loaded directly, then re-exports them as this package's tasks.
'use strict';
require('ts-node').register({});
const { compileTypeScript, watchTypeScript } = require('@github/codeql-gulp-tasks');
// Default task (`gulp`): one-shot TypeScript compilation.
exports.default = compileTypeScript;
// `gulp watchTypeScript`: recompile on file changes.
exports.watchTypeScript = watchTypeScript;

View File

@@ -1,32 +0,0 @@
{
"name": "semmle-bqrs",
"description": "Parses Binary Query Result Sets generated by CodeQL",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"main": "./out/index",
"files": [
"out/**",
"package.json"
],
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"leb": "^0.3.0",
"reflect-metadata": "~0.1.13",
"semmle-io": "^0.0.1"
},
"devDependencies": {
"@types/node": "^12.0.8",
"@github/codeql-gulp-tasks": "^0.0.4",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,407 +0,0 @@
import { ResultSetSchema, LocationStyle, ColumnTypeKind } from "./bqrs-schema";
import { ResultSetsReader, ResultSetReader } from "./bqrs-file";
import { ElementBase, ColumnValue } from "./bqrs-results";
/**
 * Represents a binding to all remaining columns, starting at the column index specified by
 * `startColumn`.
 */
export interface RestColumnIndex {
  startColumn: number
}
/**
 * Identifies the result column to which a property is bound. May be the index of a specific
 * column, or an instance of `RestColumnIndex` to bind to all remaining columns.
 */
export type ColumnIndex = number | RestColumnIndex;
/**
 * Options that can be specified for a `@qlTable` attribute.
 */
export interface TableOptions {
  /**
   * The name of the table to bind to. If multiple values are specified, the property is bound to
   * the table whose name is earliest in the list.
   */
  name?: string | string[];
}
/**
 * Whether a QL element property (label or location) must, may, or must not be
 * present in the bound result column.
 */
export enum QLOption {
  Required = 'required',
  Optional = 'optional',
  Forbidden = 'forbidden'
}
/**
 * Options that can be specified for a `@qlElement` attribute.
 */
export interface ElementOptions {
  /** Label expectation for the bound column; defaults to `Required` (see `qlElement`). */
  label?: QLOption;
  /** Location expectation for the bound column; defaults to `Required` (see `qlElement`). */
  location?: QLOption;
}
/**
 * Decorator factory that binds the target property to a result column holding a QL element.
 * @param index Index of the column to be bound.
 * @param options Binding options; unspecified label/location expectations default to
 * `QLOption.Required`.
 */
export function qlElement(index: ColumnIndex, options: ElementOptions = {}): PropertyDecorator {
  return (proto: any, key: PropertyKey): void => {
    // Resolve defaults at decoration time, matching the common case of element
    // columns that carry both a label and a location.
    const label = options.label || QLOption.Required;
    const location = options.location || QLOption.Required;
    column(proto, { key, index, type: 'e', options: { label, location } });
  };
}
/**
 * Decorator factory that binds the target property to a result column containing a QL string.
 * @param index Index of the column to be bound.
 */
export function qlString(index: ColumnIndex): PropertyDecorator {
  return (proto: any, key: PropertyKey): void =>
    column(proto, { key, index, type: 's' });
}
/**
 * Decorator factory that binds the target property to a set of result columns. The individual
 * columns are bound to properties of the underlying type of the target property.
 * @param index Index of the first column to be bound.
 * @param type Constructor of the nested tuple type whose decorated properties
 * describe the individual columns.
 */
export function qlTuple(index: ColumnIndex, type: { new(): any }): PropertyDecorator {
  return (proto: any, key: PropertyKey): void => {
    const binding = { key, index, type };
    column(proto, binding);
  };
}
// Property keys may be strings or symbols, matching what decorators receive.
type PropertyKey = string | symbol;
// One property-to-column binding recorded by a column decorator
// (`qlElement`, `qlString`, or `qlTuple`).
interface ColumnProperty {
  // The decorated property's key.
  key: PropertyKey;
  // Column index (or rest-of-columns marker) the property is bound to.
  index: ColumnIndex;
  // Column type tag, or a constructor for a nested tuple type.
  type: ColumnTypeKind | { new(): any };
}
// A column binding for a QL element ('e') column, carrying the resolved
// label/location expectations.
interface ElementProperty extends ColumnProperty {
  type: 'e';
  options: Required<ElementOptions>;
}
// Type guard: a binding is an element binding iff its type tag is 'e'.
function isElement(property: ColumnProperty): property is ElementProperty {
  return property.type === 'e';
}
// Metadata key under which column bindings are stored on a prototype.
const columnPropertiesSymbol = Symbol('columnProperties');
type PropertyDecorator = (proto: any, key: PropertyKey) => void;
// Records a column binding in prototype metadata (Reflect.getMetadata /
// defineMetadata from the 'reflect-metadata' polyfill), creating the binding
// list on first use for that prototype.
function column<T extends ColumnProperty>(proto: any, property: T): void {
  let columnProperties: ColumnProperty[] | undefined = Reflect.getMetadata(columnPropertiesSymbol, proto);
  if (columnProperties === undefined) {
    columnProperties = [];
    Reflect.defineMetadata(columnPropertiesSymbol, columnProperties, proto);
  }
  columnProperties.push(property);
}
// One property-to-table binding recorded by `@qlTable`.
interface TableProperty {
  // The decorated property's key.
  key: PropertyKey;
  // Candidate result-set names; the earliest matching name wins (see `TableOptions.name`).
  tableNames: string[];
  // Constructor for the row type of the bound table.
  rowType: any;
}
// Metadata key under which table bindings are stored on a prototype.
const tablePropertiesSymbol = Symbol('tableProperties');
/**
 * Decorator factory that binds the target property to the contents of a result table.
 * @param rowType The type representing a single row in the bound table. The type of the target
 * property must be an array of this type.
 * @param options Binding options.
 */
export function qlTable(rowType: any, options?: TableOptions): any {
  return (proto, key: PropertyKey) => {
    // Normalize the `name` option into a list of candidate table names,
    // defaulting to the property's own name.
    const name = (options || {}).name;
    let tableNames: string[];
    if (name === undefined) {
      tableNames = [key.toString()];
    }
    else {
      tableNames = typeof name === 'string' ? [name] : name;
    }
    // Accumulate table bindings in metadata on the prototype, creating the
    // list on first use.
    const existing: TableProperty[] | undefined = Reflect.getMetadata(tablePropertiesSymbol, proto);
    const tableProperties = existing === undefined ? [] : existing;
    if (existing === undefined) {
      Reflect.defineMetadata(tablePropertiesSymbol, tableProperties, proto);
    }
    tableProperties.push({ key, tableNames, rowType });
  };
}
// Writes values parsed from a raw tuple into a destination object.
type ParseTupleAction = (src: readonly ColumnValue[], dest: any) => void;
// Converts a raw tuple into a typed row object.
type TupleParser<T> = (src: readonly ColumnValue[]) => T;
/**
 * A result set whose raw tuples are converted on the fly into instances of a
 * user-declared row type via a pre-built tuple parser.
 */
export class CustomResultSet<TTuple> {
  public constructor(private reader: ResultSetReader,
    private readonly tupleParser: TupleParser<TTuple>) {
  }
  /** Lazily reads all tuples from the underlying reader, converting each to a `TTuple`. */
  public async* readTuples(): AsyncIterableIterator<TTuple> {
    for await (const tuple of this.reader.readTuples()) {
      yield this.tupleParser(tuple);
    }
  }
}
/**
 * Binds a decorated row type to the columns of a single result set, producing
 * a `TupleParser` that converts each raw tuple into an instance of the row type.
 *
 * Binding fails with an `Error` if any column is left unbound, a column is
 * bound twice, or a column's type does not match the property's declaration.
 */
class CustomResultSetBinder {
  /** Tracks which result columns have been claimed by a property binding. */
  private readonly boundColumns: boolean[];
  private constructor(private readonly rowType: { new(): any },
    private readonly schema: ResultSetSchema) {
    this.boundColumns = Array(schema.columns.length).fill(false);
  }
  /**
   * Binds `rowType`'s decorated properties to the columns of `reader`'s result set.
   * @param reader The result set to bind against.
   * @param rowType Constructor of the row type with column-decorated properties.
   * @throws Error if the binding is incomplete or inconsistent (see class doc).
   */
  public static bind<TTuple>(reader: ResultSetReader, rowType: { new(): TTuple }):
    CustomResultSet<TTuple> {
    const binder = new CustomResultSetBinder(rowType, reader.schema);
    const tupleParser = binder.bindRoot<TTuple>();
    return new CustomResultSet<TTuple>(reader, tupleParser);
  }
  /** Binds the root row type and verifies that every column was consumed. */
  private bindRoot<TTuple>(): TupleParser<TTuple> {
    const { action } = this.bindObject(this.rowType, 0, true);
    // Every column of the result set must be claimed by some property.
    const unboundColumnIndex = this.boundColumns.indexOf(false);
    if (unboundColumnIndex >= 0) {
      throw new Error(`Column '${this.schema.name}[${unboundColumnIndex}]' is not bound to a property.`);
    }
    return tuple => {
      const result = new this.rowType;
      action(tuple, result);
      return result;
    }
  }
  /**
   * Validates the presence or absence of an element's `label` or `location`
   * against the binding's declared expectation.
   */
  private checkElementProperty(index: ColumnIndex, propertyName: 'location' | 'label',
    hasProperty: boolean, expectsProperty: QLOption): void {
    switch (expectsProperty) {
      case QLOption.Required:
        if (!hasProperty) {
          throw new Error(`Element column '${this.schema.name}[${index}]' does not have the required '${propertyName}' property.`);
        }
        break;
      case QLOption.Forbidden:
        // Fixed: previously this threw when the property was *absent*. A
        // forbidden property is an error only when it IS present.
        if (hasProperty) {
          throw new Error(`Element column '${this.schema.name}[${index}]' has unexpected '${propertyName}' property.`);
        }
        break;
      case QLOption.Optional:
        break;
    }
  }
  /**
   * Binds all column-decorated properties of `type`, whose first column is at
   * `startIndex`. A property bound to '...' (all remaining columns) is only
   * permitted on the root type.
   * @returns The combined parse action and the index of the last column consumed.
   */
  private bindObject(type: { new(): any }, startIndex: number, isRoot: boolean): {
    action: ParseTupleAction,
    lastColumn: number
  } {
    const columnProperties: ColumnProperty[] | undefined =
      Reflect.getMetadata(columnPropertiesSymbol, type.prototype);
    if (columnProperties === undefined) {
      throw new Error(`Type '${type.toString()}' does not have any properties decorated with '@column'.`);
    }
    const actions: ParseTupleAction[] = [];
    let restProperty: ColumnProperty | undefined = undefined;
    let lastColumn = startIndex;
    for (const property of columnProperties) {
      if (typeof property.index === 'object') {
        // A `RestColumnIndex` binding: remember it and bind it after all
        // fixed-index properties, so it can consume whatever columns remain.
        if (!isRoot) {
          throw new Error(`Type '${type.toString()}' has a property bound to '...', but is not the root type.`);
        }
        if (restProperty !== undefined) {
          throw new Error(`Type '${type.toString()}' has multiple properties bound to '...'.`);
        }
        restProperty = property;
      }
      else {
        const index = property.index + startIndex;
        const { action, lastColumn: lastChildColumn } = this.bindColumn(index, type, property,
          property.key);
        actions.push(action);
        lastColumn = Math.max(lastColumn, lastChildColumn);
      }
    }
    if (restProperty !== undefined) {
      // Bind one element per remaining column run, collecting results into an
      // array stored on the rest property. (Renamed from a shadowing
      // `startIndex` local for clarity.)
      const restStartColumn = (<RestColumnIndex>restProperty.index).startColumn;
      let index = restStartColumn;
      let elementIndex = 0;
      const elementActions: ParseTupleAction[] = [];
      while (index < this.schema.columns.length) {
        const { action, lastColumn: lastChildColumn } = this.bindColumn(index, type, restProperty, elementIndex);
        elementActions.push(action);
        index = lastChildColumn + 1;
        elementIndex++;
      }
      const key = restProperty.key;
      actions.push((src, dest) => {
        const destArray = Array(elementActions.length);
        elementActions.forEach(action => action(src, destArray));
        dest[key] = destArray;
      });
    }
    return {
      action: (src, dest) => actions.forEach(action => action(src, dest)),
      lastColumn: lastColumn
    };
  }
  /**
   * Binds one property to the column(s) starting at `index`: either a single
   * column (primitive/element) or, for tuple-typed properties, a nested object
   * spanning several columns.
   */
  private bindColumn(index: number, type: new () => any, property: ColumnProperty,
    key: PropertyKey | number): {
    action: ParseTupleAction,
    lastColumn: number
  } {
    if ((index < 0) || (index >= this.schema.columns.length)) {
      throw new Error(`No matching column '${index}' found for property '${type.toString()}.${property.key.toString()}' when binding root type '${this.rowType.toString()}'.`);
    }
    if (typeof property.type === 'string') {
      // This property is bound to a single column
      return {
        action: this.bindSingleColumn(index, property, type, key),
        lastColumn: index
      };
    }
    else {
      // This property is a tuple that has properties that are bound to columns.
      const propertyType = property.type;
      const { action: objectParser, lastColumn: lastChildColumn } = this.bindObject(propertyType, index, false);
      return {
        action: (src, dest) => {
          const destObject = new propertyType;
          objectParser(src, destObject);
          dest[key] = destObject;
        },
        lastColumn: lastChildColumn
      };
    }
  }
  /**
   * Binds a property to exactly one column, enforcing single ownership and a
   * matching column type. Element columns get their label/location presence
   * checked against the binding's options.
   */
  private bindSingleColumn(index: number, property: ColumnProperty, type: new () => any,
    key: PropertyKey | number): ParseTupleAction {
    if (this.boundColumns[index]) {
      // Fixed message: the column is claimed by more than one property.
      throw new Error(`Column '${this.schema.name}[${index}]' is bound to multiple properties in root type '${this.rowType.toString()}'.`);
    }
    const column = this.schema.columns[index];
    if (column.type.type !== property.type) {
      throw new Error(`Column '${this.schema.name}[${index}]' has type '${column.type.type}', but property '${type.toString()}.${property.key.toString()}' expected type '${property.type}'.`);
    }
    this.boundColumns[index] = true;
    if (isElement(property) && (column.type.type === 'e')) {
      const hasLabel = column.type.hasLabel;
      this.checkElementProperty(index, 'label', hasLabel, property.options.label);
      const hasLocation = column.type.locationStyle !== LocationStyle.None;
      this.checkElementProperty(index, 'location', hasLocation, property.options.location);
      return (src, dest) => {
        // Copy only the element fields the schema says are present.
        const srcElement = <ElementBase>src[index];
        const destElement: ElementBase = {
          id: srcElement.id
        };
        if (hasLabel) {
          destElement.label = srcElement.label;
        }
        if (hasLocation) {
          destElement.location = srcElement.location;
        }
        dest[key] = destElement;
      };
    }
    else {
      return (src, dest) => {
        dest[key] = src[index];
      };
    }
  }
}
// Extracts the element type from an array type; `never` for non-arrays.
type ArrayElementType<T> = T extends Array<infer U> ? U : never;
/**
 * Maps each array-valued property of `T` to a `CustomResultSet` over that
 * property's element type.
 */
export type CustomResultSets<T> = {
  [P in keyof T]: CustomResultSet<ArrayElementType<T[P]>>;
}
/**
 * Binds every result set in `reader` to the `@qlTable`-decorated properties of
 * `type`, returning one bound `CustomResultSet` per property.
 *
 * @param reader Reader over the parsed BQRS file's result sets.
 * @param type Constructor whose properties are decorated with `@qlTable`.
 * @throws Error if `type` declares no table bindings, a result set matches no
 * property, two result sets match the same property, or a declared property
 * matches no result set.
 */
export function createCustomResultSets<T>(reader: ResultSetsReader, type: { new(): T }):
  CustomResultSets<T> {
  const tableProperties: TableProperty[] | undefined = Reflect.getMetadata(tablePropertiesSymbol, type.prototype);
  if (tableProperties === undefined) {
    throw new Error(`Type '${type.toString()}' does not have any properties decorated with '@table'.`);
  }
  const customResultSets: Partial<CustomResultSets<T>> = {};
  // Track bound properties so duplicate and missing bindings can be reported.
  const boundProperties = new Set<PropertyKey>();
  for (const resultSet of reader.resultSets) {
    const tableProperty = findPropertyForTable(resultSet.schema, tableProperties);
    if (tableProperty === undefined) {
      throw new Error(`No matching property found for result set '${resultSet.schema.name}'.`);
    }
    if (boundProperties.has(tableProperty.key)) {
      throw new Error(`Multiple result sets bound to property '${tableProperty.key.toString()}'.`);
    }
    boundProperties.add(tableProperty.key);
    customResultSets[tableProperty.key] = CustomResultSetBinder.bind(resultSet,
      tableProperty.rowType);
  }
  // Every declared table binding must have been matched by some result set.
  for (const tableProperty of tableProperties) {
    if (!boundProperties.has(tableProperty.key)) {
      throw new Error(`No matching table found for property '${tableProperty.key.toString()}'.`);
    }
  }
  return <CustomResultSets<T>>customResultSets;
}
/**
 * Finds the decorated property, if any, whose candidate table names include
 * the given result set's name. The special result-set name '#select' is
 * matched under the name 'select'.
 */
function findPropertyForTable(resultSet: ResultSetSchema, tableProperties: TableProperty[]):
  TableProperty | undefined {
  let tableName: string;
  if (resultSet.name === '#select') {
    tableName = 'select';
  }
  else {
    tableName = resultSet.name;
  }
  return tableProperties.find(property => property.tableNames.find(candidate => candidate === tableName));
}

View File

@@ -1,191 +0,0 @@
import { RandomAccessReader, StreamDigester } from 'semmle-io';
import { parseResultSetsHeader, StringPool, parseResultSetSchema, readTuples } from './bqrs-parse';
import { ResultSetsSchema, ResultSetSchema } from './bqrs-schema';
import { ColumnValue } from './bqrs-results';
/**
 * The result of parsing data from a specific file region.
 */
interface RegionResult<T> {
  /** The parsed data. */
  result: T,
  /** The exclusive end position of the parsed data in the file. */
  finalOffset: number
}
/**
 * Reads data from the specified region of the file, and parses it using the given function.
 *
 * @param file Random-access source to read from.
 * @param start Inclusive byte offset where the region begins.
 * @param end Exclusive byte offset where the region ends, or `undefined` to
 * leave the end unbounded.
 * @param parse Callback that consumes the region through a `StreamDigester`.
 * @returns The parsed value plus the offset just past the bytes actually
 * consumed, computed from the digester's final position.
 */
async function inFileRegion<T>(
  file: RandomAccessReader,
  start: number,
  end: number | undefined,
  parse: (d: StreamDigester) => Promise<T>
): Promise<RegionResult<T>> {
  const stream = file.readStream(start, end);
  try {
    const d = StreamDigester.fromChunkIterator(stream);
    const result = await parse(d);
    return {
      result: result,
      finalOffset: start + d.position
    };
  }
  finally {
    // Always release the underlying stream, even if parsing throws.
    stream.dispose();
  }
}
/**
 * A single result set in a BQRS file.
 */
export interface ResultSetReader {
  /**
   * The schema that describes the result set.
   */
  readonly schema: ResultSetSchema;
  /**
   * Reads all of the tuples in the result set.
   */
  readTuples(): AsyncIterableIterator<ColumnValue[]>;
}
/**
 * A Binary Query Result Sets ("BQRS") file.
 *
 * @remarks
 * Allows independent access to individual tables without having to parse the entire file up front.
 */
export interface ResultSetsReader {
  /** The schema describing the whole file (version, pool size, result sets). */
  readonly schema: ResultSetsSchema;
  /** One reader per result set in the file, in file order. */
  readonly resultSets: readonly ResultSetReader[];
  /** Looks up a result set by schema name; `undefined` if no such set exists. */
  findResultSetByName(name: string): ResultSetReader | undefined;
}
/**
 * Metadata for a single `ResultSet` in a BQRS file.
 * Does not contain the result tuples themselves.
 * Includes the offset and length of the tuple data in the file,
 * which can be used to read the tuples.
 */
interface ResultSetInfo {
  /** The parsed schema of the result set. */
  schema: ResultSetSchema;
  /** Byte offset of the tuple data within the file. */
  rowsOffset: number;
  /** Byte length of the tuple data. */
  rowsLength: number;
}
/**
 * Default `ResultSetReader` implementation: streams tuple data lazily from the
 * byte range recorded for this result set within the file.
 */
class ResultSetReaderImpl implements ResultSetReader {
  public readonly schema: ResultSetSchema;
  // Byte range of this result set's tuple data within the file.
  private readonly rowsOffset: number;
  private readonly rowsLength: number;
  public constructor(private readonly resultSets: ResultSetsReaderImpl, info: ResultSetInfo) {
    this.schema = info.schema;
    this.rowsOffset = info.rowsOffset;
    this.rowsLength = info.rowsLength;
  }
  /**
   * Reads all tuples by opening a stream over just this result set's rows.
   * The stream is disposed in all cases, including early exit from iteration.
   */
  public async* readTuples(): AsyncIterableIterator<ColumnValue[]> {
    const stream = this.resultSets.file.readStream(this.rowsOffset,
      this.rowsOffset + this.rowsLength);
    try {
      const d = StreamDigester.fromChunkIterator(stream);
      for await (const tuple of readTuples(d, this.schema, await this.resultSets.getStringPool())) {
        yield tuple;
      }
    }
    finally {
      stream.dispose();
    }
  }
}
/**
 * Default `ResultSetsReader` implementation.
 *
 * Construction (via `open`) parses only the file header and each result set's
 * schema, recording byte offsets; the shared string pool and the tuple data
 * are read lazily on demand.
 */
class ResultSetsReaderImpl implements ResultSetsReader {
  // Lazily-loaded shared string pool; populated on first `getStringPool` call.
  private stringPool?: StringPool = undefined;
  private readonly _resultSets: ResultSetReaderImpl[];
  private constructor(public readonly file: RandomAccessReader,
    public readonly schema: ResultSetsSchema, resultSets: ResultSetInfo[],
    private readonly stringPoolOffset: number) {
    this._resultSets = resultSets.map((info) => {
      return new ResultSetReaderImpl(this, info);
    });
  }
  public get resultSets(): readonly ResultSetReader[] {
    return this._resultSets;
  }
  /** Finds a result set by its schema name; `undefined` if absent. */
  public findResultSetByName(name: string): ResultSetReader | undefined {
    return this._resultSets.find((resultSet) => resultSet.schema.name === name);
  }
  /**
   * Returns the shared string pool, reading it from the file and caching it on
   * first use.
   */
  public async getStringPool(): Promise<StringPool> {
    if (this.stringPool === undefined) {
      const { result: stringPoolBuffer } = await inFileRegion(this.file, this.stringPoolOffset,
        this.stringPoolOffset + this.schema.stringPoolSize,
        async d => await d.read(this.schema.stringPoolSize));
      this.stringPool = new StringPool(stringPoolBuffer);
    }
    return this.stringPool;
  }
  /**
   * Opens a BQRS file: parses the header and every result set's schema,
   * recording offsets so tuple data can be streamed later.
   */
  public static async open(file: RandomAccessReader): Promise<ResultSetsReader> {
    // Parse the header of the entire BQRS file.
    const { result: header, finalOffset: stringPoolOffset } =
      await inFileRegion(file, 0, undefined, d => parseResultSetsHeader(d));
    // The header is followed by a shared string pool.
    // We have saved the offset and length of the string pool within the file,
    // so we can read it later when needed.
    // For now, skip over the string pool to reach the starting point of the first result set.
    let currentResultSetOffset = stringPoolOffset + header.stringPoolSize;
    // Parse information about each result set within the file.
    const resultSets: ResultSetInfo[] = [];
    for (let resultSetIndex = 0; resultSetIndex < header.resultSetCount; resultSetIndex++) {
      // Read the length of this result set (encoded as a single byte).
      // Note: reading length and schema together from a file region may be more efficient.
      // Reading them separately just makes it easier to compute the
      // starting offset and length of the schema.
      const { result: resultSetLength, finalOffset: resultSetSchemaOffset } =
        await inFileRegion(file, currentResultSetOffset, undefined, d => d.readLEB128UInt32());
      // Read the schema of this result set.
      const { result: resultSetSchema, finalOffset: resultSetRowsOffset } =
        await inFileRegion(file, resultSetSchemaOffset, undefined, d => parseResultSetSchema(d));
      const resultSetSchemaLength = resultSetRowsOffset - resultSetSchemaOffset;
      // The schema is followed by the tuple/row data for the result set.
      // We save the offset and length of the tuple data within the file,
      // so we can read it later when needed.
      const info: ResultSetInfo = {
        // length of result set = length of schema + length of tuple data
        // The 1 byte that encodes the length itself is not counted.
        rowsLength: resultSetLength - resultSetSchemaLength,
        rowsOffset: resultSetRowsOffset,
        schema: resultSetSchema,
      };
      resultSets.push(info);
      // Skip over the tuple data of the current result set,
      // to reach the starting offset of the next result set.
      currentResultSetOffset = info.rowsOffset + info.rowsLength;
    }
    const schema: ResultSetsSchema = {
      version: header.version,
      stringPoolSize: header.stringPoolSize,
      resultSets: resultSets.map(resultSet => resultSet.schema)
    };
    const reader = new ResultSetsReaderImpl(file, schema, resultSets, stringPoolOffset);
    return reader;
  }
}
/**
 * Opens a BQRS file for reading.
 * @param file Random-access reader over the raw BQRS file contents.
 * @returns A reader exposing the file's result sets.
 */
export function open(file: RandomAccessReader): Promise<ResultSetsReader> {
  return ResultSetsReaderImpl.open(file);
}

View File

@@ -1,209 +0,0 @@
import { decodeUInt32 } from 'leb';
import { StreamDigester } from 'semmle-io';
import { ColumnValue, RawLocationValue } from './bqrs-results';
import { ColumnSchema, ColumnType, LocationStyle, PrimitiveTypeKind, ResultSetSchema } from './bqrs-schema';
/**
* bqrs-parse.ts
* -------
*
* Parsing Binary Query Result Set files.
* See [[https://git.semmle.com/Semmle/code/tree/master/queryserver-client/src/com/semmle/api/result/BinaryQueryResultSets.java]].
*/
// Supported format version for an individual result set's schema
// (checked in `parseResultSetSchema`).
const RESULT_SET_VERSION = 1;
// Supported format version for the BQRS file header
// (checked in `parseResultSetsHeader`).
const RESULT_SETS_VERSION = 2;
// Callback invoked once per decoded tuple. The tuple array may be reused
// between calls (see `parseTuples`), so implementations must copy retained values.
export type TupleParser = (tuple: readonly ColumnValue[]) => void;
// Decoded BQRS file header fields.
export interface ResultSetsHeader {
  version: number,
  resultSetCount: number,
  stringPoolSize: number
}
/**
 * Parses the type descriptor of a single result column.
 *
 * A leading 'e' marks an entity column, followed by its primitive id type,
 * a has-label flag byte, and a location-style byte; any other character is a
 * primitive type tag on its own.
 */
async function parseResultColumnType(d: StreamDigester): Promise<ColumnType> {
  const tag = await d.readASCIIChar();
  if (tag !== 'e') {
    return { type: tag as PrimitiveTypeKind };
  }
  const primitiveType = (await d.readASCIIChar()) as PrimitiveTypeKind;
  const labelByte = await d.readByte();
  const locationStyle = await d.readByte();
  return {
    type: 'e',
    locationStyle: locationStyle,
    hasLabel: labelByte !== 0,
    primitiveType: primitiveType
  };
}
/**
 * Parses the column schemas of a result set: a LEB128 column count followed by
 * one (name, type) pair per column, in order.
 */
async function parseColumnSchema(d: StreamDigester): Promise<ColumnSchema[]> {
  const columnCount = await d.readLEB128UInt32();
  const columns: ColumnSchema[] = [];
  while (columns.length < columnCount) {
    const name = await readLengthPrefixedString(d);
    const type = await parseResultColumnType(d);
    columns.push({ name: name, type: type });
  }
  return columns;
}
/**
 * Decodes the true byte length of a string from its encoded, biased-by-one
 * length prefix.
 *
 * An encoded value of 0 would decode to -1; that case is mapped to an empty
 * (0-length) string.
 * XXX why is this a possibility? Does a '(-1)-length' string
 * (i.e. a single 0x00 byte) mean something different from a
 * 0-length string? (i.e. a single 0x01 byte)
 *
 * @param encodedLength The length-plus-one value read from the file.
 * @returns The number of bytes in the string.
 */
function getTrueStringLength(encodedLength: number): number {
  // Note: the parameter is already a `number`; the original's redundant
  // `as number` assertion has been removed.
  const stringLength = encodedLength - 1;
  return stringLength === -1 ? 0 : stringLength;
}
/**
 * Random access into a BQRS file's shared string pool.
 *
 * Each string is stored as a LEB128-encoded (length + 1) prefix followed by
 * its UTF-8 bytes; see `getTrueStringLength` for the length bias.
 */
export class StringPool {
  public constructor(private readonly buffer: Buffer) {
  }
  /** Reads the string starting at the given byte offset within the pool. */
  public getString(offset: number): string {
    //TODO: Memoize?
    const { value: encodedStringLength, nextIndex } = decodeUInt32(this.buffer, offset);
    const stringLength = getTrueStringLength(encodedStringLength);
    const value = this.buffer.toString('utf8', nextIndex, nextIndex + stringLength);
    return value;
  }
}
/**
 * Parses the BQRS file header: format version, number of result sets, and the
 * size in bytes of the shared string pool.
 * @throws Error if the file's version is not `RESULT_SETS_VERSION`.
 */
export async function parseResultSetsHeader(d: StreamDigester): Promise<ResultSetsHeader> {
  const version = await d.readLEB128UInt32();
  if (version !== RESULT_SETS_VERSION) {
    throw new Error(`Mismatched binary query results version. Got '${version}', but expected '${RESULT_SETS_VERSION}'.`);
  }
  // Field order on disk: result-set count first, then pool size.
  const resultSetCount = await d.readLEB128UInt32();
  const stringPoolSize = await d.readLEB128UInt32();
  return { version, stringPoolSize, resultSetCount };
}
/**
 * Reads a string stored as a LEB128 (length + 1) prefix followed by its UTF-8
 * bytes.
 */
async function readLengthPrefixedString(d: StreamDigester): Promise<string> {
  const stringLength = getTrueStringLength(await d.readLEB128UInt32());
  return await d.readUTF8String(stringLength);
}
/**
 * Parses the schema of a single result set: a version check, the set's name,
 * its tuple count, and the per-column schemas.
 * @throws Error if the result set's version is not `RESULT_SET_VERSION`.
 */
export async function parseResultSetSchema(d: StreamDigester): Promise<ResultSetSchema> {
  const version = await d.readLEB128UInt32();
  if (version !== RESULT_SET_VERSION) {
    throw new Error(`Mismatched binary query result version. Got '${version}', but expected '${RESULT_SET_VERSION}'.`);
  }
  const name = await readLengthPrefixedString(d);
  const tupleCount = await d.readLEB128UInt32();
  const columns = await parseColumnSchema(d);
  return { version, name, tupleCount, columns };
}
/**
 * Reads a string-pool offset from the stream and resolves it to the string it
 * references.
 */
async function parseString(d: StreamDigester, pool: StringPool): Promise<string> {
  const offset = await d.readLEB128UInt32();
  return pool.getString(offset);
}
/**
 * Parses a location value encoded in the given style.
 *
 * @param d Stream positioned at the location data.
 * @param t The location style recorded in the column's schema.
 * @param pool String pool used to resolve file names and location strings.
 * @returns The decoded location, or `undefined` for `LocationStyle.None`.
 * @throws Error for `WholeFile` (whole-file locations are encoded as string
 * locations in BQRS files) or for an unrecognized style.
 */
async function parseLocation(d: StreamDigester, t: LocationStyle, pool: StringPool):
  Promise<RawLocationValue | undefined> {
  switch (t) {
    case LocationStyle.None: return undefined;
    case LocationStyle.String: return { t, loc: await parseString(d, pool) };
    case LocationStyle.FivePart: {
      // Five fields: file name (pool string), then start/end line and column
      // as LEB128 integers.
      const file = await parseString(d, pool);
      const lineStart = await d.readLEB128UInt32();
      const colStart = await d.readLEB128UInt32();
      const lineEnd = await d.readLEB128UInt32();
      const colEnd = await d.readLEB128UInt32();
      return { t, file, lineStart, colStart, lineEnd, colEnd };
    }
    case LocationStyle.WholeFile:
      throw new Error('Whole-file locations should appear as string locations in BQRS files.');
  }
  throw new Error(`Unknown Location Style ${t}`);
}
/**
 * Parses a primitive column value of the given type kind.
 *
 * Type tags handled: 's' pool string, 'b' boolean byte, 'i' int32,
 * 'f' little-endian double, 'd' date, 'u' read as a pool string like 's'
 * (presumably a URL — TODO confirm against the BQRS spec).
 */
async function parsePrimitiveColumn(d: StreamDigester, type: PrimitiveTypeKind,
  pool: StringPool): Promise<ColumnValue> {
  switch (type) {
    case 's': return await parseString(d, pool);
    case 'b': return await d.readByte() !== 0;
    case 'i': {
      const unsignedValue = await d.readLEB128UInt32();
      // `int` column values are encoded as 32-bit unsigned LEB128, but are really 32-bit two's
      // complement signed integers. The easiest way to reinterpret from an unsigned int32 to a
      // signed int32 in JavaScript is to use a bitwise operator, which does this coercion on its
      // operands automatically.
      return unsignedValue | 0;
    }
    case 'f': return await d.readDoubleLE();
    case 'd': return await d.readDate();
    case 'u': return await parseString(d, pool);
    default: throw new Error(`Unknown primitive column type '${type}'.`);
  }
}
/**
 * Parses a single column value of the given type, resolving string references
 * against the shared string pool.
 *
 * Entity ('e') columns consist of a primitive id, a label (only when the
 * schema declares `hasLabel`), and a location (per the schema's
 * `locationStyle`); all other columns are bare primitives.
 */
export async function parseColumn(d: StreamDigester, t: ColumnType, pool: StringPool):
  Promise<ColumnValue> {
  if (t.type === 'e') {
    // `const`: the id is never reassigned after being read.
    const primitive = await parsePrimitiveColumn(d, t.primitiveType, pool);
    const label = t.hasLabel ? await parseString(d, pool) : undefined;
    const loc = await parseLocation(d, t.locationStyle, pool);
    return {
      id: <number | string>primitive,
      label: label,
      location: loc
    };
  }
  else {
    return parsePrimitiveColumn(d, t.type, pool);
  }
}
/**
 * Lazily yields each tuple of the result set described by `schema`,
 * decoding column values in stream order.
 */
export async function* readTuples(d: StreamDigester, schema: ResultSetSchema,
  stringPool: StringPool): AsyncIterableIterator<ColumnValue[]> {
  const { tupleCount, columns } = schema;
  for (let row = 0; row < tupleCount; row++) {
    const tuple: ColumnValue[] = [];
    for (const column of columns) {
      tuple.push(await parseColumn(d, column.type, stringPool));
    }
    yield tuple;
  }
}
/**
 * Decodes every tuple of the result set described by `schema`, passing each
 * one to `tupleParser` as it is read.
 *
 * A single scratch tuple is reused across all rows (matching the original
 * behavior), so callers must copy it if they want to retain the values.
 */
export async function parseTuples(d: StreamDigester, schema: ResultSetSchema,
  stringPool: StringPool, tupleParser: TupleParser): Promise<void> {
  const { tupleCount, columns } = schema;
  // Pre-filled with zeros so the array's type can stay `ColumnValue[]`
  // rather than `(ColumnValue | undefined)[]`.
  const scratch: ColumnValue[] = Array(columns.length).fill(0);
  for (let row = 0; row < tupleCount; row++) {
    for (let col = 0; col < columns.length; col++) {
      scratch[col] = await parseColumn(d, columns[col].type, stringPool);
    }
    tupleParser(scratch);
  }
}

View File

@@ -1,114 +0,0 @@
import { LocationStyle } from "./bqrs-schema";
// See https://help.semmle.com/QL/learn-ql/ql/locations.html for how these are used.

/** A source location with file plus start/end line and column numbers. */
export interface FivePartLocation {
  t: LocationStyle.FivePart;
  file: string;
  lineStart: number;
  colStart: number;
  lineEnd: number;
  colEnd: number;
}

/** A location carried as an opaque string (e.g. a `file://...` URL). */
export interface StringLocation {
  t: LocationStyle.String;
  loc: string;
}

/**
 * A location representing an entire filesystem resource.
 * This is usually derived from a `StringLocation` with the entire filesystem URL.
 */
export interface WholeFileLocation {
  t: LocationStyle.WholeFile;
  file: string;
}

// The location forms that actually occur in BQRS files.
export type RawLocationValue = FivePartLocation | StringLocation;
// All location forms, including the client-side WholeFile variant.
export type LocationValue = RawLocationValue | WholeFileLocation;
/** A location that may be resolved to a source code element. */
export type ResolvableLocationValue = FivePartLocation | WholeFileLocation;
/**
 * The CodeQL filesystem libraries use this pattern in `getURL()` predicates
 * to describe the location of an entire filesystem resource.
 * Such locations appear as `StringLocation`s instead of `FivePartLocation`s.
 *
 * Folder resources also get similar URLs, but with the `folder` scheme.
 * They are deliberately ignored here, since there is no suitable location to show the user.
 *
 * Captures: (1) file path, (2) start line, (3) start column, (4) end line, (5) end column.
 */
const FILE_LOCATION_REGEX = /file:\/\/(.+):([0-9]+):([0-9]+):([0-9]+):([0-9]+)/;
/**
* Gets a resolvable source file location for the specified `LocationValue`, if possible.
* @param loc The location to test.
*/
/**
 * Gets a resolvable source file location for the specified `LocationValue`, if possible.
 * Returns `undefined` when the location is missing or empty.
 * @param loc The location to test.
 */
export function tryGetResolvableLocation(
  loc: LocationValue | undefined
): ResolvableLocationValue | undefined {
  if (loc === undefined) {
    return undefined;
  }
  switch (loc.t) {
    case LocationStyle.FivePart:
      // An empty file name means there is nothing to resolve.
      return loc.file ? loc : undefined;
    case LocationStyle.WholeFile:
      return loc.file ? loc : undefined;
    case LocationStyle.String:
      return loc.loc ? tryGetLocationFromString(loc) : undefined;
    default:
      return undefined;
  }
}
/**
 * Attempts to parse a `file://path:line:col:line:col` URL out of a string
 * location, converting it to a whole-file or five-part location.
 * Returns `undefined` when the string does not match the expected pattern.
 */
export function tryGetLocationFromString(
  loc: StringLocation
): ResolvableLocationValue | undefined {
  const matches = FILE_LOCATION_REGEX.exec(loc.loc);
  if (!matches || !matches[1]) {
    return undefined;
  }
  if (isWholeFileMatch(matches)) {
    return {
      t: LocationStyle.WholeFile,
      file: matches[1],
    };
  }
  return {
    t: LocationStyle.FivePart,
    file: matches[1],
    lineStart: Number(matches[2]),
    colStart: Number(matches[3]),
    lineEnd: Number(matches[4]),
    colEnd: Number(matches[5]),
  };
}
/**
 * True when all four line/column capture groups of a `FILE_LOCATION_REGEX`
 * match are "0", i.e. the URL designates an entire file.
 */
function isWholeFileMatch(matches: RegExpExecArray): boolean {
  return [2, 3, 4, 5].every(i => matches[i] === "0");
}
/** A decoded entity column value: an id plus optional label and location. */
export interface ElementBase {
  id: PrimitiveColumnValue;
  label?: string;
  location?: LocationValue;
}

/** An element whose label is known to be present. */
export interface ElementWithLabel extends ElementBase {
  label: string;
}

/** An element whose location is known to be present. */
export interface ElementWithLocation extends ElementBase {
  location: LocationValue;
}

/** An element with both label and location present. */
export interface Element extends Required<ElementBase> {}

// The value types a primitive BQRS column can decode to.
export type PrimitiveColumnValue = string | boolean | number | Date;
// Any decoded column value: a primitive or an entity.
export type ColumnValue = PrimitiveColumnValue | ElementBase;

View File

@@ -1,66 +0,0 @@
/** How an entity column's location is encoded in a BQRS file. */
export enum LocationStyle {
  None = 0,
  String,
  FivePart,
  /** Does not occur in BQRS files. Used only to distinguish whole-file locations in client code. */
  WholeFile
}
/**
 * A primitive type (any type other than an element).
 * 's' = string, 'b' = boolean, 'i' = int32, 'f' = float (double),
 * 'd' = date, 'u' = decoded as a pooled string (presumably a URL — confirm).
 */
export type PrimitiveTypeKind = 's' | 'b' | 'i' | 'f' | 'd' | 'u';
/**
 * A kind of type that a column may have ('e' marks an element/entity column).
 */
export type ColumnTypeKind = PrimitiveTypeKind | 'e';
/**
 * A column type that is a primitive type.
 */
export interface PrimitiveColumnType {
  type: PrimitiveTypeKind;
}
/**
 * A column type that is an element type.
 */
export interface ElementColumnType {
  type: 'e';
  // The type used to encode the element's id.
  primitiveType: PrimitiveTypeKind;
  // How (and whether) the element's location is encoded.
  locationStyle: LocationStyle;
  // Whether a label string follows the id in the encoding.
  hasLabel: boolean;
}
/**
 * The type of a column.
 */
export type ColumnType = PrimitiveColumnType | ElementColumnType;
/**
 * The schema describing a single column in a `ResultSet`.
 */
export interface ColumnSchema {
  readonly name: string;
  readonly type: ColumnType;
}
/**
 * The schema of a single `ResultSet` in a BQRS file.
 */
export interface ResultSetSchema {
  readonly version: number;
  readonly name: string;
  readonly tupleCount: number;
  readonly columns: readonly ColumnSchema[];
}
/**
 * The schema describing the contents of a BQRS file.
 */
export interface ResultSetsSchema {
  readonly version: number,
  readonly stringPoolSize: number,
  readonly resultSets: readonly ResultSetSchema[]
}

View File

@@ -1,18 +0,0 @@
import { ResultSetSchema } from './bqrs-schema';
import { StreamDigester, ChunkIterator } from 'semmle-io';
import { parseResultSetsHeader, StringPool, parseResultSetSchema, parseTuples, TupleParser } from './bqrs-parse';
export async function parse(rs: ChunkIterator,
resultSetHandler: (resultSet: ResultSetSchema) => TupleParser): Promise<void> {
const d = StreamDigester.fromChunkIterator(rs);
const header = await parseResultSetsHeader(d);
const stringPool = new StringPool(await d.read(header.stringPoolSize));
for (let resultSetIndex = 0; resultSetIndex < header.resultSetCount; resultSetIndex++) {
await d.readLEB128UInt32(); // Length of result set. Unused.
const resultSetSchema = await parseResultSetSchema(d);
const tupleParser = resultSetHandler(resultSetSchema);
await parseTuples(d, resultSetSchema, stringPool, tupleParser);
}
}

View File

@@ -1,7 +0,0 @@
export * from './bqrs';
export * from './bqrs-custom';
export * from './bqrs-file';
export * from './bqrs-results';
export * from './bqrs-schema';
export * from './path-problem-query-results';
export * from './problem-query-results';

View File

@@ -1,49 +0,0 @@
import 'reflect-metadata';
import { Element } from './bqrs-results';
import { qlElement, qlString, qlTuple, qlTable } from './bqrs-custom';
import { ElementReference } from './problem-query-results';
/**
 * One alert row of a path-problem query. Column positions are bound by the
 * `@ql*` decorators from './bqrs-custom'.
 * NOTE(review): fields have no initializers, so this presumably compiles with
 * `strictPropertyInitialization` disabled — confirm tsconfig.
 */
export class PathProblemAlert {
  @qlElement(0)
  element: Element;
  @qlElement(1)
  source: Element;
  @qlElement(2)
  sink: Element;
  @qlString(3)
  message: string;
  // Remaining columns, consumed pairwise as (element, text) references.
  @qlTuple({ startColumn: 4 }, ElementReference)
  references?: ElementReference[];
}

/** One edge of the path graph: predecessor -> successor. */
export class PathProblemEdge {
  @qlElement(0)
  predecessor: Element;
  @qlElement(1)
  successor: Element;
}

/** A key/value property attached to a path-graph node. */
export class GraphProperty {
  @qlString(0)
  key: string;
  @qlString(1)
  value: string;
}

/** One node of the path graph, with optional attached properties. */
export class PathProblemNode {
  @qlElement(0)
  node: Element;
  // There can really only be zero or one of these, but until we support optional columns, we'll
  // model it as a "rest" property.
  @qlTuple({ startColumn: 1 }, GraphProperty)
  properties?: GraphProperty[];
}

/** The full decoded result of a path-problem query: alerts plus the path graph. */
export class PathProblemQueryResults {
  // The alerts table may be named either 'select' or 'problems' in the BQRS file.
  @qlTable(PathProblemAlert, { name: ['select', 'problems'] })
  problems: PathProblemAlert[];
  @qlTable(PathProblemNode)
  nodes: PathProblemNode[];
  @qlTable(PathProblemEdge)
  edges: PathProblemEdge[];
}

View File

@@ -1,24 +0,0 @@
import 'reflect-metadata';
import { Element } from './bqrs-results';
import { qlElement, qlString, qlTuple, qlTable } from './bqrs-custom';
/**
 * A (element, text) pair referenced from an alert message.
 * Column positions are bound by the `@ql*` decorators from './bqrs-custom'.
 * NOTE(review): fields have no initializers, so this presumably compiles with
 * `strictPropertyInitialization` disabled — confirm tsconfig.
 */
export class ElementReference {
  @qlElement(0)
  element: Element;
  @qlString(1)
  text: string;
}

/** One alert row of a problem query: location, message, and referenced elements. */
export class ProblemAlert {
  @qlElement(0)
  element: Element;
  @qlString(1)
  message: string;
  // Remaining columns, consumed pairwise as (element, text) references.
  @qlTuple({ startColumn: 2 }, ElementReference)
  references?: ElementReference[];
}

/** The full decoded result of a problem query. */
export class ProblemQueryResults {
  // The alerts table may be named either 'select' or 'problems' in the BQRS file.
  @qlTable(ProblemAlert, { name: ['select', 'problems'] })
  problems: ProblemAlert[];
}

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1,7 +0,0 @@
'use strict';
// Register ts-node so the gulp tasks (written in TypeScript) can be required directly.
require('ts-node').register({});
const { compileTypeScript, watchTypeScript } = require('@github/codeql-gulp-tasks');
// Default task: one-shot TypeScript build; `watchTypeScript` rebuilds on change.
exports.default = compileTypeScript;
exports.watchTypeScript = watchTypeScript;

View File

@@ -1,32 +0,0 @@
{
"name": "semmle-io-node",
"description": "I/O utilities for the Node.js runtime",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"main": "./out/index",
"files": [
"out/**",
"package.json"
],
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"fs-extra": "^8.1.0",
"semmle-io": "^0.0.1"
},
"devDependencies": {
"@types/fs-extra": "^8.0.0",
"@types/node": "^12.0.8",
"@github/codeql-gulp-tasks": "^0.0.4",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,66 +0,0 @@
import * as fs from 'fs-extra';
import { ReadStream } from 'fs-extra';
import { RandomAccessReader, StreamReader } from 'semmle-io';
/**
 * A `RandomAccessReader` backed by an open file descriptor.
 * Owns the descriptor: `dispose()` closes it, and streams created by
 * `readStream` deliberately do not (autoClose is false).
 */
export class FileReader implements RandomAccessReader {
  private _fd?: number;

  private constructor(fd: number) {
    this._fd = fd;
  }

  /** Opens `file` for reading; the returned reader owns the descriptor. */
  public static async open(file: string): Promise<FileReader> {
    return new FileReader(await fs.open(file, 'r'));
  }

  /** The underlying descriptor; throws once disposed. */
  public get fd(): number {
    const fd = this._fd;
    if (fd === undefined) {
      throw new Error('Object disposed.');
    }
    return fd;
  }

  /** Creates a stream over bytes [start, end] of the file. */
  public readStream(start?: number, end?: number): StreamReader {
    // autoClose: false — the descriptor remains owned by this FileReader.
    const stream = fs.createReadStream('', { fd: this.fd, start, end, autoClose: false });
    return new FileStreamReader(stream);
  }

  public dispose(): void {
    if (this._fd === undefined) {
      return;
    }
    fs.closeSync(this._fd);
    this._fd = undefined;
  }
}
/** Wraps a node `ReadStream` as a disposable `StreamReader`. */
class FileStreamReader implements StreamReader {
  private _stream?: ReadStream;

  public constructor(stream: ReadStream) {
    this._stream = stream;
  }

  private get stream(): ReadStream {
    if (this._stream === undefined) {
      throw new Error('Object disposed.');
    }
    return this._stream;
  }

  public [Symbol.asyncIterator](): AsyncIterator<Uint8Array> {
    return this.stream[Symbol.asyncIterator]();
  }

  // NOTE(review): this drops the reference without destroying the stream; the
  // underlying fd is owned by the FileReader (autoClose: false), so that looks
  // intentional — confirm no stream buffers need explicit release.
  public dispose(): void {
    this._stream = undefined;
  }
}

View File

@@ -1 +0,0 @@
export * from './file-reader';

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -1,7 +0,0 @@
'use strict';
// Register ts-node so the gulp tasks (written in TypeScript) can be required directly.
require('ts-node').register({});
const { compileTypeScript, watchTypeScript } = require('@github/codeql-gulp-tasks');
// Default task: one-shot TypeScript build; `watchTypeScript` rebuilds on change.
exports.default = compileTypeScript;
exports.watchTypeScript = watchTypeScript;

View File

@@ -1,30 +0,0 @@
{
"name": "semmle-io",
"description": "I/O utilities",
"author": "GitHub",
"private": true,
"version": "0.0.1",
"publisher": "GitHub",
"repository": {
"type": "git",
"url": "https://github.com/github/vscode-codeql"
},
"main": "./out/index",
"files": [
"out/**",
"package.json"
],
"scripts": {
"build": "gulp",
"format": "tsfmt -r"
},
"dependencies": {
"leb": "^0.3.0"
},
"devDependencies": {
"@types/node": "^12.0.8",
"@github/codeql-gulp-tasks": "^0.0.4",
"typescript-config": "^0.0.1",
"typescript-formatter": "^7.2.2"
}
}

View File

@@ -1,303 +0,0 @@
import * as leb from 'leb';
/**
* digester.ts
* -----------
*
* A wrapper around node's stream and buffer types to make reading the
* binary formats used by the QL query server a little more uniform
* and convenient.
*
* This works around limitations in using Node streams (whether 'paused' or 'flowing')
* with async/await. This code can be simplified if there is a convenient library for doing this.
*/
/** The sequence of byte chunks that a `StreamDigester` consumes. */
export type ChunkIterator = AsyncIterable<Uint8Array>;

/** Creates the error thrown when a read runs past the final chunk. */
function endOfStreamError(): Error {
  const message = 'Attempt to read past end of stream.';
  return new Error(message);
}

// Shared zero-length placeholder for the current chunk and the seam buffer.
const emptyBuffer = Buffer.alloc(0);
/**
 * A class to read and decode bytes out of a sequence of `Buffer`s provided by an async iterator.
 *
 * Values that straddle a chunk boundary are assembled in a growable "seam
 * buffer" so that each decode routine always sees a contiguous `Buffer`.
 */
export class StreamDigester {
  // Initial allocation size for the seam buffer; it grows by doubling.
  private static readonly MIN_SEAM_BUFFER_LENGTH = 256;
  // The chunk currently being consumed.
  private currentChunk = emptyBuffer;
  // Scratch buffer used to join bytes spanning a chunk boundary.
  private seamBuffer = emptyBuffer;
  // True once the chunk iterator has been exhausted.
  private done = false;
  // Absolute stream position of byte 0 of `currentChunk`.
  private positionOfCurrentChunk = 0;
  // Read cursor within `currentChunk`.
  private offsetInCurrentChunk = 0;
  private readonly chunks: AsyncIterator<Uint8Array>;
  private constructor(chunks: ChunkIterator) {
    this.chunks = chunks[Symbol.asyncIterator]();
  }
  /**
   * Create a `StreamDigester`.
   *
   * @param chunks An async iterator that provides the sequence of buffers from which to read.
   */
  public static fromChunkIterator(chunks: ChunkIterator): StreamDigester {
    return new StreamDigester(chunks);
  }
  /** Create a `StreamDigester` reading from a single, already-complete buffer. */
  public static fromBuffer(buffer: Buffer): StreamDigester {
    return new StreamDigester(StreamDigester.singleChunkIterator(buffer));
  }
  /** The absolute stream position (in bytes) of the next byte to be read. */
  public get position(): number {
    return this.positionOfCurrentChunk + this.offsetInCurrentChunk;
  }
  // Adapts a single buffer to the async-iterator-of-chunks form.
  private static async* singleChunkIterator(chunk: Buffer): AsyncIterableIterator<Buffer> {
    yield chunk;
  }
  /**
   * Gets the next chunk from the iterator, throwing an exception if there are no more chunks
   * available.
   */
  private async readNextChunk(): Promise<void> {
    if (this.done) {
      throw endOfStreamError();
    }
    const { value, done } = await this.chunks.next();
    if (done) {
      this.done = true;
      throw endOfStreamError();
    }
    this.positionOfCurrentChunk += this.currentChunk.length;
    // Normalize to a Buffer so the decode helpers can use Buffer APIs.
    this.currentChunk = Buffer.from(value);
    this.offsetInCurrentChunk = 0;
  }
  // Number of unread bytes remaining in the current chunk.
  private get bytesLeftInCurrentChunk(): number {
    return this.currentChunk.length - this.offsetInCurrentChunk;
  }
  /**
   * Returns a seam buffer at least `byteCount` bytes long whose first
   * `previousByteCount` bytes are the pending bytes currently at
   * `previousBuffer[previousOffset..]`, growing the shared seam buffer
   * geometrically when necessary.
   */
  private getSeamBuffer(byteCount: number, previousBuffer: Buffer, previousOffset: number,
    previousByteCount: number): Buffer {
    if (this.seamBuffer.length < byteCount) {
      // Start at double the current length, or `MIN_SEAM_BUFFER_LENGTH`, whichever is larger.
      let newSeamBufferLength = Math.max(this.seamBuffer.length * 2,
        StreamDigester.MIN_SEAM_BUFFER_LENGTH);
      while (newSeamBufferLength < byteCount) {
        newSeamBufferLength *= 2;
      }
      this.seamBuffer = Buffer.alloc(newSeamBufferLength);
    }
    if (previousByteCount > 0) {
      if (previousBuffer === this.seamBuffer) {
        if (previousOffset !== 0) {
          // Pending bytes are already in the seam buffer; slide them to the front.
          previousBuffer.copyWithin(0, previousOffset, previousOffset + previousByteCount);
        }
      }
      else {
        // Pending bytes live elsewhere (old seam buffer or a chunk); copy them in.
        previousBuffer.copy(this.seamBuffer, 0, previousOffset, previousOffset + previousByteCount);
      }
    }
    return this.seamBuffer;
  }
  // Copies bytes into `buffer[start..end)`, pulling further chunks as needed.
  private async fillBuffer(buffer: Buffer, start: number, end: number): Promise<void> {
    let destOffset = start;
    do {
      const bytesToCopy = Math.min(end - destOffset, this.bytesLeftInCurrentChunk);
      this.currentChunk.copy(buffer, destOffset, this.offsetInCurrentChunk,
        this.offsetInCurrentChunk + bytesToCopy);
      this.offsetInCurrentChunk += bytesToCopy;
      destOffset += bytesToCopy;
      if (destOffset < end) {
        await this.readNextChunk();
      }
    } while (destOffset < end);
  }
  /**
   * Implements an async read that spans multiple buffers.
   *
   * @param canReadFunc Callback function to determine how many bytes are required to complete the
   * read operation.
   * @param readFunc Callback function to read the requested data from a `Buffer`.
   */
  private async readAcrossSeam<T>(
    canReadFunc: (buffer: Buffer, start: number, byteCount: number) => number,
    readFunc: (buffer: Buffer, offset: number) => T): Promise<T> {
    // We'll copy the leftover bytes from the current chunk, plus whatever bytes we need from
    // subsequent chunks, into a "seam buffer", and read the value from there.
    let buffer = this.currentChunk;
    let offsetInBuffer = this.offsetInCurrentChunk;
    let discardedBytes = 0;
    let bytesInBuffer = this.bytesLeftInCurrentChunk;
    while (true) {
      // Ask how many bytes we need to complete the read.
      const requestedBytes = canReadFunc(buffer, offsetInBuffer, bytesInBuffer);
      if (requestedBytes <= bytesInBuffer) {
        // We have enough bytes. Do the read.
        const value = readFunc(buffer, offsetInBuffer);
        // Advance the cursor past only the bytes consumed from the CURRENT
        // chunk; bytes copied out of earlier chunks are already gone.
        this.offsetInCurrentChunk += requestedBytes - discardedBytes;
        return value;
      }
      // We've already copied all the bytes from our current chunk to the seam buffer. We're
      // guaranteed to wind up reading all of those bytes, and will need at least one more byte, so
      // get the next chunk.
      await this.readNextChunk();
      // Create or extend our seam buffer to hold the additional bytes we're about to read.
      const bytesToCopy = Math.min(requestedBytes - bytesInBuffer, this.bytesLeftInCurrentChunk);
      buffer = this.getSeamBuffer(bytesInBuffer + bytesToCopy, buffer, offsetInBuffer, bytesInBuffer);
      discardedBytes = bytesInBuffer;
      offsetInBuffer = 0;
      // Append the new bytes to our seam buffer.
      this.currentChunk.copy(buffer, bytesInBuffer, 0, bytesToCopy);
      bytesInBuffer += bytesToCopy;
    }
  }
  /**
   * Reads a variable-length value: asks `canReadFunc` how many bytes are
   * needed, taking the fast path when the current chunk already has them.
   */
  private readVariableSize<T>(
    canReadFunc: (buffer: Buffer, start: number, byteCount: number) => number,
    readFunc: (buffer: Buffer, offset: number) => T): Promise<T> {
    const requestedBytes = canReadFunc(this.currentChunk, this.offsetInCurrentChunk,
      this.bytesLeftInCurrentChunk);
    if (requestedBytes <= this.bytesLeftInCurrentChunk) {
      const value = readFunc(this.currentChunk, this.offsetInCurrentChunk);
      this.offsetInCurrentChunk += requestedBytes;
      return Promise.resolve(value);
    }
    else {
      return this.readAcrossSeam(canReadFunc, readFunc);
    }
  }
  // Slow path for fixed-size reads that straddle a chunk boundary.
  private readKnownSizeAcrossSeam<T>(byteCount: number,
    readFunc: (buffer: Buffer, offset: number) => T): Promise<T> {
    return this.readAcrossSeam((_buffer, _offset, _availableByteCount) => byteCount, readFunc);
  }
  // Reads a fixed-size value, taking the fast path when it fits in the current chunk.
  private readKnownSize<T>(byteCount: number, readFunc: (buffer: Buffer, offset: number) => T):
    Promise<T> {
    if (this.bytesLeftInCurrentChunk >= byteCount) {
      // We have enough data. Just read it directly.
      const value = readFunc(this.currentChunk, this.offsetInCurrentChunk);
      this.offsetInCurrentChunk += byteCount;
      return Promise.resolve(value);
    }
    else {
      return this.readKnownSizeAcrossSeam(byteCount, readFunc);
    }
  }
  /**
   * Read a leb128-encoded unsigned 32-bit number
   * [https://en.wikipedia.org/wiki/LEB128]
   */
  public readLEB128UInt32(): Promise<number> {
    return this.readVariableSize(canDecodeLEB128UInt32, decodeLEB128UInt32);
  }
  /**
   * Read a single byte.
   */
  public readByte(): Promise<number> {
    return this.readKnownSize(1, (buffer, offset) => buffer[offset]);
  }
  /**
   * Read a single ASCII character as a string.
   */
  public async readASCIIChar(): Promise<string> {
    return String.fromCodePoint(await this.readByte());
  }
  /**
   * Read the specified number of bytes.
   *
   * @param byteCount Number of bytes to read.
   */
  public async read(byteCount: number): Promise<Buffer> {
    const buffer = Buffer.alloc(byteCount);
    await this.fillBuffer(buffer, 0, byteCount);
    return buffer;
  }
  /**
   * Read a `Date` encoded as an 8-byte sequence.
   */
  public readDate(): Promise<Date> {
    return this.readKnownSize(8, decodeDate);
  }
  /**
   * Read a little-endian 64-bit IEEE floating-point number.
   */
  public readDoubleLE(): Promise<number> {
    return this.readKnownSize(8, (buffer, offset) => buffer.readDoubleLE(offset));
  }
  /**
   * Read a UTF-8 encoded string.
   * @param byteCount Length of encoded string in bytes.
   */
  public readUTF8String(byteCount: number): Promise<string> {
    return this.readKnownSize(byteCount, (buffer, offset) =>
      buffer.toString('utf8', offset, offset + byteCount));
  }
}
/**
 * Decodes a `Date` from the 8-byte packed little-endian representation used
 * in BQRS files. Bit layout (low 32-bit word first):
 *   low:  day[31:27] hours[26:22] minutes[21:16] seconds[15:10] ms[9:0]
 *   high: year[28:4] month[3:0]
 * NOTE(review): the 4-bit month field is passed straight to `new Date`, whose
 * month argument is 0-based — assumed the encoder uses the same convention.
 */
function decodeDate(buffer: Buffer, offset: number): Date {
  const low = buffer.readUInt32LE(offset);
  const high = buffer.readUInt32LE(offset + 4);
  return new Date(
    (high & 0x1ffffff0) >> 4,   // year
    high & 0x0000000f,          // month
    (low & 0xf8000000) >>> 27,  // day (>>> keeps the top bits unsigned)
    (low & 0x07c00000) >> 22,   // hours
    (low & 0x003f0000) >> 16,   // minutes
    (low & 0x0000fc00) >> 10,   // seconds
    low & 0x000003ff            // milliseconds
  );
}
/**
 * The longest possible byte length of a correctly encoded LEB128 UInt32:
 * `0xff 0xff 0xff 0xff 0x0f` (5 bytes).
 */
const MAX_ENCODED_UINT32_LENGTH = 5;
/**
 * Determines how many bytes are needed to decode the LEB128 UInt32 starting at
 * `offset`, inspecting at most the `byteCount` bytes available in `buffer`.
 *
 * @returns The exact encoded length if a terminating byte (high bit clear) is
 * found; otherwise `MAX_ENCODED_UINT32_LENGTH` to request more input.
 * @throws If the maximum number of bytes is already available but none of them
 * terminates the encoding, i.e. the data is not a valid LEB128 UInt32.
 */
function canDecodeLEB128UInt32(buffer: Buffer, offset: number, byteCount: number): number {
  const endOffset = offset + Math.min(byteCount, MAX_ENCODED_UINT32_LENGTH);
  for (let byteOffset = offset; byteOffset < endOffset; byteOffset++) {
    // A clear continuation bit (0x80) marks the final byte of the encoding.
    if ((buffer[byteOffset] & 0x80) === 0) {
      return (byteOffset - offset) + 1;
    }
  }
  // Bug fix: the original condition `(endOffset - offset) > MAX_ENCODED_UINT32_LENGTH`
  // could never hold (the subtraction is clamped by Math.min above), so invalid
  // encodings were never rejected here. If the maximum number of bytes has been
  // scanned without finding a terminator, the encoding is invalid.
  if (byteCount >= MAX_ENCODED_UINT32_LENGTH) {
    throw new Error('Invalid LEB128 encoding.');
  }
  return MAX_ENCODED_UINT32_LENGTH;
}
/**
 * Decodes the LEB128 UInt32 at `offset`; assumes the available length was
 * already validated by `canDecodeLEB128UInt32`.
 */
function decodeLEB128UInt32(buffer: Buffer, offset: number): number {
  return leb.decodeUInt32(buffer, offset).value;
}

View File

@@ -1,2 +0,0 @@
export * from './digester';
export * from './random-access-reader';

View File

@@ -1,8 +0,0 @@
/** An async stream of byte chunks that must be explicitly disposed when done. */
export interface StreamReader extends AsyncIterable<Uint8Array> {
  dispose(): void;
}

/** A byte source supporting independent streaming reads of arbitrary ranges. */
export interface RandomAccessReader {
  // Bounds are optional; semantics presumably follow fs.createReadStream
  // (end inclusive) — confirm against implementations.
  readStream(start?: number, end?: number): StreamReader;
  dispose(): void;
}

View File

@@ -1,3 +0,0 @@
{
"extends": "./node_modules/typescript-config/lib.tsconfig.json"
}

View File

@@ -35,18 +35,6 @@
"projectFolder": "tools/build-tasks", "projectFolder": "tools/build-tasks",
"versionPolicyName": "utilities" "versionPolicyName": "utilities"
}, },
{
"packageName": "semmle-bqrs",
"projectFolder": "lib/semmle-bqrs"
},
{
"packageName": "semmle-io",
"projectFolder": "lib/semmle-io"
},
{
"packageName": "semmle-io-node",
"projectFolder": "lib/semmle-io-node"
},
{ {
"packageName": "@github/codeql-vscode-utils", "packageName": "@github/codeql-vscode-utils",
"projectFolder": "lib/semmle-vscode-utils", "projectFolder": "lib/semmle-vscode-utils",