Merge branch 'main' into redsun82/rust-skip-unexpanded-in-libraries

Paolo Tranquilli
2025-06-02 09:27:56 +02:00
133 changed files with 6283 additions and 2766 deletions

View File

@@ -66,6 +66,6 @@ jobs:
# Update existing stubs in the repo with the freshly generated ones
mv "$STUBS_PATH/output/stubs/_frameworks" ql/test/resources/stubs/
git status
codeql test run --threads=0 --search-path "${{ github.workspace }}" --check-databases --check-undefined-labels --check-repeated-labels --check-redefined-labels --consistency-queries ql/consistency-queries -- ql/test/library-tests/dataflow/flowsources/aspremote
codeql test run --threads=0 --search-path "${{ github.workspace }}" --check-databases --check-diff-informed --check-undefined-labels --check-repeated-labels --check-redefined-labels --consistency-queries ql/consistency-queries -- ql/test/library-tests/dataflow/flowsources/aspremote
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -35,6 +35,6 @@ jobs:
key: ruby-qltest
- name: Run QL tests
run: |
codeql test run --dynamic-join-order-mode=all --threads=0 --ram 50000 --search-path "${{ github.workspace }}" --check-databases --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
codeql test run --dynamic-join-order-mode=all --threads=0 --ram 50000 --search-path "${{ github.workspace }}" --check-databases --check-diff-informed --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -68,6 +68,6 @@ jobs:
key: ruby-qltest
- name: Run QL tests
run: |
codeql test run --threads=0 --ram 50000 --search-path "${{ github.workspace }}" --check-databases --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
codeql test run --threads=0 --ram 50000 --search-path "${{ github.workspace }}" --check-databases --check-diff-informed --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -32,7 +32,7 @@ jobs:
if: github.repository_owner == 'github'
strategy:
matrix:
runner: [ubuntu-latest, macos-13-xlarge]
runner: [ubuntu-latest, macos-15-xlarge]
fail-fast: false
runs-on: ${{ matrix.runner }}
steps:

Cargo.lock (generated)
View File

@@ -242,6 +242,8 @@ version = "1.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7"
dependencies = [
"jobserver",
"libc",
"shlex",
]
@@ -390,6 +392,7 @@ dependencies = [
"tree-sitter",
"tree-sitter-json",
"tree-sitter-ql",
"zstd",
]
[[package]]
@@ -983,6 +986,15 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jobserver"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
dependencies = [
"libc",
]
[[package]]
name = "jod-thread"
version = "0.1.2"
@@ -1334,6 +1346,12 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "pkg-config"
version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
[[package]]
name = "portable-atomic"
version = "1.11.0"
@@ -3027,3 +3045,31 @@ dependencies = [
"quote",
"syn",
]
[[package]]
name = "zstd"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
dependencies = [
"zstd-safe",
]
[[package]]
name = "zstd-safe"
version = "7.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
dependencies = [
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "2.0.15+zstd.1.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237"
dependencies = [
"cc",
"pkg-config",
]

View File

@@ -124,6 +124,7 @@ use_repo(
"vendor_ts__tree-sitter-ruby-0.23.1",
"vendor_ts__triomphe-0.1.14",
"vendor_ts__ungrammar-1.16.1",
"vendor_ts__zstd-0.13.3",
)
http_archive = use_repo_rule("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

View File

@@ -0,0 +1,9 @@
{
"strategy": "dca",
"language": "cpp",
"targets": [
{ "name": "openssl", "with-sources": false, "with-sinks": false },
{ "name": "sqlite", "with-sources": false, "with-sinks": false }
],
"destination": "cpp/ql/lib/ext/generated"
}

View File

@@ -0,0 +1,6 @@
---
category: feature
---
* Added the `pCmdLine` arguments of `WinMain` and `wWinMain` as local flow sources.
* Added source models for `GetCommandLineA`, `GetCommandLineW`, `GetEnvironmentStringsA`, `GetEnvironmentStringsW`, `GetEnvironmentVariableA`, and `GetEnvironmentVariableW`.
* Added summary models for `CommandLineToArgvA` and `CommandLineToArgvW`.
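For illustration, a minimal C++ sketch of the code pattern these new models cover, using the same simplified declarations as the added test files; `sink` is a hypothetical taint sink and is not part of the Windows API:

// Minimal stand-ins for the Windows headers, as in the new tests.
using LPSTR = char*;
void sink(char*);                       // hypothetical taint sink
LPSTR GetCommandLineA();
LPSTR* CommandLineToArgvA(LPSTR, int*);

int WinMain(void* hInstance, void* hPrevInstance, char* pCmdLine, int nCmdShow) {
    sink(pCmdLine);                     // pCmdLine is now a local flow source
    int argc;
    // GetCommandLineA is a modeled source; the CommandLineToArgvA summary
    // propagates its taint to the returned argument vector.
    LPSTR* argv = CommandLineToArgvA(GetCommandLineA(), &argc);
    sink(argv[0]);
    return 0;
}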

View File

@@ -0,0 +1,4 @@
---
category: feature
---
* Added local flow source models for `ReadFile`, `ReadFileEx`, `MapViewOfFile`, `MapViewOfFile2`, `MapViewOfFile3`, `MapViewOfFile3FromApp`, `MapViewOfFileEx`, `MapViewOfFileFromApp`, `MapViewOfFileNuma2`, and `NtReadFile`.
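A short sketch of the pattern these source models flag, again with simplified prototypes rather than the real Windows headers; `sink` and `readAndMap` are hypothetical names used only for illustration:

using HANDLE = void*;
using DWORD = unsigned long;
using BOOL = int;
using SIZE_T = decltype(sizeof(0));
void sink(char*);                                  // hypothetical taint sink
BOOL ReadFile(HANDLE, void*, DWORD, DWORD*, void*);
void* MapViewOfFile(HANDLE, DWORD, DWORD, DWORD, SIZE_T);

void readAndMap(HANDLE hFile, HANDLE hMapping) {
    char buffer[1024];
    DWORD bytesRead;
    ReadFile(hFile, buffer, sizeof(buffer), &bytesRead, nullptr);
    sink(buffer);                                  // the output buffer is a local flow source

    char* view = static_cast<char*>(MapViewOfFile(hMapping, 0x0004, 0, 0, 0));
    sink(view);                                    // the mapped view is a local flow source
}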

View File

@@ -1,4 +1,4 @@
# partial model of the Boost::Asio network library
# partial model of the Boost::Asio network library
extensions:
- addsTo:
pack: codeql/cpp-all

View File

@@ -0,0 +1,35 @@
# partial model of windows system calls
extensions:
- addsTo:
pack: codeql/cpp-all
extensible: sourceModel
data: # namespace, type, subtypes, name, signature, ext, output, kind, provenance
# processenv.h
- ["", "", False, "GetCommandLineA", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "GetCommandLineW", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "GetEnvironmentStringsA", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "GetEnvironmentStringsW", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "GetEnvironmentVariableA", "", "", "Argument[*1]", "local", "manual"]
- ["", "", False, "GetEnvironmentVariableW", "", "", "Argument[*1]", "local", "manual"]
# fileapi.h
- ["", "", False, "ReadFile", "", "", "Argument[*1]", "local", "manual"]
- ["", "", False, "ReadFileEx", "", "", "Argument[*1]", "local", "manual"]
# memoryapi.h
- ["", "", False, "MapViewOfFile", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "MapViewOfFile2", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "MapViewOfFile3", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "MapViewOfFile3FromApp", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "MapViewOfFileEx", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "MapViewOfFileFromApp", "", "", "ReturnValue[*]", "local", "manual"]
- ["", "", False, "MapViewOfFileNuma2", "", "", "ReturnValue[*]", "local", "manual"]
# ntifs.h
- ["", "", False, "NtReadFile", "", "", "Argument[*5]", "local", "manual"]
- addsTo:
pack: codeql/cpp-all
extensible: summaryModel
data: # namespace, type, subtypes, name, signature, ext, input, output, kind, provenance
# shellapi.h
- ["", "", False, "CommandLineToArgvA", "", "", "Argument[*0]", "ReturnValue[**]", "taint", "manual"]
- ["", "", False, "CommandLineToArgvW", "", "", "Argument[*0]", "ReturnValue[**]", "taint", "manual"]
# fileapi.h
- ["", "", False, "ReadFileEx", "", "", "Argument[*3].Field[@hEvent]", "Argument[4].Parameter[*2].Field[@hEvent]", "value", "manual"]

View File

@@ -17,7 +17,7 @@ dependencies:
codeql/xml: ${workspace}
dataExtensions:
- ext/*.model.yml
- ext/generated/*.model.yml
- ext/generated/**/*.model.yml
- ext/deallocation/*.model.yml
- ext/allocation/*.model.yml
warnOnImplicitThis: true

View File

@@ -55,7 +55,7 @@ private class LocalModelSource extends LocalFlowSource {
}
/**
* A local data flow source that the `argv` parameter to `main` or `wmain`.
* A local data flow source that is the `argv` parameter to `main` or `wmain`.
*/
private class ArgvSource extends LocalFlowSource {
ArgvSource() {
@@ -69,6 +69,21 @@ private class ArgvSource extends LocalFlowSource {
override string getSourceType() { result = "a command-line argument" }
}
/**
* A local data flow source that is the `pCmdLine` parameter to `WinMain` or `wWinMain`.
*/
private class CmdLineSource extends LocalFlowSource {
CmdLineSource() {
exists(Function main, Parameter pCmdLine |
main.hasGlobalName(["WinMain", "wWinMain"]) and
main.getParameter(2) = pCmdLine and
this.asParameter(1) = pCmdLine
)
}
override string getSourceType() { result = "a command-line" }
}
/**
* A remote data flow source that is defined through 'models as data'.
*/

View File

@@ -124,7 +124,11 @@ module IRTest {
/** Common data flow configuration to be used by tests. */
module IRTestAllocationConfig implements DataFlow::ConfigSig {
private import semmle.code.cpp.security.FlowSources
predicate isSource(DataFlow::Node source) {
source instanceof FlowSource
or
source.asExpr().(FunctionCall).getTarget().getName() = "source"
or
source.asIndirectExpr(1).(FunctionCall).getTarget().getName() = "indirect_source"

View File

@@ -337,3 +337,4 @@ irFlow
| true_upon_entry.cpp:70:11:70:16 | call to source | true_upon_entry.cpp:78:8:78:8 | x |
| true_upon_entry.cpp:83:11:83:16 | call to source | true_upon_entry.cpp:86:8:86:8 | x |
| true_upon_entry.cpp:98:11:98:16 | call to source | true_upon_entry.cpp:105:8:105:8 | x |
| winmain.cpp:4:57:4:64 | *pCmdLine | winmain.cpp:6:8:6:16 | * ... |

View File

@@ -0,0 +1,9 @@
void sink(char);
void sink(char*);
int WinMain(void *hInstance, void *hPrevInstance, char *pCmdLine, int nCmdShow) { // $ ast-def=hInstance ast-def=hPrevInstance ast-def=pCmdLine ir-def=*hInstance ir-def=*hPrevInstance ir-def=*pCmdLine
sink(pCmdLine);
sink(*pCmdLine); // $ ir
return 0;
}

View File

@@ -10,33 +10,95 @@ edges
| asio_streams.cpp:100:44:100:62 | call to buffer | asio_streams.cpp:103:29:103:39 | *send_buffer | provenance | Sink:MaD:6 |
| asio_streams.cpp:100:64:100:71 | *send_str | asio_streams.cpp:56:18:56:23 | [summary param] *0 in buffer | provenance | |
| asio_streams.cpp:100:64:100:71 | *send_str | asio_streams.cpp:100:44:100:62 | call to buffer | provenance | MaD:10 |
| test.cpp:4:5:4:17 | [summary param] 0 in ymlStepManual | test.cpp:4:5:4:17 | [summary] to write: ReturnValue in ymlStepManual | provenance | MaD:23489 |
| test.cpp:5:5:5:20 | [summary param] 0 in ymlStepGenerated | test.cpp:5:5:5:20 | [summary] to write: ReturnValue in ymlStepGenerated | provenance | MaD:23490 |
| test.cpp:6:5:6:27 | [summary param] 0 in ymlStepManual_with_body | test.cpp:6:5:6:27 | [summary] to write: ReturnValue in ymlStepManual_with_body | provenance | MaD:23491 |
| test.cpp:4:5:4:17 | [summary param] 0 in ymlStepManual | test.cpp:4:5:4:17 | [summary] to write: ReturnValue in ymlStepManual | provenance | MaD:23508 |
| test.cpp:5:5:5:20 | [summary param] 0 in ymlStepGenerated | test.cpp:5:5:5:20 | [summary] to write: ReturnValue in ymlStepGenerated | provenance | MaD:23509 |
| test.cpp:6:5:6:27 | [summary param] 0 in ymlStepManual_with_body | test.cpp:6:5:6:27 | [summary] to write: ReturnValue in ymlStepManual_with_body | provenance | MaD:23510 |
| test.cpp:7:47:7:52 | value2 | test.cpp:7:64:7:69 | value2 | provenance | |
| test.cpp:7:64:7:69 | value2 | test.cpp:7:5:7:30 | *ymlStepGenerated_with_body | provenance | |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:10:10:10:18 | call to ymlSource | provenance | Src:MaD:23487 |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:14:10:14:10 | x | provenance | Sink:MaD:23488 |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:10:10:10:18 | call to ymlSource | provenance | Src:MaD:23506 |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:14:10:14:10 | x | provenance | Sink:MaD:23507 |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:17:24:17:24 | x | provenance | |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:21:27:21:27 | x | provenance | |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:25:35:25:35 | x | provenance | |
| test.cpp:10:10:10:18 | call to ymlSource | test.cpp:32:41:32:41 | x | provenance | |
| test.cpp:17:10:17:22 | call to ymlStepManual | test.cpp:17:10:17:22 | call to ymlStepManual | provenance | |
| test.cpp:17:10:17:22 | call to ymlStepManual | test.cpp:18:10:18:10 | y | provenance | Sink:MaD:23488 |
| test.cpp:17:10:17:22 | call to ymlStepManual | test.cpp:18:10:18:10 | y | provenance | Sink:MaD:23507 |
| test.cpp:17:24:17:24 | x | test.cpp:4:5:4:17 | [summary param] 0 in ymlStepManual | provenance | |
| test.cpp:17:24:17:24 | x | test.cpp:17:10:17:22 | call to ymlStepManual | provenance | MaD:23489 |
| test.cpp:17:24:17:24 | x | test.cpp:17:10:17:22 | call to ymlStepManual | provenance | MaD:23508 |
| test.cpp:21:10:21:25 | call to ymlStepGenerated | test.cpp:21:10:21:25 | call to ymlStepGenerated | provenance | |
| test.cpp:21:10:21:25 | call to ymlStepGenerated | test.cpp:22:10:22:10 | z | provenance | Sink:MaD:23488 |
| test.cpp:21:10:21:25 | call to ymlStepGenerated | test.cpp:22:10:22:10 | z | provenance | Sink:MaD:23507 |
| test.cpp:21:27:21:27 | x | test.cpp:5:5:5:20 | [summary param] 0 in ymlStepGenerated | provenance | |
| test.cpp:21:27:21:27 | x | test.cpp:21:10:21:25 | call to ymlStepGenerated | provenance | MaD:23490 |
| test.cpp:21:27:21:27 | x | test.cpp:21:10:21:25 | call to ymlStepGenerated | provenance | MaD:23509 |
| test.cpp:25:11:25:33 | call to ymlStepManual_with_body | test.cpp:25:11:25:33 | call to ymlStepManual_with_body | provenance | |
| test.cpp:25:11:25:33 | call to ymlStepManual_with_body | test.cpp:26:10:26:11 | y2 | provenance | Sink:MaD:23488 |
| test.cpp:25:11:25:33 | call to ymlStepManual_with_body | test.cpp:26:10:26:11 | y2 | provenance | Sink:MaD:23507 |
| test.cpp:25:35:25:35 | x | test.cpp:6:5:6:27 | [summary param] 0 in ymlStepManual_with_body | provenance | |
| test.cpp:25:35:25:35 | x | test.cpp:25:11:25:33 | call to ymlStepManual_with_body | provenance | MaD:23491 |
| test.cpp:25:35:25:35 | x | test.cpp:25:11:25:33 | call to ymlStepManual_with_body | provenance | MaD:23510 |
| test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body | test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body | provenance | |
| test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body | test.cpp:33:10:33:11 | z2 | provenance | Sink:MaD:23488 |
| test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body | test.cpp:33:10:33:11 | z2 | provenance | Sink:MaD:23507 |
| test.cpp:32:41:32:41 | x | test.cpp:7:47:7:52 | value2 | provenance | |
| test.cpp:32:41:32:41 | x | test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body | provenance | |
| windows.cpp:17:8:17:25 | [summary param] *0 in CommandLineToArgvA | windows.cpp:17:8:17:25 | [summary] to write: ReturnValue[**] in CommandLineToArgvA | provenance | MaD:341 |
| windows.cpp:22:15:22:29 | *call to GetCommandLineA | windows.cpp:22:15:22:29 | *call to GetCommandLineA | provenance | Src:MaD:325 |
| windows.cpp:22:15:22:29 | *call to GetCommandLineA | windows.cpp:24:8:24:11 | * ... | provenance | |
| windows.cpp:22:15:22:29 | *call to GetCommandLineA | windows.cpp:27:36:27:38 | *cmd | provenance | |
| windows.cpp:27:17:27:34 | **call to CommandLineToArgvA | windows.cpp:27:17:27:34 | **call to CommandLineToArgvA | provenance | |
| windows.cpp:27:17:27:34 | **call to CommandLineToArgvA | windows.cpp:30:8:30:15 | * ... | provenance | |
| windows.cpp:27:36:27:38 | *cmd | windows.cpp:17:8:17:25 | [summary param] *0 in CommandLineToArgvA | provenance | |
| windows.cpp:27:36:27:38 | *cmd | windows.cpp:27:17:27:34 | **call to CommandLineToArgvA | provenance | MaD:341 |
| windows.cpp:34:17:34:38 | *call to GetEnvironmentStringsA | windows.cpp:34:17:34:38 | *call to GetEnvironmentStringsA | provenance | Src:MaD:327 |
| windows.cpp:34:17:34:38 | *call to GetEnvironmentStringsA | windows.cpp:36:10:36:13 | * ... | provenance | |
| windows.cpp:39:36:39:38 | GetEnvironmentVariableA output argument | windows.cpp:41:10:41:13 | * ... | provenance | Src:MaD:329 |
| windows.cpp:90:6:90:15 | [summary param] *3 in ReadFileEx [*hEvent] | windows.cpp:90:6:90:15 | [summary] read: Argument[*3].Field[*hEvent] in ReadFileEx | provenance | |
| windows.cpp:90:6:90:15 | [summary param] *3 in ReadFileEx [hEvent] | windows.cpp:90:6:90:15 | [summary] read: Argument[*3].Field[hEvent] in ReadFileEx | provenance | |
| windows.cpp:90:6:90:15 | [summary] read: Argument[*3].Field[*hEvent] in ReadFileEx | windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2].Field[*hEvent] in ReadFileEx | provenance | MaD:343 |
| windows.cpp:90:6:90:15 | [summary] read: Argument[*3].Field[hEvent] in ReadFileEx | windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2].Field[hEvent] in ReadFileEx | provenance | MaD:343 |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [*hEvent] | windows.cpp:147:16:147:27 | *lpOverlapped [*hEvent] | provenance | |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [hEvent] | windows.cpp:157:16:157:27 | *lpOverlapped [hEvent] | provenance | |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2].Field[*hEvent] in ReadFileEx | windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [*hEvent] | provenance | |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2].Field[hEvent] in ReadFileEx | windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [hEvent] | provenance | |
| windows.cpp:147:16:147:27 | *lpOverlapped [*hEvent] | windows.cpp:149:42:149:53 | *lpOverlapped [*hEvent] | provenance | |
| windows.cpp:149:18:149:62 | *hEvent | windows.cpp:149:18:149:62 | *hEvent | provenance | |
| windows.cpp:149:18:149:62 | *hEvent | windows.cpp:151:8:151:14 | * ... | provenance | |
| windows.cpp:149:42:149:53 | *lpOverlapped [*hEvent] | windows.cpp:149:18:149:62 | *hEvent | provenance | |
| windows.cpp:149:42:149:53 | *lpOverlapped [*hEvent] | windows.cpp:149:56:149:61 | *hEvent | provenance | |
| windows.cpp:149:56:149:61 | *hEvent | windows.cpp:149:18:149:62 | *hEvent | provenance | |
| windows.cpp:157:16:157:27 | *lpOverlapped [hEvent] | windows.cpp:159:35:159:46 | *lpOverlapped [hEvent] | provenance | |
| windows.cpp:159:12:159:55 | hEvent | windows.cpp:159:12:159:55 | hEvent | provenance | |
| windows.cpp:159:12:159:55 | hEvent | windows.cpp:160:8:160:8 | c | provenance | |
| windows.cpp:159:35:159:46 | *lpOverlapped [hEvent] | windows.cpp:159:12:159:55 | hEvent | provenance | |
| windows.cpp:159:35:159:46 | *lpOverlapped [hEvent] | windows.cpp:159:12:159:55 | hEvent | provenance | |
| windows.cpp:168:35:168:40 | ReadFile output argument | windows.cpp:170:10:170:16 | * ... | provenance | Src:MaD:331 |
| windows.cpp:177:23:177:28 | ReadFileEx output argument | windows.cpp:179:10:179:16 | * ... | provenance | Src:MaD:332 |
| windows.cpp:189:21:189:26 | ReadFile output argument | windows.cpp:190:5:190:56 | *... = ... | provenance | Src:MaD:331 |
| windows.cpp:190:5:190:14 | *overlapped [post update] [*hEvent] | windows.cpp:192:53:192:63 | *& ... [*hEvent] | provenance | |
| windows.cpp:190:5:190:56 | *... = ... | windows.cpp:190:5:190:14 | *overlapped [post update] [*hEvent] | provenance | |
| windows.cpp:192:53:192:63 | *& ... [*hEvent] | windows.cpp:90:6:90:15 | [summary param] *3 in ReadFileEx [*hEvent] | provenance | |
| windows.cpp:198:21:198:26 | ReadFile output argument | windows.cpp:199:5:199:57 | ... = ... | provenance | Src:MaD:331 |
| windows.cpp:199:5:199:14 | *overlapped [post update] [hEvent] | windows.cpp:201:53:201:63 | *& ... [hEvent] | provenance | |
| windows.cpp:199:5:199:57 | ... = ... | windows.cpp:199:5:199:14 | *overlapped [post update] [hEvent] | provenance | |
| windows.cpp:201:53:201:63 | *& ... [hEvent] | windows.cpp:90:6:90:15 | [summary param] *3 in ReadFileEx [hEvent] | provenance | |
| windows.cpp:209:84:209:89 | NtReadFile output argument | windows.cpp:211:10:211:16 | * ... | provenance | Src:MaD:340 |
| windows.cpp:286:23:286:35 | *call to MapViewOfFile | windows.cpp:286:23:286:35 | *call to MapViewOfFile | provenance | Src:MaD:333 |
| windows.cpp:286:23:286:35 | *call to MapViewOfFile | windows.cpp:287:20:287:52 | *pMapView | provenance | |
| windows.cpp:287:20:287:52 | *pMapView | windows.cpp:289:10:289:16 | * ... | provenance | |
| windows.cpp:293:23:293:36 | *call to MapViewOfFile2 | windows.cpp:293:23:293:36 | *call to MapViewOfFile2 | provenance | Src:MaD:334 |
| windows.cpp:293:23:293:36 | *call to MapViewOfFile2 | windows.cpp:294:20:294:52 | *pMapView | provenance | |
| windows.cpp:294:20:294:52 | *pMapView | windows.cpp:296:10:296:16 | * ... | provenance | |
| windows.cpp:302:23:302:36 | *call to MapViewOfFile3 | windows.cpp:302:23:302:36 | *call to MapViewOfFile3 | provenance | Src:MaD:335 |
| windows.cpp:302:23:302:36 | *call to MapViewOfFile3 | windows.cpp:303:20:303:52 | *pMapView | provenance | |
| windows.cpp:303:20:303:52 | *pMapView | windows.cpp:305:10:305:16 | * ... | provenance | |
| windows.cpp:311:23:311:43 | *call to MapViewOfFile3FromApp | windows.cpp:311:23:311:43 | *call to MapViewOfFile3FromApp | provenance | Src:MaD:336 |
| windows.cpp:311:23:311:43 | *call to MapViewOfFile3FromApp | windows.cpp:312:20:312:52 | *pMapView | provenance | |
| windows.cpp:312:20:312:52 | *pMapView | windows.cpp:314:10:314:16 | * ... | provenance | |
| windows.cpp:318:23:318:37 | *call to MapViewOfFileEx | windows.cpp:318:23:318:37 | *call to MapViewOfFileEx | provenance | Src:MaD:337 |
| windows.cpp:318:23:318:37 | *call to MapViewOfFileEx | windows.cpp:319:20:319:52 | *pMapView | provenance | |
| windows.cpp:319:20:319:52 | *pMapView | windows.cpp:321:10:321:16 | * ... | provenance | |
| windows.cpp:325:23:325:42 | *call to MapViewOfFileFromApp | windows.cpp:325:23:325:42 | *call to MapViewOfFileFromApp | provenance | Src:MaD:338 |
| windows.cpp:325:23:325:42 | *call to MapViewOfFileFromApp | windows.cpp:326:20:326:52 | *pMapView | provenance | |
| windows.cpp:326:20:326:52 | *pMapView | windows.cpp:328:10:328:16 | * ... | provenance | |
| windows.cpp:332:23:332:40 | *call to MapViewOfFileNuma2 | windows.cpp:332:23:332:40 | *call to MapViewOfFileNuma2 | provenance | Src:MaD:339 |
| windows.cpp:332:23:332:40 | *call to MapViewOfFileNuma2 | windows.cpp:333:20:333:52 | *pMapView | provenance | |
| windows.cpp:333:20:333:52 | *pMapView | windows.cpp:335:10:335:16 | * ... | provenance | |
nodes
| asio_streams.cpp:56:18:56:23 | [summary param] *0 in buffer | semmle.label | [summary param] *0 in buffer |
| asio_streams.cpp:56:18:56:23 | [summary] to write: ReturnValue in buffer | semmle.label | [summary] to write: ReturnValue in buffer |
@@ -78,9 +140,85 @@ nodes
| test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body | semmle.label | call to ymlStepGenerated_with_body |
| test.cpp:32:41:32:41 | x | semmle.label | x |
| test.cpp:33:10:33:11 | z2 | semmle.label | z2 |
| windows.cpp:17:8:17:25 | [summary param] *0 in CommandLineToArgvA | semmle.label | [summary param] *0 in CommandLineToArgvA |
| windows.cpp:17:8:17:25 | [summary] to write: ReturnValue[**] in CommandLineToArgvA | semmle.label | [summary] to write: ReturnValue[**] in CommandLineToArgvA |
| windows.cpp:22:15:22:29 | *call to GetCommandLineA | semmle.label | *call to GetCommandLineA |
| windows.cpp:22:15:22:29 | *call to GetCommandLineA | semmle.label | *call to GetCommandLineA |
| windows.cpp:24:8:24:11 | * ... | semmle.label | * ... |
| windows.cpp:27:17:27:34 | **call to CommandLineToArgvA | semmle.label | **call to CommandLineToArgvA |
| windows.cpp:27:17:27:34 | **call to CommandLineToArgvA | semmle.label | **call to CommandLineToArgvA |
| windows.cpp:27:36:27:38 | *cmd | semmle.label | *cmd |
| windows.cpp:30:8:30:15 | * ... | semmle.label | * ... |
| windows.cpp:34:17:34:38 | *call to GetEnvironmentStringsA | semmle.label | *call to GetEnvironmentStringsA |
| windows.cpp:34:17:34:38 | *call to GetEnvironmentStringsA | semmle.label | *call to GetEnvironmentStringsA |
| windows.cpp:36:10:36:13 | * ... | semmle.label | * ... |
| windows.cpp:39:36:39:38 | GetEnvironmentVariableA output argument | semmle.label | GetEnvironmentVariableA output argument |
| windows.cpp:41:10:41:13 | * ... | semmle.label | * ... |
| windows.cpp:90:6:90:15 | [summary param] *3 in ReadFileEx [*hEvent] | semmle.label | [summary param] *3 in ReadFileEx [*hEvent] |
| windows.cpp:90:6:90:15 | [summary param] *3 in ReadFileEx [hEvent] | semmle.label | [summary param] *3 in ReadFileEx [hEvent] |
| windows.cpp:90:6:90:15 | [summary] read: Argument[*3].Field[*hEvent] in ReadFileEx | semmle.label | [summary] read: Argument[*3].Field[*hEvent] in ReadFileEx |
| windows.cpp:90:6:90:15 | [summary] read: Argument[*3].Field[hEvent] in ReadFileEx | semmle.label | [summary] read: Argument[*3].Field[hEvent] in ReadFileEx |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [*hEvent] | semmle.label | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [*hEvent] |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [hEvent] | semmle.label | [summary] to write: Argument[4].Parameter[*2] in ReadFileEx [hEvent] |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2].Field[*hEvent] in ReadFileEx | semmle.label | [summary] to write: Argument[4].Parameter[*2].Field[*hEvent] in ReadFileEx |
| windows.cpp:90:6:90:15 | [summary] to write: Argument[4].Parameter[*2].Field[hEvent] in ReadFileEx | semmle.label | [summary] to write: Argument[4].Parameter[*2].Field[hEvent] in ReadFileEx |
| windows.cpp:147:16:147:27 | *lpOverlapped [*hEvent] | semmle.label | *lpOverlapped [*hEvent] |
| windows.cpp:149:18:149:62 | *hEvent | semmle.label | *hEvent |
| windows.cpp:149:18:149:62 | *hEvent | semmle.label | *hEvent |
| windows.cpp:149:42:149:53 | *lpOverlapped [*hEvent] | semmle.label | *lpOverlapped [*hEvent] |
| windows.cpp:149:56:149:61 | *hEvent | semmle.label | *hEvent |
| windows.cpp:151:8:151:14 | * ... | semmle.label | * ... |
| windows.cpp:157:16:157:27 | *lpOverlapped [hEvent] | semmle.label | *lpOverlapped [hEvent] |
| windows.cpp:159:12:159:55 | hEvent | semmle.label | hEvent |
| windows.cpp:159:12:159:55 | hEvent | semmle.label | hEvent |
| windows.cpp:159:35:159:46 | *lpOverlapped [hEvent] | semmle.label | *lpOverlapped [hEvent] |
| windows.cpp:160:8:160:8 | c | semmle.label | c |
| windows.cpp:168:35:168:40 | ReadFile output argument | semmle.label | ReadFile output argument |
| windows.cpp:170:10:170:16 | * ... | semmle.label | * ... |
| windows.cpp:177:23:177:28 | ReadFileEx output argument | semmle.label | ReadFileEx output argument |
| windows.cpp:179:10:179:16 | * ... | semmle.label | * ... |
| windows.cpp:189:21:189:26 | ReadFile output argument | semmle.label | ReadFile output argument |
| windows.cpp:190:5:190:14 | *overlapped [post update] [*hEvent] | semmle.label | *overlapped [post update] [*hEvent] |
| windows.cpp:190:5:190:56 | *... = ... | semmle.label | *... = ... |
| windows.cpp:192:53:192:63 | *& ... [*hEvent] | semmle.label | *& ... [*hEvent] |
| windows.cpp:198:21:198:26 | ReadFile output argument | semmle.label | ReadFile output argument |
| windows.cpp:199:5:199:14 | *overlapped [post update] [hEvent] | semmle.label | *overlapped [post update] [hEvent] |
| windows.cpp:199:5:199:57 | ... = ... | semmle.label | ... = ... |
| windows.cpp:201:53:201:63 | *& ... [hEvent] | semmle.label | *& ... [hEvent] |
| windows.cpp:209:84:209:89 | NtReadFile output argument | semmle.label | NtReadFile output argument |
| windows.cpp:211:10:211:16 | * ... | semmle.label | * ... |
| windows.cpp:286:23:286:35 | *call to MapViewOfFile | semmle.label | *call to MapViewOfFile |
| windows.cpp:286:23:286:35 | *call to MapViewOfFile | semmle.label | *call to MapViewOfFile |
| windows.cpp:287:20:287:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:289:10:289:16 | * ... | semmle.label | * ... |
| windows.cpp:293:23:293:36 | *call to MapViewOfFile2 | semmle.label | *call to MapViewOfFile2 |
| windows.cpp:293:23:293:36 | *call to MapViewOfFile2 | semmle.label | *call to MapViewOfFile2 |
| windows.cpp:294:20:294:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:296:10:296:16 | * ... | semmle.label | * ... |
| windows.cpp:302:23:302:36 | *call to MapViewOfFile3 | semmle.label | *call to MapViewOfFile3 |
| windows.cpp:302:23:302:36 | *call to MapViewOfFile3 | semmle.label | *call to MapViewOfFile3 |
| windows.cpp:303:20:303:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:305:10:305:16 | * ... | semmle.label | * ... |
| windows.cpp:311:23:311:43 | *call to MapViewOfFile3FromApp | semmle.label | *call to MapViewOfFile3FromApp |
| windows.cpp:311:23:311:43 | *call to MapViewOfFile3FromApp | semmle.label | *call to MapViewOfFile3FromApp |
| windows.cpp:312:20:312:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:314:10:314:16 | * ... | semmle.label | * ... |
| windows.cpp:318:23:318:37 | *call to MapViewOfFileEx | semmle.label | *call to MapViewOfFileEx |
| windows.cpp:318:23:318:37 | *call to MapViewOfFileEx | semmle.label | *call to MapViewOfFileEx |
| windows.cpp:319:20:319:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:321:10:321:16 | * ... | semmle.label | * ... |
| windows.cpp:325:23:325:42 | *call to MapViewOfFileFromApp | semmle.label | *call to MapViewOfFileFromApp |
| windows.cpp:325:23:325:42 | *call to MapViewOfFileFromApp | semmle.label | *call to MapViewOfFileFromApp |
| windows.cpp:326:20:326:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:328:10:328:16 | * ... | semmle.label | * ... |
| windows.cpp:332:23:332:40 | *call to MapViewOfFileNuma2 | semmle.label | *call to MapViewOfFileNuma2 |
| windows.cpp:332:23:332:40 | *call to MapViewOfFileNuma2 | semmle.label | *call to MapViewOfFileNuma2 |
| windows.cpp:333:20:333:52 | *pMapView | semmle.label | *pMapView |
| windows.cpp:335:10:335:16 | * ... | semmle.label | * ... |
subpaths
| asio_streams.cpp:100:64:100:71 | *send_str | asio_streams.cpp:56:18:56:23 | [summary param] *0 in buffer | asio_streams.cpp:56:18:56:23 | [summary] to write: ReturnValue in buffer | asio_streams.cpp:100:44:100:62 | call to buffer |
| test.cpp:17:24:17:24 | x | test.cpp:4:5:4:17 | [summary param] 0 in ymlStepManual | test.cpp:4:5:4:17 | [summary] to write: ReturnValue in ymlStepManual | test.cpp:17:10:17:22 | call to ymlStepManual |
| test.cpp:21:27:21:27 | x | test.cpp:5:5:5:20 | [summary param] 0 in ymlStepGenerated | test.cpp:5:5:5:20 | [summary] to write: ReturnValue in ymlStepGenerated | test.cpp:21:10:21:25 | call to ymlStepGenerated |
| test.cpp:25:35:25:35 | x | test.cpp:6:5:6:27 | [summary param] 0 in ymlStepManual_with_body | test.cpp:6:5:6:27 | [summary] to write: ReturnValue in ymlStepManual_with_body | test.cpp:25:11:25:33 | call to ymlStepManual_with_body |
| test.cpp:32:41:32:41 | x | test.cpp:7:47:7:52 | value2 | test.cpp:7:5:7:30 | *ymlStepGenerated_with_body | test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body |
| windows.cpp:27:36:27:38 | *cmd | windows.cpp:17:8:17:25 | [summary param] *0 in CommandLineToArgvA | windows.cpp:17:8:17:25 | [summary] to write: ReturnValue[**] in CommandLineToArgvA | windows.cpp:27:17:27:34 | **call to CommandLineToArgvA |

View File

@@ -1,2 +1,19 @@
| asio_streams.cpp:87:34:87:44 | read_until output argument | remote |
| test.cpp:10:10:10:18 | call to ymlSource | local |
| windows.cpp:22:15:22:29 | *call to GetCommandLineA | local |
| windows.cpp:34:17:34:38 | *call to GetEnvironmentStringsA | local |
| windows.cpp:39:36:39:38 | GetEnvironmentVariableA output argument | local |
| windows.cpp:168:35:168:40 | ReadFile output argument | local |
| windows.cpp:177:23:177:28 | ReadFileEx output argument | local |
| windows.cpp:189:21:189:26 | ReadFile output argument | local |
| windows.cpp:192:23:192:29 | ReadFileEx output argument | local |
| windows.cpp:198:21:198:26 | ReadFile output argument | local |
| windows.cpp:201:23:201:29 | ReadFileEx output argument | local |
| windows.cpp:209:84:209:89 | NtReadFile output argument | local |
| windows.cpp:286:23:286:35 | *call to MapViewOfFile | local |
| windows.cpp:293:23:293:36 | *call to MapViewOfFile2 | local |
| windows.cpp:302:23:302:36 | *call to MapViewOfFile3 | local |
| windows.cpp:311:23:311:43 | *call to MapViewOfFile3FromApp | local |
| windows.cpp:318:23:318:37 | *call to MapViewOfFileEx | local |
| windows.cpp:325:23:325:42 | *call to MapViewOfFileFromApp | local |
| windows.cpp:332:23:332:40 | *call to MapViewOfFileNuma2 | local |

View File

@@ -5,3 +5,4 @@
| test.cpp:28:35:28:35 | 0 | test.cpp:28:11:28:33 | call to ymlStepManual_with_body |
| test.cpp:32:38:32:38 | 0 | test.cpp:32:11:32:36 | call to ymlStepGenerated_with_body |
| test.cpp:35:38:35:38 | x | test.cpp:35:11:35:36 | call to ymlStepGenerated_with_body |
| windows.cpp:27:36:27:38 | *cmd | windows.cpp:27:17:27:34 | **call to CommandLineToArgvA |

View File

@@ -3771,3 +3771,7 @@
| Dubious signature "(wchar_t *)" in summary model. |
| Dubious signature "(wchar_t, const CStringT &)" in summary model. |
| Dubious signature "(wchar_t,const CStringT &)" in summary model. |
| Unrecognized input specification "Field[****hEvent]" in summary model. |
| Unrecognized input specification "Field[***hEvent]" in summary model. |
| Unrecognized output specification "Field[****hEvent]" in summary model. |
| Unrecognized output specification "Field[***hEvent]" in summary model. |

View File

@@ -0,0 +1,337 @@
void sink(char);
void sink(char*);
void sink(char**);
using HANDLE = void*;
using DWORD = unsigned long;
using LPCH = char*;
using LPSTR = char*;
using LPCSTR = const char*;
using LPVOID = void*;
using LPDWORD = unsigned long*;
using PVOID = void*;
using ULONG_PTR = unsigned long*;
using SIZE_T = decltype(sizeof(0));
LPSTR GetCommandLineA();
LPSTR* CommandLineToArgvA(LPSTR, int*);
LPCH GetEnvironmentStringsA();
DWORD GetEnvironmentVariableA(LPCSTR, LPSTR, DWORD);
void getCommandLine() {
char* cmd = GetCommandLineA();
sink(cmd);
sink(*cmd); // $ ir
int argc;
char** argv = CommandLineToArgvA(cmd, &argc);
sink(argv);
sink(argv[1]);
sink(*argv[1]); // $ ir
}
void getEnvironment() {
char* env = GetEnvironmentStringsA();
sink(env);
sink(*env); // $ ir
char buf[1024];
GetEnvironmentVariableA("FOO", buf, sizeof(buf));
sink(buf);
sink(*buf); // $ ir
}
typedef struct _OVERLAPPED {
ULONG_PTR Internal;
ULONG_PTR InternalHigh;
union {
struct {
DWORD Offset;
DWORD OffsetHigh;
} DUMMYSTRUCTNAME;
PVOID Pointer;
} DUMMYUNIONNAME;
HANDLE hEvent;
} OVERLAPPED, *LPOVERLAPPED;
using BOOL = int;
#define FILE_MAP_READ 0x0004
using ULONG64 = unsigned long long;
using ULONG = unsigned long;
using DWORD64 = unsigned long long;
#define MEM_EXTENDED_PARAMETER_TYPE_BITS 8
typedef struct MEM_EXTENDED_PARAMETER {
struct {
DWORD64 Type : MEM_EXTENDED_PARAMETER_TYPE_BITS;
DWORD64 Reserved : 64 - MEM_EXTENDED_PARAMETER_TYPE_BITS;
} DUMMYSTRUCTNAME;
union {
DWORD64 ULong64;
PVOID Pointer;
SIZE_T Size;
HANDLE Handle;
DWORD ULong;
} DUMMYUNIONNAME;
} MEM_EXTENDED_PARAMETER, *PMEM_EXTENDED_PARAMETER;
BOOL ReadFile(
HANDLE hFile,
LPVOID lpBuffer,
DWORD nNumberOfBytesToRead,
LPDWORD lpNumberOfBytesRead,
LPOVERLAPPED lpOverlapped
);
using LPOVERLAPPED_COMPLETION_ROUTINE = void (*)(DWORD, DWORD, LPOVERLAPPED);
BOOL ReadFileEx(
HANDLE hFile,
LPVOID lpBuffer,
DWORD nNumberOfBytesToRead,
LPOVERLAPPED lpOverlapped,
LPOVERLAPPED_COMPLETION_ROUTINE lpCompletionRoutine
);
using NTSTATUS = long;
using PIO_APC_ROUTINE = void (*)(struct _DEVICE_OBJECT*, struct _IRP*, PVOID);
typedef struct _IO_STATUS_BLOCK {
union {
NTSTATUS Status;
PVOID Pointer;
} DUMMYUNIONNAME;
ULONG_PTR Information;
} IO_STATUS_BLOCK, *PIO_STATUS_BLOCK;
using LONGLONG = long long;
using LONG = long;
typedef struct _LARGE_INTEGER {
union {
struct {
ULONG LowPart;
LONG HighPart;
} DUMMYSTRUCTNAME;
LONGLONG QuadPart;
} DUMMYUNIONNAME;
} LARGE_INTEGER, *PLARGE_INTEGER;
using PULONG = unsigned long*;
NTSTATUS NtReadFile(
HANDLE FileHandle,
HANDLE Event,
PIO_APC_ROUTINE ApcRoutine,
PVOID ApcContext,
PIO_STATUS_BLOCK IoStatusBlock,
PVOID Buffer,
ULONG Length,
PLARGE_INTEGER ByteOffset,
PULONG Key
);
void FileIOCompletionRoutine(
DWORD dwErrorCode,
DWORD dwNumberOfBytesTransfered,
LPOVERLAPPED lpOverlapped
) {
char* buffer = reinterpret_cast<char*>(lpOverlapped->hEvent);
sink(buffer);
sink(*buffer); // $ MISSING: ir
}
void FileIOCompletionRoutine2(
DWORD dwErrorCode,
DWORD dwNumberOfBytesTransfered,
LPOVERLAPPED lpOverlapped
) {
char* buffer = reinterpret_cast<char*>(lpOverlapped->hEvent);
sink(buffer);
sink(*buffer); // $ ir
}
void FileIOCompletionRoutine3(
DWORD dwErrorCode,
DWORD dwNumberOfBytesTransfered,
LPOVERLAPPED lpOverlapped
) {
char c = reinterpret_cast<char>(lpOverlapped->hEvent);
sink(c); // $ ir
}
void readFile(HANDLE hFile) {
{
char buffer[1024];
DWORD bytesRead;
OVERLAPPED overlapped;
BOOL result = ReadFile(hFile, buffer, sizeof(buffer), &bytesRead, &overlapped);
sink(buffer);
sink(*buffer); // $ ir
}
{
char buffer[1024];
OVERLAPPED overlapped;
overlapped.hEvent = reinterpret_cast<HANDLE>(buffer);
ReadFileEx(hFile, buffer, sizeof(buffer) - 1, &overlapped, FileIOCompletionRoutine);
sink(buffer);
sink(*buffer); // $ ir
char* p = reinterpret_cast<char*>(overlapped.hEvent);
sink(p);
sink(*p); // $ MISSING: ir
}
{
char buffer[1024];
OVERLAPPED overlapped;
ReadFile(hFile, buffer, sizeof(buffer), nullptr, nullptr);
overlapped.hEvent = reinterpret_cast<HANDLE>(buffer);
char buffer2[1024];
ReadFileEx(hFile, buffer2, sizeof(buffer2) - 1, &overlapped, FileIOCompletionRoutine2);
}
{
char buffer[1024];
OVERLAPPED overlapped;
ReadFile(hFile, buffer, sizeof(buffer), nullptr, nullptr);
overlapped.hEvent = reinterpret_cast<HANDLE>(*buffer);
char buffer2[1024];
ReadFileEx(hFile, buffer2, sizeof(buffer2) - 1, &overlapped, FileIOCompletionRoutine3);
}
{
char buffer[1024];
IO_STATUS_BLOCK ioStatusBlock;
LARGE_INTEGER byteOffset;
ULONG key;
NTSTATUS status = NtReadFile(hFile, nullptr, nullptr, nullptr, &ioStatusBlock, buffer, sizeof(buffer), &byteOffset, &key);
sink(buffer);
sink(*buffer); // $ ir
}
}
LPVOID MapViewOfFile(
HANDLE hFileMappingObject,
DWORD dwDesiredAccess,
DWORD dwFileOffsetHigh,
DWORD dwFileOffsetLow,
SIZE_T dwNumberOfBytesToMap
);
PVOID MapViewOfFile2(
HANDLE FileMappingHandle,
HANDLE ProcessHandle,
ULONG64 Offset,
PVOID BaseAddress,
SIZE_T ViewSize,
ULONG AllocationType,
ULONG PageProtection
);
PVOID MapViewOfFile3(
HANDLE FileMapping,
HANDLE Process,
PVOID BaseAddress,
ULONG64 Offset,
SIZE_T ViewSize,
ULONG AllocationType,
ULONG PageProtection,
MEM_EXTENDED_PARAMETER *ExtendedParameters,
ULONG ParameterCount
);
PVOID MapViewOfFile3FromApp(
HANDLE FileMapping,
HANDLE Process,
PVOID BaseAddress,
ULONG64 Offset,
SIZE_T ViewSize,
ULONG AllocationType,
ULONG PageProtection,
MEM_EXTENDED_PARAMETER *ExtendedParameters,
ULONG ParameterCount
);
LPVOID MapViewOfFileEx(
HANDLE hFileMappingObject,
DWORD dwDesiredAccess,
DWORD dwFileOffsetHigh,
DWORD dwFileOffsetLow,
SIZE_T dwNumberOfBytesToMap,
LPVOID lpBaseAddress
);
PVOID MapViewOfFileFromApp(
HANDLE hFileMappingObject,
ULONG DesiredAccess,
ULONG64 FileOffset,
SIZE_T NumberOfBytesToMap
);
PVOID MapViewOfFileNuma2(
HANDLE FileMappingHandle,
HANDLE ProcessHandle,
ULONG64 Offset,
PVOID BaseAddress,
SIZE_T ViewSize,
ULONG AllocationType,
ULONG PageProtection,
ULONG PreferredNode
);
void mapViewOfFile(HANDLE hMapFile) {
{
LPVOID pMapView = MapViewOfFile(hMapFile, FILE_MAP_READ, 0, 0, 0);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
{
LPVOID pMapView = MapViewOfFile2(hMapFile, nullptr, 0, nullptr, 0, 0, 0);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
{
MEM_EXTENDED_PARAMETER extendedParams;
LPVOID pMapView = MapViewOfFile3(hMapFile, nullptr, 0, 0, 0, 0, 0, &extendedParams, 1);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
{
MEM_EXTENDED_PARAMETER extendedParams;
LPVOID pMapView = MapViewOfFile3FromApp(hMapFile, nullptr, 0, 0, 0, 0, 0, &extendedParams, 1);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
{
LPVOID pMapView = MapViewOfFileEx(hMapFile, FILE_MAP_READ, 0, 0, 0, nullptr);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
{
LPVOID pMapView = MapViewOfFileFromApp(hMapFile, FILE_MAP_READ, 0, 0);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
{
LPVOID pMapView = MapViewOfFileNuma2(hMapFile, nullptr, 0, nullptr, 0, 0, 0, 0);
char* buffer = reinterpret_cast<char*>(pMapView);
sink(buffer);
sink(*buffer); // $ ir
}
}

View File

@@ -1,6 +1,6 @@
| test.cpp:3:13:3:13 | i |
| test.cpp:3:13:3:18 | ... <? ... |
| test.cpp:3:18:3:18 | j |
| test.cpp:4:13:4:13 | i |
| test.cpp:4:13:4:18 | ... >? ... |
| test.cpp:4:13:4:18 | ... <? ... |
| test.cpp:4:18:4:18 | j |
| test.cpp:5:13:5:13 | i |
| test.cpp:5:13:5:18 | ... >? ... |
| test.cpp:5:18:5:18 | j |

View File

@@ -1,3 +1,4 @@
// semmle-extractor-options: --gnu_version 40200
void f(int i, int j) {
int k = i <? j;

View File

@@ -3,6 +3,7 @@ ql/csharp/ql/src/API Abuse/FormatInvalid.ql
ql/csharp/ql/src/API Abuse/NoDisposeCallOnLocalIDisposable.ql
ql/csharp/ql/src/Bad Practices/Control-Flow/ConstantCondition.ql
ql/csharp/ql/src/Dead Code/DeadStoreOfLocal.ql
ql/csharp/ql/src/Language Abuse/MissedReadonlyOpportunity.ql
ql/csharp/ql/src/Likely Bugs/Collections/ContainerLengthCmpOffByOne.ql
ql/csharp/ql/src/Likely Bugs/Collections/ContainerSizeCmpZero.ql
ql/csharp/ql/src/Likely Bugs/DangerousNonShortCircuitLogic.ql

View File

@@ -8,6 +8,7 @@
* @id cs/missed-readonly-modifier
* @tags maintainability
* language-features
* quality
*/
import csharp
@@ -19,13 +20,17 @@ predicate defTargetsField(AssignableDefinition def, Field f) {
predicate isReadonlyCompatibleDefinition(AssignableDefinition def, Field f) {
defTargetsField(def, f) and
(
def.getEnclosingCallable().(Constructor).getDeclaringType() = f.getDeclaringType()
def.getEnclosingCallable().(StaticConstructor).getDeclaringType() = f.getDeclaringType()
or
def.getEnclosingCallable().(InstanceConstructor).getDeclaringType() = f.getDeclaringType() and
def.getTargetAccess().(QualifiableExpr).getQualifier() instanceof ThisAccess
or
def instanceof AssignableDefinitions::InitializerDefinition
)
}
predicate canBeReadonly(Field f) {
exists(Type t | t = f.getType() | not t instanceof Struct or t.(Struct).isReadonly()) and
forex(AssignableDefinition def | defTargetsField(def, f) | isReadonlyCompatibleDefinition(def, f))
}

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The precision of the query `cs/missed-readonly-modifier` has been improved. Some false positives related to static fields and struct type fields have been removed.

View File

@@ -1,23 +1,27 @@
class MissedReadonlyOpportunity<T>
{
public int Bad1;
public T Bad2;
public int Bad1; // $ Alert
public T Bad2; // $ Alert
public Immutable Bad3; // $ Alert
public readonly int Good1;
public readonly int Good2 = 0;
public const int Good3 = 0;
public int Good4;
public readonly T Good5;
public T Good6;
public Mutable Good7;
public MissedReadonlyOpportunity(int i, T t)
{
Bad1 = i;
Bad2 = t;
Bad3 = new Immutable();
Good1 = i;
Good2 = i;
Good4 = i;
Good5 = t;
Good6 = t;
Good7 = new Mutable();
}
public void M(int i)
@@ -27,3 +31,54 @@ class MissedReadonlyOpportunity<T>
x.Good6 = false;
}
}
struct Mutable
{
private int x;
public int Mutate()
{
x = x + 1;
return x;
}
}
readonly struct Immutable { }
class Tree
{
private Tree? Parent;
private Tree? Left; // $ Alert
private readonly Tree? Right;
public Tree(Tree left, Tree right)
{
this.Left = left;
this.Right = right;
left.Parent = this;
right.Parent = this;
}
public Tree()
{
Left = null;
Right = null;
}
}
class StaticFields
{
static int X; // $ Alert
static int Y;
// Static constructor
static StaticFields()
{
X = 0;
}
// Instance constructor
public StaticFields(int y)
{
Y = y;
}
}

View File

@@ -1,3 +1,6 @@
| MissedReadonlyOpportunity.cs:3:16:3:19 | Bad1 | Field 'Bad1' can be 'readonly'. |
| MissedReadonlyOpportunity.cs:4:14:4:17 | Bad2 | Field 'Bad2' can be 'readonly'. |
| MissedReadonlyOpportunity.cs:5:22:5:25 | Bad3 | Field 'Bad3' can be 'readonly'. |
| MissedReadonlyOpportunity.cs:50:19:50:22 | Left | Field 'Left' can be 'readonly'. |
| MissedReadonlyOpportunity.cs:70:16:70:16 | X | Field 'X' can be 'readonly'. |
| MissedReadonlyOpportunityBad.cs:3:9:3:13 | Field | Field 'Field' can be 'readonly'. |

View File

@@ -1 +1,2 @@
Language Abuse/MissedReadonlyOpportunity.ql
query: Language Abuse/MissedReadonlyOpportunity.ql
postprocess: utils/test/InlineExpectationsTestQuery.ql

View File

@@ -1,6 +1,6 @@
class Bad
{
int Field;
int Field; // $ Alert
public Bad(int i)
{

View File

@@ -54,9 +54,9 @@ ql/lib/go.dbscheme.stats: ql/lib/go.dbscheme build/stats/src.stamp extractor
codeql dataset measure -o $@ build/stats/database/db-go
test: all build/testdb/check-upgrade-path
codeql test run -j0 ql/test --search-path .. --consistency-queries ql/test/consistency --compilation-cache=$(cache) --dynamic-join-order-mode=$(rtjo)
codeql test run -j0 ql/test --search-path .. --check-diff-informed --consistency-queries ql/test/consistency --compilation-cache=$(cache) --dynamic-join-order-mode=$(rtjo) --check-databases --fail-on-trap-errors --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition
# use GOOS=linux because GOOS=darwin GOARCH=386 is no longer supported
env GOOS=linux GOARCH=386 codeql$(EXE) test run -j0 ql/test/query-tests/Security/CWE-681 --search-path .. --consistency-queries ql/test/consistency --compilation-cache=$(cache) --dynamic-join-order-mode=$(rtjo)
env GOOS=linux GOARCH=386 codeql$(EXE) test run -j0 ql/test/query-tests/Security/CWE-681 --search-path .. --check-diff-informed --consistency-queries ql/test/consistency --compilation-cache=$(cache) --dynamic-join-order-mode=$(rtjo)
cd extractor; $(BAZEL) test ...
bash extractor-smoke-test/test.sh || (echo "Extractor smoke test FAILED"; exit 1)

View File

@@ -13,6 +13,7 @@ import (
"log"
"os"
"path/filepath"
"reflect"
"regexp"
"runtime"
"strconv"
@@ -773,18 +774,25 @@ func (extraction *Extraction) extractFileInfo(tw *trap.Writer, file string, isDu
var parentLbl trap.Label
for i, component := range components {
// displayPath is the same as rawPath except for root directories: if
// rawPath is "" then displayPath is "/"; if rawPath is "C:" then
// displayPath is "C:/".
var rawPath, displayPath string
if i == 0 {
if component == "" {
path = "/"
rawPath = component
if component == "" || regexp.MustCompile(`^[A-Za-z]:$`).MatchString(component) {
// Handle linux root and Windows drive letters by appending "/"
displayPath = rawPath + "/"
} else {
path = component
displayPath = rawPath
}
} else {
path = parentPath + "/" + component
rawPath = parentPath + "/" + component
displayPath = rawPath
}
if i == len(components)-1 {
lbl := tw.Labeler.FileLabelFor(file)
dbscheme.FilesTable.Emit(tw, lbl, path)
dbscheme.FilesTable.Emit(tw, lbl, displayPath)
dbscheme.ContainerParentTable.Emit(tw, parentLbl, lbl)
dbscheme.HasLocationTable.Emit(tw, lbl, emitLocation(tw, lbl, 0, 0, 0, 0))
extraction.Lock.Lock()
@@ -795,14 +803,12 @@ func (extraction *Extraction) extractFileInfo(tw *trap.Writer, file string, isDu
extraction.Lock.Unlock()
break
}
lbl := tw.Labeler.GlobalID(util.EscapeTrapSpecialChars(path) + ";folder")
dbscheme.FoldersTable.Emit(tw, lbl, path)
lbl := tw.Labeler.GlobalID(util.EscapeTrapSpecialChars(displayPath) + ";folder")
dbscheme.FoldersTable.Emit(tw, lbl, displayPath)
if i > 0 {
dbscheme.ContainerParentTable.Emit(tw, parentLbl, lbl)
}
if path != "/" {
parentPath = path
}
parentPath = rawPath
parentLbl = lbl
}
}
@@ -936,7 +942,7 @@ func emitScopeNodeInfo(tw *trap.Writer, nd ast.Node, lbl trap.Label) {
// extractExpr extracts AST information for the given expression and all its subexpressions
func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, skipExtractingValue bool) {
if expr == nil {
if expr == nil || reflect.ValueOf(expr).IsNil() {
return
}
@@ -948,9 +954,6 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
case *ast.BadExpr:
kind = dbscheme.BadExpr.Index()
case *ast.Ident:
if expr == nil {
return
}
kind = dbscheme.IdentExpr.Index()
dbscheme.LiteralsTable.Emit(tw, lbl, expr.Name, expr.Name)
def := tw.Package.TypesInfo.Defs[expr]
@@ -984,15 +987,9 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
}
}
case *ast.Ellipsis:
if expr == nil {
return
}
kind = dbscheme.EllipsisExpr.Index()
extractExpr(tw, expr.Elt, lbl, 0, false)
case *ast.BasicLit:
if expr == nil {
return
}
value := ""
switch expr.Kind {
case token.INT:
@@ -1016,36 +1013,21 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
}
dbscheme.LiteralsTable.Emit(tw, lbl, value, expr.Value)
case *ast.FuncLit:
if expr == nil {
return
}
kind = dbscheme.FuncLitExpr.Index()
extractExpr(tw, expr.Type, lbl, 0, false)
extractStmt(tw, expr.Body, lbl, 1)
case *ast.CompositeLit:
if expr == nil {
return
}
kind = dbscheme.CompositeLitExpr.Index()
extractExpr(tw, expr.Type, lbl, 0, false)
extractExprs(tw, expr.Elts, lbl, 1, 1)
case *ast.ParenExpr:
if expr == nil {
return
}
kind = dbscheme.ParenExpr.Index()
extractExpr(tw, expr.X, lbl, 0, false)
case *ast.SelectorExpr:
if expr == nil {
return
}
kind = dbscheme.SelectorExpr.Index()
extractExpr(tw, expr.X, lbl, 0, false)
extractExpr(tw, expr.Sel, lbl, 1, false)
case *ast.IndexExpr:
if expr == nil {
return
}
typeofx := typeOf(tw, expr.X)
if typeofx == nil {
// We are missing type information for `expr.X`, so we cannot
@@ -1065,9 +1047,6 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
extractExpr(tw, expr.X, lbl, 0, false)
extractExpr(tw, expr.Index, lbl, 1, false)
case *ast.IndexListExpr:
if expr == nil {
return
}
typeofx := typeOf(tw, expr.X)
if typeofx == nil {
// We are missing type information for `expr.X`, so we cannot
@@ -1084,18 +1063,12 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
extractExpr(tw, expr.X, lbl, 0, false)
extractExprs(tw, expr.Indices, lbl, 1, 1)
case *ast.SliceExpr:
if expr == nil {
return
}
kind = dbscheme.SliceExpr.Index()
extractExpr(tw, expr.X, lbl, 0, false)
extractExpr(tw, expr.Low, lbl, 1, false)
extractExpr(tw, expr.High, lbl, 2, false)
extractExpr(tw, expr.Max, lbl, 3, false)
case *ast.TypeAssertExpr:
if expr == nil {
return
}
kind = dbscheme.TypeAssertExpr.Index()
extractExpr(tw, expr.X, lbl, 0, false)
// expr.Type can be `nil` if this is the `x.(type)` in a type switch.
@@ -1103,9 +1076,6 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
extractExpr(tw, expr.Type, lbl, 1, false)
}
case *ast.CallExpr:
if expr == nil {
return
}
kind = dbscheme.CallOrConversionExpr.Index()
extractExpr(tw, expr.Fun, lbl, 0, false)
extractExprs(tw, expr.Args, lbl, 1, 1)
@@ -1113,22 +1083,13 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
dbscheme.HasEllipsisTable.Emit(tw, lbl)
}
case *ast.StarExpr:
if expr == nil {
return
}
kind = dbscheme.StarExpr.Index()
extractExpr(tw, expr.X, lbl, 0, false)
case *ast.KeyValueExpr:
if expr == nil {
return
}
kind = dbscheme.KeyValueExpr.Index()
extractExpr(tw, expr.Key, lbl, 0, false)
extractExpr(tw, expr.Value, lbl, 1, false)
case *ast.UnaryExpr:
if expr == nil {
return
}
if expr.Op == token.TILDE {
kind = dbscheme.TypeSetLiteralExpr.Index()
} else {
@@ -1140,9 +1101,6 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
}
extractExpr(tw, expr.X, lbl, 0, false)
case *ast.BinaryExpr:
if expr == nil {
return
}
_, isUnionType := typeOf(tw, expr).(*types.Union)
if expr.Op == token.OR && isUnionType {
kind = dbscheme.TypeSetLiteralExpr.Index()
@@ -1158,46 +1116,28 @@ func extractExpr(tw *trap.Writer, expr ast.Expr, parent trap.Label, idx int, ski
extractExpr(tw, expr.Y, lbl, 1, false)
}
case *ast.ArrayType:
if expr == nil {
return
}
kind = dbscheme.ArrayTypeExpr.Index()
extractExpr(tw, expr.Len, lbl, 0, false)
extractExpr(tw, expr.Elt, lbl, 1, false)
case *ast.StructType:
if expr == nil {
return
}
kind = dbscheme.StructTypeExpr.Index()
extractFields(tw, expr.Fields, lbl, 0, 1)
case *ast.FuncType:
if expr == nil {
return
}
kind = dbscheme.FuncTypeExpr.Index()
extractFields(tw, expr.Params, lbl, 0, 1)
extractFields(tw, expr.Results, lbl, -1, -1)
emitScopeNodeInfo(tw, expr, lbl)
case *ast.InterfaceType:
if expr == nil {
return
}
kind = dbscheme.InterfaceTypeExpr.Index()
// expr.Methods contains methods, embedded interfaces and type set
// literals.
makeTypeSetLiteralsUnionTyped(tw, expr.Methods)
extractFields(tw, expr.Methods, lbl, 0, 1)
case *ast.MapType:
if expr == nil {
return
}
kind = dbscheme.MapTypeExpr.Index()
extractExpr(tw, expr.Key, lbl, 0, false)
extractExpr(tw, expr.Value, lbl, 1, false)
case *ast.ChanType:
if expr == nil {
return
}
tp := dbscheme.ChanTypeExprs[expr.Dir]
if tp == nil {
log.Fatalf("unsupported channel direction %v", expr.Dir)
@@ -1299,7 +1239,7 @@ func extractFields(tw *trap.Writer, fields *ast.FieldList, parent trap.Label, id
// extractStmt extracts AST information for a given statement and all other statements or expressions
// nested inside it
func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
if stmt == nil {
if stmt == nil || reflect.ValueOf(stmt).IsNil() {
return
}
@@ -1309,37 +1249,22 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
case *ast.BadStmt:
kind = dbscheme.BadStmtType.Index()
case *ast.DeclStmt:
if stmt == nil {
return
}
kind = dbscheme.DeclStmtType.Index()
extractDecl(tw, stmt.Decl, lbl, 0)
case *ast.EmptyStmt:
kind = dbscheme.EmptyStmtType.Index()
case *ast.LabeledStmt:
if stmt == nil {
return
}
kind = dbscheme.LabeledStmtType.Index()
extractExpr(tw, stmt.Label, lbl, 0, false)
extractStmt(tw, stmt.Stmt, lbl, 1)
case *ast.ExprStmt:
if stmt == nil {
return
}
kind = dbscheme.ExprStmtType.Index()
extractExpr(tw, stmt.X, lbl, 0, false)
case *ast.SendStmt:
if stmt == nil {
return
}
kind = dbscheme.SendStmtType.Index()
extractExpr(tw, stmt.Chan, lbl, 0, false)
extractExpr(tw, stmt.Value, lbl, 1, false)
case *ast.IncDecStmt:
if stmt == nil {
return
}
if stmt.Tok == token.INC {
kind = dbscheme.IncStmtType.Index()
} else if stmt.Tok == token.DEC {
@@ -1349,9 +1274,6 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
}
extractExpr(tw, stmt.X, lbl, 0, false)
case *ast.AssignStmt:
if stmt == nil {
return
}
tp := dbscheme.AssignStmtTypes[stmt.Tok]
if tp == nil {
log.Fatalf("unsupported assignment statement with operator %v", stmt.Tok)
@@ -1360,24 +1282,15 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
extractExprs(tw, stmt.Lhs, lbl, -1, -1)
extractExprs(tw, stmt.Rhs, lbl, 1, 1)
case *ast.GoStmt:
if stmt == nil {
return
}
kind = dbscheme.GoStmtType.Index()
extractExpr(tw, stmt.Call, lbl, 0, false)
case *ast.DeferStmt:
if stmt == nil {
return
}
kind = dbscheme.DeferStmtType.Index()
extractExpr(tw, stmt.Call, lbl, 0, false)
case *ast.ReturnStmt:
kind = dbscheme.ReturnStmtType.Index()
extractExprs(tw, stmt.Results, lbl, 0, 1)
case *ast.BranchStmt:
if stmt == nil {
return
}
switch stmt.Tok {
case token.BREAK:
kind = dbscheme.BreakStmtType.Index()
@@ -1392,16 +1305,10 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
}
extractExpr(tw, stmt.Label, lbl, 0, false)
case *ast.BlockStmt:
if stmt == nil {
return
}
kind = dbscheme.BlockStmtType.Index()
extractStmts(tw, stmt.List, lbl, 0, 1)
emitScopeNodeInfo(tw, stmt, lbl)
case *ast.IfStmt:
if stmt == nil {
return
}
kind = dbscheme.IfStmtType.Index()
extractStmt(tw, stmt.Init, lbl, 0)
extractExpr(tw, stmt.Cond, lbl, 1, false)
@@ -1409,35 +1316,23 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
extractStmt(tw, stmt.Else, lbl, 3)
emitScopeNodeInfo(tw, stmt, lbl)
case *ast.CaseClause:
if stmt == nil {
return
}
kind = dbscheme.CaseClauseType.Index()
extractExprs(tw, stmt.List, lbl, -1, -1)
extractStmts(tw, stmt.Body, lbl, 0, 1)
emitScopeNodeInfo(tw, stmt, lbl)
case *ast.SwitchStmt:
if stmt == nil {
return
}
kind = dbscheme.ExprSwitchStmtType.Index()
extractStmt(tw, stmt.Init, lbl, 0)
extractExpr(tw, stmt.Tag, lbl, 1, false)
extractStmt(tw, stmt.Body, lbl, 2)
emitScopeNodeInfo(tw, stmt, lbl)
case *ast.TypeSwitchStmt:
if stmt == nil {
return
}
kind = dbscheme.TypeSwitchStmtType.Index()
extractStmt(tw, stmt.Init, lbl, 0)
extractStmt(tw, stmt.Assign, lbl, 1)
extractStmt(tw, stmt.Body, lbl, 2)
emitScopeNodeInfo(tw, stmt, lbl)
case *ast.CommClause:
if stmt == nil {
return
}
kind = dbscheme.CommClauseType.Index()
extractStmt(tw, stmt.Comm, lbl, 0)
extractStmts(tw, stmt.Body, lbl, 1, 1)
@@ -1446,9 +1341,6 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
kind = dbscheme.SelectStmtType.Index()
extractStmt(tw, stmt.Body, lbl, 0)
case *ast.ForStmt:
if stmt == nil {
return
}
kind = dbscheme.ForStmtType.Index()
extractStmt(tw, stmt.Init, lbl, 0)
extractExpr(tw, stmt.Cond, lbl, 1, false)
@@ -1456,9 +1348,6 @@ func extractStmt(tw *trap.Writer, stmt ast.Stmt, parent trap.Label, idx int) {
extractStmt(tw, stmt.Body, lbl, 3)
emitScopeNodeInfo(tw, stmt, lbl)
case *ast.RangeStmt:
if stmt == nil {
return
}
kind = dbscheme.RangeStmtType.Index()
extractExpr(tw, stmt.Key, lbl, 0, false)
extractExpr(tw, stmt.Value, lbl, 1, false)
@@ -1486,15 +1375,15 @@ func extractStmts(tw *trap.Writer, stmts []ast.Stmt, parent trap.Label, idx int,
// extractDecl extracts AST information for the given declaration
func extractDecl(tw *trap.Writer, decl ast.Decl, parent trap.Label, idx int) {
if reflect.ValueOf(decl).IsNil() {
return
}
lbl := tw.Labeler.LocalID(decl)
var kind int
switch decl := decl.(type) {
case *ast.BadDecl:
kind = dbscheme.BadDeclType.Index()
case *ast.GenDecl:
if decl == nil {
return
}
switch decl.Tok {
case token.IMPORT:
kind = dbscheme.ImportDeclType.Index()
@@ -1512,9 +1401,6 @@ func extractDecl(tw *trap.Writer, decl ast.Decl, parent trap.Label, idx int) {
}
extractDoc(tw, decl.Doc, lbl)
case *ast.FuncDecl:
if decl == nil {
return
}
kind = dbscheme.FuncDeclType.Index()
extractFields(tw, decl.Recv, lbl, -1, -1)
extractExpr(tw, decl.Name, lbl, 0, false)

View File

@@ -50,8 +50,8 @@ func parseRegistryConfigs(str string) ([]RegistryConfig, error) {
func getEnvVars() []string {
var result []string
if proxy_host, proxy_host_set := os.LookupEnv(PROXY_HOST); proxy_host_set {
if proxy_port, proxy_port_set := os.LookupEnv(PROXY_PORT); proxy_port_set {
if proxy_host, proxy_host_set := os.LookupEnv(PROXY_HOST); proxy_host_set && proxy_host != "" {
if proxy_port, proxy_port_set := os.LookupEnv(PROXY_PORT); proxy_port_set && proxy_port != "" {
proxy_address = fmt.Sprintf("http://%s:%s", proxy_host, proxy_port)
result = append(result, fmt.Sprintf("HTTP_PROXY=%s", proxy_address), fmt.Sprintf("HTTPS_PROXY=%s", proxy_address))
@@ -59,7 +59,7 @@ func getEnvVars() []string {
}
}
if proxy_cert, proxy_cert_set := os.LookupEnv(PROXY_CA_CERTIFICATE); proxy_cert_set {
if proxy_cert, proxy_cert_set := os.LookupEnv(PROXY_CA_CERTIFICATE); proxy_cert_set && proxy_cert != "" {
// Write the certificate to a temporary file
slog.Info("Found certificate")
@@ -82,7 +82,7 @@ func getEnvVars() []string {
}
}
if proxy_urls, proxy_urls_set := os.LookupEnv(PROXY_URLS); proxy_urls_set {
if proxy_urls, proxy_urls_set := os.LookupEnv(PROXY_URLS); proxy_urls_set && proxy_urls != "" {
val, err := parseRegistryConfigs(proxy_urls)
if err != nil {
slog.Error("Unable to parse proxy configurations", slog.String("error", err.Error()))

View File

@@ -20,10 +20,6 @@ numberOfTypeParameters
| genericFunctions.go:152:6:152:36 | multipleAnonymousTypeParamsType | 3 |
| genericFunctions.go:154:51:154:51 | f | 3 |
#select
| cmp.Compare | 0 | T | Ordered |
| cmp.Less | 0 | T | Ordered |
| cmp.Or | 0 | T | comparable |
| cmp.isNaN | 0 | T | Ordered |
| codeql-go-tests/function.EdgeConstraint | 0 | Node | interface { } |
| codeql-go-tests/function.Element | 0 | S | interface { } |
| codeql-go-tests/function.GenericFunctionInAnotherFile | 0 | T | interface { } |
@@ -57,205 +53,3 @@ numberOfTypeParameters
| codeql-go-tests/function.multipleAnonymousTypeParamsType.f | 0 | _ | interface { } |
| codeql-go-tests/function.multipleAnonymousTypeParamsType.f | 1 | _ | interface { string } |
| codeql-go-tests/function.multipleAnonymousTypeParamsType.f | 2 | _ | interface { } |
| github.com/anotherpkg.GenericFunctionInAnotherPackage | 0 | T | interface { } |
| internal/abi.Escape | 0 | T | interface { } |
| internal/bytealg.HashStr | 0 | T | interface { string \| []uint8 } |
| internal/bytealg.HashStrRev | 0 | T | interface { string \| []uint8 } |
| internal/bytealg.IndexRabinKarp | 0 | T | interface { string \| []uint8 } |
| internal/bytealg.LastIndexRabinKarp | 0 | T | interface { string \| []uint8 } |
| internal/poll.ignoringEINTR2 | 0 | T | interface { } |
| internal/runtime/atomic.Pointer.CompareAndSwap | 0 | T | interface { } |
| internal/runtime/atomic.Pointer.CompareAndSwapNoWB | 0 | T | interface { } |
| internal/runtime/atomic.Pointer.Load | 0 | T | interface { } |
| internal/runtime/atomic.Pointer.Store | 0 | T | interface { } |
| internal/runtime/atomic.Pointer.StoreNoWB | 0 | T | interface { } |
| internal/sync.HashTrieMap.All | 0 | K | comparable |
| internal/sync.HashTrieMap.All | 1 | V | interface { } |
| internal/sync.HashTrieMap.CompareAndDelete | 0 | K | comparable |
| internal/sync.HashTrieMap.CompareAndDelete | 1 | V | interface { } |
| internal/sync.HashTrieMap.CompareAndSwap | 0 | K | comparable |
| internal/sync.HashTrieMap.CompareAndSwap | 1 | V | interface { } |
| internal/sync.HashTrieMap.Delete | 0 | K | comparable |
| internal/sync.HashTrieMap.Load | 0 | K | comparable |
| internal/sync.HashTrieMap.Load | 1 | V | interface { } |
| internal/sync.HashTrieMap.LoadAndDelete | 0 | K | comparable |
| internal/sync.HashTrieMap.LoadAndDelete | 1 | V | interface { } |
| internal/sync.HashTrieMap.LoadOrStore | 0 | K | comparable |
| internal/sync.HashTrieMap.LoadOrStore | 1 | V | interface { } |
| internal/sync.HashTrieMap.Range | 0 | K | comparable |
| internal/sync.HashTrieMap.Range | 1 | V | interface { } |
| internal/sync.HashTrieMap.Store | 0 | K | comparable |
| internal/sync.HashTrieMap.Store | 1 | V | interface { } |
| internal/sync.HashTrieMap.Swap | 0 | K | comparable |
| internal/sync.HashTrieMap.Swap | 1 | V | interface { } |
| internal/sync.HashTrieMap.find | 0 | K | comparable |
| internal/sync.HashTrieMap.find | 1 | V | interface { } |
| internal/sync.HashTrieMap.iter | 0 | K | comparable |
| internal/sync.HashTrieMap.iter | 1 | V | interface { } |
| internal/sync.entry | 0 | K | comparable |
| internal/sync.entry | 1 | V | interface { } |
| internal/sync.entry.compareAndDelete | 0 | K | comparable |
| internal/sync.entry.compareAndDelete | 1 | V | interface { } |
| internal/sync.entry.compareAndSwap | 0 | K | comparable |
| internal/sync.entry.compareAndSwap | 1 | V | interface { } |
| internal/sync.entry.loadAndDelete | 0 | K | comparable |
| internal/sync.entry.loadAndDelete | 1 | V | interface { } |
| internal/sync.entry.lookup | 0 | K | comparable |
| internal/sync.entry.lookup | 1 | V | interface { } |
| internal/sync.entry.lookupWithValue | 0 | K | comparable |
| internal/sync.entry.lookupWithValue | 1 | V | interface { } |
| internal/sync.entry.swap | 0 | K | comparable |
| internal/sync.entry.swap | 1 | V | interface { } |
| internal/sync.newEntryNode | 0 | K | comparable |
| internal/sync.newEntryNode | 1 | V | interface { } |
| iter.Pull | 0 | V | interface { } |
| iter.Pull2 | 0 | K | interface { } |
| iter.Pull2 | 1 | V | interface { } |
| iter.Seq | 0 | V | interface { } |
| iter.Seq2 | 0 | K | interface { } |
| iter.Seq2 | 1 | V | interface { } |
| os.doInRoot | 0 | T | interface { } |
| os.ignoringEINTR2 | 0 | T | interface { } |
| reflect.rangeNum | 1 | N | interface { int64 \| uint64 } |
| runtime.AddCleanup | 0 | T | interface { } |
| runtime.AddCleanup | 1 | S | interface { } |
| runtime.fandbits | 0 | F | floaty |
| runtime.fmax | 0 | F | floaty |
| runtime.fmin | 0 | F | floaty |
| runtime.forbits | 0 | F | floaty |
| runtime.noEscapePtr | 0 | T | interface { } |
| slices.All | 0 | Slice | interface { ~[]E } |
| slices.All | 1 | E | interface { } |
| slices.AppendSeq | 0 | Slice | interface { ~[]E } |
| slices.AppendSeq | 1 | E | interface { } |
| slices.Backward | 0 | Slice | interface { ~[]E } |
| slices.Backward | 1 | E | interface { } |
| slices.BinarySearch | 0 | S | interface { ~[]E } |
| slices.BinarySearch | 1 | E | Ordered |
| slices.BinarySearchFunc | 0 | S | interface { ~[]E } |
| slices.BinarySearchFunc | 1 | E | interface { } |
| slices.BinarySearchFunc | 2 | T | interface { } |
| slices.Chunk | 0 | Slice | interface { ~[]E } |
| slices.Chunk | 1 | E | interface { } |
| slices.Clip | 0 | S | interface { ~[]E } |
| slices.Clip | 1 | E | interface { } |
| slices.Clone | 0 | S | interface { ~[]E } |
| slices.Clone | 1 | E | interface { } |
| slices.Collect | 0 | E | interface { } |
| slices.Compact | 0 | S | interface { ~[]E } |
| slices.Compact | 1 | E | comparable |
| slices.CompactFunc | 0 | S | interface { ~[]E } |
| slices.CompactFunc | 1 | E | interface { } |
| slices.Compare | 0 | S | interface { ~[]E } |
| slices.Compare | 1 | E | Ordered |
| slices.CompareFunc | 0 | S1 | interface { ~[]E1 } |
| slices.CompareFunc | 1 | S2 | interface { ~[]E2 } |
| slices.CompareFunc | 2 | E1 | interface { } |
| slices.CompareFunc | 3 | E2 | interface { } |
| slices.Concat | 0 | S | interface { ~[]E } |
| slices.Concat | 1 | E | interface { } |
| slices.Contains | 0 | S | interface { ~[]E } |
| slices.Contains | 1 | E | comparable |
| slices.ContainsFunc | 0 | S | interface { ~[]E } |
| slices.ContainsFunc | 1 | E | interface { } |
| slices.Delete | 0 | S | interface { ~[]E } |
| slices.Delete | 1 | E | interface { } |
| slices.DeleteFunc | 0 | S | interface { ~[]E } |
| slices.DeleteFunc | 1 | E | interface { } |
| slices.Equal | 0 | S | interface { ~[]E } |
| slices.Equal | 1 | E | comparable |
| slices.EqualFunc | 0 | S1 | interface { ~[]E1 } |
| slices.EqualFunc | 1 | S2 | interface { ~[]E2 } |
| slices.EqualFunc | 2 | E1 | interface { } |
| slices.EqualFunc | 3 | E2 | interface { } |
| slices.Grow | 0 | S | interface { ~[]E } |
| slices.Grow | 1 | E | interface { } |
| slices.Index | 0 | S | interface { ~[]E } |
| slices.Index | 1 | E | comparable |
| slices.IndexFunc | 0 | S | interface { ~[]E } |
| slices.IndexFunc | 1 | E | interface { } |
| slices.Insert | 0 | S | interface { ~[]E } |
| slices.Insert | 1 | E | interface { } |
| slices.IsSorted | 0 | S | interface { ~[]E } |
| slices.IsSorted | 1 | E | Ordered |
| slices.IsSortedFunc | 0 | S | interface { ~[]E } |
| slices.IsSortedFunc | 1 | E | interface { } |
| slices.Max | 0 | S | interface { ~[]E } |
| slices.Max | 1 | E | Ordered |
| slices.MaxFunc | 0 | S | interface { ~[]E } |
| slices.MaxFunc | 1 | E | interface { } |
| slices.Min | 0 | S | interface { ~[]E } |
| slices.Min | 1 | E | Ordered |
| slices.MinFunc | 0 | S | interface { ~[]E } |
| slices.MinFunc | 1 | E | interface { } |
| slices.Repeat | 0 | S | interface { ~[]E } |
| slices.Repeat | 1 | E | interface { } |
| slices.Replace | 0 | S | interface { ~[]E } |
| slices.Replace | 1 | E | interface { } |
| slices.Reverse | 0 | S | interface { ~[]E } |
| slices.Reverse | 1 | E | interface { } |
| slices.Sort | 0 | S | interface { ~[]E } |
| slices.Sort | 1 | E | Ordered |
| slices.SortFunc | 0 | S | interface { ~[]E } |
| slices.SortFunc | 1 | E | interface { } |
| slices.SortStableFunc | 0 | S | interface { ~[]E } |
| slices.SortStableFunc | 1 | E | interface { } |
| slices.Sorted | 0 | E | Ordered |
| slices.SortedFunc | 0 | E | interface { } |
| slices.SortedStableFunc | 0 | E | interface { } |
| slices.Values | 0 | Slice | interface { ~[]E } |
| slices.Values | 1 | E | interface { } |
| slices.breakPatternsCmpFunc | 0 | E | interface { } |
| slices.breakPatternsOrdered | 0 | E | Ordered |
| slices.choosePivotCmpFunc | 0 | E | interface { } |
| slices.choosePivotOrdered | 0 | E | Ordered |
| slices.heapSortCmpFunc | 0 | E | interface { } |
| slices.heapSortOrdered | 0 | E | Ordered |
| slices.insertionSortCmpFunc | 0 | E | interface { } |
| slices.insertionSortOrdered | 0 | E | Ordered |
| slices.isNaN | 0 | T | Ordered |
| slices.medianAdjacentCmpFunc | 0 | E | interface { } |
| slices.medianAdjacentOrdered | 0 | E | Ordered |
| slices.medianCmpFunc | 0 | E | interface { } |
| slices.medianOrdered | 0 | E | Ordered |
| slices.order2CmpFunc | 0 | E | interface { } |
| slices.order2Ordered | 0 | E | Ordered |
| slices.overlaps | 0 | E | interface { } |
| slices.partialInsertionSortCmpFunc | 0 | E | interface { } |
| slices.partialInsertionSortOrdered | 0 | E | Ordered |
| slices.partitionCmpFunc | 0 | E | interface { } |
| slices.partitionEqualCmpFunc | 0 | E | interface { } |
| slices.partitionEqualOrdered | 0 | E | Ordered |
| slices.partitionOrdered | 0 | E | Ordered |
| slices.pdqsortCmpFunc | 0 | E | interface { } |
| slices.pdqsortOrdered | 0 | E | Ordered |
| slices.reverseRangeCmpFunc | 0 | E | interface { } |
| slices.reverseRangeOrdered | 0 | E | Ordered |
| slices.rotateCmpFunc | 0 | E | interface { } |
| slices.rotateLeft | 0 | E | interface { } |
| slices.rotateOrdered | 0 | E | Ordered |
| slices.rotateRight | 0 | E | interface { } |
| slices.siftDownCmpFunc | 0 | E | interface { } |
| slices.siftDownOrdered | 0 | E | Ordered |
| slices.stableCmpFunc | 0 | E | interface { } |
| slices.stableOrdered | 0 | E | Ordered |
| slices.startIdx | 0 | E | interface { } |
| slices.swapRangeCmpFunc | 0 | E | interface { } |
| slices.swapRangeOrdered | 0 | E | Ordered |
| slices.symMergeCmpFunc | 0 | E | interface { } |
| slices.symMergeOrdered | 0 | E | Ordered |
| strconv.bsearch | 0 | S | interface { ~[]E } |
| strconv.bsearch | 1 | E | interface { ~uint16 \| ~uint32 } |
| sync.OnceValue | 0 | T | interface { } |
| sync.OnceValues | 0 | T1 | interface { } |
| sync.OnceValues | 1 | T2 | interface { } |
| sync/atomic.Pointer | 0 | T | interface { } |
| sync/atomic.Pointer.CompareAndSwap | 0 | T | interface { } |
| sync/atomic.Pointer.Load | 0 | T | interface { } |
| sync/atomic.Pointer.Store | 0 | T | interface { } |
| sync/atomic.Pointer.Swap | 0 | T | interface { } |
| time.atoi | 0 | bytes | interface { []uint8 \| string } |
| time.isDigit | 0 | bytes | interface { []uint8 \| string } |
| time.leadingInt | 0 | bytes | interface { []uint8 \| string } |
| time.parseNanoseconds | 0 | bytes | interface { []uint8 \| string } |
| time.parseRFC3339 | 0 | bytes | interface { []uint8 \| string } |

View File

@@ -6,5 +6,9 @@ query predicate numberOfTypeParameters(TypeParamParentEntity parent, int n) {
}
from TypeParamType tpt, TypeParamParentEntity ty
where ty = tpt.getParent()
where
ty = tpt.getParent() and
// Note that we cannot use the location of `tpt` itself as we currently fail
// to extract an object for type parameters for methods on generic structs.
exists(ty.getLocation())
select ty.getQualifiedName(), tpt.getIndex(), tpt.getParamName(), tpt.getConstraint().pp()

View File

@@ -1,3 +1,4 @@
| greet.go:6:2:6:6 | myfmt | greet.go:3:8:3:12 | myfmt | V |
| main.go:6:26:6:28 | who | main.go:5:12:5:14 | who | V |
| main.go:11:2:11:6 | greet | main.go:5:6:5:10 | greet | V |
| main.go:11:8:11:12 | world | main.go:10:2:10:6 | world | V |

View File

@@ -1,7 +1,7 @@
package main
import "fmt"
import myfmt "fmt"
func greet2() {
fmt.Println("Hello world!")
myfmt.Println("Hello world!")
}

View File

@@ -1,4 +1,138 @@
- description: Security-and-quality queries for JavaScript
- queries: .
- apply: security-and-quality-selectors.yml
from: codeql/suite-helpers
- include:
kind:
- problem
- path-problem
precision:
- high
- very-high
tags contain:
- security
- include:
kind:
- problem
- path-problem
precision: medium
problem.severity:
- error
- warning
tags contain:
- security
- include:
id:
- js/node/assignment-to-exports-variable
- js/node/missing-exports-qualifier
- js/angular/duplicate-dependency
- js/angular/missing-explicit-injection
- js/angular/dependency-injection-mismatch
- js/angular/incompatible-service
- js/angular/expression-in-url-attribute
- js/angular/repeated-dependency-injection
- js/regex/back-reference-to-negative-lookahead
- js/regex/unmatchable-dollar
- js/regex/empty-character-class
- js/regex/back-reference-before-group
- js/regex/unbound-back-reference
- js/regex/always-matches
- js/regex/unmatchable-caret
- js/regex/duplicate-in-character-class
- js/vue/arrow-method-on-vue-instance
- js/conditional-comment
- js/superfluous-trailing-arguments
- js/illegal-invocation
- js/invalid-prototype-value
- js/incomplete-object-initialization
- js/useless-type-test
- js/template-syntax-in-string-literal
- js/with-statement
- js/property-assignment-on-primitive
- js/deletion-of-non-property
- js/setter-return
- js/index-out-of-bounds
- js/unused-index-variable
- js/non-standard-language-feature
- js/syntax-error
- js/for-in-comprehension
- js/strict-mode-call-stack-introspection
- js/automatic-semicolon-insertion
- js/inconsistent-use-of-new
- js/non-linear-pattern
- js/yield-outside-generator
- js/mixed-static-instance-this-access
- js/arguments-redefinition
- js/nested-function-reference-in-default-parameter
- js/duplicate-parameter-name
- js/unreachable-method-overloads
- js/duplicate-variable-declaration
- js/function-declaration-conflict
- js/ineffective-parameter-type
- js/assignment-to-constant
- js/use-before-declaration
- js/suspicious-method-name-declaration
- js/overwritten-property
- js/useless-assignment-to-local
- js/useless-assignment-to-property
- js/variable-initialization-conflict
- js/variable-use-in-temporal-dead-zone
- js/missing-variable-declaration
- js/missing-this-qualifier
- js/unused-local-variable
- js/label-in-switch
- js/ignore-array-result
- js/inconsistent-loop-direction
- js/unreachable-statement
- js/trivial-conditional
- js/useless-comparison-test
- js/misleading-indentation-of-dangling-else
- js/use-of-returnless-function
- js/useless-assignment-in-return
- js/loop-iteration-skipped-due-to-shifting
- js/misleading-indentation-after-control-statement
- js/unused-loop-variable
- js/implicit-operand-conversion
- js/whitespace-contradicts-precedence
- js/missing-space-in-concatenation
- js/unbound-event-handler-receiver
- js/shift-out-of-range
- js/missing-dot-length-in-comparison
- js/redundant-operation
- js/comparison-with-nan
- js/duplicate-property
- js/unclear-operator-precedence
- js/unknown-directive
- js/string-instead-of-regex
- js/unneeded-defensive-code
- js/duplicate-switch-case
- js/duplicate-condition
- js/useless-expression
- js/redundant-assignment
- js/misspelled-variable-name
- js/call-to-non-callable
- js/missing-await
- js/comparison-between-incompatible-types
- js/property-access-on-non-object
- js/malformed-html-id
- js/eval-like-call
- js/duplicate-html-attribute
- js/react/unsupported-state-update-in-lifecycle-method
- js/react/unused-or-undefined-state-property
- js/react/direct-state-mutation
- js/react/inconsistent-state-update
- js/diagnostics/extraction-errors
- js/diagnostics/successfully-extracted-files
- js/summary/lines-of-code
- js/summary/lines-of-user-code
- include:
kind:
- diagnostic
- include:
kind:
- metric
tags contain:
- summary
- exclude:
deprecated: //
- exclude:
query path:
- /^experimental\/.*/

View File

@@ -672,3 +672,15 @@ alias(
actual = "@vendor_ts__ungrammar-1.16.1//:ungrammar",
tags = ["manual"],
)
alias(
name = "zstd-0.13.3",
actual = "@vendor_ts__zstd-0.13.3//:zstd",
tags = ["manual"],
)
alias(
name = "zstd",
actual = "@vendor_ts__zstd-0.13.3//:zstd",
tags = ["manual"],
)

View File

@@ -28,6 +28,9 @@ rust_library(
"WORKSPACE.bazel",
],
),
crate_features = [
"parallel",
],
crate_root = "src/lib.rs",
edition = "2018",
rustc_flags = [
@@ -81,6 +84,42 @@ rust_library(
}),
version = "1.2.7",
deps = [
"@vendor_ts__jobserver-0.1.32//:jobserver",
"@vendor_ts__shlex-1.3.0//:shlex",
],
] + select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [
"@vendor_ts__libc-0.2.171//:libc", # aarch64-apple-darwin
],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # aarch64-unknown-linux-gnu
],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # aarch64-unknown-linux-gnu, aarch64-unknown-nixos-gnu
],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [
"@vendor_ts__libc-0.2.171//:libc", # arm-unknown-linux-gnueabi
],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # i686-unknown-linux-gnu
],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # powerpc-unknown-linux-gnu
],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # s390x-unknown-linux-gnu
],
"@rules_rust//rust/platform:x86_64-apple-darwin": [
"@vendor_ts__libc-0.2.171//:libc", # x86_64-apple-darwin
],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [
"@vendor_ts__libc-0.2.171//:libc", # x86_64-unknown-freebsd
],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # x86_64-unknown-linux-gnu
],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # x86_64-unknown-linux-gnu, x86_64-unknown-nixos-gnu
],
"//conditions:default": [],
}),
)

View File

@@ -0,0 +1,158 @@
###############################################################################
# @generated
# DO NOT MODIFY: This file is auto-generated by a crate_universe tool. To
# regenerate this file, run the following:
#
# bazel run @@//misc/bazel/3rdparty:vendor_tree_sitter_extractors
###############################################################################
load("@rules_rust//rust:defs.bzl", "rust_library")
package(default_visibility = ["//visibility:public"])
rust_library(
name = "jobserver",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_root = "src/lib.rs",
edition = "2021",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=jobserver",
"manual",
"noclippy",
"norustfmt",
],
target_compatible_with = select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [],
"@rules_rust//rust/platform:aarch64-apple-ios": [],
"@rules_rust//rust/platform:aarch64-apple-ios-sim": [],
"@rules_rust//rust/platform:aarch64-linux-android": [],
"@rules_rust//rust/platform:aarch64-pc-windows-msvc": [],
"@rules_rust//rust/platform:aarch64-unknown-fuchsia": [],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nto-qnx710": [],
"@rules_rust//rust/platform:aarch64-unknown-uefi": [],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:armv7-linux-androideabi": [],
"@rules_rust//rust/platform:armv7-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:i686-apple-darwin": [],
"@rules_rust//rust/platform:i686-linux-android": [],
"@rules_rust//rust/platform:i686-pc-windows-msvc": [],
"@rules_rust//rust/platform:i686-unknown-freebsd": [],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [],
"@rules_rust//rust/platform:riscv32imc-unknown-none-elf": [],
"@rules_rust//rust/platform:riscv64gc-unknown-none-elf": [],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [],
"@rules_rust//rust/platform:thumbv7em-none-eabi": [],
"@rules_rust//rust/platform:thumbv8m.main-none-eabi": [],
"@rules_rust//rust/platform:wasm32-unknown-unknown": [],
"@rules_rust//rust/platform:wasm32-wasip1": [],
"@rules_rust//rust/platform:x86_64-apple-darwin": [],
"@rules_rust//rust/platform:x86_64-apple-ios": [],
"@rules_rust//rust/platform:x86_64-linux-android": [],
"@rules_rust//rust/platform:x86_64-pc-windows-msvc": [],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [],
"@rules_rust//rust/platform:x86_64-unknown-fuchsia": [],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-none": [],
"@rules_rust//rust/platform:x86_64-unknown-uefi": [],
"//conditions:default": ["@platforms//:incompatible"],
}),
version = "0.1.32",
deps = select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-apple-ios": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-apple-ios-sim": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-linux-android": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-unknown-fuchsia": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:aarch64-unknown-nto-qnx710": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:armv7-linux-androideabi": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:armv7-unknown-linux-gnueabi": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:i686-apple-darwin": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:i686-linux-android": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:i686-unknown-freebsd": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-apple-darwin": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-apple-ios": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-linux-android": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-unknown-fuchsia": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [
"@vendor_ts__libc-0.2.171//:libc", # cfg(unix)
],
"//conditions:default": [],
}),
)

View File

@@ -0,0 +1,83 @@
###############################################################################
# @generated
# DO NOT MODIFY: This file is auto-generated by a crate_universe tool. To
# regenerate this file, run the following:
#
# bazel run @@//misc/bazel/3rdparty:vendor_tree_sitter_extractors
###############################################################################
load("@rules_rust//rust:defs.bzl", "rust_library")
package(default_visibility = ["//visibility:public"])
rust_library(
name = "pkg_config",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_root = "src/lib.rs",
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=pkg-config",
"manual",
"noclippy",
"norustfmt",
],
target_compatible_with = select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [],
"@rules_rust//rust/platform:aarch64-apple-ios": [],
"@rules_rust//rust/platform:aarch64-apple-ios-sim": [],
"@rules_rust//rust/platform:aarch64-linux-android": [],
"@rules_rust//rust/platform:aarch64-pc-windows-msvc": [],
"@rules_rust//rust/platform:aarch64-unknown-fuchsia": [],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nto-qnx710": [],
"@rules_rust//rust/platform:aarch64-unknown-uefi": [],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:armv7-linux-androideabi": [],
"@rules_rust//rust/platform:armv7-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:i686-apple-darwin": [],
"@rules_rust//rust/platform:i686-linux-android": [],
"@rules_rust//rust/platform:i686-pc-windows-msvc": [],
"@rules_rust//rust/platform:i686-unknown-freebsd": [],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [],
"@rules_rust//rust/platform:riscv32imc-unknown-none-elf": [],
"@rules_rust//rust/platform:riscv64gc-unknown-none-elf": [],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [],
"@rules_rust//rust/platform:thumbv7em-none-eabi": [],
"@rules_rust//rust/platform:thumbv8m.main-none-eabi": [],
"@rules_rust//rust/platform:wasm32-unknown-unknown": [],
"@rules_rust//rust/platform:wasm32-wasip1": [],
"@rules_rust//rust/platform:x86_64-apple-darwin": [],
"@rules_rust//rust/platform:x86_64-apple-ios": [],
"@rules_rust//rust/platform:x86_64-linux-android": [],
"@rules_rust//rust/platform:x86_64-pc-windows-msvc": [],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [],
"@rules_rust//rust/platform:x86_64-unknown-fuchsia": [],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-none": [],
"@rules_rust//rust/platform:x86_64-unknown-uefi": [],
"//conditions:default": ["@platforms//:incompatible"],
}),
version = "0.3.32",
)

View File

@@ -0,0 +1,92 @@
###############################################################################
# @generated
# DO NOT MODIFY: This file is auto-generated by a crate_universe tool. To
# regenerate this file, run the following:
#
# bazel run @@//misc/bazel/3rdparty:vendor_tree_sitter_extractors
###############################################################################
load("@rules_rust//rust:defs.bzl", "rust_library")
package(default_visibility = ["//visibility:public"])
rust_library(
name = "zstd",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_features = [
"arrays",
"default",
"legacy",
"zdict_builder",
],
crate_root = "src/lib.rs",
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=zstd",
"manual",
"noclippy",
"norustfmt",
],
target_compatible_with = select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [],
"@rules_rust//rust/platform:aarch64-apple-ios": [],
"@rules_rust//rust/platform:aarch64-apple-ios-sim": [],
"@rules_rust//rust/platform:aarch64-linux-android": [],
"@rules_rust//rust/platform:aarch64-pc-windows-msvc": [],
"@rules_rust//rust/platform:aarch64-unknown-fuchsia": [],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nto-qnx710": [],
"@rules_rust//rust/platform:aarch64-unknown-uefi": [],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:armv7-linux-androideabi": [],
"@rules_rust//rust/platform:armv7-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:i686-apple-darwin": [],
"@rules_rust//rust/platform:i686-linux-android": [],
"@rules_rust//rust/platform:i686-pc-windows-msvc": [],
"@rules_rust//rust/platform:i686-unknown-freebsd": [],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [],
"@rules_rust//rust/platform:riscv32imc-unknown-none-elf": [],
"@rules_rust//rust/platform:riscv64gc-unknown-none-elf": [],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [],
"@rules_rust//rust/platform:thumbv7em-none-eabi": [],
"@rules_rust//rust/platform:thumbv8m.main-none-eabi": [],
"@rules_rust//rust/platform:wasm32-unknown-unknown": [],
"@rules_rust//rust/platform:wasm32-wasip1": [],
"@rules_rust//rust/platform:x86_64-apple-darwin": [],
"@rules_rust//rust/platform:x86_64-apple-ios": [],
"@rules_rust//rust/platform:x86_64-linux-android": [],
"@rules_rust//rust/platform:x86_64-pc-windows-msvc": [],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [],
"@rules_rust//rust/platform:x86_64-unknown-fuchsia": [],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-none": [],
"@rules_rust//rust/platform:x86_64-unknown-uefi": [],
"//conditions:default": ["@platforms//:incompatible"],
}),
version = "0.13.3",
deps = [
"@vendor_ts__zstd-safe-7.2.4//:zstd_safe",
],
)

View File

@@ -0,0 +1,158 @@
###############################################################################
# @generated
# DO NOT MODIFY: This file is auto-generated by a crate_universe tool. To
# regenerate this file, run the following:
#
# bazel run @@//misc/bazel/3rdparty:vendor_tree_sitter_extractors
###############################################################################
load("@rules_rust//cargo:defs.bzl", "cargo_build_script")
load("@rules_rust//rust:defs.bzl", "rust_library")
package(default_visibility = ["//visibility:public"])
rust_library(
name = "zstd_safe",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_features = [
"arrays",
"legacy",
"std",
"zdict_builder",
],
crate_root = "src/lib.rs",
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=zstd-safe",
"manual",
"noclippy",
"norustfmt",
],
target_compatible_with = select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [],
"@rules_rust//rust/platform:aarch64-apple-ios": [],
"@rules_rust//rust/platform:aarch64-apple-ios-sim": [],
"@rules_rust//rust/platform:aarch64-linux-android": [],
"@rules_rust//rust/platform:aarch64-pc-windows-msvc": [],
"@rules_rust//rust/platform:aarch64-unknown-fuchsia": [],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nto-qnx710": [],
"@rules_rust//rust/platform:aarch64-unknown-uefi": [],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:armv7-linux-androideabi": [],
"@rules_rust//rust/platform:armv7-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:i686-apple-darwin": [],
"@rules_rust//rust/platform:i686-linux-android": [],
"@rules_rust//rust/platform:i686-pc-windows-msvc": [],
"@rules_rust//rust/platform:i686-unknown-freebsd": [],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [],
"@rules_rust//rust/platform:riscv32imc-unknown-none-elf": [],
"@rules_rust//rust/platform:riscv64gc-unknown-none-elf": [],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [],
"@rules_rust//rust/platform:thumbv7em-none-eabi": [],
"@rules_rust//rust/platform:thumbv8m.main-none-eabi": [],
"@rules_rust//rust/platform:wasm32-unknown-unknown": [],
"@rules_rust//rust/platform:wasm32-wasip1": [],
"@rules_rust//rust/platform:x86_64-apple-darwin": [],
"@rules_rust//rust/platform:x86_64-apple-ios": [],
"@rules_rust//rust/platform:x86_64-linux-android": [],
"@rules_rust//rust/platform:x86_64-pc-windows-msvc": [],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [],
"@rules_rust//rust/platform:x86_64-unknown-fuchsia": [],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-none": [],
"@rules_rust//rust/platform:x86_64-unknown-uefi": [],
"//conditions:default": ["@platforms//:incompatible"],
}),
version = "7.2.4",
deps = [
"@vendor_ts__zstd-safe-7.2.4//:build_script_build",
"@vendor_ts__zstd-sys-2.0.15-zstd.1.5.7//:zstd_sys",
],
)
cargo_build_script(
name = "_bs",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
"**/*.rs",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_features = [
"arrays",
"legacy",
"std",
"zdict_builder",
],
crate_name = "build_script_build",
crate_root = "build.rs",
data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
edition = "2018",
link_deps = [
"@vendor_ts__zstd-sys-2.0.15-zstd.1.5.7//:zstd_sys",
],
pkg_name = "zstd-safe",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=zstd-safe",
"manual",
"noclippy",
"norustfmt",
],
version = "7.2.4",
visibility = ["//visibility:private"],
)
alias(
name = "build_script_build",
actual = ":_bs",
tags = ["manual"],
)

View File

@@ -0,0 +1,157 @@
###############################################################################
# @generated
# DO NOT MODIFY: This file is auto-generated by a crate_universe tool. To
# regenerate this file, run the following:
#
# bazel run @@//misc/bazel/3rdparty:vendor_tree_sitter_extractors
###############################################################################
load("@rules_rust//cargo:defs.bzl", "cargo_build_script")
load("@rules_rust//rust:defs.bzl", "rust_library")
package(default_visibility = ["//visibility:public"])
rust_library(
name = "zstd_sys",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_features = [
"legacy",
"std",
"zdict_builder",
],
crate_root = "src/lib.rs",
edition = "2018",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=zstd-sys",
"manual",
"noclippy",
"norustfmt",
],
target_compatible_with = select({
"@rules_rust//rust/platform:aarch64-apple-darwin": [],
"@rules_rust//rust/platform:aarch64-apple-ios": [],
"@rules_rust//rust/platform:aarch64-apple-ios-sim": [],
"@rules_rust//rust/platform:aarch64-linux-android": [],
"@rules_rust//rust/platform:aarch64-pc-windows-msvc": [],
"@rules_rust//rust/platform:aarch64-unknown-fuchsia": [],
"@rules_rust//rust/platform:aarch64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:aarch64-unknown-nto-qnx710": [],
"@rules_rust//rust/platform:aarch64-unknown-uefi": [],
"@rules_rust//rust/platform:arm-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:armv7-linux-androideabi": [],
"@rules_rust//rust/platform:armv7-unknown-linux-gnueabi": [],
"@rules_rust//rust/platform:i686-apple-darwin": [],
"@rules_rust//rust/platform:i686-linux-android": [],
"@rules_rust//rust/platform:i686-pc-windows-msvc": [],
"@rules_rust//rust/platform:i686-unknown-freebsd": [],
"@rules_rust//rust/platform:i686-unknown-linux-gnu": [],
"@rules_rust//rust/platform:powerpc-unknown-linux-gnu": [],
"@rules_rust//rust/platform:riscv32imc-unknown-none-elf": [],
"@rules_rust//rust/platform:riscv64gc-unknown-none-elf": [],
"@rules_rust//rust/platform:s390x-unknown-linux-gnu": [],
"@rules_rust//rust/platform:thumbv7em-none-eabi": [],
"@rules_rust//rust/platform:thumbv8m.main-none-eabi": [],
"@rules_rust//rust/platform:wasm32-unknown-unknown": [],
"@rules_rust//rust/platform:wasm32-wasip1": [],
"@rules_rust//rust/platform:x86_64-apple-darwin": [],
"@rules_rust//rust/platform:x86_64-apple-ios": [],
"@rules_rust//rust/platform:x86_64-linux-android": [],
"@rules_rust//rust/platform:x86_64-pc-windows-msvc": [],
"@rules_rust//rust/platform:x86_64-unknown-freebsd": [],
"@rules_rust//rust/platform:x86_64-unknown-fuchsia": [],
"@rules_rust//rust/platform:x86_64-unknown-linux-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-nixos-gnu": [],
"@rules_rust//rust/platform:x86_64-unknown-none": [],
"@rules_rust//rust/platform:x86_64-unknown-uefi": [],
"//conditions:default": ["@platforms//:incompatible"],
}),
version = "2.0.15+zstd.1.5.7",
deps = [
"@vendor_ts__zstd-sys-2.0.15-zstd.1.5.7//:build_script_build",
],
)
cargo_build_script(
name = "_bs",
srcs = glob(
include = ["**/*.rs"],
allow_empty = True,
),
compile_data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
"**/*.rs",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
crate_features = [
"legacy",
"std",
"zdict_builder",
],
crate_name = "build_script_build",
crate_root = "build.rs",
data = glob(
include = ["**"],
allow_empty = True,
exclude = [
"**/* *",
".tmp_git_root/**/*",
"BUILD",
"BUILD.bazel",
"WORKSPACE",
"WORKSPACE.bazel",
],
),
edition = "2018",
links = "zstd",
pkg_name = "zstd-sys",
rustc_flags = [
"--cap-lints=allow",
],
tags = [
"cargo-bazel",
"crate-name=zstd-sys",
"manual",
"noclippy",
"norustfmt",
],
version = "2.0.15+zstd.1.5.7",
visibility = ["//visibility:private"],
deps = [
"@vendor_ts__cc-1.2.7//:cc",
"@vendor_ts__pkg-config-0.3.32//:pkg_config",
],
)
alias(
name = "build_script_build",
actual = ":_bs",
tags = ["manual"],
)

View File

@@ -381,6 +381,7 @@ _NORMAL_DEPENDENCIES = {
"tracing": Label("@vendor_ts__tracing-0.1.41//:tracing"),
"tracing-subscriber": Label("@vendor_ts__tracing-subscriber-0.3.19//:tracing_subscriber"),
"tree-sitter": Label("@vendor_ts__tree-sitter-0.24.6//:tree_sitter"),
"zstd": Label("@vendor_ts__zstd-0.13.3//:zstd"),
},
},
}
@@ -1658,6 +1659,16 @@ def crate_repositories():
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.itoa-1.0.15.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__jobserver-0.1.32",
sha256 = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0",
type = "tar.gz",
urls = ["https://static.crates.io/crates/jobserver/0.1.32/download"],
strip_prefix = "jobserver-0.1.32",
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.jobserver-0.1.32.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__jod-thread-0.1.2",
@@ -2048,6 +2059,16 @@ def crate_repositories():
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.pin-project-lite-0.2.16.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__pkg-config-0.3.32",
sha256 = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c",
type = "tar.gz",
urls = ["https://static.crates.io/crates/pkg-config/0.3.32/download"],
strip_prefix = "pkg-config-0.3.32",
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.pkg-config-0.3.32.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__portable-atomic-1.11.0",
@@ -3647,6 +3668,36 @@ def crate_repositories():
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.zerocopy-derive-0.8.20.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__zstd-0.13.3",
sha256 = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a",
type = "tar.gz",
urls = ["https://static.crates.io/crates/zstd/0.13.3/download"],
strip_prefix = "zstd-0.13.3",
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.zstd-0.13.3.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__zstd-safe-7.2.4",
sha256 = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d",
type = "tar.gz",
urls = ["https://static.crates.io/crates/zstd-safe/7.2.4/download"],
strip_prefix = "zstd-safe-7.2.4",
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.zstd-safe-7.2.4.bazel"),
)
maybe(
http_archive,
name = "vendor_ts__zstd-sys-2.0.15-zstd.1.5.7",
sha256 = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237",
type = "tar.gz",
urls = ["https://static.crates.io/crates/zstd-sys/2.0.15+zstd.1.5.7/download"],
strip_prefix = "zstd-sys-2.0.15+zstd.1.5.7",
build_file = Label("//misc/bazel/3rdparty/tree_sitter_extractors_deps:BUILD.zstd-sys-2.0.15+zstd.1.5.7.bazel"),
)
return [
struct(repo = "vendor_ts__anyhow-1.0.97", is_dev_dep = False),
struct(repo = "vendor_ts__argfile-0.2.1", is_dev_dep = False),
@@ -3698,6 +3749,7 @@ def crate_repositories():
struct(repo = "vendor_ts__tree-sitter-ruby-0.23.1", is_dev_dep = False),
struct(repo = "vendor_ts__triomphe-0.1.14", is_dev_dep = False),
struct(repo = "vendor_ts__ungrammar-1.16.1", is_dev_dep = False),
struct(repo = "vendor_ts__zstd-0.13.3", is_dev_dep = False),
struct(repo = "vendor_ts__rand-0.9.0", is_dev_dep = True),
struct(repo = "vendor_ts__tree-sitter-json-0.24.8", is_dev_dep = True),
struct(repo = "vendor_ts__tree-sitter-ql-0.23.1", is_dev_dep = True),

View File

@@ -0,0 +1,500 @@
"""
Experimental script for bulk generation of MaD models based on a list of projects.
Note: This file must be formatted using the Black Python formatter.
"""
import os.path
import subprocess
import sys
from typing import NotRequired, TypedDict, List
from concurrent.futures import ThreadPoolExecutor, as_completed
import time
import argparse
import json
import requests
import zipfile
import tarfile
from functools import cmp_to_key
import generate_mad as mad
gitroot = (
subprocess.check_output(["git", "rev-parse", "--show-toplevel"])
.decode("utf-8")
.strip()
)
build_dir = os.path.join(gitroot, "mad-generation-build")
# A project to generate models for
class Project(TypedDict):
"""
Type definition for projects (acquired via a GitHub repo) to model.
Attributes:
name: The name of the project
git_repo: URL to the git repository
git_tag: Optional Git tag to check out
with_sinks: Whether to generate sink models (defaults to True)
with_sources: Whether to generate source models (defaults to True)
with_summaries: Whether to generate summary models (defaults to True)
"""
name: str
git_repo: NotRequired[str]
git_tag: NotRequired[str]
with_sinks: NotRequired[bool]
with_sources: NotRequired[bool]
with_summaries: NotRequired[bool]
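# Illustrative example of a single target entry (repository URL and tag are hypothetical):
#
#   {"name": "example-project",
#    "git_repo": "https://github.com/example/example-project",
#    "git_tag": "v1.2.3"}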
def should_generate_sinks(project: Project) -> bool:
return project.get("with-sinks", True)
def should_generate_sources(project: Project) -> bool:
return project.get("with-sources", True)
def should_generate_summaries(project: Project) -> bool:
return project.get("with-summaries", True)
def clone_project(project: Project) -> str:
"""
Shallow clone a project into the build directory.
Args:
project: A dictionary containing project information with 'name', 'git_repo', and optional 'git_tag' keys.
Returns:
The path to the cloned project directory.
"""
name = project["name"]
repo_url = project["git_repo"]
git_tag = project.get("git_tag")
# Determine target directory
target_dir = os.path.join(build_dir, name)
# Clone only if directory doesn't already exist
if not os.path.exists(target_dir):
if git_tag:
print(f"Cloning {name} from {repo_url} at tag {git_tag}")
else:
print(f"Cloning {name} from {repo_url}")
subprocess.check_call(
[
"git",
"clone",
"--quiet",
"--depth",
"1", # Shallow clone
*(
["--branch", git_tag] if git_tag else []
), # Add branch if tag is provided
repo_url,
target_dir,
]
)
print(f"Completed cloning {name}")
else:
print(f"Skipping cloning {name} as it already exists at {target_dir}")
return target_dir
def clone_projects(projects: List[Project]) -> List[tuple[Project, str]]:
"""
Clone all projects in parallel.
Args:
projects: List of projects to clone
Returns:
List of (project, project_dir) pairs in the same order as the input projects
"""
start_time = time.time()
max_workers = min(8, len(projects)) # Use at most 8 threads
project_dirs_map = {} # Map to store results by project name
with ThreadPoolExecutor(max_workers=max_workers) as executor:
# Start cloning tasks and keep track of them
future_to_project = {
executor.submit(clone_project, project): project for project in projects
}
# Process results as they complete
for future in as_completed(future_to_project):
project = future_to_project[future]
try:
project_dir = future.result()
project_dirs_map[project["name"]] = (project, project_dir)
except Exception as e:
print(f"ERROR: Failed to clone {project['name']}: {e}")
if len(project_dirs_map) != len(projects):
failed_projects = [
project["name"]
for project in projects
if project["name"] not in project_dirs_map
]
print(
f"ERROR: Only {len(project_dirs_map)} out of {len(projects)} projects were cloned successfully. Failed projects: {', '.join(failed_projects)}"
)
sys.exit(1)
project_dirs = [project_dirs_map[project["name"]] for project in projects]
clone_time = time.time() - start_time
print(f"Cloning completed in {clone_time:.2f} seconds")
return project_dirs
def build_database(
language: str, extractor_options, project: Project, project_dir: str
) -> str | None:
"""
Build a CodeQL database for a project.
Args:
language: The language for which to build the database (e.g., "rust").
extractor_options: Additional options for the extractor.
project: A dictionary containing project information with 'name' and 'git_repo' keys.
project_dir: Path to the project's source directory.
Returns:
The path to the created database directory, or None if the build failed.
"""
name = project["name"]
# Create database directory path
database_dir = os.path.join(build_dir, f"{name}-db")
# Only build the database if it doesn't already exist
if not os.path.exists(database_dir):
print(f"Building CodeQL database for {name}...")
extractor_options = [option for x in extractor_options for option in ("-O", x)]
try:
subprocess.check_call(
[
"codeql",
"database",
"create",
f"--language={language}",
"--source-root=" + project_dir,
"--overwrite",
*extractor_options,
"--",
database_dir,
]
)
print(f"Successfully created database at {database_dir}")
except subprocess.CalledProcessError as e:
print(f"Failed to create database for {name}: {e}")
return None
else:
print(
f"Skipping database creation for {name} as it already exists at {database_dir}"
)
return database_dir
def generate_models(config, project: Project, database_dir: str) -> None:
"""
Generate models for a project.
Args:
config: Configuration dictionary containing project details and other settings.
project: The project to generate models for.
database_dir: Path to the CodeQL database.
"""
name = project["name"]
language = config["language"]
generator = mad.Generator(language)
# Note: The argument parser converts with-sinks to with_sinks, etc.
generator.generateSinks = should_generate_sinks(project)
generator.generateSources = should_generate_sources(project)
generator.generateSummaries = should_generate_summaries(project)
generator.setenvironment(database=database_dir, folder=name)
generator.run()
def build_databases_from_projects(
language: str, extractor_options, projects: List[Project]
) -> List[tuple[Project, str | None]]:
"""
Build databases for all projects in parallel.
Args:
language: The language for which to build the databases (e.g., "rust").
extractor_options: Additional options for the extractor.
projects: List of projects to build databases for.
Returns:
List of (project, database_dir) pairs, where database_dir is None if the build failed.
"""
# Clone projects in parallel
print("=== Cloning projects ===")
project_dirs = clone_projects(projects)
# Build databases for all projects
print("\n=== Building databases ===")
database_results = [
(
project,
build_database(language, extractor_options, project, project_dir),
)
for project, project_dir in project_dirs
]
return database_results
def get_json_from_github(
url: str, pat: str, extra_headers: dict[str, str] = {}
) -> dict:
"""
Download a JSON file from GitHub using a personal access token (PAT).
Args:
url: The URL to download the JSON file from.
pat: Personal Access Token for GitHub API authentication.
extra_headers: Additional headers to include in the request.
Returns:
The JSON response as a dictionary.
"""
headers = {"Authorization": f"token {pat}"} | extra_headers
response = requests.get(url, headers=headers)
if response.status_code != 200:
print(f"Failed to download JSON: {response.status_code} {response.text}")
sys.exit(1)
else:
return response.json()
def download_artifact(url: str, artifact_name: str, pat: str) -> str:
"""
Download a GitHub Actions artifact from a given URL.
Args:
url: The URL to download the artifact from.
artifact_name: The name of the artifact (used for naming the downloaded file).
pat: Personal Access Token for GitHub API authentication.
Returns:
The path to the downloaded artifact file.
"""
headers = {"Authorization": f"token {pat}", "Accept": "application/vnd.github+json"}
response = requests.get(url, stream=True, headers=headers)
zipName = artifact_name + ".zip"
if response.status_code != 200:
print(f"Failed to download file. Status code: {response.status_code}")
sys.exit(1)
target_zip = os.path.join(build_dir, zipName)
with open(target_zip, "wb") as file:
for chunk in response.iter_content(chunk_size=8192):
file.write(chunk)
print(f"Download complete: {target_zip}")
return target_zip
def remove_extension(filename: str) -> str:
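# Strips all extensions, e.g. "foo-db.tar.gz" -> "foo-db".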
while "." in filename:
filename, _ = os.path.splitext(filename)
return filename
def pretty_name_from_artifact_name(artifact_name: str) -> str:
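# DCA artifact names appear to be of the form "<prefix>___<project-name>[___...]";
# the project name is the second "___"-separated component.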
return artifact_name.split("___")[1]
def download_dca_databases(
experiment_name: str, pat: str, projects: List[Project]
) -> List[tuple[Project, str | None]]:
"""
Download databases from a DCA experiment.
Args:
experiment_name: The name of the DCA experiment to download databases from.
pat: Personal Access Token for GitHub API authentication.
projects: List of projects to download databases for.
Returns:
List of (project, database_dir) pairs, where database_dir is None if the download failed.
"""
database_results = {}
print("\n=== Finding projects ===")
response = get_json_from_github(
f"https://raw.githubusercontent.com/github/codeql-dca-main/data/{experiment_name}/reports/downloads.json",
pat,
)
targets = response["targets"]
project_map = {project["name"]: project for project in projects}
for data in targets.values():
downloads = data["downloads"]
analyzed_database = downloads["analyzed_database"]
artifact_name = analyzed_database["artifact_name"]
pretty_name = pretty_name_from_artifact_name(artifact_name)
if pretty_name not in project_map:
print(f"Skipping {pretty_name} as it is not in the list of projects")
continue
repository = analyzed_database["repository"]
run_id = analyzed_database["run_id"]
print(f"=== Finding artifact: {artifact_name} ===")
response = get_json_from_github(
f"https://api.github.com/repos/{repository}/actions/runs/{run_id}/artifacts",
pat,
{"Accept": "application/vnd.github+json"},
)
artifacts = response["artifacts"]
artifact_map = {artifact["name"]: artifact for artifact in artifacts}
print(f"=== Downloading artifact: {artifact_name} ===")
archive_download_url = artifact_map[artifact_name]["archive_download_url"]
artifact_zip_location = download_artifact(
archive_download_url, artifact_name, pat
)
print(f"=== Extracting artifact: {artifact_name} ===")
# The database is in a zip file, which contains a tar.gz file with the DB
# First we open the zip file
with zipfile.ZipFile(artifact_zip_location, "r") as zip_ref:
artifact_unzipped_location = os.path.join(build_dir, artifact_name)
# And then we extract it to build_dir/artifact_name
zip_ref.extractall(artifact_unzipped_location)
# And then we iterate over the contents of the extracted directory
# and extract the tar.gz files inside it
for entry in os.listdir(artifact_unzipped_location):
artifact_tar_location = os.path.join(artifact_unzipped_location, entry)
with tarfile.open(artifact_tar_location, "r:gz") as tar_ref:
# And we just untar it to the same directory as the zip file
tar_ref.extractall(artifact_unzipped_location)
database_results[pretty_name] = os.path.join(
artifact_unzipped_location, remove_extension(entry)
)
print(f"\n=== Extracted {len(database_results)} databases ===")
# Use .get() so projects missing from the DCA experiment show up as failed builds instead of raising a KeyError.
return [(project, database_results.get(project["name"])) for project in projects]
def get_mad_destination_for_project(config, name: str) -> str:
return os.path.join(config["destination"], name)
def get_strategy(config) -> str:
return config["strategy"].lower()
def main(config, args) -> None:
"""
Main function to handle the bulk generation of MaD models.
Args:
config: Configuration dictionary containing project details and other settings.
args: Command line arguments passed to this script.
"""
projects = config["targets"]
if "language" not in config:
print("ERROR: 'language' key is missing in the configuration file.")
sys.exit(1)
language = config["language"]
# Create build directory if it doesn't exist
if not os.path.exists(build_dir):
os.makedirs(build_dir)
# Check if any of the MaD directories contain working directory changes in git
for project in projects:
mad_dir = get_mad_destination_for_project(config, project["name"])
if os.path.exists(mad_dir):
git_status_output = subprocess.check_output(
["git", "status", "-s", mad_dir], text=True
).strip()
if git_status_output:
print(
f"""ERROR: Working directory changes detected in {mad_dir}.
Before generating new models, the existing models are deleted.
To avoid loss of data, please commit your changes."""
)
sys.exit(1)
database_results = []
match get_strategy(config):
case "repo":
extractor_options = config.get("extractor_options", [])
database_results = build_databases_from_projects(
language, extractor_options, projects
)
case "dca":
experiment_name = args.dca
if experiment_name is None:
print("ERROR: --dca argument is required for DCA strategy")
sys.exit(1)
if args.pat is None:
print("ERROR: --pat argument is required for DCA strategy")
sys.exit(1)
if not os.path.exists(args.pat):
print(f"ERROR: Personal Access Token file '{args.pat}' does not exist.")
sys.exit(1)
with open(args.pat, "r") as f:
pat = f.read().strip()
database_results = download_dca_databases(
experiment_name, pat, projects
)
# Generate models for all projects
print("\n=== Generating models ===")
failed_builds = [
project["name"] for project, db_dir in database_results if db_dir is None
]
if failed_builds:
print(
f"ERROR: {len(failed_builds)} database builds failed: {', '.join(failed_builds)}"
)
sys.exit(1)
# Delete the MaD directory for each project
for project, database_dir in database_results:
mad_dir = get_mad_destination_for_project(config, project["name"])
if os.path.exists(mad_dir):
print(f"Deleting existing MaD directory at {mad_dir}")
subprocess.check_call(["rm", "-rf", mad_dir])
for project, database_dir in database_results:
if database_dir is not None:
generate_models(config, project, database_dir)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--config", type=str, help="Path to the configuration file.", required=True
)
parser.add_argument(
"--dca",
type=str,
help="Name of a DCA run that built all the projects",
required=False,
)
parser.add_argument(
"--pat",
type=str,
help="Path to a file containing the personal access token (PAT) required to download DCA databases (the same one you use for DCA)",
required=False,
)
args = parser.parse_args()
# Load config file
config = {}
if not os.path.exists(args.config):
print(f"ERROR: Config file '{args.config}' does not exist.")
sys.exit(1)
try:
with open(args.config, "r") as f:
config = json.load(f)
except json.JSONDecodeError as e:
print(f"ERROR: Failed to parse JSON file {args.config}: {e}")
sys.exit(1)
main(config, args)
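# Example invocations (script and file names are placeholders, not fixed by this file):
#
#   # "repo" strategy: clone and build every configured project locally
#   python bulk_generate_mad.py --config bulk_generation_targets.json
#
#   # "dca" strategy: reuse databases built by a DCA experiment
#   python bulk_generate_mad.py --config bulk_generation_targets.json \
#       --dca my-experiment-name --pat path/to/token-file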

View File

@@ -1,335 +0,0 @@
"""
Experimental script for bulk generation of MaD models based on a list of projects.
Currently the script only targets Rust.
"""
import os.path
import subprocess
import sys
from typing import NotRequired, TypedDict, List
from concurrent.futures import ThreadPoolExecutor, as_completed
import time
import generate_mad as mad
gitroot = (
subprocess.check_output(["git", "rev-parse", "--show-toplevel"])
.decode("utf-8")
.strip()
)
build_dir = os.path.join(gitroot, "mad-generation-build")
def path_to_mad_directory(language: str, name: str) -> str:
return os.path.join(gitroot, f"{language}/ql/lib/ext/generated/{name}")
# A project to generate models for
class Project(TypedDict):
"""
Type definition for Rust projects to model.
Attributes:
name: The name of the project
git_repo: URL to the git repository
git_tag: Optional Git tag to check out
"""
name: str
git_repo: str
git_tag: NotRequired[str]
# List of Rust projects to generate models for.
projects: List[Project] = [
{
"name": "libc",
"git_repo": "https://github.com/rust-lang/libc",
"git_tag": "0.2.172",
},
{
"name": "log",
"git_repo": "https://github.com/rust-lang/log",
"git_tag": "0.4.27",
},
{
"name": "memchr",
"git_repo": "https://github.com/BurntSushi/memchr",
"git_tag": "2.7.4",
},
{
"name": "once_cell",
"git_repo": "https://github.com/matklad/once_cell",
"git_tag": "v1.21.3",
},
{
"name": "rand",
"git_repo": "https://github.com/rust-random/rand",
"git_tag": "0.9.1",
},
{
"name": "smallvec",
"git_repo": "https://github.com/servo/rust-smallvec",
"git_tag": "v1.15.0",
},
{
"name": "serde",
"git_repo": "https://github.com/serde-rs/serde",
"git_tag": "v1.0.219",
},
{
"name": "tokio",
"git_repo": "https://github.com/tokio-rs/tokio",
"git_tag": "tokio-1.45.0",
},
{
"name": "reqwest",
"git_repo": "https://github.com/seanmonstar/reqwest",
"git_tag": "v0.12.15",
},
{
"name": "rocket",
"git_repo": "https://github.com/SergioBenitez/Rocket",
"git_tag": "v0.5.1",
},
{
"name": "actix-web",
"git_repo": "https://github.com/actix/actix-web",
"git_tag": "web-v4.11.0",
},
{
"name": "hyper",
"git_repo": "https://github.com/hyperium/hyper",
"git_tag": "v1.6.0",
},
{
"name": "clap",
"git_repo": "https://github.com/clap-rs/clap",
"git_tag": "v4.5.38",
},
]
def clone_project(project: Project) -> str:
"""
Shallow clone a project into the build directory.
Args:
project: A dictionary containing project information with 'name', 'git_repo', and optional 'git_tag' keys.
Returns:
The path to the cloned project directory.
"""
name = project["name"]
repo_url = project["git_repo"]
git_tag = project.get("git_tag")
# Determine target directory
target_dir = os.path.join(build_dir, name)
# Clone only if directory doesn't already exist
if not os.path.exists(target_dir):
if git_tag:
print(f"Cloning {name} from {repo_url} at tag {git_tag}")
else:
print(f"Cloning {name} from {repo_url}")
subprocess.check_call(
[
"git",
"clone",
"--quiet",
"--depth",
"1", # Shallow clone
*(
["--branch", git_tag] if git_tag else []
), # Add branch if tag is provided
repo_url,
target_dir,
]
)
print(f"Completed cloning {name}")
else:
print(f"Skipping cloning {name} as it already exists at {target_dir}")
return target_dir
def clone_projects(projects: List[Project]) -> List[tuple[Project, str]]:
"""
Clone all projects in parallel.
Args:
projects: List of projects to clone
Returns:
List of (project, project_dir) pairs in the same order as the input projects
"""
start_time = time.time()
max_workers = min(8, len(projects)) # Use at most 8 threads
project_dirs_map = {} # Map to store results by project name
with ThreadPoolExecutor(max_workers=max_workers) as executor:
# Start cloning tasks and keep track of them
future_to_project = {
executor.submit(clone_project, project): project for project in projects
}
# Process results as they complete
for future in as_completed(future_to_project):
project = future_to_project[future]
try:
project_dir = future.result()
project_dirs_map[project["name"]] = (project, project_dir)
except Exception as e:
print(f"ERROR: Failed to clone {project['name']}: {e}")
if len(project_dirs_map) != len(projects):
failed_projects = [
project["name"]
for project in projects
if project["name"] not in project_dirs_map
]
print(
f"ERROR: Only {len(project_dirs_map)} out of {len(projects)} projects were cloned successfully. Failed projects: {', '.join(failed_projects)}"
)
sys.exit(1)
project_dirs = [project_dirs_map[project["name"]] for project in projects]
clone_time = time.time() - start_time
print(f"Cloning completed in {clone_time:.2f} seconds")
return project_dirs
def build_database(project: Project, project_dir: str) -> str | None:
"""
Build a CodeQL database for a project.
Args:
project: A dictionary containing project information with 'name' and 'git_repo' keys.
project_dir: The directory containing the project source code.
Returns:
The path to the created database directory, or None if the build failed.
"""
name = project["name"]
# Create database directory path
database_dir = os.path.join(build_dir, f"{name}-db")
# Only build the database if it doesn't already exist
if not os.path.exists(database_dir):
print(f"Building CodeQL database for {name}...")
try:
subprocess.check_call(
[
"codeql",
"database",
"create",
"--language=rust",
"--source-root=" + project_dir,
"--overwrite",
"-O",
"cargo_features='*'",
"--",
database_dir,
]
)
print(f"Successfully created database at {database_dir}")
except subprocess.CalledProcessError as e:
print(f"Failed to create database for {name}: {e}")
return None
else:
print(
f"Skipping database creation for {name} as it already exists at {database_dir}"
)
return database_dir
def generate_models(project: Project, database_dir: str) -> None:
"""
Generate models for a project.
Args:
project: A dictionary containing project information with 'name' and 'git_repo' keys.
database_dir: The directory containing the CodeQL database.
"""
name = project["name"]
generator = mad.Generator("rust")
generator.generateSinks = True
generator.generateSources = True
generator.generateSummaries = True
generator.setenvironment(database=database_dir, folder=name)
generator.run()
def main() -> None:
"""
Process all projects in three distinct phases:
1. Clone projects (in parallel)
2. Build databases for projects
3. Generate models for successful database builds
"""
# Create build directory if it doesn't exist
if not os.path.exists(build_dir):
os.makedirs(build_dir)
# Check if any of the MaD directories contain working directory changes in git
for project in projects:
mad_dir = path_to_mad_directory("rust", project["name"])
if os.path.exists(mad_dir):
git_status_output = subprocess.check_output(
["git", "status", "-s", mad_dir], text=True
).strip()
if git_status_output:
print(
f"""ERROR: Working directory changes detected in {mad_dir}.
Before generating new models, the existing models are deleted.
To avoid loss of data, please commit your changes."""
)
sys.exit(1)
# Phase 1: Clone projects in parallel
print("=== Phase 1: Cloning projects ===")
project_dirs = clone_projects(projects)
# Phase 2: Build databases for all projects
print("\n=== Phase 2: Building databases ===")
database_results = [
(project, build_database(project, project_dir))
for project, project_dir in project_dirs
]
# Phase 3: Generate models for all projects
print("\n=== Phase 3: Generating models ===")
failed_builds = [
project["name"] for project, db_dir in database_results if db_dir is None
]
if failed_builds:
print(
f"ERROR: {len(failed_builds)} database builds failed: {', '.join(failed_builds)}"
)
sys.exit(1)
# Delete the MaD directory for each project
for project, database_dir in database_results:
mad_dir = path_to_mad_directory("rust", project["name"])
if os.path.exists(mad_dir):
print(f"Deleting existing MaD directory at {mad_dir}")
subprocess.check_call(["rm", "-rf", mad_dir])
for project, database_dir in database_results:
if database_dir is not None:
generate_models(project, database_dir)
if __name__ == "__main__":
main()

View File

@@ -65,4 +65,4 @@ extractor: $(FILES) $(BIN_FILES)
cp ../target/release/codeql-extractor-ruby$(EXE) extractor-pack/tools/$(CODEQL_PLATFORM)/extractor$(EXE)
test: extractor dbscheme
codeql test run --check-databases --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --search-path .. --consistency-queries ql/consistency-queries ql/test
codeql test run --check-databases --check-diff-informed --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --search-path .. --consistency-queries ql/consistency-queries ql/test

View File

@@ -27,7 +27,7 @@ options:
title: Controls compression for the TRAP files written by the extractor.
description: >
This option is only intended for use in debugging the extractor. Accepted
values are 'gzip' (the default, to write gzip-compressed TRAP) and 'none'
(to write uncompressed TRAP).
values are 'gzip' (the default, to write gzip-compressed TRAP), 'zstd' (to
write Zstandard-compressed TRAP), and 'none' (to write uncompressed TRAP).
type: string
pattern: "^(none|gzip)$"
pattern: "^(none|gzip|zstd)$"
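A minimal sketch of exercising this option by hand, setting it through the extractor's environment variable as the integration tests do; the source root and database path below are placeholders:
import os
import subprocess

# Build a Ruby database with Zstandard-compressed TRAP files.
env = dict(os.environ, CODEQL_EXTRACTOR_RUBY_OPTION_TRAP_COMPRESSION="zstd")
subprocess.check_call(
    ["codeql", "database", "create", "--language=ruby", "--source-root=.", "ruby-db"],
    env=env,
)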

View File

@@ -0,0 +1 @@
| source.rb:1:1:3:3 | f |

View File

@@ -0,0 +1,4 @@
import codeql.ruby.AST
from Method m
select m

View File

@@ -0,0 +1,3 @@
def f
puts "hello"
end

View File

@@ -0,0 +1,26 @@
import pytest
@pytest.mark.parametrize(("compression", "suffix"), [
pytest.param("none", [], id="none"),
pytest.param("gzip", [".gz"], id="gzip"),
pytest.param("zstd", [".zst"], id="zstd"),
])
def test(codeql, ruby, compression, suffix, cwd):
codeql.database.create(
_env={
"CODEQL_EXTRACTOR_RUBY_OPTION_TRAP_COMPRESSION": compression,
}
)
trap_files = [*(cwd / "test-db" / "trap").rglob("*.trap*")]
assert trap_files, "No trap files found"
expected_suffixes = [".trap"] + suffix
def is_of_expected_format(file):
return file.name == "metadata.trap.gz" or \
file.suffixes[-len(expected_suffixes):] == expected_suffixes
files_with_wrong_format = [
f for f in trap_files if not is_of_expected_format(f)
]
assert not files_with_wrong_format, f"Found trap files with wrong format: {files_with_wrong_format}"

View File

@@ -23,10 +23,11 @@ options:
title: Controls compression for the TRAP files written by the extractor.
description: >
This option is only intended for use in debugging the extractor. Accepted
values are 'gzip' (to write gzip-compressed TRAP) and 'none'
(currently the default, to write uncompressed TRAP).
values are 'gzip' (to write gzip-compressed TRAP), 'zstd' (to write
Zstandard-compressed TRAP), and 'none' (the default, to write uncompressed
TRAP).
type: string
pattern: "^(none|gzip)$"
pattern: "^(none|gzip|zstd)$"
cargo_target_dir:
title: Directory to use for cargo output files.
description: >
@@ -44,9 +45,10 @@ options:
cargo_features:
title: Cargo features to turn on
description: >
Comma-separated list of features to turn on. If any value is `*` all features
are turned on. By default only default cargo features are enabled. Can be
repeated.
Comma-separated list of features to turn on. By default all features are enabled.
If any features are specified, then only those features are enabled. The `default`
feature must be explicitly specified if only default features are desired.
Can be repeated.
type: array
cargo_cfg_overrides:
title: Cargo cfg overrides
@@ -82,3 +84,11 @@ options:
title: Skip path resolution
description: >
Skip path resolution. This is experimental, while we move path resolution from the extractor to the QL library.
type: string
pattern: "^(false|true)$"
extract_dependencies_as_source:
title: Extract dependencies as source code
description: >
Extract the full source code of dependencies instead of only extracting signatures.
type: string
pattern: "^(false|true)$"
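A rough sketch of passing these options with `-O` when creating a Rust database, modelled on the invocation used by the bulk model-generation script; the paths below are placeholders:
import subprocess

subprocess.check_call(
    [
        "codeql", "database", "create",
        "--language=rust",
        "--source-root=path/to/project",
        "-O", "cargo_features=foo,bar",
        "-O", "extract_dependencies_as_source=true",
        "--",
        "path/to/project-db",
    ]
)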

View File

@@ -29,6 +29,7 @@ pub enum Compression {
#[default] // TODO make gzip default
None,
Gzip,
Zstd,
}
impl From<Compression> for trap::Compression {
@@ -36,6 +37,7 @@ impl From<Compression> for trap::Compression {
match val {
Compression::None => Self::None,
Compression::Gzip => Self::Gzip,
Compression::Zstd => Self::Zstd,
}
}
}
@@ -55,7 +57,7 @@ pub struct Config {
pub cargo_all_targets: bool,
pub logging_flamegraph: Option<PathBuf>,
pub logging_verbosity: Option<String>,
pub compression: Compression,
pub trap_compression: Compression,
pub inputs: Vec<PathBuf>,
pub qltest: bool,
pub qltest_cargo_check: bool,
@@ -67,6 +69,7 @@ pub struct Config {
pub extra_includes: Vec<PathBuf>,
pub proc_macro_server: Option<PathBuf>,
pub skip_path_resolution: bool,
pub extract_dependencies_as_source: bool,
}
impl Config {
@@ -126,6 +129,23 @@ impl Config {
}
}
fn cargo_features(&self) -> CargoFeatures {
// '*' should be considered deprecated, but is still accepted for backward compatibility
if self.cargo_features.is_empty() || self.cargo_features.iter().any(|f| f == "*") {
CargoFeatures::All
} else {
CargoFeatures::Selected {
features: self
.cargo_features
.iter()
.filter(|f| *f != "default")
.cloned()
.collect(),
no_default_features: !self.cargo_features.iter().any(|f| f == "default"),
}
}
}
pub fn to_cargo_config(&self, dir: &AbsPath) -> (CargoConfig, LoadCargoConfig) {
let sysroot = self.sysroot(dir);
(
@@ -156,16 +176,7 @@ impl Config {
.unwrap_or_else(|| self.scratch_dir.join("target")),
)
.ok(),
features: if self.cargo_features.is_empty() {
Default::default()
} else if self.cargo_features.contains(&"*".to_string()) {
CargoFeatures::All
} else {
CargoFeatures::Selected {
features: self.cargo_features.clone(),
no_default_features: false,
}
},
features: self.cargo_features(),
target: self.cargo_target.clone(),
cfg_overrides: to_cfg_overrides(&self.cargo_cfg_overrides),
wrap_rustc_in_build_scripts: false,

View File

@@ -277,6 +277,11 @@ fn main() -> anyhow::Result<()> {
} else {
ResolvePaths::Yes
};
let (library_mode, library_resolve_paths) = if cfg.extract_dependencies_as_source {
(SourceKind::Source, resolve_paths)
} else {
(SourceKind::Library, ResolvePaths::No)
};
let mut processed_files: HashSet<PathBuf, RandomState> =
HashSet::from_iter(files.iter().cloned());
for (manifest, files) in map.values().filter(|(_, files)| !files.is_empty()) {
@@ -316,8 +321,8 @@ fn main() -> anyhow::Result<()> {
file,
&semantics,
vfs,
ResolvePaths::No,
SourceKind::Library,
library_resolve_paths,
library_mode,
);
extractor.archiver.archive(file);
}

View File

@@ -1,4 +1,3 @@
use crate::config::Compression;
use crate::{config, generated};
use codeql_extractor::{extractor, file_paths, trap};
use ra_ap_ide_db::line_index::LineCol;
@@ -9,7 +8,7 @@ use std::path::{Path, PathBuf};
use tracing::debug;
pub use trap::Label as UntypedLabel;
pub use trap::Writer;
pub use trap::{Compression, Writer};
pub trait AsTrapKeyPart {
fn as_key_part(&self) -> String;
@@ -245,8 +244,7 @@ impl TrapFile {
pub fn commit(&self) -> std::io::Result<()> {
std::fs::create_dir_all(self.path.parent().unwrap())?;
self.writer
.write_to_file(&self.path, self.compression.into())
self.writer.write_to_file(&self.path, self.compression)
}
}
@@ -261,12 +259,16 @@ impl TrapFileProvider {
std::fs::create_dir_all(&trap_dir)?;
Ok(TrapFileProvider {
trap_dir,
compression: cfg.compression,
compression: cfg.trap_compression.into(),
})
}
pub fn create(&self, category: &str, key: impl AsRef<Path>) -> TrapFile {
let path = file_paths::path_for(&self.trap_dir.join(category), key.as_ref(), "trap");
let path = file_paths::path_for(
&self.trap_dir.join(category),
key.as_ref(),
self.compression.extension(),
);
debug!("creating trap file {}", path.display());
let mut writer = trap::Writer::new();
extractor::populate_empty_location(&mut writer);

View File

@@ -0,0 +1,75 @@
{
"strategy": "repo",
"language": "rust",
"targets": [
{
"name": "libc",
"git_repo": "https://github.com/rust-lang/libc",
"git_tag": "0.2.172"
},
{
"name": "log",
"git_repo": "https://github.com/rust-lang/log",
"git_tag": "0.4.27"
},
{
"name": "memchr",
"git_repo": "https://github.com/BurntSushi/memchr",
"git_tag": "2.7.4"
},
{
"name": "once_cell",
"git_repo": "https://github.com/matklad/once_cell",
"git_tag": "v1.21.3"
},
{
"name": "rand",
"git_repo": "https://github.com/rust-random/rand",
"git_tag": "0.9.1"
},
{
"name": "smallvec",
"git_repo": "https://github.com/servo/rust-smallvec",
"git_tag": "v1.15.0"
},
{
"name": "serde",
"git_repo": "https://github.com/serde-rs/serde",
"git_tag": "v1.0.219"
},
{
"name": "tokio",
"git_repo": "https://github.com/tokio-rs/tokio",
"git_tag": "tokio-1.45.0"
},
{
"name": "reqwest",
"git_repo": "https://github.com/seanmonstar/reqwest",
"git_tag": "v0.12.15"
},
{
"name": "rocket",
"git_repo": "https://github.com/SergioBenitez/Rocket",
"git_tag": "v0.5.1"
},
{
"name": "actix-web",
"git_repo": "https://github.com/actix/actix-web",
"git_tag": "web-v4.11.0"
},
{
"name": "hyper",
"git_repo": "https://github.com/hyperium/hyper",
"git_tag": "v1.6.0"
},
{
"name": "clap",
"git_repo": "https://github.com/clap-rs/clap",
"git_tag": "v4.5.38"
}
],
"destination": "rust/ql/lib/ext/generated",
"extractor_options": [
"cargo_features='*'"
]
}

View File

@@ -1 +1,2 @@
target/
Cargo.lock

View File

@@ -10,7 +10,34 @@ def test_rust_project(codeql, rust, rust_project, check_source_archive, rust_che
codeql.database.create()
@pytest.mark.ql_test(None)
def test_do_not_print_env(codeql, rust, cargo, check_env_not_dumped, rust_check_diagnostics):
# parametrizing `rust_edition` allows us to skip the default parametrization over all editions
@pytest.mark.parametrize("rust_edition", [2024])
def test_do_not_print_env(codeql, rust, rust_edition, cargo, check_env_not_dumped, rust_check_diagnostics):
codeql.database.create(_env={
"CODEQL_EXTRACTOR_RUST_VERBOSE": "2",
})
@pytest.mark.ql_test("steps.ql", expected=".cargo.expected")
@pytest.mark.parametrize(("rust_edition", "compression", "suffix"), [
pytest.param(2024, "none", [], id="none"),
pytest.param(2024, "gzip", [".gz"], id="gzip"),
pytest.param(2024, "zstd", [".zst"], id="zstd"),
])
def test_compression(codeql, rust, rust_edition, compression, suffix, cargo, rust_check_diagnostics, cwd):
codeql.database.create(
_env={
"CODEQL_EXTRACTOR_RUST_OPTION_TRAP_COMPRESSION": compression,
}
)
trap_files = [*(cwd / "test-db" / "trap").rglob("*.trap*")]
assert trap_files, "No trap files found"
expected_suffixes = [".trap"] + suffix
def is_of_expected_format(file):
return file.name == "metadata.trap.gz" or \
file.suffixes[-len(expected_suffixes):] == expected_suffixes
files_with_wrong_format = [
f for f in trap_files if not is_of_expected_format(f)
]
assert not files_with_wrong_format, f"Found trap files with wrong format: {files_with_wrong_format}"

View File

@@ -1,53 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "macro_expansion"
version = "0.1.0"
dependencies = [
"macros",
]
[[package]]
name = "macros"
version = "0.1.0"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "2.0.101"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"

View File

@@ -1,11 +1,3 @@
[workspace]
members = ["macros"]
members = [ "attributes", "calls", "proc_macros"]
resolver = "2"
[package]
name = "macro_expansion"
version = "0.1.0"
edition = "2024"
[dependencies]
macros = { path = "macros" }

View File

@@ -0,0 +1,7 @@
[package]
name = "attributes"
version = "0.1.0"
edition = "2024"
[dependencies]
proc_macros = { path = "../proc_macros" }

View File

@@ -1,8 +1,8 @@
use macros::repeat;
use proc_macros::repeat;
#[repeat(3)]
fn foo() {
println!("Hello, world!");
_ = concat!("Hello ", "world!");
#[repeat(2)]
fn inner() {}

View File

@@ -0,0 +1,6 @@
[package]
name = "calls"
version = "0.1.0"
edition = "2024"
[dependencies]

View File

@@ -0,0 +1,3 @@
fn included() {
_ = concat!("Hello", " ", "world!"); // this doesn't expand (in included.rs) since 0.0.274
}

View File

@@ -0,0 +1,30 @@
struct S;
macro_rules! def_x {
() => {
fn x() {}
};
}
impl S {
def_x!(); // this doesn't expand since 0.0.274
}
macro_rules! my_macro {
($head:expr, $($tail:tt)*) => { format!($head, $($tail)*) };
}
fn test() {
_ = concat!("x", "y");
_ = my_macro!(
concat!("<", "{}", ">"), // this doesn't expand since 0.0.274
"hi",
);
}
include!("included.rs");
#[doc = include_str!("some.txt")] // this doesn't expand since 0.0.274
fn documented() {}

View File

@@ -0,0 +1 @@
Hey!

View File

@@ -38,7 +38,7 @@
"pretty": "__REDACTED__"
}
},
"numberOfFiles": 2,
"numberOfFiles": 4,
"numberOfManifests": 1
},
"severity": "note",

View File

@@ -1,5 +1,5 @@
[package]
name = "macros"
name = "proc_macros"
version = "0.1.0"
edition = "2024"

View File

@@ -1,2 +1,4 @@
macros/src/lib.rs
src/lib.rs
attributes/src/lib.rs
calls/src/included.rs
calls/src/lib.rs
proc_macros/src/lib.rs

View File

@@ -1,16 +0,0 @@
| Extraction errors | 0 |
| Extraction warnings | 0 |
| Files extracted - total | 2 |
| Files extracted - with errors | 0 |
| Files extracted - without errors | 2 |
| Files extracted - without errors % | 100 |
| Inconsistencies - AST | 0 |
| Inconsistencies - CFG | 0 |
| Inconsistencies - Path resolution | 0 |
| Inconsistencies - SSA | 0 |
| Inconsistencies - data flow | 0 |
| Lines of code extracted | 29 |
| Lines of user code extracted | 29 |
| Macro calls - resolved | 52 |
| Macro calls - total | 53 |
| Macro calls - unresolved | 1 |

View File

@@ -1 +0,0 @@
queries/summary/SummaryStatsReduced.ql

View File

@@ -1,17 +1,34 @@
| src/lib.rs:3:1:9:1 | fn foo | 0 | src/lib.rs:4:1:8:16 | fn foo_0 |
| src/lib.rs:3:1:9:1 | fn foo | 1 | src/lib.rs:4:1:8:16 | fn foo_1 |
| src/lib.rs:3:1:9:1 | fn foo | 2 | src/lib.rs:4:1:8:16 | fn foo_2 |
| src/lib.rs:7:5:8:16 | fn inner | 0 | src/lib.rs:8:5:8:16 | fn inner_0 |
| src/lib.rs:7:5:8:16 | fn inner | 0 | src/lib.rs:8:5:8:16 | fn inner_0 |
| src/lib.rs:7:5:8:16 | fn inner | 0 | src/lib.rs:8:5:8:16 | fn inner_0 |
| src/lib.rs:7:5:8:16 | fn inner | 1 | src/lib.rs:8:5:8:16 | fn inner_1 |
| src/lib.rs:7:5:8:16 | fn inner | 1 | src/lib.rs:8:5:8:16 | fn inner_1 |
| src/lib.rs:7:5:8:16 | fn inner | 1 | src/lib.rs:8:5:8:16 | fn inner_1 |
| src/lib.rs:11:1:13:11 | fn bar | 0 | src/lib.rs:12:1:13:10 | fn bar_0 |
| src/lib.rs:11:1:13:11 | fn bar | 1 | src/lib.rs:12:1:13:10 | fn bar_1 |
| src/lib.rs:12:1:13:10 | fn bar_0 | 0 | src/lib.rs:13:1:13:10 | fn bar_0_0 |
| src/lib.rs:12:1:13:10 | fn bar_0 | 1 | src/lib.rs:13:1:13:10 | fn bar_0_1 |
| src/lib.rs:12:1:13:10 | fn bar_0 | 2 | src/lib.rs:13:1:13:10 | fn bar_0_2 |
| src/lib.rs:12:1:13:10 | fn bar_1 | 0 | src/lib.rs:13:1:13:10 | fn bar_1_0 |
| src/lib.rs:12:1:13:10 | fn bar_1 | 1 | src/lib.rs:13:1:13:10 | fn bar_1_1 |
| src/lib.rs:12:1:13:10 | fn bar_1 | 2 | src/lib.rs:13:1:13:10 | fn bar_1_2 |
attribute_macros
| attributes/src/lib.rs:3:1:9:1 | fn foo | 0 | attributes/src/lib.rs:4:1:8:16 | fn foo_0 |
| attributes/src/lib.rs:3:1:9:1 | fn foo | 1 | attributes/src/lib.rs:4:1:8:16 | fn foo_1 |
| attributes/src/lib.rs:3:1:9:1 | fn foo | 2 | attributes/src/lib.rs:4:1:8:16 | fn foo_2 |
| attributes/src/lib.rs:7:5:8:16 | fn inner | 0 | attributes/src/lib.rs:8:5:8:16 | fn inner_0 |
| attributes/src/lib.rs:7:5:8:16 | fn inner | 0 | attributes/src/lib.rs:8:5:8:16 | fn inner_0 |
| attributes/src/lib.rs:7:5:8:16 | fn inner | 0 | attributes/src/lib.rs:8:5:8:16 | fn inner_0 |
| attributes/src/lib.rs:7:5:8:16 | fn inner | 1 | attributes/src/lib.rs:8:5:8:16 | fn inner_1 |
| attributes/src/lib.rs:7:5:8:16 | fn inner | 1 | attributes/src/lib.rs:8:5:8:16 | fn inner_1 |
| attributes/src/lib.rs:7:5:8:16 | fn inner | 1 | attributes/src/lib.rs:8:5:8:16 | fn inner_1 |
| attributes/src/lib.rs:11:1:13:11 | fn bar | 0 | attributes/src/lib.rs:12:1:13:10 | fn bar_0 |
| attributes/src/lib.rs:11:1:13:11 | fn bar | 1 | attributes/src/lib.rs:12:1:13:10 | fn bar_1 |
| attributes/src/lib.rs:12:1:13:10 | fn bar_0 | 0 | attributes/src/lib.rs:13:1:13:10 | fn bar_0_0 |
| attributes/src/lib.rs:12:1:13:10 | fn bar_0 | 1 | attributes/src/lib.rs:13:1:13:10 | fn bar_0_1 |
| attributes/src/lib.rs:12:1:13:10 | fn bar_0 | 2 | attributes/src/lib.rs:13:1:13:10 | fn bar_0_2 |
| attributes/src/lib.rs:12:1:13:10 | fn bar_1 | 0 | attributes/src/lib.rs:13:1:13:10 | fn bar_1_0 |
| attributes/src/lib.rs:12:1:13:10 | fn bar_1 | 1 | attributes/src/lib.rs:13:1:13:10 | fn bar_1_1 |
| attributes/src/lib.rs:12:1:13:10 | fn bar_1 | 2 | attributes/src/lib.rs:13:1:13:10 | fn bar_1_2 |
macro_calls
| attributes/src/lib.rs:5:9:5:34 | concat!... | attributes/src/lib.rs:5:17:5:34 | "Hello world!" |
| attributes/src/lib.rs:5:9:5:34 | concat!... | attributes/src/lib.rs:5:17:5:34 | "Hello world!" |
| attributes/src/lib.rs:5:9:5:34 | concat!... | attributes/src/lib.rs:5:17:5:34 | "Hello world!" |
| calls/src/included.rs:2:9:2:39 | concat!... | calls/src/included.rs:2:17:2:38 | "Hello world!" |
| calls/src/lib.rs:10:5:10:13 | def_x!... | calls/src/lib.rs:10:5:10:13 | MacroItems |
| calls/src/lib.rs:19:9:19:25 | concat!... | calls/src/lib.rs:19:17:19:24 | "xy" |
| calls/src/lib.rs:21:9:24:5 | my_macro!... | calls/src/lib.rs:22:9:23:13 | MacroExpr |
| calls/src/lib.rs:22:9:22:31 | concat!... | calls/src/lib.rs:22:17:22:30 | "<{}>" |
| calls/src/lib.rs:22:9:23:13 | ...::format_args!... | calls/src/lib.rs:22:9:23:13 | FormatArgsExpr |
| calls/src/lib.rs:22:9:23:13 | format!... | calls/src/lib.rs:22:9:23:13 | ...::must_use(...) |
| calls/src/lib.rs:27:1:27:24 | concat!... | calls/src/lib.rs:27:1:27:24 | "Hello world!" |
| calls/src/lib.rs:27:1:27:24 | include!... | calls/src/lib.rs:27:1:27:24 | MacroItems |
| calls/src/lib.rs:29:9:29:32 | include_str!... | calls/src/lib.rs:29:22:29:31 | "" |
unexpanded_macro_calls
| attributes/src/lib.rs:5:9:5:35 | concat!... |

View File

@@ -1,5 +1,15 @@
import rust
from Item i, MacroItems items, int index, Item expanded
where i.fromSource() and i.getAttributeMacroExpansion() = items and items.getItem(index) = expanded
select i, index, expanded
query predicate attribute_macros(Item i, int index, Item expanded) {
i.fromSource() and expanded = i.getAttributeMacroExpansion().getItem(index)
}
query predicate macro_calls(MacroCall c, AstNode expansion) {
c.fromSource() and
not c.getLocation().getFile().getAbsolutePath().matches("%proc_macros%") and
expansion = c.getMacroCallExpansion()
}
query predicate unexpanded_macro_calls(MacroCall c) {
c.fromSource() and not c.hasMacroCallExpansion()
}

View File

@@ -1,5 +1,6 @@
import pytest
@pytest.mark.ql_test(expected=".all.expected")
def test_default(codeql, rust):
codeql.database.create()
@@ -8,10 +9,33 @@ def test_default(codeql, rust):
pytest.param(p,
marks=pytest.mark.ql_test(expected=f".{e}.expected"))
for p, e in (
("default", "none"),
("foo", "foo"),
("bar", "bar"),
("*", "all"),
("foo,bar", "all"))
("foo,bar", "all"),
("default,foo", "foo"),
("default,bar", "bar"),
)
])
def test_features(codeql, rust, features):
codeql.database.create(extractor_option=f"cargo_features={features}")
@pytest.mark.parametrize("features",
[
pytest.param(p,
marks=pytest.mark.ql_test(expected=f".{e}.expected"))
for p, e in (
("default", "foo"),
("foo", "foo"),
("bar", "bar"),
("*", "all"),
("foo,bar", "all"),
("default,foo", "foo"),
("default,bar", "all"),
)
])
def test_features_with_default(codeql, rust, features):
with open("Cargo.toml", "a") as f:
print('default = ["foo"]', file=f)
codeql.database.create(extractor_option=f"cargo_features={features}")

View File

@@ -0,0 +1,38 @@
/**
* Provides classes for arithmetic operations.
*/
private import codeql.rust.elements.BinaryExpr
private import codeql.rust.elements.PrefixExpr
private import codeql.rust.elements.Operation
private import codeql.rust.elements.AssignmentOperation
/**
* An arithmetic operation, such as `+`, `*=`, or `-`.
*/
abstract private class ArithmeticOperationImpl extends Operation { }
final class ArithmeticOperation = ArithmeticOperationImpl;
/**
* A binary arithmetic operation, such as `+` or `*`.
*/
final class BinaryArithmeticOperation extends BinaryExpr, ArithmeticOperationImpl {
BinaryArithmeticOperation() { this.getOperatorName() = ["+", "-", "*", "/", "%"] }
}
/**
* An arithmetic assignment operation, such as `+=` or `*=`.
*/
final class AssignArithmeticOperation extends BinaryExpr, ArithmeticOperationImpl,
AssignmentOperation
{
AssignArithmeticOperation() { this.getOperatorName() = ["+=", "-=", "*=", "/=", "%="] }
}
/**
* A prefix arithmetic operation, such as `-`.
*/
final class PrefixArithmeticOperation extends PrefixExpr, ArithmeticOperationImpl {
PrefixArithmeticOperation() { this.getOperatorName() = "-" }
}

View File

@@ -0,0 +1,28 @@
/**
* Provides classes for bitwise operations.
*/
private import codeql.rust.elements.BinaryExpr
private import codeql.rust.elements.Operation
private import codeql.rust.elements.AssignmentOperation
/**
* A bitwise operation, such as `&`, `<<`, or `|=`.
*/
abstract private class BitwiseOperationImpl extends Operation { }
final class BitwiseOperation = BitwiseOperationImpl;
/**
* A binary bitwise operation, such as `&` or `<<`.
*/
final class BinaryBitwiseOperation extends BinaryExpr, BitwiseOperationImpl {
BinaryBitwiseOperation() { this.getOperatorName() = ["&", "|", "^", "<<", ">>"] }
}
/**
* A bitwise assignment operation, such as `|=` or `<<=`.
*/
final class AssignBitwiseOperation extends BinaryExpr, BitwiseOperationImpl, AssignmentOperation {
AssignBitwiseOperation() { this.getOperatorName() = ["&=", "|=", "^=", "<<=", ">>="] }
}

View File

@@ -0,0 +1,13 @@
/**
* Provides classes for deref expressions (`*`).
*/
private import codeql.rust.elements.PrefixExpr
private import codeql.rust.elements.Operation
/**
* A dereference expression, the prefix operator `*`.
*/
final class DerefExpr extends PrefixExpr, Operation {
DerefExpr() { this.getOperatorName() = "*" }
}

View File

@@ -28,6 +28,10 @@ module Impl {
override string getOperatorName() { result = Generated::BinaryExpr.super.getOperatorName() }
override Expr getAnOperand() { result = [this.getLhs(), this.getRhs()] }
override Expr getOperand(int n) {
n = 0 and result = this.getLhs()
or
n = 1 and result = this.getRhs()
}
}
}

View File

@@ -14,6 +14,7 @@ private import codeql.rust.elements.PathExpr
module Impl {
private import rust
private import codeql.rust.internal.PathResolution as PathResolution
private import codeql.rust.internal.TypeInference as TypeInference
pragma[nomagic]
Path getFunctionPath(CallExpr ce) { result = ce.getFunction().(PathExpr).getPath() }
@@ -36,7 +37,14 @@ module Impl {
class CallExpr extends Generated::CallExpr {
override string toStringImpl() { result = this.getFunction().toAbbreviatedString() + "(...)" }
override Callable getStaticTarget() { result = getResolvedFunction(this) }
override Callable getStaticTarget() {
// If this call is to a trait method, e.g., `Trait::foo(bar)`, then check
// if type inference can resolve it to the correct trait implementation.
result = TypeInference::resolveMethodCallTarget(this)
or
not exists(TypeInference::resolveMethodCallTarget(this)) and
result = getResolvedFunction(this)
}
/** Gets the struct that this call resolves to, if any. */
Struct getStruct() { result = getResolvedFunction(this) }

View File

@@ -14,14 +14,6 @@ private import codeql.rust.internal.TypeInference
* be referenced directly.
*/
module Impl {
private predicate isInherentImplFunction(Function f) {
f = any(Impl impl | not impl.hasTrait()).(ImplItemNode).getAnAssocItem()
}
private predicate isTraitImplFunction(Function f) {
f = any(Impl impl | impl.hasTrait()).(ImplItemNode).getAnAssocItem()
}
// the following QLdoc is generated: if you need to edit it, do it in the schema file
/**
* A method call expression. For example:
@@ -31,38 +23,7 @@ module Impl {
* ```
*/
class MethodCallExpr extends Generated::MethodCallExpr {
private Function getStaticTargetFrom(boolean fromSource) {
result = resolveMethodCallExpr(this) and
(if result.fromSource() then fromSource = true else fromSource = false) and
(
// prioritize inherent implementation methods first
isInherentImplFunction(result)
or
not isInherentImplFunction(resolveMethodCallExpr(this)) and
(
// then trait implementation methods
isTraitImplFunction(result)
or
not isTraitImplFunction(resolveMethodCallExpr(this)) and
(
// then trait methods with default implementations
result.hasBody()
or
// and finally trait methods without default implementations
not resolveMethodCallExpr(this).hasBody()
)
)
)
}
override Function getStaticTarget() {
// Functions in source code also gets extracted as library code, due to
// this duplication we prioritize functions from source code.
result = this.getStaticTargetFrom(true)
or
not exists(this.getStaticTargetFrom(true)) and
result = this.getStaticTargetFrom(false)
}
override Function getStaticTarget() { result = resolveMethodCallTarget(this) }
private string toStringPart(int index) {
index = 0 and

View File

@@ -7,6 +7,78 @@
private import rust
private import codeql.rust.elements.internal.ExprImpl::Impl as ExprImpl
/**
* Holds if the operator `op` is overloaded to a trait with the canonical path
* `path` and the method name `method`.
*/
private predicate isOverloaded(string op, string path, string method) {
// Negation
op = "-" and path = "core::ops::arith::Neg" and method = "neg"
or
// Not
op = "!" and path = "core::ops::bit::Not" and method = "not"
or
// Dereference
op = "*" and path = "core::ops::Deref" and method = "deref"
or
// Comparison operators
op = "==" and path = "core::cmp::PartialEq" and method = "eq"
or
op = "!=" and path = "core::cmp::PartialEq" and method = "ne"
or
op = "<" and path = "core::cmp::PartialOrd" and method = "lt"
or
op = "<=" and path = "core::cmp::PartialOrd" and method = "le"
or
op = ">" and path = "core::cmp::PartialOrd" and method = "gt"
or
op = ">=" and path = "core::cmp::PartialOrd" and method = "ge"
or
// Arithmetic operators
op = "+" and path = "core::ops::arith::Add" and method = "add"
or
op = "-" and path = "core::ops::arith::Sub" and method = "sub"
or
op = "*" and path = "core::ops::arith::Mul" and method = "mul"
or
op = "/" and path = "core::ops::arith::Div" and method = "div"
or
op = "%" and path = "core::ops::arith::Rem" and method = "rem"
or
// Arithmetic assignment expressions
op = "+=" and path = "core::ops::arith::AddAssign" and method = "add_assign"
or
op = "-=" and path = "core::ops::arith::SubAssign" and method = "sub_assign"
or
op = "*=" and path = "core::ops::arith::MulAssign" and method = "mul_assign"
or
op = "/=" and path = "core::ops::arith::DivAssign" and method = "div_assign"
or
op = "%=" and path = "core::ops::arith::RemAssign" and method = "rem_assign"
or
// Bitwise operators
op = "&" and path = "core::ops::bit::BitAnd" and method = "bitand"
or
op = "|" and path = "core::ops::bit::BitOr" and method = "bitor"
or
op = "^" and path = "core::ops::bit::BitXor" and method = "bitxor"
or
op = "<<" and path = "core::ops::bit::Shl" and method = "shl"
or
op = ">>" and path = "core::ops::bit::Shr" and method = "shr"
or
// Bitwise assignment operators
op = "&=" and path = "core::ops::bit::BitAndAssign" and method = "bitand_assign"
or
op = "|=" and path = "core::ops::bit::BitOrAssign" and method = "bitor_assign"
or
op = "^=" and path = "core::ops::bit::BitXorAssign" and method = "bitxor_assign"
or
op = "<<=" and path = "core::ops::bit::ShlAssign" and method = "shl_assign"
or
op = ">>=" and path = "core::ops::bit::ShrAssign" and method = "shr_assign"
}
/**
* INTERNAL: This module contains the customizable definition of `Operation` and should not
* be referenced directly.
@@ -16,14 +88,28 @@ module Impl {
* An operation, for example `&&`, `+=`, `!` or `*`.
*/
abstract class Operation extends ExprImpl::Expr {
/**
* Gets the operator name of this operation, if it exists.
*/
/** Gets the operator name of this operation, if it exists. */
abstract string getOperatorName();
/** Gets the `n`th operand of this operation, if any. */
abstract Expr getOperand(int n);
/**
* Gets an operand of this operation.
* Gets the number of operands of this operation.
*
* This is either 1 for prefix operations, or 2 for binary operations.
*/
abstract Expr getAnOperand();
final int getNumberOfOperands() { result = strictcount(this.getAnOperand()) }
/** Gets an operand of this operation. */
Expr getAnOperand() { result = this.getOperand(_) }
/**
* Holds if this operation is overloaded to the method `methodName` of the
* trait `trait`.
*/
predicate isOverloaded(Trait trait, string methodName) {
isOverloaded(this.getOperatorName(), trait.getCanonicalPath(), methodName)
}
}
}

View File

@@ -26,6 +26,6 @@ module Impl {
override string getOperatorName() { result = Generated::PrefixExpr.super.getOperatorName() }
override Expr getAnOperand() { result = this.getExpr() }
override Expr getOperand(int n) { n = 0 and result = this.getExpr() }
}
}

View File

@@ -5,6 +5,7 @@
*/
private import codeql.rust.elements.internal.generated.RefExpr
private import codeql.rust.elements.internal.OperationImpl::Impl as OperationImpl
/**
* INTERNAL: This module contains the customizable definition of `RefExpr` and should not
@@ -21,11 +22,15 @@ module Impl {
* let raw_mut: &mut i32 = &raw mut foo;
* ```
*/
class RefExpr extends Generated::RefExpr {
class RefExpr extends Generated::RefExpr, OperationImpl::Operation {
override string toStringImpl() {
result = "&" + concat(int i | | this.getSpecPart(i), " " order by i)
}
override string getOperatorName() { result = "&" }
override Expr getOperand(int n) { n = 0 and result = this.getExpr() }
private string getSpecPart(int index) {
index = 0 and this.isRaw() and result = "raw"
or

View File

@@ -610,7 +610,7 @@ module Impl {
exists(Expr mid |
assignmentExprDescendant(mid) and
getImmediateParent(e) = mid and
not mid.(PrefixExpr).getOperatorName() = "*" and
not mid instanceof DerefExpr and
not mid instanceof FieldExpr and
not mid instanceof IndexExpr
)

View File

@@ -29,6 +29,10 @@ extensions:
pack: codeql/rust-all
extensible: summaryModel
data:
# Box
- ["lang:alloc", "<crate::boxed::Box>::pin", "Argument[0]", "ReturnValue.Reference", "value", "manual"]
- ["lang:alloc", "<crate::boxed::Box>::new", "Argument[0]", "ReturnValue.Reference", "value", "manual"]
- ["lang:alloc", "<crate::boxed::Box>::into_pin", "Argument[0]", "ReturnValue", "value", "manual"]
# Fmt
- ["lang:alloc", "crate::fmt::format", "Argument[0]", "ReturnValue", "taint", "manual"]
# String

View File

@@ -32,6 +32,14 @@ extensions:
- ["lang:core", "<crate::alloc::layout::Layout>::align_to", "Argument[self]", "ReturnValue.Field[crate::result::Result::Ok(0)]", "taint", "manual"]
- ["lang:core", "<crate::alloc::layout::Layout>::pad_to_align", "Argument[self]", "ReturnValue", "taint", "manual"]
- ["lang:core", "<crate::alloc::layout::Layout>::size", "Argument[self]", "ReturnValue", "taint", "manual"]
# Pin
- ["lang:core", "crate::pin::Pin", "Argument[0]", "ReturnValue", "value", "manual"]
- ["lang:core", "<crate::pin::Pin>::new", "Argument[0]", "ReturnValue", "value", "manual"]
- ["lang:core", "<crate::pin::Pin>::new_unchecked", "Argument[0].Reference", "ReturnValue", "value", "manual"]
- ["lang:core", "<crate::pin::Pin>::into_inner", "Argument[0]", "ReturnValue", "value", "manual"]
- ["lang:core", "<crate::pin::Pin>::into_inner_unchecked", "Argument[0]", "ReturnValue", "value", "manual"]
- ["lang:core", "<crate::pin::Pin>::set", "Argument[0]", "Argument[self]", "value", "manual"]
# Ptr
- ["lang:core", "crate::ptr::read", "Argument[0].Reference", "ReturnValue", "value", "manual"]
- ["lang:core", "crate::ptr::read_unaligned", "Argument[0].Reference", "ReturnValue", "value", "manual"]

View File

@@ -1413,7 +1413,7 @@ private predicate useImportEdge(Use use, string name, ItemNode item) {
*/
private predicate preludeEdge(SourceFile f, string name, ItemNode i) {
exists(Crate core, ModuleLikeNode mod, ModuleItemNode prelude, ModuleItemNode rust |
f = any(Crate c0 | core = c0.getDependency(_)).getASourceFile() and
f = any(Crate c0 | core = c0.getDependency(_) or core = c0).getASourceFile() and
core.getName() = "core" and
mod = core.getSourceFile() and
prelude = mod.getASuccessorRec("prelude") and
@@ -1438,8 +1438,8 @@ private module Debug {
private Locatable getRelevantLocatable() {
exists(string filepath, int startline, int startcolumn, int endline, int endcolumn |
result.getLocation().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn) and
filepath.matches("%/term.rs") and
startline = [71]
filepath.matches("%/test.rs") and
startline = 74
)
}

View File

@@ -259,8 +259,7 @@ private predicate typeEqualityLeft(AstNode n1, TypePath path1, AstNode n2, TypeP
typeEquality(n1, path1, n2, path2)
or
n2 =
any(PrefixExpr pe |
pe.getOperatorName() = "*" and
any(DerefExpr pe |
pe.getExpr() = n1 and
path1.isCons(TRefTypeParameter(), path2)
)
@@ -271,8 +270,7 @@ private predicate typeEqualityRight(AstNode n1, TypePath path1, AstNode n2, Type
typeEquality(n1, path1, n2, path2)
or
n2 =
any(PrefixExpr pe |
pe.getOperatorName() = "*" and
any(DerefExpr pe |
pe.getExpr() = n1 and
path1 = TypePath::cons(TRefTypeParameter(), path2)
)
@@ -314,7 +312,7 @@ private Type getRefAdjustImplicitSelfType(SelfParam self, TypePath suffix, Type
pragma[nomagic]
private Type resolveImplSelfType(Impl i, TypePath path) {
result = i.getSelfTy().(TypeReprMention).resolveTypeAt(path)
result = i.getSelfTy().(TypeMention).resolveTypeAt(path)
}
/** Gets the type at `path` of the implicitly typed `self` parameter. */
@@ -377,7 +375,7 @@ private module StructExprMatchingInput implements MatchingInputSig {
Type getDeclaredType(DeclarationPosition dpos, TypePath path) {
// type of a field
exists(TypeReprMention tp |
exists(TypeMention tp |
tp = this.getField(dpos.asFieldPos()).getTypeRepr() and
result = tp.resolveTypeAt(path)
)
@@ -537,7 +535,7 @@ private module CallExprBaseMatchingInput implements MatchingInputSig {
override Type getParameterType(DeclarationPosition dpos, TypePath path) {
exists(int pos |
result = this.getTupleField(pos).getTypeRepr().(TypeReprMention).resolveTypeAt(path) and
result = this.getTupleField(pos).getTypeRepr().(TypeMention).resolveTypeAt(path) and
dpos = TPositionalDeclarationPosition(pos, false)
)
}
@@ -560,7 +558,7 @@ private module CallExprBaseMatchingInput implements MatchingInputSig {
override Type getParameterType(DeclarationPosition dpos, TypePath path) {
exists(int p |
result = this.getTupleField(p).getTypeRepr().(TypeReprMention).resolveTypeAt(path) and
result = this.getTupleField(p).getTypeRepr().(TypeMention).resolveTypeAt(path) and
dpos = TPositionalDeclarationPosition(p, false)
)
}
@@ -608,7 +606,7 @@ private module CallExprBaseMatchingInput implements MatchingInputSig {
}
override Type getReturnType(TypePath path) {
result = this.getRetType().getTypeRepr().(TypeReprMention).resolveTypeAt(path)
result = this.getRetType().getTypeRepr().(TypeMention).resolveTypeAt(path)
}
}
@@ -645,12 +643,22 @@ private module CallExprBaseMatchingInput implements MatchingInputSig {
private import codeql.rust.elements.internal.CallExprImpl::Impl as CallExprImpl
class Access extends CallExprBase {
private TypeReprMention getMethodTypeArg(int i) {
abstract class Access extends Expr {
abstract Type getTypeArgument(TypeArgumentPosition apos, TypePath path);
abstract AstNode getNodeAt(AccessPosition apos);
abstract Type getInferredType(AccessPosition apos, TypePath path);
abstract Declaration getTarget();
}
private class CallExprBaseAccess extends Access instanceof CallExprBase {
private TypeMention getMethodTypeArg(int i) {
result = this.(MethodCallExpr).getGenericArgList().getTypeArg(i)
}
Type getTypeArgument(TypeArgumentPosition apos, TypePath path) {
override Type getTypeArgument(TypeArgumentPosition apos, TypePath path) {
exists(TypeMention arg | result = arg.resolveTypeAt(path) |
arg = getExplicitTypeArgMention(CallExprImpl::getFunctionPath(this), apos.asTypeParam())
or
@@ -658,7 +666,7 @@ private module CallExprBaseMatchingInput implements MatchingInputSig {
)
}
AstNode getNodeAt(AccessPosition apos) {
override AstNode getNodeAt(AccessPosition apos) {
exists(int p, boolean isMethodCall |
argPos(this, result, p, isMethodCall) and
apos = TPositionalAccessPosition(p, isMethodCall)
@@ -671,14 +679,39 @@ private module CallExprBaseMatchingInput implements MatchingInputSig {
apos = TReturnAccessPosition()
}
Type getInferredType(AccessPosition apos, TypePath path) {
override Type getInferredType(AccessPosition apos, TypePath path) {
result = inferType(this.getNodeAt(apos), path)
}
Declaration getTarget() {
override Declaration getTarget() {
result = CallExprImpl::getResolvedFunction(this)
or
result = resolveMethodCallExpr(this) // mutual recursion; resolving method calls requires resolving types and vice versa
result = inferMethodCallTarget(this) // mutual recursion; resolving method calls requires resolving types and vice versa
}
}
private class OperationAccess extends Access instanceof Operation {
OperationAccess() { super.isOverloaded(_, _) }
override Type getTypeArgument(TypeArgumentPosition apos, TypePath path) {
// The syntax for operators does not allow type arguments.
none()
}
override AstNode getNodeAt(AccessPosition apos) {
result = super.getOperand(0) and apos = TSelfAccessPosition()
or
result = super.getOperand(1) and apos = TPositionalAccessPosition(0, true)
or
result = this and apos = TReturnAccessPosition()
}
override Type getInferredType(AccessPosition apos, TypePath path) {
result = inferType(this.getNodeAt(apos), path)
}
override Declaration getTarget() {
result = inferMethodCallTarget(this) // mutual recursion; resolving method calls requires resolving types and vice versa
}
}
@@ -831,7 +864,7 @@ private module FieldExprMatchingInput implements MatchingInputSig {
)
or
dpos.isField() and
result = this.getTypeRepr().(TypeReprMention).resolveTypeAt(path)
result = this.getTypeRepr().(TypeMention).resolveTypeAt(path)
}
}
@@ -1000,6 +1033,170 @@ private StructType inferLiteralType(LiteralExpr le) {
)
}
private module MethodCall {
/** An expression that calls a method. */
abstract private class MethodCallImpl extends Expr {
/** Gets the name of the method targeted. */
abstract string getMethodName();
/** Gets the number of arguments _excluding_ the `self` argument. */
abstract int getArity();
/** Gets the trait targeted by this method call, if any. */
Trait getTrait() { none() }
/** Gets the type of the receiver of the method call at `path`. */
abstract Type getTypeAt(TypePath path);
}
final class MethodCall = MethodCallImpl;
private class MethodCallExprMethodCall extends MethodCallImpl instanceof MethodCallExpr {
override string getMethodName() { result = super.getIdentifier().getText() }
override int getArity() { result = super.getArgList().getNumberOfArgs() }
pragma[nomagic]
override Type getTypeAt(TypePath path) {
exists(TypePath path0 | result = inferType(super.getReceiver(), path0) |
path0.isCons(TRefTypeParameter(), path)
or
not path0.isCons(TRefTypeParameter(), _) and
not (path0.isEmpty() and result = TRefType()) and
path = path0
)
}
}
private class CallExprMethodCall extends MethodCallImpl instanceof CallExpr {
TraitItemNode trait;
string methodName;
Expr receiver;
CallExprMethodCall() {
receiver = this.getArgList().getArg(0) and
exists(Path path, Function f |
path = this.getFunction().(PathExpr).getPath() and
f = resolvePath(path) and
f.getParamList().hasSelfParam() and
trait = resolvePath(path.getQualifier()) and
trait.getAnAssocItem() = f and
path.getSegment().getIdentifier().getText() = methodName
)
}
override string getMethodName() { result = methodName }
override int getArity() { result = super.getArgList().getNumberOfArgs() - 1 }
override Trait getTrait() { result = trait }
pragma[nomagic]
override Type getTypeAt(TypePath path) { result = inferType(receiver, path) }
}
private class OperationMethodCall extends MethodCallImpl instanceof Operation {
TraitItemNode trait;
string methodName;
OperationMethodCall() { super.isOverloaded(trait, methodName) }
override string getMethodName() { result = methodName }
override int getArity() { result = this.(Operation).getNumberOfOperands() - 1 }
override Trait getTrait() { result = trait }
pragma[nomagic]
override Type getTypeAt(TypePath path) {
result = inferType(this.(BinaryExpr).getLhs(), path)
or
result = inferType(this.(PrefixExpr).getExpr(), path)
}
}
}
import MethodCall
/**
* Holds if a method for `type` with the name `name` and the arity `arity`
* exists in `impl`.
*/
private predicate methodCandidate(Type type, string name, int arity, Impl impl) {
type = impl.getSelfTy().(TypeMention).resolveType() and
exists(Function f |
f = impl.(ImplItemNode).getASuccessor(name) and
f.getParamList().hasSelfParam() and
arity = f.getParamList().getNumberOfParams()
)
}
/**
* Holds if a method for `type` for `trait` with the name `name` and the arity
* `arity` exists in `impl`.
*/
pragma[nomagic]
private predicate methodCandidateTrait(Type type, Trait trait, string name, int arity, Impl impl) {
trait = resolvePath(impl.(ImplItemNode).getTraitPath()) and
methodCandidate(type, name, arity, impl)
}
private module IsInstantiationOfInput implements IsInstantiationOfInputSig<MethodCall> {
pragma[nomagic]
predicate potentialInstantiationOf(MethodCall mc, TypeAbstraction impl, TypeMention constraint) {
exists(Type rootType, string name, int arity |
rootType = mc.getTypeAt(TypePath::nil()) and
name = mc.getMethodName() and
arity = mc.getArity() and
constraint = impl.(ImplTypeAbstraction).getSelfTy()
|
methodCandidateTrait(rootType, mc.getTrait(), name, arity, impl)
or
not exists(mc.getTrait()) and
methodCandidate(rootType, name, arity, impl)
)
}
predicate relevantTypeMention(TypeMention constraint) {
exists(Impl impl | methodCandidate(_, _, _, impl) and constraint = impl.getSelfTy())
}
}
bindingset[item, name]
pragma[inline_late]
private Function getMethodSuccessor(ItemNode item, string name) {
result = item.getASuccessor(name)
}
bindingset[tp, name]
pragma[inline_late]
private Function getTypeParameterMethod(TypeParameter tp, string name) {
result = getMethodSuccessor(tp.(TypeParamTypeParameter).getTypeParam(), name)
or
result = getMethodSuccessor(tp.(SelfTypeParameter).getTrait(), name)
}
/** Gets a method from an `impl` block that matches the method call `mc`. */
private Function getMethodFromImpl(MethodCall mc) {
exists(Impl impl |
IsInstantiationOf<MethodCall, IsInstantiationOfInput>::isInstantiationOf(mc, impl, _) and
result = getMethodSuccessor(impl, mc.getMethodName())
)
}
/**
* Gets a method that the method call `mc` resolves to based on type inference,
* if any.
*/
private Function inferMethodCallTarget(MethodCall mc) {
// The method comes from an `impl` block targeting the type of the receiver.
result = getMethodFromImpl(mc)
or
// The type of the receiver is a type parameter and the method comes from a
// trait bound on the type parameter.
result = getTypeParameterMethod(mc.getTypeAt(TypePath::nil()), mc.getMethodName())
}
cached
private module Cached {
private import codeql.rust.internal.CachedStages
@@ -1026,92 +1223,49 @@ private module Cached {
)
}
private class ReceiverExpr extends Expr {
MethodCallExpr mce;
ReceiverExpr() { mce.getReceiver() = this }
string getField() { result = mce.getIdentifier().getText() }
int getNumberOfArgs() { result = mce.getArgList().getNumberOfArgs() }
pragma[nomagic]
Type getTypeAt(TypePath path) {
exists(TypePath path0 | result = inferType(this, path0) |
path0.isCons(TRefTypeParameter(), path)
or
not path0.isCons(TRefTypeParameter(), _) and
not (path0.isEmpty() and result = TRefType()) and
path = path0
)
}
private predicate isInherentImplFunction(Function f) {
f = any(Impl impl | not impl.hasTrait()).(ImplItemNode).getAnAssocItem()
}
/** Holds if a method for `type` with the name `name` and the arity `arity` exists in `impl`. */
pragma[nomagic]
private predicate methodCandidate(Type type, string name, int arity, Impl impl) {
type = impl.getSelfTy().(TypeReprMention).resolveType() and
exists(Function f |
f = impl.(ImplItemNode).getASuccessor(name) and
f.getParamList().hasSelfParam() and
arity = f.getParamList().getNumberOfParams()
)
private predicate isTraitImplFunction(Function f) {
f = any(Impl impl | impl.hasTrait()).(ImplItemNode).getAnAssocItem()
}
private module IsInstantiationOfInput implements IsInstantiationOfInputSig<ReceiverExpr> {
pragma[nomagic]
predicate potentialInstantiationOf(
ReceiverExpr receiver, TypeAbstraction impl, TypeMention constraint
) {
methodCandidate(receiver.getTypeAt(TypePath::nil()), receiver.getField(),
receiver.getNumberOfArgs(), impl) and
constraint = impl.(ImplTypeAbstraction).getSelfTy()
}
predicate relevantTypeMention(TypeMention constraint) {
exists(Impl impl | methodCandidate(_, _, _, impl) and constraint = impl.getSelfTy())
}
}
bindingset[item, name]
pragma[inline_late]
private Function getMethodSuccessor(ItemNode item, string name) {
result = item.getASuccessor(name)
}
bindingset[tp, name]
pragma[inline_late]
private Function getTypeParameterMethod(TypeParameter tp, string name) {
result = getMethodSuccessor(tp.(TypeParamTypeParameter).getTypeParam(), name)
or
result = getMethodSuccessor(tp.(SelfTypeParameter).getTrait(), name)
}
/**
* Gets the method from an `impl` block with an implementing type that matches
* the type of `receiver` and with a name of the method call in which
* `receiver` occurs, if any.
*/
private Function getMethodFromImpl(ReceiverExpr receiver) {
exists(Impl impl |
IsInstantiationOf<ReceiverExpr, IsInstantiationOfInput>::isInstantiationOf(receiver, impl, _) and
result = getMethodSuccessor(impl, receiver.getField())
)
}
/** Gets a method that the method call `mce` resolves to, if any. */
cached
Function resolveMethodCallExpr(MethodCallExpr mce) {
exists(ReceiverExpr receiver | mce.getReceiver() = receiver |
// The method comes from an `impl` block targeting the type of `receiver`.
result = getMethodFromImpl(receiver)
private Function resolveMethodCallTargetFrom(MethodCall mc, boolean fromSource) {
result = inferMethodCallTarget(mc) and
(if result.fromSource() then fromSource = true else fromSource = false) and
(
// prioritize inherent implementation methods first
isInherentImplFunction(result)
or
// The type of `receiver` is a type parameter and the method comes from a
// trait bound on the type parameter.
result = getTypeParameterMethod(receiver.getTypeAt(TypePath::nil()), receiver.getField())
not isInherentImplFunction(inferMethodCallTarget(mc)) and
(
// then trait implementation methods
isTraitImplFunction(result)
or
not isTraitImplFunction(inferMethodCallTarget(mc)) and
(
// then trait methods with default implementations
result.hasBody()
or
// and finally trait methods without default implementations
not inferMethodCallTarget(mc).hasBody()
)
)
)
}
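A Rust sketch of the priority order encoded above, using hypothetical names; Rust's own method lookup behaves this way for inherent versus trait methods, which is what the predicate models:
```rust
struct Counter;

trait Reset {
    // Trait method with a default implementation.
    fn reset(&self) -> &'static str {
        "trait default"
    }
}

impl Reset for Counter {
    // Trait implementation method.
    fn reset(&self) -> &'static str {
        "trait impl"
    }
}

impl Counter {
    // Inherent implementation method.
    fn reset(&self) -> &'static str {
        "inherent"
    }
}

fn main() {
    // Plain method-call syntax picks the inherent method first, matching the
    // priority order in the predicate above.
    assert_eq!(Counter.reset(), "inherent");
    // The trait implementation is still reachable with fully qualified syntax.
    assert_eq!(Reset::reset(&Counter), "trait impl");
}
```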
/** Gets a method that the method call `mc` resolves to, if any. */
cached
Function resolveMethodCallTarget(MethodCall mc) {
// Functions in source code also get extracted as library code; due to
// this duplication we prioritize functions from source code.
result = resolveMethodCallTargetFrom(mc, true)
or
not exists(resolveMethodCallTargetFrom(mc, true)) and
result = resolveMethodCallTargetFrom(mc, false)
}
pragma[inline]
private Type inferRootTypeDeref(AstNode n) {
result = inferType(n) and
@@ -1243,6 +1397,6 @@ private module Debug {
Function debugResolveMethodCallExpr(MethodCallExpr mce) {
mce = getRelevantLocatable() and
result = resolveMethodCallExpr(mce)
result = resolveMethodCallTarget(mce)
}
}

View File

@@ -2,4 +2,28 @@
* Provides classes for recognizing type inference inconsistencies.
*/
private import Type
private import TypeMention
private import TypeInference::Consistency as Consistency
import TypeInference::Consistency
query predicate illFormedTypeMention(TypeMention tm) {
Consistency::illFormedTypeMention(tm) and
// Only include inconsistencies in the source, as we otherwise get
// inconsistencies from library code in every project.
tm.fromSource()
}
int getTypeInferenceInconsistencyCounts(string type) {
type = "Missing type parameter ID" and
result = count(TypeParameter tp | missingTypeParameterId(tp) | tp)
or
type = "Non-functional type parameter ID" and
result = count(TypeParameter tp | nonFunctionalTypeParameterId(tp) | tp)
or
type = "Non-injective type parameter ID" and
result = count(TypeParameter tp | nonInjectiveTypeParameterId(tp, _) | tp)
or
type = "Ill-formed type mention" and
result = count(TypeMention tm | illFormedTypeMention(tm) | tm)
}

View File

@@ -31,53 +31,33 @@ abstract class TypeMention extends AstNode {
Type resolveTypeAt(TypePath path) { result = this.getMentionAt(path).resolveType() }
}
class TypeReprMention extends TypeMention, TypeRepr {
TypeReprMention() { not this instanceof InferTypeRepr }
class ArrayTypeReprMention extends TypeMention instanceof ArrayTypeRepr {
override TypeMention getTypeArgument(int i) { result = super.getElementTypeRepr() and i = 0 }
override TypeReprMention getTypeArgument(int i) {
result = this.(ArrayTypeRepr).getElementTypeRepr() and
i = 0
or
result = this.(RefTypeRepr).getTypeRepr() and
i = 0
or
result = this.(PathTypeRepr).getPath().(PathMention).getTypeArgument(i)
}
override Type resolveType() {
this instanceof ArrayTypeRepr and
result = TArrayType()
or
this instanceof RefTypeRepr and
result = TRefType()
or
result = this.(PathTypeRepr).getPath().(PathMention).resolveType()
}
override Type resolveTypeAt(TypePath path) {
result = this.(PathTypeRepr).getPath().(PathMention).resolveTypeAt(path)
or
not exists(this.(PathTypeRepr).getPath()) and
result = super.resolveTypeAt(path)
}
override Type resolveType() { result = TArrayType() }
}
/** Holds if `path` resolves to the type alias `alias` with the definition `rhs`. */
private predicate resolvePathAlias(Path path, TypeAlias alias, TypeReprMention rhs) {
alias = resolvePath(path) and rhs = alias.getTypeRepr()
class RefTypeReprMention extends TypeMention instanceof RefTypeRepr {
override TypeMention getTypeArgument(int i) { result = super.getTypeRepr() and i = 0 }
override Type resolveType() { result = TRefType() }
}
abstract class PathMention extends TypeMention, Path {
override TypeMention getTypeArgument(int i) {
result = this.getSegment().getGenericArgList().getTypeArg(i)
class PathTypeReprMention extends TypeMention instanceof PathTypeRepr {
Path path;
ItemNode resolved;
PathTypeReprMention() {
path = super.getPath() and
// NOTE: This excludes unresolvable paths, which is intentional as these
// don't add value to type inference anyway.
resolved = resolvePath(path)
}
}
class NonAliasPathMention extends PathMention {
NonAliasPathMention() { not resolvePathAlias(this, _, _) }
ItemNode getResolved() { result = resolved }
override TypeMention getTypeArgument(int i) {
result = super.getTypeArgument(i)
result = path.getSegment().getGenericArgList().getTypeArg(i)
or
// `Self` paths inside `impl` blocks have implicit type arguments that are
// the type parameters of the `impl` block. For example, in
@@ -92,17 +72,17 @@ class NonAliasPathMention extends PathMention {
//
// the `Self` return type is shorthand for `Foo<T>`.
exists(ImplItemNode node |
this = node.getASelfPath() and
path = node.getASelfPath() and
result = node.(ImplItemNode).getSelfPath().getSegment().getGenericArgList().getTypeArg(i)
)
or
// If `this` is the trait of an `impl` block then any associated types
// If `path` is the trait of an `impl` block then any associated types
// defined in the `impl` block are type arguments to the trait.
//
// For instance, for a trait implementation like this
// ```rust
// impl MyTrait for MyType {
// ^^^^^^^ this
// ^^^^^^^ path
// type AssociatedType = i64
// ^^^ result
// // ...
@@ -110,88 +90,94 @@ class NonAliasPathMention extends PathMention {
// ```
// the rhs. of the type alias is a type argument to the trait.
exists(ImplItemNode impl, AssociatedTypeTypeParameter param, TypeAlias alias |
this = impl.getTraitPath() and
param.getTrait() = resolvePath(this) and
path = impl.getTraitPath() and
param.getTrait() = resolved and
alias = impl.getASuccessor(param.getTypeAlias().getName().getText()) and
result = alias.getTypeRepr() and
param.getIndex() = i
)
}
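A minimal Rust sketch of the implicit `Self` arguments described above; the struct and method names are invented:
```rust
struct Foo<T> {
    value: T,
}

impl<T: Clone> Foo<T> {
    // `Self` in the return type is shorthand for `Foo<T>`, so the `impl`
    // block's type parameter is an implicit type argument of the path.
    fn duplicate(&self) -> Self {
        Foo { value: self.value.clone() }
    }
}

fn main() {
    let a = Foo { value: 42 };
    let b = a.duplicate();
    assert_eq!(b.value, 42);
}
```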
/**
 * Holds if this path resolves to a type alias whose rhs has the
 * resulting type at `typePath`.
*/
Type aliasResolveTypeAt(TypePath typePath) {
exists(TypeAlias alias, TypeMention rhs | alias = resolved and rhs = alias.getTypeRepr() |
result = rhs.resolveTypeAt(typePath) and
not result = pathGetTypeParameter(alias, _)
or
exists(TypeParameter tp, TypeMention arg, TypePath prefix, TypePath suffix, int i |
tp = rhs.resolveTypeAt(prefix) and
tp = pathGetTypeParameter(alias, i) and
arg = path.getSegment().getGenericArgList().getTypeArg(i) and
result = arg.resolveTypeAt(suffix) and
typePath = prefix.append(suffix)
)
)
}
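A small Rust sketch of the alias unfolding that `aliasResolveTypeAt` performs; the alias is hypothetical:
```rust
type Pair<T> = (T, T);

fn main() {
    // The mention `Pair<i64>` resolves through the alias: the root type is a
    // tuple, and at both component positions the alias's `T` is replaced by
    // the supplied argument `i64`.
    let p: Pair<i64> = (1, 2);
    assert_eq!(p.0 + p.1, 3);
}
```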
override Type resolveType() {
exists(ItemNode i | i = resolvePath(this) |
result = TStruct(i)
result = this.aliasResolveTypeAt(TypePath::nil())
or
not exists(resolved.(TypeAlias).getTypeRepr()) and
(
result = TStruct(resolved)
or
result = TEnum(i)
result = TEnum(resolved)
or
exists(TraitItemNode trait | trait = i |
exists(TraitItemNode trait | trait = resolved |
// If this is a `Self` path, then it resolves to the implicit `Self`
// type parameter, otherwise it is a trait bound.
if this = trait.getASelfPath()
if super.getPath() = trait.getASelfPath()
then result = TSelfTypeParameter(trait)
else result = TTrait(trait)
)
or
result = TTypeParamTypeParameter(i)
result = TTypeParamTypeParameter(resolved)
or
exists(TypeAlias alias | alias = i |
exists(TypeAlias alias | alias = resolved |
result.(AssociatedTypeTypeParameter).getTypeAlias() = alias
or
result = alias.getTypeRepr().(TypeReprMention).resolveType()
result = alias.getTypeRepr().(TypeMention).resolveType()
)
)
}
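To illustrate the `Self`-versus-trait-bound distinction above, a minimal Rust sketch with invented names:
```rust
trait Shape {
    // Inside the trait, the path `Self` resolves to the implicit `Self` type
    // parameter: whichever concrete type implements `Shape`.
    fn duplicate(&self) -> Self;
}

#[derive(Clone)]
struct Circle;

impl Shape for Circle {
    fn duplicate(&self) -> Self {
        self.clone()
    }
}

// Used in a bound position, the same path is a trait bound rather than a type.
fn copy_first<T: Shape>(items: &[T]) -> Option<T> {
    items.first().map(|s| s.duplicate())
}

fn main() {
    assert!(copy_first(&[Circle]).is_some());
}
```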
override Type resolveTypeAt(TypePath typePath) {
result = this.aliasResolveTypeAt(typePath)
or
not exists(resolved.(TypeAlias).getTypeRepr()) and
result = super.resolveTypeAt(typePath)
}
}
class AliasPathMention extends PathMention {
TypeAlias alias;
TypeReprMention rhs;
AliasPathMention() { resolvePathAlias(this, alias, rhs) }
/** Get the `i`th type parameter of the alias itself. */
private TypeParameter getTypeParameter(int i) {
result = TTypeParamTypeParameter(alias.getGenericParamList().getTypeParam(i))
}
override Type resolveType() { result = rhs.resolveType() }
override Type resolveTypeAt(TypePath path) {
result = rhs.resolveTypeAt(path) and
not result = this.getTypeParameter(_)
or
exists(TypeParameter tp, TypeMention arg, TypePath prefix, TypePath suffix, int i |
tp = rhs.resolveTypeAt(prefix) and
tp = this.getTypeParameter(i) and
arg = this.getTypeArgument(i) and
result = arg.resolveTypeAt(suffix) and
path = prefix.append(suffix)
)
}
private TypeParameter pathGetTypeParameter(TypeAlias alias, int i) {
result = TTypeParamTypeParameter(alias.getGenericParamList().getTypeParam(i))
}
// Used to represent implicit `Self` type arguments in traits and `impl` blocks,
// see `PathMention` for details.
class TypeParamMention extends TypeMention, TypeParam {
override TypeReprMention getTypeArgument(int i) { none() }
class TypeParamMention extends TypeMention instanceof TypeParam {
override TypeMention getTypeArgument(int i) { none() }
override Type resolveType() { result = TTypeParamTypeParameter(this) }
}
// Used to represent implicit type arguments for associated types in traits.
class TypeAliasMention extends TypeMention, TypeAlias {
class TypeAliasMention extends TypeMention instanceof TypeAlias {
private Type t;
TypeAliasMention() { t = TAssociatedTypeTypeParameter(this) }
override TypeReprMention getTypeArgument(int i) { none() }
override TypeMention getTypeArgument(int i) { none() }
override Type resolveType() { result = t }
}
class TraitMention extends TypeMention, TraitItemNode {
class TraitMention extends TypeMention instanceof TraitItemNode {
override TypeMention getTypeArgument(int i) {
result = this.getTypeParam(i)
result = super.getTypeParam(i)
or
traitAliasIndex(this, i, result)
}
@@ -203,7 +189,7 @@ class TraitMention extends TypeMention, TraitItemNode {
// appears in the AST, we (somewhat arbitrarily) choose the name of a trait as a
// type mention. This works because there is a one-to-one correspondence between
// a trait and its name.
class SelfTypeParameterMention extends TypeMention, Name {
class SelfTypeParameterMention extends TypeMention instanceof Name {
Trait trait;
SelfTypeParameterMention() { trait.getName() = this }
@@ -212,5 +198,5 @@ class SelfTypeParameterMention extends TypeMention, Name {
override Type resolveType() { result = TSelfTypeParameter(trait) }
override TypeReprMention getTypeArgument(int i) { none() }
override TypeMention getTypeArgument(int i) { none() }
}

View File

@@ -50,9 +50,7 @@ module AccessInvalidPointer {
* A pointer access using the unary `*` operator.
*/
private class DereferenceSink extends Sink {
DereferenceSink() {
exists(PrefixExpr p | p.getOperatorName() = "*" and p.getExpr() = this.asExpr().getExpr())
}
DereferenceSink() { any(DerefExpr p).getExpr() = this.asExpr().getExpr() }
}
/**

View File

@@ -22,64 +22,56 @@ abstract class SensitiveData extends DataFlow::Node {
}
/**
* A function that might produce sensitive data.
*/
private class SensitiveDataFunction extends Function {
SensitiveDataClassification classification;
SensitiveDataFunction() {
HeuristicNames::nameIndicatesSensitiveData(this.getName().getText(), classification)
}
SensitiveDataClassification getClassification() { result = classification }
}
/**
* A function call data flow node that might produce sensitive data.
* A function call or enum variant data flow node that might produce sensitive data.
*/
private class SensitiveDataCall extends SensitiveData {
SensitiveDataClassification classification;
SensitiveDataCall() {
classification =
this.asExpr()
.getAstNode()
.(CallExprBase)
.getStaticTarget()
.(SensitiveDataFunction)
.getClassification()
exists(CallExprBase call, string name |
call = this.asExpr().getExpr() and
name =
[
call.getStaticTarget().(Function).getName().getText(),
call.(CallExpr).getVariant().getName().getText(),
] and
HeuristicNames::nameIndicatesSensitiveData(name, classification)
)
}
override SensitiveDataClassification getClassification() { result = classification }
}
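A hedged Rust sketch of both call shapes the class matches; the function and variant names are invented so that the name heuristic would plausibly fire:
```rust
enum Credential {
    Password(String),
}

fn get_password() -> String {
    String::from("placeholder")
}

fn main() {
    // A call whose target's name looks sensitive to the heuristic...
    let secret = get_password();
    // ...and a call-shaped construction of an enum variant with a sensitive name.
    let credential = Credential::Password(secret);
    drop(credential);
}
```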
/**
* A variable that might contain sensitive data.
*/
private class SensitiveDataVariable extends Variable {
SensitiveDataClassification classification;
SensitiveDataVariable() {
HeuristicNames::nameIndicatesSensitiveData(this.getText(), classification)
}
SensitiveDataClassification getClassification() { result = classification }
}
/**
* A variable access data flow node that might produce sensitive data.
* A variable access data flow node that might be sensitive data.
*/
private class SensitiveVariableAccess extends SensitiveData {
SensitiveDataClassification classification;
SensitiveVariableAccess() {
classification =
this.asExpr()
.getAstNode()
HeuristicNames::nameIndicatesSensitiveData(this.asExpr()
.getExpr()
.(VariableAccess)
.getVariable()
.(SensitiveDataVariable)
.getClassification()
.(Variable)
.getText(), classification)
}
override SensitiveDataClassification getClassification() { result = classification }
}
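A minimal sketch of a variable access the class above would flag, assuming the name heuristic matches `password`; the value is a placeholder:
```rust
fn main() {
    // The read of `password` on the next line is the kind of variable access
    // the class above matches.
    let password = String::from("not-a-real-secret");
    let length = password.len();
    assert!(length > 0);
}
```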
private Expr fieldExprParentField(FieldExpr fe) { result = fe.getParentNode() }
/**
* A field access data flow node that might be sensitive data.
*/
private class SensitiveFieldAccess extends SensitiveData {
SensitiveDataClassification classification;
SensitiveFieldAccess() {
exists(FieldExpr fe | fieldExprParentField*(fe) = this.asExpr().getExpr() |
HeuristicNames::nameIndicatesSensitiveData(fe.getIdentifier().getText(), classification)
)
}
override SensitiveDataClassification getClassification() { result = classification }
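A minimal Rust sketch of a chained field access this class is meant to flag; the struct and field names are invented:
```rust
struct User {
    password_hash: String,
}

struct Session {
    user: User,
}

fn main() {
    let session = Session {
        user: User { password_hash: String::from("placeholder") },
    };
    // A chained field access: walking the parent chain reaches the field named
    // `password_hash`, which the name heuristic flags.
    let hash = &session.user.password_hash;
    assert!(!hash.is_empty());
}
```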

View File

@@ -4,8 +4,11 @@ import codeql.rust.elements
import codeql.Locations
import codeql.files.FileSystem
import codeql.rust.elements.Operation
import codeql.rust.elements.ArithmeticOperation
import codeql.rust.elements.AssignmentOperation
import codeql.rust.elements.BitwiseOperation
import codeql.rust.elements.ComparisonOperation
import codeql.rust.elements.DerefExpr
import codeql.rust.elements.LiteralExprExt
import codeql.rust.elements.LogicalOperation
import codeql.rust.elements.AsyncBlockExpr

Some files were not shown because too many files have changed in this diff.