Mirror of https://github.com/github/codeql.git (synced 2025-12-22 19:56:32 +01:00)
Merge branch 'main' into python/support-grouped-exceptions
@@ -1,24 +0,0 @@
---
name: LGTM.com - false positive
about: Tell us about an alert that shouldn't be reported
title: LGTM.com - false positive
labels: false-positive
assignees: ''

---

**Description of the false positive**

<!-- Please explain briefly why you think it shouldn't be included. -->

**URL to the alert on the project page on LGTM.com**

<!--
1. Open the project on LGTM.com.
   For example, https://lgtm.com/projects/g/pallets/click/.
2. Switch to the `Alerts` tab. For example, https://lgtm.com/projects/g/pallets/click/alerts/.
3. Scroll to the alert that you would like to report.
4. Click on the right most icon `View this alert within the complete file`.
5. A new browser tab opens. Copy and paste the page URL here.
   For example, https://lgtm.com/projects/g/pallets/click/snapshot/719fb7d8322b0767cdd1e5903ba3eb3233ba8dd5/files/click/_winconsole.py#xa08d213ab3289f87:1.
-->
.github/ISSUE_TEMPLATE/ql---general.md (vendored, 2 changed lines)
@@ -10,5 +10,5 @@ assignees: ''
**Description of the issue**

<!-- Please explain briefly what is the problem.
If it is about an LGTM project, please include its URL.-->
If it is about a GitHub project, please include its URL. -->
.github/ISSUE_TEMPLATE/ql--false-positive.md (vendored, new file, 36 lines)
@@ -0,0 +1,36 @@
---
name: CodeQL false positive
about: Report CodeQL alerts that you think should not have been detected (not applicable, not exploitable, etc.)
title: False positive
labels: false-positive
assignees: ''

---

**Description of the false positive**

<!-- Please explain briefly why you think it shouldn't be included. -->

**Code samples or links to source code**

<!--
For open source code: file links with line numbers on GitHub, for example:
https://github.com/github/codeql/blob/dc440aaee6695deb0d9676b87e06ea984e1b4ae5/javascript/ql/test/query-tests/Security/CWE-078/CommandInjection/exec-sh2.js#L10

For closed source code: (redacted) code samples that illustrate the problem, for example:

```
function execSh(command, options) {
  return cp.spawn(getShell(), ["-c", command], options) // <- command line injection
};
```
-->

**URL to the alert on GitHub code scanning (optional)**

<!--
1. Open the project on GitHub.com.
2. Switch to the `Security` tab.
3. Browse to the alert that you would like to report.
4. Copy and paste the page URL here.
-->
.github/actions/cache-query-compilation/action.yml (vendored, new file, 55 lines)
@@ -0,0 +1,55 @@
name: Cache query compilation
description: Caches CodeQL compilation caches - should be run both on PRs and pushes to main.

inputs:
  key:
    description: 'The cache key to use - should be unique to the workflow'
    required: true

outputs:
  cache-dir:
    description: "The directory where the cache was stored"
    value: ${{ steps.fill-compilation-dir.outputs.compdir }}

runs:
  using: composite
  steps:
    # calculate the merge-base with main, in a way that works both on PRs and pushes to main.
    - name: Calculate merge-base
      shell: bash
      if: ${{ github.event_name == 'pull_request' }}
      env:
        BASE_BRANCH: ${{ github.base_ref }}
      run: |
        MERGE_BASE=$(git cat-file commit $GITHUB_SHA | grep '^parent ' | head -1 | cut -f 2 -d " ")
        echo "merge_base=$MERGE_BASE" >> $GITHUB_ENV
    - name: Restore read-only cache (PR)
      if: ${{ github.event_name == 'pull_request' }}
      uses: erik-krogh/actions-cache@a88d0603fe5fb5606db9f002dfcadeb32b5f84c6
      with:
        path: '**/.cache'
        read-only: true
        key: codeql-compile-${{ inputs.key }}-pr-${{ github.sha }}
        restore-keys: |
          codeql-compile-${{ inputs.key }}-${{ github.base_ref }}-${{ env.merge_base }}
          codeql-compile-${{ inputs.key }}-${{ github.base_ref }}-
          codeql-compile-${{ inputs.key }}-main-
    - name: Fill cache (push)
      if: ${{ github.event_name != 'pull_request' }}
      uses: erik-krogh/actions-cache@a88d0603fe5fb5606db9f002dfcadeb32b5f84c6
      with:
        path: '**/.cache'
        key: codeql-compile-${{ inputs.key }}-${{ github.ref_name }}-${{ github.sha }} # just fill on main
        restore-keys: | # restore the latest cache if the exact cache is unavailable, to speed up compilation.
          codeql-compile-${{ inputs.key }}-${{ github.ref_name }}-
          codeql-compile-${{ inputs.key }}-main-
    - name: Fill compilation cache directory
      id: fill-compilation-dir
      shell: bash
      run: |
        # Move all the existing cache into another folder, so we only preserve the cache for the current queries.
        node $GITHUB_WORKSPACE/.github/actions/cache-query-compilation/move-caches.js ${COMBINED_CACHE_DIR}

        echo "compdir=${COMBINED_CACHE_DIR}" >> $GITHUB_OUTPUT
      env:
        COMBINED_CACHE_DIR: ${{ runner.temp }}/compilation-dir
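For context, a minimal sketch of how a workflow might consume this composite action. The step layout and the `my-suite` key are illustrative (not part of this commit); the compile command mirrors the ones used by the workflows further down in this diff:

```yaml
steps:
  - uses: actions/checkout@v3
  - name: Cache compilation cache
    id: query-cache
    uses: ./.github/actions/cache-query-compilation
    with:
      key: my-suite   # hypothetical key; each workflow should pick a unique one
  - name: Compile queries
    shell: bash
    # reuse the restored caches via --compilation-cache
    run: codeql query compile -j0 java/ql/src --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
```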
.github/actions/cache-query-compilation/move-caches.js (vendored, new file, 75 lines)
@@ -0,0 +1,75 @@
// # Move all the existing cache into another folder, so we only preserve the cache for the current queries.
// mkdir -p ${COMBINED_CACHE_DIR}
// rm -f **/.cache/{lock,size} # -f to avoid errors if the cache is empty.
// # copy the contents of the .cache folders into the combined cache folder.
// cp -r **/.cache/* ${COMBINED_CACHE_DIR}/ || : # ignore missing files
// # clean up the .cache folders
// rm -rf **/.cache/*

const fs = require("fs");
const path = require("path");

// the first argv is the cache folder to create.
const COMBINED_CACHE_DIR = process.argv[2];

function* walkCaches(dir) {
  const files = fs.readdirSync(dir, { withFileTypes: true });
  for (const file of files) {
    if (file.isDirectory()) {
      const filePath = path.join(dir, file.name);
      yield* walkCaches(filePath);
      if (file.name === ".cache") {
        yield filePath;
      }
    }
  }
}

async function copyDir(src, dest) {
  for await (const file of await fs.promises.readdir(src, { withFileTypes: true })) {
    const srcPath = path.join(src, file.name);
    const destPath = path.join(dest, file.name);
    if (file.isDirectory()) {
      if (!fs.existsSync(destPath)) {
        fs.mkdirSync(destPath);
      }
      await copyDir(srcPath, destPath);
    } else {
      await fs.promises.copyFile(srcPath, destPath);
    }
  }
}

async function main() {
  const cacheDirs = [...walkCaches(".")];

  for (const dir of cacheDirs) {
    console.log(`Found .cache dir at ${dir}`);
  }

  // mkdir -p ${COMBINED_CACHE_DIR}
  fs.mkdirSync(COMBINED_CACHE_DIR, { recursive: true });

  // rm -f **/.cache/{lock,size} # -f to avoid errors if the cache is empty.
  await Promise.all(
    cacheDirs.map((cacheDir) =>
      (async function () {
        await fs.promises.rm(path.join(cacheDir, "lock"), { force: true });
        await fs.promises.rm(path.join(cacheDir, "size"), { force: true });
      })()
    )
  );

  // # copy the contents of the .cache folders into the combined cache folder.
  // cp -r **/.cache/* ${COMBINED_CACHE_DIR}/ || : # ignore missing files
  await Promise.all(
    cacheDirs.map((cacheDir) => copyDir(cacheDir, COMBINED_CACHE_DIR))
  );

  // # clean up the .cache folders
  // rm -rf **/.cache/*
  await Promise.all(
    cacheDirs.map((cacheDir) => fs.promises.rm(cacheDir, { recursive: true }))
  );
}
main();
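As a usage note, this script is only invoked from the composite action above. A minimal sketch of an equivalent standalone step, assuming the same destination directory as COMBINED_CACHE_DIR in the action:

```yaml
- name: Consolidate CodeQL compilation caches
  shell: bash
  # moves every **/.cache directory into a single folder for caching/upload
  run: node .github/actions/cache-query-compilation/move-caches.js "$RUNNER_TEMP/compilation-dir"
```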
.github/actions/find-latest-bundle/action.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: Find Latest CodeQL Bundle
description: Finds the URL of the latest released version of the CodeQL bundle.
outputs:
  url:
    description: The download URL of the latest CodeQL bundle release
    value: ${{ steps.find-latest.outputs.url }}
runs:
  using: composite
  steps:
    - name: Find Latest Release
      id: find-latest
      shell: pwsh
      run: |
        $Latest = gh release list --repo github/codeql-action --exclude-drafts --limit 1000 |
          ForEach-Object { $C = $_ -split "`t"; return @{ type = $C[1]; tag = $C[2]; } } |
          Where-Object { $_.type -eq 'Latest' }

        $Tag = $Latest.tag
        if ($Tag -eq '') {
          throw 'Failed to find latest bundle release.'
        }

        Write-Output "Latest bundle tag is '${Tag}'."
        "url=https://github.com/github/codeql-action/releases/download/${Tag}/codeql-bundle-linux64.tar.gz" >> $env:GITHUB_OUTPUT
      env:
        GITHUB_TOKEN: ${{ github.token }}
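A minimal sketch of how a job might consume the `url` output, following the pattern ql-for-ql-build.yml uses later in this diff; the pinned action ref and the `languages` value are illustrative:

```yaml
steps:
  - uses: actions/checkout@v3
  - name: Find latest bundle
    id: find-latest-bundle
    uses: ./.github/actions/find-latest-bundle
  - name: Initialize CodeQL from the latest bundle
    uses: github/codeql-action/init@v2
    with:
      languages: javascript   # illustrative; pick the language being analyzed
      tools: ${{ steps.find-latest-bundle.outputs.url }}
```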
.github/workflows/atm-check-query-suite.yml (vendored, 11 changed lines)
@@ -13,7 +13,7 @@ on:

jobs:
  atm-check-query-suite:
    runs-on: ubuntu-latest
    runs-on: ubuntu-latest-xl

    steps:
      - uses: actions/checkout@v3
@@ -23,6 +23,12 @@ jobs:
        with:
          channel: release

      - name: Cache compilation cache
        id: query-cache
        uses: ./.github/actions/cache-query-compilation
        with:
          key: atm-suite

      - name: Install ATM model
        run: |
          set -exu
@@ -50,10 +56,13 @@ jobs:
          echo "SARIF_PATH=${SARIF_PATH}" >> "${GITHUB_ENV}"

          codeql database analyze \
            --threads=0 \
            --ram 50000 \
            --format sarif-latest \
            --output "${SARIF_PATH}" \
            --sarif-group-rules-by-pack \
            -vv \
            --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}" \
            -- \
            "${DB_PATH}" \
            "${QUERY_PACK}/${QUERY_SUITE}"
.github/workflows/check-query-ids.yml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
name: Check query IDs

on:
  pull_request:
    paths:
      - "**/src/**/*.ql"
      - misc/scripts/check-query-ids.py
      - .github/workflows/check-query-ids.yml
    branches:
      - main
      - "rc/*"
  workflow_dispatch:

jobs:
  check:
    name: Check query IDs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Check for duplicate query IDs
        run: python3 misc/scripts/check-query-ids.py
.github/workflows/compile-queries.yml (vendored, 38 changed lines)
@@ -14,46 +14,24 @@ jobs:

    steps:
      - uses: actions/checkout@v3
      # calculate the merge-base with main, in a way that works both on PRs and pushes to main.
      - name: Calculate merge-base
        if: ${{ github.event_name == 'pull_request' }}
        env:
          BASE_BRANCH: ${{ github.base_ref }}
        run: |
          MERGE_BASE=$(git cat-file commit $GITHUB_SHA | grep '^parent ' | head -1 | cut -f 2 -d " ")
          echo "merge-base=$MERGE_BASE" >> $GITHUB_ENV
      - name: Read CodeQL query compilation - PR
        if: ${{ github.event_name == 'pull_request' }}
        uses: actions/cache@v3
        with:
          path: '*/ql/src/.cache'
          key: codeql-compile-pr-${{ github.sha }} # deliberately not using the `compile-compile-main` keys here.
          restore-keys: |
            codeql-compile-${{ github.base_ref }}-${{ env.merge-base }}
            codeql-compile-${{ github.base_ref }}-
            codeql-compile-main-
      - name: Fill CodeQL query compilation cache - main
        if: ${{ github.event_name != 'pull_request' }}
        uses: actions/cache@v3
        with:
          path: '*/ql/src/.cache'
          key: codeql-compile-${{ github.ref_name }}-${{ github.sha }} # just fill on main
          restore-keys: | # restore from another random commit, to speed up compilation.
            codeql-compile-${{ github.ref_name }}-
            codeql-compile-main-
      - name: Setup CodeQL
        uses: ./.github/actions/fetch-codeql
        with:
          channel: 'release'
      - name: Cache compilation cache
        id: query-cache
        uses: ./.github/actions/cache-query-compilation
        with:
          key: all-queries
      - name: check formatting
        run: codeql query format */ql/{src,lib,test}/**/*.{qll,ql} --check-only
        run: find */ql -type f \( -name "*.qll" -o -name "*.ql" \) -print0 | xargs -0 codeql query format --check-only
      - name: compile queries - check-only
        # run with --check-only if running in a PR (github.sha != main)
        if : ${{ github.event_name == 'pull_request' }}
        shell: bash
        run: codeql query compile -j0 */ql/src --keep-going --warnings=error --check-only
        run: codeql query compile -j0 */ql/{src,examples} --keep-going --warnings=error --check-only --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
      - name: compile queries - full
        # do full compile if running on main - this populates the cache
        if : ${{ github.event_name != 'pull_request' }}
        shell: bash
        run: codeql query compile -j0 */ql/src --keep-going --warnings=error
        run: codeql query compile -j0 */ql/{src,examples} --keep-going --warnings=error --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
.github/workflows/csharp-qltest.yml (vendored, new file, 86 lines)
@@ -0,0 +1,86 @@
|
||||
name: "C#: Run QL Tests"
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "csharp/**"
|
||||
- "shared/**"
|
||||
- .github/actions/fetch-codeql/action.yml
|
||||
- codeql-workspace.yml
|
||||
branches:
|
||||
- main
|
||||
- "rc/*"
|
||||
pull_request:
|
||||
paths:
|
||||
- "csharp/**"
|
||||
- "shared/**"
|
||||
- .github/workflows/csharp-qltest.yml
|
||||
- .github/actions/fetch-codeql/action.yml
|
||||
- codeql-workspace.yml
|
||||
branches:
|
||||
- main
|
||||
- "rc/*"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: csharp
|
||||
|
||||
jobs:
|
||||
qlupgrade:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
- name: Check DB upgrade scripts
|
||||
run: |
|
||||
echo >empty.trap
|
||||
codeql dataset import -S ql/lib/upgrades/initial/semmlecode.csharp.dbscheme testdb empty.trap
|
||||
codeql dataset upgrade testdb --additional-packs ql/lib
|
||||
diff -q testdb/semmlecode.csharp.dbscheme ql/lib/semmlecode.csharp.dbscheme
|
||||
- name: Check DB downgrade scripts
|
||||
run: |
|
||||
echo >empty.trap
|
||||
rm -rf testdb; codeql dataset import -S ql/lib/semmlecode.csharp.dbscheme testdb empty.trap
|
||||
codeql resolve upgrades --format=lines --allow-downgrades --additional-packs downgrades \
|
||||
--dbscheme=ql/lib/semmlecode.csharp.dbscheme --target-dbscheme=downgrades/initial/semmlecode.csharp.dbscheme |
|
||||
xargs codeql execute upgrades testdb
|
||||
diff -q testdb/semmlecode.csharp.dbscheme downgrades/initial/semmlecode.csharp.dbscheme
|
||||
qltest:
|
||||
runs-on: ubuntu-latest-xl
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
slice: ["1/2", "2/2"]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
- uses: ./csharp/actions/create-extractor-pack
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
key: csharp-qltest-${{ matrix.slice }}
|
||||
- name: Run QL tests
|
||||
run: |
|
||||
CODEQL_PATH=$(gh codeql version --format=json | jq -r .unpackedLocation)
|
||||
# The legacy ASP extractor is not in this repo, so take the one from the nightly build
|
||||
mv "$CODEQL_PATH/csharp/tools/extractor-asp.jar" "${{ github.workspace }}/csharp/extractor-pack/tools"
|
||||
# Safe guard against using the bundled extractor
|
||||
rm -rf "$CODEQL_PATH/csharp"
|
||||
codeql test run --threads=0 --ram 50000 --slice ${{ matrix.slice }} --search-path "${{ github.workspace }}/csharp/extractor-pack" --check-databases --check-undefined-labels --check-repeated-labels --check-redefined-labels --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
unit-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Setup dotnet
|
||||
uses: actions/setup-dotnet@v3
|
||||
with:
|
||||
dotnet-version: 6.0.202
|
||||
- name: Extractor unit tests
|
||||
run: |
|
||||
dotnet test -p:RuntimeFrameworkVersion=6.0.4 "${{ github.workspace }}/csharp/extractor/Semmle.Util.Tests"
|
||||
dotnet test -p:RuntimeFrameworkVersion=6.0.4 "${{ github.workspace }}/csharp/extractor/Semmle.Extraction.Tests"
|
||||
dotnet test -p:RuntimeFrameworkVersion=6.0.4 "${{ github.workspace }}/csharp/autobuilder/Semmle.Autobuild.CSharp.Tests"
|
||||
dotnet test -p:RuntimeFrameworkVersion=6.0.4 "${{ github.workspace }}/cpp/autobuilder/Semmle.Autobuild.Cpp.Tests"
|
||||
.github/workflows/go-tests-other-os.yml (vendored, new file, 80 lines)
@@ -0,0 +1,80 @@
|
||||
name: "Go: Run Tests - Other OS"
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "go/**"
|
||||
- "!go/ql/**" # don't run other-os if only ql/ files changed
|
||||
- .github/workflows/go-tests-other-os.yml
|
||||
- .github/actions/**
|
||||
- codeql-workspace.yml
|
||||
jobs:
|
||||
test-mac:
|
||||
name: Test MacOS
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- name: Set up Go 1.19
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.19
|
||||
id: go
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up CodeQL CLI
|
||||
uses: ./.github/actions/fetch-codeql
|
||||
|
||||
- name: Enable problem matchers in repository
|
||||
shell: bash
|
||||
run: 'find .github/problem-matchers -name \*.json -exec echo "::add-matcher::{}" \;'
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
cd go
|
||||
make
|
||||
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
key: go-qltest
|
||||
- name: Test
|
||||
run: |
|
||||
cd go
|
||||
make test cache="${{ steps.query-cache.outputs.cache-dir }}"
|
||||
|
||||
test-win:
|
||||
name: Test Windows
|
||||
runs-on: windows-latest-xl
|
||||
steps:
|
||||
- name: Set up Go 1.19
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.19
|
||||
id: go
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up CodeQL CLI
|
||||
uses: ./.github/actions/fetch-codeql
|
||||
|
||||
- name: Enable problem matchers in repository
|
||||
shell: bash
|
||||
run: 'find .github/problem-matchers -name \*.json -exec echo "::add-matcher::{}" \;'
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
cd go
|
||||
make
|
||||
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
key: go-qltest
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd go
|
||||
make test cache="${{ steps.query-cache.outputs.cache-dir }}"
|
||||
.github/workflows/go-tests.yml (vendored, 79 changed lines)
@@ -1,15 +1,24 @@
|
||||
name: "Go: Run Tests"
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "go/**"
|
||||
- .github/workflows/go-tests.yml
|
||||
- .github/actions/**
|
||||
- codeql-workspace.yml
|
||||
branches:
|
||||
- main
|
||||
- "rc/*"
|
||||
pull_request:
|
||||
paths:
|
||||
- "go/**"
|
||||
- .github/workflows/go-tests.yml
|
||||
- .github/actions/fetch-codeql/action.yml
|
||||
- .github/actions/**
|
||||
- codeql-workspace.yml
|
||||
jobs:
|
||||
test-linux:
|
||||
name: Test Linux (Ubuntu)
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-latest-xl
|
||||
steps:
|
||||
- name: Set up Go 1.19
|
||||
uses: actions/setup-go@v3
|
||||
@@ -32,7 +41,7 @@ jobs:
|
||||
cd go
|
||||
make
|
||||
|
||||
- name: Check that all QL and Go code is autoformatted
|
||||
- name: Check that all Go code is autoformatted
|
||||
run: |
|
||||
cd go
|
||||
make check-formatting
|
||||
@@ -48,67 +57,13 @@ jobs:
|
||||
name: qhelp-markdown
|
||||
path: go/qhelp-out/**/*.md
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd go
|
||||
make test
|
||||
|
||||
test-mac:
|
||||
name: Test MacOS
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- name: Set up Go 1.19
|
||||
uses: actions/setup-go@v3
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
go-version: 1.19
|
||||
id: go
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up CodeQL CLI
|
||||
uses: ./.github/actions/fetch-codeql
|
||||
|
||||
- name: Enable problem matchers in repository
|
||||
shell: bash
|
||||
run: 'find .github/problem-matchers -name \*.json -exec echo "::add-matcher::{}" \;'
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
cd go
|
||||
make
|
||||
key: go-qltest
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd go
|
||||
make test
|
||||
|
||||
test-win:
|
||||
name: Test Windows
|
||||
runs-on: windows-2019
|
||||
steps:
|
||||
- name: Set up Go 1.19
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.19
|
||||
id: go
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up CodeQL CLI
|
||||
uses: ./.github/actions/fetch-codeql
|
||||
|
||||
- name: Enable problem matchers in repository
|
||||
shell: bash
|
||||
run: 'find .github/problem-matchers -name \*.json -exec echo "::add-matcher::{}" \;'
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
cd go
|
||||
make
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
cd go
|
||||
make test
|
||||
make test cache="${{ steps.query-cache.outputs.cache-dir }}"
|
||||
|
||||
.github/workflows/js-ml-tests.yml (vendored, 44 changed lines)
@@ -23,22 +23,9 @@ defaults:
|
||||
working-directory: javascript/ql/experimental/adaptivethreatmodeling
|
||||
|
||||
jobs:
|
||||
qlformat:
|
||||
name: Check QL formatting
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
|
||||
- name: Check QL formatting
|
||||
run: |
|
||||
find . "(" -name "*.ql" -or -name "*.qll" ")" -print0 | \
|
||||
xargs -0 codeql query format --check-only
|
||||
|
||||
qlcompile:
|
||||
name: Check QL compilation
|
||||
runs-on: ubuntu-latest
|
||||
qltest:
|
||||
name: Test QL
|
||||
runs-on: ubuntu-latest-xl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
@@ -46,36 +33,33 @@ jobs:
|
||||
|
||||
- name: Install pack dependencies
|
||||
run: |
|
||||
for pack in modelbuilding src; do
|
||||
for pack in modelbuilding src test; do
|
||||
codeql pack install --mode verify -- "${pack}"
|
||||
done
|
||||
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
key: js-ml-test
|
||||
|
||||
- name: Check QL compilation
|
||||
run: |
|
||||
codeql query compile \
|
||||
--check-only \
|
||||
--ram 5120 \
|
||||
--ram 50000 \
|
||||
--additional-packs "${{ github.workspace }}" \
|
||||
--threads=0 \
|
||||
--compilation-cache "${{ steps.query-cache.outputs.cache-dir }}" \
|
||||
-- \
|
||||
lib modelbuilding src
|
||||
|
||||
qltest:
|
||||
name: Run QL tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
|
||||
- name: Install pack dependencies
|
||||
run: codeql pack install -- test
|
||||
|
||||
- name: Run QL tests
|
||||
run: |
|
||||
codeql test run \
|
||||
--threads=0 \
|
||||
--ram 5120 \
|
||||
--ram 50000 \
|
||||
--additional-packs "${{ github.workspace }}" \
|
||||
--compilation-cache "${{ steps.query-cache.outputs.cache-dir }}" \
|
||||
-- \
|
||||
test
|
||||
.github/workflows/mad_modelDiff.yml (vendored, 10 changed lines)
@@ -61,8 +61,8 @@ jobs:
|
||||
DATABASE=$2
|
||||
cd codeql-$QL_VARIANT
|
||||
SHORTNAME=`basename $DATABASE`
|
||||
python java/ql/src/utils/model-generator/GenerateFlowModel.py --with-summaries --with-sinks $DATABASE $MODELS/${SHORTNAME}.qll
|
||||
mv $MODELS/${SHORTNAME}.qll $MODELS/${SHORTNAME}Generated_${QL_VARIANT}.qll
|
||||
python java/ql/src/utils/model-generator/GenerateFlowModel.py --with-summaries --with-sinks $DATABASE ${SHORTNAME}.temp.model.yml
|
||||
mv java/ql/lib/ext/generated/${SHORTNAME}.temp.model.yml $MODELS/${SHORTNAME}Generated_${QL_VARIANT}.model.yml
|
||||
cd ..
|
||||
}
|
||||
|
||||
@@ -85,16 +85,16 @@ jobs:
|
||||
set -x
|
||||
MODELS=`pwd`/tmp-models
|
||||
ls -1 tmp-models/
|
||||
for m in $MODELS/*_main.qll ; do
|
||||
for m in $MODELS/*_main.model.yml ; do
|
||||
t="${m/main/"pr"}"
|
||||
basename=`basename $m`
|
||||
name="diff_${basename/_main.qll/""}"
|
||||
name="diff_${basename/_main.model.yml/""}"
|
||||
(diff -w -u $m $t | diff2html -i stdin -F $MODELS/$name.html) || true
|
||||
done
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: models
|
||||
path: tmp-models/*.qll
|
||||
path: tmp-models/*.model.yml
|
||||
retention-days: 20
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
|
||||
.github/workflows/mad_regenerate-models.yml (vendored, 2 changed lines)
@@ -53,7 +53,7 @@ jobs:
|
||||
java/ql/src/utils/model-generator/RegenerateModels.py "${SLUG}" dbs/${SHORTNAME}
|
||||
- name: Stage changes
|
||||
run: |
|
||||
find java -name "*.qll" -print0 | xargs -0 git add
|
||||
find java -name "*.model.yml" -print0 | xargs -0 git add
|
||||
git status
|
||||
git diff --cached > models.patch
|
||||
- uses: actions/upload-artifact@v3
|
||||
|
||||
.github/workflows/ql-for-ql-build.yml (vendored, 5 changed lines)
@@ -22,11 +22,15 @@ jobs:
|
||||
steps:
|
||||
### Build the queries ###
|
||||
- uses: actions/checkout@v3
|
||||
- name: Find latest bundle
|
||||
id: find-latest-bundle
|
||||
uses: ./.github/actions/find-latest-bundle
|
||||
- name: Find codeql
|
||||
id: find-codeql
|
||||
uses: github/codeql-action/init@77a8d2d10c0b403a8b4aadbd223dc489ecd22683
|
||||
with:
|
||||
languages: javascript # does not matter
|
||||
tools: ${{ steps.find-latest-bundle.outputs.url }}
|
||||
- name: Get CodeQL version
|
||||
id: get-codeql-version
|
||||
run: |
|
||||
@@ -138,6 +142,7 @@ jobs:
|
||||
languages: ql
|
||||
db-location: ${{ runner.temp }}/db
|
||||
config-file: ./ql-for-ql-config.yml
|
||||
tools: ${{ steps.find-latest-bundle.outputs.url }}
|
||||
- name: Move pack cache
|
||||
run: |
|
||||
cp -r ${PACK}/.cache ql/ql/src/.cache
|
||||
|
||||
.github/workflows/ql-for-ql-tests.yml (vendored, 5 changed lines)
@@ -47,8 +47,3 @@ jobs:
|
||||
find ql/ql/src "(" -name "*.ql" -or -name "*.qll" ")" -print0 | xargs -0 "${CODEQL}" query format --check-only
|
||||
env:
|
||||
CODEQL: ${{ steps.find-codeql.outputs.codeql-path }}
|
||||
- name: Check QL compilation
|
||||
run: |
|
||||
"${CODEQL}" query compile --check-only --threads=4 --warnings=error --search-path "${{ github.workspace }}/ql/extractor-pack" "ql/ql/src" "ql/ql/examples"
|
||||
env:
|
||||
CODEQL: ${{ steps.find-codeql.outputs.codeql-path }}
|
||||
|
||||
.github/workflows/ruby-build.yml (vendored, 31 changed lines)
@@ -48,7 +48,19 @@ jobs:
|
||||
run: |
|
||||
brew install gnu-tar
|
||||
echo "/usr/local/opt/gnu-tar/libexec/gnubin" >> $GITHUB_PATH
|
||||
- name: Cache entire extractor
|
||||
uses: actions/cache@v3
|
||||
id: cache-extractor
|
||||
with:
|
||||
path: |
|
||||
ruby/target/release/ruby-autobuilder
|
||||
ruby/target/release/ruby-autobuilder.exe
|
||||
ruby/target/release/ruby-extractor
|
||||
ruby/target/release/ruby-extractor.exe
|
||||
ruby/ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
|
||||
key: ${{ runner.os }}-ruby-extractor-${{ hashFiles('ruby/rust-toolchain.toml', 'ruby/**/Cargo.lock') }}--${{ hashFiles('ruby/**/*.rs') }}
|
||||
- uses: actions/cache@v3
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/registry
|
||||
@@ -56,15 +68,19 @@ jobs:
|
||||
ruby/target
|
||||
key: ${{ runner.os }}-ruby-rust-cargo-${{ hashFiles('ruby/rust-toolchain.toml', 'ruby/**/Cargo.lock') }}
|
||||
- name: Check formatting
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
run: cargo fmt --all -- --check
|
||||
- name: Build
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
run: cargo build --verbose
|
||||
- name: Run tests
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
run: cargo test --verbose
|
||||
- name: Release build
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
run: cargo build --release
|
||||
- name: Generate dbscheme
|
||||
if: ${{ matrix.os == 'ubuntu-latest' }}
|
||||
if: ${{ matrix.os == 'ubuntu-latest' && steps.cache-extractor.outputs.cache-hit != 'true'}}
|
||||
run: target/release/ruby-generator --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
|
||||
- uses: actions/upload-artifact@v3
|
||||
if: ${{ matrix.os == 'ubuntu-latest' }}
|
||||
@@ -86,19 +102,24 @@ jobs:
|
||||
ruby/target/release/ruby-extractor.exe
|
||||
retention-days: 1
|
||||
compile-queries:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
CODEQL_THREADS: 4 # TODO: remove this once it's set by the CLI
|
||||
runs-on: ubuntu-latest-xl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Fetch CodeQL
|
||||
uses: ./.github/actions/fetch-codeql
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
key: ruby-build
|
||||
- name: Build Query Pack
|
||||
run: |
|
||||
rm -rf target/packs
|
||||
codeql pack create ../shared/ssa --output target/packs
|
||||
codeql pack create ../misc/suite-helpers --output target/packs
|
||||
codeql pack create ../shared/regex --output target/packs
|
||||
codeql pack create ql/lib --output target/packs
|
||||
codeql pack create ql/src --output target/packs
|
||||
codeql pack create -j0 ql/src --output target/packs --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
|
||||
PACK_FOLDER=$(readlink -f target/packs/codeql/ruby-queries/*)
|
||||
codeql generate query-help --format=sarifv2.1.0 --output="${PACK_FOLDER}/rules.sarif" ql/src
|
||||
(cd ql/src; find queries \( -name '*.qhelp' -o -name '*.rb' -o -name '*.erb' \) -exec bash -c 'mkdir -p "'"${PACK_FOLDER}"'/$(dirname "{}")"' \; -exec cp "{}" "${PACK_FOLDER}/{}" \;)
|
||||
|
||||
.github/workflows/ruby-qltest.yml (vendored, 30 changed lines)
@@ -4,7 +4,7 @@ on:
|
||||
push:
|
||||
paths:
|
||||
- "ruby/**"
|
||||
- .github/workflows/ruby-qltest.yml
|
||||
- .github/workflows/ruby-build.yml
|
||||
- .github/actions/fetch-codeql/action.yml
|
||||
- codeql-workspace.yml
|
||||
branches:
|
||||
@@ -28,23 +28,6 @@ defaults:
|
||||
working-directory: ruby
|
||||
|
||||
jobs:
|
||||
qlformat:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
- name: Check QL formatting
|
||||
run: find ql "(" -name "*.ql" -or -name "*.qll" ")" -print0 | xargs -0 codeql query format --check-only
|
||||
qlcompile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
- name: Check QL compilation
|
||||
run: |
|
||||
codeql query compile --check-only --threads=0 --ram 5000 --warnings=error "ql/src" "ql/examples"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
qlupgrade:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
@@ -65,17 +48,20 @@ jobs:
|
||||
xargs codeql execute upgrades testdb
|
||||
diff -q testdb/ruby.dbscheme downgrades/initial/ruby.dbscheme
|
||||
qltest:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-latest-xl
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
slice: ["1/2", "2/2"]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
- uses: ./ruby/actions/create-extractor-pack
|
||||
- name: Cache compilation cache
|
||||
id: query-cache
|
||||
uses: ./.github/actions/cache-query-compilation
|
||||
with:
|
||||
key: ruby-qltest
|
||||
- name: Run QL tests
|
||||
run: |
|
||||
codeql test run --threads=0 --ram 5000 --slice ${{ matrix.slice }} --search-path "${{ github.workspace }}/ruby/extractor-pack" --check-databases --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test
|
||||
codeql test run --threads=0 --ram 50000 --search-path "${{ github.workspace }}/ruby/extractor-pack" --check-databases --check-undefined-labels --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --consistency-queries ql/consistency-queries ql/test --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
|
||||
.github/workflows/swift.yml (vendored, 84 changed lines)
@@ -7,95 +7,77 @@ on:
|
||||
- "misc/bazel/**"
|
||||
- "*.bazel*"
|
||||
- .github/workflows/swift.yml
|
||||
- .github/actions/fetch-codeql/action.yml
|
||||
- .github/actions/**
|
||||
- codeql-workspace.yml
|
||||
- .pre-commit-config.yaml
|
||||
- "!**/*.md"
|
||||
- "!**/*.qhelp"
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
codegen: ${{ steps.filter.outputs.codegen }}
|
||||
ql: ${{ steps.filter.outputs.ql }}
|
||||
steps:
|
||||
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
codegen:
|
||||
- 'github/workflows/swift.yml'
|
||||
- rc/*
|
||||
push:
|
||||
paths:
|
||||
- "swift/**"
|
||||
- "misc/bazel/**"
|
||||
- "*.bazel*"
|
||||
- 'swift/actions/setup-env/**'
|
||||
- '.pre-commit-config.yaml'
|
||||
- 'swift/codegen/**'
|
||||
- 'swift/schema.py'
|
||||
- 'swift/**/*.dbscheme'
|
||||
- 'swift/ql/lib/codeql/swift/elements.qll'
|
||||
- 'swift/ql/lib/codeql/swift/elements/**'
|
||||
- 'swift/ql/lib/codeql/swift/generated/**'
|
||||
- 'swift/ql/test/extractor-tests/generated/**'
|
||||
ql:
|
||||
- 'github/workflows/swift.yml'
|
||||
- 'swift/**/*.ql'
|
||||
- 'swift/**/*.qll'
|
||||
- .github/workflows/swift.yml
|
||||
- .github/actions/**
|
||||
- codeql-workspace.yml
|
||||
- "!**/*.md"
|
||||
- "!**/*.qhelp"
|
||||
branches:
|
||||
- main
|
||||
- rc/*
|
||||
|
||||
jobs:
|
||||
# not using a matrix as you cannot depend on a specific job in a matrix, and we want to start linux checks
|
||||
# without waiting for the macOS build
|
||||
build-and-test-macos:
|
||||
runs-on: macos-12-xl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/create-extractor-pack
|
||||
- uses: ./swift/actions/run-quick-tests
|
||||
- uses: ./swift/actions/print-unextracted
|
||||
- uses: ./swift/actions/build-and-test
|
||||
build-and-test-linux:
|
||||
runs-on: ubuntu-20.04
|
||||
runs-on: ubuntu-latest-xl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/create-extractor-pack
|
||||
- uses: ./swift/actions/run-quick-tests
|
||||
- uses: ./swift/actions/print-unextracted
|
||||
- uses: ./swift/actions/build-and-test
|
||||
qltests-linux:
|
||||
needs: build-and-test-linux
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-latest-xl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/run-ql-tests
|
||||
qltests-macos:
|
||||
if : ${{ github.event_name == 'pull_request' }}
|
||||
needs: build-and-test-macos
|
||||
runs-on: macos-12-xl
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
slice: ["1/2", "2/2"]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/run-ql-tests
|
||||
with:
|
||||
flags: --slice ${{ matrix.slice }}
|
||||
integration-tests-linux:
|
||||
needs: build-and-test-linux
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-latest-xl
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/run-integration-tests
|
||||
integration-tests-macos:
|
||||
if : ${{ github.event_name == 'pull_request' }}
|
||||
needs: build-and-test-macos
|
||||
runs-on: macos-12-xl
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/run-integration-tests
|
||||
codegen:
|
||||
if : ${{ github.event_name == 'pull_request' }}
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.codegen == 'true' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./swift/actions/setup-env
|
||||
- uses: bazelbuild/setup-bazelisk@v2
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version-file: 'swift/.python-version'
|
||||
- uses: pre-commit/action@v3.0.0
|
||||
name: Check that python code is properly formatted
|
||||
with:
|
||||
@@ -111,13 +93,11 @@ jobs:
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: swift-generated-cpp-files
|
||||
path: swift/generated-cpp-files/**
|
||||
qlformat:
|
||||
path: generated-cpp-files/**
|
||||
database-upgrade-scripts:
|
||||
if : ${{ github.event_name == 'pull_request' }}
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.ql == 'true' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ./.github/actions/fetch-codeql
|
||||
- name: Check QL formatting
|
||||
run: find swift/ql "(" -name "*.ql" -or -name "*.qll" ")" -print0 | xargs -0 codeql query format --check-only
|
||||
- uses: ./swift/actions/database-upgrade-scripts
|
||||
|
||||
.gitignore (vendored, 2 changed lines)
@@ -27,8 +27,6 @@
# It's useful (though not required) to be able to unpack codeql in the ql checkout itself
/codeql/

csharp/extractor/Semmle.Extraction.CSharp.Driver/Properties/launchSettings.json

# Avoid committing cached package components
.codeql
@@ -19,7 +19,7 @@ repos:
|
||||
rev: v1.6.0
|
||||
hooks:
|
||||
- id: autopep8
|
||||
files: ^swift/codegen/.*\.py
|
||||
files: ^swift/.*\.py
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
@@ -44,7 +44,7 @@ repos:
|
||||
|
||||
- id: swift-codegen
|
||||
name: Run Swift checked in code generation
|
||||
files: ^swift/(codegen/|.*/generated/|ql/lib/(swift\.dbscheme$|codeql/swift/elements))
|
||||
files: ^swift/(schema.py$|codegen/|.*/generated/|ql/lib/(swift\.dbscheme$|codeql/swift/elements)|ql/\.generated.list)
|
||||
language: system
|
||||
entry: bazel run //swift/codegen -- --quiet
|
||||
pass_filenames: false
|
||||
|
||||
.vscode/settings.json (vendored, 4 changed lines)
@@ -1,3 +1,5 @@
{
    "omnisharp.autoStart": false
    "omnisharp.autoStart": false,
    "cmake.sourceDirectory": "${workspaceFolder}/swift",
    "cmake.buildDirectory": "${workspaceFolder}/bazel-cmake-build"
}
CODEOWNERS (11 changed lines)
@@ -5,20 +5,13 @@
/javascript/ @github/codeql-javascript
/python/ @github/codeql-python
/ruby/ @github/codeql-ruby
/swift/ @github/codeql-c
/swift/ @github/codeql-swift
/java/kotlin-extractor/ @github/codeql-kotlin
/java/kotlin-explorer/ @github/codeql-kotlin

# ML-powered queries
/javascript/ql/experimental/adaptivethreatmodeling/ @github/codeql-ml-powered-queries-reviewers

# Notify members of codeql-go about PRs to the shared data-flow library files
/java/ql/src/semmle/code/java/dataflow/internal/DataFlowImpl.qll @github/codeql-java @github/codeql-go
/java/ql/src/semmle/code/java/dataflow/internal/DataFlowImpl2.qll @github/codeql-java @github/codeql-go
/java/ql/src/semmle/code/java/dataflow/internal/DataFlowImplCommon.qll @github/codeql-java @github/codeql-go
/java/ql/src/semmle/code/java/dataflow/internal/tainttracking1/TaintTrackingImpl.qll @github/codeql-java @github/codeql-go
/java/ql/src/semmle/code/java/dataflow/internal/tainttracking2/TaintTrackingImpl.qll @github/codeql-java @github/codeql-go

# CodeQL tools and associated docs
/docs/codeql/codeql-cli/ @github/codeql-cli-reviewers
/docs/codeql/codeql-for-visual-studio-code/ @github/codeql-vscode-reviewers
@@ -45,4 +38,4 @@ WORKSPACE.bazel @github/codeql-ci-reviewers
/.github/workflows/js-ml-tests.yml @github/codeql-ml-powered-queries-reviewers
/.github/workflows/ql-for-ql-* @github/codeql-ql-for-ql-reviewers
/.github/workflows/ruby-* @github/codeql-ruby
/.github/workflows/swift-* @github/codeql-c
/.github/workflows/swift.yml @github/codeql-swift
@@ -25,7 +25,8 @@ provide:
  - "misc/suite-helpers/qlpack.yml"
  - "ruby/extractor-pack/codeql-extractor.yml"
  - "swift/extractor-pack/codeql-extractor.yml"
  - "ql/extractor-pack/codeql-extractor.ym"
  - "swift/integration-tests/qlpack.yml"
  - "ql/extractor-pack/codeql-extractor.yml"

versionPolicies:
  default:
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"DataFlow Java/C++/C#/Python": [
|
||||
"DataFlow Java/C++/C#/Go/Python/Ruby/Swift": [
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl.qll",
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl2.qll",
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl3.qll",
|
||||
@@ -27,6 +27,8 @@
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl4.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl5.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImplForContentDataFlow.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImpl.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImpl2.qll",
|
||||
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl.qll",
|
||||
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl2.qll",
|
||||
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl3.qll",
|
||||
@@ -38,17 +40,18 @@
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImplForPathname.qll",
|
||||
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImpl.qll"
|
||||
],
|
||||
"DataFlow Java/C++/C#/Python Common": [
|
||||
"DataFlow Java/C++/C#/Go/Python/Ruby/Swift Common": [
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"cpp/ql/lib/experimental/semmle/code/cpp/ir/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImplCommon.qll",
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImplCommon.qll",
|
||||
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImplCommon.qll"
|
||||
],
|
||||
"TaintTracking::Configuration Java/C++/C#/Python": [
|
||||
"TaintTracking::Configuration Java/C++/C#/Go/Python/Ruby/Swift": [
|
||||
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",
|
||||
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/tainttracking2/TaintTrackingImpl.qll",
|
||||
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",
|
||||
@@ -62,6 +65,8 @@
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/tainttracking3/TaintTrackingImpl.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/tainttracking4/TaintTrackingImpl.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/tainttracking5/TaintTrackingImpl.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/tainttracking2/TaintTrackingImpl.qll",
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/tainttracking2/TaintTrackingImpl.qll",
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/tainttracking3/TaintTrackingImpl.qll",
|
||||
@@ -72,7 +77,7 @@
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",
|
||||
"swift/ql/lib/codeql/swift/dataflow/internal/tainttracking1/TaintTrackingImpl.qll"
|
||||
],
|
||||
"DataFlow Java/C++/C#/Python Consistency checks": [
|
||||
"DataFlow Java/C++/C#/Python/Ruby/Swift Consistency checks": [
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImplConsistency.qll",
|
||||
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImplConsistency.qll",
|
||||
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImplConsistency.qll",
|
||||
@@ -82,9 +87,10 @@
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImplConsistency.qll",
|
||||
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImplConsistency.qll"
|
||||
],
|
||||
"DataFlow Java/C#/Ruby/Python/Swift Flow Summaries": [
|
||||
"DataFlow Java/C#/Go/Ruby/Python/Swift Flow Summaries": [
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/FlowSummaryImpl.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/FlowSummaryImpl.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/FlowSummaryImpl.qll",
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/FlowSummaryImpl.qll",
|
||||
"python/ql/lib/semmle/python/dataflow/new/internal/FlowSummaryImpl.qll",
|
||||
"swift/ql/lib/codeql/swift/dataflow/internal/FlowSummaryImpl.qll"
|
||||
@@ -464,6 +470,10 @@
|
||||
"javascript/ql/src/Comments/CommentedOutCodeReferences.inc.qhelp",
|
||||
"python/ql/src/Lexical/CommentedOutCodeReferences.inc.qhelp"
|
||||
],
|
||||
"ThreadResourceAbuse qhelp": [
|
||||
"java/ql/src/experimental/Security/CWE/CWE-400/LocalThreadResourceAbuse.qhelp",
|
||||
"java/ql/src/experimental/Security/CWE/CWE-400/ThreadResourceAbuse.qhelp"
|
||||
],
|
||||
"IDE Contextual Queries": [
|
||||
"cpp/ql/lib/IDEContextual.qll",
|
||||
"csharp/ql/lib/IDEContextual.qll",
|
||||
@@ -486,40 +496,6 @@
|
||||
"python/ql/lib/semmle/python/security/internal/SensitiveDataHeuristics.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/internal/SensitiveDataHeuristics.qll"
|
||||
],
|
||||
"ReDoS Util Python/JS/Ruby/Java": [
|
||||
"javascript/ql/lib/semmle/javascript/security/regexp/NfaUtils.qll",
|
||||
"python/ql/lib/semmle/python/security/regexp/NfaUtils.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/regexp/NfaUtils.qll",
|
||||
"java/ql/lib/semmle/code/java/security/regexp/NfaUtils.qll"
|
||||
],
|
||||
"ReDoS Exponential Python/JS/Ruby/Java": [
|
||||
"javascript/ql/lib/semmle/javascript/security/regexp/ExponentialBackTracking.qll",
|
||||
"python/ql/lib/semmle/python/security/regexp/ExponentialBackTracking.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/regexp/ExponentialBackTracking.qll",
|
||||
"java/ql/lib/semmle/code/java/security/regexp/ExponentialBackTracking.qll"
|
||||
],
|
||||
"ReDoS Polynomial Python/JS/Ruby/Java": [
|
||||
"javascript/ql/lib/semmle/javascript/security/regexp/SuperlinearBackTracking.qll",
|
||||
"python/ql/lib/semmle/python/security/regexp/SuperlinearBackTracking.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/regexp/SuperlinearBackTracking.qll",
|
||||
"java/ql/lib/semmle/code/java/security/regexp/SuperlinearBackTracking.qll"
|
||||
],
|
||||
"RegexpMatching Python/JS/Ruby": [
|
||||
"javascript/ql/lib/semmle/javascript/security/regexp/RegexpMatching.qll",
|
||||
"python/ql/lib/semmle/python/security/regexp/RegexpMatching.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/regexp/RegexpMatching.qll"
|
||||
],
|
||||
"BadTagFilterQuery Python/JS/Ruby": [
|
||||
"javascript/ql/lib/semmle/javascript/security/BadTagFilterQuery.qll",
|
||||
"python/ql/lib/semmle/python/security/BadTagFilterQuery.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/BadTagFilterQuery.qll"
|
||||
],
|
||||
"OverlyLargeRange Python/JS/Ruby/Java": [
|
||||
"javascript/ql/lib/semmle/javascript/security/OverlyLargeRangeQuery.qll",
|
||||
"python/ql/lib/semmle/python/security/OverlyLargeRangeQuery.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/OverlyLargeRangeQuery.qll",
|
||||
"java/ql/lib/semmle/code/java/security/OverlyLargeRangeQuery.qll"
|
||||
],
|
||||
"CFG": [
|
||||
"csharp/ql/lib/semmle/code/csharp/controlflow/internal/ControlFlowGraphImplShared.qll",
|
||||
"ruby/ql/lib/codeql/ruby/controlflow/internal/ControlFlowGraphImplShared.qll",
|
||||
@@ -539,6 +515,7 @@
|
||||
],
|
||||
"AccessPathSyntax": [
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/AccessPathSyntax.qll",
|
||||
"go/ql/lib/semmle/go/dataflow/internal/AccessPathSyntax.qll",
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/AccessPathSyntax.qll",
|
||||
"javascript/ql/lib/semmle/javascript/frameworks/data/internal/AccessPathSyntax.qll",
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/AccessPathSyntax.qll",
|
||||
@@ -554,16 +531,16 @@
|
||||
"ruby/ql/lib/codeql/ruby/internal/ConceptsShared.qll",
|
||||
"javascript/ql/lib/semmle/javascript/internal/ConceptsShared.qll"
|
||||
],
|
||||
"Hostname Regexp queries": [
|
||||
"javascript/ql/src/Security/CWE-020/HostnameRegexpShared.qll",
|
||||
"python/ql/src/Security/CWE-020/HostnameRegexpShared.qll",
|
||||
"ruby/ql/src/queries/security/cwe-020/HostnameRegexpShared.qll"
|
||||
],
|
||||
"ApiGraphModels": [
|
||||
"javascript/ql/lib/semmle/javascript/frameworks/data/internal/ApiGraphModels.qll",
|
||||
"ruby/ql/lib/codeql/ruby/frameworks/data/internal/ApiGraphModels.qll",
|
||||
"python/ql/lib/semmle/python/frameworks/data/internal/ApiGraphModels.qll"
|
||||
],
|
||||
"ApiGraphModelsExtensions": [
|
||||
"javascript/ql/lib/semmle/javascript/frameworks/data/internal/ApiGraphModelsExtensions.qll",
|
||||
"ruby/ql/lib/codeql/ruby/frameworks/data/internal/ApiGraphModelsExtensions.qll",
|
||||
"python/ql/lib/semmle/python/frameworks/data/internal/ApiGraphModelsExtensions.qll"
|
||||
],
|
||||
"TaintedFormatStringQuery Ruby/JS": [
|
||||
"javascript/ql/lib/semmle/javascript/security/dataflow/TaintedFormatStringQuery.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/TaintedFormatStringQuery.qll"
|
||||
@@ -607,5 +584,9 @@
|
||||
"IncompleteMultiCharacterSanitization JS/Ruby": [
|
||||
"javascript/ql/lib/semmle/javascript/security/IncompleteMultiCharacterSanitizationQuery.qll",
|
||||
"ruby/ql/lib/codeql/ruby/security/IncompleteMultiCharacterSanitizationQuery.qll"
|
||||
],
|
||||
"EncryptionKeySizes Python/Java": [
|
||||
"python/ql/lib/semmle/python/security/internal/EncryptionKeySizes.qll",
|
||||
"java/ql/lib/semmle/code/java/security/internal/EncryptionKeySizes.qll"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -257,11 +257,11 @@ namespace Semmle.Autobuild.Cpp.Tests
|
||||
Actions.GetCurrentDirectory = cwd;
|
||||
Actions.IsWindows = isWindows;
|
||||
|
||||
var options = new AutobuildOptions(Actions, Language.Cpp);
|
||||
var options = new CppAutobuildOptions(Actions);
|
||||
return new CppAutobuilder(Actions, options);
|
||||
}
|
||||
|
||||
void TestAutobuilderScript(Autobuilder autobuilder, int expectedOutput, int commandsRun)
|
||||
void TestAutobuilderScript(CppAutobuilder autobuilder, int expectedOutput, int commandsRun)
|
||||
{
|
||||
Assert.Equal(expectedOutput, autobuilder.GetBuildScript().Run(Actions, StartCallback, EndCallback));
|
||||
|
||||
@@ -299,7 +299,7 @@ namespace Semmle.Autobuild.Cpp.Tests
|
||||
{
|
||||
Actions.RunProcess[@"cmd.exe /C nuget restore C:\Project\test.sln -DisableParallelProcessing"] = 1;
|
||||
Actions.RunProcess[@"cmd.exe /C C:\Project\.nuget\nuget.exe restore C:\Project\test.sln -DisableParallelProcessing"] = 0;
|
||||
Actions.RunProcess[@"cmd.exe /C CALL ^""C:\Program Files ^(x86^)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat^"" && set Platform=&& type NUL && C:\odasa\tools\odasa index --auto msbuild C:\Project\test.sln /t:rebuild /p:Platform=""x86"" /p:Configuration=""Release"" /p:MvcBuildViews=true"] = 0;
|
||||
Actions.RunProcess[@"cmd.exe /C CALL ^""C:\Program Files ^(x86^)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat^"" && set Platform=&& type NUL && msbuild C:\Project\test.sln /t:rebuild /p:Platform=""x86"" /p:Configuration=""Release"""] = 0;
|
||||
Actions.RunProcessOut[@"C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe -prerelease -legacy -property installationPath"] = "";
|
||||
Actions.RunProcess[@"C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe -prerelease -legacy -property installationPath"] = 1;
|
||||
Actions.RunProcess[@"C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe -prerelease -legacy -property installationVersion"] = 0;
|
||||
|
||||
@@ -11,11 +11,12 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="System.IO.FileSystem" Version="4.3.0" />
|
||||
<PackageReference Include="System.IO.FileSystem.Primitives" Version="4.3.0" />
|
||||
<PackageReference Include="xunit" Version="2.4.1" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1">
|
||||
<PackageReference Include="xunit" Version="2.4.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.5">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.4.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -2,9 +2,26 @@
|
||||
|
||||
namespace Semmle.Autobuild.Cpp
|
||||
{
|
||||
public class CppAutobuilder : Autobuilder
|
||||
/// <summary>
|
||||
/// Encapsulates C++ build options.
|
||||
/// </summary>
|
||||
public class CppAutobuildOptions : AutobuildOptionsShared
|
||||
{
|
||||
public CppAutobuilder(IBuildActions actions, AutobuildOptions options) : base(actions, options) { }
|
||||
public override Language Language => Language.Cpp;
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Reads options from environment variables.
|
||||
/// Throws ArgumentOutOfRangeException for invalid arguments.
|
||||
/// </summary>
|
||||
public CppAutobuildOptions(IBuildActions actions) : base(actions)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
public class CppAutobuilder : Autobuilder<CppAutobuildOptions>
|
||||
{
|
||||
public CppAutobuilder(IBuildActions actions, CppAutobuildOptions options) : base(actions, options) { }
|
||||
|
||||
public override BuildScript GetBuildScript()
|
||||
{
|
||||
|
||||
@@ -11,7 +11,7 @@ namespace Semmle.Autobuild.Cpp
|
||||
try
|
||||
{
|
||||
var actions = SystemBuildActions.Instance;
|
||||
var options = new AutobuildOptions(actions, Language.Cpp);
|
||||
var options = new CppAutobuildOptions(actions);
|
||||
try
|
||||
{
|
||||
Console.WriteLine("CodeQL C++ autobuilder");
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Build" Version="16.11.0" />
|
||||
<PackageReference Include="Microsoft.Build" Version="17.3.2" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -1,3 +1,15 @@
|
||||
## 0.4.6
|
||||
|
||||
No user-facing changes.
|
||||
|
||||
## 0.4.5
|
||||
|
||||
No user-facing changes.
|
||||
|
||||
## 0.4.4
|
||||
|
||||
No user-facing changes.
|
||||
|
||||
## 0.4.3
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
6
cpp/ql/lib/change-notes/2022-11-14-deprecate-ast-gvn.md
Normal file
@@ -0,0 +1,6 @@
---
category: deprecated
---

* Deprecated `semmle.code.cpp.valuenumbering.GlobalValueNumberingImpl`. Use `semmle.code.cpp.valuenumbering.GlobalValueNumbering`, which exposes the same API.
4
cpp/ql/lib/change-notes/2022-11-16-must-flow.md
Normal file
@@ -0,0 +1,4 @@
---
category: breaking
---
The predicates in the `MustFlow::Configuration` class used by the `MustFlow` library (`semmle.code.cpp.ir.dataflow.MustFlow`) have changed to be defined directly in terms of the C++ IR instead of IR dataflow nodes.
4
cpp/ql/lib/change-notes/2022-11-17-deleted-deps.md
Normal file
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Deleted the deprecated `getName` and `getShortName` predicates from the `Folder` class.
@@ -0,0 +1,6 @@
---
category: deprecated
---

* Deprecated `semmle.code.cpp.ir.dataflow.DefaultTaintTracking`. Use `semmle.code.cpp.ir.dataflow.TaintTracking`.
* Deprecated `semmle.code.cpp.security.TaintTrackingImpl`. Use `semmle.code.cpp.ir.dataflow.TaintTracking`.
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The `getaddrinfo` function is now recognized as a flow source.
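As a rough illustration of the kind of code this note is about (a minimal POSIX sketch, not taken from the repository's tests), the address data returned by `getaddrinfo` comes from DNS responses, i.e. from outside the process, and then flows onward in the program:

```
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include <stdio.h>
#include <string.h>

/* Sketch: `res` and everything reachable from it is externally influenced,
 * which is why getaddrinfo is now modelled as a flow source. */
void lookup_and_log(const char *host) {
    struct addrinfo hints;
    struct addrinfo *res = NULL;
    memset(&hints, 0, sizeof(hints));
    hints.ai_family = AF_UNSPEC;
    hints.ai_socktype = SOCK_STREAM;

    if (getaddrinfo(host, "443", &hints, &res) == 0) {
        char namebuf[NI_MAXHOST];
        if (getnameinfo(res->ai_addr, res->ai_addrlen, namebuf, sizeof(namebuf),
                        NULL, 0, NI_NUMERICHOST) == 0) {
            printf("resolved to %s\n", namebuf); /* downstream use of the tainted data */
        }
        freeaddrinfo(res);
    }
}
```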
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The `secure_getenv` and `_wgetenv` functions are now recognized as local flow sources.
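A minimal sketch of why this matters (hypothetical code, assuming glibc, where `secure_getenv` is available): when `secure_getenv` does return a value, that value originates outside the program, just like `getenv`'s result.

```
#define _GNU_SOURCE
#include <stdio.h>
#include <stdlib.h>

int main(void) {
    /* secure_getenv returns NULL in privileged (setuid) contexts, but otherwise
     * its result is environment data, hence a local flow source. */
    const char *dir = secure_getenv("UPLOAD_DIR");
    if (dir != NULL) {
        char cmd[512];
        snprintf(cmd, sizeof(cmd), "ls %s", dir); /* environment data reaches a command string */
        system(cmd);
    }
    return 0;
}
```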
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The `scanf` and `fscanf` functions and their variants are now recognized as flow sources.
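For example (a minimal sketch, not one of the repository's tests), values written through the output arguments of the `scanf` family come straight from standard input or a stream, which is why they are treated as tainted:

```
#include <stdio.h>

int main(void) {
    char name[64];
    int index = 0;
    if (scanf("%63s %d", name, &index) == 2) {
        int table[16] = {0};
        /* `index` is read from stdin and used unchecked; this is the kind of
         * pattern the new flow-source modelling lets queries flag. */
        printf("%s -> %d\n", name, table[index]);
    }
    return 0;
}
```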
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The `ArgvSource` flow source has been generalized to handle cases where the argument vector of `main` is not named `argv`.
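A small sketch of the case the generalization covers (hypothetical example): the second parameter of `main` is still command-line input even when it is not called `argv`.

```
#include <stdio.h>

int main(int argc, char **arguments) { /* parameter deliberately not named argv */
    if (argc > 1) {
        printf(arguments[1]); /* user-controlled format string */
    }
    return 0;
}
```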
3
cpp/ql/lib/change-notes/released/0.4.4.md
Normal file
@@ -0,0 +1,3 @@
## 0.4.4

No user-facing changes.
3
cpp/ql/lib/change-notes/released/0.4.5.md
Normal file
@@ -0,0 +1,3 @@
## 0.4.5

No user-facing changes.
3
cpp/ql/lib/change-notes/released/0.4.6.md
Normal file
@@ -0,0 +1,3 @@
## 0.4.6

No user-facing changes.
@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.4.3
lastReleaseVersion: 0.4.6
@@ -12,8 +12,8 @@ import IDEContextual
*
* In some cases it is preferable to modify locations (the
* `hasLocationInfo()` predicate) so that they are short, and
* non-overlapping with other locations that might be highlighted in
* the LGTM interface.
* non-overlapping with other locations that might be reported as
* code scanning alerts on GitHub.
*
* We need to give locations that may not be in the database, so
* we use `hasLocationInfo()` rather than `getLocation()`.
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -915,18 +915,57 @@ private module Cached {
|
||||
TDataFlowCallNone() or
|
||||
TDataFlowCallSome(DataFlowCall call)
|
||||
|
||||
cached
|
||||
newtype TParamNodeOption =
|
||||
TParamNodeNone() or
|
||||
TParamNodeSome(ParamNode p)
|
||||
|
||||
cached
|
||||
newtype TReturnCtx =
|
||||
TReturnCtxNone() or
|
||||
TReturnCtxNoFlowThrough() or
|
||||
TReturnCtxMaybeFlowThrough(ReturnPosition pos)
|
||||
|
||||
cached
|
||||
newtype TTypedContentApprox =
|
||||
MkTypedContentApprox(ContentApprox c, DataFlowType t) {
|
||||
exists(Content cont |
|
||||
c = getContentApprox(cont) and
|
||||
store(_, cont, _, _, t)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
newtype TTypedContent = MkTypedContent(Content c, DataFlowType t) { store(_, c, _, _, t) }
|
||||
|
||||
cached
|
||||
TypedContent getATypedContent(TypedContentApprox c) {
|
||||
exists(ContentApprox cls, DataFlowType t, Content cont |
|
||||
c = MkTypedContentApprox(cls, pragma[only_bind_into](t)) and
|
||||
result = MkTypedContent(cont, pragma[only_bind_into](t)) and
|
||||
cls = getContentApprox(cont)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
newtype TAccessPathFront =
|
||||
TFrontNil(DataFlowType t) or
|
||||
TFrontHead(TypedContent tc)
|
||||
|
||||
cached
|
||||
newtype TApproxAccessPathFront =
|
||||
TApproxFrontNil(DataFlowType t) or
|
||||
TApproxFrontHead(TypedContentApprox tc)
|
||||
|
||||
cached
|
||||
newtype TAccessPathFrontOption =
|
||||
TAccessPathFrontNone() or
|
||||
TAccessPathFrontSome(AccessPathFront apf)
|
||||
|
||||
cached
|
||||
newtype TApproxAccessPathFrontOption =
|
||||
TApproxAccessPathFrontNone() or
|
||||
TApproxAccessPathFrontSome(ApproxAccessPathFront apf)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1304,6 +1343,113 @@ class DataFlowCallOption extends TDataFlowCallOption {
|
||||
}
|
||||
}
|
||||
|
||||
/** An optional `ParamNode`. */
|
||||
class ParamNodeOption extends TParamNodeOption {
|
||||
string toString() {
|
||||
this = TParamNodeNone() and
|
||||
result = "(none)"
|
||||
or
|
||||
exists(ParamNode p |
|
||||
this = TParamNodeSome(p) and
|
||||
result = p.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A return context used to calculate flow summaries in reverse flow.
|
||||
*
|
||||
* The possible values are:
|
||||
*
|
||||
* - `TReturnCtxNone()`: no return flow.
|
||||
* - `TReturnCtxNoFlowThrough()`: return flow, but flow through is not possible.
|
||||
* - `TReturnCtxMaybeFlowThrough(ReturnPosition pos)`: return flow, of kind `pos`, and
|
||||
* flow through may be possible.
|
||||
*/
|
||||
class ReturnCtx extends TReturnCtx {
|
||||
string toString() {
|
||||
this = TReturnCtxNone() and
|
||||
result = "(none)"
|
||||
or
|
||||
this = TReturnCtxNoFlowThrough() and
|
||||
result = "(no flow through)"
|
||||
or
|
||||
exists(ReturnPosition pos |
|
||||
this = TReturnCtxMaybeFlowThrough(pos) and
|
||||
result = pos.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** An approximated `Content` tagged with the type of a containing object. */
|
||||
class TypedContentApprox extends MkTypedContentApprox {
|
||||
private ContentApprox c;
|
||||
private DataFlowType t;
|
||||
|
||||
TypedContentApprox() { this = MkTypedContentApprox(c, t) }
|
||||
|
||||
/** Gets a typed content approximated by this value. */
|
||||
TypedContent getATypedContent() { result = getATypedContent(this) }
|
||||
|
||||
/** Gets the container type. */
|
||||
DataFlowType getContainerType() { result = t }
|
||||
|
||||
/** Gets a textual representation of this approximated content. */
|
||||
string toString() { result = c.toString() }
|
||||
}
|
||||
|
||||
/**
|
||||
* The front of an approximated access path. This is either a head or a nil.
|
||||
*/
|
||||
abstract class ApproxAccessPathFront extends TApproxAccessPathFront {
|
||||
abstract string toString();
|
||||
|
||||
abstract DataFlowType getType();
|
||||
|
||||
abstract boolean toBoolNonEmpty();
|
||||
|
||||
pragma[nomagic]
|
||||
TypedContent getAHead() {
|
||||
exists(TypedContentApprox cont |
|
||||
this = TApproxFrontHead(cont) and
|
||||
result = cont.getATypedContent()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class ApproxAccessPathFrontNil extends ApproxAccessPathFront, TApproxFrontNil {
|
||||
private DataFlowType t;
|
||||
|
||||
ApproxAccessPathFrontNil() { this = TApproxFrontNil(t) }
|
||||
|
||||
override string toString() { result = ppReprType(t) }
|
||||
|
||||
override DataFlowType getType() { result = t }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = false }
|
||||
}
|
||||
|
||||
class ApproxAccessPathFrontHead extends ApproxAccessPathFront, TApproxFrontHead {
|
||||
private TypedContentApprox tc;
|
||||
|
||||
ApproxAccessPathFrontHead() { this = TApproxFrontHead(tc) }
|
||||
|
||||
override string toString() { result = tc.toString() }
|
||||
|
||||
override DataFlowType getType() { result = tc.getContainerType() }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = true }
|
||||
}
|
||||
|
||||
/** An optional approximated access path front. */
|
||||
class ApproxAccessPathFrontOption extends TApproxAccessPathFrontOption {
|
||||
string toString() {
|
||||
this = TApproxAccessPathFrontNone() and result = "<none>"
|
||||
or
|
||||
this = TApproxAccessPathFrontSome(any(ApproxAccessPathFront apf | result = apf.toString()))
|
||||
}
|
||||
}
|
||||
|
||||
/** A `Content` tagged with the type of a containing object. */
|
||||
class TypedContent extends MkTypedContent {
|
||||
private Content c;
|
||||
@@ -1336,7 +1482,7 @@ abstract class AccessPathFront extends TAccessPathFront {
|
||||
|
||||
abstract DataFlowType getType();
|
||||
|
||||
abstract boolean toBoolNonEmpty();
|
||||
abstract ApproxAccessPathFront toApprox();
|
||||
|
||||
TypedContent getHead() { this = TFrontHead(result) }
|
||||
}
|
||||
@@ -1350,7 +1496,7 @@ class AccessPathFrontNil extends AccessPathFront, TFrontNil {
|
||||
|
||||
override DataFlowType getType() { result = t }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = false }
|
||||
override ApproxAccessPathFront toApprox() { result = TApproxFrontNil(t) }
|
||||
}
|
||||
|
||||
class AccessPathFrontHead extends AccessPathFront, TFrontHead {
|
||||
@@ -1362,7 +1508,7 @@ class AccessPathFrontHead extends AccessPathFront, TFrontHead {
|
||||
|
||||
override DataFlowType getType() { result = tc.getContainerType() }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = true }
|
||||
override ApproxAccessPathFront toApprox() { result.getAHead() = tc }
|
||||
}
|
||||
|
||||
/** An optional access path front. */
|
||||
|
||||
@@ -136,6 +136,18 @@ module Consistency {
|
||||
msg = "Local flow step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
query predicate readStepIsLocal(Node n1, Node n2, string msg) {
|
||||
readStep(n1, _, n2) and
|
||||
nodeGetEnclosingCallable(n1) != nodeGetEnclosingCallable(n2) and
|
||||
msg = "Read step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
query predicate storeStepIsLocal(Node n1, Node n2, string msg) {
|
||||
storeStep(n1, _, n2) and
|
||||
nodeGetEnclosingCallable(n1) != nodeGetEnclosingCallable(n2) and
|
||||
msg = "Store step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
private DataFlowType typeRepr() { result = getNodeType(_) }
|
||||
|
||||
query predicate compatibleTypesReflexive(DataFlowType t, string msg) {
|
||||
@@ -232,4 +244,25 @@ module Consistency {
|
||||
not callable = viableCallable(call) and
|
||||
not any(ConsistencyConfiguration c).viableImplInCallContextTooLargeExclude(call, ctx, callable)
|
||||
}
|
||||
|
||||
query predicate uniqueParameterNodeAtPosition(
|
||||
DataFlowCallable c, ParameterPosition pos, Node p, string msg
|
||||
) {
|
||||
isParameterNode(p, c, pos) and
|
||||
not exists(unique(Node p0 | isParameterNode(p0, c, pos))) and
|
||||
msg = "Parameters with overlapping positions."
|
||||
}
|
||||
|
||||
query predicate uniqueParameterNodePosition(
|
||||
DataFlowCallable c, ParameterPosition pos, Node p, string msg
|
||||
) {
|
||||
isParameterNode(p, c, pos) and
|
||||
not exists(unique(ParameterPosition pos0 | isParameterNode(p, c, pos0))) and
|
||||
msg = "Parameter node with multiple positions."
|
||||
}
|
||||
|
||||
query predicate uniqueContentApprox(Content c, string msg) {
|
||||
not exists(unique(ContentApprox approx | approx = getContentApprox(c))) and
|
||||
msg = "Non-unique content approximation."
|
||||
}
|
||||
}
|
||||
|
||||
@@ -551,6 +551,13 @@ predicate additionalLambdaFlowStep(Node nodeFrom, Node nodeTo, boolean preserves
|
||||
*/
|
||||
predicate allowParameterReturnInSelf(ParameterNode p) { none() }
|
||||
|
||||
/** An approximated `Content`. */
|
||||
class ContentApprox = Unit;
|
||||
|
||||
/** Gets an approximated value for content `c`. */
|
||||
pragma[inline]
|
||||
ContentApprox getContentApprox(Content c) { any() }
|
||||
|
||||
private class MyConsistencyConfiguration extends Consistency::ConsistencyConfiguration {
|
||||
override predicate argHasPostUpdateExclude(ArgumentNode n) {
|
||||
// The rules for whether an IR argument gets a post-update node are too
|
||||
|
||||
@@ -292,12 +292,8 @@ module SemanticExprConfig {
|
||||
final Location getLocation() { result = super.getLocation() }
|
||||
}
|
||||
|
||||
private class ValueNumberBound extends Bound {
|
||||
IRBound::ValueNumberBound bound;
|
||||
|
||||
ValueNumberBound() { bound = this }
|
||||
|
||||
override string toString() { result = bound.toString() }
|
||||
private class ValueNumberBound extends Bound instanceof IRBound::ValueNumberBound {
|
||||
override string toString() { result = IRBound::ValueNumberBound.super.toString() }
|
||||
}
|
||||
|
||||
predicate zeroBound(Bound bound) { bound instanceof IRBound::ZeroBound }
|
||||
|
||||
@@ -33,23 +33,15 @@ abstract private class FlowSignDef extends SignDef {
|
||||
}
|
||||
|
||||
/** An SSA definition whose sign is determined by the sign of that definition's source expression. */
|
||||
private class ExplicitSignDef extends FlowSignDef {
|
||||
SemSsaExplicitUpdate update;
|
||||
|
||||
ExplicitSignDef() { update = this }
|
||||
|
||||
final override Sign getSign() { result = semExprSign(update.getSourceExpr()) }
|
||||
private class ExplicitSignDef extends FlowSignDef instanceof SemSsaExplicitUpdate {
|
||||
final override Sign getSign() { result = semExprSign(super.getSourceExpr()) }
|
||||
}
|
||||
|
||||
/** An SSA Phi definition, whose sign is the union of the signs of its inputs. */
|
||||
private class PhiSignDef extends FlowSignDef {
|
||||
SemSsaPhiNode phi;
|
||||
|
||||
PhiSignDef() { phi = this }
|
||||
|
||||
private class PhiSignDef extends FlowSignDef instanceof SemSsaPhiNode {
|
||||
final override Sign getSign() {
|
||||
exists(SemSsaVariable inp, SemSsaReadPositionPhiInputEdge edge |
|
||||
edge.phiInput(phi, inp) and
|
||||
edge.phiInput(this, inp) and
|
||||
result = semSsaSign(inp, edge)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/cpp-all
|
||||
version: 0.4.4-dev
|
||||
version: 0.5.0-dev
|
||||
groups: cpp
|
||||
dbscheme: semmlecode.cpp.dbscheme
|
||||
extractor: cpp
|
||||
|
||||
@@ -189,18 +189,6 @@ class Folder extends Container, @folder {
|
||||
* Gets the URL of this folder.
|
||||
*/
|
||||
deprecated override string getURL() { result = "file://" + this.getAbsolutePath() + ":0:0:0:0" }
|
||||
|
||||
/**
|
||||
* DEPRECATED: use `getAbsolutePath` instead.
|
||||
* Gets the name of this folder.
|
||||
*/
|
||||
deprecated string getName() { folders(underlyingElement(this), result) }
|
||||
|
||||
/**
|
||||
* DEPRECATED: use `getBaseName` instead.
|
||||
* Gets the last part of the folder name.
|
||||
*/
|
||||
deprecated string getShortName() { result = this.getBaseName() }
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -397,11 +397,8 @@ private int lengthInBase16(float f) {
|
||||
/**
|
||||
* A class to represent format strings that occur as arguments to invocations of formatting functions.
|
||||
*/
|
||||
class FormatLiteral extends Literal {
|
||||
FormatLiteral() {
|
||||
exists(FormattingFunctionCall ffc | ffc.getFormat() = this) and
|
||||
this instanceof StringLiteral
|
||||
}
|
||||
class FormatLiteral extends Literal instanceof StringLiteral {
|
||||
FormatLiteral() { exists(FormattingFunctionCall ffc | ffc.getFormat() = this) }
|
||||
|
||||
/**
|
||||
* Gets the function call where this format string is used.
|
||||
|
||||
@@ -30,15 +30,12 @@ abstract class ScanfFunction extends Function {
|
||||
/**
|
||||
* The standard function `scanf` (and variations).
|
||||
*/
|
||||
class Scanf extends ScanfFunction {
|
||||
class Scanf extends ScanfFunction instanceof TopLevelFunction {
|
||||
Scanf() {
|
||||
this instanceof TopLevelFunction and
|
||||
(
|
||||
this.hasGlobalOrStdOrBslName("scanf") or // scanf(format, args...)
|
||||
this.hasGlobalOrStdOrBslName("wscanf") or // wscanf(format, args...)
|
||||
this.hasGlobalName("_scanf_l") or // _scanf_l(format, locale, args...)
|
||||
this.hasGlobalName("_wscanf_l") // _wscanf_l(format, locale, args...)
|
||||
)
|
||||
this.hasGlobalName("_wscanf_l")
|
||||
}
|
||||
|
||||
override int getInputParameterIndex() { none() }
|
||||
@@ -49,15 +46,12 @@ class Scanf extends ScanfFunction {
|
||||
/**
|
||||
* The standard function `fscanf` (and variations).
|
||||
*/
|
||||
class Fscanf extends ScanfFunction {
|
||||
class Fscanf extends ScanfFunction instanceof TopLevelFunction {
|
||||
Fscanf() {
|
||||
this instanceof TopLevelFunction and
|
||||
(
|
||||
this.hasGlobalOrStdOrBslName("fscanf") or // fscanf(src_stream, format, args...)
|
||||
this.hasGlobalOrStdOrBslName("fwscanf") or // fwscanf(src_stream, format, args...)
|
||||
this.hasGlobalName("_fscanf_l") or // _fscanf_l(src_stream, format, locale, args...)
|
||||
this.hasGlobalName("_fwscanf_l") // _fwscanf_l(src_stream, format, locale, args...)
|
||||
)
|
||||
this.hasGlobalName("_fwscanf_l")
|
||||
}
|
||||
|
||||
override int getInputParameterIndex() { result = 0 }
|
||||
@@ -68,15 +62,12 @@ class Fscanf extends ScanfFunction {
|
||||
/**
|
||||
* The standard function `sscanf` (and variations).
|
||||
*/
|
||||
class Sscanf extends ScanfFunction {
|
||||
class Sscanf extends ScanfFunction instanceof TopLevelFunction {
|
||||
Sscanf() {
|
||||
this instanceof TopLevelFunction and
|
||||
(
|
||||
this.hasGlobalOrStdOrBslName("sscanf") or // sscanf(src_stream, format, args...)
|
||||
this.hasGlobalOrStdOrBslName("swscanf") or // swscanf(src, format, args...)
|
||||
this.hasGlobalName("_sscanf_l") or // _sscanf_l(src, format, locale, args...)
|
||||
this.hasGlobalName("_swscanf_l") // _swscanf_l(src, format, locale, args...)
|
||||
)
|
||||
this.hasGlobalName("_swscanf_l")
|
||||
}
|
||||
|
||||
override int getInputParameterIndex() { result = 0 }
|
||||
@@ -87,17 +78,12 @@ class Sscanf extends ScanfFunction {
|
||||
/**
|
||||
* The standard(ish) function `snscanf` (and variations).
|
||||
*/
|
||||
class Snscanf extends ScanfFunction {
|
||||
class Snscanf extends ScanfFunction instanceof TopLevelFunction {
|
||||
Snscanf() {
|
||||
this instanceof TopLevelFunction and
|
||||
(
|
||||
this.hasGlobalName("_snscanf") or // _snscanf(src, max_amount, format, args...)
|
||||
this.hasGlobalName("_snwscanf") or // _snwscanf(src, max_amount, format, args...)
|
||||
this.hasGlobalName("_snscanf_l") or // _snscanf_l(src, max_amount, format, locale, args...)
|
||||
this.hasGlobalName("_snwscanf_l") // _snwscanf_l(src, max_amount, format, locale, args...)
|
||||
// note that the max_amount is not a limit on the output length, it's an input length
|
||||
// limit used with non null-terminated strings.
|
||||
)
|
||||
this.hasGlobalName("_snwscanf_l")
|
||||
}
|
||||
|
||||
override int getInputParameterIndex() { result = 0 }
|
||||
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -915,18 +915,57 @@ private module Cached {
|
||||
TDataFlowCallNone() or
|
||||
TDataFlowCallSome(DataFlowCall call)
|
||||
|
||||
cached
|
||||
newtype TParamNodeOption =
|
||||
TParamNodeNone() or
|
||||
TParamNodeSome(ParamNode p)
|
||||
|
||||
cached
|
||||
newtype TReturnCtx =
|
||||
TReturnCtxNone() or
|
||||
TReturnCtxNoFlowThrough() or
|
||||
TReturnCtxMaybeFlowThrough(ReturnPosition pos)
|
||||
|
||||
cached
|
||||
newtype TTypedContentApprox =
|
||||
MkTypedContentApprox(ContentApprox c, DataFlowType t) {
|
||||
exists(Content cont |
|
||||
c = getContentApprox(cont) and
|
||||
store(_, cont, _, _, t)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
newtype TTypedContent = MkTypedContent(Content c, DataFlowType t) { store(_, c, _, _, t) }
|
||||
|
||||
cached
|
||||
TypedContent getATypedContent(TypedContentApprox c) {
|
||||
exists(ContentApprox cls, DataFlowType t, Content cont |
|
||||
c = MkTypedContentApprox(cls, pragma[only_bind_into](t)) and
|
||||
result = MkTypedContent(cont, pragma[only_bind_into](t)) and
|
||||
cls = getContentApprox(cont)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
newtype TAccessPathFront =
|
||||
TFrontNil(DataFlowType t) or
|
||||
TFrontHead(TypedContent tc)
|
||||
|
||||
cached
|
||||
newtype TApproxAccessPathFront =
|
||||
TApproxFrontNil(DataFlowType t) or
|
||||
TApproxFrontHead(TypedContentApprox tc)
|
||||
|
||||
cached
|
||||
newtype TAccessPathFrontOption =
|
||||
TAccessPathFrontNone() or
|
||||
TAccessPathFrontSome(AccessPathFront apf)
|
||||
|
||||
cached
|
||||
newtype TApproxAccessPathFrontOption =
|
||||
TApproxAccessPathFrontNone() or
|
||||
TApproxAccessPathFrontSome(ApproxAccessPathFront apf)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1304,6 +1343,113 @@ class DataFlowCallOption extends TDataFlowCallOption {
|
||||
}
|
||||
}
|
||||
|
||||
/** An optional `ParamNode`. */
|
||||
class ParamNodeOption extends TParamNodeOption {
|
||||
string toString() {
|
||||
this = TParamNodeNone() and
|
||||
result = "(none)"
|
||||
or
|
||||
exists(ParamNode p |
|
||||
this = TParamNodeSome(p) and
|
||||
result = p.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A return context used to calculate flow summaries in reverse flow.
|
||||
*
|
||||
* The possible values are:
|
||||
*
|
||||
* - `TReturnCtxNone()`: no return flow.
|
||||
* - `TReturnCtxNoFlowThrough()`: return flow, but flow through is not possible.
|
||||
* - `TReturnCtxMaybeFlowThrough(ReturnPosition pos)`: return flow, of kind `pos`, and
|
||||
* flow through may be possible.
|
||||
*/
|
||||
class ReturnCtx extends TReturnCtx {
|
||||
string toString() {
|
||||
this = TReturnCtxNone() and
|
||||
result = "(none)"
|
||||
or
|
||||
this = TReturnCtxNoFlowThrough() and
|
||||
result = "(no flow through)"
|
||||
or
|
||||
exists(ReturnPosition pos |
|
||||
this = TReturnCtxMaybeFlowThrough(pos) and
|
||||
result = pos.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** An approximated `Content` tagged with the type of a containing object. */
|
||||
class TypedContentApprox extends MkTypedContentApprox {
|
||||
private ContentApprox c;
|
||||
private DataFlowType t;
|
||||
|
||||
TypedContentApprox() { this = MkTypedContentApprox(c, t) }
|
||||
|
||||
/** Gets a typed content approximated by this value. */
|
||||
TypedContent getATypedContent() { result = getATypedContent(this) }
|
||||
|
||||
/** Gets the container type. */
|
||||
DataFlowType getContainerType() { result = t }
|
||||
|
||||
/** Gets a textual representation of this approximated content. */
|
||||
string toString() { result = c.toString() }
|
||||
}
|
||||
|
||||
/**
|
||||
* The front of an approximated access path. This is either a head or a nil.
|
||||
*/
|
||||
abstract class ApproxAccessPathFront extends TApproxAccessPathFront {
|
||||
abstract string toString();
|
||||
|
||||
abstract DataFlowType getType();
|
||||
|
||||
abstract boolean toBoolNonEmpty();
|
||||
|
||||
pragma[nomagic]
|
||||
TypedContent getAHead() {
|
||||
exists(TypedContentApprox cont |
|
||||
this = TApproxFrontHead(cont) and
|
||||
result = cont.getATypedContent()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class ApproxAccessPathFrontNil extends ApproxAccessPathFront, TApproxFrontNil {
|
||||
private DataFlowType t;
|
||||
|
||||
ApproxAccessPathFrontNil() { this = TApproxFrontNil(t) }
|
||||
|
||||
override string toString() { result = ppReprType(t) }
|
||||
|
||||
override DataFlowType getType() { result = t }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = false }
|
||||
}
|
||||
|
||||
class ApproxAccessPathFrontHead extends ApproxAccessPathFront, TApproxFrontHead {
|
||||
private TypedContentApprox tc;
|
||||
|
||||
ApproxAccessPathFrontHead() { this = TApproxFrontHead(tc) }
|
||||
|
||||
override string toString() { result = tc.toString() }
|
||||
|
||||
override DataFlowType getType() { result = tc.getContainerType() }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = true }
|
||||
}
|
||||
|
||||
/** An optional approximated access path front. */
|
||||
class ApproxAccessPathFrontOption extends TApproxAccessPathFrontOption {
|
||||
string toString() {
|
||||
this = TApproxAccessPathFrontNone() and result = "<none>"
|
||||
or
|
||||
this = TApproxAccessPathFrontSome(any(ApproxAccessPathFront apf | result = apf.toString()))
|
||||
}
|
||||
}
|
||||
|
||||
/** A `Content` tagged with the type of a containing object. */
|
||||
class TypedContent extends MkTypedContent {
|
||||
private Content c;
|
||||
@@ -1336,7 +1482,7 @@ abstract class AccessPathFront extends TAccessPathFront {
|
||||
|
||||
abstract DataFlowType getType();
|
||||
|
||||
abstract boolean toBoolNonEmpty();
|
||||
abstract ApproxAccessPathFront toApprox();
|
||||
|
||||
TypedContent getHead() { this = TFrontHead(result) }
|
||||
}
|
||||
@@ -1350,7 +1496,7 @@ class AccessPathFrontNil extends AccessPathFront, TFrontNil {
|
||||
|
||||
override DataFlowType getType() { result = t }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = false }
|
||||
override ApproxAccessPathFront toApprox() { result = TApproxFrontNil(t) }
|
||||
}
|
||||
|
||||
class AccessPathFrontHead extends AccessPathFront, TFrontHead {
|
||||
@@ -1362,7 +1508,7 @@ class AccessPathFrontHead extends AccessPathFront, TFrontHead {
|
||||
|
||||
override DataFlowType getType() { result = tc.getContainerType() }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = true }
|
||||
override ApproxAccessPathFront toApprox() { result.getAHead() = tc }
|
||||
}
|
||||
|
||||
/** An optional access path front. */
|
||||
|
||||
@@ -136,6 +136,18 @@ module Consistency {
|
||||
msg = "Local flow step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
query predicate readStepIsLocal(Node n1, Node n2, string msg) {
|
||||
readStep(n1, _, n2) and
|
||||
nodeGetEnclosingCallable(n1) != nodeGetEnclosingCallable(n2) and
|
||||
msg = "Read step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
query predicate storeStepIsLocal(Node n1, Node n2, string msg) {
|
||||
storeStep(n1, _, n2) and
|
||||
nodeGetEnclosingCallable(n1) != nodeGetEnclosingCallable(n2) and
|
||||
msg = "Store step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
private DataFlowType typeRepr() { result = getNodeType(_) }
|
||||
|
||||
query predicate compatibleTypesReflexive(DataFlowType t, string msg) {
|
||||
@@ -232,4 +244,25 @@ module Consistency {
|
||||
not callable = viableCallable(call) and
|
||||
not any(ConsistencyConfiguration c).viableImplInCallContextTooLargeExclude(call, ctx, callable)
|
||||
}
|
||||
|
||||
query predicate uniqueParameterNodeAtPosition(
|
||||
DataFlowCallable c, ParameterPosition pos, Node p, string msg
|
||||
) {
|
||||
isParameterNode(p, c, pos) and
|
||||
not exists(unique(Node p0 | isParameterNode(p0, c, pos))) and
|
||||
msg = "Parameters with overlapping positions."
|
||||
}
|
||||
|
||||
query predicate uniqueParameterNodePosition(
|
||||
DataFlowCallable c, ParameterPosition pos, Node p, string msg
|
||||
) {
|
||||
isParameterNode(p, c, pos) and
|
||||
not exists(unique(ParameterPosition pos0 | isParameterNode(p, c, pos0))) and
|
||||
msg = "Parameter node with multiple positions."
|
||||
}
|
||||
|
||||
query predicate uniqueContentApprox(Content c, string msg) {
|
||||
not exists(unique(ContentApprox approx | approx = getContentApprox(c))) and
|
||||
msg = "Non-unique content approximation."
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -296,6 +296,13 @@ predicate additionalLambdaFlowStep(Node nodeFrom, Node nodeTo, boolean preserves
|
||||
*/
|
||||
predicate allowParameterReturnInSelf(ParameterNode p) { none() }
|
||||
|
||||
/** An approximated `Content`. */
|
||||
class ContentApprox = Unit;
|
||||
|
||||
/** Gets an approximated value for content `c`. */
|
||||
pragma[inline]
|
||||
ContentApprox getContentApprox(Content c) { any() }
|
||||
|
||||
private class MyConsistencyConfiguration extends Consistency::ConsistencyConfiguration {
|
||||
override predicate argHasPostUpdateExclude(ArgumentNode n) {
|
||||
// Is the null pointer (or something that's not really a pointer)
|
||||
|
||||
@@ -1,642 +1,21 @@
|
||||
/**
|
||||
* DEPRECATED: Use `semmle.code.cpp.ir.dataflow.TaintTracking` as a replacement.
|
||||
*
|
||||
* An IR taint tracking library that uses an IR DataFlow configuration to track
|
||||
* taint from user inputs as defined by `semmle.code.cpp.security.Security`.
|
||||
*/
|
||||
|
||||
import cpp
|
||||
import semmle.code.cpp.security.Security
|
||||
private import semmle.code.cpp.ir.dataflow.DataFlow
|
||||
private import semmle.code.cpp.ir.dataflow.internal.DataFlowUtil
|
||||
private import semmle.code.cpp.ir.dataflow.DataFlow3
|
||||
private import semmle.code.cpp.ir.IR
|
||||
private import semmle.code.cpp.ir.dataflow.ResolveCall
|
||||
private import semmle.code.cpp.controlflow.IRGuards
|
||||
private import semmle.code.cpp.models.interfaces.Taint
|
||||
private import semmle.code.cpp.models.interfaces.DataFlow
|
||||
private import semmle.code.cpp.ir.dataflow.TaintTracking
|
||||
private import semmle.code.cpp.ir.dataflow.TaintTracking2
|
||||
private import semmle.code.cpp.ir.dataflow.TaintTracking3
|
||||
private import semmle.code.cpp.ir.dataflow.internal.ModelUtil
|
||||
private import semmle.code.cpp.ir.dataflow.internal.DefaultTaintTrackingImpl as DefaultTaintTrackingImpl
|
||||
|
||||
/**
|
||||
* A predictable instruction is one where an external user can predict
|
||||
* the value. For example, a literal in the source code is considered
|
||||
* predictable.
|
||||
*/
|
||||
private predicate predictableInstruction(Instruction instr) {
|
||||
instr instanceof ConstantInstruction
|
||||
or
|
||||
instr instanceof StringConstantInstruction
|
||||
or
|
||||
// This could be a conversion on a string literal
|
||||
predictableInstruction(instr.(UnaryInstruction).getUnary())
|
||||
}
|
||||
deprecated predicate predictableOnlyFlow = DefaultTaintTrackingImpl::predictableOnlyFlow/1;
|
||||
|
||||
/**
|
||||
* Functions that we should only allow taint to flow through (to the return
|
||||
* value) if all but the source argument are 'predictable'. This is done to
|
||||
* emulate the old security library's implementation rather than due to any
|
||||
* strong belief that this is the right approach.
|
||||
*
|
||||
* Note that the list itself is not very principled; it consists of all the
|
||||
* functions listed in the old security library's [default] `isPureFunction`
|
||||
* that have more than one argument, but are not in the old taint tracking
|
||||
* library's `returnArgument` predicate.
|
||||
*/
|
||||
predicate predictableOnlyFlow(string name) {
|
||||
name =
|
||||
[
|
||||
"strcasestr", "strchnul", "strchr", "strchrnul", "strcmp", "strcspn", "strncmp", "strndup",
|
||||
"strnlen", "strrchr", "strspn", "strstr", "strtod", "strtof", "strtol", "strtoll", "strtoq",
|
||||
"strtoul"
|
||||
]
|
||||
}
|
||||
deprecated predicate tainted = DefaultTaintTrackingImpl::tainted/2;
|
||||
|
||||
private DataFlow::Node getNodeForSource(Expr source) {
|
||||
isUserInput(source, _) and
|
||||
result = getNodeForExpr(source)
|
||||
}
|
||||
deprecated predicate taintedIncludingGlobalVars =
|
||||
DefaultTaintTrackingImpl::taintedIncludingGlobalVars/3;
|
||||
|
||||
private DataFlow::Node getNodeForExpr(Expr node) {
|
||||
result = DataFlow::exprNode(node)
|
||||
or
|
||||
// Some of the sources in `isUserInput` are intended to match the value of
|
||||
// an expression, while others (those modeled below) are intended to match
|
||||
// the taint that propagates out of an argument, like the `char *` argument
|
||||
// to `gets`. It's impossible here to tell which is which, but the "access
|
||||
// to argv" source is definitely not intended to match an output argument,
|
||||
// and it causes false positives if we let it.
|
||||
//
|
||||
// This case goes together with the similar (but not identical) rule in
|
||||
// `nodeIsBarrierIn`.
|
||||
result = DataFlow::definitionByReferenceNodeFromArgument(node) and
|
||||
not argv(node.(VariableAccess).getTarget())
|
||||
}
|
||||
deprecated predicate globalVarFromId = DefaultTaintTrackingImpl::globalVarFromId/1;
|
||||
|
||||
private class DefaultTaintTrackingCfg extends TaintTracking::Configuration {
|
||||
DefaultTaintTrackingCfg() { this = "DefaultTaintTrackingCfg" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) { source = getNodeForSource(_) }
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) { exists(adjustedSink(sink)) }
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) { nodeIsBarrier(node) }
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
private class ToGlobalVarTaintTrackingCfg extends TaintTracking::Configuration {
|
||||
ToGlobalVarTaintTrackingCfg() { this = "GlobalVarTaintTrackingCfg" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) { source = getNodeForSource(_) }
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) {
|
||||
sink.asVariable() instanceof GlobalOrNamespaceVariable
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
writesVariable(n1.asInstruction(), n2.asVariable().(GlobalOrNamespaceVariable))
|
||||
or
|
||||
readsVariable(n2.asInstruction(), n1.asVariable().(GlobalOrNamespaceVariable))
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) { nodeIsBarrier(node) }
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
private class FromGlobalVarTaintTrackingCfg extends TaintTracking2::Configuration {
|
||||
FromGlobalVarTaintTrackingCfg() { this = "FromGlobalVarTaintTrackingCfg" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) {
|
||||
// This set of sources should be reasonably small, which is good for
|
||||
// performance since the set of sinks is very large.
|
||||
exists(ToGlobalVarTaintTrackingCfg otherCfg | otherCfg.hasFlowTo(source))
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) { exists(adjustedSink(sink)) }
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
// Additional step for flow out of variables. There is no flow _into_
|
||||
// variables in this configuration, so this step only serves to take flow
|
||||
// out of a variable that's a source.
|
||||
readsVariable(n2.asInstruction(), n1.asVariable())
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) { nodeIsBarrier(node) }
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
private predicate readsVariable(LoadInstruction load, Variable var) {
|
||||
load.getSourceAddress().(VariableAddressInstruction).getAstVariable() = var
|
||||
}
|
||||
|
||||
private predicate writesVariable(StoreInstruction store, Variable var) {
|
||||
store.getDestinationAddress().(VariableAddressInstruction).getAstVariable() = var
|
||||
}
|
||||
|
||||
/**
|
||||
* A variable that has any kind of upper-bound check anywhere in the program. This is
|
||||
* biased towards being inclusive because there are a lot of valid ways of doing an
|
||||
* upper bounds checks if we don't consider where it occurs, for example:
|
||||
* ```
|
||||
* if (x < 10) { sink(x); }
|
||||
*
|
||||
* if (10 > y) { sink(y); }
|
||||
*
|
||||
* if (z > 10) { z = 10; }
|
||||
* sink(z);
|
||||
* ```
|
||||
*/
|
||||
// TODO: This coarse overapproximation, ported from the old taint tracking
|
||||
// library, could be replaced with an actual semantic check that a particular
|
||||
// variable _access_ is guarded by an upper-bound check. We probably don't want
|
||||
// to do this right away since it could expose a lot of FPs that were
|
||||
// previously suppressed by this predicate by coincidence.
|
||||
private predicate hasUpperBoundsCheck(Variable var) {
|
||||
exists(RelationalOperation oper, VariableAccess access |
|
||||
oper.getAnOperand() = access and
|
||||
access.getTarget() = var and
|
||||
// Comparing to 0 is not an upper bound check
|
||||
not oper.getAnOperand().getValue() = "0"
|
||||
)
|
||||
}
|
||||
|
||||
private predicate nodeIsBarrierEqualityCandidate(
|
||||
DataFlow::Node node, Operand access, Variable checkedVar
|
||||
) {
|
||||
readsVariable(node.asInstruction(), checkedVar) and
|
||||
any(IRGuardCondition guard).ensuresEq(access, _, _, node.asInstruction().getBlock(), true)
|
||||
}
|
||||
|
||||
cached
|
||||
private module Cached {
|
||||
cached
|
||||
predicate nodeIsBarrier(DataFlow::Node node) {
|
||||
exists(Variable checkedVar |
|
||||
readsVariable(node.asInstruction(), checkedVar) and
|
||||
hasUpperBoundsCheck(checkedVar)
|
||||
)
|
||||
or
|
||||
exists(Variable checkedVar, Operand access |
|
||||
/*
|
||||
* This node is guarded by a condition that forces the accessed variable
|
||||
* to equal something else. For example:
|
||||
* ```
|
||||
* x = taintsource()
|
||||
* if (x == 10) {
|
||||
* taintsink(x); // not considered tainted
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
|
||||
nodeIsBarrierEqualityCandidate(node, access, checkedVar) and
|
||||
readsVariable(access.getDef(), checkedVar)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
predicate nodeIsBarrierIn(DataFlow::Node node) {
|
||||
// don't use dataflow into taint sources, as this leads to duplicate results.
|
||||
exists(Expr source | isUserInput(source, _) |
|
||||
node = DataFlow::exprNode(source)
|
||||
or
|
||||
// This case goes together with the similar (but not identical) rule in
|
||||
// `getNodeForSource`.
|
||||
node = DataFlow::definitionByReferenceNodeFromArgument(source)
|
||||
)
|
||||
or
|
||||
// don't use dataflow into binary instructions if both operands are unpredictable
|
||||
exists(BinaryInstruction iTo |
|
||||
iTo = node.asInstruction() and
|
||||
not predictableInstruction(iTo.getLeft()) and
|
||||
not predictableInstruction(iTo.getRight()) and
|
||||
// propagate taint from either the pointer or the offset, regardless of predictability
|
||||
not iTo instanceof PointerArithmeticInstruction
|
||||
)
|
||||
or
|
||||
// don't use dataflow through calls to pure functions if two or more operands
|
||||
// are unpredictable
|
||||
exists(Instruction iFrom1, Instruction iFrom2, CallInstruction iTo |
|
||||
iTo = node.asInstruction() and
|
||||
isPureFunction(iTo.getStaticCallTarget().getName()) and
|
||||
iFrom1 = iTo.getAnArgument() and
|
||||
iFrom2 = iTo.getAnArgument() and
|
||||
not predictableInstruction(iFrom1) and
|
||||
not predictableInstruction(iFrom2) and
|
||||
iFrom1 != iFrom2
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
Element adjustedSink(DataFlow::Node sink) {
|
||||
// TODO: is it more appropriate to use asConvertedExpr here and avoid
|
||||
// `getConversion*`? Or will that cause us to miss some cases where there's
|
||||
// flow to a conversion (like a `ReferenceDereferenceExpr`) and we want to
|
||||
// pretend there was flow to the converted `Expr` for the sake of
|
||||
// compatibility.
|
||||
sink.asExpr().getConversion*() = result
|
||||
or
|
||||
// For compatibility, send flow from arguments to parameters, even for
|
||||
// functions with no body.
|
||||
exists(FunctionCall call, int i |
|
||||
sink.asExpr() = call.getArgument(pragma[only_bind_into](i)) and
|
||||
result = resolveCall(call).getParameter(pragma[only_bind_into](i))
|
||||
)
|
||||
or
|
||||
// For compatibility, send flow into a `Variable` if there is flow to any
|
||||
// Load or Store of that variable.
|
||||
exists(CopyInstruction copy |
|
||||
copy.getSourceValue() = sink.asInstruction() and
|
||||
(
|
||||
readsVariable(copy, result) or
|
||||
writesVariable(copy, result)
|
||||
) and
|
||||
not hasUpperBoundsCheck(result)
|
||||
)
|
||||
or
|
||||
// For compatibility, send flow into a `NotExpr` even if it's part of a
|
||||
// short-circuiting condition and thus might get skipped.
|
||||
result.(NotExpr).getOperand() = sink.asExpr()
|
||||
or
|
||||
// Taint postfix and prefix crement operations when their operand is tainted.
|
||||
result.(CrementOperation).getAnOperand() = sink.asExpr()
|
||||
or
|
||||
// Taint `e1 += e2`, `e &= e2` and friends when `e1` or `e2` is tainted.
|
||||
result.(AssignOperation).getAnOperand() = sink.asExpr()
|
||||
or
|
||||
result =
|
||||
sink.asOperand()
|
||||
.(SideEffectOperand)
|
||||
.getUse()
|
||||
.(ReadSideEffectInstruction)
|
||||
.getArgumentDef()
|
||||
.getUnconvertedResultExpression()
|
||||
}
|
||||
|
||||
/**
|
||||
* Step to return value of a modeled function when an input taints the
|
||||
* dereference of the return value.
|
||||
*/
|
||||
cached
|
||||
predicate additionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
exists(CallInstruction call, Function func, FunctionInput modelIn, FunctionOutput modelOut |
|
||||
n1.asOperand() = callInput(call, modelIn) and
|
||||
(
|
||||
func.(TaintFunction).hasTaintFlow(modelIn, modelOut)
|
||||
or
|
||||
func.(DataFlowFunction).hasDataFlow(modelIn, modelOut)
|
||||
) and
|
||||
call.getStaticCallTarget() = func and
|
||||
modelOut.isReturnValueDeref() and
|
||||
call = n2.asInstruction()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private import Cached
|
||||
|
||||
/**
|
||||
* Holds if `tainted` may contain taint from `source`.
|
||||
*
|
||||
* A tainted expression is either directly user input, or is
|
||||
* computed from user input in a way that users can probably
|
||||
* control the exact output of the computation.
|
||||
*
|
||||
* This doesn't include data flow through global variables.
|
||||
* If you need that you must call `taintedIncludingGlobalVars`.
|
||||
*/
|
||||
cached
|
||||
predicate tainted(Expr source, Element tainted) {
|
||||
exists(DefaultTaintTrackingCfg cfg, DataFlow::Node sink |
|
||||
cfg.hasFlow(getNodeForSource(source), sink) and
|
||||
tainted = adjustedSink(sink)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `tainted` may contain taint from `source`, where the taint passed
|
||||
* through a global variable named `globalVar`.
|
||||
*
|
||||
* A tainted expression is either directly user input, or is
|
||||
* computed from user input in a way that users can probably
|
||||
* control the exact output of the computation.
|
||||
*
|
||||
* This version gives the same results as tainted but also includes
|
||||
* data flow through global variables.
|
||||
*
|
||||
* The parameter `globalVar` is the qualified name of the last global variable
|
||||
* used to move the value from source to tainted. If the taint did not pass
|
||||
* through a global variable, then `globalVar = ""`.
|
||||
*/
|
||||
cached
|
||||
predicate taintedIncludingGlobalVars(Expr source, Element tainted, string globalVar) {
|
||||
tainted(source, tainted) and
|
||||
globalVar = ""
|
||||
or
|
||||
exists(
|
||||
ToGlobalVarTaintTrackingCfg toCfg, FromGlobalVarTaintTrackingCfg fromCfg,
|
||||
DataFlow::VariableNode variableNode, GlobalOrNamespaceVariable global, DataFlow::Node sink
|
||||
|
|
||||
global = variableNode.getVariable() and
|
||||
toCfg.hasFlow(getNodeForSource(source), variableNode) and
|
||||
fromCfg.hasFlow(variableNode, sink) and
|
||||
tainted = adjustedSink(sink) and
|
||||
global = globalVarFromId(globalVar)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the global variable whose qualified name is `id`. Use this predicate
|
||||
* together with `taintedIncludingGlobalVars`. Example:
|
||||
*
|
||||
* ```
|
||||
* exists(string varName |
|
||||
* taintedIncludingGlobalVars(source, tainted, varName) and
|
||||
* var = globalVarFromId(varName)
|
||||
* )
|
||||
* ```
|
||||
*/
|
||||
GlobalOrNamespaceVariable globalVarFromId(string id) { id = result.getQualifiedName() }
|
||||
|
||||
/**
|
||||
* Provides definitions for augmenting source/sink pairs with data-flow paths
|
||||
* between them. From a `@kind path-problem` query, import this module in the
|
||||
* global scope, extend `TaintTrackingConfiguration`, and use `taintedWithPath`
|
||||
* in place of `tainted`.
|
||||
*
|
||||
* Importing this module will also import the query predicates that contain the
|
||||
* taint paths.
|
||||
*/
|
||||
module TaintedWithPath {
|
||||
private newtype TSingleton = MkSingleton()
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration that matches sources and sinks in the same
|
||||
* way as the `tainted` predicate.
|
||||
*
|
||||
* Override `isSink` and `taintThroughGlobals` as needed, but do not provide
|
||||
* a characteristic predicate.
|
||||
*/
|
||||
class TaintTrackingConfiguration extends TSingleton {
|
||||
/** Override this to specify which elements are sources in this configuration. */
|
||||
predicate isSource(Expr source) { exists(getNodeForSource(source)) }
|
||||
|
||||
/** Override this to specify which elements are sinks in this configuration. */
|
||||
abstract predicate isSink(Element e);
|
||||
|
||||
/** Override this to specify which expressions are barriers in this configuration. */
|
||||
predicate isBarrier(Expr e) { nodeIsBarrier(getNodeForExpr(e)) }
|
||||
|
||||
/**
|
||||
* Override this predicate to `any()` to allow taint to flow through global
|
||||
* variables.
|
||||
*/
|
||||
predicate taintThroughGlobals() { none() }
|
||||
|
||||
/** Gets a textual representation of this element. */
|
||||
string toString() { result = "TaintTrackingConfiguration" }
|
||||
}
|
||||
|
||||
private class AdjustedConfiguration extends TaintTracking3::Configuration {
|
||||
AdjustedConfiguration() { this = "AdjustedConfiguration" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) {
|
||||
exists(TaintTrackingConfiguration cfg, Expr e |
|
||||
cfg.isSource(e) and source = getNodeForExpr(e)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) {
|
||||
exists(TaintTrackingConfiguration cfg | cfg.isSink(adjustedSink(sink)))
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
// Steps into and out of global variables
|
||||
exists(TaintTrackingConfiguration cfg | cfg.taintThroughGlobals() |
|
||||
writesVariable(n1.asInstruction(), n2.asVariable().(GlobalOrNamespaceVariable))
|
||||
or
|
||||
readsVariable(n2.asInstruction(), n1.asVariable().(GlobalOrNamespaceVariable))
|
||||
)
|
||||
or
|
||||
additionalTaintStep(n1, n2)
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) {
|
||||
exists(TaintTrackingConfiguration cfg, Expr e | cfg.isBarrier(e) and node = getNodeForExpr(e))
|
||||
}
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
/*
|
||||
* A sink `Element` may map to multiple `DataFlowX::PathNode`s via (the
|
||||
* inverse of) `adjustedSink`. For example, an `Expr` maps to all its
|
||||
* conversions, and a `Variable` maps to all loads and stores from it. Because
|
||||
* the path node is part of the tuple that constitutes the alert, this leads
|
||||
* to duplicate alerts.
|
||||
*
|
||||
* To avoid showing duplicates, we edit the graph to replace the final node
|
||||
* coming from the data-flow library with a node that matches exactly the
|
||||
* `Element` sink that's requested.
|
||||
*
|
||||
* The same is done for sources.
|
||||
*/
|
||||
|
||||
private newtype TPathNode =
|
||||
TWrapPathNode(DataFlow3::PathNode n) or
|
||||
// There's a single newtype constructor for both sources and sinks since
|
||||
// that makes it easiest to deal with the case where source = sink.
|
||||
TEndpointPathNode(Element e) {
|
||||
exists(AdjustedConfiguration cfg, DataFlow3::Node sourceNode, DataFlow3::Node sinkNode |
|
||||
cfg.hasFlow(sourceNode, sinkNode)
|
||||
|
|
||||
sourceNode = getNodeForExpr(e) and
|
||||
exists(TaintTrackingConfiguration ttCfg | ttCfg.isSource(e))
|
||||
or
|
||||
e = adjustedSink(sinkNode) and
|
||||
exists(TaintTrackingConfiguration ttCfg | ttCfg.isSink(e))
|
||||
)
|
||||
}
|
||||
|
||||
/** An opaque type used for the nodes of a data-flow path. */
|
||||
class PathNode extends TPathNode {
|
||||
/** Gets a textual representation of this element. */
|
||||
string toString() { none() }
|
||||
|
||||
/**
|
||||
* Holds if this element is at the specified location.
|
||||
* The location spans column `startcolumn` of line `startline` to
|
||||
* column `endcolumn` of line `endline` in file `filepath`.
|
||||
* For more information, see
|
||||
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
none()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* INTERNAL: Do not use.
|
||||
*/
|
||||
module Private {
|
||||
/** Gets a predecessor `PathNode` of `pathNode`, if any. */
|
||||
PathNode getAPredecessor(PathNode pathNode) { edges(result, pathNode) }
|
||||
|
||||
/** Gets the element that `pathNode` wraps, if any. */
|
||||
Element getElementFromPathNode(PathNode pathNode) {
|
||||
exists(DataFlow::Node node | node = pathNode.(WrapPathNode).inner().getNode() |
|
||||
result = node.asInstruction().getAst()
|
||||
or
|
||||
result = node.asOperand().getDef().getAst()
|
||||
)
|
||||
or
|
||||
result = pathNode.(EndpointPathNode).inner()
|
||||
}
|
||||
}
|
||||
|
||||
private class WrapPathNode extends PathNode, TWrapPathNode {
|
||||
DataFlow3::PathNode inner() { this = TWrapPathNode(result) }
|
||||
|
||||
override string toString() { result = this.inner().toString() }
|
||||
|
||||
override predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
this.inner().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
}
|
||||
}
|
||||
|
||||
private class EndpointPathNode extends PathNode, TEndpointPathNode {
|
||||
Expr inner() { this = TEndpointPathNode(result) }
|
||||
|
||||
override string toString() { result = this.inner().toString() }
|
||||
|
||||
override predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
this.inner()
|
||||
.getLocation()
|
||||
.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
}
|
||||
}
|
||||
|
||||
/** A PathNode whose `Element` is a source. It may also be a sink. */
|
||||
private class InitialPathNode extends EndpointPathNode {
|
||||
InitialPathNode() { exists(TaintTrackingConfiguration cfg | cfg.isSource(this.inner())) }
|
||||
}
|
||||
|
||||
/** A PathNode whose `Element` is a sink. It may also be a source. */
|
||||
private class FinalPathNode extends EndpointPathNode {
|
||||
FinalPathNode() { exists(TaintTrackingConfiguration cfg | cfg.isSink(this.inner())) }
|
||||
}
|
||||
|
||||
/** Holds if `(a,b)` is an edge in the graph of data flow path explanations. */
|
||||
query predicate edges(PathNode a, PathNode b) {
|
||||
DataFlow3::PathGraph::edges(a.(WrapPathNode).inner(), b.(WrapPathNode).inner())
|
||||
or
|
||||
// To avoid showing trivial-looking steps, we _replace_ the last node instead
|
||||
// of adding an edge out of it.
|
||||
exists(WrapPathNode sinkNode |
|
||||
DataFlow3::PathGraph::edges(a.(WrapPathNode).inner(), sinkNode.inner()) and
|
||||
b.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
or
|
||||
// Same for the first node
|
||||
exists(WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::edges(sourceNode.inner(), b.(WrapPathNode).inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(a.(InitialPathNode).inner())
|
||||
)
|
||||
or
|
||||
// Finally, handle the case where the path goes directly from a source to a
|
||||
// sink, meaning that they both need to be translated.
|
||||
exists(WrapPathNode sinkNode, WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::edges(sourceNode.inner(), sinkNode.inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(a.(InitialPathNode).inner()) and
|
||||
b.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is flow from `arg` to `out` across a call that can be summarized by the flow
|
||||
* from `par` to `ret` within it, in the graph of data flow path explanations.
|
||||
*/
|
||||
query predicate subpaths(PathNode arg, PathNode par, PathNode ret, PathNode out) {
|
||||
DataFlow3::PathGraph::subpaths(arg.(WrapPathNode).inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), out.(WrapPathNode).inner())
|
||||
or
|
||||
// To avoid showing trivial-looking steps, we _replace_ the last node instead
|
||||
// of adding an edge out of it.
|
||||
exists(WrapPathNode sinkNode |
|
||||
DataFlow3::PathGraph::subpaths(arg.(WrapPathNode).inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), sinkNode.inner()) and
|
||||
out.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
or
|
||||
// Same for the first node
|
||||
exists(WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::subpaths(sourceNode.inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), out.(WrapPathNode).inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(arg.(InitialPathNode).inner())
|
||||
)
|
||||
or
|
||||
// Finally, handle the case where the path goes directly from a source to a
|
||||
// sink, meaning that they both need to be translated.
|
||||
exists(WrapPathNode sinkNode, WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::subpaths(sourceNode.inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), sinkNode.inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(arg.(InitialPathNode).inner()) and
|
||||
out.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
}
|
||||
|
||||
/** Holds if `n` is a node in the graph of data flow path explanations. */
|
||||
query predicate nodes(PathNode n, string key, string val) {
|
||||
key = "semmle.label" and val = n.toString()
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `tainted` may contain taint from `source`, where `sourceNode` and
|
||||
* `sinkNode` are the corresponding `PathNode`s that can be used in a query
|
||||
* to provide path explanations. Extend `TaintTrackingConfiguration` to use
|
||||
* this predicate.
|
||||
*
|
||||
* A tainted expression is either directly user input, or is computed from
|
||||
* user input in a way that users can probably control the exact output of
|
||||
* the computation.
|
||||
*/
|
||||
predicate taintedWithPath(Expr source, Element tainted, PathNode sourceNode, PathNode sinkNode) {
|
||||
exists(AdjustedConfiguration cfg, DataFlow3::Node flowSource, DataFlow3::Node flowSink |
|
||||
source = sourceNode.(InitialPathNode).inner() and
|
||||
flowSource = getNodeForExpr(source) and
|
||||
cfg.hasFlow(flowSource, flowSink) and
|
||||
tainted = adjustedSink(flowSink) and
|
||||
tainted = sinkNode.(FinalPathNode).inner()
|
||||
)
|
||||
}
|
||||
|
||||
private predicate isGlobalVariablePathNode(WrapPathNode n) {
|
||||
n.inner().getNode().asVariable() instanceof GlobalOrNamespaceVariable
|
||||
}
|
||||
|
||||
private predicate edgesWithoutGlobals(PathNode a, PathNode b) {
|
||||
edges(a, b) and
|
||||
not isGlobalVariablePathNode(a) and
|
||||
not isGlobalVariablePathNode(b)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `tainted` can be reached from a taint source without passing
|
||||
* through a global variable.
|
||||
*/
|
||||
predicate taintedWithoutGlobals(Element tainted) {
|
||||
exists(AdjustedConfiguration cfg, PathNode sourceNode, FinalPathNode sinkNode |
|
||||
cfg.isSource(sourceNode.(WrapPathNode).inner().getNode()) and
|
||||
edgesWithoutGlobals+(sourceNode, sinkNode) and
|
||||
tainted = sinkNode.inner()
|
||||
)
|
||||
}
|
||||
}
|
||||
deprecated module TaintedWithPath = DefaultTaintTrackingImpl::TaintedWithPath;
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
*/
|
||||
|
||||
private import cpp
|
||||
import semmle.code.cpp.ir.dataflow.DataFlow
|
||||
private import semmle.code.cpp.ir.IR
|
||||
|
||||
/**
|
||||
@@ -25,18 +24,18 @@ abstract class MustFlowConfiguration extends string {
|
||||
/**
|
||||
* Holds if `source` is a relevant data flow source.
|
||||
*/
|
||||
abstract predicate isSource(DataFlow::Node source);
|
||||
abstract predicate isSource(Instruction source);
|
||||
|
||||
/**
|
||||
* Holds if `sink` is a relevant data flow sink.
|
||||
*/
|
||||
abstract predicate isSink(DataFlow::Node sink);
|
||||
abstract predicate isSink(Operand sink);
|
||||
|
||||
/**
|
||||
* Holds if the additional flow step from `node1` to `node2` must be taken
|
||||
* into account in the analysis.
|
||||
*/
|
||||
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) { none() }
|
||||
predicate isAdditionalFlowStep(Operand node1, Instruction node2) { none() }
|
||||
|
||||
/** Holds if this configuration allows flow from arguments to parameters. */
|
||||
predicate allowInterproceduralFlow() { any() }
|
||||
@@ -48,17 +47,17 @@ abstract class MustFlowConfiguration extends string {
|
||||
* included in the module `PathGraph`.
|
||||
*/
|
||||
final predicate hasFlowPath(MustFlowPathNode source, MustFlowPathSink sink) {
|
||||
this.isSource(source.getNode()) and
|
||||
this.isSource(source.getInstruction()) and
|
||||
source.getASuccessor+() = sink
|
||||
}
|
||||
}
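As a rough illustration of the interface above, the following is a minimal sketch of a configuration written against the new `Instruction`/`Operand` signatures. The class name and the source/sink choices are hypothetical and not part of this change.

```
class CallArgumentMustFlow extends MustFlowConfiguration {
  CallArgumentMustFlow() { this = "CallArgumentMustFlow" }

  // Hypothetical source: the result of any call instruction.
  override predicate isSource(Instruction source) { source instanceof CallInstruction }

  // Hypothetical sink: any operand that is passed as a call argument.
  override predicate isSink(Operand sink) { sink instanceof ArgumentOperand }
}
```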
/** Holds if `node` flows from a source. */
|
||||
pragma[nomagic]
|
||||
private predicate flowsFromSource(DataFlow::Node node, MustFlowConfiguration config) {
|
||||
private predicate flowsFromSource(Instruction node, MustFlowConfiguration config) {
|
||||
config.isSource(node)
|
||||
or
|
||||
exists(DataFlow::Node mid |
|
||||
exists(Instruction mid |
|
||||
step(mid, node, config) and
|
||||
flowsFromSource(mid, pragma[only_bind_into](config))
|
||||
)
|
||||
@@ -66,12 +65,12 @@ private predicate flowsFromSource(DataFlow::Node node, MustFlowConfiguration con
|
||||
|
||||
/** Holds if `node` flows to a sink. */
|
||||
pragma[nomagic]
|
||||
private predicate flowsToSink(DataFlow::Node node, MustFlowConfiguration config) {
|
||||
private predicate flowsToSink(Instruction node, MustFlowConfiguration config) {
|
||||
flowsFromSource(node, pragma[only_bind_into](config)) and
|
||||
(
|
||||
config.isSink(node)
|
||||
config.isSink(node.getAUse())
|
||||
or
|
||||
exists(DataFlow::Node mid |
|
||||
exists(Instruction mid |
|
||||
step(node, mid, config) and
|
||||
flowsToSink(mid, pragma[only_bind_into](config))
|
||||
)
|
||||
@@ -198,12 +197,13 @@ private module Cached {
|
||||
}
|
||||
|
||||
cached
|
||||
predicate step(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
|
||||
instructionToOperandStep(nodeFrom.asInstruction(), nodeTo.asOperand())
|
||||
predicate step(Instruction nodeFrom, Instruction nodeTo) {
|
||||
exists(Operand mid |
|
||||
instructionToOperandStep(nodeFrom, mid) and
|
||||
operandToInstructionStep(mid, nodeTo)
|
||||
)
|
||||
or
|
||||
flowThroughCallable(nodeFrom.asInstruction(), nodeTo.asInstruction())
|
||||
or
|
||||
operandToInstructionStep(nodeFrom.asOperand(), nodeTo.asInstruction())
|
||||
flowThroughCallable(nodeFrom, nodeTo)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,12 +213,12 @@ private module Cached {
|
||||
* way around.
|
||||
*/
|
||||
pragma[inline]
|
||||
private Declaration getEnclosingCallable(DataFlow::Node n) {
|
||||
pragma[only_bind_into](result) = pragma[only_bind_out](n).getEnclosingCallable()
|
||||
private IRFunction getEnclosingCallable(Instruction n) {
|
||||
pragma[only_bind_into](result) = pragma[only_bind_out](n).getEnclosingIRFunction()
|
||||
}
|
||||
|
||||
/** Holds if `nodeFrom` flows to `nodeTo`. */
|
||||
private predicate step(DataFlow::Node nodeFrom, DataFlow::Node nodeTo, MustFlowConfiguration config) {
|
||||
private predicate step(Instruction nodeFrom, Instruction nodeTo, MustFlowConfiguration config) {
|
||||
exists(config) and
|
||||
Cached::step(pragma[only_bind_into](nodeFrom), pragma[only_bind_into](nodeTo)) and
|
||||
(
|
||||
@@ -227,37 +227,37 @@ private predicate step(DataFlow::Node nodeFrom, DataFlow::Node nodeTo, MustFlowC
|
||||
getEnclosingCallable(nodeFrom) = getEnclosingCallable(nodeTo)
|
||||
)
|
||||
or
|
||||
config.isAdditionalFlowStep(nodeFrom, nodeTo)
|
||||
config.isAdditionalFlowStep(nodeFrom.getAUse(), nodeTo)
|
||||
}
|
||||
|
||||
private newtype TLocalPathNode =
|
||||
MkLocalPathNode(DataFlow::Node n, MustFlowConfiguration config) {
|
||||
MkLocalPathNode(Instruction n, MustFlowConfiguration config) {
|
||||
flowsToSink(n, config) and
|
||||
(
|
||||
config.isSource(n)
|
||||
or
|
||||
exists(MustFlowPathNode mid | step(mid.getNode(), n, config))
|
||||
exists(MustFlowPathNode mid | step(mid.getInstruction(), n, config))
|
||||
)
|
||||
}
|
||||
|
||||
/** A `Node` that is in a path from a source to a sink. */
|
||||
class MustFlowPathNode extends TLocalPathNode {
|
||||
DataFlow::Node n;
|
||||
Instruction n;
|
||||
|
||||
MustFlowPathNode() { this = MkLocalPathNode(n, _) }
|
||||
|
||||
/** Gets the underlying node. */
|
||||
DataFlow::Node getNode() { result = n }
|
||||
Instruction getInstruction() { result = n }
|
||||
|
||||
/** Gets a textual representation of this node. */
|
||||
string toString() { result = n.toString() }
|
||||
string toString() { result = n.getAst().toString() }
|
||||
|
||||
/** Gets the location of this element. */
|
||||
Location getLocation() { result = n.getLocation() }
|
||||
|
||||
/** Gets a successor node, if any. */
|
||||
MustFlowPathNode getASuccessor() {
|
||||
step(this.getNode(), result.getNode(), this.getConfiguration())
|
||||
step(this.getInstruction(), result.getInstruction(), this.getConfiguration())
|
||||
}
|
||||
|
||||
/** Gets the associated configuration. */
|
||||
@@ -265,7 +265,7 @@ class MustFlowPathNode extends TLocalPathNode {
|
||||
}
|
||||
|
||||
private class MustFlowPathSink extends MustFlowPathNode {
|
||||
MustFlowPathSink() { this.getConfiguration().isSink(this.getNode()) }
|
||||
MustFlowPathSink() { this.getConfiguration().isSink(this.getInstruction().getAUse()) }
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
(Four file diffs suppressed because they are too large.)
@@ -915,18 +915,57 @@ private module Cached {
|
||||
TDataFlowCallNone() or
|
||||
TDataFlowCallSome(DataFlowCall call)
|
||||
|
||||
cached
|
||||
newtype TParamNodeOption =
|
||||
TParamNodeNone() or
|
||||
TParamNodeSome(ParamNode p)
|
||||
|
||||
cached
|
||||
newtype TReturnCtx =
|
||||
TReturnCtxNone() or
|
||||
TReturnCtxNoFlowThrough() or
|
||||
TReturnCtxMaybeFlowThrough(ReturnPosition pos)
|
||||
|
||||
cached
|
||||
newtype TTypedContentApprox =
|
||||
MkTypedContentApprox(ContentApprox c, DataFlowType t) {
|
||||
exists(Content cont |
|
||||
c = getContentApprox(cont) and
|
||||
store(_, cont, _, _, t)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
newtype TTypedContent = MkTypedContent(Content c, DataFlowType t) { store(_, c, _, _, t) }
|
||||
|
||||
cached
|
||||
TypedContent getATypedContent(TypedContentApprox c) {
|
||||
exists(ContentApprox cls, DataFlowType t, Content cont |
|
||||
c = MkTypedContentApprox(cls, pragma[only_bind_into](t)) and
|
||||
result = MkTypedContent(cont, pragma[only_bind_into](t)) and
|
||||
cls = getContentApprox(cont)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
newtype TAccessPathFront =
|
||||
TFrontNil(DataFlowType t) or
|
||||
TFrontHead(TypedContent tc)
|
||||
|
||||
cached
|
||||
newtype TApproxAccessPathFront =
|
||||
TApproxFrontNil(DataFlowType t) or
|
||||
TApproxFrontHead(TypedContentApprox tc)
|
||||
|
||||
cached
|
||||
newtype TAccessPathFrontOption =
|
||||
TAccessPathFrontNone() or
|
||||
TAccessPathFrontSome(AccessPathFront apf)
|
||||
|
||||
cached
|
||||
newtype TApproxAccessPathFrontOption =
|
||||
TApproxAccessPathFrontNone() or
|
||||
TApproxAccessPathFrontSome(ApproxAccessPathFront apf)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1304,6 +1343,113 @@ class DataFlowCallOption extends TDataFlowCallOption {
|
||||
}
|
||||
}
|
||||
|
||||
/** An optional `ParamNode`. */
|
||||
class ParamNodeOption extends TParamNodeOption {
|
||||
string toString() {
|
||||
this = TParamNodeNone() and
|
||||
result = "(none)"
|
||||
or
|
||||
exists(ParamNode p |
|
||||
this = TParamNodeSome(p) and
|
||||
result = p.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A return context used to calculate flow summaries in reverse flow.
|
||||
*
|
||||
* The possible values are:
|
||||
*
|
||||
* - `TReturnCtxNone()`: no return flow.
|
||||
* - `TReturnCtxNoFlowThrough()`: return flow, but flow through is not possible.
|
||||
* - `TReturnCtxMaybeFlowThrough(ReturnPosition pos)`: return flow, of kind `pos`, and
|
||||
* flow through may be possible.
|
||||
*/
|
||||
class ReturnCtx extends TReturnCtx {
|
||||
string toString() {
|
||||
this = TReturnCtxNone() and
|
||||
result = "(none)"
|
||||
or
|
||||
this = TReturnCtxNoFlowThrough() and
|
||||
result = "(no flow through)"
|
||||
or
|
||||
exists(ReturnPosition pos |
|
||||
this = TReturnCtxMaybeFlowThrough(pos) and
|
||||
result = pos.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** An approximated `Content` tagged with the type of a containing object. */
|
||||
class TypedContentApprox extends MkTypedContentApprox {
|
||||
private ContentApprox c;
|
||||
private DataFlowType t;
|
||||
|
||||
TypedContentApprox() { this = MkTypedContentApprox(c, t) }
|
||||
|
||||
/** Gets a typed content approximated by this value. */
|
||||
TypedContent getATypedContent() { result = getATypedContent(this) }
|
||||
|
||||
/** Gets the container type. */
|
||||
DataFlowType getContainerType() { result = t }
|
||||
|
||||
/** Gets a textual representation of this approximated content. */
|
||||
string toString() { result = c.toString() }
|
||||
}
|
||||
|
||||
/**
|
||||
* The front of an approximated access path. This is either a head or a nil.
|
||||
*/
|
||||
abstract class ApproxAccessPathFront extends TApproxAccessPathFront {
|
||||
abstract string toString();
|
||||
|
||||
abstract DataFlowType getType();
|
||||
|
||||
abstract boolean toBoolNonEmpty();
|
||||
|
||||
pragma[nomagic]
|
||||
TypedContent getAHead() {
|
||||
exists(TypedContentApprox cont |
|
||||
this = TApproxFrontHead(cont) and
|
||||
result = cont.getATypedContent()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class ApproxAccessPathFrontNil extends ApproxAccessPathFront, TApproxFrontNil {
|
||||
private DataFlowType t;
|
||||
|
||||
ApproxAccessPathFrontNil() { this = TApproxFrontNil(t) }
|
||||
|
||||
override string toString() { result = ppReprType(t) }
|
||||
|
||||
override DataFlowType getType() { result = t }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = false }
|
||||
}
|
||||
|
||||
class ApproxAccessPathFrontHead extends ApproxAccessPathFront, TApproxFrontHead {
|
||||
private TypedContentApprox tc;
|
||||
|
||||
ApproxAccessPathFrontHead() { this = TApproxFrontHead(tc) }
|
||||
|
||||
override string toString() { result = tc.toString() }
|
||||
|
||||
override DataFlowType getType() { result = tc.getContainerType() }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = true }
|
||||
}
|
||||
|
||||
/** An optional approximated access path front. */
|
||||
class ApproxAccessPathFrontOption extends TApproxAccessPathFrontOption {
|
||||
string toString() {
|
||||
this = TApproxAccessPathFrontNone() and result = "<none>"
|
||||
or
|
||||
this = TApproxAccessPathFrontSome(any(ApproxAccessPathFront apf | result = apf.toString()))
|
||||
}
|
||||
}
|
||||
|
||||
/** A `Content` tagged with the type of a containing object. */
|
||||
class TypedContent extends MkTypedContent {
|
||||
private Content c;
|
||||
@@ -1336,7 +1482,7 @@ abstract class AccessPathFront extends TAccessPathFront {
|
||||
|
||||
abstract DataFlowType getType();
|
||||
|
||||
abstract boolean toBoolNonEmpty();
|
||||
abstract ApproxAccessPathFront toApprox();
|
||||
|
||||
TypedContent getHead() { this = TFrontHead(result) }
|
||||
}
|
||||
@@ -1350,7 +1496,7 @@ class AccessPathFrontNil extends AccessPathFront, TFrontNil {
|
||||
|
||||
override DataFlowType getType() { result = t }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = false }
|
||||
override ApproxAccessPathFront toApprox() { result = TApproxFrontNil(t) }
|
||||
}
|
||||
|
||||
class AccessPathFrontHead extends AccessPathFront, TFrontHead {
|
||||
@@ -1362,7 +1508,7 @@ class AccessPathFrontHead extends AccessPathFront, TFrontHead {
|
||||
|
||||
override DataFlowType getType() { result = tc.getContainerType() }
|
||||
|
||||
override boolean toBoolNonEmpty() { result = true }
|
||||
override ApproxAccessPathFront toApprox() { result.getAHead() = tc }
|
||||
}
|
||||
|
||||
/** An optional access path front. */
|
||||
|
||||
@@ -136,6 +136,18 @@ module Consistency {
|
||||
msg = "Local flow step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
query predicate readStepIsLocal(Node n1, Node n2, string msg) {
|
||||
readStep(n1, _, n2) and
|
||||
nodeGetEnclosingCallable(n1) != nodeGetEnclosingCallable(n2) and
|
||||
msg = "Read step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
query predicate storeStepIsLocal(Node n1, Node n2, string msg) {
|
||||
storeStep(n1, _, n2) and
|
||||
nodeGetEnclosingCallable(n1) != nodeGetEnclosingCallable(n2) and
|
||||
msg = "Store step does not preserve enclosing callable."
|
||||
}
|
||||
|
||||
private DataFlowType typeRepr() { result = getNodeType(_) }
|
||||
|
||||
query predicate compatibleTypesReflexive(DataFlowType t, string msg) {
|
||||
@@ -232,4 +244,25 @@ module Consistency {
|
||||
not callable = viableCallable(call) and
|
||||
not any(ConsistencyConfiguration c).viableImplInCallContextTooLargeExclude(call, ctx, callable)
|
||||
}
|
||||
|
||||
query predicate uniqueParameterNodeAtPosition(
|
||||
DataFlowCallable c, ParameterPosition pos, Node p, string msg
|
||||
) {
|
||||
isParameterNode(p, c, pos) and
|
||||
not exists(unique(Node p0 | isParameterNode(p0, c, pos))) and
|
||||
msg = "Parameters with overlapping positions."
|
||||
}
|
||||
|
||||
query predicate uniqueParameterNodePosition(
|
||||
DataFlowCallable c, ParameterPosition pos, Node p, string msg
|
||||
) {
|
||||
isParameterNode(p, c, pos) and
|
||||
not exists(unique(ParameterPosition pos0 | isParameterNode(p, c, pos0))) and
|
||||
msg = "Parameter node with multiple positions."
|
||||
}
|
||||
|
||||
query predicate uniqueContentApprox(Content c, string msg) {
|
||||
not exists(unique(ContentApprox approx | approx = getContentApprox(c))) and
|
||||
msg = "Non-unique content approximation."
|
||||
}
|
||||
}
|
||||
|
||||
@@ -400,6 +400,13 @@ predicate additionalLambdaFlowStep(Node nodeFrom, Node nodeTo, boolean preserves
|
||||
*/
|
||||
predicate allowParameterReturnInSelf(ParameterNode p) { none() }
|
||||
|
||||
/** An approximated `Content`. */
|
||||
class ContentApprox = Unit;
|
||||
|
||||
/** Gets an approximated value for content `c`. */
|
||||
pragma[inline]
|
||||
ContentApprox getContentApprox(Content c) { any() }
|
||||
|
||||
private class MyConsistencyConfiguration extends Consistency::ConsistencyConfiguration {
|
||||
override predicate argHasPostUpdateExclude(ArgumentNode n) {
|
||||
// The rules for whether an IR argument gets a post-update node are too
|
||||
|
||||
@@ -0,0 +1,644 @@
|
||||
/**
|
||||
* INTERNAL: Do not use.
|
||||
*
|
||||
* An IR taint tracking library that uses an IR DataFlow configuration to track
|
||||
* taint from user inputs as defined by `semmle.code.cpp.security.Security`.
|
||||
*/
|
||||
|
||||
import cpp
|
||||
import semmle.code.cpp.security.Security
|
||||
private import semmle.code.cpp.ir.dataflow.DataFlow
|
||||
private import semmle.code.cpp.ir.dataflow.internal.DataFlowUtil
|
||||
private import semmle.code.cpp.ir.dataflow.DataFlow3
|
||||
private import semmle.code.cpp.ir.IR
|
||||
private import semmle.code.cpp.ir.dataflow.ResolveCall
|
||||
private import semmle.code.cpp.controlflow.IRGuards
|
||||
private import semmle.code.cpp.models.interfaces.Taint
|
||||
private import semmle.code.cpp.models.interfaces.DataFlow
|
||||
private import semmle.code.cpp.ir.dataflow.TaintTracking
|
||||
private import semmle.code.cpp.ir.dataflow.TaintTracking2
|
||||
private import semmle.code.cpp.ir.dataflow.TaintTracking3
|
||||
private import semmle.code.cpp.ir.dataflow.internal.ModelUtil
|
||||
|
||||
/**
|
||||
* A predictable instruction is one where an external user can predict
|
||||
* the value. For example, a literal in the source code is considered
|
||||
* predictable.
|
||||
*/
|
||||
private predicate predictableInstruction(Instruction instr) {
|
||||
instr instanceof ConstantInstruction
|
||||
or
|
||||
instr instanceof StringConstantInstruction
|
||||
or
|
||||
// This could be a conversion on a string literal
|
||||
predictableInstruction(instr.(UnaryInstruction).getUnary())
|
||||
}
|
||||
|
||||
/**
|
||||
* Functions that we should only allow taint to flow through (to the return
|
||||
* value) if all but the source argument are 'predictable'. This is done to
|
||||
* emulate the old security library's implementation rather than due to any
|
||||
* strong belief that this is the right approach.
|
||||
*
|
||||
* Note that the list itself is not very principled; it consists of all the
|
||||
* functions listed in the old security library's [default] `isPureFunction`
|
||||
* that have more than one argument, but are not in the old taint tracking
|
||||
* library's `returnArgument` predicate.
|
||||
*/
|
||||
predicate predictableOnlyFlow(string name) {
|
||||
name =
|
||||
[
|
||||
"strcasestr", "strchnul", "strchr", "strchrnul", "strcmp", "strcspn", "strncmp", "strndup",
|
||||
"strnlen", "strrchr", "strspn", "strstr", "strtod", "strtof", "strtol", "strtoll", "strtoq",
|
||||
"strtoul"
|
||||
]
|
||||
}
|
||||
|
||||
private DataFlow::Node getNodeForSource(Expr source) {
|
||||
isUserInput(source, _) and
|
||||
result = getNodeForExpr(source)
|
||||
}
|
||||
|
||||
private DataFlow::Node getNodeForExpr(Expr node) {
|
||||
result = DataFlow::exprNode(node)
|
||||
or
|
||||
// Some of the sources in `isUserInput` are intended to match the value of
|
||||
// an expression, while others (those modeled below) are intended to match
|
||||
// the taint that propagates out of an argument, like the `char *` argument
|
||||
// to `gets`. It's impossible here to tell which is which, but the "access
|
||||
// to argv" source is definitely not intended to match an output argument,
|
||||
// and it causes false positives if we let it.
|
||||
//
|
||||
// This case goes together with the similar (but not identical) rule in
|
||||
// `nodeIsBarrierIn`.
|
||||
result = DataFlow::definitionByReferenceNodeFromArgument(node) and
|
||||
not argv(node.(VariableAccess).getTarget())
|
||||
}
|
||||
|
||||
private class DefaultTaintTrackingCfg extends TaintTracking::Configuration {
|
||||
DefaultTaintTrackingCfg() { this = "DefaultTaintTrackingCfg" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) { source = getNodeForSource(_) }
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) { exists(adjustedSink(sink)) }
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) { nodeIsBarrier(node) }
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
private class ToGlobalVarTaintTrackingCfg extends TaintTracking::Configuration {
|
||||
ToGlobalVarTaintTrackingCfg() { this = "GlobalVarTaintTrackingCfg" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) { source = getNodeForSource(_) }
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) {
|
||||
sink.asVariable() instanceof GlobalOrNamespaceVariable
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
writesVariable(n1.asInstruction(), n2.asVariable().(GlobalOrNamespaceVariable))
|
||||
or
|
||||
readsVariable(n2.asInstruction(), n1.asVariable().(GlobalOrNamespaceVariable))
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) { nodeIsBarrier(node) }
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
private class FromGlobalVarTaintTrackingCfg extends TaintTracking2::Configuration {
|
||||
FromGlobalVarTaintTrackingCfg() { this = "FromGlobalVarTaintTrackingCfg" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) {
|
||||
// This set of sources should be reasonably small, which is good for
|
||||
// performance since the set of sinks is very large.
|
||||
exists(ToGlobalVarTaintTrackingCfg otherCfg | otherCfg.hasFlowTo(source))
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) { exists(adjustedSink(sink)) }
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
// Additional step for flow out of variables. There is no flow _into_
|
||||
// variables in this configuration, so this step only serves to take flow
|
||||
// out of a variable that's a source.
|
||||
readsVariable(n2.asInstruction(), n1.asVariable())
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) { nodeIsBarrier(node) }
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
private predicate readsVariable(LoadInstruction load, Variable var) {
|
||||
load.getSourceAddress().(VariableAddressInstruction).getAstVariable() = var
|
||||
}
|
||||
|
||||
private predicate writesVariable(StoreInstruction store, Variable var) {
|
||||
store.getDestinationAddress().(VariableAddressInstruction).getAstVariable() = var
|
||||
}
|
||||
|
||||
/**
|
||||
* A variable that has any kind of upper-bound check anywhere in the program. This is
|
||||
* biased towards being inclusive because there are a lot of valid ways of doing an
|
||||
* upper-bounds check if we don't consider where it occurs, for example:
|
||||
* ```
|
||||
* if (x < 10) { sink(x); }
|
||||
*
|
||||
* if (10 > y) { sink(y); }
|
||||
*
|
||||
* if (z > 10) { z = 10; }
|
||||
* sink(z);
|
||||
* ```
|
||||
*/
|
||||
// TODO: This coarse overapproximation, ported from the old taint tracking
|
||||
// library, could be replaced with an actual semantic check that a particular
|
||||
// variable _access_ is guarded by an upper-bound check. We probably don't want
|
||||
// to do this right away since it could expose a lot of FPs that were
|
||||
// previously suppressed by this predicate by coincidence.
|
||||
private predicate hasUpperBoundsCheck(Variable var) {
|
||||
exists(RelationalOperation oper, VariableAccess access |
|
||||
oper.getAnOperand() = access and
|
||||
access.getTarget() = var and
|
||||
// Comparing to 0 is not an upper bound check
|
||||
not oper.getAnOperand().getValue() = "0"
|
||||
)
|
||||
}
|
||||
|
||||
private predicate nodeIsBarrierEqualityCandidate(
|
||||
DataFlow::Node node, Operand access, Variable checkedVar
|
||||
) {
|
||||
readsVariable(node.asInstruction(), checkedVar) and
|
||||
any(IRGuardCondition guard).ensuresEq(access, _, _, node.asInstruction().getBlock(), true)
|
||||
}
|
||||
|
||||
cached
|
||||
private module Cached {
|
||||
cached
|
||||
predicate nodeIsBarrier(DataFlow::Node node) {
|
||||
exists(Variable checkedVar |
|
||||
readsVariable(node.asInstruction(), checkedVar) and
|
||||
hasUpperBoundsCheck(checkedVar)
|
||||
)
|
||||
or
|
||||
exists(Variable checkedVar, Operand access |
|
||||
/*
|
||||
* This node is guarded by a condition that forces the accessed variable
|
||||
* to equal something else. For example:
|
||||
* ```
|
||||
* x = taintsource()
|
||||
* if (x == 10) {
|
||||
* taintsink(x); // not considered tainted
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
|
||||
nodeIsBarrierEqualityCandidate(node, access, checkedVar) and
|
||||
readsVariable(access.getDef(), checkedVar)
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
predicate nodeIsBarrierIn(DataFlow::Node node) {
|
||||
// don't use dataflow into taint sources, as this leads to duplicate results.
|
||||
exists(Expr source | isUserInput(source, _) |
|
||||
node = DataFlow::exprNode(source)
|
||||
or
|
||||
// This case goes together with the similar (but not identical) rule in
|
||||
// `getNodeForSource`.
|
||||
node = DataFlow::definitionByReferenceNodeFromArgument(source)
|
||||
)
|
||||
or
|
||||
// don't use dataflow into binary instructions if both operands are unpredictable
|
||||
exists(BinaryInstruction iTo |
|
||||
iTo = node.asInstruction() and
|
||||
not predictableInstruction(iTo.getLeft()) and
|
||||
not predictableInstruction(iTo.getRight()) and
|
||||
// propagate taint from either the pointer or the offset, regardless of predictability
|
||||
not iTo instanceof PointerArithmeticInstruction
|
||||
)
|
||||
or
|
||||
// don't use dataflow through calls to pure functions if two or more operands
|
||||
// are unpredictable
|
||||
exists(Instruction iFrom1, Instruction iFrom2, CallInstruction iTo |
|
||||
iTo = node.asInstruction() and
|
||||
isPureFunction(iTo.getStaticCallTarget().getName()) and
|
||||
iFrom1 = iTo.getAnArgument() and
|
||||
iFrom2 = iTo.getAnArgument() and
|
||||
not predictableInstruction(iFrom1) and
|
||||
not predictableInstruction(iFrom2) and
|
||||
iFrom1 != iFrom2
|
||||
)
|
||||
}
|
||||
|
||||
cached
|
||||
Element adjustedSink(DataFlow::Node sink) {
|
||||
// TODO: is it more appropriate to use asConvertedExpr here and avoid
|
||||
// `getConversion*`? Or will that cause us to miss some cases where there's
|
||||
// flow to a conversion (like a `ReferenceDereferenceExpr`) and we want to
|
||||
// pretend there was flow to the converted `Expr` for the sake of
|
||||
// compatibility.
|
||||
sink.asExpr().getConversion*() = result
|
||||
or
|
||||
// For compatibility, send flow from arguments to parameters, even for
|
||||
// functions with no body.
|
||||
exists(FunctionCall call, int i |
|
||||
sink.asExpr() = call.getArgument(pragma[only_bind_into](i)) and
|
||||
result = resolveCall(call).getParameter(pragma[only_bind_into](i))
|
||||
)
|
||||
or
|
||||
// For compatibility, send flow into a `Variable` if there is flow to any
|
||||
// Load or Store of that variable.
|
||||
exists(CopyInstruction copy |
|
||||
copy.getSourceValue() = sink.asInstruction() and
|
||||
(
|
||||
readsVariable(copy, result) or
|
||||
writesVariable(copy, result)
|
||||
) and
|
||||
not hasUpperBoundsCheck(result)
|
||||
)
|
||||
or
|
||||
// For compatibility, send flow into a `NotExpr` even if it's part of a
|
||||
// short-circuiting condition and thus might get skipped.
|
||||
result.(NotExpr).getOperand() = sink.asExpr()
|
||||
or
|
||||
// Taint postfix and prefix crement operations when their operand is tainted.
|
||||
result.(CrementOperation).getAnOperand() = sink.asExpr()
|
||||
or
|
||||
// Taint `e1 += e2`, `e1 &= e2` and friends when `e1` or `e2` is tainted.
|
||||
result.(AssignOperation).getAnOperand() = sink.asExpr()
|
||||
or
|
||||
result =
|
||||
sink.asOperand()
|
||||
.(SideEffectOperand)
|
||||
.getUse()
|
||||
.(ReadSideEffectInstruction)
|
||||
.getArgumentDef()
|
||||
.getUnconvertedResultExpression()
|
||||
}
|
||||
|
||||
/**
|
||||
* Step to return value of a modeled function when an input taints the
|
||||
* dereference of the return value.
|
||||
*/
|
||||
cached
|
||||
predicate additionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
exists(CallInstruction call, Function func, FunctionInput modelIn, FunctionOutput modelOut |
|
||||
n1.asOperand() = callInput(call, modelIn) and
|
||||
(
|
||||
func.(TaintFunction).hasTaintFlow(modelIn, modelOut)
|
||||
or
|
||||
func.(DataFlowFunction).hasDataFlow(modelIn, modelOut)
|
||||
) and
|
||||
call.getStaticCallTarget() = func and
|
||||
modelOut.isReturnValueDeref() and
|
||||
call = n2.asInstruction()
|
||||
)
|
||||
}
|
||||
}
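For context, `additionalTaintStep` above picks up models whose output is the dereference of the return value. A minimal sketch of such a model is shown below; the function name `my_strdup` is hypothetical.

```
private class MyDupModel extends TaintFunction {
  MyDupModel() { this.hasGlobalName("my_strdup") } // hypothetical function name

  override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
    // Taint flows from the first argument's pointed-to value to the
    // dereference of the return value, matching `isReturnValueDeref()` above.
    input.isParameterDeref(0) and
    output.isReturnValueDeref()
  }
}
```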
private import Cached
|
||||
|
||||
/**
|
||||
* Holds if `tainted` may contain taint from `source`.
|
||||
*
|
||||
* A tainted expression is either directly user input, or is
|
||||
* computed from user input in a way that users can probably
|
||||
* control the exact output of the computation.
|
||||
*
|
||||
* This doesn't include data flow through global variables.
|
||||
* If you need that, you must call `taintedIncludingGlobalVars`.
|
||||
*/
|
||||
cached
|
||||
predicate tainted(Expr source, Element tainted) {
|
||||
exists(DefaultTaintTrackingCfg cfg, DataFlow::Node sink |
|
||||
cfg.hasFlow(getNodeForSource(source), sink) and
|
||||
tainted = adjustedSink(sink)
|
||||
)
|
||||
}
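A minimal sketch of how `tainted` might be used from a query (imports omitted, alert message illustrative):

```
from Expr source, Element taintedElement
where tainted(source, taintedElement)
select taintedElement, "This value may be influenced by $@.", source, "user input"
```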
/**
|
||||
* Holds if `tainted` may contain taint from `source`, where the taint passed
|
||||
* through a global variable named `globalVar`.
|
||||
*
|
||||
* A tainted expression is either directly user input, or is
|
||||
* computed from user input in a way that users can probably
|
||||
* control the exact output of the computation.
|
||||
*
|
||||
* This version gives the same results as `tainted` but also includes
|
||||
* data flow through global variables.
|
||||
*
|
||||
* The parameter `globalVar` is the qualified name of the last global variable
|
||||
* used to move the value from source to tainted. If the taint did not pass
|
||||
* through a global variable, then `globalVar = ""`.
|
||||
*/
|
||||
cached
|
||||
predicate taintedIncludingGlobalVars(Expr source, Element tainted, string globalVar) {
|
||||
tainted(source, tainted) and
|
||||
globalVar = ""
|
||||
or
|
||||
exists(
|
||||
ToGlobalVarTaintTrackingCfg toCfg, FromGlobalVarTaintTrackingCfg fromCfg,
|
||||
DataFlow::VariableNode variableNode, GlobalOrNamespaceVariable global, DataFlow::Node sink
|
||||
|
|
||||
global = variableNode.getVariable() and
|
||||
toCfg.hasFlow(getNodeForSource(source), variableNode) and
|
||||
fromCfg.hasFlow(variableNode, sink) and
|
||||
tainted = adjustedSink(sink) and
|
||||
global = globalVarFromId(globalVar)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the global variable whose qualified name is `id`. Use this predicate
|
||||
* together with `taintedIncludingGlobalVars`. Example:
|
||||
*
|
||||
* ```
|
||||
* exists(string varName |
|
||||
* taintedIncludingGlobalVars(source, tainted, varName) and
|
||||
* var = globalVarFromId(varName)
|
||||
* )
|
||||
* ```
|
||||
*/
|
||||
GlobalOrNamespaceVariable globalVarFromId(string id) { id = result.getQualifiedName() }
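Putting `taintedIncludingGlobalVars` and `globalVarFromId` together, a query could restrict results to taint that passed through a particular global. This is a sketch only; the variable name `g_config` is hypothetical and imports are omitted.

```
from Expr source, Element taintedElement, string varName
where
  taintedIncludingGlobalVars(source, taintedElement, varName) and
  globalVarFromId(varName).hasName("g_config")
select taintedElement, "Tainted via the global variable " + varName + "."
```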
/**
|
||||
* Provides definitions for augmenting source/sink pairs with data-flow paths
|
||||
* between them. From a `@kind path-problem` query, import this module in the
|
||||
* global scope, extend `TaintTrackingConfiguration`, and use `taintedWithPath`
|
||||
* in place of `tainted`.
|
||||
*
|
||||
* Importing this module will also import the query predicates that contain the
|
||||
* taint paths.
|
||||
*/
|
||||
module TaintedWithPath {
|
||||
private newtype TSingleton = MkSingleton()
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration that matches sources and sinks in the same
|
||||
* way as the `tainted` predicate.
|
||||
*
|
||||
* Override `isSink` and `taintThroughGlobals` as needed, but do not provide
|
||||
* a characteristic predicate.
|
||||
*/
|
||||
class TaintTrackingConfiguration extends TSingleton {
|
||||
/** Override this to specify which elements are sources in this configuration. */
|
||||
predicate isSource(Expr source) { exists(getNodeForSource(source)) }
|
||||
|
||||
/** Override this to specify which elements are sinks in this configuration. */
|
||||
abstract predicate isSink(Element e);
|
||||
|
||||
/** Override this to specify which expressions are barriers in this configuration. */
|
||||
predicate isBarrier(Expr e) { nodeIsBarrier(getNodeForExpr(e)) }
|
||||
|
||||
/**
|
||||
* Override this predicate to `any()` to allow taint to flow through global
|
||||
* variables.
|
||||
*/
|
||||
predicate taintThroughGlobals() { none() }
|
||||
|
||||
/** Gets a textual representation of this element. */
|
||||
string toString() { result = "TaintTrackingConfiguration" }
|
||||
}
|
||||
|
||||
private class AdjustedConfiguration extends TaintTracking3::Configuration {
|
||||
AdjustedConfiguration() { this = "AdjustedConfiguration" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) {
|
||||
exists(TaintTrackingConfiguration cfg, Expr e |
|
||||
cfg.isSource(e) and source = getNodeForExpr(e)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) {
|
||||
exists(TaintTrackingConfiguration cfg | cfg.isSink(adjustedSink(sink)))
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
|
||||
// Steps into and out of global variables
|
||||
exists(TaintTrackingConfiguration cfg | cfg.taintThroughGlobals() |
|
||||
writesVariable(n1.asInstruction(), n2.asVariable().(GlobalOrNamespaceVariable))
|
||||
or
|
||||
readsVariable(n2.asInstruction(), n1.asVariable().(GlobalOrNamespaceVariable))
|
||||
)
|
||||
or
|
||||
additionalTaintStep(n1, n2)
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node node) {
|
||||
exists(TaintTrackingConfiguration cfg, Expr e | cfg.isBarrier(e) and node = getNodeForExpr(e))
|
||||
}
|
||||
|
||||
override predicate isSanitizerIn(DataFlow::Node node) { nodeIsBarrierIn(node) }
|
||||
}
|
||||
|
||||
/*
|
||||
* A sink `Element` may map to multiple `DataFlowX::PathNode`s via (the
|
||||
* inverse of) `adjustedSink`. For example, an `Expr` maps to all its
|
||||
* conversions, and a `Variable` maps to all loads and stores from it. Because
|
||||
* the path node is part of the tuple that constitutes the alert, this leads
|
||||
* to duplicate alerts.
|
||||
*
|
||||
* To avoid showing duplicates, we edit the graph to replace the final node
|
||||
* coming from the data-flow library with a node that matches exactly the
|
||||
* `Element` sink that's requested.
|
||||
*
|
||||
* The same is done for sources.
|
||||
*/
|
||||
|
||||
private newtype TPathNode =
|
||||
TWrapPathNode(DataFlow3::PathNode n) or
|
||||
// There's a single newtype constructor for both sources and sinks since
|
||||
// that makes it easiest to deal with the case where source = sink.
|
||||
TEndpointPathNode(Element e) {
|
||||
exists(AdjustedConfiguration cfg, DataFlow3::Node sourceNode, DataFlow3::Node sinkNode |
|
||||
cfg.hasFlow(sourceNode, sinkNode)
|
||||
|
|
||||
sourceNode = getNodeForExpr(e) and
|
||||
exists(TaintTrackingConfiguration ttCfg | ttCfg.isSource(e))
|
||||
or
|
||||
e = adjustedSink(sinkNode) and
|
||||
exists(TaintTrackingConfiguration ttCfg | ttCfg.isSink(e))
|
||||
)
|
||||
}
|
||||
|
||||
/** An opaque type used for the nodes of a data-flow path. */
|
||||
class PathNode extends TPathNode {
|
||||
/** Gets a textual representation of this element. */
|
||||
string toString() { none() }
|
||||
|
||||
/**
|
||||
* Holds if this element is at the specified location.
|
||||
* The location spans column `startcolumn` of line `startline` to
|
||||
* column `endcolumn` of line `endline` in file `filepath`.
|
||||
* For more information, see
|
||||
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
none()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* INTERNAL: Do not use.
|
||||
*/
|
||||
module Private {
|
||||
/** Gets a predecessor `PathNode` of `pathNode`, if any. */
|
||||
PathNode getAPredecessor(PathNode pathNode) { edges(result, pathNode) }
|
||||
|
||||
/** Gets the element that `pathNode` wraps, if any. */
|
||||
Element getElementFromPathNode(PathNode pathNode) {
|
||||
exists(DataFlow::Node node | node = pathNode.(WrapPathNode).inner().getNode() |
|
||||
result = node.asInstruction().getAst()
|
||||
or
|
||||
result = node.asOperand().getDef().getAst()
|
||||
)
|
||||
or
|
||||
result = pathNode.(EndpointPathNode).inner()
|
||||
}
|
||||
}
|
||||
|
||||
private class WrapPathNode extends PathNode, TWrapPathNode {
|
||||
DataFlow3::PathNode inner() { this = TWrapPathNode(result) }
|
||||
|
||||
override string toString() { result = this.inner().toString() }
|
||||
|
||||
override predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
this.inner().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
}
|
||||
}
|
||||
|
||||
private class EndpointPathNode extends PathNode, TEndpointPathNode {
|
||||
Expr inner() { this = TEndpointPathNode(result) }
|
||||
|
||||
override string toString() { result = this.inner().toString() }
|
||||
|
||||
override predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
this.inner()
|
||||
.getLocation()
|
||||
.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
}
|
||||
}
|
||||
|
||||
/** A PathNode whose `Element` is a source. It may also be a sink. */
|
||||
private class InitialPathNode extends EndpointPathNode {
|
||||
InitialPathNode() { exists(TaintTrackingConfiguration cfg | cfg.isSource(this.inner())) }
|
||||
}
|
||||
|
||||
/** A PathNode whose `Element` is a sink. It may also be a source. */
|
||||
private class FinalPathNode extends EndpointPathNode {
|
||||
FinalPathNode() { exists(TaintTrackingConfiguration cfg | cfg.isSink(this.inner())) }
|
||||
}
|
||||
|
||||
/** Holds if `(a,b)` is an edge in the graph of data flow path explanations. */
|
||||
query predicate edges(PathNode a, PathNode b) {
|
||||
DataFlow3::PathGraph::edges(a.(WrapPathNode).inner(), b.(WrapPathNode).inner())
|
||||
or
|
||||
// To avoid showing trivial-looking steps, we _replace_ the last node instead
|
||||
// of adding an edge out of it.
|
||||
exists(WrapPathNode sinkNode |
|
||||
DataFlow3::PathGraph::edges(a.(WrapPathNode).inner(), sinkNode.inner()) and
|
||||
b.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
or
|
||||
// Same for the first node
|
||||
exists(WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::edges(sourceNode.inner(), b.(WrapPathNode).inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(a.(InitialPathNode).inner())
|
||||
)
|
||||
or
|
||||
// Finally, handle the case where the path goes directly from a source to a
|
||||
// sink, meaning that they both need to be translated.
|
||||
exists(WrapPathNode sinkNode, WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::edges(sourceNode.inner(), sinkNode.inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(a.(InitialPathNode).inner()) and
|
||||
b.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is flow from `arg` to `out` across a call that can be summarized by the flow
|
||||
* from `par` to `ret` within it, in the graph of data flow path explanations.
|
||||
*/
|
||||
query predicate subpaths(PathNode arg, PathNode par, PathNode ret, PathNode out) {
|
||||
DataFlow3::PathGraph::subpaths(arg.(WrapPathNode).inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), out.(WrapPathNode).inner())
|
||||
or
|
||||
// To avoid showing trivial-looking steps, we _replace_ the last node instead
|
||||
// of adding an edge out of it.
|
||||
exists(WrapPathNode sinkNode |
|
||||
DataFlow3::PathGraph::subpaths(arg.(WrapPathNode).inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), sinkNode.inner()) and
|
||||
out.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
or
|
||||
// Same for the first node
|
||||
exists(WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::subpaths(sourceNode.inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), out.(WrapPathNode).inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(arg.(InitialPathNode).inner())
|
||||
)
|
||||
or
|
||||
// Finally, handle the case where the path goes directly from a source to a
|
||||
// sink, meaning that they both need to be translated.
|
||||
exists(WrapPathNode sinkNode, WrapPathNode sourceNode |
|
||||
DataFlow3::PathGraph::subpaths(sourceNode.inner(), par.(WrapPathNode).inner(),
|
||||
ret.(WrapPathNode).inner(), sinkNode.inner()) and
|
||||
sourceNode.inner().getNode() = getNodeForExpr(arg.(InitialPathNode).inner()) and
|
||||
out.(FinalPathNode).inner() = adjustedSink(sinkNode.inner().getNode())
|
||||
)
|
||||
}
|
||||
|
||||
/** Holds if `n` is a node in the graph of data flow path explanations. */
|
||||
query predicate nodes(PathNode n, string key, string val) {
|
||||
key = "semmle.label" and val = n.toString()
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `tainted` may contain taint from `source`, where `sourceNode` and
|
||||
* `sinkNode` are the corresponding `PathNode`s that can be used in a query
|
||||
* to provide path explanations. Extend `TaintTrackingConfiguration` to use
|
||||
* this predicate.
|
||||
*
|
||||
* A tainted expression is either directly user input, or is computed from
|
||||
* user input in a way that users can probably control the exact output of
|
||||
* the computation.
|
||||
*/
|
||||
predicate taintedWithPath(Expr source, Element tainted, PathNode sourceNode, PathNode sinkNode) {
|
||||
exists(AdjustedConfiguration cfg, DataFlow3::Node flowSource, DataFlow3::Node flowSink |
|
||||
source = sourceNode.(InitialPathNode).inner() and
|
||||
flowSource = getNodeForExpr(source) and
|
||||
cfg.hasFlow(flowSource, flowSink) and
|
||||
tainted = adjustedSink(flowSink) and
|
||||
tainted = sinkNode.(FinalPathNode).inner()
|
||||
)
|
||||
}
|
||||
|
||||
private predicate isGlobalVariablePathNode(WrapPathNode n) {
|
||||
n.inner().getNode().asVariable() instanceof GlobalOrNamespaceVariable
|
||||
}
|
||||
|
||||
private predicate edgesWithoutGlobals(PathNode a, PathNode b) {
|
||||
edges(a, b) and
|
||||
not isGlobalVariablePathNode(a) and
|
||||
not isGlobalVariablePathNode(b)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `tainted` can be reached from a taint source without passing
|
||||
* through a global variable.
|
||||
*/
|
||||
predicate taintedWithoutGlobals(Element tainted) {
|
||||
exists(AdjustedConfiguration cfg, PathNode sourceNode, FinalPathNode sinkNode |
|
||||
cfg.isSource(sourceNode.(WrapPathNode).inner().getNode()) and
|
||||
edgesWithoutGlobals+(sourceNode, sinkNode) and
|
||||
tainted = sinkNode.inner()
|
||||
)
|
||||
}
|
||||
}
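A rough usage sketch for this module, following the `TaintedWithPath` documentation above: extend `TaintTrackingConfiguration` (without a characteristic predicate) and use `taintedWithPath` from a `@kind path-problem` query. The library import and the sink choice are placeholders, not part of this change.

```
/** @kind path-problem */
// import of the enclosing taint-tracking library omitted
import TaintedWithPath

class Config extends TaintTrackingConfiguration {
  override predicate isSink(Element e) { e instanceof FunctionCall } // hypothetical sink
}

from Expr source, Element taintedElement, PathNode sourceNode, PathNode sinkNode
where taintedWithPath(source, taintedElement, sourceNode, sinkNode)
select taintedElement, sourceNode, sinkNode, "This value depends on $@.", source, "user input"
```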
@@ -27,7 +27,7 @@ private import implementations.StdString
|
||||
private import implementations.Swap
|
||||
private import implementations.GetDelim
|
||||
private import implementations.SmartPointer
|
||||
private import implementations.Sscanf
|
||||
private import implementations.Scanf
|
||||
private import implementations.Send
|
||||
private import implementations.Recv
|
||||
private import implementations.Accept
|
||||
|
||||
@@ -15,6 +15,6 @@ private class Fread extends AliasFunction, RemoteFlowSourceFunction {
|
||||
|
||||
override predicate hasRemoteFlowSource(FunctionOutput output, string description) {
|
||||
output.isParameterDeref(0) and
|
||||
description = "String read by " + this.getName()
|
||||
description = "string read by " + this.getName()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,6 +36,6 @@ private class GetDelimFunction extends TaintFunction, AliasFunction, SideEffectF
|
||||
|
||||
override predicate hasRemoteFlowSource(FunctionOutput output, string description) {
|
||||
output.isParameterDeref(0) and
|
||||
description = "String read by " + this.getName()
|
||||
description = "string read by " + this.getName()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,19 @@
|
||||
/**
|
||||
* Provides an implementation class modeling the POSIX function `getenv`.
|
||||
* Provides an implementation class modeling the POSIX function `getenv` and
|
||||
* various similar functions.
|
||||
*/
|
||||
|
||||
import cpp
|
||||
import semmle.code.cpp.models.interfaces.FlowSource
|
||||
|
||||
/**
|
||||
* The POSIX function `getenv`.
|
||||
* The POSIX function `getenv`, the GNU function `secure_getenv`, and the
|
||||
* Windows function `_wgetenv`.
|
||||
*/
|
||||
class Getenv extends LocalFlowSourceFunction {
|
||||
Getenv() { this.hasGlobalOrStdOrBslName("getenv") }
|
||||
Getenv() {
|
||||
this.hasGlobalOrStdOrBslName("getenv") or this.hasGlobalName(["secure_getenv", "_wgetenv"])
|
||||
}
|
||||
|
||||
override predicate hasLocalFlowSource(FunctionOutput output, string description) {
|
||||
(
|
||||
|
||||
@@ -49,10 +49,10 @@ private class FgetsFunction extends DataFlowFunction, TaintFunction, ArrayFuncti
|
||||
|
||||
override predicate hasRemoteFlowSource(FunctionOutput output, string description) {
|
||||
output.isParameterDeref(0) and
|
||||
description = "String read by " + this.getName()
|
||||
description = "string read by " + this.getName()
|
||||
or
|
||||
output.isReturnValue() and
|
||||
description = "String read by " + this.getName()
|
||||
description = "string read by " + this.getName()
|
||||
}
|
||||
|
||||
override predicate hasArrayWithVariableSize(int bufParam, int countParam) {
|
||||
@@ -98,10 +98,10 @@ private class GetsFunction extends DataFlowFunction, ArrayFunction, AliasFunctio
|
||||
|
||||
override predicate hasLocalFlowSource(FunctionOutput output, string description) {
|
||||
output.isParameterDeref(0) and
|
||||
description = "String read by " + this.getName()
|
||||
description = "string read by " + this.getName()
|
||||
or
|
||||
output.isReturnValue() and
|
||||
description = "String read by " + this.getName()
|
||||
description = "string read by " + this.getName()
|
||||
}
|
||||
|
||||
override predicate hasArrayWithUnknownSize(int bufParam) { bufParam = 0 }
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import semmle.code.cpp.models.interfaces.Taint
|
||||
import semmle.code.cpp.models.interfaces.Alias
|
||||
import semmle.code.cpp.models.interfaces.ArrayFunction
|
||||
import semmle.code.cpp.models.interfaces.FlowSource
|
||||
|
||||
private class InetNtoa extends TaintFunction {
|
||||
InetNtoa() { hasGlobalName("inet_ntoa") }
|
||||
InetNtoa() { this.hasGlobalName("inet_ntoa") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameter(0) and
|
||||
@@ -12,7 +13,7 @@ private class InetNtoa extends TaintFunction {
|
||||
}
|
||||
|
||||
private class InetAton extends TaintFunction, ArrayFunction {
|
||||
InetAton() { hasGlobalName("inet_aton") }
|
||||
InetAton() { this.hasGlobalName("inet_aton") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameterDeref(0) and
|
||||
@@ -32,7 +33,7 @@ private class InetAton extends TaintFunction, ArrayFunction {
|
||||
}
|
||||
|
||||
private class InetAddr extends TaintFunction, ArrayFunction, AliasFunction {
|
||||
InetAddr() { hasGlobalName("inet_addr") }
|
||||
InetAddr() { this.hasGlobalName("inet_addr") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameterDeref(0) and
|
||||
@@ -51,7 +52,7 @@ private class InetAddr extends TaintFunction, ArrayFunction, AliasFunction {
|
||||
}
|
||||
|
||||
private class InetNetwork extends TaintFunction, ArrayFunction {
|
||||
InetNetwork() { hasGlobalName("inet_network") }
|
||||
InetNetwork() { this.hasGlobalName("inet_network") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameterDeref(0) and
|
||||
@@ -64,7 +65,7 @@ private class InetNetwork extends TaintFunction, ArrayFunction {
|
||||
}
|
||||
|
||||
private class InetMakeaddr extends TaintFunction {
|
||||
InetMakeaddr() { hasGlobalName("inet_makeaddr") }
|
||||
InetMakeaddr() { this.hasGlobalName("inet_makeaddr") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
(
|
||||
@@ -76,7 +77,7 @@ private class InetMakeaddr extends TaintFunction {
|
||||
}
|
||||
|
||||
private class InetLnaof extends TaintFunction {
|
||||
InetLnaof() { hasGlobalName("inet_lnaof") }
|
||||
InetLnaof() { this.hasGlobalName("inet_lnaof") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameter(0) and
|
||||
@@ -85,7 +86,7 @@ private class InetLnaof extends TaintFunction {
|
||||
}
|
||||
|
||||
private class InetNetof extends TaintFunction {
|
||||
InetNetof() { hasGlobalName("inet_netof") }
|
||||
InetNetof() { this.hasGlobalName("inet_netof") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameter(0) and
|
||||
@@ -94,7 +95,7 @@ private class InetNetof extends TaintFunction {
|
||||
}
|
||||
|
||||
private class InetPton extends TaintFunction, ArrayFunction {
|
||||
InetPton() { hasGlobalName("inet_pton") }
|
||||
InetPton() { this.hasGlobalName("inet_pton") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
(
|
||||
@@ -114,7 +115,7 @@ private class InetPton extends TaintFunction, ArrayFunction {
|
||||
}
|
||||
|
||||
private class Gethostbyname extends TaintFunction, ArrayFunction {
|
||||
Gethostbyname() { hasGlobalName("gethostbyname") }
|
||||
Gethostbyname() { this.hasGlobalName("gethostbyname") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameterDeref(0) and
|
||||
@@ -127,7 +128,7 @@ private class Gethostbyname extends TaintFunction, ArrayFunction {
|
||||
}
|
||||
|
||||
private class Gethostbyaddr extends TaintFunction, ArrayFunction {
|
||||
Gethostbyaddr() { hasGlobalName("gethostbyaddr") }
|
||||
Gethostbyaddr() { this.hasGlobalName("gethostbyaddr") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
(
|
||||
@@ -142,3 +143,21 @@ private class Gethostbyaddr extends TaintFunction, ArrayFunction {
|
||||
|
||||
override predicate hasArrayWithNullTerminator(int bufParam) { bufParam = 0 }
|
||||
}
|
||||
|
||||
private class Getaddrinfo extends TaintFunction, ArrayFunction, RemoteFlowSourceFunction {
|
||||
Getaddrinfo() { this.hasGlobalName("getaddrinfo") }
|
||||
|
||||
override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
|
||||
input.isParameterDeref([0 .. 2]) and
|
||||
output.isParameterDeref(3)
|
||||
}
|
||||
|
||||
override predicate hasArrayInput(int bufParam) { bufParam in [0, 1] }
|
||||
|
||||
override predicate hasArrayWithNullTerminator(int bufParam) { bufParam in [0, 1] }
|
||||
|
||||
override predicate hasRemoteFlowSource(FunctionOutput output, string description) {
|
||||
output.isParameterDeref(3) and
|
||||
description = "address returned by " + this.getName()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,7 +31,17 @@ private class IteratorTraits extends Class {
|
||||
* `std::iterator_traits` instantiation for it.
|
||||
*/
|
||||
private class IteratorByTraits extends Iterator {
|
||||
IteratorByTraits() { exists(IteratorTraits it | it.getIteratorType() = this) }
|
||||
IteratorTraits trait;
|
||||
|
||||
IteratorByTraits() { trait.getIteratorType() = this }
|
||||
|
||||
override Type getValueType() {
|
||||
exists(TypedefType t |
|
||||
trait.getAMember() = t and
|
||||
t.getName() = "value_type" and
|
||||
result = t.getUnderlyingType()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -42,20 +52,27 @@ private class IteratorByTraits extends Iterator {
|
||||
*/
|
||||
private class IteratorByPointer extends Iterator instanceof PointerType {
|
||||
IteratorByPointer() { not this instanceof IteratorByTraits }
|
||||
|
||||
override Type getValueType() { result = super.getBaseType() }
|
||||
}
|
||||
|
||||
/**
|
||||
* A type which has the typedefs expected for an iterator.
|
||||
*/
|
||||
private class IteratorByTypedefs extends Iterator, Class {
|
||||
TypedefType valueType;
|
||||
|
||||
IteratorByTypedefs() {
|
||||
this.getAMember().(TypedefType).hasName("difference_type") and
|
||||
this.getAMember().(TypedefType).hasName("value_type") and
|
||||
valueType = this.getAMember() and
|
||||
valueType.hasName("value_type") and
|
||||
this.getAMember().(TypedefType).hasName("pointer") and
|
||||
this.getAMember().(TypedefType).hasName("reference") and
|
||||
this.getAMember().(TypedefType).hasName("iterator_category") and
|
||||
not this.hasQualifiedName(["std", "bsl"], "iterator_traits")
|
||||
}
|
||||
|
||||
override Type getValueType() { result = valueType.getUnderlyingType() }
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -63,6 +80,8 @@ private class IteratorByTypedefs extends Iterator, Class {
|
||||
*/
|
||||
private class StdIterator extends Iterator, Class {
|
||||
StdIterator() { this.hasQualifiedName(["std", "bsl"], "iterator") }
|
||||
|
||||
override Type getValueType() { result = this.getTemplateArgument(1).(Type).getUnderlyingType() }
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -166,12 +185,15 @@ private class IteratorSubOperator extends Operator, TaintFunction {
|
||||
/**
|
||||
* A non-member `operator+=` or `operator-=` function for an iterator type.
|
||||
*/
|
||||
private class IteratorAssignArithmeticOperator extends Operator, DataFlowFunction, TaintFunction {
|
||||
class IteratorAssignArithmeticOperator extends Operator {
|
||||
IteratorAssignArithmeticOperator() {
this.hasName(["operator+=", "operator-="]) and
exists(getIteratorArgumentInput(this, 0))
}
}

private class IteratorAssignArithmeticOperatorModel extends IteratorAssignArithmeticOperator,
DataFlowFunction, TaintFunction {
override predicate hasDataFlow(FunctionInput input, FunctionOutput output) {
input.isParameter(0) and
output.isReturnValue()
@@ -210,11 +232,14 @@ class IteratorPointerDereferenceMemberOperator extends MemberFunction, TaintFunc
/**
* An `operator++` or `operator--` member function for an iterator type.
*/
private class IteratorCrementMemberOperator extends MemberFunction, DataFlowFunction, TaintFunction {
class IteratorCrementMemberOperator extends MemberFunction {
IteratorCrementMemberOperator() {
this.getClassAndName(["operator++", "operator--"]) instanceof Iterator
}
}

private class IteratorCrementMemberOperatorModel extends IteratorCrementMemberOperator,
DataFlowFunction, TaintFunction {
override predicate hasDataFlow(FunctionInput input, FunctionOutput output) {
input.isQualifierAddress() and
output.isReturnValue()

@@ -83,7 +83,7 @@ private class Recv extends AliasFunction, ArrayFunction, SideEffectFunction,
or
this.hasGlobalName("recvfrom") and output.isParameterDeref([4, 5])
) and
description = "Buffer read by " + this.getName()
description = "buffer read by " + this.getName()
}

override predicate hasSocketInput(FunctionInput input) { input.isParameter(0) }

@@ -1,6 +1,6 @@
/**
* Provides implementation classes modeling `sscanf`, `fscanf` and various similar
* functions. See `semmle.code.cpp.models.Models` for usage information.
* Provides implementation classes modeling the `scanf` family of functions.
* See `semmle.code.cpp.models.Models` for usage information.
*/

import semmle.code.cpp.Function
@@ -9,18 +9,15 @@ import semmle.code.cpp.models.interfaces.ArrayFunction
import semmle.code.cpp.models.interfaces.Taint
import semmle.code.cpp.models.interfaces.Alias
import semmle.code.cpp.models.interfaces.SideEffect
import semmle.code.cpp.models.interfaces.FlowSource

/**
* The standard function `sscanf`, `fscanf` and its assorted variants
* The `scanf` family of functions.
*/
private class SscanfModel extends ArrayFunction, TaintFunction, AliasFunction, SideEffectFunction {
SscanfModel() { this instanceof Sscanf or this instanceof Fscanf or this instanceof Snscanf }

abstract private class ScanfFunctionModel extends ArrayFunction, TaintFunction, AliasFunction,
SideEffectFunction {
override predicate hasArrayWithNullTerminator(int bufParam) {
bufParam = this.(ScanfFunction).getFormatParameterIndex()
or
not this instanceof Fscanf and
bufParam = this.(ScanfFunction).getInputParameterIndex()
}

override predicate hasArrayInput(int bufParam) { this.hasArrayWithNullTerminator(bufParam) }
@@ -36,7 +33,7 @@ private class SscanfModel extends ArrayFunction, TaintFunction, AliasFunction, S
)
}

private int getArgsStartPosition() { result = this.getNumberOfParameters() }
int getArgsStartPosition() { result = this.getNumberOfParameters() }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
input.isParameterDeref(this.(ScanfFunction).getInputParameterIndex()) and
@@ -70,3 +67,36 @@ private class SscanfModel extends ArrayFunction, TaintFunction, AliasFunction, S
]
}
}

/**
* The standard function `scanf` and its assorted variants
*/
private class ScanfModel extends ScanfFunctionModel, LocalFlowSourceFunction instanceof Scanf {
override predicate hasLocalFlowSource(FunctionOutput output, string description) {
output.isParameterDeref(any(int i | i >= this.getArgsStartPosition())) and
description = "value read by " + this.getName()
}
}

/**
* The standard function `fscanf` and its assorted variants
*/
private class FscanfModel extends ScanfFunctionModel, RemoteFlowSourceFunction instanceof Fscanf {
override predicate hasRemoteFlowSource(FunctionOutput output, string description) {
output.isParameterDeref(any(int i | i >= this.getArgsStartPosition())) and
description = "value read by " + this.getName()
}
}

/**
* The standard function `sscanf` and its assorted variants
*/
private class SscanfModel extends ScanfFunctionModel {
SscanfModel() { this instanceof Sscanf or this instanceof Snscanf }

override predicate hasArrayWithNullTerminator(int bufParam) {
super.hasArrayWithNullTerminator(bufParam)
or
bufParam = this.(ScanfFunction).getInputParameterIndex()
}
}
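As a rough illustration of what the `LocalFlowSourceFunction`/`RemoteFlowSourceFunction` split above provides downstream, here is a minimal query sketch (not part of this diff; the import path and the assumption that these models surface as node-level `RemoteFlowSource` values are mine) listing the remote flow sources in a database, which should include data read by `fscanf`:

```
import cpp
import semmle.code.cpp.security.FlowSources

// List every data flow node modeled as remote input, for example the
// output arguments of fscanf as modeled above.
from RemoteFlowSource source
select source, "Remote flow source."
```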
@@ -58,7 +58,7 @@ private class Send extends AliasFunction, ArrayFunction, SideEffectFunction, Rem
override ParameterIndex getParameterSizeIndex(ParameterIndex i) { i = 1 and result = 2 }

override predicate hasRemoteFlowSink(FunctionInput input, string description) {
input.isParameterDeref(1) and description = "Buffer sent by " + this.getName()
input.isParameterDeref(1) and description = "buffer sent by " + this.getName()
}

override predicate hasSocketInput(FunctionInput input) { input.isParameter(0) }

@@ -5,38 +5,53 @@
import semmle.code.cpp.models.interfaces.Taint
import semmle.code.cpp.models.interfaces.Iterator

/**
* A sequence container template class (for example, `std::vector`) from the
* standard library.
*/
abstract class StdSequenceContainer extends Class {
Type getElementType() { result = this.getTemplateArgument(0) }
}

/**
* The `std::array` template class.
*/
private class Array extends Class {
private class Array extends StdSequenceContainer {
Array() { this.hasQualifiedName(["std", "bsl"], "array") }
}

/**
* The `std::string` template class.
*/
private class String extends StdSequenceContainer {
String() { this.hasQualifiedName(["std", "bsl"], "basic_string") }
}

/**
* The `std::deque` template class.
*/
private class Deque extends Class {
private class Deque extends StdSequenceContainer {
Deque() { this.hasQualifiedName(["std", "bsl"], "deque") }
}

/**
* The `std::forward_list` template class.
*/
private class ForwardList extends Class {
private class ForwardList extends StdSequenceContainer {
ForwardList() { this.hasQualifiedName(["std", "bsl"], "forward_list") }
}

/**
* The `std::list` template class.
*/
private class List extends Class {
private class List extends StdSequenceContainer {
List() { this.hasQualifiedName(["std", "bsl"], "list") }
}

/**
* The `std::vector` template class.
*/
private class Vector extends Class {
private class Vector extends StdSequenceContainer {
Vector() { this.hasQualifiedName(["std", "bsl"], "vector") }
}

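A minimal sketch of how the new `StdSequenceContainer` base class could be queried, assuming the file above is `semmle.code.cpp.models.implementations.StdContainer` (the import path is an assumption, it is not shown in this diff):

```
import cpp
import semmle.code.cpp.models.implementations.StdContainer

// Pair each modeled sequence container (vector, deque, list, ...) with the
// element type reported by getElementType(), i.e. its first template argument.
from StdSequenceContainer container
select container, container.getElementType()
```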
@@ -16,21 +16,32 @@ private class StdBasicString extends ClassTemplateInstantiation {
}

/**
* Additional model for `std::string` constructors that reference the character
* type of the container, or an iterator. For example construction from
* iterators:
* ```
* std::string b(a.begin(), a.end());
* ```
* The `std::basic_string::iterator` declaration.
*
* Intuitively, this class shouldn't be necessary as it's already captured
* by the `StdIterator` class. However, this class ensures that the typedef inside the
* body of the `std::string` class is also seen as an iterator.
*
* Eventually, we should be consistent about which of the following should be recognized as iterators:
* 1. The typedef type.
* 2. The template class of the resolved type.
* 3. Any instantiation of the resolved type.
*/
private class StdStringConstructor extends Constructor, TaintFunction {
StdStringConstructor() { this.getDeclaringType() instanceof StdBasicString }
private class StdBasicStringIterator extends Iterator, Type {
StdBasicStringIterator() {
this.getEnclosingElement() instanceof StdBasicString and this.hasName("iterator")
}
}

/**
* A `std::string` function for which taint should be propagated.
*/
abstract private class StdStringTaintFunction extends TaintFunction {
/**
* Gets the index of a parameter to this function that is a string (or
* character).
*/
int getAStringParameterIndex() {
final int getAStringParameterIndex() {
exists(Type paramType | paramType = this.getParameter(result).getUnspecifiedType() |
// e.g. `std::basic_string::CharT *`
paramType instanceof PointerType
@@ -41,15 +52,28 @@ private class StdStringConstructor extends Constructor, TaintFunction {
this.getDeclaringType().getTemplateArgument(2).(Type).getUnspecifiedType()
or
// i.e. `std::basic_string::CharT`
this.getParameter(result).getUnspecifiedType() =
this.getDeclaringType().getTemplateArgument(0).(Type).getUnspecifiedType()
paramType = this.getDeclaringType().getTemplateArgument(0).(Type).getUnspecifiedType()
)
}

/**
* Gets the index of a parameter to this function that is an iterator.
*/
int getAnIteratorParameterIndex() { this.getParameter(result).getType() instanceof Iterator }
final int getAnIteratorParameterIndex() {
this.getParameter(result).getType() instanceof Iterator
}
}

/**
* Additional model for `std::string` constructors that reference the character
* type of the container, or an iterator. For example construction from
* iterators:
* ```
* std::string b(a.begin(), a.end());
* ```
*/
private class StdStringConstructor extends Constructor, StdStringTaintFunction {
StdStringConstructor() { this.getDeclaringType() instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
// taint flow from any parameter of the value type to the returned object
@@ -68,7 +92,7 @@ private class StdStringConstructor extends Constructor, TaintFunction {
/**
* The `std::string` function `c_str`.
*/
private class StdStringCStr extends TaintFunction {
private class StdStringCStr extends StdStringTaintFunction {
StdStringCStr() { this.getClassAndName("c_str") instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
@@ -81,7 +105,7 @@ private class StdStringCStr extends TaintFunction {
/**
* The `std::string` function `data`.
*/
private class StdStringData extends TaintFunction {
private class StdStringData extends StdStringTaintFunction {
StdStringData() { this.getClassAndName("data") instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
@@ -99,7 +123,7 @@ private class StdStringData extends TaintFunction {
/**
* The `std::string` function `push_back`.
*/
private class StdStringPush extends TaintFunction {
private class StdStringPush extends StdStringTaintFunction {
StdStringPush() { this.getClassAndName("push_back") instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
@@ -112,7 +136,7 @@ private class StdStringPush extends TaintFunction {
/**
* The `std::string` functions `front` and `back`.
*/
private class StdStringFrontBack extends TaintFunction {
private class StdStringFrontBack extends StdStringTaintFunction {
StdStringFrontBack() { this.getClassAndName(["front", "back"]) instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
@@ -125,7 +149,7 @@ private class StdStringFrontBack extends TaintFunction {
/**
* The (non-member) `std::string` function `operator+`.
*/
private class StdStringPlus extends TaintFunction {
private class StdStringPlus extends StdStringTaintFunction {
StdStringPlus() {
this.hasQualifiedName(["std", "bsl"], "operator+") and
this.getUnspecifiedType() instanceof StdBasicString
@@ -142,31 +166,15 @@ private class StdStringPlus extends TaintFunction {
}

/**
* The `std::string` functions `operator+=`, `append`, `insert` and
* `replace`. All of these functions combine the existing string
* with a new string (or character) from one of the arguments.
* The `std::string` functions `operator+=`, `append` and `replace`.
* All of these functions combine the existing string with a new
* string (or character) from one of the arguments.
*/
private class StdStringAppend extends TaintFunction {
private class StdStringAppend extends StdStringTaintFunction {
StdStringAppend() {
this.getClassAndName(["operator+=", "append", "insert", "replace"]) instanceof StdBasicString
this.getClassAndName(["operator+=", "append", "replace"]) instanceof StdBasicString
}

/**
* Gets the index of a parameter to this function that is a string (or
* character).
*/
int getAStringParameterIndex() {
this.getParameter(result).getType() instanceof PointerType or // e.g. `std::basic_string::CharT *`
this.getParameter(result).getType() instanceof ReferenceType or // e.g. `std::basic_string &`
this.getParameter(result).getUnspecifiedType() =
this.getDeclaringType().getTemplateArgument(0).(Type).getUnspecifiedType() // i.e. `std::basic_string::CharT`
}

/**
* Gets the index of a parameter to this function that is an iterator.
*/
int getAnIteratorParameterIndex() { this.getParameter(result).getType() instanceof Iterator }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
// flow from string and parameter to string (qualifier) and return value
(
@@ -187,26 +195,42 @@ private class StdStringAppend extends TaintFunction {
}

/**
* The standard function `std::string.assign`.
* The `std::string` function `insert`.
*/
private class StdStringAssign extends TaintFunction {
StdStringAssign() { this.getClassAndName("assign") instanceof StdBasicString }
private class StdStringInsert extends StdStringTaintFunction {
StdStringInsert() { this.getClassAndName("insert") instanceof StdBasicString }

/**
* Gets the index of a parameter to this function that is a string (or
* character).
* Holds if the return type is an iterator.
*/
int getAStringParameterIndex() {
this.getParameter(result).getType() instanceof PointerType or // e.g. `std::basic_string::CharT *`
this.getParameter(result).getType() instanceof ReferenceType or // e.g. `std::basic_string &`
this.getParameter(result).getUnspecifiedType() =
this.getDeclaringType().getTemplateArgument(0).(Type).getUnspecifiedType() // i.e. `std::basic_string::CharT`
predicate hasIteratorReturnValue() { this.getType() instanceof Iterator }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
// flow from string and parameter to string (qualifier) and return value
(
input.isQualifierObject() or
input.isParameterDeref(this.getAStringParameterIndex()) or
input.isParameter(this.getAnIteratorParameterIndex())
) and
(
output.isQualifierObject()
or
if this.hasIteratorReturnValue() then output.isReturnValue() else output.isReturnValueDeref()
)
or
// reverse flow from returned reference to the qualifier (for writes to
// the result)
not this.hasIteratorReturnValue() and
input.isReturnValueDeref() and
output.isQualifierObject()
}
}

/**
* Gets the index of a parameter to this function that is an iterator.
* The standard function `std::string.assign`.
*/
int getAnIteratorParameterIndex() { this.getParameter(result).getType() instanceof Iterator }
private class StdStringAssign extends StdStringTaintFunction {
StdStringAssign() { this.getClassAndName("assign") instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
// flow from parameter to string itself (qualifier) and return value
@@ -229,7 +253,7 @@ private class StdStringAssign extends TaintFunction {
/**
* The standard function `std::string.copy`.
*/
private class StdStringCopy extends TaintFunction {
private class StdStringCopy extends StdStringTaintFunction {
StdStringCopy() { this.getClassAndName("copy") instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
@@ -242,7 +266,7 @@ private class StdStringCopy extends TaintFunction {
/**
* The standard function `std::string.substr`.
*/
private class StdStringSubstr extends TaintFunction {
private class StdStringSubstr extends StdStringTaintFunction {
StdStringSubstr() { this.getClassAndName("substr") instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {
@@ -255,7 +279,7 @@ private class StdStringSubstr extends TaintFunction {
/**
* The `std::string` functions `at` and `operator[]`.
*/
private class StdStringAt extends TaintFunction {
private class StdStringAt extends StdStringTaintFunction {
StdStringAt() { this.getClassAndName(["at", "operator[]"]) instanceof StdBasicString }

override predicate hasTaintFlow(FunctionInput input, FunctionOutput output) {

@@ -11,38 +11,6 @@
import semmle.code.cpp.Function
import semmle.code.cpp.models.Models

/**
* An allocation function such as `malloc`.
*/
abstract class AllocationFunction extends Function {
/**
* Gets the index of the argument for the allocation size, if any. The actual
* allocation size is the value of this argument multiplied by the result of
* `getSizeMult()`, in bytes.
*/
int getSizeArg() { none() }

/**
* Gets the index of an argument that multiplies the allocation size given by
* `getSizeArg`, if any.
*/
int getSizeMult() { none() }

/**
* Gets the index of the input pointer argument to be reallocated, if this
* is a `realloc` function.
*/
int getReallocPtrArg() { none() }

/**
* Whether or not this allocation requires a corresponding deallocation of
* some sort (most do, but `alloca` for example does not). If it is unclear,
* we default to no (for example a placement `new` allocation may or may not
* require a corresponding `delete`).
*/
predicate requiresDealloc() { any() }
}

/**
* An allocation expression such as call to `malloc` or a `new` expression.
*/
@@ -86,6 +54,41 @@ abstract class AllocationExpr extends Expr {
predicate requiresDealloc() { any() }
}

/**
* An allocation function such as `malloc`.
*
* Note: `AllocationExpr` includes calls to allocation functions, so prefer
* to use that class unless you specifically need to reason about functions.
*/
abstract class AllocationFunction extends Function {
/**
* Gets the index of the argument for the allocation size, if any. The actual
* allocation size is the value of this argument multiplied by the result of
* `getSizeMult()`, in bytes.
*/
int getSizeArg() { none() }

/**
* Gets the index of an argument that multiplies the allocation size given by
* `getSizeArg`, if any.
*/
int getSizeMult() { none() }

/**
* Gets the index of the input pointer argument to be reallocated, if this
* is a `realloc` function.
*/
int getReallocPtrArg() { none() }

/**
* Whether or not this allocation requires a corresponding deallocation of
* some sort (most do, but `alloca` for example does not). If it is unclear,
* we default to no (for example a placement `new` allocation may or may not
* require a corresponding `delete`).
*/
predicate requiresDealloc() { any() }
}

/**
* An `operator new` or `operator new[]` function that may be associated with
* `new` or `new[]` expressions. Note that `new` and `new[]` are not function

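Following the note above that `AllocationExpr` should normally be preferred over `AllocationFunction`, here is a minimal query sketch (the import path `semmle.code.cpp.models.interfaces.Allocation` is an assumption based on the imports shown in this diff):

```
import cpp
import semmle.code.cpp.models.interfaces.Allocation

// Report allocations (malloc calls, new expressions, ...) that are expected
// to have a corresponding deallocation, using the expression-level class.
from AllocationExpr alloc
where alloc.requiresDealloc()
select alloc, "Allocation that requires a corresponding deallocation."
```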
@@ -11,16 +11,6 @@
import semmle.code.cpp.Function
import semmle.code.cpp.models.Models

/**
* A deallocation function such as `free`.
*/
abstract class DeallocationFunction extends Function {
/**
* Gets the index of the argument that is freed by this function.
*/
int getFreedArg() { none() }
}

/**
* An deallocation expression such as call to `free` or a `delete` expression.
*/
@@ -31,6 +21,19 @@ abstract class DeallocationExpr extends Expr {
Expr getFreedExpr() { none() }
}

/**
* A deallocation function such as `free`.
*
* Note: `DeallocationExpr` includes calls to deallocation functions, so prefer
* to use that class unless you specifically need to reason about functions.
*/
abstract class DeallocationFunction extends Function {
/**
* Gets the index of the argument that is freed by this function.
*/
int getFreedArg() { none() }
}

/**
* An `operator delete` or `operator delete[]` function that may be associated
* with `delete` or `delete[]` expressions. Note that `delete` and `delete[]`

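The same pattern applies on the deallocation side; a minimal sketch using the expression-level class recommended by the note above (the import path is assumed):

```
import cpp
import semmle.code.cpp.models.interfaces.Deallocation

// Pair each deallocation (free call, delete expression, ...) with the
// expression it frees, where the model provides one.
from DeallocationExpr dealloc
select dealloc, dealloc.getFreedExpr()
```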
@@ -29,5 +29,17 @@ abstract class GetIteratorFunction extends Function {

/**
* A type which can be used as an iterator.
*
* Note: Do _not_ `extend` when inheriting from this class in queries. Always use `instanceof`:
* ```
* class MyIterator instanceof Iterator { ... }
* ```
*/
abstract class Iterator extends Type { }
abstract class Iterator extends Type {
/**
* Gets the value type of this iterator, if any.
*
* For example, the value type of a `std::vector<int>::iterator` is `int`.
*/
Type getValueType() { none() }
}

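A minimal sketch of the new `getValueType()` member in use; the import path matches the one used earlier in this diff, and the query simply pairs each modeled iterator type with its value type where one is known:

```
import cpp
import semmle.code.cpp.models.interfaces.Iterator

// For example, std::vector<int>::iterator should be paired with int.
from Iterator iter
select iter, iter.getValueType()
```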
@@ -89,9 +89,9 @@ private class LocalParameterSource extends LocalFlowSource {

private class ArgvSource extends LocalFlowSource {
ArgvSource() {
exists(Parameter argv |
argv.hasName("argv") and
argv.getFunction().hasGlobalName("main") and
exists(Function main, Parameter argv |
main.hasGlobalName("main") and
main.getParameter(1) = argv and
this.asExpr() = argv.getAnAccess()
)
}

@@ -1,4 +1,4 @@
/*
/**
* Support for tracking tainted data through the program. This is an alias for
* `semmle.code.cpp.ir.dataflow.DefaultTaintTracking` provided for backwards
* compatibility.

@@ -121,7 +121,9 @@ private predicate moveToDependingOnSide(Expr src, Expr dest) {
* (this is done to avoid false positives). Because of this we need to track if the tainted element came from an argument
* or not, and for that we use destFromArg
*/
private predicate betweenFunctionsValueMoveTo(Element src, Element dest, boolean destFromArg) {
deprecated private predicate betweenFunctionsValueMoveTo(
Element src, Element dest, boolean destFromArg
) {
not unreachable(src) and
not unreachable(dest) and
(
@@ -162,13 +164,13 @@ private predicate betweenFunctionsValueMoveTo(Element src, Element dest, boolean
// predicate folding for proper join-order
// bad magic: pushes down predicate that ruins join-order
pragma[nomagic]
private predicate resolveCallWithParam(Call call, Function called, int i, Parameter p) {
deprecated private predicate resolveCallWithParam(Call call, Function called, int i, Parameter p) {
called = resolveCall(call) and
p = called.getParameter(i)
}

/** A variable for which flow through is allowed. */
library class FlowVariable extends Variable {
deprecated library class FlowVariable extends Variable {
FlowVariable() {
(
this instanceof LocalScopeVariable or
@@ -179,11 +181,11 @@ library class FlowVariable extends Variable {
}

/** A local scope variable for which flow through is allowed. */
library class FlowLocalScopeVariable extends Variable {
deprecated library class FlowLocalScopeVariable extends Variable {
FlowLocalScopeVariable() { this instanceof LocalScopeVariable }
}

private predicate insideFunctionValueMoveTo(Element src, Element dest) {
deprecated private predicate insideFunctionValueMoveTo(Element src, Element dest) {
not unreachable(src) and
not unreachable(dest) and
(
@@ -324,7 +326,7 @@ private predicate unionAccess(Variable v, Field f, FieldAccess a) {
a.getQualifier() = v.getAnAccess()
}

GlobalOrNamespaceVariable globalVarFromId(string id) {
deprecated GlobalOrNamespaceVariable globalVarFromId(string id) {
if result instanceof NamespaceVariable
then id = result.getNamespace() + "::" + result.getName()
else id = result.getName()
@@ -353,7 +355,7 @@ private predicate hasUpperBoundsCheck(Variable var) {
}

cached
private predicate taintedWithArgsAndGlobalVars(
deprecated private predicate taintedWithArgsAndGlobalVars(
Element src, Element dest, boolean destFromArg, string globalVar
) {
isUserInput(src, _) and
@@ -395,7 +397,7 @@ private predicate taintedWithArgsAndGlobalVars(
* This doesn't include data flow through global variables.
* If you need that you must call taintedIncludingGlobalVars.
*/
predicate tainted(Expr source, Element tainted) {
deprecated predicate tainted(Expr source, Element tainted) {
taintedWithArgsAndGlobalVars(source, tainted, _, "")
}

@@ -410,7 +412,7 @@ predicate tainted(Expr source, Element tainted) {
* The parameter `globalVar` is the name of the last global variable used to move the
* value from source to tainted.
*/
predicate taintedIncludingGlobalVars(Expr source, Element tainted, string globalVar) {
deprecated predicate taintedIncludingGlobalVars(Expr source, Element tainted, string globalVar) {
taintedWithArgsAndGlobalVars(source, tainted, _, globalVar)
}

@@ -541,14 +543,14 @@ private predicate returnArgument(Function f, int sourceArg) {
* targets a virtual method, simple data flow analysis is performed
* in order to identify target(s).
*/
Function resolveCall(Call call) {
deprecated Function resolveCall(Call call) {
result = call.getTarget()
or
result = call.(DataSensitiveCallExpr).resolve()
}

/** A data sensitive call expression. */
abstract library class DataSensitiveCallExpr extends Expr {
abstract deprecated library class DataSensitiveCallExpr extends Expr {
DataSensitiveCallExpr() { not unreachable(this) }

abstract Expr getSrc();
@@ -579,7 +581,7 @@ abstract library class DataSensitiveCallExpr extends Expr {
}

/** Call through a function pointer. */
library class DataSensitiveExprCall extends DataSensitiveCallExpr, ExprCall {
deprecated library class DataSensitiveExprCall extends DataSensitiveCallExpr, ExprCall {
override Expr getSrc() { result = getExpr() }

override Function resolve() {
@@ -588,7 +590,8 @@ library class DataSensitiveExprCall extends DataSensitiveCallExpr, ExprCall {
}

/** Call to a virtual function. */
library class DataSensitiveOverriddenFunctionCall extends DataSensitiveCallExpr, FunctionCall {
deprecated library class DataSensitiveOverriddenFunctionCall extends DataSensitiveCallExpr,
FunctionCall {
DataSensitiveOverriddenFunctionCall() {
exists(getTarget().(VirtualFunction).getAnOverridingFunction())
}

@@ -1,4 +1,8 @@
/**
* DEPRECATED: This library has been replaced with a newer version which
* provides better performance and precision. Use
* `semmle.code.cpp.valuenumbering.GlobalValueNumbering` instead.
*
* Provides an implementation of Global Value Numbering.
* See https://en.wikipedia.org/wiki/Global_value_numbering
*
@@ -221,7 +225,7 @@ private newtype GvnBase =
* expression with this `GVN` and using its `toString` and `getLocation`
* methods.
*/
class GVN extends GvnBase {
deprecated class GVN extends GvnBase {
GVN() { this instanceof GvnBase }

/** Gets an expression that has this GVN. */
@@ -503,7 +507,7 @@ private predicate mk_Deref(GVN p, ControlFlowNode dominator, PointerDereferenceE

/** Gets the global value number of expression `e`. */
cached
GVN globalValueNumber(Expr e) {
deprecated GVN globalValueNumber(Expr e) {
exists(int val, Type t |
mk_IntConst(val, t, e) and
result = GVN_IntConst(val, t)

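For reference, a minimal sketch of the replacement library named in the deprecation note above; it is assumed here that the new `GlobalValueNumbering` library exposes a `globalValueNumber` predicate with the same shape as the deprecated one shown in this hunk:

```
import cpp
import semmle.code.cpp.valuenumbering.GlobalValueNumbering

// Find distinct expressions that the (new) GVN library proves compute the
// same value.
from Expr a, Expr b
where globalValueNumber(a) = globalValueNumber(b) and a != b
select a, b, "These expressions have the same global value number."
```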
@@ -15,6 +15,7 @@

import cpp

pragma[nomagic]
predicate beforeArrayAccess(Variable v, ArrayExpr access, Expr before) {
exists(LogicalAndExpr andexpr |
access.getArrayOffset() = v.getAnAccess() and
@@ -23,6 +24,7 @@ predicate beforeArrayAccess(Variable v, ArrayExpr access, Expr before) {
)
}

pragma[nomagic]
predicate afterArrayAccess(Variable v, ArrayExpr access, Expr after) {
exists(LogicalAndExpr andexpr |
access.getArrayOffset() = v.getAnAccess() and

@@ -1,3 +1,15 @@
## 0.4.6

No user-facing changes.

## 0.4.5

No user-facing changes.

## 0.4.4

No user-facing changes.

## 0.4.3

### Minor Analysis Improvements

@@ -115,7 +115,8 @@ BasicBlock blockGuardedBy(int value, string op, ScanfFunctionCall call) {
from ScanfOutput output, ScanfFunctionCall call, Access access
where
output.getCall() = call and
output.hasGuardedAccess(access, false)
output.hasGuardedAccess(access, false) and
not exists(DeallocationExpr dealloc | dealloc.getFreedExpr() = access)
select access,
"This variable is read, but may not have been written. " +
"It should be guarded by a check that the $@ returns at least " +

@@ -66,9 +66,7 @@ class ElseDirective extends Directive {
override predicate mismatched() { depth() < 1 }
}

class EndifDirective extends Directive {
EndifDirective() { this instanceof PreprocessorEndif }

class EndifDirective extends Directive instanceof PreprocessorEndif {
override int depthChange() { result = -1 }

override predicate mismatched() { depth() < 0 }

@@ -26,11 +26,11 @@ predicate intentionallyReturnsStackPointer(Function f) {
class ReturnStackAllocatedMemoryConfig extends MustFlowConfiguration {
ReturnStackAllocatedMemoryConfig() { this = "ReturnStackAllocatedMemoryConfig" }

override predicate isSource(DataFlow::Node source) {
override predicate isSource(Instruction source) {
// Holds if `source` is a node that represents the use of a stack variable
exists(VariableAddressInstruction var, Function func |
var = source.asInstruction() and
func = var.getEnclosingFunction() and
var = source and
func = source.getEnclosingFunction() and
var.getAstVariable() instanceof StackVariable and
// Pointer-to-member types aren't properly handled in the dbscheme.
not var.getResultType() instanceof PointerToMemberType and
@@ -40,7 +40,7 @@ class ReturnStackAllocatedMemoryConfig extends MustFlowConfiguration {
)
}

override predicate isSink(DataFlow::Node sink) {
override predicate isSink(Operand sink) {
// Holds if `sink` is a node that represents the `StoreInstruction` that is subsequently used in
// a `ReturnValueInstruction`.
// We use the `StoreInstruction` instead of the instruction that defines the
@@ -48,7 +48,7 @@ class ReturnStackAllocatedMemoryConfig extends MustFlowConfiguration {
exists(StoreInstruction store |
store.getDestinationAddress().(VariableAddressInstruction).getIRVariable() instanceof
IRReturnVariable and
sink.asOperand() = store.getSourceValueOperand()
sink = store.getSourceValueOperand()
)
}

@@ -77,10 +77,10 @@ class ReturnStackAllocatedMemoryConfig extends MustFlowConfiguration {
* }
* ```
*/
override predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
node2.asInstruction().(FieldAddressInstruction).getObjectAddressOperand() = node1.asOperand()
override predicate isAdditionalFlowStep(Operand node1, Instruction node2) {
node2.(FieldAddressInstruction).getObjectAddressOperand() = node1
or
node2.asInstruction().(PointerOffsetInstruction).getLeftOperand() = node1.asOperand()
node2.(PointerOffsetInstruction).getLeftOperand() = node1
}
}

@@ -89,6 +89,6 @@ from
ReturnStackAllocatedMemoryConfig conf
where
conf.hasFlowPath(pragma[only_bind_into](source), pragma[only_bind_into](sink)) and
source.getNode().asInstruction() = var
select sink.getNode(), source, sink, "May return stack-allocated memory from $@.", var.getAst(),
var.getAst().toString()
source.getInstruction() = var
select sink.getInstruction(), source, sink, "May return stack-allocated memory from $@.",
var.getAst(), var.getAst().toString()