Mirror of https://github.com/github/codeql.git (synced 2025-12-17 01:03:14 +01:00)

Commit: Merge remote-tracking branch 'origin/main' into js/move-cors-query-from-experimental
.bazelrc (3 changed lines)

@@ -30,6 +30,9 @@ common --registry=https://bcr.bazel.build
common --@rules_dotnet//dotnet/settings:strict_deps=false

# we only configure a nightly toolchain
common --@rules_rust//rust/toolchain/channel=nightly

# Reduce this eventually to empty, once we've fixed all our usages of java, and https://github.com/bazel-contrib/rules_go/issues/4193 is fixed
common --incompatible_autoload_externally="+@rules_java,+@rules_shell"
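The nightly channel flag added above works together with the Rust toolchain switch made in MODULE.bazel later in this same diff; condensed into one place, the relevant lines from the two files are the following (a sketch built only from lines that already appear in this diff, omitting the edition and sha256 settings):

# .bazelrc: select the nightly channel of the toolchains registered by rules_rust
common --@rules_rust//rust/toolchain/channel=nightly

# MODULE.bazel: register the matching nightly toolchain version
RUST_VERSION = "nightly/2025-08-01"
rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
    versions = [RUST_VERSION],
)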
.github/workflows/build-ripunzip.yml (4 changed lines, vendored)

@@ -20,7 +20,7 @@ jobs:
os: [ubuntu-22.04, macos-13, windows-2022]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
repository: google/ripunzip
ref: ${{ inputs.ripunzip-version }}
@@ -28,7 +28,7 @@ jobs:
# see https://github.com/sfackler/rust-openssl/issues/183
- if: runner.os == 'Linux'
name: checkout openssl
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
repository: openssl/openssl
path: openssl

.github/workflows/buildifier.yml (2 changed lines, vendored)

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Check bazel formatting
uses: pre-commit/action@646c83fcd040023954eafda54b4db0192ce70507
with:

.github/workflows/check-implicit-this.yml (2 changed lines, vendored)

@@ -16,7 +16,7 @@ jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check that implicit this warnings is enabled for all packs
shell: bash
run: |

@@ -17,7 +17,7 @@ jobs:
sync:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check overlay annotations
run: python config/add-overlay-annotations.py --check java

.github/workflows/check-qldoc.yml (2 changed lines, vendored)

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 2

.github/workflows/check-query-ids.yml (2 changed lines, vendored)

@@ -19,6 +19,6 @@ jobs:
name: Check query IDs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check for duplicate query IDs
run: python3 misc/scripts/check-query-ids.py
.github/workflows/codeql-analysis.yml (2 changed lines, vendored)

@@ -37,7 +37,7 @@ jobs:
dotnet-version: 9.0.100

- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

.github/workflows/compile-queries.yml (2 changed lines, vendored)

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest-xl

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Setup CodeQL
uses: ./.github/actions/fetch-codeql
with:

.github/workflows/cpp-swift-analysis.yml (2 changed lines, vendored)

@@ -28,7 +28,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL

.github/workflows/csharp-qltest.yml (4 changed lines, vendored)

@@ -39,7 +39,7 @@ jobs:
os: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Setup dotnet
uses: actions/setup-dotnet@v4
with:
@@ -55,7 +55,7 @@ jobs:
stubgentest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: ./csharp/actions/create-extractor-pack
- name: Run stub generator tests
run: |
.github/workflows/csv-coverage-metrics.yml (4 changed lines, vendored)

@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Setup CodeQL
uses: ./.github/actions/fetch-codeql
- name: Create empty database
@@ -51,7 +51,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Setup CodeQL
uses: ./.github/actions/fetch-codeql
- name: Create empty database

@@ -35,11 +35,11 @@ jobs:
GITHUB_CONTEXT: ${{ toJSON(github.event) }}
run: echo "$GITHUB_CONTEXT"
- name: Clone self (github/codeql) - MERGE
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: merge
- name: Clone self (github/codeql) - BASE
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
fetch-depth: 2
path: base

@@ -24,7 +24,7 @@ jobs:
GITHUB_CONTEXT: ${{ toJSON(github.event) }}
run: echo "$GITHUB_CONTEXT"
- name: Clone self (github/codeql)
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Set up Python 3.8
uses: actions/setup-python@v4
with:

@@ -12,11 +12,11 @@ jobs:

steps:
- name: Clone self (github/codeql)
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: script
- name: Clone self (github/codeql) for analysis
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: codeqlModels
fetch-depth: 0

.github/workflows/csv-coverage-update.yml (2 changed lines, vendored)

@@ -21,7 +21,7 @@ jobs:
GITHUB_CONTEXT: ${{ toJSON(github.event) }}
run: echo "$GITHUB_CONTEXT"
- name: Clone self (github/codeql)
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: ql
fetch-depth: 0

.github/workflows/csv-coverage.yml (4 changed lines, vendored)

@@ -16,11 +16,11 @@ jobs:

steps:
- name: Clone self (github/codeql)
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: script
- name: Clone self (github/codeql) for analysis
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: codeqlModels
ref: ${{ github.event.inputs.qlModelShaOverride || github.ref }}
.github/workflows/fast-forward.yml (2 changed lines, vendored)

@@ -26,7 +26,7 @@ jobs:
exit 1

- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v5

- name: Git config
shell: bash

.github/workflows/go-tests.yml (2 changed lines, vendored)

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest-xl
steps:
- name: Check out code
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Run tests
uses: ./go/actions/test
with:

.github/workflows/kotlin-build.yml (2 changed lines, vendored)

@@ -20,7 +20,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- run: |
bazel query //java/kotlin-extractor/...
# only build the default version as a quick check that we can build from `codeql`

.github/workflows/mad_modelDiff.yml (4 changed lines, vendored)

@@ -28,12 +28,12 @@ jobs:
slug: ${{fromJson(github.event.inputs.projects || '["apache/commons-codec", "apache/commons-io", "apache/commons-beanutils", "apache/commons-logging", "apache/commons-fileupload", "apache/commons-lang", "apache/commons-validator", "apache/commons-csv", "apache/dubbo"]' )}}
steps:
- name: Clone github/codeql from PR
uses: actions/checkout@v4
uses: actions/checkout@v5
if: github.event.pull_request
with:
path: codeql-pr
- name: Clone github/codeql from main
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: codeql-main
ref: main

.github/workflows/mad_regenerate-models.yml (4 changed lines, vendored)

@@ -30,11 +30,11 @@ jobs:
ref: "placeholder"
steps:
- name: Clone self (github/codeql)
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Setup CodeQL binaries
uses: ./.github/actions/fetch-codeql
- name: Clone repositories
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: repos/${{ matrix.ref }}
ref: ${{ matrix.ref }}
.github/workflows/python-tooling.yml (2 changed lines, vendored)

@@ -21,7 +21,7 @@ jobs:
check-python-tooling:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions/setup-python@v5
with:
python-version: '3.12'

.github/workflows/qhelp-pr-preview.yml (2 changed lines, vendored)

@@ -43,7 +43,7 @@ jobs:
if-no-files-found: error
retention-days: 1

- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 2
persist-credentials: false

.github/workflows/ql-for-ql-build.yml (2 changed lines, vendored)

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest-xl
steps:
### Build the queries ###
- uses: actions/checkout@v4
- uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Find codeql

@@ -25,7 +25,7 @@ jobs:
- github/codeql
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5

- name: Find codeql
id: find-codeql
@@ -46,7 +46,7 @@ jobs:
env:
CODEQL: ${{ steps.find-codeql.outputs.codeql-path }}
- name: Checkout ${{ matrix.repo }}
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
repository: ${{ matrix.repo }}
path: ${{ github.workspace }}/repo
@@ -75,7 +75,7 @@ jobs:
runs-on: ubuntu-latest
needs: measure
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions/download-artifact@v4
with:
name: measurements
.github/workflows/ql-for-ql-tests.yml (4 changed lines, vendored)

@@ -24,7 +24,7 @@ jobs:
qltest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Find codeql
id: find-codeql
uses: github/codeql-action/init@main
@@ -64,7 +64,7 @@ jobs:
needs: [qltest]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Install GNU tar
if: runner.os == 'macOS'
run: |

.github/workflows/query-list.yml (2 changed lines, vendored)

@@ -23,7 +23,7 @@ jobs:

steps:
- name: Clone self (github/codeql)
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
path: codeql
- name: Set up Python 3.8

.github/workflows/ruby-build.yml (8 changed lines, vendored)

@@ -47,7 +47,7 @@ jobs:
runs-on: ${{ matrix.os }}

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Install GNU tar
if: runner.os == 'macOS'
run: |
@@ -113,7 +113,7 @@ jobs:
if: github.repository_owner == 'github'
runs-on: ubuntu-latest-xl
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Fetch CodeQL
uses: ./.github/actions/fetch-codeql
- name: Cache compilation cache
@@ -146,7 +146,7 @@ jobs:
runs-on: ubuntu-latest
needs: [build, compile-queries]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions/download-artifact@v4
with:
name: ruby.dbscheme
@@ -209,7 +209,7 @@ jobs:
runs-on: ${{ matrix.os }}
needs: [package]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Fetch CodeQL
uses: ./.github/actions/fetch-codeql
.github/workflows/ruby-dataset-measure.yml (6 changed lines, vendored)

@@ -30,14 +30,14 @@ jobs:
repo: [rails/rails, discourse/discourse, spree/spree, ruby/ruby]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5

- uses: ./.github/actions/fetch-codeql

- uses: ./ruby/actions/create-extractor-pack

- name: Checkout ${{ matrix.repo }}
uses: actions/checkout@v4
uses: actions/checkout@v5
with:
repository: ${{ matrix.repo }}
path: ${{ github.workspace }}/repo
@@ -62,7 +62,7 @@ jobs:
runs-on: ubuntu-latest
needs: measure
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: actions/download-artifact@v4
with:
path: stats

.github/workflows/ruby-qltest-rtjo.yml (2 changed lines, vendored)

@@ -25,7 +25,7 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: ./.github/actions/fetch-codeql
- uses: ./ruby/actions/create-extractor-pack
- name: Cache compilation cache

.github/workflows/ruby-qltest.yml (4 changed lines, vendored)

@@ -36,7 +36,7 @@ jobs:
qlupgrade:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: ./.github/actions/fetch-codeql
- name: Check DB upgrade scripts
run: |
@@ -58,7 +58,7 @@ jobs:
strategy:
fail-fast: false
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: ./.github/actions/fetch-codeql
- uses: ./ruby/actions/create-extractor-pack
- name: Cache compilation cache
.github/workflows/rust-analysis.yml (2 changed lines, vendored)

@@ -35,7 +35,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5

- name: Query latest nightly CodeQL bundle
shell: bash

.github/workflows/rust.yml (6 changed lines, vendored)

@@ -30,7 +30,7 @@ jobs:
working-directory: rust/ast-generator
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Inject sources
shell: bash
run: |
@@ -53,7 +53,7 @@ jobs:
working-directory: rust/extractor
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Format
shell: bash
run: |
@@ -69,7 +69,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Install CodeQL
uses: ./.github/actions/fetch-codeql
- name: Code generation

.github/workflows/swift.yml (8 changed lines, vendored)

@@ -36,7 +36,7 @@ jobs:
fail-fast: false
runs-on: ${{ matrix.runner }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Setup (Linux)
if: runner.os == 'Linux'
run: |
@@ -53,7 +53,7 @@ jobs:
clang-format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: pre-commit/action@646c83fcd040023954eafda54b4db0192ce70507
name: Check that python code is properly formatted
with:
@@ -61,7 +61,7 @@ jobs:
codegen:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- uses: ./.github/actions/fetch-codeql
- uses: pre-commit/action@646c83fcd040023954eafda54b4db0192ce70507
name: Check that QL generated code was checked in
@@ -77,6 +77,6 @@ jobs:
check-no-override:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check that no override is present in load.bzl
run: bazel test ... --test_tag_filters=override --test_output=errors
.github/workflows/sync-files.yml (2 changed lines, vendored)

@@ -17,7 +17,7 @@ jobs:
sync:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check synchronized files
run: python config/sync-files.py
- name: Check dbscheme fragments

@@ -30,7 +30,7 @@ jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check formatting
run: cargo fmt -- --check
- name: Run tests
@@ -38,12 +38,12 @@ jobs:
fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Check formatting
run: cargo fmt --check
clippy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Run clippy
run: cargo clippy -- --no-deps -D warnings -A clippy::new_without_default -A clippy::too_many_arguments

.github/workflows/validate-change-notes.yml (2 changed lines, vendored)

@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5

- name: Setup CodeQL
uses: ./.github/actions/fetch-codeql

.github/workflows/zipmerge-test.yml (2 changed lines, vendored)

@@ -18,6 +18,6 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- run: |
bazel test //misc/bazel/internal/zipmerge:test --test_output=all
Cargo.lock (770 changed lines, generated)

File diff suppressed because it is too large.
MODULE.bazel (111 changed lines)

@@ -14,8 +14,8 @@ local_path_override(

# see https://registry.bazel.build/ for a list of available packages

bazel_dep(name = "platforms", version = "0.0.11")
bazel_dep(name = "rules_go", version = "0.50.1")
bazel_dep(name = "platforms", version = "1.0.0")
bazel_dep(name = "rules_go", version = "0.56.1")
bazel_dep(name = "rules_pkg", version = "1.0.1")
bazel_dep(name = "rules_nodejs", version = "6.2.0-codeql.1")
bazel_dep(name = "rules_python", version = "0.40.0")
@@ -28,7 +28,7 @@ bazel_dep(name = "rules_kotlin", version = "2.1.3-codeql.1")
bazel_dep(name = "gazelle", version = "0.40.0")
bazel_dep(name = "rules_dotnet", version = "0.17.4")
bazel_dep(name = "googletest", version = "1.14.0.bcr.1")
bazel_dep(name = "rules_rust", version = "0.58.0")
bazel_dep(name = "rules_rust", version = "0.63.0")
bazel_dep(name = "zstd", version = "1.5.5.bcr.1")

bazel_dep(name = "buildifier_prebuilt", version = "6.4.0", dev_dependency = True)
@@ -38,7 +38,10 @@ bazel_dep(name = "buildifier_prebuilt", version = "6.4.0", dev_dependency = True
RUST_EDITION = "2024"

# run buildutils-internal/scripts/fill-rust-sha256s.py when updating (internal repo)
RUST_VERSION = "1.86.0"
# a nightly toolchain is required to enable experimental_use_cc_common_link, which we require internally
# we prefer to run the same version as internally, even if experimental_use_cc_common_link is not really
# required in this repo
RUST_VERSION = "nightly/2025-08-01"

rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(
@@ -50,26 +53,26 @@ rust.toolchain(
],
# generated by buildutils-internal/scripts/fill-rust-sha256s.py (internal repo)
sha256s = {
"rustc-1.86.0-x86_64-unknown-linux-gnu.tar.xz": "4438b809ce4a083af31ed17aeeedcc8fc60ccffc0625bef1926620751b6989d7",
"rustc-1.86.0-x86_64-apple-darwin.tar.xz": "42b76253626febb7912541a30d3379f463dec89581aad4cb72c6c04fb5a71dc5",
"rustc-1.86.0-aarch64-apple-darwin.tar.xz": "23b8f52102249a47ab5bc859d54c9a3cb588a3259ba3f00f557d50edeca4fde9",
"rustc-1.86.0-x86_64-pc-windows-msvc.tar.xz": "fdde839fea274529a31e51eb85c6df1782cc8479c9d1bc24e2914d66a0de41ab",
"clippy-1.86.0-x86_64-unknown-linux-gnu.tar.xz": "02aaff2c1407d2da8dba19aa4970dd873e311902b120a66cbcdbe51eb8836edf",
"clippy-1.86.0-x86_64-apple-darwin.tar.xz": "bb85efda7bbffaf124867f5ca36d50932b1e8f533c62ee923438afb32ff8fe9a",
"clippy-1.86.0-aarch64-apple-darwin.tar.xz": "239fa3a604b124f0312f2af08537874a1227dba63385484b468cca62e7c4f2f2",
"clippy-1.86.0-x86_64-pc-windows-msvc.tar.xz": "d00498f47d49219f032e2c5eeebdfc3d32317c0dc3d3fd7125327445bc482cb4",
"cargo-1.86.0-x86_64-unknown-linux-gnu.tar.xz": "c5c1590f7e9246ad9f4f97cfe26ffa92707b52a769726596a9ef81565ebd908b",
"cargo-1.86.0-x86_64-apple-darwin.tar.xz": "af163eb02d1a178044d1b4f2375960efd47130f795f6e33d09e345454bb26f4e",
"cargo-1.86.0-aarch64-apple-darwin.tar.xz": "3cb13873d48c3e1e4cc684d42c245226a11fba52af6b047c3346ed654e7a05c0",
"cargo-1.86.0-x86_64-pc-windows-msvc.tar.xz": "e57a9d89619b5604899bac443e68927bdd371e40f2e03e18950b6ceb3eb67966",
"llvm-tools-1.86.0-x86_64-unknown-linux-gnu.tar.xz": "282145ab7a63c98b625856f44b905b4dc726b497246b824632a5790debe95a78",
"llvm-tools-1.86.0-x86_64-apple-darwin.tar.xz": "b55706e92f7da989207c50c13c7add483a9fedd233bc431b106eca2a8f151ec9",
"llvm-tools-1.86.0-aarch64-apple-darwin.tar.xz": "04d3618c686845853585f036e3211eb9e18f2d290f4610a7a78bdc1fcce1ebd9",
"llvm-tools-1.86.0-x86_64-pc-windows-msvc.tar.xz": "721a17cc8dc219177e4277a3592253934ef08daa1e1b12eda669a67d15fad8dd",
"rust-std-1.86.0-x86_64-unknown-linux-gnu.tar.xz": "67be7184ea388d8ce0feaf7fdea46f1775cfc2970930264343b3089898501d37",
"rust-std-1.86.0-x86_64-apple-darwin.tar.xz": "3b1140d54870a080080e84700143f4a342fbd02a410a319b05d9c02e7dcf44cc",
"rust-std-1.86.0-aarch64-apple-darwin.tar.xz": "0fb121fb3b8fa9027d79ff598500a7e5cd086ddbc3557482ed3fdda00832c61b",
"rust-std-1.86.0-x86_64-pc-windows-msvc.tar.xz": "3d5354b7b9cb950b58bff3fce18a652aa374bb30c8f70caebd3bd0b43cb41a33",
"2025-08-01/rustc-nightly-x86_64-unknown-linux-gnu.tar.xz": "9bbeaf5d3fc7247d31463a9083aa251c995cc50662c8219e7a2254d76a72a9a4",
"2025-08-01/rustc-nightly-x86_64-apple-darwin.tar.xz": "c9ea539a8eff0d5d162701f99f9e1aabe14dd0dfb420d62362817a5d09219de7",
"2025-08-01/rustc-nightly-aarch64-apple-darwin.tar.xz": "ae83feebbc39cfd982e4ecc8297731fe79c185173aee138467b334c5404b3773",
"2025-08-01/rustc-nightly-x86_64-pc-windows-msvc.tar.xz": "9f170c30d802a349be60cf52ec46260802093cb1013ad667fc0d528b7b10152f",
"2025-08-01/clippy-nightly-x86_64-unknown-linux-gnu.tar.xz": "9ae5f3cd8f557c4f6df522597c69d14398cf604cfaed2b83e767c4b77a7eaaf6",
"2025-08-01/clippy-nightly-x86_64-apple-darwin.tar.xz": "983cb9ee0b6b968188e04ab2d33743d54764b2681ce565e1b3f2b9135c696a3e",
"2025-08-01/clippy-nightly-aarch64-apple-darwin.tar.xz": "ed2219dbc49d088225e1b7c5c4390fa295066e071fddaa2714018f6bb39ddbf0",
"2025-08-01/clippy-nightly-x86_64-pc-windows-msvc.tar.xz": "911f40ab5cbdd686f40e00965271fe47c4805513a308ed01f30eafb25b448a50",
"2025-08-01/cargo-nightly-x86_64-unknown-linux-gnu.tar.xz": "106463c284e48e4904c717471eeec2be5cc83a9d2cae8d6e948b52438cad2e69",
"2025-08-01/cargo-nightly-x86_64-apple-darwin.tar.xz": "6ad35c40efc41a8c531ea43235058347b6902d98a9693bf0aed7fc16d5590cef",
"2025-08-01/cargo-nightly-aarch64-apple-darwin.tar.xz": "dd28c365e9d298abc3154c797720ad36a0058f131265c9978b4c8e4e37012c8a",
"2025-08-01/cargo-nightly-x86_64-pc-windows-msvc.tar.xz": "7b431286e12d6b3834b038f078389a00cac73f351e8c3152b2504a3c06420b3b",
"2025-08-01/llvm-tools-nightly-x86_64-unknown-linux-gnu.tar.xz": "e342e305d7927cc288d386983b2bc253cfad3776b113386e903d0b302648ef47",
"2025-08-01/llvm-tools-nightly-x86_64-apple-darwin.tar.xz": "e44dd3506524d85c37b3a54bcc91d01378fd2c590b2db5c5974d12f05c1b84d1",
"2025-08-01/llvm-tools-nightly-aarch64-apple-darwin.tar.xz": "0c1b5f46dd81be4a9227b10283a0fcaa39c14fea7e81aea6fd6d9887ff6cdc41",
"2025-08-01/llvm-tools-nightly-x86_64-pc-windows-msvc.tar.xz": "423e5fd11406adccbc31b8456ceb7375ce055cdf45e90d2c3babeb2d7f58383f",
"2025-08-01/rust-std-nightly-x86_64-unknown-linux-gnu.tar.xz": "3c0ceb46a252647a1d4c7116d9ccae684fa5e42aaf3296419febd2c962c3b41d",
"2025-08-01/rust-std-nightly-x86_64-apple-darwin.tar.xz": "3be416003cab10f767390a753d1d16ae4d26c7421c03c98992cf1943e5b0efe8",
"2025-08-01/rust-std-nightly-aarch64-apple-darwin.tar.xz": "4046ac0ef951cb056b5028a399124f60999fa37792eab69d008d8d7965f389b4",
"2025-08-01/rust-std-nightly-x86_64-pc-windows-msvc.tar.xz": "191ed9d8603c3a4fe5a7bbbc2feb72049078dae2df3d3b7d5dedf3abbf823e6e",
},
versions = [RUST_VERSION],
)
@@ -95,49 +98,49 @@ use_repo(
tree_sitter_extractors_deps = use_extension("//misc/bazel/3rdparty:tree_sitter_extractors_extension.bzl", "r")
use_repo(
tree_sitter_extractors_deps,
"vendor_ts__anyhow-1.0.98",
"vendor_ts__anyhow-1.0.99",
"vendor_ts__argfile-0.2.1",
"vendor_ts__chalk-ir-0.103.0",
"vendor_ts__chalk-ir-0.104.0",
"vendor_ts__chrono-0.4.41",
"vendor_ts__clap-4.5.40",
"vendor_ts__clap-4.5.44",
"vendor_ts__dunce-1.0.5",
"vendor_ts__either-1.15.0",
"vendor_ts__encoding-0.2.33",
"vendor_ts__figment-0.10.19",
"vendor_ts__flate2-1.1.0",
"vendor_ts__glob-0.3.2",
"vendor_ts__glob-0.3.3",
"vendor_ts__globset-0.4.15",
"vendor_ts__itertools-0.14.0",
"vendor_ts__lazy_static-1.5.0",
"vendor_ts__mustache-0.9.0",
"vendor_ts__num-traits-0.2.19",
"vendor_ts__num_cpus-1.17.0",
"vendor_ts__proc-macro2-1.0.95",
"vendor_ts__proc-macro2-1.0.97",
"vendor_ts__quote-1.0.40",
"vendor_ts__ra_ap_base_db-0.0.288",
"vendor_ts__ra_ap_cfg-0.0.288",
"vendor_ts__ra_ap_hir-0.0.288",
"vendor_ts__ra_ap_hir_def-0.0.288",
"vendor_ts__ra_ap_hir_expand-0.0.288",
"vendor_ts__ra_ap_hir_ty-0.0.288",
"vendor_ts__ra_ap_ide_db-0.0.288",
"vendor_ts__ra_ap_intern-0.0.288",
"vendor_ts__ra_ap_load-cargo-0.0.288",
"vendor_ts__ra_ap_parser-0.0.288",
"vendor_ts__ra_ap_paths-0.0.288",
"vendor_ts__ra_ap_project_model-0.0.288",
"vendor_ts__ra_ap_span-0.0.288",
"vendor_ts__ra_ap_stdx-0.0.288",
"vendor_ts__ra_ap_syntax-0.0.288",
"vendor_ts__ra_ap_vfs-0.0.288",
"vendor_ts__rand-0.9.1",
"vendor_ts__ra_ap_base_db-0.0.300",
"vendor_ts__ra_ap_cfg-0.0.300",
"vendor_ts__ra_ap_hir-0.0.300",
"vendor_ts__ra_ap_hir_def-0.0.300",
"vendor_ts__ra_ap_hir_expand-0.0.300",
"vendor_ts__ra_ap_hir_ty-0.0.300",
"vendor_ts__ra_ap_ide_db-0.0.300",
"vendor_ts__ra_ap_intern-0.0.300",
"vendor_ts__ra_ap_load-cargo-0.0.300",
"vendor_ts__ra_ap_parser-0.0.300",
"vendor_ts__ra_ap_paths-0.0.300",
"vendor_ts__ra_ap_project_model-0.0.300",
"vendor_ts__ra_ap_span-0.0.300",
"vendor_ts__ra_ap_stdx-0.0.300",
"vendor_ts__ra_ap_syntax-0.0.300",
"vendor_ts__ra_ap_vfs-0.0.300",
"vendor_ts__rand-0.9.2",
"vendor_ts__rayon-1.10.0",
"vendor_ts__regex-1.11.1",
"vendor_ts__serde-1.0.219",
"vendor_ts__serde_json-1.0.140",
"vendor_ts__serde_with-3.13.0",
"vendor_ts__syn-2.0.103",
"vendor_ts__toml-0.8.23",
"vendor_ts__serde_json-1.0.142",
"vendor_ts__serde_with-3.14.0",
"vendor_ts__syn-2.0.104",
"vendor_ts__toml-0.9.5",
"vendor_ts__tracing-0.1.41",
"vendor_ts__tracing-flame-0.2.0",
"vendor_ts__tracing-subscriber-0.3.19",
@@ -230,7 +233,7 @@ use_repo(
"kotlin-compiler-2.1.0-Beta1",
"kotlin-compiler-2.1.20-Beta1",
"kotlin-compiler-2.2.0-Beta1",
"kotlin-compiler-2.2.20-Beta1",
"kotlin-compiler-2.2.20-Beta2",
"kotlin-compiler-embeddable-1.6.0",
"kotlin-compiler-embeddable-1.6.20",
"kotlin-compiler-embeddable-1.7.0",
@@ -243,7 +246,7 @@ use_repo(
"kotlin-compiler-embeddable-2.1.0-Beta1",
"kotlin-compiler-embeddable-2.1.20-Beta1",
"kotlin-compiler-embeddable-2.2.0-Beta1",
"kotlin-compiler-embeddable-2.2.20-Beta1",
"kotlin-compiler-embeddable-2.2.20-Beta2",
"kotlin-stdlib-1.6.0",
"kotlin-stdlib-1.6.20",
"kotlin-stdlib-1.7.0",
@@ -256,11 +259,11 @@ use_repo(
"kotlin-stdlib-2.1.0-Beta1",
"kotlin-stdlib-2.1.20-Beta1",
"kotlin-stdlib-2.2.0-Beta1",
"kotlin-stdlib-2.2.20-Beta1",
"kotlin-stdlib-2.2.20-Beta2",
)

go_sdk = use_extension("@rules_go//go:extensions.bzl", "go_sdk")
go_sdk.download(version = "1.24.0")
go_sdk.download(version = "1.25.0")

go_deps = use_extension("@gazelle//:extensions.bzl", "go_deps")
go_deps.from_file(go_mod = "//go/extractor:go.mod")
@@ -6,6 +6,8 @@ column_kind: "utf16"
unicode_newlines: true
build_modes:
- none
default_queries:
- codeql/actions-queries
file_coverage_languages: []
github_api_languages: []
scc_languages: []

@@ -1,3 +1,15 @@
## 0.4.16

No user-facing changes.

## 0.4.15

No user-facing changes.

## 0.4.14

No user-facing changes.

## 0.4.13

### Bug Fixes
actions/ql/lib/change-notes/released/0.4.14.md (3 changed lines, new file)

@@ -0,0 +1,3 @@
## 0.4.14

No user-facing changes.

actions/ql/lib/change-notes/released/0.4.15.md (3 changed lines, new file)

@@ -0,0 +1,3 @@
## 0.4.15

No user-facing changes.

actions/ql/lib/change-notes/released/0.4.16.md (3 changed lines, new file)

@@ -0,0 +1,3 @@
## 0.4.16

No user-facing changes.

@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.4.13
lastReleaseVersion: 0.4.16
@@ -70,8 +70,8 @@ class Location extends TLocation, TBaseLocation {

/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* The location spans column `sc` of line `sl` to
* column `ec` of line `el` in file `p`.
* For more information, see
* [Providing locations in CodeQL queries](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
*/

@@ -261,7 +261,7 @@ class If extends AstNode instanceof IfImpl {
}

/**
* An Environemnt node representing a deployment environment.
* An Environment node representing a deployment environment.
*/
class Environment extends AstNode instanceof EnvironmentImpl {
string getName() { result = super.getName() }

@@ -125,12 +125,11 @@ abstract class AstNodeImpl extends TAstNode {
* Gets the enclosing Step.
*/
StepImpl getEnclosingStep() {
if this instanceof StepImpl
then result = this
else
if this instanceof ScalarValueImpl
then result.getAChildNode*() = this.getParentNode()
else none()
this instanceof StepImpl and
result = this
or
this instanceof ScalarValueImpl and
result.getAChildNode*() = this.getParentNode()
}

/**
@@ -1416,9 +1415,8 @@ class ExternalJobImpl extends JobImpl, UsesImpl {
override string getVersion() {
exists(YamlString name |
n.lookup("uses") = name and
if not name.getValue().matches("\\.%")
then result = name.getValue().regexpCapture(repoUsesParser(), 4)
else none()
not name.getValue().matches("\\.%") and
result = name.getValue().regexpCapture(repoUsesParser(), 4)
)
}
}
@@ -286,7 +286,7 @@ private module Cached {
/**
* Holds if `cfn` is the `i`th node in basic block `bb`.
*
* In other words, `i` is the shortest distance from a node `bb`
* In other words, `i` is the shortest distance from a node `bbStart`
* that starts a basic block to `cfn` along the `intraBBSucc` relation.
*/
cached

@@ -3,6 +3,8 @@ private import codeql.controlflow.Cfg as CfgShared
private import codeql.Locations

module Completion {
import codeql.controlflow.SuccessorType

private newtype TCompletion =
TSimpleCompletion() or
TBooleanCompletion(boolean b) { b in [false, true] } or
@@ -25,7 +27,7 @@ module Completion {

override predicate isValidFor(AstNode e) { not any(Completion c).isValidForSpecific(e) }

override NormalSuccessor getAMatchingSuccessorType() { any() }
override DirectSuccessor getAMatchingSuccessorType() { any() }
}

class BooleanCompletion extends NormalCompletion, TBooleanCompletion {
@@ -49,34 +51,6 @@ module Completion {

override ReturnSuccessor getAMatchingSuccessorType() { any() }
}

cached
private newtype TSuccessorType =
TNormalSuccessor() or
TBooleanSuccessor(boolean b) { b in [false, true] } or
TReturnSuccessor()

class SuccessorType extends TSuccessorType {
string toString() { none() }
}

class NormalSuccessor extends SuccessorType, TNormalSuccessor {
override string toString() { result = "successor" }
}

class BooleanSuccessor extends SuccessorType, TBooleanSuccessor {
boolean value;

BooleanSuccessor() { this = TBooleanSuccessor(value) }

override string toString() { result = value.toString() }

boolean getValue() { result = value }
}

class ReturnSuccessor extends SuccessorType, TReturnSuccessor {
override string toString() { result = "return" }
}
}

module CfgScope {
@@ -127,14 +101,8 @@ private module Implementation implements CfgShared::InputSig<Location> {
last(scope.(CompositeAction), e, c)
}

predicate successorTypeIsSimple(SuccessorType t) { t instanceof NormalSuccessor }

predicate successorTypeIsCondition(SuccessorType t) { t instanceof BooleanSuccessor }

SuccessorType getAMatchingSuccessorType(Completion c) { result = c.getAMatchingSuccessorType() }

predicate isAbnormalExitType(SuccessorType t) { none() }

int idOfAstNode(AstNode node) { none() }

int idOfCfgScope(CfgScope scope) { none() }
@@ -63,10 +63,10 @@ predicate madSource(DataFlow::Node source, string kind, string fieldName) {
(
if fieldName.trim().matches("env.%")
then source.asExpr() = uses.getInScopeEnvVarExpr(fieldName.trim().replaceAll("env.", ""))
else
if fieldName.trim().matches("output.%")
then source.asExpr() = uses
else none()
else (
fieldName.trim().matches("output.%") and
source.asExpr() = uses
)
)
)
}

@@ -31,14 +31,14 @@ abstract class RemoteFlowSource extends SourceNode {
class GitHubCtxSource extends RemoteFlowSource {
string flag;
string event;
GitHubExpression e;

GitHubCtxSource() {
this.asExpr() = e and
// github.head_ref
e.getFieldName() = "head_ref" and
flag = "branch" and
(
exists(GitHubExpression e |
this.asExpr() = e and
// github.head_ref
e.getFieldName() = "head_ref" and
flag = "branch"
|
event = e.getATriggerEvent().getName() and
event = "pull_request_target"
or

@@ -148,7 +148,6 @@ class GhCLICommandSource extends RemoteFlowSource, CommandSource {
class GitHubEventPathSource extends RemoteFlowSource, CommandSource {
string cmd;
string flag;
string access_path;
Run run;

// Examples
@@ -163,7 +162,7 @@ class GitHubEventPathSource extends RemoteFlowSource, CommandSource {
run.getScript().getACommand() = cmd and
cmd.matches("jq%") and
cmd.matches("%GITHUB_EVENT_PATH%") and
exists(string regexp |
exists(string regexp, string access_path |
untrustedEventPropertiesDataModel(regexp, flag) and
not flag = "json" and
access_path = "github.event" + cmd.regexpCapture(".*\\s+([^\\s]+)\\s+.*", 1) and
@@ -1,6 +1,7 @@
private import actions
private import codeql.actions.TaintTracking
private import codeql.actions.dataflow.ExternalFlow
private import codeql.actions.security.ControlChecks
import codeql.actions.dataflow.FlowSources
import codeql.actions.DataFlow

@@ -18,7 +19,6 @@ abstract class ArgumentInjectionSink extends DataFlow::Node {
*/
class ArgumentInjectionFromEnvVarSink extends ArgumentInjectionSink {
string command;
string argument;

ArgumentInjectionFromEnvVarSink() {
exists(Run run, string var |
@@ -27,7 +27,7 @@ class ArgumentInjectionFromEnvVarSink extends ArgumentInjectionSink {
exists(run.getInScopeEnvVarExpr(var)) or
var = "GITHUB_HEAD_REF"
) and
run.getScript().getAnEnvReachingArgumentInjectionSink(var, command, argument)
run.getScript().getAnEnvReachingArgumentInjectionSink(var, command, _)
)
}

@@ -43,13 +43,12 @@ class ArgumentInjectionFromEnvVarSink extends ArgumentInjectionSink {
*/
class ArgumentInjectionFromCommandSink extends ArgumentInjectionSink {
string command;
string argument;

ArgumentInjectionFromCommandSink() {
exists(CommandSource source, Run run |
run = source.getEnclosingRun() and
this.asExpr() = run.getScript() and
run.getScript().getACmdReachingArgumentInjectionSink(source.getCommand(), command, argument)
run.getScript().getACmdReachingArgumentInjectionSink(source.getCommand(), command, _)
)
}

@@ -65,6 +64,16 @@ class ArgumentInjectionFromMaDSink extends ArgumentInjectionSink {
override string getCommand() { result = "unknown" }
}

/**
* Gets the event that is relevant for the given node in the context of argument injection.
*
* This is used to highlight the event in the query results when an alert is raised.
*/
Event getRelevantEventInPrivilegedContext(DataFlow::Node node) {
inPrivilegedContext(node.asExpr(), result) and
not exists(ControlCheck check | check.protects(node.asExpr(), result, "argument-injection"))
}

/**
* A taint-tracking configuration for unsafe user input
* that is used to construct and evaluate a code script.
@@ -88,6 +97,16 @@ private module ArgumentInjectionConfig implements DataFlow::ConfigSig {
run.getScript().getAnEnvReachingArgumentInjectionSink(var, _, _)
)
}

predicate observeDiffInformedIncrementalMode() { any() }

Location getASelectedSourceLocation(DataFlow::Node source) { none() }

Location getASelectedSinkLocation(DataFlow::Node sink) {
result = sink.getLocation()
or
result = getRelevantEventInPrivilegedContext(sink).getLocation()
}
}

/** Tracks flow of unsafe user input that is used to construct and evaluate a code script. */
@@ -4,6 +4,7 @@ import codeql.actions.DataFlow
import codeql.actions.dataflow.FlowSources
import codeql.actions.security.PoisonableSteps
import codeql.actions.security.UntrustedCheckoutQuery
import codeql.actions.security.ControlChecks

string unzipRegexp() { result = "(unzip|tar)\\s+.*" }

@@ -124,8 +125,6 @@ class LegitLabsDownloadArtifactActionStep extends UntrustedArtifactDownloadStep,
}

class ActionsGitHubScriptDownloadStep extends UntrustedArtifactDownloadStep, UsesStep {
string script;

ActionsGitHubScriptDownloadStep() {
// eg:
// - uses: actions/github-script@v6
@@ -148,12 +147,14 @@ class ActionsGitHubScriptDownloadStep extends UntrustedArtifactDownloadStep, Use
// var fs = require('fs');
// fs.writeFileSync('${{github.workspace}}/test-results.zip', Buffer.from(download.data));
this.getCallee() = "actions/github-script" and
this.getArgument("script") = script and
script.matches("%listWorkflowRunArtifacts(%") and
script.matches("%downloadArtifact(%") and
script.matches("%writeFileSync(%") and
// Filter out artifacts that were created by pull-request.
not script.matches("%exclude_pull_requests: true%")
exists(string script |
this.getArgument("script") = script and
script.matches("%listWorkflowRunArtifacts(%") and
script.matches("%downloadArtifact(%") and
script.matches("%writeFileSync(%") and
// Filter out artifacts that were created by pull-request.
not script.matches("%exclude_pull_requests: true%")
)
}

override string getPath() {
@@ -170,10 +171,10 @@ class ActionsGitHubScriptDownloadStep extends UntrustedArtifactDownloadStep, Use
.getScript()
.getACommand()
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3)))
else
if this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp())
then result = "GITHUB_WORKSPACE/"
else none()
else (
this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp()) and
result = "GITHUB_WORKSPACE/"
)
}
}

@@ -206,12 +207,13 @@ class GHRunArtifactDownloadStep extends UntrustedArtifactDownloadStep, Run {
.getScript()
.getACommand()
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3)))
else
if
else (
(
this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp()) or
this.getScript().getACommand().regexpMatch(unzipRegexp())
then result = "GITHUB_WORKSPACE/"
else none()
) and
result = "GITHUB_WORKSPACE/"
)
}
}

@@ -258,15 +260,15 @@ class DirectArtifactDownloadStep extends UntrustedArtifactDownloadStep, Run {

class ArtifactPoisoningSink extends DataFlow::Node {
UntrustedArtifactDownloadStep download;
PoisonableStep poisonable;

ArtifactPoisoningSink() {
download.getAFollowingStep() = poisonable and
// excluding artifacts downloaded to the temporary directory
not download.getPath().regexpMatch("^/tmp.*") and
not download.getPath().regexpMatch("^\\$\\{\\{\\s*runner\\.temp\\s*}}.*") and
not download.getPath().regexpMatch("^\\$RUNNER_TEMP.*") and
(
exists(PoisonableStep poisonable |
download.getAFollowingStep() = poisonable and
// excluding artifacts downloaded to the temporary directory
not download.getPath().regexpMatch("^/tmp.*") and
not download.getPath().regexpMatch("^\\$\\{\\{\\s*runner\\.temp\\s*}}.*") and
not download.getPath().regexpMatch("^\\$RUNNER_TEMP.*")
|
poisonable.(Run).getScript() = this.asExpr() and
(
// Check if the poisonable step is a local script execution step
@@ -292,6 +294,16 @@ class ArtifactPoisoningSink extends DataFlow::Node {
string getPath() { result = download.getPath() }
}

/**
* Gets the event that is relevant for the given node in the context of artifact poisoning.
*
* This is used to highlight the event in the query results when an alert is raised.
*/
Event getRelevantEventInPrivilegedContext(DataFlow::Node node) {
inPrivilegedContext(node.asExpr(), result) and
not exists(ControlCheck check | check.protects(node.asExpr(), result, "artifact-poisoning"))
}

/**
* A taint-tracking configuration for unsafe artifacts
* that is used may lead to artifact poisoning
@@ -318,6 +330,16 @@ private module ArtifactPoisoningConfig implements DataFlow::ConfigSig {
exists(run.getScript().getAFileReadCommand())
)
}

predicate observeDiffInformedIncrementalMode() { any() }

Location getASelectedSourceLocation(DataFlow::Node source) { none() }

Location getASelectedSinkLocation(DataFlow::Node sink) {
result = sink.getLocation()
or
result = getRelevantEventInPrivilegedContext(sink).getLocation()
}
}

/** Tracks flow of unsafe artifacts that is used in an insecure way. */
@@ -3,6 +3,8 @@ private import codeql.actions.TaintTracking
private import codeql.actions.dataflow.ExternalFlow
import codeql.actions.dataflow.FlowSources
import codeql.actions.DataFlow
import codeql.actions.security.ControlChecks
import codeql.actions.security.CachePoisoningQuery

class CodeInjectionSink extends DataFlow::Node {
CodeInjectionSink() {
@@ -11,6 +13,46 @@ class CodeInjectionSink extends DataFlow::Node {
}
}

/**
* Get the relevant event for the sink in CodeInjectionCritical.ql.
*/
Event getRelevantCriticalEventForSink(DataFlow::Node sink) {
inPrivilegedContext(sink.asExpr(), result) and
not exists(ControlCheck check | check.protects(sink.asExpr(), result, "code-injection")) and
// exclude cases where the sink is a JS script and the expression uses toJson
not exists(UsesStep script |
script.getCallee() = "actions/github-script" and
script.getArgumentExpr("script") = sink.asExpr() and
exists(getAToJsonReferenceExpression(sink.asExpr().(Expression).getExpression(), _))
)
}

/**
* Get the relevant event for the sink in CachePoisoningViaCodeInjection.ql.
*/
Event getRelevantCachePoisoningEventForSink(DataFlow::Node sink) {
exists(LocalJob job |
job = sink.asExpr().getEnclosingJob() and
job.getATriggerEvent() = result and
// job can be triggered by an external user
result.isExternallyTriggerable() and
// excluding privileged workflows since they can be exploited in easier circumstances
// which is covered by `actions/code-injection/critical`
not job.isPrivilegedExternallyTriggerable(result) and
(
// the workflow runs in the context of the default branch
runsOnDefaultBranch(result)
or
// the workflow caller runs in the context of the default branch
result.getName() = "workflow_call" and
exists(ExternalJob caller |
caller.getCallee() = job.getLocation().getFile().getRelativePath() and
runsOnDefaultBranch(caller.getATriggerEvent())
)
)
)
}

/**
* A taint-tracking configuration for unsafe user input
* that is used to construct and evaluate a code script.
@@ -35,6 +77,18 @@ private module CodeInjectionConfig implements DataFlow::ConfigSig {
exists(run.getScript().getAFileReadCommand())
)
}

predicate observeDiffInformedIncrementalMode() { any() }

Location getASelectedSourceLocation(DataFlow::Node source) { none() }

Location getASelectedSinkLocation(DataFlow::Node sink) {
result = sink.getLocation()
or
result = getRelevantCriticalEventForSink(sink).getLocation()
or
result = getRelevantCachePoisoningEventForSink(sink).getLocation()
}
}

/** Tracks flow of unsafe user input that is used to construct and evaluate a code script. */
@@ -3,11 +3,20 @@ private import codeql.actions.TaintTracking
private import codeql.actions.dataflow.ExternalFlow
import codeql.actions.dataflow.FlowSources
import codeql.actions.DataFlow
import codeql.actions.security.ControlChecks

private class CommandInjectionSink extends DataFlow::Node {
CommandInjectionSink() { madSink(this, "command-injection") }
}

/** Get the relevant event for the sink in CommandInjectionCritical.ql. */
Event getRelevantEventInPrivilegedContext(DataFlow::Node sink) {
inPrivilegedContext(sink.asExpr(), result) and
not exists(ControlCheck check |
check.protects(sink.asExpr(), result, ["command-injection", "code-injection"])
)
}

/**
* A taint-tracking configuration for unsafe user input
* that is used to construct and evaluate a system command.
@@ -16,6 +25,16 @@ private module CommandInjectionConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }

predicate isSink(DataFlow::Node sink) { sink instanceof CommandInjectionSink }

predicate observeDiffInformedIncrementalMode() { any() }

Location getASelectedSourceLocation(DataFlow::Node source) { none() }

Location getASelectedSinkLocation(DataFlow::Node sink) {
result = sink.getLocation()
or
result = getRelevantEventInPrivilegedContext(sink).getLocation()
}
}

/** Tracks flow of unsafe user input that is used to construct and evaluate a system command. */

@@ -159,11 +159,8 @@ abstract class CommentVsHeadDateCheck extends ControlCheck {

/* Specific implementations of control checks */
class LabelIfCheck extends LabelCheck instanceof If {
string condition;

LabelIfCheck() {
condition = normalizeExpr(this.getCondition()) and
(
exists(string condition | condition = normalizeExpr(this.getCondition()) |
// eg: contains(github.event.pull_request.labels.*.name, 'safe to test')
condition.regexpMatch(".*(^|[^!])contains\\(\\s*github\\.event\\.pull_request\\.labels\\b.*")
or
@@ -72,6 +72,25 @@ class EnvPathInjectionFromMaDSink extends EnvPathInjectionSink {
EnvPathInjectionFromMaDSink() { madSink(this, "envpath-injection") }
}

/**
* Get the relevant event for a sink in EnvPathInjectionCritical.ql where the source type is "artifact".
*/
Event getRelevantArtifactEventInPrivilegedContext(DataFlow::Node sink) {
inPrivilegedContext(sink.asExpr(), result) and
not exists(ControlCheck check |
check.protects(sink.asExpr(), result, ["untrusted-checkout", "artifact-poisoning"])
) and
sink instanceof EnvPathInjectionFromFileReadSink
}

/**
* Get the relevant event for a sink in EnvPathInjectionCritical.ql where the source type is not "artifact".
*/
Event getRelevantNonArtifactEventInPrivilegedContext(DataFlow::Node sink) {
inPrivilegedContext(sink.asExpr(), result) and
not exists(ControlCheck check | check.protects(sink.asExpr(), result, "code-injection"))
}

/**
* A taint-tracking configuration for unsafe user input
* that is used to construct and evaluate an environment variable.
@@ -108,6 +127,18 @@ private module EnvPathInjectionConfig implements DataFlow::ConfigSig {
exists(run.getScript().getAFileReadCommand())
)
}

predicate observeDiffInformedIncrementalMode() { any() }

Location getASelectedSourceLocation(DataFlow::Node source) { none() }

Location getASelectedSinkLocation(DataFlow::Node sink) {
result = sink.getLocation()
or
result = getRelevantArtifactEventInPrivilegedContext(sink).getLocation()
or
result = getRelevantNonArtifactEventInPrivilegedContext(sink).getLocation()
}
}

/** Tracks flow of unsafe user input that is used to construct and evaluate the PATH environment variable. */
@@ -55,12 +55,8 @@ class EnvVarInjectionFromFileReadSink extends EnvVarInjectionSink {
* echo "COMMIT_MESSAGE=${COMMIT_MESSAGE}" >> $GITHUB_ENV
*/
class EnvVarInjectionFromCommandSink extends EnvVarInjectionSink {
CommandSource inCommand;
string injectedVar;
string command;

EnvVarInjectionFromCommandSink() {
exists(Run run |
exists(Run run, CommandSource inCommand, string injectedVar, string command |
this.asExpr() = inCommand.getEnclosingRun().getScript() and
run = inCommand.getEnclosingRun() and
run.getScript().getACmdReachingGitHubEnvWrite(inCommand.getCommand(), injectedVar) and
@@ -86,12 +82,8 @@ class EnvVarInjectionFromCommandSink extends EnvVarInjectionSink {
* echo "FOO=$BODY" >> $GITHUB_ENV
*/
class EnvVarInjectionFromEnvVarSink extends EnvVarInjectionSink {
string inVar;
string injectedVar;
string command;

EnvVarInjectionFromEnvVarSink() {
exists(Run run |
exists(Run run, string inVar, string injectedVar, string command |
run.getScript() = this.asExpr() and
exists(run.getInScopeEnvVarExpr(inVar)) and
run.getScript().getAnEnvReachingGitHubEnvWrite(inVar, injectedVar) and
@@ -126,6 +118,32 @@ class EnvVarInjectionFromMaDSink extends EnvVarInjectionSink {
EnvVarInjectionFromMaDSink() { madSink(this, "envvar-injection") }
}

/**
* Get the relevant event for a sink in EnvVarInjectionCritical.ql where the source type is "artifact".
*/
Event getRelevantArtifactEventInPrivilegedContext(DataFlow::Node sink) {
inPrivilegedContext(sink.asExpr(), result) and
not exists(ControlCheck check |
check
.protects(sink.asExpr(), result,
["envvar-injection", "untrusted-checkout", "artifact-poisoning"])
) and
(
sink instanceof EnvVarInjectionFromFileReadSink or
madSink(sink, "envvar-injection")
)
}

/**
* Get the relevant event for a sink in EnvVarInjectionCritical.ql where the source type is not "artifact".
*/
Event getRelevantNonArtifactEventInPrivilegedContext(DataFlow::Node sink) {
inPrivilegedContext(sink.asExpr(), result) and
not exists(ControlCheck check |
check.protects(sink.asExpr(), result, ["envvar-injection", "code-injection"])
)
}

/**
* A taint-tracking configuration for unsafe user input
* that is used to construct and evaluate an environment variable.
@@ -163,6 +181,18 @@ private module EnvVarInjectionConfig implements DataFlow::ConfigSig {
exists(run.getScript().getAFileReadCommand())
)
}

predicate observeDiffInformedIncrementalMode() { any() }

Location getASelectedSourceLocation(DataFlow::Node source) { none() }

Location getASelectedSinkLocation(DataFlow::Node sink) {
result = sink.getLocation()
or
result = getRelevantArtifactEventInPrivilegedContext(sink).getLocation()
or
result = getRelevantNonArtifactEventInPrivilegedContext(sink).getLocation()
}
}

/** Tracks flow of unsafe user input that is used to construct and evaluate an environment variable. */
@@ -99,18 +99,14 @@ class OutputClobberingFromEnvVarSink extends OutputClobberingSink {
* echo $BODY
*/
class WorkflowCommandClobberingFromEnvVarSink extends OutputClobberingSink {
string clobbering_var;
string clobbered_value;

WorkflowCommandClobberingFromEnvVarSink() {
exists(Run run, string workflow_cmd_stmt, string clobbering_stmt |
exists(Run run, string workflow_cmd_stmt, string clobbering_stmt, string clobbering_var |
run.getScript() = this.asExpr() and
run.getScript().getAStmt() = clobbering_stmt and
clobbering_stmt.regexpMatch("echo\\s+(-e\\s+)?(\"|')?\\$(\\{)?" + clobbering_var + ".*") and
exists(run.getInScopeEnvVarExpr(clobbering_var)) and
run.getScript().getAStmt() = workflow_cmd_stmt and
clobbered_value =
trimQuotes(workflow_cmd_stmt.regexpCapture(".*::set-output\\s+name=.*::(.*)", 1))
exists(trimQuotes(workflow_cmd_stmt.regexpCapture(".*::set-output\\s+name=.*::(.*)", 1)))
)
}
}
@@ -1,10 +1,8 @@
import actions

class UnversionedImmutableAction extends UsesStep {
string immutable_action;

UnversionedImmutableAction() {
isImmutableAction(this, immutable_action) and
isImmutableAction(this, _) and
not isSemVer(this.getVersion())
}
}

@@ -1,5 +1,5 @@
name: codeql/actions-all
version: 0.4.14-dev
version: 0.4.17-dev
library: true
warnOnImplicitThis: true
dependencies:

@@ -1,3 +1,15 @@
## 0.6.8

No user-facing changes.

## 0.6.7

No user-facing changes.

## 0.6.6

No user-facing changes.

## 0.6.5

No user-facing changes.
@@ -21,18 +21,12 @@ import codeql.actions.security.ControlChecks
from EnvPathInjectionFlow::PathNode source, EnvPathInjectionFlow::PathNode sink, Event event
where
EnvPathInjectionFlow::flowPath(source, sink) and
inPrivilegedContext(sink.getNode().asExpr(), event) and
(
not source.getNode().(RemoteFlowSource).getSourceType() = "artifact" and
not exists(ControlCheck check |
check.protects(sink.getNode().asExpr(), event, "code-injection")
)
event = getRelevantNonArtifactEventInPrivilegedContext(sink.getNode())
or
source.getNode().(RemoteFlowSource).getSourceType() = "artifact" and
not exists(ControlCheck check |
check.protects(sink.getNode().asExpr(), event, ["untrusted-checkout", "artifact-poisoning"])
) and
sink.getNode() instanceof EnvPathInjectionFromFileReadSink
event = getRelevantArtifactEventInPrivilegedContext(sink.getNode())
)
select sink.getNode(), source, sink,
"Potential PATH environment variable injection in $@, which may be controlled by an external user ($@).",

@@ -22,26 +22,15 @@ import codeql.actions.security.ControlChecks
from EnvVarInjectionFlow::PathNode source, EnvVarInjectionFlow::PathNode sink, Event event
where
EnvVarInjectionFlow::flowPath(source, sink) and
inPrivilegedContext(sink.getNode().asExpr(), event) and
// exclude paths to file read sinks from non-artifact sources
(
// source is text
not source.getNode().(RemoteFlowSource).getSourceType() = "artifact" and
not exists(ControlCheck check |
check.protects(sink.getNode().asExpr(), event, ["envvar-injection", "code-injection"])
)
event = getRelevantNonArtifactEventInPrivilegedContext(sink.getNode())
or
// source is an artifact or a file from an untrusted checkout
source.getNode().(RemoteFlowSource).getSourceType() = "artifact" and
not exists(ControlCheck check |
check
.protects(sink.getNode().asExpr(), event,
["envvar-injection", "untrusted-checkout", "artifact-poisoning"])
) and
(
sink.getNode() instanceof EnvVarInjectionFromFileReadSink or
madSink(sink.getNode(), "envvar-injection")
)
event = getRelevantArtifactEventInPrivilegedContext(sink.getNode())
)
select sink.getNode(), source, sink,
"Potential environment variable injection in $@, which may be controlled by an external user ($@).",

@@ -22,15 +22,8 @@ import codeql.actions.security.ControlChecks
from CodeInjectionFlow::PathNode source, CodeInjectionFlow::PathNode sink, Event event
where
CodeInjectionFlow::flowPath(source, sink) and
inPrivilegedContext(sink.getNode().asExpr(), event) and
source.getNode().(RemoteFlowSource).getEventName() = event.getName() and
not exists(ControlCheck check | check.protects(sink.getNode().asExpr(), event, "code-injection")) and
// exclude cases where the sink is a JS script and the expression uses toJson
not exists(UsesStep script |
script.getCallee() = "actions/github-script" and
script.getArgumentExpr("script") = sink.getNode().asExpr() and
exists(getAToJsonReferenceExpression(sink.getNode().asExpr().(Expression).getExpression(), _))
)
event = getRelevantCriticalEventForSink(sink.getNode()) and
source.getNode().(RemoteFlowSource).getEventName() = event.getName()
select sink.getNode(), source, sink,
"Potential code injection in $@, which may be controlled by an external user ($@).", sink,
sink.getNode().asExpr().(Expression).getRawExpression(), event, event.getName()

@@ -18,30 +18,13 @@ import codeql.actions.security.CachePoisoningQuery
import CodeInjectionFlow::PathGraph
import codeql.actions.security.ControlChecks

from CodeInjectionFlow::PathNode source, CodeInjectionFlow::PathNode sink, LocalJob job, Event event
from CodeInjectionFlow::PathNode source, CodeInjectionFlow::PathNode sink, Event event
where
CodeInjectionFlow::flowPath(source, sink) and
job = sink.getNode().asExpr().getEnclosingJob() and
job.getATriggerEvent() = event and
// job can be triggered by an external user
event.isExternallyTriggerable() and
event = getRelevantCachePoisoningEventForSink(sink.getNode()) and
// the checkout is not controlled by an access check
not exists(ControlCheck check |
check.protects(source.getNode().asExpr(), event, "code-injection")
) and
// excluding privileged workflows since they can be exploited in easier circumstances
// which is covered by `actions/code-injection/critical`
not job.isPrivilegedExternallyTriggerable(event) and
(
// the workflow runs in the context of the default branch
runsOnDefaultBranch(event)
or
// the workflow caller runs in the context of the default branch
event.getName() = "workflow_call" and
exists(ExternalJob caller |
caller.getCallee() = job.getLocation().getFile().getRelativePath() and
runsOnDefaultBranch(caller.getATriggerEvent())
)
)
select sink.getNode(), source, sink,
"Unprivileged code injection in $@, which may lead to cache poisoning ($@).", sink,

@@ -19,10 +19,7 @@ import codeql.actions.security.ControlChecks
from ArtifactPoisoningFlow::PathNode source, ArtifactPoisoningFlow::PathNode sink, Event event
where
ArtifactPoisoningFlow::flowPath(source, sink) and
inPrivilegedContext(sink.getNode().asExpr(), event) and
not exists(ControlCheck check |
check.protects(sink.getNode().asExpr(), event, "artifact-poisoning")
)
event = getRelevantEventInPrivilegedContext(sink.getNode())
select sink.getNode(), source, sink,
"Potential artifact poisoning in $@, which may be controlled by an external user ($@).", sink,
sink.getNode().toString(), event, event.getName()
@@ -1,6 +1,6 @@
## Overview

GitHub workflows can be triggered through various repository events, including incoming pull requests (PRs) or comments on Issues/PRs. A potentially dangerous misuse of the triggers such as `pull_request_target` or `issue_comment` followed by an explicit checkout of untrusted code (Pull Request HEAD) may lead to repository compromise if untrusted code gets executed in a privileged job.
GitHub workflows can be triggered through various repository events, including incoming pull requests (PRs) or comments on Issues/PRs. A potentially dangerous misuse of the triggers such as `pull_request_target` or `issue_comment` followed by an explicit checkout of untrusted code (Pull Request HEAD) may lead to repository compromise if untrusted code gets executed (e.g., due to a modified build script) in a privileged job.

## Recommendation

@@ -32,7 +32,7 @@ jobs:

- uses: actions/setup-node@v1
- run: |
npm install
npm install # scripts in package.json from PR would be executed here
npm build

- uses: completely/fakeaction@v2

@@ -1,6 +1,6 @@
## Overview

GitHub workflows can be triggered through various repository events, including incoming pull requests (PRs) or comments on Issues/PRs. A potentially dangerous misuse of the triggers such as `pull_request_target` or `issue_comment` followed by an explicit checkout of untrusted code (Pull Request HEAD) may lead to repository compromise if untrusted code gets executed in a privileged job.
GitHub workflows can be triggered through various repository events, including incoming pull requests (PRs) or comments on Issues/PRs. A potentially dangerous misuse of the triggers such as `pull_request_target` or `issue_comment` followed by an explicit checkout of untrusted code (Pull Request HEAD) may lead to repository compromise if untrusted code gets executed (e.g., due to a modified build script) in a privileged job.

## Recommendation

@@ -32,7 +32,7 @@ jobs:

- uses: actions/setup-node@v1
- run: |
npm install
npm install # scripts in package.json from PR would be executed here
npm build

- uses: completely/fakeaction@v2

@@ -1,6 +1,6 @@
## Overview

GitHub workflows can be triggered through various repository events, including incoming pull requests (PRs) or comments on Issues/PRs. A potentially dangerous misuse of the triggers such as `pull_request_target` or `issue_comment` followed by an explicit checkout of untrusted code (Pull Request HEAD) may lead to repository compromise if untrusted code gets executed in a privileged job.
GitHub workflows can be triggered through various repository events, including incoming pull requests (PRs) or comments on Issues/PRs. A potentially dangerous misuse of the triggers such as `pull_request_target` or `issue_comment` followed by an explicit checkout of untrusted code (Pull Request HEAD) may lead to repository compromise if untrusted code gets executed (e.g., due to a modified build script) in a privileged job.

## Recommendation

@@ -32,7 +32,7 @@ jobs:

- uses: actions/setup-node@v1
- run: |
npm install
npm install # scripts in package.json from PR would be executed here
npm build

- uses: completely/fakeaction@v2
3
actions/ql/src/change-notes/released/0.6.6.md
Normal file
3
actions/ql/src/change-notes/released/0.6.6.md
Normal file
@@ -0,0 +1,3 @@
## 0.6.6

No user-facing changes.
3
actions/ql/src/change-notes/released/0.6.7.md
Normal file
3
actions/ql/src/change-notes/released/0.6.7.md
Normal file
@@ -0,0 +1,3 @@
## 0.6.7

No user-facing changes.
3
actions/ql/src/change-notes/released/0.6.8.md
Normal file
3
actions/ql/src/change-notes/released/0.6.8.md
Normal file
@@ -0,0 +1,3 @@
## 0.6.8

No user-facing changes.
@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.6.5
lastReleaseVersion: 0.6.8
@@ -21,10 +21,7 @@ import codeql.actions.security.ControlChecks
from CommandInjectionFlow::PathNode source, CommandInjectionFlow::PathNode sink, Event event
where
CommandInjectionFlow::flowPath(source, sink) and
inPrivilegedContext(sink.getNode().asExpr(), event) and
not exists(ControlCheck check |
check.protects(sink.getNode().asExpr(), event, ["command-injection", "code-injection"])
)
event = getRelevantEventInPrivilegedContext(sink.getNode())
select sink.getNode(), source, sink,
"Potential command injection in $@, which may be controlled by an external user ($@).", sink,
sink.getNode().asExpr().(Expression).getRawExpression(), event, event.getName()

@@ -20,10 +20,7 @@ import codeql.actions.security.ControlChecks
from ArgumentInjectionFlow::PathNode source, ArgumentInjectionFlow::PathNode sink, Event event
where
ArgumentInjectionFlow::flowPath(source, sink) and
inPrivilegedContext(sink.getNode().asExpr(), event) and
not exists(ControlCheck check |
check.protects(sink.getNode().asExpr(), event, "argument-injection")
)
event = getRelevantEventInPrivilegedContext(sink.getNode())
select sink.getNode(), source, sink,
"Potential argument injection in $@ command, which may be controlled by an external user ($@).",
sink, sink.getNode().(ArgumentInjectionSink).getCommand(), event, event.getName()
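Editorial note: both queries above now delegate their privileged-context and control-check filtering to a `getRelevantEventInPrivilegedContext` helper whose definition is not part of the hunks shown here (each query library presumably defines its own variant, as the EnvVarInjection and EnvPathInjection libraries earlier in this diff do). Based on the inline conditions removed from CommandInjectionCritical.ql, a hedged sketch of what the command-injection variant presumably looks like; the exact check categories are taken from the removed lines and may differ in the real implementation:

```
Event getRelevantEventInPrivilegedContext(DataFlow::Node sink) {
  // the sink must be reachable in a privileged workflow context for this event
  inPrivilegedContext(sink.asExpr(), result) and
  // and no control check (e.g. an actor or permission validation step) protects it
  not exists(ControlCheck check |
    check.protects(sink.asExpr(), result, ["command-injection", "code-injection"])
  )
}
```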
@@ -37,8 +37,6 @@ where
)
or
// upload artifact is not used in the same workflow
not exists(UsesStep upload |
download.getEnclosingWorkflow().getAJob().(LocalJob).getAStep() = upload
)
not download.getEnclosingWorkflow().getAJob().(LocalJob).getAStep() instanceof UsesStep
)
select download, "Potential artifact poisoning"

@@ -1,5 +1,5 @@
name: codeql/actions-queries
version: 0.6.6-dev
version: 0.6.9-dev
library: false
warnOnImplicitThis: true
groups: [actions, queries]
2436
cpp/downgrades/c16b29b27f71247023321cc0d0360998b318837c/old.dbscheme
Normal file
2436
cpp/downgrades/c16b29b27f71247023321cc0d0360998b318837c/old.dbscheme
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,4 @@
description: Link PCH creations and uses
compatibility: full
pch_uses.rel: delete
pch_creations.rel: delete
@@ -1,3 +1,32 @@
## 5.5.0

### New Features

* Added a new class `PchFile` representing precompiled header (PCH) files used during project compilation.

### Minor Analysis Improvements

* Added flow summaries for the `Microsoft::WRL::ComPtr` member functions.
* The new dataflow/taint-tracking library (`semmle.code.cpp.dataflow.new.DataFlow` and `semmle.code.cpp.dataflow.new.TaintTracking`) now resolves virtual function calls more precisely. This results in fewer false positives when running dataflow/taint-tracking queries on C++ projects.

## 5.4.1

### Minor Analysis Improvements

* The guards libraries (`semmle.code.cpp.controlflow.Guards` and `semmle.code.cpp.controlflow.IRGuards`) have been improved to recognize more guards.
* Improved dataflow through global variables in the new dataflow library (`semmle.code.cpp.dataflow.new.DataFlow` and `semmle.code.cpp.dataflow.new.TaintTracking`). Queries based on these libraries will produce more results on codebases with many global variables.
* The global value numbering library (`semmle.code.cpp.valuenumbering.GlobalValueNumbering` and `semmle.code.cpp.ir.ValueNumbering`) has been improved so more expressions are assigned the same value number.

## 5.4.0

### New Features

* Exposed various SSA-related classes (`Definition`, `PhiNode`, `ExplicitDefinition`, `DirectExplicitDefinition`, and `IndirectExplicitDefinition`) which were previously only usable inside the internal dataflow directory.

### Minor Analysis Improvements

* The `cpp/overrun-write` query now recognizes more bound checks and thus produces fewer false positives.

## 5.3.0

### Deprecated APIs
11
cpp/ql/lib/Customizations.qll
Normal file
11
cpp/ql/lib/Customizations.qll
Normal file
@@ -0,0 +1,11 @@
/**
* Contains customizations to the standard library.
*
* This module is imported by `cpp.qll`, so any customizations defined here automatically
* apply to all queries.
*
* Typical examples of customizations include adding new subclasses of abstract classes such as
* the `RemoteFlowSource` class to model frameworks that are not covered by the standard library.
*/

import cpp
@@ -1,4 +0,0 @@
---
category: minorAnalysis
---
* The `cpp/overrun-write` query now recognizes more bound checks and thus produces fewer false positives.
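Editorial note: the QLDoc of the new `Customizations.qll` names subclassing `RemoteFlowSource` as the typical use case. As a hedged sketch only (the function being modelled is hypothetical, and the assumption that `RemoteFlowSource` comes from `semmle.code.cpp.security.FlowSources` and requires a `getSourceType()` override is illustrative, not confirmed by this diff), such a customization could look roughly like this:

```
import cpp
import semmle.code.cpp.security.FlowSources

/** Hypothetical: treat data returned by `my_framework_recv` as remote user input. */
class MyFrameworkRecvSource extends RemoteFlowSource {
  MyFrameworkRecvSource() {
    this.asExpr().(FunctionCall).getTarget().hasGlobalName("my_framework_recv")
  }

  override string getSourceType() { result = "my_framework_recv return value" }
}
```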
4
cpp/ql/lib/change-notes/2025-09-02-vla.md
Normal file
4
cpp/ql/lib/change-notes/2025-09-02-vla.md
Normal file
@@ -0,0 +1,4 @@
---
category: feature
---
* Added predicates `getTransitiveNumberOfVlaDimensionStmts`, `getTransitiveVlaDimensionStmt`, and `getParentVlaDecl` to `VlaDeclStmt` for handling `VlaDeclStmt`s whose base type is defined in terms of another `VlaDeclStmt` via a `typedef`.
9
cpp/ql/lib/change-notes/released/5.4.0.md
Normal file
9
cpp/ql/lib/change-notes/released/5.4.0.md
Normal file
@@ -0,0 +1,9 @@
## 5.4.0

### New Features

* Exposed various SSA-related classes (`Definition`, `PhiNode`, `ExplicitDefinition`, `DirectExplicitDefinition`, and `IndirectExplicitDefinition`) which were previously only usable inside the internal dataflow directory.

### Minor Analysis Improvements

* The `cpp/overrun-write` query now recognizes more bound checks and thus produces fewer false positives.
7
cpp/ql/lib/change-notes/released/5.4.1.md
Normal file
7
cpp/ql/lib/change-notes/released/5.4.1.md
Normal file
@@ -0,0 +1,7 @@
## 5.4.1

### Minor Analysis Improvements

* The guards libraries (`semmle.code.cpp.controlflow.Guards` and `semmle.code.cpp.controlflow.IRGuards`) have been improved to recognize more guards.
* Improved dataflow through global variables in the new dataflow library (`semmle.code.cpp.dataflow.new.DataFlow` and `semmle.code.cpp.dataflow.new.TaintTracking`). Queries based on these libraries will produce more results on codebases with many global variables.
* The global value numbering library (`semmle.code.cpp.valuenumbering.GlobalValueNumbering` and `semmle.code.cpp.ir.ValueNumbering`) has been improved so more expressions are assigned the same value number.
10
cpp/ql/lib/change-notes/released/5.5.0.md
Normal file
10
cpp/ql/lib/change-notes/released/5.5.0.md
Normal file
@@ -0,0 +1,10 @@
## 5.5.0

### New Features

* Added a new class `PchFile` representing precompiled header (PCH) files used during project compilation.

### Minor Analysis Improvements

* Added flow summaries for the `Microsoft::WRL::ComPtr` member functions.
* The new dataflow/taint-tracking library (`semmle.code.cpp.dataflow.new.DataFlow` and `semmle.code.cpp.dataflow.new.TaintTracking`) now resolves virtual function calls more precisely. This results in fewer false positives when running dataflow/taint-tracking queries on C++ projects.
@@ -1,2 +1,2 @@
---
lastReleaseVersion: 5.3.0
lastReleaseVersion: 5.5.0

@@ -13,7 +13,9 @@
* https://github.com/cplusplus/draft/raw/master/papers/n4140.pdf
*/

import Customizations
import semmle.code.cpp.File
import semmle.code.cpp.PchFile
import semmle.code.cpp.Linkage
import semmle.code.cpp.Location
import semmle.code.cpp.Compilation
31
cpp/ql/lib/ext/ComPtr.model.yml
Normal file
31
cpp/ql/lib/ext/ComPtr.model.yml
Normal file
@@ -0,0 +1,31 @@
extensions:
- addsTo:
pack: codeql/cpp-all
extensible: summaryModel
data: # namespace, type, subtypes, name, signature, ext, input, output, kind, provenance
- ["Microsoft::WRL", "ComPtr", True, "ComPtr<T>", "(T *)", "", "Argument[*@0]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "ComPtr", "(const ComPtr &)", "", "Argument[*0].Element[@]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "ComPtr", "(ComPtr &&)", "", "Argument[*0].Element[@]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "As", "", "", "Argument[-1]", "Argument[*0]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "AsIID", "", "", "Argument[-1]", "Argument[*1]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "AsWeak", "", "", "Argument[-1]", "Argument[*0]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "Attach", "", "", "Argument[*@0]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr<T>", True, "CopyTo", "(T **)", "", "Argument[-1].Element[@]", "Argument[**@0]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "CopyTo<T>", "(T **)", "", "Argument[-1].Element[@]", "Argument[**@0]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "CopyTo", "(REFIID,void **)", "", "Argument[-1].Element[@]", "Argument[**@1]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "Detach", "", "", "Argument[-1].Element[@]", "ReturnValue[*@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "Get", "", "", "Argument[-1].Element[@]", "ReturnValue[*@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "GetAddressOf", "", "", "Argument[-1].Element[@]", "ReturnValue[**@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "ReleaseAndGetAddressOf", "", "", "Argument[-1].Element[@]", "ReturnValue[**@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "Swap", "", "", "Argument[-1]", "Argument[*0]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "Swap", "", "", "Argument[*0]", "Argument[-1]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator&", "", "", "Argument[-1]", "ReturnValue.Element", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator->", "", "", "Argument[-1].Element[@]", "ReturnValue[*@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr<T>", True, "operator=", "(T *)", "", "Argument[*@0]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr<T>", True, "operator=", "(T *)", "", "Argument[*@0]", "ReturnValue[*].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator=<U>", "(U *)", "", "Argument[*@0]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator=<U>", "(U *)", "", "Argument[*@0]", "ReturnValue[*].Element[@]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator=", "(const ComPtr &)", "", "Argument[*0]", "Argument[-1]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator=", "(const ComPtr &)", "", "Argument[*0]", "ReturnValue[*]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator=", "(ComPtr &&)", "", "Argument[*0]", "Argument[-1]", "value", "manual"]
- ["Microsoft::WRL", "ComPtr", True, "operator=", "(ComPtr &&)", "", "Argument[*0]", "ReturnValue[*]", "value", "manual"]
12
cpp/ql/lib/ext/ComPtrRef.model.yml
Normal file
12
cpp/ql/lib/ext/ComPtrRef.model.yml
Normal file
@@ -0,0 +1,12 @@
extensions:
- addsTo:
pack: codeql/cpp-all
extensible: summaryModel
data: # namespace, type, subtypes, name, signature, ext, input, output, kind, provenance
- ["Microsoft::WRL::Details", "ComPtrRef", True, "ComPtrRef", "", "", "Argument[*0]", "Argument[-1].Element[@]", "value", "manual"]
- ["Microsoft::WRL::Details", "ComPtrRef", True, "GetAddressOf", "", "", "Argument[-1].Element[@]", "ReturnValue[*@]", "value", "manual"]
# TODO: We cannot yet model https://learn.microsoft.com/en-us/cpp/cppcx/wrl/comptrref-class?view=msvc-170#operator-interfacetype-star-star
- ["Microsoft::WRL::Details", "ComPtrRef", True, "operator*", "", "", "Argument[-1].Element[@]", "ReturnValue[*@]", "value", "manual"]
# TODO: We cannot yet model https://learn.microsoft.com/en-us/cpp/cppcx/wrl/comptrref-class?view=msvc-170#operator-t-star
- ["Microsoft::WRL::Details", "ComPtrRef", True, "operator void**", "", "", "Argument[-1].Element[@]", "ReturnValue[**@]", "value", "manual"]
- ["Microsoft::WRL::Details", "ComPtrRef", True, "ReleaseAndGetAddressOf", "", "", "Argument[-1].Element[@]", "ReturnValue[**@]", "value", "manual"]
@@ -1,5 +1,5 @@
name: codeql/cpp-all
version: 5.3.1-dev
version: 5.5.1-dev
groups: cpp
dbscheme: semmlecode.cpp.dbscheme
extractor: cpp
26
cpp/ql/lib/semmle/code/cpp/PchFile.qll
Normal file
26
cpp/ql/lib/semmle/code/cpp/PchFile.qll
Normal file
@@ -0,0 +1,26 @@
/**
* Provides the `PchFile` class representing precompiled header (PCH) files created and
* used during the build process.
*/

import semmle.code.cpp.File

/**
* A precompiled header (PCH) file created during the build process.
*/
class PchFile extends @pch {
/**
* Gets a textual representation of this element.
*/
string toString() { result = "PCH for " + this.getHeaderFile() }

/**
* Gets the header file from which the PCH file was created.
*/
File getHeaderFile() { pch_creations(this, _, result) }

/**
* Gets a source file that includes the PCH.
*/
File getAUse() { pch_uses(this, _, result) }
}
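Editorial note: a hedged usage sketch for the new `PchFile` class added above (assuming `import cpp` now pulls in `semmle.code.cpp.PchFile`, as the `cpp.qll` hunk earlier in this diff suggests): list each PCH together with the header it was built from and a source file that uses it.

```
import cpp

from PchFile pch, File use
where use = pch.getAUse()
select pch, pch.getHeaderFile().getAbsolutePath(), use.getAbsolutePath()
```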
@@ -57,6 +57,18 @@ private Class getRootType(FieldAccess fa) {
)
}

/**
* Gets the size of `v`. This predicate does not have a result when the
* unspecified type of `v` is a `ReferenceType`.
*/
private int getVariableSize(Variable v) {
exists(Type t |
t = v.getUnspecifiedType() and
not t instanceof ReferenceType and
result = t.getSize()
)
}

/**
* Gets the size of the buffer access at `va`.
*/
@@ -64,12 +76,8 @@ private int getSize(VariableAccess va) {
exists(Variable v | va.getTarget() = v |
// If `v` is not a field then the size of the buffer is just
// the size of the type of `v`.
exists(Type t |
t = v.getUnspecifiedType() and
not v instanceof Field and
not t instanceof ReferenceType and
result = t.getSize()
)
not v instanceof Field and
result = getVariableSize(v)
or
exists(Class c, int trueSize |
// Otherwise, we find the "outermost" object and compute the size
@@ -92,7 +100,7 @@ private int getSize(VariableAccess va) {
// buffer is `12 - 4 = 8`.
c = getRootType(va) and
// we calculate the size based on the last field, to avoid including any padding after it
trueSize = max(Field f | | f.getOffsetInClass(c) + f.getUnspecifiedType().getSize()) and
trueSize = max(Field f | | f.getOffsetInClass(c) + getVariableSize(f)) and
result = trueSize - v.(Field).getOffsetInClass(c)
)
)
@@ -936,6 +936,77 @@ private module Cached {
ValueNumber getUnary() { result.getAnInstruction() = instr.getUnary() }
}

signature predicate sinkSig(Instruction instr);

private module BooleanInstruction<sinkSig/1 isSink> {
/**
* Holds if `i1` flows to `i2` in a single step and `i2` is not an
* instruction that produces a value of Boolean type.
*/
private predicate stepToNonBoolean(Instruction i1, Instruction i2) {
not i2.getResultIRType() instanceof IRBooleanType and
(
i2.(CopyInstruction).getSourceValue() = i1
or
i2.(ConvertInstruction).getUnary() = i1
or
i2.(BuiltinExpectCallInstruction).getArgument(0) = i1
)
}

private predicate rev(Instruction instr) {
isSink(instr)
or
exists(Instruction instr1 |
rev(instr1) and
stepToNonBoolean(instr, instr1)
)
}

private predicate hasBooleanType(Instruction instr) {
instr.getResultIRType() instanceof IRBooleanType
}

private predicate fwd(Instruction instr) {
rev(instr) and
(
hasBooleanType(instr)
or
exists(Instruction instr0 |
fwd(instr0) and
stepToNonBoolean(instr0, instr)
)
)
}

private predicate prunedStep(Instruction i1, Instruction i2) {
fwd(i1) and
fwd(i2) and
stepToNonBoolean(i1, i2)
}

private predicate stepsPlus(Instruction i1, Instruction i2) =
doublyBoundedFastTC(prunedStep/2, hasBooleanType/1, isSink/1)(i1, i2)

/**
* Gets the Boolean-typed instruction that defines `instr` before any
* integer conversions are applied, if any.
*/
Instruction get(Instruction instr) {
isSink(instr) and
(
result = instr
or
stepsPlus(result, instr)
) and
hasBooleanType(result)
}
}
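Editorial note: the `BooleanInstruction` module above prunes before computing a transitive closure: `rev` keeps only instructions that can reach a sink, `fwd` additionally requires reachability from a Boolean-typed instruction, and `doublyBoundedFastTC` then computes the closure of the pruned step relation, bounded by Boolean-typed instructions at one end and sinks at the other. A self-contained toy sketch of the same pattern on a small integer graph (the names and the example relation are illustrative only, not part of the library):

```
// toy edge relation 0 -> 1 -> ... -> 10
private predicate edge(int x, int y) { x in [0 .. 9] and y = x + 1 }

private predicate isStart(int x) { x = 0 }

private predicate isEnd(int x) { x = 5 }

// backwards pruning: nodes that can reach an end node
private predicate rev(int x) { isEnd(x) or exists(int y | rev(y) and edge(x, y)) }

// forwards pruning on top of it: nodes that are also reachable from a start node
private predicate fwd(int x) { rev(x) and (isStart(x) or exists(int y | fwd(y) and edge(y, x))) }

private predicate prunedEdge(int x, int y) { fwd(x) and fwd(y) and edge(x, y) }

// transitive closure computed only between the two bounding sets
private predicate startReachesEnd(int x, int y) =
  doublyBoundedFastTC(prunedEdge/2, isStart/1, isEnd/1)(x, y)

from int x, int y
where startReachesEnd(x, y)
select x, y
```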
private predicate isUnaryComparesEqLeft(Instruction instr) {
unary_compares_eq(_, instr.getAUse(), 0, _, _)
}

/**
* Holds if `left == right + k` is `areEqual` given that test is `testIsTrue`.
*
@@ -966,14 +1037,19 @@ private module Cached {
)
or
compares_eq(test.(BuiltinExpectCallValueNumber).getCondition(), left, right, k, areEqual, value)
}

private predicate isConvertedBool(Instruction instr) {
instr.getResultIRType() instanceof IRBooleanType
or
isConvertedBool(instr.(ConvertInstruction).getUnary())
or
isConvertedBool(instr.(BuiltinExpectCallInstruction).getCondition())
exists(Operand l, BooleanValue bv |
// 1. test = value -> int(l) = 0 is !bv
unary_compares_eq(test, l, 0, bv.getValue().booleanNot(), value) and
// 2. l = bv -> left + right is areEqual
compares_eq(valueNumber(BooleanInstruction<isUnaryComparesEqLeft/1>::get(l.getDef())), left,
right, k, areEqual, bv)
// We want this to hold:
// `test = value -> left + right is areEqual`
// Applying 2 we need to show:
// `test = value -> l = bv`
// And `l = bv` holds by `int(l) = 0 is !bv`
)
}

/**
@@ -1006,19 +1082,11 @@ private module Cached {
k = k1 + k2
)
or
exists(CompareValueNumber cmp, Operand left, Operand right, AbstractValue v |
test = cmp and
pragma[only_bind_into](cmp)
.hasOperands(pragma[only_bind_into](left), pragma[only_bind_into](right)) and
isConvertedBool(left.getDef()) and
int_value(right.getDef()) = 0 and
unary_compares_eq(valueNumberOfOperand(left), op, k, areEqual, v)
|
cmp instanceof CompareNEValueNumber and
v = value
or
cmp instanceof CompareEQValueNumber and
v.getDualValue() = value
// See argument for why this is correct in compares_eq
exists(Operand l, BooleanValue bv |
unary_compares_eq(test, l, 0, bv.getValue().booleanNot(), value) and
unary_compares_eq(valueNumber(BooleanInstruction<isUnaryComparesEqLeft/1>::get(l.getDef())),
op, k, areEqual, bv)
)
or
unary_compares_eq(test.(BuiltinExpectCallValueNumber).getCondition(), op, k, areEqual, value)
@@ -1116,70 +1184,26 @@ private module Cached {
)
}

private predicate isBuiltInExpectArg(Instruction instr) {
instr = any(BuiltinExpectCallInstruction buildinExpect).getArgument(0)
}

/** A call to the builtin operation `__builtin_expect`. */
private class BuiltinExpectCallInstruction extends CallInstruction {
BuiltinExpectCallInstruction() { this.getStaticCallTarget().hasName("__builtin_expect") }

/** Gets the condition of this call. */
Instruction getCondition() { result = this.getConditionOperand().getDef() }

Operand getConditionOperand() {
// The first parameter of `__builtin_expect` has type `long`. So we skip
// the conversion when inferring guards.
result = this.getArgument(0).(ConvertInstruction).getUnaryOperand()
Instruction getCondition() {
result = BooleanInstruction<isBuiltInExpectArg/1>::get(this.getArgument(0))
}
}
/**
* Holds if `left == right + k` is `areEqual` if `cmp` evaluates to `value`,
* and `cmp` is an instruction that compares the value of
* `__builtin_expect(left == right + k, _)` to `0`.
*/
private predicate builtin_expect_eq(
CompareValueNumber cmp, Operand left, Operand right, int k, boolean areEqual,
AbstractValue value
) {
exists(BuiltinExpectCallValueNumber call, Instruction const, AbstractValue innerValue |
int_value(const) = 0 and
cmp.hasOperands(call.getAUse(), const.getAUse()) and
compares_eq(call.getCondition(), left, right, k, areEqual, innerValue)
|
cmp instanceof CompareNEValueNumber and
value = innerValue
or
cmp instanceof CompareEQValueNumber and
value.getDualValue() = innerValue
)
}

private predicate complex_eq(
ValueNumber cmp, Operand left, Operand right, int k, boolean areEqual, AbstractValue value
) {
sub_eq(cmp, left, right, k, areEqual, value)
or
add_eq(cmp, left, right, k, areEqual, value)
or
builtin_expect_eq(cmp, left, right, k, areEqual, value)
}

/**
* Holds if `op == k` is `areEqual` if `cmp` evaluates to `value`, and `cmp` is
* an instruction that compares the value of `__builtin_expect(op == k, _)` to `0`.
*/
private predicate unary_builtin_expect_eq(
CompareValueNumber cmp, Operand op, int k, boolean areEqual, AbstractValue value
) {
exists(BuiltinExpectCallValueNumber call, Instruction const, AbstractValue innerValue |
int_value(const) = 0 and
cmp.hasOperands(call.getAUse(), const.getAUse()) and
unary_compares_eq(call.getCondition(), op, k, areEqual, innerValue)
|
cmp instanceof CompareNEValueNumber and
value = innerValue
or
cmp instanceof CompareEQValueNumber and
value.getDualValue() = innerValue
)
}

private predicate unary_complex_eq(
@@ -1188,8 +1212,6 @@ private module Cached {
unary_sub_eq(test, op, k, areEqual, value)
or
unary_add_eq(test, op, k, areEqual, value)
or
unary_builtin_expect_eq(test, op, k, areEqual, value)
}

/*
@@ -1215,6 +1237,15 @@ private module Cached {
exists(AbstractValue dual | value = dual.getDualValue() |
compares_lt(test.(LogicalNotValueNumber).getUnary(), left, right, k, isLt, dual)
)
or
compares_lt(test.(BuiltinExpectCallValueNumber).getCondition(), left, right, k, isLt, value)
or
// See argument for why this is correct in compares_eq
exists(Operand l, BooleanValue bv |
unary_compares_eq(test, l, 0, bv.getValue().booleanNot(), value) and
compares_lt(valueNumber(BooleanInstruction<isUnaryComparesEqLeft/1>::get(l.getDef())), left,
right, k, isLt, bv)
)
}

/** Holds if `op < k` evaluates to `isLt` given that `test` evaluates to `value`. */
@@ -1234,6 +1265,15 @@ private module Cached {
int_value(const) = k1 and
k = k1 + k2
)
or
compares_lt(test.(BuiltinExpectCallValueNumber).getCondition(), op, k, isLt, value)
or
// See argument for why this is correct in compares_eq
exists(Operand l, BooleanValue bv |
unary_compares_eq(test, l, 0, bv.getValue().booleanNot(), value) and
compares_lt(valueNumber(BooleanInstruction<isUnaryComparesEqLeft/1>::get(l.getDef())), op, k,
isLt, bv)
)
}

/** `(a < b + k) => (b > a - k) => (b >= a + (1-k))` */
@@ -15,6 +15,13 @@ class StandardSsa extends SsaHelper {
}

/**
* NOTE: If possible, prefer the SSA classes exposed by the new dataflow
* library:
* ```
* import semmle.code.cpp.dataflow.new.DataFlow
* // use `DataFlow::Ssa::Definition`
* ```
*
* A definition of one or more SSA variables, including phi node definitions.
* An _SSA variable_, as defined in the literature, is effectively the pair of
* an `SsaDefinition d` and a `StackVariable v`, written `(d, v)` in this
Some files were not shown because too many files have changed in this diff