mirror of
https://github.com/github/codeql.git
synced 2025-12-16 16:53:25 +01:00
Merge branch 'main' into redsun82/rust-mute-warnings-in-uncompiled-blocks
This commit is contained in:
3
.github/workflows/check-qldoc.yml
vendored
3
.github/workflows/check-qldoc.yml
vendored
@@ -30,7 +30,8 @@ jobs:
|
||||
run: |
|
||||
EXIT_CODE=0
|
||||
# TODO: remove the shared exception from the regex when coverage of qlpacks without dbschemes is supported
|
||||
changed_lib_packs="$(git diff --name-only --diff-filter=ACMRT HEAD^ HEAD | { grep -Po '^(?!(shared))[a-z]*/ql/lib' || true; } | sort -u)"
|
||||
# TODO: remove the actions exception once https://github.com/github/codeql-team/issues/3656 is fixed
|
||||
changed_lib_packs="$(git diff --name-only --diff-filter=ACMRT HEAD^ HEAD | { grep -Po '^(?!(shared|actions))[a-z]*/ql/lib' || true; } | sort -u)"
|
||||
for pack_dir in ${changed_lib_packs}; do
|
||||
lang="${pack_dir%/ql/lib}"
|
||||
codeql generate library-doc-coverage --output="${RUNNER_TEMP}/${lang}-current.txt" --dir="${pack_dir}"
|
||||
|
||||
2
.github/workflows/ql-for-ql-tests.yml
vendored
2
.github/workflows/ql-for-ql-tests.yml
vendored
@@ -40,7 +40,7 @@ jobs:
|
||||
ql/target
|
||||
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-qltest-cargo-${{ hashFiles('ql/rust-toolchain.toml', 'ql/**/Cargo.lock') }}
|
||||
- name: Check formatting
|
||||
run: cd ql; cargo fmt --all -- --check
|
||||
run: cd ql; cargo fmt -- --check
|
||||
- name: Build extractor
|
||||
run: |
|
||||
cd ql;
|
||||
|
||||
2
.github/workflows/ruby-build.yml
vendored
2
.github/workflows/ruby-build.yml
vendored
@@ -79,7 +79,7 @@ jobs:
|
||||
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-rust-cargo-${{ hashFiles('ruby/extractor/rust-toolchain.toml', 'ruby/extractor/**/Cargo.lock') }}
|
||||
- name: Check formatting
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
run: cd extractor && cargo fmt --all -- --check
|
||||
run: cd extractor && cargo fmt -- --check
|
||||
- name: Build
|
||||
if: steps.cache-extractor.outputs.cache-hit != 'true'
|
||||
run: cd extractor && cargo build --verbose
|
||||
|
||||
34
.github/workflows/rust.yml
vendored
34
.github/workflows/rust.yml
vendored
@@ -23,26 +23,48 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
rust-code:
|
||||
rust-ast-generator:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: rust/ast-generator
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Inject sources
|
||||
shell: bash
|
||||
run: |
|
||||
bazel run //rust/ast-generator:inject-sources
|
||||
- name: Format
|
||||
working-directory: rust/extractor
|
||||
shell: bash
|
||||
run: |
|
||||
cargo fmt --check
|
||||
- name: Compilation
|
||||
working-directory: rust/extractor
|
||||
shell: bash
|
||||
run: cargo check
|
||||
- name: Clippy
|
||||
working-directory: rust/extractor
|
||||
shell: bash
|
||||
run: |
|
||||
cargo clippy --fix
|
||||
git diff --exit-code
|
||||
cargo clippy --no-deps -- -D warnings
|
||||
rust-code:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: rust/extractor
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Format
|
||||
shell: bash
|
||||
run: |
|
||||
cargo fmt --check
|
||||
- name: Compilation
|
||||
shell: bash
|
||||
run: cargo check
|
||||
- name: Clippy
|
||||
shell: bash
|
||||
run: |
|
||||
cargo clippy --no-deps -- -D warnings
|
||||
rust-codegen:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
@@ -32,17 +32,17 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Check formatting
|
||||
run: cargo fmt --all -- --check
|
||||
run: cargo fmt -- --check
|
||||
- name: Run tests
|
||||
run: cargo test --verbose
|
||||
fmt:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Check formatting
|
||||
run: cargo fmt --check
|
||||
clippy:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Run clippy
|
||||
|
||||
1
.vscode/tasks.json
vendored
1
.vscode/tasks.json
vendored
@@ -81,6 +81,7 @@
|
||||
"description": "Language",
|
||||
"options":
|
||||
[
|
||||
"actions",
|
||||
"go",
|
||||
"java",
|
||||
"javascript",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
/actions/ @github/codeql-dynamic
|
||||
/cpp/ @github/codeql-c-analysis
|
||||
/csharp/ @github/codeql-csharp
|
||||
/csharp/autobuilder/Semmle.Autobuild.Cpp @github/codeql-c-extractor
|
||||
|
||||
203
Cargo.lock
generated
203
Cargo.lock
generated
@@ -92,9 +92,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.93"
|
||||
version = "1.0.94"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
|
||||
checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
|
||||
|
||||
[[package]]
|
||||
name = "argfile"
|
||||
@@ -168,9 +168,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.10.0"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c"
|
||||
checksum = "786a307d683a5bf92e6fd5fd69a7eb613751668d1d8d67d802846dfe367c62c8"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"serde",
|
||||
@@ -184,9 +184,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
|
||||
|
||||
[[package]]
|
||||
name = "bytemuck"
|
||||
version = "1.19.0"
|
||||
version = "1.20.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d"
|
||||
checksum = "8b37c88a63ffd85d15b406896cc343916d7cf57838a847b3a6f2ca5d39a5695a"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
@@ -205,9 +205,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cargo-platform"
|
||||
version = "0.1.8"
|
||||
version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc"
|
||||
checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -228,9 +228,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.1.37"
|
||||
version = "1.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40545c26d092346d8a8dab71ee48e7685a7a9cba76e634790c215b41a4a7b4cf"
|
||||
checksum = "c31a0499c1dc64f458ad13872de75c0eb7e3fdb0e67964610c914b034fc5956e"
|
||||
dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
@@ -291,7 +291,7 @@ dependencies = [
|
||||
"chalk-derive",
|
||||
"chalk-ir",
|
||||
"ena",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itertools 0.12.1",
|
||||
"petgraph",
|
||||
"rustc-hash 1.1.0",
|
||||
@@ -300,9 +300,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.38"
|
||||
version = "0.4.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"
|
||||
checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
@@ -315,9 +315,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.20"
|
||||
version = "4.5.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8"
|
||||
checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -325,9 +325,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.20"
|
||||
version = "4.5.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54"
|
||||
checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -349,9 +349,16 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.2"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
|
||||
checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
|
||||
|
||||
[[package]]
|
||||
name = "codeql-autobuilder-rust"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"codeql-extractor",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codeql-extractor"
|
||||
@@ -464,18 +471,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-channel"
|
||||
version = "0.5.13"
|
||||
version = "0.5.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
|
||||
checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-deque"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
|
||||
checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
|
||||
dependencies = [
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
@@ -492,9 +499,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.20"
|
||||
version = "0.8.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
|
||||
checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
|
||||
|
||||
[[package]]
|
||||
name = "darling"
|
||||
@@ -685,9 +692,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flate2"
|
||||
version = "1.0.34"
|
||||
version = "1.0.35"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0"
|
||||
checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"miniz_oxide",
|
||||
@@ -749,7 +756,7 @@ dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr",
|
||||
"log",
|
||||
"regex-automata 0.4.8",
|
||||
"regex-automata 0.4.9",
|
||||
"regex-syntax 0.8.5",
|
||||
]
|
||||
|
||||
@@ -767,9 +774,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.1"
|
||||
version = "0.15.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3"
|
||||
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
@@ -803,11 +810,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
||||
|
||||
[[package]]
|
||||
name = "home"
|
||||
version = "0.5.9"
|
||||
version = "0.5.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
|
||||
checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -852,12 +859,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.6.0"
|
||||
version = "2.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
|
||||
checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.15.1",
|
||||
"hashbrown 0.15.2",
|
||||
"serde",
|
||||
]
|
||||
|
||||
@@ -924,9 +931,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.11"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
|
||||
checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
|
||||
|
||||
[[package]]
|
||||
name = "jod-thread"
|
||||
@@ -936,10 +943,11 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
|
||||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.72"
|
||||
version = "0.3.76"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9"
|
||||
checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
@@ -977,9 +985,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.162"
|
||||
version = "0.2.169"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "18d287de67fe55fd7e1581fe933d965a5a9477b38e949cfa9f8574ef01506398"
|
||||
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
@@ -1050,9 +1058,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1"
|
||||
checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394"
|
||||
dependencies = [
|
||||
"adler2",
|
||||
]
|
||||
@@ -1237,7 +1245,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db"
|
||||
dependencies = [
|
||||
"fixedbitset",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1263,9 +1271,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.89"
|
||||
version = "1.0.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
|
||||
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
@@ -1430,7 +1438,7 @@ dependencies = [
|
||||
"either",
|
||||
"fst",
|
||||
"hashbrown 0.14.5",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itertools 0.12.1",
|
||||
"la-arena",
|
||||
"ra-ap-rustc_abi",
|
||||
@@ -1496,7 +1504,7 @@ dependencies = [
|
||||
"cov-mark",
|
||||
"either",
|
||||
"ena",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itertools 0.12.1",
|
||||
"la-arena",
|
||||
"nohash-hasher",
|
||||
@@ -1533,7 +1541,7 @@ dependencies = [
|
||||
"crossbeam-channel",
|
||||
"either",
|
||||
"fst",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itertools 0.12.1",
|
||||
"line-index",
|
||||
"memchr",
|
||||
@@ -1642,7 +1650,7 @@ version = "0.0.248"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "79d9595e05b422f3258c51f9a529129c9a59f99037deac652ff2cdd0c97e0bda"
|
||||
dependencies = [
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"ra_ap_base_db",
|
||||
"ra_ap_intern",
|
||||
"ra_ap_paths",
|
||||
@@ -1698,7 +1706,7 @@ version = "0.0.248"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "504314128b0998f54d8e253a328a53e2a697f579f8d1f194b52f77cd8a540e36"
|
||||
dependencies = [
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itertools 0.12.1",
|
||||
"lock_api",
|
||||
"oorandom",
|
||||
@@ -1761,7 +1769,7 @@ checksum = "d5af8d681a9fd408be1b17f3b150eb98907fb72b9b44000ada9c3e9b6c059188"
|
||||
dependencies = [
|
||||
"cov-mark",
|
||||
"either",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itertools 0.12.1",
|
||||
"ra-ap-rustc_lexer",
|
||||
"ra_ap_parser",
|
||||
@@ -1820,7 +1828,7 @@ checksum = "168a9378dd3f185ae94dda9c35e4d468a8ff105c8f4bb57e17f6e2312d84d1a8"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"fst",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"nohash-hasher",
|
||||
"ra_ap_paths",
|
||||
"ra_ap_stdx",
|
||||
@@ -1897,9 +1905,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.7"
|
||||
version = "0.5.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f"
|
||||
checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
]
|
||||
@@ -1912,7 +1920,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-automata 0.4.8",
|
||||
"regex-automata 0.4.9",
|
||||
"regex-syntax 0.8.5",
|
||||
]
|
||||
|
||||
@@ -1927,9 +1935,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.8"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
|
||||
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
@@ -2019,27 +2027,27 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "1.0.23"
|
||||
version = "1.0.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
|
||||
checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.214"
|
||||
version = "1.0.216"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
|
||||
checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.214"
|
||||
version = "1.0.216"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
|
||||
checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2068,7 +2076,7 @@ dependencies = [
|
||||
"chrono",
|
||||
"hex",
|
||||
"indexmap 1.9.3",
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
@@ -2094,7 +2102,7 @@ version = "0.9.34+deprecated"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
|
||||
dependencies = [
|
||||
"indexmap 2.6.0",
|
||||
"indexmap 2.7.0",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
@@ -2171,9 +2179,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.87"
|
||||
version = "2.0.90"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
|
||||
checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2238,9 +2246,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.3.36"
|
||||
version = "0.3.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
|
||||
checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21"
|
||||
dependencies = [
|
||||
"deranged",
|
||||
"itoa",
|
||||
@@ -2259,9 +2267,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
|
||||
|
||||
[[package]]
|
||||
name = "time-macros"
|
||||
version = "0.2.18"
|
||||
version = "0.2.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
|
||||
checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de"
|
||||
dependencies = [
|
||||
"num-conv",
|
||||
"time-core",
|
||||
@@ -2269,9 +2277,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.40"
|
||||
version = "0.1.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
|
||||
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
|
||||
dependencies = [
|
||||
"pin-project-lite",
|
||||
"tracing-attributes",
|
||||
@@ -2280,9 +2288,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-attributes"
|
||||
version = "0.1.27"
|
||||
version = "0.1.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
|
||||
checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2291,9 +2299,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.32"
|
||||
version = "0.1.33"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
|
||||
checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
@@ -2312,9 +2320,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-subscriber"
|
||||
version = "0.3.18"
|
||||
version = "0.3.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
|
||||
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
|
||||
dependencies = [
|
||||
"matchers",
|
||||
"nu-ansi-term",
|
||||
@@ -2330,9 +2338,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter"
|
||||
version = "0.24.4"
|
||||
version = "0.24.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b67baf55e7e1b6806063b1e51041069c90afff16afcbbccd278d899f9d84bca4"
|
||||
checksum = "8ac95b18f0f727aaaa012bd5179a1916706ee3ed071920fdbda738750b0c0bf5"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"regex",
|
||||
@@ -2363,9 +2371,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-language"
|
||||
version = "0.1.2"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8ddffe35a0e5eeeadf13ff7350af564c6e73993a24db62caee1822b185c2600"
|
||||
checksum = "c199356c799a8945965bb5f2c55b2ad9d9aa7c4b4f6e587fe9dea0bc715e5f9c"
|
||||
|
||||
[[package]]
|
||||
name = "tree-sitter-ql"
|
||||
@@ -2420,9 +2428,9 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.13"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
|
||||
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-properties"
|
||||
@@ -2478,9 +2486,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.95"
|
||||
version = "0.2.99"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e"
|
||||
checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"once_cell",
|
||||
@@ -2489,13 +2497,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.95"
|
||||
version = "0.2.99"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358"
|
||||
checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -2504,9 +2511,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.95"
|
||||
version = "0.2.99"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56"
|
||||
checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
@@ -2514,9 +2521,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.95"
|
||||
version = "0.2.99"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
|
||||
checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2527,9 +2534,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.95"
|
||||
version = "0.2.99"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d"
|
||||
checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
|
||||
@@ -8,6 +8,7 @@ members = [
|
||||
"rust/extractor",
|
||||
"rust/extractor/macros",
|
||||
"rust/ast-generator",
|
||||
"rust/autobuild",
|
||||
]
|
||||
|
||||
[patch.crates-io]
|
||||
|
||||
17
MODULE.bazel
17
MODULE.bazel
@@ -70,7 +70,22 @@ use_repo(py_deps, "vendor__anyhow-1.0.44", "vendor__cc-1.0.70", "vendor__clap-2.
|
||||
# deps for ruby+rust
|
||||
# keep in sync by running `misc/bazel/3rdparty/update_cargo_deps.sh`
|
||||
tree_sitter_extractors_deps = use_extension("//misc/bazel/3rdparty:tree_sitter_extractors_extension.bzl", "r")
|
||||
use_repo(tree_sitter_extractors_deps, "vendor__anyhow-1.0.93", "vendor__argfile-0.2.1", "vendor__chrono-0.4.38", "vendor__clap-4.5.20", "vendor__dunce-1.0.5", "vendor__either-1.13.0", "vendor__encoding-0.2.33", "vendor__figment-0.10.19", "vendor__flate2-1.0.34", "vendor__glob-0.3.1", "vendor__globset-0.4.15", "vendor__itertools-0.12.1", "vendor__itertools-0.13.0", "vendor__lazy_static-1.5.0", "vendor__log-0.4.22", "vendor__num-traits-0.2.19", "vendor__num_cpus-1.16.0", "vendor__proc-macro2-1.0.89", "vendor__quote-1.0.37", "vendor__ra_ap_base_db-0.0.248", "vendor__ra_ap_cfg-0.0.248", "vendor__ra_ap_hir-0.0.248", "vendor__ra_ap_hir_def-0.0.248", "vendor__ra_ap_hir_expand-0.0.248", "vendor__ra_ap_ide_db-0.0.248", "vendor__ra_ap_intern-0.0.248", "vendor__ra_ap_load-cargo-0.0.248", "vendor__ra_ap_parser-0.0.248", "vendor__ra_ap_paths-0.0.248", "vendor__ra_ap_project_model-0.0.248", "vendor__ra_ap_span-0.0.248", "vendor__ra_ap_stdx-0.0.248", "vendor__ra_ap_syntax-0.0.248", "vendor__ra_ap_vfs-0.0.248", "vendor__rand-0.8.5", "vendor__rayon-1.10.0", "vendor__regex-1.11.1", "vendor__serde-1.0.214", "vendor__serde_json-1.0.133", "vendor__serde_with-3.11.0", "vendor__stderrlog-0.6.0", "vendor__syn-2.0.87", "vendor__tracing-0.1.40", "vendor__tracing-subscriber-0.3.18", "vendor__tree-sitter-0.24.4", "vendor__tree-sitter-embedded-template-0.23.2", "vendor__tree-sitter-json-0.24.8", "vendor__tree-sitter-ql-0.23.1", "vendor__tree-sitter-ruby-0.23.1", "vendor__triomphe-0.1.14", "vendor__ungrammar-1.16.1")
|
||||
use_repo(tree_sitter_extractors_deps, "vendor__anyhow-1.0.94", "vendor__argfile-0.2.1", "vendor__chrono-0.4.39", "vendor__clap-4.5.23", "vendor__dunce-1.0.5", "vendor__either-1.13.0", "vendor__encoding-0.2.33", "vendor__figment-0.10.19", "vendor__flate2-1.0.35", "vendor__glob-0.3.1", "vendor__globset-0.4.15", "vendor__itertools-0.12.1", "vendor__itertools-0.13.0", "vendor__lazy_static-1.5.0", "vendor__log-0.4.22", "vendor__num-traits-0.2.19", "vendor__num_cpus-1.16.0", "vendor__proc-macro2-1.0.92", "vendor__quote-1.0.37", "vendor__ra_ap_base_db-0.0.248", "vendor__ra_ap_cfg-0.0.248", "vendor__ra_ap_hir-0.0.248", "vendor__ra_ap_hir_def-0.0.248", "vendor__ra_ap_hir_expand-0.0.248", "vendor__ra_ap_ide_db-0.0.248", "vendor__ra_ap_intern-0.0.248", "vendor__ra_ap_load-cargo-0.0.248", "vendor__ra_ap_parser-0.0.248", "vendor__ra_ap_paths-0.0.248", "vendor__ra_ap_project_model-0.0.248", "vendor__ra_ap_span-0.0.248", "vendor__ra_ap_stdx-0.0.248", "vendor__ra_ap_syntax-0.0.248", "vendor__ra_ap_vfs-0.0.248", "vendor__rand-0.8.5", "vendor__rayon-1.10.0", "vendor__regex-1.11.1", "vendor__serde-1.0.216", "vendor__serde_json-1.0.133", "vendor__serde_with-3.11.0", "vendor__stderrlog-0.6.0", "vendor__syn-2.0.90", "vendor__tracing-0.1.41", "vendor__tracing-subscriber-0.3.19", "vendor__tree-sitter-0.24.5", "vendor__tree-sitter-embedded-template-0.23.2", "vendor__tree-sitter-json-0.24.8", "vendor__tree-sitter-ql-0.23.1", "vendor__tree-sitter-ruby-0.23.1", "vendor__triomphe-0.1.14", "vendor__ungrammar-1.16.1")
|
||||
|
||||
http_archive = use_repo_rule("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
|
||||
# rust-analyzer sources needed by the rust ast-generator (see `rust/ast-generator/README.md`)
|
||||
http_archive(
|
||||
name = "rust-analyzer-src",
|
||||
build_file = "//rust/ast-generator:BUILD.rust-analyzer-src.bazel",
|
||||
integrity = "sha256-jl4KJmZku+ilMLnuX2NU+qa1v10IauSiDiz23sZo360=",
|
||||
patch_args = ["-p1"],
|
||||
patches = [
|
||||
"//rust/ast-generator:patches/rust-analyzer.patch",
|
||||
],
|
||||
strip_prefix = "rust-analyzer-2024-12-16",
|
||||
url = "https://github.com/rust-lang/rust-analyzer/archive/refs/tags/2024-12-16.tar.gz",
|
||||
)
|
||||
|
||||
dotnet = use_extension("@rules_dotnet//dotnet:extensions.bzl", "dotnet")
|
||||
dotnet.toolchain(dotnet_version = "9.0.100")
|
||||
|
||||
@@ -1 +1 @@
|
||||
predicate placeholder(int x) { x = 0 }
|
||||
import codeql.actions.Ast
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
category: feature
|
||||
---
|
||||
* Initial public preview release
|
||||
4
actions/ql/lib/codeql-pack.lock.yml
Normal file
4
actions/ql/lib/codeql-pack.lock.yml
Normal file
@@ -0,0 +1,4 @@
|
||||
---
|
||||
lockVersion: 1.0.0
|
||||
dependencies: {}
|
||||
compiled: false
|
||||
98
actions/ql/lib/codeql/Locations.qll
Normal file
98
actions/ql/lib/codeql/Locations.qll
Normal file
@@ -0,0 +1,98 @@
|
||||
/** Provides classes for working with locations. */
|
||||
|
||||
import files.FileSystem
|
||||
import codeql.actions.ast.internal.Ast
|
||||
|
||||
bindingset[loc]
|
||||
pragma[inline_late]
|
||||
private string locationToString(Location loc) {
|
||||
exists(string filepath, int startline, int startcolumn, int endline, int endcolumn |
|
||||
loc.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn) and
|
||||
result = filepath + "@" + startline + ":" + startcolumn + ":" + endline + ":" + endcolumn
|
||||
)
|
||||
}
|
||||
|
||||
newtype TLocation =
|
||||
TBaseLocation(string filepath, int startline, int startcolumn, int endline, int endcolumn) {
|
||||
exists(File file |
|
||||
file.getAbsolutePath() = filepath and
|
||||
locations_default(_, file, startline, startcolumn, endline, endcolumn)
|
||||
)
|
||||
or
|
||||
exists(ExpressionImpl e |
|
||||
e.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
)
|
||||
or
|
||||
filepath = "" and startline = 0 and startcolumn = 0 and endline = 0 and endcolumn = 0
|
||||
}
|
||||
|
||||
/**
|
||||
* A location as given by a file, a start line, a start column,
|
||||
* an end line, and an end column.
|
||||
*
|
||||
* For more information about locations see [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
class Location extends TLocation, TBaseLocation {
|
||||
string filepath;
|
||||
int startline;
|
||||
int startcolumn;
|
||||
int endline;
|
||||
int endcolumn;
|
||||
|
||||
Location() { this = TBaseLocation(filepath, startline, startcolumn, endline, endcolumn) }
|
||||
|
||||
/** Gets the file for this location. */
|
||||
File getFile() {
|
||||
exists(File file |
|
||||
file.getAbsolutePath() = filepath and
|
||||
result = file
|
||||
)
|
||||
}
|
||||
|
||||
/** Gets the 1-based line number (inclusive) where this location starts. */
|
||||
int getStartLine() { result = startline }
|
||||
|
||||
/** Gets the 1-based column number (inclusive) where this location starts. */
|
||||
int getStartColumn() { result = startcolumn }
|
||||
|
||||
/** Gets the 1-based line number (inclusive) where this.getLocationDefault() location ends. */
|
||||
int getEndLine() { result = endline }
|
||||
|
||||
/** Gets the 1-based column number (inclusive) where this.getLocationDefault() location ends. */
|
||||
int getEndColumn() { result = endcolumn }
|
||||
|
||||
/** Gets the number of lines covered by this location. */
|
||||
int getNumLines() { result = endline - startline + 1 }
|
||||
|
||||
/** Gets a textual representation of this element. */
|
||||
pragma[inline]
|
||||
string toString() { result = locationToString(this) }
|
||||
|
||||
/**
|
||||
* Holds if this element is at the specified location.
|
||||
* The location spans column `startcolumn` of line `startline` to
|
||||
* column `endcolumn` of line `endline` in file `filepath`.
|
||||
* For more information, see
|
||||
* [Providing locations in CodeQL queries](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
predicate hasLocationInfo(string p, int sl, int sc, int el, int ec) {
|
||||
p = filepath and
|
||||
sl = startline and
|
||||
sc = startcolumn and
|
||||
el = endline and
|
||||
ec = endcolumn
|
||||
}
|
||||
|
||||
/** Holds if this location starts strictly before the specified location. */
|
||||
pragma[inline]
|
||||
predicate strictlyBefore(Location other) {
|
||||
this.getStartLine() < other.getStartLine()
|
||||
or
|
||||
this.getStartLine() = other.getStartLine() and this.getStartColumn() < other.getStartColumn()
|
||||
}
|
||||
}
|
||||
|
||||
/** An entity representing an empty location. */
|
||||
class EmptyLocation extends Location {
|
||||
EmptyLocation() { this.hasLocationInfo("", 0, 0, 0, 0) }
|
||||
}
|
||||
400
actions/ql/lib/codeql/actions/Ast.qll
Normal file
400
actions/ql/lib/codeql/actions/Ast.qll
Normal file
@@ -0,0 +1,400 @@
|
||||
private import codeql.actions.ast.internal.Ast
|
||||
private import codeql.Locations
|
||||
import codeql.actions.Helper
|
||||
|
||||
class AstNode instanceof AstNodeImpl {
|
||||
AstNode getAChildNode() { result = super.getAChildNode() }
|
||||
|
||||
AstNode getParentNode() { result = super.getParentNode() }
|
||||
|
||||
string getAPrimaryQlClass() { result = super.getAPrimaryQlClass() }
|
||||
|
||||
Location getLocation() { result = super.getLocation() }
|
||||
|
||||
string toString() { result = super.toString() }
|
||||
|
||||
Step getEnclosingStep() { result = super.getEnclosingStep() }
|
||||
|
||||
Job getEnclosingJob() { result = super.getEnclosingJob() }
|
||||
|
||||
Event getATriggerEvent() { result = super.getATriggerEvent() }
|
||||
|
||||
Workflow getEnclosingWorkflow() { result = super.getEnclosingWorkflow() }
|
||||
|
||||
CompositeAction getEnclosingCompositeAction() { result = super.getEnclosingCompositeAction() }
|
||||
|
||||
Expression getInScopeEnvVarExpr(string name) { result = super.getInScopeEnvVarExpr(name) }
|
||||
|
||||
ScalarValue getInScopeDefaultValue(string name, string prop) {
|
||||
result = super.getInScopeDefaultValue(name, prop)
|
||||
}
|
||||
}
|
||||
|
||||
class ScalarValue extends AstNode instanceof ScalarValueImpl {
|
||||
string getValue() { result = super.getValue() }
|
||||
}
|
||||
|
||||
class Expression extends AstNode instanceof ExpressionImpl {
|
||||
string expression;
|
||||
string rawExpression;
|
||||
|
||||
Expression() {
|
||||
expression = this.getExpression() and
|
||||
rawExpression = this.getRawExpression()
|
||||
}
|
||||
|
||||
string getExpression() { result = expression }
|
||||
|
||||
string getRawExpression() { result = rawExpression }
|
||||
|
||||
string getNormalizedExpression() { result = normalizeExpr(expression) }
|
||||
}
|
||||
|
||||
/** A common class for `env` in workflow, job or step. */
|
||||
abstract class Env extends AstNode instanceof EnvImpl {
|
||||
/** Gets an environment variable value given its name. */
|
||||
ScalarValueImpl getEnvVarValue(string name) { result = super.getEnvVarValue(name) }
|
||||
|
||||
/** Gets an environment variable value. */
|
||||
ScalarValueImpl getAnEnvVarValue() { result = super.getAnEnvVarValue() }
|
||||
|
||||
/** Gets an environment variable expressin given its name. */
|
||||
ExpressionImpl getEnvVarExpr(string name) { result = super.getEnvVarExpr(name) }
|
||||
|
||||
/** Gets an environment variable expression. */
|
||||
ExpressionImpl getAnEnvVarExpr() { result = super.getAnEnvVarExpr() }
|
||||
}
|
||||
|
||||
/**
|
||||
* A custom composite action. This is a mapping at the top level of an Actions YAML action file.
|
||||
* See https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions.
|
||||
*/
|
||||
class CompositeAction extends AstNode instanceof CompositeActionImpl {
|
||||
Runs getRuns() { result = super.getRuns() }
|
||||
|
||||
Outputs getOutputs() { result = super.getOutputs() }
|
||||
|
||||
Expression getAnOutputExpr() { result = super.getAnOutputExpr() }
|
||||
|
||||
Expression getOutputExpr(string outputName) { result = super.getOutputExpr(outputName) }
|
||||
|
||||
Input getAnInput() { result = super.getAnInput() }
|
||||
|
||||
Input getInput(string inputName) { result = super.getInput(inputName) }
|
||||
|
||||
LocalJob getACallerJob() { result = super.getACallerJob() }
|
||||
|
||||
UsesStep getACallerStep() { result = super.getACallerStep() }
|
||||
|
||||
predicate isPrivileged() { super.isPrivileged() }
|
||||
}
|
||||
|
||||
/**
|
||||
* An Actions workflow. This is a mapping at the top level of an Actions YAML workflow file.
|
||||
* See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions.
|
||||
*/
|
||||
class Workflow extends AstNode instanceof WorkflowImpl {
|
||||
Env getEnv() { result = super.getEnv() }
|
||||
|
||||
string getName() { result = super.getName() }
|
||||
|
||||
Job getAJob() { result = super.getAJob() }
|
||||
|
||||
Job getJob(string jobId) { result = super.getJob(jobId) }
|
||||
|
||||
Permissions getPermissions() { result = super.getPermissions() }
|
||||
|
||||
Strategy getStrategy() { result = super.getStrategy() }
|
||||
|
||||
On getOn() { result = super.getOn() }
|
||||
}
|
||||
|
||||
class ReusableWorkflow extends Workflow instanceof ReusableWorkflowImpl {
|
||||
Outputs getOutputs() { result = super.getOutputs() }
|
||||
|
||||
Expression getAnOutputExpr() { result = super.getAnOutputExpr() }
|
||||
|
||||
Expression getOutputExpr(string outputName) { result = super.getOutputExpr(outputName) }
|
||||
|
||||
Input getAnInput() { result = super.getAnInput() }
|
||||
|
||||
Input getInput(string inputName) { result = super.getInput(inputName) }
|
||||
|
||||
ExternalJob getACaller() { result = super.getACaller() }
|
||||
}
|
||||
|
||||
class Input extends AstNode instanceof InputImpl { }
|
||||
|
||||
class Default extends AstNode instanceof DefaultsImpl {
|
||||
ScalarValue getValue(string name, string prop) { result = super.getValue(name, prop) }
|
||||
}
|
||||
|
||||
class Outputs extends AstNode instanceof OutputsImpl {
|
||||
Expression getAnOutputExpr() { result = super.getAnOutputExpr() }
|
||||
|
||||
Expression getOutputExpr(string outputName) { result = super.getOutputExpr(outputName) }
|
||||
|
||||
override string toString() { result = "Job outputs node" }
|
||||
}
|
||||
|
||||
class Permissions extends AstNode instanceof PermissionsImpl {
|
||||
bindingset[perm]
|
||||
string getPermission(string perm) { result = super.getPermission(perm) }
|
||||
|
||||
string getAPermission() { result = super.getAPermission() }
|
||||
}
|
||||
|
||||
class Strategy extends AstNode instanceof StrategyImpl {
|
||||
Expression getMatrixVarExpr(string varName) { result = super.getMatrixVarExpr(varName) }
|
||||
|
||||
Expression getAMatrixVarExpr() { result = super.getAMatrixVarExpr() }
|
||||
}
|
||||
|
||||
/**
|
||||
* https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idneeds
|
||||
*/
|
||||
class Needs extends AstNode instanceof NeedsImpl {
|
||||
Job getANeededJob() { result = super.getANeededJob() }
|
||||
}
|
||||
|
||||
class On extends AstNode instanceof OnImpl {
|
||||
Event getAnEvent() { result = super.getAnEvent() }
|
||||
}
|
||||
|
||||
class Event extends AstNode instanceof EventImpl {
|
||||
string getName() { result = super.getName() }
|
||||
|
||||
string getAnActivityType() { result = super.getAnActivityType() }
|
||||
|
||||
string getAPropertyValue(string prop) { result = super.getAPropertyValue(prop) }
|
||||
|
||||
predicate hasProperty(string prop) { super.hasProperty(prop) }
|
||||
|
||||
predicate isExternallyTriggerable() { super.isExternallyTriggerable() }
|
||||
|
||||
predicate isPrivileged() { super.isPrivileged() }
|
||||
}
|
||||
|
||||
/**
|
||||
* An Actions job within a workflow.
|
||||
* See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobs.
|
||||
*/
|
||||
abstract class Job extends AstNode instanceof JobImpl {
|
||||
string getId() { result = super.getId() }
|
||||
|
||||
Workflow getWorkflow() { result = super.getWorkflow() }
|
||||
|
||||
Job getANeededJob() { result = super.getANeededJob() }
|
||||
|
||||
Outputs getOutputs() { result = super.getOutputs() }
|
||||
|
||||
Expression getAnOutputExpr() { result = super.getAnOutputExpr() }
|
||||
|
||||
Expression getOutputExpr(string outputName) { result = super.getOutputExpr(outputName) }
|
||||
|
||||
Env getEnv() { result = super.getEnv() }
|
||||
|
||||
If getIf() { result = super.getIf() }
|
||||
|
||||
Environment getEnvironment() { result = super.getEnvironment() }
|
||||
|
||||
Permissions getPermissions() { result = super.getPermissions() }
|
||||
|
||||
Strategy getStrategy() { result = super.getStrategy() }
|
||||
|
||||
string getARunsOnLabel() { result = super.getARunsOnLabel() }
|
||||
|
||||
predicate isPrivileged() { super.isPrivileged() }
|
||||
|
||||
predicate isPrivilegedExternallyTriggerable(Event event) {
|
||||
super.isPrivilegedExternallyTriggerable(event)
|
||||
}
|
||||
}
|
||||
|
||||
abstract class StepsContainer extends AstNode instanceof StepsContainerImpl {
|
||||
Step getAStep() { result = super.getAStep() }
|
||||
|
||||
Step getStep(int i) { result = super.getStep(i) }
|
||||
}
|
||||
|
||||
/**
|
||||
* An `runs` mapping in a custom composite action YAML.
|
||||
* See https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#runs
|
||||
*/
|
||||
class Runs extends StepsContainer instanceof RunsImpl {
|
||||
CompositeAction getAction() { result = super.getAction() }
|
||||
}
|
||||
|
||||
/**
|
||||
* An Actions job within a workflow which is composed of steps.
|
||||
* See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobs.
|
||||
*/
|
||||
class LocalJob extends Job, StepsContainer instanceof LocalJobImpl { }
|
||||
|
||||
/**
|
||||
* A step within an Actions job.
|
||||
* See https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idsteps.
|
||||
*/
|
||||
class Step extends AstNode instanceof StepImpl {
|
||||
string getId() { result = super.getId() }
|
||||
|
||||
Env getEnv() { result = super.getEnv() }
|
||||
|
||||
If getIf() { result = super.getIf() }
|
||||
|
||||
StepsContainer getContainer() { result = super.getContainer() }
|
||||
|
||||
Step getNextStep() { result = super.getNextStep() }
|
||||
|
||||
Step getAFollowingStep() { result = super.getAFollowingStep() }
|
||||
}
|
||||
|
||||
/**
|
||||
* An If node representing a conditional statement.
|
||||
*/
|
||||
class If extends AstNode instanceof IfImpl {
|
||||
string getCondition() { result = super.getCondition() }
|
||||
|
||||
Expression getConditionExpr() { result = super.getConditionExpr() }
|
||||
|
||||
string getConditionStyle() { result = super.getConditionStyle() }
|
||||
}
|
||||
|
||||
/**
|
||||
* An Environemnt node representing a deployment environment.
|
||||
*/
|
||||
class Environment extends AstNode instanceof EnvironmentImpl {
|
||||
string getName() { result = super.getName() }
|
||||
|
||||
Expression getNameExpr() { result = super.getNameExpr() }
|
||||
}
|
||||
|
||||
abstract class Uses extends AstNode instanceof UsesImpl {
|
||||
string getCallee() { result = super.getCallee() }
|
||||
|
||||
ScalarValue getCalleeNode() { result = super.getCalleeNode() }
|
||||
|
||||
string getVersion() { result = super.getVersion() }
|
||||
|
||||
int getMajorVersion() { result = super.getMajorVersion() }
|
||||
|
||||
string getArgument(string argName) { result = super.getArgument(argName) }
|
||||
|
||||
Expression getArgumentExpr(string argName) { result = super.getArgumentExpr(argName) }
|
||||
}
|
||||
|
||||
class UsesStep extends Step, Uses instanceof UsesStepImpl { }
|
||||
|
||||
class ExternalJob extends Job, Uses instanceof ExternalJobImpl { }
|
||||
|
||||
/**
|
||||
* A `run` field within an Actions job step, which runs command-line programs using an operating system shell.
|
||||
* See https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepsrun.
|
||||
*/
|
||||
class Run extends Step instanceof RunImpl {
|
||||
ShellScript getScript() { result = super.getScript() }
|
||||
|
||||
Expression getAnScriptExpr() { result = super.getAnScriptExpr() }
|
||||
|
||||
string getWorkingDirectory() { result = super.getWorkingDirectory() }
|
||||
|
||||
string getShell() { result = super.getShell() }
|
||||
}
|
||||
|
||||
class ShellScript extends ScalarValueImpl instanceof ShellScriptImpl {
|
||||
string getRawScript() { result = super.getRawScript() }
|
||||
|
||||
string getStmt(int i) { result = super.getStmt(i) }
|
||||
|
||||
string getAStmt() { result = super.getAStmt() }
|
||||
|
||||
string getCommand(int i) { result = super.getCommand(i) }
|
||||
|
||||
string getACommand() { result = super.getACommand() }
|
||||
|
||||
string getFileReadCommand(int i) { result = super.getFileReadCommand(i) }
|
||||
|
||||
string getAFileReadCommand() { result = super.getAFileReadCommand() }
|
||||
|
||||
predicate getAssignment(int i, string name, string data) { super.getAssignment(i, name, data) }
|
||||
|
||||
predicate getAnAssignment(string name, string data) { super.getAnAssignment(name, data) }
|
||||
|
||||
predicate getAWriteToGitHubEnv(string name, string data) {
|
||||
super.getAWriteToGitHubEnv(name, data)
|
||||
}
|
||||
|
||||
predicate getAWriteToGitHubOutput(string name, string data) {
|
||||
super.getAWriteToGitHubOutput(name, data)
|
||||
}
|
||||
|
||||
predicate getAWriteToGitHubPath(string data) { super.getAWriteToGitHubPath(data) }
|
||||
|
||||
predicate getAnEnvReachingGitHubOutputWrite(string var, string output_field) {
|
||||
super.getAnEnvReachingGitHubOutputWrite(var, output_field)
|
||||
}
|
||||
|
||||
predicate getACmdReachingGitHubOutputWrite(string cmd, string output_field) {
|
||||
super.getACmdReachingGitHubOutputWrite(cmd, output_field)
|
||||
}
|
||||
|
||||
predicate getAnEnvReachingGitHubEnvWrite(string var, string output_field) {
|
||||
super.getAnEnvReachingGitHubEnvWrite(var, output_field)
|
||||
}
|
||||
|
||||
predicate getACmdReachingGitHubEnvWrite(string cmd, string output_field) {
|
||||
super.getACmdReachingGitHubEnvWrite(cmd, output_field)
|
||||
}
|
||||
|
||||
predicate getAnEnvReachingGitHubPathWrite(string var) {
|
||||
super.getAnEnvReachingGitHubPathWrite(var)
|
||||
}
|
||||
|
||||
predicate getACmdReachingGitHubPathWrite(string cmd) { super.getACmdReachingGitHubPathWrite(cmd) }
|
||||
|
||||
predicate getAnEnvReachingArgumentInjectionSink(string var, string command, string argument) {
|
||||
super.getAnEnvReachingArgumentInjectionSink(var, command, argument)
|
||||
}
|
||||
|
||||
predicate getACmdReachingArgumentInjectionSink(string cmd, string command, string argument) {
|
||||
super.getACmdReachingArgumentInjectionSink(cmd, command, argument)
|
||||
}
|
||||
|
||||
predicate fileToGitHubEnv(string path) { super.fileToGitHubEnv(path) }
|
||||
|
||||
predicate fileToGitHubOutput(string path) { super.fileToGitHubOutput(path) }
|
||||
|
||||
predicate fileToGitHubPath(string path) { super.fileToGitHubPath(path) }
|
||||
}
|
||||
|
||||
abstract class SimpleReferenceExpression extends AstNode instanceof SimpleReferenceExpressionImpl {
|
||||
string getFieldName() { result = super.getFieldName() }
|
||||
|
||||
AstNode getTarget() { result = super.getTarget() }
|
||||
}
|
||||
|
||||
class JsonReferenceExpression extends AstNode instanceof JsonReferenceExpressionImpl {
|
||||
string getAccessPath() { result = super.getAccessPath() }
|
||||
|
||||
string getInnerExpression() { result = super.getInnerExpression() }
|
||||
}
|
||||
|
||||
class GitHubExpression extends SimpleReferenceExpression instanceof GitHubExpressionImpl { }
|
||||
|
||||
class SecretsExpression extends SimpleReferenceExpression instanceof SecretsExpressionImpl { }
|
||||
|
||||
class StepsExpression extends SimpleReferenceExpression instanceof StepsExpressionImpl {
|
||||
string getStepId() { result = super.getStepId() }
|
||||
}
|
||||
|
||||
class NeedsExpression extends SimpleReferenceExpression instanceof NeedsExpressionImpl {
|
||||
string getNeededJobId() { result = super.getNeededJobId() }
|
||||
}
|
||||
|
||||
class JobsExpression extends SimpleReferenceExpression instanceof JobsExpressionImpl { }
|
||||
|
||||
class InputsExpression extends SimpleReferenceExpression instanceof InputsExpressionImpl { }
|
||||
|
||||
class EnvExpression extends SimpleReferenceExpression instanceof EnvExpressionImpl { }
|
||||
|
||||
class MatrixExpression extends SimpleReferenceExpression instanceof MatrixExpressionImpl { }
|
||||
722
actions/ql/lib/codeql/actions/Bash.qll
Normal file
722
actions/ql/lib/codeql/actions/Bash.qll
Normal file
@@ -0,0 +1,722 @@
|
||||
private import codeql.actions.Ast
|
||||
|
||||
class BashShellScript extends ShellScript {
|
||||
BashShellScript() {
|
||||
exists(Run run |
|
||||
this = run.getScript() and
|
||||
run.getShell().matches(["bash%", "sh"])
|
||||
)
|
||||
}
|
||||
|
||||
private string lineProducer(int i) {
|
||||
result = this.getRawScript().regexpReplaceAll("\\\\\\s*\n", "").splitAt("\n", i)
|
||||
}
|
||||
|
||||
private predicate cmdSubstitutionReplacement(string cmdSubs, string id, int k) {
|
||||
exists(string line | line = this.lineProducer(k) |
|
||||
exists(int i, int j |
|
||||
cmdSubs =
|
||||
// $() cmd substitution
|
||||
line.regexpFind("\\$\\((?:[^()]+|\\((?:[^()]+|\\([^()]*\\))*\\))*\\)", i, j)
|
||||
.regexpReplaceAll("^\\$\\(", "")
|
||||
.regexpReplaceAll("\\)$", "") and
|
||||
id = "cmdsubs:" + k + ":" + i + ":" + j
|
||||
)
|
||||
or
|
||||
exists(int i, int j |
|
||||
// `...` cmd substitution
|
||||
cmdSubs =
|
||||
line.regexpFind("\\`[^\\`]+\\`", i, j)
|
||||
.regexpReplaceAll("^\\`", "")
|
||||
.regexpReplaceAll("\\`$", "") and
|
||||
id = "cmd:" + k + ":" + i + ":" + j
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
private predicate rankedCmdSubstitutionReplacements(int i, string old, string new) {
|
||||
old = rank[i](string old2 | this.cmdSubstitutionReplacement(old2, _, _) | old2) and
|
||||
this.cmdSubstitutionReplacement(old, new, _)
|
||||
}
|
||||
|
||||
private predicate doReplaceCmdSubstitutions(int line, int round, string old, string new) {
|
||||
round = 0 and
|
||||
old = this.lineProducer(line) and
|
||||
new = old
|
||||
or
|
||||
round > 0 and
|
||||
exists(string middle, string target, string replacement |
|
||||
this.doReplaceCmdSubstitutions(line, round - 1, old, middle) and
|
||||
this.rankedCmdSubstitutionReplacements(round, target, replacement) and
|
||||
new = middle.replaceAll(target, replacement)
|
||||
)
|
||||
}
|
||||
|
||||
private string cmdSubstitutedLineProducer(int i) {
|
||||
// script lines where any command substitution has been replaced with a unique placeholder
|
||||
result =
|
||||
max(int round, string new |
|
||||
this.doReplaceCmdSubstitutions(i, round, _, new)
|
||||
|
|
||||
new order by round
|
||||
)
|
||||
or
|
||||
this.cmdSubstitutionReplacement(result, _, i)
|
||||
}
|
||||
|
||||
private predicate quotedStringReplacement(string quotedStr, string id) {
|
||||
exists(string line, int k | line = this.cmdSubstitutedLineProducer(k) |
|
||||
exists(int i, int j |
|
||||
// double quoted string
|
||||
quotedStr = line.regexpFind("\"((?:[^\"\\\\]|\\\\.)*)\"", i, j) and
|
||||
id =
|
||||
"qstr:" + k + ":" + i + ":" + j + ":" + quotedStr.length() + ":" +
|
||||
quotedStr.regexpReplaceAll("[^a-zA-Z0-9]", "")
|
||||
)
|
||||
or
|
||||
exists(int i, int j |
|
||||
// single quoted string
|
||||
quotedStr = line.regexpFind("'((?:\\\\.|[^'\\\\])*)'", i, j) and
|
||||
id =
|
||||
"qstr:" + k + ":" + i + ":" + j + ":" + quotedStr.length() + ":" +
|
||||
quotedStr.regexpReplaceAll("[^a-zA-Z0-9]", "")
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
private predicate rankedQuotedStringReplacements(int i, string old, string new) {
|
||||
old = rank[i](string old2 | this.quotedStringReplacement(old2, _) | old2) and
|
||||
this.quotedStringReplacement(old, new)
|
||||
}
|
||||
|
||||
private predicate doReplaceQuotedStrings(int line, int round, string old, string new) {
|
||||
round = 0 and
|
||||
old = this.cmdSubstitutedLineProducer(line) and
|
||||
new = old
|
||||
or
|
||||
round > 0 and
|
||||
exists(string middle, string target, string replacement |
|
||||
this.doReplaceQuotedStrings(line, round - 1, old, middle) and
|
||||
this.rankedQuotedStringReplacements(round, target, replacement) and
|
||||
new = middle.replaceAll(target, replacement)
|
||||
)
|
||||
}
|
||||
|
||||
private string quotedStringLineProducer(int i) {
|
||||
result =
|
||||
max(int round, string new | this.doReplaceQuotedStrings(i, round, _, new) | new order by round)
|
||||
}
|
||||
|
||||
private string stmtProducer(int i) {
|
||||
result = this.quotedStringLineProducer(i).splitAt(Bash::splitSeparator()).trim() and
|
||||
// when splitting the line with a separator that is not present, the result is the original line which may contain other separators
|
||||
// we only one the split parts that do not contain any of the separators
|
||||
not result.indexOf(Bash::splitSeparator()) > -1
|
||||
}
|
||||
|
||||
private predicate doStmtRestoreQuotedStrings(int line, int round, string old, string new) {
|
||||
round = 0 and
|
||||
old = this.stmtProducer(line) and
|
||||
new = old
|
||||
or
|
||||
round > 0 and
|
||||
exists(string middle, string target, string replacement |
|
||||
this.doStmtRestoreQuotedStrings(line, round - 1, old, middle) and
|
||||
this.rankedQuotedStringReplacements(round, target, replacement) and
|
||||
new = middle.replaceAll(replacement, target)
|
||||
)
|
||||
}
|
||||
|
||||
private string restoredStmtQuotedStringLineProducer(int i) {
|
||||
result =
|
||||
max(int round, string new |
|
||||
this.doStmtRestoreQuotedStrings(i, round, _, new)
|
||||
|
|
||||
new order by round
|
||||
) and
|
||||
not result.indexOf("qstr:") > -1
|
||||
}
|
||||
|
||||
private predicate doStmtRestoreCmdSubstitutions(int line, int round, string old, string new) {
|
||||
round = 0 and
|
||||
old = this.restoredStmtQuotedStringLineProducer(line) and
|
||||
new = old
|
||||
or
|
||||
round > 0 and
|
||||
exists(string middle, string target, string replacement |
|
||||
this.doStmtRestoreCmdSubstitutions(line, round - 1, old, middle) and
|
||||
this.rankedCmdSubstitutionReplacements(round, target, replacement) and
|
||||
new = middle.replaceAll(replacement, target)
|
||||
)
|
||||
}
|
||||
|
||||
override string getStmt(int i) {
|
||||
result =
|
||||
max(int round, string new |
|
||||
this.doStmtRestoreCmdSubstitutions(i, round, _, new)
|
||||
|
|
||||
new order by round
|
||||
) and
|
||||
not result.indexOf("cmdsubs:") > -1
|
||||
}
|
||||
|
||||
override string getAStmt() { result = this.getStmt(_) }
|
||||
|
||||
private string cmdProducer(int i) {
|
||||
result = this.quotedStringLineProducer(i).splitAt(Bash::separator()).trim() and
|
||||
// when splitting the line with a separator that is not present, the result is the original line which may contain other separators
|
||||
// we only one the split parts that do not contain any of the separators
|
||||
not result.indexOf(Bash::separator()) > -1
|
||||
}
|
||||
|
||||
private predicate doCmdRestoreQuotedStrings(int line, int round, string old, string new) {
|
||||
round = 0 and
|
||||
old = this.cmdProducer(line) and
|
||||
new = old
|
||||
or
|
||||
round > 0 and
|
||||
exists(string middle, string target, string replacement |
|
||||
this.doCmdRestoreQuotedStrings(line, round - 1, old, middle) and
|
||||
this.rankedQuotedStringReplacements(round, target, replacement) and
|
||||
new = middle.replaceAll(replacement, target)
|
||||
)
|
||||
}
|
||||
|
||||
private string restoredCmdQuotedStringLineProducer(int i) {
|
||||
result =
|
||||
max(int round, string new |
|
||||
this.doCmdRestoreQuotedStrings(i, round, _, new)
|
||||
|
|
||||
new order by round
|
||||
) and
|
||||
not result.indexOf("qstr:") > -1
|
||||
}
|
||||
|
||||
private predicate doCmdRestoreCmdSubstitutions(int line, int round, string old, string new) {
|
||||
round = 0 and
|
||||
old = this.restoredCmdQuotedStringLineProducer(line) and
|
||||
new = old
|
||||
or
|
||||
round > 0 and
|
||||
exists(string middle, string target, string replacement |
|
||||
this.doCmdRestoreCmdSubstitutions(line, round - 1, old, middle) and
|
||||
this.rankedCmdSubstitutionReplacements(round, target, replacement) and
|
||||
new = middle.replaceAll(replacement, target)
|
||||
)
|
||||
}
|
||||
|
||||
string getCmd(int i) {
|
||||
result =
|
||||
max(int round, string new |
|
||||
this.doCmdRestoreCmdSubstitutions(i, round, _, new)
|
||||
|
|
||||
new order by round
|
||||
) and
|
||||
not result.indexOf("cmdsubs:") > -1
|
||||
}
|
||||
|
||||
string getACmd() { result = this.getCmd(_) }
|
||||
|
||||
override string getCommand(int i) {
|
||||
// remove redirection
|
||||
result =
|
||||
this.getCmd(i)
|
||||
.regexpReplaceAll("(>|>>|2>|2>>|<|<<<)\\s*[\\{\\}\\$\"'_\\-0-9a-zA-Z]+$", "")
|
||||
.trim() and
|
||||
// exclude variable declarations
|
||||
not result.regexpMatch("^[a-zA-Z0-9\\-_]+=") and
|
||||
// exclude comments
|
||||
not result.trim().indexOf("#") = 0 and
|
||||
// exclude the following keywords
|
||||
not result =
|
||||
[
|
||||
"", "for", "in", "do", "done", "if", "then", "else", "elif", "fi", "while", "until", "case",
|
||||
"esac", "{", "}"
|
||||
]
|
||||
}
|
||||
|
||||
override string getACommand() { result = this.getCommand(_) }
|
||||
|
||||
override string getFileReadCommand(int i) {
|
||||
result = this.getStmt(i) and
|
||||
result.matches(Bash::fileReadCommand() + "%")
|
||||
}
|
||||
|
||||
override string getAFileReadCommand() { result = this.getFileReadCommand(_) }
|
||||
|
||||
override predicate getAssignment(int i, string name, string data) {
|
||||
exists(string stmt |
|
||||
stmt = this.getStmt(i) and
|
||||
name = stmt.regexpCapture("^([a-zA-Z0-9\\-_]+)=.*", 1) and
|
||||
data = stmt.regexpCapture("^[a-zA-Z0-9\\-_]+=(.*)", 1)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate getAnAssignment(string name, string data) { this.getAssignment(_, name, data) }
|
||||
|
||||
override predicate getAWriteToGitHubEnv(string name, string data) {
|
||||
exists(string raw |
|
||||
Bash::extractFileWrite(this, "GITHUB_ENV", raw) and
|
||||
Bash::extractVariableAndValue(raw, name, data)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate getAWriteToGitHubOutput(string name, string data) {
|
||||
exists(string raw |
|
||||
Bash::extractFileWrite(this, "GITHUB_OUTPUT", raw) and
|
||||
Bash::extractVariableAndValue(raw, name, data)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate getAWriteToGitHubPath(string data) {
|
||||
Bash::extractFileWrite(this, "GITHUB_PATH", data)
|
||||
}
|
||||
|
||||
override predicate getAnEnvReachingGitHubOutputWrite(string var, string output_field) {
|
||||
Bash::envReachingGitHubFileWrite(this, var, "GITHUB_OUTPUT", output_field)
|
||||
}
|
||||
|
||||
override predicate getACmdReachingGitHubOutputWrite(string cmd, string output_field) {
|
||||
Bash::cmdReachingGitHubFileWrite(this, cmd, "GITHUB_OUTPUT", output_field)
|
||||
}
|
||||
|
||||
override predicate getAnEnvReachingGitHubEnvWrite(string var, string output_field) {
|
||||
Bash::envReachingGitHubFileWrite(this, var, "GITHUB_ENV", output_field)
|
||||
}
|
||||
|
||||
override predicate getACmdReachingGitHubEnvWrite(string cmd, string output_field) {
|
||||
Bash::cmdReachingGitHubFileWrite(this, cmd, "GITHUB_ENV", output_field)
|
||||
}
|
||||
|
||||
override predicate getAnEnvReachingGitHubPathWrite(string var) {
|
||||
Bash::envReachingGitHubFileWrite(this, var, "GITHUB_PATH", _)
|
||||
}
|
||||
|
||||
override predicate getACmdReachingGitHubPathWrite(string cmd) {
|
||||
Bash::cmdReachingGitHubFileWrite(this, cmd, "GITHUB_PATH", _)
|
||||
}
|
||||
|
||||
override predicate getAnEnvReachingArgumentInjectionSink(
|
||||
string var, string command, string argument
|
||||
) {
|
||||
Bash::envReachingArgumentInjectionSink(this, var, command, argument)
|
||||
}
|
||||
|
||||
override predicate getACmdReachingArgumentInjectionSink(
|
||||
string cmd, string command, string argument
|
||||
) {
|
||||
Bash::cmdReachingArgumentInjectionSink(this, cmd, command, argument)
|
||||
}
|
||||
|
||||
override predicate fileToGitHubEnv(string path) {
|
||||
Bash::fileToFileWrite(this, "GITHUB_ENV", path)
|
||||
}
|
||||
|
||||
override predicate fileToGitHubOutput(string path) {
|
||||
Bash::fileToFileWrite(this, "GITHUB_OUTPUT", path)
|
||||
}
|
||||
|
||||
override predicate fileToGitHubPath(string path) {
|
||||
Bash::fileToFileWrite(this, "GITHUB_PATH", path)
|
||||
}
|
||||
}
|
||||
|
||||
module Bash {
|
||||
string stmtSeparator() { result = ";" }
|
||||
|
||||
string commandSeparator() { result = ["&&", "||"] }
|
||||
|
||||
string splitSeparator() {
|
||||
result = stmtSeparator() or
|
||||
result = commandSeparator()
|
||||
}
|
||||
|
||||
string redirectionSeparator() { result = [">", ">>", "2>", "2>>", ">&", "2>&", "<", "<<<"] }
|
||||
|
||||
string pipeSeparator() { result = "|" }
|
||||
|
||||
string separator() {
|
||||
result = stmtSeparator() or
|
||||
result = commandSeparator() or
|
||||
result = pipeSeparator()
|
||||
}
|
||||
|
||||
string fileReadCommand() { result = ["<", "cat", "jq", "yq", "tail", "head"] }
|
||||
|
||||
/** Checks if expr is a bash command substitution */
|
||||
bindingset[expr]
|
||||
predicate isCmdSubstitution(string expr, string cmd) {
|
||||
exists(string regexp |
|
||||
// $(cmd)
|
||||
regexp = "\\$\\(([^)]+)\\)" and
|
||||
cmd = expr.regexpCapture(regexp, 1)
|
||||
or
|
||||
// `cmd`
|
||||
regexp = "`([^`]+)`" and
|
||||
cmd = expr.regexpCapture(regexp, 1)
|
||||
)
|
||||
}
|
||||
|
||||
/** Checks if expr is a bash command substitution */
|
||||
bindingset[expr]
|
||||
predicate containsCmdSubstitution(string expr, string cmd) {
|
||||
exists(string regexp |
|
||||
// $(cmd)
|
||||
regexp = ".*\\$\\(([^)]+)\\).*" and
|
||||
cmd = expr.regexpCapture(regexp, 1).trim()
|
||||
or
|
||||
// `cmd`
|
||||
regexp = ".*`([^`]+)`.*" and
|
||||
cmd = expr.regexpCapture(regexp, 1).trim()
|
||||
)
|
||||
}
|
||||
|
||||
/** Checks if expr is a bash parameter expansion */
|
||||
bindingset[expr]
|
||||
predicate isParameterExpansion(string expr, string parameter, string operator, string params) {
|
||||
exists(string regexp |
|
||||
// $VAR
|
||||
regexp = "\\$([a-zA-Z_][a-zA-Z0-9_]+)\\b" and
|
||||
parameter = expr.regexpCapture(regexp, 1) and
|
||||
operator = "" and
|
||||
params = ""
|
||||
or
|
||||
// ${VAR}
|
||||
regexp = "\\$\\{([a-zA-Z_][a-zA-Z0-9_]*)\\}" and
|
||||
parameter = expr.regexpCapture(regexp, 1) and
|
||||
operator = "" and
|
||||
params = ""
|
||||
or
|
||||
// ${!VAR}
|
||||
regexp = "\\$\\{([!#])([a-zA-Z_][a-zA-Z0-9_]*)\\}" and
|
||||
parameter = expr.regexpCapture(regexp, 2) and
|
||||
operator = expr.regexpCapture(regexp, 1) and
|
||||
params = ""
|
||||
or
|
||||
// ${VAR<OP><PARAMS>}, ...
|
||||
regexp = "\\$\\{([a-zA-Z_][a-zA-Z0-9_]*)([#%/:^,\\-+]{1,2})?(.*?)\\}" and
|
||||
parameter = expr.regexpCapture(regexp, 1) and
|
||||
operator = expr.regexpCapture(regexp, 2) and
|
||||
params = expr.regexpCapture(regexp, 3)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[expr]
|
||||
predicate containsParameterExpansion(string expr, string parameter, string operator, string params) {
|
||||
exists(string regexp |
|
||||
// $VAR
|
||||
regexp = ".*\\$([a-zA-Z_][a-zA-Z0-9_]+)\\b.*" and
|
||||
parameter = expr.regexpCapture(regexp, 1) and
|
||||
operator = "" and
|
||||
params = ""
|
||||
or
|
||||
// ${VAR}
|
||||
regexp = ".*\\$\\{([a-zA-Z_][a-zA-Z0-9_]*)\\}.*" and
|
||||
parameter = expr.regexpCapture(regexp, 1) and
|
||||
operator = "" and
|
||||
params = ""
|
||||
or
|
||||
// ${!VAR}
|
||||
regexp = ".*\\$\\{([!#])([a-zA-Z_][a-zA-Z0-9_]*)\\}.*" and
|
||||
parameter = expr.regexpCapture(regexp, 2) and
|
||||
operator = expr.regexpCapture(regexp, 1) and
|
||||
params = ""
|
||||
or
|
||||
// ${VAR<OP><PARAMS>}, ...
|
||||
regexp = ".*\\$\\{([a-zA-Z_][a-zA-Z0-9_]*)([#%/:^,\\-+]{1,2})?(.*?)\\}.*" and
|
||||
parameter = expr.regexpCapture(regexp, 1) and
|
||||
operator = expr.regexpCapture(regexp, 2) and
|
||||
params = expr.regexpCapture(regexp, 3)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[raw_content]
|
||||
predicate extractVariableAndValue(string raw_content, string key, string value) {
|
||||
exists(string regexp, string content | content = trimQuotes(raw_content) |
|
||||
regexp = "(?msi).*^([a-zA-Z_][a-zA-Z0-9_]*)\\s*<<\\s*['\"]?(\\S+)['\"]?\\s*\n(.*?)\n\\2\\s*$" and
|
||||
key = trimQuotes(content.regexpCapture(regexp, 1)) and
|
||||
value = trimQuotes(content.regexpCapture(regexp, 3))
|
||||
or
|
||||
exists(string line |
|
||||
line = content.splitAt("\n") and
|
||||
regexp = "(?i)^([a-zA-Z_][a-zA-Z0-9_\\-]*)\\s*=\\s*(.*)$" and
|
||||
key = trimQuotes(line.regexpCapture(regexp, 1)) and
|
||||
value = trimQuotes(line.regexpCapture(regexp, 2))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[script]
|
||||
predicate singleLineFileWrite(
|
||||
string script, string cmd, string file, string content, string filters
|
||||
) {
|
||||
exists(string regexp |
|
||||
regexp = "(?i)(echo|printf)\\s*(.*?)\\s*(>>|>|\\s*\\|\\s*tee\\s*(-a|--append)?)\\s*(\\S+)" and
|
||||
cmd = script.regexpCapture(regexp, 1) and
|
||||
file = trimQuotes(script.regexpCapture(regexp, 5)) and
|
||||
filters = "" and
|
||||
content = script.regexpCapture(regexp, 2)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[script]
|
||||
predicate singleLineWorkflowCmd(string script, string cmd, string key, string value) {
|
||||
exists(string regexp |
|
||||
regexp = "(?i)(echo|printf)\\s*(['|\"])?::(set-[a-z]+)\\s*name\\s*=\\s*(.*?)::(.*)" and
|
||||
cmd = script.regexpCapture(regexp, 3) and
|
||||
key = script.regexpCapture(regexp, 4) and
|
||||
value = trimQuotes(script.regexpCapture(regexp, 5))
|
||||
or
|
||||
regexp = "(?i)(echo|printf)\\s*(['|\"])?::(add-[a-z]+)\\s*::(.*)" and
|
||||
cmd = script.regexpCapture(regexp, 3) and
|
||||
key = "" and
|
||||
value = trimQuotes(script.regexpCapture(regexp, 4))
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[script]
|
||||
predicate heredocFileWrite(string script, string cmd, string file, string content, string filters) {
|
||||
exists(string regexp |
|
||||
regexp =
|
||||
"(?msi).*^(cat)\\s*(>>|>|\\s*\\|\\s*tee\\s*(-a|--append)?)\\s*(\\S+)\\s*<<\\s*['\"]?(\\S+)['\"]?\\s*\n(.*?)\n\\4\\s*$.*" and
|
||||
cmd = script.regexpCapture(regexp, 1) and
|
||||
file = trimQuotes(script.regexpCapture(regexp, 4)) and
|
||||
content = script.regexpCapture(regexp, 6) and
|
||||
filters = ""
|
||||
or
|
||||
regexp =
|
||||
"(?msi).*^(cat)\\s*(<<|<)\\s*[-]?['\"]?(\\S+)['\"]?\\s*([^>]*)(>>|>|\\s*\\|\\s*tee\\s*(-a|--append)?)\\s*(\\S+)\\s*\n(.*?)\n\\3\\s*$.*" and
|
||||
cmd = script.regexpCapture(regexp, 1) and
|
||||
file = trimQuotes(script.regexpCapture(regexp, 7)) and
|
||||
filters = script.regexpCapture(regexp, 4) and
|
||||
content = script.regexpCapture(regexp, 8)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[script]
|
||||
predicate linesFileWrite(string script, string cmd, string file, string content, string filters) {
|
||||
exists(string regexp, string var_name |
|
||||
regexp =
|
||||
"(?msi).*((echo|printf)\\s+['|\"]?(.*?<<(\\S+))['|\"]?\\s*>>\\s*(\\S+)\\s*[\r\n]+)" +
|
||||
"(((.*?)\\s*>>\\s*\\S+\\s*[\r\n]+)+)" +
|
||||
"((echo|printf)\\s+['|\"]?(EOF)['|\"]?\\s*>>\\s*\\S+\\s*[\r\n]*).*" and
|
||||
var_name = trimQuotes(script.regexpCapture(regexp, 3)).regexpReplaceAll("<<\\s*(\\S+)", "") and
|
||||
content =
|
||||
var_name + "=$(" +
|
||||
trimQuotes(script.regexpCapture(regexp, 6))
|
||||
.regexpReplaceAll(">>.*GITHUB_(ENV|OUTPUT)(})?", "")
|
||||
.trim() + ")" and
|
||||
cmd = "echo" and
|
||||
file = trimQuotes(script.regexpCapture(regexp, 5)) and
|
||||
filters = ""
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[script]
|
||||
predicate blockFileWrite(string script, string cmd, string file, string content, string filters) {
|
||||
exists(string regexp, string first_line, string var_name |
|
||||
regexp =
|
||||
"(?msi).*^\\s*\\{\\s*[\r\n]" +
|
||||
//
|
||||
"(.*?)" +
|
||||
//
|
||||
"(\\s*\\}\\s*(>>|>|\\s*\\|\\s*tee\\s*(-a|--append)?)\\s*(\\S+))\\s*$.*" and
|
||||
first_line = script.regexpCapture(regexp, 1).splitAt("\n", 0).trim() and
|
||||
var_name = first_line.regexpCapture("echo\\s+('|\\\")?(.*)<<.*", 2) and
|
||||
content = var_name + "=$(" + script.regexpCapture(regexp, 1).splitAt("\n").trim() + ")" and
|
||||
not content.indexOf("EOF") > 0 and
|
||||
file = trimQuotes(script.regexpCapture(regexp, 5)) and
|
||||
cmd = "echo" and
|
||||
filters = ""
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[script]
|
||||
predicate multiLineFileWrite(
|
||||
string script, string cmd, string file, string content, string filters
|
||||
) {
|
||||
heredocFileWrite(script, cmd, file, content, filters)
|
||||
or
|
||||
linesFileWrite(script, cmd, file, content, filters)
|
||||
or
|
||||
blockFileWrite(script, cmd, file, content, filters)
|
||||
}
|
||||
|
||||
bindingset[file_var]
|
||||
predicate extractFileWrite(BashShellScript script, string file_var, string content) {
|
||||
// single line assignment
|
||||
exists(string file_expr, string raw_content |
|
||||
isParameterExpansion(file_expr, file_var, _, _) and
|
||||
singleLineFileWrite(script.getAStmt(), _, file_expr, raw_content, _) and
|
||||
content = trimQuotes(raw_content)
|
||||
)
|
||||
or
|
||||
// workflow command assignment
|
||||
exists(string key, string value, string cmd |
|
||||
(
|
||||
file_var = "GITHUB_ENV" and
|
||||
cmd = "set-env" and
|
||||
content = key + "=" + value
|
||||
or
|
||||
file_var = "GITHUB_OUTPUT" and
|
||||
cmd = "set-output" and
|
||||
content = key + "=" + value
|
||||
or
|
||||
file_var = "GITHUB_PATH" and
|
||||
cmd = "add-path" and
|
||||
content = value
|
||||
) and
|
||||
singleLineWorkflowCmd(script.getAStmt(), cmd, key, value)
|
||||
)
|
||||
or
|
||||
// multiline assignment
|
||||
exists(string file_expr, string raw_content |
|
||||
multiLineFileWrite(script.getRawScript(), _, file_expr, raw_content, _) and
|
||||
isParameterExpansion(file_expr, file_var, _, _) and
|
||||
content = trimQuotes(raw_content)
|
||||
)
|
||||
}
|
||||
|
||||
/** Writes the content of the file specified by `path` into a file pointed to by `file_var` */
|
||||
predicate fileToFileWrite(BashShellScript script, string file_var, string path) {
|
||||
exists(string regexp, string stmt, string file_expr |
|
||||
regexp =
|
||||
"(?i)(cat)\\s*" + "((?:(?!<<|<<-)[^>\n])+)\\s*" +
|
||||
"(>>|>|\\s*\\|\\s*tee\\s*(-a|--append)?)\\s*" + "(\\S+)" and
|
||||
stmt = script.getAStmt() and
|
||||
file_expr = trimQuotes(stmt.regexpCapture(regexp, 5)) and
|
||||
path = stmt.regexpCapture(regexp, 2) and
|
||||
containsParameterExpansion(file_expr, file_var, _, _)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the Run scripts contains an access to an environment variable called `var`
|
||||
* which value may get appended to the GITHUB_XXX special file
|
||||
*/
|
||||
predicate envReachingGitHubFileWrite(
|
||||
BashShellScript script, string var, string file_var, string field
|
||||
) {
|
||||
exists(string file_write_value |
|
||||
(
|
||||
file_var = "GITHUB_ENV" and
|
||||
script.getAWriteToGitHubEnv(field, file_write_value)
|
||||
or
|
||||
file_var = "GITHUB_OUTPUT" and
|
||||
script.getAWriteToGitHubOutput(field, file_write_value)
|
||||
or
|
||||
file_var = "GITHUB_PATH" and
|
||||
field = "PATH" and
|
||||
script.getAWriteToGitHubPath(file_write_value)
|
||||
) and
|
||||
envReachingRunExpr(script, var, file_write_value)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if and environment variable is used, directly or indirectly, in a Run's step expression.
|
||||
* Where the expression is a string captured from the Run's script.
|
||||
*/
|
||||
bindingset[expr]
|
||||
predicate envReachingRunExpr(BashShellScript script, string var, string expr) {
|
||||
exists(string var2, string value2 |
|
||||
// VAR2=${VAR:-default} (var2=value2)
|
||||
// echo "FIELD=${VAR2:-default}" >> $GITHUB_ENV (field, file_write_value)
|
||||
script.getAnAssignment(var2, value2) and
|
||||
containsParameterExpansion(value2, var, _, _) and
|
||||
containsParameterExpansion(expr, var2, _, _)
|
||||
)
|
||||
or
|
||||
// var reaches the file write directly
|
||||
// echo "FIELD=${VAR:-default}" >> $GITHUB_ENV (field, file_write_value)
|
||||
containsParameterExpansion(expr, var, _, _)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the Run scripts contains a command substitution (`cmd`)
|
||||
* which output may get appended to the GITHUB_XXX special file
|
||||
*/
|
||||
predicate cmdReachingGitHubFileWrite(
|
||||
BashShellScript script, string cmd, string file_var, string field
|
||||
) {
|
||||
exists(string file_write_value |
|
||||
(
|
||||
file_var = "GITHUB_ENV" and
|
||||
script.getAWriteToGitHubEnv(field, file_write_value)
|
||||
or
|
||||
file_var = "GITHUB_OUTPUT" and
|
||||
script.getAWriteToGitHubOutput(field, file_write_value)
|
||||
or
|
||||
file_var = "GITHUB_PATH" and
|
||||
field = "PATH" and
|
||||
script.getAWriteToGitHubPath(file_write_value)
|
||||
) and
|
||||
cmdReachingRunExpr(script, cmd, file_write_value)
|
||||
)
|
||||
}
|
||||
|
||||
predicate envReachingArgumentInjectionSink(
|
||||
BashShellScript script, string source, string command, string argument
|
||||
) {
|
||||
exists(string cmd, string regex, int command_group, int argument_group |
|
||||
cmd = script.getACommand() and
|
||||
argumentInjectionSinksDataModel(regex, command_group, argument_group) and
|
||||
argument = cmd.regexpCapture(regex, argument_group).trim() and
|
||||
command = cmd.regexpCapture(regex, command_group).trim() and
|
||||
envReachingRunExpr(script, source, argument)
|
||||
)
|
||||
}
|
||||
|
||||
predicate cmdReachingArgumentInjectionSink(
|
||||
BashShellScript script, string source, string command, string argument
|
||||
) {
|
||||
exists(string cmd, string regex, int command_group, int argument_group |
|
||||
cmd = script.getACommand() and
|
||||
argumentInjectionSinksDataModel(regex, command_group, argument_group) and
|
||||
argument = cmd.regexpCapture(regex, argument_group).trim() and
|
||||
command = cmd.regexpCapture(regex, command_group).trim() and
|
||||
cmdReachingRunExpr(script, source, argument)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a command output is used, directly or indirectly, in a Run's step expression.
|
||||
* Where the expression is a string captured from the Run's script.
|
||||
*/
|
||||
bindingset[expr]
|
||||
predicate cmdReachingRunExpr(BashShellScript script, string cmd, string expr) {
|
||||
// cmd output is assigned to a second variable (var2) and var2 reaches the file write
|
||||
exists(string var2, string value2 |
|
||||
// VAR2=$(cmd)
|
||||
// echo "FIELD=${VAR2:-default}" >> $GITHUB_ENV (field, file_write_value)
|
||||
script.getAnAssignment(var2, value2) and
|
||||
containsCmdSubstitution(value2, cmd) and
|
||||
containsParameterExpansion(expr, var2, _, _) and
|
||||
not varMatchesRegexTest(script, var2, alphaNumericRegex())
|
||||
)
|
||||
or
|
||||
// var reaches the file write directly
|
||||
// echo "FIELD=$(cmd)" >> $GITHUB_ENV (field, file_write_value)
|
||||
containsCmdSubstitution(expr, cmd)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there test command that checks a variable against a regex
|
||||
* eg: `[[ $VAR =~ ^[a-zA-Z0-9_]+$ ]]`
|
||||
*/
|
||||
bindingset[var, regex]
|
||||
predicate varMatchesRegexTest(BashShellScript script, string var, string regex) {
|
||||
exists(string lhs, string rhs |
|
||||
lhs = script.getACommand().regexpCapture(".*\\[\\[\\s*(.*?)\\s*=~\\s*(.*?)\\s*\\]\\].*", 1) and
|
||||
containsParameterExpansion(lhs, var, _, _) and
|
||||
rhs = script.getACommand().regexpCapture(".*\\[\\[\\s*(.*?)\\s*=~\\s*(.*?)\\s*\\]\\].*", 2) and
|
||||
trimQuotes(rhs).regexpMatch(regex)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the given regex is used to match an alphanumeric string
|
||||
* eg: `^[0-9a-zA-Z]{40}$`, `^[0-9]+$` or `^[a-zA-Z0-9_]+$`
|
||||
*/
|
||||
string alphaNumericRegex() { result = "^\\^\\[([09azAZ_-]+)\\](\\+|\\{\\d+\\})\\$$" }
|
||||
}
|
||||
6
actions/ql/lib/codeql/actions/Cfg.qll
Normal file
6
actions/ql/lib/codeql/actions/Cfg.qll
Normal file
@@ -0,0 +1,6 @@
|
||||
/** Provides classes representing the control flow graph. */
|
||||
|
||||
private import codeql.actions.controlflow.internal.Cfg as CfgInternal
|
||||
import CfgInternal::Completion
|
||||
import CfgInternal::CfgScope
|
||||
import CfgInternal::CfgImpl
|
||||
1
actions/ql/lib/codeql/actions/Consistency.ql
Normal file
1
actions/ql/lib/codeql/actions/Consistency.ql
Normal file
@@ -0,0 +1 @@
|
||||
import DataFlow::DataFlow::Consistency
|
||||
22
actions/ql/lib/codeql/actions/DataFlow.qll
Normal file
22
actions/ql/lib/codeql/actions/DataFlow.qll
Normal file
@@ -0,0 +1,22 @@
|
||||
/**
|
||||
* Provides classes for performing local (intra-procedural) and
|
||||
* global (inter-procedural) data flow analyses.
|
||||
*/
|
||||
|
||||
import codeql.Locations
|
||||
|
||||
module DataFlow {
|
||||
private import codeql.dataflow.DataFlow
|
||||
private import codeql.actions.dataflow.internal.DataFlowImplSpecific
|
||||
import DataFlowMake<Location, ActionsDataFlow>
|
||||
import codeql.actions.dataflow.internal.DataFlowPublic
|
||||
// debug
|
||||
private import codeql.actions.dataflow.internal.TaintTrackingImplSpecific
|
||||
import codeql.dataflow.internal.DataFlowImplConsistency as DFIC
|
||||
|
||||
module ActionsConsistency implements DFIC::InputSig<Location, ActionsDataFlow> { }
|
||||
|
||||
module Consistency {
|
||||
import DFIC::MakeConsistency<Location, ActionsDataFlow, ActionsTaintTracking, ActionsConsistency>
|
||||
}
|
||||
}
|
||||
88
actions/ql/lib/codeql/actions/Helper.qll
Normal file
88
actions/ql/lib/codeql/actions/Helper.qll
Normal file
@@ -0,0 +1,88 @@
|
||||
private import codeql.actions.Ast
|
||||
private import codeql.Locations
|
||||
private import codeql.actions.security.ControlChecks
|
||||
import codeql.actions.config.Config
|
||||
import codeql.actions.Bash
|
||||
import codeql.actions.PowerShell
|
||||
|
||||
bindingset[expr]
|
||||
string normalizeExpr(string expr) {
|
||||
result =
|
||||
expr.regexpReplaceAll("\\['([a-zA-Z0-9_\\*\\-]+)'\\]", ".$1")
|
||||
.regexpReplaceAll("\\[\"([a-zA-Z0-9_\\*\\-]+)\"\\]", ".$1")
|
||||
.regexpReplaceAll("\\s*\\.\\s*", ".")
|
||||
}
|
||||
|
||||
bindingset[regex]
|
||||
string wrapRegexp(string regex) { result = "\\b" + regex + "\\b" }
|
||||
|
||||
bindingset[regex]
|
||||
string wrapJsonRegexp(string regex) {
|
||||
result = ["fromJSON\\(\\s*" + regex + "\\s*\\)", "toJSON\\(\\s*" + regex + "\\s*\\)"]
|
||||
}
|
||||
|
||||
bindingset[str]
|
||||
string trimQuotes(string str) {
|
||||
result = str.trim().regexpReplaceAll("^(\"|')", "").regexpReplaceAll("(\"|')$", "")
|
||||
}
|
||||
|
||||
predicate inPrivilegedContext(AstNode node, Event event) {
|
||||
node.getEnclosingJob().isPrivilegedExternallyTriggerable(event)
|
||||
}
|
||||
|
||||
predicate inNonPrivilegedContext(AstNode node) {
|
||||
not node.getEnclosingJob().isPrivilegedExternallyTriggerable(_)
|
||||
}
|
||||
|
||||
string defaultBranchNames() {
|
||||
repositoryDataModel(_, result)
|
||||
or
|
||||
not exists(string default_branch_name | repositoryDataModel(_, default_branch_name)) and
|
||||
result = ["main", "master"]
|
||||
}
|
||||
|
||||
string getRepoRoot() {
|
||||
exists(Workflow w |
|
||||
w.getLocation().getFile().getRelativePath().indexOf("/.github/workflows") > 0 and
|
||||
result =
|
||||
w.getLocation()
|
||||
.getFile()
|
||||
.getRelativePath()
|
||||
.prefix(w.getLocation().getFile().getRelativePath().indexOf("/.github/workflows") + 1) and
|
||||
// exclude workflow_enum reusable workflows directory root
|
||||
not result.indexOf(".github/workflows/external/") > -1 and
|
||||
not result.indexOf(".github/actions/external/") > -1
|
||||
or
|
||||
not w.getLocation().getFile().getRelativePath().indexOf("/.github/workflows") > 0 and
|
||||
not w.getLocation().getFile().getRelativePath().indexOf(".github/workflows/external/") > -1 and
|
||||
not w.getLocation().getFile().getRelativePath().indexOf(".github/actions/external/") > -1 and
|
||||
result = ""
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[path]
|
||||
string normalizePath(string path) {
|
||||
exists(string trimmed_path | trimmed_path = trimQuotes(path) |
|
||||
// ./foo -> GITHUB_WORKSPACE/foo
|
||||
if path.indexOf("./") = 0
|
||||
then result = path.replaceAll("./", "GITHUB_WORKSPACE/")
|
||||
else
|
||||
// GITHUB_WORKSPACE/foo -> GITHUB_WORKSPACE/foo
|
||||
if path.indexOf("GITHUB_WORKSPACE/") = 0
|
||||
then result = path
|
||||
else
|
||||
// foo -> GITHUB_WORKSPACE/foo
|
||||
if path.regexpMatch("^[^/~].*")
|
||||
then result = "GITHUB_WORKSPACE/" + path.regexpReplaceAll("/$", "")
|
||||
else
|
||||
// ~/foo -> ~/foo
|
||||
// /foo -> /foo
|
||||
result = path
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the path cache_path is a subpath of the path untrusted_path.
|
||||
*/
|
||||
bindingset[subpath, path]
|
||||
predicate isSubpath(string subpath, string path) { subpath.substring(0, path.length()) = path }
|
||||
62
actions/ql/lib/codeql/actions/PowerShell.qll
Normal file
62
actions/ql/lib/codeql/actions/PowerShell.qll
Normal file
@@ -0,0 +1,62 @@
|
||||
private import codeql.actions.Ast
|
||||
|
||||
class PowerShellScript extends ShellScript {
|
||||
PowerShellScript() {
|
||||
exists(Run run |
|
||||
this = run.getScript() and
|
||||
run.getShell().matches("pwsh%")
|
||||
)
|
||||
}
|
||||
|
||||
override string getStmt(int i) { none() }
|
||||
|
||||
override string getAStmt() { none() }
|
||||
|
||||
override string getCommand(int i) { none() }
|
||||
|
||||
override string getACommand() { none() }
|
||||
|
||||
override string getFileReadCommand(int i) { none() }
|
||||
|
||||
override string getAFileReadCommand() { none() }
|
||||
|
||||
override predicate getAssignment(int i, string name, string data) { none() }
|
||||
|
||||
override predicate getAnAssignment(string name, string data) { none() }
|
||||
|
||||
override predicate getAWriteToGitHubEnv(string name, string data) { none() }
|
||||
|
||||
override predicate getAWriteToGitHubOutput(string name, string data) { none() }
|
||||
|
||||
override predicate getAWriteToGitHubPath(string data) { none() }
|
||||
|
||||
override predicate getAnEnvReachingGitHubOutputWrite(string var, string output_field) { none() }
|
||||
|
||||
override predicate getACmdReachingGitHubOutputWrite(string cmd, string output_field) { none() }
|
||||
|
||||
override predicate getAnEnvReachingGitHubEnvWrite(string var, string output_field) { none() }
|
||||
|
||||
override predicate getACmdReachingGitHubEnvWrite(string cmd, string output_field) { none() }
|
||||
|
||||
override predicate getAnEnvReachingGitHubPathWrite(string var) { none() }
|
||||
|
||||
override predicate getACmdReachingGitHubPathWrite(string cmd) { none() }
|
||||
|
||||
override predicate getAnEnvReachingArgumentInjectionSink(
|
||||
string var, string command, string argument
|
||||
) {
|
||||
none()
|
||||
}
|
||||
|
||||
override predicate getACmdReachingArgumentInjectionSink(
|
||||
string cmd, string command, string argument
|
||||
) {
|
||||
none()
|
||||
}
|
||||
|
||||
override predicate fileToGitHubEnv(string path) { none() }
|
||||
|
||||
override predicate fileToGitHubOutput(string path) { none() }
|
||||
|
||||
override predicate fileToGitHubPath(string path) { none() }
|
||||
}
|
||||
13
actions/ql/lib/codeql/actions/TaintTracking.qll
Normal file
13
actions/ql/lib/codeql/actions/TaintTracking.qll
Normal file
@@ -0,0 +1,13 @@
|
||||
/**
|
||||
* Provides classes for performing local (intra-procedural) and
|
||||
* global (inter-procedural) taint-tracking analyses.
|
||||
*/
|
||||
|
||||
import codeql.Locations
|
||||
|
||||
module TaintTracking {
|
||||
private import codeql.actions.dataflow.internal.DataFlowImplSpecific
|
||||
private import codeql.actions.dataflow.internal.TaintTrackingImplSpecific
|
||||
private import codeql.dataflow.TaintTracking
|
||||
import TaintFlowMake<Location, ActionsDataFlow, ActionsTaintTracking>
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
private import actions
|
||||
|
||||
/**
|
||||
* Holds if workflow step uses the github/codeql-action/init action with no customizations.
|
||||
* e.g.
|
||||
* - name: Initialize
|
||||
* uses: github/codeql-action/init@v2
|
||||
* with:
|
||||
* languages: ruby, javascript
|
||||
*/
|
||||
class DefaultableCodeQLInitiatlizeActionQuery extends UsesStep {
|
||||
DefaultableCodeQLInitiatlizeActionQuery() {
|
||||
this.getCallee() = "github/codeql-action/init" and
|
||||
not customizedWorkflowStep(this)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the with: part of the workflow step contains any arguments for with: other than "languages".
|
||||
* e.g.
|
||||
* - name: Initialize CodeQL
|
||||
* uses: github/codeql-action/init@v3
|
||||
* with:
|
||||
* languages: ${{ matrix.language }}
|
||||
* config-file: ./.github/codeql/${{ matrix.language }}/codeql-config.yml
|
||||
*/
|
||||
predicate customizedWorkflowStep(UsesStep codeQLInitStep) {
|
||||
exists(string arg |
|
||||
exists(codeQLInitStep.getArgument(arg)) and
|
||||
arg != "languages"
|
||||
)
|
||||
}
|
||||
1924
actions/ql/lib/codeql/actions/ast/internal/Ast.qll
Normal file
1924
actions/ql/lib/codeql/actions/ast/internal/Ast.qll
Normal file
File diff suppressed because it is too large
Load Diff
57
actions/ql/lib/codeql/actions/ast/internal/Yaml.qll
Normal file
57
actions/ql/lib/codeql/actions/ast/internal/Yaml.qll
Normal file
@@ -0,0 +1,57 @@
|
||||
/**
|
||||
* Provides classes for working with YAML data.
|
||||
*
|
||||
* YAML documents are represented as abstract syntax trees whose nodes
|
||||
* are either YAML values or alias nodes referring to another YAML value.
|
||||
*/
|
||||
|
||||
private import codeql.yaml.Yaml as LibYaml
|
||||
|
||||
private module YamlSig implements LibYaml::InputSig {
|
||||
import codeql.Locations
|
||||
|
||||
class LocatableBase extends @yaml_locatable {
|
||||
Location getLocation() {
|
||||
exists(@location_default loc, File f, string p, int sl, int sc, int el, int ec |
|
||||
f.getAbsolutePath() = p and
|
||||
locations_default(loc, f, sl, sc, el, ec) and
|
||||
yaml_locations(this, loc) and
|
||||
result = TBaseLocation(p, sl, sc, el, ec)
|
||||
)
|
||||
}
|
||||
|
||||
string toString() { none() }
|
||||
}
|
||||
|
||||
class NodeBase extends LocatableBase, @yaml_node {
|
||||
NodeBase getChildNode(int i) { yaml(result, _, this, i, _, _) }
|
||||
|
||||
string getTag() { yaml(this, _, _, _, result, _) }
|
||||
|
||||
string getAnchor() { yaml_anchors(this, result) }
|
||||
|
||||
override string toString() { yaml(this, _, _, _, _, result) }
|
||||
}
|
||||
|
||||
class ScalarNodeBase extends NodeBase, @yaml_scalar_node {
|
||||
int getStyle() { yaml_scalars(this, result, _) }
|
||||
|
||||
string getValue() { yaml_scalars(this, _, result) }
|
||||
}
|
||||
|
||||
class CollectionNodeBase extends NodeBase, @yaml_collection_node { }
|
||||
|
||||
class MappingNodeBase extends CollectionNodeBase, @yaml_mapping_node { }
|
||||
|
||||
class SequenceNodeBase extends CollectionNodeBase, @yaml_sequence_node { }
|
||||
|
||||
class AliasNodeBase extends NodeBase, @yaml_alias_node {
|
||||
string getTarget() { yaml_aliases(this, result) }
|
||||
}
|
||||
|
||||
class ParseErrorBase extends LocatableBase, @yaml_error {
|
||||
string getMessage() { yaml_errors(this, result) }
|
||||
}
|
||||
}
|
||||
|
||||
import LibYaml::Make<YamlSig>
|
||||
147
actions/ql/lib/codeql/actions/config/Config.qll
Normal file
147
actions/ql/lib/codeql/actions/config/Config.qll
Normal file
@@ -0,0 +1,147 @@
|
||||
import ConfigExtensions as Extensions
|
||||
|
||||
/**
|
||||
* MaD models for workflow details
|
||||
* Fields:
|
||||
* - path: Path to the workflow file
|
||||
* - trigger: Trigger for the workflow
|
||||
* - job: Job name
|
||||
* - secrets_source: Source of secrets
|
||||
* - permissions: Permissions for the workflow
|
||||
* - runner: Runner info for the workflow
|
||||
*/
|
||||
predicate workflowDataModel(
|
||||
string path, string trigger, string job, string secrets_source, string permissions, string runner
|
||||
) {
|
||||
Extensions::workflowDataModel(path, trigger, job, secrets_source, permissions, runner)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for repository details
|
||||
* Fields:
|
||||
* - visibility: Visibility of the repository
|
||||
* - default_branch_name: Default branch name
|
||||
*/
|
||||
predicate repositoryDataModel(string visibility, string default_branch_name) {
|
||||
Extensions::repositoryDataModel(visibility, default_branch_name)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for context/trigger mapping
|
||||
* Fields:
|
||||
* - trigger: Trigger for the workflow
|
||||
* - context_prefix: Prefix for the context
|
||||
*/
|
||||
predicate contextTriggerDataModel(string trigger, string context_prefix) {
|
||||
Extensions::contextTriggerDataModel(trigger, context_prefix)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for externally triggerable events
|
||||
* Fields:
|
||||
* - event: Event name
|
||||
*/
|
||||
predicate externallyTriggerableEventsDataModel(string event) {
|
||||
Extensions::externallyTriggerableEventsDataModel(event)
|
||||
}
|
||||
|
||||
private string commandLauncher() { result = ["", "sudo\\s+", "su\\s+", "xvfb-run\\s+"] }
|
||||
|
||||
/**
|
||||
* MaD models for poisonable commands
|
||||
* Fields:
|
||||
* - regexp: Regular expression for matching poisonable commands
|
||||
*/
|
||||
predicate poisonableCommandsDataModel(string regexp) {
|
||||
exists(string sub_regexp |
|
||||
Extensions::poisonableCommandsDataModel(sub_regexp) and
|
||||
regexp = commandLauncher() + sub_regexp + ".*"
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for poisonable local scripts
|
||||
* Fields:
|
||||
* - regexp: Regular expression for matching poisonable local scripts
|
||||
* - group: Script capture group number for the regular expression
|
||||
*/
|
||||
predicate poisonableLocalScriptsDataModel(string regexp, int command_group) {
|
||||
exists(string sub_regexp |
|
||||
Extensions::poisonableLocalScriptsDataModel(sub_regexp, command_group) and
|
||||
regexp = commandLauncher() + sub_regexp + ".*"
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for arguments to commands that execute the given argument.
|
||||
* Fields:
|
||||
* - regexp: Regular expression for matching argument injections.
|
||||
* - command_group: capture group for the command.
|
||||
* - argument_group: capture group for the argument.
|
||||
*/
|
||||
predicate argumentInjectionSinksDataModel(string regexp, int command_group, int argument_group) {
|
||||
exists(string sub_regexp |
|
||||
Extensions::argumentInjectionSinksDataModel(sub_regexp, command_group, argument_group) and
|
||||
regexp = commandLauncher() + sub_regexp
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for poisonable actions
|
||||
* Fields:
|
||||
* - action: action name
|
||||
*/
|
||||
predicate poisonableActionsDataModel(string action) {
|
||||
Extensions::poisonableActionsDataModel(action)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for event properties that can be user-controlled.
|
||||
* Fields:
|
||||
* - property: event property
|
||||
* - kind: property kind
|
||||
*/
|
||||
predicate untrustedEventPropertiesDataModel(string property, string kind) {
|
||||
Extensions::untrustedEventPropertiesDataModel(property, kind)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for vulnerable actions
|
||||
* Fields:
|
||||
* - action: action name
|
||||
* - vulnerable_version: vulnerable version
|
||||
* - vulnerable_sha: vulnerable sha
|
||||
* - fixed_version: fixed version
|
||||
*/
|
||||
predicate vulnerableActionsDataModel(
|
||||
string action, string vulnerable_version, string vulnerable_sha, string fixed_version
|
||||
) {
|
||||
Extensions::vulnerableActionsDataModel(action, vulnerable_version, vulnerable_sha, fixed_version)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for immutable actions
|
||||
* Fields:
|
||||
* - action: action name
|
||||
*/
|
||||
predicate immutableActionsDataModel(string action) { Extensions::immutableActionsDataModel(action) }
|
||||
|
||||
/**
|
||||
* MaD models for untrusted git commands
|
||||
* Fields:
|
||||
* - cmd_regex: Regular expression for matching untrusted git commands
|
||||
* - flag: Flag for the command
|
||||
*/
|
||||
predicate untrustedGitCommandDataModel(string cmd_regex, string flag) {
|
||||
Extensions::untrustedGitCommandDataModel(cmd_regex, flag)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD models for untrusted gh commands
|
||||
* Fields:
|
||||
* - cmd_regex: Regular expression for matching untrusted gh commands
|
||||
* - flag: Flag for the command
|
||||
*/
|
||||
predicate untrustedGhCommandDataModel(string cmd_regex, string flag) {
|
||||
Extensions::untrustedGhCommandDataModel(cmd_regex, flag)
|
||||
}
|
||||
74
actions/ql/lib/codeql/actions/config/ConfigExtensions.qll
Normal file
74
actions/ql/lib/codeql/actions/config/ConfigExtensions.qll
Normal file
@@ -0,0 +1,74 @@
|
||||
/**
|
||||
* This module provides extensible predicates for defining MaD models.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Holds if workflow data model exists for the given parameters.
|
||||
*/
|
||||
extensible predicate workflowDataModel(
|
||||
string path, string trigger, string job, string secrets_source, string permissions, string runner
|
||||
);
|
||||
|
||||
/**
|
||||
* Holds if repository data model exists for the given parameters.
|
||||
*/
|
||||
extensible predicate repositoryDataModel(string visibility, string default_branch_name);
|
||||
|
||||
/**
|
||||
* Holds if a context expression starting with context_prefix is available for a given trigger.
|
||||
*/
|
||||
extensible predicate contextTriggerDataModel(string trigger, string context_prefix);
|
||||
|
||||
/**
|
||||
* Holds if a given trigger event can be fired by an external actor.
|
||||
*/
|
||||
extensible predicate externallyTriggerableEventsDataModel(string event);
|
||||
|
||||
/**
|
||||
* Holds for strings that match poisonable commands.
|
||||
*/
|
||||
extensible predicate poisonableCommandsDataModel(string regexp);
|
||||
|
||||
/**
|
||||
* Holds for strings that match poisonable local scripts.
|
||||
*/
|
||||
extensible predicate poisonableLocalScriptsDataModel(string regexp, int group);
|
||||
|
||||
/**
|
||||
* Holds for actions that can be poisoned through local files.
|
||||
*/
|
||||
extensible predicate poisonableActionsDataModel(string action);
|
||||
|
||||
/**
|
||||
* Holds for event properties that can be user-controlled.
|
||||
*/
|
||||
extensible predicate untrustedEventPropertiesDataModel(string property, string kind);
|
||||
|
||||
/**
|
||||
* Holds for arguments to commands that execute the given argument
|
||||
*/
|
||||
extensible predicate argumentInjectionSinksDataModel(
|
||||
string regexp, int command_group, int argument_group
|
||||
);
|
||||
|
||||
/**
|
||||
* Holds for actions that are known to be vulnerable.
|
||||
*/
|
||||
extensible predicate vulnerableActionsDataModel(
|
||||
string action, string vulnerable_version, string vulnerable_sha, string fixed_version
|
||||
);
|
||||
|
||||
/**
|
||||
* Holds for actions that are known to be immutable.
|
||||
*/
|
||||
extensible predicate immutableActionsDataModel(string action);
|
||||
|
||||
/**
|
||||
* Holds for git commands that may introduce untrusted data when called on an attacker controlled branch.
|
||||
*/
|
||||
extensible predicate untrustedGitCommandDataModel(string cmd_regex, string flag);
|
||||
|
||||
/**
|
||||
* Holds for gh commands that may introduce untrusted data
|
||||
*/
|
||||
extensible predicate untrustedGhCommandDataModel(string cmd_regex, string flag);
|
||||
444
actions/ql/lib/codeql/actions/controlflow/BasicBlocks.qll
Normal file
444
actions/ql/lib/codeql/actions/controlflow/BasicBlocks.qll
Normal file
@@ -0,0 +1,444 @@
|
||||
/** Provides classes representing basic blocks. */
|
||||
|
||||
private import codeql.actions.Cfg
|
||||
private import codeql.actions.Ast
|
||||
private import codeql.Locations
|
||||
|
||||
/**
|
||||
* A basic block, that is, a maximal straight-line sequence of control flow nodes
|
||||
* without branches or joins.
|
||||
*/
|
||||
class BasicBlock extends TBasicBlockStart {
|
||||
/** Gets the scope of this basic block. */
|
||||
final CfgScope getScope() { result = this.getFirstNode().getScope() }
|
||||
|
||||
/** Gets an immediate successor of this basic block, if any. */
|
||||
BasicBlock getASuccessor() { result = this.getASuccessor(_) }
|
||||
|
||||
/** Gets an immediate successor of this basic block of a given type, if any. */
|
||||
BasicBlock getASuccessor(SuccessorType t) {
|
||||
result.getFirstNode() = this.getLastNode().getASuccessor(t)
|
||||
}
|
||||
|
||||
/** Gets an immediate predecessor of this basic block, if any. */
|
||||
BasicBlock getAPredecessor() { result.getASuccessor() = this }
|
||||
|
||||
/** Gets an immediate predecessor of this basic block of a given type, if any. */
|
||||
BasicBlock getAPredecessor(SuccessorType t) { result.getASuccessor(t) = this }
|
||||
|
||||
/** Gets the control flow node at a specific (zero-indexed) position in this basic block. */
|
||||
Node getNode(int pos) { bbIndex(this.getFirstNode(), result, pos) }
|
||||
|
||||
/** Gets a control flow node in this basic block. */
|
||||
Node getANode() { result = this.getNode(_) }
|
||||
|
||||
/** Gets the first control flow node in this basic block. */
|
||||
Node getFirstNode() { this = TBasicBlockStart(result) }
|
||||
|
||||
/** Gets the last control flow node in this basic block. */
|
||||
Node getLastNode() { result = this.getNode(this.length() - 1) }
|
||||
|
||||
/** Gets the length of this basic block. */
|
||||
int length() { result = strictcount(this.getANode()) }
|
||||
|
||||
/**
|
||||
* Holds if this basic block immediately dominates basic block `bb`.
|
||||
*
|
||||
* That is, all paths reaching basic block `bb` from some entry point
|
||||
* basic block must go through this basic block (which is an immediate
|
||||
* predecessor of `bb`).
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m b
|
||||
* if b
|
||||
* return 0
|
||||
* end
|
||||
* return 1
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block starting on line 2 immediately dominates the
|
||||
* basic block on line 5 (all paths from the entry point of `m`
|
||||
* to `return 1` must go through the `if` block).
|
||||
*/
|
||||
predicate immediatelyDominates(BasicBlock bb) { bbIDominates(this, bb) }
|
||||
|
||||
/**
|
||||
* Holds if this basic block strictly dominates basic block `bb`.
|
||||
*
|
||||
* That is, all paths reaching basic block `bb` from some entry point
|
||||
* basic block must go through this basic block (which must be different
|
||||
* from `bb`).
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m b
|
||||
* if b
|
||||
* return 0
|
||||
* end
|
||||
* return 1
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block starting on line 2 strictly dominates the
|
||||
* basic block on line 5 (all paths from the entry point of `m`
|
||||
* to `return 1` must go through the `if` block).
|
||||
*/
|
||||
predicate strictlyDominates(BasicBlock bb) { bbIDominates+(this, bb) }
|
||||
|
||||
/**
|
||||
* Holds if this basic block dominates basic block `bb`.
|
||||
*
|
||||
* That is, all paths reaching basic block `bb` from some entry point
|
||||
* basic block must go through this basic block.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m b
|
||||
* if b
|
||||
* return 0
|
||||
* end
|
||||
* return 1
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block starting on line 2 dominates the basic
|
||||
* basic block on line 5 (all paths from the entry point of `m`
|
||||
* to `return 1` must go through the `if` block).
|
||||
*/
|
||||
predicate dominates(BasicBlock bb) {
|
||||
bb = this or
|
||||
this.strictlyDominates(bb)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `df` is in the dominance frontier of this basic block.
|
||||
* That is, this basic block dominates a predecessor of `df`, but
|
||||
* does not dominate `df` itself.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m x
|
||||
* if x < 0
|
||||
* x = -x
|
||||
* if x > 10
|
||||
* x = x - 1
|
||||
* end
|
||||
* end
|
||||
* puts x
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block on line 8 is in the dominance frontier
|
||||
* of the basic block starting on line 3 because that block
|
||||
* dominates the basic block on line 4, which is a predecessor of
|
||||
* `puts x`. Also, the basic block starting on line 3 does not
|
||||
* dominate the basic block on line 8.
|
||||
*/
|
||||
predicate inDominanceFrontier(BasicBlock df) {
|
||||
this.dominatesPredecessor(df) and
|
||||
not this.strictlyDominates(df)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if this basic block dominates a predecessor of `df`.
|
||||
*/
|
||||
private predicate dominatesPredecessor(BasicBlock df) { this.dominates(df.getAPredecessor()) }
|
||||
|
||||
/**
|
||||
* Gets the basic block that immediately dominates this basic block, if any.
|
||||
*
|
||||
* That is, all paths reaching this basic block from some entry point
|
||||
* basic block must go through the result, which is an immediate basic block
|
||||
* predecessor of this basic block.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m b
|
||||
* if b
|
||||
* return 0
|
||||
* end
|
||||
* return 1
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block starting on line 2 is an immediate dominator of
|
||||
* the basic block on line 5 (all paths from the entry point of `m`
|
||||
* to `return 1` must go through the `if` block, and the `if` block
|
||||
* is an immediate predecessor of `return 1`).
|
||||
*/
|
||||
BasicBlock getImmediateDominator() { bbIDominates(result, this) }
|
||||
|
||||
/**
|
||||
* Holds if this basic block strictly post-dominates basic block `bb`.
|
||||
*
|
||||
* That is, all paths reaching a normal exit point basic block from basic
|
||||
* block `bb` must go through this basic block (which must be different
|
||||
* from `bb`).
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m b
|
||||
* if b
|
||||
* puts "b"
|
||||
* end
|
||||
* puts "m"
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block on line 5 strictly post-dominates the basic block on
|
||||
* line 3 (all paths to the exit point of `m` from `puts "b"` must go
|
||||
* through `puts "m"`).
|
||||
*/
|
||||
predicate strictlyPostDominates(BasicBlock bb) { bbIPostDominates+(this, bb) }
|
||||
|
||||
/**
|
||||
* Holds if this basic block post-dominates basic block `bb`.
|
||||
*
|
||||
* That is, all paths reaching a normal exit point basic block from basic
|
||||
* block `bb` must go through this basic block.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* ```rb
|
||||
* def m b
|
||||
* if b
|
||||
* puts "b"
|
||||
* end
|
||||
* puts "m"
|
||||
* end
|
||||
* ```
|
||||
*
|
||||
* The basic block on line 5 post-dominates the basic block on line 3
|
||||
* (all paths to the exit point of `m` from `puts "b"` must go through
|
||||
* `puts "m"`).
|
||||
*/
|
||||
predicate postDominates(BasicBlock bb) {
|
||||
this.strictlyPostDominates(bb) or
|
||||
this = bb
|
||||
}
|
||||
|
||||
/** Holds if this basic block is in a loop in the control flow graph. */
|
||||
predicate inLoop() { this.getASuccessor+() = this }
|
||||
|
||||
/** Gets a textual representation of this basic block. */
|
||||
string toString() { result = this.getFirstNode().toString() }
|
||||
|
||||
/** Gets the location of this basic block. */
|
||||
Location getLocation() { result = this.getFirstNode().getLocation() }
|
||||
}
|
||||
|
||||
cached
|
||||
private module Cached {
|
||||
/** Internal representation of basic blocks. */
|
||||
cached
|
||||
newtype TBasicBlock = TBasicBlockStart(Node cfn) { startsBB(cfn) }
|
||||
|
||||
/** Holds if `cfn` starts a new basic block. */
|
||||
private predicate startsBB(Node cfn) {
|
||||
not exists(cfn.getAPredecessor()) and exists(cfn.getASuccessor())
|
||||
or
|
||||
cfn.isJoin()
|
||||
or
|
||||
cfn.getAPredecessor().isBranch()
|
||||
or
|
||||
/*
|
||||
* In cases such as
|
||||
*
|
||||
* ```rb
|
||||
* if x or y
|
||||
* foo
|
||||
* else
|
||||
* bar
|
||||
* ```
|
||||
*
|
||||
* we have a CFG that looks like
|
||||
*
|
||||
* x --false--> [false] x or y --false--> bar
|
||||
* \ |
|
||||
* --true--> y --false--
|
||||
* \
|
||||
* --true--> [true] x or y --true--> foo
|
||||
*
|
||||
* and we want to ensure that both `foo` and `bar` start a new basic block,
|
||||
* in order to get a `ConditionalBlock` out of the disjunction.
|
||||
*/
|
||||
|
||||
exists(cfn.getAPredecessor(any(BooleanSuccessor s)))
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `succ` is a control flow successor of `pred` within
|
||||
* the same basic block.
|
||||
*/
|
||||
private predicate intraBBSucc(Node pred, Node succ) {
|
||||
succ = pred.getASuccessor() and
|
||||
not startsBB(succ)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `cfn` is the `i`th node in basic block `bb`.
|
||||
*
|
||||
* In other words, `i` is the shortest distance from a node `bb`
|
||||
* that starts a basic block to `cfn` along the `intraBBSucc` relation.
|
||||
*/
|
||||
cached
|
||||
predicate bbIndex(Node bbStart, Node cfn, int i) =
|
||||
shortestDistances(startsBB/1, intraBBSucc/2)(bbStart, cfn, i)
|
||||
|
||||
/**
|
||||
* Holds if the first node of basic block `succ` is a control flow
|
||||
* successor of the last node of basic block `pred`.
|
||||
*/
|
||||
private predicate succBB(BasicBlock pred, BasicBlock succ) { succ = pred.getASuccessor() }
|
||||
|
||||
/** Holds if `dom` is an immediate dominator of `bb`. */
|
||||
cached
|
||||
predicate bbIDominates(BasicBlock dom, BasicBlock bb) =
|
||||
idominance(entryBB/1, succBB/2)(_, dom, bb)
|
||||
|
||||
/** Holds if `pred` is a basic block predecessor of `succ`. */
|
||||
private predicate predBB(BasicBlock succ, BasicBlock pred) { succBB(pred, succ) }
|
||||
|
||||
/** Holds if `bb` is an exit basic block that represents normal exit. */
|
||||
private predicate normalExitBB(BasicBlock bb) { bb.getANode().(AnnotatedExitNode).isNormal() }
|
||||
|
||||
/** Holds if `dom` is an immediate post-dominator of `bb`. */
|
||||
cached
|
||||
predicate bbIPostDominates(BasicBlock dom, BasicBlock bb) =
|
||||
idominance(normalExitBB/1, predBB/2)(_, dom, bb)
|
||||
|
||||
/**
|
||||
* Gets the `i`th predecessor of join block `jb`, with respect to some
|
||||
* arbitrary order.
|
||||
*/
|
||||
cached
|
||||
JoinBlockPredecessor getJoinBlockPredecessor(JoinBlock jb, int i) {
|
||||
none()
|
||||
/*
|
||||
* result =
|
||||
* rank[i + 1](JoinBlockPredecessor jbp |
|
||||
* jbp = jb.getAPredecessor()
|
||||
* |
|
||||
* jbp order by JoinBlockPredecessors::getId(jbp), JoinBlockPredecessors::getSplitString(jbp)
|
||||
* )
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
cached
|
||||
predicate immediatelyControls(ConditionBlock cb, BasicBlock succ, BooleanSuccessor s) {
|
||||
succ = cb.getASuccessor(s) and
|
||||
forall(BasicBlock pred | pred = succ.getAPredecessor() and pred != cb | succ.dominates(pred))
|
||||
}
|
||||
|
||||
cached
|
||||
predicate controls(ConditionBlock cb, BasicBlock controlled, BooleanSuccessor s) {
|
||||
exists(BasicBlock succ | cb.immediatelyControls(succ, s) | succ.dominates(controlled))
|
||||
}
|
||||
}
|
||||
|
||||
private import Cached
|
||||
|
||||
/** Holds if `bb` is an entry basic block. */
|
||||
private predicate entryBB(BasicBlock bb) { bb.getFirstNode() instanceof EntryNode }
|
||||
|
||||
/**
|
||||
* An entry basic block, that is, a basic block whose first node is
|
||||
* an entry node.
|
||||
*/
|
||||
class EntryBasicBlock extends BasicBlock {
|
||||
EntryBasicBlock() { entryBB(this) }
|
||||
}
|
||||
|
||||
/**
|
||||
* An annotated exit basic block, that is, a basic block whose last node is
|
||||
* an annotated exit node.
|
||||
*/
|
||||
class AnnotatedExitBasicBlock extends BasicBlock {
|
||||
private boolean normal;
|
||||
|
||||
AnnotatedExitBasicBlock() {
|
||||
exists(AnnotatedExitNode n |
|
||||
n = this.getANode() and
|
||||
if n.isNormal() then normal = true else normal = false
|
||||
)
|
||||
}
|
||||
|
||||
/** Holds if this block represent a normal exit. */
|
||||
final predicate isNormal() { normal = true }
|
||||
}
|
||||
|
||||
/**
|
||||
* An exit basic block, that is, a basic block whose last node is
|
||||
* an exit node.
|
||||
*/
|
||||
class ExitBasicBlock extends BasicBlock {
|
||||
ExitBasicBlock() { this.getLastNode() instanceof ExitNode }
|
||||
}
|
||||
|
||||
/*
|
||||
* private module JoinBlockPredecessors {
|
||||
* private predicate id(AstNode x, AstNode y) { x = y }
|
||||
*
|
||||
* private predicate idOf(AstNode x, int y) = equivalenceRelation(id/2)(x, y)
|
||||
*
|
||||
* int getId(JoinBlockPredecessor jbp) {
|
||||
* idOf(Ast::toTreeSitter(jbp.getFirstNode().(AstCfgNode).getAstNode()), result)
|
||||
* or
|
||||
* idOf(Ast::toTreeSitter(jbp.(EntryBasicBlock).getScope()), result)
|
||||
* }
|
||||
*
|
||||
* string getSplitString(JoinBlockPredecessor jbp) {
|
||||
* result = jbp.getFirstNode().(AstCfgNode).getSplitsString()
|
||||
* or
|
||||
* not exists(jbp.getFirstNode().(AstCfgNode).getSplitsString()) and
|
||||
* result = ""
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
|
||||
/** A basic block with more than one predecessor. */
|
||||
class JoinBlock extends BasicBlock {
|
||||
JoinBlock() { this.getFirstNode().isJoin() }
|
||||
|
||||
/**
|
||||
* Gets the `i`th predecessor of this join block, with respect to some
|
||||
* arbitrary order.
|
||||
*/
|
||||
JoinBlockPredecessor getJoinBlockPredecessor(int i) { result = getJoinBlockPredecessor(this, i) }
|
||||
}
|
||||
|
||||
/** A basic block that is an immediate predecessor of a join block. */
|
||||
class JoinBlockPredecessor extends BasicBlock {
|
||||
JoinBlockPredecessor() { this.getASuccessor() instanceof JoinBlock }
|
||||
}
|
||||
|
||||
/** A basic block that terminates in a condition, splitting the subsequent control flow. */
|
||||
class ConditionBlock extends BasicBlock {
|
||||
ConditionBlock() { this.getLastNode().isCondition() }
|
||||
|
||||
/**
|
||||
* Holds if basic block `succ` is immediately controlled by this basic
|
||||
* block with conditional value `s`. That is, `succ` is an immediate
|
||||
* successor of this block, and `succ` can only be reached from
|
||||
* the callable entry point by going via the `s` edge out of this basic block.
|
||||
*/
|
||||
predicate immediatelyControls(BasicBlock succ, BooleanSuccessor s) {
|
||||
immediatelyControls(this, succ, s)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if basic block `controlled` is controlled by this basic block with
|
||||
* conditional value `s`. That is, `controlled` can only be reached from
|
||||
* the callable entry point by going via the `s` edge out of this basic block.
|
||||
*/
|
||||
predicate controls(BasicBlock controlled, BooleanSuccessor s) { controls(this, controlled, s) }
|
||||
}
|
||||
316
actions/ql/lib/codeql/actions/controlflow/internal/Cfg.qll
Normal file
316
actions/ql/lib/codeql/actions/controlflow/internal/Cfg.qll
Normal file
@@ -0,0 +1,316 @@
|
||||
private import codeql.actions.Ast
|
||||
private import codeql.controlflow.Cfg as CfgShared
|
||||
private import codeql.Locations
|
||||
|
||||
module Completion {
|
||||
private newtype TCompletion =
|
||||
TSimpleCompletion() or
|
||||
TBooleanCompletion(boolean b) { b in [false, true] } or
|
||||
TReturnCompletion()
|
||||
|
||||
abstract class Completion extends TCompletion {
|
||||
abstract string toString();
|
||||
|
||||
predicate isValidForSpecific(AstNode e) { none() }
|
||||
|
||||
predicate isValidFor(AstNode e) { this.isValidForSpecific(e) }
|
||||
|
||||
abstract SuccessorType getAMatchingSuccessorType();
|
||||
}
|
||||
|
||||
abstract class NormalCompletion extends Completion { }
|
||||
|
||||
class SimpleCompletion extends NormalCompletion, TSimpleCompletion {
|
||||
override string toString() { result = "SimpleCompletion" }
|
||||
|
||||
override predicate isValidFor(AstNode e) { not any(Completion c).isValidForSpecific(e) }
|
||||
|
||||
override NormalSuccessor getAMatchingSuccessorType() { any() }
|
||||
}
|
||||
|
||||
class BooleanCompletion extends NormalCompletion, TBooleanCompletion {
|
||||
boolean value;
|
||||
|
||||
BooleanCompletion() { this = TBooleanCompletion(value) }
|
||||
|
||||
override string toString() { result = "BooleanCompletion(" + value + ")" }
|
||||
|
||||
override predicate isValidForSpecific(AstNode e) { none() }
|
||||
|
||||
override BooleanSuccessor getAMatchingSuccessorType() { result.getValue() = value }
|
||||
|
||||
final boolean getValue() { result = value }
|
||||
}
|
||||
|
||||
class ReturnCompletion extends Completion, TReturnCompletion {
|
||||
override string toString() { result = "ReturnCompletion" }
|
||||
|
||||
override predicate isValidForSpecific(AstNode e) { none() }
|
||||
|
||||
override ReturnSuccessor getAMatchingSuccessorType() { any() }
|
||||
}
|
||||
|
||||
cached
|
||||
private newtype TSuccessorType =
|
||||
TNormalSuccessor() or
|
||||
TBooleanSuccessor(boolean b) { b in [false, true] } or
|
||||
TReturnSuccessor()
|
||||
|
||||
class SuccessorType extends TSuccessorType {
|
||||
string toString() { none() }
|
||||
}
|
||||
|
||||
class NormalSuccessor extends SuccessorType, TNormalSuccessor {
|
||||
override string toString() { result = "successor" }
|
||||
}
|
||||
|
||||
class BooleanSuccessor extends SuccessorType, TBooleanSuccessor {
|
||||
boolean value;
|
||||
|
||||
BooleanSuccessor() { this = TBooleanSuccessor(value) }
|
||||
|
||||
override string toString() { result = value.toString() }
|
||||
|
||||
boolean getValue() { result = value }
|
||||
}
|
||||
|
||||
class ReturnSuccessor extends SuccessorType, TReturnSuccessor {
|
||||
override string toString() { result = "return" }
|
||||
}
|
||||
}
|
||||
|
||||
module CfgScope {
|
||||
abstract class CfgScope extends AstNode { }
|
||||
|
||||
class WorkflowScope extends CfgScope instanceof Workflow { }
|
||||
|
||||
class CompositeActionScope extends CfgScope instanceof CompositeAction { }
|
||||
}
|
||||
|
||||
private module Implementation implements CfgShared::InputSig<Location> {
|
||||
import codeql.actions.Ast
|
||||
import Completion
|
||||
import CfgScope
|
||||
|
||||
predicate completionIsNormal(Completion c) { not c instanceof ReturnCompletion }
|
||||
|
||||
// Not using CFG splitting, so the following are just dummy types.
|
||||
private newtype TUnit = Unit()
|
||||
|
||||
additional class SplitKindBase = TUnit;
|
||||
|
||||
additional class Split extends TUnit {
|
||||
abstract string toString();
|
||||
}
|
||||
|
||||
predicate completionIsSimple(Completion c) { c instanceof SimpleCompletion }
|
||||
|
||||
predicate completionIsValidFor(Completion c, AstNode e) { c.isValidFor(e) }
|
||||
|
||||
CfgScope getCfgScope(AstNode e) {
|
||||
exists(AstNode p | p = e.getParentNode() |
|
||||
result = p
|
||||
or
|
||||
not p instanceof CfgScope and result = getCfgScope(p)
|
||||
)
|
||||
}
|
||||
|
||||
additional int maxSplits() { result = 0 }
|
||||
|
||||
predicate scopeFirst(CfgScope scope, AstNode e) {
|
||||
first(scope.(Workflow), e) or
|
||||
first(scope.(CompositeAction), e)
|
||||
}
|
||||
|
||||
predicate scopeLast(CfgScope scope, AstNode e, Completion c) {
|
||||
last(scope.(Workflow), e, c) or
|
||||
last(scope.(CompositeAction), e, c)
|
||||
}
|
||||
|
||||
predicate successorTypeIsSimple(SuccessorType t) { t instanceof NormalSuccessor }
|
||||
|
||||
predicate successorTypeIsCondition(SuccessorType t) { t instanceof BooleanSuccessor }
|
||||
|
||||
SuccessorType getAMatchingSuccessorType(Completion c) { result = c.getAMatchingSuccessorType() }
|
||||
|
||||
predicate isAbnormalExitType(SuccessorType t) { none() }
|
||||
}
|
||||
|
||||
module CfgImpl = CfgShared::Make<Location, Implementation>;
|
||||
|
||||
private import CfgImpl
|
||||
private import Completion
|
||||
private import CfgScope
|
||||
|
||||
private class CompositeActionTree extends StandardPreOrderTree instanceof CompositeAction {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(
|
||||
child = this.(CompositeAction).getAnInput() or
|
||||
child = this.(CompositeAction).getOutputs() or
|
||||
child = this.(CompositeAction).getRuns()
|
||||
) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class RunsTree extends StandardPreOrderTree instanceof Runs {
|
||||
override ControlFlowTree getChildNode(int i) { result = super.getStep(i) }
|
||||
}
|
||||
|
||||
private class WorkflowTree extends StandardPreOrderTree instanceof Workflow {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
if this instanceof ReusableWorkflow
|
||||
then
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(
|
||||
child = this.(ReusableWorkflow).getAnInput() or
|
||||
child = this.(ReusableWorkflow).getOutputs() or
|
||||
child = this.(ReusableWorkflow).getStrategy() or
|
||||
child = this.(ReusableWorkflow).getAJob()
|
||||
) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
else
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(
|
||||
child = super.getStrategy() or
|
||||
child = super.getAJob()
|
||||
) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class OutputsTree extends StandardPreOrderTree instanceof Outputs {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
child = super.getAnOutputExpr() and l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class StrategyTree extends StandardPreOrderTree instanceof Strategy {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
child = super.getAMatrixVarExpr() and l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class JobTree extends StandardPreOrderTree instanceof LocalJob {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(
|
||||
child = super.getAStep() or
|
||||
child = super.getOutputs() or
|
||||
child = super.getStrategy()
|
||||
) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class ExternalJobTree extends StandardPreOrderTree instanceof ExternalJob {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(
|
||||
child = super.getArgumentExpr(_) or
|
||||
child = super.getInScopeEnvVarExpr(_) or
|
||||
child = super.getOutputs() or
|
||||
child = super.getStrategy()
|
||||
) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class UsesTree extends StandardPreOrderTree instanceof UsesStep {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(child = super.getArgumentExpr(_) or child = super.getInScopeEnvVarExpr(_)) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class RunTree extends StandardPreOrderTree instanceof Run {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](AstNode child, Location l |
|
||||
(
|
||||
child = super.getInScopeEnvVarExpr(_) or
|
||||
child = super.getAnScriptExpr() or
|
||||
child = super.getScript()
|
||||
) and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class ScalarValueTree extends StandardPreOrderTree instanceof ScalarValue {
|
||||
override ControlFlowTree getChildNode(int i) {
|
||||
result =
|
||||
rank[i](Expression child, Location l |
|
||||
child = super.getAChildNode() and
|
||||
l = child.getLocation()
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(), child.toString()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private class UsesLeaf extends LeafTree instanceof Uses { }
|
||||
|
||||
private class InputTree extends LeafTree instanceof Input { }
|
||||
|
||||
private class ScalarValueLeaf extends LeafTree instanceof ScalarValue { }
|
||||
|
||||
private class ExpressionLeaf extends LeafTree instanceof Expression { }
|
||||
131
actions/ql/lib/codeql/actions/dataflow/ExternalFlow.qll
Normal file
131
actions/ql/lib/codeql/actions/dataflow/ExternalFlow.qll
Normal file
@@ -0,0 +1,131 @@
|
||||
private import actions
|
||||
private import internal.ExternalFlowExtensions as Extensions
|
||||
private import codeql.actions.DataFlow
|
||||
private import codeql.actions.security.ArtifactPoisoningQuery
|
||||
|
||||
/**
|
||||
* MaD sources
|
||||
* Fields:
|
||||
* - action: Fully-qualified action name (NWO)
|
||||
* - version: Either '*' or a specific SHA/Tag
|
||||
* - output arg: To node (prefixed with either `env.` or `output.`)
|
||||
* - provenance: verification of the model
|
||||
*/
|
||||
predicate actionsSourceModel(
|
||||
string action, string version, string output, string kind, string provenance
|
||||
) {
|
||||
Extensions::actionsSourceModel(action, version, output, kind, provenance)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD summaries
|
||||
* Fields:
|
||||
* - action: Fully-qualified action name (NWO)
|
||||
* - version: Either '*' or a specific SHA/Tag
|
||||
* - input arg: From node (prefixed with either `env.` or `input.`)
|
||||
* - output arg: To node (prefixed with either `env.` or `output.`)
|
||||
* - kind: Either 'Taint' or 'Value'
|
||||
* - provenance: verification of the model
|
||||
*/
|
||||
predicate actionsSummaryModel(
|
||||
string action, string version, string input, string output, string kind, string provenance
|
||||
) {
|
||||
Extensions::actionsSummaryModel(action, version, input, output, kind, provenance)
|
||||
}
|
||||
|
||||
/**
|
||||
* MaD sinks
|
||||
* Fields:
|
||||
* - action: Fully-qualified action name (NWO)
|
||||
* - version: Either '*' or a specific SHA/Tag
|
||||
* - input: sink node (prefixed with either `env.` or `input.`)
|
||||
* - kind: sink kind
|
||||
* - provenance: verification of the model
|
||||
*/
|
||||
predicate actionsSinkModel(
|
||||
string action, string version, string input, string kind, string provenance
|
||||
) {
|
||||
Extensions::actionsSinkModel(action, version, input, kind, provenance)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if source.fieldName is a MaD-defined source of a given taint kind.
|
||||
*/
|
||||
predicate madSource(DataFlow::Node source, string kind, string fieldName) {
|
||||
exists(Uses uses, string action, string version |
|
||||
actionsSourceModel(action, version, fieldName, kind, _) and
|
||||
uses.getCallee() = action.toLowerCase() and
|
||||
(
|
||||
if version.trim() = "*"
|
||||
then uses.getVersion() = any(string v)
|
||||
else uses.getVersion() = version.trim()
|
||||
) and
|
||||
(
|
||||
if fieldName.trim().matches("env.%")
|
||||
then source.asExpr() = uses.getInScopeEnvVarExpr(fieldName.trim().replaceAll("env.", ""))
|
||||
else
|
||||
if fieldName.trim().matches("output.%")
|
||||
then source.asExpr() = uses
|
||||
else none()
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
 * Holds if the data flow from `pred` to `succ` is a MaD store step.
 * The summary row's `output` (an `output.NAME` path) determines the field content `c`
 * stored on the `Uses` step node `succ`; `pred` is the modeled input node.
 */
predicate madStoreStep(DataFlow::Node pred, DataFlow::Node succ, DataFlow::ContentSet c) {
  exists(Uses uses, string action, string version, string input, string output |
    actionsSummaryModel(action, version, input, output, "taint", _) and
    // the stored content is the output field name, with the `output.` prefix stripped
    c = any(DataFlow::FieldContent ct | ct.getName() = output.replaceAll("output.", "")) and
    uses.getCallee() = action.toLowerCase() and
    // version check: '*' matches any version
    (
      if version.trim() = "*"
      then uses.getVersion() = any(string v)
      else uses.getVersion() = version.trim()
    ) and
    // pred provenance: env var, `with:` argument, or a previously downloaded artifact
    (
      input.trim().matches("env.%") and
      pred.asExpr() = uses.getInScopeEnvVarExpr(input.trim().replaceAll("env.", ""))
      or
      input.trim().matches("input.%") and
      pred.asExpr() = uses.getArgumentExpr(input.trim().replaceAll("input.", ""))
      or
      input.trim() = "artifact" and
      exists(UntrustedArtifactDownloadStep download |
        pred.asExpr() = download and
        download.getAFollowingStep() = uses
      )
    ) and
    succ.asExpr() = uses
  )
}
|
||||
|
||||
/**
 * Holds if `sink` is a MaD-defined sink for the given taint `kind`.
 * Matches a `Uses` step against a MaD sink model row by callee and version,
 * then resolves the modeled `input` path to the sink node.
 */
predicate madSink(DataFlow::Node sink, string kind) {
  exists(Uses uses, string action, string version, string input |
    actionsSinkModel(action, version, input, kind, _) and
    uses.getCallee() = action.toLowerCase() and
    // version check: '*' matches any version
    (
      if version.trim() = "*"
      then uses.getVersion() = any(string v)
      else uses.getVersion() = version.trim()
    ) and
    // sink provenance: env var, `with:` argument, or the step itself for artifacts
    (
      input.trim().matches("env.%") and
      sink.asExpr() = uses.getInScopeEnvVarExpr(input.trim().replaceAll("env.", ""))
      or
      input.trim().matches("input.%") and
      sink.asExpr() = uses.getArgumentExpr(input.trim().replaceAll("input.", ""))
      or
      input.trim() = "artifact" and
      sink.asExpr() = uses
    )
  )
}
|
||||
366
actions/ql/lib/codeql/actions/dataflow/FlowSources.qll
Normal file
366
actions/ql/lib/codeql/actions/dataflow/FlowSources.qll
Normal file
@@ -0,0 +1,366 @@
|
||||
private import codeql.actions.security.ArtifactPoisoningQuery
|
||||
private import codeql.actions.security.UntrustedCheckoutQuery
|
||||
private import codeql.actions.config.Config
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
|
||||
/**
 * A data flow source.
 * Subclasses provide the threat-model category via `getThreatModel()`.
 */
abstract class SourceNode extends DataFlow::Node {
  /**
   * Gets a string that represents the source kind with respect to threat modeling.
   */
  abstract string getThreatModel();
}
|
||||
|
||||
/** A data flow source of remote user input. */
abstract class RemoteFlowSource extends SourceNode {
  /** Gets a string that describes the type of this remote flow source. */
  abstract string getSourceType();

  /** Gets the event that triggered the source. */
  abstract string getEventName();

  // all remote flow sources share the "remote" threat model
  override string getThreatModel() { result = "remote" }
}
|
||||
|
||||
/**
 * A data flow source of user input from the `github` context.
 * eg: `github.head_ref`
 */
class GitHubCtxSource extends RemoteFlowSource {
  string flag; // source type, e.g. "branch"
  string event; // name of the triggering event, or "unknown"
  GitHubExpression e;

  GitHubCtxSource() {
    this.asExpr() = e and
    // github.head_ref — attacker-controllable branch name
    e.getFieldName() = "head_ref" and
    flag = "branch" and
    (
      // only dangerous when triggered by pull_request_target
      event = e.getATriggerEvent().getName() and
      event = "pull_request_target"
      or
      // no resolvable trigger event: be conservative and keep the source
      not exists(e.getATriggerEvent()) and
      event = "unknown"
    )
  }

  override string getSourceType() { result = flag }

  override string getEventName() { result = event }
}
|
||||
|
||||
/**
 * A data flow source of user input read from a `github.event` context expression
 * (e.g. `github.event.pull_request.title`), matched against the model of
 * untrusted event properties.
 */
class GitHubEventCtxSource extends RemoteFlowSource {
  string flag; // source type from the untrusted-properties model
  string context; // the raw context expression text
  string event; // name of the triggering event, or "unknown"

  GitHubEventCtxSource() {
    exists(Expression e, string regexp |
      this.asExpr() = e and
      context = e.getExpression() and
      (
        // the context is available for the job trigger events
        event = e.getATriggerEvent().getName() and
        exists(string context_prefix |
          contextTriggerDataModel(event, context_prefix) and
          normalizeExpr(context).matches("%" + context_prefix + "%")
        )
        or
        not exists(e.getATriggerEvent()) and
        event = "unknown"
      ) and
      untrustedEventPropertiesDataModel(regexp, flag) and
      // json-flagged properties are handled by GitHubEventJsonSource instead
      not flag = "json" and
      normalizeExpr(context).regexpMatch("(?i)\\s*" + wrapRegexp(regexp) + ".*")
    )
  }

  override string getSourceType() { result = flag }

  /** Gets the raw context expression text for this source. */
  string getContext() { result = context }

  override string getEventName() { result = event }
}
|
||||
|
||||
/**
 * A remote flow source produced by a shell command executed in a `Run` step
 * (e.g. `git`, `gh`, or `jq` reading attacker-influenced data).
 */
abstract class CommandSource extends RemoteFlowSource {
  /** Gets the command whose output is the source. */
  abstract string getCommand();

  /** Gets the `Run` step that executes the command. */
  abstract Run getEnclosingRun();

  override string getEventName() { result = this.getEnclosingRun().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * The output of an untrusted `git` command (per `untrustedGitCommandDataModel`)
 * run after a pull-request HEAD checkout, where attacker-controlled repository
 * content can influence the command's output.
 */
class GitCommandSource extends RemoteFlowSource, CommandSource {
  Run run;
  string cmd; // the matched `git ...` statement
  string flag; // source type from the data model

  GitCommandSource() {
    exists(Step checkout, string cmd_regex |
      checkout instanceof SimplePRHeadCheckoutStep and
      this.asExpr() = run.getScript() and
      // the Run step executes after the untrusted checkout
      checkout.getAFollowingStep() = run and
      run.getScript().getAStmt() = cmd and
      cmd.indexOf("git") = 0 and
      untrustedGitCommandDataModel(cmd_regex, flag) and
      cmd.regexpMatch(cmd_regex + ".*")
    )
  }

  override string getSourceType() { result = flag }

  override string getCommand() { result = cmd }

  override Run getEnclosingRun() { result = run }
}
|
||||
|
||||
/**
 * The output of an untrusted GitHub CLI (`gh`) command, per
 * `untrustedGhCommandDataModel`. PR-related commands are only considered
 * sources when the workflow is triggered by a checkout-capable event.
 */
class GhCLICommandSource extends RemoteFlowSource, CommandSource {
  Run run;
  string cmd; // the matched `gh ...` statement
  string flag; // source type from the data model

  GhCLICommandSource() {
    exists(string cmd_regex |
      this.asExpr() = run.getScript() and
      run.getScript().getAStmt() = cmd and
      cmd.indexOf("gh ") = 0 and
      untrustedGhCommandDataModel(cmd_regex, flag) and
      cmd.regexpMatch(cmd_regex + ".*") and
      (
        // PR-related gh commands: restrict to triggers where a PR checkout is possible
        cmd.regexpMatch(".*\\b(pr|pulls)\\b.*") and
        run.getATriggerEvent().getName() = checkoutTriggers()
        or
        not cmd.regexpMatch(".*\\b(pr|pulls)\\b.*")
      )
    )
  }

  override string getSourceType() { result = flag }

  override Run getEnclosingRun() { result = run }

  override string getCommand() { result = cmd }
}
|
||||
|
||||
/**
 * A `jq` command reading an untrusted property out of the event payload file
 * (`$GITHUB_EVENT_PATH`). The extracted access path is rewritten to the
 * equivalent `github.event.*` form and matched against the untrusted-properties model.
 */
class GitHubEventPathSource extends RemoteFlowSource, CommandSource {
  string cmd; // the matched jq command
  string flag; // source type from the data model
  string access_path; // the jq path rewritten as `github.event...`
  Run run;

  // Examples
  // COMMENT_AUTHOR=$(jq -r .comment.user.login "$GITHUB_EVENT_PATH")
  // CURRENT_COMMENT=$(jq -r .comment.body "$GITHUB_EVENT_PATH")
  // PR_HEAD=$(jq --raw-output .pull_request.head.ref ${GITHUB_EVENT_PATH})
  // PR_NUMBER=$(jq --raw-output .pull_request.number ${GITHUB_EVENT_PATH})
  // PR_TITLE=$(jq --raw-output .pull_request.title ${GITHUB_EVENT_PATH})
  // BODY=$(jq -r '.issue.body' "$GITHUB_EVENT_PATH" | sed -n '3p')
  GitHubEventPathSource() {
    this.asExpr() = run.getScript() and
    run.getScript().getACommand() = cmd and
    cmd.matches("jq%") and
    cmd.matches("%GITHUB_EVENT_PATH%") and
    exists(string regexp |
      untrustedEventPropertiesDataModel(regexp, flag) and
      not flag = "json" and
      // the first whitespace-delimited token between flags is the jq filter, e.g. `.comment.body`
      access_path = "github.event" + cmd.regexpCapture(".*\\s+([^\\s]+)\\s+.*", 1) and
      normalizeExpr(access_path).regexpMatch("(?i)\\s*" + wrapRegexp(regexp) + ".*")
    )
  }

  override string getSourceType() { result = flag }

  override string getCommand() { result = cmd }

  override Run getEnclosingRun() { result = run }
}
|
||||
|
||||
/**
 * A context expression that serializes untrusted event data as JSON
 * (source type "json"), e.g. via `toJSON(github.event...)`.
 */
class GitHubEventJsonSource extends RemoteFlowSource {
  string flag; // always "json"
  string event; // name of the triggering event, or "unknown"

  GitHubEventJsonSource() {
    exists(Expression e, string context, string regexp |
      this.asExpr() = e and
      context = e.getExpression() and
      untrustedEventPropertiesDataModel(regexp, _) and
      (
        // only contexts for the triggering events are considered tainted.
        // eg: for `pull_request`, we only consider `github.event.pull_request`
        event = e.getEnclosingWorkflow().getATriggerEvent().getName() and
        exists(string context_prefix |
          contextTriggerDataModel(event, context_prefix) and
          normalizeExpr(context).matches("%" + context_prefix + "%")
        ) and
        normalizeExpr(context).regexpMatch("(?i).*" + wrapJsonRegexp(regexp) + ".*")
        or
        // github.event is tainted for all triggers
        event = e.getEnclosingWorkflow().getATriggerEvent().getName() and
        contextTriggerDataModel(e.getEnclosingWorkflow().getATriggerEvent().getName(), _) and
        normalizeExpr(context).regexpMatch("(?i).*" + wrapJsonRegexp("\\bgithub.event\\b") + ".*")
        or
        // NOTE(review): the positive branches use e.getEnclosingWorkflow().getATriggerEvent()
        // but this negation uses e.getATriggerEvent() — confirm these are equivalent here.
        not exists(e.getATriggerEvent()) and
        event = "unknown"
      ) and
      flag = "json"
    )
  }

  override string getSourceType() { result = flag }

  override string getEventName() { result = event }
}
|
||||
|
||||
/**
 * A source of untrusted data defined in a MaD specification
 * (see `madSource` in `ExternalFlow`).
 */
class MaDSource extends RemoteFlowSource {
  string sourceType;

  MaDSource() { madSource(this, sourceType, _) }

  override string getSourceType() { result = sourceType }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/** A remote source materialized as files in the workspace (e.g. a downloaded artifact or an untrusted checkout). */
abstract class FileSource extends RemoteFlowSource { }
|
||||
|
||||
/**
 * A downloaded artifact (an `UntrustedArtifactDownloadStep`).
 */
class ArtifactSource extends RemoteFlowSource, FileSource {
  ArtifactSource() { this.asExpr() instanceof UntrustedArtifactDownloadStep }

  override string getSourceType() { result = "artifact" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * A file from an untrusted checkout (a `SimplePRHeadCheckoutStep`).
 */
private class CheckoutSource extends RemoteFlowSource, FileSource {
  CheckoutSource() { this.asExpr() instanceof SimplePRHeadCheckoutStep }

  // deliberately reuses the "artifact" source type, same as ArtifactSource
  override string getSourceType() { result = "artifact" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * A list of file names returned by dorny/paths-filter.
 * Only a source when `list-files` is enabled (csv/json output).
 */
class DornyPathsFilterSource extends RemoteFlowSource {
  DornyPathsFilterSource() {
    exists(UsesStep u |
      u.getCallee() = "dorny/paths-filter" and
      u.getArgument("list-files") = ["csv", "json"] and
      this.asExpr() = u
    )
  }

  override string getSourceType() { result = "filename" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * A list of file names returned by tj-actions/changed-files.
 * Only a source for known-vulnerable versions, or when `safe_output` is disabled.
 */
class TJActionsChangedFilesSource extends RemoteFlowSource {
  TJActionsChangedFilesSource() {
    exists(UsesStep u, string vulnerable_action, string vulnerable_version, string vulnerable_sha |
      vulnerableActionsDataModel(vulnerable_action, vulnerable_version, vulnerable_sha, _) and
      u.getCallee() = "tj-actions/changed-files" and
      u.getCallee() = vulnerable_action and
      (
        // output sanitization explicitly disabled
        u.getArgument("safe_output") = "false"
        or
        // pinned to a version/SHA known to be vulnerable
        (u.getVersion() = vulnerable_version or u.getVersion() = vulnerable_sha)
      ) and
      this.asExpr() = u
    )
  }

  override string getSourceType() { result = "filename" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * A list of file names returned by tj-actions/verify-changed-files.
 * Only a source for known-vulnerable versions, or when `safe_output` is disabled.
 */
class TJActionsVerifyChangedFilesSource extends RemoteFlowSource {
  TJActionsVerifyChangedFilesSource() {
    exists(UsesStep u, string vulnerable_action, string vulnerable_version, string vulnerable_sha |
      vulnerableActionsDataModel(vulnerable_action, vulnerable_version, vulnerable_sha, _) and
      u.getCallee() = "tj-actions/verify-changed-files" and
      u.getCallee() = vulnerable_action and
      (
        // output sanitization explicitly disabled
        u.getArgument("safe_output") = "false"
        or
        // pinned to a version/SHA known to be vulnerable
        (u.getVersion() = vulnerable_version or u.getVersion() = vulnerable_sha)
      ) and
      this.asExpr() = u
    )
  }

  override string getSourceType() { result = "filename" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * Output of xt0rted/slash-command-action when it does not require
 * write permission from the commenter (`permission-level` is read/none).
 */
class Xt0rtedSlashCommandSource extends RemoteFlowSource {
  Xt0rtedSlashCommandSource() {
    exists(UsesStep u |
      u.getCallee() = "xt0rted/slash-command-action" and
      u.getArgument("permission-level").toLowerCase() = ["read", "none"] and
      this.asExpr() = u
    )
  }

  override string getSourceType() { result = "text" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * Output of zentered/issue-forms-body-parser when parsing the triggering
 * issue body (no explicit `body` argument is provided).
 */
class ZenteredIssueFormBodyParserSource extends RemoteFlowSource {
  ZenteredIssueFormBodyParserSource() {
    exists(UsesStep u |
      u.getCallee() = "zentered/issue-forms-body-parser" and
      // with no `body` argument the action parses the (untrusted) event's issue body
      not exists(u.getArgument("body")) and
      this.asExpr() = u
    )
  }

  override string getSourceType() { result = "text" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
|
||||
/**
 * Output of octokit/request-action issuing a GET request against API routes
 * that return user-controllable data (commits, comments, PRs, issues, users).
 */
class OctokitRequestActionSource extends RemoteFlowSource {
  OctokitRequestActionSource() {
    exists(UsesStep u, string route |
      u.getCallee() = "octokit/request-action" and
      route = u.getArgument("route").trim() and
      // only read-only GET requests
      route.indexOf("GET") = 0 and
      (
        route.matches("%/commits%") or
        route.matches("%/comments%") or
        route.matches("%/pulls%") or
        route.matches("%/issues%") or
        route.matches("%/users%") or
        route.matches("%github.event.issue.pull_request.url%")
      ) and
      this.asExpr() = u
    )
  }

  override string getSourceType() { result = "text" }

  override string getEventName() { result = this.asExpr().getATriggerEvent().getName() }
}
|
||||
92
actions/ql/lib/codeql/actions/dataflow/FlowSteps.qll
Normal file
92
actions/ql/lib/codeql/actions/dataflow/FlowSteps.qll
Normal file
@@ -0,0 +1,92 @@
|
||||
/**
|
||||
* Provides classes representing various flow steps for taint tracking.
|
||||
*/
|
||||
|
||||
private import actions
|
||||
private import codeql.actions.DataFlow
|
||||
private import codeql.actions.dataflow.FlowSources
|
||||
|
||||
/**
 * Holds if a Run step declares an environment variable, uses it in its script and sets an output in its script.
 * e.g.
 *  - name: Extract and Clean Initial URL
 *    id: extract-url
 *    env:
 *      BODY: ${{ github.event.comment.body }}
 *    run: |
 *      echo "::set-output name=foo::$BODY"
 *      echo "foo=$(echo $BODY)" >> $GITHUB_OUTPUT
 *      echo "foo=$(echo $BODY)" >> "$GITHUB_OUTPUT"
 *      echo "::set-output name=step-output::$BODY"
 */
predicate envToOutputStoreStep(DataFlow::Node pred, DataFlow::Node succ, DataFlow::ContentSet c) {
  exists(Run run, string var, string field |
    run.getInScopeEnvVarExpr(var) = pred.asExpr() and
    // the taint is stored on the Run step node under the output field name
    succ.asExpr() = run and
    run.getScript().getAnEnvReachingGitHubOutputWrite(var, field) and
    c = any(DataFlow::FieldContent ct | ct.getName() = field)
  )
}
|
||||
|
||||
/**
 * Holds if a Run step reads an in-scope environment variable and writes its
 * value to `$GITHUB_ENV`, storing the taint as field `c` on the enclosing job.
 */
predicate envToEnvStoreStep(DataFlow::Node pred, DataFlow::Node succ, DataFlow::ContentSet c) {
  exists(
    Run run, string var, string field |
    run.getInScopeEnvVarExpr(var) = pred.asExpr() and
    // we store the taint on the enclosing job since there may not exist an implicit env attribute
    succ.asExpr() = run.getEnclosingJob() and
    run.getScript().getAnEnvReachingGitHubEnvWrite(var, field) and
    c = any(DataFlow::FieldContent ct | ct.getName() = field)
  )
}
|
||||
|
||||
/**
 * A command whose output gets assigned to a step output.
 *  - run: |
 *      echo "foo=$(cmd)" >> "$GITHUB_OUTPUT"
 *  - run: |
 *      foo=$(<cmd)"
 *      echo "bar=${foo}" >> "$GITHUB_OUTPUT"
 */
predicate commandToOutputStoreStep(DataFlow::Node pred, DataFlow::Node succ, DataFlow::ContentSet c) {
  exists(Run run, string key, string cmd |
    (
      // the command is itself a modeled untrusted command source ...
      exists(CommandSource source | source.getCommand() = cmd)
      or
      // ... or a file-read command after a file source (artifact/checkout)
      exists(FileSource source |
        source.asExpr().(Step).getAFollowingStep() = run and
        run.getScript().getAFileReadCommand() = cmd
      )
    ) and
    run.getScript().getACmdReachingGitHubOutputWrite(cmd, key) and
    c = any(DataFlow::FieldContent ct | ct.getName() = key) and
    pred.asExpr() = run.getScript() and
    succ.asExpr() = run
  )
}
|
||||
|
||||
/**
 * A command whose output gets assigned to an environment variable.
 *  - run: |
 *      echo "foo=$(cmd)" >> "$GITHUB_ENV"
 *  - run: |
 *      foo=$(<cmd)"
 *      echo "bar=${foo}" >> "$GITHUB_ENV"
 */
predicate commandToEnvStoreStep(DataFlow::Node pred, DataFlow::Node succ, DataFlow::ContentSet c) {
  exists(Run run, string key, string cmd |
    (
      // the command is itself a modeled untrusted command source ...
      exists(CommandSource source | source.getCommand() = cmd)
      or
      // ... or a file-read command after a file source (artifact/checkout)
      exists(FileSource source |
        source.asExpr().(Step).getAFollowingStep() = run and
        run.getScript().getAFileReadCommand() = cmd
      )
    ) and
    run.getScript().getACmdReachingGitHubEnvWrite(cmd, key) and
    c = any(DataFlow::FieldContent ct | ct.getName() = key) and
    pred.asExpr() = run.getScript() and
    // we store the taint on the enclosing job since there may not be an implicit env attribute
    succ.asExpr() = run.getEnclosingJob()
  )
}
|
||||
156
actions/ql/lib/codeql/actions/dataflow/TaintSteps.qll
Normal file
156
actions/ql/lib/codeql/actions/dataflow/TaintSteps.qll
Normal file
@@ -0,0 +1,156 @@
|
||||
/**
|
||||
* Provides classes representing various flow steps for taint tracking.
|
||||
*/
|
||||
|
||||
private import actions
|
||||
private import codeql.util.Unit
|
||||
private import codeql.actions.DataFlow
|
||||
private import codeql.actions.dataflow.FlowSources
|
||||
|
||||
/**
 * A unit class for adding additional taint steps.
 *
 * Extend this class to add additional taint steps that should apply to all
 * taint configurations.
 */
class AdditionalTaintStep extends Unit {
  /**
   * Holds if the step from `node1` to `node2` should be considered a taint
   * step for all configurations.
   */
  abstract predicate step(DataFlow::Node node1, DataFlow::Node node2);
}
|
||||
|
||||
/**
 * A file source step followed by a Run step may read the file:
 * taint flows from the file source into the Run step's script when the
 * script contains any file-read command.
 */
predicate fileDownloadToRunStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(FileSource source, Run run |
    pred = source and
    source.asExpr().(Step).getAFollowingStep() = run and
    succ.asExpr() = run.getScript() and
    exists(run.getScript().getAFileReadCommand())
  )
}
|
||||
|
||||
/**
 * A read of the `*_files` field of the dorny/paths-filter action.
 */
predicate dornyPathsFilterTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(StepsExpression o |
    pred instanceof DornyPathsFilterSource and
    // NOTE(review): unlike the sibling taint-step predicates below, this one does not
    // also require `o.getTarget() = pred.asExpr()` — confirm matching by step id alone is intended.
    o.getStepId() = pred.asExpr().(UsesStep).getId() and
    o.getFieldName().matches("%_files") and
    succ.asExpr() = o
  )
}
|
||||
|
||||
/**
 * A read of a user-controlled field of the tj-actions/changed-files action.
 */
predicate tjActionsChangedFilesTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(StepsExpression o |
    pred instanceof TJActionsChangedFilesSource and
    o.getTarget() = pred.asExpr() and
    o.getStepId() = pred.asExpr().(UsesStep).getId() and
    // the action's filename-carrying outputs
    o.getFieldName() =
      [
        "added_files", "copied_files", "deleted_files", "modified_files", "renamed_files",
        "all_old_new_renamed_files", "type_changed_files", "unmerged_files", "unknown_files",
        "all_changed_and_modified_files", "all_changed_files", "other_changed_files",
        "all_modified_files", "other_modified_files", "other_deleted_files", "modified_keys",
        "changed_keys"
      ] and
    succ.asExpr() = o
  )
}
|
||||
|
||||
/**
 * A read of a user-controlled field of the tj-actions/verify-changed-files action.
 */
predicate tjActionsVerifyChangedFilesTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(StepsExpression o |
    pred instanceof TJActionsVerifyChangedFilesSource and
    o.getTarget() = pred.asExpr() and
    o.getStepId() = pred.asExpr().(UsesStep).getId() and
    o.getFieldName() = "changed_files" and
    succ.asExpr() = o
  )
}
|
||||
|
||||
/**
 * A read of a user-controlled field of the xt0rted/slash-command-action action.
 */
predicate xt0rtedSlashCommandActionTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(StepsExpression o |
    pred instanceof Xt0rtedSlashCommandSource and
    o.getTarget() = pred.asExpr() and
    o.getStepId() = pred.asExpr().(UsesStep).getId() and
    o.getFieldName() = "command-arguments" and
    succ.asExpr() = o
  )
}
|
||||
|
||||
/**
 * A read of a user-controlled field of the zentered/issue-forms-body-parser action.
 * NOTE(review): despite the "Source" suffix this is a taint-step predicate;
 * consider renaming to `...TaintStep` for consistency with its siblings.
 */
predicate zenteredIssueFormBodyParserSource(DataFlow::Node pred, DataFlow::Node succ) {
  exists(StepsExpression o |
    pred instanceof ZenteredIssueFormBodyParserSource and
    o.getTarget() = pred.asExpr() and
    o.getStepId() = pred.asExpr().(UsesStep).getId() and
    (
      // plain read of the `data` output ...
      not o instanceof JsonReferenceExpression and
      o.getFieldName() = "data"
      or
      // ... or a JSON expression dereferencing `.data`
      o instanceof JsonReferenceExpression and
      o.(JsonReferenceExpression).getInnerExpression().matches("%.data")
    ) and
    succ.asExpr() = o
  )
}
|
||||
|
||||
/**
 * A read of a user-controlled field of the octokit/request-action action.
 */
predicate octokitRequestActionTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(StepsExpression o |
    pred instanceof OctokitRequestActionSource and
    o.getTarget() = pred.asExpr() and
    o.getStepId() = pred.asExpr().(UsesStep).getId() and
    succ.asExpr() = o and
    (
      // plain read of the `data` output ...
      not o instanceof JsonReferenceExpression and
      o.getFieldName() = "data"
      or
      // ... or a JSON expression dereferencing an attacker-controllable field of `.data`
      o instanceof JsonReferenceExpression and
      o.(JsonReferenceExpression).getInnerExpression().matches("%.data") and
      o.(JsonReferenceExpression)
          .getAccessPath()
          .matches([
              // NOTE(review): "%.commit.commiter.email" below looks like a typo —
              // the GitHub API commit object spells the field "committer"; verify.
              "%.title",
              "%.user.login",
              "%.body",
              "%.head.ref",
              "%.head.repo.full_name",
              "%.commit.author.email",
              "%.commit.commiter.email",
              "%.commit.message",
              "%.email",
              "%.name",
            ])
    )
  )
}
|
||||
|
||||
/**
 * Registers the action-specific taint steps defined in this file as
 * additional taint steps for all configurations.
 */
class TaintSteps extends AdditionalTaintStep {
  override predicate step(DataFlow::Node node1, DataFlow::Node node2) {
    dornyPathsFilterTaintStep(node1, node2) or
    tjActionsChangedFilesTaintStep(node1, node2) or
    tjActionsVerifyChangedFilesTaintStep(node1, node2) or
    // fix: `xt0rtedSlashCommandActionTaintStep` was listed twice; the duplicate
    // disjunct was redundant and has been removed
    xt0rtedSlashCommandActionTaintStep(node1, node2) or
    zenteredIssueFormBodyParserSource(node1, node2) or
    octokitRequestActionTaintStep(node1, node2)
  }
}
|
||||
@@ -0,0 +1,15 @@
|
||||
/**
 * Provides Actions-specific definitions for use in the data flow library.
 * Implementation of https://github.com/github/codeql/blob/main/shared/dataflow/codeql/dataflow/DataFlow.qll
 */

private import codeql.dataflow.DataFlow
private import codeql.Locations

/** The instantiation of the shared data flow `InputSig` for Actions. */
module ActionsDataFlow implements InputSig<Location> {
  import DataFlowPrivate as Private
  import DataFlowPublic
  import Private

  predicate neverSkipInPathGraph = Private::neverSkipInPathGraph/1;
}
|
||||
@@ -0,0 +1,403 @@
|
||||
private import codeql.util.Unit
|
||||
private import codeql.dataflow.DataFlow
|
||||
private import codeql.actions.Ast
|
||||
private import codeql.actions.Cfg as Cfg
|
||||
private import codeql.Locations
|
||||
private import codeql.actions.controlflow.BasicBlocks
|
||||
private import DataFlowPublic
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
private import codeql.actions.dataflow.FlowSteps
|
||||
private import codeql.actions.dataflow.FlowSources
|
||||
|
||||
/** Second-level scopes are not used for Actions; all nodes share the single `Unit` value. */
class DataFlowSecondLevelScope = Unit;
|
||||
|
||||
/** The single kind of data flow node: a wrapper around a `DataFlowExpr` CFG node. */
cached
newtype TNode = TExprNode(DataFlowExpr e)
|
||||
|
||||
/** A node receiving the result of a call; for Actions this is the call (`Uses`) node itself. */
class OutNode extends ExprNode {
  private DataFlowCall call;

  OutNode() { call = this.getCfgNode() }

  /** Gets the call this node is the output of, for return kind `kind`. */
  DataFlowCall getCall(ReturnKind kind) {
    result = call and
    kind instanceof NormalReturn
  }
}
|
||||
|
||||
/**
 * Not implemented: Actions has no cast nodes (`none()`).
 */
class CastNode extends Node {
  CastNode() { none() }
}
|
||||
|
||||
/**
 * Not implemented: Actions has no post-update nodes (`none()`).
 */
class PostUpdateNode extends Node {
  PostUpdateNode() { none() }

  /** Gets the node before the update. Not implemented (`none()`). */
  Node getPreUpdateNode() { none() }
}
|
||||
|
||||
/** Holds if `p` is a parameter of callable `c` at position `pos`. */
predicate isParameterNode(ParameterNode p, DataFlowCallable c, ParameterPosition pos) {
  p.isParameterOf(c, pos)
}
|
||||
|
||||
/** Holds if `arg` is an argument of `call` at position `pos`. */
predicate isArgumentNode(ArgumentNode arg, DataFlowCall call, ArgumentPosition pos) {
  arg.argumentOf(call, pos)
}
|
||||
|
||||
/** Gets the callable (CFG scope) enclosing `node`. */
DataFlowCallable nodeGetEnclosingCallable(Node node) {
  node = TExprNode(any(DataFlowExpr e | result = e.getScope()))
}
|
||||
|
||||
/** Gets the type of `node`; unconstrained, as there is a single (unknown) data flow type. */
DataFlowType getNodeType(Node node) { any() }
|
||||
|
||||
/** Holds if `node` should be hidden from path explanations. No nodes are hidden. */
predicate nodeIsHidden(Node node) { none() }
|
||||
|
||||
/** A CFG node whose AST node can carry data flow (jobs, expressions, steps, outputs, inputs, scalars). */
class DataFlowExpr extends Cfg::Node {
  DataFlowExpr() {
    this.getAstNode() instanceof Job or
    this.getAstNode() instanceof Expression or
    this.getAstNode() instanceof Uses or
    this.getAstNode() instanceof Run or
    this.getAstNode() instanceof Outputs or
    this.getAstNode() instanceof Input or
    this.getAstNode() instanceof ScalarValue
  }
}
|
||||
|
||||
/**
 * A call: a `Uses` step where a composite action or a reusable workflow gets called.
 */
class DataFlowCall instanceof Cfg::Node {
  DataFlowCall() { super.getAstNode() instanceof Uses }

  /** Gets a textual representation of this element. */
  string toString() { result = super.toString() }

  /** Gets the callee name (the `uses:` value) of this call. */
  string getName() { result = super.getAstNode().(Uses).getCallee() }

  /** Gets the callable enclosing this call. */
  DataFlowCallable getEnclosingCallable() { result = super.getScope() }

  /** Gets a best-effort total ordering. Not implemented (`none()`). */
  int totalorder() { none() }

  /** Gets the location of this call. */
  Location getLocation() { result = this.(Cfg::Node).getLocation() }
}
|
||||
|
||||
/**
 * A Cfg scope that can be called (a reusable workflow or composite action).
 */
class DataFlowCallable instanceof Cfg::CfgScope {
  /** Gets a textual representation of this element. */
  string toString() { result = super.toString() }

  /** Gets the resolved path identifying this callable; matched against `DataFlowCall.getName()`. */
  string getName() {
    result = this.(ReusableWorkflowImpl).getResolvedPath() or
    result = this.(CompositeActionImpl).getResolvedPath()
  }

  /** Gets a best-effort total ordering. Not implemented (`none()`). */
  int totalorder() { none() }

  /** Gets the location of this callable. */
  Location getLocation() { result = this.(Cfg::CfgScope).getLocation() }
}
|
||||
|
||||
/** The single return kind: a normal return. */
newtype TReturnKind = TNormalReturn()
|
||||
|
||||
/** A kind of returned value from a call; Actions only has normal returns. */
abstract class ReturnKind extends TReturnKind {
  /** Gets a textual representation of this element. */
  abstract string toString();
}
|
||||
|
||||
/** The normal return kind. */
class NormalReturn extends ReturnKind, TNormalReturn {
  override string toString() { result = "return" }
}
|
||||
|
||||
/** Gets a viable implementation of the target of the given `Call`, matched by name. */
DataFlowCallable viableCallable(DataFlowCall c) { c.getName() = result.getName() }
|
||||
|
||||
/**
 * Gets a node that can read the value returned from `call` with return kind
 * `kind`.
 */
OutNode getAnOutNode(DataFlowCall call, ReturnKind kind) { call = result.getCall(kind) }
|
||||
|
||||
/** The single (unknown) data flow type. */
private newtype TDataFlowType = TUnknownDataFlowType()
|
||||
|
||||
/**
 * A type for a data flow node.
 *
 * This may or may not coincide with any type system existing for the source
 * language, but should minimally include unique types for individual closure
 * expressions (typically lambdas). Actions uses a single unknown type.
 */
class DataFlowType extends TDataFlowType {
  /** Gets a textual representation of this element (empty). */
  string toString() { result = "" }
}
|
||||
|
||||
/** Gets a printable representation of `t`. Not implemented (`none()`). */
string ppReprType(DataFlowType t) { none() }
|
||||
|
||||
/** Holds if `t1` and `t2` are compatible; always holds (single type). */
predicate compatibleTypes(DataFlowType t1, DataFlowType t2) { any() }
|
||||
|
||||
/** Holds if `t1` is stronger than `t2`; never holds (single type). */
predicate typeStrongerThan(DataFlowType t1, DataFlowType t2) { none() }
|
||||
|
||||
/** Field contents used for store/read steps. */
newtype TContent =
  TFieldContent(string name) {
    // We only use field flow for env, steps and jobs outputs
    // not for accessing other context fields such as matrix or inputs
    name = any(StepsExpression a).getFieldName() or
    name = any(NeedsExpression a).getFieldName() or
    name = any(JobsExpression a).getFieldName() or
    name = any(EnvExpression a).getFieldName()
  }
|
||||
|
||||
/** Holds if `c` should be tracked with high precision; all field contents are. */
predicate forceHighPrecision(Content c) { c instanceof FieldContent }
|
||||
|
||||
/** A region of nodes; unused for Actions (contains no nodes). */
class NodeRegion instanceof Unit {
  /** Gets a textual representation of this element. */
  string toString() { result = "NodeRegion" }

  /** Holds if this region contains `n`. Never holds. */
  predicate contains(Node n) { none() }

  /** Gets a total ordering on node regions. */
  int totalOrder() { result = 1 }
}
|
||||
|
||||
/**
 * Holds if the nodes in `nr` are unreachable when the call context is `call`.
 * Never holds for Actions.
 */
predicate isUnreachableInCall(NodeRegion nr, DataFlowCall call) { none() }
|
||||
|
||||
/** Content approximations coincide with content sets (no approximation). */
class ContentApprox = ContentSet;
|
||||
|
||||
/** Gets the approximation of `c`; the identity. */
ContentApprox getContentApprox(Content c) { result = c }
|
||||
|
||||
/**
 * Made a string to match the ArgumentPosition type: an input name declared
 * by a reusable workflow or composite action.
 */
class ParameterPosition extends string {
  ParameterPosition() {
    exists(any(ReusableWorkflow w).getInput(this)) or
    exists(any(CompositeAction a).getInput(this))
  }
}
|
||||
|
||||
/**
 * Made a string to match `with:` keys in the AST.
 */
class ArgumentPosition extends string {
  ArgumentPosition() { exists(any(Uses e).getArgumentExpr(this)) }
}
|
||||
|
||||
/**
|
||||
*/
|
||||
predicate parameterMatch(ParameterPosition ppos, ArgumentPosition apos) { ppos = apos }
|
||||
|
||||
/**
|
||||
* Holds if there is a local flow step between a ${{ steps.xxx.outputs.yyy }} expression accesing a step output field
|
||||
* and the step output itself. But only for those cases where the step output is defined externally in a MaD Source
|
||||
* specification. The reason for this is that we don't currently have a way to specify that a source starts with a
|
||||
* non-empty access path so we cannot write a Source that stores the taint in a Content, we can only do that for steps
|
||||
* (storeStep). The easiest thing is to add this local flow step that simulates a read step from the source node for a specific
|
||||
* field name.
|
||||
*/
|
||||
predicate stepsCtxLocalStep(Node nodeFrom, Node nodeTo) {
|
||||
exists(Uses astFrom, StepsExpression astTo |
|
||||
madSource(nodeFrom, _, "output." + ["*", astTo.getFieldName()]) and
|
||||
astFrom = nodeFrom.asExpr() and
|
||||
astTo = nodeTo.asExpr() and
|
||||
astTo.getTarget() = astFrom
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is a local flow step between a ${{ needs.xxx.outputs.yyy }} expression accesing a job output field
|
||||
* and the step output itself. But only for those cases where the job (needs) output is defined externally in a MaD Source
|
||||
* specification. The reason for this is that we don't currently have a way to specify that a source starts with a
|
||||
* non-empty access path so we cannot write a Source that stores the taint in a Content, we can only do that for steps
|
||||
* (storeStep). The easiest thing is to add this local flow step that simulates a read step from the source node for a specific
|
||||
* field name.
|
||||
*/
|
||||
predicate needsCtxLocalStep(Node nodeFrom, Node nodeTo) {
|
||||
exists(Uses astFrom, NeedsExpression astTo |
|
||||
madSource(nodeFrom, _, "output." + astTo.getFieldName()) and
|
||||
astFrom = nodeFrom.asExpr() and
|
||||
astTo = nodeTo.asExpr() and
|
||||
astTo.getTarget() = astFrom
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is a local flow step between a ${{}} expression accesing an input variable and the input itself
|
||||
* e.g. ${{ inputs.foo }}
|
||||
*/
|
||||
predicate inputsCtxLocalStep(Node nodeFrom, Node nodeTo) {
|
||||
exists(AstNode astFrom, InputsExpression astTo |
|
||||
astFrom = nodeFrom.asExpr() and
|
||||
astTo = nodeTo.asExpr() and
|
||||
astTo.getTarget() = astFrom
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is a local flow step between a ${{}} expression accesing a matrix variable and the matrix itself
|
||||
* e.g. ${{ matrix.foo }}
|
||||
*/
|
||||
predicate matrixCtxLocalStep(Node nodeFrom, Node nodeTo) {
|
||||
exists(AstNode astFrom, MatrixExpression astTo |
|
||||
astFrom = nodeFrom.asExpr() and
|
||||
astTo = nodeTo.asExpr() and
|
||||
astTo.getTarget() = astFrom
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is a local flow step between a ${{}} expression accesing an env var and the var definition itself
|
||||
* e.g. ${{ env.foo }}
|
||||
*/
|
||||
predicate envCtxLocalStep(Node nodeFrom, Node nodeTo) {
|
||||
exists(AstNode astFrom, EnvExpression astTo |
|
||||
astFrom = nodeFrom.asExpr() and
|
||||
astTo = nodeTo.asExpr() and
|
||||
(
|
||||
madSource(nodeFrom, _, "env." + astTo.getFieldName())
|
||||
or
|
||||
astTo.getTarget() = astFrom
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if there is a local flow step from `nodeFrom` to `nodeTo`.
|
||||
* For Actions, we dont need SSA nodes since it should be already in SSA form
|
||||
* Local flow steps are always between two nodes in the same Cfg scope.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
predicate localFlowStep(Node nodeFrom, Node nodeTo) {
|
||||
stepsCtxLocalStep(nodeFrom, nodeTo) or
|
||||
needsCtxLocalStep(nodeFrom, nodeTo) or
|
||||
inputsCtxLocalStep(nodeFrom, nodeTo) or
|
||||
matrixCtxLocalStep(nodeFrom, nodeTo) or
|
||||
envCtxLocalStep(nodeFrom, nodeTo)
|
||||
}
|
||||
|
||||
/**
|
||||
* This is the local flow predicate that is used as a building block in global
|
||||
* data flow.
|
||||
*/
|
||||
cached
|
||||
predicate simpleLocalFlowStep(Node nodeFrom, Node nodeTo, string model) {
|
||||
localFlowStep(nodeFrom, nodeTo) and model = ""
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if data can flow from `node1` to `node2` through a non-local step
|
||||
* that does not follow a call edge. For example, a step through a global
|
||||
* variable.
|
||||
* We throw away the call context and let us jump to any location
|
||||
* AKA teleport steps
|
||||
* local steps are preferible since they are more predictable and easier to control
|
||||
*/
|
||||
predicate jumpStep(Node nodeFrom, Node nodeTo) { none() }
|
||||
|
||||
/**
|
||||
* Holds if a Expression reads a field from a job (needs/jobs), step (steps) output via a read of `c` (fieldname)
|
||||
*/
|
||||
predicate ctxFieldReadStep(Node node1, Node node2, ContentSet c) {
|
||||
exists(SimpleReferenceExpression access |
|
||||
(
|
||||
access instanceof NeedsExpression or
|
||||
access instanceof StepsExpression or
|
||||
access instanceof JobsExpression or
|
||||
access instanceof EnvExpression
|
||||
) and
|
||||
c = any(FieldContent ct | ct.getName() = access.getFieldName()) and
|
||||
node1.asExpr() = access.getTarget() and
|
||||
node2.asExpr() = access
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if data can flow from `node1` to `node2` via a read of `c`. Thus,
|
||||
* `node1` references an object with a content `c.getAReadContent()` whose
|
||||
* value ends up in `node2`.
|
||||
* Store steps without corresponding reads are pruned aggressively very early, since they can never contribute to a complete path.
|
||||
*/
|
||||
predicate readStep(Node node1, ContentSet c, Node node2) { ctxFieldReadStep(node1, node2, c) }
|
||||
|
||||
/**
|
||||
* Stores an output expression (node1) into its OutputsStm node (node2)
|
||||
* using the output variable name as the access path
|
||||
*/
|
||||
predicate fieldStoreStep(Node node1, Node node2, ContentSet c) {
|
||||
exists(Outputs out, string fieldName |
|
||||
node1.asExpr() = out.getOutputExpr(fieldName) and
|
||||
node2.asExpr() = out and
|
||||
c = any(FieldContent ct | ct.getName() = fieldName)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if data can flow from `node1` to `node2` via a store into `c`. Thus,
|
||||
* `node2` references an object with a content `c.getAStoreContent()` that
|
||||
* contains the value of `node1`.
|
||||
* Store steps without corresponding reads are pruned aggressively very early, since they can never contribute to a complete path.
|
||||
*/
|
||||
predicate storeStep(Node node1, ContentSet c, Node node2) {
|
||||
fieldStoreStep(node1, node2, c) or
|
||||
madStoreStep(node1, node2, c) or
|
||||
envToOutputStoreStep(node1, node2, c) or
|
||||
envToEnvStoreStep(node1, node2, c) or
|
||||
commandToOutputStoreStep(node1, node2, c) or
|
||||
commandToEnvStoreStep(node1, node2, c)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if values stored inside content `c` are cleared at node `n`. For example,
|
||||
* any value stored inside `f` is cleared at the pre-update node associated with `x`
|
||||
* in `x.f = newValue`.
|
||||
*/
|
||||
predicate clearsContent(Node n, ContentSet c) { none() }
|
||||
|
||||
/**
|
||||
* Holds if the value that is being tracked is expected to be stored inside content `c`
|
||||
* at node `n`.
|
||||
*/
|
||||
predicate expectsContent(Node n, ContentSet c) { none() }
|
||||
|
||||
/**
|
||||
* Holds if flow is allowed to pass from parameter `p` and back to itself as a
|
||||
* side-effect, resulting in a summary from `p` to itself.
|
||||
*
|
||||
* One example would be to allow flow like `p.foo = p.bar;`, which is disallowed
|
||||
* by default as a heuristic.
|
||||
*/
|
||||
predicate allowParameterReturnInSelf(ParameterNode p) { none() }
|
||||
|
||||
predicate localMustFlowStep(Node nodeFrom, Node nodeTo) { localFlowStep(nodeFrom, nodeTo) }
|
||||
|
||||
private newtype TLambdaCallKind = TNone()
|
||||
|
||||
class LambdaCallKind = TLambdaCallKind;
|
||||
|
||||
/** Holds if `creation` is an expression that creates a lambda of kind `kind` for `c`. */
|
||||
predicate lambdaCreation(Node creation, LambdaCallKind kind, DataFlowCallable c) { none() }
|
||||
|
||||
/** Holds if `call` is a lambda call of kind `kind` where `receiver` is the lambda expression. */
|
||||
predicate lambdaCall(DataFlowCall call, LambdaCallKind kind, Node receiver) { none() }
|
||||
|
||||
/** Extra data-flow steps needed for lambda flow analysis. */
|
||||
predicate additionalLambdaFlowStep(Node nodeFrom, Node nodeTo, boolean preservesValue) { none() }
|
||||
|
||||
/**
|
||||
* Since our model is so simple, we dont want to compress the local flow steps.
|
||||
* This compression is normally done to not show SSA steps, casts, etc.
|
||||
*/
|
||||
predicate neverSkipInPathGraph(Node node) { any() }
|
||||
|
||||
predicate knownSourceModel(Node source, string model) { none() }
|
||||
|
||||
predicate knownSinkModel(Node sink, string model) { none() }
|
||||
@@ -0,0 +1,194 @@
|
||||
private import codeql.dataflow.DataFlow
|
||||
private import codeql.actions.Ast
|
||||
private import codeql.actions.Cfg as Cfg
|
||||
private import codeql.Locations
|
||||
private import DataFlowPrivate
|
||||
|
||||
class Node extends TNode {
|
||||
/** Gets a textual representation of this element. */
|
||||
string toString() { none() }
|
||||
|
||||
Location getLocation() { none() }
|
||||
|
||||
/**
|
||||
* Holds if this element is at the specified location.
|
||||
* The location spans column `startcolumn` of line `startline` to
|
||||
* column `endcolumn` of line `endline` in file `filepath`.
|
||||
* For more information, see
|
||||
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
this.getLocation().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
}
|
||||
|
||||
AstNode asExpr() { none() }
|
||||
}
|
||||
|
||||
/**
|
||||
* Any Ast Expression.
|
||||
* UsesExpr, RunExpr, ArgumentExpr, VarAccessExpr, ...
|
||||
*/
|
||||
class ExprNode extends Node, TExprNode {
|
||||
private DataFlowExpr expr;
|
||||
|
||||
ExprNode() { this = TExprNode(expr) }
|
||||
|
||||
Cfg::Node getCfgNode() { result = expr }
|
||||
|
||||
override string toString() { result = expr.toString() }
|
||||
|
||||
override Location getLocation() { result = expr.getLocation() }
|
||||
|
||||
override AstNode asExpr() { result = expr.getAstNode() }
|
||||
}
|
||||
|
||||
/**
|
||||
* Reusable workflow input nodes
|
||||
*/
|
||||
class ParameterNode extends ExprNode {
|
||||
private Input input;
|
||||
|
||||
ParameterNode() { this.asExpr() = input }
|
||||
|
||||
predicate isParameterOf(DataFlowCallable c, ParameterPosition pos) {
|
||||
input = c.(ReusableWorkflow).getInput(pos) or
|
||||
input = c.(CompositeAction).getInput(pos)
|
||||
}
|
||||
|
||||
override string toString() { result = "input " + input.toString() }
|
||||
|
||||
override Location getLocation() { result = input.getLocation() }
|
||||
|
||||
Input getInput() { result = input }
|
||||
}
|
||||
|
||||
/**
|
||||
* A call to a data flow callable (Uses).
|
||||
*/
|
||||
class CallNode extends ExprNode {
|
||||
private DataFlowCall call;
|
||||
|
||||
CallNode() { this.getCfgNode() instanceof DataFlowCall }
|
||||
|
||||
DataFlowCallable getCalleeNode() { result = viableCallable(this.getCfgNode()) }
|
||||
}
|
||||
|
||||
/**
|
||||
* An argument to a Uses step (call).
|
||||
*/
|
||||
class ArgumentNode extends ExprNode {
|
||||
ArgumentNode() { this.getCfgNode().getAstNode() = any(Uses e).getArgumentExpr(_) }
|
||||
|
||||
predicate argumentOf(DataFlowCall call, ArgumentPosition pos) {
|
||||
this.getCfgNode() = call.(Cfg::Node).getASuccessor+() and
|
||||
call.(Cfg::Node).getAstNode() =
|
||||
any(Uses e | e.getArgumentExpr(pos) = this.getCfgNode().getAstNode())
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reusable workflow output nodes
|
||||
*/
|
||||
class ReturnNode extends ExprNode {
|
||||
private Outputs outputs;
|
||||
|
||||
ReturnNode() {
|
||||
this.asExpr() = outputs and
|
||||
(
|
||||
exists(ReusableWorkflow w | w.getOutputs() = outputs) or
|
||||
exists(CompositeAction a | a.getOutputs() = outputs)
|
||||
)
|
||||
}
|
||||
|
||||
ReturnKind getKind() { result = TNormalReturn() }
|
||||
|
||||
override string toString() { result = "output " + outputs.toString() }
|
||||
|
||||
override Location getLocation() { result = outputs.getLocation() }
|
||||
}
|
||||
|
||||
/** Gets the node corresponding to `e`. */
|
||||
Node exprNode(DataFlowExpr e) { result = TExprNode(e) }
|
||||
|
||||
/**
|
||||
* An entity that represents a set of `Content`s.
|
||||
*
|
||||
* The set may be interpreted differently depending on whether it is
|
||||
* stored into (`getAStoreContent`) or read from (`getAReadContent`).
|
||||
*/
|
||||
class ContentSet instanceof Content {
|
||||
/** Gets a content that may be stored into when storing into this set. */
|
||||
Content getAStoreContent() { result = this }
|
||||
|
||||
/** Gets a content that may be read from when reading from this set. */
|
||||
Content getAReadContent() { result = this }
|
||||
|
||||
/** Gets a textual representation of this content set. */
|
||||
string toString() { result = super.toString() }
|
||||
|
||||
/**
|
||||
* Holds if this element is at the specified location.
|
||||
* The location spans column `startcolumn` of line `startline` to
|
||||
* column `endcolumn` of line `endline` in file `filepath`.
|
||||
* For more information, see
|
||||
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A reference contained in an object. Examples include instance fields, the
|
||||
* contents of a collection object, the contents of an array or pointer.
|
||||
*/
|
||||
class Content extends TContent {
|
||||
/** Gets the type of the contained data for the purpose of type pruning. */
|
||||
DataFlowType getType() { any() }
|
||||
|
||||
/** Gets a textual representation of this element. */
|
||||
abstract string toString();
|
||||
|
||||
/**
|
||||
* Holds if this element is at the specified location.
|
||||
* The location spans column `startcolumn` of line `startline` to
|
||||
* column `endcolumn` of line `endline` in file `filepath`.
|
||||
* For more information, see
|
||||
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
|
||||
*/
|
||||
predicate hasLocationInfo(
|
||||
string filepath, int startline, int startcolumn, int endline, int endcolumn
|
||||
) {
|
||||
filepath = "" and startline = 0 and startcolumn = 0 and endline = 0 and endcolumn = 0
|
||||
}
|
||||
}
|
||||
|
||||
/** A field of an object, for example an instance variable. */
|
||||
class FieldContent extends Content, TFieldContent {
|
||||
private string name;
|
||||
|
||||
FieldContent() { this = TFieldContent(name) }
|
||||
|
||||
/** Gets the name of the field. */
|
||||
string getName() { result = name }
|
||||
|
||||
override string toString() { result = name }
|
||||
}
|
||||
|
||||
predicate hasLocalFlow(Node n1, Node n2) {
|
||||
n1 = n2 or
|
||||
simpleLocalFlowStep(n1, n2, _) or
|
||||
exists(ContentSet c | ctxFieldReadStep(n1, n2, c))
|
||||
}
|
||||
|
||||
predicate hasLocalFlowExpr(AstNode n1, AstNode n2) {
|
||||
exists(Node dn1, Node dn2 |
|
||||
dn1.asExpr() = n1 and
|
||||
dn2.asExpr() = n2 and
|
||||
hasLocalFlow(dn1, dn2)
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
/**
|
||||
* This module provides extensible predicates for defining MaD models.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Holds if a source model exists for the given parameters.
|
||||
*/
|
||||
extensible predicate actionsSourceModel(
|
||||
string action, string version, string output, string kind, string provenance
|
||||
);
|
||||
|
||||
/**
|
||||
* Holds if a summary model exists for the given parameters.
|
||||
*/
|
||||
extensible predicate actionsSummaryModel(
|
||||
string action, string version, string input, string output, string kind, string provenance
|
||||
);
|
||||
|
||||
/**
|
||||
* Holds if a sink model exists for the given parameters.
|
||||
*/
|
||||
extensible predicate actionsSinkModel(
|
||||
string action, string version, string input, string kind, string provenance
|
||||
);
|
||||
@@ -0,0 +1,12 @@
|
||||
/**
|
||||
* Provides Actions-specific definitions for use in the taint tracking library.
|
||||
* Implementation of https://github.com/github/codeql/blob/main/shared/dataflow/codeql/dataflow/TaintTracking.qll
|
||||
*/
|
||||
|
||||
private import codeql.Locations
|
||||
private import codeql.dataflow.TaintTracking
|
||||
private import DataFlowImplSpecific
|
||||
|
||||
module ActionsTaintTracking implements InputSig<Location, ActionsDataFlow> {
|
||||
import TaintTrackingPrivate
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* Provides modules for performing local (intra-procedural) and
|
||||
* global (inter-procedural) taint-tracking analyses.
|
||||
*/
|
||||
|
||||
private import DataFlowPrivate
|
||||
private import codeql.actions.DataFlow
|
||||
private import codeql.actions.dataflow.TaintSteps
|
||||
private import codeql.actions.Ast
|
||||
|
||||
/**
|
||||
* Holds if `node` should be a sanitizer in all global taint flow configurations
|
||||
* but not in local taint.
|
||||
*/
|
||||
predicate defaultTaintSanitizer(DataFlow::Node node) { none() }
|
||||
|
||||
// predicate defaultAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
|
||||
// any(AdditionalTaintStep s).step(nodeFrom, nodeTo)
|
||||
// }
|
||||
/**
|
||||
* Holds if the additional step from `nodeFrom` to `nodeTo` should be included
|
||||
* in all global taint flow configurations.
|
||||
*/
|
||||
cached
|
||||
predicate defaultAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo, string model) {
|
||||
any(AdditionalTaintStep s).step(nodeFrom, nodeTo) and model = ""
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if taint flow configurations should allow implicit reads of `c` at sinks
|
||||
* and inputs to additional taint steps.
|
||||
*/
|
||||
bindingset[node]
|
||||
predicate defaultImplicitTaintRead(DataFlow::Node node, DataFlow::ContentSet c) { none() }
|
||||
|
||||
/**
|
||||
* Holds if the additional step from `src` to `sink` should be considered in
|
||||
* speculative taint flow exploration.
|
||||
*/
|
||||
predicate speculativeTaintStep(DataFlow::Node src, DataFlow::Node sink) { none() }
|
||||
@@ -0,0 +1,19 @@
|
||||
private import codeql.files.FileSystem
|
||||
|
||||
/**
|
||||
* Returns an appropriately encoded version of a filename `name`
|
||||
* passed by the VS Code extension in order to coincide with the
|
||||
* output of `.getFile()` on locatable entities.
|
||||
*/
|
||||
cached
|
||||
File getFileBySourceArchiveName(string name) {
|
||||
// The name provided for a file in the source archive by the VS Code extension
|
||||
// has some differences from the absolute path in the database:
|
||||
// 1. colons are replaced by underscores
|
||||
// 2. there's a leading slash, even for Windows paths: "C:/foo/bar" ->
|
||||
// "/C_/foo/bar"
|
||||
// 3. double slashes in UNC prefixes are replaced with a single slash
|
||||
// We can handle 2 and 3 together by unconditionally adding a leading slash
|
||||
// before replacing double slashes.
|
||||
name = ("/" + result.getAbsolutePath().replaceAll(":", "_")).replaceAll("//", "/")
|
||||
}
|
||||
137
actions/ql/lib/codeql/actions/ideContextual/printAst.qll
Normal file
137
actions/ql/lib/codeql/actions/ideContextual/printAst.qll
Normal file
@@ -0,0 +1,137 @@
|
||||
/**
|
||||
* Provides queries to pretty-print an Kaleidoscope abstract syntax tree as a graph.
|
||||
*
|
||||
* By default, this will print the AST for all nodes in the database. To change
|
||||
* this behavior, extend `PrintASTConfiguration` and override `shouldPrintNode`
|
||||
* to hold for only the AST nodes you wish to view.
|
||||
*/
|
||||
|
||||
private import codeql.actions.Ast
|
||||
private import codeql.Locations
|
||||
|
||||
/**
|
||||
* The query can extend this class to control which nodes are printed.
|
||||
*/
|
||||
class PrintAstConfiguration extends string {
|
||||
PrintAstConfiguration() { this = "PrintAstConfiguration" }
|
||||
|
||||
/**
|
||||
* Holds if the given node should be printed.
|
||||
*/
|
||||
predicate shouldPrintNode(PrintAstNode n) { any() }
|
||||
}
|
||||
|
||||
newtype TPrintNode = TPrintRegularAstNode(AstNode n) { any() }
|
||||
|
||||
private predicate shouldPrintNode(PrintAstNode n) {
|
||||
any(PrintAstConfiguration config).shouldPrintNode(n)
|
||||
}
|
||||
|
||||
/**
|
||||
* A node in the output tree.
|
||||
*/
|
||||
class PrintAstNode extends TPrintNode {
|
||||
/** Gets a textual representation of this node in the PrintAst output tree. */
|
||||
string toString() { none() }
|
||||
|
||||
/**
|
||||
* Gets the child node with name `edgeName`. Typically this is the name of the
|
||||
* predicate used to access the child.
|
||||
*/
|
||||
PrintAstNode getChild(string edgeName) { none() }
|
||||
|
||||
/** Get the Location of this AST node */
|
||||
Location getLocation() { none() }
|
||||
|
||||
/** Gets a child of this node. */
|
||||
final PrintAstNode getAChild() { result = this.getChild(_) }
|
||||
|
||||
/** Gets the parent of this node, if any. */
|
||||
final PrintAstNode getParent() { result.getAChild() = this }
|
||||
|
||||
/** Gets a value used to order this node amongst its siblings. */
|
||||
int getOrder() {
|
||||
this =
|
||||
rank[result](PrintRegularAstNode p, Location l, File f |
|
||||
l = p.getLocation() and
|
||||
f = l.getFile()
|
||||
|
|
||||
p
|
||||
order by
|
||||
f.getBaseName(), f.getAbsolutePath(), l.getStartLine(), l.getStartColumn(),
|
||||
l.getEndLine(), l.getEndColumn()
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the value of the property of this node, where the name of the property
|
||||
* is `key`.
|
||||
*/
|
||||
final string getProperty(string key) {
|
||||
key = "semmle.label" and
|
||||
result = this.toString()
|
||||
or
|
||||
key = "semmle.order" and result = this.getOrder().toString()
|
||||
}
|
||||
}
|
||||
|
||||
/** An `AstNode` in the output tree. */
|
||||
class PrintRegularAstNode extends PrintAstNode, TPrintRegularAstNode {
|
||||
AstNode astNode;
|
||||
|
||||
PrintRegularAstNode() { this = TPrintRegularAstNode(astNode) }
|
||||
|
||||
override string toString() {
|
||||
result = "[" + concat(astNode.getAPrimaryQlClass(), ", ") + "] " + astNode.toString()
|
||||
}
|
||||
|
||||
override Location getLocation() { result = astNode.getLocation() }
|
||||
|
||||
override PrintAstNode getChild(string name) {
|
||||
exists(int i |
|
||||
name = i.toString() and
|
||||
result =
|
||||
TPrintRegularAstNode(rank[i](AstNode child, Location l |
|
||||
child.getParentNode() = astNode and
|
||||
child.getLocation() = l
|
||||
|
|
||||
child
|
||||
order by
|
||||
l.getStartLine(), l.getStartColumn(), l.getEndColumn(), l.getEndLine(),
|
||||
child.toString()
|
||||
))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `node` belongs to the output tree, and its property `key` has the
|
||||
* given `value`.
|
||||
*/
|
||||
query predicate nodes(PrintAstNode node, string key, string value) {
|
||||
value = node.getProperty(key) and shouldPrintNode(node)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `target` is a child of `source` in the AST, and property `key` of
|
||||
* the edge has the given `value`.
|
||||
*/
|
||||
query predicate edges(PrintAstNode source, PrintAstNode target, string key, string value) {
|
||||
shouldPrintNode(source) and
|
||||
shouldPrintNode(target) and
|
||||
target = source.getChild(_) and
|
||||
(
|
||||
key = "semmle.label" and
|
||||
value = strictconcat(string name | source.getChild(name) = target | name, "/")
|
||||
or
|
||||
key = "semmle.order" and
|
||||
value = target.getProperty("semmle.order")
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if property `key` of the graph has the given `value`.
|
||||
*/
|
||||
query predicate graphProperties(string key, string value) {
|
||||
key = "semmle.graphKind" and value = "tree"
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
import codeql.actions.dataflow.FlowSources
|
||||
import codeql.actions.DataFlow
|
||||
|
||||
abstract class ArgumentInjectionSink extends DataFlow::Node {
|
||||
abstract string getCommand();
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares an environment variable, uses it as the argument to a command vulnerable to argument injection.
|
||||
* e.g.
|
||||
* env:
|
||||
* BODY: ${{ github.event.comment.body }}
|
||||
* run: |
|
||||
* sed "s/FOO/$BODY/g" > /tmp/foo
|
||||
*/
|
||||
class ArgumentInjectionFromEnvVarSink extends ArgumentInjectionSink {
|
||||
string command;
|
||||
string argument;
|
||||
|
||||
ArgumentInjectionFromEnvVarSink() {
|
||||
exists(Run run, string var |
|
||||
run.getScript() = this.asExpr() and
|
||||
(
|
||||
exists(run.getInScopeEnvVarExpr(var)) or
|
||||
var = "GITHUB_HEAD_REF"
|
||||
) and
|
||||
run.getScript().getAnEnvReachingArgumentInjectionSink(var, command, argument)
|
||||
)
|
||||
}
|
||||
|
||||
override string getCommand() { result = command }
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step executes a command that returns untrusted data which flows to an unsafe argument
|
||||
* e.g.
|
||||
* run: |
|
||||
* BODY=$(git log --format=%s)
|
||||
* sed "s/FOO/$BODY/g" > /tmp/foo
|
||||
*/
|
||||
class ArgumentInjectionFromCommandSink extends ArgumentInjectionSink {
|
||||
string command;
|
||||
string argument;
|
||||
|
||||
ArgumentInjectionFromCommandSink() {
|
||||
exists(CommandSource source, Run run |
|
||||
run = source.getEnclosingRun() and
|
||||
this.asExpr() = run.getScript() and
|
||||
run.getScript().getACmdReachingArgumentInjectionSink(source.getCommand(), command, argument)
|
||||
)
|
||||
}
|
||||
|
||||
override string getCommand() { result = command }
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares an environment variable, uses it as the argument to a command vulnerable to argument injection.
|
||||
*/
|
||||
class ArgumentInjectionFromMaDSink extends ArgumentInjectionSink {
|
||||
ArgumentInjectionFromMaDSink() { madSink(this, "argument-injection") }
|
||||
|
||||
override string getCommand() { result = "unknown" }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate a code script.
|
||||
*/
|
||||
private module ArgumentInjectionConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) {
|
||||
source instanceof RemoteFlowSource
|
||||
or
|
||||
exists(Run run |
|
||||
run.getScript() = source.asExpr() and
|
||||
run.getScript().getAnEnvReachingArgumentInjectionSink("GITHUB_HEAD_REF", _, _)
|
||||
)
|
||||
}
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof ArgumentInjectionSink }
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Run run, string var |
|
||||
run.getInScopeEnvVarExpr(var) = pred.asExpr() and
|
||||
succ.asExpr() = run.getScript() and
|
||||
run.getScript().getAnEnvReachingArgumentInjectionSink(var, _, _)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate a code script. */
|
||||
module ArgumentInjectionFlow = TaintTracking::Global<ArgumentInjectionConfig>;
|
||||
@@ -0,0 +1,322 @@
|
||||
import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
import codeql.actions.DataFlow
|
||||
import codeql.actions.dataflow.FlowSources
|
||||
import codeql.actions.security.PoisonableSteps
|
||||
import codeql.actions.security.UntrustedCheckoutQuery
|
||||
|
||||
string unzipRegexp() { result = "(unzip|tar)\\s+.*" }
|
||||
|
||||
string unzipDirArgRegexp() { result = "(-d|-C)\\s+([^ ]+).*" }
|
||||
|
||||
abstract class UntrustedArtifactDownloadStep extends Step {
|
||||
abstract string getPath();
|
||||
}
|
||||
|
||||
class GitHubDownloadArtifactActionStep extends UntrustedArtifactDownloadStep, UsesStep {
|
||||
GitHubDownloadArtifactActionStep() {
|
||||
this.getCallee() = "actions/download-artifact" and
|
||||
(
|
||||
// By default, the permissions are scoped so they can only download Artifacts within the current workflow run.
|
||||
// To elevate permissions for this scenario, you can specify a github-token along with other repository and run identifiers
|
||||
this.getArgument("run-id").matches("%github.event.workflow_run.id%") and
|
||||
exists(this.getArgument("github-token"))
|
||||
or
|
||||
// There is an artifact upload step in the same workflow which can be influenced by an attacker on a checkout step
|
||||
exists(LocalJob job, SimplePRHeadCheckoutStep checkout, UsesStep upload |
|
||||
this.getEnclosingWorkflow().getAJob() = job and
|
||||
job.getAStep() = checkout and
|
||||
checkout.getATriggerEvent().getName() = "pull_request_target" and
|
||||
checkout.getAFollowingStep() = upload and
|
||||
upload.getCallee() = "actions/upload-artifact"
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if exists(this.getArgument("path"))
|
||||
then result = normalizePath(this.getArgument("path"))
|
||||
else result = "GITHUB_WORKSPACE/"
|
||||
}
|
||||
}
|
||||
|
||||
class DownloadArtifactActionStep extends UntrustedArtifactDownloadStep, UsesStep {
|
||||
DownloadArtifactActionStep() {
|
||||
this.getCallee() =
|
||||
[
|
||||
"dawidd6/action-download-artifact", "marcofaggian/action-download-multiple-artifacts",
|
||||
"benday-inc/download-latest-artifact", "blablacar/action-download-last-artifact",
|
||||
"levonet/action-download-last-artifact", "bettermarks/action-artifact-download",
|
||||
"aochmann/actions-download-artifact", "cytopia/download-artifact-retry-action",
|
||||
"alextompkins/download-prior-artifact", "nmerget/download-gzip-artifact",
|
||||
"benday-inc/download-artifact", "synergy-au/download-workflow-artifacts-action",
|
||||
"ishworkh/docker-image-artifact-download", "ishworkh/container-image-artifact-download",
|
||||
"sidx1024/action-download-artifact", "hyperskill/azblob-download-artifact",
|
||||
"ma-ve/action-download-artifact-with-retry"
|
||||
] and
|
||||
(
|
||||
not exists(this.getArgument(["branch", "branch_name"]))
|
||||
or
|
||||
exists(this.getArgument(["branch", "branch_name"])) and
|
||||
this.getArgument("allow_forks") = "true"
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument(["commit", "commitHash", "commit_sha"])) or
|
||||
not this.getArgument(["commit", "commitHash", "commit_sha"])
|
||||
.matches("%github.event.pull_request.head.sha%")
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument("event")) or
|
||||
not this.getArgument("event") = "pull_request"
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument(["run-id", "run_id", "workflow-run-id", "workflow_run_id"])) or
|
||||
this.getArgument(["run-id", "run_id", "workflow-run-id", "workflow_run_id"])
|
||||
.matches("%github.event.workflow_run.id%")
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument("pr")) or
|
||||
not this.getArgument("pr")
|
||||
.matches(["%github.event.pull_request.number%", "%github.event.number%"])
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if exists(this.getArgument(["path", "download_path"]))
|
||||
then result = normalizePath(this.getArgument(["path", "download_path"]))
|
||||
else
|
||||
if exists(this.getArgument("paths"))
|
||||
then result = normalizePath(this.getArgument("paths").splitAt(" "))
|
||||
else result = "GITHUB_WORKSPACE/"
|
||||
}
|
||||
}
|
||||
|
||||
class LegitLabsDownloadArtifactActionStep extends UntrustedArtifactDownloadStep, UsesStep {
|
||||
LegitLabsDownloadArtifactActionStep() {
|
||||
this.getCallee() = "Legit-Labs/action-download-artifact" and
|
||||
(
|
||||
not exists(this.getArgument("branch")) or
|
||||
not this.getArgument("branch") = ["main", "master"]
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument("commit")) or
|
||||
not this.getArgument("commit").matches("%github.event.pull_request.head.sha%")
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument("event")) or
|
||||
not this.getArgument("event") = "pull_request"
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument("run_id")) or
|
||||
not this.getArgument("run_id").matches("%github.event.workflow_run.id%")
|
||||
) and
|
||||
(
|
||||
not exists(this.getArgument("pr")) or
|
||||
not this.getArgument("pr").matches("%github.event.pull_request.number%")
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if exists(this.getArgument("path"))
|
||||
then result = normalizePath(this.getArgument("path"))
|
||||
else result = "GITHUB_WORKSPACE/artifacts"
|
||||
}
|
||||
}
|
||||
|
||||
class ActionsGitHubScriptDownloadStep extends UntrustedArtifactDownloadStep, UsesStep {
|
||||
string script;
|
||||
|
||||
ActionsGitHubScriptDownloadStep() {
|
||||
// eg:
|
||||
// - uses: actions/github-script@v6
|
||||
// with:
|
||||
// script: |
|
||||
// let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
// owner: context.repo.owner,
|
||||
// repo: context.repo.repo,
|
||||
// run_id: context.payload.workflow_run.id,
|
||||
// });
|
||||
// let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
|
||||
// return artifact.name == "<ARTEFACT_NAME>"
|
||||
// })[0];
|
||||
// let download = await github.rest.actions.downloadArtifact({
|
||||
// owner: context.repo.owner,
|
||||
// repo: context.repo.repo,
|
||||
// artifact_id: matchArtifact.id,
|
||||
// archive_format: 'zip',
|
||||
// });
|
||||
// var fs = require('fs');
|
||||
// fs.writeFileSync('${{github.workspace}}/test-results.zip', Buffer.from(download.data));
|
||||
this.getCallee() = "actions/github-script" and
|
||||
this.getArgument("script") = script and
|
||||
script.matches("%listWorkflowRunArtifacts(%") and
|
||||
script.matches("%downloadArtifact(%") and
|
||||
script.matches("%writeFileSync(%") and
|
||||
// Filter out artifacts that were created by pull-request.
|
||||
not script.matches("%exclude_pull_requests: true%")
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if
|
||||
this.getAFollowingStep()
|
||||
.(Run)
|
||||
.getScript()
|
||||
.getACommand()
|
||||
.regexpMatch(unzipRegexp() + unzipDirArgRegexp())
|
||||
then
|
||||
result =
|
||||
normalizePath(trimQuotes(this.getAFollowingStep()
|
||||
.(Run)
|
||||
.getScript()
|
||||
.getACommand()
|
||||
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3)))
|
||||
else
|
||||
if this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp())
|
||||
then result = "GITHUB_WORKSPACE/"
|
||||
else none()
|
||||
}
|
||||
}
|
||||
|
||||
class GHRunArtifactDownloadStep extends UntrustedArtifactDownloadStep, Run {
|
||||
GHRunArtifactDownloadStep() {
|
||||
// eg: - run: gh run download ${{ github.event.workflow_run.id }} --repo "${GITHUB_REPOSITORY}" --name "artifact_name"
|
||||
this.getScript().getACommand().regexpMatch(".*gh\\s+run\\s+download.*") and
|
||||
(
|
||||
this.getScript().getACommand().regexpMatch(unzipRegexp()) or
|
||||
this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp())
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if
|
||||
this.getAFollowingStep()
|
||||
.(Run)
|
||||
.getScript()
|
||||
.getACommand()
|
||||
.regexpMatch(unzipRegexp() + unzipDirArgRegexp()) or
|
||||
this.getScript().getACommand().regexpMatch(unzipRegexp() + unzipDirArgRegexp())
|
||||
then
|
||||
result =
|
||||
normalizePath(trimQuotes(this.getScript()
|
||||
.getACommand()
|
||||
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3))) or
|
||||
result =
|
||||
normalizePath(trimQuotes(this.getAFollowingStep()
|
||||
.(Run)
|
||||
.getScript()
|
||||
.getACommand()
|
||||
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3)))
|
||||
else
|
||||
if
|
||||
this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp()) or
|
||||
this.getScript().getACommand().regexpMatch(unzipRegexp())
|
||||
then result = "GITHUB_WORKSPACE/"
|
||||
else none()
|
||||
}
|
||||
}
|
||||
|
||||
class DirectArtifactDownloadStep extends UntrustedArtifactDownloadStep, Run {
|
||||
DirectArtifactDownloadStep() {
|
||||
// eg:
|
||||
// run: |
|
||||
// artifacts_url=${{ github.event.workflow_run.artifacts_url }}
|
||||
// gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
|
||||
// do
|
||||
// IFS=$'\t' read name url <<< "$artifact"
|
||||
// gh api $url > "$name.zip"
|
||||
// unzip -d "$name" "$name.zip"
|
||||
// done
|
||||
this.getScript().getACommand().matches("%github.event.workflow_run.artifacts_url%") and
|
||||
(
|
||||
this.getScript().getACommand().regexpMatch(unzipRegexp()) or
|
||||
this.getAFollowingStep().(Run).getScript().getACommand().regexpMatch(unzipRegexp())
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if
|
||||
this.getScript().getACommand().regexpMatch(unzipRegexp() + unzipDirArgRegexp()) or
|
||||
this.getAFollowingStep()
|
||||
.(Run)
|
||||
.getScript()
|
||||
.getACommand()
|
||||
.regexpMatch(unzipRegexp() + unzipDirArgRegexp())
|
||||
then
|
||||
result =
|
||||
normalizePath(trimQuotes(this.getScript()
|
||||
.getACommand()
|
||||
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3))) or
|
||||
result =
|
||||
normalizePath(trimQuotes(this.getAFollowingStep()
|
||||
.(Run)
|
||||
.getScript()
|
||||
.getACommand()
|
||||
.regexpCapture(unzipRegexp() + unzipDirArgRegexp(), 3)))
|
||||
else result = "GITHUB_WORKSPACE/"
|
||||
}
|
||||
}
|
||||
|
||||
class ArtifactPoisoningSink extends DataFlow::Node {
|
||||
UntrustedArtifactDownloadStep download;
|
||||
PoisonableStep poisonable;
|
||||
|
||||
ArtifactPoisoningSink() {
|
||||
download.getAFollowingStep() = poisonable and
|
||||
// excluding artifacts downloaded to /tmp
|
||||
not download.getPath().regexpMatch("^/tmp.*") and
|
||||
(
|
||||
poisonable.(Run).getScript() = this.asExpr() and
|
||||
(
|
||||
// Check if the poisonable step is a local script execution step
|
||||
// and the path of the command or script matches the path of the downloaded artifact
|
||||
isSubpath(poisonable.(LocalScriptExecutionRunStep).getPath(), download.getPath())
|
||||
or
|
||||
// Checking the path for non local script execution steps is very difficult
|
||||
not poisonable instanceof LocalScriptExecutionRunStep
|
||||
// Its not easy to extract the path from a non-local script execution step so skipping this check for now
|
||||
// and isSubpath(poisonable.(Run).getWorkingDirectory(), download.getPath())
|
||||
)
|
||||
or
|
||||
poisonable.(UsesStep) = this.asExpr() and
|
||||
(
|
||||
not poisonable instanceof LocalActionUsesStep and
|
||||
download.getPath() = "GITHUB_WORKSPACE/"
|
||||
or
|
||||
isSubpath(poisonable.(LocalActionUsesStep).getPath(), download.getPath())
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
string getPath() { result = download.getPath() }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe artifacts
|
||||
* that is used may lead to artifact poisoning
|
||||
*/
|
||||
private module ArtifactPoisoningConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) { source instanceof ArtifactSource }
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof ArtifactPoisoningSink }
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(PoisonableStep step |
|
||||
pred instanceof ArtifactSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = step and
|
||||
(
|
||||
succ.asExpr() = step.(Run).getScript() or
|
||||
succ.asExpr() = step.(UsesStep)
|
||||
)
|
||||
)
|
||||
or
|
||||
exists(Run run |
|
||||
pred instanceof ArtifactSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
exists(run.getScript().getAFileReadCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe artifacts that is used in an insecure way. */
|
||||
module ArtifactPoisoningFlow = TaintTracking::Global<ArtifactPoisoningConfig>;
|
||||
@@ -0,0 +1,72 @@
|
||||
import actions
|
||||
|
||||
string defaultBranchTriggerEvent() {
|
||||
result =
|
||||
[
|
||||
"check_run", "check_suite", "delete", "discussion", "discussion_comment", "fork", "gollum",
|
||||
"issue_comment", "issues", "label", "milestone", "project", "project_card", "project_column",
|
||||
"public", "pull_request_comment", "pull_request_target", "repository_dispatch", "schedule",
|
||||
"watch", "workflow_run"
|
||||
]
|
||||
}
|
||||
|
||||
predicate runsOnDefaultBranch(Event e) {
|
||||
(
|
||||
e.getName() = defaultBranchTriggerEvent() and
|
||||
not e.getName() = "pull_request_target"
|
||||
or
|
||||
e.getName() = "push" and
|
||||
e.getAPropertyValue("branches") = defaultBranchNames()
|
||||
or
|
||||
e.getName() = "pull_request_target" and
|
||||
(
|
||||
// no filtering
|
||||
not e.hasProperty("branches") and not e.hasProperty("branches-ignore")
|
||||
or
|
||||
// only branches-ignore filter
|
||||
e.hasProperty("branches-ignore") and
|
||||
not e.hasProperty("branches") and
|
||||
not e.getAPropertyValue("branches-ignore") = defaultBranchNames()
|
||||
or
|
||||
// only branches filter
|
||||
e.hasProperty("branches") and
|
||||
not e.hasProperty("branches-ignore") and
|
||||
e.getAPropertyValue("branches") = defaultBranchNames()
|
||||
or
|
||||
// branches and branches-ignore filters
|
||||
e.hasProperty("branches") and
|
||||
e.hasProperty("branches-ignore") and
|
||||
e.getAPropertyValue("branches") = defaultBranchNames() and
|
||||
not e.getAPropertyValue("branches-ignore") = defaultBranchNames()
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
abstract class CacheWritingStep extends Step {
|
||||
abstract string getPath();
|
||||
}
|
||||
|
||||
class CacheActionUsesStep extends CacheWritingStep, UsesStep {
|
||||
CacheActionUsesStep() { this.getCallee() = "actions/cache" }
|
||||
|
||||
override string getPath() {
|
||||
result = normalizePath(this.(UsesStep).getArgument("path").splitAt("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
class CacheActionSaveUsesStep extends CacheWritingStep, UsesStep {
|
||||
CacheActionSaveUsesStep() { this.getCallee() = "actions/cache/save" }
|
||||
|
||||
override string getPath() {
|
||||
result = normalizePath(this.(UsesStep).getArgument("path").splitAt("\n"))
|
||||
}
|
||||
}
|
||||
|
||||
class SetupRubyUsesStep extends CacheWritingStep, UsesStep {
|
||||
SetupRubyUsesStep() {
|
||||
this.getCallee() = ["actions/setup-ruby", "ruby/setup-ruby"] and
|
||||
this.getArgument("bundler-cache") = "true"
|
||||
}
|
||||
|
||||
override string getPath() { result = normalizePath("vendor/bundle") }
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
import codeql.actions.dataflow.FlowSources
|
||||
import codeql.actions.DataFlow
|
||||
|
||||
class CodeInjectionSink extends DataFlow::Node {
|
||||
CodeInjectionSink() {
|
||||
exists(Run e | e.getAnScriptExpr() = this.asExpr()) or
|
||||
madSink(this, "code-injection")
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate a code script.
|
||||
*/
|
||||
private module CodeInjectionConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof CodeInjectionSink }
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Uses step |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = step and
|
||||
succ.asExpr() = step and
|
||||
madSink(succ, "code-injection")
|
||||
)
|
||||
or
|
||||
exists(Run run |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
exists(run.getScript().getAFileReadCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate a code script. */
|
||||
module CodeInjectionFlow = TaintTracking::Global<CodeInjectionConfig>;
|
||||
@@ -0,0 +1,22 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
import codeql.actions.dataflow.FlowSources
|
||||
import codeql.actions.DataFlow
|
||||
|
||||
private class CommandInjectionSink extends DataFlow::Node {
|
||||
CommandInjectionSink() { madSink(this, "command-injection") }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate a system command.
|
||||
*/
|
||||
private module CommandInjectionConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof CommandInjectionSink }
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate a system command. */
|
||||
module CommandInjectionFlow = TaintTracking::Global<CommandInjectionConfig>;
|
||||
312
actions/ql/lib/codeql/actions/security/ControlChecks.qll
Normal file
312
actions/ql/lib/codeql/actions/security/ControlChecks.qll
Normal file
@@ -0,0 +1,312 @@
|
||||
import actions
|
||||
|
||||
string any_category() {
|
||||
result =
|
||||
[
|
||||
"untrusted-checkout", "output-clobbering", "envpath-injection", "envvar-injection",
|
||||
"command-injection", "argument-injection", "code-injection", "cache-poisoning",
|
||||
"untrusted-checkout-toctou", "artifact-poisoning", "artifact-poisoning-toctou"
|
||||
]
|
||||
}
|
||||
|
||||
string non_toctou_category() {
|
||||
result = any_category() and not result = "untrusted-checkout-toctou"
|
||||
}
|
||||
|
||||
string toctou_category() { result = ["untrusted-checkout-toctou", "artifact-poisoning-toctou"] }
|
||||
|
||||
string any_event() { result = actor_not_attacker_event() or result = actor_is_attacker_event() }
|
||||
|
||||
string actor_is_attacker_event() {
|
||||
result =
|
||||
[
|
||||
// actor and attacker have to be the same
|
||||
"pull_request_target",
|
||||
"workflow_run",
|
||||
"discussion_comment",
|
||||
"discussion",
|
||||
"issues",
|
||||
"fork",
|
||||
"watch"
|
||||
]
|
||||
}
|
||||
|
||||
string actor_not_attacker_event() {
|
||||
result =
|
||||
[
|
||||
// actor and attacker can be different
|
||||
// actor may be a collaborator, but the attacker is may be the author of the PR that gets commented
|
||||
// therefore it may be vulnerable to TOCTOU races where the actor reviews one thing and the attacker changes it
|
||||
"issue_comment",
|
||||
"pull_request_comment",
|
||||
]
|
||||
}
|
||||
|
||||
/** An If node that contains an actor, user or label check */
|
||||
abstract class ControlCheck extends AstNode {
|
||||
ControlCheck() {
|
||||
this instanceof If or
|
||||
this instanceof Environment or
|
||||
this instanceof UsesStep or
|
||||
this instanceof Run
|
||||
}
|
||||
|
||||
predicate protects(AstNode node, Event event, string category) {
|
||||
// The check dominates the step it should protect
|
||||
this.dominates(node) and
|
||||
// The check is effective against the event and category
|
||||
this.protectsCategoryAndEvent(category, event.getName()) and
|
||||
// The check can be triggered by the event
|
||||
this.getATriggerEvent() = event
|
||||
}
|
||||
|
||||
predicate dominates(AstNode node) {
|
||||
this instanceof If and
|
||||
(
|
||||
node.getEnclosingStep().getIf() = this or
|
||||
node.getEnclosingJob().getIf() = this or
|
||||
node.getEnclosingJob().getANeededJob().(LocalJob).getAStep().getIf() = this or
|
||||
node.getEnclosingJob().getANeededJob().(LocalJob).getIf() = this
|
||||
)
|
||||
or
|
||||
this instanceof Environment and
|
||||
(
|
||||
node.getEnclosingJob().getEnvironment() = this
|
||||
or
|
||||
node.getEnclosingJob().getANeededJob().getEnvironment() = this
|
||||
)
|
||||
or
|
||||
(
|
||||
this instanceof Run or
|
||||
this instanceof UsesStep
|
||||
) and
|
||||
(
|
||||
this.(Step).getAFollowingStep() = node.getEnclosingStep()
|
||||
or
|
||||
node.getEnclosingJob().getANeededJob().(LocalJob).getAStep() = this.(Step)
|
||||
)
|
||||
}
|
||||
|
||||
abstract predicate protectsCategoryAndEvent(string category, string event);
|
||||
}
|
||||
|
||||
abstract class AssociationCheck extends ControlCheck {
|
||||
// Checks if the actor is a MEMBER/OWNER the repo
|
||||
// - they are effective against pull requests and workflow_run (since these are triggered by pull_requests) since they can control who is making the PR
|
||||
// - they are not effective against issue_comment since the author of the comment may not be the same as the author of the PR
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = actor_is_attacker_event() and category = any_category()
|
||||
or
|
||||
event = actor_not_attacker_event() and category = non_toctou_category()
|
||||
}
|
||||
}
|
||||
|
||||
abstract class ActorCheck extends ControlCheck {
|
||||
// checks for a specific actor
|
||||
// - they are effective against pull requests and workflow_run (since these are triggered by pull_requests) since they can control who is making the PR
|
||||
// - they are not effective against issue_comment since the author of the comment may not be the same as the author of the PR
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = actor_is_attacker_event() and category = any_category()
|
||||
or
|
||||
event = actor_not_attacker_event() and category = non_toctou_category()
|
||||
}
|
||||
}
|
||||
|
||||
abstract class RepositoryCheck extends ControlCheck {
|
||||
// checks that the origin of the code is the same as the repository.
|
||||
// for pull_requests, that means that it triggers only on local branches or repos from the same org
|
||||
// - they are effective against pull requests/workflow_run since they can control where the code is coming from
|
||||
// - they are not effective against issue_comment since the repository will always be the same
|
||||
}
|
||||
|
||||
abstract class PermissionCheck extends ControlCheck {
|
||||
// checks that the actor has a specific permission level
|
||||
// - they are effective against pull requests/workflow_run since they can control who can make changes
|
||||
// - they are not effective against issue_comment since the author of the comment may not be the same as the author of the PR
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = actor_is_attacker_event() and category = any_category()
|
||||
or
|
||||
event = actor_not_attacker_event() and category = non_toctou_category()
|
||||
}
|
||||
}
|
||||
|
||||
abstract class LabelCheck extends ControlCheck {
|
||||
// checks if the issue/pull_request is labeled, which implies that it could have been approved
|
||||
// - they dont protect against mutation attacks
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = actor_is_attacker_event() and category = any_category()
|
||||
or
|
||||
event = actor_not_attacker_event() and category = non_toctou_category()
|
||||
}
|
||||
}
|
||||
|
||||
class EnvironmentCheck extends ControlCheck instanceof Environment {
|
||||
// Environment checks are not effective against any mutable attacks
|
||||
// they do actually protect against untrusted code execution (sha)
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = actor_is_attacker_event() and category = any_category()
|
||||
or
|
||||
event = actor_not_attacker_event() and category = non_toctou_category()
|
||||
}
|
||||
}
|
||||
|
||||
abstract class CommentVsHeadDateCheck extends ControlCheck {
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
// by itself, this check is not effective against any attacks
|
||||
event = actor_not_attacker_event() and category = toctou_category()
|
||||
}
|
||||
}
|
||||
|
||||
/* Specific implementations of control checks */
|
||||
class LabelIfCheck extends LabelCheck instanceof If {
|
||||
string condition;
|
||||
|
||||
LabelIfCheck() {
|
||||
condition = normalizeExpr(this.getCondition()) and
|
||||
(
|
||||
// eg: contains(github.event.pull_request.labels.*.name, 'safe to test')
|
||||
condition.regexpMatch(".*(^|[^!])contains\\(\\s*github\\.event\\.pull_request\\.labels\\b.*")
|
||||
or
|
||||
// eg: github.event.label.name == 'safe to test'
|
||||
condition.regexpMatch(".*\\bgithub\\.event\\.label\\.name\\s*==.*")
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class ActorIfCheck extends ActorCheck instanceof If {
|
||||
ActorIfCheck() {
|
||||
// eg: github.event.pull_request.user.login == 'admin'
|
||||
exists(
|
||||
normalizeExpr(this.getCondition())
|
||||
.regexpFind([
|
||||
"\\bgithub\\.event\\.pull_request\\.user\\.login\\b",
|
||||
"\\bgithub\\.event\\.head_commit\\.author\\.name\\b",
|
||||
"\\bgithub\\.event\\.commits.*\\.author\\.name\\b",
|
||||
"\\bgithub\\.event\\.sender\\.login\\b"
|
||||
], _, _)
|
||||
)
|
||||
or
|
||||
// eg: github.actor == 'admin'
|
||||
// eg: github.triggering_actor == 'admin'
|
||||
exists(
|
||||
normalizeExpr(this.getCondition())
|
||||
.regexpFind(["\\bgithub\\.actor\\b", "\\bgithub\\.triggering_actor\\b",], _, _)
|
||||
) and
|
||||
not normalizeExpr(this.getCondition()).matches("%[bot]%")
|
||||
}
|
||||
}
|
||||
|
||||
class PullRequestTargetRepositoryIfCheck extends RepositoryCheck instanceof If {
|
||||
PullRequestTargetRepositoryIfCheck() {
|
||||
// eg: github.event.pull_request.head.repo.full_name == github.repository
|
||||
exists(
|
||||
normalizeExpr(this.getCondition())
|
||||
// github.repository in a workflow_run event triggered by a pull request is the base repository
|
||||
.regexpFind([
|
||||
"\\bgithub\\.repository\\b", "\\bgithub\\.repository_owner\\b",
|
||||
"\\bgithub\\.event\\.pull_request\\.head\\.repo\\.full_name\\b",
|
||||
"\\bgithub\\.event\\.pull_request\\.head\\.repo\\.owner\\.name\\b",
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_repository\\.full_name\\b",
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_repository\\.owner\\.name\\b"
|
||||
], _, _)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = "pull_request_target" and category = any_category()
|
||||
}
|
||||
}
|
||||
|
||||
class WorkflowRunRepositoryIfCheck extends RepositoryCheck instanceof If {
|
||||
WorkflowRunRepositoryIfCheck() {
|
||||
// eg: github.event.workflow_run.head_repository.full_name == github.repository
|
||||
exists(
|
||||
normalizeExpr(this.getCondition())
|
||||
// github.repository in a workflow_run event triggered by a pull request is the base repository
|
||||
.regexpFind([
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_repository\\.full_name\\b",
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_repository\\.owner\\.name\\b"
|
||||
], _, _)
|
||||
)
|
||||
}
|
||||
|
||||
override predicate protectsCategoryAndEvent(string category, string event) {
|
||||
event = "workflow_run" and category = any_category()
|
||||
}
|
||||
}
|
||||
|
||||
class AssociationIfCheck extends AssociationCheck instanceof If {
|
||||
AssociationIfCheck() {
|
||||
// eg: contains(fromJson('["MEMBER", "OWNER"]'), github.event.comment.author_association)
|
||||
normalizeExpr(this.getCondition())
|
||||
.splitAt("\n")
|
||||
.regexpMatch([
|
||||
".*\\bgithub\\.event\\.comment\\.author_association\\b.*",
|
||||
".*\\bgithub\\.event\\.issue\\.author_association\\b.*",
|
||||
".*\\bgithub\\.event\\.pull_request\\.author_association\\b.*",
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
class AssociationActionCheck extends AssociationCheck instanceof UsesStep {
|
||||
AssociationActionCheck() {
|
||||
this.getCallee() = "TheModdingInquisition/actions-team-membership" and
|
||||
(
|
||||
not exists(this.getArgument("exit"))
|
||||
or
|
||||
this.getArgument("exit") = "true"
|
||||
)
|
||||
or
|
||||
this.getCallee() = "actions/github-script" and
|
||||
this.getArgument("script").splitAt("\n").matches("%getMembershipForUserInOrg%")
|
||||
or
|
||||
this.getCallee() = "octokit/request-action" and
|
||||
this.getArgument("route").regexpMatch("GET.*(memberships).*")
|
||||
}
|
||||
}
|
||||
|
||||
class PermissionActionCheck extends PermissionCheck instanceof UsesStep {
|
||||
PermissionActionCheck() {
|
||||
this.getCallee() = "actions-cool/check-user-permission" and
|
||||
(
|
||||
// default permission level is write
|
||||
not exists(this.getArgument("permission-level")) or
|
||||
this.getArgument("require") = ["write", "admin"]
|
||||
)
|
||||
or
|
||||
this.getCallee() = "sushichop/action-repository-permission" and
|
||||
this.getArgument("required-permission") = ["write", "admin"]
|
||||
or
|
||||
this.getCallee() = "prince-chrismc/check-actor-permissions-action" and
|
||||
this.getArgument("permission") = ["write", "admin"]
|
||||
or
|
||||
this.getCallee() = "lannonbr/repo-permission-check-action" and
|
||||
this.getArgument("permission") = ["write", "admin"]
|
||||
or
|
||||
this.getCallee() = "xt0rted/slash-command-action" and
|
||||
(
|
||||
// default permission level is write
|
||||
not exists(this.getArgument("permission-level")) or
|
||||
this.getArgument("permission-level") = ["write", "admin"]
|
||||
)
|
||||
or
|
||||
this.getCallee() = "actions/github-script" and
|
||||
this.getArgument("script").splitAt("\n").matches("%getCollaboratorPermissionLevel%")
|
||||
or
|
||||
this.getCallee() = "octokit/request-action" and
|
||||
this.getArgument("route").regexpMatch("GET.*(collaborators|permission).*")
|
||||
}
|
||||
}
|
||||
|
||||
class BashCommentVsHeadDateCheck extends CommentVsHeadDateCheck, Run {
|
||||
BashCommentVsHeadDateCheck() {
|
||||
// eg: if [[ $(date -d "$pushed_at" +%s) -gt $(date -d "$COMMENT_AT" +%s) ]]; then
|
||||
exists(string cmd1, string cmd2 |
|
||||
cmd1 = this.getScript().getACommand() and
|
||||
cmd2 = this.getScript().getACommand() and
|
||||
not cmd1 = cmd2 and
|
||||
cmd1.toLowerCase().regexpMatch("date\\s+-d.*(commit|pushed|comment|commented)_at.*") and
|
||||
cmd2.toLowerCase().regexpMatch("date\\s+-d.*(commit|pushed|comment|commented)_at.*")
|
||||
)
|
||||
}
|
||||
}
|
||||
114
actions/ql/lib/codeql/actions/security/EnvPathInjectionQuery.qll
Normal file
114
actions/ql/lib/codeql/actions/security/EnvPathInjectionQuery.qll
Normal file
@@ -0,0 +1,114 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
private import codeql.actions.security.ArtifactPoisoningQuery
|
||||
private import codeql.actions.security.UntrustedCheckoutQuery
|
||||
|
||||
abstract class EnvPathInjectionSink extends DataFlow::Node { }
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares a PATH environment variable with contents from a local file.
|
||||
*/
|
||||
class EnvPathInjectionFromFileReadSink extends EnvPathInjectionSink {
|
||||
EnvPathInjectionFromFileReadSink() {
|
||||
exists(Run run, Step step |
|
||||
(
|
||||
step instanceof UntrustedArtifactDownloadStep or
|
||||
step instanceof PRHeadCheckoutStep
|
||||
) and
|
||||
this.asExpr() = run.getScript() and
|
||||
step.getAFollowingStep() = run and
|
||||
(
|
||||
// echo "$(cat foo.txt)" >> $GITHUB_PATH
|
||||
// FOO=$(cat foo.txt)
|
||||
// echo "$FOO" >> $GITHUB_PATH
|
||||
exists(string cmd |
|
||||
run.getScript().getAFileReadCommand() = cmd and
|
||||
run.getScript().getACmdReachingGitHubPathWrite(cmd)
|
||||
)
|
||||
or
|
||||
// cat foo.txt >> $GITHUB_PATH
|
||||
run.getScript().fileToGitHubPath(_)
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step executes a command that returns untrusted data which flows to GITHUB_ENV
|
||||
* e.g.
|
||||
* run: |
|
||||
* COMMIT_MESSAGE=$(git log --format=%s)
|
||||
* echo "${COMMIT_MESSAGE}" >> $GITHUB_PATH
|
||||
*/
|
||||
class EnvPathInjectionFromCommandSink extends EnvPathInjectionSink {
|
||||
EnvPathInjectionFromCommandSink() {
|
||||
exists(CommandSource source |
|
||||
this.asExpr() = source.getEnclosingRun().getScript() and
|
||||
source.getEnclosingRun().getScript().getACmdReachingGitHubPathWrite(source.getCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares an environment variable, uses it to declare a PATH env var.
|
||||
* e.g.
|
||||
* env:
|
||||
* BODY: ${{ github.event.comment.body }}
|
||||
* run: |
|
||||
* echo "$BODY" >> $GITHUB_PATH
|
||||
*/
|
||||
class EnvPathInjectionFromEnvVarSink extends EnvPathInjectionSink {
|
||||
EnvPathInjectionFromEnvVarSink() {
|
||||
exists(Run run, string var_name |
|
||||
run.getScript().getAnEnvReachingGitHubPathWrite(var_name) and
|
||||
exists(run.getInScopeEnvVarExpr(var_name)) and
|
||||
run.getScript() = this.asExpr()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class EnvPathInjectionFromMaDSink extends EnvPathInjectionSink {
|
||||
EnvPathInjectionFromMaDSink() { madSink(this, "envpath-injection") }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate an environment variable.
|
||||
*/
|
||||
private module EnvPathInjectionConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof EnvPathInjectionSink }
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Run run, string var |
|
||||
run.getInScopeEnvVarExpr(var) = pred.asExpr() and
|
||||
succ.asExpr() = run.getScript() and
|
||||
(
|
||||
run.getScript().getAnEnvReachingGitHubEnvWrite(var, _)
|
||||
or
|
||||
run.getScript().getAnEnvReachingGitHubOutputWrite(var, _)
|
||||
or
|
||||
run.getScript().getAnEnvReachingGitHubPathWrite(var)
|
||||
)
|
||||
)
|
||||
or
|
||||
exists(Uses step |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = step and
|
||||
succ.asExpr() = step and
|
||||
madSink(succ, "envpath-injection")
|
||||
)
|
||||
or
|
||||
exists(Run run |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
exists(run.getScript().getAFileReadCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate the PATH environment variable. */
|
||||
module EnvPathInjectionFlow = TaintTracking::Global<EnvPathInjectionConfig>;
|
||||
169
actions/ql/lib/codeql/actions/security/EnvVarInjectionQuery.qll
Normal file
169
actions/ql/lib/codeql/actions/security/EnvVarInjectionQuery.qll
Normal file
@@ -0,0 +1,169 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
private import codeql.actions.security.ArtifactPoisoningQuery
|
||||
private import codeql.actions.security.UntrustedCheckoutQuery
|
||||
|
||||
abstract class EnvVarInjectionSink extends DataFlow::Node { }
|
||||
|
||||
string sanitizerCommand() {
|
||||
result =
|
||||
[
|
||||
"tr\\s+(-d\\s*)?('|\")?.n('|\")?", // tr -d '\n' ' ', tr '\n' ' '
|
||||
"tr\\s+-cd\\s+.*:al(pha|num):", // tr -cd '[:alpha:_]'
|
||||
"(head|tail)\\s+-n\\s+1" // head -n 1, tail -n 1
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares an environment variable with contents from a local file.
|
||||
*/
|
||||
class EnvVarInjectionFromFileReadSink extends EnvVarInjectionSink {
|
||||
EnvVarInjectionFromFileReadSink() {
|
||||
exists(Run run, Step step |
|
||||
(
|
||||
step instanceof UntrustedArtifactDownloadStep or
|
||||
step instanceof PRHeadCheckoutStep
|
||||
) and
|
||||
this.asExpr() = run.getScript() and
|
||||
step.getAFollowingStep() = run and
|
||||
(
|
||||
// eg:
|
||||
// echo "SHA=$(cat test-results/sha-number)" >> $GITHUB_ENV
|
||||
// echo "SHA=$(<test-results/sha-number)" >> $GITHUB_ENV
|
||||
// FOO=$(cat test-results/sha-number)
|
||||
// echo "FOO=$FOO" >> $GITHUB_ENV
|
||||
exists(string cmd, string var, string sanitizer |
|
||||
run.getScript().getAFileReadCommand() = cmd and
|
||||
run.getScript().getACmdReachingGitHubEnvWrite(cmd, var) and
|
||||
run.getScript().getACmdReachingGitHubEnvWrite(sanitizer, var) and
|
||||
not exists(sanitizer.regexpFind(sanitizerCommand(), _, _))
|
||||
)
|
||||
or
|
||||
// eg: cat test-results/.env >> $GITHUB_ENV
|
||||
run.getScript().fileToGitHubEnv(_)
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step executes a command that returns untrusted data which flows to GITHUB_ENV
|
||||
* e.g.
|
||||
* run: |
|
||||
* COMMIT_MESSAGE=$(git log --format=%s)
|
||||
* echo "COMMIT_MESSAGE=${COMMIT_MESSAGE}" >> $GITHUB_ENV
|
||||
*/
|
||||
class EnvVarInjectionFromCommandSink extends EnvVarInjectionSink {
|
||||
CommandSource inCommand;
|
||||
string injectedVar;
|
||||
string command;
|
||||
|
||||
EnvVarInjectionFromCommandSink() {
|
||||
exists(Run run |
|
||||
this.asExpr() = inCommand.getEnclosingRun().getScript() and
|
||||
run = inCommand.getEnclosingRun() and
|
||||
run.getScript().getACmdReachingGitHubEnvWrite(inCommand.getCommand(), injectedVar) and
|
||||
(
|
||||
// the source flows to the injected variable without any command in between
|
||||
not run.getScript().getACmdReachingGitHubEnvWrite(_, injectedVar) and
|
||||
command = ""
|
||||
or
|
||||
// the source flows to the injected variable with a command in between
|
||||
run.getScript().getACmdReachingGitHubEnvWrite(command, injectedVar) and
|
||||
not command.regexpMatch(".*" + sanitizerCommand() + ".*")
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares an environment variable, uses it to declare env var.
|
||||
* e.g.
|
||||
* env:
|
||||
* BODY: ${{ github.event.comment.body }}
|
||||
* run: |
|
||||
* echo "FOO=$BODY" >> $GITHUB_ENV
|
||||
*/
|
||||
class EnvVarInjectionFromEnvVarSink extends EnvVarInjectionSink {
|
||||
string inVar;
|
||||
string injectedVar;
|
||||
string command;
|
||||
|
||||
EnvVarInjectionFromEnvVarSink() {
|
||||
exists(Run run |
|
||||
run.getScript() = this.asExpr() and
|
||||
exists(run.getInScopeEnvVarExpr(inVar)) and
|
||||
run.getScript().getAnEnvReachingGitHubEnvWrite(inVar, injectedVar) and
|
||||
(
|
||||
// the source flows to the injected variable without any command in between
|
||||
not run.getScript().getACmdReachingGitHubEnvWrite(_, injectedVar) and
|
||||
command = ""
|
||||
or
|
||||
// the source flows to the injected variable with a command in between
|
||||
run.getScript().getACmdReachingGitHubEnvWrite(_, injectedVar) and
|
||||
run.getScript().getACmdReachingGitHubEnvWrite(command, injectedVar) and
|
||||
not command.regexpMatch(".*" + sanitizerCommand() + ".*")
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a 3rd party action declares an environment variable with contents from an untrusted file.
|
||||
* e.g.
|
||||
*- name: Load .env file
|
||||
* uses: aarcangeli/load-dotenv@v1.0.0
|
||||
* with:
|
||||
* path: 'backend/new'
|
||||
* filenames: |
|
||||
* .env
|
||||
* .env.test
|
||||
* quiet: false
|
||||
* if-file-not-found: error
|
||||
*/
|
||||
class EnvVarInjectionFromMaDSink extends EnvVarInjectionSink {
|
||||
EnvVarInjectionFromMaDSink() { madSink(this, "envvar-injection") }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate an environment variable.
|
||||
*/
|
||||
private module EnvVarInjectionConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) {
|
||||
source instanceof RemoteFlowSource and
|
||||
not source.(RemoteFlowSource).getSourceType() = ["branch", "username"]
|
||||
}
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof EnvVarInjectionSink }
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Run run, string var |
|
||||
run.getInScopeEnvVarExpr(var) = pred.asExpr() and
|
||||
succ.asExpr() = run.getScript() and
|
||||
(
|
||||
run.getScript().getAnEnvReachingGitHubEnvWrite(var, _)
|
||||
or
|
||||
run.getScript().getAnEnvReachingGitHubOutputWrite(var, _)
|
||||
)
|
||||
)
|
||||
or
|
||||
exists(Uses step |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = step and
|
||||
succ.asExpr() = step and
|
||||
madSink(succ, "envvar-injection")
|
||||
)
|
||||
or
|
||||
exists(Run run |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
exists(run.getScript().getAFileReadCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate an environment variable. */
|
||||
module EnvVarInjectionFlow = TaintTracking::Global<EnvVarInjectionConfig>;
|
||||
220
actions/ql/lib/codeql/actions/security/OutputClobberingQuery.qll
Normal file
220
actions/ql/lib/codeql/actions/security/OutputClobberingQuery.qll
Normal file
@@ -0,0 +1,220 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
private import codeql.actions.security.ArtifactPoisoningQuery
|
||||
private import codeql.actions.security.UntrustedCheckoutQuery
|
||||
|
||||
abstract class OutputClobberingSink extends DataFlow::Node { }
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares a step output variable with contents from a local file.
|
||||
* e.g.
|
||||
* run: |
|
||||
* cat test-results/.vars >> $GITHUB_OUTPUT
|
||||
* echo "sha=$(cat test-results/sha-number)" >> $GITHUB_OUTPUT
|
||||
* echo "sha=$(<test-results/sha-number)" >> $GITHUB_OUTPUT
|
||||
*/
|
||||
class OutputClobberingFromFileReadSink extends OutputClobberingSink {
|
||||
OutputClobberingFromFileReadSink() {
|
||||
exists(Run run, Step step, string field1, string field2 |
|
||||
(
|
||||
step instanceof UntrustedArtifactDownloadStep
|
||||
or
|
||||
step instanceof SimplePRHeadCheckoutStep
|
||||
) and
|
||||
step.getAFollowingStep() = run and
|
||||
this.asExpr() = run.getScript() and
|
||||
// A write to GITHUB_OUTPUT that is not attacker-controlled
|
||||
exists(string str |
|
||||
// The output of a command that is not a file read command
|
||||
run.getScript().getACmdReachingGitHubOutputWrite(str, field1) and
|
||||
not str = run.getScript().getAFileReadCommand()
|
||||
or
|
||||
// A hard-coded string
|
||||
run.getScript().getAWriteToGitHubOutput(field1, str) and
|
||||
str.regexpMatch("[\"'0-9a-zA-Z_\\-]+")
|
||||
) and
|
||||
// A write to GITHUB_OUTPUT that is attacker-controlled
|
||||
(
|
||||
// echo "sha=$(<test-results/sha-number)" >> $GITHUB_OUTPUT
|
||||
exists(string cmd |
|
||||
run.getScript().getACmdReachingGitHubOutputWrite(cmd, field2) and
|
||||
run.getScript().getAFileReadCommand() = cmd
|
||||
)
|
||||
or
|
||||
// cat test-results/.vars >> $GITHUB_OUTPUT
|
||||
run.getScript().fileToGitHubOutput(_) and
|
||||
field2 = "UNKNOWN"
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if a Run step declares an environment variable, uses it in a step variable output.
|
||||
* e.g.
|
||||
* env:
|
||||
* BODY: ${{ github.event.comment.body }}
|
||||
* run: |
|
||||
* echo "FOO=$BODY" >> $GITHUB_OUTPUT
|
||||
*/
|
||||
class OutputClobberingFromEnvVarSink extends OutputClobberingSink {
|
||||
OutputClobberingFromEnvVarSink() {
|
||||
exists(Run run, string field1, string field2 |
|
||||
// A write to GITHUB_OUTPUT that is attacker-controlled
|
||||
exists(string var |
|
||||
run.getScript().getAnEnvReachingGitHubOutputWrite(var, field1) and
|
||||
exists(run.getInScopeEnvVarExpr(var)) and
|
||||
run.getScript() = this.asExpr()
|
||||
) and
|
||||
// A write to GITHUB_OUTPUT that is not attacker-controlled
|
||||
exists(string str |
|
||||
// The output of a command that is not a file read command
|
||||
run.getScript().getACmdReachingGitHubOutputWrite(str, field2) and
|
||||
not str = run.getScript().getAFileReadCommand()
|
||||
or
|
||||
// A hard-coded string
|
||||
run.getScript().getAWriteToGitHubOutput(field2, str) and
|
||||
str.regexpMatch("[\"'0-9a-zA-Z_\\-]+")
|
||||
) and
|
||||
not field2 = field1
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* - id: clob1
|
||||
* env:
|
||||
* BODY: ${{ github.event.comment.body }}
|
||||
* run: |
|
||||
* # VULNERABLE
|
||||
* echo $BODY
|
||||
* echo "::set-output name=OUTPUT::SAFE"
|
||||
* - id: clob2
|
||||
* env:
|
||||
* BODY: ${{ github.event.comment.body }}
|
||||
* run: |
|
||||
* # VULNERABLE
|
||||
* echo "::set-output name=OUTPUT::SAFE"
|
||||
* echo $BODY
|
||||
*/
|
||||
class WorkflowCommandClobberingFromEnvVarSink extends OutputClobberingSink {
|
||||
string clobbering_var;
|
||||
string clobbered_value;
|
||||
|
||||
WorkflowCommandClobberingFromEnvVarSink() {
|
||||
exists(Run run, string workflow_cmd_stmt, string clobbering_stmt |
|
||||
run.getScript() = this.asExpr() and
|
||||
run.getScript().getAStmt() = clobbering_stmt and
|
||||
clobbering_stmt.regexpMatch("echo\\s+(-e\\s+)?(\"|')?\\$(\\{)?" + clobbering_var + ".*") and
|
||||
exists(run.getInScopeEnvVarExpr(clobbering_var)) and
|
||||
run.getScript().getAStmt() = workflow_cmd_stmt and
|
||||
clobbered_value =
|
||||
trimQuotes(workflow_cmd_stmt.regexpCapture(".*::set-output\\s+name=.*::(.*)", 1))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* - id: clob1
|
||||
* run: |
|
||||
* # VULNERABLE
|
||||
* PR="$(<pr-number)"
|
||||
* echo "$PR"
|
||||
* echo "::set-output name=OUTPUT::SAFE"
|
||||
* - id: clob2
|
||||
* run: |
|
||||
* # VULNERABLE
|
||||
* cat pr-number
|
||||
* echo "::set-output name=OUTPUT::SAFE"
|
||||
* - id: clob3
|
||||
* run: |
|
||||
* # VULNERABLE
|
||||
* echo "::set-output name=OUTPUT::SAFE"
|
||||
* ls *.txt
|
||||
* - id: clob4
|
||||
* run: |
|
||||
* # VULNERABLE
|
||||
* CURRENT_VERSION=$(cat gradle.properties | sed -n '/^version=/ { s/^version=//;p }')
|
||||
* echo "$CURRENT_VERSION"
|
||||
* echo "::set-output name=OUTPUT::SAFE"
|
||||
*/
|
||||
class WorkflowCommandClobberingFromFileReadSink extends OutputClobberingSink {
|
||||
string clobbering_cmd;
|
||||
|
||||
WorkflowCommandClobberingFromFileReadSink() {
|
||||
exists(Run run, string clobbering_stmt |
|
||||
run.getScript() = this.asExpr() and
|
||||
run.getScript().getAStmt() = clobbering_stmt and
|
||||
(
|
||||
// A file's content is assigned to an env var that gets printed to stdout
|
||||
// - run: |
|
||||
// foo=$(<pr-id.txt)"
|
||||
// echo "${foo}"
|
||||
exists(string var, string value |
|
||||
run.getScript().getAnAssignment(var, value) and
|
||||
clobbering_cmd = run.getScript().getAFileReadCommand() and
|
||||
trimQuotes(value) = ["$(" + clobbering_cmd + ")", "`" + clobbering_cmd + "`"] and
|
||||
clobbering_stmt.regexpMatch("echo.*\\$(\\{)?" + var + ".*")
|
||||
)
|
||||
or
|
||||
// A file is read and its content is printed to stdout
|
||||
clobbering_cmd = run.getScript().getACommand() and
|
||||
clobbering_cmd.regexpMatch(["ls", Bash::fileReadCommand()] + "\\s.*") and
|
||||
(
|
||||
// - run: echo "foo=$(<pr-id.txt)"
|
||||
clobbering_stmt.regexpMatch("echo.*" + clobbering_cmd + ".*")
|
||||
or
|
||||
// A file content is printed to stdout
|
||||
// - run: cat pr-id.txt
|
||||
clobbering_stmt.indexOf(clobbering_cmd) = 0
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class OutputClobberingFromMaDSink extends OutputClobberingSink {
|
||||
OutputClobberingFromMaDSink() { madSink(this, "output-clobbering") }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate an environment variable.
|
||||
*/
|
||||
private module OutputClobberingConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) {
|
||||
source instanceof RemoteFlowSource and
|
||||
not source.(RemoteFlowSource).getSourceType() = "branch"
|
||||
}
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof OutputClobberingSink }
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Run run, string var |
|
||||
run.getInScopeEnvVarExpr(var) = pred.asExpr() and
|
||||
succ.asExpr() = run.getScript() and
|
||||
run.getScript().getAWriteToGitHubOutput(_, _)
|
||||
)
|
||||
or
|
||||
exists(Uses step |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = step and
|
||||
succ.asExpr() = step and
|
||||
madSink(succ, "output-clobbering")
|
||||
)
|
||||
or
|
||||
exists(Run run |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
(
|
||||
exists(run.getScript().getAFileReadCommand()) or
|
||||
run.getScript().getAStmt().matches("%::set-output %")
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate an environment variable. */
|
||||
module OutputClobberingFlow = TaintTracking::Global<OutputClobberingConfig>;
|
||||
56
actions/ql/lib/codeql/actions/security/PoisonableSteps.qll
Normal file
56
actions/ql/lib/codeql/actions/security/PoisonableSteps.qll
Normal file
@@ -0,0 +1,56 @@
|
||||
import actions
|
||||
|
||||
abstract class PoisonableStep extends Step { }
|
||||
|
||||
class DangerousActionUsesStep extends PoisonableStep, UsesStep {
|
||||
DangerousActionUsesStep() { poisonableActionsDataModel(this.getCallee()) }
|
||||
}
|
||||
|
||||
class PoisonableCommandStep extends PoisonableStep, Run {
|
||||
PoisonableCommandStep() {
|
||||
exists(string regexp |
|
||||
poisonableCommandsDataModel(regexp) and
|
||||
this.getScript().getACommand().regexpMatch(regexp)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class JavascriptImportUsesStep extends PoisonableStep, UsesStep {
|
||||
JavascriptImportUsesStep() {
|
||||
exists(string script, string line |
|
||||
this.getCallee() = "actions/github-script" and
|
||||
script = this.getArgument("script") and
|
||||
line = script.splitAt("\n").trim() and
|
||||
// const { default: foo } = await import('${{ github.workspace }}/scripts/foo.mjs')
|
||||
// const script = require('${{ github.workspace }}/scripts/test.js');
|
||||
// const script = require('./scripts');
|
||||
line.regexpMatch(".*(import|require)\\(('|\")(\\./|.*github.workspace).*")
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class SetupNodeUsesStep extends PoisonableStep, UsesStep {
|
||||
SetupNodeUsesStep() {
|
||||
this.getCallee() = "actions/setup-node" and
|
||||
this.getArgument("cache") = "yarn"
|
||||
}
|
||||
}
|
||||
|
||||
class LocalScriptExecutionRunStep extends PoisonableStep, Run {
|
||||
string path;
|
||||
|
||||
LocalScriptExecutionRunStep() {
|
||||
exists(string cmd, string regexp, int path_group | cmd = this.getScript().getACommand() |
|
||||
poisonableLocalScriptsDataModel(regexp, path_group) and
|
||||
path = cmd.regexpCapture(regexp, path_group)
|
||||
)
|
||||
}
|
||||
|
||||
string getPath() { result = normalizePath(path.splitAt(" ")) }
|
||||
}
|
||||
|
||||
class LocalActionUsesStep extends PoisonableStep, UsesStep {
|
||||
LocalActionUsesStep() { this.getCallee().matches("./%") }
|
||||
|
||||
string getPath() { result = normalizePath(this.getCallee()) }
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
import codeql.actions.dataflow.FlowSources
|
||||
import codeql.actions.DataFlow
|
||||
|
||||
private class RequestForgerySink extends DataFlow::Node {
|
||||
RequestForgerySink() { madSink(this, "request-forgery") }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for unsafe user input
|
||||
* that is used to construct and evaluate a system command.
|
||||
*/
|
||||
private module RequestForgeryConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof RequestForgerySink }
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used to construct and evaluate a system command. */
|
||||
module RequestForgeryFlow = TaintTracking::Global<RequestForgeryConfig>;
|
||||
@@ -0,0 +1,21 @@
|
||||
private import actions
|
||||
private import codeql.actions.TaintTracking
|
||||
private import codeql.actions.dataflow.ExternalFlow
|
||||
import codeql.actions.dataflow.FlowSources
|
||||
import codeql.actions.DataFlow
|
||||
|
||||
private class SecretExfiltrationSink extends DataFlow::Node {
|
||||
SecretExfiltrationSink() { madSink(this, "secret-exfiltration") }
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for untrusted data that reaches a sink where it may lead to secret exfiltration
|
||||
*/
|
||||
private module SecretExfiltrationConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
|
||||
|
||||
predicate isSink(DataFlow::Node sink) { sink instanceof SecretExfiltrationSink }
|
||||
}
|
||||
|
||||
/** Tracks flow of unsafe user input that is used in a context where it may lead to a secret exfiltration. */
|
||||
module SecretExfiltrationFlow = TaintTracking::Global<SecretExfiltrationConfig>;
|
||||
45
actions/ql/lib/codeql/actions/security/SelfHostedQuery.qll
Normal file
45
actions/ql/lib/codeql/actions/security/SelfHostedQuery.qll
Normal file
@@ -0,0 +1,45 @@
|
||||
import actions
|
||||
|
||||
bindingset[runner]
|
||||
predicate isGithubHostedRunner(string runner) {
|
||||
// list of github hosted repos: https://github.com/actions/runner-images/blob/main/README.md#available-images
|
||||
runner
|
||||
.toLowerCase()
|
||||
.regexpMatch("^(ubuntu-([0-9.]+|latest)|macos-([0-9]+|latest)(-x?large)?|windows-([0-9.]+|latest))$")
|
||||
}
|
||||
|
||||
bindingset[runner]
|
||||
predicate is3rdPartyHostedRunner(string runner) {
|
||||
runner.toLowerCase().regexpMatch("^(buildjet|warp)-[a-z0-9-]+$")
|
||||
}
|
||||
|
||||
/**
|
||||
* This predicate uses data available in the workflow file to identify self-hosted runners.
|
||||
* It does not know if the repository is public or private.
|
||||
* It is a best-effort approach to identify self-hosted runners.
|
||||
*/
|
||||
predicate staticallyIdentifiedSelfHostedRunner(Job job) {
|
||||
exists(string label |
|
||||
job.getATriggerEvent().getName() =
|
||||
[
|
||||
"issue_comment", "pull_request", "pull_request_review", "pull_request_review_comment",
|
||||
"pull_request_target", "workflow_run"
|
||||
] and
|
||||
label = job.getARunsOnLabel() and
|
||||
not isGithubHostedRunner(label) and
|
||||
not is3rdPartyHostedRunner(label)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* This predicate uses data available in the job log files to identify self-hosted runners.
|
||||
* It is a best-effort approach to identify self-hosted runners.
|
||||
*/
|
||||
predicate dynamicallyIdentifiedSelfHostedRunner(Job job) {
|
||||
exists(string runner_info |
|
||||
repositoryDataModel("public", _) and
|
||||
workflowDataModel(job.getEnclosingWorkflow().getLocation().getFile().getRelativePath(), _,
|
||||
job.getId(), _, _, runner_info) and
|
||||
runner_info.indexOf("self-hosted:true") > 0
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,384 @@
|
||||
import actions
|
||||
private import codeql.actions.DataFlow
|
||||
private import codeql.actions.dataflow.FlowSources
|
||||
private import codeql.actions.TaintTracking
|
||||
|
||||
string checkoutTriggers() {
|
||||
result = ["pull_request_target", "workflow_run", "workflow_call", "issue_comment"]
|
||||
}
|
||||
|
||||
/**
|
||||
* A taint-tracking configuration for PR HEAD references flowing
|
||||
* into actions/checkout's ref argument.
|
||||
*/
|
||||
private module ActionsMutableRefCheckoutConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) {
|
||||
(
|
||||
// remote flow sources
|
||||
source instanceof GitHubCtxSource
|
||||
or
|
||||
source instanceof GitHubEventCtxSource
|
||||
or
|
||||
source instanceof GitHubEventJsonSource
|
||||
or
|
||||
source instanceof MaDSource
|
||||
or
|
||||
// `ref` argument contains the PR id/number or head ref
|
||||
exists(Expression e |
|
||||
source.asExpr() = e and
|
||||
(
|
||||
containsHeadRef(e.getExpression()) or
|
||||
containsPullRequestNumber(e.getExpression())
|
||||
)
|
||||
)
|
||||
or
|
||||
// 3rd party actions returning the PR head ref
|
||||
exists(StepsExpression e, UsesStep step |
|
||||
source.asExpr() = e and
|
||||
e.getStepId() = step.getId() and
|
||||
(
|
||||
step.getCallee() = "eficode/resolve-pr-refs" and e.getFieldName() = "head_ref"
|
||||
or
|
||||
step.getCallee() = "xt0rted/pull-request-comment-branch" and e.getFieldName() = "head_ref"
|
||||
or
|
||||
step.getCallee() = "alessbell/pull-request-comment-branch" and
|
||||
e.getFieldName() = "head_ref"
|
||||
or
|
||||
step.getCallee() = "gotson/pull-request-comment-branch" and e.getFieldName() = "head_ref"
|
||||
or
|
||||
step.getCallee() = "potiuk/get-workflow-origin" and
|
||||
e.getFieldName() = ["sourceHeadBranch", "pullRequestNumber"]
|
||||
or
|
||||
step.getCallee() = "github/branch-deploy" and e.getFieldName() = ["ref", "fork_ref"]
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
predicate isSink(DataFlow::Node sink) {
|
||||
exists(Uses uses |
|
||||
uses.getCallee() = "actions/checkout" and
|
||||
uses.getArgumentExpr(["ref", "repository"]) = sink.asExpr()
|
||||
)
|
||||
}
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Run run |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
exists(run.getScript().getAFileReadCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
module ActionsMutableRefCheckoutFlow = TaintTracking::Global<ActionsMutableRefCheckoutConfig>;
|
||||
|
||||
private module ActionsSHACheckoutConfig implements DataFlow::ConfigSig {
|
||||
predicate isSource(DataFlow::Node source) {
|
||||
source.asExpr().getATriggerEvent().getName() =
|
||||
["pull_request_target", "workflow_run", "workflow_call", "issue_comment"] and
|
||||
(
|
||||
// `ref` argument contains the PR head/merge commit sha
|
||||
exists(Expression e |
|
||||
source.asExpr() = e and
|
||||
containsHeadSHA(e.getExpression())
|
||||
)
|
||||
or
|
||||
// 3rd party actions returning the PR head sha
|
||||
exists(StepsExpression e, UsesStep step |
|
||||
source.asExpr() = e and
|
||||
e.getStepId() = step.getId() and
|
||||
(
|
||||
step.getCallee() = "eficode/resolve-pr-refs" and e.getFieldName() = "head_sha"
|
||||
or
|
||||
step.getCallee() = "xt0rted/pull-request-comment-branch" and e.getFieldName() = "head_sha"
|
||||
or
|
||||
step.getCallee() = "alessbell/pull-request-comment-branch" and
|
||||
e.getFieldName() = "head_sha"
|
||||
or
|
||||
step.getCallee() = "gotson/pull-request-comment-branch" and e.getFieldName() = "head_sha"
|
||||
or
|
||||
step.getCallee() = "potiuk/get-workflow-origin" and
|
||||
e.getFieldName() = ["sourceHeadSha", "mergeCommitSha"]
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
predicate isSink(DataFlow::Node sink) {
|
||||
exists(Uses uses |
|
||||
uses.getCallee() = "actions/checkout" and
|
||||
uses.getArgumentExpr(["ref", "repository"]) = sink.asExpr()
|
||||
)
|
||||
}
|
||||
|
||||
predicate isAdditionalFlowStep(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(Run run |
|
||||
pred instanceof FileSource and
|
||||
pred.asExpr().(Step).getAFollowingStep() = run and
|
||||
succ.asExpr() = run.getScript() and
|
||||
exists(run.getScript().getAFileReadCommand())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
module ActionsSHACheckoutFlow = TaintTracking::Global<ActionsSHACheckoutConfig>;
|
||||
|
||||
bindingset[s]
|
||||
predicate containsPullRequestNumber(string s) {
|
||||
exists(
|
||||
normalizeExpr(s)
|
||||
.regexpFind([
|
||||
"\\bgithub\\.event\\.number\\b", "\\bgithub\\.event\\.issue\\.number\\b",
|
||||
"\\bgithub\\.event\\.pull_request\\.id\\b",
|
||||
"\\bgithub\\.event\\.pull_request\\.number\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.pull_requests\\[\\d+\\]\\.id\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.pull_requests\\[\\d+\\]\\.number\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.pull_requests\\[\\d+\\]\\.id\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.pull_requests\\[\\d+\\]\\.number\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.pull_requests\\[\\d+\\]\\.id\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.pull_requests\\[\\d+\\]\\.number\\b",
|
||||
// heuristics
|
||||
"\\bpr_number\\b", "\\bpr_id\\b"
|
||||
], _, _)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[s]
|
||||
predicate containsHeadSHA(string s) {
|
||||
exists(
|
||||
normalizeExpr(s)
|
||||
.regexpFind([
|
||||
"\\bgithub\\.event\\.pull_request\\.head\\.sha\\b",
|
||||
"\\bgithub\\.event\\.pull_request\\.merge_commit_sha\\b",
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_commit\\.id\\b",
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_sha\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.after\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.head_commit\\.id\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.head_sha\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.pull_requests\\[\\d+\\]\\.head\\.sha\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.after\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.head_commit\\.id\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.head_sha\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.pull_requests\\[\\d+\\]\\.head\\.sha\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.head_sha\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.pull_requests\\[\\d+\\]\\.head\\.sha\\b",
|
||||
"\\bgithub\\.event\\.merge_group\\.head_sha\\b",
|
||||
"\\bgithub\\.event\\.merge_group\\.head_commit\\.id\\b",
|
||||
// heuristics
|
||||
"\\bhead\\.sha\\b", "\\bhead_sha\\b", "\\bmerge_sha\\b", "\\bpr_head_sha\\b"
|
||||
], _, _)
|
||||
)
|
||||
}
|
||||
|
||||
bindingset[s]
|
||||
predicate containsHeadRef(string s) {
|
||||
exists(
|
||||
normalizeExpr(s)
|
||||
.regexpFind([
|
||||
"\\bgithub\\.event\\.pull_request\\.head\\.ref\\b", "\\bgithub\\.head_ref\\b",
|
||||
"\\bgithub\\.event\\.workflow_run\\.head_branch\\b",
|
||||
"\\bgithub\\.event\\.check_suite\\.pull_requests\\[\\d+\\]\\.head\\.ref\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.check_suite\\.pull_requests\\[\\d+\\]\\.head\\.ref\\b",
|
||||
"\\bgithub\\.event\\.check_run\\.pull_requests\\[\\d+\\]\\.head\\.ref\\b",
|
||||
"\\bgithub\\.event\\.merge_group\\.head_ref\\b",
|
||||
// heuristics
|
||||
"\\bhead\\.ref\\b", "\\bhead_ref\\b", "\\bmerge_ref\\b", "\\bpr_head_ref\\b",
|
||||
// env vars
|
||||
"GITHUB_HEAD_REF",
|
||||
], _, _)
|
||||
)
|
||||
}
|
||||
|
||||
class SimplePRHeadCheckoutStep extends Step {
|
||||
SimplePRHeadCheckoutStep() {
|
||||
// This should be:
|
||||
// artifact instanceof PRHeadCheckoutStep
|
||||
// but PRHeadCheckoutStep uses Taint Tracking anc causes a non-Monolitic Recursion error
|
||||
// so we list all the subclasses of PRHeadCheckoutStep here and use actions/checkout as a workaround
|
||||
// instead of using ActionsMutableRefCheckout and ActionsSHACheckout
|
||||
exists(Uses uses |
|
||||
this = uses and
|
||||
uses.getCallee() = "actions/checkout" and
|
||||
exists(uses.getArgument("ref")) and
|
||||
not uses.getArgument("ref").matches("%base%") and
|
||||
uses.getATriggerEvent().getName() = checkoutTriggers()
|
||||
)
|
||||
or
|
||||
this instanceof GitMutableRefCheckout
|
||||
or
|
||||
this instanceof GitSHACheckout
|
||||
or
|
||||
this instanceof GhMutableRefCheckout
|
||||
or
|
||||
this instanceof GhSHACheckout
|
||||
}
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD */
|
||||
abstract class PRHeadCheckoutStep extends Step {
|
||||
abstract string getPath();
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref */
|
||||
abstract class MutableRefCheckoutStep extends PRHeadCheckoutStep { }
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref */
|
||||
abstract class SHACheckoutStep extends PRHeadCheckoutStep { }
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref using actions/checkout action */
|
||||
class ActionsMutableRefCheckout extends MutableRefCheckoutStep instanceof UsesStep {
|
||||
ActionsMutableRefCheckout() {
|
||||
this.getCallee() = "actions/checkout" and
|
||||
(
|
||||
exists(
|
||||
ActionsMutableRefCheckoutFlow::PathNode source, ActionsMutableRefCheckoutFlow::PathNode sink
|
||||
|
|
||||
ActionsMutableRefCheckoutFlow::flowPath(source, sink) and
|
||||
this.getArgumentExpr(["ref", "repository"]) = sink.getNode().asExpr()
|
||||
)
|
||||
or
|
||||
// heuristic base on the step id and field name
|
||||
exists(string value, Expression expr |
|
||||
value.regexpMatch(".*(head|branch|ref).*") and expr = this.getArgumentExpr("ref")
|
||||
|
|
||||
expr.(StepsExpression).getStepId() = value
|
||||
or
|
||||
expr.(SimpleReferenceExpression).getFieldName() = value and
|
||||
not expr instanceof GitHubExpression
|
||||
or
|
||||
expr.(NeedsExpression).getNeededJobId() = value
|
||||
or
|
||||
expr.(JsonReferenceExpression).getAccessPath() = value
|
||||
or
|
||||
expr.(JsonReferenceExpression).getInnerExpression() = value
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if exists(this.(UsesStep).getArgument("path"))
|
||||
then result = this.(UsesStep).getArgument("path")
|
||||
else result = "GITHUB_WORKSPACE/"
|
||||
}
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref using actions/checkout action */
|
||||
class ActionsSHACheckout extends SHACheckoutStep instanceof UsesStep {
|
||||
ActionsSHACheckout() {
|
||||
this.getCallee() = "actions/checkout" and
|
||||
(
|
||||
exists(ActionsSHACheckoutFlow::PathNode source, ActionsSHACheckoutFlow::PathNode sink |
|
||||
ActionsSHACheckoutFlow::flowPath(source, sink) and
|
||||
this.getArgumentExpr(["ref", "repository"]) = sink.getNode().asExpr()
|
||||
)
|
||||
or
|
||||
// heuristic base on the step id and field name
|
||||
exists(string value, Expression expr |
|
||||
value.regexpMatch(".*(head|sha|commit).*") and expr = this.getArgumentExpr("ref")
|
||||
|
|
||||
expr.(StepsExpression).getStepId() = value
|
||||
or
|
||||
expr.(SimpleReferenceExpression).getFieldName() = value and
|
||||
not expr instanceof GitHubExpression
|
||||
or
|
||||
expr.(NeedsExpression).getNeededJobId() = value
|
||||
or
|
||||
expr.(JsonReferenceExpression).getAccessPath() = value
|
||||
or
|
||||
expr.(JsonReferenceExpression).getInnerExpression() = value
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() {
|
||||
if exists(this.(UsesStep).getArgument("path"))
|
||||
then result = this.(UsesStep).getArgument("path")
|
||||
else result = "GITHUB_WORKSPACE/"
|
||||
}
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref using git within a Run step */
|
||||
class GitMutableRefCheckout extends MutableRefCheckoutStep instanceof Run {
|
||||
GitMutableRefCheckout() {
|
||||
exists(string cmd | this.getScript().getACommand() = cmd |
|
||||
cmd.regexpMatch("git\\s+(fetch|pull).*") and
|
||||
(
|
||||
(containsHeadRef(cmd) or containsPullRequestNumber(cmd))
|
||||
or
|
||||
exists(string varname, string expr |
|
||||
expr = this.getInScopeEnvVarExpr(varname).getExpression() and
|
||||
(
|
||||
containsHeadRef(expr) or
|
||||
containsPullRequestNumber(expr)
|
||||
) and
|
||||
exists(cmd.regexpFind(varname, _, _))
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() { result = this.(Run).getWorkingDirectory() }
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref using git within a Run step */
|
||||
class GitSHACheckout extends SHACheckoutStep instanceof Run {
|
||||
GitSHACheckout() {
|
||||
exists(string cmd | this.getScript().getACommand() = cmd |
|
||||
cmd.regexpMatch("git\\s+(fetch|pull).*") and
|
||||
(
|
||||
containsHeadSHA(cmd)
|
||||
or
|
||||
exists(string varname, string expr |
|
||||
expr = this.getInScopeEnvVarExpr(varname).getExpression() and
|
||||
containsHeadSHA(expr) and
|
||||
exists(cmd.regexpFind(varname, _, _))
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() { result = this.(Run).getWorkingDirectory() }
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref using gh within a Run step */
|
||||
class GhMutableRefCheckout extends MutableRefCheckoutStep instanceof Run {
|
||||
GhMutableRefCheckout() {
|
||||
exists(string cmd | this.getScript().getACommand() = cmd |
|
||||
cmd.regexpMatch(".*(gh|hub)\\s+pr\\s+checkout.*") and
|
||||
(
|
||||
(containsHeadRef(cmd) or containsPullRequestNumber(cmd))
|
||||
or
|
||||
exists(string varname |
|
||||
(
|
||||
containsHeadRef(this.getInScopeEnvVarExpr(varname).getExpression()) or
|
||||
containsPullRequestNumber(this.getInScopeEnvVarExpr(varname).getExpression())
|
||||
) and
|
||||
exists(cmd.regexpFind(varname, _, _))
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() { result = this.(Run).getWorkingDirectory() }
|
||||
}
|
||||
|
||||
/** Checkout of a Pull Request HEAD ref using gh within a Run step */
|
||||
class GhSHACheckout extends SHACheckoutStep instanceof Run {
|
||||
GhSHACheckout() {
|
||||
exists(string cmd | this.getScript().getACommand() = cmd |
|
||||
cmd.regexpMatch("gh\\s+pr\\s+checkout.*") and
|
||||
(
|
||||
containsHeadSHA(cmd)
|
||||
or
|
||||
exists(string varname |
|
||||
containsHeadSHA(this.getInScopeEnvVarExpr(varname).getExpression()) and
|
||||
exists(cmd.regexpFind(varname, _, _))
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
override string getPath() { result = this.(Run).getWorkingDirectory() }
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
import actions
|
||||
|
||||
class KnownVulnerableAction extends UsesStep {
|
||||
string vulnerable_action;
|
||||
string fixed_version;
|
||||
string vulnerable_version;
|
||||
string vulnerable_sha;
|
||||
|
||||
KnownVulnerableAction() {
|
||||
vulnerableActionsDataModel(vulnerable_action, vulnerable_version, vulnerable_sha, fixed_version) and
|
||||
this.getCallee() = vulnerable_action and
|
||||
(this.getVersion() = vulnerable_version or this.getVersion() = vulnerable_sha)
|
||||
}
|
||||
|
||||
string getFixedVersion() { result = fixed_version }
|
||||
|
||||
string getVulnerableAction() { result = vulnerable_action }
|
||||
|
||||
string getVulnerableVersion() { result = vulnerable_version }
|
||||
|
||||
string getVulnerableSha() { result = vulnerable_sha }
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
import actions
|
||||
|
||||
class UnversionedImmutableAction extends UsesStep {
|
||||
string immutable_action;
|
||||
|
||||
UnversionedImmutableAction() {
|
||||
isImmutableAction(this, immutable_action) and
|
||||
not isSemVer(this.getVersion())
|
||||
}
|
||||
}
|
||||
|
||||
bindingset[version]
|
||||
predicate isSemVer(string version) {
|
||||
// https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string with optional v prefix
|
||||
version
|
||||
.regexpMatch("^v?(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$") or
|
||||
// or N or N.x or N.N.x with optional v prefix
|
||||
version.regexpMatch("^v?[1-9]\\d*$") or
|
||||
version.regexpMatch("^v?[1-9]\\d*\\.(x|0|([1-9]\\d*))$") or
|
||||
version.regexpMatch("^v?[1-9]\\d*\\.(0|([1-9]\\d*))\\.(x|0|([1-9]\\d*))$") or
|
||||
// or latest which will work
|
||||
version = "latest"
|
||||
}
|
||||
|
||||
predicate isImmutableAction(UsesStep actionStep, string actionName) {
|
||||
immutableActionsDataModel(actionName) and
|
||||
actionStep.getCallee() = actionName
|
||||
}
|
||||
177
actions/ql/lib/codeql/files/FileSystem.qll
Normal file
177
actions/ql/lib/codeql/files/FileSystem.qll
Normal file
@@ -0,0 +1,177 @@
|
||||
/** Provides classes for working with files and folders. */
|
||||
|
||||
private import codeql.Locations
|
||||
|
||||
/** A file or folder. */
|
||||
abstract class Container extends @container {
|
||||
/** Gets a file or sub-folder in this container. */
|
||||
Container getAChildContainer() { this = result.getParentContainer() }
|
||||
|
||||
/** Gets a file in this container. */
|
||||
File getAFile() { result = this.getAChildContainer() }
|
||||
|
||||
/** Gets a sub-folder in this container. */
|
||||
Folder getAFolder() { result = this.getAChildContainer() }
|
||||
|
||||
/**
|
||||
* Gets the absolute, canonical path of this container, using forward slashes
|
||||
* as path separator.
|
||||
*
|
||||
* The path starts with a _root prefix_ followed by zero or more _path
|
||||
* segments_ separated by forward slashes.
|
||||
*
|
||||
* The root prefix is of one of the following forms:
|
||||
*
|
||||
* 1. A single forward slash `/` (Unix-style)
|
||||
* 2. An upper-case drive letter followed by a colon and a forward slash,
|
||||
* such as `C:/` (Windows-style)
|
||||
* 3. Two forward slashes, a computer name, and then another forward slash,
|
||||
* such as `//FileServer/` (UNC-style)
|
||||
*
|
||||
* Path segments are never empty (that is, absolute paths never contain two
|
||||
* contiguous slashes, except as part of a UNC-style root prefix). Also, path
|
||||
* segments never contain forward slashes, and no path segment is of the
|
||||
* form `.` (one dot) or `..` (two dots).
|
||||
*
|
||||
* Note that an absolute path never ends with a forward slash, except if it is
|
||||
* a bare root prefix, that is, the path has no path segments. A container
|
||||
* whose absolute path has no segments is always a `Folder`, not a `File`.
|
||||
*/
|
||||
abstract string getAbsolutePath();
|
||||
|
||||
/**
|
||||
* Gets the base name of this container including extension, that is, the last
|
||||
* segment of its absolute path, or the empty string if it has no segments.
|
||||
*
|
||||
* Here are some examples of absolute paths and the corresponding base names
|
||||
* (surrounded with quotes to avoid ambiguity):
|
||||
*
|
||||
* <table border="1">
|
||||
* <tr><th>Absolute path</th><th>Base name</th></tr>
|
||||
* <tr><td>"/tmp/tst.go"</td><td>"tst.go"</td></tr>
|
||||
* <tr><td>"C:/Program Files (x86)"</td><td>"Program Files (x86)"</td></tr>
|
||||
* <tr><td>"/"</td><td>""</td></tr>
|
||||
* <tr><td>"C:/"</td><td>""</td></tr>
|
||||
* <tr><td>"D:/"</td><td>""</td></tr>
|
||||
* <tr><td>"//FileServer/"</td><td>""</td></tr>
|
||||
* </table>
|
||||
*/
|
||||
string getBaseName() {
|
||||
result = this.getAbsolutePath().regexpCapture(".*/(([^/]*?)(?:\\.([^.]*))?)", 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the extension of this container, that is, the suffix of its base name
|
||||
* after the last dot character, if any.
|
||||
*
|
||||
* In particular,
|
||||
*
|
||||
* - if the name does not include a dot, there is no extension, so this
|
||||
* predicate has no result;
|
||||
* - if the name ends in a dot, the extension is the empty string;
|
||||
* - if the name contains multiple dots, the extension follows the last dot.
|
||||
*
|
||||
* Here are some examples of absolute paths and the corresponding extensions
|
||||
* (surrounded with quotes to avoid ambiguity):
|
||||
*
|
||||
* <table border="1">
|
||||
* <tr><th>Absolute path</th><th>Extension</th></tr>
|
||||
* <tr><td>"/tmp/tst.go"</td><td>"go"</td></tr>
|
||||
* <tr><td>"/tmp/.classpath"</td><td>"classpath"</td></tr>
|
||||
* <tr><td>"/bin/bash"</td><td>not defined</td></tr>
|
||||
* <tr><td>"/tmp/tst2."</td><td>""</td></tr>
|
||||
* <tr><td>"/tmp/x.tar.gz"</td><td>"gz"</td></tr>
|
||||
* </table>
|
||||
*/
|
||||
string getExtension() {
|
||||
result = this.getAbsolutePath().regexpCapture(".*/([^/]*?)(\\.([^.]*))?", 3)
|
||||
}
|
||||
|
||||
/** Gets the file in this container that has the given `baseName`, if any. */
|
||||
File getFile(string baseName) {
|
||||
result = this.getAFile() and
|
||||
result.getBaseName() = baseName
|
||||
}
|
||||
|
||||
/** Gets the sub-folder in this container that has the given `baseName`, if any. */
|
||||
Folder getFolder(string baseName) {
|
||||
result = this.getAFolder() and
|
||||
result.getBaseName() = baseName
|
||||
}
|
||||
|
||||
/** Gets the parent container of this file or folder, if any. */
|
||||
Container getParentContainer() { containerparent(result, this) }
|
||||
|
||||
/**
|
||||
* Gets the relative path of this file or folder from the root folder of the
|
||||
* analyzed source location. The relative path of the root folder itself is
|
||||
* the empty string.
|
||||
*
|
||||
* This has no result if the container is outside the source root, that is,
|
||||
* if the root folder is not a reflexive, transitive parent of this container.
|
||||
*/
|
||||
string getRelativePath() {
|
||||
exists(string absPath, string pref |
|
||||
absPath = this.getAbsolutePath() and sourceLocationPrefix(pref)
|
||||
|
|
||||
absPath = pref and result = ""
|
||||
or
|
||||
absPath = pref.regexpReplaceAll("/$", "") + "/" + result and
|
||||
not result.matches("/%")
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the stem of this container, that is, the prefix of its base name up to
|
||||
* (but not including) the last dot character if there is one, or the entire
|
||||
* base name if there is not.
|
||||
*
|
||||
* Here are some examples of absolute paths and the corresponding stems
|
||||
* (surrounded with quotes to avoid ambiguity):
|
||||
*
|
||||
* <table border="1">
|
||||
* <tr><th>Absolute path</th><th>Stem</th></tr>
|
||||
* <tr><td>"/tmp/tst.go"</td><td>"tst"</td></tr>
|
||||
* <tr><td>"/tmp/.classpath"</td><td>""</td></tr>
|
||||
* <tr><td>"/bin/bash"</td><td>"bash"</td></tr>
|
||||
* <tr><td>"/tmp/tst2."</td><td>"tst2"</td></tr>
|
||||
* <tr><td>"/tmp/x.tar.gz"</td><td>"x.tar"</td></tr>
|
||||
* </table>
|
||||
*/
|
||||
string getStem() {
|
||||
result = this.getAbsolutePath().regexpCapture(".*/([^/]*?)(?:\\.([^.]*))?", 1)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a URL representing the location of this container.
|
||||
*
|
||||
* For more information see https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/#providing-urls.
|
||||
*/
|
||||
abstract string getURL();
|
||||
|
||||
/**
|
||||
* Gets a textual representation of the path of this container.
|
||||
*
|
||||
* This is the absolute path of the container.
|
||||
*/
|
||||
string toString() { result = this.getAbsolutePath() }
|
||||
}
|
||||
|
||||
/** A folder. */
|
||||
class Folder extends Container, @folder {
|
||||
override string getAbsolutePath() { folders(this, result) }
|
||||
|
||||
/** Gets the URL of this folder. */
|
||||
override string getURL() { result = "folder://" + this.getAbsolutePath() }
|
||||
}
|
||||
|
||||
/** A file. */
|
||||
class File extends Container, @file {
|
||||
override string getAbsolutePath() { files(this, result) }
|
||||
|
||||
/** Gets the URL of this file. */
|
||||
override string getURL() { result = "file://" + this.getAbsolutePath() + ":0:0:0:0" }
|
||||
|
||||
/** Holds if this file was extracted from ordinary source code. */
|
||||
predicate fromSource() { any() }
|
||||
}
|
||||
15
actions/ql/lib/ext/config/argument_injection_sinks.yml
Normal file
15
actions/ql/lib/ext/config/argument_injection_sinks.yml
Normal file
@@ -0,0 +1,15 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: argumentInjectionSinksDataModel
|
||||
# https://gtfobins.github.io/
|
||||
# https://0xn3va.gitbook.io/cheat-sheets/web-application/command-injection/argument-injection
|
||||
data:
|
||||
- ["(awk)\\s(.*?)", 1, 2]
|
||||
- ["(find)\\s(.*?)", 1, 2]
|
||||
- ["(git clone)\\s(.*?)", 1, 2]
|
||||
- ["(sed)\\s(.*?)", 1, 2]
|
||||
- ["(tar)\\s(.*?)", 1, 2]
|
||||
- ["(wget)\\s(.*?)", 1, 2]
|
||||
- ["(zip)\\s(.*?)", 1, 2]
|
||||
|
||||
53
actions/ql/lib/ext/config/context_event_map.yml
Normal file
53
actions/ql/lib/ext/config/context_event_map.yml
Normal file
@@ -0,0 +1,53 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: contextTriggerDataModel
|
||||
data:
|
||||
- ["commit_comment", "github.event.comment"]
|
||||
- ["commit_comment", "github.event.changes"]
|
||||
- ["discussion", "github.event.discussion"]
|
||||
- ["discussion", "github.event.changes"]
|
||||
- ["discussion_comment", "github.event.comment"]
|
||||
- ["discussion_comment", "github.event.discussion"]
|
||||
- ["discussion_comment", "github.event.changes"]
|
||||
- ["issues", "github.event.issue"]
|
||||
- ["issues", "github.event.changes"]
|
||||
- ["issue_comment", "github.event.issue"]
|
||||
- ["issue_comment", "github.event.comment"]
|
||||
- ["issue_comment", "github.event.changes"]
|
||||
- ["gollum", "github.event.pages"]
|
||||
- ["gollum", "github.event.changes"]
|
||||
- ["pull_request_comment", "github.event.comment"]
|
||||
- ["pull_request_comment", "github.event.pull_request"]
|
||||
- ["pull_request_comment", "github.head_ref"]
|
||||
- ["pull_request_comment", "github.event.changes"]
|
||||
- ["pull_request_review", "github.event.pull_request"]
|
||||
- ["pull_request_review", "github.event.review"]
|
||||
- ["pull_request_review", "github.head_ref"]
|
||||
- ["pull_request_review", "github.event.changes"]
|
||||
- ["pull_request_review_comment", "github.event.comment"]
|
||||
- ["pull_request_review_comment", "github.event.pull_request"]
|
||||
- ["pull_request_review_comment", "github.event.review"]
|
||||
- ["pull_request_review_comment", "github.head_ref"]
|
||||
- ["pull_request_review_comment", "github.event.changes"]
|
||||
- ["pull_request_target", "github.event.pull_request"]
|
||||
- ["pull_request_target", "github.head_ref"]
|
||||
- ["pull_request_target", "github.event.changes"]
|
||||
- ["push", "github.event.commits"]
|
||||
- ["push", "github.event.head_commit"]
|
||||
- ["push", "github.event.changes"]
|
||||
- ["workflow_run", "github.event.workflow"]
|
||||
- ["workflow_run", "github.event.workflow_run"]
|
||||
- ["workflow_run", "github.event.changes"]
|
||||
# workflow_call receives the same event payload as the calling workflow
|
||||
- ["workflow_call", "github.event.comment"]
|
||||
- ["workflow_call", "github.event.discussion"]
|
||||
- ["workflow_call", "github.event.inputs"]
|
||||
- ["workflow_call", "github.event.issue"]
|
||||
- ["workflow_call", "github.event.pages"]
|
||||
- ["workflow_call", "github.event.pull_request"]
|
||||
- ["workflow_call", "github.event.review"]
|
||||
- ["workflow_call", "github.event.workflow"]
|
||||
- ["workflow_call", "github.event.workflow_run"]
|
||||
- ["workflow_call", "github.event.changes"]
|
||||
|
||||
19
actions/ql/lib/ext/config/externally_triggereable_events.yml
Normal file
19
actions/ql/lib/ext/config/externally_triggereable_events.yml
Normal file
@@ -0,0 +1,19 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: externallyTriggerableEventsDataModel
|
||||
data:
|
||||
- ["discussion"]
|
||||
- ["discussion_comment"]
|
||||
- ["fork"]
|
||||
- ["watch"]
|
||||
- ["issue_comment"]
|
||||
- ["issues"]
|
||||
- ["pull_request_comment"]
|
||||
- ["pull_request_review"]
|
||||
- ["pull_request_review_comment"]
|
||||
- ["pull_request_target"]
|
||||
- ["workflow_run"] # depending on branch filter
|
||||
- ["workflow_call"] # depending on caller
|
||||
- ["workflow_dispatch"]
|
||||
- ["scheduled"]
|
||||
22
actions/ql/lib/ext/config/immutable_actions.yml
Normal file
22
actions/ql/lib/ext/config/immutable_actions.yml
Normal file
@@ -0,0 +1,22 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: immutableActionsDataModel
|
||||
data:
|
||||
- ["actions/checkout"]
|
||||
- ["actions/cache"]
|
||||
- ["actions/setup-node"]
|
||||
- ["actions/upload-artifact"]
|
||||
- ["actions/setup-python"]
|
||||
- ["actions/download-artifact"]
|
||||
- ["actions/github-script"]
|
||||
- ["actions/setup-java"]
|
||||
- ["actions/setup-go"]
|
||||
- ["actions/upload-pages-artifact"]
|
||||
- ["actions/deploy-pages"]
|
||||
- ["actions/setup-dotnet"]
|
||||
- ["actions/stale"]
|
||||
- ["actions/labeler"]
|
||||
- ["actions/create-github-app-token"]
|
||||
- ["actions/configure-pages"]
|
||||
- ["octokit/request-action"]
|
||||
76
actions/ql/lib/ext/config/poisonable_steps.yml
Normal file
76
actions/ql/lib/ext/config/poisonable_steps.yml
Normal file
@@ -0,0 +1,76 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: poisonableActionsDataModel
|
||||
# source: https://boostsecurityio.github.io/lotp/
|
||||
data:
|
||||
- ["azure/powershell"]
|
||||
- ["pre-commit/action"]
|
||||
- ["oxsecurity/megalinter"]
|
||||
- ["bridgecrewio/checkov-action"]
|
||||
- ["ruby/setup-ruby"]
|
||||
- ["actions/jekyll-build-pages"]
|
||||
- ["qcastel/github-actions-maven/actions/maven"]
|
||||
- ["sonarsource/sonarcloud-github-action"]
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: poisonableCommandsDataModel
|
||||
# source: https://boostsecurityio.github.io/lotp/
|
||||
data:
|
||||
- ["ant"]
|
||||
- ["asv"]
|
||||
- ["awk\\s+-f"]
|
||||
- ["bundle"]
|
||||
- ["bun"]
|
||||
- ["cargo"]
|
||||
- ["checkov"]
|
||||
- ["eslint"]
|
||||
- ["gcloud\\s+builds submit"]
|
||||
- ["golangci-lint"]
|
||||
- ["gomplate"]
|
||||
- ["goreleaser"]
|
||||
- ["gradle"]
|
||||
- ["java\\s+-jar"]
|
||||
- ["make"]
|
||||
- ["mdformat"]
|
||||
- ["mkdocs"]
|
||||
- ["msbuild"]
|
||||
- ["mvn"]
|
||||
- ["mypy"]
|
||||
- ["(p)?npm\\s+[a-z]"]
|
||||
- ["pre-commit"]
|
||||
- ["prettier"]
|
||||
- ["phpstan"]
|
||||
- ["pip\\s+install(.*)\\s+-r"]
|
||||
- ["pip\\s+install(.*)\\s+--requirement"]
|
||||
- ["pip(x)?\\s+install(.*)\\s+\\."]
|
||||
- ["poetry"]
|
||||
- ["pylint"]
|
||||
- ["pytest"]
|
||||
- ["python[\\d\\.]*\\s+-m\\s+pip\\s+install\\s+-r"]
|
||||
- ["python[\\d\\.]*\\s+-m\\s+pip\\s+install\\s+--requirement"]
|
||||
- ["rake"]
|
||||
- ["rails\\s+db:create"]
|
||||
- ["rails\\s+assets:precompile"]
|
||||
- ["rubocop"]
|
||||
- ["sed\\s+-f"]
|
||||
- ["sonar-scanner"]
|
||||
- ["stylelint"]
|
||||
- ["terraform"]
|
||||
- ["tflint"]
|
||||
- ["yarn"]
|
||||
- ["webpack"]
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: poisonableLocalScriptsDataModel
|
||||
data:
|
||||
# TODO: It could also be in the form of `dir/cmd`
|
||||
- ["(\\.\\/[^\\s]+)\\b", 1] # eg: ./venv/bin/activate
|
||||
- ["(\\.\\s+[^\\s]+)\\b", 1] # eg: . venv/bin/activate
|
||||
- ["(source|sh|bash|zsh|fish)\\s+([^\\s]+)\\b", 2]
|
||||
- ["(node)\\s+([^\\s]+)(\\.js|\\.ts)\\b", 2]
|
||||
- ["(python[\\d\\.]*)\\s+([^\\s]+)\\.py\\b", 2]
|
||||
- ["(ruby)\\s+([^\\s]+)\\.rb\\b", 2]
|
||||
- ["(go)\\s+(generate|run)\\s+([^\\s]+)\\.go\\b", 3]
|
||||
- ["(dotnet)\\s+([^\\s]+)\\.csproj\\b", 2]
|
||||
|
||||
84
actions/ql/lib/ext/config/untrusted_event_properties.yml
Normal file
84
actions/ql/lib/ext/config/untrusted_event_properties.yml
Normal file
@@ -0,0 +1,84 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: untrustedEventPropertiesDataModel
|
||||
data:
|
||||
# TITLE
|
||||
- ["github\\.event\\.issue\\.title", "title"]
|
||||
- ["github\\.event\\.pull_request\\.title", "title"]
|
||||
- ["github\\.event\\.discussion\\.title", "title"]
|
||||
- ["github\\.event\\.pages\\[[0-9]+\\]\\.page_name", "title"]
|
||||
- ["github\\.event\\.pages\\[[0-9]+\\]\\.title", "title"]
|
||||
- ["github\\.event\\.workflow_run\\.display_title", "title"]
|
||||
- ["github\\.event\\.changes\\.title\\.from", "title"]
|
||||
# URL
|
||||
- ["github\\.event\\.pull_request\\.head\\.repo\\.homepage", "url"]
|
||||
# TEXT
|
||||
- ["github\\.event\\.issue\\.body", "text"]
|
||||
- ["github\\.event\\.pull_request\\.body", "text"]
|
||||
- ["github\\.event\\.discussion\\.body", "text"]
|
||||
- ["github\\.event\\.review\\.body", "text"]
|
||||
- ["github\\.event\\.comment\\.body", "text"]
|
||||
- ["github\\.event\\.commits\\[[0-9]+\\]\\.message", "text"]
|
||||
- ["github\\.event\\.head_commit\\.message", "text"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.message", "text"]
|
||||
- ["github\\.event\\.pull_request\\.head\\.repo\\.description", "text"]
|
||||
- ["github\\.event\\.workflow_run\\.head_repository\\.description", "text"]
|
||||
- ["github\\.event\\.changes\\.body\\.from", "title"]
|
||||
# BRANCH
|
||||
- ["github\\.event\\.pull_request\\.head\\.repo\\.default_branch", "branch"]
|
||||
- ["github\\.event\\.pull_request\\.head\\.ref", "branch"]
|
||||
- ["github\\.event\\.workflow_run\\.head_branch", "branch"]
|
||||
- ["github\\.event\\.workflow_run\\.pull_requests\\[[0-9]+\\]\\.head\\.ref", "branch"]
|
||||
- ["github\\.event\\.merge_group\\.head_ref", "branch"]
|
||||
- ["github\\.event\\.changes\\.head\\.ref\\.from", "branch"]
|
||||
# LABEL
|
||||
- ["github\\.event\\.pull_request\\.head\\.label", "label"]
|
||||
# EMAIL
|
||||
- ["github\\.event\\.head_commit\\.author\\.email", "email"]
|
||||
- ["github\\.event\\.head_commit\\.committer\\.email", "email"]
|
||||
- ["github\\.event\\.commits\\[[0-9]+\\]\\.author\\.email", "email"]
|
||||
- ["github\\.event\\.commits\\[[0-9]+\\]\\.committer\\.email", "email"]
|
||||
- ["github\\.event\\.merge_group\\.committer\\.email", "email"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.author\\.email", "email"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.committer\\.email", "email"]
|
||||
# USERNAME
|
||||
- ["github\\.event\\.head_commit\\.author\\.name", "username"]
|
||||
- ["github\\.event\\.head_commit\\.committer\\.name", "username"]
|
||||
- ["github\\.event\\.commits\\[[0-9]+\\]\\.author\\.name", "username"]
|
||||
- ["github\\.event\\.commits\\[[0-9]+\\]\\.committer\\.name", "username"]
|
||||
- ["github\\.event\\.merge_group\\.committer\\.name", "username"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.author\\.name", "username"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.committer\\.name", "username"]
|
||||
# PATH
|
||||
- ["github\\.event\\.workflow\\.path", "path"]
|
||||
- ["github\\.event\\.workflow_run\\.path", "path"]
|
||||
- ["github\\.event\\.workflow_run\\.referenced_workflows\\.path", "path"]
|
||||
# JSON
|
||||
- ["github", "json"]
|
||||
- ["github\\.event", "json"]
|
||||
- ["github\\.event\\.comment", "json"]
|
||||
- ["github\\.event\\.commits", "json"]
|
||||
- ["github\\.event\\.discussion", "json"]
|
||||
- ["github\\.event\\.head_commit", "json"]
|
||||
- ["github\\.event\\.head_commit\\.author", "json"]
|
||||
- ["github\\.event\\.head_commit\\.committer", "json"]
|
||||
- ["github\\.event\\.issue", "json"]
|
||||
- ["github\\.event\\.merge_group", "json"]
|
||||
- ["github\\.event\\.merge_group\\.committer", "json"]
|
||||
- ["github\\.event\\.pull_request", "json"]
|
||||
- ["github\\.event\\.pull_request\\.head", "json"]
|
||||
- ["github\\.event\\.pull_request\\.head\\.repo", "json"]
|
||||
- ["github\\.event\\.pages", "json"]
|
||||
- ["github\\.event\\.review", "json"]
|
||||
- ["github\\.event\\.workflow", "json"]
|
||||
- ["github\\.event\\.workflow_run", "json"]
|
||||
- ["github\\.event\\.workflow_run\\.head_branch", "json"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit", "json"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.author", "json"]
|
||||
- ["github\\.event\\.workflow_run\\.head_commit\\.committer", "json"]
|
||||
- ["github\\.event\\.workflow_run\\.head_repository", "json"]
|
||||
- ["github\\.event\\.workflow_run\\.pull_requests", "json"]
|
||||
- ["github\\.event\\.changes", "json"]
|
||||
|
||||
|
||||
56
actions/ql/lib/ext/config/untrusted_gh_command.yml
Normal file
56
actions/ql/lib/ext/config/untrusted_gh_command.yml
Normal file
@@ -0,0 +1,56 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: untrustedGhCommandDataModel
|
||||
data:
|
||||
#
|
||||
# PULL REQUESTS
|
||||
#
|
||||
# HEAD_REF=$(gh pr view "${{ github.event.issue.number }}" --json headRefName -q '.headRefName')
|
||||
- ["gh\\s+pr\\b.*\\bview\\b.*\\.headRefName.*", "branch,oneline"]
|
||||
# TITLE=$(gh pr view $PR_NUMBER --json title --jq .title)
|
||||
- ["gh\\s+pr\\b.*\\bview\\b.*\\.title.*", "title,oneline"]
|
||||
# BODY=$(gh pr view $PR_NUMBER --json body --jq .body)
|
||||
- ["gh\\s+pr\\b.*\\bview\\b.*\\.body.*", "text,multiline"]
|
||||
# COMMENTS="$(gh pr view --repo ${{ github.repository }} "$PR_NUMBER" --json "body,comments" -q '.body, .comments[].body')"
|
||||
- ["gh\\s+pr\\b.*\\bview\\b.*\\.comments.*", "text,multiline"]
|
||||
# CHANGED_FILES="$(gh pr view --repo ${{ github.repository }} ${{ needs.check-comment.outputs.pull_number }} --json files --jq '.files.[].path')"
|
||||
- ["gh\\s+pr\\b.*\\bview\\b.*\\.files.*", "filename,multiline"]
|
||||
# AUTHOR=$(gh pr view ${ORI_PR} -R ${REPO} --json author -q '.author.login')
|
||||
- ["gh\\s+pr\\b.*\\bview\\b.*\\.author.*", "username,oneline"]
|
||||
#
|
||||
# ISSUES
|
||||
#
|
||||
# TITLE=$(gh issue view "$ISSUE_NUMBER" --json title --jq '.title')
|
||||
- ["gh\\s+issue\\b.*\\bview\\b.*\\.title.*", "title,oneline"]
|
||||
# BODY=$(gh issue view -R ${GITHUB_REPOSITORY} ${ORIGINAL_ISSUE_NUMBER} --json title,body,assignees --jq .body)
|
||||
- ["gh\\s+issue\\b.*\\bview\\b.*\\.body.*", "text,multiline"]
|
||||
# COMMENTS=$(gh issue view "$ISSUE_NUMBER" --json comments --jq '.comments[].body')
|
||||
- ["gh\\s+issue\\b.*\\bview\\b.*\\.comments.*", "text,multiline"]
|
||||
#
|
||||
# API
|
||||
#
|
||||
# PR="$(gh api /repos/test/test/pulls/${PR_NUMBER})"
|
||||
#
|
||||
# HEAD_REF=$(gh api -H 'Accept: application/vnd.github+json' /repos/test/test/commits/${{ env.sui_sha }}/pulls --jq '.[].head.ref' | head -n 1)
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/pulls.*\\b.*\\.head.ref.*", "branch,oneline"]
|
||||
# TITLE=$(gh api /repos/test/test/pulls/${{PR_NUMBER}} --jq ".title")
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/pulls.*\\b.*\\.title.*", "title,oneline"]
|
||||
# BODY=$(gh api /repos/test/test/pulls/${{PR_NUMBER}} --jq ".body")
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/pulls.*\\b.*\\.body.*", "text,multiline"]
|
||||
# COMMENTS=$(gh api /repos/test/test/pulls/${PR_NUMBER}/comments --jq '.[].body')
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/pulls.*/comments\\b.*\\.body.*", "text,multiline"]
|
||||
# CHANGED_FILES=$(gh api /repos/test/test/pulls/${{PR_NUMBER}}/files --jq '.[].filename')
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/pulls.*/files\\b.*\\.filename.*", "filename,oneline"]
|
||||
# AUTHOR=$(gh api /repos/test/test/pulls/${{PR_NUMBER}} --jq ".user.login")
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/pulls.*\\b.*\\.user\\.login.*", "username,oneline"]
|
||||
#
|
||||
# ISSUES
|
||||
#
|
||||
# TITLE=$(gh api /repos/test/test/issues/${{PR_NUMBER}} --jq ".title")
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/issues.*\\b.*\\.title.*", "title,oneline"]
|
||||
# BODY=$(gh api /repos/test/test/issues/${{PR_NUMBER}} --jq ".body")
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/issues.*\\b.*\\.body.*", "text,multiline"]
|
||||
# COMMENTS=$(gh api /repos/test/test/pulls/${PR_NUMBER}/comments --jq '.[].body')
|
||||
- ["gh\\s+api\\b.*\\b(/)?repos/.*/issues.*/comments\\b.*\\.body.*", "text,multiline"]
|
||||
|
||||
32
actions/ql/lib/ext/config/untrusted_git_command.yml
Normal file
32
actions/ql/lib/ext/config/untrusted_git_command.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: untrustedGitCommandDataModel
|
||||
data:
|
||||
# FILES=$(git diff-tree --no-commit-id --name-only HEAD -r)
|
||||
- ["git\\b.*\\bdiff-tree\\b", "filename,multiline"]
|
||||
# CHANGES=$(git --no-pager diff --name-only $NAME | grep -v -f .droneignore);
|
||||
# CHANGES=$(git diff --name-only)
|
||||
- ["git\\b.*\\bdiff\\b", "filename,multiline"]
|
||||
# COMMIT_MESSAGE=$(git log --format=%s -n 1)
|
||||
- ["git\\b.*\\blog\\b.*%s", "text,online"]
|
||||
# COMMIT_MESSAGE=$(git log --format=%B -n 1)
|
||||
- ["git\\b.*\\blog\\b.*%B", "text,multiline"]
|
||||
# COMMIT_MESSAGE=$(git log --format=oneline)
|
||||
- ["git\\b.*\\blog\\b.*oneline", "text,oneline"]
|
||||
# COMMIT_MESSAGE=$(git show -s --format=%B)
|
||||
# COMMIT_MESSAGE=$(git show -s --format=%s)
|
||||
- ["git\\b.*\\bshow\\b.*-s.*%s", "text,oneline"]
|
||||
- ["git\\b.*\\bshow\\b.*-s.*%B", "text,multiline"]
|
||||
# AUTHOR=$(git log -1 --pretty=format:'%an')
|
||||
- ["git\\b.*\\blog\\b.*%an", "username,oneline"]
|
||||
# AUTHOR=$(git show -s --pretty=%an)
|
||||
- ["git\\b.*\\bshow\\b.*%an", "username,oneline"]
|
||||
# EMAIL=$(git log -1 --pretty=format:'%ae')
|
||||
- ["git\\b.*\\blog\\b.*%ae", "email,oneline"]
|
||||
# EMAIL=$(git show -s --pretty=%ae)
|
||||
- ["git\\b.*\\bshow\\b.*%ae", "email,oneline"]
|
||||
# BRANCH=$(git branch --show-current)
|
||||
- ["git\\b.*\\bbranch\\b.*\\b--show-current\\b", "branch,oneline"]
|
||||
# BRANCH=$(git rev-parse --abbrev-ref HEAD)
|
||||
- ["git\\b.*\\brev-parse\\b.*\\b--abbrev-ref\\b", "branch,oneline"]
|
||||
641
actions/ql/lib/ext/config/vulnerable_actions.yml
Normal file
641
actions/ql/lib/ext/config/vulnerable_actions.yml
Normal file
@@ -0,0 +1,641 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: vulnerableActionsDataModel
|
||||
data:
|
||||
|
||||
# gh api /repos/actions/download-artifact/tags --jq 'map({name: .name, sha: .commit.sha})' --paginate | jq -r '.[] | "- \"\(.name)\", \"\(.sha)\""'
|
||||
|
||||
#
|
||||
# actions/download-artifact
|
||||
- ["actions/download-artifact", "v4.1.6", "9c19ed7fe5d278cd354c7dfd5d3b88589c7e2395", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.1.5", "8caf195ad4b1dee92908e23f56eeb0696f1dd42d", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.1.4", "c850b930e6ba138125429b7e5c93fc707a7f8427", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.1.3", "87c55149d96e628cc2ef7e6fc2aab372015aec85", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.1.2", "eaceaf801fd36c7dee90939fad912460b18a1ffe", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.1.1", "6b208ae046db98c579e8a3aa621ab581ff575935", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.1.0", "f44cd7b40bfd40b6aa1cc1b9b5b7bf03d3c67110", "4.1.7"]
|
||||
- ["actions/download-artifact", "v4.0.0", "7a1cd3216ca9260cd8022db641d960b1db4d1be4", "4.1.7"]
|
||||
- ["actions/download-artifact", "v3.0.2", "9bc31d5ccc31df68ecc42ccf4149144866c47d8a", "4.1.7"]
|
||||
- ["actions/download-artifact", "v3.0.1", "9782bd6a9848b53b110e712e20e42d89988822b7", "4.1.7"]
|
||||
- ["actions/download-artifact", "v3.0.0", "fb598a63ae348fa914e94cd0ff38f362e927b741", "4.1.7"]
|
||||
- ["actions/download-artifact", "v3", "9bc31d5ccc31df68ecc42ccf4149144866c47d8a", "4.1.7"]
|
||||
- ["actions/download-artifact", "v3-node20", "246d7188e736d3686f6d19628d253ede9697bd55", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.1.1", "cbed621e49e4c01b044d60f6c80ea4ed6328b281", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.1.0", "f023be2c48cc18debc3bacd34cb396e0295e2869", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.10", "3be87be14a055c47b01d3bd88f8fe02320a9bb60", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.9", "158ca71f7c614ae705e79f25522ef4658df18253", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.8", "4a7a711286f30c025902c28b541c10e147a9b843", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.7", "f144d3c3916a86f4d6b11ff379d17a49d8f85dbc", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.6", "f8e41fbffeebb48c0273438d220bb2387727471f", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.5", "c3f5d00c8784369c43779f3d2611769594a61f7a", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.4", "b3cedea9bed36890c824f4065163b667eeca272b", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.3", "80d2d4023c185001eacb50e37afd7dd667ba8044", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.2", "381af06b4268a1e0ad7b7c7e5a09f1894977120f", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0.1", "1ac47ba4b6af92e65d0438b64ce1ea49ce1cc48d", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2.0", "1de1dea89c32dcb1f37183c96fe85cfe067b682a", "4.1.7"]
|
||||
- ["actions/download-artifact", "v2", "cbed621e49e4c01b044d60f6c80ea4ed6328b281", "4.1.7"]
|
||||
- ["actions/download-artifact", "v1.0.0", "18f0f591fbc635562c815484d73b6e8e3980482e", "4.1.7"]
|
||||
- ["actions/download-artifact", "v1", "18f0f591fbc635562c815484d73b6e8e3980482e", "4.1.7"]
|
||||
- ["actions/download-artifact", "1.0.0", "18f0f591fbc635562c815484d73b6e8e3980482e", "4.1.7"]
|
||||
|
||||
# tj-actions/changed-files
|
||||
# https://github.com/advisories/GHSA-mcph-m25j-8j63
|
||||
# CVE-2023-51664
|
||||
- ["tj-actions/changed-files", "v40.2.3", "56284d80811fb5963a972b438f2870f175e5b7c8", "41"]
|
||||
- ["tj-actions/changed-files", "v40.2.2", "94549999469dbfa032becf298d95c87a14c34394", "41"]
|
||||
- ["tj-actions/changed-files", "v40.2.1", "1c938490c880156b746568a518594309cfb3f66b", "41"]
|
||||
- ["tj-actions/changed-files", "v40.2.0", "da093c1609db0edd0a037ce9664e135f74bf30d9", "41"]
|
||||
- ["tj-actions/changed-files", "v40.1.1", "25ef3926d147cd02fc7e931c1ef50772bbb0d25d", "41"]
|
||||
- ["tj-actions/changed-files", "v40.1.0", "18c8a4ecebe93d32ed8a88e1d0c098f5f68c221b", "41"]
|
||||
- ["tj-actions/changed-files", "v40.0.2", "40526807ee1e208a1a8c1bbe6bd2d1b044ef6368", "41"]
|
||||
- ["tj-actions/changed-files", "v40.0.1", "bfc49f4cff6934aa236c171f9bcbf1dd6b1ef438", "41"]
|
||||
- ["tj-actions/changed-files", "v40.0.0", "af292f1e845a0377b596972698a8598734eb2796", "41"]
|
||||
- ["tj-actions/changed-files", "v40", "56284d80811fb5963a972b438f2870f175e5b7c8", "41"]
|
||||
- ["tj-actions/changed-files", "v39.2.4", "fea790cb660e33aef4bdf07304e28fedd77dfa13", "41"]
|
||||
- ["tj-actions/changed-files", "v39.2.3", "95690f9ece77c1740f4a55b7f1de9023ed6b1f87", "41"]
|
||||
- ["tj-actions/changed-files", "v39.2.2", "408093d9ff9c134c33b974e0722ce06b9d6e8263", "41"]
|
||||
- ["tj-actions/changed-files", "v39.2.1", "db153baf731265ad02cd490b07f470e2d55e3345", "41"]
|
||||
- ["tj-actions/changed-files", "v39.2.0", "8238a4103220c636f2dad328ead8a7c8dbe316a3", "41"]
|
||||
- ["tj-actions/changed-files", "v39.1.2", "41960309398d165631f08c5df47a11147e14712b", "41"]
|
||||
- ["tj-actions/changed-files", "v39.1.1", "a21a533a0c244a27daac02f9dc6fcf8aeb996154", "41"]
|
||||
- ["tj-actions/changed-files", "v39.1.0", "8e79ba7ab9fee9984275219aeb2c8db47bcb8a2d", "41"]
|
||||
- ["tj-actions/changed-files", "v39.0.3", "76c4d81a6acd339b55bd7407a016981c853eb702", "41"]
|
||||
- ["tj-actions/changed-files", "v39.0.2", "6ee9cdc5816333acda68e01cf12eedc619e28316", "41"]
|
||||
- ["tj-actions/changed-files", "v39.0.1", "246636f5fa148b5ad8e65ca4c57b18af3123e5f6", "41"]
|
||||
- ["tj-actions/changed-files", "v39.0.0", "48566bbcc22ceb7c5809ebdd27377309f2c3de8c", "41"]
|
||||
- ["tj-actions/changed-files", "v39", "fea790cb660e33aef4bdf07304e28fedd77dfa13", "41"]
|
||||
- ["tj-actions/changed-files", "v38.2.2", "1aee3621b1c10305ee778298fcf32324684e5448", "41"]
|
||||
- ["tj-actions/changed-files", "v38.2.1", "2f7246cb26e8bb6709b6cbfc1fec7febfe82e96a", "41"]
|
||||
- ["tj-actions/changed-files", "v38.2.0", "0fc9663aa70243d87319dbd32fd926344d18d38f", "41"]
|
||||
- ["tj-actions/changed-files", "v38.1.3", "c860b5c47fa71f461da850094ef2f6e3d6514e44", "41"]
|
||||
- ["tj-actions/changed-files", "v38.1.2", "2f8b80270f04e421b28efb2abaccef4fce4815b6", "41"]
|
||||
- ["tj-actions/changed-files", "v38.1.1", "b7f1b7347fea1df67230801b66081fe3cba7dc69", "41"]
|
||||
- ["tj-actions/changed-files", "v38.1.0", "1c26215f3fbd51eba03bc199e5cbabdfc3584ce3", "41"]
|
||||
- ["tj-actions/changed-files", "v38.0.0", "17f3fec1edef0c3916d59cbcee1585fcd457e456", "41"]
|
||||
- ["tj-actions/changed-files", "v38", "1aee3621b1c10305ee778298fcf32324684e5448", "41"]
|
||||
- ["tj-actions/changed-files", "v37.6.1", "a0585ff9904b77d046192a7846e59783d6ea287b", "41"]
|
||||
- ["tj-actions/changed-files", "v37.6.0", "87697c0dca7dd44e37a2b79a79489332556ff1f3", "41"]
|
||||
- ["tj-actions/changed-files", "v37.5.2", "85c8b8252fc9893e00b3633a16670e53040e6d71", "41"]
|
||||
- ["tj-actions/changed-files", "v37.5.1", "a96679dfee2a1e64b1db5a210c0ffaf1f2cb24ce", "41"]
|
||||
- ["tj-actions/changed-files", "v37.5.0", "920e7b9ae1d45913fc81f86c956fee89c77d2e5e", "41"]
|
||||
- ["tj-actions/changed-files", "v37.4.0", "de0eba32790fb9bf87471b32855a30fc8f9d5fc6", "41"]
|
||||
- ["tj-actions/changed-files", "v37.3.0", "39283171cefdf491e0f0d6cf285b86b31eb6f3cd", "41"]
|
||||
- ["tj-actions/changed-files", "v37.2.0", "68b429ddc666ea0dba46309e1ee45e06bb408df8", "41"]
|
||||
- ["tj-actions/changed-files", "v37.1.2", "2a968ff601949c81b47d9c1fdb789b0d25ddeea2", "41"]
|
||||
- ["tj-actions/changed-files", "v37.1.1", "1f20fb83f05eabed6e12ba0329edac8b6ec8e207", "41"]
|
||||
- ["tj-actions/changed-files", "v37.1.0", "87e23c4c79a603288642711155953c7da34b11ac", "41"]
|
||||
- ["tj-actions/changed-files", "v37.0.5", "54849deb963ca9f24185fb5de2965e002d066e6b", "41"]
|
||||
- ["tj-actions/changed-files", "v37.0.4", "bb3376162b179308a79fc4450262a15a8e1d6888", "41"]
|
||||
- ["tj-actions/changed-files", "v37.0.3", "ec1e14cf27f4585783f463070881b2c499349a8a", "41"]
|
||||
- ["tj-actions/changed-files", "v37.0.2", "2106eb4457dd2aba4d37c8cdd16acba5d18739b9", "41"]
|
||||
- ["tj-actions/changed-files", "v37.0.1", "e5efec47f620e0fde64a1ad8f53bbf53d51a8c97", "41"]
|
||||
- ["tj-actions/changed-files", "v37.0.0", "5817a9efb0d7cc34b917d8146ea10b9f32044968", "41"]
|
||||
- ["tj-actions/changed-files", "v37", "a0585ff9904b77d046192a7846e59783d6ea287b", "41"]
|
||||
- ["tj-actions/changed-files", "v36.4.1", "54479c37f5eb47a43e595c6b71e1df2c112ce7f1", "41"]
|
||||
- ["tj-actions/changed-files", "v36.4.0", "e1754a427f478b8778d349341b8f1d80f1f47f44", "41"]
|
||||
- ["tj-actions/changed-files", "v36.3.0", "9bf09145c3560e451e8d8e87b42ccb3fef5b692d", "41"]
|
||||
- ["tj-actions/changed-files", "v36.2.1", "c9124514c375de5dbb9697afa6f2e36a236ee58c", "41"]
|
||||
- ["tj-actions/changed-files", "v36.2.0", "174a2a6360b54a2019877c254c4be78106efc94f", "41"]
|
||||
- ["tj-actions/changed-files", "v36.1.0", "fb20f4d24890fadc539505b1746d260504b213d0", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.18", "07e0177b72d3640efced741cae32f9861eee1367", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.17", "b13786805affca18e536ed489687d3d8d1f05d21", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.16", "1aae16084af435f73c8cdfd742473028810c5f20", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.15", "5d2fcdb4cbef720a52f49fd05d8c7edd18a64758", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.14", "9ecc6e7fe2e26945b52485ccd9bc4b44000f5af1", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.13", "8c9ee56d0180a538ad5b6b8a208e4db974bad9c0", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.12", "5978e5a2df95ef20cde627d4acb5edd1f87ba46a", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.11", "17c3e9e98f47ef859502ba3e38be0b8a6a4bddd9", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.10", "3f7b5c900bdbf1b80a825e220413986227b3ff03", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.9", "cf4fe8759a45edd76ed6215da3529d2dbd2a3c68", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.8", "043929ee8fffa1dd1d619782a5a338cf39e76e23", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.7", "4e2535f2b330e70ff7055f7de4272653cfdbd555", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.6", "652648acb4f32660a94e245a2a51c6d0e56b2a1d", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.5", "9ad1a5b96ab3e56cd2bb25ff90c6271e4e70eb71", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.4", "c798a4ea57f0e0a9d2b5374853c9c479ebb435a2", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.3", "25eaddf37ae893cec889065e9a60439c8af6f089", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.2", "abef388dd913ce13a650bbf800eba73961657fb9", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.1", "1c2673b763ea086acd660dd4257c9be06eb77667", "41"]
|
||||
- ["tj-actions/changed-files", "v36.0.0", "53c377a374b445ec2a61e343068807bf41f2c9a6", "41"]
|
||||
- ["tj-actions/changed-files", "v36", "54479c37f5eb47a43e595c6b71e1df2c112ce7f1", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.3", "039afcd1024c210363c9d3fc8fd07e1f3fcf2867", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.3-sec", "8663bb8fc810b983a35585a2dd6a121c09d2590d", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.2", "b2d17f51244a144849c6b37a3a6791b98a51d86f", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.2-sec", "4fc4e9d28ecb58e0215483343f3dd2fd01178f42", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.1", "4a0aac0d19aa2838c6741fdf95a5276390418dc2", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.1-sec", "89daa3bca3cd1f2967097668c0e8b5f7dda4d57f", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.0", "ce810b29b28abf274afebdcd8fe47b8fba0f28bd", "41"]
|
||||
- ["tj-actions/changed-files", "v35.9.0-sec", "2e61fb6a48f5857e3a338b4cbf071e1164c060e9", "41"]
|
||||
- ["tj-actions/changed-files", "v35.8.0", "7ecfc6730dff8072d1cc5215a24cc9478f55264d", "41"]
|
||||
- ["tj-actions/changed-files", "v35.8.0-sec", "21d7a75834ad73fed7fa33b39b73ebe6495ee4e1", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.12", "b109d83a62e94cf7c522bf6c15cb25c175850b16", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.12-sec", "2be7c3758f3e6e45ae5d27c133a3260c5b0fdd60", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.11", "79adacd43ea069e57037edc891ea8d33013bc3da", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.11-sec", "123dfd48407ae53e33a73e2ae9adf9d8ad8b14d6", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.10", "6e426e6495fa7ea3451f37ce3f1dac2a3f16f62c", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.10-sec", "61bf27253df806648581aaddd4a8ec394b968c80", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.9", "5e2d64b30d51d557c5a29309ecbd5481a236ec77", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.9-sec", "b94d96993dacb3158c51d22c3afae1f4059a71d2", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.8", "e9b5807e928fc8eea705c90da5524fd44b183ba1", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.8-sec", "22bed7e94fbb176468579214290dfd84abc6ea86", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.7", "db5dd7c176cf59a19ef6561bf1936f059dee4b74", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.7-sec", "7795905b24e743c8c33cd5ba5cd256cc92c81f68", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.6", "07f86bcdc42639264ec561c7f175fea5f532b6ce", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.6-sec", "08d9eb809753cbbaf6c8256285605312ce3987b9", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.5", "3a3ec498d8976e74f5dd829c413c1d446e738df7", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.4", "ee137444f0b3b0855cb2fc7df807416ba2c3d311", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.3", "cda290230383045a8887a250c2abf796bf1dc6da", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.2", "9328bab880abf4acc377d77718d28c6ac167f154", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.1", "4e680e146a8e1b530a912f0a1fdc2f0ace7d1bb7", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.1-sec", "7e64030c44ffb4a2e8199e7e105943eb108db836", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.0", "bd376fbcfae914347656e4c70801e2a3fafed05b", "41"]
|
||||
- ["tj-actions/changed-files", "v35.7.0-sec", "1d1543af8cef13eb42c756e9425e2cc50e8030b0", "41"]
|
||||
- ["tj-actions/changed-files", "v35.6.4", "84ed30e2f4daf616144de7e0c1db59d5b33025e3", "41"]
|
||||
- ["tj-actions/changed-files", "v35.6.3", "74b06cafc9658d2a91cc5ceb920fd6b5a5649051", "41"]
|
||||
- ["tj-actions/changed-files", "v35.6.2", "5ce975c6021a0b11062c547acb6c26c96a34a8c5", "41"]
|
||||
- ["tj-actions/changed-files", "v35.6.1", "04124efe7560d15e11ea2ba96c0df2989f68f1f4", "41"]
|
||||
- ["tj-actions/changed-files", "v35.6.0", "3ee6abf6107ccc2d8ee538de7ff6b1fb644f5d60", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.6", "23e3c4300cb904a9d9c36fc2df4111a2fa9b9ff1", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.5", "5a331a4999f9f21a3ef2a6459edee90393a8b92a", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.4", "74338865c1e73fee674ce5cfc5d28f4b9caa33bc", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.3", "d5414fd30b0b7618c815fe7ebe5673720e081937", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.2", "7f2aa19bdcf4a00195671e368091a1e32a694ac5", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.1", "210cc839c24f532fe4fbf510b7b3314ca9a2b90b", "41"]
|
||||
- ["tj-actions/changed-files", "v35.5.0", "db3ea27a0cf07135175be5efe7aaf84df6e0e6f0", "41"]
|
||||
- ["tj-actions/changed-files", "v35.4.4", "57d9664f8e2aa45f26bcb59095f99aa47ae8e90d", "41"]
|
||||
- ["tj-actions/changed-files", "v35.4.3", "0953088baa540166372190bec608cad1603a787d", "41"]
|
||||
- ["tj-actions/changed-files", "v35.4.2", "0562b9f865df79542dfcd59cfbd14c9ac9a792d3", "41"]
|
||||
- ["tj-actions/changed-files", "v35.4.1", "487675b843e203b5c9a92a07f1ed763d046d7283", "41"]
|
||||
- ["tj-actions/changed-files", "v35.4.0", "9a6dabf8d15381f97f1c770257a1a0db59c28a47", "41"]
|
||||
- ["tj-actions/changed-files", "v35.3.2", "7839ede089e483df865be448d6f3652f875005e0", "41"]
|
||||
- ["tj-actions/changed-files", "v35.3.1", "c2296c1b044b4f5c97d310a6d31e95cbcb5583ec", "41"]
|
||||
- ["tj-actions/changed-files", "v35.3.0", "ea251d4d2f03a9c18841ae1b752f58b82dfb4d5e", "41"]
|
||||
- ["tj-actions/changed-files", "v35.2.1", "1d1287f9fafd92be283f99b781fb5f00f00dd471", "41"]
|
||||
- ["tj-actions/changed-files", "v35.2.0", "392359fc8c85be1a8752e9ab6b1ad9e45158b4a9", "41"]
|
||||
- ["tj-actions/changed-files", "v35.1.2", "7f33882a1271950f8592f96b77e694436bfee83b", "41"]
|
||||
- ["tj-actions/changed-files", "v35.1.1", "1d8a2f91371fd14ec6146c37cbae79526144fbe9", "41"]
|
||||
- ["tj-actions/changed-files", "v35.1.0", "0626c3f94002c0a9d7491dd7fed7055bbdff6f92", "41"]
|
||||
- ["tj-actions/changed-files", "v35.0.1", "a2b1e5dbb92d21753cf198228fbf2d0a8557f117", "41"]
|
||||
- ["tj-actions/changed-files", "v35.0.0", "110b9baa5fc65597d65c1d019c6d3aee16d00c53", "41"]
|
||||
- ["tj-actions/changed-files", "v35", "039afcd1024c210363c9d3fc8fd07e1f3fcf2867", "41"]
|
||||
- ["tj-actions/changed-files", "v35-sec", "7e64030c44ffb4a2e8199e7e105943eb108db836", "41"]
|
||||
- ["tj-actions/changed-files", "v34.6.2", "ce4b8e3cba2220de8132ac9721ff754efd6bb7d7", "41"]
|
||||
- ["tj-actions/changed-files", "v34.6.1", "3b6c057cd82d1dafab565df2ba9fa489574a03b8", "41"]
|
||||
- ["tj-actions/changed-files", "v34.6.0", "4f64429e8be26fe81a594635b07ed829581ea847", "41"]
|
||||
- ["tj-actions/changed-files", "v34.5.4", "3f1e44af6ca48144748dfc62a7a6fb22e4ca67f3", "41"]
|
||||
- ["tj-actions/changed-files", "v34.5.3", "74dc2e8a7877b725678a2195226bd470f10c481b", "41"]
|
||||
- ["tj-actions/changed-files", "v34.5.2", "8356a01788b5a36aa0319e74183f3237e020feac", "41"]
|
||||
- ["tj-actions/changed-files", "v34.5.1", "baaf598b46c2d9eb97eb995c9f69d1967349155d", "41"]
|
||||
- ["tj-actions/changed-files", "v34.5.0", "8a4cc4fbd67975557b6d85dd302f5f9400b9c92e", "41"]
|
||||
- ["tj-actions/changed-files", "v34.4.4", "8a7336fb6f6bc00da867b745d3491de42ac0231b", "41"]
|
||||
- ["tj-actions/changed-files", "v34.4.3", "3996bc3fded83a011dbfc57f379fd31266770b3a", "41"]
|
||||
- ["tj-actions/changed-files", "v34.4.2", "ef0a29048c50f844e30fac9fef80956f9765aab8", "41"]
|
||||
- ["tj-actions/changed-files", "v34.4.1", "3ebdc42d8ba53fedc5bef0f16181249ac58446fa", "41"]
|
||||
- ["tj-actions/changed-files", "v34.4.0", "94e6fba8d802f0fa80db51937e8752e9c165ee26", "41"]
|
||||
- ["tj-actions/changed-files", "v34.3.4", "3dbb79f46716e706df6be563a268df44b264b545", "41"]
|
||||
- ["tj-actions/changed-files", "v34.3.3", "991e8b3aae0ebbe0614b15b05d14ccb92affa24a", "41"]
|
||||
- ["tj-actions/changed-files", "v34.3.2", "72d3bb8b336df0723f5c9e9d5875c61bf7bdfe9f", "41"]
|
||||
- ["tj-actions/changed-files", "v34.3.1", "72d3bb8b336df0723f5c9e9d5875c61bf7bdfe9f", "41"]
|
||||
- ["tj-actions/changed-files", "v34.3.0", "5f89dc7d6eefdcb7323e773671fd3461a7c2f050", "41"]
|
||||
- ["tj-actions/changed-files", "v34.2.2", "734bb168e38279dfc7aa2af5d5be3a1475427a99", "41"]
|
||||
- ["tj-actions/changed-files", "v34.2.1", "d2e030b6ed85ce2db7ac1a4afc574640df8bca26", "41"]
|
||||
- ["tj-actions/changed-files", "v34.2.0", "6ba3c59bc6825f1ad375d92a9e70c6b275db0ddd", "41"]
|
||||
- ["tj-actions/changed-files", "v34.1.1", "d0e44775cd5572bb0ead1d7d2e399015644f7359", "41"]
|
||||
- ["tj-actions/changed-files", "v34.1.0", "b91acef304123e58fd6671ab267d6b5e2a7f2ef3", "41"]
|
||||
- ["tj-actions/changed-files", "v34.0.5", "12633630aba2ab48ec2ad8a3344dd736d61a7b89", "41"]
|
||||
- ["tj-actions/changed-files", "v34.0.4", "71840771e95943b1ab0c8f8ae45aeb0a34458e2e", "41"]
|
||||
- ["tj-actions/changed-files", "v34.0.3", "cbfb0fda5afcfbf4ef0ef854bf0d8210abd0866f", "41"]
|
||||
- ["tj-actions/changed-files", "v34.0.2", "932dad31974f07bd23cab5870d45c6e5ad5c8b73", "41"]
|
||||
- ["tj-actions/changed-files", "v34.0.1", "9f289689bb8364780830da00b69507b88b5a2f07", "41"]
|
||||
- ["tj-actions/changed-files", "v34.0.0", "c4d29bf5b2769a725bcc9a723c498ba9c34c05b4", "41"]
|
||||
- ["tj-actions/changed-files", "v34", "ce4b8e3cba2220de8132ac9721ff754efd6bb7d7", "41"]
|
||||
- ["tj-actions/changed-files", "v33.0.0", "aa52cfcd81f1a00a6bf1241a8cad6adec4d80638", "41"]
|
||||
- ["tj-actions/changed-files", "v33", "aa52cfcd81f1a00a6bf1241a8cad6adec4d80638", "41"]
|
||||
- ["tj-actions/changed-files", "v32.1.2", "1d6e210c970d01a876fbc6155212d068e79ca584", "41"]
|
||||
- ["tj-actions/changed-files", "v32.1.1", "8953e851a137075e59e84b5c15fbeb3617e82f15", "41"]
|
||||
- ["tj-actions/changed-files", "v32.1.0", "8de562e9316b23c4473ad852e5fd4f7f2bac7bc8", "41"]
|
||||
- ["tj-actions/changed-files", "v32.0.1", "7c640bd299646362775f9d02e156bc741f67453b", "41"]
|
||||
- ["tj-actions/changed-files", "v32.0.0", "270645280afddc7e2cf3f4867089522c8f2f8f9a", "41"]
|
||||
- ["tj-actions/changed-files", "v32", "1d6e210c970d01a876fbc6155212d068e79ca584", "41"]
|
||||
- ["tj-actions/changed-files", "v31.0.3", "dd7c81416dd9ddc14c594f751cd92c661e13daee", "41"]
|
||||
- ["tj-actions/changed-files", "v31.0.2", "528984a4f814905ea80ed2a3818afc97aef8b0de", "41"]
|
||||
- ["tj-actions/changed-files", "v31.0.1", "75af1a47c484c669beec6a1d00fc9d1d78179725", "41"]
|
||||
- ["tj-actions/changed-files", "v31.0.0", "5184a750a66da08aba414ca223aef75c055956a5", "41"]
|
||||
- ["tj-actions/changed-files", "v31", "dd7c81416dd9ddc14c594f751cd92c661e13daee", "41"]
|
||||
- ["tj-actions/changed-files", "v30.0.0", "402f3827f0f759df60b674e7f52a02d6f4a5af8b", "41"]
|
||||
- ["tj-actions/changed-files", "v30", "402f3827f0f759df60b674e7f52a02d6f4a5af8b", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.9", "f7a56405a89ea095c6230f10e7f1c49daab13b35", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.8", "df4dacaa89cace34cd60d5e9580f041a041e5233", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.7", "602081b5d9327a7770b4c447a4ee8984ae44e72e", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.6", "6e12407521ea9b0d11a4b7ab09b40266bd39496a", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.5", "c5c9b6ff9e75d84d8b69cbf82bcfbf61672ef91e", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.4", "c41b7152594c4423f3787d26662239eb0ae027c0", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.3", "60f4aabced9b4718c75acef86d42ffb631c4403a", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.2", "82edb42dc4e3a5d5edf24cc3ae4b1f55c20cc220", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.1", "18edda74753bbb7090ea030c1f80ef9610ebdff1", "41"]
|
||||
- ["tj-actions/changed-files", "v29.0.0", "bec82ebb3493119ba317fcee8a0d1db09d39d1ac", "41"]
|
||||
- ["tj-actions/changed-files", "v29", "f7a56405a89ea095c6230f10e7f1c49daab13b35", "41"]
|
||||
- ["tj-actions/changed-files", "v28.0.0", "28ac6724247a133793509b5d165d58319b40a171", "41"]
|
||||
- ["tj-actions/changed-files", "v28", "602cf940579b9a2b2db0aafe835bfdb675fac12c", "41"]
|
||||
- ["tj-actions/changed-files", "v27", "5e56dcabdd4a97ea745791856930038be56d9b70", "41"]
|
||||
- ["tj-actions/changed-files", "v26.1", "58ae566dc69a926834e4798bcfe0436ff97c0599", "41"]
|
||||
- ["tj-actions/changed-files", "v26", "7394701157dae4adb4eaa75d8c99e9b2edff81fe", "41"]
|
||||
- ["tj-actions/changed-files", "v25", "36e65a11651994e93d6f1ef3afa781c3dcbb9780", "41"]
|
||||
- ["tj-actions/changed-files", "v24.1", "bf6ddb7db66f9da5b2cffeb28b2b696aacb26e1c", "41"]
|
||||
- ["tj-actions/changed-files", "v24", "6c44eb8294bb9c93d6118427f4ff8404b695e1d7", "41"]
|
||||
- ["tj-actions/changed-files", "v23.2", "b2ee165d6b42ab1740e1037eb93748aad96767c5", "41"]
|
||||
- ["tj-actions/changed-files", "v23.1", "34a865a2b221bd60ec0d4c071f5e7a66ffdac88a", "41"]
|
||||
- ["tj-actions/changed-files", "v23", "fb1fe28aa9ff24afc553b37545437005a4cf2115", "41"]
|
||||
- ["tj-actions/changed-files", "v22.2", "ae90a0b602c90d598c0c027a519493c1a069543e", "41"]
|
||||
- ["tj-actions/changed-files", "v22.1", "bc1dc8f54db8eeeaae00ab92737ab34926b9ad8d", "41"]
|
||||
- ["tj-actions/changed-files", "v22", "3de1f9a283b61f308ee3045be4d301037657225a", "41"]
|
||||
- ["tj-actions/changed-files", "v21", "0edfedf16d9ff0903cbe599d474a022823ca8fb8", "41"]
|
||||
- ["tj-actions/changed-files", "v20.2", "205450238e81d3da0e0ec2d776f58c12846fddfb", "41"]
|
||||
- ["tj-actions/changed-files", "v20.1", "944a8b89098b24b0723ed9264888eb7fcffbbe9a", "41"]
|
||||
- ["tj-actions/changed-files", "v20", "581eef0495dd5b75a3dd93047ff9f0d42dc09370", "41"]
|
||||
- ["tj-actions/changed-files", "v19.3", "e55f7fb99e90111108bc24d3f14156b06ab6a12c", "41"]
|
||||
- ["tj-actions/changed-files", "v19.2", "07b38ce1a17c46f1d0eb1150c8a33f703d473262", "41"]
|
||||
- ["tj-actions/changed-files", "v19.1", "d26252004aa87df12f72411feec056907ecdbadc", "41"]
|
||||
- ["tj-actions/changed-files", "v19", "a6d456f542692915c5289ea834fb89bc07c11208", "41"]
|
||||
- ["tj-actions/changed-files", "v18.7", "a59f800cbb60ed483623848e31be67659a2940f8", "41"]
|
||||
- ["tj-actions/changed-files", "v18.6", "a2f1692a6f703b7a14e155ae404e6bb15538b763", "41"]
|
||||
- ["tj-actions/changed-files", "v18.5", "72aab29255d4fd553ccf1c0fa3223dcc62a2fd84", "41"]
|
||||
- ["tj-actions/changed-files", "v18.4", "e35d0afdc1f0b01f84ec0f4cdf1b179325634b36", "41"]
|
||||
- ["tj-actions/changed-files", "v18.3", "081ee9cc54a7ded6c421c632f23a31dbbe34a5f3", "41"]
|
||||
- ["tj-actions/changed-files", "v18.2", "1f30bd2085b83668fb636f1a1f90744d8adbacca", "41"]
|
||||
- ["tj-actions/changed-files", "v18.1", "227e314ad84036340cab47e649d91b012275a53c", "41"]
|
||||
- ["tj-actions/changed-files", "v18", "ffd30e8dd820b89653c2298acf0447d29dbd0f16", "41"]
|
||||
- ["tj-actions/changed-files", "v17.3", "f5a8de7d36c5909d300d7fcc8d6340d2a56ab9d9", "41"]
|
||||
- ["tj-actions/changed-files", "v17.2", "0bc7d4006fb085334217ec5d6e6c288daade2f59", "41"]
|
||||
- ["tj-actions/changed-files", "v17.1", "a53d74f700f2982646d538e66ce35cbfc8d4e826", "41"]
|
||||
- ["tj-actions/changed-files", "v17", "933541631c41bad3fe20bdbd440ec68afa9a9518", "41"]
|
||||
- ["tj-actions/changed-files", "v16", "4daffbaee17b34b8ae544990906277485819cc16", "41"]
|
||||
- ["tj-actions/changed-files", "v15.1", "4b1f26aed507a21569666773e1c753dfe409d806", "41"]
|
||||
- ["tj-actions/changed-files", "v15", "09441d38eaf8b76cbe2c42e256f46dfb432f63a4", "41"]
|
||||
- ["tj-actions/changed-files", "v14.7", "e44053b6a0e8e7df1aa50a171c46601c605f61bb", "41"]
|
||||
- ["tj-actions/changed-files", "v14.6", "c0dba8199070f01fcea9cd3a4dc42b365f06bf8d", "41"]
|
||||
- ["tj-actions/changed-files", "v14.5", "fd2e9917c337ba7e2222d5aa9e32b27a57a71d14", "41"]
|
||||
- ["tj-actions/changed-files", "v14.4", "2a8a501ad614cd775a2c07537b555783496dc085", "41"]
|
||||
- ["tj-actions/changed-files", "v14.3", "a8ea7202c1c248d93235e87cc59e5b3a9881f558", "41"]
|
||||
- ["tj-actions/changed-files", "v14.2", "88edda5361ed308226d6cb938eaa8b18182750f5", "41"]
|
||||
- ["tj-actions/changed-files", "v14.1", "be68c10267c4979ed30c9397041b052b2980f91f", "41"]
|
||||
- ["tj-actions/changed-files", "v14", "b59431bc7d44f9e8951a290fc7d48879f2ca1939", "41"]
|
||||
- ["tj-actions/changed-files", "v13.2", "68bd279d40fb5bfc976429283b060c6ee426f63c", "41"]
|
||||
- ["tj-actions/changed-files", "v13.1", "2c85495a7bb72f2734cb5181e29b2ee5e08e61f7", "41"]
|
||||
- ["tj-actions/changed-files", "v13", "f276697f3b86a1d897052524507c59f5e173ccd1", "41"]
|
||||
- ["tj-actions/changed-files", "v12.2", "00f80efd45353091691a96565de08f4f50c685f8", "41"]
|
||||
- ["tj-actions/changed-files", "v12.1", "f56e736bedd192c12951db94e83a440885d04eb1", "41"]
|
||||
- ["tj-actions/changed-files", "v12", "019a09d36e5b592a6770a9a71ef1b3efd9a85d37", "41"]
|
||||
- ["tj-actions/changed-files", "v11.9", "3b638a970886ec84db14ad956bb4df9766bd7c50", "41"]
|
||||
- ["tj-actions/changed-files", "v11.8", "b42f932be5b3fee4a990cb3e03478d5da2d4293b", "41"]
|
||||
- ["tj-actions/changed-files", "v11.7", "8dfe0ee3f4840f84a7947b5288b19d7a583755ae", "41"]
|
||||
- ["tj-actions/changed-files", "v11.6", "aae164d51be780a235cdeea89752bbacbbfee3c3", "41"]
|
||||
- ["tj-actions/changed-files", "v11.5", "09a879748c548705ec26508c030b11aad9b5097a", "41"]
|
||||
- ["tj-actions/changed-files", "v11.4", "b54a7ae7259d0729d0b582bac28b05462f16cd64", "41"]
|
||||
- ["tj-actions/changed-files", "v11.3", "902e60737927ccef3713faad3752d84f1153d7ac", "41"]
|
||||
- ["tj-actions/changed-files", "v11.2", "2b51570d5f086eb07a1e527a182773b2045ec26b", "41"]
|
||||
- ["tj-actions/changed-files", "v11.1", "040111b36775c1033b4703b77f9c5c203da18936", "41"]
|
||||
- ["tj-actions/changed-files", "v11", "3b638a970886ec84db14ad956bb4df9766bd7c50", "41"]
|
||||
- ["tj-actions/changed-files", "v10.1", "1d34e69895b85e643b9b259d54f395f0d1e27c10", "41"]
|
||||
- ["tj-actions/changed-files", "v10", "b86b537e2b78397b630cfb1a8d0aec1e03379737", "41"]
|
||||
- ["tj-actions/changed-files", "v9.3", "2a771ad30d623c27165b3677688ebe3f17c49f65", "41"]
|
||||
- ["tj-actions/changed-files", "v9.2", "75933dc40b241db3752ed4c9e2f24cb7cfff51f9", "41"]
|
||||
- ["tj-actions/changed-files", "v9.1", "2c0d12b627191145ce31c2a098d8d37e93b35861", "41"]
|
||||
- ["tj-actions/changed-files", "v9", "7abdbc94e90b9a9b002ad86d8d2a5f9472c3c75c", "41"]
|
||||
- ["tj-actions/changed-files", "v8.9", "675ab58887b9ae58d77d4dcd2d5e58228ab5f185", "41"]
|
||||
- ["tj-actions/changed-files", "v8.8", "8c6f276ea5961fa51474aaa203c6d06226acbaa8", "41"]
|
||||
- ["tj-actions/changed-files", "v8.7", "d825b1f7094e756ca34581aaab611003eaa23975", "41"]
|
||||
- ["tj-actions/changed-files", "v8.6", "0bd70b7aecded5f2eb1f0498c3692433f2453b37", "41"]
|
||||
- ["tj-actions/changed-files", "v8.5", "0fe67a1f15b48dcd40e7ea0dfdd4afc9418febf0", "41"]
|
||||
- ["tj-actions/changed-files", "v8.4", "7bfa539f0d6ed4331d2899e7440a1946929829c1", "41"]
|
||||
- ["tj-actions/changed-files", "v8.3", "d679de9200b28e963362cba99095dd8d9f23d446", "41"]
|
||||
- ["tj-actions/changed-files", "v8.2", "1e10ed49507767257514a643ca1baab24a5496af", "41"]
|
||||
- ["tj-actions/changed-files", "v8.1", "0754fdabe31b721683e1ffc719584df67ad24c87", "41"]
|
||||
- ["tj-actions/changed-files", "v8", "d290bdd91e68dcf1bafe3fa63280666077cbc61c", "41"]
|
||||
- ["tj-actions/changed-files", "v7", "15b1769fc52da64fe168a41ccb01c48b27687149", "41"]
|
||||
- ["tj-actions/changed-files", "v6.3", "2ecd06deb6721d96fd1da0369fc6be39e974edba", "41"]
|
||||
- ["tj-actions/changed-files", "v6.2", "5fe8e4d60450bbe483ca011b747c4a972a79ef07", "41"]
|
||||
- ["tj-actions/changed-files", "v6.1", "7c66aa285d3ec22f1b8442b9a498ebb76ca5f57b", "41"]
|
||||
- ["tj-actions/changed-files", "v6", "2ecd06deb6721d96fd1da0369fc6be39e974edba", "41"]
|
||||
- ["tj-actions/changed-files", "v5.3", "e95bba87d2bd0b2bab4094abd9755a74f16703e6", "41"]
|
||||
- ["tj-actions/changed-files", "v5.2", "7852058eeee10d857e59ce41f3cb465a70c96ae0", "41"]
|
||||
- ["tj-actions/changed-files", "v5.1", "81f32e24026825ecfb7cb5d3951f91cfe788b0ad", "41"]
|
||||
- ["tj-actions/changed-files", "v5.0.0", "450eadf5a0462f8d0b5e99d07d4b6d8f7358420c", "41"]
|
||||
- ["tj-actions/changed-files", "v5", "0e956bb09e9b05df440a2459a041cdec3cc0cc0c", "41"]
|
||||
- ["tj-actions/changed-files", "v4.4", "300e935beb285fcda513be84333e8726d5a544fb", "41"]
|
||||
- ["tj-actions/changed-files", "v4.3", "fcb2ab8c32c2b66fdf94ab3deede353f8fe6f77c", "41"]
|
||||
- ["tj-actions/changed-files", "v4.2", "271bbd60fedbc83dbb8cb00ce88bb4532d940e2f", "41"]
|
||||
- ["tj-actions/changed-files", "v4.1", "e8ace0110cd60a2a0a729d52078ad6cec839dbb9", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.7", "473984bd85c24f1fe61c0494d317cc7d490e1235", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.6", "032f37fd241eeaf66ead8120552a3c6a157d1f22", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.5", "3a35bdf667b36191faf1eea2b8c2cfbb8890bd25", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.4", "c2216f65fdd828a28c41d6c97d242ec39ed694f3", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.3", "0f16c26f3d5699a26be12446509c537ee964c1a8", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.2", "271468ecafc0c12c5f0ce364317a640a5668eba7", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.1", "fb063fc7d459d8ee25f9b3ed48ec83bc5c51df72", "41"]
|
||||
- ["tj-actions/changed-files", "v4.0.0", "a05436ffa9505d25707f781260a99d01cebd0d13", "41"]
|
||||
- ["tj-actions/changed-files", "v4", "c061ef1fa3d028267a34edff2d42a34c8d56ec53", "41"]
|
||||
- ["tj-actions/changed-files", "v3.3", "489e2d514f3a230d66dbf74efec7ceed7b171703", "41"]
|
||||
- ["tj-actions/changed-files", "v3.2", "8d5a33c6034b0991a3fe85b2e73012a689eadf92", "41"]
|
||||
- ["tj-actions/changed-files", "v3.1", "fbfaba544e2ae235b2f88c936bcd5f8aa12419cc", "41"]
|
||||
- ["tj-actions/changed-files", "v3.0.2", "1980f551b48196e1d8aa48fbfd924cedde0d3e13", "41"]
|
||||
- ["tj-actions/changed-files", "v3.0.1", "a86b5608ded2e43fee87cbbde6394e0be7f46a41", "41"]
|
||||
- ["tj-actions/changed-files", "v3.0.0", "f917cc3459f79321da6af2a153cb91ce82a34aaf", "41"]
|
||||
- ["tj-actions/changed-files", "v3", "e18ccae8fe477263087493451ea812d4d36faa4e", "41"]
|
||||
- ["tj-actions/changed-files", "v2.1", "e1d275d6d3255d6a586052675d3c5cef793edccf", "41"]
|
||||
- ["tj-actions/changed-files", "v2.0.1", "00f80efd45353091691a96565de08f4f50c685f8", "41"]
|
||||
- ["tj-actions/changed-files", "v2.0.0", "9c1a181e67797cd053d15062eda07b2b322bbbfe", "41"]
|
||||
- ["tj-actions/changed-files", "v2", "5eaa2d80dddfe7de6f7cc75fcaeb554851737685", "41"]
|
||||
- ["tj-actions/changed-files", "v1.3.1", "188487d180e816622215bd011cbaca666af41ed9", "41"]
|
||||
- ["tj-actions/changed-files", "v1.3.0", "30988915fa46789ba51cc1436c92488a52ac44ee", "41"]
|
||||
- ["tj-actions/changed-files", "v1.2.2", "467d26c8b77612d9f7d20df5271edc207eae69a7", "41"]
|
||||
- ["tj-actions/changed-files", "v1.2.1", "d9eb683b30e5b231c948331ad364b991fa8be544", "41"]
|
||||
- ["tj-actions/changed-files", "v1.2.0", "09a879748c548705ec26508c030b11aad9b5097a", "41"]
|
||||
- ["tj-actions/changed-files", "v1.1.3", "8e7cc77ab9c1bffc233f2f3023d1b89ed44c9af5", "41"]
|
||||
- ["tj-actions/changed-files", "v1.1.2", "81ad4b874479c31a00285815995079e20c6c2779", "41"]
|
||||
- ["tj-actions/changed-files", "v1.1.1", "5e2a2f192377df7d67537b0e788e1b53e8a76f12", "41"]
|
||||
- ["tj-actions/changed-files", "v1.1.0", "1af9ab38306a2fa478c9772eabab167444dbc755", "41"]
|
||||
- ["tj-actions/changed-files", "v1.0.3", "55a857d66a8e01f50a2a37d18239edde79b1668d", "41"]
|
||||
- ["tj-actions/changed-files", "v1.0.2", "62a9200adfe8200623dcd28ca74973e82baa954c", "41"]
|
||||
- ["tj-actions/changed-files", "v1.0.1", "b915d091052b9d35e7c200d1da10cc6e2ec266e2", "41"]
|
||||
- ["tj-actions/changed-files", "v1.0.0", "f0751de6af436d4e79016e2041cf6400e0833653", "41"]
|
||||
- ["tj-actions/changed-files", "v1", "eef94236f6b9dec768f89dc72b9e0b64e13bb36e", "41"]
|
||||
|
||||
# tj-actions/verify-changed-files
|
||||
# https://github.com/advisories/GHSA-ghm2-rq8q-wrhc
|
||||
# CVE-2023-52137
|
||||
- ["tj-actions/verify-changed-files", "v16.1.1", "54e20d3c522fbeed99ebaf2e38a1eb33214c58ba", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v16.1.0", "a9b6fd340565065ad293625200630be7fd2b0f13", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v16.0.1", "30aa174f53f67ecd5dc8e190dfbe46392202e5a5", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v16.0.0", "7f1b21ceb7ef533b97b46e89e2f882ee5cb17ae0", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v16", "54e20d3c522fbeed99ebaf2e38a1eb33214c58ba", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v15.0.2", "0409e189c445fab593a10a28e19663f0b012b5a5", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v15.0.1", "7da22d0521c254e711e5988bd2c7d48c2948d137", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v15.0.0", "7016858e130743cc6c6b472849411d40aa8ae1ce", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v15", "0409e189c445fab593a10a28e19663f0b012b5a5", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v14.0.2", "7517b838f3a0d51de4b334a61ef1330672118927", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v14.0.1", "bad2f5d7fc7e6812ac48d7e7207025a5a4cc93d3", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v14.0.0", "3b573ace62e287c3d68e24e4de2ee0c6f6280d86", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v14", "7517b838f3a0d51de4b334a61ef1330672118927", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v13.2.0", "f557547e643700f439745119efed5aac390db75d", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v13.1", "9ed3155b72ba709881c967f75611fc5852f773b9", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v13", "f557547e643700f439745119efed5aac390db75d", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v12.0", "a3391b5a01114c49c3a8d55181a9ff4c99bf0db7", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v12", "a3391b5a01114c49c3a8d55181a9ff4c99bf0db7", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v11.1", "1d7ee9711b0a8f675208004e66bc25d593a1a0ae", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v11", "c4322970b4f055ede155b95586b04562796f83b7", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v10.1", "6e986dfff1f61105bc496287b5bbf0776092737e", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v10", "fa6ea307b32e5314d4a62b1209c3c782d5b5dcc9", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v9.2", "6f40ee1d523d9a9223204ae06919a3b2739702dc", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v9.1", "1b13d2556290c5ca5a94b7d042b91f3519c17d38", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v9", "c09bcad97929b17bacf737670bee312af98be94f", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.8", "fda469d6b456070da68fa3fdbc07a513d858b200", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.7", "bd1e271a8d26e249e0412899d4e3d8f5a89ecd6c", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.6", "367ba21c800e2a2b1451e272d24cf0caa3e4f9e4", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.5", "9dea97ec0f35d708d32dadd9b34a6af7cc28b19f", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.4", "c154cc6a77695d4483937745499e07fee62addd3", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.3", "527ff7533afca6e5bece96bd15a998f90f54c624", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.2", "e8756d59f6d66ad7376c293832e4d6eda8ae3257", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8.1", "bcb4e766c132157cda3d1e8c7ca3d68d86d6ae6b", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v8", "25267f57f3afa6c59f1495e52da8b08c2c586606", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v7.2", "ea24bfd8ba4b019cb321502a4382a7a44b6ebc01", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v7.1", "f2a40baded88e47fa3f8e0f614832835194f4904", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v7", "197e12135dd5eaedd520a27882d17c1f384cf6a0", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v6.2", "a8f1b11a7c4dfc6706d8c64416dda0ef85d06e77", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v6.1", "95c26dda77430743cb3542d24b3e739417f5a881", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v6", "97ba4ccf1285bdfca165bc0b0a7cb1f994dae04e", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.7", "68310bb8f2a087df9f6ab1a2cc07c1e7cfc8ea28", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.6", "720ba6a5776e8687117603acab16000c0fc8868b", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.5", "cedd7096b7f23ae0307d7d82f516d666580579b3", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.4", "d68d3d232ffbba653ab0227d4bb2001cda681d12", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.3", "2e1153b8d1546dea7cd1a9db9834daceb72af17a", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.2", "c3dd6355e363eab778c129867f91da02e3285961", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5.1", "81bd1de29366c53364b43cf83c4a4ddcab53b571", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v5", "31a9c7487cc1096253faa121489f4dbb32ca4132", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v4", "e981d37638f538ab477279c9f1fb6048462fd161", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.4", "e7f801cef44ca52e9aa496526dcd71daf5ef8437", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.3", "e86d0b9d1805c4e84fc90d4bcdab7371e14173d2", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.2", "ad255a4b81fa69c78f5fd1bb8ac95739dd3a9580", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.1", "3a8aed1f8847cc121e5f08e8963755154bb9df9e", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.gamma", "de910b5a2cdd6814c6e41d2b7c6f678eb75d430a", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.g", "d31b2a1fd119abbeddd18df3d95001a141b37372", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.beta", "e61c6fc5323423d2f0d9f04c7d15fa52af1084b0", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.b", "380890dc80695b7aa8047c0f824f87234defabd7", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.alpha", "873cfd676aea5e2a04b3f16706bd590effb5023e", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3.0.a", "b0c60c86ab292cabeb4b4dc9f34c296c314fdfbb", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v3", "71831832d68f9fa5b527a9d692df35e1626ddfa2", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v2.0a", "6555389afba06cce81bc2f57a191d54f380ece0a", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v2", "9828a95864031bd113695ad5c68944163008d861", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v1.0.1", "8150cee7a747364d6b113cf8b0f59af88453a161", "17"]
|
||||
- ["tj-actions/verify-changed-files", "v1", "48ddf88305af39076d425f86f0617d6f7ff23d58", "17"]
|
||||
|
||||
# tj-actions/branch-names
|
||||
# https://github.com/advisories/GHSA-8v8w-v8xg-79rf
|
||||
# CVE-2023-49291
|
||||
- ["tj-actions/branch-names", "v7.0.6", "ab304d8562e2f137165e1d930e6d22d431189074", "7.07"]
|
||||
- ["tj-actions/branch-names", "v7.0.5", "033f2358d95522973eee35810e35a86fae4a71d8", "7.07"]
|
||||
- ["tj-actions/branch-names", "v7.0.4", "f7cfbc8edeb70a87ebec52e94fa8366f5077d0bc", "7.07"]
|
||||
- ["tj-actions/branch-names", "v7.0.3", "309671a59e1143038c2a50f009b6adf301f6aa71", "7.07"]
|
||||
- ["tj-actions/branch-names", "v7.0.2", "636cfe47b2002897ee4d3f07792c9fdd5d7dc725", "7.07"]
|
||||
- ["tj-actions/branch-names", "v7.0.1", "4e532392367d7e4fb2f494f2d50c47562660cce5", "7.07"]
|
||||
- ["tj-actions/branch-names", "v7.0.0", "604fda4f4254216e3b564d60fe27d68017756558", "7.07"]
|
||||
- ["tj-actions/branch-names", "v6.5", "2e5354c6733793113f416314375826df030ada23", "7.07"]
|
||||
- ["tj-actions/branch-names", "v6.4", "eee8675bd61ec38bcfbfedd504d8473292ba649e", "7.07"]
|
||||
- ["tj-actions/branch-names", "v6.3", "a594c1e96eab7790611fdaf5bc8f76ea55cedabd", "7.07"]
|
||||
- ["tj-actions/branch-names", "v6.2", "b90df97be1c548ac9c8bd9186bfea6747153bf5e", "7.07"]
|
||||
- ["tj-actions/branch-names", "v6.1", "09ab61130975078eb7cde103fe8d2ae1649a1853", "7.07"]
|
||||
- ["tj-actions/branch-names", "v6", "2e5354c6733793113f416314375826df030ada23", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5.6", "63b65253bc9542d36a60646299bd8c9af6d9ce7e", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5.5", "a704b89383028b5df2a4fd0b9fac9711970f18be", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5.4", "b0f914ba0e7aa1e243b53df97447f71eb57da09a", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5.3", "e0e3be64a3f10f671bb526b715f86a8a834dce75", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5.2", "9cd06d955f4184031cd71fbb1717ac268ade2ee0", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5.1", "b99758d88d96a27ee98b444451c1602a4507d243", "7.07"]
|
||||
- ["tj-actions/branch-names", "v5", "dc2e78ac9284175fdc0f2d505d8b49ef99632ea8", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.9", "12c1d475292ae9bb96656e80c24172db3cd60ffb", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.8", "af5c6741e639608a1c0e87eaa3c0c414d427d9e4", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.7", "28a6a95bc5bcc69b16010647668f1c5c4fd0dcca", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.6", "b0fc3aebc2f3fb8edfd024aea4dc8a073d10db88", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.5", "a0061fbc59329b02d6c530f25b9d3fc80340a792", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.4", "ce1737e426445fcb5b05a09e984b66d0b27548ba", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.3", "47910e48331f8d64a4d535a35e9540c1ebf767f7", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.2", "f107226331b387d31308ceb1b5767b52024508e8", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4.1", "98c04d51ee204c4f23daee8ee15af9e8e80e36b2", "7.07"]
|
||||
- ["tj-actions/branch-names", "v4", "f107226331b387d31308ceb1b5767b52024508e8", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3.6", "3e0215fc2dd14b3e395f99b5e2cc1e4d93afe1b6", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3.5", "b587231a9abec0da6f45dbaea42d88a9c130ee8f", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3.4", "dd9939e9966a18c8ce9bfcf188731c4746faf197", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3.3", "509c3124abef4caaeb784a5aa6f465da588e0c43", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3.2", "ae7cf1163ab1375b4bbf5ec6d16a686118dac27d", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3.1", "eb14b2dffd7af08b599b691d72b757ae607675bd", "7.07"]
|
||||
- ["tj-actions/branch-names", "v3", "fdb3a42221b1ee981def2a3e7767bd3ffcda0ff7", "7.07"]
|
||||
- ["tj-actions/branch-names", "v2.2", "4362da73333d3a6ecf81047f6ae055cad78fcb38", "7.07"]
|
||||
- ["tj-actions/branch-names", "v2.1", "8c72ffde4df03225c479f93fef608d8cdd1042f3", "7.07"]
|
||||
- ["tj-actions/branch-names", "v2", "8307330ac59a26bd125a6f99c33820dd0baf439f", "7.07"]
|
||||
- ["tj-actions/branch-names", "v1", "549ca323b2179ffc0f7f828b555e88fe53da3787", "7.07"]
|
||||
|
||||
# gradle/gradle-build-action
|
||||
# https://github.com/advisories/GHSA-h3qr-39j9-4r5v
|
||||
# CVE-2023-30853
|
||||
- ["gradle/gradle-build-action", "v2.4.1", "5056fa9d50478a14af3c9925c12ca02318659d3e", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.4.0", "6095a76664413da4c8c134ee32e8a8ae900f0f1f", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.3.3", "3fbe033aaae657f011f88f29be9e65ed26bd29ef", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.3.2", "fd32ae908111fe31afa48827bd1ee909540aa971", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.3.1", "c295a4096e1d2c453eaf1f65c6f96686e26bd8be", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.3.0", "356abb47e7664b5505e25d7997a5a522a17c62d9", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.3.0-beta.1", "d427a379a8cc30e1c773080ce783e7e6d5167584", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.5", "cd579d970f8aec1cf0cae5f62a8e418768970015", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.4", "bf2a15ee94874758c21b91220b4d0ab84f762423", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.3", "9411346324b44f5402cbef3ac5a83a411086aa9a", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.2", "cd3cedc781988c804f626f4cd2dc51d0bdf02a12", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.1", "67421db6bd0bf253fb4bd25b31ebb98943c375e1", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.0", "e88ed3e650b26bd116cfee53cf198c1f6856682d", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.0-rc.2", "de51428ba55149e7c6f6957a566b8759efd425de", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.0-rc.1", "63bcd47c1be270a660a151ce2b7848b8730f06ef", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.2.0-beta.1", "26ea4afa082ddf7e3e5bcf6d12283111b6f3f837", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.7", "9b814496b50909128c6a52622b416c5ffa04db49", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.6", "116ac10f8131939c7e405884cb2456067b0479e9", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.5", "fec4a42eb0c83154e5c9590748ba8337949c5701", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.4", "0d13054264b0bb894ded474f08ebb30921341cee", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.3", "937999e9cc2425eddc7fd62d1053baf041147db7", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.2", "bc3340afc5e3cc44f2321809ac090d731c13c514", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.1", "b9c806c75d3cb8998f905077e62bb670e7fa7e02", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.1.0", "3edb3cb004617998d8cf56fe2ebf9d59602e713e", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0.1", "996094e8e808208e5738e8413b3f55d24d1c1eb7", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0.0", "4137be6a8bf7d7133955359dbd952c0ca73b1021", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-rc.3", "4e899835b3bddb7d01d3a988e6c53d67ec8a76e2", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-rc.2", "2a57ddf74a257b005f65f70cbf15e8e7f06292d9", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-rc.1", "db2b34260fe57577fec47305e78a20755eef0441", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.7", "cba1833ddecbbee649950c284416981928631008", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.6", "a94b9252d5d8ca83eed3f76a856f2ba046b1b3c6", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.5", "263f84178a82449371326ba2c1d781bc4b4bb9ac", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.4", "29894757f3fd1d4752e4efadb74896d39873a0ae", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.3", "c000a0b58fe0ad402c613a864ea3ed26d6e88fd0", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.2", "21dee7159020ab3140bebfd2280a6f34ef4e08ae", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v2.0-beta.1", "bebb162342333983b660d21f31c90f33950f5023", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.5.1", "b3afdc78a7849557ab26e243ccf07548086da025", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.5.0", "e0c2736e35d366e96bb202d1af817db9d562da2f", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.4.1", "3f3947669a3fe6883ed8dab14671bdc6042ec2d9", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.4.0", "579711fd3cd8691fbc0cab64db65e9c1e586658e", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.3.3", "90ccf054e6b9905f30f98c938bce4c6acd323b6b", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.3.2", "c6b57b9c8c4f72268b10f151623ce6a2855c6387", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.3.1", "791b98c5656178712736d390e91be71eadfe192e", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.3.0", "27da3e28b3c4cc84c9e7965dc2371f969e582049", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.2.1", "e220e54c83b8f1a546d8e6d598490231fe2bf64b", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.2.0", "720051268d4728af6b7e0defa8ed8097b20ef218", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.1.0", "d0c5f7955e911444399df5d044916a49bdccff00", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.0.2", "064f85c1568a6fd57b32d8f98c0dc9f237c59156", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.0.1", "6170f06e8dd334a7f6879781c2ed4889c4cc76bf", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1.0.0", "2d5ca45eab01ff2ce82777ab670ff2bd5d8cf8d5", "2.4.2"]
|
||||
- ["gradle/gradle-build-action", "v1", "b3afdc78a7849557ab26e243ccf07548086da025", "2.4.2"]
|
||||
|
||||
# rlespinasse/github-slug-action
|
||||
# https://github.com/advisories/GHSA-6q4m-7476-932w
|
||||
# CVE-2023-27581
|
||||
- ["rlespinasse/github-slug-action", "v4.4.1", "102b1a064a9b145e56556e22b18b19c624538d94", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v4.4.0", "a362e5fb42057a3a23a62218b050838f1bacca5d", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v4.3.2", "b011e83cf8cb29e22dda828db30586691ae164e4", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v4.3.1", "00198f89920d4454e37e4b27af2b7a8eba79c530", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v4.3.0", "9c3571fd3dba541bfdaebc001482a49a1c1f136a", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v4.2.5", "0141d9b38d1f21c3b3de63229e20b7b0ad7ef0f4", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v3.9.0", "2daab132aa3a6e23ea9d409f9946b3bf6468cc77", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v3.8.0", "4a00c29bc1c0a737315b4200af6c6991bb4ace18", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v3.7.1", "5150a26d43ce06608443c66efea46fc6f3c50d38", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v3.7.0", "ebfc49c0e9cd081acb7ba0634d8d6a711b4c73cf", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v3", "2daab132aa3a6e23ea9d409f9946b3bf6468cc77", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v3.x", "2daab132aa3a6e23ea9d409f9946b3bf6468cc77", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v2.x", "9d2c65418d6ecbbd3c08e686997b30482e9f4a80", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "v1.1.x", "fbf6d7b9c7af4e8d06135dbc7d774e717d788731", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.2.5", "0141d9b38d1f21c3b3de63229e20b7b0ad7ef0f4", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.2.4", "33cd7a701db9c2baf4ad705d930ade51a9f25c14", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.2.3", "1615fcb48b5315152b3733b7bed1a9f5dfada6e3", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.2.2", "4177734b38a3d59604747bf47e537ccb6bcb9cdf", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.2.1", "7a3b4c1766ad8e6d23ab37d33417392509ff84e2", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.2.0", "dbbe21b72b96929fe6e67275c332f43599b31274", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.1.0", "88f3ee8f6f5d1955de92f1fe2fdb301fd40207c6", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.0.1", "cd9871b66e11e9562e3f72469772fe100be4c95a", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "4.0.0", "bd31a9f564f7930eea1ecfc8d0e6aebc4bc3279f", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.6.1", "1bf76b7bc6ef7dc6ba597ff790f956d9082479d7", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.6.0", "172fe43594a58b5938e248ec757ada60cdb17e18", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.5.1", "016823880d193a56b180527cf7ee52f13c3cfe33", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.5.0", "4060fda2690bcebaabcd86db4fbc8e1c2817c835", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.4.0", "0c099abd978b382cb650281af13913c1905fdd50", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.3.0", "d1880ea5b39f611effb9f3f83f4d35bff34083a6", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.2.0", "c8d8ee50d00177c1e80dd57905fc61f81e437279", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.1.0", "e4699e49fcf890a3172a02c56ba78d867dbb9fd5", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "3.0.0", "6a873bec5ac11c6d2a11756b8763356da63a8939", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "2.2.0", "9d2c65418d6ecbbd3c08e686997b30482e9f4a80", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "2.1.1", "72cfc4cb1f36c102c48541cb59511a6267e89c95", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "2.1.0", "1172ed1802078eb665a55c252fc180138b907c51", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "2.0.0", "ca9a67fa1f1126b377a9d80dc1ea354284c71d21", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "1.2.0", "fbf6d7b9c7af4e8d06135dbc7d774e717d788731", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "1.1.1", "242e04c2d28ac5db296e5d8203dfd7dc6bcc17a9", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "1.1.0", "881085bcae8c3443a89cc9401f3e1c60fb014ed2", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "1.0.2", "a35a1a486a260cfd99c5b6f8c6034a2929ba9b3f", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "1.0.1", "e46186066296e23235242d0877e2b4fe54003d54", "4.4.1"]
|
||||
- ["rlespinasse/github-slug-action", "1.0.0", "9671420482a6e4c59c06f2d2d9e0605e941b1287", "4.4.1"]
|
||||
|
||||
# Azure/setup-kubectl
|
||||
# https://github.com/advisories/GHSA-p756-rfxh-x63h
|
||||
# CVE-2023-23939
|
||||
- ["Azure/setup-kubectl", "v2.1", "6025c840858f1afa584a5190a4426c338f59e503", "3"]
|
||||
- ["Azure/setup-kubectl", "v2.0", "7ad2aa66bb42774adf65a0c580fbc96b2dadd747", "3"]
|
||||
- ["Azure/setup-kubectl", "v1", "a625ca209b0faaa8871dac8fb5f50ee4b4d22622", "3"]
|
||||
|
||||
# gajira-create
|
||||
# https://github.com/advisories/GHSA-4xqx-pqpj-9fqw
|
||||
# CVE-2020-14188
|
||||
- ["atlassian/gajira-create", "v2.0.0", "77d13eab156b8ad1c08c0655011b8a442c502998", "2.0.1"]
|
||||
- ["atlassian/gajira-create", "v1.0.3", "14c3d657c383981ee595d9750f68d7e4e77d64d0", "2.0.1"]
|
||||
- ["atlassian/gajira-create", "v1.0.1", "2cd32e0738e2b31717e7119717fed83e482d2a36", "2.0.1"]
|
||||
- ["atlassian/gajira-create", "v1.0.0", "f11e88bf4a1358e741ac282bc198a4f21cb719a1", "2.0.1"]
|
||||
|
||||
# hashicorp/vault-action
|
||||
# https://github.com/advisories/GHSA-4mgv-m5cm-f9h7
|
||||
# CVE-2021-32074
|
||||
- ["hashicorp/vault-action", "v2.1.2", "5e5c06a3c8e96b7c4757fe7a10e03469cdbd07bb", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v2.1.1", "2fb78ab91e55be5479aacf74f7b451eab79773a4", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v2.1.0", "2ca76a4465bca4f71fc88320e67551a287f7eaec", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v2.0.1", "952d5d48e4448ad364651cc473aeccc25bd169d9", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v2.0.0", "e27b45646f82a319c8157e545e24b7588510a397", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v1.0.1", "22e3f3e09e3baba4d6cc62823175d21fafe4e30a", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v1.0.0", "727494f451d57cbfc932a1d8bce1b0a027d99a8b", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.10.2", "9878eba70ad6c6e21a01bd1e2debd3f3b7cbc46e", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.10.1", "567ec72c33597ee9feca8bed4611a8ace38330c2", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.10.0", "5c464962be8937589f883cf209d21b3982c92360", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.9.0", "50ece41861b565239528923369690fc43cc0050b", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.8.0", "4ab6f6070f5be6702101c9736961beb8105e8708", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.7.0", "4edbc9a77a84bd34b0da2e8b8d527871b6103aae", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.6.2", "7d1d7d26adb265e6ebc6018ce2b92be7c5a7c63c", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.6.1", "f9753d75ef0cdafe621cda2323b5dcc4d673d01a", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.6.0", "0188d9d223dac8b24b94b04d3253bf0fe0365ca7", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.5.0", "f229481670b4719a05f01e8fd8478c191a373c43", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.4.0", "3b9239de79207bf3fba80a16916f257918ab1d15", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.3.1", "ab4dc55b2ecc6eb5926c5caffa45eaf0c3ad735a", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.3.0", "3747195c5f2848179bf615690b3e66e69a5e4dc7", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.2.2", "da9a93f3f5bec24febf304139a6cbe61f0f8ad5e", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.2.1", "6784ab38963b266384880094ff02eb13334802f4", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.2.0", "6784ab38963b266384880094ff02eb13334802f4", "2.2.0"]
|
||||
- ["hashicorp/vault-action", "v0.1.0", "19c0b21a1ddb75543178ac4a250b5b7cff7fd55a", "2.2.0"]
|
||||
|
||||
# check-spelling/check-spelling
|
||||
# https://github.com/advisories/GHSA-g86g-chm8-7r2p
|
||||
# CVE-2021-32724
|
||||
- ["check-spelling/check-spelling", "v0.0.18", "08f08a6ff6b9ebae06cb8fe463374a8a5a37e03c", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.17-alpha", "ead83f4596b4aac06f698b501b5beb3218f6214d", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.16-alpha", "5f7f35b25e6bce7b1e5a8f226369a86ab19a623e", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.15-alpha", "d8f2d9ec30e38ffae03410088062714ac04c36cd", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.14-alpha", "67ea89eaff703694453dbfd346c4c31dfab646fc", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.13-alpha", "a9db57b850b66cb664373f19f6628c4ee39fbcb5", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.12-alpha", "22b3d11338aea9482eda87725ab15b8862de4061", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.11-alpha", "10d8401e72f7b4752a765b61ecbd1539394d6f4e", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.10-alpha", "c79ba85e2b8e45ef0a8da9eb0d16e7f2135ad2c6", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.9-alpha", "13d6bbcc0a082113d1c2d33ea41fcbe915e62de9", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.8-alpha", "6505ab5f1ebbe080fc072ea3cf68bac289f419ac", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.7-alpha", "a27e3104c5c8d69c2986d22c938e679ec0f1b2c7", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.6-alpha", "8a7dfc447cd58195531f7c313f6ff693f0e2eb89", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.5-alpha", "e584b835f290270af78538013634f348d6cc7398", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.4-alpha", "cb465b08587798aa788dfd9bc345c2c982ac9e29", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.3-alpha", "b8e280ae90b28f1aadc50f93073aa6450afe820d", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.2-alpha", "8e32de8a016bc4dce4170ec36881cbb315f94ff4", "0.0.19"]
|
||||
- ["check-spelling/check-spelling", "0.0.1-alpha", "d2d0ee06c72600982d2f80bca187ce90fee6ad94", "0.0.19"]
|
||||
9
actions/ql/lib/ext/config/workflow_runtime_data.yml
Normal file
9
actions/ql/lib/ext/config/workflow_runtime_data.yml
Normal file
@@ -0,0 +1,9 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: repositoryDataModel
|
||||
data: []
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: workflowDataModel
|
||||
data: []
|
||||
@@ -0,0 +1,14 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["actions/actions-runner-controller", "*", "input.image-tag", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.image-name", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.arc-controller-namespace", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.arc-namespace", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.arc-name", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.repo-name", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.repo-owner", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.workflow-file", "code-injection", "generated"]
|
||||
- ["actions/actions-runner-controller", "*", "input.auth-token", "code-injection", "generated"]
|
||||
@@ -0,0 +1,9 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["adap/flower", "*", "input.poetry-version", "code-injection", "generated"]
|
||||
- ["adap/flower", "*", "input.setuptools-version", "code-injection", "generated"]
|
||||
- ["adap/flower", "*", "input.pip-version", "code-injection", "generated"]
|
||||
- ["adap/flower", "*", "input.python-version", "code-injection", "generated"]
|
||||
@@ -0,0 +1,11 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["agoric/agoric-sdk", "*", "input.xsnap-random-init", "code-injection", "generated"]
|
||||
- ["agoric/agoric-sdk", "*", "input.path", "code-injection", "generated"]
|
||||
- ["agoric/agoric-sdk", "*", "input.ignore-endo-branch", "code-injection", "generated"]
|
||||
- ["agoric/agoric-sdk", "*", "input.codecov-token", "code-injection", "generated"]
|
||||
- ["agoric/agoric-sdk", "*", "input.datadog-token", "code-injection", "generated"]
|
||||
- ["agoric/agoric-sdk", "*", "input.datadog-site", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["airbnb/lottie-ios", "*", "input.xcode", "code-injection", "generated"]
|
||||
@@ -0,0 +1,7 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["airbytehq/airbyte", "*", "input.options", "code-injection", "generated"]
|
||||
- ["airbytehq/airbyte", "*", "input.subcommand", "code-injection", "generated"]
|
||||
@@ -0,0 +1,7 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["amazon-ion/ion-java", "*", "input.project_version", "code-injection", "generated"]
|
||||
- ["amazon-ion/ion-java", "*", "input.repo", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["anchore/grype", "*", "input.bootstrap-apt-packages", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["anchore/syft", "*", "input.bootstrap-apt-packages", "code-injection", "generated"]
|
||||
@@ -0,0 +1,10 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["angular/dev-infra", "*", "input.firebase-public-dir", "code-injection", "generated"]
|
||||
- ["angular/dev-infra", "*", "input.workflow-artifact-name", "code-injection", "generated"]
|
||||
- ["angular/dev-infra", "*", "input.artifact-build-revision", "code-injection", "generated"]
|
||||
- ["angular/dev-infra", "*", "input.pull-number", "code-injection", "generated"]
|
||||
- ["angular/dev-infra", "*", "input.deploy-directory", "code-injection", "generated"]
|
||||
@@ -0,0 +1,7 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["ansible/ansible-lint", "*", "input.args", "code-injection", "generated"]
|
||||
- ["ansible/ansible-lint", "*", "input.working_directory", "code-injection", "generated"]
|
||||
@@ -0,0 +1,7 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["ansible/awx", "*", "input.log-filename", "code-injection", "generated"]
|
||||
- ["ansible/awx", "*", "input.github-token", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/arrow-datafusion", "*", "input.rust-version", "code-injection", "generated"]
|
||||
@@ -0,0 +1,7 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/arrow-rs", "*", "input.target", "code-injection", "generated"]
|
||||
- ["apache/arrow-rs", "*", "input.rust-version", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/arrow", "*", "input.upload", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/bookkeeper", "*", "input.mode", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/brpc", "*", "input.options", "code-injection", "generated"]
|
||||
@@ -0,0 +1,17 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/camel-k", "*", "input.test-suite", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.image-version", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.image-registry-insecure", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.image-name", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.image-registry-host", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.catalog-source-namespace", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.catalog-source-name", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.image-namespace", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.version", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.otlp-collector-image-version", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.otlp-collector-image-name", "code-injection", "generated"]
|
||||
- ["apache/camel-k", "*", "input.global-operator-namespace", "code-injection", "generated"]
|
||||
@@ -0,0 +1,11 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/camel", "*", "input.end-commit", "code-injection", "generated"]
|
||||
- ["apache/camel", "*", "input.start-commit", "code-injection", "generated"]
|
||||
- ["apache/camel", "*", "input.distribution", "code-injection", "generated"]
|
||||
- ["apache/camel", "*", "input.version", "code-injection", "generated"]
|
||||
- ["apache/camel", "*", "input.pr-id", "code-injection", "generated"]
|
||||
- ["apache/camel", "*", "input.mode", "code-injection", "generated"]
|
||||
@@ -0,0 +1,10 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/flink", "*", "input.maven-parameters", "code-injection", "generated"]
|
||||
- ["apache/flink", "*", "input.env", "code-injection", "generated"]
|
||||
- ["apache/flink", "*", "input.target_directory", "code-injection", "generated"]
|
||||
- ["apache/flink", "*", "input.source_directory", "code-injection", "generated"]
|
||||
- ["apache/flink", "*", "input.jdk_version", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSummaryModel
|
||||
data:
|
||||
- ["apache/incubator-kie-tools", "*", "input.pnpm_filter_string", "output.pnpm_filter_string", "taint", "manual"]
|
||||
@@ -0,0 +1,8 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/nuttx", "*", "input.haskell", "code-injection", "generated"]
|
||||
- ["apache/nuttx", "*", "input.dotnet", "code-injection", "generated"]
|
||||
- ["apache/nuttx", "*", "input.android", "code-injection", "generated"]
|
||||
@@ -0,0 +1,9 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/opendal", "*", "input.feature", "code-injection", "generated"]
|
||||
- ["apache/opendal", "*", "input.setup", "code-injection", "generated"]
|
||||
- ["apache/opendal", "*", "input.service", "code-injection", "generated"]
|
||||
- ["apache/opendal", "*", "input.target", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/pekko", "*", "input.upload", "code-injection", "generated"]
|
||||
@@ -0,0 +1,12 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/pulsar-helm-chart", "*", "input.limit-access-to-users", "code-injection", "generated"]
|
||||
- ["apache/pulsar-helm-chart", "*", "input.limit-access-to-actor", "code-injection", "generated"]
|
||||
- ["apache/pulsar-helm-chart", "*", "input.secure-access", "code-injection", "generated"]
|
||||
- ["apache/pulsar-helm-chart", "*", "input.action", "code-injection", "generated"]
|
||||
- ["apache/pulsar-helm-chart", "*", "input.yamale_version", "code-injection", "generated"]
|
||||
- ["apache/pulsar-helm-chart", "*", "input.yamllint_version", "code-injection", "generated"]
|
||||
- ["apache/pulsar-helm-chart", "*", "input.version", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["apache/superset", "*", "input.requirements-type", "code-injection", "generated"]
|
||||
@@ -0,0 +1,7 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["appflowy-io/appflowy", "*", "input.test_path", "code-injection", "generated"]
|
||||
- ["appflowy-io/appflowy", "*", "input.flutter_profile", "code-injection", "generated"]
|
||||
@@ -0,0 +1,8 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["aptos-labs/aptos-core", "*", "input.GIT_CREDENTIALS", "code-injection", "generated"]
|
||||
- ["aptos-labs/aptos-core", "*", "input.GCP_DOCKER_ARTIFACT_REPO", "code-injection", "generated"]
|
||||
- ["aptos-labs/aptos-core", "*", "input.IMAGE_TAG", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["archivesspace/archivesspace", "*", "input.mysql-connector-url", "code-injection", "generated"]
|
||||
@@ -0,0 +1,6 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["armadaproject/armada", "*", "input.tox-env", "code-injection", "generated"]
|
||||
@@ -0,0 +1,14 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["armbian/build", "*", "input.armbian_pgp_password", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_extensions", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_release", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_kernel_branch", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_board", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_target", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_branch", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_ui", "code-injection", "generated"]
|
||||
- ["armbian/build", "*", "input.armbian_version", "code-injection", "generated"]
|
||||
@@ -0,0 +1,9 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["auth0/auth0-java", "*", "input.signing-password", "code-injection", "generated"]
|
||||
- ["auth0/auth0-java", "*", "input.signing-key", "code-injection", "generated"]
|
||||
- ["auth0/auth0-java", "*", "input.ossr-password", "code-injection", "generated"]
|
||||
- ["auth0/auth0-java", "*", "input.ossr-username", "code-injection", "generated"]
|
||||
@@ -0,0 +1,8 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/actions-all
|
||||
extensible: actionsSinkModel
|
||||
data:
|
||||
- ["auth0/auth0.net", "*", "input.nuget-token", "code-injection", "generated"]
|
||||
- ["auth0/auth0.net", "*", "input.nuget-directory", "code-injection", "generated"]
|
||||
- ["auth0/auth0.net", "*", "input.project-paths", "code-injection", "generated"]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user