mirror of
https://github.com/github/codeql.git
synced 2026-05-04 05:05:12 +02:00
Merge branch 'main' into use-taint-configuration-in-three-more-queries
This commit is contained in:
50
.github/workflows/check-qldoc.yml
vendored
Normal file
50
.github/workflows/check-qldoc.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: "Check QLdoc coverage"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "*/ql/lib/**"
|
||||
- .github/workflows/check-qldoc.yml
|
||||
branches:
|
||||
- main
|
||||
- "rc/*"
|
||||
|
||||
jobs:
|
||||
qldoc:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Install CodeQL
|
||||
run: |
|
||||
gh extension install github/gh-codeql
|
||||
gh codeql set-channel nightly
|
||||
gh codeql version
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check QLdoc coverage
|
||||
shell: bash
|
||||
run: |
|
||||
EXIT_CODE=0
|
||||
changed_lib_packs="$(git diff --name-only --diff-filter=ACMRT HEAD^ HEAD | { grep -o '^[a-z]*/ql/lib' || true; } | sort -u)"
|
||||
for pack_dir in ${changed_lib_packs}; do
|
||||
lang="${pack_dir%/ql/lib}"
|
||||
gh codeql generate library-doc-coverage --output="${RUNNER_TEMP}/${lang}-current.txt" --dir="${pack_dir}"
|
||||
done
|
||||
git checkout HEAD^
|
||||
for pack_dir in ${changed_lib_packs}; do
|
||||
lang="${pack_dir%/ql/lib}"
|
||||
gh codeql generate library-doc-coverage --output="${RUNNER_TEMP}/${lang}-baseline.txt" --dir="${pack_dir}"
|
||||
awk -F, '{gsub(/"/,""); if ($4==0 && $6=="public") print "\""$3"\"" }' "${RUNNER_TEMP}/${lang}-current.txt" | sort -u > "${RUNNER_TEMP}/current-undocumented.txt"
|
||||
awk -F, '{gsub(/"/,""); if ($4==0 && $6=="public") print "\""$3"\"" }' "${RUNNER_TEMP}/${lang}-baseline.txt" | sort -u > "${RUNNER_TEMP}/baseline-undocumented.txt"
|
||||
UNDOCUMENTED="$(grep -f <(comm -13 "${RUNNER_TEMP}/baseline-undocumented.txt" "${RUNNER_TEMP}/current-undocumented.txt") "${RUNNER_TEMP}/${lang}-current.txt" || true)"
|
||||
if [ -n "$UNDOCUMENTED" ]; then
|
||||
echo "$UNDOCUMENTED" | awk -F, '{gsub(/"/,""); print "::warning file='"${pack_dir}"'/"$1",line="$2"::Missing QLdoc for "$5, $3 }'
|
||||
EXIT_CODE=1
|
||||
fi
|
||||
done
|
||||
exit "${EXIT_CODE}"
|
||||
@@ -73,6 +73,14 @@
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/rangeanalysis/SsaReadPositionCommon.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/rangeanalysis/SsaReadPositionCommon.qll"
|
||||
],
|
||||
"Model as Data Generation Java/C# - Utils": [
|
||||
"java/ql/src/utils/model-generator/ModelGeneratorUtils.qll",
|
||||
"csharp/ql/src/utils/model-generator/ModelGeneratorUtils.qll"
|
||||
],
|
||||
"Model as Data Generation Java/C# - SummaryModels": [
|
||||
"java/ql/src/utils/model-generator/CaptureSummaryModels.qll",
|
||||
"csharp/ql/src/utils/model-generator/CaptureSummaryModels.qll"
|
||||
],
|
||||
"Sign Java/C#": [
|
||||
"java/ql/lib/semmle/code/java/dataflow/internal/rangeanalysis/Sign.qll",
|
||||
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/rangeanalysis/Sign.qll"
|
||||
@@ -508,4 +516,4 @@
|
||||
"javascript/ql/lib/semmle/javascript/frameworks/data/internal/AccessPathSyntax.qll",
|
||||
"ruby/ql/lib/codeql/ruby/dataflow/internal/AccessPathSyntax.qll"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,9 @@
|
||||
## 0.0.11
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* Many queries now support structured bindings, as structured bindings are now handled in the IR translation.
|
||||
|
||||
## 0.0.10
|
||||
|
||||
### New Features
|
||||
@@ -6,6 +12,7 @@
|
||||
|
||||
## 0.0.9
|
||||
|
||||
|
||||
## 0.0.8
|
||||
|
||||
### Deprecated APIs
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
## 0.0.11
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* Many queries now support structured bindings, as structured bindings are now handled in the IR translation.
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 0.0.10
|
||||
lastReleaseVersion: 0.0.11
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/cpp-all
|
||||
version: 0.0.11-dev
|
||||
version: 0.0.12-dev
|
||||
groups: cpp
|
||||
dbscheme: semmlecode.cpp.dbscheme
|
||||
extractor: cpp
|
||||
|
||||
@@ -1,3 +1,24 @@
|
||||
## 0.0.11
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
* The deprecated queries `cpp/duplicate-block`, `cpp/duplicate-function`, `cpp/duplicate-class`, `cpp/duplicate-file`, `cpp/mostly-duplicate-function`,`cpp/similar-file`, `cpp/duplicated-lines-in-files` have been removed.
|
||||
|
||||
### Deprecated Predicates and Classes
|
||||
|
||||
* The predicates and classes in the `CodeDuplication` library have been deprecated.
|
||||
|
||||
### New Queries
|
||||
|
||||
* A new query titled "Use of expired stack-address" (`cpp/using-expired-stack-address`) has been added.
|
||||
This query finds accesses to expired stack-allocated memory that escaped via a global variable.
|
||||
* A new `cpp/insufficient-key-size` query has been added to the default query suite for C/C++. The query finds uses of certain cryptographic algorithms where the key size is too small to provide adequate encryption strength.
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* The "Failure to use HTTPS URLs" (`cpp/non-https-url`) has been improved reducing false positive results, and its precision has been increased to 'high'.
|
||||
* The `cpp/system-data-exposure` query has been modernized and has converted to a `path-problem` query. There are now fewer false positive results.
|
||||
|
||||
## 0.0.10
|
||||
|
||||
### Deprecated Classes
|
||||
|
||||
@@ -77,12 +77,13 @@ class ReturnStackAllocatedMemoryConfig extends MustFlowConfiguration {
|
||||
|
||||
from
|
||||
MustFlowPathNode source, MustFlowPathNode sink, VariableAddressInstruction var,
|
||||
ReturnStackAllocatedMemoryConfig conf
|
||||
ReturnStackAllocatedMemoryConfig conf, Function f
|
||||
where
|
||||
conf.hasFlowPath(source, sink) and
|
||||
source.getNode().asInstruction() = var and
|
||||
// Only raise an alert if we're returning from the _same_ callable as the on that
|
||||
// declared the stack variable.
|
||||
var.getEnclosingFunction() = sink.getNode().getEnclosingCallable()
|
||||
var.getEnclosingFunction() = pragma[only_bind_into](f) and
|
||||
sink.getNode().getEnclosingCallable() = pragma[only_bind_into](f)
|
||||
select sink.getNode(), source, sink, "May return stack-allocated memory from $@.", var.getAst(),
|
||||
var.getAst().toString()
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* The `cpp/system-data-exposure` query has been modernized and has converted to a `path-problem` query. There are now fewer false positive results.
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: newQuery
|
||||
---
|
||||
* A new `cpp/insufficient-key-size` query has been added to the default query suite for C/C++. The query finds uses of certain cryptographic algorithms where the key size is too small to provide adequate encryption strength.
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
category: newQuery
|
||||
---
|
||||
|
||||
- A new query titled "Use of expired stack-address" (`cpp/using-expired-stack-address`) has been added.
|
||||
This query finds accesses to expired stack-allocated memory that escaped via a global variable.
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* The "Failure to use HTTPS URLs" (`cpp/non-https-url`) has been improved reducing false positive results, and its precision has been increased to 'high'.
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: deprecated
|
||||
---
|
||||
* The predicates and classes in the `CodeDuplication` library have been deprecated.
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: breaking
|
||||
---
|
||||
* The deprecated queries `cpp/duplicate-block`, `cpp/duplicate-function`, `cpp/duplicate-class`, `cpp/duplicate-file`, `cpp/mostly-duplicate-function`,`cpp/similar-file`, `cpp/duplicated-lines-in-files` have been removed.
|
||||
20
cpp/ql/src/change-notes/released/0.0.11.md
Normal file
20
cpp/ql/src/change-notes/released/0.0.11.md
Normal file
@@ -0,0 +1,20 @@
|
||||
## 0.0.11
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
* The deprecated queries `cpp/duplicate-block`, `cpp/duplicate-function`, `cpp/duplicate-class`, `cpp/duplicate-file`, `cpp/mostly-duplicate-function`,`cpp/similar-file`, `cpp/duplicated-lines-in-files` have been removed.
|
||||
|
||||
### Deprecated Predicates and Classes
|
||||
|
||||
* The predicates and classes in the `CodeDuplication` library have been deprecated.
|
||||
|
||||
### New Queries
|
||||
|
||||
* A new query titled "Use of expired stack-address" (`cpp/using-expired-stack-address`) has been added.
|
||||
This query finds accesses to expired stack-allocated memory that escaped via a global variable.
|
||||
* A new `cpp/insufficient-key-size` query has been added to the default query suite for C/C++. The query finds uses of certain cryptographic algorithms where the key size is too small to provide adequate encryption strength.
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* The "Failure to use HTTPS URLs" (`cpp/non-https-url`) has been improved reducing false positive results, and its precision has been increased to 'high'.
|
||||
* The `cpp/system-data-exposure` query has been modernized and has converted to a `path-problem` query. There are now fewer false positive results.
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 0.0.10
|
||||
lastReleaseVersion: 0.0.11
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/cpp-queries
|
||||
version: 0.0.11-dev
|
||||
version: 0.0.12-dev
|
||||
groups:
|
||||
- cpp
|
||||
- queries
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
## 1.0.5
|
||||
|
||||
## 1.0.4
|
||||
|
||||
## 1.0.3
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
## 1.0.5
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 1.0.4
|
||||
lastReleaseVersion: 1.0.5
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/csharp-solorigate-all
|
||||
version: 1.0.5-dev
|
||||
version: 1.0.6-dev
|
||||
groups:
|
||||
- csharp
|
||||
- solorigate
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
## 1.0.5
|
||||
|
||||
## 1.0.4
|
||||
|
||||
## 1.0.3
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
## 1.0.5
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 1.0.4
|
||||
lastReleaseVersion: 1.0.5
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/csharp-solorigate-queries
|
||||
version: 1.0.5-dev
|
||||
version: 1.0.6-dev
|
||||
groups:
|
||||
- csharp
|
||||
- solorigate
|
||||
|
||||
@@ -1,3 +1,21 @@
|
||||
## 0.0.11
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
* The C# extractor no longer supports the following legacy environment variables:
|
||||
```
|
||||
ODASA_BUILD_ERROR_DIR
|
||||
ODASA_CSHARP_LAYOUT
|
||||
ODASA_SNAPSHOT
|
||||
SEMMLE_DIST
|
||||
SEMMLE_EXTRACTOR_OPTIONS
|
||||
SEMMLE_PLATFORM_TOOLS
|
||||
SEMMLE_PRESERVE_SYMLINKS
|
||||
SOURCE_ARCHIVE
|
||||
TRAP_FOLDER
|
||||
```
|
||||
* `codeql test run` now extracts source code recursively from sub folders. This may break existing tests that have other tests in nested sub folders, as those will now get the nested test code included.
|
||||
|
||||
## 0.0.10
|
||||
|
||||
## 0.0.9
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: breaking
|
||||
---
|
||||
* `codeql test run` now extracts source code recursively from sub folders. This may break existing tests that have other tests in nested sub folders, as those will now get the nested test code included.
|
||||
@@ -1,6 +1,7 @@
|
||||
---
|
||||
category: breaking
|
||||
---
|
||||
## 0.0.11
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
* The C# extractor no longer supports the following legacy environment variables:
|
||||
```
|
||||
ODASA_BUILD_ERROR_DIR
|
||||
@@ -12,4 +13,5 @@ SEMMLE_PLATFORM_TOOLS
|
||||
SEMMLE_PRESERVE_SYMLINKS
|
||||
SOURCE_ARCHIVE
|
||||
TRAP_FOLDER
|
||||
```
|
||||
```
|
||||
* `codeql test run` now extracts source code recursively from sub folders. This may break existing tests that have other tests in nested sub folders, as those will now get the nested test code included.
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 0.0.10
|
||||
lastReleaseVersion: 0.0.11
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/csharp-all
|
||||
version: 0.0.11-dev
|
||||
version: 0.0.12-dev
|
||||
groups: csharp
|
||||
dbscheme: semmlecode.csharp.dbscheme
|
||||
extractor: csharp
|
||||
|
||||
@@ -87,7 +87,9 @@ private class GvnCons extends Gvn, TGvnCons {
|
||||
pragma[noinline]
|
||||
private predicate gvnKindDeclaration(Expr e, int kind, boolean isTargetThis, Declaration d) {
|
||||
isTargetThis = isTargetThis(e) and
|
||||
d = referenceAttribute(e) and
|
||||
// guard against elements with multiple declaration targets (DB inconsistency),
|
||||
// which may result in a combinatorial explosion
|
||||
d = unique(Declaration d0 | d0 = referenceAttribute(e) | d0) and
|
||||
expressions(e, kind, _)
|
||||
}
|
||||
|
||||
|
||||
@@ -111,13 +111,6 @@ private predicate evenlyDivisibleExpr(Expr e, int factor) {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `rix` is the number of input edges to `phi`.
|
||||
*/
|
||||
private predicate maxPhiInputRank(SsaPhiNode phi, int rix) {
|
||||
rix = max(int r | rankedPhiInput(phi, _, _, r))
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the remainder of `val` modulo `mod`.
|
||||
*
|
||||
|
||||
@@ -2026,3 +2026,8 @@ abstract class SyntheticField extends string {
|
||||
/** Gets the type of this synthetic field. */
|
||||
Type getType() { result instanceof ObjectType }
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the the content `c` is a container.
|
||||
*/
|
||||
predicate containerContent(DataFlow::Content c) { c instanceof DataFlow::ElementContent }
|
||||
|
||||
@@ -43,37 +43,4 @@ module Private {
|
||||
predicate ssaUpdateStep = RU::ssaUpdateStep/3;
|
||||
|
||||
Expr getABasicBlockExpr(BasicBlock bb) { result = bb.getANode() }
|
||||
|
||||
private class PhiInputEdgeBlock extends BasicBlock {
|
||||
PhiInputEdgeBlock() { this = any(SsaReadPositionPhiInputEdge edge).getOrigBlock() }
|
||||
}
|
||||
|
||||
int getId(PhiInputEdgeBlock bb) {
|
||||
exists(CfgImpl::ControlFlowTree::Range_ t | CfgImpl::ControlFlowTree::idOf(t, result) |
|
||||
t = bb.getFirstNode().getElement()
|
||||
or
|
||||
t = bb.(CS::ControlFlow::BasicBlocks::EntryBlock).getCallable()
|
||||
)
|
||||
}
|
||||
|
||||
private string getSplitString(PhiInputEdgeBlock bb) {
|
||||
result = bb.getFirstNode().(CS::ControlFlow::Nodes::ElementNode).getSplitsString()
|
||||
or
|
||||
not exists(bb.getFirstNode().(CS::ControlFlow::Nodes::ElementNode).getSplitsString()) and
|
||||
result = ""
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `inp` is an input to `phi` along `edge` and this input has index `r`
|
||||
* in an arbitrary 1-based numbering of the input edges to `phi`.
|
||||
*/
|
||||
predicate rankedPhiInput(SsaPhiNode phi, SsaVariable inp, SsaReadPositionPhiInputEdge edge, int r) {
|
||||
edge.phiInput(phi, inp) and
|
||||
edge =
|
||||
rank[r](SsaReadPositionPhiInputEdge e |
|
||||
e.phiInput(phi, _)
|
||||
|
|
||||
e order by getId(e.getOrigBlock()), getSplitString(e.getOrigBlock())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
*/
|
||||
|
||||
private import SsaReadPositionSpecific
|
||||
import SsaReadPositionSpecific::Public
|
||||
|
||||
private newtype TSsaReadPosition =
|
||||
TSsaReadPositionBlock(BasicBlock bb) { bb = getAReadBasicBlock(_) } or
|
||||
@@ -55,3 +56,10 @@ class SsaReadPositionPhiInputEdge extends SsaReadPosition, TSsaReadPositionPhiIn
|
||||
|
||||
override string toString() { result = "edge" }
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `rix` is the number of input edges to `phi`.
|
||||
*/
|
||||
predicate maxPhiInputRank(SsaPhiNode phi, int rix) {
|
||||
rix = max(int r | rankedPhiInput(phi, _, _, r))
|
||||
}
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
*/
|
||||
|
||||
private import csharp
|
||||
private import SsaReadPositionCommon
|
||||
private import semmle.code.csharp.controlflow.internal.ControlFlowGraphImpl as CfgImpl
|
||||
|
||||
class SsaVariable = Ssa::Definition;
|
||||
|
||||
@@ -14,3 +16,41 @@ class BasicBlock = ControlFlow::BasicBlock;
|
||||
BasicBlock getAReadBasicBlock(SsaVariable v) {
|
||||
result = v.getARead().getAControlFlowNode().getBasicBlock()
|
||||
}
|
||||
|
||||
private class PhiInputEdgeBlock extends BasicBlock {
|
||||
PhiInputEdgeBlock() { this = any(SsaReadPositionPhiInputEdge edge).getOrigBlock() }
|
||||
}
|
||||
|
||||
private int getId(PhiInputEdgeBlock bb) {
|
||||
exists(CfgImpl::ControlFlowTree::Range_ t | CfgImpl::ControlFlowTree::idOf(t, result) |
|
||||
t = bb.getFirstNode().getElement()
|
||||
or
|
||||
t = bb.(ControlFlow::BasicBlocks::EntryBlock).getCallable()
|
||||
)
|
||||
}
|
||||
|
||||
private string getSplitString(PhiInputEdgeBlock bb) {
|
||||
result = bb.getFirstNode().(ControlFlow::Nodes::ElementNode).getSplitsString()
|
||||
or
|
||||
not exists(bb.getFirstNode().(ControlFlow::Nodes::ElementNode).getSplitsString()) and
|
||||
result = ""
|
||||
}
|
||||
|
||||
/**
|
||||
* Declarations to be exposed to users of SsaReadPositionCommon.
|
||||
*/
|
||||
module Public {
|
||||
/**
|
||||
* Holds if `inp` is an input to `phi` along `edge` and this input has index `r`
|
||||
* in an arbitrary 1-based numbering of the input edges to `phi`.
|
||||
*/
|
||||
predicate rankedPhiInput(SsaPhiNode phi, SsaVariable inp, SsaReadPositionPhiInputEdge edge, int r) {
|
||||
edge.phiInput(phi, inp) and
|
||||
edge =
|
||||
rank[r](SsaReadPositionPhiInputEdge e |
|
||||
e.phiInput(phi, _)
|
||||
|
|
||||
e order by getId(e.getOrigBlock()), getSplitString(e.getOrigBlock())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,13 @@
|
||||
## 0.0.11
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* Casts to `dynamic` are excluded from the useless upcasts check (`cs/useless-upcast`).
|
||||
* The C# extractor now accepts an extractor option `buildless`, which is used to decide what type of extraction that should be performed. If `true` then buildless (standalone) extraction will be performed. Otherwise tracing extraction will be performed (default).
|
||||
The option is added via `codeql database create --language=csharp -Obuildless=true ...`.
|
||||
* The C# extractor now accepts an extractor option `trap.compression`, which is used to decide the compression format for TRAP files. The legal values are `brotli` (default), `gzip` or `none`.
|
||||
The option is added via `codeql database create --language=csharp -Otrap.compression=value ...`.
|
||||
|
||||
## 0.0.10
|
||||
|
||||
### Query Metadata Changes
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* The C# extractor now accepts an extractor option `trap.compression`, which is used to decide the compression format for TRAP files. The legal values are `brotli` (default), `gzip` or `none`.
|
||||
The option is added via `codeql database create --language=csharp -Otrap.compression=value ...`.
|
||||
@@ -1,5 +0,0 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* The C# extractor now accepts an extractor option `buildless`, which is used to decide what type of extraction that should be performed. If `true` then buildless (standalone) extraction will be performed. Otherwise tracing extraction will be performed (default).
|
||||
The option is added via `codeql database create --language=csharp -Obuildless=true ...`.
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* Casts to `dynamic` are excluded from the useless upcasts check (`cs/useless-upcast`).
|
||||
9
csharp/ql/src/change-notes/released/0.0.11.md
Normal file
9
csharp/ql/src/change-notes/released/0.0.11.md
Normal file
@@ -0,0 +1,9 @@
|
||||
## 0.0.11
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* Casts to `dynamic` are excluded from the useless upcasts check (`cs/useless-upcast`).
|
||||
* The C# extractor now accepts an extractor option `buildless`, which is used to decide what type of extraction that should be performed. If `true` then buildless (standalone) extraction will be performed. Otherwise tracing extraction will be performed (default).
|
||||
The option is added via `codeql database create --language=csharp -Obuildless=true ...`.
|
||||
* The C# extractor now accepts an extractor option `trap.compression`, which is used to decide the compression format for TRAP files. The legal values are `brotli` (default), `gzip` or `none`.
|
||||
The option is added via `codeql database create --language=csharp -Otrap.compression=value ...`.
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 0.0.10
|
||||
lastReleaseVersion: 0.0.11
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/csharp-queries
|
||||
version: 0.0.11-dev
|
||||
version: 0.0.12-dev
|
||||
groups:
|
||||
- csharp
|
||||
- queries
|
||||
|
||||
91
csharp/ql/src/utils/model-generator/CaptureSummaryModels.ql
Normal file
91
csharp/ql/src/utils/model-generator/CaptureSummaryModels.ql
Normal file
@@ -0,0 +1,91 @@
|
||||
/**
|
||||
* @name Capture summary models.
|
||||
* @description Finds applicable summary models to be used by other queries.
|
||||
* @id csharp/utils/model-generator/summary-models
|
||||
*/
|
||||
|
||||
private import CaptureSummaryModels
|
||||
|
||||
/**
|
||||
* Capture fluent APIs that return `this`.
|
||||
* Example of a fluent API:
|
||||
* ```csharp
|
||||
* public class BasicFlow {
|
||||
* public BasicFlow ReturnThis(object input)
|
||||
* {
|
||||
* // some side effect
|
||||
* return this;
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* ```Summaries;BasicFlow;false;ReturnThis;(System.Object);Argument[Qualifier];ReturnValue;value```
|
||||
* Capture APIs that transfer taint from an input parameter to an output return
|
||||
* value or parameter.
|
||||
* Allows a sequence of read steps followed by a sequence of store steps.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* ```csharp
|
||||
* public class BasicFlow {
|
||||
* private string tainted;
|
||||
*
|
||||
* public String ReturnField()
|
||||
* {
|
||||
* return tainted;
|
||||
* }
|
||||
*
|
||||
* public void AssignFieldToArray(object[] target)
|
||||
* {
|
||||
* target[0] = tainted;
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
* Captured Models:
|
||||
* ```
|
||||
* Summaries;BasicFlow;false;ReturnField;();Argument[Qualifier];ReturnValue;taint |
|
||||
* Summaries;BasicFlow;false;AssignFieldToArray;(System.Object[]);Argument[Qualifier];Argument[0].Element;taint
|
||||
* ```
|
||||
*
|
||||
* ```csharp
|
||||
* public class BasicFlow {
|
||||
* private string tainted;
|
||||
*
|
||||
* public void SetField(string s)
|
||||
* {
|
||||
* tainted = s;
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* ```Summaries;BasicFlow;false;SetField;(System.String);Argument[0];Argument[Qualifier];taint```
|
||||
*
|
||||
* ```csharp
|
||||
* public class BasicFlow {
|
||||
* public void ReturnSubstring(string s)
|
||||
* {
|
||||
* return s.Substring(0, 1);
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* ```Summaries;BasicFlow;false;ReturnSubstring;(System.String);Argument[0];ReturnValue;taint```
|
||||
*
|
||||
* ```csharp
|
||||
* public class BasicFlow {
|
||||
* public void AssignToArray(int data, int[] target)
|
||||
* {
|
||||
* target[0] = data;
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* ```Summaries;BasicFlow;false;AssignToArray;(System.Int32,System.Int32[]);Argument[0];Argument[1].Element;taint```
|
||||
*/
|
||||
private string captureFlow(TargetApi api) {
|
||||
result = captureQualifierFlow(api) or
|
||||
result = captureThroughFlow(api)
|
||||
}
|
||||
|
||||
from TargetApi api, string flow
|
||||
where flow = captureFlow(api)
|
||||
select flow order by flow
|
||||
98
csharp/ql/src/utils/model-generator/CaptureSummaryModels.qll
Normal file
98
csharp/ql/src/utils/model-generator/CaptureSummaryModels.qll
Normal file
@@ -0,0 +1,98 @@
|
||||
/**
|
||||
* Provides classes and predicates related to capturing summary models
|
||||
* of the Standard or a 3rd party library.
|
||||
*/
|
||||
|
||||
import CaptureSummaryModelsSpecific
|
||||
|
||||
/**
|
||||
* Gets the summary model of `api`, if it follows the `fluent` programming pattern (returns `this`).
|
||||
*/
|
||||
string captureQualifierFlow(TargetApi api) {
|
||||
exists(ReturnNodeExt ret |
|
||||
api = returnNodeEnclosingCallable(ret) and
|
||||
isOwnInstanceAccessNode(ret)
|
||||
) and
|
||||
result = asValueModel(api, qualifierString(), "ReturnValue")
|
||||
}
|
||||
|
||||
/**
|
||||
* A FlowState representing a tainted read.
|
||||
*/
|
||||
private class TaintRead extends DataFlow::FlowState {
|
||||
TaintRead() { this = "TaintRead" }
|
||||
}
|
||||
|
||||
/**
|
||||
* A FlowState representing a tainted write.
|
||||
*/
|
||||
private class TaintStore extends DataFlow::FlowState {
|
||||
TaintStore() { this = "TaintStore" }
|
||||
}
|
||||
|
||||
/**
|
||||
* A TaintTracking Configuration used for tracking flow through APIs.
|
||||
* The sources are the parameters of an API and the sinks are the return values (excluding `this`) and parameters.
|
||||
*
|
||||
* This can be used to generate Flow summaries for APIs from parameter to return.
|
||||
*/
|
||||
class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
ThroughFlowConfig() { this = "ThroughFlowConfig" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source, DataFlow::FlowState state) {
|
||||
source instanceof DataFlow::ParameterNode and
|
||||
source.getEnclosingCallable() instanceof TargetApi and
|
||||
state instanceof TaintRead
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink, DataFlow::FlowState state) {
|
||||
sink instanceof ReturnNodeExt and
|
||||
not isOwnInstanceAccessNode(sink) and
|
||||
not exists(captureQualifierFlow(sink.asExpr().getEnclosingCallable())) and
|
||||
(state instanceof TaintRead or state instanceof TaintStore)
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(
|
||||
DataFlow::Node node1, DataFlow::FlowState state1, DataFlow::Node node2,
|
||||
DataFlow::FlowState state2
|
||||
) {
|
||||
exists(TypedContent tc |
|
||||
store(node1, tc, node2, _) and
|
||||
isRelevantContent(tc.getContent()) and
|
||||
(state1 instanceof TaintRead or state1 instanceof TaintStore) and
|
||||
state2 instanceof TaintStore
|
||||
)
|
||||
or
|
||||
exists(DataFlow::Content c |
|
||||
readStep(node1, c, node2) and
|
||||
isRelevantContent(c) and
|
||||
state1 instanceof TaintRead and
|
||||
state2 instanceof TaintRead
|
||||
)
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node n) {
|
||||
exists(Type t | t = n.getType() and not isRelevantType(t))
|
||||
}
|
||||
|
||||
override DataFlow::FlowFeature getAFeature() {
|
||||
result instanceof DataFlow::FeatureEqualSourceSinkCallContext
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the summary model(s) of `api`, if there is flow from parameters to return value or parameter.
|
||||
*/
|
||||
string captureThroughFlow(TargetApi api) {
|
||||
exists(
|
||||
ThroughFlowConfig config, DataFlow::ParameterNode p, ReturnNodeExt returnNodeExt, string input,
|
||||
string output
|
||||
|
|
||||
config.hasFlow(p, returnNodeExt) and
|
||||
returnNodeExt.getEnclosingCallable() = api and
|
||||
input = parameterNodeAsInput(p) and
|
||||
output = returnNodeAsOutput(returnNodeExt) and
|
||||
input != output and
|
||||
result = asTaintModel(api, input, output)
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
/**
|
||||
* Provides predicates related to capturing summary models of the Standard or a 3rd party library.
|
||||
*/
|
||||
|
||||
import csharp
|
||||
import semmle.code.csharp.dataflow.TaintTracking
|
||||
import semmle.code.csharp.dataflow.internal.DataFlowImplCommon
|
||||
import semmle.code.csharp.dataflow.internal.DataFlowPrivate
|
||||
import ModelGeneratorUtils
|
||||
|
||||
Callable returnNodeEnclosingCallable(ReturnNodeExt ret) { result = getNodeEnclosingCallable(ret) }
|
||||
|
||||
predicate isOwnInstanceAccessNode(ReturnNode node) { node.asExpr() instanceof ThisAccess }
|
||||
|
||||
string qualifierString() { result = "Argument[Qualifier]" }
|
||||
71
csharp/ql/src/utils/model-generator/ModelGeneratorUtils.qll
Normal file
71
csharp/ql/src/utils/model-generator/ModelGeneratorUtils.qll
Normal file
@@ -0,0 +1,71 @@
|
||||
import ModelGeneratorUtilsSpecific
|
||||
|
||||
/**
|
||||
* Holds if data can flow from `node1` to `node2` either via a read or a write of an intermediate field `f`.
|
||||
*/
|
||||
predicate isRelevantTaintStep(DataFlow::Node node1, DataFlow::Node node2) {
|
||||
exists(DataFlow::Content f |
|
||||
readStep(node1, f, node2) and
|
||||
if f instanceof DataFlow::FieldContent
|
||||
then isRelevantType(f.(DataFlow::FieldContent).getField().getType())
|
||||
else
|
||||
if f instanceof DataFlow::SyntheticFieldContent
|
||||
then isRelevantType(f.(DataFlow::SyntheticFieldContent).getField().getType())
|
||||
else any()
|
||||
)
|
||||
or
|
||||
exists(DataFlow::Content f | storeStep(node1, f, node2) | containerContent(f))
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if content `c` is either a field or synthetic field of a relevant type
|
||||
* or a container like content.
|
||||
*/
|
||||
predicate isRelevantContent(DataFlow::Content c) {
|
||||
isRelevantType(c.(DataFlow::FieldContent).getField().getType()) or
|
||||
isRelevantType(c.(DataFlow::SyntheticFieldContent).getField().getType()) or
|
||||
containerContent(c)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the summary model for `api` with `input`, `output` and `kind`.
|
||||
*/
|
||||
bindingset[input, output, kind]
|
||||
string asSummaryModel(TargetApi api, string input, string output, string kind) {
|
||||
result =
|
||||
asPartialModel(api) + input + ";" //
|
||||
+ output + ";" //
|
||||
+ kind
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the value summary model for `api` with `input` and `output`.
|
||||
*/
|
||||
bindingset[input, output]
|
||||
string asValueModel(TargetApi api, string input, string output) {
|
||||
result = asSummaryModel(api, input, output, "value")
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the taint summary model for `api` with `input` and `output`.
|
||||
*/
|
||||
bindingset[input, output]
|
||||
string asTaintModel(TargetApi api, string input, string output) {
|
||||
result = asSummaryModel(api, input, output, "taint")
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the sink model for `api` with `input` and `kind`.
|
||||
*/
|
||||
bindingset[input, kind]
|
||||
string asSinkModel(TargetApi api, string input, string kind) {
|
||||
result = asPartialModel(api) + input + ";" + kind
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the source model for `api` with `output` and `kind`.
|
||||
*/
|
||||
bindingset[output, kind]
|
||||
string asSourceModel(TargetApi api, string output, string kind) {
|
||||
result = asPartialModel(api) + output + ";" + kind
|
||||
}
|
||||
@@ -0,0 +1,102 @@
|
||||
import csharp
|
||||
import semmle.code.csharp.dataflow.internal.DataFlowPrivate
|
||||
private import semmle.code.csharp.commons.Util
|
||||
private import semmle.code.csharp.dataflow.internal.DataFlowImplCommon
|
||||
private import semmle.code.csharp.dataflow.internal.DataFlowDispatch
|
||||
|
||||
private predicate isRelevantForModels(Callable api) { not api instanceof MainMethod }
|
||||
|
||||
/**
|
||||
* A class of Callables that are relevant for generating summary, source and sinks models for.
|
||||
*
|
||||
* In the Standard library and 3rd party libraries it the Callables that can be called
|
||||
* from outside the library itself.
|
||||
*/
|
||||
class TargetApi extends Callable {
|
||||
TargetApi() {
|
||||
[this.(Modifiable), this.(Accessor).getDeclaration()].isEffectivelyPublic() and
|
||||
this.fromSource() and
|
||||
isRelevantForModels(this)
|
||||
}
|
||||
}
|
||||
|
||||
private string parameterQualifiedTypeNamesToString(TargetApi api) {
|
||||
result =
|
||||
concat(Parameter p, int i |
|
||||
p = api.getParameter(i)
|
||||
|
|
||||
p.getType().getQualifiedName(), "," order by i
|
||||
)
|
||||
}
|
||||
|
||||
/** Holds if the summary should apply for all overrides of this. */
|
||||
private predicate isBaseCallableOrPrototype(TargetApi api) {
|
||||
api.getDeclaringType() instanceof Interface
|
||||
or
|
||||
exists(Modifiable m | m = [api.(Modifiable), api.(Accessor).getDeclaration()] |
|
||||
m.isAbstract()
|
||||
or
|
||||
api.getDeclaringType().(Modifiable).isAbstract() and m.(Virtualizable).isVirtual()
|
||||
)
|
||||
}
|
||||
|
||||
/** Gets a string representing whether the summary should apply for all overrides of this. */
|
||||
private string getCallableOverride(TargetApi api) {
|
||||
if isBaseCallableOrPrototype(api) then result = "true" else result = "false"
|
||||
}
|
||||
|
||||
/** Computes the first 6 columns for CSV rows. */
|
||||
string asPartialModel(TargetApi api) {
|
||||
exists(string namespace, string type |
|
||||
api.getDeclaringType().hasQualifiedName(namespace, type) and
|
||||
result =
|
||||
namespace + ";" //
|
||||
+ type + ";" //
|
||||
+ getCallableOverride(api) + ";" //
|
||||
+ api.getName() + ";" //
|
||||
+ "(" + parameterQualifiedTypeNamesToString(api) + ")" //
|
||||
+ /* ext + */ ";" //
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds for type `t` for fields that are relevant as an intermediate
|
||||
* read or write step in the data flow analysis.
|
||||
*/
|
||||
predicate isRelevantType(Type t) { not t instanceof Enum }
|
||||
|
||||
private predicate isPrimitiveTypeUsedForBulkData(Type t) {
|
||||
t.getName().regexpMatch("byte|char|Byte|Char")
|
||||
}
|
||||
|
||||
private string parameterAccess(Parameter p) {
|
||||
if
|
||||
p.getType() instanceof ArrayType and
|
||||
not isPrimitiveTypeUsedForBulkData(p.getType().(ArrayType).getElementType())
|
||||
then result = "Argument[" + p.getPosition() + "].Element"
|
||||
else result = "Argument[" + p.getPosition() + "]"
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the model string representation of the parameter node `p`.
|
||||
*/
|
||||
string parameterNodeAsInput(DataFlow::ParameterNode p) {
|
||||
result = parameterAccess(p.asParameter())
|
||||
or
|
||||
result = "Argument[Qualifier]" and p instanceof InstanceParameterNode
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the model string represention of the the return node `node`.
|
||||
*/
|
||||
string returnNodeAsOutput(ReturnNodeExt node) {
|
||||
if node.getKind() instanceof ValueReturnKind
|
||||
then result = "ReturnValue"
|
||||
else
|
||||
exists(ParameterPosition pos | pos = node.getKind().(ParamUpdateReturnKind).getPosition() |
|
||||
result = parameterAccess(node.getEnclosingCallable().getParameter(pos.getPosition()))
|
||||
or
|
||||
pos.isThisParameter() and
|
||||
result = "Argument[Qualifier]"
|
||||
)
|
||||
}
|
||||
@@ -27,7 +27,7 @@ abstract class IncludeSummarizedCallable extends RelevantSummarizedCallable {
|
||||
)
|
||||
}
|
||||
|
||||
/** Gets a string representing, whether the summary should apply for all overrides of this. */
|
||||
/** Gets a string representing whether the summary should apply for all overrides of this. */
|
||||
private string getCallableOverride() {
|
||||
if this.isBaseCallableOrPrototype() then result = "true" else result = "false"
|
||||
}
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
| Summaries;BasicFlow;false;AssignFieldToArray;(System.Object[]);Argument[Qualifier];Argument[0].Element;taint |
|
||||
| Summaries;BasicFlow;false;AssignToArray;(System.Int32,System.Int32[]);Argument[0];Argument[1].Element;taint |
|
||||
| Summaries;BasicFlow;false;ReturnArrayElement;(System.Int32[]);Argument[0].Element;ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnField;();Argument[Qualifier];ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnParam0;(System.String,System.Object);Argument[0];ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnParam1;(System.String,System.Object);Argument[1];ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnParamMultiple;(System.Object,System.Object);Argument[0];ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnParamMultiple;(System.Object,System.Object);Argument[1];ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnSubstring;(System.String);Argument[0];ReturnValue;taint |
|
||||
| Summaries;BasicFlow;false;ReturnThis;(System.Object);Argument[Qualifier];ReturnValue;value |
|
||||
| Summaries;BasicFlow;false;SetField;(System.String);Argument[0];Argument[Qualifier];taint |
|
||||
@@ -0,0 +1 @@
|
||||
utils/model-generator/CaptureSummaryModels.ql
|
||||
58
csharp/ql/test/utils/model-generator/Summaries.cs
Normal file
58
csharp/ql/test/utils/model-generator/Summaries.cs
Normal file
@@ -0,0 +1,58 @@
|
||||
using System;
|
||||
|
||||
namespace Summaries;
|
||||
|
||||
public class BasicFlow
|
||||
{
|
||||
private string tainted;
|
||||
|
||||
public BasicFlow ReturnThis(object input)
|
||||
{
|
||||
return this;
|
||||
}
|
||||
|
||||
public string ReturnParam0(string input0, object input1)
|
||||
{
|
||||
return input0;
|
||||
}
|
||||
|
||||
public object ReturnParam1(string input0, object input1)
|
||||
{
|
||||
return input1;
|
||||
}
|
||||
|
||||
public object ReturnParamMultiple(object input0, object input1)
|
||||
{
|
||||
return (System.DateTime.Now.DayOfWeek == System.DayOfWeek.Monday) ? input0 : input1;
|
||||
}
|
||||
|
||||
public string ReturnSubstring(string s)
|
||||
{
|
||||
return s.Substring(0, 1);
|
||||
}
|
||||
|
||||
public int ReturnArrayElement(int[] input)
|
||||
{
|
||||
return input[0];
|
||||
}
|
||||
|
||||
public void AssignToArray(int data, int[] target)
|
||||
{
|
||||
target[0] = data;
|
||||
}
|
||||
|
||||
public void SetField(string s)
|
||||
{
|
||||
tainted = s;
|
||||
}
|
||||
|
||||
public string ReturnField()
|
||||
{
|
||||
return tainted;
|
||||
}
|
||||
|
||||
public void AssignFieldToArray(object[] target)
|
||||
{
|
||||
target[0] = tainted;
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,15 @@
|
||||
## 0.0.11
|
||||
|
||||
### New Features
|
||||
|
||||
* Added `hasDescendant(RefType anc, Type sub)`
|
||||
* Added `RefType.getADescendant()`
|
||||
* Added `RefType.getAStrictAncestor()`
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* Add support for `CharacterLiteral` in `CompileTimeConstantExpr.getStringValue()`
|
||||
|
||||
## 0.0.10
|
||||
|
||||
### New Features
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
category: feature
|
||||
---
|
||||
* Added `hasDescendant(RefType anc, Type sub)`
|
||||
* Added `RefType.getADescendant()`
|
||||
* Added `RefType.getAStrictAncestor()`
|
||||
@@ -1,4 +0,0 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* Add support for `CharacterLiteral` in `CompileTimeConstantExpr.getStringValue()`
|
||||
@@ -0,0 +1,4 @@
|
||||
---
|
||||
category: minorAnalysis
|
||||
---
|
||||
* Added support for detection of SSRF via JDBC database URLs, including connections made using the standard library (`java.sql`), Hikari Connection Pool, JDBI and Spring JDBC.
|
||||
11
java/ql/lib/change-notes/released/0.0.11.md
Normal file
11
java/ql/lib/change-notes/released/0.0.11.md
Normal file
@@ -0,0 +1,11 @@
|
||||
## 0.0.11
|
||||
|
||||
### New Features
|
||||
|
||||
* Added `hasDescendant(RefType anc, Type sub)`
|
||||
* Added `RefType.getADescendant()`
|
||||
* Added `RefType.getAStrictAncestor()`
|
||||
|
||||
### Minor Analysis Improvements
|
||||
|
||||
* Add support for `CharacterLiteral` in `CompileTimeConstantExpr.getStringValue()`
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 0.0.10
|
||||
lastReleaseVersion: 0.0.11
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/java-all
|
||||
version: 0.0.11-dev
|
||||
version: 0.0.12-dev
|
||||
groups: java
|
||||
dbscheme: config/semmlecode.dbscheme
|
||||
extractor: java
|
||||
|
||||
@@ -131,6 +131,8 @@ private module Frameworks {
|
||||
private import semmle.code.java.security.XPath
|
||||
private import semmle.code.java.security.XsltInjection
|
||||
private import semmle.code.java.frameworks.Jdbc
|
||||
private import semmle.code.java.frameworks.Jdbi
|
||||
private import semmle.code.java.frameworks.HikariCP
|
||||
private import semmle.code.java.frameworks.SpringJdbc
|
||||
private import semmle.code.java.frameworks.MyBatis
|
||||
private import semmle.code.java.frameworks.Hibernate
|
||||
|
||||
@@ -111,13 +111,6 @@ private predicate evenlyDivisibleExpr(Expr e, int factor) {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `rix` is the number of input edges to `phi`.
|
||||
*/
|
||||
private predicate maxPhiInputRank(SsaPhiNode phi, int rix) {
|
||||
rix = max(int r | rankedPhiInput(phi, _, _, r))
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the remainder of `val` modulo `mod`.
|
||||
*
|
||||
|
||||
@@ -725,6 +725,26 @@ private predicate boundedPhiCandValidForEdge(
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `b + delta` is a valid bound for `phi`'s `rix`th input edge.
|
||||
* - `upper = true` : `phi <= b + delta`
|
||||
* - `upper = false` : `phi >= b + delta`
|
||||
*/
|
||||
private predicate boundedPhiStep(
|
||||
SsaPhiNode phi, Bound b, int delta, boolean upper, boolean fromBackEdge, int origdelta,
|
||||
Reason reason, int rix
|
||||
) {
|
||||
exists(SsaVariable inp, SsaReadPositionPhiInputEdge edge |
|
||||
rankedPhiInput(phi, inp, edge, rix) and
|
||||
boundedPhiCandValidForEdge(phi, b, delta, upper, fromBackEdge, origdelta, reason, inp, edge) and
|
||||
(
|
||||
rix = 1
|
||||
or
|
||||
boundedPhiStep(phi, b, delta, upper, fromBackEdge, origdelta, reason, rix - 1)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `b + delta` is a valid bound for `phi`.
|
||||
* - `upper = true` : `phi <= b + delta`
|
||||
@@ -734,8 +754,9 @@ private predicate boundedPhi(
|
||||
SsaPhiNode phi, Bound b, int delta, boolean upper, boolean fromBackEdge, int origdelta,
|
||||
Reason reason
|
||||
) {
|
||||
forex(SsaVariable inp, SsaReadPositionPhiInputEdge edge | edge.phiInput(phi, inp) |
|
||||
boundedPhiCandValidForEdge(phi, b, delta, upper, fromBackEdge, origdelta, reason, inp, edge)
|
||||
exists(int r |
|
||||
boundedPhiStep(phi, b, delta, upper, fromBackEdge, origdelta, reason, r) and
|
||||
maxPhiInputRank(phi, r)
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -416,3 +416,13 @@ private class MyConsistencyConfiguration extends Consistency::ConsistencyConfigu
|
||||
n.getType() instanceof ImmutableType or n instanceof ImplicitVarargsArray
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if the the content `c` is a container.
|
||||
*/
|
||||
predicate containerContent(Content c) {
|
||||
c instanceof ArrayContent or
|
||||
c instanceof CollectionContent or
|
||||
c instanceof MapKeyContent or
|
||||
c instanceof MapValueContent
|
||||
}
|
||||
|
||||
@@ -62,13 +62,6 @@ private module Cached {
|
||||
)
|
||||
}
|
||||
|
||||
private predicate containerContent(DataFlow::Content c) {
|
||||
c instanceof DataFlow::ArrayContent or
|
||||
c instanceof DataFlow::CollectionContent or
|
||||
c instanceof DataFlow::MapKeyContent or
|
||||
c instanceof DataFlow::MapValueContent
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if taint can flow in one local step from `src` to `sink` excluding
|
||||
* local data flow steps. That is, `src` and `sink` are likely to represent
|
||||
|
||||
@@ -111,24 +111,4 @@ module Private {
|
||||
predicate ssaUpdateStep = RU::ssaUpdateStep/3;
|
||||
|
||||
Expr getABasicBlockExpr(BasicBlock bb) { result = bb.getANode() }
|
||||
|
||||
private predicate id(BasicBlock x, BasicBlock y) { x = y }
|
||||
|
||||
private predicate idOf(BasicBlock x, int y) = equivalenceRelation(id/2)(x, y)
|
||||
|
||||
private int getId(BasicBlock bb) { idOf(bb, result) }
|
||||
|
||||
/**
|
||||
* Holds if `inp` is an input to `phi` along `edge` and this input has index `r`
|
||||
* in an arbitrary 1-based numbering of the input edges to `phi`.
|
||||
*/
|
||||
predicate rankedPhiInput(SsaPhiNode phi, SsaVariable inp, SsaReadPositionPhiInputEdge edge, int r) {
|
||||
edge.phiInput(phi, inp) and
|
||||
edge =
|
||||
rank[r](SsaReadPositionPhiInputEdge e |
|
||||
e.phiInput(phi, _)
|
||||
|
|
||||
e order by getId(e.getOrigBlock())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
*/
|
||||
|
||||
private import SsaReadPositionSpecific
|
||||
import SsaReadPositionSpecific::Public
|
||||
|
||||
private newtype TSsaReadPosition =
|
||||
TSsaReadPositionBlock(BasicBlock bb) { bb = getAReadBasicBlock(_) } or
|
||||
@@ -55,3 +56,10 @@ class SsaReadPositionPhiInputEdge extends SsaReadPosition, TSsaReadPositionPhiIn
|
||||
|
||||
override string toString() { result = "edge" }
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds if `rix` is the number of input edges to `phi`.
|
||||
*/
|
||||
predicate maxPhiInputRank(SsaPhiNode phi, int rix) {
|
||||
rix = max(int r | rankedPhiInput(phi, _, _, r))
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
|
||||
private import semmle.code.java.dataflow.SSA as Ssa
|
||||
private import semmle.code.java.controlflow.BasicBlocks as BB
|
||||
private import SsaReadPositionCommon
|
||||
|
||||
class SsaVariable = Ssa::SsaVariable;
|
||||
|
||||
@@ -13,3 +14,28 @@ class BasicBlock = BB::BasicBlock;
|
||||
|
||||
/** Gets a basic block in which SSA variable `v` is read. */
|
||||
BasicBlock getAReadBasicBlock(SsaVariable v) { result = v.getAUse().getBasicBlock() }
|
||||
|
||||
private predicate id(BasicBlock x, BasicBlock y) { x = y }
|
||||
|
||||
private predicate idOf(BasicBlock x, int y) = equivalenceRelation(id/2)(x, y)
|
||||
|
||||
private int getId(BasicBlock bb) { idOf(bb, result) }
|
||||
|
||||
/**
|
||||
* Declarations to be exposed to users of SsaReadPositionCommon
|
||||
*/
|
||||
module Public {
|
||||
/**
|
||||
* Holds if `inp` is an input to `phi` along `edge` and this input has index `r`
|
||||
* in an arbitrary 1-based numbering of the input edges to `phi`.
|
||||
*/
|
||||
predicate rankedPhiInput(SsaPhiNode phi, SsaVariable inp, SsaReadPositionPhiInputEdge edge, int r) {
|
||||
edge.phiInput(phi, inp) and
|
||||
edge =
|
||||
rank[r](SsaReadPositionPhiInputEdge e |
|
||||
e.phiInput(phi, _)
|
||||
|
|
||||
e order by getId(e.getOrigBlock())
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
17
java/ql/lib/semmle/code/java/frameworks/HikariCP.qll
Normal file
17
java/ql/lib/semmle/code/java/frameworks/HikariCP.qll
Normal file
@@ -0,0 +1,17 @@
|
||||
/**
|
||||
* Definitions of sinks in the Hikari Connection Pool library.
|
||||
*/
|
||||
|
||||
import java
|
||||
import semmle.code.java.dataflow.ExternalFlow
|
||||
|
||||
private class SsrfSinkCsv extends SinkModelCsv {
|
||||
override predicate row(string row) {
|
||||
row =
|
||||
[
|
||||
//"package;type;overrides;name;signature;ext;spec;kind"
|
||||
"com.zaxxer.hikari;HikariConfig;false;HikariConfig;(Properties);;Argument[0];jdbc-url",
|
||||
"com.zaxxer.hikari;HikariConfig;false;setJdbcUrl;(String);;Argument[0];jdbc-url"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -52,3 +52,16 @@ private class SqlSinkCsv extends SinkModelCsv {
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
private class SsrfSinkCsv extends SinkModelCsv {
|
||||
override predicate row(string row) {
|
||||
row =
|
||||
[
|
||||
//"package;type;overrides;name;signature;ext;spec;kind"
|
||||
"java.sql;DriverManager;false;getConnection;(String);;Argument[0];jdbc-url",
|
||||
"java.sql;DriverManager;false;getConnection;(String,Properties);;Argument[0];jdbc-url",
|
||||
"java.sql;DriverManager;false;getConnection;(String,String,String);;Argument[0];jdbc-url",
|
||||
"java.sql;Driver;false;connect;(String,Properties);;Argument[0];jdbc-url"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
21
java/ql/lib/semmle/code/java/frameworks/Jdbi.qll
Normal file
21
java/ql/lib/semmle/code/java/frameworks/Jdbi.qll
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
* Definitions of sinks in the JDBI library.
|
||||
*/
|
||||
|
||||
import java
|
||||
import semmle.code.java.dataflow.ExternalFlow
|
||||
|
||||
private class SsrfSinkCsv extends SinkModelCsv {
|
||||
override predicate row(string row) {
|
||||
row =
|
||||
[
|
||||
//"package;type;overrides;name;signature;ext;spec;kind"
|
||||
"org.jdbi.v3.core;Jdbi;false;create;(String);;Argument[0];jdbc-url",
|
||||
"org.jdbi.v3.core;Jdbi;false;create;(String,Properties);;Argument[0];jdbc-url",
|
||||
"org.jdbi.v3.core;Jdbi;false;create;(String,String,String);;Argument[0];jdbc-url",
|
||||
"org.jdbi.v3.core;Jdbi;false;open;(String);;Argument[0];jdbc-url",
|
||||
"org.jdbi.v3.core;Jdbi;false;open;(String,Properties);;Argument[0];jdbc-url",
|
||||
"org.jdbi.v3.core;Jdbi;false;open;(String,String,String);;Argument[0];jdbc-url"
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -37,3 +37,17 @@ private class SqlSinkCsv extends SinkModelCsv {
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
private class SsrfSinkCsv extends SinkModelCsv {
|
||||
override predicate row(string row) {
|
||||
row =
|
||||
[
|
||||
//"package;type;overrides;name;signature;ext;spec;kind"
|
||||
"org.springframework.boot.jdbc;DataSourceBuilder;false;url;(String);;Argument[0];jdbc-url",
|
||||
"org.springframework.jdbc.datasource;AbstractDriverBasedDataSource;false;setUrl;(String);;Argument[0];jdbc-url",
|
||||
"org.springframework.jdbc.datasource;DriverManagerDataSource;false;DriverManagerDataSource;(String);;Argument[0];jdbc-url",
|
||||
"org.springframework.jdbc.datasource;DriverManagerDataSource;false;DriverManagerDataSource;(String,String,String);;Argument[0];jdbc-url",
|
||||
"org.springframework.jdbc.datasource;DriverManagerDataSource;false;DriverManagerDataSource;(String,Properties);;Argument[0];jdbc-url"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import semmle.code.java.frameworks.spring.Spring
|
||||
import semmle.code.java.frameworks.JaxWS
|
||||
import semmle.code.java.frameworks.javase.Http
|
||||
import semmle.code.java.dataflow.DataFlow
|
||||
import semmle.code.java.frameworks.Properties
|
||||
private import semmle.code.java.dataflow.StringPrefixes
|
||||
private import semmle.code.java.dataflow.ExternalFlow
|
||||
|
||||
@@ -33,6 +34,20 @@ private class DefaultRequestForgeryAdditionalTaintStep extends RequestForgeryAdd
|
||||
}
|
||||
}
|
||||
|
||||
private class TypePropertiesRequestForgeryAdditionalTaintStep extends RequestForgeryAdditionalTaintStep {
|
||||
override predicate propagatesTaint(DataFlow::Node pred, DataFlow::Node succ) {
|
||||
exists(MethodAccess ma |
|
||||
// Properties props = new Properties();
|
||||
// props.setProperty("jdbcUrl", tainted);
|
||||
// Propagate tainted value to the qualifier `props`
|
||||
ma.getMethod() instanceof PropertiesSetPropertyMethod and
|
||||
ma.getArgument(0).(CompileTimeConstantExpr).getStringValue() = "jdbcUrl" and
|
||||
pred.asExpr() = ma.getArgument(1) and
|
||||
succ.asExpr() = ma.getQualifier()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** A data flow sink for server-side request forgery (SSRF) vulnerabilities. */
|
||||
abstract class RequestForgerySink extends DataFlow::Node { }
|
||||
|
||||
@@ -40,6 +55,10 @@ private class UrlOpenSinkAsRequestForgerySink extends RequestForgerySink {
|
||||
UrlOpenSinkAsRequestForgerySink() { sinkNode(this, "open-url") }
|
||||
}
|
||||
|
||||
private class JdbcUrlSinkAsRequestForgerySink extends RequestForgerySink {
|
||||
JdbcUrlSinkAsRequestForgerySink() { sinkNode(this, "jdbc-url") }
|
||||
}
|
||||
|
||||
/** A sanitizer for request forgery vulnerabilities. */
|
||||
abstract class RequestForgerySanitizer extends DataFlow::Node { }
|
||||
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
/** Provides configurations for sensitive logging queries. */
|
||||
|
||||
import java
|
||||
import semmle.code.java.dataflow.ExternalFlow
|
||||
import semmle.code.java.dataflow.TaintTracking
|
||||
import semmle.code.java.security.SensitiveActions
|
||||
import DataFlow
|
||||
|
||||
/** A variable that may hold sensitive information, judging by its name. * */
|
||||
class CredentialExpr extends Expr {
|
||||
CredentialExpr() {
|
||||
exists(Variable v | this = v.getAnAccess() |
|
||||
v.getName().regexpMatch([getCommonSensitiveInfoRegex(), "(?i).*(username).*"]) and
|
||||
not v.isFinal()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** A data-flow configuration for identifying potentially-sensitive data flowing to a log output. */
|
||||
class SensitiveLoggerConfiguration extends TaintTracking::Configuration {
|
||||
SensitiveLoggerConfiguration() { this = "SensitiveLoggerConfiguration" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) { source.asExpr() instanceof CredentialExpr }
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) { sinkNode(sink, "logging") }
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
## 0.0.11
|
||||
|
||||
## 0.0.10
|
||||
|
||||
### Breaking Changes
|
||||
|
||||
@@ -14,5 +14,6 @@ public static void main(String[] args) {
|
||||
String password = "Pass@0rd";
|
||||
|
||||
// GOOD: user password is never written to debug log
|
||||
logger.debug("User password changed")
|
||||
}
|
||||
}
|
||||
20
java/ql/src/Security/CWE/CWE-532/SensitiveInfoLog.ql
Normal file
20
java/ql/src/Security/CWE/CWE-532/SensitiveInfoLog.ql
Normal file
@@ -0,0 +1,20 @@
|
||||
/**
|
||||
* @name Insertion of sensitive information into log files
|
||||
* @description Writing sensitive information to log files can allow that
|
||||
* information to be leaked to an attacker more easily.
|
||||
* @kind path-problem
|
||||
* @problem.severity warning
|
||||
* @precision medium
|
||||
* @id java/sensitive-log
|
||||
* @tags security
|
||||
* external/cwe/cwe-532
|
||||
*/
|
||||
|
||||
import java
|
||||
import semmle.code.java.security.SensitiveLoggingQuery
|
||||
import PathGraph
|
||||
|
||||
from SensitiveLoggerConfiguration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
|
||||
where cfg.hasFlowPath(source, sink)
|
||||
select sink.getNode(), source, sink, "This $@ is written to a log file.", source.getNode(),
|
||||
"potentially sensitive information"
|
||||
4
java/ql/src/change-notes/2022-03-11-sensitive-logging.md
Normal file
4
java/ql/src/change-notes/2022-03-11-sensitive-logging.md
Normal file
@@ -0,0 +1,4 @@
|
||||
---
|
||||
category: newQuery
|
||||
---
|
||||
* The query "Insertion of sensitive information into log files" (`java/sensitive-logging`) has been promoted from experimental to the main query pack. This query was originally [submitted as an experimental query by @luchua-bc](https://github.com/github/codeql/pull/3090).
|
||||
1
java/ql/src/change-notes/released/0.0.11.md
Normal file
1
java/ql/src/change-notes/released/0.0.11.md
Normal file
@@ -0,0 +1 @@
|
||||
## 0.0.11
|
||||
@@ -1,2 +1,2 @@
|
||||
---
|
||||
lastReleaseVersion: 0.0.10
|
||||
lastReleaseVersion: 0.0.11
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
/**
|
||||
* @name Insertion of sensitive information into log files
|
||||
* @description Writing sensitive information to log files can give valuable
|
||||
* guidance to an attacker or expose sensitive user information.
|
||||
* @kind path-problem
|
||||
* @problem.severity warning
|
||||
* @precision medium
|
||||
* @id java/sensitiveinfo-in-logfile
|
||||
* @tags security
|
||||
* external/cwe/cwe-532
|
||||
*/
|
||||
|
||||
import java
|
||||
import semmle.code.java.dataflow.ExternalFlow
|
||||
import semmle.code.java.dataflow.TaintTracking
|
||||
import semmle.code.java.security.SensitiveActions
|
||||
import DataFlow
|
||||
import PathGraph
|
||||
|
||||
/**
|
||||
* Gets a regular expression for matching names of variables that indicate the value being held may contain sensitive information
|
||||
*/
|
||||
private string getACredentialRegex() { result = "(?i)(.*username|url).*" }
|
||||
|
||||
/** Variable keeps sensitive information judging by its name * */
|
||||
class CredentialExpr extends Expr {
|
||||
CredentialExpr() {
|
||||
exists(Variable v | this = v.getAnAccess() |
|
||||
v.getName().regexpMatch([getCommonSensitiveInfoRegex(), getACredentialRegex()])
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
class LoggerConfiguration extends DataFlow::Configuration {
|
||||
LoggerConfiguration() { this = "Logger Configuration" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source) { source.asExpr() instanceof CredentialExpr }
|
||||
|
||||
override predicate isSink(DataFlow::Node sink) { sinkNode(sink, "logging") }
|
||||
|
||||
override predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
|
||||
TaintTracking::localTaintStep(node1, node2)
|
||||
}
|
||||
}
|
||||
|
||||
from LoggerConfiguration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
|
||||
where cfg.hasFlowPath(source, sink)
|
||||
select sink.getNode(), source, sink, "Outputting $@ to log.", source.getNode(),
|
||||
"sensitive information"
|
||||
@@ -1,5 +1,5 @@
|
||||
name: codeql/java-queries
|
||||
version: 0.0.11-dev
|
||||
version: 0.0.12-dev
|
||||
groups:
|
||||
- java
|
||||
- queries
|
||||
|
||||
@@ -4,23 +4,12 @@
|
||||
* @id java/utils/model-generator/summary-models
|
||||
*/
|
||||
|
||||
import java
|
||||
import semmle.code.java.dataflow.TaintTracking
|
||||
import semmle.code.java.dataflow.internal.DataFlowImplCommon
|
||||
import semmle.code.java.dataflow.internal.DataFlowNodes
|
||||
import semmle.code.java.dataflow.internal.DataFlowPrivate
|
||||
import semmle.code.java.dataflow.InstanceAccess
|
||||
import ModelGeneratorUtils
|
||||
|
||||
string captureFlow(TargetApi api) {
|
||||
result = captureQualifierFlow(api) or
|
||||
result = captureThroughFlow(api)
|
||||
}
|
||||
private import CaptureSummaryModels
|
||||
|
||||
/**
|
||||
* Capture fluent APIs that return `this`.
|
||||
* Example of a fluent API:
|
||||
* ```
|
||||
* ```java
|
||||
* public class Foo {
|
||||
* public Foo someAPI() {
|
||||
* // some side-effect
|
||||
@@ -28,75 +17,14 @@ string captureFlow(TargetApi api) {
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
string captureQualifierFlow(TargetApi api) {
|
||||
exists(ReturnStmt rtn |
|
||||
rtn.getEnclosingCallable() = api and
|
||||
rtn.getResult().(ThisAccess).isOwnInstanceAccess()
|
||||
) and
|
||||
result = asValueModel(api, "Argument[-1]", "ReturnValue")
|
||||
}
|
||||
|
||||
class TaintRead extends DataFlow::FlowState {
|
||||
TaintRead() { this = "TaintRead" }
|
||||
}
|
||||
|
||||
class TaintStore extends DataFlow::FlowState {
|
||||
TaintStore() { this = "TaintStore" }
|
||||
}
|
||||
|
||||
class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
ThroughFlowConfig() { this = "ThroughFlowConfig" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source, DataFlow::FlowState state) {
|
||||
source instanceof DataFlow::ParameterNode and
|
||||
source.getEnclosingCallable() instanceof TargetApi and
|
||||
state instanceof TaintRead
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink, DataFlow::FlowState state) {
|
||||
sink instanceof ReturnNodeExt and
|
||||
not sink.(ReturnNode).asExpr().(ThisAccess).isOwnInstanceAccess() and
|
||||
not exists(captureQualifierFlow(sink.asExpr().getEnclosingCallable())) and
|
||||
(state instanceof TaintRead or state instanceof TaintStore)
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(
|
||||
DataFlow::Node node1, DataFlow::FlowState state1, DataFlow::Node node2,
|
||||
DataFlow::FlowState state2
|
||||
) {
|
||||
exists(TypedContent tc |
|
||||
store(node1, tc, node2, _) and
|
||||
isRelevantContent(tc.getContent()) and
|
||||
(state1 instanceof TaintRead or state1 instanceof TaintStore) and
|
||||
state2 instanceof TaintStore
|
||||
)
|
||||
or
|
||||
exists(DataFlow::Content c |
|
||||
readStep(node1, c, node2) and
|
||||
isRelevantContent(c) and
|
||||
state1 instanceof TaintRead and
|
||||
state2 instanceof TaintRead
|
||||
)
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node n) {
|
||||
exists(Type t | t = n.getType() and not isRelevantType(t))
|
||||
}
|
||||
|
||||
override DataFlow::FlowFeature getAFeature() {
|
||||
result instanceof DataFlow::FeatureEqualSourceSinkCallContext
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* Capture APIs that transfer taint from an input parameter to an output return
|
||||
* value or parameter.
|
||||
* Allows a sequence of read steps followed by a sequence of store steps.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* ```
|
||||
* ```java
|
||||
* public class Foo {
|
||||
* private String tainted;
|
||||
*
|
||||
@@ -109,13 +37,13 @@ class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* Captured Models:
|
||||
* ```
|
||||
* p;Foo;true;returnsTainted;;Argument[-1];ReturnValue;taint
|
||||
* p;Foo;true;putsTaintIntoParameter;(List);Argument[-1];Argument[0];taint
|
||||
* ```
|
||||
*
|
||||
* ```
|
||||
* ```java
|
||||
* public class Foo {
|
||||
* private String tainted;
|
||||
* public void doSomething(String input) {
|
||||
@@ -123,9 +51,9 @@ class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* `p;Foo;true;doSomething;(String);Argument[0];Argument[-1];taint`
|
||||
* ```p;Foo;true;doSomething;(String);Argument[0];Argument[-1];taint```
|
||||
*
|
||||
* ```
|
||||
* ```java
|
||||
* public class Foo {
|
||||
* public String returnData(String tainted) {
|
||||
* return tainted.substring(0,10)
|
||||
@@ -133,9 +61,9 @@ class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* `p;Foo;true;returnData;;Argument[0];ReturnValue;taint`
|
||||
* ```p;Foo;true;returnData;;Argument[0];ReturnValue;taint```
|
||||
*
|
||||
* ```
|
||||
* ```java
|
||||
* public class Foo {
|
||||
* public void addToList(String tainted, List<String> foo) {
|
||||
* foo.add(tainted);
|
||||
@@ -143,20 +71,11 @@ class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
* }
|
||||
* ```
|
||||
* Captured Model:
|
||||
* `p;Foo;true;addToList;;Argument[0];Argument[1];taint`
|
||||
* ```p;Foo;true;addToList;;Argument[0];Argument[1];taint```
|
||||
*/
|
||||
string captureThroughFlow(TargetApi api) {
|
||||
exists(
|
||||
ThroughFlowConfig config, DataFlow::ParameterNode p, ReturnNodeExt returnNodeExt, string input,
|
||||
string output
|
||||
|
|
||||
config.hasFlow(p, returnNodeExt) and
|
||||
returnNodeExt.getEnclosingCallable() = api and
|
||||
input = parameterNodeAsInput(p) and
|
||||
output = returnNodeAsOutput(returnNodeExt) and
|
||||
input != output and
|
||||
result = asTaintModel(api, input, output)
|
||||
)
|
||||
string captureFlow(TargetApi api) {
|
||||
result = captureQualifierFlow(api) or
|
||||
result = captureThroughFlow(api)
|
||||
}
|
||||
|
||||
from TargetApi api, string flow
|
||||
|
||||
98
java/ql/src/utils/model-generator/CaptureSummaryModels.qll
Normal file
98
java/ql/src/utils/model-generator/CaptureSummaryModels.qll
Normal file
@@ -0,0 +1,98 @@
|
||||
/**
|
||||
* Provides classes and predicates related to capturing summary models
|
||||
* of the Standard or a 3rd party library.
|
||||
*/
|
||||
|
||||
import CaptureSummaryModelsSpecific
|
||||
|
||||
/**
|
||||
* Gets the summary model of `api`, if it follows the `fluent` programming pattern (returns `this`).
|
||||
*/
|
||||
string captureQualifierFlow(TargetApi api) {
|
||||
exists(ReturnNodeExt ret |
|
||||
api = returnNodeEnclosingCallable(ret) and
|
||||
isOwnInstanceAccessNode(ret)
|
||||
) and
|
||||
result = asValueModel(api, qualifierString(), "ReturnValue")
|
||||
}
|
||||
|
||||
/**
|
||||
* A FlowState representing a tainted read.
|
||||
*/
|
||||
private class TaintRead extends DataFlow::FlowState {
|
||||
TaintRead() { this = "TaintRead" }
|
||||
}
|
||||
|
||||
/**
|
||||
* A FlowState representing a tainted write.
|
||||
*/
|
||||
private class TaintStore extends DataFlow::FlowState {
|
||||
TaintStore() { this = "TaintStore" }
|
||||
}
|
||||
|
||||
/**
|
||||
* A TaintTracking Configuration used for tracking flow through APIs.
|
||||
* The sources are the parameters of an API and the sinks are the return values (excluding `this`) and parameters.
|
||||
*
|
||||
* This can be used to generate Flow summaries for APIs from parameter to return.
|
||||
*/
|
||||
class ThroughFlowConfig extends TaintTracking::Configuration {
|
||||
ThroughFlowConfig() { this = "ThroughFlowConfig" }
|
||||
|
||||
override predicate isSource(DataFlow::Node source, DataFlow::FlowState state) {
|
||||
source instanceof DataFlow::ParameterNode and
|
||||
source.getEnclosingCallable() instanceof TargetApi and
|
||||
state instanceof TaintRead
|
||||
}
|
||||
|
||||
override predicate isSink(DataFlow::Node sink, DataFlow::FlowState state) {
|
||||
sink instanceof ReturnNodeExt and
|
||||
not isOwnInstanceAccessNode(sink) and
|
||||
not exists(captureQualifierFlow(sink.asExpr().getEnclosingCallable())) and
|
||||
(state instanceof TaintRead or state instanceof TaintStore)
|
||||
}
|
||||
|
||||
override predicate isAdditionalTaintStep(
|
||||
DataFlow::Node node1, DataFlow::FlowState state1, DataFlow::Node node2,
|
||||
DataFlow::FlowState state2
|
||||
) {
|
||||
exists(TypedContent tc |
|
||||
store(node1, tc, node2, _) and
|
||||
isRelevantContent(tc.getContent()) and
|
||||
(state1 instanceof TaintRead or state1 instanceof TaintStore) and
|
||||
state2 instanceof TaintStore
|
||||
)
|
||||
or
|
||||
exists(DataFlow::Content c |
|
||||
readStep(node1, c, node2) and
|
||||
isRelevantContent(c) and
|
||||
state1 instanceof TaintRead and
|
||||
state2 instanceof TaintRead
|
||||
)
|
||||
}
|
||||
|
||||
override predicate isSanitizer(DataFlow::Node n) {
|
||||
exists(Type t | t = n.getType() and not isRelevantType(t))
|
||||
}
|
||||
|
||||
override DataFlow::FlowFeature getAFeature() {
|
||||
result instanceof DataFlow::FeatureEqualSourceSinkCallContext
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the summary model(s) of `api`, if there is flow from parameters to return value or parameter.
|
||||
*/
|
||||
string captureThroughFlow(TargetApi api) {
|
||||
exists(
|
||||
ThroughFlowConfig config, DataFlow::ParameterNode p, ReturnNodeExt returnNodeExt, string input,
|
||||
string output
|
||||
|
|
||||
config.hasFlow(p, returnNodeExt) and
|
||||
returnNodeExt.getEnclosingCallable() = api and
|
||||
input = parameterNodeAsInput(p) and
|
||||
output = returnNodeAsOutput(returnNodeExt) and
|
||||
input != output and
|
||||
result = asTaintModel(api, input, output)
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
* Provides predicates related to capturing summary models of the Standard or a 3rd party library.
|
||||
*/
|
||||
|
||||
import java
|
||||
import semmle.code.java.dataflow.TaintTracking
|
||||
import semmle.code.java.dataflow.internal.DataFlowImplCommon
|
||||
import semmle.code.java.dataflow.internal.DataFlowNodes
|
||||
import semmle.code.java.dataflow.internal.DataFlowPrivate
|
||||
import semmle.code.java.dataflow.InstanceAccess
|
||||
import ModelGeneratorUtils
|
||||
|
||||
Callable returnNodeEnclosingCallable(ReturnNodeExt ret) {
|
||||
result = getNodeEnclosingCallable(ret).asCallable()
|
||||
}
|
||||
|
||||
predicate isOwnInstanceAccessNode(ReturnNode node) {
|
||||
node.asExpr().(ThisAccess).isOwnInstanceAccess()
|
||||
}
|
||||
|
||||
string qualifierString() { result = "Argument[-1]" }
|
||||
@@ -1,143 +1,8 @@
|
||||
import java
|
||||
private import semmle.code.java.dataflow.ExternalFlow
|
||||
private import semmle.code.java.dataflow.internal.ContainerFlow
|
||||
private import semmle.code.java.dataflow.internal.DataFlowImplCommon
|
||||
private import semmle.code.java.dataflow.DataFlow
|
||||
private import semmle.code.java.dataflow.internal.DataFlowPrivate
|
||||
|
||||
Method superImpl(Method m) {
|
||||
result = m.getAnOverride() and
|
||||
not exists(result.getAnOverride()) and
|
||||
not m instanceof ToStringMethod
|
||||
}
|
||||
|
||||
class TargetApi extends Callable {
|
||||
TargetApi() {
|
||||
this.isPublic() and
|
||||
this.fromSource() and
|
||||
(
|
||||
this.getDeclaringType().isPublic() or
|
||||
superImpl(this).getDeclaringType().isPublic()
|
||||
) and
|
||||
isRelevantForModels(this)
|
||||
}
|
||||
}
|
||||
|
||||
/** DEPRECATED: Alias for TargetApi */
|
||||
deprecated class TargetAPI = TargetApi;
|
||||
|
||||
private string isExtensible(RefType ref) {
|
||||
if ref.isFinal() then result = "false" else result = "true"
|
||||
}
|
||||
|
||||
predicate isRelevantForModels(Callable api) {
|
||||
not isInTestFile(api.getCompilationUnit().getFile()) and
|
||||
not isJdkInternal(api.getCompilationUnit()) and
|
||||
not api instanceof MainMethod
|
||||
}
|
||||
|
||||
private predicate isInTestFile(File file) {
|
||||
file.getAbsolutePath().matches("%src/test/%") or
|
||||
file.getAbsolutePath().matches("%/guava-tests/%") or
|
||||
file.getAbsolutePath().matches("%/guava-testlib/%")
|
||||
}
|
||||
|
||||
private predicate isJdkInternal(CompilationUnit cu) {
|
||||
cu.getPackage().getName().matches("org.graalvm%") or
|
||||
cu.getPackage().getName().matches("com.sun%") or
|
||||
cu.getPackage().getName().matches("javax.swing%") or
|
||||
cu.getPackage().getName().matches("java.awt%") or
|
||||
cu.getPackage().getName().matches("sun%") or
|
||||
cu.getPackage().getName().matches("jdk.%") or
|
||||
cu.getPackage().getName().matches("java2d.%") or
|
||||
cu.getPackage().getName().matches("build.tools.%") or
|
||||
cu.getPackage().getName().matches("propertiesparser.%") or
|
||||
cu.getPackage().getName().matches("org.jcp.%") or
|
||||
cu.getPackage().getName().matches("org.w3c.%") or
|
||||
cu.getPackage().getName().matches("org.ietf.jgss.%") or
|
||||
cu.getPackage().getName().matches("org.xml.sax%") or
|
||||
cu.getPackage().getName() = "compileproperties" or
|
||||
cu.getPackage().getName() = "netscape.javascript" or
|
||||
cu.getPackage().getName() = ""
|
||||
}
|
||||
|
||||
bindingset[input, output]
|
||||
string asTaintModel(TargetApi api, string input, string output) {
|
||||
result = asSummaryModel(api, input, output, "taint")
|
||||
}
|
||||
|
||||
bindingset[input, output]
|
||||
string asValueModel(TargetApi api, string input, string output) {
|
||||
result = asSummaryModel(api, input, output, "value")
|
||||
}
|
||||
|
||||
bindingset[input, output, kind]
|
||||
string asSummaryModel(TargetApi api, string input, string output, string kind) {
|
||||
result =
|
||||
asPartialModel(api) + input + ";" //
|
||||
+ output + ";" //
|
||||
+ kind
|
||||
}
|
||||
|
||||
bindingset[input, kind]
|
||||
string asSinkModel(TargetApi api, string input, string kind) {
|
||||
result = asPartialModel(api) + input + ";" + kind
|
||||
}
|
||||
|
||||
bindingset[output, kind]
|
||||
string asSourceModel(TargetApi api, string output, string kind) {
|
||||
result = asPartialModel(api) + output + ";" + kind
|
||||
}
|
||||
import ModelGeneratorUtilsSpecific
|
||||
|
||||
/**
|
||||
* Computes the first 6 columns for CSV rows.
|
||||
* Holds if data can flow from `node1` to `node2` either via a read or a write of an intermediate field `f`.
|
||||
*/
|
||||
private string asPartialModel(TargetApi api) {
|
||||
result =
|
||||
typeAsSummaryModel(api) + ";" //
|
||||
+ isExtensible(bestTypeForModel(api)) + ";" //
|
||||
+ api.getName() + ";" //
|
||||
+ paramsString(api) + ";" //
|
||||
+ /* ext + */ ";" //
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the appropriate type name for the model. Either the type
|
||||
* declaring the method or the supertype introducing the method.
|
||||
*/
|
||||
private string typeAsSummaryModel(TargetApi api) { result = typeAsModel(bestTypeForModel(api)) }
|
||||
|
||||
private RefType bestTypeForModel(TargetApi api) {
|
||||
if exists(superImpl(api))
|
||||
then superImpl(api).fromSource() and result = superImpl(api).getDeclaringType()
|
||||
else result = api.getDeclaringType()
|
||||
}
|
||||
|
||||
private string typeAsModel(RefType type) {
|
||||
result = type.getCompilationUnit().getPackage().getName() + ";" + type.nestedName()
|
||||
}
|
||||
|
||||
predicate isRelevantType(Type t) {
|
||||
not t instanceof TypeClass and
|
||||
not t instanceof EnumType and
|
||||
not t instanceof PrimitiveType and
|
||||
not t instanceof BoxedType and
|
||||
not t.(RefType).getAnAncestor().hasQualifiedName("java.lang", "Number") and
|
||||
not t.(RefType).getAnAncestor().hasQualifiedName("java.nio.charset", "Charset") and
|
||||
(
|
||||
not t.(Array).getElementType() instanceof PrimitiveType or
|
||||
isPrimitiveTypeUsedForBulkData(t.(Array).getElementType())
|
||||
) and
|
||||
(
|
||||
not t.(Array).getElementType() instanceof BoxedType or
|
||||
isPrimitiveTypeUsedForBulkData(t.(Array).getElementType())
|
||||
) and
|
||||
(
|
||||
not t.(CollectionType).getElementType() instanceof BoxedType or
|
||||
isPrimitiveTypeUsedForBulkData(t.(CollectionType).getElementType())
|
||||
)
|
||||
}
|
||||
|
||||
predicate isRelevantTaintStep(DataFlow::Node node1, DataFlow::Node node2) {
|
||||
exists(DataFlow::Content f |
|
||||
readStep(node1, f, node2) and
|
||||
@@ -149,50 +14,58 @@ predicate isRelevantTaintStep(DataFlow::Node node1, DataFlow::Node node2) {
|
||||
else any()
|
||||
)
|
||||
or
|
||||
exists(DataFlow::Content f | storeStep(node1, f, node2) |
|
||||
f instanceof DataFlow::ArrayContent or
|
||||
f instanceof DataFlow::CollectionContent or
|
||||
f instanceof DataFlow::MapKeyContent or
|
||||
f instanceof DataFlow::MapValueContent
|
||||
)
|
||||
exists(DataFlow::Content f | storeStep(node1, f, node2) | containerContent(f))
|
||||
}
|
||||
|
||||
predicate isRelevantContent(DataFlow::Content f) {
|
||||
isRelevantType(f.(DataFlow::FieldContent).getField().getType()) or
|
||||
f instanceof DataFlow::ArrayContent or
|
||||
f instanceof DataFlow::CollectionContent or
|
||||
f instanceof DataFlow::MapKeyContent or
|
||||
f instanceof DataFlow::MapValueContent
|
||||
/**
|
||||
* Holds if content `c` is either a field or synthetic field of a relevant type
|
||||
* or a container like content.
|
||||
*/
|
||||
predicate isRelevantContent(DataFlow::Content c) {
|
||||
isRelevantType(c.(DataFlow::FieldContent).getField().getType()) or
|
||||
isRelevantType(c.(DataFlow::SyntheticFieldContent).getField().getType()) or
|
||||
containerContent(c)
|
||||
}
|
||||
|
||||
string parameterNodeAsInput(DataFlow::ParameterNode p) {
|
||||
result = parameterAccess(p.asParameter())
|
||||
or
|
||||
result = "Argument[-1]" and p instanceof DataFlow::InstanceParameterNode
|
||||
/**
|
||||
* Gets the summary model for `api` with `input`, `output` and `kind`.
|
||||
*/
|
||||
bindingset[input, output, kind]
|
||||
string asSummaryModel(TargetApi api, string input, string output, string kind) {
|
||||
result =
|
||||
asPartialModel(api) + input + ";" //
|
||||
+ output + ";" //
|
||||
+ kind
|
||||
}
|
||||
|
||||
string returnNodeAsOutput(ReturnNodeExt node) {
|
||||
if node.getKind() instanceof ValueReturnKind
|
||||
then result = "ReturnValue"
|
||||
else
|
||||
exists(int pos | pos = node.getKind().(ParamUpdateReturnKind).getPosition() |
|
||||
result = parameterAccess(node.getEnclosingCallable().getParameter(pos))
|
||||
or
|
||||
result = "Argument[-1]" and pos = -1
|
||||
)
|
||||
/**
|
||||
* Gets the value summary model for `api` with `input` and `output`.
|
||||
*/
|
||||
bindingset[input, output]
|
||||
string asValueModel(TargetApi api, string input, string output) {
|
||||
result = asSummaryModel(api, input, output, "value")
|
||||
}
|
||||
|
||||
string parameterAccess(Parameter p) {
|
||||
if
|
||||
p.getType() instanceof Array and
|
||||
not isPrimitiveTypeUsedForBulkData(p.getType().(Array).getElementType())
|
||||
then result = "Argument[" + p.getPosition() + "].ArrayElement"
|
||||
else
|
||||
if p.getType() instanceof ContainerType
|
||||
then result = "Argument[" + p.getPosition() + "].Element"
|
||||
else result = "Argument[" + p.getPosition() + "]"
|
||||
/**
|
||||
* Gets the taint summary model for `api` with `input` and `output`.
|
||||
*/
|
||||
bindingset[input, output]
|
||||
string asTaintModel(TargetApi api, string input, string output) {
|
||||
result = asSummaryModel(api, input, output, "taint")
|
||||
}
|
||||
|
||||
predicate isPrimitiveTypeUsedForBulkData(Type t) {
|
||||
t.getName().regexpMatch("byte|char|Byte|Character")
|
||||
/**
|
||||
* Gets the sink model for `api` with `input` and `kind`.
|
||||
*/
|
||||
bindingset[input, kind]
|
||||
string asSinkModel(TargetApi api, string input, string kind) {
|
||||
result = asPartialModel(api) + input + ";" + kind
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the source model for `api` with `output` and `kind`.
|
||||
*/
|
||||
bindingset[output, kind]
|
||||
string asSourceModel(TargetApi api, string output, string kind) {
|
||||
result = asPartialModel(api) + output + ";" + kind
|
||||
}
|
||||
|
||||
@@ -0,0 +1,156 @@
|
||||
import java
|
||||
import semmle.code.java.dataflow.internal.DataFlowPrivate
|
||||
import semmle.code.java.dataflow.DataFlow
|
||||
private import semmle.code.java.dataflow.ExternalFlow
|
||||
private import semmle.code.java.dataflow.internal.ContainerFlow
|
||||
private import semmle.code.java.dataflow.internal.DataFlowImplCommon
|
||||
|
||||
Method superImpl(Method m) {
|
||||
result = m.getAnOverride() and
|
||||
not exists(result.getAnOverride()) and
|
||||
not m instanceof ToStringMethod
|
||||
}
|
||||
|
||||
private predicate isInTestFile(File file) {
|
||||
file.getAbsolutePath().matches("%src/test/%") or
|
||||
file.getAbsolutePath().matches("%/guava-tests/%") or
|
||||
file.getAbsolutePath().matches("%/guava-testlib/%")
|
||||
}
|
||||
|
||||
private predicate isJdkInternal(CompilationUnit cu) {
|
||||
cu.getPackage().getName().matches("org.graalvm%") or
|
||||
cu.getPackage().getName().matches("com.sun%") or
|
||||
cu.getPackage().getName().matches("javax.swing%") or
|
||||
cu.getPackage().getName().matches("java.awt%") or
|
||||
cu.getPackage().getName().matches("sun%") or
|
||||
cu.getPackage().getName().matches("jdk.%") or
|
||||
cu.getPackage().getName().matches("java2d.%") or
|
||||
cu.getPackage().getName().matches("build.tools.%") or
|
||||
cu.getPackage().getName().matches("propertiesparser.%") or
|
||||
cu.getPackage().getName().matches("org.jcp.%") or
|
||||
cu.getPackage().getName().matches("org.w3c.%") or
|
||||
cu.getPackage().getName().matches("org.ietf.jgss.%") or
|
||||
cu.getPackage().getName().matches("org.xml.sax%") or
|
||||
cu.getPackage().getName() = "compileproperties" or
|
||||
cu.getPackage().getName() = "netscape.javascript" or
|
||||
cu.getPackage().getName() = ""
|
||||
}
|
||||
|
||||
predicate isRelevantForModels(Callable api) {
|
||||
not isInTestFile(api.getCompilationUnit().getFile()) and
|
||||
not isJdkInternal(api.getCompilationUnit()) and
|
||||
not api instanceof MainMethod
|
||||
}
|
||||
|
||||
/**
|
||||
* A class of Callables that are relevant for generating summary, source and sinks models for.
|
||||
*
|
||||
* In the Standard library and 3rd party libraries it the Callables that can be called
|
||||
* from outside the library itself.
|
||||
*/
|
||||
class TargetApi extends Callable {
|
||||
TargetApi() {
|
||||
this.isPublic() and
|
||||
this.fromSource() and
|
||||
(
|
||||
this.getDeclaringType().isPublic() or
|
||||
superImpl(this).getDeclaringType().isPublic()
|
||||
) and
|
||||
isRelevantForModels(this)
|
||||
}
|
||||
}
|
||||
|
||||
private string isExtensible(RefType ref) {
|
||||
if ref.isFinal() then result = "false" else result = "true"
|
||||
}
|
||||
|
||||
private string typeAsModel(RefType type) {
|
||||
result = type.getCompilationUnit().getPackage().getName() + ";" + type.nestedName()
|
||||
}
|
||||
|
||||
private RefType bestTypeForModel(TargetApi api) {
|
||||
if exists(superImpl(api))
|
||||
then superImpl(api).fromSource() and result = superImpl(api).getDeclaringType()
|
||||
else result = api.getDeclaringType()
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the appropriate type name for the model. Either the type
|
||||
* declaring the method or the supertype introducing the method.
|
||||
*/
|
||||
private string typeAsSummaryModel(TargetApi api) { result = typeAsModel(bestTypeForModel(api)) }
|
||||
|
||||
/**
|
||||
* Computes the first 6 columns for CSV rows.
|
||||
*/
|
||||
string asPartialModel(TargetApi api) {
|
||||
result =
|
||||
typeAsSummaryModel(api) + ";" //
|
||||
+ isExtensible(bestTypeForModel(api)) + ";" //
|
||||
+ api.getName() + ";" //
|
||||
+ paramsString(api) + ";" //
|
||||
+ /* ext + */ ";" //
|
||||
}
|
||||
|
||||
private predicate isPrimitiveTypeUsedForBulkData(Type t) {
|
||||
t.getName().regexpMatch("byte|char|Byte|Character")
|
||||
}
|
||||
|
||||
/**
|
||||
* Holds for type `t` for fields that are relevant as an intermediate
|
||||
* read or write step in the data flow analysis.
|
||||
*/
|
||||
predicate isRelevantType(Type t) {
|
||||
not t instanceof TypeClass and
|
||||
not t instanceof EnumType and
|
||||
not t instanceof PrimitiveType and
|
||||
not t instanceof BoxedType and
|
||||
not t.(RefType).getAnAncestor().hasQualifiedName("java.lang", "Number") and
|
||||
not t.(RefType).getAnAncestor().hasQualifiedName("java.nio.charset", "Charset") and
|
||||
(
|
||||
not t.(Array).getElementType() instanceof PrimitiveType or
|
||||
isPrimitiveTypeUsedForBulkData(t.(Array).getElementType())
|
||||
) and
|
||||
(
|
||||
not t.(Array).getElementType() instanceof BoxedType or
|
||||
isPrimitiveTypeUsedForBulkData(t.(Array).getElementType())
|
||||
) and
|
||||
(
|
||||
not t.(CollectionType).getElementType() instanceof BoxedType or
|
||||
isPrimitiveTypeUsedForBulkData(t.(CollectionType).getElementType())
|
||||
)
|
||||
}
|
||||
|
||||
private string parameterAccess(Parameter p) {
|
||||
if
|
||||
p.getType() instanceof Array and
|
||||
not isPrimitiveTypeUsedForBulkData(p.getType().(Array).getElementType())
|
||||
then result = "Argument[" + p.getPosition() + "].ArrayElement"
|
||||
else
|
||||
if p.getType() instanceof ContainerType
|
||||
then result = "Argument[" + p.getPosition() + "].Element"
|
||||
else result = "Argument[" + p.getPosition() + "]"
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the model string representation of the parameter node `p`.
|
||||
*/
|
||||
string parameterNodeAsInput(DataFlow::ParameterNode p) {
|
||||
result = parameterAccess(p.asParameter())
|
||||
or
|
||||
result = "Argument[-1]" and p instanceof DataFlow::InstanceParameterNode
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the model string represention of the the return node `node`.
|
||||
*/
|
||||
string returnNodeAsOutput(ReturnNodeExt node) {
|
||||
if node.getKind() instanceof ValueReturnKind
|
||||
then result = "ReturnValue"
|
||||
else
|
||||
exists(int pos | pos = node.getKind().(ParamUpdateReturnKind).getPosition() |
|
||||
result = parameterAccess(node.getEnclosingCallable().getParameter(pos))
|
||||
or
|
||||
result = "Argument[-1]" and pos = -1
|
||||
)
|
||||
}
|
||||
@@ -55,7 +55,7 @@ class JaxRsTest extends InlineExpectationsTest {
|
||||
or
|
||||
tag = "ResourceMethodOnResourceClass" and
|
||||
exists(JaxRsResourceMethod resourceMethod |
|
||||
resourceMethod = any(JaxRsResourceClass ResourceClass).getAResourceMethod()
|
||||
resourceMethod = any(JaxRsResourceClass resourceClass).getAResourceMethod()
|
||||
|
|
||||
resourceMethod.getLocation() = location and
|
||||
element = resourceMethod.toString() and
|
||||
|
||||
@@ -2,8 +2,8 @@ import semmle.code.java.Type
|
||||
|
||||
predicate typeIsInCU(Type tpe, CompilationUnit cu) { tpe.getCompilationUnit() = cu }
|
||||
|
||||
from Type tpe, CompilationUnit Ajava
|
||||
from Type tpe, CompilationUnit aJava
|
||||
where
|
||||
Ajava.hasName("A") and
|
||||
typeIsInCU(tpe, Ajava)
|
||||
aJava.hasName("A") and
|
||||
typeIsInCU(tpe, aJava)
|
||||
select tpe
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
import java
|
||||
import TestUtilities.InlineFlowTest
|
||||
import semmle.code.java.security.SensitiveLoggingQuery
|
||||
|
||||
class HasFlowTest extends InlineFlowTest {
|
||||
override DataFlow::Configuration getTaintFlowConfig() {
|
||||
result instanceof SensitiveLoggerConfiguration
|
||||
}
|
||||
|
||||
override DataFlow::Configuration getValueFlowConfig() { none() }
|
||||
}
|
||||
16
java/ql/test/query-tests/security/CWE-532/Test.java
Normal file
16
java/ql/test/query-tests/security/CWE-532/Test.java
Normal file
@@ -0,0 +1,16 @@
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
class Test {
|
||||
void test(String password) {
|
||||
Logger logger = null;
|
||||
|
||||
logger.info("User's password is: " + password); // $ hasTaintFlow
|
||||
}
|
||||
|
||||
void test2(String authToken) {
|
||||
Logger logger = null;
|
||||
|
||||
logger.error("Auth failed for: " + authToken); // $ hasTaintFlow
|
||||
}
|
||||
|
||||
}
|
||||
1
java/ql/test/query-tests/security/CWE-532/options
Normal file
1
java/ql/test/query-tests/security/CWE-532/options
Normal file
@@ -0,0 +1 @@
|
||||
//semmle-extractor-options: --javac-args -cp ${testdir}/../../../stubs/apache-log4j-1.2.17:${testdir}/../../../stubs/apache-log4j-2.14.1:${testdir}/../../../stubs/apache-commons-logging-1.2:${testdir}/../../../stubs/jboss-logging-3.4.2:${testdir}/../../../stubs/slf4j-2.0.0:${testdir}/../../../stubs/scijava-common-2.87.1:${testdir}/../../../stubs/flogger-0.7.1:${testdir}/../../../stubs/google-android-9.0.0
|
||||
91
java/ql/test/query-tests/security/CWE-918/JdbcUrlSSRF.java
Normal file
91
java/ql/test/query-tests/security/CWE-918/JdbcUrlSSRF.java
Normal file
@@ -0,0 +1,91 @@
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServlet;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.Driver;
|
||||
import java.sql.SQLException;
|
||||
import java.io.IOException;
|
||||
import com.zaxxer.hikari.HikariConfig;
|
||||
import com.zaxxer.hikari.HikariDataSource;
|
||||
import java.util.*;
|
||||
import org.springframework.jdbc.datasource.*;
|
||||
import org.jdbi.v3.core.Jdbi;
|
||||
import org.springframework.boot.jdbc.DataSourceBuilder;
|
||||
|
||||
public class JdbcUrlSSRF extends HttpServlet {
|
||||
|
||||
protected void doGet(HttpServletRequest request, HttpServletResponse response)
|
||||
throws ServletException, IOException {
|
||||
|
||||
String jdbcUrl = request.getParameter("jdbcUrl");
|
||||
Driver driver = new org.postgresql.Driver();
|
||||
DataSourceBuilder dsBuilder = new DataSourceBuilder();
|
||||
|
||||
try {
|
||||
driver.connect(jdbcUrl, null); // $ SSRF
|
||||
|
||||
DriverManager.getConnection(jdbcUrl); // $ SSRF
|
||||
DriverManager.getConnection(jdbcUrl, "user", "password"); // $ SSRF
|
||||
DriverManager.getConnection(jdbcUrl, null); // $ SSRF
|
||||
|
||||
dsBuilder.url(jdbcUrl); // $ SSRF
|
||||
}
|
||||
catch(SQLException e) {}
|
||||
}
|
||||
|
||||
protected void doPost(HttpServletRequest request, HttpServletResponse response)
|
||||
throws ServletException, IOException {
|
||||
|
||||
String jdbcUrl = request.getParameter("jdbcUrl");
|
||||
HikariConfig config = new HikariConfig();
|
||||
|
||||
config.setJdbcUrl(jdbcUrl); // $ SSRF
|
||||
config.setUsername("database_username");
|
||||
config.setPassword("database_password");
|
||||
|
||||
HikariDataSource ds = new HikariDataSource();
|
||||
ds.setJdbcUrl(jdbcUrl); // $ SSRF
|
||||
|
||||
Properties props = new Properties();
|
||||
props.setProperty("driverClassName", "org.postgresql.Driver");
|
||||
props.setProperty("jdbcUrl", jdbcUrl);
|
||||
|
||||
HikariConfig config2 = new HikariConfig(props); // $ SSRF
|
||||
}
|
||||
|
||||
protected void doPut(HttpServletRequest request, HttpServletResponse response)
|
||||
throws ServletException, IOException {
|
||||
|
||||
String jdbcUrl = request.getParameter("jdbcUrl");
|
||||
|
||||
DriverManagerDataSource dataSource = new DriverManagerDataSource();
|
||||
|
||||
dataSource.setDriverClassName("org.postgresql.Driver");
|
||||
dataSource.setUrl(jdbcUrl); // $ SSRF
|
||||
|
||||
DriverManagerDataSource dataSource2 = new DriverManagerDataSource(jdbcUrl); // $ SSRF
|
||||
dataSource2.setDriverClassName("org.postgresql.Driver");
|
||||
|
||||
DriverManagerDataSource dataSource3 = new DriverManagerDataSource(jdbcUrl, "user", "pass"); // $ SSRF
|
||||
dataSource3.setDriverClassName("org.postgresql.Driver");
|
||||
|
||||
DriverManagerDataSource dataSource4 = new DriverManagerDataSource(jdbcUrl, null); // $ SSRF
|
||||
dataSource4.setDriverClassName("org.postgresql.Driver");
|
||||
}
|
||||
|
||||
protected void doDelete(HttpServletRequest request, HttpServletResponse response)
|
||||
throws ServletException, IOException {
|
||||
|
||||
String jdbcUrl = request.getParameter("jdbcUrl");
|
||||
|
||||
Jdbi.create(jdbcUrl); // $ SSRF
|
||||
Jdbi.create(jdbcUrl, null); // $ SSRF
|
||||
Jdbi.create(jdbcUrl, "user", "pass"); // $ SSRF
|
||||
|
||||
Jdbi.open(jdbcUrl); // $ SSRF
|
||||
Jdbi.open(jdbcUrl, null); // $ SSRF
|
||||
Jdbi.open(jdbcUrl, "user", "pass"); // $ SSRF
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,2 +1,2 @@
|
||||
//semmle-extractor-options: --javac-args -source 11 -target 11 -cp ${testdir}/../../../stubs/springframework-5.3.8:${testdir}/../../../stubs/javax-ws-rs-api-2.1.1:${testdir}/../../../stubs/javax-ws-rs-api-3.0.0:${testdir}/../../../stubs/apache-http-4.4.13/:${testdir}/../../../stubs/servlet-api-2.4/:${testdir}/../../../stubs/projectreactor-3.4.3/
|
||||
//semmle-extractor-options: --javac-args -source 11 -target 11 -cp ${testdir}/../../../stubs/springframework-5.3.8:${testdir}/../../../stubs/javax-ws-rs-api-2.1.1:${testdir}/../../../stubs/javax-ws-rs-api-3.0.0:${testdir}/../../../stubs/apache-http-4.4.13/:${testdir}/../../../stubs/servlet-api-2.4/:${testdir}/../../../stubs/projectreactor-3.4.3/:${testdir}/../../../stubs/postgresql-42.3.3/:${testdir}/../../../stubs/HikariCP-3.4.5/:${testdir}/../../../stubs/spring-jdbc-5.3.8/:${testdir}/../../../stubs/jdbi3-core-3.27.2/
|
||||
|
||||
|
||||
79
java/ql/test/stubs/HikariCP-3.4.5/com/zaxxer/hikari/HikariConfig.java
generated
Normal file
79
java/ql/test/stubs/HikariCP-3.4.5/com/zaxxer/hikari/HikariConfig.java
generated
Normal file
@@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright (C) 2013, 2014 Brett Wooldridge
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.zaxxer.hikari;
|
||||
|
||||
|
||||
public class HikariConfig implements HikariConfigMXBean {
|
||||
|
||||
private String jdbcUrl;
|
||||
|
||||
public HikariConfig() {
|
||||
}
|
||||
|
||||
public HikariConfig(java.util.Properties properties) {
|
||||
|
||||
}
|
||||
|
||||
public HikariConfig(String propertyFileName) {
|
||||
}
|
||||
|
||||
public String getJdbcUrl() {
|
||||
return jdbcUrl;
|
||||
}
|
||||
|
||||
public void setJdbcUrl(String jdbcUrl) {
|
||||
this.jdbcUrl = jdbcUrl;
|
||||
}
|
||||
|
||||
public long getConnectionTimeout() { return 0; }
|
||||
|
||||
public void setConnectionTimeout(long connectionTimeoutMs) {}
|
||||
|
||||
public long getValidationTimeout() { return 0; }
|
||||
|
||||
public void setValidationTimeout(long validationTimeoutMs) {}
|
||||
|
||||
public long getIdleTimeout() { return 0; }
|
||||
|
||||
public void setIdleTimeout(long idleTimeoutMs) {}
|
||||
|
||||
public long getLeakDetectionThreshold() { return 0; }
|
||||
|
||||
public void setLeakDetectionThreshold(long leakDetectionThresholdMs) {}
|
||||
|
||||
public long getMaxLifetime() { return 0; }
|
||||
|
||||
public void setMaxLifetime(long maxLifetimeMs) {}
|
||||
|
||||
public int getMinimumIdle() { return 0; }
|
||||
|
||||
public void setMinimumIdle(int minIdle) {}
|
||||
|
||||
public int getMaximumPoolSize() { return 0; }
|
||||
|
||||
public void setMaximumPoolSize(int maxPoolSize) {}
|
||||
|
||||
public void setPassword(String password) {}
|
||||
|
||||
public void setUsername(String username) {}
|
||||
|
||||
public String getPoolName() {return "";}
|
||||
|
||||
public String getCatalog() {return "";}
|
||||
|
||||
public void setCatalog(String catalog) {}
|
||||
}
|
||||
193
java/ql/test/stubs/HikariCP-3.4.5/com/zaxxer/hikari/HikariConfigMXBean.java
generated
Normal file
193
java/ql/test/stubs/HikariCP-3.4.5/com/zaxxer/hikari/HikariConfigMXBean.java
generated
Normal file
@@ -0,0 +1,193 @@
|
||||
/*
|
||||
* Copyright (C) 2013 Brett Wooldridge
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.zaxxer.hikari;
|
||||
|
||||
/**
|
||||
* The javax.management MBean for a Hikari pool configuration.
|
||||
*
|
||||
* @author Brett Wooldridge
|
||||
*/
|
||||
public interface HikariConfigMXBean
|
||||
{
|
||||
/**
|
||||
* Get the maximum number of milliseconds that a client will wait for a connection from the pool. If this
|
||||
* time is exceeded without a connection becoming available, a SQLException will be thrown from
|
||||
* {@link javax.sql.DataSource#getConnection()}.
|
||||
*
|
||||
* @return the connection timeout in milliseconds
|
||||
*/
|
||||
long getConnectionTimeout();
|
||||
|
||||
/**
|
||||
* Set the maximum number of milliseconds that a client will wait for a connection from the pool. If this
|
||||
* time is exceeded without a connection becoming available, a SQLException will be thrown from
|
||||
* {@link javax.sql.DataSource#getConnection()}.
|
||||
*
|
||||
* @param connectionTimeoutMs the connection timeout in milliseconds
|
||||
*/
|
||||
void setConnectionTimeout(long connectionTimeoutMs);
|
||||
|
||||
/**
|
||||
* Get the maximum number of milliseconds that the pool will wait for a connection to be validated as
|
||||
* alive.
|
||||
*
|
||||
* @return the validation timeout in milliseconds
|
||||
*/
|
||||
long getValidationTimeout();
|
||||
|
||||
/**
|
||||
* Sets the maximum number of milliseconds that the pool will wait for a connection to be validated as
|
||||
* alive.
|
||||
*
|
||||
* @param validationTimeoutMs the validation timeout in milliseconds
|
||||
*/
|
||||
void setValidationTimeout(long validationTimeoutMs);
|
||||
|
||||
/**
|
||||
* This property controls the maximum amount of time (in milliseconds) that a connection is allowed to sit
|
||||
* idle in the pool. Whether a connection is retired as idle or not is subject to a maximum variation of +30
|
||||
* seconds, and average variation of +15 seconds. A connection will never be retired as idle before this timeout.
|
||||
* A value of 0 means that idle connections are never removed from the pool.
|
||||
*
|
||||
* @return the idle timeout in milliseconds
|
||||
*/
|
||||
long getIdleTimeout();
|
||||
|
||||
/**
 * This property controls the maximum amount of time (in milliseconds) that a connection is allowed to sit
 * idle in the pool. Whether a connection is retired as idle or not is subject to a maximum variation of +30
 * seconds, and average variation of +15 seconds. A connection will never be retired as idle before this timeout.
 * A value of {@code 0} means that idle connections are never removed from the pool.
 *
 * @param idleTimeoutMs the idle timeout in milliseconds
 */
void setIdleTimeout(long idleTimeoutMs);
|
||||
|
||||
/**
 * This property controls the amount of time that a connection can be out of the pool before a message is
 * logged indicating a possible connection leak. A value of {@code 0} means leak detection is disabled.
 *
 * @return the connection leak detection threshold in milliseconds
 */
long getLeakDetectionThreshold();
|
||||
|
||||
/**
 * This property controls the amount of time that a connection can be out of the pool before a message is
 * logged indicating a possible connection leak. A value of {@code 0} means leak detection is disabled.
 *
 * @param leakDetectionThresholdMs the connection leak detection threshold in milliseconds
 */
void setLeakDetectionThreshold(long leakDetectionThresholdMs);
|
||||
|
||||
/**
 * This property controls the maximum lifetime of a connection in the pool. When a connection reaches this
 * timeout, even if recently used, it will be retired from the pool. An in-use connection will never be
 * retired, only when it is idle will it be removed.
 *
 * @return the maximum connection lifetime in milliseconds
 */
long getMaxLifetime();
|
||||
|
||||
/**
 * This property controls the maximum lifetime of a connection in the pool. When a connection reaches this
 * timeout, even if recently used, it will be retired from the pool. An in-use connection will never be
 * retired, only when it is idle will it be removed.
 *
 * @param maxLifetimeMs the maximum connection lifetime in milliseconds
 */
void setMaxLifetime(long maxLifetimeMs);
|
||||
|
||||
/**
 * The property controls the minimum number of idle connections that HikariCP tries to maintain in the pool,
 * including both idle and in-use connections. If the idle connections dip below this value, HikariCP will
 * make a best effort to restore them quickly and efficiently.
 *
 * @return the minimum number of idle connections in the pool to maintain
 */
int getMinimumIdle();
|
||||
|
||||
/**
 * The property controls the minimum number of idle connections that HikariCP tries to maintain in the pool,
 * including both idle and in-use connections. If the idle connections dip below this value, HikariCP will
 * make a best effort to restore them quickly and efficiently.
 *
 * @param minIdle the minimum number of idle connections in the pool to maintain
 */
void setMinimumIdle(int minIdle);
|
||||
|
||||
/**
 * The property controls the maximum number of connections that HikariCP will keep in the pool,
 * including both idle and in-use connections.
 *
 * @return the maximum number of connections in the pool
 */
int getMaximumPoolSize();
|
||||
|
||||
/**
 * The property controls the maximum size that the pool is allowed to reach, including both idle and in-use
 * connections. Basically this value will determine the maximum number of actual connections to the database
 * backend.
 * <p>
 * When the pool reaches this size, and no idle connections are available, calls to {@code getConnection()} will
 * block for up to {@code connectionTimeout} milliseconds before timing out.
 *
 * @param maxPoolSize the maximum number of connections in the pool
 */
void setMaximumPoolSize(int maxPoolSize);
|
||||
|
||||
/**
 * Set the password used for authentication. Changing this at runtime will apply to new connections only.
 * Altering this at runtime only works for DataSource-based connections, not Driver-class or JDBC URL-based
 * connections.
 *
 * @param password the database password
 */
void setPassword(String password);
|
||||
|
||||
/**
 * Set the username used for authentication. Changing this at runtime will apply to new connections only.
 * Altering this at runtime only works for DataSource-based connections, not Driver-class or JDBC URL-based
 * connections.
 *
 * @param username the database username
 */
void setUsername(String username);
|
||||
|
||||
|
||||
/**
 * Get the name of the connection pool.
 *
 * @return the name of the connection pool
 */
String getPoolName();
|
||||
|
||||
/**
 * Get the default catalog name to be set on connections.
 *
 * @return the default catalog name, or {@code null} if none was configured
 */
String getCatalog();
|
||||
|
||||
/**
 * Set the default catalog name to be set on connections.
 * <p>
 * WARNING: THIS VALUE SHOULD ONLY BE CHANGED WHILE THE POOL IS SUSPENDED, AFTER CONNECTIONS HAVE BEEN EVICTED.
 *
 * @param catalog the catalog name, or null
 */
void setCatalog(String catalog);
|
||||
}
|
||||
70
java/ql/test/stubs/HikariCP-3.4.5/com/zaxxer/hikari/HikariDataSource.java
generated
Normal file
70
java/ql/test/stubs/HikariCP-3.4.5/com/zaxxer/hikari/HikariDataSource.java
generated
Normal file
@@ -0,0 +1,70 @@
|
||||
/*
|
||||
* Copyright (C) 2013 Brett Wooldridge
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.zaxxer.hikari;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.io.Closeable;
|
||||
import java.sql.*;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
|
||||
public class HikariDataSource extends HikariConfig implements DataSource, Closeable {
|
||||
|
||||
public HikariDataSource() {
|
||||
}
|
||||
|
||||
public HikariDataSource(HikariConfig configuration) {
|
||||
}
|
||||
|
||||
public Connection getConnection() throws SQLException {
|
||||
return null;
|
||||
}
|
||||
|
||||
public Connection getConnection(String username, String password)
|
||||
throws SQLException {
|
||||
return null;
|
||||
}
|
||||
|
||||
public java.io.PrintWriter getLogWriter() throws SQLException {
|
||||
return null;
|
||||
}
|
||||
|
||||
public void setLogWriter(java.io.PrintWriter out) throws SQLException {
|
||||
}
|
||||
|
||||
public void setLoginTimeout(int seconds) throws SQLException {
|
||||
}
|
||||
|
||||
public int getLoginTimeout() throws SQLException {
|
||||
return 0;
|
||||
}
|
||||
|
||||
public Logger getParentLogger() throws SQLFeatureNotSupportedException {
|
||||
return null;
|
||||
}
|
||||
|
||||
public <T> T unwrap(java.lang.Class<T> iface) throws java.sql.SQLException {
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean isWrapperFor(java.lang.Class<?> iface) throws java.sql.SQLException {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void close() {
|
||||
}
|
||||
}
|
||||
30
java/ql/test/stubs/jdbi3-core-3.27.2/org/jdbi/v3/core/Handle.java
generated
Normal file
30
java/ql/test/stubs/jdbi3-core-3.27.2/org/jdbi/v3/core/Handle.java
generated
Normal file
@@ -0,0 +1,30 @@
|
||||
/*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.jdbi.v3.core;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.sql.Connection;
|
||||
import org.jdbi.v3.core.config.Configurable;
|
||||
|
||||
|
||||
/**
|
||||
* This represents a connection to the database system. It is a wrapper around
|
||||
* a JDBC Connection object. Handle provides essential methods for transaction
|
||||
* management, statement creation, and other operations tied to the database session.
|
||||
*/
|
||||
public class Handle implements Closeable, Configurable<Handle> {
|
||||
|
||||
public void close() {
|
||||
}
|
||||
}
|
||||
78
java/ql/test/stubs/jdbi3-core-3.27.2/org/jdbi/v3/core/Jdbi.java
generated
Normal file
78
java/ql/test/stubs/jdbi3-core-3.27.2/org/jdbi/v3/core/Jdbi.java
generated
Normal file
@@ -0,0 +1,78 @@
|
||||
/*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.jdbi.v3.core;
|
||||
|
||||
import org.jdbi.v3.core.config.Configurable;
|
||||
import java.util.Properties;
|
||||
|
||||
/**
 * Main entry point for Jdbi: static factories that build {@link Jdbi} instances and
 * open {@link Handle} sessions from JDBC connection parameters.
 * <p>
 * Test stub: every factory returns {@code null}; only the signatures matter.
 */
public class Jdbi implements Configurable<Jdbi> {

    /**
     * Creates a new {@link Jdbi} instance from a database URL.
     *
     * @param url JDBC URL for connections
     *
     * @return a Jdbi which uses {@link java.sql.DriverManager} as a connection factory.
     */
    public static Jdbi create(final String url) {
        return null;
    }

    /**
     * Creates a new {@link Jdbi} instance from a database URL.
     *
     * @param url JDBC URL for connections
     * @param properties Properties to pass to DriverManager.getConnection(url, props) for each new handle
     *
     * @return a Jdbi which uses {@link java.sql.DriverManager} as a connection factory.
     */
    public static Jdbi create(final String url, final Properties properties) {
        return null;
    }

    /**
     * Creates a new {@link Jdbi} instance from a database URL.
     *
     * @param url JDBC URL for connections
     * @param username User name for connection authentication
     * @param password Password for connection authentication
     *
     * @return a Jdbi which uses {@link java.sql.DriverManager} as a connection factory.
     */
    public static Jdbi create(final String url, final String username, final String password) {
        return null;
    }

    /**
     * Obtain a handle with just a JDBC URL
     *
     * @param url JDBC Url
     *
     * @return newly opened Handle
     */
    public static Handle open(final String url) {
        return null;
    }

    /**
     * Obtain a handle with just a JDBC URL
     *
     * @param url JDBC Url
     * @param username JDBC username for authentication
     * @param password JDBC password for authentication
     *
     * @return newly opened Handle
     */
    public static Handle open(final String url, final String username, final String password) {
        return null;
    }

    /**
     * Obtain a handle with just a JDBC URL
     *
     * @param url JDBC Url
     * @param props JDBC properties
     *
     * @return newly opened Handle
     */
    public static Handle open(final String url, final Properties props) {
        return null;
    }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user