Merge pull request #12408 from jketema/merge-main

C++: use-use dataflow merge main
Mathias Vorreiter Pedersen
2023-03-07 13:05:30 +00:00
committed by GitHub
925 changed files with 77774 additions and 172565 deletions


@@ -26,9 +26,8 @@ jobs:
shell: bash
run: |
EXIT_CODE=0
-          # TODO: remove the swift exception from the regex when we fix generated QLdoc
          # TODO: remove the shared exception from the regex when coverage of qlpacks without dbschemes is supported
-          changed_lib_packs="$(git diff --name-only --diff-filter=ACMRT HEAD^ HEAD | { grep -Po '^(?!(swift|shared))[a-z]*/ql/lib' || true; } | sort -u)"
+          changed_lib_packs="$(git diff --name-only --diff-filter=ACMRT HEAD^ HEAD | { grep -Po '^(?!(shared))[a-z]*/ql/lib' || true; } | sort -u)"
for pack_dir in ${changed_lib_packs}; do
lang="${pack_dir%/ql/lib}"
codeql generate library-doc-coverage --output="${RUNNER_TEMP}/${lang}-current.txt" --dir="${pack_dir}"


@@ -1,6 +1,26 @@
{
"DataFlow Java/C++/C#/Go/Python/Ruby/Swift": [
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlow.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlow.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlow.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlow.qll",
"go/ql/lib/semmle/go/dataflow/internal/DataFlow.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlow.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlow.qll",
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlow.qll"
],
"DataFlowImpl Java/C++/C#/Go/Python/Ruby/Swift": [
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImpl.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImpl.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl.qll",
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImpl.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImpl.qll",
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImpl.qll"
],
"DataFlow Java/C++/C#/Go/Python/Ruby/Swift Legacy Configuration": [
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl1.qll",
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl2.qll",
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl3.qll",
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl4.qll",
@@ -8,34 +28,34 @@
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImpl6.qll",
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImplForSerializability.qll",
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImplForOnActivityResult.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImpl.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImpl1.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImpl2.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImpl3.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImpl4.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/DataFlowImplLocal.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImpl.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImpl1.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImpl2.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImpl3.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/DataFlowImpl4.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl1.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl2.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl3.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl4.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImpl5.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/DataFlowImplForContentDataFlow.qll",
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImpl.qll",
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImpl1.qll",
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImpl2.qll",
"go/ql/lib/semmle/go/dataflow/internal/DataFlowImplForStringsNewReplacer.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl1.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl2.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl3.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImpl4.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/DataFlowImplForRegExp.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImpl.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImpl1.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImpl2.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImplForHttpClientLibraries.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImplForPathname.qll",
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImpl.qll"
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImpl1.qll"
],
"DataFlow Java/C++/C#/Go/Python/Ruby/Swift Common": [
"java/ql/lib/semmle/code/java/dataflow/internal/DataFlowImplCommon.qll",
@@ -47,7 +67,17 @@
"ruby/ql/lib/codeql/ruby/dataflow/internal/DataFlowImplCommon.qll",
"swift/ql/lib/codeql/swift/dataflow/internal/DataFlowImplCommon.qll"
],
"TaintTracking::Configuration Java/C++/C#/Go/Python/Ruby/Swift": [
"TaintTracking Java/C++/C#/Go/Python/Ruby/Swift": [
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/tainttracking1/TaintTracking.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/tainttracking1/TaintTracking.qll",
"csharp/ql/lib/semmle/code/csharp/dataflow/internal/tainttracking1/TaintTracking.qll",
"go/ql/lib/semmle/go/dataflow/internal/tainttracking1/TaintTracking.qll",
"java/ql/lib/semmle/code/java/dataflow/internal/tainttracking1/TaintTracking.qll",
"python/ql/lib/semmle/python/dataflow/new/internal/tainttracking1/TaintTracking.qll",
"ruby/ql/lib/codeql/ruby/dataflow/internal/tainttracking1/TaintTracking.qll",
"swift/ql/lib/codeql/swift/dataflow/internal/tainttracking1/TaintTracking.qll"
],
"TaintTracking Legacy Configuration Java/C++/C#/Go/Python/Ruby/Swift": [
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",
"cpp/ql/lib/semmle/code/cpp/dataflow/internal/tainttracking2/TaintTrackingImpl.qll",
"cpp/ql/lib/semmle/code/cpp/ir/dataflow/internal/tainttracking1/TaintTrackingImpl.qll",


@@ -0,0 +1,9 @@
---
category: majorAnalysis
---
* The main data flow and taint tracking APIs have been changed. The old APIs
remain in place for now and translate to the new ones through a
backwards-compatible wrapper. If multiple configurations are in scope
simultaneously, this may affect results slightly. The new API is quite
similar to the old one, but uses a configuration module instead of a
configuration class.
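
As an editorial sketch of the migration (hypothetical `MyLegacyConfig` and `MyConfig`, not taken from this changeset):

```ql
// Old API: each analysis extends the abstract `Configuration` class, keyed
// by a unique characteristic string.
class MyLegacyConfig extends DataFlow::Configuration {
  MyLegacyConfig() { this = "MyLegacyConfig" }

  override predicate isSource(DataFlow::Node source) { none() } // stand-in body

  override predicate isSink(DataFlow::Node sink) { none() } // stand-in body
}
// Queried as: exists(MyLegacyConfig cfg | cfg.hasFlow(source, sink))

// New API: the analysis is a module implementing `DataFlow::ConfigSig`,
// instantiated through the parameterized `Make` module.
module MyConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) { none() } // stand-in body

  predicate isSink(DataFlow::Node sink) { none() } // stand-in body
}

module MyFlow = DataFlow::Make<MyConfig>;
// Queried as: MyFlow::hasFlow(source, sink)
```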


@@ -68,7 +68,9 @@ class Declaration extends Locatable, @declaration {
* Holds if this declaration has the fully-qualified name `qualifiedName`.
* See `getQualifiedName`.
*/
- predicate hasQualifiedName(string qualifiedName) { this.getQualifiedName() = qualifiedName }
+ deprecated predicate hasQualifiedName(string qualifiedName) {
+   this.getQualifiedName() = qualifiedName
+ }
/**
* Holds if this declaration has a fully-qualified name with a name-space


@@ -24,5 +24,6 @@ import cpp
* global (inter-procedural) data flow analyses.
*/
module DataFlow {
- import semmle.code.cpp.dataflow.internal.DataFlowImpl
+ import semmle.code.cpp.dataflow.internal.DataFlow
+ import semmle.code.cpp.dataflow.internal.DataFlowImpl1
}


@@ -23,5 +23,6 @@ import semmle.code.cpp.dataflow.DataFlow2
* global (inter-procedural) taint-tracking analyses.
*/
module TaintTracking {
import semmle.code.cpp.dataflow.internal.tainttracking1.TaintTracking
+ import semmle.code.cpp.dataflow.internal.tainttracking1.TaintTrackingImpl
}


@@ -0,0 +1,245 @@
/**
* Provides an implementation of global (interprocedural) data flow. This file
* re-exports the local (intraprocedural) data flow analysis from
* `DataFlowImplSpecific::Public` and adds a global analysis, mainly exposed
* through the `Make` and `MakeWithState` modules.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
import DataFlowImplCommonPublic
private import DataFlowImpl
/** An input configuration for data flow. */
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries; they should only be used for constructing paths that need to be
* pluggable into another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/** An input configuration for data flow using flow state. */
signature module StateConfigSig {
bindingset[this]
class FlowState;
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state);
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state);
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2);
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries; they should only be used for constructing paths that need to be
* pluggable into another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/**
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
signature int explorationLimitSig();
/**
* The output of a data flow computation.
*/
signature module DataFlowSig {
/**
* A `Node` augmented with a call context (except for sinks) and an access path.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode;
/**
* Holds if data can flow from `source` to `sink`.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink);
/**
* Holds if data can flow from `source` to `sink`.
*/
predicate hasFlow(Node source, Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowTo(Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowToExpr(DataFlowExpr sink);
}
/**
* Constructs a standard data flow computation.
*/
module Make<ConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import DefaultState<Config>
import Config
}
import Impl<C>
}
/**
* Constructs a data flow computation using flow state.
*/
module MakeWithState<StateConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import Config
}
import Impl<C>
}
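
For orientation, a minimal editorial sketch (not part of the file above) of instantiating `MakeWithState` with a module implementing `StateConfigSig`; the `isRawInput` and `isQueryArgument` predicates are hypothetical stand-ins:

```ql
// Hypothetical stand-ins for real source and sink definitions.
predicate isRawInput(Node n) { none() }

predicate isQueryArgument(Node n) { none() }

module MyStateConfig implements StateConfigSig {
  // Any string can act as a state; a string type satisfies the
  // `bindingset[this]` class requirement of the signature.
  class FlowState = string;

  predicate isSource(Node source, FlowState state) { isRawInput(source) and state = "raw" }

  predicate isSink(Node sink, FlowState state) { isQueryArgument(sink) and state = "raw" }

  predicate isBarrier(Node node, FlowState state) { none() }

  predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
    none()
  }
}

module MyStateFlow = MakeWithState<MyStateConfig>;
// MyStateFlow::hasFlow(source, sink) holds if a value reaches a sink while in state "raw".
```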

File diff suppressed because it is too large.


@@ -0,0 +1,396 @@
/**
* DEPRECATED: Use `Make` and `MakeWithState` instead.
*
* Provides a backwards-compatible `Configuration`-class interface to the
* data flow library.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
private import DataFlowImpl
import DataFlowImplCommonPublic
import FlowStateString
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each
* use of the global data flow library must define its own unique extension
* of this abstract class. To create a configuration, extend this class with
* a subclass whose characteristic predicate is a unique singleton string.
* For example, write
*
* ```ql
* class MyAnalysisConfiguration extends DataFlow::Configuration {
* MyAnalysisConfiguration() { this = "MyAnalysisConfiguration" }
* // Override `isSource` and `isSink`.
* // Optionally override `isBarrier`.
* // Optionally override `isAdditionalFlowStep`.
* }
* ```
* Conceptually, this defines a graph where the nodes are `DataFlow::Node`s and
* the edges are those data-flow steps that preserve the value of the node
* along with any additional edges defined by `isAdditionalFlowStep`.
* Specifying nodes in `isBarrier` will remove those nodes from the graph, and
* specifying nodes in `isBarrierIn` and/or `isBarrierOut` will remove in-going
* and/or out-going edges from those nodes, respectively.
*
* Then, to query whether there is flow between some `source` and `sink`,
* write
*
* ```ql
* exists(MyAnalysisConfiguration cfg | cfg.hasFlow(source, sink))
* ```
*
* Multiple configurations can coexist, but two classes extending
* `DataFlow::Configuration` should never depend on each other. One of them
* should instead depend on a `DataFlow2::Configuration`, a
* `DataFlow3::Configuration`, or a `DataFlow4::Configuration`.
*/
abstract class Configuration extends string {
bindingset[this]
Configuration() { any() }
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source) { none() }
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state) { none() }
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink) { none() }
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state) { none() }
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state) { none() }
/** Holds if data flow into `node` is prohibited. */
predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
predicate isBarrierOut(Node node) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited when
* the flow state is `state`.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard, FlowState state) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
none()
}
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries; they should only be used for constructing paths that need to be
* pluggable into another path context.
*/
FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*/
predicate hasFlow(Node source, Node sink) { hasFlow(source, sink, this) }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink) { hasFlowPath(source, sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowTo(Node sink) { hasFlowTo(sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowToExpr(DataFlowExpr sink) { this.hasFlowTo(exprNode(sink)) }
/**
* DEPRECATED: Use `FlowExploration<explorationLimit>` instead.
*
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
deprecated int explorationLimit() { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (for example in a `path-problem` query).
*/
predicate includeHiddenNodes() { none() }
}
/**
* This class exists to prevent mutual recursion between the user-overridden
* member predicates of `Configuration` and the rest of the data-flow library.
* Good performance cannot be guaranteed in the presence of such recursion, so
* it should be replaced by using more than one copy of the data flow library.
*/
abstract private class ConfigurationRecursionPrevention extends Configuration {
bindingset[this]
ConfigurationRecursionPrevention() { any() }
override predicate hasFlow(Node source, Node sink) {
strictcount(Node n | this.isSource(n)) < 0
or
strictcount(Node n | this.isSource(n, _)) < 0
or
strictcount(Node n | this.isSink(n)) < 0
or
strictcount(Node n | this.isSink(n, _)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, n2)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, _, n2, _)) < 0
or
super.hasFlow(source, sink)
}
}
/** A bridge class to access the deprecated `isBarrierGuard`. */
private class BarrierGuardGuardedNodeBridge extends Unit {
abstract predicate guardedNode(Node n, Configuration config);
abstract predicate guardedNode(Node n, FlowState state, Configuration config);
}
private class BarrierGuardGuardedNode extends BarrierGuardGuardedNodeBridge {
deprecated override predicate guardedNode(Node n, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g) and
n = g.getAGuardedNode()
)
}
deprecated override predicate guardedNode(Node n, FlowState state, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g, state) and
n = g.getAGuardedNode()
)
}
}
private FlowState relevantState(Configuration config) {
config.isSource(_, result) or
config.isSink(_, result) or
config.isBarrier(_, result) or
config.isAdditionalFlowStep(_, result, _, _) or
config.isAdditionalFlowStep(_, _, _, result)
}
private newtype TConfigState =
TMkConfigState(Configuration config, FlowState state) {
state = relevantState(config) or state instanceof FlowStateEmpty
}
private Configuration getConfig(TConfigState state) { state = TMkConfigState(result, _) }
private FlowState getState(TConfigState state) { state = TMkConfigState(_, result) }
private predicate singleConfiguration() { 1 = strictcount(Configuration c) }
private module Config implements FullStateConfigSig {
class FlowState = TConfigState;
predicate isSource(Node source, FlowState state) {
getConfig(state).isSource(source, getState(state))
or
getConfig(state).isSource(source) and getState(state) instanceof FlowStateEmpty
}
predicate isSink(Node sink, FlowState state) {
getConfig(state).isSink(sink, getState(state))
or
getConfig(state).isSink(sink) and getState(state) instanceof FlowStateEmpty
}
predicate isBarrier(Node node) { none() }
predicate isBarrier(Node node, FlowState state) {
getConfig(state).isBarrier(node, getState(state)) or
getConfig(state).isBarrier(node) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getState(state), getConfig(state)) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getConfig(state))
}
predicate isBarrierIn(Node node) { any(Configuration config).isBarrierIn(node) }
predicate isBarrierOut(Node node) { any(Configuration config).isBarrierOut(node) }
predicate isAdditionalFlowStep(Node node1, Node node2) {
singleConfiguration() and
any(Configuration config).isAdditionalFlowStep(node1, node2)
}
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
getConfig(state1).isAdditionalFlowStep(node1, getState(state1), node2, getState(state2)) and
getConfig(state2) = getConfig(state1)
or
not singleConfiguration() and
getConfig(state1).isAdditionalFlowStep(node1, node2) and
state2 = state1
}
predicate allowImplicitRead(Node node, ContentSet c) {
any(Configuration config).allowImplicitRead(node, c)
}
int fieldFlowBranchLimit() { result = min(any(Configuration config).fieldFlowBranchLimit()) }
FlowFeature getAFeature() { result = any(Configuration config).getAFeature() }
predicate sourceGrouping(Node source, string sourceGroup) {
any(Configuration config).sourceGrouping(source, sourceGroup)
}
predicate sinkGrouping(Node sink, string sinkGroup) {
any(Configuration config).sinkGrouping(sink, sinkGroup)
}
predicate includeHiddenNodes() { any(Configuration config).includeHiddenNodes() }
}
private import Impl<Config> as I
import I
/**
* A `Node` augmented with a call context (except for sinks), an access path, and a configuration.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode instanceof I::PathNode {
/** Gets a textual representation of this element. */
final string toString() { result = super.toString() }
/**
* Gets a textual representation of this element, including a textual
* representation of the call context.
*/
final string toStringWithContext() { result = super.toStringWithContext() }
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
*/
final predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
}
/** Gets the underlying `Node`. */
final Node getNode() { result = super.getNode() }
/** Gets the `FlowState` of this node. */
final FlowState getState() { result = getState(super.getState()) }
/** Gets the associated configuration. */
final Configuration getConfiguration() { result = getConfig(super.getState()) }
/** Gets a successor of this node, if any. */
final PathNode getASuccessor() { result = super.getASuccessor() }
/** Holds if this node is a source. */
final predicate isSource() { super.isSource() }
/** Holds if this node is a grouping of source nodes. */
final predicate isSourceGroup(string group) { super.isSourceGroup(group) }
/** Holds if this node is a grouping of sink nodes. */
final predicate isSinkGroup(string group) { super.isSinkGroup(group) }
}
private predicate hasFlow(Node source, Node sink, Configuration config) {
exists(PathNode source0, PathNode sink0 |
hasFlowPath(source0, sink0, config) and
source0.getNode() = source and
sink0.getNode() = sink
)
}
private predicate hasFlowPath(PathNode source, PathNode sink, Configuration config) {
hasFlowPath(source, sink) and source.getConfiguration() = config
}
private predicate hasFlowTo(Node sink, Configuration config) { hasFlow(_, sink, config) }
predicate flowsTo = hasFlow/3;
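
For orientation (editorial note, not part of the file): the wrapper above folds every legacy `Configuration` into a single instantiation of `Impl` by pairing each configuration with its flow states in `TConfigState`. A hypothetical legacy configuration therefore keeps compiling unchanged:

```ql
// Hypothetical legacy configuration, written against the deprecated class-based API.
class MyLegacyConfig extends Configuration {
  MyLegacyConfig() { this = "MyLegacyConfig" }

  override predicate isSource(Node source) { none() } // stand-in body

  override predicate isSink(Node sink) { none() } // stand-in body
}
// The wrapper models its runs as `TMkConfigState(MyLegacyConfig, state)` values,
// so results are computed alongside every other configuration in scope; this is
// why coexisting configurations can now influence each other's results slightly.
```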

File diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -3,15 +3,18 @@ private import DataFlowImplSpecific::Public
import Cached
module DataFlowImplCommonPublic {
-  /** A state value to track during data flow. */
-  class FlowState = string;
-
-  /**
-   * The default state, which is used when the state is unspecified for a source
-   * or a sink.
-   */
-  class FlowStateEmpty extends FlowState {
-    FlowStateEmpty() { this = "" }
-  }
+  /** Provides `FlowState = string`. */
+  module FlowStateString {
+    /** A state value to track during data flow. */
+    class FlowState = string;
+
+    /**
+     * The default state, which is used when the state is unspecified for a source
+     * or a sink.
+     */
+    class FlowStateEmpty extends FlowState {
+      FlowStateEmpty() { this = "" }
+    }
+  }
private newtype TFlowFeature =


@@ -0,0 +1,63 @@
/**
* Provides classes for performing local (intra-procedural) and
* global (inter-procedural) taint-tracking analyses.
*/
import TaintTrackingParameter::Public
private import TaintTrackingParameter::Private
private module AddTaintDefaults<DataFlowInternal::FullStateConfigSig Config> implements
DataFlowInternal::FullStateConfigSig {
import Config
predicate isBarrier(DataFlow::Node node) {
Config::isBarrier(node) or defaultTaintSanitizer(node)
}
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
Config::isAdditionalFlowStep(node1, node2) or
defaultAdditionalTaintStep(node1, node2)
}
predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
Config::allowImplicitRead(node, c)
or
(
Config::isSink(node, _) or
Config::isAdditionalFlowStep(node, _) or
Config::isAdditionalFlowStep(node, _, _, _)
) and
defaultImplicitTaintRead(node, c)
}
}
/**
* Constructs a standard taint tracking computation.
*/
module Make<DataFlow::ConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import DataFlowInternal::DefaultState<Config>
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
/**
* Constructs a taint tracking computation using flow state.
*/
module MakeWithState<DataFlow::StateConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
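
A short editorial sketch (hypothetical `MyTaintConfig`, not part of the file) of instantiating the taint-tracking variant; `DataFlow` here is the module wired in through `TaintTrackingParameter::Private` in the next file:

```ql
// Hypothetical taint-tracking configuration using the module-based API.
module MyTaintConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) { none() } // stand-in body

  predicate isSink(DataFlow::Node sink) { none() } // stand-in body
}

module MyTaint = TaintTracking::Make<MyTaintConfig>;
// Unlike plain data flow, `MyTaint::hasFlow(source, sink)` also follows
// `defaultAdditionalTaintStep` edges and respects `defaultTaintSanitizer`,
// because `AddTaintDefaults` mixes those defaults into the configuration.
```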


@@ -2,4 +2,5 @@ import semmle.code.cpp.dataflow.internal.TaintTrackingUtil as Public
module Private {
import semmle.code.cpp.dataflow.DataFlow::DataFlow as DataFlow
+ import semmle.code.cpp.dataflow.internal.DataFlowImpl as DataFlowInternal
}


@@ -26,5 +26,6 @@ import cpp
* global (inter-procedural) data flow analyses.
*/
module DataFlow {
- import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl
+ import semmle.code.cpp.ir.dataflow.internal.DataFlow
+ import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl1
}


@@ -23,5 +23,6 @@ import semmle.code.cpp.dataflow.new.DataFlow2
* global (inter-procedural) taint-tracking analyses.
*/
module TaintTracking {
import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTracking
+ import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTrackingImpl
}


@@ -22,5 +22,6 @@
import cpp
module DataFlow {
- import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl
+ import semmle.code.cpp.ir.dataflow.internal.DataFlow
+ import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl1
}


@@ -19,5 +19,6 @@ import semmle.code.cpp.ir.dataflow.DataFlow
import semmle.code.cpp.ir.dataflow.DataFlow2
module TaintTracking {
import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTracking
+ import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTrackingImpl
}


@@ -0,0 +1,245 @@
/**
* Provides an implementation of global (interprocedural) data flow. This file
* re-exports the local (intraprocedural) data flow analysis from
* `DataFlowImplSpecific::Public` and adds a global analysis, mainly exposed
* through the `Make` and `MakeWithState` modules.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
import DataFlowImplCommonPublic
private import DataFlowImpl
/** An input configuration for data flow. */
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries; they should only be used for constructing paths that need to be
* pluggable into another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/** An input configuration for data flow using flow state. */
signature module StateConfigSig {
bindingset[this]
class FlowState;
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state);
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state);
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2);
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries; they should only be used for constructing paths that need to be
* pluggable into another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/**
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
signature int explorationLimitSig();
/**
* The output of a data flow computation.
*/
signature module DataFlowSig {
/**
* A `Node` augmented with a call context (except for sinks) and an access path.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode;
/**
* Holds if data can flow from `source` to `sink`.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink);
/**
* Holds if data can flow from `source` to `sink`.
*/
predicate hasFlow(Node source, Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowTo(Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowToExpr(DataFlowExpr sink);
}
/**
* Constructs a standard data flow computation.
*/
module Make<ConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import DefaultState<Config>
import Config
}
import Impl<C>
}
/**
* Constructs a data flow computation using flow state.
*/
module MakeWithState<StateConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import Config
}
import Impl<C>
}


@@ -0,0 +1,396 @@
/**
* DEPRECATED: Use `Make` and `MakeWithState` instead.
*
* Provides a backwards-compatible `Configuration`-class interface to the
* data flow library.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
private import DataFlowImpl
import DataFlowImplCommonPublic
import FlowStateString
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each
* use of the global data flow library must define its own unique extension
* of this abstract class. To create a configuration, extend this class with
* a subclass whose characteristic predicate is a unique singleton string.
* For example, write
*
* ```ql
* class MyAnalysisConfiguration extends DataFlow::Configuration {
* MyAnalysisConfiguration() { this = "MyAnalysisConfiguration" }
* // Override `isSource` and `isSink`.
* // Optionally override `isBarrier`.
* // Optionally override `isAdditionalFlowStep`.
* }
* ```
* Conceptually, this defines a graph where the nodes are `DataFlow::Node`s and
* the edges are those data-flow steps that preserve the value of the node
* along with any additional edges defined by `isAdditionalFlowStep`.
* Specifying nodes in `isBarrier` will remove those nodes from the graph, and
* specifying nodes in `isBarrierIn` and/or `isBarrierOut` will remove in-going
* and/or out-going edges from those nodes, respectively.
*
* Then, to query whether there is flow between some `source` and `sink`,
* write
*
* ```ql
* exists(MyAnalysisConfiguration cfg | cfg.hasFlow(source, sink))
* ```
*
* Multiple configurations can coexist, but two classes extending
* `DataFlow::Configuration` should never depend on each other. One of them
* should instead depend on a `DataFlow2::Configuration`, a
* `DataFlow3::Configuration`, or a `DataFlow4::Configuration`.
*/
abstract class Configuration extends string {
bindingset[this]
Configuration() { any() }
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source) { none() }
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state) { none() }
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink) { none() }
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state) { none() }
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state) { none() }
/** Holds if data flow into `node` is prohibited. */
predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
predicate isBarrierOut(Node node) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited when
* the flow state is `state`.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard, FlowState state) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
none()
}
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries; they should only be used for constructing paths that need to be
* pluggable into another path context.
*/
FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*/
predicate hasFlow(Node source, Node sink) { hasFlow(source, sink, this) }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink) { hasFlowPath(source, sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowTo(Node sink) { hasFlowTo(sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowToExpr(DataFlowExpr sink) { this.hasFlowTo(exprNode(sink)) }
/**
* DEPRECATED: Use `FlowExploration<explorationLimit>` instead.
*
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
deprecated int explorationLimit() { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (for example in a `path-problem` query).
*/
predicate includeHiddenNodes() { none() }
}
/**
* This class exists to prevent mutual recursion between the user-overridden
* member predicates of `Configuration` and the rest of the data-flow library.
* Good performance cannot be guaranteed in the presence of such recursion, so
* it should be replaced by using more than one copy of the data flow library.
*/
abstract private class ConfigurationRecursionPrevention extends Configuration {
bindingset[this]
ConfigurationRecursionPrevention() { any() }
override predicate hasFlow(Node source, Node sink) {
strictcount(Node n | this.isSource(n)) < 0
or
strictcount(Node n | this.isSource(n, _)) < 0
or
strictcount(Node n | this.isSink(n)) < 0
or
strictcount(Node n | this.isSink(n, _)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, n2)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, _, n2, _)) < 0
or
super.hasFlow(source, sink)
}
}
/** A bridge class to access the deprecated `isBarrierGuard`. */
private class BarrierGuardGuardedNodeBridge extends Unit {
abstract predicate guardedNode(Node n, Configuration config);
abstract predicate guardedNode(Node n, FlowState state, Configuration config);
}
private class BarrierGuardGuardedNode extends BarrierGuardGuardedNodeBridge {
deprecated override predicate guardedNode(Node n, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g) and
n = g.getAGuardedNode()
)
}
deprecated override predicate guardedNode(Node n, FlowState state, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g, state) and
n = g.getAGuardedNode()
)
}
}
private FlowState relevantState(Configuration config) {
config.isSource(_, result) or
config.isSink(_, result) or
config.isBarrier(_, result) or
config.isAdditionalFlowStep(_, result, _, _) or
config.isAdditionalFlowStep(_, _, _, result)
}
private newtype TConfigState =
TMkConfigState(Configuration config, FlowState state) {
state = relevantState(config) or state instanceof FlowStateEmpty
}
private Configuration getConfig(TConfigState state) { state = TMkConfigState(result, _) }
private FlowState getState(TConfigState state) { state = TMkConfigState(_, result) }
private predicate singleConfiguration() { 1 = strictcount(Configuration c) }
private module Config implements FullStateConfigSig {
class FlowState = TConfigState;
predicate isSource(Node source, FlowState state) {
getConfig(state).isSource(source, getState(state))
or
getConfig(state).isSource(source) and getState(state) instanceof FlowStateEmpty
}
predicate isSink(Node sink, FlowState state) {
getConfig(state).isSink(sink, getState(state))
or
getConfig(state).isSink(sink) and getState(state) instanceof FlowStateEmpty
}
predicate isBarrier(Node node) { none() }
predicate isBarrier(Node node, FlowState state) {
getConfig(state).isBarrier(node, getState(state)) or
getConfig(state).isBarrier(node) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getState(state), getConfig(state)) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getConfig(state))
}
predicate isBarrierIn(Node node) { any(Configuration config).isBarrierIn(node) }
predicate isBarrierOut(Node node) { any(Configuration config).isBarrierOut(node) }
predicate isAdditionalFlowStep(Node node1, Node node2) {
singleConfiguration() and
any(Configuration config).isAdditionalFlowStep(node1, node2)
}
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
getConfig(state1).isAdditionalFlowStep(node1, getState(state1), node2, getState(state2)) and
getConfig(state2) = getConfig(state1)
or
not singleConfiguration() and
getConfig(state1).isAdditionalFlowStep(node1, node2) and
state2 = state1
}
predicate allowImplicitRead(Node node, ContentSet c) {
any(Configuration config).allowImplicitRead(node, c)
}
int fieldFlowBranchLimit() { result = min(any(Configuration config).fieldFlowBranchLimit()) }
FlowFeature getAFeature() { result = any(Configuration config).getAFeature() }
predicate sourceGrouping(Node source, string sourceGroup) {
any(Configuration config).sourceGrouping(source, sourceGroup)
}
predicate sinkGrouping(Node sink, string sinkGroup) {
any(Configuration config).sinkGrouping(sink, sinkGroup)
}
predicate includeHiddenNodes() { any(Configuration config).includeHiddenNodes() }
}
private import Impl<Config> as I
import I
/**
* A `Node` augmented with a call context (except for sinks), an access path, and a configuration.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode instanceof I::PathNode {
/** Gets a textual representation of this element. */
final string toString() { result = super.toString() }
/**
* Gets a textual representation of this element, including a textual
* representation of the call context.
*/
final string toStringWithContext() { result = super.toStringWithContext() }
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
*/
final predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
}
/** Gets the underlying `Node`. */
final Node getNode() { result = super.getNode() }
/** Gets the `FlowState` of this node. */
final FlowState getState() { result = getState(super.getState()) }
/** Gets the associated configuration. */
final Configuration getConfiguration() { result = getConfig(super.getState()) }
/** Gets a successor of this node, if any. */
final PathNode getASuccessor() { result = super.getASuccessor() }
/** Holds if this node is a source. */
final predicate isSource() { super.isSource() }
/** Holds if this node is a grouping of source nodes. */
final predicate isSourceGroup(string group) { super.isSourceGroup(group) }
/** Holds if this node is a grouping of sink nodes. */
final predicate isSinkGroup(string group) { super.isSinkGroup(group) }
}
private predicate hasFlow(Node source, Node sink, Configuration config) {
exists(PathNode source0, PathNode sink0 |
hasFlowPath(source0, sink0, config) and
source0.getNode() = source and
sink0.getNode() = sink
)
}
private predicate hasFlowPath(PathNode source, PathNode sink, Configuration config) {
hasFlowPath(source, sink) and source.getConfiguration() = config
}
private predicate hasFlowTo(Node sink, Configuration config) { hasFlow(_, sink, config) }
predicate flowsTo = hasFlow/3;


@@ -3,15 +3,18 @@ private import DataFlowImplSpecific::Public
import Cached
module DataFlowImplCommonPublic {
-  /** A state value to track during data flow. */
-  class FlowState = string;
-
-  /**
-   * The default state, which is used when the state is unspecified for a source
-   * or a sink.
-   */
-  class FlowStateEmpty extends FlowState {
-    FlowStateEmpty() { this = "" }
-  }
+  /** Provides `FlowState = string`. */
+  module FlowStateString {
+    /** A state value to track during data flow. */
+    class FlowState = string;
+
+    /**
+     * The default state, which is used when the state is unspecified for a source
+     * or a sink.
+     */
+    class FlowStateEmpty extends FlowState {
+      FlowStateEmpty() { this = "" }
+    }
+  }
private newtype TFlowFeature =


@@ -97,23 +97,23 @@ private string getNodeProperty(DataFlow::Node node, string key) {
|
kind, ", "
)
- or
- // Is there partial flow from a source to this node?
- // This property will only be emitted if partial flow is enabled by overriding
- // `DataFlow::Configuration::explorationLimit()`.
- key = "pflow" and
- result =
-   strictconcat(DataFlow::PartialPathNode sourceNode, DataFlow::PartialPathNode destNode, int dist,
-     int order1, int order2 |
-     any(DataFlow::Configuration cfg).hasPartialFlow(sourceNode, destNode, dist) and
-     destNode.getNode() = node and
-     // Only print flow from a source in the same function.
-     sourceNode.getNode().getEnclosingCallable() = node.getEnclosingCallable()
-   |
-     nodeId(sourceNode.getNode(), order1, order2) + "+" + dist.toString(), ", "
-   order by
-     order1, order2, dist desc
-   )
+ // or
+ // // Is there partial flow from a source to this node?
+ // // This property will only be emitted if partial flow is enabled by overriding
+ // // `DataFlow::Configuration::explorationLimit()`.
+ // key = "pflow" and
+ // result =
+ //   strictconcat(DataFlow::PartialPathNode sourceNode, DataFlow::PartialPathNode destNode, int dist,
+ //     int order1, int order2 |
+ //     any(DataFlow::Configuration cfg).hasPartialFlow(sourceNode, destNode, dist) and
+ //     destNode.getNode() = node and
+ //     // Only print flow from a source in the same function.
+ //     sourceNode.getNode().getEnclosingCallable() = node.getEnclosingCallable()
+ //   |
+ //     nodeId(sourceNode.getNode(), order1, order2) + "+" + dist.toString(), ", "
+ //   order by
+ //     order1, order2, dist desc
+ //   )
}
/**


@@ -0,0 +1,63 @@
/**
* Provides classes for performing local (intra-procedural) and
* global (inter-procedural) taint-tracking analyses.
*/
import TaintTrackingParameter::Public
private import TaintTrackingParameter::Private
private module AddTaintDefaults<DataFlowInternal::FullStateConfigSig Config> implements
DataFlowInternal::FullStateConfigSig {
import Config
predicate isBarrier(DataFlow::Node node) {
Config::isBarrier(node) or defaultTaintSanitizer(node)
}
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
Config::isAdditionalFlowStep(node1, node2) or
defaultAdditionalTaintStep(node1, node2)
}
predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
Config::allowImplicitRead(node, c)
or
(
Config::isSink(node, _) or
Config::isAdditionalFlowStep(node, _) or
Config::isAdditionalFlowStep(node, _, _, _)
) and
defaultImplicitTaintRead(node, c)
}
}
/**
* Constructs a standard taint tracking computation.
*/
module Make<DataFlow::ConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import DataFlowInternal::DefaultState<Config>
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
/**
* Constructs a taint tracking computation using flow state.
*/
module MakeWithState<DataFlow::StateConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
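As an illustrative sketch of instantiating these modules (`MyTaintConfig` is hypothetical; the source/sink predicates mirror the C++ queries converted later in this change, where `FlowSource` comes from `semmle.code.cpp.security.FlowSources`):

```ql
module MyTaintConfig implements DataFlow::ConfigSig {
  // Taint starts at any flow source.
  predicate isSource(DataFlow::Node node) { node instanceof FlowSource }

  // Sink on values passed (indirectly) as call arguments.
  predicate isSink(DataFlow::Node node) { exists(node.asIndirectArgument()) }
}

module MyTaint = TaintTracking::Make<MyTaintConfig>;
// `MyTaint` then provides `hasFlow`, `hasFlowPath`, and a `PathGraph` module.
```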

View File

@@ -2,4 +2,5 @@ import semmle.code.cpp.ir.dataflow.internal.TaintTrackingUtil as Public
module Private {
import semmle.code.cpp.ir.dataflow.DataFlow::DataFlow as DataFlow
import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl as DataFlowInternal
}

View File

@@ -19,7 +19,7 @@ import semmle.code.cpp.security.FunctionWithWrappers
import semmle.code.cpp.security.FlowSources
import semmle.code.cpp.ir.IR
import semmle.code.cpp.ir.dataflow.TaintTracking
import DataFlow::PathGraph
import TaintedPath::PathGraph
/**
* A function for opening a file.
@@ -70,18 +70,16 @@ predicate hasUpperBoundsCheck(Variable var) {
)
}
class TaintedPathConfiguration extends TaintTracking::Configuration {
TaintedPathConfiguration() { this = "TaintedPathConfiguration" }
module TaintedPathConfiguration implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) { node instanceof FlowSource }
override predicate isSource(DataFlow::Node node) { node instanceof FlowSource }
override predicate isSink(DataFlow::Node node) {
predicate isSink(DataFlow::Node node) {
exists(FileFunction fileFunction |
fileFunction.outermostWrapperFunctionCall(node.asIndirectArgument(), _)
)
}
override predicate isSanitizer(DataFlow::Node node) {
predicate isBarrier(DataFlow::Node node) {
node.asExpr().(Call).getTarget().getUnspecifiedType() instanceof ArithmeticType
or
exists(LoadInstruction load, Variable checkedVar |
@@ -92,13 +90,15 @@ class TaintedPathConfiguration extends TaintTracking::Configuration {
}
}
module TaintedPath = TaintTracking::Make<TaintedPathConfiguration>;
from
FileFunction fileFunction, Expr taintedArg, FlowSource taintSource, TaintedPathConfiguration cfg,
DataFlow::PathNode sourceNode, DataFlow::PathNode sinkNode, string callChain
FileFunction fileFunction, Expr taintedArg, FlowSource taintSource,
TaintedPath::PathNode sourceNode, TaintedPath::PathNode sinkNode, string callChain
where
taintedArg = sinkNode.getNode().asIndirectArgument() and
fileFunction.outermostWrapperFunctionCall(taintedArg, callChain) and
cfg.hasFlowPath(sourceNode, sinkNode) and
TaintedPath::hasFlowPath(sourceNode, sinkNode) and
taintSource = sourceNode.getNode()
select taintedArg, sourceNode, sinkNode,
"This argument to a file access function is derived from $@ and then passed to " + callChain + ".",

View File

@@ -22,7 +22,7 @@ import semmle.code.cpp.ir.dataflow.TaintTracking
import semmle.code.cpp.ir.dataflow.TaintTracking2
import semmle.code.cpp.security.FlowSources
import semmle.code.cpp.models.implementations.Strcat
import DataFlow::PathGraph
import ExecTaint::PathGraph
/**
* Holds if `incoming` is a string that is used in a format or concatenation function resulting
@@ -55,29 +55,30 @@ predicate interestingConcatenation(DataFlow::Node incoming, DataFlow::Node outgo
)
}
class ConcatState extends DataFlow::FlowState {
ConcatState() { this = "ConcatState" }
newtype TState =
TConcatState() or
TExecState(DataFlow::Node incoming, DataFlow::Node outgoing) {
interestingConcatenation(pragma[only_bind_into](incoming), pragma[only_bind_into](outgoing))
}
class ConcatState extends TConcatState {
string toString() { result = "ConcatState" }
}
class ExecState extends DataFlow::FlowState {
class ExecState extends TExecState {
DataFlow::Node incoming;
DataFlow::Node outgoing;
ExecState() {
this =
"ExecState (" + incoming.getLocation() + " | " + incoming + ", " + outgoing.getLocation() +
" | " + outgoing + ")" and
interestingConcatenation(pragma[only_bind_into](incoming), pragma[only_bind_into](outgoing))
}
ExecState() { this = TExecState(incoming, outgoing) }
DataFlow::Node getIncomingNode() { result = incoming }
DataFlow::Node getOutgoingNode() { result = outgoing }
/** Holds if this is a possible `ExecState` for `sink`. */
predicate isFeasibleForSink(DataFlow::Node sink) {
any(ExecStateConfiguration conf).hasFlow(outgoing, sink)
}
predicate isFeasibleForSink(DataFlow::Node sink) { ExecState::hasFlow(outgoing, sink) }
string toString() { result = "ExecState" }
}
predicate isSinkImpl(DataFlow::Node sink, Expr command, string callChain) {
@@ -85,7 +86,7 @@ predicate isSinkImpl(DataFlow::Node sink, Expr command, string callChain) {
shellCommand(command, callChain)
}
predicate isSanitizerImpl(DataFlow::Node node) {
predicate isBarrierImpl(DataFlow::Node node) {
node.asExpr().getUnspecifiedType() instanceof IntegralType
or
node.asExpr().getUnspecifiedType() instanceof FloatingPointType
@@ -96,56 +97,57 @@ predicate isSanitizerImpl(DataFlow::Node node) {
* given sink. This avoids a cartesian product between all sinks and all `ExecState`s in
* `ExecTaintConfiguration::isSink`.
*/
class ExecStateConfiguration extends TaintTracking2::Configuration {
ExecStateConfiguration() { this = "ExecStateConfiguration" }
module ExecStateConfiguration implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { any(ExecState state).getOutgoingNode() = source }
override predicate isSource(DataFlow::Node source) {
any(ExecState state).getOutgoingNode() = source
}
predicate isSink(DataFlow::Node sink) { isSinkImpl(sink, _, _) }
override predicate isSink(DataFlow::Node sink) { isSinkImpl(sink, _, _) }
predicate isBarrier(DataFlow::Node node) { isBarrierImpl(node) }
override predicate isSanitizer(DataFlow::Node node) { isSanitizerImpl(node) }
override predicate isSanitizerOut(DataFlow::Node node) {
isSink(node, _) // Prevent duplicates along a call chain, since `shellCommand` will include wrappers
predicate isBarrierOut(DataFlow::Node node) {
isSink(node) // Prevent duplicates along a call chain, since `shellCommand` will include wrappers
}
}
class ExecTaintConfiguration extends TaintTracking::Configuration {
ExecTaintConfiguration() { this = "ExecTaintConfiguration" }
module ExecState = TaintTracking::Make<ExecStateConfiguration>;
override predicate isSource(DataFlow::Node source, DataFlow::FlowState state) {
module ExecTaintConfiguration implements DataFlow::StateConfigSig {
class FlowState = TState;
predicate isSource(DataFlow::Node source, FlowState state) {
source instanceof FlowSource and
state instanceof ConcatState
}
override predicate isSink(DataFlow::Node sink, DataFlow::FlowState state) {
any(ExecStateConfiguration conf).isSink(sink) and
predicate isSink(DataFlow::Node sink, FlowState state) {
ExecStateConfiguration::isSink(sink) and
state.(ExecState).isFeasibleForSink(sink)
}
override predicate isAdditionalTaintStep(
DataFlow::Node node1, DataFlow::FlowState state1, DataFlow::Node node2,
DataFlow::FlowState state2
predicate isAdditionalFlowStep(
DataFlow::Node node1, FlowState state1, DataFlow::Node node2, FlowState state2
) {
state1 instanceof ConcatState and
state2.(ExecState).getIncomingNode() = node1 and
state2.(ExecState).getOutgoingNode() = node2
}
override predicate isSanitizer(DataFlow::Node node) { isSanitizerImpl(node) }
predicate isBarrier(DataFlow::Node node) { isBarrierImpl(node) }
override predicate isSanitizerOut(DataFlow::Node node) {
predicate isBarrier(DataFlow::Node node, FlowState state) { none() }
predicate isBarrierOut(DataFlow::Node node) {
isSink(node, _) // Prevent duplicates along a call chain, since `shellCommand` will include wrappers
}
}
module ExecTaint = TaintTracking::MakeWithState<ExecTaintConfiguration>;
from
ExecTaintConfiguration conf, DataFlow::PathNode sourceNode, DataFlow::PathNode sinkNode,
string taintCause, string callChain, DataFlow::Node concatResult, Expr command
ExecTaint::PathNode sourceNode, ExecTaint::PathNode sinkNode, string taintCause, string callChain,
DataFlow::Node concatResult, Expr command
where
conf.hasFlowPath(sourceNode, sinkNode) and
ExecTaint::hasFlowPath(sourceNode, sinkNode) and
taintCause = sourceNode.getNode().(FlowSource).getSourceType() and
isSinkImpl(sinkNode.getNode(), command, callChain) and
concatResult = sinkNode.getState().(ExecState).getOutgoingNode()

View File

@@ -19,7 +19,7 @@ import semmle.code.cpp.ir.dataflow.TaintTracking
import semmle.code.cpp.ir.IR
import semmle.code.cpp.controlflow.IRGuards
import semmle.code.cpp.security.FlowSources
import DataFlow::PathGraph
import TaintedAllocationSize::PathGraph
/**
* Holds if `alloc` is an allocation, and `tainted` is a child of it that is a
@@ -54,14 +54,12 @@ predicate nodeIsBarrierEqualityCandidate(DataFlow::Node node, Operand access, Va
predicate isFlowSource(FlowSource source, string sourceType) { sourceType = source.getSourceType() }
class TaintedAllocationSizeConfiguration extends TaintTracking::Configuration {
TaintedAllocationSizeConfiguration() { this = "TaintedAllocationSizeConfiguration" }
module TaintedAllocationSizeConfiguration implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { isFlowSource(source, _) }
override predicate isSource(DataFlow::Node source) { isFlowSource(source, _) }
predicate isSink(DataFlow::Node sink) { allocSink(_, sink) }
override predicate isSink(DataFlow::Node sink) { allocSink(_, sink) }
override predicate isSanitizer(DataFlow::Node node) {
predicate isBarrier(DataFlow::Node node) {
exists(Expr e | e = node.asExpr() |
// There can be two separate reasons for `convertedExprMightOverflow` not holding:
// 1. `e` really cannot overflow.
@@ -97,12 +95,14 @@ class TaintedAllocationSizeConfiguration extends TaintTracking::Configuration {
}
}
module TaintedAllocationSize = TaintTracking::Make<TaintedAllocationSizeConfiguration>;
from
Expr alloc, DataFlow::PathNode source, DataFlow::PathNode sink, string taintCause,
TaintedAllocationSizeConfiguration conf
Expr alloc, TaintedAllocationSize::PathNode source, TaintedAllocationSize::PathNode sink,
string taintCause
where
isFlowSource(source.getNode(), taintCause) and
conf.hasFlowPath(source, sink) and
TaintedAllocationSize::hasFlowPath(source, sink) and
allocSink(alloc, sink.getNode())
select alloc, source, sink, "This allocation size is derived from $@ and might overflow.",
source.getNode(), "user input (" + taintCause + ")"

View File

@@ -0,0 +1,7 @@
...
a = getc(f);
if (a < 123) ret = 123/a; // BAD
...
if (a != 0) ret = 123/a; // GOOD
...

View File

@@ -0,0 +1,23 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Possible division by zero when the return value of a function is used as a divisor.</p>
</overview>
<example>
<p>The following example shows both an incorrect and a correct use of a function's return value as a divisor.</p>
<sample src="DivideByZeroUsingReturnValue.cpp" />
</example>
<references>
<li>
CERT Coding Standard:
<a href="https://wiki.sei.cmu.edu/confluence/display/c/INT33-C.+Ensure+that+division+and+remainder+operations+do+not+result+in+divide-by-zero+errors">INT33-C. Ensure that division and remainder operations do not result in divide-by-zero errors - SEI CERT C Coding Standard - Confluence</a>.
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,274 @@
/**
* @name Divide by zero using return value
* @description Possible division by zero when the return value of a function is used as a divisor.
* @kind problem
* @id cpp/divide-by-zero-using-return-value
* @problem.severity warning
* @precision medium
* @tags correctness
* security
* external/cwe/cwe-369
*/
import cpp
import semmle.code.cpp.valuenumbering.GlobalValueNumbering
import semmle.code.cpp.controlflow.Guards
/** Holds if function `fn` can return a value equal to `val`. */
predicate mayBeReturnValue(Function fn, float val) {
exists(Expr tmpExp, ReturnStmt rs |
tmpExp.getValue().toFloat() = val and
rs.getEnclosingFunction() = fn and
(
globalValueNumber(rs.getExpr()) = globalValueNumber(tmpExp)
or
exists(AssignExpr ae |
ae.getLValue().(VariableAccess).getTarget() =
globalValueNumber(rs.getExpr()).getAnExpr().(VariableAccess).getTarget() and
globalValueNumber(ae.getRValue()) = globalValueNumber(tmpExp)
)
or
exists(Initializer it |
globalValueNumber(it.getExpr()) = globalValueNumber(tmpExp) and
it.getDeclaration().(Variable).getAnAccess().getTarget() =
globalValueNumber(rs.getExpr()).getAnExpr().(VariableAccess).getTarget()
)
)
)
}
/** Holds if function `fn` can return a value equal to zero. */
predicate mayBeReturnZero(Function fn) {
mayBeReturnValue(fn, 0)
or
fn.hasName([
"iswalpha", "iswlower", "iswprint", "iswspace", "iswblank", "iswupper", "iswcntrl",
"iswctype", "iswalnum", "iswgraph", "iswxdigit", "iswdigit", "iswpunct", "isblank", "isupper",
"isgraph", "isalnum", "ispunct", "islower", "isspace", "isprint", "isxdigit", "iscntrl",
"isdigit", "isalpha", "timespec_get", "feof", "atomic_is_lock_free",
"atomic_compare_exchange", "thrd_equal", "isfinite", "islessequal", "isnan", "isgreater",
"signbit", "isinf", "islessgreater", "isnormal", "isless", "isgreaterequal", "isunordered",
"ferror"
])
or
fn.hasName([
"thrd_sleep", "feenv", "feholdexcept", "feclearexcept", "feexceptflag", "feupdateenv",
"remove", "fflush", "setvbuf", "fgetpos", "fsetpos", "fclose", "rename", "fseek", "raise"
])
or
fn.hasName(["tss_get", "gets"])
or
fn.hasName(["getc", "atoi"])
}
/** Gets a guard condition that compares the expression `bound` with `val`, or that is itself value-equivalent to `bound`. */
pragma[inline]
GuardCondition checkByValue(Expr bound, Expr val) {
exists(GuardCondition gc |
(
gc.ensuresEq(bound, val, _, _, _) or
gc.ensuresEq(val, bound, _, _, _) or
gc.ensuresLt(bound, val, _, _, _) or
gc.ensuresLt(val, bound, _, _, _) or
gc = globalValueNumber(bound).getAnExpr()
) and
result = gc
)
}
/** Holds if there is no comparison between the value returned by a call to the function `compArg` and the value `valArg`, or if the existing comparisons do not rule out equality with `valArg`. */
pragma[inline]
predicate compareFunctionWithValue(Expr guardExp, Function compArg, Expr valArg) {
not exists(Expr exp |
exp.getAChild*() = globalValueNumber(compArg.getACallToThisFunction()).getAnExpr() and
checkByValue(exp, valArg).controls(guardExp.getBasicBlock(), _)
)
or
exists(GuardCondition gc |
(
gc.ensuresEq(globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), valArg, 0,
guardExp.getBasicBlock(), true)
or
gc.ensuresEq(valArg, globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), 0,
guardExp.getBasicBlock(), true)
or
gc.ensuresLt(globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), valArg, 0,
guardExp.getBasicBlock(), false)
or
gc.ensuresLt(valArg, globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), 0,
guardExp.getBasicBlock(), false)
)
or
exists(Expr exp |
exp.getValue().toFloat() > valArg.getValue().toFloat() and
gc.ensuresLt(globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), exp, 0,
guardExp.getBasicBlock(), true)
or
exp.getValue().toFloat() < valArg.getValue().toFloat() and
gc.ensuresLt(exp, globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), 0,
guardExp.getBasicBlock(), true)
)
)
or
valArg.getValue().toFloat() = 0 and
exists(NotExpr ne, IfStmt ifne |
ne.getOperand() = globalValueNumber(compArg.getACallToThisFunction()).getAnExpr() and
ifne.getCondition() = ne and
ifne.getThen().getAChild*() = guardExp
)
}
/** Wrapper predicate for `compareFunctionWithValue`. */
pragma[inline]
predicate checkConditions1(Expr div, Function fn, float changeInt) {
exists(Expr val |
val.getEnclosingFunction() = fn and
val.getValue().toFloat() = changeInt and
compareFunctionWithValue(div, fn, val)
)
}
/** Holds if there is no comparison between the value of `compArg` and the value `valArg`, or if the existing comparisons do not rule out equality with `valArg`. */
pragma[inline]
predicate compareExprWithValue(Expr guardExp, Expr compArg, Expr valArg) {
not exists(Expr exp |
exp.getAChild*() = globalValueNumber(compArg).getAnExpr() and
checkByValue(exp, valArg).controls(guardExp.getBasicBlock(), _)
)
or
exists(GuardCondition gc |
(
gc.ensuresEq(globalValueNumber(compArg).getAnExpr(), valArg, 0, guardExp.getBasicBlock(), true)
or
gc.ensuresEq(valArg, globalValueNumber(compArg).getAnExpr(), 0, guardExp.getBasicBlock(), true)
or
gc.ensuresLt(globalValueNumber(compArg).getAnExpr(), valArg, 0, guardExp.getBasicBlock(),
false)
or
gc.ensuresLt(valArg, globalValueNumber(compArg).getAnExpr(), 0, guardExp.getBasicBlock(),
false)
)
or
exists(Expr exp |
exp.getValue().toFloat() > valArg.getValue().toFloat() and
gc.ensuresLt(globalValueNumber(compArg).getAnExpr(), exp, 0, guardExp.getBasicBlock(), true)
or
exp.getValue().toFloat() < valArg.getValue().toFloat() and
gc.ensuresLt(exp, globalValueNumber(compArg).getAnExpr(), 0, guardExp.getBasicBlock(), true)
)
)
or
valArg.getValue().toFloat() = 0 and
exists(NotExpr ne, IfStmt ifne |
ne.getOperand() = globalValueNumber(compArg).getAnExpr() and
ifne.getCondition() = ne and
ifne.getThen().getAChild*() = guardExp
)
}
/** Wrapper predicate for `compareExprWithValue`. */
pragma[inline]
predicate checkConditions2(Expr div, Expr divVal, float changeInt2) {
exists(Expr val |
(
val.getEnclosingFunction() =
div.getEnclosingFunction().getACallToThisFunction().getEnclosingFunction() or
val.getEnclosingFunction() = div.getEnclosingFunction()
) and
val.getValue().toFloat() = changeInt2 and
compareExprWithValue(div, divVal, val)
)
}
/** Gets the value of the second operand of `src` if it is a subtraction involving `e1`, or the negated value of the second operand if it is an addition. */
float getValueOperand(Expr src, Expr e1, Expr e2) {
src.(SubExpr).hasOperands(e1, e2) and
result = e2.getValue().toFloat()
or
src.(AddExpr).hasOperands(e1, e2) and
result = -e2.getValue().toFloat()
}
/** Gets the expression `e1` itself, an operand of `e1` if it is a multiplication, or the left operand of `e1` if it is a division. */
Expr getMulDivOperand(Expr e1) {
result = e1 or
result = e1.(MulExpr).getAnOperand() or
result = e1.(DivExpr).getLeftOperand()
}
/** A division or remainder expression, including the compound assignment forms `/=` and `%=`. */
class MyDiv extends Expr {
MyDiv() {
this instanceof DivExpr or
this instanceof RemExpr or
this instanceof AssignDivExpr or
this instanceof AssignRemExpr
}
Expr getRV() {
result = this.(AssignArithmeticOperation).getRValue() or
result = this.(BinaryArithmeticOperation).getRightOperand()
}
}
from Expr exp, string msg, Function fn, GVN findVal, float changeInt, MyDiv div
where
findVal = globalValueNumber(fn.getACallToThisFunction()) and
(
// Look for divide-by-zero operations possible due to the return value of the function `fn`.
checkConditions1(div, fn, changeInt) and
(
// Function return value can be zero.
mayBeReturnZero(fn) and
getMulDivOperand(globalValueNumber(div.getRV()).getAnExpr()) = findVal.getAnExpr() and
changeInt = 0
or
// Denominator can be sum or difference.
changeInt = getValueOperand(div.getRV(), findVal.getAnExpr(), _) and
mayBeReturnValue(fn, changeInt)
) and
exp = div and
msg =
"Can lead to division by 0, since the function " + fn.getName() + " can return a value " +
changeInt.toString() + "."
or
// Look for divisions by zero that are possible inside a called function `divFn` when the argument passed to it can equal a certain value.
exists(int posArg, Expr divVal, FunctionCall divFc, float changeInt2 |
// Division is associated with the function argument.
exists(Function divFn |
divFn.getParameter(posArg).getAnAccess() = divVal and
divVal.getEnclosingStmt() = div.getEnclosingStmt() and
divFc = divFn.getACallToThisFunction()
) and
(
divVal = div.getRV() and
divFc.getArgument(posArg) != findVal.getAnExpr() and
(
// Function return value can be zero.
mayBeReturnZero(fn) and
getMulDivOperand(globalValueNumber(divFc.getArgument(posArg)).getAnExpr()) =
findVal.getAnExpr() and
changeInt = 0 and
changeInt2 = 0
or
// Denominator can be sum or difference.
changeInt = getValueOperand(divFc.getArgument(posArg), findVal.getAnExpr(), _) and
mayBeReturnValue(fn, changeInt) and
changeInt2 = 0
)
or
// Handle the case where the divisor inside the callee is a difference or sum involving the parameter, and the return value of `fn` is passed as the argument.
changeInt = getValueOperand(div.getRV(), divVal, _) and
changeInt2 = changeInt and
mayBeReturnValue(fn, changeInt) and
divFc.getArgument(posArg) = findVal.getAnExpr()
) and
checkConditions2(div, divVal, changeInt2) and
checkConditions1(divFc, fn, changeInt) and
exp = divFc and
msg =
"Can lead to division by 0, since the function " + fn.getName() + " can return a value " +
changeInt.toString() + "."
)
)
select exp, msg

View File

@@ -2,7 +2,7 @@ import cpp
import semmle.code.cpp.dataflow.new.DataFlow
class GetenvSource extends DataFlow::Node {
GetenvSource() { this.asIndirectExpr(1).(FunctionCall).getTarget().hasQualifiedName("getenv") }
GetenvSource() { this.asIndirectExpr(1).(FunctionCall).getTarget().hasGlobalName("getenv") }
}
class GetenvToGethostbynameConfiguration extends DataFlow::Configuration {

View File

@@ -3,7 +3,7 @@ import semmle.code.cpp.dataflow.new.DataFlow
from Function fopen, FunctionCall fc, Expr src, DataFlow::Node source, DataFlow::Node sink
where
fopen.hasQualifiedName("fopen") and
fopen.hasGlobalName("fopen") and
fc.getTarget() = fopen and
source.asIndirectExpr(1) = src and
sink.asIndirectExpr(1) = fc.getArgument(0) and

View File

@@ -7,14 +7,14 @@ class EnvironmentToFileConfiguration extends DataFlow::Configuration {
override predicate isSource(DataFlow::Node source) {
exists(Function getenv |
source.asIndirectExpr(1).(FunctionCall).getTarget() = getenv and
getenv.hasQualifiedName("getenv")
getenv.hasGlobalName("getenv")
)
}
override predicate isSink(DataFlow::Node sink) {
exists(FunctionCall fc |
sink.asIndirectExpr(1) = fc.getArgument(0) and
fc.getTarget().hasQualifiedName("fopen")
fc.getTarget().hasGlobalName("fopen")
)
}
}

View File

@@ -3,7 +3,7 @@ import semmle.code.cpp.dataflow.new.DataFlow
from Function fopen, FunctionCall fc, Parameter p, DataFlow::Node source, DataFlow::Node sink
where
fopen.hasQualifiedName("fopen") and
fopen.hasGlobalName("fopen") and
fc.getTarget() = fopen and
source.asParameter(1) = p and
sink.asIndirectExpr(1) = fc.getArgument(0) and

View File

@@ -2,6 +2,6 @@ import cpp
from Function fopen, FunctionCall fc
where
fopen.hasQualifiedName("fopen") and
fopen.hasGlobalName("fopen") and
fc.getTarget() = fopen
select fc.getArgument(0)

View File

@@ -0,0 +1,27 @@
| test.cpp:47:24:47:31 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:48:15:48:34 | ... / ... | Can lead to division by 0, since the function getSize2 can return the value 0. |
| test.cpp:53:10:53:17 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:65:15:65:22 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:68:15:68:22 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:71:9:71:16 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:74:9:74:16 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:77:21:77:28 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:79:25:79:32 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:81:24:81:31 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:128:10:128:16 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:135:10:135:16 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:141:10:141:23 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:153:12:153:19 | ... / ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:172:3:172:12 | ... /= ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:173:3:173:12 | ... %= ... | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:187:10:187:17 | ... / ... | Can lead to division by 0, since the function getSizeFloat can return the value 0. |
| test.cpp:199:12:199:25 | ... / ... | Can lead to division by 0, since the function getSize can return the value -1. |
| test.cpp:202:12:202:25 | ... / ... | Can lead to division by 0, since the function getSize can return the value 1. |
| test.cpp:205:10:205:23 | ... / ... | Can lead to division by 0, since the function getSize can return the value 1. |
| test.cpp:210:10:210:23 | ... / ... | Can lead to division by 0, since the function getSize can return the value 3. |
| test.cpp:258:3:258:10 | call to badMyDiv | Can lead to division by 0, since the function getSize can return the value 0. |
| test.cpp:259:3:259:10 | call to badMyDiv | Can lead to division by 0, since the function getSize can return the value 2. |
| test.cpp:260:3:260:13 | call to badMySubDiv | Can lead to division by 0, since the function getSize can return the value 3. |
| test.cpp:263:5:263:15 | call to badMySubDiv | Can lead to division by 0, since the function getSize can return the value 3. |
| test.cpp:273:5:273:12 | call to badMyDiv | Can lead to division by 0, since the function getSize can return the value 3. |
| test.cpp:275:5:275:12 | call to badMyDiv | Can lead to division by 0, since the function getSize can return the value -1. |

View File

@@ -0,0 +1 @@
experimental/Security/CWE/CWE-369/DivideByZeroUsingReturnValue.ql

View File

@@ -0,0 +1,278 @@
typedef struct {}
FILE;
int getc(FILE * stream);
int getSize(int type) {
int st;
switch (type) {
case 1:
st = 1;
break;
case 2:
st = 2;
break;
case 3:
st = 3;
break;
case 4:
st = -1;
break;
default:
st = 0;
break;
}
return st;
}
int getSize2(int type) {
int st = 0;
switch (type) {
case 1:
st = 1;
break;
case 2:
st = 2;
break;
case 3:
st = 3;
break;
case 4:
st = -1;
break;
}
return st;
}
int badTestf1(int type, int met) {
int is = getSize(type);
if (met == 1) return 123 / is; // BAD
else return 123 / getSize2(type); // BAD
}
int badTestf2(int type) {
int is;
is = getSize(type);
return 123 / is; // BAD
}
int badTestf3(int type, int met) {
int is;
is = getSize(type);
switch (met) {
case 1:
if (is >= 0) return 123 / is; // BAD [NOT DETECTED]
case 2:
if (0 == is) return 123 / is; // BAD [NOT DETECTED]
case 3:
if (!is & 123 / is) // BAD
return 123;
case 4:
if (!is | 123 / is) // BAD
return 123;
case 5:
if (123 / is || !is) // BAD
return 123;
case 6:
if (123 / is && !is) // BAD
return 123;
case 7:
if (!is) return 123 / is; // BAD
case 8:
if (is > -1) return 123 / is; // BAD
case 9:
if (is < 2) return 123 / is; // BAD
}
if (is != 0) return -1;
if (is == 0) type += 1;
return 123 / is; // BAD [NOT DETECTED]
}
int goodTestf3(int type, int met) {
int is = getSize(type);
if (is == 0) return -1;
switch (met) {
case 1:
if (is < 0) return 123 / is; // GOOD
case 2:
if (!is && 123 / is) // GOOD
return 123;
case 3:
if (!is || 123 / is) // GOOD
return 123;
case 8:
if (is < -1) return 123 / is; // GOOD
case 9:
if (is > 2) return 123 / is; // GOOD
}
return 123 / is;
}
int goodTestf3a(int type, int met) {
int is = getSize(type);
switch (met) {
case 1:
if (is < 0)
return 123 / is; // GOOD
case 2:
if (!is && 123 / is) // GOOD
return 123;
case 3:
if (!is || 123 / is) // GOOD
return 123;
}
return 1;
}
int badTestf4(int type) {
int is = getSize(type);
int d;
d = type * is;
return 123 / d; // BAD
}
int badTestf5(int type) {
int is = getSize(type);
int d;
d = is / type;
return 123 / d; // BAD
}
int badTestf6(int type) {
int is = getSize(type);
int d;
d = is / type;
return type * 123 / d; // BAD
}
int badTestf7(int type, int met) {
int is = getSize(type);
if (is == 0) goto quit;
switch (met) {
case 1:
if (is < 0)
return 123 / is; // GOOD
}
quit:
return 123 / is; // BAD
}
int goodTestf7(int type, int met) {
int is = getSize(type);
if (is == 0) goto quit2;
if (is == 0.) return -1;
switch (met) {
case 1:
if (is < 0.)
return 123 / is; // GOOD
}
return 123 / is; // GOOD
quit2:
return -1;
}
int badTestf8(int type) {
int is = getSize(type);
type /= is; // BAD
type %= is; // BAD
return type;
}
float getSizeFloat(float type) {
float st;
if (type)
st = 1.0;
else
st = 0.0;
return st;
}
float badTestf9(float type) {
float is = getSizeFloat(type);
return 123 / is; // BAD
}
float goodTestf9(float type) {
float is = getSizeFloat(type);
if (is == 0.0) return -1;
return 123 / is; // GOOD
}
int badTestf10(int type) {
int out = type;
int is = getSize(type);
if (is > -2) {
out /= 123 / (is + 1); // BAD
}
if (is > 0) {
return 123 / (is - 1); // BAD
}
if (is <= 0) return 0;
return 123 / (is - 1); // BAD
return 0;
}
int badTestf11(int type) {
int is = getSize(type);
return 123 / (is - 3); // BAD
}
int goodTestf11(int type) {
int is = getSize(type);
if (is > 1) {
return 123 / (is - 1); // GOOD
} else {
return 0;
}
}
int badTestf12(FILE * f) {
int a;
int ret = -1;
a = getc(f);
if (a == 0) ret = 123 / a; // BAD [NOT DETECTED]
return ret;
}
int goodTestf12(FILE * f) {
int a;
int ret = -1;
a = getc(f);
if (a != 0) ret = 123 / a; // GOOD
return ret;
}
int badMyDiv(int type, int is) {
type /= is;
type %= is;
return type;
}
int goodMyDiv(int type, int is) {
if (is == 0) return -1;
type /= is;
type %= is;
return type;
}
int badMySubDiv(int type, int is) {
type /= (is - 3);
type %= (is + 1);
return type;
}
void badTestf13(int type) {
int is = getSize(type);
badMyDiv(type, is); // BAD
badMyDiv(type, is - 2); // BAD
badMySubDiv(type, is); // BAD
goodMyDiv(type, is); // GOOD
if (is < 5)
badMySubDiv(type, is); // BAD
if (is < 0)
badMySubDiv(type, is); // BAD [NOT DETECTED]
if (is > 5)
badMySubDiv(type, is); // GOOD
if (is == 0)
badMyDiv(type, is); // BAD
if (is > 0)
badMyDiv(type, is); // GOOD
if (is < 5)
badMyDiv(type, is - 3); // BAD
if (is < 0)
badMyDiv(type, is + 1); // BAD
if (is > 5)
badMyDiv(type, is - 3); // GOOD
}

View File

@@ -60,8 +60,6 @@ edges
| test.cpp:220:10:220:16 | strncat output argument | test.cpp:222:32:222:38 | command indirection |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
nodes
| test.cpp:15:27:15:30 | argv indirection | semmle.label | argv indirection |
| test.cpp:15:27:15:30 | argv indirection | semmle.label | argv indirection |
@@ -133,6 +131,7 @@ nodes
| test.cpp:220:19:220:26 | filename indirection | semmle.label | filename indirection |
| test.cpp:220:19:220:26 | filename indirection | semmle.label | filename indirection |
| test.cpp:222:32:222:38 | command indirection | semmle.label | command indirection |
| test.cpp:222:32:222:38 | command indirection | semmle.label | command indirection |
subpaths
| test.cpp:196:26:196:33 | filename indirection | test.cpp:186:47:186:54 | filename indirection | test.cpp:188:11:188:17 | strncat output argument | test.cpp:196:10:196:16 | concat output argument |
| test.cpp:196:26:196:33 | filename indirection | test.cpp:186:47:186:54 | filename indirection | test.cpp:188:11:188:17 | strncat output argument | test.cpp:196:10:196:16 | concat output argument |

View File

@@ -10,6 +10,8 @@ namespace Semmle.Extraction.CSharp.Entities
private Conversion(Context cx, IMethodSymbol init)
: base(cx, init) { }
protected override MethodKind ExplicitlyImplementsKind => MethodKind.Conversion;
public static new Conversion Create(Context cx, IMethodSymbol symbol) =>
ConversionFactory.Instance.CreateEntityFromSymbol(cx, symbol);

View File

@@ -83,10 +83,12 @@ namespace Semmle.Extraction.CSharp.Entities
}
}
protected virtual MethodKind ExplicitlyImplementsKind => MethodKind.Ordinary;
public void Overrides(TextWriter trapFile)
{
foreach (var explicitInterface in Symbol.ExplicitInterfaceImplementations
.Where(sym => sym.MethodKind == MethodKind.Ordinary)
.Where(sym => sym.MethodKind == ExplicitlyImplementsKind)
.Select(impl => Type.Create(Context, impl.ContainingType)))
{
trapFile.explicitly_implements(this, explicitInterface.TypeRef);

View File

@@ -85,6 +85,9 @@ namespace Semmle.Extraction.CSharp.Entities
if (nt.IsRecord)
HasModifier(cx, trapFile, key, Modifiers.Record);
if (nt.IsFileLocal)
HasModifier(cx, trapFile, key, Modifiers.File);
if (nt.TypeKind == TypeKind.Struct)
{
if (nt.IsReadOnly)
@@ -97,7 +100,11 @@ namespace Semmle.Extraction.CSharp.Entities
public static void ExtractModifiers(Context cx, TextWriter trapFile, IEntity key, ISymbol symbol)
{
HasAccessibility(cx, trapFile, key, symbol.DeclaredAccessibility);
// A file-scoped type has declared accessibility `internal`, which we shouldn't extract.
// The `file` modifier is extracted as a source-level modifier.
if (symbol.Kind != SymbolKind.NamedType || !((INamedTypeSymbol)symbol).IsFileLocal)
HasAccessibility(cx, trapFile, key, symbol.DeclaredAccessibility);
if (symbol.Kind == SymbolKind.ErrorType)
trapFile.has_modifiers(key, Modifier.Create(cx, Accessibility.Public));

View File

@@ -4,6 +4,7 @@ internal static class Modifiers
public const string Async = "async";
public const string Const = "const";
public const string Extern = "extern";
public const string File = "file";
public const string Internal = "internal";
public const string New = "new";
public const string Override = "override";

View File

@@ -11,6 +11,8 @@ namespace Semmle.Extraction.CSharp.Entities
protected UserOperator(Context cx, IMethodSymbol init)
: base(cx, init) { }
protected override MethodKind ExplicitlyImplementsKind => MethodKind.UserDefinedOperator;
public override void Populate(TextWriter trapFile)
{
PopulateMethod(trapFile);
@@ -37,6 +39,7 @@ namespace Semmle.Extraction.CSharp.Entities
}
ContainingType.PopulateGenerics();
Overrides(trapFile);
}
public override bool NeedsPopulation => Context.Defines(Symbol) || IsImplicitOperator(out _);

View File

@@ -282,54 +282,60 @@ namespace Semmle.Extraction.CSharp
public static IEnumerable<IFieldSymbol?> GetTupleElementsMaybeNull(this INamedTypeSymbol type) =>
type.TupleElements;
private static void BuildQualifierAndName(INamedTypeSymbol named, Context cx, EscapingTextWriter trapFile, ISymbol symbolBeingDefined)
{
if (named.ContainingType is not null)
{
named.ContainingType.BuildOrWriteId(cx, trapFile, symbolBeingDefined, constructUnderlyingTupleType: false);
trapFile.Write('.');
}
else if (named.ContainingNamespace is not null)
{
if (cx.ShouldAddAssemblyTrapPrefix && named.ContainingAssembly is not null)
BuildAssembly(named.ContainingAssembly, trapFile);
named.ContainingNamespace.BuildNamespace(cx, trapFile);
}
var name = named.IsFileLocal ? named.MetadataName : named.Name;
trapFile.Write(name);
}
private static void BuildTupleId(INamedTypeSymbol named, Context cx, EscapingTextWriter trapFile, ISymbol symbolBeingDefined)
{
trapFile.Write('(');
trapFile.BuildList(",", named.GetTupleElementsMaybeNull(),
(i, f) =>
{
if (f is null)
{
trapFile.Write($"null({i})");
}
else
{
trapFile.Write((f.CorrespondingTupleField ?? f).Name);
trapFile.Write(":");
f.Type.BuildOrWriteId(cx, trapFile, symbolBeingDefined, constructUnderlyingTupleType: false);
}
}
);
trapFile.Write(")");
}
private static void BuildNamedTypeId(this INamedTypeSymbol named, Context cx, EscapingTextWriter trapFile, ISymbol symbolBeingDefined, bool constructUnderlyingTupleType)
{
if (!constructUnderlyingTupleType && named.IsTupleType)
{
trapFile.Write('(');
trapFile.BuildList(",", named.GetTupleElementsMaybeNull(),
(i, f) =>
{
if (f is null)
{
trapFile.Write($"null({i})");
}
else
{
trapFile.Write((f.CorrespondingTupleField ?? f).Name);
trapFile.Write(":");
f.Type.BuildOrWriteId(cx, trapFile, symbolBeingDefined, constructUnderlyingTupleType: false);
}
}
);
trapFile.Write(")");
BuildTupleId(named, cx, trapFile, symbolBeingDefined);
return;
}
void AddContaining()
{
if (named.ContainingType is not null)
{
named.ContainingType.BuildOrWriteId(cx, trapFile, symbolBeingDefined, constructUnderlyingTupleType: false);
trapFile.Write('.');
}
else if (named.ContainingNamespace is not null)
{
if (cx.ShouldAddAssemblyTrapPrefix && named.ContainingAssembly is not null)
BuildAssembly(named.ContainingAssembly, trapFile);
named.ContainingNamespace.BuildNamespace(cx, trapFile);
}
}
if (named.TypeParameters.IsEmpty)
{
AddContaining();
trapFile.Write(named.Name);
BuildQualifierAndName(named, cx, trapFile, symbolBeingDefined);
}
else if (named.IsReallyUnbound())
{
AddContaining();
trapFile.Write(named.Name);
BuildQualifierAndName(named, cx, trapFile, symbolBeingDefined);
trapFile.Write("`");
trapFile.Write(named.TypeParameters.Length);
}

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* C# 11: Added extractor and library support for `file` scoped types.

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* C# 11: Support for explicit interface member implementation of operators.

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The query `cs/static-field-written-by-instance` is updated to handle properties.

View File

@@ -0,0 +1,9 @@
---
category: majorAnalysis
---
* The main data flow and taint tracking APIs have been changed. The old APIs
remain in place for now and translate to the new through a
backwards-compatible wrapper. If multiple configurations are in scope
simultaneously, then this may affect results slightly. The new API is quite
similar to the old, but makes use of a configuration module instead of a
configuration class.
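
  As a sketch of the migration (module names illustrative; the source/sink
  logic is the `getenv`-to-`fopen` flow used by the C++ examples in this
  change):

```ql
// Old API: a Configuration subclass with a characteristic predicate.
class EnvironmentToFileConfiguration extends DataFlow::Configuration {
  EnvironmentToFileConfiguration() { this = "EnvironmentToFileConfiguration" }

  override predicate isSource(DataFlow::Node source) {
    source.asIndirectExpr(1).(FunctionCall).getTarget().hasGlobalName("getenv")
  }

  override predicate isSink(DataFlow::Node sink) {
    exists(FunctionCall fc |
      sink.asIndirectExpr(1) = fc.getArgument(0) and
      fc.getTarget().hasGlobalName("fopen")
    )
  }
}

// New API: a module implementing `DataFlow::ConfigSig`, instantiated once.
module EnvironmentToFileConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) {
    source.asIndirectExpr(1).(FunctionCall).getTarget().hasGlobalName("getenv")
  }

  predicate isSink(DataFlow::Node sink) {
    exists(FunctionCall fc |
      sink.asIndirectExpr(1) = fc.getArgument(0) and
      fc.getTarget().hasGlobalName("fopen")
    )
  }
}

module EnvironmentToFileFlow = DataFlow::Make<EnvironmentToFileConfig>;
// `cfg.hasFlow(source, sink)` becomes `EnvironmentToFileFlow::hasFlow(source, sink)`.
```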

View File

@@ -434,7 +434,7 @@ class Destructor extends DotNet::Destructor, Callable, Member, Attributable, @de
* Either a unary operator (`UnaryOperator`), a binary operator
* (`BinaryOperator`), or a conversion operator (`ConversionOperator`).
*/
class Operator extends Callable, Member, Attributable, @operator {
class Operator extends Callable, Member, Attributable, Overridable, @operator {
/**
* DEPRECATED: use `getFunctionName()` instead.
*

View File

@@ -93,6 +93,9 @@ class Modifiable extends Declaration, @modifiable {
/** Holds if this declaration has the modifier `required`. */
predicate isRequired() { this.hasModifier("required") }
/** Holds if this declaration is `file` local. */
predicate isFile() { this.hasModifier("file") }
/** Holds if this declaration is `unsafe`. */
predicate isUnsafe() {
this.hasModifier("unsafe") or
@@ -183,6 +186,8 @@ class Member extends DotNet::Member, Modifiable, @member {
override predicate isStatic() { Modifiable.super.isStatic() }
override predicate isRequired() { Modifiable.super.isRequired() }
override predicate isFile() { Modifiable.super.isFile() }
}
private class TOverridable = @virtualizable or @callable_accessor;

View File

@@ -6,5 +6,6 @@
import csharp
module DataFlow {
import semmle.code.csharp.dataflow.internal.DataFlowImpl
import semmle.code.csharp.dataflow.internal.DataFlow
import semmle.code.csharp.dataflow.internal.DataFlowImpl1
}

View File

@@ -137,6 +137,8 @@ private class RecordConstructorFlow extends SummarizedCallable {
preservesValue = true
)
}
override predicate hasProvenance(string provenance) { provenance = "manual" }
}
class RequiredSummaryComponentStack = Impl::Public::RequiredSummaryComponentStack;

View File

@@ -6,5 +6,6 @@
import csharp
module TaintTracking {
import semmle.code.csharp.dataflow.internal.tainttracking1.TaintTracking
import semmle.code.csharp.dataflow.internal.tainttracking1.TaintTrackingImpl
}

View File

@@ -16,8 +16,7 @@ module ContentDataFlow {
class ContentSet = DF::ContentSet;
predicate stageStats = DF::stageStats/8;
// predicate stageStats = DF::stageStats/8;
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each

View File

@@ -0,0 +1,245 @@
/**
* Provides an implementation of global (interprocedural) data flow. This file
* re-exports the local (intraprocedural) data flow analysis from
* `DataFlowImplSpecific::Public` and adds a global analysis, mainly exposed
* through the `Make` and `MakeWithState` modules.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
import DataFlowImplCommonPublic
private import DataFlowImpl
/** An input configuration for data flow. */
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
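A minimal sketch of an implementation of this signature (hypothetical names): only `isSource` and `isSink` must be provided, and every other member keeps the default declared above.

```ql
module MinimalConfig implements ConfigSig {
  // Query-specific logic is elided to `none()` in this sketch.
  predicate isSource(Node source) { none() }

  predicate isSink(Node sink) { none() }

  // Optionally override a default, e.g. trade field-flow recall for speed.
  int fieldFlowBranchLimit() { result = 1 }
}
```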
/** An input configuration for data flow using flow state. */
signature module StateConfigSig {
bindingset[this]
class FlowState;
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state);
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state);
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2);
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
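By contrast with `ConfigSig`, this signature has no defaults for `isBarrier/2` and `isAdditionalFlowStep/4`, so both must be provided. A sketch (hypothetical names, patterned after the `ExecTaint` query converted in this change):

```ql
newtype TPhase =
  TBefore() or
  TAfter()

module TwoPhaseConfig implements StateConfigSig {
  class FlowState = TPhase;

  // Bodies are elided to `none()` in this sketch; a real configuration
  // would start sources in `TBefore()` and accept sinks in `TAfter()`.
  predicate isSource(Node source, FlowState state) { none() }

  predicate isSink(Node sink, FlowState state) { none() }

  predicate isBarrier(Node node, FlowState state) { none() }

  // A state-changing step would relate `TBefore()` to `TAfter()` here.
  predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
    none()
  }
}
```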
/**
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
signature int explorationLimitSig();
/**
* The output of a data flow computation.
*/
signature module DataFlowSig {
/**
* A `Node` augmented with a call context (except for sinks) and an access path.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode;
/**
* Holds if data can flow from `source` to `sink`.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink);
/**
* Holds if data can flow from `source` to `sink`.
*/
predicate hasFlow(Node source, Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowTo(Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowToExpr(DataFlowExpr sink);
}
/**
* Constructs a standard data flow computation.
*/
module Make<ConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import DefaultState<Config>
import Config
}
import Impl<C>
}
/**
* Constructs a data flow computation using flow state.
*/
module MakeWithState<StateConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import Config
}
import Impl<C>
}
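Consuming the resulting `DataFlowSig` module is then uniform across configurations; a sketch reusing the hypothetical `MinimalConfig` from above:

```ql
module MinimalFlow = Make<MinimalConfig>;

// Plain reachability, without constructing paths:
predicate reachesSink(Node sink) { MinimalFlow::hasFlowTo(sink) }

// For a `path-problem` query one would instead import
// `MinimalFlow::PathGraph` and use `MinimalFlow::hasFlowPath`.
```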

View File

@@ -0,0 +1,396 @@
/**
* DEPRECATED: Use `Make` and `MakeWithState` instead.
*
* Provides a `Configuration` class backwards-compatible interface to the data
* flow library.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
private import DataFlowImpl
import DataFlowImplCommonPublic
import FlowStateString
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each
* use of the global data flow library must define its own unique extension
* of this abstract class. To create a configuration, extend this class with
* a subclass whose characteristic predicate is a unique singleton string.
* For example, write
*
* ```ql
* class MyAnalysisConfiguration extends DataFlow::Configuration {
* MyAnalysisConfiguration() { this = "MyAnalysisConfiguration" }
* // Override `isSource` and `isSink`.
* // Optionally override `isBarrier`.
* // Optionally override `isAdditionalFlowStep`.
* }
* ```
* Conceptually, this defines a graph where the nodes are `DataFlow::Node`s and
* the edges are those data-flow steps that preserve the value of the node
* along with any additional edges defined by `isAdditionalFlowStep`.
* Specifying nodes in `isBarrier` will remove those nodes from the graph, and
* specifying nodes in `isBarrierIn` and/or `isBarrierOut` will remove in-going
* and/or out-going edges from those nodes, respectively.
*
* Then, to query whether there is flow between some `source` and `sink`,
* write
*
* ```ql
* exists(MyAnalysisConfiguration cfg | cfg.hasFlow(source, sink))
* ```
*
* Multiple configurations can coexist, but two classes extending
* `DataFlow::Configuration` should never depend on each other. One of them
* should instead depend on a `DataFlow2::Configuration`, a
* `DataFlow3::Configuration`, or a `DataFlow4::Configuration`.
*/
abstract class Configuration extends string {
bindingset[this]
Configuration() { any() }
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source) { none() }
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state) { none() }
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink) { none() }
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state) { none() }
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state) { none() }
/** Holds if data flow into `node` is prohibited. */
predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
predicate isBarrierOut(Node node) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited when
* the flow state is `state`
*/
deprecated predicate isBarrierGuard(BarrierGuard guard, FlowState state) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
none()
}
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*/
predicate hasFlow(Node source, Node sink) { hasFlow(source, sink, this) }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink) { hasFlowPath(source, sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowTo(Node sink) { hasFlowTo(sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowToExpr(DataFlowExpr sink) { this.hasFlowTo(exprNode(sink)) }
/**
* DEPRECATED: Use `FlowExploration<explorationLimit>` instead.
*
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
deprecated int explorationLimit() { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (for example in a `path-problem` query).
*/
predicate includeHiddenNodes() { none() }
}
/**
* This class exists to prevent mutual recursion between the user-overridden
* member predicates of `Configuration` and the rest of the data-flow library.
* Good performance cannot be guaranteed in the presence of such recursion, so
* it should be replaced by using more than one copy of the data flow library.
*/
abstract private class ConfigurationRecursionPrevention extends Configuration {
bindingset[this]
ConfigurationRecursionPrevention() { any() }
override predicate hasFlow(Node source, Node sink) {
strictcount(Node n | this.isSource(n)) < 0
or
strictcount(Node n | this.isSource(n, _)) < 0
or
strictcount(Node n | this.isSink(n)) < 0
or
strictcount(Node n | this.isSink(n, _)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, n2)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, _, n2, _)) < 0
or
super.hasFlow(source, sink)
}
}
/** A bridge class to access the deprecated `isBarrierGuard`. */
private class BarrierGuardGuardedNodeBridge extends Unit {
abstract predicate guardedNode(Node n, Configuration config);
abstract predicate guardedNode(Node n, FlowState state, Configuration config);
}
private class BarrierGuardGuardedNode extends BarrierGuardGuardedNodeBridge {
deprecated override predicate guardedNode(Node n, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g) and
n = g.getAGuardedNode()
)
}
deprecated override predicate guardedNode(Node n, FlowState state, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g, state) and
n = g.getAGuardedNode()
)
}
}
private FlowState relevantState(Configuration config) {
config.isSource(_, result) or
config.isSink(_, result) or
config.isBarrier(_, result) or
config.isAdditionalFlowStep(_, result, _, _) or
config.isAdditionalFlowStep(_, _, _, result)
}
private newtype TConfigState =
TMkConfigState(Configuration config, FlowState state) {
state = relevantState(config) or state instanceof FlowStateEmpty
}
private Configuration getConfig(TConfigState state) { state = TMkConfigState(result, _) }
private FlowState getState(TConfigState state) { state = TMkConfigState(_, result) }
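// Holds only when a single `Configuration` instance exists; in that case the
// state-less `isAdditionalFlowStep/2` overload can be used directly, since
// steps cannot leak between distinct configurations.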
private predicate singleConfiguration() { 1 = strictcount(Configuration c) }
private module Config implements FullStateConfigSig {
class FlowState = TConfigState;
predicate isSource(Node source, FlowState state) {
getConfig(state).isSource(source, getState(state))
or
getConfig(state).isSource(source) and getState(state) instanceof FlowStateEmpty
}
predicate isSink(Node sink, FlowState state) {
getConfig(state).isSink(sink, getState(state))
or
getConfig(state).isSink(sink) and getState(state) instanceof FlowStateEmpty
}
predicate isBarrier(Node node) { none() }
predicate isBarrier(Node node, FlowState state) {
getConfig(state).isBarrier(node, getState(state)) or
getConfig(state).isBarrier(node) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getState(state), getConfig(state)) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getConfig(state))
}
predicate isBarrierIn(Node node) { any(Configuration config).isBarrierIn(node) }
predicate isBarrierOut(Node node) { any(Configuration config).isBarrierOut(node) }
predicate isAdditionalFlowStep(Node node1, Node node2) {
singleConfiguration() and
any(Configuration config).isAdditionalFlowStep(node1, node2)
}
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
getConfig(state1).isAdditionalFlowStep(node1, getState(state1), node2, getState(state2)) and
getConfig(state2) = getConfig(state1)
or
not singleConfiguration() and
getConfig(state1).isAdditionalFlowStep(node1, node2) and
state2 = state1
}
predicate allowImplicitRead(Node node, ContentSet c) {
any(Configuration config).allowImplicitRead(node, c)
}
int fieldFlowBranchLimit() { result = min(any(Configuration config).fieldFlowBranchLimit()) }
FlowFeature getAFeature() { result = any(Configuration config).getAFeature() }
predicate sourceGrouping(Node source, string sourceGroup) {
any(Configuration config).sourceGrouping(source, sourceGroup)
}
predicate sinkGrouping(Node sink, string sinkGroup) {
any(Configuration config).sinkGrouping(sink, sinkGroup)
}
predicate includeHiddenNodes() { any(Configuration config).includeHiddenNodes() }
}
private import Impl<Config> as I
import I
/**
* A `Node` augmented with a call context (except for sinks), an access path, and a configuration.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode instanceof I::PathNode {
/** Gets a textual representation of this element. */
final string toString() { result = super.toString() }
/**
* Gets a textual representation of this element, including a textual
* representation of the call context.
*/
final string toStringWithContext() { result = super.toStringWithContext() }
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
*/
final predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
}
/** Gets the underlying `Node`. */
final Node getNode() { result = super.getNode() }
/** Gets the `FlowState` of this node. */
final FlowState getState() { result = getState(super.getState()) }
/** Gets the associated configuration. */
final Configuration getConfiguration() { result = getConfig(super.getState()) }
/** Gets a successor of this node, if any. */
final PathNode getASuccessor() { result = super.getASuccessor() }
/** Holds if this node is a source. */
final predicate isSource() { super.isSource() }
/** Holds if this node is a grouping of source nodes. */
final predicate isSourceGroup(string group) { super.isSourceGroup(group) }
/** Holds if this node is a grouping of sink nodes. */
final predicate isSinkGroup(string group) { super.isSinkGroup(group) }
}
private predicate hasFlow(Node source, Node sink, Configuration config) {
exists(PathNode source0, PathNode sink0 |
hasFlowPath(source0, sink0, config) and
source0.getNode() = source and
sink0.getNode() = sink
)
}
private predicate hasFlowPath(PathNode source, PathNode sink, Configuration config) {
hasFlowPath(source, sink) and source.getConfiguration() = config
}
private predicate hasFlowTo(Node sink, Configuration config) { hasFlow(_, sink, config) }
predicate flowsTo = hasFlow/3;
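// For reference, a legacy configuration built on the predicates above is
// typically consumed in a path-problem query along these lines (`MyConfig`
// and the alert message are illustrative, not part of this library):
//
//   class MyConfig extends Configuration {
//     MyConfig() { this = "MyConfig" }
//     override predicate isSource(Node source) { any() } // restrict in practice
//     override predicate isSink(Node sink) { any() } // restrict in practice
//   }
//
//   from MyConfig cfg, PathNode source, PathNode sink
//   where cfg.hasFlowPath(source, sink)
//   select sink.getNode(), source, sink, "Flow from $@.", source.getNode(), "this source"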

View File

@@ -3,15 +3,18 @@ private import DataFlowImplSpecific::Public
import Cached
module DataFlowImplCommonPublic {
/** A state value to track during data flow. */
class FlowState = string;
/** Provides `FlowState = string`. */
module FlowStateString {
/** A state value to track during data flow. */
class FlowState = string;
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
}
}
private newtype TFlowFeature =

View File

@@ -248,7 +248,9 @@ module Public {
/**
* Holds if all the summaries that apply to `this` are auto generated and not manually created.
*/
final predicate isAutoGenerated() { this.hasProvenance("generated") and not this.isManual() }
final predicate isAutoGenerated() {
this.hasProvenance(["generated", "ai-generated"]) and not this.isManual()
}
/**
* Holds if there exists a manual summary that applies to `this`.
@@ -268,7 +270,7 @@ module Public {
/**
* Holds if the neutral is auto generated.
*/
predicate isAutoGenerated() { neutralElement(this, "generated") }
predicate isAutoGenerated() { neutralElement(this, ["generated", "ai-generated"]) }
/**
* Holds if there exists a manual neutral that applies to `this`.
@@ -1202,11 +1204,11 @@ module Private {
}
private string renderProvenance(SummarizedCallable c) {
if c.isAutoGenerated() then result = "generated" else result = "manual"
if c.isManual() then result = "manual" else c.hasProvenance(result)
}
private string renderProvenanceNeutral(NeutralCallable c) {
if c.isAutoGenerated() then result = "generated" else result = "manual"
if c.isManual() then result = "manual" else c.hasProvenance(result)
}
/**

View File

@@ -0,0 +1,63 @@
/**
* Provides classes for performing local (intra-procedural) and
* global (inter-procedural) taint-tracking analyses.
*/
import TaintTrackingParameter::Public
private import TaintTrackingParameter::Private
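/**
* Mixes the language's defaults into a user-provided configuration: default
* taint sanitizers become barriers, default taint steps become additional
* flow steps, and default implicit reads are allowed at sinks and at the
* endpoints of additional steps.
*/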
private module AddTaintDefaults<DataFlowInternal::FullStateConfigSig Config> implements
DataFlowInternal::FullStateConfigSig {
import Config
predicate isBarrier(DataFlow::Node node) {
Config::isBarrier(node) or defaultTaintSanitizer(node)
}
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
Config::isAdditionalFlowStep(node1, node2) or
defaultAdditionalTaintStep(node1, node2)
}
predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
Config::allowImplicitRead(node, c)
or
(
Config::isSink(node, _) or
Config::isAdditionalFlowStep(node, _) or
Config::isAdditionalFlowStep(node, _, _, _)
) and
defaultImplicitTaintRead(node, c)
}
}
/**
* Constructs a standard taint tracking computation.
*/
module Make<DataFlow::ConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import DataFlowInternal::DefaultState<Config>
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
/**
* Constructs a taint tracking computation using flow state.
*/
module MakeWithState<DataFlow::StateConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
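// A minimal sketch (with a hypothetical `MyConfig`) of how a query pack is
// expected to instantiate these modules:
//
//   module MyConfig implements DataFlow::ConfigSig {
//     predicate isSource(DataFlow::Node source) { any() } // restrict in practice
//     predicate isSink(DataFlow::Node sink) { any() } // restrict in practice
//   }
//
//   module MyTaint = TaintTracking::Make<MyConfig>;
//   // then query with, e.g., MyTaint::hasFlow(src, snk)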

View File

@@ -2,5 +2,6 @@ import semmle.code.csharp.dataflow.internal.TaintTrackingPublic as Public
module Private {
import semmle.code.csharp.dataflow.DataFlow::DataFlow as DataFlow
import semmle.code.csharp.dataflow.internal.DataFlowImpl as DataFlowInternal
import semmle.code.csharp.dataflow.internal.TaintTrackingPrivate
}

View File

@@ -86,6 +86,8 @@ module EntityFramework {
abstract class EFSummarizedCallable extends SummarizedCallable {
bindingset[this]
EFSummarizedCallable() { any() }
override predicate hasProvenance(string provenance) { provenance = "manual" }
}
private class DbSetAddOrUpdateRequiredSummaryComponentStack extends RequiredSummaryComponentStack {

View File

@@ -83,6 +83,9 @@ class Member extends Declaration, @dotnet_member {
/** Holds if this member is declared `required`. */
predicate isRequired() { none() }
/** Holds if this member is declared `file` local. */
predicate isFile() { none() }
/**
* Holds if this member has name `name` and is defined in type `type`
* with namespace `namespace`.

View File

@@ -1,6 +1,6 @@
/**
* @name Static field written by instance method
* @description Finds instance methods that write static fields.
* @description Finds instance methods and properties that write to static fields.
* This is tricky to get right if multiple instances are being manipulated,
and is generally bad practice.
* @kind problem
@@ -14,12 +14,12 @@
import csharp
from FieldWrite fw, Field f, Callable m
from FieldWrite fw, Field f, Callable c
where
fw.getTarget() = f and
f.isStatic() and
m = fw.getEnclosingCallable() and
not m.(Member).isStatic() and
f.getDeclaringType() = m.getDeclaringType() and
m.fromSource()
select fw.(VariableAccess), "Write to static field from instance method or constructor."
c = fw.getEnclosingCallable() and
not [c.(Member), c.(Accessor).getDeclaration()].isStatic() and
f.getDeclaringType() = c.getDeclaringType() and
c.fromSource()
select fw.(VariableAccess), "Write to static field from instance method, property, or constructor."

View File

@@ -0,0 +1,25 @@
file interface I1 { }
file interface I2 { }
file class C1 : I1 { }
public class C2 { }
public class C3 : I2 { }
file interface IC { }
file class C4<T> { }
file class C5<S> : C4<S> { }
file struct S1 { }
file enum E1 { }
file delegate void D1();
file record R1 { }
file record struct RS1 { }

View File

@@ -0,0 +1,23 @@
file interface I1 { }
public interface I2 { }
file class C1 { }
file class C2 : I2 { }
file class IC { }
file class C4<T> { }
file class C5<S> : C4<S> { }
file struct S1 { }
file enum E1 { }
file delegate void D1();
file record R1 { }
file record struct RS1 { }

View File

@@ -0,0 +1,7 @@
namespace TestFileScoped;
file interface I10 { }
file class C10 { }
public class C11 : I10 { }

View File

@@ -0,0 +1,7 @@
namespace TestFileScoped;
public interface I10 { }
file class C10 { }
file class C11 : I10 { }

View File

@@ -216,6 +216,103 @@ CheckedOperators.cs:
# 55| 0: [TypeMention] short
# 55| 1: [PropertyCall] access to property Value
# 55| -1: [ParameterAccess] access to parameter n
FileScoped1.cs:
# 1| [Interface] I1
# 3| [Interface] I2
# 5| [Class] C1
#-----| 3: (Base types)
# 5| 1: [TypeMention] I1
# 7| [Class] C2
# 9| [Class] C3
#-----| 3: (Base types)
# 9| 1: [TypeMention] I2
# 11| [Interface] IC
# 13| [Class] C4<>
#-----| 1: (Type parameters)
# 13| 0: [TypeParameter] T
# 15| [Class] C5<>
#-----| 1: (Type parameters)
# 15| 0: [TypeParameter] S
#-----| 3: (Base types)
# 15| 0: [TypeMention] C4<S>
# 15| 1: [TypeMention] S
# 17| [Struct] S1
# 19| [Enum] E1
# 21| [DelegateType] D1
# 23| [RecordClass] R1
# 23| 12: [NEOperator] !=
#-----| 2: (Parameters)
# 23| 0: [Parameter] left
# 23| 1: [Parameter] right
# 23| 13: [EQOperator] ==
#-----| 2: (Parameters)
# 23| 0: [Parameter] left
# 23| 1: [Parameter] right
# 23| 14: [Property] EqualityContract
# 23| 3: [Getter] get_EqualityContract
# 25| [RecordStruct] RS1
# 25| 10: [NEOperator] !=
#-----| 2: (Parameters)
# 25| 0: [Parameter] left
# 25| 1: [Parameter] right
# 25| 11: [EQOperator] ==
#-----| 2: (Parameters)
# 25| 0: [Parameter] left
# 25| 1: [Parameter] right
FileScoped2.cs:
# 1| [Interface] I1
# 3| [Interface] I2
# 5| [Class] C1
# 7| [Class] C2
#-----| 3: (Base types)
# 7| 1: [TypeMention] I2
# 9| [Class] IC
# 11| [Class] C4<>
#-----| 1: (Type parameters)
# 11| 0: [TypeParameter] T
# 13| [Class] C5<>
#-----| 1: (Type parameters)
# 13| 0: [TypeParameter] S
#-----| 3: (Base types)
# 13| 0: [TypeMention] C4<S>
# 13| 1: [TypeMention] S
# 15| [Struct] S1
# 17| [Enum] E1
# 19| [DelegateType] D1
# 21| [RecordClass] R1
# 21| 12: [NEOperator] !=
#-----| 2: (Parameters)
# 21| 0: [Parameter] left
# 21| 1: [Parameter] right
# 21| 13: [EQOperator] ==
#-----| 2: (Parameters)
# 21| 0: [Parameter] left
# 21| 1: [Parameter] right
# 21| 14: [Property] EqualityContract
# 21| 3: [Getter] get_EqualityContract
# 23| [RecordStruct] RS1
# 23| 10: [NEOperator] !=
#-----| 2: (Parameters)
# 23| 0: [Parameter] left
# 23| 1: [Parameter] right
# 23| 11: [EQOperator] ==
#-----| 2: (Parameters)
# 23| 0: [Parameter] left
# 23| 1: [Parameter] right
FileScoped3.cs:
# 1| [NamespaceDeclaration] namespace ... { ... }
# 3| 1: [Interface] I10
# 5| 2: [Class] C10
# 7| 3: [Class] C11
#-----| 3: (Base types)
# 7| 1: [TypeMention] I10
FileScoped4.cs:
# 1| [NamespaceDeclaration] namespace ... { ... }
# 3| 1: [Interface] I10
# 5| 2: [Class] C10
# 7| 3: [Class] C11
#-----| 3: (Base types)
# 7| 1: [TypeMention] I10
GenericAttribute.cs:
# 3| [GenericAssemblyAttribute] [assembly: MyGeneric<Int32>(...)]
# 3| 0: [TypeMention] MyGenericAttribute<int>
@@ -954,14 +1051,14 @@ StaticInterfaceMembers.cs:
# 5| 0: [Parameter] other
# 5| -1: [TypeMention] T
# 5| 4: [ParameterAccess] access to parameter other
# 7| 6: [Method] Add
# 7| 6: [AddOperator] +
# 7| -1: [TypeMention] T
#-----| 2: (Parameters)
# 7| 0: [Parameter] left
# 7| -1: [TypeMention] T
# 7| 1: [Parameter] right
# 7| -1: [TypeMention] T
# 9| 7: [Method] Subtract
# 9| 7: [SubOperator] -
# 9| -1: [TypeMention] T
#-----| 2: (Parameters)
# 9| 0: [Parameter] left
@@ -969,121 +1066,265 @@ StaticInterfaceMembers.cs:
# 9| 1: [Parameter] right
# 9| -1: [TypeMention] T
# 9| 4: [ParameterAccess] access to parameter left
# 11| 8: [Method] Zero
# 11| -1: [TypeMention] T
# 11| 4: [DefaultValueExpr] default(...)
# 11| 0: [TypeAccess] access to type T
# 11| 0: [TypeMention] T
# 14| [Class] Complex
# 11| 8: [ExplicitConversionOperator] explicit conversion
# 11| -1: [TypeMention] int
#-----| 2: (Parameters)
# 11| 0: [Parameter] n
# 11| -1: [TypeMention] T
# 13| 9: [ExplicitConversionOperator] explicit conversion
# 13| -1: [TypeMention] short
#-----| 2: (Parameters)
# 13| 0: [Parameter] n
# 13| -1: [TypeMention] T
# 15| 10: [Method] Inc
# 15| -1: [TypeMention] T
#-----| 2: (Parameters)
# 15| 0: [Parameter] other
# 15| -1: [TypeMention] T
# 17| 11: [Method] Dec
# 17| -1: [TypeMention] T
#-----| 2: (Parameters)
# 17| 0: [Parameter] other
# 17| -1: [TypeMention] T
# 17| 4: [ParameterAccess] access to parameter other
# 19| 12: [Method] Add
# 19| -1: [TypeMention] T
#-----| 2: (Parameters)
# 19| 0: [Parameter] left
# 19| -1: [TypeMention] T
# 19| 1: [Parameter] right
# 19| -1: [TypeMention] T
# 21| 13: [Method] Subtract
# 21| -1: [TypeMention] T
#-----| 2: (Parameters)
# 21| 0: [Parameter] left
# 21| -1: [TypeMention] T
# 21| 1: [Parameter] right
# 21| -1: [TypeMention] T
# 21| 4: [ParameterAccess] access to parameter left
# 23| 14: [Method] Zero
# 23| -1: [TypeMention] T
# 23| 4: [DefaultValueExpr] default(...)
# 23| 0: [TypeAccess] access to type T
# 23| 0: [TypeMention] T
# 26| [Class] Complex
#-----| 3: (Base types)
# 16| 4: [Property] Real
# 16| -1: [TypeMention] double
# 16| 2: [AssignExpr] ... = ...
# 16| 0: [PropertyCall] access to property Real
# 16| 1: [DoubleLiteral] 0
# 16| 3: [Getter] get_Real
# 16| 4: [Setter] set_Real
# 28| 4: [Property] Real
# 28| -1: [TypeMention] double
# 28| 2: [AssignExpr] ... = ...
# 28| 0: [PropertyCall] access to property Real
# 28| 1: [DoubleLiteral] 0
# 28| 3: [Getter] get_Real
# 28| 4: [Setter] set_Real
#-----| 2: (Parameters)
# 16| 0: [Parameter] value
# 17| 5: [Property] Imaginary
# 17| -1: [TypeMention] double
# 17| 2: [AssignExpr] ... = ...
# 17| 0: [PropertyCall] access to property Imaginary
# 17| 1: [DoubleLiteral] 0
# 17| 3: [Getter] get_Imaginary
# 17| 4: [Setter] set_Imaginary
# 28| 0: [Parameter] value
# 29| 5: [Property] Imaginary
# 29| -1: [TypeMention] double
# 29| 2: [AssignExpr] ... = ...
# 29| 0: [PropertyCall] access to property Imaginary
# 29| 1: [DoubleLiteral] 0
# 29| 3: [Getter] get_Imaginary
# 29| 4: [Setter] set_Imaginary
#-----| 2: (Parameters)
# 17| 0: [Parameter] value
# 19| 6: [InstanceConstructor] Complex
# 19| 4: [BlockStmt] {...}
# 21| 7: [Method] Zero
# 21| -1: [TypeMention] Complex
# 21| 4: [ObjectCreation] object creation of type Complex
# 21| 0: [TypeMention] Complex
# 23| 8: [IncrementOperator] ++
# 23| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 23| 0: [Parameter] other
# 23| -1: [TypeMention] Complex
# 24| 4: [ObjectCreation] object creation of type Complex
# 24| -2: [TypeMention] Complex
# 24| -1: [ObjectInitializer] { ..., ... }
# 24| 0: [MemberInitializer] ... = ...
# 24| 0: [PropertyCall] access to property Real
# 24| 1: [AddExpr] ... + ...
# 24| 0: [PropertyCall] access to property Real
# 24| -1: [ParameterAccess] access to parameter other
# 24| 1: [DoubleLiteral] 1
# 24| 1: [MemberInitializer] ... = ...
# 24| 0: [PropertyCall] access to property Imaginary
# 24| 1: [PropertyCall] access to property Imaginary
# 24| -1: [ParameterAccess] access to parameter other
# 26| 9: [DecrementOperator] --
# 26| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 26| 0: [Parameter] other
# 26| -1: [TypeMention] Complex
# 27| 4: [ObjectCreation] object creation of type Complex
# 27| -2: [TypeMention] Complex
# 27| -1: [ObjectInitializer] { ..., ... }
# 27| 0: [MemberInitializer] ... = ...
# 27| 0: [PropertyCall] access to property Real
# 27| 1: [SubExpr] ... - ...
# 27| 0: [PropertyCall] access to property Real
# 27| -1: [ParameterAccess] access to parameter other
# 27| 1: [DoubleLiteral] 1
# 27| 1: [MemberInitializer] ... = ...
# 27| 0: [PropertyCall] access to property Imaginary
# 27| 1: [PropertyCall] access to property Imaginary
# 27| -1: [ParameterAccess] access to parameter other
# 29| 10: [Method] Add
# 29| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 29| 0: [Parameter] left
# 29| -1: [TypeMention] Complex
# 29| 1: [Parameter] right
# 29| -1: [TypeMention] Complex
# 30| 4: [ObjectCreation] object creation of type Complex
# 30| -2: [TypeMention] Complex
# 30| -1: [ObjectInitializer] { ..., ... }
# 30| 0: [MemberInitializer] ... = ...
# 30| 0: [PropertyCall] access to property Real
# 30| 1: [AddExpr] ... + ...
# 30| 0: [PropertyCall] access to property Real
# 30| -1: [ParameterAccess] access to parameter left
# 30| 1: [PropertyCall] access to property Real
# 30| -1: [ParameterAccess] access to parameter right
# 30| 1: [MemberInitializer] ... = ...
# 30| 0: [PropertyCall] access to property Imaginary
# 30| 1: [AddExpr] ... + ...
# 30| 0: [PropertyCall] access to property Imaginary
# 30| -1: [ParameterAccess] access to parameter left
# 30| 1: [PropertyCall] access to property Imaginary
# 30| -1: [ParameterAccess] access to parameter right
# 32| 11: [Method] Subtract
# 32| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 32| 0: [Parameter] left
# 32| -1: [TypeMention] Complex
# 32| 1: [Parameter] right
# 32| -1: [TypeMention] Complex
# 29| 0: [Parameter] value
# 31| 6: [InstanceConstructor] Complex
# 31| 4: [BlockStmt] {...}
# 33| 7: [Method] Zero
# 33| -1: [TypeMention] Complex
# 33| 4: [ObjectCreation] object creation of type Complex
# 33| -2: [TypeMention] Complex
# 33| -1: [ObjectInitializer] { ..., ... }
# 33| 0: [MemberInitializer] ... = ...
# 33| 0: [PropertyCall] access to property Real
# 33| 1: [SubExpr] ... - ...
# 33| 0: [PropertyCall] access to property Real
# 33| -1: [ParameterAccess] access to parameter left
# 33| 1: [PropertyCall] access to property Real
# 33| -1: [ParameterAccess] access to parameter right
# 33| 1: [MemberInitializer] ... = ...
# 33| 0: [PropertyCall] access to property Imaginary
# 33| 1: [SubExpr] ... - ...
# 33| 0: [PropertyCall] access to property Imaginary
# 33| -1: [ParameterAccess] access to parameter left
# 33| 1: [PropertyCall] access to property Imaginary
# 33| -1: [ParameterAccess] access to parameter right
# 33| 0: [TypeMention] Complex
# 35| 8: [IncrementOperator] ++
# 35| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 35| 0: [Parameter] other
# 35| -1: [TypeMention] Complex
# 36| 4: [ObjectCreation] object creation of type Complex
# 36| -2: [TypeMention] Complex
# 36| -1: [ObjectInitializer] { ..., ... }
# 36| 0: [MemberInitializer] ... = ...
# 36| 0: [PropertyCall] access to property Real
# 36| 1: [AddExpr] ... + ...
# 36| 0: [PropertyCall] access to property Real
# 36| -1: [ParameterAccess] access to parameter other
# 36| 1: [DoubleLiteral] 1
# 36| 1: [MemberInitializer] ... = ...
# 36| 0: [PropertyCall] access to property Imaginary
# 36| 1: [PropertyCall] access to property Imaginary
# 36| -1: [ParameterAccess] access to parameter other
# 38| 9: [DecrementOperator] --
# 38| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 38| 0: [Parameter] other
# 38| -1: [TypeMention] Complex
# 39| 4: [ObjectCreation] object creation of type Complex
# 39| -2: [TypeMention] Complex
# 39| -1: [ObjectInitializer] { ..., ... }
# 39| 0: [MemberInitializer] ... = ...
# 39| 0: [PropertyCall] access to property Real
# 39| 1: [SubExpr] ... - ...
# 39| 0: [PropertyCall] access to property Real
# 39| -1: [ParameterAccess] access to parameter other
# 39| 1: [DoubleLiteral] 1
# 39| 1: [MemberInitializer] ... = ...
# 39| 0: [PropertyCall] access to property Imaginary
# 39| 1: [PropertyCall] access to property Imaginary
# 39| -1: [ParameterAccess] access to parameter other
# 41| 10: [AddOperator] +
# 41| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 41| 0: [Parameter] left
# 41| -1: [TypeMention] Complex
# 41| 1: [Parameter] right
# 41| -1: [TypeMention] Complex
# 42| 4: [ObjectCreation] object creation of type Complex
# 42| -2: [TypeMention] Complex
# 42| -1: [ObjectInitializer] { ..., ... }
# 42| 0: [MemberInitializer] ... = ...
# 42| 0: [PropertyCall] access to property Real
# 42| 1: [AddExpr] ... + ...
# 42| 0: [PropertyCall] access to property Real
# 42| -1: [ParameterAccess] access to parameter left
# 42| 1: [PropertyCall] access to property Real
# 42| -1: [ParameterAccess] access to parameter right
# 42| 1: [MemberInitializer] ... = ...
# 42| 0: [PropertyCall] access to property Imaginary
# 42| 1: [AddExpr] ... + ...
# 42| 0: [PropertyCall] access to property Imaginary
# 42| -1: [ParameterAccess] access to parameter left
# 42| 1: [PropertyCall] access to property Imaginary
# 42| -1: [ParameterAccess] access to parameter right
# 44| 11: [SubOperator] -
# 44| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 44| 0: [Parameter] left
# 44| -1: [TypeMention] Complex
# 44| 1: [Parameter] right
# 44| -1: [TypeMention] Complex
# 45| 4: [ObjectCreation] object creation of type Complex
# 45| -2: [TypeMention] Complex
# 45| -1: [ObjectInitializer] { ..., ... }
# 45| 0: [MemberInitializer] ... = ...
# 45| 0: [PropertyCall] access to property Real
# 45| 1: [SubExpr] ... - ...
# 45| 0: [PropertyCall] access to property Real
# 45| -1: [ParameterAccess] access to parameter left
# 45| 1: [PropertyCall] access to property Real
# 45| -1: [ParameterAccess] access to parameter right
# 45| 1: [MemberInitializer] ... = ...
# 45| 0: [PropertyCall] access to property Imaginary
# 45| 1: [SubExpr] ... - ...
# 45| 0: [PropertyCall] access to property Imaginary
# 45| -1: [ParameterAccess] access to parameter left
# 45| 1: [PropertyCall] access to property Imaginary
# 45| -1: [ParameterAccess] access to parameter right
# 47| 12: [ExplicitConversionOperator] explicit conversion
# 47| -1: [TypeMention] int
#-----| 2: (Parameters)
# 47| 0: [Parameter] n
# 47| -1: [TypeMention] Complex
# 47| 4: [CastExpr] (...) ...
# 47| 0: [TypeAccess] access to type Int32
# 47| 0: [TypeMention] int
# 47| 1: [PropertyCall] access to property Real
# 47| -1: [ParameterAccess] access to parameter n
# 49| 13: [ExplicitConversionOperator] explicit conversion
# 49| -1: [TypeMention] short
#-----| 2: (Parameters)
# 49| 0: [Parameter] n
# 49| -1: [TypeMention] Complex
# 49| 4: [CastExpr] (...) ...
# 49| 0: [TypeAccess] access to type Int16
# 49| 0: [TypeMention] short
# 49| 1: [PropertyCall] access to property Real
# 49| -1: [ParameterAccess] access to parameter n
# 51| 14: [Method] Inc
# 51| -1: [TypeMention] Complex
# 51| -1: [TypeMention] INumber<Complex>
# 51| 1: [TypeMention] Complex
#-----| 2: (Parameters)
# 51| 0: [Parameter] other
# 51| -1: [TypeMention] Complex
# 52| 4: [ObjectCreation] object creation of type Complex
# 52| -2: [TypeMention] Complex
# 52| -1: [ObjectInitializer] { ..., ... }
# 52| 0: [MemberInitializer] ... = ...
# 52| 0: [PropertyCall] access to property Real
# 52| 1: [AddExpr] ... + ...
# 52| 0: [PropertyCall] access to property Real
# 52| -1: [ParameterAccess] access to parameter other
# 52| 1: [DoubleLiteral] 1
# 52| 1: [MemberInitializer] ... = ...
# 52| 0: [PropertyCall] access to property Imaginary
# 52| 1: [PropertyCall] access to property Imaginary
# 52| -1: [ParameterAccess] access to parameter other
# 54| 15: [Method] Dec
# 54| -1: [TypeMention] Complex
# 54| -1: [TypeMention] INumber<Complex>
# 54| 1: [TypeMention] Complex
#-----| 2: (Parameters)
# 54| 0: [Parameter] other
# 54| -1: [TypeMention] Complex
# 55| 4: [ObjectCreation] object creation of type Complex
# 55| -2: [TypeMention] Complex
# 55| -1: [ObjectInitializer] { ..., ... }
# 55| 0: [MemberInitializer] ... = ...
# 55| 0: [PropertyCall] access to property Real
# 55| 1: [SubExpr] ... - ...
# 55| 0: [PropertyCall] access to property Real
# 55| -1: [ParameterAccess] access to parameter other
# 55| 1: [DoubleLiteral] 1
# 55| 1: [MemberInitializer] ... = ...
# 55| 0: [PropertyCall] access to property Imaginary
# 55| 1: [PropertyCall] access to property Imaginary
# 55| -1: [ParameterAccess] access to parameter other
# 57| 16: [Method] Add
# 57| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 57| 0: [Parameter] left
# 57| -1: [TypeMention] Complex
# 57| 1: [Parameter] right
# 57| -1: [TypeMention] Complex
# 58| 4: [ObjectCreation] object creation of type Complex
# 58| -2: [TypeMention] Complex
# 58| -1: [ObjectInitializer] { ..., ... }
# 58| 0: [MemberInitializer] ... = ...
# 58| 0: [PropertyCall] access to property Real
# 58| 1: [AddExpr] ... + ...
# 58| 0: [PropertyCall] access to property Real
# 58| -1: [ParameterAccess] access to parameter left
# 58| 1: [PropertyCall] access to property Real
# 58| -1: [ParameterAccess] access to parameter right
# 58| 1: [MemberInitializer] ... = ...
# 58| 0: [PropertyCall] access to property Imaginary
# 58| 1: [AddExpr] ... + ...
# 58| 0: [PropertyCall] access to property Imaginary
# 58| -1: [ParameterAccess] access to parameter left
# 58| 1: [PropertyCall] access to property Imaginary
# 58| -1: [ParameterAccess] access to parameter right
# 60| 17: [Method] Subtract
# 60| -1: [TypeMention] Complex
#-----| 2: (Parameters)
# 60| 0: [Parameter] left
# 60| -1: [TypeMention] Complex
# 60| 1: [Parameter] right
# 60| -1: [TypeMention] Complex
# 61| 4: [ObjectCreation] object creation of type Complex
# 61| -2: [TypeMention] Complex
# 61| -1: [ObjectInitializer] { ..., ... }
# 61| 0: [MemberInitializer] ... = ...
# 61| 0: [PropertyCall] access to property Real
# 61| 1: [SubExpr] ... - ...
# 61| 0: [PropertyCall] access to property Real
# 61| -1: [ParameterAccess] access to parameter left
# 61| 1: [PropertyCall] access to property Real
# 61| -1: [ParameterAccess] access to parameter right
# 61| 1: [MemberInitializer] ... = ...
# 61| 0: [PropertyCall] access to property Imaginary
# 61| 1: [SubExpr] ... - ...
# 61| 0: [PropertyCall] access to property Imaginary
# 61| -1: [ParameterAccess] access to parameter left
# 61| 1: [PropertyCall] access to property Imaginary
# 61| -1: [ParameterAccess] access to parameter right
Strings.cs:
# 3| [Class] MyTestClass
# 5| 5: [Method] M1

View File

@@ -4,6 +4,18 @@ public interface INumber<T> where T : INumber<T>
static virtual T operator --(T other) => other;
static abstract T operator +(T left, T right);
static virtual T operator -(T left, T right) => left;
static abstract explicit operator int(T n);
static abstract explicit operator short(T n);
static abstract T Inc(T other);
static virtual T Dec(T other) => other;
static abstract T Add(T left, T right);
static virtual T Subtract(T left, T right) => left;
@@ -26,6 +38,22 @@ public class Complex : INumber<Complex>
public static Complex operator --(Complex other) =>
new Complex { Real = other.Real - 1.0, Imaginary = other.Imaginary };
static Complex INumber<Complex>.operator +(Complex left, Complex right) =>
new Complex { Real = left.Real + right.Real, Imaginary = left.Imaginary + right.Imaginary };
static Complex INumber<Complex>.operator -(Complex left, Complex right) =>
new Complex { Real = left.Real - right.Real, Imaginary = left.Imaginary - right.Imaginary };
public static explicit operator int(Complex n) => (int)n.Real;
static explicit INumber<Complex>.operator short(Complex n) => (short)n.Real;
static Complex INumber<Complex>.Inc(Complex other) =>
new Complex { Real = other.Real + 1.0, Imaginary = other.Imaginary };
static Complex INumber<Complex>.Dec(Complex other) =>
new Complex { Real = other.Real - 1.0, Imaginary = other.Imaginary };
public static Complex Add(Complex left, Complex right) =>
new Complex { Real = left.Real + right.Real, Imaginary = left.Imaginary + right.Imaginary };

View File

@@ -0,0 +1,116 @@
typemodifiers
| FileScoped1.cs:1:16:1:17 | I1 | file |
| FileScoped1.cs:3:16:3:17 | I2 | file |
| FileScoped1.cs:5:12:5:13 | C1 | file |
| FileScoped1.cs:7:14:7:15 | C2 | public |
| FileScoped1.cs:9:14:9:15 | C3 | public |
| FileScoped1.cs:11:16:11:17 | IC | file |
| FileScoped1.cs:13:12:13:16 | C4<> | file |
| FileScoped1.cs:13:12:13:16 | C4<S> | file |
| FileScoped1.cs:15:12:15:16 | C5<> | file |
| FileScoped1.cs:17:13:17:14 | S1 | file |
| FileScoped1.cs:17:13:17:14 | S1 | sealed |
| FileScoped1.cs:19:11:19:12 | E1 | file |
| FileScoped1.cs:19:11:19:12 | E1 | sealed |
| FileScoped1.cs:21:20:21:21 | D1 | file |
| FileScoped1.cs:21:20:21:21 | D1 | sealed |
| FileScoped1.cs:23:1:23:18 | R1 | file |
| FileScoped1.cs:23:1:23:18 | R1 | record |
| FileScoped1.cs:25:1:25:26 | RS1 | file |
| FileScoped1.cs:25:1:25:26 | RS1 | record |
| FileScoped1.cs:25:1:25:26 | RS1 | sealed |
| FileScoped2.cs:1:16:1:17 | I1 | file |
| FileScoped2.cs:3:18:3:19 | I2 | public |
| FileScoped2.cs:5:12:5:13 | C1 | file |
| FileScoped2.cs:7:12:7:13 | C2 | file |
| FileScoped2.cs:9:12:9:13 | IC | file |
| FileScoped2.cs:11:12:11:16 | C4<> | file |
| FileScoped2.cs:11:12:11:16 | C4<S> | file |
| FileScoped2.cs:13:12:13:16 | C5<> | file |
| FileScoped2.cs:15:13:15:14 | S1 | file |
| FileScoped2.cs:15:13:15:14 | S1 | sealed |
| FileScoped2.cs:17:11:17:12 | E1 | file |
| FileScoped2.cs:17:11:17:12 | E1 | sealed |
| FileScoped2.cs:19:20:19:21 | D1 | file |
| FileScoped2.cs:19:20:19:21 | D1 | sealed |
| FileScoped2.cs:21:1:21:18 | R1 | file |
| FileScoped2.cs:21:1:21:18 | R1 | record |
| FileScoped2.cs:23:1:23:26 | RS1 | file |
| FileScoped2.cs:23:1:23:26 | RS1 | record |
| FileScoped2.cs:23:1:23:26 | RS1 | sealed |
| FileScoped3.cs:3:16:3:18 | I10 | file |
| FileScoped3.cs:5:12:5:14 | C10 | file |
| FileScoped3.cs:7:14:7:16 | C11 | public |
| FileScoped4.cs:3:18:3:20 | I10 | public |
| FileScoped4.cs:5:12:5:14 | C10 | file |
| FileScoped4.cs:7:12:7:14 | C11 | file |
qualifiedtypes
| FileScoped1.cs:1:16:1:17 | I1 | I1 |
| FileScoped1.cs:3:16:3:17 | I2 | I2 |
| FileScoped1.cs:5:12:5:13 | C1 | C1 |
| FileScoped1.cs:7:14:7:15 | C2 | C2 |
| FileScoped1.cs:9:14:9:15 | C3 | C3 |
| FileScoped1.cs:11:16:11:17 | IC | IC |
| FileScoped1.cs:13:12:13:16 | C4<> | C4<> |
| FileScoped1.cs:13:12:13:16 | C4<S> | C4<S> |
| FileScoped1.cs:15:12:15:16 | C5<> | C5<> |
| FileScoped1.cs:17:13:17:14 | S1 | S1 |
| FileScoped1.cs:19:11:19:12 | E1 | E1 |
| FileScoped1.cs:21:20:21:21 | D1 | D1 |
| FileScoped1.cs:23:1:23:18 | R1 | R1 |
| FileScoped1.cs:25:1:25:26 | RS1 | RS1 |
| FileScoped2.cs:1:16:1:17 | I1 | I1 |
| FileScoped2.cs:3:18:3:19 | I2 | I2 |
| FileScoped2.cs:5:12:5:13 | C1 | C1 |
| FileScoped2.cs:7:12:7:13 | C2 | C2 |
| FileScoped2.cs:9:12:9:13 | IC | IC |
| FileScoped2.cs:11:12:11:16 | C4<> | C4<> |
| FileScoped2.cs:11:12:11:16 | C4<S> | C4<S> |
| FileScoped2.cs:13:12:13:16 | C5<> | C5<> |
| FileScoped2.cs:15:13:15:14 | S1 | S1 |
| FileScoped2.cs:17:11:17:12 | E1 | E1 |
| FileScoped2.cs:19:20:19:21 | D1 | D1 |
| FileScoped2.cs:21:1:21:18 | R1 | R1 |
| FileScoped2.cs:23:1:23:26 | RS1 | RS1 |
| FileScoped3.cs:3:16:3:18 | I10 | TestFileScoped.I10 |
| FileScoped3.cs:5:12:5:14 | C10 | TestFileScoped.C10 |
| FileScoped3.cs:7:14:7:16 | C11 | TestFileScoped.C11 |
| FileScoped4.cs:3:18:3:20 | I10 | TestFileScoped.I10 |
| FileScoped4.cs:5:12:5:14 | C10 | TestFileScoped.C10 |
| FileScoped4.cs:7:12:7:14 | C11 | TestFileScoped.C11 |
filetypes
| FileScoped1.cs:1:16:1:17 | I1 |
| FileScoped1.cs:3:16:3:17 | I2 |
| FileScoped1.cs:5:12:5:13 | C1 |
| FileScoped1.cs:11:16:11:17 | IC |
| FileScoped1.cs:13:12:13:16 | C4<> |
| FileScoped1.cs:13:12:13:16 | C4<S> |
| FileScoped1.cs:15:12:15:16 | C5<> |
| FileScoped1.cs:17:13:17:14 | S1 |
| FileScoped1.cs:19:11:19:12 | E1 |
| FileScoped1.cs:21:20:21:21 | D1 |
| FileScoped1.cs:23:1:23:18 | R1 |
| FileScoped1.cs:25:1:25:26 | RS1 |
| FileScoped2.cs:1:16:1:17 | I1 |
| FileScoped2.cs:5:12:5:13 | C1 |
| FileScoped2.cs:7:12:7:13 | C2 |
| FileScoped2.cs:9:12:9:13 | IC |
| FileScoped2.cs:11:12:11:16 | C4<> |
| FileScoped2.cs:11:12:11:16 | C4<S> |
| FileScoped2.cs:13:12:13:16 | C5<> |
| FileScoped2.cs:15:13:15:14 | S1 |
| FileScoped2.cs:17:11:17:12 | E1 |
| FileScoped2.cs:19:20:19:21 | D1 |
| FileScoped2.cs:21:1:21:18 | R1 |
| FileScoped2.cs:23:1:23:26 | RS1 |
| FileScoped3.cs:3:16:3:18 | I10 |
| FileScoped3.cs:5:12:5:14 | C10 |
| FileScoped4.cs:5:12:5:14 | C10 |
| FileScoped4.cs:7:12:7:14 | C11 |
internaltypes
publictypes
| FileScoped1.cs:7:14:7:15 | C2 |
| FileScoped1.cs:9:14:9:15 | C3 |
| FileScoped2.cs:3:18:3:19 | I2 |
| FileScoped3.cs:7:14:7:16 | C11 |
| FileScoped4.cs:3:18:3:20 | I10 |

View File

@@ -0,0 +1,42 @@
import csharp
private import semmle.code.csharp.commons.QualifiedName
private predicate isInteresting(Type t) {
(
t instanceof Class or
t instanceof Interface or
t instanceof Struct or
t instanceof Enum or
t instanceof DelegateType or
t instanceof RecordType
) and
t.getFile().getStem().matches("FileScoped%")
}
query predicate typemodifiers(Type t, string modifier) {
isInteresting(t) and
t.(Modifiable).hasModifier(modifier)
}
query predicate qualifiedtypes(Type t, string qualifiedName) {
isInteresting(t) and
exists(string qualifier, string name |
t.hasQualifiedName(qualifier, name) and
qualifiedName = getQualifiedName(qualifier, name)
)
}
query predicate filetypes(Type t) {
isInteresting(t) and
t.isFile()
}
query predicate internaltypes(Type t) {
isInteresting(t) and
t.isInternal()
}
query predicate publictypes(Type t) {
isInteresting(t) and
t.isPublic()
}

View File

@@ -5,16 +5,55 @@ interfacemembers
| INumber<> | StaticInterfaceMembers.cs:5:31:5:32 | -- | public |
| INumber<> | StaticInterfaceMembers.cs:5:31:5:32 | -- | static |
| INumber<> | StaticInterfaceMembers.cs:5:31:5:32 | -- | virtual |
| INumber<> | StaticInterfaceMembers.cs:7:23:7:25 | Add | abstract |
| INumber<> | StaticInterfaceMembers.cs:7:23:7:25 | Add | public |
| INumber<> | StaticInterfaceMembers.cs:7:23:7:25 | Add | static |
| INumber<> | StaticInterfaceMembers.cs:9:22:9:29 | Subtract | public |
| INumber<> | StaticInterfaceMembers.cs:9:22:9:29 | Subtract | static |
| INumber<> | StaticInterfaceMembers.cs:9:22:9:29 | Subtract | virtual |
| INumber<> | StaticInterfaceMembers.cs:11:14:11:17 | Zero | public |
| INumber<> | StaticInterfaceMembers.cs:11:14:11:17 | Zero | static |
| INumber<> | StaticInterfaceMembers.cs:7:32:7:32 | + | abstract |
| INumber<> | StaticInterfaceMembers.cs:7:32:7:32 | + | public |
| INumber<> | StaticInterfaceMembers.cs:7:32:7:32 | + | static |
| INumber<> | StaticInterfaceMembers.cs:9:31:9:31 | - | public |
| INumber<> | StaticInterfaceMembers.cs:9:31:9:31 | - | static |
| INumber<> | StaticInterfaceMembers.cs:9:31:9:31 | - | virtual |
| INumber<> | StaticInterfaceMembers.cs:11:30:11:37 | explicit conversion | abstract |
| INumber<> | StaticInterfaceMembers.cs:11:30:11:37 | explicit conversion | public |
| INumber<> | StaticInterfaceMembers.cs:11:30:11:37 | explicit conversion | static |
| INumber<> | StaticInterfaceMembers.cs:13:30:13:37 | explicit conversion | abstract |
| INumber<> | StaticInterfaceMembers.cs:13:30:13:37 | explicit conversion | public |
| INumber<> | StaticInterfaceMembers.cs:13:30:13:37 | explicit conversion | static |
| INumber<> | StaticInterfaceMembers.cs:15:23:15:25 | Inc | abstract |
| INumber<> | StaticInterfaceMembers.cs:15:23:15:25 | Inc | public |
| INumber<> | StaticInterfaceMembers.cs:15:23:15:25 | Inc | static |
| INumber<> | StaticInterfaceMembers.cs:17:22:17:24 | Dec | public |
| INumber<> | StaticInterfaceMembers.cs:17:22:17:24 | Dec | static |
| INumber<> | StaticInterfaceMembers.cs:17:22:17:24 | Dec | virtual |
| INumber<> | StaticInterfaceMembers.cs:19:23:19:25 | Add | abstract |
| INumber<> | StaticInterfaceMembers.cs:19:23:19:25 | Add | public |
| INumber<> | StaticInterfaceMembers.cs:19:23:19:25 | Add | static |
| INumber<> | StaticInterfaceMembers.cs:21:22:21:29 | Subtract | public |
| INumber<> | StaticInterfaceMembers.cs:21:22:21:29 | Subtract | static |
| INumber<> | StaticInterfaceMembers.cs:21:22:21:29 | Subtract | virtual |
| INumber<> | StaticInterfaceMembers.cs:23:14:23:17 | Zero | public |
| INumber<> | StaticInterfaceMembers.cs:23:14:23:17 | Zero | static |
implements
| StaticInterfaceMembers.cs:23:36:23:37 | ++ | StaticInterfaceMembers.cs:3:32:3:33 | ++ |
| StaticInterfaceMembers.cs:26:36:26:37 | -- | StaticInterfaceMembers.cs:5:31:5:32 | -- |
| StaticInterfaceMembers.cs:29:27:29:29 | Add | StaticInterfaceMembers.cs:7:23:7:25 | Add |
| StaticInterfaceMembers.cs:32:27:32:34 | Subtract | StaticInterfaceMembers.cs:9:22:9:29 | Subtract |
| StaticInterfaceMembers.cs:35:36:35:37 | ++ | StaticInterfaceMembers.cs:3:32:3:33 | ++ |
| StaticInterfaceMembers.cs:38:36:38:37 | -- | StaticInterfaceMembers.cs:5:31:5:32 | -- |
| StaticInterfaceMembers.cs:41:46:41:46 | + | StaticInterfaceMembers.cs:7:32:7:32 | + |
| StaticInterfaceMembers.cs:44:46:44:46 | - | StaticInterfaceMembers.cs:9:31:9:31 | - |
| StaticInterfaceMembers.cs:47:28:47:35 | explicit conversion | StaticInterfaceMembers.cs:11:30:11:37 | explicit conversion |
| StaticInterfaceMembers.cs:49:38:49:45 | explicit conversion | StaticInterfaceMembers.cs:13:30:13:37 | explicit conversion |
| StaticInterfaceMembers.cs:51:37:51:39 | Inc | StaticInterfaceMembers.cs:15:23:15:25 | Inc |
| StaticInterfaceMembers.cs:54:37:54:39 | Dec | StaticInterfaceMembers.cs:17:22:17:24 | Dec |
| StaticInterfaceMembers.cs:57:27:57:29 | Add | StaticInterfaceMembers.cs:19:23:19:25 | Add |
| StaticInterfaceMembers.cs:60:27:60:34 | Subtract | StaticInterfaceMembers.cs:21:22:21:29 | Subtract |
publicmembers
| StaticInterfaceMembers.cs:28:19:28:22 | Real |
| StaticInterfaceMembers.cs:29:19:29:27 | Imaginary |
| StaticInterfaceMembers.cs:31:12:31:18 | Complex |
| StaticInterfaceMembers.cs:33:27:33:30 | Zero |
| StaticInterfaceMembers.cs:35:36:35:37 | ++ |
| StaticInterfaceMembers.cs:38:36:38:37 | -- |
| StaticInterfaceMembers.cs:41:46:41:46 | + |
| StaticInterfaceMembers.cs:44:46:44:46 | - |
| StaticInterfaceMembers.cs:47:28:47:35 | explicit conversion |
| StaticInterfaceMembers.cs:49:38:49:45 | explicit conversion |
| StaticInterfaceMembers.cs:51:37:51:39 | Inc |
| StaticInterfaceMembers.cs:54:37:54:39 | Dec |
| StaticInterfaceMembers.cs:57:27:57:29 | Add |
| StaticInterfaceMembers.cs:60:27:60:34 | Subtract |

View File

@@ -16,3 +16,9 @@ query predicate implements(Overridable o, Virtualizable v) {
v.isStatic() and
v.getAnImplementor() = o
}
query predicate publicmembers(Member m) {
m.getFile().getStem() = "StaticInterfaceMembers" and
m.getDeclaringType().getName() = "Complex" and
m.isPublic()
}

View File

@@ -26,4 +26,21 @@ class StaticFields
staticField = 0; // BAD
instanceField = 0; // OK
}
static object backingField;
static object StaticProp
{
get
{
return backingField ?? (backingField = new object()); // OK
}
}
object Prop
{
get
{
return backingField ?? (backingField = new object()); // BAD
}
}
}

View File

@@ -1,2 +1,3 @@
| StaticFieldWrittenByInstance.cs:15:9:15:19 | access to field staticField | Write to static field from instance method or constructor. |
| StaticFieldWrittenByInstance.cs:26:9:26:19 | access to field staticField | Write to static field from instance method or constructor. |
| StaticFieldWrittenByInstance.cs:15:9:15:19 | access to field staticField | Write to static field from instance method, property, or constructor. |
| StaticFieldWrittenByInstance.cs:26:9:26:19 | access to field staticField | Write to static field from instance method, property, or constructor. |
| StaticFieldWrittenByInstance.cs:43:37:43:48 | access to field backingField | Write to static field from instance method, property, or constructor. |

View File

@@ -14,7 +14,7 @@ CodeQL for Visual Studio Code provides an easy way to run queries from the large
With these queries, or your own custom queries, you can analyze databases generated from source code to find errors and security vulnerabilities.
The Results view shows the flow of data through the results of path queries, which is essential for triaging security results.
The CodeQL extension also adds a **CodeQL** sidebar view to VS Code. This contains a list of databases, and an overview of the queries that you have run in the current session.
The CodeQL extension also adds a **CodeQL** sidebar view to VS Code. This contains a list of local CodeQL databases, an overview of the queries that you have run in the current session, and a variant analysis view for large scale analysis.
The extension provides standard `IntelliSense <https://code.visualstudio.com/docs/editor/intellisense>`__
features for query files (extension ``.ql``) and library files (extension ``.qll``) that you open in the Visual Studio Code editor.
@@ -36,4 +36,5 @@ Further reading
-------------------
- ":doc:`Setting up CodeQL in Visual Studio Code <setting-up-codeql-in-visual-studio-code>`"
- ":doc:`Analyzing your projects <analyzing-your-projects>`"
- ":doc:`Analyzing your projects <analyzing-your-projects>`"
- ":doc:`Running CodeQL queries at scale with multi-repository variant analysis <running-codeql-queries-at-scale-with-mrva>`"

View File

@@ -5,7 +5,7 @@
Analyzing your projects
=================================================
You can run queries on CodeQL databases and view the results in Visual Studio Code.
You can run queries on CodeQL databases and view the results in Visual Studio Code. This article explains how to get a CodeQL database and analyze it on your local machine. For information on running analysis at scale across many CodeQL databases, see ":ref:`Running CodeQL queries at scale with multi-repository variant analysis <running-codeql-queries-at-scale-with-mrva>`."
Choosing a database
------------------------
@@ -24,8 +24,8 @@ To analyze a project, you need to add a :ref:`CodeQL database <codeql-database>`
#. Once you've chosen a database, it is displayed in the Databases view. To see the menu options for interacting with a database, right-click an entry in the list. You can select multiple databases using **Ctrl/Cmd+click**.
Obtaining a local database
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Importing a local database
~~~~~~~~~~~~~~~~~~~~~~~~~~
If you have a CodeQL database saved locally, as an unarchived folder or as a ZIP file, you can add it to Visual Studio Code. There are several ways to obtain a local CodeQL database.
@@ -37,6 +37,9 @@ If you have a CodeQL database saved locally, as an unarchived folder or as a ZIP
For more information about running query tests, see "`Testing custom queries <https://docs.github.com/en/code-security/codeql-cli/using-the-codeql-cli/testing-custom-queries>`__" in the CodeQL CLI help.
Downloading a database from GitHub
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. include:: ../reusables/download-github-database.rst
Running a query

View File

@@ -25,6 +25,8 @@ Editing settings
3. Edit a setting. The new settings are saved automatically.
Alternatively, you can edit the settings in JSON format by opening the command palette and selecting **Preferences: Open User Settings (JSON)**.
Choosing a version of the CodeQL CLI
--------------------------------------
@@ -55,8 +57,8 @@ By default, items in the query history view are retained for 30 days. You can se
.. _configuring-settings-for-running-queries:
Configuring settings for running queries
-----------------------------------------
Configuring settings for running queries locally
------------------------------------------------
There are a number of settings for **Running Queries**. If your queries run too slowly and time out frequently, you may want to increase the memory.
@@ -64,8 +66,49 @@ There are a number of settings for **Running Queries**. If your queries run too
To save query server logs in a custom location, edit the **Running Queries: Custom Log Directory** setting. If you use a custom log directory, the extension saves the logs permanently, instead of deleting them automatically after each workspace session. This is useful if you want to investigate these logs to improve the performance of your queries.
Configuring settings for testing queries
-----------------------------------------
Configuring settings for variant analysis
------------------------------------------
You can define or edit lists of GitHub repositories for variant analysis, and change to a different controller repository using the **Variant analysis** settings.
For information on the purpose and requirements for a controller repository, see ":ref:`Setting up a controller repository for variant analysis <controller-repository>`."
You can also edit the items shown in the Variant Analysis Repositories panel by editing a file in your Visual Studio Code workspace called ``databases.json``. This file contains a JSON representation of all the items displayed in the panel. To open your ``databases.json`` file in an editor window, click the **{ }** icon in the top right of the Variant Analysis Repositories panel. You can then see a structured representation of the repositories, organizations, and lists in your panel. For example:
.. code-block:: json
{
"version": 1,
"databases": {
"variantAnalysis": {
"repositoryLists": [
{
"name": "My favorite JavaScript repos",
"repositories": [
"facebook/react",
"babel/babel",
"angular/angular"
]
}
],
"owners": [
"microsoft"
],
"repositories": [
"apache/hadoop"
]
}
},
"selected": {
"kind": "variantAnalysisSystemDefinedList",
"listName": "top_10"
}
}
You can change the items shown in the panel or add new items by directly editing this file.
Configuring settings for testing queries locally
------------------------------------------------
To increase the number of threads used for testing queries, you can update the **Running Tests > Number Of Threads** setting.

View File

@@ -3,7 +3,7 @@
.. _exploring-data-flow-with-path-queries:
Exploring data flow with path queries
=================================================
=====================================
You can run CodeQL queries in VS Code to help you track the flow of data through a program, highlighting areas that are potential security vulnerabilities.
@@ -20,8 +20,8 @@ You can also modify the existing queries to model data flow more precisely for t
To ensure that your path query uses the correct format and metadata, follow the instructions in ":ref:`Creating path queries <creating-path-queries>`."
This topic also contains detailed information about how to define new sources and sinks, as well as templates and examples of how to extend the CodeQL libraries to suit your analysis.
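For orientation, a path query has roughly the following shape (a sketch only; the configuration class, imports, and alert message are placeholders that vary by language and library version):

.. code-block:: ql

   /**
    * @kind path-problem
    */

   import DataFlow::PathGraph

   from MyConfig cfg, DataFlow::PathNode source, DataFlow::PathNode sink
   where cfg.hasFlowPath(source, sink)
   select sink.getNode(), source, sink, "Data flows here from $@.", source.getNode(), "a source"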
Running path queries in VS Code
-----------------------------------
Running path queries in VS Code locally
---------------------------------------
#. Open a path query in the editor.
#. Right-click in the query window and select **CodeQL: Run Query on Selected Database**. (Alternatively, run the command from the Command Palette.)
@@ -30,6 +30,8 @@ Running path queries in VS Code
#. Click each step to jump to it in the source code and investigate the problem further.
#. To navigate the results from your keyboard, you can bind shortcuts to the **CodeQL: Navigate Up/Down/Left/Right in Result Viewer** commands.
When you are ready to run a path query at scale, you can use the Variant Analysis Repositories panel to run the query against up to 1,000 repositories on GitHub.com. For information on running analysis at scale across many CodeQL databases, see ":ref:`Running CodeQL queries at scale with multi-repository variant analysis <running-codeql-queries-at-scale-with-mrva>`."
Further reading
-----------------

View File

@@ -27,6 +27,11 @@ The CodeQL extension for Visual Studio Code adds rich language support for CodeQ
VS Code to help you track the flow of data through a program, highlighting
areas that are potential security vulnerabilities.
- :doc:`Running CodeQL queries at scale with multi-repository variant analysis
<running-codeql-queries-at-scale-with-mrva>`: You can run queries against groups
of repositories on GitHub.com and view results in Visual Studio Code as each analysis
finishes.
- :doc:`Testing CodeQL queries in Visual Studio Code
<testing-codeql-queries-in-visual-studio-code>`: You can run unit tests for
CodeQL queries using the Visual Studio Code extension.
@@ -40,7 +45,13 @@ The CodeQL extension for Visual Studio Code adds rich language support for CodeQ
- :doc:`Troubleshooting CodeQL for Visual Studio Code
<troubleshooting-codeql-for-visual-studio-code>`: You can use the detailed
information written to the extension's log files if you need to troubleshoot problems.
information written to the extension's log files if you need to troubleshoot problems with
analysis of local CodeQL databases.
- :doc:`Troubleshooting variant analysis
<troubleshooting-variant-analysis>`: You can use the detailed
information written to workflow log files in your controller repository if you need to
troubleshoot problems with analysis of CodeQL databases stored on GitHub.com.
- :doc:`About telemetry in CodeQL for Visual Studio Code <about-telemetry-in-codeql-for-visual-studio-code>`: If you specifically opt in to permit GitHub to do so, GitHub will collect usage data and metrics for the purposes of helping the core developers to improve the CodeQL extension for VS Code.
@@ -53,8 +64,10 @@ The CodeQL extension for Visual Studio Code adds rich language support for CodeQ
analyzing-your-projects
exploring-the-structure-of-your-source-code
exploring-data-flow-with-path-queries
running-codeql-queries-at-scale-with-mrva
testing-codeql-queries-in-visual-studio-code
working-with-codeql-packs-in-visual-studio-code
customizing-settings
troubleshooting-codeql-for-visual-studio-code
troubleshooting-variant-analysis
about-telemetry-in-codeql-for-visual-studio-code

View File

@@ -0,0 +1,163 @@
:tocdepth: 1
.. _running-codeql-queries-at-scale-with-mrva:
Running CodeQL queries at scale with multi-repository variant analysis
======================================================================
.. include:: ../reusables/beta-note-mrva.rst
About multi-repository variant analysis
---------------------------------------
When you write a query to find variants of a security vulnerability and finish testing it locally, the next step is to run it on a large group of repositories. Multi-repository variant analysis (variant analysis) makes it easy to run a query on up to 1,000 repositories without leaving Visual Studio Code.
The core functionality of the CodeQL extension helps you write queries and run them locally against a CodeQL database. In contrast, variant analysis allows you to send your CodeQL query to GitHub.com to be tested against a list of repositories.
When you run variant analysis against a list of repositories, your query is run against each repository that has a CodeQL database available to analyze. GitHub creates and stores the latest CodeQL database for the default branch of thousands of public repositories, including every repository that runs code scanning using CodeQL.
If you want to run variant analysis on your repositories, you need to enable code scanning using CodeQL (either default setup, or advanced setup using the CodeQL action) on GitHub.com before adding your repository to a list for analysis. For information about enabling code scanning using CodeQL, see "`Configuring code scanning automatically <https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning-for-a-repository#configuring-code-scanning-automatically>`__."
.. _controller-repository:
Setting up a controller repository for variant analysis
--------------------------------------------------------
When you run variant analysis, the analysis is run entirely using GitHub Actions. You don't need to create any workflows, but you must specify which GitHub repository the CodeQL extension should use as the "controller repository." Controller repositories can be empty, but they must have at least one commit. The ``GITHUB_TOKEN`` must also have "Read and write permissions" to run workflows in that repository. For more information, see "`Managing GitHub Actions settings for a repository <https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/enabling-features-for-your-repository/managing-github-actions-settings-for-a-repository#setting-the-permissions-of-the-github_token-for-your-repository>`__."
.. pull-quote::
Note
- The controller repository visibility can be "public" if you plan to analyze public repositories. The variant analysis will be free.
- The controller repository visibility must be "private" if you need to analyze any private or internal repositories. Any actions minutes used by variant analysis, above the free limit, will be charged to the repository owner. For more information about free minutes and billing, see "`About billing for GitHub Actions <https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions>`__."
You must define a controller repository before you can run your first variant analysis.
.. image:: ../images/codeql-for-visual-studio-code/controller-repository.png
:width: 350
:alt: Screenshot of the CodeQL extension in Visual Studio Code. The "Variant Analysis Repositories" section is expanded and the "Set up controller repository" button is highlighted with a dark orange outline.
#. In Visual Studio Code, click **QL** in the left sidebar to display the CodeQL extension.
#. Expand **Variant Analysis Repositories** and click **Set up controller repository** to display a field for the controller repository.
#. Type the owner and name of the repository on GitHub.com that you want to use as your controller repository and press the **Enter** key.
#. If you are prompted to authenticate with GitHub, follow the instructions and sign in to your personal or organization account. When you have finished, a prompt from GitHub Authentication may ask for permission to open a URI in Visual Studio Code; click **Open**.
The name of the controller repository is saved in your settings for the CodeQL extension. For information on how to edit the controller repository, see ":ref:`Customizing settings <customizing-settings>`."
Running a query at scale using variant analysis
-----------------------------------------------
#. Expand the **Variant Analysis Repositories** section to show the default lists, which include a selection of 10, 100, and 1,000 public repositories on GitHub.com for the language that you are analyzing.
#. Select which GitHub repository or repositories you want to run your query against. Click a row to highlight it, and then click **Select** to select that repository, organization, or list of repositories. If you want to add a new repository, organization, or list, use the options in the header panel. For information, see ":ref:`Creating custom lists of repositories <custom-lists>`", later in this article.
.. image:: ../images/codeql-for-visual-studio-code/variant-analysis-repo-lists.png
:width: 350
:alt: Screenshot of the CodeQL extension in Visual Studio Code. The "Variant Analysis Repositories" section is expanded. The "Top 10 repositories" item has a checkmark to show that it is currently selected for analysis. The user has clicked on the row for a single repository "octo-org/octo-repo" and it is highlighted blue. The "Select" button for that row is highlighted with a dark orange highlight.
#. Open the query you want to run, right-click in the query file, and select **CodeQL: Run Variant Analysis** to start variant analysis.
The CodeQL extension builds a CodeQL pack with your query and any library dependencies. The CodeQL pack and your selected repository list are posted to an API endpoint on GitHub.com, which triggers a GitHub Actions dynamic workflow in your controller repository. The workflow spins up multiple parallel jobs to execute the CodeQL query against the repositories in the list, optimizing query execution. As each repository is analyzed, the results are processed and displayed in a Variant Analysis Results view in Visual Studio Code.
.. pull-quote::
Note
If you need to cancel the variant analysis run for any reason, click **Stop query** in the Variant Analysis Results view.
Exploring your results
----------------------
When you run variant analysis, a Variant Analysis Results view opens as soon as a workflow for your analysis is running on GitHub, and displays the results as they become ready. You can use this view to monitor progress, see any errors, and access the workflow logs in your controller repository.
.. image:: ../images/codeql-for-visual-studio-code/variant-analysis-results-view.png
:alt: Screenshot of the "Variant Analysis Results" view showing a partially complete run. Analysis of ``angular/angular`` is still running but all other results are displayed. ``facebook/create-react-app`` has three results for this query.
When your variant analysis run is scheduled, the results view automatically opens. Initially the view shows a list of every repository that was scheduled for analysis. As each repository is analyzed, the view is updated to show a summary of the number of results. To view the detailed results for a repository (including results paths), click the repository name.
For each repository, you can see:
- Number of results found by the query
- Visibility of the repository
- Whether analysis is still running (black, moving circle) or finished (green checkmark)
- Number of stars the repository has on GitHub
- When the repository was last updated
To see the results for a repository:
.. image:: ../images/codeql-for-visual-studio-code/variant-analysis-result.png
:alt: Screenshot of an example result in the "Variant Analysis Results" view. The result has blue links to the source files in GitHub so you can go straight to the repository to fix the problem. There is also a "Show paths" link because this is a data flow query.
#. Click the repository name to show a summary of each result.
#. Explore the information available for each result using links to the source files on GitHub.com and, for data flow queries, the **Show paths** link. For more information, see ":ref:`Exploring data flow with path queries <exploring-data-flow-with-path-queries>`."
Exporting your results
----------------------
You can export your results for further analysis or to discuss them with collaborators. In the results view, click **Export results** to export the results to a secret gist on GitHub.com or to a markdown file in your workspace.
.. _custom-lists:
Creating custom lists of repositories
-------------------------------------
After you have defined a controller repository, the Variant Analysis Repositories panel shows the lists of repositories that you can select for variant analysis. You can use the options in the panel header to add a specific repository or organization to the panel, and to create and manage custom lists of repositories for variant analysis.
.. pull-quote::
Note
CodeQL analysis always requires a CodeQL database to run queries against. When you run variant analysis against a list of repositories, your query will only be executed against the repositories that currently have a CodeQL database available to download. The best way to make a repository available for variant analysis is to enable code scanning with CodeQL. For information about enabling code scanning using CodeQL, see "`Configuring code scanning automatically <https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning-for-a-repository#configuring-code-scanning-automatically>`__."
Selecting a single GitHub repository or organization for analysis
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#. In the Variant Analysis Repositories panel, click the **+** (add new database) icon.
#. From the dropdown menu, click **From a GitHub repository** or **All repositories of GitHub org or owner**.
#. Type the identifier of the repository or organization that you want to use into the field.
.. image:: ../images/codeql-for-visual-studio-code/variant-analysis-repo-and-org.png
:width: 350
:alt: Screenshot of the CodeQL extension in Visual Studio Code. The "Variant Analysis Repositories" section is expanded to show a repository (octo-org/octo-repo) and an organization (octo-org). These items are highlighted with a dark orange outline.
Creating a custom list of repositories
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#. In the Variant Analysis Repositories panel, click the |add-list| icon.
#. Type a name for the new list and press **Enter**.
#. Select your list in the panel, and then click **+** to add a repository to your list.
You can manage and edit your custom lists by right-clicking on either the list name, or a repository name within the list, and selecting an option from the context menu.
The custom lists are stored in your workspace in a ``databases.json`` file. If you want to edit this file directly, you can open it by clicking **{ }** in the panel header.
If you want to continue analyzing a set of repositories that had results for your query, click **Copy repository list** in the Variant Analysis Results view. This copies a list of only the repositories with results to the clipboard as JSON. For example:
.. code-block:: json
{
"name": "new-repo-list",
"repositories": [
"facebook/create-react-app"
]
}
You can then insert ``new-repo-list`` into your custom repository lists for easy access in the Variant Analysis Repositories panel.
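For example, here is a sketch of how the copied list might sit inside ``databases.json``. The surrounding keys shown here (``version``, ``databases``, ``variantAnalysis``, ``repositoryLists``, ``owners``) are assumptions about the file's schema, which may differ between versions of the extension, so click **{ }** to check the real structure in your workspace:
.. code-block:: json
{
"version": 1,
"databases": {
"variantAnalysis": {
"repositoryLists": [
{
"name": "new-repo-list",
"repositories": ["facebook/create-react-app"]
}
],
"owners": [],
"repositories": []
}
}
}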
Troubleshooting variant analysis
--------------------------------
For information on troubleshooting variant analysis, see
":ref:`Troubleshooting variant analysis <troubleshooting-variant-analysis>`."
.. |add-list| image:: ../images/codeql-for-visual-studio-code/variant-analysis-add-list.png
:height: 2ex

View File

@@ -5,7 +5,7 @@
Testing CodeQL queries in Visual Studio Code
============================================
You can run unit tests for CodeQL queries using the Visual Studio Code extension.
You can run unit tests for CodeQL queries using the Visual Studio Code extension. When you are sure that your query finds the results you want to identify, you can use variant analysis to run it at scale. For information on running analysis at scale across many CodeQL databases, see ":ref:`Running CodeQL queries at scale with multi-repository variant analysis <running-codeql-queries-at-scale-with-mrva>`."
About testing queries in VS Code
---------------------------------

View File

@@ -5,7 +5,12 @@
Troubleshooting CodeQL for Visual Studio Code
=============================================
You can use the detailed information written to the extension's log files if you need to troubleshoot problems.
This article explains how to debug problems with the analysis of CodeQL databases that are stored on your local
machine. For information on troubleshooting variant analysis, which runs on GitHub.com, see
":ref:`Troubleshooting variant analysis <troubleshooting-variant-analysis>`."
You can use the detailed information written to the extension's log files if you need to troubleshoot problems
analyzing CodeQL databases that are stored locally.
About the log files
--------------------

View File

@@ -0,0 +1,30 @@
:tocdepth: 1
.. _troubleshooting-variant-analysis:
Troubleshooting variant analysis
================================
.. include:: ../reusables/beta-note-mrva.rst
This article explains how to debug problems with variant analysis, that is, analysis run using GitHub Actions
and not locally on your machine.
For information on troubleshooting local analysis, see
":ref:`Troubleshooting CodeQL for Visual Studio Code <troubleshooting-codeql-for-visual-studio-code>`."
When you run variant analysis, there are two key places where errors and warnings are displayed:
#. **Visual Studio Code errors** - any problems with creating a CodeQL pack and sending the analysis to GitHub.com are reported as Visual Studio Code errors in the bottom right corner of the application. The problem information is also available in the **Problems** view.
#. **Variant Analysis Results** - any problems with the variant analysis run are reported in this view.
Variant analysis warning: Problem with controller repository
------------------------------------------------------------
If there are problems with the variant analysis run, you will see a warning banner at the top of the Variant Analysis Results tab. For example:
.. image:: ../images/codeql-for-visual-studio-code/variant-analysis-results-warning.png
:width: 600
:alt: Screenshot of the "Variant Analysis Results" view showing a warning banner with the text "warning: Problem with controller repository" and "Publicly visible controller repository can't be used to analyze private repositories. 1 private repository was not analyzed." The "Show logs" button is highlighted with a dark orange outline.
In this example, the user ran variant analysis on a custom list of two repositories. One of the repositories was private and could not be analyzed because the controller repository was public. Only the public repository was analyzed. To analyze both repositories, the user needs to edit their settings and update the controller repository to a private repository. For information on how to edit the controller repository, see ":ref:`Customizing settings <customizing-settings>`."

View File

@@ -31,16 +31,16 @@ following snippet demonstrates.
This query selects the API graph node corresponding to the ``re`` module. This node represents the fact that the ``re`` module has been imported, rather than a specific location in the program where the import happens. Therefore, there will be at most one result per project, and it will not have a useful location, so you'll have to click ``Show 1 non-source result`` in order to see it.
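For reference, the query under discussion (not shown in full here) presumably has this minimal form, assuming the imports used elsewhere in this article:
.. code-block:: ql
import python
import semmle.python.ApiGraphs
// Selects the abstract API node for the re module (at most one per project).
select API::moduleImport("re")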
To find where the ``re`` module is referenced in the program, you can use the ``getAUse`` method. The following query selects all references to the ``re`` module in the current database.
To find where the ``re`` module is referenced in the program, you can use the ``getAValueReachableFromSource`` method. The following query selects all references to the ``re`` module in the current database.
.. code-block:: ql
import python
import semmle.python.ApiGraphs
select API::moduleImport("re").getAUse()
select API::moduleImport("re").getAValueReachableFromSource()
Note that the ``getAUse`` method accounts for local flow, so that ``my_re_compile``
Note that the ``getAValueReachableFromSource`` method accounts for local flow, so that ``my_re_compile``
in the following snippet is
correctly recognized as a reference to the ``re.compile`` function.
@@ -53,7 +53,7 @@ correctly recognized as a reference to the ``re.compile`` function.
r = my_re_compile(".*")
If you only require immediate uses, without taking local flow into account, then you can use
the ``getAnImmediateUse`` method instead.
the ``asSource`` method instead.
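For example, here is a minimal sketch that selects only the point where the ``re`` module itself enters the data-flow graph, typically the ``import re`` statement, without following local flow:
.. code-block:: ql
import python
import semmle.python.ApiGraphs
// asSource() yields the data-flow node where the module value first appears.
select API::moduleImport("re").asSource()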
Note that the given module name *must not* contain any dots. Thus, something like
``API::moduleImport("flask.views")`` will not do what you expect. Instead, this should be decomposed
into a call to ``moduleImport`` followed by ``getMember``, as in ``API::moduleImport("flask").getMember("views")``.
@@ -71,7 +71,7 @@ the above ``re.compile`` example, you can now find references to ``re.compile``.
import python
import semmle.python.ApiGraphs
select API::moduleImport("re").getMember("compile").getAUse()
select API::moduleImport("re").getMember("compile").getAValueReachableFromSource()
In addition to ``getMember``, you can use the ``getUnknownMember`` method to find references to API
components where the name is not known statically. You can use the ``getAMember`` method to
@@ -89,12 +89,36 @@ where the return value of ``re.compile`` is used:
import python
import semmle.python.ApiGraphs
select API::moduleImport("re").getMember("compile").getReturn().getAUse()
select API::moduleImport("re").getMember("compile").getReturn().getAValueReachableFromSource()
Note that this includes all uses of the result of ``re.compile``, including those reachable via
local flow. To get just the *calls* to ``re.compile``, you can use ``getAnImmediateUse`` instead of
``getAUse``. As this is a common occurrence, you can use ``getACall`` instead of
``getReturn`` followed by ``getAnImmediateUse``.
local flow. To get just the *calls* to ``re.compile``, you can use ``asSource`` instead of
``getAValueReachableFromSource``. As this is a common occurrence, you can, instead of
``getReturn`` followed by ``asSource``, simply use ``getACall``. This will result in an
``API::CallNode``, which deserves a small description of its own.
``API::CallNode``s are not ``API::Node``s. Instead, they are ``DataFlow::Node``s with some convenience
predicates that allow you to recover ``API::Node``s for the return value, as well as for arguments
to the call. This enables you to constrain the call in various ways using the API graph. The following
snippet finds all calls to ``re.compile`` where the ``pattern`` argument comes from parsing a command
line argument using the ``argparse`` library.
.. code-block:: ql
import python
import semmle.python.ApiGraphs
from API::CallNode call
where
  call = API::moduleImport("re").getMember("compile").getACall() and
  // The "pattern" argument must be an attribute read on the object returned
  // by parse_args, hence the getReturn() after getMember("parse_args").
  call.getParameter(0, "pattern") =
    API::moduleImport("argparse")
        .getMember("ArgumentParser")
        .getReturn()
        .getMember("parse_args")
        .getReturn()
        .getMember(_)
select call
Note that the API graph does not distinguish between class instantiations and function calls. As far
as it's concerned, both are simply places where an API graph node is called.
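For instance, because instantiation is treated as a call, a sketch like the following finds the places where ``argparse.ArgumentParser`` is instantiated:
.. code-block:: ql
import python
import semmle.python.ApiGraphs
// Class instantiation is modeled as a call, so getACall() finds constructor calls.
select API::moduleImport("argparse").getMember("ArgumentParser").getACall()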
@@ -122,7 +146,7 @@ all subclasses of ``View``, you must explicitly include the subclasses of ``Meth
API::moduleImport("flask").getMember("views").getMember(["View", "MethodView"]).getASubclass*()
}
select viewClass().getAUse()
select viewClass().getAValueReachableFromSource()
Note the use of the set literal ``["View", "MethodView"]`` to match both classes simultaneously.
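The same set-literal idiom works with other API graph predicates; for example, this sketch matches calls to either ``re.match`` or ``re.fullmatch``:
.. code-block:: ql
import python
import semmle.python.ApiGraphs
// The set literal matches either member name.
select API::moduleImport("re").getMember(["match", "fullmatch"]).getACall()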
