Merge remote-tracking branch 'upstream/main' into merge-main

Jeroen Ketema
2023-03-06 15:20:39 +01:00
923 changed files with 77754 additions and 172545 deletions

View File

@@ -0,0 +1,9 @@
---
category: majorAnalysis
---
* The main data flow and taint tracking APIs have been changed. The old APIs
remain in place for now and translate to the new API through a
backwards-compatible wrapper. If multiple configurations are in scope
simultaneously, this may affect results slightly. The new API is quite
similar to the old one, but uses a configuration module instead of a
configuration class.
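
As a minimal sketch (all `My*` names and the `getenv`/`system` source and sink choices are hypothetical, and `Make` is the instantiation module introduced by this change), the same configuration in the old class style and the new module style might look like this for C/C++:

```ql
import cpp
import semmle.code.cpp.dataflow.DataFlow

// Old style: a characteristic-string subclass of `DataFlow::Configuration`.
class MyOldConfiguration extends DataFlow::Configuration {
  MyOldConfiguration() { this = "MyOldConfiguration" }

  override predicate isSource(DataFlow::Node source) {
    source.asExpr().(FunctionCall).getTarget().hasName("getenv")
  }

  override predicate isSink(DataFlow::Node sink) {
    sink.asExpr() = any(FunctionCall call | call.getTarget().hasName("system")).getAnArgument()
  }
}

// New style: a module implementing `DataFlow::ConfigSig`, instantiated via `Make`.
module MyConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) {
    source.asExpr().(FunctionCall).getTarget().hasName("getenv")
  }

  predicate isSink(DataFlow::Node sink) {
    sink.asExpr() = any(FunctionCall call | call.getTarget().hasName("system")).getAnArgument()
  }
}

module MyFlow = DataFlow::Make<MyConfig>;
```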

View File

@@ -68,7 +68,9 @@ class Declaration extends Locatable, @declaration {
* Holds if this declaration has the fully-qualified name `qualifiedName`.
* See `getQualifiedName`.
*/
predicate hasQualifiedName(string qualifiedName) { this.getQualifiedName() = qualifiedName }
deprecated predicate hasQualifiedName(string qualifiedName) {
this.getQualifiedName() = qualifiedName
}
/**
* Holds if this declaration has a fully-qualified name with a name-space

View File

@@ -24,5 +24,6 @@ import cpp
* global (inter-procedural) data flow analyses.
*/
module DataFlow {
import semmle.code.cpp.dataflow.internal.DataFlowImpl
import semmle.code.cpp.dataflow.internal.DataFlow
import semmle.code.cpp.dataflow.internal.DataFlowImpl1
}

View File

@@ -23,5 +23,6 @@ import semmle.code.cpp.dataflow.DataFlow2
* global (inter-procedural) taint-tracking analyses.
*/
module TaintTracking {
import semmle.code.cpp.dataflow.internal.tainttracking1.TaintTracking
import semmle.code.cpp.dataflow.internal.tainttracking1.TaintTrackingImpl
}

View File

@@ -0,0 +1,245 @@
/**
* Provides an implementation of global (interprocedural) data flow. This file
* re-exports the local (intraprocedural) data flow analysis from
* `DataFlowImplSpecific::Public` and adds a global analysis, mainly exposed
* through the `Make` and `MakeWithState` modules.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
import DataFlowImplCommonPublic
private import DataFlowImpl
/** An input configuration for data flow. */
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/** An input configuration for data flow using flow state. */
signature module StateConfigSig {
bindingset[this]
class FlowState;
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state);
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state);
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2);
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/**
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
signature int explorationLimitSig();
/**
* The output of a data flow computation.
*/
signature module DataFlowSig {
/**
* A `Node` augmented with a call context (except for sinks) and an access path.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode;
/**
* Holds if data can flow from `source` to `sink`.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink);
/**
* Holds if data can flow from `source` to `sink`.
*/
predicate hasFlow(Node source, Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowTo(Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowToExpr(DataFlowExpr sink);
}
/**
* Constructs a standard data flow computation.
*/
module Make<ConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import DefaultState<Config>
import Config
}
import Impl<C>
}
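For reference, a hypothetical query built on `Make` (the `handle_request`/`malloc` source and sink, the query metadata, and all `My*` names are illustrative assumptions; `PathNode`, `PathGraph`, and `hasFlowPath` come from the instantiated module, as in the query migration later in this commit):

```ql
/**
 * @kind path-problem
 * @problem.severity warning
 * @id cpp/example/my-flow
 */

import cpp
import semmle.code.cpp.dataflow.DataFlow
import MyFlow::PathGraph

module MyConfig implements DataFlow::ConfigSig {
  // Hypothetical source: any parameter of a function named `handle_request`.
  predicate isSource(DataFlow::Node source) {
    source.asParameter().getFunction().hasName("handle_request")
  }

  // Hypothetical sink: the size argument of `malloc`.
  predicate isSink(DataFlow::Node sink) {
    sink.asExpr() = any(FunctionCall call | call.getTarget().hasName("malloc")).getArgument(0)
  }
}

module MyFlow = DataFlow::Make<MyConfig>;

from MyFlow::PathNode source, MyFlow::PathNode sink
where MyFlow::hasFlowPath(source, sink)
select sink.getNode(), source, sink, "Request data reaches an allocation size."
```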
/**
* Constructs a data flow computation using flow state.
*/
module MakeWithState<StateConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import Config
}
import Impl<C>
}
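Similarly, a sketch of a flow-state configuration for `MakeWithState` (the two states, the `sanitize` function, and the source/sink choices are hypothetical; the pattern mirrors the `TState`-based query migration further down in this commit):

```ql
import cpp
import semmle.code.cpp.dataflow.DataFlow

newtype TState =
  TRawState() or
  TSanitizedState()

class RawState extends TRawState {
  string toString() { result = "raw" }
}

class SanitizedState extends TSanitizedState {
  string toString() { result = "sanitized" }
}

module MyStateConfig implements DataFlow::StateConfigSig {
  class FlowState = TState;

  predicate isSource(DataFlow::Node source, FlowState state) {
    state instanceof RawState and
    source.asExpr().(FunctionCall).getTarget().hasName("getenv")
  }

  predicate isSink(DataFlow::Node sink, FlowState state) {
    state instanceof RawState and
    sink.asExpr() = any(FunctionCall call | call.getTarget().hasName("system")).getAnArgument()
  }

  predicate isBarrier(DataFlow::Node node, FlowState state) { none() }

  predicate isAdditionalFlowStep(
    DataFlow::Node node1, FlowState state1, DataFlow::Node node2, FlowState state2
  ) {
    // Hypothetical: the return value of `sanitize(x)` carries the value onward
    // in the sanitized state, which the sink above does not accept.
    state1 instanceof RawState and
    state2 instanceof SanitizedState and
    exists(FunctionCall call |
      call.getTarget().hasName("sanitize") and
      node1.asExpr() = call.getAnArgument() and
      node2.asExpr() = call
    )
  }
}

module MyStateFlow = DataFlow::MakeWithState<MyStateConfig>;
```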

File diff suppressed because it is too large.

View File

@@ -0,0 +1,396 @@
/**
* DEPRECATED: Use `Make` and `MakeWithState` instead.
*
* Provides a `Configuration` class backwards-compatible interface to the data
* flow library.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
private import DataFlowImpl
import DataFlowImplCommonPublic
import FlowStateString
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each
* use of the global data flow library must define its own unique extension
* of this abstract class. To create a configuration, extend this class with
* a subclass whose characteristic predicate is a unique singleton string.
* For example, write
*
* ```ql
* class MyAnalysisConfiguration extends DataFlow::Configuration {
* MyAnalysisConfiguration() { this = "MyAnalysisConfiguration" }
* // Override `isSource` and `isSink`.
* // Optionally override `isBarrier`.
* // Optionally override `isAdditionalFlowStep`.
* }
* ```
* Conceptually, this defines a graph where the nodes are `DataFlow::Node`s and
* the edges are those data-flow steps that preserve the value of the node
* along with any additional edges defined by `isAdditionalFlowStep`.
* Specifying nodes in `isBarrier` will remove those nodes from the graph, and
* specifying nodes in `isBarrierIn` and/or `isBarrierOut` will remove in-going
* and/or out-going edges from those nodes, respectively.
*
* Then, to query whether there is flow between some `source` and `sink`,
* write
*
* ```ql
* exists(MyAnalysisConfiguration cfg | cfg.hasFlow(source, sink))
* ```
*
* Multiple configurations can coexist, but two classes extending
* `DataFlow::Configuration` should never depend on each other. One of them
* should instead depend on a `DataFlow2::Configuration`, a
* `DataFlow3::Configuration`, or a `DataFlow4::Configuration`.
*/
abstract class Configuration extends string {
bindingset[this]
Configuration() { any() }
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source) { none() }
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state) { none() }
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink) { none() }
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state) { none() }
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state) { none() }
/** Holds if data flow into `node` is prohibited. */
predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
predicate isBarrierOut(Node node) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited when
* the flow state is `state`
*/
deprecated predicate isBarrierGuard(BarrierGuard guard, FlowState state) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
none()
}
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*/
predicate hasFlow(Node source, Node sink) { hasFlow(source, sink, this) }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink) { hasFlowPath(source, sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowTo(Node sink) { hasFlowTo(sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowToExpr(DataFlowExpr sink) { this.hasFlowTo(exprNode(sink)) }
/**
* DEPRECATED: Use `FlowExploration<explorationLimit>` instead.
*
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
deprecated int explorationLimit() { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (for example in a `path-problem` query).
*/
predicate includeHiddenNodes() { none() }
}
/**
* This class exists to prevent mutual recursion between the user-overridden
* member predicates of `Configuration` and the rest of the data-flow library.
* Good performance cannot be guaranteed in the presence of such recursion, so
* it should be replaced by using more than one copy of the data flow library.
*/
abstract private class ConfigurationRecursionPrevention extends Configuration {
bindingset[this]
ConfigurationRecursionPrevention() { any() }
override predicate hasFlow(Node source, Node sink) {
strictcount(Node n | this.isSource(n)) < 0
or
strictcount(Node n | this.isSource(n, _)) < 0
or
strictcount(Node n | this.isSink(n)) < 0
or
strictcount(Node n | this.isSink(n, _)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, n2)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, _, n2, _)) < 0
or
super.hasFlow(source, sink)
}
}
/** A bridge class to access the deprecated `isBarrierGuard`. */
private class BarrierGuardGuardedNodeBridge extends Unit {
abstract predicate guardedNode(Node n, Configuration config);
abstract predicate guardedNode(Node n, FlowState state, Configuration config);
}
private class BarrierGuardGuardedNode extends BarrierGuardGuardedNodeBridge {
deprecated override predicate guardedNode(Node n, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g) and
n = g.getAGuardedNode()
)
}
deprecated override predicate guardedNode(Node n, FlowState state, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g, state) and
n = g.getAGuardedNode()
)
}
}
private FlowState relevantState(Configuration config) {
config.isSource(_, result) or
config.isSink(_, result) or
config.isBarrier(_, result) or
config.isAdditionalFlowStep(_, result, _, _) or
config.isAdditionalFlowStep(_, _, _, result)
}
private newtype TConfigState =
TMkConfigState(Configuration config, FlowState state) {
state = relevantState(config) or state instanceof FlowStateEmpty
}
private Configuration getConfig(TConfigState state) { state = TMkConfigState(result, _) }
private FlowState getState(TConfigState state) { state = TMkConfigState(_, result) }
private predicate singleConfiguration() { 1 = strictcount(Configuration c) }
private module Config implements FullStateConfigSig {
class FlowState = TConfigState;
predicate isSource(Node source, FlowState state) {
getConfig(state).isSource(source, getState(state))
or
getConfig(state).isSource(source) and getState(state) instanceof FlowStateEmpty
}
predicate isSink(Node sink, FlowState state) {
getConfig(state).isSink(sink, getState(state))
or
getConfig(state).isSink(sink) and getState(state) instanceof FlowStateEmpty
}
predicate isBarrier(Node node) { none() }
predicate isBarrier(Node node, FlowState state) {
getConfig(state).isBarrier(node, getState(state)) or
getConfig(state).isBarrier(node) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getState(state), getConfig(state)) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getConfig(state))
}
predicate isBarrierIn(Node node) { any(Configuration config).isBarrierIn(node) }
predicate isBarrierOut(Node node) { any(Configuration config).isBarrierOut(node) }
predicate isAdditionalFlowStep(Node node1, Node node2) {
singleConfiguration() and
any(Configuration config).isAdditionalFlowStep(node1, node2)
}
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
getConfig(state1).isAdditionalFlowStep(node1, getState(state1), node2, getState(state2)) and
getConfig(state2) = getConfig(state1)
or
not singleConfiguration() and
getConfig(state1).isAdditionalFlowStep(node1, node2) and
state2 = state1
}
predicate allowImplicitRead(Node node, ContentSet c) {
any(Configuration config).allowImplicitRead(node, c)
}
int fieldFlowBranchLimit() { result = min(any(Configuration config).fieldFlowBranchLimit()) }
FlowFeature getAFeature() { result = any(Configuration config).getAFeature() }
predicate sourceGrouping(Node source, string sourceGroup) {
any(Configuration config).sourceGrouping(source, sourceGroup)
}
predicate sinkGrouping(Node sink, string sinkGroup) {
any(Configuration config).sinkGrouping(sink, sinkGroup)
}
predicate includeHiddenNodes() { any(Configuration config).includeHiddenNodes() }
}
private import Impl<Config> as I
import I
/**
* A `Node` augmented with a call context (except for sinks), an access path, and a configuration.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode instanceof I::PathNode {
/** Gets a textual representation of this element. */
final string toString() { result = super.toString() }
/**
* Gets a textual representation of this element, including a textual
* representation of the call context.
*/
final string toStringWithContext() { result = super.toStringWithContext() }
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
*/
final predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
}
/** Gets the underlying `Node`. */
final Node getNode() { result = super.getNode() }
/** Gets the `FlowState` of this node. */
final FlowState getState() { result = getState(super.getState()) }
/** Gets the associated configuration. */
final Configuration getConfiguration() { result = getConfig(super.getState()) }
/** Gets a successor of this node, if any. */
final PathNode getASuccessor() { result = super.getASuccessor() }
/** Holds if this node is a source. */
final predicate isSource() { super.isSource() }
/** Holds if this node is a grouping of source nodes. */
final predicate isSourceGroup(string group) { super.isSourceGroup(group) }
/** Holds if this node is a grouping of sink nodes. */
final predicate isSinkGroup(string group) { super.isSinkGroup(group) }
}
private predicate hasFlow(Node source, Node sink, Configuration config) {
exists(PathNode source0, PathNode sink0 |
hasFlowPath(source0, sink0, config) and
source0.getNode() = source and
sink0.getNode() = sink
)
}
private predicate hasFlowPath(PathNode source, PathNode sink, Configuration config) {
hasFlowPath(source, sink) and source.getConfiguration() = config
}
private predicate hasFlowTo(Node sink, Configuration config) { hasFlow(_, sink, config) }
predicate flowsTo = hasFlow/3;

File diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -3,15 +3,18 @@ private import DataFlowImplSpecific::Public
import Cached
module DataFlowImplCommonPublic {
/** A state value to track during data flow. */
class FlowState = string;
/** Provides `FlowState = string`. */
module FlowStateString {
/** A state value to track during data flow. */
class FlowState = string;
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
}
}
private newtype TFlowFeature =

View File

@@ -0,0 +1,63 @@
/**
* Provides classes for performing local (intra-procedural) and
* global (inter-procedural) taint-tracking analyses.
*/
import TaintTrackingParameter::Public
private import TaintTrackingParameter::Private
private module AddTaintDefaults<DataFlowInternal::FullStateConfigSig Config> implements
DataFlowInternal::FullStateConfigSig {
import Config
predicate isBarrier(DataFlow::Node node) {
Config::isBarrier(node) or defaultTaintSanitizer(node)
}
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
Config::isAdditionalFlowStep(node1, node2) or
defaultAdditionalTaintStep(node1, node2)
}
predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
Config::allowImplicitRead(node, c)
or
(
Config::isSink(node, _) or
Config::isAdditionalFlowStep(node, _) or
Config::isAdditionalFlowStep(node, _, _, _)
) and
defaultImplicitTaintRead(node, c)
}
}
/**
* Constructs a standard taint tracking computation.
*/
module Make<DataFlow::ConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import DataFlowInternal::DefaultState<Config>
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
/**
* Constructs a taint tracking computation using flow state.
*/
module MakeWithState<DataFlow::StateConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
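For reference, a hypothetical taint-tracking query using `Make` (the `main`-argv source, the `printf` sink, and all `My*` names are illustrative assumptions):

```ql
import cpp
import semmle.code.cpp.dataflow.DataFlow
import semmle.code.cpp.dataflow.TaintTracking

module MyTaintConfig implements DataFlow::ConfigSig {
  // Hypothetical source: the `argv` parameter of `main`.
  predicate isSource(DataFlow::Node source) {
    exists(Function main | main.hasGlobalName("main") |
      source.asParameter() = main.getParameter(1)
    )
  }

  // Hypothetical sink: the format argument of `printf`.
  predicate isSink(DataFlow::Node sink) {
    sink.asExpr() = any(FunctionCall call | call.getTarget().hasName("printf")).getArgument(0)
  }
}

// The taint variant layers the default taint steps and sanitizers on top of
// the plain data-flow configuration above.
module MyTaint = TaintTracking::Make<MyTaintConfig>;

from DataFlow::Node source, DataFlow::Node sink
where MyTaint::hasFlow(source, sink)
select sink, "Command-line input may be used as a printf format string."
```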

View File

@@ -2,4 +2,5 @@ import semmle.code.cpp.dataflow.internal.TaintTrackingUtil as Public
module Private {
import semmle.code.cpp.dataflow.DataFlow::DataFlow as DataFlow
import semmle.code.cpp.dataflow.internal.DataFlowImpl as DataFlowInternal
}

View File

@@ -26,5 +26,6 @@ import cpp
* global (inter-procedural) data flow analyses.
*/
module DataFlow {
import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl
import semmle.code.cpp.ir.dataflow.internal.DataFlow
import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl1
}

View File

@@ -23,5 +23,6 @@ import semmle.code.cpp.dataflow.new.DataFlow2
* global (inter-procedural) taint-tracking analyses.
*/
module TaintTracking {
import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTracking
import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTrackingImpl
}

View File

@@ -22,5 +22,6 @@
import cpp
module DataFlow {
import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl
import semmle.code.cpp.ir.dataflow.internal.DataFlow
import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl1
}

View File

@@ -19,5 +19,6 @@ import semmle.code.cpp.ir.dataflow.DataFlow
import semmle.code.cpp.ir.dataflow.DataFlow2
module TaintTracking {
import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTracking
import semmle.code.cpp.ir.dataflow.internal.tainttracking1.TaintTrackingImpl
}

View File

@@ -0,0 +1,245 @@
/**
* Provides an implementation of global (interprocedural) data flow. This file
* re-exports the local (intraprocedural) data flow analysis from
* `DataFlowImplSpecific::Public` and adds a global analysis, mainly exposed
* through the `Make` and `MakeWithState` modules.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
import DataFlowImplCommonPublic
private import DataFlowImpl
/** An input configuration for data flow. */
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/** An input configuration for data flow using flow state. */
signature module StateConfigSig {
bindingset[this]
class FlowState;
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state);
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state);
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
default predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state);
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
default predicate isBarrierOut(Node node) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2);
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
default predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
default int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
default FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
default predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
default predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (as it is in a `path-problem` query).
*/
default predicate includeHiddenNodes() { none() }
}
/**
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
signature int explorationLimitSig();
/**
* The output of a data flow computation.
*/
signature module DataFlowSig {
/**
* A `Node` augmented with a call context (except for sinks) and an access path.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode;
/**
* Holds if data can flow from `source` to `sink`.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink);
/**
* Holds if data can flow from `source` to `sink`.
*/
predicate hasFlow(Node source, Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowTo(Node sink);
/**
* Holds if data can flow from some source to `sink`.
*/
predicate hasFlowToExpr(DataFlowExpr sink);
}
/**
* Constructs a standard data flow computation.
*/
module Make<ConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import DefaultState<Config>
import Config
}
import Impl<C>
}
/**
* Constructs a data flow computation using flow state.
*/
module MakeWithState<StateConfigSig Config> implements DataFlowSig {
private module C implements FullStateConfigSig {
import Config
}
import Impl<C>
}

View File

@@ -0,0 +1,396 @@
/**
* DEPRECATED: Use `Make` and `MakeWithState` instead.
*
* Provides a `Configuration` class backwards-compatible interface to the data
* flow library.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
private import DataFlowImpl
import DataFlowImplCommonPublic
import FlowStateString
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each
* use of the global data flow library must define its own unique extension
* of this abstract class. To create a configuration, extend this class with
* a subclass whose characteristic predicate is a unique singleton string.
* For example, write
*
* ```ql
* class MyAnalysisConfiguration extends DataFlow::Configuration {
* MyAnalysisConfiguration() { this = "MyAnalysisConfiguration" }
* // Override `isSource` and `isSink`.
* // Optionally override `isBarrier`.
* // Optionally override `isAdditionalFlowStep`.
* }
* ```
* Conceptually, this defines a graph where the nodes are `DataFlow::Node`s and
* the edges are those data-flow steps that preserve the value of the node
* along with any additional edges defined by `isAdditionalFlowStep`.
* Specifying nodes in `isBarrier` will remove those nodes from the graph, and
* specifying nodes in `isBarrierIn` and/or `isBarrierOut` will remove in-going
* and/or out-going edges from those nodes, respectively.
*
* Then, to query whether there is flow between some `source` and `sink`,
* write
*
* ```ql
* exists(MyAnalysisConfiguration cfg | cfg.hasFlow(source, sink))
* ```
*
* Multiple configurations can coexist, but two classes extending
* `DataFlow::Configuration` should never depend on each other. One of them
* should instead depend on a `DataFlow2::Configuration`, a
* `DataFlow3::Configuration`, or a `DataFlow4::Configuration`.
*/
abstract class Configuration extends string {
bindingset[this]
Configuration() { any() }
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source) { none() }
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state) { none() }
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink) { none() }
/**
* Holds if `sink` is a relevant data flow sink accepting `state`.
*/
predicate isSink(Node sink, FlowState state) { none() }
/**
* Holds if data flow through `node` is prohibited. This completely removes
* `node` from the data flow graph.
*/
predicate isBarrier(Node node) { none() }
/**
* Holds if data flow through `node` is prohibited when the flow state is
* `state`.
*/
predicate isBarrier(Node node, FlowState state) { none() }
/** Holds if data flow into `node` is prohibited. */
predicate isBarrierIn(Node node) { none() }
/** Holds if data flow out of `node` is prohibited. */
predicate isBarrierOut(Node node) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited.
*/
deprecated predicate isBarrierGuard(BarrierGuard guard) { none() }
/**
* DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
*
* Holds if data flow through nodes guarded by `guard` is prohibited when
* the flow state is `state`
*/
deprecated predicate isBarrierGuard(BarrierGuard guard, FlowState state) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
* This step is only applicable in `state1` and updates the flow state to `state2`.
*/
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
none()
}
/**
* Holds if an arbitrary number of implicit read steps of content `c` may be
* taken at `node`.
*/
predicate allowImplicitRead(Node node, ContentSet c) { none() }
/**
* Gets the virtual dispatch branching limit when calculating field flow.
* This can be overridden to a smaller value to improve performance (a
* value of 0 disables field flow), or a larger value to get more results.
*/
int fieldFlowBranchLimit() { result = 2 }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*
* These features are generally not relevant for typical end-to-end data flow
* queries, but should only be used for constructing paths that need to
* somehow be pluggable in another path context.
*/
FlowFeature getAFeature() { none() }
/** Holds if sources should be grouped in the result of `hasFlowPath`. */
predicate sourceGrouping(Node source, string sourceGroup) { none() }
/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
predicate sinkGrouping(Node sink, string sinkGroup) { none() }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*/
predicate hasFlow(Node source, Node sink) { hasFlow(source, sink, this) }
/**
* Holds if data may flow from `source` to `sink` for this configuration.
*
* The corresponding paths are generated from the end-points and the graph
* included in the module `PathGraph`.
*/
predicate hasFlowPath(PathNode source, PathNode sink) { hasFlowPath(source, sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowTo(Node sink) { hasFlowTo(sink, this) }
/**
* Holds if data may flow from some source to `sink` for this configuration.
*/
predicate hasFlowToExpr(DataFlowExpr sink) { this.hasFlowTo(exprNode(sink)) }
/**
* DEPRECATED: Use `FlowExploration<explorationLimit>` instead.
*
* Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
* measured in approximate number of interprocedural steps.
*/
deprecated int explorationLimit() { none() }
/**
* Holds if hidden nodes should be included in the data flow graph.
*
* This feature should only be used for debugging or when the data flow graph
* is not visualized (for example in a `path-problem` query).
*/
predicate includeHiddenNodes() { none() }
}
/**
* This class exists to prevent mutual recursion between the user-overridden
* member predicates of `Configuration` and the rest of the data-flow library.
* Good performance cannot be guaranteed in the presence of such recursion, so
* it should be replaced by using more than one copy of the data flow library.
*/
abstract private class ConfigurationRecursionPrevention extends Configuration {
bindingset[this]
ConfigurationRecursionPrevention() { any() }
override predicate hasFlow(Node source, Node sink) {
strictcount(Node n | this.isSource(n)) < 0
or
strictcount(Node n | this.isSource(n, _)) < 0
or
strictcount(Node n | this.isSink(n)) < 0
or
strictcount(Node n | this.isSink(n, _)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, n2)) < 0
or
strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, _, n2, _)) < 0
or
super.hasFlow(source, sink)
}
}
/** A bridge class to access the deprecated `isBarrierGuard`. */
private class BarrierGuardGuardedNodeBridge extends Unit {
abstract predicate guardedNode(Node n, Configuration config);
abstract predicate guardedNode(Node n, FlowState state, Configuration config);
}
private class BarrierGuardGuardedNode extends BarrierGuardGuardedNodeBridge {
deprecated override predicate guardedNode(Node n, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g) and
n = g.getAGuardedNode()
)
}
deprecated override predicate guardedNode(Node n, FlowState state, Configuration config) {
exists(BarrierGuard g |
config.isBarrierGuard(g, state) and
n = g.getAGuardedNode()
)
}
}
private FlowState relevantState(Configuration config) {
config.isSource(_, result) or
config.isSink(_, result) or
config.isBarrier(_, result) or
config.isAdditionalFlowStep(_, result, _, _) or
config.isAdditionalFlowStep(_, _, _, result)
}
private newtype TConfigState =
TMkConfigState(Configuration config, FlowState state) {
state = relevantState(config) or state instanceof FlowStateEmpty
}
private Configuration getConfig(TConfigState state) { state = TMkConfigState(result, _) }
private FlowState getState(TConfigState state) { state = TMkConfigState(_, result) }
private predicate singleConfiguration() { 1 = strictcount(Configuration c) }
private module Config implements FullStateConfigSig {
class FlowState = TConfigState;
predicate isSource(Node source, FlowState state) {
getConfig(state).isSource(source, getState(state))
or
getConfig(state).isSource(source) and getState(state) instanceof FlowStateEmpty
}
predicate isSink(Node sink, FlowState state) {
getConfig(state).isSink(sink, getState(state))
or
getConfig(state).isSink(sink) and getState(state) instanceof FlowStateEmpty
}
predicate isBarrier(Node node) { none() }
predicate isBarrier(Node node, FlowState state) {
getConfig(state).isBarrier(node, getState(state)) or
getConfig(state).isBarrier(node) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getState(state), getConfig(state)) or
any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getConfig(state))
}
predicate isBarrierIn(Node node) { any(Configuration config).isBarrierIn(node) }
predicate isBarrierOut(Node node) { any(Configuration config).isBarrierOut(node) }
predicate isAdditionalFlowStep(Node node1, Node node2) {
singleConfiguration() and
any(Configuration config).isAdditionalFlowStep(node1, node2)
}
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
getConfig(state1).isAdditionalFlowStep(node1, getState(state1), node2, getState(state2)) and
getConfig(state2) = getConfig(state1)
or
not singleConfiguration() and
getConfig(state1).isAdditionalFlowStep(node1, node2) and
state2 = state1
}
predicate allowImplicitRead(Node node, ContentSet c) {
any(Configuration config).allowImplicitRead(node, c)
}
int fieldFlowBranchLimit() { result = min(any(Configuration config).fieldFlowBranchLimit()) }
FlowFeature getAFeature() { result = any(Configuration config).getAFeature() }
predicate sourceGrouping(Node source, string sourceGroup) {
any(Configuration config).sourceGrouping(source, sourceGroup)
}
predicate sinkGrouping(Node sink, string sinkGroup) {
any(Configuration config).sinkGrouping(sink, sinkGroup)
}
predicate includeHiddenNodes() { any(Configuration config).includeHiddenNodes() }
}
private import Impl<Config> as I
import I
/**
* A `Node` augmented with a call context (except for sinks), an access path, and a configuration.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode instanceof I::PathNode {
/** Gets a textual representation of this element. */
final string toString() { result = super.toString() }
/**
* Gets a textual representation of this element, including a textual
* representation of the call context.
*/
final string toStringWithContext() { result = super.toStringWithContext() }
/**
* Holds if this element is at the specified location.
* The location spans column `startcolumn` of line `startline` to
* column `endcolumn` of line `endline` in file `filepath`.
* For more information, see
* [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
*/
final predicate hasLocationInfo(
string filepath, int startline, int startcolumn, int endline, int endcolumn
) {
super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
}
/** Gets the underlying `Node`. */
final Node getNode() { result = super.getNode() }
/** Gets the `FlowState` of this node. */
final FlowState getState() { result = getState(super.getState()) }
/** Gets the associated configuration. */
final Configuration getConfiguration() { result = getConfig(super.getState()) }
/** Gets a successor of this node, if any. */
final PathNode getASuccessor() { result = super.getASuccessor() }
/** Holds if this node is a source. */
final predicate isSource() { super.isSource() }
/** Holds if this node is a grouping of source nodes. */
final predicate isSourceGroup(string group) { super.isSourceGroup(group) }
/** Holds if this node is a grouping of sink nodes. */
final predicate isSinkGroup(string group) { super.isSinkGroup(group) }
}
private predicate hasFlow(Node source, Node sink, Configuration config) {
exists(PathNode source0, PathNode sink0 |
hasFlowPath(source0, sink0, config) and
source0.getNode() = source and
sink0.getNode() = sink
)
}
private predicate hasFlowPath(PathNode source, PathNode sink, Configuration config) {
hasFlowPath(source, sink) and source.getConfiguration() = config
}
private predicate hasFlowTo(Node sink, Configuration config) { hasFlow(_, sink, config) }
predicate flowsTo = hasFlow/3;

View File

@@ -3,15 +3,18 @@ private import DataFlowImplSpecific::Public
import Cached
module DataFlowImplCommonPublic {
/** A state value to track during data flow. */
class FlowState = string;
/** Provides `FlowState = string`. */
module FlowStateString {
/** A state value to track during data flow. */
class FlowState = string;
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
}
}
private newtype TFlowFeature =

View File

@@ -97,23 +97,23 @@ private string getNodeProperty(DataFlow::Node node, string key) {
|
kind, ", "
)
or
// Is there partial flow from a source to this node?
// This property will only be emitted if partial flow is enabled by overriding
// `DataFlow::Configuration::explorationLimit()`.
key = "pflow" and
result =
strictconcat(DataFlow::PartialPathNode sourceNode, DataFlow::PartialPathNode destNode, int dist,
int order1, int order2 |
any(DataFlow::Configuration cfg).hasPartialFlow(sourceNode, destNode, dist) and
destNode.getNode() = node and
// Only print flow from a source in the same function.
sourceNode.getNode().getEnclosingCallable() = node.getEnclosingCallable()
|
nodeId(sourceNode.getNode(), order1, order2) + "+" + dist.toString(), ", "
order by
order1, order2, dist desc
)
// or
// // Is there partial flow from a source to this node?
// // This property will only be emitted if partial flow is enabled by overriding
// // `DataFlow::Configuration::explorationLimit()`.
// key = "pflow" and
// result =
// strictconcat(DataFlow::PartialPathNode sourceNode, DataFlow::PartialPathNode destNode, int dist,
// int order1, int order2 |
// any(DataFlow::Configuration cfg).hasPartialFlow(sourceNode, destNode, dist) and
// destNode.getNode() = node and
// // Only print flow from a source in the same function.
// sourceNode.getNode().getEnclosingCallable() = node.getEnclosingCallable()
// |
// nodeId(sourceNode.getNode(), order1, order2) + "+" + dist.toString(), ", "
// order by
// order1, order2, dist desc
// )
}
/**

View File

@@ -0,0 +1,63 @@
/**
* Provides classes for performing local (intra-procedural) and
* global (inter-procedural) taint-tracking analyses.
*/
import TaintTrackingParameter::Public
private import TaintTrackingParameter::Private
private module AddTaintDefaults<DataFlowInternal::FullStateConfigSig Config> implements
DataFlowInternal::FullStateConfigSig {
import Config
predicate isBarrier(DataFlow::Node node) {
Config::isBarrier(node) or defaultTaintSanitizer(node)
}
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
Config::isAdditionalFlowStep(node1, node2) or
defaultAdditionalTaintStep(node1, node2)
}
predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
Config::allowImplicitRead(node, c)
or
(
Config::isSink(node, _) or
Config::isAdditionalFlowStep(node, _) or
Config::isAdditionalFlowStep(node, _, _, _)
) and
defaultImplicitTaintRead(node, c)
}
}
/**
* Constructs a standard taint tracking computation.
*/
module Make<DataFlow::ConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import DataFlowInternal::DefaultState<Config>
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}
/**
* Constructs a taint tracking computation using flow state.
*/
module MakeWithState<DataFlow::StateConfigSig Config> implements DataFlow::DataFlowSig {
private module Config0 implements DataFlowInternal::FullStateConfigSig {
import Config
}
private module C implements DataFlowInternal::FullStateConfigSig {
import AddTaintDefaults<Config0>
}
import DataFlowInternal::Impl<C>
}

View File

@@ -2,4 +2,5 @@ import semmle.code.cpp.ir.dataflow.internal.TaintTrackingUtil as Public
module Private {
import semmle.code.cpp.ir.dataflow.DataFlow::DataFlow as DataFlow
import semmle.code.cpp.ir.dataflow.internal.DataFlowImpl as DataFlowInternal
}

View File

@@ -22,7 +22,7 @@ import semmle.code.cpp.ir.dataflow.TaintTracking
import semmle.code.cpp.ir.dataflow.TaintTracking2
import semmle.code.cpp.security.FlowSources
import semmle.code.cpp.models.implementations.Strcat
import DataFlow::PathGraph
import ExecTaint::PathGraph
/**
* Holds if `incoming` is a string that is used in a format or concatenation function resulting
@@ -55,29 +55,30 @@ predicate interestingConcatenation(DataFlow::Node incoming, DataFlow::Node outgo
)
}
class ConcatState extends DataFlow::FlowState {
ConcatState() { this = "ConcatState" }
newtype TState =
TConcatState() or
TExecState(DataFlow::Node incoming, DataFlow::Node outgoing) {
interestingConcatenation(pragma[only_bind_into](incoming), pragma[only_bind_into](outgoing))
}
class ConcatState extends TConcatState {
string toString() { result = "ConcatState" }
}
class ExecState extends DataFlow::FlowState {
class ExecState extends TExecState {
DataFlow::Node incoming;
DataFlow::Node outgoing;
ExecState() {
this =
"ExecState (" + incoming.getLocation() + " | " + incoming + ", " + outgoing.getLocation() +
" | " + outgoing + ")" and
interestingConcatenation(pragma[only_bind_into](incoming), pragma[only_bind_into](outgoing))
}
ExecState() { this = TExecState(incoming, outgoing) }
DataFlow::Node getIncomingNode() { result = incoming }
DataFlow::Node getOutgoingNode() { result = outgoing }
/** Holds if this is a possible `ExecState` for `sink`. */
predicate isFeasibleForSink(DataFlow::Node sink) {
any(ExecStateConfiguration conf).hasFlow(outgoing, sink)
}
predicate isFeasibleForSink(DataFlow::Node sink) { ExecState::hasFlow(outgoing, sink) }
string toString() { result = "ExecState" }
}
predicate isSinkImpl(DataFlow::Node sink, Expr command, string callChain) {
@@ -85,7 +86,7 @@ predicate isSinkImpl(DataFlow::Node sink, Expr command, string callChain) {
shellCommand(command, callChain)
}
predicate isSanitizerImpl(DataFlow::Node node) {
predicate isBarrierImpl(DataFlow::Node node) {
node.asExpr().getUnspecifiedType() instanceof IntegralType
or
node.asExpr().getUnspecifiedType() instanceof FloatingPointType
@@ -96,56 +97,57 @@ predicate isSanitizerImpl(DataFlow::Node node) {
* given sink. This avoids a cartesian product between all sinks and all `ExecState`s in
* `ExecTaintConfiguration::isSink`.
*/
class ExecStateConfiguration extends TaintTracking2::Configuration {
ExecStateConfiguration() { this = "ExecStateConfiguration" }
module ExecStateConfiguration implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { any(ExecState state).getOutgoingNode() = source }
override predicate isSource(DataFlow::Node source) {
any(ExecState state).getOutgoingNode() = source
}
predicate isSink(DataFlow::Node sink) { isSinkImpl(sink, _, _) }
override predicate isSink(DataFlow::Node sink) { isSinkImpl(sink, _, _) }
predicate isBarrier(DataFlow::Node node) { isBarrierImpl(node) }
override predicate isSanitizer(DataFlow::Node node) { isSanitizerImpl(node) }
override predicate isSanitizerOut(DataFlow::Node node) {
isSink(node, _) // Prevent duplicates along a call chain, since `shellCommand` will include wrappers
predicate isBarrierOut(DataFlow::Node node) {
isSink(node) // Prevent duplicates along a call chain, since `shellCommand` will include wrappers
}
}
class ExecTaintConfiguration extends TaintTracking::Configuration {
ExecTaintConfiguration() { this = "ExecTaintConfiguration" }
module ExecState = TaintTracking::Make<ExecStateConfiguration>;
override predicate isSource(DataFlow::Node source, DataFlow::FlowState state) {
module ExecTaintConfiguration implements DataFlow::StateConfigSig {
class FlowState = TState;
predicate isSource(DataFlow::Node source, FlowState state) {
source instanceof FlowSource and
state instanceof ConcatState
}
override predicate isSink(DataFlow::Node sink, DataFlow::FlowState state) {
any(ExecStateConfiguration conf).isSink(sink) and
predicate isSink(DataFlow::Node sink, FlowState state) {
ExecStateConfiguration::isSink(sink) and
state.(ExecState).isFeasibleForSink(sink)
}
override predicate isAdditionalTaintStep(
DataFlow::Node node1, DataFlow::FlowState state1, DataFlow::Node node2,
DataFlow::FlowState state2
predicate isAdditionalFlowStep(
DataFlow::Node node1, FlowState state1, DataFlow::Node node2, FlowState state2
) {
state1 instanceof ConcatState and
state2.(ExecState).getIncomingNode() = node1 and
state2.(ExecState).getOutgoingNode() = node2
}
override predicate isSanitizer(DataFlow::Node node) { isSanitizerImpl(node) }
predicate isBarrier(DataFlow::Node node) { isBarrierImpl(node) }
override predicate isSanitizerOut(DataFlow::Node node) {
predicate isBarrier(DataFlow::Node node, FlowState state) { none() }
predicate isBarrierOut(DataFlow::Node node) {
isSink(node, _) // Prevent duplicates along a call chain, since `shellCommand` will include wrappers
}
}
module ExecTaint = TaintTracking::MakeWithState<ExecTaintConfiguration>;
from
ExecTaintConfiguration conf, DataFlow::PathNode sourceNode, DataFlow::PathNode sinkNode,
string taintCause, string callChain, DataFlow::Node concatResult, Expr command
ExecTaint::PathNode sourceNode, ExecTaint::PathNode sinkNode, string taintCause, string callChain,
DataFlow::Node concatResult, Expr command
where
conf.hasFlowPath(sourceNode, sinkNode) and
ExecTaint::hasFlowPath(sourceNode, sinkNode) and
taintCause = sourceNode.getNode().(FlowSource).getSourceType() and
isSinkImpl(sinkNode.getNode(), command, callChain) and
concatResult = sinkNode.getState().(ExecState).getOutgoingNode()
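Since the hunk above interleaves the removed class-based configuration with its module-based replacement, the sketch below restates just the new pattern in isolation. It follows the DataFlow::ConfigSig / TaintTracking::Make shape shown in this diff; the names MyConfig and MyFlow and the choice of sources and sinks are hypothetical and purely illustrative.

import cpp
import semmle.code.cpp.dataflow.new.DataFlow
import semmle.code.cpp.dataflow.new.TaintTracking

// Hypothetical configuration module: taint from string literals to arguments
// of a call to the global `system` function (illustrative sources/sinks only).
module MyConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) { source.asExpr() instanceof StringLiteral }

  predicate isSink(DataFlow::Node sink) {
    exists(FunctionCall fc |
      fc.getTarget().hasGlobalName("system") and
      sink.asExpr() = fc.getAnArgument()
    )
  }
}

// The configuration module is instantiated with `Make`; configurations that
// carry a flow state implement `DataFlow::StateConfigSig` and use
// `MakeWithState` instead, as `ExecTaintConfiguration` does above.
module MyFlow = TaintTracking::Make<MyConfig>;

from DataFlow::Node source, DataFlow::Node sink
where MyFlow::hasFlow(source, sink)
select sink, "Value from a string literal reaches a system() argument."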

View File

@@ -0,0 +1,7 @@
...
a = getc(f);
if (a < 123) ret = 123/a; // BAD
...
if (a != 0) ret = 123/a; // GOOD
...

View File

@@ -0,0 +1,23 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Possible cases of division by zero when the return value of a function is used in the denominator of a division or remainder operation.</p>
</overview>
<example>
<p>The following example shows a use of a function's return value that can lead to division by zero, and a corrected version that checks the value before dividing.</p>
<sample src="DivideByZeroUsingReturnValue.cpp" />
</example>
<references>
<li>
CERT Coding Standard:
<a href="https://wiki.sei.cmu.edu/confluence/display/c/INT33-C.+Ensure+that+division+and+remainder+operations+do+not+result+in+divide-by-zero+errors">INT33-C. Ensure that division and remainder operations do not result in divide-by-zero errors</a>.
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,274 @@
/**
* @name Divide by zero using return value
* @description Possible cases of division by zero when using the return value of a function in the denominator.
* @kind problem
* @id cpp/divide-by-zero-using-return-value
* @problem.severity warning
* @precision medium
* @tags correctness
* security
* external/cwe/cwe-369
*/
import cpp
import semmle.code.cpp.valuenumbering.GlobalValueNumbering
import semmle.code.cpp.controlflow.Guards
/** Holds if the function `fn` can return a value equal to `val`. */
predicate mayBeReturnValue(Function fn, float val) {
exists(Expr tmpExp, ReturnStmt rs |
tmpExp.getValue().toFloat() = val and
rs.getEnclosingFunction() = fn and
(
globalValueNumber(rs.getExpr()) = globalValueNumber(tmpExp)
or
exists(AssignExpr ae |
ae.getLValue().(VariableAccess).getTarget() =
globalValueNumber(rs.getExpr()).getAnExpr().(VariableAccess).getTarget() and
globalValueNumber(ae.getRValue()) = globalValueNumber(tmpExp)
)
or
exists(Initializer it |
globalValueNumber(it.getExpr()) = globalValueNumber(tmpExp) and
it.getDeclaration().(Variable).getAnAccess().getTarget() =
globalValueNumber(rs.getExpr()).getAnExpr().(VariableAccess).getTarget()
)
)
)
}
/** Holds if the function `fn` can return a value equal to zero. */
predicate mayBeReturnZero(Function fn) {
mayBeReturnValue(fn, 0)
or
fn.hasName([
"iswalpha", "iswlower", "iswprint", "iswspace", "iswblank", "iswupper", "iswcntrl",
"iswctype", "iswalnum", "iswgraph", "iswxdigit", "iswdigit", "iswpunct", "isblank", "isupper",
"isgraph", "isalnum", "ispunct", "islower", "isspace", "isprint", "isxdigit", "iscntrl",
"isdigit", "isalpha", "timespec_get", "feof", "atomic_is_lock_free",
"atomic_compare_exchange", "thrd_equal", "isfinite", "islessequal", "isnan", "isgreater",
"signbit", "isinf", "islessgreater", "isnormal", "isless", "isgreaterequal", "isunordered",
"ferror"
])
or
fn.hasName([
"thrd_sleep", "feenv", "feholdexcept", "feclearexcept", "feexceptflag", "feupdateenv",
"remove", "fflush", "setvbuf", "fgetpos", "fsetpos", "fclose", "rename", "fseek", "raise"
])
or
fn.hasName(["tss_get", "gets"])
or
fn.hasName(["getc", "atoi"])
}
/** Gets a guard condition that compares the expression `bound` with the value `val`, or that is itself an expression with the same global value number as `bound`. */
pragma[inline]
GuardCondition checkByValue(Expr bound, Expr val) {
exists(GuardCondition gc |
(
gc.ensuresEq(bound, val, _, _, _) or
gc.ensuresEq(val, bound, _, _, _) or
gc.ensuresLt(bound, val, _, _, _) or
gc.ensuresLt(val, bound, _, _, _) or
gc = globalValueNumber(bound).getAnExpr()
) and
result = gc
)
}
/** Holds if no comparison between the value returned by a call to `compArg` and the value `valArg` controls `guardExp`, or if the comparisons that do exist do not exclude equality with `valArg`. */
pragma[inline]
predicate compareFunctionWithValue(Expr guardExp, Function compArg, Expr valArg) {
not exists(Expr exp |
exp.getAChild*() = globalValueNumber(compArg.getACallToThisFunction()).getAnExpr() and
checkByValue(exp, valArg).controls(guardExp.getBasicBlock(), _)
)
or
exists(GuardCondition gc |
(
gc.ensuresEq(globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), valArg, 0,
guardExp.getBasicBlock(), true)
or
gc.ensuresEq(valArg, globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), 0,
guardExp.getBasicBlock(), true)
or
gc.ensuresLt(globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), valArg, 0,
guardExp.getBasicBlock(), false)
or
gc.ensuresLt(valArg, globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), 0,
guardExp.getBasicBlock(), false)
)
or
exists(Expr exp |
exp.getValue().toFloat() > valArg.getValue().toFloat() and
gc.ensuresLt(globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), exp, 0,
guardExp.getBasicBlock(), true)
or
exp.getValue().toFloat() < valArg.getValue().toFloat() and
gc.ensuresLt(exp, globalValueNumber(compArg.getACallToThisFunction()).getAnExpr(), 0,
guardExp.getBasicBlock(), true)
)
)
or
valArg.getValue().toFloat() = 0 and
exists(NotExpr ne, IfStmt ifne |
ne.getOperand() = globalValueNumber(compArg.getACallToThisFunction()).getAnExpr() and
ifne.getCondition() = ne and
ifne.getThen().getAChild*() = guardExp
)
}
/** Convenience wrapper around `compareFunctionWithValue`. */
pragma[inline]
predicate checkConditions1(Expr div, Function fn, float changeInt) {
exists(Expr val |
val.getEnclosingFunction() = fn and
val.getValue().toFloat() = changeInt and
compareFunctionWithValue(div, fn, val)
)
}
/** Holds if no comparison between the value of `compArg` and the value `valArg` controls `guardExp`, or if the comparisons that do exist do not exclude equality with `valArg`. */
pragma[inline]
predicate compareExprWithValue(Expr guardExp, Expr compArg, Expr valArg) {
not exists(Expr exp |
exp.getAChild*() = globalValueNumber(compArg).getAnExpr() and
checkByValue(exp, valArg).controls(guardExp.getBasicBlock(), _)
)
or
exists(GuardCondition gc |
(
gc.ensuresEq(globalValueNumber(compArg).getAnExpr(), valArg, 0, guardExp.getBasicBlock(), true)
or
gc.ensuresEq(valArg, globalValueNumber(compArg).getAnExpr(), 0, guardExp.getBasicBlock(), true)
or
gc.ensuresLt(globalValueNumber(compArg).getAnExpr(), valArg, 0, guardExp.getBasicBlock(),
false)
or
gc.ensuresLt(valArg, globalValueNumber(compArg).getAnExpr(), 0, guardExp.getBasicBlock(),
false)
)
or
exists(Expr exp |
exp.getValue().toFloat() > valArg.getValue().toFloat() and
gc.ensuresLt(globalValueNumber(compArg).getAnExpr(), exp, 0, guardExp.getBasicBlock(), true)
or
exp.getValue().toFloat() < valArg.getValue().toFloat() and
gc.ensuresLt(exp, globalValueNumber(compArg).getAnExpr(), 0, guardExp.getBasicBlock(), true)
)
)
or
valArg.getValue().toFloat() = 0 and
exists(NotExpr ne, IfStmt ifne |
ne.getOperand() = globalValueNumber(compArg).getAnExpr() and
ifne.getCondition() = ne and
ifne.getThen().getAChild*() = guardExp
)
}
/** Convenience wrapper around `compareExprWithValue`. */
pragma[inline]
predicate checkConditions2(Expr div, Expr divVal, float changeInt2) {
exists(Expr val |
(
val.getEnclosingFunction() =
div.getEnclosingFunction().getACallToThisFunction().getEnclosingFunction() or
val.getEnclosingFunction() = div.getEnclosingFunction()
) and
val.getValue().toFloat() = changeInt2 and
compareExprWithValue(div, divVal, val)
)
}
/** Gets the value that `e1` must have for the sum or difference `src` (whose other operand is `e2`) to evaluate to zero. */
float getValueOperand(Expr src, Expr e1, Expr e2) {
src.(SubExpr).hasOperands(e1, e2) and
result = e2.getValue().toFloat()
or
src.(AddExpr).hasOperands(e1, e2) and
result = -e2.getValue().toFloat()
}
/** Gets `e1` itself, an operand of `e1` if `e1` is a multiplication, or the left operand of `e1` if `e1` is a division. */
Expr getMulDivOperand(Expr e1) {
result = e1 or
result = e1.(MulExpr).getAnOperand() or
result = e1.(DivExpr).getLeftOperand()
}
/** A division or remainder operation, including the compound assignments `/=` and `%=`. */
class MyDiv extends Expr {
MyDiv() {
this instanceof DivExpr or
this instanceof RemExpr or
this instanceof AssignDivExpr or
this instanceof AssignRemExpr
}
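/** Gets the divisor of this operation: the right operand, or the right-hand side of the compound assignment. */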
Expr getRV() {
result = this.(AssignArithmeticOperation).getRValue() or
result = this.(BinaryArithmeticOperation).getRightOperand()
}
}
from Expr exp, string msg, Function fn, GVN findVal, float changeInt, MyDiv div
where
findVal = globalValueNumber(fn.getACallToThisFunction()) and
(
// Look for divide-by-zero operations possible due to the return value of the function `fn`.
checkConditions1(div, fn, changeInt) and
(
// Function return value can be zero.
mayBeReturnZero(fn) and
getMulDivOperand(globalValueNumber(div.getRV()).getAnExpr()) = findVal.getAnExpr() and
changeInt = 0
or
// Denominator can be sum or difference.
changeInt = getValueOperand(div.getRV(), findVal.getAnExpr(), _) and
mayBeReturnValue(fn, changeInt)
) and
exp = div and
msg =
"Can lead to division by 0, since the function " + fn.getName() + " can return a value " +
changeInt.toString() + "."
or
// Look for division by zero inside a called function `divFn` when the argument passed to it can take a value that makes the denominator zero.
exists(int posArg, Expr divVal, FunctionCall divFc, float changeInt2 |
// Division is associated with the function argument.
exists(Function divFn |
divFn.getParameter(posArg).getAnAccess() = divVal and
divVal.getEnclosingStmt() = div.getEnclosingStmt() and
divFc = divFn.getACallToThisFunction()
) and
(
divVal = div.getRV() and
divFc.getArgument(posArg) != findVal.getAnExpr() and
(
// Function return value can be zero.
mayBeReturnZero(fn) and
getMulDivOperand(globalValueNumber(divFc.getArgument(posArg)).getAnExpr()) =
findVal.getAnExpr() and
changeInt = 0 and
changeInt2 = 0
or
// Denominator can be sum or difference.
changeInt = getValueOperand(divFc.getArgument(posArg), findVal.getAnExpr(), _) and
mayBeReturnValue(fn, changeInt) and
changeInt2 = 0
)
or
// The denominator inside `divFn` is a sum or difference involving the parameter, and the caller passes the return value of `fn` directly as the argument.
changeInt = getValueOperand(div.getRV(), divVal, _) and
changeInt2 = changeInt and
mayBeReturnValue(fn, changeInt) and
divFc.getArgument(posArg) = findVal.getAnExpr()
) and
checkConditions2(div, divVal, changeInt2) and
checkConditions1(divFc, fn, changeInt) and
exp = divFc and
msg =
"Can lead to division by 0, since the function " + fn.getName() + " can return a value " +
changeInt.toString() + "."
)
)
select exp, msg
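The query above relies on two library facilities: globalValueNumber, which lets syntactically different expressions that must have the same runtime value be treated as one, and GuardCondition.ensuresEq / ensuresLt, which describe what a comparison guarantees inside a basic block. The stripped-down sketch below shows that combination in isolation; it is illustrative only, and getSize is a hypothetical function name borrowed from the tests rather than part of this change.

import cpp
import semmle.code.cpp.valuenumbering.GlobalValueNumbering
import semmle.code.cpp.controlflow.Guards

// Illustrative sketch: report divisions whose denominator has the same global
// value number as a call to a (hypothetical) `getSize`, unless some guard
// ensures that value is non-zero in the block containing the division.
from DivExpr div, FunctionCall fc
where
  fc.getTarget().hasGlobalName("getSize") and
  globalValueNumber(div.getRightOperand()) = globalValueNumber(fc) and
  not exists(GuardCondition gc, Expr zero |
    zero.getValue() = "0" and
    gc.ensuresEq(globalValueNumber(fc).getAnExpr(), zero, 0, div.getBasicBlock(), false)
  )
select div, "The denominator may be zero."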

View File

@@ -2,7 +2,7 @@ import cpp
import semmle.code.cpp.dataflow.new.DataFlow
class GetenvSource extends DataFlow::Node {
GetenvSource() { this.asIndirectExpr(1).(FunctionCall).getTarget().hasQualifiedName("getenv") }
GetenvSource() { this.asIndirectExpr(1).(FunctionCall).getTarget().hasGlobalName("getenv") }
}
class GetenvToGethostbynameConfiguration extends DataFlow::Configuration {

View File

@@ -3,7 +3,7 @@ import semmle.code.cpp.dataflow.new.DataFlow
from Function fopen, FunctionCall fc, Expr src, DataFlow::Node source, DataFlow::Node sink
where
fopen.hasQualifiedName("fopen") and
fopen.hasGlobalName("fopen") and
fc.getTarget() = fopen and
source.asIndirectExpr(1) = src and
sink.asIndirectExpr(1) = fc.getArgument(0) and

View File

@@ -7,14 +7,14 @@ class EnvironmentToFileConfiguration extends DataFlow::Configuration {
override predicate isSource(DataFlow::Node source) {
exists(Function getenv |
source.asIndirectExpr(1).(FunctionCall).getTarget() = getenv and
getenv.hasQualifiedName("getenv")
getenv.hasGlobalName("getenv")
)
}
override predicate isSink(DataFlow::Node sink) {
exists(FunctionCall fc |
sink.asIndirectExpr(1) = fc.getArgument(0) and
fc.getTarget().hasQualifiedName("fopen")
fc.getTarget().hasGlobalName("fopen")
)
}
}

View File

@@ -3,7 +3,7 @@ import semmle.code.cpp.dataflow.new.DataFlow
from Function fopen, FunctionCall fc, Parameter p, DataFlow::Node source, DataFlow::Node sink
where
fopen.hasQualifiedName("fopen") and
fopen.hasGlobalName("fopen") and
fc.getTarget() = fopen and
source.asParameter(1) = p and
sink.asIndirectExpr(1) = fc.getArgument(0) and

View File

@@ -2,6 +2,6 @@ import cpp
from Function fopen, FunctionCall fc
where
fopen.hasQualifiedName("fopen") and
fopen.hasGlobalName("fopen") and
fc.getTarget() = fopen
select fc.getArgument(0)

View File

@@ -0,0 +1,27 @@
| test.cpp:47:24:47:31 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:48:15:48:34 | ... / ... | Can lead to division by 0, since the function getSize2 can return a value 0. |
| test.cpp:53:10:53:17 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:65:15:65:22 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:68:15:68:22 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:71:9:71:16 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:74:9:74:16 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:77:21:77:28 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:79:25:79:32 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:81:24:81:31 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:128:10:128:16 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:135:10:135:16 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:141:10:141:23 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:153:12:153:19 | ... / ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:172:3:172:12 | ... /= ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:173:3:173:12 | ... %= ... | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:187:10:187:17 | ... / ... | Can lead to division by 0, since the function getSizeFloat can return a value 0. |
| test.cpp:199:12:199:25 | ... / ... | Can lead to division by 0, since the function getSize can return a value -1. |
| test.cpp:202:12:202:25 | ... / ... | Can lead to division by 0, since the function getSize can return a value 1. |
| test.cpp:205:10:205:23 | ... / ... | Can lead to division by 0, since the function getSize can return a value 1. |
| test.cpp:210:10:210:23 | ... / ... | Can lead to division by 0, since the function getSize can return a value 3. |
| test.cpp:258:3:258:10 | call to badMyDiv | Can lead to division by 0, since the function getSize can return a value 0. |
| test.cpp:259:3:259:10 | call to badMyDiv | Can lead to division by 0, since the function getSize can return a value 2. |
| test.cpp:260:3:260:13 | call to badMySubDiv | Can lead to division by 0, since the function getSize can return a value 3. |
| test.cpp:263:5:263:15 | call to badMySubDiv | Can lead to division by 0, since the function getSize can return a value 3. |
| test.cpp:273:5:273:12 | call to badMyDiv | Can lead to division by 0, since the function getSize can return a value 3. |
| test.cpp:275:5:275:12 | call to badMyDiv | Can lead to division by 0, since the function getSize can return a value -1. |

View File

@@ -0,0 +1 @@
experimental/Security/CWE/CWE-369/DivideByZeroUsingReturnValue.ql

View File

@@ -0,0 +1,278 @@
typedef struct {}
FILE;
int getc(FILE * stream);
int getSize(int type) {
int st;
switch (type) {
case 1:
st = 1;
break;
case 2:
st = 2;
break;
case 3:
st = 3;
break;
case 4:
st = -1;
break;
default:
st = 0;
break;
}
return st;
}
int getSize2(int type) {
int st = 0;
switch (type) {
case 1:
st = 1;
break;
case 2:
st = 2;
break;
case 3:
st = 3;
break;
case 4:
st = -1;
break;
}
return st;
}
int badTestf1(int type, int met) {
int is = getSize(type);
if (met == 1) return 123 / is; // BAD
else return 123 / getSize2(type); // BAD
}
int badTestf2(int type) {
int is;
is = getSize(type);
return 123 / is; // BAD
}
int badTestf3(int type, int met) {
int is;
is = getSize(type);
switch (met) {
case 1:
if (is >= 0) return 123 / is; // BAD [NOT DETECTED]
case 2:
if (0 == is) return 123 / is; // BAD [NOT DETECTED]
case 3:
if (!is & 123 / is) // BAD
return 123;
case 4:
if (!is | 123 / is) // BAD
return 123;
case 5:
if (123 / is || !is) // BAD
return 123;
case 6:
if (123 / is && !is) // BAD
return 123;
case 7:
if (!is) return 123 / is; // BAD
case 8:
if (is > -1) return 123 / is; // BAD
case 9:
if (is < 2) return 123 / is; // BAD
}
if (is != 0) return -1;
if (is == 0) type += 1;
return 123 / is; // BAD [NOT DETECTED]
}
int goodTestf3(int type, int met) {
int is = getSize(type);
if (is == 0) return -1;
switch (met) {
case 1:
if (is < 0) return 123 / is; // GOOD
case 2:
if (!is && 123 / is) // GOOD
return 123;
case 3:
if (!is || 123 / is) // GOOD
return 123;
case 8:
if (is < -1) return 123 / is; // GOOD
case 9:
if (is > 2) return 123 / is; // GOOD
}
return 123 / is;
}
int goodTestf3a(int type, int met) {
int is = getSize(type);
switch (met) {
case 1:
if (is < 0)
return 123 / is; // GOOD
case 2:
if (!is && 123 / is) // GOOD
return 123;
case 3:
if (!is || 123 / is) // GOOD
return 123;
}
return 1;
}
int badTestf4(int type) {
int is = getSize(type);
int d;
d = type * is;
return 123 / d; // BAD
}
int badTestf5(int type) {
int is = getSize(type);
int d;
d = is / type;
return 123 / d; // BAD
}
int badTestf6(int type) {
int is = getSize(type);
int d;
d = is / type;
return type * 123 / d; // BAD
}
int badTestf7(int type, int met) {
int is = getSize(type);
if (is == 0) goto quit;
switch (met) {
case 1:
if (is < 0)
return 123 / is; // GOOD
}
quit:
return 123 / is; // BAD
}
int goodTestf7(int type, int met) {
int is = getSize(type);
if (is == 0) goto quit2;
if (is == 0.) return -1;
switch (met) {
case 1:
if (is < 0.)
return 123 / is; // GOOD
}
return 123 / is; // GOOD
quit2:
return -1;
}
int badTestf8(int type) {
int is = getSize(type);
type /= is; // BAD
type %= is; // BAD
return type;
}
float getSizeFloat(float type) {
float st;
if (type)
st = 1.0;
else
st = 0.0;
return st;
}
float badTestf9(float type) {
float is = getSizeFloat(type);
return 123 / is; // BAD
}
float goodTestf9(float type) {
float is = getSizeFloat(type);
if (is == 0.0) return -1;
return 123 / is; // GOOD
}
int badTestf10(int type) {
int out = type;
int is = getSize(type);
if (is > -2) {
out /= 123 / (is + 1); // BAD
}
if (is > 0) {
return 123 / (is - 1); // BAD
}
if (is <= 0) return 0;
return 123 / (is - 1); // BAD
return 0;
}
int badTestf11(int type) {
int is = getSize(type);
return 123 / (is - 3); // BAD
}
int goodTestf11(int type) {
int is = getSize(type);
if (is > 1) {
return 123 / (is - 1); // GOOD
} else {
return 0;
}
}
int badTestf12(FILE * f) {
int a;
int ret = -1;
a = getc(f);
if (a == 0) ret = 123 / a; // BAD [NOT DETECTED]
return ret;
}
int goodTestf12(FILE * f) {
int a;
int ret = -1;
a = getc(f);
if (a != 0) ret = 123 / a; // GOOD
return ret;
}
int badMyDiv(int type, int is) {
type /= is;
type %= is;
return type;
}
int goodMyDiv(int type, int is) {
if (is == 0) return -1;
type /= is;
type %= is;
return type;
}
int badMySubDiv(int type, int is) {
type /= (is - 3);
type %= (is + 1);
return type;
}
void badTestf13(int type) {
int is = getSize(type);
badMyDiv(type, is); // BAD
badMyDiv(type, is - 2); // BAD
badMySubDiv(type, is); // BAD
goodMyDiv(type, is); // GOOD
if (is < 5)
badMySubDiv(type, is); // BAD
if (is < 0)
badMySubDiv(type, is); // BAD [NOT DETECTED]
if (is > 5)
badMySubDiv(type, is); // GOOD
if (is == 0)
badMyDiv(type, is); // BAD
if (is > 0)
badMyDiv(type, is); // GOOD
if (is < 5)
badMyDiv(type, is - 3); // BAD
if (is < 0)
badMyDiv(type, is + 1); // BAD
if (is > 5)
badMyDiv(type, is - 3); // GOOD
}

View File

@@ -60,8 +60,6 @@ edges
| test.cpp:220:10:220:16 | strncat output argument | test.cpp:222:32:222:38 | command indirection |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
| test.cpp:220:19:220:26 | filename indirection | test.cpp:220:10:220:16 | strncat output argument |
nodes
| test.cpp:15:27:15:30 | argv indirection | semmle.label | argv indirection |
| test.cpp:15:27:15:30 | argv indirection | semmle.label | argv indirection |
@@ -133,6 +131,7 @@ nodes
| test.cpp:220:19:220:26 | filename indirection | semmle.label | filename indirection |
| test.cpp:220:19:220:26 | filename indirection | semmle.label | filename indirection |
| test.cpp:222:32:222:38 | command indirection | semmle.label | command indirection |
| test.cpp:222:32:222:38 | command indirection | semmle.label | command indirection |
subpaths
| test.cpp:196:26:196:33 | filename indirection | test.cpp:186:47:186:54 | filename indirection | test.cpp:188:11:188:17 | strncat output argument | test.cpp:196:10:196:16 | concat output argument |
| test.cpp:196:26:196:33 | filename indirection | test.cpp:186:47:186:54 | filename indirection | test.cpp:188:11:188:17 | strncat output argument | test.cpp:196:10:196:16 | concat output argument |