Merge branch 'main' into js/shared-dataflow-merge-main

This commit is contained in:
Asger F
2024-08-26 12:43:16 +02:00
60 changed files with 816 additions and 677 deletions

View File

@@ -24,5 +24,5 @@ jobs:
extra_args: >
buildifier --all-files 2>&1 ||
(
echo -e "In order to format all bazel files, please run:\n bazel run //misc/bazel:buildifier"; exit 1
echo -e "In order to format all bazel files, please run:\n bazel run //misc/bazel/buildifier"; exit 1
)

View File

@@ -26,7 +26,7 @@ repos:
name: Format bazel files
files: \.(bazel|bzl)
language: system
entry: bazel run //misc/bazel:buildifier
entry: bazel run //misc/bazel/buildifier
pass_filenames: false
# DISABLED: can be enabled by copying this config and installing `pre-commit` with `--config` on the copy

View File

@@ -450,7 +450,7 @@ void test_qualifiers()
b.member = source();
sink(b); // $ ir MISSING: ast
sink(b.member); // $ ast,ir
sink(b.getMember()); // $ ir MISSING: ast
sink(b.getMember()); // $ MISSING: ir ast
c = new MyClass2(0);

View File

@@ -115,8 +115,8 @@ void test_vector_swap() {
v3.swap(v4);
sink(v1);
sink(v2); // $ ir MISSING:ast
sink(v3); // $ ir MISSING:ast
sink(v2); // $ MISSING:ir ast
sink(v3); // $ MISSING:ir ast
sink(v4);
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using Microsoft.CodeAnalysis;
using Semmle.Extraction.Entities;
using Semmle.Util;
namespace Semmle.Extraction.CSharp.Entities
@@ -89,13 +90,21 @@ namespace Semmle.Extraction.CSharp.Entities
trapFile.compilation_finished(this, (float)p.Total.Cpu.TotalSeconds, (float)p.Total.Elapsed.TotalSeconds);
}
public void PopulateAggregatedMessages()
{
    // Emit one compilation_info TRAP row per message group; the groups are
    // maintained by ExtractionMessage (currently keyed by severity).
    foreach (var pair in ExtractionMessage.groupedMessageCounts)
    {
        Context.TrapWriter.Writer.compilation_info(this, $"Extractor message count for group '{pair.Key}'", pair.Value.ToString());
    }
}
// Writes this entity's TRAP id: the compilation's hash code followed by a
// ";compilation" label (presumably distinguishing the entity kind — confirm
// against other WriteId implementations).
public override void WriteId(EscapingTextWriter trapFile)
{
trapFile.Write(hashCode);
trapFile.Write(";compilation");
}
public override Location ReportingLocation => throw new NotImplementedException();
public override Microsoft.CodeAnalysis.Location ReportingLocation => throw new NotImplementedException();
public override bool NeedsPopulation => Context.IsAssemblyScope;

View File

@@ -21,7 +21,7 @@ namespace Semmle.Extraction.CSharp.Entities
public override Microsoft.CodeAnalysis.Location ReportingLocation =>
IsCompilerGeneratedDelegate()
? Symbol.ContainingType.GetSymbolLocation()
: Symbol.GetSymbolLocation();
: BodyDeclaringSymbol.GetSymbolLocation();
public override bool NeedsPopulation => base.NeedsPopulation || IsCompilerGeneratedDelegate();

View File

@@ -250,6 +250,8 @@ namespace Semmle.Extraction.CSharp
public void LogPerformance(Entities.PerformanceMetrics p) => compilationEntity.PopulatePerformance(p);
public void ExtractAggregatedMessages() => compilationEntity.PopulateAggregatedMessages();
#nullable restore warnings
/// <summary>

View File

@@ -458,6 +458,7 @@ namespace Semmle.Extraction.CSharp
sw.Restart();
analyser.PerformExtraction(options.Threads);
analyser.ExtractAggregatedMessages();
sw.Stop();
var cpuTime2 = currentProcess.TotalProcessorTime;
var userTime2 = currentProcess.UserProcessorTime;

View File

@@ -26,6 +26,7 @@ codeql_csharp_library(
],
"//conditions:default": [],
}),
internals_visible_to = ["Semmle.Extraction.CSharp"],
visibility = ["//csharp:__subpackages__"],
deps = [
"//csharp/extractor/Semmle.Util",

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Collections.Concurrent;
using System.IO;
using System.Threading;
using Semmle.Util;
@@ -7,6 +8,8 @@ namespace Semmle.Extraction.Entities
internal class ExtractionMessage : FreshEntity
{
private static readonly int limit = EnvironmentVariables.TryGetExtractorNumberOption<int>("MESSAGE_LIMIT") ?? 10000;
internal static readonly ConcurrentDictionary<string, int> groupedMessageCounts = [];
private static int messageCount = 0;
private readonly Message msg;
@@ -25,6 +28,10 @@ namespace Semmle.Extraction.Entities
protected override void Populate(TextWriter trapFile)
{
// For the time being we're counting the number of messages per severity, we could introduce other groupings in the future
var key = msg.Severity.ToString();
groupedMessageCounts.AddOrUpdate(key, 1, (_, c) => c + 1);
if (!bypassLimit)
{
var val = Interlocked.Increment(ref messageCount);

View File

@@ -5,6 +5,8 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Semmle.Util\Semmle.Util.csproj" />
<InternalsVisibleTo Include="Semmle.Extraction.CSharp" />
</ItemGroup>
<Import Project="..\..\.paket\Paket.Restore.targets" />
</Project>

View File

@@ -7,3 +7,5 @@ extractorMessagesLeachedLimit
compilationInfo
| Compiler diagnostic count for CS0103 | 3.0 |
| Compiler diagnostic count for CS8019 | 7.0 |
| Extractor message count for group 'Error' | 8.0 |
| Extractor message count for group 'Warning' | 1.0 |

View File

@@ -11,7 +11,8 @@ query predicate extractorMessagesLeachedLimit(ExtractorMessage msg) {
query predicate compilationInfo(string key, float value) {
exists(Compilation c, string infoValue |
infoValue = c.getInfo(key) and key.matches("Compiler diagnostic count for%")
infoValue = c.getInfo(key) and
key.matches(["Compiler diagnostic count for%", "Extractor message count for group%"])
|
value = infoValue.toFloat()
)

View File

@@ -4,7 +4,7 @@ import semmle.code.csharp.commons.Diagnostics
query predicate compilationInfo(string key, float value) {
key != "Resolved references" and
key != "Resolved assembly conflicts" and
not key.matches("Compiler diagnostic count for%") and
not key.matches(["Compiler diagnostic count for%", "Extractor message count for group%"]) and
exists(Compilation c, string infoKey, string infoValue | infoValue = c.getInfo(infoKey) |
key = infoKey and
value = infoValue.toFloat()

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The reported location of `partial` methods has been changed from the definition to the implementation part.

View File

@@ -1,504 +1,4 @@
/**
* Provides classes for performing global (inter-procedural)
* content-sensitive data flow analyses.
*
* Unlike `DataFlow::Global`, we allow for data to be stored (possibly nested) inside
* contents of sources and sinks.
* We track flow paths of the form
*
* ```
* source --value-->* node
* (--read--> node --value-->* node)*
* --(non-value|value)-->* node
* (--store--> node --value-->* node)*
* --value-->* sink
* ```
*
* where `--value-->` is a value-preserving flow step, `--read-->` is a read
* step, `--store-->` is a store step, and `--(non-value)-->` is a
* non-value-preserving flow step.
*
* That is, first a sequence of 0 or more reads, followed by 0 or more additional
* steps, followed by 0 or more stores, with value-preserving steps allowed in
* between all other steps.
*/
private import csharp
private import codeql.util.Boolean
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
private import DataFlowImplSpecific::Private as DataFlowPrivate
/**
* An input configuration for content data flow.
*/
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(DataFlow::Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(DataFlow::Node sink);
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrier(DataFlow::Node node) { none() }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*/
default DataFlow::FlowFeature getAFeature() { none() }
/** Gets a limit on the number of reads out of sources and number of stores into sinks. */
default int accessPathLimit() { result = DataFlowPrivate::accessPathLimit() }
/** Holds if `c` is relevant for reads out of sources or stores into sinks. */
default predicate isRelevantContent(DataFlow::ContentSet c) { any() }
}
/**
* Constructs a global content data flow computation.
*/
module Global<ConfigSig ContentConfig> {
private module FlowConfig implements DataFlow::StateConfigSig {
class FlowState = State;
predicate isSource(DataFlow::Node source, FlowState state) {
ContentConfig::isSource(source) and
state.(InitState).decode(true)
}
predicate isSink(DataFlow::Node sink, FlowState state) {
ContentConfig::isSink(sink) and
(
state instanceof InitState or
state instanceof StoreState or
state instanceof ReadState
)
}
predicate isAdditionalFlowStep(
DataFlow::Node node1, FlowState state1, DataFlow::Node node2, FlowState state2
) {
storeStep(node1, state1, _, node2, state2) or
readStep(node1, state1, _, node2, state2) or
additionalStep(node1, state1, node2, state2)
}
predicate isAdditionalFlowStep = ContentConfig::isAdditionalFlowStep/2;
predicate isBarrier = ContentConfig::isBarrier/1;
DataFlow::FlowFeature getAFeature() { result = ContentConfig::getAFeature() }
predicate accessPathLimit = ContentConfig::accessPathLimit/0;
// needed to record reads/stores inside summarized callables
predicate includeHiddenNodes() { any() }
}
private module Flow = DataFlow::GlobalWithState<FlowConfig>;
/**
* Holds if data stored inside `sourceAp` on `source` flows to `sinkAp` inside `sink`
* for this configuration. `preservesValue` indicates whether any of the additional
* flow steps defined by `isAdditionalFlowStep` are needed.
*
* For the source access path, `sourceAp`, the top of the stack represents the content
* that was last read from. That is, if `sourceAp` is `Field1.Field2` (with `Field1`
* being the top of the stack), then there is flow from `source.Field2.Field1`.
*
* For the sink access path, `sinkAp`, the top of the stack represents the content
* that was last stored into. That is, if `sinkAp` is `Field1.Field2` (with `Field1`
* being the top of the stack), then there is flow into `sink.Field1.Field2`.
*/
predicate flow(
DataFlow::Node source, AccessPath sourceAp, DataFlow::Node sink, AccessPath sinkAp,
boolean preservesValue
) {
exists(Flow::PathNode pathSource, Flow::PathNode pathSink |
Flow::flowPath(pathSource, pathSink) and
nodeReaches(pathSource, TAccessPathNil(), TAccessPathNil(), pathSink, sourceAp, sinkAp) and
source = pathSource.getNode() and
sink = pathSink.getNode()
|
pathSink.getState().(InitState).decode(preservesValue)
or
pathSink.getState().(ReadState).decode(_, preservesValue)
or
pathSink.getState().(StoreState).decode(_, preservesValue)
)
}
private newtype TState =
TInitState(Boolean preservesValue) or
TStoreState(int size, Boolean preservesValue) {
size in [1 .. ContentConfig::accessPathLimit()]
} or
TReadState(int size, Boolean preservesValue) { size in [1 .. ContentConfig::accessPathLimit()] }
abstract private class State extends TState {
abstract string toString();
}
/** A flow state representing no reads or stores. */
private class InitState extends State, TInitState {
private boolean preservesValue_;
InitState() { this = TInitState(preservesValue_) }
override string toString() { result = "Init(" + preservesValue_ + ")" }
predicate decode(boolean preservesValue) { preservesValue = preservesValue_ }
}
/** A flow state representing that content has been stored into. */
private class StoreState extends State, TStoreState {
private boolean preservesValue_;
private int size_;
StoreState() { this = TStoreState(size_, preservesValue_) }
override string toString() { result = "StoreState(" + size_ + "," + preservesValue_ + ")" }
predicate decode(int size, boolean preservesValue) {
size = size_ and preservesValue = preservesValue_
}
}
/** A flow state representing that content has been read from. */
private class ReadState extends State, TReadState {
private boolean preservesValue_;
private int size_;
ReadState() { this = TReadState(size_, preservesValue_) }
override string toString() { result = "ReadState(" + size_ + "," + preservesValue_ + ")" }
predicate decode(int size, boolean preservesValue) {
size = size_ and preservesValue = preservesValue_
}
}
private predicate storeStep(
DataFlow::Node node1, State state1, DataFlow::ContentSet c, DataFlow::Node node2,
StoreState state2
) {
exists(boolean preservesValue, int size |
storeSet(node1, c, node2, _, _) and
ContentConfig::isRelevantContent(c) and
state2.decode(size + 1, preservesValue)
|
state1.(InitState).decode(preservesValue) and size = 0
or
state1.(ReadState).decode(_, preservesValue) and size = 0
or
state1.(StoreState).decode(size, preservesValue)
)
}
private predicate readStep(
DataFlow::Node node1, State state1, DataFlow::ContentSet c, DataFlow::Node node2,
ReadState state2
) {
exists(int size |
readSet(node1, c, node2) and
ContentConfig::isRelevantContent(c) and
state2.decode(size + 1, true)
|
state1.(InitState).decode(true) and
size = 0
or
state1.(ReadState).decode(size, true)
)
}
private predicate additionalStep(
DataFlow::Node node1, State state1, DataFlow::Node node2, State state2
) {
ContentConfig::isAdditionalFlowStep(node1, node2) and
(
state1 instanceof InitState and
state2.(InitState).decode(false)
or
exists(int size |
state1.(ReadState).decode(size, _) and
state2.(ReadState).decode(size, false)
)
)
}
private newtype TAccessPath =
TAccessPathNil() or
TAccessPathCons(DataFlow::ContentSet head, AccessPath tail) {
nodeReachesStore(_, _, _, _, head, _, tail)
or
nodeReachesRead(_, _, _, _, head, tail, _)
}
/** An access path. */
class AccessPath extends TAccessPath {
/** Gets the head of this access path, if any. */
DataFlow::ContentSet getHead() { this = TAccessPathCons(result, _) }
/** Gets the tail of this access path, if any. */
AccessPath getTail() { this = TAccessPathCons(_, result) }
/**
* Gets a textual representation of this access path.
*
* Elements are dot-separated, and the head of the stack is
* rendered first.
*/
string toString() {
this = TAccessPathNil() and
result = ""
or
exists(DataFlow::ContentSet head, AccessPath tail |
this = TAccessPathCons(head, tail) and
result = head + "." + tail
)
}
}
/**
* Provides a big-step flow relation, where flow stops at read/store steps that
* must be recorded, and flow via `subpaths` such that reads/stores inside
* summarized callables can be recorded as well.
*/
private module BigStepFlow {
/** Holds if `node` can reach a node that is a sink (in a matching flow state). */
private predicate reachesSink(Flow::PathNode node) {
FlowConfig::isSink(node.getNode(), node.getState())
or
reachesSink(node.getASuccessor())
}
/**
* Holds if the flow step `pred -> succ` should not be allowed to be included
* in the big-step relation.
*/
pragma[nomagic]
private predicate excludeStep(Flow::PathNode pred, Flow::PathNode succ) {
pred.getASuccessor() = succ and
(
// we need to record reads/stores inside summarized callables
Flow::PathGraph::subpaths(pred, _, _, succ)
or
// only allow flow into a summarized callable, as part of the big-step
// relation, when flow can reach a sink without going back out
Flow::PathGraph::subpaths(pred, succ, _, _) and
not reachesSink(succ)
or
// needed to record store steps
storeStep(pred.getNode(), pred.getState(), _, succ.getNode(), succ.getState())
or
// needed to record read steps
readStep(pred.getNode(), pred.getState(), _, succ.getNode(), succ.getState())
)
}
pragma[nomagic]
private DataFlowCallable getEnclosingCallableImpl(Flow::PathNode node) {
result = getNodeEnclosingCallable(node.getNode())
}
pragma[inline]
private DataFlowCallable getEnclosingCallable(Flow::PathNode node) {
pragma[only_bind_into](result) = getEnclosingCallableImpl(pragma[only_bind_out](node))
}
pragma[nomagic]
private predicate bigStepEntry(Flow::PathNode node) {
(
FlowConfig::isSource(node.getNode(), node.getState())
or
excludeStep(_, node)
or
Flow::PathGraph::subpaths(_, node, _, _)
)
}
pragma[nomagic]
private predicate bigStepExit(Flow::PathNode node) {
(
bigStepEntry(node)
or
FlowConfig::isSink(node.getNode(), node.getState())
or
excludeStep(node, _)
or
Flow::PathGraph::subpaths(_, _, node, _)
)
}
pragma[nomagic]
private predicate step(Flow::PathNode pred, Flow::PathNode succ) {
pred.getASuccessor() = succ and
not excludeStep(pred, succ)
}
pragma[nomagic]
private predicate stepRec(Flow::PathNode pred, Flow::PathNode succ) {
step(pred, succ) and
not bigStepEntry(pred)
}
private predicate stepRecPlus(Flow::PathNode n1, Flow::PathNode n2) = fastTC(stepRec/2)(n1, n2)
/**
* Holds if there is flow `pathSucc+(pred) = succ`, and such a flow path does
* not go through any reads/stores that need to be recorded, or summarized
* steps.
*/
pragma[nomagic]
private predicate bigStep(Flow::PathNode pred, Flow::PathNode succ) {
exists(Flow::PathNode mid |
bigStepEntry(pred) and
step(pred, mid)
|
succ = mid
or
stepRecPlus(mid, succ)
) and
bigStepExit(succ)
}
pragma[nomagic]
predicate bigStepNotLocal(Flow::PathNode pred, Flow::PathNode succ) {
bigStep(pred, succ) and
not getEnclosingCallable(pred) = getEnclosingCallable(succ)
}
pragma[nomagic]
predicate bigStepMaybeLocal(Flow::PathNode pred, Flow::PathNode succ) {
bigStep(pred, succ) and
getEnclosingCallable(pred) = getEnclosingCallable(succ)
}
}
/**
* Holds if `source` can reach `node`, having read `reads` from the source and
* written `stores` into `node`.
*
* `source` is either a source from a configuration, in which case `scReads` and
* `scStores` are always empty, or it is the parameter of a summarized callable,
* in which case `scReads` and `scStores` record the reads/stores for a summary
* context, that is, the reads/stores for an argument that can reach the parameter.
*/
pragma[nomagic]
private predicate nodeReaches(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode node,
AccessPath reads, AccessPath stores
) {
node = source and
reads = scReads and
stores = scStores and
(
Flow::flowPath(source, _) and
scReads = TAccessPathNil() and
scStores = TAccessPathNil()
or
// the argument in a sub path can be reached, so we start flow from the sub path
// parameter, while recording the read/store summary context
exists(Flow::PathNode arg |
nodeReachesSubpathArg(_, _, _, arg, scReads, scStores) and
Flow::PathGraph::subpaths(arg, source, _, _)
)
)
or
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
BigStepFlow::bigStepMaybeLocal(mid, node)
)
or
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
BigStepFlow::bigStepNotLocal(mid, node) and
// when flow is not local, we cannot flow back out, so we may stop
// flow early when computing summary flow
Flow::flowPath(source, _) and
scReads = TAccessPathNil() and
scStores = TAccessPathNil()
)
or
// store step
exists(AccessPath storesMid, DataFlow::ContentSet c |
nodeReachesStore(source, scReads, scStores, node, c, reads, storesMid) and
stores = TAccessPathCons(c, storesMid)
)
or
// read step
exists(AccessPath readsMid, DataFlow::ContentSet c |
nodeReachesRead(source, scReads, scStores, node, c, readsMid, stores) and
reads = TAccessPathCons(c, readsMid)
)
or
// flow-through step; match outer stores/reads with inner store/read summary contexts
exists(Flow::PathNode mid, AccessPath innerScReads, AccessPath innerScStores |
nodeReachesSubpathArg(source, scReads, scStores, mid, innerScReads, innerScStores) and
subpathArgReachesOut(mid, innerScReads, innerScStores, node, reads, stores)
)
}
pragma[nomagic]
private predicate nodeReachesStore(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode node,
DataFlow::ContentSet c, AccessPath reads, AccessPath stores
) {
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
storeStep(mid.getNode(), mid.getState(), c, node.getNode(), node.getState()) and
mid.getASuccessor() = node
)
}
pragma[nomagic]
private predicate nodeReachesRead(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode node,
DataFlow::ContentSet c, AccessPath reads, AccessPath stores
) {
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
readStep(mid.getNode(), mid.getState(), c, node.getNode(), node.getState()) and
mid.getASuccessor() = node
)
}
pragma[nomagic]
private predicate nodeReachesSubpathArg(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode arg,
AccessPath reads, AccessPath stores
) {
nodeReaches(source, scReads, scStores, arg, reads, stores) and
Flow::PathGraph::subpaths(arg, _, _, _)
}
pragma[nomagic]
private predicate subpathArgReachesOut(
Flow::PathNode arg, AccessPath scReads, AccessPath scStores, Flow::PathNode out,
AccessPath reads, AccessPath stores
) {
exists(Flow::PathNode source, Flow::PathNode ret |
nodeReaches(source, scReads, scStores, ret, reads, stores) and
Flow::PathGraph::subpaths(arg, source, ret, out)
)
}
}
private import semmle.code.csharp.Location
private import DataFlowImplSpecific
private import codeql.dataflow.internal.ContentDataFlowImpl
import MakeImplContentDataFlow<Location, CsharpDataFlow>

View File

@@ -12,6 +12,7 @@ import DatabaseQuality
predicate compilationInfo(string key, float value) {
not key.matches("Compiler diagnostic count for%") and
not key.matches("Extractor message count for group%") and
exists(Compilation c, string infoKey, string infoValue | infoValue = c.getInfo(infoKey) |
key = infoKey and
value = infoValue.toFloat()
@@ -22,6 +23,16 @@ predicate compilationInfo(string key, float value) {
)
}
predicate compilerDiagnostics(string key, int value) {
key.matches("Compiler diagnostic count for%") and
strictsum(Compilation c | | c.getInfo(key).toInt()) = value
}
predicate extractorMessages(string key, int value) {
key.matches("Extractor message count for group%") and
strictsum(Compilation c | | c.getInfo(key).toInt()) = value
}
predicate fileCount(string key, int value) {
key = "Number of files" and
value = strictcount(File f)
@@ -140,6 +151,8 @@ from string key, float value
where
(
compilationInfo(key, value) or
compilerDiagnostics(key, value) or
extractorMessages(key, value) or
fileCount(key, value) or
fileCountByExtension(key, value) or
totalNumberOfLines(key, value) or

View File

@@ -1,6 +1,6 @@
| Partial.cs:3:18:3:39 | PartialMethodWithBody1 | true |
| Partial.cs:4:18:4:42 | PartialMethodWithoutBody1 | true |
| Partial.cs:5:17:5:23 | Method2 | false |
| Partial.cs:10:18:10:39 | PartialMethodWithBody1 | true |
| Partial.cs:11:17:11:23 | Method3 | false |
| Partial.cs:16:18:16:42 | PartialMethodWithoutBody2 | true |
| Partial.cs:17:17:17:23 | Method4 | false |

View File

@@ -1,6 +1,6 @@
| Partial.cs:1:15:1:26 | TwoPartClass |
| Partial.cs:3:18:3:39 | PartialMethodWithBody1 |
| Partial.cs:4:18:4:42 | PartialMethodWithoutBody1 |
| Partial.cs:8:15:8:26 | TwoPartClass |
| Partial.cs:10:18:10:39 | PartialMethodWithBody1 |
| Partial.cs:14:15:14:33 | OnePartPartialClass |
| Partial.cs:16:18:16:42 | PartialMethodWithoutBody2 |

View File

@@ -1,10 +1,10 @@
| Partial.cs:1:15:1:26 | TwoPartClass | Partial.cs:3:18:3:39 | PartialMethodWithBody1 |
| Partial.cs:1:15:1:26 | TwoPartClass | Partial.cs:4:18:4:42 | PartialMethodWithoutBody1 |
| Partial.cs:1:15:1:26 | TwoPartClass | Partial.cs:5:17:5:23 | Method2 |
| Partial.cs:1:15:1:26 | TwoPartClass | Partial.cs:10:18:10:39 | PartialMethodWithBody1 |
| Partial.cs:1:15:1:26 | TwoPartClass | Partial.cs:11:17:11:23 | Method3 |
| Partial.cs:8:15:8:26 | TwoPartClass | Partial.cs:3:18:3:39 | PartialMethodWithBody1 |
| Partial.cs:8:15:8:26 | TwoPartClass | Partial.cs:4:18:4:42 | PartialMethodWithoutBody1 |
| Partial.cs:8:15:8:26 | TwoPartClass | Partial.cs:5:17:5:23 | Method2 |
| Partial.cs:8:15:8:26 | TwoPartClass | Partial.cs:10:18:10:39 | PartialMethodWithBody1 |
| Partial.cs:8:15:8:26 | TwoPartClass | Partial.cs:11:17:11:23 | Method3 |
| Partial.cs:14:15:14:33 | OnePartPartialClass | Partial.cs:16:18:16:42 | PartialMethodWithoutBody2 |
| Partial.cs:14:15:14:33 | OnePartPartialClass | Partial.cs:17:17:17:23 | Method4 |

View File

@@ -1,3 +1,3 @@
| Partial.cs:3:18:3:39 | PartialMethodWithBody1 | true |
| Partial.cs:4:18:4:42 | PartialMethodWithoutBody1 | false |
| Partial.cs:10:18:10:39 | PartialMethodWithBody1 | true |
| Partial.cs:16:18:16:42 | PartialMethodWithoutBody2 | false |

View File

@@ -1,13 +1,13 @@
Partial.cs:
# 1| [Class] TwoPartClass
# 3| 5: [Method] PartialMethodWithBody1
# 3| -1: [TypeMention] Void
# 10| 4: [BlockStmt] {...}
# 4| 6: [Method] PartialMethodWithoutBody1
# 4| 5: [Method] PartialMethodWithoutBody1
# 4| -1: [TypeMention] Void
# 5| 7: [Method] Method2
# 5| 6: [Method] Method2
# 5| -1: [TypeMention] Void
# 5| 4: [BlockStmt] {...}
# 10| 7: [Method] PartialMethodWithBody1
# 3| -1: [TypeMention] Void
# 10| 4: [BlockStmt] {...}
# 11| 8: [Method] Method3
# 11| -1: [TypeMention] Void
# 11| 4: [BlockStmt] {...}

View File

@@ -47,6 +47,7 @@ codeql_pkg_files(
"//go/extractor/cli/go-autobuilder",
"//go/extractor/cli/go-bootstrap",
"//go/extractor/cli/go-build-runner",
"//go/extractor/cli/go-configure-baseline",
"//go/extractor/cli/go-extractor",
"//go/extractor/cli/go-gen-dbscheme",
"//go/extractor/cli/go-tokenizer",

View File

@@ -1,3 +0,0 @@
{
"paths-ignore": []
}

View File

@@ -1,5 +0,0 @@
{
"paths-ignore": [
"vendor/**"
]
}

View File

@@ -1,6 +1,4 @@
@echo off
if exist vendor\modules.txt (
type "%CODEQL_EXTRACTOR_GO_ROOT%\tools\baseline-config-vendor.json"
) else (
type "%CODEQL_EXTRACTOR_GO_ROOT%\tools\baseline-config-empty.json"
)
type NUL && "%CODEQL_EXTRACTOR_GO_ROOT%/tools/%CODEQL_PLATFORM%/go-configure-baseline.exe"
exit /b %ERRORLEVEL%

View File

@@ -1,7 +1,3 @@
#!/bin/sh
if [ -f vendor/modules.txt ]; then
cat "$CODEQL_EXTRACTOR_GO_ROOT/tools/baseline-config-vendor.json"
else
cat "$CODEQL_EXTRACTOR_GO_ROOT/tools/baseline-config-empty.json"
fi
"$CODEQL_EXTRACTOR_GO_ROOT/tools/$CODEQL_PLATFORM/go-configure-baseline"

View File

@@ -0,0 +1,18 @@
# generated running `bazel run //go/gazelle`, do not edit
load("@rules_go//go:def.bzl", "go_library")
load("//go:rules.bzl", "codeql_go_binary")
go_library(
name = "go-configure-baseline_lib",
srcs = ["go-configure-baseline.go"],
importpath = "github.com/github/codeql-go/extractor/cli/go-configure-baseline",
visibility = ["//visibility:private"],
deps = ["//go/extractor/configurebaseline"],
)
codeql_go_binary(
name = "go-configure-baseline",
embed = [":go-configure-baseline_lib"],
visibility = ["//visibility:public"],
)

View File

@@ -0,0 +1,16 @@
package main
import (
"fmt"
"github.com/github/codeql-go/extractor/configurebaseline"
)
// main prints the baseline configuration for the current directory as JSON on
// stdout, panicking on any error so the caller sees a nonzero exit status.
func main() {
	jsonResult, err := configurebaseline.GetConfigBaselineAsJSON(".")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(jsonResult))
}

View File

@@ -0,0 +1,11 @@
# generated running `bazel run //go/gazelle`, do not edit
load("@rules_go//go:def.bzl", "go_library")
go_library(
name = "configurebaseline",
srcs = ["configurebaseline.go"],
importpath = "github.com/github/codeql-go/extractor/configurebaseline",
visibility = ["//visibility:public"],
deps = ["//go/extractor/util"],
)

View File

@@ -0,0 +1,52 @@
package configurebaseline
import (
"encoding/json"
"io/fs"
"os"
"path"
"path/filepath"
"github.com/github/codeql-go/extractor/util"
)
func fileExists(path string) bool {
stat, err := os.Stat(path)
return err == nil && stat.Mode().IsRegular()
}
// Decides if `dirPath` is a vendor directory by testing whether it is called
// `vendor` and contains a `modules.txt` file.
func isGolangVendorDirectory(dirPath string) bool {
	if filepath.Base(dirPath) != "vendor" {
		return false
	}
	return fileExists(filepath.Join(dirPath, "modules.txt"))
}
// BaselineConfig is the JSON shape expected by CodeQL baseline configuration:
// a list of glob patterns to exclude from the baseline file count.
type BaselineConfig struct {
PathsIgnore []string `json:"paths-ignore"`
}
// GetConfigBaselineAsJSON walks rootDir looking for Go vendor directories and
// returns a JSON baseline config ignoring them. When vendor-directory
// extraction is enabled via the environment, the ignore list is left empty so
// everything counts toward the baseline.
func GetConfigBaselineAsJSON(rootDir string) ([]byte, error) {
	// Non-nil so an empty result marshals as [] rather than null.
	ignored := []string{}
	if !util.IsVendorDirExtractionEnabled() {
		filepath.WalkDir(rootDir, func(dirPath string, d fs.DirEntry, walkErr error) error {
			if walkErr != nil {
				// Ignore any unreadable paths -- if this tool can't see it,
				// very likely it will not be extracted either.
				return nil
			}
			if !isGolangVendorDirectory(dirPath) {
				return nil
			}
			// Note that CodeQL expects a forward-slash-separated path, even on Windows.
			ignored = append(ignored, path.Join(filepath.ToSlash(dirPath), "**"))
			return filepath.SkipDir
		})
	}
	return json.Marshal(BaselineConfig{PathsIgnore: ignored})
}

View File

@@ -199,7 +199,7 @@ func ExtractWithFlags(buildFlags []string, patterns []string) error {
// If CODEQL_EXTRACTOR_GO_EXTRACT_VENDOR_DIRS is "true", we extract `vendor` directories;
// otherwise (the default) is to exclude them from extraction
includeVendor := os.Getenv("CODEQL_EXTRACTOR_GO_EXTRACT_VENDOR_DIRS") == "true"
includeVendor := util.IsVendorDirExtractionEnabled()
if !includeVendor {
excludedDirs = append(excludedDirs, "vendor")
}

View File

@@ -5,6 +5,7 @@ load("@rules_go//go:def.bzl", "go_library", "go_test")
go_library(
name = "util",
srcs = [
"extractvendordirs.go",
"semver.go",
"util.go",
],

View File

@@ -0,0 +1,9 @@
package util
import (
"os"
)
func IsVendorDirExtractionEnabled() bool {
return os.Getenv("CODEQL_EXTRACTOR_GO_EXTRACT_VENDOR_DIRS") == "true"
}

View File

@@ -0,0 +1 @@
package abc

View File

@@ -0,0 +1 @@
package abc

View File

@@ -0,0 +1 @@
package abc

View File

@@ -0,0 +1 @@
package abc

View File

@@ -0,0 +1,9 @@
import os.path
import json
def test(codeql, go):
    """Build a database over ``src`` and check the Go baseline file list.

    Asserts that exactly ``root.go`` and ``c/vendor/cvendor.go`` appear in
    ``baseline-info.json`` (assumes the harness runs with the test directory
    as the working directory -- confirm against the runner).
    """
    codeql.database.init(source_root="src")
    info_file = os.path.join("test-db", "baseline-info.json")
    with open(info_file, "r") as handle:
        info = json.load(handle)
    actual = set(info["languages"]["go"]["files"])
    expected = {"root.go", "c/vendor/cvendor.go"}
    assert actual == expected, "Expected root.go and cvendor.go in baseline"

View File

@@ -0,0 +1,4 @@
---
category: fix
---
* Golang vendor directories not at the root of a repository are now correctly excluded from the baseline Go file count. This means code coverage information will be more accurate.

View File

@@ -213,16 +213,30 @@ predicate interpretModelForTest(QlBuiltins::ExtensionId madId, string model) {
)
}
/**
 * Gets a normalized form of package name `p`: the fixed-version prefix is
 * removed when present, and any major-version suffix (as matched by
 * `majorVersionSuffixRegex()`) is stripped.
 */
bindingset[p]
private string cleanPackage(string p) {
exists(string noPrefix |
p = fixedVersionPrefix() + noPrefix
or
not p = fixedVersionPrefix() + any(string s) and
noPrefix = p
|
result = noPrefix.regexpReplaceAll(majorVersionSuffixRegex(), "")
)
}
private predicate relevantPackage(string package) {
sourceModel(package, _, _, _, _, _, _, _, _, _) or
sinkModel(package, _, _, _, _, _, _, _, _, _) or
summaryModel(package, _, _, _, _, _, _, _, _, _, _)
exists(string p | package = cleanPackage(p) |
sourceModel(p, _, _, _, _, _, _, _, _, _) or
sinkModel(p, _, _, _, _, _, _, _, _, _) or
summaryModel(p, _, _, _, _, _, _, _, _, _, _)
)
}
private predicate packageLink(string shortpkg, string longpkg) {
relevantPackage(shortpkg) and
relevantPackage(longpkg) and
longpkg.prefix(longpkg.indexOf(".")) = shortpkg
longpkg.prefix(longpkg.indexOf("/")) = shortpkg
}
private predicate canonicalPackage(string package) {
@@ -245,26 +259,28 @@ predicate modelCoverage(string package, int pkgs, string kind, string part, int
part = "source" and
n =
strictcount(string subpkg, string type, boolean subtypes, string name, string signature,
string ext, string output, string provenance |
string ext, string output, string provenance, string x |
canonicalPkgLink(package, subpkg) and
sourceModel(subpkg, type, subtypes, name, signature, ext, output, kind, provenance, _)
subpkg = cleanPackage(x) and
sourceModel(x, type, subtypes, name, signature, ext, output, kind, provenance, _)
)
or
part = "sink" and
n =
strictcount(string subpkg, string type, boolean subtypes, string name, string signature,
string ext, string input, string provenance |
string ext, string input, string provenance, string x |
canonicalPkgLink(package, subpkg) and
sinkModel(subpkg, type, subtypes, name, signature, ext, input, kind, provenance, _)
subpkg = cleanPackage(x) and
sinkModel(x, type, subtypes, name, signature, ext, input, kind, provenance, _)
)
or
part = "summary" and
n =
strictcount(string subpkg, string type, boolean subtypes, string name, string signature,
string ext, string input, string output, string provenance |
string ext, string input, string output, string provenance, string x |
canonicalPkgLink(package, subpkg) and
summaryModel(subpkg, type, subtypes, name, signature, ext, input, output, kind, provenance,
_)
subpkg = cleanPackage(x) and
summaryModel(x, type, subtypes, name, signature, ext, input, output, kind, provenance, _)
)
)
}

View File

@@ -36,19 +36,14 @@ edges
| apply.kt:6:28:6:41 | $this$apply : String | apply.kt:6:35:6:38 | this | provenance | |
| apply.kt:7:14:7:25 | taint(...) : String | apply.kt:7:14:7:40 | apply(...) | provenance | MaD:31 |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:7:14:7:14 | l | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:8:14:8:14 | l : List | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:8:14:8:14 | l : List [<element>] : String | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:9:19:9:19 | l : List [<element>] : String | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:10:18:10:18 | s | provenance | |
| list.kt:6:16:6:25 | taint(...) : String | list.kt:6:9:6:9 | l [post update] : List [<element>] : String | provenance | MaD:27 |
| list.kt:8:14:8:14 | l : List | list.kt:8:14:8:17 | get(...) | provenance | MaD:26 |
| list.kt:8:14:8:14 | l : List [<element>] : String | list.kt:8:14:8:17 | get(...) | provenance | MaD:26 |
| list.kt:9:19:9:19 | l : List [<element>] : String | list.kt:10:18:10:18 | s | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:14:14:14:14 | a | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:15:14:15:14 | a : String[] [[]] : String | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:15:14:15:17 | ...[...] | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:16:19:16:19 | a : String[] [[]] : String | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:17:18:17:18 | s | provenance | |
| list.kt:13:25:13:34 | taint(...) : String | list.kt:13:17:13:40 | {...} : String[] [[]] : String | provenance | |
| list.kt:15:14:15:14 | a : String[] [[]] : String | list.kt:15:14:15:17 | ...[...] | provenance | |
| list.kt:16:19:16:19 | a : String[] [[]] : String | list.kt:17:18:17:18 | s | provenance | |
@@ -134,7 +129,6 @@ nodes
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | semmle.label | l [post update] : List [<element>] : String |
| list.kt:6:16:6:25 | taint(...) : String | semmle.label | taint(...) : String |
| list.kt:7:14:7:14 | l | semmle.label | l |
| list.kt:8:14:8:14 | l : List | semmle.label | l : List |
| list.kt:8:14:8:14 | l : List [<element>] : String | semmle.label | l : List [<element>] : String |
| list.kt:8:14:8:17 | get(...) | semmle.label | get(...) |
| list.kt:9:19:9:19 | l : List [<element>] : String | semmle.label | l : List [<element>] : String |

View File

@@ -36,19 +36,14 @@ edges
| apply.kt:6:28:6:41 | $this$apply : String | apply.kt:6:35:6:38 | this | provenance | |
| apply.kt:7:14:7:25 | taint(...) : String | apply.kt:7:14:7:40 | apply(...) | provenance | MaD:31 |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:7:14:7:14 | l | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:8:14:8:14 | l : List | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:8:14:8:14 | l : List [<element>] : String | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:9:19:9:19 | l : List [<element>] : String | provenance | |
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | list.kt:10:18:10:18 | s | provenance | |
| list.kt:6:16:6:25 | taint(...) : String | list.kt:6:9:6:9 | l [post update] : List [<element>] : String | provenance | MaD:27 |
| list.kt:8:14:8:14 | l : List | list.kt:8:14:8:17 | get(...) | provenance | MaD:26 |
| list.kt:8:14:8:14 | l : List [<element>] : String | list.kt:8:14:8:17 | get(...) | provenance | MaD:26 |
| list.kt:9:19:9:19 | l : List [<element>] : String | list.kt:10:18:10:18 | s | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:14:14:14:14 | a | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:15:14:15:14 | a : String[] [[]] : String | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:15:14:15:17 | ...[...] | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:16:19:16:19 | a : String[] [[]] : String | provenance | |
| list.kt:13:17:13:40 | {...} : String[] [[]] : String | list.kt:17:18:17:18 | s | provenance | |
| list.kt:13:25:13:34 | taint(...) : String | list.kt:13:17:13:40 | {...} : String[] [[]] : String | provenance | |
| list.kt:15:14:15:14 | a : String[] [[]] : String | list.kt:15:14:15:17 | ...[...] | provenance | |
| list.kt:16:19:16:19 | a : String[] [[]] : String | list.kt:17:18:17:18 | s | provenance | |
@@ -134,7 +129,6 @@ nodes
| list.kt:6:9:6:9 | l [post update] : List [<element>] : String | semmle.label | l [post update] : List [<element>] : String |
| list.kt:6:16:6:25 | taint(...) : String | semmle.label | taint(...) : String |
| list.kt:7:14:7:14 | l | semmle.label | l |
| list.kt:8:14:8:14 | l : List | semmle.label | l : List |
| list.kt:8:14:8:14 | l : List [<element>] : String | semmle.label | l : List [<element>] : String |
| list.kt:8:14:8:17 | get(...) | semmle.label | get(...) |
| list.kt:9:19:9:19 | l : List [<element>] : String | semmle.label | l : List [<element>] : String |

View File

@@ -6,8 +6,7 @@ edges
| FileService.java:20:31:20:43 | intent : Intent | FileService.java:21:28:21:33 | intent : Intent | provenance | |
| FileService.java:21:28:21:33 | intent : Intent | FileService.java:21:28:21:64 | getStringExtra(...) : String | provenance | MaD:2 |
| FileService.java:21:28:21:64 | getStringExtra(...) : String | FileService.java:25:42:25:50 | localPath : String | provenance | |
| FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] | FileService.java:40:41:40:55 | params : Object[] | provenance | Config |
| FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] [[]] : String | FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] | provenance | |
| FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] [[]] : String | FileService.java:40:41:40:55 | params : Object[] | provenance | Config |
| FileService.java:25:42:25:50 | localPath : String | FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] [[]] : String | provenance | |
| FileService.java:25:42:25:50 | localPath : String | FileService.java:32:13:32:28 | sourceUri : String | provenance | |
| FileService.java:32:13:32:28 | sourceUri : String | FileService.java:35:17:35:25 | sourceUri : String | provenance | |
@@ -33,7 +32,6 @@ nodes
| FileService.java:20:31:20:43 | intent : Intent | semmle.label | intent : Intent |
| FileService.java:21:28:21:33 | intent : Intent | semmle.label | intent : Intent |
| FileService.java:21:28:21:64 | getStringExtra(...) : String | semmle.label | getStringExtra(...) : String |
| FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] | semmle.label | makeParamsToExecute(...) : Object[] |
| FileService.java:25:13:25:51 | makeParamsToExecute(...) : Object[] [[]] : String | semmle.label | makeParamsToExecute(...) : Object[] [[]] : String |
| FileService.java:25:42:25:50 | localPath : String | semmle.label | localPath : String |
| FileService.java:32:13:32:28 | sourceUri : String | semmle.label | sourceUri : String |

View File

@@ -0,0 +1,27 @@
public class A {
String field;
static String source(String name) {
return name;
}
static void sink(Object o) {}
static String step(Object o) {
return "";
}
static Object getA() {
A a = new A();
a.field = source("source");
return a;
}
static void test() {
Object object = getA();
sink(step(object)); // $ hasTaintFlow=source
sink(object);
sink(((A)object).field); // $ hasTaintFlow=source
}
}

View File

@@ -0,0 +1,22 @@
import java
import TestUtilities.InlineFlowTest
module TestConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { DefaultFlowConfig::isSource(source) }
predicate isSink(DataFlow::Node sink) { DefaultFlowConfig::isSink(sink) }
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
exists(MethodCall call |
call.getMethod().getName() = "step" and
node1.asExpr() = call.getArgument(0) and
node2.asExpr() = call
)
}
predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet content) {
isAdditionalFlowStep(node, _) and content instanceof DataFlow::FieldContent
}
}
import TaintFlowTest<TestConfig>

View File

@@ -745,12 +745,9 @@ edges
| ArrayUtilsTest.java:68:27:68:57 | {...} : int[] [[]] : Number | ArrayUtilsTest.java:69:56:69:66 | taintedInts : int[] [[]] : Number | provenance | |
| ArrayUtilsTest.java:68:39:68:55 | taint(...) : Number | ArrayUtilsTest.java:68:27:68:57 | {...} : int[] [[]] : Number | provenance | |
| ArrayUtilsTest.java:69:36:69:67 | toObject(...) : Integer[] [[]] : Number | ArrayUtilsTest.java:70:12:70:27 | taintedBoxedInts | provenance | |
| ArrayUtilsTest.java:69:36:69:67 | toObject(...) : Integer[] [[]] : Number | ArrayUtilsTest.java:71:35:71:50 | taintedBoxedInts : Integer[] | provenance | |
| ArrayUtilsTest.java:69:36:69:67 | toObject(...) : Integer[] [[]] : Number | ArrayUtilsTest.java:71:35:71:50 | taintedBoxedInts : Integer[] [[]] : Number | provenance | |
| ArrayUtilsTest.java:69:56:69:66 | taintedInts : int[] [[]] : Number | ArrayUtilsTest.java:69:36:69:67 | toObject(...) : Integer[] [[]] : Number | provenance | MaD:53 |
| ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) : int[] [[]] : Number | ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) | provenance | |
| ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) : int[] [[]] : Object | ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) | provenance | |
| ArrayUtilsTest.java:71:35:71:50 | taintedBoxedInts : Integer[] | ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) : int[] [[]] : Object | provenance | MaD:54 |
| ArrayUtilsTest.java:71:35:71:50 | taintedBoxedInts : Integer[] [[]] : Number | ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) : int[] [[]] : Number | provenance | MaD:54 |
| ArrayUtilsTest.java:72:12:72:70 | toPrimitive(...) : int[] [[]] : Number | ArrayUtilsTest.java:72:12:72:70 | toPrimitive(...) | provenance | |
| ArrayUtilsTest.java:72:53:72:69 | taint(...) : Number | ArrayUtilsTest.java:72:12:72:70 | toPrimitive(...) : int[] [[]] : Number | provenance | MaD:55 |
@@ -3434,8 +3431,6 @@ nodes
| ArrayUtilsTest.java:70:12:70:27 | taintedBoxedInts | semmle.label | taintedBoxedInts |
| ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) | semmle.label | toPrimitive(...) |
| ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) : int[] [[]] : Number | semmle.label | toPrimitive(...) : int[] [[]] : Number |
| ArrayUtilsTest.java:71:12:71:51 | toPrimitive(...) : int[] [[]] : Object | semmle.label | toPrimitive(...) : int[] [[]] : Object |
| ArrayUtilsTest.java:71:35:71:50 | taintedBoxedInts : Integer[] | semmle.label | taintedBoxedInts : Integer[] |
| ArrayUtilsTest.java:71:35:71:50 | taintedBoxedInts : Integer[] [[]] : Number | semmle.label | taintedBoxedInts : Integer[] [[]] : Number |
| ArrayUtilsTest.java:72:12:72:70 | toPrimitive(...) | semmle.label | toPrimitive(...) |
| ArrayUtilsTest.java:72:12:72:70 | toPrimitive(...) : int[] [[]] : Number | semmle.label | toPrimitive(...) : int[] [[]] : Number |

View File

@@ -1,13 +1,3 @@
load("@buildifier_prebuilt//:rules.bzl", "buildifier")
buildifier(
name = "buildifier",
exclude_patterns = [
"./.git/*",
],
lint_mode = "fix",
)
sh_library(
name = "sh_runfiles",
srcs = ["runfiles.sh"],

View File

@@ -0,0 +1,9 @@
load("@buildifier_prebuilt//:rules.bzl", "buildifier")
buildifier(
name = "buildifier",
exclude_patterns = [
"./.git/*",
],
lint_mode = "fix",
)

View File

@@ -2,7 +2,7 @@
set -eu
. misc/bazel/runfiles.sh
source misc/bazel/runfiles.sh 2>/dev/null || source external/ql~/misc/bazel/runfiles.sh
dest="${2:-$HOME/.local/bin}"

View File

@@ -0,0 +1,502 @@
/**
* Provides classes for performing global (inter-procedural)
* content-sensitive data flow analyses.
*
* Unlike `DataFlow::Global`, we allow for data to be stored (possibly nested) inside
* contents of sources and sinks.
* We track flow paths of the form
*
* ```
* source --value-->* node
* (--read--> node --value-->* node)*
* --(non-value|value)-->* node
* (--store--> node --value-->* node)*
* --value-->* sink
* ```
*
* where `--value-->` is a value-preserving flow step, `--read-->` is a read
* step, `--store-->` is a store step, and `--(non-value)-->` is a
* non-value-preserving flow step.
*
* That is, first a sequence of 0 or more reads, followed by 0 or more additional
* steps, followed by 0 or more stores, with value-preserving steps allowed in
* between all other steps.
*/
private import codeql.dataflow.DataFlow
private import codeql.util.Boolean
private import codeql.util.Location
module MakeImplContentDataFlow<LocationSig Location, InputSig<Location> Lang> {
private import Lang
private import DataFlowMake<Location, Lang>
private import DataFlowImplCommon::MakeImplCommon<Location, Lang>
/**
* An input configuration for content data flow.
*/
signature module ConfigSig {
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source);
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink);
/**
* Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
*/
default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }
/** Holds if data flow into `node` is prohibited. */
default predicate isBarrier(Node node) { none() }
/**
* Gets a data flow configuration feature to add restrictions to the set of
* valid flow paths.
*
* - `FeatureHasSourceCallContext`:
* Assume that sources have some existing call context to disallow
* conflicting return-flow directly following the source.
* - `FeatureHasSinkCallContext`:
* Assume that sinks have some existing call context to disallow
* conflicting argument-to-parameter flow directly preceding the sink.
* - `FeatureEqualSourceSinkCallContext`:
* Implies both of the above and additionally ensures that the entire flow
* path preserves the call context.
*/
default FlowFeature getAFeature() { none() }
/** Gets a limit on the number of reads out of sources and number of stores into sinks. */
default int accessPathLimit() { result = Lang::accessPathLimit() }
/** Holds if `c` is relevant for reads out of sources or stores into sinks. */
default predicate isRelevantContent(ContentSet c) { any() }
}
/**
* Constructs a global content data flow computation.
*/
module Global<ConfigSig ContentConfig> {
private module FlowConfig implements StateConfigSig {
class FlowState = State;
predicate isSource(Node source, FlowState state) {
ContentConfig::isSource(source) and
state.(InitState).decode(true)
}
predicate isSink(Node sink, FlowState state) {
ContentConfig::isSink(sink) and
(
state instanceof InitState or
state instanceof StoreState or
state instanceof ReadState
)
}
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
storeStep(node1, state1, _, node2, state2) or
readStep(node1, state1, _, node2, state2) or
additionalStep(node1, state1, node2, state2)
}
predicate isAdditionalFlowStep = ContentConfig::isAdditionalFlowStep/2;
predicate isBarrier = ContentConfig::isBarrier/1;
FlowFeature getAFeature() { result = ContentConfig::getAFeature() }
predicate accessPathLimit = ContentConfig::accessPathLimit/0;
// needed to record reads/stores inside summarized callables
predicate includeHiddenNodes() { any() }
}
private module Flow = GlobalWithState<FlowConfig>;
/**
* Holds if data stored inside `sourceAp` on `source` flows to `sinkAp` inside `sink`
* for this configuration. `preservesValue` indicates whether any of the additional
* flow steps defined by `isAdditionalFlowStep` are needed.
*
* For the source access path, `sourceAp`, the top of the stack represents the content
* that was last read from. That is, if `sourceAp` is `Field1.Field2` (with `Field1`
* being the top of the stack), then there is flow from `source.Field2.Field1`.
*
* For the sink access path, `sinkAp`, the top of the stack represents the content
* that was last stored into. That is, if `sinkAp` is `Field1.Field2` (with `Field1`
* being the top of the stack), then there is flow into `sink.Field1.Field2`.
*/
predicate flow(
Node source, AccessPath sourceAp, Node sink, AccessPath sinkAp, boolean preservesValue
) {
exists(Flow::PathNode pathSource, Flow::PathNode pathSink |
Flow::flowPath(pathSource, pathSink) and
nodeReaches(pathSource, TAccessPathNil(), TAccessPathNil(), pathSink, sourceAp, sinkAp) and
source = pathSource.getNode() and
sink = pathSink.getNode()
|
pathSink.getState().(InitState).decode(preservesValue)
or
pathSink.getState().(ReadState).decode(_, preservesValue)
or
pathSink.getState().(StoreState).decode(_, preservesValue)
)
}
private newtype TState =
TInitState(Boolean preservesValue) or
TStoreState(int size, Boolean preservesValue) {
size in [1 .. ContentConfig::accessPathLimit()]
} or
TReadState(int size, Boolean preservesValue) {
size in [1 .. ContentConfig::accessPathLimit()]
}
abstract private class State extends TState {
abstract string toString();
}
/** A flow state representing no reads or stores. */
private class InitState extends State, TInitState {
private boolean preservesValue_;
InitState() { this = TInitState(preservesValue_) }
override string toString() { result = "Init(" + preservesValue_ + ")" }
predicate decode(boolean preservesValue) { preservesValue = preservesValue_ }
}
/** A flow state representing that content has been stored into. */
private class StoreState extends State, TStoreState {
private boolean preservesValue_;
private int size_;
StoreState() { this = TStoreState(size_, preservesValue_) }
override string toString() { result = "StoreState(" + size_ + "," + preservesValue_ + ")" }
predicate decode(int size, boolean preservesValue) {
size = size_ and preservesValue = preservesValue_
}
}
/** A flow state representing that content has been read from. */
private class ReadState extends State, TReadState {
private boolean preservesValue_;
private int size_;
ReadState() { this = TReadState(size_, preservesValue_) }
override string toString() { result = "ReadState(" + size_ + "," + preservesValue_ + ")" }
predicate decode(int size, boolean preservesValue) {
size = size_ and preservesValue = preservesValue_
}
}
private predicate storeStep(
Node node1, State state1, ContentSet c, Node node2, StoreState state2
) {
exists(boolean preservesValue, int size |
storeSet(node1, c, node2, _, _) and
ContentConfig::isRelevantContent(c) and
state2.decode(size + 1, preservesValue)
|
state1.(InitState).decode(preservesValue) and size = 0
or
state1.(ReadState).decode(_, preservesValue) and size = 0
or
state1.(StoreState).decode(size, preservesValue)
)
}
private predicate readStep(Node node1, State state1, ContentSet c, Node node2, ReadState state2) {
exists(int size |
readSet(node1, c, node2) and
ContentConfig::isRelevantContent(c) and
state2.decode(size + 1, true)
|
state1.(InitState).decode(true) and
size = 0
or
state1.(ReadState).decode(size, true)
)
}
private predicate additionalStep(Node node1, State state1, Node node2, State state2) {
ContentConfig::isAdditionalFlowStep(node1, node2) and
(
state1 instanceof InitState and
state2.(InitState).decode(false)
or
exists(int size |
state1.(ReadState).decode(size, _) and
state2.(ReadState).decode(size, false)
)
)
}
private newtype TAccessPath =
TAccessPathNil() or
TAccessPathCons(ContentSet head, AccessPath tail) {
nodeReachesStore(_, _, _, _, head, _, tail)
or
nodeReachesRead(_, _, _, _, head, tail, _)
}
/** An access path. */
class AccessPath extends TAccessPath {
/** Gets the head of this access path, if any. */
ContentSet getHead() { this = TAccessPathCons(result, _) }
/** Gets the tail of this access path, if any. */
AccessPath getTail() { this = TAccessPathCons(_, result) }
/**
* Gets a textual representation of this access path.
*
* Elements are dot-separated, and the head of the stack is
* rendered first.
*/
string toString() {
this = TAccessPathNil() and
result = ""
or
exists(ContentSet head, AccessPath tail |
this = TAccessPathCons(head, tail) and
result = head + "." + tail
)
}
}
/**
* Provides a big-step flow relation, where flow stops at read/store steps that
* must be recorded, and flow via `subpaths` such that reads/stores inside
* summarized callables can be recorded as well.
*/
private module BigStepFlow {
private predicate reachesSink(Flow::PathNode node) {
FlowConfig::isSink(node.getNode(), node.getState())
or
reachesSink(node.getASuccessor())
}
/**
* Holds if the flow step `pred -> succ` should not be allowed to be included
* in the big-step relation.
*/
pragma[nomagic]
private predicate excludeStep(Flow::PathNode pred, Flow::PathNode succ) {
pred.getASuccessor() = succ and
(
// we need to record reads/stores inside summarized callables
Flow::PathGraph::subpaths(pred, _, _, succ)
or
// only allow flow into a summarized callable, as part of the big-step
// relation, when flow can reach a sink without going back out
Flow::PathGraph::subpaths(pred, succ, _, _) and
not reachesSink(succ)
or
// needed to record store steps
storeStep(pred.getNode(), pred.getState(), _, succ.getNode(), succ.getState())
or
// needed to record read steps
readStep(pred.getNode(), pred.getState(), _, succ.getNode(), succ.getState())
)
}
pragma[nomagic]
private DataFlowCallable getEnclosingCallableImpl(Flow::PathNode node) {
result = getNodeEnclosingCallable(node.getNode())
}
pragma[inline]
private DataFlowCallable getEnclosingCallable(Flow::PathNode node) {
pragma[only_bind_into](result) = getEnclosingCallableImpl(pragma[only_bind_out](node))
}
pragma[nomagic]
private predicate bigStepEntry(Flow::PathNode node) {
(
FlowConfig::isSource(node.getNode(), node.getState())
or
excludeStep(_, node)
or
Flow::PathGraph::subpaths(_, node, _, _)
)
}
pragma[nomagic]
private predicate bigStepExit(Flow::PathNode node) {
(
bigStepEntry(node)
or
FlowConfig::isSink(node.getNode(), node.getState())
or
excludeStep(node, _)
or
Flow::PathGraph::subpaths(_, _, node, _)
)
}
pragma[nomagic]
private predicate step(Flow::PathNode pred, Flow::PathNode succ) {
pred.getASuccessor() = succ and
not excludeStep(pred, succ)
}
pragma[nomagic]
private predicate stepRec(Flow::PathNode pred, Flow::PathNode succ) {
step(pred, succ) and
not bigStepEntry(pred)
}
private predicate stepRecPlus(Flow::PathNode n1, Flow::PathNode n2) =
fastTC(stepRec/2)(n1, n2)
/**
* Holds if there is flow `pathSucc+(pred) = succ`, and such a flow path does
* not go through any reads/stores that need to be recorded, or summarized
* steps.
*/
pragma[nomagic]
private predicate bigStep(Flow::PathNode pred, Flow::PathNode succ) {
exists(Flow::PathNode mid |
bigStepEntry(pred) and
step(pred, mid)
|
succ = mid
or
stepRecPlus(mid, succ)
) and
bigStepExit(succ)
}
pragma[nomagic]
predicate bigStepNotLocal(Flow::PathNode pred, Flow::PathNode succ) {
bigStep(pred, succ) and
not getEnclosingCallable(pred) = getEnclosingCallable(succ)
}
pragma[nomagic]
predicate bigStepMaybeLocal(Flow::PathNode pred, Flow::PathNode succ) {
bigStep(pred, succ) and
getEnclosingCallable(pred) = getEnclosingCallable(succ)
}
}
/**
* Holds if `source` can reach `node`, having read `reads` from the source and
* written `stores` into `node`.
*
* `source` is either a source from a configuration, in which case `scReads` and
* `scStores` are always empty, or it is the parameter of a summarized callable,
* in which case `scReads` and `scStores` record the reads/stores for a summary
* context, that is, the reads/stores for an argument that can reach the parameter.
*/
pragma[nomagic]
private predicate nodeReaches(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode node,
AccessPath reads, AccessPath stores
) {
node = source and
reads = scReads and
stores = scStores and
(
Flow::flowPath(source, _) and
scReads = TAccessPathNil() and
scStores = TAccessPathNil()
or
// the argument in a sub path can be reached, so we start flow from the sub path
// parameter, while recording the read/store summary context
exists(Flow::PathNode arg |
nodeReachesSubpathArg(_, _, _, arg, scReads, scStores) and
Flow::PathGraph::subpaths(arg, source, _, _)
)
)
or
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
BigStepFlow::bigStepMaybeLocal(mid, node)
)
or
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
BigStepFlow::bigStepNotLocal(mid, node) and
// when flow is not local, we cannot flow back out, so we may stop
// flow early when computing summary flow
Flow::flowPath(source, _) and
scReads = TAccessPathNil() and
scStores = TAccessPathNil()
)
or
// store step
exists(AccessPath storesMid, ContentSet c |
nodeReachesStore(source, scReads, scStores, node, c, reads, storesMid) and
stores = TAccessPathCons(c, storesMid)
)
or
// read step
exists(AccessPath readsMid, ContentSet c |
nodeReachesRead(source, scReads, scStores, node, c, readsMid, stores) and
reads = TAccessPathCons(c, readsMid)
)
or
// flow-through step; match outer stores/reads with inner store/read summary contexts
exists(Flow::PathNode mid, AccessPath innerScReads, AccessPath innerScStores |
nodeReachesSubpathArg(source, scReads, scStores, mid, innerScReads, innerScStores) and
subpathArgReachesOut(mid, innerScReads, innerScStores, node, reads, stores)
)
}
pragma[nomagic]
private predicate nodeReachesStore(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode node,
ContentSet c, AccessPath reads, AccessPath stores
) {
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
storeStep(mid.getNode(), mid.getState(), c, node.getNode(), node.getState()) and
mid.getASuccessor() = node
)
}
pragma[nomagic]
private predicate nodeReachesRead(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode node,
ContentSet c, AccessPath reads, AccessPath stores
) {
exists(Flow::PathNode mid |
nodeReaches(source, scReads, scStores, mid, reads, stores) and
readStep(mid.getNode(), mid.getState(), c, node.getNode(), node.getState()) and
mid.getASuccessor() = node
)
}
pragma[nomagic]
private predicate nodeReachesSubpathArg(
Flow::PathNode source, AccessPath scReads, AccessPath scStores, Flow::PathNode arg,
AccessPath reads, AccessPath stores
) {
nodeReaches(source, scReads, scStores, arg, reads, stores) and
Flow::PathGraph::subpaths(arg, _, _, _)
}
pragma[nomagic]
private predicate subpathArgReachesOut(
Flow::PathNode arg, AccessPath scReads, AccessPath scStores, Flow::PathNode out,
AccessPath reads, AccessPath stores
) {
exists(Flow::PathNode source, Flow::PathNode ret |
nodeReaches(source, scReads, scStores, ret, reads, stores) and
Flow::PathGraph::subpaths(arg, source, ret, out)
)
}
}
}

View File

@@ -173,6 +173,11 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
Node asNode() { this = TNodeNormal(result) }
/** Gets the corresponding Node if this is a normal node or its post-implicit read node. */
Node asNodeOrImplicitRead() {
this = TNodeNormal(result) or this = TNodeImplicitRead(result, true)
}
predicate isImplicitReadNode(Node n, boolean hasRead) { this = TNodeImplicitRead(n, hasRead) }
ParameterNode asParamReturnNode() { this = TParamReturnNode(result, _) }
@@ -241,6 +246,16 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
ReturnKindExt getKind() { result = pos.getKind() }
}
/** If `node` corresponds to a sink, gets the normal node for that sink. */
pragma[nomagic]
private NodeEx toNormalSinkNodeEx(NodeEx node) {
exists(Node n |
node.asNodeOrImplicitRead() = n and
(Config::isSink(n) or Config::isSink(n, _)) and
result.asNode() = n
)
}
private predicate inBarrier(NodeEx node) {
exists(Node n |
node.asNode() = n and
@@ -260,7 +275,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
private predicate outBarrier(NodeEx node) {
exists(Node n |
node.asNode() = n and
node.asNodeOrImplicitRead() = n and
Config::isBarrierOut(n)
|
Config::isSink(n, _)
@@ -272,7 +287,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
pragma[nomagic]
private predicate outBarrier(NodeEx node, FlowState state) {
exists(Node n |
node.asNode() = n and
node.asNodeOrImplicitRead() = n and
Config::isBarrierOut(n, state)
|
Config::isSink(n, state)
@@ -318,7 +333,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
pragma[nomagic]
private predicate sinkNodeWithState(NodeEx node, FlowState state) {
Config::isSink(node.asNode(), state) and
Config::isSink(node.asNodeOrImplicitRead(), state) and
not fullBarrier(node) and
not stateBarrier(node, state)
}
@@ -380,26 +395,19 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
*/
private predicate additionalLocalFlowStep(NodeEx node1, NodeEx node2, string model) {
exists(Node n1, Node n2 |
node1.asNode() = n1 and
node1.asNodeOrImplicitRead() = n1 and
node2.asNode() = n2 and
Config::isAdditionalFlowStep(pragma[only_bind_into](n1), pragma[only_bind_into](n2), model) and
getNodeEnclosingCallable(n1) = getNodeEnclosingCallable(n2) and
stepFilter(node1, node2)
)
or
exists(Node n |
node1.isImplicitReadNode(n, true) and
node2.asNode() = n and
not fullBarrier(node2) and
model = ""
)
}
private predicate additionalLocalStateStep(
NodeEx node1, FlowState s1, NodeEx node2, FlowState s2
) {
exists(Node n1, Node n2 |
node1.asNode() = n1 and
node1.asNodeOrImplicitRead() = n1 and
node2.asNode() = n2 and
Config::isAdditionalFlowStep(pragma[only_bind_into](n1), s1, pragma[only_bind_into](n2), s2) and
getNodeEnclosingCallable(n1) = getNodeEnclosingCallable(n2) and
@@ -425,7 +433,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
*/
private predicate additionalJumpStep(NodeEx node1, NodeEx node2, string model) {
exists(Node n1, Node n2 |
node1.asNode() = n1 and
node1.asNodeOrImplicitRead() = n1 and
node2.asNode() = n2 and
Config::isAdditionalFlowStep(pragma[only_bind_into](n1), pragma[only_bind_into](n2), model) and
getNodeEnclosingCallable(n1) != getNodeEnclosingCallable(n2) and
@@ -436,7 +444,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
private predicate additionalJumpStateStep(NodeEx node1, FlowState s1, NodeEx node2, FlowState s2) {
exists(Node n1, Node n2 |
node1.asNode() = n1 and
node1.asNodeOrImplicitRead() = n1 and
node2.asNode() = n2 and
Config::isAdditionalFlowStep(pragma[only_bind_into](n1), s1, pragma[only_bind_into](n2), s2) and
getNodeEnclosingCallable(n1) != getNodeEnclosingCallable(n2) and
@@ -729,7 +737,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
additional predicate sinkNode(NodeEx node, FlowState state) {
fwdFlow(node) and
fwdFlowState(state) and
Config::isSink(node.asNode())
Config::isSink(node.asNodeOrImplicitRead())
or
fwdFlow(node) and
fwdFlowState(state) and
@@ -1052,7 +1060,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
private predicate sinkModel(NodeEx node, string model) {
sinkNode(node, _) and
exists(Node n | n = node.asNode() |
exists(Node n | n = node.asNodeOrImplicitRead() |
knownSinkModel(n, model)
or
not knownSinkModel(n, _) and model = ""
@@ -2549,7 +2557,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
TPathNodeSink(NodeEx node, FlowState state) {
exists(PathNodeMid sink |
sink.isAtSink() and
node = sink.getNodeEx() and
node = toNormalSinkNodeEx(sink.getNodeEx()) and
state = sink.getState()
)
} or
@@ -2772,7 +2780,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
PathNodeSink projectToSink(string model) {
this.isAtSink() and
sinkModel(node, model) and
result.getNodeEx() = node and
result.getNodeEx() = toNormalSinkNodeEx(node) and
result.getState() = state
}
}
@@ -4851,7 +4859,7 @@ module MakeImpl<LocationSig Location, InputSig<Location> Lang> {
private predicate revSinkNode(NodeEx node, FlowState state) {
sinkNodeWithState(node, state)
or
Config::isSink(node.asNode()) and
Config::isSink(node.asNodeOrImplicitRead()) and
relevantState(state) and
not fullBarrier(node) and
not stateBarrier(node, state)

View File

@@ -9,18 +9,13 @@ edges
| conversions.swift:37:19:37:29 | call to sourceInt() | conversions.swift:37:12:37:30 | call to String.init(_:) | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:40:12:40:12 | arr | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:41:12:41:12 | arr [Collection element] | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:41:12:41:17 | ...[...] | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:42:20:42:20 | arr | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:42:20:42:20 | arr [Collection element] | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:43:20:43:20 | arr | provenance | |
| conversions.swift:39:12:39:30 | [...] [Collection element] | conversions.swift:43:20:43:20 | arr [Collection element] | provenance | |
| conversions.swift:39:19:39:29 | call to sourceInt() | conversions.swift:39:12:39:30 | [...] [Collection element] | provenance | |
| conversions.swift:41:12:41:12 | arr [Collection element] | conversions.swift:41:12:41:17 | ...[...] | provenance | |
| conversions.swift:42:12:42:23 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:42:12:42:23 | call to Array<Element>.init(_:) | provenance | |
| conversions.swift:42:20:42:20 | arr | conversions.swift:42:12:42:23 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:42:20:42:20 | arr [Collection element] | conversions.swift:42:12:42:23 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:43:12:43:23 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:43:12:43:26 | ...[...] | provenance | |
| conversions.swift:43:20:43:20 | arr | conversions.swift:43:12:43:23 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:43:20:43:20 | arr [Collection element] | conversions.swift:43:12:43:23 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:44:12:44:39 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:44:12:44:39 | call to Array<Element>.init(_:) | provenance | |
| conversions.swift:44:20:44:33 | call to sourceString() | conversions.swift:44:20:44:35 | .utf8 | provenance | |
@@ -106,32 +101,23 @@ edges
| conversions.swift:171:14:171:33 | call to sourceArray(_:) | conversions.swift:185:31:185:31 | arr1 | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:174:13:174:13 | arr2 | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:176:13:176:13 | arr2 [Collection element] | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:176:13:176:19 | ...[...] | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:179:25:179:25 | arr2 | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:179:25:179:25 | arr2 [Collection element] | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:186:31:186:31 | arr2 | provenance | |
| conversions.swift:172:14:172:26 | [...] [Collection element] | conversions.swift:186:31:186:31 | arr2 [Collection element] | provenance | |
| conversions.swift:172:15:172:25 | call to sourceInt() | conversions.swift:172:14:172:26 | [...] [Collection element] | provenance | |
| conversions.swift:176:13:176:13 | arr2 [Collection element] | conversions.swift:176:13:176:19 | ...[...] | provenance | |
| conversions.swift:178:19:178:29 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:180:13:180:13 | arr1b | provenance | |
| conversions.swift:178:19:178:29 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:182:13:182:13 | arr1b [Collection element] | provenance | |
| conversions.swift:178:19:178:29 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:182:13:182:20 | ...[...] | provenance | |
| conversions.swift:178:25:178:25 | arr1 | conversions.swift:178:19:178:29 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:179:19:179:29 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:181:13:181:13 | arr2b | provenance | |
| conversions.swift:179:19:179:29 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:183:13:183:13 | arr2b [Collection element] | provenance | |
| conversions.swift:179:19:179:29 | call to Array<Element>.init(_:) [Collection element] | conversions.swift:183:13:183:20 | ...[...] | provenance | |
| conversions.swift:179:25:179:25 | arr2 | conversions.swift:179:19:179:29 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:179:25:179:25 | arr2 [Collection element] | conversions.swift:179:19:179:29 | call to Array<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:182:13:182:13 | arr1b [Collection element] | conversions.swift:182:13:182:20 | ...[...] | provenance | |
| conversions.swift:183:13:183:13 | arr2b [Collection element] | conversions.swift:183:13:183:20 | ...[...] | provenance | |
| conversions.swift:185:15:185:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | conversions.swift:187:13:187:13 | arr1c | provenance | |
| conversions.swift:185:15:185:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | conversions.swift:189:13:189:13 | arr1c [Collection element] | provenance | |
| conversions.swift:185:15:185:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | conversions.swift:189:13:189:20 | ...[...] | provenance | |
| conversions.swift:185:31:185:31 | arr1 | conversions.swift:185:15:185:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:186:15:186:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | conversions.swift:188:13:188:13 | arr2c | provenance | |
| conversions.swift:186:15:186:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | conversions.swift:190:13:190:13 | arr2c [Collection element] | provenance | |
| conversions.swift:186:15:186:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | conversions.swift:190:13:190:20 | ...[...] | provenance | |
| conversions.swift:186:31:186:31 | arr2 | conversions.swift:186:15:186:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:186:31:186:31 | arr2 [Collection element] | conversions.swift:186:15:186:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | provenance | |
| conversions.swift:189:13:189:13 | arr1c [Collection element] | conversions.swift:189:13:189:20 | ...[...] | provenance | |
| conversions.swift:190:13:190:13 | arr2c [Collection element] | conversions.swift:190:13:190:20 | ...[...] | provenance | |
@@ -277,11 +263,9 @@ nodes
| conversions.swift:41:12:41:17 | ...[...] | semmle.label | ...[...] |
| conversions.swift:42:12:42:23 | call to Array<Element>.init(_:) | semmle.label | call to Array<Element>.init(_:) |
| conversions.swift:42:12:42:23 | call to Array<Element>.init(_:) [Collection element] | semmle.label | call to Array<Element>.init(_:) [Collection element] |
| conversions.swift:42:20:42:20 | arr | semmle.label | arr |
| conversions.swift:42:20:42:20 | arr [Collection element] | semmle.label | arr [Collection element] |
| conversions.swift:43:12:43:23 | call to Array<Element>.init(_:) [Collection element] | semmle.label | call to Array<Element>.init(_:) [Collection element] |
| conversions.swift:43:12:43:26 | ...[...] | semmle.label | ...[...] |
| conversions.swift:43:20:43:20 | arr | semmle.label | arr |
| conversions.swift:43:20:43:20 | arr [Collection element] | semmle.label | arr [Collection element] |
| conversions.swift:44:12:44:39 | call to Array<Element>.init(_:) | semmle.label | call to Array<Element>.init(_:) |
| conversions.swift:44:12:44:39 | call to Array<Element>.init(_:) [Collection element] | semmle.label | call to Array<Element>.init(_:) [Collection element] |
@@ -409,7 +393,6 @@ nodes
| conversions.swift:178:19:178:29 | call to Array<Element>.init(_:) [Collection element] | semmle.label | call to Array<Element>.init(_:) [Collection element] |
| conversions.swift:178:25:178:25 | arr1 | semmle.label | arr1 |
| conversions.swift:179:19:179:29 | call to Array<Element>.init(_:) [Collection element] | semmle.label | call to Array<Element>.init(_:) [Collection element] |
| conversions.swift:179:25:179:25 | arr2 | semmle.label | arr2 |
| conversions.swift:179:25:179:25 | arr2 [Collection element] | semmle.label | arr2 [Collection element] |
| conversions.swift:180:13:180:13 | arr1b | semmle.label | arr1b |
| conversions.swift:181:13:181:13 | arr2b | semmle.label | arr2b |
@@ -420,7 +403,6 @@ nodes
| conversions.swift:185:15:185:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | semmle.label | call to ContiguousArray<Element>.init(_:) [Collection element] |
| conversions.swift:185:31:185:31 | arr1 | semmle.label | arr1 |
| conversions.swift:186:15:186:35 | call to ContiguousArray<Element>.init(_:) [Collection element] | semmle.label | call to ContiguousArray<Element>.init(_:) [Collection element] |
| conversions.swift:186:31:186:31 | arr2 | semmle.label | arr2 |
| conversions.swift:186:31:186:31 | arr2 [Collection element] | semmle.label | arr2 [Collection element] |
| conversions.swift:187:13:187:13 | arr1c | semmle.label | arr1c |
| conversions.swift:188:13:188:13 | arr2c | semmle.label | arr2c |

View File

@@ -28,7 +28,7 @@ func testSet(ix: Int) {
sink(arg: taintedSet.max()!) // $ tainted=t1
sink(arg: taintedSet.firstIndex(of: source("t2"))!)
sink(arg: taintedSet[taintedSet.firstIndex(of: source("t3"))!]) // $ tainted=t1
sink(arg: taintedSet.first!) // $ tainted=t1
sink(arg: taintedSet.first!) // $ MISSING: tainted=t1
for elem in taintedSet {
sink(arg: elem) // $ tainted=t1
}
@@ -100,7 +100,7 @@ func testSet(ix: Int) {
sink(arg: taintedSet.sorted().randomElement()!) // $ tainted=t1
sink(arg: taintedSet.shuffled().randomElement()!) // $ tainted=t1
sink(arg: taintedSet.lazy[taintedSet.firstIndex(of: source("t11"))!]) // $ tainted=t1
sink(arg: taintedSet.lazy[taintedSet.firstIndex(of: source("t11"))!]) // $ MISSING: tainted=t1
var it = taintedSet.makeIterator()
sink(arg: it.next()!) // $ tainted=t1

View File

@@ -41,16 +41,14 @@ edges
| UnsafeJsEval.swift:286:51:286:51 | stringBytes [Collection element] | UnsafeJsEval.swift:287:60:287:60 | stringBytes [Collection element] | provenance | |
| UnsafeJsEval.swift:287:16:287:98 | call to JSStringRetain(_:) | UnsafeJsEval.swift:291:17:291:17 | jsstr | provenance | |
| UnsafeJsEval.swift:287:31:287:97 | call to JSStringCreateWithCharacters(_:_:) | UnsafeJsEval.swift:287:16:287:98 | call to JSStringRetain(_:) | provenance | |
| UnsafeJsEval.swift:287:60:287:60 | stringBytes | UnsafeJsEval.swift:287:60:287:72 | .baseAddress | provenance | Config |
| UnsafeJsEval.swift:287:60:287:60 | stringBytes [Collection element] | UnsafeJsEval.swift:287:60:287:60 | stringBytes | provenance | |
| UnsafeJsEval.swift:287:60:287:60 | stringBytes [Collection element] | UnsafeJsEval.swift:287:60:287:72 | .baseAddress | provenance | Config |
| UnsafeJsEval.swift:287:60:287:72 | .baseAddress | UnsafeJsEval.swift:287:31:287:97 | call to JSStringCreateWithCharacters(_:_:) | provenance | |
| UnsafeJsEval.swift:299:13:299:13 | string | UnsafeJsEval.swift:300:3:300:10 | .utf8CString | provenance | |
| UnsafeJsEval.swift:300:3:300:10 | .utf8CString | UnsafeJsEval.swift:300:48:300:48 | stringBytes [Collection element] | provenance | |
| UnsafeJsEval.swift:300:48:300:48 | stringBytes [Collection element] | UnsafeJsEval.swift:301:61:301:61 | stringBytes [Collection element] | provenance | |
| UnsafeJsEval.swift:301:16:301:85 | call to JSStringRetain(_:) | UnsafeJsEval.swift:305:17:305:17 | jsstr | provenance | |
| UnsafeJsEval.swift:301:31:301:84 | call to JSStringCreateWithUTF8CString(_:) | UnsafeJsEval.swift:301:16:301:85 | call to JSStringRetain(_:) | provenance | |
| UnsafeJsEval.swift:301:61:301:61 | stringBytes | UnsafeJsEval.swift:301:61:301:73 | .baseAddress | provenance | Config |
| UnsafeJsEval.swift:301:61:301:61 | stringBytes [Collection element] | UnsafeJsEval.swift:301:61:301:61 | stringBytes | provenance | |
| UnsafeJsEval.swift:301:61:301:61 | stringBytes [Collection element] | UnsafeJsEval.swift:301:61:301:73 | .baseAddress | provenance | Config |
| UnsafeJsEval.swift:301:61:301:73 | .baseAddress | UnsafeJsEval.swift:301:31:301:84 | call to JSStringCreateWithUTF8CString(_:) | provenance | |
| UnsafeJsEval.swift:318:24:318:87 | call to String.init(contentsOf:) | UnsafeJsEval.swift:320:44:320:74 | ... .+(_:_:) ... | provenance | |
nodes
@@ -80,7 +78,6 @@ nodes
| UnsafeJsEval.swift:286:51:286:51 | stringBytes [Collection element] | semmle.label | stringBytes [Collection element] |
| UnsafeJsEval.swift:287:16:287:98 | call to JSStringRetain(_:) | semmle.label | call to JSStringRetain(_:) |
| UnsafeJsEval.swift:287:31:287:97 | call to JSStringCreateWithCharacters(_:_:) | semmle.label | call to JSStringCreateWithCharacters(_:_:) |
| UnsafeJsEval.swift:287:60:287:60 | stringBytes | semmle.label | stringBytes |
| UnsafeJsEval.swift:287:60:287:60 | stringBytes [Collection element] | semmle.label | stringBytes [Collection element] |
| UnsafeJsEval.swift:287:60:287:72 | .baseAddress | semmle.label | .baseAddress |
| UnsafeJsEval.swift:291:17:291:17 | jsstr | semmle.label | jsstr |
@@ -89,7 +86,6 @@ nodes
| UnsafeJsEval.swift:300:48:300:48 | stringBytes [Collection element] | semmle.label | stringBytes [Collection element] |
| UnsafeJsEval.swift:301:16:301:85 | call to JSStringRetain(_:) | semmle.label | call to JSStringRetain(_:) |
| UnsafeJsEval.swift:301:31:301:84 | call to JSStringCreateWithUTF8CString(_:) | semmle.label | call to JSStringCreateWithUTF8CString(_:) |
| UnsafeJsEval.swift:301:61:301:61 | stringBytes | semmle.label | stringBytes |
| UnsafeJsEval.swift:301:61:301:61 | stringBytes [Collection element] | semmle.label | stringBytes [Collection element] |
| UnsafeJsEval.swift:301:61:301:73 | .baseAddress | semmle.label | .baseAddress |
| UnsafeJsEval.swift:305:17:305:17 | jsstr | semmle.label | jsstr |

View File

@@ -26,10 +26,7 @@ edges
| cryptoswift.swift:94:18:94:36 | call to getConstantString() | cryptoswift.swift:155:26:155:26 | keyString | provenance | |
| cryptoswift.swift:94:18:94:36 | call to getConstantString() | cryptoswift.swift:164:24:164:24 | keyString | provenance | |
| cryptoswift.swift:94:18:94:36 | call to getConstantString() | cryptoswift.swift:166:24:166:24 | keyString | provenance | |
| file://:0:0:0:0 | [post] self | misc.swift:30:7:30:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [encryptionKey] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [encryptionKey] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [encryptionKey] | misc.swift:30:7:30:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [encryptionKey] | misc.swift:30:7:30:7 | self [Return] [encryptionKey] | provenance | |
| file://:0:0:0:0 | value | file://:0:0:0:0 | [post] self [encryptionKey] | provenance | |
| grdb.swift:21:20:21:20 | abc123 | grdb.swift:27:23:27:23 | constString | provenance | |
@@ -45,11 +42,9 @@ edges
| misc.swift:57:24:57:24 | abcdef123456 | misc.swift:57:19:57:38 | call to Data.init(_:) | provenance | |
| misc.swift:66:2:66:2 | [post] config [encryptionKey] | misc.swift:66:2:66:2 | [post] config | provenance | |
| misc.swift:66:25:66:25 | myConstKey | misc.swift:30:7:30:7 | value | provenance | |
| misc.swift:66:25:66:25 | myConstKey | misc.swift:66:2:66:2 | [post] config | provenance | |
| misc.swift:66:25:66:25 | myConstKey | misc.swift:66:2:66:2 | [post] config [encryptionKey] | provenance | |
| misc.swift:70:2:70:18 | [post] getter for .config [encryptionKey] | misc.swift:70:2:70:18 | [post] getter for .config | provenance | |
| misc.swift:70:41:70:41 | myConstKey | misc.swift:30:7:30:7 | value | provenance | |
| misc.swift:70:41:70:41 | myConstKey | misc.swift:70:2:70:18 | [post] getter for .config | provenance | |
| misc.swift:70:41:70:41 | myConstKey | misc.swift:70:2:70:18 | [post] getter for .config [encryptionKey] | provenance | |
| misc.swift:73:14:73:20 | k1 | misc.swift:76:26:76:29 | .utf8 | provenance | |
| misc.swift:73:28:73:34 | k2 | misc.swift:77:26:77:29 | .utf8 | provenance | |
@@ -117,7 +112,6 @@ nodes
| cryptoswift.swift:165:24:165:24 | key | semmle.label | key |
| cryptoswift.swift:166:24:166:24 | keyString | semmle.label | keyString |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self [encryptionKey] | semmle.label | [post] self [encryptionKey] |
| file://:0:0:0:0 | value | semmle.label | value |
| grdb.swift:21:20:21:20 | abc123 | semmle.label | abc123 |
@@ -128,7 +122,6 @@ nodes
| grdb.swift:29:23:29:23 | constData | semmle.label | constData |
| grdb.swift:31:26:31:26 | constString | semmle.label | constString |
| grdb.swift:33:26:33:26 | constData | semmle.label | constData |
| misc.swift:30:7:30:7 | self [Return] | semmle.label | self [Return] |
| misc.swift:30:7:30:7 | self [Return] [encryptionKey] | semmle.label | self [Return] [encryptionKey] |
| misc.swift:30:7:30:7 | value | semmle.label | value |
| misc.swift:57:19:57:38 | call to Data.init(_:) | semmle.label | call to Data.init(_:) |
@@ -175,9 +168,7 @@ nodes
| sqlite3_c_api.swift:49:36:49:36 | buffer | semmle.label | buffer |
| sqlite3_c_api.swift:50:38:50:38 | buffer | semmle.label | buffer |
subpaths
| misc.swift:66:25:66:25 | myConstKey | misc.swift:30:7:30:7 | value | misc.swift:30:7:30:7 | self [Return] | misc.swift:66:2:66:2 | [post] config |
| misc.swift:66:25:66:25 | myConstKey | misc.swift:30:7:30:7 | value | misc.swift:30:7:30:7 | self [Return] [encryptionKey] | misc.swift:66:2:66:2 | [post] config [encryptionKey] |
| misc.swift:70:41:70:41 | myConstKey | misc.swift:30:7:30:7 | value | misc.swift:30:7:30:7 | self [Return] | misc.swift:70:2:70:18 | [post] getter for .config |
| misc.swift:70:41:70:41 | myConstKey | misc.swift:30:7:30:7 | value | misc.swift:30:7:30:7 | self [Return] [encryptionKey] | misc.swift:70:2:70:18 | [post] getter for .config [encryptionKey] |
#select
| SQLite.swift:43:13:43:13 | hardcoded_key | SQLite.swift:43:13:43:13 | hardcoded_key | SQLite.swift:43:13:43:13 | hardcoded_key | The key 'hardcoded_key' has been initialized with hard-coded values from $@. | SQLite.swift:43:13:43:13 | hardcoded_key | hardcoded_key |

View File

@@ -5,33 +5,26 @@ edges
| InsecureTLS.swift:23:7:23:7 | value | file://:0:0:0:0 | value | provenance | |
| InsecureTLS.swift:40:3:40:3 | [post] config [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:40:3:40:3 | [post] config | provenance | |
| InsecureTLS.swift:40:47:40:70 | .TLSv10 | InsecureTLS.swift:19:7:19:7 | value | provenance | |
| InsecureTLS.swift:40:47:40:70 | .TLSv10 | InsecureTLS.swift:40:3:40:3 | [post] config | provenance | |
| InsecureTLS.swift:40:47:40:70 | .TLSv10 | InsecureTLS.swift:40:3:40:3 | [post] config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:45:3:45:3 | [post] config [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:45:3:45:3 | [post] config | provenance | |
| InsecureTLS.swift:45:47:45:70 | .TLSv11 | InsecureTLS.swift:19:7:19:7 | value | provenance | |
| InsecureTLS.swift:45:47:45:70 | .TLSv11 | InsecureTLS.swift:45:3:45:3 | [post] config | provenance | |
| InsecureTLS.swift:45:47:45:70 | .TLSv11 | InsecureTLS.swift:45:3:45:3 | [post] config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:57:3:57:3 | [post] config [tlsMaximumSupportedProtocolVersion] | InsecureTLS.swift:57:3:57:3 | [post] config | provenance | |
| InsecureTLS.swift:57:47:57:70 | .TLSv10 | InsecureTLS.swift:20:7:20:7 | value | provenance | |
| InsecureTLS.swift:57:47:57:70 | .TLSv10 | InsecureTLS.swift:57:3:57:3 | [post] config | provenance | |
| InsecureTLS.swift:57:47:57:70 | .TLSv10 | InsecureTLS.swift:57:3:57:3 | [post] config [tlsMaximumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:64:3:64:3 | [post] config [tlsMinimumSupportedProtocol] | InsecureTLS.swift:64:3:64:3 | [post] config | provenance | |
| InsecureTLS.swift:64:40:64:52 | .tlsProtocol10 | InsecureTLS.swift:22:7:22:7 | value | provenance | |
| InsecureTLS.swift:64:40:64:52 | .tlsProtocol10 | InsecureTLS.swift:64:3:64:3 | [post] config | provenance | |
| InsecureTLS.swift:64:40:64:52 | .tlsProtocol10 | InsecureTLS.swift:64:3:64:3 | [post] config [tlsMinimumSupportedProtocol] | provenance | |
| InsecureTLS.swift:76:3:76:3 | [post] config [tlsMaximumSupportedProtocol] | InsecureTLS.swift:76:3:76:3 | [post] config | provenance | |
| InsecureTLS.swift:76:40:76:52 | .tlsProtocol10 | InsecureTLS.swift:23:7:23:7 | value | provenance | |
| InsecureTLS.swift:76:40:76:52 | .tlsProtocol10 | InsecureTLS.swift:76:3:76:3 | [post] config | provenance | |
| InsecureTLS.swift:76:40:76:52 | .tlsProtocol10 | InsecureTLS.swift:76:3:76:3 | [post] config [tlsMaximumSupportedProtocol] | provenance | |
| InsecureTLS.swift:102:10:102:33 | .TLSv10 | InsecureTLS.swift:111:47:111:64 | call to getBadTLSVersion() | provenance | |
| InsecureTLS.swift:111:3:111:3 | [post] config [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:111:3:111:3 | [post] config | provenance | |
| InsecureTLS.swift:111:47:111:64 | call to getBadTLSVersion() | InsecureTLS.swift:19:7:19:7 | value | provenance | |
| InsecureTLS.swift:111:47:111:64 | call to getBadTLSVersion() | InsecureTLS.swift:111:3:111:3 | [post] config | provenance | |
| InsecureTLS.swift:111:47:111:64 | call to getBadTLSVersion() | InsecureTLS.swift:111:3:111:3 | [post] config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:121:55:121:66 | version | InsecureTLS.swift:122:47:122:47 | version | provenance | |
| InsecureTLS.swift:122:3:122:3 | [post] config [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:122:3:122:3 | [post] config | provenance | |
| InsecureTLS.swift:122:47:122:47 | version | InsecureTLS.swift:19:7:19:7 | value | provenance | |
| InsecureTLS.swift:122:47:122:47 | version | InsecureTLS.swift:122:3:122:3 | [post] config | provenance | |
| InsecureTLS.swift:122:47:122:47 | version | InsecureTLS.swift:122:3:122:3 | [post] config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:127:25:127:48 | .TLSv11 | InsecureTLS.swift:121:55:121:66 | version | provenance | |
| InsecureTLS.swift:158:7:158:7 | self [TLSVersion] | file://:0:0:0:0 | self [TLSVersion] | provenance | |
@@ -43,11 +36,9 @@ edges
| InsecureTLS.swift:165:47:165:47 | def [TLSVersion] | InsecureTLS.swift:158:7:158:7 | self [TLSVersion] | provenance | |
| InsecureTLS.swift:165:47:165:47 | def [TLSVersion] | InsecureTLS.swift:165:47:165:51 | .TLSVersion | provenance | |
| InsecureTLS.swift:165:47:165:51 | .TLSVersion | InsecureTLS.swift:19:7:19:7 | value | provenance | |
| InsecureTLS.swift:165:47:165:51 | .TLSVersion | InsecureTLS.swift:165:3:165:3 | [post] config | provenance | |
| InsecureTLS.swift:165:47:165:51 | .TLSVersion | InsecureTLS.swift:165:3:165:3 | [post] config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:181:3:181:9 | [post] getter for .config [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:181:3:181:9 | [post] getter for .config | provenance | |
| InsecureTLS.swift:181:53:181:76 | .TLSv10 | InsecureTLS.swift:19:7:19:7 | value | provenance | |
| InsecureTLS.swift:181:53:181:76 | .TLSv10 | InsecureTLS.swift:181:3:181:9 | [post] getter for .config | provenance | |
| InsecureTLS.swift:181:53:181:76 | .TLSv10 | InsecureTLS.swift:181:3:181:9 | [post] getter for .config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:185:20:185:36 | withMinVersion | InsecureTLS.swift:187:42:187:42 | withMinVersion | provenance | |
| InsecureTLS.swift:187:5:187:5 | [post] self [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:187:5:187:5 | [post] self | provenance | |
@@ -58,27 +49,15 @@ edges
| InsecureTLS.swift:202:24:202:31 | [post] getter for .tlsMinimumSupportedProtocolVersion | InsecureTLS.swift:202:24:202:24 | [post] config [tlsMinimumSupportedProtocolVersion] | provenance | |
| InsecureTLS.swift:202:74:202:97 | .TLSv10 | InsecureTLS.swift:196:56:196:63 | value | provenance | |
| InsecureTLS.swift:202:74:202:97 | .TLSv10 | InsecureTLS.swift:202:24:202:31 | [post] getter for .tlsMinimumSupportedProtocolVersion | provenance | |
| file://:0:0:0:0 | [post] self | InsecureTLS.swift:19:7:19:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self | InsecureTLS.swift:20:7:20:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self | InsecureTLS.swift:22:7:22:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self | InsecureTLS.swift:23:7:23:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [TLSVersion] | InsecureTLS.swift:158:7:158:7 | self [Return] [TLSVersion] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocolVersion] | InsecureTLS.swift:20:7:20:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocolVersion] | InsecureTLS.swift:20:7:20:7 | self [Return] [tlsMaximumSupportedProtocolVersion] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocolVersion] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocolVersion] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocol] | InsecureTLS.swift:23:7:23:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocol] | InsecureTLS.swift:23:7:23:7 | self [Return] [tlsMaximumSupportedProtocol] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocol] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocol] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:19:7:19:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocolVersion] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocolVersion] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocol] | InsecureTLS.swift:22:7:22:7 | self [Return] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocol] | InsecureTLS.swift:22:7:22:7 | self [Return] [tlsMinimumSupportedProtocol] | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocol] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocol] | file://:0:0:0:0 | [post] self | provenance | |
| file://:0:0:0:0 | self [TLSVersion] | file://:0:0:0:0 | .TLSVersion | provenance | |
| file://:0:0:0:0 | value | file://:0:0:0:0 | [post] self [TLSVersion] | provenance | |
| file://:0:0:0:0 | value | file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocolVersion] | provenance | |
@@ -86,16 +65,12 @@ edges
| file://:0:0:0:0 | value | file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocolVersion] | provenance | |
| file://:0:0:0:0 | value | file://:0:0:0:0 | [post] self [tlsMinimumSupportedProtocol] | provenance | |
nodes
| InsecureTLS.swift:19:7:19:7 | self [Return] | semmle.label | self [Return] |
| InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | semmle.label | self [Return] [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:19:7:19:7 | value | semmle.label | value |
| InsecureTLS.swift:20:7:20:7 | self [Return] | semmle.label | self [Return] |
| InsecureTLS.swift:20:7:20:7 | self [Return] [tlsMaximumSupportedProtocolVersion] | semmle.label | self [Return] [tlsMaximumSupportedProtocolVersion] |
| InsecureTLS.swift:20:7:20:7 | value | semmle.label | value |
| InsecureTLS.swift:22:7:22:7 | self [Return] | semmle.label | self [Return] |
| InsecureTLS.swift:22:7:22:7 | self [Return] [tlsMinimumSupportedProtocol] | semmle.label | self [Return] [tlsMinimumSupportedProtocol] |
| InsecureTLS.swift:22:7:22:7 | value | semmle.label | value |
| InsecureTLS.swift:23:7:23:7 | self [Return] | semmle.label | self [Return] |
| InsecureTLS.swift:23:7:23:7 | self [Return] [tlsMaximumSupportedProtocol] | semmle.label | self [Return] [tlsMaximumSupportedProtocol] |
| InsecureTLS.swift:23:7:23:7 | value | semmle.label | value |
| InsecureTLS.swift:40:3:40:3 | [post] config | semmle.label | [post] config |
@@ -150,10 +125,6 @@ nodes
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self | semmle.label | [post] self |
| file://:0:0:0:0 | [post] self [TLSVersion] | semmle.label | [post] self [TLSVersion] |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocolVersion] | semmle.label | [post] self [tlsMaximumSupportedProtocolVersion] |
| file://:0:0:0:0 | [post] self [tlsMaximumSupportedProtocol] | semmle.label | [post] self [tlsMaximumSupportedProtocol] |
@@ -166,25 +137,16 @@ nodes
| file://:0:0:0:0 | value | semmle.label | value |
| file://:0:0:0:0 | value | semmle.label | value |
subpaths
| InsecureTLS.swift:40:47:40:70 | .TLSv10 | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] | InsecureTLS.swift:40:3:40:3 | [post] config |
| InsecureTLS.swift:40:47:40:70 | .TLSv10 | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:40:3:40:3 | [post] config [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:45:47:45:70 | .TLSv11 | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] | InsecureTLS.swift:45:3:45:3 | [post] config |
| InsecureTLS.swift:45:47:45:70 | .TLSv11 | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:45:3:45:3 | [post] config [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:57:47:57:70 | .TLSv10 | InsecureTLS.swift:20:7:20:7 | value | InsecureTLS.swift:20:7:20:7 | self [Return] | InsecureTLS.swift:57:3:57:3 | [post] config |
| InsecureTLS.swift:57:47:57:70 | .TLSv10 | InsecureTLS.swift:20:7:20:7 | value | InsecureTLS.swift:20:7:20:7 | self [Return] [tlsMaximumSupportedProtocolVersion] | InsecureTLS.swift:57:3:57:3 | [post] config [tlsMaximumSupportedProtocolVersion] |
| InsecureTLS.swift:64:40:64:52 | .tlsProtocol10 | InsecureTLS.swift:22:7:22:7 | value | InsecureTLS.swift:22:7:22:7 | self [Return] | InsecureTLS.swift:64:3:64:3 | [post] config |
| InsecureTLS.swift:64:40:64:52 | .tlsProtocol10 | InsecureTLS.swift:22:7:22:7 | value | InsecureTLS.swift:22:7:22:7 | self [Return] [tlsMinimumSupportedProtocol] | InsecureTLS.swift:64:3:64:3 | [post] config [tlsMinimumSupportedProtocol] |
| InsecureTLS.swift:76:40:76:52 | .tlsProtocol10 | InsecureTLS.swift:23:7:23:7 | value | InsecureTLS.swift:23:7:23:7 | self [Return] | InsecureTLS.swift:76:3:76:3 | [post] config |
| InsecureTLS.swift:76:40:76:52 | .tlsProtocol10 | InsecureTLS.swift:23:7:23:7 | value | InsecureTLS.swift:23:7:23:7 | self [Return] [tlsMaximumSupportedProtocol] | InsecureTLS.swift:76:3:76:3 | [post] config [tlsMaximumSupportedProtocol] |
| InsecureTLS.swift:111:47:111:64 | call to getBadTLSVersion() | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] | InsecureTLS.swift:111:3:111:3 | [post] config |
| InsecureTLS.swift:111:47:111:64 | call to getBadTLSVersion() | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:111:3:111:3 | [post] config [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:122:47:122:47 | version | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] | InsecureTLS.swift:122:3:122:3 | [post] config |
| InsecureTLS.swift:122:47:122:47 | version | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:122:3:122:3 | [post] config [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:163:20:163:43 | .TLSv10 | InsecureTLS.swift:158:7:158:7 | value | InsecureTLS.swift:158:7:158:7 | self [Return] [TLSVersion] | InsecureTLS.swift:163:3:163:3 | [post] def [TLSVersion] |
| InsecureTLS.swift:165:47:165:47 | def [TLSVersion] | InsecureTLS.swift:158:7:158:7 | self [TLSVersion] | file://:0:0:0:0 | .TLSVersion | InsecureTLS.swift:165:47:165:51 | .TLSVersion |
| InsecureTLS.swift:165:47:165:51 | .TLSVersion | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] | InsecureTLS.swift:165:3:165:3 | [post] config |
| InsecureTLS.swift:165:47:165:51 | .TLSVersion | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:165:3:165:3 | [post] config [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:181:53:181:76 | .TLSv10 | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] | InsecureTLS.swift:181:3:181:9 | [post] getter for .config |
| InsecureTLS.swift:181:53:181:76 | .TLSv10 | InsecureTLS.swift:19:7:19:7 | value | InsecureTLS.swift:19:7:19:7 | self [Return] [tlsMinimumSupportedProtocolVersion] | InsecureTLS.swift:181:3:181:9 | [post] getter for .config [tlsMinimumSupportedProtocolVersion] |
| InsecureTLS.swift:202:74:202:97 | .TLSv10 | InsecureTLS.swift:196:56:196:63 | value | InsecureTLS.swift:196:1:198:1 | version[return] | InsecureTLS.swift:202:24:202:31 | [post] getter for .tlsMinimumSupportedProtocolVersion |
#select