Merge branch 'main' into js-insecure-http-parser

This commit is contained in:
Nate Johnson
2023-04-19 13:36:24 -04:00
committed by GitHub
110 changed files with 1953 additions and 1493 deletions

View File

@@ -1 +1 @@
5.0.0
6.1.2

View File

@@ -15,7 +15,7 @@ jobs:
- name: Set up Go 1.20
uses: actions/setup-go@v4
with:
go-version: 1.20.0
go-version: '1.20'
id: go
- name: Check out code
@@ -50,7 +50,7 @@ jobs:
- name: Set up Go 1.20
uses: actions/setup-go@v4
with:
go-version: 1.20.0
go-version: '1.20'
id: go
- name: Check out code

View File

@@ -23,7 +23,7 @@ jobs:
- name: Set up Go 1.20
uses: actions/setup-go@v4
with:
go-version: 1.20.0
go-version: '1.20'
id: go
- name: Check out code

View File

@@ -58,10 +58,8 @@ jobs:
id: cache-extractor
with:
path: |
ruby/extractor/target/release/autobuilder
ruby/extractor/target/release/autobuilder.exe
ruby/extractor/target/release/extractor
ruby/extractor/target/release/extractor.exe
ruby/extractor/target/release/codeql-extractor-ruby
ruby/extractor/target/release/codeql-extractor-ruby.exe
ruby/extractor/ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-extractor-${{ hashFiles('ruby/extractor/rust-toolchain.toml', 'ruby/extractor/Cargo.lock') }}--${{ hashFiles('ruby/extractor/**/*.rs') }}
- uses: actions/cache@v3
@@ -88,15 +86,13 @@ jobs:
run: |
cd extractor
cross build --release
mv target/x86_64-unknown-linux-gnu/release/extractor target/release/
mv target/x86_64-unknown-linux-gnu/release/autobuilder target/release/
mv target/x86_64-unknown-linux-gnu/release/generator target/release/
mv target/x86_64-unknown-linux-gnu/release/codeql-extractor-ruby target/release/
- name: Release build (windows and macos)
if: steps.cache-extractor.outputs.cache-hit != 'true' && runner.os != 'Linux'
run: cd extractor && cargo build --release
- name: Generate dbscheme
if: ${{ matrix.os == 'ubuntu-latest' && steps.cache-extractor.outputs.cache-hit != 'true'}}
run: extractor/target/release/generator --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
run: extractor/target/release/codeql-extractor-ruby generate --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
- uses: actions/upload-artifact@v3
if: ${{ matrix.os == 'ubuntu-latest' }}
with:
@@ -111,10 +107,8 @@ jobs:
with:
name: extractor-${{ matrix.os }}
path: |
ruby/extractor/target/release/autobuilder
ruby/extractor/target/release/autobuilder.exe
ruby/extractor/target/release/extractor
ruby/extractor/target/release/extractor.exe
ruby/extractor/target/release/codeql-extractor-ruby
ruby/extractor/target/release/codeql-extractor-ruby.exe
retention-days: 1
compile-queries:
runs-on: ubuntu-latest-xl
@@ -172,13 +166,10 @@ jobs:
mkdir -p ruby
cp -r codeql-extractor.yml tools ql/lib/ruby.dbscheme.stats ruby/
mkdir -p ruby/tools/{linux64,osx64,win64}
cp linux64/autobuilder ruby/tools/linux64/autobuilder
cp osx64/autobuilder ruby/tools/osx64/autobuilder
cp win64/autobuilder.exe ruby/tools/win64/autobuilder.exe
cp linux64/extractor ruby/tools/linux64/extractor
cp osx64/extractor ruby/tools/osx64/extractor
cp win64/extractor.exe ruby/tools/win64/extractor.exe
chmod +x ruby/tools/{linux64,osx64}/{autobuilder,extractor}
cp linux64/codeql-extractor-ruby ruby/tools/linux64/extractor
cp osx64/codeql-extractor-ruby ruby/tools/osx64/extractor
cp win64/codeql-extractor-ruby.exe ruby/tools/win64/extractor.exe
chmod +x ruby/tools/{linux64,osx64}/extractor
zip -rq codeql-ruby.zip ruby
- uses: actions/upload-artifact@v3
with:

View File

@@ -1,7 +1,7 @@
| file://:0:0:0:0 | short __attribute((__may_alias__)) | type_attributes.c:25:30:25:42 | may_alias |
| type_attributes.c:5:36:5:51 | my_packed_struct | type_attributes.c:5:23:5:32 | packed |
| type_attributes.c:10:54:10:54 | union <unnamed> | type_attributes.c:10:30:10:50 | transparent_union |
| type_attributes.c:16:54:16:54 | union <unnamed> | type_attributes.c:16:30:16:50 | transparent_union |
| type_attributes.c:10:54:10:54 | (unnamed class/struct/union) | type_attributes.c:10:30:10:50 | transparent_union |
| type_attributes.c:16:54:16:54 | (unnamed class/struct/union) | type_attributes.c:16:30:16:50 | transparent_union |
| type_attributes.c:21:37:21:45 | unusedInt | type_attributes.c:21:24:21:29 | unused |
| type_attributes.c:23:13:23:18 | depInt | type_attributes.c:23:36:23:45 | deprecated |
| type_attributes_ms.cpp:1:29:1:29 | X | type_attributes_ms.cpp:1:19:1:26 | novtable |

View File

@@ -1,4 +1,4 @@
| (unnamed class/struct/union) |
| float[3] |
| float[3][3] |
| foo[1] |
| struct <unnamed> |

View File

@@ -1,6 +1,6 @@
| a1.c:6:16:6:16 | struct <unnamed> | 0 members | 2 locations | -1 | <none> |
| a1.c:10:16:10:16 | struct <unnamed> | 1 members | 2 locations | 0 | x |
| a1.c:17:16:17:16 | struct <unnamed> | 1 members | 2 locations | 0 | x |
| a1.c:6:16:6:16 | (unnamed class/struct/union) | 0 members | 2 locations | -1 | <none> |
| a1.c:10:16:10:16 | (unnamed class/struct/union) | 1 members | 2 locations | 0 | x |
| a1.c:17:16:17:16 | (unnamed class/struct/union) | 1 members | 2 locations | 0 | x |
| a1.c:24:8:24:10 | Foo | 3 members | 2 locations | 0 | empty |
| a1.c:24:8:24:10 | Foo | 3 members | 2 locations | 1 | nonempty |
| a1.c:24:8:24:10 | Foo | 3 members | 2 locations | 2 | i |
@@ -9,9 +9,9 @@
| a1.c:36:8:36:10 | Bar | 3 members | 2 locations | 0 | empty |
| a1.c:36:8:36:10 | Bar | 3 members | 2 locations | 1 | nonempty |
| a1.c:36:8:36:10 | Bar | 3 members | 2 locations | 2 | i |
| a2.c:6:16:6:16 | struct <unnamed> | 0 members | 2 locations | -1 | <none> |
| a2.c:10:16:10:16 | struct <unnamed> | 1 members | 2 locations | 0 | x |
| a2.c:17:16:17:16 | struct <unnamed> | 1 members | 2 locations | 0 | x |
| a2.c:6:16:6:16 | (unnamed class/struct/union) | 0 members | 2 locations | -1 | <none> |
| a2.c:10:16:10:16 | (unnamed class/struct/union) | 1 members | 2 locations | 0 | x |
| a2.c:17:16:17:16 | (unnamed class/struct/union) | 1 members | 2 locations | 0 | x |
| a2.c:24:8:24:10 | Foo | 3 members | 2 locations | 0 | empty |
| a2.c:24:8:24:10 | Foo | 3 members | 2 locations | 1 | nonempty |
| a2.c:24:8:24:10 | Foo | 3 members | 2 locations | 2 | i |

View File

@@ -1,5 +1,5 @@
| a1.c:10:16:10:16 | struct <unnamed> | 0 | file://:0:0:0:0 | int | 1 types |
| a1.c:17:16:17:16 | struct <unnamed> | 0 | file://:0:0:0:0 | int | 1 types |
| a1.c:10:16:10:16 | (unnamed class/struct/union) | 0 | file://:0:0:0:0 | int | 1 types |
| a1.c:17:16:17:16 | (unnamed class/struct/union) | 0 | file://:0:0:0:0 | int | 1 types |
| a1.c:24:8:24:10 | Foo | 0 | file://:0:0:0:0 | anon_empty_t * | 1 types |
| a1.c:24:8:24:10 | Foo | 1 | file://:0:0:0:0 | anon_nonempty_t * | 1 types |
| a1.c:24:8:24:10 | Foo | 2 | file://:0:0:0:0 | int | 1 types |
@@ -7,8 +7,8 @@
| a1.c:36:8:36:10 | Bar | 0 | file://:0:0:0:0 | Empty * | 1 types |
| a1.c:36:8:36:10 | Bar | 1 | file://:0:0:0:0 | NonEmpty * | 1 types |
| a1.c:36:8:36:10 | Bar | 2 | file://:0:0:0:0 | int | 1 types |
| a2.c:10:16:10:16 | struct <unnamed> | 0 | file://:0:0:0:0 | int | 1 types |
| a2.c:17:16:17:16 | struct <unnamed> | 0 | file://:0:0:0:0 | int | 1 types |
| a2.c:10:16:10:16 | (unnamed class/struct/union) | 0 | file://:0:0:0:0 | int | 1 types |
| a2.c:17:16:17:16 | (unnamed class/struct/union) | 0 | file://:0:0:0:0 | int | 1 types |
| a2.c:24:8:24:10 | Foo | 0 | file://:0:0:0:0 | anon_empty_t * | 1 types |
| a2.c:24:8:24:10 | Foo | 1 | file://:0:0:0:0 | anon_nonempty_t * | 1 types |
| a2.c:24:8:24:10 | Foo | 2 | file://:0:0:0:0 | int | 1 types |

View File

@@ -52,23 +52,24 @@ class IDbCommandConstructionSqlExpr extends SqlExpr, ObjectCreation {
class DapperCommandDefinitionMethodCallSqlExpr extends SqlExpr, ObjectCreation {
DapperCommandDefinitionMethodCallSqlExpr() {
this.getObjectType() instanceof Dapper::CommandDefinitionStruct and
exists(Conf c | c.hasFlow(DataFlow::exprNode(this), _))
DapperCommandDefinitionMethodCallSql::flow(DataFlow::exprNode(this), _)
}
override Expr getSql() { result = this.getArgumentForName("commandText") }
}
private class Conf extends DataFlow4::Configuration {
Conf() { this = "DapperCommandDefinitionFlowConfig" }
override predicate isSource(DataFlow::Node node) {
private module DapperCommandDefitionMethodCallSqlConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) {
node.asExpr().(ObjectCreation).getObjectType() instanceof Dapper::CommandDefinitionStruct
}
override predicate isSink(DataFlow::Node node) {
predicate isSink(DataFlow::Node node) {
exists(MethodCall mc |
mc.getTarget() = any(Dapper::SqlMapperClass c).getAQueryMethod() and
node.asExpr() = mc.getArgumentForName("command")
)
}
}
private module DapperCommandDefinitionMethodCallSql =
DataFlow::Global<DapperCommandDefitionMethodCallSqlConfig>;

View File

@@ -162,18 +162,14 @@ class XmlReaderSettingsCreation extends ObjectCreation {
}
}
private class SettingsDataFlowConfig extends DataFlow3::Configuration {
SettingsDataFlowConfig() { this = "SettingsDataFlowConfig" }
private module SettingsDataFlowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source.asExpr() instanceof XmlReaderSettingsCreation }
override predicate isSource(DataFlow::Node source) {
source.asExpr() instanceof XmlReaderSettingsCreation
}
override predicate isSink(DataFlow::Node sink) {
sink.asExpr() instanceof XmlReaderSettingsInstance
}
predicate isSink(DataFlow::Node sink) { sink.asExpr() instanceof XmlReaderSettingsInstance }
}
private module SettingsDataFlow = DataFlow::Global<SettingsDataFlowConfig>;
/** A call to `XmlReader.Create`. */
class XmlReaderCreateCall extends MethodCall {
XmlReaderCreateCall() { this.getTarget() = any(SystemXmlXmlReaderClass r).getCreateMethod() }
@@ -190,8 +186,6 @@ class XmlReaderSettingsInstance extends Expr {
/** Gets a possible creation point for this instance of `XmlReaderSettings`. */
XmlReaderSettingsCreation getASettingsCreation() {
exists(SettingsDataFlowConfig settingsFlow |
settingsFlow.hasFlow(DataFlow::exprNode(result), DataFlow::exprNode(this))
)
SettingsDataFlow::flow(DataFlow::exprNode(result), DataFlow::exprNode(this))
}
}

View File

@@ -78,10 +78,12 @@ predicate isExponentialRegex(StringLiteral s) {
}
/**
* DEPRECATED: Use `ExponentialRegexDataflow` instead.
*
* A data flow configuration for tracking exponential worst case time regular expression string
* literals to the pattern argument of a regex.
*/
class ExponentialRegexDataflow extends DataFlow2::Configuration {
deprecated class ExponentialRegexDataflow extends DataFlow2::Configuration {
ExponentialRegexDataflow() { this = "ExponentialRegex" }
override predicate isSource(DataFlow::Node s) { isExponentialRegex(s.asExpr()) }
@@ -89,15 +91,27 @@ class ExponentialRegexDataflow extends DataFlow2::Configuration {
override predicate isSink(DataFlow::Node s) { s.asExpr() = any(RegexOperation c).getPattern() }
}
/**
* A data flow configuration for tracking exponential worst case time regular expression string
* literals to the pattern argument of a regex.
*/
private module ExponentialRegexDataFlowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node s) { isExponentialRegex(s.asExpr()) }
predicate isSink(DataFlow::Node s) { s.asExpr() = any(RegexOperation c).getPattern() }
}
module ExponentialRegexDataFlow = DataFlow::Global<ExponentialRegexDataFlowConfig>;
/**
* An expression passed as the `input` to a call to a `Regex` method, where the regex appears to
* have exponential behavior.
*/
class ExponentialRegexSink extends DataFlow::ExprNode, Sink {
ExponentialRegexSink() {
exists(ExponentialRegexDataflow regexDataflow, RegexOperation regexOperation |
exists(RegexOperation regexOperation |
// Exponential regex flows to the pattern argument
regexDataflow.hasFlow(_, DataFlow::exprNode(regexOperation.getPattern()))
ExponentialRegexDataFlow::flow(_, DataFlow::exprNode(regexOperation.getPattern()))
|
// This is used as an input for this pattern
this.getExpr() = regexOperation.getInput() and

View File

@@ -75,9 +75,11 @@ class JsonConvertTrackingConfig extends TaintTracking::Configuration {
}
/**
* DEPRECATED: Use `TypeNameTracking` instead.
*
* Tracks unsafe `TypeNameHandling` setting to `JsonConvert` call
*/
class TypeNameTrackingConfig extends DataFlow::Configuration {
deprecated class TypeNameTrackingConfig extends DataFlow::Configuration {
TypeNameTrackingConfig() { this = "TypeNameTrackingConfig" }
override predicate isSource(DataFlow::Node source) {
@@ -127,6 +129,62 @@ class TypeNameTrackingConfig extends DataFlow::Configuration {
}
}
/**
* Configuration module for tracking unsafe `TypeNameHandling` setting to `JsonConvert` calls.
*/
private module TypeNameTrackingConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
(
source.asExpr() instanceof MemberConstantAccess and
source.getType() instanceof TypeNameHandlingEnum
or
source.asExpr() instanceof IntegerLiteral
) and
source.asExpr().hasValue() and
not source.asExpr().getValue() = "0"
}
predicate isSink(DataFlow::Node sink) {
exists(MethodCall mc, Method m, Expr expr |
m = mc.getTarget() and
(
not mc.getArgument(0).hasValue() and
m instanceof NewtonsoftJsonConvertClassDeserializeObjectMethod
) and
expr = mc.getAnArgument() and
sink.asExpr() = expr and
expr.getType() instanceof JsonSerializerSettingsClass
)
}
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
node1.asExpr() instanceof IntegerLiteral and
node2.asExpr().(CastExpr).getExpr() = node1.asExpr()
or
node1.getType() instanceof TypeNameHandlingEnum and
exists(PropertyWrite pw, Property p, Assignment a |
a.getLValue() = pw and
pw.getProperty() = p and
p.getDeclaringType() instanceof JsonSerializerSettingsClass and
p.hasName("TypeNameHandling") and
(
node1.asExpr() = a.getRValue() and
node2.asExpr() = pw.getQualifier()
or
exists(ObjectInitializer oi |
node1.asExpr() = oi.getAMemberInitializer().getRValue() and
node2.asExpr() = oi
)
)
)
}
}
/**
* Configuration module for tracking unsafe `TypeNameHandling` setting to `JsonConvert` calls.
*/
module TypeNameTracking = DataFlow::Global<TypeNameTrackingConfig>;
/**
* User input to static method or constructor call deserialization flow tracking.
*/

View File

@@ -172,26 +172,24 @@ module XmlReader {
isNetFrameworkBefore(this.(MethodCall).getTarget().getDeclaringType(), "4.0")
or
// bad settings flow here
exists(SettingsDataFlowConfig flow, ObjectCreation settings |
flow.hasFlow(DataFlow::exprNode(settings), DataFlow::exprNode(this.getSettings())) and
exists(ObjectCreation settings |
SettingsDataFlow::flow(DataFlow::exprNode(settings), DataFlow::exprNode(this.getSettings())) and
XmlSettings::dtdEnabledSettings(settings, evidence, reason)
)
}
private predicate insecureResolver(string reason, Expr evidence) {
// bad settings flow here
exists(SettingsDataFlowConfig flow, ObjectCreation settings |
flow.hasFlow(DataFlow::exprNode(settings), DataFlow::exprNode(this.getSettings())) and
exists(ObjectCreation settings |
SettingsDataFlow::flow(DataFlow::exprNode(settings), DataFlow::exprNode(this.getSettings())) and
XmlSettings::insecureResolverSettings(settings, evidence, reason)
)
// default is secure
}
}
private class SettingsDataFlowConfig extends DataFlow2::Configuration {
SettingsDataFlowConfig() { this = "SettingsDataFlowConfig" }
override predicate isSource(DataFlow::Node source) {
private module SettingsDataFlowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
// flow from places where we construct an XmlReaderSettings
source
.asExpr()
@@ -202,10 +200,12 @@ module XmlReader {
.hasQualifiedName("System.Xml", "XmlReaderSettings")
}
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
sink.asExpr() = any(InsecureXmlReaderCreate create).getSettings()
}
}
private module SettingsDataFlow = DataFlow::Global<SettingsDataFlowConfig>;
}
/** Provides predicates related to `System.Xml.XmlTextReader`. */

View File

@@ -37,19 +37,19 @@ predicate inForeachStmtBody(ForeachStmt loop, Element e) {
)
}
class LambdaDataFlowConfiguration extends DataFlow::Configuration {
LambdaDataFlowConfiguration() { this = "LambdaDataFlowConfiguration" }
module LambdaDataFlowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { lambdaCapturesLoopVariable(source.asExpr(), _, _) }
override predicate isSource(DataFlow::Node source) {
lambdaCapturesLoopVariable(source.asExpr(), _, _)
}
predicate isSink(DataFlow::Node sink) { exists(getAssignmentTarget(sink.asExpr())) }
}
override predicate isSink(DataFlow::Node sink) { exists(getAssignmentTarget(sink.asExpr())) }
module LambdaDataFlow {
private import DataFlow::Global<LambdaDataFlowConfig>
predicate capturesLoopVarAndIsStoredIn(
AnonymousFunctionExpr lambda, Variable loopVar, Element storage
) {
exists(DataFlow::Node sink | this.hasFlow(DataFlow::exprNode(lambda), sink) |
exists(DataFlow::Node sink | flow(DataFlow::exprNode(lambda), sink) |
storage = getAssignmentTarget(sink.asExpr())
) and
exists(ForeachStmt loop | lambdaCapturesLoopVariable(lambda, loop, loopVar) |
@@ -109,7 +109,7 @@ predicate declaredInsideLoop(ForeachStmt loop, LocalVariable v) {
)
}
from LambdaDataFlowConfiguration c, AnonymousFunctionExpr lambda, Variable loopVar, Element storage
where c.capturesLoopVarAndIsStoredIn(lambda, loopVar, storage)
from AnonymousFunctionExpr lambda, Variable loopVar, Element storage
where LambdaDataFlow::capturesLoopVarAndIsStoredIn(lambda, loopVar, storage)
select lambda, "Function which may be stored in $@ captures variable $@.", storage,
storage.toString(), loopVar, loopVar.getName()

View File

@@ -12,12 +12,10 @@
import csharp
import semmle.code.csharp.dataflow.DataFlow::DataFlow
import semmle.code.csharp.dataflow.DataFlow::DataFlow::PathGraph
import AddCertToRootStore::PathGraph
class AddCertToRootStoreConfig extends DataFlow::Configuration {
AddCertToRootStoreConfig() { this = "Adding Certificate To Root Store" }
override predicate isSource(DataFlow::Node source) {
module AddCertToRootStoreConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
exists(ObjectCreation oc | oc = source.asExpr() |
oc.getType()
.(RefType)
@@ -26,7 +24,7 @@ class AddCertToRootStoreConfig extends DataFlow::Configuration {
)
}
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(MethodCall mc |
(
mc.getTarget()
@@ -40,6 +38,8 @@ class AddCertToRootStoreConfig extends DataFlow::Configuration {
}
}
from DataFlow::PathNode oc, DataFlow::PathNode mc, AddCertToRootStoreConfig config
where config.hasFlowPath(oc, mc)
module AddCertToRootStore = DataFlow::Global<AddCertToRootStoreConfig>;
from AddCertToRootStore::PathNode oc, AddCertToRootStore::PathNode mc
where AddCertToRootStore::flowPath(oc, mc)
select mc.getNode(), oc, mc, "This certificate is added to the root certificate store."

View File

@@ -11,15 +11,13 @@
*/
import csharp
import DataFlow::PathGraph
import InsecureSqlConnection::PathGraph
/**
* A data flow configuration for tracking strings passed to `SqlConnection[StringBuilder]` instances.
*/
class TaintTrackingConfiguration extends DataFlow::Configuration {
TaintTrackingConfiguration() { this = "TaintTrackingConfiguration" }
override predicate isSource(DataFlow::Node source) {
module InsecureSqlConnectionConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
exists(string s | s = source.asExpr().(StringLiteral).getValue().toLowerCase() |
s.matches("%encrypt=false%")
or
@@ -27,7 +25,7 @@ class TaintTrackingConfiguration extends DataFlow::Configuration {
)
}
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(ObjectCreation oc |
oc.getRuntimeArgument(0) = sink.asExpr() and
(
@@ -39,8 +37,13 @@ class TaintTrackingConfiguration extends DataFlow::Configuration {
}
}
from TaintTrackingConfiguration c, DataFlow::PathNode source, DataFlow::PathNode sink
where c.hasFlowPath(source, sink)
/**
* A data flow configuration for tracking strings passed to `SqlConnection[StringBuilder]` instances.
*/
module InsecureSqlConnection = DataFlow::Global<InsecureSqlConnectionConfig>;
from InsecureSqlConnection::PathNode source, InsecureSqlConnection::PathNode sink
where InsecureSqlConnection::flowPath(source, sink)
select sink.getNode(), source, sink,
"$@ flows to this SQL connection and does not specify `Encrypt=True`.", source.getNode(),
"Connection string"

View File

@@ -48,12 +48,9 @@ where
)
or
// JsonConvert static method call, but with additional unsafe typename tracking
exists(
JsonConvertTrackingConfig taintTrackingJsonConvert, TypeNameTrackingConfig typenameTracking,
DataFlow::Node settingsCallArg
|
exists(JsonConvertTrackingConfig taintTrackingJsonConvert, DataFlow::Node settingsCallArg |
taintTrackingJsonConvert.hasFlowPath(userInput, deserializeCallArg) and
typenameTracking.hasFlow(_, settingsCallArg) and
TypeNameTracking::flow(_, settingsCallArg) and
deserializeCallArg.getNode().asExpr().getParent() = settingsCallArg.asExpr().getParent()
)
select deserializeCallArg, userInput, deserializeCallArg, "$@ flows to unsafe deserializer.",

View File

@@ -38,11 +38,8 @@ where
// there is no callback `OnAppendCookie` that sets `HttpOnly` to true
not exists(OnAppendCookieHttpOnlyTrackingConfig config | config.hasFlowTo(_)) and
// Passed as third argument to `IResponseCookies.Append`
exists(
CookieOptionsTrackingConfiguration cookieTracking, DataFlow::Node creation,
DataFlow::Node append
|
cookieTracking.hasFlow(creation, append) and
exists(DataFlow::Node creation, DataFlow::Node append |
CookieOptionsTracking::flow(creation, append) and
creation.asExpr() = oc and
append.asExpr() = mc.getArgument(2)
)
@@ -79,8 +76,8 @@ where
oc = c and
oc.getType() instanceof MicrosoftAspNetCoreHttpCookieOptions and
not isPropertySet(oc, "HttpOnly") and
exists(CookieOptionsTrackingConfiguration cookieTracking, DataFlow::Node creation |
cookieTracking.hasFlow(creation, _) and
exists(DataFlow::Node creation |
CookieOptionsTracking::flow(creation, _) and
creation.asExpr() = oc
)
)

View File

@@ -37,8 +37,8 @@ where
oc = c and
oc.getType() instanceof MicrosoftAspNetCoreHttpCookieOptions and
not isPropertySet(oc, "Secure") and
exists(CookieOptionsTrackingConfiguration cookieTracking, DataFlow::Node creation |
cookieTracking.hasFlow(creation, _) and
exists(DataFlow::Node creation |
CookieOptionsTracking::flow(creation, _) and
creation.asExpr() = oc
)
)
@@ -82,8 +82,8 @@ where
// there is no callback `OnAppendCookie` that sets `Secure` to true
not exists(OnAppendCookieSecureTrackingConfig config | config.hasFlowTo(_)) and
// the cookie option is passed to `Append`
exists(CookieOptionsTrackingConfiguration cookieTracking, DataFlow::Node creation |
cookieTracking.hasFlow(creation, _) and
exists(DataFlow::Node creation |
CookieOptionsTracking::flow(creation, _) and
creation.asExpr() = oc
)
)

View File

@@ -12,7 +12,7 @@
import csharp
import semmle.code.csharp.dataflow.DataFlow2
import semmle.code.csharp.dataflow.TaintTracking2
import DataFlow::PathGraph
import HashWithoutSalt::PathGraph
/** The C# class `Windows.Security.Cryptography.Core.HashAlgorithmProvider`. */
class HashAlgorithmProvider extends RefType {
@@ -120,12 +120,10 @@ predicate hasHashAncestor(MethodCall mc) {
* Taint configuration tracking flow from an expression whose name suggests it holds
* password data to a method call that generates a hash without a salt.
*/
class HashWithoutSaltConfiguration extends TaintTracking::Configuration {
HashWithoutSaltConfiguration() { this = "HashWithoutSaltConfiguration" }
module HashWithoutSaltConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source.asExpr() instanceof PasswordVarExpr }
override predicate isSource(DataFlow::Node source) { source.asExpr() instanceof PasswordVarExpr }
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(MethodCall mc |
sink.asExpr() = mc.getArgument(0) and
isHashCall(mc) and
@@ -148,7 +146,7 @@ class HashWithoutSaltConfiguration extends TaintTracking::Configuration {
)
}
override predicate isAdditionalTaintStep(DataFlow::Node node1, DataFlow::Node node2) {
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
exists(MethodCall mc |
mc.getTarget()
.hasQualifiedName("Windows.Security.Cryptography", "CryptographicBuffer",
@@ -166,7 +164,7 @@ class HashWithoutSaltConfiguration extends TaintTracking::Configuration {
* `byte[] saltedPassword = sha256.ComputeHash(rawSalted);`
* Or the password is concatenated with a salt as a string.
*/
override predicate isSanitizer(DataFlow::Node node) {
predicate isBarrier(DataFlow::Node node) {
exists(MethodCall mc |
hasFurtherProcessing(mc) and
mc.getAnArgument() = node.asExpr()
@@ -194,7 +192,9 @@ class HashWithoutSaltConfiguration extends TaintTracking::Configuration {
}
}
from DataFlow::PathNode source, DataFlow::PathNode sink, HashWithoutSaltConfiguration c
where c.hasFlowPath(source, sink)
module HashWithoutSalt = TaintTracking::Global<HashWithoutSaltConfig>;
from HashWithoutSalt::PathNode source, HashWithoutSalt::PathNode sink
where HashWithoutSalt::flowPath(source, sink)
select sink.getNode(), source, sink, "$@ is hashed without a salt.", source.getNode(),
"The password"

View File

@@ -19,9 +19,11 @@ class TokenValidationParametersPropertySensitiveValidation extends Property {
}
/**
* DEPRECATED: Use `FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation` instead.
*
* A dataflow from a `false` value to a write sensitive property for `TokenValidationParameters`.
*/
class FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation extends DataFlow::Configuration
deprecated class FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation extends DataFlow::Configuration
{
FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation() {
this = "FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation"
@@ -37,6 +39,25 @@ class FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation
}
}
/**
* A dataflow configuration from a `false` value to a write sensitive property for `TokenValidationParameters`.
*/
private module FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidationConfig
implements DataFlow::ConfigSig
{
predicate isSource(DataFlow::Node source) {
source.asExpr().getValue() = "false" and
source.asExpr().getType() instanceof BoolType
}
predicate isSink(DataFlow::Node sink) {
sink.asExpr() = any(TokenValidationParametersPropertySensitiveValidation p).getAnAssignedValue()
}
}
module FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation =
DataFlow::Global<FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidationConfig>;
/**
* Holds if `assemblyName` is older than version `ver`
*/

View File

@@ -16,11 +16,10 @@ import JsonWebTokenHandlerLib
import semmle.code.csharp.commons.QualifiedName
from
FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation config,
DataFlow::Node source, DataFlow::Node sink,
TokenValidationParametersPropertySensitiveValidation pw, string qualifier, string name
where
config.hasFlow(source, sink) and
FalseValueFlowsToTokenValidationParametersPropertyWriteToBypassValidation::flow(source, sink) and
sink.asExpr() = pw.getAnAssignedValue() and
pw.hasQualifiedName(qualifier, name)
select sink, "The security sensitive property $@ is being disabled by the following value: $@.", pw,

View File

@@ -31,16 +31,16 @@ predicate unsafeDataContractTypeCreation(Expr e) {
e.(TypeofExpr).getTypeAccess().getTarget() instanceof DataSetOrTableRelatedClass
}
class Conf extends DataFlow::Configuration {
Conf() { this = "FlowToDataSerializerConstructor" }
module FlowToDataSerializerConstructorConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) { unsafeDataContractTypeCreation(node.asExpr()) }
override predicate isSource(DataFlow::Node node) { unsafeDataContractTypeCreation(node.asExpr()) }
override predicate isSink(DataFlow::Node node) { xmlSerializerConstructorArgument(node.asExpr()) }
predicate isSink(DataFlow::Node node) { xmlSerializerConstructorArgument(node.asExpr()) }
}
from Conf conf, DataFlow::Node source, DataFlow::Node sink
where conf.hasFlow(source, sink)
module FlowToDataSerializerConstructor = DataFlow::Global<FlowToDataSerializerConstructorConfig>;
from DataFlow::Node source, DataFlow::Node sink
where FlowToDataSerializerConstructor::flow(source, sink)
select sink,
"Unsafe type is used in data contract serializer. Make sure $@ comes from the trusted source.",
source, source.toString()

View File

@@ -40,9 +40,11 @@ private class AuthCookieNameConfiguration extends DataFlow::Configuration {
}
/**
* DEPRECATED: Use `CookieOptionsTracking` instead.
*
* Tracks creation of `CookieOptions` to `IResponseCookies.Append(String, String, CookieOptions)` call as a third parameter.
*/
class CookieOptionsTrackingConfiguration extends DataFlow::Configuration {
deprecated class CookieOptionsTrackingConfiguration extends DataFlow::Configuration {
CookieOptionsTrackingConfiguration() { this = "CookieOptionsTrackingConfiguration" }
override predicate isSource(DataFlow::Node source) {
@@ -57,6 +59,29 @@ class CookieOptionsTrackingConfiguration extends DataFlow::Configuration {
}
}
/**
* Configuration module tracking creation of `CookieOptions` to `IResponseCookies.Append(String, String, CookieOptions)`
* calls as a third parameter.
*/
private module CookieOptionsTrackingConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
source.asExpr().(ObjectCreation).getType() instanceof MicrosoftAspNetCoreHttpCookieOptions
}
predicate isSink(DataFlow::Node sink) {
exists(MicrosoftAspNetCoreHttpResponseCookies iResponse, MethodCall mc |
iResponse.getAppendMethod() = mc.getTarget() and
mc.getArgument(2) = sink.asExpr()
)
}
}
/**
* Tracking creation of `CookieOptions` to `IResponseCookies.Append(String, String, CookieOptions)`
* calls as a third parameter.
*/
module CookieOptionsTracking = DataFlow::Global<CookieOptionsTrackingConfig>;
/**
* Looks for property value of `CookiePolicyOptions` passed to `app.UseCookiePolicy` in `Startup.Configure`.
*/

View File

@@ -3,20 +3,18 @@
*/
import csharp
import DataFlow::PathGraph
import GlobalFlow::PathGraph
class DataflowConfiguration extends DataFlow::Configuration {
DataflowConfiguration() { this = "data flow configuration" }
module GlobalFlowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source.asExpr().(Expr).getValue() = "tainted" }
override predicate isSource(DataFlow::Node source) {
source.asExpr().(Expr).getValue() = "tainted"
}
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(LocalVariable v | sink.asExpr() = v.getInitializer())
}
}
from DataFlow::PathNode source, DataFlow::PathNode sink, DataflowConfiguration conf
where conf.hasFlowPath(source, sink)
module GlobalFlow = DataFlow::Global<GlobalFlowConfig>;
from GlobalFlow::PathNode source, GlobalFlow::PathNode sink
where GlobalFlow::flowPath(source, sink)
select source, source, sink, "$@", sink, sink.toString()

View File

@@ -3,14 +3,12 @@
*/
import csharp
import DataFlow::PathGraph
import CallSensitivity::PathGraph
class Conf extends DataFlow::Configuration {
Conf() { this = "CallSensitiveFlowConf" }
module CallSensitivityConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node src) { src.asExpr() instanceof ObjectCreation }
override predicate isSource(DataFlow::Node src) { src.asExpr() instanceof ObjectCreation }
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(MethodCall mc |
mc.getTarget().hasName("Sink") and
mc.getAnArgument() = sink.asExpr()
@@ -18,6 +16,8 @@ class Conf extends DataFlow::Configuration {
}
}
from DataFlow::PathNode source, DataFlow::PathNode sink, Conf conf
where conf.hasFlowPath(source, sink)
module CallSensitivity = DataFlow::Global<CallSensitivityConfig>;
from CallSensitivity::PathNode source, CallSensitivity::PathNode sink
where CallSensitivity::flowPath(source, sink)
select source, source, sink, "$@", sink, sink.toString()

View File

@@ -3,23 +3,23 @@
*/
import csharp
import DataFlow::PathGraph
import ArrayFlow::PathGraph
class Conf extends DataFlow::Configuration {
Conf() { this = "ArrayFlowConf" }
module ArrayFlowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node src) { src.asExpr() instanceof ObjectCreation }
override predicate isSource(DataFlow::Node src) { src.asExpr() instanceof ObjectCreation }
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(MethodCall mc |
mc.getTarget().hasUndecoratedName("Sink") and
mc.getAnArgument() = sink.asExpr()
)
}
override int fieldFlowBranchLimit() { result = 100 }
int fieldFlowBranchLimit() { result = 100 }
}
from DataFlow::PathNode source, DataFlow::PathNode sink, Conf conf
where conf.hasFlowPath(source, sink)
module ArrayFlow = DataFlow::Global<ArrayFlowConfig>;
from ArrayFlow::PathNode source, ArrayFlow::PathNode sink
where ArrayFlow::flowPath(source, sink)
select source, source, sink, "$@", sink, sink.toString()

View File

@@ -3,17 +3,15 @@
*/
import csharp
import DataFlow::PathGraph
import Types::PathGraph
class Conf extends DataFlow::Configuration {
Conf() { this = "TypesConf" }
override predicate isSource(DataFlow::Node src) {
module TypesConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node src) {
src.asExpr() instanceof ObjectCreation or
src.asExpr() instanceof NullLiteral
}
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
exists(MethodCall mc |
mc.getTarget().hasUndecoratedName("Sink") and
mc.getAnArgument() = sink.asExpr()
@@ -21,6 +19,8 @@ class Conf extends DataFlow::Configuration {
}
}
from DataFlow::PathNode source, DataFlow::PathNode sink, Conf conf
where conf.hasFlowPath(source, sink)
module Types = DataFlow::Global<TypesConfig>;
from Types::PathNode source, Types::PathNode sink
where Types::flowPath(source, sink)
select source, source, sink, "$@", sink, sink.toString()

View File

@@ -221,17 +221,9 @@ func checkVendor() bool {
return true
}
func main() {
if len(os.Args) > 1 {
usage()
os.Exit(2)
}
log.Printf("Autobuilder was built with %s, environment has %s\n", runtime.Version(), getEnvGoVersion())
func getSourceDir() string {
srcdir := os.Getenv("LGTM_SRC")
inLGTM := srcdir != ""
if inLGTM {
if srcdir != "" {
log.Printf("LGTM_SRC is %s\n", srcdir)
} else {
cwd, err := os.Getwd()
@@ -241,57 +233,39 @@ func main() {
log.Printf("LGTM_SRC is not set; defaulting to current working directory %s\n", cwd)
srcdir = cwd
}
return srcdir
}
// we set `SEMMLE_PATH_TRANSFORMER` ourselves in some cases, so blank it out first for consistency
os.Setenv("SEMMLE_PATH_TRANSFORMER", "")
// determine how to install dependencies and whether a GOPATH needs to be set up before
// extraction
depMode := GoGetNoModules
modMode := ModUnset
needGopath := true
goDirectiveFound := false
if _, present := os.LookupEnv("GO111MODULE"); !present {
os.Setenv("GO111MODULE", "auto")
}
func getDepMode() DependencyInstallerMode {
if util.FileExists("go.mod") {
depMode = GoGetWithModules
needGopath = false
versionRe := regexp.MustCompile(`(?m)^go[ \t\r]+([0-9]+\.[0-9]+)$`)
goMod, err := ioutil.ReadFile("go.mod")
if err != nil {
log.Println("Failed to read go.mod to check for missing Go version")
} else {
matches := versionRe.FindSubmatch(goMod)
if matches != nil {
goDirectiveFound = true
if len(matches) > 1 {
goDirectiveVersion := "v" + string(matches[1])
if semver.Compare(goDirectiveVersion, getEnvGoSemVer()) >= 0 {
diagnostics.EmitNewerGoVersionNeeded()
}
}
}
}
log.Println("Found go.mod, enabling go modules")
} else if util.FileExists("Gopkg.toml") {
depMode = Dep
log.Println("Found Gopkg.toml, using dep instead of go get")
} else if util.FileExists("glide.yaml") {
depMode = Glide
log.Println("Found glide.yaml, enabling go modules")
return GoGetWithModules
}
if util.FileExists("Gopkg.toml") {
log.Println("Found Gopkg.toml, using dep instead of go get")
return Dep
}
if util.FileExists("glide.yaml") {
log.Println("Found glide.yaml, enabling go modules")
return Glide
}
return GoGetNoModules
}
func getModMode(depMode DependencyInstallerMode) ModMode {
if depMode == GoGetWithModules {
// if a vendor/modules.txt file exists, we assume that there are vendored Go dependencies, and
// skip the dependency installation step and run the extractor with `-mod=vendor`
if util.FileExists("vendor/modules.txt") {
modMode = ModVendor
return ModVendor
} else if util.DirExists("vendor") {
modMode = ModMod
return ModMod
}
}
return ModUnset
}
func fixGoVendorIssues(modMode ModMode, depMode DependencyInstallerMode, goDirectiveFound bool) ModMode {
if modMode == ModVendor {
// fix go vendor issues with go versions >= 1.14 when no go version is specified in the go.mod
// if this is the case, and dependencies were vendored with an old go version (and therefore
@@ -311,13 +285,38 @@ func main() {
log.Println("Adding a version directive to the go.mod file as the modules.txt does not have explicit annotations")
if !addVersionToMod("1.13") {
log.Println("Failed to add a version to the go.mod file to fix explicitly required package bug; not using vendored dependencies")
modMode = ModMod
return ModMod
}
}
}
}
}
return modMode
}
func getNeedGopath(depMode DependencyInstallerMode, importpath string) bool {
needGopath := true
if depMode == GoGetWithModules {
needGopath = false
}
// if `LGTM_INDEX_NEED_GOPATH` is set, it overrides the value for `needGopath` inferred above
if needGopathOverride := os.Getenv("LGTM_INDEX_NEED_GOPATH"); needGopathOverride != "" {
if needGopathOverride == "true" {
needGopath = true
} else if needGopathOverride == "false" {
needGopath = false
} else {
log.Fatalf("Unexpected value for Boolean environment variable LGTM_NEED_GOPATH: %v.\n", needGopathOverride)
}
}
if needGopath && importpath == "" {
log.Printf("Failed to determine import path, not setting up GOPATH")
needGopath = false
}
return needGopath
}
func tryUpdateGoModAndGoSum(modMode ModMode, depMode DependencyInstallerMode) {
// Go 1.16 and later won't automatically attempt to update go.mod / go.sum during package loading, so try to update them here:
if modMode != ModVendor && depMode == GoGetWithModules && semver.Compare(getEnvGoSemVer(), "1.16") >= 0 {
// stat go.mod and go.sum
@@ -355,240 +354,227 @@ func main() {
}
}
}
}
// if `LGTM_INDEX_NEED_GOPATH` is set, it overrides the value for `needGopath` inferred above
if needGopathOverride := os.Getenv("LGTM_INDEX_NEED_GOPATH"); needGopathOverride != "" {
inLGTM = true
if needGopathOverride == "true" {
needGopath = true
} else if needGopathOverride == "false" {
needGopath = false
} else {
log.Fatalf("Unexpected value for Boolean environment variable LGTM_NEED_GOPATH: %v.\n", needGopathOverride)
}
type moveGopathInfo struct {
scratch, realSrc, root, newdir string
files []string
}
func moveToTemporaryGopath(srcdir string, importpath string) moveGopathInfo {
// a temporary directory where everything is moved while the correct
// directory structure is created.
scratch, err := ioutil.TempDir(srcdir, "scratch")
if err != nil {
log.Fatalf("Failed to create temporary directory %s in directory %s: %s\n",
scratch, srcdir, err.Error())
}
log.Printf("Temporary directory is %s.\n", scratch)
importpath := getImportPath()
if needGopath && importpath == "" {
log.Printf("Failed to determine import path, not setting up GOPATH")
needGopath = false
// move all files in `srcdir` to `scratch`
dir, err := os.Open(srcdir)
if err != nil {
log.Fatalf("Failed to open source directory %s for reading: %s\n", srcdir, err.Error())
}
if inLGTM && needGopath {
// a temporary directory where everything is moved while the correct
// directory structure is created.
scratch, err := ioutil.TempDir(srcdir, "scratch")
if err != nil {
log.Fatalf("Failed to create temporary directory %s in directory %s: %s\n",
scratch, srcdir, err.Error())
}
log.Printf("Temporary directory is %s.\n", scratch)
// move all files in `srcdir` to `scratch`
dir, err := os.Open(srcdir)
if err != nil {
log.Fatalf("Failed to open source directory %s for reading: %s\n", srcdir, err.Error())
}
files, err := dir.Readdirnames(-1)
if err != nil {
log.Fatalf("Failed to read source directory %s: %s\n", srcdir, err.Error())
}
for _, file := range files {
if file != filepath.Base(scratch) {
log.Printf("Moving %s/%s to %s/%s.\n", srcdir, file, scratch, file)
err := os.Rename(filepath.Join(srcdir, file), filepath.Join(scratch, file))
if err != nil {
log.Fatalf("Failed to move file %s to the temporary directory: %s\n", file, err.Error())
}
files, err := dir.Readdirnames(-1)
if err != nil {
log.Fatalf("Failed to read source directory %s: %s\n", srcdir, err.Error())
}
for _, file := range files {
if file != filepath.Base(scratch) {
log.Printf("Moving %s/%s to %s/%s.\n", srcdir, file, scratch, file)
err := os.Rename(filepath.Join(srcdir, file), filepath.Join(scratch, file))
if err != nil {
log.Fatalf("Failed to move file %s to the temporary directory: %s\n", file, err.Error())
}
}
// create a new folder which we will add to GOPATH below
// Note we evaluate all symlinks here for consistency: otherwise os.Chdir below
// will follow links but other references to the path may not, which can lead to
// disagreements between GOPATH and the working directory.
realSrc, err := filepath.EvalSymlinks(srcdir)
if err != nil {
log.Fatalf("Failed to evaluate symlinks in %s: %s\n", srcdir, err.Error())
}
root := filepath.Join(realSrc, "root")
// move source files to where Go expects them to be
newdir := filepath.Join(root, "src", importpath)
err = os.MkdirAll(filepath.Dir(newdir), 0755)
if err != nil {
log.Fatalf("Failed to create directory %s: %s\n", newdir, err.Error())
}
log.Printf("Moving %s to %s.\n", scratch, newdir)
err = os.Rename(scratch, newdir)
if err != nil {
log.Fatalf("Failed to rename %s to %s: %s\n", scratch, newdir, err.Error())
}
// schedule restoring the contents of newdir to their original location after this function completes:
defer restoreRepoLayout(newdir, files, filepath.Base(scratch), srcdir)
err = os.Chdir(newdir)
if err != nil {
log.Fatalf("Failed to chdir into %s: %s\n", newdir, err.Error())
}
// set up SEMMLE_PATH_TRANSFORMER to ensure paths in the source archive and the snapshot
// match the original source location, not the location we moved it to
pt, err := ioutil.TempFile("", "path-transformer")
if err != nil {
log.Fatalf("Unable to create path transformer file: %s.", err.Error())
}
defer os.Remove(pt.Name())
_, err = pt.WriteString("#" + realSrc + "\n" + newdir + "//\n")
if err != nil {
log.Fatalf("Unable to write path transformer file: %s.", err.Error())
}
err = pt.Close()
if err != nil {
log.Fatalf("Unable to close path transformer file: %s.", err.Error())
}
err = os.Setenv("SEMMLE_PATH_TRANSFORMER", pt.Name())
if err != nil {
log.Fatalf("Unable to set SEMMLE_PATH_TRANSFORMER environment variable: %s.\n", err.Error())
}
// set/extend GOPATH
oldGopath := os.Getenv("GOPATH")
var newGopath string
if oldGopath != "" {
newGopath = strings.Join(
[]string{root, oldGopath},
string(os.PathListSeparator),
)
} else {
newGopath = root
}
err = os.Setenv("GOPATH", newGopath)
if err != nil {
log.Fatalf("Unable to set GOPATH to %s: %s\n", newGopath, err.Error())
}
log.Printf("GOPATH set to %s.\n", newGopath)
}
// check whether an explicit dependency installation command was provided
inst := util.Getenv("CODEQL_EXTRACTOR_GO_BUILD_COMMAND", "LGTM_INDEX_BUILD_COMMAND")
shouldInstallDependencies := false
if inst == "" {
// try to build the project
buildSucceeded := autobuilder.Autobuild()
// create a new folder which we will add to GOPATH below
// Note we evaluate all symlinks here for consistency: otherwise os.Chdir below
// will follow links but other references to the path may not, which can lead to
// disagreements between GOPATH and the working directory.
realSrc, err := filepath.EvalSymlinks(srcdir)
if err != nil {
log.Fatalf("Failed to evaluate symlinks in %s: %s\n", srcdir, err.Error())
}
// Build failed or there are still dependency errors; we'll try to install dependencies
// ourselves
if !buildSucceeded {
log.Println("Build failed, continuing to install dependencies.")
root := filepath.Join(realSrc, "root")
shouldInstallDependencies = true
} else if util.DepErrors("./...", modMode.argsForGoVersion(getEnvGoSemVer())...) {
log.Println("Dependencies are still not resolving after the build, continuing to install dependencies.")
// move source files to where Go expects them to be
newdir := filepath.Join(root, "src", importpath)
err = os.MkdirAll(filepath.Dir(newdir), 0755)
if err != nil {
log.Fatalf("Failed to create directory %s: %s\n", newdir, err.Error())
}
log.Printf("Moving %s to %s.\n", scratch, newdir)
err = os.Rename(scratch, newdir)
if err != nil {
log.Fatalf("Failed to rename %s to %s: %s\n", scratch, newdir, err.Error())
}
shouldInstallDependencies = true
}
} else {
// write custom build commands into a script, then run it
var (
ext = ""
header = ""
footer = ""
return moveGopathInfo{
scratch: scratch,
realSrc: realSrc,
root: root,
newdir: newdir,
files: files,
}
}
func createPathTransformerFile(newdir string) *os.File {
err := os.Chdir(newdir)
if err != nil {
log.Fatalf("Failed to chdir into %s: %s\n", newdir, err.Error())
}
// set up SEMMLE_PATH_TRANSFORMER to ensure paths in the source archive and the snapshot
// match the original source location, not the location we moved it to
pt, err := ioutil.TempFile("", "path-transformer")
if err != nil {
log.Fatalf("Unable to create path transformer file: %s.", err.Error())
}
return pt
}
func writePathTransformerFile(pt *os.File, realSrc, root, newdir string) {
_, err := pt.WriteString("#" + realSrc + "\n" + newdir + "//\n")
if err != nil {
log.Fatalf("Unable to write path transformer file: %s.", err.Error())
}
err = pt.Close()
if err != nil {
log.Fatalf("Unable to close path transformer file: %s.", err.Error())
}
err = os.Setenv("SEMMLE_PATH_TRANSFORMER", pt.Name())
if err != nil {
log.Fatalf("Unable to set SEMMLE_PATH_TRANSFORMER environment variable: %s.\n", err.Error())
}
}
func setGopath(root string) {
// set/extend GOPATH
oldGopath := os.Getenv("GOPATH")
var newGopath string
if oldGopath != "" {
newGopath = strings.Join(
[]string{root, oldGopath},
string(os.PathListSeparator),
)
if runtime.GOOS == "windows" {
ext = ".cmd"
header = "@echo on\n@prompt +$S\n"
footer = "\nIF %ERRORLEVEL% NEQ 0 EXIT"
} else {
ext = ".sh"
header = "#! /bin/bash\nset -xe +u\n"
}
script, err := ioutil.TempFile("", "go-build-command-*"+ext)
if err != nil {
log.Fatalf("Unable to create temporary script holding custom build commands: %s\n", err.Error())
}
defer os.Remove(script.Name())
_, err = script.WriteString(header + inst + footer)
if err != nil {
log.Fatalf("Unable to write to temporary script holding custom build commands: %s\n", err.Error())
}
err = script.Close()
if err != nil {
log.Fatalf("Unable to close temporary script holding custom build commands: %s\n", err.Error())
}
os.Chmod(script.Name(), 0700)
log.Println("Installing dependencies using custom build command.")
util.RunCmd(exec.Command(script.Name()))
} else {
newGopath = root
}
if modMode == ModVendor {
// test if running `go` with -mod=vendor works, and if it doesn't, try to fallback to -mod=mod
// or not set if the go version < 1.14. Note we check this post-build in case the build brings
// the vendor directory up to date.
if !checkVendor() {
modMode = ModMod
log.Println("The vendor directory is not consistent with the go.mod; not using vendored dependencies.")
}
err := os.Setenv("GOPATH", newGopath)
if err != nil {
log.Fatalf("Unable to set GOPATH to %s: %s\n", newGopath, err.Error())
}
log.Printf("GOPATH set to %s.\n", newGopath)
}
if shouldInstallDependencies {
if modMode == ModVendor {
log.Printf("Skipping dependency installation because a Go vendor directory was found.")
} else {
// automatically determine command to install dependencies
var install *exec.Cmd
if depMode == Dep {
// set up the dep cache if SEMMLE_CACHE is set
cacheDir := os.Getenv("SEMMLE_CACHE")
if cacheDir != "" {
depCacheDir := filepath.Join(cacheDir, "go", "dep")
log.Printf("Attempting to create dep cache dir %s\n", depCacheDir)
err := os.MkdirAll(depCacheDir, 0755)
func buildWithoutCustomCommands(modMode ModMode) bool {
shouldInstallDependencies := false
// try to build the project
buildSucceeded := autobuilder.Autobuild()
// Build failed or there are still dependency errors; we'll try to install dependencies
// ourselves
if !buildSucceeded {
log.Println("Build failed, continuing to install dependencies.")
shouldInstallDependencies = true
} else if util.DepErrors("./...", modMode.argsForGoVersion(getEnvGoSemVer())...) {
log.Println("Dependencies are still not resolving after the build, continuing to install dependencies.")
shouldInstallDependencies = true
}
return shouldInstallDependencies
}
func buildWithCustomCommands(inst string) {
// write custom build commands into a script, then run it
var (
ext = ""
header = ""
footer = ""
)
if runtime.GOOS == "windows" {
ext = ".cmd"
header = "@echo on\n@prompt +$S\n"
footer = "\nIF %ERRORLEVEL% NEQ 0 EXIT"
} else {
ext = ".sh"
header = "#! /bin/bash\nset -xe +u\n"
}
script, err := ioutil.TempFile("", "go-build-command-*"+ext)
if err != nil {
log.Fatalf("Unable to create temporary script holding custom build commands: %s\n", err.Error())
}
defer os.Remove(script.Name())
_, err = script.WriteString(header + inst + footer)
if err != nil {
log.Fatalf("Unable to write to temporary script holding custom build commands: %s\n", err.Error())
}
err = script.Close()
if err != nil {
log.Fatalf("Unable to close temporary script holding custom build commands: %s\n", err.Error())
}
os.Chmod(script.Name(), 0700)
log.Println("Installing dependencies using custom build command.")
util.RunCmd(exec.Command(script.Name()))
}
func installDependencies(depMode DependencyInstallerMode) {
// automatically determine command to install dependencies
var install *exec.Cmd
if depMode == Dep {
// set up the dep cache if SEMMLE_CACHE is set
cacheDir := os.Getenv("SEMMLE_CACHE")
if cacheDir != "" {
depCacheDir := filepath.Join(cacheDir, "go", "dep")
log.Printf("Attempting to create dep cache dir %s\n", depCacheDir)
err := os.MkdirAll(depCacheDir, 0755)
if err != nil {
log.Printf("Failed to create dep cache directory: %s\n", err.Error())
} else {
log.Printf("Setting dep cache directory to %s\n", depCacheDir)
err = os.Setenv("DEPCACHEDIR", depCacheDir)
if err != nil {
log.Println("Failed to set dep cache directory")
} else {
err = os.Setenv("DEPCACHEAGE", "720h") // 30 days
if err != nil {
log.Printf("Failed to create dep cache directory: %s\n", err.Error())
} else {
log.Printf("Setting dep cache directory to %s\n", depCacheDir)
err = os.Setenv("DEPCACHEDIR", depCacheDir)
if err != nil {
log.Println("Failed to set dep cache directory")
} else {
err = os.Setenv("DEPCACHEAGE", "720h") // 30 days
if err != nil {
log.Println("Failed to set dep cache age")
}
}
log.Println("Failed to set dep cache age")
}
}
if util.FileExists("Gopkg.lock") {
// if Gopkg.lock exists, don't update it and only vendor dependencies
install = exec.Command("dep", "ensure", "-v", "-vendor-only")
} else {
install = exec.Command("dep", "ensure", "-v")
}
log.Println("Installing dependencies using `dep ensure`.")
} else if depMode == Glide {
install = exec.Command("glide", "install")
log.Println("Installing dependencies using `glide install`")
} else {
// explicitly set go module support
if depMode == GoGetWithModules {
os.Setenv("GO111MODULE", "on")
} else if depMode == GoGetNoModules {
os.Setenv("GO111MODULE", "off")
}
// get dependencies
install = exec.Command("go", "get", "-v", "./...")
log.Println("Installing dependencies using `go get -v ./...`.")
}
util.RunCmd(install)
}
}
// extract
if util.FileExists("Gopkg.lock") {
// if Gopkg.lock exists, don't update it and only vendor dependencies
install = exec.Command("dep", "ensure", "-v", "-vendor-only")
} else {
install = exec.Command("dep", "ensure", "-v")
}
log.Println("Installing dependencies using `dep ensure`.")
} else if depMode == Glide {
install = exec.Command("glide", "install")
log.Println("Installing dependencies using `glide install`")
} else {
// explicitly set go module support
if depMode == GoGetWithModules {
os.Setenv("GO111MODULE", "on")
} else if depMode == GoGetNoModules {
os.Setenv("GO111MODULE", "off")
}
// get dependencies
install = exec.Command("go", "get", "-v", "./...")
log.Println("Installing dependencies using `go get -v ./...`.")
}
util.RunCmd(install)
}
func extract(depMode DependencyInstallerMode, modMode ModMode) {
extractor, err := util.GetExtractorPath()
if err != nil {
log.Fatalf("Could not determine path of extractor: %v.\n", err)
@@ -614,3 +600,95 @@ func main() {
log.Fatalf("Extraction failed: %s\n", err.Error())
}
}
func main() {
if len(os.Args) > 1 {
usage()
os.Exit(2)
}
log.Printf("Autobuilder was built with %s, environment has %s\n", runtime.Version(), getEnvGoVersion())
srcdir := getSourceDir()
// we set `SEMMLE_PATH_TRANSFORMER` ourselves in some cases, so blank it out first for consistency
os.Setenv("SEMMLE_PATH_TRANSFORMER", "")
// determine how to install dependencies and whether a GOPATH needs to be set up before
// extraction
depMode := getDepMode()
goDirectiveFound := false
if _, present := os.LookupEnv("GO111MODULE"); !present {
os.Setenv("GO111MODULE", "auto")
}
if depMode == GoGetWithModules {
versionRe := regexp.MustCompile(`(?m)^go[ \t\r]+([0-9]+\.[0-9]+)$`)
goMod, err := ioutil.ReadFile("go.mod")
if err != nil {
log.Println("Failed to read go.mod to check for missing Go version")
} else {
matches := versionRe.FindSubmatch(goMod)
if matches != nil {
goDirectiveFound = true
if len(matches) > 1 {
goDirectiveVersion := "v" + string(matches[1])
if semver.Compare(goDirectiveVersion, getEnvGoSemVer()) >= 0 {
diagnostics.EmitNewerGoVersionNeeded()
}
}
}
}
}
modMode := getModMode(depMode)
modMode = fixGoVendorIssues(modMode, depMode, goDirectiveFound)
tryUpdateGoModAndGoSum(modMode, depMode)
importpath := getImportPath()
needGopath := getNeedGopath(depMode, importpath)
inLGTM := os.Getenv("LGTM_SRC") != "" || os.Getenv("LGTM_INDEX_NEED_GOPATH") != ""
if inLGTM && needGopath {
paths := moveToTemporaryGopath(srcdir, importpath)
// schedule restoring the contents of newdir to their original location after this function completes:
defer restoreRepoLayout(paths.newdir, paths.files, filepath.Base(paths.scratch), srcdir)
pt := createPathTransformerFile(paths.newdir)
defer os.Remove(pt.Name())
writePathTransformerFile(pt, paths.realSrc, paths.root, paths.newdir)
setGopath(paths.root)
}
// check whether an explicit dependency installation command was provided
inst := util.Getenv("CODEQL_EXTRACTOR_GO_BUILD_COMMAND", "LGTM_INDEX_BUILD_COMMAND")
shouldInstallDependencies := false
if inst == "" {
shouldInstallDependencies = buildWithoutCustomCommands(modMode)
} else {
buildWithCustomCommands(inst)
}
if modMode == ModVendor {
// test if running `go` with -mod=vendor works, and if it doesn't, try to fallback to -mod=mod
// or not set if the go version < 1.14. Note we check this post-build in case the build brings
// the vendor directory up to date.
if !checkVendor() {
modMode = ModMod
log.Println("The vendor directory is not consistent with the go.mod; not using vendored dependencies.")
}
}
if shouldInstallDependencies {
if modMode == ModVendor {
log.Printf("Skipping dependency installation because a Go vendor directory was found.")
} else {
installDependencies(depMode)
}
}
extract(depMode, modMode)
}

View File

@@ -183,7 +183,7 @@ func DepErrors(pkgpath string, flags ...string) bool {
// FileExists tests whether the file at `filename` exists and is not a directory.
func FileExists(filename string) bool {
info, err := os.Stat(filename)
if err != nil && !os.IsNotExist(err) {
if err != nil && !errors.Is(err, fs.ErrNotExist) {
log.Printf("Unable to stat %s: %s\n", filename, err.Error())
}
return err == nil && !info.IsDir()
@@ -192,7 +192,7 @@ func FileExists(filename string) bool {
// DirExists tests whether `filename` exists and is a directory.
func DirExists(filename string) bool {
info, err := os.Stat(filename)
if err != nil && !os.IsNotExist(err) {
if err != nil && !errors.Is(err, fs.ErrNotExist) {
log.Printf("Unable to stat %s: %s\n", filename, err.Error())
}
return err == nil && info.IsDir()

View File

@@ -73,7 +73,6 @@
| file://:0:0:0:0 | parameter 0 of Store | file://:0:0:0:0 | [summary] to write: argument -1 in Store |
| file://:0:0:0:0 | parameter 0 of StringBytePtr | file://:0:0:0:0 | [summary] to write: return (return[0]) in StringBytePtr |
| file://:0:0:0:0 | parameter 0 of StringByteSlice | file://:0:0:0:0 | [summary] to write: return (return[0]) in StringByteSlice |
| file://:0:0:0:0 | parameter 0 of StringSlicePtr | file://:0:0:0:0 | [summary] to write: return (return[0]) in StringSlicePtr |
| file://:0:0:0:0 | parameter 0 of Sub | file://:0:0:0:0 | [summary] to write: return (return[0]) in Sub |
| file://:0:0:0:0 | parameter 0 of Swap | file://:0:0:0:0 | [summary] to write: argument -1 in Swap |
| file://:0:0:0:0 | parameter 0 of Swap | file://:0:0:0:0 | [summary] to write: argument -1 in Swap |
@@ -184,7 +183,6 @@
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Interface | file://:0:0:0:0 | [summary] to write: return (return[0]) in Interface |
| file://:0:0:0:0 | parameter -1 of InterfaceData | file://:0:0:0:0 | [summary] to write: return (return[0]) in InterfaceData |
| file://:0:0:0:0 | parameter -1 of Key | file://:0:0:0:0 | [summary] to write: return (return[0]) in Key |
@@ -203,7 +201,6 @@
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Open | file://:0:0:0:0 | [summary] to write: return (return[0]) in Open |
| file://:0:0:0:0 | parameter -1 of Open | file://:0:0:0:0 | [summary] to write: return (return[0]) in Open |
| file://:0:0:0:0 | parameter -1 of Open | file://:0:0:0:0 | [summary] to write: return (return[0]) in Open |

View File

@@ -4,5 +4,17 @@ from DataFlow::Node nd, DataFlow::Node succ
where
TaintTracking::localTaintStep(nd, succ) and
// exclude data-flow steps
not DataFlow::localFlowStep(nd, succ)
not DataFlow::localFlowStep(nd, succ) and
// Exclude results which only appear on unix to avoid platform-specific results
not exists(string pkg, string name |
nd.(DataFlow::SummarizedParameterNode)
.getCallable()
.asSummarizedCallable()
.asFunction()
.hasQualifiedName(pkg, name)
|
pkg = "syscall" and name = "StringSlicePtr"
or
pkg = ["os.dirEntry", "os.unixDirent"] and name = ["Info", "Name"]
)
select nd, succ

View File

@@ -191,7 +191,6 @@
| file://:0:0:0:0 | parameter 0 of Store | file://:0:0:0:0 | [summary] to write: argument -1 in Store |
| file://:0:0:0:0 | parameter 0 of StringBytePtr | file://:0:0:0:0 | [summary] to write: return (return[0]) in StringBytePtr |
| file://:0:0:0:0 | parameter 0 of StringByteSlice | file://:0:0:0:0 | [summary] to write: return (return[0]) in StringByteSlice |
| file://:0:0:0:0 | parameter 0 of StringSlicePtr | file://:0:0:0:0 | [summary] to write: return (return[0]) in StringSlicePtr |
| file://:0:0:0:0 | parameter 0 of Sub | file://:0:0:0:0 | [summary] to write: return (return[0]) in Sub |
| file://:0:0:0:0 | parameter 0 of Swap | file://:0:0:0:0 | [summary] to write: argument -1 in Swap |
| file://:0:0:0:0 | parameter 0 of Swap | file://:0:0:0:0 | [summary] to write: argument -1 in Swap |
@@ -338,6 +337,7 @@
| file://:0:0:0:0 | parameter 0 of Write | file://:0:0:0:0 | [summary] to write: argument -1 in Write |
| file://:0:0:0:0 | parameter 0 of Write | file://:0:0:0:0 | [summary] to write: argument -1 in Write |
| file://:0:0:0:0 | parameter 0 of Write | file://:0:0:0:0 | [summary] to write: argument -1 in Write |
| file://:0:0:0:0 | parameter 0 of Write | file://:0:0:0:0 | [summary] to write: argument -1 in Write |
| file://:0:0:0:0 | parameter 0 of WriteAt | file://:0:0:0:0 | [summary] to write: argument -1 in WriteAt |
| file://:0:0:0:0 | parameter 0 of WriteAt | file://:0:0:0:0 | [summary] to write: argument -1 in WriteAt |
| file://:0:0:0:0 | parameter 0 of WriteAt | file://:0:0:0:0 | [summary] to write: argument -1 in WriteAt |
@@ -537,7 +537,6 @@
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Info | file://:0:0:0:0 | [summary] to write: return (return[0]) in Info |
| file://:0:0:0:0 | parameter -1 of Init | file://:0:0:0:0 | [summary] to write: return (return[0]) in Init |
| file://:0:0:0:0 | parameter -1 of Interface | file://:0:0:0:0 | [summary] to write: return (return[0]) in Interface |
| file://:0:0:0:0 | parameter -1 of InterfaceData | file://:0:0:0:0 | [summary] to write: return (return[0]) in InterfaceData |
@@ -583,7 +582,6 @@
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Name | file://:0:0:0:0 | [summary] to write: return (return[0]) in Name |
| file://:0:0:0:0 | parameter -1 of Next | file://:0:0:0:0 | [summary] to write: return (return[0]) in Next |
| file://:0:0:0:0 | parameter -1 of Next | file://:0:0:0:0 | [summary] to write: return (return[0]) in Next |
| file://:0:0:0:0 | parameter -1 of NextPart | file://:0:0:0:0 | [summary] to write: return (return[0]) in NextPart |
@@ -673,8 +671,6 @@
| file://:0:0:0:0 | parameter -1 of Read | file://:0:0:0:0 | [summary] to write: argument 0 in Read |
| file://:0:0:0:0 | parameter -1 of Read | file://:0:0:0:0 | [summary] to write: argument 0 in Read |
| file://:0:0:0:0 | parameter -1 of Read | file://:0:0:0:0 | [summary] to write: argument 0 in Read |
| file://:0:0:0:0 | parameter -1 of Read | file://:0:0:0:0 | [summary] to write: argument 0 in Read |
| file://:0:0:0:0 | parameter -1 of Read | file://:0:0:0:0 | [summary] to write: argument 0 in Read |
| file://:0:0:0:0 | parameter -1 of ReadAt | file://:0:0:0:0 | [summary] to write: argument 0 in ReadAt |
| file://:0:0:0:0 | parameter -1 of ReadAt | file://:0:0:0:0 | [summary] to write: argument 0 in ReadAt |
| file://:0:0:0:0 | parameter -1 of ReadAt | file://:0:0:0:0 | [summary] to write: argument 0 in ReadAt |

View File

@@ -3,5 +3,20 @@ import go
from DataFlow::Node pred, DataFlow::Node succ
where
TaintTracking::localTaintStep(pred, succ) and
not DataFlow::localFlowStep(pred, succ)
not DataFlow::localFlowStep(pred, succ) and
// Exclude results which only appear on unix to avoid platform-specific results
not exists(string pkg, string name |
pred.(DataFlow::SummarizedParameterNode)
.getCallable()
.asSummarizedCallable()
.asFunction()
.hasQualifiedName(pkg, name)
|
pkg = "syscall" and name = "StringSlicePtr"
or
pkg.matches("crypto/rand.%") and
name = "Read"
or
pkg = ["os.dirEntry", "os.unixDirent"] and name = ["Info", "Name"]
)
select pred, succ

View File

@@ -52,7 +52,7 @@ java.beans,,,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,
java.io,44,,45,,18,,,,,,,,,,,,,,,4,,,,,,,,,,,,,22,,,,,,,,43,2
java.lang,18,,92,,,,,,,,,,,,8,,,,,5,,4,,,1,,,,,,,,,,,,,,,,56,36
java.net,13,3,20,,,,,,,,,,,,,,,13,,,,,,,,,,,,,,,,,,,,,,3,20,
java.nio,35,,31,,21,,,,,,,,,,,,,,,12,,,,,,,,,,,,,2,,,,,,,,31,
java.nio,36,,31,,21,,,,,,,,,,,,,,,12,,,,,,,,,,,,,3,,,,,,,,31,
java.sql,13,,3,,,,,,,,4,,,,,,,,,,,,,,,,,,9,,,,,,,,,,,,2,1
java.util,44,,478,,,,,,,,,,,,34,,,,,,,,5,2,,1,2,,,,,,,,,,,,,,41,437
javafx.scene.web,1,,,,,,,,,,,,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,
1 package sink source summary sink:bean-validation sink:create-file sink:fragment-injection sink:groovy sink:header-splitting sink:information-leak sink:intent-start sink:jdbc-url sink:jexl sink:jndi-injection sink:ldap sink:logging sink:mvel sink:ognl-injection sink:open-url sink:pending-intent-sent sink:read-file sink:regex-use sink:regex-use[-1] sink:regex-use[0] sink:regex-use[] sink:regex-use[f-1] sink:regex-use[f1] sink:regex-use[f] sink:set-hostname-verifier sink:sql sink:ssti sink:url-open-stream sink:url-redirect sink:write-file sink:xpath sink:xslt sink:xss source:android-external-storage-dir source:android-widget source:contentprovider source:remote summary:taint summary:value
52 java.io 44 45 18 4 22 43 2
53 java.lang 18 92 8 5 4 1 56 36
54 java.net 13 3 20 13 3 20
55 java.nio 35 36 31 21 12 2 3 31
56 java.sql 13 3 4 9 2 1
57 java.util 44 478 34 5 2 1 2 41 437
58 javafx.scene.web 1 1

View File

@@ -18,10 +18,10 @@ Java framework & library support
`Google Guava <https://guava.dev/>`_,``com.google.common.*``,,730,47,2,6,,,,,
JBoss Logging,``org.jboss.logging``,,,324,,,,,,,
`JSON-java <https://github.com/stleary/JSON-java>`_,``org.json``,,236,,,,,,,,
Java Standard Library,``java.*``,3,673,167,39,,,9,,,13
Java Standard Library,``java.*``,3,673,168,39,,,9,,,13
Java extensions,"``javax.*``, ``jakarta.*``",63,611,34,1,,4,,1,1,2
Kotlin Standard Library,``kotlin*``,,1843,16,11,,,,,,2
`Spring <https://spring.io/>`_,``org.springframework.*``,29,480,101,,,,19,14,,29
Others,"``cn.hutool.core.codec``, ``com.esotericsoftware.kryo.io``, ``com.esotericsoftware.kryo5.io``, ``com.fasterxml.jackson.core``, ``com.fasterxml.jackson.databind``, ``com.hubspot.jinjava``, ``com.mitchellbosecke.pebble``, ``com.opensymphony.xwork2.ognl``, ``com.rabbitmq.client``, ``com.thoughtworks.xstream``, ``com.unboundid.ldap.sdk``, ``com.zaxxer.hikari``, ``flexjson``, ``freemarker.cache``, ``freemarker.template``, ``groovy.lang``, ``groovy.util``, ``hudson``, ``io.netty.bootstrap``, ``io.netty.buffer``, ``io.netty.channel``, ``io.netty.handler.codec``, ``io.netty.handler.ssl``, ``io.netty.handler.stream``, ``io.netty.resolver``, ``io.netty.util``, ``javafx.scene.web``, ``jodd.json``, ``net.sf.saxon.s9api``, ``ognl``, ``okhttp3``, ``org.apache.commons.codec``, ``org.apache.commons.compress.archivers.tar``, ``org.apache.commons.httpclient.util``, ``org.apache.commons.jelly``, ``org.apache.commons.jexl2``, ``org.apache.commons.jexl3``, ``org.apache.commons.logging``, ``org.apache.commons.ognl``, ``org.apache.directory.ldap.client.api``, ``org.apache.hadoop.hive.metastore``, ``org.apache.hc.client5.http.async.methods``, ``org.apache.hc.client5.http.classic.methods``, ``org.apache.hc.client5.http.fluent``, ``org.apache.hive.hcatalog.templeton``, ``org.apache.ibatis.jdbc``, ``org.apache.log4j``, ``org.apache.shiro.codec``, ``org.apache.shiro.jndi``, ``org.apache.tools.ant``, ``org.apache.tools.zip``, ``org.apache.velocity.app``, ``org.apache.velocity.runtime``, ``org.codehaus.cargo.container.installer``, ``org.codehaus.groovy.control``, ``org.dom4j``, ``org.eclipse.jetty.client``, ``org.geogebra.web.full.main``, ``org.hibernate``, ``org.jdbi.v3.core``, ``org.jooq``, ``org.kohsuke.stapler``, ``org.mvel2``, ``org.openjdk.jmh.runner.options``, ``org.scijava.log``, ``org.slf4j``, ``org.thymeleaf``, ``org.xml.sax``, ``org.xmlpull.v1``, ``play.mvc``, ``ratpack.core.form``, ``ratpack.core.handling``, ``ratpack.core.http``, ``ratpack.exec``, ``ratpack.form``, 
``ratpack.func``, ``ratpack.handling``, ``ratpack.http``, ``ratpack.util``, ``retrofit2``",75,813,506,26,,,18,18,,175
Totals,,232,9096,1950,172,6,10,113,33,1,355
Totals,,232,9096,1951,172,6,10,113,33,1,355

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Added sanitizers that recognize line breaks to the query `java/log-injection`.

View File

@@ -1,8 +1,9 @@
/** Provides classes and predicates related to Log Injection vulnerabilities. */
import java
import semmle.code.java.dataflow.DataFlow
private import semmle.code.java.dataflow.DataFlow
private import semmle.code.java.dataflow.ExternalFlow
private import semmle.code.java.controlflow.Guards
/** A data flow sink for unvalidated user input that is used to log messages. */
abstract class LogInjectionSink extends DataFlow::Node { }
@@ -31,6 +32,90 @@ private class DefaultLogInjectionSink extends LogInjectionSink {
private class DefaultLogInjectionSanitizer extends LogInjectionSanitizer {
DefaultLogInjectionSanitizer() {
this.getType() instanceof BoxedType or this.getType() instanceof PrimitiveType
this.getType() instanceof BoxedType or
this.getType() instanceof PrimitiveType or
this.getType() instanceof NumericType
}
}
private class LineBreaksLogInjectionSanitizer extends LogInjectionSanitizer {
LineBreaksLogInjectionSanitizer() {
logInjectionSanitizer(this.asExpr())
or
this = DataFlow::BarrierGuard<logInjectionGuard/3>::getABarrierNode()
}
}
/**
* Holds if the return value of `ma` is sanitized against log injection attacks
* by removing line breaks from it.
*/
private predicate logInjectionSanitizer(MethodAccess ma) {
exists(CompileTimeConstantExpr target, CompileTimeConstantExpr replacement |
ma.getMethod().getDeclaringType() instanceof TypeString and
target = ma.getArgument(0) and
replacement = ma.getArgument(1) and
not replacement.getStringValue().matches(["%\n%", "%\r%"])
|
ma.getMethod().hasName("replace") and
not replacement.getIntValue() = [10, 13] and
(
target.getIntValue() = [10, 13] or // 10 == '\n', 13 == '\r'
target.getStringValue() = ["\n", "\r"]
)
or
ma.getMethod().hasName("replaceAll") and
(
// Replace anything not in an allow list
target.getStringValue().matches("[^%]") and
not target.getStringValue().matches("%" + ["\n", "\r", "\\n", "\\r", "\\R"] + "%")
or
// Replace line breaks
target.getStringValue() = ["\n", "\r", "\\n", "\\r", "\\R"]
)
)
}
/**
* Holds if `g` guards `e` in branch `branch` against log injection attacks
* by checking if there are line breaks in `e`.
*/
private predicate logInjectionGuard(Guard g, Expr e, boolean branch) {
exists(MethodAccess ma, CompileTimeConstantExpr target |
ma = g and
target = ma.getArgument(0)
|
ma.getMethod().getDeclaringType() instanceof TypeString and
ma.getMethod().hasName("contains") and
target.getStringValue() = ["\n", "\r"] and
e = ma.getQualifier() and
branch = false
or
ma.getMethod().hasName("matches") and
(
ma.getMethod().getDeclaringType() instanceof TypeString and
e = ma.getQualifier()
or
ma.getMethod().getDeclaringType().hasQualifiedName("java.util.regex", "Pattern") and
e = ma.getArgument(1)
) and
(
// Allow anything except line breaks
(
not target.getStringValue().matches("%[^%]%") and
not target.getStringValue().matches("%" + ["\n", "\r", "\\n", "\\r", "\\R"] + "%")
or
target.getStringValue().matches("%[^%" + ["\n", "\r", "\\n", "\\r", "\\R"] + "%]%")
) and
branch = true
or
// Disallow line breaks
(
not target.getStringValue().matches("%[^%" + ["\n", "\r", "\\n", "\\r", "\\R"] + "%]%") and
// Assuming a regex containing line breaks is correctly matching line breaks in a string
target.getStringValue().matches("%" + ["\n", "\r", "\\n", "\\r", "\\R"] + "%")
) and
branch = false
)
)
}

View File

@@ -1,5 +1,6 @@
import java.util.ResourceBundle;
import java.util.logging.LogRecord;
import java.util.regex.Pattern;
import com.google.common.flogger.LoggingApi;
import org.apache.commons.logging.Log;
import org.apache.log4j.Category;
@@ -19,6 +20,172 @@ public class LogInjectionTest {
return null;
}
public void testSanitizers() {
String source = (String) source();
Logger logger = null;
logger.debug(source.replace("\n", "")); // Safe
logger.debug(source.replace("\n", "\n")); // $ hasTaintFlow
logger.debug(source.replace("\n", "\r")); // $ hasTaintFlow
logger.debug(source.replace("\r", "")); // Safe
logger.debug(source.replace("\r", "\n")); // $ hasTaintFlow
logger.debug(source.replace("\r", "\r")); // $ hasTaintFlow
logger.debug(source.replace("something_else", "")); // $ hasTaintFlow
logger.debug(source.replace('\n', '_')); // Safe
logger.debug(source.replace('\n', '\n')); // $ hasTaintFlow
logger.debug(source.replace('\n', '\r')); // $ hasTaintFlow
logger.debug(source.replace('\r', '_')); // Safe
logger.debug(source.replace('\r', '\n')); // $ hasTaintFlow
logger.debug(source.replace('\r', '\r')); // $ hasTaintFlow
logger.debug(source.replace('-', '_')); // $ hasTaintFlow
logger.debug(source.replaceAll("\n", "")); // Safe
logger.debug(source.replaceAll("\n", "\n")); // $ hasTaintFlow
logger.debug(source.replaceAll("\n", "\r")); // $ hasTaintFlow
logger.debug(source.replaceAll("\r", "")); // Safe
logger.debug(source.replaceAll("\r", "\n")); // $ hasTaintFlow
logger.debug(source.replaceAll("\r", "\r")); // $ hasTaintFlow
logger.debug(source.replaceAll("\\n", "")); // Safe
logger.debug(source.replaceAll("\\n", "\n")); // $ hasTaintFlow
logger.debug(source.replaceAll("\\n", "\r")); // $ hasTaintFlow
logger.debug(source.replaceAll("\\r", "")); // Safe
logger.debug(source.replaceAll("\\r", "\n")); // $ hasTaintFlow
logger.debug(source.replaceAll("\\r", "\r")); // $ hasTaintFlow
logger.debug(source.replaceAll("\\R", "")); // Safe
logger.debug(source.replaceAll("\\R", "\n")); // $ hasTaintFlow
logger.debug(source.replaceAll("\\R", "\r")); // $ hasTaintFlow
logger.debug(source.replaceAll("[^a-zA-Z]", "")); // Safe
logger.debug(source.replaceAll("[^a-zA-Z]", "\n")); // $ hasTaintFlow
logger.debug(source.replaceAll("[^a-zA-Z]", "\r")); // $ hasTaintFlow
logger.debug(source.replaceAll("[^a-zA-Z\n]", "")); // $ hasTaintFlow
logger.debug(source.replaceAll("[^a-zA-Z\r]", "")); // $ hasTaintFlow
logger.debug(source.replaceAll("[^a-zA-Z\\R]", "")); // $ hasTaintFlow
}
public void testGuards() {
String source = (String) source();
Logger logger = null;
if (source.matches(".*\n.*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (Pattern.matches(".*\n.*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (source.matches(".*\\n.*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (Pattern.matches(".*\\n.*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (source.matches(".*\r.*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (Pattern.matches(".*\r.*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (source.matches(".*\\r.*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (Pattern.matches(".*\\r.*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (source.matches(".*\\R.*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (Pattern.matches(".*\\R.*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // Safe
}
if (source.matches(".*")) {
logger.debug(source); // Safe (assuming not DOTALL)
} else {
logger.debug(source); // $ hasTaintFlow
}
if (Pattern.matches(".*", source)) {
logger.debug(source); // Safe (assuming not DOTALL)
} else {
logger.debug(source); // $ hasTaintFlow
}
if (source.matches("[^\n\r]*")) {
logger.debug(source); // Safe
} else {
logger.debug(source); // $ hasTaintFlow
}
if (Pattern.matches("[^\n\r]*", source)) {
logger.debug(source); // Safe
} else {
logger.debug(source); // $ hasTaintFlow
}
if (source.matches("[^\\R]*")) {
logger.debug(source); // Safe
} else {
logger.debug(source); // $ hasTaintFlow
}
if (Pattern.matches("[^\\R]*", source)) {
logger.debug(source); // Safe
} else {
logger.debug(source); // $ hasTaintFlow
}
if (source.matches("[^a-zA-Z]*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // $ hasTaintFlow
}
if (Pattern.matches("[^a-zA-Z]*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // $ hasTaintFlow
}
if (source.matches("[\n]*")) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // $ MISSING: $ hasTaintFlow
}
if (Pattern.matches("[\n]*", source)) {
logger.debug(source); // $ hasTaintFlow
} else {
logger.debug(source); // $ MISSING: $ hasTaintFlow
}
}
public void test() {
{
Category category = null;

BIN
ql/Cargo.lock generated

Binary file not shown.

View File

@@ -1,5 +1,5 @@
[package]
name = "ql-extractor"
name = "codeql-extractor-ql"
version = "0.1.0"
authors = ["GitHub"]
edition = "2018"
@@ -13,7 +13,7 @@ tree-sitter-ql-dbscheme = { git = "https://github.com/erik-krogh/tree-sitter-ql-
tree-sitter-ql-yaml = {git = "https://github.com/erik-krogh/tree-sitter-ql.git", rev = "cf704bf3671e1ae148e173464fb65a4d2bbf5f99"}
tree-sitter-blame = {path = "../buramu/tree-sitter-blame"}
tree-sitter-json = {git = "https://github.com/tausbn/tree-sitter-json.git", rev = "745663ee997f1576fe1e7187e6347e0db36ec7a9"}
clap = "4.2"
clap = { version = "4.2", features = ["derive"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
rayon = "1.7.0"

View File

@@ -1,8 +1,13 @@
use clap::Args;
use std::env;
use std::path::PathBuf;
use std::process::Command;
fn main() -> std::io::Result<()> {
#[derive(Args)]
// The autobuilder takes no command-line options, but this may change in the future.
pub struct Options {}
pub fn run(_: Options) -> std::io::Result<()> {
let dist = env::var("CODEQL_DIST").expect("CODEQL_DIST not set");
let db = env::var("CODEQL_EXTRACTOR_QL_WIP_DATABASE")
.expect("CODEQL_EXTRACTOR_QL_WIP_DATABASE not set");

View File

@@ -1,3 +1,4 @@
use clap::Args;
use rayon::prelude::*;
use std::fs;
use std::io::BufRead;
@@ -5,7 +6,22 @@ use std::path::{Path, PathBuf};
use codeql_extractor::{diagnostics, extractor, node_types, trap};
fn main() -> std::io::Result<()> {
#[derive(Args)]
pub struct Options {
/// Sets a custom source achive folder
#[arg(long)]
source_archive_dir: PathBuf,
/// Sets a custom trap folder
#[arg(long)]
output_dir: PathBuf,
/// A text file containing the paths of the files to extract
#[arg(long)]
file_list: PathBuf,
}
pub fn run(options: Options) -> std::io::Result<()> {
tracing_subscriber::fmt()
.with_target(false)
.without_time()
@@ -58,30 +74,9 @@ fn main() -> std::io::Result<()> {
.build_global()
.unwrap();
let matches = clap::Command::new("QL extractor")
.version("1.0")
.author("GitHub")
.about("CodeQL QL extractor")
.args(&[
clap::arg!(--"source-archive-dir" <DIR> "Sets a custom source archive folder"),
clap::arg!(--"output-dir" <DIR> "Sets a custom trap folder"),
clap::arg!(--"file-list" <FILE_LIST> "A text file containing the paths of the files to extract"),
])
.get_matches();
let src_archive_dir = matches
.get_one::<String>("source-archive-dir")
.expect("missing --source-archive-dir");
let src_archive_dir = PathBuf::from(src_archive_dir);
let trap_dir = matches
.get_one::<String>("output-dir")
.expect("missing --output-dir");
let trap_dir = PathBuf::from(trap_dir);
let file_list = matches
.get_one::<String>("file-list")
.expect("missing --file-list");
let file_list = fs::File::open(file_list)?;
let trap_dir = options.output_dir;
let file_list = fs::File::open(options.file_list)?;
let source_archive_dir = options.source_archive_dir;
let language = tree_sitter_ql::language();
let dbscheme = tree_sitter_ql_dbscheme::language();
@@ -114,7 +109,7 @@ fn main() -> std::io::Result<()> {
return Ok(());
}
let path = PathBuf::from(line).canonicalize()?;
let src_archive_file = path_for(&src_archive_dir, &path, "");
let src_archive_file = path_for(&source_archive_dir, &path, "");
let source = std::fs::read(&path)?;
let code_ranges = vec![];
let mut trap_writer = trap::Writer::new();

View File

@@ -1,8 +1,20 @@
use clap::Args;
use std::path::PathBuf;
use codeql_extractor::generator::{generate, language::Language};
fn main() -> std::io::Result<()> {
#[derive(Args)]
pub struct Options {
/// Path of the generated dbscheme file
#[arg(long)]
dbscheme: PathBuf,
/// Path of the generated QLL file
#[arg(long)]
library: PathBuf,
}
pub fn run(options: Options) -> std::io::Result<()> {
tracing_subscriber::fmt()
.with_target(false)
.without_time()
@@ -10,25 +22,6 @@ fn main() -> std::io::Result<()> {
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.init();
let matches = clap::Command::new("QL dbscheme generator")
.version("1.0")
.author("GitHub")
.about("CodeQL QL dbscheme generator")
.args(&[
clap::arg!(--dbscheme <FILE> "Path of the generated dbscheme file"),
clap::arg!(--library <FILE> "Path of the generated QLL file"),
])
.get_matches();
let dbscheme_path = matches
.get_one::<String>("dbscheme")
.expect("missing --dbscheme");
let dbscheme_path = PathBuf::from(dbscheme_path);
let ql_library_path = matches
.get_one::<String>("library")
.expect("missing --library");
let ql_library_path = PathBuf::from(ql_library_path);
let languages = vec![
Language {
name: "QL".to_owned(),
@@ -52,5 +45,5 @@ fn main() -> std::io::Result<()> {
},
];
generate(languages, dbscheme_path, ql_library_path)
generate(languages, options.dbscheme, options.library)
}

23
ql/extractor/src/main.rs Normal file
View File

@@ -0,0 +1,23 @@
use clap::Parser;
mod autobuilder;
mod extractor;
mod generator;
#[derive(Parser)]
#[command(author, version, about)]
enum Cli {
Extract(extractor::Options),
Generate(generator::Options),
Autobuild(autobuilder::Options),
}
fn main() -> std::io::Result<()> {
let cli = Cli::parse();
match cli {
Cli::Extract(options) => extractor::run(options),
Cli::Generate(options) => generator::run(options),
Cli::Autobuild(options) => autobuilder::run(options),
}
}

View File

@@ -1,6 +1,6 @@
cargo build --release
cargo run --release --bin generator -- --dbscheme ql/src/ql.dbscheme --library ql/src/codeql_ql/ast/internal/TreeSitter.qll
cargo run --release --bin codeql-extractor-ql -- generate --dbscheme ql/src/ql.dbscheme --library ql/src/codeql_ql/ast/internal/TreeSitter.qll
codeql query format -i ql\src\codeql_ql\ast\internal\TreeSitter.qll
if (Test-Path -Path extractor-pack) {
@@ -10,5 +10,4 @@ mkdir extractor-pack | Out-Null
cp codeql-extractor.yml, ql\src\ql.dbscheme, ql\src\ql.dbscheme.stats extractor-pack
cp -Recurse tools extractor-pack
mkdir extractor-pack\tools\win64 | Out-Null
cp target\release\extractor.exe extractor-pack\tools\win64\extractor.exe
cp target\release\autobuilder.exe extractor-pack\tools\win64\autobuilder.exe
cp target\release\codeql-extractor-ql.exe extractor-pack\tools\win64\extractor.exe

View File

@@ -20,12 +20,11 @@ else
fi
cargo build --release
cargo run --release --bin generator -- --dbscheme ql/src/ql.dbscheme --library ql/src/codeql_ql/ast/internal/TreeSitter.qll
cargo run --release --bin codeql-extractor-ql -- generate --dbscheme ql/src/ql.dbscheme --library ql/src/codeql_ql/ast/internal/TreeSitter.qll
$CODEQL_BINARY query format -i ql/src/codeql_ql/ast/internal/TreeSitter.qll
rm -rf extractor-pack
mkdir -p extractor-pack
cp -r codeql-extractor.yml tools ql/src/ql.dbscheme ql/src/ql.dbscheme.stats extractor-pack/
mkdir -p extractor-pack/tools/${platform}
cp target/release/extractor extractor-pack/tools/${platform}/extractor
cp target/release/autobuilder extractor-pack/tools/${platform}/autobuilder
cp target/release/codeql-extractor-ql extractor-pack/tools/${platform}/extractor

View File

@@ -1,5 +1,5 @@
@echo off
type NUL && "%CODEQL_EXTRACTOR_QL_ROOT%\tools\%CODEQL_PLATFORM%\autobuilder"
type NUL && "%CODEQL_EXTRACTOR_QL_ROOT%\tools\%CODEQL_PLATFORM%\extractor" autobuild
exit /b %ERRORLEVEL%

View File

@@ -1,4 +1,4 @@
#!/bin/sh
exec "${CODEQL_EXTRACTOR_QL_ROOT}/tools/${CODEQL_PLATFORM}/autobuilder"
exec "${CODEQL_EXTRACTOR_QL_ROOT}/tools/${CODEQL_PLATFORM}/extractor" autobuild

View File

@@ -1,6 +1,7 @@
@echo off
type NUL && "%CODEQL_EXTRACTOR_QL_ROOT%\tools\win64\extractor.exe" ^
extract ^
--file-list "%1" ^
--source-archive-dir "%CODEQL_EXTRACTOR_QL_SOURCE_ARCHIVE_DIR%" ^
--output-dir "%CODEQL_EXTRACTOR_QL_TRAP_DIR%"

View File

@@ -3,6 +3,7 @@
set -eu
exec "${CODEQL_EXTRACTOR_QL_ROOT}/tools/${CODEQL_PLATFORM}/extractor" \
extract \
--file-list "$1" \
--source-archive-dir "$CODEQL_EXTRACTOR_QL_SOURCE_ARCHIVE_DIR" \
--output-dir "$CODEQL_EXTRACTOR_QL_TRAP_DIR"

View File

@@ -24,7 +24,7 @@ FILES=codeql-extractor.yml\
ql/lib/ruby.dbscheme.stats\
ql/lib/ruby.dbscheme
BIN_FILES=target/release/ruby-extractor$(EXE) target/release/ruby-autobuilder$(EXE)
BIN_FILES=target/release/codeql-extractor-ruby$(EXE)
extractor-common:
rm -rf build
@@ -37,15 +37,14 @@ extractor-common:
tools: $(BIN_FILES)
rm -rf tools/bin
mkdir tools/bin
cp -r target/release/ruby-autobuilder$(EXE) tools/bin/autobuilder$(EXE)
cp -r target/release/ruby-extractor$(EXE) tools/bin/extractor$(EXE)
cp -r target/release/codeql-extractor-ruby$(EXE) tools/bin/extractor$(EXE)
target/release/%$(EXE):
cd extractor && cargo build --release
dbscheme:
cd extractor && cargo build --release
extractor/target/release/generator --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
extractor/target/release/codeql-extractor-ruby generate --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
codeql query format -i ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
.PHONY: extractor
@@ -63,8 +62,7 @@ extractor: $(FILES) $(BIN_FILES)
cp tools/autobuild.cmd extractor-pack/tools/autobuild.cmd
cp ql/lib/ruby.dbscheme.stats extractor-pack/ruby.dbscheme.stats
cp ql/lib/ruby.dbscheme extractor-pack/ruby.dbscheme
cp extractor/target/release/extractor$(EXE) extractor-pack/tools/$(CODEQL_PLATFORM)/extractor$(EXE)
cp extractor/target/release/autobuilder$(EXE) extractor-pack/tools/$(CODEQL_PLATFORM)/autobuilder$(EXE)
cp extractor/target/release/codeql-extractor-ruby$(EXE) extractor-pack/tools/$(CODEQL_PLATFORM)/extractor$(EXE)
test: extractor dbscheme
codeql test run --check-databases --check-unused-labels --check-repeated-labels --check-redefined-labels --check-use-before-definition --search-path extractor-pack --consistency-queries ql/consistency-queries ql/test

Binary file not shown.

View File

@@ -1,5 +1,6 @@
[package]
name = "ruby-extractor"
name = "codeql-extractor-ruby"
description = "CodeQL Ruby extractor"
version = "0.1.0"
authors = ["GitHub"]
edition = "2018"
@@ -10,7 +11,7 @@ edition = "2018"
tree-sitter = "0.20"
tree-sitter-embedded-template = { git = "https://github.com/tree-sitter/tree-sitter-embedded-template.git", rev = "203f7bd3c1bbfbd98fc19add4b8fcb213c059205" }
tree-sitter-ruby = { git = "https://github.com/tree-sitter/tree-sitter-ruby.git", rev = "206c7077164372c596ffa8eaadb9435c28941364" }
clap = "4.2"
clap = { version = "4.2", features = ["derive"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3.3", features = ["env-filter"] }
rayon = "1.5.0"

View File

@@ -1,8 +1,13 @@
use clap::Args;
use std::env;
use std::path::PathBuf;
use std::process::Command;
fn main() -> std::io::Result<()> {
#[derive(Args)]
// The autobuilder takes no command-line options, but this may change in the future.
pub struct Options {}
pub fn run(_: Options) -> std::io::Result<()> {
let dist = env::var("CODEQL_DIST").expect("CODEQL_DIST not set");
let db = env::var("CODEQL_EXTRACTOR_RUBY_WIP_DATABASE")
.expect("CODEQL_EXTRACTOR_RUBY_WIP_DATABASE not set");

View File

@@ -1,43 +0,0 @@
use clap::arg;
use std::path::PathBuf;
use codeql_extractor::generator::{generate, language::Language};
fn main() -> std::io::Result<()> {
tracing_subscriber::fmt()
.with_target(false)
.without_time()
.with_level(true)
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.init();
let matches = clap::Command::new("Ruby dbscheme generator")
.version("1.0")
.author("GitHub")
.about("CodeQL Ruby dbscheme generator")
.arg(arg!(--dbscheme <FILE> "Path of the generated dbscheme file"))
.arg(arg!(--library <FILE> "Path of the generated QLL file"))
.get_matches();
let dbscheme_path = matches
.get_one::<String>("dbscheme")
.expect("missing --dbscheme");
let dbscheme_path = PathBuf::from(dbscheme_path);
let ql_library_path = matches
.get_one::<String>("library")
.expect("missing --library");
let ql_library_path = PathBuf::from(ql_library_path);
let languages = vec![
Language {
name: "Ruby".to_owned(),
node_types: tree_sitter_ruby::NODE_TYPES,
},
Language {
name: "Erb".to_owned(),
node_types: tree_sitter_embedded_template::NODE_TYPES,
},
];
generate(languages, dbscheme_path, ql_library_path)
}

View File

@@ -1,7 +1,5 @@
#[macro_use]
extern crate lazy_static;
use clap::arg;
use clap::Args;
use lazy_static::lazy_static;
use rayon::prelude::*;
use std::borrow::Cow;
use std::fs;
@@ -11,23 +9,22 @@ use tree_sitter::{Language, Parser, Range};
use codeql_extractor::{diagnostics, extractor, file_paths, node_types, trap};
lazy_static! {
static ref CP_NUMBER: regex::Regex = regex::Regex::new("cp([0-9]+)").unwrap();
#[derive(Args)]
pub struct Options {
/// Sets a custom source achive folder
#[arg(long)]
source_archive_dir: String,
/// Sets a custom trap folder
#[arg(long)]
output_dir: String,
/// A text file containing the paths of the files to extract
#[arg(long)]
file_list: String,
}
/// Returns the `encoding::Encoding` corresponding to the given encoding name, if one exists.
fn encoding_from_name(encoding_name: &str) -> Option<&(dyn encoding::Encoding + Send + Sync)> {
match encoding::label::encoding_from_whatwg_label(encoding_name) {
s @ Some(_) => s,
None => CP_NUMBER.captures(encoding_name).and_then(|cap| {
encoding::label::encoding_from_windows_code_page(
str::parse(cap.get(1).unwrap().as_str()).unwrap(),
)
}),
}
}
fn main() -> std::io::Result<()> {
pub fn run(options: Options) -> std::io::Result<()> {
tracing_subscriber::fmt()
.with_target(false)
.without_time()
@@ -82,29 +79,11 @@ fn main() -> std::io::Result<()> {
.build_global()
.unwrap();
let matches = clap::Command::new("Ruby extractor")
.version("1.0")
.author("GitHub")
.about("CodeQL Ruby extractor")
.arg(arg!(--"source-archive-dir" <DIR> "Sets a custom source archive folder"))
.arg(arg!(--"output-dir" <DIR> "Sets a custom trap folder"))
.arg(arg!(--"file-list" <FILE_LIST> "A text file containing the paths of the files to extract"))
.get_matches();
let src_archive_dir = file_paths::path_from_string(&options.source_archive_dir);
let src_archive_dir = matches
.get_one::<String>("source-archive-dir")
.expect("missing --source-archive-dir");
let src_archive_dir = file_paths::path_from_string(src_archive_dir);
let trap_dir = file_paths::path_from_string(&options.output_dir);
let trap_dir = matches
.get_one::<String>("output-dir")
.expect("missing --output-dir");
let trap_dir = file_paths::path_from_string(&trap_dir);
let file_list = matches
.get_one::<String>("file-list")
.expect("missing --file-list");
let file_list = fs::File::open(file_paths::path_from_string(&file_list))?;
let file_list = fs::File::open(file_paths::path_from_string(&options.file_list))?;
let language = tree_sitter_ruby::language();
let erb = tree_sitter_embedded_template::language();
@@ -242,6 +221,22 @@ fn main() -> std::io::Result<()> {
write_trap(&trap_dir, path, &trap_writer, trap_compression)
}
lazy_static! {
static ref CP_NUMBER: regex::Regex = regex::Regex::new("cp([0-9]+)").unwrap();
}
/// Returns the `encoding::Encoding` corresponding to the given encoding name, if one exists.
fn encoding_from_name(encoding_name: &str) -> Option<&(dyn encoding::Encoding + Send + Sync)> {
match encoding::label::encoding_from_whatwg_label(encoding_name) {
s @ Some(_) => s,
None => CP_NUMBER.captures(encoding_name).and_then(|cap| {
encoding::label::encoding_from_windows_code_page(
str::parse(cap.get(1).unwrap().as_str()).unwrap(),
)
}),
}
}
fn write_trap(
trap_dir: &Path,
path: PathBuf,

View File

@@ -0,0 +1,37 @@
use clap::Args;
use std::path::PathBuf;
use codeql_extractor::generator::{generate, language::Language};
#[derive(Args)]
pub struct Options {
/// Path of the generated dbscheme file
#[arg(long)]
dbscheme: PathBuf,
/// Path of the generated QLL file
#[arg(long)]
library: PathBuf,
}
pub fn run(options: Options) -> std::io::Result<()> {
tracing_subscriber::fmt()
.with_target(false)
.without_time()
.with_level(true)
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.init();
let languages = vec![
Language {
name: "Ruby".to_owned(),
node_types: tree_sitter_ruby::NODE_TYPES,
},
Language {
name: "Erb".to_owned(),
node_types: tree_sitter_embedded_template::NODE_TYPES,
},
];
generate(languages, options.dbscheme, options.library)
}

View File

@@ -0,0 +1,23 @@
use clap::Parser;
mod autobuilder;
mod extractor;
mod generator;
#[derive(Parser)]
#[command(author, version, about)]
enum Cli {
Extract(extractor::Options),
Generate(generator::Options),
Autobuild(autobuilder::Options),
}
fn main() -> std::io::Result<()> {
let cli = Cli::parse();
match cli {
Cli::Extract(options) => extractor::run(options),
Cli::Generate(options) => generator::run(options),
Cli::Autobuild(options) => autobuilder::run(options),
}
}

View File

@@ -18,23 +18,41 @@
import codeql.ruby.security.KernelOpenQuery
import codeql.ruby.AST
import codeql.ruby.ApiGraphs
import codeql.ruby.DataFlow
from AmbiguousPathCall call
where
not hasConstantPrefix(call.getPathArgument().getALocalSource().asExpr().getExpr()) and
call.getNumberOfArguments() > 0 and
not hasConstantPrefix(call.getPathArgument()) and
not call.getPathArgument().getALocalSource() =
API::getTopLevelMember("File").getAMethodCall("join")
select call,
"Call to " + call.getName() + " with a non-constant value. Consider replacing it with " +
call.getReplacement() + "."
predicate hasConstantPrefix(Expr e) {
predicate hasConstantPrefix(DataFlow::Node node) {
hasConstantPrefix(node.getALocalSource())
or
// if it's a format string, then the first argument is not a constant string
e.(StringlikeLiteral).getComponent(0) instanceof StringTextComponent
node.asExpr().getExpr().(StringlikeLiteral).getComponent(0) instanceof StringTextComponent
or
// it is not a constant string argument
exists(e.getConstantValue())
exists(node.getConstantValue())
or
// not a concatenation that starts with a constant string
hasConstantPrefix(e.(AddExpr).getLeftOperand())
exists(DataFlow::ExprNode prefix |
node.asExpr().getExpr().(AddExpr).getLeftOperand() = prefix.asExpr().getExpr() and
hasConstantPrefix(prefix)
)
or
// is a .freeze call on a constant string
exists(DataFlow::CallNode call | node = call and call.getMethodName() = "freeze" |
hasConstantPrefix(call.getReceiver())
)
or
// is a constant read of a constant string
exists(DataFlow::Node constant |
constant.asExpr().getExpr() = node.asExpr().getExpr().(ConstantReadAccess).getValue() and
hasConstantPrefix(constant)
)
}

View File

@@ -1,11 +1,11 @@
| NonConstantKernelOpen.rb:4:5:4:14 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:5:5:5:17 | call to read | Call to IO.read with a non-constant value. Consider replacing it with File.read. |
| NonConstantKernelOpen.rb:6:5:6:18 | call to write | Call to IO.write with a non-constant value. Consider replacing it with File.write. |
| NonConstantKernelOpen.rb:7:5:7:20 | call to binread | Call to IO.binread with a non-constant value. Consider replacing it with File.binread. |
| NonConstantKernelOpen.rb:8:5:8:21 | call to binwrite | Call to IO.binwrite with a non-constant value. Consider replacing it with File.binwrite. |
| NonConstantKernelOpen.rb:9:5:9:20 | call to foreach | Call to IO.foreach with a non-constant value. Consider replacing it with File.foreach. |
| NonConstantKernelOpen.rb:10:5:10:22 | call to readlines | Call to IO.readlines with a non-constant value. Consider replacing it with File.readlines. |
| NonConstantKernelOpen.rb:11:5:11:18 | call to open | Call to URI.open with a non-constant value. Consider replacing it with URI(<uri>).open. |
| NonConstantKernelOpen.rb:15:5:15:21 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:25:5:25:33 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:33:5:33:14 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:7:5:7:14 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:8:5:8:17 | call to read | Call to IO.read with a non-constant value. Consider replacing it with File.read. |
| NonConstantKernelOpen.rb:9:5:9:18 | call to write | Call to IO.write with a non-constant value. Consider replacing it with File.write. |
| NonConstantKernelOpen.rb:10:5:10:20 | call to binread | Call to IO.binread with a non-constant value. Consider replacing it with File.binread. |
| NonConstantKernelOpen.rb:11:5:11:21 | call to binwrite | Call to IO.binwrite with a non-constant value. Consider replacing it with File.binwrite. |
| NonConstantKernelOpen.rb:12:5:12:20 | call to foreach | Call to IO.foreach with a non-constant value. Consider replacing it with File.foreach. |
| NonConstantKernelOpen.rb:13:5:13:22 | call to readlines | Call to IO.readlines with a non-constant value. Consider replacing it with File.readlines. |
| NonConstantKernelOpen.rb:14:5:14:18 | call to open | Call to URI.open with a non-constant value. Consider replacing it with URI(<uri>).open. |
| NonConstantKernelOpen.rb:18:5:18:21 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:28:5:28:33 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |
| NonConstantKernelOpen.rb:46:5:46:14 | call to open | Call to Kernel.open with a non-constant value. Consider replacing it with File.open. |

View File

@@ -1,4 +1,7 @@
class UsersController < ActionController::Base
CONSTANT = "constant"
CONSTANT_WITH_FREEZE = "constant-with-freeze".freeze
def create
file = params[:file]
open(file) # BAD
@@ -30,6 +33,16 @@ class UsersController < ActionController::Base
IO.write(File.join("foo", "bar.txt"), "bar") # GOOD
IO.read(CONSTANT) # GOOD
IO.read(CONSTANT + file) # GOOD
IO.read(CONSTANT_WITH_FREEZE) # GOOD
IO.read(CONSTANT_WITH_FREEZE + file) # GOOD
open.where(external: false) # GOOD - an open method is called without arguments
open(file) # BAD - sanity check to verify that file was not mistakenly marked as sanitized
end
end

View File

@@ -2,7 +2,7 @@ cd extractor
cargo build --release
cd ..
extractor\target\release\generator --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
extractor\target\release\codeql-extractor-ruby -- generate --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
codeql query format -i ql\lib\codeql/ruby\ast\internal\TreeSitter.qll
@@ -12,5 +12,4 @@ cp codeql-extractor.yml, ql\lib\ruby.dbscheme, ql\lib\ruby.dbscheme.stats extrac
cp -Recurse tools extractor-pack
cp -Recurse downgrades extractor-pack
mkdir extractor-pack\tools\win64 | Out-Null
cp extractor\target\release\extractor.exe extractor-pack\tools\win64\extractor.exe
cp extractor\target\release\autobuilder.exe extractor-pack\tools\win64\autobuilder.exe
cp extractor\target\release\codeql-extractor-ruby.exe extractor-pack\tools\win64\extractor.exe

View File

@@ -22,7 +22,7 @@ if [[ "$CARGO" == "cross" ]]; then
BIN_DIR=extractor/target/x86_64-unknown-linux-gnu/release
fi
"$BIN_DIR/generator" --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
"$BIN_DIR/codeql-extractor-ruby" generate --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
codeql query format -i ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
@@ -30,5 +30,4 @@ rm -rf extractor-pack
mkdir -p extractor-pack
cp -r codeql-extractor.yml downgrades tools ql/lib/ruby.dbscheme ql/lib/ruby.dbscheme.stats extractor-pack/
mkdir -p extractor-pack/tools/${platform}
cp "$BIN_DIR/extractor" extractor-pack/tools/${platform}/extractor
cp "$BIN_DIR/autobuilder" extractor-pack/tools/${platform}/autobuilder
cp "$BIN_DIR/codeql-extractor-ruby" extractor-pack/tools/${platform}/extractor

View File

@@ -1,5 +1,5 @@
@echo off
type NUL && "%CODEQL_EXTRACTOR_RUBY_ROOT%\tools\%CODEQL_PLATFORM%\autobuilder"
type NUL && "%CODEQL_EXTRACTOR_RUBY_ROOT%\tools\%CODEQL_PLATFORM%\extractor" autobuild
exit /b %ERRORLEVEL%

View File

@@ -1,3 +1,3 @@
#!/bin/sh
exec "${CODEQL_EXTRACTOR_RUBY_ROOT}/tools/${CODEQL_PLATFORM}/autobuilder"
exec "${CODEQL_EXTRACTOR_RUBY_ROOT}/tools/${CODEQL_PLATFORM}/extractor" autobuild

View File

@@ -1,6 +1,7 @@
@echo off
type NUL && "%CODEQL_EXTRACTOR_RUBY_ROOT%\tools\win64\extractor.exe" ^
extract ^
--file-list "%1" ^
--source-archive-dir "%CODEQL_EXTRACTOR_RUBY_SOURCE_ARCHIVE_DIR%" ^
--output-dir "%CODEQL_EXTRACTOR_RUBY_TRAP_DIR%"

View File

@@ -3,6 +3,7 @@
set -eu
exec "${CODEQL_EXTRACTOR_RUBY_ROOT}/tools/${CODEQL_PLATFORM}/extractor" \
extract \
--file-list "$1" \
--source-archive-dir "$CODEQL_EXTRACTOR_RUBY_SOURCE_ARCHIVE_DIR" \
--output-dir "$CODEQL_EXTRACTOR_RUBY_TRAP_DIR"

View File

@@ -179,3 +179,25 @@ class ApplyExprCfgNode extends ExprCfgNode {
class CallExprCfgNode extends ApplyExprCfgNode {
override CallExpr e;
}
/** A control-flow node that wraps a key-path application. */
class KeyPathApplicationExprCfgNode extends ExprCfgNode {
override KeyPathApplicationExpr e;
/**
* Gets the control-flow node that wraps the key-path of
* this control-flow element.
*/
CfgNode getKeyPath() { result.getAst() = e.getKeyPath() }
/**
* Gets the control-flow node that wraps the base of
* this control-flow element.
*/
CfgNode getBase() { result.getAst() = e.getBase() }
}
/** A control-flow node that wraps a key-path expression. */
class KeyPathExprCfgNode extends ExprCfgNode {
override KeyPathExpr e;
}

View File

@@ -58,7 +58,7 @@ module CfgScope {
}
private class KeyPathScope extends Range_ instanceof KeyPathExpr {
AstControlFlowTree tree;
KeyPathControlFlowTree tree;
KeyPathScope() { tree.getAst() = this }
@@ -76,6 +76,12 @@ module CfgScope {
final override predicate exit(ControlFlowElement last, Completion c) { last(tree, last, c) }
}
private class KeyPathControlFlowTree extends StandardPostOrderTree, KeyPathElement {
final override ControlFlowElement getChildElement(int i) {
result.asAstNode() = expr.getComponent(i)
}
}
}
/** Holds if `first` is first executed when entering `scope`. */
@@ -88,6 +94,14 @@ predicate succExit(CfgScope::Range_ scope, ControlFlowElement last, Completion c
scope.exit(last, c)
}
private class KeyPathComponentTree extends AstStandardPostOrderTree {
override KeyPathComponent ast;
final override ControlFlowElement getChildElement(int i) {
result.asAstNode() = ast.getSubscriptArgument(i).getExpr().getFullyConverted()
}
}
/**
* Control-flow for statements.
*/

View File

@@ -97,9 +97,12 @@ private module Frameworks {
private import codeql.swift.frameworks.Alamofire.Alamofire
private import codeql.swift.security.CleartextLoggingExtensions
private import codeql.swift.security.CleartextStorageDatabaseExtensions
private import codeql.swift.security.ECBEncryptionExtensions
private import codeql.swift.security.HardcodedEncryptionKeyExtensions
private import codeql.swift.security.PathInjectionExtensions
private import codeql.swift.security.PredicateInjectionExtensions
private import codeql.swift.security.StringLengthConflationExtensions
private import codeql.swift.security.WeakSensitiveDataHashingExtensions
}
/**

View File

@@ -21,7 +21,39 @@ module Ssa {
class ExitBasicBlock = BasicBlocks::ExitBasicBlock;
class SourceVariable = VarDecl;
private newtype TSourceVariable =
TNormalSourceVariable(VarDecl v) or
TKeyPathSourceVariable(EntryNode entry) { entry.getScope() instanceof KeyPathExpr }
abstract class SourceVariable extends TSourceVariable {
abstract string toString();
VarDecl asVarDecl() { none() }
EntryNode asKeyPath() { none() }
DeclRefExpr getAnAccess() { result.getDecl() = this.asVarDecl() }
}
private class NormalSourceVariable extends SourceVariable, TNormalSourceVariable {
VarDecl v;
NormalSourceVariable() { this = TNormalSourceVariable(v) }
override string toString() { result = v.toString() }
override VarDecl asVarDecl() { result = v }
}
private class KeyPathSourceVariable extends SourceVariable, TKeyPathSourceVariable {
EntryNode enter;
KeyPathSourceVariable() { this = TKeyPathSourceVariable(enter) }
override string toString() { result = enter.toString() }
override EntryNode asKeyPath() { result = enter }
}
predicate variableWrite(BasicBlock bb, int i, SourceVariable v, boolean certain) {
exists(AssignExpr assign |
@@ -40,17 +72,22 @@ module Ssa {
// ```
exists(NamedPattern pattern |
bb.getNode(i).getNode().asAstNode() = pattern and
v = pattern.getVarDecl() and
v.asVarDecl() = pattern.getVarDecl() and
certain = true
)
or
v instanceof ParamDecl and
bb.getNode(i).getNode().asAstNode() = v and
exists(ParamDecl p |
p = v.asVarDecl() and
bb.getNode(i).getNode().asAstNode() = p and
certain = true
)
or
bb.getNode(i) = v.asKeyPath() and
certain = true
or
// Mark the subexpression as a write of the local variable declared in the `TapExpr`.
exists(TapExpr tap |
v = tap.getVar() and
v.asVarDecl() = tap.getVar() and
bb.getNode(i).getNode().asAstNode() = tap.getSubExpr() and
certain = true
)
@@ -60,7 +97,7 @@ module Ssa {
exists(DeclRefExpr ref |
not isLValue(ref) and
bb.getNode(i).getNode().asAstNode() = ref and
v = ref.getDecl() and
v.asVarDecl() = ref.getDecl() and
certain = true
)
or
@@ -71,24 +108,26 @@ module Ssa {
)
or
exists(ExitNode exit, AbstractFunctionDecl func |
func.getAParam() = v or func.getSelfParam() = v
|
[func.getAParam(), func.getSelfParam()] = v.asVarDecl() and
bb.getNode(i) = exit and
modifiableParam(v) and
modifiableParam(v.asVarDecl()) and
bb.getScope() = func and
certain = true
)
or
// Mark the `TapExpr` as a read of the local variable.
exists(TapExpr tap |
v = tap.getVar() and
v.asVarDecl() = tap.getVar() and
bb.getNode(i).getNode().asAstNode() = tap and
certain = true
)
}
}
private module SsaImpl = SsaImplCommon::Make<SsaInput>;
/**
* INTERNAL: Do not use.
*/
module SsaImpl = SsaImplCommon::Make<SsaInput>;
cached
class Definition extends SsaImpl::Definition {
@@ -97,7 +136,7 @@ module Ssa {
cached
ControlFlowNode getARead() {
exists(VarDecl v, SsaInput::BasicBlock bb, int i |
exists(SsaInput::SourceVariable v, SsaInput::BasicBlock bb, int i |
SsaImpl::ssaDefReachesRead(v, this, bb, i) and
SsaInput::variableRead(bb, i, v, true) and
result = bb.getNode(i)

View File

@@ -74,6 +74,7 @@ newtype TDataFlowCall =
TPropertyGetterCall(PropertyGetterCfgNode getter) or
TPropertySetterCall(PropertySetterCfgNode setter) or
TPropertyObserverCall(PropertyObserverCfgNode observer) or
TKeyPathCall(KeyPathApplicationExprCfgNode keyPathApplication) or
TSummaryCall(FlowSummaryImpl::Public::SummarizedCallable c, Node receiver) {
FlowSummaryImpl::Private::summaryCallbackRange(c, receiver)
}
@@ -89,6 +90,9 @@ class DataFlowCall extends TDataFlowCall {
/** Gets the underlying source code call, if any. */
ApplyExprCfgNode asCall() { none() }
/** Gets the underlying key-path application node, if any. */
KeyPathApplicationExprCfgNode asKeyPath() { none() }
/**
* Gets the i'th argument of the call.
* The qualifier is considered to have index `-1`.
@@ -138,6 +142,25 @@ private class NormalCall extends DataFlowCall, TNormalCall {
override Location getLocation() { result = apply.getLocation() }
}
private class KeyPathCall extends DataFlowCall, TKeyPathCall {
private KeyPathApplicationExprCfgNode apply;
KeyPathCall() { this = TKeyPathCall(apply) }
override KeyPathApplicationExprCfgNode asKeyPath() { result = apply }
override CfgNode getArgument(int i) {
i = -1 and
result = apply.getBase()
}
override DataFlowCallable getEnclosingCallable() { result = TDataFlowFunc(apply.getScope()) }
override string toString() { result = apply.toString() }
override Location getLocation() { result = apply.getLocation() }
}
class PropertyGetterCall extends DataFlowCall, TPropertyGetterCall {
private PropertyGetterCfgNode getter;

View File

@@ -40,6 +40,22 @@ private class ExprNodeImpl extends ExprNode, NodeImpl {
override DataFlowCallable getEnclosingCallable() { result = TDataFlowFunc(n.getScope()) }
}
private class KeyPathComponentNodeImpl extends TKeyPathComponentNode, NodeImpl {
KeyPathComponent component;
KeyPathComponentNodeImpl() { this = TKeyPathComponentNode(component) }
override Location getLocationImpl() { result = component.getLocation() }
override string toStringImpl() { result = component.toString() }
override DataFlowCallable getEnclosingCallable() {
result.asSourceCallable() = component.getKeyPathExpr()
}
KeyPathComponent getComponent() { result = component }
}
private class PatternNodeImpl extends PatternNode, NodeImpl {
override Location getLocationImpl() { result = pattern.getLocation() }
@@ -78,6 +94,9 @@ private module Cached {
FlowSummaryImpl::Private::summaryNodeRange(c, state)
} or
TSourceParameterNode(ParamDecl param) or
TKeyPathParameterNode(EntryNode entry) { entry.getScope() instanceof KeyPathExpr } or
TKeyPathReturnNode(ExitNode exit) { exit.getScope() instanceof KeyPathExpr } or
TKeyPathComponentNode(KeyPathComponent component) or
TSummaryParameterNode(FlowSummary::SummarizedCallable c, ParameterPosition pos) {
FlowSummaryImpl::Private::summaryParameterNodeRange(c, pos)
} or
@@ -105,7 +124,7 @@ private module Cached {
(
nodeTo instanceof InoutReturnNode
implies
nodeTo.(InoutReturnNode).getParameter() = def.getSourceVariable()
nodeTo.(InoutReturnNode).getParameter() = def.getSourceVariable().asVarDecl()
)
}
@@ -135,7 +154,7 @@ private module Cached {
(
nodeTo instanceof InoutReturnNode
implies
nodeTo.(InoutReturnNode).getParameter() = def.getSourceVariable()
nodeTo.(InoutReturnNode).getParameter() = def.getSourceVariable().asVarDecl()
)
or
// use-use flow
@@ -198,6 +217,11 @@ private module Cached {
nodeFrom.asPattern().(TypedPattern).getSubPattern()
]
or
// Flow from the unique parameter of a key path expression to
// the first component in the chain.
nodeTo.(KeyPathComponentNodeImpl).getComponent() =
nodeFrom.(KeyPathParameterNode).getComponent(0)
or
// flow through a flow summary (extension of `SummaryModelCsv`)
FlowSummaryImpl::Private::Steps::summaryLocalStep(nodeFrom, nodeTo, true)
}
@@ -311,6 +335,28 @@ private module ParameterNodes {
override DataFlowCallable getEnclosingCallable() { this.isParameterOf(result, _) }
}
class KeyPathParameterNode extends ParameterNodeImpl, TKeyPathParameterNode {
private EntryNode entry;
KeyPathParameterNode() { this = TKeyPathParameterNode(entry) }
override predicate isParameterOf(DataFlowCallable c, ParameterPosition pos) {
c.asSourceCallable() = entry.getScope() and pos = TThisParameter()
}
override Location getLocationImpl() { result = entry.getLocation() }
override string toStringImpl() { result = entry.toString() }
override DataFlowCallable getEnclosingCallable() { this.isParameterOf(result, _) }
KeyPathComponent getComponent(int i) { result = entry.getScope().(KeyPathExpr).getComponent(i) }
KeyPathComponent getAComponent() { result = this.getComponent(_) }
KeyPathExpr getKeyPathExpr() { result = entry.getScope() }
}
}
import ParameterNodes
@@ -412,6 +458,17 @@ private module ArgumentNodes {
FlowSummaryImpl::Private::summaryArgumentNode(call, this, pos)
}
}
class KeyPathArgumentNode extends ExprNode, ArgumentNode {
private KeyPathApplicationExprCfgNode keyPath;
KeyPathArgumentNode() { keyPath.getBase() = this.getCfgNode() }
override predicate argumentOf(DataFlowCall call, ArgumentPosition pos) {
call.asKeyPath() = keyPath and
pos = TThisArgument()
}
}
}
import ArgumentNodes
@@ -474,6 +531,24 @@ private module ReturnNodes {
override ReturnKind getKind() { result = rk }
}
class KeyPathReturnNodeImpl extends ReturnNode, TKeyPathReturnNode, NodeImpl {
ExitNode exit;
KeyPathReturnNodeImpl() { this = TKeyPathReturnNode(exit) }
override ReturnKind getKind() { result instanceof NormalReturnKind }
override ControlFlowNode getCfgNode() { result = exit }
override DataFlowCallable getEnclosingCallable() { result.asSourceCallable() = exit.getScope() }
override Location getLocationImpl() { result = exit.getLocation() }
override string toStringImpl() { result = exit.toString() }
KeyPathExpr getKeyPathExpr() { result = exit.getScope() }
}
}
import ReturnNodes
@@ -495,6 +570,16 @@ private module OutNodes {
}
}
class KeyPathOutNode extends OutNode, ExprNodeImpl {
KeyPathApplicationExprCfgNode keyPath;
KeyPathOutNode() { keyPath = this.getCfgNode() }
override DataFlowCall getCall(ReturnKind kind) {
result.asKeyPath() = keyPath and kind instanceof NormalReturnKind
}
}
class SummaryOutNode extends OutNode, SummaryNode {
SummaryOutNode() { FlowSummaryImpl::Private::summaryOutNode(_, this, _) }
@@ -658,6 +743,20 @@ predicate readStep(Node node1, ContentSet c, Node node2) {
node2.asPattern() = pat.getSubPattern() and
c instanceof OptionalSomeContentSet
)
or
// read of a component in a key-path expression chain
exists(KeyPathComponent component, FieldDecl f |
component = node1.(KeyPathComponentNodeImpl).getComponent() and
f = component.getDeclRef() and
c.isSingleton(any(Content::FieldContent ct | ct.getField() = f))
|
// the next node is either the next element in the chain
node2.(KeyPathComponentNodeImpl).getComponent() = component.getNextComponent()
or
// or the return node, if this is the last component in the chain
not exists(component.getNextComponent()) and
node2.(KeyPathReturnNodeImpl).getKeyPathExpr() = component.getKeyPathExpr()
)
}
/**
@@ -786,6 +885,9 @@ predicate lambdaCall(DataFlowCall call, LambdaCallKind kind, Node receiver) {
or
kind = TLambdaCallKind() and
receiver = call.(SummaryCall).getReceiver()
or
kind = TLambdaCallKind() and
receiver.asExpr() = call.asKeyPath().getExpr().(KeyPathApplicationExpr).getKeyPath()
}
/** Extra data-flow steps needed for lambda flow analysis. */

View File

@@ -1,4 +1,5 @@
private import codeql.swift.generated.KeyPathComponent
private import swift
class KeyPathComponent extends Generated::KeyPathComponent {
/**
@@ -35,4 +36,23 @@ class KeyPathComponent extends Generated::KeyPathComponent {
* Tuple indexing like `.1`.
*/
predicate isTupleIndexing() { getKind() = 9 }
/** Gets the underlying key-path expression which this is a component of. */
KeyPathExpr getKeyPathExpr() { result.getAComponent() = this }
/** Holds if this component is the i'th component of the underlying key-path expression. */
predicate hasIndex(int i) { any(KeyPathExpr e).getComponent(i) = this }
/** Gets the next component of the underlying key-path expression. */
KeyPathComponent getNextComponent() {
exists(int i, KeyPathExpr e |
hasKeyPathExprAndIndex(e, i, this) and
hasKeyPathExprAndIndex(e, i + 1, result)
)
}
}
/** Holds if `c` is the `i`th component of the key-path expression `e`. */
pragma[nomagic]
private predicate hasKeyPathExprAndIndex(KeyPathExpr e, int i, KeyPathComponent c) {
e.getComponent(i) = c
}

View File

@@ -2,6 +2,9 @@ private import codeql.swift.generated.decl.ExtensionDecl
class ExtensionDecl extends Generated::ExtensionDecl {
override string toString() {
result = "extension" // TODO: Once we extract the name of this one we can provide a better `toString`.
result = "extension of " + getExtendedTypeDecl().toString()
or
not exists(getExtendedTypeDecl()) and
result = "extension"
}
}

View File

@@ -9,28 +9,6 @@ private import codeql.swift.dataflow.TaintTracking
private import codeql.swift.security.CleartextLoggingExtensions
private import codeql.swift.security.SensitiveExprs
/**
* A taint-tracking configuration for cleartext logging of sensitive data vulnerabilities.
*/
deprecated class CleartextLoggingConfiguration extends TaintTracking::Configuration {
CleartextLoggingConfiguration() { this = "CleartextLoggingConfiguration" }
override predicate isSource(DataFlow::Node source) { source.asExpr() instanceof SensitiveExpr }
override predicate isSink(DataFlow::Node sink) { sink instanceof CleartextLoggingSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof CleartextLoggingSanitizer
}
// Disregard paths that contain other paths. This helps with performance.
override predicate isSanitizerIn(DataFlow::Node node) { this.isSource(node) }
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
any(CleartextLoggingAdditionalTaintStep s).step(n1, n2)
}
}
/**
* A taint-tracking configuration for cleartext logging of sensitive data vulnerabilities.
*/

View File

@@ -13,47 +13,7 @@ import codeql.swift.security.CleartextStorageDatabaseExtensions
* A taint configuration from sensitive information to expressions that are
* transmitted over a network.
*/
deprecated class CleartextStorageConfig extends TaintTracking::Configuration {
CleartextStorageConfig() { this = "CleartextStorageConfig" }
override predicate isSource(DataFlow::Node node) { node.asExpr() instanceof SensitiveExpr }
override predicate isSink(DataFlow::Node node) { node instanceof CleartextStorageDatabaseSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof CleartextStorageDatabaseSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(CleartextStorageDatabaseAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
override predicate isSanitizerIn(DataFlow::Node node) {
// make sources barriers so that we only report the closest instance
isSource(node)
}
override predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
// flow out from fields of an `NSManagedObject` or `RealmSwiftObject` at the sink,
// for example in `realmObj.data = sensitive`.
isSink(node) and
exists(NominalTypeDecl d, Decl cx |
d.getType().getABaseType*().getUnderlyingType().getName() =
["NSManagedObject", "RealmSwiftObject"] and
cx.asNominalTypeDecl() = d and
c.getAReadContent().(DataFlow::Content::FieldContent).getField() = cx.getAMember()
)
or
// any default implicit reads
super.allowImplicitRead(node, c)
}
}
/**
* A taint configuration from sensitive information to expressions that are
* transmitted over a network.
*/
module CleartextStorageConfig implements DataFlow::ConfigSig {
module CleartextStorageDatabaseConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) { node.asExpr() instanceof SensitiveExpr }
predicate isSink(DataFlow::Node node) { node instanceof CleartextStorageDatabaseSink }
@@ -88,4 +48,4 @@ module CleartextStorageConfig implements DataFlow::ConfigSig {
* Detect taint flow of sensitive information to expressions that are
* transmitted over a network.
*/
module CleartextStorageFlow = TaintTracking::Global<CleartextStorageConfig>;
module CleartextStorageDatabaseFlow = TaintTracking::Global<CleartextStorageDatabaseConfig>;

View File

@@ -13,32 +13,7 @@ import codeql.swift.security.CleartextStoragePreferencesExtensions
* A taint configuration from sensitive information to expressions that are
* stored as preferences.
*/
deprecated class CleartextStorageConfig extends TaintTracking::Configuration {
CleartextStorageConfig() { this = "CleartextStorageConfig" }
override predicate isSource(DataFlow::Node node) { node.asExpr() instanceof SensitiveExpr }
override predicate isSink(DataFlow::Node node) { node instanceof CleartextStoragePreferencesSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof CleartextStoragePreferencesSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(CleartextStoragePreferencesAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
override predicate isSanitizerIn(DataFlow::Node node) {
// make sources barriers so that we only report the closest instance
this.isSource(node)
}
}
/**
* A taint configuration from sensitive information to expressions that are
* stored as preferences.
*/
module CleartextStorageConfig implements DataFlow::ConfigSig {
module CleartextStoragePreferencesConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) { node.asExpr() instanceof SensitiveExpr }
predicate isSink(DataFlow::Node node) { node instanceof CleartextStoragePreferencesSink }
@@ -61,4 +36,4 @@ module CleartextStorageConfig implements DataFlow::ConfigSig {
* Detect taint flow of sensitive information to expressions that are stored
* as preferences.
*/
module CleartextStorageFlow = TaintTracking::Global<CleartextStorageConfig>;
module CleartextStoragePreferencesFlow = TaintTracking::Global<CleartextStoragePreferencesConfig>;

View File

@@ -9,31 +9,6 @@ import codeql.swift.dataflow.DataFlow
import codeql.swift.dataflow.TaintTracking
import codeql.swift.security.CleartextTransmissionExtensions
/**
* A taint configuration from sensitive information to expressions that are
* transmitted over a network.
*/
deprecated class CleartextTransmissionConfig extends TaintTracking::Configuration {
CleartextTransmissionConfig() { this = "CleartextTransmissionConfig" }
override predicate isSource(DataFlow::Node node) { node.asExpr() instanceof SensitiveExpr }
override predicate isSink(DataFlow::Node node) { node instanceof CleartextTransmissionSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof CleartextTransmissionSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(CleartextTransmissionAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
override predicate isSanitizerIn(DataFlow::Node node) {
// make sources barriers so that we only report the closest instance
isSource(node)
}
}
/**
* A taint configuration from sensitive information to expressions that are
* transmitted over a network.

View File

@@ -57,11 +57,24 @@ private class RnCryptorEncryptionKeySink extends HardcodedEncryptionKeySink {
] and
c.getAMember() = f and
call.getStaticTarget() = f and
call.getArgumentWithLabel(["encryptionKey", "withEncryptionKey"]).getExpr() = this.asExpr()
call.getArgumentWithLabel(["encryptionKey", "withEncryptionKey", "hmacKey"]).getExpr() =
this.asExpr()
)
}
}
private class EncryptionKeySinks extends SinkModelCsv {
override predicate row(string row) {
row =
[
// Realm database library.
";Realm.Configuration;true;init(fileURL:inMemoryIdentifier:syncConfiguration:encryptionKey:readOnly:schemaVersion:migrationBlock:deleteRealmIfMigrationNeeded:shouldCompactOnLaunch:objectTypes:);;;Argument[3];encryption-key",
";Realm.Configuration;true;init(fileURL:inMemoryIdentifier:syncConfiguration:encryptionKey:readOnly:schemaVersion:migrationBlock:deleteRealmIfMigrationNeeded:shouldCompactOnLaunch:objectTypes:seedFilePath:);;;Argument[3];encryption-key",
";Realm.Configuration;true;encryptionKey;;;;encryption-key",
]
}
}
/**
* A sink defined in a CSV model.
*/

View File

@@ -127,7 +127,13 @@ private class PathInjectionSinks extends SinkModelCsv {
";DatabasePool;true;init(path:configuration:);;;Argument[0];path-injection",
";DatabaseQueue;true;init(path:configuration:);;;Argument[0];path-injection",
";DatabaseSnapshotPool;true;init(path:configuration:);;;Argument[0];path-injection",
";SerializedDatabase;true;init(path:configuration:defaultLabel:purpose:);;;Argument[0];path-injection"
";SerializedDatabase;true;init(path:configuration:defaultLabel:purpose:);;;Argument[0];path-injection",
// Realm
";Realm.Configuration;true;init(fileURL:inMemoryIdentifier:syncConfiguration:encryptionKey:readOnly:schemaVersion:migrationBlock:deleteRealmIfMigrationNeeded:shouldCompactOnLaunch:objectTypes:);;;Argument[0];path-injection",
";Realm.Configuration;true;init(fileURL:inMemoryIdentifier:syncConfiguration:encryptionKey:readOnly:schemaVersion:migrationBlock:deleteRealmIfMigrationNeeded:shouldCompactOnLaunch:objectTypes:seedFilePath:);;;Argument[0];path-injection",
";Realm.Configuration;true;init(fileURL:inMemoryIdentifier:syncConfiguration:encryptionKey:readOnly:schemaVersion:migrationBlock:deleteRealmIfMigrationNeeded:shouldCompactOnLaunch:objectTypes:seedFilePath:);;;Argument[10];path-injection",
";Realm.Configuration;true;fileURL;;;;path-injection",
";Realm.Configuration;true;seedFilePath;;;;path-injection",
]
}
}

View File

@@ -10,25 +10,6 @@ private import codeql.swift.dataflow.FlowSources
private import codeql.swift.dataflow.TaintTracking
private import codeql.swift.security.PathInjectionExtensions
/**
* A taint-tracking configuration for path injection vulnerabilities.
*/
deprecated class PathInjectionConfiguration extends TaintTracking::Configuration {
PathInjectionConfiguration() { this = "PathInjectionConfiguration" }
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
override predicate isSink(DataFlow::Node sink) { sink instanceof PathInjectionSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof PathInjectionSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node node1, DataFlow::Node node2) {
any(PathInjectionAdditionalTaintStep s).step(node1, node2)
}
}
/**
* A taint-tracking configuration for path injection vulnerabilities.
*/

View File

@@ -9,25 +9,6 @@ private import codeql.swift.dataflow.FlowSources
private import codeql.swift.dataflow.TaintTracking
private import codeql.swift.security.PredicateInjectionExtensions
/**
* A taint-tracking configuration for predicate injection vulnerabilities.
*/
deprecated class PredicateInjectionConf extends TaintTracking::Configuration {
PredicateInjectionConf() { this = "PredicateInjectionConf" }
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
override predicate isSink(DataFlow::Node sink) { sink instanceof PredicateInjectionSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof PredicateInjectionSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
any(PredicateInjectionAdditionalTaintStep s).step(n1, n2)
}
}
/**
* A taint-tracking configuration for predicate injection vulnerabilities.
*/

View File

@@ -10,25 +10,6 @@ import codeql.swift.dataflow.TaintTracking
import codeql.swift.dataflow.FlowSources
import codeql.swift.security.SqlInjectionExtensions
/**
* A taint configuration for tainted data that reaches a SQL sink.
*/
deprecated class SqlInjectionConfig extends TaintTracking::Configuration {
SqlInjectionConfig() { this = "SqlInjectionConfig" }
override predicate isSource(DataFlow::Node node) { node instanceof FlowSource }
override predicate isSink(DataFlow::Node node) { node instanceof SqlInjectionSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof SqlInjectionSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(SqlInjectionAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
}
/**
* A taint configuration for tainted data that reaches a SQL sink.
*/

View File

@@ -10,25 +10,6 @@ import codeql.swift.dataflow.TaintTracking
import codeql.swift.dataflow.FlowSources
import codeql.swift.security.UncontrolledFormatStringExtensions
/**
* A taint configuration for tainted data that reaches a format string.
*/
deprecated class TaintedFormatConfiguration extends TaintTracking::Configuration {
TaintedFormatConfiguration() { this = "TaintedFormatConfiguration" }
override predicate isSource(DataFlow::Node node) { node instanceof FlowSource }
override predicate isSink(DataFlow::Node node) { node instanceof UncontrolledFormatStringSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof UncontrolledFormatStringSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(UncontrolledFormatStringAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
}
/**
* A taint configuration for tainted data that reaches a format string.
*/

View File

@@ -9,25 +9,6 @@ import codeql.swift.dataflow.TaintTracking
import codeql.swift.dataflow.FlowSources
import codeql.swift.security.UnsafeJsEvalExtensions
/**
* A taint configuration from taint sources to sinks for this query.
*/
deprecated class UnsafeJsEvalConfig extends TaintTracking::Configuration {
UnsafeJsEvalConfig() { this = "UnsafeJsEvalConfig" }
override predicate isSource(DataFlow::Node node) { node instanceof FlowSource }
override predicate isSink(DataFlow::Node node) { node instanceof UnsafeJsEvalSink }
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof UnsafeJsEvalSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(UnsafeJsEvalAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
}
/**
* A taint configuration from taint sources to sinks for this query.
*/

View File

@@ -9,31 +9,6 @@ import codeql.swift.dataflow.TaintTracking
import codeql.swift.dataflow.FlowSources
import codeql.swift.security.UnsafeWebViewFetchExtensions
/**
* A taint configuration from taint sources to sinks (and `baseURL` arguments)
* for this query.
*/
deprecated class UnsafeWebViewFetchConfig extends TaintTracking::Configuration {
UnsafeWebViewFetchConfig() { this = "UnsafeWebViewFetchConfig" }
override predicate isSource(DataFlow::Node node) { node instanceof RemoteFlowSource }
override predicate isSink(DataFlow::Node node) {
exists(UnsafeWebViewFetchSink sink |
node = sink or
node.asExpr() = sink.getBaseUrl()
)
}
override predicate isSanitizer(DataFlow::Node sanitizer) {
sanitizer instanceof UnsafeWebViewFetchSanitizer
}
override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
any(UnsafeWebViewFetchAdditionalTaintStep s).step(nodeFrom, nodeTo)
}
}
/**
* A taint configuration from taint sources to sinks (and `baseURL` arguments)
* for this query.

View File

@@ -9,23 +9,6 @@ import codeql.swift.dataflow.FlowSources
import codeql.swift.dataflow.TaintTracking
import codeql.swift.security.XXEExtensions
/**
* A taint-tracking configuration for XML external entities (XXE) vulnerabilities.
*/
deprecated class XxeConfiguration extends TaintTracking::Configuration {
XxeConfiguration() { this = "XxeConfiguration" }
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
override predicate isSink(DataFlow::Node sink) { sink instanceof XxeSink }
override predicate isSanitizer(DataFlow::Node sanitizer) { sanitizer instanceof XxeSanitizer }
override predicate isAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
any(XxeAdditionalTaintStep s).step(n1, n2)
}
}
/**
* A taint-tracking configuration for XML external entities (XXE) vulnerabilities.
*/

View File

@@ -14,7 +14,7 @@
import swift
import codeql.swift.dataflow.DataFlow
import codeql.swift.security.CleartextStorageDatabaseQuery
import CleartextStorageFlow::PathGraph
import CleartextStorageDatabaseFlow::PathGraph
/**
* Gets a prettier node to use in the results.
@@ -27,10 +27,10 @@ DataFlow::Node cleanupNode(DataFlow::Node n) {
}
from
CleartextStorageFlow::PathNode sourceNode, CleartextStorageFlow::PathNode sinkNode,
DataFlow::Node cleanSink
CleartextStorageDatabaseFlow::PathNode sourceNode,
CleartextStorageDatabaseFlow::PathNode sinkNode, DataFlow::Node cleanSink
where
CleartextStorageFlow::flowPath(sourceNode, sinkNode) and
CleartextStorageDatabaseFlow::flowPath(sourceNode, sinkNode) and
cleanSink = cleanupNode(sinkNode.getNode())
select cleanSink, sourceNode, sinkNode,
"This operation stores '" + cleanSink.toString() +

View File

@@ -13,7 +13,7 @@
import swift
import codeql.swift.dataflow.DataFlow
import codeql.swift.security.CleartextStoragePreferencesQuery
import CleartextStorageFlow::PathGraph
import CleartextStoragePreferencesFlow::PathGraph
/**
* Gets a prettier node to use in the results.
@@ -26,10 +26,10 @@ DataFlow::Node cleanupNode(DataFlow::Node n) {
}
from
CleartextStorageFlow::PathNode sourceNode, CleartextStorageFlow::PathNode sinkNode,
DataFlow::Node cleanSink
CleartextStoragePreferencesFlow::PathNode sourceNode,
CleartextStoragePreferencesFlow::PathNode sinkNode, DataFlow::Node cleanSink
where
CleartextStorageFlow::flowPath(sourceNode, sinkNode) and
CleartextStoragePreferencesFlow::flowPath(sourceNode, sinkNode) and
cleanSink = cleanupNode(sinkNode.getNode())
select cleanSink, sourceNode, sinkNode,
"This operation stores '" + cleanSink.toString() + "' in " +

View File

@@ -231,7 +231,7 @@
| declarations.swift:132:5:132:15 | willSet | |
| declarations.swift:134:5:134:5 | self | |
| declarations.swift:134:5:134:14 | didSet | |
| declarations.swift:138:1:142:1 | extension | |
| declarations.swift:138:1:142:1 | extension of Int | |
| declarations.swift:139:3:141:3 | id() | |
| declarations.swift:139:8:139:8 | self | |
| declarations.swift:144:1:144:7 | { ... } | |

View File

@@ -1,4 +1,4 @@
| extensions.swift:5:1:9:1 | extension | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 3 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:1:1:1:11 | S | getNumberOfProtocols: | 0 |
| extensions.swift:11:1:15:1 | extension | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 3 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:3:1:3:10 | C | getNumberOfProtocols: | 0 |
| extensions.swift:21:1:23:1 | extension | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 1 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:1:1:1:11 | S | getNumberOfProtocols: | 1 |
| extensions.swift:27:1:29:1 | extension | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 1 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:3:1:3:10 | C | getNumberOfProtocols: | 2 |
| extensions.swift:5:1:9:1 | extension of S | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 3 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:1:1:1:11 | S | getNumberOfProtocols: | 0 |
| extensions.swift:11:1:15:1 | extension of C | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 3 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:3:1:3:10 | C | getNumberOfProtocols: | 0 |
| extensions.swift:21:1:23:1 | extension of S | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 1 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:1:1:1:11 | S | getNumberOfProtocols: | 1 |
| extensions.swift:27:1:29:1 | extension of C | getModule: | file://:0:0:0:0 | extensions | getNumberOfMembers: | 1 | getNumberOfGenericTypeParams: | 0 | getExtendedTypeDecl: | extensions.swift:3:1:3:10 | C | getNumberOfProtocols: | 2 |

View File

@@ -1,8 +1,8 @@
| extensions.swift:5:1:9:1 | extension | 0 | extensions.swift:6:5:6:37 | var ... = ... |
| extensions.swift:5:1:9:1 | extension | 1 | extensions.swift:6:9:6:9 | x |
| extensions.swift:5:1:9:1 | extension | 2 | extensions.swift:8:5:8:17 | foo() |
| extensions.swift:11:1:15:1 | extension | 0 | extensions.swift:12:5:12:38 | var ... = ... |
| extensions.swift:11:1:15:1 | extension | 1 | extensions.swift:12:9:12:9 | y |
| extensions.swift:11:1:15:1 | extension | 2 | extensions.swift:14:5:14:17 | bar() |
| extensions.swift:21:1:23:1 | extension | 0 | extensions.swift:22:5:22:17 | baz() |
| extensions.swift:27:1:29:1 | extension | 0 | extensions.swift:28:5:28:17 | baz() |
| extensions.swift:5:1:9:1 | extension of S | 0 | extensions.swift:6:5:6:37 | var ... = ... |
| extensions.swift:5:1:9:1 | extension of S | 1 | extensions.swift:6:9:6:9 | x |
| extensions.swift:5:1:9:1 | extension of S | 2 | extensions.swift:8:5:8:17 | foo() |
| extensions.swift:11:1:15:1 | extension of C | 0 | extensions.swift:12:5:12:38 | var ... = ... |
| extensions.swift:11:1:15:1 | extension of C | 1 | extensions.swift:12:9:12:9 | y |
| extensions.swift:11:1:15:1 | extension of C | 2 | extensions.swift:14:5:14:17 | bar() |
| extensions.swift:21:1:23:1 | extension of S | 0 | extensions.swift:22:5:22:17 | baz() |
| extensions.swift:27:1:29:1 | extension of C | 0 | extensions.swift:28:5:28:17 | baz() |

View File

@@ -1,3 +1,3 @@
| extensions.swift:21:1:23:1 | extension | 0 | extensions.swift:17:1:19:1 | P1 |
| extensions.swift:27:1:29:1 | extension | 0 | extensions.swift:17:1:19:1 | P1 |
| extensions.swift:27:1:29:1 | extension | 1 | extensions.swift:25:1:25:14 | P2 |
| extensions.swift:21:1:23:1 | extension of S | 0 | extensions.swift:17:1:19:1 | P1 |
| extensions.swift:27:1:29:1 | extension of C | 0 | extensions.swift:17:1:19:1 | P1 |
| extensions.swift:27:1:29:1 | extension of C | 1 | extensions.swift:25:1:25:14 | P2 |

View File

@@ -4134,7 +4134,7 @@ declarations.swift:
# 90|
# 95| [Comment] // Or by adding an access declaration
# 95|
# 138| [ExtensionDecl] extension
# 138| [ExtensionDecl] extension of Int
# 139| getMember(0): [ConcreteFuncDecl] id()
# 139| InterfaceType = (Int) -> () -> Int
# 139| getSelfParam(): [ParamDecl] self

View File

@@ -5680,12 +5680,6 @@ cfg.swift:
# 456| var ... = ...
#-----| -> kpGet_b_x
# 456| var ... = ...
#-----| -> kpGet_b_x
# 456| kpGet_b_x
#-----| -> kpGet_bs_0_x
# 456| kpGet_b_x
#-----| -> kpGet_bs_0_x
@@ -5693,193 +5687,122 @@ cfg.swift:
#-----| match -> #keyPath(...)
# 456| #keyPath(...)
#-----| -> var ... = ...
#-----| -> exit #keyPath(...) (normal)
# 456| #keyPath(...)
#-----| -> var ... = ...
#-----| -> exit #keyPath(...) (normal)
# 456| enter #keyPath(...)
#-----| -> #keyPath(...)
#-----| -> KeyPathComponent
# 456| exit #keyPath(...)
# 456| exit #keyPath(...) (normal)
#-----| -> exit #keyPath(...)
# 457| var ... = ...
#-----| -> kpGet_bs_0_x
# 456| KeyPathComponent
#-----| -> KeyPathComponent
# 456| KeyPathComponent
#-----| -> #keyPath(...)
# 457| var ... = ...
#-----| -> kpGet_bs_0_x
# 457| var ... = ...
#-----| -> kpGet_bs_0_x
# 457| kpGet_bs_0_x
#-----| -> kpGet_mayB_force_x
# 457| kpGet_bs_0_x
#-----| -> kpGet_mayB_force_x
# 457| kpGet_bs_0_x
#-----| -> kpGet_mayB_force_x
# 457| kpGet_bs_0_x
#-----| match -> #keyPath(...)
# 457| kpGet_bs_0_x
#-----| match -> #keyPath(...)
# 457| #keyPath(...)
#-----| -> var ... = ...
# 457| #keyPath(...)
#-----| -> var ... = ...
# 457| #keyPath(...)
#-----| -> var ... = ...
#-----| -> exit #keyPath(...) (normal)
# 457| #keyPath(...)
#-----| -> var ... = ...
# 457| enter #keyPath(...)
#-----| -> #keyPath(...)
#-----| -> KeyPathComponent
# 457| exit #keyPath(...)
# 457| exit #keyPath(...) (normal)
#-----| -> exit #keyPath(...)
# 458| var ... = ...
#-----| -> kpGet_mayB_force_x
# 457| KeyPathComponent
#-----| -> 0
# 457| KeyPathComponent
#-----| -> KeyPathComponent
# 457| 0
#-----| -> KeyPathComponent
# 457| KeyPathComponent
#-----| -> #keyPath(...)
# 458| var ... = ...
#-----| -> kpGet_mayB_force_x
# 458| var ... = ...
#-----| -> kpGet_mayB_force_x
# 458| var ... = ...
#-----| -> kpGet_mayB_force_x
# 458| kpGet_mayB_force_x
#-----| -> kpGet_mayB_x
# 458| kpGet_mayB_force_x
#-----| -> kpGet_mayB_x
# 458| kpGet_mayB_force_x
#-----| -> kpGet_mayB_x
# 458| kpGet_mayB_force_x
#-----| -> kpGet_mayB_x
# 458| kpGet_mayB_force_x
#-----| match -> #keyPath(...)
# 458| kpGet_mayB_force_x
#-----| match -> #keyPath(...)
# 458| kpGet_mayB_force_x
#-----| match -> #keyPath(...)
# 458| #keyPath(...)
#-----| -> var ... = ...
# 458| #keyPath(...)
#-----| -> var ... = ...
# 458| #keyPath(...)
#-----| -> var ... = ...
# 458| #keyPath(...)
#-----| -> var ... = ...
#-----| -> exit #keyPath(...) (normal)
# 458| #keyPath(...)
#-----| -> var ... = ...
# 458| enter #keyPath(...)
#-----| -> #keyPath(...)
#-----| -> KeyPathComponent
# 458| exit #keyPath(...)
# 458| exit #keyPath(...) (normal)
#-----| -> exit #keyPath(...)
# 459| var ... = ...
#-----| -> kpGet_mayB_x
# 458| KeyPathComponent
#-----| -> KeyPathComponent
# 458| KeyPathComponent
#-----| -> KeyPathComponent
# 458| KeyPathComponent
#-----| -> #keyPath(...)
# 459| var ... = ...
#-----| -> kpGet_mayB_x
# 459| var ... = ...
#-----| -> kpGet_mayB_x
# 459| var ... = ...
#-----| -> kpGet_mayB_x
# 459| var ... = ...
#-----| -> kpGet_mayB_x
# 459| kpGet_mayB_x
#-----| -> apply_kpGet_b_x
# 459| kpGet_mayB_x
#-----| -> apply_kpGet_b_x
# 459| kpGet_mayB_x
#-----| -> apply_kpGet_b_x
# 459| kpGet_mayB_x
#-----| -> apply_kpGet_b_x
# 459| kpGet_mayB_x
#-----| -> apply_kpGet_b_x
# 459| kpGet_mayB_x
#-----| match -> #keyPath(...)
# 459| kpGet_mayB_x
#-----| match -> #keyPath(...)
# 459| kpGet_mayB_x
#-----| match -> #keyPath(...)
# 459| kpGet_mayB_x
#-----| match -> #keyPath(...)
# 459| #keyPath(...)
#-----| -> var ... = ...
# 459| #keyPath(...)
#-----| -> var ... = ...
# 459| #keyPath(...)
#-----| -> var ... = ...
# 459| #keyPath(...)
#-----| -> var ... = ...
# 459| #keyPath(...)
#-----| -> var ... = ...
#-----| -> exit #keyPath(...) (normal)
# 459| #keyPath(...)
#-----| -> var ... = ...
# 459| enter #keyPath(...)
#-----| -> #keyPath(...)
#-----| -> KeyPathComponent
# 459| exit #keyPath(...)
# 459| exit #keyPath(...) (normal)
#-----| -> exit #keyPath(...)
# 461| var ... = ...
#-----| -> apply_kpGet_b_x
# 459| KeyPathComponent
#-----| -> KeyPathComponent
# 461| var ... = ...
#-----| -> apply_kpGet_b_x
# 459| KeyPathComponent
#-----| -> KeyPathComponent
# 461| var ... = ...
#-----| -> apply_kpGet_b_x
# 461| var ... = ...
#-----| -> apply_kpGet_b_x
# 459| KeyPathComponent
# 461| var ... = ...
#-----| -> apply_kpGet_b_x
@@ -5887,401 +5810,81 @@ cfg.swift:
# 461| apply_kpGet_b_x
#-----| -> apply_kpGet_bs_0_x
# 461| apply_kpGet_b_x
#-----| -> apply_kpGet_bs_0_x
# 461| apply_kpGet_b_x
#-----| -> apply_kpGet_bs_0_x
# 461| apply_kpGet_b_x
#-----| -> apply_kpGet_bs_0_x
# 461| apply_kpGet_b_x
#-----| -> apply_kpGet_bs_0_x
# 461| apply_kpGet_b_x
#-----| match -> a
# 461| apply_kpGet_b_x
#-----| match -> a
# 461| apply_kpGet_b_x
#-----| match -> a
# 461| apply_kpGet_b_x
#-----| match -> a
# 461| apply_kpGet_b_x
#-----| match -> a
# 461| a
#-----| -> kpGet_b_x
# 461| a
#-----| -> kpGet_b_x
# 461| a
#-----| -> kpGet_b_x
# 461| a
#-----| -> kpGet_b_x
# 461| a
#-----| -> kpGet_b_x
# 461| \...[...]
#-----| -> var ... = ...
# 461| \...[...]
#-----| -> var ... = ...
# 461| \...[...]
#-----| -> var ... = ...
# 461| \...[...]
#-----| -> var ... = ...
# 461| \...[...]
#-----| -> var ... = ...
# 461| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 461| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 461| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 461| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 461| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 461| kpGet_b_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 461| kpGet_b_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 461| kpGet_b_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 461| kpGet_b_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 461| kpGet_b_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 462| var ... = ...
#-----| -> apply_kpGet_bs_0_x
# 462| var ... = ...
#-----| -> apply_kpGet_bs_0_x
# 462| var ... = ...
#-----| -> apply_kpGet_bs_0_x
# 462| var ... = ...
#-----| -> apply_kpGet_bs_0_x
# 462| var ... = ...
#-----| -> apply_kpGet_bs_0_x
# 462| apply_kpGet_bs_0_x
#-----| -> apply_kpGet_mayB_force_x
# 462| apply_kpGet_bs_0_x
#-----| -> apply_kpGet_mayB_force_x
# 462| apply_kpGet_bs_0_x
#-----| -> apply_kpGet_mayB_force_x
# 462| apply_kpGet_bs_0_x
#-----| -> apply_kpGet_mayB_force_x
# 462| apply_kpGet_bs_0_x
#-----| -> apply_kpGet_mayB_force_x
# 462| apply_kpGet_bs_0_x
#-----| match -> a
# 462| apply_kpGet_bs_0_x
#-----| match -> a
# 462| apply_kpGet_bs_0_x
#-----| match -> a
# 462| apply_kpGet_bs_0_x
#-----| match -> a
# 462| apply_kpGet_bs_0_x
#-----| match -> a
# 462| a
#-----| -> kpGet_bs_0_x
# 462| a
#-----| -> kpGet_bs_0_x
# 462| a
#-----| -> kpGet_bs_0_x
# 462| a
#-----| -> kpGet_bs_0_x
# 462| a
#-----| -> kpGet_bs_0_x
# 462| \...[...]
#-----| -> var ... = ...
# 462| \...[...]
#-----| -> var ... = ...
# 462| \...[...]
#-----| -> var ... = ...
# 462| \...[...]
#-----| -> var ... = ...
# 462| \...[...]
#-----| -> var ... = ...
# 462| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 462| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 462| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 462| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 462| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 462| kpGet_bs_0_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 462| kpGet_bs_0_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 462| kpGet_bs_0_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 462| kpGet_bs_0_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 462| kpGet_bs_0_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 463| var ... = ...
#-----| -> apply_kpGet_mayB_force_x
# 463| var ... = ...
#-----| -> apply_kpGet_mayB_force_x
# 463| var ... = ...
#-----| -> apply_kpGet_mayB_force_x
# 463| var ... = ...
#-----| -> apply_kpGet_mayB_force_x
# 463| var ... = ...
#-----| -> apply_kpGet_mayB_force_x
# 463| apply_kpGet_mayB_force_x
#-----| -> apply_kpGet_mayB_x
# 463| apply_kpGet_mayB_force_x
#-----| -> apply_kpGet_mayB_x
# 463| apply_kpGet_mayB_force_x
#-----| -> apply_kpGet_mayB_x
# 463| apply_kpGet_mayB_force_x
#-----| -> apply_kpGet_mayB_x
# 463| apply_kpGet_mayB_force_x
#-----| -> apply_kpGet_mayB_x
# 463| apply_kpGet_mayB_force_x
#-----| match -> a
# 463| apply_kpGet_mayB_force_x
#-----| match -> a
# 463| apply_kpGet_mayB_force_x
#-----| match -> a
# 463| apply_kpGet_mayB_force_x
#-----| match -> a
# 463| apply_kpGet_mayB_force_x
#-----| match -> a
# 463| a
#-----| -> kpGet_mayB_force_x
# 463| a
#-----| -> kpGet_mayB_force_x
# 463| a
#-----| -> kpGet_mayB_force_x
# 463| a
#-----| -> kpGet_mayB_force_x
# 463| a
#-----| -> kpGet_mayB_force_x
# 463| \...[...]
#-----| -> var ... = ...
# 463| \...[...]
#-----| -> var ... = ...
# 463| \...[...]
#-----| -> var ... = ...
# 463| \...[...]
#-----| -> var ... = ...
# 463| \...[...]
#-----| -> var ... = ...
# 463| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 463| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 463| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 463| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 463| (WritableKeyPath<A, Int>) ...
#-----| -> \...[...]
# 463| kpGet_mayB_force_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 463| kpGet_mayB_force_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 463| kpGet_mayB_force_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 463| kpGet_mayB_force_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 463| kpGet_mayB_force_x
#-----| -> (WritableKeyPath<A, Int>) ...
# 464| var ... = ...
#-----| -> apply_kpGet_mayB_x
# 464| var ... = ...
#-----| -> apply_kpGet_mayB_x
# 464| var ... = ...
#-----| -> apply_kpGet_mayB_x
# 464| var ... = ...
#-----| -> apply_kpGet_mayB_x
# 464| var ... = ...
#-----| -> apply_kpGet_mayB_x
# 464| apply_kpGet_mayB_x
#-----| -> exit test(a:) (normal)
# 464| apply_kpGet_mayB_x
# 464| apply_kpGet_mayB_x
# 464| apply_kpGet_mayB_x
# 464| apply_kpGet_mayB_x
# 464| apply_kpGet_mayB_x
#-----| match -> a
# 464| apply_kpGet_mayB_x
#-----| match -> a
# 464| apply_kpGet_mayB_x
#-----| match -> a
# 464| apply_kpGet_mayB_x
#-----| match -> a
# 464| apply_kpGet_mayB_x
#-----| match -> a
# 464| a
#-----| -> kpGet_mayB_x
# 464| a
#-----| -> kpGet_mayB_x
# 464| a
#-----| -> kpGet_mayB_x
# 464| a
#-----| -> kpGet_mayB_x
# 464| a
#-----| -> kpGet_mayB_x
# 464| \...[...]
#-----| -> var ... = ...
# 464| \...[...]
#-----| -> var ... = ...
# 464| \...[...]
#-----| -> var ... = ...
# 464| \...[...]
#-----| -> var ... = ...
# 464| \...[...]
#-----| -> var ... = ...
# 464| (KeyPath<A, Int?>) ...
#-----| -> \...[...]
# 464| (KeyPath<A, Int?>) ...
#-----| -> \...[...]
# 464| (KeyPath<A, Int?>) ...
#-----| -> \...[...]
# 464| (KeyPath<A, Int?>) ...
#-----| -> \...[...]
# 464| (KeyPath<A, Int?>) ...
#-----| -> \...[...]
# 464| kpGet_mayB_x
#-----| -> (KeyPath<A, Int?>) ...
# 464| kpGet_mayB_x
#-----| -> (KeyPath<A, Int?>) ...
# 464| kpGet_mayB_x
#-----| -> (KeyPath<A, Int?>) ...
# 464| kpGet_mayB_x
#-----| -> (KeyPath<A, Int?>) ...
# 464| kpGet_mayB_x
#-----| -> (KeyPath<A, Int?>) ...

View File

@@ -0,0 +1,24 @@
nonUniqueSetRepresentation
breakInvariant2
breakInvariant3
breakInvariant4
breakInvariant5
multipleSuccessors
| cfg.swift:33:28:33:28 | ... is ... | no-match | cfg.swift:33:49:33:60 | call to isZero(x:) |
| cfg.swift:33:28:33:28 | ... is ... | no-match | cfg.swift:35:5:37:3 | case ... |
| cfg.swift:144:10:144:10 | =~ ... | no-match | cfg.swift:144:18:144:34 | ... .&&(_:_:) ... |
| cfg.swift:144:10:144:10 | =~ ... | no-match | cfg.swift:146:5:147:14 | case ... |
| cfg.swift:515:6:515:28 | #available | false | cfg.swift:515:42:515:46 | iOS 12 |
| cfg.swift:515:6:515:28 | #available | false | cfg.swift:519:10:519:10 | x |
| file://:0:0:0:0 | $interpolation | successor | cfg.swift:40:11:40:11 | OpaqueValueExpr |
| file://:0:0:0:0 | $interpolation | successor | cfg.swift:40:12:40:12 | .appendLiteral(_:) |
| file://:0:0:0:0 | $interpolation | successor | cfg.swift:263:10:263:10 | OpaqueValueExpr |
| file://:0:0:0:0 | $interpolation | successor | cfg.swift:263:11:263:11 | .appendLiteral(_:) |
simpleAndNormalSuccessors
deadEnd
| cfg.swift:33:49:33:60 | call to isZero(x:) |
| cfg.swift:144:18:144:34 | ... .&&(_:_:) ... |
| file://:0:0:0:0 | ... = ... |
| file://:0:0:0:0 | ... = ... |
nonUniqueSplitKind
nonUniqueListOrder

View File

@@ -0,0 +1 @@
import codeql.swift.controlflow.internal.ControlFlowGraphImplShared::Consistency

View File

@@ -247,6 +247,35 @@ edges
| test.swift:549:24:549:32 | call to source3() : | test.swift:549:13:549:33 | call to MyClass.init(s:) [str] : |
| test.swift:550:13:550:41 | call to Self.init(contentsOfFile:) [str] : | test.swift:535:9:535:9 | self [str] : |
| test.swift:550:13:550:41 | call to Self.init(contentsOfFile:) [str] : | test.swift:550:13:550:43 | .str |
| test.swift:567:8:567:11 | x : | test.swift:568:14:568:14 | x : |
| test.swift:568:5:568:5 | [post] self [x] : | test.swift:567:3:569:3 | self[return] [x] : |
| test.swift:568:14:568:14 | x : | test.swift:568:5:568:5 | [post] self [x] : |
| test.swift:573:11:573:24 | call to S.init(x:) [x] : | test.swift:575:13:575:13 | s [x] : |
| test.swift:573:11:573:24 | call to S.init(x:) [x] : | test.swift:578:13:578:13 | s [x] : |
| test.swift:573:16:573:23 | call to source() : | test.swift:567:8:567:11 | x : |
| test.swift:573:16:573:23 | call to source() : | test.swift:573:11:573:24 | call to S.init(x:) [x] : |
| test.swift:574:11:574:14 | enter #keyPath(...) [x] : | test.swift:574:14:574:14 | KeyPathComponent [x] : |
| test.swift:574:14:574:14 | KeyPathComponent [x] : | test.swift:574:11:574:14 | exit #keyPath(...) : |
| test.swift:575:13:575:13 | s [x] : | test.swift:574:11:574:14 | enter #keyPath(...) [x] : |
| test.swift:575:13:575:13 | s [x] : | test.swift:575:13:575:25 | \\...[...] |
| test.swift:577:36:577:38 | enter #keyPath(...) [x] : | test.swift:577:38:577:38 | KeyPathComponent [x] : |
| test.swift:577:38:577:38 | KeyPathComponent [x] : | test.swift:577:36:577:38 | exit #keyPath(...) : |
| test.swift:578:13:578:13 | s [x] : | test.swift:577:36:577:38 | enter #keyPath(...) [x] : |
| test.swift:578:13:578:13 | s [x] : | test.swift:578:13:578:32 | \\...[...] |
| test.swift:584:8:584:11 | s [x] : | test.swift:585:14:585:14 | s [x] : |
| test.swift:585:5:585:5 | [post] self [s, x] : | test.swift:584:3:586:3 | self[return] [s, x] : |
| test.swift:585:14:585:14 | s [x] : | test.swift:585:5:585:5 | [post] self [s, x] : |
| test.swift:590:11:590:24 | call to S.init(x:) [x] : | test.swift:591:18:591:18 | s [x] : |
| test.swift:590:16:590:23 | call to source() : | test.swift:567:8:567:11 | x : |
| test.swift:590:16:590:23 | call to source() : | test.swift:590:11:590:24 | call to S.init(x:) [x] : |
| test.swift:591:12:591:19 | call to S2.init(s:) [s, x] : | test.swift:593:13:593:13 | s2 [s, x] : |
| test.swift:591:18:591:18 | s [x] : | test.swift:584:8:584:11 | s [x] : |
| test.swift:591:18:591:18 | s [x] : | test.swift:591:12:591:19 | call to S2.init(s:) [s, x] : |
| test.swift:592:11:592:17 | enter #keyPath(...) [s, x] : | test.swift:592:15:592:15 | KeyPathComponent [s, x] : |
| test.swift:592:15:592:15 | KeyPathComponent [s, x] : | test.swift:592:17:592:17 | KeyPathComponent [x] : |
| test.swift:592:17:592:17 | KeyPathComponent [x] : | test.swift:592:11:592:17 | exit #keyPath(...) : |
| test.swift:593:13:593:13 | s2 [s, x] : | test.swift:592:11:592:17 | enter #keyPath(...) [s, x] : |
| test.swift:593:13:593:13 | s2 [s, x] : | test.swift:593:13:593:26 | \\...[...] |
nodes
| file://:0:0:0:0 | .a [x] : | semmle.label | .a [x] : |
| file://:0:0:0:0 | .str : | semmle.label | .str : |
@@ -514,6 +543,36 @@ nodes
| test.swift:549:24:549:32 | call to source3() : | semmle.label | call to source3() : |
| test.swift:550:13:550:41 | call to Self.init(contentsOfFile:) [str] : | semmle.label | call to Self.init(contentsOfFile:) [str] : |
| test.swift:550:13:550:43 | .str | semmle.label | .str |
| test.swift:567:3:569:3 | self[return] [x] : | semmle.label | self[return] [x] : |
| test.swift:567:8:567:11 | x : | semmle.label | x : |
| test.swift:568:5:568:5 | [post] self [x] : | semmle.label | [post] self [x] : |
| test.swift:568:14:568:14 | x : | semmle.label | x : |
| test.swift:573:11:573:24 | call to S.init(x:) [x] : | semmle.label | call to S.init(x:) [x] : |
| test.swift:573:16:573:23 | call to source() : | semmle.label | call to source() : |
| test.swift:574:11:574:14 | enter #keyPath(...) [x] : | semmle.label | enter #keyPath(...) [x] : |
| test.swift:574:11:574:14 | exit #keyPath(...) : | semmle.label | exit #keyPath(...) : |
| test.swift:574:14:574:14 | KeyPathComponent [x] : | semmle.label | KeyPathComponent [x] : |
| test.swift:575:13:575:13 | s [x] : | semmle.label | s [x] : |
| test.swift:575:13:575:25 | \\...[...] | semmle.label | \\...[...] |
| test.swift:577:36:577:38 | enter #keyPath(...) [x] : | semmle.label | enter #keyPath(...) [x] : |
| test.swift:577:36:577:38 | exit #keyPath(...) : | semmle.label | exit #keyPath(...) : |
| test.swift:577:38:577:38 | KeyPathComponent [x] : | semmle.label | KeyPathComponent [x] : |
| test.swift:578:13:578:13 | s [x] : | semmle.label | s [x] : |
| test.swift:578:13:578:32 | \\...[...] | semmle.label | \\...[...] |
| test.swift:584:3:586:3 | self[return] [s, x] : | semmle.label | self[return] [s, x] : |
| test.swift:584:8:584:11 | s [x] : | semmle.label | s [x] : |
| test.swift:585:5:585:5 | [post] self [s, x] : | semmle.label | [post] self [s, x] : |
| test.swift:585:14:585:14 | s [x] : | semmle.label | s [x] : |
| test.swift:590:11:590:24 | call to S.init(x:) [x] : | semmle.label | call to S.init(x:) [x] : |
| test.swift:590:16:590:23 | call to source() : | semmle.label | call to source() : |
| test.swift:591:12:591:19 | call to S2.init(s:) [s, x] : | semmle.label | call to S2.init(s:) [s, x] : |
| test.swift:591:18:591:18 | s [x] : | semmle.label | s [x] : |
| test.swift:592:11:592:17 | enter #keyPath(...) [s, x] : | semmle.label | enter #keyPath(...) [s, x] : |
| test.swift:592:11:592:17 | exit #keyPath(...) : | semmle.label | exit #keyPath(...) : |
| test.swift:592:15:592:15 | KeyPathComponent [s, x] : | semmle.label | KeyPathComponent [s, x] : |
| test.swift:592:17:592:17 | KeyPathComponent [x] : | semmle.label | KeyPathComponent [x] : |
| test.swift:593:13:593:13 | s2 [s, x] : | semmle.label | s2 [s, x] : |
| test.swift:593:13:593:26 | \\...[...] | semmle.label | \\...[...] |
subpaths
| test.swift:75:21:75:22 | &... : | test.swift:65:16:65:28 | arg1 : | test.swift:65:1:70:1 | arg2[return] : | test.swift:75:31:75:32 | [post] &... : |
| test.swift:114:19:114:19 | arg : | test.swift:109:9:109:14 | arg : | test.swift:110:12:110:12 | arg : | test.swift:114:12:114:22 | call to ... : |
@@ -551,6 +610,12 @@ subpaths
| test.swift:549:13:549:33 | call to MyClass.init(s:) [str] : | test.swift:535:9:535:9 | self [str] : | file://:0:0:0:0 | .str : | test.swift:549:13:549:35 | .str |
| test.swift:549:24:549:32 | call to source3() : | test.swift:536:10:536:13 | s : | test.swift:536:5:538:5 | self[return] [str] : | test.swift:549:13:549:33 | call to MyClass.init(s:) [str] : |
| test.swift:550:13:550:41 | call to Self.init(contentsOfFile:) [str] : | test.swift:535:9:535:9 | self [str] : | file://:0:0:0:0 | .str : | test.swift:550:13:550:43 | .str |
| test.swift:573:16:573:23 | call to source() : | test.swift:567:8:567:11 | x : | test.swift:567:3:569:3 | self[return] [x] : | test.swift:573:11:573:24 | call to S.init(x:) [x] : |
| test.swift:575:13:575:13 | s [x] : | test.swift:574:11:574:14 | enter #keyPath(...) [x] : | test.swift:574:11:574:14 | exit #keyPath(...) : | test.swift:575:13:575:25 | \\...[...] |
| test.swift:578:13:578:13 | s [x] : | test.swift:577:36:577:38 | enter #keyPath(...) [x] : | test.swift:577:36:577:38 | exit #keyPath(...) : | test.swift:578:13:578:32 | \\...[...] |
| test.swift:590:16:590:23 | call to source() : | test.swift:567:8:567:11 | x : | test.swift:567:3:569:3 | self[return] [x] : | test.swift:590:11:590:24 | call to S.init(x:) [x] : |
| test.swift:591:18:591:18 | s [x] : | test.swift:584:8:584:11 | s [x] : | test.swift:584:3:586:3 | self[return] [s, x] : | test.swift:591:12:591:19 | call to S2.init(s:) [s, x] : |
| test.swift:593:13:593:13 | s2 [s, x] : | test.swift:592:11:592:17 | enter #keyPath(...) [s, x] : | test.swift:592:11:592:17 | exit #keyPath(...) : | test.swift:593:13:593:26 | \\...[...] |
#select
| test.swift:7:15:7:15 | t1 | test.swift:6:19:6:26 | call to source() : | test.swift:7:15:7:15 | t1 | result |
| test.swift:9:15:9:15 | t1 | test.swift:6:19:6:26 | call to source() : | test.swift:9:15:9:15 | t1 | result |
@@ -621,3 +686,6 @@ subpaths
| test.swift:544:17:544:17 | .str | test.swift:543:20:543:28 | call to source3() : | test.swift:544:17:544:17 | .str | result |
| test.swift:549:13:549:35 | .str | test.swift:549:24:549:32 | call to source3() : | test.swift:549:13:549:35 | .str | result |
| test.swift:550:13:550:43 | .str | test.swift:543:20:543:28 | call to source3() : | test.swift:550:13:550:43 | .str | result |
| test.swift:575:13:575:25 | \\...[...] | test.swift:573:16:573:23 | call to source() : | test.swift:575:13:575:25 | \\...[...] | result |
| test.swift:578:13:578:32 | \\...[...] | test.swift:573:16:573:23 | call to source() : | test.swift:578:13:578:32 | \\...[...] | result |
| test.swift:593:13:593:26 | \\...[...] | test.swift:590:16:590:23 | call to source() : | test.swift:593:13:593:26 | \\...[...] | result |

Some files were not shown because too many files have changed in this diff Show More