Mirror of https://github.com/github/codeql.git, synced 2025-12-16 16:53:25 +01:00
Merge branch 'main' into redsun82/fix-pytest-build-as-test-windows
@@ -9,6 +9,7 @@
"fragments": [
"/*- Compilations -*/",
"/*- External data -*/",
"/*- Overlay support -*/",
"/*- Files and folders -*/",
"/*- Diagnostic messages -*/",
"/*- Diagnostic messages: severity -*/",

@@ -3,11 +3,15 @@
"qhelp.dtd">
<qhelp>
<overview>
<p>Using broken or weak cryptographic algorithms can leave data vulnerable to being decrypted.</p>
<p>Using broken or weak cryptographic algorithms may compromise security guarantees such as confidentiality, integrity, and authenticity.</p>

<p>Many cryptographic algorithms provided by cryptography libraries are known to be weak, or
flawed. Using such an algorithm means that an attacker may be able to easily decrypt the encrypted
data.</p>
<p>Many cryptographic algorithms are known to be weak or flawed. The security guarantees of a system often rely on the underlying cryptography, so using a weak algorithm can have severe consequences. For example:
</p>
<ul>
<li>If a weak encryption algorithm is used, an attacker may be able to decrypt sensitive data.</li>
<li>If a weak hashing algorithm is used to protect data integrity, an attacker may be able to craft a malicious input that has the same hash as a benign one.</li>
<li>If a weak algorithm is used for digital signatures, an attacker may be able to forge signatures and impersonate legitimate users.</li>
</ul>

</overview>
<recommendation>

@@ -44,7 +44,7 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
// Configure the proxy settings, if applicable.
if (this.proxy != null)
{
logger.LogInfo($"Setting up Dependabot proxy at {this.proxy.Address}");
logger.LogDebug($"Configuring environment variables for the Dependabot proxy at {this.proxy.Address}");

startInfo.EnvironmentVariables["HTTP_PROXY"] = this.proxy.Address;
startInfo.EnvironmentVariables["HTTPS_PROXY"] = this.proxy.Address;
@@ -57,11 +57,11 @@ namespace Semmle.Extraction.CSharp.DependencyFetching
private bool RunCommandAux(string args, string? workingDirectory, out IList<string> output, bool silent)
{
var dirLog = string.IsNullOrWhiteSpace(workingDirectory) ? "" : $" in {workingDirectory}";
logger.LogInfo($"Running '{Exec} {args}'{dirLog}");
var pi = MakeDotnetStartInfo(args, workingDirectory);
var threadId = Environment.CurrentManagedThreadId;
void onOut(string s) => logger.Log(silent ? Severity.Debug : Severity.Info, s, threadId);
void onError(string s) => logger.LogError(s, threadId);
logger.LogInfo($"Running '{Exec} {args}'{dirLog}");
var exitCode = pi.ReadOutput(out output, onOut, onError);
if (exitCode != 0)
{

@@ -336,6 +336,8 @@ and the CodeQL library pack ``codeql/rust-all`` (`changelog <https://github.com/
`log <https://crates.io/crates/log>`__, Logging library
`md5 <https://crates.io/crates/md5>`__, Utility library
`memchr <https://crates.io/crates/memchr>`__, Utility library
`mysql <https://crates.io/crates/mysql>`__, Database
`mysql_async <https://crates.io/crates/mysql_async>`__, Database
`once_cell <https://crates.io/crates/once_cell>`__, Utility library
`poem <https://crates.io/crates/poem>`__, Web framework
`postgres <https://crates.io/crates/postgres>`__, Database

@@ -21,6 +21,7 @@ ql/java/ql/src/Security/CWE/CWE-094/JexlInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/MvelInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/SpelInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/TemplateInjection.ql
ql/java/ql/src/Security/CWE/CWE-1004/SensitiveCookieNotHttpOnly.ql
ql/java/ql/src/Security/CWE/CWE-1104/MavenPomDependsOnBintray.ql
ql/java/ql/src/Security/CWE/CWE-113/NettyResponseSplitting.ql
ql/java/ql/src/Security/CWE/CWE-113/ResponseSplitting.ql

@@ -127,6 +127,7 @@ ql/java/ql/src/Security/CWE/CWE-094/JexlInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/MvelInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/SpelInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/TemplateInjection.ql
ql/java/ql/src/Security/CWE/CWE-1004/SensitiveCookieNotHttpOnly.ql
ql/java/ql/src/Security/CWE/CWE-1104/MavenPomDependsOnBintray.ql
ql/java/ql/src/Security/CWE/CWE-113/NettyResponseSplitting.ql
ql/java/ql/src/Security/CWE/CWE-113/ResponseSplitting.ql

@@ -30,6 +30,7 @@ ql/java/ql/src/Security/CWE/CWE-094/JexlInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/MvelInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/SpelInjection.ql
ql/java/ql/src/Security/CWE/CWE-094/TemplateInjection.ql
ql/java/ql/src/Security/CWE/CWE-1004/SensitiveCookieNotHttpOnly.ql
ql/java/ql/src/Security/CWE/CWE-1104/MavenPomDependsOnBintray.ql
ql/java/ql/src/Security/CWE/CWE-113/NettyResponseSplitting.ql
ql/java/ql/src/Security/CWE/CWE-113/ResponseSplitting.ql

@@ -190,7 +190,6 @@ ql/java/ql/src/experimental/Security/CWE/CWE-094/ScriptInjection.ql
ql/java/ql/src/experimental/Security/CWE/CWE-094/SpringImplicitViewManipulation.ql
ql/java/ql/src/experimental/Security/CWE/CWE-094/SpringViewManipulation.ql
ql/java/ql/src/experimental/Security/CWE/CWE-1004/InsecureTomcatConfig.ql
ql/java/ql/src/experimental/Security/CWE/CWE-1004/SensitiveCookieNotHttpOnly.ql
ql/java/ql/src/experimental/Security/CWE/CWE-200/InsecureWebResourceResponse.ql
ql/java/ql/src/experimental/Security/CWE/CWE-200/SensitiveAndroidFileLeak.ql
ql/java/ql/src/experimental/Security/CWE/CWE-208/PossibleTimingAttackAgainstSignature.ql

@@ -2,11 +2,13 @@
<qhelp>

<overview>
<p>Cross-Site Scripting (XSS) is categorized as one of the OWASP Top 10 Security Vulnerabilities. The <code>HttpOnly</code> flag directs compatible browsers to prevent client-side script from accessing cookies. Including the <code>HttpOnly</code> flag in the Set-Cookie HTTP response header for a sensitive cookie helps mitigate the risk associated with XSS where an attacker's script code attempts to read the contents of a cookie and exfiltrate information obtained.</p>
<p>Cookies without the <code>HttpOnly</code> flag set are accessible to client-side scripts (such as JavaScript) running in the same origin.
In case of a Cross-Site Scripting (XSS) vulnerability, the cookie can be stolen by a malicious script.
If a sensitive cookie does not need to be accessed directly by client-side scripts, the <code>HttpOnly</code> flag should be set.</p>
</overview>

<recommendation>
<p>Use the <code>HttpOnly</code> flag when generating a cookie containing sensitive information to help mitigate the risk of client side script accessing the protected cookie.</p>
<p>Use the <code>HttpOnly</code> flag when generating a cookie containing sensitive information to help mitigate the risk of client-side scripts accessing the protected cookie.</p>
</recommendation>

<example>
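As an illustration of the updated recommendation (a minimal sketch, not part of this diff; the cookie name and helper method are hypothetical), a servlet cookie can be marked HttpOnly before it is added to the response:

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;

class HttpOnlyCookieSketch {
    // Set the HttpOnly (and Secure) flags on a sensitive cookie so that
    // client-side scripts cannot read it via document.cookie.
    void addSessionCookie(HttpServletResponse response, String sessionId) {
        Cookie cookie = new Cookie("session_token", sessionId); // hypothetical cookie name
        cookie.setHttpOnly(true);
        cookie.setSecure(true); // only send the cookie over HTTPS
        response.addCookie(cookie);
    }
}

Calling setHttpOnly(true) on the Cookie object before addCookie is the pattern that the SetHttpOnlyOrRemovesCookieToAddCookieConfig configuration further below treats as safe.
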
@@ -23,5 +25,6 @@
OWASP:
<a href="https://owasp.org/www-community/HttpOnly">HttpOnly</a>
</li>
<li>MDN: <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Set-Cookie#httponly">Set-Cookie HttpOnly</a>.</li>
</references>
</qhelp>

@@ -1,13 +1,13 @@
/**
* @name Sensitive cookies without the HttpOnly response header set
* @description Sensitive cookies without the 'HttpOnly' flag set leaves session cookies vulnerable to
* @description A sensitive cookie without the 'HttpOnly' flag set may be vulnerable to
* an XSS attack.
* @kind path-problem
* @problem.severity warning
* @precision medium
* @precision high
* @security-severity 5.0
* @id java/sensitive-cookie-not-httponly
* @tags security
* experimental
* external/cwe/cwe-1004
*/

@@ -15,16 +15,15 @@
* Sketch of the structure of this query: we track cookie names that appear to be sensitive
* (e.g. `session` or `token`) to a `ServletResponse.addHeader(...)` or `.addCookie(...)`
* method that does not set the `httpOnly` flag. Subsidiary configurations
* `MatchesHttpOnlyConfiguration` and `SetHttpOnlyInCookieConfiguration` are used to establish
* `MatchesHttpOnlyToRawHeaderConfig` and `SetHttpOnlyInCookieConfig` are used to establish
* when the `httpOnly` flag is likely to have been set, before configuration
* `MissingHttpOnlyConfiguration` establishes that a non-`httpOnly` cookie has a sensitive-seeming name.
* `MissingHttpOnlyConfig` establishes that a non-`httpOnly` cookie has a sensitive-seeming name.
*/

import java
import semmle.code.java.dataflow.FlowSteps
import semmle.code.java.frameworks.Servlets
import semmle.code.java.dataflow.TaintTracking
import MissingHttpOnlyFlow::PathGraph

/** Gets a regular expression for matching common names of sensitive cookies. */
string getSensitiveCookieNameRegex() { result = "(?i).*(auth|session|token|key|credential).*" }
@@ -50,8 +49,8 @@ class SensitiveCookieNameExpr extends Expr {
}

/** A method call that sets a `Set-Cookie` header. */
class SetCookieMethodCall extends MethodCall {
SetCookieMethodCall() {
class SetCookieRawHeaderMethodCall extends MethodCall {
SetCookieRawHeaderMethodCall() {
(
this.getMethod() instanceof ResponseAddHeaderMethod or
this.getMethod() instanceof ResponseSetHeaderMethod
@@ -62,19 +61,19 @@ class SetCookieMethodCall extends MethodCall {

/**
* A taint configuration tracking flow from the text `httponly` to argument 1 of
* `SetCookieMethodCall`.
* `SetCookieRawHeaderMethodCall`.
*/
module MatchesHttpOnlyConfig implements DataFlow::ConfigSig {
module MatchesHttpOnlyToRawHeaderConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
source.asExpr().(CompileTimeConstantExpr).getStringValue().toLowerCase().matches("%httponly%")
}

predicate isSink(DataFlow::Node sink) {
sink.asExpr() = any(SetCookieMethodCall ma).getArgument(1)
sink.asExpr() = any(SetCookieRawHeaderMethodCall ma).getArgument(1)
}
}

module MatchesHttpOnlyFlow = TaintTracking::Global<MatchesHttpOnlyConfig>;
module MatchesHttpOnlyToRawHeaderFlow = TaintTracking::Global<MatchesHttpOnlyToRawHeaderConfig>;

/** A class descended from `javax.servlet.http.Cookie`. */
class CookieClass extends RefType {
@@ -103,29 +102,11 @@ predicate removesCookie(MethodCall ma) {
}

/**
* Holds if the MethodCall `ma` is a test method call indicated by:
* a) in a test directory such as `src/test/java`
* b) in a test package whose name has the word `test`
* c) in a test class whose name has the word `test`
* d) in a test class implementing a test framework such as JUnit or TestNG
* A taint configuration tracking the flow of a cookie that has had the
* `HttpOnly` flag set, or has been removed, to a `ServletResponse.addCookie`
* call.
*/
predicate isTestMethod(MethodCall ma) {
exists(Method m |
m = ma.getEnclosingCallable() and
(
m.getDeclaringType().getName().toLowerCase().matches("%test%") or // Simple check to exclude test classes to reduce FPs
m.getDeclaringType().getPackage().getName().toLowerCase().matches("%test%") or // Simple check to exclude classes in test packages to reduce FPs
exists(m.getLocation().getFile().getAbsolutePath().indexOf("/src/test/java")) or // Match test directory structure of build tools like maven
m instanceof TestMethod // Test method of a test case implementing a test framework such as JUnit or TestNG
)
)
}

/**
* A taint configuration tracking flow of a method that sets the `HttpOnly` flag,
* or one that removes a cookie, to a `ServletResponse.addCookie` call.
*/
module SetHttpOnlyOrRemovesCookieConfig implements DataFlow::ConfigSig {
module SetHttpOnlyOrRemovesCookieToAddCookieConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
source.asExpr() =
any(MethodCall ma | setsCookieHttpOnly(ma) or removesCookie(ma)).getQualifier()
@@ -137,25 +118,25 @@ module SetHttpOnlyOrRemovesCookieConfig implements DataFlow::ConfigSig {
}
}

module SetHttpOnlyOrRemovesCookieFlow = TaintTracking::Global<SetHttpOnlyOrRemovesCookieConfig>;
module SetHttpOnlyOrRemovesCookieToAddCookieFlow =
TaintTracking::Global<SetHttpOnlyOrRemovesCookieToAddCookieConfig>;

/**
* A cookie that is added to an HTTP response and which doesn't have `httpOnly` set, used as a sink
* in `MissingHttpOnlyConfiguration`.
* A cookie that is added to an HTTP response and which doesn't have `HttpOnly` set, used as a sink
* in `MissingHttpOnlyConfig`.
*/
class CookieResponseSink extends DataFlow::ExprNode {
CookieResponseSink() {
class CookieResponseWithoutHttpOnlySink extends DataFlow::ExprNode {
CookieResponseWithoutHttpOnlySink() {
exists(MethodCall ma |
(
ma.getMethod() instanceof ResponseAddCookieMethod and
this.getExpr() = ma.getArgument(0) and
not SetHttpOnlyOrRemovesCookieFlow::flowTo(this)
not SetHttpOnlyOrRemovesCookieToAddCookieFlow::flowTo(this)
or
ma instanceof SetCookieMethodCall and
ma instanceof SetCookieRawHeaderMethodCall and
this.getExpr() = ma.getArgument(1) and
not MatchesHttpOnlyFlow::flowTo(this) // response.addHeader("Set-Cookie", "token=" +authId + ";HttpOnly;Secure")
) and
not isTestMethod(ma) // Test class or method
not MatchesHttpOnlyToRawHeaderFlow::flowTo(this) // response.addHeader("Set-Cookie", "token=" +authId + ";HttpOnly;Secure")
)
)
}
}

@@ -178,15 +159,21 @@ predicate setsHttpOnlyInNewCookie(ClassInstanceExpr cie) {

/**
* A taint configuration tracking flow from a sensitive cookie without the `HttpOnly` flag
* set to its HTTP response.
* set to an HTTP response.
*
* Tracks string literals containing sensitive names (`SensitiveCookieNameExpr`), to an `addCookie` call (as a `Cookie` object)
* or an `addHeader` call (as a string) (`CookieResponseWithoutHttpOnlySink`).
*
* Passes through `Cookie` constructors and `toString` calls.
*/
module MissingHttpOnlyConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source.asExpr() instanceof SensitiveCookieNameExpr }

predicate isSink(DataFlow::Node sink) { sink instanceof CookieResponseSink }
predicate isSink(DataFlow::Node sink) { sink instanceof CookieResponseWithoutHttpOnlySink }

predicate isBarrier(DataFlow::Node node) {
// JAX-RS's `new NewCookie("session-access-key", accessKey, "/", null, null, 0, true, true)` and similar
// Cookie constructors that set the `HttpOnly` flag are considered barriers to the flow of sensitive names.
setsHttpOnlyInNewCookie(node.asExpr())
}

@@ -212,13 +199,8 @@ module MissingHttpOnlyConfig implements DataFlow::ConfigSig {

module MissingHttpOnlyFlow = TaintTracking::Global<MissingHttpOnlyConfig>;

deprecated query predicate problems(
DataFlow::Node sinkNode, MissingHttpOnlyFlow::PathNode source, MissingHttpOnlyFlow::PathNode sink,
string message1, DataFlow::Node sourceNode, string message2
) {
MissingHttpOnlyFlow::flowPath(source, sink) and
sinkNode = sink.getNode() and
message1 = "$@ doesn't have the HttpOnly flag set." and
sourceNode = source.getNode() and
message2 = "This sensitive cookie"
}
import MissingHttpOnlyFlow::PathGraph

from MissingHttpOnlyFlow::PathNode source, MissingHttpOnlyFlow::PathNode sink
where MissingHttpOnlyFlow::flowPath(source, sink)
select sink, source, sink, "$@ doesn't have the HttpOnly flag set.", source, "This sensitive cookie"

@@ -3,11 +3,15 @@
"qhelp.dtd">
<qhelp>
<overview>
<p>Using broken or weak cryptographic algorithms can leave data vulnerable to being decrypted.</p>
<p>Using broken or weak cryptographic algorithms may compromise security guarantees such as confidentiality, integrity, and authenticity.</p>

<p>Many cryptographic algorithms provided by cryptography libraries are known to be weak, or
flawed. Using such an algorithm means that an attacker may be able to easily decrypt the encrypted
data.</p>
<p>Many cryptographic algorithms are known to be weak or flawed. The security guarantees of a system often rely on the underlying cryptography, so using a weak algorithm can have severe consequences. For example:
</p>
<ul>
<li>If a weak encryption algorithm is used, an attacker may be able to decrypt sensitive data.</li>
<li>If a weak hashing algorithm is used to protect data integrity, an attacker may be able to craft a malicious input that has the same hash as a benign one.</li>
<li>If a weak algorithm is used for digital signatures, an attacker may be able to forge signatures and impersonate legitimate users.</li>
</ul>

</overview>
<recommendation>

@@ -0,0 +1,4 @@
---
category: newQuery
---
* The `java/sensitive-cookie-not-httponly` query has been promoted from experimental to the main query pack.

@@ -1,2 +0,0 @@
query: experimental/Security/CWE/CWE-1004/SensitiveCookieNotHttpOnly.ql
postprocess: utils/test/PrettyPrintModels.ql

@@ -1 +0,0 @@
// semmle-extractor-options: --javac-args -cp ${testdir}/../../../../stubs/servlet-api-2.4:${testdir}/../../../../stubs/jsr311-api-1.1.1:${testdir}/../../../../stubs/springframework-5.8.x

@@ -1,3 +1,14 @@
|
||||
#select
|
||||
| SensitiveCookieNotHttpOnly.java:31:28:31:36 | jwtCookie | SensitiveCookieNotHttpOnly.java:24:33:24:43 | "jwt_token" : String | SensitiveCookieNotHttpOnly.java:31:28:31:36 | jwtCookie | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:24:33:24:43 | "jwt_token" : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:49 | "token=" : String | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:42:42:42:49 | "token=" : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:57 | ... + ... : String | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:42:42:42:57 | ... + ... : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:52:42:52:124 | toString(...) | SensitiveCookieNotHttpOnly.java:52:56:52:75 | "session-access-key" : String | SensitiveCookieNotHttpOnly.java:52:42:52:124 | toString(...) | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:52:56:52:75 | "session-access-key" : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:65:42:65:47 | keyStr | SensitiveCookieNotHttpOnly.java:63:51:63:70 | "session-access-key" : String | SensitiveCookieNotHttpOnly.java:65:42:65:47 | keyStr | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:63:51:63:70 | "session-access-key" : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | SensitiveCookieNotHttpOnly.java:70:28:70:35 | "token=" : String | SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:70:28:70:35 | "token=" : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | SensitiveCookieNotHttpOnly.java:70:28:70:43 | ... + ... : String | SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:70:28:70:43 | ... + ... : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | SensitiveCookieNotHttpOnly.java:70:28:70:55 | ... + ... : String | SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:70:28:70:55 | ... + ... : String | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:111:28:111:33 | cookie | SensitiveCookieNotHttpOnly.java:88:35:88:51 | "Presto-UI-Token" : String | SensitiveCookieNotHttpOnly.java:111:28:111:33 | cookie | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:88:35:88:51 | "Presto-UI-Token" : String | This sensitive cookie |
|
||||
edges
|
||||
| SensitiveCookieNotHttpOnly.java:24:33:24:43 | "jwt_token" : String | SensitiveCookieNotHttpOnly.java:25:39:25:52 | tokenCookieStr : String | provenance | |
|
||||
| SensitiveCookieNotHttpOnly.java:25:28:25:64 | new Cookie(...) : Cookie | SensitiveCookieNotHttpOnly.java:31:28:31:36 | jwtCookie | provenance | Sink:MaD:1 |
|
||||
@@ -53,15 +64,4 @@ nodes
|
||||
| SensitiveCookieNotHttpOnly.java:91:16:91:21 | cookie : Cookie | semmle.label | cookie : Cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:110:25:110:64 | createAuthenticationCookie(...) : Cookie | semmle.label | createAuthenticationCookie(...) : Cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:111:28:111:33 | cookie | semmle.label | cookie |
|
||||
problems
|
||||
| SensitiveCookieNotHttpOnly.java:31:28:31:36 | jwtCookie | SensitiveCookieNotHttpOnly.java:24:33:24:43 | "jwt_token" : String | SensitiveCookieNotHttpOnly.java:31:28:31:36 | jwtCookie | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:24:33:24:43 | "jwt_token" | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:49 | "token=" : String | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:42:42:42:49 | "token=" | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:57 | ... + ... : String | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:42:42:42:57 | ... + ... | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:42:42:42:69 | ... + ... | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:52:42:52:124 | toString(...) | SensitiveCookieNotHttpOnly.java:52:56:52:75 | "session-access-key" : String | SensitiveCookieNotHttpOnly.java:52:42:52:124 | toString(...) | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:52:56:52:75 | "session-access-key" | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:65:42:65:47 | keyStr | SensitiveCookieNotHttpOnly.java:63:51:63:70 | "session-access-key" : String | SensitiveCookieNotHttpOnly.java:65:42:65:47 | keyStr | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:63:51:63:70 | "session-access-key" | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | SensitiveCookieNotHttpOnly.java:70:28:70:35 | "token=" : String | SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:70:28:70:35 | "token=" | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | SensitiveCookieNotHttpOnly.java:70:28:70:43 | ... + ... : String | SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:70:28:70:43 | ... + ... | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | SensitiveCookieNotHttpOnly.java:70:28:70:55 | ... + ... : String | SensitiveCookieNotHttpOnly.java:71:42:71:50 | secString | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:70:28:70:55 | ... + ... | This sensitive cookie |
|
||||
| SensitiveCookieNotHttpOnly.java:111:28:111:33 | cookie | SensitiveCookieNotHttpOnly.java:88:35:88:51 | "Presto-UI-Token" : String | SensitiveCookieNotHttpOnly.java:111:28:111:33 | cookie | $@ doesn't have the HttpOnly flag set. | SensitiveCookieNotHttpOnly.java:88:35:88:51 | "Presto-UI-Token" | This sensitive cookie |
|
||||
subpaths
|
||||
@@ -21,14 +21,14 @@ class SensitiveCookieNotHttpOnly {
|
||||
|
||||
// BAD - Tests adding a sensitive cookie without the `HttpOnly` flag set.
|
||||
public void addCookie2(String jwt_token, String userId, HttpServletRequest request, HttpServletResponse response) {
|
||||
String tokenCookieStr = "jwt_token";
|
||||
String tokenCookieStr = "jwt_token"; // $Source
|
||||
Cookie jwtCookie = new Cookie(tokenCookieStr, jwt_token);
|
||||
Cookie userIdCookie = new Cookie("user_id", userId);
|
||||
jwtCookie.setPath("/");
|
||||
userIdCookie.setPath("/");
|
||||
jwtCookie.setMaxAge(3600*24*7);
|
||||
userIdCookie.setMaxAge(3600*24*7);
|
||||
response.addCookie(jwtCookie);
|
||||
response.addCookie(jwtCookie); // $Alert
|
||||
response.addCookie(userIdCookie);
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ class SensitiveCookieNotHttpOnly {
|
||||
|
||||
// BAD - Tests set a sensitive cookie header without the `HttpOnly` flag set.
|
||||
public void addCookie4(String authId, HttpServletRequest request, HttpServletResponse response) {
|
||||
response.addHeader("Set-Cookie", "token=" +authId + ";Secure");
|
||||
response.addHeader("Set-Cookie", "token=" +authId + ";Secure"); // $Alert
|
||||
}
|
||||
|
||||
// GOOD - Tests set a sensitive cookie header using the class `javax.ws.rs.core.Cookie` with the `HttpOnly` flag set through string concatenation.
|
||||
@@ -49,7 +49,7 @@ class SensitiveCookieNotHttpOnly {
|
||||
|
||||
// BAD - Tests set a sensitive cookie header using the class `javax.ws.rs.core.Cookie` without the `HttpOnly` flag set.
|
||||
public void addCookie6(String accessKey, HttpServletRequest request, HttpServletResponse response) {
|
||||
response.setHeader("Set-Cookie", new NewCookie("session-access-key", accessKey, "/", null, null, 0, true).toString());
|
||||
response.setHeader("Set-Cookie", new NewCookie("session-access-key", accessKey, "/", null, null, 0, true).toString()); // $Alert
|
||||
}
|
||||
|
||||
// GOOD - Tests set a sensitive cookie header using the class `javax.ws.rs.core.Cookie` with the `HttpOnly` flag set through the constructor.
|
||||
@@ -60,15 +60,15 @@ class SensitiveCookieNotHttpOnly {
|
||||
|
||||
// BAD - Tests set a sensitive cookie header using the class `javax.ws.rs.core.Cookie` without the `HttpOnly` flag set.
|
||||
public void addCookie8(String accessKey, HttpServletRequest request, HttpServletResponse response) {
|
||||
NewCookie accessKeyCookie = new NewCookie("session-access-key", accessKey, "/", null, 0, null, 86400, true);
|
||||
NewCookie accessKeyCookie = new NewCookie("session-access-key", accessKey, "/", null, 0, null, 86400, true); // $Source
|
||||
String keyStr = accessKeyCookie.toString();
|
||||
response.setHeader("Set-Cookie", keyStr);
|
||||
response.setHeader("Set-Cookie", keyStr); // $Alert
|
||||
}
|
||||
|
||||
// BAD - Tests set a sensitive cookie header using a variable without the `HttpOnly` flag set.
|
||||
public void addCookie9(String authId, HttpServletRequest request, HttpServletResponse response) {
|
||||
String secString = "token=" +authId + ";Secure";
|
||||
response.addHeader("Set-Cookie", secString);
|
||||
String secString = "token=" +authId + ";Secure"; // $Source
|
||||
response.addHeader("Set-Cookie", secString); // $Alert
|
||||
}
|
||||
|
||||
// GOOD - Tests set a sensitive cookie header with the `HttpOnly` flag set using `String.format(...)`.
|
||||
@@ -85,7 +85,7 @@ class SensitiveCookieNotHttpOnly {
|
||||
}
|
||||
|
||||
public Cookie createAuthenticationCookie(HttpServletRequest request, String jwt) {
|
||||
String PRESTO_UI_COOKIE = "Presto-UI-Token";
|
||||
String PRESTO_UI_COOKIE = "Presto-UI-Token"; // $Source
|
||||
Cookie cookie = new Cookie(PRESTO_UI_COOKIE, jwt);
|
||||
cookie.setPath("/ui");
|
||||
return cookie;
|
||||
@@ -108,7 +108,7 @@ class SensitiveCookieNotHttpOnly {
|
||||
// BAD - Tests set a sensitive cookie header without the `HttpOnly` flag set using a wrapper method.
|
||||
public void addCookie12(HttpServletRequest request, HttpServletResponse response, String jwt) {
|
||||
Cookie cookie = createAuthenticationCookie(request, jwt);
|
||||
response.addCookie(cookie);
|
||||
response.addCookie(cookie); // $Alert
|
||||
}
|
||||
|
||||
// GOOD - Tests remove a sensitive cookie header without the `HttpOnly` flag set using a wrapper method.
|
||||
@@ -141,7 +141,7 @@ class SensitiveCookieNotHttpOnly {
|
||||
// This example is missed because the `cookie.setHttpOnly` call in `createCookie` is thought to maybe set the HTTP-only flag, and the `cookie`
|
||||
// object flows to this `addCookie` call.
|
||||
public void addCookie15(HttpServletRequest request, HttpServletResponse response, String refreshToken) {
|
||||
response.addCookie(createCookie("refresh_token", refreshToken, false));
|
||||
response.addCookie(createCookie("refresh_token", refreshToken, false)); // $MISSING:Alert
|
||||
}
|
||||
|
||||
// GOOD - CSRF token doesn't need to have the `HttpOnly` flag set.
|
||||
@@ -0,0 +1,4 @@
|
||||
query: Security/CWE/CWE-1004/SensitiveCookieNotHttpOnly.ql
|
||||
postprocess:
|
||||
- utils/test/InlineExpectationsTestQuery.ql
|
||||
- utils/test/PrettyPrintModels.ql
|
||||
1
java/ql/test/query-tests/security/CWE-1004/options
Normal file
@@ -0,0 +1 @@
|
||||
// semmle-extractor-options: --javac-args -cp ${testdir}/../../../stubs/servlet-api-2.4:${testdir}/../../../stubs/jsr311-api-1.1.1:${testdir}/../../../stubs/springframework-5.8.x
|
||||
@@ -71,7 +71,6 @@ ql/javascript/ql/src/LanguageFeatures/SemicolonInsertion.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/SetterReturn.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/SpuriousArguments.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/StrictModeCallStackIntrospection.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/SyntaxError.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/TemplateSyntaxInStringLiteral.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/ThisBeforeSuper.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/UnusedIndexVariable.ql
|
||||
|
||||
@@ -63,7 +63,6 @@ ql/javascript/ql/src/LanguageFeatures/SemicolonInsertion.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/SetterReturn.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/SpuriousArguments.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/StrictModeCallStackIntrospection.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/SyntaxError.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/TemplateSyntaxInStringLiteral.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/ThisBeforeSuper.ql
|
||||
ql/javascript/ql/src/LanguageFeatures/UnusedIndexVariable.ql
|
||||
|
||||
@@ -73,6 +73,7 @@ module NameResolution {
|
||||
*
|
||||
* May also include some type-specific steps in cases where this is harmless when tracking values.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
private predicate commonStep(Node node1, Node node2) {
|
||||
// Import paths are part of the graph and has an incoming edge from the imported module, if found.
|
||||
// This ensures we can also use the PathExpr as a source when working with external (unresolved) modules.
|
||||
@@ -187,6 +188,7 @@ module NameResolution {
|
||||
/**
|
||||
* Holds if there is a read from `node1` to `node2` that accesses the member `name`.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
predicate readStep(Node node1, string name, Node node2) {
|
||||
exists(QualifiedTypeAccess access |
|
||||
node1 = access.getQualifier() and
|
||||
@@ -321,6 +323,7 @@ module NameResolution {
|
||||
/**
|
||||
* Gets the exported member of `mod` named `name`.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
Node getModuleExport(ModuleLike mod, string name) {
|
||||
exists(ExportDeclaration exprt |
|
||||
mod = exprt.getContainer() and
|
||||
@@ -362,6 +365,7 @@ module NameResolution {
|
||||
* Holds if `value` is stored in `target.prop`. Only needs to recognise assignments
|
||||
* that are also recognised by JSDoc tooling such as the Closure compiler.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
private predicate storeToVariable(Expr value, string prop, LocalVariableLike target) {
|
||||
exists(AssignExpr assign |
|
||||
// target.name = value
|
||||
@@ -374,6 +378,7 @@ module NameResolution {
|
||||
}
|
||||
|
||||
/** Steps that only apply for this configuration. */
|
||||
pragma[nomagic]
|
||||
private predicate specificStep(Node node1, Node node2) {
|
||||
exists(LexicalName var | S::isRelevantVariable(var) |
|
||||
node1.(LexicalDecl).getALexicalName() = var and
|
||||
@@ -406,6 +411,7 @@ module NameResolution {
|
||||
/** Helps track flow from a particular set of source nodes. */
|
||||
module Track<nodeSig/1 isSource> {
|
||||
/** Gets the set of nodes reachable from `source`. */
|
||||
pragma[nomagic]
|
||||
Node track(Node source) {
|
||||
isSource(source) and
|
||||
result = source
|
||||
@@ -419,6 +425,7 @@ module NameResolution {
|
||||
/** Helps track flow from a particular set of source nodes. */
|
||||
module TrackNode<AstNodeSig Source> {
|
||||
/** Gets the set of nodes reachable from `source`. */
|
||||
pragma[nomagic]
|
||||
Node track(Source source) {
|
||||
result = source
|
||||
or
|
||||
@@ -482,6 +489,7 @@ module NameResolution {
|
||||
*
|
||||
* Unlike `trackModule`, this is intended to track uses of external packages.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
predicate nodeRefersToModule(Node node, string mod, string qualifiedName) {
|
||||
exists(Expr path |
|
||||
path = any(Import imprt).getImportedPathExpr() or
|
||||
|
||||
@@ -12,6 +12,7 @@ module TypeResolution {
|
||||
* We track through underlying types as an approximate way to handle calls to a type
|
||||
* that is a union/intersection involving functions.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
Node trackUnderlyingFunctionType(Function fun) {
|
||||
result = fun
|
||||
or
|
||||
@@ -139,6 +140,28 @@ module TypeResolution {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* `ContentSet.getAReadContent` restricted to the content sets and contents relevant for type resolution.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
private DataFlow::Content getAReadContentRestricted(DataFlow::ContentSet cs) {
|
||||
valueReadStep(_, cs, _) and
|
||||
result = cs.getAReadContent() and
|
||||
typeMember(_, result, _)
|
||||
}
|
||||
|
||||
/**
|
||||
* `valueReadStep` where the `ContentSet` has been mapped to the set of relevant read-contents.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
private predicate valueReadStepOnContent(Node object, DataFlow::Content content, Node member) {
|
||||
exists(DataFlow::ContentSet contents |
|
||||
valueReadStep(object, contents, member) and
|
||||
content = getAReadContentRestricted(contents)
|
||||
)
|
||||
}
|
||||
|
||||
pragma[nomagic]
|
||||
predicate callTarget(InvokeExpr call, Function target) {
|
||||
exists(ClassDefinition cls |
|
||||
valueHasType(call.(NewExpr).getCallee(), trackClassValue(cls)) and
|
||||
@@ -198,6 +221,7 @@ module TypeResolution {
|
||||
)
|
||||
}
|
||||
|
||||
pragma[nomagic]
|
||||
predicate contextualType(Node value, Node type) {
|
||||
exists(LocalVariableLike v |
|
||||
type = v.getADeclaration().getTypeAnnotation() and
|
||||
@@ -239,6 +263,7 @@ module TypeResolution {
|
||||
/**
|
||||
* Holds if `value` has the given `type`.
|
||||
*/
|
||||
cached
|
||||
predicate valueHasType(Node value, Node type) {
|
||||
value.(BindingPattern).getTypeAnnotation() = type
|
||||
or
|
||||
@@ -293,11 +318,18 @@ module TypeResolution {
|
||||
or
|
||||
exists(Node mid | valueHasType(mid, type) | ValueFlow::step(mid, value))
|
||||
or
|
||||
exists(Node mid, Node midType, DataFlow::ContentSet contents, Node host |
|
||||
valueReadStep(mid, contents, value) and
|
||||
exists(DataFlow::Content content, Node host |
|
||||
typeMemberHostRead(host, content, value) and
|
||||
typeMember(host, content, type)
|
||||
)
|
||||
}
|
||||
|
||||
pragma[nomagic]
|
||||
private predicate typeMemberHostRead(Node host, DataFlow::Content content, Node target) {
|
||||
exists(Node mid, Node midType |
|
||||
valueReadStepOnContent(mid, content, target) and
|
||||
valueHasType(mid, midType) and
|
||||
typeMemberHostReaches(host, midType) and
|
||||
typeMember(host, contents.getAReadContent(), type)
|
||||
typeMemberHostReaches(host, midType)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -309,6 +341,7 @@ module TypeResolution {
|
||||
* - a union type has the property if all its members have the property
|
||||
*/
|
||||
module TrackMustProp<nodeSig/1 directlyHasProperty> {
|
||||
pragma[nomagic]
|
||||
predicate hasProperty(Node node) {
|
||||
directlyHasProperty(node)
|
||||
or
|
||||
@@ -341,6 +374,7 @@ module TypeResolution {
|
||||
}
|
||||
|
||||
module ValueHasProperty<nodeSig/1 typeHasProperty> {
|
||||
pragma[nomagic]
|
||||
predicate valueHasProperty(Node value) {
|
||||
exists(Node type |
|
||||
valueHasType(value, type) and
|
||||
@@ -405,6 +439,7 @@ module TypeResolution {
|
||||
/**
|
||||
* Holds if `type` contains `string` or `any`, possibly wrapped in a promise.
|
||||
*/
|
||||
pragma[nomagic]
|
||||
predicate hasUnderlyingStringOrAnyType(Node type) {
|
||||
type.(TypeAnnotation).isStringy()
|
||||
or
|
||||
|
||||
@@ -4,8 +4,7 @@
|
||||
* @kind problem
|
||||
* @problem.severity recommendation
|
||||
* @id js/syntax-error
|
||||
* @tags quality
|
||||
* reliability
|
||||
* @tags reliability
|
||||
* correctness
|
||||
* language-features
|
||||
* @precision very-high
|
||||
|
||||
@@ -4,17 +4,34 @@
<qhelp>
<overview>
<p>
Using broken or weak cryptographic algorithms can leave data
vulnerable to being decrypted or forged by an attacker.
Using broken or weak cryptographic algorithms may compromise
security guarantees such as confidentiality, integrity, and
authenticity.
</p>

<p>
Many cryptographic algorithms provided by cryptography
libraries are known to be weak, or flawed. Using such an
algorithm means that encrypted or hashed data is less
secure than it appears to be.
Many cryptographic algorithms are known to be weak or flawed. The
security guarantees of a system often rely on the underlying
cryptography, so using a weak algorithm can have severe consequences.
For example:
</p>

<ul>
<li>
If a weak encryption algorithm is used, an attacker may be able to
decrypt sensitive data.
</li>
<li>
If a weak hashing algorithm is used to protect data integrity, an
attacker may be able to craft a malicious input that has the same
hash as a benign one.
</li>
<li>
If a weak algorithm is used for digital signatures, an attacker may
be able to forge signatures and impersonate legitimate users.
</li>
</ul>

</overview>
<recommendation>

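To make the "For example" list above concrete, here is a minimal Java sketch (illustrative only, not part of this diff) that uses an authenticated modern cipher, AES in GCM mode, instead of a broken algorithm such as DES:

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.GCMParameterSpec;
import java.security.SecureRandom;

class StrongCryptoSketch {
    // Encrypt with AES-256/GCM, which provides both confidentiality and integrity;
    // a weak choice here would be e.g. Cipher.getInstance("DES/ECB/PKCS5Padding").
    static byte[] encrypt(byte[] plaintext) throws Exception {
        KeyGenerator keyGen = KeyGenerator.getInstance("AES");
        keyGen.init(256);
        SecretKey key = keyGen.generateKey();

        byte[] iv = new byte[12]; // 96-bit nonce, freshly generated per message
        new SecureRandom().nextBytes(iv);

        Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
        cipher.init(Cipher.ENCRYPT_MODE, key, new GCMParameterSpec(128, iv));
        return cipher.doFinal(plaintext); // the IV must be stored or sent alongside the ciphertext
    }
}
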
148
misc/scripts/patch_query_history.py
Normal file
@@ -0,0 +1,148 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Duplicate the last CodeQL query history entry, pointing it at a given evaluator log.
|
||||
|
||||
Behavior:
|
||||
1. Locate the most relevant ``workspace-query-history.json`` (supports local & remote VS Code).
|
||||
2. Duplicate the final object in ``queries``.
|
||||
3. Generate a fresh random ID and a new timestamp.
|
||||
4. Set ``jsonEvalLogSummaryLocation`` to the provided summary file path.
|
||||
5. Set ``initialInfo.userSpecifiedLabel`` to ``Evaluator log at <dir>/<filename>`` (last 2 path parts).
|
||||
6. Write back atomically.
|
||||
|
||||
Usage: python3 misc/scripts/patch_query_history.py /path/to/evaluator-log.summary.jsonl
|
||||
"""
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import json, os, random, string, tempfile, sys
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
from datetime import datetime, timezone
|
||||
import copy
|
||||
|
||||
|
||||
# Extension folder segment for CodeQL extension query history
|
||||
EXT_SEGMENT = "GitHub.vscode-codeql"
|
||||
HISTORY_FILENAME = "workspace-query-history.json"
|
||||
WORKSPACE_JSON = "workspace.json"
|
||||
|
||||
def candidate_user_data_dirs() -> List[Path]:
|
||||
"""Return plausible VS Code user data dirs (ordered, deduped)."""
|
||||
home = Path.home()
|
||||
env = os.environ
|
||||
override = env.get("VSCODE_USER_DATA_DIR")
|
||||
bases: List[Path] = []
|
||||
if override:
|
||||
bases.append(Path(override).expanduser())
|
||||
if os.name == "nt":
|
||||
appdata = env.get("APPDATA")
|
||||
if appdata:
|
||||
bases.append(Path(appdata) / "Code" / "User")
|
||||
elif sys.platform == "darwin": # macOS inline check
|
||||
bases.append(home / "Library" / "Application Support" / "Code" / "User")
|
||||
else:
|
||||
bases.append(home / ".config" / "Code" / "User")
|
||||
# Remote / server variants
|
||||
bases.extend([
|
||||
home / ".vscode-remote" / "data" / "User",
|
||||
home / ".vscode-server" / "data" / "User",
|
||||
home / ".vscode" / "data" / "User",
|
||||
])
|
||||
seen: set[Path] = set()
|
||||
ordered: List[Path] = []
|
||||
for b in bases:
|
||||
if b not in seen:
|
||||
seen.add(b)
|
||||
ordered.append(b)
|
||||
return ordered
|
||||
|
||||
|
||||
def find_history_files() -> List[Path]:
|
||||
"""Return all candidate history files sorted by descending modification time.
|
||||
"""
|
||||
candidates: List[Path] = []
|
||||
for base in candidate_user_data_dirs():
|
||||
storage_root = base / "workspaceStorage"
|
||||
if not storage_root.is_dir():
|
||||
continue
|
||||
for ws_entry in storage_root.iterdir():
|
||||
if not ws_entry.is_dir():
|
||||
continue
|
||||
history_file = ws_entry / EXT_SEGMENT / HISTORY_FILENAME
|
||||
if history_file.is_file():
|
||||
candidates.append(history_file)
|
||||
candidates.sort(key=lambda p: p.stat().st_mtime, reverse=True)
|
||||
return candidates
|
||||
|
||||
def _generate_new_id() -> str:
|
||||
"""Return a new random id (24 chars from allowed set, prefixed with 'evaluator-log-' for stability)."""
|
||||
alphabet = string.ascii_letters + string.digits + "_-"
|
||||
return "evaluator-log-" + "".join(random.choice(alphabet) for _ in range(23))
|
||||
|
||||
def atomic_write_json(target: Path, obj) -> None:
|
||||
fd, tmp = tempfile.mkstemp(dir=str(target.parent), prefix="history.", suffix=".json")
|
||||
try:
|
||||
with os.fdopen(fd, "w", encoding="utf-8") as out:
|
||||
json.dump(obj, out, ensure_ascii=False, indent=2)
|
||||
out.write("\n")
|
||||
os.replace(tmp, target)
|
||||
finally:
|
||||
if os.path.exists(tmp):
|
||||
try:
|
||||
os.remove(tmp)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def _duplicate_last_entry(path: Path, summary_path: Path) -> dict:
|
||||
try:
|
||||
data = json.loads(path.read_text(encoding="utf-8"))
|
||||
except json.JSONDecodeError as e:
|
||||
raise SystemExit(f"History file JSON is corrupt: {e}")
|
||||
if not isinstance(data, dict) or not isinstance(data.get("queries"), list):
|
||||
raise SystemExit("Unexpected history file structure: missing 'queries' list")
|
||||
queries = data["queries"]
|
||||
if not queries:
|
||||
raise SystemExit("History file contains no queries to duplicate. Please run a query in VSCode and try again.")
|
||||
last = queries[-1]
|
||||
if not isinstance(last, dict):
|
||||
raise SystemExit("Last query entry malformed")
|
||||
payload = copy.deepcopy(last)
|
||||
initial = payload.setdefault("initialInfo", {})
|
||||
if not isinstance(initial, dict):
|
||||
initial = {}
|
||||
payload["initialInfo"] = initial
|
||||
new_id = _generate_new_id()
|
||||
initial["id"] = new_id
|
||||
initial["start"] = datetime.now(timezone.utc).isoformat(timespec="milliseconds").replace("+00:00", "Z")
|
||||
payload["jsonEvalLogSummaryLocation"] = str(summary_path)
|
||||
parts = list(summary_path.parts)
|
||||
last_two = "/".join(parts[-2:]) if len(parts) >= 2 else parts[-1]
|
||||
new_label = f"Evaluator log at {last_two}"
|
||||
initial["userSpecifiedLabel"] = new_label
|
||||
queries.append(payload)
|
||||
atomic_write_json(path, data)
|
||||
return {"new_id": new_id, "new_label": new_label, "count": len(queries)}
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(description="Duplicate last CodeQL query history entry, patching summary location and label.")
|
||||
parser.add_argument("summary_path", type=Path, help="Path to evaluator-log.summary.jsonl file (required).")
|
||||
args = parser.parse_args()
|
||||
|
||||
summary_path: Path = args.summary_path
|
||||
if not summary_path.is_file():
|
||||
raise SystemExit(f"Summary file does not exist: {summary_path}")
|
||||
|
||||
candidates = find_history_files()
|
||||
if not candidates:
|
||||
raise SystemExit("No workspace-query-history.json files found.")
|
||||
best = candidates[0]
|
||||
|
||||
result = _duplicate_last_entry(best, summary_path)
|
||||
|
||||
print(f"Patched history: {best}")
|
||||
print(f"Evaluator log summary: {summary_path}")
|
||||
print(f"New ID: {result['new_id']}")
|
||||
print(f"Label: {result['new_label']}")
|
||||
print(f"Total entries: {result['count']}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
@@ -2,6 +2,7 @@ name: "python"
|
||||
display_name: "Python"
|
||||
version: 1.22.1
|
||||
column_kind: utf32
|
||||
overlay_support_version: 20250626
|
||||
build_modes:
|
||||
- none
|
||||
default_queries:
|
||||
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,4 @@
|
||||
description: Add databaseMetadata and overlayChangedFiles relations
|
||||
compatibility: full
|
||||
databaseMetadata.rel: delete
|
||||
overlayChangedFiles.rel: delete
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,2 @@
|
||||
description: Add @top type
|
||||
compatibility: full
|
||||
@@ -40,6 +40,22 @@ externalData(
|
||||
string value : string ref
|
||||
);
|
||||
|
||||
/*- Overlay support -*/
|
||||
|
||||
/**
|
||||
* The CLI will automatically emit the tuple `databaseMetadata("isOverlay", "true")`,
|
||||
* along with an `overlayChangedFiles` tuple for each new/modified/deleted file,
|
||||
* when building an overlay database, and these can be used by the discard predicates.
|
||||
*/
|
||||
databaseMetadata(
|
||||
string metadataKey : string ref,
|
||||
string value : string ref
|
||||
);
|
||||
|
||||
overlayChangedFiles(
|
||||
string path : string ref
|
||||
);
|
||||
|
||||
/*- DEPRECATED: Snapshot date -*/
|
||||
|
||||
snapshotDate(unique date snapshotDate : date ref);
|
||||
@@ -420,3 +436,12 @@ py_decorated_object(int object : @py_object ref,
|
||||
@py_object = @py_cobject | @py_flow_node;
|
||||
|
||||
@py_source_element = @py_ast_node | @container;
|
||||
|
||||
/** The union of all Python database entities */
|
||||
@top =
|
||||
@py_source_element | @py_object | @py_base_var | @location | @py_line | @py_comment |
|
||||
@py_expr_parent | @py_expr_context |
|
||||
@py_operator | @py_boolop | @py_cmpop | @py_unaryop |
|
||||
@py_cmpop_list | @py_alias_list | @py_StringPart_list | @py_comprehension_list | @py_dict_item_list | @py_pattern_list | @py_stmt_list | @py_str_list | @py_type_parameter_list |
|
||||
@externalDefect | @externalMetric | @externalDataElement | @duplication_or_similarity | @svnentry |
|
||||
@xmllocatable | @yaml_locatable;
|
||||
|
||||
@@ -25,7 +25,7 @@ def renamer_from_options_and_env(options, logger):
|
||||
except (AttributeError, ImportError):
|
||||
raise SemmleError("Cannot get renamer from module " + options.renamer)
|
||||
else:
|
||||
path_transformer = os.environ.get("SEMMLE_PATH_TRANSFORMER", None)
|
||||
path_transformer = os.environ.get("CODEQL_PATH_TRANSFORMER", None) or os.environ.get("SEMMLE_PATH_TRANSFORMER", None)
|
||||
if path_transformer:
|
||||
logger.info("Using path transformer '%s'", path_transformer)
|
||||
rename = projectlayout.get_renamer(path_transformer)
|
||||
|
||||
@@ -12,12 +12,16 @@ import collections
|
||||
import re
|
||||
from functools import total_ordering
|
||||
import sys
|
||||
from pathlib import PureWindowsPath
|
||||
import os
|
||||
|
||||
def get_renamer(filename):
|
||||
layout = load(filename)
|
||||
def rename(path):
|
||||
renamed = layout.artificial_path(path)
|
||||
return path if renamed is None else renamed
|
||||
if os.name == "nt":
|
||||
return lambda path: rename(PureWindowsPath(path).as_posix())
|
||||
return rename
|
||||
|
||||
def load(filename):
|
||||
@@ -257,7 +261,7 @@ class _Rewrite(object):
|
||||
exclude = path
|
||||
self._line = line;
|
||||
self._original = u'-' + exclude;
|
||||
if not exclude.startswith(u"/"):
|
||||
if os.name != 'nt' and not exclude.startswith(u"/"):
|
||||
exclude = u'/' + exclude
|
||||
if exclude.find(u"//") != -1:
|
||||
raise _error(u"Illegal '//' in exclude path", line)
|
||||
@@ -274,14 +278,14 @@ class _Rewrite(object):
|
||||
include = path
|
||||
self._line = line;
|
||||
self._original = include;
|
||||
if not include.startswith(u"/"):
|
||||
if os.name != 'nt' and not include.startswith(u"/"):
|
||||
include = u'/' + include
|
||||
doubleslash = include.find(u"//")
|
||||
if doubleslash != include.find(u"//"):
|
||||
raise _error(u"More than one '//' in include path (project-layout)", line)
|
||||
if self._verify_stars.match(include):
|
||||
raise _error(u"Illegal use of '**' in include path (project-layout)", line)
|
||||
if not virtual.startswith(u"/"):
|
||||
if os.name != 'nt' and not virtual.startswith(u"/"):
|
||||
virtual = u"/" + virtual
|
||||
if virtual.endswith(u"/"):
|
||||
virtual = virtual[0 : -1]
|
||||
|
||||
@@ -11,6 +11,7 @@ from semmle.extractors import SuperExtractor, ModulePrinter, SkippedBuiltin
|
||||
from semmle.profiling import get_profiler
|
||||
from semmle.path_rename import renamer_from_options_and_env
|
||||
from semmle.logging import WARN, recursion_error_message, internal_error_message, Logger
|
||||
from semmle.util import FileExtractable, FolderExtractable
|
||||
|
||||
class ExtractorFailure(Exception):
|
||||
'Generic exception representing the failure of an extractor.'
|
||||
@@ -19,17 +20,32 @@ class ExtractorFailure(Exception):
|
||||
|
||||
class ModuleImportGraph(object):
|
||||
|
||||
def __init__(self, max_depth):
|
||||
def __init__(self, max_depth, logger: Logger):
|
||||
self.modules = {}
|
||||
self.succ = defaultdict(set)
|
||||
self.todo = set()
|
||||
self.done = set()
|
||||
self.max_depth = max_depth
|
||||
self.logger = logger
|
||||
|
||||
# During overlay extraction, only traverse the files that were changed.
|
||||
self.overlay_changes = None
|
||||
if 'CODEQL_EXTRACTOR_PYTHON_OVERLAY_CHANGES' in os.environ:
|
||||
overlay_changes_file = os.environ['CODEQL_EXTRACTOR_PYTHON_OVERLAY_CHANGES']
|
||||
logger.info("Overlay extraction mode: only extracting files changed according to '%s'", overlay_changes_file)
|
||||
try:
|
||||
with open(overlay_changes_file, 'r', encoding='utf-8') as f:
|
||||
data = json.load(f)
|
||||
changed_paths = data.get('changes', [])
|
||||
self.overlay_changes = { os.path.abspath(p) for p in changed_paths }
|
||||
except (IOError, ValueError) as e:
|
||||
logger.warn("Failed to read overlay changes from '%s' (falling back to full extraction): %s", overlay_changes_file, e)
|
||||
self.overlay_changes = None
|
||||
|
||||
def add_root(self, mod):
|
||||
self.modules[mod] = 0
|
||||
if mod not in self.done:
|
||||
self.todo.add(mod)
|
||||
self.add_todo(mod)
|
||||
|
||||
def add_import(self, mod, imported):
|
||||
assert mod in self.modules
|
||||
@@ -39,7 +55,7 @@ class ModuleImportGraph(object):
|
||||
self._reduce_depth(imported, self.modules[mod] + 1)
|
||||
else:
|
||||
if self.modules[mod] < self.max_depth and imported not in self.done:
|
||||
self.todo.add(imported)
|
||||
self.add_todo(imported)
|
||||
self.modules[imported] = self.modules[mod] + 1
|
||||
|
||||
def _reduce_depth(self, mod, depth):
|
||||
@@ -48,7 +64,7 @@ class ModuleImportGraph(object):
|
||||
if depth > self.max_depth:
|
||||
return
|
||||
if mod not in self.done:
|
||||
self.todo.add(mod)
|
||||
self.add_todo(mod)
|
||||
self.modules[mod] = depth
|
||||
for imp in self.succ[mod]:
|
||||
self._reduce_depth(imp, depth+1)
|
||||
@@ -61,11 +77,25 @@ class ModuleImportGraph(object):
|
||||
|
||||
def push_back(self, mod):
|
||||
self.done.remove(mod)
|
||||
self.todo.add(mod)
|
||||
self.add_todo(mod)
|
||||
|
||||
def empty(self):
|
||||
return not self.todo
|
||||
|
||||
def add_todo(self, mod):
|
||||
if not self._module_in_overlay_changes(mod):
|
||||
self.logger.debug("Skipping module '%s' as it was not changed in overlay extraction.", mod)
|
||||
return
|
||||
self.todo.add(mod)
|
||||
|
||||
def _module_in_overlay_changes(self, mod):
|
||||
if self.overlay_changes is not None:
|
||||
if isinstance(mod, FileExtractable):
|
||||
return mod.path in self.overlay_changes
|
||||
if isinstance(mod, FolderExtractable):
|
||||
return mod.path + '/__init__.py' in self.overlay_changes
|
||||
return True
|
||||
|
||||
class ExtractorPool(object):
|
||||
'''Pool of worker processes running extractors'''
|
||||
|
||||
@@ -90,7 +120,7 @@ class ExtractorPool(object):
|
||||
self.enqueued = set()
|
||||
self.done = set()
|
||||
self.requirements = {}
|
||||
self.import_graph = ModuleImportGraph(options.max_import_depth)
|
||||
self.import_graph = ModuleImportGraph(options.max_import_depth, logger)
|
||||
logger.debug("Source archive: %s", archive)
|
||||
self.logger = logger
|
||||
DiagnosticsWriter.create_output_dir()
|
||||
@@ -162,6 +192,10 @@ class ExtractorPool(object):
|
||||
self.module_queue.put(None)
|
||||
for p in self.procs:
|
||||
p.join()
|
||||
if 'CODEQL_EXTRACTOR_PYTHON_OVERLAY_BASE_METADATA_OUT' in os.environ:
|
||||
with open(os.environ['CODEQL_EXTRACTOR_PYTHON_OVERLAY_BASE_METADATA_OUT'], 'w', encoding='utf-8') as f:
|
||||
metadata = {}
|
||||
json.dump(metadata, f)
|
||||
self.logger.info("Processed %d modules in %0.2fs", len(self.import_graph.done), time.time() - self.start_time)
|
||||
|
||||
def stop(self, timeout=2.0):
|
||||
|
||||
@@ -19,7 +19,7 @@ class ProjectLayoutUseTest(ExtractorTest):
|
||||
|
||||
def test_invalid_layout(self):
|
||||
try:
|
||||
with environment("SEMMLE_PATH_TRANSFORMER", "nonsuch/project-layout"):
|
||||
with environment("CODEQL_PATH_TRANSFORMER", "nonsuch/project-layout"):
|
||||
self.run_extractor("-R", self.src_path)
|
||||
except subprocess.CalledProcessError as ex:
|
||||
self.assertEqual(ex.returncode, 2)
|
||||
|
||||
@@ -0,0 +1 @@
import semmle.python.internal.OverlayDiscardConsistencyQuery
@@ -0,0 +1,5 @@
---
category: feature
---

* Initial support for incremental Python databases via `codeql database create --overlay-base`/`--overlay-changes`.
@@ -37,6 +37,7 @@ import semmle.python.pointsto.CallGraph
import semmle.python.objects.ObjectAPI
import semmle.python.Unit
import site
private import semmle.python.Overlay
// Removing this import perturbs the compilation process enough that the points-to analysis gets
// compiled -- and cached -- differently depending on whether the data flow library is imported. By
// importing it privately here, we ensure that the points-to analysis is compiled the same way.

@@ -19,3 +19,4 @@ dataExtensions:
  - semmle/python/frameworks/**/*.model.yml
  - ext/*.model.yml
warnOnImplicitThis: true
compileForOverlayEval: true
355
python/ql/lib/semmle/python/Overlay.qll
Normal file
@@ -0,0 +1,355 @@
/**
 * Defines entity discard predicates for Python overlay analysis.
 */

/*- Predicates -*/
/**
 * Always holds for the overlay variant and never holds for the base variant.
 * This local predicate is used to define local predicates that behave
 * differently for the base and overlay variants.
 */
overlay[local]
predicate isOverlay() { databaseMetadata("isOverlay", "true") }
|
||||
|
||||
overlay[local]
|
||||
private string getPathForLocation(@location loc) {
|
||||
exists(@file file | locations_default(loc, file, _, _, _, _) | files(file, result))
|
||||
or
|
||||
exists(@py_Module mod | locations_ast(loc, mod, _, _, _, _) | result = getPathForModule(mod))
|
||||
}
|
||||
|
||||
overlay[local]
|
||||
private string getPathForModule(@py_Module mod) {
|
||||
exists(@container fileOrFolder | py_module_path(mod, fileOrFolder) |
|
||||
result = getPathForContainer(fileOrFolder)
|
||||
)
|
||||
}
|
||||
|
||||
overlay[local]
|
||||
private string getPathForContainer(@container fileOrFolder) {
|
||||
files(fileOrFolder, result) or folders(fileOrFolder, result)
|
||||
}
|
||||
|
||||
/*- Discardable entities and their discard predicates -*/
|
||||
/** Python database entities that use named TRAP IDs; the rest use *-ids. */
|
||||
overlay[local]
|
||||
private class NamedEntity = @py_Module or @container or @py_cobject;
|
||||
|
||||
overlay[discard_entity]
private predicate discardNamedEntity(@top el) {
  el instanceof NamedEntity and
  // Entities with named IDs can exist in the base only, in the overlay only, or in both.
  exists(Discardable d | d = el |
    overlayChangedFiles(d.getPath()) and
    not d.existsInOverlay()
  )
}
|
||||
|
||||
overlay[discard_entity]
|
||||
private predicate discardStarEntity(@top el) {
|
||||
not el instanceof NamedEntity and
|
||||
// Entities with *-ids can exist either in base or overlay, but not both.
|
||||
exists(Discardable d | d = el |
|
||||
overlayChangedFiles(d.getPath()) and
|
||||
d.existsInBase()
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* An abstract base class for all elements that can be discarded from the base.
|
||||
*/
|
||||
overlay[local]
|
||||
abstract class Discardable extends @top {
|
||||
/** Gets the path to the file in which this element occurs. */
|
||||
abstract string getPath();
|
||||
|
||||
/** Holds if this element exists in the base variant. */
|
||||
predicate existsInBase() { not isOverlay() and exists(this) }
|
||||
|
||||
/** Holds if this element exists in the overlay variant. */
|
||||
predicate existsInOverlay() { isOverlay() and exists(this) }
|
||||
|
||||
/** Gets a textual representation of this discardable element. */
|
||||
string toString() { none() }
|
||||
}
|
||||
|
||||
/**
|
||||
* Discardable locatable AST nodes (`@py_location_parent`).
|
||||
*/
|
||||
overlay[local]
|
||||
final private class DiscardableLocatable extends Discardable instanceof @py_location_parent {
|
||||
override string getPath() {
|
||||
exists(@location loc | py_locations(loc, this) | result = getPathForLocation(loc))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Discardable scopes (classes, functions, modules).
|
||||
*/
|
||||
overlay[local]
|
||||
final private class DiscardableScope extends Discardable instanceof @py_scope {
|
||||
override string getPath() {
|
||||
exists(@location loc | py_scope_location(loc, this) | result = getPathForLocation(loc))
|
||||
or
|
||||
result = getPathForModule(this)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Discardable files and folders.
|
||||
*/
|
||||
overlay[local]
|
||||
final private class DiscardableContainer extends Discardable instanceof @container {
|
||||
override string getPath() { result = getPathForContainer(this) }
|
||||
}
|
||||
|
||||
/** Discardable control flow nodes */
|
||||
overlay[local]
|
||||
final private class DiscardableCfgNode extends Discardable instanceof @py_flow_node {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_flow_bb_node(this, d.(@py_ast_node), _, _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable Python variables. */
|
||||
overlay[local]
|
||||
final private class DiscardableVar extends Discardable instanceof @py_variable {
|
||||
override string getPath() {
|
||||
exists(Discardable parent | result = parent.getPath() | variable(this, parent.(@py_scope), _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable SSA variables. */
|
||||
overlay[local]
|
||||
final private class DiscardableSsaVar extends Discardable instanceof @py_ssa_var {
|
||||
override string getPath() {
|
||||
exists(DiscardableVar other | result = other.getPath() | py_ssa_var(this, other))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable locations. */
|
||||
overlay[local]
|
||||
final private class DiscardableLocation extends Discardable instanceof @location {
|
||||
override string getPath() { result = getPathForLocation(this) }
|
||||
}
|
||||
|
||||
/** Discardable lines. */
|
||||
overlay[local]
|
||||
final private class DiscardableLine extends Discardable instanceof @py_line {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_line_lengths(this, d.(@py_Module), _, _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable string part lists. */
|
||||
overlay[local]
|
||||
final private class DiscardableStringPartList extends Discardable instanceof @py_StringPart_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_StringPart_lists(this, d.(@py_Bytes_or_Str)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable alias */
|
||||
overlay[local]
|
||||
final private class DiscardableAlias extends Discardable instanceof @py_alias {
|
||||
override string getPath() {
|
||||
exists(DiscardableAliasList d | result = d.getPath() | py_aliases(this, d, _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable alias list */
|
||||
overlay[local]
|
||||
final private class DiscardableAliasList extends Discardable instanceof @py_alias_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_alias_lists(this, d.(@py_Import)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable arguments */
|
||||
overlay[local]
|
||||
final private class DiscardableArguments extends Discardable instanceof @py_arguments {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_arguments(this, d.(@py_arguments_parent)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable boolop */
|
||||
overlay[local]
|
||||
final private class DiscardableBoolOp extends Discardable instanceof @py_boolop {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_boolops(this, _, d.(@py_BoolExpr)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable cmpop */
|
||||
overlay[local]
|
||||
final private class DiscardableCmpOp extends Discardable instanceof @py_cmpop {
|
||||
override string getPath() {
|
||||
exists(DiscardableCmpOpList d | result = d.getPath() | py_cmpops(this, _, d, _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable cmpop list */
|
||||
overlay[local]
|
||||
final private class DiscardableCmpOpList extends Discardable instanceof @py_cmpop_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_cmpop_lists(this, d.(@py_Compare)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable comprehension list */
|
||||
overlay[local]
|
||||
final private class DiscardableComprehensionList extends Discardable instanceof @py_comprehension_list
|
||||
{
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_comprehension_lists(this, d.(@py_ListComp)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable dict item list */
|
||||
overlay[local]
|
||||
final private class DiscardableDictItemList extends Discardable instanceof @py_dict_item_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() |
|
||||
py_dict_item_lists(this, d.(@py_dict_item_list_parent))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable expr context */
|
||||
overlay[local]
|
||||
final private class DiscardableExprContext extends Discardable instanceof @py_expr_context {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() |
|
||||
py_expr_contexts(this, _, d.(@py_expr_context_parent))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable expr list */
|
||||
overlay[local]
|
||||
final private class DiscardableExprList extends Discardable instanceof @py_expr_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_expr_lists(this, d.(@py_expr_list_parent), _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable operator */
|
||||
overlay[local]
|
||||
final private class DiscardableOperator extends Discardable instanceof @py_operator {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_operators(this, _, d.(@py_BinaryExpr)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable parameter list */
|
||||
overlay[local]
|
||||
final private class DiscardableParameterList extends Discardable instanceof @py_parameter_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_parameter_lists(this, d.(@py_Function)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable pattern list */
|
||||
overlay[local]
|
||||
final private class DiscardablePatternList extends Discardable instanceof @py_pattern_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() |
|
||||
py_pattern_lists(this, d.(@py_pattern_list_parent), _)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable stmt list */
|
||||
overlay[local]
|
||||
final private class DiscardableStmtList extends Discardable instanceof @py_stmt_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_stmt_lists(this, d.(@py_stmt_list_parent), _))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable str list */
|
||||
overlay[local]
|
||||
final private class DiscardableStrList extends Discardable instanceof @py_str_list {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_str_lists(this, d.(@py_str_list_parent)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable type parameter list */
|
||||
overlay[local]
|
||||
final private class DiscardableTypeParameterList extends Discardable instanceof @py_type_parameter_list
|
||||
{
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() |
|
||||
py_type_parameter_lists(this, d.(@py_type_parameter_list_parent))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable unaryop */
|
||||
overlay[local]
|
||||
final private class DiscardableUnaryOp extends Discardable instanceof @py_unaryop {
|
||||
override string getPath() {
|
||||
exists(Discardable d | result = d.getPath() | py_unaryops(this, _, d.(@py_UnaryExpr)))
|
||||
}
|
||||
}
|
||||
|
||||
/** Discardable comment */
|
||||
overlay[local]
|
||||
final private class DiscardableComment extends Discardable instanceof @py_comment {
|
||||
override string getPath() {
|
||||
exists(DiscardableLocation d | result = d.getPath() | py_comments(this, _, d))
|
||||
}
|
||||
}
|
||||
|
||||
/*- XML -*/
|
||||
overlay[local]
|
||||
final private class DiscardableXmlLocatable extends Discardable instanceof @xmllocatable {
|
||||
override string getPath() {
|
||||
exists(@location loc | xmllocations(this, loc) | result = getPathForLocation(loc))
|
||||
}
|
||||
}
|
||||
|
||||
overlay[local]
|
||||
private predicate overlayXmlExtracted(string path) {
|
||||
exists(DiscardableXmlLocatable d | not files(d, _) and not xmlNs(d, _, _, _) |
|
||||
d.existsInOverlay() and
|
||||
path = d.getPath()
|
||||
)
|
||||
}
|
||||
|
||||
overlay[discard_entity]
|
||||
private predicate discardXmlLocatable(@xmllocatable el) {
|
||||
exists(DiscardableXmlLocatable d | d = el |
|
||||
// The XML extractor is currently not incremental and may extract more
|
||||
// XML files than those included in `overlayChangedFiles`, so this discard predicate
|
||||
// handles those files alongside the normal `discardStarEntity` logic.
|
||||
overlayXmlExtracted(d.getPath()) and
|
||||
d.existsInBase()
|
||||
)
|
||||
}
|
||||
|
||||
/*- YAML -*/
|
||||
overlay[local]
|
||||
final private class DiscardableYamlLocatable extends Discardable instanceof @yaml_locatable {
|
||||
override string getPath() {
|
||||
exists(@location loc | yaml_locations(this, loc) | result = getPathForLocation(loc))
|
||||
}
|
||||
}
|
||||
|
||||
overlay[local]
|
||||
private predicate overlayYamlExtracted(string path) {
|
||||
exists(DiscardableYamlLocatable l | l.existsInOverlay() | path = l.getPath())
|
||||
}
|
||||
|
||||
overlay[discard_entity]
|
||||
private predicate discardBaseYamlLocatable(@yaml_locatable el) {
|
||||
exists(DiscardableYamlLocatable d | d = el |
|
||||
// The Yaml extractor is currently not incremental and may extract more
|
||||
// Yaml files than those included in `overlayChangedFiles`, so this discard predicate
|
||||
// handles those files alongside the normal `discardStarEntity` logic.
|
||||
overlayYamlExtracted(d.getPath()) and
|
||||
d.existsInBase()
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* Provides consistency queries for checking that every database entity
|
||||
* that can be discarded (i.e. everything but `@py_cobject`) in an overlay
|
||||
* database is indeed discarded, by proxy of having exactly one `Discardable.getPath()`.
|
||||
*/
|
||||
|
||||
import python
|
||||
import semmle.python.Overlay
|
||||
|
||||
class TopWithToString instanceof @top {
|
||||
string getDbType() {
|
||||
this instanceof @py_source_element and result = "@source_element"
|
||||
or
|
||||
this instanceof @py_object and result = "@py_object"
|
||||
or
|
||||
this instanceof @py_base_var and result = "@py_base_var"
|
||||
or
|
||||
this instanceof @location and result = "@location"
|
||||
or
|
||||
this instanceof @py_line and result = "@py_line"
|
||||
or
|
||||
this instanceof @py_comment and result = "@py_comment"
|
||||
or
|
||||
this instanceof @py_expr_parent and result = "@py_expr_parent"
|
||||
or
|
||||
this instanceof @py_expr_context and result = "@py_expr_context"
|
||||
or
|
||||
this instanceof @py_operator and result = "@py_operator"
|
||||
or
|
||||
this instanceof @py_boolop and result = "@py_boolop"
|
||||
or
|
||||
this instanceof @py_cmpop and result = "@py_cmpop"
|
||||
or
|
||||
this instanceof @py_unaryop and result = "@py_unaryop"
|
||||
or
|
||||
this instanceof @py_cmpop_list and result = "@py_cmpop_list"
|
||||
or
|
||||
this instanceof @py_alias_list and result = "@py_alias_list"
|
||||
or
|
||||
this instanceof @py_StringPart_list and result = "@py_StringPart_list"
|
||||
or
|
||||
this instanceof @py_comprehension_list and result = "@py_comprehension_list"
|
||||
or
|
||||
this instanceof @py_dict_item_list and result = "@py_dict_item_list"
|
||||
or
|
||||
this instanceof @py_pattern_list and result = "@py_pattern_list"
|
||||
or
|
||||
this instanceof @py_stmt_list and result = "@py_stmt_list"
|
||||
or
|
||||
this instanceof @py_str_list and result = "@py_str_list"
|
||||
or
|
||||
this instanceof @py_type_parameter_list and result = "@py_type_parameter_list"
|
||||
or
|
||||
this instanceof @externalDefect and result = "@externalDefect"
|
||||
or
|
||||
this instanceof @externalMetric and result = "@externalMetric"
|
||||
or
|
||||
this instanceof @externalDataElement and result = "@externalDataElement"
|
||||
or
|
||||
this instanceof @duplication_or_similarity and result = "@duplication_or_similarity"
|
||||
or
|
||||
this instanceof @svnentry and result = "@svnentry"
|
||||
or
|
||||
this instanceof @xmllocatable and result = "@xmllocatable"
|
||||
or
|
||||
this instanceof @yaml_locatable and result = "@yaml_locatable"
|
||||
}
|
||||
|
||||
string toString() {
|
||||
result = this.getDbType()
|
||||
or
|
||||
not exists(this.getDbType()) and
|
||||
result = "Unknown type"
|
||||
}
|
||||
}
|
||||
|
||||
query predicate consistencyTest(TopWithToString el, string message) {
|
||||
not el instanceof Discardable and
|
||||
not el instanceof @py_cobject and // cannot be linked to a path
|
||||
not el instanceof @externalDataElement and // cannot be linked to a path
|
||||
message = "Not Discardable"
|
||||
or
|
||||
exists(Discardable d, int numPaths | d = el and numPaths = count(d.getPath()) |
|
||||
numPaths = 0 and
|
||||
message = "Discardable but no path found"
|
||||
or
|
||||
numPaths > 1 and
|
||||
message = "Discardable but multiple paths found (" + concat(d.getPath(), ", ") + ")"
|
||||
)
|
||||
}
|
||||
@@ -47,6 +47,22 @@ externalData(
  string value : string ref
);

/*- Overlay support -*/

/**
 * The CLI will automatically emit the tuple `databaseMetadata("isOverlay", "true")`,
 * along with an `overlayChangedFiles` tuple for each new/modified/deleted file,
 * when building an overlay database, and these can be used by the discard predicates.
 */
databaseMetadata(
  string metadataKey : string ref,
  string value : string ref
);

overlayChangedFiles(
  string path : string ref
);

/*- DEPRECATED: Snapshot date -*/

snapshotDate(unique date snapshotDate : date ref);
@@ -1234,3 +1250,12 @@ py_decorated_object(int object : @py_object ref,
@py_object = @py_cobject | @py_flow_node;

@py_source_element = @py_ast_node | @container;

/** The union of all Python database entities */
@top =
  @py_source_element | @py_object | @py_base_var | @location | @py_line | @py_comment |
  @py_expr_parent | @py_expr_context |
  @py_operator | @py_boolop | @py_cmpop | @py_unaryop |
  @py_cmpop_list | @py_alias_list | @py_StringPart_list | @py_comprehension_list | @py_dict_item_list | @py_pattern_list | @py_stmt_list | @py_str_list | @py_type_parameter_list |
  @externalDefect | @externalMetric | @externalDataElement | @duplication_or_similarity | @svnentry |
  @xmllocatable | @yaml_locatable;
@@ -1269,6 +1269,53 @@
|
||||
</dependencies>
|
||||
</relation>
|
||||
<relation>
|
||||
<name>databaseMetadata</name>
|
||||
<cardinality>1</cardinality>
|
||||
<columnsizes>
|
||||
<e>
|
||||
<k>metadataKey</k>
|
||||
<v>1</v>
|
||||
</e>
|
||||
<e>
|
||||
<k>value</k>
|
||||
<v>1</v>
|
||||
</e>
|
||||
</columnsizes>
|
||||
<dependencies>
|
||||
<dep>
|
||||
<src>metadataKey</src>
|
||||
<trg>value</trg>
|
||||
<val>
|
||||
<hist>
|
||||
<budget>12</budget>
|
||||
<bs/>
|
||||
</hist>
|
||||
</val>
|
||||
</dep>
|
||||
<dep>
|
||||
<src>value</src>
|
||||
<trg>metadataKey</trg>
|
||||
<val>
|
||||
<hist>
|
||||
<budget>12</budget>
|
||||
<bs/>
|
||||
</hist>
|
||||
</val>
|
||||
</dep>
|
||||
</dependencies>
|
||||
</relation>
|
||||
<relation>
|
||||
<name>overlayChangedFiles</name>
|
||||
<cardinality>50</cardinality>
|
||||
<columnsizes>
|
||||
<e>
|
||||
<k>path</k>
|
||||
<v>50</v>
|
||||
</e>
|
||||
</columnsizes>
|
||||
<dependencies/>
|
||||
</relation>
|
||||
<relation>
|
||||
<name>snapshotDate</name>
|
||||
<cardinality>2</cardinality>
|
||||
<columnsizes>
|
||||
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,2 @@
|
||||
description: Add databaseMetadata and overlayChangedFiles relations
|
||||
compatibility: full
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,2 @@
|
||||
description: Add @top type
|
||||
compatibility: full
|
||||
@@ -3,20 +3,33 @@
  "qhelp.dtd">
<qhelp>
<overview>

<p>
Using broken or weak cryptographic algorithms can leave data
vulnerable to being decrypted or forged by an attacker.
Using broken or weak cryptographic algorithms may compromise
security guarantees such as confidentiality, integrity, and
authenticity.
</p>

<p>
Many cryptographic algorithms provided by cryptography
libraries are known to be weak, or flawed. Using such an
algorithm means that encrypted or hashed data is less
secure than it appears to be.
Many cryptographic algorithms are known to be weak or flawed. The
security guarantees of a system often rely on the underlying
cryptography, so using a weak algorithm can have severe consequences.
For example:
</p>

<ul>
<li>
If a weak encryption algorithm is used, an attacker may be able to
decrypt sensitive data.
</li>
<li>
If a weak algorithm is used for digital signatures, an attacker may
be able to forge signatures and impersonate legitimate users.
</li>
</ul>

<p>
This query alerts on any use of a weak cryptographic algorithm, that is
This query alerts on any use of a weak cryptographic algorithm that is
not a hashing algorithm. Use of broken or weak cryptographic hash
functions is handled by the
<code>py/weak-sensitive-data-hashing</code> query.
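As a concrete illustration of the overview above, here is a minimal Python sketch contrasting a broken cipher with a commonly recommended replacement. It assumes the pycryptodome package and is editorial illustration, not taken from the query's test suite.

# Illustrative only: DES is a broken 56-bit block cipher that a weak-crypto query
# would flag; AES-256 in GCM mode provides authenticated encryption instead.
from Crypto.Cipher import DES, AES
from Crypto.Random import get_random_bytes

secret = b"attack at dawn!!"  # 16 bytes, a multiple of the DES block size

# BAD: DES with a 56-bit effective key is brute-forceable.
weak_cipher = DES.new(get_random_bytes(8), DES.MODE_ECB)
weak_ciphertext = weak_cipher.encrypt(secret)

# BETTER: AES-256-GCM (confidentiality and integrity).
key = get_random_bytes(32)
cipher = AES.new(key, AES.MODE_GCM)
ciphertext, tag = cipher.encrypt_and_digest(secret)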
@@ -0,0 +1,5 @@
from new import *
from lib import *

def get_greeting():
    return combine("Goodbye", get_new_target())
@@ -0,0 +1,4 @@
from greeting import *

if __name__ == "__main__":
    print(get_greeting())
@@ -0,0 +1 @@
def combine(a, b): return f"{a}, {b}!"
@@ -0,0 +1,2 @@
def get_new_target():
    return "世界"
@@ -0,0 +1 @@
semmle-extractor-options: -R . -m hello.py --filter exclude:**/*.testproj/**
@@ -0,0 +1,152 @@
|
||||
testStringLiterals
|
||||
| greeting.py:5:20:5:28 | StringLiteral | Goodbye |
|
||||
| hello.py:3:16:3:25 | StringLiteral | __main__ |
|
||||
| lib/__init__.py:1:27:1:29 | StringLiteral | |
|
||||
| lib/__init__.py:1:31:1:34 | StringLiteral | , |
|
||||
| lib/__init__.py:1:36:1:38 | StringLiteral | ! |
|
||||
| new.py:2:12:2:15 | StringLiteral | \u4e16\u754c |
|
||||
testModules
|
||||
| greeting.py:0:0:0:0 | Module greeting |
|
||||
| hello.py:0:0:0:0 | Module hello |
|
||||
| lib/__init__.py:0:0:0:0 | Module lib.__init__ |
|
||||
| lib:0:0:0:0 | Package lib |
|
||||
| new.py:0:0:0:0 | Module new |
|
||||
testFunctions
|
||||
| greeting.py:4:1:4:19 | Function get_greeting |
|
||||
| lib/__init__.py:1:1:1:18 | Function combine |
|
||||
| new.py:1:1:1:21 | Function get_new_target |
|
||||
testClasses
|
||||
testLocations
|
||||
| greeting.py:0:0:0:0 | greeting.py:0 |
|
||||
| greeting.py:1:1:1:17 | greeting.py:1 |
|
||||
| greeting.py:1:6:1:8 | greeting.py:1 |
|
||||
| greeting.py:2:1:2:17 | greeting.py:2 |
|
||||
| greeting.py:2:6:2:8 | greeting.py:2 |
|
||||
| greeting.py:4:1:4:19 | greeting.py:4 |
|
||||
| greeting.py:4:1:4:19 | greeting.py:4 |
|
||||
| greeting.py:4:1:4:19 | greeting.py:4 |
|
||||
| greeting.py:4:5:4:16 | greeting.py:4 |
|
||||
| greeting.py:5:5:5:47 | greeting.py:5 |
|
||||
| greeting.py:5:12:5:18 | greeting.py:5 |
|
||||
| greeting.py:5:12:5:47 | greeting.py:5 |
|
||||
| greeting.py:5:20:5:28 | greeting.py:5 |
|
||||
| greeting.py:5:31:5:44 | greeting.py:5 |
|
||||
| greeting.py:5:31:5:46 | greeting.py:5 |
|
||||
| hello.py:0:0:0:0 | hello.py:0 |
|
||||
| hello.py:1:1:1:22 | hello.py:1 |
|
||||
| hello.py:1:6:1:13 | hello.py:1 |
|
||||
| hello.py:3:1:3:26 | hello.py:3 |
|
||||
| hello.py:3:4:3:11 | hello.py:3 |
|
||||
| hello.py:3:4:3:25 | hello.py:3 |
|
||||
| hello.py:3:16:3:25 | hello.py:3 |
|
||||
| hello.py:4:5:4:9 | hello.py:4 |
|
||||
| hello.py:4:5:4:25 | hello.py:4 |
|
||||
| hello.py:4:5:4:25 | hello.py:4 |
|
||||
| hello.py:4:11:4:22 | hello.py:4 |
|
||||
| hello.py:4:11:4:24 | hello.py:4 |
|
||||
| lib/__init__.py:0:0:0:0 | lib/__init__.py:0 |
|
||||
| lib/__init__.py:1:1:1:18 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:1:1:18 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:1:1:18 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:5:1:11 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:13:1:13 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:16:1:16 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:20:1:38 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:27:1:29 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:27:1:38 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:30:1:30 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:31:1:34 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:35:1:35 | lib/__init__.py:1 |
|
||||
| lib/__init__.py:1:36:1:38 | lib/__init__.py:1 |
|
||||
| lib:0:0:0:0 | lib:0 |
|
||||
| new.py:0:0:0:0 | new.py:0 |
|
||||
| new.py:1:1:1:21 | new.py:1 |
|
||||
| new.py:1:1:1:21 | new.py:1 |
|
||||
| new.py:1:1:1:21 | new.py:1 |
|
||||
| new.py:1:5:1:18 | new.py:1 |
|
||||
| new.py:2:5:2:15 | new.py:2 |
|
||||
| new.py:2:12:2:15 | new.py:2 |
|
||||
testFiles
|
||||
| greeting.py:0:0:0:0 | greeting.py |
|
||||
| hello.py:0:0:0:0 | hello.py |
|
||||
| lib/__init__.py:0:0:0:0 | lib/__init__.py |
|
||||
| new.py:0:0:0:0 | new.py |
|
||||
testCfgNodes
|
||||
| greeting.py:0:0:0:0 | Entry node for Module greeting |
|
||||
| greeting.py:0:0:0:0 | Exit node for Module greeting |
|
||||
| greeting.py:1:1:1:17 | ControlFlowNode for from new import * |
|
||||
| greeting.py:1:6:1:8 | ControlFlowNode for ImportExpr |
|
||||
| greeting.py:2:1:2:17 | ControlFlowNode for from lib import * |
|
||||
| greeting.py:2:6:2:8 | ControlFlowNode for ImportExpr |
|
||||
| greeting.py:4:1:4:19 | ControlFlowNode for FunctionExpr |
|
||||
| greeting.py:4:1:4:19 | Entry node for Function get_greeting |
|
||||
| greeting.py:4:1:4:19 | Exit node for Function get_greeting |
|
||||
| greeting.py:4:5:4:16 | ControlFlowNode for get_greeting |
|
||||
| greeting.py:5:5:5:47 | ControlFlowNode for Return |
|
||||
| greeting.py:5:12:5:18 | ControlFlowNode for combine |
|
||||
| greeting.py:5:12:5:47 | ControlFlowNode for combine() |
|
||||
| greeting.py:5:20:5:28 | ControlFlowNode for StringLiteral |
|
||||
| greeting.py:5:31:5:44 | ControlFlowNode for get_new_target |
|
||||
| greeting.py:5:31:5:46 | ControlFlowNode for get_new_target() |
|
||||
| hello.py:0:0:0:0 | Entry node for Module hello |
|
||||
| hello.py:0:0:0:0 | Exit node for Module hello |
|
||||
| hello.py:1:1:1:22 | ControlFlowNode for from greeting import * |
|
||||
| hello.py:1:6:1:13 | ControlFlowNode for ImportExpr |
|
||||
| hello.py:3:4:3:11 | ControlFlowNode for __name__ |
|
||||
| hello.py:3:4:3:25 | ControlFlowNode for Compare |
|
||||
| hello.py:3:16:3:25 | ControlFlowNode for StringLiteral |
|
||||
| hello.py:4:5:4:9 | ControlFlowNode for print |
|
||||
| hello.py:4:5:4:25 | ControlFlowNode for print() |
|
||||
| hello.py:4:11:4:22 | ControlFlowNode for get_greeting |
|
||||
| hello.py:4:11:4:24 | ControlFlowNode for get_greeting() |
|
||||
| lib/__init__.py:0:0:0:0 | Entry node for Module lib.__init__ |
|
||||
| lib/__init__.py:0:0:0:0 | Exit node for Module lib.__init__ |
|
||||
| lib/__init__.py:1:1:1:18 | ControlFlowNode for FunctionExpr |
|
||||
| lib/__init__.py:1:1:1:18 | Entry node for Function combine |
|
||||
| lib/__init__.py:1:1:1:18 | Exit node for Function combine |
|
||||
| lib/__init__.py:1:5:1:11 | ControlFlowNode for combine |
|
||||
| lib/__init__.py:1:13:1:13 | ControlFlowNode for a |
|
||||
| lib/__init__.py:1:16:1:16 | ControlFlowNode for b |
|
||||
| lib/__init__.py:1:20:1:38 | ControlFlowNode for Return |
|
||||
| lib/__init__.py:1:27:1:29 | ControlFlowNode for StringLiteral |
|
||||
| lib/__init__.py:1:27:1:38 | ControlFlowNode for Fstring |
|
||||
| lib/__init__.py:1:30:1:30 | ControlFlowNode for a |
|
||||
| lib/__init__.py:1:31:1:34 | ControlFlowNode for StringLiteral |
|
||||
| lib/__init__.py:1:35:1:35 | ControlFlowNode for b |
|
||||
| lib/__init__.py:1:36:1:38 | ControlFlowNode for StringLiteral |
|
||||
| lib:0:0:0:0 | Entry node for Package lib |
|
||||
| new.py:0:0:0:0 | Entry node for Module new |
|
||||
| new.py:0:0:0:0 | Exit node for Module new |
|
||||
| new.py:1:1:1:21 | ControlFlowNode for FunctionExpr |
|
||||
| new.py:1:1:1:21 | Entry node for Function get_new_target |
|
||||
| new.py:1:1:1:21 | Exit node for Function get_new_target |
|
||||
| new.py:1:5:1:18 | ControlFlowNode for get_new_target |
|
||||
| new.py:2:5:2:15 | ControlFlowNode for Return |
|
||||
| new.py:2:12:2:15 | ControlFlowNode for StringLiteral |
|
||||
testSsaVars
|
||||
| file://:0:0:0:0 | SSA Variable __name__ |
|
||||
| file://:0:0:0:0 | SSA Variable get_greeting |
|
||||
| file://:0:0:0:0 | SSA Variable print |
|
||||
| greeting.py:4:5:4:16 | SSA Variable get_greeting |
|
||||
| lib/__init__.py:1:5:1:11 | SSA Variable combine |
|
||||
| lib/__init__.py:1:13:1:13 | SSA Variable a |
|
||||
| lib/__init__.py:1:16:1:16 | SSA Variable b |
|
||||
| new.py:1:5:1:18 | SSA Variable get_new_target |
|
||||
testVars
|
||||
| Global Variable __name__ | greeting.py:0:0:0:0 | Module greeting |
|
||||
| Global Variable __name__ | hello.py:0:0:0:0 | Module hello |
|
||||
| Global Variable __name__ | lib/__init__.py:0:0:0:0 | Module lib.__init__ |
|
||||
| Global Variable __name__ | new.py:0:0:0:0 | Module new |
|
||||
| Global Variable __package__ | greeting.py:0:0:0:0 | Module greeting |
|
||||
| Global Variable __package__ | hello.py:0:0:0:0 | Module hello |
|
||||
| Global Variable __package__ | lib/__init__.py:0:0:0:0 | Module lib.__init__ |
|
||||
| Global Variable __package__ | new.py:0:0:0:0 | Module new |
|
||||
| Global Variable combine | greeting.py:0:0:0:0 | Module greeting |
|
||||
| Global Variable combine | lib/__init__.py:0:0:0:0 | Module lib.__init__ |
|
||||
| Global Variable get_greeting | greeting.py:0:0:0:0 | Module greeting |
|
||||
| Global Variable get_greeting | hello.py:0:0:0:0 | Module hello |
|
||||
| Global Variable get_new_target | greeting.py:0:0:0:0 | Module greeting |
|
||||
| Global Variable get_new_target | new.py:0:0:0:0 | Module new |
|
||||
| Global Variable print | hello.py:0:0:0:0 | Module hello |
|
||||
| Local Variable a | lib/__init__.py:1:1:1:18 | Function combine |
|
||||
| Local Variable b | lib/__init__.py:1:1:1:18 | Function combine |
|
||||
@@ -0,0 +1,19 @@
import python

query predicate testStringLiterals(StringLiteral l, string text) { l.getText() = text }

query predicate testModules(Module m) { any() }

query predicate testFunctions(Function f) { any() }

query predicate testClasses(Class c) { any() }

query predicate testLocations(Location l) { any() }

query predicate testFiles(File f) { any() }

query predicate testCfgNodes(ControlFlowNode n) { any() }

query predicate testSsaVars(SsaVariable var) { any() }

query predicate testVars(Variable var, Scope s) { s = var.getScope() }
@@ -0,0 +1,3 @@
overlay:
  base: "orig_src"
  overlay: "../basic-full-eval"
@@ -0,0 +1 @@
semmle-extractor-options: -R . -m hello.py --filter exclude:**/*.testproj/**
@@ -0,0 +1,5 @@
from old import *
from lib import *

def get_greeting():
    return combine("Hello", get_old_target())
@@ -0,0 +1,4 @@
from greeting import *

if __name__ == "__main__":
    print(get_greeting())
@@ -0,0 +1 @@
def combine(a, b): return f"{a}, {b}!"
@@ -0,0 +1,2 @@
def get_old_target():
    return "World"
@@ -0,0 +1 @@
../basic-full-eval/test.ql
@@ -4,14 +4,33 @@
<qhelp>
<overview>
<p>
Using broken or weak cryptographic algorithms can leave data
vulnerable to being decrypted or forged by an attacker.
Using broken or weak cryptographic algorithms may compromise
security guarantees such as confidentiality, integrity, and
authenticity.
</p>

<p>
Many cryptographic algorithms provided by cryptography
libraries are known to be weak, or flawed. Using such an
algorithm means that encrypted or hashed data is less
secure than it appears to be.
Many cryptographic algorithms are known to be weak or flawed. The
security guarantees of a system often rely on the underlying
cryptography, so using a weak algorithm can have severe consequences.
For example:
</p>

<ul>
<li>
If a weak encryption algorithm is used, an attacker may be able to
decrypt sensitive data.
</li>
<li>
If a weak algorithm is used for digital signatures, an attacker may
be able to forge signatures and impersonate legitimate users.
</li>
</ul>
<p>
This query alerts on any use of a weak cryptographic algorithm that is
not a hashing algorithm. Use of broken or weak cryptographic hash
functions is handled by the
<code>rb/weak-sensitive-data-hashing</code> query.
</p>
</overview>
<recommendation>

@@ -16,7 +16,7 @@ ql/rust/ql/src/queries/security/CWE-312/CleartextLogging.ql
|
||||
ql/rust/ql/src/queries/security/CWE-312/CleartextStorageDatabase.ql
|
||||
ql/rust/ql/src/queries/security/CWE-319/UseOfHttp.ql
|
||||
ql/rust/ql/src/queries/security/CWE-327/BrokenCryptoAlgorithm.ql
|
||||
ql/rust/ql/src/queries/security/CWE-328/WeakSensitiveDataHashing.ql
|
||||
ql/rust/ql/src/queries/security/CWE-327/WeakSensitiveDataHashing.ql
|
||||
ql/rust/ql/src/queries/security/CWE-614/InsecureCookie.ql
|
||||
ql/rust/ql/src/queries/security/CWE-770/UncontrolledAllocationSize.ql
|
||||
ql/rust/ql/src/queries/security/CWE-798/HardcodedCryptographicValue.ql
|
||||
|
||||
@@ -17,7 +17,7 @@ ql/rust/ql/src/queries/security/CWE-312/CleartextLogging.ql
|
||||
ql/rust/ql/src/queries/security/CWE-312/CleartextStorageDatabase.ql
|
||||
ql/rust/ql/src/queries/security/CWE-319/UseOfHttp.ql
|
||||
ql/rust/ql/src/queries/security/CWE-327/BrokenCryptoAlgorithm.ql
|
||||
ql/rust/ql/src/queries/security/CWE-328/WeakSensitiveDataHashing.ql
|
||||
ql/rust/ql/src/queries/security/CWE-327/WeakSensitiveDataHashing.ql
|
||||
ql/rust/ql/src/queries/security/CWE-614/InsecureCookie.ql
|
||||
ql/rust/ql/src/queries/security/CWE-696/BadCtorInitialization.ql
|
||||
ql/rust/ql/src/queries/security/CWE-770/UncontrolledAllocationSize.ql
|
||||
|
||||
@@ -17,7 +17,7 @@ ql/rust/ql/src/queries/security/CWE-312/CleartextLogging.ql
|
||||
ql/rust/ql/src/queries/security/CWE-312/CleartextStorageDatabase.ql
|
||||
ql/rust/ql/src/queries/security/CWE-319/UseOfHttp.ql
|
||||
ql/rust/ql/src/queries/security/CWE-327/BrokenCryptoAlgorithm.ql
|
||||
ql/rust/ql/src/queries/security/CWE-328/WeakSensitiveDataHashing.ql
|
||||
ql/rust/ql/src/queries/security/CWE-327/WeakSensitiveDataHashing.ql
|
||||
ql/rust/ql/src/queries/security/CWE-614/InsecureCookie.ql
|
||||
ql/rust/ql/src/queries/security/CWE-770/UncontrolledAllocationSize.ql
|
||||
ql/rust/ql/src/queries/security/CWE-798/HardcodedCryptographicValue.ql
|
||||
|
||||
4
rust/ql/lib/change-notes/2025-09-29-actix-web-model.md
Normal file
@@ -0,0 +1,4 @@
---
category: majorAnalysis
---
* Added basic models for the `actix-web` web framework.
4
rust/ql/lib/change-notes/2025-10-10-mysql.md
Normal file
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Added models for the `mysql` and `mysql_async` libraries.
4
rust/ql/lib/change-notes/2025-10-15-models.md
Normal file
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Generalized some existing models to improve data flow.
@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Added `ExtractedFile::hasSemantics` and `ExtractedFile::isSkippedByCompilation` predicates.
@@ -71,6 +71,25 @@ class File extends Container, Impl::File {
 */
class ExtractedFile extends File {
  ExtractedFile() { this.fromSource() }

  private Diagnostic getNoSemanticsDiagnostic() {
    result.getTag() = "semantics" and result.getLocation().getFile() = this
  }

  /**
   * Holds if we have semantic information about this file, which means
   * we should be able to
   * * expand any macros
   * * skip any blocks that are conditionally compiled out
   */
  predicate hasSemantics() { not exists(this.getNoSemanticsDiagnostic()) }

  /**
   * Holds if we know this file was skipped by conditional compilation.
   * This is not the same as `not this.hasSemantics()`, as a file
   * might not have semantics because of some error.
   */
  predicate isSkippedByCompilation() { this.getNoSemanticsDiagnostic().getSeverityText() = "Info" }
}

/**
18
rust/ql/lib/codeql/rust/frameworks/actix-web.model.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: sourceModel
|
||||
data:
|
||||
- ["<actix_web::route::Route>::to", "Argument[0].Parameter[0..7]", "remote", "manual"]
|
||||
# Actix attributes such as `get` expand to this `to` call on the handler.
|
||||
- ["<actix_web::resource::Resource>::to", "Argument[0].Parameter[0..7]", "remote", "manual"]
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: summaryModel
|
||||
data:
|
||||
- ["<actix_web::types::path::Path>::into_inner", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<actix_web::types::path::Path>::into_inner", "Argument[self]", "ReturnValue.Field[0]", "taint", "manual"]
|
||||
- ["<actix_web::types::path::Path>::into_inner", "Argument[self]", "ReturnValue.Field[1]", "taint", "manual"]
|
||||
- ["<actix_web::types::path::Path>::into_inner", "Argument[self]", "ReturnValue.Field[2]", "taint", "manual"]
|
||||
- ["<actix_web::types::path::Path>::into_inner", "Argument[self]", "ReturnValue.Field[3]", "taint", "manual"]
|
||||
- ["<actix_web::types::path::Path>::into_inner", "Argument[self]", "ReturnValue.Field[4]", "taint", "manual"]
|
||||
43
rust/ql/lib/codeql/rust/frameworks/mysql-async.model.yml
Normal file
@@ -0,0 +1,43 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: sinkModel
|
||||
data:
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_drop", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_first", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_fold", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_stream", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_map", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<mysql_async::conn::Conn as mysql_async::queryable::Queryable>::query_iter", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<mysql_async::conn::Conn as mysql_async::queryable::Queryable>::prep", "Argument[0]", "sql-injection", "manual"]
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: sourceModel
|
||||
data:
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::exec", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_first", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Field[core::option::Option::Some(0)]", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::exec_first", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Field[core::option::Option::Some(0)]", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_fold", "Argument[2].Parameter[1]", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::exec_fold", "Argument[3].Parameter[1]", "database", "manual"]
|
||||
- ["<mysql_async::conn::Conn as mysql_async::queryable::Queryable>::query_iter", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<mysql_async::conn::Conn as mysql_async::queryable::Queryable>::exec_iter", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_map", "Argument[1].Parameter[0]", "database", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::exec_map", "Argument[2].Parameter[0]", "database", "manual"]
|
||||
- ["<mysql_common::row::Row>::get", "ReturnValue.Field[core::option::Option::Some(0)]", "database", "manual"]
|
||||
- ["<mysql_common::row::Row>::get_opt", "ReturnValue.Field[core::option::Option::Some(0)].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<mysql_common::row::Row>::take", "ReturnValue.Field[core::option::Option::Some(0)]", "database", "manual"]
|
||||
- ["<mysql_common::row::Row>::take_opt", "ReturnValue.Field[core::option::Option::Some(0)].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<mysql_common::row::Row>::as_ref", "ReturnValue.Field[core::option::Option::Some(0)].Reference", "database", "manual"]
|
||||
- ["<mysql_common::row::Row>::unwrap", "ReturnValue.Element", "database", "manual"]
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: summaryModel
|
||||
data:
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_fold", "Argument[1]", "Argument[2].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_fold", "Argument[2].ReturnValue", "ReturnValue.Future.Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::exec_fold", "Argument[2]", "Argument[3].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::exec_fold", "Argument[3].ReturnValue", "ReturnValue.Future.Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_map", "Argument[1].ReturnValue", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Element", "value", "manual"]
|
||||
- ["<_ as mysql_async::queryable::Queryable>::query_map", "Argument[2].ReturnValue", "ReturnValue.Future.Field[core::result::Result::Ok(0)].Element", "value", "manual"]
|
||||
54
rust/ql/lib/codeql/rust/frameworks/mysql.model.yml
Normal file
@@ -0,0 +1,54 @@
|
||||
extensions:
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: sinkModel
|
||||
data:
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_opt", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_drop", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_first", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_first_opt", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold_opt", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<mysql::conn::pool::PooledConn as mysql::conn::queryable::Queryable>::query_iter", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_map", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_map_opt", "Argument[0]", "sql-injection", "manual"]
|
||||
- ["<mysql::conn::pool::PooledConn as mysql::conn::queryable::Queryable>::prep", "Argument[0]", "sql-injection", "manual"]
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: sourceModel
|
||||
data:
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_opt", "ReturnValue.Field[core::result::Result::Ok(0)].Element.Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_opt", "ReturnValue.Field[core::result::Result::Ok(0)].Element.Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_first", "ReturnValue.Field[core::result::Result::Ok(0)].Field[core::option::Option::Some(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_first", "ReturnValue.Field[core::result::Result::Ok(0)].Field[core::option::Option::Some(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_first_opt", "ReturnValue.Field[core::result::Result::Ok(0)].Field[core::option::Option::Some(0)].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_first_opt", "ReturnValue.Field[core::result::Result::Ok(0)].Field[core::option::Option::Some(0)].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold", "Argument[2].Parameter[1]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_fold", "Argument[3].Parameter[1]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold_opt", "Argument[2].Parameter[1].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_fold_opt", "Argument[3].Parameter[1].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<mysql::conn::pool::PooledConn as mysql::conn::queryable::Queryable>::query_iter", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<mysql::conn::pool::PooledConn as mysql::conn::queryable::Queryable>::exec_iter", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_map", "Argument[1].Parameter[0]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_map_opt", "Argument[1].Parameter[0].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_map", "Argument[2].Parameter[0]", "database", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_map_opt", "Argument[2].Parameter[0].Field[core::result::Result::Ok(0)]", "database", "manual"]
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: summaryModel
|
||||
data:
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold", "Argument[1]", "Argument[2].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold", "Argument[2].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_fold", "Argument[2]", "Argument[3].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_fold", "Argument[3].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold_opt", "Argument[1]", "Argument[2].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_fold_opt", "Argument[2].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_fold_opt", "Argument[2]", "Argument[3].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_fold_opt", "Argument[3].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_map", "Argument[1].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::query_map_opt", "Argument[1].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_map", "Argument[2].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "value", "manual"]
|
||||
- ["<_ as mysql::conn::queryable::Queryable>::exec_map_opt", "Argument[2].ReturnValue", "ReturnValue.Field[core::result::Result::Ok(0)].Element", "value", "manual"]
|
||||
@@ -14,19 +14,11 @@ extensions:
|
||||
- ["alloc::alloc::alloc_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["alloc::alloc::realloc", "Argument[2]", "alloc-size", "manual"]
|
||||
- ["<_ as core::alloc::global::GlobalAlloc>::alloc", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<std::alloc::System as core::alloc::global::GlobalAlloc>::alloc", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<_ as core::alloc::global::GlobalAlloc>::alloc_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<std::alloc::System as core::alloc::global::GlobalAlloc>::alloc_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<std::alloc::System as core::alloc::Allocator>::allocate", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<std::alloc::System as core::alloc::Allocator>::allocate_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<std::alloc::System as core::alloc::Allocator>::grow", "Argument[2]", "alloc-layout", "manual"]
|
||||
- ["<std::alloc::System as core::alloc::Allocator>::grow_zeroed", "Argument[2]", "alloc-layout", "manual"]
|
||||
- ["<alloc::alloc::Global as core::alloc::global::GlobalAlloc>::alloc", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<alloc::alloc::Global as core::alloc::global::GlobalAlloc>::alloc_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<alloc::alloc::Global as core::alloc::Allocator>::allocate", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<alloc::alloc::Global as core::alloc::Allocator>::allocate_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<alloc::alloc::Global as core::alloc::Allocator>::grow", "Argument[2]", "alloc-layout", "manual"]
|
||||
- ["<alloc::alloc::Global as core::alloc::Allocator>::grow_zeroed", "Argument[2]", "alloc-layout", "manual"]
|
||||
- ["<_ as core::alloc::Allocator>::allocate", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<_ as core::alloc::Allocator>::allocate_zeroed", "Argument[0]", "alloc-layout", "manual"]
|
||||
- ["<_ as core::alloc::Allocator>::grow", "Argument[2]", "alloc-layout", "manual"]
|
||||
- ["<_ as core::alloc::Allocator>::grow_zeroed", "Argument[2]", "alloc-layout", "manual"]
|
||||
- addsTo:
|
||||
pack: codeql/rust-all
|
||||
extensible: summaryModel
|
||||
@@ -42,11 +34,11 @@ extensions:
|
||||
- ["<core::str>::as_bytes", "Argument[self]", "ReturnValue", "value", "manual"]
|
||||
- ["<alloc::string::String>::as_str", "Argument[self]", "ReturnValue", "value", "manual"]
|
||||
- ["<alloc::string::String>::as_bytes", "Argument[self]", "ReturnValue", "value", "manual"]
|
||||
- ["<alloc::str as alloc::string::ToString>::to_string", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<alloc::string::String as alloc::string::ToString>::to_string", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<_ as alloc::string::ToString>::to_string", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<core::str>::parse", "Argument[self]", "ReturnValue.Field[core::result::Result::Ok(0)]", "taint", "manual"]
|
||||
- ["<core::str>::trim", "Argument[self]", "ReturnValue.Reference", "taint", "manual"]
|
||||
- ["<alloc::string::String as core::ops::arith::Add>::add", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<alloc::string::String as core::ops::arith::Add>::add", "Argument[0].Reference", "ReturnValue", "taint", "manual"]
|
||||
- ["<_ as core::ops::arith::Add>::add", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<_ as core::ops::arith::Add>::add", "Argument[0]", "ReturnValue", "taint", "manual"]
|
||||
- ["<_ as core::ops::arith::Add>::add", "Argument[0].Reference", "ReturnValue", "taint", "manual"]
|
||||
# Vec
|
||||
- ["alloc::vec::from_elem", "Argument[0]", "ReturnValue.Element", "value", "manual"]
|
||||
@@ -9,25 +9,20 @@ extensions:
|
||||
- ["<core::alloc::layout::Layout>::align_to", "Argument[self].Element", "ReturnValue.Field[0,1,2].Reference.Element", "taint", "manual"]
|
||||
- ["<_ as core::convert::Into>::into", "Argument[self].Element", "ReturnValue.Element", "taint", "manual"]
|
||||
- ["<_ as core::convert::Into>::into", "Argument[self].Reference.Element", "ReturnValue.Element", "taint", "manual"]
|
||||
- ["<alloc::string::String as core::convert::Into>::into", "Argument[self].Element", "ReturnValue.Element", "taint", "manual"]
|
||||
- ["<alloc::string::String as core::convert::Into>::into", "Argument[self].Reference.Element", "ReturnValue.Element", "taint", "manual"]
|
||||
# From
|
||||
- ["<_ as core::convert::From>::from", "Argument[0]", "ReturnValue", "taint", "manual"]
|
||||
# Iterator
|
||||
- ["<core::result::Result>::iter", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<alloc::vec::Vec as value_trait::array::Array>::iter", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<_ as value_trait::array::Array>::iter", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<core::result::Result>::iter_mut", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<core::result::Result>::into_iter", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::for_each", "Argument[self].Element", "Argument[0].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::nth", "Argument[self].Element", "ReturnValue.Field[core::option::Option::Some(0)]", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::next", "Argument[self].Element", "ReturnValue.Field[core::option::Option::Some(0)]", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::next", "Argument[self].Element", "ReturnValue.Field[core::option::Option::Some(0)].Field[core::result::Result::Ok(0)]", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::collect", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::map", "Argument[self].Element", "Argument[0].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::for_each", "Argument[self].Element", "Argument[0].Parameter[0]", "value", "manual"]
|
||||
- ["<core::slice::iter::Iter as core::iter::traits::iterator::Iterator>::nth", "Argument[self].Element", "ReturnValue.Field[core::option::Option::Some(0)]", "value", "manual"]
|
||||
- ["<core::slice::iter::Iter as core::iter::traits::iterator::Iterator>::next", "Argument[self].Element", "ReturnValue.Field[core::option::Option::Some(0)]", "value", "manual"]
|
||||
- ["<core::slice::iter::Iter as core::iter::traits::iterator::Iterator>::collect", "Argument[self].Element", "ReturnValue.Element", "value", "manual"]
|
||||
- ["<core::slice::iter::Iter as core::iter::traits::iterator::Iterator>::map", "Argument[self].Element", "Argument[0].Parameter[0]", "value", "manual"]
|
||||
- ["<_ as core::iter::traits::iterator::Iterator>::for_each", "Argument[self].Element", "Argument[0].Parameter[0]", "value", "manual"]
|
||||
# Layout
|
||||
- ["<core::alloc::layout::Layout>::from_size_align", "Argument[0]", "ReturnValue.Field[core::result::Result::Ok(0)]", "taint", "manual"]
|
||||
- ["<core::alloc::layout::Layout>::from_size_align_unchecked", "Argument[0]", "ReturnValue", "taint", "manual"]
|
||||
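The iterator rows above model element-wise flow: whatever taints the elements of a collection also taints the values produced by `next`, `map`, `collect`, and similar calls. A minimal illustration, again with made-up `source`/`sink` helpers rather than anything from the diff:

```rust
fn source() -> String { std::env::var("UNTRUSTED").unwrap_or_default() }
fn sink(s: &str) { println!("{s}"); }

fn main() {
    let items = vec![source(), "constant".to_string()];
    // Element-level taint is preserved through `iter`, `map`, and `collect`,
    // matching the `Iterator::map` / `Iterator::collect` summaries.
    let upper: Vec<String> = items.iter().map(|s| s.to_uppercase()).collect();
    if let Some(first) = upper.first() {
        sink(first); // element taint reaches the sink
    }
}
```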
@@ -9,23 +9,11 @@ extensions:
|
||||
extensible: summaryModel
|
||||
data:
|
||||
- ["<std::io::buffered::bufreader::BufReader>::new", "Argument[0]", "ReturnValue", "taint", "manual"]
|
||||
- ["<std::io::buffered::bufreader::BufReader as std::io::BufRead>::fill_buf", "Argument[self]", "ReturnValue.Field[core::result::Result::Ok(0)]", "taint", "manual"]
|
||||
- ["<_ as std::io::BufRead>::fill_buf", "Argument[self]", "ReturnValue.Field[core::result::Result::Ok(0)]", "taint", "manual"]
|
||||
- ["<std::io::buffered::bufreader::BufReader>::buffer", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<std::io::stdio::Stdin as std::io::Read>::read", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::StdinLock as std::io::Read>::read", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::fs::File as std::io::Read>::read", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<_ as std::io::Read>::read", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::Stdin as std::io::Read>::read_to_string", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::StdinLock as std::io::Read>::read_to_string", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::fs::File as std::io::Read>::read_to_string", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<_ as std::io::Read>::read_to_string", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::Stdin as std::io::Read>::read_to_end", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::StdinLock as std::io::Read>::read_to_end", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::fs::File as std::io::Read>::read_to_end", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<_ as std::io::Read>::read_to_end", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::Stdin as std::io::Read>::read_exact", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::io::stdio::StdinLock as std::io::Read>::read_exact", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<std::fs::File as std::io::Read>::read_exact", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<_ as std::io::Read>::read_exact", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<_ as std::io::BufRead>::read_line", "Argument[self]", "Argument[0].Reference", "taint", "manual"]
|
||||
- ["<_ as std::io::BufRead>::read_until", "Argument[self]", "Argument[1].Reference", "taint", "manual"]
|
||||
@@ -36,4 +24,3 @@ extensions:
|
||||
- ["<_ as std::io::Read>::chain", "Argument[0]", "ReturnValue", "taint", "manual"]
|
||||
- ["<_ as std::io::Read>::take", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<std::io::stdio::Stdin>::lock", "Argument[self]", "ReturnValue", "taint", "manual"]
|
||||
- ["<std::io::Split as core::iter::traits::iterator::Iterator>::next", "Argument[self]", "ReturnValue.Field[core::option::Option::Some(0)].Field[core::result::Result::Ok(0)]", "taint", "manual"]
|
||||
|
||||
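The `std::io` summaries above encode that a tainted reader writes taint into the buffer passed to `read`, `read_to_string`, `read_line`, and related methods. A small sketch of the code shape they are meant to cover; stdin is used here for concreteness, and the `sink` helper is a placeholder:

```rust
use std::io::{self, BufRead, Read};

fn sink(s: &str) { println!("{s}"); } // placeholder sink

fn main() -> io::Result<()> {
    // `<Stdin as Read>::read_to_string`: taint flows from the reader
    // into the `&mut String` argument.
    let mut buf = String::new();
    io::stdin().read_to_string(&mut buf)?;
    sink(&buf);

    // `<_ as BufRead>::read_line` behaves the same way for line input.
    let mut line = String::new();
    io::stdin().lock().read_line(&mut line)?;
    sink(&line);
    Ok(())
}
```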
@@ -4,19 +4,31 @@
|
||||
<qhelp>
|
||||
<overview>
|
||||
<p>
|
||||
Using broken or weak cryptographic algorithms can leave data
|
||||
vulnerable to being decrypted or forged by an attacker.
|
||||
Using broken or weak cryptographic algorithms may compromise
|
||||
security guarantees such as confidentiality, integrity, and
|
||||
authenticity.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
Many cryptographic algorithms provided by cryptography
|
||||
libraries are known to be weak, or flawed. Using such an
|
||||
algorithm means that encrypted or hashed data is less
|
||||
secure than it appears to be.
|
||||
Many cryptographic algorithms are known to be weak or flawed. The
|
||||
security guarantees of a system often rely on the underlying
|
||||
cryptography, so using a weak algorithm can have severe consequences.
|
||||
For example:
|
||||
</p>
|
||||
|
||||
<ul>
|
||||
<li>
|
||||
If a weak encryption algorithm is used, an attacker may be able to
|
||||
decrypt sensitive data.
|
||||
</li>
|
||||
<li>
|
||||
If a weak algorithm is used for digital signatures, an attacker may
|
||||
be able to forge signatures and impersonate legitimate users.
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<p>
|
||||
This query alerts on any use of a weak cryptographic algorithm, that is
|
||||
This query alerts on any use of a weak cryptographic algorithm that is
|
||||
not a hashing algorithm. Use of broken or weak cryptographic hash
|
||||
functions is handled by the
|
||||
<code>rust/weak-sensitive-data-hashing</code> query.
|
||||
|
||||
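For context on the reworded qhelp text, here is a minimal sketch of the kind of non-hashing weak-algorithm use the query is about. `weak_cipher` is purely illustrative and stands in for any API applying a broken algorithm such as DES or RC4; it is not a real crate function and does not appear in this diff:

```rust
// Illustrative only: a stand-in for a broken cipher. Do not use.
fn weak_cipher(key: &[u8; 8], plaintext: &[u8]) -> Vec<u8> {
    // Imagine a DES implementation here; a repeating-key XOR keeps the
    // sketch self-contained.
    plaintext.iter().zip(key.iter().cycle()).map(|(p, k)| p ^ k).collect()
}

fn main() {
    let key = *b"8bytekey";
    // BAD: a cipher with DES-like strength can be brute-forced, so an
    // attacker who records the ciphertext may recover the plaintext.
    let ciphertext = weak_cipher(&key, b"credit card number");
    // Prefer a modern AEAD (e.g. AES-256-GCM or ChaCha20-Poly1305) from a
    // maintained cryptography crate instead.
    let _ = ciphertext;
}
```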
@@ -0,0 +1,2 @@
|
||||
extractionWarning
|
||||
| bad_cargo/src/no_semantics.rs:1:1:1:1 | semantic analyzer unavailable (unable to load manifest) |
|
||||
@@ -1,6 +1,9 @@
|
||||
| Cargo.toml:0:0:0:0 | Cargo.toml | fromSource: no |
|
||||
| a_file.rs:0:0:0:0 | a_file.rs | fromSource: yes |
|
||||
| another_file.rs:0:0:0:0 | another_file.rs | fromSource: yes |
|
||||
| lib.rs:0:0:0:0 | lib.rs | fromSource: yes |
|
||||
| nested.rs:0:0:0:0 | nested.rs | fromSource: yes |
|
||||
| nested/file.rs:0:0:0:0 | nested/file.rs | fromSource: yes |
|
||||
| Cargo.toml:0:0:0:0 | Cargo.toml | fromSource: no | hasSemantics: no | isSkippedByCompilation: no |
|
||||
| a_file.rs:0:0:0:0 | a_file.rs | fromSource: yes | hasSemantics: yes | isSkippedByCompilation: no |
|
||||
| another_file.rs:0:0:0:0 | another_file.rs | fromSource: yes | hasSemantics: yes | isSkippedByCompilation: no |
|
||||
| bad_cargo/Cargo.toml:0:0:0:0 | bad_cargo/Cargo.toml | fromSource: no | hasSemantics: no | isSkippedByCompilation: no |
|
||||
| bad_cargo/src/no_semantics.rs:0:0:0:0 | bad_cargo/src/no_semantics.rs | fromSource: yes | hasSemantics: no | isSkippedByCompilation: no |
|
||||
| lib.rs:0:0:0:0 | lib.rs | fromSource: yes | hasSemantics: yes | isSkippedByCompilation: no |
|
||||
| nested.rs:0:0:0:0 | nested.rs | fromSource: yes | hasSemantics: yes | isSkippedByCompilation: no |
|
||||
| nested/file.rs:0:0:0:0 | nested/file.rs | fromSource: yes | hasSemantics: yes | isSkippedByCompilation: no |
|
||||
| nested/not_compiled.rs:0:0:0:0 | nested/not_compiled.rs | fromSource: yes | hasSemantics: no | isSkippedByCompilation: yes |
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
import rust
|
||||
|
||||
from File f, string fromSource
|
||||
from File f, string fromSource, string hasSemantics, string isSkippedByCompilation
|
||||
where
|
||||
exists(f.getRelativePath()) and
|
||||
if f.fromSource() then fromSource = "fromSource: yes" else fromSource = "fromSource: no"
|
||||
select f, fromSource
|
||||
(if f.fromSource() then fromSource = "fromSource: yes" else fromSource = "fromSource: no") and
|
||||
(
|
||||
if f.(ExtractedFile).hasSemantics()
|
||||
then hasSemantics = "hasSemantics: yes"
|
||||
else hasSemantics = "hasSemantics: no"
|
||||
) and
|
||||
if f.(ExtractedFile).isSkippedByCompilation()
|
||||
then isSkippedByCompilation = "isSkippedByCompilation: yes"
|
||||
else isSkippedByCompilation = "isSkippedByCompilation: no"
|
||||
select f, fromSource, hasSemantics, isSkippedByCompilation
|
||||
|
||||
1
rust/ql/test/extractor-tests/File/bad_cargo/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!/Cargo.toml
|
||||
1
rust/ql/test/extractor-tests/File/bad_cargo/Cargo.toml
Normal file
@@ -0,0 +1 @@
|
||||
wrong
|
||||
@@ -508,9 +508,9 @@ fn parse() {
|
||||
let d: i64 = b.parse().unwrap();
|
||||
|
||||
sink(a); // $ hasValueFlow=90
|
||||
sink_string(b); // $ MISSING: we are not currently able to resolve the `to_string` call above, which comes from `impl<T: fmt::Display + ?Sized> ToString for T`
|
||||
sink(c); // $ MISSING: hasTaintFlow=90 - we are not currently able to resolve the `parse` call above
|
||||
sink(d); // $ MISSING: hasTaintFlow=90 - we are not currently able to resolve the `parse` call above
|
||||
sink_string(b); // $ hasTaintFlow=90
|
||||
sink(c); // $ hasTaintFlow=90
|
||||
sink(d); // $ hasTaintFlow=90
|
||||
}
|
||||
|
||||
fn iterators() {
|
||||
|
||||
@@ -4,8 +4,8 @@ multipleCallTargets
|
||||
| test.rs:229:22:229:72 | ... .read_to_string(...) |
|
||||
| test.rs:664:22:664:43 | file.read(...) |
|
||||
| test.rs:673:22:673:41 | f1.read(...) |
|
||||
| test.rs:894:50:894:66 | ...::from(...) |
|
||||
| test.rs:894:50:894:66 | ...::from(...) |
|
||||
| test.rs:1096:50:1096:66 | ...::from(...) |
|
||||
| test.rs:1096:50:1096:66 | ...::from(...) |
|
||||
| test_futures_io.rs:45:27:45:84 | ...::read(...) |
|
||||
| test_futures_io.rs:49:27:49:51 | reader.read(...) |
|
||||
| test_futures_io.rs:83:22:83:39 | reader2.fill_buf() |
|
||||
@@ -26,3 +26,6 @@ multipleCallTargets
|
||||
| web_frameworks.rs:102:14:102:25 | a.as_bytes() |
|
||||
| web_frameworks.rs:158:14:158:23 | a.as_str() |
|
||||
| web_frameworks.rs:159:14:159:25 | a.as_bytes() |
|
||||
multiplePathResolutions
|
||||
| test.rs:897:28:897:65 | Result::<...> |
|
||||
| test.rs:984:40:984:49 | Result::<...> |
|
||||
|
||||
408
rust/ql/test/library-tests/dataflow/sources/Cargo.lock
generated
@@ -224,6 +224,12 @@ dependencies = [
|
||||
"alloc-no-stdlib",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "allocator-api2"
|
||||
version = "0.2.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
|
||||
|
||||
[[package]]
|
||||
name = "async-channel"
|
||||
version = "1.9.0"
|
||||
@@ -520,12 +526,33 @@ dependencies = [
|
||||
"alloc-stdlib",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "btoi"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bufstream"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40e38929add23cdf8a366df9b0e088953150724bcbe5fc330b0d8eb3b328eec8"
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.18.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.10.1"
|
||||
@@ -647,6 +674,15 @@ dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-queue"
|
||||
version = "0.3.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
|
||||
dependencies = [
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-utils"
|
||||
version = "0.8.21"
|
||||
@@ -663,6 +699,41 @@ dependencies = [
|
||||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"darling_macro",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_core"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
|
||||
dependencies = [
|
||||
"fnv",
|
||||
"ident_case",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "darling_macro"
|
||||
version = "0.20.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deranged"
|
||||
version = "0.4.0"
|
||||
@@ -693,6 +764,17 @@ dependencies = [
|
||||
"unicode-xid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_utils"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccfae181bab5ab6c5478b2ccb69e4c68a02f8c3ec72f6616bfec9dbc599d2ee0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "digest"
|
||||
version = "0.10.7"
|
||||
@@ -791,6 +873,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"libz-sys",
|
||||
"miniz_oxide",
|
||||
]
|
||||
|
||||
@@ -1049,6 +1132,11 @@ name = "hashbrown"
|
||||
version = "0.15.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
|
||||
dependencies = [
|
||||
"allocator-api2",
|
||||
"equivalent",
|
||||
"foldhash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "headers"
|
||||
@@ -1074,6 +1162,12 @@ dependencies = [
|
||||
"http 1.3.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.5.2"
|
||||
@@ -1311,6 +1405,12 @@ dependencies = [
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ident_case"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "1.0.3"
|
||||
@@ -1348,6 +1448,15 @@ dependencies = [
|
||||
"hashbrown",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "io-enum"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d197db2f7ebf90507296df3aebaf65d69f5dce8559d8dbd82776a6cadab61bbf"
|
||||
dependencies = [
|
||||
"derive_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.11.0"
|
||||
@@ -1399,6 +1508,15 @@ dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "keyed_priority_queue"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ee7893dab2e44ae5f9d0173f26ff4aa327c10b01b06a72b52dd9405b628640d"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "kv-log-macro"
|
||||
version = "1.0.7"
|
||||
@@ -1442,6 +1560,17 @@ dependencies = [
|
||||
"windows-targets 0.53.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libz-sys"
|
||||
version = "1.1.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.15"
|
||||
@@ -1496,6 +1625,21 @@ dependencies = [
|
||||
"value-bag",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lru"
|
||||
version = "0.12.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
|
||||
|
||||
[[package]]
|
||||
name = "lru"
|
||||
version = "0.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9f8cc7106155f10bdf99a6f379688f543ad6596a415375b36a59a054ceda1198"
|
||||
dependencies = [
|
||||
"hashbrown",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matchit"
|
||||
version = "0.8.4"
|
||||
@@ -1551,6 +1695,111 @@ dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mysql"
|
||||
version = "26.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ce2510a735f601bab18202b07ea0a197bd1d130d3a5ce2edf4577d225f0c3ee4"
|
||||
dependencies = [
|
||||
"bufstream",
|
||||
"bytes",
|
||||
"crossbeam-queue",
|
||||
"crossbeam-utils",
|
||||
"flate2",
|
||||
"io-enum",
|
||||
"libc",
|
||||
"lru 0.12.5",
|
||||
"mysql_common",
|
||||
"named_pipe",
|
||||
"pem",
|
||||
"percent-encoding",
|
||||
"socket2",
|
||||
"twox-hash",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mysql-common-derive"
|
||||
version = "0.32.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "66f62cad7623a9cb6f8f64037f0c4f69c8db8e82914334a83c9788201c2c1bfa"
|
||||
dependencies = [
|
||||
"darling",
|
||||
"heck",
|
||||
"num-bigint",
|
||||
"proc-macro-crate",
|
||||
"proc-macro-error2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"termcolor",
|
||||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mysql_async"
|
||||
version = "0.36.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "277ce2f2459b2af4cc6d0a0b7892381f80800832f57c533f03e2845f4ea331ea"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"crossbeam-queue",
|
||||
"flate2",
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"futures-util",
|
||||
"keyed_priority_queue",
|
||||
"lru 0.14.0",
|
||||
"mysql_common",
|
||||
"pem",
|
||||
"percent-encoding",
|
||||
"rand",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"socket2",
|
||||
"thiserror",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"twox-hash",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mysql_common"
|
||||
version = "0.35.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fbb9f371618ce723f095c61fbcdc36e8936956d2b62832f9c7648689b338e052"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"bitflags",
|
||||
"btoi",
|
||||
"byteorder",
|
||||
"bytes",
|
||||
"crc32fast",
|
||||
"flate2",
|
||||
"getrandom 0.3.3",
|
||||
"mysql-common-derive",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
"regex",
|
||||
"saturating",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"thiserror",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "named_pipe"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad9c443cce91fc3e12f017290db75dde490d685cdaaf508d7159d7cf41f0eb2b"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "native-tls"
|
||||
version = "0.2.14"
|
||||
@@ -1590,12 +1839,40 @@ dependencies = [
|
||||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-bigint"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
|
||||
dependencies = [
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-conv"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
|
||||
|
||||
[[package]]
|
||||
name = "num-integer"
|
||||
version = "0.1.46"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "object"
|
||||
version = "0.36.7"
|
||||
@@ -1684,6 +1961,16 @@ dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pem"
|
||||
version = "3.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"serde_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "percent-encoding"
|
||||
version = "2.3.1"
|
||||
@@ -1842,6 +2129,28 @@ dependencies = [
|
||||
"toml_edit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error-attr2"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-error2"
|
||||
version = "2.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
|
||||
dependencies = [
|
||||
"proc-macro-error-attr2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.95"
|
||||
@@ -2090,6 +2399,12 @@ version = "1.0.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "saturating"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ece8e78b2f38ec51c51f5d475df0a7187ba5111b2a28bdc761ee05b075d40a71"
|
||||
|
||||
[[package]]
|
||||
name = "schannel"
|
||||
version = "0.1.27"
|
||||
@@ -2136,18 +2451,28 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.219"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
|
||||
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_core"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.219"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
|
||||
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2199,6 +2524,17 @@ dependencies = [
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha2"
|
||||
version = "0.10.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"cpufeatures",
|
||||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
@@ -2242,6 +2578,12 @@ version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "subtle"
|
||||
version = "2.6.1"
|
||||
@@ -2313,6 +2655,15 @@ dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "1.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "test"
|
||||
version = "0.0.1"
|
||||
@@ -2327,6 +2678,8 @@ dependencies = [
|
||||
"http-body-util",
|
||||
"hyper",
|
||||
"hyper-util",
|
||||
"mysql",
|
||||
"mysql_async",
|
||||
"poem",
|
||||
"reqwest",
|
||||
"rustls",
|
||||
@@ -2560,6 +2913,12 @@ version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
|
||||
|
||||
[[package]]
|
||||
name = "twox-hash"
|
||||
version = "2.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c"
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
version = "1.18.0"
|
||||
@@ -2616,6 +2975,16 @@ version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.18.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "value-bag"
|
||||
version = "1.11.1"
|
||||
@@ -2786,6 +3155,37 @@ version = "2.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd"
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
|
||||
dependencies = [
|
||||
"winapi-i686-pc-windows-gnu",
|
||||
"winapi-x86_64-pc-windows-gnu",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-i686-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.1.3"
|
||||
|
||||
File diff suppressed because it is too large
@@ -87,8 +87,33 @@
|
||||
| test.rs:759:28:759:57 | ...::connect | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| test.rs:841:22:841:49 | ...::connect | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| test.rs:867:22:867:50 | ...::new | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| test.rs:894:16:894:29 | ...::args | Flow source 'CommandLineArgs' of type commandargs (DEFAULT). |
|
||||
| test.rs:894:16:894:29 | ...::args | Flow source 'CommandLineArgs' of type commandargs (DEFAULT). |
|
||||
| test.rs:902:47:902:51 | query | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:905:28:905:30 | get | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:908:28:908:34 | get_opt | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:911:28:911:31 | take | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:914:28:914:35 | take_opt | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:917:26:917:31 | as_ref | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:924:28:924:38 | query_first | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:927:27:927:35 | exec_iter | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:928:42:928:44 | get | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:935:22:935:30 | query_map | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:942:22:942:30 | query_map | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:951:26:951:35 | query_fold | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:957:22:957:31 | query_fold | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:989:53:989:57 | query | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:992:28:992:30 | get | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:995:28:995:34 | get_opt | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:998:28:998:31 | take | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1001:28:1001:35 | take_opt | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1004:26:1004:31 | as_ref | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1011:28:1011:38 | query_first | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1014:27:1014:35 | exec_iter | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1022:22:1022:30 | query_map | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1029:22:1029:30 | query_map | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1038:26:1038:35 | query_fold | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1044:22:1044:31 | query_fold | Flow source 'DatabaseSource' of type database (DEFAULT). |
|
||||
| test.rs:1096:16:1096:29 | ...::args | Flow source 'CommandLineArgs' of type commandargs (DEFAULT). |
|
||||
| test.rs:1096:16:1096:29 | ...::args | Flow source 'CommandLineArgs' of type commandargs (DEFAULT). |
|
||||
| test_futures_io.rs:19:15:19:32 | ...::connect | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:11:31:11:31 | a | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:11:31:11:31 | a | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
@@ -100,6 +125,38 @@
|
||||
| web_frameworks.rs:58:14:58:15 | ms | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:68:15:68:15 | a | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:68:15:68:15 | a | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:127:5:127:20 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:139:41:139:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:140:45:140:46 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:141:41:141:42 | to | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:242:33:242:35 | map | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:242:33:242:35 | map | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
| web_frameworks.rs:242:33:242:35 | map | Flow source 'RemoteSource' of type remote (DEFAULT). |
|
||||
|
||||
@@ -17,3 +17,5 @@ qltest_dependencies:
|
||||
- futures-rustls = { version = "0.26.0" }
|
||||
- async-std = { version = "1.13.1" }
|
||||
- warp = { version = "0.4.2", features = ["server"] }
|
||||
- mysql = { version = "26.0.1" }
|
||||
- mysql_async = { version = "0.36.1" }
|
||||
|
||||
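The new `mysql` / `mysql_async` test dependencies back the `DatabaseSource` expectations shown earlier. Roughly, the tests exercise code like the following sketch, where values read back from the database are treated as flow sources; the connection URL, table, and `sink` helper are invented for illustration, and the calls assume the `mysql` crate's `Queryable` API:

```rust
use mysql::prelude::Queryable;

fn sink(s: &str) { println!("{s}"); } // placeholder sink

fn main() -> Result<(), mysql::Error> {
    let pool = mysql::Pool::new("mysql://user:pass@localhost/db")?;
    let mut conn = pool.get_conn()?;

    // `query_first` and `Row::get` are among the modeled database sources.
    if let Some(row) = conn.query_first::<mysql::Row, _>("SELECT name FROM users")? {
        let name: String = row.get(0).unwrap_or_default();
        sink(&name); // data read from the database reaches the sink
    }
    Ok(())
}
```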
Some files were not shown because too many files have changed in this diff.