Merge branch 'mathiasvp/replace-ast-with-ir-use-usedataflow' into local-flow-for-getAdditionalFlowIntoCallNodeTerm

This commit is contained in:
Mathias Vorreiter Pedersen
2023-03-15 11:15:16 +00:00
152 changed files with 15916 additions and 1149 deletions

View File

@@ -55,12 +55,12 @@ jobs:
id: cache-extractor
with:
path: |
ruby/target/release/ruby-autobuilder
ruby/target/release/ruby-autobuilder.exe
ruby/target/release/ruby-extractor
ruby/target/release/ruby-extractor.exe
ruby/ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-extractor-${{ hashFiles('ruby/rust-toolchain.toml', 'ruby/**/Cargo.lock') }}--${{ hashFiles('ruby/**/*.rs') }}
ruby/extractor/target/release/autobuilder
ruby/extractor/target/release/autobuilder.exe
ruby/extractor/target/release/extractor
ruby/extractor/target/release/extractor.exe
ruby/extractor/ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-extractor-${{ hashFiles('ruby/extractor/rust-toolchain.toml', 'ruby/extractor/Cargo.lock') }}--${{ hashFiles('ruby/extractor/**/*.rs') }}
- uses: actions/cache@v3
if: steps.cache-extractor.outputs.cache-hit != 'true'
with:
@@ -68,22 +68,22 @@ jobs:
~/.cargo/registry
~/.cargo/git
ruby/target
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-rust-cargo-${{ hashFiles('ruby/rust-toolchain.toml', 'ruby/**/Cargo.lock') }}
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-rust-cargo-${{ hashFiles('ruby/extractor/rust-toolchain.toml', 'ruby/extractor/**/Cargo.lock') }}
- name: Check formatting
if: steps.cache-extractor.outputs.cache-hit != 'true'
run: cargo fmt --all -- --check
run: cd extractor && cargo fmt --all -- --check
- name: Build
if: steps.cache-extractor.outputs.cache-hit != 'true'
run: cargo build --verbose
run: cd extractor && cargo build --verbose
- name: Run tests
if: steps.cache-extractor.outputs.cache-hit != 'true'
run: cargo test --verbose
run: cd extractor && cargo test --verbose
- name: Release build
if: steps.cache-extractor.outputs.cache-hit != 'true'
run: cargo build --release
run: cd extractor && cargo build --release
- name: Generate dbscheme
if: ${{ matrix.os == 'ubuntu-latest' && steps.cache-extractor.outputs.cache-hit != 'true'}}
run: target/release/ruby-generator --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
run: extractor/target/release/generator --dbscheme ql/lib/ruby.dbscheme --library ql/lib/codeql/ruby/ast/internal/TreeSitter.qll
- uses: actions/upload-artifact@v3
if: ${{ matrix.os == 'ubuntu-latest' }}
with:
@@ -98,10 +98,10 @@ jobs:
with:
name: extractor-${{ matrix.os }}
path: |
ruby/target/release/ruby-autobuilder
ruby/target/release/ruby-autobuilder.exe
ruby/target/release/ruby-extractor
ruby/target/release/ruby-extractor.exe
ruby/extractor/target/release/autobuilder
ruby/extractor/target/release/autobuilder.exe
ruby/extractor/target/release/extractor
ruby/extractor/target/release/extractor.exe
retention-days: 1
compile-queries:
runs-on: ubuntu-latest-xl
@@ -116,21 +116,22 @@ jobs:
key: ruby-build
- name: Build Query Pack
run: |
rm -rf target/packs
codeql pack create ../misc/suite-helpers --output target/packs
codeql pack create ../shared/regex --output target/packs
codeql pack create ../shared/ssa --output target/packs
codeql pack create ../shared/tutorial --output target/packs
codeql pack create ql/lib --output target/packs
codeql pack create -j0 ql/src --output target/packs --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
PACK_FOLDER=$(readlink -f target/packs/codeql/ruby-queries/*)
PACKS=${{ runner.temp }}/query-packs
rm -rf $PACKS
codeql pack create ../misc/suite-helpers --output "$PACKS"
codeql pack create ../shared/regex --output "$PACKS"
codeql pack create ../shared/ssa --output "$PACKS"
codeql pack create ../shared/tutorial --output "$PACKS"
codeql pack create ql/lib --output "$PACKS"
codeql pack create -j0 ql/src --output "$PACKS" --compilation-cache "${{ steps.query-cache.outputs.cache-dir }}"
PACK_FOLDER=$(readlink -f "$PACKS"/codeql/ruby-queries/*)
codeql generate query-help --format=sarifv2.1.0 --output="${PACK_FOLDER}/rules.sarif" ql/src
(cd ql/src; find queries \( -name '*.qhelp' -o -name '*.rb' -o -name '*.erb' \) -exec bash -c 'mkdir -p "'"${PACK_FOLDER}"'/$(dirname "{}")"' \; -exec cp "{}" "${PACK_FOLDER}/{}" \;)
- uses: actions/upload-artifact@v3
with:
name: codeql-ruby-queries
path: |
ruby/target/packs/*
${{ runner.temp }}/query-packs/*
retention-days: 1
package:
@@ -158,12 +159,12 @@ jobs:
mkdir -p ruby
cp -r codeql-extractor.yml tools ql/lib/ruby.dbscheme.stats ruby/
mkdir -p ruby/tools/{linux64,osx64,win64}
cp linux64/ruby-autobuilder ruby/tools/linux64/autobuilder
cp osx64/ruby-autobuilder ruby/tools/osx64/autobuilder
cp win64/ruby-autobuilder.exe ruby/tools/win64/autobuilder.exe
cp linux64/ruby-extractor ruby/tools/linux64/extractor
cp osx64/ruby-extractor ruby/tools/osx64/extractor
cp win64/ruby-extractor.exe ruby/tools/win64/extractor.exe
cp linux64/autobuilder ruby/tools/linux64/autobuilder
cp osx64/autobuilder ruby/tools/osx64/autobuilder
cp win64/autobuilder.exe ruby/tools/win64/autobuilder.exe
cp linux64/extractor ruby/tools/linux64/extractor
cp osx64/extractor ruby/tools/osx64/extractor
cp win64/extractor.exe ruby/tools/win64/extractor.exe
chmod +x ruby/tools/{linux64,osx64}/{autobuilder,extractor}
zip -rq codeql-ruby.zip ruby
- uses: actions/upload-artifact@v3

View File

@@ -182,6 +182,7 @@ private module LambdaFlow {
boolean toJump, DataFlowCallOption lastCall
) {
revLambdaFlow0(lambdaCall, kind, node, t, toReturn, toJump, lastCall) and
not expectsContent(node, _) and
if castNode(node) or node instanceof ArgNode or node instanceof ReturnNode
then compatibleTypes(t, getNodeDataFlowType(node))
else any()

View File

@@ -182,6 +182,7 @@ private module LambdaFlow {
boolean toJump, DataFlowCallOption lastCall
) {
revLambdaFlow0(lambdaCall, kind, node, t, toReturn, toJump, lastCall) and
not expectsContent(node, _) and
if castNode(node) or node instanceof ArgNode or node instanceof ReturnNode
then compatibleTypes(t, getNodeDataFlowType(node))
else any()

View File

@@ -182,6 +182,7 @@ private module LambdaFlow {
boolean toJump, DataFlowCallOption lastCall
) {
revLambdaFlow0(lambdaCall, kind, node, t, toReturn, toJump, lastCall) and
not expectsContent(node, _) and
if castNode(node) or node instanceof ArgNode or node instanceof ReturnNode
then compatibleTypes(t, getNodeDataFlowType(node))
else any()

View File

@@ -182,6 +182,7 @@ private module LambdaFlow {
boolean toJump, DataFlowCallOption lastCall
) {
revLambdaFlow0(lambdaCall, kind, node, t, toReturn, toJump, lastCall) and
not expectsContent(node, _) and
if castNode(node) or node instanceof ArgNode or node instanceof ReturnNode
then compatibleTypes(t, getNodeDataFlowType(node))
else any()

View File

@@ -38,12 +38,13 @@ jakarta.ws.rs.client,1,,,,,,,,,,,,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,
jakarta.ws.rs.container,,9,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,9,,
jakarta.ws.rs.core,2,,149,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,,,,,,,,,94,55
java.beans,,,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,
java.io,37,,42,,15,,,,,,,,,,,,,,,,,,,,,,,,,,,,22,,,,,,,,41,1
java.lang,14,,76,,,,,,,,,,,,8,,,,,1,,4,,,1,,,,,,,,,,,,,,,,53,23
java.io,42,,42,,17,,,,,,,,,,,,,,,3,,,,,,,,,,,,,22,,,,,,,,41,1
java.lang,16,,76,,,,,,,,,,,,8,,,,,3,,4,,,1,,,,,,,,,,,,,,,,53,23
java.net,10,3,9,,,,,,,,,,,,,,,10,,,,,,,,,,,,,,,,,,,,,,3,9,
java.nio,16,,16,,13,,,,,,,,,,,,,,,1,,,,,,,,,,,,,2,,,,,,,,16,
java.sql,11,,2,,,,,,,,4,,,,,,,,,,,,,,,,,,7,,,,,,,,,,,,1,1
java.sql,13,,2,,,,,,,,4,,,,,,,,,,,,,,,,,,9,,,,,,,,,,,,1,1
java.util,44,,465,,,,,,,,,,,,34,,,,,,,,5,2,,1,2,,,,,,,,,,,,,,38,427
javafx.scene.web,1,,,,,,,,,,,,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,
javax.faces.context,2,7,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,,,,7,,
javax.jms,,9,57,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,9,57,
javax.json,,,123,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,100,23
@@ -66,6 +67,7 @@ okhttp3,2,,47,,,,,,,,,,,,,,,2,,,,,,,,,,,,,,,,,,,,,,,22,25
org.apache.commons.codec,,,6,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,6,
org.apache.commons.collections,,,800,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,17,783
org.apache.commons.collections4,,,800,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,17,783
org.apache.commons.compress.archivers.tar,,,2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,
org.apache.commons.io,106,,560,,91,,,,,,,,,,,,,15,,,,,,,,,,,,,,,,,,,,,,,546,14
org.apache.commons.jexl2,15,,,,,,,,,,,15,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
org.apache.commons.jexl3,15,,,,,,,,,,,15,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
@@ -80,7 +82,7 @@ org.apache.hc.core5.http,1,2,39,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,,,,2,39,
org.apache.hc.core5.net,,,2,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,2,
org.apache.hc.core5.util,,,24,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,18,6
org.apache.hive.hcatalog.templeton,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,,,,,,,,,,,,,
org.apache.http,27,3,70,,,,,,,,,,,,,,,25,,,,,,,,,,,,,,,,,,2,,,,3,62,8
org.apache.http,27,3,77,,,,,,,,,,,,,,,25,,,,,,,,,,,,,,,,,,2,,,,3,69,8
org.apache.ibatis.jdbc,6,,57,,,,,,,,,,,,,,,,,,,,,,,,,,6,,,,,,,,,,,,57,
org.apache.log4j,11,,,,,,,,,,,,,,11,,,,,,,,,,,,,,,,,,,,,,,,,,,
org.apache.logging.log4j,359,,8,,,,,,,,,,,,359,,,,,,,,,,,,,,,,,,,,,,,,,,4,4
@@ -88,6 +90,7 @@ org.apache.shiro.codec,,,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,1,
org.apache.shiro.jndi,1,,,,,,,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
org.apache.velocity.app,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,4,,,,,,,,,,,,
org.apache.velocity.runtime,4,,,,,,,,,,,,,,,,,,,,,,,,,,,,,4,,,,,,,,,,,,
org.codehaus.cargo.container.installer,3,,,,2,,,,,,,,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,
org.codehaus.groovy.control,1,,,,,,1,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,
org.dom4j,20,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,20,,,,,,,,
org.hibernate,7,,,,,,,,,,,,,,,,,,,,,,,,,,,,7,,,,,,,,,,,,,
1 package sink source summary sink:bean-validation sink:create-file sink:fragment-injection sink:groovy sink:header-splitting sink:information-leak sink:intent-start sink:jdbc-url sink:jexl sink:jndi-injection sink:ldap sink:logging sink:mvel sink:ognl-injection sink:open-url sink:pending-intent-sent sink:read-file sink:regex-use sink:regex-use[-1] sink:regex-use[0] sink:regex-use[] sink:regex-use[f-1] sink:regex-use[f1] sink:regex-use[f] sink:set-hostname-verifier sink:sql sink:ssti sink:url-open-stream sink:url-redirect sink:write-file sink:xpath sink:xslt sink:xss source:android-external-storage-dir source:android-widget source:contentprovider source:remote summary:taint summary:value
38 jakarta.ws.rs.container 9 9
39 jakarta.ws.rs.core 2 149 2 94 55
40 java.beans 1 1
41 java.io 37 42 42 15 17 3 22 41 1
42 java.lang 14 16 76 8 1 3 4 1 53 23
43 java.net 10 3 9 10 3 9
44 java.nio 16 16 13 1 2 16
45 java.sql 11 13 2 4 7 9 1 1
46 java.util 44 465 34 5 2 1 2 38 427
47 javafx.scene.web 1 1
48 javax.faces.context 2 7 2 7
49 javax.jms 9 57 9 57
50 javax.json 123 100 23
67 org.apache.commons.codec 6 6
68 org.apache.commons.collections 800 17 783
69 org.apache.commons.collections4 800 17 783
70 org.apache.commons.compress.archivers.tar 2 2
71 org.apache.commons.io 106 560 91 15 546 14
72 org.apache.commons.jexl2 15 15
73 org.apache.commons.jexl3 15 15
82 org.apache.hc.core5.net 2 2
83 org.apache.hc.core5.util 24 18 6
84 org.apache.hive.hcatalog.templeton 1 1
85 org.apache.http 27 3 70 77 25 2 3 62 69 8
86 org.apache.ibatis.jdbc 6 57 6 57
87 org.apache.log4j 11 11
88 org.apache.logging.log4j 359 8 359 4 4
90 org.apache.shiro.jndi 1 1
91 org.apache.velocity.app 4 4
92 org.apache.velocity.runtime 4 4
93 org.codehaus.cargo.container.installer 3 2 1
94 org.codehaus.groovy.control 1 1
95 org.dom4j 20 20
96 org.hibernate 7 7

View File

@@ -13,15 +13,15 @@ Java framework & library support
`Apache Commons IO <https://commons.apache.org/proper/commons-io/>`_,``org.apache.commons.io``,,560,106,91,,,,,,15
`Apache Commons Lang <https://commons.apache.org/proper/commons-lang/>`_,``org.apache.commons.lang3``,,424,6,,,,,,,
`Apache Commons Text <https://commons.apache.org/proper/commons-text/>`_,``org.apache.commons.text``,,272,,,,,,,,
`Apache HttpComponents <https://hc.apache.org/>`_,"``org.apache.hc.core5.*``, ``org.apache.http``",5,136,28,,,3,,,,25
`Apache HttpComponents <https://hc.apache.org/>`_,"``org.apache.hc.core5.*``, ``org.apache.http``",5,143,28,,,3,,,,25
`Apache Log4j 2 <https://logging.apache.org/log4j/2.0/>`_,``org.apache.logging.log4j``,,8,359,,,,,,,
`Google Guava <https://guava.dev/>`_,``com.google.common.*``,,728,39,,6,,,,,
JBoss Logging,``org.jboss.logging``,,,324,,,,,,,
`JSON-java <https://github.com/stleary/JSON-java>`_,``org.json``,,236,,,,,,,,
Java Standard Library,``java.*``,3,611,132,28,,,7,,,10
Java Standard Library,``java.*``,3,611,141,30,,,9,,,10
Java extensions,"``javax.*``, ``jakarta.*``",63,609,32,,,4,,1,1,2
Kotlin Standard Library,``kotlin*``,,1835,12,10,,,,,,2
`Spring <https://spring.io/>`_,``org.springframework.*``,29,477,101,,,,19,14,,29
Others,"``cn.hutool.core.codec``, ``com.esotericsoftware.kryo.io``, ``com.esotericsoftware.kryo5.io``, ``com.fasterxml.jackson.core``, ``com.fasterxml.jackson.databind``, ``com.hubspot.jinjava``, ``com.mitchellbosecke.pebble``, ``com.opensymphony.xwork2.ognl``, ``com.rabbitmq.client``, ``com.unboundid.ldap.sdk``, ``com.zaxxer.hikari``, ``flexjson``, ``freemarker.cache``, ``freemarker.template``, ``groovy.lang``, ``groovy.util``, ``jodd.json``, ``net.sf.saxon.s9api``, ``ognl``, ``okhttp3``, ``org.apache.commons.codec``, ``org.apache.commons.jexl2``, ``org.apache.commons.jexl3``, ``org.apache.commons.logging``, ``org.apache.commons.ognl``, ``org.apache.directory.ldap.client.api``, ``org.apache.hadoop.hive.metastore``, ``org.apache.hive.hcatalog.templeton``, ``org.apache.ibatis.jdbc``, ``org.apache.log4j``, ``org.apache.shiro.codec``, ``org.apache.shiro.jndi``, ``org.apache.velocity.app``, ``org.apache.velocity.runtime``, ``org.codehaus.groovy.control``, ``org.dom4j``, ``org.hibernate``, ``org.jdbi.v3.core``, ``org.jooq``, ``org.mvel2``, ``org.scijava.log``, ``org.slf4j``, ``org.thymeleaf``, ``org.xml.sax``, ``org.xmlpull.v1``, ``play.mvc``, ``ratpack.core.form``, ``ratpack.core.handling``, ``ratpack.core.http``, ``ratpack.exec``, ``ratpack.form``, ``ratpack.func``, ``ratpack.handling``, ``ratpack.http``, ``ratpack.util``, ``retrofit2``",60,300,273,,,,18,18,,3
Totals,,217,8458,1569,129,6,10,111,33,1,86
Others,"``cn.hutool.core.codec``, ``com.esotericsoftware.kryo.io``, ``com.esotericsoftware.kryo5.io``, ``com.fasterxml.jackson.core``, ``com.fasterxml.jackson.databind``, ``com.hubspot.jinjava``, ``com.mitchellbosecke.pebble``, ``com.opensymphony.xwork2.ognl``, ``com.rabbitmq.client``, ``com.unboundid.ldap.sdk``, ``com.zaxxer.hikari``, ``flexjson``, ``freemarker.cache``, ``freemarker.template``, ``groovy.lang``, ``groovy.util``, ``javafx.scene.web``, ``jodd.json``, ``net.sf.saxon.s9api``, ``ognl``, ``okhttp3``, ``org.apache.commons.codec``, ``org.apache.commons.compress.archivers.tar``, ``org.apache.commons.jexl2``, ``org.apache.commons.jexl3``, ``org.apache.commons.logging``, ``org.apache.commons.ognl``, ``org.apache.directory.ldap.client.api``, ``org.apache.hadoop.hive.metastore``, ``org.apache.hive.hcatalog.templeton``, ``org.apache.ibatis.jdbc``, ``org.apache.log4j``, ``org.apache.shiro.codec``, ``org.apache.shiro.jndi``, ``org.apache.velocity.app``, ``org.apache.velocity.runtime``, ``org.codehaus.cargo.container.installer``, ``org.codehaus.groovy.control``, ``org.dom4j``, ``org.hibernate``, ``org.jdbi.v3.core``, ``org.jooq``, ``org.mvel2``, ``org.scijava.log``, ``org.slf4j``, ``org.thymeleaf``, ``org.xml.sax``, ``org.xmlpull.v1``, ``play.mvc``, ``ratpack.core.form``, ``ratpack.core.handling``, ``ratpack.core.http``, ``ratpack.exec``, ``ratpack.form``, ``ratpack.func``, ``ratpack.handling``, ``ratpack.http``, ``ratpack.util``, ``retrofit2``",60,302,277,2,,,18,18,,5
Totals,,217,8467,1582,133,6,10,113,33,1,88

View File

@@ -0,0 +1,4 @@
---
category: majorAnalysis
---
* Removed low-confidence call edges to known neutral call targets from the call graph used in data flow analysis. This includes, for example, custom `List.contains` implementations when the best inferrable type at the call site is simply `List`.

View File

@@ -132,9 +132,9 @@ private newtype TPrintAstNode =
TGenericTypeNode(GenericType ty) { shouldPrint(ty, _) } or
TGenericCallableNode(GenericCallable c) { shouldPrint(c, _) } or
TDocumentableNode(Documentable d) { shouldPrint(d, _) and exists(d.getJavadoc()) } or
TJavadocNode(Javadoc jd) { exists(Documentable d | d.getJavadoc() = jd | shouldPrint(d, _)) } or
TJavadocElementNode(JavadocElement jd) {
exists(Documentable d | d.getJavadoc() = jd.getParent*() | shouldPrint(d, _))
TJavadocNode(Javadoc jd, Documentable d) { d.getJavadoc() = jd and shouldPrint(d, _) } or
TJavadocElementNode(JavadocElement jd, Documentable d) {
d.getJavadoc() = jd.getParent*() and shouldPrint(d, _)
} or
TImportsNode(CompilationUnit cu) {
shouldPrint(cu, _) and exists(Import i | i.getCompilationUnit() = cu)
@@ -794,6 +794,7 @@ final class DocumentableNode extends PrintAstNode, TDocumentableNode {
override Location getLocation() { none() }
override JavadocNode getChild(int childIndex) {
result.getDocumentable() = d and
result.getJavadoc() =
rank[childIndex](Javadoc jd, string file, int line, int column |
jd.getCommentedElement() = d and jd.getLocation().hasLocationInfo(file, line, column, _, _)
@@ -814,14 +815,16 @@ final class DocumentableNode extends PrintAstNode, TDocumentableNode {
*/
final class JavadocNode extends PrintAstNode, TJavadocNode {
Javadoc jd;
Documentable d;
JavadocNode() { this = TJavadocNode(jd) }
JavadocNode() { this = TJavadocNode(jd, d) and not duplicateMetadata(d) }
override string toString() { result = getQlClass(jd) + jd.toString() }
override Location getLocation() { result = jd.getLocation() }
override JavadocElementNode getChild(int childIndex) {
result.getDocumentable() = d and
result.getJavadocElement() = jd.getChild(childIndex)
}
@@ -829,6 +832,11 @@ final class JavadocNode extends PrintAstNode, TJavadocNode {
* Gets the `Javadoc` represented by this node.
*/
Javadoc getJavadoc() { result = jd }
/**
* Gets the `Documentable` whose `Javadoc` is represented by this node.
*/
Documentable getDocumentable() { result = d }
}
/**
@@ -837,14 +845,16 @@ final class JavadocNode extends PrintAstNode, TJavadocNode {
*/
final class JavadocElementNode extends PrintAstNode, TJavadocElementNode {
JavadocElement jd;
Documentable d;
JavadocElementNode() { this = TJavadocElementNode(jd) }
JavadocElementNode() { this = TJavadocElementNode(jd, d) and not duplicateMetadata(d) }
override string toString() { result = getQlClass(jd) + jd.toString() }
override Location getLocation() { result = jd.getLocation() }
override JavadocElementNode getChild(int childIndex) {
result.getDocumentable() = d and
result.getJavadocElement() = jd.(JavadocParent).getChild(childIndex)
}
@@ -852,6 +862,11 @@ final class JavadocElementNode extends PrintAstNode, TJavadocElementNode {
* Gets the `JavadocElement` represented by this node.
*/
JavadocElement getJavadocElement() { result = jd }
/**
* Gets the `Documentable` whose `JavadocElement` is represented by this node.
*/
Documentable getDocumentable() { result = d }
}
/**

View File

@@ -171,6 +171,8 @@ class SummarizedCallableBase extends TSummarizedCallableBase {
class SummarizedCallable = Impl::Public::SummarizedCallable;
class NeutralCallable = Impl::Public::NeutralCallable;
/**
* An adapter class to add the flow summaries specified on `SyntheticCallable`
* to `SummarizedCallable`.

View File

@@ -11,6 +11,8 @@ private module DispatchImpl {
private predicate hasHighConfidenceTarget(Call c) {
exists(SummarizedCallable sc | sc.getACall() = c and not sc.isAutoGenerated())
or
exists(NeutralCallable nc | nc.getACall() = c and nc.isManual())
or
exists(Callable srcTgt |
srcTgt = VirtualDispatch::viableCallable(c) and
not VirtualDispatch::lowConfidenceDispatchTarget(c, srcTgt)

View File

@@ -182,6 +182,7 @@ private module LambdaFlow {
boolean toJump, DataFlowCallOption lastCall
) {
revLambdaFlow0(lambdaCall, kind, node, t, toReturn, toJump, lastCall) and
not expectsContent(node, _) and
if castNode(node) or node instanceof ArgNode or node instanceof ReturnNode
then compatibleTypes(t, getNodeDataFlowType(node))
else any()

View File

@@ -236,6 +236,12 @@ private VirtualMethodAccess objectToString(ObjNode n) {
result.getQualifier() = n.asExpr() and sink(n)
}
/**
* Holds if `ma` is an `Object.toString()` call taking possibly improved type
* bounds into account.
*/
predicate objectToStringCall(VirtualMethodAccess ma) { ma = objectToString(_) }
/**
* Holds if the qualifier of the `Object.toString()` call `ma` might have type `t`.
*/

View File

@@ -93,7 +93,8 @@ private module Dispatch {
exists(RefType t | qualUnionType(ma, t, false) |
lowConfidenceDispatchType(t.getSourceDeclaration())
)
)
) and
not ObjFlow::objectToStringCall(ma)
}
private predicate lowConfidenceDispatchType(SrcRefType t) {

View File

@@ -0,0 +1,87 @@
/** Provide classes to reason about Android Intents that can install APKs. */
import java
import semmle.code.java.frameworks.android.Intent
import semmle.code.java.dataflow.DataFlow
private import semmle.code.java.dataflow.ExternalFlow
private import semmle.code.java.dataflow.FlowSources
/** A string literal that represents the MIME type for Android APKs. */
class PackageArchiveMimeTypeLiteral extends StringLiteral {
PackageArchiveMimeTypeLiteral() { this.getValue() = "application/vnd.android.package-archive" }
}
/** The `android.content.Intent.ACTION_INSTALL_PACKAGE` constant. */
class InstallPackageAction extends Expr {
InstallPackageAction() {
this.(StringLiteral).getValue() = "android.intent.action.INSTALL_PACKAGE"
or
exists(VarAccess va |
va.getVariable().hasName("ACTION_INSTALL_PACKAGE") and
va.getQualifier().getType() instanceof TypeIntent
)
}
}
/** A method that sets the MIME type of an intent. */
class SetTypeMethod extends Method {
SetTypeMethod() {
this.hasName(["setType", "setTypeAndNormalize"]) and
this.getDeclaringType() instanceof TypeIntent
}
}
/** A method that sets the data URI and the MIME type of an intent. */
class SetDataAndTypeMethod extends Method {
SetDataAndTypeMethod() {
this.hasName(["setDataAndType", "setDataAndTypeAndNormalize"]) and
this.getDeclaringType() instanceof TypeIntent
}
}
/** A method that sets the data URI of an intent. */
class SetDataMethod extends Method {
SetDataMethod() {
this.hasName(["setData", "setDataAndNormalize", "setDataAndType", "setDataAndTypeAndNormalize"]) and
this.getDeclaringType() instanceof TypeIntent
}
}
/** A dataflow sink for the URI of an intent. */
class SetDataSink extends DataFlow::ExprNode {
SetDataSink() {
exists(MethodAccess ma |
this.getExpr() = ma.getQualifier() and
ma.getMethod() instanceof SetDataMethod
)
}
}
/** A method that generates a URI. */
class UriConstructorMethod extends Method {
UriConstructorMethod() {
this.hasQualifiedName("android.net", "Uri", ["fromFile", "fromParts"]) or
this.hasQualifiedName("androidx.core.content", "FileProvider", "getUriForFile")
}
}
/**
* A dataflow source representing the URIs which an APK not controlled by the
* application may come from. Including external storage and web URLs.
*/
class ExternalApkSource extends DataFlow::Node {
ExternalApkSource() {
sourceNode(this, "android-external-storage-dir") or
this.asExpr().(MethodAccess).getMethod() instanceof UriConstructorMethod or
this.asExpr().(StringLiteral).getValue().matches("file://%") or
this instanceof RemoteFlowSource
}
}
/** The `setAction` method of the `android.content.Intent` class. */
class SetActionMethod extends Method {
SetActionMethod() {
this.hasName("setAction") and
this.getDeclaringType() instanceof TypeIntent
}
}

View File

@@ -0,0 +1,121 @@
/** Provides dataflow configurations to reason about installation of arbitrary Android APKs. */
import java
import semmle.code.java.dataflow.DataFlow
import semmle.code.java.dataflow.TaintTracking
private import semmle.code.java.security.ArbitraryApkInstallation
/**
* A dataflow configuration for flow from an external source of an APK to the
* `setData[AndType][AndNormalize]` method of an intent.
*/
private module ApkInstallationConfiguration implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) { node instanceof ExternalApkSource }
predicate isSink(DataFlow::Node node) {
exists(MethodAccess ma |
ma.getMethod() instanceof SetDataMethod and
ma.getArgument(0) = node.asExpr() and
(
PackageArchiveMimeTypeFlow::hasFlowToExpr(ma.getQualifier())
or
InstallPackageActionFlow::hasFlowToExpr(ma.getQualifier())
)
)
}
}
module ApkInstallationFlow = DataFlow::Make<ApkInstallationConfiguration>;
private newtype ActionState =
ActionUnset() or
HasInstallPackageAction()
/**
* A dataflow configuration tracking the flow from the `android.content.Intent.ACTION_INSTALL_PACKAGE`
* constant to either the constructor of an intent or the `setAction` method of an intent.
*
* This is used to track if an intent is used to install an APK.
*/
private module InstallPackageActionConfiguration implements DataFlow::StateConfigSig {
class FlowState = ActionState;
predicate isSource(DataFlow::Node source, FlowState state) {
source.asExpr() instanceof InstallPackageAction and state instanceof ActionUnset
}
predicate isAdditionalFlowStep(
DataFlow::Node node1, FlowState state1, DataFlow::Node node2, FlowState state2
) {
state1 instanceof ActionUnset and
state2 instanceof HasInstallPackageAction and
(
exists(ConstructorCall cc |
cc.getConstructedType() instanceof TypeIntent and
node1.asExpr() = cc.getArgument(0) and
node1.asExpr().getType() instanceof TypeString and
node2.asExpr() = cc
)
or
exists(MethodAccess ma |
ma.getMethod() instanceof SetActionMethod and
node1.asExpr() = ma.getArgument(0) and
node2.asExpr() = ma.getQualifier()
)
)
}
predicate isSink(DataFlow::Node node, FlowState state) {
state instanceof HasInstallPackageAction and node.asExpr().getType() instanceof TypeIntent
}
predicate isBarrier(DataFlow::Node node, FlowState state) { none() }
}
private module InstallPackageActionFlow =
TaintTracking::MakeWithState<InstallPackageActionConfiguration>;
private newtype MimeTypeState =
MimeTypeUnset() or
HasPackageArchiveMimeType()
/**
* A dataflow configuration tracking the flow of the Android APK MIME type to
* the `setType` or `setTypeAndNormalize` method of an intent, followed by a call
* to `setData[AndType][AndNormalize]`.
*/
private module PackageArchiveMimeTypeConfiguration implements DataFlow::StateConfigSig {
class FlowState = MimeTypeState;
predicate isSource(DataFlow::Node node, FlowState state) {
node.asExpr() instanceof PackageArchiveMimeTypeLiteral and
state instanceof MimeTypeUnset
}
predicate isAdditionalFlowStep(
DataFlow::Node node1, FlowState state1, DataFlow::Node node2, FlowState state2
) {
state1 instanceof MimeTypeUnset and
state2 instanceof HasPackageArchiveMimeType and
exists(MethodAccess ma |
ma.getQualifier() = node2.asExpr() and
(
ma.getMethod() instanceof SetTypeMethod and
ma.getArgument(0) = node1.asExpr()
or
ma.getMethod() instanceof SetDataAndTypeMethod and
ma.getArgument(1) = node1.asExpr()
)
)
}
predicate isSink(DataFlow::Node node, FlowState state) {
state instanceof HasPackageArchiveMimeType and
node instanceof SetDataSink
}
predicate isBarrier(DataFlow::Node node, FlowState state) { none() }
}
private module PackageArchiveMimeTypeFlow =
TaintTracking::MakeWithState<PackageArchiveMimeTypeConfiguration>;

View File

@@ -32,8 +32,12 @@ private class LengthRestrictedMethod extends Method {
}
}
/** A configuration for Polynomial ReDoS queries. */
class PolynomialRedosConfig extends TaintTracking::Configuration {
/**
* DEPRECATED: Use `PolynomialRedosFlow` instead.
*
* A configuration for Polynomial ReDoS queries.
*/
deprecated class PolynomialRedosConfig extends TaintTracking::Configuration {
PolynomialRedosConfig() { this = "PolynomialRedosConfig" }
override predicate isSource(DataFlow::Node src) { src instanceof RemoteFlowSource }
@@ -47,11 +51,34 @@ class PolynomialRedosConfig extends TaintTracking::Configuration {
}
}
/** Holds if there is flow from `source` to `sink` that is matched against the regexp term `regexp` that is vulnerable to Polynomial ReDoS. */
predicate hasPolynomialReDoSResult(
/**
* DEPRECATED: Use `PolynomialRedosFlow` instead.
*
* Holds if there is flow from `source` to `sink` that is matched against the regexp term `regexp` that is vulnerable to Polynomial ReDoS.
*/
deprecated predicate hasPolynomialReDoSResult(
DataFlow::PathNode source, DataFlow::PathNode sink,
SuperlinearBackTracking::PolynomialBackTrackingTerm regexp
) {
any(PolynomialRedosConfig config).hasFlowPath(source, sink) and
regexp.getRootTerm() = sink.getNode().(PolynomialRedosSink).getRegExp()
}
/** A configuration for Polynomial ReDoS queries. */
private module PolynomialRedosConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node src) { src instanceof RemoteFlowSource }
predicate isSink(DataFlow::Node sink) {
exists(SuperlinearBackTracking::PolynomialBackTrackingTerm regexp |
regexp.getRootTerm() = sink.(PolynomialRedosSink).getRegExp()
)
}
predicate isBarrier(DataFlow::Node node) {
node.getType() instanceof PrimitiveType or
node.getType() instanceof BoxedType or
node.asExpr().(MethodAccess).getMethod() instanceof LengthRestrictedMethod
}
}
module PolynomialRedosFlow = TaintTracking::Make<PolynomialRedosConfig>;

View File

@@ -0,0 +1,72 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>
Android allows an application to install an Android Package Kit (APK)
using an <code>Intent</code> with
the <code>"application/vnd.android.package-archive"</code> MIME type. If
the file used in the <code>Intent</code> is from a location that is not
controlled by the application (for example, an SD card that is
universally writable), this can result in the unintended installation of untrusted applications.
</p>
</overview>
<recommendation>
<p>
You should install packages using
the <code>PackageInstaller</code> class.
</p>
<p>
If you need to install from a file, you should use
a <code>FileProvider</code>. Content providers can provide more specific
permissions than file system permissions can.
</p>
<p>
When your application does not require package installations, do not add
the <code>REQUEST_INSTALL_PACKAGES</code> permission in the manifest file.
</p>
</recommendation>
<example>
<p>
In the following (bad) example, the package is installed from a file which
may be altered by another application:
</p>
<sample src="InstallApkWithFile.java"/>
<p>
In the following (good) example, the package is installed by using
a <code>FileProvider</code>:
</p>
<sample src="InstallApkWithFileProvider.java"/>
<p>
In the following (good) example, the package is installed using an
instance of the <code>android.content.pm.PackageInstaller</code> class:
</p>
<sample src="InstallApkWithPackageInstaller.java"/>
</example>
<references>
<li>
Android Developers: <a href="https://developer.android.com/reference/android/content/Intent#ACTION_INSTALL_PACKAGE">Intent.ACTION_INSTALL_PACKAGE</a>.
</li>
<li>
Android Developers: <a href="https://developer.android.com/reference/android/Manifest.permission#REQUEST_INSTALL_PACKAGES">Manifest.permission.REQUEST_INSTALL_PACKAGES</a>.
</li>
<li>
Android Developers: <a href="https://developer.android.com/reference/android/content/pm/PackageInstaller">PackageInstaller</a>.
</li>
<li>
Android Developers: <a href="https://developer.android.com/reference/androidx/core/content/FileProvider">FileProvider</a>.
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,19 @@
/**
 * @id java/android/arbitrary-apk-installation
 * @name Android APK installation
 * @description Creating an intent with a URI pointing to an untrusted file can lead to the installation of an untrusted application.
 * @kind path-problem
 * @security-severity 9.3
 * @problem.severity error
 * @precision medium
 * @tags security
 *       external/cwe/cwe-094
 */
import java
import semmle.code.java.security.ArbitraryApkInstallationQuery
import ApkInstallationFlow::PathGraph
from ApkInstallationFlow::PathNode source, ApkInstallationFlow::PathNode sink
where ApkInstallationFlow::hasFlowPath(source, sink)
select sink.getNode(), source, sink, "Arbitrary Android APK installation."

View File

@@ -0,0 +1,14 @@
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import java.io.File;
/* Get a file from external storage */
File file = new File(Environment.getExternalStorageDirectory(), "myapp.apk");
Intent intent = new Intent(Intent.ACTION_VIEW);
/* Set the mimetype to APK */
intent.setDataAndType(Uri.fromFile(file), "application/vnd.android.package-archive");
startActivity(intent);

View File

@@ -0,0 +1,31 @@
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import androidx.core.content.FileProvider;
import java.io.File;
import java.io.FileOutputStream;
String tempFilename = "temporary.apk";
byte[] buffer = new byte[16384];
/* Copy application asset into temporary file */
try (InputStream is = getAssets().open(assetName);
FileOutputStream fout = openFileOutput(tempFilename, Context.MODE_PRIVATE)) {
int n;
while ((n=is.read(buffer)) >= 0) {
fout.write(buffer, 0, n);
}
}
/* Expose temporary file with FileProvider */
File toInstall = new File(this.getFilesDir(), tempFilename);
Uri applicationUri = FileProvider.getUriForFile(this, "com.example.apkprovider", toInstall);
/* Create Intent and set data to APK file. */
Intent intent = new Intent(Intent.ACTION_INSTALL_PACKAGE);
intent.setData(applicationUri);
intent.setFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
startActivity(intent);

View File

@@ -0,0 +1,32 @@
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInstaller;
private static final String PACKAGE_INSTALLED_ACTION =
"com.example.SESSION_API_PACKAGE_INSTALLED";
/* Create the package installer and session */
PackageInstaller packageInstaller = getPackageManager().getPackageInstaller();
PackageInstaller.SessionParams params =
new PackageInstaller.SessionParams(PackageInstaller.SessionParams.MODE_FULL_INSTALL);
int sessionId = packageInstaller.createSession(params);
session = packageInstaller.openSession(sessionId);
/* Load asset into session */
try (OutputStream packageInSession = session.openWrite("package", 0, -1);
InputStream is = getAssets().open(assetName)) {
byte[] buffer = new byte[16384];
int n;
while ((n = is.read(buffer)) >= 0) {
packageInSession.write(buffer, 0, n);
}
}
/* Create status receiver */
Intent intent = new Intent(this, InstallApkSessionApi.class);
intent.setAction(PACKAGE_INSTALLED_ACTION);
PendingIntent pendingIntent = PendingIntent.getActivity(context, 0, intent, 0);
IntentSender statusReceiver = pendingIntent.getIntentSender();
/* Commit the session */
session.commit(statusReceiver);

View File

@@ -15,35 +15,39 @@
import java
import semmle.code.java.dataflow.FlowSources
import ArithmeticCommon
import DataFlow::PathGraph
class RemoteUserInputOverflowConfig extends TaintTracking::Configuration {
RemoteUserInputOverflowConfig() { this = "ArithmeticTainted.ql:RemoteUserInputOverflowConfig" }
module RemoteUserInputOverflowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
predicate isSink(DataFlow::Node sink) { overflowSink(_, sink.asExpr()) }
override predicate isSink(DataFlow::Node sink) { overflowSink(_, sink.asExpr()) }
override predicate isSanitizer(DataFlow::Node n) { overflowBarrier(n) }
predicate isBarrier(DataFlow::Node n) { overflowBarrier(n) }
}
class RemoteUserInputUnderflowConfig extends TaintTracking::Configuration {
RemoteUserInputUnderflowConfig() { this = "ArithmeticTainted.ql:RemoteUserInputUnderflowConfig" }
module RemoteUserInputUnderflowConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
predicate isSink(DataFlow::Node sink) { underflowSink(_, sink.asExpr()) }
override predicate isSink(DataFlow::Node sink) { underflowSink(_, sink.asExpr()) }
override predicate isSanitizer(DataFlow::Node n) { underflowBarrier(n) }
predicate isBarrier(DataFlow::Node n) { underflowBarrier(n) }
}
from DataFlow::PathNode source, DataFlow::PathNode sink, ArithExpr exp, string effect
module RemoteUserInputOverflow = TaintTracking::Make<RemoteUserInputOverflowConfig>;
module RemoteUserInputUnderflow = TaintTracking::Make<RemoteUserInputUnderflowConfig>;
module Flow =
DataFlow::MergePathGraph<RemoteUserInputOverflow::PathNode, RemoteUserInputUnderflow::PathNode, RemoteUserInputOverflow::PathGraph, RemoteUserInputUnderflow::PathGraph>;
import Flow::PathGraph
from Flow::PathNode source, Flow::PathNode sink, ArithExpr exp, string effect
where
any(RemoteUserInputOverflowConfig c).hasFlowPath(source, sink) and
RemoteUserInputOverflow::hasFlowPath(source.asPathNode1(), sink.asPathNode1()) and
overflowSink(exp, sink.getNode().asExpr()) and
effect = "overflow"
or
any(RemoteUserInputUnderflowConfig c).hasFlowPath(source, sink) and
RemoteUserInputUnderflow::hasFlowPath(source.asPathNode2(), sink.asPathNode2()) and
underflowSink(exp, sink.getNode().asExpr()) and
effect = "underflow"
select exp, source, sink,

View File

@@ -14,8 +14,7 @@
import java
import semmle.code.java.os.OSCheck
import TempDirUtils
import DataFlow::PathGraph
import semmle.code.java.dataflow.TaintTracking2
import semmle.code.java.dataflow.TaintTracking
abstract private class MethodFileSystemFileCreation extends Method {
MethodFileSystemFileCreation() { this.getDeclaringType() instanceof TypeFile }
@@ -127,19 +126,17 @@ private class IsSpecificWindowsSanitizer extends WindowsOsSanitizer {
* A taint tracking configuration tracking the access of the system temporary directory
* flowing to the creation of files or directories.
*/
private class TempDirSystemGetPropertyToCreateConfig extends TaintTracking::Configuration {
TempDirSystemGetPropertyToCreateConfig() { this = "TempDirSystemGetPropertyToCreateConfig" }
override predicate isSource(DataFlow::Node source) {
module TempDirSystemGetPropertyToCreateConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) {
source.asExpr() instanceof ExprSystemGetPropertyTempDirTainted
}
override predicate isSink(DataFlow::Node sink) {
predicate isSink(DataFlow::Node sink) {
sink instanceof FileCreationSink and
not any(TempDirSystemGetPropertyDirectlyToMkdirConfig config).hasFlowTo(sink)
not TempDirSystemGetPropertyDirectlyToMkdir::hasFlowTo(sink)
}
override predicate isSanitizer(DataFlow::Node sanitizer) {
predicate isBarrier(DataFlow::Node sanitizer) {
exists(FilesSanitizingCreationMethodAccess sanitisingMethodAccess |
sanitizer.asExpr() = sanitisingMethodAccess.getArgument(0)
)
@@ -148,6 +145,9 @@ private class TempDirSystemGetPropertyToCreateConfig extends TaintTracking::Conf
}
}
module TempDirSystemGetPropertyToCreate =
TaintTracking::Make<TempDirSystemGetPropertyToCreateConfig>;
/**
 * Configuration that tracks calls to `mkdir` or `mkdirs` that are directly on the temp directory system property.
* Examples:
@@ -158,12 +158,8 @@ private class TempDirSystemGetPropertyToCreateConfig extends TaintTracking::Conf
* As such, this code pattern is filtered out as an explicit vulnerability in
* `TempDirSystemGetPropertyToCreateConfig::isSink`.
*/
private class TempDirSystemGetPropertyDirectlyToMkdirConfig extends TaintTracking2::Configuration {
TempDirSystemGetPropertyDirectlyToMkdirConfig() {
this = "TempDirSystemGetPropertyDirectlyToMkdirConfig"
}
override predicate isSource(DataFlow::Node node) {
module TempDirSystemGetPropertyDirectlyToMkdirConfig implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node node) {
exists(ExprSystemGetPropertyTempDirTainted propertyGetExpr, DataFlow::Node callSite |
DataFlow::localFlow(DataFlow::exprNode(propertyGetExpr), callSite)
|
@@ -171,17 +167,20 @@ private class TempDirSystemGetPropertyDirectlyToMkdirConfig extends TaintTrackin
)
}
override predicate isSink(DataFlow::Node node) {
predicate isSink(DataFlow::Node node) {
exists(MethodAccess ma | ma.getMethod() instanceof MethodFileDirectoryCreation |
ma.getQualifier() = node.asExpr()
)
}
override predicate isSanitizer(DataFlow::Node sanitizer) {
predicate isBarrier(DataFlow::Node sanitizer) {
isFileConstructorArgument(sanitizer.asExpr(), _, _)
}
}
module TempDirSystemGetPropertyDirectlyToMkdir =
TaintTracking::Make<TempDirSystemGetPropertyDirectlyToMkdirConfig>;
//
// Begin configuration for tracking single-method calls that are vulnerable.
//
@@ -193,6 +192,8 @@ abstract class MethodAccessInsecureFileCreation extends MethodAccess {
* Gets the type of entity created (e.g. `file`, `directory`, ...).
*/
abstract string getFileSystemEntityType();
DataFlow::Node getNode() { result.asExpr() = this }
}
/**
@@ -235,39 +236,47 @@ class MethodAccessInsecureGuavaFilesCreateTempFile extends MethodAccessInsecureF
}
/**
* A hack: we include use of inherently insecure methods, which don't have any associated
* We include use of inherently insecure methods, which don't have any associated
* flow path, in with results describing a path from reading `java.io.tmpdir` or similar to use
* in a file creation op.
*
* We achieve this by making inherently-insecure method invocations both a source and a sink in
* this configuration, resulting in a zero-length path which is type-compatible with the actual
* path-flow results.
* We achieve this by making inherently-insecure method invocations into an edge-less graph,
 * resulting in zero-length paths.
*/
class InsecureMethodPseudoConfiguration extends DataFlow::Configuration {
InsecureMethodPseudoConfiguration() { this = "InsecureMethodPseudoConfiguration" }
override predicate isSource(DataFlow::Node node) {
node.asExpr() instanceof MethodAccessInsecureFileCreation
module InsecureMethodPathGraph implements DataFlow::PathGraphSig<MethodAccessInsecureFileCreation> {
predicate edges(MethodAccessInsecureFileCreation n1, MethodAccessInsecureFileCreation n2) {
none()
}
override predicate isSink(DataFlow::Node node) {
node.asExpr() instanceof MethodAccessInsecureFileCreation
predicate nodes(MethodAccessInsecureFileCreation n, string key, string val) {
key = "semmle.label" and val = n.toString()
}
predicate subpaths(
MethodAccessInsecureFileCreation n1, MethodAccessInsecureFileCreation n2,
MethodAccessInsecureFileCreation n3, MethodAccessInsecureFileCreation n4
) {
none()
}
}
from DataFlow::PathNode source, DataFlow::PathNode sink, string message
module Flow =
DataFlow::MergePathGraph<TempDirSystemGetPropertyToCreate::PathNode, MethodAccessInsecureFileCreation, TempDirSystemGetPropertyToCreate::PathGraph, InsecureMethodPathGraph>;
import Flow::PathGraph
from Flow::PathNode source, Flow::PathNode sink, string message
where
(
any(TempDirSystemGetPropertyToCreateConfig conf).hasFlowPath(source, sink) and
TempDirSystemGetPropertyToCreate::hasFlowPath(source.asPathNode1(), sink.asPathNode1()) and
message =
"Local information disclosure vulnerability from $@ due to use of file or directory readable by other local users."
or
any(InsecureMethodPseudoConfiguration conf).hasFlowPath(source, sink) and
source = sink and
// Note this message has no "$@" placeholder, so the "system temp directory" template parameter below is not used.
message =
"Local information disclosure vulnerability due to use of " +
source.getNode().asExpr().(MethodAccessInsecureFileCreation).getFileSystemEntityType() +
" readable by other local users."
source.asPathNode2().getFileSystemEntityType() + " readable by other local users."
) and
not isPermissionsProtectedTempDirUse(sink.getNode())
select source.getNode(), source, sink, message, source.getNode(), "system temp directory"

View File

@@ -15,12 +15,14 @@
import java
import semmle.code.java.security.regexp.PolynomialReDoSQuery
import DataFlow::PathGraph
import PolynomialRedosFlow::PathGraph
from
DataFlow::PathNode source, DataFlow::PathNode sink,
PolynomialRedosFlow::PathNode source, PolynomialRedosFlow::PathNode sink,
SuperlinearBackTracking::PolynomialBackTrackingTerm regexp
where hasPolynomialReDoSResult(source, sink, regexp)
where
PolynomialRedosFlow::hasFlowPath(source, sink) and
regexp.getRootTerm() = sink.getNode().(PolynomialRedosSink).getRegExp()
select sink, source, sink,
"This $@ that depends on a $@ may run slow on strings " + regexp.getPrefixMessage() +
"with many repetitions of '" + regexp.getPumpString() + "'.", regexp, "regular expression",

View File

@@ -0,0 +1,5 @@
---
category: newQuery
---
* Added a new query, `java/android/arbitrary-apk-installation`, to detect installation of APKs from untrusted sources.

View File

@@ -182,4 +182,18 @@ public class A {
public Object field1;
public Object field2;
void foo4() {
Producer1Consumer3<Integer> pc = new Producer1Consumer3<Integer>() {
int cfield = 0;
@Override public Integer[] make() {
return new Integer[] { cfield };
}
@Override public void eat(Integer[] xs) {
cfield = xs[0];
}
};
applyConsumer3(new Integer[] { (Integer)source(21) }, pc);
sink(applyProducer1(pc)[0]); // $ flow=21
}
}

View File

@@ -56,4 +56,18 @@ class A {
return;
}
}
enum E {
/**
* Javadoc for enum constant
*/
A,
B,
C;
}
/**
* Javadoc for fields
*/
int i, j, k;
}

View File

@@ -125,3 +125,30 @@ A.java:
# 55| 1: [LocalVariableDeclExpr] rte
# 55| 1: [BlockStmt] { ... }
# 56| 0: [ReturnStmt] return ...
# 60| 10: [Class] E
# 64| 3: [FieldDeclaration] E A;
#-----| -3: (Javadoc)
# 61| 1: [Javadoc] /** Javadoc for enum constant */
# 62| 0: [JavadocText] Javadoc for enum constant
# 64| -1: [TypeAccess] E
# 64| 0: [ClassInstanceExpr] new E(...)
# 64| -3: [TypeAccess] E
# 65| 4: [FieldDeclaration] E B;
#-----| -3: (Javadoc)
# 61| 1: [Javadoc] /** Javadoc for enum constant */
# 62| 0: [JavadocText] Javadoc for enum constant
# 65| -1: [TypeAccess] E
# 65| 0: [ClassInstanceExpr] new E(...)
# 65| -3: [TypeAccess] E
# 66| 5: [FieldDeclaration] E C;
#-----| -3: (Javadoc)
# 61| 1: [Javadoc] /** Javadoc for enum constant */
# 62| 0: [JavadocText] Javadoc for enum constant
# 66| -1: [TypeAccess] E
# 66| 0: [ClassInstanceExpr] new E(...)
# 66| -3: [TypeAccess] E
# 72| 11: [FieldDeclaration] int i, ...;
#-----| -3: (Javadoc)
# 69| 1: [Javadoc] /** Javadoc for fields */
# 70| 0: [JavadocText] Javadoc for fields
# 72| -1: [TypeAccess] int

View File

@@ -0,0 +1,58 @@
import android.app.Activity;
import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import java.io.File;
/**
 * Test fixture for the `java/android/arbitrary-apk-installation` query.
 * Each line annotated with `$ hasApkInstallation` marks a sink that
 * `ApkInstallationFlow` is expected to flag (checked via inline expectations).
 */
public class ApkInstallation extends Activity {
    // MIME type that makes an ACTION_VIEW intent trigger the package installer.
    static final String APK_MIMETYPE = "application/vnd.android.package-archive";
    public void installAPK(String path) {
        // BAD: the path is not checked
        Intent intent = new Intent(Intent.ACTION_VIEW);
        intent.setDataAndType(Uri.fromFile(new File(path)), "application/vnd.android.package-archive"); // $ hasApkInstallation
        startActivity(intent);
    }
    public void installAPK3(String path) {
        Intent intent = new Intent(Intent.ACTION_VIEW);
        intent.setType(APK_MIMETYPE);
        // BAD: the path is not checked
        intent.setData(Uri.fromFile(new File(path))); // $ hasApkInstallation
        startActivity(intent);
    }
    public void installAPKFromExternalStorage(String path) {
        // BAD: file is from external storage
        File file = new File(Environment.getExternalStorageDirectory(), path);
        Intent intent = new Intent(Intent.ACTION_VIEW);
        intent.setDataAndType(Uri.fromFile(file), APK_MIMETYPE); // $ hasApkInstallation
        startActivity(intent);
    }
    public void installAPKFromExternalStorageWithActionInstallPackage(String path) {
        // BAD: file is from external storage
        File file = new File(Environment.getExternalStorageDirectory(), path);
        Intent intent = new Intent(Intent.ACTION_INSTALL_PACKAGE);
        intent.setData(Uri.fromFile(file)); // $ hasApkInstallation
        startActivity(intent);
    }
    public void installAPKInstallPackageLiteral(String path) {
        // BAD: file is from external storage (action given as a string literal)
        File file = new File(Environment.getExternalStorageDirectory(), path);
        Intent intent = new Intent("android.intent.action.INSTALL_PACKAGE");
        intent.setData(Uri.fromFile(file)); // $ hasApkInstallation
        startActivity(intent);
    }
    public void otherIntent(File file) {
        Intent intent = new Intent(this, OtherActivity.class);
        intent.setAction(Intent.ACTION_VIEW);
        // BAD: the file is from unknown source
        intent.setData(Uri.fromFile(file)); // $ hasApkInstallation
    }
}
// Target activity used only so `otherIntent` can build an explicit intent.
class OtherActivity extends Activity {
}

View File

@@ -0,0 +1,19 @@
import java
import semmle.code.java.dataflow.DataFlow
import semmle.code.java.security.ArbitraryApkInstallationQuery
import TestUtilities.InlineExpectationsTest
/**
 * Inline-expectations test: reports every `ApkInstallationFlow` sink under the
 * `hasApkInstallation` tag, so test sources can annotate expected sinks with
 * `$ hasApkInstallation` comments.
 */
class HasApkInstallationTest extends InlineExpectationsTest {
  HasApkInstallationTest() { this = "HasApkInstallationTest" }

  override string getARelevantTag() { result = "hasApkInstallation" }

  override predicate hasActualResult(Location location, string element, string tag, string value) {
    tag = "hasApkInstallation" and
    exists(DataFlow::Node sink | ApkInstallationFlow::hasFlowTo(sink) |
      sink.getLocation() = location and
      element = sink.toString() and
      // Inline expectations carry no value payload for this tag.
      value = ""
    )
  }
}

View File

@@ -1 +1 @@
//semmle-extractor-options: --javac-args -cp ${testdir}/../../../stubs/validation-api-2.0.1.Final:${testdir}/../../../stubs/springframework-5.3.8:${testdir}/../../../stubs/apache-commons-jexl-2.1.1:${testdir}/../../../stubs/apache-commons-jexl-3.1:${testdir}/../../../stubs/apache-commons-logging-1.2:${testdir}/../../../stubs/mvel2-2.4.7:${testdir}/../../../stubs/groovy-all-3.0.7:${testdir}/../../../stubs/servlet-api-2.4:${testdir}/../../../stubs/scriptengine:${testdir}/../../../stubs/jsr223-api:${testdir}/../../../stubs/apache-freemarker-2.3.31:${testdir}/../../../stubs/jinjava-2.6.0:${testdir}/../../../stubs/pebble-3.1.5:${testdir}/../../../stubs/thymeleaf-3.0.14:${testdir}/../../../stubs/apache-velocity-2.3
//semmle-extractor-options: --javac-args -cp ${testdir}/../../../stubs/validation-api-2.0.1.Final:${testdir}/../../../stubs/springframework-5.3.8:${testdir}/../../../stubs/apache-commons-jexl-2.1.1:${testdir}/../../../stubs/apache-commons-jexl-3.1:${testdir}/../../../stubs/apache-commons-logging-1.2:${testdir}/../../../stubs/mvel2-2.4.7:${testdir}/../../../stubs/groovy-all-3.0.7:${testdir}/../../../stubs/servlet-api-2.4:${testdir}/../../../stubs/scriptengine:${testdir}/../../../stubs/jsr223-api:${testdir}/../../../stubs/apache-freemarker-2.3.31:${testdir}/../../../stubs/jinjava-2.6.0:${testdir}/../../../stubs/pebble-3.1.5:${testdir}/../../../stubs/thymeleaf-3.0.14:${testdir}/../../../stubs/apache-velocity-2.3:${testdir}/../../../stubs/google-android-9.0.0

View File

@@ -8,10 +8,10 @@ class HasPolyRedos extends InlineExpectationsTest {
override predicate hasActualResult(Location location, string element, string tag, string value) {
tag = "hasPolyRedos" and
exists(DataFlow::PathNode sink |
hasPolynomialReDoSResult(_, sink, _) and
location = sink.getNode().getLocation() and
element = sink.getNode().toString() and
exists(DataFlow::Node sink |
PolynomialRedosFlow::hasFlowTo(sink) and
location = sink.getLocation() and
element = sink.toString() and
value = ""
)
}

View File

@@ -224,6 +224,7 @@ const astProperties: string[] = [
"argument",
"argumentExpression",
"arguments",
"assertClause",
"assertsModifier",
"asteriskToken",
"attributes",

View File

@@ -314,7 +314,8 @@ public class ESNextParser extends JSXParser {
this.parseExportSpecifiersMaybe(specifiers, exports);
}
Literal source = (Literal) this.parseExportFrom(specifiers, null, true);
return this.finishNode(new ExportNamedDeclaration(exportStart, null, specifiers, source));
Expression assertion = this.parseImportOrExportAssertionAndSemicolon();
return this.finishNode(new ExportNamedDeclaration(exportStart, null, specifiers, source, assertion));
}
return super.parseExportRest(exportStart, exports);
@@ -330,7 +331,8 @@ public class ESNextParser extends JSXParser {
List<ExportSpecifier> specifiers = CollectionUtil.makeList(nsSpec);
this.parseExportSpecifiersMaybe(specifiers, exports);
Literal source = (Literal) this.parseExportFrom(specifiers, null, true);
return this.finishNode(new ExportNamedDeclaration(exportStart, null, specifiers, source));
Expression assertion = this.parseImportOrExportAssertionAndSemicolon();
return this.finishNode(new ExportNamedDeclaration(exportStart, null, specifiers, source, assertion));
}
return super.parseExportAll(exportStart, starLoc, exports);
@@ -435,8 +437,15 @@ public class ESNextParser extends JSXParser {
*/
private DynamicImport parseDynamicImport(Position startLoc) {
Expression source = parseMaybeAssign(false, null, null);
Expression attributes = null;
if (this.eat(TokenType.comma)) {
if (this.type != TokenType.parenR) { // Skip if the comma was a trailing comma
attributes = this.parseMaybeAssign(false, null, null);
this.eat(TokenType.comma); // Allow trailing comma
}
}
this.expect(TokenType.parenR);
DynamicImport di = this.finishNode(new DynamicImport(new SourceLocation(startLoc), source));
DynamicImport di = this.finishNode(new DynamicImport(new SourceLocation(startLoc), source, attributes));
return di;
}

View File

@@ -2783,7 +2783,7 @@ public class Parser {
boolean isBreak = keyword.equals("break");
this.next();
Identifier label = null;
if (this.eat(TokenType.semi) || this.insertSemicolon()) {
if (this.eagerlyTrySemicolon()) {
label = null;
} else if (this.type != TokenType.name) {
this.unexpected();
@@ -2893,6 +2893,15 @@ public class Parser {
new IfStatement(new SourceLocation(startLoc), test, consequent, alternate));
}
/**
* Consumes or inserts a semicolon if possible, and returns true if a semicolon was consumed or inserted.
*
* Returns false if there was no semicolon and insertion was not possible.
*/
protected boolean eagerlyTrySemicolon() {
return this.eat(TokenType.semi) || this.insertSemicolon();
}
protected ReturnStatement parseReturnStatement(Position startLoc) {
if (!this.inFunction && !this.options.allowReturnOutsideFunction())
this.raise(this.start, "'return' outside of function");
@@ -2902,7 +2911,7 @@ public class Parser {
// optional arguments, we eagerly look for a semicolon or the
// possibility to insert one.
Expression argument;
if (this.eat(TokenType.semi) || this.insertSemicolon()) {
if (this.eagerlyTrySemicolon()) {
argument = null;
} else {
argument = this.parseExpression(false, null);
@@ -3404,6 +3413,7 @@ public class Parser {
Statement declaration;
List<ExportSpecifier> specifiers;
Expression source = null;
Expression assertion = null;
if (this.shouldParseExportStatement()) {
declaration = this.parseStatement(true, false);
if (declaration == null) return null;
@@ -3419,11 +3429,13 @@ public class Parser {
declaration = null;
specifiers = this.parseExportSpecifiers(exports);
source = parseExportFrom(specifiers, source, false);
assertion = parseImportOrExportAssertionAndSemicolon();
}
return this.finishNode(
new ExportNamedDeclaration(loc, declaration, specifiers, (Literal) source));
new ExportNamedDeclaration(loc, declaration, specifiers, (Literal) source, assertion));
}
/** Parses the 'from' clause of an export, not including the assertion or semicolon. */
protected Expression parseExportFrom(
List<ExportSpecifier> specifiers, Expression source, boolean expectFrom) {
if (this.eatContextual("from")) {
@@ -3442,14 +3454,14 @@ public class Parser {
source = null;
}
this.semicolon();
return source;
}
protected ExportDeclaration parseExportAll(
SourceLocation loc, Position starLoc, Set<String> exports) {
Expression source = parseExportFrom(null, null, true);
return this.finishNode(new ExportAllDeclaration(loc, (Literal) source));
Expression assertion = parseImportOrExportAssertionAndSemicolon();
return this.finishNode(new ExportAllDeclaration(loc, (Literal) source, assertion));
}
private void checkExport(Set<String> exports, String name, Position pos) {
@@ -3514,6 +3526,16 @@ public class Parser {
return parseImportRest(loc);
}
protected Expression parseImportOrExportAssertionAndSemicolon() {
Expression result = null;
if (!this.eagerlyTrySemicolon()) {
this.expectContextual("assert");
result = this.parseObj(false, null);
this.semicolon();
}
return result;
}
protected ImportDeclaration parseImportRest(SourceLocation loc) {
List<ImportSpecifier> specifiers;
Literal source;
@@ -3527,9 +3549,9 @@ public class Parser {
if (this.type != TokenType.string) this.unexpected();
source = (Literal) this.parseExprAtom(null);
}
this.semicolon();
Expression assertion = this.parseImportOrExportAssertionAndSemicolon();
if (specifiers == null) return null;
return this.finishNode(new ImportDeclaration(loc, specifiers, source));
return this.finishNode(new ImportDeclaration(loc, specifiers, source, assertion));
}
// Parses a comma-separated list of module imports.

View File

@@ -943,10 +943,12 @@ public class FlowParser extends ESNextParser {
// `export type { foo, bar };`
List<ExportSpecifier> specifiers = this.parseExportSpecifiers(exports);
this.parseExportFrom(specifiers, null, false);
this.parseImportOrExportAssertionAndSemicolon();
return null;
} else if (this.eat(TokenType.star)) {
if (this.eatContextual("as")) this.parseIdent(true);
this.parseExportFrom(null, null, true);
this.parseImportOrExportAssertionAndSemicolon();
return null;
} else {
// `export type Foo = Bar;`

View File

@@ -2,16 +2,23 @@ package com.semmle.js.ast;
public class DynamicImport extends Expression {
private final Expression source;
private final Expression attributes;
public DynamicImport(SourceLocation loc, Expression source) {
public DynamicImport(SourceLocation loc, Expression source, Expression attributes) {
super("DynamicImport", loc);
this.source = source;
this.attributes = attributes;
}
public Expression getSource() {
return source;
}
/** Returns the second "argument" provided to the import, such as <code>{ assert: { type: "json" }}</code>. */
public Expression getAttributes() {
return attributes;
}
@Override
public <C, R> R accept(Visitor<C, R> v, C c) {
return v.visit(this, c);

View File

@@ -9,16 +9,22 @@ package com.semmle.js.ast;
*/
public class ExportAllDeclaration extends ExportDeclaration {
private final Literal source;
private final Expression assertion;
public ExportAllDeclaration(SourceLocation loc, Literal source) {
public ExportAllDeclaration(SourceLocation loc, Literal source, Expression assertion) {
super("ExportAllDeclaration", loc);
this.source = source;
this.assertion = assertion;
}
public Literal getSource() {
return source;
}
public Expression getAssertion() {
return assertion;
}
@Override
public <C, R> R accept(Visitor<C, R> v, C c) {
return v.visit(this, c);

View File

@@ -15,20 +15,22 @@ public class ExportNamedDeclaration extends ExportDeclaration {
private final Statement declaration;
private final List<ExportSpecifier> specifiers;
private final Literal source;
private final Expression assertion;
private final boolean hasTypeKeyword;
public ExportNamedDeclaration(
SourceLocation loc, Statement declaration, List<ExportSpecifier> specifiers, Literal source) {
this(loc, declaration, specifiers, source, false);
SourceLocation loc, Statement declaration, List<ExportSpecifier> specifiers, Literal source, Expression assertion) {
this(loc, declaration, specifiers, source, assertion, false);
}
public ExportNamedDeclaration(
SourceLocation loc, Statement declaration, List<ExportSpecifier> specifiers, Literal source,
boolean hasTypeKeyword) {
Expression assertion, boolean hasTypeKeyword) {
super("ExportNamedDeclaration", loc);
this.declaration = declaration;
this.specifiers = specifiers;
this.source = source;
this.assertion = assertion;
this.hasTypeKeyword = hasTypeKeyword;
}
@@ -57,6 +59,11 @@ public class ExportNamedDeclaration extends ExportDeclaration {
return v.visit(this, c);
}
/** Returns the expression after the <code>assert</code> keyword, if any, such as <code>{ type: "json" }</code>. */
public Expression getAssertion() {
return assertion;
}
/** Returns true if this is an <code>export type</code> declaration. */
public boolean hasTypeKeyword() {
return hasTypeKeyword;

View File

@@ -23,18 +23,21 @@ public class ImportDeclaration extends Statement implements INodeWithSymbol {
/** The module from which declarations are imported. */
private final Literal source;
private final Expression assertion;
private int symbol = -1;
private boolean hasTypeKeyword;
public ImportDeclaration(SourceLocation loc, List<ImportSpecifier> specifiers, Literal source) {
this(loc, specifiers, source, false);
public ImportDeclaration(SourceLocation loc, List<ImportSpecifier> specifiers, Literal source, Expression assertion) {
this(loc, specifiers, source, assertion, false);
}
public ImportDeclaration(SourceLocation loc, List<ImportSpecifier> specifiers, Literal source, boolean hasTypeKeyword) {
public ImportDeclaration(SourceLocation loc, List<ImportSpecifier> specifiers, Literal source, Expression assertion, boolean hasTypeKeyword) {
super("ImportDeclaration", loc);
this.specifiers = specifiers;
this.source = source;
this.assertion = assertion;
this.hasTypeKeyword = hasTypeKeyword;
}
@@ -46,6 +49,11 @@ public class ImportDeclaration extends Statement implements INodeWithSymbol {
return specifiers;
}
/** Returns the expression after the <code>assert</code> keyword, if any, such as <code>{ type: "json" }</code>. */
public Expression getAssertion() {
return assertion;
}
@Override
public <C, R> R accept(Visitor<C, R> v, C c) {
return v.visit(this, c);

View File

@@ -523,7 +523,7 @@ public class NodeCopier implements Visitor<Void, INode> {
@Override
public ExportAllDeclaration visit(ExportAllDeclaration nd, Void c) {
return new ExportAllDeclaration(visit(nd.getLoc()), copy(nd.getSource()));
return new ExportAllDeclaration(visit(nd.getLoc()), copy(nd.getSource()), copy(nd.getAssertion()));
}
@Override
@@ -537,7 +537,8 @@ public class NodeCopier implements Visitor<Void, INode> {
visit(nd.getLoc()),
copy(nd.getDeclaration()),
copy(nd.getSpecifiers()),
copy(nd.getSource()));
copy(nd.getSource()),
copy(nd.getAssertion()));
}
@Override
@@ -558,7 +559,7 @@ public class NodeCopier implements Visitor<Void, INode> {
@Override
public ImportDeclaration visit(ImportDeclaration nd, Void c) {
return new ImportDeclaration(
visit(nd.getLoc()), copy(nd.getSpecifiers()), copy(nd.getSource()));
visit(nd.getLoc()), copy(nd.getSpecifiers()), copy(nd.getSource()), copy(nd.getAssertion()), nd.hasTypeKeyword());
}
@Override
@@ -678,7 +679,7 @@ public class NodeCopier implements Visitor<Void, INode> {
@Override
public INode visit(DynamicImport nd, Void c) {
return new DynamicImport(visit(nd.getLoc()), copy(nd.getSource()));
return new DynamicImport(visit(nd.getLoc()), copy(nd.getSource()), copy(nd.getAttributes()));
}
@Override

View File

@@ -1759,6 +1759,7 @@ public class ASTExtractor {
public Label visit(ExportAllDeclaration nd, Context c) {
Label lbl = super.visit(nd, c);
visit(nd.getSource(), lbl, 0);
visit(nd.getAssertion(), lbl, -10);
return lbl;
}
@@ -1774,6 +1775,7 @@ public class ASTExtractor {
Label lbl = super.visit(nd, c);
visit(nd.getDeclaration(), lbl, -1);
visit(nd.getSource(), lbl, -2);
visit(nd.getAssertion(), lbl, -10);
IdContext childContext =
nd.hasSource()
? IdContext.LABEL
@@ -1797,6 +1799,7 @@ public class ASTExtractor {
public Label visit(ImportDeclaration nd, Context c) {
Label lbl = super.visit(nd, c);
visit(nd.getSource(), lbl, -1);
visit(nd.getAssertion(), lbl, -10);
IdContext childContext =
nd.hasTypeKeyword()
? IdContext.TYPE_ONLY_IMPORT
@@ -1972,6 +1975,7 @@ public class ASTExtractor {
public Label visit(DynamicImport nd, Context c) {
Label key = super.visit(nd, c);
visit(nd.getSource(), key, 0);
visit(nd.getAttributes(), key, 1);
return key;
}

View File

@@ -177,6 +177,7 @@ public class TypeScriptASTConverter {
private static final Pattern EXPORT_DECL_START =
Pattern.compile("^export" + "(" + WHITESPACE_CHAR + "+default)?" + WHITESPACE_CHAR + "+");
private static final Pattern TYPEOF_START = Pattern.compile("^typeof" + WHITESPACE_CHAR + "+");
private static final Pattern ASSERT_START = Pattern.compile("^assert" + WHITESPACE_CHAR + "+");
private static final Pattern WHITESPACE_END_PAREN =
Pattern.compile("^" + WHITESPACE_CHAR + "*\\)");
@@ -342,6 +343,10 @@ public class TypeScriptASTConverter {
return convertArrowFunction(node, loc);
case "AsExpression":
return convertTypeAssertionExpression(node, loc);
case "AssertClause":
return convertAssertClause(node, loc);
case "AssertEntry":
return convertAssertEntry(node, loc);
case "SatisfiesExpression":
return convertSatisfiesExpression(node, loc);
case "AwaitExpression":
@@ -887,8 +892,8 @@ public class TypeScriptASTConverter {
private Node convertCallExpression(JsonObject node, SourceLocation loc) throws ParseError {
List<Expression> arguments = convertChildren(node, "arguments");
if (arguments.size() == 1 && hasKind(node.get("expression"), "ImportKeyword")) {
return new DynamicImport(loc, arguments.get(0));
if (arguments.size() >= 1 && hasKind(node.get("expression"), "ImportKeyword")) {
return new DynamicImport(loc, arguments.get(0), arguments.size() > 1 ? arguments.get(1) : null);
}
Expression callee = convertChild(node, "expression");
List<ITypeExpression> typeArguments = convertChildrenAsTypes(node, "typeArguments");
@@ -1193,15 +1198,16 @@ public class TypeScriptASTConverter {
private Node convertExportDeclaration(JsonObject node, SourceLocation loc) throws ParseError {
Literal source = tryConvertChild(node, "moduleSpecifier", Literal.class);
Expression assertion = convertChild(node, "assertClause");
if (hasChild(node, "exportClause")) {
boolean hasTypeKeyword = node.get("isTypeOnly").getAsBoolean();
List<ExportSpecifier> specifiers =
hasKind(node.get("exportClause"), "NamespaceExport")
? Collections.singletonList(convertChild(node, "exportClause"))
: convertChildren(node.get("exportClause").getAsJsonObject(), "elements");
return new ExportNamedDeclaration(loc, null, specifiers, source, hasTypeKeyword);
return new ExportNamedDeclaration(loc, null, specifiers, source, assertion, hasTypeKeyword);
} else {
return new ExportAllDeclaration(loc, source);
return new ExportAllDeclaration(loc, source, assertion);
}
}
@@ -1383,6 +1389,7 @@ public class TypeScriptASTConverter {
private Node convertImportDeclaration(JsonObject node, SourceLocation loc) throws ParseError {
Literal src = tryConvertChild(node, "moduleSpecifier", Literal.class);
Expression assertion = convertChild(node, "assertClause");
List<ImportSpecifier> specifiers = new ArrayList<>();
boolean hasTypeKeyword = false;
if (hasChild(node, "importClause")) {
@@ -1400,7 +1407,7 @@ public class TypeScriptASTConverter {
}
hasTypeKeyword = importClause.get("isTypeOnly").getAsBoolean();
}
ImportDeclaration importDecl = new ImportDeclaration(loc, specifiers, src, hasTypeKeyword);
ImportDeclaration importDecl = new ImportDeclaration(loc, specifiers, src, assertion, hasTypeKeyword);
attachSymbolInformation(importDecl, node);
return importDecl;
}
@@ -1746,7 +1753,7 @@ public class TypeScriptASTConverter {
if (hasFlag(node, "NestedNamespace")) {
// In a nested namespace declaration `namespace A.B`, the nested namespace `B`
// is implicitly exported.
return new ExportNamedDeclaration(loc, decl, new ArrayList<>(), null);
return new ExportNamedDeclaration(loc, decl, new ArrayList<>(), null, null);
} else {
return fixExports(loc, decl);
}
@@ -2276,6 +2283,29 @@ public class TypeScriptASTConverter {
return new TypeAssertion(loc, convertChild(node, "expression"), type, false);
}
private Node convertAssertClause(JsonObject node, SourceLocation loc) throws ParseError {
List<Property> properties = new ArrayList<>();
for (INode child : convertChildren(node, "elements")) {
properties.add((Property)child);
}
// Adjust location to skip over the `assert` keyword.
Matcher m = ASSERT_START.matcher(loc.getSource());
if (m.find()) {
advance(loc, m.group(0));
}
return new ObjectExpression(loc, properties);
}
private Node convertAssertEntry(JsonObject node, SourceLocation loc) throws ParseError {
return new Property(
loc,
convertChild(node, "key"),
convertChild(node, "value"),
"init",
false,
false);
}
private Node convertSatisfiesExpression(JsonObject node, SourceLocation loc) throws ParseError {
ITypeExpression type = convertChildAsType(node, "type");
return new SatisfiesExpr(loc, convertChild(node, "expression"), type);
@@ -2455,7 +2485,7 @@ public class TypeScriptASTConverter {
advance(loc, skipped);
// capture group 1 is `default`, if present
if (m.group(1) == null)
return new ExportNamedDeclaration(outerLoc, (Statement) decl, new ArrayList<>(), null);
return new ExportNamedDeclaration(outerLoc, (Statement) decl, new ArrayList<>(), null, null);
return new ExportDefaultDeclaration(outerLoc, decl);
}
return decl;

View File

@@ -1,3 +1,6 @@
import("m");
b ? import("n") : {};
import("o").then((o) => {});
import("m",);
import("m",{},);

View File

@@ -0,0 +1,13 @@
import "module" assert { type: "json" };
import * as v1 from "module" assert { type: "json" };
import { v2 } from "module" assert { type: "json" };
import v3 from "module" assert { type: "json" };
export { v4 } from "module" assert { type: "json" };
export * from "module" assert { type: "json" };
export * as v5 from "module" assert { type: "json" };
const v6 = import("module", { assert: { type: "json" } });
import "module" // missing semicolon
assert({type: "json"}); // function call, not import assertion

View File

@@ -27,355 +27,507 @@ lines(#20006,#20001,"import(""o"").then((o) => {});","
#20007=@"loc,{#10000},3,1,3,28"
locations_default(#20007,#10000,3,1,3,28)
hasLocation(#20006,#20007)
numlines(#20001,3,3,0)
#20008=*
tokeninfo(#20008,7,#20001,0,"import")
#20009=@"loc,{#10000},1,1,1,6"
locations_default(#20009,#10000,1,1,1,6)
lines(#20008,#20001,"","
")
#20009=@"loc,{#10000},4,1,4,0"
locations_default(#20009,#10000,4,1,4,0)
hasLocation(#20008,#20009)
#20010=*
tokeninfo(#20010,8,#20001,1,"(")
#20011=@"loc,{#10000},1,7,1,7"
locations_default(#20011,#10000,1,7,1,7)
lines(#20010,#20001,"import(""m"",);","
")
#20011=@"loc,{#10000},5,1,5,13"
locations_default(#20011,#10000,5,1,5,13)
hasLocation(#20010,#20011)
#20012=*
tokeninfo(#20012,4,#20001,2,"""m""")
#20013=@"loc,{#10000},1,8,1,10"
locations_default(#20013,#10000,1,8,1,10)
lines(#20012,#20001,"import(""m"",{},);","
")
#20013=@"loc,{#10000},6,1,6,16"
locations_default(#20013,#10000,6,1,6,16)
hasLocation(#20012,#20013)
numlines(#20001,6,5,0)
#20014=*
tokeninfo(#20014,8,#20001,3,")")
#20015=@"loc,{#10000},1,11,1,11"
locations_default(#20015,#10000,1,11,1,11)
tokeninfo(#20014,7,#20001,0,"import")
#20015=@"loc,{#10000},1,1,1,6"
locations_default(#20015,#10000,1,1,1,6)
hasLocation(#20014,#20015)
#20016=*
tokeninfo(#20016,8,#20001,4,";")
#20017=@"loc,{#10000},1,12,1,12"
locations_default(#20017,#10000,1,12,1,12)
tokeninfo(#20016,8,#20001,1,"(")
#20017=@"loc,{#10000},1,7,1,7"
locations_default(#20017,#10000,1,7,1,7)
hasLocation(#20016,#20017)
#20018=*
tokeninfo(#20018,6,#20001,5,"b")
#20019=@"loc,{#10000},2,1,2,1"
locations_default(#20019,#10000,2,1,2,1)
tokeninfo(#20018,4,#20001,2,"""m""")
#20019=@"loc,{#10000},1,8,1,10"
locations_default(#20019,#10000,1,8,1,10)
hasLocation(#20018,#20019)
#20020=*
tokeninfo(#20020,8,#20001,6,"?")
#20021=@"loc,{#10000},2,3,2,3"
locations_default(#20021,#10000,2,3,2,3)
tokeninfo(#20020,8,#20001,3,")")
#20021=@"loc,{#10000},1,11,1,11"
locations_default(#20021,#10000,1,11,1,11)
hasLocation(#20020,#20021)
#20022=*
tokeninfo(#20022,7,#20001,7,"import")
#20023=@"loc,{#10000},2,5,2,10"
locations_default(#20023,#10000,2,5,2,10)
tokeninfo(#20022,8,#20001,4,";")
#20023=@"loc,{#10000},1,12,1,12"
locations_default(#20023,#10000,1,12,1,12)
hasLocation(#20022,#20023)
#20024=*
tokeninfo(#20024,8,#20001,8,"(")
#20025=@"loc,{#10000},2,11,2,11"
locations_default(#20025,#10000,2,11,2,11)
tokeninfo(#20024,6,#20001,5,"b")
#20025=@"loc,{#10000},2,1,2,1"
locations_default(#20025,#10000,2,1,2,1)
hasLocation(#20024,#20025)
#20026=*
tokeninfo(#20026,4,#20001,9,"""n""")
#20027=@"loc,{#10000},2,12,2,14"
locations_default(#20027,#10000,2,12,2,14)
tokeninfo(#20026,8,#20001,6,"?")
#20027=@"loc,{#10000},2,3,2,3"
locations_default(#20027,#10000,2,3,2,3)
hasLocation(#20026,#20027)
#20028=*
tokeninfo(#20028,8,#20001,10,")")
#20029=@"loc,{#10000},2,15,2,15"
locations_default(#20029,#10000,2,15,2,15)
tokeninfo(#20028,7,#20001,7,"import")
#20029=@"loc,{#10000},2,5,2,10"
locations_default(#20029,#10000,2,5,2,10)
hasLocation(#20028,#20029)
#20030=*
tokeninfo(#20030,8,#20001,11,":")
#20031=@"loc,{#10000},2,17,2,17"
locations_default(#20031,#10000,2,17,2,17)
tokeninfo(#20030,8,#20001,8,"(")
#20031=@"loc,{#10000},2,11,2,11"
locations_default(#20031,#10000,2,11,2,11)
hasLocation(#20030,#20031)
#20032=*
tokeninfo(#20032,8,#20001,12,"{")
#20033=@"loc,{#10000},2,19,2,19"
locations_default(#20033,#10000,2,19,2,19)
tokeninfo(#20032,4,#20001,9,"""n""")
#20033=@"loc,{#10000},2,12,2,14"
locations_default(#20033,#10000,2,12,2,14)
hasLocation(#20032,#20033)
#20034=*
tokeninfo(#20034,8,#20001,13,"}")
#20035=@"loc,{#10000},2,20,2,20"
locations_default(#20035,#10000,2,20,2,20)
tokeninfo(#20034,8,#20001,10,")")
#20035=@"loc,{#10000},2,15,2,15"
locations_default(#20035,#10000,2,15,2,15)
hasLocation(#20034,#20035)
#20036=*
tokeninfo(#20036,8,#20001,14,";")
#20037=@"loc,{#10000},2,21,2,21"
locations_default(#20037,#10000,2,21,2,21)
tokeninfo(#20036,8,#20001,11,":")
#20037=@"loc,{#10000},2,17,2,17"
locations_default(#20037,#10000,2,17,2,17)
hasLocation(#20036,#20037)
#20038=*
tokeninfo(#20038,7,#20001,15,"import")
#20039=@"loc,{#10000},3,1,3,6"
locations_default(#20039,#10000,3,1,3,6)
tokeninfo(#20038,8,#20001,12,"{")
#20039=@"loc,{#10000},2,19,2,19"
locations_default(#20039,#10000,2,19,2,19)
hasLocation(#20038,#20039)
#20040=*
tokeninfo(#20040,8,#20001,16,"(")
#20041=@"loc,{#10000},3,7,3,7"
locations_default(#20041,#10000,3,7,3,7)
tokeninfo(#20040,8,#20001,13,"}")
#20041=@"loc,{#10000},2,20,2,20"
locations_default(#20041,#10000,2,20,2,20)
hasLocation(#20040,#20041)
#20042=*
tokeninfo(#20042,4,#20001,17,"""o""")
#20043=@"loc,{#10000},3,8,3,10"
locations_default(#20043,#10000,3,8,3,10)
tokeninfo(#20042,8,#20001,14,";")
#20043=@"loc,{#10000},2,21,2,21"
locations_default(#20043,#10000,2,21,2,21)
hasLocation(#20042,#20043)
#20044=*
tokeninfo(#20044,8,#20001,18,")")
#20045=@"loc,{#10000},3,11,3,11"
locations_default(#20045,#10000,3,11,3,11)
tokeninfo(#20044,7,#20001,15,"import")
#20045=@"loc,{#10000},3,1,3,6"
locations_default(#20045,#10000,3,1,3,6)
hasLocation(#20044,#20045)
#20046=*
tokeninfo(#20046,8,#20001,19,".")
#20047=@"loc,{#10000},3,12,3,12"
locations_default(#20047,#10000,3,12,3,12)
tokeninfo(#20046,8,#20001,16,"(")
#20047=@"loc,{#10000},3,7,3,7"
locations_default(#20047,#10000,3,7,3,7)
hasLocation(#20046,#20047)
#20048=*
tokeninfo(#20048,6,#20001,20,"then")
#20049=@"loc,{#10000},3,13,3,16"
locations_default(#20049,#10000,3,13,3,16)
tokeninfo(#20048,4,#20001,17,"""o""")
#20049=@"loc,{#10000},3,8,3,10"
locations_default(#20049,#10000,3,8,3,10)
hasLocation(#20048,#20049)
#20050=*
tokeninfo(#20050,8,#20001,21,"(")
#20051=@"loc,{#10000},3,17,3,17"
locations_default(#20051,#10000,3,17,3,17)
tokeninfo(#20050,8,#20001,18,")")
#20051=@"loc,{#10000},3,11,3,11"
locations_default(#20051,#10000,3,11,3,11)
hasLocation(#20050,#20051)
#20052=*
tokeninfo(#20052,8,#20001,22,"(")
#20053=@"loc,{#10000},3,18,3,18"
locations_default(#20053,#10000,3,18,3,18)
tokeninfo(#20052,8,#20001,19,".")
#20053=@"loc,{#10000},3,12,3,12"
locations_default(#20053,#10000,3,12,3,12)
hasLocation(#20052,#20053)
#20054=*
tokeninfo(#20054,6,#20001,23,"o")
#20055=@"loc,{#10000},3,19,3,19"
locations_default(#20055,#10000,3,19,3,19)
tokeninfo(#20054,6,#20001,20,"then")
#20055=@"loc,{#10000},3,13,3,16"
locations_default(#20055,#10000,3,13,3,16)
hasLocation(#20054,#20055)
#20056=*
tokeninfo(#20056,8,#20001,24,")")
#20057=@"loc,{#10000},3,20,3,20"
locations_default(#20057,#10000,3,20,3,20)
tokeninfo(#20056,8,#20001,21,"(")
#20057=@"loc,{#10000},3,17,3,17"
locations_default(#20057,#10000,3,17,3,17)
hasLocation(#20056,#20057)
#20058=*
tokeninfo(#20058,8,#20001,25,"=>")
#20059=@"loc,{#10000},3,22,3,23"
locations_default(#20059,#10000,3,22,3,23)
tokeninfo(#20058,8,#20001,22,"(")
#20059=@"loc,{#10000},3,18,3,18"
locations_default(#20059,#10000,3,18,3,18)
hasLocation(#20058,#20059)
#20060=*
tokeninfo(#20060,8,#20001,26,"{")
#20061=@"loc,{#10000},3,25,3,25"
locations_default(#20061,#10000,3,25,3,25)
tokeninfo(#20060,6,#20001,23,"o")
#20061=@"loc,{#10000},3,19,3,19"
locations_default(#20061,#10000,3,19,3,19)
hasLocation(#20060,#20061)
#20062=*
tokeninfo(#20062,8,#20001,27,"}")
#20063=@"loc,{#10000},3,26,3,26"
locations_default(#20063,#10000,3,26,3,26)
tokeninfo(#20062,8,#20001,24,")")
#20063=@"loc,{#10000},3,20,3,20"
locations_default(#20063,#10000,3,20,3,20)
hasLocation(#20062,#20063)
#20064=*
tokeninfo(#20064,8,#20001,28,")")
#20065=@"loc,{#10000},3,27,3,27"
locations_default(#20065,#10000,3,27,3,27)
tokeninfo(#20064,8,#20001,25,"=>")
#20065=@"loc,{#10000},3,22,3,23"
locations_default(#20065,#10000,3,22,3,23)
hasLocation(#20064,#20065)
#20066=*
tokeninfo(#20066,8,#20001,29,";")
#20067=@"loc,{#10000},3,28,3,28"
locations_default(#20067,#10000,3,28,3,28)
tokeninfo(#20066,8,#20001,26,"{")
#20067=@"loc,{#10000},3,25,3,25"
locations_default(#20067,#10000,3,25,3,25)
hasLocation(#20066,#20067)
#20068=*
tokeninfo(#20068,0,#20001,30,"")
#20069=@"loc,{#10000},4,1,4,0"
locations_default(#20069,#10000,4,1,4,0)
tokeninfo(#20068,8,#20001,27,"}")
#20069=@"loc,{#10000},3,26,3,26"
locations_default(#20069,#10000,3,26,3,26)
hasLocation(#20068,#20069)
#20070=*
tokeninfo(#20070,8,#20001,28,")")
#20071=@"loc,{#10000},3,27,3,27"
locations_default(#20071,#10000,3,27,3,27)
hasLocation(#20070,#20071)
#20072=*
tokeninfo(#20072,8,#20001,29,";")
#20073=@"loc,{#10000},3,28,3,28"
locations_default(#20073,#10000,3,28,3,28)
hasLocation(#20072,#20073)
#20074=*
tokeninfo(#20074,7,#20001,30,"import")
#20075=@"loc,{#10000},5,1,5,6"
locations_default(#20075,#10000,5,1,5,6)
hasLocation(#20074,#20075)
#20076=*
tokeninfo(#20076,8,#20001,31,"(")
#20077=@"loc,{#10000},5,7,5,7"
locations_default(#20077,#10000,5,7,5,7)
hasLocation(#20076,#20077)
#20078=*
tokeninfo(#20078,4,#20001,32,"""m""")
#20079=@"loc,{#10000},5,8,5,10"
locations_default(#20079,#10000,5,8,5,10)
hasLocation(#20078,#20079)
#20080=*
tokeninfo(#20080,8,#20001,33,",")
#20081=@"loc,{#10000},5,11,5,11"
locations_default(#20081,#10000,5,11,5,11)
hasLocation(#20080,#20081)
#20082=*
tokeninfo(#20082,8,#20001,34,")")
#20083=@"loc,{#10000},5,12,5,12"
locations_default(#20083,#10000,5,12,5,12)
hasLocation(#20082,#20083)
#20084=*
tokeninfo(#20084,8,#20001,35,";")
#20085=@"loc,{#10000},5,13,5,13"
locations_default(#20085,#10000,5,13,5,13)
hasLocation(#20084,#20085)
#20086=*
tokeninfo(#20086,7,#20001,36,"import")
#20087=@"loc,{#10000},6,1,6,6"
locations_default(#20087,#10000,6,1,6,6)
hasLocation(#20086,#20087)
#20088=*
tokeninfo(#20088,8,#20001,37,"(")
#20089=@"loc,{#10000},6,7,6,7"
locations_default(#20089,#10000,6,7,6,7)
hasLocation(#20088,#20089)
#20090=*
tokeninfo(#20090,4,#20001,38,"""m""")
#20091=@"loc,{#10000},6,8,6,10"
locations_default(#20091,#10000,6,8,6,10)
hasLocation(#20090,#20091)
#20092=*
tokeninfo(#20092,8,#20001,39,",")
#20093=@"loc,{#10000},6,11,6,11"
locations_default(#20093,#10000,6,11,6,11)
hasLocation(#20092,#20093)
#20094=*
tokeninfo(#20094,8,#20001,40,"{")
#20095=@"loc,{#10000},6,12,6,12"
locations_default(#20095,#10000,6,12,6,12)
hasLocation(#20094,#20095)
#20096=*
tokeninfo(#20096,8,#20001,41,"}")
#20097=@"loc,{#10000},6,13,6,13"
locations_default(#20097,#10000,6,13,6,13)
hasLocation(#20096,#20097)
#20098=*
tokeninfo(#20098,8,#20001,42,",")
#20099=@"loc,{#10000},6,14,6,14"
locations_default(#20099,#10000,6,14,6,14)
hasLocation(#20098,#20099)
#20100=*
tokeninfo(#20100,8,#20001,43,")")
#20101=@"loc,{#10000},6,15,6,15"
locations_default(#20101,#10000,6,15,6,15)
hasLocation(#20100,#20101)
#20102=*
tokeninfo(#20102,8,#20001,44,";")
#20103=@"loc,{#10000},6,16,6,16"
locations_default(#20103,#10000,6,16,6,16)
hasLocation(#20102,#20103)
#20104=*
tokeninfo(#20104,0,#20001,45,"")
#20105=@"loc,{#10000},7,1,7,0"
locations_default(#20105,#10000,7,1,7,0)
hasLocation(#20104,#20105)
toplevels(#20001,0)
#20070=@"loc,{#10000},1,1,4,0"
locations_default(#20070,#10000,1,1,4,0)
hasLocation(#20001,#20070)
#20071=@"module;{#10000},1,1"
scopes(#20071,3)
scopenodes(#20001,#20071)
scopenesting(#20071,#20000)
#20106=@"loc,{#10000},1,1,7,0"
locations_default(#20106,#10000,1,1,7,0)
hasLocation(#20001,#20106)
#20107=@"module;{#10000},1,1"
scopes(#20107,3)
scopenodes(#20001,#20107)
scopenesting(#20107,#20000)
is_module(#20001)
is_es2015_module(#20001)
#20072=*
stmts(#20072,2,#20001,0,"import(""m"");")
hasLocation(#20072,#20003)
stmt_containers(#20072,#20001)
#20073=*
exprs(#20073,99,#20072,0,"import(""m"")")
#20074=@"loc,{#10000},1,1,1,11"
locations_default(#20074,#10000,1,1,1,11)
hasLocation(#20073,#20074)
enclosing_stmt(#20073,#20072)
expr_containers(#20073,#20001)
#20075=*
exprs(#20075,4,#20073,0,"""m""")
hasLocation(#20075,#20013)
enclosing_stmt(#20075,#20072)
expr_containers(#20075,#20001)
literals("m","""m""",#20075)
#20076=*
regexpterm(#20076,14,#20075,0,"m")
#20077=@"loc,{#10000},1,9,1,9"
locations_default(#20077,#10000,1,9,1,9)
hasLocation(#20076,#20077)
regexp_const_value(#20076,"m")
#20078=*
stmts(#20078,2,#20001,1,"b ? imp ... ) : {};")
hasLocation(#20078,#20005)
stmt_containers(#20078,#20001)
#20079=*
exprs(#20079,11,#20078,0,"b ? import(""n"") : {}")
#20080=@"loc,{#10000},2,1,2,20"
locations_default(#20080,#10000,2,1,2,20)
hasLocation(#20079,#20080)
enclosing_stmt(#20079,#20078)
expr_containers(#20079,#20001)
#20081=*
exprs(#20081,79,#20079,0,"b")
hasLocation(#20081,#20019)
enclosing_stmt(#20081,#20078)
expr_containers(#20081,#20001)
literals("b","b",#20081)
#20082=@"var;{b};{#20000}"
variables(#20082,"b",#20000)
bind(#20081,#20082)
#20083=*
exprs(#20083,99,#20079,1,"import(""n"")")
#20084=@"loc,{#10000},2,5,2,15"
locations_default(#20084,#10000,2,5,2,15)
hasLocation(#20083,#20084)
enclosing_stmt(#20083,#20078)
expr_containers(#20083,#20001)
#20085=*
exprs(#20085,4,#20083,0,"""n""")
hasLocation(#20085,#20027)
enclosing_stmt(#20085,#20078)
expr_containers(#20085,#20001)
literals("n","""n""",#20085)
#20086=*
regexpterm(#20086,14,#20085,0,"n")
#20087=@"loc,{#10000},2,13,2,13"
locations_default(#20087,#10000,2,13,2,13)
hasLocation(#20086,#20087)
regexp_const_value(#20086,"n")
#20088=*
exprs(#20088,8,#20079,2,"{}")
#20089=@"loc,{#10000},2,19,2,20"
locations_default(#20089,#10000,2,19,2,20)
hasLocation(#20088,#20089)
enclosing_stmt(#20088,#20078)
expr_containers(#20088,#20001)
#20090=*
stmts(#20090,2,#20001,2,"import( ... => {});")
hasLocation(#20090,#20007)
stmt_containers(#20090,#20001)
#20091=*
exprs(#20091,13,#20090,0,"import( ... => {})")
#20092=@"loc,{#10000},3,1,3,27"
locations_default(#20092,#10000,3,1,3,27)
hasLocation(#20091,#20092)
enclosing_stmt(#20091,#20090)
expr_containers(#20091,#20001)
#20093=*
exprs(#20093,14,#20091,-1,"import(""o"").then")
#20094=@"loc,{#10000},3,1,3,16"
locations_default(#20094,#10000,3,1,3,16)
hasLocation(#20093,#20094)
enclosing_stmt(#20093,#20090)
expr_containers(#20093,#20001)
#20095=*
exprs(#20095,99,#20093,0,"import(""o"")")
#20096=@"loc,{#10000},3,1,3,11"
locations_default(#20096,#10000,3,1,3,11)
hasLocation(#20095,#20096)
enclosing_stmt(#20095,#20090)
expr_containers(#20095,#20001)
#20097=*
exprs(#20097,4,#20095,0,"""o""")
hasLocation(#20097,#20043)
enclosing_stmt(#20097,#20090)
expr_containers(#20097,#20001)
literals("o","""o""",#20097)
#20098=*
regexpterm(#20098,14,#20097,0,"o")
#20099=@"loc,{#10000},3,9,3,9"
locations_default(#20099,#10000,3,9,3,9)
hasLocation(#20098,#20099)
regexp_const_value(#20098,"o")
#20100=*
exprs(#20100,0,#20093,1,"then")
hasLocation(#20100,#20049)
enclosing_stmt(#20100,#20090)
expr_containers(#20100,#20001)
literals("then","then",#20100)
#20101=*
exprs(#20101,65,#20091,0,"(o) => {}")
#20102=@"loc,{#10000},3,18,3,26"
locations_default(#20102,#10000,3,18,3,26)
hasLocation(#20101,#20102)
enclosing_stmt(#20101,#20090)
expr_containers(#20101,#20001)
#20103=*
scopes(#20103,1)
scopenodes(#20101,#20103)
scopenesting(#20103,#20071)
#20104=@"var;{o};{#20103}"
variables(#20104,"o",#20103)
#20105=*
exprs(#20105,78,#20101,0,"o")
hasLocation(#20105,#20055)
expr_containers(#20105,#20101)
literals("o","o",#20105)
decl(#20105,#20104)
#20106=*
stmts(#20106,1,#20101,-2,"{}")
#20107=@"loc,{#10000},3,25,3,26"
locations_default(#20107,#10000,3,25,3,26)
hasLocation(#20106,#20107)
stmt_containers(#20106,#20101)
#20108=*
entry_cfg_node(#20108,#20001)
#20109=@"loc,{#10000},1,1,1,0"
locations_default(#20109,#10000,1,1,1,0)
hasLocation(#20108,#20109)
#20110=*
exit_cfg_node(#20110,#20001)
hasLocation(#20110,#20069)
successor(#20090,#20097)
successor(#20101,#20091)
stmts(#20108,2,#20001,0,"import(""m"");")
hasLocation(#20108,#20003)
stmt_containers(#20108,#20001)
#20109=*
exprs(#20109,99,#20108,0,"import(""m"")")
#20110=@"loc,{#10000},1,1,1,11"
locations_default(#20110,#10000,1,1,1,11)
hasLocation(#20109,#20110)
enclosing_stmt(#20109,#20108)
expr_containers(#20109,#20001)
#20111=*
entry_cfg_node(#20111,#20101)
#20112=@"loc,{#10000},3,18,3,17"
locations_default(#20112,#10000,3,18,3,17)
hasLocation(#20111,#20112)
#20113=*
exit_cfg_node(#20113,#20101)
#20114=@"loc,{#10000},3,27,3,26"
locations_default(#20114,#10000,3,27,3,26)
hasLocation(#20113,#20114)
successor(#20106,#20113)
successor(#20105,#20106)
successor(#20111,#20105)
successor(#20100,#20093)
successor(#20097,#20095)
successor(#20095,#20100)
successor(#20093,#20101)
successor(#20091,#20110)
successor(#20078,#20079)
successor(#20079,#20081)
exprs(#20111,4,#20109,0,"""m""")
hasLocation(#20111,#20019)
enclosing_stmt(#20111,#20108)
expr_containers(#20111,#20001)
literals("m","""m""",#20111)
#20112=*
regexpterm(#20112,14,#20111,0,"m")
#20113=@"loc,{#10000},1,9,1,9"
locations_default(#20113,#10000,1,9,1,9)
hasLocation(#20112,#20113)
regexp_const_value(#20112,"m")
#20114=*
stmts(#20114,2,#20001,1,"b ? imp ... ) : {};")
hasLocation(#20114,#20005)
stmt_containers(#20114,#20001)
#20115=*
guard_node(#20115,1,#20081)
hasLocation(#20115,#20019)
successor(#20115,#20085)
#20116=*
guard_node(#20116,0,#20081)
hasLocation(#20116,#20019)
successor(#20116,#20088)
successor(#20081,#20115)
successor(#20081,#20116)
successor(#20085,#20083)
successor(#20083,#20090)
successor(#20088,#20090)
successor(#20072,#20075)
successor(#20075,#20073)
successor(#20073,#20078)
successor(#20108,#20072)
numlines(#10000,3,3,0)
exprs(#20115,11,#20114,0,"b ? import(""n"") : {}")
#20116=@"loc,{#10000},2,1,2,20"
locations_default(#20116,#10000,2,1,2,20)
hasLocation(#20115,#20116)
enclosing_stmt(#20115,#20114)
expr_containers(#20115,#20001)
#20117=*
exprs(#20117,79,#20115,0,"b")
hasLocation(#20117,#20025)
enclosing_stmt(#20117,#20114)
expr_containers(#20117,#20001)
literals("b","b",#20117)
#20118=@"var;{b};{#20000}"
variables(#20118,"b",#20000)
bind(#20117,#20118)
#20119=*
exprs(#20119,99,#20115,1,"import(""n"")")
#20120=@"loc,{#10000},2,5,2,15"
locations_default(#20120,#10000,2,5,2,15)
hasLocation(#20119,#20120)
enclosing_stmt(#20119,#20114)
expr_containers(#20119,#20001)
#20121=*
exprs(#20121,4,#20119,0,"""n""")
hasLocation(#20121,#20033)
enclosing_stmt(#20121,#20114)
expr_containers(#20121,#20001)
literals("n","""n""",#20121)
#20122=*
regexpterm(#20122,14,#20121,0,"n")
#20123=@"loc,{#10000},2,13,2,13"
locations_default(#20123,#10000,2,13,2,13)
hasLocation(#20122,#20123)
regexp_const_value(#20122,"n")
#20124=*
exprs(#20124,8,#20115,2,"{}")
#20125=@"loc,{#10000},2,19,2,20"
locations_default(#20125,#10000,2,19,2,20)
hasLocation(#20124,#20125)
enclosing_stmt(#20124,#20114)
expr_containers(#20124,#20001)
#20126=*
stmts(#20126,2,#20001,2,"import( ... => {});")
hasLocation(#20126,#20007)
stmt_containers(#20126,#20001)
#20127=*
exprs(#20127,13,#20126,0,"import( ... => {})")
#20128=@"loc,{#10000},3,1,3,27"
locations_default(#20128,#10000,3,1,3,27)
hasLocation(#20127,#20128)
enclosing_stmt(#20127,#20126)
expr_containers(#20127,#20001)
#20129=*
exprs(#20129,14,#20127,-1,"import(""o"").then")
#20130=@"loc,{#10000},3,1,3,16"
locations_default(#20130,#10000,3,1,3,16)
hasLocation(#20129,#20130)
enclosing_stmt(#20129,#20126)
expr_containers(#20129,#20001)
#20131=*
exprs(#20131,99,#20129,0,"import(""o"")")
#20132=@"loc,{#10000},3,1,3,11"
locations_default(#20132,#10000,3,1,3,11)
hasLocation(#20131,#20132)
enclosing_stmt(#20131,#20126)
expr_containers(#20131,#20001)
#20133=*
exprs(#20133,4,#20131,0,"""o""")
hasLocation(#20133,#20049)
enclosing_stmt(#20133,#20126)
expr_containers(#20133,#20001)
literals("o","""o""",#20133)
#20134=*
regexpterm(#20134,14,#20133,0,"o")
#20135=@"loc,{#10000},3,9,3,9"
locations_default(#20135,#10000,3,9,3,9)
hasLocation(#20134,#20135)
regexp_const_value(#20134,"o")
#20136=*
exprs(#20136,0,#20129,1,"then")
hasLocation(#20136,#20055)
enclosing_stmt(#20136,#20126)
expr_containers(#20136,#20001)
literals("then","then",#20136)
#20137=*
exprs(#20137,65,#20127,0,"(o) => {}")
#20138=@"loc,{#10000},3,18,3,26"
locations_default(#20138,#10000,3,18,3,26)
hasLocation(#20137,#20138)
enclosing_stmt(#20137,#20126)
expr_containers(#20137,#20001)
#20139=*
scopes(#20139,1)
scopenodes(#20137,#20139)
scopenesting(#20139,#20107)
#20140=@"var;{o};{#20139}"
variables(#20140,"o",#20139)
#20141=*
exprs(#20141,78,#20137,0,"o")
hasLocation(#20141,#20061)
expr_containers(#20141,#20137)
literals("o","o",#20141)
decl(#20141,#20140)
#20142=*
stmts(#20142,1,#20137,-2,"{}")
#20143=@"loc,{#10000},3,25,3,26"
locations_default(#20143,#10000,3,25,3,26)
hasLocation(#20142,#20143)
stmt_containers(#20142,#20137)
#20144=*
stmts(#20144,2,#20001,3,"import(""m"",);")
hasLocation(#20144,#20011)
stmt_containers(#20144,#20001)
#20145=*
exprs(#20145,99,#20144,0,"import(""m"",)")
#20146=@"loc,{#10000},5,1,5,12"
locations_default(#20146,#10000,5,1,5,12)
hasLocation(#20145,#20146)
enclosing_stmt(#20145,#20144)
expr_containers(#20145,#20001)
#20147=*
exprs(#20147,4,#20145,0,"""m""")
hasLocation(#20147,#20079)
enclosing_stmt(#20147,#20144)
expr_containers(#20147,#20001)
literals("m","""m""",#20147)
#20148=*
regexpterm(#20148,14,#20147,0,"m")
#20149=@"loc,{#10000},5,9,5,9"
locations_default(#20149,#10000,5,9,5,9)
hasLocation(#20148,#20149)
regexp_const_value(#20148,"m")
#20150=*
stmts(#20150,2,#20001,4,"import(""m"",{},);")
hasLocation(#20150,#20013)
stmt_containers(#20150,#20001)
#20151=*
exprs(#20151,99,#20150,0,"import(""m"",{},)")
#20152=@"loc,{#10000},6,1,6,15"
locations_default(#20152,#10000,6,1,6,15)
hasLocation(#20151,#20152)
enclosing_stmt(#20151,#20150)
expr_containers(#20151,#20001)
#20153=*
exprs(#20153,4,#20151,0,"""m""")
hasLocation(#20153,#20091)
enclosing_stmt(#20153,#20150)
expr_containers(#20153,#20001)
literals("m","""m""",#20153)
#20154=*
regexpterm(#20154,14,#20153,0,"m")
#20155=@"loc,{#10000},6,9,6,9"
locations_default(#20155,#10000,6,9,6,9)
hasLocation(#20154,#20155)
regexp_const_value(#20154,"m")
#20156=*
exprs(#20156,8,#20151,1,"{}")
#20157=@"loc,{#10000},6,12,6,13"
locations_default(#20157,#10000,6,12,6,13)
hasLocation(#20156,#20157)
enclosing_stmt(#20156,#20150)
expr_containers(#20156,#20001)
#20158=*
entry_cfg_node(#20158,#20001)
#20159=@"loc,{#10000},1,1,1,0"
locations_default(#20159,#10000,1,1,1,0)
hasLocation(#20158,#20159)
#20160=*
exit_cfg_node(#20160,#20001)
hasLocation(#20160,#20105)
successor(#20150,#20153)
successor(#20153,#20151)
successor(#20151,#20160)
successor(#20144,#20147)
successor(#20147,#20145)
successor(#20145,#20150)
successor(#20126,#20133)
successor(#20137,#20127)
#20161=*
entry_cfg_node(#20161,#20137)
#20162=@"loc,{#10000},3,18,3,17"
locations_default(#20162,#10000,3,18,3,17)
hasLocation(#20161,#20162)
#20163=*
exit_cfg_node(#20163,#20137)
#20164=@"loc,{#10000},3,27,3,26"
locations_default(#20164,#10000,3,27,3,26)
hasLocation(#20163,#20164)
successor(#20142,#20163)
successor(#20141,#20142)
successor(#20161,#20141)
successor(#20136,#20129)
successor(#20133,#20131)
successor(#20131,#20136)
successor(#20129,#20137)
successor(#20127,#20144)
successor(#20114,#20115)
successor(#20115,#20117)
#20165=*
guard_node(#20165,1,#20117)
hasLocation(#20165,#20025)
successor(#20165,#20121)
#20166=*
guard_node(#20166,0,#20117)
hasLocation(#20166,#20025)
successor(#20166,#20124)
successor(#20117,#20165)
successor(#20117,#20166)
successor(#20121,#20119)
successor(#20119,#20126)
successor(#20124,#20126)
successor(#20108,#20111)
successor(#20111,#20109)
successor(#20109,#20114)
successor(#20158,#20108)
numlines(#10000,6,5,0)
filetype(#10000,"javascript")

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,13 @@
import "module" assert { type: "json" };
import * as v1 from "module" assert { type: "json" };
import { v2 } from "module" assert { type: "json" };
import v3 from "module" assert { type: "json" };
export { v4 } from "module" assert { type: "json" };
export * from "module" assert { type: "json" };
export * as v5 from "module" assert { type: "json" };
const v6 = import("module", { assert: { type: "json" } });
import "module"; // missing semicolon
assert({ type: "json" }); // function call, not import assertion

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,5 @@
---
category: minorAnalysis
---
* [Import assertions](https://github.com/tc39/proposal-import-assertions) are now supported.
Previously this feature was only supported in TypeScript code, but is now supported for plain JavaScript as well and is also accessible in the AST.

View File

@@ -90,6 +90,16 @@ class ImportDeclaration extends Stmt, Import, @import_declaration {
override PathExpr getImportedPath() { result = getChildExpr(-1) }
/**
* Gets the object literal passed as part of the `assert` clause in this import declaration.
*
* For example, this gets the `{ type: "json" }` object literal in the following:
* ```js
* import foo from "foo" assert { type: "json" };
* ```
*/
ObjectExpr getImportAssertion() { result = this.getChildExpr(-10) }
/** Gets the `i`th import specifier of this import declaration. */
ImportSpecifier getSpecifier(int i) { result = getChildExpr(i) }
@@ -310,6 +320,19 @@ abstract class ExportDeclaration extends Stmt, @export_declaration {
predicate isTypeOnly() { has_type_keyword(this) }
override string getAPrimaryQlClass() { result = "ExportDeclaration" }
/**
* Gets the object literal passed as part of the `assert` clause, if this is
* a re-export declaration.
*
* For example, this gets the `{ type: "json" }` expression in each of the following:
* ```js
* export { x } from 'foo' assert { type: "json" };
* export * from 'foo' assert { type: "json" };
* export * as x from 'foo' assert { type: "json" };
* ```
*/
ObjectExpr getImportAssertion() { result = this.getChildExpr(-10) }
}
/**

View File

@@ -2807,6 +2807,7 @@ class FunctionBindExpr extends @bind_expr, Expr {
*
* ```
* import("fs")
* import("foo", { assert: { type: "json" }})
* ```
*/
class DynamicImportExpr extends @dynamic_import, Expr, Import {
@@ -2819,6 +2820,16 @@ class DynamicImportExpr extends @dynamic_import, Expr, Import {
override PathExpr getImportedPath() { result = this.getSource() }
/**
* Gets the second "argument" to the import expression, that is, the `Y` in `import(X, Y)`.
*
* For example, gets the `{ assert: { type: "json" }}` expression in the following:
* ```js
* import('foo', { assert: { type: "json" }})
* ```
*/
Expr getImportAttributes() { result = this.getChildExpr(1) }
override Module getEnclosingModule() { result = this.getTopLevel() }
override DataFlow::Node getImportedModuleNode() { result = DataFlow::valueNode(this) }

View File

@@ -775,7 +775,7 @@ private class ReactRouterLocationSource extends DOM::LocationSource::Range {
private DataFlow::SourceNode higherOrderComponentBuilder() {
// `memo(f)` returns a function that behaves as `f` but caches results
// It is sometimes used to wrap an entire functional component.
result = react().getAPropertyRead("memo")
result = react().getAPropertyRead(["memo", "forwardRef"])
or
result = DataFlow::moduleMember("react-redux", "connect").getACall()
or

View File

@@ -25,8 +25,7 @@ module TrustedTypes {
/** Gets the function passed as the given option. */
DataFlow::FunctionNode getPolicyCallback(string method) {
// Require local callback to avoid potential call/return mismatch in the uses below
result = getOptionArgument(1, method).getALocalSource()
result = getParameter(1).getMember(method).getAValueReachingSink()
}
}

View File

@@ -0,0 +1,13 @@
import "module" assert { type: "json" };
import * as v1 from "module" assert { type: "json" };
import { v2 } from "module" assert { type: "json" };
import v3 from "module" assert { type: "json" };
export { v4 } from "module" assert { type: "json" };
export * from "module" assert { type: "json" };
export * as v5 from "module" assert { type: "json" };
const v6 = import("module", { assert: { type: "json" } });
import "module" // missing semicolon
assert({type: "json"}); // function call, not import assertion

View File

@@ -0,0 +1,20 @@
getImportAssertionFromImport
| js-import-assertions.js:1:1:1:40 | import ... son" }; | js-import-assertions.js:1:24:1:39 | { type: "json" } |
| js-import-assertions.js:2:1:2:53 | import ... son" }; | js-import-assertions.js:2:37:2:52 | { type: "json" } |
| js-import-assertions.js:3:1:3:52 | import ... son" }; | js-import-assertions.js:3:36:3:51 | { type: "json" } |
| js-import-assertions.js:4:1:4:48 | import ... son" }; | js-import-assertions.js:4:32:4:47 | { type: "json" } |
| ts-import-assertions.ts:3:1:3:40 | import ... son" }; | ts-import-assertions.ts:3:24:3:39 | { type: "json" } |
| ts-import-assertions.ts:4:1:4:53 | import ... son" }; | ts-import-assertions.ts:4:37:4:52 | { type: "json" } |
| ts-import-assertions.ts:5:1:5:52 | import ... son" }; | ts-import-assertions.ts:5:36:5:51 | { type: "json" } |
| ts-import-assertions.ts:6:1:6:48 | import ... son" }; | ts-import-assertions.ts:6:32:6:47 | { type: "json" } |
getImportAssertionFromExport
| js-import-assertions.js:6:1:6:52 | export ... son" }; | js-import-assertions.js:6:36:6:51 | { type: "json" } |
| js-import-assertions.js:7:1:7:47 | export ... son" }; | js-import-assertions.js:7:31:7:46 | { type: "json" } |
| js-import-assertions.js:8:1:8:53 | export ... son" }; | js-import-assertions.js:8:37:8:52 | { type: "json" } |
| ts-import-assertions.ts:8:1:8:52 | export ... son" }; | ts-import-assertions.ts:8:36:8:51 | { type: "json" } |
| ts-import-assertions.ts:9:1:9:47 | export ... son" }; | ts-import-assertions.ts:9:31:9:46 | { type: "json" } |
| ts-import-assertions.ts:10:1:10:53 | export ... son" }; | ts-import-assertions.ts:10:37:10:52 | { type: "json" } |
getImportAttributes
| js-import-assertions.js:10:12:10:57 | import( ... n" } }) | js-import-assertions.js:10:29:10:56 | { asser ... on" } } |
| ts-import-assertions.ts:12:12:12:57 | import( ... n" } }) | ts-import-assertions.ts:12:29:12:56 | { asser ... on" } } |
errors

View File

@@ -0,0 +1,13 @@
import javascript
// Extracts the `assert { ... }` object literal of a static `import` declaration,
// e.g. the `{ type: "json" }` in `import v from "m" assert { type: "json" };`.
query Expr getImportAssertionFromImport(ImportDeclaration decl) {
result = decl.getImportAssertion()
}
// Extracts the `assert { ... }` object literal of a re-export declaration,
// e.g. `export * from "m" assert { type: "json" };`.
query Expr getImportAssertionFromExport(ExportDeclaration decl) {
result = decl.getImportAssertion()
}
// Extracts the second "argument" `Y` of a dynamic `import(X, Y)` expression.
query Expr getImportAttributes(DynamicImportExpr imprt) { result = imprt.getImportAttributes() }
// Surfaces every parse error so the test fails loudly if a fixture is malformed.
query JSParseError errors() { any() }

View File

@@ -0,0 +1,15 @@
// TypeScript
import "module" assert { type: "json" };
import * as v1 from "module" assert { type: "json" };
import { v2 } from "module" assert { type: "json" };
import v3 from "module" assert { type: "json" };
export { v4 } from "module" assert { type: "json" };
export * from "module" assert { type: "json" };
export * as v5 from "module" assert { type: "json" };
const v6 = import("module", { assert: { type: "json" } });
import "module" // missing semicolon
assert({ type: "json" }); // function call, not import assertion

View File

@@ -949,6 +949,8 @@ nodes
| tst.ts:237:8:237:16 | [ImportSpecifier] * as Foo3 | semmle.label | [ImportSpecifier] * as Foo3 |
| tst.ts:237:13:237:16 | [VarDecl] Foo3 | semmle.label | [VarDecl] Foo3 |
| tst.ts:237:23:237:40 | [Literal] "./something.json" | semmle.label | [Literal] "./something.json" |
| tst.ts:237:49:237:64 | [ObjectExpr] { type: "json" } | semmle.label | [ObjectExpr] { type: "json" } |
| tst.ts:237:51:237:62 | [Property] type: "json" | semmle.label | [Property] type: "json" |
| tst.ts:238:1:238:19 | [DeclStmt] var foo = ... | semmle.label | [DeclStmt] var foo = ... |
| tst.ts:238:1:238:19 | [DeclStmt] var foo = ... | semmle.order | 59 |
| tst.ts:238:5:238:7 | [VarDecl] foo | semmle.label | [VarDecl] foo |
@@ -3461,8 +3463,12 @@ edges
| tst.ts:237:1:237:65 | [ImportDeclaration] import ... son" }; | tst.ts:237:8:237:16 | [ImportSpecifier] * as Foo3 | semmle.order | 1 |
| tst.ts:237:1:237:65 | [ImportDeclaration] import ... son" }; | tst.ts:237:23:237:40 | [Literal] "./something.json" | semmle.label | 2 |
| tst.ts:237:1:237:65 | [ImportDeclaration] import ... son" }; | tst.ts:237:23:237:40 | [Literal] "./something.json" | semmle.order | 2 |
| tst.ts:237:1:237:65 | [ImportDeclaration] import ... son" }; | tst.ts:237:49:237:64 | [ObjectExpr] { type: "json" } | semmle.label | 3 |
| tst.ts:237:1:237:65 | [ImportDeclaration] import ... son" }; | tst.ts:237:49:237:64 | [ObjectExpr] { type: "json" } | semmle.order | 3 |
| tst.ts:237:8:237:16 | [ImportSpecifier] * as Foo3 | tst.ts:237:13:237:16 | [VarDecl] Foo3 | semmle.label | 1 |
| tst.ts:237:8:237:16 | [ImportSpecifier] * as Foo3 | tst.ts:237:13:237:16 | [VarDecl] Foo3 | semmle.order | 1 |
| tst.ts:237:49:237:64 | [ObjectExpr] { type: "json" } | tst.ts:237:51:237:62 | [Property] type: "json" | semmle.label | 1 |
| tst.ts:237:49:237:64 | [ObjectExpr] { type: "json" } | tst.ts:237:51:237:62 | [Property] type: "json" | semmle.order | 1 |
| tst.ts:238:1:238:19 | [DeclStmt] var foo = ... | tst.ts:238:5:238:18 | [VariableDeclarator] foo = Foo3.foo | semmle.label | 1 |
| tst.ts:238:1:238:19 | [DeclStmt] var foo = ... | tst.ts:238:5:238:18 | [VariableDeclarator] foo = Foo3.foo | semmle.order | 1 |
| tst.ts:238:5:238:18 | [VariableDeclarator] foo = Foo3.foo | tst.ts:238:5:238:7 | [VarDecl] foo | semmle.label | 1 |

View File

@@ -1,4 +1,4 @@
import { memo } from 'react';
import { memo, forwardRef } from 'react';
import { connect } from 'react-redux';
import { compose } from 'redux';
import styled from 'styled-components';
@@ -25,4 +25,4 @@ const ConnectedComponent = compose(withConnect, unknownFunction)(StyledComponent
const ConnectedComponent2 = withState('counter', 'setCounter', 0)(ConnectedComponent);
export default hot(module)(memo(ConnectedComponent2));
export default hot(module)(memo(forwardRef(ConnectedComponent2)));

View File

@@ -689,14 +689,22 @@ nodes
| translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:50 | searchP ... 'term') |
| trusted-types.js:2:66:2:66 | x |
| trusted-types.js:2:66:2:66 | x |
| trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:71:2:71 | x |
| trusted-types.js:3:24:3:34 | window.name |
| trusted-types.js:3:24:3:34 | window.name |
| trusted-types.js:3:24:3:34 | window.name |
| trusted-types-lib.js:1:28:1:28 | x |
| trusted-types-lib.js:1:28:1:28 | x |
| trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:2:12:2:12 | x |
| trusted-types.js:3:62:3:62 | x |
| trusted-types.js:3:62:3:62 | x |
| trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:67:3:67 | x |
| trusted-types.js:4:20:4:30 | window.name |
| trusted-types.js:4:20:4:30 | window.name |
| trusted-types.js:4:20:4:30 | window.name |
| trusted-types.js:13:20:13:30 | window.name |
| trusted-types.js:13:20:13:30 | window.name |
| trusted-types.js:13:20:13:30 | window.name |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) |
| tst3.js:2:23:2:74 | decodeU ... str(1)) |
| tst3.js:2:42:2:63 | window. ... .search |
@@ -1818,14 +1826,22 @@ edges
| translate.js:9:27:9:38 | searchParams | translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:38 | searchParams | translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:38 | searchParams | translate.js:9:27:9:50 | searchP ... 'term') |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) | tst3.js:4:25:4:28 | data |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) | tst3.js:5:26:5:29 | data |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) | tst3.js:7:32:7:35 | data |
@@ -2382,7 +2398,8 @@ edges
| tooltip.jsx:10:25:10:30 | source | tooltip.jsx:6:20:6:30 | window.name | tooltip.jsx:10:25:10:30 | source | Cross-site scripting vulnerability due to $@. | tooltip.jsx:6:20:6:30 | window.name | user-provided value |
| tooltip.jsx:11:25:11:30 | source | tooltip.jsx:6:20:6:30 | window.name | tooltip.jsx:11:25:11:30 | source | Cross-site scripting vulnerability due to $@. | tooltip.jsx:6:20:6:30 | window.name | user-provided value |
| translate.js:9:27:9:50 | searchP ... 'term') | translate.js:6:16:6:39 | documen ... .search | translate.js:9:27:9:50 | searchP ... 'term') | Cross-site scripting vulnerability due to $@. | translate.js:6:16:6:39 | documen ... .search | user-provided value |
| trusted-types.js:2:71:2:71 | x | trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:71:2:71 | x | Cross-site scripting vulnerability due to $@. | trusted-types.js:3:24:3:34 | window.name | user-provided value |
| trusted-types-lib.js:2:12:2:12 | x | trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:2:12:2:12 | x | Cross-site scripting vulnerability due to $@. | trusted-types.js:13:20:13:30 | window.name | user-provided value |
| trusted-types.js:3:67:3:67 | x | trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:67:3:67 | x | Cross-site scripting vulnerability due to $@. | trusted-types.js:4:20:4:30 | window.name | user-provided value |
| tst3.js:4:25:4:32 | data.src | tst3.js:2:42:2:63 | window. ... .search | tst3.js:4:25:4:32 | data.src | Cross-site scripting vulnerability due to $@. | tst3.js:2:42:2:63 | window. ... .search | user-provided value |
| tst3.js:5:26:5:31 | data.p | tst3.js:2:42:2:63 | window. ... .search | tst3.js:5:26:5:31 | data.p | Cross-site scripting vulnerability due to $@. | tst3.js:2:42:2:63 | window. ... .search | user-provided value |
| tst3.js:7:32:7:37 | data.p | tst3.js:2:42:2:63 | window. ... .search | tst3.js:7:32:7:37 | data.p | Cross-site scripting vulnerability due to $@. | tst3.js:2:42:2:63 | window. ... .search | user-provided value |

View File

@@ -701,14 +701,22 @@ nodes
| translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:50 | searchP ... 'term') |
| trusted-types.js:2:66:2:66 | x |
| trusted-types.js:2:66:2:66 | x |
| trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:71:2:71 | x |
| trusted-types.js:3:24:3:34 | window.name |
| trusted-types.js:3:24:3:34 | window.name |
| trusted-types.js:3:24:3:34 | window.name |
| trusted-types-lib.js:1:28:1:28 | x |
| trusted-types-lib.js:1:28:1:28 | x |
| trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:2:12:2:12 | x |
| trusted-types.js:3:62:3:62 | x |
| trusted-types.js:3:62:3:62 | x |
| trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:67:3:67 | x |
| trusted-types.js:4:20:4:30 | window.name |
| trusted-types.js:4:20:4:30 | window.name |
| trusted-types.js:4:20:4:30 | window.name |
| trusted-types.js:13:20:13:30 | window.name |
| trusted-types.js:13:20:13:30 | window.name |
| trusted-types.js:13:20:13:30 | window.name |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) |
| tst3.js:2:23:2:74 | decodeU ... str(1)) |
| tst3.js:2:42:2:63 | window. ... .search |
@@ -1880,14 +1888,22 @@ edges
| translate.js:9:27:9:38 | searchParams | translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:38 | searchParams | translate.js:9:27:9:50 | searchP ... 'term') |
| translate.js:9:27:9:38 | searchParams | translate.js:9:27:9:50 | searchP ... 'term') |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:2:66:2:66 | x | trusted-types.js:2:71:2:71 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types.js:3:24:3:34 | window.name | trusted-types.js:2:66:2:66 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types-lib.js:1:28:1:28 | x | trusted-types-lib.js:2:12:2:12 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:3:62:3:62 | x | trusted-types.js:3:67:3:67 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:4:20:4:30 | window.name | trusted-types.js:3:62:3:62 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| trusted-types.js:13:20:13:30 | window.name | trusted-types-lib.js:1:28:1:28 | x |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) | tst3.js:4:25:4:28 | data |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) | tst3.js:5:26:5:29 | data |
| tst3.js:2:12:2:75 | JSON.pa ... tr(1))) | tst3.js:7:32:7:35 | data |

View File

@@ -0,0 +1,3 @@
export function createHtml(x) {
return x;
}

View File

@@ -1,10 +1,13 @@
(function() {
const policy1 = trustedTypes.createPolicy('x', { createHTML: x => x }); // NOT OK
policy1.createHTML(window.name);
import * as lib from './trusted-types-lib';
const policy2 = trustedTypes.createPolicy('x', { createHTML: x => 'safe' }); // OK
policy2.createHTML(window.name);
const policy1 = trustedTypes.createPolicy('x', { createHTML: x => x }); // NOT OK
policy1.createHTML(window.name);
const policy3 = trustedTypes.createPolicy('x', { createHTML: x => x }); // OK
policy3.createHTML('safe');
})();
const policy2 = trustedTypes.createPolicy('x', { createHTML: x => 'safe' }); // OK
policy2.createHTML(window.name);
const policy3 = trustedTypes.createPolicy('x', { createHTML: x => x }); // OK
policy3.createHTML('safe');
const policy4 = trustedTypes.createPolicy('x', { createHTML: lib.createHtml });
policy4.createHTML(window.name);

View File

@@ -19,6 +19,9 @@ private import semmle.python.security.internal.EncryptionKeySizes
* extend `SystemCommandExecution::Range` instead.
*/
class SystemCommandExecution extends DataFlow::Node instanceof SystemCommandExecution::Range {
/** Holds if a shell interprets `arg`. */
predicate isShellInterpreted(DataFlow::Node arg) { super.isShellInterpreted(arg) }
/** Gets the argument that specifies the command to be executed. */
DataFlow::Node getCommand() { result = super.getCommand() }
}
@@ -35,6 +38,9 @@ module SystemCommandExecution {
abstract class Range extends DataFlow::Node {
/** Gets the argument that specifies the command to be executed. */
abstract DataFlow::Node getCommand();
/** Holds if a shell interprets `arg`. */
predicate isShellInterpreted(DataFlow::Node arg) { none() }
}
}

View File

@@ -51,6 +51,7 @@ private import semmle.python.frameworks.Simplejson
private import semmle.python.frameworks.SqlAlchemy
private import semmle.python.frameworks.Starlette
private import semmle.python.frameworks.Stdlib
private import semmle.python.frameworks.Setuptools
private import semmle.python.frameworks.Toml
private import semmle.python.frameworks.Tornado
private import semmle.python.frameworks.Twisted

View File

@@ -182,6 +182,7 @@ private module LambdaFlow {
boolean toJump, DataFlowCallOption lastCall
) {
revLambdaFlow0(lambdaCall, kind, node, t, toReturn, toJump, lastCall) and
not expectsContent(node, _) and
if castNode(node) or node instanceof ArgNode or node instanceof ReturnNode
then compatibleTypes(t, getNodeDataFlowType(node))
else any()

View File

@@ -43,14 +43,22 @@ private module FabricV1 {
* - https://docs.fabfile.org/en/1.14/api/core/operations.html#fabric.operations.run
* - https://docs.fabfile.org/en/1.14/api/core/operations.html#fabric.operations.sudo
*/
private class FabricApiLocalRunSudoCall extends SystemCommandExecution::Range,
DataFlow::CallCfgNode
{
private class FabricApiLocalRunSudoCall extends SystemCommandExecution::Range, API::CallNode {
FabricApiLocalRunSudoCall() { this = api().getMember(["local", "run", "sudo"]).getACall() }
override DataFlow::Node getCommand() {
result = [this.getArg(0), this.getArgByName("command")]
}
override predicate isShellInterpreted(DataFlow::Node arg) {
arg = this.getCommand() and
// defaults to running in a shell
not this.getParameter(1, "shell")
.getAValueReachingSink()
.asExpr()
.(ImmutableLiteral)
.booleanValue() = false
}
}
}
}
@@ -163,6 +171,8 @@ private module FabricV2 {
override DataFlow::Node getCommand() {
result = [this.getArg(0), this.getArgByName("command")]
}
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
// -------------------------------------------------------------------------
@@ -246,6 +256,8 @@ private module FabricV2 {
override DataFlow::Node getCommand() {
result = [this.getArg(0), this.getArgByName("command")]
}
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
/**

View File

@@ -81,5 +81,7 @@ private module Invoke {
override DataFlow::Node getCommand() {
result in [this.getArg(0), this.getArgByName("command")]
}
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
}

View File

@@ -0,0 +1,74 @@
/**
* Provides classes modeling package setup as defined by `setuptools`.
*/
private import python
private import semmle.python.dataflow.new.DataFlow
/** Provides models for the use of `setuptools` in setup scripts, and the APIs exported by the library defined using `setuptools`. */
module Setuptools {
/**
* Gets a file that sets up a package using `setuptools` (or the deprecated `distutils`).
*/
private File setupFile() {
// all of these might not be extracted, but the support is ready for when they are
result.getBaseName() = ["setup.py", "setup.cfg", "pyproject.toml"]
}
/**
* Gets a file or folder that is exported by a library.
*/
private Container getALibraryExportedContainer() {
// a child folder of the root that has a setup.py file
result = setupFile().getParent().(Folder).getAFolder() and
// where the folder has __init__.py file
exists(result.(Folder).getFile("__init__.py")) and
// and is not a test folder
not result.(Folder).getBaseName() = ["test", "tests", "testing"]
or
// child of a library exported container
// (recursive case: descend through the package tree found above)
result = getALibraryExportedContainer().getAChildContainer() and
(
// either any file
not result instanceof Folder
or
// or a folder with an __init__.py file
exists(result.(Folder).getFile("__init__.py"))
)
}
/**
* Gets an AST node that is exported by a library.
*/
private AstNode getAnExportedLibraryFeature() {
// base case: a module whose file lives in an exported package directory
result.(Module).getFile() = getALibraryExportedContainer()
or
// recursive cases: statements of exported modules, methods (incl. __init__)
// of exported classes, and functions defined by exported function statements
result = getAnExportedLibraryFeature().(Module).getAStmt()
or
result = getAnExportedLibraryFeature().(ClassDef).getDefinedClass().getAMethod()
or
result = getAnExportedLibraryFeature().(ClassDef).getDefinedClass().getInitMethod()
or
result = getAnExportedLibraryFeature().(FunctionDef).getDefinedFunction()
}
/**
* Gets a public function (or __init__) that is exported by a library.
*/
private Function getAnExportedFunction() {
result = getAnExportedLibraryFeature() and
(
result.isPublic()
or
result.isInitMethod()
)
}
/**
* Gets a parameter to a public function that is exported by a library.
*/
DataFlow::ParameterNode getALibraryInput() {
result.getParameter() = getAnExportedFunction().getAnArg() and
// `self` is supplied by the callee's own class, not by the library user
not result.getParameter().isSelf()
}
}

View File

@@ -1060,7 +1060,11 @@ private module StdlibPrivate {
private class OsSystemCall extends SystemCommandExecution::Range, DataFlow::CallCfgNode {
OsSystemCall() { this = os().getMember("system").getACall() }
override DataFlow::Node getCommand() { result = this.getArg(0) }
override DataFlow::Node getCommand() {
result in [this.getArg(0), this.getArgByName("command")]
}
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
/**
@@ -1071,7 +1075,7 @@ private module StdlibPrivate {
* Although deprecated since version 2.6, they still work in 2.7.
* See https://docs.python.org/2.7/library/os.html#os.popen2
*/
private class OsPopenCall extends SystemCommandExecution::Range, DataFlow::CallCfgNode {
private class OsPopenCall extends SystemCommandExecution::Range, API::CallNode {
string name;
OsPopenCall() {
@@ -1085,6 +1089,8 @@ private module StdlibPrivate {
not name = "popen" and
result = this.getArgByName("cmd")
}
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
/**
@@ -1104,6 +1110,10 @@ private module StdlibPrivate {
override DataFlow::Node getCommand() { result = this.getArg(0) }
override DataFlow::Node getAPathArgument() { result = this.getCommand() }
override predicate isShellInterpreted(DataFlow::Node arg) {
none() // this is a safe API.
}
}
/**
@@ -1131,6 +1141,10 @@ private module StdlibPrivate {
}
override DataFlow::Node getAPathArgument() { result = this.getCommand() }
override predicate isShellInterpreted(DataFlow::Node arg) {
none() // this is a safe API.
}
}
/**
@@ -1145,6 +1159,10 @@ private module StdlibPrivate {
override DataFlow::Node getCommand() { result in [this.getArg(0), this.getArgByName("path")] }
override DataFlow::Node getAPathArgument() { result = this.getCommand() }
override predicate isShellInterpreted(DataFlow::Node arg) {
none() // this is a safe API.
}
}
/** An additional taint step for calls to `os.path.join` */
@@ -1170,7 +1188,7 @@ private module StdlibPrivate {
* See https://docs.python.org/3.8/library/subprocess.html#subprocess.Popen
* ref: https://docs.python.org/3/library/subprocess.html#legacy-shell-invocation-functions
*/
private class SubprocessPopenCall extends SystemCommandExecution::Range, DataFlow::CallCfgNode {
private class SubprocessPopenCall extends SystemCommandExecution::Range, API::CallNode {
SubprocessPopenCall() {
exists(string name |
name in [
@@ -1180,43 +1198,33 @@ private module StdlibPrivate {
)
}
/** Gets the ControlFlowNode for the `args` argument, if any. */
private DataFlow::Node get_args_arg() { result in [this.getArg(0), this.getArgByName("args")] }
/** Gets the API-node for the `args` argument, if any. */
private API::Node get_args_arg() { result = this.getParameter(0, "args") }
/** Gets the ControlFlowNode for the `shell` argument, if any. */
private DataFlow::Node get_shell_arg() {
result in [this.getArg(8), this.getArgByName("shell")]
}
/** Gets the API-node for the `shell` argument, if any. */
private API::Node get_shell_arg() { result = this.getParameter(8, "shell") }
private boolean get_shell_arg_value() {
not exists(this.get_shell_arg()) and
result = false
or
exists(DataFlow::Node shell_arg | shell_arg = this.get_shell_arg() |
result = shell_arg.asCfgNode().getNode().(ImmutableLiteral).booleanValue()
result =
this.get_shell_arg().getAValueReachingSink().asExpr().(ImmutableLiteral).booleanValue()
or
// TODO: Track the "shell" argument to determine possible values
not shell_arg.asCfgNode().getNode() instanceof ImmutableLiteral and
(
result = true
or
result = false
)
)
not this.get_shell_arg().getAValueReachingSink().asExpr() instanceof ImmutableLiteral and
result = false // defaults to `False`
}
/** Gets the ControlFlowNode for the `executable` argument, if any. */
private DataFlow::Node get_executable_arg() {
result in [this.getArg(2), this.getArgByName("executable")]
}
/** Gets the API-node for the `executable` argument, if any. */
private API::Node get_executable_arg() { result = this.getParameter(2, "executable") }
override DataFlow::Node getCommand() {
// TODO: Track arguments ("args" and "shell")
// TODO: Handle using `args=["sh", "-c", <user-input>]`
result = this.get_executable_arg()
result = this.get_executable_arg().asSink()
or
exists(DataFlow::Node arg_args, boolean shell |
arg_args = this.get_args_arg() and
arg_args = this.get_args_arg().asSink() and
shell = this.get_shell_arg_value()
|
// When "executable" argument is set, and "shell" argument is `False`, the
@@ -1242,6 +1250,11 @@ private module StdlibPrivate {
)
)
}
override predicate isShellInterpreted(DataFlow::Node arg) {
arg = [this.get_executable_arg(), this.get_args_arg()].asSink() and
this.get_shell_arg_value() = true
}
}
// ---------------------------------------------------------------------------
@@ -1389,6 +1402,8 @@ private module StdlibPrivate {
}
override DataFlow::Node getCommand() { result in [this.getArg(0), this.getArgByName("cmd")] }
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
// ---------------------------------------------------------------------------
@@ -1405,6 +1420,8 @@ private module StdlibPrivate {
PlatformPopenCall() { this = platform().getMember("popen").getACall() }
override DataFlow::Node getCommand() { result in [this.getArg(0), this.getArgByName("cmd")] }
override predicate isShellInterpreted(DataFlow::Node arg) { arg = this.getCommand() }
}
// ---------------------------------------------------------------------------

View File

@@ -0,0 +1,159 @@
/**
* Provides default sources, sinks and sanitizers for reasoning about
* shell command constructed from library input vulnerabilities, as
* well as extension points for adding your own.
*/
private import python
private import semmle.python.dataflow.new.DataFlow
private import semmle.python.dataflow.new.TaintTracking
private import CommandInjectionCustomizations::CommandInjection as CommandInjection
private import semmle.python.Concepts as Concepts
/**
* Module containing sources, sinks, and sanitizers for shell command constructed from library input.
*/
module UnsafeShellCommandConstruction {
/** A source for shell command constructed from library input vulnerabilities. */
abstract class Source extends DataFlow::Node { }
private import semmle.python.frameworks.Setuptools
/** An input parameter to a gem seen as a source. */
private class LibraryInputAsSource extends Source instanceof DataFlow::ParameterNode {
LibraryInputAsSource() {
this = Setuptools::getALibraryInput() and
not this.getParameter().getName().matches(["cmd%", "command%", "%_command", "%_cmd"])
}
}
/** A sink for shell command constructed from library input vulnerabilities. */
abstract class Sink extends DataFlow::Node {
Sink() { not this.asExpr() instanceof StrConst } // filter out string constants, makes testing easier
/** Gets a description of how the string in this sink was constructed. */
abstract string describe();
/** Gets the dataflow node where the string is constructed. */
DataFlow::Node getStringConstruction() { result = this }
/** Gets the dataflow node that executed the string as a shell command. */
abstract DataFlow::Node getCommandExecution();
}
/** Holds if the string constructed at `source` is executed at `shellExec` */
predicate isUsedAsShellCommand(DataFlow::Node source, Concepts::SystemCommandExecution shellExec) {
source = backtrackShellExec(TypeTracker::TypeBackTracker::end(), shellExec)
}
import semmle.python.dataflow.new.TypeTracker as TypeTracker
private DataFlow::LocalSourceNode backtrackShellExec(
TypeTracker::TypeBackTracker t, Concepts::SystemCommandExecution shellExec
) {
t.start() and
result = any(DataFlow::Node n | shellExec.isShellInterpreted(n)).getALocalSource()
or
exists(TypeTracker::TypeBackTracker t2 |
result = backtrackShellExec(t2, shellExec).backtrack(t2, t)
)
}
/**
* A string constructed from a string-literal (e.g. `f'foo {sink}'`),
* where the resulting string ends up being executed as a shell command.
*/
class StringInterpolationAsSink extends Sink {
Concepts::SystemCommandExecution s;
Fstring fstring;
StringInterpolationAsSink() {
isUsedAsShellCommand(DataFlow::exprNode(fstring), s) and
this.asExpr() = fstring.getASubExpression()
}
override string describe() { result = "f-string" }
override DataFlow::Node getCommandExecution() { result = s }
override DataFlow::Node getStringConstruction() { result.asExpr() = fstring }
}
/**
* A component of a string-concatenation (e.g. `"foo " + sink`),
* where the resulting string ends up being executed as a shell command.
*/
class StringConcatAsSink extends Sink {
Concepts::SystemCommandExecution s;
BinaryExpr add;
StringConcatAsSink() {
add.getOp() instanceof Add and
isUsedAsShellCommand(any(DataFlow::Node n | n.asExpr() = add), s) and
this.asExpr() = add.getASubExpression()
}
override DataFlow::Node getCommandExecution() { result = s }
override string describe() { result = "string concatenation" }
override DataFlow::Node getStringConstruction() { result.asExpr() = add }
}
/**
* A string constructed using a `" ".join(...)` call, where the resulting string ends up being executed as a shell command.
*/
class ArrayJoin extends Sink {
Concepts::SystemCommandExecution s;
DataFlow::MethodCallNode call;
ArrayJoin() {
call.getMethodName() = "join" and
unique( | | call.getArg(_)).asExpr().(Str).getText() = " " and
isUsedAsShellCommand(call, s) and
(
this = call.getArg(0) and
not call.getArg(0).asExpr() instanceof List
or
this.asExpr() = call.getArg(0).asExpr().(List).getASubExpression()
)
}
override string describe() { result = "array" }
override DataFlow::Node getCommandExecution() { result = s }
override DataFlow::Node getStringConstruction() { result = call }
}
/**
* A string constructed from a format call,
* where the resulting string ends up being executed as a shell command.
* Either a call to `.format(..)` or a string-interpolation with a `%` operator.
*/
class TaintedFormatStringAsSink extends Sink {
  // The command execution that eventually runs the constructed string.
  Concepts::SystemCommandExecution s;
  // Either a `%`-interpolation expression or a `.format(..)` call.
  DataFlow::Node formatCall;

  TaintedFormatStringAsSink() {
    (
      // `"..." % args`: both operands of the `%` expression are sinks.
      formatCall.asExpr().(BinaryExpr).getOp() instanceof Mod and
      this.asExpr() = formatCall.asExpr().(BinaryExpr).getASubExpression()
      or
      // `fmt.format(args)`: the receiver (the format string) and every
      // argument are sinks.
      formatCall.(DataFlow::MethodCallNode).getMethodName() = "format" and
      this =
        [
          formatCall.(DataFlow::MethodCallNode).getArg(_),
          formatCall.(DataFlow::MethodCallNode).getObject()
        ]
    ) and
    isUsedAsShellCommand(formatCall, s)
  }

  override string describe() { result = "formatted string" }

  override DataFlow::Node getCommandExecution() { result = s }

  override DataFlow::Node getStringConstruction() { result = formatCall }
}
}

View File

@@ -0,0 +1,34 @@
/**
* Provides a taint tracking configuration for reasoning about shell command
* constructed from library input vulnerabilities
*
* Note, for performance reasons: only import this file if `Configuration` is needed,
* otherwise `UnsafeShellCommandConstructionCustomizations` should be imported instead.
*/
import python
import semmle.python.dataflow.new.DataFlow
import UnsafeShellCommandConstructionCustomizations::UnsafeShellCommandConstruction
private import semmle.python.dataflow.new.TaintTracking
private import CommandInjectionCustomizations::CommandInjection as CommandInjection
private import semmle.python.dataflow.new.BarrierGuards
/**
* A taint-tracking configuration for detecting shell command constructed from library input vulnerabilities.
*/
class Configuration extends TaintTracking::Configuration {
  Configuration() { this = "UnsafeShellCommandConstruction" }

  override predicate isSource(DataFlow::Node source) { source instanceof Source }

  override predicate isSink(DataFlow::Node sink) { sink instanceof Sink }

  override predicate isSanitizer(DataFlow::Node node) {
    node instanceof CommandInjection::Sanitizer // using all sanitizers from `py/command-injection`
  }

  // override to require the path doesn't have unmatched return steps
  override DataFlow::FlowFeature getAFeature() {
    result instanceof DataFlow::FeatureHasSourceCallContext
  }
}

View File

@@ -0,0 +1,73 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>
Dynamically constructing a shell command with inputs from library
functions may inadvertently change the meaning of the shell command.
Clients using the exported function may use inputs containing
characters that the shell interprets in a special way, for instance
quotes and spaces.
This can result in the shell command misbehaving, or even
allowing a malicious user to execute arbitrary commands on the system.
</p>
</overview>
<recommendation>
<p>
If possible, provide the dynamic arguments to the shell as an array
to APIs such as <code>subprocess.run</code> to avoid interpretation by the shell.
</p>
<p>
Alternatively, if the shell command must be constructed
dynamically, then add code to ensure that special characters
do not alter the shell command unexpectedly.
</p>
</recommendation>
<example>
<p>
The following example shows a dynamically constructed shell
command that downloads a file from a remote URL.
</p>
<sample src="examples/unsafe-shell-command-construction.py" />
<p>
The shell command will, however, fail to work as intended if the
input contains spaces or other special characters interpreted in a
special way by the shell.
</p>
<p>
Even worse, a client might pass in user-controlled
data, not knowing that the input is interpreted as a shell command.
This could allow a malicious user to provide the input <code>http://example.org; cat /etc/passwd</code>
in order to execute the command <code>cat /etc/passwd</code>.
</p>
<p>
To avoid such potentially catastrophic behaviors, provide the
input from library functions as an argument that does not
get interpreted by a shell:
</p>
<sample src="examples/unsafe-shell-command-construction_fixed.py" />
</example>
<references>
<li>
OWASP:
<a href="https://www.owasp.org/index.php/Command_Injection">Command Injection</a>.
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,27 @@
/**
* @name Unsafe shell command constructed from library input
* @description Using externally controlled strings in a command line may allow a malicious
* user to change the meaning of the command.
* @kind path-problem
* @problem.severity error
* @security-severity 6.3
* @precision medium
* @id py/shell-command-constructed-from-input
* @tags correctness
* security
* external/cwe/cwe-078
* external/cwe/cwe-088
* external/cwe/cwe-073
*/
import python
import semmle.python.security.dataflow.UnsafeShellCommandConstructionQuery
import DataFlow::PathGraph
from Configuration config, DataFlow::PathNode source, DataFlow::PathNode sink, Sink sinkNode
where
  config.hasFlowPath(source, sink) and
  // Bind the typed sink so the alert can report both the string-construction
  // and the command-execution locations.
  sinkNode = sink.getNode()
select sinkNode.getStringConstruction(), source, sink,
  "This " + sinkNode.describe() + " which depends on $@ is later used in a $@.", source.getNode(),
  "library input", sinkNode.getCommandExecution(), "shell command"

View File

@@ -0,0 +1,4 @@
import os
def download(path):
    # BAD: `path` is concatenated into a shell command, so shell metacharacters
    # (spaces, `;`, quotes) in the input change the command's meaning.
    os.system("wget " + path) # NOT OK

View File

@@ -0,0 +1,4 @@
import subprocess
def download(path):
    # GOOD: passing an argument list avoids shell interpretation of `path`.
    subprocess.run(["wget", path]) # OK

View File

@@ -0,0 +1,4 @@
---
category: newQuery
---
* Added a new query, `py/shell-command-constructed-from-input`, to detect libraries that unsafely construct shell commands from their inputs.

View File

@@ -140,7 +140,7 @@ subprocess.Popen(args) # $getCommand=args
args = "<progname>"
use_shell = False
exe = "executable"
subprocess.Popen(args, shell=use_shell, executable=exe) # $getCommand=exe SPURIOUS: getCommand=args
subprocess.Popen(args, shell=use_shell, executable=exe) # $getCommand=exe
################################################################################

View File

@@ -0,0 +1,2 @@
missingAnnotationOnSink
failures

View File

@@ -0,0 +1,3 @@
import python
import experimental.dataflow.TestUtil.DataflowQueryTest
import semmle.python.security.dataflow.UnsafeShellCommandConstructionQuery

View File

@@ -0,0 +1,40 @@
edges
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:5:25:5:28 | ControlFlowNode for name |
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:8:23:8:26 | ControlFlowNode for name |
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:11:25:11:38 | ControlFlowNode for Attribute() |
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:14:34:14:39 | ControlFlowNode for List |
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:17:32:17:35 | ControlFlowNode for name |
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:20:27:20:30 | ControlFlowNode for name |
| src/unsafe_shell_test.py:14:34:14:39 | ControlFlowNode for List | src/unsafe_shell_test.py:14:25:14:40 | ControlFlowNode for Attribute() |
| src/unsafe_shell_test.py:26:20:26:23 | ControlFlowNode for name | src/unsafe_shell_test.py:29:30:29:33 | ControlFlowNode for name |
| src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | src/unsafe_shell_test.py:39:30:39:33 | ControlFlowNode for name |
| src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | src/unsafe_shell_test.py:44:20:44:23 | ControlFlowNode for name |
| src/unsafe_shell_test.py:41:24:41:24 | ControlFlowNode for x | src/unsafe_shell_test.py:42:34:42:34 | ControlFlowNode for x |
| src/unsafe_shell_test.py:44:20:44:23 | ControlFlowNode for name | src/unsafe_shell_test.py:41:24:41:24 | ControlFlowNode for x |
nodes
| src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:5:25:5:28 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:8:23:8:26 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:11:25:11:38 | ControlFlowNode for Attribute() | semmle.label | ControlFlowNode for Attribute() |
| src/unsafe_shell_test.py:14:25:14:40 | ControlFlowNode for Attribute() | semmle.label | ControlFlowNode for Attribute() |
| src/unsafe_shell_test.py:14:34:14:39 | ControlFlowNode for List | semmle.label | ControlFlowNode for List |
| src/unsafe_shell_test.py:17:32:17:35 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:20:27:20:30 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:26:20:26:23 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:29:30:29:33 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:39:30:39:33 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
| src/unsafe_shell_test.py:41:24:41:24 | ControlFlowNode for x | semmle.label | ControlFlowNode for x |
| src/unsafe_shell_test.py:42:34:42:34 | ControlFlowNode for x | semmle.label | ControlFlowNode for x |
| src/unsafe_shell_test.py:44:20:44:23 | ControlFlowNode for name | semmle.label | ControlFlowNode for name |
subpaths
#select
| src/unsafe_shell_test.py:5:15:5:28 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:5:25:5:28 | ControlFlowNode for name | This string concatenation which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:5:5:5:29 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:8:15:8:28 | ControlFlowNode for Fstring | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:8:23:8:26 | ControlFlowNode for name | This f-string which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:8:5:8:29 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:11:15:11:38 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:11:25:11:38 | ControlFlowNode for Attribute() | This string concatenation which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:11:5:11:39 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:14:15:14:40 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:14:25:14:40 | ControlFlowNode for Attribute() | This string concatenation which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:14:5:14:41 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:17:15:17:36 | ControlFlowNode for Attribute() | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:17:32:17:35 | ControlFlowNode for name | This formatted string which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:17:5:17:37 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:20:15:20:30 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | src/unsafe_shell_test.py:20:27:20:30 | ControlFlowNode for name | This formatted string which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:4:22:4:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:20:5:20:31 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:29:20:29:33 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:26:20:26:23 | ControlFlowNode for name | src/unsafe_shell_test.py:29:30:29:33 | ControlFlowNode for name | This string concatenation which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:26:20:26:23 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:29:5:29:46 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:39:20:39:33 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | src/unsafe_shell_test.py:39:30:39:33 | ControlFlowNode for name | This string concatenation which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:39:5:39:46 | ControlFlowNode for Attribute() | shell command |
| src/unsafe_shell_test.py:42:24:42:34 | ControlFlowNode for BinaryExpr | src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | src/unsafe_shell_test.py:42:34:42:34 | ControlFlowNode for x | This string concatenation which depends on $@ is later used in a $@. | src/unsafe_shell_test.py:36:22:36:25 | ControlFlowNode for name | library input | src/unsafe_shell_test.py:42:9:42:47 | ControlFlowNode for Attribute() | shell command |

View File

@@ -0,0 +1 @@
Security/CWE-078/UnsafeShellCommandConstruction.ql

View File

@@ -0,0 +1 @@
semmle-extractor-options: --lang=3 --max-import-depth=0 -r src

View File

@@ -0,0 +1,4 @@
import os
def unsafe_setup(name):
    os.system("ping " + name) # $result=OK - this is inside a setup script, so it's fine.

View File

@@ -0,0 +1,49 @@
import os
import subprocess
def unsafe_shell_one(name):
os.system("ping " + name) # $result=BAD
# f-strings
os.system(f"ping {name}") # $result=BAD
# array.join
os.system("ping " + " ".join(name)) # $result=BAD
# array.join, with a list
os.system("ping " + " ".join([name])) # $result=BAD
# format, using .format
os.system("ping {}".format(name)) # $result=BAD
# format, using %
os.system("ping %s" % name) # $result=BAD
os.system(name) # OK - seems intentional.
import fabric
def facbric_stuff (name):
fabric.api.run("ping " + name, shell=False) # OK
fabric.api.run("ping " + name, shell=True) # $result=BAD
def indirect(flag):
fabric.api.run("ping " + name, shell=flag) # OK
indirect(False)
def subprocess_flag (name):
subprocess.run("ping " + name, shell=False) # OK - and nonsensical
subprocess.run("ping " + name, shell=True) # $result=BAD
def indirect(flag, x):
subprocess.run("ping " + x, shell=flag) # $result=BAD
indirect(True, name)
subprocess.Popen("ping " + name, shell=unknownValue) # OK - shell assumed to be False
def intentional(command):
os.system("fish -ic " + command) # $result=OK - intentional

2
ruby/.gitignore vendored
View File

@@ -1,4 +1,4 @@
/target
extractor/target
extractor-pack
.vscode/launch.json
.cache

View File

@@ -1,7 +0,0 @@
[workspace]
members = [
"autobuilder",
"extractor",
"generator",
"node-types",
]

View File

@@ -10,7 +10,7 @@ runs:
uses: actions/cache@v3
with:
path: ruby/extractor-pack
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-extractor-${{ hashFiles('ruby/rust-toolchain.toml', 'ruby/scripts/create-extractor-pack.sh', 'ruby/**/Cargo.lock', 'ruby/actions/create-extractor-pack/action.yml') }}-${{ hashFiles('ruby/**/*.rs') }}-${{ hashFiles('ruby/codeql-extractor.yml', 'ruby/downgrades', 'ruby/tools', 'ruby/ql/lib/ruby.dbscheme', 'ruby/ql/lib/ruby.dbscheme.stats') }}
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-extractor-${{ hashFiles('ruby/extractor/rust-toolchain.toml', 'ruby/scripts/create-extractor-pack.sh', 'ruby/extractor/**/Cargo.lock', 'ruby/actions/create-extractor-pack/action.yml') }}-${{ hashFiles('ruby/extractor/**/*.rs') }}-${{ hashFiles('ruby/codeql-extractor.yml', 'ruby/downgrades', 'ruby/tools', 'ruby/ql/lib/ruby.dbscheme', 'ruby/ql/lib/ruby.dbscheme.stats') }}
- name: Cache cargo
uses: actions/cache@v3
if: steps.cache-extractor.outputs.cache-hit != 'true'
@@ -19,7 +19,7 @@ runs:
~/.cargo/registry
~/.cargo/git
ruby/target
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-qltest-cargo-${{ hashFiles('ruby/rust-toolchain.toml', 'ruby/scripts/create-extractor-pack.sh', 'ruby/**/Cargo.lock') }}
key: ${{ runner.os }}-${{ steps.os_version.outputs.version }}-ruby-qltest-cargo-${{ hashFiles('ruby/extractor/rust-toolchain.toml', 'ruby/scripts/create-extractor-pack.sh', 'ruby/extractor/**/Cargo.lock') }}
- name: Build Extractor
if: steps.cache-extractor.outputs.cache-hit != 'true'
shell: bash

View File

@@ -1,9 +0,0 @@
[package]
name = "ruby-autobuilder"
version = "0.1.0"
authors = ["GitHub"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

Binary file not shown.

View File

@@ -8,7 +8,6 @@ edition = "2018"
[dependencies]
flate2 = "1.0"
node-types = { path = "../node-types" }
tree-sitter = "0.20"
tree-sitter-embedded-template = { git = "https://github.com/tree-sitter/tree-sitter-embedded-template.git", rev = "203f7bd3c1bbfbd98fc19add4b8fcb213c059205" }
tree-sitter-ruby = { git = "https://github.com/tree-sitter/tree-sitter-ruby.git", rev = "206c7077164372c596ffa8eaadb9435c28941364" }

View File

@@ -1,8 +1,3 @@
mod diagnostics;
mod extractor;
mod file_paths;
mod trap;
#[macro_use]
extern crate lazy_static;
extern crate num_cpus;
@@ -16,6 +11,8 @@ use std::io::BufRead;
use std::path::{Path, PathBuf};
use tree_sitter::{Language, Parser, Range};
use ruby_extractor::{diagnostics, extractor, file_paths, node_types, trap};
/**
* Gets the number of threads the extractor should use, by reading the
* CODEQL_THREADS environment variable and using it as described in the
@@ -46,6 +43,7 @@ lazy_static! {
static ref CP_NUMBER: regex::Regex = regex::Regex::new("cp([0-9]+)").unwrap();
}
/// Returns the `encoding::Encoding` corresponding to the given encoding name, if one exists.
fn encoding_from_name(encoding_name: &str) -> Option<&(dyn encoding::Encoding + Send + Sync)> {
match encoding::label::encoding_from_whatwg_label(encoding_name) {
s @ Some(_) => s,

Some files were not shown because too many files have changed in this diff Show More