This commit is contained in:
amammad
2023-06-12 23:25:23 +10:00
parent 798f3880c9
commit 7354db873a
17 changed files with 1346 additions and 1 deletions

View File

@@ -0,0 +1,31 @@
/**
 * @name Unsafe file write from a remotely provided path
 * @description Writing an uploaded file (Apache Commons FileUpload) to a path influenced by
 *              remote input is dangerous without sanitization.
 * @kind path-problem
 * @problem.severity error
 * @precision high
 * @id java/unsafe-file-write
 * @tags security
 *       experimental
 */

import semmle.code.java.dataflow.DataFlow
import semmle.code.java.dataflow.FlowSources
// Explicitly import TaintTracking: `TaintTracking::Global` is used below and
// relying on a transitive import is fragile.
import semmle.code.java.dataflow.TaintTracking
import RemoteSource

/** Taint configuration: multipart/form remote sources to file-write path arguments. */
module ApacheCommonsUploadFileConfig implements DataFlow::ConfigSig {
  predicate isSource(DataFlow::Node source) { source instanceof FormRemoteFlowSource }

  predicate isSink(DataFlow::Node sink) {
    sink.asExpr() = any(ApacheCommonsFileUpload::DangerousSink::FileWriteSink s).getAPathArgument()
  }
}

// Renamed from `BombsFlow` (copy-paste leftover from the decompression-bombs query).
module UnsafeFileWriteFlow = TaintTracking::Global<ApacheCommonsUploadFileConfig>;

import UnsafeFileWriteFlow::PathGraph

from UnsafeFileWriteFlow::PathNode source, UnsafeFileWriteFlow::PathNode sink
where UnsafeFileWriteFlow::flowPath(source, sink)
// Message fixed: this query is about file writes, not file extraction.
select sink.getNode(), source, sink, "This file write depends on a $@.", source.getNode(),
  "potentially untrusted source"

View File

@@ -0,0 +1,187 @@
import java
import semmle.code.java.dataflow.DataFlow
/** A data-flow node holding a value obtained from command-line parsing. */
abstract class CLIFlowSource extends DataFlow::Node { }

/**
 * Gets a non-primitive, non-boxed field declared in `rt`, or (recursively) a
 * field declared in the type of such a field — i.e. a field reachable from
 * `rt` by traversing object-typed fields.
 */
Field childFiled(RefType rt) {
  not result.getType() instanceof PrimitiveType and
  not result.getType() instanceof BoxedType and
  (
    result.getDeclaringType() = rt
    or
    // BUG FIX: recurse from `rt`, not from `result.getDeclaringType()`.
    // The original self-referential call matched any field whose declaring
    // type is the type of a field of its own declaring type, regardless of `rt`.
    result.getDeclaringType() = childFiled(rt).getType()
  )
}
/** Models the Apache Commons CLI command-line parsing library. */
module CommonsCLI {
  /** The `org.apache.commons.cli.CommandLine` type or a subtype. */
  class TypeCommandLine extends RefType {
    TypeCommandLine() {
      this.getAStrictAncestor*().hasQualifiedName("org.apache.commons.cli", "CommandLine")
    }
  }

  /** The `org.apache.commons.cli.Option` type or a subtype. */
  class TypeOption extends RefType {
    TypeOption() { this.getAStrictAncestor*().hasQualifiedName("org.apache.commons.cli", "Option") }
  }

  /** A value read from an `Option`, e.g. `opt.getValue()`. */
  class OptionValue extends CLIFlowSource {
    OptionValue() {
      exists(MethodAccess ma |
        ma.getCallee().getDeclaringType() instanceof TypeOption and
        ma.getCallee().hasName(["getValue", "getValues", "getValuesList"]) and
        this.asExpr() = ma
      )
    }
  }

  /** A value read from a parsed `CommandLine`, e.g. `cmd.getOptionValue(...)`. */
  class CommandLine extends CLIFlowSource {
    CommandLine() {
      exists(MethodAccess ma |
        ma.getCallee().getDeclaringType() instanceof TypeCommandLine and
        // FIX: "getArgs" was listed twice in the original name list.
        ma.getCallee().hasName(["getArgs", "getArgList", "getOptionValue", "getOptionValues"]) and
        this.asExpr() = ma
      )
    }
  }
}
/** Models the args4j command-line parsing library. */
module Args4j {
  class TypeCmdLineParser extends RefType {
    TypeCmdLineParser() {
      // `org.kohsuke.args4j.CmdLineParser` or any subtype
      this.getAStrictAncestor*().hasQualifiedName("org.kohsuke.args4j", "CmdLineParser")
    }
  }

  /**
   * An access to a field (or a transitively reachable object-typed field) of
   * the options bean passed as the first argument to a `CmdLineParser` call;
   * args4j populates such fields from the command line.
   */
  class CommandLine extends CLIFlowSource {
    CommandLine() {
      exists(Call c, ClassInstanceExpr cie, Field f |
        c.getCallee().getDeclaringType() instanceof TypeCmdLineParser and
        DataFlow::localExprFlow(cie, c.getArgument(0)) and
        f.getDeclaringType() = cie.getType() and
        (
          this.asExpr() = f.getAnAccess()
          or
          this.asExpr() = childFiled(f.getType()).getAnAccess()
        )
      )
    }
  }
}
/** Models the JCommander command-line parsing library. */
module Jcommander {
  class TypeBuilder extends RefType {
    TypeBuilder() {
      // `com.beust.jcommander.JCommander.Builder` or any subtype
      this.getAStrictAncestor*().hasQualifiedName("com.beust.jcommander", "JCommander$Builder")
    }
  }

  /**
   * An access to a field (or a transitively reachable object-typed field) of
   * the arguments object passed to a `JCommander.Builder` call; JCommander
   * populates such fields from the command line.
   */
  class CommandLine extends CLIFlowSource {
    CommandLine() {
      exists(Call c, ClassInstanceExpr cie, Field f |
        c.getCallee().getDeclaringType() instanceof TypeBuilder and
        DataFlow::localExprFlow(cie, c.getArgument(0)) and
        f.getDeclaringType() = cie.getType() and
        (
          this.asExpr() = f.getAnAccess()
          or
          this.asExpr() = childFiled(f.getType()).getAnAccess()
        )
      )
    }
  }
}
/** Models the picocli command-line parsing library. */
module Picocli {
  class TypeCommandLine extends RefType {
    TypeCommandLine() {
      // `picocli.CommandLine` or any subtype
      this.getAStrictAncestor*().hasQualifiedName("picocli", "CommandLine")
    }
  }

  /** The result of `CommandLine.getExecutionResult()`. */
  class GetCallResult extends CLIFlowSource {
    GetCallResult() {
      exists(MethodAccess ma |
        ma.getCallee().getDeclaringType() instanceof TypeCommandLine and
        ma.getCallee().hasName("getExecutionResult") and
        this.asExpr() = ma
      )
    }
  }

  /**
   * An access to a field (or transitively reachable field) of a command object
   * passed to `CommandLine.populateCommand(...)`.
   */
  class Execute extends CLIFlowSource {
    Execute() {
      exists(MethodAccess ma, ClassInstanceExpr cie, Field f |
        ma.getCallee().getDeclaringType() instanceof TypeCommandLine and
        ma.getCallee().hasName("populateCommand") and
        DataFlow::localExprFlow(cie, ma.getArgument(0)) and
        f.getDeclaringType() = cie.getType() and
        (
          this.asExpr() = f.getAnAccess()
          or
          this.asExpr() = childFiled(f.getType()).getAnAccess()
        )
      )
    }
  }

  /**
   * An access to a field (or transitively reachable field) of a command object
   * passed as the first argument of any `CommandLine` call.
   */
  class CommandLine extends CLIFlowSource {
    CommandLine() {
      exists(Call c, ClassInstanceExpr cie, Field f |
        c.getCallee().getDeclaringType() instanceof TypeCommandLine and
        DataFlow::localExprFlow(cie, c.getArgument(0)) and
        f.getDeclaringType() = cie.getType() and
        (
          this.asExpr() = f.getAnAccess()
          or
          this.asExpr() = childFiled(f.getType()).getAnAccess()
        )
      )
    }
  }
}
/** Models the argparse4j command-line parsing library. */
module ArgParse4j {
  class TypeArgumentParser extends RefType {
    TypeArgumentParser() {
      // classes of ArgumentParser interface like `SubParser`
      this.getAStrictAncestor*()
          .hasQualifiedName("net.sourceforge.argparse4j.inf", "ArgumentParser")
    }
  }

  class TypeNamespace extends RefType {
    TypeNamespace() {
      this.getAStrictAncestor*().hasQualifiedName("net.sourceforge.argparse4j.inf", "Namespace")
    }
  }

  /** A value read out of a parsed `Namespace`, e.g. `ns.getString(...)`. */
  class ParseArgsReturnValue extends CLIFlowSource {
    ParseArgsReturnValue() {
      exists(MethodAccess ma |
        ma.getReceiverType() instanceof TypeNamespace and
        ma.getCallee().hasName(["getString", "getList", "toString", "getByte", "get", "getAttrs"]) and
        this.asExpr() = ma
      )
    }
  }

  /**
   * An access to a field (or transitively reachable field) of the attributes
   * object passed as the second argument of `parseArgs(args, attrs)`, which
   * argparse4j populates from the command line.
   */
  class ParseArgsSecondArg extends CLIFlowSource {
    ParseArgsSecondArg() {
      exists(MethodAccess ma, ClassInstanceExpr cie, Field f |
        ma.getReceiverType() instanceof TypeArgumentParser and
        ma.getCallee().hasName(["parseArgs"]) and
        ma.getNumArgument() = 2 and
        DataFlow::localExprFlow(cie, ma.getArgument(1)) and
        f.getDeclaringType() = cie.getType() and
        (
          this.asExpr() = f.getAnAccess()
          or
          this.asExpr() = childFiled(f.getType()).getAnAccess()
        )
      )
    }
  }
}

View File

@@ -0,0 +1,38 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Extracting compressed files that use any compression algorithm, such as gzip, can lead to denial-of-service attacks.</p>
<p>Attackers can compress a huge file, created from repeated similar bytes, into a small compressed file.</p>
</overview>
<recommendation>
<p>When decompressing a user-provided compressed file, check the decompression ratio, or read the file within a loop byte by byte so that the decompressed size can be managed in each cycle of the loop.</p>
</recommendation>
<example>
<p>
Reading an uncompressed ZipFile within a loop and checking against a threshold size in each cycle.
</p>
<sample src="example_good.java"/>
<p>
An unsafe approach is shown in the following example, which does not check the uncompressed size.
</p>
<sample src="example_bad.java" />
</example>
<references>
<li>
<a href="https://github.com/advisories/GHSA-47vx-fqr5-j2gw">CVE-2022-4565</a>
</li>
<li>
<a href="https://www.bamsoftware.com/hacks/zipbomb/">A great research to gain more impact by this kind of attacks</a>
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,393 @@
/**
* @name User-controlled file decompression
* @description User-controlled data that flows into decompression library APIs without checking the compression rate is dangerous
* @kind path-problem
* @problem.severity error
* @security-severity 7.8
* @precision medium
* @id java/user-controlled-file-decompression
* @tags security
* experimental
* external/cwe/cwe-409
*/
import semmle.code.java.dataflow.DataFlow
import semmle.code.java.dataflow.DataFlow2
import semmle.code.java.dataflow.FlowSources
import semmle.code.java.dataflow.TaintTracking
import semmle.code.java.dataflow.TaintTracking2
import RemoteSource
import CommandLineSource
import java
/** Models the `org.xerial.snappy` decompression library. */
module XserialSnappy {
  class TypeInputStream extends RefType {
    TypeInputStream() {
      this.getASupertype*().hasQualifiedName("org.xerial.snappy", "SnappyInputStream")
    }
  }

  /**
   * Holds if taint propagates from the first argument of a call on
   * `SnappyInputStream` (e.g. its constructor) to the call expression itself.
   */
  predicate inputStreamAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
    exists(Call call |
      call.getCallee().getDeclaringType() instanceof TypeInputStream and
      call.getArgument(0) = n1.asExpr() and
      call = n2.asExpr()
    )
  }

  /** A `read`/`readNBytes`/`readAllBytes` call on a `SnappyInputStream`. */
  class ReadInputStreamCall extends MethodAccess {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeInputStream and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }

    /** Gets the buffer that decompressed bytes are written into. */
    Expr getAWriteArgument() { result = this.getArgument(0) }

    // look at Zip4j comments for this method
    predicate isControlledRead() { none() }
  }
}
module ApacheCommons {
/** `org.apache.commons.compress.archivers.ArchiveInputStream` or a subtype. */
class TypeArchiveInputStream extends RefType {
  TypeArchiveInputStream() {
    this.getASupertype*()
        .hasQualifiedName("org.apache.commons.compress.archivers", "ArchiveInputStream")
  }
}

/** `org.apache.commons.compress.compressors.CompressorInputStream` or a subtype. */
class TypeCompressorInputStream extends RefType {
  TypeCompressorInputStream() {
    this.getASupertype*()
        .hasQualifiedName("org.apache.commons.compress.compressors", "CompressorInputStream")
  }
}
/** Models the concrete single-stream compressor classes of Apache Commons Compress. */
module Compressors {
  /** Any concrete `*CompressorInputStream` implementation, or a subtype. */
  class TypeCompressors extends RefType {
    TypeCompressors() {
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.gzip",
            "GzipCompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.brotli",
            "BrotliCompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.bzip2",
            "BZip2CompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.deflate",
            "DeflateCompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.deflate64",
            "Deflate64CompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.lz4",
            "BlockLZ4CompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.lzma",
            "LZMACompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.pack200",
            "Pack200CompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.snappy",
            "SnappyCompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.xz",
            "XZCompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.z", "ZCompressorInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors.zstandard",
            "ZstdCompressorInputStream")
    }
  }

  /** Taint step from a compressor-stream call's first argument to the call itself. */
  predicate inputStreamAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
    exists(Call call |
      call.getCallee().getDeclaringType() instanceof TypeCompressors and
      call.getArgument(0) = n1.asExpr() and
      call = n2.asExpr()
    )
  }

  /** A `read`/`readNBytes`/`readAllBytes` call on a compressor stream. */
  class ReadInputStreamCall extends MethodAccess {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeCompressors and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }

    /** Gets the buffer that decompressed bytes are written into. */
    Expr getAWriteArgument() { result = this.getArgument(0) }

    // look at Zip4j comments for this method
    predicate isControlledRead() { none() }
  }
}
/** Models the concrete archive-stream classes of Apache Commons Compress. */
module Archivers {
  /** Any concrete `*ArchiveInputStream` implementation, or a subtype. */
  class TypeArchivers extends RefType {
    TypeArchivers() {
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers.ar", "ArArchiveInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers.arj", "ArjArchiveInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers.cpio", "CpioArchiveInputStream") or
      // FIX: `ArArchiveInputStream` was listed twice; the second occurrence was
      // presumably meant to be the tar archiver.
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers.tar", "TarArchiveInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers.jar", "JarArchiveInputStream") or
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers.zip", "ZipArchiveInputStream")
    }
  }

  /** Taint step from an archive-stream call's first argument to the call itself. */
  predicate inputStreamAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
    exists(Call call |
      call.getCallee().getDeclaringType() instanceof TypeArchivers and
      n1.asExpr() = call.getArgument(0) and
      n2.asExpr() = call
    )
  }

  /** A `read`/`readNBytes`/`readAllBytes` call on an archive stream. */
  class ReadInputStreamCall extends MethodAccess {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeArchivers and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }

    /** Gets the buffer that decompressed bytes are written into. */
    Expr getAWriteArgument() { result = this.getArgument(0) }

    // look at Zip4j comments for this method
    predicate isControlledRead() { none() }
  }
}
/** Models streams created via the Commons Compress stream factories. */
module Factory {
  /** `org.apache.commons.compress.archivers.ArchiveStreamFactory` or a subtype. */
  class TypeArchivers extends RefType {
    TypeArchivers() {
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers", "ArchiveStreamFactory")
    }
  }

  /** `org.apache.commons.compress.compressors.CompressorStreamFactory` or a subtype. */
  class TypeCompressors extends RefType {
    TypeCompressors() {
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors", "CompressorStreamFactory")
    }
  }

  /** Taint step from a factory call's first argument to the created stream. */
  predicate inputStreamAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
    exists(Call call |
      (
        call.getCallee().getDeclaringType() instanceof TypeCompressors
        or
        call.getCallee().getDeclaringType() instanceof TypeArchivers
      ) and
      n1.asExpr() = call.getArgument(0) and
      n2.asExpr() = call
    )
  }

  /** A `read`/`readNBytes`/`readAllBytes` call on a factory-created stream. */
  class ReadInputStreamCall extends MethodAccess {
    ReadInputStreamCall() {
      (
        this.getReceiverType() instanceof TypeArchiveInputStream
        or
        this.getReceiverType() instanceof TypeCompressorInputStream
      ) and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }

    /** Gets the buffer that decompressed bytes are written into. */
    Expr getAWriteArgument() { result = this.getArgument(0) }

    // look at Zip4j comments for this method
    predicate isControlledRead() { none() }
  }
}
}
/** Models the zip4j decompression library. */
module Zip4j {
  class TypeZipInputStream extends RefType {
    TypeZipInputStream() {
      // NOTE(review): exact-type match only; subtypes of ZipInputStream are not
      // covered here (other modules use `getASupertype*`) — confirm intentional.
      this.hasQualifiedName("net.lingala.zip4j.io.inputstream", "ZipInputStream")
    }
  }

  /**
   * ```java
   * n = new net.lingala.zip4j.io.inputstream.ZipInputStream(inputStream);
   * this = n.read(readBuffer);
   * ```
   */
  class ReadInputStreamCall extends MethodAccess {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeZipInputStream and
      this.getMethod().hasName(["read", "readNBytes", "readAllBytes"])
    }

    /** Gets the buffer that decompressed bytes are written into. */
    Expr getAWriteArgument() { result = this.getArgument(0) }

    // Intended "controlled read" shape:
    // while ((readLen = zipInputStream.read(readBuffer)) != -1) {
    //   totallRead += readLen;
    //   if (totallRead > 1024 * 1024 * 4) {
    //     System.out.println("potential Bomb");
    //     break;
    //   }
    //   outputStream.write(readBuffer, 0, readLen);
    // }
    // TODO: I don't know why we can't reach totallRead with Local Tainting
    // the same behaviour exists in golang
    predicate isControlledRead() {
      exists(ComparisonExpr i |
        TaintTracking::localExprTaint([this, this.getArgument(2)], i.getAChildExpr*())
      )
    }
  }

  /**
   * ```java
   * n2 = new net.lingala.zip4j.io.inputstream.ZipInputStream(n1);
   * // or
   * n = new net.lingala.zip4j.io.inputstream.ZipInputStream(inputStream);
   * n2 = n.Method(n1);
   * ```
   * NOTE(review): the `hasName` filter below only admits read methods, so the
   * constructor case shown in this doc comment is never matched — confirm
   * whether the name restriction should be dropped (other modules have none).
   */
  predicate inputStreamAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
    exists(Call call |
      call.getCallee().getDeclaringType() instanceof TypeZipInputStream and
      call.getCallee().hasName(["read", "readNBytes", "readAllBytes"]) and
      call.getArgument(0) = n1.asExpr() and
      call = n2.asExpr()
    )
  }
}
/** Models the JDK `java.util.zip` decompression streams. */
module Zip {
  class TypeZipInputStream extends RefType {
    TypeZipInputStream() {
      this.getASupertype*()
          .hasQualifiedName("java.util.zip",
            ["ZipInputStream", "GZIPInputStream", "InflaterInputStream"])
    }
  }

  /** Taint step from a zip-stream call's first argument to the call itself. */
  predicate inputStreamAdditionalTaintStep(DataFlow::Node n1, DataFlow::Node n2) {
    exists(Call call |
      call.getCallee().getDeclaringType() instanceof TypeZipInputStream and
      call.getArgument(0) = n1.asExpr() and
      call = n2.asExpr()
    )
  }

  /** A `read`/`readNBytes`/`readAllBytes` call on a `java.util.zip` stream. */
  class ReadInputStreamCall extends MethodAccess {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeZipInputStream and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }

    /** Gets the buffer that decompressed bytes are written into. */
    Expr getAWriteArgument() { result = this.getArgument(0) }

    // look at Zip4j comments for this method
    predicate isControlledRead() { none() }
  }
}
/** Models Apache Commons IO utility methods that fully drain a stream. */
module CommonsIO {
  /**
   * A call to an `org.apache.commons.io.IOUtils` method that reads a stream to
   * completion (and therefore fully decompresses a wrapped compressed stream).
   */
  class IOUtils extends MethodAccess {
    IOUtils() {
      this.getMethod()
          .hasName([
            "copy", "copyLarge", "read", "readFully", "readLines", "toBufferedInputStream",
            "toByteArray", "toCharArray", "toString", "buffer"
          ]) and
      this.getMethod().getDeclaringType().hasQualifiedName("org.apache.commons.io", "IOUtils")
    }
  }
}
/**
 * Taint configuration for decompression bombs: remote or command-line input
 * flowing into a decompression read without any size/ratio check. The flow
 * state tracks which decompression library wrapped the tainted stream.
 */
module DecompressionBombsConfig implements DataFlow::StateConfigSig {
  class FlowState = DataFlow::FlowState;

  predicate isSource(DataFlow::Node source, FlowState state) {
    (
      source instanceof RemoteFlowSource
      or
      source instanceof CLIFlowSource
      or
      source instanceof FormRemoteFlowSource
      or
      source instanceof FileUploadRemoteFlowSource
    ) and
    // Each source starts in every library state; the state is narrowed by the
    // sink and additional-step predicates below.
    state = ["Zip4j", "Zip", "ApacheCommons", "XserialSnappy"]
  }

  predicate isBarrier(DataFlow::Node sanitizer, FlowState state) { none() }

  /**
   * if getNumArgument > 1 then we can check for sanitizers before reading each Buffer of byte
   * otherwise it can be hard to write sanitizers
   */
  predicate isSink(DataFlow::Node sink, FlowState state) {
    (
      exists(CommonsIO::IOUtils ma |
        sink.asExpr() = ma.getArgument(0) and
        state = ["Zip4j", "Zip", "ApacheCommons", "XserialSnappy"]
      )
      or
      sink.asExpr() = any(Zip4j::ReadInputStreamCall r).getAWriteArgument() and
      state = "Zip4j"
      or
      sink.asExpr() = any(Zip::ReadInputStreamCall r).getAWriteArgument() and
      state = "Zip"
      or
      sink.asExpr() = any(ApacheCommons::Factory::ReadInputStreamCall r).getAWriteArgument() and
      state = "ApacheCommons"
      or
      sink.asExpr() = any(ApacheCommons::Compressors::ReadInputStreamCall r).getAWriteArgument() and
      state = "ApacheCommons"
      or
      sink.asExpr() = any(ApacheCommons::Archivers::ReadInputStreamCall r).getAWriteArgument() and
      state = "ApacheCommons"
      or
      sink.asExpr() = any(XserialSnappy::ReadInputStreamCall r).getAWriteArgument() and
      state = "XserialSnappy"
    )
  }

  predicate isAdditionalFlowStep(
    DataFlow::Node nodeFrom, FlowState stateFrom, DataFlow::Node nodeTo, FlowState stateTo
  ) {
    (
      Zip::inputStreamAdditionalTaintStep(nodeFrom, nodeTo) and
      stateFrom = "Zip"
      or
      Zip4j::inputStreamAdditionalTaintStep(nodeFrom, nodeTo) and
      stateFrom = "Zip4j"
      or
      ApacheCommons::Factory::inputStreamAdditionalTaintStep(nodeFrom, nodeTo) and
      stateFrom = "ApacheCommons"
      or
      ApacheCommons::Compressors::inputStreamAdditionalTaintStep(nodeFrom, nodeTo) and
      stateFrom = "ApacheCommons"
      or
      ApacheCommons::Archivers::inputStreamAdditionalTaintStep(nodeFrom, nodeTo) and
      stateFrom = "ApacheCommons"
      or
      XserialSnappy::inputStreamAdditionalTaintStep(nodeFrom, nodeTo) and
      stateFrom = "XserialSnappy"
    ) and
    // NOTE(review): resetting the state to "" here looks suspicious — every
    // sink above requires a named library state, so flow through one of these
    // steps could no longer reach a library-specific sink. Confirm whether
    // `stateTo = stateFrom` was intended.
    stateTo = ""
  }
}
/** State-tracking taint flow for the decompression-bomb configuration above. */
module DecompressionBombsFlow = TaintTracking::GlobalWithState<DecompressionBombsConfig>;

import DecompressionBombsFlow::PathGraph

from DecompressionBombsFlow::PathNode source, DecompressionBombsFlow::PathNode sink
where DecompressionBombsFlow::flowPath(source, sink)
select sink.getNode(), source, sink, "This file extraction depends on a $@.", source.getNode(),
  "potentially untrusted source"

View File

@@ -0,0 +1,157 @@
import java
import semmle.code.java.dataflow.DataFlow
/** A remote source originating from a multipart/form request part. */
abstract class FormRemoteFlowSource extends DataFlow::Node { }

/** A remote source originating from an uploaded file (Commons FileUpload). */
abstract class FileUploadRemoteFlowSource extends DataFlow::Node { }

/** Extension point (Unit pattern): extra taint steps for Commons FileUpload. */
class CommonsFileUploadAdditionalTaintStep extends Unit {
  abstract predicate step(DataFlow::Node n1, DataFlow::Node n2);
}
module ApacheCommonsFileUpload {
/** Sinks that write uploaded data to an attacker-influenced filesystem path. */
module DangerousSink {
  /** `DiskFileItemFactory` / `FileItemFactory` or a subtype. */
  class TypeDiskFileItemFactory extends RefType {
    TypeDiskFileItemFactory() {
      this.getAStrictAncestor*()
          .hasQualifiedName("org.apache.commons.fileupload.disk", "DiskFileItemFactory")
      or
      this.getAStrictAncestor*()
          .hasQualifiedName("org.apache.commons.fileupload", "FileItemFactory")
    }
  }

  /** A method call that writes a file; `getAPathArgument()` is its path. */
  abstract class FileWriteSink extends MethodAccess {
    abstract Expr getAPathArgument();
  }

  /** `FileItem.write(file)` — writes the uploaded item to the given file. */
  class FileItemWrite extends FileWriteSink {
    FileItemWrite() {
      this.getReceiverType() instanceof RemoteFlowSource::TypeFileUpload and
      this.getCallee().hasName(["write"])
    }

    override Expr getAPathArgument() { result = this.getArgument(0) }
  }

  /** `DiskFileItemFactory.createItem(..., repository)` — 4th arg is the directory. */
  class DiskFileItemFactoryCreateItem extends FileWriteSink {
    DiskFileItemFactoryCreateItem() {
      this.getReceiverType() instanceof TypeDiskFileItemFactory and
      this.getCallee().hasName(["createItem"])
    }

    override Expr getAPathArgument() { result = this.getArgument(3) }
  }

  /** `DiskFileItemFactory.setRepository(dir)` — sets the temp-file directory. */
  class DiskFileItemFactorySetRepository extends FileWriteSink {
    DiskFileItemFactorySetRepository() {
      this.getReceiverType() instanceof TypeDiskFileItemFactory and
      this.getCallee().hasName(["setRepository"])
    }

    override Expr getAPathArgument() { result = this.getArgument(0) }
  }
}
/** Remote sources produced by the Commons FileUpload API. */
module RemoteFlowSource {
  class TypeServletFileUpload extends RefType {
    TypeServletFileUpload() {
      this.hasQualifiedName("org.apache.commons.fileupload.servlet", "ServletFileUpload")
    }
  }

  /** `org.apache.commons.fileupload.FileItem` or a subtype. */
  class TypeFileUpload extends RefType {
    TypeFileUpload() {
      this.getAStrictAncestor*().hasQualifiedName("org.apache.commons.fileupload", "FileItem")
    }
  }

  /** `org.apache.commons.fileupload.FileItemStream` or a subtype. */
  class TypeFileItemStream extends RefType {
    TypeFileItemStream() {
      this.getAStrictAncestor*()
          .hasQualifiedName("org.apache.commons.fileupload", "FileItemStream")
    }
  }

  /** The result of `ServletFileUpload.parseRequest(...)`. */
  class ServletFileUpload extends FileUploadRemoteFlowSource {
    ServletFileUpload() {
      exists(MethodAccess ma |
        ma.getReceiverType() instanceof TypeServletFileUpload and
        ma.getCallee().hasName(["parseRequest"]) and
        this.asExpr() = ma
      )
    }
  }

  /** Data read from a parsed `FileItem` (content, name, field name, ...). */
  private class FileItemRemoteSource extends FileUploadRemoteFlowSource {
    FileItemRemoteSource() {
      exists(MethodAccess ma |
        ma.getReceiverType() instanceof TypeFileUpload and
        ma.getCallee()
            .hasName([
              "getInputStream", "getFieldName", "getContentType", "get", "getName", "getString"
            ]) and
        this.asExpr() = ma
      )
    }
  }

  /** Data read from a streaming `FileItemStream`. */
  private class FileItemStreamRemoteSource extends FileUploadRemoteFlowSource {
    FileItemStreamRemoteSource() {
      exists(MethodAccess ma |
        ma.getReceiverType() instanceof TypeFileItemStream and
        ma.getCallee().hasName(["getContentType", "getFieldName", "getName", "openStream"]) and
        this.asExpr() = ma
      )
    }
  }
}
/** Taint steps through the `org.apache.commons.fileupload.util.Streams` helpers. */
module Util {
  class TypeStreams extends RefType {
    TypeStreams() { this.hasQualifiedName("org.apache.commons.fileupload.util", "Streams") }
  }

  /** `Streams.asString(in)` — taints the returned string from the input stream. */
  private class AsStringAdditionalTaintStep extends CommonsFileUploadAdditionalTaintStep {
    override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
      exists(Call call |
        call.getCallee().getDeclaringType() instanceof TypeStreams and
        call.getArgument(0) = n1.asExpr() and
        call = n2.asExpr() and
        call.getCallee().hasName("asString")
      )
    }
  }

  /** `Streams.copy(in, out, ...)` — taints the output stream from the input stream. */
  private class CopyAdditionalTaintStep extends CommonsFileUploadAdditionalTaintStep {
    override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
      exists(Call call |
        call.getCallee().getDeclaringType() instanceof TypeStreams and
        call.getArgument(0) = n1.asExpr() and
        call.getArgument(1) = n2.asExpr() and
        call.getCallee().hasName("copy")
      )
    }
  }
}
}
/** Remote sources from the Servlet multipart API (`javax`/`jakarta` `Part`). */
module ServletRemoteMultiPartSources {
  class TypePart extends RefType {
    TypePart() { this.hasQualifiedName(["javax.servlet.http", "jakarta.servlet.http"], "Part") }
  }

  /** Data read from a request `Part` (stream, headers, file name, ...). */
  private class ServletPartCalls extends FormRemoteFlowSource {
    ServletPartCalls() {
      exists(MethodAccess ma |
        ma.getReceiverType() instanceof TypePart and
        ma.getCallee()
            .hasName([
              "getInputStream", "getName", "getContentType", "getHeader", "getHeaders",
              "getHeaderNames", "getSubmittedFileName", "write"
            ]) and
        this.asExpr() = ma
      )
    }
  }
}

View File

@@ -0,0 +1,22 @@
package org.example;
import java.nio.file.StandardCopyOption;
import java.util.Enumeration;
import java.io.IOException;
import java.util.zip.*;
import java.util.zip.ZipEntry;
import java.io.File;
import java.nio.file.Files;
// qhelp "bad" example: extracts every entry of a user-supplied zip without any
// size/ratio limit, so a crafted archive (zip bomb) can exhaust disk or memory.
// The flaw is intentional — do not add a size check here.
public static void ZipInputStreamUnSafe(String filename) throws IOException {
  File f = new File(filename);
  try (ZipFile zipFile = new ZipFile(f)) {
    Enumeration<? extends ZipEntry> entries = zipFile.entries();
    while (entries.hasMoreElements()) {
      ZipEntry ze = entries.nextElement();
      File out = new File("./tmp/tmp.txt");
      // Files.copy drains the entry stream to completion — unbounded output.
      Files.copy(zipFile.getInputStream(ze), out.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
  }
}

View File

@@ -0,0 +1,33 @@
import java.util.zip.*;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
// qhelp "good" example: extracts a zip while tracking the total number of
// decompressed bytes and aborting once a fixed threshold is exceeded,
// defeating decompression-bomb archives.
public class Main {
  public static void ZipInputStreamSafe(String filename) throws IOException {
    int UncompressedSizeThreshold = 10 * 1024 * 1024; // 10MB
    int BUFFERSIZE = 256;
    FileInputStream fis = new FileInputStream(filename);
    try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(fis))) {
      ZipEntry entry;
      while ((entry = zis.getNextEntry()) != null) {
        int count;
        byte[] data = new byte[BUFFERSIZE];
        FileOutputStream fos = new FileOutputStream(entry.getName());
        BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFERSIZE);
        int totalRead = 0;
        // Read in bounded chunks so the running total can be checked per chunk.
        while ((count = zis.read(data, 0, BUFFERSIZE)) != -1) {
          totalRead = totalRead + count;
          if (totalRead > UncompressedSizeThreshold) {
            System.out.println("This Compressed file can be a bomb!");
            break;
          }
          dest.write(data, 0, count);
        }
        dest.flush();
        dest.close();
        zis.closeEntry();
      }
    }
  }
}

View File

@@ -0,0 +1 @@
experimental/Security/CWE/CWE-522-DecompressionBombs/DecompressionBombFlowState.ql

View File

@@ -0,0 +1,79 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.example</groupId>
<artifactId>BombsRemoteSourceMavenJavax2</artifactId>
<packaging>war</packaging>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>11</source>
<target>11</target>
</configuration>
</plugin>
</plugins>
</build>
<version>1.0-SNAPSHOT</version>
<name>BombsRemoteSourceMavenJavax2 Maven Webapp</name>
<url>http://maven.apache.org</url>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>4.0.1</version>
<scope>provided</scope>
</dependency>
<!-- https://mvnrepository.com/artifact/commons-io/commons-io -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.11.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/net.lingala.zip4j/zip4j -->
<dependency>
<groupId>net.lingala.zip4j</groupId>
<artifactId>zip4j</artifactId>
<version>2.11.5</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.github.luben/zstd-jni -->
<dependency>
<groupId>com.github.luben</groupId>
<artifactId>zstd-jni</artifactId>
<version>1.5.5-1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.tukaani/xz -->
<dependency>
<groupId>org.tukaani</groupId>
<artifactId>xz</artifactId>
<version>1.9</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.lz4/lz4-java -->
<dependency>
<groupId>org.lz4</groupId>
<artifactId>lz4-java</artifactId>
<version>1.8.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.xerial.snappy/snappy-java -->
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.9.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-compress -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.23.0</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,130 @@
package com.Bombs;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.commons.compress.archivers.*;
import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.compress.compressors.gzip.*;
import org.apache.commons.io.IOUtils;
/**
 * Test fixture for the decompression-bomb CodeQL query: exercises the Apache
 * Commons Compress APIs (compressor streams, archive streams, and both stream
 * factories) with an untrusted input stream and no size/ratio checks, so every
 * read loop below is an expected alert.
 */
public class CommonsCompressHandler {
  public static void commonsCompressorInputStream(InputStream inputStream) throws IOException {
    BufferedInputStream in = new BufferedInputStream(inputStream);
    OutputStream out = Files.newOutputStream(Path.of("tmpfile"));
    GzipCompressorInputStream gzIn =
        new org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream(in);
    // for testing: constructors of every supported compressor, results discarded
    new org.apache.commons.compress.compressors.brotli.BrotliCompressorInputStream(in);
    new org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream(in);
    new org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream(in);
    new org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream(in);
    new org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream(in);
    new org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream(in);
    new org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream(in);
    new org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream(in);
    new org.apache.commons.compress.compressors.xz.XZCompressorInputStream(in);
    new org.apache.commons.compress.compressors.z.ZCompressorInputStream(in);
    new org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream(in);
    int buffersize = 4096;
    final byte[] buffer = new byte[buffersize];
    int n = 0;
    // Unbounded read loop — decompression-bomb sink.
    while (-1 != (n = gzIn.read(buffer))) {
      out.write(buffer, 0, n);
    }
    out.close();
    gzIn.close();
    // IOUtils.copy drains the stream to completion — also a sink.
    try (GzipCompressorInputStream gzIn2 =
        new org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream(in)) {
      File f = new File("tmpfile");
      try (OutputStream o = Files.newOutputStream(f.toPath())) {
        IOUtils.copy(gzIn2, o);
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  static void commonsCompressArchiveInputStream(InputStream inputStream) throws ArchiveException {
    // for testing: constructors of the other archivers, results discarded
    new org.apache.commons.compress.archivers.ar.ArArchiveInputStream(inputStream);
    new org.apache.commons.compress.archivers.arj.ArjArchiveInputStream(inputStream);
    new org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream(inputStream);
    new org.apache.commons.compress.archivers.jar.JarArchiveInputStream(inputStream);
    try (org.apache.commons.compress.archivers.zip.ZipArchiveInputStream zipInputStream =
        new org.apache.commons.compress.archivers.zip.ZipArchiveInputStream(inputStream)) {
      ArchiveEntry entry = null;
      while ((entry = zipInputStream.getNextEntry()) != null) {
        if (!zipInputStream.canReadEntryData(entry)) {
          continue;
        }
        File f = new File("tmpfile");
        try (OutputStream o = Files.newOutputStream(f.toPath())) {
          IOUtils.copy(zipInputStream, o);
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  static void commonsCompressArchiveInputStream2(InputStream inputStream) {
    byte[] readBuffer = new byte[4096];
    try (org.apache.commons.compress.archivers.zip.ZipArchiveInputStream zipInputStream =
        new org.apache.commons.compress.archivers.zip.ZipArchiveInputStream(inputStream)) {
      ArchiveEntry entry = null;
      while ((entry = zipInputStream.getNextEntry()) != null) {
        if (!zipInputStream.canReadEntryData(entry)) {
          continue;
        }
        File f = new File("tmpfile");
        try (OutputStream outputStream = new FileOutputStream(f)) {
          int readLen;
          // Unbounded per-entry read loop — sink.
          while ((readLen = zipInputStream.read(readBuffer)) != -1) {
            outputStream.write(readBuffer, 0, readLen);
          }
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  static void commonsCompressArchiveStreamFactory(InputStream inputStream)
      throws IOException, ArchiveException {
    BufferedInputStream bin = new BufferedInputStream(inputStream);
    // Stream created via ArchiveStreamFactory — exercises the Factory model.
    ArchiveInputStream zipInputStream = new ArchiveStreamFactory().createArchiveInputStream(bin);
    ArchiveEntry entry = null;
    byte[] readBuffer = new byte[4096];
    while ((entry = zipInputStream.getNextEntry()) != null) {
      if (!zipInputStream.canReadEntryData(entry)) {
        continue;
      }
      File f = new File("tmpfile");
      try (OutputStream outputStream = new FileOutputStream(f)) {
        int readLen;
        while ((readLen = zipInputStream.read(readBuffer)) != -1) {
          outputStream.write(readBuffer, 0, readLen);
        }
      }
    }
  }

  static void commonsCompressCompressorStreamFactory(InputStream inputStream)
      throws IOException, CompressorException {
    BufferedInputStream bin = new BufferedInputStream(inputStream);
    // Stream created via CompressorStreamFactory — exercises the Factory model.
    CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream(bin);
    OutputStream out = Files.newOutputStream(Path.of("tmpfile"));
    int buffersize = 4096;
    final byte[] buffer = new byte[buffersize];
    int n = 0;
    while (-1 != (n = in.read(buffer))) {
      out.write(buffer, 0, n);
    }
    out.close();
    in.close();
  }
}

View File

@@ -0,0 +1,65 @@
package com.Bombs;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.compressors.CompressorException;
import static com.Bombs.CommonsCompressHandler.*;
import static com.Bombs.SnappyHandler.*;
import static com.Bombs.Zip4jHandler.*;
import static com.Bombs.ZipHandler.*;
import java.io.*;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.Part;
// CodeQL test driver: routes one remote multipart upload ("zipFile") into every
// extraction helper so the decompression-bomb query has a single taint source
// flowing to all sinks. Each helper re-reads the part via getInputStream().
@WebServlet(
    name = "helloServlet",
    urlPatterns = {"/hello-servlet"})
@MultipartConfig()
public class HelloServlet extends HttpServlet {
  public void init() {}

  /**
   * Feeds the uploaded "zipFile" part to each archive/compressor handler in turn.
   *
   * @param request multipart request carrying the attacker-controlled archive
   * @param response receives a trivial HTML page once all handlers ran
   * @throws IOException if any handler's extraction fails
   * @throws ServletException if the multipart part cannot be read
   */
  public void doPost(HttpServletRequest request, HttpServletResponse response)
      throws IOException, ServletException {
    response.setContentType("text/html");
    // Remote, untrusted upload: the taint source for every call below.
    Part remoteFile = request.getPart("zipFile");
    // Zip (JDK java.util.zip variants; mix of direct and re-fetched parts)
    ZipInputStreamSafe2(remoteFile.getInputStream());
    ZipInputStreamSafe(request.getPart("zipFile").getInputStream());
    ZipInputStreamUnsafe(remoteFile.getInputStream());
    GZipInputStreamUnsafe(request.getPart("zipFile").getInputStream());
    InflaterInputStreamUnsafe(request.getPart("zipFile").getInputStream());
    // Zip4j
    zip4jZipInputStream(remoteFile.getInputStream());
    zip4jZipInputStreamSafe(remoteFile.getInputStream());
    // SnappyZip
    SnappyZipInputStream(remoteFile.getInputStream());
    // apache Commons (compress)
    commonsCompressArchiveInputStream2(remoteFile.getInputStream());
    commonsCompressorInputStream(remoteFile.getInputStream());
    try {
      commonsCompressArchiveInputStream(remoteFile.getInputStream());
      commonsCompressArchiveStreamFactory(remoteFile.getInputStream());
    } catch (ArchiveException e) {
      throw new RuntimeException(e);
    }
    try {
      commonsCompressCompressorStreamFactory(remoteFile.getInputStream());
    } catch (CompressorException e) {
      throw new RuntimeException(e);
    }
    PrintWriter out = response.getWriter();
    out.println("<html><body>end</body></html>");
  }

  public void destroy() {}
}

View File

@@ -0,0 +1,22 @@
package com.Bombs;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.xerial.snappy.SnappyInputStream;
import net.lingala.zip4j.model.LocalFileHeader;
/** CodeQL fixture: Snappy decompression with no output-size limit. */
public class SnappyHandler {
  /**
   * Inflates a Snappy-compressed stream into the fixed file "extractedFile".
   * The copy is unbounded on purpose (decompression-bomb sink).
   */
  public static void SnappyZipInputStream(InputStream inputStream) throws IOException {
    byte[] chunk = new byte[4096];
    try (SnappyInputStream snappy = new SnappyInputStream(inputStream);
        OutputStream sink = Files.newOutputStream(Paths.get("extractedFile"))) {
      int n;
      while ((n = snappy.read(chunk)) != -1) {
        sink.write(chunk, 0, n);
      }
    }
  }
}

View File

@@ -0,0 +1,45 @@
package com.Bombs;
import net.lingala.zip4j.model.LocalFileHeader;
import net.lingala.zip4j.io.inputstream.ZipInputStream;
import java.io.*;
/** CodeQL fixture: zip4j extraction, one unbounded variant and one size-capped variant. */
public class Zip4jHandler {
  /**
   * Extracts every entry of a zip4j stream to a file named by the entry header.
   * NOTE(review): the file name comes straight from the archive (zip-slip sink)
   * and the copy is unbounded -- both intentional for the query tests.
   */
  public static void zip4jZipInputStream(InputStream inputStream) throws IOException {
    byte[] chunk = new byte[4096];
    try (ZipInputStream zin = new ZipInputStream(inputStream)) {
      for (LocalFileHeader header = zin.getNextEntry();
          header != null;
          header = zin.getNextEntry()) {
        File out = new File(header.getFileName());
        try (OutputStream sink = new FileOutputStream(out)) {
          int n;
          while ((n = zin.read(chunk)) != -1) {
            sink.write(chunk, 0, n);
          }
        }
      }
    }
  }

  /**
   * Same extraction, but bails out of an entry once more than 4 MiB has been
   * read from it; the over-limit chunk is counted before the check, so it is
   * never written.
   */
  public static void zip4jZipInputStreamSafe(InputStream inputStream) throws IOException {
    byte[] chunk = new byte[4096];
    try (ZipInputStream zin = new ZipInputStream(inputStream)) {
      LocalFileHeader header;
      while ((header = zin.getNextEntry()) != null) {
        File out = new File(header.getFileName());
        try (OutputStream sink = new FileOutputStream(out)) {
          int total = 0;
          int n;
          while ((n = zin.read(chunk)) != -1) {
            total += n;
            if (total > 1024 * 1024 * 4) {
              System.out.println("potential Bomb");
              break;
            }
            sink.write(chunk, 0, n);
          }
        }
      }
    }
  }
}

View File

@@ -0,0 +1,130 @@
package com.Bombs;
import java.io.*;
import java.io.FileOutputStream;
import java.util.zip.*;
/**
 * CodeQL test fixture: JDK java.util.zip extraction variants, deliberately
 * paired as "safe" (bounded) and "unsafe" (unbounded) decompression sinks.
 * The *Unsafe methods must remain unbounded for the query tests to match.
 */
public class ZipHandler {
  /**
   * Bomb-hardened extraction: caps total output bytes (TOOBIG) and entry count
   * (TOOMANY). Entries are written to the fixed path "/tmp/tmptmp", so entry
   * names cannot cause path traversal here.
   */
  public static void ZipInputStreamSafe(InputStream inputStream) throws IOException {
    final int BUFFER = 512;
    final long TOOBIG = 0x6400000; // Max size of unzipped data, 100MB
    final int TOOMANY = 1024;
    // FileInputStream fis = new FileInputStream(filename);
    // NOTE(review): checkSum/gzis are created but never read from -- the
    // ZipInputStream below wraps inputStream directly, so the CRC is never
    // updated. Looks like dead code; confirm intent before removing.
    CRC32 checkSum = new CRC32();
    CheckedInputStream gzis = new CheckedInputStream(inputStream, checkSum);
    try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream))) {
      ZipEntry entry;
      int entries = 0;
      long total = 0;
      while ((entry = zis.getNextEntry()) != null) {
        System.out.println("Extracting: " + entry);
        int count;
        byte[] data = new byte[BUFFER];
        if (entry.isDirectory()) {
          System.out.println("Creating directory " + entry.getName());
          continue;
        }
        FileOutputStream fos = new FileOutputStream("/tmp/tmptmp");
        BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
        // Stop reading once writing another full buffer could exceed TOOBIG.
        while (total + BUFFER <= TOOBIG && (count = zis.read(data, 0, BUFFER)) != -1) {
          dest.write(data, 0, count);
          total += count;
        }
        dest.flush();
        dest.close();
        zis.closeEntry();
        entries++;
        if (entries > TOOMANY) {
          throw new IllegalStateException("Too many files to unzip.");
        }
        if (total + BUFFER > TOOBIG) {
          throw new IllegalStateException("File being unzipped is too big.");
        }
      }
    }
  }

  // it seems that previously getSize could be bypassed by forged headers, so I tested following
  // method with a forged header zip bomb, the getSize() return the forged header but read method
  // will read bytes until the getSize() value that this makes getSize() safe for now.
  /**
   * Bomb-safe variant relying on ZipEntry.getSize(): rejects entries whose
   * declared size exceeds TOOBIG or is unknown (-1).
   * NOTE(review): the output file is named by entry.getName(), an untrusted
   * archive value (potential zip-slip); presumably intentional as a separate
   * sink for the path-traversal fixtures -- confirm.
   */
  public static void ZipInputStreamSafe2(InputStream inputStream) throws IOException {
    int BUFFER = 512;
    int TOOBIG = 100 * 1024 * 1024; // 100MB
    // FileInputStream fis = new FileInputStream(filename);
    try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream))) {
      ZipEntry entry;
      while ((entry = zis.getNextEntry()) != null) {
        System.out.println("Extracting: " + entry);
        int count;
        byte[] data = new byte[BUFFER];
        // Write the files to the disk, but only if the file is not insanely big
        if (entry.getSize() > TOOBIG) {
          throw new IllegalStateException("File to be unzipped is huge.");
        }
        if (entry.getSize() == -1) {
          throw new IllegalStateException("File to be unzipped might be huge.");
        }
        FileOutputStream fos = new FileOutputStream(entry.getName());
        BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
        while ((count = zis.read(data, 0, BUFFER)) != -1) {
          dest.write(data, 0, count);
        }
        dest.flush();
        dest.close();
        zis.closeEntry();
      }
    }
  }

  /**
   * Intentionally unsafe extraction: no size/entry limits (decompression bomb)
   * and the output path is the untrusted entry name (zip slip). Query sink.
   */
  public static void ZipInputStreamUnsafe(InputStream inputStream) throws IOException {
    int BUFFER = 512;
    // FileInputStream fis = new FileInputStream(filename);
    try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream))) {
      ZipEntry entry;
      while ((entry = zis.getNextEntry()) != null) {
        System.out.println("Extracting: " + entry);
        int count;
        byte[] data = new byte[BUFFER];
        // Write the files to the disk
        FileOutputStream fos = new FileOutputStream(entry.getName());
        BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
        while ((count = zis.read(data, 0, BUFFER)) != -1) {
          dest.write(data, 0, count);
        }
        dest.flush();
        dest.close();
        zis.closeEntry();
      }
    }
  }

  /**
   * Intentionally unsafe GZIP decompression: unbounded copy of the single
   * GZIP member into the fixed file "/tmp/tmp". Query sink.
   */
  public static void GZipInputStreamUnsafe(InputStream inputStream) throws java.io.IOException {
    int BUFFER = 512;
    try (GZIPInputStream gzis = new GZIPInputStream(inputStream)) {
      int count;
      byte[] data = new byte[BUFFER];
      FileOutputStream fos = new FileOutputStream("/tmp/tmp");
      BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
      while ((count = gzis.read(data, 0, BUFFER)) != -1) {
        dest.write(data, 0, count);
      }
      dest.flush();
      dest.close();
    }
  }

  /**
   * Intentionally unsafe raw-DEFLATE decompression: unbounded copy into the
   * fixed file "/tmp/tmp". Query sink.
   */
  public static void InflaterInputStreamUnsafe(InputStream inputStream) throws java.io.IOException {
    int BUFFER = 512;
    try (InflaterInputStream Izis = new InflaterInputStream(inputStream)) {
      int count;
      byte[] data = new byte[BUFFER];
      FileOutputStream fos = new FileOutputStream("/tmp/tmp");
      BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
      while ((count = Izis.read(data, 0, BUFFER)) != -1) {
        dest.write(data, 0, count);
      }
      dest.flush();
      dest.close();
    }
  }
}

View File

@@ -0,0 +1,7 @@
<!DOCTYPE web-app PUBLIC
"-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
"http://java.sun.com/dtd/web-app_2_3.dtd" >
<web-app>
<display-name>Archetype Created Web Application</display-name>
</web-app>

View File

@@ -0,0 +1,5 @@
<html>
<body>
<h2>Hello World!</h2>
</body>
</html>

View File

@@ -28,7 +28,7 @@ class CloseWriter {
try {
// OutputStreamWriter may throw an exception, in which case the ...
writer = new OutputStreamWriter(
// ... FileOutputStream is not closed by the finally block
// ... FileOutputStream is not closed by the finally block
new FileOutputStream("C:\\test.txt"), "UTF-8");
writer.write("test");
}