Merge pull request #13555 from am0o0/amammad-java-bombs

Java: Decompression Bombs
This commit is contained in:
Owen Mansel-Chan
2024-07-31 14:55:28 +01:00
committed by GitHub
82 changed files with 2611 additions and 0 deletions

View File

@@ -0,0 +1,25 @@
package org.example;
import java.nio.file.StandardCopyOption;
import java.util.Enumeration;
import java.io.IOException;
import java.util.zip.*;
import java.util.zip.ZipEntry;
import java.io.File;
import java.nio.file.Files;
class BadExample {
  /**
   * BAD: extracts every entry of a zip archive with {@code Files.copy}
   * without checking the decompressed size, so a small "zip bomb" can
   * exhaust disk space (uncontrolled decompression, CWE-409).
   *
   * @param filename path of the zip archive to extract
   * @throws IOException if the archive cannot be read or an entry cannot be copied
   */
  public static void ZipInputStreamUnSafe(String filename) throws IOException {
    File f = new File(filename);
    // try-with-resources closes the ZipFile automatically.
    try (ZipFile zipFile = new ZipFile(f)) {
      Enumeration<? extends ZipEntry> entries = zipFile.entries();
      while (entries.hasMoreElements()) {
        ZipEntry ze = entries.nextElement();
        // Each entry is copied to the same fixed path; no size or
        // compression-ratio check is performed before or during the copy.
        File out = new File("./tmp/tmp.txt");
        Files.copy(zipFile.getInputStream(ze), out.toPath(), StandardCopyOption.REPLACE_EXISTING);
      }
    }
  }
}

View File

@@ -0,0 +1,38 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Extracting compressed files that use any compression algorithm, such as gzip, can cause a denial-of-service attack.</p>
<p>Attackers can create a huge file by repeating a single byte and compressing it into a small file.</p>
</overview>
<recommendation>
<p>When decompressing a user-provided compressed file, verify the decompression ratio, or decompress the file within a loop byte by byte, so that the total decompressed size can be checked in each iteration of the loop.</p>
</recommendation>
<example>
<p>
In the following example, the decompressed file size is not checked before decompression, exposing the application to a denial of service.
</p>
<sample src="BadExample.java" />
<p>
A better approach is shown in the following example, where a ZIP file is read within a loop and a size threshold is checked every cycle.
</p>
<sample src="GoodExample.java"/>
</example>
<references>
<li>
<a href="https://github.com/advisories/GHSA-47vx-fqr5-j2gw">CVE-2022-4565</a>
</li>
<li>
David Fifield: <a href="https://www.bamsoftware.com/hacks/zipbomb/">A better zip bomb</a>.
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,21 @@
/**
 * @name Uncontrolled file decompression
 * @description Decompressing user-controlled files without checking the compression ratio may allow attackers to perform denial-of-service attacks.
 * @kind path-problem
 * @problem.severity error
 * @security-severity 7.8
 * @precision high
 * @id java/uncontrolled-file-decompression
 * @tags security
 *       experimental
 *       external/cwe/cwe-409
 */

import java
import experimental.semmle.code.java.security.DecompressionBombQuery
import DecompressionBombsFlow::PathGraph

// Report each taint path from a remote (user-controlled) source to a
// decompression sink that lacks a decompressed-size check.
from DecompressionBombsFlow::PathNode source, DecompressionBombsFlow::PathNode sink
where DecompressionBombsFlow::flowPath(source, sink)
select sink.getNode(), source, sink, "This file extraction depends on a $@.", source.getNode(),
  "potentially untrusted source"

View File

@@ -0,0 +1,33 @@
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.*;
import java.util.zip.ZipEntry;
public class GoodExample {
  /**
   * GOOD: extracts a zip archive while tracking the total number of
   * decompressed bytes and aborting once a fixed threshold is exceeded,
   * which defends against decompression bombs (CWE-409).
   *
   * @param filename path of the zip archive to extract
   * @throws IOException if the archive cannot be read or an entry cannot be written
   */
  public static void ZipInputStreamSafe(String filename) throws IOException {
    final int UncompressedSizeThreshold = 10 * 1024 * 1024; // 10MB
    final int BUFFERSIZE = 256;
    FileInputStream fis = new FileInputStream(filename);
    try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(fis))) {
      ZipEntry entry;
      while ((entry = zis.getNextEntry()) != null) {
        int count;
        byte[] data = new byte[BUFFERSIZE];
        // try-with-resources guarantees the output stream is flushed and
        // closed even if reading the entry throws (the original leaked it).
        try (BufferedOutputStream dest =
            new BufferedOutputStream(new FileOutputStream(entry.getName()), BUFFERSIZE)) {
          int totalRead = 0;
          while ((count = zis.read(data, 0, BUFFERSIZE)) != -1) {
            totalRead = totalRead + count;
            // Stop as soon as the cumulative decompressed size exceeds the threshold.
            if (totalRead > UncompressedSizeThreshold) {
              System.out.println("This Compressed file can be a bomb!");
              break;
            }
            dest.write(data, 0, count);
          }
        }
        zis.closeEntry();
      }
    }
  }
}

View File

@@ -0,0 +1,379 @@
import java
private import semmle.code.java.dataflow.TaintTracking
module DecompressionBomb {
  /**
   * A decompression bomb sink.
   *
   * Extend this class to add new decompression bomb sinks.
   */
  abstract class Sink extends DataFlow::Node { }

  /**
   * A unit class for adding additional flow steps for data-flow or
   * taint-tracking configurations.
   *
   * Extend this class to add new additional taint steps.
   */
  // Fixed: a class containing an abstract predicate must itself be abstract.
  abstract class AdditionalStep extends Unit {
    abstract predicate step(DataFlow::Node n1, DataFlow::Node n2);
  }

  /** A call that reads bytes from a decompression input stream. */
  abstract class BombReadInputStreamCall extends MethodCall { }

  /** The qualifier of a byte-reading call on a decompression stream is a sink. */
  private class ReadInputStreamQualifierSink extends DecompressionBomb::Sink {
    ReadInputStreamQualifierSink() { this.asExpr() = any(BombReadInputStreamCall r).getQualifier() }
  }
}
/**
 * Provides decompression sinks and additional taint steps for the `org.xerial.snappy` package.
 */
module XerialSnappy {
  /** The `org.xerial.snappy.SnappyInputStream` class, or any of its subtypes. */
  class TypeInputStream extends RefType {
    TypeInputStream() {
      this.getASupertype*().hasQualifiedName("org.xerial.snappy", "SnappyInputStream")
    }
  }

  /**
   * A call to a byte-reading method (`read`, `readNBytes`, `readAllBytes`)
   * whose receiver is a `SnappyInputStream`.
   */
  class ReadInputStreamCall extends DecompressionBomb::BombReadInputStreamCall {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeInputStream and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }
  }

  /**
   * An additional taint step from `n1` to `n2` for
   * `SnappyInputStream n2 = new SnappyInputStream(n1)`,
   * i.e. taint on the wrapped stream flows to the decompression stream.
   */
  private class InputStreamAdditionalTaintStep extends DecompressionBomb::AdditionalStep {
    override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
      exists(ConstructorCall call |
        call.getCallee().getDeclaringType() instanceof TypeInputStream and
        call.getArgument(0) = n1.asExpr() and
        call = n2.asExpr()
      )
    }
  }
}
/**
 * Provides decompression sinks and additional taint steps for the `org.apache.commons.compress` package.
 */
module ApacheCommons {
  /** The `org.apache.commons.compress.archivers.ArchiveInputStream` class, or any of its subtypes. */
  class TypeArchiveInputStream extends RefType {
    TypeArchiveInputStream() {
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.archivers", "ArchiveInputStream")
    }
  }

  /** The `org.apache.commons.compress.compressors.CompressorInputStream` class, or any of its subtypes. */
  class TypeCompressorInputStream extends RefType {
    TypeCompressorInputStream() {
      this.getASupertype*()
          .hasQualifiedName("org.apache.commons.compress.compressors", "CompressorInputStream")
    }
  }

  /**
   * Provides decompression sinks and additional taint steps for the
   * `org.apache.commons.compress.compressors.*` types.
   */
  module Compressors {
    /** A format-specific subtype of `CompressorInputStream`. */
    class TypeCompressors extends RefType {
      TypeCompressors() {
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.gzip",
              "GzipCompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.brotli",
              "BrotliCompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.bzip2",
              "BZip2CompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.deflate",
              "DeflateCompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.deflate64",
              "Deflate64CompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.lz4",
              "BlockLZ4CompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.lzma",
              "LZMACompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.pack200",
              "Pack200CompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.snappy",
              "SnappyCompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.xz",
              "XZCompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.z", "ZCompressorInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors.zstandard",
              "ZstdCompressorInputStream")
      }
    }

    /**
     * A call to a byte-reading method (`read`, `readNBytes`, `readAllBytes`)
     * whose receiver is a `*CompressorInputStream`.
     */
    class ReadInputStreamCall extends DecompressionBomb::BombReadInputStreamCall {
      ReadInputStreamCall() {
        this.getReceiverType() instanceof TypeCompressors and
        this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
      }
    }

    /**
     * An additional taint step from `n1` to `n2` for
     * `GzipCompressorInputStream n2 = new GzipCompressorInputStream(n1)`
     * (and the other `*CompressorInputStream` constructors).
     */
    private class CompressorsAndArchiversAdditionalTaintStep extends DecompressionBomb::AdditionalStep
    {
      override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
        exists(ConstructorCall call |
          call.getCallee().getDeclaringType() instanceof TypeCompressors and
          call.getArgument(0) = n1.asExpr() and
          call = n2.asExpr()
        )
      }
    }
  }

  /**
   * Provides decompression sinks and additional taint steps for types from the
   * `org.apache.commons.compress.archivers.*` packages.
   */
  module Archivers {
    /** A format-specific subtype of `ArchiveInputStream`. */
    class TypeArchivers extends RefType {
      TypeArchivers() {
        // Fixed: the `ArArchiveInputStream` disjunct was duplicated.
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.archivers.ar", "ArArchiveInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.archivers.arj", "ArjArchiveInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.archivers.cpio", "CpioArchiveInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.archivers.jar", "JarArchiveInputStream") or
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.archivers.zip", "ZipArchiveInputStream")
      }
    }

    /**
     * A call to a byte-reading method (`read`, `readNBytes`, `readAllBytes`)
     * whose receiver is a `*ArchiveInputStream`.
     */
    class ReadInputStreamCall extends DecompressionBomb::BombReadInputStreamCall {
      ReadInputStreamCall() {
        this.getReceiverType() instanceof TypeArchivers and
        this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
      }
    }

    /**
     * An additional taint step from `n1` to `n2` for
     * `ZipArchiveInputStream n2 = new ZipArchiveInputStream(n1)`
     * (and the other `*ArchiveInputStream` constructors).
     */
    private class CompressorsAndArchiversAdditionalTaintStep extends DecompressionBomb::AdditionalStep
    {
      override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
        exists(ConstructorCall call |
          call.getCallee().getDeclaringType() instanceof TypeArchivers and
          call.getArgument(0) = n1.asExpr() and
          call = n2.asExpr()
        )
      }
    }
  }

  /**
   * Provides decompression sinks and additional taint steps for the
   * `CompressorStreamFactory` and `ArchiveStreamFactory` types.
   */
  module Factory {
    /** The `org.apache.commons.compress.archivers.ArchiveStreamFactory` class, or any of its subtypes. */
    class TypeArchivers extends RefType {
      TypeArchivers() {
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.archivers", "ArchiveStreamFactory")
      }
    }

    /** The `org.apache.commons.compress.compressors.CompressorStreamFactory` class, or any of its subtypes. */
    class TypeCompressors extends RefType {
      TypeCompressors() {
        this.getASupertype*()
            .hasQualifiedName("org.apache.commons.compress.compressors", "CompressorStreamFactory")
      }
    }

    /**
     * An additional taint step from `n1` to `n2` for
     * `CompressorInputStream n2 = new CompressorStreamFactory().createCompressorInputStream(n1)` or
     * `ArchiveInputStream n2 = new ArchiveStreamFactory().createArchiveInputStream(n1)`.
     */
    private class CompressorsAndArchiversAdditionalTaintStep extends DecompressionBomb::AdditionalStep
    {
      override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
        exists(MethodCall call |
          (
            call.getCallee().getDeclaringType() instanceof TypeCompressors
            or
            call.getCallee().getDeclaringType() instanceof TypeArchivers
          ) and
          call.getArgument(0) = n1.asExpr() and
          call = n2.asExpr()
        )
      }
    }

    /**
     * A call to a byte-reading method (`read`, `readNBytes`, `readAllBytes`)
     * whose receiver is a `CompressorInputStream` or `ArchiveInputStream`.
     */
    class ReadInputStreamCall extends DecompressionBomb::BombReadInputStreamCall {
      ReadInputStreamCall() {
        (
          this.getReceiverType() instanceof TypeArchiveInputStream
          or
          this.getReceiverType() instanceof TypeCompressorInputStream
        ) and
        this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
      }
    }
  }
}
/**
 * Provides decompression sinks and additional taint steps for the `net.lingala.zip4j.io` package.
 */
module Zip4j {
  /** The `net.lingala.zip4j.io.inputstream.ZipInputStream` class. */
  class TypeZipInputStream extends RefType {
    TypeZipInputStream() {
      this.hasQualifiedName("net.lingala.zip4j.io.inputstream", "ZipInputStream")
    }
  }

  /**
   * A call to a byte-reading method (`read`, `readNBytes`, `readAllBytes`)
   * whose receiver is a zip4j `ZipInputStream`.
   */
  class ReadInputStreamCall extends DecompressionBomb::BombReadInputStreamCall {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeZipInputStream and
      this.getMethod().hasName(["read", "readNBytes", "readAllBytes"])
    }
  }

  /**
   * An additional taint step from `n1` to `n2` for
   * `ZipInputStream n2 = new ZipInputStream(n1)`,
   * i.e. taint on the wrapped stream flows to the decompression stream.
   */
  private class CompressorsAndArchiversAdditionalTaintStep extends DecompressionBomb::AdditionalStep
  {
    override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
      exists(ConstructorCall call |
        call.getCallee().getDeclaringType() instanceof TypeZipInputStream and
        call.getArgument(0) = n1.asExpr() and
        call = n2.asExpr()
      )
    }
  }
}
/**
 * Provides decompression sinks and additional taint steps for the `java.util.zip` package.
 */
module Zip {
  /**
   * The `java.util.zip` classes `ZipInputStream`, `GZIPInputStream`, and
   * `InflaterInputStream`, or any of their subtypes.
   */
  class TypeInputStream extends RefType {
    TypeInputStream() {
      this.getASupertype*()
          .hasQualifiedName("java.util.zip",
            ["ZipInputStream", "GZIPInputStream", "InflaterInputStream"])
    }
  }

  /**
   * A call to a byte-reading method (`read`, `readNBytes`, `readAllBytes`)
   * whose receiver is one of the `java.util.zip` decompression streams.
   */
  class ReadInputStreamCall extends DecompressionBomb::BombReadInputStreamCall {
    ReadInputStreamCall() {
      this.getReceiverType() instanceof TypeInputStream and
      this.getCallee().hasName(["read", "readNBytes", "readAllBytes"])
    }
  }

  /** The `java.util.zip.Inflater` class. */
  class TypeInflator extends RefType {
    TypeInflator() { this.hasQualifiedName("java.util.zip", "Inflater") }
  }

  /** The first argument of `Inflater.inflate(..)` or `Inflater.setInput(..)`. */
  class InflateSink extends DecompressionBomb::Sink {
    InflateSink() {
      exists(MethodCall ma |
        ma.getReceiverType() instanceof TypeInflator and
        ma.getCallee().hasName("inflate") and
        ma.getArgument(0) = this.asExpr()
        or
        ma.getReceiverType() instanceof TypeInflator and
        ma.getMethod().hasName("setInput") and
        ma.getArgument(0) = this.asExpr()
      )
    }
  }

  /** The qualifier of a `ZipFile.getInputStream(..)` call. */
  class ZipFileSink extends DecompressionBomb::Sink {
    ZipFileSink() {
      exists(MethodCall call |
        call.getCallee().getDeclaringType() instanceof TypeZipFile and
        call.getCallee().hasName("getInputStream") and
        call.getQualifier() = this.asExpr()
      )
    }
  }

  /** The `java.util.zip.ZipFile` class. */
  class TypeZipFile extends RefType {
    TypeZipFile() { this.hasQualifiedName("java.util.zip", "ZipFile") }
  }
}

View File

@@ -0,0 +1,14 @@
import experimental.semmle.code.java.security.FileAndFormRemoteSource
import experimental.semmle.code.java.security.DecompressionBomb::DecompressionBomb
/** A taint-tracking configuration for uncontrolled file decompression ("decompression bomb") flows. */
module DecompressionBombsConfig implements DataFlow::ConfigSig {
  /** Sources: any remote (user-controlled) data. */
  predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }

  /** Sinks: decompression bomb sinks contributed by the `DecompressionBomb` module. */
  predicate isSink(DataFlow::Node sink) { sink instanceof Sink }

  /** Extra steps: the library-specific wrapping/factory steps contributed via `AdditionalStep`. */
  predicate isAdditionalFlowStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
    any(AdditionalStep ads).step(nodeFrom, nodeTo)
  }
}

/** Global taint tracking for decompression bomb flows. */
module DecompressionBombsFlow = TaintTracking::Global<DecompressionBombsConfig>;

View File

@@ -0,0 +1,118 @@
import java
import semmle.code.java.dataflow.FlowSources
/**
 * A unit class for adding additional taint steps for the Apache Commons FileUpload library.
 *
 * Extend this class to add new additional taint steps.
 */
// Fixed: a class containing an abstract predicate must itself be abstract.
abstract class CommonsFileUploadAdditionalTaintStep extends Unit {
  abstract predicate step(DataFlow::Node n1, DataFlow::Node n2);
}
/** Provides remote flow sources and taint steps for the Apache Commons FileUpload library. */
module ApacheCommonsFileUpload {
  module RemoteFlowSource {
    /** The `org.apache.commons.fileupload.servlet.ServletFileUpload` class. */
    class TypeServletFileUpload extends RefType {
      TypeServletFileUpload() {
        this.hasQualifiedName("org.apache.commons.fileupload.servlet", "ServletFileUpload")
      }
    }

    /** The `org.apache.commons.fileupload.FileItem` interface, or any subtype. */
    class TypeFileUpload extends RefType {
      TypeFileUpload() {
        this.getAStrictAncestor*().hasQualifiedName("org.apache.commons.fileupload", "FileItem")
      }
    }

    /** The `org.apache.commons.fileupload.FileItemStream` interface, or any subtype. */
    class TypeFileItemStream extends RefType {
      TypeFileItemStream() {
        this.getAStrictAncestor*()
            .hasQualifiedName("org.apache.commons.fileupload", "FileItemStream")
      }
    }

    /** The result of `ServletFileUpload.parseRequest(..)`, treated as remote user input. */
    class ServletFileUpload extends RemoteFlowSource {
      ServletFileUpload() {
        exists(MethodCall ma |
          ma.getReceiverType() instanceof TypeServletFileUpload and
          ma.getCallee().hasName("parseRequest") and
          this.asExpr() = ma
        )
      }

      override string getSourceType() { result = "Apache Commons Fileupload" }
    }

    /** The result of a content/metadata accessor on a `FileItem`, treated as remote user input. */
    private class FileItemRemoteSource extends RemoteFlowSource {
      FileItemRemoteSource() {
        exists(MethodCall ma |
          ma.getReceiverType() instanceof TypeFileUpload and
          ma.getCallee()
              .hasName([
                  "getInputStream", "getFieldName", "getContentType", "get", "getName", "getString"
                ]) and
          this.asExpr() = ma
        )
      }

      override string getSourceType() { result = "Apache Commons Fileupload" }
    }

    /** The result of a content/metadata accessor on a `FileItemStream`, treated as remote user input. */
    private class FileItemStreamRemoteSource extends RemoteFlowSource {
      FileItemStreamRemoteSource() {
        exists(MethodCall ma |
          ma.getReceiverType() instanceof TypeFileItemStream and
          ma.getCallee().hasName(["getContentType", "getFieldName", "getName", "openStream"]) and
          this.asExpr() = ma
        )
      }

      override string getSourceType() { result = "Apache Commons Fileupload" }
    }
  }

  module Util {
    /** The `org.apache.commons.fileupload.util.Streams` utility class. */
    class TypeStreams extends RefType {
      TypeStreams() { this.hasQualifiedName("org.apache.commons.fileupload.util", "Streams") }
    }

    /** Taint step from the stream argument of `Streams.asString(..)` to its result. */
    private class AsStringAdditionalTaintStep extends CommonsFileUploadAdditionalTaintStep {
      override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
        exists(Call call |
          call.getCallee().getDeclaringType() instanceof TypeStreams and
          call.getArgument(0) = n1.asExpr() and
          call = n2.asExpr() and
          call.getCallee().hasName("asString")
        )
      }
    }

    /** Taint step from the input-stream argument of `Streams.copy(..)` to its output-stream argument. */
    private class CopyAdditionalTaintStep extends CommonsFileUploadAdditionalTaintStep {
      override predicate step(DataFlow::Node n1, DataFlow::Node n2) {
        exists(Call call |
          call.getCallee().getDeclaringType() instanceof TypeStreams and
          call.getArgument(0) = n1.asExpr() and
          call.getArgument(1) = n2.asExpr() and
          call.getCallee().hasName("copy")
        )
      }
    }
  }
}
/** Provides remote flow sources for servlet multipart (`Part`) uploads. */
module ServletRemoteMultiPartSources {
  /** The `javax.servlet.http.Part` or `jakarta.servlet.http.Part` interface. */
  class TypePart extends RefType {
    TypePart() { this.hasQualifiedName(["javax.servlet.http", "jakarta.servlet.http"], "Part") }
  }

  /** The result of a content/metadata accessor on a `Part`, treated as remote user input. */
  private class ServletPartCalls extends RemoteFlowSource {
    ServletPartCalls() {
      exists(MethodCall ma |
        ma.getReceiverType() instanceof TypePart and
        ma.getCallee()
            .hasName([
                "getInputStream", "getName", "getContentType", "getHeader", "getHeaders",
                "getHeaderNames", "getSubmittedFileName", "write"
              ]) and
        this.asExpr() = ma
      )
    }

    override string getSourceType() { result = "Javax Servlet Http" }
  }
}

View File

@@ -0,0 +1,55 @@
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.compressors.CompressorException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.net.Socket;
import java.util.zip.DataFormatException;
/**
 * Test driver for the decompression-bomb CodeQL query: feeds remote
 * (socket-derived) data into each library-specific handler so the query's
 * expected taint flows can be exercised.
 */
public class Bombs {
  public void sendUserFileGood2(Socket sock) throws IOException {
    // Remote, attacker-controlled stream: the taint source for the calls below.
    InputStream remoteFile = sock.getInputStream();
    // Zip
    ZipHandler.ZipInputStreamSafe2(remoteFile);
    ZipHandler.ZipInputStreamSafe(remoteFile);
    ZipHandler.ZipInputStreamUnsafe(remoteFile);
    ZipHandler.GZipInputStreamUnsafe(remoteFile);
    ZipHandler.InflaterInputStreamUnsafe(remoteFile);
    // A remote filename, also attacker-controlled.
    BufferedReader filenameReader =
        new BufferedReader(new InputStreamReader(sock.getInputStream(), "UTF-8"));
    String filename = filenameReader.readLine();
    try {
      ZipHandler.InflaterUnsafe(filename.getBytes());
    } catch (DataFormatException e) {
      throw new RuntimeException(e);
    }
    try {
      ZipHandler.ZipFile1(filename);
    } catch (DataFormatException e) {
      throw new RuntimeException(e);
    }
    // Zip4j
    Zip4jHandler.zip4jZipInputStream(remoteFile);
    Zip4jHandler.zip4jZipInputStreamSafe(remoteFile);
    // SnappyZip
    SnappyHandler.SnappyZipInputStream(remoteFile);
    // apache Commons
    CommonsCompressHandler.commonsCompressorInputStream(remoteFile);
    try {
      CommonsCompressHandler.commonsCompressArchiveInputStream(remoteFile);
      CommonsCompressHandler.commonsCompressArchiveStreamFactory(remoteFile);
    } catch (ArchiveException e) {
      throw new RuntimeException(e);
    }
    try {
      CommonsCompressHandler.commonsCompressCompressorStreamFactory(remoteFile);
    } catch (CompressorException e) {
      throw new RuntimeException(e);
    }
  }
}

View File

@@ -0,0 +1,128 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.BufferedInputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.compress.compressors.lz4.*;
import org.apache.commons.compress.archivers.ar.ArArchiveInputStream;
import org.apache.commons.compress.archivers.arj.ArjArchiveInputStream;
import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream;
import org.apache.commons.compress.archivers.jar.JarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream;
import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream;
import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.compressors.brotli.BrotliCompressorInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream;
import org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream;
import org.apache.commons.compress.compressors.z.ZCompressorInputStream;
/**
 * Decompression helpers for Apache Commons Compress, used as CodeQL test
 * fixtures. None of these methods bounds the total decompressed size, so
 * each read loop is a decompression-bomb sink (the inline `$` comments are
 * the test's expected-flow annotations and must be kept as-is).
 */
public class CommonsCompressHandler {
  // Decompresses a gzip stream to a file with no size limit.
  public static void commonsCompressorInputStream(InputStream inputStream) throws IOException {
    BufferedInputStream in = new BufferedInputStream(inputStream);
    OutputStream out = Files.newOutputStream(Path.of("tmpfile"));
    GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
    // Also, the `new GzipCompressorInputStream(in)` can be the following:
    // new BrotliCompressorInputStream(in);
    // new BZip2CompressorInputStream(in);
    // new DeflateCompressorInputStream(in);
    // new Deflate64CompressorInputStream(in);
    // new BlockLZ4CompressorInputStream(in);
    // new LZMACompressorInputStream(in);
    // new Pack200CompressorInputStream(in);
    // new SnappyCompressorInputStream(in);
    // new XZCompressorInputStream(in);
    // new ZCompressorInputStream(in);
    // new ZstdCompressorInputStream(in);
    int buffersize = 4096;
    final byte[] buffer = new byte[buffersize];
    int n = 0;
    while (-1 != (n = gzIn.read(buffer))) { // $ hasTaintFlow="gzIn"
      out.write(buffer, 0, n);
    }
    out.close();
    gzIn.close();
  }

  // Extracts entries from an archive stream created via a direct constructor.
  static void commonsCompressArchiveInputStream(InputStream inputStream) {
    byte[] readBuffer = new byte[4096];
    // Also, the `new ZipArchiveInputStream(inputStream)` can be the following:
    // new ArArchiveInputStream(inputStream);
    // new ArjArchiveInputStream(inputStream);
    // new CpioArchiveInputStream(inputStream);
    // new JarArchiveInputStream(inputStream);
    // new ZipArchiveInputStream(inputStream);
    try (ZipArchiveInputStream zipInputStream =
        new ZipArchiveInputStream(inputStream)) {
      ArchiveEntry entry = null;
      while ((entry = zipInputStream.getNextEntry()) != null) {
        if (!zipInputStream.canReadEntryData(entry)) {
          continue;
        }
        File f = new File("tmpfile");
        try (OutputStream outputStream = new FileOutputStream(f)) {
          int readLen;
          while ((readLen = zipInputStream.read(readBuffer)) != -1) { // $ hasTaintFlow="zipInputStream"
            outputStream.write(readBuffer, 0, readLen);
          }
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  // Extracts entries from an archive stream created via ArchiveStreamFactory.
  static void commonsCompressArchiveStreamFactory(InputStream inputStream)
      throws IOException, ArchiveException {
    BufferedInputStream bin = new BufferedInputStream(inputStream);
    ArchiveInputStream zipInputStream = new ArchiveStreamFactory().createArchiveInputStream(bin);
    ArchiveEntry entry = null;
    byte[] readBuffer = new byte[4096];
    while ((entry = zipInputStream.getNextEntry()) != null) {
      if (!zipInputStream.canReadEntryData(entry)) {
        continue;
      }
      File f = new File("tmpfile");
      try (OutputStream outputStream = new FileOutputStream(f)) {
        int readLen;
        while ((readLen = zipInputStream.read(readBuffer)) != -1) { // $ hasTaintFlow="zipInputStream"
          outputStream.write(readBuffer, 0, readLen);
        }
      }
    }
  }

  // Decompresses a stream created via CompressorStreamFactory.
  static void commonsCompressCompressorStreamFactory(InputStream inputStream)
      throws IOException, CompressorException {
    BufferedInputStream bin = new BufferedInputStream(inputStream);
    CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream(bin);
    OutputStream out = Files.newOutputStream(Path.of("tmpfile"));
    int buffersize = 4096;
    final byte[] buffer = new byte[buffersize];
    int n = 0;
    while (-1 != (n = in.read(buffer))) { // $ hasTaintFlow="in"
      out.write(buffer, 0, n);
    }
    out.close();
    in.close();
  }
}

View File

@@ -0,0 +1,21 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.xerial.snappy.SnappyInputStream;
/**
 * Snappy decompression helper used as a CodeQL test fixture; the read loop
 * has no decompressed-size bound, so it is a decompression-bomb sink
 * (the inline `$` comment is the test's expected-flow annotation).
 */
public class SnappyHandler {
  public static void SnappyZipInputStream(InputStream inputStream) throws IOException {
    int readLen;
    byte[] readBuffer = new byte[4096];
    try (SnappyInputStream zipInputStream = new SnappyInputStream(inputStream)) {
      try (OutputStream outputStream = Files.newOutputStream(Paths.get("extractedFile"))) {
        while ((readLen = zipInputStream.read(readBuffer)) != -1) { // $ hasTaintFlow="zipInputStream"
          outputStream.write(readBuffer, 0, readLen);
        }
      }
    }
  }
}

View File

@@ -0,0 +1,48 @@
import net.lingala.zip4j.model.LocalFileHeader;
import net.lingala.zip4j.io.inputstream.ZipInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.FileOutputStream;
/**
 * zip4j decompression helpers used as CodeQL test fixtures. The inline `$`
 * comments are the test's expected-flow annotations and must be kept as-is.
 */
public class Zip4jHandler {
  // Unsafe: extracts each entry with no bound on the decompressed size.
  public static void zip4jZipInputStream(InputStream inputStream) throws IOException {
    LocalFileHeader localFileHeader;
    int readLen;
    byte[] readBuffer = new byte[4096];
    try (ZipInputStream zipInputStream = new ZipInputStream(inputStream)) {
      while ((localFileHeader = zipInputStream.getNextEntry()) != null) {
        File extractedFile = new File(localFileHeader.getFileName());
        try (OutputStream outputStream = new FileOutputStream(extractedFile)) {
          while ((readLen = zipInputStream.read(readBuffer)) != -1) { // $ hasTaintFlow="zipInputStream"
            outputStream.write(readBuffer, 0, readLen);
          }
        }
      }
    }
  }

  // Safe: tracks the cumulative decompressed size and aborts past 4MB.
  public static void zip4jZipInputStreamSafe(InputStream inputStream) throws IOException {
    LocalFileHeader localFileHeader;
    int readLen;
    byte[] readBuffer = new byte[4096];
    try (ZipInputStream zipInputStream = new ZipInputStream(inputStream)) {
      while ((localFileHeader = zipInputStream.getNextEntry()) != null) {
        File extractedFile = new File(localFileHeader.getFileName());
        try (OutputStream outputStream = new FileOutputStream(extractedFile)) {
          int totallRead = 0;
          while ((readLen = zipInputStream.read(readBuffer)) != -1) { // $ SPURIOUS: hasTaintFlow="zipInputStream"
            totallRead += readLen;
            // Abort once the decompressed total exceeds the 4MB threshold.
            if (totallRead > 1024 * 1024 * 4) {
              System.out.println("potential Bomb");
              break;
            }
            outputStream.write(readBuffer, 0, readLen);
          }
        }
      }
    }
  }
}

View File

@@ -0,0 +1,187 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.BufferedInputStream;
import java.util.Enumeration;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;
import java.util.zip.ZipFile;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;
import java.util.zip.Inflater;
import java.util.zip.DataFormatException;
public class ZipHandler {
// Safe: bounds both the total decompressed size (TOOBIG) and the entry
// count (TOOMANY) while extracting; the `$ SPURIOUS` annotation records an
// expected false positive of the query on this guarded loop.
public static void ZipInputStreamSafe(InputStream inputStream) throws IOException {
  final int BUFFER = 512;
  final long TOOBIG = 0x6400000; // Max size of unzipped data, 100MB
  final int TOOMANY = 1024;
  // FileInputStream fis = new FileInputStream(filename);
  // NOTE(review): gzis/checkSum are created but never read from or verified
  // here — the extraction below wraps `inputStream` directly. Confirm intent.
  CRC32 checkSum = new CRC32();
  CheckedInputStream gzis = new CheckedInputStream(inputStream, checkSum);
  try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream))) {
    ZipEntry entry;
    int entries = 0;
    long total = 0;
    while ((entry = zis.getNextEntry()) != null) {
      System.out.println("Extracting: " + entry);
      int count;
      byte[] data = new byte[BUFFER];
      if (entry.isDirectory()) {
        System.out.println("Creating directory " + entry.getName());
        continue;
      }
      FileOutputStream fos = new FileOutputStream("/tmp/tmptmp");
      BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
      // Read only while the running total stays under the limit.
      while (total + BUFFER <= TOOBIG && (count = zis.read(data, 0, BUFFER)) != -1) { // $ SPURIOUS: hasTaintFlow="zis"
        dest.write(data, 0, count);
        total += count;
      }
      dest.flush();
      dest.close();
      zis.closeEntry();
      entries++;
      if (entries > TOOMANY) {
        throw new IllegalStateException("Too many files to unzip.");
      }
      if (total + BUFFER > TOOBIG) {
        throw new IllegalStateException("File being unzipped is too big.");
      }
    }
  }
}
// it seems that previously getSize could be bypassed by forged headers, so I tested following
// method with a forged header zip bomb, the getSize() return the forged header but read method
// will read bytes until the getSize() value that this makes getSize() safe for now.
// Safe(ish): rejects entries whose declared size exceeds the limit or is
// unknown (-1) before extracting; the `$ SPURIOUS` annotation records an
// expected false positive of the query on this loop.
public static void ZipInputStreamSafe2(InputStream inputStream) throws IOException {
  int BUFFER = 512;
  int TOOBIG = 100 * 1024 * 1024; // 100MB
  // FileInputStream fis = new FileInputStream(filename);
  try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream))) {
    ZipEntry entry;
    while ((entry = zis.getNextEntry()) != null) {
      System.out.println("Extracting: " + entry);
      int count;
      byte[] data = new byte[BUFFER];
      // Write the files to the disk, but only if the file is not insanely big
      if (entry.getSize() > TOOBIG) {
        throw new IllegalStateException("File to be unzipped is huge.");
      }
      if (entry.getSize() == -1) {
        throw new IllegalStateException("File to be unzipped might be huge.");
      }
      FileOutputStream fos = new FileOutputStream(entry.getName());
      BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
      while ((count = zis.read(data, 0, BUFFER)) != -1) { // $ SPURIOUS: hasTaintFlow="zis"
        dest.write(data, 0, count);
      }
      dest.flush();
      dest.close();
      zis.closeEntry();
    }
  }
}
// Unsafe: extracts every entry with no bound on the decompressed size —
// a decompression-bomb sink the query is expected to flag.
public static void ZipInputStreamUnsafe(InputStream inputStream) throws IOException {
  int BUFFER = 512;
  // FileInputStream fis = new FileInputStream(filename);
  try (ZipInputStream zis = new ZipInputStream(new BufferedInputStream(inputStream))) {
    ZipEntry entry;
    while ((entry = zis.getNextEntry()) != null) {
      System.out.println("Extracting: " + entry);
      int count;
      byte[] data = new byte[BUFFER];
      // Write the files to the disk
      FileOutputStream fos = new FileOutputStream(entry.getName());
      BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
      while ((count = zis.read(data, 0, BUFFER)) != -1) { // $ hasTaintFlow="zis"
        dest.write(data, 0, count);
      }
      dest.flush();
      dest.close();
      zis.closeEntry();
    }
  }
}
/**
 * Unsafe fixture: gunzips the whole stream to disk with no limit on the
 * decompressed size. The inline expectation asserts the query flags this flow.
 *
 * @param inputStream caller-supplied (potentially attacker-controlled) gzip data
 * @throws IOException if reading the stream or writing the output fails
 */
public static void GZipInputStreamUnsafe(InputStream inputStream) throws IOException {
int BUFFER = 512;
try (GZIPInputStream gzis = new GZIPInputStream(inputStream)) {
int count;
byte[] data = new byte[BUFFER];
FileOutputStream fos = new FileOutputStream("/tmp/tmp");
BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
while ((count = gzis.read(data, 0, BUFFER)) != -1) { // $ hasTaintFlow="gzis"
dest.write(data, 0, count);
}
dest.flush();
dest.close();
}
}
/**
 * Unsafe fixture: inflates a raw DEFLATE stream to disk with no limit on the
 * decompressed size. The inline expectation asserts the query flags this flow.
 *
 * @param inputStream caller-supplied (potentially attacker-controlled) deflate data
 * @throws IOException if reading the stream or writing the output fails
 */
public static void InflaterInputStreamUnsafe(InputStream inputStream) throws IOException {
int BUFFER = 512;
try (InflaterInputStream Izis = new InflaterInputStream(inputStream)) {
int count;
byte[] data = new byte[BUFFER];
FileOutputStream fos = new FileOutputStream("/tmp/tmp");
BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
while ((count = Izis.read(data, 0, BUFFER)) != -1) { // $ hasTaintFlow="Izis"
dest.write(data, 0, count);
}
dest.flush();
dest.close();
}
}
/**
 * Unsafe fixture using java.util.zip.Inflater directly: inflate() is looped
 * until finished() with no cap on the total output, so a bomb expands fully
 * in memory. The inline expectation asserts the query flags setInput's data.
 *
 * NOTE(review): Inflater.end() is never called, so native resources are only
 * released by finalization — acceptable in a test fixture, confirm if reused.
 *
 * @param inputBytes caller-supplied (potentially attacker-controlled) deflate data
 * @throws DataFormatException if the compressed data is malformed
 * @throws IOException if closing the output stream fails
 */
public static void InflaterUnsafe(byte[] inputBytes) throws DataFormatException, IOException {
Inflater inflater = new Inflater();
inflater.setInput(inputBytes); // $ hasTaintFlow="inputBytes"
try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(inputBytes.length)) {
byte[] buffer = new byte[1024];
while (!inflater.finished()) {
final int count = inflater.inflate(buffer);
outputStream.write(buffer, 0, count);
}
outputStream.toByteArray();
}
}
/**
 * Unsafe fixture using java.util.zip.ZipFile: every entry is copied to disk
 * byte by byte with no limit on the total decompressed size. The inline
 * expectation asserts the query flags the zipFile-derived input stream.
 *
 * NOTE(review): the ZipFile is never closed (no try-with-resources on it), so
 * the underlying file handle leaks on every call — acceptable in a fixture,
 * but worth confirming it does not perturb the query test.
 *
 * @param zipFilePath path to a (potentially attacker-supplied) zip archive
 * @throws DataFormatException declared but not thrown by the visible body
 * @throws RuntimeException wrapping any IOException, with the path in the message
 */
public static void ZipFile1(String zipFilePath) throws DataFormatException, IOException {
try {
System.out.println("zipFilePath = " + zipFilePath);
ZipFile zipFile = new ZipFile(zipFilePath);
Enumeration<? extends ZipEntry> entries = zipFile.entries();
while (entries.hasMoreElements()) {
ZipEntry entry = entries.nextElement();
if (entry.isDirectory()) {
System.out.print("dir : " + entry.getName());
// NOTE(review): entry.getName() is used directly in the destination path
// (zip-slip risk) — out of scope for this decompression-bomb fixture.
String destPath = "tmp" + File.separator + entry.getName();
System.out.println(" => " + destPath);
File file = new File(destPath);
file.mkdirs();
} else {
String destPath = "tmp" + File.separator + entry.getName();
try (InputStream inputStream = zipFile.getInputStream(entry); // $ hasTaintFlow="zipFile"
FileOutputStream outputStream = new FileOutputStream(destPath);) {
// Single-byte copy loop: slow but unbounded — the bomb sink under test.
int data = inputStream.read();
while (data != -1) {
outputStream.write(data);
data = inputStream.read();
}
}
System.out.println("file : " + entry.getName() + " => " + destPath);
}
}
} catch (IOException e) {
throw new RuntimeException("Error unzipping file " + zipFilePath, e);
}
}
}

View File

@@ -0,0 +1 @@
//semmle-extractor-options: --javac-args -cp ${testdir}/../../../stubs/apache-commons-compress-1.23.0:${testdir}/../../../stubs/lingala-zip4j-2.11.5:${testdir}/../../../stubs/zstd-jni-1.5.5

View File

@@ -0,0 +1,9 @@
// Inline-expectation test for the decompression-bombs query: TaintFlowTestArgString
// checks each `$ hasTaintFlow="..."` annotation in the .java test sources, using
// getArgString/2 below to render the expected argument string for each result.
import java
import experimental.semmle.code.java.security.DecompressionBombQuery
import TestUtilities.InlineFlowTest
import TaintFlowTestArgString<DecompressionBombsConfig, getArgString/2>
// Expected-argument renderer: the sink's textual form, wrapped in quotes.
// `exists(src)` just discharges the unused source parameter.
string getArgString(DataFlow::Node src, DataFlow::Node sink) {
exists(src) and
result = "\"" + sink.toString() + "\""
}

View File

@@ -0,0 +1,14 @@
// Generated automatically from org.apache.commons.compress.archivers.ArchiveEntry for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
// NOTE(review): constant values are placeholders and may not match the real library.
package org.apache.commons.compress.archivers;
import java.util.Date;
public interface ArchiveEntry
{
Date getLastModifiedDate();
String getName();
boolean isDirectory();
long getSize();
static long SIZE_UNKNOWN = 0;
}

View File

@@ -0,0 +1,11 @@
// Generated automatically from org.apache.commons.compress.archivers.ArchiveException for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
package org.apache.commons.compress.archivers;
public class ArchiveException extends Exception
{
protected ArchiveException() {}
public ArchiveException(String p0){}
public ArchiveException(String p0, Exception p1){}
}

View File

@@ -0,0 +1,19 @@
// Generated automatically from org.apache.commons.compress.archivers.ArchiveInputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
abstract public class ArchiveInputStream extends InputStream
{
protected void count(int p0){}
protected void count(long p0){}
protected void pushedBackBytes(long p0){}
public ArchiveInputStream(){}
public abstract ArchiveEntry getNextEntry();
public boolean canReadEntryData(ArchiveEntry p0){ return false; }
public int getCount(){ return 0; }
public int read(){ return 0; }
public long getBytesRead(){ return 0; }
}

View File

@@ -0,0 +1,25 @@
// Generated automatically from org.apache.commons.compress.archivers.ArchiveOutputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers;
import java.io.File;
import java.io.OutputStream;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import org.apache.commons.compress.archivers.ArchiveEntry;
abstract public class ArchiveOutputStream extends OutputStream
{
protected void count(int p0){}
protected void count(long p0){}
public ArchiveEntry createArchiveEntry(Path p0, String p1, LinkOption... p2){ return null; }
public ArchiveOutputStream(){}
public abstract ArchiveEntry createArchiveEntry(File p0, String p1);
public abstract void closeArchiveEntry();
public abstract void finish();
public abstract void putArchiveEntry(ArchiveEntry p0);
public boolean canWriteEntryData(ArchiveEntry p0){ return false; }
public int getCount(){ return 0; }
public long getBytesWritten(){ return 0; }
public void write(int p0){}
}

View File

@@ -0,0 +1,44 @@
// Generated automatically from org.apache.commons.compress.archivers.ArchiveStreamFactory for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (null); only signatures matter.
// NOTE(review): constant values are placeholders and may not match the real library.
package org.apache.commons.compress.archivers;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Set;
import java.util.SortedMap;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamProvider;
public class ArchiveStreamFactory implements ArchiveStreamProvider
{
public ArchiveInputStream createArchiveInputStream(InputStream p0){ return null; }
public ArchiveInputStream createArchiveInputStream(String p0, InputStream p1){ return null; }
public ArchiveInputStream createArchiveInputStream(String p0, InputStream p1, String p2){ return null; }
public ArchiveOutputStream createArchiveOutputStream(String p0, OutputStream p1){ return null; }
public ArchiveOutputStream createArchiveOutputStream(String p0, OutputStream p1, String p2){ return null; }
public ArchiveStreamFactory(){}
public ArchiveStreamFactory(String p0){}
public Set<String> getInputStreamArchiveNames(){ return null; }
public Set<String> getOutputStreamArchiveNames(){ return null; }
public SortedMap<String, ArchiveStreamProvider> getArchiveInputStreamProviders(){ return null; }
public SortedMap<String, ArchiveStreamProvider> getArchiveOutputStreamProviders(){ return null; }
public String getEntryEncoding(){ return null; }
public static ArchiveStreamFactory DEFAULT = null;
public static SortedMap<String, ArchiveStreamProvider> findAvailableArchiveInputStreamProviders(){ return null; }
public static SortedMap<String, ArchiveStreamProvider> findAvailableArchiveOutputStreamProviders(){ return null; }
public static String APK = null;
public static String APKM = null;
public static String APKS = null;
public static String AR = null;
public static String ARJ = null;
public static String CPIO = null;
public static String DUMP = null;
public static String JAR = null;
public static String SEVEN_Z = null;
public static String TAR = null;
public static String XAPK = null;
public static String ZIP = null;
public static String detect(InputStream p0){ return null; }
public void setEntryEncoding(String p0){}
}

View File

@@ -0,0 +1,17 @@
// Generated automatically from org.apache.commons.compress.archivers.ArchiveStreamProvider for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
package org.apache.commons.compress.archivers;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Set;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
public interface ArchiveStreamProvider
{
ArchiveInputStream createArchiveInputStream(String p0, InputStream p1, String p2);
ArchiveOutputStream createArchiveOutputStream(String p0, OutputStream p1, String p2);
Set<String> getInputStreamArchiveNames();
Set<String> getOutputStreamArchiveNames();
}

View File

@@ -0,0 +1,11 @@
// Generated automatically from org.apache.commons.compress.archivers.EntryStreamOffsets for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
// NOTE(review): constant values are placeholders and may not match the real library.
package org.apache.commons.compress.archivers;
public interface EntryStreamOffsets
{
boolean isStreamContiguous();
long getDataOffset();
static long OFFSET_UNKNOWN = 0;
}

View File

@@ -0,0 +1,31 @@
// Generated automatically from org.apache.commons.compress.archivers.ar.ArArchiveEntry for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.ar;
import java.io.File;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.Date;
import org.apache.commons.compress.archivers.ArchiveEntry;
public class ArArchiveEntry implements ArchiveEntry
{
protected ArArchiveEntry() {}
public ArArchiveEntry(File p0, String p1){}
public ArArchiveEntry(Path p0, String p1, LinkOption... p2){}
public ArArchiveEntry(String p0, long p1){}
public ArArchiveEntry(String p0, long p1, int p2, int p3, int p4, long p5){}
public Date getLastModifiedDate(){ return null; }
public String getName(){ return null; }
public boolean equals(Object p0){ return false; }
public boolean isDirectory(){ return false; }
public int getGroupId(){ return 0; }
public int getMode(){ return 0; }
public int getUserId(){ return 0; }
public int hashCode(){ return 0; }
public long getLastModified(){ return 0; }
public long getLength(){ return 0; }
public long getSize(){ return 0; }
public static String HEADER = null;
public static String TRAILER = null;
}

View File

@@ -0,0 +1,19 @@
// Generated automatically from org.apache.commons.compress.archivers.ar.ArArchiveInputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.ar;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
public class ArArchiveInputStream extends ArchiveInputStream
{
protected ArArchiveInputStream() {}
public ArArchiveEntry getNextArEntry(){ return null; }
public ArArchiveInputStream(InputStream p0){}
public ArchiveEntry getNextEntry(){ return null; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,21 @@
// Generated automatically from org.apache.commons.compress.archivers.arj.ArjArchiveEntry for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.arj;
import java.util.Date;
import org.apache.commons.compress.archivers.ArchiveEntry;
public class ArjArchiveEntry implements ArchiveEntry
{
public ArjArchiveEntry(){}
public Date getLastModifiedDate(){ return null; }
public String getName(){ return null; }
public boolean equals(Object p0){ return false; }
public boolean isDirectory(){ return false; }
public boolean isHostOsUnix(){ return false; }
public int getHostOs(){ return 0; }
public int getMode(){ return 0; }
public int getUnixMode(){ return 0; }
public int hashCode(){ return 0; }
public long getSize(){ return 0; }
}

View File

@@ -0,0 +1,22 @@
// Generated automatically from org.apache.commons.compress.archivers.arj.ArjArchiveInputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.arj;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.arj.ArjArchiveEntry;
public class ArjArchiveInputStream extends ArchiveInputStream
{
protected ArjArchiveInputStream() {}
public ArjArchiveEntry getNextEntry(){ return null; }
public ArjArchiveInputStream(InputStream p0){}
public ArjArchiveInputStream(InputStream p0, String p1){}
public String getArchiveComment(){ return null; }
public String getArchiveName(){ return null; }
public boolean canReadEntryData(ArchiveEntry p0){ return false; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,75 @@
// Generated automatically from org.apache.commons.compress.archivers.cpio.CpioArchiveEntry for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.cpio;
import java.io.File;
import java.nio.charset.Charset;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.Date;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.cpio.CpioConstants;
public class CpioArchiveEntry implements ArchiveEntry, CpioConstants
{
protected CpioArchiveEntry() {}
public CpioArchiveEntry(File p0, String p1){}
public CpioArchiveEntry(Path p0, String p1, LinkOption... p2){}
public CpioArchiveEntry(String p0){}
public CpioArchiveEntry(String p0, long p1){}
public CpioArchiveEntry(short p0){}
public CpioArchiveEntry(short p0, File p1, String p2){}
public CpioArchiveEntry(short p0, Path p1, String p2, LinkOption... p3){}
public CpioArchiveEntry(short p0, String p1){}
public CpioArchiveEntry(short p0, String p1, long p2){}
public Date getLastModifiedDate(){ return null; }
public String getName(){ return null; }
public boolean equals(Object p0){ return false; }
public boolean isBlockDevice(){ return false; }
public boolean isCharacterDevice(){ return false; }
public boolean isDirectory(){ return false; }
public boolean isNetwork(){ return false; }
public boolean isPipe(){ return false; }
public boolean isRegularFile(){ return false; }
public boolean isSocket(){ return false; }
public boolean isSymbolicLink(){ return false; }
public int getAlignmentBoundary(){ return 0; }
public int getDataPadCount(){ return 0; }
public int getHeaderPadCount(){ return 0; }
public int getHeaderPadCount(Charset p0){ return 0; }
public int getHeaderPadCount(long p0){ return 0; }
public int getHeaderSize(){ return 0; }
public int hashCode(){ return 0; }
public long getChksum(){ return 0; }
public long getDevice(){ return 0; }
public long getDeviceMaj(){ return 0; }
public long getDeviceMin(){ return 0; }
public long getGID(){ return 0; }
public long getInode(){ return 0; }
public long getMode(){ return 0; }
public long getNumberOfLinks(){ return 0; }
public long getRemoteDevice(){ return 0; }
public long getRemoteDeviceMaj(){ return 0; }
public long getRemoteDeviceMin(){ return 0; }
public long getSize(){ return 0; }
public long getTime(){ return 0; }
public long getUID(){ return 0; }
public short getFormat(){ return 0; }
public void setChksum(long p0){}
public void setDevice(long p0){}
public void setDeviceMaj(long p0){}
public void setDeviceMin(long p0){}
public void setGID(long p0){}
public void setInode(long p0){}
public void setMode(long p0){}
public void setName(String p0){}
public void setNumberOfLinks(long p0){}
public void setRemoteDevice(long p0){}
public void setRemoteDeviceMaj(long p0){}
public void setRemoteDeviceMin(long p0){}
public void setSize(long p0){}
public void setTime(FileTime p0){}
public void setTime(long p0){}
public void setUID(long p0){}
}

View File

@@ -0,0 +1,25 @@
// Generated automatically from org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.cpio;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.cpio.CpioArchiveEntry;
import org.apache.commons.compress.archivers.cpio.CpioConstants;
public class CpioArchiveInputStream extends ArchiveInputStream implements CpioConstants
{
protected CpioArchiveInputStream() {}
public ArchiveEntry getNextEntry(){ return null; }
public CpioArchiveEntry getNextCPIOEntry(){ return null; }
public CpioArchiveInputStream(InputStream p0){}
public CpioArchiveInputStream(InputStream p0, String p1){}
public CpioArchiveInputStream(InputStream p0, int p1){}
public CpioArchiveInputStream(InputStream p0, int p1, String p2){}
public int available(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long skip(long p0){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,41 @@
// Generated automatically from org.apache.commons.compress.archivers.cpio.CpioConstants for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters.
// NOTE(review): all constant values are placeholders (0/null) and do not match the real library.
package org.apache.commons.compress.archivers.cpio;
public interface CpioConstants
{
static String CPIO_TRAILER = null;
static String MAGIC_NEW = null;
static String MAGIC_NEW_CRC = null;
static String MAGIC_OLD_ASCII = null;
static int BLOCK_SIZE = 0;
static int C_IRGRP = 0;
static int C_IROTH = 0;
static int C_IRUSR = 0;
static int C_ISBLK = 0;
static int C_ISCHR = 0;
static int C_ISDIR = 0;
static int C_ISFIFO = 0;
static int C_ISGID = 0;
static int C_ISLNK = 0;
static int C_ISNWK = 0;
static int C_ISREG = 0;
static int C_ISSOCK = 0;
static int C_ISUID = 0;
static int C_ISVTX = 0;
static int C_IWGRP = 0;
static int C_IWOTH = 0;
static int C_IWUSR = 0;
static int C_IXGRP = 0;
static int C_IXOTH = 0;
static int C_IXUSR = 0;
static int MAGIC_OLD_BINARY = 0;
static int S_IFMT = 0;
static short FORMAT_NEW = 0;
static short FORMAT_NEW_CRC = 0;
static short FORMAT_NEW_MASK = 0;
static short FORMAT_OLD_ASCII = 0;
static short FORMAT_OLD_BINARY = 0;
static short FORMAT_OLD_MASK = 0;
}

View File

@@ -0,0 +1,20 @@
// Generated automatically from org.apache.commons.compress.archivers.jar.JarArchiveEntry for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (null); only signatures matter.
package org.apache.commons.compress.archivers.jar;
import java.security.cert.Certificate;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.zip.ZipEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
public class JarArchiveEntry extends ZipArchiveEntry
{
protected JarArchiveEntry() {}
public Attributes getManifestAttributes(){ return null; }
public Certificate[] getCertificates(){ return null; }
public JarArchiveEntry(JarEntry p0){}
public JarArchiveEntry(String p0){}
public JarArchiveEntry(ZipArchiveEntry p0){}
public JarArchiveEntry(ZipEntry p0){}
}

View File

@@ -0,0 +1,18 @@
// Generated automatically from org.apache.commons.compress.archivers.jar.JarArchiveInputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (null/false); only signatures matter.
package org.apache.commons.compress.archivers.jar;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.jar.JarArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
public class JarArchiveInputStream extends ZipArchiveInputStream
{
protected JarArchiveInputStream() {}
public ArchiveEntry getNextEntry(){ return null; }
public JarArchiveEntry getNextJarEntry(){ return null; }
public JarArchiveInputStream(InputStream p0){}
public JarArchiveInputStream(InputStream p0, String p1){}
public static boolean matches(byte[] p0, int p1){ return false; }
}

View File

@@ -0,0 +1,13 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.ExtraFieldParsingBehavior for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
package org.apache.commons.compress.archivers.zip;
import org.apache.commons.compress.archivers.zip.UnparseableExtraFieldBehavior;
import org.apache.commons.compress.archivers.zip.ZipExtraField;
import org.apache.commons.compress.archivers.zip.ZipShort;
public interface ExtraFieldParsingBehavior extends UnparseableExtraFieldBehavior
{
ZipExtraField createExtraField(ZipShort p0);
ZipExtraField fill(ZipExtraField p0, byte[] p1, int p2, int p3, boolean p4);
}

View File

@@ -0,0 +1,24 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.GeneralPurposeBit for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
// NOTE(review): UFT8_NAMES_FLAG reproduces the upstream library's field name as-is.
package org.apache.commons.compress.archivers.zip;
public class GeneralPurposeBit implements Cloneable
{
public GeneralPurposeBit(){}
public Object clone(){ return null; }
public boolean equals(Object p0){ return false; }
public boolean usesDataDescriptor(){ return false; }
public boolean usesEncryption(){ return false; }
public boolean usesStrongEncryption(){ return false; }
public boolean usesUTF8ForNames(){ return false; }
public byte[] encode(){ return null; }
public int hashCode(){ return 0; }
public static GeneralPurposeBit parse(byte[] p0, int p1){ return null; }
public static int UFT8_NAMES_FLAG = 0;
public void encode(byte[] p0, int p1){}
public void useDataDescriptor(boolean p0){}
public void useEncryption(boolean p0){}
public void useStrongEncryption(boolean p0){}
public void useUTF8ForNames(boolean p0){}
}

View File

@@ -0,0 +1,10 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.UnparseableExtraFieldBehavior for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
package org.apache.commons.compress.archivers.zip;
import org.apache.commons.compress.archivers.zip.ZipExtraField;
public interface UnparseableExtraFieldBehavior
{
ZipExtraField onUnparseableExtraField(byte[] p0, int p1, int p2, boolean p3, int p4);
}

View File

@@ -0,0 +1,18 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.UnparseableExtraFieldData for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (null); only signatures matter.
package org.apache.commons.compress.archivers.zip;
import org.apache.commons.compress.archivers.zip.ZipExtraField;
import org.apache.commons.compress.archivers.zip.ZipShort;
public class UnparseableExtraFieldData implements ZipExtraField
{
public UnparseableExtraFieldData(){}
public ZipShort getCentralDirectoryLength(){ return null; }
public ZipShort getHeaderId(){ return null; }
public ZipShort getLocalFileDataLength(){ return null; }
public byte[] getCentralDirectoryData(){ return null; }
public byte[] getLocalFileDataData(){ return null; }
public void parseFromCentralDirectoryData(byte[] p0, int p1, int p2){}
public void parseFromLocalFileData(byte[] p0, int p1, int p2){}
}

View File

@@ -0,0 +1,105 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.ZipArchiveEntry for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
// The super("") calls satisfy java.util.zip.ZipEntry's required String constructor.
package org.apache.commons.compress.archivers.zip;
import java.io.File;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.util.Date;
import java.util.zip.ZipEntry;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.EntryStreamOffsets;
import org.apache.commons.compress.archivers.zip.ExtraFieldParsingBehavior;
import org.apache.commons.compress.archivers.zip.GeneralPurposeBit;
import org.apache.commons.compress.archivers.zip.UnparseableExtraFieldData;
import org.apache.commons.compress.archivers.zip.ZipExtraField;
import org.apache.commons.compress.archivers.zip.ZipShort;
public class ZipArchiveEntry extends ZipEntry implements ArchiveEntry, EntryStreamOffsets
{
protected ZipArchiveEntry(){super("");}
protected int getAlignment(){ return 0; }
protected long getLocalHeaderOffset(){ return 0; }
protected void setDataOffset(long p0){}
protected void setExtra(){}
protected void setLocalHeaderOffset(long p0){}
protected void setName(String p0){}
protected void setName(String p0, byte[] p1){}
protected void setPlatform(int p0){}
protected void setStreamContiguous(boolean p0){}
public Date getLastModifiedDate(){ return null; }
public GeneralPurposeBit getGeneralPurposeBit(){ return null; }
public Object clone(){ return null; }
public String getName(){ return null; }
public UnparseableExtraFieldData getUnparseableExtraFieldData(){ return null; }
public ZipArchiveEntry(File p0, String p1){super("");}
public ZipArchiveEntry(Path p0, String p1, LinkOption... p2){super("");}
public ZipArchiveEntry(String p0){super("");}
public ZipArchiveEntry(ZipArchiveEntry p0){super("");}
public ZipArchiveEntry(ZipEntry p0){super("");}
public ZipArchiveEntry.CommentSource getCommentSource(){ return null; }
public ZipArchiveEntry.NameSource getNameSource(){ return null; }
public ZipEntry setCreationTime(FileTime p0){ return null; }
public ZipEntry setLastAccessTime(FileTime p0){ return null; }
public ZipEntry setLastModifiedTime(FileTime p0){ return null; }
public ZipExtraField getExtraField(ZipShort p0){ return null; }
public ZipExtraField[] getExtraFields(){ return null; }
public ZipExtraField[] getExtraFields(ExtraFieldParsingBehavior p0){ return null; }
public ZipExtraField[] getExtraFields(boolean p0){ return null; }
public boolean equals(Object p0){ return false; }
public boolean isDirectory(){ return false; }
public boolean isStreamContiguous(){ return false; }
public boolean isUnixSymlink(){ return false; }
public byte[] getCentralDirectoryExtra(){ return null; }
public byte[] getLocalFileDataExtra(){ return null; }
public byte[] getRawName(){ return null; }
public int getInternalAttributes(){ return 0; }
public int getMethod(){ return 0; }
public int getPlatform(){ return 0; }
public int getRawFlag(){ return 0; }
public int getUnixMode(){ return 0; }
public int getVersionMadeBy(){ return 0; }
public int getVersionRequired(){ return 0; }
public int hashCode(){ return 0; }
public long getDataOffset(){ return 0; }
public long getDiskNumberStart(){ return 0; }
public long getExternalAttributes(){ return 0; }
public long getSize(){ return 0; }
public long getTime(){ return 0; }
public static int CRC_UNKNOWN = 0;
public static int PLATFORM_FAT = 0;
public static int PLATFORM_UNIX = 0;
public void addAsFirstExtraField(ZipExtraField p0){}
public void addExtraField(ZipExtraField p0){}
public void removeExtraField(ZipShort p0){}
public void removeUnparseableExtraFieldData(){}
public void setAlignment(int p0){}
public void setCentralDirectoryExtra(byte[] p0){}
public void setCommentSource(ZipArchiveEntry.CommentSource p0){}
public void setDiskNumberStart(long p0){}
public void setExternalAttributes(long p0){}
public void setExtra(byte[] p0){}
public void setExtraFields(ZipExtraField[] p0){}
public void setGeneralPurposeBit(GeneralPurposeBit p0){}
public void setInternalAttributes(int p0){}
public void setMethod(int p0){}
public void setNameSource(ZipArchiveEntry.NameSource p0){}
public void setRawFlag(int p0){}
public void setSize(long p0){}
public void setTime(FileTime p0){}
public void setTime(long p0){}
public void setUnixMode(int p0){}
public void setVersionMadeBy(int p0){}
public void setVersionRequired(int p0){}
static public enum CommentSource
{
COMMENT, UNICODE_EXTRA_FIELD;
private CommentSource() {}
}
static public enum NameSource
{
NAME, NAME_WITH_EFS_FLAG, UNICODE_EXTRA_FIELD;
private NameSource() {}
}
}

View File

@@ -0,0 +1,28 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.ZipArchiveInputStream for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.zip;
import java.io.InputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.utils.InputStreamStatistics;
public class ZipArchiveInputStream extends ArchiveInputStream implements InputStreamStatistics
{
protected ZipArchiveInputStream() {}
public ArchiveEntry getNextEntry(){ return null; }
public ZipArchiveEntry getNextZipEntry(){ return null; }
public ZipArchiveInputStream(InputStream p0){}
public ZipArchiveInputStream(InputStream p0, String p1){}
public ZipArchiveInputStream(InputStream p0, String p1, boolean p2){}
public ZipArchiveInputStream(InputStream p0, String p1, boolean p2, boolean p3){}
public ZipArchiveInputStream(InputStream p0, String p1, boolean p2, boolean p3, boolean p4){}
public boolean canReadEntryData(ArchiveEntry p0){ return false; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public long getUncompressedCount(){ return 0; }
public long skip(long p0){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,17 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.ZipExtraField for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters.
// NOTE(review): constant values are placeholders and may not match the real library.
package org.apache.commons.compress.archivers.zip;
import org.apache.commons.compress.archivers.zip.ZipShort;
public interface ZipExtraField
{
ZipShort getCentralDirectoryLength();
ZipShort getHeaderId();
ZipShort getLocalFileDataLength();
byte[] getCentralDirectoryData();
byte[] getLocalFileDataData();
static int EXTRAFIELD_HEADER_SIZE = 0;
void parseFromCentralDirectoryData(byte[] p0, int p1, int p2);
void parseFromLocalFileData(byte[] p0, int p1, int p2);
}

View File

@@ -0,0 +1,24 @@
// Generated automatically from org.apache.commons.compress.archivers.zip.ZipShort for testing purposes
// Stub for CodeQL extraction tests: bodies are placeholders (0/false/null); only signatures matter.
package org.apache.commons.compress.archivers.zip;
import java.io.Serializable;
public class ZipShort implements Cloneable, Serializable
{
protected ZipShort() {}
public Object clone(){ return null; }
public String toString(){ return null; }
public ZipShort(byte[] p0){}
public ZipShort(byte[] p0, int p1){}
public ZipShort(int p0){}
public boolean equals(Object p0){ return false; }
public byte[] getBytes(){ return null; }
public int getValue(){ return 0; }
public int hashCode(){ return 0; }
public static ZipShort ZERO = null;
public static byte[] getBytes(int p0){ return null; }
public static int getValue(byte[] p0){ return 0; }
public static int getValue(byte[] p0, int p1){ return 0; }
public static void putShort(int p0, byte[] p1, int p2){}
}

View File

@@ -0,0 +1,11 @@
// Generated automatically from org.apache.commons.compress.compressors.CompressorException for testing purposes
// Stub for CodeQL extraction tests: only the API surface matters, not behavior.
package org.apache.commons.compress.compressors;
public class CompressorException extends Exception
{
protected CompressorException() {}
public CompressorException(String p0){}
public CompressorException(String p0, Throwable p1){}
}

View File

@@ -0,0 +1,16 @@
// Generated automatically from org.apache.commons.compress.compressors.CompressorInputStream for testing purposes
package org.apache.commons.compress.compressors;
import java.io.InputStream;
abstract public class CompressorInputStream extends InputStream
{
protected void count(int p0){}
protected void count(long p0){}
protected void pushedBackBytes(long p0){}
public CompressorInputStream(){}
public int getCount(){ return 0; }
public long getBytesRead(){ return 0; }
public long getUncompressedCount(){ return 0; }
}

View File

@@ -0,0 +1,10 @@
// Generated automatically from org.apache.commons.compress.compressors.CompressorOutputStream for testing purposes
package org.apache.commons.compress.compressors;
import java.io.OutputStream;
abstract public class CompressorOutputStream extends OutputStream
{
public CompressorOutputStream(){}
}

View File

@@ -0,0 +1,60 @@
// Generated automatically from org.apache.commons.compress.compressors.CompressorStreamFactory for testing purposes
package org.apache.commons.compress.compressors;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Set;
import java.util.SortedMap;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.CompressorOutputStream;
import org.apache.commons.compress.compressors.CompressorStreamProvider;
/**
 * Test stub of the commons-compress stream factory. Every method returns a
 * placeholder (null/0); the String constants naming compression formats are null.
 */
public class CompressorStreamFactory implements CompressorStreamProvider
{
public Boolean getDecompressUntilEOF(){ return null; }
public CompressorInputStream createCompressorInputStream(InputStream p0){ return null; }
public CompressorInputStream createCompressorInputStream(String p0, InputStream p1){ return null; }
public CompressorInputStream createCompressorInputStream(String p0, InputStream p1, boolean p2){ return null; }
public CompressorOutputStream createCompressorOutputStream(String p0, OutputStream p1){ return null; }
public CompressorStreamFactory(){}
public CompressorStreamFactory(boolean p0){}
public CompressorStreamFactory(boolean p0, int p1){}
public Set<String> getInputStreamCompressorNames(){ return null; }
public Set<String> getOutputStreamCompressorNames(){ return null; }
public SortedMap<String, CompressorStreamProvider> getCompressorInputStreamProviders(){ return null; }
public SortedMap<String, CompressorStreamProvider> getCompressorOutputStreamProviders(){ return null; }
public static CompressorStreamFactory getSingleton(){ return null; }
public static SortedMap<String, CompressorStreamProvider> findAvailableCompressorInputStreamProviders(){ return null; }
public static SortedMap<String, CompressorStreamProvider> findAvailableCompressorOutputStreamProviders(){ return null; }
public static String BROTLI = null;
public static String BZIP2 = null;
public static String DEFLATE = null;
public static String DEFLATE64 = null;
public static String GZIP = null;
public static String LZ4_BLOCK = null;
public static String LZ4_FRAMED = null;
public static String LZMA = null;
public static String PACK200 = null;
public static String SNAPPY_FRAMED = null;
public static String SNAPPY_RAW = null;
public static String XZ = null;
public static String Z = null;
public static String ZSTANDARD = null;
public static String detect(InputStream p0){ return null; }
public static String getBrotli(){ return null; }
public static String getBzip2(){ return null; }
public static String getDeflate(){ return null; }
public static String getDeflate64(){ return null; }
public static String getGzip(){ return null; }
public static String getLZ4Block(){ return null; }
public static String getLZ4Framed(){ return null; }
public static String getLzma(){ return null; }
public static String getPack200(){ return null; }
public static String getSnappyFramed(){ return null; }
public static String getSnappyRaw(){ return null; }
public static String getXz(){ return null; }
public static String getZ(){ return null; }
public static String getZstandard(){ return null; }
public void setDecompressConcatenated(boolean p0){}
}

View File

@@ -0,0 +1,17 @@
// Generated automatically from org.apache.commons.compress.compressors.CompressorStreamProvider for testing purposes
package org.apache.commons.compress.compressors;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Set;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.CompressorOutputStream;
/** Test stub: provider interface for creating compressor input/output streams by format name. */
public interface CompressorStreamProvider
{
CompressorInputStream createCompressorInputStream(String p0, InputStream p1, boolean p2);
CompressorOutputStream createCompressorOutputStream(String p0, OutputStream p1);
Set<String> getInputStreamCompressorNames();
Set<String> getOutputStreamCompressorNames();
}

View File

@@ -0,0 +1,24 @@
// Generated automatically from org.apache.commons.compress.compressors.brotli.BrotliCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.brotli;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the Brotli decompressing stream; all methods are no-ops returning placeholder values. */
public class BrotliCompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected BrotliCompressorInputStream() {}
public BrotliCompressorInputStream(InputStream p0){}
public String toString(){ return null; }
public boolean markSupported(){ return false; }
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public long skip(long p0){ return 0; }
public void close(){}
public void mark(int p0){}
public void reset(){}
}

View File

@@ -0,0 +1,20 @@
// Generated automatically from org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.bzip2;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2Constants;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the BZip2 decompressing stream; methods return placeholder values (0/false). */
public class BZip2CompressorInputStream extends CompressorInputStream implements BZip2Constants, InputStreamStatistics
{
protected BZip2CompressorInputStream() {}
public BZip2CompressorInputStream(InputStream p0){}
public BZip2CompressorInputStream(InputStream p0, boolean p1){}
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,18 @@
// Generated automatically from org.apache.commons.compress.compressors.bzip2.BZip2Constants for testing purposes
package org.apache.commons.compress.compressors.bzip2;
/** Test stub: BZip2 constant holder; all constants are placeholder 0 (real values are irrelevant for extraction tests). */
interface BZip2Constants
{
static int BASEBLOCKSIZE = 0;
static int G_SIZE = 0;
static int MAX_ALPHA_SIZE = 0;
static int MAX_CODE_LEN = 0;
static int MAX_SELECTORS = 0;
static int NUM_OVERSHOOT_BYTES = 0;
static int N_GROUPS = 0;
static int N_ITERS = 0;
static int RUNA = 0;
static int RUNB = 0;
}

View File

@@ -0,0 +1,22 @@
// Generated automatically from org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.deflate;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.deflate.DeflateParameters;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the Deflate decompressing stream; methods return placeholder values. */
public class DeflateCompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected DeflateCompressorInputStream() {}
public DeflateCompressorInputStream(InputStream p0){}
public DeflateCompressorInputStream(InputStream p0, DeflateParameters p1){}
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public long skip(long p0){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,13 @@
// Generated automatically from org.apache.commons.compress.compressors.deflate.DeflateParameters for testing purposes
package org.apache.commons.compress.compressors.deflate;
/** Test stub: Deflate parameter bean; setters are no-ops, getters return placeholder values. */
public class DeflateParameters
{
public DeflateParameters(){}
public boolean withZlibHeader(){ return false; }
public int getCompressionLevel(){ return 0; }
public void setCompressionLevel(int p0){}
public void setWithZlibHeader(boolean p0){}
}

View File

@@ -0,0 +1,18 @@
// Generated automatically from org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.deflate64;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the Deflate64 decompressing stream; methods return placeholder values. */
public class Deflate64CompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected Deflate64CompressorInputStream() {}
public Deflate64CompressorInputStream(InputStream p0){}
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public void close(){}
}

View File

@@ -0,0 +1,21 @@
// Generated automatically from org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.gzip;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.gzip.GzipParameters;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the gzip decompressing stream; methods return placeholder values. */
public class GzipCompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected GzipCompressorInputStream() {}
public GzipCompressorInputStream(InputStream p0){}
public GzipCompressorInputStream(InputStream p0, boolean p1){}
public GzipParameters getMetaData(){ return null; }
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,23 @@
// Generated automatically from org.apache.commons.compress.compressors.gzip.GzipParameters for testing purposes
package org.apache.commons.compress.compressors.gzip;
/** Test stub: gzip metadata bean; setters are no-ops, getters return placeholder values. */
public class GzipParameters
{
public GzipParameters(){}
public String getComment(){ return null; }
public String getFilename(){ return null; }
public int getBufferSize(){ return 0; }
public int getCompressionLevel(){ return 0; }
public int getDeflateStrategy(){ return 0; }
public int getOperatingSystem(){ return 0; }
public long getModificationTime(){ return 0; }
public void setBufferSize(int p0){}
public void setComment(String p0){}
public void setCompressionLevel(int p0){}
public void setDeflateStrategy(int p0){}
public void setFilename(String p0){}
public void setModificationTime(long p0){}
public void setOperatingSystem(int p0){}
}

View File

@@ -0,0 +1,13 @@
// Generated automatically from org.apache.commons.compress.compressors.lz4.BlockLZ4CompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.lz4;
import java.io.InputStream;
import org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream;
/** Test stub of the block-format LZ4 decompressing stream; read is a placeholder returning 0. */
public class BlockLZ4CompressorInputStream extends AbstractLZ77CompressorInputStream
{
protected BlockLZ4CompressorInputStream() {}
public BlockLZ4CompressorInputStream(InputStream p0){}
public int read(byte[] p0, int p1, int p2){ return 0; }
}

View File

@@ -0,0 +1,27 @@
// Generated automatically from org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.lz77support;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.ByteUtils;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub: base class for LZ77-family decompressing streams; all members are placeholders. */
abstract public class AbstractLZ77CompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected AbstractLZ77CompressorInputStream() {}
protected final ByteUtils.ByteSupplier supplier = null;
protected final boolean hasMoreDataInBlock(){ return false; }
protected final int readBackReference(byte[] p0, int p1, int p2){ return 0; }
protected final int readLiteral(byte[] p0, int p1, int p2){ return 0; }
protected final int readOneByte(){ return 0; }
protected final void startBackReference(int p0, long p1){}
protected final void startLiteral(long p0){}
public AbstractLZ77CompressorInputStream(InputStream p0, int p1){}
public int available(){ return 0; }
public int getSize(){ return 0; }
public int read(){ return 0; }
public long getCompressedCount(){ return 0; }
public void close(){}
public void prefill(byte[] p0){}
}

View File

@@ -0,0 +1,21 @@
// Generated automatically from org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.lzma;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the LZMA decompressing stream; methods return placeholder values. */
public class LZMACompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected LZMACompressorInputStream() {}
public LZMACompressorInputStream(InputStream p0){}
public LZMACompressorInputStream(InputStream p0, int p1){}
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public long skip(long p0){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,42 @@
// Generated automatically from org.apache.commons.compress.compressors.lzw.LZWInputStream for testing purposes
package org.apache.commons.compress.compressors.lzw;
import java.io.InputStream;
import java.nio.ByteOrder;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.BitInputStream;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub: base class for LZW-style decompressing streams; all members are placeholders. */
abstract public class LZWInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected LZWInputStream() {}
protected LZWInputStream(InputStream p0, ByteOrder p1){}
protected abstract int addEntry(int p0, byte p1);
protected abstract int decompressNextSymbol();
protected final BitInputStream in = null;
protected int addEntry(int p0, byte p1, int p2){ return 0; }
protected int addRepeatOfPreviousCode(){ return 0; }
protected int expandCodeToOutputStack(int p0, boolean p1){ return 0; }
protected int getClearCode(){ return 0; }
protected int getCodeSize(){ return 0; }
protected int getPrefix(int p0){ return 0; }
protected int getPrefixesLength(){ return 0; }
protected int getTableSize(){ return 0; }
protected int readNextCode(){ return 0; }
protected static int DEFAULT_CODE_SIZE = 0;
protected static int UNUSED_PREFIX = 0;
protected void incrementCodeSize(){}
protected void initializeTables(int p0){}
protected void initializeTables(int p0, int p1){}
protected void resetCodeSize(){}
protected void resetPreviousCode(){}
protected void setClearCode(int p0){}
protected void setCodeSize(int p0){}
protected void setPrefix(int p0, int p1){}
protected void setTableSize(int p0){}
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public void close(){}
}

View File

@@ -0,0 +1,32 @@
// Generated automatically from org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.pack200;
import java.io.File;
import java.io.InputStream;
import java.util.Map;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.compressors.pack200.Pack200Strategy;
/** Test stub of the Pack200 decompressing stream; constructors and methods are placeholders. */
public class Pack200CompressorInputStream extends CompressorInputStream
{
protected Pack200CompressorInputStream() {}
public Pack200CompressorInputStream(File p0){}
public Pack200CompressorInputStream(File p0, Map<String, String> p1){}
public Pack200CompressorInputStream(File p0, Pack200Strategy p1){}
public Pack200CompressorInputStream(File p0, Pack200Strategy p1, Map<String, String> p2){}
public Pack200CompressorInputStream(InputStream p0){}
public Pack200CompressorInputStream(InputStream p0, Map<String, String> p1){}
public Pack200CompressorInputStream(InputStream p0, Pack200Strategy p1){}
public Pack200CompressorInputStream(InputStream p0, Pack200Strategy p1, Map<String, String> p2){}
public boolean markSupported(){ return false; }
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long skip(long p0){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
public void mark(int p0){}
public void reset(){}
}

View File

@@ -0,0 +1,9 @@
// Generated automatically from org.apache.commons.compress.compressors.pack200.Pack200Strategy for testing purposes
package org.apache.commons.compress.compressors.pack200;
/** Test stub: Pack200 caching strategy enum (in-memory vs. temp-file). */
public enum Pack200Strategy
{
IN_MEMORY, TEMP_FILE;
private Pack200Strategy() {}
}

View File

@@ -0,0 +1,16 @@
// Generated automatically from org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.snappy;
import java.io.InputStream;
import org.apache.commons.compress.compressors.lz77support.AbstractLZ77CompressorInputStream;
/** Test stub of the commons-compress Snappy decompressing stream; methods return placeholder 0. */
public class SnappyCompressorInputStream extends AbstractLZ77CompressorInputStream
{
protected SnappyCompressorInputStream() {}
public SnappyCompressorInputStream(InputStream p0){}
public SnappyCompressorInputStream(InputStream p0, int p1){}
public int getSize(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public static int DEFAULT_BLOCK_SIZE = 0;
}

View File

@@ -0,0 +1,22 @@
// Generated automatically from org.apache.commons.compress.compressors.xz.XZCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.xz;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the XZ decompressing stream; methods return placeholder values. */
public class XZCompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected XZCompressorInputStream() {}
public XZCompressorInputStream(InputStream p0){}
public XZCompressorInputStream(InputStream p0, boolean p1){}
public XZCompressorInputStream(InputStream p0, boolean p1, int p2){}
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public long skip(long p0){ return 0; }
public static boolean matches(byte[] p0, int p1){ return false; }
public void close(){}
}

View File

@@ -0,0 +1,17 @@
// Generated automatically from org.apache.commons.compress.compressors.z.ZCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.z;
import java.io.InputStream;
import org.apache.commons.compress.compressors.lzw.LZWInputStream;
/** Test stub of the Unix compress (.Z) decompressing stream; overrides are placeholders returning 0/false. */
public class ZCompressorInputStream extends LZWInputStream
{
protected ZCompressorInputStream() {}
protected int addEntry(int p0, byte p1){ return 0; }
protected int decompressNextSymbol(){ return 0; }
protected int readNextCode(){ return 0; }
public ZCompressorInputStream(InputStream p0){}
public ZCompressorInputStream(InputStream p0, int p1){}
public static boolean matches(byte[] p0, int p1){ return false; }
}

View File

@@ -0,0 +1,26 @@
// Generated automatically from org.apache.commons.compress.compressors.zstandard.ZstdCompressorInputStream for testing purposes
package org.apache.commons.compress.compressors.zstandard;
import com.github.luben.zstd.BufferPool;
import java.io.InputStream;
import org.apache.commons.compress.compressors.CompressorInputStream;
import org.apache.commons.compress.utils.InputStreamStatistics;
/** Test stub of the Zstandard decompressing stream; methods return placeholder values. */
public class ZstdCompressorInputStream extends CompressorInputStream implements InputStreamStatistics
{
protected ZstdCompressorInputStream() {}
public String toString(){ return null; }
public ZstdCompressorInputStream(InputStream p0){}
public ZstdCompressorInputStream(InputStream p0, BufferPool p1){}
public boolean markSupported(){ return false; }
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public long getCompressedCount(){ return 0; }
public long skip(long p0){ return 0; }
public void close(){}
public void mark(int p0){}
public void reset(){}
}

View File

@@ -0,0 +1,20 @@
// Generated automatically from org.apache.commons.compress.utils.BitInputStream for testing purposes
package org.apache.commons.compress.utils;
import java.io.Closeable;
import java.io.InputStream;
import java.nio.ByteOrder;
/** Test stub: bit-granular reader over an InputStream; all methods are placeholders. */
public class BitInputStream implements Closeable
{
protected BitInputStream() {}
public BitInputStream(InputStream p0, ByteOrder p1){}
public int bitsCached(){ return 0; }
public long bitsAvailable(){ return 0; }
public long getBytesRead(){ return 0; }
public long readBits(int p0){ return 0; }
public void alignWithByteBoundary(){}
public void clearBitCache(){}
public void close(){}
}

View File

@@ -0,0 +1,31 @@
// Generated automatically from org.apache.commons.compress.utils.ByteUtils for testing purposes
package org.apache.commons.compress.utils;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.InputStream;
import java.io.OutputStream;
/** Test stub: little-endian byte conversion helpers plus the ByteSupplier/ByteConsumer callback interfaces. */
public class ByteUtils
{
protected ByteUtils() {}
public static byte[] EMPTY_BYTE_ARRAY = null;
public static long fromLittleEndian(ByteUtils.ByteSupplier p0, int p1){ return 0; }
public static long fromLittleEndian(DataInput p0, int p1){ return 0; }
public static long fromLittleEndian(InputStream p0, int p1){ return 0; }
public static long fromLittleEndian(byte[] p0){ return 0; }
public static long fromLittleEndian(byte[] p0, int p1, int p2){ return 0; }
public static void toLittleEndian(ByteUtils.ByteConsumer p0, long p1, int p2){}
public static void toLittleEndian(DataOutput p0, long p1, int p2){}
public static void toLittleEndian(OutputStream p0, long p1, int p2){}
public static void toLittleEndian(byte[] p0, long p1, int p2, int p3){}
static public interface ByteConsumer
{
void accept(int p0);
}
static public interface ByteSupplier
{
int getAsByte();
}
}

View File

@@ -0,0 +1,10 @@
// Generated automatically from org.apache.commons.compress.utils.InputStreamStatistics for testing purposes
package org.apache.commons.compress.utils;
/** Test stub: exposes compressed/uncompressed byte counts of a decompressing stream. */
public interface InputStreamStatistics
{
long getCompressedCount();
long getUncompressedCount();
}

View File

@@ -0,0 +1,30 @@
// Generated automatically from org.xerial.snappy.SnappyInputStream for testing purposes
package org.xerial.snappy;
import java.io.InputStream;
/** Test stub of xerial's Snappy decompressing stream; every read overload is a placeholder returning 0. */
public class SnappyInputStream extends InputStream
{
protected SnappyInputStream() {}
protected boolean hasNextChunk(){ return false; }
protected final InputStream in = null;
protected void readFully(byte[] p0, int p1){}
protected void readHeader(){}
public SnappyInputStream(InputStream p0){}
public int available(){ return 0; }
public int rawRead(Object p0, int p1, int p2){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public int read(double[] p0){ return 0; }
public int read(double[] p0, int p1, int p2){ return 0; }
public int read(float[] p0){ return 0; }
public int read(float[] p0, int p1, int p2){ return 0; }
public int read(int[] p0){ return 0; }
public int read(int[] p0, int p1, int p2){ return 0; }
public int read(long[] p0){ return 0; }
public int read(long[] p0, int p1, int p2){ return 0; }
public int read(short[] p0){ return 0; }
public int read(short[] p0, int p1, int p2){ return 0; }
public void close(){}
}

View File

@@ -0,0 +1,11 @@
// Generated automatically from net.lingala.zip4j.headers.HeaderSignature for testing purposes
package net.lingala.zip4j.headers;
/** Test stub: zip4j header signature enum; getValue is a placeholder returning 0. */
public enum HeaderSignature
{
AES_EXTRA_DATA_RECORD, ARCEXTDATREC, CENTRAL_DIRECTORY, DIGITAL_SIGNATURE, END_OF_CENTRAL_DIRECTORY, EXTRA_DATA_RECORD, LOCAL_FILE_HEADER, SPLIT_ZIP, TEMPORARY_SPANNING_MARKER, ZIP64_END_CENTRAL_DIRECTORY_LOCATOR, ZIP64_END_CENTRAL_DIRECTORY_RECORD, ZIP64_EXTRA_FIELD_SIGNATURE;
private HeaderSignature() {}
public long getValue(){ return 0; }
}

View File

@@ -0,0 +1,31 @@
// Generated automatically from net.lingala.zip4j.io.inputstream.ZipInputStream for testing purposes
package net.lingala.zip4j.io.inputstream;
import java.io.InputStream;
import java.nio.charset.Charset;
import net.lingala.zip4j.model.FileHeader;
import net.lingala.zip4j.model.LocalFileHeader;
import net.lingala.zip4j.model.Zip4jConfig;
import net.lingala.zip4j.util.PasswordCallback;
/** Test stub of zip4j's ZipInputStream; entry iteration and reads are placeholders (null/0). */
public class ZipInputStream extends InputStream
{
protected ZipInputStream() {}
public LocalFileHeader getNextEntry(){ return null; }
public LocalFileHeader getNextEntry(FileHeader p0, boolean p1){ return null; }
public ZipInputStream(InputStream p0){}
public ZipInputStream(InputStream p0, Charset p1){}
public ZipInputStream(InputStream p0, PasswordCallback p1){}
public ZipInputStream(InputStream p0, PasswordCallback p1, Charset p2){}
public ZipInputStream(InputStream p0, PasswordCallback p1, Zip4jConfig p2){}
public ZipInputStream(InputStream p0, char[] p1){}
public ZipInputStream(InputStream p0, char[] p1, Charset p2){}
public ZipInputStream(InputStream p0, char[] p1, Zip4jConfig p2){}
public int available(){ return 0; }
public int read(){ return 0; }
public int read(byte[] p0){ return 0; }
public int read(byte[] p0, int p1, int p2){ return 0; }
public void close(){}
public void setPassword(char[] p0){}
}

View File

@@ -0,0 +1,23 @@
// Generated automatically from net.lingala.zip4j.model.AESExtraDataRecord for testing purposes
package net.lingala.zip4j.model;
import net.lingala.zip4j.model.ZipHeader;
import net.lingala.zip4j.model.enums.AesKeyStrength;
import net.lingala.zip4j.model.enums.AesVersion;
import net.lingala.zip4j.model.enums.CompressionMethod;
/** Test stub: zip4j AES extra-data record bean; setters are no-ops, getters return placeholders. */
public class AESExtraDataRecord extends ZipHeader
{
public AESExtraDataRecord(){}
public AesKeyStrength getAesKeyStrength(){ return null; }
public AesVersion getAesVersion(){ return null; }
public CompressionMethod getCompressionMethod(){ return null; }
public String getVendorID(){ return null; }
public int getDataSize(){ return 0; }
public void setAesKeyStrength(AesKeyStrength p0){}
public void setAesVersion(AesVersion p0){}
public void setCompressionMethod(CompressionMethod p0){}
public void setDataSize(int p0){}
public void setVendorID(String p0){}
}

View File

@@ -0,0 +1,54 @@
// Generated automatically from net.lingala.zip4j.model.AbstractFileHeader for testing purposes
package net.lingala.zip4j.model;
import java.util.List;
import net.lingala.zip4j.model.AESExtraDataRecord;
import net.lingala.zip4j.model.ExtraDataRecord;
import net.lingala.zip4j.model.Zip64ExtendedInfo;
import net.lingala.zip4j.model.ZipHeader;
import net.lingala.zip4j.model.enums.CompressionMethod;
import net.lingala.zip4j.model.enums.EncryptionMethod;
/**
 * Test stub: base bean shared by zip4j's central/local file headers
 * (notably getUncompressedSize/getCompressedSize used by bomb checks);
 * setters are no-ops and getters return placeholders.
 */
abstract public class AbstractFileHeader extends ZipHeader
{
public AESExtraDataRecord getAesExtraDataRecord(){ return null; }
public AbstractFileHeader(){}
public CompressionMethod getCompressionMethod(){ return null; }
public EncryptionMethod getEncryptionMethod(){ return null; }
public List<ExtraDataRecord> getExtraDataRecords(){ return null; }
public String getFileName(){ return null; }
public Zip64ExtendedInfo getZip64ExtendedInfo(){ return null; }
public boolean equals(Object p0){ return false; }
public boolean isDataDescriptorExists(){ return false; }
public boolean isDirectory(){ return false; }
public boolean isEncrypted(){ return false; }
public boolean isFileNameUTF8Encoded(){ return false; }
public byte[] getGeneralPurposeFlag(){ return null; }
public int getExtraFieldLength(){ return 0; }
public int getFileNameLength(){ return 0; }
public int getVersionNeededToExtract(){ return 0; }
public long getCompressedSize(){ return 0; }
public long getCrc(){ return 0; }
public long getLastModifiedTime(){ return 0; }
public long getLastModifiedTimeEpoch(){ return 0; }
public long getUncompressedSize(){ return 0; }
public void setAesExtraDataRecord(AESExtraDataRecord p0){}
public void setCompressedSize(long p0){}
public void setCompressionMethod(CompressionMethod p0){}
public void setCrc(long p0){}
public void setDataDescriptorExists(boolean p0){}
public void setDirectory(boolean p0){}
public void setEncrypted(boolean p0){}
public void setEncryptionMethod(EncryptionMethod p0){}
public void setExtraDataRecords(List<ExtraDataRecord> p0){}
public void setExtraFieldLength(int p0){}
public void setFileName(String p0){}
public void setFileNameLength(int p0){}
public void setFileNameUTF8Encoded(boolean p0){}
public void setGeneralPurposeFlag(byte[] p0){}
public void setLastModifiedTime(long p0){}
public void setUncompressedSize(long p0){}
public void setVersionNeededToExtract(int p0){}
public void setZip64ExtendedInfo(Zip64ExtendedInfo p0){}
}

View File

@@ -0,0 +1,16 @@
// Generated automatically from net.lingala.zip4j.model.ExtraDataRecord for testing purposes
package net.lingala.zip4j.model;
import net.lingala.zip4j.model.ZipHeader;
/** Test stub: zip4j extra-data record bean; setters are no-ops, getters return placeholders. */
public class ExtraDataRecord extends ZipHeader
{
public ExtraDataRecord(){}
public byte[] getData(){ return null; }
public int getSizeOfData(){ return 0; }
public long getHeader(){ return 0; }
public void setData(byte[] p0){}
public void setHeader(long p0){}
public void setSizeOfData(int p0){}
}

View File

@@ -0,0 +1,27 @@
// Generated automatically from net.lingala.zip4j.model.FileHeader for testing purposes
package net.lingala.zip4j.model;
import net.lingala.zip4j.model.AbstractFileHeader;
/** Test stub: zip4j central-directory file header bean; setters are no-ops, getters return placeholders. */
public class FileHeader extends AbstractFileHeader
{
public FileHeader(){}
public String getFileComment(){ return null; }
public String toString(){ return null; }
public boolean equals(Object p0){ return false; }
public byte[] getExternalFileAttributes(){ return null; }
public byte[] getInternalFileAttributes(){ return null; }
public int getDiskNumberStart(){ return 0; }
public int getFileCommentLength(){ return 0; }
public int getVersionMadeBy(){ return 0; }
public int hashCode(){ return 0; }
public long getOffsetLocalHeader(){ return 0; }
public void setDiskNumberStart(int p0){}
public void setExternalFileAttributes(byte[] p0){}
public void setFileComment(String p0){}
public void setFileCommentLength(int p0){}
public void setInternalFileAttributes(byte[] p0){}
public void setOffsetLocalHeader(long p0){}
public void setVersionMadeBy(int p0){}
}

View File

@@ -0,0 +1,16 @@
// Generated automatically from net.lingala.zip4j.model.LocalFileHeader for testing purposes
package net.lingala.zip4j.model;
import net.lingala.zip4j.model.AbstractFileHeader;
/** Test stub: zip4j local file header bean; setters are no-ops, getters return placeholders. */
public class LocalFileHeader extends AbstractFileHeader
{
public LocalFileHeader(){}
public boolean isWriteCompressedSizeInZip64ExtraRecord(){ return false; }
public byte[] getExtraField(){ return null; }
public long getOffsetStartOfData(){ return 0; }
public void setExtraField(byte[] p0){}
public void setOffsetStartOfData(long p0){}
public void setWriteCompressedSizeInZip64ExtraRecord(boolean p0){}
}

View File

@@ -0,0 +1,14 @@
// Generated automatically from net.lingala.zip4j.model.Zip4jConfig for testing purposes
package net.lingala.zip4j.model;
import java.nio.charset.Charset;
/** Test stub: zip4j configuration bean; getters return placeholders. */
public class Zip4jConfig
{
protected Zip4jConfig() {}
public Charset getCharset(){ return null; }
public Zip4jConfig(Charset p0, int p1, boolean p2){}
public boolean isUseUtf8CharsetForPasswords(){ return false; }
public int getBufferSize(){ return 0; }
}

View File

@@ -0,0 +1,20 @@
// Generated automatically from net.lingala.zip4j.model.Zip64ExtendedInfo for testing purposes
package net.lingala.zip4j.model;
import net.lingala.zip4j.model.ZipHeader;
/** Test stub: zip4j Zip64 extended-info bean; setters are no-ops, getters return placeholder 0. */
public class Zip64ExtendedInfo extends ZipHeader
{
public Zip64ExtendedInfo(){}
public int getDiskNumberStart(){ return 0; }
public int getSize(){ return 0; }
public long getCompressedSize(){ return 0; }
public long getOffsetLocalHeader(){ return 0; }
public long getUncompressedSize(){ return 0; }
public void setCompressedSize(long p0){}
public void setDiskNumberStart(int p0){}
public void setOffsetLocalHeader(long p0){}
public void setSize(int p0){}
public void setUncompressedSize(long p0){}
}

View File

@@ -0,0 +1,12 @@
// Generated automatically from net.lingala.zip4j.model.ZipHeader for testing purposes
package net.lingala.zip4j.model;
import net.lingala.zip4j.headers.HeaderSignature;
/** Test stub: base class of all zip4j header beans, carrying a HeaderSignature; members are placeholders. */
abstract public class ZipHeader
{
public HeaderSignature getSignature(){ return null; }
public ZipHeader(){}
public void setSignature(HeaderSignature p0){}
}

View File

@@ -0,0 +1,15 @@
// Generated automatically from net.lingala.zip4j.model.enums.AesKeyStrength for testing purposes
package net.lingala.zip4j.model.enums;
/** Test stub: zip4j AES key-strength enum; numeric getters are placeholders returning 0. */
public enum AesKeyStrength
{
KEY_STRENGTH_128, KEY_STRENGTH_192, KEY_STRENGTH_256;
private AesKeyStrength() {}
public int getKeyLength(){ return 0; }
public int getMacLength(){ return 0; }
public int getRawCode(){ return 0; }
public int getSaltLength(){ return 0; }
public static AesKeyStrength getAesKeyStrengthFromRawCode(int p0){ return null; }
}

View File

@@ -0,0 +1,12 @@
// Generated automatically from net.lingala.zip4j.model.enums.AesVersion for testing purposes
package net.lingala.zip4j.model.enums;
/** Test stub: zip4j AES version enum; lookups are placeholders. */
public enum AesVersion
{
ONE, TWO;
private AesVersion() {}
public int getVersionNumber(){ return 0; }
public static AesVersion getFromVersionNumber(int p0){ return null; }
}

View File

@@ -0,0 +1,12 @@
// Generated automatically from net.lingala.zip4j.model.enums.CompressionMethod for testing purposes
package net.lingala.zip4j.model.enums;
/** Test stub: zip4j compression-method enum; code lookups are placeholders. */
public enum CompressionMethod
{
AES_INTERNAL_ONLY, DEFLATE, STORE;
private CompressionMethod() {}
public int getCode(){ return 0; }
public static CompressionMethod getCompressionMethodFromCode(int p0){ return null; }
}

View File

@@ -0,0 +1,10 @@
// Generated automatically from net.lingala.zip4j.model.enums.EncryptionMethod for testing purposes
package net.lingala.zip4j.model.enums;
/** Test stub: zip4j encryption-method enum. */
public enum EncryptionMethod
{
AES, NONE, ZIP_STANDARD, ZIP_STANDARD_VARIANT_STRONG;
private EncryptionMethod() {}
}

View File

@@ -0,0 +1,9 @@
// Generated automatically from net.lingala.zip4j.util.PasswordCallback for testing purposes
package net.lingala.zip4j.util;
/** Test stub: callback supplying an archive password on demand. */
public interface PasswordCallback
{
char[] getPassword();
}

View File

@@ -0,0 +1,11 @@
// Generated automatically from com.github.luben.zstd.BufferPool for testing purposes
package com.github.luben.zstd;
import java.nio.ByteBuffer;
/** Test stub: zstd-jni buffer pool interface (acquire a ByteBuffer of a given size, release it back). */
public interface BufferPool
{
ByteBuffer get(int p0);
void release(ByteBuffer p0);
}