Merge pull request #13554 from am0o0/amammad-js-bombs

JS: Decompression Bombs
This commit is contained in:
Asger F
2024-05-16 13:25:41 +02:00
committed by GitHub
28 changed files with 2200 additions and 0 deletions

View File

@@ -0,0 +1,43 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Extracting compressed files with any compression algorithm, such as gzip, can expose an application to denial-of-service attacks.</p>
<p>Attackers can craft a small compressed file that decompresses to an enormous amount of data, for example by compressing a huge file made of repeated identical bytes.</p>
</overview>
<recommendation>
<p>When decompressing a user-provided compressed file, either check the compression ratio before extraction, or read the file incrementally (for example, in a loop) so that the decompressed size can be bounded at each step.</p>
</recommendation>
<example>
<p>
JSZip: check the <code>uncompressedSize</code> field of an entry before extraction.
</p>
<sample src="jszip_good.js"/>
<p>
Node.js zlib: use the <a href="https://nodejs.org/dist/latest-v18.x/docs/api/zlib.html#class-options">maxOutputLength option</a>, which limits the size of the decompressed output
</p>
<sample src="zlib_good.js" />
<p>
node-tar: use the <a href="https://github.com/isaacs/node-tar/blob/8c5af15e43a769fd24aa7f1c84d93e54824d19d2/lib/list.js#L90">maxReadSize option</a>, which limits how much data is read from the archive at a time
</p>
<sample src="node-tar_good.js" />
</example>
<references>
<li>
<a href="https://github.com/advisories/GHSA-8225-6cvr-8pqp">CVE-2017-16129</a>
</li>
<li>
<a href="https://www.bamsoftware.com/hacks/zipbomb/">Research on constructing highly amplifying zip bombs</a>
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,35 @@
/**
 * @name User-controlled file decompression
 * @description User-controlled data that flows into decompression library APIs without checking the compression rate is dangerous
 * @kind path-problem
 * @problem.severity error
 * @security-severity 7.8
 * @precision high
 * @id js/user-controlled-data-decompression
 * @tags security
 *       experimental
 *       external/cwe/cwe-409
 */

import javascript
import DataFlow::PathGraph
import DecompressionBombs

/**
 * A taint-tracking configuration from remote user input to a decompression
 * sink that does not bound the size of the decompressed output.
 *
 * Note: the CWE tag was fixed from cwe-522 (Insufficiently Protected
 * Credentials) to cwe-409 (Improper Handling of Highly Compressed Data),
 * which is the category for decompression bombs.
 */
class BombConfiguration extends TaintTracking::Configuration {
  BombConfiguration() { this = "DecompressionBombs" }

  override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }

  override predicate isSink(DataFlow::Node sink) { sink instanceof DecompressionBomb::Sink }

  override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
    // Library-specific steps (streams, file-system reads, archive APIs)
    // contributed via `DecompressionBombs.qll`.
    exists(DecompressionBomb::AdditionalTaintStep addstep |
      addstep.isAdditionalTaintStep(pred, succ)
    )
  }
}

from BombConfiguration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
where cfg.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "This Decompression depends on a $@.", source.getNode(),
  "potentially untrusted source"

View File

@@ -0,0 +1,432 @@
import javascript
import experimental.semmle.javascript.FormParsers
import experimental.semmle.javascript.ReadableStream
import DataFlow::PathGraph
/**
 * Provides the extension points used to model decompression sinks and the
 * additional taint steps they require.
 */
module DecompressionBomb {
  /**
   * A sink for uncontrolled data decompression, as contributed by a
   * `Range` subclass.
   */
  class Sink extends DataFlow::Node {
    Sink() { this = any(Range r).sink() }
  }

  /**
   * An additional taint step needed by a taint-tracking or data-flow
   * configuration.
   *
   * All instances share the single string value "AdditionalTaintStep";
   * subclasses contribute steps by overriding `isAdditionalTaintStep`.
   */
  abstract class AdditionalTaintStep extends string {
    AdditionalTaintStep() { this = "AdditionalTaintStep" }

    /**
     * Holds if there is an additional taint step between `pred` and `succ`.
     */
    abstract predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ);
  }

  /**
   * An abstract class to extend with new decompression sinks.
   */
  abstract class Range extends API::Node {
    /**
     * Gets the sink node responsible for decompression.
     *
     * It can be a path, a stream of compressed data,
     * or a call to a function that uses `pipe`.
     */
    abstract DataFlow::Node sink();
  }
}
/**
 * Provides additional taint steps for Readable Stream objects
 * (`pipe` chains, `stream.pipeline`, and `fs/promises` file handles).
 */
module ReadableStream {
  class ReadableStreamAdditionalTaintStep extends DecompressionBomb::AdditionalTaintStep {
    ReadableStreamAdditionalTaintStep() { this = "AdditionalTaintStep" }

    override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
      // The individual steps are defined in
      // `experimental.semmle.javascript.ReadableStream`.
      (
        readablePipeAdditionalTaintStep(pred, succ)
        or
        streamPipelineAdditionalTaintStep(pred, succ)
        or
        promisesFileHandlePipeAdditionalTaintStep(pred, succ)
      )
    }
  }
}
/**
 * Provides additional taint steps through file-system read functions:
 * a tainted path argument taints the data read from that path.
 */
module FileSystemAccessAdditionalTaintStep {
  // NOTE(review): this class name appears copy-pasted from the
  // ReadableStream module above; it models file-system reads, not streams.
  class ReadableStreamAdditionalTaintStep extends DecompressionBomb::AdditionalTaintStep {
    ReadableStreamAdditionalTaintStep() { this = "AdditionalTaintStep" }

    override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
      // Step from the path argument of `fs.readFile(path, ..., (err, data) => ...)`
      // to the `data` parameter of its callback.
      // It could also be registered as a global additional step.
      exists(DataFlow::CallNode n | n = DataFlow::moduleMember("fs", "readFile").getACall() |
        pred = n.getArgument(0) and succ = n.getABoundCallbackParameter(1, 1)
      )
      or
      // Generic step from the path argument of any modeled file-system read
      // to the node holding the data that was read.
      exists(FileSystemReadAccess cn |
        pred = cn.getAPathArgument() and
        succ = cn.getADataNode()
      )
    }
  }
}
/**
 * Provides models for the [jszip](https://www.npmjs.com/package/jszip) package.
 */
module JsZip {
  /**
   * The `jszip.loadAsync` decompression bomb sink.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() { this = API::moduleImport("jszip").getMember("loadAsync") }

    override DataFlow::Node sink() {
      // The compressed data passed to `loadAsync`, unless the code also
      // reads `_data.uncompressedSize` somewhere downstream.
      result = this.getParameter(0).asSink() and not this.sanitizer(this)
    }

    /**
     * Holds if a member access `_data.uncompressedSize` is reachable from
     * the given jszip `loadAsync` node, i.e. the code inspects the entry
     * size somewhere (treated as a sanitizing check).
     */
    predicate sanitizer(API::Node loadAsync) {
      exists(loadAsync.getASuccessor*().getMember("_data").getMember("uncompressedSize"))
    }
  }
}
/**
 * Provides models for the [node-tar](https://www.npmjs.com/package/tar) package.
 */
module NodeTar {
  /**
   * The `tar.x` / `tar.extract` decompression bomb sinks.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() { this = API::moduleImport("tar").getMember(["x", "extract"]) }

    override DataFlow::Node sink() {
      (
        // piping tar.x()
        result = this.getACall()
        or
        // tar.x({file: filename})
        result = this.getParameter(0).getMember("file").asSink()
      ) and
      // and there shouldn't be a "maxReadSize: ANum" option
      not this.sanitizer(this.getParameter(0))
    }

    /**
     * Holds if the given options parameter of a `tar` call has a
     * `maxReadSize` entry, which bounds how much data is read.
     */
    predicate sanitizer(API::Node tarExtract) { exists(tarExtract.getMember("maxReadSize")) }
  }

  /**
   * Additional taint steps from the imported `tar` module value to
   * `tar.x(...)` calls and their first argument.
   */
  class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
    DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }

    override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
      // NOTE(review): only the `x` member gets a step here; the `extract`
      // alias matched in the sink does not — TODO confirm this is intended.
      exists(API::Node n | n = API::moduleImport("tar") |
        pred = n.asSource() and
        (
          succ = n.getMember("x").getACall() or
          succ = n.getMember("x").getACall().getArgument(0)
        )
      )
    }
  }
}
/**
 * Provides models for the `node:zlib` module.
 */
module Zlib {
  /**
   * The decompression sinks of `node:zlib`.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    // Distinguishes the two API shapes: `true` for functions that take the
    // compressed data directly (both callback and sync variants), `false`
    // for the `create*` stream constructors.
    // NOTE(review): the name suggests sync-vs-async, but asynchronous
    // `gunzip` et al. also set it to `true`.
    boolean isSynk;

    DecompressionBomb() {
      this =
        API::moduleImport("zlib")
            .getMember([
                "gunzip", "gunzipSync", "unzip", "unzipSync", "brotliDecompress",
                "brotliDecompressSync", "inflateSync", "inflateRawSync", "inflate", "inflateRaw"
              ]) and
      isSynk = true
      or
      this =
        API::moduleImport("zlib")
            .getMember([
                "createGunzip", "createBrotliDecompress", "createUnzip", "createInflate",
                "createInflateRaw"
              ]) and
      isSynk = false
    }

    override DataFlow::Node sink() {
      // Stream constructors: the created stream itself is the sink; the
      // options object is parameter 0.
      result = this.getACall() and
      not this.sanitizer(this.getParameter(0)) and
      isSynk = false
      or
      // Direct calls: the compressed data is argument 0; the options object
      // is parameter 1.
      result = this.getACall().getArgument(0) and
      not this.sanitizer(this.getParameter(1)) and
      isSynk = true
    }

    /**
     * Holds if the given options parameter of a zlib call has a
     * `maxOutputLength` entry, which bounds the decompressed output size.
     */
    predicate sanitizer(API::Node zlib) { exists(zlib.getMember("maxOutputLength")) }
  }
}
/**
 * Provides models for the [pako](https://www.npmjs.com/package/pako) package.
 */
module Pako {
  /**
   * The `pako.inflate` / `inflateRaw` / `ungzip` decompression bomb sinks.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() {
      this = API::moduleImport("pako").getMember(["inflate", "inflateRaw", "ungzip"])
    }

    // The compressed data passed as the first argument.
    override DataFlow::Node sink() { result = this.getParameter(0).asSink() }
  }

  /**
   * An additional taint step through `Uint8Array` construction, since pako
   * inputs are commonly wrapped in a typed array first.
   */
  class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
    DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }

    override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
      // succ = new Uint8Array(pred)
      exists(DataFlow::Node n, NewExpr ne | ne = n.asExpr() |
        pred.asExpr() = ne.getArgument(0) and
        succ.asExpr() = ne and
        ne.getCalleeName() = "Uint8Array"
      )
    }
  }
}
/**
 * Provides models for the [adm-zip](https://www.npmjs.com/package/adm-zip) package.
 */
module AdmZip {
  /**
   * The decompression bomb sinks: extraction/read methods on an
   * `AdmZip` instance.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() { this = API::moduleImport("adm-zip").getInstance() }

    override DataFlow::Node sink() {
      // zip.extractAllTo(...) / zip.extractEntryTo(...) / zip.readAsText(...)
      result =
        this.getMember(["extractAllTo", "extractEntryTo", "readAsText"]).getReturn().asSource()
      or
      // entry.getData() reached from the instance, e.g. via getEntries()
      result = this.getASuccessor*().getMember("getData").getReturn().asSource()
    }
  }

  /**
   * Additional taint steps from the `AdmZip` constructor argument to the
   * results of its extraction/read methods.
   */
  class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
    DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }

    override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node n | n = API::moduleImport("adm-zip") |
        // `pred` is the zip file/buffer passed to `new AdmZip(pred)`.
        pred = n.getParameter(0).asSink() and
        (
          succ =
            n.getInstance()
                .getMember(["extractAllTo", "extractEntryTo", "readAsText"])
                .getReturn()
                .asSource()
          or
          succ =
            n.getInstance()
                .getMember("getEntries")
                .getASuccessor*()
                .getMember("getData")
                .getReturn()
                .asSource()
        )
      )
    }
  }
}
/**
 * Provides models for the [decompress](https://www.npmjs.com/package/decompress) package.
 */
module Decompress {
  /**
   * A call to `decompress(input, ...)`: the first argument holds the
   * compressed data and is treated as a decompression-bomb sink.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() { this = API::moduleImport("decompress") }

    override DataFlow::Node sink() {
      exists(API::CallNode call | call = this.getACall() | result = call.getArgument(0))
    }
  }
}
/**
 * Provides models for the [gunzip-maybe](https://www.npmjs.com/package/gunzip-maybe) package.
 */
module GunzipMaybe {
  /**
   * A call to `gunzipMaybe()`: the returned transform stream decompresses
   * whatever is piped through it, so the call itself is the sink.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() { this = API::moduleImport("gunzip-maybe") }

    override DataFlow::Node sink() {
      exists(API::CallNode call | call = this.getACall() | result = call)
    }
  }
}
/**
 * Provides models for the [unbzip2-stream](https://www.npmjs.com/package/unbzip2-stream) package.
 */
module Unbzip2Stream {
  /**
   * A call to `unbzip2Stream()`: the returned transform stream decompresses
   * whatever is piped through it, so the call itself is the sink.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    DecompressionBomb() { this = API::moduleImport("unbzip2-stream") }

    override DataFlow::Node sink() {
      exists(API::CallNode call | call = this.getACall() | result = call)
    }
  }
}
/**
 * Provides models for the [unzipper](https://www.npmjs.com/package/unzipper) package.
 */
module Unzipper {
  /**
   * The decompression bomb sinks.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    // Which unzipper entry point this node is.
    string funcName;

    DecompressionBomb() {
      this = API::moduleImport("unzipper").getMember(["Extract", "Parse", "ParseOne"]) and
      funcName = ["Extract", "Parse", "ParseOne"]
      or
      this = API::moduleImport("unzipper").getMember("Open") and
      // `Open` exposes helper functions (`buffer`, `file`, `url`) that are
      // matched in the `sink()` predicate
      funcName = "Open"
    }

    override DataFlow::Node sink() {
      // unzipper.Open.buffer(data) / .file(path) / .url(request, url)
      // (fix: the member list previously contained "file" twice)
      result = this.getMember(["buffer", "file", "url"]).getACall().getArgument(0) and
      funcName = "Open"
      or
      result = this.getACall() and
      funcName = ["Extract", "Parse", "ParseOne"] and
      // fix: actually apply the sanitizer — it was defined and documented
      // but never used, so extractions guarded by a `vars.uncompressedSize`
      // check were still flagged
      not this.sanitizer()
    }

    /**
     * Holds if a member access like `vars.uncompressedSize` is reachable from
     * this node, i.e. the code inspects the uncompressed size somewhere.
     *
     * This is an over-approximation: modeling the check precisely is hard, so
     * we only require that the field is read at all.
     */
    predicate sanitizer() {
      exists(this.getASuccessor*().getMember("vars").getMember("uncompressedSize")) and
      funcName = ["Extract", "Parse", "ParseOne"]
    }
  }
}
/**
 * Provides models for the [yauzl](https://www.npmjs.com/package/yauzl) package.
 */
module Yauzl {
  // NOTE(review): this looks like a leftover debug helper; nothing in this
  // file uses it. Consider removing it.
  API::Node test() { result = API::moduleImport("yauzl").getASuccessor*() }

  /**
   * The decompression bomb sinks.
   */
  class DecompressionBomb extends DecompressionBomb::Range {
    // "open" for `yauzl.open`, "from" for the `from*` entry points;
    // only the `open` variant has a sanitizer.
    string methodName;

    DecompressionBomb() {
      this =
        API::moduleImport("yauzl").getMember(["fromFd", "fromBuffer", "fromRandomAccessReader"]) and
      methodName = "from"
      or
      this = API::moduleImport("yauzl").getMember("open") and
      methodName = "open"
    }

    override DataFlow::Node sink() {
      (
        // `zipfile.readEntry()` inside the `open(path, opts, (err, zipfile) => ...)` callback
        result = this.getParameter(2).getParameter(1).getMember("readEntry").getACall() or
        // the stream handed to the `openReadStream(entry, (err, stream) => ...)` callback
        result =
          this.getParameter(2)
              .getParameter(1)
              .getMember("openReadStream")
              .getParameter(1)
              .getParameter(1)
              .asSource()
      ) and
      not this.sanitizer() and
      methodName = "open"
      or
      result = this.getParameter(0).asSink() and
      methodName = "from"
    }

    /**
     * Holds if a member named `uncompressedSize` is reachable from this
     * node, i.e. the code inspects the entry size.
     *
     * NOTE(review): `methodName` is only ever "from" or "open" (see the
     * characteristic predicate), so the "readStream" disjunct below can
     * never hold — TODO confirm the original intent.
     */
    predicate sanitizer() {
      exists(this.getASuccessor*().getMember("uncompressedSize")) and
      methodName = ["readStream", "open"]
    }
  }

  /**
   * Additional taint steps from the `open` argument to the entry-reading
   * APIs of the resulting zipfile object.
   */
  class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
    DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }

    override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node open | open = API::moduleImport("yauzl").getMember("open") |
        pred = open.getParameter(0).asSink() and
        (
          succ = open.getParameter(2).getParameter(1).getMember("readEntry").getACall() or
          succ =
            open.getParameter(2)
                .getParameter(1)
                .getMember("openReadStream")
                .getParameter(1)
                .getParameter(1)
                .asSource()
        )
      )
    }
  }
}

View File

@@ -0,0 +1,11 @@
const jszipp = require("jszip");

/**
 * Safely extract a zip entry: refuse to extract when the entry's
 * uncompressed size exceeds 8 MiB.
 */
function zipBombSafe(zipFile) {
  jszipp.loadAsync(zipFile.data).then(function (zip) {
    // Check the entry's uncompressed size BEFORE extracting.
    // Fix: the previous version only logged an error and then extracted
    // the entry regardless, so the size check had no effect.
    if (zip.file("10GB")["_data"]["uncompressedSize"] > 1024 * 1024 * 8) {
      throw new Error("zip entry too large; refusing to extract");
    }
    zip.file("10GB").async("uint8array").then(function (u8) {
      console.log(u8);
    });
  });
}

View File

@@ -0,0 +1,8 @@
// Good example: bound how much data node-tar reads from the archive with
// the `maxReadSize` option, preventing decompression bombs.
const tar = require("tar");
tar.x({
  file: tarFileName,
  strip: 1,
  C: 'some-dir',
  maxReadSize: 16 * 1024 * 1024 // 16 MB
})

View File

@@ -0,0 +1,11 @@
// Good examples: bound the decompressed output with the `maxOutputLength`
// option (5 MiB here) in all three zlib API shapes.
const zlib = require("zlib");
// Callback API: gunzip(data, options, callback)
zlib.gunzip(
  inputZipFile.data,
  { maxOutputLength: 1024 * 1024 * 5 },
  (err, buffer) => {
    doSomeThingWithData(buffer);
  });
// Synchronous API
zlib.gunzipSync(inputZipFile.data, { maxOutputLength: 1024 * 1024 * 5 });
// Streaming API: the created stream enforces the limit
inputZipFile.pipe(zlib.createGunzip({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);

View File

@@ -0,0 +1,179 @@
/**
* Provides classes for modeling the server-side form/file parsing libraries.
*/
import javascript
import experimental.semmle.javascript.ReadableStream
/**
 * A module for modeling the [busboy](https://www.npmjs.com/package/busboy) package.
 */
module BusBoy {
  /**
   * A source of remote flow from the `Busboy` library.
   */
  private class BusBoyRemoteFlow extends RemoteFlowSource {
    BusBoyRemoteFlow() {
      exists(API::Node busboyOnEvent |
        busboyOnEvent = API::moduleImport("busboy").getReturn().getMember("on")
      |
        // Files: bb.on('file', (name, stream, info) => ...);
        // the second callback parameter is a Readable stream.
        busboyOnEvent.getParameter(0).asSink().mayHaveStringValue("file") and
        this = readableStreamDataNode(busboyOnEvent.getParameter(1).getParameter(1))
        or
        // Fields (and the remaining 'file' callback parameters)
        busboyOnEvent.getParameter(0).asSink().mayHaveStringValue(["file", "field"]) and
        this =
          API::moduleImport("busboy")
              .getReturn()
              .getMember("on")
              .getParameter(1)
              .getAParameter()
              .asSource()
      )
    }

    // Fix: the library name was misspelled "Busbuy" in the source type.
    override string getSourceType() { result = "parsed user value from Busboy" }
  }

  /**
   * A busboy file data step according to a Readable Stream type.
   */
  private class AdditionalTaintStep extends TaintTracking::SharedTaintStep {
    override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node busboyOnEvent |
        busboyOnEvent = API::moduleImport("busboy").getReturn().getMember("on")
      |
        // Propagate through `pipe` chains of the 'file' stream.
        busboyOnEvent.getParameter(0).asSink().mayHaveStringValue("file") and
        customStreamPipeAdditionalTaintStep(busboyOnEvent.getParameter(1).getParameter(1), pred,
          succ)
      )
    }
  }
}
/**
 * A module for modeling the [formidable](https://www.npmjs.com/package/formidable) package.
 */
module Formidable {
  /**
   * A source of remote flow from the `Formidable` library parsing a HTTP request.
   */
  private class FormidableRemoteFlow extends RemoteFlowSource {
    FormidableRemoteFlow() {
      // The three ways of obtaining a form parser instance.
      exists(API::Node formidable |
        formidable = API::moduleImport("formidable").getReturn()
        or
        formidable = API::moduleImport("formidable").getMember("formidable").getReturn()
        or
        formidable =
          API::moduleImport("formidable").getMember(["IncomingForm", "Formidable"]).getInstance()
      |
        // form.parse(req, (err, fields, files) => ...)
        this =
          formidable.getMember("parse").getACall().getABoundCallbackParameter(1, any(int i | i > 0))
        or
        // if no callback is provided, a promise is returned whose
        // value contains the [fields, files] members
        exists(API::Node parseMethod |
          parseMethod = formidable.getMember("parse") and parseMethod.getNumParameter() = 1
        |
          this = parseMethod.getReturn().asSource()
        )
        or
        // event handler, e.g. form.on('file', (formname, file) => ...)
        this = formidable.getMember("on").getParameter(1).getAParameter().asSource()
      )
    }

    override string getSourceType() { result = "parsed user value from Formidable" }
  }
}
/**
 * A module for modeling the [multiparty](https://www.npmjs.com/package/multiparty) package.
 */
module Multiparty {
  /**
   * A source of remote flow from the `Multiparty` library.
   */
  private class MultipartyRemoteFlow extends RemoteFlowSource {
    MultipartyRemoteFlow() {
      exists(API::Node form |
        form = API::moduleImport("multiparty").getMember("Form").getInstance()
      |
        // form.parse(req, (err, fields, files) => ...)
        exists(API::CallNode parse | parse = form.getMember("parse").getACall() |
          this = parse.getParameter(1).getParameter([1, 2]).asSource()
        )
        or
        exists(API::Node on | on = form.getMember("on") |
          (
            // form.on('file'|'field', (name, value) => ...)
            on.getParameter(0).asSink().mayHaveStringValue(["file", "field"]) and
            this = on.getParameter(1).getParameter([0, 1]).asSource()
            or
            // form.on('part', part => ...) — `part` is a Readable stream
            on.getParameter(0).asSink().mayHaveStringValue("part") and
            this = readableStreamDataNode(on.getParameter(1).getParameter(0))
          )
        )
      )
    }

    override string getSourceType() { result = "parsed user value from Multiparty" }
  }

  /**
   * A multiparty part data step according to a Readable Stream type.
   */
  private class AdditionalTaintStep extends TaintTracking::SharedTaintStep {
    override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node multipartyOnEvent |
        multipartyOnEvent =
          API::moduleImport("multiparty").getMember("Form").getInstance().getMember("on")
      |
        // Propagate through `pipe` chains of the 'part' stream.
        multipartyOnEvent.getParameter(0).asSink().mayHaveStringValue("part") and
        customStreamPipeAdditionalTaintStep(multipartyOnEvent.getParameter(1).getParameter(0), pred,
          succ)
      )
    }
  }
}
/**
 * A module for modeling the [dicer](https://www.npmjs.com/package/dicer) package.
 */
module Dicer {
  /**
   * A source of remote flow from the `dicer` library.
   */
  private class DicerRemoteFlow extends RemoteFlowSource {
    DicerRemoteFlow() {
      exists(API::Node dicer | dicer = API::moduleImport("dicer").getInstance() |
        exists(API::Node on | on = dicer.getMember("on") |
          // d.on('part', part => ...) — `part` is a Readable stream
          on.getParameter(0).asSink().mayHaveStringValue("part") and
          this = readableStreamDataNode(on.getParameter(1).getParameter(0))
          or
          // part.on('header', header => ...)
          exists(API::Node onPart | onPart = on.getParameter(1).getParameter(0).getMember("on") |
            onPart.getParameter(0).asSink().mayHaveStringValue("header") and
            this = onPart.getParameter(1).getParameter(0).asSource()
          )
        )
      )
    }

    override string getSourceType() { result = "parsed user value from Dicer" }
  }

  /**
   * A dicer part data step according to a Readable Stream type.
   */
  private class AdditionalTaintStep extends TaintTracking::SharedTaintStep {
    override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node onEvent |
        onEvent = API::moduleImport("dicer").getInstance().getMember("on")
      |
        // Propagate through `pipe` chains of the 'part' stream.
        onEvent.getParameter(0).asSink().mayHaveStringValue("part") and
        customStreamPipeAdditionalTaintStep(onEvent.getParameter(1).getParameter(0), pred, succ)
      )
    }
  }
}

View File

@@ -0,0 +1,147 @@
/**
* Provides helper predicates to work with any Readable Stream in dataflow queries
*
* main predicate in which you can use by passing a Readable Stream is `customStreamPipeAdditionalTaintStep`
*/
import javascript
/**
 * Holds if there is a step from the first parameter of `fs.createReadStream`
 * or `stream.Readable.from` to the returned stream, or through anything the
 * stream is piped into.
 *
 * It could also be registered as a global additional step.
 */
predicate readablePipeAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(API::Node receiver |
    receiver =
      [
        API::moduleImport("fs").getMember("createReadStream"),
        API::moduleImport("stream").getMember("Readable").getMember("from")
      ]
  |
    // step along `.pipe(...)` chains rooted at the stream
    customStreamPipeAdditionalTaintStep(receiver, pred, succ)
    or
    // step from the path/iterable argument to the returned stream
    pred = receiver.getParameter(0).asSink() and
    succ = receiver.getReturn().asSource()
  )
}
/**
 * Holds if there is a step through streams created via `fs/promises.open`:
 * from the path argument to the returned file handle, or along its `pipe`
 * chains.
 *
 * It could also be registered as a global additional step.
 */
predicate promisesFileHandlePipeAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(API::Node receiver | receiver = nodeJsPromisesFileSystem().getMember("open") |
    // step along `.pipe(...)` chains rooted at the handle
    customStreamPipeAdditionalTaintStep(receiver, pred, succ)
    or
    // step from the path argument to the returned handle
    pred = receiver.getParameter(0).asSink() and
    succ = receiver.getReturn().asSource()
  )
}
/**
 * Gets the Node.js `fs` promises API, whether it was imported as
 * `fs/promises` or accessed as the `promises` member of `fs`.
 */
API::Node nodeJsPromisesFileSystem() {
  result = API::moduleImport("fs/promises")
  or
  result = API::moduleImport("fs").getMember("promises")
}
/**
 * Holds if data flows along a `pipe` chain of the Readable stream `receiver`:
 *
 * - `receiver.pipe(pred).pipe(sth).pipe(succ)`
 *
 * - `receiver.pipe(sth).pipe(pred).pipe(succ)`
 *
 * - `receiver.pipe(succ)` where `receiver` itself is `pred`
 *
 * `receiver` is a Readable Stream object (or a function returning one).
 */
predicate customStreamPipeAdditionalTaintStep(
  API::Node receiver, DataFlow::Node pred, DataFlow::Node succ
) {
  // connect the first pipe parameter to every later pipe parameter
  exists(API::Node firstPipe | firstPipe = receiver.getMember("pipe") |
    pred = firstPipe.getParameter(0).asSink() and
    succ = firstPipe.getASuccessor*().getMember("pipe").getParameter(0).asSink()
  )
  or
  // connect a pipe parameter to the next pipe parameter
  exists(API::Node cn | cn = receiver.getASuccessor+() |
    pred = cn.getParameter(0).asSink() and
    succ = cn.getReturn().getMember("pipe").getParameter(0).asSink()
  )
  or
  // `receiver` is a function whose return value is a Readable stream object
  pred = receiver.getReturn().asSource() and
  succ = receiver.getReturn().getMember("pipe").getParameter(0).asSink()
  or
  // `receiver` is itself a Readable stream object
  pred = receiver.asSource() and
  succ = receiver.getMember("pipe").getParameter(0).asSink()
}
/**
 * Holds if data flows between arguments of a `stream.pipeline` call:
 *
 * ```js
 * await pipeline(
 *   pred,
 *   intermediate,
 *   succ
 * )
 * ```
 *
 * It could also be registered as a global additional step.
 */
predicate streamPipelineAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  // this step connects a pipeline parameter to the next pipeline parameter
  exists(API::CallNode cn, int i |
    // we assume that there are at most 10 pipeline stages
    i in [0 .. 10] and
    cn = nodeJsStream().getMember("pipeline").getACall()
  |
    pred = cn.getParameter(i).asSink() and
    succ = cn.getParameter(i + 1).asSink()
  )
  or
  // this step connects the first pipeline parameter to each later parameter
  exists(API::CallNode cn, int i |
    // we assume that there are at most 10 pipeline stages
    i in [1 .. 10] and
    cn = nodeJsStream().getMember("pipeline").getACall()
  |
    pred = cn.getParameter(0).asSink() and
    succ = cn.getParameter(i).asSink()
  )
}
/**
 * Gets the Node.js `stream` promises API, whether it was imported as
 * `stream/promises` or accessed as the `promises` member of `stream`.
 */
API::Node nodeJsStream() {
  result = API::moduleImport("stream/promises")
  or
  result = API::moduleImport("stream").getMember("promises")
}
/**
 * Gets a node holding data read from the Readable stream `stream`:
 * the stream itself, chunks from a 'data' event handler, or the result of
 * `read()` calls associated with a 'readable' event handler.
 */
DataFlow::Node readableStreamDataNode(API::Node stream) {
  // the stream object itself
  result = stream.asSource()
  or
  // 'data' event: stream.on('data', chunk => ...)
  exists(API::CallNode onEvent | onEvent = stream.getMember("on").getACall() |
    result = onEvent.getParameter(1).getParameter(0).asSource() and
    onEvent.getParameter(0).asSink().mayHaveStringValue("data")
  )
  or
  // 'readable' event: stream.on('readable', function () { this.read(); })
  // or reading from the stream directly
  exists(API::CallNode onEvent | onEvent = stream.getMember("on").getACall() |
    (
      result = onEvent.getParameter(1).getReceiver().getMember("read").getReturn().asSource() or
      result = stream.getMember("read").getReturn().asSource()
    ) and
    onEvent.getParameter(0).asSink().mayHaveStringValue("readable")
  )
}

View File

@@ -0,0 +1,234 @@
nodes
| busybus.js:9:30:9:33 | file |
| busybus.js:9:30:9:33 | file |
| busybus.js:9:36:9:39 | info |
| busybus.js:9:36:9:39 | info |
| busybus.js:10:19:10:50 | { filen ... eType } |
| busybus.js:10:19:10:57 | encoding |
| busybus.js:10:19:10:57 | filename |
| busybus.js:10:19:10:57 | mimeType |
| busybus.js:10:21:10:28 | filename |
| busybus.js:10:31:10:38 | encoding |
| busybus.js:10:41:10:48 | mimeType |
| busybus.js:10:54:10:57 | info |
| busybus.js:12:18:12:25 | filename |
| busybus.js:12:18:12:25 | filename |
| busybus.js:12:28:12:35 | encoding |
| busybus.js:12:28:12:35 | encoding |
| busybus.js:12:38:12:45 | mimeType |
| busybus.js:12:38:12:45 | mimeType |
| busybus.js:13:23:13:23 | z |
| busybus.js:13:31:13:36 | sink() |
| busybus.js:13:31:13:36 | sink() |
| busybus.js:15:30:15:33 | data |
| busybus.js:15:30:15:33 | data |
| busybus.js:16:22:16:25 | data |
| busybus.js:16:22:16:25 | data |
| busybus.js:22:25:22:42 | data |
| busybus.js:22:32:22:42 | this.read() |
| busybus.js:22:32:22:42 | this.read() |
| busybus.js:23:26:23:29 | data |
| busybus.js:23:26:23:29 | data |
| busybus.js:27:25:27:28 | name |
| busybus.js:27:25:27:28 | name |
| busybus.js:27:31:27:33 | val |
| busybus.js:27:31:27:33 | val |
| busybus.js:27:36:27:39 | info |
| busybus.js:27:36:27:39 | info |
| busybus.js:28:18:28:21 | name |
| busybus.js:28:18:28:21 | name |
| busybus.js:28:24:28:26 | val |
| busybus.js:28:24:28:26 | val |
| busybus.js:28:29:28:32 | info |
| busybus.js:28:29:28:32 | info |
| dicer.js:12:23:12:26 | part |
| dicer.js:12:23:12:26 | part |
| dicer.js:13:19:13:24 | sink() |
| dicer.js:13:19:13:24 | sink() |
| dicer.js:14:28:14:33 | header |
| dicer.js:14:28:14:33 | header |
| dicer.js:16:22:16:27 | header |
| dicer.js:16:22:16:30 | header[h] |
| dicer.js:16:22:16:30 | header[h] |
| dicer.js:19:26:19:29 | data |
| dicer.js:19:26:19:29 | data |
| dicer.js:20:18:20:21 | data |
| dicer.js:20:18:20:21 | data |
| formidable.js:7:11:7:25 | [fields, files] |
| formidable.js:7:11:7:49 | fields |
| formidable.js:7:11:7:49 | files |
| formidable.js:7:12:7:17 | fields |
| formidable.js:7:20:7:24 | files |
| formidable.js:7:29:7:49 | await f ... se(req) |
| formidable.js:7:35:7:49 | form.parse(req) |
| formidable.js:7:35:7:49 | form.parse(req) |
| formidable.js:8:10:8:15 | fields |
| formidable.js:8:10:8:15 | fields |
| formidable.js:8:18:8:22 | files |
| formidable.js:8:18:8:22 | files |
| formidable.js:9:27:9:34 | formname |
| formidable.js:9:27:9:34 | formname |
| formidable.js:9:37:9:40 | file |
| formidable.js:9:37:9:40 | file |
| formidable.js:10:14:10:21 | formname |
| formidable.js:10:14:10:21 | formname |
| formidable.js:10:24:10:27 | file |
| formidable.js:10:24:10:27 | file |
| formidable.js:12:22:12:29 | formname |
| formidable.js:12:22:12:29 | formname |
| formidable.js:12:32:12:35 | file |
| formidable.js:12:32:12:35 | file |
| formidable.js:13:14:13:21 | formname |
| formidable.js:13:14:13:21 | formname |
| formidable.js:13:24:13:27 | file |
| formidable.js:13:24:13:27 | file |
| formidable.js:15:23:15:31 | fieldName |
| formidable.js:15:23:15:31 | fieldName |
| formidable.js:15:34:15:43 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue |
| formidable.js:16:14:16:22 | fieldName |
| formidable.js:16:14:16:22 | fieldName |
| formidable.js:16:25:16:34 | fieldValue |
| formidable.js:16:25:16:34 | fieldValue |
| multiparty.js:8:22:8:25 | part |
| multiparty.js:8:22:8:25 | part |
| multiparty.js:9:14:9:17 | part |
| multiparty.js:9:14:9:17 | part |
| multiparty.js:10:19:10:24 | sink() |
| multiparty.js:10:19:10:24 | sink() |
| multiparty.js:14:37:14:42 | fields |
| multiparty.js:14:37:14:42 | fields |
| multiparty.js:14:45:14:49 | files |
| multiparty.js:14:45:14:49 | files |
| multiparty.js:15:14:15:19 | fields |
| multiparty.js:15:14:15:19 | fields |
| multiparty.js:15:22:15:26 | files |
| multiparty.js:15:22:15:26 | files |
edges
| busybus.js:9:30:9:33 | file | busybus.js:13:23:13:23 | z |
| busybus.js:9:30:9:33 | file | busybus.js:13:23:13:23 | z |
| busybus.js:9:36:9:39 | info | busybus.js:10:54:10:57 | info |
| busybus.js:9:36:9:39 | info | busybus.js:10:54:10:57 | info |
| busybus.js:10:19:10:50 | { filen ... eType } | busybus.js:10:21:10:28 | filename |
| busybus.js:10:19:10:50 | { filen ... eType } | busybus.js:10:31:10:38 | encoding |
| busybus.js:10:19:10:50 | { filen ... eType } | busybus.js:10:41:10:48 | mimeType |
| busybus.js:10:19:10:57 | encoding | busybus.js:12:28:12:35 | encoding |
| busybus.js:10:19:10:57 | encoding | busybus.js:12:28:12:35 | encoding |
| busybus.js:10:19:10:57 | filename | busybus.js:12:18:12:25 | filename |
| busybus.js:10:19:10:57 | filename | busybus.js:12:18:12:25 | filename |
| busybus.js:10:19:10:57 | mimeType | busybus.js:12:38:12:45 | mimeType |
| busybus.js:10:19:10:57 | mimeType | busybus.js:12:38:12:45 | mimeType |
| busybus.js:10:21:10:28 | filename | busybus.js:10:19:10:57 | filename |
| busybus.js:10:31:10:38 | encoding | busybus.js:10:19:10:57 | encoding |
| busybus.js:10:41:10:48 | mimeType | busybus.js:10:19:10:57 | mimeType |
| busybus.js:10:54:10:57 | info | busybus.js:10:19:10:50 | { filen ... eType } |
| busybus.js:13:23:13:23 | z | busybus.js:13:31:13:36 | sink() |
| busybus.js:13:23:13:23 | z | busybus.js:13:31:13:36 | sink() |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:22:25:22:42 | data | busybus.js:23:26:23:29 | data |
| busybus.js:22:25:22:42 | data | busybus.js:23:26:23:29 | data |
| busybus.js:22:32:22:42 | this.read() | busybus.js:22:25:22:42 | data |
| busybus.js:22:32:22:42 | this.read() | busybus.js:22:25:22:42 | data |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:14:28:14:33 | header | dicer.js:16:22:16:27 | header |
| dicer.js:14:28:14:33 | header | dicer.js:16:22:16:27 | header |
| dicer.js:16:22:16:27 | header | dicer.js:16:22:16:30 | header[h] |
| dicer.js:16:22:16:27 | header | dicer.js:16:22:16:30 | header[h] |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| formidable.js:7:11:7:25 | [fields, files] | formidable.js:7:12:7:17 | fields |
| formidable.js:7:11:7:25 | [fields, files] | formidable.js:7:20:7:24 | files |
| formidable.js:7:11:7:49 | fields | formidable.js:8:10:8:15 | fields |
| formidable.js:7:11:7:49 | fields | formidable.js:8:10:8:15 | fields |
| formidable.js:7:11:7:49 | files | formidable.js:8:18:8:22 | files |
| formidable.js:7:11:7:49 | files | formidable.js:8:18:8:22 | files |
| formidable.js:7:12:7:17 | fields | formidable.js:7:11:7:49 | fields |
| formidable.js:7:20:7:24 | files | formidable.js:7:11:7:49 | files |
| formidable.js:7:29:7:49 | await f ... se(req) | formidable.js:7:11:7:25 | [fields, files] |
| formidable.js:7:35:7:49 | form.parse(req) | formidable.js:7:29:7:49 | await f ... se(req) |
| formidable.js:7:35:7:49 | form.parse(req) | formidable.js:7:29:7:49 | await f ... se(req) |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
#select
| busybus.js:12:18:12:25 | filename | busybus.js:9:36:9:39 | info | busybus.js:12:18:12:25 | filename | This entity depends on a $@. | busybus.js:9:36:9:39 | info | user-provided value |
| busybus.js:12:28:12:35 | encoding | busybus.js:9:36:9:39 | info | busybus.js:12:28:12:35 | encoding | This entity depends on a $@. | busybus.js:9:36:9:39 | info | user-provided value |
| busybus.js:12:38:12:45 | mimeType | busybus.js:9:36:9:39 | info | busybus.js:12:38:12:45 | mimeType | This entity depends on a $@. | busybus.js:9:36:9:39 | info | user-provided value |
| busybus.js:13:31:13:36 | sink() | busybus.js:9:30:9:33 | file | busybus.js:13:31:13:36 | sink() | This entity depends on a $@. | busybus.js:9:30:9:33 | file | user-provided value |
| busybus.js:16:22:16:25 | data | busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data | This entity depends on a $@. | busybus.js:15:30:15:33 | data | user-provided value |
| busybus.js:23:26:23:29 | data | busybus.js:22:32:22:42 | this.read() | busybus.js:23:26:23:29 | data | This entity depends on a $@. | busybus.js:22:32:22:42 | this.read() | user-provided value |
| busybus.js:28:18:28:21 | name | busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name | This entity depends on a $@. | busybus.js:27:25:27:28 | name | user-provided value |
| busybus.js:28:24:28:26 | val | busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val | This entity depends on a $@. | busybus.js:27:31:27:33 | val | user-provided value |
| busybus.js:28:29:28:32 | info | busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info | This entity depends on a $@. | busybus.js:27:36:27:39 | info | user-provided value |
| dicer.js:13:19:13:24 | sink() | dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() | This entity depends on a $@. | dicer.js:12:23:12:26 | part | user-provided value |
| dicer.js:16:22:16:30 | header[h] | dicer.js:14:28:14:33 | header | dicer.js:16:22:16:30 | header[h] | This entity depends on a $@. | dicer.js:14:28:14:33 | header | user-provided value |
| dicer.js:20:18:20:21 | data | dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data | This entity depends on a $@. | dicer.js:19:26:19:29 | data | user-provided value |
| formidable.js:8:10:8:15 | fields | formidable.js:7:35:7:49 | form.parse(req) | formidable.js:8:10:8:15 | fields | This entity depends on a $@. | formidable.js:7:35:7:49 | form.parse(req) | user-provided value |
| formidable.js:8:18:8:22 | files | formidable.js:7:35:7:49 | form.parse(req) | formidable.js:8:18:8:22 | files | This entity depends on a $@. | formidable.js:7:35:7:49 | form.parse(req) | user-provided value |
| formidable.js:10:14:10:21 | formname | formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname | This entity depends on a $@. | formidable.js:9:27:9:34 | formname | user-provided value |
| formidable.js:10:24:10:27 | file | formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file | This entity depends on a $@. | formidable.js:9:37:9:40 | file | user-provided value |
| formidable.js:13:14:13:21 | formname | formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname | This entity depends on a $@. | formidable.js:12:22:12:29 | formname | user-provided value |
| formidable.js:13:24:13:27 | file | formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file | This entity depends on a $@. | formidable.js:12:32:12:35 | file | user-provided value |
| formidable.js:16:14:16:22 | fieldName | formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName | This entity depends on a $@. | formidable.js:15:23:15:31 | fieldName | user-provided value |
| formidable.js:16:25:16:34 | fieldValue | formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue | This entity depends on a $@. | formidable.js:15:34:15:43 | fieldValue | user-provided value |
| multiparty.js:9:14:9:17 | part | multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part | This entity depends on a $@. | multiparty.js:8:22:8:25 | part | user-provided value |
| multiparty.js:10:19:10:24 | sink() | multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() | This entity depends on a $@. | multiparty.js:8:22:8:25 | part | user-provided value |
| multiparty.js:15:14:15:19 | fields | multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields | This entity depends on a $@. | multiparty.js:14:37:14:42 | fields | user-provided value |
| multiparty.js:15:22:15:26 | files | multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files | This entity depends on a $@. | multiparty.js:14:45:14:49 | files | user-provided value |

View File

@@ -0,0 +1,34 @@
/**
* @name Remote Form Flow Sources
* @description Using remote user controlled sources from Forms
* @kind path-problem
* @problem.severity error
* @security-severity 5
* @precision high
* @id js/remote-flow-source
* @tags correctness
* security
*/
import javascript
import DataFlow::PathGraph
import experimental.semmle.javascript.FormParsers
/**
* A taint-tracking configuration for test
*/
// Test-only configuration: flows from any RemoteFlowSource (the form-parser
// sources modeled in experimental FormParsers) into calls of the `sink` test stub.
// NOTE(review): uses the deprecated class-based TaintTracking::Configuration API;
// acceptable for a test query, but new queries should prefer the module-based
// DataFlow::ConfigSig / TaintTracking::Global API — TODO confirm repo convention.
class Configuration extends TaintTracking::Configuration {
// The string uniquely identifies this configuration instance.
Configuration() { this = "RemoteFlowSourcesOUserForm" }
// Sources: anything modeled as remote user input (incl. form-parser callbacks).
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
// Sinks: arguments passed to `require('sink')(...)`, or its return value used
// as a stream destination (covers the `.pipe(sink())` pattern in the fixtures).
override predicate isSink(DataFlow::Node sink) {
sink = API::moduleImport("sink").getAParameter().asSink() or
sink = API::moduleImport("sink").getReturn().asSource()
}
}
from Configuration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
where cfg.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "This entity depends on a $@.", source.getNode(),
"user-provided value"

View File

@@ -0,0 +1,33 @@
const http = require('http');
const zlib = require('node:zlib');
const busboy = require('busboy');
const sink = require('sink'); // test stub: marks taint sinks for the query under test
http.createServer((req, res) => { // `req` is the remote (attacker-controlled) flow source
if (req.method === 'POST') {
const bb = busboy({ headers: req.headers }); // NOTE(review): req is never piped into bb — presumably only static flow matters for this fixture; TODO confirm
bb.on('file', (name, file, info) => { // `file` is a readable stream of user-uploaded content
const { filename, encoding, mimeType } = info; // all three fields are user-controlled
const z = zlib.createGzip(); // NOTE(review): gzip (compress) stream, not gunzip — intentional? fixture only tracks flow through .pipe()
sink(filename, encoding, mimeType) // sink
file.pipe(z).pipe(sink()) // tainted stream piped into a sink-returned destination
file.on('data', (data) => { // chunked read of the uploaded file
sink(data)
})
file.on('readable', function () {
// There is some data to read now.
let data;
while ((data = this.read()) !== null) { // pull-mode read; `this` is the tainted file stream
sink(data)
}
});
});
bb.on('field', (name, val, info) => { // non-file form fields: all user-controlled
sink(name, val, info)
});
}
}).listen(8000, () => {
console.log('Listening for requests');
});

View File

@@ -0,0 +1,25 @@
const { inspect } = require('util'); // NOTE(review): imported but unused in this fixture
const http = require('http');
const Dicer = require('dicer');
const sink = require('sink'); // test stub: marks taint sinks for the query under test
const PORT = 8080;
http.createServer((req, res) => { // `req` is the remote flow source (via Dicer part events)
let m;
const dicer = new Dicer({ boundary: m[1] || m[2] }); // NOTE(review): `m` is never assigned — this would throw a TypeError at runtime; presumably only static flow is exercised, TODO confirm
dicer.on('part', (part) => { // `part` is a stream of user-supplied multipart content
part.pipe(sink()) // tainted part piped into a sink-returned destination
part.on('header', (header) => { // raw multipart headers: user-controlled keys/values
for (h in header) { // NOTE(review): `h` is an implicit global (no let/const) — fixture-only code
sink(header[h])
}
});
part.on('data', (data) => { // chunked user-supplied body data
sink(data)
});
});
}).listen(PORT, () => {
console.log(`Listening for requests on port ${PORT}`);
});

View File

@@ -0,0 +1,22 @@
import http from 'node:http';
import formidable from 'formidable';
const sink = require('sink'); // NOTE(review): mixes ESM `import` with CommonJS `require` — fine for the CodeQL extractor, would need a transpiler to actually run; TODO confirm intended
const server = http.createServer(async (req, res) => { // `req` is the remote flow source
const form = formidable({});
const [fields, files] = await form.parse(req); // promise API: both results are user-controlled
sink(fields, files)
form.on('fileBegin', (formname, file) => { // fired before each uploaded file is written
sink(formname, file)
});
form.on('file', (formname, file) => { // fired after each uploaded file is received
sink(formname, file)
});
form.on('field', (fieldName, fieldValue) => { // non-file form fields: user-controlled
sink(fieldName, fieldValue)
});
});
server.listen(8080, () => {
console.log('Server listening on http://localhost:8080/ ...');
});

View File

@@ -0,0 +1,19 @@
var multiparty = require('multiparty');
var http = require('http');
var util = require('util'); // NOTE(review): imported but unused in this fixture
const sink = require('sink'); // test stub: marks taint sinks for the query under test
http.createServer(function (req, res) { // `req` is the remote flow source
var form = new multiparty.Form();
form.on('part', (part) => { // NOTE(review): form.parse(req) is never called for this instance, so the event would never fire at runtime — static flow only, TODO confirm
sink(part)
part.pipe(sink()) // tainted part piped into a sink-returned destination
});
var form2 = new multiparty.Form();
form2.parse(req, function (err, fields, files) { // callback API: fields/files are user-controlled
sink(fields, files)
});
form2.parse(req); // event-style parse with no callback (no listeners attached here)
}).listen(8080);

View File

@@ -0,0 +1,391 @@
nodes
| adm-zip.js:13:13:13:21 | req.files |
| adm-zip.js:13:13:13:21 | req.files |
| adm-zip.js:13:13:13:33 | req.fil ... ombFile |
| adm-zip.js:17:18:17:24 | tarFile |
| adm-zip.js:24:22:24:28 | tarFile |
| adm-zip.js:24:22:24:33 | tarFile.data |
| adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:36:5:36:38 | admZip. ... , true) |
| adm-zip.js:36:5:36:38 | admZip. ... , true) |
| decompress.js:11:16:11:33 | req.query.filePath |
| decompress.js:11:16:11:33 | req.query.filePath |
| decompress.js:11:16:11:33 | req.query.filePath |
| jszip.js:12:13:12:21 | req.files |
| jszip.js:12:13:12:21 | req.files |
| jszip.js:12:13:12:33 | req.fil ... ombFile |
| jszip.js:12:13:12:38 | req.fil ... le.data |
| jszip.js:32:18:32:24 | zipFile |
| jszip.js:33:22:33:28 | zipFile |
| jszip.js:33:22:33:33 | zipFile.data |
| jszip.js:33:22:33:33 | zipFile.data |
| node-tar.js:15:13:15:21 | req.files |
| node-tar.js:15:13:15:21 | req.files |
| node-tar.js:15:13:15:33 | req.fil ... ombFile |
| node-tar.js:15:13:15:38 | req.fil ... le.data |
| node-tar.js:19:18:19:24 | tarFile |
| node-tar.js:21:23:21:49 | Readabl ... e.data) |
| node-tar.js:21:37:21:43 | tarFile |
| node-tar.js:21:37:21:48 | tarFile.data |
| node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:29:5:29:37 | fs.crea ... e.name) |
| node-tar.js:29:25:29:31 | tarFile |
| node-tar.js:29:25:29:36 | tarFile.name |
| node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:45:5:45:37 | fs.crea ... e.name) |
| node-tar.js:45:25:45:31 | tarFile |
| node-tar.js:45:25:45:36 | tarFile.name |
| node-tar.js:46:9:46:20 | decompressor |
| node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:58:19:58:25 | tarFile |
| node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:59:25:59:31 | tarFile |
| node-tar.js:59:25:59:36 | tarFile.name |
| node-tar.js:59:25:59:36 | tarFile.name |
| pako.js:12:14:12:22 | req.files |
| pako.js:12:14:12:22 | req.files |
| pako.js:12:14:12:34 | req.fil ... ombFile |
| pako.js:12:14:12:39 | req.fil ... le.data |
| pako.js:13:14:13:22 | req.files |
| pako.js:13:14:13:22 | req.files |
| pako.js:13:14:13:34 | req.fil ... ombFile |
| pako.js:13:14:13:39 | req.fil ... le.data |
| pako.js:17:19:17:25 | zipFile |
| pako.js:18:11:18:68 | myArray |
| pako.js:18:21:18:68 | Buffer. ... uffer)) |
| pako.js:18:33:18:67 | new Uin ... buffer) |
| pako.js:18:48:18:54 | zipFile |
| pako.js:18:48:18:59 | zipFile.data |
| pako.js:18:48:18:66 | zipFile.data.buffer |
| pako.js:21:31:21:37 | myArray |
| pako.js:21:31:21:37 | myArray |
| pako.js:28:19:28:25 | zipFile |
| pako.js:29:11:29:62 | myArray |
| pako.js:29:21:29:55 | new Uin ... buffer) |
| pako.js:29:21:29:62 | new Uin ... .buffer |
| pako.js:29:36:29:42 | zipFile |
| pako.js:29:36:29:47 | zipFile.data |
| pako.js:29:36:29:54 | zipFile.data.buffer |
| pako.js:32:31:32:37 | myArray |
| pako.js:32:31:32:37 | myArray |
| unbzip2.js:12:5:12:43 | fs.crea ... lePath) |
| unbzip2.js:12:25:12:42 | req.query.FilePath |
| unbzip2.js:12:25:12:42 | req.query.FilePath |
| unbzip2.js:12:50:12:54 | bz2() |
| unbzip2.js:12:50:12:54 | bz2() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) |
| unzipper.js:13:40:13:48 | req.files |
| unzipper.js:13:40:13:48 | req.files |
| unzipper.js:13:40:13:56 | req.files.ZipFile |
| unzipper.js:13:40:13:61 | req.fil ... le.data |
| unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:73:23:73:38 | unzipper.Parse() |
| unzipper.js:73:23:73:38 | unzipper.Parse() |
| yauzl.js:12:18:12:26 | req.files |
| yauzl.js:12:18:12:26 | req.files |
| yauzl.js:12:18:12:34 | req.files.zipFile |
| yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:13:22:13:30 | req.files |
| yauzl.js:13:22:13:30 | req.files |
| yauzl.js:13:22:13:38 | req.files.zipFile |
| yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:14:34:14:42 | req.files |
| yauzl.js:14:34:14:42 | req.files |
| yauzl.js:14:34:14:50 | req.files.zipFile |
| yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:37:16:37:33 | req.query.filePath |
| yauzl.js:37:16:37:33 | req.query.filePath |
| yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:41:64:41:73 | readStream |
| yauzl.js:41:64:41:73 | readStream |
| yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:43:21:43:39 | zipfile.readEntry() |
| zlib.js:15:19:15:27 | req.files |
| zlib.js:15:19:15:27 | req.files |
| zlib.js:15:19:15:39 | req.fil ... ombFile |
| zlib.js:15:19:15:44 | req.fil ... le.data |
| zlib.js:17:18:17:26 | req.files |
| zlib.js:17:18:17:26 | req.files |
| zlib.js:17:18:17:38 | req.fil ... ombFile |
| zlib.js:17:18:17:43 | req.fil ... le.data |
| zlib.js:19:24:19:32 | req.files |
| zlib.js:19:24:19:32 | req.files |
| zlib.js:19:24:19:44 | req.fil ... ombFile |
| zlib.js:19:24:19:49 | req.fil ... le.data |
| zlib.js:21:32:21:40 | req.files |
| zlib.js:21:32:21:40 | req.files |
| zlib.js:21:32:21:52 | req.fil ... ombFile |
| zlib.js:21:32:21:57 | req.fil ... le.data |
| zlib.js:27:24:27:30 | zipFile |
| zlib.js:29:9:29:15 | zipFile |
| zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:33:9:33:15 | zipFile |
| zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:38:9:38:15 | zipFile |
| zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:62:23:62:29 | zipFile |
| zlib.js:63:21:63:27 | zipFile |
| zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:64:20:64:26 | zipFile |
| zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:65:31:65:37 | zipFile |
| zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:74:29:74:35 | zipFile |
| zlib.js:75:25:75:51 | Readabl ... e.data) |
| zlib.js:75:39:75:45 | zipFile |
| zlib.js:75:39:75:50 | zipFile.data |
| zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:82:43:82:49 | zipFile |
| zlib.js:83:11:83:51 | inputStream |
| zlib.js:83:25:83:51 | Readabl ... e.data) |
| zlib.js:83:39:83:45 | zipFile |
| zlib.js:83:39:83:50 | zipFile.data |
| zlib.js:86:9:86:19 | inputStream |
| zlib.js:87:9:87:27 | zlib.createGunzip() |
| zlib.js:87:9:87:27 | zlib.createGunzip() |
edges
| adm-zip.js:13:13:13:21 | req.files | adm-zip.js:13:13:13:33 | req.fil ... ombFile |
| adm-zip.js:13:13:13:21 | req.files | adm-zip.js:13:13:13:33 | req.fil ... ombFile |
| adm-zip.js:13:13:13:33 | req.fil ... ombFile | adm-zip.js:17:18:17:24 | tarFile |
| adm-zip.js:17:18:17:24 | tarFile | adm-zip.js:24:22:24:28 | tarFile |
| adm-zip.js:24:22:24:28 | tarFile | adm-zip.js:24:22:24:33 | tarFile.data |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:36:5:36:38 | admZip. ... , true) |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:36:5:36:38 | admZip. ... , true) |
| decompress.js:11:16:11:33 | req.query.filePath | decompress.js:11:16:11:33 | req.query.filePath |
| jszip.js:12:13:12:21 | req.files | jszip.js:12:13:12:33 | req.fil ... ombFile |
| jszip.js:12:13:12:21 | req.files | jszip.js:12:13:12:33 | req.fil ... ombFile |
| jszip.js:12:13:12:33 | req.fil ... ombFile | jszip.js:12:13:12:38 | req.fil ... le.data |
| jszip.js:12:13:12:38 | req.fil ... le.data | jszip.js:32:18:32:24 | zipFile |
| jszip.js:32:18:32:24 | zipFile | jszip.js:33:22:33:28 | zipFile |
| jszip.js:33:22:33:28 | zipFile | jszip.js:33:22:33:33 | zipFile.data |
| jszip.js:33:22:33:28 | zipFile | jszip.js:33:22:33:33 | zipFile.data |
| node-tar.js:15:13:15:21 | req.files | node-tar.js:15:13:15:33 | req.fil ... ombFile |
| node-tar.js:15:13:15:21 | req.files | node-tar.js:15:13:15:33 | req.fil ... ombFile |
| node-tar.js:15:13:15:33 | req.fil ... ombFile | node-tar.js:15:13:15:38 | req.fil ... le.data |
| node-tar.js:15:13:15:38 | req.fil ... le.data | node-tar.js:19:18:19:24 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:21:37:21:43 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:29:25:29:31 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:45:25:45:31 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:58:19:58:25 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:59:25:59:31 | tarFile |
| node-tar.js:21:23:21:49 | Readabl ... e.data) | node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:21:23:21:49 | Readabl ... e.data) | node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:21:37:21:43 | tarFile | node-tar.js:21:37:21:48 | tarFile.data |
| node-tar.js:21:37:21:48 | tarFile.data | node-tar.js:21:23:21:49 | Readabl ... e.data) |
| node-tar.js:29:5:29:37 | fs.crea ... e.name) | node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:29:5:29:37 | fs.crea ... e.name) | node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:29:25:29:31 | tarFile | node-tar.js:29:25:29:36 | tarFile.name |
| node-tar.js:29:25:29:36 | tarFile.name | node-tar.js:29:5:29:37 | fs.crea ... e.name) |
| node-tar.js:45:5:45:37 | fs.crea ... e.name) | node-tar.js:46:9:46:20 | decompressor |
| node-tar.js:45:25:45:31 | tarFile | node-tar.js:45:25:45:36 | tarFile.name |
| node-tar.js:45:25:45:36 | tarFile.name | node-tar.js:45:5:45:37 | fs.crea ... e.name) |
| node-tar.js:46:9:46:20 | decompressor | node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:46:9:46:20 | decompressor | node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:58:19:58:25 | tarFile | node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:58:19:58:25 | tarFile | node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:59:25:59:31 | tarFile | node-tar.js:59:25:59:36 | tarFile.name |
| node-tar.js:59:25:59:31 | tarFile | node-tar.js:59:25:59:36 | tarFile.name |
| pako.js:12:14:12:22 | req.files | pako.js:12:14:12:34 | req.fil ... ombFile |
| pako.js:12:14:12:22 | req.files | pako.js:12:14:12:34 | req.fil ... ombFile |
| pako.js:12:14:12:34 | req.fil ... ombFile | pako.js:12:14:12:39 | req.fil ... le.data |
| pako.js:12:14:12:39 | req.fil ... le.data | pako.js:17:19:17:25 | zipFile |
| pako.js:13:14:13:22 | req.files | pako.js:13:14:13:34 | req.fil ... ombFile |
| pako.js:13:14:13:22 | req.files | pako.js:13:14:13:34 | req.fil ... ombFile |
| pako.js:13:14:13:34 | req.fil ... ombFile | pako.js:13:14:13:39 | req.fil ... le.data |
| pako.js:13:14:13:39 | req.fil ... le.data | pako.js:28:19:28:25 | zipFile |
| pako.js:17:19:17:25 | zipFile | pako.js:18:48:18:54 | zipFile |
| pako.js:18:11:18:68 | myArray | pako.js:21:31:21:37 | myArray |
| pako.js:18:11:18:68 | myArray | pako.js:21:31:21:37 | myArray |
| pako.js:18:21:18:68 | Buffer. ... uffer)) | pako.js:18:11:18:68 | myArray |
| pako.js:18:33:18:67 | new Uin ... buffer) | pako.js:18:21:18:68 | Buffer. ... uffer)) |
| pako.js:18:48:18:54 | zipFile | pako.js:18:48:18:59 | zipFile.data |
| pako.js:18:48:18:59 | zipFile.data | pako.js:18:48:18:66 | zipFile.data.buffer |
| pako.js:18:48:18:66 | zipFile.data.buffer | pako.js:18:33:18:67 | new Uin ... buffer) |
| pako.js:28:19:28:25 | zipFile | pako.js:29:36:29:42 | zipFile |
| pako.js:29:11:29:62 | myArray | pako.js:32:31:32:37 | myArray |
| pako.js:29:11:29:62 | myArray | pako.js:32:31:32:37 | myArray |
| pako.js:29:21:29:55 | new Uin ... buffer) | pako.js:29:21:29:62 | new Uin ... .buffer |
| pako.js:29:21:29:62 | new Uin ... .buffer | pako.js:29:11:29:62 | myArray |
| pako.js:29:36:29:42 | zipFile | pako.js:29:36:29:47 | zipFile.data |
| pako.js:29:36:29:47 | zipFile.data | pako.js:29:36:29:54 | zipFile.data.buffer |
| pako.js:29:36:29:54 | zipFile.data.buffer | pako.js:29:21:29:55 | new Uin ... buffer) |
| unbzip2.js:12:5:12:43 | fs.crea ... lePath) | unbzip2.js:12:50:12:54 | bz2() |
| unbzip2.js:12:5:12:43 | fs.crea ... lePath) | unbzip2.js:12:50:12:54 | bz2() |
| unbzip2.js:12:25:12:42 | req.query.FilePath | unbzip2.js:12:5:12:43 | fs.crea ... lePath) |
| unbzip2.js:12:25:12:42 | req.query.FilePath | unbzip2.js:12:5:12:43 | fs.crea ... lePath) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:73:23:73:38 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:73:23:73:38 | unzipper.Parse() |
| unzipper.js:13:40:13:48 | req.files | unzipper.js:13:40:13:56 | req.files.ZipFile |
| unzipper.js:13:40:13:48 | req.files | unzipper.js:13:40:13:56 | req.files.ZipFile |
| unzipper.js:13:40:13:56 | req.files.ZipFile | unzipper.js:13:40:13:61 | req.fil ... le.data |
| unzipper.js:13:40:13:61 | req.fil ... le.data | unzipper.js:13:26:13:62 | Readabl ... e.data) |
| yauzl.js:12:18:12:26 | req.files | yauzl.js:12:18:12:34 | req.files.zipFile |
| yauzl.js:12:18:12:26 | req.files | yauzl.js:12:18:12:34 | req.files.zipFile |
| yauzl.js:12:18:12:34 | req.files.zipFile | yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:12:18:12:34 | req.files.zipFile | yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:13:22:13:30 | req.files | yauzl.js:13:22:13:38 | req.files.zipFile |
| yauzl.js:13:22:13:30 | req.files | yauzl.js:13:22:13:38 | req.files.zipFile |
| yauzl.js:13:22:13:38 | req.files.zipFile | yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:13:22:13:38 | req.files.zipFile | yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:14:34:14:42 | req.files | yauzl.js:14:34:14:50 | req.files.zipFile |
| yauzl.js:14:34:14:42 | req.files | yauzl.js:14:34:14:50 | req.files.zipFile |
| yauzl.js:14:34:14:50 | req.files.zipFile | yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:14:34:14:50 | req.files.zipFile | yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| zlib.js:15:19:15:27 | req.files | zlib.js:15:19:15:39 | req.fil ... ombFile |
| zlib.js:15:19:15:27 | req.files | zlib.js:15:19:15:39 | req.fil ... ombFile |
| zlib.js:15:19:15:39 | req.fil ... ombFile | zlib.js:15:19:15:44 | req.fil ... le.data |
| zlib.js:15:19:15:44 | req.fil ... le.data | zlib.js:27:24:27:30 | zipFile |
| zlib.js:17:18:17:26 | req.files | zlib.js:17:18:17:38 | req.fil ... ombFile |
| zlib.js:17:18:17:26 | req.files | zlib.js:17:18:17:38 | req.fil ... ombFile |
| zlib.js:17:18:17:38 | req.fil ... ombFile | zlib.js:17:18:17:43 | req.fil ... le.data |
| zlib.js:17:18:17:43 | req.fil ... le.data | zlib.js:62:23:62:29 | zipFile |
| zlib.js:19:24:19:32 | req.files | zlib.js:19:24:19:44 | req.fil ... ombFile |
| zlib.js:19:24:19:32 | req.files | zlib.js:19:24:19:44 | req.fil ... ombFile |
| zlib.js:19:24:19:44 | req.fil ... ombFile | zlib.js:19:24:19:49 | req.fil ... le.data |
| zlib.js:19:24:19:49 | req.fil ... le.data | zlib.js:74:29:74:35 | zipFile |
| zlib.js:21:32:21:40 | req.files | zlib.js:21:32:21:52 | req.fil ... ombFile |
| zlib.js:21:32:21:40 | req.files | zlib.js:21:32:21:52 | req.fil ... ombFile |
| zlib.js:21:32:21:52 | req.fil ... ombFile | zlib.js:21:32:21:57 | req.fil ... le.data |
| zlib.js:21:32:21:57 | req.fil ... le.data | zlib.js:82:43:82:49 | zipFile |
| zlib.js:27:24:27:30 | zipFile | zlib.js:29:9:29:15 | zipFile |
| zlib.js:27:24:27:30 | zipFile | zlib.js:33:9:33:15 | zipFile |
| zlib.js:27:24:27:30 | zipFile | zlib.js:38:9:38:15 | zipFile |
| zlib.js:29:9:29:15 | zipFile | zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:29:9:29:15 | zipFile | zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:33:9:33:15 | zipFile | zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:33:9:33:15 | zipFile | zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:38:9:38:15 | zipFile | zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:38:9:38:15 | zipFile | zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:62:23:62:29 | zipFile | zlib.js:63:21:63:27 | zipFile |
| zlib.js:62:23:62:29 | zipFile | zlib.js:64:20:64:26 | zipFile |
| zlib.js:62:23:62:29 | zipFile | zlib.js:65:31:65:37 | zipFile |
| zlib.js:63:21:63:27 | zipFile | zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:63:21:63:27 | zipFile | zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:64:20:64:26 | zipFile | zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:64:20:64:26 | zipFile | zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:65:31:65:37 | zipFile | zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:65:31:65:37 | zipFile | zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:74:29:74:35 | zipFile | zlib.js:75:39:75:45 | zipFile |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:75:39:75:45 | zipFile | zlib.js:75:39:75:50 | zipFile.data |
| zlib.js:75:39:75:50 | zipFile.data | zlib.js:75:25:75:51 | Readabl ... e.data) |
| zlib.js:82:43:82:49 | zipFile | zlib.js:83:39:83:45 | zipFile |
| zlib.js:83:11:83:51 | inputStream | zlib.js:86:9:86:19 | inputStream |
| zlib.js:83:25:83:51 | Readabl ... e.data) | zlib.js:83:11:83:51 | inputStream |
| zlib.js:83:39:83:45 | zipFile | zlib.js:83:39:83:50 | zipFile.data |
| zlib.js:83:39:83:50 | zipFile.data | zlib.js:83:25:83:51 | Readabl ... e.data) |
| zlib.js:86:9:86:19 | inputStream | zlib.js:87:9:87:27 | zlib.createGunzip() |
| zlib.js:86:9:86:19 | inputStream | zlib.js:87:9:87:27 | zlib.createGunzip() |
#select
| adm-zip.js:28:25:28:42 | zipEntry.getData() | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:28:25:28:42 | zipEntry.getData() | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| adm-zip.js:32:17:32:41 | admZip. ... "10GB") | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:32:17:32:41 | admZip. ... "10GB") | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| adm-zip.js:34:5:34:55 | admZip. ... , true) | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:34:5:34:55 | admZip. ... , true) | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| adm-zip.js:36:5:36:38 | admZip. ... , true) | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:36:5:36:38 | admZip. ... , true) | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| decompress.js:11:16:11:33 | req.query.filePath | decompress.js:11:16:11:33 | req.query.filePath | decompress.js:11:16:11:33 | req.query.filePath | This Decompression depends on a $@. | decompress.js:11:16:11:33 | req.query.filePath | potentially untrusted source |
| jszip.js:33:22:33:33 | zipFile.data | jszip.js:12:13:12:21 | req.files | jszip.js:33:22:33:33 | zipFile.data | This Decompression depends on a $@. | jszip.js:12:13:12:21 | req.files | potentially untrusted source |
| node-tar.js:24:9:24:15 | tar.x() | node-tar.js:15:13:15:21 | req.files | node-tar.js:24:9:24:15 | tar.x() | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:30:9:33:10 | tar.x({ ... }) | node-tar.js:15:13:15:21 | req.files | node-tar.js:30:9:33:10 | tar.x({ ... }) | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:48:9:50:10 | tar.x({ ... }) | node-tar.js:15:13:15:21 | req.files | node-tar.js:48:9:50:10 | tar.x({ ... }) | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:58:19:58:30 | tarFile.name | node-tar.js:15:13:15:21 | req.files | node-tar.js:58:19:58:30 | tarFile.name | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:59:25:59:36 | tarFile.name | node-tar.js:15:13:15:21 | req.files | node-tar.js:59:25:59:36 | tarFile.name | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| pako.js:21:31:21:37 | myArray | pako.js:12:14:12:22 | req.files | pako.js:21:31:21:37 | myArray | This Decompression depends on a $@. | pako.js:12:14:12:22 | req.files | potentially untrusted source |
| pako.js:32:31:32:37 | myArray | pako.js:13:14:13:22 | req.files | pako.js:32:31:32:37 | myArray | This Decompression depends on a $@. | pako.js:13:14:13:22 | req.files | potentially untrusted source |
| unbzip2.js:12:50:12:54 | bz2() | unbzip2.js:12:25:12:42 | req.query.FilePath | unbzip2.js:12:50:12:54 | bz2() | This Decompression depends on a $@. | unbzip2.js:12:25:12:42 | req.query.FilePath | potentially untrusted source |
| unzipper.js:16:23:16:63 | unzippe ... ath' }) | unzipper.js:13:40:13:48 | req.files | unzipper.js:16:23:16:63 | unzippe ... ath' }) | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:19:23:19:41 | unzipper.ParseOne() | unzipper.js:13:40:13:48 | req.files | unzipper.js:19:23:19:41 | unzipper.ParseOne() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:24:15:24:30 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:24:15:24:30 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:34:15:34:30 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:34:15:34:30 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:41:35:41:71 | unzippe ... true }) | unzipper.js:13:40:13:48 | req.files | unzipper.js:41:35:41:71 | unzippe ... true }) | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:51:36:51:72 | unzippe ... true }) | unzipper.js:13:40:13:48 | req.files | unzipper.js:51:36:51:72 | unzippe ... true }) | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:60:23:60:38 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:60:23:60:38 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:73:23:73:38 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:73:23:73:38 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| yauzl.js:12:18:12:39 | req.fil ... le.data | yauzl.js:12:18:12:26 | req.files | yauzl.js:12:18:12:39 | req.fil ... le.data | This Decompression depends on a $@. | yauzl.js:12:18:12:26 | req.files | potentially untrusted source |
| yauzl.js:13:22:13:43 | req.fil ... le.data | yauzl.js:13:22:13:30 | req.files | yauzl.js:13:22:13:43 | req.fil ... le.data | This Decompression depends on a $@. | yauzl.js:13:22:13:30 | req.files | potentially untrusted source |
| yauzl.js:14:34:14:55 | req.fil ... le.data | yauzl.js:14:34:14:42 | req.files | yauzl.js:14:34:14:55 | req.fil ... le.data | This Decompression depends on a $@. | yauzl.js:14:34:14:42 | req.files | potentially untrusted source |
| yauzl.js:39:9:39:27 | zipfile.readEntry() | yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() | This Decompression depends on a $@. | yauzl.js:37:16:37:33 | req.query.filePath | potentially untrusted source |
| yauzl.js:41:64:41:73 | readStream | yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream | This Decompression depends on a $@. | yauzl.js:37:16:37:33 | req.query.filePath | potentially untrusted source |
| yauzl.js:43:21:43:39 | zipfile.readEntry() | yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() | This Decompression depends on a $@. | yauzl.js:37:16:37:33 | req.query.filePath | potentially untrusted source |
| zlib.js:29:9:29:20 | zipFile.data | zlib.js:15:19:15:27 | req.files | zlib.js:29:9:29:20 | zipFile.data | This Decompression depends on a $@. | zlib.js:15:19:15:27 | req.files | potentially untrusted source |
| zlib.js:33:9:33:20 | zipFile.data | zlib.js:15:19:15:27 | req.files | zlib.js:33:9:33:20 | zipFile.data | This Decompression depends on a $@. | zlib.js:15:19:15:27 | req.files | potentially untrusted source |
| zlib.js:38:9:38:20 | zipFile.data | zlib.js:15:19:15:27 | req.files | zlib.js:38:9:38:20 | zipFile.data | This Decompression depends on a $@. | zlib.js:15:19:15:27 | req.files | potentially untrusted source |
| zlib.js:63:21:63:32 | zipFile.data | zlib.js:17:18:17:26 | req.files | zlib.js:63:21:63:32 | zipFile.data | This Decompression depends on a $@. | zlib.js:17:18:17:26 | req.files | potentially untrusted source |
| zlib.js:64:20:64:31 | zipFile.data | zlib.js:17:18:17:26 | req.files | zlib.js:64:20:64:31 | zipFile.data | This Decompression depends on a $@. | zlib.js:17:18:17:26 | req.files | potentially untrusted source |
| zlib.js:65:31:65:42 | zipFile.data | zlib.js:17:18:17:26 | req.files | zlib.js:65:31:65:42 | zipFile.data | This Decompression depends on a $@. | zlib.js:17:18:17:26 | req.files | potentially untrusted source |
| zlib.js:77:22:77:40 | zlib.createGunzip() | zlib.js:19:24:19:32 | req.files | zlib.js:77:22:77:40 | zlib.createGunzip() | This Decompression depends on a $@. | zlib.js:19:24:19:32 | req.files | potentially untrusted source |
| zlib.js:78:22:78:39 | zlib.createUnzip() | zlib.js:19:24:19:32 | req.files | zlib.js:78:22:78:39 | zlib.createUnzip() | This Decompression depends on a $@. | zlib.js:19:24:19:32 | req.files | potentially untrusted source |
| zlib.js:79:22:79:50 | zlib.cr ... press() | zlib.js:19:24:19:32 | req.files | zlib.js:79:22:79:50 | zlib.cr ... press() | This Decompression depends on a $@. | zlib.js:19:24:19:32 | req.files | potentially untrusted source |
| zlib.js:87:9:87:27 | zlib.createGunzip() | zlib.js:21:32:21:40 | req.files | zlib.js:87:9:87:27 | zlib.createGunzip() | This Decompression depends on a $@. | zlib.js:21:32:21:40 | req.files | potentially untrusted source |

View File

@@ -0,0 +1 @@
experimental/Security/CWE-522-DecompressionBombs/DecompressionBombs.ql

View File

@@ -0,0 +1,37 @@
// Decompression-bomb fixture: a minimal Express app that accepts a multipart
// upload and hands it to the vulnerable zipBomb() helper defined below.
const AdmZip = require("adm-zip");
const express = require('express')
const fileUpload = require("express-fileupload");
const fs = require("fs");
const app = express();
const port = 3000;
// express-fileupload populates req.files with the uploaded file objects.
app.use(fileUpload());
app.listen(port, () => {
console.log(`Example app listening on port ${port}`)
});
// Untrusted source: req.files comes straight from the HTTP client.
app.post('/upload', (req, res) => {
zipBomb(req.files.zipBombFile)
res.send('Hello World!')
});
// Deliberately unsafe: extracts an attacker-controlled archive with adm-zip
// without consulting any decompressed-size information, so each extraction
// call below is a decompression-bomb sink for the query.
// NOTE(review): the parameter is named tarFile but the route passes the
// uploaded zip file object (with .name and .data) — confirm naming intent.
function zipBomb(tarFile) {
fs.writeFileSync(tarFile.name, tarFile.data);
// or using fs.writeFile
// file path is a tmp file name that can get from DB after saving to DB with remote file upload
// so the input file name will come from a DB source
const admZip
= new AdmZip(tarFile.data);
const zipEntries = admZip.getEntries();
// Reads entry contents without any size guard.
zipEntries.forEach(function (zipEntry) {
if (zipEntry.entryName === "my_file.txt") {
console.log(zipEntry.getData().toString("utf8"));
}
});
// outputs the content of file named 10GB
console.log(admZip.readAsText("10GB"));
// extracts the specified file to the specified location
admZip.extractEntryTo("10GB", "/tmp/", false, true);
// extracts everything
admZip.extractAllTo("./tmp", true);
}

View File

@@ -0,0 +1,16 @@
// Decompression-bomb fixture for the `decompress` package: the archive path
// comes straight from the query string and is extracted without a size limit.
const decompress = require('decompress');
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
// Unsafe: req.query.filePath is attacker-controlled and decompress()
// is given no option bounding the extracted size.
decompress(req.query.filePath, 'dist').then(files => {
console.log('done!');
});
res.send("OK")
});

View File

@@ -0,0 +1,63 @@
// Decompression-bomb fixture for fflate: the "NOT OK" calls decompress
// attacker-controlled upload bytes with no limit, while the "OK" calls pass
// a filter callback that rejects entries whose originalSize is too large.
// NOTE(review): fflate documents `filter` for the zip APIs; confirm that the
// plain unzlib/gunzip/decompress entry points honor it as a sanitizer.
const fflate = require('fflate');
const express = require('express')
const fileUpload = require("express-fileupload");
const { writeFileSync } = require("fs");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
// NOT OK
fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data));
fflate.unzip(new Uint8Array(new Uint8Array(req.files.CompressedFile.data)));
fflate.unzlib(new Uint8Array(req.files.CompressedFile.data));
fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data));
fflate.gunzip(new Uint8Array(req.files.CompressedFile.data));
fflate.gunzipSync(new Uint8Array(req.files.CompressedFile.data));
fflate.decompress(new Uint8Array(req.files.CompressedFile.data));
fflate.decompressSync(new Uint8Array(req.files.CompressedFile.data));
// OK
fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.unzip(new Uint8Array(new Uint8Array(req.files.CompressedFile.data)), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.unzlib(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.gunzip(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.gunzipSync(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.decompress(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
fflate.decompressSync(new Uint8Array(req.files.CompressedFile.data), {
filter(file) {
return file.originalSize <= 1_000_000;
}
});
});

View File

@@ -0,0 +1,14 @@
// Decompression-bomb fixture for gunzip-maybe: user-controlled bytes are
// gunzipped with no limit on the decompressed output size.
const gunzipmaybe = require("gunzip-maybe");
const express = require('express')
const fileUpload = require("express-fileupload");
const fs = require("fs");
const { Readable } = require('stream');
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
// Unsafe
const RemoteStream = Readable.from(req.files.ZipFile.data);
// gunzip-maybe exports a factory: it must be invoked to obtain the transform
// stream (piping the bare module function throws a TypeError), and the
// decompressed output is written to a file via fs.createWriteStream —
// streams returned by pipe() have no createWriteStream method.
RemoteStream.pipe(gunzipmaybe()).pipe(fs.createWriteStream("tmp"))
});

View File

@@ -0,0 +1,44 @@
// Decompression-bomb fixture for jszip: the upload handler feeds the raw
// uploaded bytes to both an unsafe and a size-checked extraction helper.
const jszipp = require("jszip");
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
console.log(`Example app listening on port ${port}`)
});
app.post('/upload', (req, res) => {
// NOTE(review): the helpers receive req.files.zipBombFile.data (a Buffer)
// but internally read `.data` off it again — confirm intended shape.
zipBomb(req.files.zipBombFile.data)
zipBombSafe(req.files.zipBombFile.data)
res.send("OK")
});
// Safe variant: consults the entry's stored uncompressedSize before
// extracting, so oversized (bomb) entries are rejected up front.
function zipBombSafe(zipFile) {
jszipp.loadAsync(zipFile.data).then(function (zip) {
// Guard: bail out if the stored uncompressed size exceeds 8 MiB.
if (zip.file("10GB")["_data"]["uncompressedSize"] > 1024 * 1024 * 8) {
console.log("error")
return
}
zip.files["10GB"].async("uint8array").then(function (u8) {
console.log(u8);
});
zip.file("10GB").async("uint8array").then(function (u8) {
console.log(u8);
});
});
}
// Deliberately unsafe variant: extracts entries from an attacker-controlled
// zip without consulting uncompressedSize first (decompression-bomb sink).
function zipBomb(zipFile) {
jszipp.loadAsync(zipFile.data).then(function (zip) {
zip.files["10GB"].async("uint8array").then(function (u8) {
console.log(u8);
});
zip.file("10GB").async("uint8array").then(function (u8) {
console.log(u8);
});
});
}
// Export the helpers actually defined in this file. The previous export
// referenced an undefined identifier (localZipLoad), which made loading this
// module throw a ReferenceError.
module.exports = { zipBomb, zipBombSafe };

View File

@@ -0,0 +1,67 @@
// Decompression-bomb fixture for node-tar: the upload handler stores and
// extracts an attacker-controlled tarball via the zipBomb() helper below.
const tar = require("tar");
const express = require('express')
const fileUpload = require("express-fileupload");
// Readable comes from node:stream. The previous code also destructured
// writeFileSync from "stream", but that function lives in "fs" (already
// required below) and was therefore always undefined here — it is dropped.
const { Readable } = require("stream");
const fs = require("fs");
const { createGunzip } = require("zlib");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
console.log(`Example app listening on port ${port}`)
});
// Untrusted source: the uploaded file's raw bytes.
app.post('/upload', (req, res) => {
zipBomb(req.files.zipBombFile.data)
res.send('Hello World!')
});
// Exercises several node-tar extraction patterns with attacker-controlled
// input. The two calls passing maxReadSize are the "safe" cases; the others
// extract with no bound on the decompressed data.
// NOTE(review): the route passes req.files.zipBombFile.data (a Buffer), yet
// this reads tarFile.data / tarFile.name — confirm the intended argument.
function zipBomb(tarFile) {
// scenario 1
const inputFile = Readable.from(tarFile.data);
const outputFile = fs.createWriteStream('/tmp/untar');
// Unsafe: streaming extraction with no size limit.
inputFile.pipe(
tar.x()
).pipe(outputFile);
// scenario 2
fs.writeFileSync(tarFile.name, tarFile.data);
fs.createReadStream(tarFile.name).pipe(
tar.x({
strip: 1,
C: 'some-dir'
})
)
// safe https://github.com/isaacs/node-tar/blob/8c5af15e43a769fd24aa7f1c84d93e54824d19d2/lib/list.js#L90
fs.createReadStream(tarFile.name).pipe(
tar.x({
strip: 1,
C: 'some-dir',
maxReadSize: 16 * 1024 * 1024 // 16 MB
})
)
// scenario 3
// Unsafe: gunzip then untar, neither stage bounded.
const decompressor = createGunzip();
fs.createReadStream(tarFile.name).pipe(
decompressor
).pipe(
tar.x({
cwd: "dest"
})
)
// scenario 4
fs.writeFileSync(tarFile.name, tarFile.data);
// or using fs.writeFile
// file path is a tmp file name that can get from DB after saving to DB with remote file upload
// so the input file name will come from a DB source
tar.x({ file: tarFile.name })
tar.extract({ file: tarFile.name })
// safe https://github.com/isaacs/node-tar/blob/8c5af15e43a769fd24aa7f1c84d93e54824d19d2/lib/list.js#L90
tar.x({
file: tarFile.name,
strip: 1,
C: 'some-dir',
maxReadSize: 16 * 1024 * 1024 // 16 MB
})
}

View File

@@ -0,0 +1,37 @@
// Decompression-bomb fixture for pako: two helpers inflate the uploaded
// bytes with no bound on the decompressed output.
const pako = require('pako');
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
console.log(`Example app listening on port ${port}`)
});
app.post('/upload', (req, res) => {
// Untrusted source: raw bytes of the multipart upload.
zipBomb1(req.files.zipBombFile.data);
zipBomb2(req.files.zipBombFile.data);
res.send('Hello World!');
});
// Unsafe: inflates attacker-controlled bytes with pako and no output limit.
// NOTE(review): the caller passes the raw data Buffer, yet this reads
// zipFile.data.buffer — confirm the intended argument shape.
function zipBomb1(zipFile) {
const myArray = Buffer.from(new Uint8Array(zipFile.data.buffer));
let output;
try {
output = pako.inflate(myArray);
console.log(output);
} catch (err) {
console.log(err);
}
}
// Unsafe: same as zipBomb1 but passes an ArrayBuffer to pako.inflate,
// again with no bound on the decompressed output size.
// NOTE(review): reads zipFile.data.buffer although the caller already
// passed the data Buffer — confirm the intended argument shape.
function zipBomb2(zipFile) {
const myArray = new Uint8Array(zipFile.data.buffer).buffer;
let output;
try {
output = pako.inflate(myArray);
console.log(output);
} catch (err) {
console.log(err);
}
}

View File

@@ -0,0 +1,13 @@
// Decompression-bomb fixture for unbzip2-stream: a user-controlled file path
// is read and bunzip2'd straight to stdout with no size limit.
var bz2 = require('unbzip2-stream');
var fs = require('fs');
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
// Unsafe: req.query.FilePath is attacker-controlled.
fs.createReadStream(req.query.FilePath).pipe(bz2()).pipe(process.stdout);
});

View File

@@ -0,0 +1,26 @@
// Decompression-bomb fixture for the legacy `unzip` package.
const unzip = require("unzip");
const { createWriteStream } = require("fs");
const express = require('express')
const fileUpload = require("express-fileupload");
const { Readable } = require("stream");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
const InputStream = Readable.from(req.files.ZipFile.data);
// Size-checked: each entry's uncompressedSize is validated before use.
InputStream.pipe(unzip.Parse())
.on('entry', function (entry) {
if (entry.uncompressedSize > 1024) {
throw "uncompressed size exceed"
}
});
// Unsafe: the parsed output is written out with no size check at all.
let writeStream = createWriteStream('output/path');
InputStream
.pipe(unzip.Parse())
.pipe(writeStream)
});

View File

@@ -0,0 +1,106 @@
// Decompression-bomb fixture for unzipper; the upload handler below
// exercises its various extraction entry points.
const unzipper = require("unzipper");
const express = require('express')
const fileUpload = require("express-fileupload");
const { Readable } = require('stream');
const { createWriteStream, readFileSync } = require("fs");
const stream = require("node:stream");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
// Exercises unzipper's entry points with an attacker-controlled upload.
// "Unsafe" cases never consult entry.vars.uncompressedSize; "Safe" cases do.
app.post('/upload', async (req, res) => {
const RemoteStream = Readable.from(req.files.ZipFile.data);
// Unsafe
RemoteStream.pipe(unzipper.Extract({ path: 'output/path' }));
// Unsafe
RemoteStream.pipe(unzipper.ParseOne())
.pipe(createWriteStream('firstFile.txt'));
// Safe because of uncompressedSize
RemoteStream
.pipe(unzipper.Parse())
.on('entry', function (entry) {
const size = entry.vars.uncompressedSize;
if (size < 1024 * 1024 * 1024) {
entry.pipe(createWriteStream('output/path'));
}
});
// Unsafe
RemoteStream
.pipe(unzipper.Parse())
.on('entry', function (entry) {
const size = entry.vars.uncompressedSize;
entry.pipe(createWriteStream('output/path'));
});
// Unsafe
const zip = RemoteStream.pipe(unzipper.Parse({ forceStream: true }));
for await (const entry of zip) {
const fileName = entry.path;
if (fileName === "this IS the file I'm looking for") {
entry.pipe(createWriteStream('output/path'));
} else {
entry.autodrain();
}
}
// Safe
const zip2 = RemoteStream.pipe(unzipper.Parse({ forceStream: true }));
for await (const entry of zip2) {
const size = entry.vars.uncompressedSize;
if (size < 1024 * 1024 * 1024) {
entry.pipe(createWriteStream('output/path'));
}
}
// Safe because of uncompressedSize
RemoteStream.pipe(unzipper.Parse())
.pipe(stream.Transform({
objectMode: true,
transform: function (entry, e, cb) {
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (size < 1024 * 1024 * 1024) {
entry.pipe(createWriteStream('output/path'))
.on('finish', cb);
}
}
}));
// Unsafe
RemoteStream.pipe(unzipper.Parse())
.pipe(stream.Transform({
objectMode: true,
transform: function (entry, e, cb) {
entry.pipe(createWriteStream('output/path'))
.on('finish', cb);
}
}));
let directory = await unzipper.Open.file('path/to/archive.zip');
new Promise((resolve, reject) => {
directory.files[0]
.stream()
// Fixed: this file never requires `fs` as a whole (only createWriteStream
// and readFileSync are destructured from "fs"), so the previous
// fs.createWriteStream call threw a ReferenceError at runtime.
.pipe(createWriteStream('firstFile'))
.on('error', reject)
.on('finish', resolve)
});
const request = require('request');
// Unsafe
directory = await unzipper.Open.url(request, 'http://example.com/example.zip');
const file = directory.files.find(d => d.path === 'example.xml');
await file.buffer();
// Unsafe
// NOTE(review): `request` here is the require('request') module object, so
// `request.query` is undefined at runtime — this probably meant `req.query`.
const buffer = readFileSync(request.query.FilePath);
directory = await unzipper.Open.buffer(buffer);
directory.files[0].buffer();
// Unsafe
unzipper.Open.file(request.query.FilePath)
.then(d => d.extract({ path: '/extraction/path', concurrency: 5 }));
});

View File

@@ -0,0 +1,54 @@
// Decompression-bomb fixture for yauzl.
const { pipeline } = require('stream/promises');
const yauzl = require("yauzl");
const fs = require("fs");
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
// Exercises yauzl entry points with attacker-controlled input. The first
// open() callback checks entry.uncompressedSize ("Safe"); the second does
// not ("Unsafe").
// NOTE(review): fromFd/fromRandomAccessReader are handed the raw uploaded
// Buffer here; only fromBuffer actually accepts a Buffer — confirm intent.
app.post('/upload', (req, res) => {
yauzl.fromFd(req.files.zipFile.data)
yauzl.fromBuffer(req.files.zipFile.data)
yauzl.fromRandomAccessReader(req.files.zipFile.data)
// Safe
yauzl.open(req.query.filePath, { lazyEntries: true }, function (err, zipfile) {
if (err) throw err;
zipfile.readEntry();
zipfile.on("entry", function (entry) {
zipfile.openReadStream(entry, async function (err, readStream) {
if (err) throw err;
// Guard: reject entries whose stored uncompressed size exceeds 1 GiB.
if (entry.uncompressedSize > 1024 * 1024 * 1024) {
throw err
}
readStream.on("end", function () {
zipfile.readEntry();
});
const outputFile = fs.createWriteStream('testiness');
await pipeline(
readStream,
outputFile
)
});
});
});
// Unsafe
yauzl.open(req.query.filePath, { lazyEntries: true }, function (err, zipfile) {
if (err) throw err;
zipfile.readEntry();
zipfile.on("entry", function (entry) {
zipfile.openReadStream(entry, async function (err, readStream) {
readStream.on("end", function () {
zipfile.readEntry();
});
const outputFile = fs.createWriteStream('testiness');
await pipeline(
readStream,
outputFile
)
});
});
});
res.send("OK")
});

View File

@@ -0,0 +1,98 @@
// Decompression-bomb fixtures for the Node.js zlib module: each helper
// below decompresses the uploaded bytes, with or without maxOutputLength.
const fs = require("fs");
const zlib = require("node:zlib");
const { Readable } = require('stream');
const express = require('express');
const fileUpload = require("express-fileupload");
const app = express();
const port = 3000;
const stream = require('stream/promises');
app.use(fileUpload());
app.listen(port, () => {
console.log(`Example app listening on port ${port}`)
});
app.post('/upload', async (req, res) => {
// NOTE(review): each helper receives the raw data Buffer yet reads
// `zipFile.data` internally — confirm the intended argument shape.
zlibBombAsync(req.files.zipBombFile.data)
zlibBombAsyncSafe(req.files.zipBombFile.data);
zlibBombSync(req.files.zipBombFile.data)
zlibBombSyncSafe(req.files.zipBombFile.data)
zlibBombPipeStream(req.files.zipBombFile.data)
zlibBombPipeStreamSafe(req.files.zipBombFile.data)
zlibBombPipeStreamPromises(req.files.zipBombFile.data).then(r =>
console.log("sone"));
res.send('Hello World!')
});
// Unsafe: async zlib decompression of attacker-controlled bytes without the
// maxOutputLength option, so the output buffer size is unbounded.
function zlibBombAsync(zipFile) {
zlib.gunzip(
zipFile.data,
(err, buffer) => {
});
zlib.unzip(
zipFile.data,
(err, buffer) => {
});
zlib.brotliDecompress(
zipFile.data,
(err, buffer) => {
});
}
// Safe: the maxOutputLength option caps the decompressed output (5 MiB),
// so a decompression bomb cannot exhaust memory.
function zlibBombAsyncSafe(zipFile) {
zlib.gunzip(
zipFile.data,
{ maxOutputLength: 1024 * 1024 * 5 },
(err, buffer) => {
});
zlib.unzip(
zipFile.data,
{ maxOutputLength: 1024 * 1024 * 5 },
(err, buffer) => {
});
zlib.brotliDecompress(
zipFile.data,
{ maxOutputLength: 1024 * 1024 * 5 },
(err, buffer) => {
});
}
// Unsafe: synchronous decompression without maxOutputLength — the options
// object passed to gunzipSync only tunes flushing, not output size.
function zlibBombSync(zipFile) {
zlib.gunzipSync(zipFile.data, { finishFlush: zlib.constants.Z_SYNC_FLUSH });
zlib.unzipSync(zipFile.data);
zlib.brotliDecompressSync(zipFile.data);
}
// Safe: every synchronous call bounds the decompressed output via the
// maxOutputLength option (5 MiB).
function zlibBombSyncSafe(zipFile) {
zlib.gunzipSync(zipFile.data, { finishFlush: zlib.constants.Z_SYNC_FLUSH, maxOutputLength: 1024 * 1024 * 5 });
zlib.unzipSync(zipFile.data, { maxOutputLength: 1024 * 1024 * 5 });
zlib.brotliDecompressSync(zipFile.data, { maxOutputLength: 1024 * 1024 * 5 });
}
// Unsafe: pipes attacker-controlled bytes through unbounded zlib transform
// streams straight to disk.
function zlibBombPipeStream(zipFile) {
const inputStream = Readable.from(zipFile.data);
const outputFile = fs.createWriteStream('unzip.txt');
inputStream.pipe(zlib.createGunzip()).pipe(outputFile);
inputStream.pipe(zlib.createUnzip()).pipe(outputFile);
inputStream.pipe(zlib.createBrotliDecompress()).pipe(outputFile);
}
// Unsafe: same unbounded gunzip transform, driven by the promise-based
// stream.pipeline API instead of .pipe chaining.
async function zlibBombPipeStreamPromises(zipFile) {
const inputStream = Readable.from(zipFile.data);
const outputFile = fs.createWriteStream('unzip.txt');
await stream.pipeline(
inputStream,
zlib.createGunzip(),
outputFile
)
}
// Safe: each zlib transform stream is created with maxOutputLength (5 MiB),
// bounding the decompressed data.
function zlibBombPipeStreamSafe(zipFile) {
const inputFile = Readable.from(zipFile.data);
const outputFile = fs.createWriteStream('unzip.txt');
inputFile.pipe(zlib.createGunzip({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);
inputFile.pipe(zlib.createUnzip({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);
inputFile.pipe(zlib.createBrotliDecompress({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);
}