Merge branch 'main' into aegilops/js/insecure-helmet-middleware

This commit is contained in:
Paul Hodgkinson
2024-05-21 14:46:49 +01:00
committed by GitHub
1024 changed files with 57511 additions and 46928 deletions

View File

@@ -1,3 +1,7 @@
## 0.9.1
No user-facing changes.
## 0.9.0
### Breaking Changes

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* Additional heuristics for a new sensitive data classification for private information (e.g. credit card numbers) have been added to the shared `SensitiveDataHeuristics.qll` library. This may result in additional results for queries that use sensitive data such as `js/clear-text-storage-sensitive-data` and `js/clear-text-logging`.

View File

@@ -0,0 +1,3 @@
## 0.9.1
No user-facing changes.

View File

@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.9.0
lastReleaseVersion: 0.9.1

View File

@@ -1,5 +1,5 @@
name: codeql/javascript-all
version: 0.9.0
version: 0.9.2-dev
groups: javascript
dbscheme: semmlecode.javascript.dbscheme
extractor: javascript

View File

@@ -1003,7 +1003,7 @@ module NodeJSLib {
exists(ClientRequestLoginCallback callback | this = callback.getACall().getArgument(0))
}
override string getCredentialsKind() { result = "Node.js http(s) client login username" }
override string getCredentialsKind() { result = "user name" }
}
/**
@@ -1014,7 +1014,7 @@ module NodeJSLib {
exists(ClientRequestLoginCallback callback | this = callback.getACall().getArgument(1))
}
override string getCredentialsKind() { result = "Node.js http(s) client login password" }
override string getCredentialsKind() { result = "password" }
}
/**

View File

@@ -1,15 +1,29 @@
/**
* Models the `shelljs` library in terms of `FileSystemAccess` and `SystemCommandExecution`.
*
* https://www.npmjs.com/package/shelljs
*/
import javascript
module ShellJS {
private API::Node shellJSMember() {
result = API::moduleImport("shelljs")
or
result =
shellJSMember()
.getMember([
"exec", "cd", "cp", "touch", "chmod", "pushd", "find", "ls", "ln", "mkdir", "mv",
"rm", "cat", "head", "sort", "tail", "uniq", "grep", "sed", "to", "toEnd", "echo"
])
.getReturn()
}
/**
* Gets an import of the `shelljs` or `async-shelljs` module.
* Gets a function that can execute a shell command using the `shelljs` or `async-shelljs` modules.
*/
DataFlow::SourceNode shelljs() {
result = DataFlow::moduleImport("shelljs") or
result = shellJSMember().asSource() or
result = DataFlow::moduleImport("async-shelljs")
}
@@ -39,7 +53,10 @@ module ShellJS {
/** The `shelljs.exec` library modeled as a `shelljs` member. */
private class ShellJsExec extends Range {
ShellJsExec() { this = DataFlow::moduleImport("shelljs.exec") }
ShellJsExec() {
this = DataFlow::moduleImport("shelljs.exec") or
this = shellJSMember().getMember("exec").asSource()
}
override string getName() { result = "exec" }
}

View File

@@ -1,5 +1,6 @@
extensions:
# Contribute empty data sets to avoid errors about an undefined extensionals
# Make sure that the extensible model predicates have at least one definition
# to avoid errors about undefined extensionals.
- addsTo:
pack: codeql/javascript-all
extensible: sourceModel

View File

@@ -14,13 +14,14 @@
* - id: a user name or other account information;
* - password: a password or authorization key;
* - certificate: a certificate.
* - private: private data such as credit card numbers
*
* While classifications are represented as strings, this should not be relied upon.
* Instead, use the predicates in `SensitiveDataClassification::` to work with
* classifications.
*/
class SensitiveDataClassification extends string {
SensitiveDataClassification() { this in ["secret", "id", "password", "certificate"] }
SensitiveDataClassification() { this in ["secret", "id", "password", "certificate", "private"] }
}
/**
@@ -38,6 +39,9 @@ module SensitiveDataClassification {
/** Gets the classification for certificates. */
SensitiveDataClassification certificate() { result = "certificate" }
/** Gets the classification for private data. */
SensitiveDataClassification private() { result = "private" }
}
/**
@@ -77,6 +81,40 @@ module HeuristicNames {
*/
string maybeCertificate() { result = "(?is).*(cert)(?!.*(format|name|ification)).*" }
/**
* Gets a regular expression that identifies strings that may indicate the presence of
* private data.
*/
string maybePrivate() {
result =
"(?is).*(" +
// Inspired by the list on https://cwe.mitre.org/data/definitions/359.html
// Government identifiers, such as Social Security Numbers
"social.?security|employer.?identification|national.?insurance|resident.?id|" +
"passport.?(num|no)|([_-]|\\b)ssn([_-]|\\b)|" +
// Contact information, such as home addresses
"post.?code|zip.?code|home.?addr|" +
// and telephone numbers
"(mob(ile)?|home).?(num|no|tel|phone)|(tel|fax|phone).?(num|no)|telephone|" +
"emergency.?contact|" +
// Geographic location - where the user is (or was)
"latitude|longitude|nationality|" +
// Financial data - such as credit card numbers, salary, bank accounts, and debts
"(credit|debit|bank|visa).?(card|num|no|acc(ou)?nt)|acc(ou)?nt.?(no|num|credit)|" +
"salary|billing|credit.?(rating|score)|([_-]|\\b)ccn([_-]|\\b)|" +
// Communications - e-mail addresses, private e-mail messages, SMS text messages, chat logs, etc.
// "e(mail|_mail)|" + // this seems too noisy
// Health - medical conditions, insurance status, prescription records
"birth.?da(te|y)|da(te|y).?(of.?)?birth|" +
"medical|(health|care).?plan|healthkit|appointment|prescription|" +
"blood.?(type|alcohol|glucose|pressure)|heart.?(rate|rhythm)|body.?(mass|fat)|" +
"menstrua|pregnan|insulin|inhaler|" +
// Relationships - work and family
"employ(er|ee)|spouse|maiden.?name" +
// ---
").*"
}
/**
* Gets a regular expression that identifies strings that may indicate the presence
* of sensitive data, with `classification` describing the kind of sensitive data involved.
@@ -90,6 +128,9 @@ module HeuristicNames {
or
result = maybeCertificate() and
classification = SensitiveDataClassification::certificate()
or
result = maybePrivate() and
classification = SensitiveDataClassification::private()
}
/**

View File

@@ -1,4 +1,4 @@
description: Removed unused column from the `folders` and `files` relations
compatibility: full
files.rel: reorder files.rel (int id, string name, string simple, string ext, int fromSource) id name
folders.rel: reorder folders.rel (int id, string name, string simple) id name
files.rel: reorder files.rel (@file id, string name, string simple, string ext, int fromSource) id name
folders.rel: reorder folders.rel (@folder id, string name, string simple) id name

View File

@@ -1,94 +1,94 @@
description: Rename multiple relation names.
compatibility: backwards
is_externs.rel: reorder isExterns.rel(int toplevel) toplevel
is_externs.rel: reorder isExterns.rel(@toplevel toplevel) toplevel
isExterns.rel: delete
is_module.rel: reorder isModule.rel(int toplevel) toplevel
is_module.rel: reorder isModule.rel(@toplevel toplevel) toplevel
isModule.rel: delete
is_nodejs.rel: reorder isNodejs.rel(int toplevel) toplevel
is_nodejs.rel: reorder isNodejs.rel(@toplevel toplevel) toplevel
isNodejs.rel: delete
is_es2015_module.rel: reorder isES2015Module.rel(int toplevel) toplevel
is_es2015_module.rel: reorder isES2015Module.rel(@toplevel toplevel) toplevel
isES2015Module.rel: delete
is_closure_module.rel: reorder isClosureModule.rel(int toplevel) toplevel
is_closure_module.rel: reorder isClosureModule.rel(@toplevel toplevel) toplevel
isClosureModule.rel: delete
stmt_containers.rel: reorder stmtContainers.rel(int stmt, int container) stmt container
stmt_containers.rel: reorder stmtContainers.rel(@stmt stmt, @stmt_container container) stmt container
stmtContainers.rel: delete
jump_targets.rel: reorder jumpTargets.rel(int jump, int target) jump target
jump_targets.rel: reorder jumpTargets.rel(@stmt jump, @stmt target) jump target
jumpTargets.rel: delete
is_instantiated.rel: reorder isInstantiated.rel(int decl) decl
is_instantiated.rel: reorder isInstantiated.rel(@namespacedeclaration decl) decl
isInstantiated.rel: delete
has_declare_keyword.rel: reorder hasDeclareKeyword.rel(int stmt) stmt
has_declare_keyword.rel: reorder hasDeclareKeyword.rel(@declarablenode stmt) stmt
hasDeclareKeyword.rel: delete
is_for_await_of.rel: reorder isForAwaitOf.rel(int forof) forof
is_for_await_of.rel: reorder isForAwaitOf.rel(@forofstmt forof) forof
isForAwaitOf.rel: delete
enclosing_stmt.rel: reorder enclosingStmt.rel(int expr, int stmt) expr stmt
enclosing_stmt.rel: reorder enclosingStmt.rel(@exprortype expr, @stmt stmt) expr stmt
enclosingStmt.rel: delete
expr_containers.rel: reorder exprContainers.rel(int expr, int container) expr container
expr_containers.rel: reorder exprContainers.rel(@exprortype expr, @stmt_container container) expr container
exprContainers.rel: delete
array_size.rel: reorder arraySize.rel(int ae, int sz) ae sz
array_size.rel: reorder arraySize.rel(@arraylike ae, int sz) ae sz
arraySize.rel: delete
is_delegating.rel: reorder isDelegating.rel(int yield) yield
is_delegating.rel: reorder isDelegating.rel(@yieldexpr yield) yield
isDelegating.rel: delete
is_arguments_object.rel: reorder isArgumentsObject.rel(int id) id
is_arguments_object.rel: reorder isArgumentsObject.rel(@variable id) id
isArgumentsObject.rel: delete
is_computed.rel: reorder isComputed.rel(int prop) prop
is_computed.rel: reorder isComputed.rel(@property prop) prop
isComputed.rel: delete
is_method.rel: reorder isMethod.rel(int prop) prop
is_method.rel: reorder isMethod.rel(@property prop) prop
isMethod.rel: delete
is_static.rel: reorder isStatic.rel(int prop) prop
is_static.rel: reorder isStatic.rel(@property prop) prop
isStatic.rel: delete
is_abstract_member.rel: reorder isAbstractMember.rel(int prop) prop
is_abstract_member.rel: reorder isAbstractMember.rel(@property prop) prop
isAbstractMember.rel: delete
is_const_enum.rel: reorder isConstEnum.rel(int id) id
is_const_enum.rel: reorder isConstEnum.rel(@enumdeclaration id) id
isConstEnum.rel: delete
is_abstract_class.rel: reorder isAbstractClass.rel(int id) id
is_abstract_class.rel: reorder isAbstractClass.rel(@classdeclstmt id) id
isAbstractClass.rel: delete
has_public_keyword.rel: reorder hasPublicKeyword.rel(int prop) prop
has_public_keyword.rel: reorder hasPublicKeyword.rel(@property prop) prop
hasPublicKeyword.rel: delete
has_private_keyword.rel: reorder hasPrivateKeyword.rel(int prop) prop
has_private_keyword.rel: reorder hasPrivateKeyword.rel(@property prop) prop
hasPrivateKeyword.rel: delete
has_protected_keyword.rel: reorder hasProtectedKeyword.rel(int prop) prop
has_protected_keyword.rel: reorder hasProtectedKeyword.rel(@property prop) prop
hasProtectedKeyword.rel: delete
has_readonly_keyword.rel: reorder hasReadonlyKeyword.rel(int prop) prop
has_readonly_keyword.rel: reorder hasReadonlyKeyword.rel(@property prop) prop
hasReadonlyKeyword.rel: delete
has_type_keyword.rel: reorder hasTypeKeyword.rel(int id) id
has_type_keyword.rel: reorder hasTypeKeyword.rel(@import_or_export_declaration id) id
hasTypeKeyword.rel: delete
is_optional_member.rel: reorder isOptionalMember.rel(int id) id
is_optional_member.rel: reorder isOptionalMember.rel(@property id) id
isOptionalMember.rel: delete
has_definite_assignment_assertion.rel: reorder hasDefiniteAssignmentAssertion.rel(int id) id
has_definite_assignment_assertion.rel: reorder hasDefiniteAssignmentAssertion.rel(@field_or_vardeclarator id) id
hasDefiniteAssignmentAssertion.rel: delete
is_optional_parameter_declaration.rel: reorder isOptionalParameterDeclaration.rel(int parameter) parameter
is_optional_parameter_declaration.rel: reorder isOptionalParameterDeclaration.rel(@pattern parameter) parameter
isOptionalParameterDeclaration.rel: delete
has_asserts_keyword.rel: reorder hasAssertsKeyword.rel(int node) node
has_asserts_keyword.rel: reorder hasAssertsKeyword.rel(@predicatetypeexpr node) node
hasAssertsKeyword.rel: delete
js_parse_errors.rel: reorder jsParseErrors.rel(int id, int toplevel, string message, string line) id toplevel message line
js_parse_errors.rel: reorder jsParseErrors.rel(@js_parse_error id, @toplevel toplevel, string message, string line) id toplevel message line
jsParseErrors.rel: delete
regexp_parse_errors.rel: reorder regexpParseErrors.rel(int id, int regexp, string message) id regexp message
regexp_parse_errors.rel: reorder regexpParseErrors.rel(@regexp_parse_error id, @regexpterm regexp, string message) id regexp message
regexpParseErrors.rel: delete
is_greedy.rel: reorder isGreedy.rel(int id) id
is_greedy.rel: reorder isGreedy.rel(@regexp_quantifier id) id
isGreedy.rel: delete
range_quantifier_lower_bound.rel: reorder rangeQuantifierLowerBound.rel(int id, int lo) id lo
range_quantifier_lower_bound.rel: reorder rangeQuantifierLowerBound.rel(@regexp_range id, int lo) id lo
rangeQuantifierLowerBound.rel: delete
range_quantifier_upper_bound.rel: reorder rangeQuantifierUpperBound.rel(int id, int hi) id hi
range_quantifier_upper_bound.rel: reorder rangeQuantifierUpperBound.rel(@regexp_range id, int hi) id hi
rangeQuantifierUpperBound.rel: delete
is_capture.rel: reorder isCapture.rel(int id, int number) id number
is_capture.rel: reorder isCapture.rel(@regexp_group id, int number) id number
isCapture.rel: delete
is_named_capture.rel: reorder isNamedCapture.rel(int id, string name) id name
is_named_capture.rel: reorder isNamedCapture.rel(@regexp_group id, string name) id name
isNamedCapture.rel: delete
is_inverted.rel: reorder isInverted.rel(int id) id
is_inverted.rel: reorder isInverted.rel(@regexp_char_class id) id
isInverted.rel: delete
regexp_const_value.rel: reorder regexpConstValue.rel(int id, string value) id value
regexp_const_value.rel: reorder regexpConstValue.rel(@regexp_constant id, string value) id value
regexpConstValue.rel: delete
char_class_escape.rel: reorder charClassEscape.rel(int id, string value) id value
char_class_escape.rel: reorder charClassEscape.rel(@regexp_char_class_escape id, string value) id value
charClassEscape.rel: delete
named_backref.rel: reorder namedBackref.rel(int id, string name) id name
named_backref.rel: reorder namedBackref.rel(@regexp_backref id, string name) id name
namedBackref.rel: delete
unicode_property_escapename.rel: reorder unicodePropertyEscapeName.rel(int id, string name) id name
unicode_property_escapename.rel: reorder unicodePropertyEscapeName.rel(@regexp_unicode_property_escape id, string name) id name
unicodePropertyEscapeName.rel: delete
unicode_property_escapevalue.rel: reorder unicodePropertyEscapeValue.rel(int id, string value) id value
unicode_property_escapevalue.rel: reorder unicodePropertyEscapeValue.rel(@regexp_unicode_property_escape id, string value) id value
unicodePropertyEscapeValue.rel: delete
is_generator.rel: reorder isGenerator.rel(int fun) fun
is_generator.rel: reorder isGenerator.rel(@function fun) fun
isGenerator.rel: delete
has_rest_parameter.rel: reorder hasRestParameter.rel(int fun) fun
has_rest_parameter.rel: reorder hasRestParameter.rel(@function fun) fun
hasRestParameter.rel: delete
is_async.rel: reorder isAsync.rel(int fun) fun
is_async.rel: reorder isAsync.rel(@function fun) fun
isAsync.rel: delete

View File

@@ -1,3 +1,7 @@
## 0.8.16
No user-facing changes.
## 0.8.15
### Minor Analysis Improvements

View File

@@ -36,7 +36,7 @@
<p>
For JavaScript in the browser,
<code>RandomSource.getRandomValues</code> provides a cryptographically
<code>crypto.getRandomValues</code> provides a cryptographically
secure pseudo-random number generator.
</p>
@@ -69,7 +69,7 @@
<references>
<li>Wikipedia: <a href="http://en.wikipedia.org/wiki/Pseudorandom_number_generator">Pseudo-random number generator</a>.</li>
<li>Mozilla Developer Network: <a href="https://developer.mozilla.org/en-US/docs/Web/API/RandomSource/getRandomValues">RandomSource.getRandomValues</a>.</li>
<li>Mozilla Developer Network: <a href="https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues">Crypto: getRandomValues()</a>.</li>
<li>NodeJS: <a href="https://nodejs.org/api/crypto.html#crypto_crypto_randombytes_size_callback">crypto.randomBytes</a></li>
</references>
</qhelp>

View File

@@ -2,5 +2,7 @@ function securePassword() {
// GOOD: the random suffix is cryptographically secure
var suffix = window.crypto.getRandomValues(new Uint32Array(1))[0];
var password = "myPassword" + suffix;
return password;
// GOOD: if a random value between 0 and 1 is desired
var secret = window.crypto.getRandomValues(new Uint32Array(1))[0] * Math.pow(2,-32);
}

View File

@@ -19,6 +19,10 @@
If possible, store configuration files including credential data separately from the source code,
in a secure location with restricted access.
</p>
<p>
If the credentials are a placeholder value, make sure the value is obviously a placeholder by
using a name such as <code>"SampleToken"</code> or <code>"MyPassword"</code>.
</p>
</recommendation>
<example>

View File

@@ -30,7 +30,7 @@ where
// exclude dummy passwords and templates
not (
sink.getNode().(Sink).(DefaultCredentialsSink).getKind() =
["password", "credentials", "token"] and
["password", "credentials", "token", "key"] and
PasswordHeuristics::isDummyPassword(val)
or
sink.getNode().(Sink).getKind() = "authorization header" and

View File

@@ -0,0 +1,3 @@
## 0.8.16
No user-facing changes.

View File

@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.8.15
lastReleaseVersion: 0.8.16

View File

@@ -0,0 +1,43 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Extracting compressed files using any compression algorithm, such as gzip, can lead to denial-of-service attacks.</p>
<p>Attackers can craft a small compressed file that decompresses to a huge file, for example by compressing a large file consisting of repeated bytes.</p>
</overview>
<recommendation>
<p>When decompressing a user-provided compressed file, check the decompression ratio, or read the file within a loop in small chunks, so that the decompressed size can be checked on each iteration.</p>
</recommendation>
<example>
<p>
JSZip: check the <code>uncompressedSize</code> object field before extraction.
</p>
<sample src="jszip_good.js"/>
<p>
Node.js zlib: use the <a href="https://nodejs.org/dist/latest-v18.x/docs/api/zlib.html#class-options">maxOutputLength option</a>, which limits the size of the output buffer
</p>
<sample src="zlib_good.js" />
<p>
node-tar: use the <a href="https://github.com/isaacs/node-tar/blob/8c5af15e43a769fd24aa7f1c84d93e54824d19d2/lib/list.js#L90">maxReadSize option</a>, which limits the read-buffer size
</p>
<sample src="node-tar_good.js" />
</example>
<references>
<li>
<a href="https://github.com/advisories/GHSA-8225-6cvr-8pqp">CVE-2017-16129</a>
</li>
<li>
<a href="https://www.bamsoftware.com/hacks/zipbomb/">Research on maximizing the impact of this kind of attack</a>
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,35 @@
/**
 * @name User-controlled file decompression
 * @description User-controlled data that flows into decompression library APIs without checking the compression rate is dangerous
 * @kind path-problem
 * @problem.severity error
 * @security-severity 7.8
 * @precision high
 * @id js/user-controlled-data-decompression
 * @tags security
 * experimental
 * external/cwe/cwe-522
 */
// NOTE(review): decompression bombs are conventionally classified as CWE-409
// (data amplification) rather than CWE-522 (insufficiently protected
// credentials) — confirm the `@tags` entry above.
import javascript
import DataFlow::PathGraph
import DecompressionBombs
// Taint configuration: any remote (user-controlled) value that reaches a
// decompression sink (as modeled in DecompressionBombs.qll) is reported.
class BombConfiguration extends TaintTracking::Configuration {
BombConfiguration() { this = "DecompressionBombs" }
// Sources: all remote flow sources (HTTP parameters, uploaded files, ...).
override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }
// Sinks: decompression API calls collected by the DecompressionBomb module.
override predicate isSink(DataFlow::Node sink) { sink instanceof DecompressionBomb::Sink }
// Extra flow edges contributed by the library models (streams, pipes, ...).
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
exists(DecompressionBomb::AdditionalTaintStep addstep |
addstep.isAdditionalTaintStep(pred, succ)
)
}
}
from BombConfiguration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
where cfg.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "This Decompression depends on a $@.", source.getNode(),
"potentially untrusted source"

View File

@@ -0,0 +1,432 @@
import javascript
import experimental.semmle.javascript.FormParsers
import experimental.semmle.javascript.ReadableStream
import DataFlow::PathGraph
module DecompressionBomb {
/**
 * A sink for uncontrolled data decompression, i.e. a place where
 * potentially attacker-controlled compressed data is decompressed.
 */
class Sink extends DataFlow::Node {
Sink() { this = any(Range r).sink() }
}
/**
 * The additional taint steps that are needed to build a taint-tracking
 * or data-flow configuration for this query.
 */
abstract class AdditionalTaintStep extends string {
AdditionalTaintStep() { this = "AdditionalTaintStep" }
/**
 * Holds if there is an additional taint step between `pred` and `succ`.
 */
abstract predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ);
}
/**
 * An abstract class for contributing new decompression sinks.
 * Extend this class to model additional decompression libraries.
 */
abstract class Range extends API::Node {
/**
 * Gets the data-flow node that is responsible for the decompression.
 *
 * It can be a path, a stream of compressed data,
 * or a call to a function that uses a pipe.
 */
abstract DataFlow::Node sink();
}
}
/**
 * Provides additional taint steps for Node.js readable-stream objects.
 */
module ReadableStream {
/**
 * Propagates taint through the imported stream helpers
 * (`pipe`, `pipeline`, and promises file-handle pipes).
 */
class ReadableStreamAdditionalTaintStep extends DecompressionBomb::AdditionalTaintStep {
ReadableStreamAdditionalTaintStep() { this = "AdditionalTaintStep" }
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
(
readablePipeAdditionalTaintStep(pred, succ)
or
streamPipelineAdditionalTaintStep(pred, succ)
or
promisesFileHandlePipeAdditionalTaintStep(pred, succ)
)
}
}
}
/**
 * Provides additional taint steps for file-system access functions.
 */
module FileSystemAccessAdditionalTaintStep {
// NOTE(review): the class name was apparently copied from the stream module;
// a name like `FileSystemAccessTaintStep` would be less misleading.
class ReadableStreamAdditionalTaintStep extends DecompressionBomb::AdditionalTaintStep {
ReadableStreamAdditionalTaintStep() { this = "AdditionalTaintStep" }
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
// additional taint step for fs.readFile(pred): taint flows from the path
// argument to the data parameter of the callback.
// It could also be registered as a global additional step.
exists(DataFlow::CallNode n | n = DataFlow::moduleMember("fs", "readFile").getACall() |
pred = n.getArgument(0) and succ = n.getABoundCallbackParameter(1, 1)
)
or
// generic file-system reads: path argument -> data node
exists(FileSystemReadAccess cn |
pred = cn.getAPathArgument() and
succ = cn.getADataNode()
)
}
}
}
/**
 * Provides models for the [jszip](https://www.npmjs.com/package/jszip) package.
 */
module JsZip {
/**
 * The decompression bomb sinks of `jszip`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() { this = API::moduleImport("jszip").getMember("loadAsync") }
override DataFlow::Node sink() {
// the compressed data passed to `loadAsync`, unless the result's
// uncompressed size is checked somewhere downstream
result = this.getParameter(0).asSink() and not this.sanitizer(this)
}
/**
 * Holds if a member named `uncompressedSize` is read (via `_data`) on any
 * successor of the given jszip `loadAsync` node.
 */
predicate sanitizer(API::Node loadAsync) {
exists(loadAsync.getASuccessor*().getMember("_data").getMember("uncompressedSize"))
}
}
}
/**
 * Provides models for the [node-tar](https://www.npmjs.com/package/tar) package.
 */
module NodeTar {
/**
 * The decompression bomb sinks of `tar` (`tar.x` / `tar.extract`).
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() { this = API::moduleImport("tar").getMember(["x", "extract"]) }
override DataFlow::Node sink() {
(
// piping tar.x()
result = this.getACall()
or
// tar.x({file: filename})
result = this.getParameter(0).getMember("file").asSink()
) and
// and there shouldn't be a "maxReadSize: ANum" option
not this.sanitizer(this.getParameter(0))
}
/**
 * Holds if the options parameter of a `tar` extraction call
 * contains a `maxReadSize` option.
 */
predicate sanitizer(API::Node tarExtract) { exists(tarExtract.getMember("maxReadSize")) }
}
/**
 * Additional taint steps for `tar`: taint on the module flows to
 * `tar.x(...)` calls and their first argument.
 */
class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
exists(API::Node n | n = API::moduleImport("tar") |
pred = n.asSource() and
(
succ = n.getMember("x").getACall() or
succ = n.getMember("x").getACall().getArgument(0)
)
)
}
}
}
/**
 * Provides models for the `node:zlib` package.
 */
module Zlib {
/**
 * The decompression sinks of `node:zlib`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
// true for the direct (one-shot) decompression functions (buffer as arg 0,
// options as arg 1); false for the `create*` stream factories (options as
// arg 0, the returned stream itself is the sink).
// NOTE(review): the name looks like a misspelling of "isSync"/"isSink".
boolean isSynk;
DecompressionBomb() {
this =
API::moduleImport("zlib")
.getMember([
"gunzip", "gunzipSync", "unzip", "unzipSync", "brotliDecompress",
"brotliDecompressSync", "inflateSync", "inflateRawSync", "inflate", "inflateRaw"
]) and
isSynk = true
or
this =
API::moduleImport("zlib")
.getMember([
"createGunzip", "createBrotliDecompress", "createUnzip", "createInflate",
"createInflateRaw"
]) and
isSynk = false
}
override DataFlow::Node sink() {
// stream factories: the created stream, unless options (arg 0) cap the output
result = this.getACall() and
not this.sanitizer(this.getParameter(0)) and
isSynk = false
or
// direct functions: the input buffer (arg 0), unless options (arg 1) cap the output
result = this.getACall().getArgument(0) and
not this.sanitizer(this.getParameter(1)) and
isSynk = true
}
/**
 * Holds if the options parameter of a zlib call
 * contains a `maxOutputLength` option.
 */
predicate sanitizer(API::Node zlib) { exists(zlib.getMember("maxOutputLength")) }
}
}
/**
 * Provides models for the [pako](https://www.npmjs.com/package/pako) package.
 */
module Pako {
/**
 * The decompression bomb sinks of `pako`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() {
this = API::moduleImport("pako").getMember(["inflate", "inflateRaw", "ungzip"])
}
// the compressed input passed as the first argument
override DataFlow::Node sink() { result = this.getParameter(0).asSink() }
}
/**
 * Additional taint step: wrapping tainted data in a typed array preserves taint.
 */
class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
// succ = new Uint8Array(pred)
exists(DataFlow::Node n, NewExpr ne | ne = n.asExpr() |
pred.asExpr() = ne.getArgument(0) and
succ.asExpr() = ne and
ne.getCalleeName() = "Uint8Array"
)
}
}
}
/**
 * Provides models for the [adm-zip](https://www.npmjs.com/package/adm-zip) package.
 */
module AdmZip {
/**
 * The decompression bomb sinks of `adm-zip`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() { this = API::moduleImport("adm-zip").getInstance() }
override DataFlow::Node sink() {
// results of extraction/read methods on an AdmZip instance
result =
this.getMember(["extractAllTo", "extractEntryTo", "readAsText"]).getReturn().asSource()
or
// entry.getData() results reachable from the instance
result = this.getASuccessor*().getMember("getData").getReturn().asSource()
}
}
/**
 * Additional taint steps: the archive passed to the `adm-zip` constructor
 * taints the results of the extraction/read methods and of `getData()`.
 */
class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
exists(API::Node n | n = API::moduleImport("adm-zip") |
pred = n.getParameter(0).asSink() and
(
succ =
n.getInstance()
.getMember(["extractAllTo", "extractEntryTo", "readAsText"])
.getReturn()
.asSource()
or
succ =
n.getInstance()
.getMember("getEntries")
.getASuccessor*()
.getMember("getData")
.getReturn()
.asSource()
)
)
}
}
}
/**
 * Provides models for the [decompress](https://www.npmjs.com/package/decompress) package.
 */
module Decompress {
/**
 * The decompression bomb sinks of `decompress`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() { this = API::moduleImport("decompress") }
// the archive (path or buffer) passed as the first argument
override DataFlow::Node sink() { result = this.getACall().getArgument(0) }
}
}
/**
 * Provides models for the [gunzip-maybe](https://www.npmjs.com/package/gunzip-maybe) package.
 */
module GunzipMaybe {
/**
 * The decompression bomb sinks of `gunzip-maybe`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() { this = API::moduleImport("gunzip-maybe") }
// the stream returned by `gunzipMaybe()` is the sink
override DataFlow::Node sink() { result = this.getACall() }
}
}
/**
 * Provides models for the [unbzip2-stream](https://www.npmjs.com/package/unbzip2-stream) package.
 */
module Unbzip2Stream {
/**
 * The decompression bomb sinks of `unbzip2-stream`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
DecompressionBomb() { this = API::moduleImport("unbzip2-stream") }
// the stream returned by `unbzip2Stream()` is the sink
override DataFlow::Node sink() { result = this.getACall() }
}
}
/**
 * Provides models for the [unzipper](https://www.npmjs.com/package/unzipper) package.
 */
module Unzipper {
/**
 * The decompression bomb sinks of `unzipper`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
// which unzipper API member this node represents ("Extract", "Parse",
// "ParseOne", or "Open"); used to pick the right sink shape below
string funcName;
DecompressionBomb() {
this = API::moduleImport("unzipper").getMember(["Extract", "Parse", "ParseOne"]) and
funcName = ["Extract", "Parse", "ParseOne"]
or
this = API::moduleImport("unzipper").getMember("Open") and
// open has some functions which will be specified in sink predicate
funcName = "Open"
}
// NOTE(review): the `sanitizer()` predicate below is never consulted here,
// so `uncompressedSize` checks do not currently suppress these sinks — confirm.
override DataFlow::Node sink() {
// `Open.buffer(...)` / `Open.file(...)` / `Open.url(...)`; "file" is listed twice
result = this.getMember(["buffer", "file", "url", "file"]).getACall().getArgument(0) and
funcName = "Open"
or
result = this.getACall() and
funcName = ["Extract", "Parse", "ParseOne"]
}
/**
 * Holds if an `unzipper` instance reads the `vars.uncompressedSize` member.
 *
 * A precise sanitizer is hard to implement here, so this merely checks
 * whether a `vars.uncompressedSize` access exists anywhere downstream.
 */
predicate sanitizer() {
exists(this.getASuccessor*().getMember("vars").getMember("uncompressedSize")) and
funcName = ["Extract", "Parse", "ParseOne"]
}
}
}
/**
 * Provides models for the [yauzl](https://www.npmjs.com/package/yauzl) package.
 */
module Yauzl {
// NOTE(review): leftover debugging predicate — appears unused; consider removing.
API::Node test() { result = API::moduleImport("yauzl").getASuccessor*() }
/**
 * The decompression bomb sinks of `yauzl`.
 */
class DecompressionBomb extends DecompressionBomb::Range {
// open function has a sanitizer
// distinguishes `open` from the `from*` entry points
string methodName;
DecompressionBomb() {
this =
API::moduleImport("yauzl").getMember(["fromFd", "fromBuffer", "fromRandomAccessReader"]) and
methodName = "from"
or
this = API::moduleImport("yauzl").getMember("open") and
methodName = "open"
}
override DataFlow::Node sink() {
(
// the `zipfile` callback parameter: `readEntry()` calls and
// the stream delivered by `openReadStream`'s callback
result = this.getParameter(2).getParameter(1).getMember("readEntry").getACall() or
result =
this.getParameter(2)
.getParameter(1)
.getMember("openReadStream")
.getParameter(1)
.getParameter(1)
.asSource()
) and
not this.sanitizer() and
methodName = "open"
or
// `from*` variants: the compressed input itself
result = this.getParameter(0).asSink() and
methodName = "from"
}
/**
 * Holds if an `uncompressedSize` member is read on any successor
 * of this yauzl `open` node.
 */
predicate sanitizer() {
exists(this.getASuccessor*().getMember("uncompressedSize")) and
methodName = ["readStream", "open"]
}
}
/**
 * Additional taint steps for `yauzl.open`: the opened archive path/handle
 * taints `readEntry()` calls and `openReadStream` callback streams.
 */
class DecompressionAdditionalSteps extends DecompressionBomb::AdditionalTaintStep {
DecompressionAdditionalSteps() { this = "AdditionalTaintStep" }
override predicate isAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
exists(API::Node open | open = API::moduleImport("yauzl").getMember("open") |
pred = open.getParameter(0).asSink() and
(
succ = open.getParameter(2).getParameter(1).getMember("readEntry").getACall() or
succ =
open.getParameter(2)
.getParameter(1)
.getMember("openReadStream")
.getParameter(1)
.getParameter(1)
.asSource()
)
)
}
}
}

View File

@@ -0,0 +1,11 @@
const jszipp = require("jszip");

/**
 * Safely extracts the "10GB" entry from a user-supplied zip archive.
 *
 * GOOD: the entry's uncompressed size is checked against a fixed limit
 * (8 MiB) *before* extraction, and extraction is aborted when the limit is
 * exceeded, preventing a decompression-bomb denial of service. (Previously
 * the example logged the error but extracted the oversized entry anyway.)
 *
 * @param {{data: Buffer}} zipFile - the uploaded file (e.g. from a multipart request).
 */
function zipBombSafe(zipFile) {
  jszipp.loadAsync(zipFile.data).then(function (zip) {
    if (zip.file("10GB")["_data"]["uncompressedSize"] > 1024 * 1024 * 8) {
      console.log("error");
      return; // abort: do NOT extract an oversized entry
    }
    zip.file("10GB").async("uint8array").then(function (u8) {
      console.log(u8);
    });
  });
}

View File

@@ -0,0 +1,8 @@
// GOOD: extract a tar archive while capping the read-buffer size via the
// `maxReadSize` option, mitigating decompression-bomb denial of service.
// NOTE: `tarFileName` is assumed to be defined by the surrounding example.
const tar = require("tar");
tar.x({
file: tarFileName,
strip: 1,
C: 'some-dir',
maxReadSize: 16 * 1024 * 1024 // 16 MB
})

View File

@@ -0,0 +1,11 @@
// GOOD: every zlib decompression below caps the decompressed output at 5 MiB
// via the `maxOutputLength` option, mitigating decompression bombs.
// NOTE: `inputZipFile`, `doSomeThingWithData`, and `outputFile` are assumed
// to be defined by the surrounding example.
const zlib = require("zlib");
// async callback API
// NOTE(review): `err` is ignored here — a real application should handle it.
zlib.gunzip(
inputZipFile.data,
{ maxOutputLength: 1024 * 1024 * 5 },
(err, buffer) => {
doSomeThingWithData(buffer);
});
// synchronous API
zlib.gunzipSync(inputZipFile.data, { maxOutputLength: 1024 * 1024 * 5 });
// streaming API
inputZipFile.pipe(zlib.createGunzip({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);

View File

@@ -0,0 +1,179 @@
/**
* Provides classes for modeling the server-side form/file parsing libraries.
*/
import javascript
import experimental.semmle.javascript.ReadableStream
/**
* A module for modeling [busboy](https://www.npmjs.com/package/busboy) package
*/
module BusBoy {
/**
 * A source of remote flow from the `Busboy` library: both the file streams
 * and the field values delivered by busboy's `file`/`field` event callbacks
 * carry attacker-controlled request data.
 */
private class BusBoyRemoteFlow extends RemoteFlowSource {
  BusBoyRemoteFlow() {
    exists(API::Node busboyOnEvent |
      busboyOnEvent = API::moduleImport("busboy").getReturn().getMember("on")
    |
      // Files
      busboyOnEvent.getParameter(0).asSink().mayHaveStringValue("file") and
      // second param of 'file' event is a Readable stream
      this = readableStreamDataNode(busboyOnEvent.getParameter(1).getParameter(1))
      or
      // Fields
      busboyOnEvent.getParameter(0).asSink().mayHaveStringValue(["file", "field"]) and
      this =
        API::moduleImport("busboy")
            .getReturn()
            .getMember("on")
            .getParameter(1)
            .getAParameter()
            .asSource()
    )
  }

  // Fix: the source-type string previously misspelled the library as "Busbuy".
  override string getSourceType() { result = "parsed user value from Busboy" }
}
/**
* A busboy file data step according to a Readable Stream type
*/
private class AdditionalTaintStep extends TaintTracking::SharedTaintStep {
override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
exists(API::Node busboyOnEvent |
busboyOnEvent = API::moduleImport("busboy").getReturn().getMember("on")
|
busboyOnEvent.getParameter(0).asSink().mayHaveStringValue("file") and
customStreamPipeAdditionalTaintStep(busboyOnEvent.getParameter(1).getParameter(1), pred,
succ)
)
}
}
}
/**
 * A module for modeling the [formidable](https://www.npmjs.com/package/formidable) package.
 */
module Formidable {
  /**
   * A source of remote flow from the `formidable` library parsing an HTTP
   * request: parse-callback arguments, the parse promise result, and
   * event-handler parameters.
   */
  private class FormidableRemoteFlow extends RemoteFlowSource {
    FormidableRemoteFlow() {
      exists(API::Node formidable |
        // The three ways to obtain a form instance:
        // `formidable()`, `formidable.formidable()`,
        // `new formidable.IncomingForm()` / `new formidable.Formidable()`.
        formidable = API::moduleImport("formidable").getReturn()
        or
        formidable = API::moduleImport("formidable").getMember("formidable").getReturn()
        or
        formidable =
          API::moduleImport("formidable").getMember(["IncomingForm", "Formidable"]).getInstance()
      |
        // `form.parse(req, (err, fields, files) => ...)`: all non-error callback args
        this =
          formidable.getMember("parse").getACall().getABoundCallbackParameter(1, any(int i | i > 0))
        or
        // If no callback is provided, a promise is returned whose
        // resolved value contains the [fields, files] members.
        exists(API::Node parseMethod |
          parseMethod = formidable.getMember("parse") and parseMethod.getNumParameter() = 1
        |
          this = parseMethod.getReturn().asSource()
        )
        or
        // Event handlers: `form.on(event, cb)` callback parameters
        this = formidable.getMember("on").getParameter(1).getAParameter().asSource()
      )
    }

    override string getSourceType() { result = "parsed user value from Formidable" }
  }
}
/**
 * A module for modeling the [multiparty](https://www.npmjs.com/package/multiparty) package.
 */
module Multiparty {
  /**
   * A source of remote flow from the `multiparty` library: parse-callback
   * arguments, event-handler parameters, and 'part' streams of a
   * `new multiparty.Form()`.
   */
  private class MultipartyRemoteFlow extends RemoteFlowSource {
    MultipartyRemoteFlow() {
      exists(API::Node form |
        form = API::moduleImport("multiparty").getMember("Form").getInstance()
      |
        // `form.parse(req, (err, fields, files) => ...)`: fields and files
        exists(API::CallNode parse | parse = form.getMember("parse").getACall() |
          this = parse.getParameter(1).getParameter([1, 2]).asSource()
        )
        or
        exists(API::Node on | on = form.getMember("on") |
          (
            // 'file'/'field' events: both callback parameters are user data
            on.getParameter(0).asSink().mayHaveStringValue(["file", "field"]) and
            this = on.getParameter(1).getParameter([0, 1]).asSource()
            or
            // 'part' event: the part is a Readable stream of user data
            on.getParameter(0).asSink().mayHaveStringValue("part") and
            this = readableStreamDataNode(on.getParameter(1).getParameter(0))
          )
        )
      )
    }

    override string getSourceType() { result = "parsed user value from Multiparty" }
  }

  /**
   * A taint step from a multiparty 'part' stream through `pipe` chains.
   */
  private class AdditionalTaintStep extends TaintTracking::SharedTaintStep {
    override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node multipartyOnEvent |
        multipartyOnEvent =
          API::moduleImport("multiparty").getMember("Form").getInstance().getMember("on")
      |
        multipartyOnEvent.getParameter(0).asSink().mayHaveStringValue("part") and
        customStreamPipeAdditionalTaintStep(multipartyOnEvent.getParameter(1).getParameter(0), pred,
          succ)
      )
    }
  }
}
/**
 * A module for modeling the [dicer](https://www.npmjs.com/package/dicer) package.
 */
module Dicer {
  /**
   * A source of remote flow from the `dicer` library: 'part' streams and
   * the 'header' event payloads of each part.
   */
  private class DicerRemoteFlow extends RemoteFlowSource {
    DicerRemoteFlow() {
      exists(API::Node dicer | dicer = API::moduleImport("dicer").getInstance() |
        exists(API::Node on | on = dicer.getMember("on") |
          // 'part' event: the part is a Readable stream of user data
          on.getParameter(0).asSink().mayHaveStringValue("part") and
          this = readableStreamDataNode(on.getParameter(1).getParameter(0))
          or
          // part.on('header', (header) => ...): user-controlled multipart headers
          exists(API::Node onPart | onPart = on.getParameter(1).getParameter(0).getMember("on") |
            onPart.getParameter(0).asSink().mayHaveStringValue("header") and
            this = onPart.getParameter(1).getParameter(0).asSource()
          )
        )
      )
    }

    override string getSourceType() { result = "parsed user value from Dicer" }
  }

  /**
   * A taint step from a dicer 'part' stream through `pipe` chains.
   */
  private class AdditionalTaintStep extends TaintTracking::SharedTaintStep {
    override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
      exists(API::Node onEvent |
        onEvent = API::moduleImport("dicer").getInstance().getMember("on")
      |
        onEvent.getParameter(0).asSink().mayHaveStringValue("part") and
        customStreamPipeAdditionalTaintStep(onEvent.getParameter(1).getParameter(0), pred, succ)
      )
    }
  }
}

View File

@@ -0,0 +1,147 @@
/**
* Provides helper predicates to work with any Readable Stream in dataflow queries
*
* main predicate in which you can use by passing a Readable Stream is `customStreamPipeAdditionalTaintStep`
*/
import javascript
/**
 * Holds if there is a taint step from the first argument of
 * `fs.createReadStream(path)` or `stream.Readable.from(src)` to the
 * resulting stream and to the arguments of any downstream `pipe(...)` calls.
 *
 * Can also be used as a global additional taint step.
 */
predicate readablePipeAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(API::Node receiver |
    receiver =
      [
        API::moduleImport("fs").getMember("createReadStream"),
        API::moduleImport("stream").getMember("Readable").getMember("from")
      ]
  |
    // steps through `pipe` chains rooted at the stream
    customStreamPipeAdditionalTaintStep(receiver, pred, succ)
    or
    // step from the stream's source argument to the stream itself
    pred = receiver.getParameter(0).asSink() and
    succ = receiver.getReturn().asSource()
  )
}
/**
 * Holds if there is a taint step for streams obtained from
 * `fs.promises.open(path)` file handles, including their `pipe` chains.
 *
 * Can also be used as a global additional taint step.
 */
predicate promisesFileHandlePipeAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  exists(API::Node receiver | receiver = nodeJsPromisesFileSystem().getMember("open") |
    customStreamPipeAdditionalTaintStep(receiver, pred, succ)
    or
    // step from the opened path to the resulting file handle
    pred = receiver.getParameter(0).asSink() and
    succ = receiver.getReturn().asSource()
  )
}
/**
 * Gets the Node.js `fs` promises API, reached either as
 * `require("fs").promises` or as `require("fs/promises")`.
 */
API::Node nodeJsPromisesFileSystem() {
  result = [API::moduleImport("fs").getMember("promises"), API::moduleImport("fs/promises")]
}
/**
 * Holds if there is a taint step along a `pipe` chain rooted at `receiver`,
 * where `receiver` is a Readable stream (or an API node producing one):
 *
 * `receiver.pipe(pred).pipe(sth).pipe(succ)`
 *
 * or `receiver.pipe(sth).pipe(pred).pipe(succ)`
 *
 * or `receiver.pipe(succ)` where `receiver` itself is `pred`.
 */
predicate customStreamPipeAdditionalTaintStep(
  API::Node receiver, DataFlow::Node pred, DataFlow::Node succ
) {
  // connect the first pipe argument to every later pipe argument in the chain
  exists(API::Node firstPipe | firstPipe = receiver.getMember("pipe") |
    pred = firstPipe.getParameter(0).asSink() and
    succ = firstPipe.getASuccessor*().getMember("pipe").getParameter(0).asSink()
  )
  or
  // connect a pipe argument to the immediately following pipe argument
  exists(API::Node cn | cn = receiver.getASuccessor+() |
    pred = cn.getParameter(0).asSink() and
    succ = cn.getReturn().getMember("pipe").getParameter(0).asSink()
  )
  or
  // `receiver` is a function whose return value is a Readable stream
  pred = receiver.getReturn().asSource() and
  succ = receiver.getReturn().getMember("pipe").getParameter(0).asSink()
  or
  // `receiver` is itself a Readable stream object
  pred = receiver.asSource() and
  succ = receiver.getMember("pipe").getParameter(0).asSink()
}
/**
 * Holds if there is a taint step between stages of a stream pipeline:
 *
 * ```js
 * await pipeline(
 *   pred,
 *   succ_or_pred,
 *   succ
 * )
 * ```
 *
 * Can also be used as a global additional taint step.
 */
predicate streamPipelineAdditionalTaintStep(DataFlow::Node pred, DataFlow::Node succ) {
  // connect each pipeline argument to the next pipeline argument
  exists(API::CallNode cn, int i |
    // we assume that there are at most ~10 pipeline stages in practice
    i in [0 .. 10] and
    cn = nodeJsStream().getMember("pipeline").getACall()
  |
    pred = cn.getParameter(i).asSink() and
    succ = cn.getParameter(i + 1).asSink()
  )
  or
  // connect the first pipeline argument to every later argument
  exists(API::CallNode cn, int i |
    // we assume that there are at most ~10 pipeline stages in practice
    i in [1 .. 10] and
    cn = nodeJsStream().getMember("pipeline").getACall()
  |
    pred = cn.getParameter(0).asSink() and
    succ = cn.getParameter(i).asSink()
  )
}
/**
 * Gets the Node.js `stream` promises API, reached either as
 * `require("stream/promises")` or as `require("stream").promises`.
 */
API::Node nodeJsStream() {
  result = [API::moduleImport("stream/promises"), API::moduleImport("stream").getMember("promises")]
}
/**
 * Gets a node through which data of the given Readable stream is read:
 * the stream itself, 'data' event chunks, or `read()` results inside a
 * 'readable' event handler.
 */
DataFlow::Node readableStreamDataNode(API::Node stream) {
  result = stream.asSource()
  or
  // 'data' event: the chunk passed to the handler
  exists(API::CallNode onEvent | onEvent = stream.getMember("on").getACall() |
    result = onEvent.getParameter(1).getParameter(0).asSource() and
    onEvent.getParameter(0).asSink().mayHaveStringValue("data")
  )
  or
  // 'readable' event: values returned by `this.read()` / `stream.read()`
  exists(API::CallNode onEvent | onEvent = stream.getMember("on").getACall() |
    (
      result = onEvent.getParameter(1).getReceiver().getMember("read").getReturn().asSource() or
      result = stream.getMember("read").getReturn().asSource()
    ) and
    onEvent.getParameter(0).asSink().mayHaveStringValue("readable")
  )
}

View File

@@ -1,5 +1,5 @@
name: codeql/javascript-queries
version: 0.8.15
version: 0.8.17-dev
groups:
- javascript
- queries

View File

@@ -0,0 +1,234 @@
nodes
| busybus.js:9:30:9:33 | file |
| busybus.js:9:30:9:33 | file |
| busybus.js:9:36:9:39 | info |
| busybus.js:9:36:9:39 | info |
| busybus.js:10:19:10:50 | { filen ... eType } |
| busybus.js:10:19:10:57 | encoding |
| busybus.js:10:19:10:57 | filename |
| busybus.js:10:19:10:57 | mimeType |
| busybus.js:10:21:10:28 | filename |
| busybus.js:10:31:10:38 | encoding |
| busybus.js:10:41:10:48 | mimeType |
| busybus.js:10:54:10:57 | info |
| busybus.js:12:18:12:25 | filename |
| busybus.js:12:18:12:25 | filename |
| busybus.js:12:28:12:35 | encoding |
| busybus.js:12:28:12:35 | encoding |
| busybus.js:12:38:12:45 | mimeType |
| busybus.js:12:38:12:45 | mimeType |
| busybus.js:13:23:13:23 | z |
| busybus.js:13:31:13:36 | sink() |
| busybus.js:13:31:13:36 | sink() |
| busybus.js:15:30:15:33 | data |
| busybus.js:15:30:15:33 | data |
| busybus.js:16:22:16:25 | data |
| busybus.js:16:22:16:25 | data |
| busybus.js:22:25:22:42 | data |
| busybus.js:22:32:22:42 | this.read() |
| busybus.js:22:32:22:42 | this.read() |
| busybus.js:23:26:23:29 | data |
| busybus.js:23:26:23:29 | data |
| busybus.js:27:25:27:28 | name |
| busybus.js:27:25:27:28 | name |
| busybus.js:27:31:27:33 | val |
| busybus.js:27:31:27:33 | val |
| busybus.js:27:36:27:39 | info |
| busybus.js:27:36:27:39 | info |
| busybus.js:28:18:28:21 | name |
| busybus.js:28:18:28:21 | name |
| busybus.js:28:24:28:26 | val |
| busybus.js:28:24:28:26 | val |
| busybus.js:28:29:28:32 | info |
| busybus.js:28:29:28:32 | info |
| dicer.js:12:23:12:26 | part |
| dicer.js:12:23:12:26 | part |
| dicer.js:13:19:13:24 | sink() |
| dicer.js:13:19:13:24 | sink() |
| dicer.js:14:28:14:33 | header |
| dicer.js:14:28:14:33 | header |
| dicer.js:16:22:16:27 | header |
| dicer.js:16:22:16:30 | header[h] |
| dicer.js:16:22:16:30 | header[h] |
| dicer.js:19:26:19:29 | data |
| dicer.js:19:26:19:29 | data |
| dicer.js:20:18:20:21 | data |
| dicer.js:20:18:20:21 | data |
| formidable.js:7:11:7:25 | [fields, files] |
| formidable.js:7:11:7:49 | fields |
| formidable.js:7:11:7:49 | files |
| formidable.js:7:12:7:17 | fields |
| formidable.js:7:20:7:24 | files |
| formidable.js:7:29:7:49 | await f ... se(req) |
| formidable.js:7:35:7:49 | form.parse(req) |
| formidable.js:7:35:7:49 | form.parse(req) |
| formidable.js:8:10:8:15 | fields |
| formidable.js:8:10:8:15 | fields |
| formidable.js:8:18:8:22 | files |
| formidable.js:8:18:8:22 | files |
| formidable.js:9:27:9:34 | formname |
| formidable.js:9:27:9:34 | formname |
| formidable.js:9:37:9:40 | file |
| formidable.js:9:37:9:40 | file |
| formidable.js:10:14:10:21 | formname |
| formidable.js:10:14:10:21 | formname |
| formidable.js:10:24:10:27 | file |
| formidable.js:10:24:10:27 | file |
| formidable.js:12:22:12:29 | formname |
| formidable.js:12:22:12:29 | formname |
| formidable.js:12:32:12:35 | file |
| formidable.js:12:32:12:35 | file |
| formidable.js:13:14:13:21 | formname |
| formidable.js:13:14:13:21 | formname |
| formidable.js:13:24:13:27 | file |
| formidable.js:13:24:13:27 | file |
| formidable.js:15:23:15:31 | fieldName |
| formidable.js:15:23:15:31 | fieldName |
| formidable.js:15:34:15:43 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue |
| formidable.js:16:14:16:22 | fieldName |
| formidable.js:16:14:16:22 | fieldName |
| formidable.js:16:25:16:34 | fieldValue |
| formidable.js:16:25:16:34 | fieldValue |
| multiparty.js:8:22:8:25 | part |
| multiparty.js:8:22:8:25 | part |
| multiparty.js:9:14:9:17 | part |
| multiparty.js:9:14:9:17 | part |
| multiparty.js:10:19:10:24 | sink() |
| multiparty.js:10:19:10:24 | sink() |
| multiparty.js:14:37:14:42 | fields |
| multiparty.js:14:37:14:42 | fields |
| multiparty.js:14:45:14:49 | files |
| multiparty.js:14:45:14:49 | files |
| multiparty.js:15:14:15:19 | fields |
| multiparty.js:15:14:15:19 | fields |
| multiparty.js:15:22:15:26 | files |
| multiparty.js:15:22:15:26 | files |
edges
| busybus.js:9:30:9:33 | file | busybus.js:13:23:13:23 | z |
| busybus.js:9:30:9:33 | file | busybus.js:13:23:13:23 | z |
| busybus.js:9:36:9:39 | info | busybus.js:10:54:10:57 | info |
| busybus.js:9:36:9:39 | info | busybus.js:10:54:10:57 | info |
| busybus.js:10:19:10:50 | { filen ... eType } | busybus.js:10:21:10:28 | filename |
| busybus.js:10:19:10:50 | { filen ... eType } | busybus.js:10:31:10:38 | encoding |
| busybus.js:10:19:10:50 | { filen ... eType } | busybus.js:10:41:10:48 | mimeType |
| busybus.js:10:19:10:57 | encoding | busybus.js:12:28:12:35 | encoding |
| busybus.js:10:19:10:57 | encoding | busybus.js:12:28:12:35 | encoding |
| busybus.js:10:19:10:57 | filename | busybus.js:12:18:12:25 | filename |
| busybus.js:10:19:10:57 | filename | busybus.js:12:18:12:25 | filename |
| busybus.js:10:19:10:57 | mimeType | busybus.js:12:38:12:45 | mimeType |
| busybus.js:10:19:10:57 | mimeType | busybus.js:12:38:12:45 | mimeType |
| busybus.js:10:21:10:28 | filename | busybus.js:10:19:10:57 | filename |
| busybus.js:10:31:10:38 | encoding | busybus.js:10:19:10:57 | encoding |
| busybus.js:10:41:10:48 | mimeType | busybus.js:10:19:10:57 | mimeType |
| busybus.js:10:54:10:57 | info | busybus.js:10:19:10:50 | { filen ... eType } |
| busybus.js:13:23:13:23 | z | busybus.js:13:31:13:36 | sink() |
| busybus.js:13:23:13:23 | z | busybus.js:13:31:13:36 | sink() |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data |
| busybus.js:22:25:22:42 | data | busybus.js:23:26:23:29 | data |
| busybus.js:22:25:22:42 | data | busybus.js:23:26:23:29 | data |
| busybus.js:22:32:22:42 | this.read() | busybus.js:22:25:22:42 | data |
| busybus.js:22:32:22:42 | this.read() | busybus.js:22:25:22:42 | data |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() |
| dicer.js:14:28:14:33 | header | dicer.js:16:22:16:27 | header |
| dicer.js:14:28:14:33 | header | dicer.js:16:22:16:27 | header |
| dicer.js:16:22:16:27 | header | dicer.js:16:22:16:30 | header[h] |
| dicer.js:16:22:16:27 | header | dicer.js:16:22:16:30 | header[h] |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data |
| formidable.js:7:11:7:25 | [fields, files] | formidable.js:7:12:7:17 | fields |
| formidable.js:7:11:7:25 | [fields, files] | formidable.js:7:20:7:24 | files |
| formidable.js:7:11:7:49 | fields | formidable.js:8:10:8:15 | fields |
| formidable.js:7:11:7:49 | fields | formidable.js:8:10:8:15 | fields |
| formidable.js:7:11:7:49 | files | formidable.js:8:18:8:22 | files |
| formidable.js:7:11:7:49 | files | formidable.js:8:18:8:22 | files |
| formidable.js:7:12:7:17 | fields | formidable.js:7:11:7:49 | fields |
| formidable.js:7:20:7:24 | files | formidable.js:7:11:7:49 | files |
| formidable.js:7:29:7:49 | await f ... se(req) | formidable.js:7:11:7:25 | [fields, files] |
| formidable.js:7:35:7:49 | form.parse(req) | formidable.js:7:29:7:49 | await f ... se(req) |
| formidable.js:7:35:7:49 | form.parse(req) | formidable.js:7:29:7:49 | await f ... se(req) |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
| multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files |
#select
| busybus.js:12:18:12:25 | filename | busybus.js:9:36:9:39 | info | busybus.js:12:18:12:25 | filename | This entity depends on a $@. | busybus.js:9:36:9:39 | info | user-provided value |
| busybus.js:12:28:12:35 | encoding | busybus.js:9:36:9:39 | info | busybus.js:12:28:12:35 | encoding | This entity depends on a $@. | busybus.js:9:36:9:39 | info | user-provided value |
| busybus.js:12:38:12:45 | mimeType | busybus.js:9:36:9:39 | info | busybus.js:12:38:12:45 | mimeType | This entity depends on a $@. | busybus.js:9:36:9:39 | info | user-provided value |
| busybus.js:13:31:13:36 | sink() | busybus.js:9:30:9:33 | file | busybus.js:13:31:13:36 | sink() | This entity depends on a $@. | busybus.js:9:30:9:33 | file | user-provided value |
| busybus.js:16:22:16:25 | data | busybus.js:15:30:15:33 | data | busybus.js:16:22:16:25 | data | This entity depends on a $@. | busybus.js:15:30:15:33 | data | user-provided value |
| busybus.js:23:26:23:29 | data | busybus.js:22:32:22:42 | this.read() | busybus.js:23:26:23:29 | data | This entity depends on a $@. | busybus.js:22:32:22:42 | this.read() | user-provided value |
| busybus.js:28:18:28:21 | name | busybus.js:27:25:27:28 | name | busybus.js:28:18:28:21 | name | This entity depends on a $@. | busybus.js:27:25:27:28 | name | user-provided value |
| busybus.js:28:24:28:26 | val | busybus.js:27:31:27:33 | val | busybus.js:28:24:28:26 | val | This entity depends on a $@. | busybus.js:27:31:27:33 | val | user-provided value |
| busybus.js:28:29:28:32 | info | busybus.js:27:36:27:39 | info | busybus.js:28:29:28:32 | info | This entity depends on a $@. | busybus.js:27:36:27:39 | info | user-provided value |
| dicer.js:13:19:13:24 | sink() | dicer.js:12:23:12:26 | part | dicer.js:13:19:13:24 | sink() | This entity depends on a $@. | dicer.js:12:23:12:26 | part | user-provided value |
| dicer.js:16:22:16:30 | header[h] | dicer.js:14:28:14:33 | header | dicer.js:16:22:16:30 | header[h] | This entity depends on a $@. | dicer.js:14:28:14:33 | header | user-provided value |
| dicer.js:20:18:20:21 | data | dicer.js:19:26:19:29 | data | dicer.js:20:18:20:21 | data | This entity depends on a $@. | dicer.js:19:26:19:29 | data | user-provided value |
| formidable.js:8:10:8:15 | fields | formidable.js:7:35:7:49 | form.parse(req) | formidable.js:8:10:8:15 | fields | This entity depends on a $@. | formidable.js:7:35:7:49 | form.parse(req) | user-provided value |
| formidable.js:8:18:8:22 | files | formidable.js:7:35:7:49 | form.parse(req) | formidable.js:8:18:8:22 | files | This entity depends on a $@. | formidable.js:7:35:7:49 | form.parse(req) | user-provided value |
| formidable.js:10:14:10:21 | formname | formidable.js:9:27:9:34 | formname | formidable.js:10:14:10:21 | formname | This entity depends on a $@. | formidable.js:9:27:9:34 | formname | user-provided value |
| formidable.js:10:24:10:27 | file | formidable.js:9:37:9:40 | file | formidable.js:10:24:10:27 | file | This entity depends on a $@. | formidable.js:9:37:9:40 | file | user-provided value |
| formidable.js:13:14:13:21 | formname | formidable.js:12:22:12:29 | formname | formidable.js:13:14:13:21 | formname | This entity depends on a $@. | formidable.js:12:22:12:29 | formname | user-provided value |
| formidable.js:13:24:13:27 | file | formidable.js:12:32:12:35 | file | formidable.js:13:24:13:27 | file | This entity depends on a $@. | formidable.js:12:32:12:35 | file | user-provided value |
| formidable.js:16:14:16:22 | fieldName | formidable.js:15:23:15:31 | fieldName | formidable.js:16:14:16:22 | fieldName | This entity depends on a $@. | formidable.js:15:23:15:31 | fieldName | user-provided value |
| formidable.js:16:25:16:34 | fieldValue | formidable.js:15:34:15:43 | fieldValue | formidable.js:16:25:16:34 | fieldValue | This entity depends on a $@. | formidable.js:15:34:15:43 | fieldValue | user-provided value |
| multiparty.js:9:14:9:17 | part | multiparty.js:8:22:8:25 | part | multiparty.js:9:14:9:17 | part | This entity depends on a $@. | multiparty.js:8:22:8:25 | part | user-provided value |
| multiparty.js:10:19:10:24 | sink() | multiparty.js:8:22:8:25 | part | multiparty.js:10:19:10:24 | sink() | This entity depends on a $@. | multiparty.js:8:22:8:25 | part | user-provided value |
| multiparty.js:15:14:15:19 | fields | multiparty.js:14:37:14:42 | fields | multiparty.js:15:14:15:19 | fields | This entity depends on a $@. | multiparty.js:14:37:14:42 | fields | user-provided value |
| multiparty.js:15:22:15:26 | files | multiparty.js:14:45:14:49 | files | multiparty.js:15:22:15:26 | files | This entity depends on a $@. | multiparty.js:14:45:14:49 | files | user-provided value |

View File

@@ -0,0 +1,34 @@
/**
* @name Remote Form Flow Sources
* @description Using remote user controlled sources from Forms
* @kind path-problem
* @problem.severity error
* @security-severity 5
* @precision high
* @id js/remote-flow-source
* @tags correctness
* security
*/
import javascript
import DataFlow::PathGraph
import experimental.semmle.javascript.FormParsers
/**
 * A taint-tracking configuration for testing form-parser remote flow
 * sources: flow from any `RemoteFlowSource` into the test `sink` module.
 */
class Configuration extends TaintTracking::Configuration {
  Configuration() { this = "RemoteFlowSourcesOUserForm" }

  override predicate isSource(DataFlow::Node source) { source instanceof RemoteFlowSource }

  override predicate isSink(DataFlow::Node sink) {
    // Every argument to, and the return value of, the test `sink` module is a sink.
    sink = API::moduleImport("sink").getAParameter().asSink() or
    sink = API::moduleImport("sink").getReturn().asSource()
  }
}
// Report each flow path from a form-parser source to a test sink.
from Configuration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
where cfg.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "This entity depends on a $@.", source.getNode(),
  "user-provided value"

View File

@@ -0,0 +1,33 @@
const http = require('http'); // Test fixture: busboy form parsing; user-controlled values must reach sink()
const zlib = require('node:zlib');
const busboy = require('busboy');
const sink = require('sink'); // every argument/return of sink() is a taint sink in the test query
http.createServer((req, res) => {
  if (req.method === 'POST') {
    const bb = busboy({ headers: req.headers });
    bb.on('file', (name, file, info) => { // 'file' event: `file` is a Readable stream, `info` holds metadata
      const { filename, encoding, mimeType } = info;
      const z = zlib.createGzip();
      sink(filename, encoding, mimeType) // sink: tainted metadata
      file.pipe(z).pipe(sink()) // tainted stream piped (through gzip) into the sink
      file.on('data', (data) => { // 'data' event: chunks of the uploaded file
        sink(data)
      })
      file.on('readable', function () {
        // There is some data to read now.
        let data;
        while ((data = this.read()) !== null) { // explicit read() of the tainted stream
          sink(data)
        }
      });
    });
    bb.on('field', (name, val, info) => { // 'field' event: form field name/value/metadata
      sink(name, val, info)
    });
  }
}).listen(8000, () => {
  console.log('Listening for requests');
});

View File

@@ -0,0 +1,25 @@
const { inspect } = require('util'); // Test fixture: dicer multipart parsing; tainted values must reach sink()
const http = require('http');
const Dicer = require('dicer');
const sink = require('sink');
const PORT = 8080;
http.createServer((req, res) => {
  let m; // NOTE(review): `m` is never assigned, so `m[1]` throws at runtime — acceptable for a static-analysis fixture
  const dicer = new Dicer({ boundary: m[1] || m[2] });
  dicer.on('part', (part) => { // each `part` is a Readable stream of user-supplied data
    part.pipe(sink())
    part.on('header', (header) => { // user-controlled multipart headers
      for (h in header) {
        sink(header[h])
      }
    });
    part.on('data', (data) => { // user-controlled body chunks
      sink(data)
    });
  });
}).listen(PORT, () => {
  console.log(`Listening for requests on port ${PORT}`);
});

View File

@@ -0,0 +1,22 @@
import http from 'node:http'; // Test fixture: formidable form parsing; parsed values must reach sink()
import formidable from 'formidable';
const sink = require('sink'); // NOTE(review): mixes ESM imports with require — fine for a static-analysis fixture
const server = http.createServer(async (req, res) => {
  const form = formidable({});
  const [fields, files] = await form.parse(req); // promise form: resolves to [fields, files]
  sink(fields, files)
  form.on('fileBegin', (formname, file) => { // event handlers: all parameters are user-controlled
    sink(formname, file)
  });
  form.on('file', (formname, file) => {
    sink(formname, file)
  });
  form.on('field', (fieldName, fieldValue) => {
    sink(fieldName, fieldValue)
  });
});
server.listen(8080, () => {
  console.log('Server listening on http://localhost:8080/ ...');
});

View File

@@ -0,0 +1,19 @@
var multiparty = require('multiparty'); // Test fixture: multiparty form parsing; parsed values must reach sink()
var http = require('http');
var util = require('util');
const sink = require('sink');
http.createServer(function (req, res) {
  var form = new multiparty.Form();
  form.on('part', (part) => { // each `part` is a Readable stream of user data
    sink(part)
    part.pipe(sink())
  });
  var form2 = new multiparty.Form();
  form2.parse(req, function (err, fields, files) { // callback form: fields/files are user-controlled
    sink(fields, files)
  });
  form2.parse(req); // no callback variant
}).listen(8080);

View File

@@ -52,9 +52,14 @@ test_FileSystemAccess
| tst.js:56:1:56:18 | shelljs.uniq(file) |
| tst.js:57:1:57:26 | shelljs ... file2) |
| tst.js:58:1:58:32 | shelljs ... file2) |
| tst.js:60:1:60:17 | shelljs.cat(file) |
| tst.js:60:1:60:41 | shelljs ... cement) |
| tst.js:61:1:61:17 | shelljs.cat(file) |
test_MissingFileSystemAccess
test_SystemCommandExecution
| tst.js:14:1:14:27 | shelljs ... ts, cb) |
| tst.js:60:1:60:51 | shelljs ... ec(cmd) |
| tst.js:61:1:61:27 | shelljs ... ec(cmd) |
test_FileNameSource
| tst.js:15:1:15:26 | shelljs ... file2) |
| tst.js:24:1:24:16 | shelljs.ls(file) |

View File

@@ -56,3 +56,6 @@ shelljs.touch(file1, file2);
shelljs.uniq(file);
shelljs.uniq(file1, file2);
shelljs.uniq(opts, file1, file2);
shelljs.cat(file).sed(regex, replacement).exec(cmd); // chained file read piped into command execution
shelljs.cat(file).exec(cmd); // cat(...).exec(...): both a file access and a command execution

View File

@@ -0,0 +1,391 @@
nodes
| adm-zip.js:13:13:13:21 | req.files |
| adm-zip.js:13:13:13:21 | req.files |
| adm-zip.js:13:13:13:33 | req.fil ... ombFile |
| adm-zip.js:17:18:17:24 | tarFile |
| adm-zip.js:24:22:24:28 | tarFile |
| adm-zip.js:24:22:24:33 | tarFile.data |
| adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:36:5:36:38 | admZip. ... , true) |
| adm-zip.js:36:5:36:38 | admZip. ... , true) |
| decompress.js:11:16:11:33 | req.query.filePath |
| decompress.js:11:16:11:33 | req.query.filePath |
| decompress.js:11:16:11:33 | req.query.filePath |
| jszip.js:12:13:12:21 | req.files |
| jszip.js:12:13:12:21 | req.files |
| jszip.js:12:13:12:33 | req.fil ... ombFile |
| jszip.js:12:13:12:38 | req.fil ... le.data |
| jszip.js:32:18:32:24 | zipFile |
| jszip.js:33:22:33:28 | zipFile |
| jszip.js:33:22:33:33 | zipFile.data |
| jszip.js:33:22:33:33 | zipFile.data |
| node-tar.js:15:13:15:21 | req.files |
| node-tar.js:15:13:15:21 | req.files |
| node-tar.js:15:13:15:33 | req.fil ... ombFile |
| node-tar.js:15:13:15:38 | req.fil ... le.data |
| node-tar.js:19:18:19:24 | tarFile |
| node-tar.js:21:23:21:49 | Readabl ... e.data) |
| node-tar.js:21:37:21:43 | tarFile |
| node-tar.js:21:37:21:48 | tarFile.data |
| node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:29:5:29:37 | fs.crea ... e.name) |
| node-tar.js:29:25:29:31 | tarFile |
| node-tar.js:29:25:29:36 | tarFile.name |
| node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:45:5:45:37 | fs.crea ... e.name) |
| node-tar.js:45:25:45:31 | tarFile |
| node-tar.js:45:25:45:36 | tarFile.name |
| node-tar.js:46:9:46:20 | decompressor |
| node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:58:19:58:25 | tarFile |
| node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:59:25:59:31 | tarFile |
| node-tar.js:59:25:59:36 | tarFile.name |
| node-tar.js:59:25:59:36 | tarFile.name |
| pako.js:12:14:12:22 | req.files |
| pako.js:12:14:12:22 | req.files |
| pako.js:12:14:12:34 | req.fil ... ombFile |
| pako.js:12:14:12:39 | req.fil ... le.data |
| pako.js:13:14:13:22 | req.files |
| pako.js:13:14:13:22 | req.files |
| pako.js:13:14:13:34 | req.fil ... ombFile |
| pako.js:13:14:13:39 | req.fil ... le.data |
| pako.js:17:19:17:25 | zipFile |
| pako.js:18:11:18:68 | myArray |
| pako.js:18:21:18:68 | Buffer. ... uffer)) |
| pako.js:18:33:18:67 | new Uin ... buffer) |
| pako.js:18:48:18:54 | zipFile |
| pako.js:18:48:18:59 | zipFile.data |
| pako.js:18:48:18:66 | zipFile.data.buffer |
| pako.js:21:31:21:37 | myArray |
| pako.js:21:31:21:37 | myArray |
| pako.js:28:19:28:25 | zipFile |
| pako.js:29:11:29:62 | myArray |
| pako.js:29:21:29:55 | new Uin ... buffer) |
| pako.js:29:21:29:62 | new Uin ... .buffer |
| pako.js:29:36:29:42 | zipFile |
| pako.js:29:36:29:47 | zipFile.data |
| pako.js:29:36:29:54 | zipFile.data.buffer |
| pako.js:32:31:32:37 | myArray |
| pako.js:32:31:32:37 | myArray |
| unbzip2.js:12:5:12:43 | fs.crea ... lePath) |
| unbzip2.js:12:25:12:42 | req.query.FilePath |
| unbzip2.js:12:25:12:42 | req.query.FilePath |
| unbzip2.js:12:50:12:54 | bz2() |
| unbzip2.js:12:50:12:54 | bz2() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) |
| unzipper.js:13:40:13:48 | req.files |
| unzipper.js:13:40:13:48 | req.files |
| unzipper.js:13:40:13:56 | req.files.ZipFile |
| unzipper.js:13:40:13:61 | req.fil ... le.data |
| unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:73:23:73:38 | unzipper.Parse() |
| unzipper.js:73:23:73:38 | unzipper.Parse() |
| yauzl.js:12:18:12:26 | req.files |
| yauzl.js:12:18:12:26 | req.files |
| yauzl.js:12:18:12:34 | req.files.zipFile |
| yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:13:22:13:30 | req.files |
| yauzl.js:13:22:13:30 | req.files |
| yauzl.js:13:22:13:38 | req.files.zipFile |
| yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:14:34:14:42 | req.files |
| yauzl.js:14:34:14:42 | req.files |
| yauzl.js:14:34:14:50 | req.files.zipFile |
| yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:37:16:37:33 | req.query.filePath |
| yauzl.js:37:16:37:33 | req.query.filePath |
| yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:41:64:41:73 | readStream |
| yauzl.js:41:64:41:73 | readStream |
| yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:43:21:43:39 | zipfile.readEntry() |
| zlib.js:15:19:15:27 | req.files |
| zlib.js:15:19:15:27 | req.files |
| zlib.js:15:19:15:39 | req.fil ... ombFile |
| zlib.js:15:19:15:44 | req.fil ... le.data |
| zlib.js:17:18:17:26 | req.files |
| zlib.js:17:18:17:26 | req.files |
| zlib.js:17:18:17:38 | req.fil ... ombFile |
| zlib.js:17:18:17:43 | req.fil ... le.data |
| zlib.js:19:24:19:32 | req.files |
| zlib.js:19:24:19:32 | req.files |
| zlib.js:19:24:19:44 | req.fil ... ombFile |
| zlib.js:19:24:19:49 | req.fil ... le.data |
| zlib.js:21:32:21:40 | req.files |
| zlib.js:21:32:21:40 | req.files |
| zlib.js:21:32:21:52 | req.fil ... ombFile |
| zlib.js:21:32:21:57 | req.fil ... le.data |
| zlib.js:27:24:27:30 | zipFile |
| zlib.js:29:9:29:15 | zipFile |
| zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:33:9:33:15 | zipFile |
| zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:38:9:38:15 | zipFile |
| zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:62:23:62:29 | zipFile |
| zlib.js:63:21:63:27 | zipFile |
| zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:64:20:64:26 | zipFile |
| zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:65:31:65:37 | zipFile |
| zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:74:29:74:35 | zipFile |
| zlib.js:75:25:75:51 | Readabl ... e.data) |
| zlib.js:75:39:75:45 | zipFile |
| zlib.js:75:39:75:50 | zipFile.data |
| zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:82:43:82:49 | zipFile |
| zlib.js:83:11:83:51 | inputStream |
| zlib.js:83:25:83:51 | Readabl ... e.data) |
| zlib.js:83:39:83:45 | zipFile |
| zlib.js:83:39:83:50 | zipFile.data |
| zlib.js:86:9:86:19 | inputStream |
| zlib.js:87:9:87:27 | zlib.createGunzip() |
| zlib.js:87:9:87:27 | zlib.createGunzip() |
edges
| adm-zip.js:13:13:13:21 | req.files | adm-zip.js:13:13:13:33 | req.fil ... ombFile |
| adm-zip.js:13:13:13:21 | req.files | adm-zip.js:13:13:13:33 | req.fil ... ombFile |
| adm-zip.js:13:13:13:33 | req.fil ... ombFile | adm-zip.js:17:18:17:24 | tarFile |
| adm-zip.js:17:18:17:24 | tarFile | adm-zip.js:24:22:24:28 | tarFile |
| adm-zip.js:24:22:24:28 | tarFile | adm-zip.js:24:22:24:33 | tarFile.data |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:28:25:28:42 | zipEntry.getData() |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:32:17:32:41 | admZip. ... "10GB") |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:34:5:34:55 | admZip. ... , true) |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:36:5:36:38 | admZip. ... , true) |
| adm-zip.js:24:22:24:33 | tarFile.data | adm-zip.js:36:5:36:38 | admZip. ... , true) |
| decompress.js:11:16:11:33 | req.query.filePath | decompress.js:11:16:11:33 | req.query.filePath |
| jszip.js:12:13:12:21 | req.files | jszip.js:12:13:12:33 | req.fil ... ombFile |
| jszip.js:12:13:12:21 | req.files | jszip.js:12:13:12:33 | req.fil ... ombFile |
| jszip.js:12:13:12:33 | req.fil ... ombFile | jszip.js:12:13:12:38 | req.fil ... le.data |
| jszip.js:12:13:12:38 | req.fil ... le.data | jszip.js:32:18:32:24 | zipFile |
| jszip.js:32:18:32:24 | zipFile | jszip.js:33:22:33:28 | zipFile |
| jszip.js:33:22:33:28 | zipFile | jszip.js:33:22:33:33 | zipFile.data |
| jszip.js:33:22:33:28 | zipFile | jszip.js:33:22:33:33 | zipFile.data |
| node-tar.js:15:13:15:21 | req.files | node-tar.js:15:13:15:33 | req.fil ... ombFile |
| node-tar.js:15:13:15:21 | req.files | node-tar.js:15:13:15:33 | req.fil ... ombFile |
| node-tar.js:15:13:15:33 | req.fil ... ombFile | node-tar.js:15:13:15:38 | req.fil ... le.data |
| node-tar.js:15:13:15:38 | req.fil ... le.data | node-tar.js:19:18:19:24 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:21:37:21:43 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:29:25:29:31 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:45:25:45:31 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:58:19:58:25 | tarFile |
| node-tar.js:19:18:19:24 | tarFile | node-tar.js:59:25:59:31 | tarFile |
| node-tar.js:21:23:21:49 | Readabl ... e.data) | node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:21:23:21:49 | Readabl ... e.data) | node-tar.js:24:9:24:15 | tar.x() |
| node-tar.js:21:37:21:43 | tarFile | node-tar.js:21:37:21:48 | tarFile.data |
| node-tar.js:21:37:21:48 | tarFile.data | node-tar.js:21:23:21:49 | Readabl ... e.data) |
| node-tar.js:29:5:29:37 | fs.crea ... e.name) | node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:29:5:29:37 | fs.crea ... e.name) | node-tar.js:30:9:33:10 | tar.x({ ... }) |
| node-tar.js:29:25:29:31 | tarFile | node-tar.js:29:25:29:36 | tarFile.name |
| node-tar.js:29:25:29:36 | tarFile.name | node-tar.js:29:5:29:37 | fs.crea ... e.name) |
| node-tar.js:45:5:45:37 | fs.crea ... e.name) | node-tar.js:46:9:46:20 | decompressor |
| node-tar.js:45:25:45:31 | tarFile | node-tar.js:45:25:45:36 | tarFile.name |
| node-tar.js:45:25:45:36 | tarFile.name | node-tar.js:45:5:45:37 | fs.crea ... e.name) |
| node-tar.js:46:9:46:20 | decompressor | node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:46:9:46:20 | decompressor | node-tar.js:48:9:50:10 | tar.x({ ... }) |
| node-tar.js:58:19:58:25 | tarFile | node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:58:19:58:25 | tarFile | node-tar.js:58:19:58:30 | tarFile.name |
| node-tar.js:59:25:59:31 | tarFile | node-tar.js:59:25:59:36 | tarFile.name |
| node-tar.js:59:25:59:31 | tarFile | node-tar.js:59:25:59:36 | tarFile.name |
| pako.js:12:14:12:22 | req.files | pako.js:12:14:12:34 | req.fil ... ombFile |
| pako.js:12:14:12:22 | req.files | pako.js:12:14:12:34 | req.fil ... ombFile |
| pako.js:12:14:12:34 | req.fil ... ombFile | pako.js:12:14:12:39 | req.fil ... le.data |
| pako.js:12:14:12:39 | req.fil ... le.data | pako.js:17:19:17:25 | zipFile |
| pako.js:13:14:13:22 | req.files | pako.js:13:14:13:34 | req.fil ... ombFile |
| pako.js:13:14:13:22 | req.files | pako.js:13:14:13:34 | req.fil ... ombFile |
| pako.js:13:14:13:34 | req.fil ... ombFile | pako.js:13:14:13:39 | req.fil ... le.data |
| pako.js:13:14:13:39 | req.fil ... le.data | pako.js:28:19:28:25 | zipFile |
| pako.js:17:19:17:25 | zipFile | pako.js:18:48:18:54 | zipFile |
| pako.js:18:11:18:68 | myArray | pako.js:21:31:21:37 | myArray |
| pako.js:18:11:18:68 | myArray | pako.js:21:31:21:37 | myArray |
| pako.js:18:21:18:68 | Buffer. ... uffer)) | pako.js:18:11:18:68 | myArray |
| pako.js:18:33:18:67 | new Uin ... buffer) | pako.js:18:21:18:68 | Buffer. ... uffer)) |
| pako.js:18:48:18:54 | zipFile | pako.js:18:48:18:59 | zipFile.data |
| pako.js:18:48:18:59 | zipFile.data | pako.js:18:48:18:66 | zipFile.data.buffer |
| pako.js:18:48:18:66 | zipFile.data.buffer | pako.js:18:33:18:67 | new Uin ... buffer) |
| pako.js:28:19:28:25 | zipFile | pako.js:29:36:29:42 | zipFile |
| pako.js:29:11:29:62 | myArray | pako.js:32:31:32:37 | myArray |
| pako.js:29:11:29:62 | myArray | pako.js:32:31:32:37 | myArray |
| pako.js:29:21:29:55 | new Uin ... buffer) | pako.js:29:21:29:62 | new Uin ... .buffer |
| pako.js:29:21:29:62 | new Uin ... .buffer | pako.js:29:11:29:62 | myArray |
| pako.js:29:36:29:42 | zipFile | pako.js:29:36:29:47 | zipFile.data |
| pako.js:29:36:29:47 | zipFile.data | pako.js:29:36:29:54 | zipFile.data.buffer |
| pako.js:29:36:29:54 | zipFile.data.buffer | pako.js:29:21:29:55 | new Uin ... buffer) |
| unbzip2.js:12:5:12:43 | fs.crea ... lePath) | unbzip2.js:12:50:12:54 | bz2() |
| unbzip2.js:12:5:12:43 | fs.crea ... lePath) | unbzip2.js:12:50:12:54 | bz2() |
| unbzip2.js:12:25:12:42 | req.query.FilePath | unbzip2.js:12:5:12:43 | fs.crea ... lePath) |
| unbzip2.js:12:25:12:42 | req.query.FilePath | unbzip2.js:12:5:12:43 | fs.crea ... lePath) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:16:23:16:63 | unzippe ... ath' }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:19:23:19:41 | unzipper.ParseOne() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:24:15:24:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:34:15:34:30 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:41:35:41:71 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:51:36:51:72 | unzippe ... true }) |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:60:23:60:38 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:73:23:73:38 | unzipper.Parse() |
| unzipper.js:13:26:13:62 | Readabl ... e.data) | unzipper.js:73:23:73:38 | unzipper.Parse() |
| unzipper.js:13:40:13:48 | req.files | unzipper.js:13:40:13:56 | req.files.ZipFile |
| unzipper.js:13:40:13:48 | req.files | unzipper.js:13:40:13:56 | req.files.ZipFile |
| unzipper.js:13:40:13:56 | req.files.ZipFile | unzipper.js:13:40:13:61 | req.fil ... le.data |
| unzipper.js:13:40:13:61 | req.fil ... le.data | unzipper.js:13:26:13:62 | Readabl ... e.data) |
| yauzl.js:12:18:12:26 | req.files | yauzl.js:12:18:12:34 | req.files.zipFile |
| yauzl.js:12:18:12:26 | req.files | yauzl.js:12:18:12:34 | req.files.zipFile |
| yauzl.js:12:18:12:34 | req.files.zipFile | yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:12:18:12:34 | req.files.zipFile | yauzl.js:12:18:12:39 | req.fil ... le.data |
| yauzl.js:13:22:13:30 | req.files | yauzl.js:13:22:13:38 | req.files.zipFile |
| yauzl.js:13:22:13:30 | req.files | yauzl.js:13:22:13:38 | req.files.zipFile |
| yauzl.js:13:22:13:38 | req.files.zipFile | yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:13:22:13:38 | req.files.zipFile | yauzl.js:13:22:13:43 | req.fil ... le.data |
| yauzl.js:14:34:14:42 | req.files | yauzl.js:14:34:14:50 | req.files.zipFile |
| yauzl.js:14:34:14:42 | req.files | yauzl.js:14:34:14:50 | req.files.zipFile |
| yauzl.js:14:34:14:50 | req.files.zipFile | yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:14:34:14:50 | req.files.zipFile | yauzl.js:14:34:14:55 | req.fil ... le.data |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() |
| zlib.js:15:19:15:27 | req.files | zlib.js:15:19:15:39 | req.fil ... ombFile |
| zlib.js:15:19:15:27 | req.files | zlib.js:15:19:15:39 | req.fil ... ombFile |
| zlib.js:15:19:15:39 | req.fil ... ombFile | zlib.js:15:19:15:44 | req.fil ... le.data |
| zlib.js:15:19:15:44 | req.fil ... le.data | zlib.js:27:24:27:30 | zipFile |
| zlib.js:17:18:17:26 | req.files | zlib.js:17:18:17:38 | req.fil ... ombFile |
| zlib.js:17:18:17:26 | req.files | zlib.js:17:18:17:38 | req.fil ... ombFile |
| zlib.js:17:18:17:38 | req.fil ... ombFile | zlib.js:17:18:17:43 | req.fil ... le.data |
| zlib.js:17:18:17:43 | req.fil ... le.data | zlib.js:62:23:62:29 | zipFile |
| zlib.js:19:24:19:32 | req.files | zlib.js:19:24:19:44 | req.fil ... ombFile |
| zlib.js:19:24:19:32 | req.files | zlib.js:19:24:19:44 | req.fil ... ombFile |
| zlib.js:19:24:19:44 | req.fil ... ombFile | zlib.js:19:24:19:49 | req.fil ... le.data |
| zlib.js:19:24:19:49 | req.fil ... le.data | zlib.js:74:29:74:35 | zipFile |
| zlib.js:21:32:21:40 | req.files | zlib.js:21:32:21:52 | req.fil ... ombFile |
| zlib.js:21:32:21:40 | req.files | zlib.js:21:32:21:52 | req.fil ... ombFile |
| zlib.js:21:32:21:52 | req.fil ... ombFile | zlib.js:21:32:21:57 | req.fil ... le.data |
| zlib.js:21:32:21:57 | req.fil ... le.data | zlib.js:82:43:82:49 | zipFile |
| zlib.js:27:24:27:30 | zipFile | zlib.js:29:9:29:15 | zipFile |
| zlib.js:27:24:27:30 | zipFile | zlib.js:33:9:33:15 | zipFile |
| zlib.js:27:24:27:30 | zipFile | zlib.js:38:9:38:15 | zipFile |
| zlib.js:29:9:29:15 | zipFile | zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:29:9:29:15 | zipFile | zlib.js:29:9:29:20 | zipFile.data |
| zlib.js:33:9:33:15 | zipFile | zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:33:9:33:15 | zipFile | zlib.js:33:9:33:20 | zipFile.data |
| zlib.js:38:9:38:15 | zipFile | zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:38:9:38:15 | zipFile | zlib.js:38:9:38:20 | zipFile.data |
| zlib.js:62:23:62:29 | zipFile | zlib.js:63:21:63:27 | zipFile |
| zlib.js:62:23:62:29 | zipFile | zlib.js:64:20:64:26 | zipFile |
| zlib.js:62:23:62:29 | zipFile | zlib.js:65:31:65:37 | zipFile |
| zlib.js:63:21:63:27 | zipFile | zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:63:21:63:27 | zipFile | zlib.js:63:21:63:32 | zipFile.data |
| zlib.js:64:20:64:26 | zipFile | zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:64:20:64:26 | zipFile | zlib.js:64:20:64:31 | zipFile.data |
| zlib.js:65:31:65:37 | zipFile | zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:65:31:65:37 | zipFile | zlib.js:65:31:65:42 | zipFile.data |
| zlib.js:74:29:74:35 | zipFile | zlib.js:75:39:75:45 | zipFile |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:77:22:77:40 | zlib.createGunzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:78:22:78:39 | zlib.createUnzip() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:75:25:75:51 | Readabl ... e.data) | zlib.js:79:22:79:50 | zlib.cr ... press() |
| zlib.js:75:39:75:45 | zipFile | zlib.js:75:39:75:50 | zipFile.data |
| zlib.js:75:39:75:50 | zipFile.data | zlib.js:75:25:75:51 | Readabl ... e.data) |
| zlib.js:82:43:82:49 | zipFile | zlib.js:83:39:83:45 | zipFile |
| zlib.js:83:11:83:51 | inputStream | zlib.js:86:9:86:19 | inputStream |
| zlib.js:83:25:83:51 | Readabl ... e.data) | zlib.js:83:11:83:51 | inputStream |
| zlib.js:83:39:83:45 | zipFile | zlib.js:83:39:83:50 | zipFile.data |
| zlib.js:83:39:83:50 | zipFile.data | zlib.js:83:25:83:51 | Readabl ... e.data) |
| zlib.js:86:9:86:19 | inputStream | zlib.js:87:9:87:27 | zlib.createGunzip() |
| zlib.js:86:9:86:19 | inputStream | zlib.js:87:9:87:27 | zlib.createGunzip() |
#select
| adm-zip.js:28:25:28:42 | zipEntry.getData() | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:28:25:28:42 | zipEntry.getData() | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| adm-zip.js:32:17:32:41 | admZip. ... "10GB") | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:32:17:32:41 | admZip. ... "10GB") | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| adm-zip.js:34:5:34:55 | admZip. ... , true) | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:34:5:34:55 | admZip. ... , true) | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| adm-zip.js:36:5:36:38 | admZip. ... , true) | adm-zip.js:13:13:13:21 | req.files | adm-zip.js:36:5:36:38 | admZip. ... , true) | This Decompression depends on a $@. | adm-zip.js:13:13:13:21 | req.files | potentially untrusted source |
| decompress.js:11:16:11:33 | req.query.filePath | decompress.js:11:16:11:33 | req.query.filePath | decompress.js:11:16:11:33 | req.query.filePath | This Decompression depends on a $@. | decompress.js:11:16:11:33 | req.query.filePath | potentially untrusted source |
| jszip.js:33:22:33:33 | zipFile.data | jszip.js:12:13:12:21 | req.files | jszip.js:33:22:33:33 | zipFile.data | This Decompression depends on a $@. | jszip.js:12:13:12:21 | req.files | potentially untrusted source |
| node-tar.js:24:9:24:15 | tar.x() | node-tar.js:15:13:15:21 | req.files | node-tar.js:24:9:24:15 | tar.x() | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:30:9:33:10 | tar.x({ ... }) | node-tar.js:15:13:15:21 | req.files | node-tar.js:30:9:33:10 | tar.x({ ... }) | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:48:9:50:10 | tar.x({ ... }) | node-tar.js:15:13:15:21 | req.files | node-tar.js:48:9:50:10 | tar.x({ ... }) | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:58:19:58:30 | tarFile.name | node-tar.js:15:13:15:21 | req.files | node-tar.js:58:19:58:30 | tarFile.name | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| node-tar.js:59:25:59:36 | tarFile.name | node-tar.js:15:13:15:21 | req.files | node-tar.js:59:25:59:36 | tarFile.name | This Decompression depends on a $@. | node-tar.js:15:13:15:21 | req.files | potentially untrusted source |
| pako.js:21:31:21:37 | myArray | pako.js:12:14:12:22 | req.files | pako.js:21:31:21:37 | myArray | This Decompression depends on a $@. | pako.js:12:14:12:22 | req.files | potentially untrusted source |
| pako.js:32:31:32:37 | myArray | pako.js:13:14:13:22 | req.files | pako.js:32:31:32:37 | myArray | This Decompression depends on a $@. | pako.js:13:14:13:22 | req.files | potentially untrusted source |
| unbzip2.js:12:50:12:54 | bz2() | unbzip2.js:12:25:12:42 | req.query.FilePath | unbzip2.js:12:50:12:54 | bz2() | This Decompression depends on a $@. | unbzip2.js:12:25:12:42 | req.query.FilePath | potentially untrusted source |
| unzipper.js:16:23:16:63 | unzippe ... ath' }) | unzipper.js:13:40:13:48 | req.files | unzipper.js:16:23:16:63 | unzippe ... ath' }) | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:19:23:19:41 | unzipper.ParseOne() | unzipper.js:13:40:13:48 | req.files | unzipper.js:19:23:19:41 | unzipper.ParseOne() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:24:15:24:30 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:24:15:24:30 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:34:15:34:30 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:34:15:34:30 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:41:35:41:71 | unzippe ... true }) | unzipper.js:13:40:13:48 | req.files | unzipper.js:41:35:41:71 | unzippe ... true }) | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:51:36:51:72 | unzippe ... true }) | unzipper.js:13:40:13:48 | req.files | unzipper.js:51:36:51:72 | unzippe ... true }) | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:60:23:60:38 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:60:23:60:38 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| unzipper.js:73:23:73:38 | unzipper.Parse() | unzipper.js:13:40:13:48 | req.files | unzipper.js:73:23:73:38 | unzipper.Parse() | This Decompression depends on a $@. | unzipper.js:13:40:13:48 | req.files | potentially untrusted source |
| yauzl.js:12:18:12:39 | req.fil ... le.data | yauzl.js:12:18:12:26 | req.files | yauzl.js:12:18:12:39 | req.fil ... le.data | This Decompression depends on a $@. | yauzl.js:12:18:12:26 | req.files | potentially untrusted source |
| yauzl.js:13:22:13:43 | req.fil ... le.data | yauzl.js:13:22:13:30 | req.files | yauzl.js:13:22:13:43 | req.fil ... le.data | This Decompression depends on a $@. | yauzl.js:13:22:13:30 | req.files | potentially untrusted source |
| yauzl.js:14:34:14:55 | req.fil ... le.data | yauzl.js:14:34:14:42 | req.files | yauzl.js:14:34:14:55 | req.fil ... le.data | This Decompression depends on a $@. | yauzl.js:14:34:14:42 | req.files | potentially untrusted source |
| yauzl.js:39:9:39:27 | zipfile.readEntry() | yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:39:9:39:27 | zipfile.readEntry() | This Decompression depends on a $@. | yauzl.js:37:16:37:33 | req.query.filePath | potentially untrusted source |
| yauzl.js:41:64:41:73 | readStream | yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:41:64:41:73 | readStream | This Decompression depends on a $@. | yauzl.js:37:16:37:33 | req.query.filePath | potentially untrusted source |
| yauzl.js:43:21:43:39 | zipfile.readEntry() | yauzl.js:37:16:37:33 | req.query.filePath | yauzl.js:43:21:43:39 | zipfile.readEntry() | This Decompression depends on a $@. | yauzl.js:37:16:37:33 | req.query.filePath | potentially untrusted source |
| zlib.js:29:9:29:20 | zipFile.data | zlib.js:15:19:15:27 | req.files | zlib.js:29:9:29:20 | zipFile.data | This Decompression depends on a $@. | zlib.js:15:19:15:27 | req.files | potentially untrusted source |
| zlib.js:33:9:33:20 | zipFile.data | zlib.js:15:19:15:27 | req.files | zlib.js:33:9:33:20 | zipFile.data | This Decompression depends on a $@. | zlib.js:15:19:15:27 | req.files | potentially untrusted source |
| zlib.js:38:9:38:20 | zipFile.data | zlib.js:15:19:15:27 | req.files | zlib.js:38:9:38:20 | zipFile.data | This Decompression depends on a $@. | zlib.js:15:19:15:27 | req.files | potentially untrusted source |
| zlib.js:63:21:63:32 | zipFile.data | zlib.js:17:18:17:26 | req.files | zlib.js:63:21:63:32 | zipFile.data | This Decompression depends on a $@. | zlib.js:17:18:17:26 | req.files | potentially untrusted source |
| zlib.js:64:20:64:31 | zipFile.data | zlib.js:17:18:17:26 | req.files | zlib.js:64:20:64:31 | zipFile.data | This Decompression depends on a $@. | zlib.js:17:18:17:26 | req.files | potentially untrusted source |
| zlib.js:65:31:65:42 | zipFile.data | zlib.js:17:18:17:26 | req.files | zlib.js:65:31:65:42 | zipFile.data | This Decompression depends on a $@. | zlib.js:17:18:17:26 | req.files | potentially untrusted source |
| zlib.js:77:22:77:40 | zlib.createGunzip() | zlib.js:19:24:19:32 | req.files | zlib.js:77:22:77:40 | zlib.createGunzip() | This Decompression depends on a $@. | zlib.js:19:24:19:32 | req.files | potentially untrusted source |
| zlib.js:78:22:78:39 | zlib.createUnzip() | zlib.js:19:24:19:32 | req.files | zlib.js:78:22:78:39 | zlib.createUnzip() | This Decompression depends on a $@. | zlib.js:19:24:19:32 | req.files | potentially untrusted source |
| zlib.js:79:22:79:50 | zlib.cr ... press() | zlib.js:19:24:19:32 | req.files | zlib.js:79:22:79:50 | zlib.cr ... press() | This Decompression depends on a $@. | zlib.js:19:24:19:32 | req.files | potentially untrusted source |
| zlib.js:87:9:87:27 | zlib.createGunzip() | zlib.js:21:32:21:40 | req.files | zlib.js:87:9:87:27 | zlib.createGunzip() | This Decompression depends on a $@. | zlib.js:21:32:21:40 | req.files | potentially untrusted source |

View File

@@ -0,0 +1 @@
experimental/Security/CWE-522-DecompressionBombs/DecompressionBombs.ql

View File

@@ -0,0 +1,37 @@
const AdmZip = require("adm-zip");
const express = require('express')
const fileUpload = require("express-fileupload");
const fs = require("fs");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
console.log(`Example app listening on port ${port}`)
});
app.post('/upload', (req, res) => {
zipBomb(req.files.zipBombFile)
res.send('Hello World!')
});
function zipBomb(tarFile) {
fs.writeFileSync(tarFile.name, tarFile.data);
// or using fs.writeFile
// file path is a tmp file name that can get from DB after saving to DB with remote file upload
// so the input file name will come from a DB source
const admZip
= new AdmZip(tarFile.data);
const zipEntries = admZip.getEntries();
zipEntries.forEach(function (zipEntry) {
if (zipEntry.entryName === "my_file.txt") {
console.log(zipEntry.getData().toString("utf8"));
}
});
// outputs the content of file named 10GB
console.log(admZip.readAsText("10GB"));
// extracts the specified file to the specified location
admZip.extractEntryTo("10GB", "/tmp/", false, true);
// extracts everything
admZip.extractAllTo("./tmp", true);
}

View File

@@ -0,0 +1,16 @@
const decompress = require('decompress');
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
decompress(req.query.filePath, 'dist').then(files => {
console.log('done!');
});
res.send("OK")
});

View File

@@ -0,0 +1,63 @@
// CodeQL test fixture: decompression bombs via the "fflate" package.
// The calls marked "NOT OK" decompress attacker-controlled data with no bound;
// the calls marked "OK" pass a `filter` option rejecting entries over 1 MB.
const fflate = require('fflate');
const express = require('express')
const fileUpload = require("express-fileupload");
const { writeFileSync } = require("fs");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
// Untrusted source: the uploaded compressed bytes (req.files.CompressedFile.data).
app.post('/upload', async (req, res) => {
  // NOT OK
  fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data));
  fflate.unzip(new Uint8Array(new Uint8Array(req.files.CompressedFile.data)));
  fflate.unzlib(new Uint8Array(req.files.CompressedFile.data));
  fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data));
  fflate.gunzip(new Uint8Array(req.files.CompressedFile.data));
  fflate.gunzipSync(new Uint8Array(req.files.CompressedFile.data));
  fflate.decompress(new Uint8Array(req.files.CompressedFile.data));
  fflate.decompressSync(new Uint8Array(req.files.CompressedFile.data));
  // OK
  fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.unzip(new Uint8Array(new Uint8Array(req.files.CompressedFile.data)), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.unzlib(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.unzlibSync(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.gunzip(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.gunzipSync(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.decompress(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
  fflate.decompressSync(new Uint8Array(req.files.CompressedFile.data), {
    filter(file) {
      return file.originalSize <= 1_000_000;
    }
  });
});

View File

@@ -0,0 +1,14 @@
// CodeQL test fixture: decompression bomb via the "gunzip-maybe" package.
const gunzipmaybe = require("gunzip-maybe");
const express = require('express')
const fileUpload = require("express-fileupload");
const { Readable } = require('stream');
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
  // Unsafe
  const RemoteStream = Readable.from(req.files.ZipFile.data);
  // NOTE(review): `gunzipmaybe` is passed uncalled (the factory is normally
  // invoked as `gunzipmaybe()`), and readable streams have no
  // `createWriteStream` method — confirm this is the shape the query
  // model is meant to match before changing it.
  RemoteStream.pipe(gunzipmaybe).createWriteStream("tmp")
});

View File

@@ -0,0 +1,44 @@
// CodeQL test fixture: decompression bomb via the "jszip" package.
// zipBombSafe checks the entry's uncompressed size before reading;
// zipBomb reads entries with no size check.
const jszipp = require("jszip");
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
  console.log(`Example app listening on port ${port}`)
});
app.post('/upload', (req, res) => {
  // Untrusted source: the uploaded archive bytes.
  // NOTE(review): these pass the data Buffer, yet both callees read
  // `zipFile.data` again — confirm the intended argument shape.
  zipBomb(req.files.zipBombFile.data)
  zipBombSafe(req.files.zipBombFile.data)
  res.send("OK")
});
// Guarded variant: refuses entries whose uncompressed size exceeds 8 MiB.
function zipBombSafe(zipFile) {
  jszipp.loadAsync(zipFile.data).then(function (zip) {
    if (zip.file("10GB")["_data"]["uncompressedSize"] > 1024 * 1024 * 8) {
      console.log("error")
      return
    }
    zip.files["10GB"].async("uint8array").then(function (u8) {
      console.log(u8);
    });
    zip.file("10GB").async("uint8array").then(function (u8) {
      console.log(u8);
    });
  });
}
// Unguarded variant: reads entries with no size limit.
function zipBomb(zipFile) {
  jszipp.loadAsync(zipFile.data).then(function (zip) {
    zip.files["10GB"].async("uint8array").then(function (u8) {
      console.log(u8);
    });
    zip.file("10GB").async("uint8array").then(function (u8) {
      console.log(u8);
    });
  });
}
// Fixed: previously exported `localZipLoad`, an identifier that is not defined
// anywhere in this module, which made require()-ing the file throw a
// ReferenceError. Export the two functions that actually exist.
module.exports = { zipBomb, zipBombSafe };

View File

@@ -0,0 +1,67 @@
// CodeQL test fixture: decompression bombs via the "tar" package.
// Each scenario extracts an attacker-controlled archive; the variants marked
// "safe" bound the amount of data read with the `maxReadSize` option.
const tar = require("tar");
const express = require('express')
const fileUpload = require("express-fileupload");
// Fixed: `writeFileSync` was incorrectly destructured from "stream" — it is an
// fs API, the binding was always `undefined` and unused, so it is removed.
const { Readable } = require("stream");
const fs = require("fs");
const { createGunzip } = require("zlib");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
  console.log(`Example app listening on port ${port}`)
});
// Untrusted source: the uploaded archive bytes.
app.post('/upload', (req, res) => {
  zipBomb(req.files.zipBombFile.data)
  res.send('Hello World!')
});
// NOTE(review): called with the raw data Buffer, but the body reads
// `tarFile.data`/`tarFile.name` as if given the whole uploaded-file object —
// confirm which shape the fixture intends.
function zipBomb(tarFile) {
  // scenario 1: pipe the upload straight through tar.x() — no size limit.
  const inputFile = Readable.from(tarFile.data);
  const outputFile = fs.createWriteStream('/tmp/untar');
  inputFile.pipe(
    tar.x()
  ).pipe(outputFile);
  // scenario 2: write the upload to disk, then extract it — no size limit.
  fs.writeFileSync(tarFile.name, tarFile.data);
  fs.createReadStream(tarFile.name).pipe(
    tar.x({
      strip: 1,
      C: 'some-dir'
    })
  )
  // safe https://github.com/isaacs/node-tar/blob/8c5af15e43a769fd24aa7f1c84d93e54824d19d2/lib/list.js#L90
  fs.createReadStream(tarFile.name).pipe(
    tar.x({
      strip: 1,
      C: 'some-dir',
      maxReadSize: 16 * 1024 * 1024 // 16 MB
    })
  )
  // scenario 3: gunzip first, then untar — still unbounded.
  const decompressor = createGunzip();
  fs.createReadStream(tarFile.name).pipe(
    decompressor
  ).pipe(
    tar.x({
      cwd: "dest"
    })
  )
  // scenario 4: extract by file name.
  fs.writeFileSync(tarFile.name, tarFile.data);
  // or using fs.writeFile
  // The file path is a temporary name that may come back from a database after
  // a remote upload, so the input file name can originate from a DB source.
  tar.x({ file: tarFile.name })
  tar.extract({ file: tarFile.name })
  // safe https://github.com/isaacs/node-tar/blob/8c5af15e43a769fd24aa7f1c84d93e54824d19d2/lib/list.js#L90
  tar.x({
    file: tarFile.name,
    strip: 1,
    C: 'some-dir',
    maxReadSize: 16 * 1024 * 1024 // 16 MB
  })
}

View File

@@ -0,0 +1,37 @@
// CodeQL test fixture: decompression bombs via the "pako" package.
const pako = require('pako');
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
const port = 3000;
app.use(fileUpload());
app.listen(port, () => {
  console.log(`Example app listening on port ${port}`)
});
app.post('/upload', (req, res) => {
  // Untrusted source: the uploaded compressed bytes.
  // NOTE(review): these pass the data Buffer, yet both callees read
  // `zipFile.data` again — confirm the intended argument shape.
  zipBomb1(req.files.zipBombFile.data);
  zipBomb2(req.files.zipBombFile.data);
  res.send('Hello World!');
});
// Inflates attacker-controlled data (wrapped in a Buffer) with no size limit.
function zipBomb1(zipFile) {
  const myArray = Buffer.from(new Uint8Array(zipFile.data.buffer));
  let output;
  try {
    output = pako.inflate(myArray);
    console.log(output);
  } catch (err) {
    console.log(err);
  }
}
// Inflates attacker-controlled data (as a raw ArrayBuffer) with no size limit.
function zipBomb2(zipFile) {
  const myArray = new Uint8Array(zipFile.data.buffer).buffer;
  let output;
  try {
    output = pako.inflate(myArray);
    console.log(output);
  } catch (err) {
    console.log(err);
  }
}

View File

@@ -0,0 +1,13 @@
// CodeQL test fixture: decompression bomb via the "unbzip2-stream" package.
var bz2 = require('unbzip2-stream');
var fs = require('fs');
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
// Streams a user-controlled file path ("FilePath" query parameter) through
// bzip2 decompression with no limit on the decompressed size.
app.post('/upload', async (req, res) => {
  fs.createReadStream(req.query.FilePath).pipe(bz2()).pipe(process.stdout);
});

View File

@@ -0,0 +1,26 @@
// CodeQL test fixture: decompression bomb via the "unzip" package.
const unzip = require("unzip");
const { createWriteStream } = require("fs");
const express = require('express')
const fileUpload = require("express-fileupload");
const { Readable } = require("stream");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
  // Untrusted source: the uploaded archive bytes.
  const InputStream = Readable.from(req.files.ZipFile.data);
  // Guarded parse: rejects entries whose uncompressed size exceeds 1 KiB.
  InputStream.pipe(unzip.Parse())
    .on('entry', function (entry) {
      if (entry.uncompressedSize > 1024) {
        throw "uncompressed size exceed"
      }
    });
  // Unguarded parse: pipes the archive contents out with no size check.
  let writeStream = createWriteStream('output/path');
  InputStream
    .pipe(unzip.Parse())
    .pipe(writeStream)
});

View File

@@ -0,0 +1,106 @@
// CodeQL test fixture: decompression bombs via the "unzipper" package.
// Variants marked "Safe" check entry.vars.uncompressedSize before extracting;
// the "Unsafe" variants extract attacker-controlled archives without a bound.
const unzipper = require("unzipper");
const express = require('express')
const fileUpload = require("express-fileupload");
const { Readable } = require('stream');
const { createWriteStream, readFileSync } = require("fs");
const stream = require("node:stream");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', async (req, res) => {
  // Untrusted source: the uploaded archive bytes.
  const RemoteStream = Readable.from(req.files.ZipFile.data);
  // Unsafe
  RemoteStream.pipe(unzipper.Extract({ path: 'output/path' }));
  // Unsafe
  RemoteStream.pipe(unzipper.ParseOne())
    .pipe(createWriteStream('firstFile.txt'));
  // Safe because of uncompressedSize
  RemoteStream
    .pipe(unzipper.Parse())
    .on('entry', function (entry) {
      const size = entry.vars.uncompressedSize;
      if (size < 1024 * 1024 * 1024) {
        entry.pipe(createWriteStream('output/path'));
      }
    });
  // Unsafe
  RemoteStream
    .pipe(unzipper.Parse())
    .on('entry', function (entry) {
      const size = entry.vars.uncompressedSize;
      entry.pipe(createWriteStream('output/path'));
    });
  // Unsafe
  const zip = RemoteStream.pipe(unzipper.Parse({ forceStream: true }));
  for await (const entry of zip) {
    const fileName = entry.path;
    if (fileName === "this IS the file I'm looking for") {
      entry.pipe(createWriteStream('output/path'));
    } else {
      entry.autodrain();
    }
  }
  // Safe
  const zip2 = RemoteStream.pipe(unzipper.Parse({ forceStream: true }));
  for await (const entry of zip2) {
    const size = entry.vars.uncompressedSize;
    if (size < 1024 * 1024 * 1024) {
      entry.pipe(createWriteStream('output/path'));
    }
  }
  // Safe because of uncompressedSize
  RemoteStream.pipe(unzipper.Parse())
    .pipe(stream.Transform({
      objectMode: true,
      transform: function (entry, e, cb) {
        const size = entry.vars.uncompressedSize; // There is also compressedSize;
        if (size < 1024 * 1024 * 1024) {
          entry.pipe(createWriteStream('output/path'))
            .on('finish', cb);
        }
      }
    }));
  // Unsafe
  RemoteStream.pipe(unzipper.Parse())
    .pipe(stream.Transform({
      objectMode: true,
      transform: function (entry, e, cb) {
        entry.pipe(createWriteStream('output/path'))
          .on('finish', cb);
      }
    }));
  let directory = await unzipper.Open.file('path/to/archive.zip');
  new Promise((resolve, reject) => {
    directory.files[0]
      .stream()
      // Fixed: this was `fs.createWriteStream(...)`, but `fs` is never
      // imported in this file (a ReferenceError at runtime); use the
      // destructured `createWriteStream` binding instead.
      .pipe(createWriteStream('firstFile'))
      .on('error', reject)
      .on('finish', resolve)
  });
  const request = require('request');
  // Unsafe
  directory = await unzipper.Open.url(request, 'http://example.com/example.zip');
  const file = directory.files.find(d => d.path === 'example.xml');
  await file.buffer();
  // Unsafe
  // Fixed: the untrusted path comes from the Express request `req`; the
  // previous `request.query.FilePath` read `.query` off the `request`
  // HTTP-client module, which is undefined.
  const buffer = readFileSync(req.query.FilePath);
  directory = await unzipper.Open.buffer(buffer);
  directory.files[0].buffer();
  // Unsafe
  unzipper.Open.file(req.query.FilePath)
    .then(d => d.extract({ path: '/extraction/path', concurrency: 5 }));
});

View File

@@ -0,0 +1,54 @@
// CodeQL test fixture: decompression bombs via the "yauzl" package.
// The "Safe" variant checks entry.uncompressedSize before piping the entry's
// read stream to disk; the "Unsafe" variant performs no such check.
const { pipeline } = require('stream/promises');
const yauzl = require("yauzl");
const fs = require("fs");
const express = require('express')
const fileUpload = require("express-fileupload");
const app = express();
app.use(fileUpload());
app.listen(3000, () => {
});
app.post('/upload', (req, res) => {
  // Untrusted data fed straight into yauzl's open helpers.
  yauzl.fromFd(req.files.zipFile.data)
  yauzl.fromBuffer(req.files.zipFile.data)
  yauzl.fromRandomAccessReader(req.files.zipFile.data)
  // Safe
  yauzl.open(req.query.filePath, { lazyEntries: true }, function (err, zipfile) {
    if (err) throw err;
    zipfile.readEntry();
    zipfile.on("entry", function (entry) {
      zipfile.openReadStream(entry, async function (err, readStream) {
        if (err) throw err;
        // Size guard: refuse entries whose uncompressed size exceeds 1 GiB.
        if (entry.uncompressedSize > 1024 * 1024 * 1024) {
          throw err
        }
        readStream.on("end", function () {
          zipfile.readEntry();
        });
        const outputFile = fs.createWriteStream('testiness');
        await pipeline(
          readStream,
          outputFile
        )
      });
    });
  });
  // Unsafe
  yauzl.open(req.query.filePath, { lazyEntries: true }, function (err, zipfile) {
    if (err) throw err;
    zipfile.readEntry();
    zipfile.on("entry", function (entry) {
      zipfile.openReadStream(entry, async function (err, readStream) {
        readStream.on("end", function () {
          zipfile.readEntry();
        });
        const outputFile = fs.createWriteStream('testiness');
        await pipeline(
          readStream,
          outputFile
        )
      });
    });
  });
  res.send("OK")
});

View File

@@ -0,0 +1,98 @@
// CodeQL test fixture: decompression bombs via node's built-in zlib module.
// The *Safe variants bound the decompressed size with the `maxOutputLength`
// option; the other variants decompress attacker-controlled data unboundedly.
const fs = require("fs");
const zlib = require("node:zlib");
const { Readable } = require('stream');
const express = require('express');
const fileUpload = require("express-fileupload");
const app = express();
const port = 3000;
const stream = require('stream/promises');
app.use(fileUpload());
app.listen(port, () => {
  console.log(`Example app listening on port ${port}`)
});
app.post('/upload', async (req, res) => {
  // Untrusted source: the uploaded compressed bytes.
  // NOTE(review): these pass the data Buffer, yet the callees read
  // `zipFile.data` again — confirm the intended argument shape.
  zlibBombAsync(req.files.zipBombFile.data)
  zlibBombAsyncSafe(req.files.zipBombFile.data);
  zlibBombSync(req.files.zipBombFile.data)
  zlibBombSyncSafe(req.files.zipBombFile.data)
  zlibBombPipeStream(req.files.zipBombFile.data)
  zlibBombPipeStreamSafe(req.files.zipBombFile.data)
  zlibBombPipeStreamPromises(req.files.zipBombFile.data).then(r =>
    console.log("done")); // fixed log-message typo: was "sone"
  res.send('Hello World!')
});
// Callback-style decompression with no output-size limit.
function zlibBombAsync(zipFile) {
  zlib.gunzip(
    zipFile.data,
    (err, buffer) => {
    });
  zlib.unzip(
    zipFile.data,
    (err, buffer) => {
    });
  zlib.brotliDecompress(
    zipFile.data,
    (err, buffer) => {
    });
}
// Callback-style decompression bounded to 5 MB via maxOutputLength.
function zlibBombAsyncSafe(zipFile) {
  zlib.gunzip(
    zipFile.data,
    { maxOutputLength: 1024 * 1024 * 5 },
    (err, buffer) => {
    });
  zlib.unzip(
    zipFile.data,
    { maxOutputLength: 1024 * 1024 * 5 },
    (err, buffer) => {
    });
  zlib.brotliDecompress(
    zipFile.data,
    { maxOutputLength: 1024 * 1024 * 5 },
    (err, buffer) => {
    });
}
// Synchronous decompression with no output-size limit.
function zlibBombSync(zipFile) {
  zlib.gunzipSync(zipFile.data, { finishFlush: zlib.constants.Z_SYNC_FLUSH });
  zlib.unzipSync(zipFile.data);
  zlib.brotliDecompressSync(zipFile.data);
}
// Synchronous decompression bounded to 5 MB via maxOutputLength.
function zlibBombSyncSafe(zipFile) {
  zlib.gunzipSync(zipFile.data, { finishFlush: zlib.constants.Z_SYNC_FLUSH, maxOutputLength: 1024 * 1024 * 5 });
  zlib.unzipSync(zipFile.data, { maxOutputLength: 1024 * 1024 * 5 });
  zlib.brotliDecompressSync(zipFile.data, { maxOutputLength: 1024 * 1024 * 5 });
}
// Stream-pipe decompression with no output-size limit.
function zlibBombPipeStream(zipFile) {
  const inputStream = Readable.from(zipFile.data);
  const outputFile = fs.createWriteStream('unzip.txt');
  inputStream.pipe(zlib.createGunzip()).pipe(outputFile);
  inputStream.pipe(zlib.createUnzip()).pipe(outputFile);
  inputStream.pipe(zlib.createBrotliDecompress()).pipe(outputFile);
}
// Promise-pipeline decompression with no output-size limit.
async function zlibBombPipeStreamPromises(zipFile) {
  const inputStream = Readable.from(zipFile.data);
  const outputFile = fs.createWriteStream('unzip.txt');
  await stream.pipeline(
    inputStream,
    zlib.createGunzip(),
    outputFile
  )
}
// Stream-pipe decompression bounded to 5 MB via maxOutputLength.
function zlibBombPipeStreamSafe(zipFile) {
  const inputFile = Readable.from(zipFile.data);
  const outputFile = fs.createWriteStream('unzip.txt');
  inputFile.pipe(zlib.createGunzip({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);
  inputFile.pipe(zlib.createUnzip({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);
  inputFile.pipe(zlib.createBrotliDecompress({ maxOutputLength: 1024 * 1024 * 5 })).pipe(outputFile);
}

View File

@@ -153,12 +153,12 @@ nodes
| HardcodedCredentials.js:135:41:135:50 | "hgfedcba" |
| HardcodedCredentials.js:135:41:135:50 | "hgfedcba" |
| HardcodedCredentials.js:135:41:135:50 | "hgfedcba" |
| HardcodedCredentials.js:160:38:160:48 | "change_me" |
| HardcodedCredentials.js:160:38:160:48 | "change_me" |
| HardcodedCredentials.js:160:38:160:48 | "change_me" |
| HardcodedCredentials.js:161:41:161:51 | 'change_me' |
| HardcodedCredentials.js:161:41:161:51 | 'change_me' |
| HardcodedCredentials.js:161:41:161:51 | 'change_me' |
| HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" |
| HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" |
| HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" |
| HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' |
| HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' |
| HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' |
| HardcodedCredentials.js:164:35:164:45 | 'change_me' |
| HardcodedCredentials.js:164:35:164:45 | 'change_me' |
| HardcodedCredentials.js:164:35:164:45 | 'change_me' |
@@ -271,6 +271,18 @@ nodes
| HardcodedCredentials.js:295:37:295:66 | `Basic ... 000001` |
| HardcodedCredentials.js:295:37:295:66 | `Basic ... 000001` |
| HardcodedCredentials.js:295:37:295:66 | `Basic ... 000001` |
| HardcodedCredentials.js:299:44:299:52 | 'mytoken' |
| HardcodedCredentials.js:299:44:299:52 | 'mytoken' |
| HardcodedCredentials.js:299:44:299:52 | 'mytoken' |
| HardcodedCredentials.js:300:44:300:56 | 'SampleToken' |
| HardcodedCredentials.js:300:44:300:56 | 'SampleToken' |
| HardcodedCredentials.js:300:44:300:56 | 'SampleToken' |
| HardcodedCredentials.js:301:44:301:55 | 'MyPassword' |
| HardcodedCredentials.js:301:44:301:55 | 'MyPassword' |
| HardcodedCredentials.js:301:44:301:55 | 'MyPassword' |
| HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' |
| HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' |
| HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' |
edges
| HardcodedCredentials.js:5:15:5:22 | 'dbuser' | HardcodedCredentials.js:5:15:5:22 | 'dbuser' |
| HardcodedCredentials.js:8:19:8:28 | 'hgfedcba' | HardcodedCredentials.js:8:19:8:28 | 'hgfedcba' |
@@ -326,8 +338,8 @@ edges
| HardcodedCredentials.js:130:44:130:53 | 'hgfedcba' | HardcodedCredentials.js:130:44:130:53 | 'hgfedcba' |
| HardcodedCredentials.js:131:52:131:61 | 'hgfedcba' | HardcodedCredentials.js:131:52:131:61 | 'hgfedcba' |
| HardcodedCredentials.js:135:41:135:50 | "hgfedcba" | HardcodedCredentials.js:135:41:135:50 | "hgfedcba" |
| HardcodedCredentials.js:160:38:160:48 | "change_me" | HardcodedCredentials.js:160:38:160:48 | "change_me" |
| HardcodedCredentials.js:161:41:161:51 | 'change_me' | HardcodedCredentials.js:161:41:161:51 | 'change_me' |
| HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" | HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" |
| HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' | HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' |
| HardcodedCredentials.js:164:35:164:45 | 'change_me' | HardcodedCredentials.js:164:35:164:45 | 'change_me' |
| HardcodedCredentials.js:171:11:171:25 | USER | HardcodedCredentials.js:173:35:173:38 | USER |
| HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:171:11:171:25 | USER |
@@ -399,6 +411,10 @@ edges
| HardcodedCredentials.js:293:37:293:65 | `Basic ... xxxxxx` | HardcodedCredentials.js:293:37:293:65 | `Basic ... xxxxxx` |
| HardcodedCredentials.js:294:37:294:70 | `Basic ... gbbbbb` | HardcodedCredentials.js:294:37:294:70 | `Basic ... gbbbbb` |
| HardcodedCredentials.js:295:37:295:66 | `Basic ... 000001` | HardcodedCredentials.js:295:37:295:66 | `Basic ... 000001` |
| HardcodedCredentials.js:299:44:299:52 | 'mytoken' | HardcodedCredentials.js:299:44:299:52 | 'mytoken' |
| HardcodedCredentials.js:300:44:300:56 | 'SampleToken' | HardcodedCredentials.js:300:44:300:56 | 'SampleToken' |
| HardcodedCredentials.js:301:44:301:55 | 'MyPassword' | HardcodedCredentials.js:301:44:301:55 | 'MyPassword' |
| HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' | HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' |
#select
| HardcodedCredentials.js:5:15:5:22 | 'dbuser' | HardcodedCredentials.js:5:15:5:22 | 'dbuser' | HardcodedCredentials.js:5:15:5:22 | 'dbuser' | The hard-coded value "dbuser" is used as $@. | HardcodedCredentials.js:5:15:5:22 | 'dbuser' | user name |
| HardcodedCredentials.js:8:19:8:28 | 'hgfedcba' | HardcodedCredentials.js:8:19:8:28 | 'hgfedcba' | HardcodedCredentials.js:8:19:8:28 | 'hgfedcba' | The hard-coded value "hgfedcba" is used as $@. | HardcodedCredentials.js:8:19:8:28 | 'hgfedcba' | password |
@@ -448,8 +464,8 @@ edges
| HardcodedCredentials.js:130:44:130:53 | 'hgfedcba' | HardcodedCredentials.js:130:44:130:53 | 'hgfedcba' | HardcodedCredentials.js:130:44:130:53 | 'hgfedcba' | The hard-coded value "hgfedcba" is used as $@. | HardcodedCredentials.js:130:44:130:53 | 'hgfedcba' | key |
| HardcodedCredentials.js:131:52:131:61 | 'hgfedcba' | HardcodedCredentials.js:131:52:131:61 | 'hgfedcba' | HardcodedCredentials.js:131:52:131:61 | 'hgfedcba' | The hard-coded value "hgfedcba" is used as $@. | HardcodedCredentials.js:131:52:131:61 | 'hgfedcba' | key |
| HardcodedCredentials.js:135:41:135:50 | "hgfedcba" | HardcodedCredentials.js:135:41:135:50 | "hgfedcba" | HardcodedCredentials.js:135:41:135:50 | "hgfedcba" | The hard-coded value "hgfedcba" is used as $@. | HardcodedCredentials.js:135:41:135:50 | "hgfedcba" | key |
| HardcodedCredentials.js:160:38:160:48 | "change_me" | HardcodedCredentials.js:160:38:160:48 | "change_me" | HardcodedCredentials.js:160:38:160:48 | "change_me" | The hard-coded value "change_me" is used as $@. | HardcodedCredentials.js:160:38:160:48 | "change_me" | key |
| HardcodedCredentials.js:161:41:161:51 | 'change_me' | HardcodedCredentials.js:161:41:161:51 | 'change_me' | HardcodedCredentials.js:161:41:161:51 | 'change_me' | The hard-coded value "change_me" is used as $@. | HardcodedCredentials.js:161:41:161:51 | 'change_me' | key |
| HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" | HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" | HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" | The hard-coded value "oiuneawrgiyubaegr" is used as $@. | HardcodedCredentials.js:160:38:160:56 | "oiuneawrgiyubaegr" | key |
| HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' | HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' | HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' | The hard-coded value "oiuneawrgiyubaegr" is used as $@. | HardcodedCredentials.js:161:41:161:59 | 'oiuneawrgiyubaegr' | key |
| HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:178:30:178:44 | `Basic ${AUTH}` | The hard-coded value "sdsdag" is used as $@. | HardcodedCredentials.js:178:30:178:44 | `Basic ${AUTH}` | authorization header |
| HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:188:30:188:44 | `Basic ${AUTH}` | The hard-coded value "sdsdag" is used as $@. | HardcodedCredentials.js:188:30:188:44 | `Basic ${AUTH}` | authorization header |
| HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:171:18:171:25 | 'sdsdag' | HardcodedCredentials.js:195:37:195:51 | `Basic ${AUTH}` | The hard-coded value "sdsdag" is used as $@. | HardcodedCredentials.js:195:37:195:51 | `Basic ${AUTH}` | authorization header |
@@ -464,3 +480,4 @@ edges
| HardcodedCredentials.js:245:22:245:44 | "myHard ... ateKey" | HardcodedCredentials.js:245:22:245:44 | "myHard ... ateKey" | HardcodedCredentials.js:246:42:246:51 | privateKey | The hard-coded value "myHardCodedPrivateKey" is used as $@. | HardcodedCredentials.js:246:42:246:51 | privateKey | key |
| HardcodedCredentials.js:292:37:292:57 | `Basic ... sdsdag` | HardcodedCredentials.js:292:37:292:57 | `Basic ... sdsdag` | HardcodedCredentials.js:292:37:292:57 | `Basic ... sdsdag` | The hard-coded value "Basic sdsdag:sdsdag" is used as $@. | HardcodedCredentials.js:292:37:292:57 | `Basic ... sdsdag` | authorization header |
| HardcodedCredentials.js:294:37:294:70 | `Basic ... gbbbbb` | HardcodedCredentials.js:294:37:294:70 | `Basic ... gbbbbb` | HardcodedCredentials.js:294:37:294:70 | `Basic ... gbbbbb` | The hard-coded value "Basic sdsdag:aaaiuogrweuibgbbbbb" is used as $@. | HardcodedCredentials.js:294:37:294:70 | `Basic ... gbbbbb` | authorization header |
| HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' | HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' | HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' | The hard-coded value "iubfewiaaweiybgaeuybgera" is used as $@. | HardcodedCredentials.js:302:44:302:69 | 'iubfew ... ybgera' | key |

View File

@@ -157,8 +157,8 @@
})();
(function(){
require("cookie-session")({ secret: "change_me" }); // NOT OK
require('crypto').createHmac('sha256', 'change_me'); // NOT OK
require("cookie-session")({ secret: "oiuneawrgiyubaegr" }); // NOT OK
require('crypto').createHmac('sha256', 'oiuneawrgiyubaegr'); // NOT OK
var basicAuth = require('express-basic-auth');
basicAuth({users: { [adminName]: 'change_me' }}); // OK
@@ -294,3 +294,10 @@
headers.append("Authorization", `Basic sdsdag:aaaiuogrweuibgbbbbb`); // NOT OK
headers.append("Authorization", `Basic sdsdag:000000000000001`); // OK
});
(function () {
require('crypto').createHmac('sha256', 'mytoken'); // OK
require('crypto').createHmac('sha256', 'SampleToken'); // OK
require('crypto').createHmac('sha256', 'MyPassword'); // OK
require('crypto').createHmac('sha256', 'iubfewiaaweiybgaeuybgera'); // NOT OK
})();