Merge branch 'main' into HEAD

This commit is contained in:
erik-krogh
2023-03-13 17:33:06 +01:00
2369 changed files with 118381 additions and 203230 deletions

BIN
ruby/Cargo.lock generated

Binary file not shown.

View File

@@ -11,8 +11,21 @@ file_types:
- name: ruby
display_name: Ruby files
extensions:
- .rb
- .rb
- name: erb
display_name: Ruby templates
extensions:
- .erb
- .erb
options:
trap:
title: Options pertaining to TRAP.
type: object
properties:
compression:
title: Controls compression for the TRAP files written by the extractor.
description: >
This option is only intended for use in debugging the extractor. Accepted
values are 'gzip' (the default, to write gzip-compressed TRAP) and 'none'
(to write uncompressed TRAP).
type: string
pattern: "^(none|gzip)$"

View File

@@ -5,4 +5,3 @@ class AstNode extends @ruby_ast_node {
from AstNode ruby_do_block, AstNode body, int index, AstNode child
where ruby_do_block_body(ruby_do_block, body) and ruby_body_statement_child(body, index, child)
select ruby_do_block, index, child

View File

@@ -5,4 +5,3 @@ class AstNode extends @ruby_ast_node {
from AstNode ruby_module, AstNode body, int index, AstNode child
where ruby_module_body(ruby_module, body) and ruby_body_statement_child(body, index, child)
select ruby_module, index, child

View File

@@ -3,6 +3,7 @@ class AstNode extends @ruby_ast_node {
}
from AstNode ruby_singleton_class, AstNode body, int index, AstNode child
where ruby_singleton_class_body(ruby_singleton_class, body) and ruby_body_statement_child(body, index, child)
where
ruby_singleton_class_body(ruby_singleton_class, body) and
ruby_body_statement_child(body, index, child)
select ruby_singleton_class, index, child

View File

@@ -88,7 +88,7 @@ pub struct LogWriter {
}
impl LogWriter {
pub fn message(&self, id: &str, name: &str) -> DiagnosticMessage {
pub fn new_entry(&self, id: &str, name: &str) -> DiagnosticMessage {
DiagnosticMessage {
timestamp: chrono::Utc::now(),
source: Source {
@@ -199,9 +199,37 @@ impl DiagnosticLoggers {
}
}
fn longest_backtick_sequence_length(text: &str) -> usize {
let mut result = 0;
let mut count = 0;
for c in text.chars() {
if c == '`' {
count += 1;
} else {
if count > result {
result = count;
}
count = 0;
}
}
result
}
/// An argument of a diagnostic message format string.
/// A message argument is either a "code" snippet or a link.
pub enum MessageArg<'a> {
Code(&'a str),
Link(&'a str, &'a str),
}
impl DiagnosticMessage {
pub fn full_error_message(&self) -> String {
match &self.location {
Some(Location {
file: Some(path),
start_line: None,
..
}) => format!("{}: {}", path, self.plaintext_message),
Some(Location {
file: Some(path),
start_line: Some(line),
@@ -211,12 +239,51 @@ impl DiagnosticMessage {
}
}
pub fn text(&mut self, text: &str) -> &mut Self {
fn text(&mut self, text: &str) -> &mut Self {
self.plaintext_message = text.to_owned();
self
}
#[allow(unused)]
pub fn message(&mut self, text: &str, args: &[MessageArg]) -> &mut Self {
let parts = text.split("{}");
let mut plain = String::with_capacity(2 * text.len());
let mut markdown = String::with_capacity(2 * text.len());
for (i, p) in parts.enumerate() {
plain.push_str(p);
markdown.push_str(p);
match args.get(i) {
Some(MessageArg::Code(t)) => {
plain.push_str(t);
if t.len() > 0 {
let count = longest_backtick_sequence_length(t) + 1;
markdown.push_str(&"`".repeat(count));
if count > 1 {
markdown.push_str(" ");
}
markdown.push_str(t);
if count > 1 {
markdown.push_str(" ");
}
markdown.push_str(&"`".repeat(count));
}
}
Some(MessageArg::Link(text, url)) => {
plain.push_str(text);
self.help_link(url);
markdown.push_str("[");
markdown.push_str(text);
markdown.push_str("](");
markdown.push_str(url);
markdown.push_str(")");
}
None => {}
}
}
self.text(&plain);
self.markdown(&markdown);
self
}
pub fn markdown(&mut self, text: &str) -> &mut Self {
self.markdown_message = text.to_owned();
self
@@ -249,6 +316,11 @@ impl DiagnosticMessage {
self.visibility.telemetry = true;
self
}
pub fn file(&mut self, path: &str) -> &mut Self {
let loc = self.location.get_or_insert(Default::default());
loc.file = Some(path.to_owned());
self
}
pub fn location(
&mut self,
path: &str,
@@ -266,3 +338,26 @@ impl DiagnosticMessage {
self
}
}
#[test]
fn test_message() {
let mut m = DiagnosticLoggers::new("foo")
.logger()
.new_entry("id", "name");
m.message("hello: {}", &[MessageArg::Code("hello")]);
assert_eq!("hello: hello", m.plaintext_message);
assert_eq!("hello: `hello`", m.markdown_message);
let mut m = DiagnosticLoggers::new("foo")
.logger()
.new_entry("id", "name");
m.message(
"hello with backticks: {}",
&[MessageArg::Code("oh `hello`!")],
);
assert_eq!("hello with backticks: oh `hello`!", m.plaintext_message);
assert_eq!(
"hello with backticks: `` oh `hello`! ``",
m.markdown_message
);
}

View File

@@ -1,4 +1,5 @@
use crate::diagnostics;
use crate::file_paths;
use crate::trap;
use node_types::{EntryKind, Field, NodeTypeMap, Storage, TypeName};
use std::collections::BTreeMap as Map;
@@ -9,14 +10,15 @@ use std::path::Path;
use tree_sitter::{Language, Node, Parser, Range, Tree};
pub fn populate_file(writer: &mut trap::Writer, absolute_path: &Path) -> trap::Label {
let (file_label, fresh) =
writer.global_id(&trap::full_id_for_file(&normalize_path(absolute_path)));
let (file_label, fresh) = writer.global_id(&trap::full_id_for_file(
&file_paths::normalize_path(absolute_path),
));
if fresh {
writer.add_tuple(
"files",
vec![
trap::Arg::Label(file_label),
trap::Arg::String(normalize_path(absolute_path)),
trap::Arg::String(file_paths::normalize_path(absolute_path)),
],
);
populate_parent_folders(writer, file_label, absolute_path.parent());
@@ -54,8 +56,9 @@ pub fn populate_parent_folders(
match path {
None => break,
Some(folder) => {
let (folder_label, fresh) =
writer.global_id(&trap::full_id_for_folder(&normalize_path(folder)));
let (folder_label, fresh) = writer.global_id(&trap::full_id_for_folder(
&file_paths::normalize_path(folder),
));
writer.add_tuple(
"containerparent",
vec![
@@ -68,7 +71,7 @@ pub fn populate_parent_folders(
"folders",
vec![
trap::Arg::Label(folder_label),
trap::Arg::String(normalize_path(folder)),
trap::Arg::String(file_paths::normalize_path(folder)),
],
);
path = folder.parent();
@@ -119,8 +122,8 @@ pub fn extract(
path: &Path,
source: &[u8],
ranges: &[Range],
) -> std::io::Result<()> {
let path_str = format!("{}", path.display());
) {
let path_str = file_paths::normalize_path(&path);
let span = tracing::span!(
tracing::Level::TRACE,
"extract",
@@ -150,46 +153,6 @@ pub fn extract(
traverse(&tree, &mut visitor);
parser.reset();
Ok(())
}
/// Normalizes the path according the common CodeQL specification. Assumes that
/// `path` has already been canonicalized using `std::fs::canonicalize`.
fn normalize_path(path: &Path) -> String {
if cfg!(windows) {
// The way Rust canonicalizes paths doesn't match the CodeQL spec, so we
// have to do a bit of work removing certain prefixes and replacing
// backslashes.
let mut components: Vec<String> = Vec::new();
for component in path.components() {
match component {
std::path::Component::Prefix(prefix) => match prefix.kind() {
std::path::Prefix::Disk(letter) | std::path::Prefix::VerbatimDisk(letter) => {
components.push(format!("{}:", letter as char));
}
std::path::Prefix::Verbatim(x) | std::path::Prefix::DeviceNS(x) => {
components.push(x.to_string_lossy().to_string());
}
std::path::Prefix::UNC(server, share)
| std::path::Prefix::VerbatimUNC(server, share) => {
components.push(server.to_string_lossy().to_string());
components.push(share.to_string_lossy().to_string());
}
},
std::path::Component::Normal(n) => {
components.push(n.to_string_lossy().to_string());
}
std::path::Component::RootDir => {}
std::path::Component::CurDir => {}
std::path::Component::ParentDir => {}
}
}
components.join("/")
} else {
// For other operating systems, we can use the canonicalized path
// without modifications.
format!("{}", path.display())
}
}
struct ChildNode {
@@ -274,7 +237,13 @@ impl<'a> Visitor<'a> {
);
}
fn record_parse_error_for_node(&mut self, error_message: String, node: Node) {
fn record_parse_error_for_node(
&mut self,
message: &str,
args: &[diagnostics::MessageArg],
node: Node,
status_page: bool,
) {
let (start_line, start_column, end_line, end_column) = location_for(self, node);
let loc = location(
self.trap_writer,
@@ -284,26 +253,38 @@ impl<'a> Visitor<'a> {
end_line,
end_column,
);
self.record_parse_error(
loc,
self.diagnostics_writer
.message("parse-error", "Parse error")
.severity(diagnostics::Severity::Error)
.location(self.path, start_line, start_column, end_line, end_column)
.text(&error_message),
);
let mut mesg = self
.diagnostics_writer
.new_entry("parse-error", "Parse error");
&mesg
.severity(diagnostics::Severity::Warning)
.location(self.path, start_line, start_column, end_line, end_column)
.message(message, args);
if status_page {
&mesg.status_page();
}
self.record_parse_error(loc, &mesg);
}
fn enter_node(&mut self, node: Node) -> bool {
if node.is_error() || node.is_missing() {
let error_message = if node.is_missing() {
format!("parse error: expecting '{}'", node.kind())
} else {
"parse error".to_string()
};
self.record_parse_error_for_node(error_message, node);
if node.is_missing() {
self.record_parse_error_for_node(
"A parse error occurred (expected {} symbol). Check the syntax of the file. If the file is invalid, correct the error or {} the file from analysis.",
&[diagnostics::MessageArg::Code(node.kind()), diagnostics::MessageArg::Link("exclude", "https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning")],
node,
true,
);
return false;
}
if node.is_error() {
self.record_parse_error_for_node(
"A parse error occurred. Check the syntax of the file. If the file is invalid, correct the error or {} the file from analysis.",
&[diagnostics::MessageArg::Link("exclude", "https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning")],
node,
true,
);
return false;
};
let id = self.trap_writer.fresh_id();
@@ -383,15 +364,16 @@ impl<'a> Visitor<'a> {
}
}
_ => {
let error_message = format!("unknown table type: '{}'", node.kind());
self.record_parse_error(
loc,
self.diagnostics_writer
.message("parse-error", "Parse error")
.severity(diagnostics::Severity::Error)
.new_entry("parse-error", "Parse error")
.severity(diagnostics::Severity::Warning)
.location(self.path, start_line, start_column, end_line, end_column)
.text(&error_message)
.status_page(),
.message(
"Unknown table type: {}",
&[diagnostics::MessageArg::Code(node.kind())],
),
);
valid = false;
@@ -439,23 +421,29 @@ impl<'a> Visitor<'a> {
values.push(trap::Arg::Label(child_node.label));
}
} else if field.name.is_some() {
let error_message = format!(
"type mismatch for field {}::{} with type {:?} != {:?}",
node.kind(),
child_node.field_name.unwrap_or("child"),
child_node.type_name,
field.type_info
self.record_parse_error_for_node(
"Type mismatch for field {}::{} with type {} != {}",
&[
diagnostics::MessageArg::Code(node.kind()),
diagnostics::MessageArg::Code(child_node.field_name.unwrap_or("child")),
diagnostics::MessageArg::Code(&format!("{:?}", child_node.type_name)),
diagnostics::MessageArg::Code(&format!("{:?}", field.type_info)),
],
*node,
false,
);
self.record_parse_error_for_node(error_message, *node);
}
} else if child_node.field_name.is_some() || child_node.type_name.named {
let error_message = format!(
"value for unknown field: {}::{} and type {:?}",
node.kind(),
&child_node.field_name.unwrap_or("child"),
&child_node.type_name
self.record_parse_error_for_node(
"Value for unknown field: {}::{} and type {}",
&[
diagnostics::MessageArg::Code(node.kind()),
diagnostics::MessageArg::Code(&child_node.field_name.unwrap_or("child")),
diagnostics::MessageArg::Code(&format!("{:?}", child_node.type_name)),
],
*node,
false,
);
self.record_parse_error_for_node(error_message, *node);
}
}
let mut args = Vec::new();
@@ -471,14 +459,14 @@ impl<'a> Visitor<'a> {
let error_message = format!(
"{} for field: {}::{}",
if child_values.is_empty() {
"missing value"
"Missing value"
} else {
"too many values"
"Too many values"
},
node.kind(),
column_name
);
self.record_parse_error_for_node(error_message, *node);
self.record_parse_error_for_node(&error_message, &[], *node, false);
}
}
Storage::Table {
@@ -488,13 +476,15 @@ impl<'a> Visitor<'a> {
} => {
for (index, child_value) in child_values.iter().enumerate() {
if !*has_index && index > 0 {
let error_message = format!(
"too many values for field: {}::{}",
node.kind(),
table_name,
self.record_parse_error_for_node(
"Too many values for field: {}::{}",
&[
diagnostics::MessageArg::Code(node.kind()),
diagnostics::MessageArg::Code(table_name),
],
*node,
false,
);
self.record_parse_error_for_node(error_message, *node);
break;
}
let mut args = vec![trap::Arg::Label(parent_id)];
@@ -591,9 +581,8 @@ fn location_for(visitor: &mut Visitor, n: Node) -> (usize, usize, usize, usize)
visitor.diagnostics_writer.write(
visitor
.diagnostics_writer
.message("internal-error", "Internal error")
.text("expecting a line break symbol, but none found while correcting end column value")
.status_page()
.new_entry("internal-error", "Internal error")
.message("Expecting a line break symbol, but none found while correcting end column value", &[])
.severity(diagnostics::Severity::Error),
);
}
@@ -607,13 +596,14 @@ fn location_for(visitor: &mut Visitor, n: Node) -> (usize, usize, usize, usize)
visitor.diagnostics_writer.write(
visitor
.diagnostics_writer
.message("internal-error", "Internal error")
.text(&format!(
"cannot correct end column value: end_byte index {} is not in range [1,{}]",
index,
source.len()
))
.status_page()
.new_entry("internal-error", "Internal error")
.message(
"Cannot correct end column value: end_byte index {} is not in range [1,{}].",
&[
diagnostics::MessageArg::Code(&index.to_string()),
diagnostics::MessageArg::Code(&source.len().to_string()),
],
)
.severity(diagnostics::Severity::Error),
);
}

View File

@@ -0,0 +1,135 @@
use std::path::{Path, PathBuf};
/// Normalizes the path according the common CodeQL specification. Assumes that
/// `path` has already been canonicalized using `std::fs::canonicalize`.
pub fn normalize_path(path: &Path) -> String {
if cfg!(windows) {
// The way Rust canonicalizes paths doesn't match the CodeQL spec, so we
// have to do a bit of work removing certain prefixes and replacing
// backslashes.
let mut components: Vec<String> = Vec::new();
for component in path.components() {
match component {
std::path::Component::Prefix(prefix) => match prefix.kind() {
std::path::Prefix::Disk(letter) | std::path::Prefix::VerbatimDisk(letter) => {
components.push(format!("{}:", letter as char));
}
std::path::Prefix::Verbatim(x) | std::path::Prefix::DeviceNS(x) => {
components.push(x.to_string_lossy().to_string());
}
std::path::Prefix::UNC(server, share)
| std::path::Prefix::VerbatimUNC(server, share) => {
components.push(server.to_string_lossy().to_string());
components.push(share.to_string_lossy().to_string());
}
},
std::path::Component::Normal(n) => {
components.push(n.to_string_lossy().to_string());
}
std::path::Component::RootDir => {}
std::path::Component::CurDir => {}
std::path::Component::ParentDir => {}
}
}
components.join("/")
} else {
// For other operating systems, we can use the canonicalized path
// without modifications.
format!("{}", path.display())
}
}
/// Convert a user-supplied path to an absolute path, and convert it to a verbatim path on Windows.
pub fn path_from_string(path: &str) -> PathBuf {
let mut path = PathBuf::from(path);
// make path absolute
if path.is_relative() {
path = std::env::current_dir().unwrap().join(path)
};
let mut components = path.components();
// make Windows paths verbatim (with `\\?\` prefixes) which allow for extended-length paths.
let mut result = match components.next() {
None => unreachable!("empty path"),
Some(component) => match component {
std::path::Component::Prefix(prefix) => match prefix.kind() {
std::path::Prefix::Disk(drive) => {
let root = format!(r"\\?\{}:\", drive as char);
PathBuf::from(root)
}
std::path::Prefix::UNC(server, share) => {
let mut root = std::ffi::OsString::from(r"\\?\UNC\");
root.push(server);
root.push(r"\");
root.push(share);
PathBuf::from(root)
}
std::path::Prefix::Verbatim(_)
| std::path::Prefix::VerbatimUNC(_, _)
| std::path::Prefix::VerbatimDisk(_)
| std::path::Prefix::DeviceNS(_) => Path::new(&component).to_path_buf(),
},
_ => Path::new(&component).to_path_buf(),
},
};
// remove `.` and `..` components
for component in components {
match component {
std::path::Component::CurDir => continue,
std::path::Component::ParentDir => {
result.pop();
}
_ => result.push(component),
}
}
result
}
pub fn path_for(dir: &Path, path: &Path, ext: &str) -> PathBuf {
let mut result = PathBuf::from(dir);
for component in path.components() {
match component {
std::path::Component::Prefix(prefix) => match prefix.kind() {
std::path::Prefix::Disk(letter) | std::path::Prefix::VerbatimDisk(letter) => {
result.push(format!("{}_", letter as char))
}
std::path::Prefix::Verbatim(x) | std::path::Prefix::DeviceNS(x) => {
result.push(x);
}
std::path::Prefix::UNC(server, share)
| std::path::Prefix::VerbatimUNC(server, share) => {
result.push("unc");
result.push(server);
result.push(share);
}
},
std::path::Component::RootDir => {
// skip
}
std::path::Component::Normal(_) => {
result.push(component);
}
std::path::Component::CurDir => {
// skip
}
std::path::Component::ParentDir => {
result.pop();
}
}
}
if !ext.is_empty() {
match result.extension() {
Some(x) => {
let mut new_ext = x.to_os_string();
new_ext.push(".");
new_ext.push(ext);
result.set_extension(new_ext);
}
None => {
result.set_extension(ext);
}
}
}
result
}

View File

@@ -1,5 +1,6 @@
mod diagnostics;
mod extractor;
mod file_paths;
mod trap;
#[macro_use]
@@ -73,9 +74,11 @@ fn main() -> std::io::Result<()> {
Err(e) => {
main_thread_logger.write(
main_thread_logger
.message("configuration-error", "Configuration error")
.text(&format!("{}; defaulting to 1 thread.", e))
.status_page()
.new_entry("configuration-error", "Configuration error")
.message(
"{}; defaulting to 1 thread.",
&[diagnostics::MessageArg::Code(&e)],
)
.severity(diagnostics::Severity::Warning),
);
1
@@ -90,19 +93,19 @@ fn main() -> std::io::Result<()> {
"threads"
}
);
let trap_compression = match trap::Compression::from_env("CODEQL_RUBY_TRAP_COMPRESSION") {
Ok(x) => x,
Err(e) => {
main_thread_logger.write(
main_thread_logger
.message("configuration-error", "Configuration error")
.text(&format!("{}; using gzip.", e))
.status_page()
.severity(diagnostics::Severity::Warning),
);
trap::Compression::Gzip
}
};
let trap_compression =
match trap::Compression::from_env("CODEQL_EXTRACTOR_RUBY_OPTION_TRAP_COMPRESSION") {
Ok(x) => x,
Err(e) => {
main_thread_logger.write(
main_thread_logger
.new_entry("configuration-error", "Configuration error")
.message("{}; using gzip.", &[diagnostics::MessageArg::Code(&e)])
.severity(diagnostics::Severity::Warning),
);
trap::Compression::Gzip
}
};
drop(main_thread_logger);
rayon::ThreadPoolBuilder::new()
.num_threads(num_threads)
@@ -120,15 +123,15 @@ fn main() -> std::io::Result<()> {
let src_archive_dir = matches
.value_of("source-archive-dir")
.expect("missing --source-archive-dir");
let src_archive_dir = PathBuf::from(src_archive_dir);
let src_archive_dir = file_paths::path_from_string(src_archive_dir);
let trap_dir = matches
.value_of("output-dir")
.expect("missing --output-dir");
let trap_dir = PathBuf::from(trap_dir);
let trap_dir = file_paths::path_from_string(trap_dir);
let file_list = matches.value_of("file-list").expect("missing --file-list");
let file_list = fs::File::open(file_list)?;
let file_list = fs::File::open(file_paths::path_from_string(file_list))?;
let language = tree_sitter_ruby::language();
let erb = tree_sitter_embedded_template::language();
@@ -146,7 +149,7 @@ fn main() -> std::io::Result<()> {
.try_for_each(|line| {
let mut diagnostics_writer = diagnostics.logger();
let path = PathBuf::from(line).canonicalize()?;
let src_archive_file = path_for(&src_archive_dir, &path, "");
let src_archive_file = file_paths::path_for(&src_archive_dir, &path, "");
let mut source = std::fs::read(&path)?;
let mut needs_conversion = false;
let code_ranges;
@@ -162,7 +165,7 @@ fn main() -> std::io::Result<()> {
&path,
&source,
&[],
)?;
);
let (ranges, line_breaks) = scan_erb(
erb,
@@ -199,16 +202,20 @@ fn main() -> std::io::Result<()> {
needs_conversion = false;
diagnostics_writer.write(
diagnostics_writer
.message(
"character-encoding-error",
"Character encoding error",
.new_entry(
"character-decoding-error",
"Character decoding error",
)
.file(&file_paths::normalize_path(&path))
.message(
"Could not decode the file contents as {}: {}. The contents of the file must match the character encoding specified in the {} {}.",
&[
diagnostics::MessageArg::Code(&encoding_name),
diagnostics::MessageArg::Code(&msg),
diagnostics::MessageArg::Code("encoding:"),
diagnostics::MessageArg::Link("directive", "https://docs.ruby-lang.org/en/master/syntax/comments_rdoc.html#label-encoding+Directive")
],
)
.text(&format!(
"{}: character decoding failure: {} ({})",
&path.to_string_lossy(),
msg,
&encoding_name
))
.status_page()
.severity(diagnostics::Severity::Warning),
);
@@ -218,12 +225,16 @@ fn main() -> std::io::Result<()> {
} else {
diagnostics_writer.write(
diagnostics_writer
.message("character-encoding-error", "Character encoding error")
.text(&format!(
"{}: unknown character encoding: '{}'",
&path.to_string_lossy(),
&encoding_name
))
.new_entry("unknown-character-encoding", "Unknown character encoding")
.file(&file_paths::normalize_path(&path))
.message(
"Unknown character encoding {} in {} {}.",
&[
diagnostics::MessageArg::Code(&encoding_name),
diagnostics::MessageArg::Code("#encoding:"),
diagnostics::MessageArg::Link("directive", "https://docs.ruby-lang.org/en/master/syntax/comments_rdoc.html#label-encoding+Directive")
],
)
.status_page()
.severity(diagnostics::Severity::Warning),
);
@@ -241,7 +252,7 @@ fn main() -> std::io::Result<()> {
&path,
&source,
&code_ranges,
)?;
);
std::fs::create_dir_all(&src_archive_file.parent().unwrap())?;
if needs_conversion {
std::fs::write(&src_archive_file, &source)?;
@@ -264,7 +275,7 @@ fn write_trap(
trap_writer: &trap::Writer,
trap_compression: trap::Compression,
) -> std::io::Result<()> {
let trap_file = path_for(trap_dir, &path, trap_compression.extension());
let trap_file = file_paths::path_for(trap_dir, &path, trap_compression.extension());
std::fs::create_dir_all(&trap_file.parent().unwrap())?;
trap_writer.write_to_file(&trap_file, trap_compression)
}
@@ -311,54 +322,6 @@ fn scan_erb(
(result, line_breaks)
}
fn path_for(dir: &Path, path: &Path, ext: &str) -> PathBuf {
let mut result = PathBuf::from(dir);
for component in path.components() {
match component {
std::path::Component::Prefix(prefix) => match prefix.kind() {
std::path::Prefix::Disk(letter) | std::path::Prefix::VerbatimDisk(letter) => {
result.push(format!("{}_", letter as char))
}
std::path::Prefix::Verbatim(x) | std::path::Prefix::DeviceNS(x) => {
result.push(x);
}
std::path::Prefix::UNC(server, share)
| std::path::Prefix::VerbatimUNC(server, share) => {
result.push("unc");
result.push(server);
result.push(share);
}
},
std::path::Component::RootDir => {
// skip
}
std::path::Component::Normal(_) => {
result.push(component);
}
std::path::Component::CurDir => {
// skip
}
std::path::Component::ParentDir => {
result.pop();
}
}
}
if !ext.is_empty() {
match result.extension() {
Some(x) => {
let mut new_ext = x.to_os_string();
new_ext.push(".");
new_ext.push(ext);
result.set_extension(new_ext);
}
None => {
result.set_extension(ext);
}
}
}
result
}
fn skip_space(content: &[u8], index: usize) -> usize {
let mut index = index;
while index < content.len() {
@@ -372,7 +335,6 @@ fn skip_space(content: &[u8], index: usize) -> usize {
}
index
}
fn scan_coding_comment(content: &[u8]) -> std::option::Option<Cow<str>> {
let mut index = 0;
// skip UTF-8 BOM marker if there is one

View File

@@ -0,0 +1,3 @@
4 %%% 5
if 1; 2

View File

@@ -0,0 +1,46 @@
{
"helpLinks": [
"https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning"
],
"location": {
"endColumn": 5,
"endLine": 1,
"file": "<test-root-directory>/bad.rb",
"startColumn": 4,
"startLine": 1
},
"markdownMessage": "A parse error occurred. Check the syntax of the file. If the file is invalid, correct the error or [exclude](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning) the file from analysis.",
"plaintextMessage": "A parse error occurred. Check the syntax of the file. If the file is invalid, correct the error or exclude the file from analysis.",
"severity": "Warning",
"source": {
"extractorName": "ruby",
"id": "ruby/parse-error",
"name": "Parse error"
},
"visibility": {
"statusPage": true
}
}
{
"helpLinks": [
"https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning"
],
"location": {
"endColumn": 7,
"endLine": 3,
"file": "<test-root-directory>/bad.rb",
"startColumn": 8,
"startLine": 3
},
"markdownMessage": "A parse error occurred (expected `end` symbol). Check the syntax of the file. If the file is invalid, correct the error or [exclude](https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/customizing-code-scanning) the file from analysis.",
"plaintextMessage": "A parse error occurred (expected end symbol). Check the syntax of the file. If the file is invalid, correct the error or exclude the file from analysis.",
"severity": "Warning",
"source": {
"extractorName": "ruby",
"id": "ruby/parse-error",
"name": "Parse error"
},
"visibility": {
"statusPage": true
}
}

View File

@@ -0,0 +1,7 @@
import os
from create_database_utils import *
from diagnostics_test_utils import *
run_codeql_database_create([], lang="ruby", runFunction = runSuccessfully, db = None)
check_diagnostics()

View File

@@ -0,0 +1,19 @@
{
"helpLinks": [
"https://docs.ruby-lang.org/en/master/syntax/comments_rdoc.html#label-encoding+Directive"
],
"location": {
"file": "<test-root-directory>/encoding.rb"
},
"markdownMessage": "Unknown character encoding `silly` in `#encoding:` [directive](https://docs.ruby-lang.org/en/master/syntax/comments_rdoc.html#label-encoding+Directive).",
"plaintextMessage": "Unknown character encoding silly in #encoding: directive.",
"severity": "Warning",
"source": {
"extractorName": "ruby",
"id": "ruby/unknown-character-encoding",
"name": "Unknown character encoding"
},
"visibility": {
"statusPage": true
}
}

View File

@@ -0,0 +1,5 @@
# encoding: silly
def f
puts "hello"
end

View File

@@ -0,0 +1,7 @@
import os
from create_database_utils import *
from diagnostics_test_utils import *
run_codeql_database_create([], lang="ruby", runFunction = runSuccessfully, db = None)
check_diagnostics()

View File

@@ -0,0 +1,3 @@
dependencies:
codeql/ruby-all: '*'
codeql/ruby-queries: '*'

View File

@@ -1,3 +1,11 @@
## 0.5.4
### Minor Analysis Improvements
* Flow is now tracked between ActionController `before_filter` and `after_filter` callbacks and their associated action methods.
* Calls to `ApplicationController#render` and `ApplicationController::Renderer#render` are recognized as Rails rendering calls.
* Support for [Twirp framework](https://twitchtv.github.io/twirp/docs/intro.html).
## 0.5.3
### Minor Analysis Improvements

View File

@@ -1,4 +0,0 @@
---
category: minorAnalysis
---
* Calls to `ApplicationController#render` and `ApplicationController::Renderer#render` are recognized as Rails rendering calls.

View File

@@ -1,4 +0,0 @@
---
category: minorAnalysis
---
* Support for [Twirp framework](https://twitchtv.github.io/twirp/docs/intro.html).

View File

@@ -0,0 +1,9 @@
---
category: majorAnalysis
---
* The main data flow and taint tracking APIs have been changed. The old APIs
remain in place for now and translate to the new through a
backwards-compatible wrapper. If multiple configurations are in scope
simultaneously, then this may affect results slightly. The new API is quite
similar to the old, but makes use of a configuration module instead of a
configuration class.

View File

@@ -0,0 +1,6 @@
---
category: minorAnalysis
---
* Deleted the deprecated `getQualifiedName` predicate from the `ConstantWriteAccess` class.
* Deleted the deprecated `getWhenBranch` and `getAWhenBranch` predicates from the `CaseExpr` class.
* Deleted the deprecated `Self`, `PatternParameter`, `Pattern`, `VariablePattern`, `TuplePattern`, and `TuplePatternParameter` classes.

View File

@@ -0,0 +1,4 @@
---
category: minorAnalysis
---
* The severity of parse errors was reduced to warning (previously error).

View File

@@ -0,0 +1,4 @@
---
category: feature
---
* Added support for merging two `PathGraph`s via disjoint union to allow results from multiple data flow computations in a single `path-problem` query.

View File

@@ -0,0 +1,7 @@
## 0.5.4
### Minor Analysis Improvements
* Flow is now tracked between ActionController `before_filter` and `after_filter` callbacks and their associated action methods.
* Calls to `ApplicationController#render` and `ApplicationController::Renderer#render` are recognized as Rails rendering calls.
* Support for [Twirp framework](https://twitchtv.github.io/twirp/docs/intro.html).

View File

@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.5.3
lastReleaseVersion: 0.5.4

View File

@@ -212,7 +212,8 @@ module FileSystemWriteAccess {
* Extend this class to refine existing API models. If you want to model new APIs,
* extend `FileSystemPermissionModification::Range` instead.
*/
class FileSystemPermissionModification extends DataFlow::Node instanceof FileSystemPermissionModification::Range {
class FileSystemPermissionModification extends DataFlow::Node instanceof FileSystemPermissionModification::Range
{
/**
* Gets an argument to this permission modification that is interpreted as a
* set of permissions.
@@ -468,7 +469,8 @@ module Http {
}
}
private class RequestInputAccessAsRemoteFlowSource extends RemoteFlowSource::Range instanceof RequestInputAccess {
private class RequestInputAccessAsRemoteFlowSource extends RemoteFlowSource::Range instanceof RequestInputAccess
{
override string getSourceType() { result = this.(RequestInputAccess).getSourceType() }
}
@@ -957,7 +959,8 @@ module Path {
* Extend this class to refine existing API models. If you want to model new APIs,
* extend `CookieSecurityConfigurationSetting::Range` instead.
*/
class CookieSecurityConfigurationSetting extends DataFlow::Node instanceof CookieSecurityConfigurationSetting::Range {
class CookieSecurityConfigurationSetting extends DataFlow::Node instanceof CookieSecurityConfigurationSetting::Range
{
/**
* Gets a description of how this cookie setting may weaken application security.
* This predicate has no results if the setting is considered to be safe.
@@ -1037,7 +1040,8 @@ module Cryptography {
* Extend this class to refine existing API models. If you want to model new APIs,
* extend `CryptographicOperation::Range` instead.
*/
class CryptographicOperation extends SC::CryptographicOperation instanceof CryptographicOperation::Range {
class CryptographicOperation extends SC::CryptographicOperation instanceof CryptographicOperation::Range
{
/** DEPRECATED: Use `getAlgorithm().isWeak() or getBlockMode().isWeak()` instead */
deprecated predicate isWeak() { super.isWeak() }
}

View File

@@ -10,5 +10,6 @@ import codeql.Locations
* global (inter-procedural) data flow analyses.
*/
module DataFlow {
import codeql.ruby.dataflow.internal.DataFlowImpl
import codeql.ruby.dataflow.internal.DataFlow
import codeql.ruby.dataflow.internal.DataFlowImpl1
}

View File

@@ -47,6 +47,6 @@ class Diagnostic extends @diagnostic {
}
/** A diagnostic relating to a particular error in extracting a file. */
class ExtractionError extends Diagnostic, @diagnostic_error {
class ExtractionError extends Diagnostic {
ExtractionError() { this.getTag() = "parse_error" }
}

View File

@@ -3,5 +3,6 @@
* global (inter-procedural) taint-tracking analyses.
*/
module TaintTracking {
import codeql.ruby.dataflow.internal.tainttracking1.TaintTracking
import codeql.ruby.dataflow.internal.tainttracking1.TaintTrackingImpl
}

View File

@@ -398,12 +398,6 @@ class ConstantWriteAccess extends ConstantAccess {
* constant can be ambiguous from just statically looking at the AST.
*/
string getAQualifiedName() { result = resolveConstantWrite(this) }
/**
* Gets a qualified name for this constant. Deprecated in favor of
* `getAQualifiedName` because this can return more than one value
*/
deprecated string getQualifiedName() { result = this.getAQualifiedName() }
}
/**

View File

@@ -384,12 +384,6 @@ class CaseExpr extends ControlExpr instanceof CaseExprImpl {
*/
final AstNode getABranch() { result = this.getBranch(_) }
/** Gets the `n`th `when` branch of this case expression. */
deprecated final WhenClause getWhenBranch(int n) { result = this.getBranch(n) }
/** Gets a `when` branch of this case expression. */
deprecated final WhenClause getAWhenBranch() { result = this.getABranch() }
/** Gets the `else` branch of this case expression, if any. */
final StmtSequence getElseBranch() { result = this.getABranch() }
@@ -413,11 +407,6 @@ class CaseExpr extends ControlExpr instanceof CaseExprImpl {
}
}
/**
* DEPRECATED: Use `WhenClause` instead.
*/
deprecated class WhenExpr = WhenClause;
/**
* A `when` branch of a `case` expression.
* ```rb

View File

@@ -22,9 +22,6 @@ class Expr extends Stmt, TExpr {
ConstantValue getConstantValue() { result = getConstantValueExpr(this) }
}
/** DEPRECATED: Use `SelfVariableAccess` instead. */
deprecated class Self = SelfVariableAccess;
/**
* A sequence of expressions in the right-hand side of an assignment or
* a `return`, `break` or `next` statement.

View File

@@ -199,7 +199,8 @@ class StringTextComponent extends StringComponent instanceof StringTextComponent
/**
* An escape sequence component of a string or string-like literal.
*/
class StringEscapeSequenceComponent extends StringComponent instanceof StringEscapeSequenceComponentImpl {
class StringEscapeSequenceComponent extends StringComponent instanceof StringEscapeSequenceComponentImpl
{
final override string getAPrimaryQlClass() { result = "StringEscapeSequenceComponent" }
/** Gets the text of this component as it appears in the source code. */
@@ -209,7 +210,8 @@ class StringEscapeSequenceComponent extends StringComponent instanceof StringEsc
/**
* An interpolation expression component of a string or string-like literal.
*/
class StringInterpolationComponent extends StringComponent, StmtSequence instanceof StringInterpolationComponentImpl {
class StringInterpolationComponent extends StringComponent, StmtSequence instanceof StringInterpolationComponentImpl
{
private Ruby::Interpolation g;
StringInterpolationComponent() { this = TStringInterpolationComponentNonRegexp(g) }
@@ -249,14 +251,16 @@ class RegExpTextComponent extends RegExpComponent instanceof RegExpTextComponent
/**
* An escape sequence component of a regex literal.
*/
class RegExpEscapeSequenceComponent extends RegExpComponent instanceof RegExpEscapeSequenceComponentImpl {
class RegExpEscapeSequenceComponent extends RegExpComponent instanceof RegExpEscapeSequenceComponentImpl
{
final override string getAPrimaryQlClass() { result = "RegExpEscapeSequenceComponent" }
}
/**
* An interpolation expression component of a regex literal.
*/
class RegExpInterpolationComponent extends RegExpComponent, StmtSequence instanceof RegExpComponentImpl {
class RegExpInterpolationComponent extends RegExpComponent, StmtSequence instanceof RegExpComponentImpl
{
private Ruby::Interpolation g;
RegExpInterpolationComponent() { this = TStringInterpolationComponentRegexp(g) }

View File

@@ -68,27 +68,6 @@ class DestructuredParameter extends Parameter, TDestructuredParameter {
final override string getAPrimaryQlClass() { result = "DestructuredParameter" }
}
/**
* DEPRECATED
*
* A parameter defined using a pattern.
*
* This includes both simple parameters and tuple parameters.
*/
deprecated class PatternParameter extends Parameter, Pattern, TPatternParameter {
override LocalVariable getAVariable() { result = Pattern.super.getAVariable() }
}
/**
* DEPRECATED
*
* A parameter defined using a tuple pattern.
*/
deprecated class TuplePatternParameter extends PatternParameter, TuplePattern,
TDestructuredParameter {
final override LocalVariable getAVariable() { result = TuplePattern.super.getAVariable() }
}
/** A named parameter. */
class NamedParameter extends Parameter, TNamedParameter {
/** Gets the name of this parameter. */

View File

@@ -5,78 +5,6 @@ private import internal.TreeSitter
private import internal.Variable
private import internal.Parameter
/**
* DEPRECATED
*
* A pattern.
*/
deprecated class Pattern extends AstNode {
Pattern() {
explicitAssignmentNode(toGenerated(this), _)
or
implicitAssignmentNode(toGenerated(this))
or
implicitParameterAssignmentNode(toGenerated(this), _)
or
this = getSynthChild(any(AssignExpr ae), 0)
or
this instanceof SimpleParameterImpl
}
/** Gets a variable used in (or introduced by) this pattern. */
Variable getAVariable() { none() }
}
/**
* DEPRECATED
*
* A simple variable pattern.
*/
deprecated class VariablePattern extends Pattern, LhsExpr, TVariableAccess {
override Variable getAVariable() { result = this.(VariableAccess).getVariable() }
}
/**
* DEPRECATED
*
* A tuple pattern.
*
* This includes both tuple patterns in parameters and assignments. Example patterns:
* ```rb
* a, self.b = value
* (a, b), c[3] = value
* a, b, *rest, c, d = value
* ```
*/
deprecated class TuplePattern extends Pattern, TTuplePattern {
private TuplePatternImpl getImpl() { result = toGenerated(this) }
private Ruby::AstNode getChild(int i) { result = this.getImpl().getChildNode(i) }
/** Gets the `i`th pattern in this tuple pattern. */
final Pattern getElement(int i) {
exists(Ruby::AstNode c | c = this.getChild(i) |
toGenerated(result) = c.(Ruby::RestAssignment).getChild()
or
toGenerated(result) = c
)
}
/** Gets a sub pattern in this tuple pattern. */
final Pattern getAnElement() { result = this.getElement(_) }
/**
* Gets the index of the pattern with the `*` marker on it, if it exists.
* In the example below the index is `2`.
* ```rb
* a, b, *rest, c, d = value
* ```
*/
final int getRestIndex() { result = this.getImpl().getRestIndex() }
override Variable getAVariable() { result = this.getElement(_).getAVariable() }
}
private class TPatternNode =
TArrayPattern or TFindPattern or THashPattern or TAlternativePattern or TAsPattern or
TParenthesizedPattern or TExpressionReferencePattern or TVariableReferencePattern;

View File

@@ -875,15 +875,10 @@ class TParameter =
class TSimpleParameter = TSimpleParameterReal or TSimpleParameterSynth;
deprecated class TPatternParameter = TSimpleParameter or TDestructuredParameter;
class TNamedParameter =
TSimpleParameter or TBlockParameter or THashSplatParameter or TKeywordParameter or
TOptionalParameter or TSplatParameter;
deprecated class TTuplePattern =
TDestructuredParameter or TDestructuredLeftAssignment or TLeftAssignmentList;
class TVariableAccess =
TLocalVariableAccess or TGlobalVariableAccess or TInstanceVariableAccess or
TClassVariableAccess or TSelfVariableAccess;

View File

@@ -101,7 +101,8 @@ abstract class DestructuredLhsExprImpl extends Ruby::AstNode {
}
class DestructuredLeftAssignmentImpl extends DestructuredLhsExprImpl,
Ruby::DestructuredLeftAssignment {
Ruby::DestructuredLeftAssignment
{
override Ruby::AstNode getChildNode(int i) { result = this.getChild(i) }
}

View File

@@ -326,7 +326,8 @@ private string unescapeTextComponent(string text) {
}
class StringTextComponentStringOrHeredocContent extends StringTextComponentImpl,
TStringTextComponentNonRegexpStringOrHeredocContent {
TStringTextComponentNonRegexpStringOrHeredocContent
{
private Ruby::Token g;
StringTextComponentStringOrHeredocContent() {
@@ -341,7 +342,8 @@ class StringTextComponentStringOrHeredocContent extends StringTextComponentImpl,
}
private class StringTextComponentSimpleSymbol extends StringTextComponentImpl,
TStringTextComponentNonRegexpSimpleSymbol {
TStringTextComponentNonRegexpSimpleSymbol
{
private Ruby::SimpleSymbol g;
StringTextComponentSimpleSymbol() { this = TStringTextComponentNonRegexpSimpleSymbol(g) }
@@ -355,7 +357,8 @@ private class StringTextComponentSimpleSymbol extends StringTextComponentImpl,
}
private class StringTextComponentHashKeySymbol extends StringTextComponentImpl,
TStringTextComponentNonRegexpHashKeySymbol {
TStringTextComponentNonRegexpHashKeySymbol
{
private Ruby::HashKeySymbol g;
StringTextComponentHashKeySymbol() { this = TStringTextComponentNonRegexpHashKeySymbol(g) }
@@ -424,7 +427,8 @@ private string unescapeEscapeSequence(string escaped) {
* An escape sequence component of a string or string-like literal.
*/
class StringEscapeSequenceComponentImpl extends StringComponentImpl,
TStringEscapeSequenceComponentNonRegexp {
TStringEscapeSequenceComponentNonRegexp
{
private Ruby::EscapeSequence g;
StringEscapeSequenceComponentImpl() { this = TStringEscapeSequenceComponentNonRegexp(g) }
@@ -439,7 +443,8 @@ class StringEscapeSequenceComponentImpl extends StringComponentImpl,
}
class StringInterpolationComponentImpl extends StringComponentImpl,
TStringInterpolationComponentNonRegexp {
TStringInterpolationComponentNonRegexp
{
private Ruby::Interpolation g;
StringInterpolationComponentImpl() { this = TStringInterpolationComponentNonRegexp(g) }
@@ -472,7 +477,8 @@ class RegExpTextComponentImpl extends RegExpComponentImpl, TStringTextComponentR
}
class RegExpEscapeSequenceComponentImpl extends RegExpComponentImpl,
TStringEscapeSequenceComponentRegexp {
TStringEscapeSequenceComponentRegexp
{
private Ruby::EscapeSequence g;
RegExpEscapeSequenceComponentImpl() { this = TStringEscapeSequenceComponentRegexp(g) }
@@ -488,7 +494,8 @@ class RegExpEscapeSequenceComponentImpl extends RegExpComponentImpl,
}
class RegExpInterpolationComponentImpl extends RegExpComponentImpl,
TStringInterpolationComponentRegexp {
TStringInterpolationComponentRegexp
{
private Ruby::Interpolation g;
RegExpInterpolationComponentImpl() { this = TStringInterpolationComponentRegexp(g) }

View File

@@ -4,25 +4,6 @@ private import codeql.ruby.ast.internal.Parameter
private import AST
private import TreeSitter
deprecated class TuplePatternImpl extends Ruby::AstNode {
TuplePatternImpl() {
this instanceof DestructuredParameterImpl or
this instanceof DestructuredLhsExprImpl
}
Ruby::AstNode getChildNode(int i) {
result =
[
this.(DestructuredParameterImpl).getChildNode(i),
this.(DestructuredLhsExprImpl).getChildNode(i)
]
}
final int getRestIndex() {
result = unique(int i | this.getChildNode(i) instanceof Ruby::RestAssignment)
}
}
/**
* Holds if `node` is a case pattern.
*/

View File

@@ -607,7 +607,8 @@ private class GlobalVariableAccessReal extends GlobalVariableAccessImpl, TGlobal
final override string toString() { result = g.getValue() }
}
private class GlobalVariableAccessSynth extends GlobalVariableAccessImpl, TGlobalVariableAccessSynth {
private class GlobalVariableAccessSynth extends GlobalVariableAccessImpl, TGlobalVariableAccessSynth
{
private GlobalVariable v;
GlobalVariableAccessSynth() { this = TGlobalVariableAccessSynth(_, _, v) }
@@ -624,7 +625,8 @@ module InstanceVariableAccess {
abstract class InstanceVariableAccessImpl extends VariableAccessImpl, TInstanceVariableAccess { }
private class InstanceVariableAccessReal extends InstanceVariableAccessImpl,
TInstanceVariableAccessReal {
TInstanceVariableAccessReal
{
private Ruby::InstanceVariable g;
private InstanceVariable v;
@@ -636,7 +638,8 @@ private class InstanceVariableAccessReal extends InstanceVariableAccessImpl,
}
private class InstanceVariableAccessSynth extends InstanceVariableAccessImpl,
TInstanceVariableAccessSynth {
TInstanceVariableAccessSynth
{
private InstanceVariable v;
InstanceVariableAccessSynth() { this = TInstanceVariableAccessSynth(_, _, v) }
@@ -664,7 +667,8 @@ private class ClassVariableAccessReal extends ClassVariableAccessRealImpl, TClas
}
private class ClassVariableAccessSynth extends ClassVariableAccessRealImpl,
TClassVariableAccessSynth {
TClassVariableAccessSynth
{
private ClassVariable v;
ClassVariableAccessSynth() { this = TClassVariableAccessSynth(_, _, v) }

View File

@@ -243,6 +243,35 @@ module ExprNodes {
override Literal getExpr() { result = super.getExpr() }
}
private class ControlExprChildMapping extends ExprChildMapping, ControlExpr {
override predicate relevantChild(AstNode n) { none() }
}
/** A control-flow node that wraps a `ControlExpr` AST expression. */
class ControlExprCfgNode extends ExprCfgNode {
override string getAPrimaryQlClass() { result = "ControlExprCfgNode" }
override ControlExprChildMapping e;
override ControlExpr getExpr() { result = super.getExpr() }
}
private class LhsExprChildMapping extends ExprChildMapping, LhsExpr {
override predicate relevantChild(AstNode n) { none() }
}
/** A control-flow node that wraps a `LhsExpr` AST expression. */
class LhsExprCfgNode extends ExprCfgNode {
override string getAPrimaryQlClass() { result = "LhsExprCfgNode" }
override LhsExprChildMapping e;
override LhsExpr getExpr() { result = super.getExpr() }
/** Gets a variable used in (or introduced by) this LHS. */
Variable getAVariable() { result = e.(VariableAccess).getVariable() }
}
private class AssignExprChildMapping extends ExprChildMapping, AssignExpr {
override predicate relevantChild(AstNode n) { n = this.getAnOperand() }
}
@@ -256,7 +285,7 @@ module ExprNodes {
final override AssignExpr getExpr() { result = ExprCfgNode.super.getExpr() }
/** Gets the LHS of this assignment. */
final ExprCfgNode getLhs() { e.hasCfgChild(e.getLeftOperand(), this, result) }
final LhsExprCfgNode getLhs() { e.hasCfgChild(e.getLeftOperand(), this, result) }
/** Gets the RHS of this assignment. */
final ExprCfgNode getRhs() { e.hasCfgChild(e.getRightOperand(), this, result) }

View File

@@ -284,7 +284,8 @@ abstract class ConditionalCompletion extends NormalCompletion {
* A completion that represents evaluation of an expression
* with a Boolean value.
*/
class BooleanCompletion extends ConditionalCompletion, NonNestedNormalCompletion, TBooleanCompletion {
class BooleanCompletion extends ConditionalCompletion, NonNestedNormalCompletion, TBooleanCompletion
{
BooleanCompletion() { this = TBooleanCompletion(value) }
/** Gets the dual Boolean completion. */

View File

@@ -465,7 +465,8 @@ module Trees {
}
private class PatternVariableAccessTree extends LocalVariableAccessTree, LocalVariableWriteAccess,
CasePattern {
CasePattern
{
final override predicate last(AstNode last, Completion c) {
super.last(last, c) and
c.(MatchingCompletion).getValue() = true

View File

@@ -92,7 +92,8 @@ class StringConstCompareBarrier extends DataFlow::Node {
* in the `order` call.
*/
deprecated class StringConstCompare extends DataFlow::BarrierGuard,
CfgNodes::ExprNodes::ComparisonOperationCfgNode {
CfgNodes::ExprNodes::ComparisonOperationCfgNode
{
private CfgNode checkedNode;
// The value of the condition that results in the node being validated.
private boolean checkedBranch;
@@ -160,7 +161,8 @@ class StringConstArrayInclusionCallBarrier extends DataFlow::Node {
* in the `find_by` call.
*/
deprecated class StringConstArrayInclusionCall extends DataFlow::BarrierGuard,
CfgNodes::ExprNodes::MethodCallCfgNode {
CfgNodes::ExprNodes::MethodCallCfgNode
{
private CfgNode checkedNode;
StringConstArrayInclusionCall() { stringConstArrayInclusionCall(this, checkedNode, true) }

View File

@@ -0,0 +1,353 @@
/**
* Provides an implementation of global (interprocedural) data flow. This file
* re-exports the local (intraprocedural) data flow analysis from
* `DataFlowImplSpecific::Public` and adds a global analysis, mainly exposed
* through the `Make` and `MakeWithState` modules.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
import DataFlowImplCommonPublic
private import DataFlowImpl
/**
 * An input configuration for data flow.
 *
 * Only `isSource` and `isSink` must be provided by an implementation; every
 * other member has a `default` implementation and need only be overridden to
 * customize the analysis. Instantiate via the `Make` module.
 */
signature module ConfigSig {
  /**
   * Holds if `source` is a relevant data flow source.
   */
  predicate isSource(Node source);

  /**
   * Holds if `sink` is a relevant data flow sink.
   */
  predicate isSink(Node sink);

  /**
   * Holds if data flow through `node` is prohibited. This completely removes
   * `node` from the data flow graph.
   */
  default predicate isBarrier(Node node) { none() }

  /** Holds if data flow into `node` is prohibited. */
  default predicate isBarrierIn(Node node) { none() }

  /** Holds if data flow out of `node` is prohibited. */
  default predicate isBarrierOut(Node node) { none() }

  /**
   * Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
   */
  default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }

  /**
   * Holds if an arbitrary number of implicit read steps of content `c` may be
   * taken at `node`.
   */
  default predicate allowImplicitRead(Node node, ContentSet c) { none() }

  /**
   * Gets the virtual dispatch branching limit when calculating field flow.
   * This can be overridden to a smaller value to improve performance (a
   * value of 0 disables field flow), or a larger value to get more results.
   */
  default int fieldFlowBranchLimit() { result = 2 }

  /**
   * Gets a data flow configuration feature to add restrictions to the set of
   * valid flow paths.
   *
   * - `FeatureHasSourceCallContext`:
   *   Assume that sources have some existing call context to disallow
   *   conflicting return-flow directly following the source.
   * - `FeatureHasSinkCallContext`:
   *   Assume that sinks have some existing call context to disallow
   *   conflicting argument-to-parameter flow directly preceding the sink.
   * - `FeatureEqualSourceSinkCallContext`:
   *   Implies both of the above and additionally ensures that the entire flow
   *   path preserves the call context.
   *
   * These features are generally not relevant for typical end-to-end data flow
   * queries, but should only be used for constructing paths that need to
   * somehow be pluggable in another path context.
   */
  default FlowFeature getAFeature() { none() }

  /** Holds if sources should be grouped in the result of `hasFlowPath`. */
  default predicate sourceGrouping(Node source, string sourceGroup) { none() }

  /** Holds if sinks should be grouped in the result of `hasFlowPath`. */
  default predicate sinkGrouping(Node sink, string sinkGroup) { none() }

  /**
   * Holds if hidden nodes should be included in the data flow graph.
   *
   * This feature should only be used for debugging or when the data flow graph
   * is not visualized (as it is in a `path-problem` query).
   */
  default predicate includeHiddenNodes() { none() }
}
/**
 * An input configuration for data flow using flow state.
 *
 * Unlike `ConfigSig`, implementations must additionally provide a `FlowState`
 * class, the state-aware `isSource`, `isSink`, and `isBarrier`, and the
 * state-transitioning `isAdditionalFlowStep` (these members have no `default`
 * implementation). Instantiate via the `MakeWithState` module.
 */
signature module StateConfigSig {
  // `bindingset[this]` makes `FlowState` an infinite (caller-bound) type,
  // allowing e.g. arbitrary strings to be used as flow states.
  bindingset[this]
  class FlowState;

  /**
   * Holds if `source` is a relevant data flow source with the given initial
   * `state`.
   */
  predicate isSource(Node source, FlowState state);

  /**
   * Holds if `sink` is a relevant data flow sink accepting `state`.
   */
  predicate isSink(Node sink, FlowState state);

  /**
   * Holds if data flow through `node` is prohibited. This completely removes
   * `node` from the data flow graph.
   */
  default predicate isBarrier(Node node) { none() }

  /**
   * Holds if data flow through `node` is prohibited when the flow state is
   * `state`.
   */
  predicate isBarrier(Node node, FlowState state);

  /** Holds if data flow into `node` is prohibited. */
  default predicate isBarrierIn(Node node) { none() }

  /** Holds if data flow out of `node` is prohibited. */
  default predicate isBarrierOut(Node node) { none() }

  /**
   * Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
   */
  default predicate isAdditionalFlowStep(Node node1, Node node2) { none() }

  /**
   * Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
   * This step is only applicable in `state1` and updates the flow state to `state2`.
   */
  predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2);

  /**
   * Holds if an arbitrary number of implicit read steps of content `c` may be
   * taken at `node`.
   */
  default predicate allowImplicitRead(Node node, ContentSet c) { none() }

  /**
   * Gets the virtual dispatch branching limit when calculating field flow.
   * This can be overridden to a smaller value to improve performance (a
   * value of 0 disables field flow), or a larger value to get more results.
   */
  default int fieldFlowBranchLimit() { result = 2 }

  /**
   * Gets a data flow configuration feature to add restrictions to the set of
   * valid flow paths.
   *
   * - `FeatureHasSourceCallContext`:
   *   Assume that sources have some existing call context to disallow
   *   conflicting return-flow directly following the source.
   * - `FeatureHasSinkCallContext`:
   *   Assume that sinks have some existing call context to disallow
   *   conflicting argument-to-parameter flow directly preceding the sink.
   * - `FeatureEqualSourceSinkCallContext`:
   *   Implies both of the above and additionally ensures that the entire flow
   *   path preserves the call context.
   *
   * These features are generally not relevant for typical end-to-end data flow
   * queries, but should only be used for constructing paths that need to
   * somehow be pluggable in another path context.
   */
  default FlowFeature getAFeature() { none() }

  /** Holds if sources should be grouped in the result of `hasFlowPath`. */
  default predicate sourceGrouping(Node source, string sourceGroup) { none() }

  /** Holds if sinks should be grouped in the result of `hasFlowPath`. */
  default predicate sinkGrouping(Node sink, string sinkGroup) { none() }

  /**
   * Holds if hidden nodes should be included in the data flow graph.
   *
   * This feature should only be used for debugging or when the data flow graph
   * is not visualized (as it is in a `path-problem` query).
   */
  default predicate includeHiddenNodes() { none() }
}
/**
 * Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
 * measured in approximate number of interprocedural steps.
 */
// NOTE(review): presumably consumed as a module parameter by the partial-flow
// exploration machinery (`FlowExploration`) — confirm at the use site.
signature int explorationLimitSig();
/**
 * The output of a data flow computation.
 *
 * This is the interface implemented by the modules produced by `Make` and
 * `MakeWithState` below.
 */
signature module DataFlowSig {
  /**
   * A `Node` augmented with a call context (except for sinks) and an access path.
   * Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
   */
  class PathNode;

  /**
   * Holds if data can flow from `source` to `sink`.
   *
   * The corresponding paths are generated from the end-points and the graph
   * included in the module `PathGraph`.
   */
  predicate hasFlowPath(PathNode source, PathNode sink);

  /**
   * Holds if data can flow from `source` to `sink`.
   */
  predicate hasFlow(Node source, Node sink);

  /**
   * Holds if data can flow from some source to `sink`.
   */
  predicate hasFlowTo(Node sink);

  /**
   * Holds if data can flow from some source to `sink`.
   */
  predicate hasFlowToExpr(DataFlowExpr sink);
}
/**
 * Constructs a standard data flow computation.
 */
module Make<ConfigSig Config> implements DataFlowSig {
  // Adapt the stateless `Config` to the state-aware interface expected by the
  // shared implementation: `DefaultState` presumably supplies a trivial flow
  // state so `Impl` can be reused unchanged — confirm in `DataFlowImpl`.
  private module C implements FullStateConfigSig {
    import DefaultState<Config>
    import Config
  }

  import Impl<C>
}
/**
 * Constructs a data flow computation using flow state.
 */
module MakeWithState<StateConfigSig Config> implements DataFlowSig {
  // `Config` already carries flow state, so no `DefaultState` adapter is
  // needed (contrast with `Make` above).
  private module C implements FullStateConfigSig {
    import Config
  }

  import Impl<C>
}
/** The interface a path node must provide to participate in a `PathGraphSig` graph. */
signature class PathNodeSig {
  /** Gets a textual representation of this element. */
  string toString();

  /**
   * Holds if this element is at the specified location.
   * The location spans column `startcolumn` of line `startline` to
   * column `endcolumn` of line `endline` in file `filepath`.
   * For more information, see
   * [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
   */
  predicate hasLocationInfo(
    string filepath, int startline, int startcolumn, int endline, int endcolumn
  );

  /** Gets the underlying `Node`. */
  Node getNode();
}
/** The edge/node/subpath relations that describe a graph of data flow path explanations. */
signature module PathGraphSig<PathNodeSig PathNode> {
  /** Holds if `(a,b)` is an edge in the graph of data flow path explanations. */
  predicate edges(PathNode a, PathNode b);

  /** Holds if `n` is a node in the graph of data flow path explanations. */
  predicate nodes(PathNode n, string key, string val);

  /**
   * Holds if `(arg, par, ret, out)` forms a subpath-tuple, that is, flow through
   * a subpath between `par` and `ret` with the connecting edges `arg -> par` and
   * `ret -> out` is summarized as the edge `arg -> out`.
   */
  predicate subpaths(PathNode arg, PathNode par, PathNode ret, PathNode out);
}
/**
 * Constructs a `PathGraph` from two `PathGraph`s by disjoint union.
 */
module MergePathGraph<
  PathNodeSig PathNode1, PathNodeSig PathNode2, PathGraphSig<PathNode1> Graph1,
  PathGraphSig<PathNode2> Graph2>
{
  // Tagged union of the two node types; the branches are disjoint, so every
  // merged node projects onto exactly one of the input graphs.
  private newtype TPathNode =
    TPathNode1(PathNode1 p) or
    TPathNode2(PathNode2 p)

  /** A node in a graph of path explanations that is formed by disjoint union of the two given graphs. */
  class PathNode extends TPathNode {
    /** Gets this as a projection on the first given `PathGraph`. */
    PathNode1 asPathNode1() { this = TPathNode1(result) }

    /** Gets this as a projection on the second given `PathGraph`. */
    PathNode2 asPathNode2() { this = TPathNode2(result) }

    /** Gets a textual representation of this element. */
    string toString() {
      // Exactly one disjunct holds per node, since the union is disjoint.
      result = this.asPathNode1().toString() or
      result = this.asPathNode2().toString()
    }

    /**
     * Holds if this element is at the specified location.
     * The location spans column `startcolumn` of line `startline` to
     * column `endcolumn` of line `endline` in file `filepath`.
     * For more information, see
     * [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
     */
    predicate hasLocationInfo(
      string filepath, int startline, int startcolumn, int endline, int endcolumn
    ) {
      this.asPathNode1().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn) or
      this.asPathNode2().hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
    }

    /** Gets the underlying `Node`. */
    Node getNode() {
      result = this.asPathNode1().getNode() or
      result = this.asPathNode2().getNode()
    }
  }

  /**
   * Provides the query predicates needed to include a graph in a path-problem query.
   */
  module PathGraph implements PathGraphSig<PathNode> {
    /** Holds if `(a,b)` is an edge in the graph of data flow path explanations. */
    query predicate edges(PathNode a, PathNode b) {
      // Edges never cross between the two input graphs; each disjunct only
      // relates nodes from the same projection.
      Graph1::edges(a.asPathNode1(), b.asPathNode1()) or
      Graph2::edges(a.asPathNode2(), b.asPathNode2())
    }

    /** Holds if `n` is a node in the graph of data flow path explanations. */
    query predicate nodes(PathNode n, string key, string val) {
      Graph1::nodes(n.asPathNode1(), key, val) or
      Graph2::nodes(n.asPathNode2(), key, val)
    }

    /**
     * Holds if `(arg, par, ret, out)` forms a subpath-tuple, that is, flow through
     * a subpath between `par` and `ret` with the connecting edges `arg -> par` and
     * `ret -> out` is summarized as the edge `arg -> out`.
     */
    query predicate subpaths(PathNode arg, PathNode par, PathNode ret, PathNode out) {
      Graph1::subpaths(arg.asPathNode1(), par.asPathNode1(), ret.asPathNode1(), out.asPathNode1()) or
      Graph2::subpaths(arg.asPathNode2(), par.asPathNode2(), ret.asPathNode2(), out.asPathNode2())
    }
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,396 @@
/**
* DEPRECATED: Use `Make` and `MakeWithState` instead.
*
* Provides a `Configuration` class backwards-compatible interface to the data
* flow library.
*/
private import DataFlowImplCommon
private import DataFlowImplSpecific::Private
import DataFlowImplSpecific::Public
private import DataFlowImpl
import DataFlowImplCommonPublic
import FlowStateString
/**
* A configuration of interprocedural data flow analysis. This defines
* sources, sinks, and any other configurable aspect of the analysis. Each
* use of the global data flow library must define its own unique extension
* of this abstract class. To create a configuration, extend this class with
* a subclass whose characteristic predicate is a unique singleton string.
* For example, write
*
* ```ql
* class MyAnalysisConfiguration extends DataFlow::Configuration {
* MyAnalysisConfiguration() { this = "MyAnalysisConfiguration" }
* // Override `isSource` and `isSink`.
* // Optionally override `isBarrier`.
* // Optionally override `isAdditionalFlowStep`.
* }
* ```
* Conceptually, this defines a graph where the nodes are `DataFlow::Node`s and
* the edges are those data-flow steps that preserve the value of the node
* along with any additional edges defined by `isAdditionalFlowStep`.
* Specifying nodes in `isBarrier` will remove those nodes from the graph, and
* specifying nodes in `isBarrierIn` and/or `isBarrierOut` will remove in-going
* and/or out-going edges from those nodes, respectively.
*
* Then, to query whether there is flow between some `source` and `sink`,
* write
*
* ```ql
* exists(MyAnalysisConfiguration cfg | cfg.hasFlow(source, sink))
* ```
*
* Multiple configurations can coexist, but two classes extending
* `DataFlow::Configuration` should never depend on each other. One of them
* should instead depend on a `DataFlow2::Configuration`, a
* `DataFlow3::Configuration`, or a `DataFlow4::Configuration`.
*/
abstract class Configuration extends string {
bindingset[this]
Configuration() { any() }
/**
* Holds if `source` is a relevant data flow source.
*/
predicate isSource(Node source) { none() }
/**
* Holds if `source` is a relevant data flow source with the given initial
* `state`.
*/
predicate isSource(Node source, FlowState state) { none() }
/**
* Holds if `sink` is a relevant data flow sink.
*/
predicate isSink(Node sink) { none() }

/**
 * Holds if `sink` is a relevant data flow sink accepting `state`.
 */
predicate isSink(Node sink, FlowState state) { none() }

/**
 * Holds if data flow through `node` is prohibited. This completely removes
 * `node` from the data flow graph.
 */
predicate isBarrier(Node node) { none() }

/**
 * Holds if data flow through `node` is prohibited when the flow state is
 * `state`.
 */
predicate isBarrier(Node node, FlowState state) { none() }

/** Holds if data flow into `node` is prohibited. */
predicate isBarrierIn(Node node) { none() }

/** Holds if data flow out of `node` is prohibited. */
predicate isBarrierOut(Node node) { none() }

/**
 * DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
 *
 * Holds if data flow through nodes guarded by `guard` is prohibited.
 */
deprecated predicate isBarrierGuard(BarrierGuard guard) { none() }

/**
 * DEPRECATED: Use `isBarrier` and `BarrierGuard` module instead.
 *
 * Holds if data flow through nodes guarded by `guard` is prohibited when
 * the flow state is `state`.
 */
deprecated predicate isBarrierGuard(BarrierGuard guard, FlowState state) { none() }

/**
 * Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
 */
predicate isAdditionalFlowStep(Node node1, Node node2) { none() }

/**
 * Holds if data may flow from `node1` to `node2` in addition to the normal data-flow steps.
 * This step is only applicable in `state1` and updates the flow state to `state2`.
 */
predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
  none()
}

/**
 * Holds if an arbitrary number of implicit read steps of content `c` may be
 * taken at `node`.
 */
predicate allowImplicitRead(Node node, ContentSet c) { none() }

/**
 * Gets the virtual dispatch branching limit when calculating field flow.
 * This can be overridden to a smaller value to improve performance (a
 * value of 0 disables field flow), or a larger value to get more results.
 * The default limit is 2.
 */
int fieldFlowBranchLimit() { result = 2 }
/**
 * Gets a data flow configuration feature to add restrictions to the set of
 * valid flow paths.
 *
 * - `FeatureHasSourceCallContext`:
 *   Assume that sources have some existing call context to disallow
 *   conflicting return-flow directly following the source.
 * - `FeatureHasSinkCallContext`:
 *   Assume that sinks have some existing call context to disallow
 *   conflicting argument-to-parameter flow directly preceding the sink.
 * - `FeatureEqualSourceSinkCallContext`:
 *   Implies both of the above and additionally ensures that the entire flow
 *   path preserves the call context.
 *
 * These features are generally not relevant for typical end-to-end data flow
 * queries, but should only be used for constructing paths that need to
 * somehow be pluggable in another path context.
 */
FlowFeature getAFeature() { none() }

/** Holds if sources should be grouped in the result of `hasFlowPath`. */
predicate sourceGrouping(Node source, string sourceGroup) { none() }

/** Holds if sinks should be grouped in the result of `hasFlowPath`. */
predicate sinkGrouping(Node sink, string sinkGroup) { none() }

/**
 * Holds if data may flow from `source` to `sink` for this configuration.
 *
 * Delegates to the module-level `hasFlow/3`, instantiated with `this`.
 */
predicate hasFlow(Node source, Node sink) { hasFlow(source, sink, this) }

/**
 * Holds if data may flow from `source` to `sink` for this configuration.
 *
 * The corresponding paths are generated from the end-points and the graph
 * included in the module `PathGraph`.
 */
predicate hasFlowPath(PathNode source, PathNode sink) { hasFlowPath(source, sink, this) }

/**
 * Holds if data may flow from some source to `sink` for this configuration.
 */
predicate hasFlowTo(Node sink) { hasFlowTo(sink, this) }

/**
 * Holds if data may flow from some source to the expression `sink` for this
 * configuration.
 */
predicate hasFlowToExpr(DataFlowExpr sink) { this.hasFlowTo(exprNode(sink)) }

/**
 * DEPRECATED: Use `FlowExploration<explorationLimit>` instead.
 *
 * Gets the exploration limit for `hasPartialFlow` and `hasPartialFlowRev`
 * measured in approximate number of interprocedural steps.
 */
deprecated int explorationLimit() { none() }

/**
 * Holds if hidden nodes should be included in the data flow graph.
 *
 * This feature should only be used for debugging or when the data flow graph
 * is not visualized (for example in a `path-problem` query).
 */
predicate includeHiddenNodes() { none() }
}
/**
* This class exists to prevent mutual recursion between the user-overridden
* member predicates of `Configuration` and the rest of the data-flow library.
* Good performance cannot be guaranteed in the presence of such recursion, so
* it should be replaced by using more than one copy of the data flow library.
*/
abstract private class ConfigurationRecursionPrevention extends Configuration {
  bindingset[this]
  ConfigurationRecursionPrevention() { any() }

  override predicate hasFlow(Node source, Node sink) {
    // Each `strictcount(...) < 0` disjunct below can never hold (a strict
    // count is always at least 1), so these clauses contribute no results.
    // Their purpose is to make the evaluation of `hasFlow` depend on every
    // user-overridable predicate, preventing the mutual recursion described
    // in the class documentation above.
    strictcount(Node n | this.isSource(n)) < 0
    or
    strictcount(Node n | this.isSource(n, _)) < 0
    or
    strictcount(Node n | this.isSink(n)) < 0
    or
    strictcount(Node n | this.isSink(n, _)) < 0
    or
    strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, n2)) < 0
    or
    strictcount(Node n1, Node n2 | this.isAdditionalFlowStep(n1, _, n2, _)) < 0
    or
    super.hasFlow(source, sink)
  }
}
/** A bridge class to access the deprecated `isBarrierGuard`. */
private class BarrierGuardGuardedNodeBridge extends Unit {
  /** Holds if `n` is guarded by a `BarrierGuard` of `config`. */
  abstract predicate guardedNode(Node n, Configuration config);

  /** Holds if `n` is guarded, for flow state `state`, by a `BarrierGuard` of `config`. */
  abstract predicate guardedNode(Node n, FlowState state, Configuration config);
}

/**
 * An implementation of the bridge that forwards to the deprecated
 * `isBarrierGuard` overloads of `Configuration`.
 */
private class BarrierGuardGuardedNode extends BarrierGuardGuardedNodeBridge {
  deprecated override predicate guardedNode(Node n, Configuration config) {
    exists(BarrierGuard g |
      config.isBarrierGuard(g) and
      n = g.getAGuardedNode()
    )
  }

  deprecated override predicate guardedNode(Node n, FlowState state, Configuration config) {
    exists(BarrierGuard g |
      config.isBarrierGuard(g, state) and
      n = g.getAGuardedNode()
    )
  }
}
/**
 * Gets a `FlowState` that is referenced by one of the user-overridable
 * predicates of `config`.
 */
private FlowState relevantState(Configuration config) {
  config.isAdditionalFlowStep(_, result, _, _)
  or
  config.isAdditionalFlowStep(_, _, _, result)
  or
  config.isSource(_, result)
  or
  config.isSink(_, result)
  or
  config.isBarrier(_, result)
}
/**
 * A pair of a `Configuration` and one of its relevant flow states (or the
 * empty state). This is used as the combined flow state when instantiating
 * the shared data-flow implementation for all configurations at once.
 */
private newtype TConfigState =
  TMkConfigState(Configuration config, FlowState state) {
    state = relevantState(config) or state instanceof FlowStateEmpty
  }

/** Gets the configuration component of the combined state `state`. */
private Configuration getConfig(TConfigState state) { state = TMkConfigState(result, _) }

/** Gets the flow-state component of the combined state `state`. */
private FlowState getState(TConfigState state) { state = TMkConfigState(_, result) }

/** Holds if there is exactly one `Configuration` instance in scope. */
private predicate singleConfiguration() { 1 = strictcount(Configuration c) }
/**
 * The instantiation of `FullStateConfigSig` that implements all
 * `Configuration` instances at once, using `TConfigState` — a configuration
 * paired with one of its flow states — as the combined flow state.
 */
private module Config implements FullStateConfigSig {
  class FlowState = TConfigState;

  predicate isSource(Node source, FlowState state) {
    getConfig(state).isSource(source, getState(state))
    or
    // A stateless source is mapped to the empty flow state.
    getConfig(state).isSource(source) and getState(state) instanceof FlowStateEmpty
  }

  predicate isSink(Node sink, FlowState state) {
    getConfig(state).isSink(sink, getState(state))
    or
    // A stateless sink is mapped to the empty flow state.
    getConfig(state).isSink(sink) and getState(state) instanceof FlowStateEmpty
  }

  predicate isBarrier(Node node) { none() }

  predicate isBarrier(Node node, FlowState state) {
    getConfig(state).isBarrier(node, getState(state)) or
    getConfig(state).isBarrier(node) or
    // Nodes guarded by a (deprecated) `BarrierGuard` also act as barriers.
    any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getState(state), getConfig(state)) or
    any(BarrierGuardGuardedNodeBridge b).guardedNode(node, getConfig(state))
  }

  predicate isBarrierIn(Node node) { any(Configuration config).isBarrierIn(node) }

  predicate isBarrierOut(Node node) { any(Configuration config).isBarrierOut(node) }

  predicate isAdditionalFlowStep(Node node1, Node node2) {
    // A state-independent step is only applied globally when there is a single
    // configuration; with multiple configurations it is routed through the
    // state-aware overload below, which keeps the configuration component of
    // the combined state unchanged.
    singleConfiguration() and
    any(Configuration config).isAdditionalFlowStep(node1, node2)
  }

  predicate isAdditionalFlowStep(Node node1, FlowState state1, Node node2, FlowState state2) {
    getConfig(state1).isAdditionalFlowStep(node1, getState(state1), node2, getState(state2)) and
    getConfig(state2) = getConfig(state1)
    or
    not singleConfiguration() and
    getConfig(state1).isAdditionalFlowStep(node1, node2) and
    state2 = state1
  }

  predicate allowImplicitRead(Node node, ContentSet c) {
    any(Configuration config).allowImplicitRead(node, c)
  }

  int fieldFlowBranchLimit() { result = min(any(Configuration config).fieldFlowBranchLimit()) }

  FlowFeature getAFeature() { result = any(Configuration config).getAFeature() }

  predicate sourceGrouping(Node source, string sourceGroup) {
    any(Configuration config).sourceGrouping(source, sourceGroup)
  }

  predicate sinkGrouping(Node sink, string sinkGroup) {
    any(Configuration config).sinkGrouping(sink, sinkGroup)
  }

  predicate includeHiddenNodes() { any(Configuration config).includeHiddenNodes() }
}
private import Impl<Config> as I
import I
/**
* A `Node` augmented with a call context (except for sinks), an access path, and a configuration.
* Only those `PathNode`s that are reachable from a source, and which can reach a sink, are generated.
*/
class PathNode instanceof I::PathNode {
  /** Gets a textual representation of this element. */
  final string toString() { result = super.toString() }

  /**
   * Gets a textual representation of this element, including a textual
   * representation of the call context.
   */
  final string toStringWithContext() { result = super.toStringWithContext() }

  /**
   * Holds if this element is at the specified location.
   * The location spans column `startcolumn` of line `startline` to
   * column `endcolumn` of line `endline` in file `filepath`.
   * For more information, see
   * [Locations](https://codeql.github.com/docs/writing-codeql-queries/providing-locations-in-codeql-queries/).
   */
  final predicate hasLocationInfo(
    string filepath, int startline, int startcolumn, int endline, int endcolumn
  ) {
    super.hasLocationInfo(filepath, startline, startcolumn, endline, endcolumn)
  }

  /** Gets the underlying `Node`. */
  final Node getNode() { result = super.getNode() }

  // Note: `super.getState()` yields the internal combined `TConfigState`; the
  // two accessors below project it onto its user-facing components via the
  // module-level `getState`/`getConfig`.
  /** Gets the `FlowState` of this node. */
  final FlowState getState() { result = getState(super.getState()) }

  /** Gets the associated configuration. */
  final Configuration getConfiguration() { result = getConfig(super.getState()) }

  /** Gets a successor of this node, if any. */
  final PathNode getASuccessor() { result = super.getASuccessor() }

  /** Holds if this node is a source. */
  final predicate isSource() { super.isSource() }

  /** Holds if this node is a grouping of source nodes. */
  final predicate isSourceGroup(string group) { super.isSourceGroup(group) }

  /** Holds if this node is a grouping of sink nodes. */
  final predicate isSinkGroup(string group) { super.isSinkGroup(group) }
}
/** Holds if data may flow from `source` to `sink` for `config`. */
private predicate hasFlow(Node source, Node sink, Configuration config) {
  exists(PathNode srcPath, PathNode snkPath |
    srcPath.getNode() = source and
    snkPath.getNode() = sink and
    hasFlowPath(srcPath, snkPath, config)
  )
}

/** Holds if there is a flow path from `source` to `sink` belonging to `config`. */
private predicate hasFlowPath(PathNode source, PathNode sink, Configuration config) {
  source.getConfiguration() = config and hasFlowPath(source, sink)
}

/** Holds if data may flow from some source to `sink` for `config`. */
private predicate hasFlowTo(Node sink, Configuration config) { hasFlow(_, sink, config) }

/** An alias of `hasFlow/3`. */
predicate flowsTo = hasFlow/3;

File diff suppressed because it is too large Load Diff

View File

@@ -3,15 +3,18 @@ private import DataFlowImplSpecific::Public
import Cached
module DataFlowImplCommonPublic {
/** A state value to track during data flow. */
class FlowState = string;
/** Provides `FlowState = string`. */
module FlowStateString {
/** A state value to track during data flow. */
class FlowState = string;
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
/**
* The default state, which is used when the state is unspecified for a source
* or a sink.
*/
class FlowStateEmpty extends FlowState {
FlowStateEmpty() { this = "" }
}
}
private newtype TFlowFeature =

View File

@@ -1157,6 +1157,8 @@ predicate jumpStep(Node pred, Node succ) {
succ.asExpr().getExpr().(ConstantReadAccess).getValue() = pred.asExpr().getExpr()
or
FlowSummaryImpl::Private::Steps::summaryJumpStep(pred, succ)
or
any(AdditionalJumpStep s).step(pred, succ)
}
private ContentSet getKeywordContent(string name) {
@@ -1484,3 +1486,24 @@ ContentApprox getContentApprox(Content c) {
or
result = TNonElementContentApprox(c)
}
/**
* A unit class for adding additional jump steps.
*
* Extend this class to add additional jump steps.
*/
class AdditionalJumpStep extends Unit {
/**
* Holds if data can flow from `pred` to `succ` in a way that discards call contexts.
*/
abstract predicate step(Node pred, Node succ);
}
/**
* Gets an additional term that is added to the `join` and `branch` computations to reflect
* an additional forward or backwards branching factor that is not taken into account
* when calculating the (virtual) dispatch cost.
*
* Argument `arg` is part of a path from a source to a sink, and `p` is the target parameter.
*/
int getAdditionalFlowIntoCallNodeTerm(ArgumentNode arg, ParameterNode p) { none() }

View File

@@ -984,10 +984,109 @@ class ClassNode extends ModuleNode {
ClassNode() { this.isClass() }
}
/**
* A data flow node corresponding to a literal expression.
*/
class LiteralNode extends ExprNode {
private CfgNodes::ExprNodes::LiteralCfgNode literalCfgNode;
LiteralNode() { this.asExpr() = literalCfgNode }
/** Gets the underlying AST node as a `Literal`. */
Literal asLiteralAstNode() { result = literalCfgNode.getExpr() }
}
/**
* A data flow node corresponding to an operation expression.
*/
class OperationNode extends ExprNode {
private CfgNodes::ExprNodes::OperationCfgNode operationCfgNode;
OperationNode() { this.asExpr() = operationCfgNode }
/** Gets the underlying AST node as an `Operation`. */
Operation asOperationAstNode() { result = operationCfgNode.getExpr() }
/** Gets the operator of this operation. */
final string getOperator() { result = operationCfgNode.getOperator() }
/** Gets an operand of this operation. */
final Node getAnOperand() { result.asExpr() = operationCfgNode.getAnOperand() }
}
/**
* A data flow node corresponding to a control expression (e.g. `if`, `while`, `for`).
*/
class ControlExprNode extends ExprNode {
private CfgNodes::ExprNodes::ControlExprCfgNode controlExprCfgNode;
ControlExprNode() { this.asExpr() = controlExprCfgNode }
/** Gets the underlying AST node as a `ControlExpr`. */
ControlExpr asControlExprAstNode() { result = controlExprCfgNode.getExpr() }
}
/**
* A data flow node corresponding to a variable access expression.
*/
class VariableAccessNode extends ExprNode {
private CfgNodes::ExprNodes::VariableAccessCfgNode variableAccessCfgNode;
VariableAccessNode() { this.asExpr() = variableAccessCfgNode }
/** Gets the underlying AST node as a `VariableAccess`. */
VariableAccess asVariableAccessAstNode() { result = variableAccessCfgNode.getExpr() }
}
/**
* A data flow node corresponding to a constant access expression.
*/
class ConstantAccessNode extends ExprNode {
private CfgNodes::ExprNodes::ConstantAccessCfgNode constantAccessCfgNode;
ConstantAccessNode() { this.asExpr() = constantAccessCfgNode }
/** Gets the underlying AST node as a `ConstantAccess`. */
ConstantAccess asConstantAccessAstNode() { result = constantAccessCfgNode.getExpr() }
/** Gets the node corresponding to the scope expression. */
final Node getScopeNode() { result.asExpr() = constantAccessCfgNode.getScopeExpr() }
}
/**
* A data flow node corresponding to a LHS expression.
*/
class LhsExprNode extends ExprNode {
private CfgNodes::ExprNodes::LhsExprCfgNode lhsExprCfgNode;
LhsExprNode() { this.asExpr() = lhsExprCfgNode }
/** Gets the underlying AST node as a `LhsExpr`. */
LhsExpr asLhsExprAstNode() { result = lhsExprCfgNode.getExpr() }
/** Gets a variable used in (or introduced by) this LHS. */
Variable getAVariable() { result = lhsExprCfgNode.getAVariable() }
}
/**
* A data flow node corresponding to a statement sequence expression.
*/
class StmtSequenceNode extends ExprNode {
private CfgNodes::ExprNodes::StmtSequenceCfgNode stmtSequenceCfgNode;
StmtSequenceNode() { this.asExpr() = stmtSequenceCfgNode }
/** Gets the underlying AST node as a `StmtSequence`. */
StmtSequence asStmtSequenceAstNode() { result = stmtSequenceCfgNode.getExpr() }
/** Gets the last statement in this sequence, if any. */
final ExprNode getLastStmt() { result.asExpr() = stmtSequenceCfgNode.getLastStmt() }
}
/**
* A data flow node corresponding to a method, block, or lambda expression.
*/
class CallableNode extends ExprNode {
class CallableNode extends StmtSequenceNode {
private Callable callable;
CallableNode() { this.asExpr().getExpr() = callable }

View File

@@ -248,7 +248,9 @@ module Public {
/**
* Holds if all the summaries that apply to `this` are auto generated and not manually created.
*/
final predicate isAutoGenerated() { this.hasProvenance("generated") and not this.isManual() }
final predicate isAutoGenerated() {
this.hasProvenance(["generated", "ai-generated"]) and not this.isManual()
}
/**
* Holds if there exists a manual summary that applies to `this`.
@@ -268,7 +270,7 @@ module Public {
/**
* Holds if the neutral is auto generated.
*/
predicate isAutoGenerated() { neutralElement(this, "generated") }
predicate isAutoGenerated() { neutralElement(this, ["generated", "ai-generated"]) }
/**
* Holds if there exists a manual neutral that applies to `this`.
@@ -299,8 +301,8 @@ module Private {
TWithoutContentSummaryComponent(ContentSet c) or
TWithContentSummaryComponent(ContentSet c)
private TParameterSummaryComponent thisParam() {
result = TParameterSummaryComponent(instanceParameterPosition())
private TParameterSummaryComponent callbackSelfParam() {
result = TParameterSummaryComponent(callbackSelfParameterPosition())
}
newtype TSummaryComponentStack =
@@ -309,7 +311,7 @@ module Private {
any(RequiredSummaryComponentStack x).required(head, tail)
or
any(RequiredSummaryComponentStack x).required(TParameterSummaryComponent(_), tail) and
head = thisParam()
head = callbackSelfParam()
or
derivedFluentFlowPush(_, _, _, head, tail, _)
}
@@ -334,7 +336,7 @@ module Private {
callbackRef = s.drop(_) and
(isCallbackParameter(callbackRef) or callbackRef.head() = TReturnSummaryComponent(_)) and
input = callbackRef.tail() and
output = TConsSummaryComponentStack(thisParam(), input) and
output = TConsSummaryComponentStack(callbackSelfParam(), input) and
preservesValue = true
)
or
@@ -437,6 +439,9 @@ module Private {
out.head() = TParameterSummaryComponent(_) and
s = out.tail()
)
or
// Add the post-update node corresponding to the requested argument node
outputState(c, s) and isCallbackParameter(s)
}
private newtype TSummaryNodeState =
@@ -1010,7 +1015,7 @@ module Private {
private predicate relevantSummaryElementGenerated(
AccessPath inSpec, AccessPath outSpec, string kind
) {
summaryElement(this, inSpec, outSpec, kind, "generated") and
summaryElement(this, inSpec, outSpec, kind, ["generated", "ai-generated"]) and
not summaryElement(this, _, _, _, "manual")
}
@@ -1202,11 +1207,11 @@ module Private {
}
private string renderProvenance(SummarizedCallable c) {
if c.isAutoGenerated() then result = "generated" else result = "manual"
if c.isManual() then result = "manual" else c.hasProvenance(result)
}
private string renderProvenanceNeutral(NeutralCallable c) {
if c.isAutoGenerated() then result = "generated" else result = "manual"
if c.isManual() then result = "manual" else c.hasProvenance(result)
}
/**

View File

@@ -15,8 +15,8 @@ class SummarizedCallableBase = string;
DataFlowCallable inject(SummarizedCallable c) { result.asLibraryCallable() = c }
/** Gets the parameter position of the instance parameter. */
ArgumentPosition instanceParameterPosition() { none() } // disables implicit summary flow to `self` for callbacks
/** Gets the parameter position representing a callback itself, if any. */
ArgumentPosition callbackSelfParameterPosition() { none() } // disables implicit summary flow to `self` for callbacks
/** Gets the synthesized summary data-flow node for the given values. */
Node summaryNode(SummarizedCallable c, SummaryNodeState state) { result = TSummaryNode(c, state) }

View File

@@ -0,0 +1,64 @@
/**
* Provides classes for performing local (intra-procedural) and
* global (inter-procedural) taint-tracking analyses.
*/
import TaintTrackingParameter::Public
private import TaintTrackingParameter::Private
/**
 * Extends a user-provided configuration `Config` with the default taint
 * sanitizers, default additional taint steps, and default implicit reads.
 */
private module AddTaintDefaults<DataFlowInternal::FullStateConfigSig Config> implements
  DataFlowInternal::FullStateConfigSig
{
  import Config

  predicate isBarrier(DataFlow::Node node) {
    Config::isBarrier(node) or defaultTaintSanitizer(node)
  }

  predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
    Config::isAdditionalFlowStep(node1, node2) or
    defaultAdditionalTaintStep(node1, node2)
  }

  predicate allowImplicitRead(DataFlow::Node node, DataFlow::ContentSet c) {
    Config::allowImplicitRead(node, c)
    or
    // Default implicit reads are allowed at sinks and at the start of
    // user-defined additional flow steps.
    (
      Config::isSink(node, _) or
      Config::isAdditionalFlowStep(node, _) or
      Config::isAdditionalFlowStep(node, _, _, _)
    ) and
    defaultImplicitTaintRead(node, c)
  }
}

/**
 * Constructs a standard taint tracking computation.
 */
module Make<DataFlow::ConfigSig Config> implements DataFlow::DataFlowSig {
  private module Config0 implements DataFlowInternal::FullStateConfigSig {
    import DataFlowInternal::DefaultState<Config>
    import Config
  }

  private module C implements DataFlowInternal::FullStateConfigSig {
    import AddTaintDefaults<Config0>
  }

  import DataFlowInternal::Impl<C>
}

/**
 * Constructs a taint tracking computation using flow state.
 */
module MakeWithState<DataFlow::StateConfigSig Config> implements DataFlow::DataFlowSig {
  private module Config0 implements DataFlowInternal::FullStateConfigSig {
    import Config
  }

  private module C implements DataFlowInternal::FullStateConfigSig {
    import AddTaintDefaults<Config0>
  }

  import DataFlowInternal::Impl<C>
}

View File

@@ -2,5 +2,6 @@ import codeql.ruby.dataflow.internal.TaintTrackingPublic as Public
module Private {
import codeql.ruby.DataFlow::DataFlow as DataFlow
import codeql.ruby.dataflow.internal.DataFlowImpl as DataFlowInternal
import codeql.ruby.dataflow.internal.TaintTrackingPrivate
}

View File

@@ -0,0 +1,137 @@
/**
* Provides default sources, sinks and sanitizers for reasoning about
* zip slip vulnerabilities, as well as extension points for
* adding your own.
*/
private import codeql.ruby.AST
private import codeql.ruby.ApiGraphs
private import codeql.ruby.CFG
private import codeql.ruby.Concepts
private import codeql.ruby.DataFlow
private import codeql.ruby.dataflow.BarrierGuards
private import codeql.ruby.dataflow.RemoteFlowSources
/**
* Provides default sources, sinks and sanitizers for reasoning about
* zip slip vulnerabilities, as well as extension points for
* adding your own.
*/
module ZipSlip {
  /**
   * A data flow source for zip slip vulnerabilities.
   */
  abstract class Source extends DataFlow::Node { }

  /**
   * A data flow sink for zip slip vulnerabilities.
   */
  abstract class Sink extends DataFlow::Node { }

  /**
   * A sanitizer for zip slip vulnerabilities.
   */
  abstract class Sanitizer extends DataFlow::Node { }

  /**
   * A file system access, considered as a flow sink.
   */
  class FileSystemAccessAsSink extends Sink {
    FileSystemAccessAsSink() { this = any(FileSystemAccess e).getAPathArgument() }
  }

  /**
   * A call to `Zlib::GzipReader.open(path)`, considered a flow source.
   */
  private class GzipReaderOpen extends Source {
    GzipReaderOpen() {
      (
        this = API::getTopLevelMember("Zlib").getMember("GzipReader").getReturn("open").asSource()
        or
        this = API::getTopLevelMember("Zlib").getMember("GzipReader").getInstance().asSource()
      ) and
      // If the argument refers to a string constant, then it is a hardcoded
      // path, and this file is considered safe.
      not this.(DataFlow::CallNode)
          .getArgument(0)
          .getALocalSource()
          .getConstantValue()
          .isStringlikeValue(_)
    }
  }

  /**
   * A call to `Gem::Package::TarReader.new(file_stream)`, considered a flow source.
   */
  private class TarReaderInstance extends Source {
    TarReaderInstance() {
      exists(API::MethodAccessNode newTarReader |
        newTarReader =
          API::getTopLevelMember("Gem").getMember("Package").getMember("TarReader").getMethod("new")
      |
        // NOTE(review): unlike `GzipReaderOpen` and `ZipFileOpen`, there is no
        // hardcoded-path check here, because `TarReader.new` is called with an
        // IO object rather than a file path. Modeling the IO source (likely in
        // the internal IO library model) would reduce false positives; for now
        // this class may over-approximate.
        // The instance is either returned from the call directly...
        not exists(newTarReader.getBlock()) and this = newTarReader.getReturn().asSource()
        or
        // ...or yielded to a block parameter.
        this = newTarReader.getBlock().getParameter(0).asSource()
      )
    }
  }

  /**
   * A call to `Zip::File.open(path)`, considered a flow source.
   */
  private class ZipFileOpen extends Source {
    ZipFileOpen() {
      exists(API::MethodAccessNode zipOpen |
        zipOpen = API::getTopLevelMember("Zip").getMember("File").getMethod("open") and
        // If the argument refers to a string constant, then it is a hardcoded
        // path, and this file is considered safe.
        not zipOpen
            .getCallNode()
            .getArgument(0)
            .getALocalSource()
            .getConstantValue()
            .isStringlikeValue(_)
      |
        // The variable-assignment case: `zip_file = Zip::File.open(path)`.
        not exists(zipOpen.getBlock()) and this = zipOpen.getReturn().asSource()
        or
        // The block case: `Zip::File.open(path) do |zip_file| ... end`.
        this = zipOpen.getBlock().getParameter(0).asSource()
      )
    }
  }

  /**
   * A comparison with a constant string, considered as a sanitizer-guard.
   */
  private class StringConstCompareAsSanitizer extends Sanitizer, StringConstCompareBarrier { }

  /**
   * An inclusion check against an array of constant strings, considered as a
   * sanitizer-guard.
   */
  private class StringConstArrayInclusionCallAsSanitizer extends Sanitizer,
    StringConstArrayInclusionCallBarrier
  { }

  /**
   * A sanitizer like `File.expand_path(path).start_with?`, where `path` is the
   * path of a single entry inside the archive. It is assumed that if
   * `File.expand_path` is called, it is to verify that the path is safe, so
   * `start_with?` and other comparisons are deliberately not modeled, to
   * avoid false negatives.
   */
  private class ExpandedPathStartsWithAsSanitizer extends Sanitizer {
    ExpandedPathStartsWithAsSanitizer() {
      exists(DataFlow::CallNode cn |
        cn.getMethodName() = "expand_path" and
        this = cn.getArgument(0)
      )
    }
  }

  /**
   * The existing `Path::PathSanitization` model, created for regular path
   * traversals, also considered as a sanitizer here.
   */
  private class PathSanitizationAsSanitizer extends Sanitizer instanceof Path::PathSanitization { }
}

View File

@@ -0,0 +1,38 @@
/**
* Provides a taint tracking configuration for reasoning about
* zip slip vulnerabilities.
*/
import ZipSlipCustomizations
private import codeql.ruby.Concepts
private import codeql.ruby.DataFlow
private import codeql.ruby.TaintTracking
private import codeql.ruby.ApiGraphs
/**
* A taint-tracking configuration for reasoning about zip slip
* vulnerabilities.
*/
class Configuration extends TaintTracking::Configuration {
  Configuration() { this = "ZipSlip" }

  override predicate isSource(DataFlow::Node source) { source instanceof ZipSlip::Source }

  override predicate isSink(DataFlow::Node sink) { sink instanceof ZipSlip::Sink }

  /**
   * Propagates taint from an archive-entry object to the result of calling
   * `full_name` (used by `Gem::Package::TarReader::Entry` and `Zlib`) or
   * `name` (used by `Zip::File`) on it.
   *
   * NOTE(review): ideally this would also be restricted to calls on the
   * relevant entry classes, e.g. via
   * `API::getTopLevelMember("Gem").getMember("Package").getMember("TarReader").getMember("Entry")`
   * and similar for the other libraries, but currently only the method name
   * is checked, which may over-approximate.
   */
  override predicate isAdditionalTaintStep(DataFlow::Node nodeFrom, DataFlow::Node nodeTo) {
    exists(DataFlow::CallNode cn |
      cn.getReceiver() = nodeFrom and
      cn.getMethodName() in ["full_name", "name"] and
      cn = nodeTo
    )
  }

  override predicate isSanitizer(DataFlow::Node node) { node instanceof ZipSlip::Sanitizer }
}

View File

@@ -224,7 +224,8 @@ private module Request {
}
abstract private class RequestInputAccess extends RequestMethodCall,
Http::Server::RequestInputAccess::Range {
Http::Server::RequestInputAccess::Range
{
override string getSourceType() { result = "ActionDispatch::Request#" + this.getMethodName() }
}
@@ -520,15 +521,15 @@ ActionControllerClass getAssociatedControllerClass(ErbFile f) {
* templates in `app/views/` and `app/views/layouts/`.
*/
predicate controllerTemplateFile(ActionControllerClass cls, ErbFile templateFile) {
exists(string templatesPath, string sourcePrefix, string subPath, string controllerPath |
exists(string sourcePrefix, string subPath, string controllerPath |
controllerPath = cls.getLocation().getFile().getRelativePath() and
templatesPath = templateFile.getParentContainer().getRelativePath() and
// `sourcePrefix` is either a prefix path ending in a slash, or empty if
// the rails app is at the source root
sourcePrefix = [controllerPath.regexpCapture("^(.*/)app/controllers/(?:.*?)/(?:[^/]*)$", 1), ""] and
controllerPath = sourcePrefix + "app/controllers/" + subPath + "_controller.rb" and
(
templatesPath = sourcePrefix + "app/views/" + subPath or
sourcePrefix + "app/views/" + subPath = templateFile.getParentContainer().getRelativePath()
or
templateFile.getRelativePath().matches(sourcePrefix + "app/views/layouts/" + subPath + "%")
)
)
@@ -556,7 +557,8 @@ class ActionControllerSkipForgeryProtectionCall extends CsrfProtectionSetting::R
* A call to `protect_from_forgery`.
*/
private class ActionControllerProtectFromForgeryCall extends CsrfProtectionSetting::Range,
DataFlow::CallNode {
DataFlow::CallNode
{
ActionControllerProtectFromForgeryCall() {
this = actionControllerInstance().getAMethodCall("protect_from_forgery")
}
@@ -576,7 +578,8 @@ private class ActionControllerProtectFromForgeryCall extends CsrfProtectionSetti
* A call to `send_file`, which sends the file at the given path to the client.
*/
private class SendFile extends FileSystemAccess::Range, Http::Server::HttpResponse::Range,
DataFlow::CallNode {
DataFlow::CallNode
{
SendFile() {
this = [actionControllerInstance(), Response::response()].getAMethodCall("send_file")
}

View File

@@ -93,7 +93,8 @@ private class ActionViewCookiesCall extends ActionViewContextCall, CookiesCallIm
* A call to `render`, `render_to_body` or `render_to_string`, seen as an
* `HttpResponse`.
*/
private class RenderCallAsHttpResponse extends DataFlow::CallNode, Http::Server::HttpResponse::Range {
private class RenderCallAsHttpResponse extends DataFlow::CallNode, Http::Server::HttpResponse::Range
{
RenderCallAsHttpResponse() {
this.asExpr().getExpr() instanceof Rails::RenderCall or
this.asExpr().getExpr() instanceof Rails::RenderToCall

View File

@@ -219,7 +219,8 @@ class ActiveRecordSqlExecutionRange extends SqlExecution::Range {
* A node that may evaluate to one or more `ActiveRecordModelClass` instances.
*/
abstract class ActiveRecordModelInstantiation extends OrmInstantiation::Range,
DataFlow::LocalSourceNode {
DataFlow::LocalSourceNode
{
/**
* Gets the `ActiveRecordModelClass` that this instance belongs to.
*/
@@ -272,7 +273,8 @@ private Expr getUltimateReceiver(MethodCall call) {
}
// A call to `find`, `where`, etc. that may return active record model object(s)
private class ActiveRecordModelFinderCall extends ActiveRecordModelInstantiation, DataFlow::CallNode {
private class ActiveRecordModelFinderCall extends ActiveRecordModelInstantiation, DataFlow::CallNode
{
private ActiveRecordModelClass cls;
ActiveRecordModelFinderCall() {
@@ -305,7 +307,8 @@ private class ActiveRecordModelFinderCall extends ActiveRecordModelInstantiation
// A `self` reference that may resolve to an active record model object
private class ActiveRecordModelClassSelfReference extends ActiveRecordModelInstantiation,
SsaSelfDefinitionNode {
SsaSelfDefinitionNode
{
private ActiveRecordModelClass cls;
ActiveRecordModelClassSelfReference() {
@@ -465,7 +468,8 @@ private module Persistence {
/** A call to e.g. `user.update(name: "foo")` */
private class UpdateLikeInstanceMethodCall extends PersistentWriteAccess::Range,
ActiveRecordInstanceMethodCall {
ActiveRecordInstanceMethodCall
{
UpdateLikeInstanceMethodCall() {
this.getMethodName() = ["update", "update!", "update_attributes", "update_attributes!"]
}
@@ -485,7 +489,8 @@ private module Persistence {
/** A call to e.g. `user.update_attribute(name, "foo")` */
private class UpdateAttributeCall extends PersistentWriteAccess::Range,
ActiveRecordInstanceMethodCall {
ActiveRecordInstanceMethodCall
{
UpdateAttributeCall() { this.getMethodName() = "update_attribute" }
override DataFlow::Node getValue() {
@@ -688,7 +693,8 @@ private class ActiveRecordCollectionProxyMethodCall extends DataFlow::CallNode {
/**
* A call to an association method which yields ActiveRecord instances.
*/
private class ActiveRecordAssociationModelInstantiation extends ActiveRecordModelInstantiation instanceof ActiveRecordAssociationMethodCall {
private class ActiveRecordAssociationModelInstantiation extends ActiveRecordModelInstantiation instanceof ActiveRecordAssociationMethodCall
{
override ActiveRecordModelClass getClass() {
result = this.(ActiveRecordAssociationMethodCall).getAssociation().getTargetClass()
}
@@ -697,7 +703,8 @@ private class ActiveRecordAssociationModelInstantiation extends ActiveRecordMode
/**
* A call to a method on a collection proxy which yields ActiveRecord instances.
*/
private class ActiveRecordCollectionProxyModelInstantiation extends ActiveRecordModelInstantiation instanceof ActiveRecordCollectionProxyMethodCall {
private class ActiveRecordCollectionProxyModelInstantiation extends ActiveRecordModelInstantiation instanceof ActiveRecordCollectionProxyMethodCall
{
override ActiveRecordModelClass getClass() {
result = this.(ActiveRecordCollectionProxyMethodCall).getAssociation().getTargetClass()
}

View File

@@ -215,7 +215,8 @@ module ActiveResource {
}
private class ModelClassMethodCallAsHttpRequest extends Http::Client::Request::Range,
ModelClassMethodCall {
ModelClassMethodCall
{
ModelClass cls;
ModelClassMethodCallAsHttpRequest() {
@@ -239,7 +240,8 @@ module ActiveResource {
}
private class ModelInstanceMethodCallAsHttpRequest extends Http::Client::Request::Range,
ModelInstanceMethodCall {
ModelInstanceMethodCall
{
ModelClass cls;
ModelInstanceMethodCallAsHttpRequest() {

View File

@@ -166,7 +166,8 @@ module ActiveStorage {
* A call on an ActiveStorage object that results in an image transformation.
* Arguments to these calls may be executed as system commands.
*/
private class ImageProcessingCall extends SystemCommandExecution::Range instanceof DataFlow::CallNode {
private class ImageProcessingCall extends SystemCommandExecution::Range instanceof DataFlow::CallNode
{
ImageProcessingCall() {
this.getReceiver() instanceof BlobInstance and
this.getMethodName() = ["variant", "preview", "representation"]

View File

@@ -81,7 +81,8 @@ module File {
}
private class FileModulePermissionModification extends FileSystemPermissionModification::Range,
DataFlow::CallNode {
DataFlow::CallNode
{
private DataFlow::Node permissionArg;
FileModulePermissionModification() {
@@ -164,7 +165,8 @@ module FileUtils {
}
private class FileUtilsPermissionModification extends FileSystemPermissionModification::Range,
DataFlow::CallNode {
DataFlow::CallNode
{
private DataFlow::Node permissionArg;
FileUtilsPermissionModification() {

View File

@@ -29,16 +29,13 @@ private module RenderCallUtils {
result = getTemplatePathValue(renderCall).regexpCapture("^/?(.*/)?(?:[^/]*?)$", 1)
}
// everything after the final slash, or the whole string if there is no slash
private string getBaseName(MethodCall renderCall) {
result = getTemplatePathValue(renderCall).regexpCapture("^/?(?:.*/)?([^/]*?)$", 1)
}
/**
* Gets the template file to be rendered by this render call, if any.
*/
ErbFile getTemplateFile(MethodCall renderCall) {
result.getTemplateName() = getBaseName(renderCall) and
// everything after the final slash, or the whole string if there is no slash
result.getTemplateName() =
getTemplatePathValue(renderCall).regexpCapture("^/?(?:.*/)?([^/]*?)$", 1) and
result.getRelativePath().matches("%app/views/" + getSubPath(renderCall) + "%")
}
@@ -230,7 +227,8 @@ private module Settings {
* production code.
*/
private class AllowForgeryProtectionSetting extends Settings::BooleanSetting,
CsrfProtectionSetting::Range {
CsrfProtectionSetting::Range
{
AllowForgeryProtectionSetting() {
this = Config::actionController().getAMethodCall("allow_forgery_protection=")
}
@@ -244,7 +242,8 @@ private class AllowForgeryProtectionSetting extends Settings::BooleanSetting,
* https://ruby-doc.org/stdlib-2.7.1/libdoc/openssl/rdoc/OpenSSL/Cipher.html
*/
private class EncryptedCookieCipherSetting extends Settings::StringlikeSetting,
CookieSecurityConfigurationSetting::Range {
CookieSecurityConfigurationSetting::Range
{
EncryptedCookieCipherSetting() {
this = Config::actionDispatch().getAMethodCall("encrypted_cookie_cipher=")
}
@@ -264,7 +263,8 @@ private class EncryptedCookieCipherSetting extends Settings::StringlikeSetting,
* than the older AES-256-CBC cipher. Defaults to true.
*/
private class UseAuthenticatedCookieEncryptionSetting extends Settings::BooleanSetting,
CookieSecurityConfigurationSetting::Range {
CookieSecurityConfigurationSetting::Range
{
UseAuthenticatedCookieEncryptionSetting() {
this = Config::actionDispatch().getAMethodCall("use_authenticated_cookie_encryption=")
}
@@ -286,7 +286,8 @@ private class UseAuthenticatedCookieEncryptionSetting extends Settings::BooleanS
* https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie/SameSite#strict
*/
private class CookiesSameSiteProtectionSetting extends Settings::NillableStringlikeSetting,
CookieSecurityConfigurationSetting::Range {
CookieSecurityConfigurationSetting::Range
{
CookiesSameSiteProtectionSetting() {
this = Config::actionDispatch().getAMethodCall("cookies_same_site_protection=")
}

View File

@@ -27,7 +27,8 @@ module Railties {
* A call to `Rails::Generators::Actions#execute_command`.
* This method concatenates its first and second arguments and executes the result as a shell command.
*/
private class ExecuteCommandCall extends SystemCommandExecution::Range instanceof DataFlow::CallNode {
private class ExecuteCommandCall extends SystemCommandExecution::Range instanceof DataFlow::CallNode
{
ExecuteCommandCall() {
this = generatorsActionsClass().getAnInstanceSelf().getAMethodCall("execute_command")
}
@@ -40,7 +41,8 @@ module Railties {
/**
* A call to a method in `Rails::Generators::Actions` which delegates to `execute_command`.
*/
private class ExecuteCommandWrapperCall extends SystemCommandExecution::Range instanceof DataFlow::CallNode {
private class ExecuteCommandWrapperCall extends SystemCommandExecution::Range instanceof DataFlow::CallNode
{
ExecuteCommandWrapperCall() {
this =
generatorsActionsClass()

View File

@@ -73,7 +73,8 @@ module Twirp {
/** A parameter that will receive parts of the url when handling an incoming request. */
class UnmarshaledParameter extends Http::Server::RequestInputAccess::Range,
DataFlow::ParameterNode {
DataFlow::ParameterNode
{
UnmarshaledParameter() {
exists(ServiceInstantiation i | i.getAHandlerMethod().getParameter(0) = this.asParameter())
}

View File

@@ -9,6 +9,7 @@ private import codeql.ruby.controlflow.CfgNodes::ExprNodes
private import codeql.ruby.DataFlow
private import codeql.ruby.dataflow.internal.DataFlowPrivate as DataFlowPrivate
private import codeql.ruby.ast.internal.Constant
private import codeql.ruby.ast.internal.Module
/**
* Provides modeling for ActionController filters.
@@ -34,6 +35,17 @@ module Filters {
}
}
bindingset[call]
pragma[inline_late]
private ActionControllerActionMethod getADescendentAction(MethodCallCfgNode call) {
result = call.getExpr().getEnclosingModule().getAMethod()
or
exists(ModuleBase m |
m.getModule() = call.getExpr().getEnclosingModule().getModule().getAnImmediateDescendent+() and
result = m.getAMethod()
)
}
/**
* A call to a class method that adds or removes a filter from the callback chain.
* This class exists to encapsulate common behavior between calls that
@@ -64,14 +76,7 @@ module Filters {
not exists(this.getOnlyArgument()) and
forall(string except | except = this.getExceptArgument() | result.getName() != except)
) and
(
result = this.getExpr().getEnclosingModule().getAMethod()
or
exists(ModuleBase m |
m.getModule() = this.getExpr().getEnclosingModule().getModule().getADescendent() and
result = m.getAMethod()
)
)
result = getADescendentAction(this)
}
private string getOnlyArgument() {
@@ -104,8 +109,12 @@ module Filters {
StringlikeLiteralCfgNode getFilterArgument() { result = this.getPositionalArgument(_) }
string getFilterArgumentName() {
result = this.getFilterArgument().getConstantValue().getStringlikeValue()
}
/**
* Gets the callable that implements the filter with name `name`.
* Gets the callable that implements a filter registered by this call.
* This currently only finds methods in the local class or superclass.
* It doesn't handle:
* - lambdas
@@ -122,10 +131,9 @@ module Filters {
* end
* ```
*/
Callable getFilterCallable(string name) {
result.(MethodBase).getName() = name and
result.getEnclosingModule().getModule() =
this.getExpr().getEnclosingModule().getModule().getAnAncestor()
Callable getAFilterCallable() {
result =
lookupMethod(this.getExpr().getEnclosingModule().getModule(), this.getFilterArgumentName())
}
}
@@ -321,7 +329,9 @@ module Filters {
string getFilterName() { result = this.getConstantValue().getStringlikeValue() }
Callable getFilterCallable() { result = call.getFilterCallable(this.getFilterName()) }
Callable getFilterCallable() {
result = call.getAFilterCallable() and result.(MethodBase).getName() = this.getFilterName()
}
ActionControllerActionMethod getAnAction() { result = call.getAnAction() }
}
@@ -387,4 +397,62 @@ module Filters {
* `pred` and `succ` may be methods bound to callbacks or controller actions.
*/
predicate next(Method pred, Method succ) { next(_, pred, succ) }
/**
* Holds if `n` is a post-update node for `self` in method `m`.
*/
private predicate selfPostUpdate(DataFlow::PostUpdateNode n, Method m) {
n.getPreUpdateNode().asExpr().getExpr() =
any(SelfVariableAccess self |
pragma[only_bind_into](m) = self.getEnclosingCallable() and
self.getVariable().getDeclaringScope() = m
)
}
/**
* Holds if `n` is the self parameter of method `m`.
*/
private predicate selfParameter(DataFlowPrivate::SelfParameterNode n, Method m) {
m = n.getMethod()
}
/**
* A class defining additional jump steps arising from filters.
*/
class FilterJumpStep extends DataFlowPrivate::AdditionalJumpStep {
/**
* Holds if data can flow from `pred` to `succ` via a callback chain.
* `pred` is the post-update node of the self parameter in a method, and
* `succ` is the self parameter of a subsequent method that is executed as
* part of the callback chain.
*/
override predicate step(DataFlow::Node pred, DataFlow::Node succ) {
exists(Method predMethod, Method succMethod | next(predMethod, succMethod) |
// Flow from a post-update node of self in `pred` to the self parameter of `succ`
//
// def a
// foo() ---------+
// @x = 1 ---+ |
// end | |
// | |
// def b <----+----+
// ...
//
selfPostUpdate(pred, predMethod) and
selfParameter(succ, succMethod)
or
// Flow from the self parameter of `pred` to the self parameter of `succ`
//
// def a ---+
// ... |
// end |
// |
// def b <-+
// ...
//
selfParameter(pred, predMethod) and
selfParameter(succ, succMethod)
)
}
}
}

View File

@@ -15,7 +15,8 @@ private API::Node digest(Cryptography::HashingAlgorithm algo) {
}
/** A call that hashes some input using a hashing algorithm from the `Digest` module. */
private class DigestCall extends Cryptography::CryptographicOperation::Range instanceof DataFlow::CallNode {
private class DigestCall extends Cryptography::CryptographicOperation::Range instanceof DataFlow::CallNode
{
Cryptography::HashingAlgorithm algo;
DigestCall() {

View File

@@ -114,7 +114,8 @@ module String {
}
abstract private class SimpleSummarizedCallable extends SummarizedCallable,
FlowSummary::SimpleSummarizedCallable {
FlowSummary::SimpleSummarizedCallable
{
bindingset[this]
SimpleSummarizedCallable() { any() }
}

View File

@@ -116,7 +116,8 @@ class ExconHttpRequest extends Http::Client::Request::Range, DataFlow::CallNode
}
/** A configuration to track values that can disable certificate validation for Excon. */
private class ExconDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class ExconDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
ExconDisablesCertificateValidationConfiguration() {
this = "ExconDisablesCertificateValidationConfiguration"
}

View File

@@ -87,7 +87,8 @@ class FaradayHttpRequest extends Http::Client::Request::Range, DataFlow::CallNod
}
/** A configuration to track values that can disable certificate validation for Faraday. */
private class FaradayDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class FaradayDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
FaradayDisablesCertificateValidationConfiguration() {
this = "FaradayDisablesCertificateValidationConfiguration"
}

View File

@@ -74,7 +74,8 @@ class HttpClientRequest extends Http::Client::Request::Range, DataFlow::CallNode
}
/** A configuration to track values that can disable certificate validation for HttpClient. */
private class HttpClientDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class HttpClientDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
HttpClientDisablesCertificateValidationConfiguration() {
this = "HttpClientDisablesCertificateValidationConfiguration"
}

View File

@@ -66,7 +66,8 @@ class HttpartyRequest extends Http::Client::Request::Range, DataFlow::CallNode {
}
/** A configuration to track values that can disable certificate validation for Httparty. */
private class HttpartyDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class HttpartyDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
HttpartyDisablesCertificateValidationConfiguration() {
this = "HttpartyDisablesCertificateValidationConfiguration"
}

View File

@@ -93,7 +93,8 @@ class NetHttpRequest extends Http::Client::Request::Range, DataFlow::CallNode {
}
/** A configuration to track values that can disable certificate validation for NetHttp. */
private class NetHttpDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class NetHttpDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
NetHttpDisablesCertificateValidationConfiguration() {
this = "NetHttpDisablesCertificateValidationConfiguration"
}

View File

@@ -62,7 +62,8 @@ class OpenUriRequest extends Http::Client::Request::Range, DataFlow::CallNode {
* Kernel.open("http://example.com").read
* ```
*/
class OpenUriKernelOpenRequest extends Http::Client::Request::Range, DataFlow::CallNode instanceof KernelMethodCall {
class OpenUriKernelOpenRequest extends Http::Client::Request::Range, DataFlow::CallNode instanceof KernelMethodCall
{
OpenUriKernelOpenRequest() { this.getMethodName() = "open" }
override DataFlow::Node getAUrlPart() { result = this.getArgument(0) }
@@ -102,7 +103,8 @@ class OpenUriKernelOpenRequest extends Http::Client::Request::Range, DataFlow::C
}
/** A configuration to track values that can disable certificate validation for OpenURI. */
private class OpenUriDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class OpenUriDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
OpenUriDisablesCertificateValidationConfiguration() {
this = "OpenUriDisablesCertificateValidationConfiguration"
}

View File

@@ -67,7 +67,8 @@ class RestClientHttpRequest extends Http::Client::Request::Range, DataFlow::Call
}
/** A configuration to track values that can disable certificate validation for RestClient. */
private class RestClientDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class RestClientDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
RestClientDisablesCertificateValidationConfiguration() {
this = "RestClientDisablesCertificateValidationConfiguration"
}

View File

@@ -47,7 +47,8 @@ class TyphoeusHttpRequest extends Http::Client::Request::Range, DataFlow::CallNo
}
/** A configuration to track values that can disable certificate validation for Typhoeus. */
private class TyphoeusDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration {
private class TyphoeusDisablesCertificateValidationConfiguration extends DataFlowImplForHttpClientLibraries::Configuration
{
TyphoeusDisablesCertificateValidationConfiguration() {
this = "TyphoeusDisablesCertificateValidationConfiguration"
}

View File

@@ -100,7 +100,8 @@ module Pathname {
}
private class PathnamePermissionModification extends FileSystemPermissionModification::Range,
PathnameCall {
PathnameCall
{
private DataFlow::Node permissionArg;
PathnamePermissionModification() {

View File

@@ -557,7 +557,8 @@ private class CipherNode extends DataFlow::Node {
/** An operation using the OpenSSL library that uses a cipher. */
private class CipherOperation extends Cryptography::CryptographicOperation::Range,
DataFlow::CallNode {
DataFlow::CallNode
{
private CipherNode cipherNode;
CipherOperation() {
@@ -587,7 +588,8 @@ private module Digest {
private import codeql.ruby.ApiGraphs
/** A call that hashes some input using a hashing algorithm from the `OpenSSL::Digest` module. */
private class DigestCall extends Cryptography::CryptographicOperation::Range instanceof DataFlow::CallNode {
private class DigestCall extends Cryptography::CryptographicOperation::Range instanceof DataFlow::CallNode
{
Cryptography::HashingAlgorithm algo;
DigestCall() {
@@ -612,7 +614,8 @@ private module Digest {
}
/** A call to `OpenSSL::Digest.digest` that hashes input directly without constructing a digest instance. */
private class DigestCallDirect extends Cryptography::CryptographicOperation::Range instanceof DataFlow::CallNode {
private class DigestCallDirect extends Cryptography::CryptographicOperation::Range instanceof DataFlow::CallNode
{
Cryptography::HashingAlgorithm algo;
DigestCallDirect() {

View File

@@ -57,5 +57,6 @@ module PathInjection {
* sanitizer-guard.
*/
class StringConstArrayInclusionCallAsSanitizer extends Sanitizer,
StringConstArrayInclusionCallBarrier { }
StringConstArrayInclusionCallBarrier
{ }
}

View File

@@ -51,5 +51,6 @@ module SqlInjection {
* sanitizer-guard.
*/
class StringConstArrayInclusionCallAsSanitizer extends Sanitizer,
StringConstArrayInclusionCallBarrier { }
StringConstArrayInclusionCallBarrier
{ }
}

View File

@@ -16,9 +16,11 @@ module StoredXss {
import XSS::StoredXss
/**
* DEPRECATED.
*
* A taint-tracking configuration for reasoning about Stored XSS.
*/
class Configuration extends TaintTracking::Configuration {
deprecated class Configuration extends TaintTracking::Configuration {
Configuration() { this = "StoredXss" }
override predicate isSource(DataFlow::Node source) { source instanceof Source }
@@ -38,6 +40,23 @@ module StoredXss {
isAdditionalXssTaintStep(node1, node2)
}
}
/**
* A taint-tracking configuration for reasoning about Stored XSS.
*/
private module Config implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source instanceof Source }
predicate isSink(DataFlow::Node sink) { sink instanceof Sink }
predicate isBarrier(DataFlow::Node node) { node instanceof Sanitizer }
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
isAdditionalXssTaintStep(node1, node2)
}
}
import TaintTracking::Make<Config>
}
/** DEPRECATED: Alias for StoredXss */

View File

@@ -4,7 +4,6 @@
private import codeql.ruby.AST
private import codeql.ruby.DataFlow
private import codeql.ruby.DataFlow2
private import codeql.ruby.CFG
private import codeql.ruby.Concepts
private import codeql.ruby.Frameworks
@@ -90,7 +89,8 @@ private module Shared {
* tag.
*/
class ArgumentInterpretedAsUrlAsSink extends Sink, ErbOutputMethodCallArgumentNode,
ActionView::ArgumentInterpretedAsUrl { }
ActionView::ArgumentInterpretedAsUrl
{ }
/**
* A argument to a call to the `link_to` method, which does not expect
@@ -129,13 +129,15 @@ private module Shared {
* An inclusion check against an array of constant strings, considered as a sanitizer-guard.
*/
class StringConstArrayInclusionCallAsSanitizer extends Sanitizer,
StringConstArrayInclusionCallBarrier { }
StringConstArrayInclusionCallBarrier
{ }
/**
* A `VariableWriteAccessCfgNode` that is not succeeded (locally) by another
* write to that variable.
*/
private class FinalInstanceVarWrite extends CfgNodes::ExprNodes::InstanceVariableWriteAccessCfgNode {
private class FinalInstanceVarWrite extends CfgNodes::ExprNodes::InstanceVariableWriteAccessCfgNode
{
private InstanceVariable var;
FinalInstanceVarWrite() {
@@ -291,20 +293,18 @@ private module OrmTracking {
/**
* A data flow configuration to track flow from finder calls to field accesses.
*/
class Configuration extends DataFlow2::Configuration {
Configuration() { this = "OrmTracking" }
override predicate isSource(DataFlow2::Node source) { source instanceof OrmInstantiation }
private module Config implements DataFlow::ConfigSig {
predicate isSource(DataFlow::Node source) { source instanceof OrmInstantiation }
// Select any call receiver and narrow down later
override predicate isSink(DataFlow2::Node sink) {
sink = any(DataFlow2::CallNode c).getReceiver()
}
predicate isSink(DataFlow::Node sink) { sink = any(DataFlow::CallNode c).getReceiver() }
override predicate isAdditionalFlowStep(DataFlow2::Node node1, DataFlow2::Node node2) {
predicate isAdditionalFlowStep(DataFlow::Node node1, DataFlow::Node node2) {
Shared::isAdditionalXssFlowStep(node1, node2)
}
}
import DataFlow::Make<Config>
}
/** Provides default sources, sinks and sanitizers for detecting stored cross-site scripting (XSS) vulnerabilities. */
@@ -333,10 +333,10 @@ module StoredXss {
/** DEPRECATED: Alias for isAdditionalXssTaintStep */
deprecated predicate isAdditionalXSSTaintStep = isAdditionalXssTaintStep/2;
private class OrmFieldAsSource extends Source instanceof DataFlow2::CallNode {
private class OrmFieldAsSource extends Source instanceof DataFlow::CallNode {
OrmFieldAsSource() {
exists(OrmTracking::Configuration subConfig, DataFlow2::CallNode subSrc |
subConfig.hasFlow(subSrc, this.getReceiver()) and
exists(DataFlow::CallNode subSrc |
OrmTracking::hasFlow(subSrc, this.getReceiver()) and
subSrc.(OrmInstantiation).methodCallMayAccessField(this.getMethodName())
)
}

View File

@@ -73,7 +73,8 @@ module RegExpInjection {
* sanitizer-guard.
*/
class StringConstArrayInclusionCallAsSanitizer extends Sanitizer,
StringConstArrayInclusionCallBarrier { }
StringConstArrayInclusionCallBarrier
{ }
/**
* A call to `Regexp.escape` (or its alias, `Regexp.quote`), considered as a

View File

@@ -1,5 +1,5 @@
name: codeql/ruby-all
version: 0.5.4-dev
version: 0.5.5-dev
groups: ruby
extractor: ruby
dbscheme: ruby.dbscheme

View File

@@ -1,3 +1,7 @@
## 0.5.4
No user-facing changes.
## 0.5.3
### New Queries

View File

@@ -0,0 +1,4 @@
---
category: newQuery
---
* Added a new query, `rb/zip-slip`, to detect arbitrary file writes during extraction of zip/tar archives.

View File

@@ -0,0 +1,3 @@
## 0.5.4
No user-facing changes.

View File

@@ -1,2 +1,2 @@
---
lastReleaseVersion: 0.5.3
lastReleaseVersion: 0.5.4

View File

@@ -0,0 +1,79 @@
<!DOCTYPE qhelp PUBLIC
"-//Semmle//qhelp//EN"
"qhelp.dtd">
<qhelp>
<overview>
<p>Extracting files from a malicious tar archive without validating that the destination file path
is within the destination directory can cause files outside the destination directory to be
overwritten, due to the possible presence of directory traversal elements (<code>..</code>) in
archive paths.</p>
<p>Tar archives contain archive entries representing each file in the archive. These entries
include a file path for the entry, but these file paths are not restricted and may contain
unexpected special elements such as the directory traversal element (<code>..</code>). If these
file paths are used to determine an output file to write the contents of the archive item to, then
the file may be written to an unexpected location. This can result in sensitive information being
revealed or deleted, or an attacker being able to influence behavior by modifying unexpected
files.</p>
<p>For example, if a tar archive contains a file entry <code>..\sneaky-file</code>, and the tar archive
is extracted to the directory <code>c:\output</code>, then naively combining the paths would result
in an output file path of <code>c:\output\..\sneaky-file</code>, which would cause the file to be
written to <code>c:\sneaky-file</code>.</p>
</overview>
<recommendation>
<p>Ensure that output paths constructed from tar archive entries are validated
to prevent writing files to unexpected locations.</p>
<p>The recommended way of writing an output file from a tar archive entry is to check that
<code>".."</code> does not occur in the path.
</p>
</recommendation>
<example>
<p>
In this example an archive is extracted without validating file paths.
If <code>archive.tar</code> contained relative paths (for
instance, if it were created by something like <code>tar -cf archive.tar
../file.txt</code>) then executing this code could write to locations
outside the destination directory.
</p>
<sample src="examples/zip_slip_bad.rb" />
<p>To fix this vulnerability, we need to check that the path does not
contain any <code>".."</code> elements.
</p>
<sample src="examples/zip_slip_good.rb" />
</example>
<references>
<li>
Snyk:
<a href="https://snyk.io/research/zip-slip-vulnerability">Zip Slip Vulnerability</a>.
</li>
<li>
OWASP:
<a href="https://owasp.org/www-community/attacks/Path_Traversal">Path Traversal</a>.
</li>
<li>
Class
<a href="https://docs.ruby-lang.org/en/2.4.0/Gem/Package/TarReader.html">Gem::Package::TarReader</a>.
</li>
<li>
Class
<a href="https://ruby-doc.org/stdlib-2.4.0/libdoc/zlib/rdoc/Zlib/GzipReader.html">Zlib::GzipReader</a>.
</li>
<li>
Class
<a href="https://www.rubydoc.info/github/rubyzip/rubyzip/Zip/File">Zip::File</a>.
</li>
</references>
</qhelp>

View File

@@ -0,0 +1,22 @@
/**
* @name Arbitrary file write during zipfile/tarfile extraction
* @description Extracting files from a malicious tar archive without validating that the
* destination file path is within the destination directory can cause files outside
* the destination directory to be overwritten.
* @kind path-problem
* @id rb/zip-slip
* @problem.severity error
* @security-severity 7.5
* @precision medium
* @tags security
* external/cwe/cwe-022
*/
import ruby
import codeql.ruby.experimental.ZipSlipQuery
import DataFlow::PathGraph
from Configuration cfg, DataFlow::PathNode source, DataFlow::PathNode sink
where cfg.hasFlowPath(source, sink)
select sink.getNode(), source, sink, "This file extraction depends on a $@.", source.getNode(),
"potentially untrusted source"

View File

@@ -0,0 +1,21 @@
class FilesController < ActionController::Base
def zipFileUnsafe
path = params[:path]
Zip::File.open(path).each do |entry|
File.open(entry.name, "wb") do |os|
entry.read
end
end
end
def tarReaderUnsafe
path = params[:path]
file_stream = IO.new(IO.sysopen(path))
tarfile = Gem::Package::TarReader.new(file_stream)
tarfile.each do |entry|
::File.open(entry.full_name, "wb") do |os|
entry.read
end
end
end
end

View File

@@ -0,0 +1,25 @@
class FilesController < ActionController::Base
def zipFileSafe
path = params[:path]
Zip::File.open(path).each do |entry|
entry_path = entry.name
next if !File.expand_path(entry_path).start_with?('/safepath/')
File.open(entry_path, "wb") do |os|
entry.read
end
end
end
def tarReaderSafe
path = params[:path]
file_stream = IO.new(IO.sysopen(path))
tarfile = Gem::Package::TarReader.new(file_stream)
tarfile.each do |entry|
entry_path = entry.full_name
raise ExtractFailed if entry_path != "/safepath"
::File.open(entry_path, "wb") do |os|
entry.read
end
end
end
end

View File

@@ -1,5 +1,5 @@
name: codeql/ruby-queries
version: 0.5.4-dev
version: 0.5.5-dev
groups:
- ruby
- queries

View File

@@ -19,7 +19,8 @@ private import codeql.regex.MissingRegExpAnchor as MissingRegExpAnchor
private import codeql.ruby.regexp.RegExpTreeView::RegexTreeView as TreeImpl
private module Impl implements
MissingRegExpAnchor::MissingRegExpAnchorSig<TreeImpl, HostnameRegexp::Impl> {
MissingRegExpAnchor::MissingRegExpAnchorSig<TreeImpl, HostnameRegexp::Impl>
{
predicate isUsedAsReplace(RegExpPatternSource pattern) {
exists(DataFlow::CallNode mcn, DataFlow::Node arg, string name |
name = mcn.getMethodName() and

View File

@@ -14,9 +14,9 @@
import codeql.ruby.AST
import codeql.ruby.security.StoredXSSQuery
import DataFlow::PathGraph
import StoredXss::PathGraph
from StoredXss::Configuration config, DataFlow::PathNode source, DataFlow::PathNode sink
where config.hasFlowPath(source, sink)
from StoredXss::PathNode source, StoredXss::PathNode sink
where StoredXss::hasFlowPath(source, sink)
select sink.getNode(), source, sink, "Stored cross-site scripting vulnerability due to $@.",
source.getNode(), "stored value"

View File

@@ -6,6 +6,11 @@ actionControllerControllerClasses
| controllers/posts_controller.rb:1:1:32:3 | PostsController |
| controllers/tags_controller.rb:1:1:2:3 | TagsController |
| controllers/users/notifications_controller.rb:2:3:5:5 | Users::NotificationsController |
| filter_flow.rb:9:1:23:3 | OneController |
| filter_flow.rb:25:1:40:3 | TwoController |
| filter_flow.rb:42:1:57:3 | ThreeController |
| filter_flow.rb:59:1:73:3 | FourController |
| filter_flow.rb:75:1:93:3 | FiveController |
| input_access.rb:1:1:50:3 | UsersController |
| params_flow.rb:1:1:162:3 | MyController |
| params_flow.rb:170:1:178:3 | Subclass |
@@ -27,6 +32,22 @@ actionControllerActionMethods
| controllers/posts_controller.rb:17:3:18:5 | show |
| controllers/posts_controller.rb:20:3:21:5 | upvote |
| controllers/users/notifications_controller.rb:3:5:4:7 | mark_as_read |
| filter_flow.rb:13:3:15:5 | a |
| filter_flow.rb:17:3:18:5 | b |
| filter_flow.rb:20:3:22:5 | c |
| filter_flow.rb:29:3:31:5 | a |
| filter_flow.rb:33:3:35:5 | b |
| filter_flow.rb:37:3:39:5 | c |
| filter_flow.rb:46:3:49:5 | a |
| filter_flow.rb:51:3:52:5 | b |
| filter_flow.rb:54:3:56:5 | c |
| filter_flow.rb:63:3:65:5 | a |
| filter_flow.rb:67:3:68:5 | b |
| filter_flow.rb:70:3:72:5 | c |
| filter_flow.rb:79:3:81:5 | a |
| filter_flow.rb:83:3:84:5 | b |
| filter_flow.rb:86:3:88:5 | c |
| filter_flow.rb:90:3:92:5 | taint_foo |
| input_access.rb:2:3:49:5 | index |
| logging.rb:2:5:8:7 | index |
| params_flow.rb:2:3:4:5 | m1 |
@@ -72,6 +93,11 @@ paramsCalls
| controllers/foo/bars_controller.rb:21:21:21:26 | call to params |
| controllers/foo/bars_controller.rb:22:10:22:15 | call to params |
| controllers/posts_controller.rb:26:23:26:28 | call to params |
| filter_flow.rb:14:12:14:17 | call to params |
| filter_flow.rb:30:12:30:17 | call to params |
| filter_flow.rb:47:12:47:17 | call to params |
| filter_flow.rb:64:16:64:21 | call to params |
| filter_flow.rb:91:12:91:17 | call to params |
| params_flow.rb:3:10:3:15 | call to params |
| params_flow.rb:7:10:7:15 | call to params |
| params_flow.rb:11:10:11:15 | call to params |
@@ -127,6 +153,11 @@ paramsSources
| controllers/foo/bars_controller.rb:21:21:21:26 | call to params |
| controllers/foo/bars_controller.rb:22:10:22:15 | call to params |
| controllers/posts_controller.rb:26:23:26:28 | call to params |
| filter_flow.rb:14:12:14:17 | call to params |
| filter_flow.rb:30:12:30:17 | call to params |
| filter_flow.rb:47:12:47:17 | call to params |
| filter_flow.rb:64:16:64:21 | call to params |
| filter_flow.rb:91:12:91:17 | call to params |
| params_flow.rb:3:10:3:15 | call to params |
| params_flow.rb:7:10:7:15 | call to params |
| params_flow.rb:11:10:11:15 | call to params |
@@ -192,6 +223,11 @@ httpInputAccesses
| controllers/foo/bars_controller.rb:21:21:21:26 | call to params | ActionController::Metal#params |
| controllers/foo/bars_controller.rb:22:10:22:15 | call to params | ActionController::Metal#params |
| controllers/posts_controller.rb:26:23:26:28 | call to params | ActionController::Metal#params |
| filter_flow.rb:14:12:14:17 | call to params | ActionController::Metal#params |
| filter_flow.rb:30:12:30:17 | call to params | ActionController::Metal#params |
| filter_flow.rb:47:12:47:17 | call to params | ActionController::Metal#params |
| filter_flow.rb:64:16:64:21 | call to params | ActionController::Metal#params |
| filter_flow.rb:91:12:91:17 | call to params | ActionController::Metal#params |
| input_access.rb:3:5:3:18 | call to params | ActionDispatch::Request#params |
| input_access.rb:4:5:4:22 | call to parameters | ActionDispatch::Request#parameters |
| input_access.rb:5:5:5:15 | call to GET | ActionDispatch::Request#GET |

View File

@@ -1,3 +1,117 @@
additionalFlowSteps
| controllers/application_controller.rb:6:3:8:5 | self in set_user | controllers/comments_controller.rb:74:3:77:5 | self in ensure_user_can_edit_comments |
| controllers/application_controller.rb:6:3:8:5 | self in set_user | controllers/comments_controller.rb:79:3:81:5 | self in set_comment |
| controllers/application_controller.rb:6:3:8:5 | self in set_user | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/application_controller.rb:6:3:8:5 | self in set_user | controllers/posts_controller.rb:12:3:15:5 | self in index |
| controllers/application_controller.rb:6:3:8:5 | self in set_user | controllers/posts_controller.rb:17:3:18:5 | self in show |
| controllers/application_controller.rb:6:3:8:5 | self in set_user | controllers/posts_controller.rb:20:3:21:5 | self in upvote |
| controllers/application_controller.rb:7:5:7:9 | [post] self | controllers/comments_controller.rb:74:3:77:5 | self in ensure_user_can_edit_comments |
| controllers/application_controller.rb:7:5:7:9 | [post] self | controllers/comments_controller.rb:79:3:81:5 | self in set_comment |
| controllers/application_controller.rb:7:5:7:9 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/application_controller.rb:7:5:7:9 | [post] self | controllers/posts_controller.rb:12:3:15:5 | self in index |
| controllers/application_controller.rb:7:5:7:9 | [post] self | controllers/posts_controller.rb:17:3:18:5 | self in show |
| controllers/application_controller.rb:7:5:7:9 | [post] self | controllers/posts_controller.rb:20:3:21:5 | self in upvote |
| controllers/application_controller.rb:7:23:7:29 | [post] self | controllers/comments_controller.rb:74:3:77:5 | self in ensure_user_can_edit_comments |
| controllers/application_controller.rb:7:23:7:29 | [post] self | controllers/comments_controller.rb:79:3:81:5 | self in set_comment |
| controllers/application_controller.rb:7:23:7:29 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/application_controller.rb:7:23:7:29 | [post] self | controllers/posts_controller.rb:12:3:15:5 | self in index |
| controllers/application_controller.rb:7:23:7:29 | [post] self | controllers/posts_controller.rb:17:3:18:5 | self in show |
| controllers/application_controller.rb:7:23:7:29 | [post] self | controllers/posts_controller.rb:20:3:21:5 | self in upvote |
| controllers/application_controller.rb:10:3:12:5 | self in log_request | controllers/application_controller.rb:6:3:8:5 | self in set_user |
| controllers/application_controller.rb:10:3:12:5 | self in log_request | controllers/photos_controller.rb:3:3:6:5 | self in show |
| controllers/application_controller.rb:10:3:12:5 | self in log_request | controllers/posts_controller.rb:25:3:27:5 | self in set_post |
| controllers/application_controller.rb:11:35:11:41 | [post] self | controllers/application_controller.rb:6:3:8:5 | self in set_user |
| controllers/application_controller.rb:11:35:11:41 | [post] self | controllers/photos_controller.rb:3:3:6:5 | self in show |
| controllers/application_controller.rb:11:35:11:41 | [post] self | controllers/posts_controller.rb:25:3:27:5 | self in set_post |
| controllers/application_controller.rb:11:53:11:59 | [post] self | controllers/application_controller.rb:6:3:8:5 | self in set_user |
| controllers/application_controller.rb:11:53:11:59 | [post] self | controllers/photos_controller.rb:3:3:6:5 | self in show |
| controllers/application_controller.rb:11:53:11:59 | [post] self | controllers/posts_controller.rb:25:3:27:5 | self in set_post |
| controllers/comments_controller.rb:17:3:51:5 | self in index | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:18:5:18:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:19:5:19:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:20:5:20:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:21:5:21:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:22:5:22:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:23:5:23:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:24:5:24:11 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:26:5:26:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:28:5:28:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:30:5:30:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:31:5:31:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:32:5:32:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:33:5:33:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:34:5:34:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:36:5:36:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:38:5:38:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:40:5:40:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:41:5:41:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:42:5:42:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:43:5:43:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:44:5:44:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:45:5:45:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:47:5:47:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:48:5:48:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:49:5:49:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:50:5:50:12 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:53:3:54:5 | self in create | controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change |
| controllers/comments_controller.rb:56:3:62:5 | self in show | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:57:5:61:7 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:64:3:66:5 | self in photo | controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change |
| controllers/comments_controller.rb:65:5:65:20 | [post] self | controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change |
| controllers/comments_controller.rb:65:15:65:20 | [post] self | controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change |
| controllers/comments_controller.rb:68:3:70:5 | self in destroy | controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change |
| controllers/comments_controller.rb:69:12:69:18 | [post] self | controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change |
| controllers/comments_controller.rb:74:3:77:5 | self in ensure_user_can_edit_comments | controllers/comments_controller.rb:79:3:81:5 | self in set_comment |
| controllers/comments_controller.rb:74:3:77:5 | self in ensure_user_can_edit_comments | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:75:15:75:19 | [post] self | controllers/comments_controller.rb:79:3:81:5 | self in set_comment |
| controllers/comments_controller.rb:75:15:75:19 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:76:5:76:68 | [post] self | controllers/comments_controller.rb:79:3:81:5 | self in set_comment |
| controllers/comments_controller.rb:76:5:76:68 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:79:3:81:5 | self in set_comment | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:80:5:80:12 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:80:16:80:20 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:80:36:80:41 | [post] self | controllers/comments_controller.rb:99:3:100:5 | self in foo |
| controllers/comments_controller.rb:83:3:85:5 | self in log_comment_change | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:84:45:84:49 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:84:61:84:68 | [post] self | controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags |
| controllers/comments_controller.rb:87:3:89:5 | self in check_feature_flags | controllers/comments_controller.rb:95:3:97:5 | self in this_must_run_last |
| controllers/comments_controller.rb:88:5:88:28 | [post] self | controllers/comments_controller.rb:95:3:97:5 | self in this_must_run_last |
| controllers/comments_controller.rb:91:3:93:5 | self in this_must_run_first | controllers/application_controller.rb:10:3:12:5 | self in log_request |
| controllers/comments_controller.rb:99:3:100:5 | self in foo | controllers/comments_controller.rb:102:3:103:5 | self in bar |
| controllers/comments_controller.rb:102:3:103:5 | self in bar | controllers/comments_controller.rb:17:3:51:5 | self in index |
| controllers/comments_controller.rb:102:3:103:5 | self in bar | controllers/comments_controller.rb:53:3:54:5 | self in create |
| controllers/comments_controller.rb:102:3:103:5 | self in bar | controllers/comments_controller.rb:56:3:62:5 | self in show |
| controllers/comments_controller.rb:102:3:103:5 | self in bar | controllers/comments_controller.rb:64:3:66:5 | self in photo |
| controllers/comments_controller.rb:102:3:103:5 | self in bar | controllers/comments_controller.rb:68:3:70:5 | self in destroy |
| controllers/photos_controller.rb:3:3:6:5 | self in show | controllers/photos_controller.rb:8:3:9:5 | self in foo |
| controllers/photos_controller.rb:4:5:4:6 | [post] self | controllers/photos_controller.rb:8:3:9:5 | self in foo |
| controllers/photos_controller.rb:5:5:5:6 | [post] self | controllers/photos_controller.rb:8:3:9:5 | self in foo |
| controllers/posts_controller.rb:20:3:21:5 | self in upvote | controllers/posts_controller.rb:29:3:31:5 | self in log_upvote |
| controllers/posts_controller.rb:25:3:27:5 | self in set_post | controllers/application_controller.rb:6:3:8:5 | self in set_user |
| controllers/posts_controller.rb:26:5:26:9 | [post] self | controllers/application_controller.rb:6:3:8:5 | self in set_user |
| controllers/posts_controller.rb:26:23:26:28 | [post] self | controllers/application_controller.rb:6:3:8:5 | self in set_user |
| filter_flow.rb:13:3:15:5 | self in a | filter_flow.rb:17:3:18:5 | self in b |
| filter_flow.rb:14:5:14:8 | [post] self | filter_flow.rb:17:3:18:5 | self in b |
| filter_flow.rb:14:12:14:17 | [post] self | filter_flow.rb:17:3:18:5 | self in b |
| filter_flow.rb:17:3:18:5 | self in b | filter_flow.rb:20:3:22:5 | self in c |
| filter_flow.rb:29:3:31:5 | self in a | filter_flow.rb:33:3:35:5 | self in b |
| filter_flow.rb:30:5:30:8 | [post] self | filter_flow.rb:33:3:35:5 | self in b |
| filter_flow.rb:30:12:30:17 | [post] self | filter_flow.rb:33:3:35:5 | self in b |
| filter_flow.rb:33:3:35:5 | self in b | filter_flow.rb:37:3:39:5 | self in c |
| filter_flow.rb:34:5:34:8 | [post] self | filter_flow.rb:37:3:39:5 | self in c |
| filter_flow.rb:46:3:49:5 | self in a | filter_flow.rb:51:3:52:5 | self in b |
| filter_flow.rb:47:5:47:8 | [post] self | filter_flow.rb:51:3:52:5 | self in b |
| filter_flow.rb:47:12:47:17 | [post] self | filter_flow.rb:51:3:52:5 | self in b |
| filter_flow.rb:48:5:48:8 | [post] self | filter_flow.rb:51:3:52:5 | self in b |
| filter_flow.rb:51:3:52:5 | self in b | filter_flow.rb:54:3:56:5 | self in c |
| filter_flow.rb:63:3:65:5 | self in a | filter_flow.rb:67:3:68:5 | self in b |
| filter_flow.rb:64:5:64:8 | [post] self | filter_flow.rb:67:3:68:5 | self in b |
| filter_flow.rb:64:16:64:21 | [post] self | filter_flow.rb:67:3:68:5 | self in b |
| filter_flow.rb:67:3:68:5 | self in b | filter_flow.rb:70:3:72:5 | self in c |
| filter_flow.rb:79:3:81:5 | self in a | filter_flow.rb:83:3:84:5 | self in b |
| filter_flow.rb:80:5:80:8 | [post] self | filter_flow.rb:83:3:84:5 | self in b |
| filter_flow.rb:83:3:84:5 | self in b | filter_flow.rb:86:3:88:5 | self in c |
filterChain
| controllers/comments_controller.rb:17:3:51:5 | index | controllers/application_controller.rb:6:3:8:5 | set_user | controllers/comments_controller.rb:99:3:100:5 | foo |
| controllers/comments_controller.rb:17:3:51:5 | index | controllers/application_controller.rb:10:3:12:5 | log_request | controllers/application_controller.rb:6:3:8:5 | set_user |
| controllers/comments_controller.rb:17:3:51:5 | index | controllers/comments_controller.rb:17:3:51:5 | index | controllers/comments_controller.rb:87:3:89:5 | check_feature_flags |
@@ -51,3 +165,13 @@
| controllers/posts_controller.rb:20:3:21:5 | upvote | controllers/application_controller.rb:10:3:12:5 | log_request | controllers/posts_controller.rb:25:3:27:5 | set_post |
| controllers/posts_controller.rb:20:3:21:5 | upvote | controllers/posts_controller.rb:20:3:21:5 | upvote | controllers/posts_controller.rb:29:3:31:5 | log_upvote |
| controllers/posts_controller.rb:20:3:21:5 | upvote | controllers/posts_controller.rb:25:3:27:5 | set_post | controllers/application_controller.rb:6:3:8:5 | set_user |
| filter_flow.rb:17:3:18:5 | b | filter_flow.rb:13:3:15:5 | a | filter_flow.rb:17:3:18:5 | b |
| filter_flow.rb:17:3:18:5 | b | filter_flow.rb:17:3:18:5 | b | filter_flow.rb:20:3:22:5 | c |
| filter_flow.rb:33:3:35:5 | b | filter_flow.rb:29:3:31:5 | a | filter_flow.rb:33:3:35:5 | b |
| filter_flow.rb:33:3:35:5 | b | filter_flow.rb:33:3:35:5 | b | filter_flow.rb:37:3:39:5 | c |
| filter_flow.rb:51:3:52:5 | b | filter_flow.rb:46:3:49:5 | a | filter_flow.rb:51:3:52:5 | b |
| filter_flow.rb:51:3:52:5 | b | filter_flow.rb:51:3:52:5 | b | filter_flow.rb:54:3:56:5 | c |
| filter_flow.rb:67:3:68:5 | b | filter_flow.rb:63:3:65:5 | a | filter_flow.rb:67:3:68:5 | b |
| filter_flow.rb:67:3:68:5 | b | filter_flow.rb:67:3:68:5 | b | filter_flow.rb:70:3:72:5 | c |
| filter_flow.rb:83:3:84:5 | b | filter_flow.rb:79:3:81:5 | a | filter_flow.rb:83:3:84:5 | b |
| filter_flow.rb:83:3:84:5 | b | filter_flow.rb:83:3:84:5 | b | filter_flow.rb:86:3:88:5 | c |

Some files were not shown because too many files have changed in this diff Show More