mirror of
https://github.com/github/codeql.git
synced 2026-04-27 17:55:19 +02:00
Rust: load files from disk ourselves
This avoids problems with files containing invalid UTF-8 data, which may cause
panics like:
```
thread 'main' panicked at external/rules_rust~~_crate~ql~~r~r__ra_ap_salsa-0.0.232/src/input.rs:91:32:
no value set for CompressedFileTextQuery(FileId(2429))
stack backtrace:
0: rust_begin_unwind
at /rustc/eeb90cda1969383f56a2637cbd3037bdf598841c/library/std/src/panicking.rs:665:5
1: core::panicking::panic_fmt
at /rustc/eeb90cda1969383f56a2637cbd3037bdf598841c/library/core/src/panicking.rs:74:14
2: <salsa::input::InputStorage<Q> as salsa::plumbing::QueryStorageOps<Q>>::fetch
3: <DB as ra_ap_base_db::SourceDatabase>::compressed_file_text::__shim
4: <ra_ap_base_db::FileTextQuery as salsa::plumbing::QueryFunction>::execute
5: salsa::Cycle::catch
6: salsa::derived_lru::slot::Slot<Q,MP>::execute
7: salsa::derived_lru::slot::Slot<Q,MP>::read
8: <salsa::derived_lru::DerivedStorage<Q,MP> as salsa::plumbing::QueryStorageOps<Q>>::fetch
9: <DB as ra_ap_base_db::SourceDatabase>::file_text::__shim
10: <DB as ra_ap_base_db::SourceDatabase>::file_text
11: <ra_ap_base_db::ParseQuery as salsa::plumbing::QueryFunction>::execute
12: salsa::Cycle::catch
13: salsa::derived_lru::slot::Slot<Q,MP>::execute
14: salsa::derived_lru::slot::Slot<Q,MP>::read
15: <salsa::derived_lru::DerivedStorage<Q,MP> as salsa::plumbing::QueryStorageOps<Q>>::fetch
16: <DB as ra_ap_base_db::SourceDatabase>::parse::__shim
17: <DB as ra_ap_base_db::SourceDatabase>::parse
18: ra_ap_hir::semantics::SemanticsImpl::parse
19: single_arch_extractor::main
```
This commit is contained in:
@@ -8,7 +8,7 @@ mod translate;
|
||||
pub mod trap;
|
||||
|
||||
fn extract(
|
||||
rust_analyzer: &rust_analyzer::RustAnalyzer,
|
||||
rust_analyzer: &mut rust_analyzer::RustAnalyzer,
|
||||
traps: &trap::TrapFileProvider,
|
||||
file: std::path::PathBuf,
|
||||
) -> anyhow::Result<()> {
|
||||
@@ -49,7 +49,7 @@ fn main() -> anyhow::Result<()> {
|
||||
.module(module_path!())
|
||||
.verbosity(2 + cfg.verbose as usize)
|
||||
.init()?;
|
||||
let rust_analyzer = rust_analyzer::RustAnalyzer::new(&cfg)?;
|
||||
let mut rust_analyzer = rust_analyzer::RustAnalyzer::new(&cfg)?;
|
||||
|
||||
let traps = trap::TrapFileProvider::new(&cfg).context("failed to set up trap files")?;
|
||||
let archiver = archive::Archiver {
|
||||
@@ -59,7 +59,7 @@ fn main() -> anyhow::Result<()> {
|
||||
let file = std::path::absolute(&file).unwrap_or(file);
|
||||
let file = std::fs::canonicalize(&file).unwrap_or(file);
|
||||
archiver.archive(&file);
|
||||
extract(&rust_analyzer, &traps, file)?;
|
||||
extract(&mut rust_analyzer, &traps, file)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -3,6 +3,7 @@ use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
use log::info;
|
||||
use ra_ap_base_db::SourceDatabase;
|
||||
use ra_ap_base_db::SourceDatabaseFileInputExt;
|
||||
use ra_ap_hir::Semantics;
|
||||
use ra_ap_ide_db::RootDatabase;
|
||||
use ra_ap_load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
|
||||
@@ -59,7 +60,7 @@ impl RustAnalyzer {
|
||||
Ok(RustAnalyzer { workspace })
|
||||
}
|
||||
pub fn parse(
|
||||
&self,
|
||||
&mut self,
|
||||
path: &PathBuf,
|
||||
) -> (
|
||||
SourceFile,
|
||||
@@ -68,31 +69,6 @@ impl RustAnalyzer {
|
||||
Option<EditionedFileId>,
|
||||
Option<Semantics<'_, RootDatabase>>,
|
||||
) {
|
||||
let mut p = path.as_path();
|
||||
while let Some(parent) = p.parent() {
|
||||
p = parent;
|
||||
if let Some((vfs, db)) = self.workspace.get(parent) {
|
||||
if let Some(file_id) = Utf8PathBuf::from_path_buf(path.to_path_buf())
|
||||
.ok()
|
||||
.and_then(|x| AbsPathBuf::try_from(x).ok())
|
||||
.map(VfsPath::from)
|
||||
.and_then(|x| vfs.file_id(&x))
|
||||
{
|
||||
let semi = Semantics::new(db);
|
||||
let file_id = EditionedFileId::current_edition(file_id);
|
||||
|
||||
return (
|
||||
semi.parse(file_id),
|
||||
db.file_text(file_id.into()),
|
||||
db.parse_errors(file_id)
|
||||
.map(|x| x.to_vec())
|
||||
.unwrap_or_default(),
|
||||
Some(file_id),
|
||||
Some(semi),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut errors = Vec::new();
|
||||
let input = match std::fs::read(path) {
|
||||
Ok(data) => data,
|
||||
@@ -105,6 +81,40 @@ impl RustAnalyzer {
|
||||
}
|
||||
};
|
||||
let (input, err) = from_utf8_lossy(&input);
|
||||
|
||||
let mut p = path.as_path();
|
||||
while let Some(parent) = p.parent() {
|
||||
p = parent;
|
||||
if self.workspace.contains_key(parent) {
|
||||
let (vfs, db) = self.workspace.get_mut(parent).unwrap();
|
||||
if let Some(file_id) = Utf8PathBuf::from_path_buf(path.to_path_buf())
|
||||
.ok()
|
||||
.and_then(|x| AbsPathBuf::try_from(x).ok())
|
||||
.map(VfsPath::from)
|
||||
.and_then(|x| vfs.file_id(&x))
|
||||
{
|
||||
db.set_file_text(file_id, &input);
|
||||
let semi = Semantics::new(db);
|
||||
|
||||
let file_id = EditionedFileId::current_edition(file_id);
|
||||
let source_file = semi.parse(file_id);
|
||||
errors.extend(
|
||||
db.parse_errors(file_id)
|
||||
.into_iter()
|
||||
.flat_map(|x| x.to_vec()),
|
||||
);
|
||||
return (
|
||||
source_file,
|
||||
input.as_ref().into(),
|
||||
errors,
|
||||
Some(file_id),
|
||||
Some(semi),
|
||||
);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
let parse = ra_ap_syntax::ast::SourceFile::parse(&input, Edition::CURRENT);
|
||||
errors.extend(parse.errors());
|
||||
errors.extend(err);
|
||||
|
||||
Reference in New Issue
Block a user