Merge branch 'main' into rust-data-flow-consistency-query

This commit is contained in:
Simon Friis Vindum
2024-11-04 10:42:53 +01:00
committed by GitHub
3 changed files with 72 additions and 52 deletions

View File

@@ -1,6 +1,7 @@
use anyhow::Context;
use archive::Archiver;
use log::info;
use ra_ap_hir::Semantics;
use ra_ap_ide_db::line_index::{LineCol, LineIndex};
use ra_ap_project_model::ProjectManifest;
use rust_analyzer::{ParseResult, RustAnalyzer};
@@ -17,7 +18,7 @@ mod translate;
pub mod trap;
fn extract(
rust_analyzer: &mut rust_analyzer::RustAnalyzer,
rust_analyzer: &rust_analyzer::RustAnalyzer,
archiver: &Archiver,
traps: &trap::TrapFileProvider,
file: &std::path::Path,
@@ -29,15 +30,10 @@ fn extract(
text,
errors,
file_id,
semantics,
} = rust_analyzer.parse(file);
let line_index = LineIndex::new(text.as_ref());
let display_path = file.to_string_lossy();
let mut trap = traps.create("source", file);
trap.writer.comment(format!(
"semantics: {}",
if semantics.is_some() { "yes" } else { "no" }
));
let label = trap.emit_file(file);
let mut translator = translate::Translator::new(
trap,
@@ -45,7 +41,7 @@ fn extract(
label,
line_index,
file_id,
semantics,
file_id.and(rust_analyzer.semantics()),
);
for err in errors {
@@ -116,14 +112,20 @@ fn main() -> anyhow::Result<()> {
if files.is_empty() {
break;
}
let mut rust_analyzer = RustAnalyzer::new(manifest, &cfg.scratch_dir);
for file in files {
extract(&mut rust_analyzer, &archiver, &traps, file);
if let Some((ref db, ref vfs)) = RustAnalyzer::load_workspace(manifest, &cfg.scratch_dir) {
let semantics = Semantics::new(db);
let rust_analyzer = RustAnalyzer::new(vfs, semantics);
for file in files {
extract(&rust_analyzer, &archiver, &traps, file);
}
} else {
for file in files {
extract(&RustAnalyzer::WithoutSemantics, &archiver, &traps, file);
}
}
}
let mut rust_analyzer = RustAnalyzer::WithoutDatabase();
for file in other_files {
extract(&mut rust_analyzer, &archiver, &traps, file);
extract(&RustAnalyzer::WithoutSemantics, &archiver, &traps, file);
}
Ok(())

View File

@@ -1,7 +1,6 @@
use itertools::Itertools;
use log::{debug, info};
use ra_ap_base_db::SourceDatabase;
use ra_ap_base_db::SourceDatabaseFileInputExt;
use ra_ap_hir::Semantics;
use ra_ap_ide_db::RootDatabase;
use ra_ap_load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
@@ -21,19 +20,24 @@ use ra_ap_vfs::VfsPath;
use std::borrow::Cow;
use std::path::{Path, PathBuf};
use triomphe::Arc;
pub enum RustAnalyzer {
WithDatabase { db: RootDatabase, vfs: Vfs },
WithoutDatabase(),
// NOTE(review): this definition is the added side of a diff hunk; +/- markers
// were stripped from the surrounding text, so context is partial.
// Handle to rust-analyzer state for one workspace. Borrows the `Vfs` and the
// `Semantics` built from a loaded `RootDatabase` (see `load_workspace`).
pub enum RustAnalyzer<'a> {
// Semantic analysis is available: `vfs` resolves on-disk paths to file ids,
// `semantics` provides parsing/analysis backed by the database.
WithSemantics {
vfs: &'a Vfs,
semantics: Semantics<'a, RootDatabase>,
},
// No workspace database; `parse` falls back to reading files from disk
// and parsing them standalone (see the fs::read path in `parse`).
WithoutSemantics,
}
pub struct ParseResult<'a> {
pub struct ParseResult {
pub ast: SourceFile,
pub text: Arc<str>,
pub errors: Vec<SyntaxError>,
pub file_id: Option<EditionedFileId>,
pub semantics: Option<Semantics<'a, RootDatabase>>,
}
impl RustAnalyzer {
pub fn new(project: &ProjectManifest, scratch_dir: &Path) -> Self {
impl<'a> RustAnalyzer<'a> {
pub fn load_workspace(
project: &ProjectManifest,
scratch_dir: &Path,
) -> Option<(RootDatabase, Vfs)> {
let config = CargoConfig {
sysroot: Some(RustLibSource::Discover),
target_dir: ra_ap_paths::Utf8PathBuf::from_path_buf(scratch_dir.to_path_buf())
@@ -50,14 +54,55 @@ impl RustAnalyzer {
let manifest = project.manifest_path();
match load_workspace_at(manifest.as_ref(), &config, &load_config, &progress) {
Ok((db, vfs, _macro_server)) => RustAnalyzer::WithDatabase { db, vfs },
Ok((db, vfs, _macro_server)) => Some((db, vfs)),
Err(err) => {
log::error!("failed to load workspace for {}: {}", manifest, err);
RustAnalyzer::WithoutDatabase()
None
}
}
}
pub fn parse(&mut self, path: &Path) -> ParseResult<'_> {
// Builds the semantics-backed variant. Callers obtain `vfs`/`semantics`
// from `load_workspace` + `Semantics::new` (see the call site in main).
pub fn new(vfs: &'a Vfs, semantics: Semantics<'a, RootDatabase>) -> Self {
RustAnalyzer::WithSemantics { vfs, semantics }
}
// Returns the `Semantics` handle when a workspace was loaded, `None` for
// `WithoutSemantics`. Used by `extract` to pass semantics into the
// translator only when a file id is also available.
pub fn semantics(&'a self) -> Option<&'a Semantics<'a, RootDatabase>> {
match self {
RustAnalyzer::WithSemantics { vfs: _, semantics } => Some(semantics),
RustAnalyzer::WithoutSemantics => None,
}
}
pub fn parse(&self, path: &Path) -> ParseResult {
if let RustAnalyzer::WithSemantics { vfs, semantics } = self {
if let Some(file_id) = Utf8PathBuf::from_path_buf(path.to_path_buf())
.ok()
.and_then(|x| AbsPathBuf::try_from(x).ok())
.map(VfsPath::from)
.and_then(|x| vfs.file_id(&x))
{
if let Ok(input) = std::panic::catch_unwind(|| semantics.db.file_text(file_id)) {
let file_id = EditionedFileId::current_edition(file_id);
let source_file = semantics.parse(file_id);
let errors = semantics
.db
.parse_errors(file_id)
.into_iter()
.flat_map(|x| x.to_vec())
.collect();
return ParseResult {
ast: source_file,
text: input,
errors,
file_id: Some(file_id),
};
} else {
log::debug!(
"No text available for file_id '{:?}', falling back to loading file '{}' from disk.",
file_id,
path.to_string_lossy()
)
}
}
}
let mut errors = Vec::new();
let input = match std::fs::read(path) {
Ok(data) => data,
@@ -71,32 +116,6 @@ impl RustAnalyzer {
};
let (input, err) = from_utf8_lossy(&input);
if let RustAnalyzer::WithDatabase { vfs, db } = self {
if let Some(file_id) = Utf8PathBuf::from_path_buf(path.to_path_buf())
.ok()
.and_then(|x| AbsPathBuf::try_from(x).ok())
.map(VfsPath::from)
.and_then(|x| vfs.file_id(&x))
{
db.set_file_text(file_id, &input);
let semantics = Semantics::new(db);
let file_id = EditionedFileId::current_edition(file_id);
let source_file = semantics.parse(file_id);
errors.extend(
db.parse_errors(file_id)
.into_iter()
.flat_map(|x| x.to_vec()),
);
return ParseResult {
ast: source_file,
text: input.as_ref().into(),
errors,
file_id: Some(file_id),
semantics: Some(semantics),
};
}
}
let parse = ra_ap_syntax::ast::SourceFile::parse(&input, Edition::CURRENT);
errors.extend(parse.errors());
errors.extend(err);
@@ -105,7 +124,6 @@ impl RustAnalyzer {
text: input.as_ref().into(),
errors,
file_id: None,
semantics: None,
}
}
}

View File

@@ -55,7 +55,7 @@ pub struct Translator<'a> {
label: trap::Label,
line_index: LineIndex,
file_id: Option<EditionedFileId>,
pub semantics: Option<Semantics<'a, RootDatabase>>,
pub semantics: Option<&'a Semantics<'a, RootDatabase>>,
}
impl<'a> Translator<'a> {
@@ -65,7 +65,7 @@ impl<'a> Translator<'a> {
label: trap::Label,
line_index: LineIndex,
file_id: Option<EditionedFileId>,
semantics: Option<Semantics<'a, RootDatabase>>,
semantics: Option<&'a Semantics<'a, RootDatabase>>,
) -> Translator<'a> {
Translator {
trap,