mirror of https://github.com/github/codeql.git
synced 2025-12-16 16:53:25 +01:00

Rust: fix compilation errors

Changed files: Cargo.lock (generated)
@@ -3449,4 +3449,3 @@ dependencies = [
 name = "rustc_apfloat"
 version = "0.2.2+llvm-462a31f5a5ab"
 source = "git+https://github.com/redsun82/rustc_apfloat.git?rev=32968f16ef1b082243f9bf43a3fbd65c381b3e27#32968f16ef1b082243f9bf43a3fbd65c381b3e27"
@@ -52,7 +52,7 @@ pub struct Config {
     pub cargo_target: Option<String>,
     pub cargo_features: Vec<String>,
     pub cargo_cfg_overrides: Vec<String>,
-    pub cargo_extra_env: FxHashMap<String, String>,
+    pub cargo_extra_env: FxHashMap<String, Option<String>>,
     pub cargo_extra_args: Vec<String>,
     pub cargo_all_targets: bool,
     pub logging_flamegraph: Option<PathBuf>,
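The widened value type means an entry in `cargo_extra_env` can now carry no value at all. A minimal sketch of how a consumer might apply such overrides when spawning cargo, assuming (the diff itself does not say) that `None` means "remove the variable"; `apply_extra_env` is a hypothetical helper, and plain `HashMap` stands in for `FxHashMap`:

    use std::collections::HashMap;
    use std::process::Command;

    // Hypothetical helper: applies extra-env overrides to a command, treating
    // a `None` value as "remove this variable" (an assumption, not confirmed
    // by the diff above).
    fn apply_extra_env(cmd: &mut Command, extra: &HashMap<String, Option<String>>) {
        for (key, value) in extra {
            match value {
                Some(v) => {
                    cmd.env(key, v);
                }
                None => {
                    cmd.env_remove(key);
                }
            }
        }
    }

    fn main() {
        let mut extra = HashMap::new();
        extra.insert("CARGO_NET_OFFLINE".to_owned(), Some("true".to_owned()));
        extra.insert("RUSTC_WRAPPER".to_owned(), None);
        let mut cmd = Command::new("cargo");
        apply_extra_env(&mut cmd, &extra);
    }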
@@ -1,5 +1,5 @@
 use serde::Deserializer;
-use serde::de::{Error, Unexpected, Visitor};
+use serde::de::Visitor;
 use std::collections::HashMap;
 use std::fmt::Formatter;
 use std::hash::BuildHasher;
@@ -36,23 +36,22 @@ impl<'de, T: From<String>> Visitor<'de> for VectorVisitor<T> {
 }

 impl<'de, S: BuildHasher + Default> Visitor<'de> for MapVisitor<S> {
-    type Value = HashMap<String, String, S>;
+    type Value = HashMap<String, Option<String>, S>;

     fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
         formatter.write_str(
-            "either a sequence, or a comma or newline separated string of key=value entries",
+            "either a sequence, or a comma or newline separated string of key[=value] entries",
         )
     }

     fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
-        value
+        Ok(value
             .split(['\n', ','])
-            .map(|s| {
-                s.split_once('=')
-                    .ok_or_else(|| E::custom(format!("key=value expected, found {s}")))
-                    .map(|(key, value)| (key.to_owned(), value.to_owned()))
+            .map(|s| match s.split_once('=') {
+                Some((key, value)) => (key.to_owned(), Some(value.to_owned())),
+                None => (s.to_owned(), None),
             })
-            .collect()
+            .collect())
     }

     fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
@@ -61,10 +60,14 @@ impl<'de, S: BuildHasher + Default> Visitor<'de> for MapVisitor<S> {
     {
         let mut ret = HashMap::with_hasher(Default::default());
         while let Some(el) = seq.next_element::<String>()? {
-            let (key, value) = el
-                .split_once('=')
-                .ok_or_else(|| A::Error::invalid_value(Unexpected::Str(&el), &self))?;
-            ret.insert(key.to_owned(), value.to_owned());
+            match el.split_once('=') {
+                None => {
+                    ret.insert(el.to_owned(), None);
+                }
+                Some((key, value)) => {
+                    ret.insert(key.to_owned(), Some(value.to_owned()));
+                }
+            }
         }
         Ok(ret)
     }
@@ -83,7 +86,7 @@ pub(crate) fn deserialize_newline_or_comma_separated_vec<
     deserializer.deserialize_seq(VectorVisitor(PhantomData))
 }

-/// deserialize into a map of `String`s to `String`s either of:
+/// deserialize into a map of `String`s to `Option<String>`s either of:
 /// * a sequence of elements serializable into `String`s, or
 /// * a single element serializable into `String`, then split on `,` and `\n`
 pub(crate) fn deserialize_newline_or_comma_separated_map<
@@ -92,6 +95,6 @@ pub(crate) fn deserialize_newline_or_comma_separated_map<
     S: BuildHasher + Default,
 >(
     deserializer: D,
-) -> Result<HashMap<String, String, S>, D::Error> {
+) -> Result<HashMap<String, Option<String>, S>, D::Error> {
     deserializer.deserialize_map(MapVisitor(PhantomData))
 }
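Taken together, these changes mean a bare key is now accepted and maps to `None` instead of being rejected. A self-contained sketch of the same splitting logic wired into serde via `deserialize_with`; it only handles the single-string form (the real `MapVisitor` also accepts sequences), and the struct, field, and helper names here are made up for illustration (requires the serde and serde_json crates):

    use serde::{Deserialize, Deserializer};
    use std::collections::HashMap;

    // Simplified stand-in for deserialize_newline_or_comma_separated_map:
    // splits a single string on ',' and '\n', treating bare keys as None.
    fn comma_separated_map<'de, D: Deserializer<'de>>(
        deserializer: D,
    ) -> Result<HashMap<String, Option<String>>, D::Error> {
        let raw = String::deserialize(deserializer)?;
        Ok(raw
            .split(['\n', ','])
            .map(|s| match s.split_once('=') {
                // "key=value" entry: keep the value
                Some((key, value)) => (key.to_owned(), Some(value.to_owned())),
                // bare "key" entry: no value attached
                None => (s.to_owned(), None),
            })
            .collect())
    }

    #[derive(Deserialize)]
    struct Config {
        #[serde(deserialize_with = "comma_separated_map")]
        cargo_extra_env: HashMap<String, Option<String>>,
    }

    fn main() {
        let config: Config =
            serde_json::from_str(r#"{"cargo_extra_env": "CARGO_NET_OFFLINE=true,RUSTC_WRAPPER"}"#)
                .unwrap();
        assert_eq!(config.cargo_extra_env["CARGO_NET_OFFLINE"].as_deref(), Some("true"));
        assert_eq!(config.cargo_extra_env["RUSTC_WRAPPER"].as_deref(), None);
    }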
@@ -77,11 +77,7 @@ impl<'a> RustAnalyzer<'a> {
             let editioned_file_id = semantics
                 .attach_first_edition(file_id)
                 .ok_or("failed to determine rust edition")?;
-            Ok((
-                semantics,
-                EditionedFileId::new(semantics.db, editioned_file_id),
-                input,
-            ))
+            Ok((semantics, editioned_file_id, input))
         }
     }
 }
@@ -174,7 +174,7 @@ impl<'a> Translator<'a> {
         if let Some(semantics) = self.semantics.as_ref() {
             let file_range = semantics.original_range(node.syntax());
             let file_id = self.file_id?;
-            if file_id.file_id(semantics.db) == file_range.file_id {
+            if file_id == file_range.file_id {
                 Some(file_range.range)
             } else {
                 None
@@ -298,20 +298,18 @@ impl<'a> Translator<'a> {
         if let Some(value) = semantics
             .hir_file_for(expanded)
             .macro_file()
-            .and_then(|macro_file| {
-                semantics
-                    .db
-                    .parse_macro_expansion_error(macro_file.macro_call_id)
-            })
+            .and_then(|macro_call_id| semantics.db.parse_macro_expansion_error(macro_call_id))
         {
             if let Some(err) = &value.err {
                 let error = err.render_to_string(semantics.db);

-                if err.span().anchor.file_id == semantics.hir_file_for(node.syntax()) {
+                let hir_file_id = semantics.hir_file_for(node.syntax());
+                if Some(err.span().anchor.file_id.file_id())
+                    == hir_file_id.file_id().map(|f| f.file_id(semantics.db))
+                {
                     let location = err.span().range
                         + semantics
                             .db
-                            .ast_id_map(err.span().anchor.file_id.into())
+                            .ast_id_map(hir_file_id)
                             .get_erased(err.span().anchor.ast_id)
                             .text_range()
                             .start();
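The location arithmetic in this hunk rebases the error's anchor-relative range onto the file by adding the anchor node's start offset. A tiny self-contained demonstration of that operation using the text-size crate (the crate behind rust-analyzer's `TextRange`/`TextSize`); the offsets here are invented:

    use text_size::{TextRange, TextSize};

    fn main() {
        // error range, relative to the anchoring AST node
        let err_range = TextRange::new(TextSize::from(2), TextSize::from(5));
        // the anchor node starts at offset 100 in the file
        let anchor_start = TextSize::from(100);
        // rebasing onto the file is plain offset addition
        assert_eq!(
            err_range + anchor_start,
            TextRange::new(TextSize::from(102), TextSize::from(105))
        );
    }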
@@ -363,10 +361,10 @@ impl<'a> Translator<'a> {
             .as_ref()
             .and_then(|s| s.expand_macro_call(mcall))
         {
-            self.emit_macro_expansion_parse_errors(mcall, &expanded);
+            self.emit_macro_expansion_parse_errors(mcall, &expanded.value);
             let expand_to = ra_ap_hir_expand::ExpandTo::from_call_site(mcall);
             let kind = expanded.kind();
-            if let Some(value) = self.emit_expanded_as(expand_to, expanded) {
+            if let Some(value) = self.emit_expanded_as(expand_to, expanded.value) {
                 generated::MacroCall::emit_macro_call_expansion(
                     label,
                     value,
@@ -669,11 +667,11 @@ impl<'a> Translator<'a> {
     ) {
         // work around a bug in rust-analyzer AST generation machinery
         // this code was inspired by rust-analyzer's own workaround for this:
-        // https://github.com/rust-lang/rust-analyzer/blob/1f86729f29ea50e8491a1516422df4fd3d1277b0/crates/syntax/src/ast/node_ext.rs#L268-L277
-        if item.l_angle_token().is_some() {
+        // https://github.com/rust-lang/rust-analyzer/blob/a642aa8023be11d6bc027fc6a68c71c2f3fc7f72/crates/syntax/src/ast/node_ext.rs#L290-L297
+        if let Some(anchor) = item.type_anchor() {
             // <T> or <T as Trait>
             // T is any TypeRef, Trait has to be a PathType
-            let mut type_refs = item
+            let mut type_refs = anchor
                 .syntax()
                 .children()
                 .filter(|node| ast::Type::can_cast(node.kind()));
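For context, the `type_anchor()` branch deals with qualified paths; in ordinary Rust the two shapes named in the comment look like this (illustration only, unrelated to the extractor's own types):

    trait Named {
        fn name() -> &'static str;
    }

    struct Foo;

    impl Named for Foo {
        fn name() -> &'static str {
            "Foo"
        }
    }

    fn main() {
        // `<Foo as Named>` is a type anchor: T = Foo, Named is the PathType
        let a = <Foo as Named>::name();
        // `<Foo>` is a type anchor with only the type part
        let b = <Foo>::name();
        assert_eq!(a, b);
    }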
@@ -691,6 +689,13 @@ impl<'a> Translator<'a> {
             {
                 generated::PathSegment::emit_trait_type_repr(label, t, &mut self.trap.writer)
             }
+            // moreover as we're skipping emission of TypeAnchor, we need to attach its comments to
+            // this path segment
+            self.emit_tokens(
+                &anchor,
+                label.into(),
+                anchor.syntax().children_with_tokens(),
+            );
         }
     }

@@ -743,9 +748,9 @@ impl<'a> Translator<'a> {
         }
         let ExpandResult {
             value: expanded, ..
-        } = semantics.expand_attr_macro(node)?;
-        self.emit_macro_expansion_parse_errors(node, &expanded);
-        let macro_items = ast::MacroItems::cast(expanded).or_else(|| {
+        } = self.semantics.and_then(|s| s.expand_attr_macro(node))?;
+        self.emit_macro_expansion_parse_errors(node, &expanded.value);
+        let macro_items = ast::MacroItems::cast(expanded.value).or_else(|| {
             let message = "attribute macro expansion cannot be cast to MacroItems".to_owned();
             let location = self.location_for_node(node);
             self.emit_diagnostic(
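The `.value` accesses in this hunk make sense if `expand_attr_macro` now returns its node wrapped in an `InFile` inside the `ExpandResult`; that is an inference from the diff, not something verified against rust-analyzer. A schematic with simplified stand-in types:

    // Stand-ins only: shapes reduced to what the destructuring needs.
    struct ExpandResult<T> {
        value: T,
        err: Option<String>,
    }

    struct InFile<T> {
        file_id: u32, // stand-in for the real file id type
        value: T,
    }

    fn main() {
        // per the diff, expansion now yields roughly this nesting:
        let result: ExpandResult<InFile<&str>> = ExpandResult {
            value: InFile { file_id: 0, value: "expanded node" },
            err: None,
        };
        // the diff binds the inner InFile as `expanded`...
        let ExpandResult { value: expanded, .. } = result;
        // ...so the syntax node itself is reached through `expanded.value`
        assert_eq!(expanded.value, "expanded node");
        assert_eq!(expanded.file_id, 0);
    }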