mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-10-01 11:31:15 +00:00
Auto merge of #16035 - Veykril:macro-diagnostics, r=Veykril
fix: Fix diagnostics panicking when resolving to different files due to macros
Fixes https://github.com/rust-lang/rust-analyzer/issues/14968
This commit is contained in:
commit 77e362c34d
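In short, the patch does two things: diagnostics now carry the file they point to (their range becomes a FileRange instead of a bare TextRange), and the macro-specific queries (is_attr_macro, is_builtin_derive, is_include_macro, ...) move from HirFileIdExt to a new MacroFileIdExt trait, so callers first obtain a MacroFileId via HirFileId::macro_file(). The sketch below is a minimal, self-contained illustration of that shape with toy stand-in types; it is not the real rust-analyzer API.

// Minimal sketch (toy types, assumptions marked): diagnostics carry the file
// they point at, and macro-only questions require an explicit `macro_file()`
// step instead of being asked of any file id.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FileId(u32);

#[derive(Debug, Clone, Copy)]
struct FileRange {
    file_id: FileId, // previously the diagnostic only stored start..end
    start: u32,
    end: u32,
}

#[derive(Debug, Clone, Copy)]
struct MacroFileId(u32);

// Stand-in for `HirFileId`: either a real source file or a macro expansion.
#[derive(Debug, Clone, Copy)]
enum HirFileId {
    File(FileId),
    Macro(MacroFileId),
}

impl HirFileId {
    // New style: the "not a macro" case is handled up front by the caller.
    fn macro_file(self) -> Option<MacroFileId> {
        match self {
            HirFileId::Macro(it) => Some(it),
            HirFileId::File(_) => None,
        }
    }
}

// Stand-in for the diagnostics change: a diagnostic resolving into an
// include!d file can no longer be confused with the file it was requested for.
#[derive(Debug)]
struct Diagnostic {
    message: String,
    range: FileRange,
}

fn main() {
    let diag = Diagnostic {
        message: "unresolved macro `err`".to_owned(),
        range: FileRange { file_id: FileId(1), start: 83, end: 89 },
    };
    // Consumers can now group diagnostics by the file they actually point to.
    println!("{:?} -> {}", diag.range.file_id, diag.message);

    let in_macro = HirFileId::Macro(MacroFileId(0));
    assert!(in_macro.macro_file().is_some());
}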
@@ -18,7 +18,7 @@ use std::{iter, ops::Range, sync};
use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId};
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, InFile, MacroFileId, MacroFileIdExt};
use stdx::format_to;
use syntax::{
ast::{self, edit::IndentLevel},
@@ -172,35 +172,41 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
};

if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
let call = src.file_id.call_node(&db).expect("macro file");
let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
if let Some(file_id) = src.file_id.macro_file() {
if file_id.is_attr_macro(&db) || file_id.is_custom_derive(&db) {
let call = file_id.call_node(&db);
let mut show_spans = false;
let mut show_ctxt = false;
for comment in
call.value.children_with_tokens().filter(|it| it.kind() == COMMENT)
{
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
}
}
}

for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(src.file_id).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
if let Some(macro_file) = src.file_id.macro_file() {
if macro_file.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(macro_file.into()).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
}
}
}
@@ -1,7 +1,7 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
use hir_expand::{name::Name, HirFileIdExt};
use hir_expand::{name::Name, HirFileIdExt, MacroFileIdExt};
use limit::Limit;
use syntax::SmolStr;
@@ -73,7 +73,7 @@ impl ModDir {
Some(attr_path) => {
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
}
None if file_id.is_include_macro(db.upcast()) => {
None if file_id.macro_file().map_or(false, |it| it.is_include_macro(db.upcast())) => {
candidate_files.push(format!("{}.rs", name.display(db.upcast())));
candidate_files.push(format!("{}/mod.rs", name.display(db.upcast())));
}
@@ -6,9 +6,9 @@ use base_db::{
FileId, FileRange,
};
use either::Either;
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange};
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange, TextSize};

use crate::{db, ExpansionInfo, HirFileIdExt as _};
use crate::{db, ExpansionInfo, MacroFileIdExt};

/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
@@ -119,16 +119,6 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
// region:specific impls

impl InFile<&SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => node.file_id.call_node(db),
})
}

/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
@@ -137,8 +127,9 @@ impl InFile<&SyntaxNode> {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let parent_node = node.file_id.call_node(db)?;
if node.file_id.is_attr_macro(db) {
let macro_file_id = node.file_id.macro_file()?;
let parent_node = macro_file_id.call_node(db);
if macro_file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
@@ -222,7 +213,7 @@ impl InFile<&SyntaxNode> {
}
HirFileIdRepr::MacroFile(m) => m,
};
if !self.file_id.is_attr_macro(db) {
if !file_id.is_attr_macro(db) {
return None;
}
@@ -243,21 +234,23 @@ impl InFile<&SyntaxNode> {
}
}

impl InFile<SyntaxToken> {
impl InMacroFile<SyntaxToken> {
pub fn upmap_once(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<smallvec::SmallVec<[TextRange; 1]>>> {
Some(self.file_id.expansion_info(db)?.map_range_up_once(db, self.value.text_range()))
) -> InFile<smallvec::SmallVec<[TextRange; 1]>> {
self.file_id.expansion_info(db).map_range_up_once(db, self.value.text_range())
}
}

impl InFile<SyntaxToken> {
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.map_token_range_up(db, self.value.text_range());
.span_for_offset(db, self.value.text_range().start());

// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -280,7 +273,7 @@ impl InFile<SyntaxToken> {
}
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.map_token_range_up(db, self.value.text_range());
.span_for_offset(db, self.value.text_range().start());

// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
@@ -294,20 +287,13 @@ impl InFile<SyntaxToken> {
}
}

impl InFile<TextRange> {
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
let (range, _ctxt) = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(m) => {
ExpansionInfo::new(db, m).map_token_range_up(db, self.value)
}
};
range
impl InMacroFile<TextSize> {
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> (FileRange, SyntaxContextId) {
ExpansionInfo::new(db, self.file_id).span_for_offset(db, self.value)
}
}

impl InFile<TextRange> {
pub fn original_node_file_range(
self,
db: &dyn db::ExpandDatabase,
@@ -353,7 +339,7 @@ impl<N: AstNode> InFile<N> {
}
HirFileIdRepr::MacroFile(m) => m,
};
if !self.file_id.is_attr_macro(db) {
if !file_id.is_attr_macro(db) {
return None;
}
@@ -179,9 +179,6 @@ pub trait HirFileIdExt {
/// one of the calls comes from an `include!``.
fn original_file_respecting_includes(self, db: &dyn db::ExpandDatabase) -> FileId;

/// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>>;

/// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>>;
@@ -190,19 +187,6 @@ pub trait HirFileIdExt {

fn as_builtin_derive_attr_node(&self, db: &dyn db::ExpandDatabase)
-> Option<InFile<ast::Attr>>;
fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;

/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;

fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an attr macro
fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;

/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
}

impl HirFileIdExt for HirFileId {
@@ -241,12 +225,6 @@ impl HirFileIdExt for HirFileId {
}
}

fn call_node(self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
let macro_file = self.macro_file()?;
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
Some(loc.to_node(db))
}

fn original_call_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<SyntaxNode>> {
let mut call = db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).to_node(db);
loop {
@@ -278,77 +256,34 @@ impl HirFileIdExt for HirFileId {
};
Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}

fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
matches!(
db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
)
}
None => false,
}
}

fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
matches!(
db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind,
MacroDefKind::BuiltInDerive(..)
)
}
None => false,
}
}

fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include()
}
_ => false,
}
}

fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}
_ => false,
}
}

fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
matches!(loc.kind, MacroCallKind::Attr { .. })
}
_ => false,
}
}

fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
match self.macro_file() {
Some(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
loc.def.is_attribute_derive()
}
None => false,
}
}
}

pub trait MacroFileIdExt {
fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32;
/// If this is a macro call, returns the syntax node of the call.
fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode>;

fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo;

fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool;
fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool;

/// Return whether this file is an include macro
fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool;

fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool;
/// Return whether this file is an attr macro
fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool;

/// Return whether this file is the pseudo expansion of the derive attribute.
/// See [`crate::builtin_attr_macro::derive_attr_expand`].
fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool;
}

impl MacroFileIdExt for MacroFileId {
fn call_node(self, db: &dyn db::ExpandDatabase) -> InFile<SyntaxNode> {
db.lookup_intern_macro_call(self.macro_call_id).to_node(db)
}
fn expansion_level(self, db: &dyn db::ExpandDatabase) -> u32 {
let mut level = 0;
let mut macro_file = self;
@@ -367,6 +302,39 @@ impl MacroFileIdExt for MacroFileId {
fn expansion_info(self, db: &dyn db::ExpandDatabase) -> ExpansionInfo {
ExpansionInfo::new(db, self)
}

fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)
)
}

fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool {
matches!(
db.lookup_intern_macro_call(self.macro_call_id).def.kind,
MacroDefKind::BuiltInDerive(..)
)
}

fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
}

fn is_eager(&self, db: &dyn db::ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
}

fn is_attr_macro(&self, db: &dyn db::ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
matches!(loc.kind, MacroCallKind::Attr { .. })
}

fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::ExpandDatabase) -> bool {
let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
loc.def.is_attribute_derive()
}
}

impl MacroDefId {
@@ -653,14 +621,14 @@ impl ExpansionInfo {
Some(tokens.map(move |token| InMacroFile::new(self.expanded.file_id, token)))
}

/// Maps up the text range out of the expansion hierarchy back into the original file its from.
pub fn map_token_range_up(
/// Looks up the span at the given offset.
pub fn span_for_offset(
&self,
db: &dyn db::ExpandDatabase,
range: TextRange,
offset: TextSize,
) -> (FileRange, SyntaxContextId) {
debug_assert!(self.expanded.value.text_range().contains_range(range));
let span = self.exp_map.span_at(range.start());
debug_assert!(self.expanded.value.text_range().contains(offset));
let span = self.exp_map.span_at(offset);
let anchor_offset = db
.ast_id_map(span.anchor.file_id.into())
.get_erased(span.anchor.ast_id)
@@ -24,7 +24,7 @@ use hir_def::{
};
use hir_expand::{
name::{AsName, Name},
HirFileId, HirFileIdExt,
HirFileId, MacroFileIdExt,
};
use stdx::{always, never};
use syntax::{
@@ -196,7 +196,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast())
matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()))
})
};
@@ -128,6 +128,7 @@ pub use {
hygiene::{marks_rev, SyntaxContextExt},
name::{known, Name},
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
MacroFileIdExt,
},
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
@@ -607,7 +608,7 @@ impl Module {
let tree = loc.id.item_tree(db.upcast());
let node = &tree[loc.id.value];
let file_id = loc.id.file_id();
if file_id.is_builtin_derive(db.upcast()) {
if file_id.macro_file().map_or(false, |it| it.is_builtin_derive(db.upcast())) {
// these expansion come from us, diagnosing them is a waste of resources
// FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow
continue;
@@ -20,8 +20,8 @@ use hir_def::{
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId,
MacroFileId, MacroFileIdExt,
db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, MacroCallId, MacroFileId,
MacroFileIdExt,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -865,7 +865,7 @@ impl<'db> SemanticsImpl<'db> {
Some(parent) => Some(InFile::new(file_id, parent)),
None => {
self.cache(value.clone(), file_id);
file_id.call_node(db)
Some(file_id.macro_file()?.call_node(db))
}
}
})
@@ -29,7 +29,7 @@ use hir_expand::{
mod_path::path,
name,
name::{AsName, Name},
HirFileId, HirFileIdExt, InFile, MacroFileId, MacroFileIdExt,
HirFileId, InFile, MacroFileId, MacroFileIdExt,
};
use hir_ty::{
diagnostics::{
@@ -939,11 +939,12 @@ fn scope_for_offset(
}

// FIXME handle attribute expansion
let source = iter::successors(file_id.call_node(db.upcast()), |it| {
it.file_id.call_node(db.upcast())
})
.find(|it| it.file_id == from_file)
.filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
let source =
iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
Some(it.file_id.macro_file()?.call_node(db.upcast()))
})
.find(|it| it.file_id == from_file)
.filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
Some((source.value.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
@@ -49,10 +49,6 @@ impl DeclarationLocation {
let node = resolve_node(db, self.hir_file_id, &self.ptr);
node.as_ref().original_file_range(db.upcast())
}

pub fn original_name_range(&self, db: &dyn HirDatabase) -> FileRange {
InFile::new(self.hir_file_id, self.name_ptr.text_range()).original_file_range(db.upcast())
}
}

fn resolve_node(
@@ -1,4 +1,4 @@
use hir::{HirFileIdExt, InFile, ModuleDef};
use hir::{InFile, MacroFileIdExt, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{
@@ -43,12 +43,12 @@ pub(crate) fn replace_derive_with_manual_impl(
) -> Option<()> {
let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
let path = attr.path()?;
let hir_file = ctx.sema.hir_file_for(attr.syntax());
if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
let macro_file = ctx.sema.hir_file_for(attr.syntax()).macro_file()?;
if !macro_file.is_derive_attr_pseudo_expansion(ctx.db()) {
return None;
}

let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
let InFile { file_id, value } = macro_file.call_node(ctx.db());
if file_id.is_macro() {
// FIXME: make this work in macro files
return None;
@@ -56,7 +56,7 @@ pub(crate) fn replace_derive_with_manual_impl(
// collect the derive paths from the #[derive] expansion
let current_derives = ctx
.sema
.parse_or_expand(hir_file)
.parse_or_expand(macro_file.into())
.descendants()
.filter_map(ast::Attr::cast)
.filter_map(|attr| attr.path())
@@ -1,7 +1,10 @@
//! Suggests shortening `Foo { field: field }` to `Foo { field }` in both
//! expressions and patterns.

use ide_db::{base_db::FileId, source_change::SourceChange};
use ide_db::{
base_db::{FileId, FileRange},
source_change::SourceChange,
};
use syntax::{ast, match_ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
@@ -49,7 +52,7 @@ fn check_expr_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct initialization",
field_range,
FileRange { file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_expr_field_shorthand",
@@ -93,7 +96,7 @@ fn check_pat_field_shorthand(
Diagnostic::new(
DiagnosticCode::Clippy("redundant_field_names"),
"Shorthand struct pattern",
field_range,
FileRange { file_id, range: field_range },
)
.with_fixes(Some(vec![fix(
"use_pat_field_shorthand",
@@ -31,7 +31,7 @@ pub(crate) fn inactive_code(
let res = Diagnostic::new(
DiagnosticCode::Ra("inactive-code", Severity::WeakWarning),
message,
ctx.sema.diagnostics_display_range(d.node.clone()).range,
ctx.sema.diagnostics_display_range(d.node.clone()),
)
.with_unused(true);
Some(res)
@@ -8,7 +8,7 @@ pub(crate) fn invalid_derive_target(
ctx: &DiagnosticsContext<'_>,
d: &hir::InvalidDeriveTarget,
) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
let display_range = ctx.sema.diagnostics_display_range(d.node.clone());

Diagnostic::new(
DiagnosticCode::RustcHardError("E0774"),
@@ -3,7 +3,7 @@

use hir::{PathResolution, Semantics};
use ide_db::{
base_db::FileId,
base_db::{FileId, FileRange},
helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope},
source_change::SourceChangeBuilder,
@@ -119,7 +119,7 @@ pub(crate) fn json_in_items(
Diagnostic::new(
DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning),
"JSON syntax is not valid as a Rust item",
range,
FileRange { file_id, range },
)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(file_id);
@@ -264,4 +264,24 @@ fn f() {
"#,
)
}

#[test]
fn include_does_not_break_diagnostics() {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
config.disabled.insert("unlinked-file".to_string());
check_diagnostics_with_config(
config,
r#"
//- minicore: include
//- /lib.rs crate:lib
include!("include-me.rs");
//- /include-me.rs
/// long doc that pushes the diagnostic range beyond the first file's text length
#[err]
//^^^^^^error: unresolved macro `err`
mod prim_never {}
"#,
);
}
}
@@ -7,7 +7,7 @@ pub(crate) fn malformed_derive(
ctx: &DiagnosticsContext<'_>,
d: &hir::MalformedDerive,
) -> Diagnostic {
let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range;
let display_range = ctx.sema.diagnostics_display_range(d.node.clone());

Diagnostic::new(
DiagnosticCode::RustcHardError("E0777"),
@@ -1,8 +1,9 @@
use either::Either;
use hir::InFile;
use ide_db::base_db::FileRange;
use syntax::{
ast::{self, HasArgList},
AstNode, SyntaxNodePtr, TextRange,
AstNode, SyntaxNodePtr,
};

use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext};
@@ -48,7 +49,7 @@ fn invalid_args_range(
source: InFile<SyntaxNodePtr>,
expected: usize,
found: usize,
) -> TextRange {
) -> FileRange {
adjusted_display_range::<Either<ast::Expr, ast::TupleStructPat>>(ctx, source, &|expr| {
let (text_range, r_paren_token, expected_arg) = match expr {
Either::Left(ast::Expr::CallExpr(call)) => {
@@ -35,14 +35,10 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
Some(salient_token_range)
},
),
pat => {
ctx.sema
.diagnostics_display_range(InFile {
file_id: d.expr_or_pat.file_id,
value: pat.syntax_node_ptr(),
})
.range
}
pat => ctx.sema.diagnostics_display_range(InFile {
file_id: d.expr_or_pat.file_id,
value: pat.syntax_node_ptr(),
}),
};
let mut diag = Diagnostic::new(
DiagnosticCode::RustcHardError("E0308"),
@@ -84,7 +80,7 @@ fn add_reference(
expr_ptr: &InFile<AstPtr<ast::Expr>>,
acc: &mut Vec<Assist>,
) -> Option<()> {
let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into())).range;
let range = ctx.sema.diagnostics_display_range(expr_ptr.clone().map(|it| it.into()));

let (_, mutability) = d.expected.as_reference()?;
let actual_with_ref = Type::reference(&d.actual, mutability);
@@ -94,10 +90,9 @@ fn add_reference(

let ampersands = format!("&{}", mutability.as_keyword_for_ref());

let edit = TextEdit::insert(range.start(), ampersands);
let source_change =
SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), edit);
acc.push(fix("add_reference_here", "Add reference here", source_change, range));
let edit = TextEdit::insert(range.range.start(), ampersands);
let source_change = SourceChange::from_text_edit(range.file_id, edit);
acc.push(fix("add_reference_here", "Add reference here", source_change, range.range));
Some(())
}
@@ -26,7 +26,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
)
};

Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range)
Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range)
.with_fixes(fixes)
}
@@ -4,7 +4,7 @@ use std::iter;

use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
use ide_db::{
base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
base_db::{FileId, FileLoader, FileRange, SourceDatabase, SourceDatabaseExt},
source_change::SourceChange,
RootDatabase,
};
@@ -46,8 +46,12 @@ pub(crate) fn unlinked_file(
.unwrap_or(range);

acc.push(
Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range)
.with_fixes(fixes),
Diagnostic::new(
DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning),
message,
FileRange { file_id, range },
)
.with_fixes(fixes),
);
}
@@ -87,7 +87,12 @@ mod baz {}
"E0583",
),
message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
range: 0..8,
range: FileRange {
file_id: FileId(
0,
),
range: 0..8,
},
severity: Error,
unused: false,
experimental: false,
@@ -1,5 +1,8 @@
use hir::InFile;
use ide_db::{base_db::FileId, source_change::SourceChange};
use ide_db::{
base_db::{FileId, FileRange},
source_change::SourceChange,
};
use itertools::Itertools;
use syntax::{ast, AstNode, SyntaxNode};
use text_edit::TextEdit;
@@ -38,7 +41,7 @@ pub(crate) fn useless_braces(
Diagnostic::new(
DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_string(),
use_range,
FileRange { file_id, range: use_range },
)
.with_main_node(InFile::new(file_id.into(), node.clone()))
.with_fixes(Some(vec![fix(
@@ -133,7 +133,7 @@ impl DiagnosticCode {
pub struct Diagnostic {
pub code: DiagnosticCode,
pub message: String,
pub range: TextRange,
pub range: FileRange,
pub severity: Severity,
pub unused: bool,
pub experimental: bool,
@@ -143,7 +143,7 @@ pub struct Diagnostic {
}

impl Diagnostic {
fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic {
fn new(code: DiagnosticCode, message: impl Into<String>, range: FileRange) -> Diagnostic {
let message = message.into();
Diagnostic {
code,
@@ -172,7 +172,7 @@ impl Diagnostic {
node: InFile<SyntaxNodePtr>,
) -> Diagnostic {
let file_id = node.file_id;
Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range)
Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()))
.with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id))))
}
@@ -267,7 +267,7 @@ impl DiagnosticsContext<'_> {
&self,
node: &InFile<SyntaxNodePtr>,
precise_location: Option<TextRange>,
) -> TextRange {
) -> FileRange {
let sema = &self.sema;
(|| {
let precise_location = precise_location?;
@@ -280,10 +280,11 @@ impl DiagnosticsContext<'_> {
}
})()
.unwrap_or_else(|| sema.diagnostics_display_range(node.clone()))
.range
}
}

/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files
/// due to macros.
pub fn diagnostics(
db: &RootDatabase,
config: &DiagnosticsConfig,
@@ -300,7 +301,7 @@ pub fn diagnostics(
Diagnostic::new(
DiagnosticCode::RustcHardError("syntax-error"),
format!("Syntax Error: {err}"),
err.range(),
FileRange { file_id, range: err.range() },
)
}));
@@ -569,12 +570,15 @@ fn adjusted_display_range<N: AstNode>(
ctx: &DiagnosticsContext<'_>,
diag_ptr: InFile<SyntaxNodePtr>,
adj: &dyn Fn(N) -> Option<TextRange>,
) -> TextRange {
) -> FileRange {
let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);

let source_file = ctx.sema.db.parse(file_id);
find_node_at_range::<N>(&source_file.syntax_node(), range)
.filter(|it| it.syntax().text_range() == range)
.and_then(adj)
.unwrap_or(range)
FileRange {
file_id,
range: find_node_at_range::<N>(&source_file.syntax_node(), range)
.filter(|it| it.syntax().text_range() == range)
.and_then(adj)
.unwrap_or(range),
}
}
@@ -7,6 +7,7 @@ use ide_db::{
base_db::{fixture::WithFixture, SourceDatabaseExt},
LineIndexDatabase, RootDatabase,
};
use itertools::Itertools;
use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations, MiniCore};
@@ -103,33 +104,39 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) {
#[track_caller]
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
let (db, files) = RootDatabase::with_many_files(ra_fixture);
let mut annotations = files
.iter()
.copied()
.flat_map(|file_id| {
super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id).into_iter().map(
|d| {
let mut annotation = String::new();
if let Some(fixes) = &d.fixes {
assert!(!fixes.is_empty());
annotation.push_str("💡 ")
}
annotation.push_str(match d.severity {
Severity::Error => "error",
Severity::WeakWarning => "weak",
Severity::Warning => "warn",
Severity::Allow => "allow",
});
annotation.push_str(": ");
annotation.push_str(&d.message);
(d.range, annotation)
},
)
})
.map(|(diagnostic, annotation)| (diagnostic.file_id, (diagnostic.range, annotation)))
.into_group_map();
for file_id in files {
let line_index = db.line_index(file_id);
let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);

let mut actual = annotations.remove(&file_id).unwrap_or_default();
let expected = extract_annotations(&db.file_text(file_id));
let mut actual = diagnostics
.into_iter()
.map(|d| {
let mut annotation = String::new();
if let Some(fixes) = &d.fixes {
assert!(!fixes.is_empty());
annotation.push_str("💡 ")
}
annotation.push_str(match d.severity {
Severity::Error => "error",
Severity::WeakWarning => "weak",
Severity::Warning => "warn",
Severity::Allow => "allow",
});
annotation.push_str(": ");
annotation.push_str(&d.message);
(d.range, annotation)
})
.collect::<Vec<_>>();
actual.sort_by_key(|(range, _)| range.start());
if expected.is_empty() {
// makes minicore smoke test debugable
// makes minicore smoke test debuggable
for (e, _) in &actual {
eprintln!(
"Code in range {e:?} = {}",
@@ -1,4 +1,4 @@
use hir::{DescendPreference, HirFileIdExt, InFile, Semantics};
use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics};
use ide_db::{
base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
@@ -44,15 +44,15 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
.descend_into_macros(DescendPreference::None, tok.clone())
.into_iter()
.find_map(|descended| {
let hir_file = sema.hir_file_for(&descended.parent()?);
if !hir_file.is_derive_attr_pseudo_expansion(db) {
let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?;
if !macro_file.is_derive_attr_pseudo_expansion(db) {
return None;
}

let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
// up map out of the #[derive] expansion
let InFile { file_id, value: tokens } =
hir::InFile::new(hir_file, descended).upmap_once(db)?;
hir::InMacroFile::new(macro_file, descended).upmap_once(db);
let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?;
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
let expansions = sema.expand_derive_macro(&attr)?;
@@ -142,7 +142,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
Definition::Function(it) => it.source(db).map(|src| src.file_id),
_ => None,
};
if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) {
if let Some(file_id) = file_id.filter(|file| file.macro_file().is_some()) {
in_macro_expansion.entry(file_id).or_default().push(runnable);
return;
}
@@ -1,6 +1,6 @@
//! Computes color for a single element.

use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics};
use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
use ide_db::{
defs::{Definition, IdentClass, NameClass, NameRefClass},
FxHashMap, RootDatabase, SymbolKind,
@@ -218,7 +218,10 @@ fn highlight_name_ref(
// We can fix this for derive attributes since derive helpers are recorded, but not for
// general attributes.
None if name_ref.syntax().ancestors().any(|it| it.kind() == ATTR)
&& !sema.hir_file_for(name_ref.syntax()).is_derive_attr_pseudo_expansion(sema.db) =>
&& !sema
.hir_file_for(name_ref.syntax())
.macro_file()
.map_or(false, |it| it.is_derive_attr_pseudo_expansion(sema.db)) =>
{
return HlTag::Symbol(SymbolKind::Attribute).into();
}
@@ -5,6 +5,7 @@ use std::mem;

use ide::FileId;
use ide_db::FxHashMap;
use itertools::Itertools;
use nohash_hasher::{IntMap, IntSet};
use rustc_hash::FxHashSet;
use triomphe::Arc;
@@ -129,8 +130,28 @@ pub(crate) fn fetch_native_diagnostics(
) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
let _p = profile::span("fetch_native_diagnostics");
let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
subscriptions
.into_iter()

let convert_diagnostic =
|line_index: &crate::line_index::LineIndex, d: ide::Diagnostic| lsp_types::Diagnostic {
range: lsp::to_proto::range(&line_index, d.range.range),
severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
code_description: Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&d.code.url()).unwrap(),
}),
source: Some("rust-analyzer".to_string()),
message: d.message,
related_information: None,
tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
data: None,
};

// the diagnostics produced may point to different files not requested by the concrete request,
// put those into here and filter later
let mut odd_ones = Vec::new();
let mut diagnostics = subscriptions
.iter()
.copied()
.filter_map(|file_id| {
let line_index = snapshot.file_line_index(file_id).ok()?;
let diagnostics = snapshot
@@ -142,21 +163,39 @@ pub(crate) fn fetch_native_diagnostics(
)
.ok()?
.into_iter()
.map(move |d| lsp_types::Diagnostic {
range: lsp::to_proto::range(&line_index, d.range),
severity: Some(lsp::to_proto::diagnostic_severity(d.severity)),
code: Some(lsp_types::NumberOrString::String(d.code.as_str().to_string())),
code_description: Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&d.code.url()).unwrap(),
}),
source: Some("rust-analyzer".to_string()),
message: d.message,
related_information: None,
tags: d.unused.then(|| vec![lsp_types::DiagnosticTag::UNNECESSARY]),
data: None,
.filter_map(|d| {
if d.range.file_id == file_id {
Some(convert_diagnostic(&line_index, d))
} else {
odd_ones.push(d);
None
}
})
.collect::<Vec<_>>();
Some((file_id, diagnostics))
})
.collect()
.collect::<Vec<_>>();

// Add back any diagnostics that point to files we are subscribed to
for (file_id, group) in odd_ones
.into_iter()
.sorted_by_key(|it| it.range.file_id)
.group_by(|it| it.range.file_id)
.into_iter()
{
if !subscriptions.contains(&file_id) {
continue;
}
let Some((_, diagnostics)) = diagnostics.iter_mut().find(|&&mut (id, _)| id == file_id)
else {
continue;
};
let Some(line_index) = snapshot.file_line_index(file_id).ok() else {
break;
};
for diagnostic in group {
diagnostics.push(convert_diagnostic(&line_index, diagnostic));
}
}
diagnostics
}
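The last two hunks rework how the LSP layer publishes these diagnostics: a diagnostic whose FileRange points at a different file than the one it was computed for is collected into odd_ones and then re-attached to that file's own bucket, provided the file is among the subscribed ones. Below is a rough, std-only sketch of that regrouping idea with toy types; it is not the real snapshot/LSP plumbing.

// Illustrative sketch (toy types, std only): set aside diagnostics that point
// at another file and later attach them to that file's bucket, but only when
// that file is among the subscribed files.

use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct FileId(u32);

#[derive(Debug, Clone)]
struct Diag {
    file_id: FileId, // the file the diagnostic actually points to
    message: String,
}

fn group_by_target(
    computed_for: &[(FileId, Vec<Diag>)],
    subscriptions: &[FileId],
) -> HashMap<FileId, Vec<Diag>> {
    let mut grouped: HashMap<FileId, Vec<Diag>> = HashMap::new();
    let mut odd_ones = Vec::new();
    for (requested, diags) in computed_for {
        for d in diags {
            if d.file_id == *requested {
                grouped.entry(*requested).or_default().push(d.clone());
            } else {
                // Points into another file, e.g. via include! or a macro.
                odd_ones.push(d.clone());
            }
        }
    }
    // Re-attach the stragglers to the file they point to, if we publish for it.
    for d in odd_ones {
        if subscriptions.contains(&d.file_id) {
            grouped.entry(d.file_id).or_default().push(d);
        }
    }
    grouped
}

fn main() {
    let lib = FileId(0);
    let included = FileId(1);
    let computed = vec![(
        lib,
        vec![Diag { file_id: included, message: "unresolved macro `err`".into() }],
    )];
    let grouped = group_by_target(&computed, &[lib, included]);
    assert_eq!(grouped.get(&included).map(Vec::len), Some(1));
}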