Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 11:31:15 +00:00)

commit c9f0f47bbb ("simplify")
parent 411eee7614
@@ -3,9 +3,11 @@
 //! This module uses a bit of static metadata to provide completions
 //! for built-in attributes.
 
+use std::mem;
+
 use once_cell::sync::Lazy;
 use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{ast, AstNode, SyntaxKind, T};
+use syntax::{ast, AstNode, NodeOrToken, SyntaxKind, T};
 
 use crate::{
     context::CompletionContext,
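
Note (not part of the commit): `NodeOrToken` is imported so a later hunk in this commit
can pass `NodeOrToken::into_token` to `filter_map` point-free instead of spelling out a
closure. Both forms do the same thing; a small sketch, assuming `tokens` is any iterator
over `NodeOrToken` values:

    // tokens.filter_map(|it| it.into_token())      // old, closure form
    // tokens.filter_map(NodeOrToken::into_token)   // new, function-path form
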
@@ -105,23 +107,32 @@ const fn attr(
 }
 
 macro_rules! attrs {
+    // attributes applicable to all items
     [@ { item $($tt:tt)* } {$($acc:tt)*}] => {
         attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle" })
     };
+    // attributes applicable to all adts
     [@ { adt $($tt:tt)* } {$($acc:tt)*}] => {
         attrs!(@ { $($tt)* } { $($acc)*, "derive", "repr" })
     };
+    // attributes applicable to all linkable things aka functions/statics
     [@ { linkable $($tt:tt)* } {$($acc:tt)*}] => {
-        attrs!(@ { $($tt)* } { $($acc)*, "export_name", "link_name", "link_section" }) };
-    [@ { $ty:ident $($tt:tt)* } {$($acc:tt)*}] => { compile_error!(concat!("unknown attr subtype ", stringify!($ty)))
+        attrs!(@ { $($tt)* } { $($acc)*, "export_name", "link_name", "link_section" })
+    };
+    // error fallback for nicer error message
+    [@ { $ty:ident $($tt:tt)* } {$($acc:tt)*}] => {
+        compile_error!(concat!("unknown attr subtype ", stringify!($ty)))
     };
+    // general push down accumulation
     [@ { $lit:literal $($tt:tt)*} {$($acc:tt)*}] => {
         attrs!(@ { $($tt)* } { $($acc)*, $lit })
     };
     [@ {$($tt:tt)+} {$($tt2:tt)*}] => {
         compile_error!(concat!("Unexpected input ", stringify!($($tt)+)))
     };
+    // final output construction
     [@ {} {$($tt:tt)*}] => { &[$($tt)*] as _ };
+    // starting matcher
     [$($tt:tt),*] => {
         attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "forbid", "warn" })
     };
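
Note (not part of the commit): attrs! is a push-down accumulator. An illustrative
expansion, assuming the attribute lists written in the arms above are complete: the
starting matcher seeds the accumulator with the always-applicable attributes, each
keyword arm appends its group, a bare literal is appended as-is, and the empty-input
arm emits the finished slice.

    // attrs!(item, adt, "non_exhaustive") ends up expanding to roughly:
    // &["allow", "cfg", "cfg_attr", "deny", "forbid", "warn",
    //   "deprecated", "doc", "dochidden", "docalias", "must_use", "no_mangle",
    //   "derive", "repr",
    //   "non_exhaustive"] as _
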
@@ -129,28 +140,29 @@ macro_rules! attrs {
 
 #[rustfmt::skip]
 static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| {
+    use SyntaxKind::*;
     std::array::IntoIter::new([
         (
-            SyntaxKind::SOURCE_FILE,
+            SOURCE_FILE,
             attrs!(
                 item,
                 "crate_name", "feature", "no_implicit_prelude", "no_main", "no_std",
                 "recursion_limit", "type_length_limit", "windows_subsystem"
             ),
         ),
-        (SyntaxKind::MODULE, attrs!(item, "no_implicit_prelude", "path")),
-        (SyntaxKind::ITEM_LIST, attrs!(item, "no_implicit_prelude")),
-        (SyntaxKind::MACRO_RULES, attrs!(item, "macro_export", "macro_use")),
-        (SyntaxKind::MACRO_DEF, attrs!(item)),
-        (SyntaxKind::EXTERN_CRATE, attrs!(item, "macro_use", "no_link")),
-        (SyntaxKind::USE, attrs!(item)),
-        (SyntaxKind::TYPE_ALIAS, attrs!(item)),
-        (SyntaxKind::STRUCT, attrs!(item, adt, "non_exhaustive")),
-        (SyntaxKind::ENUM, attrs!(item, adt, "non_exhaustive")),
-        (SyntaxKind::UNION, attrs!(item, adt)),
-        (SyntaxKind::CONST, attrs!(item)),
+        (MODULE, attrs!(item, "no_implicit_prelude", "path")),
+        (ITEM_LIST, attrs!(item, "no_implicit_prelude")),
+        (MACRO_RULES, attrs!(item, "macro_export", "macro_use")),
+        (MACRO_DEF, attrs!(item)),
+        (EXTERN_CRATE, attrs!(item, "macro_use", "no_link")),
+        (USE, attrs!(item)),
+        (TYPE_ALIAS, attrs!(item)),
+        (STRUCT, attrs!(item, adt, "non_exhaustive")),
+        (ENUM, attrs!(item, adt, "non_exhaustive")),
+        (UNION, attrs!(item, adt)),
+        (CONST, attrs!(item)),
         (
-            SyntaxKind::FN,
+            FN,
             attrs!(
                 item, linkable,
                 "cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro",
@@ -158,29 +170,29 @@ static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| {
                 "test", "track_caller"
             ),
         ),
-        (SyntaxKind::STATIC, attrs!(item, linkable, "global_allocator", "used")),
-        (SyntaxKind::TRAIT, attrs!(item, "must_use")),
-        (SyntaxKind::IMPL, attrs!(item, "automatically_derived")),
-        (SyntaxKind::ASSOC_ITEM_LIST, attrs!(item)),
-        (SyntaxKind::EXTERN_BLOCK, attrs!(item, "link")),
-        (SyntaxKind::EXTERN_ITEM_LIST, attrs!(item, "link")),
-        (SyntaxKind::MACRO_CALL, attrs!()),
-        (SyntaxKind::SELF_PARAM, attrs!()),
-        (SyntaxKind::PARAM, attrs!()),
-        (SyntaxKind::RECORD_FIELD, attrs!()),
-        (SyntaxKind::VARIANT, attrs!("non_exhaustive")),
-        (SyntaxKind::TYPE_PARAM, attrs!()),
-        (SyntaxKind::CONST_PARAM, attrs!()),
-        (SyntaxKind::LIFETIME_PARAM, attrs!()),
-        (SyntaxKind::LET_STMT, attrs!()),
-        (SyntaxKind::EXPR_STMT, attrs!()),
-        (SyntaxKind::LITERAL, attrs!()),
-        (SyntaxKind::RECORD_EXPR_FIELD_LIST, attrs!()),
-        (SyntaxKind::RECORD_EXPR_FIELD, attrs!()),
-        (SyntaxKind::MATCH_ARM_LIST, attrs!()),
-        (SyntaxKind::MATCH_ARM, attrs!()),
-        (SyntaxKind::IDENT_PAT, attrs!()),
-        (SyntaxKind::RECORD_PAT_FIELD, attrs!()),
+        (STATIC, attrs!(item, linkable, "global_allocator", "used")),
+        (TRAIT, attrs!(item, "must_use")),
+        (IMPL, attrs!(item, "automatically_derived")),
+        (ASSOC_ITEM_LIST, attrs!(item)),
+        (EXTERN_BLOCK, attrs!(item, "link")),
+        (EXTERN_ITEM_LIST, attrs!(item, "link")),
+        (MACRO_CALL, attrs!()),
+        (SELF_PARAM, attrs!()),
+        (PARAM, attrs!()),
+        (RECORD_FIELD, attrs!()),
+        (VARIANT, attrs!("non_exhaustive")),
+        (TYPE_PARAM, attrs!()),
+        (CONST_PARAM, attrs!()),
+        (LIFETIME_PARAM, attrs!()),
+        (LET_STMT, attrs!()),
+        (EXPR_STMT, attrs!()),
+        (LITERAL, attrs!()),
+        (RECORD_EXPR_FIELD_LIST, attrs!()),
+        (RECORD_EXPR_FIELD, attrs!()),
+        (MATCH_ARM_LIST, attrs!()),
+        (MATCH_ARM, attrs!()),
+        (IDENT_PAT, attrs!()),
+        (RECORD_PAT_FIELD, attrs!()),
     ])
     .collect()
 });
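
Note (not part of the commit): the only added line in the table above is the
`use SyntaxKind::*;` glob import, which is what lets every entry drop the `SyntaxKind::`
prefix. A minimal sketch of the same pattern with a toy enum (names are made up for
illustration):

    #[allow(dead_code)]
    enum Kind { SourceFile, Module }

    fn pick() -> Kind {
        use Kind::*; // variants are now in scope without the `Kind::` prefix
        SourceFile
    }
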
@@ -257,62 +269,57 @@ const ATTRIBUTES: &[AttrCompletion] = &[
         .prefer_inner(),
 ];
 
-#[test]
-fn attributes_are_sorted() {
-    let mut attrs = ATTRIBUTES.iter().map(|attr| attr.key());
-    let mut prev = attrs.next().unwrap();
-
-    attrs.for_each(|next| {
-        assert!(
-            prev < next,
-            r#"Attributes are not sorted, "{}" should come after "{}""#,
-            prev,
-            next
-        );
-        prev = next;
-    });
-}
-
-fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<String>, ()> {
-    match (derive_input.left_delimiter_token(), derive_input.right_delimiter_token()) {
-        (Some(left_paren), Some(right_paren))
-            if left_paren.kind() == T!['('] && right_paren.kind() == T![')'] =>
-        {
-            let mut input_derives = FxHashSet::default();
-            let mut current_derive = String::new();
-            for token in derive_input
-                .syntax()
-                .children_with_tokens()
-                .filter_map(|token| token.into_token())
-                .skip_while(|token| token != &left_paren)
-                .skip(1)
-                .take_while(|token| token != &right_paren)
-            {
-                if T![,] == token.kind() {
-                    if !current_derive.is_empty() {
-                        input_derives.insert(current_derive);
-                        current_derive = String::new();
-                    }
-                } else {
-                    current_derive.push_str(token.text().trim());
-                }
-            }
-
-            if !current_derive.is_empty() {
-                input_derives.insert(current_derive);
-            }
-            Ok(input_derives)
-        }
-        _ => Err(()),
-    }
-}
+fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Option<FxHashSet<String>> {
+    let (l_paren, r_paren) = derive_input.l_paren_token().zip(derive_input.r_paren_token())?;
+    let mut input_derives = FxHashSet::default();
+    let mut current_derive = String::new();
+    for token in derive_input
+        .syntax()
+        .children_with_tokens()
+        .filter_map(NodeOrToken::into_token)
+        .skip_while(|token| token != &l_paren)
+        .skip(1)
+        .take_while(|token| token != &r_paren)
+    {
+        if token.kind() == T![,] {
+            if !current_derive.is_empty() {
+                input_derives.insert(mem::take(&mut current_derive));
+            }
+        } else {
+            current_derive.push_str(token.text().trim());
+        }
+    }
+
+    if !current_derive.is_empty() {
+        input_derives.insert(current_derive);
+    }
+    Some(input_derives)
+}
 
 #[cfg(test)]
 mod tests {
+    use super::*;
+
     use expect_test::{expect, Expect};
 
     use crate::{test_utils::completion_list, CompletionKind};
 
+    #[test]
+    fn attributes_are_sorted() {
+        let mut attrs = ATTRIBUTES.iter().map(|attr| attr.key());
+        let mut prev = attrs.next().unwrap();
+
+        attrs.for_each(|next| {
+            assert!(
+                prev < next,
+                r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#,
+                prev,
+                next
+            );
+            prev = next;
+        });
+    }
+
     fn check(ra_fixture: &str, expect: Expect) {
         let actual = completion_list(ra_fixture, CompletionKind::Attribute);
         expect.assert_eq(&actual);
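
Note (not part of the commit): the rewrite above drops the match on the two delimiter
tokens in favour of `Option::zip` plus `?`, and replaces the manual "insert, then reset
the String" dance with `mem::take`. A standalone sketch of just those two std helpers,
assuming nothing beyond the standard library:

    use std::mem;

    fn demo() -> Option<()> {
        // `zip` pairs two Options; `?` returns None early if either side is missing,
        // which is how a token tree lacking both parens now falls out of the function.
        let (l, r) = Some('(').zip(Some(')'))?;
        assert_eq!((l, r), ('(', ')'));

        // `mem::take` moves the accumulated String out and leaves an empty one behind,
        // ready to collect the next comma-separated entry.
        let mut current = String::from("Clone");
        let finished = mem::take(&mut current);
        assert_eq!((finished.as_str(), current.as_str()), ("Clone", ""));
        Some(())
    }
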
@@ -14,7 +14,7 @@ pub(super) fn complete_derive(
     ctx: &CompletionContext,
     derive_input: ast::TokenTree,
 ) {
-    if let Ok(existing_derives) = super::parse_comma_sep_input(derive_input) {
+    if let Some(existing_derives) = super::parse_comma_sep_input(derive_input) {
         for derive_completion in DEFAULT_DERIVE_COMPLETIONS
             .iter()
             .filter(|completion| !existing_derives.contains(completion.label))
@@ -13,7 +13,7 @@ pub(super) fn complete_lint(
     derive_input: ast::TokenTree,
     lints_completions: &[LintCompletion],
 ) {
-    if let Ok(existing_lints) = super::parse_comma_sep_input(derive_input) {
+    if let Some(existing_lints) = super::parse_comma_sep_input(derive_input) {
         for lint_completion in lints_completions
             .into_iter()
             .filter(|completion| !existing_lints.contains(completion.label))