10956: minor: Bump deps r=Veykril a=lnicola

bors r+

10986: fix: Fix lint completions not working for unclosed attributes r=Veykril a=Veykril

Fixes #10682
Uses keywords and nested `TokenTree`s as a heuristic to figure out when to stop parsing in case the attribute is unclosed. This should work reasonably well, as attributes are usually followed by one of those.
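To make the failure mode concrete, here is the kind of fixture this targets (a minimal sketch mirroring the tests added below): completion is requested at `$0` inside an attribute whose closing `)` was never typed, so the `struct` keyword that follows is the cue to stop collecting tokens.

    // Unclosed attribute: there is no `)` before the next item starts.
    // Stopping at the `struct` keyword lets `dep$0` still complete to `deprecated`.
    #[allow(dep$0 struct Test;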
bors r+

Co-authored-by: Laurențiu Nicola <lnicola@dend.ro>
Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
bors[bot] 2021-12-11 14:58:07 +00:00 committed by GitHub
commit 48cd733c9e
6 changed files with 49 additions and 36 deletions

Cargo.lock (generated): 16 changed lines

@@ -759,9 +759,9 @@ dependencies = [
 [[package]]
 name = "itertools"
-version = "0.10.1"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
 dependencies = [
  "either",
 ]
@@ -810,9 +810,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 [[package]]
 name = "libc"
-version = "0.2.108"
+version = "0.2.109"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8521a1b57e76b1ec69af7599e75e38e7b7fad6610f037db8c79b127201b5d119"
+checksum = "f98a04dce437184842841303488f70d0188c5f51437d2a834dc097eafa909a01"

 [[package]]
 name = "libloading"
@@ -1128,9 +1128,9 @@ checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443"
 [[package]]
 name = "proc-macro2"
-version = "1.0.32"
+version = "1.0.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43"
+checksum = "fb37d2df5df740e582f28f8560cf425f52bb267d872fe58358eadb554909f07a"
 dependencies = [
  "unicode-xid",
 ]
@@ -1227,9 +1227,9 @@ dependencies = [
 [[package]]
 name = "pulldown-cmark-to-cmark"
-version = "7.1.0"
+version = "7.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d85e607de0249c2b5041e38de8684b00b62a64edee60bfcd85c153031a9d658"
+checksum = "8bc14bc6b9f5881f240f9766414707ab24f972bcc3388589fe15b22fb15dc60d"
 dependencies = [
  "pulldown-cmark",
 ]


@@ -480,7 +480,7 @@ impl FunctionBody {
            .statements()
            .map(|stmt| stmt.syntax().text_range())
            .filter(|&stmt| selected.intersect(stmt).filter(|it| !it.is_empty()).is_some())
-            .fold1(|acc, stmt| acc.cover(stmt));
+            .reduce(|acc, stmt| acc.cover(stmt));
        if let Some(tail_range) = parent
            .tail_expr()
            .map(|it| it.syntax().text_range())


@@ -1,4 +1,3 @@
-use itertools::Itertools;
 use syntax::{
     ast::{self, make, HasVisibility},
     ted::{self, Position},
@@ -70,7 +69,7 @@ fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {
        .filter_map(ast::UseTree::cast)
        .filter_map(|t| t.path());
-    let final_path = paths.fold1(|prev, next| make::path_concat(next, prev))?;
+    let final_path = paths.reduce(|prev, next| make::path_concat(next, prev))?;
    if final_path.segment().map_or(false, |it| it.self_token().is_some()) {
        final_path.qualifier()
    } else {


@@ -30,16 +30,12 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
        None => None,
    };
    match (name_ref, attribute.token_tree()) {
-        (Some(path), Some(token_tree)) => match path.text().as_str() {
-            "repr" => repr::complete_repr(acc, ctx, token_tree),
-            "derive" => {
-                derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(token_tree)?)
-            }
-            "feature" => {
-                lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(token_tree)?, FEATURES)
-            }
+        (Some(path), Some(tt)) if tt.l_paren_token().is_some() => match path.text().as_str() {
+            "repr" => repr::complete_repr(acc, ctx, tt),
+            "derive" => derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?),
+            "feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES),
            "allow" | "warn" | "deny" | "forbid" => {
-                let existing_lints = parse_tt_as_comma_sep_paths(token_tree)?;
+                let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
                lint::complete_lint(acc, ctx, &existing_lints, DEFAULT_LINTS);
                lint::complete_lint(acc, ctx, &existing_lints, CLIPPY_LINTS);
                lint::complete_lint(acc, ctx, &existing_lints, RUSTDOC_LINTS);
@@ -49,8 +45,8 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
            }
            _ => (),
        },
-        (None, Some(_)) => (),
-        _ => complete_new_attribute(acc, ctx, attribute),
+        (_, Some(_)) => (),
+        (_, None) => complete_new_attribute(acc, ctx, attribute),
    }
    Some(())
}


@@ -831,6 +831,20 @@ mod lint {
            r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
        );
    }
+
+    #[test]
+    fn lint_unclosed() {
+        check_edit(
+            "deprecated",
+            r#"#[allow(dep$0 struct Test;"#,
+            r#"#[allow(deprecated struct Test;"#,
+        );
+        check_edit(
+            "bare_urls",
+            r#"#[allow(rustdoc::$0 struct Test;"#,
+            r#"#[allow(rustdoc::bare_urls struct Test;"#,
+        );
+    }
 }

 mod repr {


@@ -305,20 +305,24 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
    }
}

-/// Parses the input token tree as comma separated paths.
+/// Parses the input token tree as comma separated plain paths.
 pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
-    let r_paren = input.r_paren_token()?;
-    let tokens = input
-        .syntax()
-        .children_with_tokens()
-        .skip(1)
-        .take_while(|it| it.as_token() != Some(&r_paren));
+    let r_paren = input.r_paren_token();
+    let tokens =
+        input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+            // seeing a keyword means the attribute is unclosed so stop parsing here
+            Some(tok) if tok.kind().is_keyword() => None,
+            // don't include the right token tree parenthesis if it exists
+            tok @ Some(_) if tok == r_paren => None,
+            // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+            None => None,
+            Some(tok) => Some(tok),
+        });
    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
-    Some(
-        input_expressions
-            .into_iter()
-            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-            .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
-            .collect::<Vec<ast::Path>>(),
-    )
+    let paths = input_expressions
+        .into_iter()
+        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+        .collect();
+    Some(paths)
 }
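As a rough illustration of the stopping rule in the hunk above (a self-contained sketch, not the rust-analyzer API; `Tok` and `collect_args` are hypothetical names, and the nested-`TokenTree` case is left out): attribute-argument tokens are collected until either the closing `)` or a keyword shows up, the latter meaning the attribute was never closed.

    // Simplified model of the stopping heuristic used by
    // parse_tt_as_comma_sep_paths above; names here are for illustration only.
    #[derive(Debug, PartialEq)]
    enum Tok {
        Ident(&'static str),
        RParen,
        Keyword(&'static str),
    }

    fn collect_args(tokens: &[Tok]) -> Vec<&Tok> {
        tokens
            .iter()
            // Stop at the closing `)` if present, or at a keyword, which
            // signals that the attribute was left unclosed.
            .take_while(|t| !matches!(t, Tok::RParen | Tok::Keyword(_)))
            .collect()
    }

    fn main() {
        // `#[allow(deprecated struct Test;` -- the `struct` keyword ends
        // collection, so `deprecated` is still treated as a lint argument.
        let unclosed = [Tok::Ident("deprecated"), Tok::Keyword("struct"), Tok::Ident("Test")];
        assert_eq!(collect_args(&unclosed), vec![&Tok::Ident("deprecated")]);

        // `#[allow(deprecated)]` -- the closing `)` ends collection as before.
        let closed = [Tok::Ident("deprecated"), Tok::RParen];
        assert_eq!(collect_args(&closed), vec![&Tok::Ident("deprecated")]);
    }

Either way, the collected tokens can then be split on commas and parsed as paths, exactly as `parse_tt_as_comma_sep_paths` does with `group_by` above.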