Auto merge of #135789 - matthiaskrgr:rollup-4cvw8s4, r=matthiaskrgr

Rollup of 7 pull requests

Successful merges:

 - #133695 (Reexport likely/unlikely in std::hint); a usage sketch follows this list
 - #135330 (Respect --sysroot for rustc -vV and -Cpasses=list)
 - #135333 (Partial progress on #132735: Replace extern "rust-intrinsic" with #[rustc_intrinsic] across the codebase)
 - #135741 (Recognise new IPv6 documentation range from IETF RFC 9637)
 - #135770 (Update contributing docs for submodule/subtree changes)
 - #135775 (Subtree update of `rust-analyzer`)
 - #135776 (Subtree sync for rustc_codegen_cranelift)
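
For the `std::hint` re-export in #133695, a minimal usage sketch (nightly only; the `likely_unlikely` feature-gate name is an assumption about the current gate for these hint functions):

#![feature(likely_unlikely)] // assumed gate name; adjust to your toolchain
use std::hint::unlikely;

fn checked_div(a: u32, b: u32) -> Option<u32> {
    // Hint to the optimizer that the divide-by-zero branch is the cold path.
    if unlikely(b == 0) {
        None
    } else {
        Some(a / b)
    }
}

fn main() {
    assert_eq!(checked_div(10, 2), Some(5));
    assert_eq!(checked_div(1, 0), None);
}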

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed 2025-01-20 20:41:33 +00:00 (commit 1f947830b4)
286 changed files with 9101 additions and 2933 deletions

Cargo.lock (generated)

@ -98,9 +98,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.6.0"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be"
[[package]]
name = "borsh"
@ -194,9 +194,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chalk-derive"
version = "0.98.0"
version = "0.99.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9426c8fd0fe61c3da880b801d3b510524df17843a8f9ec1f5b9cec24fb7412df"
checksum = "572583d9b97f9d277e5c7607f8239a30e2e04d3ed3b47c87d1cb2152ae724073"
dependencies = [
"proc-macro2",
"quote",
@ -206,19 +206,19 @@ dependencies = [
[[package]]
name = "chalk-ir"
version = "0.98.0"
version = "0.99.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093"
checksum = "e60e0ef9c81dce1336a9ed3c76f08775f5b623151d96d85ba45f7b10de76d1c7"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.7.0",
"chalk-derive",
]
[[package]]
name = "chalk-recursive"
version = "0.98.0"
version = "0.99.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "129dc03458f71cfb9c3cd621c9c68166a94e87b85b16ccd29af015d7ff9a1c61"
checksum = "5a06350d614e22b03a69b8105e3541614450a7ea48bc58ecc6c6bd92731a3995"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -229,9 +229,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
version = "0.98.0"
version = "0.99.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7e8a8c1e928f98cdf227b868416ef21dcd8cc3c61b347576d783713444d41c8"
checksum = "0e428761e9b55bee516bfe2457caed8b6d1b86353f92ae825bbe438a36ce91e8"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -523,6 +523,7 @@ dependencies = [
"hir-def",
"hir-expand",
"hir-ty",
"indexmap",
"intern",
"itertools",
"rustc-hash 2.0.0",
@ -544,7 +545,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"bitflags 2.6.0",
"bitflags 2.7.0",
"cfg",
"cov-mark",
"dashmap",
@ -610,7 +611,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"bitflags 2.6.0",
"bitflags 2.7.0",
"chalk-derive",
"chalk-ir",
"chalk-recursive",
@ -734,7 +735,7 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"base-db",
"bitflags 2.6.0",
"bitflags 2.7.0",
"cov-mark",
"crossbeam-channel",
"either",
@ -820,11 +821,11 @@ dependencies = [
[[package]]
name = "inotify"
version = "0.9.6"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
dependencies = [
"bitflags 1.3.2",
"bitflags 2.7.0",
"inotify-sys",
"libc",
]
@ -908,9 +909,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.155"
version = "0.2.169"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
[[package]]
name = "libloading"
@ -938,7 +939,7 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.7.0",
"libc",
"redox_syscall",
]
@ -1117,14 +1118,14 @@ dependencies = [
[[package]]
name = "mio"
version = "0.8.11"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
dependencies = [
"libc",
"log",
"wasi",
"windows-sys 0.48.0",
"windows-sys 0.52.0",
]
[[package]]
@ -1142,7 +1143,7 @@ version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.7.0",
"cfg-if",
"cfg_aliases 0.1.1",
"libc",
@ -1156,12 +1157,11 @@ checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451"
[[package]]
name = "notify"
version = "6.1.1"
version = "8.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943"
dependencies = [
"bitflags 2.6.0",
"crossbeam-channel",
"bitflags 2.7.0",
"filetime",
"fsevent-sys",
"inotify",
@ -1169,10 +1169,17 @@ dependencies = [
"libc",
"log",
"mio",
"notify-types",
"walkdir",
"windows-sys 0.48.0",
"windows-sys 0.59.0",
]
[[package]]
name = "notify-types"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"
[[package]]
name = "nu-ansi-term"
version = "0.50.1"
@ -1371,6 +1378,7 @@ version = "0.0.0"
dependencies = [
"expect-test",
"intern",
"libc",
"libloading",
"memmap2",
"object 0.33.0",
@ -1428,7 +1436,7 @@ dependencies = [
"libc",
"perf-event",
"tikv-jemalloc-ctl",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@ -1482,7 +1490,7 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.7.0",
"memchr",
"unicase",
]
@ -1507,20 +1515,20 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.87.0"
version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b782af0a7a8df16ddf43cd70da9f17bc3b1ce712c9e4992b6edb16f5f53632"
checksum = "d5246e9e1f450333a990877eabbc36fe0567e7cedd56d5365db319e14079cf2a"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.7.0",
"ra-ap-rustc_index",
"tracing",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.87.0"
version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce5742f134960482f543b35ecebec3cacc6d79a9a685713518b4d8d70c5f9aa8"
checksum = "59fd8e4f5b34c434ec111efb0e0614954db048b9307d3b2e4cc3c915da9d2160"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@ -1528,9 +1536,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.87.0"
version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7ea011fcf68309a8835ad01d91c032cb18444617b00e2cab21d45b208164441"
checksum = "2d34973fe081392bd1edb022e865e9952fcaa093f9cdae183edce64472e5e889"
dependencies = [
"proc-macro2",
"quote",
@ -1539,9 +1547,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.87.0"
version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb76f0a4d4c20859e41f0a23bff0f37ab9ca9171c214a6c7dd72ea69434865dc"
checksum = "52fa42c582e21b35e8f61a5afe3c63a9c722d995826762eb19b18beeccf5157f"
dependencies = [
"unicode-properties",
"unicode-xid",
@ -1549,9 +1557,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.87.0"
version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06080bd35078305421a62da77f3c128482d8d44441b6da8ce9d146d1cd9cdb5b"
checksum = "740383328d7033393e5385f4a6073b880d5811b0fc0fd2559e481f905940f2f8"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@ -1559,9 +1567,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.87.0"
version = "0.91.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68a3154fe4c20c177d7b3c678a2d3a97aba0cca156ddef88959915041889daf0"
checksum = "c39f544728f32cebffb1a8b92ba3c1f3dcb4144081438d192137ed197d479a9d"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.0.0",
@ -1626,7 +1634,7 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.7.0",
]
[[package]]
@ -1713,7 +1721,7 @@ dependencies = [
"vfs",
"vfs-notify",
"walkdir",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
"xflags",
"xshell",
]
@ -1936,7 +1944,7 @@ dependencies = [
"jod-thread",
"libc",
"miow",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]


@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
rust-version = "1.82"
rust-version = "1.83"
edition = "2021"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@ -79,6 +79,7 @@ span = { path = "./crates/span", version = "0.0.0" }
stdx = { path = "./crates/stdx", version = "0.0.0" }
syntax = { path = "./crates/syntax", version = "0.0.0" }
syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" }
test-fixture = { path = "./crates/test-fixture", version = "0.0.0" }
test-utils = { path = "./crates/test-utils", version = "0.0.0" }
toolchain = { path = "./crates/toolchain", version = "0.0.0" }
tt = { path = "./crates/tt", version = "0.0.0" }
@ -86,16 +87,15 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.87", default-features = false }
ra-ap-rustc_parse_format = { version = "0.87", default-features = false }
ra-ap-rustc_index = { version = "0.87", default-features = false }
ra-ap-rustc_abi = { version = "0.87", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.87", default-features = false }
ra-ap-rustc_lexer = { version = "0.91", default-features = false }
ra-ap-rustc_parse_format = { version = "0.91", default-features = false }
ra-ap-rustc_index = { version = "0.91", default-features = false }
ra-ap-rustc_abi = { version = "0.91", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.91", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
test-fixture = { path = "./crates/test-fixture" }
# In-tree crates that are published separately and follow semver. See lib/README.md
# in-tree crates that are published separately and follow semver. See lib/README.md
line-index = { version = "0.1.2" }
la-arena = { version = "0.3.1" }
lsp-server = { version = "0.7.6" }
@ -106,10 +106,10 @@ arrayvec = "0.7.4"
bitflags = "2.4.1"
cargo_metadata = "0.18.1"
camino = "1.1.6"
chalk-solve = { version = "0.98.0", default-features = false }
chalk-ir = "0.98.0"
chalk-recursive = { version = "0.98.0", default-features = false }
chalk-derive = "0.98.0"
chalk-solve = { version = "0.99.0", default-features = false }
chalk-ir = "0.99.0"
chalk-recursive = { version = "0.99.0", default-features = false }
chalk-derive = "0.99.0"
crossbeam-channel = "0.5.8"
dissimilar = "1.0.7"
dot = "0.1.4"


@ -136,7 +136,7 @@ pub trait SourceRootDatabase: SourceDatabase {
#[ra_salsa::input]
fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
/// Crates whose root fool is in `id`.
/// Crates whose root file is in `id`.
fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
}


@ -5,7 +5,8 @@ use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(u8)]
pub enum Edition {
Edition2015,
// The syntax context stuff needs the discriminants to start from 0 and be consecutive.
Edition2015 = 0,
Edition2018,
Edition2021,
Edition2024,


@ -122,6 +122,11 @@ impl Attrs {
AttrQuery { attrs: self, key }
}
pub fn rust_analyzer_tool(&self) -> impl Iterator<Item = &Attr> {
self.iter()
.filter(|&attr| attr.path.segments().first().is_some_and(|s| *s == sym::rust_analyzer))
}
pub fn cfg(&self) -> Option<CfgExpr> {
let mut cfgs = self.by_key(&sym::cfg).tt_values().map(CfgExpr::parse);
let first = cfgs.next()?;


@ -15,7 +15,7 @@ use hir_expand::{name::Name, ExpandError, InFile};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, MacroFileId};
use span::{Edition, MacroFileId, SyntaxContextData};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;
use tt::TextRange;
@ -37,15 +37,22 @@ use crate::{
/// A wrapper around [`span::SyntaxContextId`] that is intended only for comparisons.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HygieneId(pub(crate) span::SyntaxContextId);
pub struct HygieneId(span::SyntaxContextId);
impl HygieneId {
pub const ROOT: Self = Self(span::SyntaxContextId::ROOT);
// The edition doesn't matter here, we only use this for comparisons and to lookup the macro.
pub const ROOT: Self = Self(span::SyntaxContextId::root(Edition::Edition2015));
pub fn new(ctx: span::SyntaxContextId) -> Self {
pub fn new(mut ctx: span::SyntaxContextId) -> Self {
// See `Name` for why we're doing that.
ctx.remove_root_edition();
Self(ctx)
}
pub(crate) fn lookup(self, db: &dyn DefDatabase) -> SyntaxContextData {
db.lookup_intern_syntax_context(self.0)
}
pub(crate) fn is_root(self) -> bool {
self.0.is_root()
}


@ -8,6 +8,7 @@ use std::mem;
use base_db::CrateId;
use either::Either;
use hir_expand::{
mod_path::tool_path,
name::{AsName, Name},
span_map::{ExpansionSpanMap, SpanMap},
InFile, MacroDefId,
@ -27,6 +28,7 @@ use text_size::TextSize;
use triomphe::Arc;
use crate::{
attr::Attrs,
body::{Body, BodyDiagnostic, BodySourceMap, ExprPtr, HygieneId, LabelPtr, PatPtr},
builtin_type::BuiltinUint,
data::adt::StructKind,
@ -212,6 +214,43 @@ impl ExprCollector<'_> {
body: Option<ast::Expr>,
is_async_fn: bool,
) -> (Body, BodySourceMap) {
let skip_body = match self.owner {
DefWithBodyId::FunctionId(it) => self.db.attrs(it.into()),
DefWithBodyId::StaticId(it) => self.db.attrs(it.into()),
DefWithBodyId::ConstId(it) => self.db.attrs(it.into()),
DefWithBodyId::InTypeConstId(_) => Attrs::EMPTY,
DefWithBodyId::VariantId(it) => self.db.attrs(it.into()),
}
.rust_analyzer_tool()
.any(|attr| *attr.path() == tool_path![skip]);
// If #[rust_analyzer::skip] annotated, only construct enough information for the signature
// and skip the body.
if skip_body {
self.body.body_expr = self.missing_expr();
if let Some((param_list, mut attr_enabled)) = param_list {
if let Some(self_param) =
param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
{
let is_mutable =
self_param.mut_token().is_some() && self_param.amp_token().is_none();
let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
Name::new_symbol_root(sym::self_.clone()),
BindingAnnotation::new(is_mutable, false),
);
self.body.self_param = Some(binding_id);
self.source_map.self_param =
Some(self.expander.in_file(AstPtr::new(&self_param)));
}
self.body.params = param_list
.params()
.zip(attr_enabled)
.filter(|(_, enabled)| *enabled)
.map(|_| self.missing_pat())
.collect();
};
return (self.body, self.source_map);
}
self.awaitable_context.replace(if is_async_fn {
Awaitable::Yes
} else {
@ -542,10 +581,7 @@ impl ExprCollector<'_> {
let mutability = if raw_tok {
if e.mut_token().is_some() {
Mutability::Mut
} else if e.const_token().is_some() {
Mutability::Shared
} else {
never!("parser only remaps to raw_token() if matching mutability token follows");
Mutability::Shared
}
} else {
@ -2460,7 +2496,7 @@ impl ExprCollector<'_> {
None => HygieneId::ROOT,
Some(span_map) => {
let ctx = span_map.span_at(span_start).ctx;
HygieneId(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
HygieneId::new(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
}
}
}


@ -345,7 +345,7 @@ mod tests {
}
}
fn do_check(ra_fixture: &str, expected: &[&str]) {
fn do_check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: &[&str]) {
let (offset, code) = extract_offset(ra_fixture);
let code = {
let mut buf = String::new();
@ -509,7 +509,7 @@ fn foo() {
);
}
fn do_check_local_name(ra_fixture: &str, expected_offset: u32) {
fn do_check_local_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_offset: u32) {
let (db, position) = TestDB::with_position(ra_fixture);
let file_id = position.file_id;
let offset = position.offset;


@ -7,7 +7,7 @@ use crate::{test_db::TestDB, ModuleDefId};
use super::*;
fn lower(ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
@ -27,14 +27,14 @@ fn lower(ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
(db, body, fn_def)
}
fn def_map_at(ra_fixture: &str) -> String {
fn def_map_at(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
let (db, position) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(position);
module.def_map(&db).dump(&db)
}
fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
fn check_block_scopes_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, position) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(position);
@ -42,7 +42,7 @@ fn check_block_scopes_at(ra_fixture: &str, expect: Expect) {
expect.assert_eq(&actual);
}
fn check_at(ra_fixture: &str, expect: Expect) {
fn check_at(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let actual = def_map_at(ra_fixture);
expect.assert_eq(&actual);
}
@ -444,3 +444,18 @@ fn foo() {
}"#
);
}
#[test]
fn skip_skips_body() {
let (db, body, owner) = lower(
r#"
#[rust_analyzer::skip]
async fn foo(a: (), b: i32) -> u32 {
0 + 1 + b()
}
"#,
);
let printed = body.pretty_print(&db, owner, Edition::CURRENT);
expect!["fn foo(<28>: (), <20>: i32) -> impl ::core::future::Future::<Output = u32> <20>"]
.assert_eq(&printed);
}
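
For reference, a sketch of how the `#[rust_analyzer::skip]` attribute handled above is meant to appear in user code; rust-analyzer then lowers only the signature and skips the body (recent rustc versions accept the `rust_analyzer` tool namespace, which is an assumption about the consumer's toolchain):

// Ask the IDE to skip analyzing this body, e.g. for a huge generated function.
// Only the signature remains visible to rust-analyzer's type inference.
#[rust_analyzer::skip]
fn generated_lookup(key: u32) -> &'static str {
    match key {
        0 => "zero",
        _ => "other", // imagine thousands of generated arms here
    }
}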


@ -244,7 +244,7 @@ bitflags::bitflags! {
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TraitData {
pub name: Name,
pub items: Vec<(Name, AssocItemId)>,
pub items: Box<[(Name, AssocItemId)]>,
pub flags: TraitFlags,
pub visibility: RawVisibility,
// box it as the vec is usually empty anyways
@ -360,7 +360,7 @@ impl TraitAliasData {
pub struct ImplData {
pub target_trait: Option<TraitRef>,
pub self_ty: TypeRefId,
pub items: Box<[AssocItemId]>,
pub items: Box<[(Name, AssocItemId)]>,
pub is_negative: bool,
pub is_unsafe: bool,
// box it as the vec is usually empty anyways
@ -393,7 +393,6 @@ impl ImplData {
collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
let (items, macro_calls, diagnostics) = collector.finish();
let items = items.into_iter().map(|(_, item)| item).collect();
(
Arc::new(ImplData {
@ -648,12 +647,12 @@ impl<'a> AssocItemCollector<'a> {
fn finish(
self,
) -> (
Vec<(Name, AssocItemId)>,
Box<[(Name, AssocItemId)]>,
Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
Vec<DefDiagnostic>,
) {
(
self.items,
self.items.into_boxed_slice(),
if self.macro_calls.is_empty() { None } else { Some(Box::new(self.macro_calls)) },
self.diagnostics,
)


@ -10,7 +10,7 @@ use hir_expand::{
ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
};
use limit::Limit;
use span::SyntaxContextId;
use span::{Edition, SyntaxContextId};
use syntax::{ast, Parse};
use triomphe::Arc;
@ -60,7 +60,7 @@ impl Expander {
pub fn syntax_context(&self) -> SyntaxContextId {
// FIXME:
SyntaxContextId::ROOT
SyntaxContextId::root(Edition::CURRENT)
}
pub fn enter_expand<T: ast::AstNode>(


@ -665,7 +665,7 @@ mod tests {
/// module the cursor is in.
#[track_caller]
fn check_found_path_(
ra_fixture: &str,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
path: &str,
prefer_prelude: bool,
prefer_absolute: bool,
@ -727,19 +727,35 @@ mod tests {
expect.assert_eq(&res);
}
fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) {
fn check_found_path(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
path: &str,
expect: Expect,
) {
check_found_path_(ra_fixture, path, false, false, false, expect);
}
fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) {
fn check_found_path_prelude(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
path: &str,
expect: Expect,
) {
check_found_path_(ra_fixture, path, true, false, false, expect);
}
fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) {
fn check_found_path_absolute(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
path: &str,
expect: Expect,
) {
check_found_path_(ra_fixture, path, false, true, false, expect);
}
fn check_found_path_prefer_no_std(ra_fixture: &str, path: &str, expect: Expect) {
fn check_found_path_prefer_no_std(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
path: &str,
expect: Expect,
) {
check_found_path_(ra_fixture, path, false, false, true, expect);
}


@ -509,7 +509,12 @@ mod tests {
}
}
fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) {
fn check_search(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
crate_name: &str,
query: Query,
expect: Expect,
) {
let db = TestDB::with_files(ra_fixture);
let crate_graph = db.crate_graph();
let krate = crate_graph
@ -587,7 +592,7 @@ mod tests {
))
}
fn check(ra_fixture: &str, expect: Expect) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let db = TestDB::with_files(ra_fixture);
let crate_graph = db.crate_graph();


@ -162,6 +162,20 @@ impl ItemScope {
.map(move |name| (name, self.get(name)))
}
pub fn values(&self) -> impl Iterator<Item = (&Name, Item<ModuleDefId, ImportId>)> + '_ {
self.values.iter().map(|(n, &i)| (n, i))
}
pub fn types(
&self,
) -> impl Iterator<Item = (&Name, Item<ModuleDefId, ImportOrExternCrate>)> + '_ {
self.types.iter().map(|(n, &i)| (n, i))
}
pub fn macros(&self) -> impl Iterator<Item = (&Name, Item<MacroId, ImportId>)> + '_ {
self.macros.iter().map(|(n, &i)| (n, i))
}
pub fn imports(&self) -> impl Iterator<Item = ImportId> + '_ {
self.use_imports_types
.keys()
@ -263,11 +277,6 @@ impl ItemScope {
self.unnamed_consts.iter().copied()
}
/// Iterate over all module scoped macros
pub(crate) fn macros(&self) -> impl Iterator<Item = (&Name, MacroId)> + '_ {
self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_)))
}
/// Iterate over all legacy textual scoped macros visible at the end of the module
pub fn legacy_macros(&self) -> impl Iterator<Item = (&Name, &[MacroId])> + '_ {
self.legacy_macros.iter().map(|(name, def)| (name, &**def))


@ -4,7 +4,7 @@ use test_fixture::WithFixture;
use crate::{db::DefDatabase, test_db::TestDB};
fn check(ra_fixture: &str, expect: Expect) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let item_tree = db.file_item_tree(file_id.into());
let pretty = item_tree.pretty_print(&db, Edition::CURRENT);
@ -270,7 +270,7 @@ m!();
// AstId: 2
pub macro m2 { ... }
// AstId: 3, SyntaxContext: 0, ExpandTo: Items
// AstId: 3, SyntaxContext: 2, ExpandTo: Items
m!(...);
"#]],
);


@ -107,7 +107,7 @@ impl LangItems {
for (_, module_data) in crate_def_map.modules() {
for impl_def in module_data.scope.impls() {
lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
for assoc in db.impl_data(impl_def).items.iter().copied() {
for &(_, assoc) in db.impl_data(impl_def).items.iter() {
match assoc {
AssocItemId::FunctionId(f) => {
lang_items.collect_lang_item(db, f, LangItemTarget::Function)


@ -502,7 +502,7 @@ impl ModuleId {
}
/// Whether this module represents the crate root module
fn is_crate_root(&self) -> bool {
pub fn is_crate_root(&self) -> bool {
self.local_id == DefMap::ROOT && self.block.is_none()
}
}


@ -35,9 +35,9 @@ macro_rules! f {
};
}
struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1#
map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
}#0:1@132..133#1#
struct#0:1@58..64#4# MyTraitMap2#0:2@31..42#2# {#0:1@72..73#4#
map#0:1@86..89#4#:#0:1@89..90#4# #0:1@89..90#4#::#0:1@91..93#4#std#0:1@93..96#4#::#0:1@96..98#4#collections#0:1@98..109#4#::#0:1@109..111#4#HashSet#0:1@111..118#4#<#0:1@118..119#4#(#0:1@119..120#4#)#0:1@120..121#4#>#0:1@121..122#4#,#0:1@122..123#4#
}#0:1@132..133#4#
"#]],
);
}
@ -75,12 +75,12 @@ macro_rules! f {
};
}
fn#0:2@30..32#0# main#0:2@33..37#0#(#0:2@37..38#0#)#0:2@38..39#0# {#0:2@40..41#0#
1#0:2@50..51#0#;#0:2@51..52#0#
1.0#0:2@61..64#0#;#0:2@64..65#0#
(#0:2@74..75#0#(#0:2@75..76#0#1#0:2@76..77#0#,#0:2@77..78#0# )#0:2@78..79#0#,#0:2@79..80#0# )#0:2@80..81#0#.#0:2@81..82#0#0#0:2@82..85#0#.#0:2@82..85#0#0#0:2@82..85#0#;#0:2@85..86#0#
let#0:2@95..98#0# x#0:2@99..100#0# =#0:2@101..102#0# 1#0:2@103..104#0#;#0:2@104..105#0#
}#0:2@110..111#0#
fn#0:2@30..32#2# main#0:2@33..37#2#(#0:2@37..38#2#)#0:2@38..39#2# {#0:2@40..41#2#
1#0:2@50..51#2#;#0:2@51..52#2#
1.0#0:2@61..64#2#;#0:2@64..65#2#
(#0:2@74..75#2#(#0:2@75..76#2#1#0:2@76..77#2#,#0:2@77..78#2# )#0:2@78..79#2#,#0:2@79..80#2# )#0:2@80..81#2#.#0:2@81..82#2#0#0:2@82..85#2#.#0:2@82..85#2#0#0:2@82..85#2#;#0:2@85..86#2#
let#0:2@95..98#2# x#0:2@99..100#2# =#0:2@101..102#2# 1#0:2@103..104#2#;#0:2@104..105#2#
}#0:2@110..111#2#
"#]],
@ -171,7 +171,7 @@ fn main(foo: ()) {
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#0#;
/* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#2#;
}
}
@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
struct#1:1@59..65#1# Foo#0:2@32..35#0#(#1:1@70..71#1#u32#0:2@41..44#0#)#1:1@74..75#1#;#1:1@75..76#1#
struct#1:1@59..65#4# Foo#0:2@32..35#2#(#1:1@70..71#4#u32#0:2@41..44#2#)#1:1@74..75#4#;#1:1@75..76#4#
"#]],
);
}
@ -423,10 +423,10 @@ m! { foo, bar }
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
impl#\1# Bar#\1# {#\1#
fn#\1# foo#\0#(#\1#)#\1# {#\1#}#\1#
fn#\1# bar#\0#(#\1#)#\1# {#\1#}#\1#
}#\1#
impl#\4# Bar#\4# {#\4#
fn#\4# foo#\2#(#\4#)#\4# {#\4#}#\4#
fn#\4# bar#\2#(#\4#)#\4# {#\4#}#\4#
}#\4#
"#]],
);
}


@ -47,7 +47,7 @@ use crate::{
};
#[track_caller]
fn check_errors(ra_fixture: &str, expect: Expect) {
fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
let def_map = db.crate_def_map(krate);
@ -77,7 +77,7 @@ fn check_errors(ra_fixture: &str, expect: Expect) {
}
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) {
let extra_proc_macros = vec![(
r#"
#[proc_macro_attribute]
@ -358,6 +358,7 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
subtree,
syntax_bridge::TopEntryPoint::MacroItems,
&mut |_| span::Edition::CURRENT,
span::Edition::CURRENT,
);
if parse.errors().is_empty() {


@ -181,9 +181,9 @@ fn foo(&self) {
self.0. 1;
}
fn#0:1@45..47#0# foo#0:1@48..51#0#(#0:1@51..52#0#&#0:1@52..53#0#self#0:1@53..57#0# )#0:1@57..58#0# {#0:1@59..60#0#
self#0:1@65..69#0# .#0:1@69..70#0#0#0:1@70..71#0#.#0:1@71..72#0#1#0:1@73..74#0#;#0:1@74..75#0#
}#0:1@76..77#0#"#]],
fn#0:1@45..47#2# foo#0:1@48..51#2#(#0:1@51..52#2#&#0:1@52..53#2#self#0:1@53..57#2# )#0:1@57..58#2# {#0:1@59..60#2#
self#0:1@65..69#2# .#0:1@69..70#2#0#0:1@70..71#2#.#0:1@71..72#2#1#0:1@73..74#2#;#0:1@74..75#2#
}#0:1@76..77#2#"#]],
);
}


@ -74,7 +74,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
let proc_macros = if krate.is_proc_macro {
db.proc_macros()
.for_crate(def_map.krate, db.syntax_context(tree_id.file_id()))
.for_crate(def_map.krate, db.syntax_context(tree_id.file_id(), krate.edition))
.unwrap_or_default()
} else {
Default::default()
@ -717,8 +717,8 @@ impl DefCollector<'_> {
}
}
None => {
for (name, def) in root_scope.macros() {
self.def_map.macro_use_prelude.insert(name.clone(), (def, extern_crate));
for (name, it) in root_scope.macros() {
self.def_map.macro_use_prelude.insert(name.clone(), (it.def, extern_crate));
}
}
}


@ -11,19 +11,19 @@ use triomphe::Arc;
use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
fn compute_crate_def_map(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
db.crate_def_map(krate)
}
fn render_crate_def_map(ra_fixture: &str) -> String {
fn render_crate_def_map(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
db.crate_def_map(krate).dump(&db)
}
fn check(ra_fixture: &str, expect: Expect) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let actual = render_crate_def_map(ra_fixture);
expect.assert_eq(&actual);
}


@ -166,6 +166,17 @@ impl Resolver {
db: &dyn DefDatabase,
path: &Path,
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
self.resolve_path_in_type_ns_with_prefix_info(db, path).map(
|(resolution, remaining_segments, import, _)| (resolution, remaining_segments, import),
)
}
pub fn resolve_path_in_type_ns_with_prefix_info(
&self,
db: &dyn DefDatabase,
path: &Path,
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>, ResolvePathResultPrefixInfo)>
{
let path = match path {
Path::BarePath(mod_path) => mod_path,
Path::Normal(it) => it.mod_path(),
@ -181,7 +192,12 @@ impl Resolver {
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
};
return Some((type_ns, seg.as_ref().map(|_| 1), None));
return Some((
type_ns,
seg.as_ref().map(|_| 1),
None,
ResolvePathResultPrefixInfo::default(),
));
}
};
let first_name = path.segments().first()?;
@ -197,17 +213,32 @@ impl Resolver {
Scope::ExprScope(_) | Scope::MacroDefScope(_) => continue,
Scope::GenericParams { params, def } => {
if let Some(id) = params.find_type_by_name(first_name, *def) {
return Some((TypeNs::GenericParam(id), remaining_idx(), None));
return Some((
TypeNs::GenericParam(id),
remaining_idx(),
None,
ResolvePathResultPrefixInfo::default(),
));
}
}
&Scope::ImplDefScope(impl_) => {
if *first_name == sym::Self_.clone() {
return Some((TypeNs::SelfType(impl_), remaining_idx(), None));
return Some((
TypeNs::SelfType(impl_),
remaining_idx(),
None,
ResolvePathResultPrefixInfo::default(),
));
}
}
&Scope::AdtScope(adt) => {
if *first_name == sym::Self_.clone() {
return Some((TypeNs::AdtSelfType(adt), remaining_idx(), None));
return Some((
TypeNs::AdtSelfType(adt),
remaining_idx(),
None,
ResolvePathResultPrefixInfo::default(),
));
}
}
Scope::BlockScope(m) => {
@ -220,18 +251,6 @@ impl Resolver {
self.module_scope.resolve_path_in_type_ns(db, path)
}
pub fn resolve_path_in_type_ns_fully_with_imports(
&self,
db: &dyn DefDatabase,
path: &Path,
) -> Option<(TypeNs, Option<ImportOrExternCrate>)> {
let (res, unresolved, imp) = self.resolve_path_in_type_ns(db, path)?;
if unresolved.is_some() {
return None;
}
Some((res, imp))
}
pub fn resolve_path_in_type_ns_fully(
&self,
db: &dyn DefDatabase,
@ -324,7 +343,7 @@ impl Resolver {
if n_segments <= 1 {
let mut hygiene_info = if !hygiene_id.is_root() {
let ctx = db.lookup_intern_syntax_context(hygiene_id.0);
let ctx = hygiene_id.lookup(db);
ctx.outer_expn.map(|expansion| {
let expansion = db.lookup_intern_macro_call(expansion);
(ctx.parent, expansion.def)
@ -986,11 +1005,12 @@ impl ModuleItemMap {
&self,
db: &dyn DefDatabase,
path: &ModPath,
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>)> {
let (module_def, idx, _) =
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>, ResolvePathResultPrefixInfo)>
{
let (module_def, idx, prefix_info) =
self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
let (res, import) = to_type_ns(module_def)?;
Some((res, idx, import))
Some((res, idx, import, prefix_info))
}
}


@ -240,12 +240,12 @@ impl Visibility {
if a_ancestors.any(|m| m == mod_b.local_id) {
// B is above A
return Some(Visibility::Module(mod_a, expl_b));
return Some(Visibility::Module(mod_a, expl_a));
}
if b_ancestors.any(|m| m == mod_a.local_id) {
// A is above B
return Some(Visibility::Module(mod_b, expl_a));
return Some(Visibility::Module(mod_b, expl_b));
}
None


@ -4,7 +4,7 @@ use intern::sym;
use itertools::{izip, Itertools};
use parser::SyntaxKind;
use rustc_hash::FxHashSet;
use span::{MacroCallId, Span, SyntaxContextId};
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use stdx::never;
use syntax_bridge::DocCommentDesugarMode;
use tracing::debug;
@ -33,7 +33,7 @@ macro_rules! register_builtin {
}
impl BuiltinDeriveExpander {
pub fn expander(&self) -> fn(Span, &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
pub fn expander(&self) -> fn(&dyn ExpandDatabase, Span, &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
}
@ -58,8 +58,8 @@ impl BuiltinDeriveExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id);
self.expander()(span, tt)
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
self.expander()(db, span, tt)
}
}
@ -226,8 +226,12 @@ struct AdtParam {
}
// FIXME: This whole thing needs a refactor. Each derive requires its special values, and the result is a mess.
fn parse_adt(tt: &tt::TopSubtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
let (adt, tm) = to_adt_syntax(tt, call_site)?;
fn parse_adt(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
call_site: Span,
) -> Result<BasicAdtInfo, ExpandError> {
let (adt, tm) = to_adt_syntax(db, tt, call_site)?;
parse_adt_from_syntax(&adt, &tm, call_site)
}
@ -382,12 +386,14 @@ fn parse_adt_from_syntax(
}
fn to_adt_syntax(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
call_site: Span,
) -> Result<(ast::Adt, span::SpanMap<SyntaxContextId>), ExpandError> {
let (parsed, tm) = syntax_bridge::token_tree_to_syntax_node(
let (parsed, tm) = crate::db::token_tree_to_syntax_node(
db,
tt,
syntax_bridge::TopEntryPoint::MacroItems,
crate::ExpandTo::Items,
parser::Edition::CURRENT_FIXME,
);
let macro_items = ast::MacroItems::cast(parsed.syntax_node())
@ -446,12 +452,13 @@ fn name_to_token(
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
db: &dyn ExpandDatabase,
invoc_span: Span,
tt: &tt::TopSubtree,
trait_path: tt::TopSubtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let info = match parse_adt(tt, invoc_span) {
let info = match parse_adt(db, tt, invoc_span) {
Ok(info) => info,
Err(e) => {
return ExpandResult::new(
@ -520,14 +527,22 @@ fn expand_simple_derive_with_parsed(
}
}
fn copy_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn copy_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
expand_simple_derive(db, span, tt, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn clone_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::clone::Clone }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
@ -576,9 +591,13 @@ fn and_and(span: Span) -> tt::TopSubtree {
quote! {span => #and& }
}
fn default_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn default_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::default::Default }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
@ -615,9 +634,13 @@ fn default_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtre
})
}
fn debug_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn debug_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
@ -687,9 +710,13 @@ fn debug_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree>
})
}
fn hash_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn hash_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::hash::Hash }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {span =>};
@ -734,14 +761,22 @@ fn hash_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree>
})
}
fn eq_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn eq_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn partial_eq_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {span =>};
@ -811,9 +846,13 @@ fn self_and_other_patterns(
(self_patterns, other_patterns)
}
fn ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn ord_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@ -869,9 +908,13 @@ fn ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
})
}
fn partial_ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
fn partial_ord_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let krate = &dollar_crate(span);
expand_simple_derive(span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
expand_simple_derive(db, span, tt, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::Ident,
left: tt::TopSubtree,
@ -932,8 +975,12 @@ fn partial_ord_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSu
})
}
fn coerce_pointee_expand(span: Span, tt: &tt::TopSubtree) -> ExpandResult<tt::TopSubtree> {
let (adt, _span_map) = match to_adt_syntax(tt, span) {
fn coerce_pointee_expand(
db: &dyn ExpandDatabase,
span: Span,
tt: &tt::TopSubtree,
) -> ExpandResult<tt::TopSubtree> {
let (adt, _span_map) = match to_adt_syntax(db, tt, span) {
Ok(it) => it,
Err(err) => {
return ExpandResult::new(tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), err);


@ -69,7 +69,7 @@ impl BuiltinFnLikeExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id);
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -86,7 +86,7 @@ impl EagerExpander {
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let span = span_with_def_site_ctxt(db, span, id);
let span = span_with_def_site_ctxt(db, span, id, Edition::CURRENT);
self.expander()(db, id, tt, span)
}
@ -221,7 +221,7 @@ fn assert_expand(
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let call_site_span = span_with_call_site_ctxt(db, span, id);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let mut iter = tt.iter();
@ -342,7 +342,7 @@ fn panic_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::panic_2021.clone()
@ -373,7 +373,7 @@ fn unreachable_expand(
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let dollar_crate = dollar_crate(span);
let call_site_span = span_with_call_site_ctxt(db, span, id);
let call_site_span = span_with_call_site_ctxt(db, span, id, Edition::CURRENT);
let mac = if use_panic_2021(db, call_site_span) {
sym::unreachable_2021.clone()


@ -102,6 +102,7 @@ macro_rules! quote_impl__ {
($span:ident $builder:ident # ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '#')};
($span:ident $builder:ident $ ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '$')};
($span:ident $builder:ident * ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '*')};
($span:ident $builder:ident = ) => {$crate::builtin::quote::__quote!(@PUNCT($span $builder) '=')};
($span:ident $builder:ident $first:tt $($tail:tt)+ ) => {{
$crate::builtin::quote::__quote!($span $builder $first);
@ -225,7 +226,7 @@ mod tests {
use ::tt::IdentIsRaw;
use expect_test::expect;
use intern::Symbol;
use span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use span::{Edition, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
use syntax::{TextRange, TextSize};
use super::quote;
@ -239,7 +240,7 @@ mod tests {
),
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(Edition::CURRENT),
};
#[test]
@ -276,8 +277,8 @@ mod tests {
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:#?}");
expect![[r#"
SUBTREE $$ 937550:0@0..0#0 937550:0@0..0#0
IDENT hello 937550:0@0..0#0"#]]
SUBTREE $$ 937550:0@0..0#2 937550:0@0..0#2
IDENT hello 937550:0@0..0#2"#]]
.assert_eq(&t);
}


@ -5,7 +5,7 @@ use either::Either;
use limit::Limit;
use mbe::MatchedArmIndex;
use rustc_hash::FxHashSet;
use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
use triomphe::Arc;
@ -136,12 +136,12 @@ pub trait ExpandDatabase: SourceDatabase {
macro_call: MacroCallId,
) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>>;
#[ra_salsa::transparent]
fn syntax_context(&self, file: HirFileId) -> SyntaxContextId;
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContextId;
}
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId) -> SyntaxContextId {
fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContextId {
match file.repr() {
HirFileIdRepr::FileId(_) => SyntaxContextId::ROOT,
HirFileIdRepr::FileId(_) => SyntaxContextId::root(edition),
HirFileIdRepr::MacroFile(m) => {
db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind)
.2
@ -273,9 +273,9 @@ pub fn expand_speculative(
loc.krate,
&tt,
attr_arg.as_ref(),
span_with_def_site_ctxt(db, span, actual_macro_call),
span_with_call_site_ctxt(db, span, actual_macro_call),
span_with_mixed_site_ctxt(db, span, actual_macro_call),
span_with_def_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_call_site_ctxt(db, span, actual_macro_call, loc.def.edition),
span_with_mixed_site_ctxt(db, span, actual_macro_call, loc.def.edition),
)
}
MacroDefKind::BuiltInAttr(_, it) if it.is_derive() => {
@ -300,7 +300,7 @@ pub fn expand_speculative(
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) =
token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
token_tree_to_syntax_node(db, &speculative_expansion.value, expand_to, loc.def.edition);
let syntax_node = node.syntax_node();
let token = rev_tmap
@ -346,6 +346,7 @@ fn parse_macro_expansion(
macro_expand(db, macro_file.macro_call_id, loc);
let (parse, mut rev_token_map) = token_tree_to_syntax_node(
db,
match &tt {
CowArc::Arc(it) => it,
CowArc::Owned(it) => it,
@ -699,9 +700,9 @@ fn expand_proc_macro(
loc.krate,
&macro_arg,
attr_arg,
span_with_def_site_ctxt(db, span, id),
span_with_call_site_ctxt(db, span, id),
span_with_mixed_site_ctxt(db, span, id),
span_with_def_site_ctxt(db, span, id, loc.def.edition),
span_with_call_site_ctxt(db, span, id, loc.def.edition),
span_with_mixed_site_ctxt(db, span, id, loc.def.edition),
)
};
@ -715,7 +716,8 @@ fn expand_proc_macro(
ExpandResult { value: Arc::new(tt), err }
}
fn token_tree_to_syntax_node(
pub(crate) fn token_tree_to_syntax_node(
db: &dyn ExpandDatabase,
tt: &tt::TopSubtree,
expand_to: ExpandTo,
edition: parser::Edition,
@ -727,7 +729,12 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
};
syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
syntax_bridge::token_tree_to_syntax_node(
tt,
entry_point,
&mut |ctx| ctx.lookup(db).edition,
edition,
)
}
fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
@ -751,5 +758,7 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
}
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root());
for edition in Edition::iter() {
db.intern_syntax_context(SyntaxContextData::root(edition));
}
}
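
One way to read the expect-test shifts elsewhere in this update (`#0` becoming `#2`, `#1` becoming `#4`, `SyntaxContext: 0` becoming `SyntaxContext: 2`) is that the per-edition roots interned above now occupy the first context IDs in discriminant order, so the 2021-edition root lands at ID 2 and the first freshly interned non-root context at ID 4. That numbering is an inference from this diff, not something it states; a standalone sketch of the assumed layout:

// Hypothetical model of the interning order implied by `setup_syntax_context_root`:
// one root context per edition, interned first, in discriminant order.
#[derive(Clone, Copy)]
enum Edition { Edition2015 = 0, Edition2018, Edition2021, Edition2024 }

fn main() {
    let roots = [Edition::Edition2015, Edition::Edition2018, Edition::Edition2021, Edition::Edition2024];
    // IDs 0..=3 would be the per-edition roots; the 2021 root sits at ID 2 ...
    assert_eq!(Edition::Edition2021 as usize, 2);
    // ... and the first non-root context interned afterwards gets ID 4.
    assert_eq!(roots.len(), 4);
}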


@ -2,7 +2,7 @@
use base_db::CrateId;
use intern::sym;
use span::{Edition, MacroCallId, Span, SyntaxContextId};
use span::{Edition, HirFileIdRepr, MacroCallId, Span, SyntaxContextId};
use stdx::TupleExt;
use syntax::{ast, AstNode};
use syntax_bridge::DocCommentDesugarMode;
@ -20,6 +20,7 @@ use crate::{
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro,
pub transparency: Transparency,
edition: Edition,
}
impl DeclarativeMacroExpander {
@ -40,7 +41,7 @@ impl DeclarativeMacroExpander {
.mac
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency, self.edition),
span,
loc.def.edition,
)
@ -159,6 +160,10 @@ impl DeclarativeMacroExpander {
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
let edition = ctx_edition(match id.file_id.repr() {
HirFileIdRepr::MacroFile(macro_file) => macro_file.macro_call_id.lookup(db).ctxt,
HirFileIdRepr::FileId(file) => SyntaxContextId::root(file.edition()),
});
Arc::new(DeclarativeMacroExpander { mac, transparency, edition })
}
}


@ -380,14 +380,14 @@ impl InFile<TextRange> {
) -> (FileRange, SyntaxContextId) {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
(FileRange { file_id, range: self.value }, SyntaxContextId::root(file_id.edition()))
}
HirFileIdRepr::MacroFile(mac_file) => {
match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) {
Some(it) => it,
None => {
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
(loc.kind.original_call_range(db), SyntaxContextId::ROOT)
(loc.kind.original_call_range(db), SyntaxContextId::root(loc.def.edition))
}
}
}
@ -432,9 +432,10 @@ impl InFile<TextRange> {
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value }, SyntaxContextId::ROOT))
}
HirFileIdRepr::FileId(file_id) => Some((
FileRange { file_id, range: self.value },
SyntaxContextId::root(file_id.edition()),
)),
HirFileIdRepr::MacroFile(mac_file) => {
map_node_range_up(db, &db.expansion_span_map(mac_file), self.value)
}


@ -380,7 +380,7 @@ pub(crate) fn reverse_fixups(tt: &mut TopSubtree, undo_info: &SyntaxFixupUndoInf
let span = |file_id| Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
ctx: SyntaxContextId::root(span::Edition::Edition2015),
};
delimiter.open = span(delimiter.open.anchor.file_id);
delimiter.close = span(delimiter.close.anchor.file_id);
@ -441,8 +441,8 @@ fn transform_tt<'a, 'b>(
};
let len_diff = replacement.len() as i64 - old_len as i64;
tt.splice(i..i + old_len, replacement.flat_tokens().iter().cloned());
// `+1` for the loop.
i = i.checked_add_signed(len_diff as isize + 1).unwrap();
// Skip the newly inserted replacement, we don't want to visit it.
i += replacement.len();
for &subtree_idx in &subtrees_stack {
let tt::TokenTree::Subtree(subtree) = &mut tt[subtree_idx] else {
@ -532,7 +532,7 @@ mod tests {
}
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture, span::Edition::CURRENT);
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(EditionedFileId::new(
FileId::from_raw(0),
@ -562,6 +562,7 @@ mod tests {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
&tt,
syntax_bridge::TopEntryPoint::MacroItems,
&mut |_| parser::Edition::CURRENT,
parser::Edition::CURRENT,
);
assert!(


@ -24,26 +24,37 @@
use std::iter;
use span::{MacroCallId, Span, SyntaxContextData, SyntaxContextId};
use span::{Edition, MacroCallId, Span, SyntaxContextData, SyntaxContextId};
use crate::db::{ExpandDatabase, InternSyntaxContextQuery};
pub use span::Transparency;
pub fn span_with_def_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
pub fn span_with_def_site_ctxt(
db: &dyn ExpandDatabase,
span: Span,
expn_id: MacroCallId,
edition: Edition,
) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque, edition)
}
pub fn span_with_call_site_ctxt(db: &dyn ExpandDatabase, span: Span, expn_id: MacroCallId) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
pub fn span_with_call_site_ctxt(
db: &dyn ExpandDatabase,
span: Span,
expn_id: MacroCallId,
edition: Edition,
) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent, edition)
}
pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase,
span: Span,
expn_id: MacroCallId,
edition: Edition,
) -> Span {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent, edition)
}
fn span_with_ctxt_from_mark(
@ -51,8 +62,12 @@ fn span_with_ctxt_from_mark(
span: Span,
expn_id: MacroCallId,
transparency: Transparency,
edition: Edition,
) -> Span {
Span { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
Span {
ctx: apply_mark(db, SyntaxContextId::root(edition), expn_id, transparency, edition),
..span
}
}
pub(super) fn apply_mark(
@ -60,9 +75,10 @@ pub(super) fn apply_mark(
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
edition: Edition,
) -> SyntaxContextId {
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, call_id, transparency);
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
@ -73,7 +89,7 @@ pub(super) fn apply_mark(
};
if call_site_ctxt.is_root() {
return apply_mark_internal(db, ctxt, call_id, transparency);
return apply_mark_internal(db, ctxt, call_id, transparency, edition);
}
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
@ -86,9 +102,9 @@ pub(super) fn apply_mark(
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency, edition);
}
apply_mark_internal(db, call_site_ctxt, call_id, transparency)
apply_mark_internal(db, call_site_ctxt, call_id, transparency, edition)
}
fn apply_mark_internal(
@ -96,6 +112,7 @@ fn apply_mark_internal(
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
edition: Edition,
) -> SyntaxContextId {
use base_db::ra_salsa;
@ -108,13 +125,14 @@ fn apply_mark_internal(
if transparency >= Transparency::Opaque {
let parent = opaque;
opaque = ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency),
(parent, call_id, transparency, edition),
|new_opaque| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
edition,
},
);
}
@ -123,13 +141,14 @@ fn apply_mark_internal(
let parent = opaque_and_semitransparent;
opaque_and_semitransparent =
ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
(parent, call_id, transparency),
(parent, call_id, transparency, edition),
|new_opaque_and_semitransparent| SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
edition,
},
);
}
@ -141,6 +160,7 @@ fn apply_mark_internal(
parent,
opaque,
opaque_and_semitransparent,
edition,
})
}


@ -188,6 +188,8 @@ impl fmt::Display for RenderedExpandError {
impl RenderedExpandError {
const GENERAL_KIND: &str = "macro-error";
const DISABLED: &str = "proc-macro-disabled";
const ATTR_EXP_DISABLED: &str = "attribute-expansion-disabled";
}
impl ExpandErrorKind {
@ -196,12 +198,12 @@ impl ExpandErrorKind {
ExpandErrorKind::ProcMacroAttrExpansionDisabled => RenderedExpandError {
message: "procedural attribute macro expansion is disabled".to_owned(),
error: false,
kind: "proc-macros-disabled",
kind: RenderedExpandError::ATTR_EXP_DISABLED,
},
ExpandErrorKind::MacroDisabled => RenderedExpandError {
message: "proc-macro is explicitly disabled".to_owned(),
error: false,
kind: "proc-macro-disabled",
kind: RenderedExpandError::DISABLED,
},
&ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
match db.proc_macros().get_error_for_crate(def_crate) {


@ -273,10 +273,9 @@ fn convert_path(
res
}
}
ast::PathSegmentKind::SelfTypeKw => ModPath::from_segments(
PathKind::Plain,
Some(Name::new_symbol(sym::Self_.clone(), SyntaxContextId::ROOT)),
),
ast::PathSegmentKind::SelfTypeKw => {
ModPath::from_segments(PathKind::Plain, Some(Name::new_symbol_root(sym::Self_.clone())))
}
ast::PathSegmentKind::CrateKw => ModPath::from_segments(PathKind::Crate, iter::empty()),
ast::PathSegmentKind::SelfKw => handle_super_kw(0)?,
ast::PathSegmentKind::SuperKw => handle_super_kw(1)?,
@ -399,6 +398,9 @@ macro_rules! __known_path {
(core::fmt::Debug) => {};
(std::fmt::format) => {};
(core::ops::Try) => {};
(core::convert::From) => {};
(core::convert::TryFrom) => {};
(core::str::FromStr) => {};
($path:path) => {
compile_error!("Please register your known path in the path module")
};
@ -415,3 +417,14 @@ macro_rules! __path {
}
pub use crate::__path as path;
#[macro_export]
macro_rules! __tool_path {
($start:ident $(:: $seg:ident)*) => ({
$crate::mod_path::ModPath::from_segments($crate::mod_path::PathKind::Plain, vec![
$crate::name::Name::new_symbol_root(intern::sym::rust_analyzer.clone()), $crate::name::Name::new_symbol_root(intern::sym::$start.clone()), $($crate::name::Name::new_symbol_root(intern::sym::$seg.clone()),)*
])
});
}
pub use crate::__tool_path as tool_path;
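// Editor's sketch (not part of this change; `sym::skip` existing as an interned symbol
// is an assumption): `tool_path!` builds a plain `ModPath` prefixed with the
// `rust_analyzer` tool namespace, e.g.
//
//     let skip = tool_path!(skip); // the path `rust_analyzer::skip`
//
// which is the shape used to recognise tool attributes such as `#[rust_analyzer::skip]`.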

View File

@ -11,7 +11,7 @@ use syntax::utils::is_raw_identifier;
/// and declarations. In theory, names should also carry hygiene info, but we are
/// not there yet!
///
/// Note that the rawness (`r#`) of names does not depend on whether they are written raw.
/// Note that the rawness (`r#`) of names is not preserved. Names are always stored without an `r#` prefix.
/// This is because we want to show (in completions etc.) names as raw depending on the needs
/// of the current crate: for example, if it is edition 2021, complete `gen` even if the defining
/// crate is in edition 2024 and wrote `r#gen`, and the opposite holds as well.
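// Editor's sketch (illustrative; the `Edition::Edition2024` variant and `gen` being a
// keyword only since edition 2024 are assumptions of this example):
//
//     let name = Name::new_root("r#gen");      // stored as plain `gen`
//     name.needs_escape(Edition::Edition2024); // true: must render as `r#gen`
//     name.needs_escape(Edition::Edition2021); // false: renders as plain `gen`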
@ -77,20 +77,49 @@ impl Name {
/// Hopefully, this should allow us to integrate hygiene more cleanly in the
/// future, and to switch to an interned representation of names.
fn new_text(text: &str) -> Name {
debug_assert!(!text.starts_with("r#"));
Name { symbol: Symbol::intern(text), ctx: () }
}
pub fn new(text: &str, ctx: SyntaxContextId) -> Name {
pub fn new(text: &str, mut ctx: SyntaxContextId) -> Name {
// For comparisons etc. we remove the edition, because sometimes we search for some `Name`
// and we don't know which edition it came from.
// Can't do that for all `SyntaxContextId`s because it breaks Salsa.
ctx.remove_root_edition();
_ = ctx;
Self::new_text(text)
}
pub fn new_root(text: &str) -> Name {
// The edition doesn't matter for hygiene.
Self::new(text.trim_start_matches("r#"), SyntaxContextId::root(Edition::Edition2015))
}
pub fn new_tuple_field(idx: usize) -> Name {
Name { symbol: Symbol::intern(&idx.to_string()), ctx: () }
let symbol = match idx {
0 => sym::INTEGER_0.clone(),
1 => sym::INTEGER_1.clone(),
2 => sym::INTEGER_2.clone(),
3 => sym::INTEGER_3.clone(),
4 => sym::INTEGER_4.clone(),
5 => sym::INTEGER_5.clone(),
6 => sym::INTEGER_6.clone(),
7 => sym::INTEGER_7.clone(),
8 => sym::INTEGER_8.clone(),
9 => sym::INTEGER_9.clone(),
10 => sym::INTEGER_10.clone(),
11 => sym::INTEGER_11.clone(),
12 => sym::INTEGER_12.clone(),
13 => sym::INTEGER_13.clone(),
14 => sym::INTEGER_14.clone(),
15 => sym::INTEGER_15.clone(),
_ => Symbol::intern(&idx.to_string()),
};
Name { symbol, ctx: () }
}
pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () }
Self::new_text(lt.text().as_str().trim_start_matches("r#"))
}
/// Resolve a name from the text of token.
@ -133,15 +162,18 @@ impl Name {
}
/// Returns the text this name represents if it isn't a tuple field.
///
/// Do not use this for user-facing text, use `display` instead to handle editions properly.
pub fn as_str(&self) -> &str {
self.symbol.as_str()
}
// FIXME: Remove this
pub fn unescaped(&self) -> UnescapedName<'_> {
UnescapedName(self)
}
pub fn is_escaped(&self, edition: Edition) -> bool {
pub fn needs_escape(&self, edition: Edition) -> bool {
is_raw_identifier(self.symbol.as_str(), edition)
}
@ -164,16 +196,19 @@ impl Name {
&self.symbol
}
pub const fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
pub fn new_symbol(symbol: Symbol, ctx: SyntaxContextId) -> Self {
debug_assert!(!symbol.as_str().starts_with("r#"));
_ = ctx;
Self { symbol, ctx: () }
}
// FIXME: This needs to go once we have hygiene
pub const fn new_symbol_root(sym: Symbol) -> Self {
pub fn new_symbol_root(sym: Symbol) -> Self {
debug_assert!(!sym.as_str().starts_with("r#"));
Self { symbol: sym, ctx: () }
}
// FIXME: Remove this
#[inline]
pub fn eq_ident(&self, ident: &str) -> bool {
self.as_str() == ident.trim_start_matches("r#")

View File

@ -856,7 +856,7 @@ fn impl_def_datum(
let associated_ty_value_ids = impl_data
.items
.iter()
.filter_map(|item| match item {
.filter_map(|(_, item)| match item {
AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
_ => None,
})

View File

@ -31,7 +31,10 @@ fn simplify(e: ConstEvalError) -> ConstEvalError {
}
#[track_caller]
fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
fn check_fail(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
error: impl FnOnce(ConstEvalError) -> bool,
) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
match eval_goal(&db, file_id) {
Ok(_) => panic!("Expected fail, but it succeeded"),
@ -42,7 +45,7 @@ fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) {
}
#[track_caller]
fn check_number(ra_fixture: &str, answer: i128) {
fn check_number(#[rust_analyzer::rust_fixture] ra_fixture: &str, answer: i128) {
check_answer(ra_fixture, |b, _| {
assert_eq!(
b,
@ -54,7 +57,7 @@ fn check_number(ra_fixture: &str, answer: i128) {
}
#[track_caller]
fn check_str(ra_fixture: &str, answer: &str) {
fn check_str(#[rust_analyzer::rust_fixture] ra_fixture: &str, answer: &str) {
check_answer(ra_fixture, |b, mm| {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
@ -71,7 +74,10 @@ fn check_str(ra_fixture: &str, answer: &str) {
}
#[track_caller]
fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
fn check_answer(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
check: impl FnOnce(&[u8], &MemoryMap),
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let r = match eval_goal(&db, file_id) {

View File

@ -471,10 +471,55 @@ impl HirDisplay for ProjectionTy {
if f.should_truncate() {
return write!(f, "{TYPE_HINT_TRUNCATION}");
}
let trait_ref = self.trait_ref(f.db);
let self_ty = trait_ref.self_type_parameter(Interner);
// If this is a projection on a type parameter, check whether the projection target has bounds
// itself; if so, render them directly as `impl Bound` instead of the less useful
// `<Param as Trait>::Assoc`.
if !f.display_target.is_source_code() {
if let TyKind::Placeholder(idx) = self_ty.kind(Interner) {
let db = f.db;
let id = from_placeholder_idx(db, *idx);
let generics = generics(db.upcast(), id.parent);
let substs = generics.placeholder_subst(db);
let bounds = db
.generic_predicates(id.parent)
.iter()
.map(|pred| pred.clone().substitute(Interner, &substs))
.filter(|wc| match wc.skip_binders() {
WhereClause::Implemented(tr) => {
match tr.self_type_parameter(Interner).kind(Interner) {
TyKind::Alias(AliasTy::Projection(proj)) => proj == self,
_ => false,
}
}
WhereClause::TypeOutlives(t) => match t.ty.kind(Interner) {
TyKind::Alias(AliasTy::Projection(proj)) => proj == self,
_ => false,
},
// We shouldn't be here if these exist
WhereClause::AliasEq(_) => false,
WhereClause::LifetimeOutlives(_) => false,
})
.collect::<Vec<_>>();
if !bounds.is_empty() {
return write_bounds_like_dyn_trait_with_prefix(
f,
"impl",
Either::Left(
&TyKind::Alias(AliasTy::Projection(self.clone())).intern(Interner),
),
&bounds,
SizedByDefault::NotSized,
);
};
}
}
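// Editor's note (illustrative example, hypothetical user code): for a signature like
//
//     fn first<I: Iterator<Item: Clone>>(iter: I) -> Option<I::Item> {
//         iter.take(1).next()
//     }
//
// the associated-type bound yields a predicate on `<I as Iterator>::Item`, so a
// non-source-code rendering (e.g. an inlay hint) can now show `Option<impl Clone>`
// instead of `Option<<I as Iterator>::Item>`.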
write!(f, "<")?;
trait_ref.self_type_parameter(Interner).hir_fmt(f)?;
self_ty.hir_fmt(f)?;
write!(f, " as ")?;
trait_ref.hir_fmt(f)?;
write!(

View File

@ -26,7 +26,7 @@ enum DynCompatibilityViolationKind {
}
fn check_dyn_compatibility<'a>(
ra_fixture: &str,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expected: impl IntoIterator<Item = (&'a str, Vec<DynCompatibilityViolationKind>)>,
) {
let mut expected: FxHashMap<_, _> =

View File

@ -25,7 +25,10 @@ fn current_machine_data_layout() -> String {
.unwrap()
}
fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
fn eval_goal(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
minicore: &str,
) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}",
@ -81,7 +84,10 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
}
/// A version of `eval_goal` for types that cannot be expressed in ADTs, like closures and `impl Trait`
fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
fn eval_expr(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
minicore: &str,
) -> Result<Arc<Layout>, LayoutError> {
let target_data_layout = current_machine_data_layout();
let ra_fixture = format!(
"//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}",
@ -114,21 +120,31 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
}
#[track_caller]
fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
fn check_size_and_align(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
minicore: &str,
size: u64,
align: u64,
) {
let l = eval_goal(ra_fixture, minicore).unwrap();
assert_eq!(l.size.bytes(), size, "size mismatch");
assert_eq!(l.align.abi.bytes(), align, "align mismatch");
}
#[track_caller]
fn check_size_and_align_expr(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
fn check_size_and_align_expr(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
minicore: &str,
size: u64,
align: u64,
) {
let l = eval_expr(ra_fixture, minicore).unwrap();
assert_eq!(l.size.bytes(), size, "size mismatch");
assert_eq!(l.align.abi.bytes(), align, "align mismatch");
}
#[track_caller]
fn check_fail(ra_fixture: &str, e: LayoutError) {
fn check_fail(#[rust_analyzer::rust_fixture] ra_fixture: &str, e: LayoutError) {
let r = eval_goal(ra_fixture, "");
assert_eq!(r, Err(e));
}

View File

@ -761,8 +761,8 @@ impl<'a> TyLoweringContext<'a> {
path: &Path,
on_diagnostic: &mut dyn FnMut(&mut Self, PathLoweringDiagnostic),
) -> Option<(TypeNs, Option<usize>)> {
let (resolution, remaining_index, _) =
self.resolver.resolve_path_in_type_ns(self.db.upcast(), path)?;
let (resolution, remaining_index, _, prefix_info) =
self.resolver.resolve_path_in_type_ns_with_prefix_info(self.db.upcast(), path)?;
let segments = path.segments();
match path {
@ -771,13 +771,12 @@ impl<'a> TyLoweringContext<'a> {
_ => return Some((resolution, remaining_index)),
};
let (module_segments, resolved_segment_idx, resolved_segment) = match remaining_index {
None => (
segments.strip_last(),
segments.len() - 1,
segments.last().expect("resolved path has at least one element"),
),
Some(i) => (segments.take(i - 1), i - 1, segments.get(i - 1).unwrap()),
let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index {
None if prefix_info.enum_variant => {
(segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2))
}
None => (segments.strip_last(), segments.len() - 1, None),
Some(i) => (segments.take(i - 1), i - 1, None),
};
for (i, mod_segment) in module_segments.iter().enumerate() {
@ -792,9 +791,23 @@ impl<'a> TyLoweringContext<'a> {
}
}
if let Some(enum_segment) = enum_segment {
if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
&& segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
{
on_diagnostic(
self,
PathLoweringDiagnostic::GenericArgsProhibited {
segment: (enum_segment + 1) as u32,
reason: GenericArgsProhibitedReason::EnumVariant,
},
);
}
}
self.handle_type_ns_resolution(
&resolution,
resolved_segment,
segments.get(resolved_segment_idx).expect("should have resolved segment"),
resolved_segment_idx,
on_diagnostic,
);

View File

@ -746,16 +746,9 @@ fn lookup_impl_assoc_item_for_trait_ref(
let table = InferenceTable::new(db, env);
let (impl_data, impl_subst) = find_matching_impl(impls, table, trait_ref)?;
let item = impl_data.items.iter().find_map(|&it| match it {
AssocItemId::FunctionId(f) => {
(db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
}
AssocItemId::ConstId(c) => db
.const_data(c)
.name
.as_ref()
.map(|n| n == name)
.and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
let item = impl_data.items.iter().find_map(|(n, it)| match *it {
AssocItemId::FunctionId(f) => (n == name).then_some(AssocItemId::FunctionId(f)),
AssocItemId::ConstId(c) => (n == name).then_some(AssocItemId::ConstId(c)),
AssocItemId::TypeAliasId(_) => None,
})?;
Some((item, impl_subst))
@ -850,7 +843,7 @@ fn is_inherent_impl_coherent(
};
rustc_has_incoherent_inherent_impls
&& !impl_data.items.is_empty()
&& impl_data.items.iter().copied().all(|assoc| match assoc {
&& impl_data.items.iter().all(|&(_, assoc)| match assoc {
AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
@ -1399,7 +1392,7 @@ fn iterate_inherent_methods(
callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
) -> ControlFlow<()> {
for &impl_id in impls.for_self_ty(self_ty) {
for &item in table.db.impl_data(impl_id).items.iter() {
for &(ref item_name, item) in table.db.impl_data(impl_id).items.iter() {
let visible = match is_valid_impl_method_candidate(
table,
self_ty,
@ -1408,6 +1401,7 @@ fn iterate_inherent_methods(
name,
impl_id,
item,
item_name,
) {
IsValidCandidate::Yes => true,
IsValidCandidate::NotVisible => false,
@ -1467,6 +1461,7 @@ fn is_valid_impl_method_candidate(
name: Option<&Name>,
impl_id: ImplId,
item: AssocItemId,
item_name: &Name,
) -> IsValidCandidate {
match item {
AssocItemId::FunctionId(f) => is_valid_impl_fn_candidate(
@ -1477,11 +1472,12 @@ fn is_valid_impl_method_candidate(
receiver_ty,
self_ty,
visible_from_module,
item_name,
),
AssocItemId::ConstId(c) => {
let db = table.db;
check_that!(receiver_ty.is_none());
check_that!(name.is_none_or(|n| db.const_data(c).name.as_ref() == Some(n)));
check_that!(name.is_none_or(|n| n == item_name));
if let Some(from_module) = visible_from_module {
if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) {
@ -1565,11 +1561,13 @@ fn is_valid_impl_fn_candidate(
receiver_ty: Option<&Ty>,
self_ty: &Ty,
visible_from_module: Option<ModuleId>,
item_name: &Name,
) -> IsValidCandidate {
check_that!(name.is_none_or(|n| n == item_name));
let db = table.db;
let data = db.function_data(fn_id);
check_that!(name.is_none_or(|n| n == &data.name));
if let Some(from_module) = visible_from_module {
if !db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) {
cov_mark::hit!(autoderef_candidate_not_visible);

View File

@ -37,11 +37,15 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
Ok((output.stdout().into_owned(), output.stderr().into_owned()))
}
fn check_pass(ra_fixture: &str) {
fn check_pass(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_pass_and_stdio(ra_fixture, "", "");
}
fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr: &str) {
fn check_pass_and_stdio(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expected_stdout: &str,
expected_stderr: &str,
) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let x = eval_main(&db, file_id);
@ -73,7 +77,7 @@ fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr
}
}
fn check_panic(ra_fixture: &str, expected_panic: &str) {
fn check_panic(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_panic: &str) {
let (db, file_ids) = TestDB::with_many_files(ra_fixture);
let file_id = *file_ids.last().unwrap();
let e = eval_main(&db, file_id).unwrap_err();

View File

@ -69,27 +69,32 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
}
#[track_caller]
fn check_types(ra_fixture: &str) {
fn check_types(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_impl(ra_fixture, false, true, false)
}
#[track_caller]
fn check_types_source_code(ra_fixture: &str) {
fn check_types_source_code(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_impl(ra_fixture, false, true, true)
}
#[track_caller]
fn check_no_mismatches(ra_fixture: &str) {
fn check_no_mismatches(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_impl(ra_fixture, true, false, false)
}
#[track_caller]
fn check(ra_fixture: &str) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_impl(ra_fixture, false, false, false)
}
#[track_caller]
fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
fn check_impl(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
allow_none: bool,
only_types: bool,
display_source: bool,
) {
let _tracing = setup_tracing();
let (db, files) = TestDB::with_many_files(ra_fixture);
@ -282,7 +287,7 @@ fn pat_node(
})
}
fn infer(ra_fixture: &str) -> String {
fn infer(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
infer_with_mismatches(ra_fixture, false)
}
@ -430,7 +435,7 @@ pub(crate) fn visit_module(
visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
for impl_id in crate_def_map[module_id].scope.impls() {
let impl_data = db.impl_data(impl_id);
for &item in impl_data.items.iter() {
for &(_, item) in impl_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => {
let body = db.body(it.into());
@ -520,13 +525,13 @@ fn ellipsize(mut text: String, max_len: usize) -> String {
text
}
fn check_infer(ra_fixture: &str, expect: Expect) {
fn check_infer(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let mut actual = infer(ra_fixture);
actual.push('\n');
expect.assert_eq(&actual);
}
fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
fn check_infer_with_mismatches(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let mut actual = infer_with_mismatches(ra_fixture, true);
actual.push('\n');
expect.assert_eq(&actual);

View File

@ -14,7 +14,7 @@ use crate::test_db::TestDB;
use super::visit_module;
fn check_closure_captures(ra_fixture: &str, expect: Expect) {
fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module = db.module_for_file(file_id);
let def_map = module.def_map(&db);

View File

@ -3802,3 +3802,15 @@ fn foo() {
"#,
);
}
#[test]
fn tool_attr_skip() {
check_no_mismatches(
r#"
#[rust_analyzer::skip]
async fn foo(a: (), b: i32) -> u32 {
0 + 1 + b()
}
"#,
);
}

View File

@ -968,7 +968,7 @@ struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
}
#[track_caller]
fn check(ra_fixture: &str, expected: Expect) {
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: Expect) {
// use tracing_subscriber::{layer::SubscriberExt, Layer};
// let my_layer = tracing_subscriber::fmt::layer();
// let _g = tracing::subscriber::set_default(tracing_subscriber::registry().with(

View File

@ -20,6 +20,7 @@ itertools.workspace = true
smallvec.workspace = true
tracing.workspace = true
triomphe.workspace = true
indexmap.workspace = true
# local deps
base-db.workspace = true

View File

@ -12,7 +12,6 @@ use hir_def::{
};
use hir_expand::{mod_path::PathKind, name::Name};
use hir_ty::{db::HirDatabase, method_resolution};
use span::SyntaxContextId;
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@ -90,6 +89,16 @@ impl HasAttrs for AssocItem {
}
}
impl HasAttrs for crate::Crate {
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
let def = AttrDefId::ModuleId(self.root_module().id);
AttrsWithOwner::new(db.upcast(), def)
}
fn attr_id(self) -> AttrDefId {
AttrDefId::ModuleId(self.root_module().id)
}
}
/// Resolves the item `link` points to in the scope of `def`.
pub fn resolve_doc_path_on(
db: &dyn HirDatabase,
@ -328,9 +337,7 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> {
};
let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
Ok(idx) => Name::new_tuple_field(idx),
Err(_) => {
Name::new(segment.split_once('<').map_or(segment, |it| it.0), SyntaxContextId::ROOT)
}
Err(_) => Name::new_root(segment.split_once('<').map_or(segment, |it| it.0)),
});
Some(ModPath::from_segments(kind, parts))
};

View File

@ -23,10 +23,10 @@ use hir_ty::{
use itertools::Itertools;
use crate::{
Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl,
Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam, Macro, Module,
SelfParam, Static, Struct, Trait, TraitAlias, TraitRef, TupleField, TyBuilder, Type, TypeAlias,
TypeOrConstParam, TypeParam, Union, Variant,
Adt, AsAssocItem, AssocItem, AssocItemContainer, Const, ConstParam, Crate, Enum,
ExternCrateDecl, Field, Function, GenericParam, HasCrate, HasVisibility, Impl, LifetimeParam,
Macro, Module, SelfParam, Static, Struct, Trait, TraitAlias, TraitRef, TupleField, TyBuilder,
Type, TypeAlias, TypeOrConstParam, TypeParam, Union, Variant,
};
impl HirDisplay for Function {
@ -846,14 +846,27 @@ impl HirDisplay for TypeAlias {
impl HirDisplay for Module {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
// FIXME: Module doesn't have visibility saved in data.
match self.parent(f.db) {
Some(m) => write_visibility(m.id, self.visibility(f.db), f)?,
None => {
return match self.krate(f.db).display_name(f.db) {
Some(name) => write!(f, "extern crate {name}"),
None => f.write_str("extern crate {unknown}"),
}
}
}
match self.name(f.db) {
Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())),
None if self.is_crate_root() => match self.krate(f.db).display_name(f.db) {
Some(name) => write!(f, "extern crate {name}"),
None => f.write_str("extern crate {unknown}"),
},
None => f.write_str("mod {unnamed}"),
None => f.write_str("mod {unknown}"),
}
}
}
impl HirDisplay for Crate {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self.display_name(f.db) {
Some(name) => write!(f, "extern crate {name}"),
None => f.write_str("extern crate {unknown}"),
}
}
}

View File

@ -54,11 +54,11 @@ use hir_def::{
per_ns::PerNs,
resolver::{HasResolver, Resolver},
type_ref::TypesSourceMap,
AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId, CrateRootModuleId,
DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId,
HasModule, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalFieldId, Lookup,
MacroExpander, ModuleId, StaticId, StructId, SyntheticSyntax, TraitAliasId, TraitId, TupleId,
TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId,
LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
SyntheticSyntax, TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
use hir_expand::{
attrs::collect_attrs, proc_macro::ProcMacroKind, AstId, MacroCallKind, RenderedExpandError,
@ -83,7 +83,7 @@ use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId};
use span::{Edition, EditionedFileId, FileId, MacroCallId};
use stdx::{format_to, impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasGenericParams, HasName},
@ -127,7 +127,7 @@ pub use {
ImportPathConfig,
// FIXME: This is here since some queries that are used outside of hir
// take it as input.
{AdtId, MacroId, ModuleDefId},
{ModuleDefId, TraitId},
},
hir_expand::{
attrs::{Attr, AttrId},
@ -775,29 +775,16 @@ impl Module {
AssocItemId::ConstId(id) => !db.const_data(id).has_body,
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
});
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().filter_map(
|&item| {
Some((
item,
match item {
AssocItemId::FunctionId(it) => db.function_data(it).name.clone(),
AssocItemId::ConstId(it) => {
db.const_data(it).name.as_ref()?.clone()
}
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).name.clone(),
},
))
},
));
impl_assoc_items_scratch.extend(db.impl_data(impl_def.id).items.iter().cloned());
let redundant = impl_assoc_items_scratch
.iter()
.filter(|(id, name)| {
.filter(|(name, id)| {
!items.iter().any(|(impl_name, impl_item)| {
discriminant(impl_item) == discriminant(id) && impl_name == name
})
})
.map(|(item, name)| (name.clone(), AssocItem::from(*item)));
.map(|(name, item)| (name.clone(), AssocItem::from(*item)));
for (name, assoc_item) in redundant {
acc.push(
TraitImplRedundantAssocItems {
@ -812,7 +799,7 @@ impl Module {
let missing: Vec<_> = required_items
.filter(|(name, id)| {
!impl_assoc_items_scratch.iter().any(|(impl_item, impl_name)| {
!impl_assoc_items_scratch.iter().any(|(impl_name, impl_item)| {
discriminant(impl_item) == discriminant(id) && impl_name == name
})
})
@ -844,7 +831,7 @@ impl Module {
source_map,
);
for &item in db.impl_data(impl_def.id).items.iter() {
for &(_, item) in db.impl_data(impl_def.id).items.iter() {
AssocItem::from(item).diagnostics(db, acc, style_lints);
}
}
@ -3000,6 +2987,10 @@ impl Macro {
matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
}
pub fn is_proc_macro(self) -> bool {
matches!(self.id, MacroId::ProcMacroId(_))
}
pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
match self.id {
MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
@ -3046,14 +3037,23 @@ impl Macro {
MacroId::Macro2Id(it) => {
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
}
MacroId::MacroRulesId(_) | MacroId::ProcMacroId(_) => false,
MacroId::MacroRulesId(it) => {
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
}
MacroId::ProcMacroId(_) => false,
}
}
pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
matches!(self.id, MacroId::Macro2Id(it) if {
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
})
match self.id {
MacroId::Macro2Id(it) => {
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
}
MacroId::MacroRulesId(it) => {
matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltIn(m) if m.is_asm())
}
MacroId::ProcMacroId(_) => false,
}
}
pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
@ -3902,6 +3902,10 @@ impl ToolModule {
db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone(),
)
}
pub fn krate(&self) -> Crate {
Crate { id: self.krate }
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -4290,7 +4294,7 @@ impl Impl {
}
pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
db.impl_data(self.id).items.iter().map(|&it| it.into()).collect()
db.impl_data(self.id).items.iter().map(|&(_, it)| it.into()).collect()
}
pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
@ -4731,6 +4735,14 @@ impl Type {
Some((self.derived(ty.clone()), m))
}
pub fn add_reference(&self, mutability: Mutability) -> Type {
let ty_mutability = match mutability {
Mutability::Shared => hir_ty::Mutability::Not,
Mutability::Mut => hir_ty::Mutability::Mut,
};
self.derived(TyKind::Ref(ty_mutability, error_lifetime(), self.ty.clone()).intern(Interner))
}
pub fn is_slice(&self) -> bool {
matches!(self.ty.kind(Interner), TyKind::Slice(..))
}
@ -4786,9 +4798,9 @@ impl Type {
}
/// Checks that particular type `ty` implements `std::future::IntoFuture` or
/// `std::future::Future`.
/// `std::future::Future` and returns the `Output` associated type.
/// This function is used in `.await` syntax completion.
pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool {
pub fn into_future_output(&self, db: &dyn HirDatabase) -> Option<Type> {
let trait_ = db
.lang_item(self.env.krate, LangItem::IntoFutureIntoFuture)
.and_then(|it| {
@ -4800,16 +4812,18 @@ impl Type {
.or_else(|| {
let future_trait = db.lang_item(self.env.krate, LangItem::Future)?;
future_trait.as_trait()
});
let trait_ = match trait_ {
Some(it) => it,
None => return false,
};
})?;
let canonical_ty =
Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_)
if !method_resolution::implements_trait_unique(&canonical_ty, db, &self.env, trait_) {
return None;
}
let output_assoc_type = db
.trait_data(trait_)
.associated_type_by_name(&Name::new_symbol_root(sym::Output.clone()))?;
self.normalize_trait_assoc_type(db, &[], output_assoc_type.into())
}
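// Editor's sketch (hypothetical user code): the `Output` type is now surfaced, so
// `.await` completion can report the awaited result type rather than a bare yes/no:
//
//     async fn fetch_id() -> u32 { 0 }
//     fn demo() {
//         let fut = fetch_id();
//         // completing `fut.await` can now know the result type is `u32`
//     }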
/// This does **not** resolve `IntoFuture`, only `Future`.
@ -4824,10 +4838,31 @@ impl Type {
let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
let iterator_item = db
.trait_data(iterator_trait)
.associated_type_by_name(&Name::new_symbol(sym::Item.clone(), SyntaxContextId::ROOT))?;
.associated_type_by_name(&Name::new_symbol_root(sym::Item.clone()))?;
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}
/// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type
pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option<Type> {
let trait_ = db.lang_item(self.env.krate, LangItem::IntoIterIntoIter).and_then(|it| {
let into_iter_fn = it.as_function()?;
let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
let into_iter_trait = assoc_item.container_or_implemented_trait(db)?;
Some(into_iter_trait.id)
})?;
let canonical_ty =
Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
if !method_resolution::implements_trait_unique(&canonical_ty, db, &self.env, trait_) {
return None;
}
let into_iter_assoc_type = db
.trait_data(trait_)
.associated_type_by_name(&Name::new_symbol_root(sym::IntoIter.clone()))?;
self.normalize_trait_assoc_type(db, &[], into_iter_assoc_type.into())
}
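// Editor's sketch (hypothetical user code): this normalizes
// `<Self as IntoIterator>::IntoIter`, e.g. for `Vec<i32>` it resolves to
// `std::vec::IntoIter<i32>`:
//
//     fn demo(v: Vec<i32>) {
//         for _x in v { /* the loop is driven by `std::vec::IntoIter<i32>` */ }
//     }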
/// Checks that particular type `ty` implements `std::ops::FnOnce`.
///
/// This function can be used to check if a particular type is callable, since FnOnce is a
@ -5117,7 +5152,7 @@ impl Type {
let impls = db.inherent_impls_in_crate(krate);
for impl_def in impls.for_self_ty(&self.ty) {
for &item in db.impl_data(*impl_def).items.iter() {
for &(_, item) in db.impl_data(*impl_def).items.iter() {
if callback(item) {
return;
}
@ -5535,6 +5570,7 @@ impl Type {
walk_substs(db, type_, &opaque_ty.substitution, cb);
}
TyKind::Placeholder(_) => {
cb(type_.derived(ty.clone()));
if let Some(bounds) = ty.impl_trait_bounds(db) {
walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
}

View File

@ -39,8 +39,8 @@ use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams},
AstNode, AstToken, Direction, SmolStr, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextRange, TextSize,
AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
TextSize,
};
use triomphe::Arc;
@ -136,8 +136,6 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
/// Root node to HirFileId cache
root_to_file_cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
@ -304,12 +302,7 @@ impl<DB: HirDatabase> Semantics<'_, DB> {
impl<'db> SemanticsImpl<'db> {
fn new(db: &'db dyn HirDatabase) -> Self {
SemanticsImpl {
db,
s2d_cache: Default::default(),
root_to_file_cache: Default::default(),
macro_call_cache: Default::default(),
}
SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
}
pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
@ -483,7 +476,7 @@ impl<'db> SemanticsImpl<'db> {
Some(
calls
.into_iter()
.map(|call| macro_call_to_macro_id(self, ctx, call?).map(|id| Macro { id }))
.map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
.collect(),
)
})
@ -962,7 +955,7 @@ impl<'db> SemanticsImpl<'db> {
let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
Some(
ctx.cache
.get_or_insert_expansion(self, macro_file)
.get_or_insert_expansion(ctx.db, macro_file)
.map_range_down(span)?
.map(SmallVec::<[_; 2]>::from_iter),
)
@ -986,7 +979,10 @@ impl<'db> SemanticsImpl<'db> {
process_expansion_for_token(&mut stack, include)?;
}
None => {
stack.push((file_id.into(), smallvec![(token, SyntaxContextId::ROOT)]));
stack.push((
file_id.into(),
smallvec![(token, SyntaxContextId::root(file_id.edition()))],
));
}
}
@ -1284,7 +1280,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_file = file_id.macro_file()?;
self.with_ctx(|ctx| {
let expansion_info = ctx.cache.get_or_insert_expansion(self, macro_file);
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
expansion_info.arg().map(|node| node?.parent()).transpose()
})
}
@ -1315,8 +1311,8 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
let (parent, label_id) = self
.with_ctx(|ctx| ctx.label_ref_to_def(self.wrap_node_infile(label.clone()).as_ref()))?;
let src = self.wrap_node_infile(label.clone());
let (parent, label_id) = self.with_ctx(|ctx| ctx.label_ref_to_def(src.as_ref()))?;
Some(Label { parent, label_id })
}
@ -1443,6 +1439,10 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(call.syntax())?.resolve_method_call_fallback(self.db, call)
}
pub fn resolve_known_blanket_dual_impls(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.analyze(call.syntax())?.resolve_known_blanket_dual_impls(self.db, call)
}
fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<StructId> {
self.analyze(range_pat.syntax())?.resolve_range_pat(self.db, range_pat)
}
@ -1516,7 +1516,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
self.with_ctx(|ctx| {
ctx.macro_call_to_macro_call(macro_call)
.and_then(|call| macro_call_to_macro_id(self, ctx, call))
.and_then(|call| macro_call_to_macro_id(ctx, call))
.map(Into::into)
})
.or_else(|| {
@ -1558,7 +1558,7 @@ impl<'db> SemanticsImpl<'db> {
let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| {
let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
macro_call_to_macro_id(self, ctx, macro_call_id)
macro_call_to_macro_id(ctx, macro_call_id)
})?;
Some(Macro { id })
}
@ -1591,14 +1591,11 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_mod_path_relative(
&self,
to: Module,
segments: impl IntoIterator<Item = SmolStr>,
segments: impl IntoIterator<Item = Name>,
) -> Option<impl Iterator<Item = ItemInNs>> {
let items = to.id.resolver(self.db.upcast()).resolve_module_path_in_items(
self.db.upcast(),
&ModPath::from_segments(
hir_def::path::PathKind::Plain,
segments.into_iter().map(|it| Name::new(&it, SyntaxContextId::ROOT)),
),
&ModPath::from_segments(hir_def::path::PathKind::Plain, segments),
);
Some(items.iter_items().map(|(item, _)| item.into()))
}
@ -1722,10 +1719,11 @@ impl<'db> SemanticsImpl<'db> {
}
fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
assert!(root_node.parent().is_none());
let mut cache = self.root_to_file_cache.borrow_mut();
let prev = cache.insert(root_node, file_id);
assert!(prev.is_none() || prev == Some(file_id));
SourceToDefCache::cache(
&mut self.s2d_cache.borrow_mut().root_to_file_cache,
root_node,
file_id,
);
}
pub fn assert_contains_node(&self, node: &SyntaxNode) {
@ -1733,8 +1731,8 @@ impl<'db> SemanticsImpl<'db> {
}
fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
let cache = self.root_to_file_cache.borrow();
cache.get(root_node).copied()
let cache = self.s2d_cache.borrow();
cache.root_to_file_cache.get(root_node).copied()
}
fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
@ -1753,13 +1751,14 @@ impl<'db> SemanticsImpl<'db> {
let file_id = self.lookup(&root_node).unwrap_or_else(|| {
panic!(
"\n\nFailed to lookup {:?} in this Semantics.\n\
Make sure to use only query nodes, derived from this instance of Semantics.\n\
Make sure to only query nodes derived from this instance of Semantics.\n\
root node: {:?}\n\
known nodes: {}\n\n",
node,
root_node,
self.root_to_file_cache
self.s2d_cache
.borrow()
.root_to_file_cache
.keys()
.map(|it| format!("{it:?}"))
.collect::<Vec<_>>()
@ -1906,7 +1905,6 @@ impl<'db> SemanticsImpl<'db> {
}
fn macro_call_to_macro_id(
sema: &SemanticsImpl<'_>,
ctx: &mut SourceToDefCtx<'_, '_>,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
@ -1922,7 +1920,7 @@ fn macro_call_to_macro_id(
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(sema, macro_file);
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
@ -1934,7 +1932,7 @@ fn macro_call_to_macro_id(
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx.cache.get_or_insert_expansion(sema, macro_file);
let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};

View File

@ -56,7 +56,7 @@ impl ChildBySource for ImplId {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
data.items.iter().for_each(|&item| {
data.items.iter().for_each(|&(_, item)| {
add_assoc_item(db, res, file_id, item);
});
}

View File

@ -110,10 +110,7 @@ use syntax::{
AstNode, AstPtr, SyntaxNode,
};
use crate::{
db::HirDatabase, semantics::child_by_source::ChildBySource, InFile, InlineAsmOperand,
SemanticsImpl,
};
use crate::{db::HirDatabase, semantics::child_by_source::ChildBySource, InFile, InlineAsmOperand};
#[derive(Default)]
pub(super) struct SourceToDefCache {
@ -121,9 +118,21 @@ pub(super) struct SourceToDefCache {
expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
pub(super) included_file_cache: FxHashMap<EditionedFileId, Option<MacroFileId>>,
/// Root node to HirFileId cache
pub(super) root_to_file_cache: FxHashMap<SyntaxNode, HirFileId>,
}
impl SourceToDefCache {
pub(super) fn cache(
root_to_file_cache: &mut FxHashMap<SyntaxNode, HirFileId>,
root_node: SyntaxNode,
file_id: HirFileId,
) {
assert!(root_node.parent().is_none());
let prev = root_to_file_cache.insert(root_node, file_id);
assert!(prev.is_none() || prev == Some(file_id));
}
pub(super) fn get_or_insert_include_for(
&mut self,
db: &dyn HirDatabase,
@ -143,14 +152,14 @@ impl SourceToDefCache {
pub(super) fn get_or_insert_expansion(
&mut self,
sema: &SemanticsImpl<'_>,
db: &dyn HirDatabase,
macro_file: MacroFileId,
) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(sema.db.upcast());
let exp_info = macro_file.expansion_info(db.upcast());
let InMacroFile { file_id, value } = exp_info.expanded();
sema.cache(value, file_id.into());
Self::cache(&mut self.root_to_file_cache, value, file_id.into());
exp_info
})
@ -520,18 +529,11 @@ impl SourceToDefCtx<'_, '_> {
node: InFile<&SyntaxNode>,
mut cb: impl FnMut(&mut Self, InFile<SyntaxNode>) -> Option<T>,
) -> Option<T> {
use hir_expand::MacroFileIdExt;
let parent = |this: &mut Self, node: InFile<&SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file = node.file_id.macro_file()?;
let expansion_info = this
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(this.db.upcast()));
let expansion_info = this.cache.get_or_insert_expansion(this.db, macro_file);
expansion_info.arg().map(|node| node?.parent()).transpose()
}
};

View File

@ -322,6 +322,68 @@ impl SourceAnalyzer {
}
}
// If the method is `into()`, `try_into()`, or `parse()`, resolve it to `from`, `try_from`, or `from_str` respectively.
pub(crate) fn resolve_known_blanket_dual_impls(
&self,
db: &dyn HirDatabase,
call: &ast::MethodCallExpr,
) -> Option<Function> {
// e.g. if the method call is let b = a.into(),
// - receiver_type is A (type of a)
// - return_type is B (type of b)
// We will find the definition of B::from(a: A).
let callable = self.resolve_method_call_as_callable(db, call)?;
let (_, receiver_type) = callable.receiver_param(db)?;
let return_type = callable.return_type();
let (search_method, substs) = match call.name_ref()?.text().as_str() {
"into" => {
let trait_ =
self.resolver.resolve_known_trait(db.upcast(), &path![core::convert::From])?;
(
self.trait_fn(db, trait_, "from")?,
hir_ty::TyBuilder::subst_for_def(db, trait_, None)
.push(return_type.ty)
.push(receiver_type.ty)
.build(),
)
}
"try_into" => {
let trait_ = self
.resolver
.resolve_known_trait(db.upcast(), &path![core::convert::TryFrom])?;
(
self.trait_fn(db, trait_, "try_from")?,
hir_ty::TyBuilder::subst_for_def(db, trait_, None)
// If the method is try_into() or parse(), return_type is Result<T, Error>.
// Get T from type arguments of Result<T, Error>.
.push(return_type.type_arguments().next()?.ty)
.push(receiver_type.ty)
.build(),
)
}
"parse" => {
let trait_ =
self.resolver.resolve_known_trait(db.upcast(), &path![core::str::FromStr])?;
(
self.trait_fn(db, trait_, "from_str")?,
hir_ty::TyBuilder::subst_for_def(db, trait_, None)
.push(return_type.type_arguments().next()?.ty)
.build(),
)
}
_ => return None,
};
let found_method = self.resolve_impl_method_or_trait_def(db, search_method, substs);
// If found_method == search_method, the method in trait itself is resolved.
// It means the blanket dual impl is not found.
if found_method == search_method {
None
} else {
Some(found_method.into())
}
}
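// Editor's sketch (hypothetical user code, not part of the change): with the
// blanket-dual resolution above, features built on it can land on the user-written
// `From` impl instead of on `Into::into` itself:
//
//     struct Meters(f64);
//     struct Feet(f64);
//     impl From<Meters> for Feet {
//         fn from(m: Meters) -> Feet { Feet(m.0 * 3.28084) }
//     }
//     fn demo(m: Meters) -> Feet {
//         m.into() // resolves to `<Feet as From<Meters>>::from`
//     }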
pub(crate) fn resolve_expr_as_callable(
&self,
db: &dyn HirDatabase,
@ -1247,6 +1309,18 @@ impl SourceAnalyzer {
Some((trait_id, fn_id))
}
fn trait_fn(
&self,
db: &dyn HirDatabase,
trait_id: TraitId,
method_name: &str,
) -> Option<FunctionId> {
db.trait_data(trait_id).items.iter().find_map(|(item_name, item)| match item {
AssocItemId::FunctionId(t) if item_name.as_str() == method_name => Some(*t),
_ => None,
})
}
fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
self.infer.as_ref()?.type_of_expr_or_pat(self.expr_id(db, expr)?)
}

View File

@ -1,27 +1,34 @@
//! File symbol extraction.
use either::Either;
use hir_def::{
db::DefDatabase,
item_scope::ItemInNs,
item_scope::{ImportId, ImportOrExternCrate},
per_ns::Item,
src::{HasChildSource, HasSource},
AdtId, AssocItemId, DefWithBodyId, HasModule, ImplId, Lookup, MacroId, ModuleDefId, ModuleId,
TraitId,
visibility::{Visibility, VisibilityExplicitness},
AdtId, AssocItemId, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
ModuleDefId, ModuleId, TraitId,
};
use hir_expand::HirFileId;
use hir_expand::{name::Name, HirFileId};
use hir_ty::{
db::HirDatabase,
display::{hir_display_with_types_map, HirDisplay},
};
use intern::Symbol;
use rustc_hash::FxHashMap;
use span::Edition;
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
use crate::{Module, ModuleDef, Semantics};
pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
/// The actual data that is stored in the index. It should be as compact as
/// possible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileSymbol {
pub name: SmolStr,
pub name: Symbol,
pub def: ModuleDef,
pub loc: DeclarationLocation,
pub container_name: Option<SmolStr>,
@ -37,7 +44,7 @@ pub struct DeclarationLocation {
/// This points to the whole syntax node of the declaration.
pub ptr: SyntaxNodePtr,
/// This points to the [`syntax::ast::Name`] identifier of the declaration.
pub name_ptr: AstPtr<syntax::ast::Name>,
pub name_ptr: AstPtr<Either<syntax::ast::Name, syntax::ast::NameRef>>,
}
impl DeclarationLocation {
@ -55,7 +62,7 @@ struct SymbolCollectorWork {
pub struct SymbolCollector<'a> {
db: &'a dyn HirDatabase,
symbols: Vec<FileSymbol>,
symbols: FxIndexSet<FileSymbol>,
work: Vec<SymbolCollectorWork>,
current_container_name: Option<SmolStr>,
edition: Edition,
@ -86,11 +93,11 @@ impl<'a> SymbolCollector<'a> {
}
}
pub fn finish(self) -> Vec<FileSymbol> {
self.symbols
pub fn finish(self) -> Box<[FileSymbol]> {
self.symbols.into_iter().collect()
}
pub fn collect_module(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
pub fn collect_module(db: &dyn HirDatabase, module: Module) -> Box<[FileSymbol]> {
let mut symbol_collector = SymbolCollector::new(db);
symbol_collector.collect(module);
symbol_collector.finish()
@ -104,96 +111,174 @@ impl<'a> SymbolCollector<'a> {
}
fn collect_from_module(&mut self, module_id: ModuleId) {
let def_map = module_id.def_map(self.db.upcast());
let scope = &def_map[module_id.local_id].scope;
for module_def_id in scope.declarations() {
match module_def_id {
ModuleDefId::ModuleId(id) => self.push_module(id),
let push_decl = |this: &mut Self, def, name| {
match def {
ModuleDefId::ModuleId(id) => this.push_module(id, name),
ModuleDefId::FunctionId(id) => {
self.push_decl(id, false);
self.collect_from_body(id);
this.push_decl(id, name, false);
this.collect_from_body(id);
}
ModuleDefId::AdtId(AdtId::StructId(id)) => self.push_decl(id, false),
ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, false),
ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, false),
ModuleDefId::AdtId(AdtId::StructId(id)) => this.push_decl(id, name, false),
ModuleDefId::AdtId(AdtId::EnumId(id)) => this.push_decl(id, name, false),
ModuleDefId::AdtId(AdtId::UnionId(id)) => this.push_decl(id, name, false),
ModuleDefId::ConstId(id) => {
self.push_decl(id, false);
self.collect_from_body(id);
this.push_decl(id, name, false);
this.collect_from_body(id);
}
ModuleDefId::StaticId(id) => {
self.push_decl(id, false);
self.collect_from_body(id);
this.push_decl(id, name, false);
this.collect_from_body(id);
}
ModuleDefId::TraitId(id) => {
self.push_decl(id, false);
self.collect_from_trait(id);
this.push_decl(id, name, false);
this.collect_from_trait(id);
}
ModuleDefId::TraitAliasId(id) => {
self.push_decl(id, false);
this.push_decl(id, name, false);
}
ModuleDefId::TypeAliasId(id) => {
self.push_decl(id, false);
this.push_decl(id, name, false);
}
ModuleDefId::MacroId(id) => match id {
MacroId::Macro2Id(id) => self.push_decl(id, false),
MacroId::MacroRulesId(id) => self.push_decl(id, false),
MacroId::ProcMacroId(id) => self.push_decl(id, false),
MacroId::Macro2Id(id) => this.push_decl(id, name, false),
MacroId::MacroRulesId(id) => this.push_decl(id, name, false),
MacroId::ProcMacroId(id) => this.push_decl(id, name, false),
},
// Don't index these.
ModuleDefId::BuiltinType(_) => {}
ModuleDefId::EnumVariantId(_) => {}
}
}
};
// Nested trees are very common, so a cache here will hit a lot.
let import_child_source_cache = &mut FxHashMap::default();
let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId| {
let source = import_child_source_cache
.entry(i.import)
.or_insert_with(|| i.import.child_source(this.db.upcast()));
let Some(use_tree_src) = source.value.get(i.idx) else { return };
let Some(name_ptr) = use_tree_src
.rename()
.and_then(|rename| rename.name())
.map(Either::Left)
.or_else(|| use_tree_src.path()?.segment()?.name_ref().map(Either::Right))
.map(|it| AstPtr::new(&it))
else {
return;
};
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
name_ptr,
};
this.symbols.insert(FileSymbol {
name: name.symbol().clone(),
def: def.into(),
container_name: this.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
is_assoc: false,
});
};
let push_extern_crate =
|this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId| {
let loc = i.lookup(this.db.upcast());
let source = loc.source(this.db.upcast());
let Some(name_ptr) = source
.value
.rename()
.and_then(|rename| rename.name())
.map(Either::Left)
.or_else(|| source.value.name_ref().map(Either::Right))
.map(|it| AstPtr::new(&it))
else {
return;
};
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
name_ptr,
};
this.symbols.insert(FileSymbol {
name: name.symbol().clone(),
def: def.into(),
container_name: this.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
is_assoc: false,
});
};
let is_explicit_import = |vis| {
match vis {
Visibility::Module(_, VisibilityExplicitness::Explicit) => true,
Visibility::Module(_, VisibilityExplicitness::Implicit) => {
// consider imports in the crate root explicit, as these are visible
// crate-wide anyway
module_id.is_crate_root()
}
Visibility::Public => true,
}
};
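// Editor's note (interpretation of the check above; paths are hypothetical): a plain,
// non-`pub` import is still indexed when it sits in the crate root, e.g.
//
//     // lib.rs (crate root): indexed even without an explicit visibility
//     use foo::Bar;
//
//     mod inner {
//         use crate::foo::Bar; // only indexed if written with an explicit `pub`/`pub(...)`
//     }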
let def_map = module_id.def_map(self.db.upcast());
let scope = &def_map[module_id.local_id].scope;
for impl_id in scope.impls() {
self.collect_from_impl(impl_id);
}
// Record renamed imports.
// FIXME: In case it imports multiple items under different namespaces we just pick one arbitrarily
// for now.
for id in scope.imports() {
let source = id.import.child_source(self.db.upcast());
let Some(use_tree_src) = source.value.get(id.idx) else { continue };
let Some(rename) = use_tree_src.rename() else { continue };
let Some(name) = rename.name() else { continue };
let res = scope.fully_resolve_import(self.db.upcast(), id);
res.iter_items().for_each(|(item, _)| {
let def = match item {
ItemInNs::Types(def) | ItemInNs::Values(def) => def,
ItemInNs::Macros(def) => ModuleDefId::from(def),
for (name, Item { def, vis, import }) in scope.types() {
if let Some(i) = import {
if is_explicit_import(vis) {
match i {
ImportOrExternCrate::Import(i) => push_import(self, i, name, def),
ImportOrExternCrate::ExternCrate(i) => {
push_extern_crate(self, i, name, def)
}
}
}
.into();
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(use_tree_src.syntax()),
name_ptr: AstPtr::new(&name),
};
continue;
}
// self is a declaration
push_decl(self, def, name)
}
self.symbols.push(FileSymbol {
name: name.text().into(),
def,
container_name: self.current_container_name.clone(),
loc: dec_loc,
is_alias: false,
is_assoc: false,
});
});
for (name, Item { def, vis, import }) in scope.macros() {
if let Some(i) = import {
if is_explicit_import(vis) {
push_import(self, i, name, def.into());
}
continue;
}
// self is a declaration
push_decl(self, def.into(), name)
}
for (name, Item { def, vis, import }) in scope.values() {
if let Some(i) = import {
if is_explicit_import(vis) {
push_import(self, i, name, def);
}
continue;
}
// self is a declaration
push_decl(self, def, name)
}
for const_id in scope.unnamed_consts() {
self.collect_from_body(const_id);
}
for (_, id) in scope.legacy_macros() {
for (name, id) in scope.legacy_macros() {
for &id in id {
if id.module(self.db.upcast()) == module_id {
match id {
MacroId::Macro2Id(id) => self.push_decl(id, false),
MacroId::MacroRulesId(id) => self.push_decl(id, false),
MacroId::ProcMacroId(id) => self.push_decl(id, false),
MacroId::Macro2Id(id) => self.push_decl(id, name, false),
MacroId::MacroRulesId(id) => self.push_decl(id, name, false),
MacroId::ProcMacroId(id) => self.push_decl(id, name, false),
}
}
}
@ -223,8 +308,8 @@ impl<'a> SymbolCollector<'a> {
.to_smolstr(),
);
self.with_container_name(impl_name, |s| {
for &assoc_item_id in impl_data.items.iter() {
s.push_assoc_item(assoc_item_id)
for &(ref name, assoc_item_id) in &impl_data.items {
s.push_assoc_item(assoc_item_id, name)
}
})
}
@ -232,8 +317,8 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_trait(&mut self, trait_id: TraitId) {
let trait_data = self.db.trait_data(trait_id);
self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
for &(_, assoc_item_id) in &trait_data.items {
s.push_assoc_item(assoc_item_id);
for &(ref name, assoc_item_id) in &trait_data.items {
s.push_assoc_item(assoc_item_id, name);
}
});
}
@ -266,15 +351,15 @@ impl<'a> SymbolCollector<'a> {
}
}
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
fn push_assoc_item(&mut self, assoc_item_id: AssocItemId, name: &Name) {
match assoc_item_id {
AssocItemId::FunctionId(id) => self.push_decl(id, true),
AssocItemId::ConstId(id) => self.push_decl(id, true),
AssocItemId::TypeAliasId(id) => self.push_decl(id, true),
AssocItemId::FunctionId(id) => self.push_decl(id, name, true),
AssocItemId::ConstId(id) => self.push_decl(id, name, true),
AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true),
}
}
fn push_decl<'db, L>(&mut self, id: L, is_assoc: bool)
fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool)
where
L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
@ -287,13 +372,13 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
ptr: SyntaxNodePtr::new(source.value.syntax()),
name_ptr: AstPtr::new(&name_node),
name_ptr: AstPtr::new(&name_node).wrap_left(),
};
if let Some(attrs) = def.attrs(self.db) {
for alias in attrs.doc_aliases() {
self.symbols.push(FileSymbol {
name: alias.as_str().into(),
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
@ -303,8 +388,8 @@ impl<'a> SymbolCollector<'a> {
}
}
self.symbols.push(FileSymbol {
name: name_node.text().into(),
self.symbols.insert(FileSymbol {
name: name.symbol().clone(),
def,
container_name: self.current_container_name.clone(),
loc: dec_loc,
@ -313,7 +398,7 @@ impl<'a> SymbolCollector<'a> {
});
}
fn push_module(&mut self, module_id: ModuleId) {
fn push_module(&mut self, module_id: ModuleId, name: &Name) {
let def_map = module_id.def_map(self.db.upcast());
let module_data = &def_map[module_id.local_id];
let Some(declaration) = module_data.origin.declaration() else { return };
@ -322,15 +407,15 @@ impl<'a> SymbolCollector<'a> {
let dec_loc = DeclarationLocation {
hir_file_id: declaration.file_id,
ptr: SyntaxNodePtr::new(module.syntax()),
name_ptr: AstPtr::new(&name_node),
name_ptr: AstPtr::new(&name_node).wrap_left(),
};
let def = ModuleDef::Module(module_id.into());
if let Some(attrs) = def.attrs(self.db) {
for alias in attrs.doc_aliases() {
self.symbols.push(FileSymbol {
name: alias.as_str().into(),
self.symbols.insert(FileSymbol {
name: alias.clone(),
def,
loc: dec_loc.clone(),
container_name: self.current_container_name.clone(),
@ -340,8 +425,8 @@ impl<'a> SymbolCollector<'a> {
}
}
self.symbols.push(FileSymbol {
name: name_node.text().into(),
self.symbols.insert(FileSymbol {
name: name.symbol().clone(),
def: ModuleDef::Module(module_id.into()),
container_name: self.current_container_name.clone(),
loc: dec_loc,

View File

@ -109,6 +109,10 @@ impl<'a> AssistContext<'a> {
self.trimmed_range
}
pub(crate) fn source_file(&self) -> &SourceFile {
&self.source_file
}
pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
self.source_file.syntax().token_at_offset(self.offset())
}

View File

@ -212,8 +212,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
!hidden
})
.map(|(pat, _)| {
make::match_arm(iter::once(pat), None, make::ext::expr_todo())
.clone_for_update()
make::match_arm(pat, None, make::ext::expr_todo()).clone_for_update()
});
let catch_all_arm = new_match_arm_list
@ -243,12 +242,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
if needs_catch_all_arm && !has_catch_all_arm {
cov_mark::hit!(added_wildcard_pattern);
let arm = make::match_arm(
iter::once(make::wildcard_pat().into()),
None,
make::ext::expr_todo(),
)
.clone_for_update();
let arm =
make::match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo())
.clone_for_update();
todo_placeholders.push(arm.expr().unwrap());
added_arms.push(arm);
}

View File

@ -189,7 +189,7 @@ pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
/// This will create a turbofish generic arg list corresponding to the number of arguments
fn get_fish_head(make: &SyntaxFactory, number_of_arguments: usize) -> ast::GenericArgList {
let args = (0..number_of_arguments).map(|_| make::type_arg(make::ty_placeholder()).into());
make.turbofish_generic_arg_list(args)
make.generic_arg_list(args, true)
}
#[cfg(test)]

View File

@ -252,7 +252,7 @@ fn tail_cb_impl(edit: &mut SourceChangeBuilder, e: &ast::Expr) {
/// Add bang and parentheses to the expression.
fn add_bang_paren(expr: ast::Expr) -> ast::Expr {
make::expr_prefix(T![!], make::expr_paren(expr))
make::expr_prefix(T![!], make::expr_paren(expr)).into()
}
#[cfg(test)]

View File

@ -195,6 +195,7 @@ fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
make::tail_only_block_expr(true_expr),
Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))),
)
.into()
}
}

View File

@ -507,7 +507,7 @@ fn wrap_capture_in_deref_if_needed(
if does_autoderef {
return capture_name;
}
make::expr_prefix(T![*], capture_name)
make::expr_prefix(T![*], capture_name).into()
}
fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr {

View File

@ -97,7 +97,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>
);
for r in return_exprs {
let t = r.expr().unwrap_or_else(make::expr_unit);
let t = r.expr().unwrap_or_else(make::ext::expr_unit);
ted::replace(t.syntax(), wrap_ok(t.clone()).syntax().clone_for_update());
}

View File

@ -60,7 +60,7 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>)
.indent(while_indent_level);
let block_expr = if is_pattern_cond(while_cond.clone()) {
let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
let stmts = iter::once(make::expr_stmt(if_expr).into());
let stmts = iter::once(make::expr_stmt(if_expr.into()).into());
make::block_expr(stmts, None)
} else {
let if_cond = invert_boolean_expression(while_cond);

View File

@ -1128,7 +1128,10 @@ fn main {
destructure_tuple_binding_impl(acc, ctx, false)
}
pub(crate) fn check_in_place_assist(ra_fixture_before: &str, ra_fixture_after: &str) {
pub(crate) fn check_in_place_assist(
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
) {
check_assist_by_label(
in_place_assist,
ra_fixture_before,
@ -1138,7 +1141,10 @@ fn main {
);
}
pub(crate) fn check_sub_pattern_assist(ra_fixture_before: &str, ra_fixture_after: &str) {
pub(crate) fn check_sub_pattern_assist(
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
) {
check_assist_by_label(
assist,
ra_fixture_before,

View File

@ -1533,7 +1533,7 @@ impl FlowHandler {
.into(),
call_expr,
);
make::expr_if(condition.into(), block, None)
make::expr_if(condition.into(), block, None).into()
}
FlowHandler::IfOption { action } => {
let path = make::ext::ident_path("Some");
@ -1544,7 +1544,7 @@ impl FlowHandler {
let action_expr = action.make_result_handler(Some(value));
let action_stmt = make::expr_stmt(action_expr);
let then = make::block_expr(iter::once(action_stmt.into()), None);
make::expr_if(cond.into(), then, None)
make::expr_if(cond.into(), then, None).into()
}
FlowHandler::MatchOption { none } => {
let some_name = "value";
@ -1554,15 +1554,15 @@ impl FlowHandler {
let value_pat = make::ext::simple_ident_pat(make::name(some_name));
let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
let value = make::expr_path(make::ext::ident_path(some_name));
make::match_arm(iter::once(pat.into()), None, value)
make::match_arm(pat.into(), None, value)
};
let none_arm = {
let path = make::ext::ident_path("None");
let pat = make::path_pat(path);
make::match_arm(iter::once(pat), None, none.make_result_handler(None))
make::match_arm(pat, None, none.make_result_handler(None))
};
let arms = make::match_arm_list(vec![some_arm, none_arm]);
make::expr_match(call_expr, arms)
make::expr_match(call_expr, arms).into()
}
FlowHandler::MatchResult { err } => {
let ok_name = "value";
@ -1573,21 +1573,17 @@ impl FlowHandler {
let value_pat = make::ext::simple_ident_pat(make::name(ok_name));
let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
let value = make::expr_path(make::ext::ident_path(ok_name));
make::match_arm(iter::once(pat.into()), None, value)
make::match_arm(pat.into(), None, value)
};
let err_arm = {
let path = make::ext::ident_path("Err");
let value_pat = make::ext::simple_ident_pat(make::name(err_name));
let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
let value = make::expr_path(make::ext::ident_path(err_name));
make::match_arm(
iter::once(pat.into()),
None,
err.make_result_handler(Some(value)),
)
make::match_arm(pat.into(), None, err.make_result_handler(Some(value)))
};
let arms = make::match_arm_list(vec![ok_arm, err_arm]);
make::expr_match(call_expr, arms)
make::expr_match(call_expr, arms).into()
}
}
}
@ -1879,7 +1875,7 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
.iter()
.map(|var| path_expr_from_local(ctx, var.local, fun.mods.edition));
let expr = make::expr_tuple(exprs);
tail_expr = Some(expr);
tail_expr = Some(expr.into());
}
},
};
@ -1910,7 +1906,7 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
match &handler {
FlowHandler::None => block,
FlowHandler::Try { kind } => {
let block = with_default_tail_expr(block, make::expr_unit());
let block = with_default_tail_expr(block, make::ext::expr_unit());
map_tail_expr(block, |tail_expr| {
let constructor = match kind {
TryKind::Option => "Some",
@ -1924,7 +1920,7 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
FlowHandler::If { .. } => {
let controlflow_continue = make::expr_call(
make::expr_path(make::path_from_text("ControlFlow::Continue")),
make::arg_list(iter::once(make::expr_unit())),
make::arg_list([make::ext::expr_unit()]),
);
with_tail_expr(block, controlflow_continue)
}
@ -2127,17 +2123,17 @@ fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Op
FlowHandler::None | FlowHandler::Try { .. } => return None,
FlowHandler::If { .. } => make::expr_call(
make::expr_path(make::path_from_text("ControlFlow::Break")),
make::arg_list(iter::once(make::expr_unit())),
make::arg_list([make::ext::expr_unit()]),
),
FlowHandler::IfOption { .. } => {
let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
let args = make::arg_list(iter::once(expr));
let expr = arg_expr.unwrap_or_else(make::ext::expr_unit);
let args = make::arg_list([expr]);
make::expr_call(make::expr_path(make::ext::ident_path("Some")), args)
}
FlowHandler::MatchOption { .. } => make::expr_path(make::ext::ident_path("None")),
FlowHandler::MatchResult { .. } => {
let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
let args = make::arg_list(iter::once(expr));
let expr = arg_expr.unwrap_or_else(make::ext::expr_unit);
let args = make::arg_list([expr]);
make::expr_call(make::expr_path(make::ext::ident_path("Err")), args)
}
};
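A hand-written sketch of the call-site shape the `MatchOption` flow handler builds around the extracted function; the names and the fallback value below are made up, and this is not actual assist output:

```rust
// The extracted function returns Option; the rewritten call site unwraps the
// Some case and re-triggers the original early-exit control flow for None.
fn find_even(xs: &[i32]) -> Option<i32> {
    xs.iter().copied().find(|x| x % 2 == 0)
}

fn first_even_or_default(xs: &[i32]) -> i32 {
    let value = match find_even(xs) {
        Some(value) => value,
        None => return -1, // the original early exit, reconstructed by the handler
    };
    value
}

fn main() {
    assert_eq!(first_even_or_default(&[1, 3, 4]), 4);
    assert_eq!(first_even_or_default(&[1, 3]), -1);
}
```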

View File

@ -4,6 +4,7 @@ use ide_db::{
syntax_helpers::{suggest_name, LexedStr},
};
use syntax::{
algo::ancestors_at_offset,
ast::{
self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
AstNode,
@ -68,7 +69,10 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let node = if ctx.has_empty_selection() {
if let Some(t) = ctx.token_at_offset().find(|it| it.kind() == T![;]) {
t.parent().and_then(ast::ExprStmt::cast)?.syntax().clone()
} else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
} else if let Some(expr) = ancestors_at_offset(ctx.source_file().syntax(), ctx.offset())
.next()
.and_then(ast::Expr::cast)
{
expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
} else {
return None;
@ -469,11 +473,11 @@ mod tests {
extract_variable,
r#"
fn main() -> i32 {
if true {
if$0 true {
1
} else {
2
}$0
}
}
"#,
r#"
@ -581,11 +585,11 @@ fn main() {
extract_variable,
r#"
fn main() -> i32 {
if true {
if$0 true {
1
} else {
2
}$0
}
}
"#,
r#"
@ -676,11 +680,11 @@ fn main() {
extract_variable,
r#"
fn main() -> i32 {
if true {
if$0 true {
1
} else {
2
}$0
}
}
"#,
r#"

View File

@ -933,7 +933,7 @@ mod tests_setter {
use super::*;
fn check_not_applicable(ra_fixture: &str) {
fn check_not_applicable(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
check_assist_not_applicable(generate_setter, ra_fixture)
}

View File

@ -38,21 +38,21 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let macro_call = ctx.sema.to_def(&unexpanded)?;
let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
let expanded = prettify_macro_expansion(
ctx.db(),
expanded,
&span_map,
ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
);
let target_crate_id = ctx.sema.file_to_module_def(ctx.file_id())?.krate().into();
let text_range = unexpanded.syntax().text_range();
acc.add(
AssistId("inline_macro", AssistKind::RefactorInline),
"Inline macro".to_owned(),
text_range,
|builder| builder.replace(text_range, expanded.to_string()),
|builder| {
let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
// Don't call `prettify_macro_expansion()` outside the actual assist action; it does some heavy rowan tree manipulation,
// which can be very costly for big macros when it is done *even without the assist being invoked*.
let expanded = prettify_macro_expansion(ctx.db(), expanded, &span_map, target_crate_id);
builder.replace(text_range, expanded.to_string())
},
)
}
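The change above defers `prettify_macro_expansion()` into the assist's apply closure, so the heavy work only runs when the assist is actually invoked. A minimal sketch of that deferral pattern with illustrative names (not the rust-analyzer API):

```rust
struct Assist<'a> {
    label: &'static str,
    apply: Box<dyn FnOnce() -> String + 'a>,
}

fn expensive_prettify(expansion: &str) -> String {
    // Stand-in for the costly rowan tree manipulation mentioned in the comment.
    expansion.trim().replace(';', ";\n")
}

fn offer_inline_macro(expansion: &str) -> Assist<'_> {
    // Listing the assist stays cheap: nothing heavy runs until `apply` is called.
    Assist { label: "Inline macro", apply: Box::new(move || expensive_prettify(expansion)) }
}

fn main() {
    let assist = offer_inline_macro("let x = 1; let y = 2;");
    println!("offered: {}", assist.label);
    // Only when the user actually applies the assist is the expansion prettified.
    println!("{}", (assist.apply)());
}
```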

View File

@ -61,7 +61,7 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext<'_>)
};
edit.delete(guard.syntax().text_range());
edit.replace_ast(arm_expr, if_expr);
edit.replace_ast(arm_expr, if_expr.into());
},
)
}

View File

@ -102,7 +102,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
};
(range, None)
},
_ => (macro_call.syntax().text_range(), Some(make::expr_unit())),
_ => (macro_call.syntax().text_range(), Some(make::ext::expr_unit())),
}
}
}
@ -152,7 +152,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
exprs => {
let exprs = exprs.iter().cloned().map(replace_nested_dbgs);
let expr = make::expr_tuple(exprs);
(macro_call.syntax().text_range(), Some(expr))
(macro_call.syntax().text_range(), Some(expr.into()))
}
})
}
@ -209,7 +209,10 @@ mod tests {
use super::*;
fn check(ra_fixture_before: &str, ra_fixture_after: &str) {
fn check(
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
) {
check_assist(
remove_dbg,
&format!("fn main() {{\n{ra_fixture_before}\n}}"),

View File

@ -1,4 +1,4 @@
use std::iter::{self, successors};
use std::iter::successors;
use either::Either;
use ide_db::{
@ -8,11 +8,7 @@ use ide_db::{
RootDatabase,
};
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make, HasName,
},
ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory, HasName},
AstNode, TextRange, T,
};
@ -108,53 +104,58 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
AssistId("replace_if_let_with_match", AssistKind::RefactorRewrite),
format!("Replace if{let_} with match"),
available_range,
move |edit| {
move |builder| {
let make = SyntaxFactory::new();
let match_expr = {
let else_arm = make_else_arm(ctx, else_block, &cond_bodies);
let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies);
let make_match_arm = |(pat, body): (_, ast::BlockExpr)| {
let body = body.reset_indent().indent(IndentLevel(1));
let body = make.block_expr(body.statements(), body.tail_expr());
body.indent(IndentLevel::from(1));
let body = unwrap_trivial_block(body);
match pat {
Either::Left(pat) => {
make::match_arm(iter::once(pat), None, unwrap_trivial_block(body))
Either::Left(pat) => make.match_arm(pat, None, body),
Either::Right(_) if !pat_seen => {
make.match_arm(make.literal_pat("true").into(), None, body)
}
Either::Right(_) if !pat_seen => make::match_arm(
iter::once(make::literal_pat("true").into()),
None,
unwrap_trivial_block(body),
),
Either::Right(expr) => make::match_arm(
iter::once(make::wildcard_pat().into()),
Some(expr),
unwrap_trivial_block(body),
Either::Right(expr) => make.match_arm(
make.wildcard_pat().into(),
Some(make.match_guard(expr)),
body,
),
}
};
let arms = cond_bodies.into_iter().map(make_match_arm).chain(iter::once(else_arm));
let match_expr = make::expr_match(scrutinee_to_be_expr, make::match_arm_list(arms));
match_expr.indent(IndentLevel::from_node(if_expr.syntax()))
let arms = cond_bodies.into_iter().map(make_match_arm).chain([else_arm]);
let match_expr = make.expr_match(scrutinee_to_be_expr, make.match_arm_list(arms));
match_expr.indent(IndentLevel::from_node(if_expr.syntax()));
match_expr.into()
};
let has_preceding_if_expr =
if_expr.syntax().parent().is_some_and(|it| ast::IfExpr::can_cast(it.kind()));
let expr = if has_preceding_if_expr {
// make sure we replace the `else if let ...` with a block so we don't end up with `else expr`
make::block_expr(None, Some(match_expr)).into()
make.block_expr([], Some(match_expr)).into()
} else {
match_expr
};
edit.replace_ast::<ast::Expr>(if_expr.into(), expr);
let mut editor = builder.make_editor(if_expr.syntax());
editor.replace(if_expr.syntax(), expr.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
fn make_else_arm(
ctx: &AssistContext<'_>,
make: &SyntaxFactory,
else_block: Option<ast::BlockExpr>,
conditionals: &[(Either<ast::Pat, ast::Expr>, ast::BlockExpr)],
) -> ast::MatchArm {
let (pattern, expr) = if let Some(else_block) = else_block {
let pattern = match conditionals {
[(Either::Right(_), _)] => make::literal_pat("false").into(),
[(Either::Right(_), _)] => make.literal_pat("false").into(),
[(Either::Left(pat), _)] => match ctx
.sema
.type_of_pat(pat)
@ -164,24 +165,24 @@ fn make_else_arm(
if does_pat_match_variant(pat, &it.sad_pattern()) {
it.happy_pattern_wildcard()
} else if does_pat_variant_nested_or_literal(ctx, pat) {
make::wildcard_pat().into()
make.wildcard_pat().into()
} else {
it.sad_pattern()
}
}
None => make::wildcard_pat().into(),
None => make.wildcard_pat().into(),
},
_ => make::wildcard_pat().into(),
_ => make.wildcard_pat().into(),
};
(pattern, unwrap_trivial_block(else_block))
} else {
let pattern = match conditionals {
[(Either::Right(_), _)] => make::literal_pat("false").into(),
_ => make::wildcard_pat().into(),
[(Either::Right(_), _)] => make.literal_pat("false").into(),
_ => make.wildcard_pat().into(),
};
(pattern, make::expr_unit())
(pattern, make.expr_unit())
};
make::match_arm(iter::once(pattern), None, expr)
make.match_arm(pattern, None, expr)
}
// Assist: replace_match_with_if_let
@ -247,21 +248,21 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
}
_ => " let",
};
let target = match_expr.syntax().text_range();
acc.add(
AssistId("replace_match_with_if_let", AssistKind::RefactorRewrite),
format!("Replace match with if{let_}"),
target,
move |edit| {
fn make_block_expr(expr: ast::Expr) -> ast::BlockExpr {
match_expr.syntax().text_range(),
move |builder| {
let make = SyntaxFactory::new();
let make_block_expr = |expr: ast::Expr| {
// Blocks with modifiers (unsafe, async, etc.) are parsed as BlockExpr, but are
// formatted without enclosing braces. If we encounter such block exprs,
// wrap them in another BlockExpr.
match expr {
ast::Expr::BlockExpr(block) if block.modifier().is_none() => block,
expr => make::block_expr(iter::empty(), Some(expr)),
expr => make.block_expr([], Some(expr)),
}
}
};
let condition = match if_let_pat {
ast::Pat::LiteralPat(p)
@ -272,20 +273,25 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
ast::Pat::LiteralPat(p)
if p.literal().is_some_and(|it| it.token().kind() == T![false]) =>
{
make::expr_prefix(T![!], scrutinee)
make.expr_prefix(T![!], scrutinee).into()
}
_ => make::expr_let(if_let_pat, scrutinee).into(),
_ => make.expr_let(if_let_pat, scrutinee).into(),
};
let then_block = make_block_expr(then_expr.reset_indent());
let then_expr = then_expr.clone_for_update();
then_expr.reindent_to(IndentLevel::single());
let then_block = make_block_expr(then_expr);
let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
let if_let_expr = make::expr_if(
let if_let_expr = make.expr_if(
condition,
then_block,
else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
)
.indent(IndentLevel::from_node(match_expr.syntax()));
);
if_let_expr.indent(IndentLevel::from_node(match_expr.syntax()));
edit.replace_ast::<ast::Expr>(match_expr.into(), if_let_expr);
let mut editor = builder.make_editor(match_expr.syntax());
editor.replace(match_expr.syntax(), if_let_expr.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
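The `has_preceding_if_expr` branch above wraps the generated `match` in a block because `else` must be followed by a block or another `if`; a bare `else match … { }` does not parse. A hand-written example of the shape the assist therefore has to produce:

```rust
fn describe(flag: bool, x: Option<i32>) -> &'static str {
    if flag {
        "flag set"
    } else {
        // Replacing an `else if let` with a match requires this surrounding
        // block; `else match x { … }` would be a syntax error.
        match x {
            Some(_) => "has value",
            None => "empty",
        }
    }
}

fn main() {
    assert_eq!(describe(false, Some(1)), "has value");
    println!("{}", describe(false, None));
}
```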

View File

@ -1,12 +1,6 @@
use std::iter::once;
use ide_db::ty_filter::TryEnum;
use syntax::{
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make,
},
ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory},
AstNode, T,
};
@ -47,7 +41,9 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>
AssistId("replace_let_with_if_let", AssistKind::RefactorRewrite),
"Replace let with if let",
target,
|edit| {
|builder| {
let mut editor = builder.make_editor(let_stmt.syntax());
let make = SyntaxFactory::new();
let ty = ctx.sema.type_of_expr(&init);
let happy_variant = ty
.and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
@ -55,17 +51,18 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_>
let pat = match happy_variant {
None => original_pat,
Some(var_name) => {
make::tuple_struct_pat(make::ext::ident_path(var_name), once(original_pat))
.into()
make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into()
}
};
let block =
make::ext::empty_block_expr().indent(IndentLevel::from_node(let_stmt.syntax()));
let if_ = make::expr_if(make::expr_let(pat, init).into(), block, None);
let stmt = make::expr_stmt(if_);
let block = make.block_expr([], None);
block.indent(IndentLevel::from_node(let_stmt.syntax()));
let if_expr = make.expr_if(make.expr_let(pat, init).into(), block, None);
let if_stmt = make.expr_stmt(if_expr.into());
edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt));
editor.replace(let_stmt.syntax(), if_stmt.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}

View File

@ -71,19 +71,17 @@ pub(crate) fn replace_try_expr_with_match(
};
let happy_arm = make::match_arm(
iter::once(
try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
),
try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
None,
make::expr_path(make::ext::ident_path("it")),
);
let sad_arm = make::match_arm(iter::once(sad_pat), None, sad_expr);
let sad_arm = make::match_arm(sad_pat, None, sad_expr);
let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);
let expr_match = make::expr_match(expr, match_arm_list)
.indent(IndentLevel::from_node(qm_kw_parent.syntax()));
edit.replace_ast::<ast::Expr>(qm_kw_parent.into(), expr_match);
edit.replace_ast::<ast::Expr>(qm_kw_parent.into(), expr_match.into());
},
)
}

View File

@ -54,13 +54,9 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?));
// FIXME: We should add a leading pipe if the original arm has one.
let new_match_arm = make::match_arm(
pats_after,
match_arm.guard().and_then(|guard| guard.condition()),
match_arm_body,
)
.clone_for_update();
let new_pat = make::or_pat(pats_after, or_pat.leading_pipe().is_some());
let new_match_arm =
make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
let mut pipe_index = pipe_token.index();
if pipe_token

View File

@ -61,7 +61,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
}
}
None => {
let empty_tuple = make::expr_tuple([]);
let empty_tuple = make::ext::expr_unit();
make::let_stmt(pattern, ty, Some(empty_tuple)).to_string()
}
};

View File

@ -1,11 +1,11 @@
use either::Either;
use ide_db::{
famous_defs::FamousDefs,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use itertools::Itertools;
use syntax::{
ast::{self, Expr, HasGenericArgs},
match_ast, AstNode, NodeOrToken, SyntaxKind, TextRange,
ast::{self, syntax_factory::SyntaxFactory, HasArgList, HasGenericArgs},
match_ast, AstNode, NodeOrToken, SyntaxKind,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -39,11 +39,11 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
let parent = ret_type.syntax().parent()?;
let body = match_ast! {
let body_expr = match_ast! {
match parent {
ast::Fn(func) => func.body()?,
ast::Fn(func) => func.body()?.into(),
ast::ClosureExpr(closure) => match closure.body()? {
Expr::BlockExpr(block) => block,
ast::Expr::BlockExpr(block) => block.into(),
// closures require a block when a return type is specified
_ => return None,
},
@ -65,72 +65,110 @@ pub(crate) fn unwrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let happy_type = extract_wrapped_type(type_ref)?;
acc.add(kind.assist_id(), kind.label(), type_ref.syntax().text_range(), |builder| {
let body = ast::Expr::BlockExpr(body);
let mut editor = builder.make_editor(&parent);
let make = SyntaxFactory::new();
let mut exprs_to_unwrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e);
walk_expr(&body, &mut |expr| {
if let Expr::ReturnExpr(ret_expr) = expr {
walk_expr(&body_expr, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr {
if let Some(ret_expr_arg) = &ret_expr.expr() {
for_each_tail_expr(ret_expr_arg, tail_cb);
}
}
});
for_each_tail_expr(&body, tail_cb);
for_each_tail_expr(&body_expr, tail_cb);
let is_unit_type = is_unit_type(&happy_type);
if is_unit_type {
let mut text_range = ret_type.syntax().text_range();
if let Some(NodeOrToken::Token(token)) = ret_type.syntax().next_sibling_or_token() {
if token.kind() == SyntaxKind::WHITESPACE {
text_range = TextRange::new(text_range.start(), token.text_range().end());
editor.delete(token);
}
}
builder.delete(text_range);
editor.delete(ret_type.syntax());
} else {
builder.replace(type_ref.syntax().text_range(), happy_type.syntax().text());
editor.replace(type_ref.syntax(), happy_type.syntax());
}
for ret_expr_arg in exprs_to_unwrap {
let ret_expr_str = ret_expr_arg.to_string();
let mut final_placeholder = None;
for tail_expr in exprs_to_unwrap {
match &tail_expr {
ast::Expr::CallExpr(call_expr) => {
let ast::Expr::PathExpr(path_expr) = call_expr.expr().unwrap() else {
continue;
};
let needs_replacing = match kind {
UnwrapperKind::Option => ret_expr_str.starts_with("Some("),
UnwrapperKind::Result => {
ret_expr_str.starts_with("Ok(") || ret_expr_str.starts_with("Err(")
}
};
let path_str = path_expr.path().unwrap().to_string();
let needs_replacing = match kind {
UnwrapperKind::Option => path_str == "Some",
UnwrapperKind::Result => path_str == "Ok" || path_str == "Err",
};
if needs_replacing {
let arg_list = ret_expr_arg.syntax().children().find_map(ast::ArgList::cast);
if let Some(arg_list) = arg_list {
if !needs_replacing {
continue;
}
let arg_list = call_expr.arg_list().unwrap();
if is_unit_type {
match ret_expr_arg.syntax().prev_sibling_or_token() {
// Useful to delete the entire line without leaving trailing whitespaces
Some(whitespace) => {
let new_range = TextRange::new(
whitespace.text_range().start(),
ret_expr_arg.syntax().text_range().end(),
);
builder.delete(new_range);
let tail_parent = tail_expr
.syntax()
.parent()
.and_then(Either::<ast::ReturnExpr, ast::StmtList>::cast)
.unwrap();
match tail_parent {
Either::Left(ret_expr) => {
editor.replace(ret_expr.syntax(), make.expr_return(None).syntax())
}
None => {
builder.delete(ret_expr_arg.syntax().text_range());
Either::Right(stmt_list) => {
let new_block = if stmt_list.statements().next().is_none() {
make.expr_empty_block()
} else {
make.block_expr(stmt_list.statements(), None)
};
editor.replace(
stmt_list.syntax(),
new_block.stmt_list().unwrap().syntax(),
);
}
}
} else {
builder.replace(
ret_expr_arg.syntax().text_range(),
arg_list.args().join(", "),
);
} else if let Some(first_arg) = arg_list.args().next() {
editor.replace(tail_expr.syntax(), first_arg.syntax());
}
}
} else if matches!(kind, UnwrapperKind::Option if ret_expr_str == "None") {
builder.replace(ret_expr_arg.syntax().text_range(), "()");
ast::Expr::PathExpr(path_expr) => {
let UnwrapperKind::Option = kind else {
continue;
};
if path_expr.path().unwrap().to_string() != "None" {
continue;
}
let new_tail_expr = make.expr_unit();
editor.replace(path_expr.syntax(), new_tail_expr.syntax());
if let Some(cap) = ctx.config.snippet_cap {
editor.add_annotation(
new_tail_expr.syntax(),
builder.make_placeholder_snippet(cap),
);
final_placeholder = Some(new_tail_expr);
}
}
_ => (),
}
}
if let Some(cap) = ctx.config.snippet_cap {
if let Some(final_placeholder) = final_placeholder {
editor.add_annotation(final_placeholder.syntax(), builder.make_tabstop_after(cap));
}
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
})
}
@ -168,12 +206,12 @@ impl UnwrapperKind {
fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
match e {
Expr::BreakExpr(break_expr) => {
ast::Expr::BreakExpr(break_expr) => {
if let Some(break_expr_arg) = break_expr.expr() {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
Expr::ReturnExpr(_) => {
ast::Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
@ -238,8 +276,7 @@ fn foo() -> Option<()$0> {
}
"#,
r#"
fn foo() {
}
fn foo() {}
"#,
"Unwrap Option return type",
);
@ -254,8 +291,7 @@ fn foo() -> Option<()$0>{
}
"#,
r#"
fn foo() {
}
fn foo() {}
"#,
"Unwrap Option return type",
);
@ -280,7 +316,42 @@ fn foo() -> i32 {
if true {
42
} else {
()
${1:()}$0
}
}
"#,
"Unwrap Option return type",
);
}
#[test]
fn unwrap_option_return_type_multi_none() {
check_assist_by_label(
unwrap_return_type,
r#"
//- minicore: option
fn foo() -> Option<i3$02> {
if false {
return None;
}
if true {
Some(42)
} else {
None
}
}
"#,
r#"
fn foo() -> i32 {
if false {
return ${1:()};
}
if true {
42
} else {
${2:()}$0
}
}
"#,
@ -1262,8 +1333,7 @@ fn foo() -> Result<(), Box<dyn Error$0>> {
}
"#,
r#"
fn foo() {
}
fn foo() {}
"#,
"Unwrap Result return type",
);
@ -1278,8 +1348,7 @@ fn foo() -> Result<(), Box<dyn Error$0>>{
}
"#,
r#"
fn foo() {
}
fn foo() {}
"#,
"Unwrap Result return type",
);
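The walk in the hunks above collects every expression that becomes the function's value: the body's tail expression plus the arguments of `return` (and, via `tail_cb_impl`, `break`). A toy sketch of that idea over a made-up mini-AST, not the rust-analyzer syntax types; it covers the `return` case only:

```rust
// The value a body produces is either its final expression or the argument of
// a `return`, so the walker recurses into those and records everything else.
#[derive(Debug)]
enum Expr {
    Lit(i32),
    Return(Box<Expr>),
    If { then_tail: Box<Expr>, else_tail: Box<Expr> },
}

fn collect_tails<'a>(expr: &'a Expr, acc: &mut Vec<&'a Expr>) {
    match expr {
        Expr::Return(inner) => collect_tails(inner, acc),
        Expr::If { then_tail, else_tail } => {
            collect_tails(then_tail, acc);
            collect_tails(else_tail, acc);
        }
        other => acc.push(other),
    }
}

fn main() {
    // Roughly `if c { return 1 } else { 2 }`: both `1` and `2` need rewrapping.
    let body = Expr::If {
        then_tail: Box::new(Expr::Return(Box::new(Expr::Lit(1)))),
        else_tail: Box::new(Expr::Lit(2)),
    };
    let mut tails = Vec::new();
    collect_tails(&body, &mut tails);
    println!("{} tail expressions found", tails.len()); // 2
}
```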

View File

@ -6,10 +6,9 @@ use ide_db::{
famous_defs::FamousDefs,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use itertools::Itertools;
use syntax::{
ast::{self, make, Expr, HasGenericParams},
match_ast, ted, AstNode, ToSmolStr,
ast::{self, syntax_factory::SyntaxFactory, Expr, HasGenericArgs, HasGenericParams},
match_ast, AstNode,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -43,11 +42,11 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
let parent = ret_type.syntax().parent()?;
let body = match_ast! {
let body_expr = match_ast! {
match parent {
ast::Fn(func) => func.body()?,
ast::Fn(func) => func.body()?.into(),
ast::ClosureExpr(closure) => match closure.body()? {
Expr::BlockExpr(block) => block,
Expr::BlockExpr(block) => block.into(),
// closures require a block when a return type is specified
_ => return None,
},
@ -75,56 +74,65 @@ pub(crate) fn wrap_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
kind.assist_id(),
kind.label(),
type_ref.syntax().text_range(),
|edit| {
let alias = wrapper_alias(ctx, &core_wrapper, type_ref, kind.symbol());
let new_return_ty =
alias.unwrap_or_else(|| kind.wrap_type(type_ref)).clone_for_update();
let body = edit.make_mut(ast::Expr::BlockExpr(body.clone()));
|builder| {
let mut editor = builder.make_editor(&parent);
let make = SyntaxFactory::new();
let alias = wrapper_alias(ctx, &make, &core_wrapper, type_ref, kind.symbol());
let new_return_ty = alias.unwrap_or_else(|| match kind {
WrapperKind::Option => make.ty_option(type_ref.clone()),
WrapperKind::Result => make.ty_result(type_ref.clone(), make.ty_infer().into()),
});
let mut exprs_to_wrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
walk_expr(&body, &mut |expr| {
walk_expr(&body_expr, &mut |expr| {
if let Expr::ReturnExpr(ret_expr) = expr {
if let Some(ret_expr_arg) = &ret_expr.expr() {
for_each_tail_expr(ret_expr_arg, tail_cb);
}
}
});
for_each_tail_expr(&body, tail_cb);
for_each_tail_expr(&body_expr, tail_cb);
for ret_expr_arg in exprs_to_wrap {
let happy_wrapped = make::expr_call(
make::expr_path(make::ext::ident_path(kind.happy_ident())),
make::arg_list(iter::once(ret_expr_arg.clone())),
)
.clone_for_update();
ted::replace(ret_expr_arg.syntax(), happy_wrapped.syntax());
let happy_wrapped = make.expr_call(
make.expr_path(make.ident_path(kind.happy_ident())),
make.arg_list(iter::once(ret_expr_arg.clone())),
);
editor.replace(ret_expr_arg.syntax(), happy_wrapped.syntax());
}
let old_return_ty = edit.make_mut(type_ref.clone());
ted::replace(old_return_ty.syntax(), new_return_ty.syntax());
editor.replace(type_ref.syntax(), new_return_ty.syntax());
if let WrapperKind::Result = kind {
// Add a placeholder snippet at the first generic argument that doesn't equal the return type.
// This is normally the error type, but that may not be the case when we inserted a type alias.
let args =
new_return_ty.syntax().descendants().find_map(ast::GenericArgList::cast);
let error_type_arg = args.and_then(|list| {
list.generic_args().find(|arg| match arg {
ast::GenericArg::TypeArg(_) => {
arg.syntax().text() != type_ref.syntax().text()
}
ast::GenericArg::LifetimeArg(_) => false,
_ => true,
})
let args = new_return_ty
.path()
.unwrap()
.segment()
.unwrap()
.generic_arg_list()
.unwrap();
let error_type_arg = args.generic_args().find(|arg| match arg {
ast::GenericArg::TypeArg(_) => {
arg.syntax().text() != type_ref.syntax().text()
}
ast::GenericArg::LifetimeArg(_) => false,
_ => true,
});
if let Some(error_type_arg) = error_type_arg {
if let Some(cap) = ctx.config.snippet_cap {
edit.add_placeholder_snippet(cap, error_type_arg);
editor.add_annotation(
error_type_arg.syntax(),
builder.make_placeholder_snippet(cap),
);
}
}
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
);
}
@ -176,22 +184,16 @@ impl WrapperKind {
WrapperKind::Result => hir::sym::Result.clone(),
}
}
fn wrap_type(&self, type_ref: &ast::Type) -> ast::Type {
match self {
WrapperKind::Option => make::ext::ty_option(type_ref.clone()),
WrapperKind::Result => make::ext::ty_result(type_ref.clone(), make::ty_placeholder()),
}
}
}
// Try to find a wrapper type alias in the current scope (shadowing the default).
fn wrapper_alias(
ctx: &AssistContext<'_>,
make: &SyntaxFactory,
core_wrapper: &hir::Enum,
ret_type: &ast::Type,
wrapper: hir::Symbol,
) -> Option<ast::Type> {
) -> Option<ast::PathType> {
let wrapper_path = hir::ModPath::from_segments(
hir::PathKind::Plain,
iter::once(hir::Name::new_symbol_root(wrapper)),
@ -207,25 +209,28 @@ fn wrapper_alias(
})
.find_map(|alias| {
let mut inserted_ret_type = false;
let generic_params = alias
.source(ctx.db())?
.value
.generic_param_list()?
.generic_params()
.map(|param| match param {
// Replace the very first type parameter with the functions return type.
ast::GenericParam::TypeParam(_) if !inserted_ret_type => {
inserted_ret_type = true;
ret_type.to_smolstr()
let generic_args =
alias.source(ctx.db())?.value.generic_param_list()?.generic_params().map(|param| {
match param {
// Replace the very first type parameter with the function's return type.
ast::GenericParam::TypeParam(_) if !inserted_ret_type => {
inserted_ret_type = true;
make.type_arg(ret_type.clone()).into()
}
ast::GenericParam::LifetimeParam(_) => {
make.lifetime_arg(make.lifetime("'_")).into()
}
_ => make.type_arg(make.ty_infer().into()).into(),
}
ast::GenericParam::LifetimeParam(_) => make::lifetime("'_").to_smolstr(),
_ => make::ty_placeholder().to_smolstr(),
})
.join(", ");
});
let name = alias.name(ctx.db());
let name = name.as_str();
Some(make::ty(&format!("{name}<{generic_params}>")))
let generic_arg_list = make.generic_arg_list(generic_args, false);
let path = make.path_unqualified(
make.path_segment_generics(make.name_ref(name.as_str()), generic_arg_list),
);
Some(make.ty_path(path))
})
})
}
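A hand-written before/after of the alias handling in `wrapper_alias`: when a local `type Result<T> = …` shadows the prelude `Result`, the assist is expected to build the new return type from that alias instead of emitting `Result<T, _>`. The error type name below is made up:

```rust
#[derive(Debug, PartialEq)]
struct MyError;

// A crate-local alias that shadows the prelude `Result`.
type Result<T> = core::result::Result<T, MyError>;

// Before the assist: `fn answer() -> i32 { 42 }`
// After "Wrap return type in Result", with the alias in scope:
fn answer() -> Result<i32> {
    Ok(42)
}

fn main() {
    assert_eq!(answer(), Ok(42));
}
```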

View File

@ -77,7 +77,11 @@ pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
}
#[track_caller]
pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_after: &str) {
pub(crate) fn check_assist(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
) {
let ra_fixture_after = trim_indent(ra_fixture_after);
check(assist, ra_fixture_before, ExpectedResult::After(&ra_fixture_after), None);
}
@ -85,8 +89,8 @@ pub(crate) fn check_assist(assist: Handler, ra_fixture_before: &str, ra_fixture_
#[track_caller]
pub(crate) fn check_assist_no_snippet_cap(
assist: Handler,
ra_fixture_before: &str,
ra_fixture_after: &str,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
) {
let ra_fixture_after = trim_indent(ra_fixture_after);
check_with_config(
@ -101,8 +105,8 @@ pub(crate) fn check_assist_no_snippet_cap(
#[track_caller]
pub(crate) fn check_assist_import_one(
assist: Handler,
ra_fixture_before: &str,
ra_fixture_after: &str,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
) {
let ra_fixture_after = trim_indent(ra_fixture_after);
check_with_config(
@ -118,8 +122,8 @@ pub(crate) fn check_assist_import_one(
// so this is here to allow you to choose.
pub(crate) fn check_assist_by_label(
assist: Handler,
ra_fixture_before: &str,
ra_fixture_after: &str,
#[rust_analyzer::rust_fixture] ra_fixture_before: &str,
#[rust_analyzer::rust_fixture] ra_fixture_after: &str,
label: &str,
) {
let ra_fixture_after = trim_indent(ra_fixture_after);
@ -130,22 +134,36 @@ pub(crate) fn check_assist_by_label(
// `extract_ranges` and mark the target as `<target> </target>` in the
// fixture?
#[track_caller]
pub(crate) fn check_assist_target(assist: Handler, ra_fixture: &str, target: &str) {
pub(crate) fn check_assist_target(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
target: &str,
) {
check(assist, ra_fixture, ExpectedResult::Target(target), None);
}
#[track_caller]
pub(crate) fn check_assist_not_applicable(assist: Handler, ra_fixture: &str) {
pub(crate) fn check_assist_not_applicable(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) {
check(assist, ra_fixture, ExpectedResult::NotApplicable, None);
}
#[track_caller]
pub(crate) fn check_assist_not_applicable_by_label(assist: Handler, ra_fixture: &str, label: &str) {
pub(crate) fn check_assist_not_applicable_by_label(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
label: &str,
) {
check(assist, ra_fixture, ExpectedResult::NotApplicable, Some(label));
}
#[track_caller]
pub(crate) fn check_assist_not_applicable_for_import_one(assist: Handler, ra_fixture: &str) {
pub(crate) fn check_assist_not_applicable_for_import_one(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) {
check_with_config(
TEST_CONFIG_IMPORT_ONE,
assist,
@ -157,7 +175,10 @@ pub(crate) fn check_assist_not_applicable_for_import_one(assist: Handler, ra_fix
/// Check assist in unresolved state. Useful to check assists for lazy computation.
#[track_caller]
pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
pub(crate) fn check_assist_unresolved(
assist: Handler,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
) {
check(assist, ra_fixture, ExpectedResult::Unresolved, None);
}

View File

@ -246,7 +246,7 @@ pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
}
pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
invert_special_case(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr))
invert_special_case(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr).into())
}
fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
@ -262,7 +262,7 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
T![>] => T![<=],
T![>=] => T![<],
// Parenthesize other expressions before prefixing `!`
_ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone()))),
_ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone())).into()),
};
ted::replace(op_token, make::token(rev_token));
Some(bin.into())
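A value-level check of the rewrites `invert_special_case` performs (hand-written; the assist rewrites syntax, not values): comparison operators get flipped, and anything without a flipped form is parenthesised before the `!` prefix:

```rust
fn main() {
    let (a, b) = (2, 3);
    // `a < b` inverts to `a >= b` rather than `!(a < b)`.
    assert_eq!(!(a < b), a >= b);
    // A call has no flipped form, so it becomes `!(is_small(a))`.
    let is_small = |x: i32| x < 10;
    assert_eq!(!is_small(a), !(is_small(a)));
    println!("inversions agree");
}
```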

View File

@ -66,7 +66,7 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let pat = make::record_pat(variant_name.clone(), pats.into_iter());
let fields = make::record_expr_field_list(fields);
let record_expr = make::record_expr(variant_name, fields).into();
arms.push(make::match_arm(Some(pat.into()), None, record_expr));
arms.push(make::match_arm(pat.into(), None, record_expr));
}
// => match self { Self::Name(arg1) => Self::Name(arg1.clone()) }
@ -84,21 +84,21 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
let struct_name = make::expr_path(variant_name);
let tuple_expr = make::expr_call(struct_name, make::arg_list(fields));
arms.push(make::match_arm(Some(pat.into()), None, tuple_expr));
arms.push(make::match_arm(pat.into(), None, tuple_expr));
}
// => match self { Self::Name => Self::Name }
None => {
let pattern = make::path_pat(variant_name.clone());
let variant_expr = make::expr_path(variant_name);
arms.push(make::match_arm(Some(pattern), None, variant_expr));
arms.push(make::match_arm(pattern, None, variant_expr));
}
}
}
let match_target = make::expr_path(make::ext::ident_path("self"));
let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
make::expr_match(match_target, list)
make::expr_match(match_target, list).into()
}
ast::Adt::Struct(strukt) => {
match strukt.field_list() {
@ -190,7 +190,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
// => MyStruct { fields.. } => f.debug_struct("MyStruct")...finish(),
let pat = make::record_pat(variant_name.clone(), pats.into_iter());
arms.push(make::match_arm(Some(pat.into()), None, expr));
arms.push(make::match_arm(pat.into(), None, expr));
}
Some(ast::FieldList::TupleFieldList(list)) => {
// => f.debug_tuple(name)
@ -223,7 +223,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
// => MyStruct (fields..) => f.debug_tuple("MyStruct")...finish(),
let pat = make::tuple_struct_pat(variant_name.clone(), pats.into_iter());
arms.push(make::match_arm(Some(pat.into()), None, expr));
arms.push(make::match_arm(pat.into(), None, expr));
}
None => {
let fmt_string = make::expr_literal(&(format!("\"{name}\""))).into();
@ -232,7 +232,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let macro_call = make::expr_macro_call(macro_name, args);
let variant_name = make::path_pat(variant_name);
arms.push(make::match_arm(Some(variant_name), None, macro_call));
arms.push(make::match_arm(variant_name, None, macro_call));
}
}
}
@ -241,7 +241,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
let match_expr = make::expr_match(match_target, list);
let body = make::block_expr(None, Some(match_expr));
let body = make::block_expr(None, Some(match_expr.into()));
let body = body.indent(ast::edit::IndentLevel(1));
ted::replace(func.body()?.syntax(), body.clone_for_update().syntax());
Some(())
@ -485,7 +485,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -
let tuple = make::tuple_pat(vec![left.into(), right.into()]);
if let Some(expr) = expr {
arms.push(make::match_arm(Some(tuple.into()), None, expr));
arms.push(make::match_arm(tuple.into(), None, expr));
}
}
@ -518,7 +518,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -
let tuple = make::tuple_pat(vec![left.into(), right.into()]);
if let Some(expr) = expr {
arms.push(make::match_arm(Some(tuple.into()), None, expr));
arms.push(make::match_arm(tuple.into(), None, expr));
}
}
None => continue,
@ -538,12 +538,12 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -
} else {
eq_check
};
arms.push(make::match_arm(Some(lhs), None, rhs));
arms.push(make::match_arm(lhs, None, rhs));
}
let match_target = make::expr_tuple(vec![lhs_name, rhs_name]);
let match_target = make::expr_tuple([lhs_name, rhs_name]).into();
let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
make::expr_match(match_target, list)
make::expr_match(match_target, list).into()
}
};
@ -599,15 +599,15 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>)
let variant_name =
make::path_pat(make::ext::path_from_idents(["core", "cmp", "Ordering", "Equal"])?);
let lhs = make::tuple_struct_pat(make::ext::path_from_idents(["Some"])?, [variant_name]);
arms.push(make::match_arm(Some(lhs.into()), None, make::expr_empty_block()));
arms.push(make::match_arm(lhs.into(), None, make::expr_empty_block().into()));
arms.push(make::match_arm(
[make::ident_pat(false, false, make::name("ord")).into()],
make::ident_pat(false, false, make::name("ord")).into(),
None,
make::expr_return(Some(make::expr_path(make::ext::ident_path("ord")))),
));
let list = make::match_arm_list(arms).indent(ast::edit::IndentLevel(1));
Some(make::expr_stmt(make::expr_match(match_target, list)).into())
Some(make::expr_stmt(make::expr_match(match_target, list).into()).into())
}
fn gen_partial_cmp_call(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {

View File

@ -121,7 +121,7 @@ impl RefData {
/// Derefs `expr` and wraps it in parens if necessary
pub(crate) fn wrap_expr(&self, mut expr: ast::Expr) -> ast::Expr {
if self.needs_deref {
expr = make::expr_prefix(T![*], expr);
expr = make::expr_prefix(T![*], expr).into();
}
if self.needs_parentheses {

View File

@ -329,7 +329,7 @@ impl Completions {
ctx: &CompletionContext<'_>,
dot_access: &DotAccess,
func: hir::Function,
receiver: Option<hir::Name>,
receiver: Option<SmolStr>,
local_name: Option<hir::Name>,
) {
if !ctx.check_stability(Some(&func.attrs(ctx.db))) {
@ -475,7 +475,7 @@ impl Completions {
&mut self,
ctx: &CompletionContext<'_>,
dot_access: &DotAccess,
receiver: Option<hir::Name>,
receiver: Option<SmolStr>,
field: hir::Field,
ty: &hir::Type,
) {
@ -533,7 +533,7 @@ impl Completions {
pub(crate) fn add_tuple_field(
&mut self,
ctx: &CompletionContext<'_>,
receiver: Option<hir::Name>,
receiver: Option<SmolStr>,
field: usize,
ty: &hir::Type,
) {

View File

@ -2,7 +2,7 @@
use std::ops::ControlFlow;
use hir::{sym, HasContainer, ItemContainer, MethodCandidateCallback, Name};
use hir::{HasContainer, ItemContainer, MethodCandidateCallback, Name};
use ide_db::FxHashSet;
use syntax::SmolStr;
@ -25,21 +25,49 @@ pub(crate) fn complete_dot(
_ => return,
};
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
let is_method_access_with_parens =
matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
let traits_in_scope = ctx.traits_in_scope();
// Suggest .await syntax for types that implement Future trait
if receiver_ty.impls_into_future(ctx.db) {
if let Some(future_output) = receiver_ty.into_future_output(ctx.db) {
let await_str = SmolStr::new_static("await");
let mut item = CompletionItem::new(
CompletionItemKind::Keyword,
ctx.source_range(),
SmolStr::new_static("await"),
await_str.clone(),
ctx.edition,
);
item.detail("expr.await");
item.add_to(acc, ctx.db);
}
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
let is_method_access_with_parens =
matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
// Completions that skip `.await`, e.g. `.await.foo()`.
let dot_access_kind = match &dot_access.kind {
DotAccessKind::Field { receiver_is_ambiguous_float_literal: _ } => {
DotAccessKind::Field { receiver_is_ambiguous_float_literal: false }
}
it @ DotAccessKind::Method { .. } => *it,
};
let dot_access = DotAccess {
receiver: dot_access.receiver.clone(),
receiver_ty: Some(hir::TypeInfo { original: future_output.clone(), adjusted: None }),
kind: dot_access_kind,
ctx: dot_access.ctx,
};
complete_fields(
acc,
ctx,
&future_output,
|acc, field, ty| acc.add_field(ctx, &dot_access, Some(await_str.clone()), field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, Some(await_str.clone()), field, &ty),
is_field_access,
is_method_access_with_parens,
);
complete_methods(ctx, &future_output, &traits_in_scope, |func| {
acc.add_method(ctx, &dot_access, func, Some(await_str.clone()), None)
});
}
complete_fields(
acc,
@ -50,8 +78,44 @@ pub(crate) fn complete_dot(
is_field_access,
is_method_access_with_parens,
);
complete_methods(ctx, receiver_ty, &traits_in_scope, |func| {
acc.add_method(ctx, dot_access, func, None, None)
});
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
// FIXME:
// Checking for the existence of `iter()` is complicated in our setup, because we need to substitute
// its return type, so we instead check for `<&Self as IntoIterator>::IntoIter`.
// Does `<&receiver_ty as IntoIterator>::IntoIter` exist? Assume `iter` is valid
let iter = receiver_ty
.strip_references()
.add_reference(hir::Mutability::Shared)
.into_iterator_iter(ctx.db)
.map(|ty| (ty, SmolStr::new_static("iter()")));
// Does `<receiver_ty as IntoIterator>::IntoIter` exist?
let into_iter = || {
receiver_ty
.clone()
.into_iterator_iter(ctx.db)
.map(|ty| (ty, SmolStr::new_static("into_iter()")))
};
if let Some((iter, iter_sym)) = iter.or_else(into_iter) {
// Skip iterators, e.g. complete `.iter().filter_map()`.
let dot_access_kind = match &dot_access.kind {
DotAccessKind::Field { receiver_is_ambiguous_float_literal: _ } => {
DotAccessKind::Field { receiver_is_ambiguous_float_literal: false }
}
it @ DotAccessKind::Method { .. } => *it,
};
let dot_access = DotAccess {
receiver: dot_access.receiver.clone(),
receiver_ty: Some(hir::TypeInfo { original: iter.clone(), adjusted: None }),
kind: dot_access_kind,
ctx: dot_access.ctx,
};
complete_methods(ctx, &iter, &traits_in_scope, |func| {
acc.add_method(ctx, &dot_access, func, Some(iter_sym.clone()), None)
});
}
}
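An illustration of the heuristic described in the FIXME a few lines above: instead of proving an inherent `iter()` method exists, the completion asks whether `<&Receiver as IntoIterator>::IntoIter` is defined, which coincides with having `.iter()` for the standard collections. A small, self-contained sketch:

```rust
// The bound below is the same question phrased at the type level.
fn receiver_has_iter<T>(_: &T)
where
    for<'a> &'a T: IntoIterator,
{
}

fn main() {
    let v = vec![1, 2, 3];
    // `&Vec<i32>: IntoIterator<Item = &i32>`, so `.iter()`-style completions apply.
    receiver_has_iter(&v);
    let sum: i32 = (&v).into_iter().copied().sum();
    println!("sum = {sum}");
}
```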
pub(crate) fn complete_undotted_self(
@ -94,18 +158,16 @@ pub(crate) fn complete_undotted_self(
in_breakable: expr_ctx.in_breakable,
},
},
Some(Name::new_symbol_root(sym::self_.clone())),
Some(SmolStr::new_static("self")),
field,
&ty,
)
},
|acc, field, ty| {
acc.add_tuple_field(ctx, Some(Name::new_symbol_root(sym::self_.clone())), field, &ty)
},
|acc, field, ty| acc.add_tuple_field(ctx, Some(SmolStr::new_static("self")), field, &ty),
true,
false,
);
complete_methods(ctx, &ty, |func| {
complete_methods(ctx, &ty, &ctx.traits_in_scope(), |func| {
acc.add_method(
ctx,
&DotAccess {
@ -118,7 +180,7 @@ pub(crate) fn complete_undotted_self(
},
},
func,
Some(Name::new_symbol_root(sym::self_.clone())),
Some(SmolStr::new_static("self")),
None,
)
});
@ -160,6 +222,7 @@ fn complete_fields(
fn complete_methods(
ctx: &CompletionContext<'_>,
receiver: &hir::Type,
traits_in_scope: &FxHashSet<hir::TraitId>,
f: impl FnMut(hir::Function),
) {
struct Callback<'a, F> {
@ -205,7 +268,7 @@ fn complete_methods(
receiver.iterate_method_candidates_split_inherent(
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
traits_in_scope,
Some(ctx.module),
None,
Callback { ctx, f, seen_methods: FxHashSet::default() },
@ -214,25 +277,13 @@ fn complete_methods(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::{
check_edit, completion_list_no_kw, completion_list_no_kw_with_private_editable,
};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual);
}
fn check_with_private_editable(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw_with_private_editable(ra_fixture);
expect.assert_eq(&actual);
}
use crate::tests::{check_edit, check_no_kw, check_with_private_editable};
#[test]
fn test_struct_field_and_method_completion() {
check(
check_no_kw(
r#"
struct S { foo: u32 }
impl S {
@ -249,7 +300,7 @@ fn foo(s: S) { s.$0 }
#[test]
fn no_unstable_method_on_stable() {
check(
check_no_kw(
r#"
//- /main.rs crate:main deps:std
fn foo(s: std::S) { s.$0 }
@ -266,7 +317,7 @@ impl S {
#[test]
fn unstable_method_on_nightly() {
check(
check_no_kw(
r#"
//- toolchain:nightly
//- /main.rs crate:main deps:std
@ -286,7 +337,7 @@ impl S {
#[test]
fn test_struct_field_completion_self() {
check(
check_no_kw(
r#"
struct S { the_field: (u32,) }
impl S {
@ -302,7 +353,7 @@ impl S {
#[test]
fn test_struct_field_completion_autoderef() {
check(
check_no_kw(
r#"
struct A { the_field: (u32, i32) }
impl A {
@ -318,7 +369,7 @@ impl A {
#[test]
fn test_no_struct_field_completion_for_method_call() {
check(
check_no_kw(
r#"
struct A { the_field: u32 }
fn foo(a: A) { a.$0() }
@ -329,7 +380,7 @@ fn foo(a: A) { a.$0() }
#[test]
fn test_visibility_filtering() {
check(
check_no_kw(
r#"
//- /lib.rs crate:lib new_source_root:local
pub mod m {
@ -348,7 +399,7 @@ fn foo(a: lib::m::A) { a.$0 }
"#]],
);
check(
check_no_kw(
r#"
//- /lib.rs crate:lib new_source_root:library
pub mod m {
@ -367,7 +418,7 @@ fn foo(a: lib::m::A) { a.$0 }
"#]],
);
check(
check_no_kw(
r#"
//- /lib.rs crate:lib new_source_root:library
pub mod m {
@ -384,7 +435,7 @@ fn foo(a: lib::m::A) { a.$0 }
"#]],
);
check(
check_no_kw(
r#"
//- /lib.rs crate:lib new_source_root:local
pub struct A {}
@ -402,7 +453,7 @@ fn foo(a: lib::A) { a.$0 }
me pub_method() fn(&self)
"#]],
);
check(
check_no_kw(
r#"
//- /lib.rs crate:lib new_source_root:library
pub struct A {}
@ -524,7 +575,7 @@ fn foo(a: lib::A) { a.$0 }
#[test]
fn test_local_impls() {
check(
check_no_kw(
r#"
pub struct A {}
mod m {
@ -553,7 +604,7 @@ fn foo(a: A) {
#[test]
fn test_doc_hidden_filtering() {
check(
check_no_kw(
r#"
//- /lib.rs crate:lib deps:dep
fn foo(a: dep::A) { a.$0 }
@ -580,7 +631,7 @@ impl A {
#[test]
fn test_union_field_completion() {
check(
check_no_kw(
r#"
union U { field: u8, other: u16 }
fn foo(u: U) { u.$0 }
@ -594,7 +645,7 @@ fn foo(u: U) { u.$0 }
#[test]
fn test_method_completion_only_fitting_impls() {
check(
check_no_kw(
r#"
struct A<T> {}
impl A<u32> {
@ -613,7 +664,7 @@ fn foo(a: A<u32>) { a.$0 }
#[test]
fn test_trait_method_completion() {
check(
check_no_kw(
r#"
struct A {}
trait Trait { fn the_method(&self); }
@ -643,7 +694,7 @@ fn foo(a: A) { a.the_method();$0 }
#[test]
fn test_trait_method_completion_deduplicated() {
check(
check_no_kw(
r"
struct A {}
trait Trait { fn the_method(&self); }
@ -658,7 +709,7 @@ fn foo(a: &A) { a.$0 }
#[test]
fn completes_trait_method_from_other_module() {
check(
check_no_kw(
r"
struct A {}
mod m {
@ -676,7 +727,7 @@ fn foo(a: A) { a.$0 }
#[test]
fn test_no_non_self_method() {
check(
check_no_kw(
r#"
struct A {}
impl A {
@ -692,7 +743,7 @@ fn foo(a: A) {
#[test]
fn test_tuple_field_completion() {
check(
check_no_kw(
r#"
fn foo() {
let b = (0, 3.14);
@ -708,7 +759,7 @@ fn foo() {
#[test]
fn test_tuple_struct_field_completion() {
check(
check_no_kw(
r#"
struct S(i32, f64);
fn foo() {
@ -725,7 +776,7 @@ fn foo() {
#[test]
fn test_tuple_field_inference() {
check(
check_no_kw(
r#"
pub struct S;
impl S { pub fn blah(&self) {} }
@ -747,7 +798,7 @@ impl T {
#[test]
fn test_field_no_same_name() {
check(
check_no_kw(
r#"
//- minicore: deref
struct A { field: u8 }
@ -770,7 +821,7 @@ fn test(a: A) {
#[test]
fn test_tuple_field_no_same_index() {
check(
check_no_kw(
r#"
//- minicore: deref
struct A(u8);
@ -793,7 +844,7 @@ fn test(a: A) {
#[test]
fn test_tuple_struct_deref_to_tuple_no_same_index() {
check(
check_no_kw(
r#"
//- minicore: deref
struct A(u8);
@ -815,7 +866,7 @@ fn test(a: A) {
#[test]
fn test_completion_works_in_consts() {
check(
check_no_kw(
r#"
struct A { the_field: u32 }
const X: u32 = {
@ -830,7 +881,7 @@ const X: u32 = {
#[test]
fn works_in_simple_macro_1() {
check(
check_no_kw(
r#"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
@ -847,7 +898,7 @@ fn foo(a: A) {
#[test]
fn works_in_simple_macro_2() {
// this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery
check(
check_no_kw(
r#"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
@ -863,7 +914,7 @@ fn foo(a: A) {
#[test]
fn works_in_simple_macro_recursive_1() {
check(
check_no_kw(
r#"
macro_rules! m { ($e:expr) => { $e } }
struct A { the_field: u32 }
@ -879,7 +930,7 @@ fn foo(a: A) {
#[test]
fn macro_expansion_resilient() {
check(
check_no_kw(
r#"
macro_rules! d {
() => {};
@ -905,7 +956,7 @@ fn foo(a: A) {
#[test]
fn test_method_completion_issue_3547() {
check(
check_no_kw(
r#"
struct HashSet<T> {}
impl<T> HashSet<T> {
@ -924,7 +975,7 @@ fn foo() {
#[test]
fn completes_method_call_when_receiver_is_a_macro_call() {
check(
check_no_kw(
r#"
struct S;
impl S { fn foo(&self) {} }
@ -939,7 +990,7 @@ fn main() { make_s!().f$0; }
#[test]
fn completes_after_macro_call_in_submodule() {
check(
check_no_kw(
r#"
macro_rules! empty {
() => {};
@ -967,7 +1018,7 @@ mod foo {
#[test]
fn issue_8931() {
check(
check_no_kw(
r#"
//- minicore: fn
struct S;
@ -994,7 +1045,7 @@ impl S {
#[test]
fn completes_bare_fields_and_methods_in_methods() {
check(
check_no_kw(
r#"
struct Foo { field: i32 }
@ -1008,7 +1059,7 @@ impl Foo { fn foo(&self) { $0 } }"#,
bt u32 u32
"#]],
);
check(
check_no_kw(
r#"
struct Foo(i32);
@ -1026,7 +1077,7 @@ impl Foo { fn foo(&mut self) { $0 } }"#,
#[test]
fn macro_completion_after_dot() {
check(
check_no_kw(
r#"
macro_rules! m {
($e:expr) => { $e };
@ -1051,7 +1102,7 @@ fn f() {
#[test]
fn completes_method_call_when_receiver_type_has_errors_issue_10297() {
check(
check_no_kw(
r#"
//- minicore: iterator, sized
struct Vec<T>;
@ -1102,7 +1153,7 @@ fn main() {
#[test]
fn issue_12484() {
check(
check_no_kw(
r#"
//- minicore: sized
trait SizeUser {
@ -1124,7 +1175,7 @@ fn test(thing: impl Encrypt) {
#[test]
fn only_consider_same_type_once() {
check(
check_no_kw(
r#"
//- minicore: deref
struct A(u8);
@ -1150,7 +1201,7 @@ fn test(a: A) {
#[test]
fn no_inference_var_in_completion() {
check(
check_no_kw(
r#"
struct S<T>(T);
fn test(s: S<Unknown>) {
@ -1165,7 +1216,7 @@ fn test(s: S<Unknown>) {
#[test]
fn assoc_impl_1() {
check(
check_no_kw(
r#"
//- minicore: deref
fn main() {
@ -1206,7 +1257,7 @@ impl<F: core::ops::Deref<Target = impl Bar>> Foo<F> {
#[test]
fn assoc_impl_2() {
check(
check_no_kw(
r#"
//- minicore: deref
fn main() {
@ -1242,7 +1293,7 @@ impl<B: Bar, F: core::ops::Deref<Target = B>> Foo<F> {
#[test]
fn test_struct_function_field_completion() {
check(
check_no_kw(
r#"
struct S { va_field: u32, fn_field: fn() }
fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
@ -1267,7 +1318,7 @@ fn foo() { (S { va_field: 0, fn_field: || {} }.fn_field)() }
#[test]
fn test_tuple_function_field_completion() {
check(
check_no_kw(
r#"
struct B(u32, fn())
fn foo() {
@ -1301,7 +1352,7 @@ fn foo() {
#[test]
fn test_fn_field_dot_access_method_has_parens_false() {
check(
check_no_kw(
r#"
struct Foo { baz: fn() }
impl Foo {
@ -1318,4 +1369,101 @@ fn baz() {
"#]],
);
}
#[test]
fn skip_iter() {
check_no_kw(
r#"
//- minicore: iterator
fn foo() {
[].$0
}
"#,
expect![[r#"
me clone() (as Clone) fn(&self) -> Self
me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
"#]],
);
check_no_kw(
r#"
//- minicore: iterator
struct MyIntoIter;
impl IntoIterator for MyIntoIter {
type Item = ();
type IntoIter = MyIterator;
fn into_iter(self) -> Self::IntoIter {
MyIterator
}
}
struct MyIterator;
impl Iterator for MyIterator {
type Item = ();
fn next(&mut self) -> Self::Item {}
}
fn foo() {
MyIntoIter.$0
}
"#,
expect![[r#"
me into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
me into_iter().by_ref() (as Iterator) fn(&mut self) -> &mut Self
me into_iter().into_iter() (as IntoIterator) fn(self) -> <Self as IntoIterator>::IntoIter
me into_iter().next() (as Iterator) fn(&mut self) -> Option<<Self as Iterator>::Item>
me into_iter().nth() (as Iterator) fn(&mut self, usize) -> Option<<Self as Iterator>::Item>
"#]],
);
}
#[test]
fn skip_await() {
check_no_kw(
r#"
//- minicore: future
struct Foo;
impl Foo {
fn foo(self) {}
}
async fn foo() -> Foo { Foo }
async fn bar() {
foo().$0
}
"#,
expect![[r#"
me await.foo() fn(self)
me into_future() (use core::future::IntoFuture) fn(self) -> <Self as IntoFuture>::IntoFuture
"#]],
);
check_edit(
"foo",
r#"
//- minicore: future
struct Foo;
impl Foo {
fn foo(self) {}
}
async fn foo() -> Foo { Foo }
async fn bar() {
foo().$0
}
"#,
r#"
struct Foo;
impl Foo {
fn foo(self) {}
}
async fn foo() -> Foo { Foo }
async fn bar() {
foo().await.foo();$0
}
"#,
);
}
}
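
Throughout this diff the per-file `check` test helpers are consolidated into shared `check`/`check_no_kw` helpers in `crate::tests`. Judging by the local helpers removed further down, `check_no_kw` is presumably a thin wrapper that renders the completion list with keyword entries filtered out and compares it against the expect-test snapshot; a minimal sketch under that assumption (not the actual shared implementation):

fn check_no_kw(ra_fixture: &str, expect: Expect) {
    // Render completions for the fixture without keyword completions,
    // then diff the result against the inline snapshot.
    let actual = completion_list_no_kw(ra_fixture);
    expect.assert_eq(&actual);
}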


@ -68,43 +68,40 @@ pub(crate) fn complete_cargo_env_vars(
mod tests {
use crate::tests::{check_edit, completion_list};
fn check(macro_name: &str) {
check_edit(
"CARGO_BIN_NAME",
&format!(
r#"
#[rustc_builtin_macro]
macro {macro_name} {{
($var:literal) => {{ 0 }}
}}
fn main() {{
let foo = {macro_name}!("CAR$0");
}}
"#
),
&format!(
r#"
#[rustc_builtin_macro]
macro {macro_name} {{
($var:literal) => {{ 0 }}
}}
fn main() {{
let foo = {macro_name}!("CARGO_BIN_NAME");
}}
"#
),
);
}
#[test]
fn completes_env_variable_in_env() {
check("env")
check_edit(
"CARGO_BIN_NAME",
r#"
//- minicore: env
fn main() {
let foo = env!("CAR$0");
}
"#,
r#"
fn main() {
let foo = env!("CARGO_BIN_NAME");
}
"#,
);
}
#[test]
fn completes_env_variable_in_option_env() {
check("option_env");
check_edit(
"CARGO_BIN_NAME",
r#"
//- minicore: env
fn main() {
let foo = option_env!("CAR$0");
}
"#,
r#"
fn main() {
let foo = option_env!("CARGO_BIN_NAME");
}
"#,
);
}
#[test]


@ -62,6 +62,7 @@ pub(crate) fn complete_expr_path(
in_condition,
incomplete_let,
ref ref_expr_parent,
after_amp,
ref is_func_update,
ref innermost_ret_ty,
ref impl_,
@ -69,8 +70,23 @@ pub(crate) fn complete_expr_path(
..
} = expr_ctx;
let wants_mut_token =
ref_expr_parent.as_ref().map(|it| it.mut_token().is_none()).unwrap_or(false);
let (has_raw_token, has_const_token, has_mut_token) = ref_expr_parent
.as_ref()
.map(|it| (it.raw_token().is_some(), it.const_token().is_some(), it.mut_token().is_some()))
.unwrap_or((false, false, false));
let wants_raw_token = ref_expr_parent.is_some() && !has_raw_token && after_amp;
let wants_const_token =
ref_expr_parent.is_some() && has_raw_token && !has_const_token && !has_mut_token;
let wants_mut_token = if ref_expr_parent.is_some() {
if has_raw_token {
!has_const_token && !has_mut_token
} else {
!has_mut_token
}
} else {
false
};
let scope_def_applicable = |def| match def {
ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) | ScopeDef::Label(_) => false,
@ -354,6 +370,12 @@ pub(crate) fn complete_expr_path(
add_keyword("else if", "else if $1 {\n $0\n}");
}
if wants_raw_token {
add_keyword("raw", "raw ");
}
if wants_const_token {
add_keyword("const", "const ");
}
if wants_mut_token {
add_keyword("mut", "mut ");
}
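
The gating above decides which keywords to offer inside a reference expression: `raw` is suggested right after a bare `&`, and once `raw` is present, `const` and `mut` become the candidates; without `raw`, only `mut` is offered as before. A self-contained model of that decision, with the syntax-tree queries replaced by plain flags (illustrative only, not the rust-analyzer code itself):

// Which of `raw`, `const`, `mut` should be suggested, given the tokens that
// already follow the `&` of the enclosing reference expression.
fn wanted_keywords(
    in_ref_expr: bool,
    after_amp: bool,
    has_raw: bool,
    has_const: bool,
    has_mut: bool,
) -> (bool, bool, bool) {
    let wants_raw = in_ref_expr && !has_raw && after_amp;
    let wants_const = in_ref_expr && has_raw && !has_const && !has_mut;
    let wants_mut =
        in_ref_expr && if has_raw { !has_const && !has_mut } else { !has_mut };
    (wants_raw, wants_const, wants_mut)
}

fn main() {
    // `&$0` offers `raw` and `mut`; `&raw $0` offers `const` and `mut`.
    assert_eq!(wanted_keywords(true, true, false, false, false), (true, false, true));
    assert_eq!(wanted_keywords(true, false, true, false, false), (false, true, true));
}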


@ -65,18 +65,13 @@ pub(crate) fn complete_extern_abi(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::{check_edit, completion_list_no_kw};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual);
}
use crate::tests::{check_edit, check_no_kw};
#[test]
fn only_completes_in_string_literals() {
check(
check_no_kw(
r#"
$0 fn foo {}
"#,
@ -86,7 +81,7 @@ $0 fn foo {}
#[test]
fn requires_extern_prefix() {
check(
check_no_kw(
r#"
"$0" fn foo {}
"#,
@ -96,7 +91,7 @@ $0 fn foo {}
#[test]
fn works() {
check(
check_no_kw(
r#"
extern "$0" fn foo {}
"#,


@ -5,7 +5,7 @@ use ide_db::imports::{
insert_use::ImportScope,
};
use itertools::Itertools;
use syntax::{ast, AstNode, SyntaxNode, ToSmolStr, T};
use syntax::{ast, AstNode, SyntaxNode, ToSmolStr};
use crate::{
config::AutoImportExclusionType,
@ -403,10 +403,11 @@ fn import_on_the_fly_method(
fn import_name(ctx: &CompletionContext<'_>) -> String {
let token_kind = ctx.token.kind();
if matches!(token_kind, T![.] | T![::]) {
String::new()
} else {
if token_kind.is_any_identifier() {
ctx.token.to_string()
} else {
String::new()
}
}
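
With this change the flyimport query text is taken only from identifier-like tokens; previously every token other than `.` and `::` was used verbatim, so stray punctuation could end up as the search string. A tiny sketch of the new rule with token kinds reduced to an enum (illustrative, not the real SyntaxKind API):

#[derive(PartialEq)]
enum TokenKind { Ident, Dot, Amp }

// Only identifiers contribute a name for the on-the-fly import search.
fn import_name(kind: TokenKind, text: &str) -> String {
    if kind == TokenKind::Ident { text.to_string() } else { String::new() }
}

fn main() {
    assert_eq!(import_name(TokenKind::Ident, "Vec"), "Vec");
    assert_eq!(import_name(TokenKind::Dot, "."), "");
    assert_eq!(import_name(TokenKind::Amp, "&"), "");
}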


@ -61,18 +61,13 @@ pub(crate) fn format_string(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::{check_edit, completion_list_no_kw};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual);
}
use crate::tests::{check_edit, check_no_kw};
#[test]
fn works_when_wrapped() {
check(
check_no_kw(
r#"
//- minicore: fmt
macro_rules! print {
@ -89,7 +84,7 @@ fn main() {
#[test]
fn no_completion_without_brace() {
check(
check_no_kw(
r#"
//- minicore: fmt
fn main() {


@ -514,18 +514,13 @@ fn function_declaration(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::{check_edit, completion_list_no_kw};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual)
}
use crate::tests::{check_edit, check_no_kw};
#[test]
fn no_completion_inside_fn() {
check(
check_no_kw(
r"
trait Test { fn test(); fn test2(); }
struct T;
@ -544,7 +539,7 @@ impl Test for T {
"#]],
);
check(
check_no_kw(
r"
trait Test { fn test(); fn test2(); }
struct T;
@ -558,7 +553,7 @@ impl Test for T {
expect![[""]],
);
check(
check_no_kw(
r"
trait Test { fn test(); fn test2(); }
struct T;
@ -573,7 +568,7 @@ impl Test for T {
);
// https://github.com/rust-lang/rust-analyzer/pull/5976#issuecomment-692332191
check(
check_no_kw(
r"
trait Test { fn test(); fn test2(); }
struct T;
@ -587,7 +582,7 @@ impl Test for T {
expect![[r#""#]],
);
check(
check_no_kw(
r"
trait Test { fn test(_: i32); fn test2(); }
struct T;
@ -606,7 +601,7 @@ impl Test for T {
"#]],
);
check(
check_no_kw(
r"
trait Test { fn test(_: fn()); fn test2(); }
struct T;
@ -624,7 +619,7 @@ impl Test for T {
#[test]
fn no_completion_inside_const() {
check(
check_no_kw(
r"
trait Test { const TEST: fn(); const TEST2: u32; type Test; fn test(); }
struct T;
@ -636,7 +631,7 @@ impl Test for T {
expect![[r#""#]],
);
check(
check_no_kw(
r"
trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
struct T;
@ -653,7 +648,7 @@ impl Test for T {
"#]],
);
check(
check_no_kw(
r"
trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
struct T;
@ -670,7 +665,7 @@ impl Test for T {
"#]],
);
check(
check_no_kw(
r"
trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
struct T;
@ -689,7 +684,7 @@ impl Test for T {
"#]],
);
check(
check_no_kw(
r"
trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
struct T;
@ -703,7 +698,7 @@ impl Test for T {
expect![[""]],
);
check(
check_no_kw(
r"
trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
struct T;
@ -720,7 +715,7 @@ impl Test for T {
#[test]
fn no_completion_inside_type() {
check(
check_no_kw(
r"
trait Test { type Test; type Test2; fn test(); }
struct T;
@ -737,7 +732,7 @@ impl Test for T {
"#]],
);
check(
check_no_kw(
r"
trait Test { type Test; type Test2; fn test(); }
struct T;
@ -1263,7 +1258,7 @@ impl Foo<u32> for Bar {
#[test]
fn works_directly_in_impl() {
check(
check_no_kw(
r#"
trait Tr {
fn required();
@ -1277,7 +1272,7 @@ impl Tr for () {
fn fn required()
"#]],
);
check(
check_no_kw(
r#"
trait Tr {
fn provided() {}


@ -32,14 +32,9 @@ pub(crate) fn complete_for_and_where(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::{check_edit, completion_list};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(ra_fixture);
expect.assert_eq(&actual)
}
use crate::tests::{check, check_edit};
#[test]
fn test_else_edit_after_if() {


@ -59,14 +59,9 @@ pub(crate) fn complete_label(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::{check_edit, completion_list};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(ra_fixture);
expect.assert_eq(&actual);
}
use crate::tests::{check, check_edit};
#[test]
fn check_lifetime_edit() {


@ -159,14 +159,9 @@ fn module_chain_to_containing_module_file(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::tests::completion_list;
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(ra_fixture);
expect.assert_eq(&actual);
}
use crate::tests::check;
#[test]
fn lib_module_completion() {


@ -303,7 +303,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
resulting_element = ast::Expr::from(parent_deref_element);
new_element_opt = make::expr_prefix(syntax::T![*], new_element_opt);
new_element_opt = make::expr_prefix(syntax::T![*], new_element_opt).into();
}
if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) {
@ -401,18 +401,13 @@ fn add_custom_postfix_completions(
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
use expect_test::expect;
use crate::{
tests::{check_edit, check_edit_with_config, completion_list, TEST_CONFIG},
tests::{check, check_edit, check_edit_with_config, TEST_CONFIG},
CompletionConfig, Snippet,
};
fn check(ra_fixture: &str, expect: Expect) {
let actual = completion_list(ra_fixture);
expect.assert_eq(&actual)
}
#[test]
fn postfix_completion_works_for_trivial_path_expression() {
check(


@ -146,6 +146,7 @@ pub(crate) struct PathExprCtx {
pub(crate) in_condition: bool,
pub(crate) incomplete_let: bool,
pub(crate) ref_expr_parent: Option<ast::RefExpr>,
pub(crate) after_amp: bool,
/// The surrounding RecordExpression we are completing a functional update
pub(crate) is_func_update: Option<ast::RecordExpr>,
pub(crate) self_param: Option<hir::SelfParam>,
@ -390,7 +391,7 @@ pub(crate) struct DotAccess {
pub(crate) ctx: DotAccessExprCtx,
}
#[derive(Debug)]
#[derive(Debug, Clone, Copy)]
pub(crate) enum DotAccessKind {
Field {
/// True if the receiver is an integer and there is no ident in the original file after it yet
@ -402,7 +403,7 @@ pub(crate) enum DotAccessKind {
},
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct DotAccessExprCtx {
pub(crate) in_block_expr: bool,
pub(crate) in_breakable: BreakableKind,


@ -123,10 +123,11 @@ fn expand(
) -> Option<ExpansionResult> {
let _p = tracing::info_span!("CompletionContext::expand").entered();
// Left biased since there may already be an identifier token there, and we appended to it.
if !sema.might_be_inside_macro_call(&fake_ident_token)
&& original_file
.token_at_offset(original_offset + relative_offset)
.right_biased()
.left_biased()
.is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
{
// Recursion base case.
@ -1150,6 +1151,9 @@ fn classify_name_ref(
let after_if_expr = after_if_expr(it.clone());
let ref_expr_parent =
path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
let after_amp = non_trivia_sibling(it.clone().into(), Direction::Prev)
.map(|it| it.kind() == SyntaxKind::AMP)
.unwrap_or(false);
let (innermost_ret_ty, self_param) = {
let find_ret_ty = |it: SyntaxNode| {
if let Some(item) = ast::Item::cast(it.clone()) {
@ -1219,6 +1223,7 @@ fn classify_name_ref(
after_if_expr,
in_condition,
ref_expr_parent,
after_amp,
is_func_update,
innermost_ret_ty,
self_param,


@ -6,7 +6,7 @@ use crate::{
tests::{position, TEST_CONFIG},
};
fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) {
fn check_expected_type_and_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (db, pos) = position(ra_fixture);
let config = TEST_CONFIG;
let (completion_context, _analysis) = CompletionContext::new(&db, pos, &config).unwrap();


@ -79,7 +79,7 @@ pub struct CompletionItem {
// FIXME: We shouldn't expose Mutability here (that is HIR types at all), its fine for now though
// until we have more splitting completions in which case we should think about
// generalizing this. See https://github.com/rust-lang/rust-analyzer/issues/12571
pub ref_match: Option<(Mutability, TextSize)>,
pub ref_match: Option<(CompletionItemRefMode, TextSize)>,
/// The import data to add to completion's edits.
/// (ImportPath, LastSegment)
@ -128,8 +128,15 @@ impl fmt::Debug for CompletionItem {
s.field("relevance", &self.relevance);
}
if let Some((mutability, offset)) = &self.ref_match {
s.field("ref_match", &format!("&{}@{offset:?}", mutability.as_keyword_for_ref()));
if let Some((ref_mode, offset)) = self.ref_match {
let prefix = match ref_mode {
CompletionItemRefMode::Reference(mutability) => match mutability {
Mutability::Shared => "&",
Mutability::Mut => "&mut ",
},
CompletionItemRefMode::Dereference => "*",
};
s.field("ref_match", &format!("{}@{offset:?}", prefix));
}
if self.trigger_call_info {
s.field("trigger_call_info", &true);
@ -400,6 +407,12 @@ impl CompletionItemKind {
}
}
#[derive(Copy, Clone, Debug)]
pub enum CompletionItemRefMode {
Reference(Mutability),
Dereference,
}
impl CompletionItem {
pub(crate) fn new(
kind: impl Into<CompletionItemKind>,
@ -441,15 +454,14 @@ impl CompletionItem {
let mut relevance = self.relevance;
relevance.type_match = Some(CompletionRelevanceTypeMatch::Exact);
self.ref_match.map(|(mutability, offset)| {
(
format!("&{}{}", mutability.as_keyword_for_ref(), self.label.primary),
ide_db::text_edit::Indel::insert(
offset,
format!("&{}", mutability.as_keyword_for_ref()),
),
relevance,
)
self.ref_match.map(|(mode, offset)| {
let prefix = match mode {
CompletionItemRefMode::Reference(Mutability::Shared) => "&",
CompletionItemRefMode::Reference(Mutability::Mut) => "&mut ",
CompletionItemRefMode::Dereference => "*",
};
let label = format!("{prefix}{}", self.label.primary);
(label, ide_db::text_edit::Indel::insert(offset, String::from(prefix)), relevance)
})
}
}
@ -473,7 +485,7 @@ pub(crate) struct Builder {
deprecated: bool,
trigger_call_info: bool,
relevance: CompletionRelevance,
ref_match: Option<(Mutability, TextSize)>,
ref_match: Option<(CompletionItemRefMode, TextSize)>,
edition: Edition,
}
@ -657,8 +669,12 @@ impl Builder {
self.imports_to_add.push(import_to_add);
self
}
pub(crate) fn ref_match(&mut self, mutability: Mutability, offset: TextSize) -> &mut Builder {
self.ref_match = Some((mutability, offset));
pub(crate) fn ref_match(
&mut self,
ref_mode: CompletionItemRefMode,
offset: TextSize,
) -> &mut Builder {
self.ref_match = Some((ref_mode, offset));
self
}
}
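
`ref_match` now carries a `CompletionItemRefMode` instead of a bare `Mutability`, so a completion can be adjusted either by taking a reference or by dereferencing the receiver. A small self-contained model of how the chosen mode turns into the prefix that shows up in the label and is inserted at the receiver offset (mirrors the two match expressions above; purely illustrative):

#[derive(Copy, Clone)]
enum Mutability { Shared, Mut }

#[derive(Copy, Clone)]
enum CompletionItemRefMode { Reference(Mutability), Dereference }

// The same prefix string is used both for the displayed label and for the
// text edit inserted in front of the receiver expression.
fn ref_prefix(mode: CompletionItemRefMode) -> &'static str {
    match mode {
        CompletionItemRefMode::Reference(Mutability::Shared) => "&",
        CompletionItemRefMode::Reference(Mutability::Mut) => "&mut ",
        CompletionItemRefMode::Dereference => "*",
    }
}

fn main() {
    assert_eq!(ref_prefix(CompletionItemRefMode::Reference(Mutability::Shared)), "&");
    assert_eq!(ref_prefix(CompletionItemRefMode::Reference(Mutability::Mut)), "&mut ");
    assert_eq!(ref_prefix(CompletionItemRefMode::Dereference), "*");
}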


@ -33,8 +33,9 @@ use crate::{
pub use crate::{
config::{AutoImportExclusionType, CallableSnippets, CompletionConfig},
item::{
CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
CompletionRelevanceReturnType, CompletionRelevanceTypeMatch,
CompletionItem, CompletionItemKind, CompletionItemRefMode, CompletionRelevance,
CompletionRelevancePostfixMatch, CompletionRelevanceReturnType,
CompletionRelevanceTypeMatch,
},
snippet::{Snippet, SnippetScope},
};


@ -18,7 +18,7 @@ use ide_db::{
imports::import_assets::LocatedImport,
RootDatabase, SnippetCap, SymbolKind,
};
use syntax::{ast, format_smolstr, AstNode, Edition, SmolStr, SyntaxKind, TextRange, ToSmolStr};
use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr};
use crate::{
context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext},
@ -28,7 +28,8 @@ use crate::{
literal::render_variant_lit,
macro_::{render_macro, render_macro_pat},
},
CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
CompletionContext, CompletionItem, CompletionItemKind, CompletionItemRefMode,
CompletionRelevance,
};
/// Interface for data and methods required for items rendering.
#[derive(Debug, Clone)]
@ -122,7 +123,7 @@ impl<'a> RenderContext<'a> {
pub(crate) fn render_field(
ctx: RenderContext<'_>,
dot_access: &DotAccess,
receiver: Option<hir::Name>,
receiver: Option<SmolStr>,
field: hir::Field,
ty: &hir::Type,
) -> CompletionItem {
@ -136,7 +137,7 @@ pub(crate) fn render_field(
let mut item = CompletionItem::new(
SymbolKind::Field,
ctx.source_range(),
field_with_receiver(db, receiver.as_ref(), &name, ctx.completion.edition),
field_with_receiver(receiver.as_deref(), &name),
ctx.completion.edition,
);
item.set_relevance(CompletionRelevance {
@ -158,8 +159,7 @@ pub(crate) fn render_field(
builder.replace(
ctx.source_range(),
field_with_receiver(db, receiver.as_ref(), &escaped_name, ctx.completion.edition)
.into(),
field_with_receiver(receiver.as_deref(), &escaped_name).into(),
);
let expected_fn_type =
@ -183,17 +183,12 @@ pub(crate) fn render_field(
item.text_edit(builder.finish());
} else {
item.insert_text(field_with_receiver(
db,
receiver.as_ref(),
&escaped_name,
ctx.completion.edition,
));
item.insert_text(field_with_receiver(receiver.as_deref(), &escaped_name));
}
if let Some(receiver) = &dot_access.receiver {
if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
item.ref_match(ref_match, original.syntax().text_range().start());
if let Some(ref_mode) = compute_ref_match(ctx.completion, ty) {
item.ref_match(ref_mode, original.syntax().text_range().start());
}
}
}
@ -201,33 +196,21 @@ pub(crate) fn render_field(
item.build(db)
}
fn field_with_receiver(
db: &RootDatabase,
receiver: Option<&hir::Name>,
field_name: &str,
edition: Edition,
) -> SmolStr {
receiver.map_or_else(
|| field_name.into(),
|receiver| format_smolstr!("{}.{field_name}", receiver.display(db, edition)),
)
fn field_with_receiver(receiver: Option<&str>, field_name: &str) -> SmolStr {
receiver
.map_or_else(|| field_name.into(), |receiver| format_smolstr!("{}.{field_name}", receiver))
}
pub(crate) fn render_tuple_field(
ctx: RenderContext<'_>,
receiver: Option<hir::Name>,
receiver: Option<SmolStr>,
field: usize,
ty: &hir::Type,
) -> CompletionItem {
let mut item = CompletionItem::new(
SymbolKind::Field,
ctx.source_range(),
field_with_receiver(
ctx.db(),
receiver.as_ref(),
&field.to_string(),
ctx.completion.edition,
),
field_with_receiver(receiver.as_deref(), &field.to_string()),
ctx.completion.edition,
);
item.detail(ty.display(ctx.db(), ctx.completion.edition).to_string())
@ -440,7 +423,7 @@ fn render_resolution_path(
let name = local_name.display_no_db(ctx.completion.edition).to_smolstr();
let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
if local_name.is_escaped(completion.edition) {
if local_name.needs_escape(completion.edition) {
item.insert_text(local_name.display_no_db(completion.edition).to_smolstr());
}
// Add `<>` for generic types
@ -638,20 +621,34 @@ fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str)
fn compute_ref_match(
ctx: &CompletionContext<'_>,
completion_ty: &hir::Type,
) -> Option<hir::Mutability> {
) -> Option<CompletionItemRefMode> {
let expected_type = ctx.expected_type.as_ref()?;
if completion_ty != expected_type {
let expected_type_without_ref = expected_type.remove_ref()?;
if completion_ty.autoderef(ctx.db).any(|deref_ty| deref_ty == expected_type_without_ref) {
let expected_without_ref = expected_type.remove_ref();
let completion_without_ref = completion_ty.remove_ref();
if completion_ty == expected_type {
return None;
}
if let Some(expected_without_ref) = &expected_without_ref {
if completion_ty.autoderef(ctx.db).any(|ty| ty == *expected_without_ref) {
cov_mark::hit!(suggest_ref);
let mutability = if expected_type.is_mutable_reference() {
hir::Mutability::Mut
} else {
hir::Mutability::Shared
};
return Some(mutability);
};
return Some(CompletionItemRefMode::Reference(mutability));
}
}
if let Some(completion_without_ref) = completion_without_ref {
if completion_without_ref == *expected_type && completion_without_ref.is_copy(ctx.db) {
cov_mark::hit!(suggest_deref);
return Some(CompletionItemRefMode::Dereference);
}
}
None
}
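
The rewritten `compute_ref_match` now handles two adjustments instead of one: suggest taking a reference (`&`/`&mut `) when the expected type is a reference whose target the completion type autoderefs to, and suggest dereferencing (`*`) when the completion is a reference to the expected type and that type is Copy. A self-contained model of the rule, with hir::Type replaced by plain strings and the Copy query by a flag (illustrative only):

#[derive(Clone, Copy)]
enum RefMode { Reference { mutable: bool }, Dereference }

// `expected` / `completion` stand in for hir::Type; `is_copy` stands in for
// the Copy check on the dereferenced completion type.
fn ref_match(expected: &str, completion: &str, is_copy: bool) -> Option<RefMode> {
    let strip = |t: &str| {
        t.strip_prefix("&mut ").or_else(|| t.strip_prefix('&')).map(|s| s.to_string())
    };
    if expected == completion {
        return None;
    }
    if let Some(expected_inner) = strip(expected) {
        // Real code: completion_ty.autoderef(db).any(|ty| ty == expected_without_ref)
        if completion == expected_inner {
            return Some(RefMode::Reference { mutable: expected.starts_with("&mut ") });
        }
    }
    if let Some(completion_inner) = strip(completion) {
        if completion_inner == expected && is_copy {
            return Some(RefMode::Dereference);
        }
    }
    None
}

fn main() {
    // Expected `&Foo`, have `Foo`: offer `&foo`.
    assert!(matches!(ref_match("&Foo", "Foo", false), Some(RefMode::Reference { mutable: false })));
    // Expected `Foo`, have `&Foo` and `Foo: Copy`: offer `*foo`.
    assert!(matches!(ref_match("Foo", "&Foo", true), Some(RefMode::Dereference)));
    // Without Copy, no deref suggestion.
    assert!(ref_match("Foo", "&Foo", false).is_none());
}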
@ -664,16 +661,16 @@ fn path_ref_match(
if let Some(original_path) = &path_ctx.original_path {
// At least one char was typed by the user already, in that case look for the original path
if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) {
if let Some(ref_match) = compute_ref_match(completion, ty) {
item.ref_match(ref_match, original_path.syntax().text_range().start());
if let Some(ref_mode) = compute_ref_match(completion, ty) {
item.ref_match(ref_mode, original_path.syntax().text_range().start());
}
}
} else {
// completion requested on an empty identifier, there is no path here yet.
// FIXME: This might create inconsistent completions where we show a ref match in macro inputs
// as long as nothing was typed yet
if let Some(ref_match) = compute_ref_match(completion, ty) {
item.ref_match(ref_match, completion.position.offset);
if let Some(ref_mode) = compute_ref_match(completion, ty) {
item.ref_match(ref_mode, completion.position.offset);
}
}
}
@ -693,20 +690,28 @@ mod tests {
};
#[track_caller]
fn check(ra_fixture: &str, kind: impl Into<CompletionItemKind>, expect: Expect) {
fn check(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
kind: impl Into<CompletionItemKind>,
expect: Expect,
) {
let actual = do_completion(ra_fixture, kind.into());
expect.assert_debug_eq(&actual);
}
#[track_caller]
fn check_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
fn check_kinds(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
kinds: &[CompletionItemKind],
expect: Expect,
) {
let actual: Vec<_> =
kinds.iter().flat_map(|&kind| do_completion(ra_fixture, kind)).collect();
expect.assert_debug_eq(&actual);
}
#[track_caller]
fn check_function_relevance(ra_fixture: &str, expect: Expect) {
fn check_function_relevance(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let actual: Vec<_> =
do_completion(ra_fixture, CompletionItemKind::SymbolKind(SymbolKind::Method))
.into_iter()
@ -717,7 +722,11 @@ mod tests {
}
#[track_caller]
fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
fn check_relevance_for_kinds(
#[rust_analyzer::rust_fixture] ra_fixture: &str,
kinds: &[CompletionItemKind],
expect: Expect,
) {
let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
actual.retain(|it| kinds.contains(&it.kind));
actual.sort_by_key(|it| cmp::Reverse(it.relevance.score()));
@ -725,7 +734,7 @@ mod tests {
}
#[track_caller]
fn check_relevance(ra_fixture: &str, expect: Expect) {
fn check_relevance(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
actual.retain(|it| it.kind != CompletionItemKind::Snippet);
actual.retain(|it| it.kind != CompletionItemKind::Keyword);
@ -2053,7 +2062,42 @@ fn main() {
}
#[test]
fn suggest_deref() {
fn suggest_deref_copy() {
cov_mark::check!(suggest_deref);
check_relevance(
r#"
//- minicore: copy
struct Foo;
impl Copy for Foo {}
impl Clone for Foo {
fn clone(&self) -> Self { *self }
}
fn bar(x: Foo) {}
fn main() {
let foo = &Foo;
bar($0);
}
"#,
expect![[r#"
st Foo Foo [type]
st Foo Foo [type]
ex Foo [type]
lc foo &Foo [local]
lc *foo [type+local]
fn bar() fn(Foo) []
fn main() fn() []
md core []
tt Clone []
tt Copy []
"#]],
);
}
#[test]
fn suggest_deref_trait() {
check_relevance(
r#"
//- minicore: deref

Some files were not shown because too many files have changed in this diff.