Merge pull request #4355 from RalfJung/rustup

Rustup
Ralf Jung, 2025-05-29 10:21:35 +00:00, committed by GitHub
commit c1fedcfccc
62 changed files with 1513 additions and 956 deletions

Cargo.lock (generated)
View File

@@ -17,15 +17,6 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = [
"memchr",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -124,12 +115,9 @@ dependencies = [
[[package]]
name = "boxcar"
version = "0.2.11"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6740c6e2fc6360fa57c35214c7493826aee95993926092606f27c983b40837be"
dependencies = [
"loom",
]
checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf"
[[package]]
name = "camino"
@@ -511,19 +499,6 @@ version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
[[package]]
name = "generator"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd"
dependencies = [
"cfg-if",
"libc",
"log",
"rustversion",
"windows 0.58.0",
]
[[package]]
name = "getrandom"
version = "0.2.15"
@@ -1213,19 +1188,6 @@ version = "0.4.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
[[package]]
name = "loom"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
dependencies = [
"cfg-if",
"generator",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "lsp-server"
version = "0.7.8"
@@ -1265,15 +1227,6 @@ dependencies = [
"url",
]
[[package]]
name = "matchers"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
dependencies = [
"regex-automata 0.1.10",
]
[[package]]
name = "mbe"
version = "0.0.0"
@@ -1400,16 +1353,6 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"
[[package]]
name = "nu-ansi-term"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
dependencies = [
"overload",
"winapi",
]
[[package]]
name = "nu-ansi-term"
version = "0.50.1"
@@ -1471,12 +1414,6 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "overload"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "parking_lot"
version = "0.12.3"
@@ -1648,7 +1585,7 @@ dependencies = [
"indexmap",
"nix",
"tracing",
"windows 0.61.1",
"windows",
]
[[package]]
@@ -1864,50 +1801,6 @@ dependencies = [
"thiserror 2.0.12",
]
[[package]]
name = "regex"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata 0.4.9",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-automata"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
dependencies = [
"regex-syntax 0.6.29",
]
[[package]]
name = "regex-automata"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax 0.8.5",
]
[[package]]
name = "regex-syntax"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rowan"
version = "0.15.15"
@@ -2026,12 +1919,6 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rustversion"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
[[package]]
name = "ryu"
version = "1.0.20"
@@ -2040,9 +1927,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.21.1"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f80d5cf3c3fcab2cef898012f242a670477a1baa609267376af9cb4409026c5"
checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
dependencies = [
"boxcar",
"crossbeam-queue",
@@ -2063,15 +1950,15 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.21.1"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05303d72606fbf2b9c9523cda2039bb8ecb00304027a3cd7e52b02a65c7d9185"
checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
[[package]]
name = "salsa-macros"
version = "0.21.1"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb2f0e2a30c65cb3cd63440c491dde68d9af7e1be2b77832ac7057141107db50"
checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
dependencies = [
"heck",
"proc-macro2",
@@ -2556,15 +2443,9 @@ version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
dependencies = [
"matchers",
"nu-ansi-term 0.46.0",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
"thread_local",
"time",
"tracing",
"tracing-core",
"tracing-log",
]
@@ -2575,7 +2456,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
dependencies = [
"nu-ansi-term 0.50.1",
"nu-ansi-term",
"tracing-core",
"tracing-log",
"tracing-subscriber",
@@ -2709,22 +2590,6 @@ version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.9"
@@ -2734,22 +2599,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.58.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
dependencies = [
"windows-core 0.58.0",
"windows-targets 0.52.6",
]
[[package]]
name = "windows"
version = "0.61.1"
@@ -2757,7 +2606,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
dependencies = [
"windows-collections",
"windows-core 0.61.0",
"windows-core",
"windows-future",
"windows-link",
"windows-numerics",
@@ -2769,20 +2618,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
dependencies = [
"windows-core 0.61.0",
]
[[package]]
name = "windows-core"
version = "0.58.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
dependencies = [
"windows-implement 0.58.0",
"windows-interface 0.58.0",
"windows-result 0.2.0",
"windows-strings 0.1.0",
"windows-targets 0.52.6",
"windows-core",
]
[[package]]
@@ -2791,11 +2627,11 @@ version = "0.61.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980"
dependencies = [
"windows-implement 0.60.0",
"windows-interface 0.59.1",
"windows-implement",
"windows-interface",
"windows-link",
"windows-result 0.3.2",
"windows-strings 0.4.0",
"windows-result",
"windows-strings",
]
[[package]]
@@ -2804,21 +2640,10 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32"
dependencies = [
"windows-core 0.61.0",
"windows-core",
"windows-link",
]
[[package]]
name = "windows-implement"
version = "0.58.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-implement"
version = "0.60.0"
@@ -2830,17 +2655,6 @@ dependencies = [
"syn",
]
[[package]]
name = "windows-interface"
version = "0.58.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "windows-interface"
version = "0.59.1"
@@ -2864,19 +2678,10 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
dependencies = [
"windows-core 0.61.0",
"windows-core",
"windows-link",
]
[[package]]
name = "windows-result"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-result"
version = "0.3.2"
@@ -2886,16 +2691,6 @@ dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
dependencies = [
"windows-result 0.2.0",
"windows-targets 0.52.6",
]
[[package]]
name = "windows-strings"
version = "0.4.0"

View File

@@ -132,11 +132,8 @@ pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
rowan = "=0.15.15"
salsa = { version = "0.21.1", default-features = false, features = [
"rayon",
"salsa_unstable",
] }
salsa-macros = "0.21.1"
salsa = { version = "0.22.0", default-features = false, features = ["rayon","salsa_unstable"] }
salsa-macros = "0.22.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }

View File

@@ -395,21 +395,21 @@ impl BuiltDependency {
pub type CratesIdMap = FxHashMap<CrateBuilderId, Crate>;
#[salsa_macros::input]
#[derive(Debug)]
#[derive(Debug, PartialOrd, Ord)]
pub struct Crate {
#[return_ref]
#[returns(ref)]
pub data: BuiltCrateData,
/// Crate data that is not needed for analysis.
///
/// This is split into a separate field to increase incrementality.
#[return_ref]
#[returns(ref)]
pub extra_data: ExtraCrateData,
// This is in `Arc` because it is shared for all crates in a workspace.
#[return_ref]
#[returns(ref)]
pub workspace_data: Arc<CrateWorkspaceData>,
#[return_ref]
#[returns(ref)]
pub cfg_options: CfgOptions,
#[return_ref]
#[returns(ref)]
pub env: Env,
}
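The hunk above tracks the salsa 0.21.1 -> 0.22.0 upgrade, which renames the macro attribute `#[return_ref]` to `#[returns(ref)]`, both on input/tracked fields as here and in `#[salsa_macros::tracked(returns(ref))]` on query functions below. A minimal sketch of the new spelling, assuming the salsa 0.22 API (`SourceFile` and `text` are illustrative names, not from this PR):

    #[salsa_macros::input]
    struct SourceFile {
        #[returns(ref)] // was `#[return_ref]`; the accessor yields `&String` instead of a clone
        text: String,
    }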

View File

@@ -32,6 +32,7 @@ pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet}
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
#[salsa_macros::interned(no_lifetime)]
#[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
}
@@ -165,6 +166,7 @@ impl Files {
}
#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
@@ -356,7 +358,7 @@ fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFil
}
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
#[salsa_macros::tracked(return_ref)]
#[salsa_macros::tracked(returns(ref))]
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
let errors = db.parse(file_id).errors();
match &*errors {

View File

@@ -85,7 +85,7 @@ impl LangItemTarget {
}
/// Salsa query. This will look for lang items in a specific crate.
#[salsa_macros::tracked(return_ref)]
#[salsa_macros::tracked(returns(ref))]
pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangItems>> {
let _p = tracing::info_span!("crate_lang_items_query").entered();

View File

@@ -509,24 +509,6 @@ fn main() { "s"; }
);
}
#[test]
fn test_concat_idents_expand() {
check(
r##"
#[rustc_builtin_macro]
macro_rules! concat_idents {}
fn main() { concat_idents!(foo, bar); }
"##,
expect![[r##"
#[rustc_builtin_macro]
macro_rules! concat_idents {}
fn main() { foobar; }
"##]],
);
}
#[test]
fn test_quote_string() {
check(

View File

@@ -381,15 +381,15 @@ mod __ {
#[salsa_macros::tracked]
pub(crate) struct DefMapPair<'db> {
#[tracked]
#[return_ref]
#[returns(ref)]
pub(crate) def_map: DefMap,
#[return_ref]
#[returns(ref)]
pub(crate) local: LocalDefMap,
}
}
pub(crate) use __::DefMapPair;
#[salsa_macros::tracked(return_ref)]
#[salsa_macros::tracked(returns(ref))]
pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> {
let krate = crate_id.data(db);
let _p = tracing::info_span!(
@@ -420,7 +420,7 @@ pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefM
DefMapPair::new(db, def_map, local_def_map)
}
#[salsa_macros::tracked(return_ref)]
#[salsa_macros::tracked(returns(ref))]
pub fn block_def_map(db: &dyn DefDatabase, block_id: BlockId) -> DefMap {
let BlockLoc { ast_id, module } = block_id.lookup(db);

View File

@@ -75,7 +75,7 @@ impl TraitItems {
})
}
pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
pub fn macro_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
self.macro_calls.iter().flat_map(|it| it.iter()).copied()
}
}
@@ -109,7 +109,7 @@ impl ImplItems {
(Arc::new(ImplItems { items, macro_calls }), DefDiagnostics::new(diagnostics))
}
pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
pub fn macro_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
self.macro_calls.iter().flat_map(|it| it.iter()).copied()
}
}

View File

@@ -30,9 +30,18 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let events = <Arc<Mutex<Option<Vec<salsa::Event>>>>>::default();
let mut this = Self {
storage: Default::default(),
events: Default::default(),
storage: salsa::Storage::new(Some(Box::new({
let events = events.clone();
move |event| {
let mut events = events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event);
}
}
}))),
events,
files: Default::default(),
crates_map: Default::default(),
};
@@ -45,15 +54,7 @@ impl Default for TestDB {
}
#[salsa_macros::db]
impl salsa::Database for TestDB {
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
let event = event();
events.push(event);
}
}
}
impl salsa::Database for TestDB {}
impl fmt::Debug for TestDB {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
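salsa 0.22 also drops the `salsa_event` method from the `salsa::Database` trait (the impl above becomes empty); event observation is instead wired up through an optional callback handed to `salsa::Storage::new`, as the new `Default` impl shows. A minimal sketch of the pattern, assuming the salsa 0.22 API:

    use std::sync::{Arc, Mutex};

    let events: Arc<Mutex<Vec<salsa::Event>>> = Default::default();
    let storage = salsa::Storage::new(Some(Box::new({
        let events = events.clone();
        move |event| events.lock().unwrap().push(event)
    })));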

View File

@@ -140,7 +140,6 @@ register_builtin! {
EagerExpander:
(compile_error, CompileError) => compile_error_expand,
(concat, Concat) => concat_expand,
(concat_idents, ConcatIdents) => concat_idents_expand,
(concat_bytes, ConcatBytes) => concat_bytes_expand,
(include, Include) => include_expand,
(include_bytes, IncludeBytes) => include_bytes_expand,
@@ -660,30 +659,6 @@ fn concat_bytes_expand_subtree(
Ok(())
}
fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let mut err = None;
let mut ident = String::new();
for (i, t) in tt.iter().enumerate() {
match t {
TtElement::Leaf(tt::Leaf::Ident(id)) => {
ident.push_str(id.sym.as_str());
}
TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => {
err.get_or_insert(ExpandError::other(span, "unexpected token"));
}
}
}
// FIXME merge spans
let ident = tt::Ident { sym: Symbol::intern(&ident), span, is_raw: tt::IdentIsRaw::No };
ExpandResult { value: quote!(span =>#ident), err }
}
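For context: this deleted expander implemented the unstable `concat_idents!` builtin, which has since been removed from rustc, so the rustup deletes both the expander and its test (shown earlier). Its entire behavior was to splice comma-separated identifiers into one:

    // concat_idents!(foo, bar)  =>  expanded to the single identifier `foobar`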
fn relative_file(
db: &dyn ExpandDatabase,
call_id: MacroCallId,

View File

@@ -259,7 +259,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
}
fn well_known_trait_id(
&self,
well_known_trait: rust_ir::WellKnownTrait,
well_known_trait: WellKnownTrait,
) -> Option<chalk_ir::TraitId<Interner>> {
let lang_attr = lang_item_from_well_known_trait(well_known_trait);
let trait_ = lang_attr.resolve_trait(self.db, self.krate)?;

View File

@@ -25,7 +25,7 @@ use triomphe::Arc;
use typed_arena::Arena;
use crate::{
Adjust, InferenceResult, Interner, Ty, TyExt, TyKind,
Adjust, InferenceResult, Interner, TraitEnvironment, Ty, TyExt, TyKind,
db::HirDatabase,
diagnostics::match_check::{
self,
@@ -74,8 +74,9 @@ impl BodyValidationDiagnostic {
let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner);
let body = db.body(owner);
let env = db.trait_environment_for_body(owner);
let mut validator =
ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints };
ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints, env };
validator.validate_body(db);
validator.diagnostics
}
@@ -85,6 +86,7 @@ struct ExprValidator {
owner: DefWithBodyId,
body: Arc<Body>,
infer: Arc<InferenceResult>,
env: Arc<TraitEnvironment>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
}
@@ -190,7 +192,7 @@ impl ExprValidator {
return;
}
let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone());
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len());
@@ -317,11 +319,14 @@ impl ExprValidator {
return;
};
let pattern_arena = Arena::new();
let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone());
for stmt in &**statements {
let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else {
continue;
};
if self.infer.type_mismatch_for_pat(pat).is_some() {
continue;
}
let Some(initializer) = initializer else { continue };
let ty = &self.infer[initializer];
if ty.contains_unknown() {

View File

@@ -12,9 +12,10 @@ use rustc_pattern_analysis::{
};
use smallvec::{SmallVec, smallvec};
use stdx::never;
use triomphe::Arc;
use crate::{
AdtId, Interner, Scalar, Ty, TyExt, TyKind,
AdtId, Interner, Scalar, TraitEnvironment, Ty, TyExt, TyKind,
db::HirDatabase,
infer::normalize,
inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
@@ -69,13 +70,19 @@ pub(crate) struct MatchCheckCtx<'db> {
body: DefWithBodyId,
pub(crate) db: &'db dyn HirDatabase,
exhaustive_patterns: bool,
env: Arc<TraitEnvironment>,
}
impl<'db> MatchCheckCtx<'db> {
pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self {
pub(crate) fn new(
module: ModuleId,
body: DefWithBodyId,
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment>,
) -> Self {
let def_map = module.crate_def_map(db);
let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns);
Self { module, body, db, exhaustive_patterns }
Self { module, body, db, exhaustive_patterns, env }
}
pub(crate) fn compute_match_usefulness(
@@ -100,7 +107,7 @@ impl<'db> MatchCheckCtx<'db> {
}
fn is_uninhabited(&self, ty: &Ty) -> bool {
is_ty_uninhabited_from(self.db, ty, self.module)
is_ty_uninhabited_from(self.db, ty, self.module, self.env.clone())
}
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
@@ -459,8 +466,13 @@ impl PatCx for MatchCheckCtx<'_> {
} else {
let mut variants = IndexVec::with_capacity(enum_data.variants.len());
for &(variant, _) in enum_data.variants.iter() {
let is_uninhabited =
is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
let is_uninhabited = is_enum_variant_uninhabited_from(
cx.db,
variant,
subst,
cx.module,
self.env.clone(),
);
let visibility = if is_uninhabited {
VariantVisibility::Empty
} else {

View File

@@ -1463,6 +1463,8 @@ impl HirDisplay for Ty {
}
if f.closure_style == ClosureStyle::RANotation || !sig.ret().is_unit() {
write!(f, " -> ")?;
// FIXME: We display `AsyncFn` as `-> impl Future`, but this is hard to fix because
// we don't have a trait environment here, required to normalize `<Ret as Future>::Output`.
sig.ret().hir_fmt(f)?;
}
} else {

View File

@@ -38,7 +38,7 @@ use crate::{
infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever},
make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
to_chalk_trait_id,
to_assoc_type_id, to_chalk_trait_id,
traits::FnTrait,
utils::{self, elaborate_clause_supertraits},
};
@@ -245,7 +245,7 @@ impl InferenceContext<'_> {
}
fn deduce_closure_kind_from_predicate_clauses(
&self,
&mut self,
expected_ty: &Ty,
clauses: impl DoubleEndedIterator<Item = WhereClause>,
closure_kind: ClosureKind,
@@ -378,7 +378,7 @@ impl InferenceContext<'_> {
}
fn deduce_sig_from_projection(
&self,
&mut self,
closure_kind: ClosureKind,
projection_ty: &ProjectionTy,
projected_ty: &Ty,
@@ -392,13 +392,16 @@ impl InferenceContext<'_> {
// For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits,
// for closures and async closures, respectively.
match closure_kind {
ClosureKind::Closure | ClosureKind::Async
if self.fn_trait_kind_from_trait_id(trait_).is_some() =>
{
self.extract_sig_from_projection(projection_ty, projected_ty)
}
_ => None,
let fn_trait_kind = self.fn_trait_kind_from_trait_id(trait_)?;
if !matches!(closure_kind, ClosureKind::Closure | ClosureKind::Async) {
return None;
}
if fn_trait_kind.is_async() {
// If the expected trait is `AsyncFn(...) -> X`, we don't know what the return type is,
// but we do know it must implement `Future<Output = X>`.
self.extract_async_fn_sig_from_projection(projection_ty, projected_ty)
} else {
self.extract_sig_from_projection(projection_ty, projected_ty)
}
}
@@ -424,6 +427,39 @@ impl InferenceContext<'_> {
)))
}
fn extract_async_fn_sig_from_projection(
&mut self,
projection_ty: &ProjectionTy,
projected_ty: &Ty,
) -> Option<FnSubst<Interner>> {
let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner);
let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else {
return None;
};
let ret_param_future_output = projected_ty;
let ret_param_future = self.table.new_type_var();
let future_output =
LangItem::FutureOutput.resolve_type_alias(self.db, self.resolver.krate())?;
let future_projection = crate::AliasTy::Projection(crate::ProjectionTy {
associated_ty_id: to_assoc_type_id(future_output),
substitution: Substitution::from1(Interner, ret_param_future.clone()),
});
self.table.register_obligation(
crate::AliasEq { alias: future_projection, ty: ret_param_future_output.clone() }
.cast(Interner),
);
Some(FnSubst(Substitution::from_iter(
Interner,
input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new(
Interner,
chalk_ir::GenericArgData::Ty(ret_param_future),
))),
)))
}
fn fn_trait_kind_from_trait_id(&self, trait_id: hir_def::TraitId) -> Option<FnTrait> {
FnTrait::from_lang_item(self.db.lang_attr(trait_id.into())?)
}
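A small stable-Rust illustration of the case the new `extract_async_fn_sig_from_projection` path handles: when a closure is checked against an `AsyncFn(...) -> X` bound, its real return type is an unnameable future, so inference introduces a fresh type variable for it and registers the obligation that it implement `Future<Output = X>` (illustrative names, not from this PR):

    fn takes<F: AsyncFn() -> u32>(_: F) {}

    fn main() {
        // The async closure returns some future `Fut`; the deduced signature
        // returns `Fut` plus the obligation `Fut: Future<Output = u32>`.
        takes(async || 42);
    }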

View File

@@ -7,17 +7,24 @@ use chalk_ir::{
};
use hir_def::{AdtId, EnumVariantId, ModuleId, VariantId, visibility::Visibility};
use rustc_hash::FxHashSet;
use triomphe::Arc;
use crate::{
Binders, Interner, Substitution, Ty, TyKind, consteval::try_const_usize, db::HirDatabase,
AliasTy, Binders, Interner, Substitution, TraitEnvironment, Ty, TyKind,
consteval::try_const_usize, db::HirDatabase,
};
// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a type is visibly uninhabited from a particular module.
pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
pub(crate) fn is_ty_uninhabited_from(
db: &dyn HirDatabase,
ty: &Ty,
target_mod: ModuleId,
env: Arc<TraitEnvironment>,
) -> bool {
let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@@ -29,11 +36,12 @@ pub(crate) fn is_enum_variant_uninhabited_from(
variant: EnumVariantId,
subst: &Substitution,
target_mod: ModuleId,
env: Arc<TraitEnvironment>,
) -> bool {
let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered();
let mut uninhabited_from =
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env };
let inhabitedness = uninhabited_from.visit_variant(variant.into(), subst);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@@ -44,6 +52,7 @@ struct UninhabitedFrom<'a> {
// guard for preventing stack overflow in non trivial non terminating types
max_depth: usize,
db: &'a dyn HirDatabase,
env: Arc<TraitEnvironment>,
}
const CONTINUE_OPAQUELY_INHABITED: ControlFlow<VisiblyUninhabited> = Continue(());
@@ -78,6 +87,12 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
TyKind::Alias(AliasTy::Projection(projection)) => {
// FIXME: I think this currently isn't used for monomorphized bodies, so there is no need to handle
// `TyKind::AssociatedType`, but perhaps in the future it will.
let normalized = self.db.normalize_projection(projection.clone(), self.env.clone());
self.visit_ty(&normalized, outer_binder)
}
_ => CONTINUE_OPAQUELY_INHABITED,
};
self.recursive_ty.remove(ty);
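A sketch of why the visitor now carries a `TraitEnvironment`: emptiness can hide behind an associated type that only normalization exposes (illustrative types, not from this PR):

    enum Never {}

    trait Project {
        type Out;
    }
    impl Project for () {
        type Out = Never;
    }

    // `<() as Project>::Out` is only known to be uninhabited once the
    // projection is normalized to `Never`, which needs a trait environment.
    fn demo(x: <() as Project>::Out) -> ! {
        match x {} // exhaustive: the scrutinee normalizes to an empty enum
    }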

View File

@@ -1121,7 +1121,7 @@ impl Evaluator<'_> {
// We don't call any drop glue yet, so there is nothing here
Ok(())
}
"transmute" => {
"transmute" | "transmute_unchecked" => {
let [arg] = args else {
return Err(MirEvalError::InternalError(
"transmute arg is not provided".into(),

View File

@@ -25,7 +25,7 @@ use syntax::TextRange;
use triomphe::Arc;
use crate::{
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
Adjust, Adjustment, AutoBorrow, CallableDefId, TraitEnvironment, TyBuilder, TyExt,
consteval::ConstEvalError,
db::{HirDatabase, InternedClosure, InternedClosureId},
display::{DisplayTarget, HirDisplay, hir_display_with_store},
@@ -79,6 +79,7 @@ struct MirLowerCtx<'db> {
infer: &'db InferenceResult,
resolver: Resolver<'db>,
drop_scopes: Vec<DropScope>,
env: Arc<TraitEnvironment>,
}
// FIXME: Make this smaller, its stored in database queries
@@ -288,6 +289,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
closures: vec![],
};
let resolver = owner.resolver(db);
let env = db.trait_environment_for_body(owner);
MirLowerCtx {
result: mir,
@@ -300,6 +302,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
labeled_loop_blocks: Default::default(),
discr_temp: None,
drop_scopes: vec![DropScope::default()],
env,
}
}
@@ -944,10 +947,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let cast_kind = if source_ty.as_reference().is_some() {
CastKind::PointerCoercion(PointerCast::ArrayToPointer)
} else {
let mut table = InferenceTable::new(
self.db,
self.db.trait_environment_for_body(self.owner),
);
let mut table = InferenceTable::new(self.db, self.env.clone());
cast_kind(&mut table, &source_ty, &target_ty)?
};
@@ -1412,11 +1412,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size = || {
self.db
.layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))
.map(|it| it.size.bytes_usize())
};
let size =
|| self.db.layout_of_ty(ty.clone(), self.env.clone()).map(|it| it.size.bytes_usize());
const USIZE_SIZE: usize = size_of::<usize>();
let bytes: Box<[_]> = match l {
hir_def::hir::Literal::String(b) => {
@@ -1723,7 +1720,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db))
is_ty_uninhabited_from(
self.db,
&self.infer[expr_id],
self.owner.module(self.db),
self.env.clone(),
)
}
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and

View File

@@ -27,9 +27,18 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let events = <Arc<Mutex<Option<Vec<salsa::Event>>>>>::default();
let mut this = Self {
storage: Default::default(),
events: Default::default(),
storage: salsa::Storage::new(Some(Box::new({
let events = events.clone();
move |event| {
let mut events = events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event);
}
}
}))),
events,
files: Default::default(),
crates_map: Default::default(),
};
@@ -103,14 +112,7 @@ impl SourceDatabase for TestDB {
}
#[salsa_macros::db]
impl salsa::Database for TestDB {
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
let mut events = self.events.lock().unwrap();
if let Some(events) = &mut *events {
events.push(event());
}
}
}
impl salsa::Database for TestDB {}
impl panic::RefUnwindSafe for TestDB {}

View File

@@ -4903,3 +4903,30 @@ fn main() {
"#]],
);
}
#[test]
fn async_fn_return_type() {
check_infer(
r#"
//- minicore: async_fn
fn foo<F: AsyncFn() -> R, R>(_: F) -> R {
loop {}
}
fn main() {
foo(async move || ());
}
"#,
expect![[r#"
29..30 '_': F
40..55 '{ loop {} }': R
46..53 'loop {}': !
51..53 '{}': ()
67..97 '{ ...()); }': ()
73..76 'foo': fn foo<impl AsyncFn() -> impl Future<Output = ()>, ()>(impl AsyncFn() -> impl Future<Output = ()>)
73..94 'foo(as...|| ())': ()
77..93 'async ... || ()': impl AsyncFn() -> impl Future<Output = ()>
91..93 '()': ()
"#]],
);
}

View File

@@ -291,4 +291,9 @@ impl FnTrait {
pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<TraitId> {
self.lang_item().resolve_trait(db, krate)
}
#[inline]
pub(crate) fn is_async(self) -> bool {
matches!(self, FnTrait::AsyncFn | FnTrait::AsyncFnMut | FnTrait::AsyncFnOnce)
}
}

View File

@@ -5972,6 +5972,59 @@ impl Layout {
}
}
pub fn tail_padding(&self, field_size: &mut impl FnMut(usize) -> Option<u64>) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => None,
layout::FieldsShape::Array { stride, count } => count.checked_sub(1).and_then(|tail| {
let tail_field_size = field_size(tail as usize)?;
let offset = stride.bytes() * tail;
self.0.size.bytes().checked_sub(offset)?.checked_sub(tail_field_size)
}),
layout::FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
let tail = memory_index.last_index()?;
let tail_field_size = field_size(tail.0.into_raw().into_u32() as usize)?;
let offset = offsets.get(tail)?.bytes();
self.0.size.bytes().checked_sub(offset)?.checked_sub(tail_field_size)
}
}
}
pub fn largest_padding(
&self,
field_size: &mut impl FnMut(usize) -> Option<u64>,
) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => None,
layout::FieldsShape::Array { stride: _, count: 0 } => None,
layout::FieldsShape::Array { stride, .. } => {
let size = field_size(0)?;
stride.bytes().checked_sub(size)
}
layout::FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
let mut reverse_index = vec![None; memory_index.len()];
for (src, (mem, offset)) in memory_index.iter().zip(offsets.iter()).enumerate() {
reverse_index[*mem as usize] = Some((src, offset.bytes()));
}
if reverse_index.iter().any(|it| it.is_none()) {
stdx::never!();
return None;
}
reverse_index
.into_iter()
.flatten()
.chain(std::iter::once((0, self.0.size.bytes())))
.tuple_windows()
.filter_map(|((i, start), (_, end))| {
let size = field_size(i)?;
end.checked_sub(start)?.checked_sub(size)
})
.max()
}
}
}
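A worked example of what the two new helpers compute, for an illustrative `repr(C)` struct (not from this PR):

    #[repr(C)]
    struct S {
        a: u32, // offset 0, size 4
        b: u8,  // offset 4, size 1
    }
    // align = 4, so size_of::<S>() == 8.
    // tail_padding: 8 - (4 + 1) = 3 bytes after the memory-wise last field.
    // largest_padding: the maximum gap between consecutive fields in memory
    // order, including that tail gap, so also 3 bytes here.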
pub fn enum_tag_size(&self) -> Option<usize> {
let tag_size =
if let layout::Variants::Multiple { tag, tag_encoding, .. } = &self.0.variants {

View File

@@ -147,7 +147,7 @@ impl TypeInfo {
}
/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
pub struct Semantics<'db, DB: ?Sized> {
pub db: &'db DB,
imp: SemanticsImpl<'db>,
}
@@ -407,14 +407,10 @@ impl<'db> SemanticsImpl<'db> {
res
}
pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = sa.expand(self.db, macro_call)?;
pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
let file_id = self.to_def(macro_call)?;
let node = self.parse_or_expand(file_id.into());
Some(node)
Some(InFile::new(file_id.into(), node))
}
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
@@ -434,10 +430,7 @@
&self,
macro_call: &ast::MacroCall,
) -> Option<ExpandResult<SyntaxNode>> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let macro_call = InFile::new(sa.file_id, macro_call);
let file_id = sa.expand(self.db, macro_call)?;
let file_id = self.to_def(macro_call)?;
let macro_call = self.db.lookup_intern_macro_call(file_id);
let skip = matches!(
@@ -468,10 +461,10 @@
}
/// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
Some(self.expand(macro_call_id))
Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -574,9 +567,7 @@ impl<'db> SemanticsImpl<'db> {
speculative_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
let analyzer = self.analyze_no_infer(actual_macro_call.syntax())?;
let macro_call = InFile::new(analyzer.file_id, actual_macro_call);
let macro_file = analyzer.expansion(macro_call)?;
let macro_file = self.to_def(actual_macro_call)?;
hir_expand::db::expand_speculative(
self.db,
macro_file,
@@ -778,6 +769,31 @@ impl<'db> SemanticsImpl<'db> {
})
}
/// Descends the token into the include expansion, if its file is an included file.
pub fn descend_token_into_include_expansion(
&self,
tok: InRealFile<SyntaxToken>,
) -> InFile<SyntaxToken> {
let Some(include) =
self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
else {
return tok.into();
};
let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
Some(
ctx.cache
.get_or_insert_expansion(ctx.db, include)
.map_range_down(span)?
.map(SmallVec::<[_; 2]>::from_iter),
)
}) else {
return tok.into();
};
// We should only get one result at most
mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
}
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
@@ -846,49 +862,35 @@ impl<'db> SemanticsImpl<'db> {
res
}
// FIXME: This isn't quite right wrt to inner attributes
/// Does a syntactic traversal to check whether this token might be inside a macro call
pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
token.parent_ancestors().any(|ancestor| {
pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
// FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
// this is only used on real (not macro) files so this is not a problem.
token.value.parent_ancestors().any(|ancestor| {
if ast::MacroCall::can_cast(ancestor.kind()) {
return true;
}
// Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
let Some(item) = ast::Item::cast(ancestor) else { return false };
item.attrs().any(|attr| {
let Some(meta) = attr.meta() else { return false };
let Some(path) = meta.path() else { return false };
if let Some(attr_name) = path.as_single_name_ref() {
let attr_name = attr_name.text();
let attr_name = Symbol::intern(attr_name.as_str());
if attr_name == sym::derive {
return true;
}
// We ignore `#[test]` and friends in the def map, so we cannot expand them.
// FIXME: We match by text. This is both hacky and incorrect (people can, and do, create
// other macros named `test`). We cannot fix that unfortunately because we use this method
// for speculative expansion in completion, which we cannot analyze. Fortunately, most macros
// named `test` are test-like, meaning their expansion is not terribly important for IDE.
if attr_name == sym::test
|| attr_name == sym::bench
|| attr_name == sym::test_case
|| find_builtin_attr_idx(&attr_name).is_some()
{
return false;
}
let Some(item) = ast::Item::cast(ancestor) else {
return false;
};
// Optimization to skip the semantic check.
if item.attrs().all(|attr| {
attr.simple_name()
.is_some_and(|attr| find_builtin_attr_idx(&Symbol::intern(&attr)).is_some())
}) {
return false;
}
self.with_ctx(|ctx| {
if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
return true;
}
let mut segments = path.segments();
let mut next_segment_text = || segments.next().and_then(|it| it.name_ref());
// `#[core::prelude::rust_2024::test]` or `#[std::prelude::rust_2024::test]`.
if next_segment_text().is_some_and(|it| matches!(&*it.text(), "core" | "std"))
&& next_segment_text().is_some_and(|it| it.text() == "prelude")
&& next_segment_text().is_some()
&& next_segment_text()
.is_some_and(|it| matches!(&*it.text(), "test" | "bench" | "test_case"))
{
return false;
}
true
let adt = match item {
ast::Item::Struct(it) => it.into(),
ast::Item::Enum(it) => it.into(),
ast::Item::Union(it) => it.into(),
_ => return false,
};
ctx.has_derives(token.with_value(&adt))
})
})
}
@@ -1111,16 +1113,7 @@ impl<'db> SemanticsImpl<'db> {
let file_id = match m_cache.get(&mcall) {
Some(&it) => it,
None => {
let it = token
.parent()
.and_then(|parent| {
self.analyze_impl(
InFile::new(expansion, &parent),
None,
false,
)
})?
.expand(self.db, mcall.as_ref())?;
let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
m_cache.insert(mcall, it);
it
}
@@ -1560,14 +1553,9 @@ impl<'db> SemanticsImpl<'db> {
}
pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
self.with_ctx(|ctx| {
ctx.macro_call_to_macro_call(macro_call)
.and_then(|call| macro_call_to_macro_id(ctx, call))
.map(Into::into)
})
.or_else(|| {
self.analyze(macro_call.value.syntax())?.resolve_macro_call(self.db, macro_call)
})
self.to_def2(macro_call)
.and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
.map(Into::into)
}
pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
@@ -1576,14 +1564,8 @@
}
pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
let sa = self.analyze(macro_call.syntax())?;
self.db
.parse_macro_expansion(
sa.expand(self.db, self.wrap_node_infile(macro_call.clone()).as_ref())?,
)
.value
.1
.matched_arm
let file_id = self.to_def(macro_call)?;
self.db.parse_macro_expansion(file_id).value.1.matched_arm
}
pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
@@ -1688,6 +1670,10 @@ impl<'db> SemanticsImpl<'db> {
T::to_def(self, src)
}
pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
T::to_def(self, src)
}
fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
}

View File

@@ -36,9 +36,14 @@ impl ChildBySource for TraitId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.trait_items(*self);
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
let ptr = ast_id.to_ptr(db);
if let Some(ptr) = ptr.cast::<ast::MacroCall>() {
res[keys::MACRO_CALL].insert(ptr, call_id);
} else {
res[keys::ATTR_MACRO_CALL].insert(ptr, call_id);
}
},
);
data.items.iter().for_each(|&(_, item)| {
@@ -50,10 +55,14 @@ impl ChildBySource for TraitId {
impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_items(*self);
// FIXME: Macro calls
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
let ptr = ast_id.to_ptr(db);
if let Some(ptr) = ptr.cast::<ast::MacroCall>() {
res[keys::MACRO_CALL].insert(ptr, call_id);
} else {
res[keys::ATTR_MACRO_CALL].insert(ptr, call_id);
}
},
);
data.items.iter().for_each(|&(_, item)| {

View File

@@ -399,19 +399,6 @@ impl SourceToDefCtx<'_, '_> {
Some((container, label?))
}
pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
let map = self.dyn_map(src)?;
map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
}
pub(super) fn macro_call_to_macro_call(
&mut self,
src: InFile<&ast::MacroCall>,
) -> Option<MacroCallId> {
let map = self.dyn_map(src)?;
map[keys::MACRO_CALL].get(&AstPtr::new(src.value)).copied()
}
/// (AttrId, derive attribute call id, derive call ids)
pub(super) fn attr_to_derive_macro_call(
&mut self,
@@ -449,6 +436,17 @@ impl SourceToDefCtx<'_, '_> {
.or_insert_with(|| container.child_by_source(db, file_id))
}
pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
self.to_def(src, keys::ATTR_MACRO_CALL)
}
pub(super) fn macro_call_to_macro_call(
&mut self,
src: InFile<&ast::MacroCall>,
) -> Option<MacroCallId> {
self.to_def(src, keys::MACRO_CALL)
}
pub(super) fn type_param_to_def(
&mut self,
src: InFile<&ast::TypeParam>,

View File

@@ -26,12 +26,12 @@ use hir_def::{
},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
lang_item::LangItem,
nameres::{MacroSubNs, crate_def_map},
nameres::MacroSubNs,
resolver::{HasResolver, Resolver, TypeNs, ValueNs, resolver_for_scope},
type_ref::{Mutability, TypeRefId},
};
use hir_expand::{
HirFileId, InFile, MacroCallId,
HirFileId, InFile,
mod_path::{ModPath, PathKind, path},
name::{AsName, Name},
};
@@ -218,10 +214,6 @@ impl<'db> SourceAnalyzer<'db> {
})
}
pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.store_sm()?.expansion(node)
}
fn trait_environment(&self, db: &'db dyn HirDatabase) -> Arc<TraitEnvironment> {
self.body_().map(|(def, ..)| def).map_or_else(
|| TraitEnvironment::empty(self.resolver.krate()),
@@ -745,22 +741,6 @@ impl<'db> SourceAnalyzer<'db> {
))
}
pub(crate) fn resolve_macro_call(
&self,
db: &'db dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let bs = self.store_sm()?;
bs.expansion(macro_call).and_then(|it| {
// FIXME: Block def maps
let def = it.lookup(db).def;
crate_def_map(db, def.krate)
.macro_def_to_macro_id
.get(&def.kind.erased_ast_id())
.map(|it| (*it).into())
})
}
pub(crate) fn resolve_bind_pat_to_const(
&self,
db: &'db dyn HirDatabase,
@@ -1292,18 +1272,6 @@ impl<'db> SourceAnalyzer<'db> {
.collect()
}
pub(crate) fn expand(
&self,
db: &'db dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<MacroCallId> {
self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
self.resolver.item_scope().macro_invoc(
macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
)
})
}
pub(crate) fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
let infer = self.infer()?;
let expr_id = self.expr_id(record_lit.into())?;

View File

@@ -0,0 +1,281 @@
use std::iter;
use ide_db::{
assists::{AssistId, ExprFillDefaultMode},
ty_filter::TryEnum,
};
use syntax::{
AstNode, T,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make,
syntax_factory::SyntaxFactory,
},
};
use crate::assist_context::{AssistContext, Assists};
// Assist: desugar_try_expr_match
//
// Replaces a `try` expression with a `match` expression.
//
// ```
// # //- minicore: try, option
// fn handle() {
// let pat = Some(true)$0?;
// }
// ```
// ->
// ```
// fn handle() {
// let pat = match Some(true) {
// Some(it) => it,
// None => return None,
// };
// }
// ```
// Assist: desugar_try_expr_let_else
//
// Replaces a `try` expression with a `let else` statement.
//
// ```
// # //- minicore: try, option
// fn handle() {
// let pat = Some(true)$0?;
// }
// ```
// ->
// ```
// fn handle() {
// let Some(pat) = Some(true) else {
// return None;
// };
// }
// ```
pub(crate) fn desugar_try_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let question_tok = ctx.find_token_syntax_at_offset(T![?])?;
let try_expr = question_tok.parent().and_then(ast::TryExpr::cast)?;
let expr = try_expr.expr()?;
let expr_type_info = ctx.sema.type_of_expr(&expr)?;
let try_enum = TryEnum::from_ty(&ctx.sema, &expr_type_info.original)?;
let target = try_expr.syntax().text_range();
acc.add(
AssistId::refactor_rewrite("desugar_try_expr_match"),
"Replace try expression with match",
target,
|edit| {
let sad_pat = match try_enum {
TryEnum::Option => make::path_pat(make::ext::ident_path("None")),
TryEnum::Result => make::tuple_struct_pat(
make::ext::ident_path("Err"),
iter::once(make::path_pat(make::ext::ident_path("err"))),
)
.into(),
};
let sad_expr = match try_enum {
TryEnum::Option => {
make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
}
TryEnum::Result => make::expr_return(Some(
make::expr_call(
make::expr_path(make::ext::ident_path("Err")),
make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
)
.into(),
)),
};
let happy_arm = make::match_arm(
try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
None,
make::expr_path(make::ext::ident_path("it")),
);
let sad_arm = make::match_arm(sad_pat, None, sad_expr);
let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);
let expr_match = make::expr_match(expr.clone(), match_arm_list)
.indent(IndentLevel::from_node(try_expr.syntax()));
edit.replace_ast::<ast::Expr>(try_expr.clone().into(), expr_match.into());
},
);
if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) {
if let_stmt.let_else().is_none() {
let pat = let_stmt.pat()?;
acc.add(
AssistId::refactor_rewrite("desugar_try_expr_let_else"),
"Replace try expression with let else",
target,
|builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(let_stmt.syntax());
let indent_level = IndentLevel::from_node(let_stmt.syntax());
let new_let_stmt = make.let_else_stmt(
try_enum.happy_pattern(pat),
let_stmt.ty(),
expr,
make.block_expr(
iter::once(
make.expr_stmt(
make.expr_return(Some(match try_enum {
TryEnum::Option => make.expr_path(make.ident_path("None")),
TryEnum::Result => make
.expr_call(
make.expr_path(make.ident_path("Err")),
make.arg_list(iter::once(
match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make
.expr_macro(
make.ident_path("todo"),
make.token_tree(
syntax::SyntaxKind::L_PAREN,
[],
),
)
.into(),
ExprFillDefaultMode::Underscore => {
make.expr_underscore().into()
}
ExprFillDefaultMode::Default => make
.expr_macro(
make.ident_path("todo"),
make.token_tree(
syntax::SyntaxKind::L_PAREN,
[],
),
)
.into(),
},
)),
)
.into(),
}))
.indent(indent_level + 1)
.into(),
)
.into(),
),
None,
)
.indent(indent_level),
);
editor.replace(let_stmt.syntax(), new_let_stmt.syntax());
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
);
}
}
Some(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
#[test]
fn test_desugar_try_expr_not_applicable() {
check_assist_not_applicable(
desugar_try_expr,
r#"
fn test() {
let pat: u32 = 25$0;
}
"#,
);
}
#[test]
fn test_desugar_try_expr_option() {
check_assist(
desugar_try_expr,
r#"
//- minicore: try, option
fn test() {
let pat = Some(true)$0?;
}
"#,
r#"
fn test() {
let pat = match Some(true) {
Some(it) => it,
None => return None,
};
}
"#,
);
}
#[test]
fn test_desugar_try_expr_result() {
check_assist(
desugar_try_expr,
r#"
//- minicore: try, from, result
fn test() {
let pat = Ok(true)$0?;
}
"#,
r#"
fn test() {
let pat = match Ok(true) {
Ok(it) => it,
Err(err) => return Err(err),
};
}
"#,
);
}
#[test]
fn test_desugar_try_expr_option_let_else() {
check_assist_by_label(
desugar_try_expr,
r#"
//- minicore: try, option
fn test() {
let pat = Some(true)$0?;
}
"#,
r#"
fn test() {
let Some(pat) = Some(true) else {
return None;
};
}
"#,
"Replace try expression with let else",
);
}
#[test]
fn test_desugar_try_expr_result_let_else() {
check_assist_by_label(
desugar_try_expr,
r#"
//- minicore: try, from, result
fn test() {
let pat = Ok(true)$0?;
}
"#,
r#"
fn test() {
let Ok(pat) = Ok(true) else {
return Err(todo!());
};
}
"#,
"Replace try expression with let else",
);
}
}
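The `let else` variant fills the sad path according to `ExprFillDefaultMode`; with the `Todo` mode exercised by the tests above, the generated `Result` code type-checks like this sketch (illustrative function, not from this PR):

    fn test() -> Result<(), ()> {
        let Ok(pat) = Ok::<bool, ()>(true) else {
            return Err(todo!()); // filler chosen by `ExprFillDefaultMode::Todo`
        };
        let _ = pat;
        Ok(())
    }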

View File

@@ -1,7 +1,7 @@
use ide_db::famous_defs::FamousDefs;
use syntax::{
AstNode,
ast::{self, make},
ast::{self, edit_in_place::Indent, make},
ted,
};
@@ -46,6 +46,7 @@ use crate::{AssistContext, AssistId, Assists};
// ```
pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let indent = impl_def.indent_level();
let trait_ = impl_def.trait_()?;
if let ast::Type::PathType(trait_path) = trait_ {
@@ -97,8 +98,8 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
})?;
let assoc_list = make::assoc_item_list().clone_for_update();
assoc_list.add_item(syntax::ast::AssocItem::Fn(fn_));
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));
let target = impl_def.syntax().text_range();
acc.add(
@@ -106,7 +107,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate `IndexMut` impl from this `Index` trait",
target,
|edit| {
edit.insert(target.start(), format!("$0{impl_def}\n\n"));
edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}"));
},
)
}
@@ -189,6 +190,93 @@ impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
);
}
#[test]
fn test_generate_mut_trait_impl_non_zero_indent() {
check_assist(
generate_mut_trait_impl,
r#"
//- minicore: index
mod foo {
pub enum Axis { X = 0, Y = 1, Z = 2 }
impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
let var_name = &self[index as usize];
var_name
}
}
}
"#,
r#"
mod foo {
pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
let var_name = &self[index as usize];
var_name
}
}
impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
let var_name = &self[index as usize];
var_name
}
}
}
"#,
);
check_assist(
generate_mut_trait_impl,
r#"
//- minicore: index
mod foo {
mod bar {
pub enum Axis { X = 0, Y = 1, Z = 2 }
impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
let var_name = &self[index as usize];
var_name
}
}
}
}
"#,
r#"
mod foo {
mod bar {
pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
let var_name = &self[index as usize];
var_name
}
}
impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
let var_name = &self[index as usize];
var_name
}
}
}
}
"#,
);
}
#[test]
fn test_generate_mut_trait_impl_not_applicable() {
check_assist_not_applicable(

View File

@@ -129,17 +129,23 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
// Get the mutable version of the impl to modify
let impl_def = if let Some(impl_def) = impl_def {
fn_.indent(impl_def.indent_level());
builder.make_mut(impl_def)
} else {
// Generate a new impl to add the method to
let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone()));
let indent_level = strukt.indent_level();
fn_.indent(indent_level);
// Insert it after the adt
let strukt = builder.make_mut(strukt.clone());
ted::insert_all_raw(
ted::Position::after(strukt.syntax()),
vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
vec![
make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
impl_def.syntax().clone().into(),
],
);
impl_def
@@ -425,6 +431,135 @@ impl Foo {
);
}
#[test]
fn non_zero_indent() {
check_assist(
generate_new,
r#"
mod foo {
struct $0Foo {}
}
"#,
r#"
mod foo {
struct Foo {}
impl Foo {
fn $0new() -> Self {
Self { }
}
}
}
"#,
);
check_assist(
generate_new,
r#"
mod foo {
mod bar {
struct $0Foo {}
}
}
"#,
r#"
mod foo {
mod bar {
struct Foo {}
impl Foo {
fn $0new() -> Self {
Self { }
}
}
}
}
"#,
);
check_assist(
generate_new,
r#"
mod foo {
struct $0Foo {}
impl Foo {
fn some() {}
}
}
"#,
r#"
mod foo {
struct Foo {}
impl Foo {
fn $0new() -> Self {
Self { }
}
fn some() {}
}
}
"#,
);
check_assist(
generate_new,
r#"
mod foo {
mod bar {
struct $0Foo {}
impl Foo {
fn some() {}
}
}
}
"#,
r#"
mod foo {
mod bar {
struct Foo {}
impl Foo {
fn $0new() -> Self {
Self { }
}
fn some() {}
}
}
}
"#,
);
check_assist(
generate_new,
r#"
mod foo {
mod bar {
struct $0Foo {}
impl Foo {
fn some() {}
}
}
}
"#,
r#"
mod foo {
mod bar {
struct Foo {}
impl Foo {
fn $0new() -> Self {
Self { }
}
fn some() {}
}
}
}
"#,
);
}
#[test]
fn check_visibility_of_new_fn_based_on_struct() {
check_assist(

View File

@@ -1,148 +0,0 @@
use std::iter;
use ide_db::{assists::AssistId, ty_filter::TryEnum};
use syntax::{
AstNode, T,
ast::{
self,
edit::{AstNodeEdit, IndentLevel},
make,
},
};
use crate::assist_context::{AssistContext, Assists};
// Assist: replace_try_expr_with_match
//
// Replaces a `try` expression with a `match` expression.
//
// ```
// # //- minicore: try, option
// fn handle() {
// let pat = Some(true)$0?;
// }
// ```
// ->
// ```
// fn handle() {
// let pat = match Some(true) {
// Some(it) => it,
// None => return None,
// };
// }
// ```
pub(crate) fn replace_try_expr_with_match(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
let qm_kw = ctx.find_token_syntax_at_offset(T![?])?;
let qm_kw_parent = qm_kw.parent().and_then(ast::TryExpr::cast)?;
let expr = qm_kw_parent.expr()?;
let expr_type_info = ctx.sema.type_of_expr(&expr)?;
let try_enum = TryEnum::from_ty(&ctx.sema, &expr_type_info.original)?;
let target = qm_kw_parent.syntax().text_range();
acc.add(
AssistId::refactor_rewrite("replace_try_expr_with_match"),
"Replace try expression with match",
target,
|edit| {
let sad_pat = match try_enum {
TryEnum::Option => make::path_pat(make::ext::ident_path("None")),
TryEnum::Result => make::tuple_struct_pat(
make::ext::ident_path("Err"),
iter::once(make::path_pat(make::ext::ident_path("err"))),
)
.into(),
};
let sad_expr = match try_enum {
TryEnum::Option => {
make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
}
TryEnum::Result => make::expr_return(Some(
make::expr_call(
make::expr_path(make::ext::ident_path("Err")),
make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
)
.into(),
)),
};
let happy_arm = make::match_arm(
try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
None,
make::expr_path(make::ext::ident_path("it")),
);
let sad_arm = make::match_arm(sad_pat, None, sad_expr);
let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);
let expr_match = make::expr_match(expr, match_arm_list)
.indent(IndentLevel::from_node(qm_kw_parent.syntax()));
edit.replace_ast::<ast::Expr>(qm_kw_parent.into(), expr_match.into());
},
)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::{check_assist, check_assist_not_applicable};
#[test]
fn test_replace_try_expr_with_match_not_applicable() {
check_assist_not_applicable(
replace_try_expr_with_match,
r#"
fn test() {
let pat: u32 = 25$0;
}
"#,
);
}
#[test]
fn test_replace_try_expr_with_match_option() {
check_assist(
replace_try_expr_with_match,
r#"
//- minicore: try, option
fn test() {
let pat = Some(true)$0?;
}
"#,
r#"
fn test() {
let pat = match Some(true) {
Some(it) => it,
None => return None,
};
}
"#,
);
}
#[test]
fn test_replace_try_expr_with_match_result() {
check_assist(
replace_try_expr_with_match,
r#"
//- minicore: try, from, result
fn test() {
let pat = Ok(true)$0?;
}
"#,
r#"
fn test() {
let pat = match Ok(true) {
Ok(it) => it,
Err(err) => return Err(err),
};
}
"#,
);
}
}

@ -139,6 +139,7 @@ mod handlers {
mod destructure_struct_binding;
mod destructure_tuple_binding;
mod desugar_doc_comment;
mod desugar_try_expr;
mod expand_glob_import;
mod expand_rest_pattern;
mod extract_expressions_from_format_string;
@ -214,7 +215,6 @@ mod handlers {
mod replace_named_generic_with_impl;
mod replace_qualified_name_with_use;
mod replace_string_with_char;
mod replace_try_expr_with_match;
mod replace_turbofish_with_explicit_type;
mod sort_items;
mod split_import;
@ -273,6 +273,7 @@ mod handlers {
destructure_struct_binding::destructure_struct_binding,
destructure_tuple_binding::destructure_tuple_binding,
desugar_doc_comment::desugar_doc_comment,
desugar_try_expr::desugar_try_expr,
expand_glob_import::expand_glob_import,
expand_glob_import::expand_glob_reexport,
expand_rest_pattern::expand_rest_pattern,
@ -354,7 +355,6 @@ mod handlers {
replace_method_eager_lazy::replace_with_lazy_method,
replace_named_generic_with_impl::replace_named_generic_with_impl,
replace_qualified_name_with_use::replace_qualified_name_with_use,
replace_try_expr_with_match::replace_try_expr_with_match,
replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
sort_items::sort_items,
split_import::split_import,

@ -929,6 +929,47 @@ comment"]
)
}
#[test]
fn doctest_desugar_try_expr_let_else() {
check_doc_test(
"desugar_try_expr_let_else",
r#####"
//- minicore: try, option
fn handle() {
let pat = Some(true)$0?;
}
"#####,
r#####"
fn handle() {
let Some(pat) = Some(true) else {
return None;
};
}
"#####,
)
}
#[test]
fn doctest_desugar_try_expr_match() {
check_doc_test(
"desugar_try_expr_match",
r#####"
//- minicore: try, option
fn handle() {
let pat = Some(true)$0?;
}
"#####,
r#####"
fn handle() {
let pat = match Some(true) {
Some(it) => it,
None => return None,
};
}
"#####,
)
}
#[test]
fn doctest_expand_glob_import() {
check_doc_test(
@ -3096,27 +3137,6 @@ fn main() {
)
}
#[test]
fn doctest_replace_try_expr_with_match() {
check_doc_test(
"replace_try_expr_with_match",
r#####"
//- minicore: try, option
fn handle() {
let pat = Some(true)$0?;
}
"#####,
r#####"
fn handle() {
let pat = match Some(true) {
Some(it) => it,
None => return None,
};
}
"#####,
)
}
#[test]
fn doctest_replace_turbofish_with_explicit_type() {
check_doc_test(

@ -8,8 +8,8 @@ use std::{iter, ops::ControlFlow};
use base_db::RootQueryDb as _;
use hir::{
DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
Semantics, SemanticsScope, Symbol, Type, TypeInfo,
DisplayTarget, HasAttrs, InFile, Local, ModuleDef, ModuleSource, Name, PathResolution,
ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
};
use ide_db::{
FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
@ -751,7 +751,7 @@ impl<'a> CompletionContext<'a> {
original_offset,
} = expand_and_analyze(
&sema,
original_file.syntax().clone(),
InFile::new(editioned_file_id.into(), original_file.syntax().clone()),
file_with_fake_ident.syntax().clone(),
offset,
&original_token,

@ -1,7 +1,7 @@
//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;
use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
use syntax::{
@ -50,7 +50,7 @@ pub(super) struct AnalysisResult {
pub(super) fn expand_and_analyze(
sema: &Semantics<'_, RootDatabase>,
original_file: SyntaxNode,
original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
offset: TextSize,
original_token: &SyntaxToken,
@ -72,7 +72,7 @@ pub(super) fn expand_and_analyze(
relative_offset,
)
.unwrap_or(ExpansionResult {
original_file,
original_file: original_file.value,
speculative_file,
original_offset: offset,
speculative_offset: fake_ident_token.text_range().start(),
@ -125,7 +125,7 @@ fn token_at_offset_ignore_whitespace(file: &SyntaxNode, offset: TextSize) -> Opt
/// the best we can do.
fn expand_maybe_stop(
sema: &Semantics<'_, RootDatabase>,
original_file: SyntaxNode,
original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
original_offset: TextSize,
fake_ident_token: SyntaxToken,
@ -142,17 +142,16 @@ fn expand_maybe_stop(
return result;
}
// This needs to come after the recursive call, because our "inside macro" detection is subtly wrong
// with regard to attribute macros named `test` that are not std's test. So hopefully we will expand
// them successfully above and be able to analyze.
// Left biased since there may already be an identifier token there, and we appended to it.
if !sema.might_be_inside_macro_call(&fake_ident_token)
&& token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
.is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
// We can't check whether the fake expansion is inside macro call, because that requires semantic info.
// But hopefully checking just the real one should be enough.
if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
.is_some_and(|original_token| {
!sema.is_inside_macro_call(original_file.with_value(&original_token))
})
{
// Recursion base case.
Some(ExpansionResult {
original_file,
original_file: original_file.value,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
@ -166,7 +165,7 @@ fn expand_maybe_stop(
fn expand(
sema: &Semantics<'_, RootDatabase>,
original_file: SyntaxNode,
original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
original_offset: TextSize,
fake_ident_token: SyntaxToken,
@ -176,7 +175,7 @@ fn expand(
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
.and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
let ancestor_items = iter::successors(
Option::zip(
@ -249,7 +248,7 @@ fn expand(
}
// No attributes have been expanded, so look for macro_call! token trees or derive token trees
let orig_tt = ancestors_at_offset(&original_file, original_offset)
let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()?;
let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
@ -292,7 +291,7 @@ fn expand(
fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
{
return Some(ExpansionResult {
original_file,
original_file: original_file.value,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
@ -349,7 +348,7 @@ fn expand(
}
let result = expand_maybe_stop(
sema,
actual_expansion.clone(),
InFile::new(file.into(), actual_expansion.clone()),
fake_expansion.clone(),
new_offset,
fake_mapped_token,

View File

@ -2111,6 +2111,56 @@ fn foo() {
);
}
#[test]
fn cfg_attr_attr_macro() {
check(
r#"
//- proc_macros: identity
#[cfg_attr(test, proc_macros::identity)]
fn foo() {
$0
}
"#,
expect![[r#"
fn foo() fn()
md proc_macros
bt u32 u32
kw async
kw const
kw crate::
kw enum
kw extern
kw false
kw fn
kw for
kw if
kw if let
kw impl
kw impl for
kw let
kw letm
kw loop
kw match
kw mod
kw return
kw self::
kw static
kw struct
kw trait
kw true
kw type
kw union
kw unsafe
kw use
kw while
kw while let
sn macro_rules
sn pd
sn ppd
"#]],
);
}
#[test]
fn escaped_label() {
check(

@ -4,7 +4,7 @@
//! in [crate::completions::mod_].
use expect_test::expect;
use crate::tests::{check_edit, check_with_base_items};
use crate::tests::{check, check_edit, check_with_base_items};
#[test]
fn target_type_or_trait_in_impl_block() {
@ -308,3 +308,39 @@ fn bar() {
"#]],
);
}
#[test]
fn expression_in_item_macro() {
check(
r#"
fn foo() -> u8 { 0 }
macro_rules! foo {
($expr:expr) => {
const BAR: u8 = $expr;
};
}
foo!(f$0);
"#,
expect![[r#"
ct BAR u8
fn foo() fn() -> u8
ma foo!() macro_rules! foo
bt u32 u32
kw const
kw crate::
kw false
kw for
kw if
kw if let
kw loop
kw match
kw self::
kw true
kw unsafe
kw while
kw while let
"#]],
);
}

@ -92,9 +92,7 @@ pub struct RootDatabase {
impl std::panic::RefUnwindSafe for RootDatabase {}
#[salsa_macros::db]
impl salsa::Database for RootDatabase {
fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
}
impl salsa::Database for RootDatabase {}
impl Drop for RootDatabase {
fn drop(&mut self) {

@ -524,6 +524,7 @@ impl<'a> FindUsages<'a> {
fn find_nodes<'b>(
sema: &'b Semantics<'_, RootDatabase>,
name: &str,
file_id: EditionedFileId,
node: &syntax::SyntaxNode,
offset: TextSize,
) -> impl Iterator<Item = SyntaxNode> + 'b {
@ -534,7 +535,7 @@ impl<'a> FindUsages<'a> {
})
.into_iter()
.flat_map(move |token| {
if sema.might_be_inside_macro_call(&token) {
if sema.is_inside_macro_call(InFile::new(file_id.into(), &token)) {
sema.descend_into_macros_exact(token)
} else {
<_>::from([token])
@ -654,11 +655,14 @@ impl<'a> FindUsages<'a> {
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
let usages =
FindUsages::find_nodes(sema, &current_to_process, &tree, offset)
.filter(|it| {
matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF)
});
let usages = FindUsages::find_nodes(
sema,
&current_to_process,
file_id,
&tree,
offset,
)
.filter(|it| matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF));
for usage in usages {
if let Some(alias) = usage.parent().and_then(|it| {
let path = ast::PathSegment::cast(it)?.parent_path();
@ -813,7 +817,7 @@ impl<'a> FindUsages<'a> {
let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
for offset in FindUsages::match_indices(&file_text, finder, search_range) {
let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
let usages = FindUsages::find_nodes(this.sema, name, file_id, &tree, offset)
.filter_map(ast::NameRef::cast);
for usage in usages {
let found_usage = usage
@ -970,8 +974,8 @@ impl<'a> FindUsages<'a> {
return;
}
for name in
Self::find_nodes(sema, name, &tree, offset).filter_map(ast::NameLike::cast)
for name in Self::find_nodes(sema, name, file_id, &tree, offset)
.filter_map(ast::NameLike::cast)
{
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@ -985,8 +989,8 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the `Self` referring to our type
if let Some((self_ty, finder)) = &include_self_kw_refs {
for offset in Self::match_indices(&text, finder, search_range) {
for name_ref in
Self::find_nodes(sema, "Self", &tree, offset).filter_map(ast::NameRef::cast)
for name_ref in Self::find_nodes(sema, "Self", file_id, &tree, offset)
.filter_map(ast::NameRef::cast)
{
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
return;
@ -1010,7 +1014,7 @@ impl<'a> FindUsages<'a> {
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
for offset in Self::match_indices(&text, finder, search_range) {
for name_ref in Self::find_nodes(sema, "super", &tree, offset)
for name_ref in Self::find_nodes(sema, "super", file_id, &tree, offset)
.filter_map(ast::NameRef::cast)
{
if self.found_name_ref(&name_ref, sink) {
@ -1020,7 +1024,7 @@ impl<'a> FindUsages<'a> {
}
if let Some(finder) = &is_crate_root {
for offset in Self::match_indices(&text, finder, search_range) {
for name_ref in Self::find_nodes(sema, "crate", &tree, offset)
for name_ref in Self::find_nodes(sema, "crate", file_id, &tree, offset)
.filter_map(ast::NameRef::cast)
{
if self.found_name_ref(&name_ref, sink) {
@ -1064,8 +1068,8 @@ impl<'a> FindUsages<'a> {
let finder = &Finder::new("self");
for offset in Self::match_indices(&text, finder, search_range) {
for name_ref in
Self::find_nodes(sema, "self", &tree, offset).filter_map(ast::NameRef::cast)
for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset)
.filter_map(ast::NameRef::cast)
{
if self.found_self_module_name_ref(&name_ref, sink) {
return;

@ -915,4 +915,47 @@ fn foo() {
"#,
);
}
#[test]
fn regression_19823() {
check_diagnostics(
r#"
pub trait FooTrait {
unsafe fn method1();
unsafe fn method2();
}
unsafe fn some_unsafe_fn() {}
macro_rules! impl_foo {
() => {
unsafe fn method1() {
some_unsafe_fn();
}
unsafe fn method2() {
some_unsafe_fn();
}
};
}
pub struct S1;
#[allow(unsafe_op_in_unsafe_fn)]
impl FooTrait for S1 {
unsafe fn method1() {
some_unsafe_fn();
}
unsafe fn method2() {
some_unsafe_fn();
}
}
pub struct S2;
#[allow(unsafe_op_in_unsafe_fn)]
impl FooTrait for S2 {
impl_foo!();
}
"#,
);
}
}

@ -106,4 +106,29 @@ fn test(x: Result<i32, &'static !>) {
"#,
);
}
#[test]
fn empty_patterns_normalize() {
check_diagnostics(
r#"
enum Infallible {}
trait Foo {
type Assoc;
}
enum Enum<T: Foo> {
A,
B(T::Assoc),
}
impl Foo for () {
type Assoc = Infallible;
}
fn foo(v: Enum<()>) {
let Enum::A = v;
}
"#,
);
}
}

View File

@ -1243,4 +1243,18 @@ fn foo(v: &Enum) {
"#,
);
}
#[test]
fn regression_19844() {
check_diagnostics(
r#"
fn main() {
struct S {}
enum E { V() }
let E::V() = &S {};
// ^^^^^^ error: expected S, found E
}
"#,
);
}
}

@ -83,12 +83,11 @@ mod handlers {
#[cfg(test)]
mod tests;
use std::{collections::hash_map, iter, sync::LazyLock};
use std::{iter, sync::LazyLock};
use either::Either;
use hir::{
Crate, DisplayTarget, HirFileId, InFile, Semantics, db::ExpandDatabase,
diagnostics::AnyDiagnostic,
Crate, DisplayTarget, InFile, Semantics, db::ExpandDatabase, diagnostics::AnyDiagnostic,
};
use ide_db::{
EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
@ -513,13 +512,7 @@ pub fn semantic_diagnostics(
// The edition isn't accurate (each diagnostic may have its own edition due to macros),
// but it's okay as it's only being used for error recovery.
handle_lints(
&ctx.sema,
&mut FxHashMap::default(),
&mut lints,
&mut Vec::new(),
editioned_file_id.edition(db),
);
handle_lints(&ctx.sema, &mut lints, editioned_file_id.edition(db));
res.retain(|d| d.severity != Severity::Allow);
@ -584,8 +577,6 @@ fn handle_diag_from_macros(
true
}
// `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
struct BuiltLint {
lint: &'static Lint,
groups: Vec<&'static str>,
@ -629,9 +620,7 @@ fn build_lints_map(
fn handle_lints(
sema: &Semantics<'_, RootDatabase>,
cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
diagnostics: &mut [(InFile<SyntaxNode>, &mut Diagnostic)],
cache_stack: &mut Vec<HirFileId>,
edition: Edition,
) {
for (node, diag) in diagnostics {
@ -645,7 +634,8 @@ fn handle_lints(
diag.severity = default_severity;
}
let mut diag_severity = fill_lint_attrs(sema, node, cache, cache_stack, diag, edition);
let mut diag_severity =
lint_severity_at(sema, node, &lint_groups(&diag.code, edition), edition);
if let outline_diag_severity @ Some(_) =
find_outline_mod_lint_severity(sema, node, diag, edition)
@ -698,155 +688,22 @@ fn find_outline_mod_lint_severity(
result
}
#[derive(Debug, Clone, Copy)]
struct SeverityAttr {
severity: Severity,
/// This field counts how far we are from the main node. Bigger values mean farther away.
///
/// Note this isn't accurate: there can be gaps between values (created when merging severity maps).
/// The important thing is that if an attr is closer to the main node, it will have a smaller value.
///
/// This is necessary even though we take care to never overwrite a value from deeper nesting
/// because of lint groups. For example, in the following code:
/// ```
/// #[warn(non_snake_case)]
/// mod foo {
/// #[allow(nonstandard_style)]
/// mod bar {}
/// }
/// ```
/// We don't want to warn on non-snake-case names inside `bar`. If we are traversing this for the first
/// time, everything will be fine, because we will set `diag_severity` on the first matching group
/// and never overwrite it since then. But if `bar` is cached, the cache will contain both
/// `#[warn(non_snake_case)]` and `#[allow(nonstandard_style)]`, and without this field, we have
/// no way of differentiating between the two.
depth: u32,
}
fn fill_lint_attrs(
fn lint_severity_at(
sema: &Semantics<'_, RootDatabase>,
node: &InFile<SyntaxNode>,
cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
cache_stack: &mut Vec<HirFileId>,
diag: &Diagnostic,
lint_groups: &LintGroups,
edition: Edition,
) -> Option<Severity> {
let mut collected_lint_attrs = FxHashMap::<SmolStr, SeverityAttr>::default();
let mut diag_severity = None;
let mut ancestors = node.value.ancestors().peekable();
let mut depth = 0;
loop {
let ancestor = ancestors.next().expect("we always return from top-level nodes");
depth += 1;
if ancestors.peek().is_none() {
// We don't want to insert too many nodes into the cache, but top-level nodes (i.e. outline modules
// or macro expansions) need to touch the database, so they seem like a good fit to cache.
if let Some(cached) = cache.get_mut(&node.file_id) {
// This node (and everything above it) is already cached; the attribute is either here or nowhere.
// Workaround for the borrow checker.
let cached = std::mem::take(cached);
cached.iter().for_each(|(lint, severity)| {
for item in &*cache_stack {
let node_cache_entry = cache
.get_mut(item)
.expect("we always insert cached nodes into the cache map");
let lint_cache_entry = node_cache_entry.entry(lint.clone());
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(SeverityAttr {
severity: severity.severity,
depth: severity.depth + depth,
});
}
}
});
let lints = lint_groups(&diag.code, edition);
let all_matching_groups =
lints.iter().filter_map(|lint_group| cached.get(lint_group));
let cached_severity =
all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);
cache.insert(node.file_id, cached);
return diag_severity.or(cached_severity);
}
// Insert this node's descendants' attributes into any outline descendant, but not including this node.
// This must come before inserting this node's own attributes to preserve order.
collected_lint_attrs.drain().for_each(|(lint, severity)| {
if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
diag_severity = Some(severity.severity);
}
for item in &*cache_stack {
let node_cache_entry = cache
.get_mut(item)
.expect("we always insert cached nodes into the cache map");
let lint_cache_entry = node_cache_entry.entry(lint.clone());
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(severity);
}
}
});
cache_stack.push(node.file_id);
cache.insert(node.file_id, FxHashMap::default());
if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
// Insert this node's attributes into any outline descendant, including this node.
lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
diag_severity = Some(severity);
}
for item in &*cache_stack {
let node_cache_entry = cache
.get_mut(item)
.expect("we always insert cached nodes into the cache map");
let lint_cache_entry = node_cache_entry.entry(lint.clone());
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(SeverityAttr { severity, depth });
}
}
});
}
let parent_node = sema.find_parent_file(node.file_id);
if let Some(parent_node) = parent_node {
let parent_severity =
fill_lint_attrs(sema, &parent_node, cache, cache_stack, diag, edition);
if diag_severity.is_none() {
diag_severity = parent_severity;
}
}
cache_stack.pop();
return diag_severity;
} else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
diag_severity = Some(severity);
}
let lint_cache_entry = collected_lint_attrs.entry(lint);
if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
// Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
// overwrite top attrs.
lint_cache_entry.insert(SeverityAttr { severity, depth });
}
});
}
}
node.value
.ancestors()
.filter_map(ast::AnyHasAttrs::cast)
.find_map(|ancestor| {
lint_attrs(sema, ancestor, edition)
.find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity))
})
.or_else(|| {
lint_severity_at(sema, &sema.find_parent_file(node.file_id)?, lint_groups, edition)
})
}
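To make the new resolution order concrete (a minimal sketch, not code from this change): `lint_severity_at` walks the ancestors of the diagnostic's node and takes the first attribute naming the lint or one of its groups; only when nothing in the current file matches does it recurse into the parent file via `find_parent_file`.
// Hypothetical input; `non_snake_case` belongs to the `nonstandard_style` group.
#[warn(non_snake_case)]
mod foo {
    #[allow(nonstandard_style)]
    mod bar {
        fn BAD_NAME() {} // resolves to Allow: `bar`'s attribute is the nearest match
    }
    fn ALSO_BAD() {} // resolves to Warn: only `foo`'s attribute encloses this
}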
fn lint_attrs<'a>(
@ -945,10 +802,6 @@ impl LintGroups {
fn contains(&self, group: &str) -> bool {
self.groups.contains(&group) || (self.inside_warnings && group == "warnings")
}
fn iter(&self) -> impl Iterator<Item = &'static str> {
self.groups.iter().copied().chain(self.inside_warnings.then_some("warnings"))
}
}
fn lint_groups(lint: &DiagnosticCode, edition: Edition) -> LintGroups {

@ -287,7 +287,7 @@ impl<'db> MatchFinder<'db> {
if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
if let Some(tt) = macro_call.token_tree() {
self.output_debug_for_nodes_at_range(
&expanded,
&expanded.value,
range,
&Some(self.sema.original_range(tt.syntax())),
out,

@ -194,7 +194,7 @@ impl MatchFinder<'_> {
// nodes that originated entirely from within the token tree of the macro call.
// i.e. we don't want to match something that came from the macro itself.
if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out);
}
}
}

@ -1,10 +1,10 @@
use hir::db::ExpandDatabase;
use hir::{ExpandResult, InFile, Semantics};
use hir::{ExpandResult, InFile, InRealFile, Semantics};
use ide_db::{
FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
syntax_helpers::prettify_macro_expansion,
};
use span::{Edition, SpanMap, SyntaxContext, TextRange, TextSize};
use span::{SpanMap, SyntaxContext, TextRange, TextSize};
use stdx::format_to;
use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted};
@ -26,8 +26,9 @@ pub struct ExpandedMacro {
// ![Expand Macro Recursively](https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif)
pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
let sema = Semantics::new(db);
let file = sema.parse_guess_edition(position.file_id);
let krate = sema.file_to_module_def(position.file_id)?.krate().into();
let file_id = sema.attach_first_edition(position.file_id)?;
let file = sema.parse(file_id);
let krate = sema.file_to_module_def(file_id.file_id(db))?.krate().into();
let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
SyntaxKind::IDENT => 1,
@ -86,7 +87,10 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
return derive;
}
let mut anc = tok.parent_ancestors();
let mut anc = sema
.descend_token_into_include_expansion(InRealFile::new(file_id, tok))
.value
.parent_ancestors();
let mut span_map = SpanMap::empty();
let mut error = String::new();
let (name, expanded, kind) = loop {
@ -95,14 +99,7 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
if let Some(item) = ast::Item::cast(node.clone()) {
if let Some(def) = sema.resolve_attr_macro_call(&item) {
break (
def.name(db)
.display(
db,
sema.attach_first_edition(position.file_id)
.map(|it| it.edition(db))
.unwrap_or(Edition::CURRENT),
)
.to_string(),
def.name(db).display(db, file_id.edition(db)).to_string(),
expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?,
SyntaxKind::MACRO_ITEMS,
);
@ -146,10 +143,11 @@ fn expand_macro_recur(
offset_in_original_node: TextSize,
) -> Option<SyntaxNode> {
let ExpandResult { value: expanded, err } = match macro_call {
item @ ast::Item::MacroCall(macro_call) => {
sema.expand_attr_macro(item).or_else(|| sema.expand_allowed_builtins(macro_call))?
}
item => sema.expand_attr_macro(item)?,
item @ ast::Item::MacroCall(macro_call) => sema
.expand_attr_macro(item)
.map(|it| it.map(|it| it.value))
.or_else(|| sema.expand_allowed_builtins(macro_call))?,
item => sema.expand_attr_macro(item)?.map(|it| it.value),
};
let expanded = expanded.clone_for_update();
if let Some(err) = err {
@ -718,4 +716,88 @@ __log!(written:%; "Test"$0);
"#]],
);
}
#[test]
fn assoc_call() {
check(
r#"
macro_rules! mac {
() => { fn assoc() {} }
}
impl () {
mac$0!();
}
"#,
expect![[r#"
mac!
fn assoc(){}"#]],
);
}
#[test]
fn eager() {
check(
r#"
//- minicore: concat
macro_rules! my_concat {
($head:expr, $($tail:tt)*) => { concat!($head, $($tail)*) };
}
fn test() {
_ = my_concat!(
conc$0at!("<", ">"),
"hi",
);
}
"#,
expect![[r#"
my_concat!
"<>hi""#]],
);
}
#[test]
fn in_included() {
check(
r#"
//- minicore: include
//- /main.rs crate:main
include!("./included.rs");
//- /included.rs
macro_rules! foo {
() => { fn item() {} };
}
foo$0!();
"#,
expect![[r#"
foo!
fn item(){}"#]],
);
}
#[test]
fn include() {
check(
r#"
//- minicore: include
//- /main.rs crate:main
include$0!("./included.rs");
//- /included.rs
macro_rules! foo {
() => { fn item() {} };
}
foo();
"#,
expect![[r#"
include!
macro_rules! foo {
() => {
fn item(){}
};
}
foo();"#]],
);
}
}

@ -653,7 +653,7 @@ impl<'a> WalkExpandedExprCtx<'a> {
expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
{
match_ast! {
match expanded {
match (expanded.value) {
ast::MacroStmts(it) => {
self.handle_expanded(it, cb);
},

@ -58,6 +58,7 @@ pub struct MemoryLayoutHoverConfig {
pub size: Option<MemoryLayoutHoverRenderKind>,
pub offset: Option<MemoryLayoutHoverRenderKind>,
pub alignment: Option<MemoryLayoutHoverRenderKind>,
pub padding: Option<MemoryLayoutHoverRenderKind>,
pub niches: bool,
}

@ -630,27 +630,57 @@ pub(super) fn definition(
}
},
|_| None,
|_| None,
),
Definition::Adt(it @ Adt::Struct(strukt)) => render_memory_layout(
config.memory_layout,
|| it.layout(db),
|_| None,
|layout| {
let mut field_size =
|i: usize| Some(strukt.fields(db).get(i)?.layout(db).ok()?.size());
if strukt.repr(db).is_some_and(|it| it.inhibit_struct_field_reordering()) {
Some(("tail padding", layout.tail_padding(&mut field_size)?))
} else {
Some(("largest padding", layout.largest_padding(&mut field_size)?))
}
},
|_| None,
),
Definition::Adt(it) => render_memory_layout(
config.memory_layout,
|| it.layout(db),
|_| None,
|_| None,
|_| None,
),
Definition::Adt(it) => {
render_memory_layout(config.memory_layout, || it.layout(db), |_| None, |_| None)
}
Definition::Variant(it) => render_memory_layout(
config.memory_layout,
|| it.layout(db),
|_| None,
|_| None,
|layout| layout.enum_tag_size(),
),
Definition::TypeAlias(it) => {
render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
}
Definition::Local(it) => {
render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
}
Definition::TypeAlias(it) => render_memory_layout(
config.memory_layout,
|| it.ty(db).layout(db),
|_| None,
|_| None,
|_| None,
),
Definition::Local(it) => render_memory_layout(
config.memory_layout,
|| it.ty(db).layout(db),
|_| None,
|_| None,
|_| None,
),
Definition::SelfType(it) => render_memory_layout(
config.memory_layout,
|| it.self_ty(db).layout(db),
|_| None,
|_| None,
|_| None,
),
_ => None,
};
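As a rough sketch of where those padding numbers could come from (a hypothetical helper, not the `Layout::largest_padding`/`tail_padding` implementation): given each field's (offset, size) in memory order, the largest padding is the biggest gap between consecutive fields, counting the tail gap up to the total size; the tail padding reported for `repr`s that inhibit field reordering is just that final gap.
// Sketch under the assumptions above: fields as (offset, size) pairs.
fn paddings(mut fields: Vec<(u64, u64)>, total_size: u64) -> (u64, u64) {
    fields.sort_by_key(|&(offset, _)| offset);
    let (mut largest, mut prev_end) = (0u64, 0u64);
    for &(offset, size) in &fields {
        largest = largest.max(offset.saturating_sub(prev_end)); // gap before this field
        prev_end = offset + size;
    }
    let tail = total_size.saturating_sub(prev_end); // gap after the last field
    (largest.max(tail), tail) // (largest padding, tail padding)
}
For `struct Foo { x: bool, y: i64, z: u32 }` (rustc reorders the fields; size 16) this gives a largest padding of 3, and for its `#[repr(C)]` variant (size 24) a tail padding of 4, matching the hover tests further down.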
@ -1055,9 +1085,13 @@ fn closure_ty(
if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db).into()) {
push_new_def(hir::Trait::from(trait_).into())
}
if let Some(layout) =
render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None)
{
if let Some(layout) = render_memory_layout(
config.memory_layout,
|| original.layout(sema.db),
|_| None,
|_| None,
|_| None,
) {
format_to!(markup, "\n___\n{layout}");
}
format_to!(markup, "{adjusted}\n\n## Captures\n{}", captures_rendered,);
@ -1142,6 +1176,7 @@ fn render_memory_layout(
config: Option<MemoryLayoutHoverConfig>,
layout: impl FnOnce() -> Result<Layout, LayoutError>,
offset: impl FnOnce(&Layout) -> Option<u64>,
padding: impl FnOnce(&Layout) -> Option<(&str, u64)>,
tag: impl FnOnce(&Layout) -> Option<usize>,
) -> Option<String> {
let config = config?;
@ -1199,6 +1234,23 @@ fn render_memory_layout(
}
}
if let Some(render) = config.padding {
if let Some((padding_name, padding)) = padding(&layout) {
format_to!(label, "{padding_name} = ");
match render {
MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{padding}"),
MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{padding:#X}"),
MemoryLayoutHoverRenderKind::Both if padding >= 10 => {
format_to!(label, "{padding} ({padding:#X})")
}
MemoryLayoutHoverRenderKind::Both => {
format_to!(label, "{padding}")
}
}
format_to!(label, ", ");
}
}
if config.niches {
if let Some(niches) = layout.niches() {
if niches > 1024 {

@ -12,6 +12,7 @@ const HOVER_BASE_CONFIG: HoverConfig = HoverConfig {
size: Some(MemoryLayoutHoverRenderKind::Both),
offset: Some(MemoryLayoutHoverRenderKind::Both),
alignment: Some(MemoryLayoutHoverRenderKind::Both),
padding: Some(MemoryLayoutHoverRenderKind::Both),
niches: true,
}),
documentation: true,
@ -933,7 +934,7 @@ struct Foo$0(pub u32) where u32: Copy;
---
size = 4, align = 4, no Drop
size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
}
@ -959,7 +960,7 @@ struct Foo$0 { field: u32 }
---
size = 4, align = 4, no Drop
size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
check(
@ -984,7 +985,7 @@ struct Foo$0 where u32: Copy { field: u32 }
---
size = 4, align = 4, no Drop
size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
}
@ -1013,7 +1014,7 @@ fn hover_record_struct_limit() {
---
size = 12 (0xC), align = 4, no Drop
size = 12 (0xC), align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@ -1036,7 +1037,7 @@ fn hover_record_struct_limit() {
---
size = 4, align = 4, no Drop
size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@ -1062,7 +1063,7 @@ fn hover_record_struct_limit() {
---
size = 16 (0x10), align = 4, no Drop
size = 16 (0x10), align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@ -1083,7 +1084,7 @@ fn hover_record_struct_limit() {
---
size = 12 (0xC), align = 4, no Drop
size = 12 (0xC), align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@ -1104,7 +1105,7 @@ fn hover_record_struct_limit() {
---
size = 12 (0xC), align = 4, no Drop
size = 12 (0xC), align = 4, largest padding = 0, no Drop
"#]],
);
@ -3114,7 +3115,7 @@ struct S$0<T>(core::marker::PhantomData<T>);
---
size = 0, align = 1, no Drop
size = 0, align = 1, largest padding = 0, no Drop
"#]],
);
}
@ -3147,6 +3148,111 @@ fn test_hover_layout_of_enum() {
);
}
#[test]
fn test_hover_layout_padding_info() {
check(
r#"struct $0Foo {
x: bool,
y: i64,
z: u32,
}"#,
expect![[r#"
*Foo*
```rust
ra_test_fixture
```
```rust
struct Foo {
x: bool,
y: i64,
z: u32,
}
```
---
size = 16 (0x10), align = 8, largest padding = 3, niches = 254, no Drop
"#]],
);
check(
r#"#[repr(align(32))]
struct $0Foo {
x: bool,
y: i64,
z: u32,
}"#,
expect![[r#"
*Foo*
```rust
ra_test_fixture
```
```rust
struct Foo {
x: bool,
y: i64,
z: u32,
}
```
---
size = 32 (0x20), align = 32 (0x20), largest padding = 19 (0x13), niches = 254, no Drop
"#]],
);
check(
r#"#[repr(C)]
struct $0Foo {
x: bool,
y: i64,
z: u32,
}"#,
expect![[r#"
*Foo*
```rust
ra_test_fixture
```
```rust
struct Foo {
x: bool,
y: i64,
z: u32,
}
```
---
size = 24 (0x18), align = 8, tail padding = 4, niches = 254, no Drop
"#]],
);
check(
r#"struct $0Foo(i16, u128, u64)"#,
expect![[r#"
*Foo*
```rust
ra_test_fixture
```
```rust
struct Foo(i16, u128, u64)
```
---
size = 32 (0x20), align = 8, largest padding = 6, no Drop
"#]],
);
}
#[test]
fn test_hover_no_memory_layout() {
check_hover_no_memory_layout(
@ -9198,7 +9304,7 @@ struct Pedro$0<'a> {
---
size = 16 (0x10), align = 8, niches = 1, no Drop
size = 16 (0x10), align = 8, largest padding = 0, niches = 1, no Drop
"#]],
)
}
@ -10559,7 +10665,7 @@ struct DropField$0 {
---
size = 4, align = 4, needs Drop
size = 4, align = 4, largest padding = 0, needs Drop
"#]],
);
check(

@ -164,7 +164,6 @@ define_symbols! {
completion,
compile_error,
concat_bytes,
concat_idents,
concat,
const_format_args,
const_panic_fmt,

@ -258,7 +258,9 @@ impl server::TokenStream for RaSpanServer {
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
stream.into_bridge()
stream.into_bridge(&mut |first, second| {
server::Span::join(self, first, second).unwrap_or(first)
})
}
}

@ -238,7 +238,8 @@ impl server::TokenStream for TokenIdServer {
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
stream.into_bridge()
// Can't join with `TokenId`.
stream.into_bridge(&mut |first, _second| first)
}
}

@ -56,7 +56,10 @@ impl<S: Copy> TokenStream<S> {
self.token_trees.is_empty()
}
pub(crate) fn into_bridge(self) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
pub(crate) fn into_bridge(
self,
join_spans: &mut dyn FnMut(S, S) -> S,
) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
let mut result = Vec::new();
let mut iter = self.token_trees.into_iter();
while let Some(tree) = iter.next() {
@ -98,7 +101,11 @@ impl<S: Copy> TokenStream<S> {
token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
})
},
span: bridge::DelimSpan::from_single(subtree.delimiter.open),
span: bridge::DelimSpan {
open: subtree.delimiter.open,
close: subtree.delimiter.close,
entire: join_spans(subtree.delimiter.open, subtree.delimiter.close),
},
}))
}
}
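Put differently (a sketch of the new contract, not code from this change): `DelimSpan::from_single(open)` reused the open delimiter's span as the group's `entire` span, whereas callers now decide how to combine the two delimiter spans.
// Hypothetical spans for a subtree `[]` covering offsets 7..9:
//   open = 7..8, close = 8..9
//   before: entire = open = 7..8                     (DelimSpan::from_single)
//   after:  entire = join_spans(open, close) = 7..9, or `open` when joining fails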

@ -144,7 +144,7 @@ fn test_fn_like_macro_clone_ident_subtree() {
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT ident 42:2@0..5#ROOT2024
PUNCH , [alone] 42:2@5..6#ROOT2024
SUBTREE [] 42:2@7..8#ROOT2024 42:2@7..8#ROOT2024"#]],
SUBTREE [] 42:2@7..9#ROOT2024 42:2@7..9#ROOT2024"#]],
);
}

@ -1,33 +1,41 @@
use std::sync::{Arc, Mutex};
#[salsa_macros::db]
#[derive(Default, Clone)]
#[derive(Clone)]
pub(crate) struct LoggerDb {
storage: salsa::Storage<Self>,
logger: Logger,
}
impl Default for LoggerDb {
fn default() -> Self {
let logger = Logger::default();
Self {
storage: salsa::Storage::new(Some(Box::new({
let logger = logger.clone();
move |event| match event.kind {
salsa::EventKind::WillExecute { .. }
| salsa::EventKind::WillCheckCancellation
| salsa::EventKind::DidValidateMemoizedValue { .. }
| salsa::EventKind::WillDiscardStaleOutput { .. }
| salsa::EventKind::DidDiscard { .. } => {
logger.logs.lock().unwrap().push(format!("salsa_event({:?})", event.kind));
}
_ => {}
}
}))),
logger,
}
}
}
#[derive(Default, Clone)]
struct Logger {
logs: Arc<Mutex<Vec<String>>>,
}
#[salsa_macros::db]
impl salsa::Database for LoggerDb {
fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
let event = event();
match event.kind {
salsa::EventKind::WillExecute { .. }
| salsa::EventKind::WillCheckCancellation
| salsa::EventKind::DidValidateMemoizedValue { .. }
| salsa::EventKind::WillDiscardStaleOutput { .. }
| salsa::EventKind::DidDiscard { .. } => {
self.push_log(format!("salsa_event({:?})", event.kind));
}
_ => {}
}
}
}
impl salsa::Database for LoggerDb {}
impl LoggerDb {
/// Log an event from inside a tracked function.

@ -1023,7 +1023,7 @@ impl flags::AnalysisStats {
percentage(num_pats_partially_unknown, num_pats),
num_pat_type_mismatches
);
eprintln!(" panics: {}", panics);
eprintln!(" panics: {panics}");
eprintln!("{:<20} {}", "Inference:", inference_time);
report_metric("unknown type", num_exprs_unknown, "#");
report_metric("type mismatches", num_expr_type_mismatches, "#");

@ -149,6 +149,8 @@ config_data! {
hover_memoryLayout_niches: Option<bool> = Some(false),
/// How to render the offset information in a memory layout hover.
hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
/// How to render the padding information in a memory layout hover.
hover_memoryLayout_padding: Option<MemoryLayoutHoverRenderKindDef> = None,
/// How to render the size information in a memory layout hover.
hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Both),
@ -544,7 +546,7 @@ config_data! {
/// Whether to prefer import paths containing a `prelude` module.
imports_preferPrelude: bool = false,
/// The path structure for newly inserted paths to use.
imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain,
imports_prefix: ImportPrefixDef = ImportPrefixDef::ByCrate,
/// Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;".
imports_prefixExternPrelude: bool = false,
}
@ -1635,6 +1637,7 @@ impl Config {
size: self.hover_memoryLayout_size().map(mem_kind),
offset: self.hover_memoryLayout_offset().map(mem_kind),
alignment: self.hover_memoryLayout_alignment().map(mem_kind),
padding: self.hover_memoryLayout_padding().map(mem_kind),
niches: self.hover_memoryLayout_niches().unwrap_or_default(),
}),
documentation: self.hover_documentation_enable().to_owned(),

@ -147,7 +147,7 @@ fn integrated_completion_benchmark() {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
patch(&mut text, "db.struct_signature(self.id)", "sel;\ndb.struct_signature(self.id)")
+ "sel".len();
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
@ -197,9 +197,11 @@ fn integrated_completion_benchmark() {
let completion_offset = {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
+ ";sel".len();
let completion_offset = patch(
&mut text,
"sel;\ndb.struct_signature(self.id)",
";sel;\ndb.struct_signature(self.id)",
) + ";sel".len();
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
@ -247,9 +249,11 @@ fn integrated_completion_benchmark() {
let completion_offset = {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+ "self.".len();
let completion_offset = patch(
&mut text,
"sel;\ndb.struct_signature(self.id)",
"self.;\ndb.struct_signature(self.id)",
) + "self.".len();
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
@ -366,7 +370,7 @@ fn integrated_diagnostics_benchmark() {
{
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
patch(&mut text, "db.struct_data(self.id)", "();\ndb.struct_data(self.id)");
patch(&mut text, "db.struct_signature(self.id)", "();\ndb.struct_signature(self.id)");
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);

@ -585,6 +585,18 @@ impl SyntaxFactory {
ast
}
pub fn expr_underscore(&self) -> ast::UnderscoreExpr {
let ast::Expr::UnderscoreExpr(ast) = make::ext::expr_underscore().clone_for_update() else {
unreachable!()
};
if let Some(mut mapping) = self.mappings() {
SyntaxMappingBuilder::new(ast.syntax().clone()).finish(&mut mapping);
}
ast
}
pub fn expr_if(
&self,
condition: ast::Expr,

@ -763,6 +763,13 @@ Default: `"hexadecimal"`
How to render the offset information in a memory layout hover.
## rust-analyzer.hover.memoryLayout.padding {#hover.memoryLayout.padding}
Default: `null`
How to render the padding information in a memory layout hover.
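For example, with `"both"` a struct hover gains a segment like `size = 16 (0x10), align = 8, largest padding = 3, niches = 254, no Drop` (output taken from the hover tests in this change); `repr`s that pin the field order, e.g. `#[repr(C)]`, report `tail padding` instead of `largest padding`.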
## rust-analyzer.hover.memoryLayout.size {#hover.memoryLayout.size}
Default: `"both"`
@ -835,7 +842,7 @@ Whether to prefer import paths containing a `prelude` module.
## rust-analyzer.imports.prefix {#imports.prefix}
Default: `"plain"`
Default: `"crate"`
The path structure for newly inserted paths to use.
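For instance, a sketch with a hypothetical item `crate::foo::Bar`, imported from within the same crate:
use crate::foo::Bar; // "crate" (the new default): absolute paths, starting at the crate root
use foo::Bar;        // "plain" (the old default): the same path without the `crate::` prefix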

@ -364,30 +364,6 @@ binary](./rust_analyzer_binary.html).
There are multiple rust-analyzer extensions for Visual Studio 2022 on
Windows:
### rust-analyzer.vs
(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0
International)
[Visual Studio
Marketplace](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
[GitHub](https://github.com/kitamstudios/rust-analyzer/)
Support for Rust development in the Visual Studio IDE is enabled by the
[rust-analyzer](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
package. Either click on the download link or install it from the IDE's
extension manager. For now, [Visual Studio
2022](https://visualstudio.microsoft.com/downloads/) is required; all
editions (Community, Professional & Enterprise) are supported. The
package aims to provide zero-friction installation and therefore comes
loaded with most of what it requires, including the rust-analyzer
binary. If anything it needs is missing, appropriate errors/warnings
will guide the user; e.g. cargo.exe needs to be on PATH, and the package
will tell you as much. This package is under rapid active development,
so if you encounter any issues please file them at
[rust-analyzer.vs](https://github.com/kitamstudios/rust-analyzer/).
### VS RustAnalyzer
(License: GPL)

@ -1779,6 +1779,33 @@
}
}
},
{
"title": "hover",
"properties": {
"rust-analyzer.hover.memoryLayout.padding": {
"markdownDescription": "How to render the padding information in a memory layout hover.",
"default": null,
"anyOf": [
{
"type": "null"
},
{
"type": "string",
"enum": [
"both",
"decimal",
"hexadecimal"
],
"enumDescriptions": [
"Render as 12 (0xC)",
"Render as 12",
"Render as 0xC"
]
}
]
}
}
},
{
"title": "hover",
"properties": {
@ -1927,7 +1954,7 @@
"properties": {
"rust-analyzer.imports.prefix": {
"markdownDescription": "The path structure for newly inserted paths to use.",
"default": "plain",
"default": "crate",
"type": "string",
"enum": [
"plain",