mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 11:20:54 +00:00

Auto merge of #140651 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer` r? `@ghost`

commit 676ac98bc7

Cargo.lock (generated), 18 changed lines
@@ -85,6 +85,7 @@ dependencies = [
 "query-group-macro",
 "rustc-hash 2.1.1",
 "salsa",
 "salsa-macros",
 "semver",
 "span",
 "syntax",

@@ -630,6 +631,7 @@ dependencies = [
 "rustc-hash 2.1.1",
 "rustc_apfloat",
 "salsa",
 "salsa-macros",
 "smallvec",
 "span",
 "stdx",

@@ -660,6 +662,7 @@ dependencies = [
 "query-group-macro",
 "rustc-hash 2.1.1",
 "salsa",
 "salsa-macros",
 "smallvec",
 "span",
 "stdx",

@@ -700,6 +703,7 @@ dependencies = [
 "rustc-hash 2.1.1",
 "rustc_apfloat",
 "salsa",
 "salsa-macros",
 "scoped-tls",
 "smallvec",
 "span",

@@ -936,6 +940,7 @@ dependencies = [
 "rayon",
 "rustc-hash 2.1.1",
 "salsa",
 "salsa-macros",
 "span",
 "stdx",
 "syntax",

@@ -1729,6 +1734,7 @@ dependencies = [
 "proc-macro2",
 "quote",
 "salsa",
 "salsa-macros",
 "syn",
]

@@ -2033,9 +2039,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"

[[package]]
name = "salsa"
version = "0.20.0"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1be22155f8d9732518b2db2bf379fe6f0b2375e76b08b7c8fe6c1b887d548c24"
checksum = "6f80d5cf3c3fcab2cef898012f242a670477a1baa609267376af9cb4409026c5"
dependencies = [
 "boxcar",
 "crossbeam-queue",

@@ -2056,15 +2062,15 @@ dependencies = [

[[package]]
name = "salsa-macro-rules"
version = "0.20.0"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f55a7ef0a84e336f7c5f0332d81727f5629fe042d2aa556c75307afebc9f78a5"
checksum = "05303d72606fbf2b9c9523cda2039bb8ecb00304027a3cd7e52b02a65c7d9185"

[[package]]
name = "salsa-macros"
version = "0.20.0"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d0e88a9c0c0d231a63f83dcd1a2c5e5d11044fac4b65bc9ad3b68ab48b0a0ab"
checksum = "eb2f0e2a30c65cb3cd63440c491dde68d9af7e1be2b77832ac7057141107db50"
dependencies = [
 "heck",
 "proc-macro2",
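Context for the Cargo.lock bump above: salsa 0.21 splits its procedural macros into the separate `salsa-macros` crate, which is why `salsa-macros` appears in the dependency lists and why the source hunks below spell attributes as `salsa_macros::...`. A minimal sketch of what a query looks like under this split (illustrative only, not code from this commit; the `Db` trait, `FileInput` struct, and `line_count` function are invented for the example):

use triomphe::Arc;

#[salsa_macros::db]
pub trait Db: salsa::Database {}

#[salsa_macros::input(debug)]
pub struct FileInput {
    pub text: Arc<str>,
}

// A tracked (memoized) query; `return_ref` hands out a reference to the cached value.
#[salsa_macros::tracked(return_ref)]
pub fn line_count(db: &dyn Db, file: FileInput) -> usize {
    file.text(db).lines().count()
}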
@@ -131,7 +131,9 @@ process-wrap = { version = "8.2.0", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
salsa = "0.20.0"
rowan = "=0.15.15"
salsa = { version = "0.21.1", default-features = false, features = ["rayon","salsa_unstable"] }
salsa-macros = "0.21.1"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
@@ -15,6 +15,7 @@ rust-version.workspace = true
la-arena.workspace = true
dashmap.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
query-group.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
@@ -392,7 +392,7 @@ impl BuiltDependency {

pub type CratesIdMap = FxHashMap<CrateBuilderId, Crate>;

#[salsa::input]
#[salsa_macros::input]
#[derive(Debug)]
pub struct Crate {
    #[return_ref]
@@ -1,9 +1,13 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.

pub use salsa;
pub use salsa_macros;

// FIXME: Rename this crate, base db is non descriptive
mod change;
mod input;

use std::hash::BuildHasherDefault;
use std::{cell::RefCell, hash::BuildHasherDefault, panic, sync::Once};

pub use crate::{
    change::FileChange,

@@ -17,7 +21,6 @@ pub use crate::{
use dashmap::{DashMap, mapref::entry::Entry};
pub use query_group::{self};
use rustc_hash::{FxHashSet, FxHasher};
pub use salsa::{self};
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use span::Edition;

@@ -28,7 +31,7 @@ pub use vfs::{AnchoredPath, AnchoredPathBuf, FileId, VfsPath, file_set::FileSet}
#[macro_export]
macro_rules! impl_intern_key {
    ($id:ident, $loc:ident) => {
        #[salsa::interned(no_lifetime)]
        #[salsa_macros::interned(no_lifetime)]
        pub struct $id {
            pub loc: $loc,
        }

@@ -57,7 +60,12 @@ pub struct Files {

impl Files {
    pub fn file_text(&self, file_id: vfs::FileId) -> FileText {
        *self.files.get(&file_id).expect("Unable to fetch file; this is a bug")
        match self.files.get(&file_id) {
            Some(text) => *text,
            None => {
                panic!("Unable to fetch file text for `vfs::FileId`: {file_id:?}; this is a bug")
            }
        }
    }

    pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {

@@ -93,10 +101,12 @@ impl Files {

    /// Source root of the file.
    pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
        let source_root = self
            .source_roots
            .get(&source_root_id)
            .expect("Unable to fetch source root id; this is a bug");
        let source_root = match self.source_roots.get(&source_root_id) {
            Some(source_root) => source_root,
            None => panic!(
                "Unable to fetch `SourceRootInput` with `SourceRootId` ({source_root_id:?}); this is a bug"
            ),
        };

        *source_root
    }

@@ -121,10 +131,12 @@ impl Files {
    }

    pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
        let file_source_root = self
            .file_source_roots
            .get(&id)
            .expect("Unable to fetch FileSourceRootInput; this is a bug");
        let file_source_root = match self.file_source_roots.get(&id) {
            Some(file_source_root) => file_source_root,
            None => panic!(
                "Unable to get `FileSourceRootInput` with `vfs::FileId` ({id:?}); this is a bug",
            ),
        };
        *file_source_root
    }

@@ -152,7 +164,7 @@ impl Files {
    }
}

#[salsa::interned(no_lifetime, debug, constructor=from_span)]
#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
pub struct EditionedFileId {
    pub editioned_file_id: span::EditionedFileId,
}

@@ -187,18 +199,18 @@ impl EditionedFileId {
    }
}

#[salsa::input(debug)]
#[salsa_macros::input(debug)]
pub struct FileText {
    pub text: Arc<str>,
    pub file_id: vfs::FileId,
}

#[salsa::input(debug)]
#[salsa_macros::input(debug)]
pub struct FileSourceRootInput {
    pub source_root_id: SourceRootId,
}

#[salsa::input(debug)]
#[salsa_macros::input(debug)]
pub struct SourceRootInput {
    pub source_root: Arc<SourceRoot>,
}

@@ -265,7 +277,7 @@ pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Cr
    deps
}

#[salsa::db]
#[salsa_macros::db]
pub trait SourceDatabase: salsa::Database {
    /// Text of the file.
    fn file_text(&self, file_id: vfs::FileId) -> FileText;

@@ -344,7 +356,7 @@ fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFil
}

fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
    #[salsa::tracked(return_ref)]
    #[salsa_macros::tracked(return_ref)]
    fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
        let errors = db.parse(file_id).errors();
        match &*errors {

@@ -373,3 +385,49 @@ fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[Crate]> {
    let source_root = db.file_source_root(file_id);
    db.source_root_crates(source_root.source_root_id(db))
}
#[must_use]
#[non_exhaustive]
pub struct DbPanicContext;

impl Drop for DbPanicContext {
    fn drop(&mut self) {
        Self::with_ctx(|ctx| assert!(ctx.pop().is_some()));
    }
}

impl DbPanicContext {
    pub fn enter(frame: String) -> DbPanicContext {
        #[expect(clippy::print_stderr, reason = "already panicking anyway")]
        fn set_hook() {
            let default_hook = panic::take_hook();
            panic::set_hook(Box::new(move |panic_info| {
                default_hook(panic_info);
                if let Some(backtrace) = salsa::Backtrace::capture() {
                    eprintln!("{backtrace:#}");
                }
                DbPanicContext::with_ctx(|ctx| {
                    if !ctx.is_empty() {
                        eprintln!("additional context:");
                        for (idx, frame) in ctx.iter().enumerate() {
                            eprintln!("{idx:>4}: {frame}\n");
                        }
                    }
                });
            }));
        }

        static SET_HOOK: Once = Once::new();
        SET_HOOK.call_once(set_hook);

        Self::with_ctx(|ctx| ctx.push(frame));
        DbPanicContext
    }

    fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
        thread_local! {
            static CTX: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
        }
        CTX.with(|ctx| f(&mut ctx.borrow_mut()));
    }
}
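The `DbPanicContext` guard added above is a small RAII helper: `enter` installs (once) a panic hook that prints a salsa backtrace plus any pushed context frames, pushes its own frame, and pops it again on drop. A hypothetical caller might look like this (illustrative sketch, not part of the diff; `classify_file` is an invented function):

fn classify_file(db: &dyn SourceDatabase, file_id: vfs::FileId) -> FileText {
    // The frame is printed by the panic hook if anything below panics,
    // and is popped automatically when `_guard` goes out of scope.
    let _guard = DbPanicContext::enter(format!("classify_file({file_id:?})"));
    db.file_text(file_id)
}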
@@ -28,6 +28,7 @@ triomphe.workspace = true
rustc_apfloat = "0.2.2"
text-size.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
query-group.workspace = true

ra-ap-rustc_parse_format.workspace = true
@@ -1,6 +1,6 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.

use std::{borrow::Cow, hash::Hash, ops};
use std::{borrow::Cow, convert::identity, hash::Hash, ops};

use base_db::Crate;
use cfg::{CfgExpr, CfgOptions};

@@ -8,6 +8,7 @@ use either::Either;
use hir_expand::{
    HirFileId, InFile,
    attrs::{Attr, AttrId, RawAttrs, collect_attrs},
    span_map::SpanMapRef,
};
use intern::{Symbol, sym};
use la_arena::{ArenaMap, Idx, RawIdx};

@@ -45,8 +46,27 @@ impl Attrs {
        (**self).iter().find(|attr| attr.id == id)
    }

    pub(crate) fn filter(db: &dyn DefDatabase, krate: Crate, raw_attrs: RawAttrs) -> Attrs {
        Attrs(raw_attrs.filter(db, krate))
    pub(crate) fn expand_cfg_attr(
        db: &dyn DefDatabase,
        krate: Crate,
        raw_attrs: RawAttrs,
    ) -> Attrs {
        Attrs(raw_attrs.expand_cfg_attr(db, krate))
    }

    pub(crate) fn is_cfg_enabled_for(
        db: &dyn DefDatabase,
        owner: &dyn ast::HasAttrs,
        span_map: SpanMapRef<'_>,
        cfg_options: &CfgOptions,
    ) -> Result<(), CfgExpr> {
        RawAttrs::attrs_iter_expanded::<false>(db, owner, span_map, cfg_options)
            .filter_map(|attr| attr.cfg())
            .find_map(|cfg| match cfg_options.check(&cfg).is_none_or(identity) {
                true => None,
                false => Some(cfg),
            })
            .map_or(Ok(()), Err)
    }
}
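The new `Attrs::is_cfg_enabled_for` above walks the cfg_attr-expanded attributes of an item and returns `Ok(())` when every `#[cfg(...)]` predicate is enabled (or undecidable), and `Err(cfg)` with the first failing predicate, so a caller can report exactly which condition disabled the item. A rough sketch of that caller contract (an assumption for illustration, not from the diff; `is_item_enabled` is an invented helper):

fn is_item_enabled(
    db: &dyn DefDatabase,
    item: &dyn ast::HasAttrs,
    span_map: SpanMapRef<'_>,
    cfg_options: &CfgOptions,
) -> bool {
    match Attrs::is_cfg_enabled_for(db, item, span_map, cfg_options) {
        Ok(()) => true,
        // `cfg` is the first predicate that evaluated to false; a real caller
        // would attach it to an "inactive code" diagnostic instead of dropping it.
        Err(_cfg) => false,
    }
}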
@@ -522,38 +542,41 @@ impl AttrsWithOwner {
            GenericParamId::ConstParamId(it) => {
                let src = it.parent().child_source(db);
                // FIXME: We should be never getting `None` here.
                match src.value.get(it.local_id()) {
                    Some(val) => RawAttrs::from_attrs_owner(
                return Attrs(match src.value.get(it.local_id()) {
                    Some(val) => RawAttrs::new_expanded(
                        db,
                        src.with_value(val),
                        val,
                        db.span_map(src.file_id).as_ref(),
                        def.krate(db).cfg_options(db),
                    ),
                    None => RawAttrs::EMPTY,
                }
                });
            }
            GenericParamId::TypeParamId(it) => {
                let src = it.parent().child_source(db);
                // FIXME: We should be never getting `None` here.
                match src.value.get(it.local_id()) {
                    Some(val) => RawAttrs::from_attrs_owner(
                return Attrs(match src.value.get(it.local_id()) {
                    Some(val) => RawAttrs::new_expanded(
                        db,
                        src.with_value(val),
                        val,
                        db.span_map(src.file_id).as_ref(),
                        def.krate(db).cfg_options(db),
                    ),
                    None => RawAttrs::EMPTY,
                }
                });
            }
            GenericParamId::LifetimeParamId(it) => {
                let src = it.parent.child_source(db);
                // FIXME: We should be never getting `None` here.
                match src.value.get(it.local_id) {
                    Some(val) => RawAttrs::from_attrs_owner(
                return Attrs(match src.value.get(it.local_id) {
                    Some(val) => RawAttrs::new_expanded(
                        db,
                        src.with_value(val),
                        val,
                        db.span_map(src.file_id).as_ref(),
                        def.krate(db).cfg_options(db),
                    ),
                    None => RawAttrs::EMPTY,
                }
                });
            }
        },
        AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),

@@ -561,7 +584,7 @@ impl AttrsWithOwner {
        AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
    };

    let attrs = raw_attrs.filter(db, def.krate(db));
    let attrs = raw_attrs.expand_cfg_attr(db, def.krate(db));
    Attrs(attrs)
}
@@ -22,7 +22,7 @@ use crate::{
    hir::generics::GenericParams,
    import_map::ImportMap,
    item_tree::{AttrOwner, ItemTree},
    lang_item::{self, LangItem, LangItemTarget, LangItems},
    lang_item::{self, LangItem},
    nameres::{
        DefMap, LocalDefMap,
        assoc::{ImplItems, TraitItems},

@@ -325,9 +325,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {

    // endregion:attrs

    #[salsa::invoke(LangItems::lang_item_query)]
    fn lang_item(&self, start_crate: Crate, item: LangItem) -> Option<LangItemTarget>;

    #[salsa::invoke(ImportMap::import_map_query)]
    fn import_map(&self, krate: Crate) -> Arc<ImportMap>;

@@ -349,9 +346,6 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {

    // endregion:visibilities

    #[salsa::invoke(LangItems::crate_lang_items_query)]
    fn crate_lang_items(&self, krate: Crate) -> Option<Arc<LangItems>>;

    #[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
    fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>;
    #[salsa::invoke(crate::lang_item::crate_notable_traits)]
@@ -3,21 +3,24 @@
use std::mem;

use base_db::Crate;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::AstId;
use hir_expand::{
    ExpandError, ExpandErrorKind, ExpandResult, HirFileId, InFile, Lookup, MacroCallId,
    attrs::RawAttrs, eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
    eager::EagerCallBackFn, mod_path::ModPath, span_map::SpanMap,
};
use span::{AstIdMap, Edition, SyntaxContext};
use syntax::ast::HasAttrs;
use syntax::{Parse, ast};
use syntax::{AstNode, Parse, ast};
use triomphe::Arc;
use tt::TextRange;

use crate::attr::Attrs;
use crate::expr_store::HygieneId;
use crate::macro_call_as_call_id;
use crate::nameres::DefMap;
use crate::{AsMacroCall, MacroId, UnresolvedMacro, db::DefDatabase};
use crate::{MacroId, UnresolvedMacro, db::DefDatabase};

#[derive(Debug)]
pub(super) struct Expander {

@@ -64,22 +67,13 @@ impl Expander {
        }
    }

    pub(super) fn attrs(
        &self,
        db: &dyn DefDatabase,
        krate: Crate,
        has_attrs: &dyn HasAttrs,
    ) -> Attrs {
        Attrs::filter(db, krate, RawAttrs::new(db, has_attrs, self.span_map.as_ref()))
    }

    pub(super) fn is_cfg_enabled(
        &self,
        db: &dyn DefDatabase,
        krate: Crate,
        has_attrs: &dyn HasAttrs,
    ) -> bool {
        self.attrs(db, krate, has_attrs).is_cfg_enabled(krate.cfg_options(db))
        cfg_options: &CfgOptions,
    ) -> Result<(), cfg::CfgExpr> {
        Attrs::is_cfg_enabled_for(db, has_attrs, self.span_map.as_ref(), cfg_options)
    }

    pub(super) fn call_syntax_ctx(&self) -> SyntaxContext {

@@ -100,8 +94,31 @@ impl Expander {

        let result = self.within_limit(db, |this| {
            let macro_call = this.in_file(&macro_call);
            match macro_call.as_call_id_with_errors(

            let expands_to = hir_expand::ExpandTo::from_call_site(macro_call.value);
            let ast_id = AstId::new(macro_call.file_id, this.ast_id_map().ast_id(macro_call.value));
            let path = macro_call.value.path().and_then(|path| {
                let range = path.syntax().text_range();
                let mod_path = ModPath::from_src(db, path, &mut |range| {
                    this.span_map.span_for_range(range).ctx
                })?;
                let call_site = this.span_map.span_for_range(range);
                Some((call_site, mod_path))
            });

            let Some((call_site, path)) = path else {
                return ExpandResult::only_err(ExpandError::other(
                    this.span_map.span_for_range(macro_call.value.syntax().text_range()),
                    "malformed macro invocation",
                ));
            };

            match macro_call_as_call_id(
                db,
                ast_id,
                &path,
                call_site.ctx,
                expands_to,
                krate,
                |path| resolver(path).map(|it| db.macro_def(it)),
                eager_callback,
@@ -7,6 +7,7 @@ mod path;

use std::mem;

use cfg::CfgOptions;
use either::Either;
use hir_expand::{
    HirFileId, InFile, Lookup, MacroDefId,

@@ -81,8 +82,6 @@ pub(super) fn lower_body(
    // even though they should be the same. Also, when the body comes from multiple expansions, their
    // hygiene is different.

    let krate = module.krate();

    let mut self_param = None;
    let mut source_map_self_param = None;
    let mut params = vec![];

@@ -100,9 +99,8 @@ pub(super) fn lower_body(
    // and skip the body.
    if skip_body {
        if let Some(param_list) = parameters {
            if let Some(self_param_syn) = param_list
                .self_param()
                .filter(|self_param| collector.expander.is_cfg_enabled(db, krate, self_param))
            if let Some(self_param_syn) =
                param_list.self_param().filter(|self_param| collector.check_cfg(self_param))
            {
                let is_mutable =
                    self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();

@@ -119,10 +117,7 @@ pub(super) fn lower_body(
                source_map_self_param =
                    Some(collector.expander.in_file(AstPtr::new(&self_param_syn)));
            }
            let count = param_list
                .params()
                .filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
                .count();
            let count = param_list.params().filter(|it| collector.check_cfg(it)).count();
            params = (0..count).map(|_| collector.missing_pat()).collect();
        };
        let body_expr = collector.missing_expr();

@@ -138,9 +133,7 @@ pub(super) fn lower_body(
    }

    if let Some(param_list) = parameters {
        if let Some(self_param_syn) =
            param_list.self_param().filter(|it| collector.expander.is_cfg_enabled(db, krate, it))
        {
        if let Some(self_param_syn) = param_list.self_param().filter(|it| collector.check_cfg(it)) {
            let is_mutable =
                self_param_syn.mut_token().is_some() && self_param_syn.amp_token().is_none();
            let hygiene = self_param_syn

@@ -157,7 +150,7 @@ pub(super) fn lower_body(
        }

        for param in param_list.params() {
            if collector.expander.is_cfg_enabled(db, krate, &param) {
            if collector.check_cfg(&param) {
                let param_pat = collector.collect_pat_top(param.pat());
                params.push(param_pat);
            }

@@ -346,7 +339,7 @@ pub(crate) fn lower_function(
    collector.collect_impl_trait(&mut expr_collector, |collector, mut impl_trait_lower_fn| {
        if let Some(param_list) = fn_.value.param_list() {
            if let Some(param) = param_list.self_param() {
                let enabled = collector.expander.is_cfg_enabled(db, module.krate(), &param);
                let enabled = collector.check_cfg(&param);
                if enabled {
                    has_self_param = true;
                    params.push(match param.ty() {

@@ -381,7 +374,7 @@ pub(crate) fn lower_function(
            }
            let p = param_list
                .params()
                .filter(|param| collector.expander.is_cfg_enabled(db, module.krate(), param))
                .filter(|param| collector.check_cfg(param))
                .filter(|param| {
                    let is_variadic = param.dotdotdot_token().is_some();
                    has_variadic |= is_variadic;

@@ -441,6 +434,7 @@ pub(crate) fn lower_function(

pub struct ExprCollector<'db> {
    db: &'db dyn DefDatabase,
    cfg_options: &'db CfgOptions,
    expander: Expander,
    def_map: Arc<DefMap>,
    local_def_map: Arc<LocalDefMap>,

@@ -553,6 +547,7 @@ impl ExprCollector<'_> {
    let expander = Expander::new(db, current_file_id, &def_map);
    ExprCollector {
        db,
        cfg_options: module.krate().cfg_options(db),
        module,
        def_map,
        local_def_map,
@@ -1026,7 +1021,9 @@ impl ExprCollector<'_> {
    /// Returns `None` if and only if the expression is `#[cfg]`d out.
    fn maybe_collect_expr(&mut self, expr: ast::Expr) -> Option<ExprId> {
        let syntax_ptr = AstPtr::new(&expr);
        self.check_cfg(&expr)?;
        if !self.check_cfg(&expr) {
            return None;
        }

        // FIXME: Move some of these arms out into separate methods for clarity
        Some(match expr {

@@ -1114,6 +1111,7 @@ impl ExprCollector<'_> {
            ast::Expr::WhileExpr(e) => self.collect_while_loop(syntax_ptr, e),
            ast::Expr::ForExpr(e) => self.collect_for_loop(syntax_ptr, e),
            ast::Expr::CallExpr(e) => {
                // FIXME: Remove this once we drop support for <1.86, https://github.com/rust-lang/rust/commit/ac9cb908ac4301dfc25e7a2edee574320022ae2c
                let is_rustc_box = {
                    let attrs = e.attrs();
                    attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box")

@@ -1156,13 +1154,17 @@ impl ExprCollector<'_> {
                match_arm_list
                    .arms()
                    .filter_map(|arm| {
                        self.check_cfg(&arm).map(|()| MatchArm {
                            pat: self.collect_pat_top(arm.pat()),
                            expr: self.collect_expr_opt(arm.expr()),
                            guard: arm
                                .guard()
                                .map(|guard| self.collect_expr_opt(guard.condition())),
                        })
                        if self.check_cfg(&arm) {
                            Some(MatchArm {
                                pat: self.collect_pat_top(arm.pat()),
                                expr: self.collect_expr_opt(arm.expr()),
                                guard: arm
                                    .guard()
                                    .map(|guard| self.collect_expr_opt(guard.condition())),
                            })
                        } else {
                            None
                        }
                    })
                    .collect()
            } else {

@@ -1230,7 +1232,9 @@ impl ExprCollector<'_> {
                let fields = nfl
                    .fields()
                    .filter_map(|field| {
                        self.check_cfg(&field)?;
                        if !self.check_cfg(&field) {
                            return None;
                        }

                        let name = field.field_name()?.as_name();

@@ -1483,7 +1487,9 @@ impl ExprCollector<'_> {
    }

    fn maybe_collect_expr_as_pat(&mut self, expr: &ast::Expr) -> Option<PatId> {
        self.check_cfg(expr)?;
        if !self.check_cfg(expr) {
            return None;
        }
        let syntax_ptr = AstPtr::new(expr);

        let result = match expr {

@@ -1558,7 +1564,9 @@ impl ExprCollector<'_> {
                let args = record_field_list
                    .fields()
                    .filter_map(|f| {
                        self.check_cfg(&f)?;
                        if !self.check_cfg(&f) {
                            return None;
                        }
                        let field_expr = f.expr()?;
                        let pat = self.collect_expr_as_pat(field_expr);
                        let name = f.field_name()?.as_name();
@@ -2044,7 +2052,7 @@ impl ExprCollector<'_> {
    fn collect_stmt(&mut self, statements: &mut Vec<Statement>, s: ast::Stmt) {
        match s {
            ast::Stmt::LetStmt(stmt) => {
                if self.check_cfg(&stmt).is_none() {
                if !self.check_cfg(&stmt) {
                    return;
                }
                let pat = self.collect_pat_top(stmt.pat());

@@ -2059,7 +2067,7 @@ impl ExprCollector<'_> {
            ast::Stmt::ExprStmt(stmt) => {
                let expr = stmt.expr();
                match &expr {
                    Some(expr) if self.check_cfg(expr).is_none() => return,
                    Some(expr) if !self.check_cfg(expr) => return,
                    _ => (),
                }
                let has_semi = stmt.semicolon_token().is_some();

@@ -2074,7 +2082,7 @@ impl ExprCollector<'_> {
                }
            }
            ast::Stmt::Item(ast::Item::MacroDef(macro_)) => {
                if self.check_cfg(&macro_).is_none() {
                if !self.check_cfg(&macro_) {
                    return;
                }
                let Some(name) = macro_.name() else {

@@ -2086,7 +2094,7 @@ impl ExprCollector<'_> {
                self.collect_macro_def(statements, macro_id);
            }
            ast::Stmt::Item(ast::Item::MacroRules(macro_)) => {
                if self.check_cfg(&macro_).is_none() {
                if !self.check_cfg(&macro_) {
                    return;
                }
                let Some(name) = macro_.name() else {

@@ -2360,7 +2368,9 @@ impl ExprCollector<'_> {
                let args = record_pat_field_list
                    .fields()
                    .filter_map(|f| {
                        self.check_cfg(&f)?;
                        if !self.check_cfg(&f) {
                            return None;
                        }
                        let ast_pat = f.pat()?;
                        let pat = self.collect_pat(ast_pat, binding_list);
                        let name = f.field_name()?.as_name();

@@ -2536,25 +2546,18 @@ impl ExprCollector<'_> {

    /// Returns `None` (and emits diagnostics) when `owner` is `#[cfg]`d out, and `Some(())` when
    /// not.
    fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> Option<()> {
        let attrs = self.expander.attrs(self.db, self.module.krate(), owner);
        match attrs.cfg() {
            Some(cfg) => {
                let cfg_options = self.module.krate().cfg_options(self.db);

                if cfg_options.check(&cfg) != Some(false) {
                    return Some(());
                }

    fn check_cfg(&mut self, owner: &dyn ast::HasAttrs) -> bool {
        let enabled = self.expander.is_cfg_enabled(self.db, owner, self.cfg_options);
        match enabled {
            Ok(()) => true,
            Err(cfg) => {
                self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
                    node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
                    cfg,
                    opts: cfg_options.clone(),
                    opts: self.cfg_options.clone(),
                });

                None
                false
            }
            None => Some(()),
        }
    }
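Taken together, the changes in this file replace the old `check_cfg(...) -> Option<()>` convention with a plain `bool`, and `ExprCollector` now caches the crate's `CfgOptions` once in its `cfg_options` field. A condensed sketch of the resulting call pattern (illustrative only; `count_enabled_params` is an invented helper):

fn count_enabled_params(collector: &mut ExprCollector<'_>, list: &ast::ParamList) -> usize {
    // `check_cfg` now answers "is this node cfg-enabled?" directly and pushes an
    // InactiveCode diagnostic as a side effect when it returns false.
    list.params().filter(|param| collector.check_cfg(param)).count()
}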
@@ -110,7 +110,7 @@ impl GenericParamsCollector {

    fn lower_param_list(&mut self, ec: &mut ExprCollector<'_>, params: ast::GenericParamList) {
        for generic_param in params.generic_params() {
            let enabled = ec.expander.is_cfg_enabled(ec.db, ec.module.krate(), &generic_param);
            let enabled = ec.check_cfg(&generic_param);
            if !enabled {
                continue;
            }

@@ -270,7 +270,7 @@ impl GenericParamsCollector {
        let self_ = Name::new_symbol_root(sym::Self_);
        let idx = self.type_or_consts.alloc(
            TypeParamData {
                name: Some(self_.clone()),
                name: Some(self_),
                default: None,
                provenance: TypeParamProvenance::TraitSelf,
            }
@@ -167,7 +167,7 @@ pub struct ItemScope {
    // the resolutions of the imports of this scope
    use_imports_types: FxHashMap<ImportOrExternCrate, ImportOrDef>,
    use_imports_values: FxHashMap<ImportOrGlob, ImportOrDef>,
    use_imports_macros: FxHashMap<ImportOrGlob, ImportOrDef>,
    use_imports_macros: FxHashMap<ImportOrExternCrate, ImportOrDef>,

    use_decls: Vec<UseId>,
    extern_crate_decls: Vec<ExternCrateId>,

@@ -242,7 +242,7 @@ impl ItemScope {
        self.types.iter().map(|(n, &i)| (n, i))
    }

    pub fn macros(&self) -> impl Iterator<Item = (&Name, Item<MacroId, ImportOrGlob>)> + '_ {
    pub fn macros(&self) -> impl Iterator<Item = (&Name, Item<MacroId, ImportOrExternCrate>)> + '_ {
        self.macros.iter().map(|(n, &i)| (n, i))
    }

@@ -250,9 +250,9 @@ impl ItemScope {
        self.use_imports_types
            .keys()
            .copied()
            .chain(self.use_imports_macros.keys().copied())
            .filter_map(ImportOrExternCrate::import_or_glob)
            .chain(self.use_imports_values.keys().copied())
            .chain(self.use_imports_macros.keys().copied())
            .filter_map(ImportOrGlob::into_import)
            .sorted()
            .dedup()

@@ -263,7 +263,7 @@ impl ItemScope {

        let mut def_map;
        let mut scope = self;
        while let Some(&m) = scope.use_imports_macros.get(&ImportOrGlob::Import(import)) {
        while let Some(&m) = scope.use_imports_macros.get(&ImportOrExternCrate::Import(import)) {
            match m {
                ImportOrDef::Import(i) => {
                    let module_id = i.use_.lookup(db).container;

@@ -682,7 +682,6 @@ impl ItemScope {
                }
                _ => _ = glob_imports.macros.remove(&lookup),
            }
            let import = import.and_then(ImportOrExternCrate::import_or_glob);
            let prev = std::mem::replace(&mut fld.import, import);
            if let Some(import) = import {
                self.use_imports_macros.insert(

@@ -698,7 +697,6 @@ impl ItemScope {
            {
                if glob_imports.macros.remove(&lookup) {
                    cov_mark::hit!(import_shadowed);
                    let import = import.and_then(ImportOrExternCrate::import_or_glob);
                    let prev = std::mem::replace(&mut fld.import, import);
                    if let Some(import) = import {
                        self.use_imports_macros.insert(

@@ -783,8 +781,9 @@ impl ItemScope {
        if let Some(Item { import, .. }) = def.macros {
            buf.push_str(" m");
            match import {
                Some(ImportOrGlob::Import(_)) => buf.push('i'),
                Some(ImportOrGlob::Glob(_)) => buf.push('g'),
                Some(ImportOrExternCrate::Import(_)) => buf.push('i'),
                Some(ImportOrExternCrate::Glob(_)) => buf.push('g'),
                Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'),
                None => (),
            }
        }

@@ -893,9 +892,7 @@ impl PerNs {
            ModuleDefId::TraitAliasId(_) => PerNs::types(def, v, import),
            ModuleDefId::TypeAliasId(_) => PerNs::types(def, v, import),
            ModuleDefId::BuiltinType(_) => PerNs::types(def, v, import),
            ModuleDefId::MacroId(mac) => {
                PerNs::macros(mac, v, import.and_then(ImportOrExternCrate::import_or_glob))
            }
            ModuleDefId::MacroId(mac) => PerNs::macros(mac, v, import),
        }
    }
}
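The macro namespace in `ItemScope` now records imports as `ImportOrExternCrate` instead of `ImportOrGlob`, so a macro brought in through a `#[macro_use] extern crate` can remember which extern crate it came from. A small sketch of how a consumer might branch on the widened import kind (assumed shape, not code from this commit; `describe_macro_import` is invented):

fn describe_macro_import(import: Option<ImportOrExternCrate>) -> &'static str {
    match import {
        Some(ImportOrExternCrate::Import(_)) => "use import",
        Some(ImportOrExternCrate::Glob(_)) => "glob import",
        Some(ImportOrExternCrate::ExternCrate(_)) => "extern crate (e.g. #[macro_use])",
        None => "declared in this scope",
    }
}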
@@ -179,7 +179,7 @@ impl ItemTree {

    /// Returns the inner attributes of the source file.
    pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: Crate) -> Attrs {
        Attrs::filter(
        Attrs::expand_cfg_attr(
            db,
            krate,
            self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(),

@@ -191,7 +191,7 @@ impl ItemTree {
    }

    pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: Crate, of: AttrOwner) -> Attrs {
        Attrs::filter(db, krate, self.raw_attrs(of).clone())
        Attrs::expand_cfg_attr(db, krate, self.raw_attrs(of).clone())
    }

    /// Returns a count of a few, expensive items.
@@ -83,6 +83,91 @@ impl LangItemTarget {
    }
}

/// Salsa query. This will look for lang items in a specific crate.
#[salsa_macros::tracked(return_ref)]
pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangItems>> {
    let _p = tracing::info_span!("crate_lang_items_query").entered();

    let mut lang_items = LangItems::default();

    let crate_def_map = db.crate_def_map(krate);

    for (_, module_data) in crate_def_map.modules() {
        for impl_def in module_data.scope.impls() {
            lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
            for &(_, assoc) in db.impl_items(impl_def).items.iter() {
                match assoc {
                    AssocItemId::FunctionId(f) => {
                        lang_items.collect_lang_item(db, f, LangItemTarget::Function)
                    }
                    AssocItemId::TypeAliasId(t) => {
                        lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias)
                    }
                    AssocItemId::ConstId(_) => (),
                }
            }
        }

        for def in module_data.scope.declarations() {
            match def {
                ModuleDefId::TraitId(trait_) => {
                    lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
                    db.trait_items(trait_).items.iter().for_each(|&(_, assoc_id)| match assoc_id {
                        AssocItemId::FunctionId(f) => {
                            lang_items.collect_lang_item(db, f, LangItemTarget::Function);
                        }
                        AssocItemId::TypeAliasId(alias) => {
                            lang_items.collect_lang_item(db, alias, LangItemTarget::TypeAlias)
                        }
                        AssocItemId::ConstId(_) => {}
                    });
                }
                ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                    lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
                    db.enum_variants(e).variants.iter().for_each(|&(id, _)| {
                        lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant);
                    });
                }
                ModuleDefId::AdtId(AdtId::StructId(s)) => {
                    lang_items.collect_lang_item(db, s, LangItemTarget::Struct);
                }
                ModuleDefId::AdtId(AdtId::UnionId(u)) => {
                    lang_items.collect_lang_item(db, u, LangItemTarget::Union);
                }
                ModuleDefId::FunctionId(f) => {
                    lang_items.collect_lang_item(db, f, LangItemTarget::Function);
                }
                ModuleDefId::StaticId(s) => {
                    lang_items.collect_lang_item(db, s, LangItemTarget::Static);
                }
                ModuleDefId::TypeAliasId(t) => {
                    lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias);
                }
                _ => {}
            }
        }
    }

    if lang_items.items.is_empty() { None } else { Some(Box::new(lang_items)) }
}

/// Salsa query. Look for a lang item, starting from the specified crate and recursively
/// traversing its dependencies.
#[salsa_macros::tracked]
pub fn lang_item(
    db: &dyn DefDatabase,
    start_crate: Crate,
    item: LangItem,
) -> Option<LangItemTarget> {
    let _p = tracing::info_span!("lang_item_query").entered();
    if let Some(target) =
        crate_lang_items(db, start_crate).as_ref().and_then(|it| it.items.get(&item).copied())
    {
        return Some(target);
    }
    start_crate.data(db).dependencies.iter().find_map(|dep| lang_item(db, dep.crate_id, item))
}

#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct LangItems {
    items: FxHashMap<LangItem, LangItemTarget>,
@@ -93,96 +178,6 @@ impl LangItems {
        self.items.get(&item).copied()
    }

    /// Salsa query. This will look for lang items in a specific crate.
    pub(crate) fn crate_lang_items_query(
        db: &dyn DefDatabase,
        krate: Crate,
    ) -> Option<Arc<LangItems>> {
        let _p = tracing::info_span!("crate_lang_items_query").entered();

        let mut lang_items = LangItems::default();

        let crate_def_map = db.crate_def_map(krate);

        for (_, module_data) in crate_def_map.modules() {
            for impl_def in module_data.scope.impls() {
                lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
                for &(_, assoc) in db.impl_items(impl_def).items.iter() {
                    match assoc {
                        AssocItemId::FunctionId(f) => {
                            lang_items.collect_lang_item(db, f, LangItemTarget::Function)
                        }
                        AssocItemId::TypeAliasId(t) => {
                            lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias)
                        }
                        AssocItemId::ConstId(_) => (),
                    }
                }
            }

            for def in module_data.scope.declarations() {
                match def {
                    ModuleDefId::TraitId(trait_) => {
                        lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
                        db.trait_items(trait_).items.iter().for_each(
                            |&(_, assoc_id)| match assoc_id {
                                AssocItemId::FunctionId(f) => {
                                    lang_items.collect_lang_item(db, f, LangItemTarget::Function);
                                }
                                AssocItemId::TypeAliasId(alias) => lang_items.collect_lang_item(
                                    db,
                                    alias,
                                    LangItemTarget::TypeAlias,
                                ),
                                AssocItemId::ConstId(_) => {}
                            },
                        );
                    }
                    ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                        lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
                        db.enum_variants(e).variants.iter().for_each(|&(id, _)| {
                            lang_items.collect_lang_item(db, id, LangItemTarget::EnumVariant);
                        });
                    }
                    ModuleDefId::AdtId(AdtId::StructId(s)) => {
                        lang_items.collect_lang_item(db, s, LangItemTarget::Struct);
                    }
                    ModuleDefId::AdtId(AdtId::UnionId(u)) => {
                        lang_items.collect_lang_item(db, u, LangItemTarget::Union);
                    }
                    ModuleDefId::FunctionId(f) => {
                        lang_items.collect_lang_item(db, f, LangItemTarget::Function);
                    }
                    ModuleDefId::StaticId(s) => {
                        lang_items.collect_lang_item(db, s, LangItemTarget::Static);
                    }
                    ModuleDefId::TypeAliasId(t) => {
                        lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias);
                    }
                    _ => {}
                }
            }
        }

        if lang_items.items.is_empty() { None } else { Some(Arc::new(lang_items)) }
    }

    /// Salsa query. Look for a lang item, starting from the specified crate and recursively
    /// traversing its dependencies.
    pub(crate) fn lang_item_query(
        db: &dyn DefDatabase,
        start_crate: Crate,
        item: LangItem,
    ) -> Option<LangItemTarget> {
        let _p = tracing::info_span!("lang_item_query").entered();
        if let Some(target) =
            db.crate_lang_items(start_crate).and_then(|it| it.items.get(&item).copied())
        {
            return Some(target);
        }
        start_crate.data(db).dependencies.iter().find_map(|dep| db.lang_item(dep.crate_id, item))
    }

    fn collect_lang_item<T>(
        &mut self,
        db: &dyn DefDatabase,
@@ -269,18 +264,38 @@ macro_rules! language_item_table {
}

impl LangItem {
    pub fn resolve_function(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<FunctionId> {
        lang_item(db, start_crate, self).and_then(|t| t.as_function())
    }

    pub fn resolve_trait(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<TraitId> {
        lang_item(db, start_crate, self).and_then(|t| t.as_trait())
    }

    pub fn resolve_enum(self, db: &dyn DefDatabase, start_crate: Crate) -> Option<EnumId> {
        lang_item(db, start_crate, self).and_then(|t| t.as_enum())
    }

    pub fn resolve_type_alias(
        self,
        db: &dyn DefDatabase,
        start_crate: Crate,
    ) -> Option<TypeAliasId> {
        lang_item(db, start_crate, self).and_then(|t| t.as_type_alias())
    }

    /// Opposite of [`LangItem::name`]
    pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> {
        Self::from_symbol(name.symbol())
    }

    pub fn path(&self, db: &dyn DefDatabase, start_crate: Crate) -> Option<Path> {
        let t = db.lang_item(start_crate, *self)?;
        let t = lang_item(db, start_crate, *self)?;
        Some(Path::LangItem(t, None))
    }

    pub fn ty_rel_path(&self, db: &dyn DefDatabase, start_crate: Crate, seg: Name) -> Option<Path> {
        let t = db.lang_item(start_crate, *self)?;
        let t = lang_item(db, start_crate, *self)?;
        Some(Path::LangItem(t, Some(seg)))
    }
}
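With this hunk, lang-item lookup moves off the `DefDatabase` trait (`db.lang_item(...)`, `db.crate_lang_items(...)`) onto free `#[salsa_macros::tracked]` functions, with the `LangItem::resolve_*` convenience wrappers layered on top. A call-site sketch under that assumption (illustrative; `copy_trait_of` is an invented helper):

fn copy_trait_of(db: &dyn DefDatabase, krate: Crate) -> Option<TraitId> {
    // Searches `krate` first, then its dependencies, via the new free tracked query.
    LangItem::Copy.resolve_trait(db, krate)
}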
@@ -64,8 +64,8 @@ use std::hash::{Hash, Hasher};

use base_db::{Crate, impl_intern_key};
use hir_expand::{
    AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
    MacroDefId, MacroDefKind,
    AstId, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
    MacroDefKind,
    builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
    db::ExpandDatabase,
    eager::expand_eager_macro_input,

@@ -79,7 +79,7 @@ use la_arena::Idx;
use nameres::DefMap;
use span::{AstIdNode, Edition, FileAstId, SyntaxContext};
use stdx::impl_from;
use syntax::{AstNode, ast};
use syntax::ast;

pub use hir_expand::{Intern, Lookup, tt};

@@ -554,7 +554,7 @@ pub enum ItemContainerId {
impl_from!(ModuleId for ItemContainerId);

/// A Data Type
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum AdtId {
    StructId(StructId),
    UnionId(UnionId),

@@ -563,7 +563,7 @@ pub enum AdtId {
impl_from!(StructId, UnionId, EnumId for AdtId);

/// A macro
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum MacroId {
    Macro2Id(Macro2Id),
    MacroRulesId(MacroRulesId),

@@ -619,7 +619,7 @@ impl_from!(

/// A constant, which might appears as a const item, an anonymous const block in expressions
/// or patterns, or as a constant in types with const generics.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum GeneralConstId {
    ConstId(ConstId),
    StaticId(StaticId),

@@ -656,7 +656,7 @@ impl GeneralConstId {
}

/// The defs which have a body (have root expressions for type inference).
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum DefWithBodyId {
    FunctionId(FunctionId),
    StaticId(StaticId),

@@ -701,7 +701,7 @@ pub enum AssocItemId {
// casting them, and somehow making the constructors private, which would be annoying.
impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);

#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum GenericDefId {
    AdtId(AdtId),
    // consts can have type parameters from their parents (i.e. associated consts of traits)

@@ -790,7 +790,7 @@ impl From<AssocItemId> for GenericDefId {
    }
}

#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum CallableDefId {
    FunctionId(FunctionId),
    StructId(StructId),

@@ -906,7 +906,7 @@ impl From<VariantId> for AttrDefId {
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum VariantId {
    EnumVariantId(EnumVariantId),
    StructId(StructId),
@@ -1166,66 +1166,6 @@ impl ModuleDefId {
        })
    }
}

// FIXME: Replace this with a plain function, it only has one impl
/// A helper trait for converting to MacroCallId
trait AsMacroCall {
    fn as_call_id_with_errors(
        &self,
        db: &dyn ExpandDatabase,
        krate: Crate,
        resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
        eager_callback: &mut dyn FnMut(
            InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
            MacroCallId,
        ),
    ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
}

impl AsMacroCall for InFile<&ast::MacroCall> {
    fn as_call_id_with_errors(
        &self,
        db: &dyn ExpandDatabase,
        krate: Crate,
        resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
        eager_callback: &mut dyn FnMut(
            InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
            MacroCallId,
        ),
    ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
        let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
        let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
        let span_map = db.span_map(self.file_id);
        let path = self.value.path().and_then(|path| {
            let range = path.syntax().text_range();
            let mod_path = ModPath::from_src(db, path, &mut |range| {
                span_map.as_ref().span_for_range(range).ctx
            })?;
            let call_site = span_map.span_for_range(range);
            Some((call_site, mod_path))
        });

        let Some((call_site, path)) = path else {
            return Ok(ExpandResult::only_err(ExpandError::other(
                span_map.span_for_range(self.value.syntax().text_range()),
                "malformed macro invocation",
            )));
        };

        macro_call_as_call_id_with_eager(
            db,
            ast_id,
            &path,
            call_site.ctx,
            expands_to,
            krate,
            resolver,
            resolver,
            eager_callback,
        )
    }
}

/// Helper wrapper for `AstId` with `ModPath`
#[derive(Clone, Debug, Eq, PartialEq)]
struct AstIdWithPath<T: AstIdNode> {
@@ -1239,41 +1179,14 @@ impl<T: AstIdNode> AstIdWithPath<T> {
    }
}

fn macro_call_as_call_id(
    db: &dyn ExpandDatabase,
    call: &AstIdWithPath<ast::MacroCall>,
    call_site: SyntaxContext,
    expand_to: ExpandTo,
    krate: Crate,
    resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
    eager_callback: &mut dyn FnMut(
        InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
        MacroCallId,
    ),
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
    macro_call_as_call_id_with_eager(
        db,
        call.ast_id,
        &call.path,
        call_site,
        expand_to,
        krate,
        resolver,
        resolver,
        eager_callback,
    )
    .map(|res| res.value)
}

fn macro_call_as_call_id_with_eager(
pub fn macro_call_as_call_id(
    db: &dyn ExpandDatabase,
    ast_id: AstId<ast::MacroCall>,
    path: &ModPath,
    call_site: SyntaxContext,
    expand_to: ExpandTo,
    krate: Crate,
    resolver: impl FnOnce(&ModPath) -> Option<MacroDefId>,
    eager_resolver: impl Fn(&ModPath) -> Option<MacroDefId>,
    resolver: impl Fn(&ModPath) -> Option<MacroDefId> + Copy,
    eager_callback: &mut dyn FnMut(
        InFile<(syntax::AstPtr<ast::MacroCall>, span::FileAstId<ast::MacroCall>)>,
        MacroCallId,

@@ -1289,7 +1202,7 @@ fn macro_call_as_call_id_with_eager(
        ast_id,
        def,
        call_site,
        &|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
        &|path| resolver(path).filter(MacroDefId::is_fn_like),
        eager_callback,
    ),
    _ if def.is_fn_like() => ExpandResult {
@@ -285,8 +285,6 @@ fn main() {
    /* parse error: expected expression */
    builtin #format_args (x = );
    /* parse error: expected expression */
    /* parse error: expected R_PAREN */
    /* parse error: expected expression, item or let statement */
    builtin #format_args (x = , x = 2);
    /* parse error: expected expression */
    builtin #format_args ("{}", x = );
@@ -2001,8 +2001,9 @@ macro_rules! bug {
        true
    };
}

let _ = bug!(a;;;test);
fn f() {
    let _ = bug!(a;;;test);
}
"#,
        expect![[r#"
macro_rules! bug {

@@ -2022,8 +2023,9 @@ macro_rules! bug {
        true
    };
}

let _ = true;
fn f() {
    let _ = true;
}
"#]],
    );
}
@@ -19,7 +19,7 @@ use std::{iter, ops::Range, sync};
use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
    InFile, MacroCallKind, MacroKind,
    AstId, InFile, MacroCallId, MacroCallKind, MacroKind,
    db::ExpandDatabase,
    proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
    span_map::SpanMapRef,

@@ -29,7 +29,7 @@ use itertools::Itertools;
use span::{Edition, Span};
use stdx::{format_to, format_to_acc};
use syntax::{
    AstNode,
    AstNode, AstPtr,
    SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
    SyntaxNode, T,
    ast::{self, edit::IndentLevel},

@@ -37,10 +37,9 @@ use syntax::{
use test_fixture::WithFixture;

use crate::{
    AdtId, AsMacroCall, Lookup, ModuleDefId,
    AdtId, Lookup, ModuleDefId,
    db::DefDatabase,
    nameres::{DefMap, MacroSubNs, ModuleSource},
    resolver::HasResolver,
    nameres::{DefMap, ModuleSource},
    src::HasSource,
    test_db::TestDB,
    tt::TopSubtree,

@@ -78,7 +77,6 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
    expect.assert_eq(&errors);
}

#[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, mut expect: Expect) {
    let extra_proc_macros = vec![(
        r#"
@@ -95,54 +93,59 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
            disabled: false,
        },
    )];

    fn resolve(
        db: &dyn DefDatabase,
        def_map: &DefMap,
        ast_id: AstId<ast::MacroCall>,
        ast_ptr: InFile<AstPtr<ast::MacroCall>>,
    ) -> Option<MacroCallId> {
        def_map.modules().find_map(|module| {
            for decl in
                module.1.scope.declarations().chain(module.1.scope.unnamed_consts().map(Into::into))
            {
                let body = match decl {
                    ModuleDefId::FunctionId(it) => it.into(),
                    ModuleDefId::ConstId(it) => it.into(),
                    ModuleDefId::StaticId(it) => it.into(),
                    _ => continue,
                };

                let (body, sm) = db.body_with_source_map(body);
                if let Some(it) =
                    body.blocks(db).find_map(|block| resolve(db, &block.1, ast_id, ast_ptr))
                {
                    return Some(it);
                }
                if let Some((_, res)) = sm.macro_calls().find(|it| it.0 == ast_ptr) {
                    return Some(res);
                }
            }
            module.1.scope.macro_invoc(ast_id)
        })
    }

    let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
    let krate = db.fetch_test_crate();
    let def_map = db.crate_def_map(krate);
    let local_id = DefMap::ROOT;
    let module = def_map.module_id(local_id);
    let resolver = module.resolver(&db);
    let source = def_map[local_id].definition_source(&db);
    let source_file = match source.value {
        ModuleSource::SourceFile(it) => it,
        ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(),
    };

    // What we want to do is to replace all macros (fn-like, derive, attr) with
    // their expansions. Turns out, we don't actually store enough information
    // to do this precisely though! Specifically, if a macro expands to nothing,
    // it leaves zero traces in def-map, so we can't get its expansion after the
    // fact.
    //
    // This is the usual
    // <https://github.com/rust-lang/rust-analyzer/issues/3407>
    // resolve/record tension!
    //
    // So here we try to do a resolve, which is necessary a heuristic. For macro
    // calls, we use `as_call_id_with_errors`. For derives, we look at the impls
    // in the module and assume that, if impls's source is a different
    // `HirFileId`, than it came from macro expansion.

    let mut text_edits = Vec::new();
    let mut expansions = Vec::new();

    for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
        let macro_call = InFile::new(source.file_id, &macro_call);
        let res = macro_call
            .as_call_id_with_errors(
                &db,
                krate,
                |path| {
                    resolver
                        .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang))
                        .map(|(it, _)| db.macro_def(it))
                },
                &mut |_, _| (),
            )
            .unwrap();
        let macro_call_id = res.value.unwrap();
        let mut expansion_result = db.parse_macro_expansion(macro_call_id);
        expansion_result.err = expansion_result.err.or(res.err);
        expansions.push((macro_call.value.clone(), expansion_result));
    for macro_call_node in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
        let ast_id = db.ast_id_map(source.file_id).ast_id(&macro_call_node);
        let ast_id = InFile::new(source.file_id, ast_id);
        let ptr = InFile::new(source.file_id, AstPtr::new(&macro_call_node));
        let macro_call_id = resolve(&db, &def_map, ast_id, ptr)
            .unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}"));
        let expansion_result = db.parse_macro_expansion(macro_call_id);
        expansions.push((macro_call_node.clone(), expansion_result));
    }

    for (call, exp) in expansions.into_iter().rev() {
@@ -259,7 +259,8 @@ impl<'a> AssocItemCollector<'a> {
            };
            match macro_call_as_call_id(
                self.db,
                &AstIdWithPath::new(tree_id.file_id(), ast_id, Clone::clone(path)),
                InFile::new(tree_id.file_id(), ast_id),
                path,
                ctxt,
                expand_to,
                self.module_id.krate(),

@@ -268,12 +269,15 @@ impl<'a> AssocItemCollector<'a> {
                    self.macro_calls.push((ptr.map(|(_, it)| it.upcast()), call_id))
                },
            ) {
                Ok(Some(call_id)) => {
                    self.macro_calls
                        .push((InFile::new(tree_id.file_id(), ast_id.upcast()), call_id));
                    self.collect_macro_items(call_id);
                }
                Ok(None) => (),
                // FIXME: Expansion error?
                Ok(call_id) => match call_id.value {
                    Some(call_id) => {
                        self.macro_calls
                            .push((InFile::new(tree_id.file_id(), ast_id.upcast()), call_id));
                        self.collect_macro_items(call_id);
                    }
                    None => (),
                },
                Err(_) => {
                    self.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                        self.module_id.local_id,
|
||||
ItemTreeId, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, TreeId,
|
||||
UseTreeKind,
|
||||
},
|
||||
macro_call_as_call_id, macro_call_as_call_id_with_eager,
|
||||
macro_call_as_call_id,
|
||||
nameres::{
|
||||
BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, ModuleData, ModuleOrigin, ResolveMode,
|
||||
attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
|
||||
@ -1256,7 +1256,8 @@ impl DefCollector<'_> {
|
||||
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
|
||||
let call_id = macro_call_as_call_id(
|
||||
self.db,
|
||||
ast_id,
|
||||
ast_id.ast_id,
|
||||
&ast_id.path,
|
||||
*call_site,
|
||||
*expand_to,
|
||||
self.def_map.krate,
|
||||
@ -1265,15 +1266,18 @@ impl DefCollector<'_> {
|
||||
eager_callback_buffer.push((directive.module_id, ptr, call_id));
|
||||
},
|
||||
);
|
||||
if let Ok(Some(call_id)) = call_id {
|
||||
self.def_map.modules[directive.module_id]
|
||||
.scope
|
||||
.add_macro_invoc(ast_id.ast_id, call_id);
|
||||
if let Ok(call_id) = call_id {
|
||||
// FIXME: Expansion error
|
||||
if let Some(call_id) = call_id.value {
|
||||
self.def_map.modules[directive.module_id]
|
||||
.scope
|
||||
.add_macro_invoc(ast_id.ast_id, call_id);
|
||||
|
||||
push_resolved(directive, call_id);
|
||||
push_resolved(directive, call_id);
|
||||
|
||||
res = ReachedFixedPoint::No;
|
||||
return Resolved::Yes;
|
||||
res = ReachedFixedPoint::No;
|
||||
return Resolved::Yes;
|
||||
}
|
||||
}
|
||||
}
|
||||
MacroDirectiveKind::Derive {
|
||||
@ -1542,7 +1546,8 @@ impl DefCollector<'_> {
|
||||
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
|
||||
let macro_call_as_call_id = macro_call_as_call_id(
|
||||
self.db,
|
||||
ast_id,
|
||||
ast_id.ast_id,
|
||||
&ast_id.path,
|
||||
*call_site,
|
||||
*expand_to,
|
||||
self.def_map.krate,
|
||||
@ -2420,7 +2425,7 @@ impl ModCollector<'_, '_> {

let mut eager_callback_buffer = vec![];
// Case 1: try to resolve macro calls with single-segment name and expand macro_rules
if let Ok(res) = macro_call_as_call_id_with_eager(
if let Ok(res) = macro_call_as_call_id(
db,
ast_id.ast_id,
&ast_id.path,

@ -2445,21 +2450,6 @@ impl ModCollector<'_, '_> {
.map(|it| self.def_collector.db.macro_def(it))
})
},
|path| {
let resolved_res = self.def_collector.def_map.resolve_path_fp_with_macro(
self.def_collector
.crate_local_def_map
.as_deref()
.unwrap_or(&self.def_collector.local_def_map),
db,
ResolveMode::Other,
self.module_id,
path,
BuiltinShadowMode::Module,
Some(MacroSubNs::Bang),
);
resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it))
},
&mut |ptr, call_id| eager_callback_buffer.push((ptr, call_id)),
) {
for (ptr, call_id) in eager_callback_buffer {

@ -86,7 +86,7 @@ impl ModDir {
let dir_path = if root_dir_owner {
DirPath::empty()
} else {
DirPath::new(format!("{}/", name))
DirPath::new(format!("{name}/"))
};
if let Some(mod_dir) = self.child(dir_path, !root_dir_owner) {
return Ok((

@ -673,12 +673,11 @@ impl DefMap {
}

fn resolve_in_macro_use_prelude(&self, name: &Name) -> PerNs {
self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, _extern_crate)| {
self.macro_use_prelude.get(name).map_or(PerNs::none(), |&(it, extern_crate)| {
PerNs::macros(
it,
Visibility::Public,
// FIXME?
None, // extern_crate.map(ImportOrExternCrate::ExternCrate),
extern_crate.map(ImportOrExternCrate::ExternCrate),
)
})
}

@ -37,7 +37,8 @@ pub struct Item<Def, Import = ImportId> {

pub type TypesItem = Item<ModuleDefId, ImportOrExternCrate>;
pub type ValuesItem = Item<ModuleDefId, ImportOrGlob>;
pub type MacrosItem = Item<MacroId, ImportOrGlob>;
// May be Externcrate for `[macro_use]`'d macros
pub type MacrosItem = Item<MacroId, ImportOrExternCrate>;

#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PerNs {

@ -84,7 +85,7 @@ impl PerNs {
}
}

pub fn macros(def: MacroId, vis: Visibility, import: Option<ImportOrGlob>) -> PerNs {
pub fn macros(def: MacroId, vis: Visibility, import: Option<ImportOrExternCrate>) -> PerNs {
PerNs { types: None, values: None, macros: Some(Item { def, vis, import }) }
}

@ -116,7 +117,7 @@ impl PerNs {
self.macros.map(|it| it.def)
}

pub fn take_macros_import(self) -> Option<(MacroId, Option<ImportOrGlob>)> {
pub fn take_macros_import(self) -> Option<(MacroId, Option<ImportOrExternCrate>)> {
self.macros.map(|it| (it.def, it.import))
}

@ -158,9 +159,6 @@ impl PerNs {
self.values
.map(|it| (ItemInNs::Values(it.def), it.import.map(ImportOrExternCrate::from))),
)
.chain(
self.macros
.map(|it| (ItemInNs::Macros(it.def), it.import.map(ImportOrExternCrate::from))),
)
.chain(self.macros.map(|it| (ItemInNs::Macros(it.def), it.import)))
}
}

@ -493,7 +493,7 @@ impl Resolver {
db: &dyn DefDatabase,
path: &ModPath,
expected_macro_kind: Option<MacroSubNs>,
) -> Option<(MacroId, Option<ImportOrGlob>)> {
) -> Option<(MacroId, Option<ImportOrExternCrate>)> {
let (item_map, item_local_map, module) = self.item_scope_();
item_map
.resolve_path(

@ -19,7 +19,7 @@ use crate::{
src::HasSource,
};

#[salsa::db]
#[salsa_macros::db]
#[derive(Clone)]
pub(crate) struct TestDB {
storage: salsa::Storage<Self>,

@ -44,7 +44,7 @@ impl Default for TestDB {
}
}

#[salsa::db]
#[salsa_macros::db]
impl salsa::Database for TestDB {
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
let mut events = self.events.lock().unwrap();

@ -63,7 +63,7 @@ impl fmt::Debug for TestDB {

impl panic::RefUnwindSafe for TestDB {}

#[salsa::db]
#[salsa_macros::db]
impl SourceDatabase for TestDB {
fn file_text(&self, file_id: base_db::FileId) -> FileText {
self.files.file_text(file_id)

@ -21,6 +21,7 @@ smallvec.workspace = true
triomphe.workspace = true
query-group.workspace = true
salsa.workspace = true
salsa-macros.workspace = true

# local deps
stdx.workspace = true

@ -2,7 +2,7 @@
use std::{borrow::Cow, fmt, ops};

use base_db::Crate;
use cfg::CfgExpr;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use intern::{Interned, Symbol, sym};

@ -14,11 +14,10 @@ use syntax::{AstNode, AstToken, SyntaxNode, ast, match_ast};
use syntax_bridge::{DocCommentDesugarMode, desugar_doc_comment_text, syntax_node_to_token_tree};
use triomphe::ThinArc;

use crate::name::Name;
use crate::{
InFile,
db::ExpandDatabase,
mod_path::ModPath,
name::Name,
span_map::SpanMapRef,
tt::{self, TopSubtree, token_to_literal},
};

@ -49,29 +48,7 @@ impl RawAttrs {
|
||||
owner: &dyn ast::HasAttrs,
|
||||
span_map: SpanMapRef<'_>,
|
||||
) -> Self {
|
||||
let entries: Vec<_> = collect_attrs(owner)
|
||||
.filter_map(|(id, attr)| match attr {
|
||||
Either::Left(attr) => {
|
||||
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
|
||||
}
|
||||
Either::Right(comment) => comment.doc_comment().map(|doc| {
|
||||
let span = span_map.span_for_range(comment.syntax().text_range());
|
||||
let (text, kind) =
|
||||
desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
|
||||
Attr {
|
||||
id,
|
||||
input: Some(Box::new(AttrInput::Literal(tt::Literal {
|
||||
symbol: text,
|
||||
span,
|
||||
kind,
|
||||
suffix: None,
|
||||
}))),
|
||||
path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
|
||||
ctxt: span.ctx,
|
||||
}
|
||||
}),
|
||||
})
|
||||
.collect();
|
||||
let entries: Vec<_> = Self::attrs_iter::<true>(db, owner, span_map).collect();
|
||||
|
||||
let entries = if entries.is_empty() {
|
||||
None
|
||||
@ -82,12 +59,61 @@ impl RawAttrs {
|
||||
RawAttrs { entries }
|
||||
}
|
||||
|
||||
pub fn from_attrs_owner(
|
||||
/// A [`RawAttrs`] that has its `#[cfg_attr(...)]` attributes expanded.
|
||||
pub fn new_expanded(
|
||||
db: &dyn ExpandDatabase,
|
||||
owner: InFile<&dyn ast::HasAttrs>,
|
||||
owner: &dyn ast::HasAttrs,
|
||||
span_map: SpanMapRef<'_>,
|
||||
cfg_options: &CfgOptions,
|
||||
) -> Self {
|
||||
Self::new(db, owner.value, span_map)
|
||||
let entries: Vec<_> =
|
||||
Self::attrs_iter_expanded::<true>(db, owner, span_map, cfg_options).collect();
|
||||
|
||||
let entries = if entries.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(ThinArc::from_header_and_iter((), entries.into_iter()))
|
||||
};
|
||||
|
||||
RawAttrs { entries }
|
||||
}
|
||||
|
||||
pub fn attrs_iter<const DESUGAR_COMMENTS: bool>(
|
||||
db: &dyn ExpandDatabase,
|
||||
owner: &dyn ast::HasAttrs,
|
||||
span_map: SpanMapRef<'_>,
|
||||
) -> impl Iterator<Item = Attr> {
|
||||
collect_attrs(owner).filter_map(move |(id, attr)| match attr {
|
||||
Either::Left(attr) => {
|
||||
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
|
||||
}
|
||||
Either::Right(comment) if DESUGAR_COMMENTS => comment.doc_comment().map(|doc| {
|
||||
let span = span_map.span_for_range(comment.syntax().text_range());
|
||||
let (text, kind) = desugar_doc_comment_text(doc, DocCommentDesugarMode::ProcMacro);
|
||||
Attr {
|
||||
id,
|
||||
input: Some(Box::new(AttrInput::Literal(tt::Literal {
|
||||
symbol: text,
|
||||
span,
|
||||
kind,
|
||||
suffix: None,
|
||||
}))),
|
||||
path: Interned::new(ModPath::from(Name::new_symbol(sym::doc, span.ctx))),
|
||||
ctxt: span.ctx,
|
||||
}
|
||||
}),
|
||||
Either::Right(_) => None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn attrs_iter_expanded<const DESUGAR_COMMENTS: bool>(
|
||||
db: &dyn ExpandDatabase,
|
||||
owner: &dyn ast::HasAttrs,
|
||||
span_map: SpanMapRef<'_>,
|
||||
cfg_options: &CfgOptions,
|
||||
) -> impl Iterator<Item = Attr> {
|
||||
Self::attrs_iter::<DESUGAR_COMMENTS>(db, owner, span_map)
|
||||
.flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
|
||||
}
|
||||
|
||||
pub fn merge(&self, other: Self) -> Self {
|
||||
@ -114,9 +140,8 @@ impl RawAttrs {
|
||||
}
|
||||
}
|
||||
|
||||
/// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
|
||||
// FIXME: This should return a different type, signaling it was filtered?
|
||||
pub fn filter(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
|
||||
/// Processes `cfg_attr`s
|
||||
pub fn expand_cfg_attr(self, db: &dyn ExpandDatabase, krate: Crate) -> RawAttrs {
|
||||
let has_cfg_attrs =
|
||||
self.iter().any(|attr| attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr));
|
||||
if !has_cfg_attrs {
|
||||
@ -126,37 +151,8 @@ impl RawAttrs {
|
||||
let cfg_options = krate.cfg_options(db);
|
||||
let new_attrs = self
|
||||
.iter()
|
||||
.flat_map(|attr| -> SmallVec<[_; 1]> {
|
||||
let is_cfg_attr = attr.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
|
||||
if !is_cfg_attr {
|
||||
return smallvec![attr.clone()];
|
||||
}
|
||||
|
||||
let subtree = match attr.token_tree_value() {
|
||||
Some(it) => it,
|
||||
_ => return smallvec![attr.clone()],
|
||||
};
|
||||
|
||||
let (cfg, parts) = match parse_cfg_attr_input(subtree) {
|
||||
Some(it) => it,
|
||||
None => return smallvec![attr.clone()],
|
||||
};
|
||||
let index = attr.id;
|
||||
let attrs = parts
|
||||
.enumerate()
|
||||
.take(1 << AttrId::CFG_ATTR_BITS)
|
||||
.filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
|
||||
|
||||
let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
|
||||
let cfg = CfgExpr::parse(&cfg);
|
||||
if cfg_options.check(&cfg) == Some(false) {
|
||||
smallvec![]
|
||||
} else {
|
||||
cov_mark::hit!(cfg_attr_active);
|
||||
|
||||
attrs.collect()
|
||||
}
|
||||
})
|
||||
.cloned()
|
||||
.flat_map(|attr| attr.expand_cfg_attr(db, cfg_options))
|
||||
.collect::<Vec<_>>();
|
||||
let entries = if new_attrs.is_empty() {
|
||||
None
|
||||
@ -316,6 +312,42 @@ impl Attr {
|
||||
pub fn path(&self) -> &ModPath {
|
||||
&self.path
|
||||
}
|
||||
|
||||
pub fn expand_cfg_attr(
|
||||
self,
|
||||
db: &dyn ExpandDatabase,
|
||||
cfg_options: &CfgOptions,
|
||||
) -> impl IntoIterator<Item = Self> {
|
||||
let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
|
||||
if !is_cfg_attr {
|
||||
return smallvec![self];
|
||||
}
|
||||
|
||||
let subtree = match self.token_tree_value() {
|
||||
Some(it) => it,
|
||||
_ => return smallvec![self.clone()],
|
||||
};
|
||||
|
||||
let (cfg, parts) = match parse_cfg_attr_input(subtree) {
|
||||
Some(it) => it,
|
||||
None => return smallvec![self.clone()],
|
||||
};
|
||||
let index = self.id;
|
||||
let attrs = parts
|
||||
.enumerate()
|
||||
.take(1 << AttrId::CFG_ATTR_BITS)
|
||||
.filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
|
||||
|
||||
let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
|
||||
let cfg = CfgExpr::parse(&cfg);
|
||||
if cfg_options.check(&cfg) == Some(false) {
|
||||
smallvec![]
|
||||
} else {
|
||||
cov_mark::hit!(cfg_attr_active);
|
||||
|
||||
attrs.collect::<SmallVec<[_; 1]>>()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Attr {
|
||||
|
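Aside, not part of the commit: a minimal standalone sketch of the `cfg_attr` expansion that the hunk above moves onto `Attr::expand_cfg_attr`. Attributes and predicates are modelled here as plain strings checked against a flag set; the real code works on token trees, `CfgOptions`, and `ExpandDatabase`.

    use std::collections::HashSet;

    // Hypothetical simplified model: an attribute is a plain string such as
    // "inline" or "cfg_attr(serde, derive(Serialize), derive(Deserialize))".
    fn expand_cfg_attr(attr: &str, enabled: &HashSet<&str>) -> Vec<String> {
        // Non-`cfg_attr` attributes pass through unchanged.
        let Some(body) = attr.strip_prefix("cfg_attr(").and_then(|s| s.strip_suffix(')')) else {
            return vec![attr.to_string()];
        };
        // The first element is the predicate, the rest are the guarded attributes.
        let mut parts = body.split(", ");
        let predicate = parts.next().unwrap_or("");
        if enabled.contains(predicate) {
            // Active: the guarded attributes replace the `cfg_attr` wrapper.
            parts.map(|s| s.to_string()).collect()
        } else {
            // Inactive: the whole attribute is dropped.
            Vec::new()
        }
    }

    fn main() {
        let enabled: HashSet<&str> = HashSet::from(["serde"]);
        assert_eq!(expand_cfg_attr("inline", &enabled), vec!["inline".to_string()]);
        assert_eq!(
            expand_cfg_attr("cfg_attr(serde, derive(Serialize), derive(Deserialize))", &enabled),
            vec!["derive(Serialize)".to_string(), "derive(Deserialize)".to_string()]
        );
        assert!(expand_cfg_attr("cfg_attr(unstable, feature(x))", &enabled).is_empty());
    }
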
@ -1,7 +1,6 @@
|
||||
//! Defines a unit of change that can applied to the database to get the next
|
||||
//! state. Changes are transactional.
|
||||
use base_db::{CrateGraphBuilder, FileChange, SourceRoot};
|
||||
use salsa::Durability;
|
||||
use base_db::{CrateGraphBuilder, FileChange, SourceRoot, salsa::Durability};
|
||||
use span::FileId;
|
||||
use triomphe::Arc;
|
||||
|
||||
|
@ -18,10 +18,7 @@ use crate::{
|
||||
cfg_process,
|
||||
declarative::DeclarativeMacroExpander,
|
||||
fixup::{self, SyntaxFixupUndoInfo},
|
||||
hygiene::{
|
||||
SyntaxContextExt as _, span_with_call_site_ctxt, span_with_def_site_ctxt,
|
||||
span_with_mixed_site_ctxt,
|
||||
},
|
||||
hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
|
||||
proc_macro::{CrateProcMacros, CustomProcMacroExpander, ProcMacros},
|
||||
span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
|
||||
tt,
|
||||
@ -147,7 +144,7 @@ pub trait ExpandDatabase: RootQueryDb {
|
||||
fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContext;
|
||||
}
|
||||
|
||||
#[salsa::interned(no_lifetime, id = span::SyntaxContext)]
|
||||
#[salsa_macros::interned(no_lifetime, id = span::SyntaxContext)]
|
||||
pub struct SyntaxContextWrapper {
|
||||
pub data: SyntaxContext,
|
||||
}
|
||||
@ -752,8 +749,7 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
|
||||
err: Some(ExpandError::other(
|
||||
tt.delimiter.open,
|
||||
format!(
|
||||
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
||||
count, TOKEN_LIMIT,
|
||||
"macro invocation exceeds token limit: produced {count} tokens, limit is {TOKEN_LIMIT}",
|
||||
),
|
||||
)),
|
||||
})
|
||||
|
@ -82,7 +82,7 @@ impl DeclarativeMacroExpander {
|
||||
|
||||
let transparency = |node| {
|
||||
// ... would be nice to have the item tree here
|
||||
let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
|
||||
let attrs = RawAttrs::new_expanded(db, node, map.as_ref(), def_crate.cfg_options(db));
|
||||
match attrs
|
||||
.iter()
|
||||
.find(|it| {
|
||||
|
@ -22,7 +22,7 @@
|
||||
// FIXME: Move this into the span crate? Not quite possible today as that depends on `MacroCallLoc`
|
||||
// which contains a bunch of unrelated things
|
||||
|
||||
use std::{convert::identity, iter};
|
||||
use std::convert::identity;
|
||||
|
||||
use span::{Edition, MacroCallId, Span, SyntaxContext};
|
||||
|
||||
@ -141,61 +141,3 @@ fn apply_mark_internal(
|
||||
|_| opaque_and_semitransparent,
|
||||
)
|
||||
}
|
||||
|
||||
pub trait SyntaxContextExt {
|
||||
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContext;
|
||||
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContext;
|
||||
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContext;
|
||||
fn remove_mark(&mut self, db: &dyn ExpandDatabase)
|
||||
-> (Option<span::MacroCallId>, Transparency);
|
||||
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency);
|
||||
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)>;
|
||||
fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
|
||||
}
|
||||
|
||||
impl SyntaxContextExt for SyntaxContext {
|
||||
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContext {
|
||||
self.opaque_and_semitransparent(db)
|
||||
}
|
||||
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContext {
|
||||
self.opaque(db)
|
||||
}
|
||||
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContext {
|
||||
self.parent(db)
|
||||
}
|
||||
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency) {
|
||||
let data = self;
|
||||
(data.outer_expn(db), data.outer_transparency(db))
|
||||
}
|
||||
fn remove_mark(
|
||||
&mut self,
|
||||
db: &dyn ExpandDatabase,
|
||||
) -> (Option<span::MacroCallId>, Transparency) {
|
||||
let data = *self;
|
||||
*self = data.parent(db);
|
||||
(data.outer_expn(db), data.outer_transparency(db))
|
||||
}
|
||||
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)> {
|
||||
let mut marks = marks_rev(self, db).collect::<Vec<_>>();
|
||||
marks.reverse();
|
||||
marks
|
||||
}
|
||||
fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
|
||||
!self.is_root() && self.outer_transparency(db).is_opaque()
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: Make this a SyntaxContextExt method once we have RPIT
|
||||
pub fn marks_rev(
|
||||
ctxt: SyntaxContext,
|
||||
db: &dyn ExpandDatabase,
|
||||
) -> impl Iterator<Item = (span::MacroCallId, Transparency)> + '_ {
|
||||
iter::successors(Some(ctxt), move |&mark| Some(mark.parent_ctxt(db)))
|
||||
.take_while(|&it| !it.is_root())
|
||||
.map(|ctx| {
|
||||
let mark = ctx.outer_mark(db);
|
||||
// We stop before taking the root expansion, as such we cannot encounter a `None` outer
|
||||
// expansion, as only the ROOT has it.
|
||||
(mark.0.unwrap(), mark.1)
|
||||
})
|
||||
}
|
||||
|
@ -206,8 +206,7 @@ impl ExpandErrorKind {
|
||||
},
|
||||
None => RenderedExpandError {
|
||||
message: format!(
|
||||
"internal error: proc-macro map is missing error entry for crate {:?}",
|
||||
def_crate
|
||||
"internal error: proc-macro map is missing error entry for crate {def_crate:?}"
|
||||
),
|
||||
error: true,
|
||||
kind: RenderedExpandError::GENERAL_KIND,
|
||||
@ -1051,7 +1050,7 @@ impl ExpandTo {
|
||||
|
||||
intern::impl_internable!(ModPath, attrs::AttrInput);
|
||||
|
||||
#[salsa::interned(no_lifetime, debug)]
|
||||
#[salsa_macros::interned(no_lifetime, debug)]
|
||||
#[doc(alias = "MacroFileId")]
|
||||
pub struct MacroCallId {
|
||||
pub loc: MacroCallLoc,
|
||||
@ -1071,7 +1070,7 @@ impl From<MacroCallId> for span::MacroCallId {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
|
||||
pub enum HirFileId {
|
||||
FileId(EditionedFileId),
|
||||
MacroFile(MacroCallId),
|
||||
|
@ -7,7 +7,7 @@ use std::{
|
||||
|
||||
use crate::{
|
||||
db::ExpandDatabase,
|
||||
hygiene::{SyntaxContextExt, Transparency, marks_rev},
|
||||
hygiene::Transparency,
|
||||
name::{AsName, Name},
|
||||
tt,
|
||||
};
|
||||
@ -340,7 +340,7 @@ pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContext) -> O
|
||||
// definitions actually produced by `macro` and `macro` definitions produced by
|
||||
// `macro_rules!`, but at least such configurations are not stable yet.
|
||||
ctxt = ctxt.normalize_to_macro_rules(db);
|
||||
let mut iter = marks_rev(ctxt, db).peekable();
|
||||
let mut iter = ctxt.marks_rev(db).peekable();
|
||||
let mut result_mark = None;
|
||||
// Find the last opaque mark from the end if it exists.
|
||||
while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
|
||||
|
@ -191,7 +191,7 @@ impl Name {
// FIXME: Remove this in favor of `display`, see fixme on `as_str`
#[doc(hidden)]
pub fn display_no_db(&self, edition: Edition) -> impl fmt::Display + '_ {
Display { name: self, needs_escaping: is_raw_identifier(self.symbol.as_str(), edition) }
Display { name: self, edition }
}

pub fn symbol(&self) -> &Symbol {

@ -201,15 +201,28 @@ impl Name {

struct Display<'a> {
name: &'a Name,
needs_escaping: bool,
edition: Edition,
}

impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.needs_escaping {
write!(f, "r#")?;
let mut symbol = self.name.symbol.as_str();

if symbol == "'static" {
// FIXME: `'static` can also be a label, and there it does need escaping.
// But knowing where it is will require adding a parameter to `display()`,
// and that is an infectious change.
return f.write_str(symbol);
}
fmt::Display::fmt(self.name.symbol.as_str(), f)

if let Some(s) = symbol.strip_prefix('\'') {
f.write_str("'")?;
symbol = s;
}
if is_raw_identifier(symbol, self.edition) {
f.write_str("r#")?;
}
f.write_str(symbol)
}
}

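Aside, not part of the commit: the new `Display` logic above strips a leading tick before the raw-identifier check, so lifetimes and labels are handled as well. A self-contained sketch of that flow, using a toy `is_raw_identifier` stand-in instead of the real edition-aware check.

    use std::fmt;

    // Toy stand-in for the real edition-aware check.
    fn is_raw_identifier(s: &str) -> bool {
        matches!(s, "fn" | "struct" | "match" | "async" | "dyn" | "try")
    }

    struct DisplayName<'a> {
        symbol: &'a str,
    }

    impl fmt::Display for DisplayName<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            let mut symbol = self.symbol;
            // `'static` is written verbatim; as a lifetime it can never be raw.
            if symbol == "'static" {
                return f.write_str(symbol);
            }
            // Keep a leading tick (lifetime or label) and only escape the identifier part.
            if let Some(rest) = symbol.strip_prefix('\'') {
                f.write_str("'")?;
                symbol = rest;
            }
            if is_raw_identifier(symbol) {
                f.write_str("r#")?;
            }
            f.write_str(symbol)
        }
    }

    fn main() {
        assert_eq!(DisplayName { symbol: "match" }.to_string(), "r#match");
        assert_eq!(DisplayName { symbol: "'static" }.to_string(), "'static");
        assert_eq!(DisplayName { symbol: "foo" }.to_string(), "foo");
    }
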
@ -34,6 +34,7 @@ indexmap.workspace = true
|
||||
rustc_apfloat = "0.2.2"
|
||||
query-group.workspace = true
|
||||
salsa.workspace = true
|
||||
salsa-macros.workspace = true
|
||||
|
||||
ra-ap-rustc_abi.workspace = true
|
||||
ra-ap-rustc_index.workspace = true
|
||||
|
@ -198,15 +198,13 @@ pub(crate) fn deref_by_trait(
// blanked impl on `Deref`.
#[expect(clippy::overly_complex_bool_expr)]
if use_receiver_trait && false {
if let Some(receiver) =
db.lang_item(table.trait_env.krate, LangItem::Receiver).and_then(|l| l.as_trait())
{
if let Some(receiver) = LangItem::Receiver.resolve_trait(db, table.trait_env.krate) {
return Some(receiver);
}
}
// Old rustc versions might not have `Receiver` trait.
// Fallback to `Deref` if they don't
db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait())
LangItem::Deref.resolve_trait(db, table.trait_env.krate)
};
let trait_id = trait_id()?;
let target =

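Aside, not part of the commit: many hunks in this part of the diff replace `db.lang_item(krate, LangItem::X).and_then(|l| l.as_trait())` chains with `LangItem::X.resolve_trait(db, krate)`. A standalone sketch of that refactor pattern with hypothetical stand-in types (not the real rust-analyzer API).

    // Hypothetical stand-ins; the real API lives in hir-def's lang_item module.
    #[allow(dead_code)]
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum LangItemTarget {
        Trait(u32),
        Function(u32),
    }

    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    enum LangItem {
        Deref,
        Sized,
    }

    struct Db;

    impl Db {
        fn lang_item(&self, item: LangItem) -> Option<LangItemTarget> {
            match item {
                LangItem::Deref => Some(LangItemTarget::Trait(1)),
                LangItem::Sized => Some(LangItemTarget::Trait(2)),
            }
        }
    }

    impl LangItem {
        // New style: the caller asks directly for a trait id.
        fn resolve_trait(self, db: &Db) -> Option<u32> {
            match db.lang_item(self)? {
                LangItemTarget::Trait(id) => Some(id),
                _ => None,
            }
        }
    }

    fn main() {
        // Old style, as removed by the diff: look up, then downcast.
        let old = Db.lang_item(LangItem::Deref).and_then(|t| match t {
            LangItemTarget::Trait(id) => Some(id),
            _ => None,
        });
        // New style, as introduced by the diff.
        let new = LangItem::Deref.resolve_trait(&Db);
        assert_eq!(old, new);
    }
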
@ -16,7 +16,7 @@ use hir_def::{
|
||||
AssocItemId, BlockId, CallableDefId, GenericDefId, HasModule, ItemContainerId, Lookup,
|
||||
TypeAliasId, VariantId,
|
||||
hir::Movability,
|
||||
lang_item::{LangItem, LangItemTarget},
|
||||
lang_item::LangItem,
|
||||
signatures::{ImplFlags, StructFlags, TraitFlags},
|
||||
};
|
||||
|
||||
@ -262,10 +262,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
|
||||
well_known_trait: rust_ir::WellKnownTrait,
|
||||
) -> Option<chalk_ir::TraitId<Interner>> {
|
||||
let lang_attr = lang_item_from_well_known_trait(well_known_trait);
|
||||
let trait_ = match self.db.lang_item(self.krate, lang_attr) {
|
||||
Some(LangItemTarget::Trait(trait_)) => trait_,
|
||||
_ => return None,
|
||||
};
|
||||
let trait_ = lang_attr.resolve_trait(self.db, self.krate)?;
|
||||
Some(to_chalk_trait_id(trait_))
|
||||
}
|
||||
|
||||
@ -306,11 +303,8 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
|
||||
chalk_ir::Binders::new(binders, bound)
|
||||
}
|
||||
crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
|
||||
if let Some((future_trait, future_output)) = self
|
||||
.db
|
||||
.lang_item(self.krate, LangItem::Future)
|
||||
.and_then(|item| item.as_trait())
|
||||
.and_then(|trait_| {
|
||||
if let Some((future_trait, future_output)) =
|
||||
LangItem::Future.resolve_trait(self.db, self.krate).and_then(|trait_| {
|
||||
let alias = self
|
||||
.db
|
||||
.trait_items(trait_)
|
||||
@ -338,10 +332,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
|
||||
});
|
||||
let mut binder = vec![];
|
||||
binder.push(crate::wrap_empty_binders(impl_bound));
|
||||
let sized_trait = self
|
||||
.db
|
||||
.lang_item(self.krate, LangItem::Sized)
|
||||
.and_then(|item| item.as_trait());
|
||||
let sized_trait = LangItem::Sized.resolve_trait(self.db, self.krate);
|
||||
if let Some(sized_trait_) = sized_trait {
|
||||
let sized_bound = WhereClause::Implemented(TraitRef {
|
||||
trait_id: to_chalk_trait_id(sized_trait_),
|
||||
@ -646,7 +637,10 @@ pub(crate) fn associated_ty_data_query(
|
||||
.fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, 0)
|
||||
.build();
|
||||
let pro_ty = TyBuilder::assoc_type_projection(db, type_alias, Some(trait_subst))
|
||||
.fill_with_bound_vars(crate::DebruijnIndex::INNERMOST, generic_params.len_self())
|
||||
.fill_with_bound_vars(
|
||||
crate::DebruijnIndex::INNERMOST,
|
||||
generic_params.parent_generics().map_or(0, |it| it.len()),
|
||||
)
|
||||
.build();
|
||||
let self_ty = TyKind::Alias(AliasTy::Projection(pro_ty)).intern(Interner);
|
||||
|
||||
@ -660,9 +654,8 @@ pub(crate) fn associated_ty_data_query(
|
||||
}
|
||||
|
||||
if !ctx.unsized_types.contains(&self_ty) {
|
||||
let sized_trait = db
|
||||
.lang_item(resolver.krate(), LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
|
||||
let sized_trait =
|
||||
LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id);
|
||||
let sized_bound = sized_trait.into_iter().map(|sized_trait| {
|
||||
let trait_bound =
|
||||
rust_ir::TraitBound { trait_id: sized_trait, args_no_self: Default::default() };
|
||||
|
@ -251,9 +251,7 @@ impl TyExt for Ty {
|
||||
match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
|
||||
ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
|
||||
let krate = def.module(db).krate();
|
||||
if let Some(future_trait) =
|
||||
db.lang_item(krate, LangItem::Future).and_then(|item| item.as_trait())
|
||||
{
|
||||
if let Some(future_trait) = LangItem::Future.resolve_trait(db, krate) {
|
||||
// This is only used by type walking.
|
||||
// Parameters will be walked outside, and projection predicate is not used.
|
||||
// So just provide the Future trait.
|
||||
@ -364,8 +362,7 @@ impl TyExt for Ty {
|
||||
|
||||
fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
|
||||
let crate_id = owner.module(db).krate();
|
||||
let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
|
||||
else {
|
||||
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, crate_id) else {
|
||||
return false;
|
||||
};
|
||||
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build();
|
||||
|
@ -283,8 +283,9 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
|
||||
|
||||
#[salsa::invoke(crate::variance::variances_of)]
|
||||
#[salsa::cycle(
|
||||
cycle_fn = crate::variance::variances_of_cycle_fn,
|
||||
cycle_initial = crate::variance::variances_of_cycle_initial,
|
||||
// cycle_fn = crate::variance::variances_of_cycle_fn,
|
||||
// cycle_initial = crate::variance::variances_of_cycle_initial,
|
||||
cycle_result = crate::variance::variances_of_cycle_initial,
|
||||
)]
|
||||
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
|
||||
|
||||
|
@ -482,9 +482,8 @@ struct FilterMapNextChecker {
|
||||
impl FilterMapNextChecker {
|
||||
fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self {
|
||||
// Find and store the FunctionIds for Iterator::filter_map and Iterator::next
|
||||
let (next_function_id, filter_map_function_id) = match db
|
||||
.lang_item(resolver.krate(), LangItem::IteratorNext)
|
||||
.and_then(|it| it.as_function())
|
||||
let (next_function_id, filter_map_function_id) = match LangItem::IteratorNext
|
||||
.resolve_function(db, resolver.krate())
|
||||
{
|
||||
Some(next_function_id) => (
|
||||
Some(next_function_id),
|
||||
|
@ -19,7 +19,7 @@ use hir_def::{
|
||||
hir::generics::{TypeOrConstParamData, TypeParamProvenance, WherePredicate},
|
||||
item_scope::ItemInNs,
|
||||
item_tree::FieldsShape,
|
||||
lang_item::{LangItem, LangItemTarget},
|
||||
lang_item::LangItem,
|
||||
nameres::DefMap,
|
||||
signatures::VariantFields,
|
||||
type_ref::{
|
||||
@ -90,11 +90,26 @@ pub struct HirFormatter<'a> {
|
||||
show_container_bounds: bool,
|
||||
omit_verbose_types: bool,
|
||||
closure_style: ClosureStyle,
|
||||
display_lifetimes: DisplayLifetime,
|
||||
display_kind: DisplayKind,
|
||||
display_target: DisplayTarget,
|
||||
bounds_formatting_ctx: BoundsFormattingCtx,
|
||||
}
|
||||
|
||||
// FIXME: To consider, ref and dyn trait lifetimes can be omitted if they are `'_`, path args should
|
||||
// not be when in signatures
|
||||
// So this enum does not encode this well enough
|
||||
// Also 'static can be omitted for ref and dyn trait lifetimes in static/const item types
|
||||
// FIXME: Also named lifetimes may be rendered in places where their name is not in scope?
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum DisplayLifetime {
|
||||
Always,
|
||||
OnlyStatic,
|
||||
OnlyNamed,
|
||||
OnlyNamedOrStatic,
|
||||
Never,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
enum BoundsFormattingCtx {
|
||||
Entered {
|
||||
@ -155,6 +170,21 @@ impl HirFormatter<'_> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn render_lifetime(&self, lifetime: &Lifetime) -> bool {
|
||||
match self.display_lifetimes {
|
||||
DisplayLifetime::Always => true,
|
||||
DisplayLifetime::OnlyStatic => matches!(***lifetime.interned(), LifetimeData::Static),
|
||||
DisplayLifetime::OnlyNamed => {
|
||||
matches!(***lifetime.interned(), LifetimeData::Placeholder(_))
|
||||
}
|
||||
DisplayLifetime::OnlyNamedOrStatic => matches!(
|
||||
***lifetime.interned(),
|
||||
LifetimeData::Static | LifetimeData::Placeholder(_)
|
||||
),
|
||||
DisplayLifetime::Never => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HirDisplay {
|
||||
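Aside, not part of the commit: a standalone sketch of the lifetime-rendering policy introduced above, with lifetimes modelled as a small enum instead of chalk's `LifetimeData`.

    // Simplified stand-in for the lifetime representation used by HirFormatter.
    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    enum Lifetime {
        Static,   // `'static`
        Named,    // e.g. `'a`
        Inferred, // e.g. `'_`
    }

    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    enum DisplayLifetime {
        Always,
        OnlyStatic,
        OnlyNamed,
        OnlyNamedOrStatic,
        Never,
    }

    fn render_lifetime(policy: DisplayLifetime, lt: Lifetime) -> bool {
        match policy {
            DisplayLifetime::Always => true,
            DisplayLifetime::OnlyStatic => matches!(lt, Lifetime::Static),
            DisplayLifetime::OnlyNamed => matches!(lt, Lifetime::Named),
            DisplayLifetime::OnlyNamedOrStatic => matches!(lt, Lifetime::Static | Lifetime::Named),
            DisplayLifetime::Never => false,
        }
    }

    fn main() {
        // Diagnostics-style rendering keeps `'a` and `'static` but hides `'_`.
        let policy = DisplayLifetime::OnlyNamedOrStatic;
        assert!(render_lifetime(policy, Lifetime::Named));
        assert!(render_lifetime(policy, Lifetime::Static));
        assert!(!render_lifetime(policy, Lifetime::Inferred));
    }
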
@ -189,6 +219,7 @@ pub trait HirDisplay {
|
||||
display_kind,
|
||||
closure_style,
|
||||
show_container_bounds,
|
||||
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
|
||||
}
|
||||
}
|
||||
|
||||
@ -212,6 +243,7 @@ pub trait HirDisplay {
|
||||
display_target,
|
||||
display_kind: DisplayKind::Diagnostics,
|
||||
show_container_bounds: false,
|
||||
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
|
||||
}
|
||||
}
|
||||
|
||||
@ -236,6 +268,7 @@ pub trait HirDisplay {
|
||||
display_target,
|
||||
display_kind: DisplayKind::Diagnostics,
|
||||
show_container_bounds: false,
|
||||
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
|
||||
}
|
||||
}
|
||||
|
||||
@ -260,6 +293,7 @@ pub trait HirDisplay {
|
||||
display_target,
|
||||
display_kind: DisplayKind::Diagnostics,
|
||||
show_container_bounds: false,
|
||||
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
|
||||
}
|
||||
}
|
||||
|
||||
@ -284,6 +318,7 @@ pub trait HirDisplay {
|
||||
display_target: DisplayTarget::from_crate(db, module_id.krate()),
|
||||
display_kind: DisplayKind::SourceCode { target_module_id: module_id, allow_opaque },
|
||||
show_container_bounds: false,
|
||||
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
|
||||
bounds_formatting_ctx: Default::default(),
|
||||
}) {
|
||||
Ok(()) => {}
|
||||
@ -312,6 +347,7 @@ pub trait HirDisplay {
|
||||
display_target,
|
||||
display_kind: DisplayKind::Test,
|
||||
show_container_bounds: false,
|
||||
display_lifetimes: DisplayLifetime::Always,
|
||||
}
|
||||
}
|
||||
|
||||
@ -336,6 +372,7 @@ pub trait HirDisplay {
|
||||
display_target,
|
||||
display_kind: DisplayKind::Diagnostics,
|
||||
show_container_bounds,
|
||||
display_lifetimes: DisplayLifetime::OnlyNamedOrStatic,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -480,6 +517,7 @@ pub struct HirDisplayWrapper<'a, T> {
|
||||
display_kind: DisplayKind,
|
||||
display_target: DisplayTarget,
|
||||
show_container_bounds: bool,
|
||||
display_lifetimes: DisplayLifetime,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
@ -502,7 +540,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
|
||||
self.t.hir_fmt(&mut HirFormatter {
|
||||
db: self.db,
|
||||
fmt: f,
|
||||
buf: String::with_capacity(20),
|
||||
buf: String::with_capacity(self.max_size.unwrap_or(20)),
|
||||
curr_size: 0,
|
||||
max_size: self.max_size,
|
||||
entity_limit: self.limited_size,
|
||||
@ -511,6 +549,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
|
||||
display_target: self.display_target,
|
||||
closure_style: self.closure_style,
|
||||
show_container_bounds: self.show_container_bounds,
|
||||
display_lifetimes: self.display_lifetimes,
|
||||
bounds_formatting_ctx: Default::default(),
|
||||
})
|
||||
}
|
||||
@ -519,6 +558,11 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> {
|
||||
self.closure_style = c;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_lifetime_display(mut self, l: DisplayLifetime) -> Self {
|
||||
self.display_lifetimes = l;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> fmt::Display for HirDisplayWrapper<'_, T>
|
||||
@ -1022,9 +1066,7 @@ impl HirDisplay for Ty {
|
||||
kind @ (TyKind::Raw(m, t) | TyKind::Ref(m, _, t)) => {
|
||||
if let TyKind::Ref(_, l, _) = kind {
|
||||
f.write_char('&')?;
|
||||
if cfg!(test) {
|
||||
// rendering these unconditionally is probably too much (at least for inlay
|
||||
// hints) so we gate it to testing only for the time being
|
||||
if f.render_lifetime(l) {
|
||||
l.hir_fmt(f)?;
|
||||
f.write_char(' ')?;
|
||||
}
|
||||
@ -1055,9 +1097,10 @@ impl HirDisplay for Ty {
|
||||
})
|
||||
};
|
||||
let (preds_to_print, has_impl_fn_pred) = match t.kind(Interner) {
|
||||
TyKind::Dyn(dyn_ty) if dyn_ty.bounds.skip_binders().interned().len() > 1 => {
|
||||
TyKind::Dyn(dyn_ty) => {
|
||||
let bounds = dyn_ty.bounds.skip_binders().interned();
|
||||
(bounds.len(), contains_impl_fn(bounds))
|
||||
let render_lifetime = f.render_lifetime(&dyn_ty.lifetime);
|
||||
(bounds.len() + render_lifetime as usize, contains_impl_fn(bounds))
|
||||
}
|
||||
TyKind::Alias(AliasTy::Opaque(OpaqueTy {
|
||||
opaque_ty_id,
|
||||
@ -1348,9 +1391,8 @@ impl HirDisplay for Ty {
|
||||
)?;
|
||||
}
|
||||
ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => {
|
||||
let future_trait = db
|
||||
.lang_item(body.module(db).krate(), LangItem::Future)
|
||||
.and_then(LangItemTarget::as_trait);
|
||||
let future_trait =
|
||||
LangItem::Future.resolve_trait(db, body.module(db).krate());
|
||||
let output = future_trait.and_then(|t| {
|
||||
db.trait_items(t)
|
||||
.associated_type_by_name(&Name::new_symbol_root(sym::Output))
|
||||
@ -1480,7 +1522,7 @@ impl HirDisplay for Ty {
|
||||
TyKind::BoundVar(idx) => idx.hir_fmt(f)?,
|
||||
TyKind::Dyn(dyn_ty) => {
|
||||
// Reorder bounds to satisfy `write_bounds_like_dyn_trait()`'s expectation.
|
||||
// FIXME: `Iterator::partition_in_place()` or `Vec::drain_filter()` may make it
|
||||
// FIXME: `Iterator::partition_in_place()` or `Vec::extract_if()` may make it
|
||||
// more efficient when either of them hits stable.
|
||||
let mut bounds: SmallVec<[_; 4]> =
|
||||
dyn_ty.bounds.skip_binders().iter(Interner).cloned().collect();
|
||||
@ -1489,6 +1531,17 @@ impl HirDisplay for Ty {
|
||||
bounds.extend(others);
|
||||
bounds.extend(auto_traits);
|
||||
|
||||
if f.render_lifetime(&dyn_ty.lifetime) {
|
||||
// we skip the binders in `write_bounds_like_dyn_trait_with_prefix`
|
||||
bounds.push(Binders::empty(
|
||||
Interner,
|
||||
chalk_ir::WhereClause::TypeOutlives(chalk_ir::TypeOutlives {
|
||||
ty: self.clone(),
|
||||
lifetime: dyn_ty.lifetime.clone(),
|
||||
}),
|
||||
));
|
||||
}
|
||||
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"dyn",
|
||||
@ -1728,9 +1781,7 @@ impl SizedByDefault {
|
||||
match self {
|
||||
Self::NotSized => false,
|
||||
Self::Sized { anchor } => {
|
||||
let sized_trait = db
|
||||
.lang_item(anchor, LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait());
|
||||
let sized_trait = LangItem::Sized.resolve_trait(db, anchor);
|
||||
Some(trait_) == sized_trait
|
||||
}
|
||||
}
|
||||
@ -1895,8 +1946,7 @@ fn write_bounds_like_dyn_trait(
|
||||
write!(f, ">")?;
|
||||
}
|
||||
if let SizedByDefault::Sized { anchor } = default_sized {
|
||||
let sized_trait =
|
||||
f.db.lang_item(anchor, LangItem::Sized).and_then(|lang_item| lang_item.as_trait());
|
||||
let sized_trait = LangItem::Sized.resolve_trait(f.db, anchor);
|
||||
if !is_sized {
|
||||
if !first {
|
||||
write!(f, " + ")?;
|
||||
@ -1993,7 +2043,6 @@ impl HirDisplay for LifetimeData {
|
||||
write!(f, "{}", param_data.name.display(f.db, f.edition()))?;
|
||||
Ok(())
|
||||
}
|
||||
_ if f.display_kind.is_source_code() => write!(f, "'_"),
|
||||
LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
|
||||
LifetimeData::InferenceVar(_) => write!(f, "_"),
|
||||
LifetimeData::Static => write!(f, "'static"),
|
||||
|
@ -19,9 +19,7 @@ fn has_destructor(db: &dyn HirDatabase, adt: AdtId) -> bool {
|
||||
AdtId::StructId(id) => db.lookup_intern_struct(id).container,
|
||||
AdtId::UnionId(id) => db.lookup_intern_union(id).container,
|
||||
};
|
||||
let Some(drop_trait) =
|
||||
db.lang_item(module.krate(), LangItem::Drop).and_then(|it| it.as_trait())
|
||||
else {
|
||||
let Some(drop_trait) = LangItem::Drop.resolve_trait(db, module.krate()) else {
|
||||
return false;
|
||||
};
|
||||
let impls = match module.containing_block() {
|
||||
@ -181,8 +179,7 @@ fn projection_has_drop_glue(
|
||||
}
|
||||
|
||||
fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
|
||||
let Some(copy_trait) = db.lang_item(env.krate, LangItem::Copy).and_then(|it| it.as_trait())
|
||||
else {
|
||||
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, env.krate) else {
|
||||
return false;
|
||||
};
|
||||
let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(ty).build();
|
||||
|
@ -124,7 +124,7 @@ pub fn dyn_compatibility_of_trait_query(
|
||||
|
||||
fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
|
||||
let krate = def.module(db).krate();
|
||||
let Some(sized) = db.lang_item(krate, LangItem::Sized).and_then(|l| l.as_trait()) else {
|
||||
let Some(sized) = LangItem::Sized.resolve_trait(db, krate) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
@ -491,8 +491,8 @@ fn receiver_is_dispatchable(
|
||||
|
||||
let krate = func.module(db).krate();
|
||||
let traits = (
|
||||
db.lang_item(krate, LangItem::Unsize).and_then(|it| it.as_trait()),
|
||||
db.lang_item(krate, LangItem::DispatchFromDyn).and_then(|it| it.as_trait()),
|
||||
LangItem::Unsize.resolve_trait(db, krate),
|
||||
LangItem::DispatchFromDyn.resolve_trait(db, krate),
|
||||
);
|
||||
let (Some(unsize_did), Some(dispatch_from_dyn_did)) = traits else {
|
||||
return false;
|
||||
@ -515,7 +515,7 @@ fn receiver_is_dispatchable(
|
||||
trait_id: to_chalk_trait_id(trait_),
|
||||
substitution: Substitution::from_iter(
|
||||
Interner,
|
||||
std::iter::once(unsized_self_ty.clone().cast(Interner))
|
||||
std::iter::once(unsized_self_ty.cast(Interner))
|
||||
.chain(placeholder_subst.iter(Interner).skip(1).cloned()),
|
||||
),
|
||||
});
|
||||
|
@ -39,7 +39,7 @@ use hir_def::{
|
||||
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
|
||||
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
|
||||
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
|
||||
lang_item::{LangItem, LangItemTarget},
|
||||
lang_item::{LangItem, LangItemTarget, lang_item},
|
||||
layout::Integer,
|
||||
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
|
||||
signatures::{ConstSignature, StaticSignature},
|
||||
@ -1801,7 +1801,7 @@ impl<'a> InferenceContext<'a> {
|
||||
|
||||
fn resolve_lang_item(&self, item: LangItem) -> Option<LangItemTarget> {
|
||||
let krate = self.resolver.krate();
|
||||
self.db.lang_item(krate, item)
|
||||
lang_item(self.db, krate, item)
|
||||
}
|
||||
|
||||
fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
|
||||
|
@ -127,7 +127,7 @@ impl InferenceContext<'_> {
|
||||
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
|
||||
let prev_closure = mem::replace(&mut self.current_closure, id);
|
||||
let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.clone());
|
||||
let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty.clone()));
|
||||
let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty));
|
||||
let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
|
||||
|
||||
self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
|
||||
|
@ -8,10 +8,7 @@
|
||||
use std::iter;
|
||||
|
||||
use chalk_ir::{BoundVar, Goal, Mutability, TyKind, TyVariableKind, cast::Cast};
|
||||
use hir_def::{
|
||||
hir::ExprId,
|
||||
lang_item::{LangItem, LangItemTarget},
|
||||
};
|
||||
use hir_def::{hir::ExprId, lang_item::LangItem};
|
||||
use stdx::always;
|
||||
use triomphe::Arc;
|
||||
|
||||
@ -701,8 +698,8 @@ impl InferenceTable<'_> {
|
||||
reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());
|
||||
|
||||
let krate = self.trait_env.krate;
|
||||
let coerce_unsized_trait = match self.db.lang_item(krate, LangItem::CoerceUnsized) {
|
||||
Some(LangItemTarget::Trait(trait_)) => trait_,
|
||||
let coerce_unsized_trait = match LangItem::CoerceUnsized.resolve_trait(self.db, krate) {
|
||||
Some(trait_) => trait_,
|
||||
_ => return Err(TypeError),
|
||||
};
|
||||
|
||||
|
@ -827,9 +827,9 @@ impl InferenceContext<'_> {
|
||||
}
|
||||
let assoc = self.resolve_ops_index_output();
|
||||
self.resolve_associated_type_with_params(
|
||||
self_ty.clone(),
|
||||
self_ty,
|
||||
assoc,
|
||||
&[index_ty.clone().cast(Interner)],
|
||||
&[index_ty.cast(Interner)],
|
||||
)
|
||||
} else {
|
||||
self.err_ty()
|
||||
|
@ -126,10 +126,8 @@ impl InferenceContext<'_> {
|
||||
&Expr::Index { base, index } => {
|
||||
if mutability == Mutability::Mut {
|
||||
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) {
|
||||
if let Some(index_trait) = self
|
||||
.db
|
||||
.lang_item(self.table.trait_env.krate, LangItem::IndexMut)
|
||||
.and_then(|l| l.as_trait())
|
||||
if let Some(index_trait) =
|
||||
LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate)
|
||||
{
|
||||
if let Some(index_fn) = self
|
||||
.db
|
||||
@ -183,10 +181,8 @@ impl InferenceContext<'_> {
|
||||
let mut mutability = mutability;
|
||||
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) {
|
||||
if mutability == Mutability::Mut {
|
||||
if let Some(deref_trait) = self
|
||||
.db
|
||||
.lang_item(self.table.trait_env.krate, LangItem::DerefMut)
|
||||
.and_then(|l| l.as_trait())
|
||||
if let Some(deref_trait) =
|
||||
LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate)
|
||||
{
|
||||
let ty = self.result.type_of_expr.get(*expr);
|
||||
let is_mut_ptr = ty.is_some_and(|ty| {
|
||||
|
@ -435,7 +435,7 @@ impl InferenceContext<'_> {
|
||||
decl: Option<DeclContext>,
|
||||
) -> Ty {
|
||||
let (expectation_type, expectation_lt) = match expected.as_reference() {
|
||||
Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()),
|
||||
Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime),
|
||||
None => {
|
||||
let inner_ty = self.table.new_type_var();
|
||||
let inner_lt = self.table.new_lifetime_var();
|
||||
@ -597,7 +597,7 @@ impl InferenceContext<'_> {
|
||||
let size = consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db));
|
||||
|
||||
let elem_ty = self.table.new_type_var();
|
||||
let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner);
|
||||
let array_ty = TyKind::Array(elem_ty, size).intern(Interner);
|
||||
Some(array_ty)
|
||||
}
|
||||
|
||||
|
@ -1024,16 +1024,12 @@ impl<'a> InferenceTable<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
let Some(sized) = self
|
||||
.db
|
||||
.lang_item(self.trait_env.krate, LangItem::Sized)
|
||||
.and_then(|sized| sized.as_trait())
|
||||
else {
|
||||
let Some(sized) = LangItem::Sized.resolve_trait(self.db, self.trait_env.krate) else {
|
||||
return false;
|
||||
};
|
||||
let sized_pred = WhereClause::Implemented(TraitRef {
|
||||
trait_id: to_chalk_trait_id(sized),
|
||||
substitution: Substitution::from1(Interner, ty.clone()),
|
||||
substitution: Substitution::from1(Interner, ty),
|
||||
});
|
||||
let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(sized_pred)).intern(Interner);
|
||||
matches!(self.try_obligation(goal), Some(Solution::Unique(_)))
|
||||
|
@ -590,10 +590,7 @@ impl<'a> TyLoweringContext<'a> {
|
||||
}
|
||||
}
|
||||
&TypeBound::Path(path, TraitBoundModifier::Maybe) => {
|
||||
let sized_trait = self
|
||||
.db
|
||||
.lang_item(self.resolver.krate(), LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait());
|
||||
let sized_trait = LangItem::Sized.resolve_trait(self.db, self.resolver.krate());
|
||||
// Don't lower associated type bindings as the only possible relaxed trait bound
|
||||
// `?Sized` has no of them.
|
||||
// If we got another trait here ignore the bound completely.
|
||||
@ -736,10 +733,8 @@ impl<'a> TyLoweringContext<'a> {
|
||||
}
|
||||
|
||||
if !ctx.unsized_types.contains(&self_ty) {
|
||||
let sized_trait = ctx
|
||||
.db
|
||||
.lang_item(krate, LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
|
||||
let sized_trait =
|
||||
LangItem::Sized.resolve_trait(ctx.db, krate).map(to_chalk_trait_id);
|
||||
let sized_clause = sized_trait.map(|trait_id| {
|
||||
let clause = WhereClause::Implemented(TraitRef {
|
||||
trait_id,
|
||||
@ -1188,9 +1183,7 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
|
||||
substitution: &'subst Substitution,
|
||||
resolver: &Resolver,
|
||||
) -> Option<impl Iterator<Item = WhereClause> + Captures<'a> + Captures<'subst>> {
|
||||
let sized_trait = db
|
||||
.lang_item(resolver.krate(), LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?;
|
||||
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id)?;
|
||||
|
||||
let trait_self_idx = trait_self_param_idx(db, def);
|
||||
|
||||
@ -1475,7 +1468,7 @@ fn type_for_enum_variant_constructor(
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::tracked(cycle_result = type_for_adt_cycle_result)]
|
||||
#[salsa_macros::tracked(cycle_result = type_for_adt_cycle_result)]
|
||||
fn type_for_adt_tracked(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
|
||||
type_for_adt(db, adt)
|
||||
}
|
||||
@ -1540,7 +1533,7 @@ pub enum TyDefId {
|
||||
}
|
||||
impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
|
||||
pub enum ValueTyDefId {
|
||||
FunctionId(FunctionId),
|
||||
StructId(StructId),
|
||||
|
@ -515,9 +515,15 @@ impl From<Option<BlockId>> for VisibleFromModule {
}
}

#[derive(Debug, Clone)]
pub enum AutorefOrPtrAdjustment {
Autoref(Mutability),
ToConstPtr,
}

#[derive(Debug, Clone, Default)]
pub struct ReceiverAdjustments {
autoref: Option<Mutability>,
autoref: Option<AutorefOrPtrAdjustment>,
autoderefs: usize,
unsize_array: bool,
}

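Aside, not part of the commit: a standalone sketch of the new receiver adjustment, showing how `ToConstPtr` weakens a `*mut T` receiver to `*const T` while `Autoref` behaves as before (simplified stand-in types, not the real chalk `Ty`).

    // Simplified type model just for illustration.
    #[derive(Clone, Debug, PartialEq)]
    enum Ty {
        Unit,
        Ref { mutable: bool, pointee: Box<Ty> },
        RawPtr { mutable: bool, pointee: Box<Ty> },
    }

    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    enum AutorefOrPtrAdjustment {
        Autoref { mutable: bool },
        ToConstPtr,
    }

    fn apply(adj: AutorefOrPtrAdjustment, ty: Ty) -> Option<Ty> {
        match adj {
            AutorefOrPtrAdjustment::Autoref { mutable } => {
                Some(Ty::Ref { mutable, pointee: Box::new(ty) })
            }
            // Only a `*mut T` receiver may be weakened to `*const T`.
            AutorefOrPtrAdjustment::ToConstPtr => match ty {
                Ty::RawPtr { mutable: true, pointee } => {
                    Some(Ty::RawPtr { mutable: false, pointee })
                }
                _ => None,
            },
        }
    }

    fn main() {
        let mut_ptr = Ty::RawPtr { mutable: true, pointee: Box::new(Ty::Unit) };
        assert_eq!(
            apply(AutorefOrPtrAdjustment::ToConstPtr, mut_ptr),
            Some(Ty::RawPtr { mutable: false, pointee: Box::new(Ty::Unit) })
        );
    }
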
@ -535,10 +541,15 @@ impl ReceiverAdjustments {
|
||||
}
|
||||
Some((kind, new_ty)) => {
|
||||
ty = new_ty.clone();
|
||||
let mutbl = match self.autoref {
|
||||
Some(AutorefOrPtrAdjustment::Autoref(m)) => Some(m),
|
||||
Some(AutorefOrPtrAdjustment::ToConstPtr) => Some(Mutability::Not),
|
||||
// FIXME should we know the mutability here, when autoref is `None`?
|
||||
None => None,
|
||||
};
|
||||
adjust.push(Adjustment {
|
||||
kind: Adjust::Deref(match kind {
|
||||
// FIXME should we know the mutability here, when autoref is `None`?
|
||||
AutoderefKind::Overloaded => Some(OverloadedDeref(self.autoref)),
|
||||
AutoderefKind::Overloaded => Some(OverloadedDeref(mutbl)),
|
||||
AutoderefKind::Builtin => None,
|
||||
}),
|
||||
target: new_ty,
|
||||
@ -546,11 +557,27 @@ impl ReceiverAdjustments {
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(m) = self.autoref {
|
||||
if let Some(autoref) = &self.autoref {
|
||||
let lt = table.new_lifetime_var();
|
||||
let a = Adjustment::borrow(m, ty, lt);
|
||||
ty = a.target.clone();
|
||||
adjust.push(a);
|
||||
match autoref {
|
||||
AutorefOrPtrAdjustment::Autoref(m) => {
|
||||
let a = Adjustment::borrow(*m, ty, lt);
|
||||
ty = a.target.clone();
|
||||
adjust.push(a);
|
||||
}
|
||||
AutorefOrPtrAdjustment::ToConstPtr => {
|
||||
if let TyKind::Raw(Mutability::Mut, pointee) = ty.kind(Interner) {
|
||||
let a = Adjustment {
|
||||
kind: Adjust::Pointer(PointerCast::MutToConstPointer),
|
||||
target: TyKind::Raw(Mutability::Not, pointee.clone()).intern(Interner),
|
||||
};
|
||||
ty = a.target.clone();
|
||||
adjust.push(a);
|
||||
} else {
|
||||
never!("`ToConstPtr` target is not a raw mutable pointer");
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
if self.unsize_array {
|
||||
ty = 'it: {
|
||||
@ -575,8 +602,8 @@ impl ReceiverAdjustments {
|
||||
(ty, adjust)
|
||||
}
|
||||
|
||||
fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments {
|
||||
Self { autoref: Some(m), ..*self }
|
||||
fn with_autoref(&self, a: AutorefOrPtrAdjustment) -> ReceiverAdjustments {
|
||||
Self { autoref: Some(a), ..*self }
|
||||
}
|
||||
}
|
||||
|
||||
@ -1051,7 +1078,7 @@ fn iterate_method_candidates_with_autoref(
|
||||
let mut maybe_reborrowed = first_adjustment.clone();
|
||||
if let Some((_, _, m)) = receiver_ty.value.as_reference() {
|
||||
// Prefer reborrow of references to move
|
||||
maybe_reborrowed.autoref = Some(m);
|
||||
maybe_reborrowed.autoref = Some(AutorefOrPtrAdjustment::Autoref(m));
|
||||
maybe_reborrowed.autoderefs += 1;
|
||||
}
|
||||
|
||||
@ -1063,15 +1090,34 @@ fn iterate_method_candidates_with_autoref(
|
||||
binders: receiver_ty.binders.clone(),
|
||||
};
|
||||
|
||||
iterate_method_candidates_by_receiver(refed, first_adjustment.with_autoref(Mutability::Not))?;
|
||||
iterate_method_candidates_by_receiver(
|
||||
refed,
|
||||
first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Not)),
|
||||
)?;
|
||||
|
||||
let ref_muted = Canonical {
|
||||
value: TyKind::Ref(Mutability::Mut, error_lifetime(), receiver_ty.value.clone())
|
||||
.intern(Interner),
|
||||
binders: receiver_ty.binders,
|
||||
binders: receiver_ty.binders.clone(),
|
||||
};
|
||||
|
||||
iterate_method_candidates_by_receiver(ref_muted, first_adjustment.with_autoref(Mutability::Mut))
|
||||
iterate_method_candidates_by_receiver(
|
||||
ref_muted,
|
||||
first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Mut)),
|
||||
)?;
|
||||
|
||||
if let Some((ty, Mutability::Mut)) = receiver_ty.value.as_raw_ptr() {
|
||||
let const_ptr_ty = Canonical {
|
||||
value: TyKind::Raw(Mutability::Not, ty.clone()).intern(Interner),
|
||||
binders: receiver_ty.binders,
|
||||
};
|
||||
iterate_method_candidates_by_receiver(
|
||||
const_ptr_ty,
|
||||
first_adjustment.with_autoref(AutorefOrPtrAdjustment::ToConstPtr),
|
||||
)?;
|
||||
}
|
||||
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
|
||||
pub trait MethodCandidateCallback {
|
||||
|
@ -77,7 +77,14 @@ pub struct Local {
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Operand {
pub struct Operand {
kind: OperandKind,
// FIXME : This should actually just be of type `MirSpan`.
span: Option<MirSpan>,
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum OperandKind {
/// Creates a value by loading the given place.
///
/// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there

@ -101,7 +108,13 @@ pub enum Operand {

impl Operand {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self {
Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty))
Operand {
kind: OperandKind::Constant(intern_const_scalar(
ConstScalar::Bytes(data, memory_map),
ty,
)),
span: None,
}
}

fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self {

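Aside, not part of the commit: a standalone sketch of the `Operand` change above, wrapping the old enum as `OperandKind` inside a struct that also carries an optional span so diagnostics can point at the operand itself (simplified stand-in types).

    // Stand-in for the real MIR types: places are indices, spans are plain numbers.
    #[allow(dead_code)]
    #[derive(Debug, Clone, PartialEq)]
    enum OperandKind {
        Copy(usize),
        Move(usize),
        Constant(i128),
    }

    #[derive(Debug, Clone, PartialEq)]
    struct Operand {
        kind: OperandKind,
        span: Option<u32>, // would be `MirSpan` in the real code
    }

    impl Operand {
        fn from_const(value: i128) -> Self {
            Operand { kind: OperandKind::Constant(value), span: None }
        }
    }

    fn main() {
        let op = Operand { kind: OperandKind::Move(3), span: Some(42) };
        // Callers now match on `op.kind` instead of on `op` directly.
        match &op.kind {
            OperandKind::Copy(p) | OperandKind::Move(p) => println!("uses place {p}"),
            OperandKind::Constant(_) => {}
        }
        // A diagnostic can prefer the operand's own span, falling back to the
        // enclosing statement's span when none was recorded.
        let enclosing_span = 7;
        assert_eq!(op.span.unwrap_or(enclosing_span), 42);
        assert_eq!(Operand::from_const(1).span, None);
    }
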
@ -1076,11 +1089,11 @@ impl MirBody {
|
||||
f: &mut impl FnMut(&mut Place, &mut ProjectionStore),
|
||||
store: &mut ProjectionStore,
|
||||
) {
|
||||
match op {
|
||||
Operand::Copy(p) | Operand::Move(p) => {
|
||||
match &mut op.kind {
|
||||
OperandKind::Copy(p) | OperandKind::Move(p) => {
|
||||
f(p, store);
|
||||
}
|
||||
Operand::Constant(_) | Operand::Static(_) => (),
|
||||
OperandKind::Constant(_) | OperandKind::Static(_) => (),
|
||||
}
|
||||
}
|
||||
for (_, block) in self.basic_blocks.iter_mut() {
|
||||
|
@ -15,13 +15,13 @@ use crate::{
|
||||
ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags,
|
||||
db::{HirDatabase, InternedClosure},
|
||||
display::DisplayTarget,
|
||||
mir::Operand,
|
||||
mir::OperandKind,
|
||||
utils::ClosureSubst,
|
||||
};
|
||||
|
||||
use super::{
|
||||
BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, MutBorrowKind, Place,
|
||||
ProjectionElem, Rvalue, StatementKind, TerminatorKind,
|
||||
BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, MutBorrowKind, Operand,
|
||||
Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@ -120,8 +120,8 @@ fn make_fetch_closure_field(
|
||||
|
||||
fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> {
|
||||
let mut result = vec![];
|
||||
let mut for_operand = |op: &Operand, span: MirSpan| match op {
|
||||
Operand::Copy(p) | Operand::Move(p) => {
|
||||
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
|
||||
OperandKind::Copy(p) | OperandKind::Move(p) => {
|
||||
let mut ty: Ty = body.locals[p.local].ty.clone();
|
||||
let mut is_dereference_of_ref = false;
|
||||
for proj in p.projection.lookup(&body.projection_store) {
|
||||
@ -139,10 +139,10 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
|
||||
&& !ty.clone().is_copy(db, body.owner)
|
||||
&& !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR)
|
||||
{
|
||||
result.push(MovedOutOfRef { span, ty });
|
||||
result.push(MovedOutOfRef { span: op.span.unwrap_or(span), ty });
|
||||
}
|
||||
}
|
||||
Operand::Constant(_) | Operand::Static(_) => (),
|
||||
OperandKind::Constant(_) | OperandKind::Static(_) => (),
|
||||
};
|
||||
for (_, block) in body.basic_blocks.iter() {
|
||||
db.unwind_if_revision_cancelled();
|
||||
@ -215,8 +215,8 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
|
||||
|
||||
fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved> {
|
||||
let mut result = vec![];
|
||||
let mut for_operand = |op: &Operand, span: MirSpan| match op {
|
||||
Operand::Copy(p) | Operand::Move(p) => {
|
||||
let mut for_operand = |op: &Operand, span: MirSpan| match op.kind {
|
||||
OperandKind::Copy(p) | OperandKind::Move(p) => {
|
||||
let mut ty: Ty = body.locals[p.local].ty.clone();
|
||||
for proj in p.projection.lookup(&body.projection_store) {
|
||||
ty = proj.projected_ty(
|
||||
@ -232,7 +232,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
|
||||
result.push(PartiallyMoved { span, ty, local: p.local });
|
||||
}
|
||||
}
|
||||
Operand::Constant(_) | Operand::Static(_) => (),
|
||||
OperandKind::Constant(_) | OperandKind::Static(_) => (),
|
||||
};
|
||||
for (_, block) in body.basic_blocks.iter() {
|
||||
db.unwind_if_revision_cancelled();
|
||||
@ -492,7 +492,7 @@ fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>
|
||||
}
|
||||
|
||||
fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
|
||||
if let Operand::Copy(p) | Operand::Move(p) = arg {
|
||||
if let OperandKind::Copy(p) | OperandKind::Move(p) = arg.kind {
|
||||
record_usage(p.local, result);
|
||||
}
|
||||
}
|
||||
|
@ -47,7 +47,7 @@ use crate::{
|
||||
|
||||
use super::{
|
||||
AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan,
|
||||
Operand, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
|
||||
Operand, OperandKind, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
|
||||
TerminatorKind, UnOp, return_slot,
|
||||
};
|
||||
|
||||
@ -655,22 +655,15 @@ impl Evaluator<'_> {
mir_or_dyn_index_cache: RefCell::new(Default::default()),
unused_locals_store: RefCell::new(Default::default()),
cached_ptr_size,
cached_fn_trait_func: db
.lang_item(crate_id, LangItem::Fn)
.and_then(|x| x.as_trait())
cached_fn_trait_func: LangItem::Fn
.resolve_trait(db, crate_id)
.and_then(|x| db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call))),
cached_fn_mut_trait_func: db
.lang_item(crate_id, LangItem::FnMut)
.and_then(|x| x.as_trait())
.and_then(|x| {
db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_mut))
}),
cached_fn_once_trait_func: db
.lang_item(crate_id, LangItem::FnOnce)
.and_then(|x| x.as_trait())
.and_then(|x| {
db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_once))
}),
cached_fn_mut_trait_func: LangItem::FnMut.resolve_trait(db, crate_id).and_then(|x| {
db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_mut))
}),
cached_fn_once_trait_func: LangItem::FnOnce.resolve_trait(db, crate_id).and_then(|x| {
db.trait_items(x).method_by_name(&Name::new_symbol_root(sym::call_once))
}),
})
}

@ -863,10 +856,10 @@ impl Evaluator<'_> {
}

fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<Ty> {
Ok(match o {
Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
Operand::Constant(c) => c.data(Interner).ty.clone(),
&Operand::Static(s) => {
Ok(match &o.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,
OperandKind::Constant(c) => c.data(Interner).ty.clone(),
&OperandKind::Static(s) => {
let ty = self.db.infer(s.into())[self.db.body(s.into()).body_expr].clone();
TyKind::Ref(Mutability::Not, static_lifetime(), ty).intern(Interner)
}
@ -1880,16 +1873,16 @@ impl Evaluator<'_> {
}

fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> {
Ok(match it {
Operand::Copy(p) | Operand::Move(p) => {
Ok(match &it.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
locals.drop_flags.remove_place(p, &locals.body.projection_store);
self.eval_place(p, locals)?
}
Operand::Static(st) => {
OperandKind::Static(st) => {
let addr = self.eval_static(*st, locals)?;
Interval::new(addr, self.ptr_size())
}
Operand::Constant(konst) => self.allocate_const_in_heap(locals, konst)?,
OperandKind::Constant(konst) => self.allocate_const_in_heap(locals, konst)?,
})
}

@ -2811,7 +2804,7 @@ impl Evaluator<'_> {
span: MirSpan,
) -> Result<()> {
let Some(drop_fn) = (|| {
let drop_trait = self.db.lang_item(self.crate_id, LangItem::Drop)?.as_trait()?;
let drop_trait = LangItem::Drop.resolve_trait(self.db, self.crate_id)?;
self.db.trait_items(drop_trait).method_by_name(&Name::new_symbol_root(sym::drop))
})() else {
// in some tests we don't have drop trait in minicore, and
@ -6,7 +6,6 @@ use std::cmp::{self, Ordering};
use chalk_ir::TyKind;
use hir_def::{
builtin_type::{BuiltinInt, BuiltinUint},
lang_item::LangItemTarget,
resolver::HasResolver,
};
use hir_expand::name::Name;
@ -156,8 +155,8 @@ impl Evaluator<'_> {
if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db);

let Some(hir_def::lang_item::LangItemTarget::Function(const_panic_fmt)) =
self.db.lang_item(resolver.krate(), LangItem::ConstPanicFmt)
let Some(const_panic_fmt) =
LangItem::ConstPanicFmt.resolve_function(self.db, resolver.krate())
else {
not_supported!("const_panic_fmt lang item not found or not a function");
};
@ -1257,12 +1256,12 @@ impl Evaluator<'_> {
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) {
if let Some(def) = target.as_trait().and_then(|it| {
self.db
.trait_items(it)
.method_by_name(&Name::new_symbol_root(sym::call_once))
}) {
if let Some(target) = LangItem::FnOnce.resolve_trait(self.db, self.crate_id) {
if let Some(def) = self
.db
.trait_items(target)
.method_by_name(&Name::new_symbol_root(sym::call_once))
{
self.exec_fn_trait(
def,
&args,
@ -1376,9 +1375,7 @@ impl Evaluator<'_> {
}
}
}
if let Some(LangItemTarget::EnumId(e)) =
self.db.lang_item(self.crate_id, LangItem::Ordering)
{
if let Some(e) = LangItem::Ordering.resolve_enum(self.db, self.crate_id) {
let ty = self.db.ty(e.into());
let r = self
.compute_discriminant(ty.skip_binders().clone(), &[result as i8 as u8])?;
@ -13,7 +13,7 @@ use hir_def::{
Pat, PatId, RecordFieldPat, RecordLitField,
},
item_tree::FieldsShape,
lang_item::{LangItem, LangItemTarget},
lang_item::{LangItem, LangItemTarget, lang_item},
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
};
use hir_expand::name::Name;
@ -48,6 +48,8 @@ use crate::{
utils::ClosureSubst,
};

use super::OperandKind;

mod as_place;
mod pattern_matching;

@ -324,7 +326,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else {
return Ok(None);
};
Ok(Some((Operand::Copy(p), current)))
Ok(Some((Operand { kind: OperandKind::Copy(p), span: Some(expr_id.into()) }, current)))
}
fn lower_expr_to_place_with_adjust(
@ -347,7 +349,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
self.push_assignment(
current,
place,
Operand { kind: OperandKind::Copy(p), span: None }.into(),
expr_id.into(),
);
Ok(Some(current))
}
Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => {
@ -371,7 +378,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
place,
Rvalue::Cast(
CastKind::PointerCoercion(*cast),
Operand::Copy(p),
Operand { kind: OperandKind::Copy(p), span: None },
last.target.clone(),
),
expr_id.into(),
@ -476,7 +483,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(
current,
place,
Operand::Copy(temp).into(),
Operand { kind: OperandKind::Copy(temp), span: None }.into(),
expr_id.into(),
);
Ok(Some(current))
@ -517,21 +524,23 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(
current,
place,
Operand::Constant(
ConstData {
ty,
value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
DebruijnIndex::INNERMOST,
generics.type_or_const_param_idx(p.into()).ok_or(
MirLowerError::TypeError(
"fail to lower const generic param",
),
)?,
)),
}
.intern(Interner),
)
.into(),
Rvalue::from(Operand {
kind: OperandKind::Constant(
ConstData {
ty,
value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
DebruijnIndex::INNERMOST,
generics.type_or_const_param_idx(p.into()).ok_or(
MirLowerError::TypeError(
"fail to lower const generic param",
),
)?,
)),
}
.intern(Interner),
),
span: None,
}),
expr_id.into(),
);
Ok(Some(current))
@ -876,7 +885,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
})),
&mut self.result.projection_store,
);
Operand::Copy(p)
Operand { kind: OperandKind::Copy(p), span: None }
}
})
.collect(),
@ -979,7 +988,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
self.push_assignment(
current,
place,
Operand { kind: OperandKind::Copy(p), span: None }.into(),
expr_id.into(),
);
Ok(Some(current))
}
Expr::UnaryOp {
@ -1056,8 +1070,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
let r_value =
Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place), rhs_op);
let r_value = Rvalue::CheckedBinaryOp(
op.into(),
Operand { kind: OperandKind::Copy(lhs_place), span: None },
rhs_op,
);
self.push_assignment(current, lhs_place, r_value, expr_id.into());
return Ok(Some(current));
}
@ -1232,9 +1249,11 @@ impl<'ctx> MirLowerCtx<'ctx> {
Rvalue::Ref(*bk, p),
capture_spans[0],
);
operands.push(Operand::Move(tmp));
operands.push(Operand { kind: OperandKind::Move(tmp), span: None });
}
CaptureKind::ByValue => {
operands.push(Operand { kind: OperandKind::Move(p), span: None })
}
CaptureKind::ByValue => operands.push(Operand::Move(p)),
}
}
self.push_assignment(
@ -1476,7 +1495,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
.const_eval(const_id, subst, None)
.map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))?
};
Ok(Operand::Constant(c))
Ok(Operand { kind: OperandKind::Constant(c), span: None })
}

fn write_bytes_to_place(
@ -1727,7 +1746,7 @@ impl<'ctx> MirLowerCtx<'ctx> {

fn resolve_lang_item(&self, item: LangItem) -> Result<LangItemTarget> {
let crate_id = self.owner.module(self.db).krate();
self.db.lang_item(crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
lang_item(self.db, crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
}

fn lower_block_to_place(
@ -1,6 +1,6 @@
//! MIR lowering for places

use crate::mir::MutBorrowKind;
use crate::mir::{MutBorrowKind, Operand, OperandKind};

use super::*;
use hir_def::FunctionId;
@ -155,7 +155,7 @@ impl MirLowerCtx<'_> {
self.push_assignment(
current,
temp,
Operand::Static(s).into(),
Operand { kind: OperandKind::Static(s), span: None }.into(),
expr_id.into(),
);
Ok(Some((
@ -305,7 +305,7 @@ impl MirLowerCtx<'_> {
);
let Some(current) = self.lower_call(
index_fn_op,
Box::new([Operand::Copy(place), index_operand]),
Box::new([Operand { kind: OperandKind::Copy(place), span: None }, index_operand]),
result,
current,
false,
@ -365,7 +365,7 @@ impl MirLowerCtx<'_> {
let mut result: Place = self.temp(target_ty_ref, current, span)?.into();
let Some(current) = self.lower_call(
deref_fn_op,
Box::new([Operand::Copy(ref_place)]),
Box::new([Operand { kind: OperandKind::Copy(ref_place), span: None }]),
result,
current,
false,
@ -5,10 +5,10 @@ use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields};
|
||||
use crate::{
|
||||
BindingMode,
|
||||
mir::{
|
||||
LocalId, MutBorrowKind,
|
||||
LocalId, MutBorrowKind, Operand, OperandKind,
|
||||
lower::{
|
||||
BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner,
|
||||
MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Operand, Pat, PatId, Place,
|
||||
MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Pat, PatId, Place,
|
||||
PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue,
|
||||
Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind,
|
||||
ValueNs, VariantId,
|
||||
@ -217,10 +217,14 @@ impl MirLowerCtx<'_> {
|
||||
self.push_assignment(
|
||||
current,
|
||||
discr,
|
||||
Rvalue::CheckedBinaryOp(binop, lv, Operand::Copy(cond_place)),
|
||||
Rvalue::CheckedBinaryOp(
|
||||
binop,
|
||||
lv,
|
||||
Operand { kind: OperandKind::Copy(cond_place), span: None },
|
||||
),
|
||||
pattern.into(),
|
||||
);
|
||||
let discr = Operand::Copy(discr);
|
||||
let discr = Operand { kind: OperandKind::Copy(discr), span: None };
|
||||
self.set_terminator(
|
||||
current,
|
||||
TerminatorKind::SwitchInt {
|
||||
@ -262,7 +266,10 @@ impl MirLowerCtx<'_> {
|
||||
self.set_terminator(
|
||||
current,
|
||||
TerminatorKind::SwitchInt {
|
||||
discr: Operand::Copy(place_len),
|
||||
discr: Operand {
|
||||
kind: OperandKind::Copy(place_len),
|
||||
span: None,
|
||||
},
|
||||
targets: SwitchTargets::static_if(
|
||||
pattern_len as u128,
|
||||
next,
|
||||
@ -282,10 +289,14 @@ impl MirLowerCtx<'_> {
|
||||
self.push_assignment(
|
||||
current,
|
||||
discr,
|
||||
Rvalue::CheckedBinaryOp(BinOp::Le, c, Operand::Copy(place_len)),
|
||||
Rvalue::CheckedBinaryOp(
|
||||
BinOp::Le,
|
||||
c,
|
||||
Operand { kind: OperandKind::Copy(place_len), span: None },
|
||||
),
|
||||
pattern.into(),
|
||||
);
|
||||
let discr = Operand::Copy(discr);
|
||||
let discr = Operand { kind: OperandKind::Copy(discr), span: None };
|
||||
self.set_terminator(
|
||||
current,
|
||||
TerminatorKind::SwitchInt {
|
||||
@ -407,8 +418,8 @@ impl MirLowerCtx<'_> {
|
||||
tmp2,
|
||||
Rvalue::CheckedBinaryOp(
|
||||
BinOp::Eq,
|
||||
Operand::Copy(tmp),
|
||||
Operand::Copy(cond_place),
|
||||
Operand { kind: OperandKind::Copy(tmp), span: None },
|
||||
Operand { kind: OperandKind::Copy(cond_place), span: None },
|
||||
),
|
||||
span,
|
||||
);
|
||||
@ -417,7 +428,7 @@ impl MirLowerCtx<'_> {
|
||||
self.set_terminator(
|
||||
current,
|
||||
TerminatorKind::SwitchInt {
|
||||
discr: Operand::Copy(tmp2),
|
||||
discr: Operand { kind: OperandKind::Copy(tmp2), span: None },
|
||||
targets: SwitchTargets::static_if(1, next, else_target),
|
||||
},
|
||||
span,
|
||||
@ -486,7 +497,7 @@ impl MirLowerCtx<'_> {
|
||||
self.push_assignment(
|
||||
current,
|
||||
lhs_place,
|
||||
Operand::Copy(cond_place).into(),
|
||||
Operand { kind: OperandKind::Copy(cond_place), span: None }.into(),
|
||||
expr.into(),
|
||||
);
|
||||
(current, current_else)
|
||||
@ -523,7 +534,9 @@ impl MirLowerCtx<'_> {
|
||||
current,
|
||||
target_place.into(),
|
||||
match mode {
|
||||
BindingMode::Move => Operand::Copy(cond_place).into(),
|
||||
BindingMode::Move => {
|
||||
Operand { kind: OperandKind::Copy(cond_place), span: None }.into()
|
||||
}
|
||||
BindingMode::Ref(Mutability::Not) => Rvalue::Ref(BorrowKind::Shared, cond_place),
|
||||
BindingMode::Ref(Mutability::Mut) => {
|
||||
Rvalue::Ref(BorrowKind::Mut { kind: MutBorrowKind::Default }, cond_place)
|
||||
@ -547,10 +560,14 @@ impl MirLowerCtx<'_> {
|
||||
self.push_assignment(
|
||||
current,
|
||||
discr,
|
||||
Rvalue::CheckedBinaryOp(BinOp::Eq, c, Operand::Copy(cond_place)),
|
||||
Rvalue::CheckedBinaryOp(
|
||||
BinOp::Eq,
|
||||
c,
|
||||
Operand { kind: OperandKind::Copy(cond_place), span: None },
|
||||
),
|
||||
pattern.into(),
|
||||
);
|
||||
let discr = Operand::Copy(discr);
|
||||
let discr = Operand { kind: OperandKind::Copy(discr), span: None };
|
||||
self.set_terminator(
|
||||
current,
|
||||
TerminatorKind::SwitchInt {
|
||||
@ -583,7 +600,7 @@ impl MirLowerCtx<'_> {
|
||||
self.set_terminator(
|
||||
current,
|
||||
TerminatorKind::SwitchInt {
|
||||
discr: Operand::Copy(tmp),
|
||||
discr: Operand { kind: OperandKind::Copy(tmp), span: None },
|
||||
targets: SwitchTargets::static_if(e, next, *else_target),
|
||||
},
|
||||
span,
|
||||
|
@ -25,7 +25,7 @@ use crate::{
|
||||
infer::normalize,
|
||||
};
|
||||
|
||||
use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind};
|
||||
use super::{MirBody, MirLowerError, Operand, OperandKind, Rvalue, StatementKind, TerminatorKind};
|
||||
|
||||
macro_rules! not_supported {
|
||||
($it: expr) => {
|
||||
@ -170,8 +170,8 @@ impl Filler<'_> {
|
||||
}
|
||||
|
||||
fn fill_operand(&mut self, op: &mut Operand) -> Result<(), MirLowerError> {
|
||||
match op {
|
||||
Operand::Constant(c) => {
|
||||
match &mut op.kind {
|
||||
OperandKind::Constant(c) => {
|
||||
match &c.data(Interner).value {
|
||||
chalk_ir::ConstValue::BoundVar(b) => {
|
||||
let resolved = self
|
||||
@ -215,7 +215,7 @@ impl Filler<'_> {
|
||||
}
|
||||
self.fill_const(c)?;
|
||||
}
|
||||
Operand::Copy(_) | Operand::Move(_) | Operand::Static(_) => (),
|
||||
OperandKind::Copy(_) | OperandKind::Move(_) | OperandKind::Static(_) => (),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -18,8 +18,8 @@ use crate::{
};

use super::{
AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, MutBorrowKind, Operand, Place,
Rvalue, UnOp,
AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, MutBorrowKind, Operand, OperandKind,
Place, Rvalue, UnOp,
};

macro_rules! w {
($it: expr) => {
@ -374,14 +374,14 @@ impl<'a> MirPrettyCtx<'a> {
}

fn operand(&mut self, r: &Operand) {
match r {
Operand::Copy(p) | Operand::Move(p) => {
match &r.kind {
OperandKind::Copy(p) | OperandKind::Move(p) => {
// MIR at the time of writing doesn't have difference between move and copy, so we show them
// equally. Feel free to change it.
self.place(p);
}
Operand::Constant(c) => w!(self, "Const({})", self.hir_display(c)),
Operand::Static(s) => w!(self, "Static({:?})", s),
OperandKind::Constant(c) => w!(self, "Const({})", self.hir_display(c)),
OperandKind::Static(s) => w!(self, "Static({:?})", s),
}
}
@ -16,7 +16,7 @@ use syntax::TextRange;
use test_utils::extract_annotations;
use triomphe::Arc;

#[salsa::db]
#[salsa_macros::db]
#[derive(Clone)]
pub(crate) struct TestDB {
storage: salsa::Storage<Self>,
@ -47,7 +47,7 @@ impl fmt::Debug for TestDB {
}
}

#[salsa::db]
#[salsa_macros::db]
impl SourceDatabase for TestDB {
fn file_text(&self, file_id: base_db::FileId) -> FileText {
self.files.file_text(file_id)
@ -102,7 +102,7 @@ impl SourceDatabase for TestDB {
}
}

#[salsa::db]
#[salsa_macros::db]
impl salsa::Database for TestDB {
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
let mut events = self.events.lock().unwrap();
@ -561,7 +561,7 @@ trait Foo {}
|
||||
fn test(f: impl Foo, g: &(impl Foo + ?Sized)) {
|
||||
let _: &dyn Foo = &f;
|
||||
let _: &dyn Foo = g;
|
||||
//^ expected &'? dyn Foo, got &'? impl Foo + ?Sized
|
||||
//^ expected &'? (dyn Foo + 'static), got &'? impl Foo + ?Sized
|
||||
}
|
||||
"#,
|
||||
);
|
||||
@ -827,11 +827,11 @@ struct V<T> { t: T }
|
||||
fn main() {
|
||||
let a: V<&dyn Tr>;
|
||||
(a,) = V { t: &S };
|
||||
//^^^^expected V<&'? S>, got (V<&'? dyn Tr>,)
|
||||
//^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,)
|
||||
|
||||
let mut a: V<&dyn Tr> = V { t: &S };
|
||||
(a,) = V { t: &S };
|
||||
//^^^^expected V<&'? S>, got (V<&'? dyn Tr>,)
|
||||
//^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,)
|
||||
}
|
||||
"#,
|
||||
);
|
||||
|
@ -65,13 +65,13 @@ trait A {
|
||||
}
|
||||
trait B: A {}
|
||||
|
||||
fn test(
|
||||
fn test<'a>(
|
||||
_: &(dyn A<Assoc = ()> + Send),
|
||||
//^ &'_ (dyn A<Assoc = ()> + Send)
|
||||
_: &(dyn Send + A<Assoc = ()>),
|
||||
//^ &'_ (dyn A<Assoc = ()> + Send)
|
||||
//^ &(dyn A<Assoc = ()> + Send + 'static)
|
||||
_: &'a (dyn Send + A<Assoc = ()>),
|
||||
//^ &'a (dyn A<Assoc = ()> + Send + 'static)
|
||||
_: &dyn B<Assoc = ()>,
|
||||
//^ &'_ (dyn B<Assoc = ()>)
|
||||
//^ &(dyn B<Assoc = ()> + 'static)
|
||||
) {}
|
||||
"#,
|
||||
);
|
||||
@ -85,7 +85,7 @@ fn render_dyn_for_ty() {
|
||||
trait Foo<'a> {}
|
||||
|
||||
fn foo(foo: &dyn for<'a> Foo<'a>) {}
|
||||
// ^^^ &'_ dyn Foo<'_>
|
||||
// ^^^ &(dyn Foo<'?> + 'static)
|
||||
"#,
|
||||
);
|
||||
}
|
||||
@ -111,11 +111,11 @@ fn test(
|
||||
b;
|
||||
//^ impl Foo
|
||||
c;
|
||||
//^ &'_ impl Foo + ?Sized
|
||||
//^ &impl Foo + ?Sized
|
||||
d;
|
||||
//^ S<impl Foo>
|
||||
ref_any;
|
||||
//^^^^^^^ &'_ impl ?Sized
|
||||
//^^^^^^^ &impl ?Sized
|
||||
empty;
|
||||
} //^^^^^ impl Sized
|
||||
"#,
|
||||
@ -192,7 +192,7 @@ fn test(
|
||||
b;
|
||||
//^ fn(impl Foo) -> impl Foo
|
||||
c;
|
||||
} //^ fn(&'_ impl Foo + ?Sized) -> &'_ impl Foo + ?Sized
|
||||
} //^ fn(&impl Foo + ?Sized) -> &impl Foo + ?Sized
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
@ -1153,9 +1153,9 @@ fn dyn_trait_super_trait_not_in_scope() {
|
||||
51..55 'self': &'? Self
|
||||
64..69 '{ 0 }': u32
|
||||
66..67 '0': u32
|
||||
176..177 'd': &'? dyn Trait
|
||||
176..177 'd': &'? (dyn Trait + 'static)
|
||||
191..207 '{ ...o(); }': ()
|
||||
197..198 'd': &'? dyn Trait
|
||||
197..198 'd': &'? (dyn Trait + 'static)
|
||||
197..204 'd.foo()': u32
|
||||
"#]],
|
||||
);
|
||||
@ -2019,10 +2019,10 @@ impl dyn Error + Send {
|
||||
/// Attempts to downcast the box to a concrete type.
|
||||
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
|
||||
let err: Box<dyn Error> = self;
|
||||
// ^^^^ expected Box<dyn Error>, got Box<dyn Error + Send>
|
||||
// ^^^^ expected Box<dyn Error + 'static>, got Box<dyn Error + Send + 'static>
|
||||
// FIXME, type mismatch should not occur
|
||||
<dyn Error>::downcast(err).map_err(|_| loop {})
|
||||
//^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error>) -> Result<Box<{unknown}>, Box<dyn Error>>
|
||||
//^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box<dyn Error + 'static>) -> Result<Box<{unknown}>, Box<dyn Error + 'static>>
|
||||
}
|
||||
}
|
||||
"#,
|
||||
@ -2170,3 +2170,26 @@ fn main() {
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mut_to_const_pointer() {
|
||||
check(
|
||||
r#"
|
||||
pub trait X {
|
||||
fn perform(self) -> u64;
|
||||
}
|
||||
|
||||
impl X for *const u8 {
|
||||
fn perform(self) -> u64 {
|
||||
42
|
||||
}
|
||||
}
|
||||
|
||||
fn test(x: *mut u8) {
|
||||
let _v = x.perform();
|
||||
// ^ adjustments: Pointer(MutToConstPointer)
|
||||
// ^^^^^^^^^^^ type: u64
|
||||
}
|
||||
"#,
|
||||
);
|
||||
}
|
||||
|
@ -629,7 +629,7 @@ fn issue_4053_diesel_where_clauses() {
|
||||
488..522 '{ ... }': ()
|
||||
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
|
||||
498..508 'self.order': O
|
||||
498..515 'self.o...into()': dyn QueryFragment<DB>
|
||||
498..515 'self.o...into()': dyn QueryFragment<DB> + 'static
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
@ -773,7 +773,7 @@ fn issue_4800() {
|
||||
"#,
|
||||
expect![[r#"
|
||||
379..383 'self': &'? mut PeerSet<D>
|
||||
401..424 '{ ... }': dyn Future<Output = ()>
|
||||
401..424 '{ ... }': dyn Future<Output = ()> + 'static
|
||||
411..418 'loop {}': !
|
||||
416..418 '{}': ()
|
||||
575..579 'self': &'? mut Self
|
||||
@ -2278,3 +2278,26 @@ fn test(x: bool) {
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn issue_19730() {
|
||||
check_infer(
|
||||
r#"
|
||||
trait Trait<T = Self> {}
|
||||
|
||||
trait Foo {
|
||||
type Bar<A, B>: Trait;
|
||||
|
||||
fn foo<A, B>(bar: Self::Bar<A, B>) {
|
||||
let _ = bar;
|
||||
}
|
||||
}
|
||||
"#,
|
||||
expect![[r#"
|
||||
83..86 'bar': Foo::Bar<Self, A, B>
|
||||
105..133 '{ ... }': ()
|
||||
119..120 '_': Foo::Bar<Self, A, B>
|
||||
123..126 'bar': Foo::Bar<Self, A, B>
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
@ -2741,11 +2741,11 @@ impl B for Astruct {}
|
||||
715..744 '#[rust...1i32])': Box<[i32; 1], Global>
|
||||
737..743 '[1i32]': [i32; 1]
|
||||
738..742 '1i32': i32
|
||||
755..756 'v': Vec<Box<dyn B, Global>, Global>
|
||||
776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
|
||||
776..850 '<[_]> ...ct)]))': Vec<Box<dyn B, Global>, Global>
|
||||
794..849 '#[rust...uct)])': Box<[Box<dyn B, Global>; 1], Global>
|
||||
816..848 '[#[rus...ruct)]': [Box<dyn B, Global>; 1]
|
||||
755..756 'v': Vec<Box<dyn B + 'static, Global>, Global>
|
||||
776..793 '<[_]> ...to_vec': fn into_vec<Box<dyn B + 'static, Global>, Global>(Box<[Box<dyn B + 'static, Global>], Global>) -> Vec<Box<dyn B + 'static, Global>, Global>
|
||||
776..850 '<[_]> ...ct)]))': Vec<Box<dyn B + 'static, Global>, Global>
|
||||
794..849 '#[rust...uct)])': Box<[Box<dyn B + 'static, Global>; 1], Global>
|
||||
816..848 '[#[rus...ruct)]': [Box<dyn B + 'static, Global>; 1]
|
||||
817..847 '#[rust...truct)': Box<Astruct, Global>
|
||||
839..846 'Astruct': Astruct
|
||||
"#]],
|
||||
|
@ -1475,26 +1475,26 @@ fn test(x: Box<dyn Trait<u64>>, y: &dyn Trait<u64>) {
|
||||
expect![[r#"
|
||||
29..33 'self': &'? Self
|
||||
54..58 'self': &'? Self
|
||||
198..200 '{}': Box<dyn Trait<u64>>
|
||||
210..211 'x': Box<dyn Trait<u64>>
|
||||
234..235 'y': &'? dyn Trait<u64>
|
||||
198..200 '{}': Box<dyn Trait<u64> + 'static>
|
||||
210..211 'x': Box<dyn Trait<u64> + 'static>
|
||||
234..235 'y': &'? (dyn Trait<u64> + 'static)
|
||||
254..371 '{ ...2(); }': ()
|
||||
260..261 'x': Box<dyn Trait<u64>>
|
||||
267..268 'y': &'? dyn Trait<u64>
|
||||
278..279 'z': Box<dyn Trait<u64>>
|
||||
282..285 'bar': fn bar() -> Box<dyn Trait<u64>>
|
||||
282..287 'bar()': Box<dyn Trait<u64>>
|
||||
293..294 'x': Box<dyn Trait<u64>>
|
||||
260..261 'x': Box<dyn Trait<u64> + 'static>
|
||||
267..268 'y': &'? (dyn Trait<u64> + 'static)
|
||||
278..279 'z': Box<dyn Trait<u64> + 'static>
|
||||
282..285 'bar': fn bar() -> Box<dyn Trait<u64> + 'static>
|
||||
282..287 'bar()': Box<dyn Trait<u64> + 'static>
|
||||
293..294 'x': Box<dyn Trait<u64> + 'static>
|
||||
293..300 'x.foo()': u64
|
||||
306..307 'y': &'? dyn Trait<u64>
|
||||
306..307 'y': &'? (dyn Trait<u64> + 'static)
|
||||
306..313 'y.foo()': u64
|
||||
319..320 'z': Box<dyn Trait<u64>>
|
||||
319..320 'z': Box<dyn Trait<u64> + 'static>
|
||||
319..326 'z.foo()': u64
|
||||
332..333 'x': Box<dyn Trait<u64>>
|
||||
332..333 'x': Box<dyn Trait<u64> + 'static>
|
||||
332..340 'x.foo2()': i64
|
||||
346..347 'y': &'? dyn Trait<u64>
|
||||
346..347 'y': &'? (dyn Trait<u64> + 'static)
|
||||
346..354 'y.foo2()': i64
|
||||
360..361 'z': Box<dyn Trait<u64>>
|
||||
360..361 'z': Box<dyn Trait<u64> + 'static>
|
||||
360..368 'z.foo2()': i64
|
||||
"#]],
|
||||
);
|
||||
@ -1523,14 +1523,14 @@ fn test(s: S<u32, i32>) {
|
||||
expect![[r#"
|
||||
32..36 'self': &'? Self
|
||||
102..106 'self': &'? S<T, U>
|
||||
128..139 '{ loop {} }': &'? dyn Trait<T, U>
|
||||
128..139 '{ loop {} }': &'? (dyn Trait<T, U> + 'static)
|
||||
130..137 'loop {}': !
|
||||
135..137 '{}': ()
|
||||
175..179 'self': &'? Self
|
||||
251..252 's': S<u32, i32>
|
||||
267..289 '{ ...z(); }': ()
|
||||
273..274 's': S<u32, i32>
|
||||
273..280 's.bar()': &'? dyn Trait<u32, i32>
|
||||
273..280 's.bar()': &'? (dyn Trait<u32, i32> + 'static)
|
||||
273..286 's.bar().baz()': (u32, i32)
|
||||
"#]],
|
||||
);
|
||||
@ -1556,20 +1556,20 @@ fn test(x: Trait, y: &Trait) -> u64 {
|
||||
}"#,
|
||||
expect![[r#"
|
||||
26..30 'self': &'? Self
|
||||
60..62 '{}': dyn Trait
|
||||
72..73 'x': dyn Trait
|
||||
82..83 'y': &'? dyn Trait
|
||||
60..62 '{}': dyn Trait + 'static
|
||||
72..73 'x': dyn Trait + 'static
|
||||
82..83 'y': &'? (dyn Trait + 'static)
|
||||
100..175 '{ ...o(); }': u64
|
||||
106..107 'x': dyn Trait
|
||||
113..114 'y': &'? dyn Trait
|
||||
124..125 'z': dyn Trait
|
||||
128..131 'bar': fn bar() -> dyn Trait
|
||||
128..133 'bar()': dyn Trait
|
||||
139..140 'x': dyn Trait
|
||||
106..107 'x': dyn Trait + 'static
|
||||
113..114 'y': &'? (dyn Trait + 'static)
|
||||
124..125 'z': dyn Trait + 'static
|
||||
128..131 'bar': fn bar() -> dyn Trait + 'static
|
||||
128..133 'bar()': dyn Trait + 'static
|
||||
139..140 'x': dyn Trait + 'static
|
||||
139..146 'x.foo()': u64
|
||||
152..153 'y': &'? dyn Trait
|
||||
152..153 'y': &'? (dyn Trait + 'static)
|
||||
152..159 'y.foo()': u64
|
||||
165..166 'z': dyn Trait
|
||||
165..166 'z': dyn Trait + 'static
|
||||
165..172 'z.foo()': u64
|
||||
"#]],
|
||||
);
|
||||
@ -1589,10 +1589,10 @@ fn main() {
|
||||
expect![[r#"
|
||||
31..35 'self': &'? S
|
||||
37..39 '{}': ()
|
||||
47..48 '_': &'? dyn Fn(S)
|
||||
47..48 '_': &'? (dyn Fn(S) + 'static)
|
||||
58..60 '{}': ()
|
||||
71..105 '{ ...()); }': ()
|
||||
77..78 'f': fn f(&'? dyn Fn(S))
|
||||
77..78 'f': fn f(&'? (dyn Fn(S) + 'static))
|
||||
77..102 'f(&|nu...foo())': ()
|
||||
79..101 '&|numb....foo()': &'? impl Fn(S)
|
||||
80..101 '|numbe....foo()': impl Fn(S)
|
||||
@ -2927,13 +2927,13 @@ fn test(x: &dyn Foo) {
|
||||
foo(x);
|
||||
}"#,
|
||||
expect![[r#"
|
||||
21..22 'x': &'? dyn Foo
|
||||
21..22 'x': &'? (dyn Foo + 'static)
|
||||
34..36 '{}': ()
|
||||
46..47 'x': &'? dyn Foo
|
||||
46..47 'x': &'? (dyn Foo + 'static)
|
||||
59..74 '{ foo(x); }': ()
|
||||
65..68 'foo': fn foo(&'? dyn Foo)
|
||||
65..68 'foo': fn foo(&'? (dyn Foo + 'static))
|
||||
65..71 'foo(x)': ()
|
||||
69..70 'x': &'? dyn Foo
|
||||
69..70 'x': &'? (dyn Foo + 'static)
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
@ -3210,13 +3210,13 @@ fn foo() {
|
||||
218..324 '{ ...&s); }': ()
|
||||
228..229 's': Option<i32>
|
||||
232..236 'None': Option<i32>
|
||||
246..247 'f': Box<dyn FnOnce(&'? Option<i32>)>
|
||||
281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>)>
|
||||
246..247 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
|
||||
281..310 'Box { ... {}) }': Box<dyn FnOnce(&'? Option<i32>) + 'static>
|
||||
294..308 '&mut (|ps| {})': &'? mut impl FnOnce(&'? Option<i32>)
|
||||
300..307 '|ps| {}': impl FnOnce(&'? Option<i32>)
|
||||
301..303 'ps': &'? Option<i32>
|
||||
305..307 '{}': ()
|
||||
316..317 'f': Box<dyn FnOnce(&'? Option<i32>)>
|
||||
316..317 'f': Box<dyn FnOnce(&'? Option<i32>) + 'static>
|
||||
316..321 'f(&s)': ()
|
||||
318..320 '&s': &'? Option<i32>
|
||||
319..320 's': Option<i32>
|
||||
@ -4252,9 +4252,9 @@ fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
|
||||
"#,
|
||||
expect![[r#"
|
||||
90..94 'self': &'? Self
|
||||
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32>)
|
||||
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
|
||||
164..195 '{ ...f(); }': ()
|
||||
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32>)
|
||||
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
|
||||
170..184 'v.get::<i32>()': &'? i32
|
||||
170..192 'v.get:...eref()': &'? i32
|
||||
"#]],
|
||||
@ -4735,7 +4735,7 @@ pub async fn foo_async<'a>() -> Box<dyn Trait + 'a> {
|
||||
|
||||
fn foo() {
|
||||
foo_async();
|
||||
//^^^^^^^^^^^impl Future<Output = Box<dyn Trait>> + ?Sized
|
||||
//^^^^^^^^^^^impl Future<Output = Box<dyn Trait + '?>> + ?Sized
|
||||
}
|
||||
"#,
|
||||
)
|
||||
|
@ -8,10 +8,7 @@ use chalk_recursive::Cache;
|
||||
use chalk_solve::{Solver, logging_db::LoggingRustIrDatabase, rust_ir};
|
||||
|
||||
use base_db::Crate;
|
||||
use hir_def::{
|
||||
BlockId, TraitId,
|
||||
lang_item::{LangItem, LangItemTarget},
|
||||
};
|
||||
use hir_def::{BlockId, TraitId, lang_item::LangItem};
|
||||
use hir_expand::name::Name;
|
||||
use intern::sym;
|
||||
use span::Edition;
|
||||
@ -292,10 +289,6 @@ impl FnTrait {
|
||||
}
|
||||
|
||||
pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<TraitId> {
|
||||
let target = db.lang_item(krate, self.lang_item())?;
|
||||
match target {
|
||||
LangItemTarget::Trait(t) => Some(t),
|
||||
_ => None,
|
||||
}
|
||||
self.lang_item().resolve_trait(db, krate)
|
||||
}
|
||||
}
|
||||
|
@ -36,8 +36,7 @@ use crate::{
|
||||
pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: Crate) -> impl Iterator<Item = TraitId> + '_ {
|
||||
[LangItem::Fn, LangItem::FnMut, LangItem::FnOnce]
|
||||
.into_iter()
|
||||
.filter_map(move |lang| db.lang_item(krate, lang))
|
||||
.flat_map(|it| it.as_trait())
|
||||
.filter_map(move |lang| lang.resolve_trait(db, krate))
|
||||
}
|
||||
|
||||
/// Returns an iterator over the direct super traits (including the trait itself).
|
||||
|
@ -22,7 +22,6 @@ use crate::{
|
||||
use chalk_ir::Mutability;
|
||||
use hir_def::signatures::StructFlags;
|
||||
use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId};
|
||||
use salsa::CycleRecoveryAction;
|
||||
use std::fmt;
|
||||
use std::ops::Not;
|
||||
use stdx::never;
|
||||
@ -55,14 +54,14 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
|
||||
variances.is_empty().not().then(|| Arc::from_iter(variances))
|
||||
}
|
||||
|
||||
pub(crate) fn variances_of_cycle_fn(
|
||||
_db: &dyn HirDatabase,
|
||||
_result: &Option<Arc<[Variance]>>,
|
||||
_count: u32,
|
||||
_def: GenericDefId,
|
||||
) -> CycleRecoveryAction<Option<Arc<[Variance]>>> {
|
||||
CycleRecoveryAction::Iterate
|
||||
}
|
||||
// pub(crate) fn variances_of_cycle_fn(
|
||||
// _db: &dyn HirDatabase,
|
||||
// _result: &Option<Arc<[Variance]>>,
|
||||
// _count: u32,
|
||||
// _def: GenericDefId,
|
||||
// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
|
||||
// salsa::CycleRecoveryAction::Iterate
|
||||
// }
|
||||
|
||||
pub(crate) fn variances_of_cycle_initial(
|
||||
db: &dyn HirDatabase,
|
||||
@ -966,7 +965,7 @@ struct S3<T>(S<T, T>);
|
||||
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
|
||||
"#,
|
||||
expect![[r#"
|
||||
FixedPoint[T: covariant, U: covariant, V: covariant]
|
||||
FixedPoint[T: bivariant, U: bivariant, V: bivariant]
|
||||
"#]],
|
||||
);
|
||||
}
|
||||
|
@ -516,8 +516,7 @@ impl HirDisplay for TypeParam {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let sized_trait =
|
||||
f.db.lang_item(krate, LangItem::Sized).and_then(|lang_item| lang_item.as_trait());
|
||||
let sized_trait = LangItem::Sized.resolve_trait(f.db, krate);
|
||||
let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
|
||||
WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait,
|
||||
_ => false,
|
||||
|
@ -53,7 +53,6 @@ use hir_def::{
|
||||
generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
|
||||
},
|
||||
item_tree::{AttrOwner, FieldParent, ImportAlias, ItemTreeFieldId, ItemTreeNode},
|
||||
lang_item::LangItemTarget,
|
||||
layout::{self, ReprOptions, TargetDataLayout},
|
||||
nameres::{self, diagnostics::DefDiagnostic},
|
||||
per_ns::PerNs,
|
||||
@ -137,7 +136,6 @@ pub use {
|
||||
HirFileRange, InFile, InFileWrapper, InMacroFile, InRealFile, MacroFilePosition,
|
||||
MacroFileRange,
|
||||
},
|
||||
hygiene::{SyntaxContextExt, marks_rev},
|
||||
inert_attr_macro::AttributeTemplate,
|
||||
mod_path::{ModPath, PathKind, tool_path},
|
||||
name::Name,
|
||||
@ -781,7 +779,7 @@ impl Module {
|
||||
let drop_maybe_dangle = (|| {
|
||||
// FIXME: This can be simplified a lot by exposing hir-ty's utils.rs::Generics helper
|
||||
let trait_ = trait_?;
|
||||
let drop_trait = db.lang_item(self.krate().into(), LangItem::Drop)?.as_trait()?;
|
||||
let drop_trait = LangItem::Drop.resolve_trait(db, self.krate().into())?;
|
||||
if drop_trait != trait_.into() {
|
||||
return None;
|
||||
}
|
||||
@ -2388,14 +2386,11 @@ impl Function {
|
||||
}
|
||||
|
||||
let Some(impl_traits) = self.ret_type(db).as_impl_traits(db) else { return false };
|
||||
let Some(future_trait_id) =
|
||||
db.lang_item(self.ty(db).env.krate, LangItem::Future).and_then(|t| t.as_trait())
|
||||
let Some(future_trait_id) = LangItem::Future.resolve_trait(db, self.ty(db).env.krate)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
let Some(sized_trait_id) =
|
||||
db.lang_item(self.ty(db).env.krate, LangItem::Sized).and_then(|t| t.as_trait())
|
||||
else {
|
||||
let Some(sized_trait_id) = LangItem::Sized.resolve_trait(db, self.ty(db).env.krate) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
@ -2861,9 +2856,7 @@ pub struct Trait {
|
||||
|
||||
impl Trait {
|
||||
pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
|
||||
db.lang_item(krate.into(), LangItem::from_name(name)?)
|
||||
.and_then(LangItemTarget::as_trait)
|
||||
.map(Into::into)
|
||||
LangItem::from_name(name)?.resolve_trait(db, krate.into()).map(Into::into)
|
||||
}
|
||||
|
||||
pub fn module(self, db: &dyn HirDatabase) -> Module {
|
||||
@ -3692,24 +3685,16 @@ impl GenericDef {
|
||||
}
|
||||
|
||||
let source_map = match def {
|
||||
GenericDefId::AdtId(AdtId::EnumId(it)) => {
|
||||
db.enum_signature_with_source_map(it).1.clone()
|
||||
}
|
||||
GenericDefId::AdtId(AdtId::StructId(it)) => {
|
||||
db.struct_signature_with_source_map(it).1.clone()
|
||||
}
|
||||
GenericDefId::AdtId(AdtId::UnionId(it)) => {
|
||||
db.union_signature_with_source_map(it).1.clone()
|
||||
}
|
||||
GenericDefId::AdtId(AdtId::EnumId(it)) => db.enum_signature_with_source_map(it).1,
|
||||
GenericDefId::AdtId(AdtId::StructId(it)) => db.struct_signature_with_source_map(it).1,
|
||||
GenericDefId::AdtId(AdtId::UnionId(it)) => db.union_signature_with_source_map(it).1,
|
||||
GenericDefId::ConstId(_) => return,
|
||||
GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1.clone(),
|
||||
GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1.clone(),
|
||||
GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1,
|
||||
GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1,
|
||||
GenericDefId::StaticId(_) => return,
|
||||
GenericDefId::TraitAliasId(it) => {
|
||||
db.trait_alias_signature_with_source_map(it).1.clone()
|
||||
}
|
||||
GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1.clone(),
|
||||
GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1.clone(),
|
||||
GenericDefId::TraitAliasId(it) => db.trait_alias_signature_with_source_map(it).1,
|
||||
GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1,
|
||||
GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1,
|
||||
};
|
||||
|
||||
expr_store_diagnostics(db, acc, &source_map);
|
||||
@ -3809,7 +3794,7 @@ impl GenericSubstitution {
|
||||
container_params
|
||||
.chain(self_params)
|
||||
.filter_map(|(ty, name)| {
|
||||
Some((name?.symbol().clone(), Type { ty: ty.clone(), env: self.env.clone() }))
|
||||
Some((name?.symbol().clone(), Type { ty, env: self.env.clone() }))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
@ -4989,18 +4974,14 @@ impl Type {
|
||||
/// `std::future::Future` and returns the `Output` associated type.
|
||||
/// This function is used in `.await` syntax completion.
|
||||
pub fn into_future_output(&self, db: &dyn HirDatabase) -> Option<Type> {
|
||||
let trait_ = db
|
||||
.lang_item(self.env.krate, LangItem::IntoFutureIntoFuture)
|
||||
.and_then(|it| {
|
||||
let into_future_fn = it.as_function()?;
|
||||
let trait_ = LangItem::IntoFutureIntoFuture
|
||||
.resolve_function(db, self.env.krate)
|
||||
.and_then(|into_future_fn| {
|
||||
let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?;
|
||||
let into_future_trait = assoc_item.container_or_implemented_trait(db)?;
|
||||
Some(into_future_trait.id)
|
||||
})
|
||||
.or_else(|| {
|
||||
let future_trait = db.lang_item(self.env.krate, LangItem::Future)?;
|
||||
future_trait.as_trait()
|
||||
})?;
|
||||
.or_else(|| LangItem::Future.resolve_trait(db, self.env.krate))?;
|
||||
|
||||
let canonical_ty =
|
||||
Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
|
||||
@ -5015,14 +4996,13 @@ impl Type {
|
||||
|
||||
/// This does **not** resolve `IntoFuture`, only `Future`.
|
||||
pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
|
||||
let future_output =
|
||||
db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?;
|
||||
let future_output = LangItem::FutureOutput.resolve_type_alias(db, self.env.krate)?;
|
||||
self.normalize_trait_assoc_type(db, &[], future_output.into())
|
||||
}
|
||||
|
||||
/// This does **not** resolve `IntoIterator`, only `Iterator`.
|
||||
pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
|
||||
let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
|
||||
let iterator_trait = LangItem::Iterator.resolve_trait(db, self.env.krate)?;
|
||||
let iterator_item = db
|
||||
.trait_items(iterator_trait)
|
||||
.associated_type_by_name(&Name::new_symbol_root(sym::Item))?;
|
||||
@ -5030,9 +5010,7 @@ impl Type {
|
||||
}
|
||||
|
||||
pub fn impls_iterator(self, db: &dyn HirDatabase) -> bool {
|
||||
let Some(iterator_trait) =
|
||||
db.lang_item(self.env.krate, LangItem::Iterator).and_then(|it| it.as_trait())
|
||||
else {
|
||||
let Some(iterator_trait) = LangItem::Iterator.resolve_trait(db, self.env.krate) else {
|
||||
return false;
|
||||
};
|
||||
let canonical_ty =
|
||||
@ -5042,12 +5020,13 @@ impl Type {
|
||||
|
||||
/// Resolves the projection `<Self as IntoIterator>::IntoIter` and returns the resulting type
|
||||
pub fn into_iterator_iter(self, db: &dyn HirDatabase) -> Option<Type> {
|
||||
let trait_ = db.lang_item(self.env.krate, LangItem::IntoIterIntoIter).and_then(|it| {
|
||||
let into_iter_fn = it.as_function()?;
|
||||
let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
|
||||
let into_iter_trait = assoc_item.container_or_implemented_trait(db)?;
|
||||
Some(into_iter_trait.id)
|
||||
})?;
|
||||
let trait_ = LangItem::IntoIterIntoIter.resolve_function(db, self.env.krate).and_then(
|
||||
|into_iter_fn| {
|
||||
let assoc_item = as_assoc_item(db, AssocItem::Function, into_iter_fn)?;
|
||||
let into_iter_trait = assoc_item.container_or_implemented_trait(db)?;
|
||||
Some(into_iter_trait.id)
|
||||
},
|
||||
)?;
|
||||
|
||||
let canonical_ty =
|
||||
Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
|
||||
@ -5133,10 +5112,8 @@ impl Type {
|
||||
}
|
||||
|
||||
pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
|
||||
let lang_item = db.lang_item(self.env.krate, LangItem::Copy);
|
||||
let copy_trait = match lang_item {
|
||||
Some(LangItemTarget::Trait(it)) => it,
|
||||
_ => return false,
|
||||
let Some(copy_trait) = LangItem::Copy.resolve_trait(db, self.env.krate) else {
|
||||
return false;
|
||||
};
|
||||
self.impls_trait(db, copy_trait.into(), &[])
|
||||
}
|
||||
|
@ -25,7 +25,6 @@ use hir_expand::{
|
||||
builtin::{BuiltinFnLikeExpander, EagerExpander},
|
||||
db::ExpandDatabase,
|
||||
files::{FileRangeWrapper, InRealFile},
|
||||
hygiene::SyntaxContextExt as _,
|
||||
inert_attr_macro::find_builtin_attr_idx,
|
||||
mod_path::{ModPath, PathKind},
|
||||
name::AsName,
|
||||
@ -927,7 +926,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||
token: InRealFile<SyntaxToken>,
|
||||
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
|
||||
) -> Option<T> {
|
||||
self.descend_into_macros_impl(token.clone(), &mut cb)
|
||||
self.descend_into_macros_impl(token, &mut cb)
|
||||
}
|
||||
|
||||
/// Descends the token into expansions, returning the tokens that matches the input
|
||||
@ -959,17 +958,13 @@ impl<'db> SemanticsImpl<'db> {
|
||||
let text = token.text();
|
||||
let kind = token.kind();
|
||||
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
|
||||
self.descend_into_macros_breakable(
|
||||
token.clone(),
|
||||
|InFile { value, file_id: _ }, _ctx| {
|
||||
let mapped_kind = value.kind();
|
||||
let any_ident_match =
|
||||
|| kind.is_any_identifier() && value.kind().is_any_identifier();
|
||||
let matches =
|
||||
(kind == mapped_kind || any_ident_match()) && text == value.text();
|
||||
if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
|
||||
},
|
||||
)
|
||||
self.descend_into_macros_breakable(token, |InFile { value, file_id: _ }, _ctx| {
|
||||
let mapped_kind = value.kind();
|
||||
let any_ident_match =
|
||||
|| kind.is_any_identifier() && value.kind().is_any_identifier();
|
||||
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
|
||||
if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -559,7 +559,7 @@ impl SourceToDefCtx<'_, '_> {
|
||||
let item = match ast::Item::cast(value.clone()) {
|
||||
Some(it) => it,
|
||||
None => {
|
||||
let variant = ast::Variant::cast(value.clone())?;
|
||||
let variant = ast::Variant::cast(value)?;
|
||||
return this
|
||||
.enum_variant_to_def(InFile::new(file_id, &variant))
|
||||
.map(Into::into);
|
||||
|
@ -556,8 +556,8 @@ impl SourceAnalyzer {
|
||||
}
|
||||
}
|
||||
|
||||
let future_trait = db.lang_item(self.resolver.krate(), LangItem::Future)?.as_trait()?;
|
||||
let poll_fn = db.lang_item(self.resolver.krate(), LangItem::FuturePoll)?.as_function()?;
|
||||
let future_trait = LangItem::Future.resolve_trait(db, self.resolver.krate())?;
|
||||
let poll_fn = LangItem::FuturePoll.resolve_function(db, self.resolver.krate())?;
|
||||
// HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
|
||||
// doesn't have any generic parameters, so we skip building another subst for `poll()`.
|
||||
let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build();
|
||||
@ -666,7 +666,7 @@ impl SourceAnalyzer {
|
||||
) -> Option<FunctionId> {
|
||||
let ty = self.ty_of_expr(try_expr.expr()?)?;
|
||||
|
||||
let op_fn = db.lang_item(self.resolver.krate(), LangItem::TryTraitBranch)?.as_function()?;
|
||||
let op_fn = LangItem::TryTraitBranch.resolve_function(db, self.resolver.krate())?;
|
||||
let op_trait = match op_fn.lookup(db).container {
|
||||
ItemContainerId::TraitId(id) => id,
|
||||
_ => return None,
|
||||
@ -1425,13 +1425,13 @@ impl SourceAnalyzer {
|
||||
lang_trait: LangItem,
|
||||
method_name: &Name,
|
||||
) -> Option<(TraitId, FunctionId)> {
|
||||
let trait_id = db.lang_item(self.resolver.krate(), lang_trait)?.as_trait()?;
|
||||
let trait_id = lang_trait.resolve_trait(db, self.resolver.krate())?;
|
||||
let fn_id = db.trait_items(trait_id).method_by_name(method_name)?;
|
||||
Some((trait_id, fn_id))
|
||||
}
|
||||
|
||||
fn ty_of_expr(&self, expr: ast::Expr) -> Option<&Ty> {
|
||||
self.infer()?.type_of_expr_or_pat(self.expr_id(expr.clone())?)
|
||||
self.infer()?.type_of_expr_or_pat(self.expr_id(expr)?)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -13,13 +13,13 @@ use hir_def::{
|
||||
use hir_expand::{HirFileId, name::Name};
|
||||
use hir_ty::{
|
||||
db::HirDatabase,
|
||||
display::{DisplayTarget, HirDisplay, hir_display_with_store},
|
||||
display::{HirDisplay, hir_display_with_store},
|
||||
};
|
||||
use intern::Symbol;
|
||||
use rustc_hash::FxHashMap;
|
||||
use syntax::{AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr, ast::HasName};
|
||||
|
||||
use crate::{Module, ModuleDef, Semantics};
|
||||
use crate::{HasCrate, Module, ModuleDef, Semantics};
|
||||
|
||||
pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
|
||||
|
||||
@ -66,7 +66,6 @@ pub struct SymbolCollector<'a> {
|
||||
symbols: FxIndexSet<FileSymbol>,
|
||||
work: Vec<SymbolCollectorWork>,
|
||||
current_container_name: Option<SmolStr>,
|
||||
display_target: DisplayTarget,
|
||||
}
|
||||
|
||||
/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
|
||||
@ -78,10 +77,6 @@ impl<'a> SymbolCollector<'a> {
|
||||
symbols: Default::default(),
|
||||
work: Default::default(),
|
||||
current_container_name: None,
|
||||
display_target: DisplayTarget::from_crate(
|
||||
db,
|
||||
*db.all_crates().last().expect("no crate graph present"),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
@ -93,8 +88,7 @@ impl<'a> SymbolCollector<'a> {
|
||||
|
||||
pub fn collect(&mut self, module: Module) {
|
||||
let _p = tracing::info_span!("SymbolCollector::collect", ?module).entered();
|
||||
tracing::info!(?module, "SymbolCollector::collect",);
|
||||
self.display_target = module.krate().to_display_target(self.db);
|
||||
tracing::info!(?module, "SymbolCollector::collect");
|
||||
|
||||
// The initial work is the root module we're collecting, additional work will
|
||||
// be populated as we traverse the module's definitions.
|
||||
@ -263,8 +257,9 @@ impl<'a> SymbolCollector<'a> {
|
||||
for (name, Item { def, vis, import }) in scope.macros() {
|
||||
if let Some(i) = import {
|
||||
match i {
|
||||
ImportOrGlob::Import(i) => push_import(self, i, name, def.into(), vis),
|
||||
ImportOrGlob::Glob(_) => (),
|
||||
ImportOrExternCrate::Import(i) => push_import(self, i, name, def.into(), vis),
|
||||
ImportOrExternCrate::Glob(_) => (),
|
||||
ImportOrExternCrate::ExternCrate(_) => (),
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@ -320,7 +315,10 @@ impl<'a> SymbolCollector<'a> {
|
||||
let impl_data = self.db.impl_signature(impl_id);
|
||||
let impl_name = Some(
|
||||
hir_display_with_store(impl_data.self_ty, &impl_data.store)
|
||||
.display(self.db, self.display_target)
|
||||
.display(
|
||||
self.db,
|
||||
crate::Impl::from(impl_id).krate(self.db).to_display_target(self.db),
|
||||
)
|
||||
.to_smolstr(),
|
||||
);
|
||||
self.with_container_name(impl_name, |s| {
|
||||
|
@ -5,7 +5,7 @@
|
||||
//! assists if we are allowed to.
|
||||
|
||||
use hir::ImportPathConfig;
|
||||
use ide_db::{SnippetCap, imports::insert_use::InsertUseConfig};
|
||||
use ide_db::{SnippetCap, assists::ExprFillDefaultMode, imports::insert_use::InsertUseConfig};
|
||||
|
||||
use crate::AssistKind;
|
||||
|
||||
@ -21,6 +21,7 @@ pub struct AssistConfig {
|
||||
pub term_search_fuel: u64,
|
||||
pub term_search_borrowck: bool,
|
||||
pub code_action_grouping: bool,
|
||||
pub expr_fill_default: ExprFillDefaultMode,
|
||||
}
|
||||
|
||||
impl AssistConfig {
|
||||
|
@ -150,6 +150,7 @@ fn add_missing_impl_members_inner(
|
||||
let new_impl_def = edit.make_mut(impl_def.clone());
|
||||
let first_new_item = add_trait_assoc_items_to_impl(
|
||||
&ctx.sema,
|
||||
ctx.config,
|
||||
&missing_items,
|
||||
trait_,
|
||||
&new_impl_def,
|
||||
|
@ -3,6 +3,7 @@ use std::iter::{self, Peekable};
|
||||
use either::Either;
|
||||
use hir::{Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics, sym};
|
||||
use ide_db::RootDatabase;
|
||||
use ide_db::assists::ExprFillDefaultMode;
|
||||
use ide_db::syntax_helpers::suggest_name;
|
||||
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
|
||||
use itertools::Itertools;
|
||||
@ -216,7 +217,17 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
|
||||
// filter out hidden patterns because they're handled by the catch-all arm
|
||||
!hidden
|
||||
})
|
||||
.map(|(pat, _)| make.match_arm(pat, None, make::ext::expr_todo()));
|
||||
.map(|(pat, _)| {
|
||||
make.match_arm(
|
||||
pat,
|
||||
None,
|
||||
match ctx.config.expr_fill_default {
|
||||
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
|
||||
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
|
||||
ExprFillDefaultMode::Default => make::ext::expr_todo(),
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
let mut arms: Vec<_> = match_arm_list
|
||||
.arms()
|
||||
@ -246,7 +257,15 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
|
||||
|
||||
if needs_catch_all_arm && !has_catch_all_arm {
|
||||
cov_mark::hit!(added_wildcard_pattern);
|
||||
let arm = make.match_arm(make.wildcard_pat().into(), None, make::ext::expr_todo());
|
||||
let arm = make.match_arm(
|
||||
make.wildcard_pat().into(),
|
||||
None,
|
||||
match ctx.config.expr_fill_default {
|
||||
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
|
||||
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
|
||||
ExprFillDefaultMode::Default => make::ext::expr_todo(),
|
||||
},
|
||||
);
|
||||
arms.push(arm);
|
||||
}
|
||||
|
||||
@ -474,8 +493,8 @@ fn build_pat(
|
||||
hir::StructKind::Record => {
|
||||
let fields = fields
|
||||
.into_iter()
|
||||
.map(|f| make.name_ref(f.name(db).as_str()))
|
||||
.map(|name_ref| make.record_pat_field_shorthand(name_ref));
|
||||
.map(|f| make.ident_pat(false, false, make.name(f.name(db).as_str())))
|
||||
.map(|ident| make.record_pat_field_shorthand(ident.into()));
|
||||
let fields = make.record_pat_field_list(fields, None);
|
||||
make.record_pat_with_fields(path, fields).into()
|
||||
}
|
||||
|
@ -196,7 +196,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
|
||||
// Wrap all tails in `Some(...)`
|
||||
let none_path = mapless_make.expr_path(mapless_make.ident_path("None"));
|
||||
let some_path = mapless_make.expr_path(mapless_make.ident_path("Some"));
|
||||
for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| {
|
||||
for_each_tail_expr(&ast::Expr::BlockExpr(closure_body), &mut |e| {
|
||||
let e = match e {
|
||||
ast::Expr::BreakExpr(e) => e.expr(),
|
||||
ast::Expr::ReturnExpr(e) => e.expr(),
|
||||
|
@ -1066,7 +1066,7 @@ fn foo() {
|
||||
r#"
|
||||
fn foo() {
|
||||
let (mut a, b) = (0.1, "abc");
|
||||
fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) {
|
||||
fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) {
|
||||
*a = 1.2;
|
||||
let c = *b;
|
||||
}
|
||||
@ -1098,7 +1098,7 @@ fn foo() {
|
||||
r#"
|
||||
fn foo() {
|
||||
let (mut a, b) = (0.1, "abc");
|
||||
fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) {
|
||||
fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) {
|
||||
let _: &mut bool = p2;
|
||||
*a = 1.2;
|
||||
let c = *b;
|
||||
@ -1136,7 +1136,7 @@ fn foo() {
|
||||
r#"
|
||||
fn foo() {
|
||||
let (mut a, b) = (0.1, "abc");
|
||||
fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) {
|
||||
fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&'static str) {
|
||||
let _: &mut bool = p2;
|
||||
*a = 1.2;
|
||||
let c = *b;
|
||||
|
@ -80,7 +80,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>
|
||||
let from_fn_name = builder.make_mut(from_fn_name);
|
||||
let tail_expr = builder.make_mut(tail_expr);
|
||||
let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec();
|
||||
let associated_items = builder.make_mut(associated_items).clone();
|
||||
let associated_items = builder.make_mut(associated_items);
|
||||
|
||||
ted::replace(
|
||||
trait_ty.syntax(),
|
||||
|
@ -1,8 +1,9 @@
|
||||
use hir::Semantics;
|
||||
use ide_db::RootDatabase;
|
||||
use syntax::T;
|
||||
use syntax::ast::RangeItem;
|
||||
use syntax::ast::{AstNode, HasName, LetStmt, Name, Pat, edit::AstNodeEdit};
|
||||
use syntax::ast::edit::IndentLevel;
|
||||
use syntax::ast::edit_in_place::Indent;
|
||||
use syntax::ast::syntax_factory::SyntaxFactory;
|
||||
use syntax::ast::{self, AstNode, HasName, LetStmt, Pat};
|
||||
|
||||
use crate::{AssistContext, AssistId, Assists};
|
||||
|
||||
@ -25,155 +26,205 @@ use crate::{AssistContext, AssistId, Assists};
|
||||
// }
|
||||
// ```
|
||||
pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
|
||||
// should focus on else token to trigger
|
||||
// Should focus on the `else` token to trigger
|
||||
let let_stmt = ctx
|
||||
.find_token_syntax_at_offset(T![else])
|
||||
.and_then(|it| it.parent()?.parent())
|
||||
.or_else(|| ctx.find_token_syntax_at_offset(T![let])?.parent())?;
|
||||
let let_stmt = LetStmt::cast(let_stmt)?;
|
||||
let let_else_block = let_stmt.let_else()?.block_expr()?;
|
||||
let let_init = let_stmt.initializer()?;
|
||||
let else_block = let_stmt.let_else()?.block_expr()?;
|
||||
let else_expr = if else_block.statements().next().is_none() {
|
||||
else_block.tail_expr()?
|
||||
} else {
|
||||
else_block.into()
|
||||
};
|
||||
let init = let_stmt.initializer()?;
|
||||
// Ignore let stmt with type annotation
|
||||
if let_stmt.ty().is_some() {
|
||||
// don't support let with type annotation
|
||||
return None;
|
||||
}
|
||||
let pat = let_stmt.pat()?;
|
||||
let mut binders = Vec::new();
|
||||
binders_in_pat(&mut binders, &pat, &ctx.sema)?;
|
||||
|
||||
let target = let_stmt.syntax().text_range();
|
||||
let make = SyntaxFactory::with_mappings();
|
||||
let mut idents = Vec::default();
|
||||
let pat_without_mut = remove_mut_and_collect_idents(&make, &pat, &mut idents)?;
|
||||
let bindings = idents
|
||||
.into_iter()
|
||||
.filter_map(|ref pat| {
|
||||
// Identifiers which resolve to constants are not bindings
|
||||
if ctx.sema.resolve_bind_pat_to_const(pat).is_none() {
|
||||
Some((pat.name()?, pat.ref_token().is_none() && pat.mut_token().is_some()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
acc.add(
AssistId::refactor_rewrite("convert_let_else_to_match"),
"Convert let-else to let and match",
target,
|edit| {
let indent_level = let_stmt.indent_level().0 as usize;
let indent = " ".repeat(indent_level);
let indent1 = " ".repeat(indent_level + 1);
if bindings.is_empty() {
"Convert let-else to match"
} else {
"Convert let-else to let and match"
},
let_stmt.syntax().text_range(),
|builder| {
let mut editor = builder.make_editor(let_stmt.syntax());
let binders_str = binders_to_str(&binders, false);
let binders_str_mut = binders_to_str(&binders, true);
let binding_paths = bindings
.iter()
.map(|(name, _)| make.expr_path(make.ident_path(&name.to_string())))
.collect::<Vec<_>>();
let init_expr = let_init.syntax().text();
let mut pat_no_mut = pat.syntax().text().to_string();
// remove the mut from the pattern
for (b, ismut) in binders.iter() {
if *ismut {
pat_no_mut = pat_no_mut.replace(&format!("mut {b}"), &b.to_string());
}
let binding_arm = make.match_arm(
pat_without_mut,
None,
// There are three possible cases:
//
// - No bindings: `None => {}`
// - Single binding: `Some(it) => it`
// - Multiple bindings: `Foo::Bar { a, b, .. } => (a, b)`
match binding_paths.len() {
0 => make.expr_empty_block().into(),
1 => binding_paths[0].clone(),
_ => make.expr_tuple(binding_paths).into(),
},
);
let else_arm = make.match_arm(make.wildcard_pat().into(), None, else_expr);
let match_ = make.expr_match(init, make.match_arm_list([binding_arm, else_arm]));
match_.reindent_to(IndentLevel::from_node(let_stmt.syntax()));
if bindings.is_empty() {
editor.replace(let_stmt.syntax(), match_.syntax());
} else {
let ident_pats = bindings
.into_iter()
.map(|(name, is_mut)| make.ident_pat(false, is_mut, name).into())
.collect::<Vec<Pat>>();
let new_let_stmt = make.let_stmt(
if ident_pats.len() == 1 {
ident_pats[0].clone()
} else {
make.tuple_pat(ident_pats).into()
},
None,
Some(match_.into()),
);
editor.replace(let_stmt.syntax(), new_let_stmt.syntax());
}
let only_expr = let_else_block.statements().next().is_none();
let branch2 = match &let_else_block.tail_expr() {
Some(tail) if only_expr => format!("{tail},"),
_ => let_else_block.syntax().text().to_string(),
};
let replace = if binders.is_empty() {
format!(
"match {init_expr} {{
{indent1}{pat_no_mut} => {binders_str}
{indent1}_ => {branch2}
{indent}}}"
)
} else {
format!(
"let {binders_str_mut} = match {init_expr} {{
{indent1}{pat_no_mut} => {binders_str},
{indent1}_ => {branch2}
{indent}}};"
)
};
edit.replace(target, replace);
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
/// Gets a list of binders in a pattern, and whether they are mut.
fn binders_in_pat(
acc: &mut Vec<(Name, bool)>,
pat: &Pat,
sem: &Semantics<'_, RootDatabase>,
) -> Option<()> {
use Pat::*;
match pat {
IdentPat(p) => {
let ident = p.name()?;
let ismut = p.ref_token().is_none() && p.mut_token().is_some();
// check for const reference
if sem.resolve_bind_pat_to_const(p).is_none() {
acc.push((ident, ismut));
}
fn remove_mut_and_collect_idents(
make: &SyntaxFactory,
pat: &ast::Pat,
acc: &mut Vec<ast::IdentPat>,
) -> Option<ast::Pat> {
Some(match pat {
ast::Pat::IdentPat(p) => {
acc.push(p.clone());
let non_mut_pat = make.ident_pat(
p.ref_token().is_some(),
p.ref_token().is_some() && p.mut_token().is_some(),
p.name()?,
);
if let Some(inner) = p.pat() {
binders_in_pat(acc, &inner, sem)?;
non_mut_pat.set_pat(remove_mut_and_collect_idents(make, &inner, acc));
}
Some(())
non_mut_pat.into()
}
BoxPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
RestPat(_) | LiteralPat(_) | PathPat(_) | WildcardPat(_) | ConstBlockPat(_) => Some(()),
OrPat(p) => {
for p in p.pats() {
binders_in_pat(acc, &p, sem)?;
}
Some(())
ast::Pat::BoxPat(p) => {
make.box_pat(remove_mut_and_collect_idents(make, &p.pat()?, acc)?).into()
}
ParenPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
RangePat(p) => {
if let Some(st) = p.start() {
binders_in_pat(acc, &st, sem)?
}
if let Some(ed) = p.end() {
binders_in_pat(acc, &ed, sem)?
}
Some(())
ast::Pat::OrPat(p) => make
.or_pat(
p.pats()
.map(|pat| remove_mut_and_collect_idents(make, &pat, acc))
.collect::<Option<Vec<_>>>()?,
p.leading_pipe().is_some(),
)
.into(),
ast::Pat::ParenPat(p) => {
make.paren_pat(remove_mut_and_collect_idents(make, &p.pat()?, acc)?).into()
}
RecordPat(p) => {
for f in p.record_pat_field_list()?.fields() {
let pat = f.pat()?;
binders_in_pat(acc, &pat, sem)?;
ast::Pat::RangePat(p) => make
.range_pat(
if let Some(start) = p.start() {
Some(remove_mut_and_collect_idents(make, &start, acc)?)
} else {
None
},
if let Some(end) = p.end() {
Some(remove_mut_and_collect_idents(make, &end, acc)?)
} else {
None
},
)
.into(),
ast::Pat::RecordPat(p) => make
.record_pat_with_fields(
p.path()?,
make.record_pat_field_list(
p.record_pat_field_list()?
.fields()
.map(|field| {
remove_mut_and_collect_idents(make, &field.pat()?, acc).map(|pat| {
if let Some(name_ref) = field.name_ref() {
make.record_pat_field(name_ref, pat)
} else {
make.record_pat_field_shorthand(pat)
}
})
})
.collect::<Option<Vec<_>>>()?,
p.record_pat_field_list()?.rest_pat(),
),
)
.into(),
ast::Pat::RefPat(p) => {
let inner = p.pat()?;
if let ast::Pat::IdentPat(ident) = inner {
acc.push(ident);
p.clone_for_update().into()
} else {
make.ref_pat(remove_mut_and_collect_idents(make, &inner, acc)?).into()
}
Some(())
}
RefPat(p) => p.pat().and_then(|p| binders_in_pat(acc, &p, sem)),
SlicePat(p) => {
for p in p.pats() {
binders_in_pat(acc, &p, sem)?;
}
Some(())
}
TuplePat(p) => {
for p in p.fields() {
binders_in_pat(acc, &p, sem)?;
}
Some(())
}
TupleStructPat(p) => {
for p in p.fields() {
binders_in_pat(acc, &p, sem)?;
}
Some(())
}
ast::Pat::SlicePat(p) => make
.slice_pat(
p.pats()
.map(|pat| remove_mut_and_collect_idents(make, &pat, acc))
.collect::<Option<Vec<_>>>()?,
)
.into(),
ast::Pat::TuplePat(p) => make
.tuple_pat(
p.fields()
.map(|field| remove_mut_and_collect_idents(make, &field, acc))
.collect::<Option<Vec<_>>>()?,
)
.into(),
ast::Pat::TupleStructPat(p) => make
.tuple_struct_pat(
p.path()?,
p.fields()
.map(|field| remove_mut_and_collect_idents(make, &field, acc))
.collect::<Option<Vec<_>>>()?,
)
.into(),
ast::Pat::RestPat(_)
| ast::Pat::LiteralPat(_)
| ast::Pat::PathPat(_)
| ast::Pat::WildcardPat(_)
| ast::Pat::ConstBlockPat(_) => pat.clone(),
// don't support macro pat yet
MacroPat(_) => None,
}
}
fn binders_to_str(binders: &[(Name, bool)], addmut: bool) -> String {
let vars = binders
.iter()
.map(
|(ident, ismut)| {
if *ismut && addmut { format!("mut {ident}") } else { ident.to_string() }
},
)
.collect::<Vec<_>>()
.join(", ");
if binders.is_empty() {
String::from("{}")
} else if binders.len() == 1 {
vars
} else {
format!("({vars})")
}
ast::Pat::MacroPat(_) => return None,
})
}
#[cfg(test)]
@ -196,7 +196,9 @@ fn destructure_pat(
let fields = field_names.iter().map(|(old_name, new_name)| {
// Use shorthand syntax if possible
if old_name == new_name && !is_mut {
make.record_pat_field_shorthand(make.name_ref(old_name))
make.record_pat_field_shorthand(
make.ident_pat(false, false, make.name(old_name)).into(),
)
} else {
make.record_pat_field(
make.name_ref(old_name),
@ -142,7 +142,7 @@ fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option<TupleDat
.map(|(id, ty)| {
match name_generator.for_type(&ty, ctx.db(), ctx.edition()) {
Some(name) => name,
None => name_generator.suggest_name(&format!("_{}", id)),
None => name_generator.suggest_name(&format!("_{id}")),
}
.to_string()
})
@ -56,7 +56,12 @@ fn expand_record_rest_pattern(
let new_field_list = make.record_pat_field_list(old_field_list.fields(), None);
for (f, _) in missing_fields.iter() {
let field = make.record_pat_field_shorthand(
make.name_ref(&f.name(ctx.sema.db).display_no_db(edition).to_smolstr()),
make.ident_pat(
false,
false,
make.name(&f.name(ctx.sema.db).display_no_db(edition).to_smolstr()),
)
.into(),
);
new_field_list.add_field(field);
}
@ -5033,7 +5033,7 @@ fn main() {
fun_name(bar);
}
fn $0fun_name(bar: &str) {
fn $0fun_name(bar: &'static str) {
m!(bar);
}
"#,
@ -631,7 +631,7 @@ fn main() {
"#,
r#"
fn main() {
const $0HELLO: &str = "hello";
const $0HELLO: &'static str = "hello";
}
"#,
"Extract into constant",
@ -726,7 +726,7 @@ fn main() {
"#,
r#"
fn main() {
static $0HELLO: &str = "hello";
static $0HELLO: &'static str = "hello";
}
"#,
"Extract into static",
@ -2528,13 +2528,13 @@ fn foo() {
check_assist_by_label(
extract_variable,
r#"
struct Entry(&str);
struct Entry<'a>(&'a str);
fn foo() {
let entry = Entry($0"Hello"$0);
}
"#,
r#"
struct Entry(&str);
struct Entry<'a>(&'a str);
fn foo() {
let $0hello = "Hello";
let entry = Entry(hello);
@ -2546,13 +2546,13 @@ fn foo() {
check_assist_by_label(
extract_variable,
r#"
struct Entry(&str);
struct Entry<'a>(&'a str);
fn foo() {
let entry = Entry($0"Hello"$0);
}
"#,
r#"
struct Entry(&str);
struct Entry<'a>(&'a str);
fn foo() {
const $0HELLO: &str = "Hello";
let entry = Entry(HELLO);
@ -2564,13 +2564,13 @@ fn foo() {
check_assist_by_label(
extract_variable,
r#"
struct Entry(&str);
struct Entry<'a>(&'a str);
fn foo() {
let entry = Entry($0"Hello"$0);
}
"#,
r#"
struct Entry(&str);
struct Entry<'a>(&'a str);
fn foo() {
static $0HELLO: &str = "Hello";
let entry = Entry(HELLO);
@ -2587,13 +2587,13 @@ fn foo() {
check_assist_by_label(
extract_variable,
r#"
struct Entry { message: &str }
struct Entry<'a> { message: &'a str }
fn foo() {
let entry = Entry { message: $0"Hello"$0 };
}
"#,
r#"
struct Entry { message: &str }
struct Entry<'a> { message: &'a str }
fn foo() {
let $0message = "Hello";
let entry = Entry { message };
@ -2605,13 +2605,13 @@ fn foo() {
check_assist_by_label(
extract_variable,
r#"
struct Entry { message: &str }
struct Entry<'a> { message: &'a str }
fn foo() {
let entry = Entry { message: $0"Hello"$0 };
}
"#,
r#"
struct Entry { message: &str }
struct Entry<'a> { message: &'a str }
fn foo() {
const $0HELLO: &str = "Hello";
let entry = Entry { message: HELLO };
@ -2623,13 +2623,13 @@ fn foo() {
check_assist_by_label(
extract_variable,
r#"
struct Entry { message: &str }
struct Entry<'a> { message: &'a str }
fn foo() {
let entry = Entry { message: $0"Hello"$0 };
}
"#,
r#"
struct Entry { message: &str }
struct Entry<'a> { message: &'a str }
fn foo() {
static $0HELLO: &str = "Hello";
let entry = Entry { message: HELLO };
@ -4,6 +4,7 @@ use hir::{
};
use ide_db::{
FileId, FxHashMap, FxHashSet, RootDatabase, SnippetCap,
assists::ExprFillDefaultMode,
defs::{Definition, NameRefClass},
famous_defs::FamousDefs,
helpers::is_editable_crate,
@ -46,7 +47,7 @@ use crate::{
// bar("", baz());
// }
//
// fn bar(arg: &str, baz: Baz) ${0:-> _} {
// fn bar(arg: &'static str, baz: Baz) ${0:-> _} {
// todo!()
// }
//
@ -276,7 +277,11 @@ impl FunctionBuilder {
target_module,
&mut necessary_generic_params,
);
let placeholder_expr = make::ext::expr_todo();
let placeholder_expr = match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => make::ext::expr_todo(),
};
fn_body = make::block_expr(vec![], Some(placeholder_expr));
};
@ -331,7 +336,11 @@ impl FunctionBuilder {
let (generic_param_list, where_clause) =
fn_generic_params(ctx, necessary_generic_params, &target)?;
let placeholder_expr = make::ext::expr_todo();
let placeholder_expr = match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => make::ext::expr_todo(),
};
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
Some(Self {
@ -383,14 +392,14 @@ impl FunctionBuilder {
// Focus the return type if there is one
match ret_type {
Some(ret_type) => {
edit.add_placeholder_snippet(cap, ret_type.clone());
edit.add_placeholder_snippet(cap, ret_type);
}
None => {
edit.add_placeholder_snippet(cap, tail_expr.clone());
edit.add_placeholder_snippet(cap, tail_expr);
}
}
} else {
edit.add_placeholder_snippet(cap, tail_expr.clone());
edit.add_placeholder_snippet(cap, tail_expr);
}
}
@ -444,7 +453,11 @@ fn make_fn_body_as_new_function(
let adt_info = adt_info.as_ref()?;
let path_self = make::ext::ident_path("Self");
let placeholder_expr = make::ext::expr_todo();
let placeholder_expr = match ctx.config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => make::ext::expr_todo(),
};
let tail_expr = if let Some(strukt) = adt_info.adt.as_struct() {
match strukt.kind(ctx.db()) {
StructKind::Record => {
@ -1505,7 +1518,7 @@ fn foo() {
bar("bar")
}
fn bar(arg: &str) {
fn bar(arg: &'static str) {
${0:todo!()}
}
"#,
@ -2122,7 +2135,7 @@ fn foo() {
bar(baz(), baz(), "foo", "bar")
}
fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
fn bar(baz_1: Baz, baz_2: Baz, arg_1: &'static str, arg_2: &'static str) {
${0:todo!()}
}
"#,
@ -3090,7 +3103,7 @@ pub struct Foo {
field_2: String,
}
impl Foo {
fn new(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) -> Self {
fn new(baz_1: Baz, baz_2: Baz, arg_1: &'static str, arg_2: &'static str) -> Self {
${0:Self { field_1: todo!(), field_2: todo!() }}
}
}
@ -1,14 +1,14 @@
use either::Either;
use ide_db::imports::{
insert_use::{ImportGranularity, InsertUseConfig},
merge_imports::{MergeBehavior, try_merge_imports, try_merge_trees, try_normalize_use_tree},
merge_imports::{MergeBehavior, try_merge_imports, try_merge_trees},
};
use itertools::Itertools;
use syntax::{
AstNode, SyntaxElement, SyntaxNode,
algo::neighbor,
ast::{self, edit_in_place::Removable},
match_ast, ted,
ast::{self, syntax_factory::SyntaxFactory},
match_ast,
syntax_editor::Removable,
};
use crate::{
@ -69,49 +69,32 @@ pub(crate) fn merge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
(selection_range, edits?)
};
acc.add(AssistId::refactor_rewrite("merge_imports"), "Merge imports", target, |builder| {
let edits_mut: Vec<Edit> = edits
.into_iter()
.map(|it| match it {
Remove(Either::Left(it)) => Remove(Either::Left(builder.make_mut(it))),
Remove(Either::Right(it)) => Remove(Either::Right(builder.make_mut(it))),
Replace(old, new) => Replace(builder.make_syntax_mut(old), new),
})
.collect();
for edit in edits_mut {
match edit {
Remove(it) => it.as_ref().either(Removable::remove, Removable::remove),
Replace(old, new) => {
ted::replace(old, &new);
let parent_node = match ctx.covering_element() {
SyntaxElement::Node(n) => n,
SyntaxElement::Token(t) => t.parent()?,
};
// If there's a selection and we're replacing a use tree in a tree list,
// normalize the parent use tree if it only contains the merged subtree.
if !ctx.has_empty_selection() {
let normalized_use_tree = ast::UseTree::cast(new)
.as_ref()
.and_then(ast::UseTree::parent_use_tree_list)
.and_then(|use_tree_list| {
if use_tree_list.use_trees().collect_tuple::<(_,)>().is_some() {
Some(use_tree_list.parent_use_tree())
} else {
None
}
})
.and_then(|target_tree| {
try_normalize_use_tree(
&target_tree,
ctx.config.insert_use.granularity.into(),
)
.map(|top_use_tree_flat| (target_tree, top_use_tree_flat))
});
if let Some((old_tree, new_tree)) = normalized_use_tree {
cov_mark::hit!(replace_parent_with_normalized_use_tree);
ted::replace(old_tree.syntax(), new_tree.syntax());
}
acc.add(AssistId::refactor_rewrite("merge_imports"), "Merge imports", target, |builder| {
let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(&parent_node);
for edit in edits {
match edit {
Remove(it) => {
let node = it.as_ref();
if let Some(left) = node.left() {
left.remove(&mut editor);
} else if let Some(right) = node.right() {
right.remove(&mut editor);
}
}
Replace(old, new) => {
editor.replace(old, &new);
}
}
}
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@ -723,11 +706,10 @@ use std::{
);
cov_mark::check!(merge_with_selected_use_tree_neighbors);
cov_mark::check!(replace_parent_with_normalized_use_tree);
check_assist(
merge_imports,
r"use std::$0{fmt::Display, fmt::Debug}$0;",
r"use std::fmt::{Debug, Display};",
r"use std::{fmt::{Debug, Display}};",
);
}
@ -9,7 +9,7 @@ use syntax::{
};
use crate::{
AssistId,
AssistConfig, AssistId,
assist_context::{AssistContext, Assists, SourceChangeBuilder},
utils::{
DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items,
@ -128,8 +128,14 @@ fn add_assist(
acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| {
let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax());
let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false);
let impl_def_with_items =
impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
let impl_def_with_items = impl_def_from_trait(
&ctx.sema,
ctx.config,
adt,
&annotated_name,
trait_,
replace_trait_path,
);
update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
let trait_path = make::ty_path(replace_trait_path.clone());
@ -217,6 +223,7 @@ fn add_assist(
fn impl_def_from_trait(
sema: &hir::Semantics<'_, ide_db::RootDatabase>,
config: &AssistConfig,
adt: &ast::Adt,
annotated_name: &ast::Name,
trait_: Option<hir::Trait>,
@ -241,7 +248,7 @@ fn impl_def_from_trait(
let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone()));
let first_assoc_item =
add_trait_assoc_items_to_impl(sema, &trait_items, trait_, &impl_def, &target_scope);
add_trait_assoc_items_to_impl(sema, config, &trait_items, trait_, &impl_def, &target_scope);
// Generate a default `impl` function body for the derived trait.
if let ast::AssocItem::Fn(ref func) = first_assoc_item {
@ -1,7 +1,10 @@
use syntax::{
AstNode, SyntaxKind,
ast::{self, HasVisibility, edit_in_place::Removable, make},
ted::{self, Position},
ast::{
self, HasAttrs, HasVisibility, edit::IndentLevel, edit_in_place::AttrsOwnerEdit, make,
syntax_factory::SyntaxFactory,
},
syntax_editor::{Element, Position, Removable},
};
use crate::{
@ -9,9 +12,9 @@ use crate::{
assist_context::{AssistContext, Assists},
};
// Assist: unmerge_use
// Assist: unmerge_imports
//
// Extracts single use item from use list.
// Extracts a use item from a use list into a standalone use list.
//
// ```
// use std::fmt::{Debug, Display$0};
@ -21,21 +24,18 @@ use crate::{
// use std::fmt::{Debug};
// use std::fmt::Display;
// ```
pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let tree: ast::UseTree = ctx.find_node_at_offset::<ast::UseTree>()?.clone_for_update();
pub(crate) fn unmerge_imports(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let tree = ctx.find_node_at_offset::<ast::UseTree>()?;
let tree_list = tree.syntax().parent().and_then(ast::UseTreeList::cast)?;
if tree_list.use_trees().count() < 2 {
cov_mark::hit!(skip_single_use_item);
cov_mark::hit!(skip_single_import);
return None;
}
let use_: ast::Use = tree_list.syntax().ancestors().find_map(ast::Use::cast)?;
let use_ = tree_list.syntax().ancestors().find_map(ast::Use::cast)?;
let path = resolve_full_path(&tree)?;
let old_parent_range = use_.syntax().parent()?.text_range();
let new_parent = use_.syntax().parent()?;
// If possible, explain what is going to be done.
let label = match tree.path().and_then(|path| path.first_segment()) {
Some(name) => format!("Unmerge use of `{name}`"),
@ -43,17 +43,31 @@ pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
};
let target = tree.syntax().text_range();
acc.add(AssistId::refactor_rewrite("unmerge_use"), label, target, |builder| {
let new_use = make::use_(
acc.add(AssistId::refactor_rewrite("unmerge_imports"), label, target, |builder| {
let make = SyntaxFactory::with_mappings();
let new_use = make.use_(
use_.visibility(),
make::use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()),
)
.clone_for_update();
make.use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()),
);
// Add any attributes that are present on the use tree
use_.attrs().for_each(|attr| {
new_use.add_attr(attr.clone_for_update());
});
tree.remove();
ted::insert(Position::after(use_.syntax()), new_use.syntax());
builder.replace(old_parent_range, new_parent.to_string());
let mut editor = builder.make_editor(use_.syntax());
// Remove the use tree from the current use item
tree.remove(&mut editor);
// Insert a newline and indentation, followed by the new use item
editor.insert_all(
Position::after(use_.syntax()),
vec![
make.whitespace(&format!("\n{}", IndentLevel::from_node(use_.syntax())))
.syntax_element(),
new_use.syntax().syntax_element(),
],
);
editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
@ -80,22 +94,22 @@ mod tests {
use super::*;
#[test]
fn skip_single_use_item() {
cov_mark::check!(skip_single_use_item);
fn skip_single_import() {
cov_mark::check!(skip_single_import);
check_assist_not_applicable(
unmerge_use,
unmerge_imports,
r"
use std::fmt::Debug$0;
",
);
check_assist_not_applicable(
unmerge_use,
unmerge_imports,
r"
use std::fmt::{Debug$0};
",
);
check_assist_not_applicable(
unmerge_use,
unmerge_imports,
r"
use std::fmt::Debug as Dbg$0;
",
@ -105,7 +119,7 @@ use std::fmt::Debug as Dbg$0;
#[test]
fn skip_single_glob_import() {
check_assist_not_applicable(
unmerge_use,
unmerge_imports,
r"
use std::fmt::*$0;
",
@ -113,9 +127,9 @@ use std::fmt::*$0;
}
#[test]
fn unmerge_use_item() {
fn unmerge_import() {
check_assist(
unmerge_use,
unmerge_imports,
r"
use std::fmt::{Debug, Display$0};
",
@ -126,7 +140,7 @@ use std::fmt::Display;
);
check_assist(
unmerge_use,
unmerge_imports,
r"
use std::fmt::{Debug, format$0, Display};
",
@ -140,7 +154,7 @@ use std::fmt::format;
#[test]
fn unmerge_glob_import() {
check_assist(
unmerge_use,
unmerge_imports,
r"
use std::fmt::{*$0, Display};
",
@ -152,9 +166,9 @@ use std::fmt::*;
}
#[test]
fn unmerge_renamed_use_item() {
fn unmerge_renamed_import() {
check_assist(
unmerge_use,
unmerge_imports,
r"
use std::fmt::{Debug, Display as Disp$0};
",
@ -166,9 +180,9 @@ use std::fmt::Display as Disp;
}
#[test]
fn unmerge_indented_use_item() {
fn unmerge_indented_import() {
check_assist(
unmerge_use,
unmerge_imports,
r"
mod format {
use std::fmt::{Debug, Display$0 as Disp, format};
@ -184,9 +198,9 @@ mod format {
}
#[test]
fn unmerge_nested_use_item() {
fn unmerge_nested_import() {
check_assist(
unmerge_use,
unmerge_imports,
r"
use foo::bar::{baz::{qux$0, foobar}, barbaz};
",
@ -196,7 +210,7 @@ use foo::bar::baz::qux;
",
);
check_assist(
unmerge_use,
unmerge_imports,
r"
use foo::bar::{baz$0::{qux, foobar}, barbaz};
",
@ -208,9 +222,9 @@ use foo::bar::baz::{qux, foobar};
}
#[test]
fn unmerge_use_item_with_visibility() {
fn unmerge_import_with_visibility() {
check_assist(
unmerge_use,
unmerge_imports,
r"
pub use std::fmt::{Debug, Display$0};
",
@ -222,12 +236,27 @@ pub use std::fmt::Display;
}
#[test]
fn unmerge_use_item_on_self() {
fn unmerge_import_on_self() {
check_assist(
unmerge_use,
unmerge_imports,
r"use std::process::{Command, self$0};",
r"use std::process::{Command};
use std::process;",
);
}
#[test]
fn unmerge_import_with_attributes() {
check_assist(
unmerge_imports,
r"
#[allow(deprecated)]
use foo::{bar, baz$0};",
r"
#[allow(deprecated)]
use foo::{bar};
#[allow(deprecated)]
use foo::baz;",
);
}
}
@ -53,8 +53,14 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
|edit| {
let pats_after = pipe_token
.siblings_with_tokens(Direction::Next)
.filter_map(|it| ast::Pat::cast(it.into_node()?));
let new_pat = make::or_pat(pats_after, or_pat.leading_pipe().is_some());
.filter_map(|it| ast::Pat::cast(it.into_node()?))
.collect::<Vec<_>>();
// It is guaranteed that `pats_after` has at least one element
let new_pat = if pats_after.len() == 1 {
pats_after[0].clone()
} else {
make::or_pat(pats_after, or_pat.leading_pipe().is_some()).into()
};
let new_match_arm =
make::match_arm(new_pat, match_arm.guard(), match_arm_body).clone_for_update();
@ -116,7 +116,7 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -
(Some(attr), Some(ident))
if attr.simple_name().map(|v| v.eq("derive")).unwrap_or_default() =>
{
Some(attempt_get_derive(attr.clone(), ident))
Some(attempt_get_derive(attr, ident))
}
(Some(attr), _) => Some(WrapUnwrapOption::WrapAttr(attr)),
@ -128,7 +128,7 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -
NodeOrToken::Node(node) => ast::Attr::cast(node).map(WrapUnwrapOption::WrapAttr),
NodeOrToken::Token(ident) if ident.kind() == syntax::T![ident] => {
let attr = ident.parent_ancestors().find_map(ast::Attr::cast)?;
Some(attempt_get_derive(attr.clone(), ident))
Some(attempt_get_derive(attr, ident))
}
_ => None,
}
@ -233,7 +233,7 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
if let Some(meta) = attr.meta() {
if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
raw_tokens.push(NodeOrToken::Token(eq.clone()));
raw_tokens.push(NodeOrToken::Token(eq));
raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
expr.syntax().descendants_with_tokens().for_each(|it| {
@ -222,8 +222,8 @@ mod handlers {
mod toggle_async_sugar;
mod toggle_ignore;
mod toggle_macro_delimiter;
mod unmerge_imports;
mod unmerge_match_arm;
mod unmerge_use;
mod unnecessary_async;
mod unqualify_method_call;
mod unwrap_block;
@ -363,7 +363,7 @@ mod handlers {
toggle_ignore::toggle_ignore,
toggle_macro_delimiter::toggle_macro_delimiter,
unmerge_match_arm::unmerge_match_arm,
unmerge_use::unmerge_use,
unmerge_imports::unmerge_imports,
unnecessary_async::unnecessary_async,
unqualify_method_call::unqualify_method_call,
unwrap_block::unwrap_block,
@ -4,6 +4,7 @@ use expect_test::expect;
use hir::Semantics;
use ide_db::{
EditionedFileId, FileRange, RootDatabase, SnippetCap,
assists::ExprFillDefaultMode,
base_db::SourceDatabase,
imports::insert_use::{ImportGranularity, InsertUseConfig},
source_change::FileSystemEdit,
@ -35,6 +36,7 @@ pub(crate) const TEST_CONFIG: AssistConfig = AssistConfig {
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
};
pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
@ -54,6 +56,7 @@ pub(crate) const TEST_CONFIG_NO_GROUPING: AssistConfig = AssistConfig {
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: false,
expr_fill_default: ExprFillDefaultMode::Todo,
};
pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
@ -73,6 +76,7 @@ pub(crate) const TEST_CONFIG_NO_SNIPPET_CAP: AssistConfig = AssistConfig {
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
};
pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
@ -92,6 +96,7 @@ pub(crate) const TEST_CONFIG_IMPORT_ONE: AssistConfig = AssistConfig {
term_search_fuel: 400,
term_search_borrowck: true,
code_action_grouping: true,
expr_fill_default: ExprFillDefaultMode::Todo,
};
pub(crate) fn with_single_file(text: &str) -> (RootDatabase, EditionedFileId) {
@ -1737,7 +1737,7 @@ fn foo() {
bar("", baz());
}
fn bar(arg: &str, baz: Baz) ${0:-> _} {
fn bar(arg: &'static str, baz: Baz) ${0:-> _} {
todo!()
}
@ -3339,6 +3339,20 @@ sth!{ }
)
}
#[test]
fn doctest_unmerge_imports() {
check_doc_test(
"unmerge_imports",
r#####"
use std::fmt::{Debug, Display$0};
"#####,
r#####"
use std::fmt::{Debug};
use std::fmt::Display;
"#####,
)
}
#[test]
fn doctest_unmerge_match_arm() {
check_doc_test(
@ -3365,20 +3379,6 @@ fn handle(action: Action) {
)
}
#[test]
fn doctest_unmerge_use() {
check_doc_test(
"unmerge_use",
r#####"
use std::fmt::{Debug, Display$0};
"#####,
r#####"
use std::fmt::{Debug};
use std::fmt::Display;
"#####,
)
}
#[test]
fn doctest_unnecessary_async() {
check_doc_test(
@ -8,6 +8,7 @@ use hir::{
};
use ide_db::{
RootDatabase,
assists::ExprFillDefaultMode,
famous_defs::FamousDefs,
path_transform::PathTransform,
syntax_helpers::{node_ext::preorder_expr, prettify_macro_expansion},
@ -27,7 +28,10 @@ use syntax::{
ted,
};
use crate::assist_context::{AssistContext, SourceChangeBuilder};
use crate::{
AssistConfig,
assist_context::{AssistContext, SourceChangeBuilder},
};
mod gen_trait_fn_body;
pub(crate) mod ref_field_expr;
@ -174,6 +178,7 @@ pub fn filter_assoc_items(
/// inserted.
pub fn add_trait_assoc_items_to_impl(
sema: &Semantics<'_, RootDatabase>,
config: &AssistConfig,
original_items: &[InFile<ast::AssocItem>],
trait_: hir::Trait,
impl_: &ast::Impl,
@ -219,7 +224,14 @@ pub fn add_trait_assoc_items_to_impl(
match &item {
ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
let body = AstNodeEdit::indent(
&make::block_expr(None, Some(make::ext::expr_todo())),
&make::block_expr(
None,
Some(match config.expr_fill_default {
ExprFillDefaultMode::Todo => make::ext::expr_todo(),
ExprFillDefaultMode::Underscore => make::ext::expr_underscore(),
ExprFillDefaultMode::Default => make::ext::expr_todo(),
}),
),
new_indent_level,
);
ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
@ -363,9 +363,14 @@ pub(crate) fn complete_expr_path(
add_keyword("true", "true");
add_keyword("false", "false");
if in_condition || in_block_expr {
add_keyword("letm", "let mut $0");
add_keyword("let", "let $0");
if in_condition {
add_keyword("letm", "let mut $1 = $0");
add_keyword("let", "let $1 = $0");
}
if in_block_expr {
add_keyword("letm", "let mut $1 = $0;");
add_keyword("let", "let $1 = $0;");
}
if after_if_expr {
Some files were not shown because too many files have changed in this diff