Auto merge of #144114 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
This commit is contained in:
bors 2025-07-18 08:44:14 +00:00
commit f2840cd6c8
110 changed files with 3767 additions and 1221 deletions

82
Cargo.lock generated
View File

@ -153,6 +153,22 @@ dependencies = [
"url",
]
[[package]]
name = "cargo-util-schemas"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830"
dependencies = [
"semver",
"serde",
"serde-untagged",
"serde-value",
"thiserror 2.0.12",
"toml",
"unicode-xid",
"url",
]
[[package]]
name = "cargo_metadata"
version = "0.20.0"
@ -161,7 +177,22 @@ checksum = "4f7835cfc6135093070e95eb2b53e5d9b5c403dc3a6be6040ee026270aa82502"
dependencies = [
"camino",
"cargo-platform",
"cargo-util-schemas",
"cargo-util-schemas 0.2.0",
"semver",
"serde",
"serde_json",
"thiserror 2.0.12",
]
[[package]]
name = "cargo_metadata"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868"
dependencies = [
"camino",
"cargo-platform",
"cargo-util-schemas 0.8.2",
"semver",
"serde",
"serde_json",
@ -1190,13 +1221,16 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
name = "lsp-server"
version = "0.7.8"
dependencies = [
"anyhow",
"crossbeam-channel",
"ctrlc",
"log",
"lsp-types",
"rustc-hash 2.1.1",
"serde",
"serde_derive",
"serde_json",
"toolchain",
]
[[package]]
@ -1471,7 +1505,7 @@ dependencies = [
"edition",
"expect-test",
"ra-ap-rustc_lexer",
"rustc-literal-escaper 0.0.4",
"rustc-literal-escaper",
"stdx",
"tracing",
]
@ -1599,7 +1633,7 @@ dependencies = [
name = "proc-macro-test"
version = "0.0.0"
dependencies = [
"cargo_metadata",
"cargo_metadata 0.20.0",
]
[[package]]
@ -1640,7 +1674,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"base-db",
"cargo_metadata",
"cargo_metadata 0.21.0",
"cfg",
"expect-test",
"intern",
@ -1722,9 +1756,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a967e3a9cd3e38b543f503978e0eccee461e3aea3f7b10e944959bff41dbe612"
checksum = "3ee51482d1c9d3e538acda8cce723db8eea1a81540544bf362bf4c3d841b2329"
dependencies = [
"bitflags 2.9.1",
"ra-ap-rustc_hashes",
@ -1734,18 +1768,18 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_hashes"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ea4c755ecbbffa5743c251344f484ebe571ec7bc5b36d80b2a8ae775d1a7a40"
checksum = "19c8f1e0c28e24e1b4c55dc08058c6c9829df2204497d4034259f491d348c204"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aca7ad7cf911538c619caa2162339fe98637e9e46f11bb0484ef96735df4d64a"
checksum = "5f33f429cec6b92fa2c7243883279fb29dd233fdc3e94099aff32aa91aa87f50"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@ -1753,9 +1787,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8767ba551c9355bc3031be072cc4bb0381106e5e7cd275e72b7a8c76051c4070"
checksum = "b9b55910dbe1fe7ef34bdc1d1bcb41e99b377eb680ea58a1218d95d6b4152257"
dependencies = [
"proc-macro2",
"quote",
@ -1764,9 +1798,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6101374afb267e6c27e4e2eb0b1352e9f3504c1a8f716f619cd39244e2ed92ab"
checksum = "22944e31fb91e9b3e75bcbc91e37d958b8c0825a6160927f2856831d2ce83b36"
dependencies = [
"memchr",
"unicode-properties",
@ -1775,19 +1809,19 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecd88a19f00da4f43e6727d5013444cbc399804b5046dfa2bbcd28ebed3970ce"
checksum = "81057891bc2063ad9e353f29462fbc47a0f5072560af34428ae9313aaa5e9d97"
dependencies = [
"ra-ap-rustc_lexer",
"rustc-literal-escaper 0.0.2",
"rustc-literal-escaper",
]
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.116.0"
version = "0.121.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb332dd32d7850a799862533b1c021e6062558861a4ad57817bf522499fbb892"
checksum = "fe21a3542980d56d2435e96c2720773cac1c63fd4db666417e414729da192eb3"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@ -1855,7 +1889,7 @@ version = "0.0.0"
dependencies = [
"anyhow",
"base64",
"cargo_metadata",
"cargo_metadata 0.21.0",
"cfg",
"crossbeam-channel",
"dirs",
@ -1932,12 +1966,6 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc-literal-escaper"
version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04"
[[package]]
name = "rustc-literal-escaper"
version = "0.0.4"
@ -2231,7 +2259,7 @@ dependencies = [
"rayon",
"rowan",
"rustc-hash 2.1.1",
"rustc-literal-escaper 0.0.4",
"rustc-literal-escaper",
"rustc_apfloat",
"smol_str",
"stdx",

View File

@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2"
[workspace.package]
rust-version = "1.86"
rust-version = "1.88"
edition = "2024"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer team"]
@ -89,11 +89,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.116", default-features = false }
ra-ap-rustc_parse_format = { version = "0.116", default-features = false }
ra-ap-rustc_index = { version = "0.116", default-features = false }
ra-ap-rustc_abi = { version = "0.116", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.116", default-features = false }
ra-ap-rustc_lexer = { version = "0.121", default-features = false }
ra-ap-rustc_parse_format = { version = "0.121", default-features = false }
ra-ap-rustc_index = { version = "0.121", default-features = false }
ra-ap-rustc_abi = { version = "0.121", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.121", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@ -106,7 +106,7 @@ lsp-server = { version = "0.7.8" }
anyhow = "1.0.98"
arrayvec = "0.7.6"
bitflags = "2.9.1"
cargo_metadata = "0.20.0"
cargo_metadata = "0.21.0"
camino = "1.1.10"
chalk-solve = { version = "0.103.0", default-features = false }
chalk-ir = "0.103.0"
@ -138,7 +138,11 @@ rayon = "1.10.0"
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
salsa = { version = "0.23.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] }
salsa = { version = "0.23.0", default-features = true, features = [
"rayon",
"salsa_unstable",
"macros",
] }
salsa-macros = "0.23.0"
semver = "1.0.26"
serde = { version = "1.0.219" }

View File

@ -22,6 +22,7 @@ use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
use thin_vec::ThinVec;
use triomphe::Arc;
use tt::TextRange;
@ -93,17 +94,17 @@ pub type TypeSource = InFile<TypePtr>;
pub type LifetimePtr = AstPtr<ast::Lifetime>;
pub type LifetimeSource = InFile<LifetimePtr>;
// We split the store into types-only and expressions, because most stores (e.g. generics)
// don't store any expressions and this saves memory. Same thing for the source map.
#[derive(Debug, PartialEq, Eq)]
pub struct ExpressionStore {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
pub bindings: Arena<Binding>,
pub labels: Arena<Label>,
pub types: Arena<TypeRef>,
pub lifetimes: Arena<LifetimeRef>,
struct ExpressionOnlyStore {
exprs: Arena<Expr>,
pats: Arena<Pat>,
bindings: Arena<Binding>,
labels: Arena<Label>,
/// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the
/// top level expression, it will not be listed in here.
pub binding_owners: FxHashMap<BindingId, ExprId>,
binding_owners: FxHashMap<BindingId, ExprId>,
/// Block expressions in this store that may contain inner items.
block_scopes: Box<[BlockId]>,
@ -114,8 +115,118 @@ pub struct ExpressionStore {
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct ExpressionStore {
expr_only: Option<Box<ExpressionOnlyStore>>,
pub types: Arena<TypeRef>,
pub lifetimes: Arena<LifetimeRef>,
}
#[derive(Debug, Eq, Default)]
struct ExpressionOnlySourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
expr_map_back: ArenaMap<ExprId, ExprOrPatSource>,
pat_map: FxHashMap<PatSource, ExprOrPatId>,
pat_map_back: ArenaMap<PatId, ExprOrPatSource>,
label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,
binding_definitions:
ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,
/// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
/// Instead, we use id of expression (`92`) to identify the field.
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
template_map: Option<Box<FormatTemplate>>,
expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
/// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
//
// We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
// maybe for cfgs, and they are also not common in type places).
diagnostics: ThinVec<ExpressionStoreDiagnostics>,
}
impl PartialEq for ExpressionOnlySourceMap {
fn eq(&self, other: &Self) -> bool {
// we only need to compare one of the two mappings
// as the other is a reverse mapping and thus will compare
// the same as normal mapping
let Self {
expr_map: _,
expr_map_back,
pat_map: _,
pat_map_back,
label_map: _,
label_map_back,
// If this changed, our pattern data must have changed
binding_definitions: _,
// If this changed, our expression data must have changed
field_map_back: _,
// If this changed, our pattern data must have changed
pat_field_map_back: _,
template_map,
expansions,
diagnostics,
} = self;
*expr_map_back == other.expr_map_back
&& *pat_map_back == other.pat_map_back
&& *label_map_back == other.label_map_back
&& *template_map == other.template_map
&& *expansions == other.expansions
&& *diagnostics == other.diagnostics
}
}
#[derive(Debug, Eq, Default)]
pub struct ExpressionStoreSourceMap {
expr_only: Option<Box<ExpressionOnlySourceMap>>,
types_map_back: ArenaMap<TypeRefId, TypeSource>,
types_map: FxHashMap<TypeSource, TypeRefId>,
lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
#[expect(
unused,
reason = "this is here for completeness, and maybe we'll need it in the future"
)]
lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
}
impl PartialEq for ExpressionStoreSourceMap {
fn eq(&self, other: &Self) -> bool {
// we only need to compare one of the two mappings
// as the other is a reverse mapping and thus will compare
// the same as normal mapping
let Self { expr_only, types_map_back, types_map: _, lifetime_map_back, lifetime_map: _ } =
self;
*expr_only == other.expr_only
&& *types_map_back == other.types_map_back
&& *lifetime_map_back == other.lifetime_map_back
}
}
/// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq, Default)]
pub struct ExpressionStoreBuilder {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
pub bindings: Arena<Binding>,
pub labels: Arena<Label>,
pub lifetimes: Arena<LifetimeRef>,
pub binding_owners: FxHashMap<BindingId, ExprId>,
pub types: Arena<TypeRef>,
block_scopes: Vec<BlockId>,
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
@ -143,62 +254,14 @@ pub struct ExpressionStoreSourceMap {
template_map: Option<Box<FormatTemplate>>,
pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
/// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
pub diagnostics: Vec<ExpressionStoreDiagnostics>,
}
impl PartialEq for ExpressionStoreSourceMap {
fn eq(&self, other: &Self) -> bool {
// we only need to compare one of the two mappings
// as the other is a reverse mapping and thus will compare
// the same as normal mapping
let Self {
expr_map: _,
expr_map_back,
pat_map: _,
pat_map_back,
label_map: _,
label_map_back,
types_map_back,
types_map: _,
lifetime_map_back,
lifetime_map: _,
// If this changed, our pattern data must have changed
binding_definitions: _,
// If this changed, our expression data must have changed
field_map_back: _,
// If this changed, our pattern data must have changed
pat_field_map_back: _,
template_map,
expansions,
diagnostics,
} = self;
*expr_map_back == other.expr_map_back
&& *pat_map_back == other.pat_map_back
&& *label_map_back == other.label_map_back
&& *types_map_back == other.types_map_back
&& *lifetime_map_back == other.lifetime_map_back
&& *template_map == other.template_map
&& *expansions == other.expansions
&& *diagnostics == other.diagnostics
}
}
/// The body of an item (function, const etc.).
#[derive(Debug, Eq, PartialEq, Default)]
pub struct ExpressionStoreBuilder {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
pub bindings: Arena<Binding>,
pub labels: Arena<Label>,
pub lifetimes: Arena<LifetimeRef>,
pub binding_owners: FxHashMap<BindingId, ExprId>,
pub types: Arena<TypeRef>,
block_scopes: Vec<BlockId>,
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
//
// We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
// maybe for cfgs, and they are also not common in type places).
pub(crate) diagnostics: Vec<ExpressionStoreDiagnostics>,
}
#[derive(Default, Debug, Eq, PartialEq)]
@ -226,7 +289,7 @@ pub enum ExpressionStoreDiagnostics {
}
impl ExpressionStoreBuilder {
pub fn finish(self) -> ExpressionStore {
pub fn finish(self) -> (ExpressionStore, ExpressionStoreSourceMap) {
let Self {
block_scopes,
mut exprs,
@ -237,6 +300,23 @@ impl ExpressionStoreBuilder {
mut ident_hygiene,
mut types,
mut lifetimes,
mut expr_map,
mut expr_map_back,
mut pat_map,
mut pat_map_back,
mut label_map,
mut label_map_back,
mut types_map_back,
mut types_map,
mut lifetime_map_back,
mut lifetime_map,
mut binding_definitions,
mut field_map_back,
mut pat_field_map_back,
mut template_map,
mut expansions,
diagnostics,
} = self;
exprs.shrink_to_fit();
labels.shrink_to_fit();
@ -247,24 +327,90 @@ impl ExpressionStoreBuilder {
types.shrink_to_fit();
lifetimes.shrink_to_fit();
ExpressionStore {
exprs,
pats,
bindings,
labels,
binding_owners,
types,
lifetimes,
block_scopes: block_scopes.into_boxed_slice(),
ident_hygiene,
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();
pat_map_back.shrink_to_fit();
label_map.shrink_to_fit();
label_map_back.shrink_to_fit();
types_map_back.shrink_to_fit();
types_map.shrink_to_fit();
lifetime_map_back.shrink_to_fit();
lifetime_map.shrink_to_fit();
binding_definitions.shrink_to_fit();
field_map_back.shrink_to_fit();
pat_field_map_back.shrink_to_fit();
if let Some(template_map) = &mut template_map {
let FormatTemplate {
format_args_to_captures,
asm_to_captures,
implicit_capture_to_source,
} = &mut **template_map;
format_args_to_captures.shrink_to_fit();
asm_to_captures.shrink_to_fit();
implicit_capture_to_source.shrink_to_fit();
}
expansions.shrink_to_fit();
let has_exprs =
!exprs.is_empty() || !labels.is_empty() || !pats.is_empty() || !bindings.is_empty();
let store = {
let expr_only = if has_exprs {
Some(Box::new(ExpressionOnlyStore {
exprs,
pats,
bindings,
labels,
binding_owners,
block_scopes: block_scopes.into_boxed_slice(),
ident_hygiene,
}))
} else {
None
};
ExpressionStore { expr_only, types, lifetimes }
};
let source_map = {
let expr_only = if has_exprs || !expansions.is_empty() || !diagnostics.is_empty() {
Some(Box::new(ExpressionOnlySourceMap {
expr_map,
expr_map_back,
pat_map,
pat_map_back,
label_map,
label_map_back,
binding_definitions,
field_map_back,
pat_field_map_back,
template_map,
expansions,
diagnostics: ThinVec::from_iter(diagnostics),
}))
} else {
None
};
ExpressionStoreSourceMap {
expr_only,
types_map_back,
types_map,
lifetime_map_back,
lifetime_map,
}
};
(store, source_map)
}
}
impl ExpressionStore {
pub fn empty_singleton() -> Arc<Self> {
static EMPTY: LazyLock<Arc<ExpressionStore>> =
LazyLock::new(|| Arc::new(ExpressionStoreBuilder::default().finish()));
pub fn empty_singleton() -> (Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>) {
static EMPTY: LazyLock<(Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>)> =
LazyLock::new(|| {
let (store, source_map) = ExpressionStoreBuilder::default().finish();
(Arc::new(store), Arc::new(source_map))
});
EMPTY.clone()
}
@ -273,7 +419,12 @@ impl ExpressionStore {
&'a self,
db: &'a dyn DefDatabase,
) -> impl Iterator<Item = (BlockId, &'a DefMap)> + 'a {
self.block_scopes.iter().map(move |&block| (block, block_def_map(db, block)))
self.expr_only
.as_ref()
.map(|it| &*it.block_scopes)
.unwrap_or_default()
.iter()
.map(move |&block| (block, block_def_map(db, block)))
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
@ -320,7 +471,8 @@ impl ExpressionStore {
}
pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
match self.binding_owners.get(&binding) {
let Some(expr_only) = &self.expr_only else { return false };
match expr_only.binding_owners.get(&binding) {
Some(it) => {
// We assign expression ids in a way that outer closures will receive
// a lower id
@ -330,6 +482,11 @@ impl ExpressionStore {
}
}
#[inline]
pub fn binding_owner(&self, id: BindingId) -> Option<ExprId> {
self.expr_only.as_ref()?.binding_owners.get(&id).copied()
}
/// Walks the immediate children expressions and calls `f` for each child expression.
///
/// Note that this does not walk const blocks.
@ -601,16 +758,22 @@ impl ExpressionStore {
});
}
#[inline]
#[track_caller]
fn assert_expr_only(&self) -> &ExpressionOnlyStore {
self.expr_only.as_ref().expect("should have `ExpressionStore::expr_only`")
}
fn binding_hygiene(&self, binding: BindingId) -> HygieneId {
self.bindings[binding].hygiene
self.assert_expr_only().bindings[binding].hygiene
}
pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId {
self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
self.assert_expr_only().ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId {
self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
self.assert_expr_only().ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
}
pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId {
@ -619,43 +782,72 @@ impl ExpressionStore {
ExprOrPatId::PatId(id) => self.pat_path_hygiene(id),
}
}
#[inline]
pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
match &self.expr_only {
Some(it) => it.exprs.iter(),
None => const { &Arena::new() }.iter(),
}
}
#[inline]
pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
match &self.expr_only {
Some(it) => it.pats.iter(),
None => const { &Arena::new() }.iter(),
}
}
#[inline]
pub fn bindings(&self) -> impl Iterator<Item = (BindingId, &Binding)> {
match &self.expr_only {
Some(it) => it.bindings.iter(),
None => const { &Arena::new() }.iter(),
}
}
}
impl Index<ExprId> for ExpressionStore {
type Output = Expr;
#[inline]
fn index(&self, expr: ExprId) -> &Expr {
&self.exprs[expr]
&self.assert_expr_only().exprs[expr]
}
}
impl Index<PatId> for ExpressionStore {
type Output = Pat;
#[inline]
fn index(&self, pat: PatId) -> &Pat {
&self.pats[pat]
&self.assert_expr_only().pats[pat]
}
}
impl Index<LabelId> for ExpressionStore {
type Output = Label;
#[inline]
fn index(&self, label: LabelId) -> &Label {
&self.labels[label]
&self.assert_expr_only().labels[label]
}
}
impl Index<BindingId> for ExpressionStore {
type Output = Binding;
#[inline]
fn index(&self, b: BindingId) -> &Binding {
&self.bindings[b]
&self.assert_expr_only().bindings[b]
}
}
impl Index<TypeRefId> for ExpressionStore {
type Output = TypeRef;
#[inline]
fn index(&self, b: TypeRefId) -> &TypeRef {
&self.types[b]
}
@ -664,6 +856,7 @@ impl Index<TypeRefId> for ExpressionStore {
impl Index<LifetimeRefId> for ExpressionStore {
type Output = LifetimeRef;
#[inline]
fn index(&self, b: LifetimeRefId) -> &LifetimeRef {
&self.lifetimes[b]
}
@ -684,12 +877,6 @@ impl Index<PathId> for ExpressionStore {
// FIXME: Change `node_` prefix to something more reasonable.
// Perhaps `expr_syntax` and `expr_id`?
impl ExpressionStoreSourceMap {
pub fn empty_singleton() -> Arc<Self> {
static EMPTY: LazyLock<Arc<ExpressionStoreSourceMap>> =
LazyLock::new(|| Arc::new(ExpressionStoreSourceMap::default()));
EMPTY.clone()
}
pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
match id {
ExprOrPatId::ExprId(id) => self.expr_syntax(id),
@ -697,30 +884,46 @@ impl ExpressionStoreSourceMap {
}
}
#[inline]
fn expr_or_synthetic(&self) -> Result<&ExpressionOnlySourceMap, SyntheticSyntax> {
self.expr_only.as_deref().ok_or(SyntheticSyntax)
}
#[inline]
fn expr_only(&self) -> Option<&ExpressionOnlySourceMap> {
self.expr_only.as_deref()
}
#[inline]
#[track_caller]
fn assert_expr_only(&self) -> &ExpressionOnlySourceMap {
self.expr_only.as_ref().expect("should have `ExpressionStoreSourceMap::expr_only`")
}
pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> {
self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
self.expr_or_synthetic()?.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
}
pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprOrPatId> {
let src = node.map(AstPtr::new);
self.expr_map.get(&src).cloned()
self.expr_only()?.expr_map.get(&src).cloned()
}
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
let src = node.map(AstPtr::new);
self.expansions.get(&src).cloned()
self.expr_only()?.expansions.get(&src).cloned()
}
pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
self.expansions.iter().map(|(&a, &b)| (a, b))
self.expr_only().into_iter().flat_map(|it| it.expansions.iter().map(|(&a, &b)| (a, b)))
}
pub fn pat_syntax(&self, pat: PatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
self.expr_or_synthetic()?.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
}
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> {
self.pat_map.get(&node.map(AstPtr::new)).cloned()
self.expr_only()?.pat_map.get(&node.map(AstPtr::new)).cloned()
}
pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
@ -732,49 +935,50 @@ impl ExpressionStoreSourceMap {
}
pub fn label_syntax(&self, label: LabelId) -> LabelSource {
self.label_map_back[label]
self.assert_expr_only().label_map_back[label]
}
pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] {
self.binding_definitions.get(binding).map_or(&[], Deref::deref)
self.assert_expr_only().binding_definitions.get(binding).map_or(&[], Deref::deref)
}
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
let src = node.map(AstPtr::new);
self.label_map.get(&src).cloned()
self.expr_only()?.label_map.get(&src).cloned()
}
pub fn field_syntax(&self, expr: ExprId) -> FieldSource {
self.field_map_back[&expr]
self.assert_expr_only().field_map_back[&expr]
}
pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
self.pat_field_map_back[&pat]
self.assert_expr_only().pat_field_map_back[&pat]
}
pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprOrPatId> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast);
self.expr_map.get(&src).copied()
self.expr_only()?.expr_map.get(&src).copied()
}
pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
self.expansions.iter()
self.expr_only().into_iter().flat_map(|it| it.expansions.iter())
}
pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
self.expansions.get(&node.map(AstPtr::new)).copied()
self.expr_only()?.expansions.get(&node.map(AstPtr::new)).copied()
}
pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> {
let expr_only = self.expr_only()?;
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
let (hygiene, names) = self
let (hygiene, names) = expr_only
.template_map
.as_ref()?
.format_args_to_captures
.get(&self.expr_map.get(&src)?.as_expr()?)?;
.get(&expr_only.expr_map.get(&src)?.as_expr()?)?;
Some((*hygiene, &**names))
}
@ -782,67 +986,28 @@ impl ExpressionStoreSourceMap {
&self,
capture_expr: ExprId,
) -> Option<InFile<(ExprPtr, TextRange)>> {
self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
self.expr_only()?
.template_map
.as_ref()?
.implicit_capture_to_source
.get(&capture_expr)
.copied()
}
pub fn asm_template_args(
&self,
node: InFile<&ast::AsmExpr>,
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
let expr_only = self.expr_only()?;
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
let expr = self.expr_map.get(&src)?.as_expr()?;
Some(expr)
.zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
let expr = expr_only.expr_map.get(&src)?.as_expr()?;
Some(expr).zip(
expr_only.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref),
)
}
/// Get a reference to the source map's diagnostics.
pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] {
&self.diagnostics
}
fn shrink_to_fit(&mut self) {
let Self {
expr_map,
expr_map_back,
pat_map,
pat_map_back,
label_map,
label_map_back,
field_map_back,
pat_field_map_back,
expansions,
template_map,
diagnostics,
binding_definitions,
types_map,
types_map_back,
lifetime_map_back,
lifetime_map,
} = self;
if let Some(template_map) = template_map {
let FormatTemplate {
format_args_to_captures,
asm_to_captures,
implicit_capture_to_source,
} = &mut **template_map;
format_args_to_captures.shrink_to_fit();
asm_to_captures.shrink_to_fit();
implicit_capture_to_source.shrink_to_fit();
}
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();
pat_map_back.shrink_to_fit();
label_map.shrink_to_fit();
label_map_back.shrink_to_fit();
field_map_back.shrink_to_fit();
pat_field_map_back.shrink_to_fit();
expansions.shrink_to_fit();
diagnostics.shrink_to_fit();
binding_definitions.shrink_to_fit();
types_map.shrink_to_fit();
types_map_back.shrink_to_fit();
lifetime_map.shrink_to_fit();
lifetime_map_back.shrink_to_fit();
self.expr_only().map(|it| &*it.diagnostics).unwrap_or_default()
}
}

View File

@ -36,6 +36,7 @@ pub struct Body {
impl ops::Deref for Body {
type Target = ExpressionStore;
#[inline]
fn deref(&self) -> &Self::Target {
&self.store
}
@ -61,6 +62,7 @@ pub struct BodySourceMap {
impl ops::Deref for BodySourceMap {
type Target = ExpressionStoreSourceMap;
#[inline]
fn deref(&self) -> &Self::Target {
&self.store
}
@ -102,9 +104,7 @@ impl Body {
}
};
let module = def.module(db);
let (body, mut source_map) =
lower_body(db, def, file_id, module, params, body, is_async_fn);
source_map.store.shrink_to_fit();
let (body, source_map) = lower_body(db, def, file_id, module, params, body, is_async_fn);
(Arc::new(body), Arc::new(source_map))
}

View File

@ -121,14 +121,10 @@ pub(super) fn lower_body(
params = (0..count).map(|_| collector.missing_pat()).collect();
};
let body_expr = collector.missing_expr();
let (store, source_map) = collector.store.finish();
return (
Body {
store: collector.store.finish(),
params: params.into_boxed_slice(),
self_param,
body_expr,
},
BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
Body { store, params: params.into_boxed_slice(), self_param, body_expr },
BodySourceMap { self_param: source_map_self_param, store: source_map },
);
}
@ -171,14 +167,10 @@ pub(super) fn lower_body(
},
);
let (store, source_map) = collector.store.finish();
(
Body {
store: collector.store.finish(),
params: params.into_boxed_slice(),
self_param,
body_expr,
},
BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
Body { store, params: params.into_boxed_slice(), self_param, body_expr },
BodySourceMap { self_param: source_map_self_param, store: source_map },
)
}
@ -190,7 +182,8 @@ pub(crate) fn lower_type_ref(
let mut expr_collector = ExprCollector::new(db, module, type_ref.file_id);
let type_ref =
expr_collector.lower_type_ref_opt(type_ref.value, &mut ExprCollector::impl_trait_allocator);
(expr_collector.store.finish(), expr_collector.source_map, type_ref)
let (store, source_map) = expr_collector.store.finish();
(store, source_map, type_ref)
}
pub(crate) fn lower_generic_params(
@ -205,7 +198,8 @@ pub(crate) fn lower_generic_params(
let mut collector = generics::GenericParamsCollector::new(def);
collector.lower(&mut expr_collector, param_list, where_clause);
let params = collector.finish();
(Arc::new(expr_collector.store.finish()), params, expr_collector.source_map)
let (store, source_map) = expr_collector.store.finish();
(Arc::new(store), params, source_map)
}
pub(crate) fn lower_impl(
@ -232,7 +226,8 @@ pub(crate) fn lower_impl(
impl_syntax.value.where_clause(),
);
let params = collector.finish();
(expr_collector.store.finish(), expr_collector.source_map, self_ty, trait_, params)
let (store, source_map) = expr_collector.store.finish();
(store, source_map, self_ty, trait_, params)
}
pub(crate) fn lower_trait(
@ -253,7 +248,8 @@ pub(crate) fn lower_trait(
trait_syntax.value.where_clause(),
);
let params = collector.finish();
(expr_collector.store.finish(), expr_collector.source_map, params)
let (store, source_map) = expr_collector.store.finish();
(store, source_map, params)
}
pub(crate) fn lower_trait_alias(
@ -274,7 +270,8 @@ pub(crate) fn lower_trait_alias(
trait_syntax.value.where_clause(),
);
let params = collector.finish();
(expr_collector.store.finish(), expr_collector.source_map, params)
let (store, source_map) = expr_collector.store.finish();
(store, source_map, params)
}
pub(crate) fn lower_type_alias(
@ -313,7 +310,8 @@ pub(crate) fn lower_type_alias(
.value
.ty()
.map(|ty| expr_collector.lower_type_ref(ty, &mut ExprCollector::impl_trait_allocator));
(expr_collector.store.finish(), expr_collector.source_map, params, bounds, type_ref)
let (store, source_map) = expr_collector.store.finish();
(store, source_map, params, bounds, type_ref)
}
pub(crate) fn lower_function(
@ -421,9 +419,10 @@ pub(crate) fn lower_function(
} else {
return_type
};
let (store, source_map) = expr_collector.store.finish();
(
expr_collector.store.finish(),
expr_collector.source_map,
store,
source_map,
generics,
params.into_boxed_slice(),
return_type,
@ -440,7 +439,6 @@ pub struct ExprCollector<'db> {
local_def_map: &'db LocalDefMap,
module: ModuleId,
pub store: ExpressionStoreBuilder,
pub(crate) source_map: ExpressionStoreSourceMap,
// state stuff
// Prevent nested impl traits like `impl Foo<impl Bar>`.
@ -551,7 +549,6 @@ impl ExprCollector<'_> {
module,
def_map,
local_def_map,
source_map: ExpressionStoreSourceMap::default(),
store: ExpressionStoreBuilder::default(),
expander,
current_try_block_label: None,
@ -698,7 +695,7 @@ impl ExprCollector<'_> {
let id = self.collect_macro_call(mcall, macro_ptr, true, |this, expansion| {
this.lower_type_ref_opt(expansion, impl_trait_lower_fn)
});
self.source_map.types_map.insert(src, id);
self.store.types_map.insert(src, id);
return id;
}
None => TypeRef::Error,
@ -732,8 +729,8 @@ impl ExprCollector<'_> {
fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
let id = self.store.types.alloc(type_ref);
let ptr = self.expander.in_file(node);
self.source_map.types_map_back.insert(id, ptr);
self.source_map.types_map.insert(ptr, id);
self.store.types_map_back.insert(id, ptr);
self.store.types_map.insert(ptr, id);
id
}
@ -744,8 +741,8 @@ impl ExprCollector<'_> {
) -> LifetimeRefId {
let id = self.store.lifetimes.alloc(lifetime_ref);
let ptr = self.expander.in_file(node);
self.source_map.lifetime_map_back.insert(id, ptr);
self.source_map.lifetime_map.insert(ptr, id);
self.store.lifetime_map_back.insert(id, ptr);
self.store.lifetime_map.insert(ptr, id);
id
}
@ -1190,14 +1187,14 @@ impl ExprCollector<'_> {
}
ast::Expr::ContinueExpr(e) => {
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
self.source_map.diagnostics.push(e);
self.store.diagnostics.push(e);
None
});
self.alloc_expr(Expr::Continue { label }, syntax_ptr)
}
ast::Expr::BreakExpr(e) => {
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
self.source_map.diagnostics.push(e);
self.store.diagnostics.push(e);
None
});
let expr = e.expr().map(|e| self.collect_expr(e));
@ -1207,7 +1204,7 @@ impl ExprCollector<'_> {
let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well for IDE resolution
let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, inner.into());
self.store.expr_map.insert(src, inner.into());
inner
}
ast::Expr::ReturnExpr(e) => {
@ -1248,7 +1245,7 @@ impl ExprCollector<'_> {
None => self.missing_expr(),
};
let src = self.expander.in_file(AstPtr::new(&field));
self.source_map.field_map_back.insert(expr, src);
self.store.field_map_back.insert(expr, src);
Some(RecordLitField { name, expr })
})
.collect();
@ -1271,12 +1268,10 @@ impl ExprCollector<'_> {
ast::Expr::AwaitExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
self.source_map.diagnostics.push(
ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
node: self.expander.in_file(AstPtr::new(&e)),
location: location.to_string(),
},
);
self.store.diagnostics.push(ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
node: self.expander.in_file(AstPtr::new(&e)),
location: location.to_string(),
});
}
self.alloc_expr(Expr::Await { expr }, syntax_ptr)
}
@ -1442,7 +1437,7 @@ impl ExprCollector<'_> {
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, id.into());
self.store.expr_map.insert(src, id.into());
id
}
None => self.alloc_expr(Expr::Missing, syntax_ptr),
@ -1486,7 +1481,7 @@ impl ExprCollector<'_> {
let expr = self.collect_expr(expr);
// Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`.
let id = self.store.pats.alloc(Pat::Expr(expr));
self.source_map.pat_map_back.insert(id, src);
self.store.pat_map_back.insert(id, src);
id
})
}
@ -1555,7 +1550,7 @@ impl ExprCollector<'_> {
let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
this.collect_expr_as_pat_opt(expansion)
});
self.source_map.expr_map.insert(src, id.into());
self.store.expr_map.insert(src, id.into());
id
}
ast::Expr::RecordExpr(e) => {
@ -1576,7 +1571,7 @@ impl ExprCollector<'_> {
let pat = self.collect_expr_as_pat(field_expr);
let name = f.field_name()?.as_name();
let src = self.expander.in_file(AstPtr::new(&f).wrap_left());
self.source_map.pat_field_map_back.insert(pat, src);
self.store.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
.collect();
@ -1622,7 +1617,7 @@ impl ExprCollector<'_> {
);
if let Either::Left(pat) = pat {
let src = this.expander.in_file(AstPtr::new(&expr).wrap_left());
this.source_map.pat_map_back.insert(pat, src);
this.store.pat_map_back.insert(pat, src);
}
pat
}
@ -1968,7 +1963,7 @@ impl ExprCollector<'_> {
self.module.krate(),
resolver,
&mut |ptr, call| {
_ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
_ = self.store.expansions.insert(ptr.map(|(it, _)| it), call);
},
)
}
@ -1978,19 +1973,17 @@ impl ExprCollector<'_> {
Ok(res) => res,
Err(UnresolvedMacro { path }) => {
if record_diagnostics {
self.source_map.diagnostics.push(
ExpressionStoreDiagnostics::UnresolvedMacroCall {
node: self.expander.in_file(syntax_ptr),
path,
},
);
self.store.diagnostics.push(ExpressionStoreDiagnostics::UnresolvedMacroCall {
node: self.expander.in_file(syntax_ptr),
path,
});
}
return collector(self, None);
}
};
if record_diagnostics {
if let Some(err) = res.err {
self.source_map
self.store
.diagnostics
.push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
}
@ -2001,7 +1994,7 @@ impl ExprCollector<'_> {
// Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions.
if let Some(macro_file) = self.expander.current_file_id().macro_file() {
self.source_map.expansions.insert(macro_call_ptr, macro_file);
self.store.expansions.insert(macro_call_ptr, macro_file);
}
if record_diagnostics {
@ -2050,7 +2043,7 @@ impl ExprCollector<'_> {
// Make the macro-call point to its expanded expression so we can query
// semantics on syntax pointers to the macro
let src = self.expander.in_file(syntax_ptr);
self.source_map.expr_map.insert(src, tail.into());
self.store.expr_map.insert(src, tail.into());
})
}
@ -2361,7 +2354,7 @@ impl ExprCollector<'_> {
let pat = self.collect_pat(ast_pat, binding_list);
let name = f.field_name()?.as_name();
let src = self.expander.in_file(AstPtr::new(&f).wrap_right());
self.source_map.pat_field_map_back.insert(pat, src);
self.store.pat_field_map_back.insert(pat, src);
Some(RecordFieldPat { name, pat })
})
.collect();
@ -2424,7 +2417,7 @@ impl ExprCollector<'_> {
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
});
self.source_map.pat_map.insert(src, pat.into());
self.store.pat_map.insert(src, pat.into());
return pat;
}
None => Pat::Missing,
@ -2515,7 +2508,7 @@ impl ExprCollector<'_> {
}
});
if let Some(pat) = pat.left() {
self.source_map.pat_map.insert(src, pat.into());
self.store.pat_map.insert(src, pat.into());
}
pat
}
@ -2537,7 +2530,7 @@ impl ExprCollector<'_> {
match enabled {
Ok(()) => true,
Err(cfg) => {
self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
self.store.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
cfg,
opts: self.cfg_options.clone(),
@ -2548,7 +2541,7 @@ impl ExprCollector<'_> {
}
fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) {
self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id);
self.store.binding_definitions.entry(binding_id).or_default().push(pat_id);
}
// region: labels
@ -2724,7 +2717,7 @@ impl ExprCollector<'_> {
|name, range| {
let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name)));
if let Some(range) = range {
self.source_map
self.store
.template_map
.get_or_insert_with(Default::default)
.implicit_capture_to_source
@ -2836,7 +2829,7 @@ impl ExprCollector<'_> {
)
};
self.source_map
self.store
.template_map
.get_or_insert_with(Default::default)
.format_args_to_captures
@ -3386,8 +3379,8 @@ impl ExprCollector<'_> {
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
self.source_map.expr_map.insert(src, id.into());
self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
self.store.expr_map.insert(src, id.into());
id
}
// FIXME: desugared exprs don't have ptr, that's wrong and should be fixed.
@ -3398,9 +3391,9 @@ impl ExprCollector<'_> {
fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
// We intentionally don't fill this as it could overwrite a non-desugared entry
// self.source_map.expr_map.insert(src, id);
// self.store.expr_map.insert(src, id);
id
}
fn missing_expr(&mut self) -> ExprId {
@ -3423,24 +3416,24 @@ impl ExprCollector<'_> {
fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId {
let src = self.expander.in_file(ptr);
let id = self.store.pats.alloc(pat);
self.source_map.expr_map.insert(src, id.into());
self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
self.store.expr_map.insert(src, id.into());
self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
id
}
fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId {
let src = self.expander.in_file(ptr);
let id = self.store.exprs.alloc(expr);
self.source_map.pat_map.insert(src, id.into());
self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
self.store.pat_map.insert(src, id.into());
self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
id
}
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
let src = self.expander.in_file(ptr);
let id = self.store.pats.alloc(pat);
self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
self.source_map.pat_map.insert(src, id.into());
self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
self.store.pat_map.insert(src, id.into());
id
}
// FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
@ -3454,8 +3447,8 @@ impl ExprCollector<'_> {
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
let src = self.expander.in_file(ptr);
let id = self.store.labels.alloc(label);
self.source_map.label_map_back.insert(id, src);
self.source_map.label_map.insert(src, id);
self.store.label_map_back.insert(id, src);
self.store.label_map.insert(src, id);
id
}
// FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow.

View File

@ -10,7 +10,7 @@ use tt::TextRange;
use crate::{
expr_store::lower::{ExprCollector, FxIndexSet},
hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass},
hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmKind, InlineAsmRegOrRegClass},
};
impl ExprCollector<'_> {
@ -269,11 +269,20 @@ impl ExprCollector<'_> {
}
})
};
let kind = if asm.global_asm_token().is_some() {
InlineAsmKind::GlobalAsm
} else if asm.naked_asm_token().is_some() {
InlineAsmKind::NakedAsm
} else {
InlineAsmKind::Asm
};
let idx = self.alloc_expr(
Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options, kind }),
syntax_ptr,
);
self.source_map
self.store
.template_map
.get_or_insert_with(Default::default)
.asm_to_captures

View File

@ -23,7 +23,7 @@ fn lower_path(path: ast::Path) -> (TestDB, ExpressionStore, Option<Path>) {
let mut ctx =
ExprCollector::new(&db, crate_def_map(&db, krate).root_module_id(), file_id.into());
let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator);
let store = ctx.store.finish();
let (store, _) = ctx.store.finish();
(db, store, lowered_path)
}

View File

@ -902,7 +902,7 @@ impl Printer<'_> {
let mut same_name = false;
if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
&self.store.bindings[*id]
&self.store.assert_expr_only().bindings[*id]
{
if name.as_str() == field_name {
same_name = true;
@ -1063,7 +1063,7 @@ impl Printer<'_> {
}
fn print_binding(&mut self, id: BindingId) {
let Binding { name, mode, .. } = &self.store.bindings[id];
let Binding { name, mode, .. } = &self.store.assert_expr_only().bindings[id];
let mode = match mode {
BindingAnnotation::Unannotated => "",
BindingAnnotation::Mutable => "mut ",

View File

@ -106,7 +106,9 @@ impl ExprScopes {
let mut scopes = ExprScopes {
scopes: Arena::default(),
scope_entries: Arena::default(),
scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
scope_by_expr: ArenaMap::with_capacity(
body.expr_only.as_ref().map_or(0, |it| it.exprs.len()),
),
};
let mut root = scopes.root_scope();
if let Some(self_param) = body.self_param {
@ -179,7 +181,7 @@ impl ExprScopes {
binding: BindingId,
hygiene: HygieneId,
) {
let Binding { name, .. } = &store.bindings[binding];
let Binding { name, .. } = &store[binding];
let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene });
self.scopes[scope].entries =
IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry);
@ -251,7 +253,7 @@ fn compute_expr_scopes(
scope: &mut ScopeId,
) {
let make_label =
|label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone()));
|label: &Option<LabelId>| label.map(|label| (label, store[label].name.clone()));
let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| {
compute_expr_scopes(expr, store, scopes, scope)
@ -534,9 +536,8 @@ fn foo() {
};
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
let pat_src = source_map
.pat_syntax(*source_map.binding_definitions[resolved.binding()].first().unwrap())
.unwrap();
let pat_src =
source_map.pat_syntax(source_map.patterns_for_binding(resolved.binding())[0]).unwrap();
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());

View File

@ -508,9 +508,9 @@ fn f() {
}
"#,
);
assert_eq!(body.bindings.len(), 1, "should have a binding for `B`");
assert_eq!(body.assert_expr_only().bindings.len(), 1, "should have a binding for `B`");
assert_eq!(
body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
body[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
"B",
"should have a binding for `B`",
);
@ -566,6 +566,7 @@ const fn f(x: i32) -> i32 {
);
let mtch_arms = body
.assert_expr_only()
.exprs
.iter()
.find_map(|(_, expr)| {
@ -578,10 +579,10 @@ const fn f(x: i32) -> i32 {
.unwrap();
let MatchArm { pat, .. } = mtch_arms[1];
match body.pats[pat] {
match body[pat] {
Pat::Range { start, end } => {
let hir_start = &body.exprs[start.unwrap()];
let hir_end = &body.exprs[end.unwrap()];
let hir_start = &body[start.unwrap()];
let hir_end = &body[end.unwrap()];
assert!(matches!(hir_start, Expr::Path { .. }));
assert!(matches!(hir_end, Expr::Path { .. }));

View File

@ -332,6 +332,17 @@ pub struct OffsetOf {
pub struct InlineAsm {
pub operands: Box<[(Option<Name>, AsmOperand)]>,
pub options: AsmOptions,
pub kind: InlineAsmKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum InlineAsmKind {
/// `asm!()`.
Asm,
/// `global_asm!()`.
GlobalAsm,
/// `naked_asm!()`.
NakedAsm,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]

View File

@ -143,6 +143,8 @@ impl<'a> Ctx<'a> {
ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(),
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
// FIXME: Handle `global_asm!()`.
ast::Item::AsmExpr(_) => return None,
};
let attrs = RawAttrs::new(self.db, item, self.span_map());
self.add_attrs(mod_item.ast_id(), attrs);

View File

@ -35,10 +35,10 @@ use a::{c, d::{e}};
#![no_std]
#![doc = " another file comment"]
// AstId: ExternCrate[5A82, 0]
// AstId: ExternCrate[070B, 0]
pub(self) extern crate self as renamed;
// AstId: ExternCrate[7E1C, 0]
// AstId: ExternCrate[1EA5, 0]
pub(in super) extern crate bli;
// AstId: Use[0000, 0]
@ -78,15 +78,15 @@ extern "C" {
// AstId: ExternBlock[0000, 0]
extern {
#[on_extern_type]
// AstId: TypeAlias[9FDF, 0]
// AstId: TypeAlias[A09C, 0]
pub(self) type ExType;
#[on_extern_static]
// AstId: Static[43C1, 0]
// AstId: Static[D85E, 0]
pub(self) static EX_STATIC = _;
#[on_extern_fn]
// AstId: Fn[452D, 0]
// AstId: Fn[B240, 0]
pub(self) fn ex_fn;
}
"#]],
@ -124,20 +124,20 @@ enum E {
}
"#,
expect![[r#"
// AstId: Struct[DFF3, 0]
// AstId: Struct[ED35, 0]
pub(self) struct Unit;
#[derive(Debug)]
// AstId: Struct[C7A1, 0]
// AstId: Struct[A47C, 0]
pub(self) struct Struct { ... }
// AstId: Struct[DAC2, 0]
// AstId: Struct[C8C9, 0]
pub(self) struct Tuple(...);
// AstId: Union[2DBB, 0]
// AstId: Union[2797, 0]
pub(self) union Ize { ... }
// AstId: Enum[7FF8, 0]
// AstId: Enum[7D23, 0]
pub(self) enum E { ... }
"#]],
);
@ -162,18 +162,18 @@ trait Tr: SuperTrait + 'lifetime {
}
"#,
expect![[r#"
// AstId: Static[B393, 0]
// AstId: Static[F7C1, 0]
pub static ST = _;
// AstId: Const[B309, 0]
// AstId: Const[84BB, 0]
pub(self) const _ = _;
#[attr]
#[inner_attr_in_fn]
// AstId: Fn[75E3, 0]
// AstId: Fn[BE8F, 0]
pub(self) fn f;
// AstId: Trait[2998, 0]
// AstId: Trait[9320, 0]
pub(self) trait Tr { ... }
"#]],
);
@ -197,16 +197,16 @@ mod outline;
expect![[r##"
#[doc = " outer"]
#[doc = " inner"]
// AstId: Module[CF93, 0]
// AstId: Module[03AE, 0]
pub(self) mod inline {
// AstId: Use[0000, 0]
pub(self) use super::*;
// AstId: Fn[1B26, 0]
// AstId: Fn[2A78, 0]
pub(self) fn fn_in_module;
}
// AstId: Module[8994, 0]
// AstId: Module[C08B, 0]
pub(self) mod outline;
"##]],
);
@ -225,13 +225,13 @@ pub macro m2() {}
m!();
"#,
expect![[r#"
// AstId: MacroRules[88CE, 0]
// AstId: MacroRules[7E68, 0]
macro_rules! m { ... }
// AstId: MacroDef[DC34, 0]
// AstId: MacroDef[1C1E, 0]
pub macro m2 { ... }
// AstId: MacroCall[612F, 0], SyntaxContextId: ROOT2024, ExpandTo: Items
// AstId: MacroCall[7E68, 0], SyntaxContextId: ROOT2024, ExpandTo: Items
m!(...);
"#]],
);
@ -244,7 +244,7 @@ fn pub_self() {
pub(self) struct S;
"#,
expect![[r#"
// AstId: Struct[42E2, 0]
// AstId: Struct[5024, 0]
pub(self) struct S;
"#]],
)

View File

@ -28,6 +28,19 @@ fn test_asm_expand() {
r#"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
#[rustc_builtin_macro]
macro_rules! global_asm {() => {}}
#[rustc_builtin_macro]
macro_rules! naked_asm {() => {}}
global_asm! {
""
}
#[unsafe(naked)]
extern "C" fn foo() {
naked_asm!("");
}
fn main() {
let i: u64 = 3;
@ -45,6 +58,17 @@ fn main() {
expect![[r##"
#[rustc_builtin_macro]
macro_rules! asm {() => {}}
#[rustc_builtin_macro]
macro_rules! global_asm {() => {}}
#[rustc_builtin_macro]
macro_rules! naked_asm {() => {}}
builtin #global_asm ("")
#[unsafe(naked)]
extern "C" fn foo() {
builtin #naked_asm ("");
}
fn main() {
let i: u64 = 3;

View File

@ -35,9 +35,9 @@ macro_rules! f {
};
}
struct#0:MacroRules[8C8E, 0]@58..64#14336# MyTraitMap2#0:MacroCall[D499, 0]@31..42#ROOT2024# {#0:MacroRules[8C8E, 0]@72..73#14336#
map#0:MacroRules[8C8E, 0]@86..89#14336#:#0:MacroRules[8C8E, 0]@89..90#14336# #0:MacroRules[8C8E, 0]@89..90#14336#::#0:MacroRules[8C8E, 0]@91..93#14336#std#0:MacroRules[8C8E, 0]@93..96#14336#::#0:MacroRules[8C8E, 0]@96..98#14336#collections#0:MacroRules[8C8E, 0]@98..109#14336#::#0:MacroRules[8C8E, 0]@109..111#14336#HashSet#0:MacroRules[8C8E, 0]@111..118#14336#<#0:MacroRules[8C8E, 0]@118..119#14336#(#0:MacroRules[8C8E, 0]@119..120#14336#)#0:MacroRules[8C8E, 0]@120..121#14336#>#0:MacroRules[8C8E, 0]@121..122#14336#,#0:MacroRules[8C8E, 0]@122..123#14336#
}#0:MacroRules[8C8E, 0]@132..133#14336#
struct#0:MacroRules[BE8F, 0]@58..64#14336# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#14336#
map#0:MacroRules[BE8F, 0]@86..89#14336#:#0:MacroRules[BE8F, 0]@89..90#14336# #0:MacroRules[BE8F, 0]@89..90#14336#::#0:MacroRules[BE8F, 0]@91..93#14336#std#0:MacroRules[BE8F, 0]@93..96#14336#::#0:MacroRules[BE8F, 0]@96..98#14336#collections#0:MacroRules[BE8F, 0]@98..109#14336#::#0:MacroRules[BE8F, 0]@109..111#14336#HashSet#0:MacroRules[BE8F, 0]@111..118#14336#<#0:MacroRules[BE8F, 0]@118..119#14336#(#0:MacroRules[BE8F, 0]@119..120#14336#)#0:MacroRules[BE8F, 0]@120..121#14336#>#0:MacroRules[BE8F, 0]@121..122#14336#,#0:MacroRules[BE8F, 0]@122..123#14336#
}#0:MacroRules[BE8F, 0]@132..133#14336#
"#]],
);
}
@ -75,12 +75,12 @@ macro_rules! f {
};
}
fn#0:MacroCall[D499, 0]@30..32#ROOT2024# main#0:MacroCall[D499, 0]@33..37#ROOT2024#(#0:MacroCall[D499, 0]@37..38#ROOT2024#)#0:MacroCall[D499, 0]@38..39#ROOT2024# {#0:MacroCall[D499, 0]@40..41#ROOT2024#
1#0:MacroCall[D499, 0]@50..51#ROOT2024#;#0:MacroCall[D499, 0]@51..52#ROOT2024#
1.0#0:MacroCall[D499, 0]@61..64#ROOT2024#;#0:MacroCall[D499, 0]@64..65#ROOT2024#
(#0:MacroCall[D499, 0]@74..75#ROOT2024#(#0:MacroCall[D499, 0]@75..76#ROOT2024#1#0:MacroCall[D499, 0]@76..77#ROOT2024#,#0:MacroCall[D499, 0]@77..78#ROOT2024# )#0:MacroCall[D499, 0]@78..79#ROOT2024#,#0:MacroCall[D499, 0]@79..80#ROOT2024# )#0:MacroCall[D499, 0]@80..81#ROOT2024#.#0:MacroCall[D499, 0]@81..82#ROOT2024#0#0:MacroCall[D499, 0]@82..85#ROOT2024#.#0:MacroCall[D499, 0]@82..85#ROOT2024#0#0:MacroCall[D499, 0]@82..85#ROOT2024#;#0:MacroCall[D499, 0]@85..86#ROOT2024#
let#0:MacroCall[D499, 0]@95..98#ROOT2024# x#0:MacroCall[D499, 0]@99..100#ROOT2024# =#0:MacroCall[D499, 0]@101..102#ROOT2024# 1#0:MacroCall[D499, 0]@103..104#ROOT2024#;#0:MacroCall[D499, 0]@104..105#ROOT2024#
}#0:MacroCall[D499, 0]@110..111#ROOT2024#
fn#0:MacroCall[BE8F, 0]@30..32#ROOT2024# main#0:MacroCall[BE8F, 0]@33..37#ROOT2024#(#0:MacroCall[BE8F, 0]@37..38#ROOT2024#)#0:MacroCall[BE8F, 0]@38..39#ROOT2024# {#0:MacroCall[BE8F, 0]@40..41#ROOT2024#
1#0:MacroCall[BE8F, 0]@50..51#ROOT2024#;#0:MacroCall[BE8F, 0]@51..52#ROOT2024#
1.0#0:MacroCall[BE8F, 0]@61..64#ROOT2024#;#0:MacroCall[BE8F, 0]@64..65#ROOT2024#
(#0:MacroCall[BE8F, 0]@74..75#ROOT2024#(#0:MacroCall[BE8F, 0]@75..76#ROOT2024#1#0:MacroCall[BE8F, 0]@76..77#ROOT2024#,#0:MacroCall[BE8F, 0]@77..78#ROOT2024# )#0:MacroCall[BE8F, 0]@78..79#ROOT2024#,#0:MacroCall[BE8F, 0]@79..80#ROOT2024# )#0:MacroCall[BE8F, 0]@80..81#ROOT2024#.#0:MacroCall[BE8F, 0]@81..82#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#.#0:MacroCall[BE8F, 0]@82..85#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#;#0:MacroCall[BE8F, 0]@85..86#ROOT2024#
let#0:MacroCall[BE8F, 0]@95..98#ROOT2024# x#0:MacroCall[BE8F, 0]@99..100#ROOT2024# =#0:MacroCall[BE8F, 0]@101..102#ROOT2024# 1#0:MacroCall[BE8F, 0]@103..104#ROOT2024#;#0:MacroCall[BE8F, 0]@104..105#ROOT2024#
}#0:MacroCall[BE8F, 0]@110..111#ROOT2024#
"#]],
@ -171,7 +171,7 @@ fn main(foo: ()) {
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#0:Fn[B9C7, 0]@236..321#ROOT2024#;
/* error: unresolved macro unresolved */"helloworld!"#0:Fn[15AE, 0]@236..321#ROOT2024#;
}
}
@ -197,7 +197,7 @@ macro_rules! mk_struct {
#[macro_use]
mod foo;
struct#1:MacroRules[E572, 0]@59..65#14336# Foo#0:MacroCall[BDD3, 0]@32..35#ROOT2024#(#1:MacroRules[E572, 0]@70..71#14336#u32#0:MacroCall[BDD3, 0]@41..44#ROOT2024#)#1:MacroRules[E572, 0]@74..75#14336#;#1:MacroRules[E572, 0]@75..76#14336#
struct#1:MacroRules[DB0C, 0]@59..65#14336# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#14336#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#14336#;#1:MacroRules[DB0C, 0]@75..76#14336#
"#]],
);
}

View File

@ -20,13 +20,14 @@ use base_db::RootQueryDb;
use expect_test::Expect;
use hir_expand::{
AstId, InFile, MacroCallId, MacroCallKind, MacroKind,
builtin::quote::quote,
db::ExpandDatabase,
proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind},
span_map::SpanMapRef,
};
use intern::Symbol;
use intern::{Symbol, sym};
use itertools::Itertools;
use span::{Edition, Span};
use span::{Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
use stdx::{format_to, format_to_acc};
use syntax::{
AstNode, AstPtr,
@ -34,7 +35,9 @@ use syntax::{
SyntaxNode, T,
ast::{self, edit::IndentLevel},
};
use syntax_bridge::token_tree_to_syntax_node;
use test_fixture::WithFixture;
use tt::{TextRange, TextSize};
use crate::{
AdtId, Lookup, ModuleDefId,
@ -386,3 +389,38 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
other.type_id() == TypeId::of::<Self>()
}
}
#[test]
fn regression_20171() {
// This really isn't the appropriate place to put this test, but it's convenient with access to `quote!`.
let span = Span {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor {
file_id: span::EditionedFileId::current_edition(span::FileId::from_raw(0)),
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContext::root(Edition::CURRENT),
};
let close_brace = tt::Punct { char: '}', spacing: tt::Spacing::Alone, span };
let dotdot1 = tt::Punct { char: '.', spacing: tt::Spacing::Joint, span };
let dotdot2 = tt::Punct { char: '.', spacing: tt::Spacing::Alone, span };
let dollar_crate = sym::dollar_crate;
let tt = quote! {
span => {
if !((matches!(
drive_parser(&mut parser, data, false),
Err(TarParserError::CorruptField {
field: CorruptFieldContext::PaxKvLength,
error: GeneralParseError::ParseInt(ParseIntError { #dotdot1 #dotdot2 })
})
#close_brace ))) {
#dollar_crate::panic::panic_2021!();
}}
};
token_tree_to_syntax_node(
&tt,
syntax_bridge::TopEntryPoint::MacroStmts,
&mut |_| Edition::CURRENT,
Edition::CURRENT,
);
}

View File

@ -181,9 +181,9 @@ fn foo(&self) {
self.0. 1;
}
fn#0:Fn[4D85, 0]@45..47#ROOT2024# foo#0:Fn[4D85, 0]@48..51#ROOT2024#(#0:Fn[4D85, 0]@51..52#ROOT2024#&#0:Fn[4D85, 0]@52..53#ROOT2024#self#0:Fn[4D85, 0]@53..57#ROOT2024# )#0:Fn[4D85, 0]@57..58#ROOT2024# {#0:Fn[4D85, 0]@59..60#ROOT2024#
self#0:Fn[4D85, 0]@65..69#ROOT2024# .#0:Fn[4D85, 0]@69..70#ROOT2024#0#0:Fn[4D85, 0]@70..71#ROOT2024#.#0:Fn[4D85, 0]@71..72#ROOT2024#1#0:Fn[4D85, 0]@73..74#ROOT2024#;#0:Fn[4D85, 0]@74..75#ROOT2024#
}#0:Fn[4D85, 0]@76..77#ROOT2024#"#]],
fn#0:Fn[8A31, 0]@45..47#ROOT2024# foo#0:Fn[8A31, 0]@48..51#ROOT2024#(#0:Fn[8A31, 0]@51..52#ROOT2024#&#0:Fn[8A31, 0]@52..53#ROOT2024#self#0:Fn[8A31, 0]@53..57#ROOT2024# )#0:Fn[8A31, 0]@57..58#ROOT2024# {#0:Fn[8A31, 0]@59..60#ROOT2024#
self#0:Fn[8A31, 0]@65..69#ROOT2024# .#0:Fn[8A31, 0]@69..70#ROOT2024#0#0:Fn[8A31, 0]@70..71#ROOT2024#.#0:Fn[8A31, 0]@71..72#ROOT2024#1#0:Fn[8A31, 0]@73..74#ROOT2024#;#0:Fn[8A31, 0]@74..75#ROOT2024#
}#0:Fn[8A31, 0]@76..77#ROOT2024#"#]],
);
}

View File

@ -373,19 +373,14 @@ pub fn crate_def_map(db: &dyn DefDatabase, crate_id: Crate) -> &DefMap {
crate_local_def_map(db, crate_id).def_map(db)
}
#[allow(unused_lifetimes)]
mod __ {
use super::*;
#[salsa_macros::tracked]
pub(crate) struct DefMapPair<'db> {
#[tracked]
#[returns(ref)]
pub(crate) def_map: DefMap,
#[returns(ref)]
pub(crate) local: LocalDefMap,
}
#[salsa_macros::tracked]
pub(crate) struct DefMapPair<'db> {
#[tracked]
#[returns(ref)]
pub(crate) def_map: DefMap,
#[returns(ref)]
pub(crate) local: LocalDefMap,
}
pub(crate) use __::DefMapPair;
#[salsa_macros::tracked(returns(ref))]
pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> {

View File

@ -1052,17 +1052,6 @@ impl<'db> Scope<'db> {
}
}
pub fn resolver_for_expr(
db: &dyn DefDatabase,
owner: DefWithBodyId,
expr_id: ExprId,
) -> Resolver<'_> {
let r = owner.resolver(db);
let scopes = db.expr_scopes(owner);
let scope_id = scopes.scope_for(expr_id);
resolver_for_scope_(db, scopes, scope_id, r, owner)
}
pub fn resolver_for_scope(
db: &dyn DefDatabase,
owner: DefWithBodyId,

View File

@ -779,14 +779,10 @@ impl VariantFields {
Arc::new(VariantFields { fields, store: Arc::new(store), shape }),
Arc::new(source_map),
),
None => (
Arc::new(VariantFields {
fields: Arena::default(),
store: ExpressionStore::empty_singleton(),
shape,
}),
ExpressionStoreSourceMap::empty_singleton(),
),
None => {
let (store, source_map) = ExpressionStore::empty_singleton();
(Arc::new(VariantFields { fields: Arena::default(), store, shape }), source_map)
}
}
}
@ -878,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
idx += 1;
}
Err(cfg) => {
col.source_map.diagnostics.push(
col.store.diagnostics.push(
crate::expr_store::ExpressionStoreDiagnostics::InactiveCode {
node: InFile::new(fields.file_id, SyntaxNodePtr::new(field.syntax())),
cfg,
@ -891,9 +887,9 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
if !has_fields {
return None;
}
let store = col.store.finish();
let (store, source_map) = col.store.finish();
arena.shrink_to_fit();
Some((arena, store, col.source_map))
Some((arena, store, source_map))
}
#[derive(Debug, PartialEq, Eq)]
@ -980,7 +976,7 @@ impl EnumVariants {
if !matches!(variant.shape, FieldsShape::Unit) {
let body = db.body(v.into());
// A variant with explicit discriminant
if body.exprs[body.body_expr] != crate::hir::Expr::Missing {
if !matches!(body[body.body_expr], crate::hir::Expr::Missing) {
return false;
}
}

View File

@ -125,8 +125,8 @@ register_builtin! {
(assert, Assert) => assert_expand,
(stringify, Stringify) => stringify_expand,
(asm, Asm) => asm_expand,
(global_asm, GlobalAsm) => asm_expand,
(naked_asm, NakedAsm) => asm_expand,
(global_asm, GlobalAsm) => global_asm_expand,
(naked_asm, NakedAsm) => naked_asm_expand,
(cfg, Cfg) => cfg_expand,
(core_panic, CorePanic) => panic_expand,
(std_panic, StdPanic) => panic_expand,
@ -325,6 +325,36 @@ fn asm_expand(
ExpandResult::ok(expanded)
}
fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let mut tt = tt.clone();
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound global_asm #tt
};
ExpandResult::ok(expanded)
}
fn naked_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::TopSubtree,
span: Span,
) -> ExpandResult<tt::TopSubtree> {
let mut tt = tt.clone();
tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis;
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound naked_asm #tt
};
ExpandResult::ok(expanded)
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,

View File

@ -129,7 +129,7 @@ macro_rules! quote {
}
}
}
pub(super) use quote;
pub use quote;
pub trait ToTokenTree {
fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder);

View File

@ -179,10 +179,9 @@ impl Name {
self.symbol.as_str()
}
#[inline]
pub fn display<'a>(
&'a self,
db: &dyn salsa::Database,
db: &dyn crate::db::ExpandDatabase,
edition: Edition,
) -> impl fmt::Display + 'a {
_ = db;

View File

@ -281,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant(
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db);
if body.exprs[body.body_expr] == Expr::Missing {
if matches!(body[body.body_expr], Expr::Missing) {
let prev_idx = loc.index.checked_sub(1);
let value = match prev_idx {
Some(prev_idx) => {
@ -334,7 +334,7 @@ pub(crate) fn eval_to_const(
// Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic.
return unknown_const(infer[expr].clone());
}
if let Expr::Path(p) = &ctx.body.exprs[expr] {
if let Expr::Path(p) = &ctx.body[expr] {
let resolver = &ctx.resolver;
if let Some(c) =
path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())

View File

@ -273,8 +273,9 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::variance::variances_of)]
#[salsa::cycle(
cycle_fn = crate::variance::variances_of_cycle_fn,
cycle_initial = crate::variance::variances_of_cycle_initial,
// cycle_fn = crate::variance::variances_of_cycle_fn,
// cycle_initial = crate::variance::variances_of_cycle_initial,
cycle_result = crate::variance::variances_of_cycle_initial,
)]
fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;

View File

@ -226,11 +226,10 @@ impl<'a> DeclValidator<'a> {
let body = self.db.body(func.into());
let edition = self.edition(func);
let mut pats_replacements = body
.pats
.iter()
.pats()
.filter_map(|(pat_id, pat)| match pat {
Pat::Bind { id, .. } => {
let bind_name = &body.bindings[*id].name;
let bind_name = &body[*id].name;
let mut suggested_text = to_lower_snake_case(bind_name.as_str())?;
if is_raw_identifier(&suggested_text, edition) {
suggested_text.insert_str(0, "r#");

View File

@ -101,7 +101,7 @@ impl ExprValidator {
self.check_for_trailing_return(body.body_expr, &body);
}
for (id, expr) in body.exprs.iter() {
for (id, expr) in body.exprs() {
if let Some((variant, missed_fields, true)) =
record_literal_missing_fields(db, &self.infer, id, expr)
{
@ -132,7 +132,7 @@ impl ExprValidator {
}
}
for (id, pat) in body.pats.iter() {
for (id, pat) in body.pats() {
if let Some((variant, missed_fields, true)) =
record_pattern_missing_fields(db, &self.infer, id, pat)
{
@ -389,7 +389,7 @@ impl ExprValidator {
if !self.validate_lints {
return;
}
match &body.exprs[body_expr] {
match &body[body_expr] {
Expr::Block { statements, tail, .. } => {
let last_stmt = tail.or_else(|| match statements.last()? {
Statement::Expr { expr, .. } => Some(*expr),
@ -428,7 +428,7 @@ impl ExprValidator {
if else_branch.is_none() {
return;
}
if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] {
if let Expr::Block { statements, tail, .. } = &self.body[*then_branch] {
let last_then_expr = tail.or_else(|| match statements.last()? {
Statement::Expr { expr, .. } => Some(*expr),
_ => None,

View File

@ -150,7 +150,7 @@ impl<'a> PatCtxt<'a> {
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
let name = &self.body.bindings[id].name;
let name = &self.body[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {

View File

@ -7,7 +7,7 @@ use either::Either;
use hir_def::{
AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
expr_store::{Body, path::Path},
hir::{AsmOperand, Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
hir::{AsmOperand, Expr, ExprId, ExprOrPatId, InlineAsmKind, Pat, PatId, Statement, UnaryOp},
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
signatures::StaticFlags,
type_ref::Rawness,
@ -217,7 +217,7 @@ impl<'db> UnsafeVisitor<'db> {
}
fn walk_pat(&mut self, current: PatId) {
let pat = &self.body.pats[current];
let pat = &self.body[current];
if self.inside_union_destructure {
match pat {
@ -264,7 +264,7 @@ impl<'db> UnsafeVisitor<'db> {
}
fn walk_expr(&mut self, current: ExprId) {
let expr = &self.body.exprs[current];
let expr = &self.body[current];
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
match expr {
&Expr::Call { callee, .. } => {
@ -284,7 +284,7 @@ impl<'db> UnsafeVisitor<'db> {
self.resolver.reset_to_guard(guard);
}
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
match self.body.exprs[*expr] {
match self.body[*expr] {
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
// see https://github.com/rust-lang/rust/pull/125834.
Expr::Path(_) => return,
@ -315,7 +315,12 @@ impl<'db> UnsafeVisitor<'db> {
self.inside_assignment = old_inside_assignment;
}
Expr::InlineAsm(asm) => {
self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
if asm.kind == InlineAsmKind::Asm {
// `naked_asm!()` requires `unsafe` on the attribute (`#[unsafe(naked)]`),
// and `global_asm!()` doesn't require it at all.
self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
}
asm.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }

View File

@ -795,6 +795,14 @@ fn render_const_scalar(
let Some(bytes) = memory_map.get(addr, size_one * count) else {
return f.write_str("<ref-data-not-available>");
};
let expected_len = count * size_one;
if bytes.len() < expected_len {
never!(
"Memory map size is too small. Expected {expected_len}, got {}",
bytes.len(),
);
return f.write_str("<layout-error>");
}
f.write_str("&[")?;
let mut first = true;
for i in 0..count {
@ -2328,6 +2336,7 @@ impl HirDisplayWithExpressionStore for TypeBound {
store[*path].hir_fmt(f, store)
}
TypeBound::Use(args) => {
write!(f, "use<")?;
let edition = f.edition();
let last = args.len().saturating_sub(1);
for (idx, arg) in args.iter().enumerate() {

View File

@ -273,7 +273,7 @@ impl InferenceContext<'_> {
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
let mut r = Mutability::Not;
self.body.walk_bindings_in_pat(pat, |b| {
if self.body.bindings[b].mode == BindingAnnotation::RefMut {
if self.body[b].mode == BindingAnnotation::RefMut {
r = Mutability::Mut;
}
});

View File

@ -459,7 +459,7 @@ impl InferenceContext<'_> {
expected: &Ty,
decl: Option<DeclContext>,
) -> Ty {
let Binding { mode, .. } = self.body.bindings[binding];
let Binding { mode, .. } = self.body[binding];
let mode = if mode == BindingAnnotation::Unannotated {
default_bm
} else {
@ -639,7 +639,7 @@ impl InferenceContext<'_> {
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
let mut res = false;
body.walk_pats(pat_id, &mut |pat| {
res |= matches!(body[pat], Pat::Bind { id, .. } if body.bindings[id].mode == BindingAnnotation::Ref);
res |= matches!(body[pat], Pat::Bind { id, .. } if body[id].mode == BindingAnnotation::Ref);
});
res
}

View File

@ -2,7 +2,7 @@
use base_db::Crate;
use hir_def::layout::TargetDataLayout;
use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors, AddressSpace};
use rustc_abi::{AddressSpace, AlignFromBytesError, TargetDataLayoutErrors};
use triomphe::Arc;
use crate::db::HirDatabase;

View File

@ -119,8 +119,7 @@ fn eval_expr(
.unwrap();
let hir_body = db.body(function_id.into());
let b = hir_body
.bindings
.iter()
.bindings()
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
.unwrap()
.0;

View File

@ -1018,8 +1018,12 @@ fn check_generic_args_len(
}
let lifetime_args_len = def_generics.len_lifetimes_self();
if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics {
// In generic associated types, we never allow inferring the lifetimes.
if provided_lifetimes_count == 0
&& lifetime_args_len > 0
&& (!lowering_assoc_type_generics || infer_args)
{
// In generic associated types, we never allow inferring the lifetimes, but only in type context, that is
// when `infer_args == false`. In expression/pattern context we always allow inferring them, even for GATs.
match lifetime_elision {
&LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => {
ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path);

View File

@ -1212,10 +1212,9 @@ impl MirSpan {
match *self {
MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
// FIXME: Figure out if this is correct wrt. match ergonomics.
MirSpan::BindingId(binding) => matches!(
body.bindings[binding].mode,
BindingAnnotation::Ref | BindingAnnotation::RefMut
),
MirSpan::BindingId(binding) => {
matches!(body[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
}
MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
}
}

View File

@ -31,8 +31,8 @@ use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
use crate::{
CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner,
MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId,
Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
consteval::{ConstEvalError, intern_const_scalar, try_const_usize},
db::{HirDatabase, InternedClosure},
display::{ClosureStyle, DisplayTarget, HirDisplay},
@ -2195,7 +2195,7 @@ impl Evaluator<'_> {
}
}
}
chalk_ir::TyKind::Array(inner, len) => {
TyKind::Array(inner, len) => {
let len = match try_const_usize(this.db, len) {
Some(it) => it as usize,
None => not_supported!("non evaluatable array len in patching addresses"),
@ -2213,7 +2213,7 @@ impl Evaluator<'_> {
)?;
}
}
chalk_ir::TyKind::Tuple(_, subst) => {
TyKind::Tuple(_, subst) => {
let layout = this.layout(ty)?;
for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
@ -2229,7 +2229,7 @@ impl Evaluator<'_> {
)?;
}
}
chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
TyKind::Adt(adt, subst) => match adt.0 {
AdtId::StructId(s) => {
let data = s.fields(this.db);
let layout = this.layout(ty)?;
@ -2280,6 +2280,10 @@ impl Evaluator<'_> {
}
AdtId::UnionId(_) => (),
},
TyKind::Alias(AliasTy::Projection(proj)) => {
let ty = this.db.normalize_projection(proj.clone(), this.trait_env.clone());
rec(this, bytes, &ty, locals, mm, stack_depth_limit - 1)?;
}
_ => (),
}
Ok(())

View File

@ -321,7 +321,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
current: BasicBlockId,
) -> Result<Option<(Operand, BasicBlockId)>> {
if !self.has_adjustments(expr_id) {
if let Expr::Literal(l) = &self.body.exprs[expr_id] {
if let Expr::Literal(l) = &self.body[expr_id] {
let ty = self.expr_ty_without_adjust(expr_id);
return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
}
@ -411,7 +411,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
place: Place,
mut current: BasicBlockId,
) -> Result<Option<BasicBlockId>> {
match &self.body.exprs[expr_id] {
match &self.body[expr_id] {
Expr::OffsetOf(_) => {
not_supported!("builtin#offset_of")
}
@ -1374,7 +1374,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> {
match &self.body.exprs[*loc] {
match &self.body[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
let owner = self.owner;
@ -1850,7 +1850,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.drop_scopes.last_mut().unwrap().locals.push(local_id);
if let Pat::Bind { id, subpat: None } = self.body[it] {
if matches!(
self.body.bindings[id].mode,
self.body[id].mode,
BindingAnnotation::Unannotated | BindingAnnotation::Mutable
) {
self.result.binding_locals.insert(id, local_id);
@ -1859,7 +1859,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
local_id
}));
// and then rest of bindings
for (id, _) in self.body.bindings.iter() {
for (id, _) in self.body.bindings() {
if !pick_binding(id) {
continue;
}
@ -2126,7 +2126,7 @@ pub fn mir_body_for_closure_query(
.result
.binding_locals
.into_iter()
.filter(|it| ctx.body.binding_owners.get(&it.0).copied() == Some(expr))
.filter(|it| ctx.body.binding_owner(it.0) == Some(expr))
.collect();
if let Some(err) = err {
return Err(MirLowerError::UnresolvedUpvar(err));
@ -2191,7 +2191,7 @@ pub fn lower_to_mir(
// 0 is return local
ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
let binding_picker = |b: BindingId| {
let owner = ctx.body.binding_owners.get(&b).copied();
let owner = ctx.body.binding_owner(b);
if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
};
// 1 to param_len is for params

View File

@ -133,7 +133,7 @@ impl MirLowerCtx<'_> {
}
this.lower_expr_to_some_place_without_adjust(expr_id, current)
};
match &self.body.exprs[expr_id] {
match &self.body[expr_id] {
Expr::Path(p) => {
let resolver_guard =
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);

View File

@ -130,7 +130,7 @@ impl MirLowerCtx<'_> {
.collect::<Vec<_>>()
.into(),
);
Ok(match &self.body.pats[pattern] {
Ok(match &self.body[pattern] {
Pat::Missing => return Err(MirLowerError::IncompletePattern),
Pat::Wild => (current, current_else),
Pat::Tuple { args, ellipsis } => {
@ -436,7 +436,7 @@ impl MirLowerCtx<'_> {
(next, Some(else_target))
}
},
Pat::Lit(l) => match &self.body.exprs[*l] {
Pat::Lit(l) => match &self.body[*l] {
Expr::Literal(l) => {
if mode == MatchingMode::Check {
let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;

View File

@ -219,7 +219,7 @@ impl<'a> MirPrettyCtx<'a> {
fn local_name(&self, local: LocalId) -> LocalName {
match self.local_to_binding.get(local) {
Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
None => LocalName::Unknown(local),
}
}

View File

@ -168,7 +168,7 @@ fn check_impl(
let inference_result = db.infer(def);
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body.pats[pat] {
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match pat_node(&body_source_map, pat, &db) {
@ -316,7 +316,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
}
for (pat, mut ty) in inference_result.type_of_pat.iter() {
if let Pat::Bind { id, .. } = body.pats[pat] {
if let Pat::Bind { id, .. } = body[pat] {
ty = &inference_result.type_of_binding[id];
}
let node = match body_source_map.pat_syntax(pat) {

View File

@ -54,14 +54,14 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
variances.is_empty().not().then(|| Arc::from_iter(variances))
}
pub(crate) fn variances_of_cycle_fn(
_db: &dyn HirDatabase,
_result: &Option<Arc<[Variance]>>,
_count: u32,
_def: GenericDefId,
) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
salsa::CycleRecoveryAction::Iterate
}
// pub(crate) fn variances_of_cycle_fn(
// _db: &dyn HirDatabase,
// _result: &Option<Arc<[Variance]>>,
// _count: u32,
// _def: GenericDefId,
// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> {
// salsa::CycleRecoveryAction::Iterate
// }
pub(crate) fn variances_of_cycle_initial(
db: &dyn HirDatabase,
@ -965,7 +965,7 @@ struct S3<T>(S<T, T>);
struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V);
"#,
expect![[r#"
FixedPoint[T: covariant, U: covariant, V: covariant]
FixedPoint[T: bivariant, U: bivariant, V: bivariant]
"#]],
);
}

View File

@ -2036,7 +2036,7 @@ impl DefWithBody {
)
}
let mol = &borrowck_result.mutability_of_locals;
for (binding_id, binding_data) in body.bindings.iter() {
for (binding_id, binding_data) in body.bindings() {
if binding_data.problems.is_some() {
// We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
continue;
@ -3222,7 +3222,8 @@ impl Macro {
}
}
pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool {
/// Is this `asm!()`, or a variant of it (e.g. `global_asm!()`)?
pub fn is_asm_like(&self, db: &dyn HirDatabase) -> bool {
match self.id {
MacroId::Macro2Id(it) => {
matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm())

View File

@ -677,8 +677,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
let body = self.db.body(to_be_renamed.parent);
let resolver = to_be_renamed.parent.resolver(self.db);
let starting_expr =
body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr);
let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
let mut visitor = RenameConflictsVisitor {
body: &body,
conflicts: FxHashSet::default(),
@ -1776,7 +1775,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
let Some(mac) = self.resolve_macro_call(macro_call) else { return false };
if mac.is_asm_or_global_asm(self.db) {
if mac.is_asm_like(self.db) {
return true;
}

View File

@ -242,11 +242,7 @@ impl<'db> SourceAnalyzer<'db> {
fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
let pat_id = self.pat_id(&pat.clone().into())?;
if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] {
Some(id)
} else {
None
}
if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None }
}
pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> {
@ -995,7 +991,7 @@ impl<'db> SourceAnalyzer<'db> {
let parent_hir_path = path
.parent_path()
.and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator));
let store = collector.store.finish();
let (store, _) = collector.store.finish();
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@ -1204,7 +1200,7 @@ impl<'db> SourceAnalyzer<'db> {
let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
let hir_path =
collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?;
let store = collector.store.finish();
let (store, _) = collector.store.finish();
Some(resolve_hir_path_(
db,
&self.resolver,
@ -1439,9 +1435,11 @@ fn scope_for(
) -> Option<ScopeId> {
node.ancestors_with_macros(db)
.take_while(|it| {
!ast::Item::can_cast(it.kind())
|| ast::MacroCall::can_cast(it.kind())
|| ast::Use::can_cast(it.kind())
let kind = it.kind();
!ast::Item::can_cast(kind)
|| ast::MacroCall::can_cast(kind)
|| ast::Use::can_cast(kind)
|| ast::AsmExpr::can_cast(kind)
})
.filter_map(|it| it.map(ast::Expr::cast).transpose())
.filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr())

View File

@ -125,6 +125,13 @@ impl<'a> SymbolCollector<'a> {
}
ModuleDefId::AdtId(AdtId::EnumId(id)) => {
this.push_decl(id, name, false, None);
let enum_name = this.db.enum_signature(id).name.as_str().to_smolstr();
this.with_container_name(Some(enum_name), |this| {
let variants = id.enum_variants(this.db);
for (variant_id, variant_name, _) in &variants.variants {
this.push_decl(*variant_id, variant_name, true, None);
}
});
}
ModuleDefId::AdtId(AdtId::UnionId(id)) => {
this.push_decl(id, name, false, None);

View File

@ -1,8 +1,8 @@
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
AstNode, SyntaxNode,
ast::{self, HasName, Name},
ted,
ast::{self, HasName, Name, syntax_factory::SyntaxFactory},
syntax_editor::SyntaxEditor,
};
use crate::{
@ -121,34 +121,36 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
// Rename `extracted` with `binding` in `pat`.
fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode {
let syntax = pat.syntax().clone_for_update();
let syntax = pat.syntax().clone_subtree();
let mut editor = SyntaxEditor::new(syntax.clone());
let make = SyntaxFactory::with_mappings();
let extracted = extracted
.iter()
.map(|e| syntax.covering_element(e.syntax().text_range()))
.map(|e| e.syntax().text_range() - pat.syntax().text_range().start())
.map(|r| syntax.covering_element(r))
.collect::<Vec<_>>();
for extracted_syntax in extracted {
// If `extracted` variable is a record field, we should rename it to `binding`,
// otherwise we just need to replace `extracted` with `binding`.
if let Some(record_pat_field) =
extracted_syntax.ancestors().find_map(ast::RecordPatField::cast)
{
if let Some(name_ref) = record_pat_field.field_name() {
ted::replace(
editor.replace(
record_pat_field.syntax(),
ast::make::record_pat_field(
ast::make::name_ref(&name_ref.text()),
binding.clone(),
make.record_pat_field(
make.name_ref(&name_ref.text()),
binding.clone_for_update(),
)
.syntax()
.clone_for_update(),
.syntax(),
);
}
} else {
ted::replace(extracted_syntax, binding.clone().syntax().clone_for_update());
editor.replace(extracted_syntax, binding.syntax().clone_for_update());
}
}
syntax
editor.add_mappings(make.finish_with_mappings());
editor.finish().new_root().clone()
}
#[cfg(test)]

View File

@ -4,7 +4,8 @@ use itertools::Itertools;
use syntax::{
SyntaxKind,
ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
match_ast, ted,
match_ast,
syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder};
@ -97,11 +98,14 @@ fn edit_struct_def(
// Note that we don't need to consider macro files in this function because this is
// currently not triggered for struct definitions inside macro calls.
let tuple_fields = record_fields.fields().filter_map(|f| {
let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update();
ted::insert_all(
ted::Position::first_child_of(field.syntax()),
let field = ast::make::tuple_field(f.visibility(), f.ty()?);
let mut editor = SyntaxEditor::new(field.syntax().clone());
editor.insert_all(
Position::first_child_of(field.syntax()),
f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
);
let field_syntax = editor.finish().new_root().clone();
let field = ast::TupleField::cast(field_syntax)?;
Some(field)
});
let tuple_fields = ast::make::tuple_field_list(tuple_fields);
@ -1086,8 +1090,7 @@ pub struct $0Foo {
}
"#,
r#"
pub struct Foo(#[my_custom_attr]
u32);
pub struct Foo(#[my_custom_attr]u32);
"#,
);
}

View File

@ -2,7 +2,7 @@ use ide_db::famous_defs::FamousDefs;
use stdx::format_to;
use syntax::{
AstNode,
ast::{self, HasGenericParams, HasName, Impl, make},
ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl, make},
};
use crate::{
@ -88,20 +88,19 @@ fn generate_trait_impl_text_from_impl(
let generic_params = impl_.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
// remove defaults since they can't be specified in impls
match param {
let param = match param {
ast::TypeOrConstParam::Type(param) => {
let param = param.clone_for_update();
param.remove_default();
let param = make::type_param(param.name()?, param.type_bound_list());
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
let param = param.clone_for_update();
param.remove_default();
let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
}
};
Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))

View File

@ -294,7 +294,7 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI
let self_expr = make::ext::expr_self();
let lhs = make::expr_field(self_expr, field_name);
let rhs = make::expr_path(make::ext::ident_path(field_name));
let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs));
let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs).into());
let body = make::block_expr([assign_stmt.into()], None);
// Make the setter fn

View File

@ -1,14 +1,14 @@
use syntax::{
ast::{self, AstNode, HasName, edit_in_place::Indent, make},
ted,
syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists, utils};
fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) {
fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) {
let indent = nominal.indent_level();
ted::insert_all_raw(
ted::Position::after(nominal.syntax()),
editor.insert_all(
Position::after(nominal.syntax()),
vec![
// Add a blank line after the ADT, and indentation for the impl to match the ADT
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
@ -51,14 +51,17 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
// Generate the impl
let impl_ = utils::generate_impl(&nominal);
let mut editor = edit.make_editor(nominal.syntax());
// Add a tabstop after the left curly brace
if let Some(cap) = ctx.config.snippet_cap {
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
edit.add_tabstop_after_token(cap, l_curly);
let tabstop = edit.make_tabstop_after(cap);
editor.add_annotation(l_curly, tabstop);
}
}
insert_impl(impl_, &edit.make_mut(nominal));
insert_impl(&mut editor, &impl_, &nominal);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}
@ -97,18 +100,22 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// Generate the impl
let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder());
let mut editor = edit.make_editor(nominal.syntax());
// Make the trait type a placeholder snippet
if let Some(cap) = ctx.config.snippet_cap {
if let Some(trait_) = impl_.trait_() {
edit.add_placeholder_snippet(cap, trait_);
let placeholder = edit.make_placeholder_snippet(cap);
editor.add_annotation(trait_.syntax(), placeholder);
}
if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) {
edit.add_tabstop_after_token(cap, l_curly);
let tabstop = edit.make_tabstop_after(cap);
editor.add_annotation(l_curly, tabstop);
}
}
insert_impl(impl_, &edit.make_mut(nominal));
insert_impl(&mut editor, &impl_, &nominal);
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View File

@ -1,6 +1,6 @@
use ide_db::famous_defs::FamousDefs;
use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait};
use syntax::{
AstNode,
AstNode, T,
ast::{self, edit_in_place::Indent, make},
ted,
};
@ -32,7 +32,7 @@ use crate::{AssistContext, AssistId, Assists};
//
// $0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
// fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
// &self[index as usize]
// &mut self[index as usize]
// }
// }
//
@ -48,36 +48,34 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let indent = impl_def.indent_level();
let trait_ = impl_def.trait_()?;
if let ast::Type::PathType(trait_path) = trait_ {
let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
let scope = ctx.sema.scope(trait_path.syntax())?;
if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
return None;
}
}
let ast::Type::PathType(path) = impl_def.trait_()? else {
return None;
};
let trait_name = path.path()?.segment()?.name_ref()?;
let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?;
let famous = FamousDefs(&ctx.sema, scope.krate());
let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
let trait_new = get_trait_mut(&trait_, famous)?;
// Index -> IndexMut
let index_trait = impl_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.find(|it| it.text() == "Index")?;
ted::replace(
index_trait.syntax(),
make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(),
);
ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax());
// index -> index_mut
let trait_method_name = impl_def
let (trait_method_name, new_trait_method_name) = impl_def
.syntax()
.descendants()
.filter_map(ast::Name::cast)
.find(|it| it.text() == "index")?;
ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax());
.find_map(process_method_name)?;
ted::replace(
trait_method_name.syntax(),
make::name(new_trait_method_name).clone_for_update().syntax(),
);
let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?;
ted::remove(type_alias.syntax());
if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) {
ted::remove(type_alias.syntax());
}
// &self -> &mut self
let mut_self_param = make::mut_self_param();
@ -87,15 +85,14 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
// &Self::Output -> &mut Self::Output
let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
ted::replace(
ret_type.syntax(),
make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(),
);
let new_ret_type = process_ret_type(&ret_type)?;
ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax());
let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
ast::AssocItem::Fn(f) => Some(f),
_ => None,
})?;
let _ = process_ref_mut(&fn_);
let assoc_list = make::assoc_item_list().clone_for_update();
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
@ -104,7 +101,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let target = impl_def.syntax().text_range();
acc.add(
AssistId::generate("generate_mut_trait_impl"),
"Generate `IndexMut` impl from this `Index` trait",
format!("Generate `{trait_new}` impl from this `{trait_name}` trait"),
target,
|edit| {
edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}"));
@ -112,6 +109,52 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
)
}
/// If the function body's tail expression is a shared borrow (`&expr`),
/// mutates the tree in place to turn it into `&mut expr`.
///
/// NOTE(review): this always returns `None` — the `Option` return exists
/// only so `?` can be used for early exit, and the visible caller discards
/// the result (`let _ = process_ref_mut(&fn_);`). Presumably intentional;
/// confirm before relying on the return value.
fn process_ref_mut(fn_: &ast::Fn) -> Option<()> {
    let expr = fn_.body()?.tail_expr()?;
    match &expr {
        // Only rewrite a plain `&...` borrow; an existing `&mut ...` is left alone.
        ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => {
            // Insert `mut ` right after the `&` token, in place.
            ted::insert_all_raw(
                ted::Position::after(ref_expr.amp_token()?),
                vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()],
            );
        }
        _ => {}
    }
    None
}
/// Maps an immutable well-known trait to the name of its mutable
/// counterpart: `Index` -> `IndexMut`, `AsRef` -> `AsMut`,
/// `Borrow` -> `BorrowMut`. Returns `None` for any other trait.
fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> {
    let wanted = Some(apply_trait);
    // Check each famous trait in turn, resolving lazily so we stop at the
    // first match.
    if famous.core_convert_Index().as_ref() == wanted {
        return Some("IndexMut");
    }
    if famous.core_convert_AsRef().as_ref() == wanted {
        return Some("AsMut");
    }
    if famous.core_borrow_Borrow().as_ref() == wanted {
        return Some("BorrowMut");
    }
    None
}
/// Returns the method name paired with its mutable-variant spelling
/// (`index` -> `index_mut`, `as_ref` -> `as_mut`, `borrow` -> `borrow_mut`),
/// or `None` if the name is not one of the recognized accessor methods.
fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> {
    let replacement = match &*name.text() {
        "borrow" => "borrow_mut",
        "as_ref" => "as_mut",
        "index" => "index_mut",
        // Anything else is not an accessor we know how to "mut-ify".
        _ => return None,
    };
    Some((name, replacement))
}
/// Converts a `-> &T` return type into its mutable form `-> &mut T`.
///
/// Returns `None` when the return type is missing or is not a reference
/// type (nothing to make mutable).
fn process_ret_type(ref_ty: &ast::RetType) -> Option<ast::Type> {
    match ref_ty.ty()? {
        // `make::ty_ref(_, true)` builds the `&mut`-qualified reference.
        ast::Type::RefType(inner) => Some(make::ty_ref(inner.ty()?, true)),
        _ => None,
    }
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@ -139,7 +182,7 @@ pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
&self[index as usize]
&mut self[index as usize]
}
}
@ -186,6 +229,35 @@ impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
var_name
}
}
"#,
);
check_assist(
generate_mut_trait_impl,
r#"
//- minicore: as_ref
struct Foo(i32);
impl core::convert::AsRef$0<i32> for Foo {
fn as_ref(&self) -> &i32 {
&self.0
}
}
"#,
r#"
struct Foo(i32);
$0impl core::convert::AsMut<i32> for Foo {
fn as_mut(&mut self) -> &mut i32 {
&mut self.0
}
}
impl core::convert::AsRef<i32> for Foo {
fn as_ref(&self) -> &i32 {
&self.0
}
}
"#,
);
}
@ -285,6 +357,14 @@ mod foo {
pub trait Index<Idx: ?Sized> {}
impl<T> Index$0<i32> for [T; 3] {}
"#,
);
check_assist_not_applicable(
generate_mut_trait_impl,
r#"
pub trait AsRef<T: ?Sized> {}
impl AsRef$0<i32> for [T; 3] {}
"#,
);
}

View File

@ -1,5 +1,6 @@
use ide_db::{
imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor,
imports::import_assets::item_for_path_search, syntax_helpers::suggest_name::NameGenerator,
use_trivial_constructor::use_trivial_constructor,
};
use syntax::{
ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make},
@ -35,10 +36,30 @@ use crate::{
pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
// We want to only apply this to non-union structs with named fields
let field_list = match strukt.kind() {
StructKind::Record(named) => named,
_ => return None,
StructKind::Record(named) => {
named.fields().filter_map(|f| Some((f.name()?, f.ty()?))).collect::<Vec<_>>()
}
StructKind::Tuple(tuple) => {
let mut name_generator = NameGenerator::default();
tuple
.fields()
.enumerate()
.filter_map(|(i, f)| {
let ty = f.ty()?;
let name = match name_generator.for_type(
&ctx.sema.resolve_type(&ty)?,
ctx.db(),
ctx.edition(),
) {
Some(name) => name,
None => name_generator.suggest_name(&format!("_{i}")),
};
Some((make::name(name.as_str()), f.ty()?))
})
.collect::<Vec<_>>()
}
StructKind::Unit => return None,
};
// Return early if we've found an existing new fn
@ -50,11 +71,9 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
let target = strukt.syntax().text_range();
acc.add(AssistId::generate("generate_new"), "Generate `new`", target, |builder| {
let trivial_constructors = field_list
.fields()
.map(|f| {
let name = f.name()?;
let ty = ctx.sema.resolve_type(&f.ty()?)?;
.iter()
.map(|(name, ty)| {
let ty = ctx.sema.resolve_type(ty)?;
let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?));
@ -73,34 +92,44 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
edition,
)?;
Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr)))
Some((make::name_ref(&name.text()), Some(expr)))
})
.collect::<Vec<_>>();
let params = field_list.fields().enumerate().filter_map(|(i, f)| {
let params = field_list.iter().enumerate().filter_map(|(i, (name, ty))| {
if trivial_constructors[i].is_none() {
let name = f.name()?;
let ty = f.ty()?;
Some(make::param(make::ident_pat(false, false, name).into(), ty))
Some(make::param(make::ident_pat(false, false, name.clone()).into(), ty.clone()))
} else {
None
}
});
let params = make::param_list(None, params);
let fields = field_list.fields().enumerate().filter_map(|(i, f)| {
let constructor = trivial_constructors[i].clone();
if constructor.is_some() {
let fields = field_list.iter().enumerate().map(|(i, (name, _))| {
if let Some(constructor) = trivial_constructors[i].clone() {
constructor
} else {
Some(make::record_expr_field(make::name_ref(&f.name()?.text()), None))
(make::name_ref(&name.text()), None)
}
});
let fields = make::record_expr_field_list(fields);
let record_expr = make::record_expr(make::ext::ident_path("Self"), fields);
let body = make::block_expr(None, Some(record_expr.into()));
let tail_expr: ast::Expr = match strukt.kind() {
StructKind::Record(_) => {
let fields = fields.map(|(name, expr)| make::record_expr_field(name, expr));
let fields = make::record_expr_field_list(fields);
make::record_expr(make::ext::ident_path("Self"), fields).into()
}
StructKind::Tuple(_) => {
let args = fields.map(|(arg, expr)| {
let arg = || make::expr_path(make::path_unqualified(make::path_segment(arg)));
expr.unwrap_or_else(arg)
});
let arg_list = make::arg_list(args);
make::expr_call(make::expr_path(make::ext::ident_path("Self")), arg_list).into()
}
StructKind::Unit => unreachable!(),
};
let body = make::block_expr(None, tail_expr.into());
let ret_type = make::ret_type(make::ty_path(make::ext::ident_path("Self")));
@ -120,8 +149,35 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
.clone_for_update();
fn_.indent(1.into());
// Add a tabstop before the name
if let Some(cap) = ctx.config.snippet_cap {
match strukt.kind() {
StructKind::Tuple(_) => {
let struct_args = fn_
.body()
.unwrap()
.syntax()
.descendants()
.filter(|it| syntax::ast::ArgList::can_cast(it.kind()))
.flat_map(|args| args.children())
.filter(|it| syntax::ast::PathExpr::can_cast(it.kind()))
.enumerate()
.filter_map(|(i, node)| {
if trivial_constructors[i].is_none() { Some(node) } else { None }
});
if let Some(fn_params) = fn_.param_list() {
for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) {
if let Some(fn_pat) = fn_param.pat() {
let fn_pat = fn_pat.syntax().clone();
builder
.add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]);
}
}
}
}
_ => {}
}
// Add a tabstop before the name
if let Some(name) = fn_.name() {
builder.add_tabstop_before(cap, name);
}
@ -157,7 +213,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
}
#[cfg(test)]
mod tests {
mod record_tests {
use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
use super::*;
@ -695,3 +751,308 @@ impl<T> Source<T> {
);
}
}
#[cfg(test)]
mod tuple_tests {
use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
use super::*;
#[test]
fn test_generate_new_with_zst_fields() {
check_assist(
generate_new,
r#"
struct Empty;
struct Foo(Empty$0);
"#,
r#"
struct Empty;
struct Foo(Empty);
impl Foo {
fn $0new() -> Self {
Self(Empty)
}
}
"#,
);
check_assist(
generate_new,
r#"
struct Empty;
struct Foo(String, Empty$0);
"#,
r#"
struct Empty;
struct Foo(String, Empty);
impl Foo {
fn $0new(${1:_0}: String) -> Self {
Self(${1:_0}, Empty)
}
}
"#,
);
check_assist(
generate_new,
r#"
enum Empty { Bar }
struct Foo(Empty$0);
"#,
r#"
enum Empty { Bar }
struct Foo(Empty);
impl Foo {
fn $0new() -> Self {
Self(Empty::Bar)
}
}
"#,
);
// make sure the assist only works on unit variants
check_assist(
generate_new,
r#"
struct Empty {}
struct Foo(Empty$0);
"#,
r#"
struct Empty {}
struct Foo(Empty);
impl Foo {
fn $0new(${1:empty}: Empty) -> Self {
Self(${1:empty})
}
}
"#,
);
check_assist(
generate_new,
r#"
enum Empty { Bar {} }
struct Foo(Empty$0);
"#,
r#"
enum Empty { Bar {} }
struct Foo(Empty);
impl Foo {
fn $0new(${1:empty}: Empty) -> Self {
Self(${1:empty})
}
}
"#,
);
}
#[test]
fn test_generate_new() {
check_assist(
generate_new,
r#"
struct Foo($0);
"#,
r#"
struct Foo();
impl Foo {
fn $0new() -> Self {
Self()
}
}
"#,
);
check_assist(
generate_new,
r#"
struct Foo<T: Clone>($0);
"#,
r#"
struct Foo<T: Clone>();
impl<T: Clone> Foo<T> {
fn $0new() -> Self {
Self()
}
}
"#,
);
check_assist(
generate_new,
r#"
struct Foo<'a, T: Foo<'a>>($0);
"#,
r#"
struct Foo<'a, T: Foo<'a>>();
impl<'a, T: Foo<'a>> Foo<'a, T> {
fn $0new() -> Self {
Self()
}
}
"#,
);
check_assist(
generate_new,
r#"
struct Foo(String$0);
"#,
r#"
struct Foo(String);
impl Foo {
fn $0new(${1:_0}: String) -> Self {
Self(${1:_0})
}
}
"#,
);
check_assist(
generate_new,
r#"
struct Vec<T> { };
struct Foo(String, Vec<i32>$0);
"#,
r#"
struct Vec<T> { };
struct Foo(String, Vec<i32>);
impl Foo {
fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self {
Self(${1:_0}, ${2:items})
}
}
"#,
);
}
#[test]
fn check_that_visibility_modifiers_dont_get_brought_in() {
check_assist(
generate_new,
r#"
struct Vec<T> { };
struct Foo(pub String, pub Vec<i32>$0);
"#,
r#"
struct Vec<T> { };
struct Foo(pub String, pub Vec<i32>);
impl Foo {
fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self {
Self(${1:_0}, ${2:items})
}
}
"#,
);
}
#[test]
fn generate_new_not_applicable_if_fn_exists() {
check_assist_not_applicable(
generate_new,
r#"
struct Foo($0);
impl Foo {
fn new() -> Self {
Self
}
}
"#,
);
check_assist_not_applicable(
generate_new,
r#"
struct Foo($0);
impl Foo {
fn New() -> Self {
Self
}
}
"#,
);
}
#[test]
fn generate_new_target() {
check_assist_target(
generate_new,
r#"
struct SomeThingIrrelevant;
/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>>($0);
struct EvenMoreIrrelevant;
"#,
"/// Has a lifetime parameter
struct Foo<'a, T: Foo<'a>>();",
);
}
#[test]
fn test_unrelated_new() {
check_assist(
generate_new,
r#"
pub struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}
impl<N: AstNode> AstId<N> {
pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
AstId { file_id, file_ast_id }
}
}
pub struct Source<T>(pub HirFileId,$0 pub T);
impl<T> Source<T> {
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
Source(self.file_id, f(self.ast))
}
}
"#,
r#"
pub struct AstId<N: AstNode> {
file_id: HirFileId,
file_ast_id: FileAstId<N>,
}
impl<N: AstNode> AstId<N> {
pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
AstId { file_id, file_ast_id }
}
}
pub struct Source<T>(pub HirFileId, pub T);
impl<T> Source<T> {
pub fn $0new(${1:_0}: HirFileId, ${2:_1}: T) -> Self {
Self(${1:_0}, ${2:_1})
}
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
Source(self.file_id, f(self.ast))
}
}
"#,
);
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,8 @@
use syntax::{
AstNode,
ast::{self, make},
ted,
algo::find_node_at_range,
ast::{self, syntax_factory::SyntaxFactory},
syntax_editor::SyntaxEditor,
};
use crate::{
@ -66,33 +67,51 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) ->
return None;
}
}
let target = tgt.syntax().text_range();
let edit_tgt = tgt.syntax().clone_subtree();
let assignments: Vec<_> = collector
.assignments
.into_iter()
.filter_map(|(stmt, rhs)| {
Some((
find_node_at_range::<ast::BinExpr>(
&edit_tgt,
stmt.syntax().text_range() - target.start(),
)?,
find_node_at_range::<ast::Expr>(
&edit_tgt,
rhs.syntax().text_range() - target.start(),
)?,
))
})
.collect();
let mut editor = SyntaxEditor::new(edit_tgt);
for (stmt, rhs) in assignments {
let mut stmt = stmt.syntax().clone();
if let Some(parent) = stmt.parent() {
if ast::ExprStmt::cast(parent.clone()).is_some() {
stmt = parent.clone();
}
}
editor.replace(stmt, rhs.syntax());
}
let new_tgt_root = editor.finish().new_root().clone();
let new_tgt = ast::Expr::cast(new_tgt_root)?;
acc.add(
AssistId::refactor_extract("pull_assignment_up"),
"Pull assignment up",
tgt.syntax().text_range(),
target,
move |edit| {
let assignments: Vec<_> = collector
.assignments
.into_iter()
.map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update()))
.collect();
let make = SyntaxFactory::with_mappings();
let mut editor = edit.make_editor(tgt.syntax());
let assign_expr = make.expr_assignment(collector.common_lhs, new_tgt.clone());
let assign_stmt = make.expr_stmt(assign_expr.into());
let tgt = edit.make_mut(tgt);
for (stmt, rhs) in assignments {
let mut stmt = stmt.syntax().clone();
if let Some(parent) = stmt.parent() {
if ast::ExprStmt::cast(parent.clone()).is_some() {
stmt = parent.clone();
}
}
ted::replace(stmt, rhs.syntax());
}
let assign_expr = make::expr_assignment(collector.common_lhs, tgt.clone());
let assign_stmt = make::expr_stmt(assign_expr);
ted::replace(tgt.syntax(), assign_stmt.syntax().clone_for_update());
editor.replace(tgt.syntax(), assign_stmt.syntax());
editor.add_mappings(make.finish_with_mappings());
edit.add_file_edits(ctx.vfs_file_id(), editor);
},
)
}

View File

@ -1,8 +1,9 @@
use itertools::Itertools;
use syntax::{
Edition, NodeOrToken, SyntaxElement, T, TextRange, TextSize,
ast::{self, AstNode, AstToken, make},
match_ast, ted,
Edition, NodeOrToken, SyntaxNode, SyntaxToken, T,
ast::{self, AstNode, make},
match_ast,
syntax_editor::{Position, SyntaxEditor},
};
use crate::{AssistContext, AssistId, Assists};
@ -40,21 +41,23 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let replacements =
macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
acc.add(
AssistId::quick_fix("remove_dbg"),
"Remove dbg!()",
replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?,
|builder| {
for (range, expr) in replacements {
if let Some(expr) = expr {
builder.replace(range, expr.to_string());
} else {
builder.delete(range);
}
let target = replacements
.iter()
.flat_map(|(node_or_token, _)| node_or_token.iter())
.map(|t| t.text_range())
.reduce(|acc, range| acc.cover(range))?;
acc.add(AssistId::quick_fix("remove_dbg"), "Remove dbg!()", target, |builder| {
let mut editor = builder.make_editor(ctx.source_file().syntax());
for (range, expr) in replacements {
if let Some(expr) = expr {
editor.insert(Position::before(range[0].clone()), expr.syntax().clone_for_update());
}
},
)
for node_or_token in range {
editor.delete(node_or_token);
}
}
builder.add_file_edits(ctx.vfs_file_id(), editor);
})
}
/// Returns `None` when either
@ -63,7 +66,9 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
/// - (`macro_expr` has no parent - is that possible?)
///
/// Returns `Some(_, None)` when the macro call should just be removed.
fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Option<ast::Expr>)> {
fn compute_dbg_replacement(
macro_expr: ast::MacroExpr,
) -> Option<(Vec<NodeOrToken<SyntaxNode, SyntaxToken>>, Option<ast::Expr>)> {
let macro_call = macro_expr.macro_call()?;
let tt = macro_call.token_tree()?;
let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
@ -88,22 +93,22 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
match_ast! {
match parent {
ast::StmtList(_) => {
let range = macro_expr.syntax().text_range();
let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
Some(start) => range.cover_offset(start),
None => range,
};
(range, None)
let mut replace = vec![macro_expr.syntax().clone().into()];
if let Some(prev_sibling) = macro_expr.syntax().prev_sibling_or_token()
&& prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE {
replace.push(prev_sibling);
}
(replace, None)
},
ast::ExprStmt(it) => {
let range = it.syntax().text_range();
let range = match whitespace_start(it.syntax().prev_sibling_or_token()) {
Some(start) => range.cover_offset(start),
None => range,
};
(range, None)
let mut replace = vec![it.syntax().clone().into()];
if let Some(prev_sibling) = it.syntax().prev_sibling_or_token()
&& prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE {
replace.push(prev_sibling);
}
(replace, None)
},
_ => (macro_call.syntax().text_range(), Some(make::ext::expr_unit())),
_ => (vec![macro_call.syntax().clone().into()], Some(make::ext::expr_unit())),
}
}
}
@ -147,13 +152,13 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt
};
let expr = replace_nested_dbgs(expr.clone());
let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() };
(macro_call.syntax().text_range(), Some(expr))
(vec![macro_call.syntax().clone().into()], Some(expr))
}
// dbg!(expr0, expr1, ...)
exprs => {
let exprs = exprs.iter().cloned().map(replace_nested_dbgs);
let expr = make::expr_tuple(exprs);
(macro_call.syntax().text_range(), Some(expr.into()))
(vec![macro_call.syntax().clone().into()], Some(expr.into()))
}
})
}
@ -178,8 +183,8 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
return replaced;
}
let expanded = expanded.clone_for_update();
let expanded = expanded.clone_subtree();
let mut editor = SyntaxEditor::new(expanded.syntax().clone());
// We need to collect to avoid mutation during traversal.
let macro_exprs: Vec<_> =
expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect();
@ -191,17 +196,13 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr {
};
if let Some(expr) = expr_opt {
ted::replace(mac.syntax(), expr.syntax().clone_for_update());
editor.replace(mac.syntax(), expr.syntax().clone_for_update());
} else {
ted::remove(mac.syntax());
editor.delete(mac.syntax());
}
}
expanded
}
fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
let expanded_syntax = editor.finish().new_root().clone();
ast::Expr::cast(expanded_syntax).unwrap()
}
#[cfg(test)]

View File

@ -64,13 +64,12 @@ pub(crate) fn replace_is_method_with_if_let_method(
let pat = make.tuple_struct_pat(make.ident_path(text), [var_pat.into()]);
let let_expr = make.expr_let(pat.into(), receiver);
if let Some(cap) = ctx.config.snippet_cap {
if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
if let Some(first_var) = pat.fields().next() {
let placeholder = edit.make_placeholder_snippet(cap);
editor.add_annotation(first_var.syntax(), placeholder);
}
}
if let Some(cap) = ctx.config.snippet_cap
&& let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat()
&& let Some(first_var) = pat.fields().next()
{
let placeholder = edit.make_placeholder_snippet(cap);
editor.add_annotation(first_var.syntax(), placeholder);
}
editor.replace(call_expr.syntax(), let_expr.syntax());

View File

@ -172,6 +172,7 @@ mod handlers {
mod generate_is_empty_from_len;
mod generate_mut_trait_impl;
mod generate_new;
mod generate_single_field_struct_from;
mod generate_trait_from_impl;
mod inline_call;
mod inline_const_as_literal;
@ -305,6 +306,7 @@ mod handlers {
generate_mut_trait_impl::generate_mut_trait_impl,
generate_new::generate_new,
generate_trait_from_impl::generate_trait_from_impl,
generate_single_field_struct_from::generate_single_field_struct_from,
inline_call::inline_call,
inline_call::inline_into_callers,
inline_const_as_literal::inline_const_as_literal,

View File

@ -1933,7 +1933,7 @@ pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
&self[index as usize]
&mut self[index as usize]
}
}
@ -1994,6 +1994,34 @@ impl Person {
)
}
#[test]
fn doctest_generate_single_field_struct_from() {
check_doc_test(
"generate_single_field_struct_from",
r#####"
//- minicore: from, phantom_data
use core::marker::PhantomData;
struct $0Foo<T> {
id: i32,
_phantom_data: PhantomData<T>,
}
"#####,
r#####"
use core::marker::PhantomData;
struct Foo<T> {
id: i32,
_phantom_data: PhantomData<T>,
}
impl<T> From<i32> for Foo<T> {
fn from(id: i32) -> Self {
Self { id, _phantom_data: PhantomData }
}
}
"#####,
)
}
#[test]
fn doctest_generate_trait_from_impl() {
check_doc_test(

View File

@ -594,12 +594,10 @@ fn generate_impl_text_inner(
let generic_params = adt.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
match param {
let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
let param = match param {
ast::TypeOrConstParam::Type(param) => {
let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
param.remove_default();
let mut bounds =
param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect());
if let Some(trait_) = trait_text {
@ -610,17 +608,16 @@ fn generate_impl_text_inner(
}
};
// `{ty_param}: {bounds}`
let param =
make::type_param(param.name().unwrap(), make::type_bound_list(bounds));
let param = make::type_param(param.name()?, make::type_bound_list(bounds));
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
param.remove_default();
let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
}
};
Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
@ -695,12 +692,10 @@ fn generate_impl_inner(
let generic_params = adt.generic_param_list().map(|generic_params| {
let lifetime_params =
generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
match param {
let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
let param = match param {
ast::TypeOrConstParam::Type(param) => {
let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
param.remove_default();
let mut bounds =
param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect());
if let Some(trait_) = &trait_ {
@ -711,17 +706,16 @@ fn generate_impl_inner(
}
};
// `{ty_param}: {bounds}`
let param =
make::type_param(param.name().unwrap(), make::type_bound_list(bounds));
let param = make::type_param(param.name()?, make::type_bound_list(bounds));
ast::GenericParam::TypeParam(param)
}
ast::TypeOrConstParam::Const(param) => {
let param = param.clone_for_update();
// remove defaults since they can't be specified in impls
param.remove_default();
let param = make::const_param(param.name()?, param.ty()?);
ast::GenericParam::ConstParam(param)
}
}
};
Some(param)
});
make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
@ -749,16 +743,23 @@ fn generate_impl_inner(
.clone_for_update();
// Copy any cfg attrs from the original adt
let cfg_attrs = adt
.attrs()
.filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false));
for attr in cfg_attrs {
impl_.add_attr(attr.clone_for_update());
}
add_cfg_attrs_to(adt, &impl_);
impl_
}
pub(crate) fn add_cfg_attrs_to<T, U>(from: &T, to: &U)
where
T: HasAttrs,
U: AttrsOwnerEdit,
{
let cfg_attrs =
from.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg"));
for attr in cfg_attrs {
to.add_attr(attr.clone_for_update());
}
}
pub(crate) fn add_method_to_adt(
builder: &mut SourceChangeBuilder,
adt: &ast::Adt,

View File

@ -37,6 +37,7 @@ use ide_db::{
SymbolKind, documentation::HasDocs, path_transform::PathTransform,
syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items,
};
use syntax::ast::HasGenericParams;
use syntax::{
AstNode, SmolStr, SyntaxElement, SyntaxKind, T, TextRange, ToSmolStr,
ast::{self, HasGenericArgs, HasTypeBounds, edit_in_place::AttrsOwnerEdit, make},
@ -388,6 +389,12 @@ fn add_type_alias_impl(
{
end
} else if let Some(end) = transformed_ty.eq_token().map(|tok| tok.text_range().start())
{
end
} else if let Some(end) = transformed_ty
.where_clause()
.and_then(|wc| wc.where_token())
.map(|tok| tok.text_range().start())
{
end
} else if let Some(end) =
@ -400,17 +407,29 @@ fn add_type_alias_impl(
let len = end - start;
let mut decl = transformed_ty.syntax().text().slice(..len).to_string();
if !decl.ends_with(' ') {
decl.push(' ');
}
decl.push_str("= ");
decl.truncate(decl.trim_end().len());
decl.push_str(" = ");
let wc = transformed_ty
.where_clause()
.map(|wc| {
let ws = wc
.where_token()
.and_then(|it| it.prev_token())
.filter(|token| token.kind() == SyntaxKind::WHITESPACE)
.map(|token| token.to_string())
.unwrap_or_else(|| " ".into());
format!("{ws}{wc}")
})
.unwrap_or_default();
match ctx.config.snippet_cap {
Some(cap) => {
let snippet = format!("{decl}$0;");
let snippet = format!("{decl}$0{wc};");
item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
}
None => {
decl.push_str(&wc);
item.text_edit(TextEdit::replace(replacement_range, decl));
}
};
@ -1437,6 +1456,30 @@ trait Tr<'b> {
impl<'b> Tr<'b> for () {
type Ty<'a: 'b, T: Copy, const C: usize> = $0;
}
"#,
);
}
#[test]
fn includes_where_clause() {
check_edit(
"type Ty",
r#"
trait Tr {
type Ty where Self: Copy;
}
impl Tr for () {
$0
}
"#,
r#"
trait Tr {
type Ty where Self: Copy;
}
impl Tr for () {
type Ty = $0 where Self: Copy;
}
"#,
);
}

View File

@ -458,6 +458,33 @@ type O = $0;
r"
struct A;
trait B {
type O<'a>
where
Self: 'a;
}
impl B for A {
$0
}
",
r#"
struct A;
trait B {
type O<'a>
where
Self: 'a;
}
impl B for A {
type O<'a> = $0
where
Self: 'a;
}
"#,
);
check_edit(
"type O",
r"
struct A;
trait B {
type O: ?Sized = u32;
}
impl B for A {

View File

@ -10,6 +10,40 @@
},
},
[
FileSymbol {
name: "A",
def: Variant(
Variant {
id: EnumVariantId(
7800,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
),
),
ptr: SyntaxNodePtr {
kind: VARIANT,
range: 201..202,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
range: 201..202,
},
),
},
container_name: Some(
"Enum",
),
is_alias: false,
is_assoc: true,
is_import: false,
do_not_complete: Yes,
},
FileSymbol {
name: "Alias",
def: TypeAlias(
@ -42,6 +76,40 @@
is_import: false,
do_not_complete: Yes,
},
FileSymbol {
name: "B",
def: Variant(
Variant {
id: EnumVariantId(
7801,
),
},
),
loc: DeclarationLocation {
hir_file_id: FileId(
EditionedFileId(
Id(2000),
),
),
ptr: SyntaxNodePtr {
kind: VARIANT,
range: 204..205,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
range: 204..205,
},
),
},
container_name: Some(
"Enum",
),
is_alias: false,
is_assoc: true,
is_import: false,
do_not_complete: Yes,
},
FileSymbol {
name: "CONST",
def: Const(

View File

@ -183,4 +183,28 @@ fn main() {
"#,
);
}
#[test]
fn generic_assoc_type_infer_lifetime_in_expr_position() {
check_diagnostics(
r#"
//- minicore: sized
struct Player;
struct Foo<'c, C> {
_v: &'c C,
}
trait WithSignals: Sized {
type SignalCollection<'c, C>;
fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self>;
}
impl WithSignals for Player {
type SignalCollection<'c, C> = Foo<'c, C>;
fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self> {
Self::SignalCollection { _v: self }
}
}
"#,
);
}
}

View File

@ -983,4 +983,19 @@ fn test() {
"#,
);
}
#[test]
fn naked_asm_is_safe() {
check_diagnostics(
r#"
#[rustc_builtin_macro]
macro_rules! naked_asm { () => {} }
#[unsafe(naked)]
extern "C" fn naked() {
naked_asm!("");
}
"#,
);
}
}

View File

@ -505,7 +505,7 @@ fn map_links<'e>(
Event::End(Tag::Link(link_type, target, _)) => {
in_link = false;
Event::End(Tag::Link(
end_link_type.unwrap_or(link_type),
end_link_type.take().unwrap_or(link_type),
end_link_target.take().unwrap_or(target),
CowStr::Borrowed(""),
))
@ -514,7 +514,7 @@ fn map_links<'e>(
let (link_type, link_target_s, link_name) =
callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() {
end_link_type = link_type;
}
Event::Text(CowStr::Boxed(link_name.into()))
@ -523,7 +523,7 @@ fn map_links<'e>(
let (link_type, link_target_s, link_name) =
callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap());
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() {
end_link_type = link_type;
}
Event::Code(CowStr::Boxed(link_name.into()))

View File

@ -23,6 +23,7 @@ pub enum FoldKind {
WhereClause,
ReturnType,
MatchArm,
Function,
// region: item runs
Modules,
Consts,
@ -47,6 +48,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();
let mut visited_nodes = FxHashSet::default();
let mut merged_fn_bodies = FxHashSet::default();
// regions can be nested, here is a LIFO buffer
let mut region_starts: Vec<TextSize> = vec![];
@ -59,6 +61,32 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
NodeOrToken::Token(token) => token.text().contains('\n'),
};
if is_multiline {
// for the func with multiline param list
if matches!(element.kind(), FN) {
if let NodeOrToken::Node(node) = &element {
if let Some(fn_node) = ast::Fn::cast(node.clone()) {
if !fn_node
.param_list()
.map(|param_list| param_list.syntax().text().contains_char('\n'))
.unwrap_or(false)
{
continue;
}
if let Some(body) = fn_node.body() {
res.push(Fold {
range: TextRange::new(
node.text_range().start(),
node.text_range().end(),
),
kind: FoldKind::Function,
});
merged_fn_bodies.insert(body.syntax().text_range());
continue;
}
}
}
}
res.push(Fold { range: element.text_range(), kind });
continue;
}
@ -152,6 +180,7 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
ARG_LIST | PARAM_LIST | GENERIC_ARG_LIST | GENERIC_PARAM_LIST => Some(FoldKind::ArgList),
ARRAY_EXPR => Some(FoldKind::Array),
RET_TYPE => Some(FoldKind::ReturnType),
FN => Some(FoldKind::Function),
WHERE_CLAUSE => Some(FoldKind::WhereClause),
ASSOC_ITEM_LIST
| RECORD_FIELD_LIST
@ -291,6 +320,7 @@ mod tests {
use super::*;
#[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
let (ranges, text) = extract_tags(ra_fixture, "fold");
@ -322,6 +352,7 @@ mod tests {
FoldKind::WhereClause => "whereclause",
FoldKind::ReturnType => "returntype",
FoldKind::MatchArm => "matcharm",
FoldKind::Function => "function",
FoldKind::TraitAliases => "traitaliases",
FoldKind::ExternCrates => "externcrates",
};
@ -329,6 +360,23 @@ mod tests {
}
}
#[test]
fn test_fold_func_with_multiline_param_list() {
check(
r#"
<fold function>fn func<fold arglist>(
a: i32,
b: i32,
c: i32,
)</fold> <fold block>{
}</fold></fold>
"#,
);
}
#[test]
fn test_fold_comments() {
check(
@ -541,10 +589,10 @@ const _: S = S <fold block>{
fn fold_multiline_params() {
check(
r#"
fn foo<fold arglist>(
<fold function>fn foo<fold arglist>(
x: i32,
y: String,
)</fold> {}
)</fold> {}</fold>
"#,
)
}

View File

@ -10958,3 +10958,68 @@ fn bar$0() -> Foo {
"#]],
);
}
#[test]
fn regression_20190() {
check(
r#"
struct Foo;
/// [`foo` bar](Foo).
fn has_docs$0() {}
"#,
expect![[r#"
*has_docs*
```rust
ra_test_fixture
```
```rust
fn has_docs()
```
---
[`foo` bar](https://docs.rs/ra_test_fixture/*/ra_test_fixture/struct.Foo.html).
"#]],
);
}
#[test]
fn regression_20225() {
check(
r#"
//- minicore: coerce_unsized
trait Trait {
type Type<'a, T: ?Sized + 'a>;
}
enum Borrowed {}
impl Trait for Borrowed {
type Type<'a, T: ?Sized + 'a> = &'a T;
}
enum Enum<'a, T: Trait + 'a> {
Variant1(T::Type<'a, [Enum<'a, T>]>),
Variant2,
}
impl Enum<'_, Borrowed> {
const CONSTANT$0: Self = Self::Variant1(&[Self::Variant2]);
}
"#,
expect![[r#"
*CONSTANT*
```rust
ra_test_fixture::Enum
```
```rust
const CONSTANT: Self = Variant1(&[Variant2])
```
"#]],
);
}

View File

@ -92,7 +92,7 @@ pub(super) fn hints(
},
MirSpan::Unknown => continue,
};
let binding = &hir.bindings[binding_idx];
let binding = &hir[binding_idx];
let name = binding.name.display_no_db(display_target.edition).to_smolstr();
if name.starts_with("<ra@") {
continue; // Ignore desugared variables

View File

@ -17,8 +17,12 @@ pub(super) fn hints(
let parent = path.syntax().parent()?;
let range = match path {
Either::Left(path) => {
let paren =
parent.ancestors().take_while(|it| ast::ParenType::can_cast(it.kind())).last();
let paren = parent
.ancestors()
.take_while(|it| {
ast::ParenType::can_cast(it.kind()) || ast::ForType::can_cast(it.kind())
})
.last();
let parent = paren.as_ref().and_then(|it| it.parent()).unwrap_or(parent);
if ast::TypeBound::can_cast(parent.kind())
|| ast::TypeAnchor::can_cast(parent.kind())
@ -34,7 +38,7 @@ pub(super) fn hints(
return None;
}
sema.resolve_trait(&path.path()?)?;
paren.map_or_else(|| path.syntax().text_range(), |it| it.text_range())
path.syntax().text_range()
}
Either::Right(dyn_) => {
if dyn_.dyn_token().is_some() {
@ -89,7 +93,7 @@ fn foo(_: &T, _: for<'a> T) {}
impl T {}
// ^ dyn
impl T for (T) {}
// ^^^ dyn
// ^ dyn
impl T
"#,
);
@ -112,7 +116,7 @@ fn foo(
_: &mut (T + T)
// ^^^^^ dyn
_: *mut (T),
// ^^^ dyn
// ^ dyn
) {}
"#,
);
@ -136,4 +140,26 @@ fn foo(
"#]],
);
}
#[test]
fn hrtb_bound_does_not_add_dyn() {
check(
r#"
//- minicore: fn
fn test<F>(f: F) where F: for<'a> FnOnce(&'a i32) {}
// ^: Sized
"#,
);
}
#[test]
fn with_parentheses() {
check(
r#"
trait T {}
fn foo(v: &(T)) {}
// ^ dyn
"#,
);
}
}

View File

@ -4,7 +4,7 @@ use crate::grammar::attributes::ATTRIBUTE_FIRST;
use super::*;
pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal};
pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal, parse_asm_expr};
pub(crate) use atom::{block_expr, match_arm_list};
#[derive(PartialEq, Eq)]

View File

@ -253,8 +253,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
let m = p.start();
p.bump_remap(T![builtin]);
p.bump(T![#]);
if p.at_contextual_kw(T![offset_of]) {
p.bump_remap(T![offset_of]);
if p.eat_contextual_kw(T![offset_of]) {
p.expect(T!['(']);
type_(p);
p.expect(T![,]);
@ -278,8 +277,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
p.expect(T![')']);
}
Some(m.complete(p, OFFSET_OF_EXPR))
} else if p.at_contextual_kw(T![format_args]) {
p.bump_remap(T![format_args]);
} else if p.eat_contextual_kw(T![format_args]) {
p.expect(T!['(']);
expr(p);
if p.eat(T![,]) {
@ -302,7 +300,16 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
}
p.expect(T![')']);
Some(m.complete(p, FORMAT_ARGS_EXPR))
} else if p.at_contextual_kw(T![asm]) {
} else if p.eat_contextual_kw(T![asm])
|| p.eat_contextual_kw(T![global_asm])
|| p.eat_contextual_kw(T![naked_asm])
{
// test asm_kinds
// fn foo() {
// builtin#asm("");
// builtin#global_asm("");
// builtin#naked_asm("");
// }
parse_asm_expr(p, m)
} else {
m.abandon(p);
@ -321,8 +328,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> {
// tmp = out(reg) _,
// );
// }
fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
p.bump_remap(T![asm]);
pub(crate) fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
p.expect(T!['(']);
if expr(p).is_none() {
p.err_and_bump("expected asm template");
@ -411,11 +417,10 @@ fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
dir_spec.abandon(p);
op.abandon(p);
op_n.abandon(p);
p.err_and_bump("expected asm operand");
// improves error recovery and handles err_and_bump recovering from `{` which gets
// the parser stuck here
// improves error recovery
if p.at(T!['{']) {
p.error("expected asm operand");
// test_err bad_asm_expr
// fn foo() {
// builtin#asm(
@ -423,6 +428,8 @@ fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> {
// );
// }
expr(p);
} else {
p.err_and_bump("expected asm operand");
}
if p.at(T!['}']) {

View File

@ -261,6 +261,19 @@ fn opt_item_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marke
T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m),
IDENT
if p.at_contextual_kw(T![builtin])
&& p.nth_at(1, T![#])
&& p.nth_at_contextual_kw(2, T![global_asm]) =>
{
p.bump_remap(T![builtin]);
p.bump(T![#]);
p.bump_remap(T![global_asm]);
// test global_asm
// builtin#global_asm("")
expressions::parse_asm_expr(p, m);
}
_ => return Err(m),
};
Ok(())

View File

@ -11,8 +11,8 @@
use std::ops;
use rustc_literal_escaper::{
unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, unescape_str, EscapeError,
Mode,
EscapeError, Mode, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char,
unescape_str,
};
use crate::{

View File

@ -29,7 +29,7 @@ pub(crate) struct Parser<'t> {
edition: Edition,
}
const PARSER_STEP_LIMIT: usize = 15_000_000;
const PARSER_STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15_000_000 };
impl<'t> Parser<'t> {
pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> {
@ -254,7 +254,10 @@ impl<'t> Parser<'t> {
/// Create an error node and consume the next token.
pub(crate) fn err_and_bump(&mut self, message: &str) {
self.err_recover(message, TokenSet::EMPTY);
let m = self.start();
self.error(message);
self.bump_any();
m.complete(self, ERROR);
}
/// Create an error node and consume the next token unless it is in the recovery set.

File diff suppressed because one or more lines are too long

View File

@ -21,6 +21,8 @@ mod ok {
#[test]
fn asm_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_expr.rs"); }
#[test]
fn asm_kinds() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_kinds.rs"); }
#[test]
fn asm_label() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_label.rs"); }
#[test]
fn assoc_const_eq() {
@ -298,6 +300,8 @@ mod ok {
run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_list.rs");
}
#[test]
fn global_asm() { run_and_expect_no_errors("test_data/parser/inline/ok/global_asm.rs"); }
#[test]
fn half_open_range_pat() {
run_and_expect_no_errors("test_data/parser/inline/ok/half_open_range_pat.rs");
}

View File

@ -0,0 +1,48 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE "\n "
EXPR_STMT
ASM_EXPR
BUILTIN_KW "builtin"
POUND "#"
ASM_KW "asm"
L_PAREN "("
LITERAL
STRING "\"\""
R_PAREN ")"
SEMICOLON ";"
WHITESPACE "\n "
ASM_EXPR
BUILTIN_KW "builtin"
POUND "#"
GLOBAL_ASM_KW "global_asm"
L_PAREN "("
LITERAL
STRING "\"\""
R_PAREN ")"
SEMICOLON ";"
WHITESPACE "\n "
EXPR_STMT
ASM_EXPR
BUILTIN_KW "builtin"
POUND "#"
NAKED_ASM_KW "naked_asm"
L_PAREN "("
LITERAL
STRING "\"\""
R_PAREN ")"
SEMICOLON ";"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"

View File

@ -0,0 +1,5 @@
fn foo() {
builtin#asm("");
builtin#global_asm("");
builtin#naked_asm("");
}

View File

@ -0,0 +1,10 @@
SOURCE_FILE
ASM_EXPR
BUILTIN_KW "builtin"
POUND "#"
GLOBAL_ASM_KW "global_asm"
L_PAREN "("
LITERAL
STRING "\"\""
R_PAREN ")"
WHITESPACE "\n"

View File

@ -0,0 +1 @@
builtin#global_asm("")

View File

@ -0,0 +1,34 @@
//! Read `.cargo/config.toml` as a JSON object
use rustc_hash::FxHashMap;
use toolchain::Tool;
use crate::{ManifestPath, Sysroot, utf8_stdout};
pub(crate) type CargoConfigFile = serde_json::Map<String, serde_json::Value>;
/// Reads the effective cargo configuration for `manifest` by shelling out to
/// `cargo config get --format json` and parsing its stdout as a JSON object.
///
/// Returns `None` when the command fails (the `config` subcommand is still
/// unstable and may be unavailable) or when its output is not valid JSON.
pub(crate) fn read(
    manifest: &ManifestPath,
    extra_env: &FxHashMap<String, Option<String>>,
    sysroot: &Sysroot,
) -> Option<CargoConfigFile> {
    // Use the sysroot-selected cargo so the rustup proxy picks the right toolchain.
    let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
    // `cargo config` is nightly-only behind `-Z unstable-options`;
    // RUSTC_BOOTSTRAP=1 lets it run on non-nightly toolchains too.
    cargo_config
        .args(["-Z", "unstable-options", "config", "get", "--format", "json"])
        .env("RUSTC_BOOTSTRAP", "1");
    if manifest.is_rust_manifest() {
        // Single-file (cargo script) manifests additionally need `-Zscript`.
        cargo_config.arg("-Zscript");
    }
    tracing::debug!("Discovering cargo config by {:?}", cargo_config);
    let json: serde_json::Map<String, serde_json::Value> = utf8_stdout(&mut cargo_config)
        .inspect(|json| {
            tracing::debug!("Discovered cargo config: {:?}", json);
        })
        .inspect_err(|err| {
            tracing::debug!("Failed to discover cargo config: {:?}", err);
        })
        .ok()
        .and_then(|stdout| serde_json::from_str(&stdout).ok())?;
    Some(json)
}

View File

@ -300,8 +300,6 @@ pub struct CargoMetadataConfig {
pub extra_args: Vec<String>,
/// Extra env vars to set when invoking the cargo command
pub extra_env: FxHashMap<String, Option<String>>,
/// The target dir for this workspace load.
pub target_dir: Utf8PathBuf,
/// What kind of metadata are we fetching: workspace, rustc, or sysroot.
pub kind: &'static str,
/// The toolchain version, if known.
@ -317,188 +315,6 @@ struct PackageMetadata {
}
impl CargoWorkspace {
/// Fetches the metadata for the given `cargo_toml` manifest.
/// A successful result may contain another metadata error if the initial fetching failed but
/// the `--no-deps` retry succeeded.
///
/// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo
/// to ensure that the rustup proxy uses the correct toolchain.
pub fn fetch_metadata(
cargo_toml: &ManifestPath,
current_dir: &AbsPath,
config: &CargoMetadataConfig,
sysroot: &Sysroot,
no_deps: bool,
locked: bool,
progress: &dyn Fn(String),
) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
let res = Self::fetch_metadata_(
cargo_toml,
current_dir,
config,
sysroot,
no_deps,
locked,
progress,
);
if let Ok((_, Some(ref e))) = res {
tracing::warn!(
%cargo_toml,
?e,
"`cargo metadata` failed, but retry with `--no-deps` succeeded"
);
}
res
}
fn fetch_metadata_(
cargo_toml: &ManifestPath,
current_dir: &AbsPath,
config: &CargoMetadataConfig,
sysroot: &Sysroot,
no_deps: bool,
locked: bool,
progress: &dyn Fn(String),
) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
let mut meta = MetadataCommand::new();
meta.cargo_path(cargo.get_program());
cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default()));
meta.manifest_path(cargo_toml.to_path_buf());
match &config.features {
CargoFeatures::All => {
meta.features(CargoOpt::AllFeatures);
}
CargoFeatures::Selected { features, no_default_features } => {
if *no_default_features {
meta.features(CargoOpt::NoDefaultFeatures);
}
if !features.is_empty() {
meta.features(CargoOpt::SomeFeatures(features.clone()));
}
}
}
meta.current_dir(current_dir);
let mut other_options = vec![];
// cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
// the only relevant flags for metadata here are unstable ones, so we pass those along
// but nothing else
let mut extra_args = config.extra_args.iter();
while let Some(arg) = extra_args.next() {
if arg == "-Z" {
if let Some(arg) = extra_args.next() {
other_options.push("-Z".to_owned());
other_options.push(arg.to_owned());
}
}
}
if !config.targets.is_empty() {
other_options.extend(
config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
);
}
if no_deps {
other_options.push("--no-deps".to_owned());
}
let mut using_lockfile_copy = false;
// The manifest is a rust file, so this means its a script manifest
if cargo_toml.is_rust_manifest() {
other_options.push("-Zscript".to_owned());
} else if config
.toolchain_version
.as_ref()
.is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
{
let lockfile = <_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock");
let target_lockfile = config
.target_dir
.join("rust-analyzer")
.join("metadata")
.join(config.kind)
.join("Cargo.lock");
match std::fs::copy(&lockfile, &target_lockfile) {
Ok(_) => {
using_lockfile_copy = true;
other_options.push("--lockfile-path".to_owned());
other_options.push(target_lockfile.to_string());
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
// There exists no lockfile yet
using_lockfile_copy = true;
other_options.push("--lockfile-path".to_owned());
other_options.push(target_lockfile.to_string());
}
Err(e) => {
tracing::warn!(
"Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
);
}
}
}
if using_lockfile_copy {
other_options.push("-Zunstable-options".to_owned());
meta.env("RUSTC_BOOTSTRAP", "1");
}
// No need to lock it if we copied the lockfile, we won't modify the original after all/
// This way cargo cannot error out on us if the lockfile requires updating.
if !using_lockfile_copy && locked {
other_options.push("--locked".to_owned());
}
meta.other_options(other_options);
// FIXME: Fetching metadata is a slow process, as it might require
// calling crates.io. We should be reporting progress here, but it's
// unclear whether cargo itself supports it.
progress("cargo metadata: started".to_owned());
let res = (|| -> anyhow::Result<(_, _)> {
let mut errored = false;
let output =
spawn_with_streaming_output(meta.cargo_command(), &mut |_| (), &mut |line| {
errored = errored || line.starts_with("error") || line.starts_with("warning");
if errored {
progress("cargo metadata: ?".to_owned());
return;
}
progress(format!("cargo metadata: {line}"));
})?;
if !output.status.success() {
progress(format!("cargo metadata: failed {}", output.status));
let error = cargo_metadata::Error::CargoMetadata {
stderr: String::from_utf8(output.stderr)?,
}
.into();
if !no_deps {
// If we failed to fetch metadata with deps, try again without them.
// This makes r-a still work partially when offline.
if let Ok((metadata, _)) = Self::fetch_metadata_(
cargo_toml,
current_dir,
config,
sysroot,
true,
locked,
progress,
) {
return Ok((metadata, Some(error)));
}
}
return Err(error);
}
let stdout = from_utf8(&output.stdout)?
.lines()
.find(|line| line.starts_with('{'))
.ok_or(cargo_metadata::Error::NoJson)?;
Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
})()
.with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()));
progress("cargo metadata: finished".to_owned());
res
}
pub fn new(
mut meta: cargo_metadata::Metadata,
ws_manifest_path: ManifestPath,
@ -733,3 +549,214 @@ impl CargoWorkspace {
self.requires_rustc_private
}
}
/// A pre-configured, lazily-executed `cargo metadata` invocation.
///
/// Constructed via [`FetchMetadata::new`] (which also eagerly runs a cheap
/// `--no-deps` pre-fetch) and executed via [`FetchMetadata::exec`].
pub(crate) struct FetchMetadata {
    // The fully configured `cargo metadata` command, minus late options added in `exec`.
    command: cargo_metadata::MetadataCommand,
    // Workspace lockfile to copy aside before the full fetch, when the toolchain supports it.
    lockfile_path: Option<Utf8PathBuf>,
    // What kind of metadata is being fetched, e.g. "workspace" or "rustc-dev".
    kind: &'static str,
    // Whether only the cheap `--no-deps` fetch was requested.
    no_deps: bool,
    // Result of the `--no-deps` pre-fetch performed at construction time.
    no_deps_result: anyhow::Result<cargo_metadata::Metadata>,
    // Extra CLI options accumulated so far; `exec` appends more before running.
    other_options: Vec<String>,
}
impl FetchMetadata {
    /// Builds a command to fetch metadata for the given `cargo_toml` manifest.
    ///
    /// Performs a lightweight pre-fetch using the `--no-deps` option,
    /// available via [`FetchMetadata::no_deps_metadata`], to gather basic
    /// information such as the `target-dir`.
    ///
    /// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN`
    /// environment variable when invoking Cargo, ensuring that the
    /// rustup proxy selects the correct toolchain.
    pub(crate) fn new(
        cargo_toml: &ManifestPath,
        current_dir: &AbsPath,
        config: &CargoMetadataConfig,
        sysroot: &Sysroot,
        no_deps: bool,
    ) -> Self {
        let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env);
        let mut command = MetadataCommand::new();
        command.cargo_path(cargo.get_program());
        // Forward the environment prepared by `Sysroot::tool` to the metadata command.
        cargo.get_envs().for_each(|(var, val)| _ = command.env(var, val.unwrap_or_default()));
        command.manifest_path(cargo_toml.to_path_buf());
        match &config.features {
            CargoFeatures::All => {
                command.features(CargoOpt::AllFeatures);
            }
            CargoFeatures::Selected { features, no_default_features } => {
                if *no_default_features {
                    command.features(CargoOpt::NoDefaultFeatures);
                }
                if !features.is_empty() {
                    command.features(CargoOpt::SomeFeatures(features.clone()));
                }
            }
        }
        command.current_dir(current_dir);
        let mut needs_nightly = false;
        let mut other_options = vec![];
        // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually
        // the only relevant flags for metadata here are unstable ones, so we pass those along
        // but nothing else
        let mut extra_args = config.extra_args.iter();
        while let Some(arg) = extra_args.next() {
            if arg == "-Z" {
                if let Some(arg) = extra_args.next() {
                    needs_nightly = true;
                    other_options.push("-Z".to_owned());
                    other_options.push(arg.to_owned());
                }
            }
        }
        let mut lockfile_path = None;
        if cargo_toml.is_rust_manifest() {
            // A `.rs` manifest is a cargo script, which requires the nightly `-Zscript` flag.
            needs_nightly = true;
            other_options.push("-Zscript".to_owned());
        } else if config
            .toolchain_version
            .as_ref()
            .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH)
        {
            // Remember the lockfile location; `exec` copies it aside so the full
            // fetch never mutates the workspace's own `Cargo.lock`.
            lockfile_path = Some(<_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock"));
        }
        if !config.targets.is_empty() {
            other_options.extend(
                config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]),
            );
        }
        command.other_options(other_options.clone());
        if needs_nightly {
            // RUSTC_BOOTSTRAP=1 enables the nightly-only flags above on stable toolchains.
            command.env("RUSTC_BOOTSTRAP", "1");
        }
        // Pre-fetch basic metadata using `--no-deps`, which:
        // - avoids fetching registries like crates.io,
        // - skips dependency resolution and does not modify lockfiles,
        // - and thus doesn't require progress reporting or copying lockfiles.
        //
        // Useful as a fast fallback to extract info like `target-dir`.
        let cargo_command;
        let no_deps_result = if no_deps {
            command.no_deps();
            cargo_command = command.cargo_command();
            command.exec()
        } else {
            // Clone so `command` itself stays configured for the later full fetch.
            let mut no_deps_command = command.clone();
            no_deps_command.no_deps();
            cargo_command = no_deps_command.cargo_command();
            no_deps_command.exec()
        }
        .with_context(|| format!("Failed to run `{cargo_command:?}`"));
        Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options }
    }
    /// Returns the result of the cheap `--no-deps` pre-fetch, if it succeeded.
    pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> {
        self.no_deps_result.as_ref().ok()
    }
    /// Executes the metadata-fetching command.
    ///
    /// A successful result may still contain a metadata error if the full fetch failed,
    /// but the fallback `--no-deps` pre-fetch succeeded during command construction.
    pub(crate) fn exec(
        self,
        target_dir: &Utf8Path,
        locked: bool,
        progress: &dyn Fn(String),
    ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
        let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } =
            self;
        if no_deps {
            // Only the cheap fetch was requested; it already ran in `new`.
            return no_deps_result.map(|m| (m, None));
        }
        let mut using_lockfile_copy = false;
        // Copy the workspace lockfile under the target dir and point cargo at the
        // copy, so `cargo metadata` never touches the workspace's own `Cargo.lock`.
        if let Some(lockfile) = lockfile_path {
            let target_lockfile =
                target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock");
            match std::fs::copy(&lockfile, &target_lockfile) {
                Ok(_) => {
                    using_lockfile_copy = true;
                    other_options.push("--lockfile-path".to_owned());
                    other_options.push(target_lockfile.to_string());
                }
                Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                    // There exists no lockfile yet; let cargo create one at the copy's path.
                    using_lockfile_copy = true;
                    other_options.push("--lockfile-path".to_owned());
                    other_options.push(target_lockfile.to_string());
                }
                Err(e) => {
                    tracing::warn!(
                        "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}",
                    );
                }
            }
        }
        if using_lockfile_copy {
            // `--lockfile-path` is unstable, hence `-Zunstable-options` + RUSTC_BOOTSTRAP.
            other_options.push("-Zunstable-options".to_owned());
            command.env("RUSTC_BOOTSTRAP", "1");
        }
        // No need to lock it if we copied the lockfile, we won't modify the original after all.
        // This way cargo cannot error out on us if the lockfile requires updating.
        if !using_lockfile_copy && locked {
            other_options.push("--locked".to_owned());
        }
        command.other_options(other_options);
        // FIXME: Fetching metadata is a slow process, as it might require
        // calling crates.io. We should be reporting progress here, but it's
        // unclear whether cargo itself supports it.
        progress("cargo metadata: started".to_owned());
        let res = (|| -> anyhow::Result<(_, _)> {
            let mut errored = false;
            let output =
                spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| {
                    // Once cargo starts printing errors/warnings, stop forwarding
                    // its output verbatim as progress text.
                    errored = errored || line.starts_with("error") || line.starts_with("warning");
                    if errored {
                        progress("cargo metadata: ?".to_owned());
                        return;
                    }
                    progress(format!("cargo metadata: {line}"));
                })?;
            if !output.status.success() {
                progress(format!("cargo metadata: failed {}", output.status));
                let error = cargo_metadata::Error::CargoMetadata {
                    stderr: String::from_utf8(output.stderr)?,
                }
                .into();
                if !no_deps {
                    // If we failed to fetch metadata with deps, return pre-fetched result without them.
                    // This makes r-a still work partially when offline.
                    if let Ok(metadata) = no_deps_result {
                        tracing::warn!(
                            ?error,
                            "`cargo metadata` failed and returning succeeded result with `--no-deps`"
                        );
                        return Ok((metadata, Some(error)));
                    }
                }
                return Err(error);
            }
            // cargo may print non-JSON noise first; the metadata is the line starting with `{`.
            let stdout = from_utf8(&output.stdout)?
                .lines()
                .find(|line| line.starts_with('{'))
                .ok_or(cargo_metadata::Error::NoJson)?;
            Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
        })()
        .with_context(|| format!("Failed to run `{:?}`", command.cargo_command()));
        progress("cargo metadata: finished".to_owned());
        res
    }
}

View File

@ -1,10 +1,9 @@
//! Cargo-like environment variables injection.
use base_db::Env;
use paths::{Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use paths::Utf8Path;
use toolchain::Tool;
use crate::{ManifestPath, PackageData, Sysroot, TargetKind, utf8_stdout};
use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile};
/// Recreates the compile-time environment variables that Cargo sets.
///
@ -61,104 +60,68 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe
env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_"));
}
pub(crate) fn cargo_config_env(
manifest: &ManifestPath,
extra_env: &FxHashMap<String, Option<String>>,
sysroot: &Sysroot,
) -> Env {
let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
cargo_config
.args(["-Z", "unstable-options", "config", "get", "env"])
.env("RUSTC_BOOTSTRAP", "1");
if manifest.is_rust_manifest() {
cargo_config.arg("-Zscript");
}
// if successful we receive `env.key.value = "value" per entry
tracing::debug!("Discovering cargo config env by {:?}", cargo_config);
utf8_stdout(&mut cargo_config)
.map(|stdout| parse_output_cargo_config_env(manifest, &stdout))
.inspect(|env| {
tracing::debug!("Discovered cargo config env: {:?}", env);
})
.inspect_err(|err| {
tracing::debug!("Failed to discover cargo config env: {:?}", err);
})
.unwrap_or_default()
}
fn parse_output_cargo_config_env(manifest: &ManifestPath, stdout: &str) -> Env {
pub(crate) fn cargo_config_env(manifest: &ManifestPath, config: &Option<CargoConfigFile>) -> Env {
let mut env = Env::default();
let mut relatives = vec![];
for (key, val) in
stdout.lines().filter_map(|l| l.strip_prefix("env.")).filter_map(|l| l.split_once(" = "))
{
let val = val.trim_matches('"').to_owned();
if let Some((key, modifier)) = key.split_once('.') {
match modifier {
"relative" => relatives.push((key, val)),
"value" => _ = env.insert(key, val),
_ => {
tracing::warn!(
"Unknown modifier in cargo config env: {}, expected `relative` or `value`",
modifier
);
continue;
}
}
} else {
env.insert(key, val);
}
}
let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env"))
else {
return env;
};
// FIXME: The base here should be the parent of the `.cargo/config` file, not the manifest.
// But cargo does not provide this information.
let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent());
for (key, relative) in relatives {
if relative != "true" {
continue;
}
if let Some(suffix) = env.get(key) {
env.insert(key, base.join(suffix).to_string());
}
}
env
}
pub(crate) fn cargo_config_build_target_dir(
manifest: &ManifestPath,
extra_env: &FxHashMap<String, Option<String>>,
sysroot: &Sysroot,
) -> Option<Utf8PathBuf> {
let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
cargo_config
.args(["-Z", "unstable-options", "config", "get", "build.target-dir"])
.env("RUSTC_BOOTSTRAP", "1");
if manifest.is_rust_manifest() {
cargo_config.arg("-Zscript");
for (key, entry) in env_json {
let serde_json::Value::Object(entry) = entry else {
continue;
};
let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
continue;
};
let value = if entry
.get("relative")
.and_then(|v| v.as_bool())
.is_some_and(std::convert::identity)
{
base.join(value).to_string()
} else {
value.to_owned()
};
env.insert(key, value);
}
utf8_stdout(&mut cargo_config)
.map(|stdout| {
Utf8Path::new(stdout.trim_start_matches("build.target-dir = ").trim_matches('"'))
.to_owned()
})
.ok()
env
}
#[test]
fn parse_output_cargo_config_env_works() {
let stdout = r#"
env.CARGO_WORKSPACE_DIR.relative = true
env.CARGO_WORKSPACE_DIR.value = ""
env.RELATIVE.relative = true
env.RELATIVE.value = "../relative"
env.INVALID.relative = invalidbool
env.INVALID.value = "../relative"
env.TEST.value = "test"
"#
.trim();
let raw = r#"
{
"env": {
"CARGO_WORKSPACE_DIR": {
"relative": true,
"value": ""
},
"INVALID": {
"relative": "invalidbool",
"value": "../relative"
},
"RELATIVE": {
"relative": true,
"value": "../relative"
},
"TEST": {
"value": "test"
}
}
}
"#;
let config: CargoConfigFile = serde_json::from_str(raw).unwrap();
let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap();
let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml"));
let manifest = ManifestPath::try_from(manifest).unwrap();
let env = parse_output_cargo_config_env(&manifest, stdout);
let env = cargo_config_env(&manifest, &Some(config));
assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str()));
assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str()));
assert_eq!(env.get("INVALID").as_deref(), Some("../relative"));

View File

@ -24,7 +24,7 @@ pub mod toolchain_info {
use std::path::Path;
use crate::{ManifestPath, Sysroot};
use crate::{ManifestPath, Sysroot, cargo_config_file::CargoConfigFile};
#[derive(Copy, Clone)]
pub enum QueryConfig<'a> {
@ -32,11 +32,12 @@ pub mod toolchain_info {
Rustc(&'a Sysroot, &'a Path),
/// Attempt to use cargo to query the desired information, honoring cargo configurations.
/// If this fails, falls back to invoking `rustc` directly.
Cargo(&'a Sysroot, &'a ManifestPath),
Cargo(&'a Sysroot, &'a ManifestPath, &'a Option<CargoConfigFile>),
}
}
mod build_dependencies;
mod cargo_config_file;
mod cargo_workspace;
mod env;
mod manifest_path;

View File

@ -9,14 +9,15 @@ use std::{env, fs, ops::Not, path::Path, process::Command};
use anyhow::{Result, format_err};
use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
use stdx::format_to;
use toolchain::{Tool, probe_for_binary};
use crate::{
CargoWorkspace, ManifestPath, ProjectJson, RustSourceWorkspaceConfig,
cargo_workspace::CargoMetadataConfig, utf8_stdout,
cargo_workspace::{CargoMetadataConfig, FetchMetadata},
utf8_stdout,
};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -211,6 +212,7 @@ impl Sysroot {
sysroot_source_config: &RustSourceWorkspaceConfig,
no_deps: bool,
current_dir: &AbsPath,
target_dir: &Utf8Path,
progress: &dyn Fn(String),
) -> Option<RustLibSrcWorkspace> {
assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded");
@ -224,6 +226,7 @@ impl Sysroot {
match self.load_library_via_cargo(
&library_manifest,
current_dir,
target_dir,
cargo_config,
no_deps,
progress,
@ -319,6 +322,7 @@ impl Sysroot {
&self,
library_manifest: &ManifestPath,
current_dir: &AbsPath,
target_dir: &Utf8Path,
cargo_config: &CargoMetadataConfig,
no_deps: bool,
progress: &dyn Fn(String),
@ -331,16 +335,11 @@ impl Sysroot {
Some("nightly".to_owned()),
);
let (mut res, _) = CargoWorkspace::fetch_metadata(
library_manifest,
current_dir,
&cargo_config,
self,
no_deps,
// Make sure we never attempt to write to the sysroot
true,
progress,
)?;
// Make sure we never attempt to write to the sysroot
let locked = true;
let (mut res, _) =
FetchMetadata::new(library_manifest, current_dir, &cargo_config, self, no_deps)
.exec(target_dir, locked, progress)?;
// Patch out `rustc-std-workspace-*` crates to point to the real crates.
// This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing.

View File

@ -239,8 +239,13 @@ fn smoke_test_real_sysroot_cargo() {
);
let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo"));
std::fs::create_dir_all(&cwd).unwrap();
let loaded_sysroot =
sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), false, &cwd, &|_| ());
let loaded_sysroot = sysroot.load_workspace(
&RustSourceWorkspaceConfig::default_cargo(),
false,
&cwd,
&Utf8PathBuf::default(),
&|_| (),
);
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}

View File

@ -63,7 +63,7 @@ fn rustc_print_cfg(
) -> anyhow::Result<String> {
const RUSTC_ARGS: [&str; 2] = ["--print", "cfg"];
let (sysroot, current_dir) = match config {
QueryConfig::Cargo(sysroot, cargo_toml) => {
QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS);
if let Some(target) = target {
@ -109,7 +109,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert_ne!(get(cfg, None, &FxHashMap::default()), vec![]);
}

View File

@ -20,7 +20,7 @@ pub fn get(
})
};
let (sysroot, current_dir) = match config {
QueryConfig::Cargo(sysroot, cargo_toml) => {
QueryConfig::Cargo(sysroot, cargo_toml, _) => {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.env("RUSTC_BOOTSTRAP", "1");
cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS).args([
@ -66,7 +66,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, None, &FxHashMap::default()).is_ok());
}

View File

@ -5,7 +5,9 @@ use anyhow::Context;
use rustc_hash::FxHashMap;
use toolchain::Tool;
use crate::{ManifestPath, Sysroot, toolchain_info::QueryConfig, utf8_stdout};
use crate::{
Sysroot, cargo_config_file::CargoConfigFile, toolchain_info::QueryConfig, utf8_stdout,
};
/// For cargo, runs `cargo -Zunstable-options config get build.target` to get the configured project target(s).
/// For rustc, runs `rustc --print -vV` to get the host target.
@ -20,8 +22,8 @@ pub fn get(
}
let (sysroot, current_dir) = match config {
QueryConfig::Cargo(sysroot, cargo_toml) => {
match cargo_config_build_target(cargo_toml, extra_env, sysroot) {
QueryConfig::Cargo(sysroot, cargo_toml, config_file) => {
match config_file.as_ref().and_then(cargo_config_build_target) {
Some(it) => return Ok(it),
None => (sysroot, cargo_toml.parent().as_ref()),
}
@ -50,30 +52,30 @@ fn rustc_discover_host_tuple(
}
}
fn cargo_config_build_target(
cargo_toml: &ManifestPath,
extra_env: &FxHashMap<String, Option<String>>,
sysroot: &Sysroot,
) -> Option<Vec<String>> {
let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env);
cmd.current_dir(cargo_toml.parent()).env("RUSTC_BOOTSTRAP", "1");
cmd.args(["-Z", "unstable-options", "config", "get", "build.target"]);
// if successful we receive `build.target = "target-tuple"`
// or `build.target = ["<target 1>", ..]`
// this might be `error: config value `build.target` is not set` in which case we
// don't wanna log the error
utf8_stdout(&mut cmd).and_then(parse_output_cargo_config_build_target).ok()
fn cargo_config_build_target(config: &CargoConfigFile) -> Option<Vec<String>> {
match parse_json_cargo_config_build_target(config) {
Ok(v) => v,
Err(e) => {
tracing::debug!("Failed to discover cargo config build target {e:?}");
None
}
}
}
// Parses `"build.target = [target-tuple, target-tuple, ...]"` or `"build.target = "target-tuple"`
fn parse_output_cargo_config_build_target(stdout: String) -> anyhow::Result<Vec<String>> {
let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"');
if !trimmed.starts_with('[') {
return Ok([trimmed.to_owned()].to_vec());
fn parse_json_cargo_config_build_target(
config: &CargoConfigFile,
) -> anyhow::Result<Option<Vec<String>>> {
let target = config.get("build").and_then(|v| v.as_object()).and_then(|m| m.get("target"));
match target {
Some(serde_json::Value::String(s)) => Ok(Some(vec![s.to_owned()])),
Some(v) => serde_json::from_value(v.clone())
.map(Option::Some)
.context("Failed to parse `build.target` as an array of target"),
// t`error: config value `build.target` is not set`, in which case we
// don't wanna log the error
None => Ok(None),
}
serde_json::from_str(trimmed).context("Failed to parse `build.target` as an array of target")
}
#[cfg(test)]
@ -90,7 +92,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, None, &FxHashMap::default()).is_ok());
}

View File

@ -12,7 +12,7 @@ pub(crate) fn get(
extra_env: &FxHashMap<String, Option<String>>,
) -> Result<Option<Version>, anyhow::Error> {
let (mut cmd, prefix) = match config {
QueryConfig::Cargo(sysroot, cargo_toml) => {
QueryConfig::Cargo(sysroot, cargo_toml, _) => {
(sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env), "cargo ")
}
QueryConfig::Rustc(sysroot, current_dir) => {
@ -44,7 +44,7 @@ mod tests {
let sysroot = Sysroot::empty();
let manifest_path =
ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap();
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path);
let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None);
assert!(get(cfg, &FxHashMap::default()).is_ok());
}

View File

@ -25,11 +25,9 @@ use crate::{
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
build_dependencies::BuildScriptOutput,
cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
env::{
cargo_config_build_target_dir, cargo_config_env, inject_cargo_env,
inject_cargo_package_env, inject_rustc_tool_env,
},
cargo_config_file,
cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
project_json::{Crate, CrateArrayIdx},
sysroot::RustLibSrcWorkspace,
toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version},
@ -270,7 +268,9 @@ impl ProjectWorkspace {
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
progress("querying project metadata".to_owned());
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
let config_file = cargo_config_file::read(cargo_toml, extra_env, &sysroot);
let config_file_ = config_file.clone();
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml, &config_file_);
let targets =
target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();
let toolchain = version::get(toolchain_config, extra_env)
@ -282,10 +282,24 @@ impl ProjectWorkspace {
.ok()
.flatten();
let fetch_metadata = FetchMetadata::new(
cargo_toml,
workspace_dir,
&CargoMetadataConfig {
features: features.clone(),
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
toolchain_version: toolchain.clone(),
kind: "workspace",
},
&sysroot,
*no_deps,
);
let target_dir = config
.target_dir
.clone()
.or_else(|| cargo_config_build_target_dir(cargo_toml, extra_env, &sysroot))
.or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone()))
.unwrap_or_else(|| workspace_dir.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
@ -319,7 +333,7 @@ impl ProjectWorkspace {
};
rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match CargoWorkspace::fetch_metadata(
match FetchMetadata::new(
&rustc_dir,
workspace_dir,
&CargoMetadataConfig {
@ -327,15 +341,12 @@ impl ProjectWorkspace {
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
target_dir: target_dir.clone(),
toolchain_version: toolchain.clone(),
kind: "rustc-dev"
},
&sysroot,
*no_deps,
true,
progress,
) {
).exec(&target_dir, true, progress) {
Ok((meta, _error)) => {
let workspace = CargoWorkspace::new(
meta,
@ -364,40 +375,22 @@ impl ProjectWorkspace {
})
});
let cargo_metadata = s.spawn(|| {
CargoWorkspace::fetch_metadata(
cargo_toml,
workspace_dir,
&CargoMetadataConfig {
features: features.clone(),
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
target_dir: target_dir.clone(),
toolchain_version: toolchain.clone(),
kind: "workspace",
},
&sysroot,
*no_deps,
false,
progress,
)
});
let cargo_metadata = s.spawn(|| fetch_metadata.exec(&target_dir, false, progress));
let loaded_sysroot = s.spawn(|| {
sysroot.load_workspace(
&RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
config,
&targets,
toolchain.clone(),
target_dir.clone(),
)),
config.no_deps,
workspace_dir,
&target_dir,
progress,
)
});
let cargo_config_extra_env =
s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot));
s.spawn(move || cargo_config_env(cargo_toml, &config_file));
thread::Result::Ok((
rustc_cfg.join()?,
data_layout.join()?,
@ -476,9 +469,7 @@ impl ProjectWorkspace {
let target_dir = config
.target_dir
.clone()
.or_else(|| {
cargo_config_build_target_dir(project_json.manifest()?, &config.extra_env, &sysroot)
})
.or_else(|| cargo_target_dir(project_json.manifest()?, &config.extra_env, &sysroot))
.unwrap_or_else(|| project_root.join("target").into());
// We spawn a bunch of processes to query various information about the workspace's
@ -502,6 +493,7 @@ impl ProjectWorkspace {
&RustSourceWorkspaceConfig::Json(*sysroot_project),
config.no_deps,
project_root,
&target_dir,
progress,
)
} else {
@ -510,10 +502,10 @@ impl ProjectWorkspace {
config,
&targets,
toolchain.clone(),
target_dir,
)),
config.no_deps,
project_root,
&target_dir,
progress,
)
}
@ -554,7 +546,8 @@ impl ProjectWorkspace {
None => Sysroot::empty(),
};
let query_config = QueryConfig::Cargo(&sysroot, detached_file);
let config_file = cargo_config_file::read(detached_file, &config.extra_env, &sysroot);
let query_config = QueryConfig::Cargo(&sysroot, detached_file, &config_file);
let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
@ -563,7 +556,7 @@ impl ProjectWorkspace {
let target_dir = config
.target_dir
.clone()
.or_else(|| cargo_config_build_target_dir(detached_file, &config.extra_env, &sysroot))
.or_else(|| cargo_target_dir(detached_file, &config.extra_env, &sysroot))
.unwrap_or_else(|| dir.join("target").into());
let loaded_sysroot = sysroot.load_workspace(
@ -571,17 +564,17 @@ impl ProjectWorkspace {
config,
&targets,
toolchain.clone(),
target_dir.clone(),
)),
config.no_deps,
dir,
&target_dir,
&|_| (),
);
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}
let cargo_script = CargoWorkspace::fetch_metadata(
let fetch_metadata = FetchMetadata::new(
detached_file,
dir,
&CargoMetadataConfig {
@ -589,25 +582,26 @@ impl ProjectWorkspace {
targets,
extra_args: config.extra_args.clone(),
extra_env: config.extra_env.clone(),
target_dir,
toolchain_version: toolchain.clone(),
kind: "detached-file",
},
&sysroot,
config.no_deps,
false,
&|_| (),
)
.ok()
.map(|(ws, error)| {
let cargo_config_extra_env =
cargo_config_env(detached_file, &config.extra_env, &sysroot);
(
CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
WorkspaceBuildScripts::default(),
error.map(Arc::new),
)
});
);
let target_dir = config
.target_dir
.clone()
.or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone()))
.unwrap_or_else(|| dir.join("target").into());
let cargo_script =
fetch_metadata.exec(&target_dir, false, &|_| ()).ok().map(|(ws, error)| {
let cargo_config_extra_env = cargo_config_env(detached_file, &config_file);
(
CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
WorkspaceBuildScripts::default(),
error.map(Arc::new),
)
});
Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile {
@ -1889,15 +1883,33 @@ fn sysroot_metadata_config(
config: &CargoConfig,
targets: &[String],
toolchain_version: Option<Version>,
target_dir: Utf8PathBuf,
) -> CargoMetadataConfig {
CargoMetadataConfig {
features: Default::default(),
targets: targets.to_vec(),
extra_args: Default::default(),
extra_env: config.extra_env.clone(),
target_dir,
toolchain_version,
kind: "sysroot",
}
}
fn cargo_target_dir(
manifest: &ManifestPath,
extra_env: &FxHashMap<String, Option<String>>,
sysroot: &Sysroot,
) -> Option<Utf8PathBuf> {
let cargo = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
let mut meta = cargo_metadata::MetadataCommand::new();
meta.cargo_path(cargo.get_program());
meta.manifest_path(manifest);
// `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve.
// So we can use it to get `target_directory` before copying lockfiles
let mut other_options = vec!["--no-deps".to_owned()];
if manifest.is_rust_manifest() {
meta.env("RUSTC_BOOTSTRAP", "1");
other_options.push("-Zscript".to_owned());
}
meta.other_options(other_options);
meta.exec().map(|m| m.target_directory).ok()
}

View File

@ -796,7 +796,7 @@ impl flags::AnalysisStats {
// region:expressions
let (previous_exprs, previous_unknown, previous_partially_unknown) =
(num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
for (expr_id, _) in body.exprs.iter() {
for (expr_id, _) in body.exprs() {
let ty = &inference_result[expr_id];
num_exprs += 1;
let unknown_or_partial = if ty.is_unknown() {
@ -901,7 +901,7 @@ impl flags::AnalysisStats {
// region:patterns
let (previous_pats, previous_unknown, previous_partially_unknown) =
(num_pats, num_pats_unknown, num_pats_partially_unknown);
for (pat_id, _) in body.pats.iter() {
for (pat_id, _) in body.pats() {
let ty = &inference_result[pat_id];
num_pats += 1;
let unknown_or_partial = if ty.is_unknown() {

View File

@ -9,6 +9,7 @@ use hir::{ChangeWithProcMacros, Crate};
use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
use ide_db::base_db;
use itertools::Either;
use paths::Utf8PathBuf;
use profile::StopWatch;
use project_model::toolchain_info::{QueryConfig, target_data_layout};
use project_model::{
@ -79,6 +80,7 @@ impl Tester {
&RustSourceWorkspaceConfig::default_cargo(),
false,
&path,
&Utf8PathBuf::default(),
&|_| (),
);
if let Some(loaded_sysroot) = loaded_sysroot {

View File

@ -911,7 +911,8 @@ pub(crate) fn folding_range(
| FoldKind::Array
| FoldKind::TraitAliases
| FoldKind::ExternCrates
| FoldKind::MatchArm => None,
| FoldKind::MatchArm
| FoldKind::Function => None,
};
let range = range(line_index, fold.range);

View File

@ -880,7 +880,8 @@ fn main() {{}}
#[test]
fn diagnostics_dont_block_typing() {
if skip_slow_tests() {
if skip_slow_tests() || std::env::var("CI").is_ok() {
// FIXME: This test is failing too frequently (therefore we disable it on CI).
return;
}

View File

@ -92,6 +92,7 @@ impl fmt::Debug for ErasedFileAstId {
Use,
Impl,
BlockExpr,
AsmExpr,
Fixup,
);
if f.alternate() {
@ -144,6 +145,10 @@ enum ErasedFileAstIdKind {
Impl,
/// Associated with [`BlockExprFileAstId`].
BlockExpr,
// `global_asm!()` is an item, so we need to give it an `AstId`. So we give to all inline asm
// because incrementality is not a problem, they will always be the only item in the macro file,
// and memory usage also not because they're rare.
AsmExpr,
/// Keep this last.
Root,
}
@ -204,14 +209,17 @@ impl ErasedFileAstId {
.or_else(|| extern_block_ast_id(node, index_map))
.or_else(|| use_ast_id(node, index_map))
.or_else(|| impl_ast_id(node, index_map))
.or_else(|| asm_expr_ast_id(node, index_map))
}
fn should_alloc(node: &SyntaxNode) -> bool {
should_alloc_has_name(node)
|| should_alloc_assoc_item(node)
|| ast::ExternBlock::can_cast(node.kind())
|| ast::Use::can_cast(node.kind())
|| ast::Impl::can_cast(node.kind())
let kind = node.kind();
should_alloc_has_name(kind)
|| should_alloc_assoc_item(kind)
|| ast::ExternBlock::can_cast(kind)
|| ast::Use::can_cast(kind)
|| ast::Impl::can_cast(kind)
|| ast::AsmExpr::can_cast(kind)
}
#[inline]
@ -278,7 +286,6 @@ impl<N> FileAstId<N> {
#[derive(Hash)]
struct ErasedHasNameFileAstId<'a> {
kind: SyntaxKind,
name: &'a str,
}
@ -332,6 +339,19 @@ fn use_ast_id(
}
}
impl AstIdNode for ast::AsmExpr {}
fn asm_expr_ast_id(
node: &SyntaxNode,
index_map: &mut ErasedAstIdNextIndexMap,
) -> Option<ErasedFileAstId> {
if ast::AsmExpr::can_cast(node.kind()) {
Some(index_map.new_id(ErasedFileAstIdKind::AsmExpr, ()))
} else {
None
}
}
impl AstIdNode for ast::Impl {}
fn impl_ast_id(
@ -433,7 +453,6 @@ macro_rules! register_has_name_ast_id {
)+
fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> {
let kind = node.kind();
match_ast! {
match node {
$(
@ -441,7 +460,6 @@ macro_rules! register_has_name_ast_id {
let name = node.$name_method();
let name = name.as_ref().map_or("", |it| it.text_non_mutable());
let result = ErasedHasNameFileAstId {
kind,
name,
};
Some(index_map.new_id(ErasedFileAstIdKind::$ident, result))
@ -452,8 +470,7 @@ macro_rules! register_has_name_ast_id {
}
}
fn should_alloc_has_name(node: &SyntaxNode) -> bool {
let kind = node.kind();
fn should_alloc_has_name(kind: SyntaxKind) -> bool {
false $( || ast::$ident::can_cast(kind) )*
}
};
@ -483,7 +500,6 @@ macro_rules! register_assoc_item_ast_id {
index_map: &mut ErasedAstIdNextIndexMap,
parent: Option<&ErasedFileAstId>,
) -> Option<ErasedFileAstId> {
let kind = node.kind();
match_ast! {
match node {
$(
@ -491,7 +507,6 @@ macro_rules! register_assoc_item_ast_id {
let name = $name_callback(node);
let name = name.as_ref().map_or("", |it| it.text_non_mutable());
let properties = ErasedHasNameFileAstId {
kind,
name,
};
let result = ErasedAssocItemFileAstId {
@ -506,8 +521,7 @@ macro_rules! register_assoc_item_ast_id {
}
}
fn should_alloc_assoc_item(node: &SyntaxNode) -> bool {
let kind = node.kind();
fn should_alloc_assoc_item(kind: SyntaxKind) -> bool {
false $( || ast::$ident::can_cast(kind) )*
}
};

View File

@ -158,6 +158,7 @@ Item =
| TypeAlias
| Union
| Use
| AsmExpr
MacroRules =
Attr* Visibility?
@ -409,7 +410,8 @@ OffsetOfExpr =
// global_asm := "global_asm!(" format_string *("," format_string) *("," operand) [","] ")"
// format_string := STRING_LITERAL / RAW_STRING_LITERAL
AsmExpr =
Attr* 'builtin' '#' 'asm' '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')'
Attr* 'builtin' '#' ( 'asm' | 'global_asm' | 'naked_asm' )
'(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')'
// operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_"
AsmOperandExpr = in_expr:Expr ('=>' out_expr:Expr)?

View File

@ -406,42 +406,6 @@ impl ast::WhereClause {
}
}
impl ast::TypeParam {
pub fn remove_default(&self) {
if let Some((eq, last)) = self
.syntax()
.children_with_tokens()
.find(|it| it.kind() == T![=])
.zip(self.syntax().last_child_or_token())
{
ted::remove_all(eq..=last);
// remove any trailing ws
if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) {
last.detach();
}
}
}
}
impl ast::ConstParam {
pub fn remove_default(&self) {
if let Some((eq, last)) = self
.syntax()
.children_with_tokens()
.find(|it| it.kind() == T![=])
.zip(self.syntax().last_child_or_token())
{
ted::remove_all(eq..=last);
// remove any trailing ws
if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) {
last.detach();
}
}
}
}
pub trait Removable: AstNode {
fn remove(&self);
}

Some files were not shown because too many files have changed in this diff Show More