mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-25 11:17:13 +00:00

Put the expression stuff in the expression store behind an Option<Box>

Leave only the type-related data outside of it. Most expression stores (e.g. generics, fields, signatures) contain nothing but types, so boxing the expression-only part saves a lot of memory: 58 MB on `analysis-stats .`.

This commit is contained in:
parent 3816d0ae53
commit ad708fdbd1
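
An illustrative sketch of the pattern this commit applies (hypothetical names, not code from the diff below): group the rarely populated expression fields in their own struct and store it as `Option<Box<...>>`, so a types-only store pays for a single pointer-sized field instead of several empty arenas.

struct ExprOnly {
    exprs: Vec<u32>, // stands in for the expression/pattern/binding arenas
    pats: Vec<u32>,
}

struct Store {
    // `None` for the many stores that only carry types; thanks to the niche
    // optimization this field costs one pointer (`None` is the null pointer).
    expr_only: Option<Box<ExprOnly>>,
    types: Vec<u32>, // stands in for the type arena
}

impl Store {
    fn exprs(&self) -> &[u32] {
        // Degrade gracefully when there is no expression data at all.
        self.expr_only.as_deref().map(|it| it.exprs.as_slice()).unwrap_or(&[])
    }
}

The real change below does exactly this for `ExpressionStore` and its source map, adding panicking accessors for lookups that are only reachable once expression data exists.
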
@@ -22,6 +22,7 @@ use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
use thin_vec::ThinVec;
use triomphe::Arc;
use tt::TextRange;

@@ -93,17 +94,17 @@ pub type TypeSource = InFile<TypePtr>;
pub type LifetimePtr = AstPtr<ast::Lifetime>;
pub type LifetimeSource = InFile<LifetimePtr>;

// We split the store into types-only and expressions, because most stores (e.g. generics)
// don't store any expressions and this saves memory. Same thing for the source map.
#[derive(Debug, PartialEq, Eq)]
pub struct ExpressionStore {
pub exprs: Arena<Expr>,
pub pats: Arena<Pat>,
pub bindings: Arena<Binding>,
pub labels: Arena<Label>,
pub types: Arena<TypeRef>,
pub lifetimes: Arena<LifetimeRef>,
struct ExpressionOnlyStore {
exprs: Arena<Expr>,
pats: Arena<Pat>,
bindings: Arena<Binding>,
labels: Arena<Label>,
/// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the
/// top level expression, it will not be listed in here.
pub binding_owners: FxHashMap<BindingId, ExprId>,
binding_owners: FxHashMap<BindingId, ExprId>,
/// Block expressions in this store that may contain inner items.
block_scopes: Box<[BlockId]>,

@@ -114,8 +115,118 @@ pub struct ExpressionStore {
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
}

#[derive(Debug, PartialEq, Eq)]
pub struct ExpressionStore {
expr_only: Option<Box<ExpressionOnlyStore>>,
pub types: Arena<TypeRef>,
pub lifetimes: Arena<LifetimeRef>,
}

#[derive(Debug, Eq, Default)]
struct ExpressionOnlySourceMap {
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
expr_map_back: ArenaMap<ExprId, ExprOrPatSource>,

pat_map: FxHashMap<PatSource, ExprOrPatId>,
pat_map_back: ArenaMap<PatId, ExprOrPatSource>,

label_map: FxHashMap<LabelSource, LabelId>,
label_map_back: ArenaMap<LabelId, LabelSource>,

binding_definitions:
ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>,

/// We don't create explicit nodes for record fields (`S { record_field: 92 }`).
/// Instead, we use id of expression (`92`) to identify the field.
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,

template_map: Option<Box<FormatTemplate>>,

expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,

/// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
//
// We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
// maybe for cfgs, and they are also not common in type places).
diagnostics: ThinVec<ExpressionStoreDiagnostics>,
}

impl PartialEq for ExpressionOnlySourceMap {
fn eq(&self, other: &Self) -> bool {
// we only need to compare one of the two mappings
// as the other is a reverse mapping and thus will compare
// the same as normal mapping
let Self {
expr_map: _,
expr_map_back,
pat_map: _,
pat_map_back,
label_map: _,
label_map_back,
// If this changed, our pattern data must have changed
binding_definitions: _,
// If this changed, our expression data must have changed
field_map_back: _,
// If this changed, our pattern data must have changed
pat_field_map_back: _,
template_map,
expansions,
diagnostics,
} = self;
*expr_map_back == other.expr_map_back
&& *pat_map_back == other.pat_map_back
&& *label_map_back == other.label_map_back
&& *template_map == other.template_map
&& *expansions == other.expansions
&& *diagnostics == other.diagnostics
}
}
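
A minimal sketch of the idiom used by the manual `PartialEq` impls in this diff (hypothetical types, not from the commit): destructure `self` listing every field, so adding a field later fails to compile here, and skip each forward map because it always mirrors its `*_back` map.

use std::collections::HashMap;

struct BiMap {
    forward: HashMap<u32, u32>,
    back: HashMap<u32, u32>, // kept as the exact inverse of `forward`
}

impl PartialEq for BiMap {
    fn eq(&self, other: &Self) -> bool {
        // Exhaustive destructuring: a newly added field must be handled here.
        let Self { forward: _, back } = self;
        *back == other.back
    }
}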

#[derive(Debug, Eq, Default)]
pub struct ExpressionStoreSourceMap {
expr_only: Option<Box<ExpressionOnlySourceMap>>,

types_map_back: ArenaMap<TypeRefId, TypeSource>,
types_map: FxHashMap<TypeSource, TypeRefId>,

lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>,
#[expect(
unused,
reason = "this is here for completeness, and maybe we'll need it in the future"
)]
lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>,
}

impl PartialEq for ExpressionStoreSourceMap {
fn eq(&self, other: &Self) -> bool {
// we only need to compare one of the two mappings
// as the other is a reverse mapping and thus will compare
// the same as normal mapping
let Self { expr_only, types_map_back, types_map: _, lifetime_map_back, lifetime_map: _ } =
self;
*expr_only == other.expr_only
&& *types_map_back == other.types_map_back
&& *lifetime_map_back == other.lifetime_map_back
}
}

/// The body of an item (function, const etc.).
|
||||
#[derive(Debug, Eq, PartialEq, Default)]
|
||||
pub struct ExpressionStoreBuilder {
|
||||
pub exprs: Arena<Expr>,
|
||||
pub pats: Arena<Pat>,
|
||||
pub bindings: Arena<Binding>,
|
||||
pub labels: Arena<Label>,
|
||||
pub lifetimes: Arena<LifetimeRef>,
|
||||
pub binding_owners: FxHashMap<BindingId, ExprId>,
|
||||
pub types: Arena<TypeRef>,
|
||||
block_scopes: Vec<BlockId>,
|
||||
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
|
||||
|
||||
// AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
|
||||
// to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
|
||||
expr_map: FxHashMap<ExprSource, ExprOrPatId>,
|
||||
@ -143,62 +254,14 @@ pub struct ExpressionStoreSourceMap {
|
||||
|
||||
template_map: Option<Box<FormatTemplate>>,
|
||||
|
||||
pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
|
||||
expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>,
|
||||
|
||||
/// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in
|
||||
/// the source map (since they're just as volatile).
|
||||
pub diagnostics: Vec<ExpressionStoreDiagnostics>,
|
||||
}
|
||||
|
||||
impl PartialEq for ExpressionStoreSourceMap {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
// we only need to compare one of the two mappings
|
||||
// as the other is a reverse mapping and thus will compare
|
||||
// the same as normal mapping
|
||||
let Self {
|
||||
expr_map: _,
|
||||
expr_map_back,
|
||||
pat_map: _,
|
||||
pat_map_back,
|
||||
label_map: _,
|
||||
label_map_back,
|
||||
types_map_back,
|
||||
types_map: _,
|
||||
lifetime_map_back,
|
||||
lifetime_map: _,
|
||||
// If this changed, our pattern data must have changed
|
||||
binding_definitions: _,
|
||||
// If this changed, our expression data must have changed
|
||||
field_map_back: _,
|
||||
// If this changed, our pattern data must have changed
|
||||
pat_field_map_back: _,
|
||||
template_map,
|
||||
expansions,
|
||||
diagnostics,
|
||||
} = self;
|
||||
*expr_map_back == other.expr_map_back
|
||||
&& *pat_map_back == other.pat_map_back
|
||||
&& *label_map_back == other.label_map_back
|
||||
&& *types_map_back == other.types_map_back
|
||||
&& *lifetime_map_back == other.lifetime_map_back
|
||||
&& *template_map == other.template_map
|
||||
&& *expansions == other.expansions
|
||||
&& *diagnostics == other.diagnostics
|
||||
}
|
||||
}
|
||||
|
||||
/// The body of an item (function, const etc.).
|
||||
#[derive(Debug, Eq, PartialEq, Default)]
|
||||
pub struct ExpressionStoreBuilder {
|
||||
pub exprs: Arena<Expr>,
|
||||
pub pats: Arena<Pat>,
|
||||
pub bindings: Arena<Binding>,
|
||||
pub labels: Arena<Label>,
|
||||
pub lifetimes: Arena<LifetimeRef>,
|
||||
pub binding_owners: FxHashMap<BindingId, ExprId>,
|
||||
pub types: Arena<TypeRef>,
|
||||
block_scopes: Vec<BlockId>,
|
||||
ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>,
|
||||
//
|
||||
// We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except
|
||||
// maybe for cfgs, and they are also not common in type places).
|
||||
pub(crate) diagnostics: Vec<ExpressionStoreDiagnostics>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Eq, PartialEq)]
|
||||
@ -226,7 +289,7 @@ pub enum ExpressionStoreDiagnostics {
|
||||
}
|
||||
|
||||
impl ExpressionStoreBuilder {
|
||||
pub fn finish(self) -> ExpressionStore {
|
||||
pub fn finish(self) -> (ExpressionStore, ExpressionStoreSourceMap) {
|
||||
let Self {
|
||||
block_scopes,
|
||||
mut exprs,
|
||||
@ -237,6 +300,23 @@ impl ExpressionStoreBuilder {
|
||||
mut ident_hygiene,
|
||||
mut types,
|
||||
mut lifetimes,
|
||||
|
||||
mut expr_map,
|
||||
mut expr_map_back,
|
||||
mut pat_map,
|
||||
mut pat_map_back,
|
||||
mut label_map,
|
||||
mut label_map_back,
|
||||
mut types_map_back,
|
||||
mut types_map,
|
||||
mut lifetime_map_back,
|
||||
mut lifetime_map,
|
||||
mut binding_definitions,
|
||||
mut field_map_back,
|
||||
mut pat_field_map_back,
|
||||
mut template_map,
|
||||
mut expansions,
|
||||
diagnostics,
|
||||
} = self;
|
||||
exprs.shrink_to_fit();
|
||||
labels.shrink_to_fit();
|
||||
@ -247,24 +327,90 @@ impl ExpressionStoreBuilder {
|
||||
types.shrink_to_fit();
|
||||
lifetimes.shrink_to_fit();
|
||||
|
||||
ExpressionStore {
|
||||
exprs,
|
||||
pats,
|
||||
bindings,
|
||||
labels,
|
||||
binding_owners,
|
||||
types,
|
||||
lifetimes,
|
||||
block_scopes: block_scopes.into_boxed_slice(),
|
||||
ident_hygiene,
|
||||
expr_map.shrink_to_fit();
|
||||
expr_map_back.shrink_to_fit();
|
||||
pat_map.shrink_to_fit();
|
||||
pat_map_back.shrink_to_fit();
|
||||
label_map.shrink_to_fit();
|
||||
label_map_back.shrink_to_fit();
|
||||
types_map_back.shrink_to_fit();
|
||||
types_map.shrink_to_fit();
|
||||
lifetime_map_back.shrink_to_fit();
|
||||
lifetime_map.shrink_to_fit();
|
||||
binding_definitions.shrink_to_fit();
|
||||
field_map_back.shrink_to_fit();
|
||||
pat_field_map_back.shrink_to_fit();
|
||||
if let Some(template_map) = &mut template_map {
|
||||
let FormatTemplate {
|
||||
format_args_to_captures,
|
||||
asm_to_captures,
|
||||
implicit_capture_to_source,
|
||||
} = &mut **template_map;
|
||||
format_args_to_captures.shrink_to_fit();
|
||||
asm_to_captures.shrink_to_fit();
|
||||
implicit_capture_to_source.shrink_to_fit();
|
||||
}
|
||||
expansions.shrink_to_fit();
|
||||
|
||||
let has_exprs =
!exprs.is_empty() || !labels.is_empty() || !pats.is_empty() || !bindings.is_empty();

let store = {
let expr_only = if has_exprs {
Some(Box::new(ExpressionOnlyStore {
exprs,
pats,
bindings,
labels,
binding_owners,
block_scopes: block_scopes.into_boxed_slice(),
ident_hygiene,
}))
} else {
None
};
ExpressionStore { expr_only, types, lifetimes }
};

let source_map = {
let expr_only = if has_exprs || !expansions.is_empty() || !diagnostics.is_empty() {
Some(Box::new(ExpressionOnlySourceMap {
expr_map,
expr_map_back,
pat_map,
pat_map_back,
label_map,
label_map_back,
binding_definitions,
field_map_back,
pat_field_map_back,
template_map,
expansions,
diagnostics: ThinVec::from_iter(diagnostics),
}))
} else {
None
};
ExpressionStoreSourceMap {
expr_only,
types_map_back,
types_map,
lifetime_map_back,
lifetime_map,
}
};

(store, source_map)
}
}
|
||||
|
||||
impl ExpressionStore {
|
||||
pub fn empty_singleton() -> Arc<Self> {
|
||||
static EMPTY: LazyLock<Arc<ExpressionStore>> =
|
||||
LazyLock::new(|| Arc::new(ExpressionStoreBuilder::default().finish()));
|
||||
pub fn empty_singleton() -> (Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>) {
|
||||
static EMPTY: LazyLock<(Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>)> =
|
||||
LazyLock::new(|| {
|
||||
let (store, source_map) = ExpressionStoreBuilder::default().finish();
|
||||
(Arc::new(store), Arc::new(source_map))
|
||||
});
|
||||
EMPTY.clone()
|
||||
}
|
||||
|
||||
@ -273,7 +419,12 @@ impl ExpressionStore {
|
||||
&'a self,
|
||||
db: &'a dyn DefDatabase,
|
||||
) -> impl Iterator<Item = (BlockId, &'a DefMap)> + 'a {
|
||||
self.block_scopes.iter().map(move |&block| (block, block_def_map(db, block)))
|
||||
self.expr_only
|
||||
.as_ref()
|
||||
.map(|it| &*it.block_scopes)
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(move |&block| (block, block_def_map(db, block)))
|
||||
}
|
||||
|
||||
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
|
||||
@ -320,7 +471,8 @@ impl ExpressionStore {
|
||||
}
|
||||
|
||||
pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool {
|
||||
match self.binding_owners.get(&binding) {
|
||||
let Some(expr_only) = &self.expr_only else { return false };
|
||||
match expr_only.binding_owners.get(&binding) {
|
||||
Some(it) => {
|
||||
// We assign expression ids in a way that outer closures will receive
|
||||
// a lower id
|
||||
@ -330,6 +482,11 @@ impl ExpressionStore {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn binding_owner(&self, id: BindingId) -> Option<ExprId> {
|
||||
self.expr_only.as_ref()?.binding_owners.get(&id).copied()
|
||||
}
|
||||
|
||||
/// Walks the immediate children expressions and calls `f` for each child expression.
|
||||
///
|
||||
/// Note that this does not walk const blocks.
|
||||
@ -601,16 +758,22 @@ impl ExpressionStore {
|
||||
});
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[track_caller]
|
||||
fn assert_expr_only(&self) -> &ExpressionOnlyStore {
|
||||
self.expr_only.as_ref().expect("should have `ExpressionStore::expr_only`")
|
||||
}
|
||||
|
||||
fn binding_hygiene(&self, binding: BindingId) -> HygieneId {
|
||||
self.bindings[binding].hygiene
|
||||
self.assert_expr_only().bindings[binding].hygiene
|
||||
}
|
||||
|
||||
pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId {
|
||||
self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
|
||||
self.assert_expr_only().ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT)
|
||||
}
|
||||
|
||||
pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId {
|
||||
self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
|
||||
self.assert_expr_only().ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT)
|
||||
}
|
||||
|
||||
pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId {
|
||||
@ -619,43 +782,72 @@ impl ExpressionStore {
|
||||
ExprOrPatId::PatId(id) => self.pat_path_hygiene(id),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> {
match &self.expr_only {
Some(it) => it.exprs.iter(),
None => const { &Arena::new() }.iter(),
}
}

#[inline]
pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
match &self.expr_only {
Some(it) => it.pats.iter(),
None => const { &Arena::new() }.iter(),
}
}

#[inline]
pub fn bindings(&self) -> impl Iterator<Item = (BindingId, &Binding)> {
match &self.expr_only {
Some(it) => it.bindings.iter(),
None => const { &Arena::new() }.iter(),
}
}
}
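
The `const { &Arena::new() }` arms above rely on an inline `const` block promoting an empty, const-constructible collection to a `'static` borrow, so both match arms yield the same iterator type. A sketch of the same trick with a plain `Vec` instead of the arena type (illustrative only, not code from this commit):

struct Numbers {
    data: Option<Box<Vec<u32>>>,
}

impl Numbers {
    fn iter(&self) -> impl Iterator<Item = &u32> {
        match &self.data {
            Some(data) => data.iter(),
            // The inline const block yields a `&'static Vec<u32>` (`Vec::new` is
            // a const fn), so this arm has the same `std::slice::Iter` type.
            None => const { &Vec::<u32>::new() }.iter(),
        }
    }
}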
|
||||
|
||||
impl Index<ExprId> for ExpressionStore {
|
||||
type Output = Expr;
|
||||
|
||||
#[inline]
|
||||
fn index(&self, expr: ExprId) -> &Expr {
|
||||
&self.exprs[expr]
|
||||
&self.assert_expr_only().exprs[expr]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<PatId> for ExpressionStore {
|
||||
type Output = Pat;
|
||||
|
||||
#[inline]
|
||||
fn index(&self, pat: PatId) -> &Pat {
|
||||
&self.pats[pat]
|
||||
&self.assert_expr_only().pats[pat]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<LabelId> for ExpressionStore {
|
||||
type Output = Label;
|
||||
|
||||
#[inline]
|
||||
fn index(&self, label: LabelId) -> &Label {
|
||||
&self.labels[label]
|
||||
&self.assert_expr_only().labels[label]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<BindingId> for ExpressionStore {
|
||||
type Output = Binding;
|
||||
|
||||
#[inline]
|
||||
fn index(&self, b: BindingId) -> &Binding {
|
||||
&self.bindings[b]
|
||||
&self.assert_expr_only().bindings[b]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<TypeRefId> for ExpressionStore {
|
||||
type Output = TypeRef;
|
||||
|
||||
#[inline]
|
||||
fn index(&self, b: TypeRefId) -> &TypeRef {
|
||||
&self.types[b]
|
||||
}
|
||||
@ -664,6 +856,7 @@ impl Index<TypeRefId> for ExpressionStore {
|
||||
impl Index<LifetimeRefId> for ExpressionStore {
|
||||
type Output = LifetimeRef;
|
||||
|
||||
#[inline]
|
||||
fn index(&self, b: LifetimeRefId) -> &LifetimeRef {
|
||||
&self.lifetimes[b]
|
||||
}
|
||||
@ -684,12 +877,6 @@ impl Index<PathId> for ExpressionStore {
|
||||
// FIXME: Change `node_` prefix to something more reasonable.
|
||||
// Perhaps `expr_syntax` and `expr_id`?
|
||||
impl ExpressionStoreSourceMap {
|
||||
pub fn empty_singleton() -> Arc<Self> {
|
||||
static EMPTY: LazyLock<Arc<ExpressionStoreSourceMap>> =
|
||||
LazyLock::new(|| Arc::new(ExpressionStoreSourceMap::default()));
|
||||
EMPTY.clone()
|
||||
}
|
||||
|
||||
pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
|
||||
match id {
|
||||
ExprOrPatId::ExprId(id) => self.expr_syntax(id),
|
||||
@ -697,30 +884,46 @@ impl ExpressionStoreSourceMap {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn expr_or_synthetic(&self) -> Result<&ExpressionOnlySourceMap, SyntheticSyntax> {
|
||||
self.expr_only.as_deref().ok_or(SyntheticSyntax)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn expr_only(&self) -> Option<&ExpressionOnlySourceMap> {
|
||||
self.expr_only.as_deref()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[track_caller]
|
||||
fn assert_expr_only(&self) -> &ExpressionOnlySourceMap {
|
||||
self.expr_only.as_ref().expect("should have `ExpressionStoreSourceMap::expr_only`")
|
||||
}
|
||||
|
||||
pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> {
|
||||
self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
|
||||
self.expr_or_synthetic()?.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
|
||||
}
|
||||
|
||||
pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprOrPatId> {
|
||||
let src = node.map(AstPtr::new);
|
||||
self.expr_map.get(&src).cloned()
|
||||
self.expr_only()?.expr_map.get(&src).cloned()
|
||||
}
|
||||
|
||||
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
|
||||
let src = node.map(AstPtr::new);
|
||||
self.expansions.get(&src).cloned()
|
||||
self.expr_only()?.expansions.get(&src).cloned()
|
||||
}
|
||||
|
||||
pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ {
|
||||
self.expansions.iter().map(|(&a, &b)| (a, b))
|
||||
self.expr_only().into_iter().flat_map(|it| it.expansions.iter().map(|(&a, &b)| (a, b)))
|
||||
}
|
||||
|
||||
pub fn pat_syntax(&self, pat: PatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
|
||||
self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
|
||||
self.expr_or_synthetic()?.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
|
||||
}
|
||||
|
||||
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> {
|
||||
self.pat_map.get(&node.map(AstPtr::new)).cloned()
|
||||
self.expr_only()?.pat_map.get(&node.map(AstPtr::new)).cloned()
|
||||
}
|
||||
|
||||
pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> {
|
||||
@ -732,49 +935,50 @@ impl ExpressionStoreSourceMap {
|
||||
}
|
||||
|
||||
pub fn label_syntax(&self, label: LabelId) -> LabelSource {
|
||||
self.label_map_back[label]
|
||||
self.assert_expr_only().label_map_back[label]
|
||||
}
|
||||
|
||||
pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] {
|
||||
self.binding_definitions.get(binding).map_or(&[], Deref::deref)
|
||||
self.assert_expr_only().binding_definitions.get(binding).map_or(&[], Deref::deref)
|
||||
}
|
||||
|
||||
pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> {
|
||||
let src = node.map(AstPtr::new);
|
||||
self.label_map.get(&src).cloned()
|
||||
self.expr_only()?.label_map.get(&src).cloned()
|
||||
}
|
||||
|
||||
pub fn field_syntax(&self, expr: ExprId) -> FieldSource {
|
||||
self.field_map_back[&expr]
|
||||
self.assert_expr_only().field_map_back[&expr]
|
||||
}
|
||||
|
||||
pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource {
|
||||
self.pat_field_map_back[&pat]
|
||||
self.assert_expr_only().pat_field_map_back[&pat]
|
||||
}
|
||||
|
||||
pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprOrPatId> {
|
||||
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast);
|
||||
self.expr_map.get(&src).copied()
|
||||
self.expr_only()?.expr_map.get(&src).copied()
|
||||
}
|
||||
|
||||
pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> {
|
||||
self.expansions.iter()
|
||||
self.expr_only().into_iter().flat_map(|it| it.expansions.iter())
|
||||
}
|
||||
|
||||
pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
|
||||
self.expansions.get(&node.map(AstPtr::new)).copied()
|
||||
self.expr_only()?.expansions.get(&node.map(AstPtr::new)).copied()
|
||||
}
|
||||
|
||||
pub fn implicit_format_args(
|
||||
&self,
|
||||
node: InFile<&ast::FormatArgsExpr>,
|
||||
) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> {
|
||||
let expr_only = self.expr_only()?;
|
||||
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
|
||||
let (hygiene, names) = self
|
||||
let (hygiene, names) = expr_only
|
||||
.template_map
|
||||
.as_ref()?
|
||||
.format_args_to_captures
|
||||
.get(&self.expr_map.get(&src)?.as_expr()?)?;
|
||||
.get(&expr_only.expr_map.get(&src)?.as_expr()?)?;
|
||||
Some((*hygiene, &**names))
|
||||
}
|
||||
|
||||
@ -782,67 +986,28 @@ impl ExpressionStoreSourceMap {
|
||||
&self,
|
||||
capture_expr: ExprId,
|
||||
) -> Option<InFile<(ExprPtr, TextRange)>> {
|
||||
self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied()
|
||||
self.expr_only()?
|
||||
.template_map
|
||||
.as_ref()?
|
||||
.implicit_capture_to_source
|
||||
.get(&capture_expr)
|
||||
.copied()
|
||||
}
|
||||
|
||||
pub fn asm_template_args(
|
||||
&self,
|
||||
node: InFile<&ast::AsmExpr>,
|
||||
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
|
||||
let expr_only = self.expr_only()?;
|
||||
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
|
||||
let expr = self.expr_map.get(&src)?.as_expr()?;
|
||||
Some(expr)
|
||||
.zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref))
|
||||
let expr = expr_only.expr_map.get(&src)?.as_expr()?;
|
||||
Some(expr).zip(
|
||||
expr_only.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref),
|
||||
)
|
||||
}
|
||||
|
||||
/// Get a reference to the source map's diagnostics.
|
||||
pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] {
|
||||
&self.diagnostics
|
||||
}
|
||||
|
||||
fn shrink_to_fit(&mut self) {
|
||||
let Self {
|
||||
expr_map,
|
||||
expr_map_back,
|
||||
pat_map,
|
||||
pat_map_back,
|
||||
label_map,
|
||||
label_map_back,
|
||||
field_map_back,
|
||||
pat_field_map_back,
|
||||
expansions,
|
||||
template_map,
|
||||
diagnostics,
|
||||
binding_definitions,
|
||||
types_map,
|
||||
types_map_back,
|
||||
lifetime_map_back,
|
||||
lifetime_map,
|
||||
} = self;
|
||||
if let Some(template_map) = template_map {
|
||||
let FormatTemplate {
|
||||
format_args_to_captures,
|
||||
asm_to_captures,
|
||||
implicit_capture_to_source,
|
||||
} = &mut **template_map;
|
||||
format_args_to_captures.shrink_to_fit();
|
||||
asm_to_captures.shrink_to_fit();
|
||||
implicit_capture_to_source.shrink_to_fit();
|
||||
}
|
||||
expr_map.shrink_to_fit();
|
||||
expr_map_back.shrink_to_fit();
|
||||
pat_map.shrink_to_fit();
|
||||
pat_map_back.shrink_to_fit();
|
||||
label_map.shrink_to_fit();
|
||||
label_map_back.shrink_to_fit();
|
||||
field_map_back.shrink_to_fit();
|
||||
pat_field_map_back.shrink_to_fit();
|
||||
expansions.shrink_to_fit();
|
||||
diagnostics.shrink_to_fit();
|
||||
binding_definitions.shrink_to_fit();
|
||||
types_map.shrink_to_fit();
|
||||
types_map_back.shrink_to_fit();
|
||||
lifetime_map.shrink_to_fit();
|
||||
lifetime_map_back.shrink_to_fit();
|
||||
self.expr_only().map(|it| &*it.diagnostics).unwrap_or_default()
}
}
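
The source-map accessors above follow two styles: syntax-to-id lookups go through `expr_only()` and just return `None` or an empty iterator when there is no expression data, while id-to-syntax lookups use `assert_expr_only()` and panic, on the reasoning that a caller holding an `ExprId`/`PatId` can only have obtained it from a store that does contain expressions. A small sketch of that split (hypothetical names, not from this commit):

struct Inner {
    names: Vec<String>,
}

struct Outer {
    inner: Option<Box<Inner>>,
}

impl Outer {
    fn find(&self, name: &str) -> Option<usize> {
        // Lookup by key: absent expression data simply means "not found".
        self.inner.as_deref()?.names.iter().position(|it| it.as_str() == name)
    }

    fn name(&self, idx: usize) -> &str {
        // Lookup by an index handed out earlier: missing data is a bug.
        &self.inner.as_ref().expect("no expression data").names[idx]
    }
}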
|
||||
|
@ -36,6 +36,7 @@ pub struct Body {
|
||||
impl ops::Deref for Body {
|
||||
type Target = ExpressionStore;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.store
|
||||
}
|
||||
@ -61,6 +62,7 @@ pub struct BodySourceMap {
|
||||
impl ops::Deref for BodySourceMap {
|
||||
type Target = ExpressionStoreSourceMap;
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.store
|
||||
}
|
||||
@ -102,9 +104,7 @@ impl Body {
|
||||
}
|
||||
};
|
||||
let module = def.module(db);
|
||||
let (body, mut source_map) =
|
||||
lower_body(db, def, file_id, module, params, body, is_async_fn);
|
||||
source_map.store.shrink_to_fit();
|
||||
let (body, source_map) = lower_body(db, def, file_id, module, params, body, is_async_fn);
|
||||
|
||||
(Arc::new(body), Arc::new(source_map))
|
||||
}
|
||||
|
@ -121,14 +121,10 @@ pub(super) fn lower_body(
|
||||
params = (0..count).map(|_| collector.missing_pat()).collect();
|
||||
};
|
||||
let body_expr = collector.missing_expr();
|
||||
let (store, source_map) = collector.store.finish();
|
||||
return (
|
||||
Body {
|
||||
store: collector.store.finish(),
|
||||
params: params.into_boxed_slice(),
|
||||
self_param,
|
||||
body_expr,
|
||||
},
|
||||
BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
|
||||
Body { store, params: params.into_boxed_slice(), self_param, body_expr },
|
||||
BodySourceMap { self_param: source_map_self_param, store: source_map },
|
||||
);
|
||||
}
|
||||
|
||||
@ -171,14 +167,10 @@ pub(super) fn lower_body(
|
||||
},
|
||||
);
|
||||
|
||||
let (store, source_map) = collector.store.finish();
|
||||
(
|
||||
Body {
|
||||
store: collector.store.finish(),
|
||||
params: params.into_boxed_slice(),
|
||||
self_param,
|
||||
body_expr,
|
||||
},
|
||||
BodySourceMap { self_param: source_map_self_param, store: collector.source_map },
|
||||
Body { store, params: params.into_boxed_slice(), self_param, body_expr },
|
||||
BodySourceMap { self_param: source_map_self_param, store: source_map },
|
||||
)
|
||||
}
|
||||
|
||||
@ -190,7 +182,8 @@ pub(crate) fn lower_type_ref(
|
||||
let mut expr_collector = ExprCollector::new(db, module, type_ref.file_id);
|
||||
let type_ref =
|
||||
expr_collector.lower_type_ref_opt(type_ref.value, &mut ExprCollector::impl_trait_allocator);
|
||||
(expr_collector.store.finish(), expr_collector.source_map, type_ref)
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(store, source_map, type_ref)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_generic_params(
|
||||
@ -205,7 +198,8 @@ pub(crate) fn lower_generic_params(
|
||||
let mut collector = generics::GenericParamsCollector::new(def);
|
||||
collector.lower(&mut expr_collector, param_list, where_clause);
|
||||
let params = collector.finish();
|
||||
(Arc::new(expr_collector.store.finish()), params, expr_collector.source_map)
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(Arc::new(store), params, source_map)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_impl(
|
||||
@ -232,7 +226,8 @@ pub(crate) fn lower_impl(
|
||||
impl_syntax.value.where_clause(),
|
||||
);
|
||||
let params = collector.finish();
|
||||
(expr_collector.store.finish(), expr_collector.source_map, self_ty, trait_, params)
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(store, source_map, self_ty, trait_, params)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_trait(
|
||||
@ -253,7 +248,8 @@ pub(crate) fn lower_trait(
|
||||
trait_syntax.value.where_clause(),
|
||||
);
|
||||
let params = collector.finish();
|
||||
(expr_collector.store.finish(), expr_collector.source_map, params)
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(store, source_map, params)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_trait_alias(
|
||||
@ -274,7 +270,8 @@ pub(crate) fn lower_trait_alias(
|
||||
trait_syntax.value.where_clause(),
|
||||
);
|
||||
let params = collector.finish();
|
||||
(expr_collector.store.finish(), expr_collector.source_map, params)
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(store, source_map, params)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_type_alias(
|
||||
@ -313,7 +310,8 @@ pub(crate) fn lower_type_alias(
|
||||
.value
|
||||
.ty()
|
||||
.map(|ty| expr_collector.lower_type_ref(ty, &mut ExprCollector::impl_trait_allocator));
|
||||
(expr_collector.store.finish(), expr_collector.source_map, params, bounds, type_ref)
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(store, source_map, params, bounds, type_ref)
|
||||
}
|
||||
|
||||
pub(crate) fn lower_function(
|
||||
@ -421,9 +419,10 @@ pub(crate) fn lower_function(
|
||||
} else {
|
||||
return_type
|
||||
};
|
||||
let (store, source_map) = expr_collector.store.finish();
|
||||
(
|
||||
expr_collector.store.finish(),
|
||||
expr_collector.source_map,
|
||||
store,
|
||||
source_map,
|
||||
generics,
|
||||
params.into_boxed_slice(),
|
||||
return_type,
|
||||
@ -440,7 +439,6 @@ pub struct ExprCollector<'db> {
|
||||
local_def_map: &'db LocalDefMap,
|
||||
module: ModuleId,
|
||||
pub store: ExpressionStoreBuilder,
|
||||
pub(crate) source_map: ExpressionStoreSourceMap,
|
||||
|
||||
// state stuff
|
||||
// Prevent nested impl traits like `impl Foo<impl Bar>`.
|
||||
@ -551,7 +549,6 @@ impl ExprCollector<'_> {
|
||||
module,
|
||||
def_map,
|
||||
local_def_map,
|
||||
source_map: ExpressionStoreSourceMap::default(),
|
||||
store: ExpressionStoreBuilder::default(),
|
||||
expander,
|
||||
current_try_block_label: None,
|
||||
@ -698,7 +695,7 @@ impl ExprCollector<'_> {
|
||||
let id = self.collect_macro_call(mcall, macro_ptr, true, |this, expansion| {
|
||||
this.lower_type_ref_opt(expansion, impl_trait_lower_fn)
|
||||
});
|
||||
self.source_map.types_map.insert(src, id);
|
||||
self.store.types_map.insert(src, id);
|
||||
return id;
|
||||
}
|
||||
None => TypeRef::Error,
|
||||
@ -732,8 +729,8 @@ impl ExprCollector<'_> {
|
||||
fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId {
|
||||
let id = self.store.types.alloc(type_ref);
|
||||
let ptr = self.expander.in_file(node);
|
||||
self.source_map.types_map_back.insert(id, ptr);
|
||||
self.source_map.types_map.insert(ptr, id);
|
||||
self.store.types_map_back.insert(id, ptr);
|
||||
self.store.types_map.insert(ptr, id);
|
||||
id
|
||||
}
|
||||
|
||||
@ -744,8 +741,8 @@ impl ExprCollector<'_> {
|
||||
) -> LifetimeRefId {
|
||||
let id = self.store.lifetimes.alloc(lifetime_ref);
|
||||
let ptr = self.expander.in_file(node);
|
||||
self.source_map.lifetime_map_back.insert(id, ptr);
|
||||
self.source_map.lifetime_map.insert(ptr, id);
|
||||
self.store.lifetime_map_back.insert(id, ptr);
|
||||
self.store.lifetime_map.insert(ptr, id);
|
||||
id
|
||||
}
|
||||
|
||||
@ -1190,14 +1187,14 @@ impl ExprCollector<'_> {
|
||||
}
|
||||
ast::Expr::ContinueExpr(e) => {
|
||||
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
|
||||
self.source_map.diagnostics.push(e);
|
||||
self.store.diagnostics.push(e);
|
||||
None
|
||||
});
|
||||
self.alloc_expr(Expr::Continue { label }, syntax_ptr)
|
||||
}
|
||||
ast::Expr::BreakExpr(e) => {
|
||||
let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| {
|
||||
self.source_map.diagnostics.push(e);
|
||||
self.store.diagnostics.push(e);
|
||||
None
|
||||
});
|
||||
let expr = e.expr().map(|e| self.collect_expr(e));
|
||||
@ -1207,7 +1204,7 @@ impl ExprCollector<'_> {
|
||||
let inner = self.collect_expr_opt(e.expr());
|
||||
// make the paren expr point to the inner expression as well for IDE resolution
|
||||
let src = self.expander.in_file(syntax_ptr);
|
||||
self.source_map.expr_map.insert(src, inner.into());
|
||||
self.store.expr_map.insert(src, inner.into());
|
||||
inner
|
||||
}
|
||||
ast::Expr::ReturnExpr(e) => {
|
||||
@ -1248,7 +1245,7 @@ impl ExprCollector<'_> {
|
||||
None => self.missing_expr(),
|
||||
};
|
||||
let src = self.expander.in_file(AstPtr::new(&field));
|
||||
self.source_map.field_map_back.insert(expr, src);
|
||||
self.store.field_map_back.insert(expr, src);
|
||||
Some(RecordLitField { name, expr })
|
||||
})
|
||||
.collect();
|
||||
@ -1271,12 +1268,10 @@ impl ExprCollector<'_> {
|
||||
ast::Expr::AwaitExpr(e) => {
|
||||
let expr = self.collect_expr_opt(e.expr());
|
||||
if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
|
||||
self.source_map.diagnostics.push(
|
||||
ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
|
||||
node: self.expander.in_file(AstPtr::new(&e)),
|
||||
location: location.to_string(),
|
||||
},
|
||||
);
|
||||
self.store.diagnostics.push(ExpressionStoreDiagnostics::AwaitOutsideOfAsync {
|
||||
node: self.expander.in_file(AstPtr::new(&e)),
|
||||
location: location.to_string(),
|
||||
});
|
||||
}
|
||||
self.alloc_expr(Expr::Await { expr }, syntax_ptr)
|
||||
}
|
||||
@ -1442,7 +1437,7 @@ impl ExprCollector<'_> {
|
||||
// Make the macro-call point to its expanded expression so we can query
|
||||
// semantics on syntax pointers to the macro
|
||||
let src = self.expander.in_file(syntax_ptr);
|
||||
self.source_map.expr_map.insert(src, id.into());
|
||||
self.store.expr_map.insert(src, id.into());
|
||||
id
|
||||
}
|
||||
None => self.alloc_expr(Expr::Missing, syntax_ptr),
|
||||
@ -1486,7 +1481,7 @@ impl ExprCollector<'_> {
|
||||
let expr = self.collect_expr(expr);
|
||||
// Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`.
|
||||
let id = self.store.pats.alloc(Pat::Expr(expr));
|
||||
self.source_map.pat_map_back.insert(id, src);
|
||||
self.store.pat_map_back.insert(id, src);
|
||||
id
|
||||
})
|
||||
}
|
||||
@ -1555,7 +1550,7 @@ impl ExprCollector<'_> {
|
||||
let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
|
||||
this.collect_expr_as_pat_opt(expansion)
|
||||
});
|
||||
self.source_map.expr_map.insert(src, id.into());
|
||||
self.store.expr_map.insert(src, id.into());
|
||||
id
|
||||
}
|
||||
ast::Expr::RecordExpr(e) => {
|
||||
@ -1576,7 +1571,7 @@ impl ExprCollector<'_> {
|
||||
let pat = self.collect_expr_as_pat(field_expr);
|
||||
let name = f.field_name()?.as_name();
|
||||
let src = self.expander.in_file(AstPtr::new(&f).wrap_left());
|
||||
self.source_map.pat_field_map_back.insert(pat, src);
|
||||
self.store.pat_field_map_back.insert(pat, src);
|
||||
Some(RecordFieldPat { name, pat })
|
||||
})
|
||||
.collect();
|
||||
@ -1622,7 +1617,7 @@ impl ExprCollector<'_> {
|
||||
);
|
||||
if let Either::Left(pat) = pat {
|
||||
let src = this.expander.in_file(AstPtr::new(&expr).wrap_left());
|
||||
this.source_map.pat_map_back.insert(pat, src);
|
||||
this.store.pat_map_back.insert(pat, src);
|
||||
}
|
||||
pat
|
||||
}
|
||||
@ -1968,7 +1963,7 @@ impl ExprCollector<'_> {
|
||||
self.module.krate(),
|
||||
resolver,
|
||||
&mut |ptr, call| {
|
||||
_ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call);
|
||||
_ = self.store.expansions.insert(ptr.map(|(it, _)| it), call);
|
||||
},
|
||||
)
|
||||
}
|
||||
@ -1978,19 +1973,17 @@ impl ExprCollector<'_> {
|
||||
Ok(res) => res,
|
||||
Err(UnresolvedMacro { path }) => {
|
||||
if record_diagnostics {
|
||||
self.source_map.diagnostics.push(
|
||||
ExpressionStoreDiagnostics::UnresolvedMacroCall {
|
||||
node: self.expander.in_file(syntax_ptr),
|
||||
path,
|
||||
},
|
||||
);
|
||||
self.store.diagnostics.push(ExpressionStoreDiagnostics::UnresolvedMacroCall {
|
||||
node: self.expander.in_file(syntax_ptr),
|
||||
path,
|
||||
});
|
||||
}
|
||||
return collector(self, None);
|
||||
}
|
||||
};
|
||||
if record_diagnostics {
|
||||
if let Some(err) = res.err {
|
||||
self.source_map
|
||||
self.store
|
||||
.diagnostics
|
||||
.push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err });
|
||||
}
|
||||
@ -2001,7 +1994,7 @@ impl ExprCollector<'_> {
|
||||
// Keep collecting even with expansion errors so we can provide completions and
|
||||
// other services in incomplete macro expressions.
|
||||
if let Some(macro_file) = self.expander.current_file_id().macro_file() {
|
||||
self.source_map.expansions.insert(macro_call_ptr, macro_file);
|
||||
self.store.expansions.insert(macro_call_ptr, macro_file);
|
||||
}
|
||||
|
||||
if record_diagnostics {
|
||||
@ -2050,7 +2043,7 @@ impl ExprCollector<'_> {
|
||||
// Make the macro-call point to its expanded expression so we can query
|
||||
// semantics on syntax pointers to the macro
|
||||
let src = self.expander.in_file(syntax_ptr);
|
||||
self.source_map.expr_map.insert(src, tail.into());
|
||||
self.store.expr_map.insert(src, tail.into());
|
||||
})
|
||||
}
|
||||
|
||||
@ -2361,7 +2354,7 @@ impl ExprCollector<'_> {
|
||||
let pat = self.collect_pat(ast_pat, binding_list);
|
||||
let name = f.field_name()?.as_name();
|
||||
let src = self.expander.in_file(AstPtr::new(&f).wrap_right());
|
||||
self.source_map.pat_field_map_back.insert(pat, src);
|
||||
self.store.pat_field_map_back.insert(pat, src);
|
||||
Some(RecordFieldPat { name, pat })
|
||||
})
|
||||
.collect();
|
||||
@ -2424,7 +2417,7 @@ impl ExprCollector<'_> {
|
||||
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
|
||||
this.collect_pat_opt(expanded_pat, binding_list)
|
||||
});
|
||||
self.source_map.pat_map.insert(src, pat.into());
|
||||
self.store.pat_map.insert(src, pat.into());
|
||||
return pat;
|
||||
}
|
||||
None => Pat::Missing,
|
||||
@ -2515,7 +2508,7 @@ impl ExprCollector<'_> {
|
||||
}
|
||||
});
|
||||
if let Some(pat) = pat.left() {
|
||||
self.source_map.pat_map.insert(src, pat.into());
|
||||
self.store.pat_map.insert(src, pat.into());
|
||||
}
|
||||
pat
|
||||
}
|
||||
@ -2537,7 +2530,7 @@ impl ExprCollector<'_> {
|
||||
match enabled {
|
||||
Ok(()) => true,
|
||||
Err(cfg) => {
|
||||
self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
|
||||
self.store.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode {
|
||||
node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())),
|
||||
cfg,
|
||||
opts: self.cfg_options.clone(),
|
||||
@ -2548,7 +2541,7 @@ impl ExprCollector<'_> {
|
||||
}
|
||||
|
||||
fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) {
|
||||
self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id);
|
||||
self.store.binding_definitions.entry(binding_id).or_default().push(pat_id);
|
||||
}
|
||||
|
||||
// region: labels
|
||||
@ -2724,7 +2717,7 @@ impl ExprCollector<'_> {
|
||||
|name, range| {
|
||||
let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name)));
|
||||
if let Some(range) = range {
|
||||
self.source_map
|
||||
self.store
|
||||
.template_map
|
||||
.get_or_insert_with(Default::default)
|
||||
.implicit_capture_to_source
|
||||
@ -2836,7 +2829,7 @@ impl ExprCollector<'_> {
|
||||
)
|
||||
};
|
||||
|
||||
self.source_map
|
||||
self.store
|
||||
.template_map
|
||||
.get_or_insert_with(Default::default)
|
||||
.format_args_to_captures
|
||||
@ -3386,8 +3379,8 @@ impl ExprCollector<'_> {
|
||||
fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
|
||||
let src = self.expander.in_file(ptr);
|
||||
let id = self.store.exprs.alloc(expr);
|
||||
self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
|
||||
self.source_map.expr_map.insert(src, id.into());
|
||||
self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
|
||||
self.store.expr_map.insert(src, id.into());
|
||||
id
|
||||
}
|
||||
// FIXME: desugared exprs don't have ptr, that's wrong and should be fixed.
|
||||
@ -3398,9 +3391,9 @@ impl ExprCollector<'_> {
|
||||
fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
|
||||
let src = self.expander.in_file(ptr);
|
||||
let id = self.store.exprs.alloc(expr);
|
||||
self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
|
||||
self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
|
||||
// We intentionally don't fill this as it could overwrite a non-desugared entry
|
||||
// self.source_map.expr_map.insert(src, id);
|
||||
// self.store.expr_map.insert(src, id);
|
||||
id
|
||||
}
|
||||
fn missing_expr(&mut self) -> ExprId {
|
||||
@ -3423,24 +3416,24 @@ impl ExprCollector<'_> {
|
||||
fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId {
|
||||
let src = self.expander.in_file(ptr);
|
||||
let id = self.store.pats.alloc(pat);
|
||||
self.source_map.expr_map.insert(src, id.into());
|
||||
self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
|
||||
self.store.expr_map.insert(src, id.into());
|
||||
self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
|
||||
id
|
||||
}
|
||||
|
||||
fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId {
|
||||
let src = self.expander.in_file(ptr);
|
||||
let id = self.store.exprs.alloc(expr);
|
||||
self.source_map.pat_map.insert(src, id.into());
|
||||
self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
|
||||
self.store.pat_map.insert(src, id.into());
|
||||
self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
|
||||
id
|
||||
}
|
||||
|
||||
fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
|
||||
let src = self.expander.in_file(ptr);
|
||||
let id = self.store.pats.alloc(pat);
|
||||
self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
|
||||
self.source_map.pat_map.insert(src, id.into());
|
||||
self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
|
||||
self.store.pat_map.insert(src, id.into());
|
||||
id
|
||||
}
|
||||
// FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
|
||||
@ -3454,8 +3447,8 @@ impl ExprCollector<'_> {
|
||||
fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId {
|
||||
let src = self.expander.in_file(ptr);
|
||||
let id = self.store.labels.alloc(label);
|
||||
self.source_map.label_map_back.insert(id, src);
|
||||
self.source_map.label_map.insert(src, id);
|
||||
self.store.label_map_back.insert(id, src);
|
||||
self.store.label_map.insert(src, id);
|
||||
id
|
||||
}
|
||||
// FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow.
|
||||
|
@ -273,7 +273,7 @@ impl ExprCollector<'_> {
|
||||
Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
|
||||
syntax_ptr,
|
||||
);
|
||||
self.source_map
|
||||
self.store
|
||||
.template_map
|
||||
.get_or_insert_with(Default::default)
|
||||
.asm_to_captures
|
||||
|
@ -23,7 +23,7 @@ fn lower_path(path: ast::Path) -> (TestDB, ExpressionStore, Option<Path>) {
|
||||
let mut ctx =
|
||||
ExprCollector::new(&db, crate_def_map(&db, krate).root_module_id(), file_id.into());
|
||||
let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator);
|
||||
let store = ctx.store.finish();
|
||||
let (store, _) = ctx.store.finish();
|
||||
(db, store, lowered_path)
|
||||
}
|
||||
|
||||
|
@ -902,7 +902,7 @@ impl Printer<'_> {
|
||||
let mut same_name = false;
|
||||
if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {
|
||||
if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
|
||||
&self.store.bindings[*id]
|
||||
&self.store.assert_expr_only().bindings[*id]
|
||||
{
|
||||
if name.as_str() == field_name {
|
||||
same_name = true;
|
||||
@ -1063,7 +1063,7 @@ impl Printer<'_> {
|
||||
}
|
||||
|
||||
fn print_binding(&mut self, id: BindingId) {
|
||||
let Binding { name, mode, .. } = &self.store.bindings[id];
|
||||
let Binding { name, mode, .. } = &self.store.assert_expr_only().bindings[id];
|
||||
let mode = match mode {
|
||||
BindingAnnotation::Unannotated => "",
|
||||
BindingAnnotation::Mutable => "mut ",
|
||||
|
@ -106,7 +106,9 @@ impl ExprScopes {
|
||||
let mut scopes = ExprScopes {
|
||||
scopes: Arena::default(),
|
||||
scope_entries: Arena::default(),
|
||||
scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
|
||||
scope_by_expr: ArenaMap::with_capacity(
|
||||
body.expr_only.as_ref().map_or(0, |it| it.exprs.len()),
|
||||
),
|
||||
};
|
||||
let mut root = scopes.root_scope();
|
||||
if let Some(self_param) = body.self_param {
|
||||
@ -179,7 +181,7 @@ impl ExprScopes {
|
||||
binding: BindingId,
|
||||
hygiene: HygieneId,
|
||||
) {
|
||||
let Binding { name, .. } = &store.bindings[binding];
|
||||
let Binding { name, .. } = &store[binding];
|
||||
let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene });
|
||||
self.scopes[scope].entries =
|
||||
IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry);
|
||||
@ -251,7 +253,7 @@ fn compute_expr_scopes(
|
||||
scope: &mut ScopeId,
|
||||
) {
|
||||
let make_label =
|
||||
|label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone()));
|
||||
|label: &Option<LabelId>| label.map(|label| (label, store[label].name.clone()));
|
||||
|
||||
let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| {
|
||||
compute_expr_scopes(expr, store, scopes, scope)
|
||||
@ -534,9 +536,8 @@ fn foo() {
|
||||
};
|
||||
|
||||
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
|
||||
let pat_src = source_map
|
||||
.pat_syntax(*source_map.binding_definitions[resolved.binding()].first().unwrap())
|
||||
.unwrap();
|
||||
let pat_src =
|
||||
source_map.pat_syntax(source_map.patterns_for_binding(resolved.binding())[0]).unwrap();
|
||||
|
||||
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
|
||||
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
|
||||
|
@ -508,9 +508,9 @@ fn f() {
|
||||
}
|
||||
"#,
|
||||
);
|
||||
assert_eq!(body.bindings.len(), 1, "should have a binding for `B`");
|
||||
assert_eq!(body.assert_expr_only().bindings.len(), 1, "should have a binding for `B`");
|
||||
assert_eq!(
|
||||
body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
|
||||
body[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(),
|
||||
"B",
|
||||
"should have a binding for `B`",
|
||||
);
|
||||
@ -566,6 +566,7 @@ const fn f(x: i32) -> i32 {
|
||||
);
|
||||
|
||||
let mtch_arms = body
|
||||
.assert_expr_only()
|
||||
.exprs
|
||||
.iter()
|
||||
.find_map(|(_, expr)| {
|
||||
@ -578,10 +579,10 @@ const fn f(x: i32) -> i32 {
|
||||
.unwrap();
|
||||
|
||||
let MatchArm { pat, .. } = mtch_arms[1];
|
||||
match body.pats[pat] {
|
||||
match body[pat] {
|
||||
Pat::Range { start, end } => {
|
||||
let hir_start = &body.exprs[start.unwrap()];
|
||||
let hir_end = &body.exprs[end.unwrap()];
|
||||
let hir_start = &body[start.unwrap()];
|
||||
let hir_end = &body[end.unwrap()];
|
||||
|
||||
assert!(matches!(hir_start, Expr::Path { .. }));
|
||||
assert!(matches!(hir_end, Expr::Path { .. }));
|
||||
|
@ -779,14 +779,10 @@ impl VariantFields {
|
||||
Arc::new(VariantFields { fields, store: Arc::new(store), shape }),
|
||||
Arc::new(source_map),
|
||||
),
|
||||
None => (
|
||||
Arc::new(VariantFields {
|
||||
fields: Arena::default(),
|
||||
store: ExpressionStore::empty_singleton(),
|
||||
shape,
|
||||
}),
|
||||
ExpressionStoreSourceMap::empty_singleton(),
|
||||
),
|
||||
None => {
|
||||
let (store, source_map) = ExpressionStore::empty_singleton();
|
||||
(Arc::new(VariantFields { fields: Arena::default(), store, shape }), source_map)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -878,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
|
||||
idx += 1;
|
||||
}
|
||||
Err(cfg) => {
|
||||
col.source_map.diagnostics.push(
|
||||
col.store.diagnostics.push(
|
||||
crate::expr_store::ExpressionStoreDiagnostics::InactiveCode {
|
||||
node: InFile::new(fields.file_id, SyntaxNodePtr::new(field.syntax())),
|
||||
cfg,
|
||||
@ -891,9 +887,9 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>(
|
||||
if !has_fields {
|
||||
return None;
|
||||
}
|
||||
let store = col.store.finish();
|
||||
let (store, source_map) = col.store.finish();
|
||||
arena.shrink_to_fit();
|
||||
Some((arena, store, col.source_map))
|
||||
Some((arena, store, source_map))
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
@ -980,7 +976,7 @@ impl EnumVariants {
|
||||
if !matches!(variant.shape, FieldsShape::Unit) {
|
||||
let body = db.body(v.into());
|
||||
// A variant with explicit discriminant
|
||||
if body.exprs[body.body_expr] != crate::hir::Expr::Missing {
|
||||
if !matches!(body[body.body_expr], crate::hir::Expr::Missing) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -281,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant(
|
||||
let def = variant_id.into();
|
||||
let body = db.body(def);
|
||||
let loc = variant_id.lookup(db);
|
||||
if body.exprs[body.body_expr] == Expr::Missing {
|
||||
if matches!(body[body.body_expr], Expr::Missing) {
|
||||
let prev_idx = loc.index.checked_sub(1);
|
||||
let value = match prev_idx {
|
||||
Some(prev_idx) => {
|
||||
@ -334,7 +334,7 @@ pub(crate) fn eval_to_const(
|
||||
// Type checking closures need an isolated body (See the above FIXME). Bail out early to prevent panic.
|
||||
return unknown_const(infer[expr].clone());
|
||||
}
|
||||
if let Expr::Path(p) = &ctx.body.exprs[expr] {
|
||||
if let Expr::Path(p) = &ctx.body[expr] {
|
||||
let resolver = &ctx.resolver;
|
||||
if let Some(c) =
|
||||
path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone())
|
||||
|
@ -226,11 +226,10 @@ impl<'a> DeclValidator<'a> {
|
||||
let body = self.db.body(func.into());
|
||||
let edition = self.edition(func);
|
||||
let mut pats_replacements = body
|
||||
.pats
|
||||
.iter()
|
||||
.pats()
|
||||
.filter_map(|(pat_id, pat)| match pat {
|
||||
Pat::Bind { id, .. } => {
|
||||
let bind_name = &body.bindings[*id].name;
|
||||
let bind_name = &body[*id].name;
|
||||
let mut suggested_text = to_lower_snake_case(bind_name.as_str())?;
|
||||
if is_raw_identifier(&suggested_text, edition) {
|
||||
suggested_text.insert_str(0, "r#");
|
||||
|
@ -101,7 +101,7 @@ impl ExprValidator {
|
||||
self.check_for_trailing_return(body.body_expr, &body);
|
||||
}
|
||||
|
||||
for (id, expr) in body.exprs.iter() {
|
||||
for (id, expr) in body.exprs() {
|
||||
if let Some((variant, missed_fields, true)) =
|
||||
record_literal_missing_fields(db, &self.infer, id, expr)
|
||||
{
|
||||
@ -132,7 +132,7 @@ impl ExprValidator {
|
||||
}
|
||||
}
|
||||
|
||||
for (id, pat) in body.pats.iter() {
|
||||
for (id, pat) in body.pats() {
|
||||
if let Some((variant, missed_fields, true)) =
|
||||
record_pattern_missing_fields(db, &self.infer, id, pat)
|
||||
{
|
||||
@ -389,7 +389,7 @@ impl ExprValidator {
|
||||
if !self.validate_lints {
|
||||
return;
|
||||
}
|
||||
match &body.exprs[body_expr] {
|
||||
match &body[body_expr] {
|
||||
Expr::Block { statements, tail, .. } => {
|
||||
let last_stmt = tail.or_else(|| match statements.last()? {
|
||||
Statement::Expr { expr, .. } => Some(*expr),
|
||||
@ -428,7 +428,7 @@ impl ExprValidator {
|
||||
if else_branch.is_none() {
|
||||
return;
|
||||
}
|
||||
if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] {
|
||||
if let Expr::Block { statements, tail, .. } = &self.body[*then_branch] {
|
||||
let last_then_expr = tail.or_else(|| match statements.last()? {
|
||||
Statement::Expr { expr, .. } => Some(*expr),
|
||||
_ => None,
|
||||
|
@ -151,7 +151,7 @@ impl<'a> PatCtxt<'a> {
|
||||
hir_def::hir::Pat::Bind { id, subpat, .. } => {
|
||||
let bm = self.infer.binding_modes[pat];
|
||||
ty = &self.infer[id];
|
||||
let name = &self.body.bindings[id].name;
|
||||
let name = &self.body[id].name;
|
||||
match (bm, ty.kind(Interner)) {
|
||||
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
|
||||
(BindingMode::Ref(_), _) => {
|
||||
|
@ -217,7 +217,7 @@ impl<'db> UnsafeVisitor<'db> {
|
||||
}
|
||||
|
||||
fn walk_pat(&mut self, current: PatId) {
|
||||
let pat = &self.body.pats[current];
|
||||
let pat = &self.body[current];
|
||||
|
||||
if self.inside_union_destructure {
|
||||
match pat {
|
||||
@ -264,7 +264,7 @@ impl<'db> UnsafeVisitor<'db> {
|
||||
}
|
||||
|
||||
fn walk_expr(&mut self, current: ExprId) {
|
||||
let expr = &self.body.exprs[current];
|
||||
let expr = &self.body[current];
|
||||
let inside_assignment = mem::replace(&mut self.inside_assignment, false);
|
||||
match expr {
|
||||
&Expr::Call { callee, .. } => {
|
||||
@ -284,7 +284,7 @@ impl<'db> UnsafeVisitor<'db> {
|
||||
self.resolver.reset_to_guard(guard);
|
||||
}
|
||||
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
|
||||
match self.body.exprs[*expr] {
|
||||
match self.body[*expr] {
|
||||
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
|
||||
// see https://github.com/rust-lang/rust/pull/125834.
|
||||
Expr::Path(_) => return,
|
||||
|
@ -273,7 +273,7 @@ impl InferenceContext<'_> {
|
||||
fn pat_bound_mutability(&self, pat: PatId) -> Mutability {
|
||||
let mut r = Mutability::Not;
|
||||
self.body.walk_bindings_in_pat(pat, |b| {
|
||||
if self.body.bindings[b].mode == BindingAnnotation::RefMut {
|
||||
if self.body[b].mode == BindingAnnotation::RefMut {
|
||||
r = Mutability::Mut;
|
||||
}
|
||||
});
|
||||
|
@ -459,7 +459,7 @@ impl InferenceContext<'_> {
|
||||
expected: &Ty,
|
||||
decl: Option<DeclContext>,
|
||||
) -> Ty {
|
||||
let Binding { mode, .. } = self.body.bindings[binding];
|
||||
let Binding { mode, .. } = self.body[binding];
|
||||
let mode = if mode == BindingAnnotation::Unannotated {
|
||||
default_bm
|
||||
} else {
|
||||
@ -639,7 +639,7 @@ impl InferenceContext<'_> {
|
||||
pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
|
||||
let mut res = false;
|
||||
body.walk_pats(pat_id, &mut |pat| {
|
||||
res |= matches!(body[pat], Pat::Bind { id, .. } if body.bindings[id].mode == BindingAnnotation::Ref);
|
||||
res |= matches!(body[pat], Pat::Bind { id, .. } if body[id].mode == BindingAnnotation::Ref);
|
||||
});
|
||||
res
|
||||
}
|
||||
|
@ -119,8 +119,7 @@ fn eval_expr(
|
||||
.unwrap();
|
||||
let hir_body = db.body(function_id.into());
|
||||
let b = hir_body
|
||||
.bindings
|
||||
.iter()
|
||||
.bindings()
|
||||
.find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal")
|
||||
.unwrap()
|
||||
.0;
|
||||
|
@@ -1212,10 +1212,9 @@ impl MirSpan {
         match *self {
             MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
             // FIXME: Figure out if this is correct wrt. match ergonomics.
-            MirSpan::BindingId(binding) => matches!(
-                body.bindings[binding].mode,
-                BindingAnnotation::Ref | BindingAnnotation::RefMut
-            ),
+            MirSpan::BindingId(binding) => {
+                matches!(body[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
+            }
             MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
         }
     }
@@ -321,7 +321,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
         current: BasicBlockId,
     ) -> Result<Option<(Operand, BasicBlockId)>> {
         if !self.has_adjustments(expr_id) {
-            if let Expr::Literal(l) = &self.body.exprs[expr_id] {
+            if let Expr::Literal(l) = &self.body[expr_id] {
                 let ty = self.expr_ty_without_adjust(expr_id);
                 return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
             }
@@ -411,7 +411,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
         place: Place,
         mut current: BasicBlockId,
     ) -> Result<Option<BasicBlockId>> {
-        match &self.body.exprs[expr_id] {
+        match &self.body[expr_id] {
             Expr::OffsetOf(_) => {
                 not_supported!("builtin#offset_of")
             }
@@ -1374,7 +1374,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
     }
 
     fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> {
-        match &self.body.exprs[*loc] {
+        match &self.body[*loc] {
             Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
             Expr::Path(c) => {
                 let owner = self.owner;
@@ -1850,7 +1850,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
             self.drop_scopes.last_mut().unwrap().locals.push(local_id);
             if let Pat::Bind { id, subpat: None } = self.body[it] {
                 if matches!(
-                    self.body.bindings[id].mode,
+                    self.body[id].mode,
                     BindingAnnotation::Unannotated | BindingAnnotation::Mutable
                 ) {
                     self.result.binding_locals.insert(id, local_id);
@@ -1859,7 +1859,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
             local_id
         }));
         // and then rest of bindings
-        for (id, _) in self.body.bindings.iter() {
+        for (id, _) in self.body.bindings() {
             if !pick_binding(id) {
                 continue;
             }
@@ -2126,7 +2126,7 @@ pub fn mir_body_for_closure_query(
         .result
         .binding_locals
         .into_iter()
-        .filter(|it| ctx.body.binding_owners.get(&it.0).copied() == Some(expr))
+        .filter(|it| ctx.body.binding_owner(it.0) == Some(expr))
         .collect();
     if let Some(err) = err {
         return Err(MirLowerError::UnresolvedUpvar(err));
@@ -2191,7 +2191,7 @@ pub fn lower_to_mir(
     // 0 is return local
     ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) });
     let binding_picker = |b: BindingId| {
-        let owner = ctx.body.binding_owners.get(&b).copied();
+        let owner = ctx.body.binding_owner(b);
         if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) }
     };
     // 1 to param_len is for params
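The MIR lowering hunks also swap direct lookups in the `binding_owners` map for a `binding_owner` accessor, which lets the map live behind the optional expression-only part without every caller unwrapping it. A minimal sketch, assuming the method is a thin wrapper over that map:

impl ExpressionStore {
    /// Returns the closure/coroutine that owns this binding, if it is not owned
    /// by the top-level expression.
    pub fn binding_owner(&self, id: BindingId) -> Option<ExprId> {
        self.expr_only.as_ref()?.binding_owners.get(&id).copied()
    }
}

`mir_body_for_closure_query` and `lower_to_mir` above only compare the result against `Some(expr)` or check for `None`, so the extra layer of optionality folds away naturally at the call sites.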
@@ -133,7 +133,7 @@ impl MirLowerCtx<'_> {
             }
             this.lower_expr_to_some_place_without_adjust(expr_id, current)
         };
-        match &self.body.exprs[expr_id] {
+        match &self.body[expr_id] {
             Expr::Path(p) => {
                 let resolver_guard =
                     self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
@@ -130,7 +130,7 @@ impl MirLowerCtx<'_> {
                 .collect::<Vec<_>>()
                 .into(),
         );
-        Ok(match &self.body.pats[pattern] {
+        Ok(match &self.body[pattern] {
             Pat::Missing => return Err(MirLowerError::IncompletePattern),
             Pat::Wild => (current, current_else),
             Pat::Tuple { args, ellipsis } => {
@@ -436,7 +436,7 @@ impl MirLowerCtx<'_> {
                     (next, Some(else_target))
                 }
             },
-            Pat::Lit(l) => match &self.body.exprs[*l] {
+            Pat::Lit(l) => match &self.body[*l] {
                 Expr::Literal(l) => {
                     if mode == MatchingMode::Check {
                         let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?;
@@ -219,7 +219,7 @@ impl<'a> MirPrettyCtx<'a> {
 
     fn local_name(&self, local: LocalId) -> LocalName {
         match self.local_to_binding.get(local) {
-            Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
+            Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local),
             None => LocalName::Unknown(local),
         }
     }
@@ -168,7 +168,7 @@ fn check_impl(
         let inference_result = db.infer(def);
 
         for (pat, mut ty) in inference_result.type_of_pat.iter() {
-            if let Pat::Bind { id, .. } = body.pats[pat] {
+            if let Pat::Bind { id, .. } = body[pat] {
                 ty = &inference_result.type_of_binding[id];
             }
             let node = match pat_node(&body_source_map, pat, &db) {
@@ -316,7 +316,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
         }
 
         for (pat, mut ty) in inference_result.type_of_pat.iter() {
-            if let Pat::Bind { id, .. } = body.pats[pat] {
+            if let Pat::Bind { id, .. } = body[pat] {
                 ty = &inference_result.type_of_binding[id];
             }
             let node = match body_source_map.pat_syntax(pat) {
@@ -2034,7 +2034,7 @@ impl DefWithBody {
             )
         }
         let mol = &borrowck_result.mutability_of_locals;
-        for (binding_id, binding_data) in body.bindings.iter() {
+        for (binding_id, binding_data) in body.bindings() {
             if binding_data.problems.is_some() {
                 // We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`.
                 continue;
@@ -677,8 +677,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
         let body = self.db.body(to_be_renamed.parent);
         let resolver = to_be_renamed.parent.resolver(self.db);
-        let starting_expr =
-            body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr);
+        let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr);
         let mut visitor = RenameConflictsVisitor {
             body: &body,
             conflicts: FxHashSet::default(),
@@ -242,11 +242,7 @@ impl<'db> SourceAnalyzer<'db> {
 
     fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
         let pat_id = self.pat_id(&pat.clone().into())?;
-        if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] {
-            Some(id)
-        } else {
-            None
-        }
+        if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None }
     }
 
     pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> {
@@ -997,7 +993,7 @@ impl<'db> SourceAnalyzer<'db> {
         let parent_hir_path = path
             .parent_path()
             .and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator));
-        let store = collector.store.finish();
+        let (store, _) = collector.store.finish();
 
         // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
         // trying to resolve foo::bar.
@@ -1206,7 +1202,7 @@ impl<'db> SourceAnalyzer<'db> {
         let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
         let hir_path =
             collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?;
-        let store = collector.store.finish();
+        let (store, _) = collector.store.finish();
         Some(resolve_hir_path_(
             db,
             &self.resolver,
@@ -92,7 +92,7 @@ pub(super) fn hints(
             },
             MirSpan::Unknown => continue,
         };
-        let binding = &hir.bindings[binding_idx];
+        let binding = &hir[binding_idx];
         let name = binding.name.display_no_db(display_target.edition).to_smolstr();
         if name.starts_with("<ra@") {
             continue; // Ignore desugared variables
@@ -796,7 +796,7 @@ impl flags::AnalysisStats {
            // region:expressions
            let (previous_exprs, previous_unknown, previous_partially_unknown) =
                (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
-            for (expr_id, _) in body.exprs.iter() {
+            for (expr_id, _) in body.exprs() {
                let ty = &inference_result[expr_id];
                num_exprs += 1;
                let unknown_or_partial = if ty.is_unknown() {
@@ -901,7 +901,7 @@ impl flags::AnalysisStats {
            // region:patterns
            let (previous_pats, previous_unknown, previous_partially_unknown) =
                (num_pats, num_pats_unknown, num_pats_partially_unknown);
-            for (pat_id, _) in body.pats.iter() {
+            for (pat_id, _) in body.pats() {
                let ty = &inference_result[pat_id];
                num_pats += 1;
                let unknown_or_partial = if ty.is_unknown() {
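The analysis-stats loops switch from `body.exprs.iter()` / `body.pats.iter()` to `body.exprs()` / `body.pats()`. These presumably mirror the `bindings()` accessor over the optional expression-only store; a sketch under that assumption:

impl ExpressionStore {
    /// Iterate all expressions; empty for types-only stores.
    pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> + '_ {
        self.expr_only.as_ref().into_iter().flat_map(|it| it.exprs.iter())
    }

    /// Iterate all patterns; empty for types-only stores.
    pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> + '_ {
        self.expr_only.as_ref().into_iter().flat_map(|it| it.pats.iter())
    }
}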