Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 11:20:54 +00:00
Merge pull request #19688 from ChayimFriedman2/less-unused
internal: More `shrink_to_fit()` and upgrade dashmap and hashbrown
Commit 71a3888d07
Cargo.lock (generated): 19 changed lines
@@ -79,7 +79,7 @@ name = "base-db"
 version = "0.0.0"
 dependencies = [
  "cfg",
- "dashmap 5.5.3",
+ "dashmap",
  "intern",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "query-group-macro",
@@ -323,19 +323,6 @@ dependencies = [
  "windows-sys 0.59.0",
 ]

-[[package]]
-name = "dashmap"
-version = "5.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856"
-dependencies = [
- "cfg-if",
- "hashbrown 0.14.5",
- "lock_api",
- "once_cell",
- "parking_lot_core",
-]
-
 [[package]]
 name = "dashmap"
 version = "6.1.0"
@@ -1051,7 +1038,7 @@ dependencies = [
 name = "intern"
 version = "0.0.0"
 dependencies = [
- "dashmap 5.5.3",
+ "dashmap",
  "hashbrown 0.14.5",
  "rustc-hash 2.1.1",
  "triomphe",
@@ -2045,7 +2032,7 @@ checksum = "1be22155f8d9732518b2db2bf379fe6f0b2375e76b08b7c8fe6c1b887d548c24"
 dependencies = [
  "boxcar",
  "crossbeam-queue",
- "dashmap 6.1.0",
+ "dashmap",
  "hashbrown 0.15.2",
  "hashlink",
  "indexmap",

@@ -158,11 +158,10 @@ triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
 url = "2.5.4"
 xshell = "0.2.7"

 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
-dashmap = { version = "=5.5.3", features = ["raw-api"] }
+dashmap = { version = "=6.1.0", features = ["raw-api", "inline"] }
-# We need to freeze the version of the crate, as it needs to match with dashmap
-hashbrown = { version = "=0.14.5", features = [
+hashbrown = { version = "0.14.0", features = [
   "inline-more",
 ], default-features = false }

@@ -422,8 +422,8 @@ impl CrateGraphBuilder {
         edition: Edition,
         display_name: Option<CrateDisplayName>,
         version: Option<String>,
-        cfg_options: CfgOptions,
-        potential_cfg_options: Option<CfgOptions>,
+        mut cfg_options: CfgOptions,
+        mut potential_cfg_options: Option<CfgOptions>,
         mut env: Env,
         origin: CrateOrigin,
         is_proc_macro: bool,
@@ -431,6 +431,10 @@ impl CrateGraphBuilder {
         ws_data: Arc<CrateWorkspaceData>,
     ) -> CrateBuilderId {
         env.entries.shrink_to_fit();
+        cfg_options.shrink_to_fit();
+        if let Some(potential_cfg_options) = &mut potential_cfg_options {
+            potential_cfg_options.shrink_to_fit();
+        }
         self.arena.alloc(CrateBuilder {
             basic: CrateData {
                 root_file_id,

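The pattern running through this commit: collections are filled while a structure is being built, then trimmed once construction is done so the excess growth capacity is returned. A minimal standalone sketch of the idea (plain std types, illustrative names, not code from this repository):

use std::collections::HashMap;

/// Build a list and an index, then release spare capacity in one pass.
/// Vec and HashMap grow geometrically while being filled, so a single
/// `shrink_to_fit()` after construction trims them down to their length.
fn build_index(words: &[&str]) -> (Vec<String>, HashMap<String, usize>) {
    let mut list = Vec::new();
    let mut index = HashMap::new();
    for (i, w) in words.iter().enumerate() {
        list.push(w.to_string());
        index.insert(w.to_string(), i);
    }
    // Done mutating: hand back the unused capacity.
    list.shrink_to_fit();
    index.shrink_to_fit();
    (list, index)
}

fn main() {
    let (list, index) = build_index(&["foo", "bar", "baz"]);
    assert_eq!(list.len(), 3);
    assert_eq!(index["bar"], 1);
}
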
@@ -110,6 +110,11 @@ impl CfgOptions {
         enabled.sort_unstable();
         HashableCfgOptions { _enabled: enabled }
     }
+
+    #[inline]
+    pub fn shrink_to_fit(&mut self) {
+        self.enabled.shrink_to_fit();
+    }
 }

 impl Extend<CfgAtom> for CfgOptions {

@@ -223,7 +223,10 @@ impl ItemTree {
     }

     fn shrink_to_fit(&mut self) {
-        if let Some(data) = &mut self.data {
+        let ItemTree { top_level, attrs, data } = self;
+        top_level.shrink_to_fit();
+        attrs.shrink_to_fit();
+        if let Some(data) = data {
             let ItemTreeData {
                 uses,
                 extern_crates,

@@ -493,7 +493,7 @@ pub struct InferenceResult {
     /// ```
     /// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
     pub binding_modes: ArenaMap<PatId, BindingMode>,
-    pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
+    pub expr_adjustments: FxHashMap<ExprId, Box<[Adjustment]>>,
     pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
     // FIXME: remove this field
     pub mutated_bindings_in_closure: FxHashSet<BindingId>,
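The other recurring change in this commit is storing finished adjustment lists as Box<[Adjustment]> instead of Vec<Adjustment>: a boxed slice keeps no spare capacity and its handle is one pointer-width smaller. A hedged, self-contained sketch of the conversion (not code from this PR):

fn main() {
    // A Vec keeps (ptr, len, capacity) and may carry spare capacity from its
    // growth strategy; a boxed slice keeps only (ptr, len) and exactly `len`
    // elements. Converting once the data is final frees the extra space.
    let mut adjustments: Vec<u32> = Vec::with_capacity(8);
    adjustments.extend([1, 2, 3]);
    assert!(adjustments.capacity() >= 8);

    // `into_boxed_slice` shrinks the allocation so that capacity == len.
    let frozen: Box<[u32]> = adjustments.into_boxed_slice();
    assert_eq!(frozen.len(), 3);

    // Going back is cheap and lossless when mutation is needed again.
    let thawed: Vec<u32> = frozen.into_vec();
    assert_eq!(thawed, vec![1, 2, 3]);

    // The handle itself is also one word smaller.
    assert!(std::mem::size_of::<Box<[u32]>>() < std::mem::size_of::<Vec<u32>>());
}
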
@@ -785,8 +785,8 @@ impl<'a> InferenceContext<'a> {
         // Comment from rustc:
         // Even though coercion casts provide type hints, we check casts after fallback for
         // backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
-        let mut apply_adjustments = |expr, adj| {
-            expr_adjustments.insert(expr, adj);
+        let mut apply_adjustments = |expr, adj: Vec<_>| {
+            expr_adjustments.insert(expr, adj.into_boxed_slice());
         };
         let mut set_coercion_cast = |expr| {
             coercion_casts.insert(expr);
@@ -808,22 +808,27 @@ impl<'a> InferenceContext<'a> {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_expr.shrink_to_fit();
         for ty in type_of_pat.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_pat.shrink_to_fit();
         for ty in type_of_binding.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_binding.shrink_to_fit();
         for ty in type_of_rpit.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_rpit.shrink_to_fit();
         for ty in type_of_for_iterator.values_mut() {
             *ty = table.resolve_completely(ty.clone());
             *has_errors = *has_errors || ty.contains_unknown();
         }
+        type_of_for_iterator.shrink_to_fit();

         *has_errors |= !type_mismatches.is_empty();

@@ -838,6 +843,7 @@ impl<'a> InferenceContext<'a> {
             )
             .is_ok()
         });
+        type_mismatches.shrink_to_fit();
         diagnostics.retain_mut(|diagnostic| {
             use InferenceDiagnostic::*;
             match diagnostic {
@@ -866,24 +872,29 @@ impl<'a> InferenceContext<'a> {
             }
             true
         });
+        diagnostics.shrink_to_fit();
         for (_, subst) in method_resolutions.values_mut() {
             *subst = table.resolve_completely(subst.clone());
             *has_errors =
                 *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
         }
+        method_resolutions.shrink_to_fit();
         for (_, subst) in assoc_resolutions.values_mut() {
             *subst = table.resolve_completely(subst.clone());
             *has_errors =
                 *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
         }
+        assoc_resolutions.shrink_to_fit();
         for adjustment in expr_adjustments.values_mut().flatten() {
             adjustment.target = table.resolve_completely(adjustment.target.clone());
             *has_errors = *has_errors || adjustment.target.contains_unknown();
         }
+        expr_adjustments.shrink_to_fit();
         for adjustment in pat_adjustments.values_mut().flatten() {
             *adjustment = table.resolve_completely(adjustment.clone());
             *has_errors = *has_errors || adjustment.contains_unknown();
         }
+        pat_adjustments.shrink_to_fit();
         result.tuple_field_access_types = tuple_field_accesses_rev
             .into_iter()
             .enumerate()
@@ -893,6 +904,7 @@ impl<'a> InferenceContext<'a> {
                     *has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
             })
             .collect();
+        result.tuple_field_access_types.shrink_to_fit();

         result.diagnostics = diagnostics;

@@ -1261,7 +1273,7 @@ impl<'a> InferenceContext<'a> {
         self.result.type_of_expr.insert(expr, ty);
     }

-    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
+    fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment]>) {
         if adjustments.is_empty() {
             return;
         }

@@ -869,8 +869,8 @@ impl CapturedItemWithoutTy {
 impl InferenceContext<'_> {
     fn place_of_expr(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
         let r = self.place_of_expr_without_adjust(tgt_expr)?;
-        let default = vec![];
-        let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
+        let adjustments =
+            self.result.expr_adjustments.get(&tgt_expr).map(|it| &**it).unwrap_or_default();
         apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
     }

@@ -1701,7 +1701,7 @@ impl InferenceContext<'_> {
         for (derefed_callee, callee_ty, params, expr) in exprs {
             if let &Expr::Call { callee, .. } = &self.body[expr] {
                 let mut adjustments =
-                    self.result.expr_adjustments.remove(&callee).unwrap_or_default();
+                    self.result.expr_adjustments.remove(&callee).unwrap_or_default().into_vec();
                 self.write_fn_trait_method_resolution(
                     kind,
                     &derefed_callee,
@@ -1710,7 +1710,7 @@ impl InferenceContext<'_> {
                     &params,
                     expr,
                 );
-                self.result.expr_adjustments.insert(callee, adjustments);
+                self.result.expr_adjustments.insert(callee, adjustments.into_boxed_slice());
             }
         }
     }

@@ -148,11 +148,11 @@ impl CoerceMany {
         if let (Ok(result1), Ok(result2)) = (result1, result2) {
             ctx.table.register_infer_ok(InferOk { value: (), goals: result1.goals });
             for &e in &self.expressions {
-                ctx.write_expr_adj(e, result1.value.0.clone());
+                ctx.write_expr_adj(e, result1.value.0.clone().into_boxed_slice());
             }
             ctx.table.register_infer_ok(InferOk { value: (), goals: result2.goals });
             if let Some(expr) = expr {
-                ctx.write_expr_adj(expr, result2.value.0);
+                ctx.write_expr_adj(expr, result2.value.0.into_boxed_slice());
                 self.expressions.push(expr);
             }
             return self.final_ty = Some(target_ty);
@@ -182,7 +182,7 @@ impl CoerceMany {
         {
             self.final_ty = Some(res);
             for &e in &self.expressions {
-                ctx.write_expr_adj(e, adjustments.clone());
+                ctx.write_expr_adj(e, adjustments.clone().into_boxed_slice());
             }
         } else {
             match cause {
@@ -263,7 +263,7 @@ impl InferenceContext<'_> {
     ) -> Result<Ty, TypeError> {
         let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?;
         if let Some(expr) = expr {
-            self.write_expr_adj(expr, adjustments);
+            self.write_expr_adj(expr, adjustments.into_boxed_slice());
         }
         Ok(ty)
     }

@@ -812,7 +812,7 @@ impl InferenceContext<'_> {
                 self_ty.clone(),
                 self.table.new_lifetime_var(),
             ));
-            self.write_expr_adj(*base, adj);
+            self.write_expr_adj(*base, adj.into_boxed_slice());
             if let Some(func) = self
                 .db
                 .trait_items(index_trait)
@@ -1356,10 +1356,10 @@ impl InferenceContext<'_> {
                 if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) {
                     self.write_expr_adj(
                         lhs,
-                        vec![Adjustment {
+                        Box::new([Adjustment {
                             kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
                             target: p_left.clone(),
-                        }],
+                        }]),
                     );
                 }
             }
@@ -1368,10 +1368,10 @@ impl InferenceContext<'_> {
                 if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) {
                     self.write_expr_adj(
                         rhs,
-                        vec![Adjustment {
+                        Box::new([Adjustment {
                             kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
                             target: p_right.clone(),
-                        }],
+                        }]),
                     );
                 }
             }
@@ -1627,7 +1627,7 @@ impl InferenceContext<'_> {

         match self.lookup_field(&receiver_ty, name) {
             Some((ty, field_id, adjustments, is_public)) => {
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.result.field_resolutions.insert(tgt_expr, field_id);
                 if !is_public {
                     if let Either::Left(field) = field_id {
@@ -1662,7 +1662,7 @@ impl InferenceContext<'_> {
             Some((adjust, func, _)) => {
                 let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
                 let substs = self.substs_for_method_call(tgt_expr, func.into(), None);
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.write_method_resolution(tgt_expr, func, substs.clone());

                 self.check_method_call(
@@ -1725,7 +1725,7 @@ impl InferenceContext<'_> {
                         tgt_expr,
                     );
                 }
-                self.write_expr_adj(callee, adjustments);
+                self.write_expr_adj(callee, adjustments.into_boxed_slice());
                 (params, ret_ty)
             }
             None => {
@@ -1809,7 +1809,7 @@ impl InferenceContext<'_> {
                 }

                 let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());

                 let substs = self.substs_for_method_call(tgt_expr, func.into(), generic_args);
                 self.write_method_resolution(tgt_expr, func, substs.clone());
@@ -1828,7 +1828,7 @@ impl InferenceContext<'_> {
         let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
         {
             Some((ty, field_id, adjustments, _public)) => {
-                self.write_expr_adj(receiver, adjustments);
+                self.write_expr_adj(receiver, adjustments.into_boxed_slice());
                 self.result.field_resolutions.insert(tgt_expr, field_id);
                 Some(ty)
             }

@@ -43,7 +43,7 @@ impl_internable!(
    InternedWrapper<ConstData>,
    InternedWrapper<ConstScalar>,
    InternedWrapper<Vec<CanonicalVarKind>>,
-   InternedWrapper<Vec<ProgramClause>>,
+   InternedWrapper<Box<[ProgramClause]>>,
    InternedWrapper<Vec<QuantifiedWhereClause>>,
    InternedWrapper<SmallVec<[Variance; 16]>>,
 );
@@ -60,7 +60,7 @@ impl chalk_ir::interner::Interner for Interner {
     type InternedGoal = Arc<GoalData>;
     type InternedGoals = Vec<Goal>;
     type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
-    type InternedProgramClauses = Interned<InternedWrapper<Vec<ProgramClause>>>;
+    type InternedProgramClauses = Interned<InternedWrapper<Box<[ProgramClause]>>>;
     type InternedProgramClause = ProgramClauseData;
     type InternedQuantifiedWhereClauses = Interned<InternedWrapper<Vec<QuantifiedWhereClause>>>;
     type InternedVariableKinds = Interned<InternedWrapper<Vec<VariableKind>>>;

@@ -3,22 +3,20 @@
 //! Eventually this should probably be replaced with salsa-based interning.

 use std::{
+    borrow::Borrow,
     fmt::{self, Debug, Display},
-    hash::{BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, BuildHasherDefault, Hash, Hasher},
     ops::Deref,
     sync::OnceLock,
 };

 use dashmap::{DashMap, SharedValue};
-use hashbrown::{HashMap, hash_map::RawEntryMut};
+use hashbrown::raw::RawTable;
 use rustc_hash::FxHasher;
 use triomphe::Arc;

 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
-type Guard<T> = dashmap::RwLockWriteGuard<
-    'static,
-    HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>,
->;
+type Guard<T> = dashmap::RwLockWriteGuard<'static, RawTable<(Arc<T>, SharedValue<()>)>>;

 mod symbol;
 pub use self::symbol::{Symbol, symbols as sym};
@@ -28,54 +26,61 @@ pub struct Interned<T: Internable + ?Sized> {
 }

 impl<T: Internable> Interned<T> {
     #[inline]
     pub fn new(obj: T) -> Self {
-        let (mut shard, hash) = Self::select(&obj);
-        // Atomically,
-        // - check if `obj` is already in the map
-        // - if so, clone its `Arc` and return it
-        // - if not, box it up, insert it, and return a clone
-        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
-        // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
-            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
-            RawEntryMut::Vacant(vac) => Self {
-                arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
-            },
-        }
+        Self::new_generic(obj)
     }
 }

 impl Interned<str> {
     #[inline]
     pub fn new_str(s: &str) -> Self {
-        let (mut shard, hash) = Self::select(s);
-        // Atomically,
-        // - check if `obj` is already in the map
-        // - if so, clone its `Arc` and return it
-        // - if not, box it up, insert it, and return a clone
-        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
-        // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
-            RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
-            RawEntryMut::Vacant(vac) => Self {
-                arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
-            },
-        }
+        Self::new_generic(s)
     }
 }

 impl<T: Internable + ?Sized> Interned<T> {
     #[inline]
-    fn select(obj: &T) -> (Guard<T>, u64) {
+    pub fn new_generic<U>(obj: U) -> Self
+    where
+        U: Borrow<T>,
+        Arc<T>: From<U>,
+    {
         let storage = T::storage().get();
-        let hash = {
-            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
-            obj.hash(&mut hasher);
-            hasher.finish()
+        let (mut shard, hash) = Self::select(storage, obj.borrow());
+        // Atomically,
+        // - check if `obj` is already in the map
+        // - if so, clone its `Arc` and return it
+        // - if not, box it up, insert it, and return a clone
+        // This needs to be atomic (locking the shard) to avoid races with other thread, which could
+        // insert the same object between us looking it up and inserting it.
+        let bucket = match shard.find_or_find_insert_slot(
+            hash,
+            |(other, _)| **other == *obj.borrow(),
+            |(x, _)| Self::hash(storage, x),
+        ) {
+            Ok(bucket) => bucket,
+            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
+            Err(insert_slot) => unsafe {
+                shard.insert_in_slot(hash, insert_slot, (Arc::from(obj), SharedValue::new(())))
+            },
         };
+        // SAFETY: We just retrieved/inserted this bucket.
+        unsafe { Self { arc: bucket.as_ref().0.clone() } }
+    }
+
+    #[inline]
+    fn select(storage: &'static InternMap<T>, obj: &T) -> (Guard<T>, u64) {
+        let hash = Self::hash(storage, obj);
         let shard_idx = storage.determine_shard(hash as usize);
         let shard = &storage.shards()[shard_idx];
         (shard.write(), hash)
     }
+
+    #[inline]
+    fn hash(storage: &'static InternMap<T>, obj: &T) -> u64 {
+        storage.hasher().hash_one(obj)
+    }
 }

 impl<T: Internable + ?Sized> Drop for Interned<T> {
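For context, the interner above hashes a value once, picks a shard from that hash, and performs the lookup-or-insert while holding that single shard lock, so concurrent interns of equal values end up sharing one Arc. A simplified, std-only sketch of the same scheme (the real code goes through dashmap's raw-api and hashbrown's RawTable; all names below are illustrative):

use std::collections::HashMap;
use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hash};
use std::sync::{Arc, Mutex};

/// Sharded interner sketch: hash once, derive a shard index from the hash,
/// then do the lookup-or-insert under that single shard lock.
struct Interner<T: Eq + Hash> {
    hasher: RandomState,
    shards: Vec<Mutex<HashMap<Arc<T>, ()>>>,
}

impl<T: Eq + Hash> Interner<T> {
    fn new(num_shards: usize) -> Self {
        let shards = (0..num_shards).map(|_| Mutex::new(HashMap::new())).collect();
        Interner { hasher: RandomState::new(), shards }
    }

    fn intern(&self, value: T) -> Arc<T> {
        let hash = self.hasher.hash_one(&value);
        let shard_idx = (hash as usize) % self.shards.len();
        let mut shard = self.shards[shard_idx].lock().unwrap();
        // Holding the shard lock makes the check-then-insert atomic.
        if let Some((existing, _)) = shard.get_key_value(&value) {
            return Arc::clone(existing);
        }
        let arc = Arc::new(value);
        shard.insert(Arc::clone(&arc), ());
        arc
    }
}

fn main() {
    let interner = Interner::new(16);
    let a = interner.intern("hello".to_string());
    let b = interner.intern("hello".to_string());
    assert!(Arc::ptr_eq(&a, &b)); // equal values share one allocation
}
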
@@ -93,21 +98,20 @@ impl<T: Internable + ?Sized> Drop for Interned<T> {
 impl<T: Internable + ?Sized> Interned<T> {
     #[cold]
     fn drop_slow(&mut self) {
-        let (mut shard, hash) = Self::select(&self.arc);
+        let storage = T::storage().get();
+        let (mut shard, hash) = Self::select(storage, &self.arc);

         if Arc::count(&self.arc) != 2 {
             // Another thread has interned another copy
             return;
         }

-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &self.arc) {
-            RawEntryMut::Occupied(occ) => occ.remove(),
-            RawEntryMut::Vacant(_) => unreachable!(),
-        };
+        shard.remove_entry(hash, |(other, _)| **other == *self.arc);

         // Shrink the backing storage if the shard is less than 50% occupied.
         if shard.len() * 2 < shard.capacity() {
-            shard.shrink_to_fit();
+            let len = shard.len();
+            shard.shrink_to(len, |(x, _)| Self::hash(storage, x));
         }
     }
 }

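The drop path above only shrinks a shard once it has fallen below half occupancy, so a burst of removals does not trigger a reallocation on every call. A small sketch of that policy with a plain std HashMap (assumed names, not the crate's API):

use std::collections::HashMap;

/// Remove an entry and only pay for a shrink once the map is less than
/// half full, mirroring the amortized policy used for the intern shards.
fn remove_and_maybe_shrink(map: &mut HashMap<String, u32>, key: &str) {
    map.remove(key);
    if map.len() * 2 < map.capacity() {
        // `shrink_to` with the current length trims capacity close to `len`.
        map.shrink_to(map.len());
    }
}

fn main() {
    let mut map: HashMap<String, u32> = HashMap::with_capacity(64);
    for i in 0..8 {
        map.insert(format!("key{i}"), i);
    }
    remove_and_maybe_shrink(&mut map, "key3");
    assert!(map.capacity() >= map.len());
}
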
@@ -2,16 +2,15 @@
 //! supporting compile time declaration of symbols that will never be freed.

 use std::{
-    borrow::Borrow,
     fmt,
-    hash::{BuildHasherDefault, Hash, Hasher},
+    hash::{BuildHasher, BuildHasherDefault, Hash},
     mem::{self, ManuallyDrop},
     ptr::NonNull,
     sync::OnceLock,
 };

 use dashmap::{DashMap, SharedValue};
-use hashbrown::{HashMap, hash_map::RawEntryMut};
+use hashbrown::raw::RawTable;
 use rustc_hash::FxHasher;
 use triomphe::Arc;

@@ -127,31 +126,39 @@ impl fmt::Debug for Symbol {
 const _: () = assert!(size_of::<Symbol>() == size_of::<NonNull<()>>());
 const _: () = assert!(align_of::<Symbol>() == align_of::<NonNull<()>>());

-static MAP: OnceLock<DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>> = OnceLock::new();
+type Map = DashMap<Symbol, (), BuildHasherDefault<FxHasher>>;
+static MAP: OnceLock<Map> = OnceLock::new();

 impl Symbol {
     pub fn intern(s: &str) -> Self {
-        let (mut shard, hash) = Self::select_shard(s);
+        let storage = MAP.get_or_init(symbols::prefill);
+        let (mut shard, hash) = Self::select_shard(storage, s);
         // Atomically,
         // - check if `obj` is already in the map
         // - if so, copy out its entry, conditionally bumping the backing Arc and return it
         // - if not, put it into a box and then into an Arc, insert it, bump the ref-count and return the copy
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
-            RawEntryMut::Occupied(occ) => Self { repr: increase_arc_refcount(occ.key().0) },
-            RawEntryMut::Vacant(vac) => Self {
-                repr: increase_arc_refcount(
-                    vac.insert_hashed_nocheck(
-                        hash,
-                        SymbolProxy(TaggedArcPtr::arc(Arc::new(Box::<str>::from(s)))),
+        let bucket = match shard.find_or_find_insert_slot(
+            hash,
+            |(other, _)| other.as_str() == s,
+            |(x, _)| Self::hash(storage, x.as_str()),
+        ) {
+            Ok(bucket) => bucket,
+            // SAFETY: The slot came from `find_or_find_insert_slot()`, and the table wasn't modified since then.
+            Err(insert_slot) => unsafe {
+                shard.insert_in_slot(
+                    hash,
+                    insert_slot,
+                    (
+                        Symbol { repr: TaggedArcPtr::arc(Arc::new(Box::<str>::from(s))) },
                         SharedValue::new(()),
-                    )
-                    .0
-                    .0,
-                ),
+                    ),
+                )
             },
-        }
+        };
+        // SAFETY: We just retrieved/inserted this bucket.
+        unsafe { bucket.as_ref().0.clone() }
     }

     pub fn integer(i: usize) -> Self {
@@ -180,38 +187,34 @@ impl Symbol {
         symbols::__empty.clone()
     }

     #[inline]
     pub fn as_str(&self) -> &str {
         self.repr.as_str()
     }

     #[inline]
     fn select_shard(
+        storage: &'static Map,
         s: &str,
-    ) -> (
-        dashmap::RwLockWriteGuard<
-            'static,
-            HashMap<SymbolProxy, SharedValue<()>, BuildHasherDefault<FxHasher>>,
-        >,
-        u64,
-    ) {
-        let storage = MAP.get_or_init(symbols::prefill);
-        let hash = {
-            let mut hasher = std::hash::BuildHasher::build_hasher(storage.hasher());
-            s.hash(&mut hasher);
-            hasher.finish()
-        };
+    ) -> (dashmap::RwLockWriteGuard<'static, RawTable<(Symbol, SharedValue<()>)>>, u64) {
+        let hash = Self::hash(storage, s);
         let shard_idx = storage.determine_shard(hash as usize);
         let shard = &storage.shards()[shard_idx];
         (shard.write(), hash)
     }

+    #[inline]
+    fn hash(storage: &'static Map, s: &str) -> u64 {
+        storage.hasher().hash_one(s)
+    }
+
     #[cold]
     fn drop_slow(arc: &Arc<Box<str>>) {
-        let (mut shard, hash) = Self::select_shard(arc);
+        let storage = MAP.get_or_init(symbols::prefill);
+        let (mut shard, hash) = Self::select_shard(storage, arc);

         match Arc::count(arc) {
-            0 => unreachable!(),
-            1 => unreachable!(),
+            0 | 1 => unreachable!(),
             2 => (),
             _ => {
                 // Another thread has interned another copy
|
||||
}
|
||||
}
|
||||
|
||||
let ptr = match shard.raw_entry_mut().from_key_hashed_nocheck::<str>(hash, arc.as_ref()) {
|
||||
RawEntryMut::Occupied(occ) => occ.remove_entry(),
|
||||
RawEntryMut::Vacant(_) => unreachable!(),
|
||||
}
|
||||
.0
|
||||
.0;
|
||||
let s = &***arc;
|
||||
let (ptr, _) = shard.remove_entry(hash, |(x, _)| x.as_str() == s).unwrap();
|
||||
let ptr = ManuallyDrop::new(ptr);
|
||||
// SAFETY: We're dropping, we have ownership.
|
||||
ManuallyDrop::into_inner(unsafe { ptr.try_as_arc_owned().unwrap() });
|
||||
ManuallyDrop::into_inner(unsafe { ptr.repr.try_as_arc_owned().unwrap() });
|
||||
debug_assert_eq!(Arc::count(arc), 1);
|
||||
|
||||
// Shrink the backing storage if the shard is less than 50% occupied.
|
||||
if shard.len() * 2 < shard.capacity() {
|
||||
shard.shrink_to_fit();
|
||||
let len = shard.len();
|
||||
shard.shrink_to(len, |(x, _)| Self::hash(storage, x.as_str()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -276,22 +277,6 @@ impl fmt::Display for Symbol {
|
||||
}
|
||||
}
|
||||
|
||||
// only exists so we can use `from_key_hashed_nocheck` with a &str
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct SymbolProxy(TaggedArcPtr);
|
||||
|
||||
impl Hash for SymbolProxy {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.0.as_str().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Borrow<str> for SymbolProxy {
|
||||
fn borrow(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@@ -1,15 +1,12 @@
 //! Module defining all known symbols required by the rest of rust-analyzer.
 #![allow(non_upper_case_globals)]

-use std::hash::{BuildHasherDefault, Hash as _, Hasher as _};
+use std::hash::{BuildHasher, BuildHasherDefault};

 use dashmap::{DashMap, SharedValue};
 use rustc_hash::FxHasher;

-use crate::{
-    Symbol,
-    symbol::{SymbolProxy, TaggedArcPtr},
-};
+use crate::{Symbol, symbol::TaggedArcPtr};

 macro_rules! define_symbols {
     (@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => {
@@ -28,28 +25,23 @@ macro_rules! define_symbols {
         )*


-        pub(super) fn prefill() -> DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>> {
-            let mut dashmap_ = <DashMap<SymbolProxy, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default());
+        pub(super) fn prefill() -> DashMap<Symbol, (), BuildHasherDefault<FxHasher>> {
+            let mut dashmap_ = <DashMap<Symbol, (), BuildHasherDefault<FxHasher>>>::with_hasher(BuildHasherDefault::default());

-            let hash_thing_ = |hasher_: &BuildHasherDefault<FxHasher>, it_: &SymbolProxy| {
-                let mut hasher_ = std::hash::BuildHasher::build_hasher(hasher_);
-                it_.hash(&mut hasher_);
-                hasher_.finish()
-            };
+            let hasher_ = dashmap_.hasher().clone();
+            let hash_one = |it_: &str| hasher_.hash_one(it_);
             {
                 $(

-                    let proxy_ = SymbolProxy($name.repr);
-                    let hash_ = hash_thing_(dashmap_.hasher(), &proxy_);
+                    let s = stringify!($name);
+                    let hash_ = hash_one(s);
                     let shard_idx_ = dashmap_.determine_shard(hash_ as usize);
-                    dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(()));
+                    dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($name, SharedValue::new(())), |(x, _)| hash_one(x.as_str()));
                 )*
                 $(

-                    let proxy_ = SymbolProxy($alias.repr);
-                    let hash_ = hash_thing_(dashmap_.hasher(), &proxy_);
+                    let s = $value;
+                    let hash_ = hash_one(s);
                     let shard_idx_ = dashmap_.determine_shard(hash_ as usize);
-                    dashmap_.shards_mut()[shard_idx_].get_mut().raw_entry_mut().from_hash(hash_, |k| k == &proxy_).insert(proxy_, SharedValue::new(()));
+                    dashmap_.shards_mut()[shard_idx_].get_mut().insert(hash_, ($alias, SharedValue::new(())), |(x, _)| hash_one(x.as_str()));
                 )*
             }
             dashmap_

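The rewritten prefill replaces the hand-rolled build_hasher/hash/finish closure with BuildHasher::hash_one and then derives a shard index from the hash. A hedged, std-only sketch of both steps (dashmap's determine_shard uses its own scheme; the modulo below is only for illustration):

use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher};

fn main() {
    let build = BuildHasherDefault::<DefaultHasher>::default();

    // The long-hand version the old `prefill` used: build a hasher, feed it, finish it.
    let manual = {
        let mut hasher = build.build_hasher();
        "interned".hash(&mut hasher);
        hasher.finish()
    };

    // `BuildHasher::hash_one` (stable since Rust 1.71) collapses that into one call.
    let concise = build.hash_one("interned");
    assert_eq!(manual, concise);

    // A shard index can then be derived from the hash, e.g. by modulo.
    let num_shards = 16;
    let shard_idx = (concise as usize) % num_shards;
    assert!(shard_idx < num_shards);
}
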
@@ -132,7 +132,7 @@ pub struct AstIdMap {
     /// Maps stable id to unstable ptr.
     arena: Arena<SyntaxNodePtr>,
     /// Reverse: map ptr to id.
-    map: hashbrown::HashMap<Idx<SyntaxNodePtr>, (), ()>,
+    map: hashbrown::HashTable<Idx<SyntaxNodePtr>>,
 }

 impl fmt::Debug for AstIdMap {
@@ -169,13 +169,13 @@ impl AstIdMap {
                 TreeOrder::DepthFirst
             }
         });
-        res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
+        res.map = hashbrown::HashTable::with_capacity(res.arena.len());
         for (idx, ptr) in res.arena.iter() {
             let hash = hash_ptr(ptr);
-            match res.map.raw_entry_mut().from_hash(hash, |idx2| *idx2 == idx) {
-                hashbrown::hash_map::RawEntryMut::Occupied(_) => unreachable!(),
-                hashbrown::hash_map::RawEntryMut::Vacant(entry) => {
-                    entry.insert_with_hasher(hash, idx, (), |&idx| hash_ptr(&res.arena[idx]));
+            match res.map.entry(hash, |&idx2| idx2 == idx, |&idx| hash_ptr(&res.arena[idx])) {
+                hashbrown::hash_table::Entry::Occupied(_) => unreachable!(),
+                hashbrown::hash_table::Entry::Vacant(entry) => {
+                    entry.insert(idx);
                 }
             }
         }
@@ -196,8 +196,8 @@ impl AstIdMap {
     pub fn ast_id_for_ptr<N: AstIdNode>(&self, ptr: AstPtr<N>) -> FileAstId<N> {
         let ptr = ptr.syntax_node_ptr();
         let hash = hash_ptr(&ptr);
-        match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&raw, &())) => FileAstId {
+        match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
+            Some(&raw) => FileAstId {
                 raw: ErasedFileAstId(raw.into_raw().into_u32()),
                 covariant: PhantomData,
             },
@@ -221,8 +221,8 @@ impl AstIdMap {
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
-        match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()),
+        match self.map.find(hash, |&idx| self.arena[idx] == ptr) {
+            Some(&idx) => ErasedFileAstId(idx.into_raw().into_u32()),
             None => panic!(
                 "Can't find {:?} in AstIdMap:\n{:?}\n source text: {}",
                 item,
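The AstIdMap changes swap hashbrown's removed raw-entry API for hashbrown::HashTable, where the caller supplies the hash and an equality closure on every operation, so the real key can live elsewhere (here, in the arena). A small sketch of that API, assuming a hashbrown dependency (identifiers are illustrative):

use std::collections::hash_map::RandomState;
use std::hash::BuildHasher;

use hashbrown::HashTable;
use hashbrown::hash_table::Entry;

fn main() {
    // HashTable stores only values; the hash and equality check are provided
    // by the caller, so the "key" can be looked up through a side table.
    let keys = ["fn main", "struct Foo", "impl Foo"];
    let hasher = RandomState::new();
    let mut table: HashTable<usize> = HashTable::with_capacity(keys.len());

    for (idx, key) in keys.iter().enumerate() {
        let hash = hasher.hash_one(key);
        match table.entry(hash, |&i| keys[i] == *key, |&i| hasher.hash_one(&keys[i])) {
            Entry::Occupied(_) => unreachable!("keys are unique"),
            Entry::Vacant(entry) => {
                entry.insert(idx);
            }
        }
    }

    // Lookup mirrors the insert: hash the probe, then compare through the side table.
    let probe = "struct Foo";
    let hash = hasher.hash_one(&probe);
    let found = table.find(hash, |&i| keys[i] == probe).copied();
    assert_eq!(found, Some(1));
}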