Merge pull request #18774 from Veykril/push-ysppqxpuknnw

Implement parameter variance inference
Lukas Wirth 2024-12-29 12:34:48 +00:00 committed by GitHub
commit 11b351f44b
23 changed files with 1335 additions and 129 deletions


@@ -950,11 +950,18 @@ pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Ar
 pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
     let callable_def: CallableDefId = from_chalk(db, fn_def_id);
-    let generic_params =
-        generics(db.upcast(), GenericDefId::from_callable(db.upcast(), callable_def));
     Variances::from_iter(
         Interner,
-        std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+        db.variances_of(GenericDefId::from_callable(db.upcast(), callable_def))
+            .as_deref()
+            .unwrap_or_default()
+            .iter()
+            .map(|v| match v {
+                crate::variance::Variance::Covariant => chalk_ir::Variance::Covariant,
+                crate::variance::Variance::Invariant => chalk_ir::Variance::Invariant,
+                crate::variance::Variance::Contravariant => chalk_ir::Variance::Contravariant,
+                crate::variance::Variance::Bivariant => chalk_ir::Variance::Invariant,
+            }),
     )
 }
@@ -962,10 +969,14 @@ pub(crate) fn adt_variance_query(
     db: &dyn HirDatabase,
     chalk_ir::AdtId(adt_id): AdtId,
 ) -> Variances {
-    let generic_params = generics(db.upcast(), adt_id.into());
     Variances::from_iter(
         Interner,
-        std::iter::repeat(chalk_ir::Variance::Invariant).take(generic_params.len()),
+        db.variances_of(adt_id.into()).as_deref().unwrap_or_default().iter().map(|v| match v {
+            crate::variance::Variance::Covariant => chalk_ir::Variance::Covariant,
+            crate::variance::Variance::Invariant => chalk_ir::Variance::Invariant,
+            crate::variance::Variance::Contravariant => chalk_ir::Variance::Contravariant,
+            crate::variance::Variance::Bivariant => chalk_ir::Variance::Invariant,
+        }),
     )
 }
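Both hunks translate the newly inferred crate::variance::Variance values into chalk_ir::Variance, folding Bivariant into Invariant because chalk's variance enum has no bivariant case. For readers unfamiliar with the property being inferred, the standalone sketch below (plain Rust, independent of this codebase; names are illustrative only) shows what the covariant/invariant distinction means in practice. The hover changes later in this pull request surface exactly this per-parameter property in the IDE.

// `&'a T` is covariant in its lifetime, so a longer-lived reference
// coerces to a shorter-lived one:
fn shorten<'a>(long: &'static str, _scope: &'a ()) -> &'a str {
    long // ok: `&'static str` is a subtype of `&'a str`
}

// `Cell<T>` is invariant in `T`, so the analogous coercion is rejected:
// fn shorten_cell<'a>(long: std::cell::Cell<&'static str>, _scope: &'a ()) -> std::cell::Cell<&'a str> {
//     long // error: `Cell<&'static str>` is not a subtype of `Cell<&'a str>`
// }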


@@ -443,13 +443,25 @@ impl ProjectionTyExt for ProjectionTy {
 }
 
 pub trait DynTyExt {
-    fn principal(&self) -> Option<&TraitRef>;
+    fn principal(&self) -> Option<Binders<Binders<&TraitRef>>>;
+    fn principal_id(&self) -> Option<chalk_ir::TraitId<Interner>>;
 }
 
 impl DynTyExt for DynTy {
-    fn principal(&self) -> Option<&TraitRef> {
+    fn principal(&self) -> Option<Binders<Binders<&TraitRef>>> {
+        self.bounds.as_ref().filter_map(|bounds| {
+            bounds.interned().first().and_then(|b| {
+                b.as_ref().filter_map(|b| match b {
+                    crate::WhereClause::Implemented(trait_ref) => Some(trait_ref),
+                    _ => None,
+                })
+            })
+        })
+    }
+
+    fn principal_id(&self) -> Option<chalk_ir::TraitId<Interner>> {
         self.bounds.skip_binders().interned().first().and_then(|b| match b.skip_binders() {
-            crate::WhereClause::Implemented(trait_ref) => Some(trait_ref),
+            crate::WhereClause::Implemented(trait_ref) => Some(trait_ref.trait_id),
             _ => None,
         })
     }


@@ -271,6 +271,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     #[ra_salsa::invoke(chalk_db::adt_variance_query)]
     fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
 
+    #[ra_salsa::invoke(crate::variance::variances_of)]
+    #[ra_salsa::cycle(crate::variance::variances_of_cycle)]
+    fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
+
     #[ra_salsa::invoke(chalk_db::associated_ty_value_query)]
     fn associated_ty_value(
         &self,


@@ -26,14 +26,14 @@ use triomphe::Arc;
 use crate::{db::HirDatabase, lt_to_placeholder_idx, to_placeholder_idx, Interner, Substitution};
 
-pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
+pub fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
     let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
     let params = db.generic_params(def);
     let has_trait_self_param = params.trait_self_param().is_some();
     Generics { def, params, parent_generics, has_trait_self_param }
 }
 
 #[derive(Clone, Debug)]
-pub(crate) struct Generics {
+pub struct Generics {
     def: GenericDefId,
     params: Arc<GenericParams>,
     parent_generics: Option<Box<Generics>>,
@@ -153,7 +153,7 @@ impl Generics {
         (parent_len, self_param, type_params, const_params, impl_trait_params, lifetime_params)
     }
 
-    pub(crate) fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
+    pub fn type_or_const_param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
         self.find_type_or_const_param(param)
     }
@@ -174,7 +174,7 @@ impl Generics {
         }
     }
 
-    pub(crate) fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option<usize> {
+    pub fn lifetime_idx(&self, lifetime: LifetimeParamId) -> Option<usize> {
         self.find_lifetime(lifetime)
     }


@@ -96,8 +96,8 @@ impl InferenceContext<'_> {
                     .map(|b| b.into_value_and_skipped_binders().0);
                 self.deduce_closure_kind_from_predicate_clauses(clauses)
             }
-            TyKind::Dyn(dyn_ty) => dyn_ty.principal().and_then(|trait_ref| {
-                self.fn_trait_kind_from_trait_id(from_chalk_trait_id(trait_ref.trait_id))
+            TyKind::Dyn(dyn_ty) => dyn_ty.principal_id().and_then(|trait_id| {
+                self.fn_trait_kind_from_trait_id(from_chalk_trait_id(trait_id))
             }),
             TyKind::InferenceVar(ty, chalk_ir::TyVariableKind::General) => {
                 let clauses = self.clauses_for_self_ty(*ty);


@@ -13,6 +13,7 @@ pub fn is_box(db: &dyn HirDatabase, adt: AdtId) -> bool {
 pub fn is_unsafe_cell(db: &dyn HirDatabase, adt: AdtId) -> bool {
     let AdtId::StructId(id) = adt else { return false };
     db.struct_data(id).flags.contains(StructFlags::IS_UNSAFE_CELL)
 }


@@ -24,7 +24,6 @@ extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis;
 mod builder;
 mod chalk_db;
 mod chalk_ext;
-mod generics;
 mod infer;
 mod inhabitedness;
 mod interner;
@@ -39,6 +38,7 @@ pub mod db;
 pub mod diagnostics;
 pub mod display;
 pub mod dyn_compatibility;
+pub mod generics;
 pub mod lang_items;
 pub mod layout;
 pub mod method_resolution;
@@ -50,6 +50,7 @@ pub mod traits;
 mod test_db;
 #[cfg(test)]
 mod tests;
+mod variance;
 
 use std::hash::Hash;
@@ -88,10 +89,9 @@ pub use infer::{
     PointerCast,
 };
 pub use interner::Interner;
-pub use lower::diagnostics::*;
 pub use lower::{
-    associated_type_shorthand_candidates, ImplTraitLoweringMode, ParamLoweringMode, TyDefId,
-    TyLoweringContext, ValueTyDefId,
+    associated_type_shorthand_candidates, diagnostics::*, ImplTraitLoweringMode, ParamLoweringMode,
+    TyDefId, TyLoweringContext, ValueTyDefId,
 };
 pub use mapping::{
     from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
@@ -101,6 +101,7 @@ pub use mapping::{
 pub use method_resolution::check_orphan_rules;
 pub use traits::TraitEnvironment;
 pub use utils::{all_super_traits, direct_super_traits, is_fn_unsafe_to_call};
+pub use variance::Variance;
 
 pub use chalk_ir::{
     cast::Cast,


@@ -805,8 +805,8 @@ fn is_inherent_impl_coherent(
         | TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(),
 
         &TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(),
-        TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
-            from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate() == def_map.krate()
+        TyKind::Dyn(it) => it.principal_id().map_or(false, |trait_id| {
+            from_chalk_trait_id(trait_id).module(db.upcast()).krate() == def_map.krate()
         }),
 
         _ => true,
@@ -834,9 +834,8 @@ fn is_inherent_impl_coherent(
                 .contains(StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL),
             hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls,
         },
-        TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
-            db.trait_data(from_chalk_trait_id(trait_ref.trait_id))
-                .rustc_has_incoherent_inherent_impls
+        TyKind::Dyn(it) => it.principal_id().map_or(false, |trait_id| {
+            db.trait_data(from_chalk_trait_id(trait_id)).rustc_has_incoherent_inherent_impls
         }),
 
         _ => false,
@@ -896,8 +895,8 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
         match unwrap_fundamental(ty).kind(Interner) {
             &TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
             TyKind::Error => true,
-            TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
-                is_local(from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate())
+            TyKind::Dyn(it) => it.principal_id().map_or(false, |trait_id| {
+                is_local(from_chalk_trait_id(trait_id).module(db.upcast()).krate())
             }),
             _ => false,
         }


@@ -127,7 +127,15 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             None => continue,
         };
         let def_map = module.def_map(&db);
-        visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+        visit_module(&db, &def_map, module.local_id, &mut |it| {
+            defs.push(match it {
+                ModuleDefId::FunctionId(it) => it.into(),
+                ModuleDefId::EnumVariantId(it) => it.into(),
+                ModuleDefId::ConstId(it) => it.into(),
+                ModuleDefId::StaticId(it) => it.into(),
+                _ => return,
+            })
+        });
     }
     defs.sort_by_key(|def| match def {
         DefWithBodyId::FunctionId(it) => {
@@ -375,7 +383,15 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     let def_map = module.def_map(&db);
     let mut defs: Vec<DefWithBodyId> = Vec::new();
-    visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+    visit_module(&db, &def_map, module.local_id, &mut |it| {
+        defs.push(match it {
+            ModuleDefId::FunctionId(it) => it.into(),
+            ModuleDefId::EnumVariantId(it) => it.into(),
+            ModuleDefId::ConstId(it) => it.into(),
+            ModuleDefId::StaticId(it) => it.into(),
+            _ => return,
+        })
+    });
     defs.sort_by_key(|def| match def {
         DefWithBodyId::FunctionId(it) => {
             let loc = it.lookup(&db);
@@ -405,11 +421,11 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
     buf
 }
 
-fn visit_module(
+pub(crate) fn visit_module(
     db: &TestDB,
     crate_def_map: &DefMap,
     module_id: LocalModuleId,
-    cb: &mut dyn FnMut(DefWithBodyId),
+    cb: &mut dyn FnMut(ModuleDefId),
 ) {
     visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
     for impl_id in crate_def_map[module_id].scope.impls() {
@@ -417,18 +433,18 @@ fn visit_module(
         for &item in impl_data.items.iter() {
             match item {
                 AssocItemId::FunctionId(it) => {
-                    let def = it.into();
-                    cb(def);
-                    let body = db.body(def);
+                    let body = db.body(it.into());
+                    cb(it.into());
                     visit_body(db, &body, cb);
                 }
                 AssocItemId::ConstId(it) => {
-                    let def = it.into();
-                    cb(def);
-                    let body = db.body(def);
+                    let body = db.body(it.into());
+                    cb(it.into());
                     visit_body(db, &body, cb);
                 }
-                AssocItemId::TypeAliasId(_) => (),
+                AssocItemId::TypeAliasId(it) => {
+                    cb(it.into());
+                }
             }
         }
     }
@@ -437,33 +453,27 @@ fn visit_module(
     db: &TestDB,
     crate_def_map: &DefMap,
     scope: &ItemScope,
-    cb: &mut dyn FnMut(DefWithBodyId),
+    cb: &mut dyn FnMut(ModuleDefId),
 ) {
     for decl in scope.declarations() {
+        cb(decl);
         match decl {
             ModuleDefId::FunctionId(it) => {
-                let def = it.into();
-                cb(def);
-                let body = db.body(def);
+                let body = db.body(it.into());
                 visit_body(db, &body, cb);
             }
             ModuleDefId::ConstId(it) => {
-                let def = it.into();
-                cb(def);
-                let body = db.body(def);
+                let body = db.body(it.into());
                 visit_body(db, &body, cb);
             }
             ModuleDefId::StaticId(it) => {
-                let def = it.into();
-                cb(def);
-                let body = db.body(def);
+                let body = db.body(it.into());
                 visit_body(db, &body, cb);
             }
             ModuleDefId::AdtId(hir_def::AdtId::EnumId(it)) => {
                 db.enum_data(it).variants.iter().for_each(|&(it, _)| {
-                    let def = it.into();
-                    cb(def);
-                    let body = db.body(def);
+                    let body = db.body(it.into());
+                    cb(it.into());
                     visit_body(db, &body, cb);
                 });
             }
@@ -473,7 +483,7 @@ fn visit_module(
             match item {
                 AssocItemId::FunctionId(it) => cb(it.into()),
                 AssocItemId::ConstId(it) => cb(it.into()),
-                AssocItemId::TypeAliasId(_) => (),
+                AssocItemId::TypeAliasId(it) => cb(it.into()),
             }
         }
     }
@@ -483,7 +493,7 @@ fn visit_module(
     }
 }
 
-fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(DefWithBodyId)) {
+fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(ModuleDefId)) {
     for (_, def_map) in body.blocks(db) {
         for (mod_id, _) in def_map.modules() {
             visit_module(db, &def_map, mod_id, cb);
@@ -553,7 +563,13 @@ fn salsa_bug() {
     let module = db.module_for_file(pos.file_id);
     let crate_def_map = module.def_map(&db);
     visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
-        db.infer(def);
+        db.infer(match def {
+            ModuleDefId::FunctionId(it) => it.into(),
+            ModuleDefId::EnumVariantId(it) => it.into(),
+            ModuleDefId::ConstId(it) => it.into(),
+            ModuleDefId::StaticId(it) => it.into(),
+            _ => return,
+        });
     });
 
     let new_text = "
@@ -586,6 +602,12 @@ fn salsa_bug() {
     let module = db.module_for_file(pos.file_id);
     let crate_def_map = module.def_map(&db);
     visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
-        db.infer(def);
+        db.infer(match def {
+            ModuleDefId::FunctionId(it) => it.into(),
+            ModuleDefId::EnumVariantId(it) => it.into(),
+            ModuleDefId::ConstId(it) => it.into(),
+            ModuleDefId::StaticId(it) => it.into(),
+            _ => return,
+        });
     });
 }


@@ -24,6 +24,13 @@ fn check_closure_captures(ra_fixture: &str, expect: Expect) {
     let mut captures_info = Vec::new();
     for def in defs {
+        let def = match def {
+            hir_def::ModuleDefId::FunctionId(it) => it.into(),
+            hir_def::ModuleDefId::EnumVariantId(it) => it.into(),
+            hir_def::ModuleDefId::ConstId(it) => it.into(),
+            hir_def::ModuleDefId::StaticId(it) => it.into(),
+            _ => continue,
+        };
         let infer = db.infer(def);
         let db = &db;
         captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {


@@ -1,4 +1,5 @@
 use base_db::SourceDatabaseFileInputExt as _;
+use hir_def::ModuleDefId;
 use test_fixture::WithFixture;
 
 use crate::{db::HirDatabase, test_db::TestDB};
@@ -19,7 +20,9 @@ fn foo() -> i32 {
         let module = db.module_for_file(pos.file_id.file_id());
         let crate_def_map = module.def_map(&db);
         visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
-            db.infer(def);
+            if let ModuleDefId::FunctionId(it) = def {
+                db.infer(it.into());
+            }
         });
     });
     assert!(format!("{events:?}").contains("infer"))
@@ -39,7 +42,9 @@ fn foo() -> i32 {
         let module = db.module_for_file(pos.file_id.file_id());
         let crate_def_map = module.def_map(&db);
         visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
-            db.infer(def);
+            if let ModuleDefId::FunctionId(it) = def {
+                db.infer(it.into());
+            }
        });
    });
    assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
@@ -66,7+71,9 @@ fn baz() -> i32 {
         let module = db.module_for_file(pos.file_id.file_id());
         let crate_def_map = module.def_map(&db);
         visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
-            db.infer(def);
+            if let ModuleDefId::FunctionId(it) = def {
+                db.infer(it.into());
+            }
         });
     });
     assert!(format!("{events:?}").contains("infer"))
@@ -91,7 +98,9 @@ fn baz() -> i32 {
         let module = db.module_for_file(pos.file_id.file_id());
         let crate_def_map = module.def_map(&db);
         visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
-            db.infer(def);
+            if let ModuleDefId::FunctionId(it) = def {
+                db.infer(it.into());
+            }
         });
     });
     assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")

File diff suppressed because it is too large.
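The suppressed diff is presumably the new variance module itself, registered above via mod variance and pub use variance::Variance, and accounting for the bulk of the 1335 added lines, so the inference algorithm is not visible in this view. As a rough orientation only, and not the code from this PR: rustc-style variance inference walks every use of a generic parameter, composes the variance of nested positions ("xform"), combines all uses with a greatest lower bound ("glb"), and iterates to a fixed point for mutually recursive definitions (presumably why the variances_of query above registers a variances_of_cycle recovery hook). A minimal, self-contained sketch of that lattice:

#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum V {
    Bivariant, // parameter is unused
    Covariant,
    Contravariant,
    Invariant,
}

impl V {
    // Variance of a use nested inside a position of variance `self`.
    fn xform(self, other: V) -> V {
        match self {
            V::Covariant => other,
            V::Contravariant => match other {
                V::Covariant => V::Contravariant,
                V::Contravariant => V::Covariant,
                v => v, // Bivariant and Invariant are unchanged by flipping
            },
            V::Invariant => V::Invariant,
            V::Bivariant => V::Bivariant,
        }
    }

    // Greatest lower bound: combine the variances of two uses of a parameter.
    fn glb(self, other: V) -> V {
        use V::*;
        match (self, other) {
            (Bivariant, v) | (v, Bivariant) => v,
            (Invariant, _) | (_, Invariant) => Invariant,
            (Covariant, Covariant) => Covariant,
            (Contravariant, Contravariant) => Contravariant,
            (Covariant, Contravariant) | (Contravariant, Covariant) => Invariant,
        }
    }
}

fn main() {
    // struct Wrap<'a, T>(fn(T) -> &'a T);
    // Starting from a covariant context (a struct field):
    //  - the fn-argument position flips the context, so T's use there is contravariant;
    //  - the fn-return position stays covariant, and &'a T is covariant in T.
    let t_in_arg = V::Covariant.xform(V::Contravariant);
    let t_in_ret = V::Covariant.xform(V::Covariant);
    // Combining both uses makes T invariant overall, while 'a stays covariant.
    assert_eq!(t_in_arg.glb(t_in_ret), V::Invariant);
    println!("T in Wrap is {:?}", t_in_arg.glb(t_in_ret));
}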


@@ -101,7 +101,6 @@ pub use crate::{
         PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo, VisibleTraits,
     },
 };
-pub use hir_ty::method_resolution::TyFingerprint;
 
 // Be careful with these re-exports.
 //
@@ -151,8 +150,9 @@ pub use {
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
         dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode},
         layout::LayoutError,
+        method_resolution::TyFingerprint,
         mir::{MirEvalError, MirLowerError},
-        CastError, FnAbi, PointerCast, Safety,
+        CastError, FnAbi, PointerCast, Safety, Variance,
     },
     // FIXME: Properly encapsulate mir
     hir_ty::{mir, Interner as ChalkTyInterner},
@@ -3957,6 +3957,22 @@ impl GenericParam {
             GenericParam::LifetimeParam(it) => it.id.parent.into(),
         }
     }
+
+    pub fn variance(self, db: &dyn HirDatabase) -> Option<Variance> {
+        let parent = match self {
+            GenericParam::TypeParam(it) => it.id.parent(),
+            // const parameters are always invariant
+            GenericParam::ConstParam(_) => return None,
+            GenericParam::LifetimeParam(it) => it.id.parent,
+        };
+        let generics = hir_ty::generics::generics(db.upcast(), parent);
+        let index = match self {
+            GenericParam::TypeParam(it) => generics.type_or_const_param_idx(it.id.into())?,
+            GenericParam::ConstParam(_) => return None,
+            GenericParam::LifetimeParam(it) => generics.lifetime_idx(it.id)?,
+        };
+        db.variances_of(parent)?.get(index).copied()
+    }
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]


@@ -594,12 +594,21 @@ pub(super) fn definition(
         _ => None,
     };
+    let variance_info = || match def {
+        Definition::GenericParam(it) => it.variance(db).as_ref().map(ToString::to_string),
+        _ => None,
+    };
 
     let mut extra = String::new();
     if hovered_definition {
         if let Some(notable_traits) = render_notable_trait(db, notable_traits, edition) {
             extra.push_str("\n___\n");
             extra.push_str(&notable_traits);
         }
+        if let Some(variance_info) = variance_info() {
+            extra.push_str("\n___\n");
+            extra.push_str(&variance_info);
+        }
         if let Some(layout_info) = layout_info() {
             extra.push_str("\n___\n");
             extra.push_str(&layout_info);


@@ -4721,7 +4721,7 @@ fn hover_type_param_sized_bounds() {
 //- minicore: sized
 trait Trait {}
 struct Foo<T>(T);
-impl<T: Trait> Foo<T$0> {}
+impl<T$0: Trait> Foo<T> {}
 "#,
         expect![[r#"
             *T*
@@ -4736,7 +4736,7 @@ impl<T: Trait> Foo<T$0> {}
 //- minicore: sized
 trait Trait {}
 struct Foo<T>(T);
-impl<T: Trait + ?Sized> Foo<T$0> {}
+impl<T$0: Trait + ?Sized> Foo<T> {}
 "#,
         expect![[r#"
             *T*
@@ -4764,6 +4764,10 @@ fn foo<T$0>() {}
             ```rust
             T
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4781,6 +4785,10 @@ fn foo<T$0: Sized>() {}
             ```rust
             T
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4798,6 +4806,10 @@ fn foo<T$0: ?Sized>() {}
             ```rust
             T: ?Sized
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4816,6 +4828,10 @@ fn foo<T$0: Trait>() {}
             ```rust
             T: Trait
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4834,6 +4850,10 @@ fn foo<T$0: Trait + Sized>() {}
             ```rust
             T: Trait
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4852,6 +4872,10 @@ fn foo<T$0: Trait + ?Sized>() {}
             ```rust
             T: Trait + ?Sized
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4869,6 +4893,10 @@ fn foo<T$0: ?Sized + Sized + Sized>() {}
             ```rust
             T
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }
@@ -4887,6 +4915,10 @@ fn foo<T$0: Sized + ?Sized + Sized + Trait>() {}
             ```rust
             T: Trait
             ```
+
+            ---
+
+            invariant
         "#]],
     );
 }


@@ -242,7 +242,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream
     let tracing = if let QueryStorage::Memoized | QueryStorage::LruMemoized = query.storage {
         let s = format!("{trait_name}::{fn_name}");
         Some(quote! {
-            let _p = tracing::debug_span!(#s, #(#key_names = tracing::field::debug(&#key_names)),*).entered();
+            let _p = tracing::trace_span!(#s, #(#key_names = tracing::field::debug(&#key_names)),*).entered();
         })
     } else {
         None


@@ -13,7 +13,7 @@ use crate::{Database, DatabaseKeyIndex, Event, EventKind, QueryDb};
 use parking_lot::{RawRwLock, RwLock};
 use std::ops::Deref;
 use std::sync::atomic::{AtomicBool, Ordering};
-use tracing::{debug, info};
+use tracing::trace;
 
 pub(super) struct Slot<Q>
 where
@@ -126,7 +126,7 @@
         // doing any `set` invocations while the query function runs.
         let revision_now = runtime.current_revision();
 
-        info!("{:?}: invoked at {:?}", self, revision_now,);
+        trace!("{:?}: invoked at {:?}", self, revision_now,);
 
         // First, do a check with a read-lock.
         loop {
@@ -152,7 +152,7 @@
     ) -> StampedValue<Q::Value> {
         let runtime = db.salsa_runtime();
 
-        debug!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,);
+        trace!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,);
 
         // Check with an upgradable read to see if there is a value
         // already. (This permits other readers but prevents anyone
@@ -184,7 +184,7 @@
         // inputs and check whether they are out of date.
         if let Some(memo) = &mut old_memo {
             if let Some(value) = memo.verify_value(db.ops_database(), revision_now, &active_query) {
-                info!("{:?}: validated old memoized value", self,);
+                trace!("{:?}: validated old memoized value", self,);
 
                 db.salsa_event(Event {
                     runtime_id: runtime.id(),
@@ -212,7 +212,7 @@
         old_memo: Option<Memo<Q::Value>>,
         key: &Q::Key,
     ) -> StampedValue<Q::Value> {
-        tracing::info!("{:?}: executing query", self.database_key_index().debug(db));
+        tracing::trace!("{:?}: executing query", self.database_key_index().debug(db));
 
         db.salsa_event(Event {
             runtime_id: db.salsa_runtime().id(),
@@ -224,7 +224,7 @@
         let value = match Cycle::catch(|| Q::execute(db, key.clone())) {
             Ok(v) => v,
             Err(cycle) => {
-                tracing::debug!(
+                tracing::trace!(
                     "{:?}: caught cycle {:?}, have strategy {:?}",
                     self.database_key_index().debug(db),
                     cycle,
@@ -272,9 +272,10 @@
             // consumers must be aware of. Becoming *more* durable
             // is not. See the test `constant_to_non_constant`.
             if revisions.durability >= old_memo.revisions.durability && old_memo.value == value {
-                debug!(
+                trace!(
                     "read_upgrade({:?}): value is equal, back-dating to {:?}",
-                    self, old_memo.revisions.changed_at,
+                    self,
+                    old_memo.revisions.changed_at,
                 );
 
                 assert!(old_memo.revisions.changed_at <= revisions.changed_at);
@@ -290,7 +291,7 @@
         let memo_value = new_value.value.clone();
 
-        debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
+        trace!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
 
         panic_guard.proceed(Some(Memo { value: memo_value, verified_at: revision_now, revisions }));
@@ -339,9 +340,11 @@
             }
             QueryState::Memoized(memo) => {
-                debug!(
+                trace!(
                     "{:?}: found memoized value, verified_at={:?}, changed_at={:?}",
-                    self, memo.verified_at, memo.revisions.changed_at,
+                    self,
+                    memo.verified_at,
+                    memo.revisions.changed_at,
                 );
 
                 if memo.verified_at < revision_now {
@@ -355,7 +358,7 @@
                         value: value.clone(),
                     };
 
-                    info!("{:?}: returning memoized value changed at {:?}", self, value.changed_at);
+                    trace!("{:?}: returning memoized value changed at {:?}", self, value.changed_at);
 
                     ProbeState::UpToDate(value)
                 }
@@ -387,7 +390,7 @@
     }
 
     pub(super) fn invalidate(&self, new_revision: Revision) -> Option<Durability> {
-        tracing::debug!("Slot::invalidate(new_revision = {:?})", new_revision);
+        tracing::trace!("Slot::invalidate(new_revision = {:?})", new_revision);
         match &mut *self.state.write() {
             QueryState::Memoized(memo) => {
                 memo.revisions.untracked = true;
@@ -411,9 +414,11 @@
         db.unwind_if_cancelled();
 
-        debug!(
+        trace!(
             "maybe_changed_after({:?}) called with revision={:?}, revision_now={:?}",
-            self, revision, revision_now,
+            self,
+            revision,
+            revision_now,
         );
 
         // Do an initial probe with just the read-lock.
@@ -680,9 +685,11 @@
         assert!(self.verified_at != revision_now);
         let verified_at = self.verified_at;
 
-        debug!(
+        trace!(
             "verify_revisions: verified_at={:?}, revision_now={:?}, inputs={:#?}",
-            verified_at, revision_now, self.revisions.inputs
+            verified_at,
+            revision_now,
+            self.revisions.inputs
         );
 
         if self.check_durability(db.salsa_runtime()) {
@@ -708,7 +715,7 @@
         let changed_input =
             inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at));
         if let Some(input) = changed_input {
-            debug!("validate_memoized_value: `{:?}` may have changed", input);
+            trace!("validate_memoized_value: `{:?}` may have changed", input);
 
             return false;
         }
@@ -721,7 +728,7 @@
     /// True if this memo is known not to have changed based on its durability.
     fn check_durability(&self, runtime: &Runtime) -> bool {
         let last_changed = runtime.last_changed_revision(self.revisions.durability);
-        debug!(
+        trace!(
             "check_durability(last_changed={:?} <= verified_at={:?}) = {:?}",
             last_changed,
             self.verified_at,


@@ -17,7 +17,7 @@ use parking_lot::{RawRwLock, RwLock};
 use std::marker::PhantomData;
 use std::ops::Deref;
 use std::sync::atomic::{AtomicBool, Ordering};
-use tracing::{debug, info};
+use tracing::trace;
 
 pub(super) struct Slot<Q, MP>
 where
@@ -140,7 +140,7 @@
         // doing any `set` invocations while the query function runs.
         let revision_now = runtime.current_revision();
 
-        info!("{:?}: invoked at {:?}", self, revision_now,);
+        trace!("{:?}: invoked at {:?}", self, revision_now,);
 
         // First, do a check with a read-lock.
         loop {
@@ -168,7 +168,7 @@
     ) -> StampedValue<Q::Value> {
         let runtime = db.salsa_runtime();
 
-        debug!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,);
+        trace!("{:?}: read_upgrade(revision_now={:?})", self, revision_now,);
 
         // Check with an upgradable read to see if there is a value
         // already. (This permits other readers but prevents anyone
@@ -202,7 +202,7 @@
         // inputs and check whether they are out of date.
         if let Some(memo) = &mut old_memo {
             if let Some(value) = memo.verify_value(db.ops_database(), revision_now, &active_query) {
-                info!("{:?}: validated old memoized value", self,);
+                trace!("{:?}: validated old memoized value", self,);
 
                 db.salsa_event(Event {
                     runtime_id: runtime.id(),
@@ -230,7 +230,7 @@
         old_memo: Option<Memo<Q::Value>>,
         key: &Q::Key,
     ) -> StampedValue<Q::Value> {
-        tracing::info!("{:?}: executing query", self.database_key_index().debug(db));
+        tracing::trace!("{:?}: executing query", self.database_key_index().debug(db));
 
         db.salsa_event(Event {
             runtime_id: db.salsa_runtime().id(),
@@ -242,7 +242,7 @@
         let value = match Cycle::catch(|| Q::execute(db, key.clone())) {
             Ok(v) => v,
             Err(cycle) => {
-                tracing::debug!(
+                tracing::trace!(
                     "{:?}: caught cycle {:?}, have strategy {:?}",
                     self.database_key_index().debug(db),
                     cycle,
@@ -293,9 +293,10 @@
             if revisions.durability >= old_memo.revisions.durability
                 && MP::memoized_value_eq(old_value, &value)
             {
-                debug!(
+                trace!(
                     "read_upgrade({:?}): value is equal, back-dating to {:?}",
-                    self, old_memo.revisions.changed_at,
+                    self,
+                    old_memo.revisions.changed_at,
                 );
 
                 assert!(old_memo.revisions.changed_at <= revisions.changed_at);
@@ -313,7 +314,7 @@
         let memo_value =
             if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None };
 
-        debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
+        trace!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,);
 
         panic_guard.proceed(Some(Memo { value: memo_value, verified_at: revision_now, revisions }));
@@ -362,9 +363,11 @@
             }
             QueryState::Memoized(memo) => {
-                debug!(
+                trace!(
                     "{:?}: found memoized value, verified_at={:?}, changed_at={:?}",
-                    self, memo.verified_at, memo.revisions.changed_at,
+                    self,
+                    memo.verified_at,
+                    memo.revisions.changed_at,
                 );
 
                 if memo.verified_at < revision_now {
@@ -378,7 +381,11 @@
                         value: value.clone(),
                     };
 
-                    info!("{:?}: returning memoized value changed at {:?}", self, value.changed_at);
+                    trace!(
+                        "{:?}: returning memoized value changed at {:?}",
+                        self,
+                        value.changed_at
+                    );
 
                     ProbeState::UpToDate(value)
                 } else {
@@ -426,7 +433,7 @@
     }
 
     pub(super) fn invalidate(&self, new_revision: Revision) -> Option<Durability> {
-        tracing::debug!("Slot::invalidate(new_revision = {:?})", new_revision);
+        tracing::trace!("Slot::invalidate(new_revision = {:?})", new_revision);
         match &mut *self.state.write() {
             QueryState::Memoized(memo) => {
                 memo.revisions.untracked = true;
@@ -450,9 +457,11 @@
         db.unwind_if_cancelled();
 
-        debug!(
+        trace!(
             "maybe_changed_after({:?}) called with revision={:?}, revision_now={:?}",
-            self, revision, revision_now,
+            self,
+            revision,
+            revision_now,
         );
 
         // Do an initial probe with just the read-lock.
@@ -734,9 +743,11 @@
         assert!(self.verified_at != revision_now);
         let verified_at = self.verified_at;
 
-        debug!(
+        trace!(
             "verify_revisions: verified_at={:?}, revision_now={:?}, inputs={:#?}",
-            verified_at, revision_now, self.revisions.inputs
+            verified_at,
+            revision_now,
+            self.revisions.inputs
         );
 
         if self.check_durability(db.salsa_runtime()) {
@@ -762,7 +773,7 @@
         let changed_input =
             inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at));
         if let Some(input) = changed_input {
-            debug!("validate_memoized_value: `{:?}` may have changed", input);
+            trace!("validate_memoized_value: `{:?}` may have changed", input);
 
            return false;
        }
@@ -775,7 +786,7 @@
     /// True if this memo is known not to have changed based on its durability.
     fn check_durability(&self, runtime: &Runtime) -> bool {
         let last_changed = runtime.last_changed_revision(self.revisions.durability);
-        debug!(
+        trace!(
             "check_durability(last_changed={:?} <= verified_at={:?}) = {:?}",
             last_changed,
             self.verified_at,


@@ -14,7 +14,7 @@ use crate::{DatabaseKeyIndex, QueryDb};
 use indexmap::map::Entry;
 use parking_lot::RwLock;
 use std::iter;
-use tracing::debug;
+use tracing::trace;
 
 /// Input queries store the result plus a list of the other queries
 /// that they invoked. This means we can avoid recomputing them when
@@ -73,11 +73,11 @@
             return true;
         };
 
-        debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
+        trace!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
 
         let changed_at = slot.stamped_value.read().changed_at;
 
-        debug!("maybe_changed_after: changed_at = {:?}", changed_at);
+        trace!("maybe_changed_after: changed_at = {:?}", changed_at);
 
         changed_at > revision
     }
@@ -140,7 +140,7 @@
     Q: Query,
 {
     fn set(&self, runtime: &mut Runtime, key: &Q::Key, value: Q::Value, durability: Durability) {
-        tracing::debug!("{:?}({:?}) = {:?} ({:?})", Q::default(), key, value, durability);
+        tracing::trace!("{:?}({:?}) = {:?} ({:?})", Q::default(), key, value, durability);
 
         // The value is changing, so we need a new revision (*). We also
         // need to update the 'last changed' revision by invoking
@@ -234,14 +234,14 @@
     ) -> bool {
         debug_assert!(revision < db.salsa_runtime().current_revision());
 
-        debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
+        trace!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,);
 
         let Some(value) = &*self.slot.stamped_value.read() else {
             return true;
         };
 
         let changed_at = value.changed_at;
 
-        debug!("maybe_changed_after: changed_at = {:?}", changed_at);
+        trace!("maybe_changed_after: changed_at = {:?}", changed_at);
 
         changed_at > revision
     }
@@ -298,7 +298,7 @@
     Q: Query<Key = ()>,
 {
     fn set(&self, runtime: &mut Runtime, (): &Q::Key, value: Q::Value, durability: Durability) {
-        tracing::debug!("{:?} = {:?} ({:?})", Q::default(), value, durability);
+        tracing::trace!("{:?} = {:?} ({:?})", Q::default(), value, durability);
 
         // The value is changing, so we need a new revision (*). We also
         // need to update the 'last changed' revision by invoking


@@ -79,7 +79,7 @@ pub trait Database: plumbing::DatabaseOps {
         let current_revision = runtime.current_revision();
         let pending_revision = runtime.pending_revision();
-        tracing::debug!(
+        tracing::trace!(
             "unwind_if_cancelled: current_revision={:?}, pending_revision={:?}",
             current_revision,
             pending_revision
@@ -684,7 +684,7 @@ impl Cycle {
     }
 
     pub(crate) fn throw(self) -> ! {
-        tracing::debug!("throwing cycle {:?}", self);
+        tracing::trace!("throwing cycle {:?}", self);
         std::panic::resume_unwind(Box::new(self))
     }


@@ -103,11 +103,11 @@ where
     /// Records that `node` was used. This may displace an old node (if the LRU limits are
     pub(crate) fn record_use(&self, node: &Arc<Node>) -> Option<Arc<Node>> {
-        tracing::debug!("record_use(node={:?})", node);
+        tracing::trace!("record_use(node={:?})", node);
 
         // Load green zone length and check if the LRU cache is even enabled.
         let green_zone = self.green_zone.load(Ordering::Acquire);
-        tracing::debug!("record_use: green_zone={}", green_zone);
+        tracing::trace!("record_use: green_zone={}", green_zone);
         if green_zone == 0 {
             return None;
         }
@@ -115,7 +115,7 @@
         // Find current index of list (if any) and the current length
         // of our green zone.
         let index = node.lru_index().load();
-        tracing::debug!("record_use: index={}", index);
+        tracing::trace!("record_use: index={}", index);
 
         // Already a member of the list, and in the green zone -- nothing to do!
         if index < green_zone {
@@ -162,9 +162,9 @@
         let entries =
             std::mem::replace(&mut self.entries, Vec::with_capacity(self.end_red_zone as usize));
 
-        tracing::debug!("green_zone = {:?}", self.green_zone());
-        tracing::debug!("yellow_zone = {:?}", self.yellow_zone());
-        tracing::debug!("red_zone = {:?}", self.red_zone());
+        tracing::trace!("green_zone = {:?}", self.green_zone());
+        tracing::trace!("yellow_zone = {:?}", self.yellow_zone());
+        tracing::trace!("red_zone = {:?}", self.red_zone());
 
         // We expect to resize when the LRU cache is basically empty.
         // So just forget all the old LRU indices to start.
@@ -180,7 +180,7 @@
     /// list may displace an old member of the red zone, in which case
     /// that is returned.
     fn record_use(&mut self, node: &Arc<Node>) -> Option<Arc<Node>> {
-        tracing::debug!("record_use(node={:?})", node);
+        tracing::trace!("record_use(node={:?})", node);
 
         // NB: When this is invoked, we have typically already loaded
         // the LRU index (to check if it is in green zone). But that
@@ -212,7 +212,7 @@
         if len < self.end_red_zone {
             self.entries.push(node.clone());
             node.lru_index().store(len);
-            tracing::debug!("inserted node {:?} at {}", node, len);
+            tracing::trace!("inserted node {:?} at {}", node, len);
             return self.record_use(node);
         }
@@ -220,7 +220,7 @@
         // zone and then promoting.
         let victim_index = self.pick_index(self.red_zone());
         let victim_node = std::mem::replace(&mut self.entries[victim_index as usize], node.clone());
-        tracing::debug!("evicting red node {:?} from {}", victim_node, victim_index);
+        tracing::trace!("evicting red node {:?} from {}", victim_node, victim_index);
         victim_node.lru_index().clear();
         self.promote_red_to_green(node, victim_index);
         Some(victim_node)
@@ -241,7 +241,7 @@
         // going to invoke `self.promote_yellow` next, and it will get
        // updated then.
        let yellow_index = self.pick_index(self.yellow_zone());
-        tracing::debug!(
+        tracing::trace!(
            "demoting yellow node {:?} from {} to red at {}",
            self.entries[yellow_index as usize],
            yellow_index,
@@ -265,7 +265,7 @@
        // Pick a yellow at random and switch places with it.
        let green_index = self.pick_index(self.green_zone());
-        tracing::debug!(
+        tracing::trace!(
            "demoting green node {:?} from {} to yellow at {}",
            self.entries[green_index as usize],
            green_index,
@@ -275,7 +275,7 @@
        self.entries[yellow_index as usize].lru_index().store(yellow_index);
        node.lru_index().store(green_index);
 
-        tracing::debug!("promoted {:?} to green index {}", node, green_index);
+        tracing::trace!("promoted {:?} to green index {}", node, green_index);
    }
 
    fn pick_index(&mut self, zone: std::ops::Range<u16>) -> u16 {


@@ -9,7 +9,7 @@ use parking_lot::{Mutex, RwLock};
 use std::hash::Hash;
 use std::panic::panic_any;
 use std::sync::atomic::{AtomicU32, Ordering};
-use tracing::debug;
+use tracing::trace;
 use triomphe::{Arc, ThinArc};
 
 mod dependency_graph;
@@ -177,7 +177,7 @@ impl Runtime {
     where
         F: FnOnce(Revision) -> Option<Durability>,
     {
-        tracing::debug!("increment_revision()");
+        tracing::trace!("increment_revision()");
 
         if !self.permits_increment() {
             panic!("increment_revision invoked during a query computation");
@@ -196,7 +196,7 @@ impl Runtime {
         let new_revision = current_revision.next();
 
-        debug!("increment_revision: incremented to {:?}", new_revision);
+        trace!("increment_revision: incremented to {:?}", new_revision);
 
         if let Some(d) = op(new_revision) {
             for rev in &self.shared_state.revisions[1..=d.index()] {
@@ -267,7 +267,7 @@ impl Runtime {
         database_key_index: DatabaseKeyIndex,
         to_id: RuntimeId,
     ) {
-        debug!("unblock_cycle_and_maybe_throw(database_key={:?})", database_key_index);
+        trace!("unblock_cycle_and_maybe_throw(database_key={:?})", database_key_index);
 
         let mut from_stack = self.local_state.take_query_stack();
         let from_id = self.id();
@@ -305,7 +305,7 @@ impl Runtime {
             Cycle::new(Arc::new(v))
         };
 
-        debug!("cycle {:?}, cycle_query {:#?}", cycle.debug(db), cycle_query,);
+        trace!("cycle {:?}, cycle_query {:#?}", cycle.debug(db), cycle_query,);
 
         // We can remove the cycle participants from the list of dependencies;
         // they are a strongly connected component (SCC) and we only care about
@@ -323,7 +323,7 @@ impl Runtime {
                 CycleRecoveryStrategy::Fallback => false,
             })
             .for_each(|aq| {
-                debug!("marking {:?} for fallback", aq.database_key_index.debug(db));
+                trace!("marking {:?} for fallback", aq.database_key_index.debug(db));
                 aq.take_inputs_from(&cycle_query);
                 assert!(aq.cycle.is_none());
                 aq.cycle = Some(cycle.clone());


@@ -1,4 +1,4 @@
-use tracing::debug;
+use tracing::trace;
 use triomphe::ThinArc;
 
 use crate::durability::Durability;
@@ -78,7 +78,7 @@ impl LocalState {
         durability: Durability,
         changed_at: Revision,
     ) {
-        debug!(
+        trace!(
             "report_query_read_and_unwind_if_cycle_resulted(input={:?}, durability={:?}, changed_at={:?})",
             input, durability, changed_at
         );