Expand target for autocompletion

Tavo Annus 2024-01-06 15:17:16 +02:00
parent a946970e2d
commit 0b838e3e23
14 changed files with 450 additions and 466 deletions

View File

@ -12,7 +12,7 @@ authors = ["rust-analyzer team"]
[profile.dev] [profile.dev]
# Disabling debug info speeds up builds a bunch, # Disabling debug info speeds up builds a bunch,
# and we don't rely on it for debugging that much. # and we don't rely on it for debugging that much.
debug = 2 debug = 0
[profile.dev.package] [profile.dev.package]
# These speed up local tests. # These speed up local tests.

View File

@ -106,10 +106,6 @@ pub fn could_unify_deeply(
let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner);
let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars); let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars);
let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars); let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars);
// table.resolve_obligations_as_possible();
// table.propagate_diverging_flag();
// let ty1_with_vars = table.resolve_completely(ty1_with_vars);
// let ty2_with_vars = table.resolve_completely(ty2_with_vars);
table.unify_deeply(&ty1_with_vars, &ty2_with_vars) table.unify_deeply(&ty1_with_vars, &ty2_with_vars)
} }
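
Context for the hunk above: `could_unify_deeply` asks whether two types can be made equal by instantiating inference variables, recursing into nested type arguments instead of stopping at the outermost constructor (the commented-out resolution steps were dead code and are simply dropped). Below is a minimal sketch of that idea on a toy type representation; the names are hypothetical and this is not the `InferenceTable` API used here.

```rust
use std::collections::HashMap;

// Toy model of "deep" unification: inference variables may be bound while
// recursing through nested type arguments. All names are hypothetical.
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Var(usize),                 // inference variable
    Adt(&'static str, Vec<Ty>), // named type with arguments, e.g. Option<T>
}

// No occurs check -- kept minimal on purpose.
fn unify(a: &Ty, b: &Ty, subst: &mut HashMap<usize, Ty>) -> bool {
    match (a, b) {
        (Ty::Var(v), other) | (other, Ty::Var(v)) => match subst.get(v).cloned() {
            Some(bound) => unify(&bound, other, subst),
            None => {
                subst.insert(*v, other.clone());
                true
            }
        },
        (Ty::Adt(n1, args1), Ty::Adt(n2, args2)) => {
            n1 == n2
                && args1.len() == args2.len()
                // "deeply": recurse into every nested type argument
                && args1.iter().zip(args2).all(|(x, y)| unify(x, y, subst))
        }
    }
}

fn main() {
    let mut subst = HashMap::new();
    // Option<?0> unifies deeply with Option<i32> by binding ?0 := i32.
    let lhs = Ty::Adt("Option", vec![Ty::Var(0)]);
    let rhs = Ty::Adt("Option", vec![Ty::Adt("i32", vec![])]);
    assert!(unify(&lhs, &rhs, &mut subst));
    assert_eq!(subst[&0], Ty::Adt("i32", vec![]));
}
```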

View File

@ -1085,20 +1085,21 @@ impl Field {
Type::new(db, var_id, ty) Type::new(db, var_id, ty)
} }
pub fn ty_with_generics( pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
&self,
db: &dyn HirDatabase,
mut generics: impl Iterator<Item = Type>,
) -> Type {
let var_id = self.parent.into(); let var_id = self.parent.into();
let def_id: AdtId = match self.parent { let def_id: AdtId = match self.parent {
VariantDef::Struct(it) => it.id.into(), VariantDef::Struct(it) => it.id.into(),
VariantDef::Union(it) => it.id.into(), VariantDef::Union(it) => it.id.into(),
VariantDef::Variant(it) => it.parent_enum(db).id.into(), VariantDef::Variant(it) => it.parent_enum(db).id.into(),
}; };
let mut generics = generics.map(|it| it.ty.clone());
let substs = TyBuilder::subst_for_def(db, def_id, None) let substs = TyBuilder::subst_for_def(db, def_id, None)
.fill(|_| { .fill(|x| {
GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone())) let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
match x {
ParamKind::Type => ty.cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
}
}) })
.build(); .build();
let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
@ -1158,14 +1159,15 @@ impl Struct {
Type::from_def(db, self.id) Type::from_def(db, self.id)
} }
pub fn ty_with_generics( pub fn ty_with_args(self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
self, let mut generics = generics.map(|it| it.ty.clone());
db: &dyn HirDatabase,
mut generics: impl Iterator<Item = Type>,
) -> Type {
let substs = TyBuilder::subst_for_def(db, self.id, None) let substs = TyBuilder::subst_for_def(db, self.id, None)
.fill(|_| { .fill(|x| {
GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone())) let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
match x {
ParamKind::Type => ty.cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
}
}) })
.build(); .build();
let ty = db.ty(self.id.into()).substitute(Interner, &substs); let ty = db.ty(self.id.into()).substitute(Interner, &substs);
@ -1271,16 +1273,18 @@ impl Enum {
Type::from_def(db, self.id) Type::from_def(db, self.id)
} }
pub fn ty_with_generics( pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
&self, let mut generics = generics.map(|it| it.ty.clone());
db: &dyn HirDatabase,
mut generics: impl Iterator<Item = Type>,
) -> Type {
let substs = TyBuilder::subst_for_def(db, self.id, None) let substs = TyBuilder::subst_for_def(db, self.id, None)
.fill(|_| { .fill(|x| {
GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone())) let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
match x {
ParamKind::Type => ty.cast(Interner),
ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
}
}) })
.build(); .build();
let ty = db.ty(self.id.into()).substitute(Interner, &substs); let ty = db.ty(self.id.into()).substitute(Interner, &substs);
Type::new(db, self.id, ty) Type::new(db, self.id, ty)
} }
@ -1854,10 +1858,10 @@ impl Function {
Type::new_with_resolver_inner(db, &resolver, ty) Type::new_with_resolver_inner(db, &resolver, ty)
} }
pub fn ret_type_with_generics( pub fn ret_type_with_args(
self, self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
mut generics: impl Iterator<Item = Type>, generics: impl Iterator<Item = Type>,
) -> Type { ) -> Type {
let resolver = self.id.resolver(db.upcast()); let resolver = self.id.resolver(db.upcast());
let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container { let parent_id: Option<GenericDefId> = match self.id.lookup(db.upcast()).container {
@ -1865,22 +1869,18 @@ impl Function {
ItemContainerId::TraitId(it) => Some(it.into()), ItemContainerId::TraitId(it) => Some(it.into()),
ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
}; };
let parent_substs = parent_id.map(|id| { let mut generics = generics.map(|it| it.ty.clone());
TyBuilder::subst_for_def(db, id, None) let mut filler = |x: &_| {
.fill(|_| { let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
GenericArg::new( match x {
Interner, ParamKind::Type => ty.cast(Interner),
GenericArgData::Ty(generics.next().unwrap().ty.clone()), ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
) }
}) };
.build()
});
let substs = TyBuilder::subst_for_def(db, self.id, parent_substs) let parent_substs =
.fill(|_| { parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build());
GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone())) let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build();
})
.build();
let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
let ty = callable_sig.ret().clone(); let ty = callable_sig.ret().clone();
@ -2197,11 +2197,7 @@ impl SelfParam {
Type { env: environment, ty } Type { env: environment, ty }
} }
pub fn ty_with_generics( pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator<Item = Type>) -> Type {
&self,
db: &dyn HirDatabase,
mut generics: impl Iterator<Item = Type>,
) -> Type {
let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container { let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container {
ItemContainerId::ImplId(it) => it.into(), ItemContainerId::ImplId(it) => it.into(),
ItemContainerId::TraitId(it) => it.into(), ItemContainerId::TraitId(it) => it.into(),
@ -2210,16 +2206,18 @@ impl SelfParam {
} }
}; };
let parent_substs = TyBuilder::subst_for_def(db, parent_id, None) let mut generics = generics.map(|it| it.ty.clone());
.fill(|_| { let mut filler = |x: &_| {
GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone())) let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
}) match x {
.build(); ParamKind::Type => ty.cast(Interner),
let substs = TyBuilder::subst_for_def(db, self.func, Some(parent_substs)) ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
.fill(|_| { }
GenericArg::new(Interner, GenericArgData::Ty(generics.next().unwrap().ty.clone())) };
})
.build(); let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build();
let substs =
TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build();
let callable_sig = let callable_sig =
db.callable_item_signature(self.func.into()).substitute(Interner, &substs); db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
let environment = db.trait_environment(self.func.into()); let environment = db.trait_environment(self.func.into());
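
All of the `ty_with_generics` → `*_with_args` renames above share one behavioural change: substitution parameters are filled from the caller-supplied iterator, and a missing argument now falls back to an error type (or an unknown const) instead of the old `generics.next().unwrap()` panic. A standalone sketch of that fallback pattern, with hypothetical `Param`/`Arg` types standing in for `ParamKind`/`GenericArg`:

```rust
// Hypothetical stand-ins for the ParamKind / GenericArg handling in the
// *_with_args methods above; none of these names are the real hir/chalk API.
#[derive(Debug, Clone, PartialEq)]
enum Arg {
    Ty(String),
    Error,        // plays the role of TyKind::Error.intern(Interner)
    UnknownConst, // plays the role of unknown_const_as_generic(..)
}

enum Param {
    Type,
    Const,
}

/// Fill `params` from the caller-supplied `args`, falling back to a
/// placeholder when too few arguments were provided, instead of the
/// `generics.next().unwrap()` panic the old code had.
fn fill_args(params: &[Param], mut args: impl Iterator<Item = String>) -> Vec<Arg> {
    params
        .iter()
        .map(|p| {
            // Consume the next argument regardless of the parameter kind,
            // mirroring the `filler` closures in the diff above.
            let ty = args.next();
            match p {
                Param::Type => ty.map(Arg::Ty).unwrap_or(Arg::Error),
                Param::Const => Arg::UnknownConst,
            }
        })
        .collect()
}

fn main() {
    let params = [Param::Type, Param::Type, Param::Const];
    let filled = fill_args(&params, std::iter::once("i32".to_string()));
    assert_eq!(filled, vec![Arg::Ty("i32".into()), Arg::Error, Arg::UnknownConst]);
}
```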

View File

@ -88,7 +88,7 @@ fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]
/// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in /// So in short it pretty much gives us a way to get type `Option<i32>` using the items we have in
/// scope. /// scope.
#[derive(Debug, Clone, Eq, Hash, PartialEq)] #[derive(Debug, Clone, Eq, Hash, PartialEq)]
pub enum TypeTree { pub enum Expr {
/// Constant /// Constant
Const(Const), Const(Const),
/// Static variable /// Static variable
@ -99,21 +99,23 @@ pub enum TypeTree {
ConstParam(ConstParam), ConstParam(ConstParam),
/// Well known type (such as `true` for bool) /// Well known type (such as `true` for bool)
FamousType { ty: Type, value: &'static str }, FamousType { ty: Type, value: &'static str },
/// Function or method call /// Function call (does not take self param)
Function { func: Function, generics: Vec<Type>, params: Vec<TypeTree> }, Function { func: Function, generics: Vec<Type>, params: Vec<Expr> },
/// Method call (has self param)
Method { func: Function, generics: Vec<Type>, target: Box<Expr>, params: Vec<Expr> },
/// Enum variant construction /// Enum variant construction
Variant { variant: Variant, generics: Vec<Type>, params: Vec<TypeTree> }, Variant { variant: Variant, generics: Vec<Type>, params: Vec<Expr> },
/// Struct construction /// Struct construction
Struct { strukt: Struct, generics: Vec<Type>, params: Vec<TypeTree> }, Struct { strukt: Struct, generics: Vec<Type>, params: Vec<Expr> },
/// Struct field access /// Struct field access
Field { type_tree: Box<TypeTree>, field: Field }, Field { expr: Box<Expr>, field: Field },
/// Passing type as reference (with `&`) /// Passing type as reference (with `&`)
Reference(Box<TypeTree>), Reference(Box<Expr>),
/// Indicates possibility of many different options that all evaluate to `ty` /// Indicates possibility of many different options that all evaluate to `ty`
Many(Type), Many(Type),
} }
impl TypeTree { impl Expr {
/// Generate source code for type tree. /// Generate source code for type tree.
/// ///
/// Note that trait imports are not added to generated code. /// Note that trait imports are not added to generated code.
@ -126,45 +128,14 @@ impl TypeTree {
) -> String { ) -> String {
let db = sema_scope.db; let db = sema_scope.db;
match self { match self {
TypeTree::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
TypeTree::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
TypeTree::Local(it) => return it.name(db).display(db.upcast()).to_string(), Expr::Local(it) => return it.name(db).display(db.upcast()).to_string(),
TypeTree::ConstParam(it) => return it.name(db).display(db.upcast()).to_string(), Expr::ConstParam(it) => return it.name(db).display(db.upcast()).to_string(),
TypeTree::FamousType { value, .. } => return value.to_string(), Expr::FamousType { value, .. } => return value.to_string(),
TypeTree::Function { func, params, .. } => { Expr::Function { func, params, .. } => {
if let Some(self_param) = func.self_param(db) { let args =
let func_name = func.name(db).display(db.upcast()).to_string(); params.iter().map(|f| f.gen_source_code(sema_scope, many_formatter)).join(", ");
let target = params
.first()
.expect("no self param")
.gen_source_code(sema_scope, many_formatter);
let args = params
.iter()
.skip(1)
.map(|f| f.gen_source_code(sema_scope, many_formatter))
.join(", ");
match func.as_assoc_item(db).unwrap().containing_trait_or_trait_impl(db) {
Some(trait_) => {
let trait_name =
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_));
let target = match self_param.access(db) {
crate::Access::Shared => format!("&{target}"),
crate::Access::Exclusive => format!("&mut {target}"),
crate::Access::Owned => target,
};
match args.is_empty() {
true => format!("{trait_name}::{func_name}({target})",),
false => format!("{trait_name}::{func_name}({target}, {args})",),
}
}
None => format!("{target}.{func_name}({args})"),
}
} else {
let args = params
.iter()
.map(|f| f.gen_source_code(sema_scope, many_formatter))
.join(", ");
match func.as_assoc_item(db).map(|it| it.container(db)) { match func.as_assoc_item(db).map(|it| it.container(db)) {
Some(container) => { Some(container) => {
@ -179,9 +150,7 @@ impl TypeTree {
.as_adt() .as_adt()
.and_then(|adt| mod_item_path(sema_scope, &adt.into())) .and_then(|adt| mod_item_path(sema_scope, &adt.into()))
{ {
Some(path) => { Some(path) => path.display(sema_scope.db.upcast()).to_string(),
path.display(sema_scope.db.upcast()).to_string()
}
None => self_ty.display(db).to_string(), None => self_ty.display(db).to_string(),
} }
} }
@ -190,14 +159,39 @@ impl TypeTree {
format!("{container_name}::{fn_name}({args})",) format!("{container_name}::{fn_name}({args})",)
} }
None => { None => {
let fn_name = let fn_name = mod_item_path_str(sema_scope, &ModuleDef::Function(*func));
mod_item_path_str(sema_scope, &ModuleDef::Function(*func));
format!("{fn_name}({args})",) format!("{fn_name}({args})",)
} }
} }
} }
Expr::Method { func, target, params, .. } => {
if target.contains_many_in_illegal_pos() {
return many_formatter(&target.ty(db));
} }
TypeTree::Variant { variant, generics, params } => {
let func_name = func.name(db).display(db.upcast()).to_string();
let self_param = func.self_param(db).unwrap();
let target = target.gen_source_code(sema_scope, many_formatter);
let args =
params.iter().map(|f| f.gen_source_code(sema_scope, many_formatter)).join(", ");
match func.as_assoc_item(db).and_then(|it| it.containing_trait_or_trait_impl(db)) {
Some(trait_) => {
let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_));
let target = match self_param.access(db) {
crate::Access::Shared => format!("&{target}"),
crate::Access::Exclusive => format!("&mut {target}"),
crate::Access::Owned => target,
};
match args.is_empty() {
true => format!("{trait_name}::{func_name}({target})",),
false => format!("{trait_name}::{func_name}({target}, {args})",),
}
}
None => format!("{target}.{func_name}({args})"),
}
}
Expr::Variant { variant, generics, params } => {
let generics = non_default_generics(db, (*variant).into(), generics); let generics = non_default_generics(db, (*variant).into(), generics);
let generics_str = match generics.is_empty() { let generics_str = match generics.is_empty() {
true => String::new(), true => String::new(),
@ -236,7 +230,7 @@ impl TypeTree {
let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant)); let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant));
format!("{prefix}{inner}") format!("{prefix}{inner}")
} }
TypeTree::Struct { strukt, generics, params } => { Expr::Struct { strukt, generics, params } => {
let generics = non_default_generics(db, (*strukt).into(), generics); let generics = non_default_generics(db, (*strukt).into(), generics);
let inner = match strukt.kind(db) { let inner = match strukt.kind(db) {
StructKind::Tuple => { StructKind::Tuple => {
@ -274,16 +268,24 @@ impl TypeTree {
let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt))); let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)));
format!("{prefix}{inner}") format!("{prefix}{inner}")
} }
TypeTree::Field { type_tree, field } => { Expr::Field { expr, field } => {
let strukt = type_tree.gen_source_code(sema_scope, many_formatter); if expr.contains_many_in_illegal_pos() {
return many_formatter(&expr.ty(db));
}
let strukt = expr.gen_source_code(sema_scope, many_formatter);
let field = field.name(db).display(db.upcast()).to_string(); let field = field.name(db).display(db.upcast()).to_string();
format!("{strukt}.{field}") format!("{strukt}.{field}")
} }
TypeTree::Reference(type_tree) => { Expr::Reference(expr) => {
let inner = type_tree.gen_source_code(sema_scope, many_formatter); if expr.contains_many_in_illegal_pos() {
return many_formatter(&expr.ty(db));
}
let inner = expr.gen_source_code(sema_scope, many_formatter);
format!("&{inner}") format!("&{inner}")
} }
TypeTree::Many(ty) => many_formatter(ty), Expr::Many(ty) => many_formatter(ty),
} }
} }
@ -292,29 +294,27 @@ impl TypeTree {
/// Same as getting the type of root node /// Same as getting the type of root node
pub fn ty(&self, db: &dyn HirDatabase) -> Type { pub fn ty(&self, db: &dyn HirDatabase) -> Type {
match self { match self {
TypeTree::Const(it) => it.ty(db), Expr::Const(it) => it.ty(db),
TypeTree::Static(it) => it.ty(db), Expr::Static(it) => it.ty(db),
TypeTree::Local(it) => it.ty(db), Expr::Local(it) => it.ty(db),
TypeTree::ConstParam(it) => it.ty(db), Expr::ConstParam(it) => it.ty(db),
TypeTree::FamousType { ty, .. } => ty.clone(), Expr::FamousType { ty, .. } => ty.clone(),
TypeTree::Function { func, generics, params } => match func.has_self_param(db) { Expr::Function { func, generics, .. } => {
true => func.ret_type_with_generics( func.ret_type_with_args(db, generics.iter().cloned())
}
Expr::Method { func, generics, target, .. } => func.ret_type_with_args(
db, db,
params[0].ty(db).type_arguments().chain(generics.iter().cloned()), target.ty(db).type_arguments().chain(generics.iter().cloned()),
), ),
false => func.ret_type_with_generics(db, generics.iter().cloned()), Expr::Variant { variant, generics, .. } => {
}, variant.parent_enum(db).ty_with_args(db, generics.iter().cloned())
TypeTree::Variant { variant, generics, .. } => {
variant.parent_enum(db).ty_with_generics(db, generics.iter().cloned())
} }
TypeTree::Struct { strukt, generics, .. } => { Expr::Struct { strukt, generics, .. } => {
strukt.ty_with_generics(db, generics.iter().cloned()) strukt.ty_with_args(db, generics.iter().cloned())
} }
TypeTree::Field { type_tree, field } => { Expr::Field { expr, field } => field.ty_with_args(db, expr.ty(db).type_arguments()),
field.ty_with_generics(db, type_tree.ty(db).type_arguments()) Expr::Reference(it) => it.ty(db),
} Expr::Many(ty) => ty.clone(),
TypeTree::Reference(it) => it.ty(db),
TypeTree::Many(ty) => ty.clone(),
} }
} }
@ -323,7 +323,7 @@ impl TypeTree {
let mut res = Vec::new(); let mut res = Vec::new();
match self { match self {
TypeTree::Function { func, params, .. } => { Expr::Method { func, params, .. } => {
res.extend(params.iter().flat_map(|it| it.traits_used(db))); res.extend(params.iter().flat_map(|it| it.traits_used(db)));
if let Some(it) = func.as_assoc_item(db) { if let Some(it) = func.as_assoc_item(db) {
if let Some(it) = it.containing_trait_or_trait_impl(db) { if let Some(it) = it.containing_trait_or_trait_impl(db) {
@ -336,4 +336,28 @@ impl TypeTree {
res res
} }
/// Check if the tree contains an `Expr::Many` variant in a place where it is illegal to insert
/// a `todo`, `unimplemented` or similar placeholder macro.
///
/// Some examples:
/// ```no_compile
/// macro!().foo
/// macro!().bar()
/// &macro!()
/// ```
fn contains_many_in_illegal_pos(&self) -> bool {
match self {
Expr::Method { target, .. } => target.contains_many_in_illegal_pos(),
Expr::Field { expr, .. } => expr.contains_many_in_illegal_pos(),
Expr::Reference(target) => target.is_many(),
Expr::Many(_) => true,
_ => false,
}
}
/// Helper function to check if the outermost type tree is the `Expr::Many` variant
pub fn is_many(&self) -> bool {
matches!(self, Expr::Many(_))
}
} }
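
To make the recursive rendering in `gen_source_code` easier to picture, here is a trimmed-down, hypothetical tree with the same shape (it is not the `Expr` enum above). It also shows why a `many_formatter` callback is passed in: the caller chooses the placeholder text that stands in for `Expr::Many`.

```rust
// Simplified, hypothetical expression tree mirroring the shape of `Expr` above.
enum Node {
    Local(&'static str),
    Field { target: Box<Node>, name: &'static str },
    Method { target: Box<Node>, name: &'static str, args: Vec<Node> },
    Many, // "lots of alternatives" -- rendered by the caller-provided formatter
}

fn render(node: &Node, many_formatter: &dyn Fn() -> String) -> String {
    match node {
        Node::Local(name) => name.to_string(),
        Node::Field { target, name } => {
            format!("{}.{}", render(target, many_formatter), name)
        }
        Node::Method { target, name, args } => {
            let args: Vec<_> = args.iter().map(|a| render(a, many_formatter)).collect();
            format!("{}.{}({})", render(target, many_formatter), name, args.join(", "))
        }
        Node::Many => many_formatter(),
    }
}

fn main() {
    let tree = Node::Method {
        target: Box::new(Node::Field {
            target: Box::new(Node::Local("foo")),
            name: "bar",
        }),
        name: "baz",
        args: vec![Node::Many],
    };
    // As an argument, a placeholder is acceptable; `todo!()` is a valid expression there.
    assert_eq!(render(&tree, &|| "todo!()".to_string()), "foo.bar.baz(todo!())");
}
```

The new `contains_many_in_illegal_pos` check then filters out trees where that placeholder would end up as the receiver of a field access, a method call, or `&`, since a suggestion like `todo!().foo` is rarely useful.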

View File

@ -7,8 +7,8 @@ use rustc_hash::{FxHashMap, FxHashSet};
use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type}; use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type};
pub mod type_tree; mod expr;
pub use type_tree::TypeTree; pub use expr::Expr;
mod tactics; mod tactics;
@ -19,48 +19,57 @@ enum NewTypesKey {
StructProjection, StructProjection,
} }
/// Helper enum to squash a big number of alternative trees into the `Many` variant, as there are
/// too many to take into account.
#[derive(Debug)] #[derive(Debug)]
enum AlternativeTrees { enum AlternativeExprs {
Few(FxHashSet<TypeTree>), /// There are few trees, so we keep track of them all
Many(Type), Few(FxHashSet<Expr>),
/// There are too many trees to keep track of
Many,
} }
impl AlternativeTrees { impl AlternativeExprs {
pub fn new( /// Construct alternative trees
threshold: usize, ///
ty: Type, /// # Arguments
trees: impl Iterator<Item = TypeTree>, /// `threshold` - threshold value for many trees (more than that is many)
) -> AlternativeTrees { /// `exprs` - expressions iterator
let mut it = AlternativeTrees::Few(Default::default()); fn new(threshold: usize, exprs: impl Iterator<Item = Expr>) -> AlternativeExprs {
it.extend_with_threshold(threshold, ty, trees); let mut it = AlternativeExprs::Few(Default::default());
it.extend_with_threshold(threshold, exprs);
it it
} }
pub fn trees(&self) -> Vec<TypeTree> { /// Get type trees stored in alternative trees (or `Expr::Many` in case of many)
///
/// # Arguments
/// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`)
fn exprs(&self, ty: &Type) -> Vec<Expr> {
match self { match self {
AlternativeTrees::Few(trees) => trees.iter().cloned().collect(), AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(),
AlternativeTrees::Many(ty) => vec![TypeTree::Many(ty.clone())], AlternativeExprs::Many => vec![Expr::Many(ty.clone())],
} }
} }
pub fn extend_with_threshold( /// Extend alternative expressions
&mut self, ///
threshold: usize, /// # Arguments
ty: Type, /// `threshold` - threshold value for many trees (more than that is many)
mut trees: impl Iterator<Item = TypeTree>, /// `exprs` - expressions iterator
) { fn extend_with_threshold(&mut self, threshold: usize, mut exprs: impl Iterator<Item = Expr>) {
match self { match self {
AlternativeTrees::Few(tts) => { AlternativeExprs::Few(tts) => {
while let Some(it) = trees.next() { while let Some(it) = exprs.next() {
if tts.len() > threshold { if tts.len() > threshold {
*self = AlternativeTrees::Many(ty); *self = AlternativeExprs::Many;
break; break;
} }
tts.insert(it); tts.insert(it);
} }
} }
AlternativeTrees::Many(_) => (), AlternativeExprs::Many => (),
} }
} }
} }
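
The squash-to-`Many` behaviour is easiest to see in isolation. A self-contained sketch of the same threshold logic, storing plain strings instead of `Expr`s (names are hypothetical):

```rust
use std::collections::HashSet;

// Hypothetical mirror of AlternativeExprs, tracking strings instead of Expr.
#[derive(Debug)]
enum Alternatives {
    Few(HashSet<String>),
    Many,
}

impl Alternatives {
    fn new(threshold: usize, exprs: impl Iterator<Item = String>) -> Self {
        let mut it = Alternatives::Few(HashSet::new());
        it.extend_with_threshold(threshold, exprs);
        it
    }

    fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator<Item = String>) {
        match self {
            Alternatives::Few(set) => {
                for e in exprs {
                    // Once we exceed the threshold, stop tracking individual options.
                    if set.len() > threshold {
                        *self = Alternatives::Many;
                        break;
                    }
                    set.insert(e);
                }
            }
            Alternatives::Many => (),
        }
    }
}

fn main() {
    // threshold = 1: a third distinct expression tips the bucket over into Many.
    let alts = Alternatives::new(1, ["a", "b", "c"].into_iter().map(String::from));
    assert!(matches!(alts, Alternatives::Many));
}
```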
@ -76,8 +85,8 @@ impl AlternativeTrees {
/// not produce any new results. /// not produce any new results.
#[derive(Default, Debug)] #[derive(Default, Debug)]
struct LookupTable { struct LookupTable {
/// All the `TypeTree`s in "value" produce the type of "key" /// All the `Expr`s in "value" produce the type of "key"
data: FxHashMap<Type, AlternativeTrees>, data: FxHashMap<Type, AlternativeExprs>,
/// New types reached since last query by the `NewTypesKey` /// New types reached since last query by the `NewTypesKey`
new_types: FxHashMap<NewTypesKey, Vec<Type>>, new_types: FxHashMap<NewTypesKey, Vec<Type>>,
/// ScopeDefs that are not interesting any more /// ScopeDefs that are not interesting any more
@ -94,40 +103,40 @@ struct LookupTable {
impl LookupTable { impl LookupTable {
/// Initialize lookup table /// Initialize lookup table
fn new() -> Self { fn new(many_threshold: usize) -> Self {
let mut res: Self = Default::default(); let mut res = Self { many_threshold, ..Default::default() };
res.new_types.insert(NewTypesKey::ImplMethod, Vec::new()); res.new_types.insert(NewTypesKey::ImplMethod, Vec::new());
res.new_types.insert(NewTypesKey::StructProjection, Vec::new()); res.new_types.insert(NewTypesKey::StructProjection, Vec::new());
res res
} }
/// Find all `TypeTree`s that unify with the `ty` /// Find all `Expr`s that unify with the `ty`
fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<TypeTree>> { fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
self.data self.data
.iter() .iter()
.find(|(t, _)| t.could_unify_with_deeply(db, ty)) .find(|(t, _)| t.could_unify_with_deeply(db, ty))
.map(|(_, tts)| tts.trees()) .map(|(t, tts)| tts.exprs(t))
} }
/// Same as find but automatically creates shared reference of types in the lookup /// Same as find but automatically creates shared reference of types in the lookup
/// ///
/// For example if we have type `i32` in data and we query for `&i32` it maps all the type /// For example if we have type `i32` in data and we query for `&i32` it maps all the type
/// trees we have for `i32` with `TypeTree::Reference` and returns them. /// trees we have for `i32` with `Expr::Reference` and returns them.
fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<TypeTree>> { fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option<Vec<Expr>> {
self.data self.data
.iter() .iter()
.find(|(t, _)| t.could_unify_with_deeply(db, ty)) .find(|(t, _)| t.could_unify_with_deeply(db, ty))
.map(|(_, tts)| tts.trees()) .map(|(t, it)| it.exprs(t))
.or_else(|| { .or_else(|| {
self.data self.data
.iter() .iter()
.find(|(t, _)| { .find(|(t, _)| {
Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, &ty) Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, &ty)
}) })
.map(|(_, tts)| { .map(|(t, it)| {
tts.trees() it.exprs(t)
.into_iter() .into_iter()
.map(|tt| TypeTree::Reference(Box::new(tt))) .map(|expr| Expr::Reference(Box::new(expr)))
.collect() .collect()
}) })
}) })
@ -138,14 +147,11 @@ impl LookupTable {
/// Note that the types have to be the same, unification is not enough as unification is not /// Note that the types have to be the same, unification is not enough as unification is not
/// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>, /// transitive. For example Vec<i32> and FxHashSet<i32> both unify with Iterator<Item = i32>,
/// but they clearly do not unify with each other. /// but they clearly do not unify with each other.
fn insert(&mut self, ty: Type, trees: impl Iterator<Item = TypeTree>) { fn insert(&mut self, ty: Type, exprs: impl Iterator<Item = Expr>) {
match self.data.get_mut(&ty) { match self.data.get_mut(&ty) {
Some(it) => it.extend_with_threshold(self.many_threshold, ty, trees), Some(it) => it.extend_with_threshold(self.many_threshold, exprs),
None => { None => {
self.data.insert( self.data.insert(ty.clone(), AlternativeExprs::new(self.many_threshold, exprs));
ty.clone(),
AlternativeTrees::new(self.many_threshold, ty.clone(), trees),
);
for it in self.new_types.values_mut() { for it in self.new_types.values_mut() {
it.push(ty.clone()); it.push(ty.clone());
} }
@ -206,6 +212,7 @@ impl LookupTable {
} }
/// Context for the `term_search` function /// Context for the `term_search` function
#[derive(Debug)]
pub struct TermSearchCtx<'a, DB: HirDatabase> { pub struct TermSearchCtx<'a, DB: HirDatabase> {
/// Semantics for the program /// Semantics for the program
pub sema: &'a Semantics<'a, DB>, pub sema: &'a Semantics<'a, DB>,
@ -230,7 +237,7 @@ pub struct TermSearchConfig {
impl Default for TermSearchConfig { impl Default for TermSearchConfig {
fn default() -> Self { fn default() -> Self {
Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 5 } Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 }
} }
} }
@ -239,9 +246,7 @@ impl Default for TermSearchConfig {
/// Search for terms (expressions) that unify with the `goal` type. /// Search for terms (expressions) that unify with the `goal` type.
/// ///
/// # Arguments /// # Arguments
/// * `sema` - Semantics for the program /// * `ctx` - Context for term search
/// * `scope` - Semantic scope, captures context for the term search
/// * `goal` - Target / expected output type
/// ///
/// Internally this function uses Breadth First Search to find path to `goal` type. /// Internally this function uses Breadth First Search to find path to `goal` type.
/// The general idea is following: /// The general idea is following:
@ -258,7 +263,7 @@ impl Default for TermSearchConfig {
/// Note that there are usually more ways we can get to the `goal` type but some are discarded to /// Note that there are usually more ways we can get to the `goal` type but some are discarded to
/// reduce the memory consumption. It is also unlikely anyone is willing to browse through /// reduce the memory consumption. It is also unlikely anyone is willing to browse through
/// thousands of possible responses so we currently take the first 10 from every tactic. /// thousands of possible responses so we currently take the first 10 from every tactic.
pub fn term_search<DB: HirDatabase>(ctx: TermSearchCtx<'_, DB>) -> Vec<TypeTree> { pub fn term_search<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<Expr> {
let module = ctx.scope.module(); let module = ctx.scope.module();
let mut defs = FxHashSet::default(); let mut defs = FxHashSet::default();
defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module))); defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module)));
@ -267,30 +272,21 @@ pub fn term_search<DB: HirDatabase>(ctx: TermSearchCtx<'_, DB>) -> Vec<TypeTree>
defs.insert(def); defs.insert(def);
}); });
let mut lookup = LookupTable::new(); let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold);
// Try trivial tactic first, also populates lookup table // Try trivial tactic first, also populates lookup table
let mut solutions: Vec<TypeTree> = tactics::trivial(&ctx, &defs, &mut lookup).collect(); let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
// Use well known types tactic before iterations as it does not depend on other tactics // Use well known types tactic before iterations as it does not depend on other tactics
solutions.extend(tactics::famous_types(&ctx, &defs, &mut lookup)); solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
let mut solution_found = !solutions.is_empty();
for _ in 0..ctx.config.depth { for _ in 0..ctx.config.depth {
lookup.new_round(); lookup.new_round();
solutions.extend(tactics::type_constructor(&ctx, &defs, &mut lookup)); solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup));
solutions.extend(tactics::free_function(&ctx, &defs, &mut lookup)); solutions.extend(tactics::free_function(ctx, &defs, &mut lookup));
solutions.extend(tactics::impl_method(&ctx, &defs, &mut lookup)); solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup));
solutions.extend(tactics::struct_projection(&ctx, &defs, &mut lookup)); solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup));
solutions.extend(tactics::impl_static_method(&ctx, &defs, &mut lookup)); solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup));
// Break after 1 round after successful solution
if solution_found {
break;
}
solution_found = !solutions.is_empty();
// Discard not interesting `ScopeDef`s for speedup // Discard not interesting `ScopeDef`s for speedup
for def in lookup.exhausted_scopedefs() { for def in lookup.exhausted_scopedefs() {
@ -298,5 +294,5 @@ pub fn term_search<DB: HirDatabase>(ctx: TermSearchCtx<'_, DB>) -> Vec<TypeTree>
} }
} }
solutions.into_iter().unique().collect() solutions.into_iter().filter(|it| !it.is_many()).unique().collect()
} }
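
As a reading aid for the driver loop above: seed the lookup with trivially reachable values, repeatedly let tactics combine what is already reachable into new expressions, and finally keep whatever unifies with the goal. The toy model below (string "types", plain functions as tactics) follows that shape only; it is not the rust-analyzer API.

```rust
use std::collections::{HashMap, HashSet};

// Toy model: a "type" is just a string and an "expression" is its source text.
type Lookup = HashMap<String, HashSet<String>>;

// A tactic looks at what is already reachable and proposes new (type, expr) pairs.
type Tactic = fn(&Lookup) -> Vec<(String, String)>;

fn term_search(goal: &str, seeds: &[(&str, &str)], tactics: &[Tactic], depth: usize) -> Vec<String> {
    let mut lookup = Lookup::new();
    for (ty, expr) in seeds {
        lookup.entry(ty.to_string()).or_default().insert(expr.to_string());
    }
    for _ in 0..depth {
        // Collect first, merge afterwards, so every tactic sees a stable snapshot.
        let new: Vec<_> = tactics.iter().flat_map(|t| t(&lookup)).collect();
        for (ty, expr) in new {
            lookup.entry(ty).or_default().insert(expr);
        }
    }
    lookup
        .get(goal)
        .map(|exprs| exprs.iter().cloned().collect::<Vec<_>>())
        .unwrap_or_default()
}

// Hypothetical tactic: wrap every reachable `i32` into `Some(..)` to build an `Option<i32>`.
fn wrap_in_some(lookup: &Lookup) -> Vec<(String, String)> {
    lookup
        .get("i32")
        .into_iter()
        .flat_map(|exprs| exprs.iter().map(|e| ("Option<i32>".to_string(), format!("Some({e})"))))
        .collect()
}

fn main() {
    let tactics: [Tactic; 1] = [wrap_in_some];
    let found = term_search("Option<i32>", &[("i32", "x")], &tactics, 2);
    assert_eq!(found, vec!["Some(x)".to_string()]);
}
```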

View File

@ -19,7 +19,7 @@ use crate::{
Variant, Variant,
}; };
use crate::term_search::{TermSearchConfig, TypeTree}; use crate::term_search::{Expr, TermSearchConfig};
use super::{LookupTable, NewTypesKey, TermSearchCtx}; use super::{LookupTable, NewTypesKey, TermSearchCtx};
@ -41,13 +41,13 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
defs: &'a FxHashSet<ScopeDef>, defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
defs.iter().filter_map(|def| { defs.iter().filter_map(|def| {
let tt = match def { let expr = match def {
ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(TypeTree::Const(*it)), ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(Expr::Const(*it)),
ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(TypeTree::Static(*it)), ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(Expr::Static(*it)),
ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(TypeTree::ConstParam(*it)), ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(Expr::ConstParam(*it)),
ScopeDef::Local(it) => { ScopeDef::Local(it) => {
if ctx.config.enable_borrowcheck { if ctx.config.enable_borrowcheck {
let borrowck = db.borrowck(it.parent).ok()?; let borrowck = db.borrowck(it.parent).ok()?;
@ -67,22 +67,22 @@ pub(super) fn trivial<'a, DB: HirDatabase>(
} }
} }
Some(TypeTree::Local(*it)) Some(Expr::Local(*it))
} }
_ => None, _ => None,
}?; }?;
lookup.mark_exhausted(*def); lookup.mark_exhausted(*def);
let ty = tt.ty(db); let ty = expr.ty(db);
lookup.insert(ty.clone(), std::iter::once(tt.clone())); lookup.insert(ty.clone(), std::iter::once(expr.clone()));
// Don't suggest local references as they are not valid for return // Don't suggest local references as they are not valid for return
if matches!(tt, TypeTree::Local(_)) && ty.contains_reference(db) { if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) {
return None; return None;
} }
ty.could_unify_with_deeply(db, &ctx.goal).then(|| tt) ty.could_unify_with_deeply(db, &ctx.goal).then(|| expr)
}) })
} }
@ -101,7 +101,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
defs: &'a FxHashSet<ScopeDef>, defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
fn variant_helper( fn variant_helper(
@ -111,14 +111,14 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
variant: Variant, variant: Variant,
goal: &Type, goal: &Type,
config: &TermSearchConfig, config: &TermSearchConfig,
) -> Vec<(Type, Vec<TypeTree>)> { ) -> Vec<(Type, Vec<Expr>)> {
let generics = GenericDef::from(variant.parent_enum(db)); // Ignore unstable
// Ignore unstable variants
if variant.is_unstable(db) { if variant.is_unstable(db) {
return Vec::new(); return Vec::new();
} }
let generics = GenericDef::from(variant.parent_enum(db));
// Ignore enums with const generics // Ignore enums with const generics
if !generics.const_params(db).is_empty() { if !generics.const_params(db).is_empty() {
return Vec::new(); return Vec::new();
@ -160,7 +160,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
}) })
.collect(); .collect();
let enum_ty = parent_enum.ty_with_generics(db, generics.iter().cloned()); let enum_ty = parent_enum.ty_with_args(db, generics.iter().cloned());
// Allow types with generics only if they take us straight to goal for // Allow types with generics only if they take us straight to goal for
// performance reasons // performance reasons
@ -174,52 +174,42 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
} }
// Early exit if some param cannot be filled from lookup // Early exit if some param cannot be filled from lookup
let param_trees: Vec<Vec<TypeTree>> = variant let param_exprs: Vec<Vec<Expr>> = variant
.fields(db) .fields(db)
.into_iter() .into_iter()
.map(|field| { .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned())))
lookup.find(db, &field.ty_with_generics(db, generics.iter().cloned()))
})
.collect::<Option<_>>()?; .collect::<Option<_>>()?;
// Note that we need special case for 0 param constructors because of multi cartesian // Note that we need special case for 0 param constructors because of multi cartesian
// product // product
let variant_trees: Vec<TypeTree> = if param_trees.is_empty() { let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![TypeTree::Variant { vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }]
variant,
generics: generics.clone(),
params: Vec::new(),
}]
} else { } else {
param_trees param_exprs
.into_iter() .into_iter()
.multi_cartesian_product() .multi_cartesian_product()
.map(|params| TypeTree::Variant { .map(|params| Expr::Variant { variant, generics: generics.clone(), params })
variant,
generics: generics.clone(),
params,
})
.collect() .collect()
}; };
lookup.insert(enum_ty.clone(), variant_trees.iter().cloned()); lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned());
Some((enum_ty, variant_trees)) Some((enum_ty, variant_exprs))
}) })
.collect() .collect()
} }
defs.iter() defs.iter()
.filter_map(move |def| match def { .filter_map(move |def| match def {
ScopeDef::ModuleDef(ModuleDef::Variant(it)) => { ScopeDef::ModuleDef(ModuleDef::Variant(it)) => {
let variant_trees = let variant_exprs =
variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config); variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config);
if variant_trees.is_empty() { if variant_exprs.is_empty() {
return None; return None;
} }
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it))); lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it)));
Some(variant_trees) Some(variant_exprs)
} }
ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => { ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => {
let trees: Vec<(Type, Vec<TypeTree>)> = enum_ let exprs: Vec<(Type, Vec<Expr>)> = enum_
.variants(db) .variants(db)
.into_iter() .into_iter()
.flat_map(|it| { .flat_map(|it| {
@ -227,11 +217,11 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
}) })
.collect(); .collect();
if !trees.is_empty() { if !exprs.is_empty() {
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_)))); lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_))));
} }
Some(trees) Some(exprs)
} }
ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => { ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => {
// Ignore unstable and not visible // Ignore unstable and not visible
@ -269,7 +259,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
.into_iter() .into_iter()
.permutations(non_default_type_params_len); .permutations(non_default_type_params_len);
let trees = generic_params let exprs = generic_params
.filter_map(|generics| { .filter_map(|generics| {
// Insert default type params // Insert default type params
let mut g = generics.into_iter(); let mut g = generics.into_iter();
@ -280,7 +270,7 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
None => g.next().expect("Missing type param"), None => g.next().expect("Missing type param"),
}) })
.collect(); .collect();
let struct_ty = it.ty_with_generics(db, generics.iter().cloned()); let struct_ty = it.ty_with_args(db, generics.iter().cloned());
// Allow types with generics only if they take us straight to goal for // Allow types with generics only if they take us straight to goal for
// performance reasons // performance reasons
@ -301,20 +291,20 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
} }
// Early exit if some param cannot be filled from lookup // Early exit if some param cannot be filled from lookup
let param_trees: Vec<Vec<TypeTree>> = fileds let param_exprs: Vec<Vec<Expr>> = fileds
.into_iter() .into_iter()
.map(|field| lookup.find(db, &field.ty(db))) .map(|field| lookup.find(db, &field.ty(db)))
.collect::<Option<_>>()?; .collect::<Option<_>>()?;
// Note that we need special case for 0 param constructors because of multi cartesian // Note that we need special case for 0 param constructors because of multi cartesian
// product // product
let struct_trees: Vec<TypeTree> = if param_trees.is_empty() { let struct_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![TypeTree::Struct { strukt: *it, generics, params: Vec::new() }] vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }]
} else { } else {
param_trees param_exprs
.into_iter() .into_iter()
.multi_cartesian_product() .multi_cartesian_product()
.map(|params| TypeTree::Struct { .map(|params| Expr::Struct {
strukt: *it, strukt: *it,
generics: generics.clone(), generics: generics.clone(),
params, params,
@ -324,17 +314,17 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>(
lookup lookup
.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it)))); .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it))));
lookup.insert(struct_ty.clone(), struct_trees.iter().cloned()); lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned());
Some((struct_ty, struct_trees)) Some((struct_ty, struct_exprs))
}) })
.collect(); .collect();
Some(trees) Some(exprs)
} }
_ => None, _ => None,
}) })
.flatten() .flatten()
.filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| trees)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| exprs))
.flatten() .flatten()
} }
@ -354,7 +344,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
defs: &'a FxHashSet<ScopeDef>, defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
defs.iter() defs.iter()
@ -394,7 +384,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
.into_iter() .into_iter()
.permutations(non_default_type_params_len); .permutations(non_default_type_params_len);
let trees: Vec<_> = generic_params let exprs: Vec<_> = generic_params
.filter_map(|generics| { .filter_map(|generics| {
// Insert default type params // Insert default type params
let mut g = generics.into_iter(); let mut g = generics.into_iter();
@ -406,7 +396,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
}) })
.collect(); .collect();
let ret_ty = it.ret_type_with_generics(db, generics.iter().cloned()); let ret_ty = it.ret_type_with_args(db, generics.iter().cloned());
// Filter out private and unsafe functions // Filter out private and unsafe functions
if !it.is_visible_from(db, module) if !it.is_visible_from(db, module)
|| it.is_unsafe_to_call(db) || it.is_unsafe_to_call(db)
@ -418,7 +408,7 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
} }
// Early exit if some param cannot be filled from lookup // Early exit if some param cannot be filled from lookup
let param_trees: Vec<Vec<TypeTree>> = it let param_exprs: Vec<Vec<Expr>> = it
.params_without_self_with_generics(db, generics.iter().cloned()) .params_without_self_with_generics(db, generics.iter().cloned())
.into_iter() .into_iter()
.map(|field| { .map(|field| {
@ -432,13 +422,13 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
// Note that we need special case for 0 param constructors because of multi cartesian // Note that we need special case for 0 param constructors because of multi cartesian
// product // product
let fn_trees: Vec<TypeTree> = if param_trees.is_empty() { let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![TypeTree::Function { func: *it, generics, params: Vec::new() }] vec![Expr::Function { func: *it, generics, params: Vec::new() }]
} else { } else {
param_trees param_exprs
.into_iter() .into_iter()
.multi_cartesian_product() .multi_cartesian_product()
.map(|params| TypeTree::Function { .map(|params| Expr::Function {
func: *it, func: *it,
generics: generics.clone(), generics: generics.clone(),
@ -448,16 +438,16 @@ pub(super) fn free_function<'a, DB: HirDatabase>(
}; };
lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it))); lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it)));
lookup.insert(ret_ty.clone(), fn_trees.iter().cloned()); lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
Some((ret_ty, fn_trees)) Some((ret_ty, fn_exprs))
}) })
.collect(); .collect();
Some(trees) Some(exprs)
} }
_ => None, _ => None,
}) })
.flatten() .flatten()
.filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| trees)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| exprs))
.flatten() .flatten()
} }
@ -479,7 +469,7 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
_defs: &'a FxHashSet<ScopeDef>, _defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
lookup lookup
@ -546,7 +536,7 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
.into_iter() .into_iter()
.permutations(non_default_type_params_len); .permutations(non_default_type_params_len);
let trees: Vec<_> = generic_params let exprs: Vec<_> = generic_params
.filter_map(|generics| { .filter_map(|generics| {
// Insert default type params // Insert default type params
let mut g = generics.into_iter(); let mut g = generics.into_iter();
@ -559,7 +549,7 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
}) })
.collect(); .collect();
let ret_ty = it.ret_type_with_generics( let ret_ty = it.ret_type_with_args(
db, db,
ty.type_arguments().chain(generics.iter().cloned()), ty.type_arguments().chain(generics.iter().cloned()),
); );
@ -578,17 +568,17 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
let self_ty = it let self_ty = it
.self_param(db) .self_param(db)
.expect("No self param") .expect("No self param")
.ty_with_generics(db, ty.type_arguments().chain(generics.iter().cloned())); .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned()));
// Ignore functions that have different self type // Ignore functions that have different self type
if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) {
return None; return None;
} }
let target_type_trees = lookup.find(db, &ty).expect("Type not in lookup"); let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup");
// Early exit if some param cannot be filled from lookup // Early exit if some param cannot be filled from lookup
let param_trees: Vec<Vec<TypeTree>> = it let param_exprs: Vec<Vec<Expr>> = it
.params_without_self_with_generics( .params_without_self_with_generics(
db, db,
ty.type_arguments().chain(generics.iter().cloned()), ty.type_arguments().chain(generics.iter().cloned()),
@ -597,20 +587,29 @@ pub(super) fn impl_method<'a, DB: HirDatabase>(
.map(|field| lookup.find_autoref(db, &field.ty())) .map(|field| lookup.find_autoref(db, &field.ty()))
.collect::<Option<_>>()?; .collect::<Option<_>>()?;
let fn_trees: Vec<TypeTree> = std::iter::once(target_type_trees) let fn_exprs: Vec<Expr> = std::iter::once(target_type_exprs)
.chain(param_trees.into_iter()) .chain(param_exprs.into_iter())
.multi_cartesian_product() .multi_cartesian_product()
.map(|params| TypeTree::Function { func: it, generics: Vec::new(), params }) .map(|params| {
let mut params = params.into_iter();
let target = Box::new(params.next().unwrap());
Expr::Method {
func: it,
generics: generics.clone(),
target,
params: params.collect(),
}
})
.collect(); .collect();
lookup.insert(ret_ty.clone(), fn_trees.iter().cloned()); lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
Some((ret_ty, fn_trees)) Some((ret_ty, fn_exprs))
}) })
.collect(); .collect();
Some(trees) Some(exprs)
}) })
.flatten() .flatten()
.filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| trees)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| exprs))
.flatten() .flatten()
} }
@ -629,26 +628,26 @@ pub(super) fn struct_projection<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
_defs: &'a FxHashSet<ScopeDef>, _defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
lookup lookup
.new_types(NewTypesKey::StructProjection) .new_types(NewTypesKey::StructProjection)
.into_iter() .into_iter()
.map(|ty| (ty.clone(), lookup.find(db, &ty).expect("TypeTree not in lookup"))) .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup")))
.flat_map(move |(ty, targets)| { .flat_map(move |(ty, targets)| {
ty.fields(db).into_iter().filter_map(move |(field, filed_ty)| { ty.fields(db).into_iter().filter_map(move |(field, filed_ty)| {
if !field.is_visible_from(db, module) { if !field.is_visible_from(db, module) {
return None; return None;
} }
let trees = targets let exprs = targets
.clone() .clone()
.into_iter() .into_iter()
.map(move |target| TypeTree::Field { field, type_tree: Box::new(target) }); .map(move |target| Expr::Field { field, expr: Box::new(target) });
Some((filed_ty, trees)) Some((filed_ty, exprs))
}) })
}) })
.filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| trees)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| exprs))
.flatten() .flatten()
} }
@ -669,20 +668,20 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
_defs: &'a FxHashSet<ScopeDef>, _defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
[ [
TypeTree::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" }, Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" },
TypeTree::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" }, Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" },
TypeTree::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" }, Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
] ]
.into_iter() .into_iter()
.map(|tt| { .map(|exprs| {
lookup.insert(tt.ty(db), std::iter::once(tt.clone())); lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
tt exprs
}) })
.filter(|tt| tt.ty(db).could_unify_with_deeply(db, &ctx.goal)) .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
} }
/// # Impl static method (without self type) tactic /// # Impl static method (without self type) tactic
@ -700,7 +699,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
ctx: &'a TermSearchCtx<'a, DB>, ctx: &'a TermSearchCtx<'a, DB>,
_defs: &'a FxHashSet<ScopeDef>, _defs: &'a FxHashSet<ScopeDef>,
lookup: &'a mut LookupTable, lookup: &'a mut LookupTable,
) -> impl Iterator<Item = TypeTree> + 'a { ) -> impl Iterator<Item = Expr> + 'a {
let db = ctx.sema.db; let db = ctx.sema.db;
let module = ctx.scope.module(); let module = ctx.scope.module();
lookup lookup
@ -771,7 +770,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
.into_iter() .into_iter()
.permutations(non_default_type_params_len); .permutations(non_default_type_params_len);
let trees: Vec<_> = generic_params let exprs: Vec<_> = generic_params
.filter_map(|generics| { .filter_map(|generics| {
// Insert default type params // Insert default type params
let mut g = generics.into_iter(); let mut g = generics.into_iter();
@ -784,7 +783,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
}) })
.collect(); .collect();
let ret_ty = it.ret_type_with_generics( let ret_ty = it.ret_type_with_args(
db, db,
ty.type_arguments().chain(generics.iter().cloned()), ty.type_arguments().chain(generics.iter().cloned()),
); );
@ -801,7 +800,7 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
// } // }
// Early exit if some param cannot be filled from lookup // Early exit if some param cannot be filled from lookup
let param_trees: Vec<Vec<TypeTree>> = it let param_exprs: Vec<Vec<Expr>> = it
.params_without_self_with_generics( .params_without_self_with_generics(
db, db,
ty.type_arguments().chain(generics.iter().cloned()), ty.type_arguments().chain(generics.iter().cloned()),
@ -812,28 +811,27 @@ pub(super) fn impl_static_method<'a, DB: HirDatabase>(
// Note that we need special case for 0 param constructors because of multi cartesian // Note that we need special case for 0 param constructors because of multi cartesian
// product // product
let fn_trees: Vec<TypeTree> = if param_trees.is_empty() { let fn_exprs: Vec<Expr> = if param_exprs.is_empty() {
vec![TypeTree::Function { func: it, generics, params: Vec::new() }] vec![Expr::Function { func: it, generics, params: Vec::new() }]
} else { } else {
param_trees param_exprs
.into_iter() .into_iter()
.multi_cartesian_product() .multi_cartesian_product()
.map(|params| TypeTree::Function { .map(|params| Expr::Function {
func: it, func: it,
generics: generics.clone(), generics: generics.clone(),
params, params,
}) })
.collect() .collect()
}; };
lookup.insert(ret_ty.clone(), fn_trees.iter().cloned()); lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned());
Some((ret_ty, fn_trees)) Some((ret_ty, fn_exprs))
}) })
.collect(); .collect();
Some(trees) Some(exprs)
}) })
.flatten() .flatten()
.filter_map(|(ty, trees)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| trees)) .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then(|| exprs))
.flatten() .flatten()
} }
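
Several tactics above share the same combination step: for every parameter, collect all known expressions of that parameter's type, then take the cartesian product of those candidate lists. A dependency-free sketch of that step follows; the real code uses `itertools`' `multi_cartesian_product`, which yields nothing for zero iterators, hence the explicit zero-parameter special cases in the diff.

```rust
/// All ways to pick one candidate expression per parameter slot.
/// Mirrors the `multi_cartesian_product` calls in the tactics above.
fn combinations(per_param: &[Vec<String>]) -> Vec<Vec<String>> {
    // Start from a single empty combination, which bakes in the
    // zero-parameter case (e.g. a `Foo::new()`-style constructor).
    let mut acc: Vec<Vec<String>> = vec![Vec::new()];
    for candidates in per_param {
        acc = acc
            .into_iter()
            .flat_map(|prefix| {
                candidates.iter().map(move |c| {
                    let mut next = prefix.clone();
                    next.push(c.clone());
                    next
                })
            })
            .collect();
    }
    acc
}

fn main() {
    // Two parameters: two candidates for the first, one for the second.
    let per_param = vec![
        vec!["a".to_string(), "b".to_string()],
        vec!["x".to_string()],
    ];
    assert_eq!(
        combinations(&per_param),
        vec![
            vec!["a".to_string(), "x".to_string()],
            vec!["b".to_string(), "x".to_string()],
        ]
    );

    // Zero parameters still yield exactly one (empty) combination.
    assert_eq!(combinations(&[]), vec![Vec::<String>::new()]);
}
```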

View File

@ -30,7 +30,7 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
goal: target_ty, goal: target_ty,
config: Default::default(), config: Default::default(),
}; };
let paths = hir::term_search::term_search(term_search_ctx); let paths = hir::term_search::term_search(&term_search_ctx);
if paths.is_empty() { if paths.is_empty() {
return None; return None;

View File

@ -40,8 +40,8 @@ use crate::{
literal::{render_struct_literal, render_variant_lit}, literal::{render_struct_literal, render_variant_lit},
macro_::render_macro, macro_::render_macro,
pattern::{render_struct_pat, render_variant_pat}, pattern::{render_struct_pat, render_variant_pat},
render_field, render_path_resolution, render_pattern_resolution, render_tuple_field, render_expr, render_field, render_path_resolution, render_pattern_resolution,
render_type_tree, render_tuple_field,
type_alias::{render_type_alias, render_type_alias_with_eq}, type_alias::{render_type_alias, render_type_alias_with_eq},
union_literal::render_union_literal, union_literal::render_union_literal,
RenderContext, RenderContext,
@ -158,12 +158,8 @@ impl Completions {
item.add_to(self, ctx.db); item.add_to(self, ctx.db);
} }
pub(crate) fn add_expr( pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) {
&mut self, match render_expr(ctx, expr) {
ctx: &CompletionContext<'_>,
expr: &hir::term_search::TypeTree,
) {
match render_type_tree(ctx, expr) {
Some(item) => item.add_to(self, ctx.db), Some(item) => item.add_to(self, ctx.db),
None => (), None => (),
} }
@ -699,7 +695,6 @@ pub(super) fn complete_name_ref(
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,
NameRefContext { nameref, kind }: &NameRefContext, NameRefContext { nameref, kind }: &NameRefContext,
) { ) {
expr::complete_expr(acc, ctx);
match kind { match kind {
NameRefKind::Path(path_ctx) => { NameRefKind::Path(path_ctx) => {
flyimport::import_on_the_fly_path(acc, ctx, path_ctx); flyimport::import_on_the_fly_path(acc, ctx, path_ctx);
@ -707,6 +702,7 @@ pub(super) fn complete_name_ref(
match &path_ctx.kind { match &path_ctx.kind {
PathKind::Expr { expr_ctx } => { PathKind::Expr { expr_ctx } => {
expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx); expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx);
expr::complete_expr(acc, ctx);
dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx); dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx);
item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx); item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx);
@ -763,6 +759,7 @@ pub(super) fn complete_name_ref(
flyimport::import_on_the_fly_dot(acc, ctx, dot_access); flyimport::import_on_the_fly_dot(acc, ctx, dot_access);
dot::complete_dot(acc, ctx, dot_access); dot::complete_dot(acc, ctx, dot_access);
postfix::complete_postfix(acc, ctx, dot_access); postfix::complete_postfix(acc, ctx, dot_access);
expr::complete_expr(acc, ctx);
} }
NameRefKind::Keyword(item) => { NameRefKind::Keyword(item) => {
keyword::complete_for_and_where(acc, ctx, item); keyword::complete_for_and_where(acc, ctx, item);

View File

@ -329,11 +329,8 @@ pub(crate) fn complete_expr_path(
} }
} }
pub(crate) fn complete_expr( pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) {
acc: &mut Completions, let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered();
ctx: &CompletionContext<'_>,
) {
let _p = profile::span("complete_expr");
if !ctx.qualifier_ctx.none() { if !ctx.qualifier_ctx.none() {
return; return;
} }
@ -351,12 +348,34 @@ pub(crate) fn complete_expr(
config: hir::term_search::TermSearchConfig { config: hir::term_search::TermSearchConfig {
enable_borrowcheck: false, enable_borrowcheck: false,
many_alternatives_threshold: 1, many_alternatives_threshold: 1,
depth: 2, depth: 6,
}, },
}; };
let exprs = hir::term_search::term_search(term_search_ctx); let exprs = hir::term_search::term_search(&term_search_ctx);
for expr in exprs { for expr in exprs {
acc.add_expr(ctx, &expr); // Expand method calls
match expr {
hir::term_search::Expr::Method { func, generics, target, params }
if target.is_many() =>
{
let target_ty = target.ty(ctx.db);
let term_search_ctx =
hir::term_search::TermSearchCtx { goal: target_ty, ..term_search_ctx };
let target_exprs = hir::term_search::term_search(&term_search_ctx);
for expr in target_exprs {
let expanded_expr = hir::term_search::Expr::Method {
func,
generics: generics.clone(),
target: Box::new(expr),
params: params.clone(),
};
acc.add_expr(ctx, &expanded_expr)
}
}
_ => acc.add_expr(ctx, &expr),
}
} }
} }
} }
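This loop is the core of the commit: when term search proposes a method call whose receiver is still a "many possible values of this type" placeholder (`target.is_many()`), the receiver type is searched again and one concrete completion is emitted per receiver found, instead of a single suggestion with an unresolved target. A self-contained toy model of that expansion; `Expr`, `Many`, and `term_search` here are simplified stand-ins for `hir::term_search::Expr` and friends, not the real API:

    #[derive(Clone, Debug, PartialEq)]
    enum Expr {
        Many(&'static str), // unresolved receiver: several expressions of this type exist
        Var(&'static str),  // a concrete local, constant, function call, ...
        Method { target: Box<Expr>, name: &'static str },
    }

    impl Expr {
        fn is_many(&self) -> bool {
            matches!(self, Expr::Many(_))
        }
    }

    // Pretend term search: every concrete expression of the requested type.
    fn term_search(goal: &str) -> Vec<Expr> {
        match goal {
            "Foo" => vec![Expr::Var("a"), Expr::Var("b")],
            _ => Vec::new(),
        }
    }

    fn expand(suggestions: Vec<Expr>) -> Vec<Expr> {
        let mut out = Vec::new();
        for expr in suggestions {
            match expr {
                // Method call on an ambiguous receiver: search the receiver
                // type again and emit one suggestion per concrete receiver.
                Expr::Method { target, name } if target.is_many() => {
                    let goal = match *target {
                        Expr::Many(goal) => goal,
                        _ => unreachable!(),
                    };
                    for receiver in term_search(goal) {
                        out.push(Expr::Method { target: Box::new(receiver), name });
                    }
                }
                other => out.push(other),
            }
        }
        out
    }

    fn main() {
        let before = vec![Expr::Method { target: Box::new(Expr::Many("Foo")), name: "bar" }];
        // The user now sees `a.bar()` and `b.bar()` instead of one suggestion
        // with an unresolved receiver.
        assert_eq!(
            expand(before),
            vec![
                Expr::Method { target: Box::new(Expr::Var("a")), name: "bar" },
                Expr::Method { target: Box::new(Expr::Var("b")), name: "bar" },
            ]
        );
    }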

View File

@ -17,7 +17,7 @@ use ide_db::{
imports::import_assets::LocatedImport, imports::import_assets::LocatedImport,
RootDatabase, SnippetCap, SymbolKind, RootDatabase, SnippetCap, SymbolKind,
}; };
use syntax::{ast, AstNode, SmolStr, SyntaxKind, TextRange}; use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange};
use text_edit::TextEdit; use text_edit::TextEdit;
use crate::{ use crate::{
@ -272,9 +272,9 @@ pub(crate) fn render_resolution_with_import_pat(
Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution)) Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
} }
pub(crate) fn render_type_tree( pub(crate) fn render_expr(
ctx: &CompletionContext<'_>, ctx: &CompletionContext<'_>,
expr: &hir::term_search::TypeTree, expr: &hir::term_search::Expr,
) -> Option<Builder> { ) -> Option<Builder> {
let mut i = 1; let mut i = 1;
let mut snippet_formatter = |ty: &hir::Type| { let mut snippet_formatter = |ty: &hir::Type| {
@ -292,31 +292,42 @@ pub(crate) fn render_type_tree(
ty.as_adt() ty.as_adt()
.and_then(|adt| adt.name(ctx.db).as_text()) .and_then(|adt| adt.name(ctx.db).as_text())
.map(|s| stdx::to_lower_snake_case(s.as_str())) .map(|s| stdx::to_lower_snake_case(s.as_str()))
.unwrap_or_else(|| String::from("_")) .unwrap_or_else(|| String::from("..."))
}; };
let label = expr.gen_source_code(&ctx.scope, &mut label_formatter); let label = expr.gen_source_code(&ctx.scope, &mut label_formatter);
let source_range = match &ctx.expected_name { let source_range = match ctx.original_token.parent() {
Some(name_or_ref) => name_or_ref.syntax().text_range(),
None => match ctx.original_token.parent() {
Some(node) => match node.ancestors().find_map(|n| ast::Path::cast(n)) { Some(node) => match node.ancestors().find_map(|n| ast::Path::cast(n)) {
Some(path) => path.syntax().text_range(), Some(path) => path.syntax().text_range(),
None => node.text_range(), None => node.text_range(),
}, },
None => ctx.source_range(), None => ctx.source_range(),
},
}; };
let mut item = CompletionItem::new(CompletionItemKind::Snippet, source_range, label); let mut item = CompletionItem::new(CompletionItemKind::Snippet, source_range, label.clone());
let snippet = format!("{}$0", expr.gen_source_code(&ctx.scope, &mut snippet_formatter)); let snippet = format!("{}$0", expr.gen_source_code(&ctx.scope, &mut snippet_formatter));
let edit = TextEdit::replace(source_range, snippet); let edit = TextEdit::replace(source_range, snippet);
item.snippet_edit(ctx.config.snippet_cap?, edit); item.snippet_edit(ctx.config.snippet_cap?, edit);
item.documentation(Documentation::new(String::from("Autogenerated expression by term search")));
item.set_relevance(crate::CompletionRelevance { item.set_relevance(crate::CompletionRelevance {
type_match: Some(crate::item::CompletionRelevanceTypeMatch::CouldUnify), type_match: compute_type_match(ctx, &expr.ty(ctx.db)),
..Default::default() ..Default::default()
}); });
for trait_ in expr.traits_used(ctx.db) {
let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_));
let Some(path) = ctx.module.find_use_path(
ctx.db,
trait_item,
ctx.config.prefer_no_std,
ctx.config.prefer_prelude,
) else {
continue;
};
item.add_import(LocatedImport::new(path, trait_item, trait_item));
}
Some(item) Some(item)
} }
@ -2243,6 +2254,8 @@ fn main() {
&[CompletionItemKind::Snippet, CompletionItemKind::Method], &[CompletionItemKind::Snippet, CompletionItemKind::Method],
expect![[r#" expect![[r#"
sn not [snippet] sn not [snippet]
sn true [type]
sn false [type]
me not() (use ops::Not) [type_could_unify+requires_import] me not() (use ops::Not) [type_could_unify+requires_import]
sn if [] sn if []
sn while [] sn while []
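Judging by the call sites in this commit, `gen_source_code` takes a formatter closure (`FnMut(&hir::Type) -> String`) that decides how to print the holes term search leaves behind: the label formatter above now falls back to "...", while the snippet formatter, whose body is elided in this hunk, appears to number tabstops given the `let mut i = 1;` counter. A rough stand-alone sketch of that pair of formatters; it is an assumption, not the commit's code, with an `Option<&str>` type name standing in for `hir::Type` and a crude helper in place of `stdx::to_lower_snake_case`:

    // Crude stand-in for stdx::to_lower_snake_case, enough for the example.
    fn to_lower_snake_case(s: &str) -> String {
        let mut out = String::new();
        for (idx, ch) in s.chars().enumerate() {
            if ch.is_uppercase() && idx != 0 {
                out.push('_');
            }
            out.push(ch.to_ascii_lowercase());
        }
        out
    }

    fn main() {
        // Label formatter: a readable placeholder name, or "..." when the
        // type has no usable name.
        let label_formatter = |ty_name: Option<&str>| {
            ty_name.map(to_lower_snake_case).unwrap_or_else(|| "...".to_owned())
        };

        // Snippet formatter: numbered tabstops so the user can fill each hole
        // in turn (the "_" fallback name here is illustrative).
        let mut i = 1;
        let mut snippet_formatter = |ty_name: Option<&str>| {
            let name = ty_name.map(to_lower_snake_case).unwrap_or_else(|| "_".to_owned());
            let placeholder = format!("${{{i}:{name}}}");
            i += 1;
            placeholder
        };

        assert_eq!(label_formatter(Some("MyStruct")), "my_struct");
        assert_eq!(snippet_formatter(Some("MyStruct")), "${1:my_struct}");
        assert_eq!(snippet_formatter(None), "${2:_}");
    }

The trait-import loop added after the relevance call then makes these snippets usable as-is: every trait the generated expression relies on (`expr.traits_used`) is attached as an extra import edit, so accepting the completion also inserts the needed `use`.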

View File

@ -316,15 +316,6 @@ fn func() {
bn RecordV {} RecordV { field$1 }$0 bn RecordV {} RecordV { field$1 }$0
bn TupleV() TupleV($1)$0 bn TupleV() TupleV($1)$0
bn UnitV UnitV$0 bn UnitV UnitV$0
sn ()
sn CONST
sn Enum::UnitV
sn STATIC
sn Unit
sn false
sn func()
sn function()
sn true
"#]], "#]],
); );
} }
@ -571,8 +562,6 @@ fn foo() {
bn B {} B { r#type$1 }$0 bn B {} B { r#type$1 }$0
bn struct {} r#struct { r#type$1 }$0 bn struct {} r#struct { r#type$1 }$0
bn type r#type$0 bn type r#type$0
sn Enum::A
sn Enum::r#type
"#]], "#]],
); );
} }
@ -597,7 +586,6 @@ fn f(t: Ty) {
"#, "#,
expect![[r#" expect![[r#"
ct ABC const ABC: Self ct ABC const ABC: Self
sn t
"#]], "#]],
); );
@ -620,7 +608,6 @@ fn f(e: MyEnum) {
expect![[r#" expect![[r#"
ct A pub const A: i32 ct A pub const A: i32
ct B pub const B: i32 ct B pub const B: i32
sn e
"#]], "#]],
); );
@ -646,7 +633,6 @@ fn f(u: U) {
expect![[r#" expect![[r#"
ct C pub const C: i32 ct C pub const C: i32
ct D pub const D: i32 ct D pub const D: i32
sn u
"#]], "#]],
); );
@ -666,7 +652,6 @@ fn f(v: u32) {
"#, "#,
expect![[r#" expect![[r#"
ct MIN pub const MIN: Self ct MIN pub const MIN: Self
sn v
"#]], "#]],
); );
} }
@ -778,7 +763,6 @@ fn f(x: EnumAlias<u8>) {
expect![[r#" expect![[r#"
bn Tuple() Tuple($1)$0 bn Tuple() Tuple($1)$0
bn Unit Unit$0 bn Unit Unit$0
sn x
"#]], "#]],
); );
} }

View File

@ -82,15 +82,6 @@ fn x<'lt, T, const C: usize>() -> $0
bt u32 u32 bt u32 u32
kw crate:: kw crate::
kw self:: kw self::
sn ()
sn C
sn CONST
sn Enum::UnitV
sn STATIC
sn Unit
sn false
sn function()
sn true
"#]], "#]],
); );
} }
@ -121,15 +112,6 @@ fn foo() -> B$0 {
it () it ()
kw crate:: kw crate::
kw self:: kw self::
sn ()
sn CONST
sn Enum::UnitV
sn STATIC
sn Unit
sn false
sn foo()
sn function()
sn true
"#]], "#]],
) )
} }
@ -234,14 +216,6 @@ fn f2(x: u64) -> $0 {
it u64 it u64
kw crate:: kw crate::
kw self:: kw self::
sn ()
sn CONST
sn Enum::UnitV
sn STATIC
sn Unit
sn false
sn function()
sn true
"#]], "#]],
); );
} }
@ -357,15 +331,6 @@ fn foo<'lt, T, const C: usize>() {
bt u32 u32 bt u32 u32
kw crate:: kw crate::
kw self:: kw self::
sn ()
sn C
sn CONST
sn Enum::UnitV
sn STATIC
sn Unit
sn false
sn function()
sn true
"#]], "#]],
); );
check( check(
@ -384,15 +349,6 @@ fn foo<'lt, T, const C: usize>() {
st Unit Unit st Unit Unit
tt Trait tt Trait
un Union Union un Union Union
sn ()
sn C
sn CONST
sn Enum::UnitV
sn STATIC
sn Unit
sn false
sn function()
sn true
"#]], "#]],
); );
} }

View File

@ -46,7 +46,7 @@ fn fixes(sema: &Semantics<'_, RootDatabase>, d: &hir::TypedHole) -> Option<Vec<A
let ctx = let ctx =
TermSearchCtx { sema, scope: &scope, goal: d.expected.clone(), config: Default::default() }; TermSearchCtx { sema, scope: &scope, goal: d.expected.clone(), config: Default::default() };
let paths = term_search(ctx); let paths = term_search(&ctx);
let mut assists = vec![]; let mut assists = vec![];
let mut formatter = |_: &hir::Type| String::from("_"); let mut formatter = |_: &hir::Type| String::from("_");

View File

@ -415,7 +415,7 @@ impl flags::AnalysisStats {
..Default::default() ..Default::default()
}, },
}; };
let found_terms = hir::term_search::term_search(ctx); let found_terms = hir::term_search::term_search(&ctx);
if found_terms.is_empty() { if found_terms.is_empty() {
acc.tail_expr_no_term += 1; acc.tail_expr_no_term += 1;
@ -428,7 +428,8 @@ impl flags::AnalysisStats {
s.chars().into_iter().filter(|c| !c.is_whitespace()).collect() s.chars().into_iter().filter(|c| !c.is_whitespace()).collect()
} }
let mut formatter = |_: &hir::Type| syntax::ast::make::ext::expr_todo().to_string(); let todo = syntax::ast::make::ext::expr_todo().to_string();
let mut formatter = |_: &hir::Type| todo.clone();
let mut syntax_hit_found = false; let mut syntax_hit_found = false;
for term in found_terms { for term in found_terms {
let generated = term.gen_source_code(&scope, &mut formatter); let generated = term.gen_source_code(&scope, &mut formatter);
@ -449,8 +450,10 @@ impl flags::AnalysisStats {
if let Some(mut err_idx) = err.find("error[E") { if let Some(mut err_idx) = err.find("error[E") {
err_idx += 7; err_idx += 7;
let err_code = &err[err_idx..err_idx + 4]; let err_code = &err[err_idx..err_idx + 4];
if err_code == "0282" { match err_code {
continue; // Byproduct of testing method "0282" => continue, // Byproduct of testing method
"0277" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882
_ => (),
} }
bar.println(err); bar.println(err);
bar.println(generated); bar.println(generated);
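The new match keeps skipping E0282 ("type annotations needed", a byproduct of how the generated term is spliced in for testing) and additionally skips E0277 (unsatisfied trait bound) when the generated expression contains a `todo!()` placeholder, per the linked rust-lang/rust#69882. A small helper that factors out that check, illustrative only, and it assumes the rendered placeholder is literally `todo!()`:

    // Mirrors the hunk above: the error code is the four characters right
    // after "error[E" in rustc's output.
    fn should_skip_rustc_error(err: &str, generated: &str) -> bool {
        let Some(idx) = err.find("error[E") else { return false };
        let Some(code) = err.get(idx + 7..idx + 11) else { return false };
        match code {
            // E0282 "type annotations needed": byproduct of the testing method.
            "0282" => true,
            // E0277 caused by todo!() placeholders in the generated term,
            // see https://github.com/rust-lang/rust/issues/69882.
            "0277" if generated.contains("todo!()") => true,
            _ => false,
        }
    }

    fn main() {
        assert!(should_skip_rustc_error("error[E0282]: type annotations needed", ""));
        assert!(should_skip_rustc_error(
            "error[E0277]: the trait bound `(): Foo` is not satisfied",
            "todo!()",
        ));
        assert!(!should_skip_rustc_error("error[E0308]: mismatched types", ""));
    }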