505: Inherent methods r=matklad a=flodiebold

This adds resolution, type checking and completion for inherent methods.

The main open question here is the caching, I think. I'm not sure whether we should cache method resolutions in a more fine-grained way; currently we just build a hash map of types -> impl blocks and iterate through all potential impl blocks when looking for a method.
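
For context, the indexing scheme described above can be sketched roughly as follows. This is a simplified sketch with stand-in types, not the actual `CrateImplBlocks`/`TyFingerprint` API added in this PR: impls are keyed by a fingerprint of their self type, and method lookup is a linear scan over the candidate impl blocks for that fingerprint.

```rust
use std::collections::HashMap;

// Simplified stand-ins for the real HIR types (DefId, ImplBlock, FnSignature).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
enum TyFingerprint {
    Adt(u32), // keyed by the id of the struct/enum definition
}

struct MethodSig {
    name: String,
    has_self_param: bool,
}

struct ImplBlockData {
    methods: Vec<MethodSig>,
}

// The coarse-grained cache: one map per crate from self-type fingerprint
// to all impl blocks for that type.
struct CrateImpls {
    impls: HashMap<TyFingerprint, Vec<ImplBlockData>>,
}

impl CrateImpls {
    // No per-method cache: iterate the candidate impl blocks for the
    // receiver's fingerprint and take the first method with a matching name.
    fn lookup_method(&self, receiver: TyFingerprint, name: &str) -> Option<&MethodSig> {
        self.impls
            .get(&receiver)?
            .iter()
            .flat_map(|block| block.methods.iter())
            .find(|m| m.has_self_param && m.name == name)
    }
}

fn main() {
    let mut impls = HashMap::new();
    impls.insert(
        TyFingerprint::Adt(0), // hypothetical def id for some `struct A`
        vec![ImplBlockData {
            methods: vec![MethodSig { name: "foo".into(), has_self_param: true }],
        }],
    );
    let index = CrateImpls { impls };
    assert!(index.lookup_method(TyFingerprint::Adt(0), "foo").is_some());
}
```

The real implementation additionally walks the autoderef chain of the receiver and threads `Cancelable` results and salsa queries through every step; a finer-grained cache keyed by something like `(TyFingerprint, Name)`, as the TODO in `method_resolution.rs` below suggests, would avoid the scan at lookup time.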

Co-authored-by: Florian Diebold <flodiebold@gmail.com>
bors[bot] 2019-01-12 21:18:14 +00:00
commit eb931c0d9e
16 changed files with 398 additions and 47 deletions

View File

@@ -113,6 +113,11 @@ impl Module {
         self.child_impl(db, name)
     }
 
+    /// Iterates over all child modules.
+    pub fn children(&self, db: &impl HirDatabase) -> Cancelable<impl Iterator<Item = Module>> {
+        self.children_impl(db)
+    }
+
     /// Finds a parent module.
     pub fn parent(&self, db: &impl HirDatabase) -> Cancelable<Option<Module>> {
         self.parent_impl(db)
@@ -268,8 +273,11 @@ pub use crate::code_model_impl::function::ScopeEntryWithSyntax;
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct FnSignature {
     pub(crate) name: Name,
-    pub(crate) args: Vec<TypeRef>,
+    pub(crate) params: Vec<TypeRef>,
     pub(crate) ret_type: TypeRef,
+    /// True if the first param is `self`. This is relevant to decide whether this
+    /// can be called as a method.
+    pub(crate) has_self_param: bool,
 }
 
 impl FnSignature {
@@ -277,13 +285,19 @@ impl FnSignature {
         &self.name
     }
 
-    pub fn args(&self) -> &[TypeRef] {
-        &self.args
+    pub fn params(&self) -> &[TypeRef] {
+        &self.params
     }
 
     pub fn ret_type(&self) -> &TypeRef {
         &self.ret_type
     }
+
+    /// True if the first arg is `self`. This is relevant to decide whether this
+    /// can be called as a method.
+    pub fn has_self_param(&self) -> bool {
+        self.has_self_param
+    }
 }
 
 impl Function {

View File

@@ -42,7 +42,8 @@ impl FnSignature {
             .name()
             .map(|n| n.as_name())
             .unwrap_or_else(Name::missing);
-        let mut args = Vec::new();
+        let mut params = Vec::new();
+        let mut has_self_param = false;
         if let Some(param_list) = node.param_list() {
             if let Some(self_param) = param_list.self_param() {
                 let self_type = if let Some(type_ref) = self_param.type_ref() {
@@ -59,11 +60,12 @@
                        }
                    }
                };
-                args.push(self_type);
+                params.push(self_type);
+                has_self_param = true;
             }
             for param in param_list.params() {
                 let type_ref = TypeRef::from_ast_opt(param.type_ref());
-                args.push(type_ref);
+                params.push(type_ref);
             }
         }
         let ret_type = if let Some(type_ref) = node.ret_type().and_then(|rt| rt.type_ref()) {
@@ -73,8 +75,9 @@
         };
         let sig = FnSignature {
             name,
-            args,
+            params,
             ret_type,
+            has_self_param,
         };
         Arc::new(sig)
     }

View File

@@ -43,7 +43,7 @@ impl FnScopes {
             scope_for: FxHashMap::default(),
         };
         let root = scopes.root_scope();
-        scopes.add_params_bindings(root, body.args());
+        scopes.add_params_bindings(root, body.params());
         compute_expr_scopes(body.body_expr(), &body, &mut scopes, root);
         scopes
     }

View File

@@ -95,6 +95,21 @@ impl Module {
         Module::from_module_id(db, loc.source_root_id, child_id).map(Some)
     }
 
+    /// Iterates over all child modules.
+    pub fn children_impl(&self, db: &impl HirDatabase) -> Cancelable<impl Iterator<Item = Module>> {
+        // FIXME this should be implementable without collecting into a vec, but
+        // it's kind of hard since the iterator needs to keep a reference to the
+        // module tree.
+        let loc = self.def_id.loc(db);
+        let module_tree = db.module_tree(loc.source_root_id)?;
+        let children = loc
+            .module_id
+            .children(&module_tree)
+            .map(|(_, module_id)| Module::from_module_id(db, loc.source_root_id, module_id))
+            .collect::<Cancelable<Vec<_>>>()?;
+        Ok(children.into_iter())
+    }
+
     pub fn parent_impl(&self, db: &impl HirDatabase) -> Cancelable<Option<Module>> {
         let loc = self.def_id.loc(db);
         let module_tree = db.module_tree(loc.source_root_id)?;

View File

@@ -5,13 +5,13 @@ use ra_db::{SourceRootId, LocationIntener, SyntaxDatabase, Cancelable};
 use crate::{
     DefLoc, DefId, MacroCallLoc, MacroCallId, Name, HirFileId,
-    SourceFileItems, SourceItemId,
+    SourceFileItems, SourceItemId, Crate,
     query_definitions,
     FnSignature, FnScopes,
     macros::MacroExpansion,
     module_tree::{ModuleId, ModuleTree},
     nameres::{ItemMap, InputModuleItems},
-    ty::{InferenceResult, Ty},
+    ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks},
     adt::{StructData, EnumData, EnumVariantData},
     impl_block::ModuleImplBlocks,
 };
@@ -102,6 +102,11 @@ pub trait HirDatabase: SyntaxDatabase
         use fn crate::impl_block::impls_in_module;
     }
 
+    fn impls_in_crate(krate: Crate) -> Cancelable<Arc<CrateImplBlocks>> {
+        type ImplsInCrateQuery;
+        use fn crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query;
+    }
+
     fn body_hir(def_id: DefId) -> Cancelable<Arc<crate::expr::Body>> {
         type BodyHirQuery;
         use fn crate::expr::body_hir;

View File

@@ -18,13 +18,13 @@ impl_arena_id!(ExprId);
 pub struct Body {
     exprs: Arena<ExprId, Expr>,
     pats: Arena<PatId, Pat>,
-    /// The patterns for the function's arguments. While the argument types are
+    /// The patterns for the function's parameters. While the parameter types are
     /// part of the function signature, the patterns are not (they don't change
     /// the external type of the function).
     ///
     /// If this `Body` is for the body of a constant, this will just be
     /// empty.
-    args: Vec<PatId>,
+    params: Vec<PatId>,
     /// The `ExprId` of the actual body expression.
     body_expr: ExprId,
 }
@@ -44,8 +44,8 @@ pub struct BodySyntaxMapping {
 }
 
 impl Body {
-    pub fn args(&self) -> &[PatId] {
-        &self.args
+    pub fn params(&self) -> &[PatId] {
+        &self.params
     }
 
     pub fn body_expr(&self) -> ExprId {
@@ -699,11 +699,11 @@ impl ExprCollector {
         }
     }
 
-    fn into_body_syntax_mapping(self, args: Vec<PatId>, body_expr: ExprId) -> BodySyntaxMapping {
+    fn into_body_syntax_mapping(self, params: Vec<PatId>, body_expr: ExprId) -> BodySyntaxMapping {
         let body = Body {
             exprs: self.exprs,
             pats: self.pats,
-            args,
+            params,
             body_expr,
         };
         BodySyntaxMapping {
@@ -719,8 +719,8 @@ impl ExprCollector {
 pub(crate) fn collect_fn_body_syntax(node: &ast::FnDef) -> BodySyntaxMapping {
     let mut collector = ExprCollector::new();
 
-    let args = if let Some(param_list) = node.param_list() {
-        let mut args = Vec::new();
+    let params = if let Some(param_list) = node.param_list() {
+        let mut params = Vec::new();
         if let Some(self_param) = param_list.self_param() {
             let self_param = LocalSyntaxPtr::new(
@@ -729,13 +729,13 @@ pub(crate) fn collect_fn_body_syntax(node: &ast::FnDef) -> BodySyntaxMapping {
                     .expect("self param without self keyword")
                     .syntax(),
             );
-            let arg = collector.alloc_pat(
+            let param = collector.alloc_pat(
                 Pat::Bind {
                     name: Name::self_param(),
                 },
                 self_param,
             );
-            args.push(arg);
+            params.push(param);
         }
 
         for param in param_list.params() {
@@ -744,15 +744,15 @@ pub(crate) fn collect_fn_body_syntax(node: &ast::FnDef) -> BodySyntaxMapping {
             } else {
                 continue;
             };
-            args.push(collector.collect_pat(pat));
+            params.push(collector.collect_pat(pat));
         }
-        args
+        params
     } else {
         Vec::new()
     };
 
     let body = collector.collect_block_opt(node.body());
-    collector.into_body_syntax_mapping(args, body)
+    collector.into_body_syntax_mapping(params, body)
 }
 
 pub(crate) fn body_syntax_mapping(

View File

@@ -33,20 +33,27 @@ impl ImplBlock {
         })
     }
 
+    pub(crate) fn from_id(module_impl_blocks: Arc<ModuleImplBlocks>, impl_id: ImplId) -> ImplBlock {
+        ImplBlock {
+            module_impl_blocks,
+            impl_id,
+        }
+    }
+
     fn impl_data(&self) -> &ImplData {
         &self.module_impl_blocks.impls[self.impl_id]
     }
 
     pub fn target_trait(&self) -> Option<&TypeRef> {
-        self.impl_data().target_trait.as_ref()
+        self.impl_data().target_trait()
     }
 
     pub fn target_type(&self) -> &TypeRef {
-        &self.impl_data().target_type
+        self.impl_data().target_type()
     }
 
     pub fn items(&self) -> &[ImplItem] {
-        &self.impl_data().items
+        self.impl_data().items()
     }
 }
@@ -64,7 +71,7 @@ impl ImplData {
         module: &Module,
         node: &ast::ImplBlock,
     ) -> Self {
-        let target_trait = node.target_type().map(TypeRef::from_ast);
+        let target_trait = node.target_trait().map(TypeRef::from_ast);
         let target_type = TypeRef::from_ast_opt(node.target_type());
         let module_loc = module.def_id.loc(db);
         let items = if let Some(item_list) = node.item_list() {
@@ -103,6 +110,18 @@ impl ImplData {
             items,
         }
     }
+
+    pub fn target_trait(&self) -> Option<&TypeRef> {
+        self.target_trait.as_ref()
+    }
+
+    pub fn target_type(&self) -> &TypeRef {
+        &self.target_type
+    }
+
+    pub fn items(&self) -> &[ImplItem] {
+        &self.items
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -133,11 +152,9 @@ impl_arena_id!(ImplId);
 /// This way, we avoid having to do this process for the whole crate whenever
 /// a file is changed; as long as the impl blocks in the file don't change,
 /// we don't need to do the second step again.
-///
-/// (The second step does not yet exist.)
 #[derive(Debug, PartialEq, Eq)]
 pub struct ModuleImplBlocks {
-    impls: Arena<ImplId, ImplData>,
+    pub(crate) impls: Arena<ImplId, ImplData>,
     impls_by_def: FxHashMap<DefId, ImplId>,
 }
@@ -153,7 +170,10 @@ impl ModuleImplBlocks {
         let (file_id, module_source) = module.definition_source(db)?;
         let node = match &module_source {
             ModuleSource::SourceFile(node) => node.syntax(),
-            ModuleSource::Module(node) => node.syntax(),
+            ModuleSource::Module(node) => node
+                .item_list()
+                .expect("inline module should have item list")
+                .syntax(),
         };
 
         let source_file_items = db.file_items(file_id.into());

View File

@@ -235,6 +235,7 @@ salsa::database_storage! {
         fn enum_data() for db::EnumDataQuery;
         fn enum_variant_data() for db::EnumVariantDataQuery;
         fn impls_in_module() for db::ImplsInModuleQuery;
+        fn impls_in_crate() for db::ImplsInCrateQuery;
         fn body_hir() for db::BodyHirQuery;
         fn body_syntax_mapping() for db::BodySyntaxMappingQuery;
         fn fn_signature() for db::FnSignatureQuery;

View File

@@ -17,6 +17,7 @@ mod autoderef;
 mod primitive;
 #[cfg(test)]
 mod tests;
+pub(crate) mod method_resolution;
 
 use std::borrow::Cow;
 use std::ops::Index;
@@ -431,7 +432,7 @@ fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable<Ty> {
     let impl_block = f.impl_block(db)?;
     // TODO we ignore type parameters for now
     let input = signature
-        .args()
+        .params()
         .iter()
         .map(|tr| Ty::from_hir(db, &module, impl_block.as_ref(), tr))
         .collect::<Cancelable<Vec<_>>>()?;
@@ -875,7 +876,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             }
             Expr::Call { callee, args } => {
                 let callee_ty = self.infer_expr(*callee, &Expectation::none())?;
-                let (arg_tys, ret_ty) = match &callee_ty {
+                let (param_tys, ret_ty) = match &callee_ty {
                     Ty::FnPtr(sig) => (&sig.input[..], sig.output.clone()),
                     _ => {
                         // not callable
@@ -886,19 +887,43 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 for (i, arg) in args.iter().enumerate() {
                     self.infer_expr(
                         *arg,
-                        &Expectation::has_type(arg_tys.get(i).cloned().unwrap_or(Ty::Unknown)),
+                        &Expectation::has_type(param_tys.get(i).cloned().unwrap_or(Ty::Unknown)),
                     )?;
                 }
                 ret_ty
             }
-            Expr::MethodCall { receiver, args, .. } => {
-                let _receiver_ty = self.infer_expr(*receiver, &Expectation::none())?;
-                // TODO resolve method...
-                for (_i, arg) in args.iter().enumerate() {
-                    // TODO unify / expect argument type
-                    self.infer_expr(*arg, &Expectation::none())?;
+            Expr::MethodCall {
+                receiver,
+                args,
+                method_name,
+            } => {
+                let receiver_ty = self.infer_expr(*receiver, &Expectation::none())?;
+                let resolved = receiver_ty.clone().lookup_method(self.db, method_name)?;
+                let method_ty = match resolved {
+                    Some(def_id) => self.db.type_for_def(def_id)?,
+                    None => Ty::Unknown,
+                };
+                let method_ty = self.insert_type_vars(method_ty);
+                let (expected_receiver_ty, param_tys, ret_ty) = match &method_ty {
+                    Ty::FnPtr(sig) => {
+                        if sig.input.len() > 0 {
+                            (&sig.input[0], &sig.input[1..], sig.output.clone())
+                        } else {
+                            (&Ty::Unknown, &[][..], sig.output.clone())
+                        }
+                    }
+                    _ => (&Ty::Unknown, &[][..], Ty::Unknown),
+                };
+                // TODO we would have to apply the autoderef/autoref steps here
+                // to get the correct receiver type to unify...
+                self.unify(expected_receiver_ty, &receiver_ty);
+                for (i, arg) in args.iter().enumerate() {
+                    self.infer_expr(
+                        *arg,
+                        &Expectation::has_type(param_tys.get(i).cloned().unwrap_or(Ty::Unknown)),
+                    )?;
                 }
-                Ty::Unknown
+                ret_ty
             }
             Expr::Match { expr, arms } => {
                 let _ty = self.infer_expr(*expr, &Expectation::none())?;
@@ -1068,7 +1093,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
     fn collect_fn_signature(&mut self, signature: &FnSignature) -> Cancelable<()> {
         let body = Arc::clone(&self.body); // avoid borrow checker problem
-        for (type_ref, pat) in signature.args().iter().zip(body.args()) {
+        for (type_ref, pat) in signature.params().iter().zip(body.params()) {
             let ty = self.make_ty(type_ref)?;
             let ty = self.insert_type_vars(ty);
             self.write_pat_ty(*pat, ty);
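
The core of the new `Expr::MethodCall` inference above is splitting the resolved method's function type into the implicit `self` slot, the remaining parameter types, and the return type, then unifying the receiver against the `self` slot. A standalone sketch of just that splitting step, with a simplified stand-in for `Ty::FnPtr` (not the real HIR types) and `split_first` in place of manual indexing:

```rust
// Simplified stand-in for the `FnSig` behind `Ty::FnPtr` in the diff above.
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Unknown,
    Named(&'static str),
}

struct FnSig {
    input: Vec<Ty>,
    output: Ty,
}

// Split a method signature into (expected receiver type, other parameter types,
// return type), mirroring the match on `Ty::FnPtr(sig)` above.
fn split_method_sig(sig: &FnSig) -> (&Ty, &[Ty], Ty) {
    match sig.input.split_first() {
        Some((receiver, rest)) => (receiver, rest, sig.output.clone()),
        // No `self` slot at all: fall back to unknowns, like the diff does.
        None => (&Ty::Unknown, &[], sig.output.clone()),
    }
}

fn main() {
    // e.g. `fn foo(self, x: u32) -> i32` from the inference test added below.
    let sig = FnSig {
        input: vec![Ty::Named("A"), Ty::Named("u32")],
        output: Ty::Named("i32"),
    };
    let (receiver, params, ret) = split_method_sig(&sig);
    assert_eq!(receiver, &Ty::Named("A"));
    assert_eq!(params, &[Ty::Named("u32")][..]);
    assert_eq!(ret, Ty::Named("i32"));
}
```

For `a.foo(1)` in the test below, the receiver slot is `A`, the remaining parameter is `u32`, and the call's type is `i32`, which matches the expected inference output (`'a.foo(1)': i32`).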

View File

@@ -0,0 +1,164 @@
//! This module is concerned with finding methods that a given type provides.
//! For details about how this works in rustc, see the method lookup page in the
//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
use std::sync::Arc;

use rustc_hash::FxHashMap;

use ra_db::{Cancelable, SourceRootId};

use crate::{HirDatabase, DefId, module_tree::ModuleId, Module, Crate, Name, Function, impl_block::{ImplId, ImplBlock, ImplItem}};
use super::Ty;

/// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TyFingerprint {
    Adt(DefId),
    // we'll also want to index impls for primitive types etc.
}

impl TyFingerprint {
    /// Creates a TyFingerprint for looking up an impl. Only certain types can
    /// have impls: if we have some `struct S`, we can have an `impl S`, but not
    /// `impl &S`. Hence, this will return `None` for reference types and such.
    fn for_impl(ty: &Ty) -> Option<TyFingerprint> {
        match ty {
            Ty::Adt { def_id, .. } => Some(TyFingerprint::Adt(*def_id)),
            _ => None,
        }
    }
}

#[derive(Debug, PartialEq, Eq)]
pub struct CrateImplBlocks {
    /// To make sense of the ModuleIds, we need the source root.
    source_root_id: SourceRootId,
    impls: FxHashMap<TyFingerprint, Vec<(ModuleId, ImplId)>>,
}

impl CrateImplBlocks {
    pub fn lookup_impl_blocks<'a>(
        &'a self,
        db: &'a impl HirDatabase,
        ty: &Ty,
    ) -> impl Iterator<Item = Cancelable<ImplBlock>> + 'a {
        let fingerprint = TyFingerprint::for_impl(ty);
        fingerprint
            .and_then(|f| self.impls.get(&f))
            .into_iter()
            .flat_map(|i| i.iter())
            .map(move |(module_id, impl_id)| {
                let module_impl_blocks = db.impls_in_module(self.source_root_id, *module_id)?;
                Ok(ImplBlock::from_id(module_impl_blocks, *impl_id))
            })
    }

    fn collect_recursive(&mut self, db: &impl HirDatabase, module: Module) -> Cancelable<()> {
        let module_id = module.def_id.loc(db).module_id;
        let module_impl_blocks = db.impls_in_module(self.source_root_id, module_id)?;

        for (impl_id, impl_data) in module_impl_blocks.impls.iter() {
            let impl_block = ImplBlock::from_id(Arc::clone(&module_impl_blocks), impl_id);

            if let Some(_target_trait) = impl_data.target_trait() {
                // ignore for now
            } else {
                let target_ty =
                    Ty::from_hir(db, &module, Some(&impl_block), impl_data.target_type())?;
                if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
                    self.impls
                        .entry(target_ty_fp)
                        .or_insert_with(Vec::new)
                        .push((module_id, impl_id));
                }
            }
        }

        for child in module.children(db)? {
            self.collect_recursive(db, child)?;
        }

        Ok(())
    }

    pub(crate) fn impls_in_crate_query(
        db: &impl HirDatabase,
        krate: Crate,
    ) -> Cancelable<Arc<CrateImplBlocks>> {
        let crate_graph = db.crate_graph();
        let file_id = crate_graph.crate_root(krate.crate_id);
        let source_root_id = db.file_source_root(file_id);
        let mut crate_impl_blocks = CrateImplBlocks {
            source_root_id,
            impls: FxHashMap::default(),
        };
        if let Some(module) = krate.root_module(db)? {
            crate_impl_blocks.collect_recursive(db, module)?;
        }
        Ok(Arc::new(crate_impl_blocks))
    }
}

fn def_crate(db: &impl HirDatabase, ty: &Ty) -> Cancelable<Option<Crate>> {
    match ty {
        Ty::Adt { def_id, .. } => def_id.krate(db),
        _ => Ok(None),
    }
}

impl Ty {
    // TODO: cache this as a query?
    // - if so, what signature? (TyFingerprint, Name)?
    // - or maybe cache all names and def_ids of methods per fingerprint?
    pub fn lookup_method(self, db: &impl HirDatabase, name: &Name) -> Cancelable<Option<DefId>> {
        self.iterate_methods(db, |f| {
            let sig = f.signature(db);
            if sig.name() == name && sig.has_self_param() {
                Ok(Some(f.def_id()))
            } else {
                Ok(None)
            }
        })
    }

    // This would be nicer if it just returned an iterator, but that's really
    // complicated with all the cancelable operations
    pub fn iterate_methods<T>(
        self,
        db: &impl HirDatabase,
        mut callback: impl FnMut(Function) -> Cancelable<Option<T>>,
    ) -> Cancelable<Option<T>> {
        // For method calls, rust first does any number of autoderef, and then one
        // autoref (i.e. when the method takes &self or &mut self). We just ignore
        // the autoref currently -- when we find a method matching the given name,
        // we assume it fits.
        // Also note that when we've got a receiver like &S, even if the method we
        // find in the end takes &self, we still do the autoderef step (just as
        // rustc does an autoderef and then autoref again).

        for derefed_ty in self.autoderef(db) {
            let krate = match def_crate(db, &derefed_ty)? {
                Some(krate) => krate,
                None => continue,
            };
            let impls = db.impls_in_crate(krate)?;

            for impl_block in impls.lookup_impl_blocks(db, &derefed_ty) {
                let impl_block = impl_block?;
                for item in impl_block.items() {
                    match item {
                        ImplItem::Method(f) => {
                            if let Some(result) = callback(f.clone())? {
                                return Ok(Some(result));
                            }
                        }
                        _ => {}
                    }
                }
            }
        }
        Ok(None)
    }
}

View File

@@ -242,6 +242,32 @@ fn test() {
     );
 }
 
+#[test]
+fn infer_inherent_method() {
+    check_inference(
+        r#"
+struct A;
+
+impl A {
+    fn foo(self, x: u32) -> i32 {}
+}
+
+mod b {
+    impl super::A {
+        fn bar(&self, x: u64) -> i64 {}
+    }
+}
+
+fn test(a: A) {
+    a.foo(1);
+    (&a).bar(1);
+    a.bar(1);
+}
+"#,
+        "inherent_method.txt",
+    );
+}
+
 fn infer(content: &str) -> String {
     let (db, _, file_id) = MockDatabase::with_single_file(content);
     let source_file = db.source_file(file_id);

View File

@@ -0,0 +1,18 @@
[32; 36) 'self': A
[38; 39) 'x': u32
[53; 55) '{}': ()
[103; 107) 'self': &A
[109; 110) 'x': u64
[124; 126) '{}': ()
[144; 145) 'a': A
[150; 198) '{ ...(1); }': ()
[156; 157) 'a': A
[156; 164) 'a.foo(1)': i32
[162; 163) '1': u32
[170; 181) '(&a).bar(1)': i64
[171; 173) '&a': &A
[172; 173) 'a': A
[179; 180) '1': u64
[187; 188) 'a': A
[187; 195) 'a.bar(1)': i64
[193; 194) '1': u64

View File

@@ -17,8 +17,9 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Ca
     };
     let receiver_ty = infer_result[expr].clone();
     if !ctx.is_call {
-        complete_fields(acc, ctx, receiver_ty)?;
+        complete_fields(acc, ctx, receiver_ty.clone())?;
     }
+    complete_methods(acc, ctx, receiver_ty)?;
     Ok(())
 }
@@ -55,6 +56,24 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty)
     Ok(())
 }
 
+fn complete_methods(
+    acc: &mut Completions,
+    ctx: &CompletionContext,
+    receiver: Ty,
+) -> Cancelable<()> {
+    receiver.iterate_methods(ctx.db, |func| {
+        let sig = func.signature(ctx.db);
+        if sig.has_self_param() {
+            CompletionItem::new(CompletionKind::Reference, sig.name().to_string())
+                .from_function(ctx, func)
+                .kind(CompletionItemKind::Method)
+                .add_to(acc);
+        }
+        Ok(None::<()>)
+    })?;
+    Ok(())
+}
+
 #[cfg(test)]
 mod tests {
     use crate::completion::*;
@@ -87,7 +106,8 @@ mod tests {
                 }
            }
            ",
-            r#"the_field "(u32,)""#,
+            r#"the_field "(u32,)"
+foo "foo($0)""#,
         );
     }
@@ -102,7 +122,8 @@ mod tests {
                 }
            }
            ",
-            r#"the_field "(u32, i32)""#,
+            r#"the_field "(u32, i32)"
+foo "foo($0)""#,
         );
     }
@@ -118,4 +139,36 @@ mod tests {
             r#""#,
         );
     }
+
+    #[test]
+    fn test_method_completion() {
+        check_ref_completion(
+            r"
+            struct A {}
+            impl A {
+                fn the_method(&self) {}
+            }
+            fn foo(a: A) {
+                a.<|>
+            }
+            ",
+            r#"the_method "the_method($0)""#,
+        );
+    }
+
+    #[test]
+    fn test_no_non_self_method() {
+        check_ref_completion(
+            r"
+            struct A {}
+            impl A {
+                fn the_method() {}
+            }
+            fn foo(a: A) {
+                a.<|>
+            }
+            ",
+            r#""#,
+        );
+    }
 }

View File

@@ -37,6 +37,7 @@ pub enum CompletionItemKind {
     Const,
     Trait,
     TypeAlias,
+    Method,
 }
 
 #[derive(Debug, PartialEq, Eq)]
@@ -183,10 +184,14 @@ impl Builder {
         self
     }
 
-    fn from_function(mut self, ctx: &CompletionContext, function: hir::Function) -> Builder {
+    pub(super) fn from_function(
+        mut self,
+        ctx: &CompletionContext,
+        function: hir::Function,
+    ) -> Builder {
         // If not an import, add parenthesis automatically.
         if ctx.use_item_syntax.is_none() && !ctx.is_call {
-            if function.signature(ctx.db).args().is_empty() {
+            if function.signature(ctx.db).params().is_empty() {
                 self.snippet = Some(format!("{}()$0", self.label));
             } else {
                 self.snippet = Some(format!("{}($0)", self.label));

View File

@@ -124,6 +124,7 @@ salsa::database_storage! {
         fn enum_data() for hir::db::EnumDataQuery;
         fn enum_variant_data() for hir::db::EnumVariantDataQuery;
        fn impls_in_module() for hir::db::ImplsInModuleQuery;
+        fn impls_in_crate() for hir::db::ImplsInCrateQuery;
         fn body_hir() for hir::db::BodyHirQuery;
         fn body_syntax_mapping() for hir::db::BodySyntaxMappingQuery;
         fn fn_signature() for hir::db::FnSignatureQuery;

View File

@@ -69,6 +69,7 @@ impl Conv for CompletionItemKind {
             CompletionItemKind::TypeAlias => Struct,
             CompletionItemKind::Const => Constant,
             CompletionItemKind::Static => Value,
+            CompletionItemKind::Method => Method,
         }
     }
 }
} }