Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-10-01 11:31:15 +00:00)
Merge #968

968: Macro aware name resolution r=matklad a=matklad

The first commit lays the groundwork for the new name resolution, including:

* extracting position-independent items from parse trees
* walking the tree of modules
* old-style macro_rules resolution

cc @pnkfelix: this looks like an API that name resolution should interact with.

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
Commit 7d3f48cdaf
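
For orientation, the data shape this commit introduces can be sketched as follows. This is a simplified, self-contained illustration, not the actual rust-analyzer code: plain `Vec` and `HashMap` stand in for `ra_arena::Arena` and `FxHashMap`, and the names only mirror the `CrateDefMap`, `CrateModuleId`, and `ModuleData` types added in the diff below.

use std::collections::HashMap;

// Stand-in for the crate-local arena id used in the real code.
type CrateModuleId = usize;

#[derive(Default, Debug)]
struct ModuleData {
    parent: Option<CrateModuleId>,
    children: HashMap<String, CrateModuleId>,
    scope: Vec<String>, // the real `ModuleScope` maps names to resolutions
}

#[derive(Default, Debug)]
struct CrateDefMap {
    root: CrateModuleId,
    modules: Vec<ModuleData>, // stand-in for `Arena<CrateModuleId, ModuleData>`
}

impl CrateDefMap {
    // Allocate a fresh module; the id is just an index into the crate-local arena.
    fn alloc_module(&mut self, parent: Option<CrateModuleId>) -> CrateModuleId {
        let id = self.modules.len();
        self.modules.push(ModuleData { parent, ..ModuleData::default() });
        id
    }
}

fn main() {
    let mut def_map = CrateDefMap::default();
    let root = def_map.alloc_module(None);
    def_map.root = root;
    let child = def_map.alloc_module(Some(root));
    def_map.modules[root].children.insert("foo".to_string(), child);
    println!("{:#?}", def_map);
}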
@ -124,6 +124,10 @@ impl CrateGraph {
|
||||
self.arena.is_empty()
|
||||
}
|
||||
|
||||
pub fn iter<'a>(&'a self) -> impl Iterator<Item = CrateId> + 'a {
|
||||
self.arena.keys().map(|it| *it)
|
||||
}
|
||||
|
||||
pub fn crate_root(&self, crate_id: CrateId) -> FileId {
|
||||
self.arena[&crate_id].file_id
|
||||
}
|
||||
|
@ -8,13 +8,12 @@ use crate::{
|
||||
Name, ScopesWithSourceMap, Ty, HirFileId,
|
||||
HirDatabase, PersistentHirDatabase,
|
||||
type_ref::TypeRef,
|
||||
nameres::{ModuleScope, Namespace, lower::ImportId},
|
||||
nameres::{ModuleScope, Namespace, ImportId, CrateModuleId},
|
||||
expr::{Body, BodySourceMap},
|
||||
ty::InferenceResult,
|
||||
adt::{EnumVariantId, StructFieldId, VariantDef},
|
||||
generics::GenericParams,
|
||||
docs::{Documentation, Docs, docs_from_ast},
|
||||
module_tree::ModuleId,
|
||||
ids::{FunctionId, StructId, EnumId, AstItemDef, ConstId, StaticId, TraitId, TypeId},
|
||||
impl_block::ImplBlock,
|
||||
resolve::Resolver,
|
||||
@ -65,7 +64,7 @@ impl Crate {
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct Module {
|
||||
pub(crate) krate: Crate,
|
||||
pub(crate) module_id: ModuleId,
|
||||
pub(crate) module_id: CrateModuleId,
|
||||
}
|
||||
|
||||
/// The defs which can be visible in the module.
|
||||
@ -173,7 +172,7 @@ impl Module {
|
||||
|
||||
/// Returns a `ModuleScope`: a set of items, visible in this module.
|
||||
pub fn scope(&self, db: &impl HirDatabase) -> ModuleScope {
|
||||
db.item_map(self.krate)[self.module_id].clone()
|
||||
db.crate_def_map(self.krate)[self.module_id].scope.clone()
|
||||
}
|
||||
|
||||
pub fn problems(&self, db: &impl HirDatabase) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
|
||||
@ -181,16 +180,16 @@ impl Module {
|
||||
}
|
||||
|
||||
pub fn resolver(&self, db: &impl HirDatabase) -> Resolver {
|
||||
let item_map = db.item_map(self.krate);
|
||||
Resolver::default().push_module_scope(item_map, *self)
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
Resolver::default().push_module_scope(def_map, self.module_id)
|
||||
}
|
||||
|
||||
pub fn declarations(self, db: &impl HirDatabase) -> Vec<ModuleDef> {
|
||||
let lowered_module = db.lower_module(self);
|
||||
lowered_module
|
||||
.declarations
|
||||
.values()
|
||||
.cloned()
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
def_map[self.module_id]
|
||||
.scope
|
||||
.entries()
|
||||
.filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
|
||||
.flat_map(|per_ns| {
|
||||
per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
|
||||
})
|
||||
|
@ -18,9 +18,7 @@ impl Crate {
|
||||
.collect()
|
||||
}
|
||||
pub(crate) fn root_module_impl(&self, db: &impl PersistentHirDatabase) -> Option<Module> {
|
||||
let module_tree = db.module_tree(*self);
|
||||
let module_id = module_tree.modules().next()?;
|
||||
|
||||
let module_id = db.crate_def_map(*self).root();
|
||||
let module = Module { krate: *self, module_id };
|
||||
Some(module)
|
||||
}
|
||||
|
@ -1,33 +1,61 @@
|
||||
use ra_syntax::{ast, SyntaxNode, TreeArc};
|
||||
use ra_db::FileId;
|
||||
use ra_syntax::{ast, SyntaxNode, TreeArc, AstNode};
|
||||
|
||||
use crate::{
|
||||
Module, ModuleSource, Problem,
|
||||
Name,
|
||||
module_tree::ModuleId,
|
||||
nameres::lower::ImportId,
|
||||
Module, ModuleSource, Problem, Name,
|
||||
nameres::{CrateModuleId, ImportId},
|
||||
HirDatabase, PersistentHirDatabase,
|
||||
HirFileId
|
||||
HirFileId, SourceItemId,
|
||||
};
|
||||
|
||||
impl ModuleSource {
|
||||
pub(crate) fn new(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: Option<FileId>,
|
||||
decl_id: Option<SourceItemId>,
|
||||
) -> ModuleSource {
|
||||
match (file_id, decl_id) {
|
||||
(Some(file_id), _) => {
|
||||
let source_file = db.parse(file_id);
|
||||
ModuleSource::SourceFile(source_file)
|
||||
}
|
||||
(None, Some(item_id)) => {
|
||||
let module = db.file_item(item_id);
|
||||
let module = ast::Module::cast(&*module).unwrap();
|
||||
assert!(module.item_list().is_some(), "expected inline module");
|
||||
ModuleSource::Module(module.to_owned())
|
||||
}
|
||||
(None, None) => panic!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Module {
|
||||
fn with_module_id(&self, module_id: ModuleId) -> Module {
|
||||
fn with_module_id(&self, module_id: CrateModuleId) -> Module {
|
||||
Module { module_id, krate: self.krate }
|
||||
}
|
||||
|
||||
pub(crate) fn name_impl(&self, db: &impl HirDatabase) -> Option<Name> {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let link = self.module_id.parent_link(&module_tree)?;
|
||||
Some(link.name(&module_tree).clone())
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let parent = def_map[self.module_id].parent?;
|
||||
def_map[parent].children.iter().find_map(|(name, module_id)| {
|
||||
if *module_id == self.module_id {
|
||||
Some(name.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn definition_source_impl(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
) -> (HirFileId, ModuleSource) {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let file_id = self.module_id.file_id(&module_tree);
|
||||
let decl_id = self.module_id.decl_id(&module_tree);
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let decl_id = def_map[self.module_id].declaration;
|
||||
let file_id = def_map[self.module_id].definition;
|
||||
let module_source = ModuleSource::new(db, file_id, decl_id);
|
||||
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id);
|
||||
(file_id, module_source)
|
||||
}
|
||||
|
||||
@ -35,11 +63,11 @@ impl Module {
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
) -> Option<(HirFileId, TreeArc<ast::Module>)> {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let link = self.module_id.parent_link(&module_tree)?;
|
||||
let file_id = link.owner(&module_tree).file_id(&module_tree);
|
||||
let src = link.source(&module_tree, db);
|
||||
Some((file_id, src))
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let decl = def_map[self.module_id].declaration?;
|
||||
let syntax_node = db.file_item(decl);
|
||||
let ast = ast::Module::cast(&syntax_node).unwrap().to_owned();
|
||||
Some((decl.file_id, ast))
|
||||
}
|
||||
|
||||
pub(crate) fn import_source_impl(
|
||||
@ -47,22 +75,21 @@ impl Module {
|
||||
db: &impl HirDatabase,
|
||||
import: ImportId,
|
||||
) -> TreeArc<ast::PathSegment> {
|
||||
let (_, source_map) = db.lower_module_with_source_map(*self);
|
||||
let (_, source) = self.definition_source(db);
|
||||
let (file_id, source) = self.definition_source(db);
|
||||
let (_, source_map) = db.raw_items_with_source_map(file_id.original_file(db));
|
||||
source_map.get(&source, import)
|
||||
}
|
||||
|
||||
pub(crate) fn crate_root_impl(&self, db: &impl PersistentHirDatabase) -> Module {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let module_id = self.module_id.crate_root(&module_tree);
|
||||
self.with_module_id(module_id)
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
self.with_module_id(def_map.root())
|
||||
}
|
||||
|
||||
/// Finds a child module with the specified name.
|
||||
pub(crate) fn child_impl(&self, db: &impl HirDatabase, name: &Name) -> Option<Module> {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let child_id = self.module_id.child(&module_tree, name)?;
|
||||
Some(self.with_module_id(child_id))
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let child_id = def_map[self.module_id].children.get(name)?;
|
||||
Some(self.with_module_id(*child_id))
|
||||
}
|
||||
|
||||
/// Iterates over all child modules.
|
||||
@ -70,18 +97,18 @@ impl Module {
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
) -> impl Iterator<Item = Module> {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let children = self
|
||||
.module_id
|
||||
.children(&module_tree)
|
||||
.map(|(_, module_id)| self.with_module_id(module_id))
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let children = def_map[self.module_id]
|
||||
.children
|
||||
.iter()
|
||||
.map(|(_, module_id)| self.with_module_id(*module_id))
|
||||
.collect::<Vec<_>>();
|
||||
children.into_iter()
|
||||
}
|
||||
|
||||
pub(crate) fn parent_impl(&self, db: &impl PersistentHirDatabase) -> Option<Module> {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
let parent_id = self.module_id.parent(&module_tree)?;
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let parent_id = def_map[self.module_id].parent?;
|
||||
Some(self.with_module_id(parent_id))
|
||||
}
|
||||
|
||||
@ -89,7 +116,14 @@ impl Module {
|
||||
&self,
|
||||
db: &impl HirDatabase,
|
||||
) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
|
||||
let module_tree = db.module_tree(self.krate);
|
||||
self.module_id.problems(&module_tree, db)
|
||||
let def_map = db.crate_def_map(self.krate);
|
||||
let (my_file_id, _) = self.definition_source(db);
|
||||
// FIXME: not entirely correct filtering by module
|
||||
def_map
|
||||
.problems()
|
||||
.iter()
|
||||
.filter(|(source_item_id, _problem)| my_file_id == source_item_id.file_id)
|
||||
.map(|(source_item_id, problem)| (db.file_item(*source_item_id), problem.clone()))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
@ -1,23 +1,18 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use ra_syntax::{SyntaxNode, TreeArc, SourceFile};
|
||||
use ra_db::{SourceDatabase, salsa};
|
||||
use ra_db::{SourceDatabase, salsa, FileId};
|
||||
|
||||
use crate::{
|
||||
MacroCallId, HirFileId,
|
||||
SourceFileItems, SourceItemId, Crate, Module, HirInterner,
|
||||
HirFileId, SourceFileItems, SourceItemId, Crate, Module, HirInterner,
|
||||
Function, FnSignature, ExprScopes, TypeAlias,
|
||||
Struct, Enum, StructField,
|
||||
Const, ConstSignature, Static,
|
||||
macros::MacroExpansion,
|
||||
module_tree::ModuleTree,
|
||||
nameres::{ItemMap, lower::{LoweredModule, ImportSourceMap}},
|
||||
nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap},
|
||||
ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig},
|
||||
adt::{StructData, EnumData},
|
||||
impl_block::{ModuleImplBlocks, ImplSourceMap},
|
||||
generics::{GenericParams, GenericDef},
|
||||
ids::SourceFileItemId,
|
||||
nameres::Namespace,
|
||||
type_ref::TypeRef,
|
||||
};
|
||||
|
||||
@ -26,9 +21,6 @@ pub trait PersistentHirDatabase: SourceDatabase + AsRef<HirInterner> {
|
||||
#[salsa::invoke(HirFileId::hir_parse)]
|
||||
fn hir_parse(&self, file_id: HirFileId) -> TreeArc<SourceFile>;
|
||||
|
||||
#[salsa::invoke(crate::macros::expand_macro_invocation)]
|
||||
fn expand_macro_invocation(&self, invoc: MacroCallId) -> Option<Arc<MacroExpansion>>;
|
||||
|
||||
#[salsa::invoke(crate::adt::StructData::struct_data_query)]
|
||||
fn struct_data(&self, s: Struct) -> Arc<StructData>;
|
||||
|
||||
@ -41,27 +33,14 @@ pub trait PersistentHirDatabase: SourceDatabase + AsRef<HirInterner> {
|
||||
#[salsa::invoke(crate::ids::SourceFileItems::file_item_query)]
|
||||
fn file_item(&self, source_item_id: SourceItemId) -> TreeArc<SyntaxNode>;
|
||||
|
||||
#[salsa::invoke(crate::module_tree::Submodule::submodules_query)]
|
||||
fn submodules(
|
||||
&self,
|
||||
file_id: HirFileId,
|
||||
delc_id: Option<SourceFileItemId>,
|
||||
) -> Arc<Vec<crate::module_tree::Submodule>>;
|
||||
#[salsa::invoke(RawItems::raw_items_query)]
|
||||
fn raw_items(&self, file_id: FileId) -> Arc<RawItems>;
|
||||
|
||||
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_with_source_map_query)]
|
||||
fn lower_module_with_source_map(
|
||||
&self,
|
||||
module: Module,
|
||||
) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
|
||||
#[salsa::invoke(RawItems::raw_items_with_source_map_query)]
|
||||
fn raw_items_with_source_map(&self, file_id: FileId) -> (Arc<RawItems>, Arc<ImportSourceMap>);
|
||||
|
||||
#[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)]
|
||||
fn lower_module(&self, module: Module) -> Arc<LoweredModule>;
|
||||
|
||||
#[salsa::invoke(crate::nameres::ItemMap::item_map_query)]
|
||||
fn item_map(&self, krate: Crate) -> Arc<ItemMap>;
|
||||
|
||||
#[salsa::invoke(crate::module_tree::ModuleTree::module_tree_query)]
|
||||
fn module_tree(&self, krate: Crate) -> Arc<ModuleTree>;
|
||||
#[salsa::invoke(CrateDefMap::crate_def_map_query)]
|
||||
fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>;
|
||||
|
||||
#[salsa::invoke(crate::impl_block::impls_in_module)]
|
||||
fn impls_in_module(&self, module: Module) -> Arc<ModuleImplBlocks>;
|
||||
|
@ -83,30 +83,37 @@ impl HirFileId {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_macro_call_id(self) -> Option<MacroCallId> {
|
||||
match self.0 {
|
||||
HirFileIdRepr::Macro(it) => Some(it),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn hir_parse(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: HirFileId,
|
||||
) -> TreeArc<SourceFile> {
|
||||
match file_id.0 {
|
||||
HirFileIdRepr::File(file_id) => db.parse(file_id),
|
||||
HirFileIdRepr::Macro(m) => {
|
||||
if let Some(exp) = db.expand_macro_invocation(m) {
|
||||
return exp.file();
|
||||
}
|
||||
HirFileIdRepr::Macro(macro_call_id) => {
|
||||
// returning an empty string looks fishy...
|
||||
SourceFile::parse("")
|
||||
parse_macro(db, macro_call_id).unwrap_or_else(|| SourceFile::parse(""))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_macro(
|
||||
db: &impl PersistentHirDatabase,
|
||||
macro_call_id: MacroCallId,
|
||||
) -> Option<TreeArc<SourceFile>> {
|
||||
let loc = macro_call_id.loc(db);
|
||||
let syntax = db.file_item(loc.source_item_id);
|
||||
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
|
||||
let (macro_arg, _) = macro_call.token_tree().and_then(mbe::ast_to_token_tree)?;
|
||||
|
||||
let def_map = db.crate_def_map(loc.module.krate);
|
||||
let (krate, macro_id) = def_map.resolve_macro(macro_call_id)?;
|
||||
let def_map = db.crate_def_map(krate);
|
||||
let macro_rules = &def_map[macro_id];
|
||||
let tt = macro_rules.expand(&macro_arg).ok()?;
|
||||
Some(mbe::token_tree_to_ast_item_list(&tt))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
enum HirFileIdRepr {
|
||||
File(FileId),
|
||||
@ -200,8 +207,14 @@ pub(crate) trait AstItemDef<N: AstNode>: ArenaId + Clone {
|
||||
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<N>, Self>;
|
||||
fn from_ast(ctx: LocationCtx<&impl PersistentHirDatabase>, ast: &N) -> Self {
|
||||
let items = ctx.db.file_items(ctx.file_id);
|
||||
let raw =
|
||||
SourceItemId { file_id: ctx.file_id, item_id: items.id_of(ctx.file_id, ast.syntax()) };
|
||||
let item_id = items.id_of(ctx.file_id, ast.syntax());
|
||||
Self::from_source_item_id_unchecked(ctx, item_id)
|
||||
}
|
||||
fn from_source_item_id_unchecked(
|
||||
ctx: LocationCtx<&impl PersistentHirDatabase>,
|
||||
item_id: SourceFileItemId,
|
||||
) -> Self {
|
||||
let raw = SourceItemId { file_id: ctx.file_id, item_id };
|
||||
let loc = ItemLoc { module: ctx.module, raw, _ty: PhantomData };
|
||||
|
||||
Self::interner(ctx.db.as_ref()).loc2id(&loc)
|
||||
@ -290,6 +303,12 @@ impl AstItemDef<ast::TypeAliasDef> for TypeId {
|
||||
pub struct SourceFileItemId(RawId);
|
||||
impl_arena_id!(SourceFileItemId);
|
||||
|
||||
impl SourceFileItemId {
|
||||
pub(crate) fn with_file_id(self, file_id: HirFileId) -> SourceItemId {
|
||||
SourceItemId { file_id, item_id: self }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct SourceItemId {
|
||||
pub(crate) file_id: HirFileId,
|
||||
@ -309,9 +328,7 @@ impl SourceFileItems {
|
||||
file_id: HirFileId,
|
||||
) -> Arc<SourceFileItems> {
|
||||
let source_file = db.hir_parse(file_id);
|
||||
let mut res = SourceFileItems { file_id, arena: Arena::default() };
|
||||
res.init(&source_file);
|
||||
Arc::new(res)
|
||||
Arc::new(SourceFileItems::from_source_file(&source_file, file_id))
|
||||
}
|
||||
|
||||
pub(crate) fn file_item_query(
|
||||
@ -324,18 +341,23 @@ impl SourceFileItems {
|
||||
.to_owned()
|
||||
}
|
||||
|
||||
fn init(&mut self, source_file: &SourceFile) {
|
||||
pub(crate) fn from_source_file(
|
||||
source_file: &SourceFile,
|
||||
file_id: HirFileId,
|
||||
) -> SourceFileItems {
|
||||
let mut res = SourceFileItems { file_id, arena: Arena::default() };
|
||||
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
// trait does not change the ids of top-level items, which helps caching
// (see the stand-alone sketch after this function).
|
||||
bfs(source_file.syntax(), |it| {
|
||||
if let Some(module_item) = ast::ModuleItem::cast(it) {
|
||||
self.alloc(module_item.syntax());
|
||||
res.alloc(module_item.syntax());
|
||||
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
|
||||
self.alloc(macro_call.syntax());
|
||||
res.alloc(macro_call.syntax());
|
||||
}
|
||||
})
|
||||
});
|
||||
res
|
||||
}
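
A stand-alone toy illustrating the breadth-first id assignment described in the comment inside `from_source_file` above. The hand-rolled `Node` tree and `bfs_ids` helper are made up for this sketch; the real code walks `SyntaxNode`s with the `bfs` helper.

use std::collections::VecDeque;

struct Node {
    name: &'static str,
    children: Vec<Node>,
}

// Hand out ids level by level, so parents always get lower ids than children.
fn bfs_ids(root: &Node) -> Vec<(usize, &'static str)> {
    let mut ids = Vec::new();
    let mut queue: VecDeque<&Node> = VecDeque::new();
    queue.push_back(root);
    while let Some(node) = queue.pop_front() {
        ids.push((ids.len(), node.name));
        queue.extend(node.children.iter());
    }
    ids
}

fn main() {
    let mut tree = Node {
        name: "file",
        children: vec![
            Node { name: "trait Foo", children: vec![Node { name: "fn a", children: vec![] }] },
            Node { name: "fn main", children: vec![] },
        ],
    };
    // [(0, "file"), (1, "trait Foo"), (2, "fn main"), (3, "fn a")]
    println!("{:?}", bfs_ids(&tree));

    // Adding a new function to the trait only appends an id; ids 0..=3 are unchanged.
    tree.children[0].children.push(Node { name: "fn b", children: vec![] });
    println!("{:?}", bfs_ids(&tree));
}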
|
||||
|
||||
fn alloc(&mut self, item: &SyntaxNode) -> SourceFileItemId {
|
||||
|
@ -4,7 +4,8 @@ use rustc_hash::FxHashMap;
|
||||
use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
|
||||
use ra_syntax::{
|
||||
AstPtr, SourceFile, TreeArc,
|
||||
ast::{self, AstNode}};
|
||||
ast::{self, AstNode}
|
||||
};
|
||||
|
||||
use crate::{
|
||||
Const, TypeAlias, Function, HirFileId,
|
||||
@ -13,7 +14,7 @@ use crate::{
|
||||
type_ref::TypeRef,
|
||||
ids::LocationCtx,
|
||||
resolve::Resolver,
|
||||
ty::Ty, generics::GenericParams
|
||||
ty::Ty, generics::GenericParams,
|
||||
};
|
||||
|
||||
use crate::code_model_api::{Module, ModuleSource};
|
||||
|
@ -24,9 +24,7 @@ mod path;
|
||||
pub mod source_binder;
|
||||
|
||||
mod ids;
|
||||
mod macros;
|
||||
mod name;
|
||||
mod module_tree;
|
||||
mod nameres;
|
||||
mod adt;
|
||||
mod type_alias;
|
||||
@ -54,8 +52,7 @@ pub use self::{
|
||||
path::{Path, PathKind},
|
||||
name::Name,
|
||||
ids::{HirFileId, MacroCallId, MacroCallLoc, HirInterner},
|
||||
macros::{MacroDef, MacroInput, MacroExpansion},
|
||||
nameres::{ItemMap, PerNs, Namespace},
|
||||
nameres::{PerNs, Namespace},
|
||||
ty::{Ty, Substs, display::HirDisplay},
|
||||
impl_block::{ImplBlock, ImplItem},
|
||||
docs::{Docs, Documentation},
|
||||
|
@ -1,135 +0,0 @@
|
||||
/// Machinery for macro expansion.
///
/// One of the more complicated things about macros is managing the source code
/// that is produced after expansion. See `HirFileId` and `MacroCallId` for how
/// we do that.
///
/// When the file-management question is resolved, all that is left is a
/// token-tree-to-token-tree transformation plus hygiene. We don't have either of
/// those yet, so all macros are string-based at the moment!
use std::sync::Arc;
|
||||
|
||||
use ra_syntax::{
|
||||
TextRange, TextUnit, SourceFile, AstNode, SyntaxNode, TreeArc, SyntaxNodePtr,
|
||||
ast,
|
||||
};
|
||||
|
||||
use crate::{MacroCallId, PersistentHirDatabase};
|
||||
|
||||
// Hard-coded defs for now :-(
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum MacroDef {
|
||||
Vec,
|
||||
}
|
||||
|
||||
impl MacroDef {
|
||||
/// Expands macro call, returning the expansion and offset to be used to
|
||||
/// convert ranges between expansion and original source.
|
||||
pub fn ast_expand(macro_call: &ast::MacroCall) -> Option<(TextUnit, MacroExpansion)> {
|
||||
let (def, input) = MacroDef::from_call(macro_call)?;
|
||||
let exp = def.expand(input)?;
|
||||
let off = macro_call.token_tree()?.syntax().range().start();
|
||||
Some((off, exp))
|
||||
}
|
||||
|
||||
fn from_call(macro_call: &ast::MacroCall) -> Option<(MacroDef, MacroInput)> {
|
||||
let def = {
|
||||
let path = macro_call.path()?;
|
||||
let name_ref = path.segment()?.name_ref()?;
|
||||
if name_ref.text() == "vec" {
|
||||
MacroDef::Vec
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let input = {
|
||||
let arg = macro_call.token_tree()?.syntax();
|
||||
MacroInput { text: arg.text().to_string() }
|
||||
};
|
||||
Some((def, input))
|
||||
}
|
||||
|
||||
fn expand(self, input: MacroInput) -> Option<MacroExpansion> {
|
||||
match self {
|
||||
MacroDef::Vec => self.expand_vec(input),
|
||||
}
|
||||
}
|
||||
fn expand_vec(self, input: MacroInput) -> Option<MacroExpansion> {
|
||||
let text = format!(r"fn dummy() {{ {}; }}", input.text);
|
||||
let file = SourceFile::parse(&text);
|
||||
let array_expr = file.syntax().descendants().find_map(ast::ArrayExpr::cast)?;
|
||||
let ptr = SyntaxNodePtr::new(array_expr.syntax());
|
||||
let src_range = TextRange::offset_len(0.into(), TextUnit::of_str(&input.text));
|
||||
let ranges_map = vec![(src_range, array_expr.syntax().range())];
|
||||
let res = MacroExpansion { text, ranges_map, ptr };
|
||||
Some(res)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct MacroInput {
|
||||
// Should be token trees
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct MacroExpansion {
|
||||
/// The result of macro expansion. Should be token tree as well.
|
||||
text: String,
|
||||
/// Correspondence between ranges in the original source code and ranges in
|
||||
/// the macro.
|
||||
ranges_map: Vec<(TextRange, TextRange)>,
|
||||
/// Implementation detail: internally, a macro is expanded to the whole file,
|
||||
/// even if it is an expression. This `ptr` selects the actual expansion from
|
||||
/// the expanded file.
|
||||
ptr: SyntaxNodePtr,
|
||||
}
|
||||
|
||||
impl MacroExpansion {
|
||||
// FIXME: does not really make sense, macro expansion is not necessary a
|
||||
// whole file. See `MacroExpansion::ptr` as well.
|
||||
pub(crate) fn file(&self) -> TreeArc<SourceFile> {
|
||||
SourceFile::parse(&self.text)
|
||||
}
|
||||
|
||||
pub fn syntax(&self) -> TreeArc<SyntaxNode> {
|
||||
self.ptr.to_node(&self.file()).to_owned()
|
||||
}
|
||||
/// Maps range in the source code to the range in the expanded code.
|
||||
pub fn map_range_forward(&self, src_range: TextRange) -> Option<TextRange> {
|
||||
for (s_range, t_range) in self.ranges_map.iter() {
|
||||
if src_range.is_subrange(&s_range) {
|
||||
let src_at_zero_range = src_range - src_range.start();
|
||||
let src_range_offset = src_range.start() - s_range.start();
|
||||
let src_range = src_at_zero_range + src_range_offset + t_range.start();
|
||||
return Some(src_range);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
/// Maps range in the expanded code to the range in the source code.
|
||||
pub fn map_range_back(&self, tgt_range: TextRange) -> Option<TextRange> {
|
||||
for (s_range, t_range) in self.ranges_map.iter() {
|
||||
if tgt_range.is_subrange(&t_range) {
|
||||
let tgt_at_zero_range = tgt_range - tgt_range.start();
|
||||
let tgt_range_offset = tgt_range.start() - t_range.start();
|
||||
let src_range = tgt_at_zero_range + tgt_range_offset + s_range.start();
|
||||
return Some(src_range);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn expand_macro_invocation(
|
||||
db: &impl PersistentHirDatabase,
|
||||
invoc: MacroCallId,
|
||||
) -> Option<Arc<MacroExpansion>> {
|
||||
let loc = invoc.loc(db);
|
||||
let syntax = db.file_item(loc.source_item_id);
|
||||
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
|
||||
|
||||
let (def, input) = MacroDef::from_call(macro_call)?;
|
||||
def.expand(input).map(Arc::new)
|
||||
}
|
@ -1,6 +1,7 @@
|
||||
test_utils::marks!(
|
||||
bogus_paths
|
||||
name_res_works_for_broken_modules
|
||||
item_map_enum_importing
|
||||
can_import_enum_variant
|
||||
type_var_cycles_resolve_completely
|
||||
type_var_cycles_resolve_as_possible
|
||||
type_var_resolves_to_int_var
|
||||
|
@ -1,331 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use arrayvec::ArrayVec;
|
||||
use relative_path::RelativePathBuf;
|
||||
use ra_db::{FileId, SourceRoot};
|
||||
use ra_syntax::{
|
||||
SyntaxNode, TreeArc,
|
||||
algo::generate,
|
||||
ast::{self, AstNode, NameOwner},
|
||||
};
|
||||
use ra_arena::{Arena, RawId, impl_arena_id};
|
||||
use test_utils::tested_by;
|
||||
|
||||
use crate::{
|
||||
Name, AsName, HirDatabase, SourceItemId, HirFileId, Problem, SourceFileItems, ModuleSource,
|
||||
PersistentHirDatabase,
|
||||
Crate,
|
||||
ids::SourceFileItemId,
|
||||
};
|
||||
|
||||
impl ModuleSource {
|
||||
pub(crate) fn new(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: HirFileId,
|
||||
decl_id: Option<SourceFileItemId>,
|
||||
) -> ModuleSource {
|
||||
match decl_id {
|
||||
Some(item_id) => {
|
||||
let module = db.file_item(SourceItemId { file_id, item_id });
|
||||
let module = ast::Module::cast(&*module).unwrap();
|
||||
assert!(module.item_list().is_some(), "expected inline module");
|
||||
ModuleSource::Module(module.to_owned())
|
||||
}
|
||||
None => {
|
||||
let source_file = db.hir_parse(file_id);
|
||||
ModuleSource::SourceFile(source_file)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
|
||||
pub struct Submodule {
|
||||
name: Name,
|
||||
is_declaration: bool,
|
||||
decl_id: SourceFileItemId,
|
||||
}
|
||||
|
||||
impl Submodule {
|
||||
pub(crate) fn submodules_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: HirFileId,
|
||||
decl_id: Option<SourceFileItemId>,
|
||||
) -> Arc<Vec<Submodule>> {
|
||||
db.check_canceled();
|
||||
let file_items = db.file_items(file_id);
|
||||
let module_source = ModuleSource::new(db, file_id, decl_id);
|
||||
let submodules = match module_source {
|
||||
ModuleSource::SourceFile(source_file) => {
|
||||
collect_submodules(file_id, &file_items, &*source_file)
|
||||
}
|
||||
ModuleSource::Module(module) => {
|
||||
collect_submodules(file_id, &file_items, module.item_list().unwrap())
|
||||
}
|
||||
};
|
||||
|
||||
return Arc::new(submodules);
|
||||
|
||||
fn collect_submodules(
|
||||
file_id: HirFileId,
|
||||
file_items: &SourceFileItems,
|
||||
root: &impl ast::ModuleItemOwner,
|
||||
) -> Vec<Submodule> {
|
||||
root.items()
|
||||
.filter_map(|item| match item.kind() {
|
||||
ast::ModuleItemKind::Module(m) => Some(m),
|
||||
_ => None,
|
||||
})
|
||||
.filter_map(|module| {
|
||||
let name = module.name()?.as_name();
|
||||
if !module.has_semi() && module.item_list().is_none() {
|
||||
tested_by!(name_res_works_for_broken_modules);
|
||||
return None;
|
||||
}
|
||||
let sub = Submodule {
|
||||
name,
|
||||
is_declaration: module.has_semi(),
|
||||
decl_id: file_items.id_of(file_id, module.syntax()),
|
||||
};
|
||||
Some(sub)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct ModuleId(RawId);
|
||||
impl_arena_id!(ModuleId);
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct LinkId(RawId);
|
||||
impl_arena_id!(LinkId);
|
||||
|
||||
/// Physically, Rust source is organized as a set of files, but logically it is
/// organized as a tree of modules. Usually, a single file corresponds to a
/// single module, but that is not necessarily always the case.
///
/// `ModuleTree` encapsulates the logic of transitioning from the fuzzy world of files
/// (which can have multiple parents) to the precise world of modules (which
/// always have one parent).
#[derive(Default, Debug, PartialEq, Eq)]
|
||||
pub struct ModuleTree {
|
||||
mods: Arena<ModuleId, ModuleData>,
|
||||
links: Arena<LinkId, LinkData>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct ModuleData {
|
||||
file_id: HirFileId,
|
||||
/// Points to `ast::Module`, `None` for the whole file.
|
||||
decl_id: Option<SourceFileItemId>,
|
||||
parent: Option<LinkId>,
|
||||
children: Vec<LinkId>,
|
||||
}
|
||||
|
||||
#[derive(Hash, Debug, PartialEq, Eq)]
|
||||
struct LinkData {
|
||||
source: SourceItemId,
|
||||
owner: ModuleId,
|
||||
name: Name,
|
||||
points_to: Vec<ModuleId>,
|
||||
problem: Option<Problem>,
|
||||
}
|
||||
|
||||
impl ModuleTree {
|
||||
pub(crate) fn module_tree_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
krate: Crate,
|
||||
) -> Arc<ModuleTree> {
|
||||
db.check_canceled();
|
||||
let mut res = ModuleTree::default();
|
||||
res.init_crate(db, krate);
|
||||
Arc::new(res)
|
||||
}
|
||||
|
||||
pub(crate) fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a {
|
||||
self.mods.iter().map(|(id, _)| id)
|
||||
}
|
||||
|
||||
pub(crate) fn find_module_by_source(
|
||||
&self,
|
||||
file_id: HirFileId,
|
||||
decl_id: Option<SourceFileItemId>,
|
||||
) -> Option<ModuleId> {
|
||||
let (res, _) =
|
||||
self.mods.iter().find(|(_, m)| (m.file_id, m.decl_id) == (file_id, decl_id))?;
|
||||
Some(res)
|
||||
}
|
||||
|
||||
fn init_crate(&mut self, db: &impl PersistentHirDatabase, krate: Crate) {
|
||||
let crate_graph = db.crate_graph();
|
||||
let file_id = crate_graph.crate_root(krate.crate_id);
|
||||
let source_root_id = db.file_source_root(file_id);
|
||||
|
||||
let source_root = db.source_root(source_root_id);
|
||||
self.init_subtree(db, &source_root, None, file_id.into(), None);
|
||||
}
|
||||
|
||||
fn init_subtree(
|
||||
&mut self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
source_root: &SourceRoot,
|
||||
parent: Option<LinkId>,
|
||||
file_id: HirFileId,
|
||||
decl_id: Option<SourceFileItemId>,
|
||||
) -> ModuleId {
|
||||
let is_root = parent.is_none();
|
||||
let id = self.alloc_mod(ModuleData { file_id, decl_id, parent, children: Vec::new() });
|
||||
for sub in db.submodules(file_id, decl_id).iter() {
|
||||
let link = self.alloc_link(LinkData {
|
||||
source: SourceItemId { file_id, item_id: sub.decl_id },
|
||||
name: sub.name.clone(),
|
||||
owner: id,
|
||||
points_to: Vec::new(),
|
||||
problem: None,
|
||||
});
|
||||
|
||||
let (points_to, problem) = if sub.is_declaration {
|
||||
let (points_to, problem) = resolve_submodule(db, file_id, &sub.name, is_root);
|
||||
let points_to = points_to
|
||||
.into_iter()
|
||||
.map(|file_id| {
|
||||
self.init_subtree(db, source_root, Some(link), file_id.into(), None)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
(points_to, problem)
|
||||
} else {
|
||||
let points_to =
|
||||
self.init_subtree(db, source_root, Some(link), file_id, Some(sub.decl_id));
|
||||
(vec![points_to], None)
|
||||
};
|
||||
|
||||
self.links[link].points_to = points_to;
|
||||
self.links[link].problem = problem;
|
||||
}
|
||||
id
|
||||
}
|
||||
|
||||
fn alloc_mod(&mut self, data: ModuleData) -> ModuleId {
|
||||
self.mods.alloc(data)
|
||||
}
|
||||
|
||||
fn alloc_link(&mut self, data: LinkData) -> LinkId {
|
||||
let owner = data.owner;
|
||||
let id = self.links.alloc(data);
|
||||
self.mods[owner].children.push(id);
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleId {
|
||||
pub(crate) fn file_id(self, tree: &ModuleTree) -> HirFileId {
|
||||
tree.mods[self].file_id
|
||||
}
|
||||
pub(crate) fn decl_id(self, tree: &ModuleTree) -> Option<SourceFileItemId> {
|
||||
tree.mods[self].decl_id
|
||||
}
|
||||
pub(crate) fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> {
|
||||
tree.mods[self].parent
|
||||
}
|
||||
pub(crate) fn parent(self, tree: &ModuleTree) -> Option<ModuleId> {
|
||||
let link = self.parent_link(tree)?;
|
||||
Some(tree.links[link].owner)
|
||||
}
|
||||
pub(crate) fn crate_root(self, tree: &ModuleTree) -> ModuleId {
|
||||
generate(Some(self), move |it| it.parent(tree)).last().unwrap()
|
||||
}
|
||||
pub(crate) fn child(self, tree: &ModuleTree, name: &Name) -> Option<ModuleId> {
|
||||
let link = tree.mods[self]
|
||||
.children
|
||||
.iter()
|
||||
.map(|&it| &tree.links[it])
|
||||
.find(|it| it.name == *name)?;
|
||||
Some(*link.points_to.first()?)
|
||||
}
|
||||
pub(crate) fn children<'a>(
|
||||
self,
|
||||
tree: &'a ModuleTree,
|
||||
) -> impl Iterator<Item = (Name, ModuleId)> + 'a {
|
||||
tree.mods[self].children.iter().filter_map(move |&it| {
|
||||
let link = &tree.links[it];
|
||||
let module = *link.points_to.first()?;
|
||||
Some((link.name.clone(), module))
|
||||
})
|
||||
}
|
||||
pub(crate) fn problems(
|
||||
self,
|
||||
tree: &ModuleTree,
|
||||
db: &impl HirDatabase,
|
||||
) -> Vec<(TreeArc<SyntaxNode>, Problem)> {
|
||||
tree.mods[self]
|
||||
.children
|
||||
.iter()
|
||||
.filter_map(|&link| {
|
||||
let p = tree.links[link].problem.clone()?;
|
||||
let s = link.source(tree, db);
|
||||
let s = s.name().unwrap().syntax().to_owned();
|
||||
Some((s, p))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl LinkId {
|
||||
pub(crate) fn owner(self, tree: &ModuleTree) -> ModuleId {
|
||||
tree.links[self].owner
|
||||
}
|
||||
pub(crate) fn name(self, tree: &ModuleTree) -> &Name {
|
||||
&tree.links[self].name
|
||||
}
|
||||
pub(crate) fn source(
|
||||
self,
|
||||
tree: &ModuleTree,
|
||||
db: &impl PersistentHirDatabase,
|
||||
) -> TreeArc<ast::Module> {
|
||||
let syntax_node = db.file_item(tree.links[self].source);
|
||||
ast::Module::cast(&syntax_node).unwrap().to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_submodule(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: HirFileId,
|
||||
name: &Name,
|
||||
is_root: bool,
|
||||
) -> (Vec<FileId>, Option<Problem>) {
|
||||
// FIXME: handle submodules of inline modules properly
|
||||
let file_id = file_id.original_file(db);
|
||||
let source_root_id = db.file_source_root(file_id);
|
||||
let path = db.file_relative_path(file_id);
|
||||
let root = RelativePathBuf::default();
|
||||
let dir_path = path.parent().unwrap_or(&root);
|
||||
let mod_name = path.file_stem().unwrap_or("unknown");
|
||||
let is_dir_owner = is_root || mod_name == "mod";
|
||||
|
||||
let file_mod = dir_path.join(format!("{}.rs", name));
|
||||
let dir_mod = dir_path.join(format!("{}/mod.rs", name));
|
||||
let file_dir_mod = dir_path.join(format!("{}/{}.rs", mod_name, name));
|
||||
let mut candidates = ArrayVec::<[_; 2]>::new();
|
||||
if is_dir_owner {
|
||||
candidates.push(file_mod.clone());
|
||||
candidates.push(dir_mod);
|
||||
} else {
|
||||
candidates.push(file_dir_mod.clone());
|
||||
};
|
||||
let sr = db.source_root(source_root_id);
|
||||
let points_to = candidates
|
||||
.into_iter()
|
||||
.filter_map(|path| sr.files.get(&path))
|
||||
.map(|&it| it)
|
||||
.collect::<Vec<_>>();
|
||||
let problem = if points_to.is_empty() {
|
||||
Some(Problem::UnresolvedModule {
|
||||
candidate: if is_dir_owner { file_mod } else { file_dir_mod },
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(points_to, problem)
|
||||
}
|
@ -64,6 +64,7 @@ impl Name {
|
||||
"str" => KnownName::Str,
|
||||
"Self" => KnownName::SelfType,
|
||||
"self" => KnownName::SelfParam,
|
||||
"macro_rules" => KnownName::MacroRules,
|
||||
_ => return None,
|
||||
};
|
||||
Some(name)
|
||||
@ -122,4 +123,6 @@ pub(crate) enum KnownName {
|
||||
|
||||
SelfType,
|
||||
SelfParam,
|
||||
|
||||
MacroRules,
|
||||
}
|
||||
|
@ -1,60 +1,148 @@
|
||||
//! Name resolution algorithm. The end result of the algorithm is an `ItemMap`:
//! a map which maps each module to its scope: the set of items visible in the
//! module. That is, we only resolve imports here; name resolution of item
//! bodies will be done in a separate step.
//!
//! Like rustc, we use an iterative per-crate algorithm: we start with scopes
//! containing only directly defined items, and then iteratively resolve
//! imports.
//!
//! To make this work nicely in the IDE scenario, we place `InputModuleItems`
//! in between raw syntax and name resolution. `InputModuleItems` are computed
//! using only the module's syntax, and contain all directly defined items plus
//! imports. The plan is to make `InputModuleItems` independent of local
//! modifications (that is, typing inside a function should not change IMIs),
//! so that the results of name resolution can be preserved unless the module
//! structure itself is modified.
pub(crate) mod lower;
|
||||
/// This module implements the import-resolution/macro-expansion algorithm.
///
/// The result of this module is `CrateDefMap`: a data structure which contains:
///
/// * a tree of modules for the crate
/// * for each module, a set of items visible in the module (directly declared
///   or imported)
///
/// Note that `CrateDefMap` contains fully macro-expanded code.
///
/// Computing `CrateDefMap` can be partitioned into several logically
/// independent "phases". The phases are mutually recursive, though, so there's
/// no strict ordering.
///
/// ## Collecting RawItems
///
/// This happens in the `raw` module, which parses a single source file into a
/// set of top-level items. Nested imports are desugared to flat imports in
/// this phase. Macro calls are represented as a triple of (Path, Option<Name>,
/// TokenTree).
///
/// ## Collecting Modules
///
/// This happens in the `collector` module. In this phase, we recursively walk
/// the tree of modules, collect raw items from submodules, populate module
/// scopes with defined items (so, we assign item ids in this phase) and record
/// the set of unresolved imports and macros.
///
/// While we walk the tree of modules, we also record macro_rules definitions
/// and expand calls to macro_rules-defined macros.
///
/// ## Resolving Imports
///
/// We maintain a list of currently unresolved imports. On every iteration, we
/// try to resolve some imports from this list. If an import is resolved, we
/// record it by adding an item to the current module's scope and, if
/// necessary, by recursively populating glob imports.
///
/// ## Resolving Macros
///
/// macro_rules from the same crate use a global mutable namespace. We expand
/// them immediately, when we collect modules.
///
/// Macros from other crates (including proc-macros) can be used with the
/// `foo::bar!` syntax. We handle them similarly to imports: there's a list of
/// unexpanded macros. On every iteration, we try to resolve each macro call
/// path and, upon success, we run macro expansion and the "collect modules"
/// phase on the result.
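
The "Resolving Imports" phase above is a fixed-point iteration. Below is a minimal, self-contained sketch of that loop; `Import` and `try_resolve` are made-up stand-ins, not the actual `collector` code, and the only point illustrated is the termination condition (stop once an iteration resolves nothing new). The real resolver in this diff additionally panics if it fails to reach a fixed point after 1000 iterations.

#[derive(Debug)]
struct Import {
    path: Vec<&'static str>,
    resolved: bool,
}

// Pretend resolver: an import "resolves" once its first segment is a known name.
fn try_resolve(known: &[&str], import: &Import) -> bool {
    import.path.first().map_or(false, |seg| known.contains(seg))
}

fn main() {
    let known = ["std", "crate"];
    let mut imports = vec![
        Import { path: vec!["crate", "foo", "Bar"], resolved: false },
        Import { path: vec!["unknown", "Baz"], resolved: false },
    ];
    loop {
        let resolved_before = imports.iter().filter(|it| it.resolved).count();
        for import in imports.iter_mut().filter(|it| !it.resolved) {
            if try_resolve(&known, import) {
                import.resolved = true;
            }
        }
        let resolved_after = imports.iter().filter(|it| it.resolved).count();
        if resolved_after == resolved_before {
            break; // fixed point: this iteration resolved nothing new
        }
    }
    println!("{:#?}", imports);
}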
|
||||
|
||||
use std::{time, sync::Arc};
|
||||
mod per_ns;
|
||||
mod raw;
|
||||
mod collector;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ra_arena::map::ArenaMap;
|
||||
use ra_db::Edition;
|
||||
use rustc_hash::FxHashMap;
|
||||
use ra_arena::{Arena, RawId, impl_arena_id};
|
||||
use ra_db::{FileId, Edition};
|
||||
use test_utils::tested_by;
|
||||
|
||||
use crate::{
|
||||
Module, ModuleDef,
|
||||
Path, PathKind, PersistentHirDatabase,
|
||||
Crate, Name,
|
||||
module_tree::{ModuleId, ModuleTree},
|
||||
nameres::lower::{ImportId, LoweredModule, ImportData},
|
||||
ModuleDef, Name, Crate, Module, Problem,
|
||||
PersistentHirDatabase, Path, PathKind, HirFileId,
|
||||
ids::{SourceItemId, SourceFileItemId, MacroCallId},
|
||||
};
|
||||
|
||||
/// `ItemMap` is the result of module name resolution. It contains, for each
|
||||
/// module, the set of visible items.
|
||||
pub(crate) use self::raw::{RawItems, ImportId, ImportSourceMap};
|
||||
|
||||
pub use self::per_ns::{PerNs, Namespace};
|
||||
|
||||
/// Contains all top-level defs from a macro-expanded crate
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct ItemMap {
|
||||
pub struct CrateDefMap {
|
||||
krate: Crate,
|
||||
edition: Edition,
|
||||
/// The prelude module for this crate. This either comes from an import
|
||||
/// marked with the `prelude_import` attribute, or (in the normal case) from
|
||||
/// a dependency (`std` or `core`).
|
||||
pub(crate) prelude: Option<Module>,
|
||||
pub(crate) extern_prelude: FxHashMap<Name, ModuleDef>,
|
||||
per_module: ArenaMap<ModuleId, ModuleScope>,
|
||||
prelude: Option<Module>,
|
||||
extern_prelude: FxHashMap<Name, ModuleDef>,
|
||||
root: CrateModuleId,
|
||||
modules: Arena<CrateModuleId, ModuleData>,
|
||||
macros: Arena<CrateMacroId, mbe::MacroRules>,
|
||||
public_macros: FxHashMap<Name, CrateMacroId>,
|
||||
macro_resolutions: FxHashMap<MacroCallId, (Crate, CrateMacroId)>,
|
||||
problems: CrateDefMapProblems,
|
||||
}
|
||||
|
||||
impl std::ops::Index<ModuleId> for ItemMap {
|
||||
type Output = ModuleScope;
|
||||
fn index(&self, id: ModuleId) -> &ModuleScope {
|
||||
&self.per_module[id]
|
||||
impl std::ops::Index<CrateModuleId> for CrateDefMap {
|
||||
type Output = ModuleData;
|
||||
fn index(&self, id: CrateModuleId) -> &ModuleData {
|
||||
&self.modules[id]
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Index<CrateMacroId> for CrateDefMap {
|
||||
type Output = mbe::MacroRules;
|
||||
fn index(&self, id: CrateMacroId) -> &mbe::MacroRules {
|
||||
&self.macros[id]
|
||||
}
|
||||
}
|
||||
|
||||
/// An ID of a macro, **local** to a specific crate
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub(crate) struct CrateMacroId(RawId);
|
||||
impl_arena_id!(CrateMacroId);
|
||||
|
||||
/// An ID of a module, **local** to a specific crate
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub(crate) struct CrateModuleId(RawId);
|
||||
impl_arena_id!(CrateModuleId);
|
||||
|
||||
#[derive(Default, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct ModuleData {
|
||||
pub(crate) parent: Option<CrateModuleId>,
|
||||
pub(crate) children: FxHashMap<Name, CrateModuleId>,
|
||||
pub(crate) scope: ModuleScope,
|
||||
/// None for root
|
||||
pub(crate) declaration: Option<SourceItemId>,
|
||||
/// None for inline modules.
|
||||
///
|
||||
/// Note that non-inline modules, by definition, live inside a non-macro file.
|
||||
pub(crate) definition: Option<FileId>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct CrateDefMapProblems {
|
||||
problems: Vec<(SourceItemId, Problem)>,
|
||||
}
|
||||
|
||||
impl CrateDefMapProblems {
|
||||
fn add(&mut self, source_item_id: SourceItemId, problem: Problem) {
|
||||
self.problems.push((source_item_id, problem))
|
||||
}
|
||||
|
||||
pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = (&'a SourceItemId, &'a Problem)> + 'a {
|
||||
self.problems.iter().map(|(s, p)| (s, p))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq, Clone)]
|
||||
pub struct ModuleScope {
|
||||
pub(crate) items: FxHashMap<Name, Resolution>,
|
||||
items: FxHashMap<Name, Resolution>,
|
||||
}
|
||||
|
||||
impl ModuleScope {
|
||||
@ -66,8 +154,6 @@ impl ModuleScope {
|
||||
}
|
||||
}
|
||||
|
||||
/// `Resolution` is basically `DefId` atm, but it should account for stuff like
|
||||
/// multiple namespaces, ambiguity and errors.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default)]
|
||||
pub struct Resolution {
|
||||
/// None for unresolved
|
||||
@ -76,372 +162,6 @@ pub struct Resolution {
|
||||
pub import: Option<ImportId>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum Namespace {
|
||||
Types,
|
||||
Values,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct PerNs<T> {
|
||||
pub types: Option<T>,
|
||||
pub values: Option<T>,
|
||||
}
|
||||
|
||||
impl<T> Default for PerNs<T> {
|
||||
fn default() -> Self {
|
||||
PerNs { types: None, values: None }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PerNs<T> {
|
||||
pub fn none() -> PerNs<T> {
|
||||
PerNs { types: None, values: None }
|
||||
}
|
||||
|
||||
pub fn values(t: T) -> PerNs<T> {
|
||||
PerNs { types: None, values: Some(t) }
|
||||
}
|
||||
|
||||
pub fn types(t: T) -> PerNs<T> {
|
||||
PerNs { types: Some(t), values: None }
|
||||
}
|
||||
|
||||
pub fn both(types: T, values: T) -> PerNs<T> {
|
||||
PerNs { types: Some(types), values: Some(values) }
|
||||
}
|
||||
|
||||
pub fn is_none(&self) -> bool {
|
||||
self.types.is_none() && self.values.is_none()
|
||||
}
|
||||
|
||||
pub fn is_both(&self) -> bool {
|
||||
self.types.is_some() && self.values.is_some()
|
||||
}
|
||||
|
||||
pub fn take(self, namespace: Namespace) -> Option<T> {
|
||||
match namespace {
|
||||
Namespace::Types => self.types,
|
||||
Namespace::Values => self.values,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn take_types(self) -> Option<T> {
|
||||
self.take(Namespace::Types)
|
||||
}
|
||||
|
||||
pub fn take_values(self) -> Option<T> {
|
||||
self.take(Namespace::Values)
|
||||
}
|
||||
|
||||
pub fn get(&self, namespace: Namespace) -> Option<&T> {
|
||||
self.as_ref().take(namespace)
|
||||
}
|
||||
|
||||
pub fn as_ref(&self) -> PerNs<&T> {
|
||||
PerNs { types: self.types.as_ref(), values: self.values.as_ref() }
|
||||
}
|
||||
|
||||
pub fn or(self, other: PerNs<T>) -> PerNs<T> {
|
||||
PerNs { types: self.types.or(other.types), values: self.values.or(other.values) }
|
||||
}
|
||||
|
||||
pub fn and_then<U>(self, f: impl Fn(T) -> Option<U>) -> PerNs<U> {
|
||||
PerNs { types: self.types.and_then(&f), values: self.values.and_then(&f) }
|
||||
}
|
||||
|
||||
pub fn map<U>(self, f: impl Fn(T) -> U) -> PerNs<U> {
|
||||
PerNs { types: self.types.map(&f), values: self.values.map(&f) }
|
||||
}
|
||||
}
|
||||
|
||||
struct Resolver<'a, DB> {
|
||||
db: &'a DB,
|
||||
input: &'a FxHashMap<ModuleId, Arc<LoweredModule>>,
|
||||
krate: Crate,
|
||||
module_tree: Arc<ModuleTree>,
|
||||
processed_imports: FxHashSet<(ModuleId, ImportId)>,
|
||||
/// If module `a` has `use b::*`, then this contains the mapping b -> a (and the import)
|
||||
glob_imports: FxHashMap<ModuleId, Vec<(ModuleId, ImportId)>>,
|
||||
result: ItemMap,
|
||||
}
|
||||
|
||||
impl<'a, DB> Resolver<'a, DB>
|
||||
where
|
||||
DB: PersistentHirDatabase,
|
||||
{
|
||||
fn new(
|
||||
db: &'a DB,
|
||||
input: &'a FxHashMap<ModuleId, Arc<LoweredModule>>,
|
||||
krate: Crate,
|
||||
) -> Resolver<'a, DB> {
|
||||
let module_tree = db.module_tree(krate);
|
||||
Resolver {
|
||||
db,
|
||||
input,
|
||||
krate,
|
||||
module_tree,
|
||||
processed_imports: FxHashSet::default(),
|
||||
glob_imports: FxHashMap::default(),
|
||||
result: ItemMap {
|
||||
edition: krate.edition(db),
|
||||
prelude: None,
|
||||
extern_prelude: FxHashMap::default(),
|
||||
per_module: ArenaMap::default(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn resolve(mut self) -> ItemMap {
|
||||
self.populate_extern_prelude();
|
||||
for (&module_id, items) in self.input.iter() {
|
||||
self.populate_module(module_id, Arc::clone(items));
|
||||
}
|
||||
|
||||
let mut iter = 0;
|
||||
loop {
|
||||
iter += 1;
|
||||
if iter > 1000 {
|
||||
panic!("failed to reach fixedpoint after 1000 iters")
|
||||
}
|
||||
let processed_imports_count = self.processed_imports.len();
|
||||
for &module_id in self.input.keys() {
|
||||
self.db.check_canceled();
|
||||
self.resolve_imports(module_id);
|
||||
}
|
||||
if processed_imports_count == self.processed_imports.len() {
|
||||
// no new imports resolved
|
||||
break;
|
||||
}
|
||||
}
|
||||
self.result
|
||||
}
|
||||
|
||||
fn populate_extern_prelude(&mut self) {
|
||||
for dep in self.krate.dependencies(self.db) {
|
||||
log::debug!("crate dep {:?} -> {:?}", dep.name, dep.krate);
|
||||
if let Some(module) = dep.krate.root_module(self.db) {
|
||||
self.result.extern_prelude.insert(dep.name.clone(), module.into());
|
||||
}
|
||||
// look for the prelude
|
||||
if self.result.prelude.is_none() {
|
||||
let item_map = self.db.item_map(dep.krate);
|
||||
if item_map.prelude.is_some() {
|
||||
self.result.prelude = item_map.prelude;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn populate_module(&mut self, module_id: ModuleId, input: Arc<LoweredModule>) {
|
||||
let mut module_items = ModuleScope::default();
|
||||
for (import_id, import_data) in input.imports.iter() {
|
||||
if let Some(last_segment) = import_data.path.segments.iter().last() {
|
||||
if !import_data.is_glob {
|
||||
let name =
|
||||
import_data.alias.clone().unwrap_or_else(|| last_segment.name.clone());
|
||||
module_items
|
||||
.items
|
||||
.insert(name, Resolution { def: PerNs::none(), import: Some(import_id) });
|
||||
}
|
||||
}
|
||||
}
|
||||
// Populate explicitly declared items, except modules
|
||||
for (name, &def) in input.declarations.iter() {
|
||||
let resolution = Resolution { def, import: None };
|
||||
module_items.items.insert(name.clone(), resolution);
|
||||
}
|
||||
|
||||
// Populate modules
|
||||
for (name, module_id) in module_id.children(&self.module_tree) {
|
||||
let module = Module { module_id, krate: self.krate };
|
||||
self.add_module_item(&mut module_items, name, PerNs::types(module.into()));
|
||||
}
|
||||
|
||||
self.result.per_module.insert(module_id, module_items);
|
||||
}
|
||||
|
||||
fn add_module_item(&self, module_items: &mut ModuleScope, name: Name, def: PerNs<ModuleDef>) {
|
||||
let resolution = Resolution { def, import: None };
|
||||
module_items.items.insert(name, resolution);
|
||||
}
|
||||
|
||||
fn resolve_imports(&mut self, module_id: ModuleId) {
|
||||
for (import_id, import_data) in self.input[&module_id].imports.iter() {
|
||||
if self.processed_imports.contains(&(module_id, import_id)) {
|
||||
// already done
|
||||
continue;
|
||||
}
|
||||
if self.resolve_import(module_id, import_id, import_data) == ReachedFixedPoint::Yes {
|
||||
log::debug!("import {:?} resolved (or definite error)", import_id);
|
||||
self.processed_imports.insert((module_id, import_id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_import(
|
||||
&mut self,
|
||||
module_id: ModuleId,
|
||||
import_id: ImportId,
|
||||
import: &ImportData,
|
||||
) -> ReachedFixedPoint {
|
||||
log::debug!("resolving import: {:?} ({:?})", import, self.result.edition);
|
||||
let original_module = Module { krate: self.krate, module_id };
|
||||
|
||||
let (def, reached_fixedpoint) = if import.is_extern_crate {
|
||||
let res = self.result.resolve_name_in_extern_prelude(
|
||||
&import
|
||||
.path
|
||||
.as_ident()
|
||||
.expect("extern crate should have been desugared to one-element path"),
|
||||
);
|
||||
(res, if res.is_none() { ReachedFixedPoint::No } else { ReachedFixedPoint::Yes })
|
||||
} else {
|
||||
let res = self.result.resolve_path_fp(
|
||||
self.db,
|
||||
ResolveMode::Import,
|
||||
original_module,
|
||||
&import.path,
|
||||
);
|
||||
|
||||
(res.resolved_def, res.reached_fixedpoint)
|
||||
};
|
||||
|
||||
if reached_fixedpoint != ReachedFixedPoint::Yes {
|
||||
return reached_fixedpoint;
|
||||
}
|
||||
|
||||
if import.is_glob {
|
||||
log::debug!("glob import: {:?}", import);
|
||||
match def.take_types() {
|
||||
Some(ModuleDef::Module(m)) => {
|
||||
if import.is_prelude {
|
||||
tested_by!(std_prelude);
|
||||
self.result.prelude = Some(m);
|
||||
} else if m.krate != self.krate {
|
||||
tested_by!(glob_across_crates);
|
||||
// glob import from other crate => we can just import everything once
|
||||
let item_map = self.db.item_map(m.krate);
|
||||
let scope = &item_map[m.module_id];
|
||||
let items = scope
|
||||
.items
|
||||
.iter()
|
||||
.map(|(name, res)| (name.clone(), res.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, Some(import_id), &items);
|
||||
} else {
|
||||
// glob import from same crate => we do an initial
|
||||
// import, and then need to propagate any further
|
||||
// additions
|
||||
let scope = &self.result[m.module_id];
|
||||
let items = scope
|
||||
.items
|
||||
.iter()
|
||||
.map(|(name, res)| (name.clone(), res.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, Some(import_id), &items);
|
||||
// record the glob import in case we add further items
|
||||
self.glob_imports
|
||||
.entry(m.module_id)
|
||||
.or_default()
|
||||
.push((module_id, import_id));
|
||||
}
|
||||
}
|
||||
Some(ModuleDef::Enum(e)) => {
|
||||
tested_by!(glob_enum);
|
||||
// glob import from enum => just import all the variants
|
||||
let variants = e.variants(self.db);
|
||||
let resolutions = variants
|
||||
.into_iter()
|
||||
.filter_map(|variant| {
|
||||
let res = Resolution {
|
||||
def: PerNs::both(variant.into(), variant.into()),
|
||||
import: Some(import_id),
|
||||
};
|
||||
let name = variant.name(self.db)?;
|
||||
Some((name, res))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, Some(import_id), &resolutions);
|
||||
}
|
||||
Some(d) => {
|
||||
log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
|
||||
}
|
||||
None => {
|
||||
log::debug!("glob import {:?} didn't resolve as type", import);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let last_segment = import.path.segments.last().unwrap();
|
||||
let name = import.alias.clone().unwrap_or_else(|| last_segment.name.clone());
|
||||
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
|
||||
|
||||
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
|
||||
if let Some(root_module) = self.krate.root_module(self.db) {
|
||||
if import.is_extern_crate && module_id == root_module.module_id {
|
||||
if let Some(def) = def.take_types() {
|
||||
self.result.extern_prelude.insert(name.clone(), def);
|
||||
}
|
||||
}
|
||||
}
|
||||
let resolution = Resolution { def, import: Some(import_id) };
|
||||
self.update(module_id, None, &[(name, resolution)]);
|
||||
}
|
||||
reached_fixedpoint
|
||||
}
|
||||
|
||||
fn update(
|
||||
&mut self,
|
||||
module_id: ModuleId,
|
||||
import: Option<ImportId>,
|
||||
resolutions: &[(Name, Resolution)],
|
||||
) {
|
||||
self.update_recursive(module_id, import, resolutions, 0)
|
||||
}
|
||||
|
||||
fn update_recursive(
|
||||
&mut self,
|
||||
module_id: ModuleId,
|
||||
import: Option<ImportId>,
|
||||
resolutions: &[(Name, Resolution)],
|
||||
depth: usize,
|
||||
) {
|
||||
if depth > 100 {
|
||||
// prevent stack overflows (but this shouldn't be possible)
|
||||
panic!("infinite recursion in glob imports!");
|
||||
}
|
||||
let module_items = self.result.per_module.get_mut(module_id).unwrap();
|
||||
let mut changed = false;
|
||||
for (name, res) in resolutions {
|
||||
let existing = module_items.items.entry(name.clone()).or_default();
|
||||
if existing.def.types.is_none() && res.def.types.is_some() {
|
||||
existing.def.types = res.def.types;
|
||||
existing.import = import.or(res.import);
|
||||
changed = true;
|
||||
}
|
||||
if existing.def.values.is_none() && res.def.values.is_some() {
|
||||
existing.def.values = res.def.values;
|
||||
existing.import = import.or(res.import);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
if !changed {
|
||||
return;
|
||||
}
|
||||
let glob_imports = self
|
||||
.glob_imports
|
||||
.get(&module_id)
|
||||
.into_iter()
|
||||
.flat_map(|v| v.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
for (glob_importing_module, glob_import) in glob_imports {
|
||||
// We pass the glob import so that the tracked import in those modules is that glob import
|
||||
self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct ResolvePathResult {
|
||||
resolved_def: PerNs<ModuleDef>,
|
||||
@ -475,99 +195,104 @@ enum ReachedFixedPoint {
|
||||
No,
|
||||
}
|
||||
|
||||
impl ItemMap {
|
||||
pub(crate) fn item_map_query(db: &impl PersistentHirDatabase, krate: Crate) -> Arc<ItemMap> {
|
||||
let start = time::Instant::now();
|
||||
let module_tree = db.module_tree(krate);
|
||||
let input = module_tree
|
||||
.modules()
|
||||
.map(|module_id| (module_id, db.lower_module(Module { krate, module_id })))
|
||||
.collect::<FxHashMap<_, _>>();
|
||||
impl CrateDefMap {
|
||||
pub(crate) fn crate_def_map_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
krate: Crate,
|
||||
) -> Arc<CrateDefMap> {
|
||||
let start = std::time::Instant::now();
|
||||
let def_map = {
|
||||
let edition = krate.edition(db);
|
||||
let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default();
|
||||
let root = modules.alloc(ModuleData::default());
|
||||
CrateDefMap {
|
||||
krate,
|
||||
edition,
|
||||
extern_prelude: FxHashMap::default(),
|
||||
prelude: None,
|
||||
root,
|
||||
modules,
|
||||
macros: Arena::default(),
|
||||
public_macros: FxHashMap::default(),
|
||||
macro_resolutions: FxHashMap::default(),
|
||||
problems: CrateDefMapProblems::default(),
|
||||
}
|
||||
};
|
||||
let def_map = collector::collect_defs(db, def_map);
|
||||
log::info!("crate_def_map_query: {:?}", start.elapsed());
|
||||
Arc::new(def_map)
|
||||
}
|
||||
|
||||
let resolver = Resolver::new(db, &input, krate);
|
||||
let res = resolver.resolve();
|
||||
let elapsed = start.elapsed();
|
||||
log::info!("item_map: {:?}", elapsed);
|
||||
Arc::new(res)
|
||||
pub(crate) fn root(&self) -> CrateModuleId {
|
||||
self.root
|
||||
}
|
||||
|
||||
pub(crate) fn problems(&self) -> &CrateDefMapProblems {
|
||||
&self.problems
|
||||
}
|
||||
|
||||
pub(crate) fn mk_module(&self, module_id: CrateModuleId) -> Module {
|
||||
Module { krate: self.krate, module_id }
|
||||
}
|
||||
|
||||
pub(crate) fn prelude(&self) -> Option<Module> {
|
||||
self.prelude
|
||||
}
|
||||
|
||||
pub(crate) fn extern_prelude(&self) -> &FxHashMap<Name, ModuleDef> {
|
||||
&self.extern_prelude
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_macro(
|
||||
&self,
|
||||
macro_call_id: MacroCallId,
|
||||
) -> Option<(Crate, CrateMacroId)> {
|
||||
self.macro_resolutions.get(¯o_call_id).map(|&it| it)
|
||||
}
|
||||
|
||||
pub(crate) fn find_module_by_source(
|
||||
&self,
|
||||
file_id: HirFileId,
|
||||
decl_id: Option<SourceFileItemId>,
|
||||
) -> Option<CrateModuleId> {
|
||||
let decl_id = decl_id.map(|it| it.with_file_id(file_id));
|
||||
let (module_id, _module_data) = self.modules.iter().find(|(_module_id, module_data)| {
|
||||
if decl_id.is_some() {
|
||||
module_data.declaration == decl_id
|
||||
} else {
|
||||
module_data.definition.map(|it| it.into()) == Some(file_id)
|
||||
}
|
||||
})?;
|
||||
Some(module_id)
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_path(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
original_module: Module,
|
||||
original_module: CrateModuleId,
|
||||
path: &Path,
|
||||
) -> (PerNs<ModuleDef>, Option<usize>) {
|
||||
let res = self.resolve_path_fp(db, ResolveMode::Other, original_module, path);
|
||||
(res.resolved_def, res.segment_index)
|
||||
}
|
||||
|
||||
fn resolve_in_prelude(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
original_module: Module,
|
||||
name: &Name,
|
||||
) -> PerNs<ModuleDef> {
|
||||
if let Some(prelude) = self.prelude {
|
||||
let resolution = if prelude.krate == original_module.krate {
|
||||
self[prelude.module_id].items.get(name).cloned()
|
||||
} else {
|
||||
db.item_map(prelude.krate)[prelude.module_id].items.get(name).cloned()
|
||||
};
|
||||
resolution.map(|r| r.def).unwrap_or_else(PerNs::none)
|
||||
} else {
|
||||
PerNs::none()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_name_in_module(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: Module,
|
||||
name: &Name,
|
||||
) -> PerNs<ModuleDef> {
|
||||
// Resolve in:
|
||||
// - current module / scope
|
||||
// - extern prelude
|
||||
// - std prelude
|
||||
let from_scope = self[module.module_id].items.get(name).map_or(PerNs::none(), |it| it.def);
|
||||
let from_extern_prelude =
|
||||
self.extern_prelude.get(name).map_or(PerNs::none(), |&it| PerNs::types(it));
|
||||
let from_prelude = self.resolve_in_prelude(db, module, name);
|
||||
|
||||
from_scope.or(from_extern_prelude).or(from_prelude)
|
||||
}
|
||||
|
||||
fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs<ModuleDef> {
|
||||
self.extern_prelude.get(name).map_or(PerNs::none(), |&it| PerNs::types(it))
|
||||
}
|
||||
|
||||
fn resolve_name_in_crate_root_or_extern_prelude(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: Module,
|
||||
name: &Name,
|
||||
) -> PerNs<ModuleDef> {
|
||||
let crate_root = module.crate_root(db);
|
||||
let from_crate_root =
|
||||
self[crate_root.module_id].items.get(name).map_or(PerNs::none(), |it| it.def);
|
||||
let from_extern_prelude = self.resolve_name_in_extern_prelude(name);
|
||||
|
||||
from_crate_root.or(from_extern_prelude)
|
||||
}
|
||||
|
||||
// Returns Yes if we are sure that additions to `ItemMap` wouldn't change
|
||||
// the result.
|
||||
fn resolve_path_fp(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
mode: ResolveMode,
|
||||
original_module: Module,
|
||||
original_module: CrateModuleId,
|
||||
path: &Path,
|
||||
) -> ResolvePathResult {
|
||||
let mut segments = path.segments.iter().enumerate();
|
||||
let mut curr_per_ns: PerNs<ModuleDef> = match path.kind {
|
||||
PathKind::Crate => PerNs::types(original_module.crate_root(db).into()),
|
||||
PathKind::Self_ => PerNs::types(original_module.into()),
|
||||
PathKind::Crate => {
|
||||
PerNs::types(Module { krate: self.krate, module_id: self.root }.into())
|
||||
}
|
||||
PathKind::Self_ => {
|
||||
PerNs::types(Module { krate: self.krate, module_id: original_module }.into())
|
||||
}
|
||||
// plain import or absolute path in 2015: crate-relative with
|
||||
// fallback to extern prelude (with the simplification in
|
||||
// rust-lang/rust#57745)
|
||||
@@ -581,11 +306,7 @@ impl ItemMap {
|
||||
None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
|
||||
};
|
||||
log::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
|
||||
self.resolve_name_in_crate_root_or_extern_prelude(
|
||||
db,
|
||||
original_module,
|
||||
&segment.name,
|
||||
)
|
||||
self.resolve_name_in_crate_root_or_extern_prelude(&segment.name)
|
||||
}
|
||||
PathKind::Plain => {
|
||||
let segment = match segments.next() {
|
||||
@@ -596,8 +317,8 @@ impl ItemMap {
|
||||
self.resolve_name_in_module(db, original_module, &segment.name)
|
||||
}
|
||||
PathKind::Super => {
|
||||
if let Some(p) = original_module.parent(db) {
|
||||
PerNs::types(p.into())
|
||||
if let Some(p) = self.modules[original_module].parent {
|
||||
PerNs::types(Module { krate: self.krate, module_id: p }.into())
|
||||
} else {
|
||||
log::debug!("super path in root module");
|
||||
return ResolvePathResult::empty(ReachedFixedPoint::Yes);
|
||||
@@ -634,14 +355,14 @@ impl ItemMap {
|
||||
|
||||
curr_per_ns = match curr {
|
||||
ModuleDef::Module(module) => {
|
||||
if module.krate != original_module.krate {
|
||||
if module.krate != self.krate {
|
||||
let path = Path {
|
||||
segments: path.segments[i..].iter().cloned().collect(),
|
||||
kind: PathKind::Self_,
|
||||
};
|
||||
log::debug!("resolving {:?} in other crate", path);
|
||||
let item_map = db.item_map(module.krate);
|
||||
let (def, s) = item_map.resolve_path(db, *module, &path);
|
||||
let def_map = db.crate_def_map(module.krate);
|
||||
let (def, s) = def_map.resolve_path(db, module.module_id, &path);
|
||||
return ResolvePathResult::with(
|
||||
def,
|
||||
ReachedFixedPoint::Yes,
|
||||
@@ -649,7 +370,7 @@ impl ItemMap {
|
||||
);
|
||||
}
|
||||
|
||||
match self[module.module_id].items.get(&segment.name) {
|
||||
match self[module.module_id].scope.items.get(&segment.name) {
|
||||
Some(res) if !res.def.is_none() => res.def,
|
||||
_ => {
|
||||
log::debug!("path segment {:?} not found", segment.name);
|
||||
@@ -659,7 +380,7 @@ impl ItemMap {
|
||||
}
|
||||
ModuleDef::Enum(e) => {
|
||||
// enum variant
|
||||
tested_by!(item_map_enum_importing);
|
||||
tested_by!(can_import_enum_variant);
|
||||
match e.variant(db, &segment.name) {
|
||||
Some(variant) => PerNs::both(variant.into(), variant.into()),
|
||||
None => {
|
||||
@@ -690,7 +411,47 @@ impl ItemMap {
|
||||
}
|
||||
ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
fn resolve_name_in_crate_root_or_extern_prelude(&self, name: &Name) -> PerNs<ModuleDef> {
|
||||
let from_crate_root =
|
||||
self[self.root].scope.items.get(name).map_or(PerNs::none(), |it| it.def);
|
||||
let from_extern_prelude = self.resolve_name_in_extern_prelude(name);
|
||||
|
||||
from_crate_root.or(from_extern_prelude)
|
||||
}
|
||||
|
||||
pub(crate) fn resolve_name_in_module(
|
||||
&self,
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: CrateModuleId,
|
||||
name: &Name,
|
||||
) -> PerNs<ModuleDef> {
|
||||
// Resolve in:
|
||||
// - current module / scope
|
||||
// - extern prelude
|
||||
// - std prelude
|
||||
let from_scope = self[module].scope.items.get(name).map_or(PerNs::none(), |it| it.def);
|
||||
let from_extern_prelude =
|
||||
self.extern_prelude.get(name).map_or(PerNs::none(), |&it| PerNs::types(it));
|
||||
let from_prelude = self.resolve_in_prelude(db, name);
|
||||
|
||||
from_scope.or(from_extern_prelude).or(from_prelude)
|
||||
}
|
||||
|
||||
fn resolve_name_in_extern_prelude(&self, name: &Name) -> PerNs<ModuleDef> {
|
||||
self.extern_prelude.get(name).map_or(PerNs::none(), |&it| PerNs::types(it))
|
||||
}
|
||||
|
||||
fn resolve_in_prelude(&self, db: &impl PersistentHirDatabase, name: &Name) -> PerNs<ModuleDef> {
|
||||
if let Some(prelude) = self.prelude {
|
||||
let resolution = if prelude.krate == self.krate {
|
||||
self[prelude.module_id].scope.items.get(name).cloned()
|
||||
} else {
|
||||
db.crate_def_map(prelude.krate)[prelude.module_id].scope.items.get(name).cloned()
|
||||
};
|
||||
resolution.map(|r| r.def).unwrap_or_else(PerNs::none)
|
||||
} else {
|
||||
PerNs::none()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
crates/ra_hir/src/nameres/collector.rs (new file, 564 lines)
@@ -0,0 +1,564 @@
|
||||
use arrayvec::ArrayVec;
|
||||
use rustc_hash::FxHashMap;
|
||||
use relative_path::RelativePathBuf;
|
||||
use test_utils::tested_by;
|
||||
use ra_db::FileId;
|
||||
|
||||
use crate::{
|
||||
Function, Module, Struct, Enum, Const, Static, Trait, TypeAlias,
|
||||
PersistentHirDatabase, HirFileId, Name, Path, Problem, Crate,
|
||||
KnownName,
|
||||
nameres::{Resolution, PerNs, ModuleDef, ReachedFixedPoint, ResolveMode, raw},
|
||||
ids::{AstItemDef, LocationCtx, MacroCallLoc, SourceItemId, MacroCallId},
|
||||
};
|
||||
|
||||
use super::{CrateDefMap, CrateModuleId, ModuleData, CrateMacroId};
|
||||
|
||||
pub(super) fn collect_defs(
|
||||
db: &impl PersistentHirDatabase,
|
||||
mut def_map: CrateDefMap,
|
||||
) -> CrateDefMap {
|
||||
// populate external prelude
|
||||
for dep in def_map.krate.dependencies(db) {
|
||||
log::debug!("crate dep {:?} -> {:?}", dep.name, dep.krate);
|
||||
if let Some(module) = dep.krate.root_module(db) {
|
||||
def_map.extern_prelude.insert(dep.name.clone(), module.into());
|
||||
}
|
||||
// look for the prelude
|
||||
if def_map.prelude.is_none() {
|
||||
let map = db.crate_def_map(dep.krate);
|
||||
if map.prelude.is_some() {
|
||||
def_map.prelude = map.prelude;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut collector = DefCollector {
|
||||
db,
|
||||
def_map,
|
||||
glob_imports: FxHashMap::default(),
|
||||
unresolved_imports: Vec::new(),
|
||||
unexpanded_macros: Vec::new(),
|
||||
global_macro_scope: FxHashMap::default(),
|
||||
};
|
||||
collector.collect();
|
||||
collector.finish()
|
||||
}
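Before any modules are walked, collect_defs seeds the crate-level tables: every dependency contributes an extern-prelude entry, and the std prelude is inherited from the first dependency that has one. A small standalone sketch of just that seeding step (crate and module names here are plain strings invented for the example, not the real types):

use std::collections::HashMap;

// Toy version of the "populate extern prelude" step: dependency names become
// extern-prelude entries, and the prelude module is taken from the first
// dependency that defines one.
struct Dep {
    name: &'static str,
    root_module: &'static str,
    prelude: Option<&'static str>,
}

fn main() {
    let deps = vec![
        Dep { name: "ra_syntax", root_module: "ra_syntax::lib", prelude: None },
        Dep { name: "std", root_module: "std::lib", prelude: Some("std::prelude::v1") },
    ];

    let mut extern_prelude: HashMap<&str, &str> = HashMap::new();
    let mut prelude: Option<&str> = None;
    for dep in &deps {
        extern_prelude.insert(dep.name, dep.root_module);
        if prelude.is_none() {
            prelude = dep.prelude; // inherit the std prelude transitively
        }
    }

    assert_eq!(extern_prelude["std"], "std::lib");
    assert_eq!(prelude, Some("std::prelude::v1"));
}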
|
||||
|
||||
/// Walks the tree of modules recursively
|
||||
struct DefCollector<DB> {
|
||||
db: DB,
|
||||
def_map: CrateDefMap,
|
||||
glob_imports: FxHashMap<CrateModuleId, Vec<(CrateModuleId, raw::ImportId)>>,
|
||||
unresolved_imports: Vec<(CrateModuleId, raw::ImportId, raw::ImportData)>,
|
||||
unexpanded_macros: Vec<(CrateModuleId, MacroCallId, Path, tt::Subtree)>,
|
||||
global_macro_scope: FxHashMap<Name, CrateMacroId>,
|
||||
}
|
||||
|
||||
impl<'a, DB> DefCollector<&'a DB>
|
||||
where
|
||||
DB: PersistentHirDatabase,
|
||||
{
|
||||
fn collect(&mut self) {
|
||||
let crate_graph = self.db.crate_graph();
|
||||
let file_id = crate_graph.crate_root(self.def_map.krate.crate_id());
|
||||
let raw_items = self.db.raw_items(file_id);
|
||||
let module_id = self.def_map.root;
|
||||
self.def_map.modules[module_id].definition = Some(file_id);
|
||||
ModCollector {
|
||||
def_collector: &mut *self,
|
||||
module_id,
|
||||
file_id: file_id.into(),
|
||||
raw_items: &raw_items,
|
||||
}
|
||||
.collect(raw_items.items());
|
||||
|
||||
// main name resolution fixed-point loop.
|
||||
let mut i = 0;
|
||||
loop {
|
||||
match (self.resolve_imports(), self.resolve_macros()) {
|
||||
(ReachedFixedPoint::Yes, ReachedFixedPoint::Yes) => break,
|
||||
_ => i += 1,
|
||||
}
|
||||
if i == 1000 {
|
||||
log::error!("diverging name resolution");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let unresolved_imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
|
||||
// show unresolved imports in completion, etc
|
||||
for (module_id, import, import_data) in unresolved_imports {
|
||||
self.record_resolved_import(module_id, PerNs::none(), import, &import_data)
|
||||
}
|
||||
}
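The loop above is the heart of the collector: import resolution and macro expansion are re-run until neither makes progress, with a hard iteration cap as a guard against divergence. A minimal standalone sketch of the same shape, simplified to a single kind of work item (the even/odd "resolution rule" is purely illustrative):

enum ReachedFixedPoint { Yes, No }

// One pass over the pending work list: anything divisible by `step` "resolves".
// In real name resolution a resolved import can make previously stuck imports
// resolvable, which is why the caller loops until a pass makes no progress.
fn resolve_once(pending: &mut Vec<u32>, resolved: &mut Vec<u32>, step: u32) -> ReachedFixedPoint {
    let before = pending.len();
    pending.retain(|&it| {
        if it % step == 0 {
            resolved.push(it);
            false
        } else {
            true
        }
    });
    if pending.len() == before { ReachedFixedPoint::Yes } else { ReachedFixedPoint::No }
}

fn main() {
    let mut pending = vec![3, 4, 6, 7];
    let mut resolved = Vec::new();
    let mut i = 0;
    loop {
        match resolve_once(&mut pending, &mut resolved, 2) {
            ReachedFixedPoint::Yes => break, // no progress this pass: fixed point
            ReachedFixedPoint::No => i += 1,
        }
        if i == 1000 {
            eprintln!("diverging resolution");
            break;
        }
    }
    println!("resolved: {:?}, still pending: {:?}", resolved, pending);
}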
|
||||
|
||||
fn define_macro(&mut self, name: Name, tt: &tt::Subtree, export: bool) {
|
||||
if let Ok(rules) = mbe::MacroRules::parse(tt) {
|
||||
let macro_id = self.def_map.macros.alloc(rules);
|
||||
if export {
|
||||
self.def_map.public_macros.insert(name.clone(), macro_id);
|
||||
}
|
||||
self.global_macro_scope.insert(name, macro_id);
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_imports(&mut self) -> ReachedFixedPoint {
|
||||
let mut imports = std::mem::replace(&mut self.unresolved_imports, Vec::new());
|
||||
let mut resolved = Vec::new();
|
||||
imports.retain(|(module_id, import, import_data)| {
|
||||
let (def, fp) = self.resolve_import(*module_id, import_data);
|
||||
if fp == ReachedFixedPoint::Yes {
|
||||
resolved.push((*module_id, def, *import, import_data.clone()))
|
||||
}
|
||||
fp == ReachedFixedPoint::No
|
||||
});
|
||||
self.unresolved_imports = imports;
|
||||
// Resolves imports, filling in module scopes
|
||||
let result =
|
||||
if resolved.is_empty() { ReachedFixedPoint::Yes } else { ReachedFixedPoint::No };
|
||||
for (module_id, def, import, import_data) in resolved {
|
||||
self.record_resolved_import(module_id, def, import, &import_data)
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn resolve_import(
|
||||
&mut self,
|
||||
module_id: CrateModuleId,
|
||||
import: &raw::ImportData,
|
||||
) -> (PerNs<ModuleDef>, ReachedFixedPoint) {
|
||||
log::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
|
||||
if import.is_extern_crate {
|
||||
let res = self.def_map.resolve_name_in_extern_prelude(
|
||||
&import
|
||||
.path
|
||||
.as_ident()
|
||||
.expect("extern crate should have been desugared to one-element path"),
|
||||
);
|
||||
(res, ReachedFixedPoint::Yes)
|
||||
} else {
|
||||
let res =
|
||||
self.def_map.resolve_path_fp(self.db, ResolveMode::Import, module_id, &import.path);
|
||||
|
||||
(res.resolved_def, res.reached_fixedpoint)
|
||||
}
|
||||
}
|
||||
|
||||
fn record_resolved_import(
|
||||
&mut self,
|
||||
module_id: CrateModuleId,
|
||||
def: PerNs<ModuleDef>,
|
||||
import_id: raw::ImportId,
|
||||
import: &raw::ImportData,
|
||||
) {
|
||||
if import.is_glob {
|
||||
log::debug!("glob import: {:?}", import);
|
||||
match def.take_types() {
|
||||
Some(ModuleDef::Module(m)) => {
|
||||
if import.is_prelude {
|
||||
tested_by!(std_prelude);
|
||||
self.def_map.prelude = Some(m);
|
||||
} else if m.krate != self.def_map.krate {
|
||||
tested_by!(glob_across_crates);
|
||||
// glob import from other crate => we can just import everything once
|
||||
let item_map = self.db.crate_def_map(m.krate);
|
||||
let scope = &item_map[m.module_id].scope;
|
||||
let items = scope
|
||||
.items
|
||||
.iter()
|
||||
.map(|(name, res)| (name.clone(), res.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, Some(import_id), &items);
|
||||
} else {
|
||||
// glob import from same crate => we do an initial
|
||||
// import, and then need to propagate any further
|
||||
// additions
|
||||
let scope = &self.def_map[m.module_id].scope;
|
||||
let items = scope
|
||||
.items
|
||||
.iter()
|
||||
.map(|(name, res)| (name.clone(), res.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, Some(import_id), &items);
|
||||
// record the glob import in case we add further items
|
||||
self.glob_imports
|
||||
.entry(m.module_id)
|
||||
.or_default()
|
||||
.push((module_id, import_id));
|
||||
}
|
||||
}
|
||||
Some(ModuleDef::Enum(e)) => {
|
||||
tested_by!(glob_enum);
|
||||
// glob import from enum => just import all the variants
|
||||
let variants = e.variants(self.db);
|
||||
let resolutions = variants
|
||||
.into_iter()
|
||||
.filter_map(|variant| {
|
||||
let res = Resolution {
|
||||
def: PerNs::both(variant.into(), variant.into()),
|
||||
import: Some(import_id),
|
||||
};
|
||||
let name = variant.name(self.db)?;
|
||||
Some((name, res))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
self.update(module_id, Some(import_id), &resolutions);
|
||||
}
|
||||
Some(d) => {
|
||||
log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
|
||||
}
|
||||
None => {
|
||||
log::debug!("glob import {:?} didn't resolve as type", import);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
match import.path.segments.last() {
|
||||
Some(last_segment) => {
|
||||
let name = import.alias.clone().unwrap_or_else(|| last_segment.name.clone());
|
||||
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
|
||||
|
||||
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
|
||||
if import.is_extern_crate && module_id == self.def_map.root {
|
||||
if let Some(def) = def.take_types() {
|
||||
self.def_map.extern_prelude.insert(name.clone(), def);
|
||||
}
|
||||
}
|
||||
let resolution = Resolution { def, import: Some(import_id) };
|
||||
self.update(module_id, Some(import_id), &[(name, resolution)]);
|
||||
}
|
||||
None => tested_by!(bogus_paths),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn update(
|
||||
&mut self,
|
||||
module_id: CrateModuleId,
|
||||
import: Option<raw::ImportId>,
|
||||
resolutions: &[(Name, Resolution)],
|
||||
) {
|
||||
self.update_recursive(module_id, import, resolutions, 0)
|
||||
}
|
||||
|
||||
fn update_recursive(
|
||||
&mut self,
|
||||
module_id: CrateModuleId,
|
||||
import: Option<raw::ImportId>,
|
||||
resolutions: &[(Name, Resolution)],
|
||||
depth: usize,
|
||||
) {
|
||||
if depth > 100 {
|
||||
// prevent stack overflows (but this shouldn't be possible)
|
||||
panic!("infinite recursion in glob imports!");
|
||||
}
|
||||
let module_items = &mut self.def_map.modules[module_id].scope;
|
||||
let mut changed = false;
|
||||
for (name, res) in resolutions {
|
||||
let existing = module_items.items.entry(name.clone()).or_default();
|
||||
if existing.def.types.is_none() && res.def.types.is_some() {
|
||||
existing.def.types = res.def.types;
|
||||
existing.import = import.or(res.import);
|
||||
changed = true;
|
||||
}
|
||||
if existing.def.values.is_none() && res.def.values.is_some() {
|
||||
existing.def.values = res.def.values;
|
||||
existing.import = import.or(res.import);
|
||||
changed = true;
|
||||
}
|
||||
if existing.def.is_none()
|
||||
&& res.def.is_none()
|
||||
&& existing.import.is_none()
|
||||
&& res.import.is_some()
|
||||
{
|
||||
existing.import = res.import;
|
||||
}
|
||||
}
|
||||
if !changed {
|
||||
return;
|
||||
}
|
||||
let glob_imports = self
|
||||
.glob_imports
|
||||
.get(&module_id)
|
||||
.into_iter()
|
||||
.flat_map(|v| v.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
for (glob_importing_module, glob_import) in glob_imports {
|
||||
// We pass the glob import so that the tracked import in those modules is that glob import
|
||||
self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1);
|
||||
}
|
||||
}
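The recursion exists because of same-crate glob imports: once `use some_module::*;` has been recorded, any item that module gains later (for example from a macro expansion) has to be pushed into every glob-importing module as well. A toy model of that propagation, with string identifiers standing in for `CrateModuleId` (the module names and helper function are invented for the example):

use std::collections::HashMap;

// Toy model of glob-import propagation: when a module gains a new item,
// every module that glob-imports from it receives the item too, recursively.
// The early return on "already present" is what keeps cyclic globs from
// recursing forever.
fn add_item(
    scopes: &mut HashMap<&'static str, Vec<&'static str>>,
    glob_importers: &HashMap<&'static str, Vec<&'static str>>,
    module: &'static str,
    item: &'static str,
) {
    let scope = scopes.entry(module).or_default();
    if scope.contains(&item) {
        return; // no change in this module, so nothing to propagate
    }
    scope.push(item);
    for &importer in glob_importers.get(module).into_iter().flatten() {
        add_item(scopes, glob_importers, importer, item);
    }
}

fn main() {
    let mut scopes: HashMap<&'static str, Vec<&'static str>> = HashMap::new();
    // `crate::foo` glob-imports from `crate::foo::bar`, and `crate` from `crate::foo`.
    let mut glob_importers: HashMap<&'static str, Vec<&'static str>> = HashMap::new();
    glob_importers.insert("crate::foo::bar", vec!["crate::foo"]);
    glob_importers.insert("crate::foo", vec!["crate"]);

    add_item(&mut scopes, &glob_importers, "crate::foo::bar", "Baz");
    // "Baz" propagated two levels up through the chain of glob imports.
    assert_eq!(scopes["crate"], vec!["Baz"]);
    println!("{:?}", scopes);
}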
|
||||
|
||||
// XXX: this is just a pile of hacks now, because `PerNs` does not handle
|
||||
// macro namespace.
|
||||
fn resolve_macros(&mut self) -> ReachedFixedPoint {
|
||||
let mut macros = std::mem::replace(&mut self.unexpanded_macros, Vec::new());
|
||||
let mut resolved = Vec::new();
|
||||
let mut res = ReachedFixedPoint::Yes;
|
||||
macros.retain(|(module_id, call_id, path, tt)| {
|
||||
if path.segments.len() != 2 {
|
||||
return true;
|
||||
}
|
||||
let crate_name = &path.segments[0].name;
|
||||
let krate = match self.def_map.resolve_name_in_extern_prelude(crate_name).take_types() {
|
||||
Some(ModuleDef::Module(m)) => m.krate(self.db),
|
||||
_ => return true,
|
||||
};
|
||||
let krate = match krate {
|
||||
Some(it) => it,
|
||||
_ => return true,
|
||||
};
|
||||
res = ReachedFixedPoint::No;
|
||||
let def_map = self.db.crate_def_map(krate);
|
||||
if let Some(macro_id) = def_map.public_macros.get(&path.segments[1].name).cloned() {
|
||||
resolved.push((*module_id, *call_id, (krate, macro_id), tt.clone()));
|
||||
}
|
||||
false
|
||||
});
|
||||
|
||||
for (module_id, macro_call_id, macro_def_id, arg) in resolved {
|
||||
self.collect_macro_expansion(module_id, macro_call_id, macro_def_id, arg);
|
||||
}
|
||||
res
|
||||
}
|
||||
|
||||
fn collect_macro_expansion(
|
||||
&mut self,
|
||||
module_id: CrateModuleId,
|
||||
macro_call_id: MacroCallId,
|
||||
macro_def_id: (Crate, CrateMacroId),
|
||||
macro_arg: tt::Subtree,
|
||||
) {
|
||||
let (macro_krate, macro_id) = macro_def_id;
|
||||
let dm;
|
||||
let rules = if macro_krate == self.def_map.krate {
|
||||
&self.def_map[macro_id]
|
||||
} else {
|
||||
dm = self.db.crate_def_map(macro_krate);
|
||||
&dm[macro_id]
|
||||
};
|
||||
if let Ok(expansion) = rules.expand(¯o_arg) {
|
||||
self.def_map.macro_resolutions.insert(macro_call_id, macro_def_id);
|
||||
// XXX: this **does not** go through a database, because we can't
|
||||
// identify macro_call without adding the whole state of name resolution
|
||||
// as a parameter to the query.
|
||||
//
|
||||
// So, we run the queries "manually" and we must ensure that
|
||||
// `db.hir_parse(macro_call_id)` returns the same source_file.
|
||||
let file_id: HirFileId = macro_call_id.into();
|
||||
let source_file = mbe::token_tree_to_ast_item_list(&expansion);
|
||||
|
||||
let raw_items = raw::RawItems::from_source_file(&source_file, file_id);
|
||||
ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
|
||||
.collect(raw_items.items())
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(self) -> CrateDefMap {
|
||||
self.def_map
|
||||
}
|
||||
}
|
||||
|
||||
/// Walks a single module, populating defs, imports and macros
|
||||
struct ModCollector<'a, D> {
|
||||
def_collector: D,
|
||||
module_id: CrateModuleId,
|
||||
file_id: HirFileId,
|
||||
raw_items: &'a raw::RawItems,
|
||||
}
|
||||
|
||||
impl<DB> ModCollector<'_, &'_ mut DefCollector<&'_ DB>>
|
||||
where
|
||||
DB: PersistentHirDatabase,
|
||||
{
|
||||
fn collect(&mut self, items: &[raw::RawItem]) {
|
||||
for item in items {
|
||||
match *item {
|
||||
raw::RawItem::Module(m) => self.collect_module(&self.raw_items[m]),
|
||||
raw::RawItem::Import(import) => self.def_collector.unresolved_imports.push((
|
||||
self.module_id,
|
||||
import,
|
||||
self.raw_items[import].clone(),
|
||||
)),
|
||||
raw::RawItem::Def(def) => self.define_def(&self.raw_items[def]),
|
||||
raw::RawItem::Macro(mac) => self.collect_macro(&self.raw_items[mac]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_module(&mut self, module: &raw::ModuleData) {
|
||||
match module {
|
||||
// inline module, just recurse
|
||||
raw::ModuleData::Definition { name, items, source_item_id } => {
|
||||
let module_id = self.push_child_module(
|
||||
name.clone(),
|
||||
source_item_id.with_file_id(self.file_id),
|
||||
None,
|
||||
);
|
||||
ModCollector {
|
||||
def_collector: &mut *self.def_collector,
|
||||
module_id,
|
||||
file_id: self.file_id,
|
||||
raw_items: self.raw_items,
|
||||
}
|
||||
.collect(&*items);
|
||||
}
|
||||
// out-of-line module: resolve, parse and recurse
|
||||
raw::ModuleData::Declaration { name, source_item_id } => {
|
||||
let source_item_id = source_item_id.with_file_id(self.file_id);
|
||||
let is_root = self.def_collector.def_map.modules[self.module_id].parent.is_none();
|
||||
let (file_ids, problem) =
|
||||
resolve_submodule(self.def_collector.db, self.file_id, name, is_root);
|
||||
|
||||
if let Some(problem) = problem {
|
||||
self.def_collector.def_map.problems.add(source_item_id, problem)
|
||||
}
|
||||
|
||||
if let Some(&file_id) = file_ids.first() {
|
||||
let module_id =
|
||||
self.push_child_module(name.clone(), source_item_id, Some(file_id));
|
||||
let raw_items = self.def_collector.db.raw_items(file_id);
|
||||
ModCollector {
|
||||
def_collector: &mut *self.def_collector,
|
||||
module_id,
|
||||
file_id: file_id.into(),
|
||||
raw_items: &raw_items,
|
||||
}
|
||||
.collect(raw_items.items())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn push_child_module(
|
||||
&mut self,
|
||||
name: Name,
|
||||
declaration: SourceItemId,
|
||||
definition: Option<FileId>,
|
||||
) -> CrateModuleId {
|
||||
let modules = &mut self.def_collector.def_map.modules;
|
||||
let res = modules.alloc(ModuleData::default());
|
||||
modules[res].parent = Some(self.module_id);
|
||||
modules[res].declaration = Some(declaration);
|
||||
modules[res].definition = definition;
|
||||
modules[self.module_id].children.insert(name.clone(), res);
|
||||
let resolution = Resolution {
|
||||
def: PerNs::types(
|
||||
Module { krate: self.def_collector.def_map.krate, module_id: res }.into(),
|
||||
),
|
||||
import: None,
|
||||
};
|
||||
self.def_collector.update(self.module_id, None, &[(name, resolution)]);
|
||||
res
|
||||
}
|
||||
|
||||
fn define_def(&mut self, def: &raw::DefData) {
|
||||
let module = Module { krate: self.def_collector.def_map.krate, module_id: self.module_id };
|
||||
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id.into());
|
||||
macro_rules! id {
|
||||
() => {
|
||||
AstItemDef::from_source_item_id_unchecked(ctx, def.source_item_id)
|
||||
};
|
||||
}
|
||||
let name = def.name.clone();
|
||||
let def: PerNs<ModuleDef> = match def.kind {
|
||||
raw::DefKind::Function => PerNs::values(Function { id: id!() }.into()),
|
||||
raw::DefKind::Struct => {
|
||||
let s = Struct { id: id!() }.into();
|
||||
PerNs::both(s, s)
|
||||
}
|
||||
raw::DefKind::Enum => PerNs::types(Enum { id: id!() }.into()),
|
||||
raw::DefKind::Const => PerNs::values(Const { id: id!() }.into()),
|
||||
raw::DefKind::Static => PerNs::values(Static { id: id!() }.into()),
|
||||
raw::DefKind::Trait => PerNs::types(Trait { id: id!() }.into()),
|
||||
raw::DefKind::TypeAlias => PerNs::types(TypeAlias { id: id!() }.into()),
|
||||
};
|
||||
let resolution = Resolution { def, import: None };
|
||||
self.def_collector.update(self.module_id, None, &[(name, resolution)])
|
||||
}
|
||||
|
||||
fn collect_macro(&mut self, mac: &raw::MacroData) {
|
||||
// Case 1: macro rules, define a macro in crate-global mutable scope
|
||||
if is_macro_rules(&mac.path) {
|
||||
if let Some(name) = &mac.name {
|
||||
self.def_collector.define_macro(name.clone(), &mac.arg, mac.export)
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let source_item_id = SourceItemId { file_id: self.file_id, item_id: mac.source_item_id };
|
||||
let macro_call_id = MacroCallLoc {
|
||||
module: Module { krate: self.def_collector.def_map.krate, module_id: self.module_id },
|
||||
source_item_id,
|
||||
}
|
||||
.id(self.def_collector.db);
|
||||
|
||||
// Case 2: try to expand macro_rules from this crate, triggering
|
||||
// recursive item collection.
|
||||
if let Some(¯o_id) =
|
||||
mac.path.as_ident().and_then(|name| self.def_collector.global_macro_scope.get(name))
|
||||
{
|
||||
self.def_collector.collect_macro_expansion(
|
||||
self.module_id,
|
||||
macro_call_id,
|
||||
(self.def_collector.def_map.krate, macro_id),
|
||||
mac.arg.clone(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Case 3: path to a macro from another crate, expand during name resolution
|
||||
self.def_collector.unexpanded_macros.push((
|
||||
self.module_id,
|
||||
macro_call_id,
|
||||
mac.path.clone(),
|
||||
mac.arg.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_macro_rules(path: &Path) -> bool {
|
||||
path.as_ident().and_then(Name::as_known_name) == Some(KnownName::MacroRules)
|
||||
}
|
||||
|
||||
fn resolve_submodule(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: HirFileId,
|
||||
name: &Name,
|
||||
is_root: bool,
|
||||
) -> (Vec<FileId>, Option<Problem>) {
|
||||
// FIXME: handle submodules of inline modules properly
|
||||
let file_id = file_id.original_file(db);
|
||||
let source_root_id = db.file_source_root(file_id);
|
||||
let path = db.file_relative_path(file_id);
|
||||
let root = RelativePathBuf::default();
|
||||
let dir_path = path.parent().unwrap_or(&root);
|
||||
let mod_name = path.file_stem().unwrap_or("unknown");
|
||||
let is_dir_owner = is_root || mod_name == "mod";
|
||||
|
||||
let file_mod = dir_path.join(format!("{}.rs", name));
|
||||
let dir_mod = dir_path.join(format!("{}/mod.rs", name));
|
||||
let file_dir_mod = dir_path.join(format!("{}/{}.rs", mod_name, name));
|
||||
let mut candidates = ArrayVec::<[_; 2]>::new();
|
||||
if is_dir_owner {
|
||||
candidates.push(file_mod.clone());
|
||||
candidates.push(dir_mod);
|
||||
} else {
|
||||
candidates.push(file_dir_mod.clone());
|
||||
};
|
||||
let sr = db.source_root(source_root_id);
|
||||
let points_to = candidates
|
||||
.into_iter()
|
||||
.filter_map(|path| sr.files.get(&path))
|
||||
.map(|&it| it)
|
||||
.collect::<Vec<_>>();
|
||||
let problem = if points_to.is_empty() {
|
||||
Some(Problem::UnresolvedModule {
|
||||
candidate: if is_dir_owner { file_mod } else { file_dir_mod },
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(points_to, problem)
|
||||
}
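The directory-layout rules encoded by `resolve_submodule` are easy to state on their own: the crate root and `mod.rs` files are "directory owners" and look for `name.rs` or `name/mod.rs` next to themselves, while any other file looks inside a directory named after itself. A standalone sketch of just the candidate computation (the helper name and paths are made up; the source-root lookup and problem reporting are omitted):

// Candidate files considered for `mod <name>;`, mirroring the rules above.
fn submodule_candidates(dir_path: &str, mod_name: &str, name: &str, is_root: bool) -> Vec<String> {
    // The crate root and `mod.rs` files own their directory.
    let is_dir_owner = is_root || mod_name == "mod";
    if is_dir_owner {
        vec![
            format!("{}/{}.rs", dir_path, name),
            format!("{}/{}/mod.rs", dir_path, name),
        ]
    } else {
        // Non-owners look inside a directory named after the declaring file.
        vec![format!("{}/{}/{}.rs", dir_path, mod_name, name)]
    }
}

fn main() {
    // `mod bar;` declared in src/lib.rs (the crate root):
    assert_eq!(
        submodule_candidates("src", "lib", "bar", true),
        vec!["src/bar.rs", "src/bar/mod.rs"]
    );
    // `mod bar;` declared in src/foo.rs (not a directory owner):
    assert_eq!(submodule_candidates("src", "foo", "bar", false), vec!["src/foo/bar.rs"]);
}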
|
@@ -1,222 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use ra_syntax::{
|
||||
AstNode, SourceFile, TreeArc, AstPtr,
|
||||
ast::{self, ModuleItemOwner, NameOwner, AttrsOwner},
|
||||
};
|
||||
use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::{
|
||||
SourceItemId, Path, ModuleSource, Name,
|
||||
HirFileId, MacroCallLoc, AsName, PerNs, Function,
|
||||
ModuleDef, Module, Struct, Enum, Const, Static, Trait, TypeAlias,
|
||||
ids::LocationCtx, PersistentHirDatabase,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct ImportId(RawId);
|
||||
impl_arena_id!(ImportId);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(super) struct ImportData {
|
||||
pub(super) path: Path,
|
||||
pub(super) alias: Option<Name>,
|
||||
pub(super) is_glob: bool,
|
||||
pub(super) is_prelude: bool,
|
||||
pub(super) is_extern_crate: bool,
|
||||
}
|
||||
|
||||
/// A set of items and imports declared inside a module, without relation to
|
||||
/// other modules.
|
||||
///
|
||||
/// This sits in-between raw syntax and name resolution and allows us to avoid
|
||||
/// recomputing name res: if two instances of `InputModuleItems` are the same, we
|
||||
/// can avoid redoing name resolution.
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
pub struct LoweredModule {
|
||||
pub(crate) declarations: FxHashMap<Name, PerNs<ModuleDef>>,
|
||||
pub(super) imports: Arena<ImportId, ImportData>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
pub struct ImportSourceMap {
|
||||
map: ArenaMap<ImportId, AstPtr<ast::PathSegment>>,
|
||||
}
|
||||
|
||||
impl ImportSourceMap {
|
||||
fn insert(&mut self, import: ImportId, segment: &ast::PathSegment) {
|
||||
self.map.insert(import, AstPtr::new(segment))
|
||||
}
|
||||
|
||||
pub fn get(&self, source: &ModuleSource, import: ImportId) -> TreeArc<ast::PathSegment> {
|
||||
let file = match source {
|
||||
ModuleSource::SourceFile(file) => &*file,
|
||||
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
|
||||
};
|
||||
|
||||
self.map[import].to_node(file).to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
impl LoweredModule {
|
||||
pub(crate) fn lower_module_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: Module,
|
||||
) -> Arc<LoweredModule> {
|
||||
db.lower_module_with_source_map(module).0
|
||||
}
|
||||
|
||||
pub(crate) fn lower_module_with_source_map_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: Module,
|
||||
) -> (Arc<LoweredModule>, Arc<ImportSourceMap>) {
|
||||
let (file_id, source) = module.definition_source(db);
|
||||
let file_id: HirFileId = file_id.into();
|
||||
let mut source_map = ImportSourceMap::default();
|
||||
let mut res = LoweredModule::default();
|
||||
match source {
|
||||
ModuleSource::SourceFile(it) => {
|
||||
res.fill(&mut source_map, db, module, file_id, &mut it.items_with_macros())
|
||||
}
|
||||
ModuleSource::Module(it) => {
|
||||
if let Some(item_list) = it.item_list() {
|
||||
res.fill(
|
||||
&mut source_map,
|
||||
db,
|
||||
module,
|
||||
file_id,
|
||||
&mut item_list.items_with_macros(),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
(Arc::new(res), Arc::new(source_map))
|
||||
}
|
||||
|
||||
fn fill(
|
||||
&mut self,
|
||||
source_map: &mut ImportSourceMap,
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: Module,
|
||||
file_id: HirFileId,
|
||||
items: &mut Iterator<Item = ast::ItemOrMacro>,
|
||||
) {
|
||||
let file_items = db.file_items(file_id);
|
||||
|
||||
for item in items {
|
||||
match item {
|
||||
ast::ItemOrMacro::Item(it) => {
|
||||
self.add_def_id(source_map, db, module, file_id, it);
|
||||
}
|
||||
ast::ItemOrMacro::Macro(macro_call) => {
|
||||
let item_id = file_items.id_of_unchecked(macro_call.syntax());
|
||||
let loc =
|
||||
MacroCallLoc { module, source_item_id: SourceItemId { file_id, item_id } };
|
||||
let id = loc.id(db);
|
||||
let file_id = HirFileId::from(id);
|
||||
//FIXME: expand recursively
|
||||
for item in db.hir_parse(file_id).items() {
|
||||
self.add_def_id(source_map, db, module, file_id, item);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn add_def_id(
|
||||
&mut self,
|
||||
source_map: &mut ImportSourceMap,
|
||||
db: &impl PersistentHirDatabase,
|
||||
module: Module,
|
||||
file_id: HirFileId,
|
||||
item: &ast::ModuleItem,
|
||||
) {
|
||||
let ctx = LocationCtx::new(db, module, file_id);
|
||||
match item.kind() {
|
||||
ast::ModuleItemKind::StructDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let s = Struct { id: ctx.to_def(it) };
|
||||
let s: ModuleDef = s.into();
|
||||
self.declarations.insert(name.as_name(), PerNs::both(s, s));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::EnumDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let e = Enum { id: ctx.to_def(it) };
|
||||
let e: ModuleDef = e.into();
|
||||
self.declarations.insert(name.as_name(), PerNs::types(e));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::FnDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let func = Function { id: ctx.to_def(it) };
|
||||
self.declarations.insert(name.as_name(), PerNs::values(func.into()));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::TraitDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let t = Trait { id: ctx.to_def(it) };
|
||||
self.declarations.insert(name.as_name(), PerNs::types(t.into()));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::TypeAliasDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let t = TypeAlias { id: ctx.to_def(it) };
|
||||
self.declarations.insert(name.as_name(), PerNs::types(t.into()));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::ImplBlock(_) => {
|
||||
// impls don't define items
|
||||
}
|
||||
ast::ModuleItemKind::UseItem(it) => {
|
||||
self.add_use_item(source_map, it);
|
||||
}
|
||||
ast::ModuleItemKind::ExternCrateItem(it) => {
|
||||
if let Some(name_ref) = it.name_ref() {
|
||||
let path = Path::from_name_ref(name_ref);
|
||||
let alias = it.alias().and_then(|a| a.name()).map(AsName::as_name);
|
||||
self.imports.alloc(ImportData {
|
||||
path,
|
||||
alias,
|
||||
is_glob: false,
|
||||
is_prelude: false,
|
||||
is_extern_crate: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::ConstDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let c = Const { id: ctx.to_def(it) };
|
||||
self.declarations.insert(name.as_name(), PerNs::values(c.into()));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::StaticDef(it) => {
|
||||
if let Some(name) = it.name() {
|
||||
let s = Static { id: ctx.to_def(it) };
|
||||
self.declarations.insert(name.as_name(), PerNs::values(s.into()));
|
||||
}
|
||||
}
|
||||
ast::ModuleItemKind::Module(_) => {
|
||||
// modules are handled separately by name res
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn add_use_item(&mut self, source_map: &mut ImportSourceMap, item: &ast::UseItem) {
|
||||
let is_prelude =
|
||||
item.attrs().any(|attr| attr.as_atom().map(|s| s == "prelude_import").unwrap_or(false));
|
||||
Path::expand_use_item(item, |path, segment, alias| {
|
||||
let import = self.imports.alloc(ImportData {
|
||||
path,
|
||||
alias,
|
||||
is_glob: segment.is_none(),
|
||||
is_prelude,
|
||||
is_extern_crate: false,
|
||||
});
|
||||
if let Some(segment) = segment {
|
||||
source_map.insert(import, segment)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
crates/ra_hir/src/nameres/per_ns.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum Namespace {
|
||||
Types,
|
||||
Values,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct PerNs<T> {
|
||||
pub types: Option<T>,
|
||||
pub values: Option<T>,
|
||||
}
|
||||
|
||||
impl<T> Default for PerNs<T> {
|
||||
fn default() -> Self {
|
||||
PerNs { types: None, values: None }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PerNs<T> {
|
||||
pub fn none() -> PerNs<T> {
|
||||
PerNs { types: None, values: None }
|
||||
}
|
||||
|
||||
pub fn values(t: T) -> PerNs<T> {
|
||||
PerNs { types: None, values: Some(t) }
|
||||
}
|
||||
|
||||
pub fn types(t: T) -> PerNs<T> {
|
||||
PerNs { types: Some(t), values: None }
|
||||
}
|
||||
|
||||
pub fn both(types: T, values: T) -> PerNs<T> {
|
||||
PerNs { types: Some(types), values: Some(values) }
|
||||
}
|
||||
|
||||
pub fn is_none(&self) -> bool {
|
||||
self.types.is_none() && self.values.is_none()
|
||||
}
|
||||
|
||||
pub fn is_both(&self) -> bool {
|
||||
self.types.is_some() && self.values.is_some()
|
||||
}
|
||||
|
||||
pub fn take(self, namespace: Namespace) -> Option<T> {
|
||||
match namespace {
|
||||
Namespace::Types => self.types,
|
||||
Namespace::Values => self.values,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn take_types(self) -> Option<T> {
|
||||
self.take(Namespace::Types)
|
||||
}
|
||||
|
||||
pub fn take_values(self) -> Option<T> {
|
||||
self.take(Namespace::Values)
|
||||
}
|
||||
|
||||
pub fn get(&self, namespace: Namespace) -> Option<&T> {
|
||||
self.as_ref().take(namespace)
|
||||
}
|
||||
|
||||
pub fn as_ref(&self) -> PerNs<&T> {
|
||||
PerNs { types: self.types.as_ref(), values: self.values.as_ref() }
|
||||
}
|
||||
|
||||
pub fn or(self, other: PerNs<T>) -> PerNs<T> {
|
||||
PerNs { types: self.types.or(other.types), values: self.values.or(other.values) }
|
||||
}
|
||||
|
||||
pub fn and_then<U>(self, f: impl Fn(T) -> Option<U>) -> PerNs<U> {
|
||||
PerNs { types: self.types.and_then(&f), values: self.values.and_then(&f) }
|
||||
}
|
||||
|
||||
pub fn map<U>(self, f: impl Fn(T) -> U) -> PerNs<U> {
|
||||
PerNs { types: self.types.map(&f), values: self.values.map(&f) }
|
||||
}
|
||||
}
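`PerNs::or` is what encodes the shadowing order used throughout resolution: it keeps the first `Some` independently in each namespace, so `from_scope.or(from_extern_prelude).or(from_prelude)` lets a local value shadow a prelude item without hiding a type that only the prelude provides. A small self-contained demonstration (the minimal `PerNs` below is a stand-in so the snippet runs on its own; the names being resolved are invented):

// Minimal stand-in for PerNs<T>, just enough to show how `or` combines
// candidates per namespace. This is a sketch, not the type from the commit.
#[derive(Debug)]
struct PerNs<T> {
    types: Option<T>,
    values: Option<T>,
}

impl<T> PerNs<T> {
    fn none() -> Self { PerNs { types: None, values: None } }
    fn types(t: T) -> Self { PerNs { types: Some(t), values: None } }
    fn values(t: T) -> Self { PerNs { types: None, values: Some(t) } }
    fn or(self, other: PerNs<T>) -> PerNs<T> {
        PerNs { types: self.types.or(other.types), values: self.values.or(other.values) }
    }
}

fn main() {
    // Resolution order for a plain name: module scope, then extern prelude,
    // then the std prelude (the candidates here are illustrative strings).
    let from_scope = PerNs::values("fn foo defined in this module");
    let from_extern_prelude = PerNs::types("extern crate `foo`");
    let from_prelude = PerNs::<&str>::none();

    let resolved = from_scope.or(from_extern_prelude).or(from_prelude);
    // The value comes from the scope and the type from the extern prelude:
    // neither candidate hides the other, because they live in different namespaces.
    assert_eq!(resolved.values, Some("fn foo defined in this module"));
    assert_eq!(resolved.types, Some("extern crate `foo`"));
    println!("{:?}", resolved);
}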
|
crates/ra_hir/src/nameres/raw.rs (new file, 322 lines)
@@ -0,0 +1,322 @@
|
||||
use std::{
|
||||
sync::Arc,
|
||||
ops::Index,
|
||||
};
|
||||
|
||||
use test_utils::tested_by;
|
||||
use ra_db::FileId;
|
||||
use ra_arena::{Arena, impl_arena_id, RawId, map::ArenaMap};
|
||||
use ra_syntax::{
|
||||
AstNode, SourceFile, AstPtr, TreeArc,
|
||||
ast::{self, NameOwner, AttrsOwner},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
PersistentHirDatabase, Name, AsName, Path, HirFileId, ModuleSource,
|
||||
ids::{SourceFileItemId, SourceFileItems},
|
||||
};
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
pub struct RawItems {
|
||||
modules: Arena<Module, ModuleData>,
|
||||
imports: Arena<ImportId, ImportData>,
|
||||
defs: Arena<Def, DefData>,
|
||||
macros: Arena<Macro, MacroData>,
|
||||
/// items for top-level module
|
||||
items: Vec<RawItem>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
pub struct ImportSourceMap {
|
||||
map: ArenaMap<ImportId, AstPtr<ast::PathSegment>>,
|
||||
}
|
||||
|
||||
impl ImportSourceMap {
|
||||
pub(crate) fn insert(&mut self, import: ImportId, segment: &ast::PathSegment) {
|
||||
self.map.insert(import, AstPtr::new(segment))
|
||||
}
|
||||
|
||||
pub fn get(&self, source: &ModuleSource, import: ImportId) -> TreeArc<ast::PathSegment> {
|
||||
let file = match source {
|
||||
ModuleSource::SourceFile(file) => &*file,
|
||||
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
|
||||
};
|
||||
|
||||
self.map[import].to_node(file).to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
impl RawItems {
|
||||
pub(crate) fn raw_items_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: FileId,
|
||||
) -> Arc<RawItems> {
|
||||
db.raw_items_with_source_map(file_id).0
|
||||
}
|
||||
|
||||
pub(crate) fn raw_items_with_source_map_query(
|
||||
db: &impl PersistentHirDatabase,
|
||||
file_id: FileId,
|
||||
) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
|
||||
let mut collector = RawItemsCollector {
|
||||
raw_items: RawItems::default(),
|
||||
source_file_items: db.file_items(file_id.into()),
|
||||
source_map: ImportSourceMap::default(),
|
||||
};
|
||||
let source_file = db.parse(file_id);
|
||||
collector.process_module(None, &*source_file);
|
||||
(Arc::new(collector.raw_items), Arc::new(collector.source_map))
|
||||
}
|
||||
|
||||
pub(crate) fn items(&self) -> &[RawItem] {
|
||||
&self.items
|
||||
}
|
||||
|
||||
// We can't use queries during name resolution for fear of cycles, so this
|
||||
// is a query-less variant of the above function.
|
||||
pub(crate) fn from_source_file(source_file: &SourceFile, file_id: HirFileId) -> RawItems {
|
||||
let source_file_items = SourceFileItems::from_source_file(source_file, file_id);
|
||||
let mut collector = RawItemsCollector {
|
||||
raw_items: RawItems::default(),
|
||||
source_file_items: Arc::new(source_file_items),
|
||||
source_map: ImportSourceMap::default(),
|
||||
};
|
||||
collector.process_module(None, &*source_file);
|
||||
collector.raw_items
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<Module> for RawItems {
|
||||
type Output = ModuleData;
|
||||
fn index(&self, idx: Module) -> &ModuleData {
|
||||
&self.modules[idx]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<ImportId> for RawItems {
|
||||
type Output = ImportData;
|
||||
fn index(&self, idx: ImportId) -> &ImportData {
|
||||
&self.imports[idx]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<Def> for RawItems {
|
||||
type Output = DefData;
|
||||
fn index(&self, idx: Def) -> &DefData {
|
||||
&self.defs[idx]
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<Macro> for RawItems {
|
||||
type Output = MacroData;
|
||||
fn index(&self, idx: Macro) -> &MacroData {
|
||||
&self.macros[idx]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub(crate) enum RawItem {
|
||||
Module(Module),
|
||||
Import(ImportId),
|
||||
Def(Def),
|
||||
Macro(Macro),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub(crate) struct Module(RawId);
|
||||
impl_arena_id!(Module);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) enum ModuleData {
|
||||
Declaration { name: Name, source_item_id: SourceFileItemId },
|
||||
Definition { name: Name, source_item_id: SourceFileItemId, items: Vec<RawItem> },
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct ImportId(RawId);
|
||||
impl_arena_id!(ImportId);
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub struct ImportData {
|
||||
pub(crate) path: Path,
|
||||
pub(crate) alias: Option<Name>,
|
||||
pub(crate) is_glob: bool,
|
||||
pub(crate) is_prelude: bool,
|
||||
pub(crate) is_extern_crate: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub(crate) struct Def(RawId);
|
||||
impl_arena_id!(Def);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) struct DefData {
|
||||
pub(crate) source_item_id: SourceFileItemId,
|
||||
pub(crate) name: Name,
|
||||
pub(crate) kind: DefKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub(crate) enum DefKind {
|
||||
Function,
|
||||
Struct,
|
||||
Enum,
|
||||
Const,
|
||||
Static,
|
||||
Trait,
|
||||
TypeAlias,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub(crate) struct Macro(RawId);
|
||||
impl_arena_id!(Macro);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) struct MacroData {
|
||||
pub(crate) source_item_id: SourceFileItemId,
|
||||
pub(crate) path: Path,
|
||||
pub(crate) name: Option<Name>,
|
||||
pub(crate) arg: tt::Subtree,
|
||||
pub(crate) export: bool,
|
||||
}
|
||||
|
||||
struct RawItemsCollector {
|
||||
raw_items: RawItems,
|
||||
source_file_items: Arc<SourceFileItems>,
|
||||
source_map: ImportSourceMap,
|
||||
}
|
||||
|
||||
impl RawItemsCollector {
|
||||
fn process_module(&mut self, current_module: Option<Module>, body: &impl ast::ModuleItemOwner) {
|
||||
for item_or_macro in body.items_with_macros() {
|
||||
match item_or_macro {
|
||||
ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m),
|
||||
ast::ItemOrMacro::Item(item) => self.add_item(current_module, item),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn add_item(&mut self, current_module: Option<Module>, item: &ast::ModuleItem) {
|
||||
let (kind, name) = match item.kind() {
|
||||
ast::ModuleItemKind::Module(module) => {
|
||||
self.add_module(current_module, module);
|
||||
return;
|
||||
}
|
||||
ast::ModuleItemKind::UseItem(use_item) => {
|
||||
self.add_use_item(current_module, use_item);
|
||||
return;
|
||||
}
|
||||
ast::ModuleItemKind::ExternCrateItem(extern_crate) => {
|
||||
self.add_extern_crate_item(current_module, extern_crate);
|
||||
return;
|
||||
}
|
||||
ast::ModuleItemKind::ImplBlock(_) => {
|
||||
// impls don't participate in name resolution
|
||||
return;
|
||||
}
|
||||
ast::ModuleItemKind::StructDef(it) => (DefKind::Struct, it.name()),
|
||||
ast::ModuleItemKind::EnumDef(it) => (DefKind::Enum, it.name()),
|
||||
ast::ModuleItemKind::FnDef(it) => (DefKind::Function, it.name()),
|
||||
ast::ModuleItemKind::TraitDef(it) => (DefKind::Trait, it.name()),
|
||||
ast::ModuleItemKind::TypeAliasDef(it) => (DefKind::TypeAlias, it.name()),
|
||||
ast::ModuleItemKind::ConstDef(it) => (DefKind::Const, it.name()),
|
||||
ast::ModuleItemKind::StaticDef(it) => (DefKind::Static, it.name()),
|
||||
};
|
||||
if let Some(name) = name {
|
||||
let name = name.as_name();
|
||||
let source_item_id = self.source_file_items.id_of_unchecked(item.syntax());
|
||||
let def = self.raw_items.defs.alloc(DefData { name, kind, source_item_id });
|
||||
self.push_item(current_module, RawItem::Def(def))
|
||||
}
|
||||
}
|
||||
|
||||
fn add_module(&mut self, current_module: Option<Module>, module: &ast::Module) {
|
||||
let name = match module.name() {
|
||||
Some(it) => it.as_name(),
|
||||
None => return,
|
||||
};
|
||||
let source_item_id = self.source_file_items.id_of_unchecked(module.syntax());
|
||||
if module.has_semi() {
|
||||
let item =
|
||||
self.raw_items.modules.alloc(ModuleData::Declaration { name, source_item_id });
|
||||
self.push_item(current_module, RawItem::Module(item));
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(item_list) = module.item_list() {
|
||||
let item = self.raw_items.modules.alloc(ModuleData::Definition {
|
||||
name,
|
||||
source_item_id,
|
||||
items: Vec::new(),
|
||||
});
|
||||
self.process_module(Some(item), item_list);
|
||||
self.push_item(current_module, RawItem::Module(item));
|
||||
return;
|
||||
}
|
||||
tested_by!(name_res_works_for_broken_modules);
|
||||
}
|
||||
|
||||
fn add_use_item(&mut self, current_module: Option<Module>, use_item: &ast::UseItem) {
|
||||
let is_prelude = use_item.has_atom_attr("prelude_import");
|
||||
|
||||
Path::expand_use_item(use_item, |path, segment, alias| {
|
||||
let import = self.raw_items.imports.alloc(ImportData {
|
||||
path,
|
||||
alias,
|
||||
is_glob: segment.is_none(),
|
||||
is_prelude,
|
||||
is_extern_crate: false,
|
||||
});
|
||||
if let Some(segment) = segment {
|
||||
self.source_map.insert(import, segment)
|
||||
}
|
||||
self.push_item(current_module, RawItem::Import(import))
|
||||
})
|
||||
}
|
||||
|
||||
fn add_extern_crate_item(
|
||||
&mut self,
|
||||
current_module: Option<Module>,
|
||||
extern_crate: &ast::ExternCrateItem,
|
||||
) {
|
||||
if let Some(name_ref) = extern_crate.name_ref() {
|
||||
let path = Path::from_name_ref(name_ref);
|
||||
let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name);
|
||||
let import = self.raw_items.imports.alloc(ImportData {
|
||||
path,
|
||||
alias,
|
||||
is_glob: false,
|
||||
is_prelude: false,
|
||||
is_extern_crate: true,
|
||||
});
|
||||
self.push_item(current_module, RawItem::Import(import))
|
||||
}
|
||||
}
|
||||
|
||||
fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) {
|
||||
let (path, arg) = match (
|
||||
m.path().and_then(Path::from_ast),
|
||||
m.token_tree().and_then(mbe::ast_to_token_tree),
|
||||
) {
|
||||
(Some(path), Some((token_tree, _token_map))) => (path, token_tree),
|
||||
_ => return,
|
||||
};
|
||||
|
||||
let name = m.name().map(|it| it.as_name());
|
||||
let source_item_id = self.source_file_items.id_of_unchecked(m.syntax());
|
||||
let export = m.has_atom_attr("macro_export");
|
||||
let m = self.raw_items.macros.alloc(MacroData { source_item_id, path, arg, name, export });
|
||||
self.push_item(current_module, RawItem::Macro(m));
|
||||
}
|
||||
|
||||
fn push_item(&mut self, current_module: Option<Module>, item: RawItem) {
|
||||
match current_module {
|
||||
Some(module) => match &mut self.raw_items.modules[module] {
|
||||
ModuleData::Definition { items, .. } => items,
|
||||
ModuleData::Declaration { .. } => unreachable!(),
|
||||
},
|
||||
None => &mut self.raw_items.items,
|
||||
}
|
||||
.push(item)
|
||||
}
|
||||
}
|
(One file's diff is suppressed because it is too large.)
crates/ra_hir/src/nameres/tests/globs.rs (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn glob_1() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
use foo::*;
|
||||
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
pub use self::bar::Baz;
|
||||
pub struct Foo;
|
||||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
);
|
||||
assert_snapshot_matches!(map, @r###"
|
||||
crate
|
||||
bar: t
|
||||
Foo: t v
|
||||
Baz: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
bar: t
|
||||
Foo: t v
|
||||
Baz: t v
|
||||
|
||||
crate::foo::bar
|
||||
Baz: t v
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_2() {
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
use foo::*;
|
||||
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
pub use self::bar::*;
|
||||
pub struct Foo;
|
||||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
pub use super::*;
|
||||
",
|
||||
);
|
||||
assert_snapshot_matches!(map, @r###"
|
||||
crate
|
||||
bar: t
|
||||
Foo: t v
|
||||
Baz: t v
|
||||
foo: t
|
||||
|
||||
crate::foo
|
||||
bar: t
|
||||
Foo: t v
|
||||
Baz: t v
|
||||
|
||||
crate::foo::bar
|
||||
bar: t
|
||||
Foo: t v
|
||||
Baz: t v
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_across_crates() {
|
||||
covers!(glob_across_crates);
|
||||
let map = def_map_with_crate_graph(
|
||||
"
|
||||
//- /main.rs
|
||||
use test_crate::*;
|
||||
|
||||
//- /lib.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
crate_graph! {
|
||||
"main": ("/main.rs", ["test_crate"]),
|
||||
"test_crate": ("/lib.rs", []),
|
||||
},
|
||||
);
|
||||
assert_snapshot_matches!(map, @r###"
|
||||
crate
|
||||
Baz: t v
|
||||
"###
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn glob_enum() {
|
||||
covers!(glob_enum);
|
||||
let map = def_map(
|
||||
"
|
||||
//- /lib.rs
|
||||
enum Foo {
|
||||
Bar, Baz
|
||||
}
|
||||
use self::Foo::*;
|
||||
",
|
||||
);
|
||||
assert_snapshot_matches!(map, @r###"
|
||||
crate
|
||||
Foo: t
|
||||
Bar: t v
|
||||
Baz: t v
|
||||
"###
|
||||
);
|
||||
}
|
crates/ra_hir/src/nameres/tests/incremental.rs (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
use super::*;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use ra_db::SourceDatabase;
|
||||
|
||||
fn check_def_map_is_not_recomputed(initial: &str, file_change: &str) {
|
||||
let (mut db, pos) = MockDatabase::with_position(initial);
|
||||
let crate_id = db.crate_graph().iter().next().unwrap();
|
||||
let krate = Crate { crate_id };
|
||||
{
|
||||
let events = db.log_executed(|| {
|
||||
db.crate_def_map(krate);
|
||||
});
|
||||
assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
|
||||
}
|
||||
db.set_file_text(pos.file_id, Arc::new(file_change.to_string()));
|
||||
|
||||
{
|
||||
let events = db.log_executed(|| {
|
||||
db.crate_def_map(krate);
|
||||
});
|
||||
assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn typing_inside_a_function_should_not_invalidate_def_map() {
|
||||
check_def_map_is_not_recomputed(
|
||||
"
|
||||
//- /lib.rs
|
||||
mod foo;<|>
|
||||
|
||||
use crate::foo::bar::Baz;
|
||||
|
||||
fn foo() -> i32 {
|
||||
1 + 1
|
||||
}
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
"
|
||||
mod foo;
|
||||
|
||||
use crate::foo::bar::Baz;
|
||||
|
||||
fn foo() -> i32 { 92 }
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn adding_inner_items_should_not_invalidate_def_map() {
|
||||
check_def_map_is_not_recomputed(
|
||||
"
|
||||
//- /lib.rs
|
||||
struct S { a: i32}
|
||||
enum E { A }
|
||||
trait T {
|
||||
fn a() {}
|
||||
}
|
||||
mod foo;<|>
|
||||
impl S {
|
||||
fn a() {}
|
||||
}
|
||||
use crate::foo::bar::Baz;
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
|
||||
//- /foo/bar.rs
|
||||
pub struct Baz;
|
||||
",
|
||||
"
|
||||
struct S { a: i32, b: () }
|
||||
enum E { A, B }
|
||||
trait T {
|
||||
fn a() {}
|
||||
fn b() {}
|
||||
}
|
||||
mod foo;<|>
|
||||
impl S {
|
||||
fn a() {}
|
||||
fn b() {}
|
||||
}
|
||||
use crate::foo::bar::Baz;
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
// It would be awesome to make this work, but it's unclear how
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn typing_inside_a_function_inside_a_macro_should_not_invalidate_def_map() {
|
||||
check_def_map_is_not_recomputed(
|
||||
"
|
||||
//- /lib.rs
|
||||
mod foo;
|
||||
|
||||
use crate::foo::bar::Baz;
|
||||
|
||||
//- /foo/mod.rs
|
||||
pub mod bar;
|
||||
|
||||
//- /foo/bar.rs
|
||||
<|>
|
||||
salsa::query_group! {
|
||||
trait Baz {
|
||||
fn foo() -> i32 { 1 + 1 }
|
||||
}
|
||||
}
|
||||
",
|
||||
"
|
||||
salsa::query_group! {
|
||||
trait Baz {
|
||||
fn foo() -> i32 { 92 }
|
||||
}
|
||||
}
|
||||
",
|
||||
);
|
||||
}
|
crates/ra_hir/src/nameres/tests/macros.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
use super::*;

#[test]
fn macro_rules_are_globally_visible() {
let map = def_map(
"
//- /lib.rs
macro_rules! structs {
($($i:ident),*) => {
$(struct $i { field: u32 } )*
}
}
structs!(Foo);
mod nested;

//- /nested.rs
structs!(Bar, Baz);
",
);
assert_snapshot_matches!(map, @r###"
crate
nested: t
Foo: t v

crate::nested
Bar: t v
Baz: t v
"###);
}

#[test]
fn macro_rules_can_define_modules() {
let map = def_map(
"
//- /lib.rs
macro_rules! m {
($name:ident) => { mod $name; }
}
m!(n1);

//- /n1.rs
m!(n2)
//- /n1/n2.rs
struct X;
",
);
assert_snapshot_matches!(map, @r###"
crate
n1: t

crate::n1
n2: t

crate::n1::n2
X: t v
"###);
}

#[test]
fn macro_rules_from_other_crates_are_visible() {
let map = def_map_with_crate_graph(
"
//- /main.rs
foo::structs!(Foo, Bar)
mod bar;

//- /bar.rs
use crate::*;

//- /lib.rs
#[macro_export]
macro_rules! structs {
($($i:ident),*) => {
$(struct $i { field: u32 } )*
}
}
",
crate_graph! {
"main": ("/main.rs", ["foo"]),
"foo": ("/lib.rs", []),
},
);
assert_snapshot_matches!(map, @r###"
crate
bar: t
Foo: t v
Bar: t v

crate::bar
bar: t
Foo: t v
Bar: t v
"###);
}
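
The first test above relies on the legacy (textual) scoping of macro_rules!: a macro defined in the crate root is visible, in source order, to modules declared after it, and #[macro_export] is what makes it reachable from other crates. A plain-Rust illustration of the in-crate part, independent of the test fixtures (an inline module is used instead of `mod nested;` so the snippet compiles as a single file):

    // Legacy (textual) macro_rules! scoping: the macro is visible after its
    // definition, including inside child modules declared later in the file.
    macro_rules! structs {
        ($($i:ident),*) => {
            $(struct $i { field: u32 })*
        };
    }

    structs!(Foo);

    mod nested {
        // The macro defined in the parent module is in scope here.
        structs!(Bar, Baz);

        pub fn check() {
            let _ = Bar { field: 1 };
            let _ = Baz { field: 2 };
        }
    }

    fn main() {
        let _ = Foo { field: 0 };
        nested::check();
    }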
@ -4,10 +4,10 @@ use std::sync::Arc;
use rustc_hash::FxHashMap;

use crate::{
ModuleDef, Module,
ModuleDef,
db::HirDatabase,
name::{Name, KnownName},
nameres::{PerNs, ItemMap},
nameres::{PerNs, CrateDefMap, CrateModuleId},
generics::GenericParams,
expr::{scope::{ExprScopes, ScopeId}, PatId, Body},
impl_block::ImplBlock,
@ -22,8 +22,8 @@ pub struct Resolver {
// TODO how to store these best
#[derive(Debug, Clone)]
pub(crate) struct ModuleItemMap {
item_map: Arc<ItemMap>,
module: Module,
crate_def_map: Arc<CrateDefMap>,
module_id: CrateModuleId,
}

#[derive(Debug, Clone)]
@ -175,9 +175,9 @@ impl Resolver {
names
}

fn module(&self) -> Option<(&ItemMap, Module)> {
fn module(&self) -> Option<(&CrateDefMap, CrateModuleId)> {
self.scopes.iter().rev().find_map(|scope| match scope {
Scope::ModuleScope(m) => Some((&*m.item_map, m.module.clone())),
Scope::ModuleScope(m) => Some((&*m.crate_def_map, m.module_id)),

_ => None,
})
@ -206,8 +206,12 @@ impl Resolver {
self.push_scope(Scope::ImplBlockScope(impl_block))
}

pub(crate) fn push_module_scope(self, item_map: Arc<ItemMap>, module: Module) -> Resolver {
self.push_scope(Scope::ModuleScope(ModuleItemMap { item_map, module }))
pub(crate) fn push_module_scope(
self,
crate_def_map: Arc<CrateDefMap>,
module_id: CrateModuleId,
) -> Resolver {
self.push_scope(Scope::ModuleScope(ModuleItemMap { crate_def_map, module_id }))
}

pub(crate) fn push_expr_scope(
@ -224,9 +228,11 @@ impl Scope {
match self {
Scope::ModuleScope(m) => {
if let Some(KnownName::SelfParam) = name.as_known_name() {
PerNs::types(Resolution::Def(m.module.into()))
PerNs::types(Resolution::Def(m.crate_def_map.mk_module(m.module_id).into()))
} else {
m.item_map.resolve_name_in_module(db, m.module, name).map(Resolution::Def)
m.crate_def_map
.resolve_name_in_module(db, m.module_id, name)
.map(Resolution::Def)
}
}
Scope::GenericParams(gp) => match gp.find_by_name(name) {
@ -261,15 +267,15 @@ impl Scope {
// def: m.module.into(),
// }),
// );
m.item_map[m.module.module_id].entries().for_each(|(name, res)| {
m.crate_def_map[m.module_id].scope.entries().for_each(|(name, res)| {
f(name.clone(), res.def.map(Resolution::Def));
});
m.item_map.extern_prelude.iter().for_each(|(name, def)| {
m.crate_def_map.extern_prelude().iter().for_each(|(name, def)| {
f(name.clone(), PerNs::types(Resolution::Def(*def)));
});
if let Some(prelude) = m.item_map.prelude {
let prelude_item_map = db.item_map(prelude.krate);
prelude_item_map[prelude.module_id].entries().for_each(|(name, res)| {
if let Some(prelude) = m.crate_def_map.prelude() {
let prelude_def_map = db.crate_def_map(prelude.krate);
prelude_def_map[prelude.module_id].scope.entries().for_each(|(name, res)| {
f(name.clone(), res.def.map(Resolution::Def));
});
}
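
The Resolver change above swaps the per-scope payload from (Arc<ItemMap>, Module) to (Arc<CrateDefMap>, CrateModuleId), but the lookup strategy stays the same: scopes are pushed onto a stack and names are resolved by walking that stack from the innermost scope outwards. A simplified, self-contained sketch of that stack walk, with string payloads instead of the real hir types:

    // Minimal model of the scope-stack idea: push_* methods consume and
    // return the resolver, and lookup walks the scopes in reverse order.
    enum Scope {
        Module(Vec<(String, String)>), // (name, definition) pairs
        Expr(Vec<(String, String)>),   // local bindings
    }

    #[derive(Default)]
    struct Resolver {
        scopes: Vec<Scope>,
    }

    impl Resolver {
        fn push_scope(mut self, scope: Scope) -> Resolver {
            self.scopes.push(scope);
            self
        }

        fn resolve_name(&self, name: &str) -> Option<&str> {
            self.scopes.iter().rev().find_map(|scope| {
                let entries = match scope {
                    Scope::Module(e) | Scope::Expr(e) => e,
                };
                entries.iter().find(|(n, _)| n == name).map(|(_, def)| def.as_str())
            })
        }
    }

    fn main() {
        let resolver = Resolver::default()
            .push_scope(Scope::Module(vec![("Foo".into(), "struct Foo".into())]))
            .push_scope(Scope::Expr(vec![("x".into(), "local x".into())]));
        assert_eq!(resolver.resolve_name("x"), Some("local x"));
        assert_eq!(resolver.resolve_name("Foo"), Some("struct Foo"));
        assert_eq!(resolver.resolve_name("missing"), None);
    }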
@ -7,13 +7,13 @@
/// purely for "IDE needs".
use ra_db::{FileId, FilePosition};
use ra_syntax::{
SmolStr, TextRange, SyntaxNode,
SyntaxNode,
ast::{self, AstNode, NameOwner},
algo::{find_node_at_offset, find_leaf_at_offset},
};

use crate::{
HirDatabase, Function, ModuleDef, Struct, Enum,
HirDatabase, Function, Struct, Enum,
AsName, Module, HirFileId, Crate, Trait, Resolver,
ids::{LocationCtx, SourceFileItemId},
expr
@ -80,8 +80,8 @@ fn module_from_source(
let source_root_id = db.file_source_root(file_id.as_original_file());
db.source_root_crates(source_root_id).iter().map(|&crate_id| Crate { crate_id }).find_map(
|krate| {
let module_tree = db.module_tree(krate);
let module_id = module_tree.find_module_by_source(file_id, decl_id)?;
let def_map = db.crate_def_map(krate);
let module_id = def_map.find_module_by_source(file_id, decl_id)?;
Some(Module { krate, module_id })
},
)
@ -152,44 +152,6 @@ pub fn trait_from_module(
Trait { id: ctx.to_def(trait_def) }
}

pub fn macro_symbols(db: &impl HirDatabase, file_id: FileId) -> Vec<(SmolStr, TextRange)> {
let module = match module_from_file_id(db, file_id) {
Some(it) => it,
None => return Vec::new(),
};
let items = db.lower_module(module);
let mut res = Vec::new();

for macro_call_id in items
.declarations
.iter()
.filter_map(|(_, it)| it.clone().take_types())
.filter_map(|it| match it {
ModuleDef::Trait(it) => Some(it),
_ => None,
})
.filter_map(|it| it.source(db).0.as_macro_call_id())
{
if let Some(exp) = db.expand_macro_invocation(macro_call_id) {
let loc = macro_call_id.loc(db);
let syntax = db.file_item(loc.source_item_id);
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
let off = macro_call.token_tree().unwrap().syntax().range().start();
let file = exp.file();
for trait_def in file.syntax().descendants().filter_map(ast::TraitDef::cast) {
if let Some(name) = trait_def.name() {
let dst_range = name.syntax().range();
if let Some(src_range) = exp.map_range_back(dst_range) {
res.push((name.text().clone(), src_range + off))
}
}
}
}
}

res
}

pub fn resolver_for_position(db: &impl HirDatabase, position: FilePosition) -> Resolver {
let file_id = position.file_id;
let file = db.parse(file_id);
@ -7,10 +7,12 @@ use std::sync::Arc;
use rustc_hash::FxHashMap;

use crate::{
HirDatabase, module_tree::ModuleId, Module, Crate, Name, Function, Trait,
HirDatabase, Module, Crate, Name, Function, Trait,
ids::TraitId,
impl_block::{ImplId, ImplBlock, ImplItem},
ty::{AdtDef, Ty},
nameres::CrateModuleId,

};

/// This is used as a key for indexing impls.
@ -33,10 +35,10 @@ impl TyFingerprint {

#[derive(Debug, PartialEq, Eq)]
pub struct CrateImplBlocks {
/// To make sense of the ModuleIds, we need the source root.
/// To make sense of the CrateModuleIds, we need the source root.
krate: Crate,
impls: FxHashMap<TyFingerprint, Vec<(ModuleId, ImplId)>>,
impls_by_trait: FxHashMap<TraitId, Vec<(ModuleId, ImplId)>>,
impls: FxHashMap<TyFingerprint, Vec<(CrateModuleId, ImplId)>>,
impls_by_trait: FxHashMap<TraitId, Vec<(CrateModuleId, ImplId)>>,
}

impl CrateImplBlocks {
@ -223,7 +223,7 @@ impl RootDatabase {
self.query(hir::db::FileItemsQuery).sweep(sweep);
self.query(hir::db::FileItemQuery).sweep(sweep);

self.query(hir::db::LowerModuleWithSourceMapQuery).sweep(sweep);
self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep);
self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
}
}
@ -1,55 +1,13 @@
use ra_db::SourceDatabase;
use ra_syntax::{
SyntaxNode, AstNode, SourceFile,
ast, algo::find_covering_node,
};
use ra_syntax::AstNode;

use crate::{
TextRange, FileRange,
db::RootDatabase,
};

// FIXME: restore macro support
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
let source_file = db.parse(frange.file_id);
if let Some(range) = extend_selection_in_macro(db, &source_file, frange) {
return range;
}
ra_ide_api_light::extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range)
}

fn extend_selection_in_macro(
_db: &RootDatabase,
source_file: &SourceFile,
frange: FileRange,
) -> Option<TextRange> {
let macro_call = find_macro_call(source_file.syntax(), frange.range)?;
let (off, exp) = hir::MacroDef::ast_expand(macro_call)?;
let dst_range = exp.map_range_forward(frange.range - off)?;
let dst_range = ra_ide_api_light::extend_selection(&exp.syntax(), dst_range)?;
let src_range = exp.map_range_back(dst_range)? + off;
Some(src_range)
}

fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> {
find_covering_node(node, range).ancestors().find_map(ast::MacroCall::cast)
}

#[cfg(test)]
mod tests {
use ra_syntax::TextRange;

use crate::mock_analysis::single_file_with_range;

#[test]
fn extend_selection_inside_macros() {
let (analysis, frange) = single_file_with_range(
"
fn main() {
vec![foo(|x| <|>x<|>)];
}
",
);
let r = analysis.extend_selection(frange).unwrap();
assert_eq!(r, TextRange::from_to(50.into(), 55.into()));
}
}
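
The deleted extend_selection_in_macro shows the round trip that will have to come back once macro support is restored: map the selection forward into the expansion, extend it there, and map the result back to the original file, shifted by the token-tree offset. A toy model of that round trip with plain numeric ranges (Expansion here is a hypothetical shift-by-delta mapping, not the real token-level map):

    // Simplified model of the map-forward / extend / map-back round trip,
    // using plain offsets instead of syntax-tree ranges.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Range { start: u32, end: u32 }

    // Hypothetical expansion info: the expanded text is the source shifted by
    // a constant delta.
    struct Expansion { delta: u32, len: u32 }

    impl Expansion {
        fn map_range_forward(&self, r: Range) -> Option<Range> {
            let mapped = Range { start: r.start + self.delta, end: r.end + self.delta };
            (mapped.end <= self.len).then(|| mapped)
        }
        fn map_range_back(&self, r: Range) -> Option<Range> {
            (r.start >= self.delta)
                .then(|| Range { start: r.start - self.delta, end: r.end - self.delta })
        }
    }

    fn extend(r: Range) -> Range {
        // Stand-in for ra_ide_api_light::extend_selection: widen by one on each side.
        Range { start: r.start.saturating_sub(1), end: r.end + 1 }
    }

    fn main() {
        let exp = Expansion { delta: 10, len: 100 };
        let src = Range { start: 5, end: 6 };
        let dst = exp.map_range_forward(src).unwrap();
        let widened = extend(dst);
        let back = exp.map_range_back(widened).unwrap();
        assert_eq!(back, Range { start: 4, end: 7 });
    }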
@ -1,7 +1,7 @@
use itertools::Itertools;
use ra_syntax::{
TextRange, SyntaxNode,
ast::{self, AstNode, NameOwner, ModuleItemOwner},
ast::{self, AstNode, NameOwner, ModuleItemOwner, AttrsOwner},
};
use ra_db::SourceDatabase;

@ -30,7 +30,7 @@ use std::{
use fst::{self, Streamer};
use ra_syntax::{
SyntaxNode, SyntaxNodePtr, SourceFile, SmolStr, TreeArc, AstNode,
algo::{visit::{visitor, Visitor}, find_covering_node},
algo::{visit::{visitor, Visitor}},
SyntaxKind::{self, *},
ast::{self, NameOwner},
WalkEvent,
@ -66,14 +66,9 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex>
db.check_canceled();
let source_file = db.parse(file_id);

let mut symbols = source_file_to_file_symbols(&source_file, file_id);
let symbols = source_file_to_file_symbols(&source_file, file_id);

for (name, text_range) in hir::source_binder::macro_symbols(db, file_id) {
let node = find_covering_node(source_file.syntax(), text_range);
let ptr = SyntaxNodePtr::new(node);
// TODO: Should we get container name for macro symbols?
symbols.push(FileSymbol { file_id, name, ptr, name_range: None, container_name: None })
}
// TODO: add macros here

Arc::new(SymbolIndex::new(symbols))
}
@ -1,4 +1,4 @@
use ra_syntax::{ast, AstNode,};
use ra_syntax::AstNode;
use ra_db::SourceDatabase;

use crate::{
@ -8,37 +8,5 @@ use crate::{

pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
let source_file = db.parse(file_id);
let mut res = ra_ide_api_light::highlight(source_file.syntax());
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) {
let mapped_ranges =
ra_ide_api_light::highlight(&exp.syntax()).into_iter().filter_map(|r| {
let mapped_range = exp.map_range_back(r.range)?;
let res = HighlightedRange { range: mapped_range + off, tag: r.tag };
Some(res)
});
res.extend(mapped_ranges);
}
}
res
}

#[cfg(test)]
mod tests {
use crate::mock_analysis::single_file;

use insta::assert_debug_snapshot_matches;

#[test]
fn highlights_code_inside_macros() {
let (analysis, file_id) = single_file(
"
fn main() {
vec![{ let x = 92; x}];
}
",
);
let highlights = analysis.highlight(file_id).unwrap();
assert_debug_snapshot_matches!("highlights_code_inside_macros", &highlights);
}
ra_ide_api_light::highlight(source_file.syntax())
}
@ -7,7 +7,7 @@ source: "crates\\ra_ide_api\\tests\\test\\main.rs"
[
Diagnostic {
message: "unresolved module",
range: [4; 7),
range: [0; 8),
fix: Some(
SourceChange {
label: "create module",
@ -42,7 +42,7 @@ pub use crate::syntax_bridge::{ast_to_token_tree, token_tree_to_ast_item_list};
/// be very confusing is that AST has almost exactly the same shape as
/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MacroRules {
pub(crate) rules: Vec<Rule>,
}
@ -56,13 +56,13 @@ impl MacroRules {
}
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Rule {
pub(crate) lhs: Subtree,
pub(crate) rhs: Subtree,
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum TokenTree {
Leaf(Leaf),
Subtree(Subtree),
@ -70,7 +70,7 @@ pub(crate) enum TokenTree {
}
impl_froms!(TokenTree: Leaf, Subtree, Repeat);

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Leaf {
Literal(Literal),
Punct(Punct),
@ -79,37 +79,37 @@ pub(crate) enum Leaf {
}
impl_froms!(Leaf: Literal, Punct, Ident, Var);

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Subtree {
pub(crate) delimiter: Delimiter,
pub(crate) token_trees: Vec<TokenTree>,
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Repeat {
pub(crate) subtree: Subtree,
pub(crate) kind: RepeatKind,
pub(crate) separator: Option<char>,
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum RepeatKind {
ZeroOrMore,
OneOrMore,
ZeroOrOne,
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Literal {
pub(crate) text: SmolStr,
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Ident {
pub(crate) text: SmolStr,
}

#[derive(Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Var {
pub(crate) text: SmolStr,
pub(crate) kind: Option<SmolStr>,
@ -155,7 +155,14 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
_ => return Err(ExpandError::UnexpectedToken),
},
crate::TokenTree::Repeat(crate::Repeat { subtree, kind: _, separator }) => {
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a proper macro-by-example implementation
let mut limit = 128;
while let Ok(nested) = match_lhs(subtree, input) {
limit -= 1;
if limit == 0 {
break;
}
res.push_nested(nested)?;
if let Some(separator) = *separator {
if !input.is_eof() {
@ -196,7 +203,14 @@ fn expand_tt(
crate::TokenTree::Repeat(repeat) => {
let mut token_trees = Vec::new();
nesting.push(0);
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a proper macro-by-example implementation
let mut limit = 128;
while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) {
limit -= 1;
if limit == 0 {
break;
}
let idx = nesting.pop().unwrap();
nesting.push(idx + 1);
token_trees.push(t.into())
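
Both hunks above guard the repeat-matching loops with a fixed iteration budget so that a pathological pattern cannot spin forever; note that the iteration which trips the limit is discarded before its result is pushed, exactly as written in the diff. The guard in isolation, as a runnable sketch with a trivial stand-in matcher:

    // Shape of the termination guard used in the repeat loops above: keep
    // consuming input while the sub-pattern matches, but bail out after a
    // fixed number of iterations.
    fn match_repeat(input: &mut Vec<u32>) -> Vec<u32> {
        let mut matched = Vec::new();
        // Fixed budget, mirroring the `limit = 128` in the diff.
        let mut limit = 128;
        while let Some(token) = try_match_one(input) {
            limit -= 1;
            if limit == 0 {
                break;
            }
            matched.push(token);
        }
        matched
    }

    // Hypothetical sub-matcher: succeeds while there is input left.
    fn try_match_one(input: &mut Vec<u32>) -> Option<u32> {
        input.pop()
    }

    fn main() {
        let mut input = (0..5).collect::<Vec<u32>>();
        assert_eq!(match_repeat(&mut input).len(), 5);
    }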
@ -114,6 +114,9 @@ pub trait AttrsOwner: AstNode {
fn attrs(&self) -> AstChildren<Attr> {
children(self)
}
fn has_atom_attr(&self, atom: &str) -> bool {
self.attrs().filter_map(|x| x.as_atom()).any(|x| x == atom)
}
}

pub trait DocCommentsOwner: AstNode {
@ -153,12 +156,6 @@ pub trait DocCommentsOwner: AstNode {
}
}

impl FnDef {
pub fn has_atom_attr(&self, atom: &str) -> bool {
self.attrs().filter_map(|x| x.as_atom()).any(|x| x == atom)
}
}

impl Attr {
pub fn is_inner(&self) -> bool {
let tt = match self.value() {
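
The hunks above hoist has_atom_attr from an inherent method on FnDef to a default method on the AttrsOwner trait, so any attributed node (including MacroCall, which gains AttrsOwner elsewhere in this commit) can be asked for an attribute such as macro_export. A simplified model of that trait-default-method refactoring, with Vec<String> standing in for the real attribute AST:

    // The attribute check lives as a default method on the trait, so every
    // implementor gets it for free.
    trait AttrsOwner {
        fn attrs(&self) -> Vec<String>;

        fn has_atom_attr(&self, atom: &str) -> bool {
            self.attrs().iter().any(|a| a == atom)
        }
    }

    struct FnDef { attrs: Vec<String> }
    struct MacroCall { attrs: Vec<String> }

    impl AttrsOwner for FnDef {
        fn attrs(&self) -> Vec<String> { self.attrs.clone() }
    }

    impl AttrsOwner for MacroCall {
        fn attrs(&self) -> Vec<String> { self.attrs.clone() }
    }

    fn main() {
        let mac = MacroCall { attrs: vec!["macro_export".to_string()] };
        assert!(mac.has_atom_attr("macro_export"));
        let f = FnDef { attrs: vec![] };
        assert!(!f.has_atom_attr("test"));
    }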
@ -2108,6 +2108,7 @@ impl ToOwned for MacroCall {


impl ast::NameOwner for MacroCall {}
impl ast::AttrsOwner for MacroCall {}
impl MacroCall {
pub fn token_tree(&self) -> Option<&TokenTree> {
super::child_opt(self)
@ -557,7 +557,7 @@ Grammar(
"Name": (),
"NameRef": (),
"MacroCall": (
traits: [ "NameOwner" ],
traits: [ "NameOwner", "AttrsOwner" ],
options: [ "TokenTree", "Path" ],
),
"Attr": ( options: [ ["value", "TokenTree"] ] ),
@ -30,13 +30,13 @@ use std::sync::atomic::{AtomicUsize, Ordering};

#[macro_export]
macro_rules! tested_by {
($ident:ident) => {
($ident:ident) => {{
#[cfg(test)]
{
// sic! use call-site crate
crate::marks::$ident.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
}
};
}};
}

#[macro_export]
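
The tested_by! change above doubles the braces in the expansion, presumably so that an invocation produces a single block expression and therefore works in expression position as well as in statement position. A self-contained illustration of that pattern (the mark name and the println! body are placeholders; the real macro bumps an atomic counter in crate::marks):

    // With `{{ ... }}` the macro expands to one block expression.
    macro_rules! tested_by {
        ($ident:ident) => {{
            #[cfg(test)]
            {
                // Placeholder for crate::marks::$ident.fetch_add(..).
                println!("hit mark: {}", stringify!($ident));
            }
        }};
    }

    fn main() {
        tested_by!(some_mark); // statement position
        let _unit: () = tested_by!(some_other_mark); // expression position also works
    }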