From d659b7a2f03788eb0f4f15e3730bbf65a18ed818 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 27 Nov 2018 21:45:42 +0300 Subject: start descriptors -> hir rename --- crates/ra_analysis/src/completion/mod.rs | 2 +- .../src/completion/reference_completion.rs | 2 +- crates/ra_analysis/src/db.rs | 20 +- crates/ra_analysis/src/descriptors/function/imp.rs | 21 - crates/ra_analysis/src/descriptors/function/mod.rs | 137 ----- .../ra_analysis/src/descriptors/function/scope.rs | 424 ---------------- crates/ra_analysis/src/descriptors/mod.rs | 137 ----- crates/ra_analysis/src/descriptors/module/imp.rs | 229 --------- crates/ra_analysis/src/descriptors/module/mod.rs | 378 -------------- .../ra_analysis/src/descriptors/module/nameres.rs | 549 --------------------- crates/ra_analysis/src/descriptors/path.rs | 148 ------ crates/ra_analysis/src/hir/function/imp.rs | 21 + crates/ra_analysis/src/hir/function/mod.rs | 137 +++++ crates/ra_analysis/src/hir/function/scope.rs | 424 ++++++++++++++++ crates/ra_analysis/src/hir/mod.rs | 144 ++++++ crates/ra_analysis/src/hir/module/imp.rs | 229 +++++++++ crates/ra_analysis/src/hir/module/mod.rs | 378 ++++++++++++++ crates/ra_analysis/src/hir/module/nameres.rs | 549 +++++++++++++++++++++ crates/ra_analysis/src/hir/path.rs | 148 ++++++ crates/ra_analysis/src/imp.rs | 4 +- crates/ra_analysis/src/lib.rs | 8 +- crates/ra_analysis/src/loc2id.rs | 4 +- 22 files changed, 2050 insertions(+), 2043 deletions(-) delete mode 100644 crates/ra_analysis/src/descriptors/function/imp.rs delete mode 100644 crates/ra_analysis/src/descriptors/function/mod.rs delete mode 100644 crates/ra_analysis/src/descriptors/function/scope.rs delete mode 100644 crates/ra_analysis/src/descriptors/mod.rs delete mode 100644 crates/ra_analysis/src/descriptors/module/imp.rs delete mode 100644 crates/ra_analysis/src/descriptors/module/mod.rs delete mode 100644 crates/ra_analysis/src/descriptors/module/nameres.rs delete mode 100644 crates/ra_analysis/src/descriptors/path.rs create mode 100644 crates/ra_analysis/src/hir/function/imp.rs create mode 100644 crates/ra_analysis/src/hir/function/mod.rs create mode 100644 crates/ra_analysis/src/hir/function/scope.rs create mode 100644 crates/ra_analysis/src/hir/mod.rs create mode 100644 crates/ra_analysis/src/hir/module/imp.rs create mode 100644 crates/ra_analysis/src/hir/module/mod.rs create mode 100644 crates/ra_analysis/src/hir/module/nameres.rs create mode 100644 crates/ra_analysis/src/hir/path.rs (limited to 'crates') diff --git a/crates/ra_analysis/src/completion/mod.rs b/crates/ra_analysis/src/completion/mod.rs index 5ef278127..1eb804c85 100644 --- a/crates/ra_analysis/src/completion/mod.rs +++ b/crates/ra_analysis/src/completion/mod.rs @@ -11,7 +11,7 @@ use rustc_hash::{FxHashMap}; use crate::{ db::{self, SyntaxDatabase}, - descriptors::{ + hir::{ module::{ModuleDescriptor} }, Cancelable, FilePosition diff --git a/crates/ra_analysis/src/completion/reference_completion.rs b/crates/ra_analysis/src/completion/reference_completion.rs index 858b52e76..ac6ef1d4a 100644 --- a/crates/ra_analysis/src/completion/reference_completion.rs +++ b/crates/ra_analysis/src/completion/reference_completion.rs @@ -10,7 +10,7 @@ use ra_syntax::{ use crate::{ db::RootDatabase, completion::CompletionItem, - descriptors::{ + hir::{ module::{ModuleDescriptor}, function::FnScopes, Def, diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs index 4e34265fb..fa59b8b3e 100644 --- a/crates/ra_analysis/src/db.rs +++ b/crates/ra_analysis/src/db.rs @@ -7,7 +7,7 @@ 
use salsa::{self, Database}; use crate::{ db, - descriptors, + hir, symbol_index::SymbolIndex, syntax_ptr::SyntaxPtr, loc2id::{IdMaps, IdDatabase}, @@ -122,15 +122,15 @@ salsa::database_storage! { fn file_symbols() for FileSymbolsQuery; fn resolve_syntax_ptr() for ResolveSyntaxPtrQuery; } - impl descriptors::DescriptorDatabase { - fn module_tree() for descriptors::ModuleTreeQuery; - fn fn_scopes() for descriptors::FnScopesQuery; - fn _file_items() for descriptors::FileItemsQuery; - fn _file_item() for descriptors::FileItemQuery; - fn _input_module_items() for descriptors::InputModuleItemsQuery; - fn _item_map() for descriptors::ItemMapQuery; - fn _fn_syntax() for descriptors::FnSyntaxQuery; - fn _submodules() for descriptors::SubmodulesQuery; + impl hir::DescriptorDatabase { + fn module_tree() for hir::ModuleTreeQuery; + fn fn_scopes() for hir::FnScopesQuery; + fn _file_items() for hir::FileItemsQuery; + fn _file_item() for hir::FileItemQuery; + fn _input_module_items() for hir::InputModuleItemsQuery; + fn _item_map() for hir::ItemMapQuery; + fn _fn_syntax() for hir::FnSyntaxQuery; + fn _submodules() for hir::SubmodulesQuery; } } } diff --git a/crates/ra_analysis/src/descriptors/function/imp.rs b/crates/ra_analysis/src/descriptors/function/imp.rs deleted file mode 100644 index e09deba0f..000000000 --- a/crates/ra_analysis/src/descriptors/function/imp.rs +++ /dev/null @@ -1,21 +0,0 @@ -use std::sync::Arc; - -use ra_syntax::ast::{AstNode, FnDef, FnDefNode}; - -use crate::descriptors::{ - function::{FnId, FnScopes}, - DescriptorDatabase, -}; - -/// Resolve `FnId` to the corresponding `SyntaxNode` -pub(crate) fn fn_syntax(db: &impl DescriptorDatabase, fn_id: FnId) -> FnDefNode { - let ptr = db.id_maps().fn_ptr(fn_id); - let syntax = db.resolve_syntax_ptr(ptr); - FnDef::cast(syntax.borrowed()).unwrap().owned() -} - -pub(crate) fn fn_scopes(db: &impl DescriptorDatabase, fn_id: FnId) -> Arc { - let syntax = db._fn_syntax(fn_id); - let res = FnScopes::new(syntax.borrowed()); - Arc::new(res) -} diff --git a/crates/ra_analysis/src/descriptors/function/mod.rs b/crates/ra_analysis/src/descriptors/function/mod.rs deleted file mode 100644 index 86eee5e93..000000000 --- a/crates/ra_analysis/src/descriptors/function/mod.rs +++ /dev/null @@ -1,137 +0,0 @@ -pub(super) mod imp; -mod scope; - -use std::cmp::{max, min}; - -use ra_syntax::{ - ast::{self, AstNode, DocCommentsOwner, NameOwner}, - TextRange, TextUnit, -}; - -use crate::{ - syntax_ptr::SyntaxPtr, FileId, - loc2id::IdDatabase, -}; - -pub(crate) use self::scope::{resolve_local_name, FnScopes}; -pub(crate) use crate::loc2id::FnId; - -impl FnId { - pub(crate) fn get(db: &impl IdDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId { - let ptr = SyntaxPtr::new(file_id, fn_def.syntax()); - db.id_maps().fn_id(ptr) - } -} - -#[derive(Debug, Clone)] -pub struct FnDescriptor { - pub name: String, - pub label: String, - pub ret_type: Option, - pub params: Vec, - pub doc: Option, -} - -impl FnDescriptor { - pub fn new(node: ast::FnDef) -> Option { - let name = node.name()?.text().to_string(); - - let mut doc = None; - - // Strip the body out for the label. 
- let mut label: String = if let Some(body) = node.body() { - let body_range = body.syntax().range(); - let label: String = node - .syntax() - .children() - .filter(|child| !child.range().is_subrange(&body_range)) - .map(|node| node.text().to_string()) - .collect(); - label - } else { - node.syntax().text().to_string() - }; - - if let Some((comment_range, docs)) = FnDescriptor::extract_doc_comments(node) { - let comment_range = comment_range - .checked_sub(node.syntax().range().start()) - .unwrap(); - let start = comment_range.start().to_usize(); - let end = comment_range.end().to_usize(); - - // Remove the comment from the label - label.replace_range(start..end, ""); - - // Massage markdown - let mut processed_lines = Vec::new(); - let mut in_code_block = false; - for line in docs.lines() { - if line.starts_with("```") { - in_code_block = !in_code_block; - } - - let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { - "```rust".into() - } else { - line.to_string() - }; - - processed_lines.push(line); - } - - if !processed_lines.is_empty() { - doc = Some(processed_lines.join("\n")); - } - } - - let params = FnDescriptor::param_list(node); - let ret_type = node.ret_type().map(|r| r.syntax().text().to_string()); - - Some(FnDescriptor { - name, - ret_type, - params, - label: label.trim().to_owned(), - doc, - }) - } - - fn extract_doc_comments(node: ast::FnDef) -> Option<(TextRange, String)> { - if node.doc_comments().count() == 0 { - return None; - } - - let comment_text = node.doc_comment_text(); - - let (begin, end) = node - .doc_comments() - .map(|comment| comment.syntax().range()) - .map(|range| (range.start().to_usize(), range.end().to_usize())) - .fold((std::usize::MAX, std::usize::MIN), |acc, range| { - (min(acc.0, range.0), max(acc.1, range.1)) - }); - - let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); - - Some((range, comment_text)) - } - - fn param_list(node: ast::FnDef) -> Vec { - let mut res = vec![]; - if let Some(param_list) = node.param_list() { - if let Some(self_param) = param_list.self_param() { - res.push(self_param.syntax().text().to_string()) - } - - // Maybe use param.pat here? See if we can just extract the name? 
- //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); - res.extend( - param_list - .params() - .filter_map(|p| p.pat()) - .map(|pat| pat.syntax().text().to_string()), - ); - } - res - } -} diff --git a/crates/ra_analysis/src/descriptors/function/scope.rs b/crates/ra_analysis/src/descriptors/function/scope.rs deleted file mode 100644 index 5307a0a8e..000000000 --- a/crates/ra_analysis/src/descriptors/function/scope.rs +++ /dev/null @@ -1,424 +0,0 @@ -use rustc_hash::{FxHashMap, FxHashSet}; - -use ra_syntax::{ - algo::generate, - ast::{self, ArgListOwner, LoopBodyOwner, NameOwner}, - AstNode, SmolStr, SyntaxNodeRef, -}; - -use crate::{ - syntax_ptr::LocalSyntaxPtr, - arena::{Arena, Id}, -}; - -pub(crate) type ScopeId = Id; - -#[derive(Debug, PartialEq, Eq)] -pub struct FnScopes { - pub(crate) self_param: Option, - scopes: Arena, - scope_for: FxHashMap, -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ScopeEntry { - name: SmolStr, - ptr: LocalSyntaxPtr, -} - -#[derive(Debug, PartialEq, Eq)] -pub(crate) struct ScopeData { - parent: Option, - entries: Vec, -} - -impl FnScopes { - pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes { - let mut scopes = FnScopes { - self_param: fn_def - .param_list() - .and_then(|it| it.self_param()) - .map(|it| LocalSyntaxPtr::new(it.syntax())), - scopes: Arena::default(), - scope_for: FxHashMap::default(), - }; - let root = scopes.root_scope(); - scopes.add_params_bindings(root, fn_def.param_list()); - if let Some(body) = fn_def.body() { - compute_block_scopes(body, &mut scopes, root) - } - scopes - } - pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { - &self.scopes[scope].entries - } - pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator + 'a { - generate(self.scope_for(node), move |&scope| { - self.scopes[scope].parent - }) - } - fn root_scope(&mut self) -> ScopeId { - self.scopes.alloc(ScopeData { - parent: None, - entries: vec![], - }) - } - fn new_scope(&mut self, parent: ScopeId) -> ScopeId { - self.scopes.alloc(ScopeData { - parent: Some(parent), - entries: vec![], - }) - } - fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) { - let entries = pat - .syntax() - .descendants() - .filter_map(ast::BindPat::cast) - .filter_map(ScopeEntry::new); - self.scopes[scope].entries.extend(entries); - } - fn add_params_bindings(&mut self, scope: ScopeId, params: Option) { - params - .into_iter() - .flat_map(|it| it.params()) - .filter_map(|it| it.pat()) - .for_each(|it| self.add_bindings(scope, it)); - } - fn set_scope(&mut self, node: SyntaxNodeRef, scope: ScopeId) { - self.scope_for.insert(LocalSyntaxPtr::new(node), scope); - } - fn scope_for(&self, node: SyntaxNodeRef) -> Option { - node.ancestors() - .map(LocalSyntaxPtr::new) - .filter_map(|it| self.scope_for.get(&it).map(|&scope| scope)) - .next() - } -} - -impl ScopeEntry { - fn new(pat: ast::BindPat) -> Option { - let name = pat.name()?; - let res = ScopeEntry { - name: name.text(), - ptr: LocalSyntaxPtr::new(pat.syntax()), - }; - Some(res) - } - pub(crate) fn name(&self) -> &SmolStr { - &self.name - } - pub(crate) fn ptr(&self) -> LocalSyntaxPtr { - self.ptr - } -} - -fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: ScopeId) { - for stmt in block.statements() { - match stmt { - ast::Stmt::LetStmt(stmt) => { - if let Some(expr) = stmt.initializer() { - scopes.set_scope(expr.syntax(), scope); - compute_expr_scopes(expr, scopes, scope); - } - scope = scopes.new_scope(scope); - if let Some(pat) = stmt.pat() { - 
scopes.add_bindings(scope, pat); - } - } - ast::Stmt::ExprStmt(expr_stmt) => { - if let Some(expr) = expr_stmt.expr() { - scopes.set_scope(expr.syntax(), scope); - compute_expr_scopes(expr, scopes, scope); - } - } - } - } - if let Some(expr) = block.expr() { - scopes.set_scope(expr.syntax(), scope); - compute_expr_scopes(expr, scopes, scope); - } -} - -fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { - match expr { - ast::Expr::IfExpr(e) => { - let cond_scope = e - .condition() - .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); - if let Some(block) = e.then_branch() { - compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); - } - if let Some(block) = e.else_branch() { - compute_block_scopes(block, scopes, scope); - } - } - ast::Expr::BlockExpr(e) => { - if let Some(block) = e.block() { - compute_block_scopes(block, scopes, scope); - } - } - ast::Expr::LoopExpr(e) => { - if let Some(block) = e.loop_body() { - compute_block_scopes(block, scopes, scope); - } - } - ast::Expr::WhileExpr(e) => { - let cond_scope = e - .condition() - .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); - if let Some(block) = e.loop_body() { - compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); - } - } - ast::Expr::ForExpr(e) => { - if let Some(expr) = e.iterable() { - compute_expr_scopes(expr, scopes, scope); - } - let mut scope = scope; - if let Some(pat) = e.pat() { - scope = scopes.new_scope(scope); - scopes.add_bindings(scope, pat); - } - if let Some(block) = e.loop_body() { - compute_block_scopes(block, scopes, scope); - } - } - ast::Expr::LambdaExpr(e) => { - let scope = scopes.new_scope(scope); - scopes.add_params_bindings(scope, e.param_list()); - if let Some(body) = e.body() { - scopes.set_scope(body.syntax(), scope); - compute_expr_scopes(body, scopes, scope); - } - } - ast::Expr::CallExpr(e) => { - compute_call_scopes(e.expr(), e.arg_list(), scopes, scope); - } - ast::Expr::MethodCallExpr(e) => { - compute_call_scopes(e.expr(), e.arg_list(), scopes, scope); - } - ast::Expr::MatchExpr(e) => { - if let Some(expr) = e.expr() { - compute_expr_scopes(expr, scopes, scope); - } - for arm in e.match_arm_list().into_iter().flat_map(|it| it.arms()) { - let scope = scopes.new_scope(scope); - for pat in arm.pats() { - scopes.add_bindings(scope, pat); - } - if let Some(expr) = arm.expr() { - compute_expr_scopes(expr, scopes, scope); - } - } - } - _ => expr - .syntax() - .children() - .filter_map(ast::Expr::cast) - .for_each(|expr| compute_expr_scopes(expr, scopes, scope)), - }; - - fn compute_call_scopes( - receiver: Option, - arg_list: Option, - scopes: &mut FnScopes, - scope: ScopeId, - ) { - arg_list - .into_iter() - .flat_map(|it| it.args()) - .chain(receiver) - .for_each(|expr| compute_expr_scopes(expr, scopes, scope)); - } - - fn compute_cond_scopes( - cond: ast::Condition, - scopes: &mut FnScopes, - scope: ScopeId, - ) -> Option { - if let Some(expr) = cond.expr() { - compute_expr_scopes(expr, scopes, scope); - } - if let Some(pat) = cond.pat() { - let s = scopes.new_scope(scope); - scopes.add_bindings(s, pat); - Some(s) - } else { - None - } - } -} - -pub fn resolve_local_name<'a>( - name_ref: ast::NameRef, - scopes: &'a FnScopes, -) -> Option<&'a ScopeEntry> { - let mut shadowed = FxHashSet::default(); - let ret = scopes - .scope_chain(name_ref.syntax()) - .flat_map(|scope| scopes.entries(scope).iter()) - .filter(|entry| shadowed.insert(entry.name())) - .filter(|entry| entry.name() == &name_ref.text()) - .nth(0); - ret -} - 
-#[cfg(test)] -mod tests { - use ra_editor::find_node_at_offset; - use ra_syntax::SourceFileNode; - use test_utils::extract_offset; - - use super::*; - - fn do_check(code: &str, expected: &[&str]) { - let (off, code) = extract_offset(code); - let code = { - let mut buf = String::new(); - let off = u32::from(off) as usize; - buf.push_str(&code[..off]); - buf.push_str("marker"); - buf.push_str(&code[off..]); - buf - }; - let file = SourceFileNode::parse(&code); - let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); - let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); - let scopes = FnScopes::new(fn_def); - let actual = scopes - .scope_chain(marker.syntax()) - .flat_map(|scope| scopes.entries(scope)) - .map(|it| it.name()) - .collect::>(); - assert_eq!(actual.as_slice(), expected); - } - - #[test] - fn test_lambda_scope() { - do_check( - r" - fn quux(foo: i32) { - let f = |bar, baz: i32| { - <|> - }; - }", - &["bar", "baz", "foo"], - ); - } - - #[test] - fn test_call_scope() { - do_check( - r" - fn quux() { - f(|x| <|> ); - }", - &["x"], - ); - } - - #[test] - fn test_metod_call_scope() { - do_check( - r" - fn quux() { - z.f(|x| <|> ); - }", - &["x"], - ); - } - - #[test] - fn test_loop_scope() { - do_check( - r" - fn quux() { - loop { - let x = (); - <|> - }; - }", - &["x"], - ); - } - - #[test] - fn test_match() { - do_check( - r" - fn quux() { - match () { - Some(x) => { - <|> - } - }; - }", - &["x"], - ); - } - - #[test] - fn test_shadow_variable() { - do_check( - r" - fn foo(x: String) { - let x : &str = &x<|>; - }", - &["x"], - ); - } - - fn do_check_local_name(code: &str, expected_offset: u32) { - let (off, code) = extract_offset(code); - let file = SourceFileNode::parse(&code); - let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); - let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); - - let scopes = FnScopes::new(fn_def); - - let local_name_entry = resolve_local_name(name_ref, &scopes).unwrap(); - let local_name = local_name_entry.ptr().resolve(&file); - let expected_name = - find_node_at_offset::(file.syntax(), expected_offset.into()).unwrap(); - assert_eq!(local_name.range(), expected_name.syntax().range()); - } - - #[test] - fn test_resolve_local_name() { - do_check_local_name( - r#" - fn foo(x: i32, y: u32) { - { - let z = x * 2; - } - { - let t = x<|> * 3; - } - }"#, - 21, - ); - } - - #[test] - fn test_resolve_local_name_declaration() { - do_check_local_name( - r#" - fn foo(x: String) { - let x : &str = &x<|>; - }"#, - 21, - ); - } - - #[test] - fn test_resolve_local_name_shadow() { - do_check_local_name( - r" - fn foo(x: String) { - let x : &str = &x; - x<|> - }", - 46, - ); - } -} diff --git a/crates/ra_analysis/src/descriptors/mod.rs b/crates/ra_analysis/src/descriptors/mod.rs deleted file mode 100644 index 7a1bcf447..000000000 --- a/crates/ra_analysis/src/descriptors/mod.rs +++ /dev/null @@ -1,137 +0,0 @@ -pub(crate) mod function; -pub(crate) mod module; -mod path; - -use std::sync::Arc; - -use ra_syntax::{ - ast::{self, FnDefNode, AstNode}, - TextRange, SyntaxNode, -}; - -use crate::{ - FileId, - db::SyntaxDatabase, - descriptors::function::{resolve_local_name, FnId, FnScopes}, - descriptors::module::{ - ModuleId, ModuleTree, ModuleSource, ModuleDescriptor, - nameres::{ItemMap, InputModuleItems, FileItems} - }, - input::SourceRootId, - loc2id::{IdDatabase, DefId, DefLoc}, - syntax_ptr::LocalSyntaxPtr, - Cancelable, -}; - -pub(crate) use self::path::{Path, PathKind}; 
-pub(crate) use self::module::nameres::FileItemId; - -salsa::query_group! { - pub(crate) trait DescriptorDatabase: SyntaxDatabase + IdDatabase { - fn fn_scopes(fn_id: FnId) -> Arc { - type FnScopesQuery; - use fn function::imp::fn_scopes; - } - - fn _file_items(file_id: FileId) -> Arc { - type FileItemsQuery; - storage dependencies; - use fn module::nameres::file_items; - } - - fn _file_item(file_id: FileId, file_item_id: FileItemId) -> SyntaxNode { - type FileItemQuery; - storage dependencies; - use fn module::nameres::file_item; - } - - fn _input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable> { - type InputModuleItemsQuery; - use fn module::nameres::input_module_items; - } - fn _item_map(source_root_id: SourceRootId) -> Cancelable> { - type ItemMapQuery; - use fn module::nameres::item_map; - } - fn _module_tree(source_root_id: SourceRootId) -> Cancelable> { - type ModuleTreeQuery; - use fn module::imp::module_tree; - } - fn _fn_syntax(fn_id: FnId) -> FnDefNode { - type FnSyntaxQuery; - // Don't retain syntax trees in memory - storage dependencies; - use fn function::imp::fn_syntax; - } - fn _submodules(source: ModuleSource) -> Cancelable>> { - type SubmodulesQuery; - use fn module::imp::submodules; - } - } -} - -pub(crate) enum Def { - Module(ModuleDescriptor), - Item, -} - -impl DefId { - pub(crate) fn resolve(self, db: &impl DescriptorDatabase) -> Cancelable { - let loc = db.id_maps().def_loc(self); - let res = match loc { - DefLoc::Module { id, source_root } => { - let descr = ModuleDescriptor::new(db, source_root, id)?; - Def::Module(descr) - } - DefLoc::Item { .. } => Def::Item, - }; - Ok(res) - } -} - -#[derive(Debug)] -pub struct ReferenceDescriptor { - pub range: TextRange, - pub name: String, -} - -#[derive(Debug)] -pub struct DeclarationDescriptor<'a> { - pat: ast::BindPat<'a>, - pub range: TextRange, -} - -impl<'a> DeclarationDescriptor<'a> { - pub fn new(pat: ast::BindPat) -> DeclarationDescriptor { - let range = pat.syntax().range(); - - DeclarationDescriptor { pat, range } - } - - pub fn find_all_refs(&self) -> Vec { - let name_ptr = LocalSyntaxPtr::new(self.pat.syntax()); - - let fn_def = match self.pat.syntax().ancestors().find_map(ast::FnDef::cast) { - Some(def) => def, - None => return Default::default(), - }; - - let fn_scopes = FnScopes::new(fn_def); - - let refs: Vec<_> = fn_def - .syntax() - .descendants() - .filter_map(ast::NameRef::cast) - .filter(|name_ref| match resolve_local_name(*name_ref, &fn_scopes) { - None => false, - Some(entry) => entry.ptr() == name_ptr, - }) - .map(|name_ref| ReferenceDescriptor { - name: name_ref.syntax().text().to_string(), - range: name_ref.syntax().range(), - }) - .collect(); - - refs - } -} diff --git a/crates/ra_analysis/src/descriptors/module/imp.rs b/crates/ra_analysis/src/descriptors/module/imp.rs deleted file mode 100644 index 80892acb7..000000000 --- a/crates/ra_analysis/src/descriptors/module/imp.rs +++ /dev/null @@ -1,229 +0,0 @@ -use std::sync::Arc; - -use ra_syntax::{ - ast::{self, NameOwner}, - SmolStr, -}; -use relative_path::RelativePathBuf; -use rustc_hash::{FxHashMap, FxHashSet}; - -use crate::{ - db, - descriptors::DescriptorDatabase, - input::{SourceRoot, SourceRootId}, - Cancelable, FileId, FileResolverImp, -}; - -use super::{ - LinkData, LinkId, ModuleData, ModuleId, ModuleSource, ModuleSourceNode, - ModuleTree, Problem, -}; - -#[derive(Clone, Hash, PartialEq, Eq, Debug)] -pub(crate) enum Submodule { - Declaration(SmolStr), - Definition(SmolStr, ModuleSource), -} - -impl Submodule 
{ - fn name(&self) -> &SmolStr { - match self { - Submodule::Declaration(name) => name, - Submodule::Definition(name, _) => name, - } - } -} - -pub(crate) fn submodules( - db: &impl DescriptorDatabase, - source: ModuleSource, -) -> Cancelable>> { - db::check_canceled(db)?; - let file_id = source.file_id(); - let submodules = match source.resolve(db) { - ModuleSourceNode::SourceFile(it) => collect_submodules(file_id, it.borrowed()), - ModuleSourceNode::Module(it) => it - .borrowed() - .item_list() - .map(|it| collect_submodules(file_id, it)) - .unwrap_or_else(Vec::new), - }; - return Ok(Arc::new(submodules)); - - fn collect_submodules<'a>( - file_id: FileId, - root: impl ast::ModuleItemOwner<'a>, - ) -> Vec { - modules(root) - .map(|(name, m)| { - if m.has_semi() { - Submodule::Declaration(name) - } else { - let src = ModuleSource::new_inline(file_id, m); - Submodule::Definition(name, src) - } - }) - .collect() - } -} - -pub(crate) fn modules<'a>( - root: impl ast::ModuleItemOwner<'a>, -) -> impl Iterator)> { - root.items() - .filter_map(|item| match item { - ast::ModuleItem::Module(m) => Some(m), - _ => None, - }) - .filter_map(|module| { - let name = module.name()?.text(); - Some((name, module)) - }) -} - -pub(crate) fn module_tree( - db: &impl DescriptorDatabase, - source_root: SourceRootId, -) -> Cancelable> { - db::check_canceled(db)?; - let res = create_module_tree(db, source_root)?; - Ok(Arc::new(res)) -} - -fn create_module_tree<'a>( - db: &impl DescriptorDatabase, - source_root: SourceRootId, -) -> Cancelable { - let mut tree = ModuleTree::default(); - - let mut roots = FxHashMap::default(); - let mut visited = FxHashSet::default(); - - let source_root = db.source_root(source_root); - for &file_id in source_root.files.iter() { - let source = ModuleSource::SourceFile(file_id); - if visited.contains(&source) { - continue; // TODO: use explicit crate_roots here - } - assert!(!roots.contains_key(&file_id)); - let module_id = build_subtree( - db, - &source_root, - &mut tree, - &mut visited, - &mut roots, - None, - source, - )?; - roots.insert(file_id, module_id); - } - Ok(tree) -} - -fn build_subtree( - db: &impl DescriptorDatabase, - source_root: &SourceRoot, - tree: &mut ModuleTree, - visited: &mut FxHashSet, - roots: &mut FxHashMap, - parent: Option, - source: ModuleSource, -) -> Cancelable { - visited.insert(source); - let id = tree.push_mod(ModuleData { - source, - parent, - children: Vec::new(), - }); - for sub in db._submodules(source)?.iter() { - let link = tree.push_link(LinkData { - name: sub.name().clone(), - owner: id, - points_to: Vec::new(), - problem: None, - }); - - let (points_to, problem) = match sub { - Submodule::Declaration(name) => { - let (points_to, problem) = - resolve_submodule(source, &name, &source_root.file_resolver); - let points_to = points_to - .into_iter() - .map(|file_id| match roots.remove(&file_id) { - Some(module_id) => { - tree.mods[module_id].parent = Some(link); - Ok(module_id) - } - None => build_subtree( - db, - source_root, - tree, - visited, - roots, - Some(link), - ModuleSource::SourceFile(file_id), - ), - }) - .collect::>>()?; - (points_to, problem) - } - Submodule::Definition(_name, submodule_source) => { - let points_to = build_subtree( - db, - source_root, - tree, - visited, - roots, - Some(link), - *submodule_source, - )?; - (vec![points_to], None) - } - }; - - tree.links[link].points_to = points_to; - tree.links[link].problem = problem; - } - Ok(id) -} - -fn resolve_submodule( - source: ModuleSource, - name: &SmolStr, - file_resolver: 
&FileResolverImp, -) -> (Vec, Option) { - let file_id = match source { - ModuleSource::SourceFile(it) => it, - ModuleSource::Module(..) => { - // TODO - return (Vec::new(), None); - } - }; - let mod_name = file_resolver.file_stem(file_id); - let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; - - let file_mod = RelativePathBuf::from(format!("../{}.rs", name)); - let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name)); - let points_to: Vec; - let problem: Option; - if is_dir_owner { - points_to = [&file_mod, &dir_mod] - .iter() - .filter_map(|path| file_resolver.resolve(file_id, path)) - .collect(); - problem = if points_to.is_empty() { - Some(Problem::UnresolvedModule { - candidate: file_mod, - }) - } else { - None - } - } else { - points_to = Vec::new(); - problem = Some(Problem::NotDirOwner { - move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)), - candidate: file_mod, - }); - } - (points_to, problem) -} diff --git a/crates/ra_analysis/src/descriptors/module/mod.rs b/crates/ra_analysis/src/descriptors/module/mod.rs deleted file mode 100644 index 78911d5d9..000000000 --- a/crates/ra_analysis/src/descriptors/module/mod.rs +++ /dev/null @@ -1,378 +0,0 @@ -pub(super) mod imp; -pub(super) mod nameres; - -use std::sync::Arc; - -use ra_editor::find_node_at_offset; - -use ra_syntax::{ - algo::generate, - ast::{self, AstNode, NameOwner}, - SmolStr, SyntaxNode, -}; -use relative_path::RelativePathBuf; - -use crate::{ - db::SyntaxDatabase, syntax_ptr::SyntaxPtr, FileId, FilePosition, Cancelable, - descriptors::{Path, PathKind, DescriptorDatabase}, - input::SourceRootId, - arena::{Arena, Id}, - loc2id::{DefLoc, DefId}, -}; - -pub(crate) use self::nameres::ModuleScope; - -/// `ModuleDescriptor` is API entry point to get all the information -/// about a particular module. -#[derive(Debug, Clone)] -pub(crate) struct ModuleDescriptor { - tree: Arc, - source_root_id: SourceRootId, - module_id: ModuleId, -} - -impl ModuleDescriptor { - /// Lookup `ModuleDescriptor` by `FileId`. Note that this is inherently - /// lossy transformation: in general, a single source might correspond to - /// several modules. - pub fn guess_from_file_id( - db: &impl DescriptorDatabase, - file_id: FileId, - ) -> Cancelable> { - ModuleDescriptor::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id)) - } - - /// Lookup `ModuleDescriptor` by position in the source code. Note that this - /// is inherently lossy transformation: in general, a single source might - /// correspond to several modules. 
- pub fn guess_from_position( - db: &impl DescriptorDatabase, - position: FilePosition, - ) -> Cancelable> { - let file = db.file_syntax(position.file_id); - let module_source = match find_node_at_offset::(file.syntax(), position.offset) - { - Some(m) if !m.has_semi() => ModuleSource::new_inline(position.file_id, m), - _ => ModuleSource::SourceFile(position.file_id), - }; - ModuleDescriptor::guess_from_source(db, position.file_id, module_source) - } - - fn guess_from_source( - db: &impl DescriptorDatabase, - file_id: FileId, - module_source: ModuleSource, - ) -> Cancelable> { - let source_root_id = db.file_source_root(file_id); - let module_tree = db._module_tree(source_root_id)?; - - let res = match module_tree.any_module_for_source(module_source) { - None => None, - Some(module_id) => Some(ModuleDescriptor { - tree: module_tree, - source_root_id, - module_id, - }), - }; - Ok(res) - } - - pub(super) fn new( - db: &impl DescriptorDatabase, - source_root_id: SourceRootId, - module_id: ModuleId, - ) -> Cancelable { - let module_tree = db._module_tree(source_root_id)?; - let res = ModuleDescriptor { - tree: module_tree, - source_root_id, - module_id, - }; - Ok(res) - } - - /// Returns `mod foo;` or `mod foo {}` node whihc declared this module. - /// Returns `None` for the root module - pub fn parent_link_source( - &self, - db: &impl DescriptorDatabase, - ) -> Option<(FileId, ast::ModuleNode)> { - let link = self.module_id.parent_link(&self.tree)?; - let file_id = link.owner(&self.tree).source(&self.tree).file_id(); - let src = link.bind_source(&self.tree, db); - Some((file_id, src)) - } - - pub fn source(&self) -> ModuleSource { - self.module_id.source(&self.tree) - } - - /// Parent module. Returns `None` if this is a root module. - pub fn parent(&self) -> Option { - let parent_id = self.module_id.parent(&self.tree)?; - Some(ModuleDescriptor { - module_id: parent_id, - ..self.clone() - }) - } - - /// The root of the tree this module is part of - pub fn crate_root(&self) -> ModuleDescriptor { - let root_id = self.module_id.crate_root(&self.tree); - ModuleDescriptor { - module_id: root_id, - ..self.clone() - } - } - - /// `name` is `None` for the crate's root module - #[allow(unused)] - pub fn name(&self) -> Option { - let link = self.module_id.parent_link(&self.tree)?; - Some(link.name(&self.tree)) - } - - pub fn def_id(&self, db: &impl DescriptorDatabase) -> DefId { - let def_loc = DefLoc::Module { - id: self.module_id, - source_root: self.source_root_id, - }; - db.id_maps().def_id(def_loc) - } - - /// Finds a child module with the specified name. - pub fn child(&self, name: &str) -> Option { - let child_id = self.module_id.child(&self.tree, name)?; - Some(ModuleDescriptor { - module_id: child_id, - ..self.clone() - }) - } - - /// Returns a `ModuleScope`: a set of items, visible in this module. 
- pub(crate) fn scope(&self, db: &impl DescriptorDatabase) -> Cancelable { - let item_map = db._item_map(self.source_root_id)?; - let res = item_map.per_module[&self.module_id].clone(); - Ok(res) - } - - pub(crate) fn resolve_path( - &self, - db: &impl DescriptorDatabase, - path: Path, - ) -> Cancelable> { - let mut curr = match path.kind { - PathKind::Crate => self.crate_root(), - PathKind::Self_ | PathKind::Plain => self.clone(), - PathKind::Super => ctry!(self.parent()), - } - .def_id(db); - - let segments = path.segments; - for name in segments.iter() { - let module = match db.id_maps().def_loc(curr) { - DefLoc::Module { id, source_root } => ModuleDescriptor::new(db, source_root, id)?, - _ => return Ok(None), - }; - let scope = module.scope(db)?; - curr = ctry!(ctry!(scope.get(&name)).def_id); - } - Ok(Some(curr)) - } - - pub fn problems(&self, db: &impl DescriptorDatabase) -> Vec<(SyntaxNode, Problem)> { - self.module_id.problems(&self.tree, db) - } -} - -/// Phisically, rust source is organized as a set of files, but logically it is -/// organized as a tree of modules. Usually, a single file corresponds to a -/// single module, but it is not nessary the case. -/// -/// Module encapsulate the logic of transitioning from the fuzzy world of files -/// (which can have multiple parents) to the precise world of modules (which -/// always have one parent). -#[derive(Default, Debug, PartialEq, Eq)] -pub(crate) struct ModuleTree { - mods: Arena, - links: Arena, -} - -impl ModuleTree { - fn modules<'a>(&'a self) -> impl Iterator + 'a { - self.mods.iter().map(|(id, _)| id) - } - - fn modules_for_source(&self, source: ModuleSource) -> Vec { - self.mods - .iter() - .filter(|(_idx, it)| it.source == source) - .map(|(idx, _)| idx) - .collect() - } - - fn any_module_for_source(&self, source: ModuleSource) -> Option { - self.modules_for_source(source).pop() - } -} - -/// `ModuleSource` is the syntax tree element that produced this module: -/// either a file, or an inlinde module. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub(crate) enum ModuleSource { - SourceFile(FileId), - #[allow(dead_code)] - Module(SyntaxPtr), -} - -/// An owned syntax node for a module. Unlike `ModuleSource`, -/// this holds onto the AST for the whole file. -enum ModuleSourceNode { - SourceFile(ast::SourceFileNode), - Module(ast::ModuleNode), -} - -pub(crate) type ModuleId = Id; -type LinkId = Id; - -#[derive(Clone, Debug, Hash, PartialEq, Eq)] -pub enum Problem { - UnresolvedModule { - candidate: RelativePathBuf, - }, - NotDirOwner { - move_to: RelativePathBuf, - candidate: RelativePathBuf, - }, -} - -impl ModuleId { - fn source(self, tree: &ModuleTree) -> ModuleSource { - tree.mods[self].source - } - fn parent_link(self, tree: &ModuleTree) -> Option { - tree.mods[self].parent - } - fn parent(self, tree: &ModuleTree) -> Option { - let link = self.parent_link(tree)?; - Some(tree.links[link].owner) - } - fn crate_root(self, tree: &ModuleTree) -> ModuleId { - generate(Some(self), move |it| it.parent(tree)) - .last() - .unwrap() - } - fn child(self, tree: &ModuleTree, name: &str) -> Option { - let link = tree.mods[self] - .children - .iter() - .map(|&it| &tree.links[it]) - .find(|it| it.name == name)?; - Some(*link.points_to.first()?) 
- } - fn children<'a>(self, tree: &'a ModuleTree) -> impl Iterator + 'a { - tree.mods[self].children.iter().filter_map(move |&it| { - let link = &tree.links[it]; - let module = *link.points_to.first()?; - Some((link.name.clone(), module)) - }) - } - fn problems(self, tree: &ModuleTree, db: &impl SyntaxDatabase) -> Vec<(SyntaxNode, Problem)> { - tree.mods[self] - .children - .iter() - .filter_map(|&it| { - let p = tree.links[it].problem.clone()?; - let s = it.bind_source(tree, db); - let s = s.borrowed().name().unwrap().syntax().owned(); - Some((s, p)) - }) - .collect() - } -} - -impl LinkId { - fn owner(self, tree: &ModuleTree) -> ModuleId { - tree.links[self].owner - } - fn name(self, tree: &ModuleTree) -> SmolStr { - tree.links[self].name.clone() - } - fn bind_source<'a>(self, tree: &ModuleTree, db: &impl SyntaxDatabase) -> ast::ModuleNode { - let owner = self.owner(tree); - match owner.source(tree).resolve(db) { - ModuleSourceNode::SourceFile(root) => { - let ast = imp::modules(root.borrowed()) - .find(|(name, _)| name == &tree.links[self].name) - .unwrap() - .1; - ast.owned() - } - ModuleSourceNode::Module(it) => it, - } - } -} - -#[derive(Debug, PartialEq, Eq, Hash)] -pub(crate) struct ModuleData { - source: ModuleSource, - parent: Option, - children: Vec, -} - -impl ModuleSource { - fn new_inline(file_id: FileId, module: ast::Module) -> ModuleSource { - assert!(!module.has_semi()); - let ptr = SyntaxPtr::new(file_id, module.syntax()); - ModuleSource::Module(ptr) - } - - pub(crate) fn as_file(self) -> Option { - match self { - ModuleSource::SourceFile(f) => Some(f), - ModuleSource::Module(..) => None, - } - } - - pub(crate) fn file_id(self) -> FileId { - match self { - ModuleSource::SourceFile(f) => f, - ModuleSource::Module(ptr) => ptr.file_id(), - } - } - - fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode { - match self { - ModuleSource::SourceFile(file_id) => { - let syntax = db.file_syntax(file_id); - ModuleSourceNode::SourceFile(syntax.ast().owned()) - } - ModuleSource::Module(ptr) => { - let syntax = db.resolve_syntax_ptr(ptr); - let syntax = syntax.borrowed(); - let module = ast::Module::cast(syntax).unwrap(); - ModuleSourceNode::Module(module.owned()) - } - } - } -} - -#[derive(Hash, Debug, PartialEq, Eq)] -struct LinkData { - owner: ModuleId, - name: SmolStr, - points_to: Vec, - problem: Option, -} - -impl ModuleTree { - fn push_mod(&mut self, data: ModuleData) -> ModuleId { - self.mods.alloc(data) - } - fn push_link(&mut self, data: LinkData) -> LinkId { - let owner = data.owner; - let id = self.links.alloc(data); - self.mods[owner].children.push(id); - id - } -} diff --git a/crates/ra_analysis/src/descriptors/module/nameres.rs b/crates/ra_analysis/src/descriptors/module/nameres.rs deleted file mode 100644 index d2964f67f..000000000 --- a/crates/ra_analysis/src/descriptors/module/nameres.rs +++ /dev/null @@ -1,549 +0,0 @@ -//! Name resolution algorithm. The end result of the algorithm is `ItemMap`: a -//! map with maps each module to it's scope: the set of items, visible in the -//! module. That is, we only resolve imports here, name resolution of item -//! bodies will be done in a separate step. -//! -//! Like Rustc, we use an interative per-crate algorithm: we start with scopes -//! containing only directly defined items, and then iteratively resolve -//! imports. -//! -//! To make this work nicely in the IDE scenarios, we place `InputModuleItems` -//! in between raw syntax and name resolution. `InputModuleItems` are computed -//! 
using only the module's syntax, and it is all directly defined items plus -//! imports. The plain is to make `InputModuleItems` independent of local -//! modifications (that is, typing inside a function shold not change IMIs), -//! such that the results of name resolution can be preserved unless the module -//! structure itself is modified. -use std::{ - sync::Arc, - time::Instant, - ops::Index, -}; - -use rustc_hash::FxHashMap; - -use ra_syntax::{ - SyntaxNode, SyntaxNodeRef, TextRange, - SmolStr, SyntaxKind::{self, *}, - ast::{self, ModuleItemOwner, AstNode} -}; - -use crate::{ - Cancelable, FileId, - loc2id::{DefId, DefLoc}, - descriptors::{ - Path, PathKind, - DescriptorDatabase, - module::{ModuleId, ModuleTree, ModuleSourceNode}, - }, - input::SourceRootId, - arena::{Arena, Id} -}; - -/// Identifier of item within a specific file. This is stable over reparses, so -/// it's OK to use it as a salsa key/value. -pub(crate) type FileItemId = Id; - -/// Maps item's `SyntaxNode`s to `FileItemId` and back. -#[derive(Debug, PartialEq, Eq, Default)] -pub(crate) struct FileItems { - arena: Arena, -} - -impl FileItems { - fn alloc(&mut self, item: SyntaxNode) -> FileItemId { - self.arena.alloc(item) - } - fn id_of(&self, item: SyntaxNodeRef) -> FileItemId { - let (id, _item) = self - .arena - .iter() - .find(|(_id, i)| i.borrowed() == item) - .unwrap(); - id - } -} - -impl Index for FileItems { - type Output = SyntaxNode; - fn index(&self, idx: FileItemId) -> &SyntaxNode { - &self.arena[idx] - } -} - -pub(crate) fn file_items(db: &impl DescriptorDatabase, file_id: FileId) -> Arc { - let source_file = db.file_syntax(file_id); - let source_file = source_file.borrowed(); - let mut res = FileItems::default(); - source_file - .syntax() - .descendants() - .filter_map(ast::ModuleItem::cast) - .map(|it| it.syntax().owned()) - .for_each(|it| { - res.alloc(it); - }); - Arc::new(res) -} - -pub(crate) fn file_item( - db: &impl DescriptorDatabase, - file_id: FileId, - file_item_id: FileItemId, -) -> SyntaxNode { - db._file_items(file_id)[file_item_id].clone() -} - -/// Item map is the result of the name resolution. Item map contains, for each -/// module, the set of visible items. -#[derive(Default, Debug, PartialEq, Eq)] -pub(crate) struct ItemMap { - pub(crate) per_module: FxHashMap, -} - -#[derive(Debug, Default, PartialEq, Eq, Clone)] -pub(crate) struct ModuleScope { - items: FxHashMap, -} - -impl ModuleScope { - pub(crate) fn entries<'a>(&'a self) -> impl Iterator + 'a { - self.items.iter() - } - pub(crate) fn get(&self, name: &SmolStr) -> Option<&Resolution> { - self.items.get(name) - } -} - -/// A set of items and imports declared inside a module, without relation to -/// other modules. -/// -/// This stands in-between raw syntax and name resolution and alow us to avoid -/// recomputing name res: if `InputModuleItems` are the same, we can avoid -/// running name resolution. 
-#[derive(Debug, Default, PartialEq, Eq)] -pub(crate) struct InputModuleItems { - items: Vec, - imports: Vec, -} - -#[derive(Debug, PartialEq, Eq)] -struct ModuleItem { - id: FileItemId, - name: SmolStr, - kind: SyntaxKind, - vis: Vis, -} - -#[derive(Debug, PartialEq, Eq)] -enum Vis { - // Priv, - Other, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -struct Import { - path: Path, - kind: ImportKind, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) struct NamedImport { - file_item_id: FileItemId, - relative_range: TextRange, -} - -impl NamedImport { - pub(crate) fn range(&self, db: &impl DescriptorDatabase, file_id: FileId) -> TextRange { - let syntax = db._file_item(file_id, self.file_item_id); - let offset = syntax.borrowed().range().start(); - self.relative_range + offset - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -enum ImportKind { - Glob, - Named(NamedImport), -} - -pub(crate) fn input_module_items( - db: &impl DescriptorDatabase, - source_root: SourceRootId, - module_id: ModuleId, -) -> Cancelable> { - let module_tree = db._module_tree(source_root)?; - let source = module_id.source(&module_tree); - let file_items = db._file_items(source.file_id()); - let res = match source.resolve(db) { - ModuleSourceNode::SourceFile(it) => { - let items = it.borrowed().items(); - InputModuleItems::new(&file_items, items) - } - ModuleSourceNode::Module(it) => { - let items = it - .borrowed() - .item_list() - .into_iter() - .flat_map(|it| it.items()); - InputModuleItems::new(&file_items, items) - } - }; - Ok(Arc::new(res)) -} - -pub(crate) fn item_map( - db: &impl DescriptorDatabase, - source_root: SourceRootId, -) -> Cancelable> { - let start = Instant::now(); - let module_tree = db._module_tree(source_root)?; - let input = module_tree - .modules() - .map(|id| { - let items = db._input_module_items(source_root, id)?; - Ok((id, items)) - }) - .collect::>>()?; - let mut resolver = Resolver { - db: db, - input: &input, - source_root, - module_tree, - result: ItemMap::default(), - }; - resolver.resolve()?; - let res = resolver.result; - let elapsed = start.elapsed(); - log::info!("item_map: {:?}", elapsed); - Ok(Arc::new(res)) -} - -/// Resolution is basically `DefId` atm, but it should account for stuff like -/// multiple namespaces, ambiguity and errors. -#[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct Resolution { - /// None for unresolved - pub(crate) def_id: Option, - /// ident by whitch this is imported into local scope. 
- pub(crate) import: Option, -} - -// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -// enum Namespace { -// Types, -// Values, -// } - -// #[derive(Debug)] -// struct PerNs { -// types: Option, -// values: Option, -// } - -impl InputModuleItems { - fn new<'a>( - file_items: &FileItems, - items: impl Iterator>, - ) -> InputModuleItems { - let mut res = InputModuleItems::default(); - for item in items { - res.add_item(file_items, item); - } - res - } - - fn add_item(&mut self, file_items: &FileItems, item: ast::ModuleItem) -> Option<()> { - match item { - ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::FnDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::TraitDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::TypeDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::ImplItem(_) => { - // impls don't define items - } - ast::ModuleItem::UseItem(it) => self.add_use_item(file_items, it), - ast::ModuleItem::ExternCrateItem(_) => { - // TODO - } - ast::ModuleItem::ConstDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::StaticDef(it) => self.items.push(ModuleItem::new(file_items, it)?), - ast::ModuleItem::Module(it) => self.items.push(ModuleItem::new(file_items, it)?), - } - Some(()) - } - - fn add_use_item(&mut self, file_items: &FileItems, item: ast::UseItem) { - let file_item_id = file_items.id_of(item.syntax()); - let start_offset = item.syntax().range().start(); - Path::expand_use_item(item, |path, range| { - let kind = match range { - None => ImportKind::Glob, - Some(range) => ImportKind::Named(NamedImport { - file_item_id, - relative_range: range - start_offset, - }), - }; - self.imports.push(Import { kind, path }) - }) - } -} - -impl ModuleItem { - fn new<'a>(file_items: &FileItems, item: impl ast::NameOwner<'a>) -> Option { - let name = item.name()?.text(); - let kind = item.syntax().kind(); - let vis = Vis::Other; - let id = file_items.id_of(item.syntax()); - let res = ModuleItem { - id, - name, - kind, - vis, - }; - Some(res) - } -} - -struct Resolver<'a, DB> { - db: &'a DB, - input: &'a FxHashMap>, - source_root: SourceRootId, - module_tree: Arc, - result: ItemMap, -} - -impl<'a, DB> Resolver<'a, DB> -where - DB: DescriptorDatabase, -{ - fn resolve(&mut self) -> Cancelable<()> { - for (&module_id, items) in self.input.iter() { - self.populate_module(module_id, items) - } - - for &module_id in self.input.keys() { - crate::db::check_canceled(self.db)?; - self.resolve_imports(module_id); - } - Ok(()) - } - - fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) { - let file_id = module_id.source(&self.module_tree).file_id(); - - let mut module_items = ModuleScope::default(); - - for import in input.imports.iter() { - if let Some(name) = import.path.segments.iter().last() { - if let ImportKind::Named(import) = import.kind { - module_items.items.insert( - name.clone(), - Resolution { - def_id: None, - import: Some(import), - }, - ); - } - } - } - - for item in input.items.iter() { - if item.kind == MODULE { - // handle submodules separatelly - continue; - } - let def_loc = DefLoc::Item { - file_id, - id: item.id, - }; - let def_id = self.db.id_maps().def_id(def_loc); - let resolution = Resolution { - def_id: Some(def_id), - import: None, - }; - module_items.items.insert(item.name.clone(), 
resolution); - } - - for (name, mod_id) in module_id.children(&self.module_tree) { - let def_loc = DefLoc::Module { - id: mod_id, - source_root: self.source_root, - }; - let def_id = self.db.id_maps().def_id(def_loc); - let resolution = Resolution { - def_id: Some(def_id), - import: None, - }; - module_items.items.insert(name, resolution); - } - - self.result.per_module.insert(module_id, module_items); - } - - fn resolve_imports(&mut self, module_id: ModuleId) { - for import in self.input[&module_id].imports.iter() { - self.resolve_import(module_id, import); - } - } - - fn resolve_import(&mut self, module_id: ModuleId, import: &Import) { - let ptr = match import.kind { - ImportKind::Glob => return, - ImportKind::Named(ptr) => ptr, - }; - - let mut curr = match import.path.kind { - // TODO: handle extern crates - PathKind::Plain => return, - PathKind::Self_ => module_id, - PathKind::Super => { - match module_id.parent(&self.module_tree) { - Some(it) => it, - // TODO: error - None => return, - } - } - PathKind::Crate => module_id.crate_root(&self.module_tree), - }; - - for (i, name) in import.path.segments.iter().enumerate() { - let is_last = i == import.path.segments.len() - 1; - - let def_id = match self.result.per_module[&curr].items.get(name) { - None => return, - Some(res) => match res.def_id { - Some(it) => it, - None => return, - }, - }; - - if !is_last { - curr = match self.db.id_maps().def_loc(def_id) { - DefLoc::Module { id, .. } => id, - _ => return, - } - } else { - self.update(module_id, |items| { - let res = Resolution { - def_id: Some(def_id), - import: Some(ptr), - }; - items.items.insert(name.clone(), res); - }) - } - } - } - - fn update(&mut self, module_id: ModuleId, f: impl FnOnce(&mut ModuleScope)) { - let module_items = self.result.per_module.get_mut(&module_id).unwrap(); - f(module_items) - } -} - -#[cfg(test)] -mod tests { - use crate::{ - AnalysisChange, - mock_analysis::{MockAnalysis, analysis_and_position}, - descriptors::{DescriptorDatabase, module::ModuleDescriptor}, - input::FilesDatabase, -}; - use super::*; - - fn item_map(fixture: &str) -> (Arc, ModuleId) { - let (analysis, pos) = analysis_and_position(fixture); - let db = analysis.imp.db; - let source_root = db.file_source_root(pos.file_id); - let descr = ModuleDescriptor::guess_from_position(&*db, pos) - .unwrap() - .unwrap(); - let module_id = descr.module_id; - (db._item_map(source_root).unwrap(), module_id) - } - - #[test] - fn test_item_map() { - let (item_map, module_id) = item_map( - " - //- /lib.rs - mod foo; - - use crate::foo::bar::Baz; - <|> - - //- /foo/mod.rs - pub mod bar; - - //- /foo/bar.rs - pub struct Baz; - ", - ); - let name = SmolStr::from("Baz"); - let resolution = &item_map.per_module[&module_id].items[&name]; - assert!(resolution.def_id.is_some()); - } - - #[test] - fn typing_inside_a_function_should_not_invalidate_item_map() { - let mock_analysis = MockAnalysis::with_files( - " - //- /lib.rs - mod foo; - - use crate::foo::bar::Baz; - - fn foo() -> i32 { - 1 + 1 - } - //- /foo/mod.rs - pub mod bar; - - //- /foo/bar.rs - pub struct Baz; - ", - ); - - let file_id = mock_analysis.id_of("/lib.rs"); - let mut host = mock_analysis.analysis_host(); - - let source_root = host.analysis().imp.db.file_source_root(file_id); - - { - let db = host.analysis().imp.db; - let events = db.log_executed(|| { - db._item_map(source_root).unwrap(); - }); - assert!(format!("{:?}", events).contains("_item_map")) - } - - let mut change = AnalysisChange::new(); - - change.change_file( - file_id, - " - mod foo; - 
- use crate::foo::bar::Baz; - - fn foo() -> i32 { 92 } - " - .to_string(), - ); - - host.apply_change(change); - - { - let db = host.analysis().imp.db; - let events = db.log_executed(|| { - db._item_map(source_root).unwrap(); - }); - assert!( - !format!("{:?}", events).contains("_item_map"), - "{:#?}", - events - ) - } - } -} diff --git a/crates/ra_analysis/src/descriptors/path.rs b/crates/ra_analysis/src/descriptors/path.rs deleted file mode 100644 index 8279daf4b..000000000 --- a/crates/ra_analysis/src/descriptors/path.rs +++ /dev/null @@ -1,148 +0,0 @@ -use ra_syntax::{SmolStr, ast, AstNode, TextRange}; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub(crate) struct Path { - pub(crate) kind: PathKind, - pub(crate) segments: Vec, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub(crate) enum PathKind { - Plain, - Self_, - Super, - Crate, -} - -impl Path { - /// Calls `cb` with all paths, represented by this use item. - pub(crate) fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option)) { - if let Some(tree) = item.use_tree() { - expand_use_tree(None, tree, &mut cb); - } - } - - /// Converts an `ast::Path` to `Path`. Works with use trees. - pub(crate) fn from_ast(mut path: ast::Path) -> Option { - let mut kind = PathKind::Plain; - let mut segments = Vec::new(); - loop { - let segment = path.segment()?; - match segment.kind()? { - ast::PathSegmentKind::Name(name) => segments.push(name.text()), - ast::PathSegmentKind::CrateKw => { - kind = PathKind::Crate; - break; - } - ast::PathSegmentKind::SelfKw => { - kind = PathKind::Self_; - break; - } - ast::PathSegmentKind::SuperKw => { - kind = PathKind::Super; - break; - } - } - path = match qualifier(path) { - Some(it) => it, - None => break, - }; - } - segments.reverse(); - return Some(Path { kind, segments }); - - fn qualifier(path: ast::Path) -> Option { - if let Some(q) = path.qualifier() { - return Some(q); - } - // TODO: this bottom up traversal is not too precise. - // Should we handle do a top-down analysiss, recording results? - let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; - let use_tree = use_tree_list.parent_use_tree(); - use_tree.path() - } - } - - /// `true` is this path is a single identifier, like `foo` - pub(crate) fn is_ident(&self) -> bool { - self.kind == PathKind::Plain && self.segments.len() == 1 - } -} - -fn expand_use_tree( - prefix: Option, - tree: ast::UseTree, - cb: &mut impl FnMut(Path, Option), -) { - if let Some(use_tree_list) = tree.use_tree_list() { - let prefix = match tree.path() { - None => prefix, - Some(path) => match convert_path(prefix, path) { - Some(it) => Some(it), - None => return, // TODO: report errors somewhere - }, - }; - for tree in use_tree_list.use_trees() { - expand_use_tree(prefix.clone(), tree, cb); - } - } else { - if let Some(ast_path) = tree.path() { - if let Some(path) = convert_path(prefix, ast_path) { - let range = if tree.has_star() { - None - } else { - let range = ast_path.segment().unwrap().syntax().range(); - Some(range) - }; - cb(path, range) - } - } - } -} - -fn convert_path(prefix: Option, path: ast::Path) -> Option { - let prefix = if let Some(qual) = path.qualifier() { - Some(convert_path(prefix, qual)?) - } else { - None - }; - let segment = path.segment()?; - let res = match segment.kind()? 
{ - ast::PathSegmentKind::Name(name) => { - let mut res = prefix.unwrap_or_else(|| Path { - kind: PathKind::Plain, - segments: Vec::with_capacity(1), - }); - res.segments.push(name.text()); - res - } - ast::PathSegmentKind::CrateKw => { - if prefix.is_some() { - return None; - } - Path { - kind: PathKind::Crate, - segments: Vec::new(), - } - } - ast::PathSegmentKind::SelfKw => { - if prefix.is_some() { - return None; - } - Path { - kind: PathKind::Self_, - segments: Vec::new(), - } - } - ast::PathSegmentKind::SuperKw => { - if prefix.is_some() { - return None; - } - Path { - kind: PathKind::Super, - segments: Vec::new(), - } - } - }; - Some(res) -} diff --git a/crates/ra_analysis/src/hir/function/imp.rs b/crates/ra_analysis/src/hir/function/imp.rs new file mode 100644 index 000000000..cd573a47f --- /dev/null +++ b/crates/ra_analysis/src/hir/function/imp.rs @@ -0,0 +1,21 @@ +use std::sync::Arc; + +use ra_syntax::ast::{AstNode, FnDef, FnDefNode}; + +use crate::hir::{ + function::{FnId, FnScopes}, + DescriptorDatabase, +}; + +/// Resolve `FnId` to the corresponding `SyntaxNode` +pub(crate) fn fn_syntax(db: &impl DescriptorDatabase, fn_id: FnId) -> FnDefNode { + let ptr = db.id_maps().fn_ptr(fn_id); + let syntax = db.resolve_syntax_ptr(ptr); + FnDef::cast(syntax.borrowed()).unwrap().owned() +} + +pub(crate) fn fn_scopes(db: &impl DescriptorDatabase, fn_id: FnId) -> Arc { + let syntax = db._fn_syntax(fn_id); + let res = FnScopes::new(syntax.borrowed()); + Arc::new(res) +} diff --git a/crates/ra_analysis/src/hir/function/mod.rs b/crates/ra_analysis/src/hir/function/mod.rs new file mode 100644 index 000000000..86eee5e93 --- /dev/null +++ b/crates/ra_analysis/src/hir/function/mod.rs @@ -0,0 +1,137 @@ +pub(super) mod imp; +mod scope; + +use std::cmp::{max, min}; + +use ra_syntax::{ + ast::{self, AstNode, DocCommentsOwner, NameOwner}, + TextRange, TextUnit, +}; + +use crate::{ + syntax_ptr::SyntaxPtr, FileId, + loc2id::IdDatabase, +}; + +pub(crate) use self::scope::{resolve_local_name, FnScopes}; +pub(crate) use crate::loc2id::FnId; + +impl FnId { + pub(crate) fn get(db: &impl IdDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId { + let ptr = SyntaxPtr::new(file_id, fn_def.syntax()); + db.id_maps().fn_id(ptr) + } +} + +#[derive(Debug, Clone)] +pub struct FnDescriptor { + pub name: String, + pub label: String, + pub ret_type: Option, + pub params: Vec, + pub doc: Option, +} + +impl FnDescriptor { + pub fn new(node: ast::FnDef) -> Option { + let name = node.name()?.text().to_string(); + + let mut doc = None; + + // Strip the body out for the label. 
+ let mut label: String = if let Some(body) = node.body() { + let body_range = body.syntax().range(); + let label: String = node + .syntax() + .children() + .filter(|child| !child.range().is_subrange(&body_range)) + .map(|node| node.text().to_string()) + .collect(); + label + } else { + node.syntax().text().to_string() + }; + + if let Some((comment_range, docs)) = FnDescriptor::extract_doc_comments(node) { + let comment_range = comment_range + .checked_sub(node.syntax().range().start()) + .unwrap(); + let start = comment_range.start().to_usize(); + let end = comment_range.end().to_usize(); + + // Remove the comment from the label + label.replace_range(start..end, ""); + + // Massage markdown + let mut processed_lines = Vec::new(); + let mut in_code_block = false; + for line in docs.lines() { + if line.starts_with("```") { + in_code_block = !in_code_block; + } + + let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { + "```rust".into() + } else { + line.to_string() + }; + + processed_lines.push(line); + } + + if !processed_lines.is_empty() { + doc = Some(processed_lines.join("\n")); + } + } + + let params = FnDescriptor::param_list(node); + let ret_type = node.ret_type().map(|r| r.syntax().text().to_string()); + + Some(FnDescriptor { + name, + ret_type, + params, + label: label.trim().to_owned(), + doc, + }) + } + + fn extract_doc_comments(node: ast::FnDef) -> Option<(TextRange, String)> { + if node.doc_comments().count() == 0 { + return None; + } + + let comment_text = node.doc_comment_text(); + + let (begin, end) = node + .doc_comments() + .map(|comment| comment.syntax().range()) + .map(|range| (range.start().to_usize(), range.end().to_usize())) + .fold((std::usize::MAX, std::usize::MIN), |acc, range| { + (min(acc.0, range.0), max(acc.1, range.1)) + }); + + let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); + + Some((range, comment_text)) + } + + fn param_list(node: ast::FnDef) -> Vec { + let mut res = vec![]; + if let Some(param_list) = node.param_list() { + if let Some(self_param) = param_list.self_param() { + res.push(self_param.syntax().text().to_string()) + } + + // Maybe use param.pat here? See if we can just extract the name? 
+ //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); + res.extend( + param_list + .params() + .filter_map(|p| p.pat()) + .map(|pat| pat.syntax().text().to_string()), + ); + } + res + } +} diff --git a/crates/ra_analysis/src/hir/function/scope.rs b/crates/ra_analysis/src/hir/function/scope.rs new file mode 100644 index 000000000..5307a0a8e --- /dev/null +++ b/crates/ra_analysis/src/hir/function/scope.rs @@ -0,0 +1,424 @@ +use rustc_hash::{FxHashMap, FxHashSet}; + +use ra_syntax::{ + algo::generate, + ast::{self, ArgListOwner, LoopBodyOwner, NameOwner}, + AstNode, SmolStr, SyntaxNodeRef, +}; + +use crate::{ + syntax_ptr::LocalSyntaxPtr, + arena::{Arena, Id}, +}; + +pub(crate) type ScopeId = Id; + +#[derive(Debug, PartialEq, Eq)] +pub struct FnScopes { + pub(crate) self_param: Option, + scopes: Arena, + scope_for: FxHashMap, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ScopeEntry { + name: SmolStr, + ptr: LocalSyntaxPtr, +} + +#[derive(Debug, PartialEq, Eq)] +pub(crate) struct ScopeData { + parent: Option, + entries: Vec, +} + +impl FnScopes { + pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes { + let mut scopes = FnScopes { + self_param: fn_def + .param_list() + .and_then(|it| it.self_param()) + .map(|it| LocalSyntaxPtr::new(it.syntax())), + scopes: Arena::default(), + scope_for: FxHashMap::default(), + }; + let root = scopes.root_scope(); + scopes.add_params_bindings(root, fn_def.param_list()); + if let Some(body) = fn_def.body() { + compute_block_scopes(body, &mut scopes, root) + } + scopes + } + pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { + &self.scopes[scope].entries + } + pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator + 'a { + generate(self.scope_for(node), move |&scope| { + self.scopes[scope].parent + }) + } + fn root_scope(&mut self) -> ScopeId { + self.scopes.alloc(ScopeData { + parent: None, + entries: vec![], + }) + } + fn new_scope(&mut self, parent: ScopeId) -> ScopeId { + self.scopes.alloc(ScopeData { + parent: Some(parent), + entries: vec![], + }) + } + fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) { + let entries = pat + .syntax() + .descendants() + .filter_map(ast::BindPat::cast) + .filter_map(ScopeEntry::new); + self.scopes[scope].entries.extend(entries); + } + fn add_params_bindings(&mut self, scope: ScopeId, params: Option) { + params + .into_iter() + .flat_map(|it| it.params()) + .filter_map(|it| it.pat()) + .for_each(|it| self.add_bindings(scope, it)); + } + fn set_scope(&mut self, node: SyntaxNodeRef, scope: ScopeId) { + self.scope_for.insert(LocalSyntaxPtr::new(node), scope); + } + fn scope_for(&self, node: SyntaxNodeRef) -> Option { + node.ancestors() + .map(LocalSyntaxPtr::new) + .filter_map(|it| self.scope_for.get(&it).map(|&scope| scope)) + .next() + } +} + +impl ScopeEntry { + fn new(pat: ast::BindPat) -> Option { + let name = pat.name()?; + let res = ScopeEntry { + name: name.text(), + ptr: LocalSyntaxPtr::new(pat.syntax()), + }; + Some(res) + } + pub(crate) fn name(&self) -> &SmolStr { + &self.name + } + pub(crate) fn ptr(&self) -> LocalSyntaxPtr { + self.ptr + } +} + +fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: ScopeId) { + for stmt in block.statements() { + match stmt { + ast::Stmt::LetStmt(stmt) => { + if let Some(expr) = stmt.initializer() { + scopes.set_scope(expr.syntax(), scope); + compute_expr_scopes(expr, scopes, scope); + } + scope = scopes.new_scope(scope); + if let Some(pat) = stmt.pat() { + scopes.add_bindings(scope, pat); + } + } + 
ast::Stmt::ExprStmt(expr_stmt) => { + if let Some(expr) = expr_stmt.expr() { + scopes.set_scope(expr.syntax(), scope); + compute_expr_scopes(expr, scopes, scope); + } + } + } + } + if let Some(expr) = block.expr() { + scopes.set_scope(expr.syntax(), scope); + compute_expr_scopes(expr, scopes, scope); + } +} + +fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { + match expr { + ast::Expr::IfExpr(e) => { + let cond_scope = e + .condition() + .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); + if let Some(block) = e.then_branch() { + compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); + } + if let Some(block) = e.else_branch() { + compute_block_scopes(block, scopes, scope); + } + } + ast::Expr::BlockExpr(e) => { + if let Some(block) = e.block() { + compute_block_scopes(block, scopes, scope); + } + } + ast::Expr::LoopExpr(e) => { + if let Some(block) = e.loop_body() { + compute_block_scopes(block, scopes, scope); + } + } + ast::Expr::WhileExpr(e) => { + let cond_scope = e + .condition() + .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); + if let Some(block) = e.loop_body() { + compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); + } + } + ast::Expr::ForExpr(e) => { + if let Some(expr) = e.iterable() { + compute_expr_scopes(expr, scopes, scope); + } + let mut scope = scope; + if let Some(pat) = e.pat() { + scope = scopes.new_scope(scope); + scopes.add_bindings(scope, pat); + } + if let Some(block) = e.loop_body() { + compute_block_scopes(block, scopes, scope); + } + } + ast::Expr::LambdaExpr(e) => { + let scope = scopes.new_scope(scope); + scopes.add_params_bindings(scope, e.param_list()); + if let Some(body) = e.body() { + scopes.set_scope(body.syntax(), scope); + compute_expr_scopes(body, scopes, scope); + } + } + ast::Expr::CallExpr(e) => { + compute_call_scopes(e.expr(), e.arg_list(), scopes, scope); + } + ast::Expr::MethodCallExpr(e) => { + compute_call_scopes(e.expr(), e.arg_list(), scopes, scope); + } + ast::Expr::MatchExpr(e) => { + if let Some(expr) = e.expr() { + compute_expr_scopes(expr, scopes, scope); + } + for arm in e.match_arm_list().into_iter().flat_map(|it| it.arms()) { + let scope = scopes.new_scope(scope); + for pat in arm.pats() { + scopes.add_bindings(scope, pat); + } + if let Some(expr) = arm.expr() { + compute_expr_scopes(expr, scopes, scope); + } + } + } + _ => expr + .syntax() + .children() + .filter_map(ast::Expr::cast) + .for_each(|expr| compute_expr_scopes(expr, scopes, scope)), + }; + + fn compute_call_scopes( + receiver: Option, + arg_list: Option, + scopes: &mut FnScopes, + scope: ScopeId, + ) { + arg_list + .into_iter() + .flat_map(|it| it.args()) + .chain(receiver) + .for_each(|expr| compute_expr_scopes(expr, scopes, scope)); + } + + fn compute_cond_scopes( + cond: ast::Condition, + scopes: &mut FnScopes, + scope: ScopeId, + ) -> Option { + if let Some(expr) = cond.expr() { + compute_expr_scopes(expr, scopes, scope); + } + if let Some(pat) = cond.pat() { + let s = scopes.new_scope(scope); + scopes.add_bindings(s, pat); + Some(s) + } else { + None + } + } +} + +pub fn resolve_local_name<'a>( + name_ref: ast::NameRef, + scopes: &'a FnScopes, +) -> Option<&'a ScopeEntry> { + let mut shadowed = FxHashSet::default(); + let ret = scopes + .scope_chain(name_ref.syntax()) + .flat_map(|scope| scopes.entries(scope).iter()) + .filter(|entry| shadowed.insert(entry.name())) + .filter(|entry| entry.name() == &name_ref.text()) + .nth(0); + ret +} + +#[cfg(test)] +mod tests { + use 
ra_editor::find_node_at_offset; + use ra_syntax::SourceFileNode; + use test_utils::extract_offset; + + use super::*; + + fn do_check(code: &str, expected: &[&str]) { + let (off, code) = extract_offset(code); + let code = { + let mut buf = String::new(); + let off = u32::from(off) as usize; + buf.push_str(&code[..off]); + buf.push_str("marker"); + buf.push_str(&code[off..]); + buf + }; + let file = SourceFileNode::parse(&code); + let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); + let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); + let scopes = FnScopes::new(fn_def); + let actual = scopes + .scope_chain(marker.syntax()) + .flat_map(|scope| scopes.entries(scope)) + .map(|it| it.name()) + .collect::>(); + assert_eq!(actual.as_slice(), expected); + } + + #[test] + fn test_lambda_scope() { + do_check( + r" + fn quux(foo: i32) { + let f = |bar, baz: i32| { + <|> + }; + }", + &["bar", "baz", "foo"], + ); + } + + #[test] + fn test_call_scope() { + do_check( + r" + fn quux() { + f(|x| <|> ); + }", + &["x"], + ); + } + + #[test] + fn test_metod_call_scope() { + do_check( + r" + fn quux() { + z.f(|x| <|> ); + }", + &["x"], + ); + } + + #[test] + fn test_loop_scope() { + do_check( + r" + fn quux() { + loop { + let x = (); + <|> + }; + }", + &["x"], + ); + } + + #[test] + fn test_match() { + do_check( + r" + fn quux() { + match () { + Some(x) => { + <|> + } + }; + }", + &["x"], + ); + } + + #[test] + fn test_shadow_variable() { + do_check( + r" + fn foo(x: String) { + let x : &str = &x<|>; + }", + &["x"], + ); + } + + fn do_check_local_name(code: &str, expected_offset: u32) { + let (off, code) = extract_offset(code); + let file = SourceFileNode::parse(&code); + let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); + let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); + + let scopes = FnScopes::new(fn_def); + + let local_name_entry = resolve_local_name(name_ref, &scopes).unwrap(); + let local_name = local_name_entry.ptr().resolve(&file); + let expected_name = + find_node_at_offset::(file.syntax(), expected_offset.into()).unwrap(); + assert_eq!(local_name.range(), expected_name.syntax().range()); + } + + #[test] + fn test_resolve_local_name() { + do_check_local_name( + r#" + fn foo(x: i32, y: u32) { + { + let z = x * 2; + } + { + let t = x<|> * 3; + } + }"#, + 21, + ); + } + + #[test] + fn test_resolve_local_name_declaration() { + do_check_local_name( + r#" + fn foo(x: String) { + let x : &str = &x<|>; + }"#, + 21, + ); + } + + #[test] + fn test_resolve_local_name_shadow() { + do_check_local_name( + r" + fn foo(x: String) { + let x : &str = &x; + x<|> + }", + 46, + ); + } +} diff --git a/crates/ra_analysis/src/hir/mod.rs b/crates/ra_analysis/src/hir/mod.rs new file mode 100644 index 000000000..6bee2b5c4 --- /dev/null +++ b/crates/ra_analysis/src/hir/mod.rs @@ -0,0 +1,144 @@ +//! HIR (previsouly known as descriptors) provides a high-level OO acess to Rust +//! code. +//! +//! The principal difference between HIR and syntax trees is that HIR is bound +//! to a particular crate instance. That is, it has cfg flags and features +//! applied. So, there relation between syntax and HIR is many-to-one. 
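// A minimal sketch of the "high-level OO access" this module provides, using only
// APIs introduced in this change (`ModuleDescriptor`, `Path`, `DefId`,
// `DescriptorDatabase`); the helper function itself and its name are illustrative
// assumptions, not part of the commit.
fn resolve_path_in_file(
    db: &impl DescriptorDatabase,
    file_id: FileId,
    path: Path,
) -> Cancelable<Option<DefId>> {
    // Map the file to one of the modules it may correspond to; because the
    // syntax-to-HIR relation is many-to-one, this lookup is inherently lossy.
    let module = match ModuleDescriptor::guess_from_file_id(db, file_id)? {
        Some(it) => it,
        None => return Ok(None),
    };
    // Walk the path segments through the per-module item maps.
    module.resolve_path(db, path)
}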
+ +pub(crate) mod function; +pub(crate) mod module; +mod path; + +use std::sync::Arc; + +use ra_syntax::{ + ast::{self, FnDefNode, AstNode}, + TextRange, SyntaxNode, +}; + +use crate::{ + FileId, + db::SyntaxDatabase, + hir::function::{resolve_local_name, FnId, FnScopes}, + hir::module::{ + ModuleId, ModuleTree, ModuleSource, ModuleDescriptor, + nameres::{ItemMap, InputModuleItems, FileItems} + }, + input::SourceRootId, + loc2id::{IdDatabase, DefId, DefLoc}, + syntax_ptr::LocalSyntaxPtr, + Cancelable, +}; + +pub(crate) use self::path::{Path, PathKind}; +pub(crate) use self::module::nameres::FileItemId; + +salsa::query_group! { + pub(crate) trait DescriptorDatabase: SyntaxDatabase + IdDatabase { + fn fn_scopes(fn_id: FnId) -> Arc { + type FnScopesQuery; + use fn function::imp::fn_scopes; + } + + fn _file_items(file_id: FileId) -> Arc { + type FileItemsQuery; + storage dependencies; + use fn module::nameres::file_items; + } + + fn _file_item(file_id: FileId, file_item_id: FileItemId) -> SyntaxNode { + type FileItemQuery; + storage dependencies; + use fn module::nameres::file_item; + } + + fn _input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable> { + type InputModuleItemsQuery; + use fn module::nameres::input_module_items; + } + fn _item_map(source_root_id: SourceRootId) -> Cancelable> { + type ItemMapQuery; + use fn module::nameres::item_map; + } + fn _module_tree(source_root_id: SourceRootId) -> Cancelable> { + type ModuleTreeQuery; + use fn module::imp::module_tree; + } + fn _fn_syntax(fn_id: FnId) -> FnDefNode { + type FnSyntaxQuery; + // Don't retain syntax trees in memory + storage dependencies; + use fn function::imp::fn_syntax; + } + fn _submodules(source: ModuleSource) -> Cancelable>> { + type SubmodulesQuery; + use fn module::imp::submodules; + } + } +} + +pub(crate) enum Def { + Module(ModuleDescriptor), + Item, +} + +impl DefId { + pub(crate) fn resolve(self, db: &impl DescriptorDatabase) -> Cancelable { + let loc = db.id_maps().def_loc(self); + let res = match loc { + DefLoc::Module { id, source_root } => { + let descr = ModuleDescriptor::new(db, source_root, id)?; + Def::Module(descr) + } + DefLoc::Item { .. 
} => Def::Item, + }; + Ok(res) + } +} + +#[derive(Debug)] +pub struct ReferenceDescriptor { + pub range: TextRange, + pub name: String, +} + +#[derive(Debug)] +pub struct DeclarationDescriptor<'a> { + pat: ast::BindPat<'a>, + pub range: TextRange, +} + +impl<'a> DeclarationDescriptor<'a> { + pub fn new(pat: ast::BindPat) -> DeclarationDescriptor { + let range = pat.syntax().range(); + + DeclarationDescriptor { pat, range } + } + + pub fn find_all_refs(&self) -> Vec { + let name_ptr = LocalSyntaxPtr::new(self.pat.syntax()); + + let fn_def = match self.pat.syntax().ancestors().find_map(ast::FnDef::cast) { + Some(def) => def, + None => return Default::default(), + }; + + let fn_scopes = FnScopes::new(fn_def); + + let refs: Vec<_> = fn_def + .syntax() + .descendants() + .filter_map(ast::NameRef::cast) + .filter(|name_ref| match resolve_local_name(*name_ref, &fn_scopes) { + None => false, + Some(entry) => entry.ptr() == name_ptr, + }) + .map(|name_ref| ReferenceDescriptor { + name: name_ref.syntax().text().to_string(), + range: name_ref.syntax().range(), + }) + .collect(); + + refs + } +} diff --git a/crates/ra_analysis/src/hir/module/imp.rs b/crates/ra_analysis/src/hir/module/imp.rs new file mode 100644 index 000000000..d8539ed85 --- /dev/null +++ b/crates/ra_analysis/src/hir/module/imp.rs @@ -0,0 +1,229 @@ +use std::sync::Arc; + +use ra_syntax::{ + ast::{self, NameOwner}, + SmolStr, +}; +use relative_path::RelativePathBuf; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{ + db, + hir::DescriptorDatabase, + input::{SourceRoot, SourceRootId}, + Cancelable, FileId, FileResolverImp, +}; + +use super::{ + LinkData, LinkId, ModuleData, ModuleId, ModuleSource, ModuleSourceNode, + ModuleTree, Problem, +}; + +#[derive(Clone, Hash, PartialEq, Eq, Debug)] +pub(crate) enum Submodule { + Declaration(SmolStr), + Definition(SmolStr, ModuleSource), +} + +impl Submodule { + fn name(&self) -> &SmolStr { + match self { + Submodule::Declaration(name) => name, + Submodule::Definition(name, _) => name, + } + } +} + +pub(crate) fn submodules( + db: &impl DescriptorDatabase, + source: ModuleSource, +) -> Cancelable>> { + db::check_canceled(db)?; + let file_id = source.file_id(); + let submodules = match source.resolve(db) { + ModuleSourceNode::SourceFile(it) => collect_submodules(file_id, it.borrowed()), + ModuleSourceNode::Module(it) => it + .borrowed() + .item_list() + .map(|it| collect_submodules(file_id, it)) + .unwrap_or_else(Vec::new), + }; + return Ok(Arc::new(submodules)); + + fn collect_submodules<'a>( + file_id: FileId, + root: impl ast::ModuleItemOwner<'a>, + ) -> Vec { + modules(root) + .map(|(name, m)| { + if m.has_semi() { + Submodule::Declaration(name) + } else { + let src = ModuleSource::new_inline(file_id, m); + Submodule::Definition(name, src) + } + }) + .collect() + } +} + +pub(crate) fn modules<'a>( + root: impl ast::ModuleItemOwner<'a>, +) -> impl Iterator)> { + root.items() + .filter_map(|item| match item { + ast::ModuleItem::Module(m) => Some(m), + _ => None, + }) + .filter_map(|module| { + let name = module.name()?.text(); + Some((name, module)) + }) +} + +pub(crate) fn module_tree( + db: &impl DescriptorDatabase, + source_root: SourceRootId, +) -> Cancelable> { + db::check_canceled(db)?; + let res = create_module_tree(db, source_root)?; + Ok(Arc::new(res)) +} + +fn create_module_tree<'a>( + db: &impl DescriptorDatabase, + source_root: SourceRootId, +) -> Cancelable { + let mut tree = ModuleTree::default(); + + let mut roots = FxHashMap::default(); + let mut visited = 
FxHashSet::default(); + + let source_root = db.source_root(source_root); + for &file_id in source_root.files.iter() { + let source = ModuleSource::SourceFile(file_id); + if visited.contains(&source) { + continue; // TODO: use explicit crate_roots here + } + assert!(!roots.contains_key(&file_id)); + let module_id = build_subtree( + db, + &source_root, + &mut tree, + &mut visited, + &mut roots, + None, + source, + )?; + roots.insert(file_id, module_id); + } + Ok(tree) +} + +fn build_subtree( + db: &impl DescriptorDatabase, + source_root: &SourceRoot, + tree: &mut ModuleTree, + visited: &mut FxHashSet, + roots: &mut FxHashMap, + parent: Option, + source: ModuleSource, +) -> Cancelable { + visited.insert(source); + let id = tree.push_mod(ModuleData { + source, + parent, + children: Vec::new(), + }); + for sub in db._submodules(source)?.iter() { + let link = tree.push_link(LinkData { + name: sub.name().clone(), + owner: id, + points_to: Vec::new(), + problem: None, + }); + + let (points_to, problem) = match sub { + Submodule::Declaration(name) => { + let (points_to, problem) = + resolve_submodule(source, &name, &source_root.file_resolver); + let points_to = points_to + .into_iter() + .map(|file_id| match roots.remove(&file_id) { + Some(module_id) => { + tree.mods[module_id].parent = Some(link); + Ok(module_id) + } + None => build_subtree( + db, + source_root, + tree, + visited, + roots, + Some(link), + ModuleSource::SourceFile(file_id), + ), + }) + .collect::>>()?; + (points_to, problem) + } + Submodule::Definition(_name, submodule_source) => { + let points_to = build_subtree( + db, + source_root, + tree, + visited, + roots, + Some(link), + *submodule_source, + )?; + (vec![points_to], None) + } + }; + + tree.links[link].points_to = points_to; + tree.links[link].problem = problem; + } + Ok(id) +} + +fn resolve_submodule( + source: ModuleSource, + name: &SmolStr, + file_resolver: &FileResolverImp, +) -> (Vec, Option) { + let file_id = match source { + ModuleSource::SourceFile(it) => it, + ModuleSource::Module(..) 
=> { + // TODO + return (Vec::new(), None); + } + }; + let mod_name = file_resolver.file_stem(file_id); + let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; + + let file_mod = RelativePathBuf::from(format!("../{}.rs", name)); + let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name)); + let points_to: Vec; + let problem: Option; + if is_dir_owner { + points_to = [&file_mod, &dir_mod] + .iter() + .filter_map(|path| file_resolver.resolve(file_id, path)) + .collect(); + problem = if points_to.is_empty() { + Some(Problem::UnresolvedModule { + candidate: file_mod, + }) + } else { + None + } + } else { + points_to = Vec::new(); + problem = Some(Problem::NotDirOwner { + move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)), + candidate: file_mod, + }); + } + (points_to, problem) +} diff --git a/crates/ra_analysis/src/hir/module/mod.rs b/crates/ra_analysis/src/hir/module/mod.rs new file mode 100644 index 000000000..f374a079f --- /dev/null +++ b/crates/ra_analysis/src/hir/module/mod.rs @@ -0,0 +1,378 @@ +pub(super) mod imp; +pub(super) mod nameres; + +use std::sync::Arc; + +use ra_editor::find_node_at_offset; + +use ra_syntax::{ + algo::generate, + ast::{self, AstNode, NameOwner}, + SmolStr, SyntaxNode, +}; +use relative_path::RelativePathBuf; + +use crate::{ + db::SyntaxDatabase, syntax_ptr::SyntaxPtr, FileId, FilePosition, Cancelable, + hir::{Path, PathKind, DescriptorDatabase}, + input::SourceRootId, + arena::{Arena, Id}, + loc2id::{DefLoc, DefId}, +}; + +pub(crate) use self::nameres::ModuleScope; + +/// `ModuleDescriptor` is API entry point to get all the information +/// about a particular module. +#[derive(Debug, Clone)] +pub(crate) struct ModuleDescriptor { + tree: Arc, + source_root_id: SourceRootId, + module_id: ModuleId, +} + +impl ModuleDescriptor { + /// Lookup `ModuleDescriptor` by `FileId`. Note that this is inherently + /// lossy transformation: in general, a single source might correspond to + /// several modules. + pub fn guess_from_file_id( + db: &impl DescriptorDatabase, + file_id: FileId, + ) -> Cancelable> { + ModuleDescriptor::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id)) + } + + /// Lookup `ModuleDescriptor` by position in the source code. Note that this + /// is inherently lossy transformation: in general, a single source might + /// correspond to several modules. 
+ pub fn guess_from_position( + db: &impl DescriptorDatabase, + position: FilePosition, + ) -> Cancelable> { + let file = db.file_syntax(position.file_id); + let module_source = match find_node_at_offset::(file.syntax(), position.offset) + { + Some(m) if !m.has_semi() => ModuleSource::new_inline(position.file_id, m), + _ => ModuleSource::SourceFile(position.file_id), + }; + ModuleDescriptor::guess_from_source(db, position.file_id, module_source) + } + + fn guess_from_source( + db: &impl DescriptorDatabase, + file_id: FileId, + module_source: ModuleSource, + ) -> Cancelable> { + let source_root_id = db.file_source_root(file_id); + let module_tree = db._module_tree(source_root_id)?; + + let res = match module_tree.any_module_for_source(module_source) { + None => None, + Some(module_id) => Some(ModuleDescriptor { + tree: module_tree, + source_root_id, + module_id, + }), + }; + Ok(res) + } + + pub(super) fn new( + db: &impl DescriptorDatabase, + source_root_id: SourceRootId, + module_id: ModuleId, + ) -> Cancelable { + let module_tree = db._module_tree(source_root_id)?; + let res = ModuleDescriptor { + tree: module_tree, + source_root_id, + module_id, + }; + Ok(res) + } + + /// Returns `mod foo;` or `mod foo {}` node whihc declared this module. + /// Returns `None` for the root module + pub fn parent_link_source( + &self, + db: &impl DescriptorDatabase, + ) -> Option<(FileId, ast::ModuleNode)> { + let link = self.module_id.parent_link(&self.tree)?; + let file_id = link.owner(&self.tree).source(&self.tree).file_id(); + let src = link.bind_source(&self.tree, db); + Some((file_id, src)) + } + + pub fn source(&self) -> ModuleSource { + self.module_id.source(&self.tree) + } + + /// Parent module. Returns `None` if this is a root module. + pub fn parent(&self) -> Option { + let parent_id = self.module_id.parent(&self.tree)?; + Some(ModuleDescriptor { + module_id: parent_id, + ..self.clone() + }) + } + + /// The root of the tree this module is part of + pub fn crate_root(&self) -> ModuleDescriptor { + let root_id = self.module_id.crate_root(&self.tree); + ModuleDescriptor { + module_id: root_id, + ..self.clone() + } + } + + /// `name` is `None` for the crate's root module + #[allow(unused)] + pub fn name(&self) -> Option { + let link = self.module_id.parent_link(&self.tree)?; + Some(link.name(&self.tree)) + } + + pub fn def_id(&self, db: &impl DescriptorDatabase) -> DefId { + let def_loc = DefLoc::Module { + id: self.module_id, + source_root: self.source_root_id, + }; + db.id_maps().def_id(def_loc) + } + + /// Finds a child module with the specified name. + pub fn child(&self, name: &str) -> Option { + let child_id = self.module_id.child(&self.tree, name)?; + Some(ModuleDescriptor { + module_id: child_id, + ..self.clone() + }) + } + + /// Returns a `ModuleScope`: a set of items, visible in this module. 
+ pub(crate) fn scope(&self, db: &impl DescriptorDatabase) -> Cancelable { + let item_map = db._item_map(self.source_root_id)?; + let res = item_map.per_module[&self.module_id].clone(); + Ok(res) + } + + pub(crate) fn resolve_path( + &self, + db: &impl DescriptorDatabase, + path: Path, + ) -> Cancelable> { + let mut curr = match path.kind { + PathKind::Crate => self.crate_root(), + PathKind::Self_ | PathKind::Plain => self.clone(), + PathKind::Super => ctry!(self.parent()), + } + .def_id(db); + + let segments = path.segments; + for name in segments.iter() { + let module = match db.id_maps().def_loc(curr) { + DefLoc::Module { id, source_root } => ModuleDescriptor::new(db, source_root, id)?, + _ => return Ok(None), + }; + let scope = module.scope(db)?; + curr = ctry!(ctry!(scope.get(&name)).def_id); + } + Ok(Some(curr)) + } + + pub fn problems(&self, db: &impl DescriptorDatabase) -> Vec<(SyntaxNode, Problem)> { + self.module_id.problems(&self.tree, db) + } +} + +/// Phisically, rust source is organized as a set of files, but logically it is +/// organized as a tree of modules. Usually, a single file corresponds to a +/// single module, but it is not nessary the case. +/// +/// Module encapsulate the logic of transitioning from the fuzzy world of files +/// (which can have multiple parents) to the precise world of modules (which +/// always have one parent). +#[derive(Default, Debug, PartialEq, Eq)] +pub(crate) struct ModuleTree { + mods: Arena, + links: Arena, +} + +impl ModuleTree { + fn modules<'a>(&'a self) -> impl Iterator + 'a { + self.mods.iter().map(|(id, _)| id) + } + + fn modules_for_source(&self, source: ModuleSource) -> Vec { + self.mods + .iter() + .filter(|(_idx, it)| it.source == source) + .map(|(idx, _)| idx) + .collect() + } + + fn any_module_for_source(&self, source: ModuleSource) -> Option { + self.modules_for_source(source).pop() + } +} + +/// `ModuleSource` is the syntax tree element that produced this module: +/// either a file, or an inlinde module. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +pub(crate) enum ModuleSource { + SourceFile(FileId), + #[allow(dead_code)] + Module(SyntaxPtr), +} + +/// An owned syntax node for a module. Unlike `ModuleSource`, +/// this holds onto the AST for the whole file. +enum ModuleSourceNode { + SourceFile(ast::SourceFileNode), + Module(ast::ModuleNode), +} + +pub(crate) type ModuleId = Id; +type LinkId = Id; + +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub enum Problem { + UnresolvedModule { + candidate: RelativePathBuf, + }, + NotDirOwner { + move_to: RelativePathBuf, + candidate: RelativePathBuf, + }, +} + +impl ModuleId { + fn source(self, tree: &ModuleTree) -> ModuleSource { + tree.mods[self].source + } + fn parent_link(self, tree: &ModuleTree) -> Option { + tree.mods[self].parent + } + fn parent(self, tree: &ModuleTree) -> Option { + let link = self.parent_link(tree)?; + Some(tree.links[link].owner) + } + fn crate_root(self, tree: &ModuleTree) -> ModuleId { + generate(Some(self), move |it| it.parent(tree)) + .last() + .unwrap() + } + fn child(self, tree: &ModuleTree, name: &str) -> Option { + let link = tree.mods[self] + .children + .iter() + .map(|&it| &tree.links[it]) + .find(|it| it.name == name)?; + Some(*link.points_to.first()?) 
+ } + fn children<'a>(self, tree: &'a ModuleTree) -> impl Iterator + 'a { + tree.mods[self].children.iter().filter_map(move |&it| { + let link = &tree.links[it]; + let module = *link.points_to.first()?; + Some((link.name.clone(), module)) + }) + } + fn problems(self, tree: &ModuleTree, db: &impl SyntaxDatabase) -> Vec<(SyntaxNode, Problem)> { + tree.mods[self] + .children + .iter() + .filter_map(|&it| { + let p = tree.links[it].problem.clone()?; + let s = it.bind_source(tree, db); + let s = s.borrowed().name().unwrap().syntax().owned(); + Some((s, p)) + }) + .collect() + } +} + +impl LinkId { + fn owner(self, tree: &ModuleTree) -> ModuleId { + tree.links[self].owner + } + fn name(self, tree: &ModuleTree) -> SmolStr { + tree.links[self].name.clone() + } + fn bind_source<'a>(self, tree: &ModuleTree, db: &impl SyntaxDatabase) -> ast::ModuleNode { + let owner = self.owner(tree); + match owner.source(tree).resolve(db) { + ModuleSourceNode::SourceFile(root) => { + let ast = imp::modules(root.borrowed()) + .find(|(name, _)| name == &tree.links[self].name) + .unwrap() + .1; + ast.owned() + } + ModuleSourceNode::Module(it) => it, + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub(crate) struct ModuleData { + source: ModuleSource, + parent: Option, + children: Vec, +} + +impl ModuleSource { + fn new_inline(file_id: FileId, module: ast::Module) -> ModuleSource { + assert!(!module.has_semi()); + let ptr = SyntaxPtr::new(file_id, module.syntax()); + ModuleSource::Module(ptr) + } + + pub(crate) fn as_file(self) -> Option { + match self { + ModuleSource::SourceFile(f) => Some(f), + ModuleSource::Module(..) => None, + } + } + + pub(crate) fn file_id(self) -> FileId { + match self { + ModuleSource::SourceFile(f) => f, + ModuleSource::Module(ptr) => ptr.file_id(), + } + } + + fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode { + match self { + ModuleSource::SourceFile(file_id) => { + let syntax = db.file_syntax(file_id); + ModuleSourceNode::SourceFile(syntax.ast().owned()) + } + ModuleSource::Module(ptr) => { + let syntax = db.resolve_syntax_ptr(ptr); + let syntax = syntax.borrowed(); + let module = ast::Module::cast(syntax).unwrap(); + ModuleSourceNode::Module(module.owned()) + } + } + } +} + +#[derive(Hash, Debug, PartialEq, Eq)] +struct LinkData { + owner: ModuleId, + name: SmolStr, + points_to: Vec, + problem: Option, +} + +impl ModuleTree { + fn push_mod(&mut self, data: ModuleData) -> ModuleId { + self.mods.alloc(data) + } + fn push_link(&mut self, data: LinkData) -> LinkId { + let owner = data.owner; + let id = self.links.alloc(data); + self.mods[owner].children.push(id); + id + } +} diff --git a/crates/ra_analysis/src/hir/module/nameres.rs b/crates/ra_analysis/src/hir/module/nameres.rs new file mode 100644 index 000000000..1ec85fbf2 --- /dev/null +++ b/crates/ra_analysis/src/hir/module/nameres.rs @@ -0,0 +1,549 @@ +//! Name resolution algorithm. The end result of the algorithm is `ItemMap`: a +//! map with maps each module to it's scope: the set of items, visible in the +//! module. That is, we only resolve imports here, name resolution of item +//! bodies will be done in a separate step. +//! +//! Like Rustc, we use an interative per-crate algorithm: we start with scopes +//! containing only directly defined items, and then iteratively resolve +//! imports. +//! +//! To make this work nicely in the IDE scenarios, we place `InputModuleItems` +//! in between raw syntax and name resolution. `InputModuleItems` are computed +//! 
using only the module's syntax, and it is all directly defined items plus +//! imports. The plain is to make `InputModuleItems` independent of local +//! modifications (that is, typing inside a function shold not change IMIs), +//! such that the results of name resolution can be preserved unless the module +//! structure itself is modified. +use std::{ + sync::Arc, + time::Instant, + ops::Index, +}; + +use rustc_hash::FxHashMap; + +use ra_syntax::{ + SyntaxNode, SyntaxNodeRef, TextRange, + SmolStr, SyntaxKind::{self, *}, + ast::{self, ModuleItemOwner, AstNode} +}; + +use crate::{ + Cancelable, FileId, + loc2id::{DefId, DefLoc}, + hir::{ + Path, PathKind, + DescriptorDatabase, + module::{ModuleId, ModuleTree, ModuleSourceNode}, + }, + input::SourceRootId, + arena::{Arena, Id} +}; + +/// Identifier of item within a specific file. This is stable over reparses, so +/// it's OK to use it as a salsa key/value. +pub(crate) type FileItemId = Id; + +/// Maps item's `SyntaxNode`s to `FileItemId` and back. +#[derive(Debug, PartialEq, Eq, Default)] +pub(crate) struct FileItems { + arena: Arena, +} + +impl FileItems { + fn alloc(&mut self, item: SyntaxNode) -> FileItemId { + self.arena.alloc(item) + } + fn id_of(&self, item: SyntaxNodeRef) -> FileItemId { + let (id, _item) = self + .arena + .iter() + .find(|(_id, i)| i.borrowed() == item) + .unwrap(); + id + } +} + +impl Index for FileItems { + type Output = SyntaxNode; + fn index(&self, idx: FileItemId) -> &SyntaxNode { + &self.arena[idx] + } +} + +pub(crate) fn file_items(db: &impl DescriptorDatabase, file_id: FileId) -> Arc { + let source_file = db.file_syntax(file_id); + let source_file = source_file.borrowed(); + let mut res = FileItems::default(); + source_file + .syntax() + .descendants() + .filter_map(ast::ModuleItem::cast) + .map(|it| it.syntax().owned()) + .for_each(|it| { + res.alloc(it); + }); + Arc::new(res) +} + +pub(crate) fn file_item( + db: &impl DescriptorDatabase, + file_id: FileId, + file_item_id: FileItemId, +) -> SyntaxNode { + db._file_items(file_id)[file_item_id].clone() +} + +/// Item map is the result of the name resolution. Item map contains, for each +/// module, the set of visible items. +#[derive(Default, Debug, PartialEq, Eq)] +pub(crate) struct ItemMap { + pub(crate) per_module: FxHashMap, +} + +#[derive(Debug, Default, PartialEq, Eq, Clone)] +pub(crate) struct ModuleScope { + items: FxHashMap, +} + +impl ModuleScope { + pub(crate) fn entries<'a>(&'a self) -> impl Iterator + 'a { + self.items.iter() + } + pub(crate) fn get(&self, name: &SmolStr) -> Option<&Resolution> { + self.items.get(name) + } +} + +/// A set of items and imports declared inside a module, without relation to +/// other modules. +/// +/// This stands in-between raw syntax and name resolution and alow us to avoid +/// recomputing name res: if `InputModuleItems` are the same, we can avoid +/// running name resolution. 
+#[derive(Debug, Default, PartialEq, Eq)] +pub(crate) struct InputModuleItems { + items: Vec, + imports: Vec, +} + +#[derive(Debug, PartialEq, Eq)] +struct ModuleItem { + id: FileItemId, + name: SmolStr, + kind: SyntaxKind, + vis: Vis, +} + +#[derive(Debug, PartialEq, Eq)] +enum Vis { + // Priv, + Other, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct Import { + path: Path, + kind: ImportKind, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) struct NamedImport { + file_item_id: FileItemId, + relative_range: TextRange, +} + +impl NamedImport { + pub(crate) fn range(&self, db: &impl DescriptorDatabase, file_id: FileId) -> TextRange { + let syntax = db._file_item(file_id, self.file_item_id); + let offset = syntax.borrowed().range().start(); + self.relative_range + offset + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +enum ImportKind { + Glob, + Named(NamedImport), +} + +pub(crate) fn input_module_items( + db: &impl DescriptorDatabase, + source_root: SourceRootId, + module_id: ModuleId, +) -> Cancelable> { + let module_tree = db._module_tree(source_root)?; + let source = module_id.source(&module_tree); + let file_items = db._file_items(source.file_id()); + let res = match source.resolve(db) { + ModuleSourceNode::SourceFile(it) => { + let items = it.borrowed().items(); + InputModuleItems::new(&file_items, items) + } + ModuleSourceNode::Module(it) => { + let items = it + .borrowed() + .item_list() + .into_iter() + .flat_map(|it| it.items()); + InputModuleItems::new(&file_items, items) + } + }; + Ok(Arc::new(res)) +} + +pub(crate) fn item_map( + db: &impl DescriptorDatabase, + source_root: SourceRootId, +) -> Cancelable> { + let start = Instant::now(); + let module_tree = db._module_tree(source_root)?; + let input = module_tree + .modules() + .map(|id| { + let items = db._input_module_items(source_root, id)?; + Ok((id, items)) + }) + .collect::>>()?; + let mut resolver = Resolver { + db: db, + input: &input, + source_root, + module_tree, + result: ItemMap::default(), + }; + resolver.resolve()?; + let res = resolver.result; + let elapsed = start.elapsed(); + log::info!("item_map: {:?}", elapsed); + Ok(Arc::new(res)) +} + +/// Resolution is basically `DefId` atm, but it should account for stuff like +/// multiple namespaces, ambiguity and errors. +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct Resolution { + /// None for unresolved + pub(crate) def_id: Option, + /// ident by whitch this is imported into local scope. 
+ pub(crate) import: Option, +} + +// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +// enum Namespace { +// Types, +// Values, +// } + +// #[derive(Debug)] +// struct PerNs { +// types: Option, +// values: Option, +// } + +impl InputModuleItems { + fn new<'a>( + file_items: &FileItems, + items: impl Iterator>, + ) -> InputModuleItems { + let mut res = InputModuleItems::default(); + for item in items { + res.add_item(file_items, item); + } + res + } + + fn add_item(&mut self, file_items: &FileItems, item: ast::ModuleItem) -> Option<()> { + match item { + ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::FnDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::TraitDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::TypeDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::ImplItem(_) => { + // impls don't define items + } + ast::ModuleItem::UseItem(it) => self.add_use_item(file_items, it), + ast::ModuleItem::ExternCrateItem(_) => { + // TODO + } + ast::ModuleItem::ConstDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::StaticDef(it) => self.items.push(ModuleItem::new(file_items, it)?), + ast::ModuleItem::Module(it) => self.items.push(ModuleItem::new(file_items, it)?), + } + Some(()) + } + + fn add_use_item(&mut self, file_items: &FileItems, item: ast::UseItem) { + let file_item_id = file_items.id_of(item.syntax()); + let start_offset = item.syntax().range().start(); + Path::expand_use_item(item, |path, range| { + let kind = match range { + None => ImportKind::Glob, + Some(range) => ImportKind::Named(NamedImport { + file_item_id, + relative_range: range - start_offset, + }), + }; + self.imports.push(Import { kind, path }) + }) + } +} + +impl ModuleItem { + fn new<'a>(file_items: &FileItems, item: impl ast::NameOwner<'a>) -> Option { + let name = item.name()?.text(); + let kind = item.syntax().kind(); + let vis = Vis::Other; + let id = file_items.id_of(item.syntax()); + let res = ModuleItem { + id, + name, + kind, + vis, + }; + Some(res) + } +} + +struct Resolver<'a, DB> { + db: &'a DB, + input: &'a FxHashMap>, + source_root: SourceRootId, + module_tree: Arc, + result: ItemMap, +} + +impl<'a, DB> Resolver<'a, DB> +where + DB: DescriptorDatabase, +{ + fn resolve(&mut self) -> Cancelable<()> { + for (&module_id, items) in self.input.iter() { + self.populate_module(module_id, items) + } + + for &module_id in self.input.keys() { + crate::db::check_canceled(self.db)?; + self.resolve_imports(module_id); + } + Ok(()) + } + + fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) { + let file_id = module_id.source(&self.module_tree).file_id(); + + let mut module_items = ModuleScope::default(); + + for import in input.imports.iter() { + if let Some(name) = import.path.segments.iter().last() { + if let ImportKind::Named(import) = import.kind { + module_items.items.insert( + name.clone(), + Resolution { + def_id: None, + import: Some(import), + }, + ); + } + } + } + + for item in input.items.iter() { + if item.kind == MODULE { + // handle submodules separatelly + continue; + } + let def_loc = DefLoc::Item { + file_id, + id: item.id, + }; + let def_id = self.db.id_maps().def_id(def_loc); + let resolution = Resolution { + def_id: Some(def_id), + import: None, + }; + module_items.items.insert(item.name.clone(), 
resolution); + } + + for (name, mod_id) in module_id.children(&self.module_tree) { + let def_loc = DefLoc::Module { + id: mod_id, + source_root: self.source_root, + }; + let def_id = self.db.id_maps().def_id(def_loc); + let resolution = Resolution { + def_id: Some(def_id), + import: None, + }; + module_items.items.insert(name, resolution); + } + + self.result.per_module.insert(module_id, module_items); + } + + fn resolve_imports(&mut self, module_id: ModuleId) { + for import in self.input[&module_id].imports.iter() { + self.resolve_import(module_id, import); + } + } + + fn resolve_import(&mut self, module_id: ModuleId, import: &Import) { + let ptr = match import.kind { + ImportKind::Glob => return, + ImportKind::Named(ptr) => ptr, + }; + + let mut curr = match import.path.kind { + // TODO: handle extern crates + PathKind::Plain => return, + PathKind::Self_ => module_id, + PathKind::Super => { + match module_id.parent(&self.module_tree) { + Some(it) => it, + // TODO: error + None => return, + } + } + PathKind::Crate => module_id.crate_root(&self.module_tree), + }; + + for (i, name) in import.path.segments.iter().enumerate() { + let is_last = i == import.path.segments.len() - 1; + + let def_id = match self.result.per_module[&curr].items.get(name) { + None => return, + Some(res) => match res.def_id { + Some(it) => it, + None => return, + }, + }; + + if !is_last { + curr = match self.db.id_maps().def_loc(def_id) { + DefLoc::Module { id, .. } => id, + _ => return, + } + } else { + self.update(module_id, |items| { + let res = Resolution { + def_id: Some(def_id), + import: Some(ptr), + }; + items.items.insert(name.clone(), res); + }) + } + } + } + + fn update(&mut self, module_id: ModuleId, f: impl FnOnce(&mut ModuleScope)) { + let module_items = self.result.per_module.get_mut(&module_id).unwrap(); + f(module_items) + } +} + +#[cfg(test)] +mod tests { + use crate::{ + AnalysisChange, + mock_analysis::{MockAnalysis, analysis_and_position}, + hir::{DescriptorDatabase, module::ModuleDescriptor}, + input::FilesDatabase, +}; + use super::*; + + fn item_map(fixture: &str) -> (Arc, ModuleId) { + let (analysis, pos) = analysis_and_position(fixture); + let db = analysis.imp.db; + let source_root = db.file_source_root(pos.file_id); + let descr = ModuleDescriptor::guess_from_position(&*db, pos) + .unwrap() + .unwrap(); + let module_id = descr.module_id; + (db._item_map(source_root).unwrap(), module_id) + } + + #[test] + fn test_item_map() { + let (item_map, module_id) = item_map( + " + //- /lib.rs + mod foo; + + use crate::foo::bar::Baz; + <|> + + //- /foo/mod.rs + pub mod bar; + + //- /foo/bar.rs + pub struct Baz; + ", + ); + let name = SmolStr::from("Baz"); + let resolution = &item_map.per_module[&module_id].items[&name]; + assert!(resolution.def_id.is_some()); + } + + #[test] + fn typing_inside_a_function_should_not_invalidate_item_map() { + let mock_analysis = MockAnalysis::with_files( + " + //- /lib.rs + mod foo; + + use crate::foo::bar::Baz; + + fn foo() -> i32 { + 1 + 1 + } + //- /foo/mod.rs + pub mod bar; + + //- /foo/bar.rs + pub struct Baz; + ", + ); + + let file_id = mock_analysis.id_of("/lib.rs"); + let mut host = mock_analysis.analysis_host(); + + let source_root = host.analysis().imp.db.file_source_root(file_id); + + { + let db = host.analysis().imp.db; + let events = db.log_executed(|| { + db._item_map(source_root).unwrap(); + }); + assert!(format!("{:?}", events).contains("_item_map")) + } + + let mut change = AnalysisChange::new(); + + change.change_file( + file_id, + " + mod foo; + + use 
crate::foo::bar::Baz; + + fn foo() -> i32 { 92 } + " + .to_string(), + ); + + host.apply_change(change); + + { + let db = host.analysis().imp.db; + let events = db.log_executed(|| { + db._item_map(source_root).unwrap(); + }); + assert!( + !format!("{:?}", events).contains("_item_map"), + "{:#?}", + events + ) + } + } +} diff --git a/crates/ra_analysis/src/hir/path.rs b/crates/ra_analysis/src/hir/path.rs new file mode 100644 index 000000000..8279daf4b --- /dev/null +++ b/crates/ra_analysis/src/hir/path.rs @@ -0,0 +1,148 @@ +use ra_syntax::{SmolStr, ast, AstNode, TextRange}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct Path { + pub(crate) kind: PathKind, + pub(crate) segments: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum PathKind { + Plain, + Self_, + Super, + Crate, +} + +impl Path { + /// Calls `cb` with all paths, represented by this use item. + pub(crate) fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option)) { + if let Some(tree) = item.use_tree() { + expand_use_tree(None, tree, &mut cb); + } + } + + /// Converts an `ast::Path` to `Path`. Works with use trees. + pub(crate) fn from_ast(mut path: ast::Path) -> Option { + let mut kind = PathKind::Plain; + let mut segments = Vec::new(); + loop { + let segment = path.segment()?; + match segment.kind()? { + ast::PathSegmentKind::Name(name) => segments.push(name.text()), + ast::PathSegmentKind::CrateKw => { + kind = PathKind::Crate; + break; + } + ast::PathSegmentKind::SelfKw => { + kind = PathKind::Self_; + break; + } + ast::PathSegmentKind::SuperKw => { + kind = PathKind::Super; + break; + } + } + path = match qualifier(path) { + Some(it) => it, + None => break, + }; + } + segments.reverse(); + return Some(Path { kind, segments }); + + fn qualifier(path: ast::Path) -> Option { + if let Some(q) = path.qualifier() { + return Some(q); + } + // TODO: this bottom up traversal is not too precise. + // Should we handle do a top-down analysiss, recording results? + let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; + let use_tree = use_tree_list.parent_use_tree(); + use_tree.path() + } + } + + /// `true` is this path is a single identifier, like `foo` + pub(crate) fn is_ident(&self) -> bool { + self.kind == PathKind::Plain && self.segments.len() == 1 + } +} + +fn expand_use_tree( + prefix: Option, + tree: ast::UseTree, + cb: &mut impl FnMut(Path, Option), +) { + if let Some(use_tree_list) = tree.use_tree_list() { + let prefix = match tree.path() { + None => prefix, + Some(path) => match convert_path(prefix, path) { + Some(it) => Some(it), + None => return, // TODO: report errors somewhere + }, + }; + for tree in use_tree_list.use_trees() { + expand_use_tree(prefix.clone(), tree, cb); + } + } else { + if let Some(ast_path) = tree.path() { + if let Some(path) = convert_path(prefix, ast_path) { + let range = if tree.has_star() { + None + } else { + let range = ast_path.segment().unwrap().syntax().range(); + Some(range) + }; + cb(path, range) + } + } + } +} + +fn convert_path(prefix: Option, path: ast::Path) -> Option { + let prefix = if let Some(qual) = path.qualifier() { + Some(convert_path(prefix, qual)?) + } else { + None + }; + let segment = path.segment()?; + let res = match segment.kind()? 
{ + ast::PathSegmentKind::Name(name) => { + let mut res = prefix.unwrap_or_else(|| Path { + kind: PathKind::Plain, + segments: Vec::with_capacity(1), + }); + res.segments.push(name.text()); + res + } + ast::PathSegmentKind::CrateKw => { + if prefix.is_some() { + return None; + } + Path { + kind: PathKind::Crate, + segments: Vec::new(), + } + } + ast::PathSegmentKind::SelfKw => { + if prefix.is_some() { + return None; + } + Path { + kind: PathKind::Self_, + segments: Vec::new(), + } + } + ast::PathSegmentKind::SuperKw => { + if prefix.is_some() { + return None; + } + Path { + kind: PathKind::Super, + segments: Vec::new(), + } + } + }; + Some(res) +} diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs index 8a41b3152..8d60f9af7 100644 --- a/crates/ra_analysis/src/imp.rs +++ b/crates/ra_analysis/src/imp.rs @@ -19,7 +19,7 @@ use salsa::{Database, ParallelDatabase}; use crate::{ completion::{completions, CompletionItem}, db::{self, FileSyntaxQuery, SyntaxDatabase}, - descriptors::{ + hir::{ function::{FnDescriptor, FnId}, module::{ModuleDescriptor, Problem}, DeclarationDescriptor, DescriptorDatabase, @@ -589,7 +589,7 @@ fn resolve_local_name( let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?; let fn_id = FnId::get(db, file_id, fn_def); let scopes = db.fn_scopes(fn_id); - let scope_entry = crate::descriptors::function::resolve_local_name(name_ref, &scopes)?; + let scope_entry = crate::hir::function::resolve_local_name(name_ref, &scopes)?; let syntax = db.resolve_syntax_ptr(scope_entry.ptr().into_global(file_id)); Some((scope_entry.name().clone(), syntax.range())) } diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs index 0fbfd8a40..4659eb523 100644 --- a/crates/ra_analysis/src/lib.rs +++ b/crates/ra_analysis/src/lib.rs @@ -19,14 +19,14 @@ macro_rules! ctry { } mod arena; +mod syntax_ptr; +mod input; mod db; mod loc2id; -mod input; mod imp; mod completion; -mod descriptors; +mod hir; mod symbol_index; -mod syntax_ptr; pub mod mock_analysis; use std::{fmt, sync::Arc}; @@ -42,7 +42,7 @@ use crate::{ pub use crate::{ completion::CompletionItem, - descriptors::function::FnDescriptor, + hir::function::FnDescriptor, input::{CrateGraph, CrateId, FileId, FileResolver}, }; pub use ra_editor::{ diff --git a/crates/ra_analysis/src/loc2id.rs b/crates/ra_analysis/src/loc2id.rs index c7c799a91..d42af4e0a 100644 --- a/crates/ra_analysis/src/loc2id.rs +++ b/crates/ra_analysis/src/loc2id.rs @@ -9,8 +9,8 @@ use rustc_hash::FxHashMap; use crate::{ FileId, - descriptors::FileItemId, - descriptors::module::ModuleId, + hir::FileItemId, + hir::module::ModuleId, syntax_ptr::SyntaxPtr, input::SourceRootId, }; -- cgit v1.2.3
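After the rename, callers reach the scope machinery through `crate::hir::function`, as the `imp.rs` hunk above shows. Below is a minimal sketch of that flow, assuming only APIs present in this patch (`FnScopes::new`, `resolve_local_name`, `ScopeEntry::ptr`, and `find_node_at_offset` from `ra_editor`); the helper function is an illustrative assumption, not code from the commit.

    use ra_editor::find_node_at_offset;
    use ra_syntax::{ast, AstNode, SourceFileNode, TextRange, TextUnit};

    use crate::hir::function::{resolve_local_name, FnScopes};

    /// Find the definition of the local binding referenced at `offset`, if any.
    fn local_definition_range(file: &SourceFileNode, offset: TextUnit) -> Option<TextRange> {
        // Both lookups walk up from the node at `offset`.
        let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset)?;
        let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), offset)?;
        // Build the lexical scope tree for the enclosing function...
        let scopes = FnScopes::new(fn_def);
        // ...then walk the scope chain, innermost first, respecting shadowing.
        let entry = resolve_local_name(name_ref, &scopes)?;
        // Resolve the stored pointer back to a syntax node to get its range.
        Some(entry.ptr().resolve(file).range())
    }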