From f4a94e74bcd6c8f9275a57a775e64314af1878da Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 13 Apr 2019 10:49:01 +0300
Subject: fold ScopeWithSyntax into SourceAnalyzer

---
 crates/ra_hir/src/code_model_api.rs |  14 +--
 crates/ra_hir/src/expr.rs           |   4 +-
 crates/ra_hir/src/expr/scope.rs     | 166 ++++------------------------------
 crates/ra_hir/src/lib.rs            |   2 +-
 crates/ra_hir/src/source_binder.rs  | 162 +++++++++++++++++++++++--------
 5 files changed, 157 insertions(+), 191 deletions(-)

(limited to 'crates/ra_hir')

diff --git a/crates/ra_hir/src/code_model_api.rs b/crates/ra_hir/src/code_model_api.rs
index 5179f719d..882208ec1 100644
--- a/crates/ra_hir/src/code_model_api.rs
+++ b/crates/ra_hir/src/code_model_api.rs
@@ -4,7 +4,7 @@ use ra_db::{CrateId, SourceRootId, Edition};
 use ra_syntax::{ast::self, TreeArc};
 
 use crate::{
-    Name, ScopesWithSourceMap, Ty, HirFileId, Either,
+    Name, Ty, HirFileId, Either,
     HirDatabase, DefDatabase,
     type_ref::TypeRef,
     nameres::{ModuleScope, Namespace, ImportId, CrateModuleId},
@@ -466,12 +466,6 @@ impl DefWithBody {
             DefWithBody::Static(ref s) => s.resolver(db),
         }
     }
-
-    pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
-        let scopes = db.expr_scopes(*self);
-        let source_map = db.body_with_source_map(*self).1;
-        ScopesWithSourceMap { scopes, source_map }
-    }
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -535,12 +529,6 @@ impl Function {
         db.type_for_def((*self).into(), Namespace::Values)
     }
 
-    pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
-        let scopes = db.expr_scopes((*self).into());
-        let source_map = db.body_with_source_map((*self).into()).1;
-        ScopesWithSourceMap { scopes, source_map }
-    }
-
     pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
         db.fn_signature(*self)
     }
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 3806a3605..038a25a97 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -16,7 +16,7 @@ use crate::{
     path::GenericArgs,
     ty::primitive::{IntTy, UncertainIntTy, FloatTy, UncertainFloatTy}
 };
 
-pub use self::scope::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax};
+pub use self::scope::{ExprScopes, ScopeEntryWithSyntax};
 
 pub(crate) mod scope;
@@ -93,7 +93,7 @@ pub fn resolver_for_scope(
 ) -> Resolver {
     let mut r = body.owner.resolver(db);
     let scopes = db.expr_scopes(body.owner);
-    let scope_chain = scopes.scope_chain_for(scope_id).collect::<Vec<_>>();
+    let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
     for scope in scope_chain.into_iter().rev() {
         r = r.push_expr_scope(Arc::clone(&scopes), scope);
     }
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs
index dcec51a10..476385a2f 100644
--- a/crates/ra_hir/src/expr/scope.rs
+++ b/crates/ra_hir/src/expr/scope.rs
@@ -1,17 +1,16 @@
 use std::sync::Arc;
 
-use rustc_hash::{FxHashMap, FxHashSet};
-
+use rustc_hash::{FxHashMap};
 use ra_syntax::{
-    AstNode, SyntaxNode, TextUnit, TextRange, SyntaxNodePtr, AstPtr,
+    TextRange, AstPtr,
     algo::generate,
     ast,
 };
 use ra_arena::{Arena, RawId, impl_arena_id};
 
 use crate::{
-    Name, AsName,DefWithBody, Either,
-    expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySourceMap},
+    Name, DefWithBody, Either,
+    expr::{PatId, ExprId, Pat, Expr, Body, Statement},
     HirDatabase,
 };
 
@@ -23,7 +22,7 @@ impl_arena_id!(ScopeId);
 pub struct ExprScopes {
     body: Arc<Body>,
     scopes: Arena<ScopeId, ScopeData>,
-    scope_for: FxHashMap<ExprId, ScopeId>,
+    pub(crate) scope_for: FxHashMap<ExprId, ScopeId>,
 }
 
 #[derive(Debug, PartialEq, Eq)]
@@ -66,10 +65,7 @@ impl ExprScopes {
         &self.scopes[scope].entries
     }
 
-    pub fn scope_chain_for<'a>(
-        &'a self,
-        scope: Option<ScopeId>,
-    ) -> impl Iterator<Item = ScopeId> + 'a {
+    pub fn scope_chain<'a>(&'a self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + 'a {
         generate(scope, move |&scope| self.scopes[scope].parent)
     }
@@ -107,16 +103,10 @@ impl ExprScopes {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ScopesWithSourceMap {
-    pub(crate) source_map: Arc<BodySourceMap>,
-    pub(crate) scopes: Arc<ExprScopes>,
-}
-
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ScopeEntryWithSyntax {
-    name: Name,
-    ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
+    pub(crate) name: Name,
+    pub(crate) ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
 }
 
 impl ScopeEntryWithSyntax {
@@ -129,96 +119,6 @@ impl ScopeEntryWithSyntax {
     }
 }
 
-impl ScopesWithSourceMap {
-    fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
-        generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
-    }
-
-    pub(crate) fn scope_for_offset(&self, offset: TextUnit) -> Option<ScopeId> {
-        self.scopes
-            .scope_for
-            .iter()
-            .filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
-            // find containing scope
-            .min_by_key(|(ptr, _scope)| {
-                (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
-            })
-            .map(|(ptr, scope)| self.adjust(ptr, *scope, offset))
-    }
-
-    // XXX: during completion, cursor might be outside of any particular
-    // expression. Try to figure out the correct scope...
-    // FIXME: move this to source binder?
-    fn adjust(&self, ptr: SyntaxNodePtr, original_scope: ScopeId, offset: TextUnit) -> ScopeId {
-        let r = ptr.range();
-        let child_scopes = self
-            .scopes
-            .scope_for
-            .iter()
-            .filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
-            .map(|(ptr, scope)| (ptr.range(), scope))
-            .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
-
-        child_scopes
-            .max_by(|(r1, _), (r2, _)| {
-                if r2.is_subrange(&r1) {
-                    std::cmp::Ordering::Greater
-                } else if r1.is_subrange(&r2) {
-                    std::cmp::Ordering::Less
-                } else {
-                    r1.start().cmp(&r2.start())
-                }
-            })
-            .map(|(_ptr, scope)| *scope)
-            .unwrap_or(original_scope)
-    }
-
-    pub(crate) fn resolve_local_name(
-        &self,
-        name_ref: &ast::NameRef,
-    ) -> Option<ScopeEntryWithSyntax> {
-        let mut shadowed = FxHashSet::default();
-        let name = name_ref.as_name();
-        let ret = self
-            .scope_chain(name_ref.syntax())
-            .flat_map(|scope| self.scopes.entries(scope).iter())
-            .filter(|entry| shadowed.insert(entry.name()))
-            .filter(|entry| entry.name() == &name)
-            .nth(0);
-        ret.and_then(|entry| {
-            Some(ScopeEntryWithSyntax {
-                name: entry.name().clone(),
-                ptr: self.source_map.pat_syntax(entry.pat())?,
-            })
-        })
-    }
-
-    pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
-        let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
-        let ptr = Either::A(AstPtr::new(pat.into()));
-        fn_def
-            .syntax()
-            .descendants()
-            .filter_map(ast::NameRef::cast)
-            .filter(|name_ref| match self.resolve_local_name(*name_ref) {
-                None => false,
-                Some(entry) => entry.ptr() == ptr,
-            })
-            .map(|name_ref| ReferenceDescriptor {
-                name: name_ref.syntax().text().to_string(),
-                range: name_ref.syntax().range(),
-            })
-            .collect()
-    }
-
-    pub(crate) fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
-        node.ancestors()
-            .map(SyntaxNodePtr::new)
-            .filter_map(|ptr| self.source_map.syntax_expr(ptr))
-            .find_map(|it| self.scopes.scope_for(it))
-    }
-}
-
 impl ScopeEntry {
     pub fn name(&self) -> &Name {
         &self.name
@@ -297,12 +197,11 @@ pub struct ReferenceDescriptor {
 
 #[cfg(test)]
 mod tests {
-    use ra_db::salsa::InternKey;
-    use ra_syntax::{SourceFile, algo::find_node_at_offset};
+    use ra_db::SourceDatabase;
+    use ra_syntax::{algo::find_node_at_offset, AstNode, SyntaxNodePtr};
     use test_utils::{extract_offset, assert_eq_text};
 
-    use crate::Function;
-    use crate::expr::{ExprCollector};
+    use crate::{source_binder::SourceAnalyzer, mock::MockDatabase};
 
     use super::*;
@@ -316,18 +215,20 @@ mod tests {
             buf.push_str(&code[off..]);
             buf
         };
-        let file = SourceFile::parse(&code);
+
+        let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
+        let file = db.parse(file_id);
         let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
-        let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
-        let irrelevant_function =
-            Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
-        let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
-        let scopes = ExprScopes::new(Arc::new(body));
-        let scopes =
-            ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
+        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
+
+        let scopes = analyzer.scopes();
+        let expr_id =
+            analyzer.body_source_map().syntax_expr(SyntaxNodePtr::new(marker.syntax())).unwrap();
+        let scope = scopes.scope_for(expr_id);
+
         let actual = scopes
-            .scope_chain(marker.syntax())
-            .flat_map(|scope| scopes.scopes.entries(scope))
+            .scope_chain(scope)
+            .flat_map(|scope| scopes.entries(scope))
             .map(|it| it.name().to_string())
             .collect::<Vec<_>>()
             .join("\n");
@@ -410,28 +311,17 @@
         );
     }
 
-    fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> (Body, BodySourceMap) {
-        let mut collector = ExprCollector::new(DefWithBody::Function(function));
-        collector.collect_fn_body(node);
-        collector.finish()
-    }
-
     fn do_check_local_name(code: &str, expected_offset: u32) {
         let (off, code) = extract_offset(code);
-        let file = SourceFile::parse(&code);
+
+        let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
+        let file = db.parse(file_id);
         let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
             .expect("failed to find a name at the target offset");
-
-        let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
         let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
+        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
 
-        let irrelevant_function =
-            Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
-        let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
-        let scopes = ExprScopes::new(Arc::new(body));
-        let scopes =
-            ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
-        let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
+        let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap();
         let local_name =
             local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
         assert_eq!(local_name.range(), expected_name.syntax().range());
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 3ca810a8b..eb2aa0e6c 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -64,7 +64,7 @@ pub use self::{
     impl_block::{ImplBlock, ImplItem},
     docs::{Docs, Documentation},
     adt::AdtDef,
-    expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
+    expr::{ExprScopes, ScopeEntryWithSyntax},
     resolve::{Resolver, Resolution},
     source_binder::{SourceAnalyzer, PathResolution},
 };
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 1c9e9320d..d87f8ff34 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -7,9 +7,10 @@
 /// purely for "IDE needs".
 use std::sync::Arc;
 
+use rustc_hash::FxHashSet;
 use ra_db::{FileId, FilePosition};
 use ra_syntax::{
-    SyntaxNode, AstPtr, TextUnit,
+    SyntaxNode, AstPtr, TextUnit, SyntaxNodePtr,
     ast::{self, AstNode, NameOwner},
     algo::find_node_at_offset,
     SyntaxKind::*,
@@ -18,7 +19,7 @@ use ra_syntax::{
 use crate::{
     HirDatabase, Function, Struct, Enum, Const, Static, Either, DefWithBody,
     AsName, Module, HirFileId, Crate, Trait, Resolver,
-    expr::scope::{ReferenceDescriptor, ScopeEntryWithSyntax},
+    expr::{BodySourceMap, scope::{ReferenceDescriptor, ScopeEntryWithSyntax, ScopeId, ExprScopes}},
     ids::LocationCtx,
     expr, AstId
 };
@@ -120,29 +121,6 @@ pub fn trait_from_module(
     Trait { id: ctx.to_def(trait_def) }
 }
 
-fn resolver_for_node(
-    db: &impl HirDatabase,
-    file_id: FileId,
-    node: &SyntaxNode,
-    offset: Option<TextUnit>,
-) -> Resolver {
-    node.ancestors()
-        .find_map(|node| {
-            if ast::Expr::cast(node).is_some() || ast::Block::cast(node).is_some() {
-                let def = def_with_body_from_child_node(db, file_id, node)?;
-                let scopes = def.scopes(db);
-                let scope = match offset {
-                    None => scopes.scope_for(&node),
-                    Some(offset) => scopes.scope_for_offset(offset),
-                };
-                Some(expr::resolver_for_scope(def.body(db), db, scope))
-            } else {
-                try_get_resolver_for_node(db, file_id, node)
-            }
-        })
-        .unwrap_or_default()
-}
-
 fn try_get_resolver_for_node(
     db: &impl HirDatabase,
     file_id: FileId,
@@ -192,9 +170,9 @@ fn def_with_body_from_child_node(
 #[derive(Debug)]
 pub struct SourceAnalyzer {
     resolver: Resolver,
-    body_source_map: Option<Arc<crate::expr::BodySourceMap>>,
+    body_source_map: Option<Arc<BodySourceMap>>,
     infer: Option<Arc<crate::ty::InferenceResult>>,
-    scopes: Option<crate::expr::ScopesWithSourceMap>,
+    scopes: Option<Arc<ExprScopes>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -217,11 +195,30 @@ impl SourceAnalyzer {
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
         let def_with_body = def_with_body_from_child_node(db, file_id, node);
-        SourceAnalyzer {
-            resolver: resolver_for_node(db, file_id, node, offset),
-            body_source_map: def_with_body.map(|it| it.body_source_map(db)),
-            infer: def_with_body.map(|it| it.infer(db)),
-            scopes: def_with_body.map(|it| it.scopes(db)),
+        if let Some(def) = def_with_body {
+            let source_map = def.body_source_map(db);
+            let scopes = db.expr_scopes(def);
+            let scope = match offset {
+                None => scope_for(&scopes, &source_map, &node),
+                Some(offset) => scope_for_offset(&scopes, &source_map, offset),
+            };
+            let resolver = expr::resolver_for_scope(def.body(db), db, scope);
+            SourceAnalyzer {
+                resolver,
+                body_source_map: Some(source_map),
+                infer: Some(def.infer(db)),
+                scopes: Some(scopes),
+            }
+        } else {
+            SourceAnalyzer {
+                resolver: node
+                    .ancestors()
+                    .find_map(|node| try_get_resolver_for_node(db, file_id, node))
+                    .unwrap_or_default(),
+                body_source_map: None,
+                infer: None,
+                scopes: None,
+            }
         }
     }
 
@@ -276,16 +273,46 @@ impl SourceAnalyzer {
         Some(res)
     }
 
-    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Option<Vec<ReferenceDescriptor>> {
-        self.scopes.as_ref().map(|it| it.find_all_refs(pat))
+    pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
+        let mut shadowed = FxHashSet::default();
+        let name = name_ref.as_name();
+        let source_map = self.body_source_map.as_ref()?;
+        let scopes = self.scopes.as_ref()?;
+        let scope = scope_for(scopes, source_map, name_ref.syntax());
+        let ret = scopes
+            .scope_chain(scope)
+            .flat_map(|scope| scopes.entries(scope).iter())
+            .filter(|entry| shadowed.insert(entry.name()))
+            .filter(|entry| entry.name() == &name)
+            .nth(0);
+        ret.and_then(|entry| {
+            Some(ScopeEntryWithSyntax {
+                name: entry.name().clone(),
+                ptr: source_map.pat_syntax(entry.pat())?,
+            })
+        })
     }
 
-    pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
-        self.scopes.as_ref()?.resolve_local_name(name_ref)
+    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
+        let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
+        let ptr = Either::A(AstPtr::new(pat.into()));
+        fn_def
+            .syntax()
+            .descendants()
+            .filter_map(ast::NameRef::cast)
+            .filter(|name_ref| match self.resolve_local_name(*name_ref) {
+                None => false,
+                Some(entry) => entry.ptr() == ptr,
+            })
+            .map(|name_ref| ReferenceDescriptor {
+                name: name_ref.syntax().text().to_string(),
+                range: name_ref.syntax().range(),
+            })
+            .collect()
     }
 
     #[cfg(test)]
-    pub(crate) fn body_source_map(&self) -> Arc<crate::expr::BodySourceMap> {
+    pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
         self.body_source_map.clone().unwrap()
     }
 
@@ -293,4 +320,65 @@ impl SourceAnalyzer {
     pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
         self.infer.clone().unwrap()
     }
+
+    #[cfg(test)]
+    pub(crate) fn scopes(&self) -> Arc<ExprScopes> {
+        self.scopes.clone().unwrap()
+    }
+}
+
+fn scope_for(
+    scopes: &ExprScopes,
+    source_map: &BodySourceMap,
+    node: &SyntaxNode,
+) -> Option<ScopeId> {
+    node.ancestors()
+        .map(SyntaxNodePtr::new)
+        .filter_map(|ptr| source_map.syntax_expr(ptr))
+        .find_map(|it| scopes.scope_for(it))
+}
+
+fn scope_for_offset(
+    scopes: &ExprScopes,
+    source_map: &BodySourceMap,
+    offset: TextUnit,
+) -> Option<ScopeId> {
+    scopes
+        .scope_for
+        .iter()
+        .filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
+        // find containing scope
+        .min_by_key(|(ptr, _scope)| {
+            (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
+        })
+        .map(|(ptr, scope)| adjust(scopes, source_map, ptr, offset).unwrap_or(*scope))
+}
+
+// XXX: during completion, cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
+fn adjust(
+    scopes: &ExprScopes,
+    source_map: &BodySourceMap,
+    ptr: SyntaxNodePtr,
+    offset: TextUnit,
+) -> Option<ScopeId> {
+    let r = ptr.range();
+    let child_scopes = scopes
+        .scope_for
+        .iter()
+        .filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
+        .map(|(ptr, scope)| (ptr.range(), scope))
+        .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
+
+    child_scopes
+        .max_by(|(r1, _), (r2, _)| {
+            if r2.is_subrange(&r1) {
+                std::cmp::Ordering::Greater
+            } else if r1.is_subrange(&r2) {
+                std::cmp::Ordering::Less
+            } else {
+                r1.start().cmp(&r2.start())
+            }
+        })
+        .map(|(_ptr, scope)| *scope)
 }
--
cgit v1.2.3