author     Aleksey Kladov <[email protected]>  2019-04-13 08:49:01 +0100
committer  Aleksey Kladov <[email protected]>  2019-04-13 08:49:01 +0100
commit     f4a94e74bcd6c8f9275a57a775e64314af1878da (patch)
tree       d10b8da727d6b581a78d79e660fe05218e5b80d3 /crates/ra_hir
parent     30481808fbfea109f324dfaf93daaaebacc75333 (diff)
fold ScopeWithSyntax into SourceAnalyzer
Diffstat (limited to 'crates/ra_hir')
 crates/ra_hir/src/code_model_api.rs |  14
 crates/ra_hir/src/expr.rs           |   4
 crates/ra_hir/src/expr/scope.rs     | 166
 crates/ra_hir/src/lib.rs            |   2
 crates/ra_hir/src/source_binder.rs  | 162
 5 files changed, 157 insertions, 191 deletions
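
Roughly, the new shape of the API is what the updated tests below exercise: instead of constructing a ScopesWithSourceMap by hand, callers create a SourceAnalyzer, which wires up the resolver, body source map and expression scopes internally. A minimal sketch along the lines of those tests (the fixture string `code`, the cursor offset `off`, and the wrapper function are placeholders, not part of this commit):

    // Sketch only: mirrors the updated tests in crates/ra_hir/src/expr/scope.rs.
    use ra_db::SourceDatabase;
    use ra_syntax::{ast, algo::find_node_at_offset, AstNode, TextUnit};

    use crate::{mock::MockDatabase, source_binder::SourceAnalyzer};

    fn resolve_at(code: &str, off: TextUnit) {
        let (db, _source_root, file_id) = MockDatabase::with_single_file(code);
        let file = db.parse(file_id);
        let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();

        // SourceAnalyzer now owns the scope/source-map plumbing that callers
        // previously had to assemble through ScopesWithSourceMap.
        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
        let entry = analyzer.resolve_local_name(name_ref).unwrap();
        let ptr = entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
        eprintln!("local binding found at {:?}", ptr.range());
    }
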
diff --git a/crates/ra_hir/src/code_model_api.rs b/crates/ra_hir/src/code_model_api.rs
index 5179f719d..882208ec1 100644
--- a/crates/ra_hir/src/code_model_api.rs
+++ b/crates/ra_hir/src/code_model_api.rs
@@ -4,7 +4,7 @@ use ra_db::{CrateId, SourceRootId, Edition};
 use ra_syntax::{ast::self, TreeArc};
 
 use crate::{
-    Name, ScopesWithSourceMap, Ty, HirFileId, Either,
+    Name, Ty, HirFileId, Either,
     HirDatabase, DefDatabase,
     type_ref::TypeRef,
     nameres::{ModuleScope, Namespace, ImportId, CrateModuleId},
@@ -466,12 +466,6 @@ impl DefWithBody {
             DefWithBody::Static(ref s) => s.resolver(db),
         }
     }
-
-    pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
-        let scopes = db.expr_scopes(*self);
-        let source_map = db.body_with_source_map(*self).1;
-        ScopesWithSourceMap { scopes, source_map }
-    }
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -535,12 +529,6 @@ impl Function {
         db.type_for_def((*self).into(), Namespace::Values)
     }
 
-    pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
-        let scopes = db.expr_scopes((*self).into());
-        let source_map = db.body_with_source_map((*self).into()).1;
-        ScopesWithSourceMap { scopes, source_map }
-    }
-
     pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
         db.fn_signature(*self)
     }
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 3806a3605..038a25a97 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -16,7 +16,7 @@ use crate::{
 };
 use crate::{ path::GenericArgs, ty::primitive::{IntTy, UncertainIntTy, FloatTy, UncertainFloatTy}};
 
-pub use self::scope::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax};
+pub use self::scope::{ExprScopes, ScopeEntryWithSyntax};
 
 pub(crate) mod scope;
 
@@ -93,7 +93,7 @@ pub fn resolver_for_scope(
 ) -> Resolver {
     let mut r = body.owner.resolver(db);
     let scopes = db.expr_scopes(body.owner);
-    let scope_chain = scopes.scope_chain_for(scope_id).collect::<Vec<_>>();
+    let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
     for scope in scope_chain.into_iter().rev() {
         r = r.push_expr_scope(Arc::clone(&scopes), scope);
     }
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs
index dcec51a10..476385a2f 100644
--- a/crates/ra_hir/src/expr/scope.rs
+++ b/crates/ra_hir/src/expr/scope.rs
@@ -1,17 +1,16 @@
 use std::sync::Arc;
 
-use rustc_hash::{FxHashMap, FxHashSet};
-
+use rustc_hash::{FxHashMap};
 use ra_syntax::{
-    AstNode, SyntaxNode, TextUnit, TextRange, SyntaxNodePtr, AstPtr,
+    TextRange, AstPtr,
     algo::generate,
     ast,
 };
 use ra_arena::{Arena, RawId, impl_arena_id};
 
 use crate::{
-    Name, AsName,DefWithBody, Either,
-    expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySourceMap},
+    Name, DefWithBody, Either,
+    expr::{PatId, ExprId, Pat, Expr, Body, Statement},
     HirDatabase,
 };
 
@@ -23,7 +22,7 @@ impl_arena_id!(ScopeId);
 pub struct ExprScopes {
     body: Arc<Body>,
     scopes: Arena<ScopeId, ScopeData>,
-    scope_for: FxHashMap<ExprId, ScopeId>,
+    pub(crate) scope_for: FxHashMap<ExprId, ScopeId>,
 }
 
 #[derive(Debug, PartialEq, Eq)]
@@ -66,10 +65,7 @@ impl ExprScopes {
         &self.scopes[scope].entries
     }
 
-    pub fn scope_chain_for<'a>(
-        &'a self,
-        scope: Option<ScopeId>,
-    ) -> impl Iterator<Item = ScopeId> + 'a {
+    pub fn scope_chain<'a>(&'a self, scope: Option<ScopeId>) -> impl Iterator<Item = ScopeId> + 'a {
         generate(scope, move |&scope| self.scopes[scope].parent)
     }
 
@@ -108,15 +104,9 @@ impl ExprScopes {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ScopesWithSourceMap {
-    pub(crate) source_map: Arc<BodySourceMap>,
-    pub(crate) scopes: Arc<ExprScopes>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ScopeEntryWithSyntax {
-    name: Name,
-    ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
+    pub(crate) name: Name,
+    pub(crate) ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
 }
 
 impl ScopeEntryWithSyntax {
@@ -129,96 +119,6 @@ impl ScopeEntryWithSyntax {
     }
 }
 
-impl ScopesWithSourceMap {
-    fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
-        generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
-    }
-
-    pub(crate) fn scope_for_offset(&self, offset: TextUnit) -> Option<ScopeId> {
-        self.scopes
-            .scope_for
-            .iter()
-            .filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
-            // find containing scope
-            .min_by_key(|(ptr, _scope)| {
-                (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
-            })
-            .map(|(ptr, scope)| self.adjust(ptr, *scope, offset))
-    }
-
-    // XXX: during completion, cursor might be outside of any particular
-    // expression. Try to figure out the correct scope...
-    // FIXME: move this to source binder?
-    fn adjust(&self, ptr: SyntaxNodePtr, original_scope: ScopeId, offset: TextUnit) -> ScopeId {
-        let r = ptr.range();
-        let child_scopes = self
-            .scopes
-            .scope_for
-            .iter()
-            .filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
-            .map(|(ptr, scope)| (ptr.range(), scope))
-            .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
-
-        child_scopes
-            .max_by(|(r1, _), (r2, _)| {
-                if r2.is_subrange(&r1) {
-                    std::cmp::Ordering::Greater
-                } else if r1.is_subrange(&r2) {
-                    std::cmp::Ordering::Less
-                } else {
-                    r1.start().cmp(&r2.start())
-                }
-            })
-            .map(|(_ptr, scope)| *scope)
-            .unwrap_or(original_scope)
-    }
-
-    pub(crate) fn resolve_local_name(
-        &self,
-        name_ref: &ast::NameRef,
-    ) -> Option<ScopeEntryWithSyntax> {
-        let mut shadowed = FxHashSet::default();
-        let name = name_ref.as_name();
-        let ret = self
-            .scope_chain(name_ref.syntax())
-            .flat_map(|scope| self.scopes.entries(scope).iter())
-            .filter(|entry| shadowed.insert(entry.name()))
-            .filter(|entry| entry.name() == &name)
-            .nth(0);
-        ret.and_then(|entry| {
-            Some(ScopeEntryWithSyntax {
-                name: entry.name().clone(),
-                ptr: self.source_map.pat_syntax(entry.pat())?,
-            })
-        })
-    }
-
-    pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
-        let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
-        let ptr = Either::A(AstPtr::new(pat.into()));
-        fn_def
-            .syntax()
-            .descendants()
-            .filter_map(ast::NameRef::cast)
-            .filter(|name_ref| match self.resolve_local_name(*name_ref) {
-                None => false,
-                Some(entry) => entry.ptr() == ptr,
-            })
-            .map(|name_ref| ReferenceDescriptor {
-                name: name_ref.syntax().text().to_string(),
-                range: name_ref.syntax().range(),
-            })
-            .collect()
-    }
-
-    pub(crate) fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
-        node.ancestors()
-            .map(SyntaxNodePtr::new)
-            .filter_map(|ptr| self.source_map.syntax_expr(ptr))
-            .find_map(|it| self.scopes.scope_for(it))
-    }
-}
-
 impl ScopeEntry {
     pub fn name(&self) -> &Name {
         &self.name
@@ -297,12 +197,11 @@ pub struct ReferenceDescriptor {
 
 #[cfg(test)]
 mod tests {
-    use ra_db::salsa::InternKey;
-    use ra_syntax::{SourceFile, algo::find_node_at_offset};
+    use ra_db::SourceDatabase;
+    use ra_syntax::{algo::find_node_at_offset, AstNode, SyntaxNodePtr};
     use test_utils::{extract_offset, assert_eq_text};
-    use crate::Function;
 
-    use crate::expr::{ExprCollector};
+    use crate::{source_binder::SourceAnalyzer, mock::MockDatabase};
 
     use super::*;
 
@@ -316,18 +215,20 @@ mod tests {
             buf.push_str(&code[off..]);
             buf
         };
-        let file = SourceFile::parse(&code);
+
+        let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
+        let file = db.parse(file_id);
         let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
-        let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
-        let irrelevant_function =
-            Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
-        let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
-        let scopes = ExprScopes::new(Arc::new(body));
-        let scopes =
-            ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
+        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
+
+        let scopes = analyzer.scopes();
+        let expr_id =
+            analyzer.body_source_map().syntax_expr(SyntaxNodePtr::new(marker.syntax())).unwrap();
+        let scope = scopes.scope_for(expr_id);
+
         let actual = scopes
-            .scope_chain(marker.syntax())
-            .flat_map(|scope| scopes.scopes.entries(scope))
+            .scope_chain(scope)
+            .flat_map(|scope| scopes.entries(scope))
             .map(|it| it.name().to_string())
             .collect::<Vec<_>>()
             .join("\n");
@@ -410,28 +311,17 @@ mod tests {
         );
     }
 
-    fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> (Body, BodySourceMap) {
-        let mut collector = ExprCollector::new(DefWithBody::Function(function));
-        collector.collect_fn_body(node);
-        collector.finish()
-    }
-
     fn do_check_local_name(code: &str, expected_offset: u32) {
        let (off, code) = extract_offset(code);
-        let file = SourceFile::parse(&code);
+
+        let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
+        let file = db.parse(file_id);
         let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
             .expect("failed to find a name at the target offset");
-
-        let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
         let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
+        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
 
-        let irrelevant_function =
-            Function { id: crate::ids::FunctionId::from_intern_id(0u32.into()) };
-        let (body, source_map) = collect_fn_body_syntax(irrelevant_function, fn_def);
-        let scopes = ExprScopes::new(Arc::new(body));
-        let scopes =
-            ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
-        let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
+        let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap();
         let local_name =
             local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
         assert_eq!(local_name.range(), expected_name.syntax().range());
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 3ca810a8b..eb2aa0e6c 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -64,7 +64,7 @@ pub use self::{
     impl_block::{ImplBlock, ImplItem},
     docs::{Docs, Documentation},
     adt::AdtDef,
-    expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
+    expr::{ExprScopes, ScopeEntryWithSyntax},
     resolve::{Resolver, Resolution},
     source_binder::{SourceAnalyzer, PathResolution},
 };
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 1c9e9320d..d87f8ff34 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -7,9 +7,10 @@
 /// purely for "IDE needs".
 use std::sync::Arc;
 
+use rustc_hash::FxHashSet;
 use ra_db::{FileId, FilePosition};
 use ra_syntax::{
-    SyntaxNode, AstPtr, TextUnit,
+    SyntaxNode, AstPtr, TextUnit, SyntaxNodePtr,
     ast::{self, AstNode, NameOwner},
     algo::find_node_at_offset,
     SyntaxKind::*,
@@ -18,7 +19,7 @@ use ra_syntax::{
 use crate::{
     HirDatabase, Function, Struct, Enum, Const, Static, Either, DefWithBody,
     AsName, Module, HirFileId, Crate, Trait, Resolver,
-    expr::scope::{ReferenceDescriptor, ScopeEntryWithSyntax},
+    expr::{BodySourceMap, scope::{ReferenceDescriptor, ScopeEntryWithSyntax, ScopeId, ExprScopes}},
     ids::LocationCtx,
     expr, AstId
 };
@@ -120,29 +121,6 @@ pub fn trait_from_module(
     Trait { id: ctx.to_def(trait_def) }
 }
 
-fn resolver_for_node(
-    db: &impl HirDatabase,
-    file_id: FileId,
-    node: &SyntaxNode,
-    offset: Option<TextUnit>,
-) -> Resolver {
-    node.ancestors()
-        .find_map(|node| {
-            if ast::Expr::cast(node).is_some() || ast::Block::cast(node).is_some() {
-                let def = def_with_body_from_child_node(db, file_id, node)?;
-                let scopes = def.scopes(db);
-                let scope = match offset {
-                    None => scopes.scope_for(&node),
-                    Some(offset) => scopes.scope_for_offset(offset),
-                };
-                Some(expr::resolver_for_scope(def.body(db), db, scope))
-            } else {
-                try_get_resolver_for_node(db, file_id, node)
-            }
-        })
-        .unwrap_or_default()
-}
-
 fn try_get_resolver_for_node(
     db: &impl HirDatabase,
     file_id: FileId,
@@ -192,9 +170,9 @@ fn def_with_body_from_child_node(
 #[derive(Debug)]
 pub struct SourceAnalyzer {
     resolver: Resolver,
-    body_source_map: Option<Arc<crate::expr::BodySourceMap>>,
+    body_source_map: Option<Arc<BodySourceMap>>,
     infer: Option<Arc<crate::ty::InferenceResult>>,
-    scopes: Option<crate::expr::ScopesWithSourceMap>,
+    scopes: Option<Arc<crate::expr::ExprScopes>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -217,11 +195,30 @@ impl SourceAnalyzer {
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
         let def_with_body = def_with_body_from_child_node(db, file_id, node);
-        SourceAnalyzer {
-            resolver: resolver_for_node(db, file_id, node, offset),
-            body_source_map: def_with_body.map(|it| it.body_source_map(db)),
-            infer: def_with_body.map(|it| it.infer(db)),
-            scopes: def_with_body.map(|it| it.scopes(db)),
+        if let Some(def) = def_with_body {
+            let source_map = def.body_source_map(db);
+            let scopes = db.expr_scopes(def);
+            let scope = match offset {
+                None => scope_for(&scopes, &source_map, &node),
+                Some(offset) => scope_for_offset(&scopes, &source_map, offset),
+            };
+            let resolver = expr::resolver_for_scope(def.body(db), db, scope);
+            SourceAnalyzer {
+                resolver,
+                body_source_map: Some(source_map),
+                infer: Some(def.infer(db)),
+                scopes: Some(scopes),
+            }
+        } else {
+            SourceAnalyzer {
+                resolver: node
+                    .ancestors()
+                    .find_map(|node| try_get_resolver_for_node(db, file_id, node))
+                    .unwrap_or_default(),
+                body_source_map: None,
+                infer: None,
+                scopes: None,
+            }
         }
     }
 
@@ -276,16 +273,46 @@ impl SourceAnalyzer {
         Some(res)
     }
 
-    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Option<Vec<ReferenceDescriptor>> {
-        self.scopes.as_ref().map(|it| it.find_all_refs(pat))
+    pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
+        let mut shadowed = FxHashSet::default();
+        let name = name_ref.as_name();
+        let source_map = self.body_source_map.as_ref()?;
+        let scopes = self.scopes.as_ref()?;
+        let scope = scope_for(scopes, source_map, name_ref.syntax());
+        let ret = scopes
+            .scope_chain(scope)
+            .flat_map(|scope| scopes.entries(scope).iter())
+            .filter(|entry| shadowed.insert(entry.name()))
+            .filter(|entry| entry.name() == &name)
+            .nth(0);
+        ret.and_then(|entry| {
+            Some(ScopeEntryWithSyntax {
+                name: entry.name().clone(),
+                ptr: source_map.pat_syntax(entry.pat())?,
+            })
+        })
     }
 
-    pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
-        self.scopes.as_ref()?.resolve_local_name(name_ref)
+    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
+        let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
+        let ptr = Either::A(AstPtr::new(pat.into()));
+        fn_def
+            .syntax()
+            .descendants()
+            .filter_map(ast::NameRef::cast)
+            .filter(|name_ref| match self.resolve_local_name(*name_ref) {
+                None => false,
+                Some(entry) => entry.ptr() == ptr,
+            })
+            .map(|name_ref| ReferenceDescriptor {
+                name: name_ref.syntax().text().to_string(),
+                range: name_ref.syntax().range(),
+            })
+            .collect()
     }
 
     #[cfg(test)]
-    pub(crate) fn body_source_map(&self) -> Arc<crate::expr::BodySourceMap> {
+    pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
         self.body_source_map.clone().unwrap()
     }
 
@@ -293,4 +320,65 @@ impl SourceAnalyzer {
     pub(crate) fn inference_result(&self) -> Arc<crate::ty::InferenceResult> {
         self.infer.clone().unwrap()
     }
+
+    #[cfg(test)]
+    pub(crate) fn scopes(&self) -> Arc<ExprScopes> {
+        self.scopes.clone().unwrap()
+    }
+}
+
+fn scope_for(
+    scopes: &ExprScopes,
+    source_map: &BodySourceMap,
+    node: &SyntaxNode,
+) -> Option<ScopeId> {
+    node.ancestors()
+        .map(SyntaxNodePtr::new)
+        .filter_map(|ptr| source_map.syntax_expr(ptr))
+        .find_map(|it| scopes.scope_for(it))
+}
+
+fn scope_for_offset(
+    scopes: &ExprScopes,
+    source_map: &BodySourceMap,
+    offset: TextUnit,
+) -> Option<ScopeId> {
+    scopes
+        .scope_for
+        .iter()
+        .filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
+        // find containing scope
+        .min_by_key(|(ptr, _scope)| {
+            (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
+        })
+        .map(|(ptr, scope)| adjust(scopes, source_map, ptr, offset).unwrap_or(*scope))
+}
+
+// XXX: during completion, cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
+fn adjust(
+    scopes: &ExprScopes,
+    source_map: &BodySourceMap,
+    ptr: SyntaxNodePtr,
+    offset: TextUnit,
+) -> Option<ScopeId> {
+    let r = ptr.range();
+    let child_scopes = scopes
+        .scope_for
+        .iter()
+        .filter_map(|(id, scope)| Some((source_map.expr_syntax(*id)?, scope)))
+        .map(|(ptr, scope)| (ptr.range(), scope))
+        .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
+
+    child_scopes
+        .max_by(|(r1, _), (r2, _)| {
+            if r2.is_subrange(&r1) {
+                std::cmp::Ordering::Greater
+            } else if r1.is_subrange(&r2) {
+                std::cmp::Ordering::Less
+            } else {
+                r1.start().cmp(&r2.start())
+            }
+        })
+        .map(|(_ptr, scope)| *scope)
 }