author     Aleksey Kladov <[email protected]>   2020-02-26 12:04:22 +0000
committer  GitHub <[email protected]>            2020-02-26 12:04:22 +0000
commit     5c64ad27e041bcdb281c0a751720ceb3a6369d04 (patch)
tree       12d89798f61b276f8bd640db07276a7d4e92b1c2 /crates/ra_hir/src
parent     04deae3dba7c9b7054f7a1d64e4b93a05aecc132 (diff)
parent     c3a4c4429de83450654795534e64e878a774a088 (diff)
Merge pull request #3222 from matklad/identity
Introduce Semantics API
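
As a rough illustration of how callers are expected to drive the new API (this sketch is not part of the change; the crate path `ra_hir`, the helper name `type_at_offset`, and the `db`/`file_id`/`offset` inputs are assumptions made for the example), a "type at cursor" feature would create one `Semantics` per request, parse through it, and then ask semantic questions about nodes from that tree:

use ra_db::FileId;
use ra_hir::{db::HirDatabase, Semantics, Type};
use ra_syntax::{ast, AstNode, TextUnit};

fn type_at_offset(db: &impl HirDatabase, file_id: FileId, offset: TextUnit) -> Option<Type> {
    // One `Semantics` per request; it remembers the syntax roots it hands out,
    // so nodes derived from them can later be mapped back to their file.
    let sema = Semantics::new(db);
    let source_file = sema.parse(file_id);

    // Syntactic step: pick a token at the offset and, if it originates from a
    // macro call, descend into the expansion so the analysis sees the expanded token.
    let token = source_file.syntax().token_at_offset(offset).left_biased()?;
    let token = sema.descend_into_macros(token);
    let expr = token.ancestors().find_map(ast::Expr::cast)?;

    // Semantic step: ask for the inferred type of that expression.
    sema.type_of_expr(&expr)
}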
Diffstat (limited to 'crates/ra_hir/src')
-rw-r--r--  crates/ra_hir/src/lib.rs               5
-rw-r--r--  crates/ra_hir/src/semantics.rs       335
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs 194
-rw-r--r--  crates/ra_hir/src/source_binder.rs   161
4 files changed, 480 insertions, 215 deletions
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 7a9745ebe..004a2185f 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -26,6 +26,7 @@ macro_rules! impl_froms { | |||
26 | } | 26 | } |
27 | } | 27 | } |
28 | 28 | ||
29 | mod semantics; | ||
29 | pub mod db; | 30 | pub mod db; |
30 | pub mod source_analyzer; | 31 | pub mod source_analyzer; |
31 | pub mod source_binder; | 32 | pub mod source_binder; |
@@ -45,8 +46,8 @@ pub use crate::{ | |||
45 | StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, | 46 | StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef, |
46 | }, | 47 | }, |
47 | has_source::HasSource, | 48 | has_source::HasSource, |
48 | source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, | 49 | semantics::{original_range, Semantics, SemanticsScope}, |
49 | source_binder::SourceBinder, | 50 | source_analyzer::{PathResolution, ScopeEntryWithSyntax}, |
50 | }; | 51 | }; |
51 | 52 | ||
52 | pub use hir_def::{ | 53 | pub use hir_def::{ |
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
new file mode 100644
index 000000000..22a7e7588
--- /dev/null
+++ b/crates/ra_hir/src/semantics.rs
@@ -0,0 +1,335 @@ | |||
1 | //! See `Semantics`. | ||
2 | |||
3 | use std::{cell::RefCell, fmt, iter::successors}; | ||
4 | |||
5 | use hir_def::{ | ||
6 | resolver::{self, HasResolver, Resolver}, | ||
7 | TraitId, | ||
8 | }; | ||
9 | use ra_db::{FileId, FileRange}; | ||
10 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextUnit}; | ||
11 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
12 | |||
13 | use crate::{ | ||
14 | db::HirDatabase, | ||
15 | source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer}, | ||
16 | source_binder::{ChildContainer, SourceBinder, ToDef}, | ||
17 | Function, HirFileId, InFile, Local, MacroDef, Module, Name, Origin, Path, PathResolution, | ||
18 | ScopeDef, StructField, Trait, Type, TypeParam, VariantDef, | ||
19 | }; | ||
20 | use ra_prof::profile; | ||
21 | |||
22 | /// Primary API to get semantic information, like types, from syntax trees. | ||
23 | pub struct Semantics<'db, DB> { | ||
24 | pub db: &'db DB, | ||
25 | pub(crate) sb: RefCell<SourceBinder>, | ||
26 | cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, | ||
27 | } | ||
28 | |||
29 | impl<DB> fmt::Debug for Semantics<'_, DB> { | ||
30 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
31 | write!(f, "Semantics {{ ... }}") | ||
32 | } | ||
33 | } | ||
34 | |||
35 | impl<'db, DB: HirDatabase> Semantics<'db, DB> { | ||
36 | pub fn new(db: &DB) -> Semantics<DB> { | ||
37 | let sb = RefCell::new(SourceBinder::new()); | ||
38 | Semantics { db, sb, cache: RefCell::default() } | ||
39 | } | ||
40 | |||
41 | pub fn parse(&self, file_id: FileId) -> ast::SourceFile { | ||
42 | let tree = self.db.parse(file_id).tree(); | ||
43 | self.cache(tree.syntax().clone(), file_id.into()); | ||
44 | tree | ||
45 | } | ||
46 | |||
47 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | ||
48 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | ||
49 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); | ||
50 | let file_id = sa.expand(self.db, macro_call)?; | ||
51 | let node = self.db.parse_or_expand(file_id)?; | ||
52 | self.cache(node.clone(), file_id); | ||
53 | Some(node) | ||
54 | } | ||
55 | |||
56 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | ||
57 | let parent = token.parent(); | ||
58 | let parent = self.find_file(parent); | ||
59 | let sa = self.analyze2(parent.as_ref(), None); | ||
60 | |||
61 | let token = successors(Some(parent.with_value(token)), |token| { | ||
62 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; | ||
63 | let tt = macro_call.token_tree()?; | ||
64 | if !token.value.text_range().is_subrange(&tt.syntax().text_range()) { | ||
65 | return None; | ||
66 | } | ||
67 | let file_id = sa.expand(self.db, token.with_value(&macro_call))?; | ||
68 | let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?; | ||
69 | |||
70 | self.cache(find_root(&token.value.parent()), token.file_id); | ||
71 | |||
72 | Some(token) | ||
73 | }) | ||
74 | .last() | ||
75 | .unwrap(); | ||
76 | |||
77 | token.value | ||
78 | } | ||
79 | |||
80 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { | ||
81 | let node = self.find_file(node.clone()); | ||
82 | original_range(self.db, node.as_ref()) | ||
83 | } | ||
84 | |||
85 | pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
86 | let node = self.find_file(node); | ||
87 | node.ancestors_with_macros(self.db).map(|it| it.value) | ||
88 | } | ||
89 | |||
90 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | ||
91 | self.analyze(expr.syntax()).type_of(self.db, &expr) | ||
92 | } | ||
93 | |||
94 | pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> { | ||
95 | self.analyze(pat.syntax()).type_of_pat(self.db, &pat) | ||
96 | } | ||
97 | |||
98 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | ||
99 | self.analyze(call.syntax()).resolve_method_call(call) | ||
100 | } | ||
101 | |||
102 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> { | ||
103 | self.analyze(field.syntax()).resolve_field(field) | ||
104 | } | ||
105 | |||
106 | pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> { | ||
107 | self.analyze(field.syntax()).resolve_record_field(field) | ||
108 | } | ||
109 | |||
110 | pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> { | ||
111 | self.analyze(record_lit.syntax()).resolve_record_literal(record_lit) | ||
112 | } | ||
113 | |||
114 | pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> { | ||
115 | self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat) | ||
116 | } | ||
117 | |||
118 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { | ||
119 | let sa = self.analyze(macro_call.syntax()); | ||
120 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | ||
121 | sa.resolve_macro_call(self.db, macro_call) | ||
122 | } | ||
123 | |||
124 | pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> { | ||
125 | self.analyze(path.syntax()).resolve_path(self.db, path) | ||
126 | } | ||
127 | |||
128 | // FIXME: use this instead? | ||
129 | // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; | ||
130 | |||
131 | pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> { | ||
132 | let src = self.find_file(src.syntax().clone()).with_value(src.clone()); | ||
133 | let mut sb = self.sb.borrow_mut(); | ||
134 | T::to_def(self.db, &mut sb, src) | ||
135 | } | ||
136 | |||
137 | pub fn to_module_def(&self, file: FileId) -> Option<Module> { | ||
138 | let mut sb = self.sb.borrow_mut(); | ||
139 | sb.to_module_def(self.db, file) | ||
140 | } | ||
141 | |||
142 | pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> { | ||
143 | let node = self.find_file(node.clone()); | ||
144 | let resolver = self.analyze2(node.as_ref(), None).resolver; | ||
145 | SemanticsScope { db: self.db, resolver } | ||
146 | } | ||
147 | |||
148 | pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> { | ||
149 | let node = self.find_file(node.clone()); | ||
150 | let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; | ||
151 | SemanticsScope { db: self.db, resolver } | ||
152 | } | ||
153 | |||
154 | pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> { | ||
155 | let resolver = def.id.resolver(self.db); | ||
156 | SemanticsScope { db: self.db, resolver } | ||
157 | } | ||
158 | |||
159 | // FIXME: we only use this in `inline_local_variable` assist, ideally, we | ||
160 | // should switch to general reference search infra there. | ||
161 | pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> { | ||
162 | self.analyze(pat.syntax()).find_all_refs(pat) | ||
163 | } | ||
164 | |||
165 | fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer { | ||
166 | let src = self.find_file(node.clone()); | ||
167 | self.analyze2(src.as_ref(), None) | ||
168 | } | ||
169 | |||
170 | fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer { | ||
171 | let _p = profile("Semantics::analyze2"); | ||
172 | |||
173 | let container = match self.sb.borrow_mut().find_container(self.db, src) { | ||
174 | Some(it) => it, | ||
175 | None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src), | ||
176 | }; | ||
177 | |||
178 | let resolver = match container { | ||
179 | ChildContainer::DefWithBodyId(def) => { | ||
180 | return SourceAnalyzer::new_for_body(self.db, def, src, offset) | ||
181 | } | ||
182 | ChildContainer::TraitId(it) => it.resolver(self.db), | ||
183 | ChildContainer::ImplId(it) => it.resolver(self.db), | ||
184 | ChildContainer::ModuleId(it) => it.resolver(self.db), | ||
185 | ChildContainer::EnumId(it) => it.resolver(self.db), | ||
186 | ChildContainer::VariantId(it) => it.resolver(self.db), | ||
187 | ChildContainer::GenericDefId(it) => it.resolver(self.db), | ||
188 | }; | ||
189 | SourceAnalyzer::new_for_resolver(resolver, src) | ||
190 | } | ||
191 | |||
192 | fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { | ||
193 | assert!(root_node.parent().is_none()); | ||
194 | let mut cache = self.cache.borrow_mut(); | ||
195 | let prev = cache.insert(root_node, file_id); | ||
196 | assert!(prev == None || prev == Some(file_id)) | ||
197 | } | ||
198 | |||
199 | pub fn assert_contains_node(&self, node: &SyntaxNode) { | ||
200 | self.find_file(node.clone()); | ||
201 | } | ||
202 | |||
203 | fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> { | ||
204 | let cache = self.cache.borrow(); | ||
205 | cache.get(root_node).copied() | ||
206 | } | ||
207 | |||
208 | fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> { | ||
209 | let root_node = find_root(&node); | ||
210 | let file_id = self.lookup(&root_node).unwrap_or_else(|| { | ||
211 | panic!( | ||
212 | "\n\nFailed to lookup {:?} in this Semantics.\n\ | ||
213 | Make sure to use only query nodes, derived from this instance of Semantics.\n\ | ||
214 | root node: {:?}\n\ | ||
215 | known nodes: {}\n\n", | ||
216 | node, | ||
217 | root_node, | ||
218 | self.cache | ||
219 | .borrow() | ||
220 | .keys() | ||
221 | .map(|it| format!("{:?}", it)) | ||
222 | .collect::<Vec<_>>() | ||
223 | .join(", ") | ||
224 | ) | ||
225 | }); | ||
226 | InFile::new(file_id, node) | ||
227 | } | ||
228 | } | ||
229 | |||
230 | fn find_root(node: &SyntaxNode) -> SyntaxNode { | ||
231 | node.ancestors().last().unwrap() | ||
232 | } | ||
233 | |||
234 | pub struct SemanticsScope<'a, DB> { | ||
235 | pub db: &'a DB, | ||
236 | resolver: Resolver, | ||
237 | } | ||
238 | |||
239 | impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> { | ||
240 | pub fn module(&self) -> Option<Module> { | ||
241 | Some(Module { id: self.resolver.module()? }) | ||
242 | } | ||
243 | |||
244 | /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type | ||
245 | // FIXME: rename to visible_traits to not repeat scope? | ||
246 | pub fn traits_in_scope(&self) -> FxHashSet<TraitId> { | ||
247 | let resolver = &self.resolver; | ||
248 | resolver.traits_in_scope(self.db) | ||
249 | } | ||
250 | |||
251 | pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { | ||
252 | let resolver = &self.resolver; | ||
253 | |||
254 | resolver.process_all_names(self.db, &mut |name, def| { | ||
255 | let def = match def { | ||
256 | resolver::ScopeDef::PerNs(it) => it.into(), | ||
257 | resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), | ||
258 | resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), | ||
259 | resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), | ||
260 | resolver::ScopeDef::Local(pat_id) => { | ||
261 | let parent = resolver.body_owner().unwrap().into(); | ||
262 | ScopeDef::Local(Local { parent, pat_id }) | ||
263 | } | ||
264 | }; | ||
265 | f(name, def) | ||
266 | }) | ||
267 | } | ||
268 | |||
269 | pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> { | ||
270 | resolve_hir_path(self.db, &self.resolver, path) | ||
271 | } | ||
272 | } | ||
273 | |||
274 | // FIXME: Change `HasSource` trait to work with `Semantics` and remove this? | ||
275 | pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange { | ||
276 | if let Some((range, Origin::Call)) = original_range_and_origin(db, node) { | ||
277 | return range; | ||
278 | } | ||
279 | |||
280 | if let Some(expansion) = node.file_id.expansion_info(db) { | ||
281 | if let Some(call_node) = expansion.call_node() { | ||
282 | return FileRange { | ||
283 | file_id: call_node.file_id.original_file(db), | ||
284 | range: call_node.value.text_range(), | ||
285 | }; | ||
286 | } | ||
287 | } | ||
288 | |||
289 | FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() } | ||
290 | } | ||
291 | |||
292 | fn original_range_and_origin( | ||
293 | db: &impl HirDatabase, | ||
294 | node: InFile<&SyntaxNode>, | ||
295 | ) -> Option<(FileRange, Origin)> { | ||
296 | let expansion = node.file_id.expansion_info(db)?; | ||
297 | |||
298 | // Does the input node contain only a single token? | ||
299 | let single = node.value.first_token()? == node.value.last_token()?; | ||
300 | |||
301 | // FIXME: We should handle recursive macro expansions | ||
302 | let (range, origin) = node.value.descendants().find_map(|it| { | ||
303 | let first = it.first_token()?; | ||
304 | let last = it.last_token()?; | ||
305 | |||
306 | if !single && first == last { | ||
307 | return None; | ||
308 | } | ||
309 | |||
310 | // Try to map the first and last tokens of the node and, if successful, return the union range of the mapped tokens | ||
311 | let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?; | ||
312 | let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?; | ||
313 | |||
314 | if first.file_id != last.file_id || first_origin != last_origin { | ||
315 | return None; | ||
316 | } | ||
317 | |||
318 | // FIXME: Add union method in TextRange | ||
319 | Some(( | ||
320 | first.with_value(union_range(first.value.text_range(), last.value.text_range())), | ||
321 | first_origin, | ||
322 | )) | ||
323 | })?; | ||
324 | |||
325 | return Some(( | ||
326 | FileRange { file_id: range.file_id.original_file(db), range: range.value }, | ||
327 | origin, | ||
328 | )); | ||
329 | |||
330 | fn union_range(a: TextRange, b: TextRange) -> TextRange { | ||
331 | let start = a.start().min(b.start()); | ||
332 | let end = a.end().max(b.end()); | ||
333 | TextRange::from_to(start, end) | ||
334 | } | ||
335 | } | ||
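
The `SemanticsScope` half of the new API is aimed at completion-like features that need to enumerate the names visible at a syntax node. A minimal sketch, again not part of the change and with the same assumed `ra_hir` crate path and helper name; `node` must come from a tree obtained through the same `Semantics` instance, otherwise `find_file` panics as shown above:

use ra_hir::{db::HirDatabase, Semantics};
use ra_syntax::SyntaxNode;

fn visible_names<DB: HirDatabase>(sema: &Semantics<'_, DB>, node: &SyntaxNode) -> Vec<String> {
    // Resolve the lexical scope at `node`, then walk every name it can see.
    let scope = sema.scope(node);
    let mut names = Vec::new();
    scope.process_all_names(&mut |name, _def| names.push(name.to_string()));
    names
}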
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index efa3f8a79..bff1ecd14 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -14,29 +14,27 @@ use hir_def::{ | |||
14 | BodySourceMap, | 14 | BodySourceMap, |
15 | }, | 15 | }, |
16 | expr::{ExprId, PatId}, | 16 | expr::{ExprId, PatId}, |
17 | resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs}, | 17 | resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, |
18 | AsMacroCall, DefWithBodyId, TraitId, | 18 | AsMacroCall, DefWithBodyId, |
19 | }; | 19 | }; |
20 | use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId}; | 20 | use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; |
21 | use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; | 21 | use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; |
22 | use ra_syntax::{ | 22 | use ra_syntax::{ |
23 | ast::{self, AstNode}, | 23 | ast::{self, AstNode}, |
24 | AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit, | 24 | AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit, |
25 | }; | 25 | }; |
26 | use rustc_hash::FxHashSet; | ||
27 | 26 | ||
28 | use crate::{ | 27 | use crate::{ |
29 | db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path, | 28 | db::HirDatabase, Adt, Const, EnumVariant, Function, Local, MacroDef, Name, Path, Static, |
30 | ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam, | 29 | Struct, Trait, Type, TypeAlias, TypeParam, |
31 | }; | 30 | }; |
32 | 31 | ||
33 | /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of | 32 | /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of |
34 | /// original source files. It should not be used inside the HIR itself. | 33 | /// original source files. It should not be used inside the HIR itself. |
35 | #[derive(Debug)] | 34 | #[derive(Debug)] |
36 | pub struct SourceAnalyzer { | 35 | pub(crate) struct SourceAnalyzer { |
37 | file_id: HirFileId, | 36 | file_id: HirFileId, |
38 | resolver: Resolver, | 37 | pub(crate) resolver: Resolver, |
39 | body_owner: Option<DefWithBody>, | ||
40 | body_source_map: Option<Arc<BodySourceMap>>, | 38 | body_source_map: Option<Arc<BodySourceMap>>, |
41 | infer: Option<Arc<InferenceResult>>, | 39 | infer: Option<Arc<InferenceResult>>, |
42 | scopes: Option<Arc<ExprScopes>>, | 40 | scopes: Option<Arc<ExprScopes>>, |
@@ -77,35 +75,7 @@ pub struct ReferenceDescriptor { | |||
77 | pub name: String, | 75 | pub name: String, |
78 | } | 76 | } |
79 | 77 | ||
80 | #[derive(Debug)] | ||
81 | pub struct Expansion { | ||
82 | macro_call_id: MacroCallId, | ||
83 | } | ||
84 | |||
85 | impl Expansion { | ||
86 | pub fn map_token_down( | ||
87 | &self, | ||
88 | db: &impl HirDatabase, | ||
89 | token: InFile<&SyntaxToken>, | ||
90 | ) -> Option<InFile<SyntaxToken>> { | ||
91 | let exp_info = self.file_id().expansion_info(db)?; | ||
92 | exp_info.map_token_down(token) | ||
93 | } | ||
94 | |||
95 | pub fn file_id(&self) -> HirFileId { | ||
96 | self.macro_call_id.as_file() | ||
97 | } | ||
98 | } | ||
99 | |||
100 | impl SourceAnalyzer { | 78 | impl SourceAnalyzer { |
101 | pub fn new( | ||
102 | db: &impl HirDatabase, | ||
103 | node: InFile<&SyntaxNode>, | ||
104 | offset: Option<TextUnit>, | ||
105 | ) -> SourceAnalyzer { | ||
106 | crate::source_binder::SourceBinder::new(db).analyze(node, offset) | ||
107 | } | ||
108 | |||
109 | pub(crate) fn new_for_body( | 79 | pub(crate) fn new_for_body( |
110 | db: &impl HirDatabase, | 80 | db: &impl HirDatabase, |
111 | def: DefWithBodyId, | 81 | def: DefWithBodyId, |
@@ -121,7 +91,6 @@ impl SourceAnalyzer { | |||
121 | let resolver = resolver_for_scope(db, def, scope); | 91 | let resolver = resolver_for_scope(db, def, scope); |
122 | SourceAnalyzer { | 92 | SourceAnalyzer { |
123 | resolver, | 93 | resolver, |
124 | body_owner: Some(def.into()), | ||
125 | body_source_map: Some(source_map), | 94 | body_source_map: Some(source_map), |
126 | infer: Some(db.infer(def)), | 95 | infer: Some(db.infer(def)), |
127 | scopes: Some(scopes), | 96 | scopes: Some(scopes), |
@@ -135,7 +104,6 @@ impl SourceAnalyzer { | |||
135 | ) -> SourceAnalyzer { | 104 | ) -> SourceAnalyzer { |
136 | SourceAnalyzer { | 105 | SourceAnalyzer { |
137 | resolver, | 106 | resolver, |
138 | body_owner: None, | ||
139 | body_source_map: None, | 107 | body_source_map: None, |
140 | infer: None, | 108 | infer: None, |
141 | scopes: None, | 109 | scopes: None, |
@@ -143,10 +111,6 @@ impl SourceAnalyzer { | |||
143 | } | 111 | } |
144 | } | 112 | } |
145 | 113 | ||
146 | pub fn module(&self) -> Option<crate::code_model::Module> { | ||
147 | Some(crate::code_model::Module { id: self.resolver.module()? }) | ||
148 | } | ||
149 | |||
150 | fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> { | 114 | fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> { |
151 | let src = InFile { file_id: self.file_id, value: expr }; | 115 | let src = InFile { file_id: self.file_id, value: expr }; |
152 | self.body_source_map.as_ref()?.node_expr(src) | 116 | self.body_source_map.as_ref()?.node_expr(src) |
@@ -180,7 +144,7 @@ impl SourceAnalyzer { | |||
180 | TraitEnvironment::lower(db, &self.resolver) | 144 | TraitEnvironment::lower(db, &self.resolver) |
181 | } | 145 | } |
182 | 146 | ||
183 | pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> { | 147 | pub(crate) fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> { |
184 | let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) { | 148 | let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) { |
185 | self.body_source_map.as_ref()?.node_expr(expr.as_ref())? | 149 | self.body_source_map.as_ref()?.node_expr(expr.as_ref())? |
186 | } else { | 150 | } else { |
@@ -192,24 +156,27 @@ impl SourceAnalyzer { | |||
192 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) | 156 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) |
193 | } | 157 | } |
194 | 158 | ||
195 | pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> { | 159 | pub(crate) fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> { |
196 | let pat_id = self.pat_id(pat)?; | 160 | let pat_id = self.pat_id(pat)?; |
197 | let ty = self.infer.as_ref()?[pat_id].clone(); | 161 | let ty = self.infer.as_ref()?[pat_id].clone(); |
198 | let environment = self.trait_env(db); | 162 | let environment = self.trait_env(db); |
199 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) | 163 | Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) |
200 | } | 164 | } |
201 | 165 | ||
202 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | 166 | pub(crate) fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { |
203 | let expr_id = self.expr_id(&call.clone().into())?; | 167 | let expr_id = self.expr_id(&call.clone().into())?; |
204 | self.infer.as_ref()?.method_resolution(expr_id).map(Function::from) | 168 | self.infer.as_ref()?.method_resolution(expr_id).map(Function::from) |
205 | } | 169 | } |
206 | 170 | ||
207 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { | 171 | pub(crate) fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> { |
208 | let expr_id = self.expr_id(&field.clone().into())?; | 172 | let expr_id = self.expr_id(&field.clone().into())?; |
209 | self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) | 173 | self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) |
210 | } | 174 | } |
211 | 175 | ||
212 | pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> { | 176 | pub(crate) fn resolve_record_field( |
177 | &self, | ||
178 | field: &ast::RecordField, | ||
179 | ) -> Option<crate::StructField> { | ||
213 | let expr_id = match field.expr() { | 180 | let expr_id = match field.expr() { |
214 | Some(it) => self.expr_id(&it)?, | 181 | Some(it) => self.expr_id(&it)?, |
215 | None => { | 182 | None => { |
@@ -220,17 +187,23 @@ impl SourceAnalyzer { | |||
220 | self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into()) | 187 | self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into()) |
221 | } | 188 | } |
222 | 189 | ||
223 | pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> { | 190 | pub(crate) fn resolve_record_literal( |
191 | &self, | ||
192 | record_lit: &ast::RecordLit, | ||
193 | ) -> Option<crate::VariantDef> { | ||
224 | let expr_id = self.expr_id(&record_lit.clone().into())?; | 194 | let expr_id = self.expr_id(&record_lit.clone().into())?; |
225 | self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into()) | 195 | self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into()) |
226 | } | 196 | } |
227 | 197 | ||
228 | pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> { | 198 | pub(crate) fn resolve_record_pattern( |
199 | &self, | ||
200 | record_pat: &ast::RecordPat, | ||
201 | ) -> Option<crate::VariantDef> { | ||
229 | let pat_id = self.pat_id(&record_pat.clone().into())?; | 202 | let pat_id = self.pat_id(&record_pat.clone().into())?; |
230 | self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into()) | 203 | self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into()) |
231 | } | 204 | } |
232 | 205 | ||
233 | pub fn resolve_macro_call( | 206 | pub(crate) fn resolve_macro_call( |
234 | &self, | 207 | &self, |
235 | db: &impl HirDatabase, | 208 | db: &impl HirDatabase, |
236 | macro_call: InFile<&ast::MacroCall>, | 209 | macro_call: InFile<&ast::MacroCall>, |
@@ -240,52 +213,11 @@ impl SourceAnalyzer { | |||
240 | self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into()) | 213 | self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into()) |
241 | } | 214 | } |
242 | 215 | ||
243 | pub fn resolve_hir_path( | 216 | pub(crate) fn resolve_path( |
244 | &self, | 217 | &self, |
245 | db: &impl HirDatabase, | 218 | db: &impl HirDatabase, |
246 | path: &crate::Path, | 219 | path: &ast::Path, |
247 | ) -> Option<PathResolution> { | 220 | ) -> Option<PathResolution> { |
248 | let types = | ||
249 | self.resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty { | ||
250 | TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), | ||
251 | TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), | ||
252 | TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => { | ||
253 | PathResolution::Def(Adt::from(it).into()) | ||
254 | } | ||
255 | TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), | ||
256 | TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), | ||
257 | TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), | ||
258 | TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), | ||
259 | }); | ||
260 | let values = | ||
261 | self.resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| { | ||
262 | let res = match val { | ||
263 | ValueNs::LocalBinding(pat_id) => { | ||
264 | let var = Local { parent: self.body_owner?, pat_id }; | ||
265 | PathResolution::Local(var) | ||
266 | } | ||
267 | ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), | ||
268 | ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), | ||
269 | ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), | ||
270 | ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), | ||
271 | ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), | ||
272 | }; | ||
273 | Some(res) | ||
274 | }); | ||
275 | |||
276 | let items = self | ||
277 | .resolver | ||
278 | .resolve_module_path_in_items(db, path.mod_path()) | ||
279 | .take_types() | ||
280 | .map(|it| PathResolution::Def(it.into())); | ||
281 | types.or(values).or(items).or_else(|| { | ||
282 | self.resolver | ||
283 | .resolve_path_as_macro(db, path.mod_path()) | ||
284 | .map(|def| PathResolution::Macro(def.into())) | ||
285 | }) | ||
286 | } | ||
287 | |||
288 | pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> { | ||
289 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { | 221 | if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { |
290 | let expr_id = self.expr_id(&path_expr.into())?; | 222 | let expr_id = self.expr_id(&path_expr.into())?; |
291 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { | 223 | if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { |
@@ -300,7 +232,7 @@ impl SourceAnalyzer { | |||
300 | } | 232 | } |
301 | // This must be a normal source file rather than macro file. | 233 | // This must be a normal source file rather than macro file. |
302 | let hir_path = crate::Path::from_ast(path.clone())?; | 234 | let hir_path = crate::Path::from_ast(path.clone())?; |
303 | self.resolve_hir_path(db, &hir_path) | 235 | resolve_hir_path(db, &self.resolver, &hir_path) |
304 | } | 236 | } |
305 | 237 | ||
306 | fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> { | 238 | fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> { |
@@ -315,25 +247,9 @@ impl SourceAnalyzer { | |||
315 | }) | 247 | }) |
316 | } | 248 | } |
317 | 249 | ||
318 | pub fn process_all_names(&self, db: &impl HirDatabase, f: &mut dyn FnMut(Name, ScopeDef)) { | ||
319 | self.resolver.process_all_names(db, &mut |name, def| { | ||
320 | let def = match def { | ||
321 | resolver::ScopeDef::PerNs(it) => it.into(), | ||
322 | resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), | ||
323 | resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), | ||
324 | resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), | ||
325 | resolver::ScopeDef::Local(pat_id) => { | ||
326 | let parent = self.resolver.body_owner().unwrap().into(); | ||
327 | ScopeDef::Local(Local { parent, pat_id }) | ||
328 | } | ||
329 | }; | ||
330 | f(name, def) | ||
331 | }) | ||
332 | } | ||
333 | |||
334 | // FIXME: we only use this in `inline_local_variable` assist, ideally, we | 250 | // FIXME: we only use this in `inline_local_variable` assist, ideally, we |
335 | // should switch to general reference search infra there. | 251 | // should switch to general reference search infra there. |
336 | pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> { | 252 | pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> { |
337 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); | 253 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); |
338 | let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone()))); | 254 | let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone()))); |
339 | fn_def | 255 | fn_def |
@@ -351,19 +267,14 @@ impl SourceAnalyzer { | |||
351 | .collect() | 267 | .collect() |
352 | } | 268 | } |
353 | 269 | ||
354 | /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type | 270 | pub(crate) fn expand( |
355 | pub fn traits_in_scope(&self, db: &impl HirDatabase) -> FxHashSet<TraitId> { | ||
356 | self.resolver.traits_in_scope(db) | ||
357 | } | ||
358 | |||
359 | pub fn expand( | ||
360 | &self, | 271 | &self, |
361 | db: &impl HirDatabase, | 272 | db: &impl HirDatabase, |
362 | macro_call: InFile<&ast::MacroCall>, | 273 | macro_call: InFile<&ast::MacroCall>, |
363 | ) -> Option<Expansion> { | 274 | ) -> Option<HirFileId> { |
364 | let macro_call_id = | 275 | let macro_call_id = |
365 | macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?; | 276 | macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?; |
366 | Some(Expansion { macro_call_id }) | 277 | Some(macro_call_id.as_file()) |
367 | } | 278 | } |
368 | } | 279 | } |
369 | 280 | ||
@@ -409,6 +320,47 @@ fn scope_for_offset( | |||
409 | }) | 320 | }) |
410 | } | 321 | } |
411 | 322 | ||
323 | pub(crate) fn resolve_hir_path( | ||
324 | db: &impl HirDatabase, | ||
325 | resolver: &Resolver, | ||
326 | path: &crate::Path, | ||
327 | ) -> Option<PathResolution> { | ||
328 | let types = resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty { | ||
329 | TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), | ||
330 | TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }), | ||
331 | TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()), | ||
332 | TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), | ||
333 | TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()), | ||
334 | TypeNs::BuiltinType(it) => PathResolution::Def(it.into()), | ||
335 | TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()), | ||
336 | }); | ||
337 | let body_owner = resolver.body_owner(); | ||
338 | let values = resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| { | ||
339 | let res = match val { | ||
340 | ValueNs::LocalBinding(pat_id) => { | ||
341 | let var = Local { parent: body_owner?.into(), pat_id }; | ||
342 | PathResolution::Local(var) | ||
343 | } | ||
344 | ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()), | ||
345 | ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()), | ||
346 | ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()), | ||
347 | ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()), | ||
348 | ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()), | ||
349 | }; | ||
350 | Some(res) | ||
351 | }); | ||
352 | |||
353 | let items = resolver | ||
354 | .resolve_module_path_in_items(db, path.mod_path()) | ||
355 | .take_types() | ||
356 | .map(|it| PathResolution::Def(it.into())); | ||
357 | types.or(values).or(items).or_else(|| { | ||
358 | resolver | ||
359 | .resolve_path_as_macro(db, path.mod_path()) | ||
360 | .map(|def| PathResolution::Macro(def.into())) | ||
361 | }) | ||
362 | } | ||
363 | |||
412 | // XXX: during completion, cursor might be outside of any particular | 364 | // XXX: during completion, cursor might be outside of any particular |
413 | // expression. Try to figure out the correct scope... | 365 | // expression. Try to figure out the correct scope... |
414 | fn adjust( | 366 | fn adjust( |
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index f3150f578..0b8a641f9 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -5,112 +5,85 @@ use hir_def::{ | |||
5 | child_by_source::ChildBySource, | 5 | child_by_source::ChildBySource, |
6 | dyn_map::DynMap, | 6 | dyn_map::DynMap, |
7 | keys::{self, Key}, | 7 | keys::{self, Key}, |
8 | resolver::{HasResolver, Resolver}, | ||
9 | ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId, | 8 | ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId, |
10 | StaticId, StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId, | 9 | StaticId, StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId, |
11 | }; | 10 | }; |
12 | use hir_expand::{name::AsName, AstId, InFile, MacroDefId, MacroDefKind}; | 11 | use hir_expand::{name::AsName, AstId, InFile, MacroDefId, MacroDefKind}; |
12 | use ra_db::FileId; | ||
13 | use ra_prof::profile; | 13 | use ra_prof::profile; |
14 | use ra_syntax::{ | 14 | use ra_syntax::{ |
15 | ast::{self, NameOwner}, | 15 | ast::{self, NameOwner}, |
16 | match_ast, AstNode, SyntaxNode, TextUnit, | 16 | match_ast, AstNode, SyntaxNode, |
17 | }; | 17 | }; |
18 | use rustc_hash::FxHashMap; | 18 | use rustc_hash::FxHashMap; |
19 | 19 | ||
20 | use crate::{db::HirDatabase, Local, Module, SourceAnalyzer, TypeParam}; | 20 | use crate::{db::HirDatabase, Local, Module, TypeParam}; |
21 | use ra_db::FileId; | ||
22 | 21 | ||
23 | pub struct SourceBinder<'a, DB> { | 22 | pub struct SourceBinder { |
24 | pub db: &'a DB, | ||
25 | child_by_source_cache: FxHashMap<ChildContainer, DynMap>, | 23 | child_by_source_cache: FxHashMap<ChildContainer, DynMap>, |
26 | } | 24 | } |
27 | 25 | ||
28 | impl<DB: HirDatabase> SourceBinder<'_, DB> { | 26 | impl SourceBinder { |
29 | pub fn new(db: &DB) -> SourceBinder<DB> { | 27 | pub(crate) fn new() -> SourceBinder { |
30 | SourceBinder { db, child_by_source_cache: FxHashMap::default() } | 28 | SourceBinder { child_by_source_cache: FxHashMap::default() } |
31 | } | ||
32 | |||
33 | pub fn analyze( | ||
34 | &mut self, | ||
35 | src: InFile<&SyntaxNode>, | ||
36 | offset: Option<TextUnit>, | ||
37 | ) -> SourceAnalyzer { | ||
38 | let _p = profile("SourceBinder::analyzer"); | ||
39 | let container = match self.find_container(src) { | ||
40 | Some(it) => it, | ||
41 | None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src), | ||
42 | }; | ||
43 | |||
44 | let resolver = match container { | ||
45 | ChildContainer::DefWithBodyId(def) => { | ||
46 | return SourceAnalyzer::new_for_body(self.db, def, src, offset) | ||
47 | } | ||
48 | ChildContainer::TraitId(it) => it.resolver(self.db), | ||
49 | ChildContainer::ImplId(it) => it.resolver(self.db), | ||
50 | ChildContainer::ModuleId(it) => it.resolver(self.db), | ||
51 | ChildContainer::EnumId(it) => it.resolver(self.db), | ||
52 | ChildContainer::VariantId(it) => it.resolver(self.db), | ||
53 | ChildContainer::GenericDefId(it) => it.resolver(self.db), | ||
54 | }; | ||
55 | SourceAnalyzer::new_for_resolver(resolver, src) | ||
56 | } | 29 | } |
57 | 30 | ||
58 | pub fn to_def<T: ToDef>(&mut self, src: InFile<T>) -> Option<T::Def> { | 31 | pub(crate) fn to_module_def(&mut self, db: &impl HirDatabase, file: FileId) -> Option<Module> { |
59 | T::to_def(self, src) | ||
60 | } | ||
61 | |||
62 | pub fn to_module_def(&mut self, file: FileId) -> Option<Module> { | ||
63 | let _p = profile("SourceBinder::to_module_def"); | 32 | let _p = profile("SourceBinder::to_module_def"); |
64 | let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| { | 33 | let (krate, local_id) = db.relevant_crates(file).iter().find_map(|&crate_id| { |
65 | let crate_def_map = self.db.crate_def_map(crate_id); | 34 | let crate_def_map = db.crate_def_map(crate_id); |
66 | let local_id = crate_def_map.modules_for_file(file).next()?; | 35 | let local_id = crate_def_map.modules_for_file(file).next()?; |
67 | Some((crate_id, local_id)) | 36 | Some((crate_id, local_id)) |
68 | })?; | 37 | })?; |
69 | Some(Module { id: ModuleId { krate, local_id } }) | 38 | Some(Module { id: ModuleId { krate, local_id } }) |
70 | } | 39 | } |
71 | 40 | ||
72 | fn to_id<T: ToId>(&mut self, src: InFile<T>) -> Option<T::ID> { | 41 | fn to_id<T: ToId>(&mut self, db: &impl HirDatabase, src: InFile<T>) -> Option<T::ID> { |
73 | T::to_id(self, src) | 42 | T::to_id(db, self, src) |
74 | } | 43 | } |
75 | 44 | ||
76 | fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> { | 45 | pub(crate) fn find_container( |
77 | for container in src.cloned().ancestors_with_macros(self.db).skip(1) { | 46 | &mut self, |
47 | db: &impl HirDatabase, | ||
48 | src: InFile<&SyntaxNode>, | ||
49 | ) -> Option<ChildContainer> { | ||
50 | for container in src.cloned().ancestors_with_macros(db).skip(1) { | ||
78 | let res: ChildContainer = match_ast! { | 51 | let res: ChildContainer = match_ast! { |
79 | match (container.value) { | 52 | match (container.value) { |
80 | ast::TraitDef(it) => { | 53 | ast::TraitDef(it) => { |
81 | let def: TraitId = self.to_id(container.with_value(it))?; | 54 | let def: TraitId = self.to_id(db, container.with_value(it))?; |
82 | def.into() | 55 | def.into() |
83 | }, | 56 | }, |
84 | ast::ImplBlock(it) => { | 57 | ast::ImplBlock(it) => { |
85 | let def: ImplId = self.to_id(container.with_value(it))?; | 58 | let def: ImplId = self.to_id(db, container.with_value(it))?; |
86 | def.into() | 59 | def.into() |
87 | }, | 60 | }, |
88 | ast::FnDef(it) => { | 61 | ast::FnDef(it) => { |
89 | let def: FunctionId = self.to_id(container.with_value(it))?; | 62 | let def: FunctionId = self.to_id(db, container.with_value(it))?; |
90 | DefWithBodyId::from(def).into() | 63 | DefWithBodyId::from(def).into() |
91 | }, | 64 | }, |
92 | ast::StaticDef(it) => { | 65 | ast::StaticDef(it) => { |
93 | let def: StaticId = self.to_id(container.with_value(it))?; | 66 | let def: StaticId = self.to_id(db, container.with_value(it))?; |
94 | DefWithBodyId::from(def).into() | 67 | DefWithBodyId::from(def).into() |
95 | }, | 68 | }, |
96 | ast::ConstDef(it) => { | 69 | ast::ConstDef(it) => { |
97 | let def: ConstId = self.to_id(container.with_value(it))?; | 70 | let def: ConstId = self.to_id(db, container.with_value(it))?; |
98 | DefWithBodyId::from(def).into() | 71 | DefWithBodyId::from(def).into() |
99 | }, | 72 | }, |
100 | ast::EnumDef(it) => { | 73 | ast::EnumDef(it) => { |
101 | let def: EnumId = self.to_id(container.with_value(it))?; | 74 | let def: EnumId = self.to_id(db, container.with_value(it))?; |
102 | def.into() | 75 | def.into() |
103 | }, | 76 | }, |
104 | ast::StructDef(it) => { | 77 | ast::StructDef(it) => { |
105 | let def: StructId = self.to_id(container.with_value(it))?; | 78 | let def: StructId = self.to_id(db, container.with_value(it))?; |
106 | VariantId::from(def).into() | 79 | VariantId::from(def).into() |
107 | }, | 80 | }, |
108 | ast::UnionDef(it) => { | 81 | ast::UnionDef(it) => { |
109 | let def: UnionId = self.to_id(container.with_value(it))?; | 82 | let def: UnionId = self.to_id(db, container.with_value(it))?; |
110 | VariantId::from(def).into() | 83 | VariantId::from(def).into() |
111 | }, | 84 | }, |
112 | ast::Module(it) => { | 85 | ast::Module(it) => { |
113 | let def: ModuleId = self.to_id(container.with_value(it))?; | 86 | let def: ModuleId = self.to_id(db, container.with_value(it))?; |
114 | def.into() | 87 | def.into() |
115 | }, | 88 | }, |
116 | _ => { continue }, | 89 | _ => { continue }, |
@@ -119,12 +92,11 @@ impl<DB: HirDatabase> SourceBinder<'_, DB> { | |||
119 | return Some(res); | 92 | return Some(res); |
120 | } | 93 | } |
121 | 94 | ||
122 | let c = self.to_module_def(src.file_id.original_file(self.db))?; | 95 | let c = self.to_module_def(db, src.file_id.original_file(db))?; |
123 | Some(c.id.into()) | 96 | Some(c.id.into()) |
124 | } | 97 | } |
125 | 98 | ||
126 | fn child_by_source(&mut self, container: ChildContainer) -> &DynMap { | 99 | fn child_by_source(&mut self, db: &impl HirDatabase, container: ChildContainer) -> &DynMap { |
127 | let db = self.db; | ||
128 | self.child_by_source_cache.entry(container).or_insert_with(|| match container { | 100 | self.child_by_source_cache.entry(container).or_insert_with(|| match container { |
129 | ChildContainer::DefWithBodyId(it) => it.child_by_source(db), | 101 | ChildContainer::DefWithBodyId(it) => it.child_by_source(db), |
130 | ChildContainer::ModuleId(it) => it.child_by_source(db), | 102 | ChildContainer::ModuleId(it) => it.child_by_source(db), |
@@ -137,16 +109,20 @@ impl<DB: HirDatabase> SourceBinder<'_, DB> { | |||
137 | } | 109 | } |
138 | } | 110 | } |
139 | 111 | ||
140 | pub trait ToId: Sized { | 112 | pub(crate) trait ToId: Sized { |
141 | type ID: Sized + Copy + 'static; | 113 | type ID: Sized + Copy + 'static; |
142 | fn to_id<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) | 114 | fn to_id<DB: HirDatabase>( |
143 | -> Option<Self::ID>; | 115 | db: &DB, |
116 | sb: &mut SourceBinder, | ||
117 | src: InFile<Self>, | ||
118 | ) -> Option<Self::ID>; | ||
144 | } | 119 | } |
145 | 120 | ||
146 | pub trait ToDef: Sized + AstNode + 'static { | 121 | pub trait ToDef: Sized + AstNode + 'static { |
147 | type Def; | 122 | type Def; |
148 | fn to_def<DB: HirDatabase>( | 123 | fn to_def<DB: HirDatabase>( |
149 | sb: &mut SourceBinder<'_, DB>, | 124 | db: &DB, |
125 | sb: &mut SourceBinder, | ||
150 | src: InFile<Self>, | 126 | src: InFile<Self>, |
151 | ) -> Option<Self::Def>; | 127 | ) -> Option<Self::Def>; |
152 | } | 128 | } |
@@ -155,9 +131,9 @@ macro_rules! to_def_impls { | |||
155 | ($(($def:path, $ast:path)),* ,) => {$( | 131 | ($(($def:path, $ast:path)),* ,) => {$( |
156 | impl ToDef for $ast { | 132 | impl ToDef for $ast { |
157 | type Def = $def; | 133 | type Def = $def; |
158 | fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) | 134 | fn to_def<DB: HirDatabase>(db: &DB, sb: &mut SourceBinder, src: InFile<Self>) |
159 | -> Option<Self::Def> | 135 | -> Option<Self::Def> |
160 | { sb.to_id(src).map(Into::into) } | 136 | { sb.to_id(db, src).map(Into::into) } |
161 | } | 137 | } |
162 | )*} | 138 | )*} |
163 | } | 139 | } |
@@ -179,7 +155,7 @@ to_def_impls![ | |||
179 | ]; | 155 | ]; |
180 | 156 | ||
181 | #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] | 157 | #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] |
182 | enum ChildContainer { | 158 | pub(crate) enum ChildContainer { |
183 | DefWithBodyId(DefWithBodyId), | 159 | DefWithBodyId(DefWithBodyId), |
184 | ModuleId(ModuleId), | 160 | ModuleId(ModuleId), |
185 | TraitId(TraitId), | 161 | TraitId(TraitId), |
@@ -201,7 +177,7 @@ impl_froms! { | |||
201 | GenericDefId | 177 | GenericDefId |
202 | } | 178 | } |
203 | 179 | ||
204 | pub trait ToIdByKey: Sized + AstNode + 'static { | 180 | pub(crate) trait ToIdByKey: Sized + AstNode + 'static { |
205 | type ID: Sized + Copy + 'static; | 181 | type ID: Sized + Copy + 'static; |
206 | const KEY: Key<Self, Self::ID>; | 182 | const KEY: Key<Self, Self::ID>; |
207 | } | 183 | } |
@@ -209,11 +185,11 @@ pub trait ToIdByKey: Sized + AstNode + 'static { | |||
209 | impl<T: ToIdByKey> ToId for T { | 185 | impl<T: ToIdByKey> ToId for T { |
210 | type ID = <T as ToIdByKey>::ID; | 186 | type ID = <T as ToIdByKey>::ID; |
211 | fn to_id<DB: HirDatabase>( | 187 | fn to_id<DB: HirDatabase>( |
212 | sb: &mut SourceBinder<'_, DB>, | 188 | db: &DB, |
189 | sb: &mut SourceBinder, | ||
213 | src: InFile<Self>, | 190 | src: InFile<Self>, |
214 | ) -> Option<Self::ID> { | 191 | ) -> Option<Self::ID> { |
215 | let container = sb.find_container(src.as_ref().map(|it| it.syntax()))?; | 192 | let container = sb.find_container(db, src.as_ref().map(|it| it.syntax()))?; |
216 | let db = sb.db; | ||
217 | let dyn_map = | 193 | let dyn_map = |
218 | &*sb.child_by_source_cache.entry(container).or_insert_with(|| match container { | 194 | &*sb.child_by_source_cache.entry(container).or_insert_with(|| match container { |
219 | ChildContainer::DefWithBodyId(it) => it.child_by_source(db), | 195 | ChildContainer::DefWithBodyId(it) => it.child_by_source(db), |
@@ -255,15 +231,15 @@ to_id_key_impls![ | |||
255 | impl ToId for ast::MacroCall { | 231 | impl ToId for ast::MacroCall { |
256 | type ID = MacroDefId; | 232 | type ID = MacroDefId; |
257 | fn to_id<DB: HirDatabase>( | 233 | fn to_id<DB: HirDatabase>( |
258 | sb: &mut SourceBinder<'_, DB>, | 234 | db: &DB, |
235 | sb: &mut SourceBinder, | ||
259 | src: InFile<Self>, | 236 | src: InFile<Self>, |
260 | ) -> Option<Self::ID> { | 237 | ) -> Option<Self::ID> { |
261 | let kind = MacroDefKind::Declarative; | 238 | let kind = MacroDefKind::Declarative; |
262 | 239 | ||
263 | let krate = sb.to_module_def(src.file_id.original_file(sb.db))?.id.krate; | 240 | let krate = sb.to_module_def(db, src.file_id.original_file(db))?.id.krate; |
264 | 241 | ||
265 | let ast_id = | 242 | let ast_id = Some(AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value))); |
266 | Some(AstId::new(src.file_id, sb.db.ast_id_map(src.file_id).ast_id(&src.value))); | ||
267 | 243 | ||
268 | Some(MacroDefId { krate: Some(krate), ast_id, kind }) | 244 | Some(MacroDefId { krate: Some(krate), ast_id, kind }) |
269 | } | 245 | } |
@@ -272,20 +248,20 @@ impl ToId for ast::MacroCall { | |||
272 | impl ToDef for ast::BindPat { | 248 | impl ToDef for ast::BindPat { |
273 | type Def = Local; | 249 | type Def = Local; |
274 | 250 | ||
275 | fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) -> Option<Local> { | 251 | fn to_def<DB: HirDatabase>(db: &DB, sb: &mut SourceBinder, src: InFile<Self>) -> Option<Local> { |
276 | let file_id = src.file_id; | 252 | let file_id = src.file_id; |
277 | let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| { | 253 | let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| { |
278 | let res = match_ast! { | 254 | let res = match_ast! { |
279 | match it { | 255 | match it { |
280 | ast::ConstDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 256 | ast::ConstDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
281 | ast::StaticDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 257 | ast::StaticDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
282 | ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 258 | ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
283 | _ => return None, | 259 | _ => return None, |
284 | } | 260 | } |
285 | }; | 261 | }; |
286 | Some(res) | 262 | Some(res) |
287 | })?; | 263 | })?; |
288 | let (_body, source_map) = sb.db.body_with_source_map(parent); | 264 | let (_body, source_map) = db.body_with_source_map(parent); |
289 | let src = src.map(ast::Pat::from); | 265 | let src = src.map(ast::Pat::from); |
290 | let pat_id = source_map.node_pat(src.as_ref())?; | 266 | let pat_id = source_map.node_pat(src.as_ref())?; |
291 | Some(Local { parent: parent.into(), pat_id }) | 267 | Some(Local { parent: parent.into(), pat_id }) |
@@ -296,26 +272,26 @@ impl ToDef for ast::TypeParam { | |||
296 | type Def = TypeParam; | 272 | type Def = TypeParam; |
297 | 273 | ||
298 | fn to_def<DB: HirDatabase>( | 274 | fn to_def<DB: HirDatabase>( |
299 | sb: &mut SourceBinder<'_, DB>, | 275 | db: &DB, |
276 | sb: &mut SourceBinder, | ||
300 | src: InFile<ast::TypeParam>, | 277 | src: InFile<ast::TypeParam>, |
301 | ) -> Option<TypeParam> { | 278 | ) -> Option<TypeParam> { |
302 | let mut sb = SourceBinder::new(sb.db); | ||
303 | let file_id = src.file_id; | 279 | let file_id = src.file_id; |
304 | let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| { | 280 | let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| { |
305 | let res = match_ast! { | 281 | let res = match_ast! { |
306 | match it { | 282 | match it { |
307 | ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 283 | ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
308 | ast::StructDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 284 | ast::StructDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
309 | ast::EnumDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 285 | ast::EnumDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
310 | ast::TraitDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 286 | ast::TraitDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
311 | ast::TypeAliasDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 287 | ast::TypeAliasDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
312 | ast::ImplBlock(value) => { sb.to_id(InFile { value, file_id})?.into() }, | 288 | ast::ImplBlock(value) => { sb.to_id(db, InFile { value, file_id})?.into() }, |
313 | _ => return None, | 289 | _ => return None, |
314 | } | 290 | } |
315 | }; | 291 | }; |
316 | Some(res) | 292 | Some(res) |
317 | })?; | 293 | })?; |
318 | let &id = sb.child_by_source(parent.into())[keys::TYPE_PARAM].get(&src)?; | 294 | let &id = sb.child_by_source(db, parent.into())[keys::TYPE_PARAM].get(&src)?; |
319 | Some(TypeParam { id }) | 295 | Some(TypeParam { id }) |
320 | } | 296 | } |
321 | } | 297 | } |
@@ -324,7 +300,8 @@ impl ToId for ast::Module { | |||
324 | type ID = ModuleId; | 300 | type ID = ModuleId; |
325 | 301 | ||
326 | fn to_id<DB: HirDatabase>( | 302 | fn to_id<DB: HirDatabase>( |
327 | sb: &mut SourceBinder<'_, DB>, | 303 | db: &DB, |
304 | sb: &mut SourceBinder, | ||
328 | src: InFile<ast::Module>, | 305 | src: InFile<ast::Module>, |
329 | ) -> Option<ModuleId> { | 306 | ) -> Option<ModuleId> { |
330 | { | 307 | { |
@@ -333,7 +310,7 @@ impl ToId for ast::Module { | |||
333 | .as_ref() | 310 | .as_ref() |
334 | .map(|it| it.syntax()) | 311 | .map(|it| it.syntax()) |
335 | .cloned() | 312 | .cloned() |
336 | .ancestors_with_macros(sb.db) | 313 | .ancestors_with_macros(db) |
337 | .skip(1) | 314 | .skip(1) |
338 | .find_map(|it| { | 315 | .find_map(|it| { |
339 | let m = ast::Module::cast(it.value.clone())?; | 316 | let m = ast::Module::cast(it.value.clone())?; |
@@ -341,15 +318,15 @@ impl ToId for ast::Module { | |||
341 | }); | 318 | }); |
342 | 319 | ||
343 | let parent_module = match parent_declaration { | 320 | let parent_module = match parent_declaration { |
344 | Some(parent_declaration) => sb.to_id(parent_declaration)?, | 321 | Some(parent_declaration) => sb.to_id(db, parent_declaration)?, |
345 | None => { | 322 | None => { |
346 | let file_id = src.file_id.original_file(sb.db); | 323 | let file_id = src.file_id.original_file(db); |
347 | sb.to_module_def(file_id)?.id | 324 | sb.to_module_def(db, file_id)?.id |
348 | } | 325 | } |
349 | }; | 326 | }; |
350 | 327 | ||
351 | let child_name = src.value.name()?.as_name(); | 328 | let child_name = src.value.name()?.as_name(); |
352 | let def_map = sb.db.crate_def_map(parent_module.krate); | 329 | let def_map = db.crate_def_map(parent_module.krate); |
353 | let child_id = *def_map[parent_module.local_id].children.get(&child_name)?; | 330 | let child_id = *def_map[parent_module.local_id].children.get(&child_name)?; |
354 | Some(ModuleId { krate: parent_module.krate, local_id: child_id }) | 331 | Some(ModuleId { krate: parent_module.krate, local_id: child_id }) |
355 | } | 332 | } |