Diffstat (limited to 'crates/ra_hir/src/semantics.rs')
-rw-r--r-- | crates/ra_hir/src/semantics.rs | 419
1 file changed, 419 insertions, 0 deletions
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
new file mode 100644
index 000000000..5b0b94e34
--- /dev/null
+++ b/crates/ra_hir/src/semantics.rs
@@ -0,0 +1,419 @@
//! See `Semantics`.

use std::{cell::RefCell, fmt, iter::successors};

use hir_def::{
    resolver::{self, HasResolver, Resolver},
    DefWithBodyId, TraitId,
};
use ra_db::{FileId, FileRange};
use ra_syntax::{
    algo::{find_covering_element, skip_trivia_token},
    ast, match_ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
    TextRange, TextUnit,
};
use rustc_hash::{FxHashMap, FxHashSet};

use crate::{
    db::HirDatabase,
    source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
    source_binder::{ChildContainer, SourceBinder},
    Function, HirFileId, InFile, Local, MacroDef, Module, Name, Origin, Path, PathResolution,
    ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
};
use ra_prof::profile;

/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
    pub db: &'db DB,
    pub(crate) sb: RefCell<SourceBinder>,
    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
}

impl<DB> fmt::Debug for Semantics<'_, DB> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Semantics {{ ... }}")
    }
}

impl<'db, DB: HirDatabase> Semantics<'db, DB> {
    pub fn new(db: &DB) -> Semantics<DB> {
        let sb = RefCell::new(SourceBinder::new());
        Semantics { db, sb, cache: RefCell::default() }
    }

    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), file_id.into());
        tree
    }

    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
        let file_id = sa.expand(self.db, macro_call)?;
        let node = self.db.parse_or_expand(file_id)?;
        self.cache(node.clone(), file_id);
        Some(node)
    }

    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
        let parent = token.parent();
        let parent = self.find_file(parent);
        let sa = self.analyze2(parent.as_ref(), None);

        let token = successors(Some(parent.with_value(token)), |token| {
            let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
            let tt = macro_call.token_tree()?;
            if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
                return None;
            }
            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
            let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;

            self.cache(find_root(&token.value.parent()), token.file_id);

            Some(token)
        })
        .last()
        .unwrap();

        token.value
    }

    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
        let node = self.find_file(node.clone());
        original_range(self.db, node.as_ref())
    }

    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
        let node = self.find_file(node);
        node.ancestors_with_macros(self.db).map(|it| it.value)
    }

    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
        self.analyze(expr.syntax()).type_of(self.db, &expr)
    }

    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
        self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
    }

    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
        self.analyze(call.syntax()).resolve_method_call(call)
    }

    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> {
        self.analyze(field.syntax()).resolve_field(field)
    }

    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> {
        self.analyze(field.syntax()).resolve_record_field(field)
    }

    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
        self.analyze(record_lit.syntax()).resolve_record_literal(record_lit)
    }

    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
        self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
    }

    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
        let sa = self.analyze(macro_call.syntax());
        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
        sa.resolve_macro_call(self.db, macro_call)
    }

    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.analyze(path.syntax()).resolve_path(self.db, path)
    }

    // FIXME: use this instead?
    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;

    pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> {
        T::to_def(self, src)
    }

    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
        let mut sb = self.sb.borrow_mut();
        sb.to_module_def(self.db, file)
    }

    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
        let node = self.find_file(node.clone());
        let resolver = self.analyze2(node.as_ref(), None).resolver;
        SemanticsScope { db: self.db, resolver }
    }

    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
        let node = self.find_file(node.clone());
        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
        SemanticsScope { db: self.db, resolver }
    }

    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
        let resolver = def.id.resolver(self.db);
        SemanticsScope { db: self.db, resolver }
    }

    // FIXME: we only use this in `inline_local_variable` assist, ideally, we
    // should switch to general reference search infra there.
    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
        self.analyze(pat.syntax()).find_all_refs(pat)
    }

    fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
        let src = self.find_file(node.clone());
        self.analyze2(src.as_ref(), None)
    }

    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
        let _p = profile("Semantics::analyze2");

        let container = match self.sb.borrow_mut().find_container(self.db, src) {
            Some(it) => it,
            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
        };

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
            }
            ChildContainer::TraitId(it) => it.resolver(self.db),
            ChildContainer::ImplId(it) => it.resolver(self.db),
            ChildContainer::ModuleId(it) => it.resolver(self.db),
            ChildContainer::EnumId(it) => it.resolver(self.db),
            ChildContainer::VariantId(it) => it.resolver(self.db),
            ChildContainer::GenericDefId(it) => it.resolver(self.db),
        };
        SourceAnalyzer::new_for_resolver(resolver, src)
    }

    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
        assert!(root_node.parent().is_none());
        let mut cache = self.cache.borrow_mut();
        let prev = cache.insert(root_node, file_id);
        assert!(prev == None || prev == Some(file_id))
    }

    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node.clone());
    }

    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
        let cache = self.cache.borrow();
        cache.get(root_node).copied()
    }

    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
        let root_node = find_root(&node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                 Make sure to use only query nodes, derived from this instance of Semantics.\n\
                 root node: {:?}\n\
                 known nodes: {}\n\n",
                node,
                root_node,
                self.cache
                    .borrow()
                    .keys()
                    .map(|it| format!("{:?}", it))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }
}

pub trait ToDef: Sized + AstNode + 'static {
    type Def;
    fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self) -> Option<Self::Def>;
}

macro_rules! to_def_impls {
    ($(($def:path, $ast:path)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self)
                -> Option<Self::Def>
            {
                let src = sema.find_file(src.syntax().clone()).with_value(src);
                sema.sb.borrow_mut().to_id(sema.db, src.cloned()).map(Into::into)
            }
        }
    )*}
}

to_def_impls![
    (crate::Module, ast::Module),
    (crate::Struct, ast::StructDef),
    (crate::Enum, ast::EnumDef),
    (crate::Union, ast::UnionDef),
    (crate::Trait, ast::TraitDef),
    (crate::ImplBlock, ast::ImplBlock),
    (crate::TypeAlias, ast::TypeAliasDef),
    (crate::Const, ast::ConstDef),
    (crate::Static, ast::StaticDef),
    (crate::Function, ast::FnDef),
    (crate::StructField, ast::RecordFieldDef),
    (crate::EnumVariant, ast::EnumVariant),
    (crate::TypeParam, ast::TypeParam),
    (crate::MacroDef, ast::MacroCall), // this one is dubious, not all calls are macros
];

impl ToDef for ast::BindPat {
    type Def = Local;

    fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self) -> Option<Local> {
        let src = sema.find_file(src.syntax().clone()).with_value(src);
        let file_id = src.file_id;
        let mut sb = sema.sb.borrow_mut();
        let db = sema.db;
        let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| {
            let res = match_ast! {
                match it {
                    ast::ConstDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
                    ast::StaticDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
                    ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
                    _ => return None,
                }
            };
            Some(res)
        })?;
        let (_body, source_map) = db.body_with_source_map(parent);
        let src = src.cloned().map(ast::Pat::from);
        let pat_id = source_map.node_pat(src.as_ref())?;
        Some(Local { parent: parent.into(), pat_id })
    }
}

fn find_root(node: &SyntaxNode) -> SyntaxNode {
    node.ancestors().last().unwrap()
}

pub struct SemanticsScope<'a, DB> {
    pub db: &'a DB,
    resolver: Resolver,
}

impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
    pub fn module(&self) -> Option<Module> {
        Some(Module { id: self.resolver.module()? })
    }

    /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
    // FIXME: rename to visible_traits to not repeat scope?
    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
        let resolver = &self.resolver;
        resolver.traits_in_scope(self.db)
    }

    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let resolver = &self.resolver;

        resolver.process_all_names(self.db, &mut |name, def| {
            let def = match def {
                resolver::ScopeDef::PerNs(it) => it.into(),
                resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
                resolver::ScopeDef::Local(pat_id) => {
                    let parent = resolver.body_owner().unwrap().into();
                    ScopeDef::Local(Local { parent, pat_id })
                }
            };
            f(name, def)
        })
    }

    pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
        resolve_hir_path(self.db, &self.resolver, path)
    }
}

// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
    let mut elem: InFile<SyntaxElement> = node.map(|n| n.clone().into());

    while let Some((range, Origin::Call)) = original_range_and_origin(db, elem.as_ref()) {
        let original_file = range.file_id.original_file(db);

        if range.file_id == original_file.into() {
            return FileRange { file_id: original_file, range: range.value };
        }

        if range.file_id != elem.file_id {
            if let Some(root) = db.parse_or_expand(range.file_id) {
                elem = range.with_value(find_covering_element(&root, range.value));
                continue;
            }
        }

        log::error!("Fail to mapping up more for {:?}", range);
        return FileRange { file_id: range.file_id.original_file(db), range: range.value };
    }

    // Fall back to whole macro call
    if let Some(expansion) = node.file_id.expansion_info(db) {
        if let Some(call_node) = expansion.call_node() {
            return FileRange {
                file_id: call_node.file_id.original_file(db),
                range: call_node.value.text_range(),
            };
        }
    }

    FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
}

fn original_range_and_origin(
    db: &impl HirDatabase,
    elem: InFile<&SyntaxElement>,
) -> Option<(InFile<TextRange>, Origin)> {
    let expansion = elem.file_id.expansion_info(db)?;

    let node = match elem.as_ref().value {
        NodeOrToken::Node(it) => elem.with_value(it),
        NodeOrToken::Token(it) => {
            let (tt, origin) = expansion.map_token_up(elem.with_value(it))?;
            return Some((tt.map(|it| it.text_range()), origin));
        }
    };

    // the input node has only one token ?
    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;

    return Some(node.value.descendants().find_map(|it| {
        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;

        if !single && first == last {
            return None;
        }

        // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
        let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
        let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;

        if first.file_id != last.file_id || first_origin != last_origin {
            return None;
        }

        // FIXME: Add union method in TextRange
        Some((
            first.with_value(union_range(first.value.text_range(), last.value.text_range())),
            first_origin,
        ))
    })?);

    fn union_range(a: TextRange, b: TextRange) -> TextRange {
        let start = a.start().min(b.start());
        let end = a.end().max(b.end());
        TextRange::from_to(start, end)
    }
}
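For orientation, here is a minimal sketch of how an IDE feature might drive the API introduced above, using only items defined in this file plus `HirDatabase` and `FileId` from its imports. The function name, the caller-supplied database, and the "first expression in the file" choice are illustrative assumptions, not part of this commit.

use ra_db::FileId;
use ra_syntax::{ast, AstNode};

use crate::{db::HirDatabase, semantics::Semantics, Type};

// Illustrative sketch (not part of the commit): type of the first expression in a file.
fn type_of_first_expr(db: &impl HirDatabase, file_id: FileId) -> Option<Type> {
    let sema = Semantics::new(db);
    // `parse` caches the file's syntax root, so nodes taken from this tree can
    // later be mapped back to their `HirFileId` by `find_file`.
    let source_file = sema.parse(file_id);
    // Assumption for the sketch: the file contains at least one expression.
    let expr = source_file.syntax().descendants().find_map(ast::Expr::cast)?;
    // Nodes handed back to `sema` must originate from a tree it produced;
    // otherwise `find_file` panics with the "known nodes" message above.
    sema.type_of_expr(&expr)
}

Note that every query goes through `find_file`/`analyze`, so the `Semantics` value, not the raw database, is the unit of consistency: passing in nodes from a different `Semantics` instance triggers the panic in `find_file`.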