author      Aleksey Kladov <[email protected]>   2020-02-26 12:04:22 +0000
committer   GitHub <[email protected]>           2020-02-26 12:04:22 +0000
commit      5c64ad27e041bcdb281c0a751720ceb3a6369d04 (patch)
tree        12d89798f61b276f8bd640db07276a7d4e92b1c2 /crates/ra_hir/src/semantics.rs
parent      04deae3dba7c9b7054f7a1d64e4b93a05aecc132 (diff)
parent      c3a4c4429de83450654795534e64e878a774a088 (diff)
Merge pull request #3222 from matklad/identity
Introduce Semantics API
Diffstat (limited to 'crates/ra_hir/src/semantics.rs')
-rw-r--r--    crates/ra_hir/src/semantics.rs    335
1 file changed, 335 insertions, 0 deletions
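
For orientation, here is a hedged sketch of how a caller might use the `Semantics` API introduced below: build a `Semantics` over a database, parse a file through it, and ask semantic questions about nodes of that tree. Only the method names come from the file in this diff; `RootDatabase` and the surrounding setup are assumptions, not part of this change.

    // Hypothetical caller. `RootDatabase` stands in for any concrete `HirDatabase`
    // implementation; the import path of `Semantics` is elided.
    use ra_db::FileId;
    use ra_syntax::{ast, AstNode};

    fn count_typed_exprs(db: &RootDatabase, file_id: FileId) -> usize {
        let sema = Semantics::new(db);
        let source_file = sema.parse(file_id);
        source_file
            .syntax()
            .descendants()
            .filter_map(ast::Expr::cast)
            .filter(|expr| sema.type_of_expr(expr).is_some())
            .count()
    }
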
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
new file mode 100644
index 000000000..22a7e7588
--- /dev/null
+++ b/crates/ra_hir/src/semantics.rs
@@ -0,0 +1,335 @@
//! See `Semantics`.

use std::{cell::RefCell, fmt, iter::successors};

use hir_def::{
    resolver::{self, HasResolver, Resolver},
    TraitId,
};
use ra_db::{FileId, FileRange};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextUnit};
use rustc_hash::{FxHashMap, FxHashSet};

use crate::{
    db::HirDatabase,
    source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
    source_binder::{ChildContainer, SourceBinder, ToDef},
    Function, HirFileId, InFile, Local, MacroDef, Module, Name, Origin, Path, PathResolution,
    ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
};
use ra_prof::profile;

/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
    pub db: &'db DB,
    pub(crate) sb: RefCell<SourceBinder>,
    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
}

impl<DB> fmt::Debug for Semantics<'_, DB> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Semantics {{ ... }}")
    }
}

impl<'db, DB: HirDatabase> Semantics<'db, DB> {
    pub fn new(db: &DB) -> Semantics<DB> {
        let sb = RefCell::new(SourceBinder::new());
        Semantics { db, sb, cache: RefCell::default() }
    }

    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), file_id.into());
        tree
    }

    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
        let file_id = sa.expand(self.db, macro_call)?;
        let node = self.db.parse_or_expand(file_id)?;
        self.cache(node.clone(), file_id);
        Some(node)
    }
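
    // Illustrative usage sketch (hypothetical caller, not part of this file's API):
    // `expand` parses the single-step expansion of a macro call and registers the
    // expanded tree in the cache, so later queries on its nodes can find their file.
    //
    //     let macro_call = file.syntax().descendants().find_map(ast::MacroCall::cast)?;
    //     let expanded: SyntaxNode = sema.expand(&macro_call)?;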

    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
        let parent = token.parent();
        let parent = self.find_file(parent);
        let sa = self.analyze2(parent.as_ref(), None);

        let token = successors(Some(parent.with_value(token)), |token| {
            let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
            let tt = macro_call.token_tree()?;
            if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
                return None;
            }
            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
            let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;

            self.cache(find_root(&token.value.parent()), token.file_id);

            Some(token)
        })
        .last()
        .unwrap();

        token.value
    }
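
    // Illustrative usage sketch (hypothetical caller): `descend_into_macros` maps a
    // token from the real file down into the deepest macro expansion that contains
    // it, which is useful before running hover or completion on expanded code.
    //
    //     let token = file.syntax().token_at_offset(offset).left_biased()?;
    //     let token = sema.descend_into_macros(token);
    //
    // (`token_at_offset` and `left_biased` are assumed ra_syntax helpers here.)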

    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
        let node = self.find_file(node.clone());
        original_range(self.db, node.as_ref())
    }

    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
        let node = self.find_file(node);
        node.ancestors_with_macros(self.db).map(|it| it.value)
    }

    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
        self.analyze(expr.syntax()).type_of(self.db, &expr)
    }

    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
        self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
    }

    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
        self.analyze(call.syntax()).resolve_method_call(call)
    }

    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> {
        self.analyze(field.syntax()).resolve_field(field)
    }

    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> {
        self.analyze(field.syntax()).resolve_record_field(field)
    }

    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
        self.analyze(record_lit.syntax()).resolve_record_literal(record_lit)
    }

    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
        self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
    }

    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
        let sa = self.analyze(macro_call.syntax());
        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
        sa.resolve_macro_call(self.db, macro_call)
    }

    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.analyze(path.syntax()).resolve_path(self.db, path)
    }

    // FIXME: use this instead?
    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;

    pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> {
        let src = self.find_file(src.syntax().clone()).with_value(src.clone());
        let mut sb = self.sb.borrow_mut();
        T::to_def(self.db, &mut sb, src)
    }

    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
        let mut sb = self.sb.borrow_mut();
        sb.to_module_def(self.db, file)
    }

    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
        let node = self.find_file(node.clone());
        let resolver = self.analyze2(node.as_ref(), None).resolver;
        SemanticsScope { db: self.db, resolver }
    }

    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
        let node = self.find_file(node.clone());
        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
        SemanticsScope { db: self.db, resolver }
    }

    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
        let resolver = def.id.resolver(self.db);
        SemanticsScope { db: self.db, resolver }
    }
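
    // Illustrative usage sketch (hypothetical caller): all three scope constructors
    // above return a `SemanticsScope` whose resolver is positioned at a node, at an
    // offset inside a node, or at a definition.
    //
    //     let scope = sema.scope_at_offset(file.syntax(), offset);
    //     let current_module: Option<Module> = scope.module();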

    // FIXME: we only use this in `inline_local_variable` assist, ideally, we
    // should switch to general reference search infra there.
    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
        self.analyze(pat.syntax()).find_all_refs(pat)
    }

    fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
        let src = self.find_file(node.clone());
        self.analyze2(src.as_ref(), None)
    }

    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
        let _p = profile("Semantics::analyze2");

        let container = match self.sb.borrow_mut().find_container(self.db, src) {
            Some(it) => it,
            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
        };

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
            }
            ChildContainer::TraitId(it) => it.resolver(self.db),
            ChildContainer::ImplId(it) => it.resolver(self.db),
            ChildContainer::ModuleId(it) => it.resolver(self.db),
            ChildContainer::EnumId(it) => it.resolver(self.db),
            ChildContainer::VariantId(it) => it.resolver(self.db),
            ChildContainer::GenericDefId(it) => it.resolver(self.db),
        };
        SourceAnalyzer::new_for_resolver(resolver, src)
    }

    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
        assert!(root_node.parent().is_none());
        let mut cache = self.cache.borrow_mut();
        let prev = cache.insert(root_node, file_id);
        assert!(prev == None || prev == Some(file_id))
    }

    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node.clone());
    }

    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
        let cache = self.cache.borrow();
        cache.get(root_node).copied()
    }

    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
        let root_node = find_root(&node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                 Make sure to use only query nodes, derived from this instance of Semantics.\n\
                 root node: {:?}\n\
                 known nodes: {}\n\n",
                node,
                root_node,
                self.cache
                    .borrow()
                    .keys()
                    .map(|it| format!("{:?}", it))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }
}
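
// Added commentary on the caching contract (an interpretation, not in the upstream
// source): every syntax tree handed out by a `Semantics` instance (via `parse`,
// `expand`, or `descend_into_macros`) has its root registered in `cache`.
// `find_file` then recovers the `HirFileId` of any node by walking up to its root,
// and panics for nodes created elsewhere, which is what `assert_contains_node`
// checks. A hypothetical caller:
//
//     let sema = Semantics::new(db);
//     let file = sema.parse(file_id);
//     sema.assert_contains_node(file.syntax()); // ok: this tree came from `sema`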

fn find_root(node: &SyntaxNode) -> SyntaxNode {
    node.ancestors().last().unwrap()
}

pub struct SemanticsScope<'a, DB> {
    pub db: &'a DB,
    resolver: Resolver,
}

impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
    pub fn module(&self) -> Option<Module> {
        Some(Module { id: self.resolver.module()? })
    }

    /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
    // FIXME: rename to visible_traits to not repeat scope?
    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
        let resolver = &self.resolver;
        resolver.traits_in_scope(self.db)
    }

    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let resolver = &self.resolver;

        resolver.process_all_names(self.db, &mut |name, def| {
            let def = match def {
                resolver::ScopeDef::PerNs(it) => it.into(),
                resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
                resolver::ScopeDef::Local(pat_id) => {
                    let parent = resolver.body_owner().unwrap().into();
                    ScopeDef::Local(Local { parent, pat_id })
                }
            };
            f(name, def)
        })
    }

    pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
        resolve_hir_path(self.db, &self.resolver, path)
    }
}
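
// Illustrative usage sketch (hypothetical caller): `process_all_names` enumerates
// every binding visible to the scope's resolver, and `traits_in_scope` supplies the
// trait set that method resolution can consider. A completion pass might collect
// candidates like this:
//
//     let scope = sema.scope(expr.syntax());
//     let mut candidates: Vec<(Name, ScopeDef)> = Vec::new();
//     scope.process_all_names(&mut |name, def| candidates.push((name, def)));
//     let traits = scope.traits_in_scope();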

// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
    if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
        return range;
    }

    if let Some(expansion) = node.file_id.expansion_info(db) {
        if let Some(call_node) = expansion.call_node() {
            return FileRange {
                file_id: call_node.file_id.original_file(db),
                range: call_node.value.text_range(),
            };
        }
    }

    FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
}
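
// Added note (an interpretation, not in the upstream source): for a node that lives
// inside a macro expansion, `original_range` tries to map it back to a range in the
// file containing the macro call, then falls back to the call site, and finally to
// the node's own file. A hypothetical caller turning a resolved node into an editor
// highlight:
//
//     let frange: FileRange = sema.original_range(name_ref.syntax());
//
// (`name_ref` is an assumed `ast::NameRef` taken from a tree parsed via `sema`.)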

fn original_range_and_origin(
    db: &impl HirDatabase,
    node: InFile<&SyntaxNode>,
) -> Option<(FileRange, Origin)> {
    let expansion = node.file_id.expansion_info(db)?;

    // Does the input node consist of a single token?
    let single = node.value.first_token()? == node.value.last_token()?;

    // FIXME: We should handle recursive macro expansions
    let (range, origin) = node.value.descendants().find_map(|it| {
        let first = it.first_token()?;
        let last = it.last_token()?;

        if !single && first == last {
            return None;
        }

        // Try to map the first and last tokens of the node; on success, return the
        // union range of the mapped tokens
        let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
        let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;

        if first.file_id != last.file_id || first_origin != last_origin {
            return None;
        }

        // FIXME: Add union method in TextRange
        Some((
            first.with_value(union_range(first.value.text_range(), last.value.text_range())),
            first_origin,
        ))
    })?;

    return Some((
        FileRange { file_id: range.file_id.original_file(db), range: range.value },
        origin,
    ));

    fn union_range(a: TextRange, b: TextRange) -> TextRange {
        let start = a.start().min(b.start());
        let end = a.end().max(b.end());
        TextRange::from_to(start, end)
    }
}