author    Igor Aleksanov <[email protected]>  2020-08-14 05:34:07 +0100
committer Igor Aleksanov <[email protected]>  2020-08-14 05:34:07 +0100
commit    c26c911ec1e6c2ad1dcb7d155a6a1d528839ad1a (patch)
tree      7cff36c38234be0afb65273146d8247083a5cfeb /crates/hir/src/semantics.rs
parent    3c018bf84de5c693b5ee1c6bec0fed3b201c2060 (diff)
parent    f1f73649a686dc6e6449afc35e0fa6fed00e225d (diff)
Merge branch 'master' into add-disable-diagnostics
Diffstat (limited to 'crates/hir/src/semantics.rs')
 -rw-r--r--  crates/hir/src/semantics.rs  850
 1 files changed, 850 insertions, 0 deletions
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
new file mode 100644
index 000000000..d8beac98a
--- /dev/null
+++ b/crates/hir/src/semantics.rs
@@ -0,0 +1,850 @@
//! See `Semantics`.

mod source_to_def;

use std::{cell::RefCell, fmt, iter::successors};

use base_db::{FileId, FileRange};
use hir_def::{
    resolver::{self, HasResolver, Resolver},
    AsMacroCall, FunctionId, TraitId, VariantId,
};
use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
use hir_ty::associated_type_shorthand_candidates;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
    algo::{find_node_at_offset, skip_trivia_token},
    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize,
};

use crate::{
    db::HirDatabase,
    diagnostics::Diagnostic,
    semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
    source_analyzer::{resolve_hir_path, resolve_hir_path_qualifier, SourceAnalyzer},
    AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
    Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef,
    VariantDef,
};
use resolver::TypeNs;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PathResolution {
    /// An item
    Def(ModuleDef),
    /// A local binding (only value namespace)
    Local(Local),
    /// A generic parameter
    TypeParam(TypeParam),
    SelfType(ImplDef),
    Macro(MacroDef),
    AssocItem(AssocItem),
}
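
// For orientation (hypothetical snippets, not part of this file): in
// `let x = 92; x + 1` the path `x` resolves to `PathResolution::Local`, in
// `fn f<T>(t: T)` the path `T` resolves to `PathResolution::TypeParam`, and
// `Vec` in `Vec::new()` resolves to `PathResolution::Def(ModuleDef::Adt(..))`.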

impl PathResolution {
    fn in_type_ns(&self) -> Option<TypeNs> {
        match self {
            PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
            PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
                Some(TypeNs::BuiltinType(*builtin))
            }
            PathResolution::Def(ModuleDef::Const(_))
            | PathResolution::Def(ModuleDef::EnumVariant(_))
            | PathResolution::Def(ModuleDef::Function(_))
            | PathResolution::Def(ModuleDef::Module(_))
            | PathResolution::Def(ModuleDef::Static(_))
            | PathResolution::Def(ModuleDef::Trait(_)) => None,
            PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                Some(TypeNs::TypeAliasId((*alias).into()))
            }
            PathResolution::Local(_) | PathResolution::Macro(_) => None,
            PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
            PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
            PathResolution::AssocItem(AssocItem::Const(_))
            | PathResolution::AssocItem(AssocItem::Function(_)) => None,
            PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => {
                Some(TypeNs::TypeAliasId((*alias).into()))
            }
        }
    }

    /// Returns an iterator over associated types that may be specified after this path (using
    /// `Ty::Assoc` syntax).
    pub fn assoc_type_shorthand_candidates<R>(
        &self,
        db: &dyn HirDatabase,
        mut cb: impl FnMut(TypeAlias) -> Option<R>,
    ) -> Option<R> {
        associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into()))
    }
}
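
// An illustration of the `Ty::Assoc` shorthand that
// `assoc_type_shorthand_candidates` deals with (hypothetical snippet, not part
// of this file):
//
//     fn print_first<T: Iterator>(iter: T, first: T::Item) { /* ... */ }
//
// Here the qualifier `T` resolves in the type namespace (a `TypeParam`), and
// `Item` is one of the associated-type candidates reported via the callback.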

/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
    pub db: &'db DB,
    imp: SemanticsImpl<'db>,
}

pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    s2d_cache: RefCell<SourceToDefCache>,
    expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
}

impl<DB> fmt::Debug for Semantics<'_, DB> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Semantics {{ ... }}")
    }
}

impl<'db, DB: HirDatabase> Semantics<'db, DB> {
    pub fn new(db: &DB) -> Semantics<DB> {
        let impl_ = SemanticsImpl::new(db);
        Semantics { db, imp: impl_ }
    }

    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
        self.imp.parse(file_id)
    }

    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
        self.imp.expand(macro_call)
    }

    pub fn expand_hypothetical(
        &self,
        actual_macro_call: &ast::MacroCall,
        hypothetical_args: &ast::TokenTree,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, SyntaxToken)> {
        self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
    }

    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
        self.imp.descend_into_macros(token)
    }

    pub fn descend_node_at_offset<N: ast::AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
    }

    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
        self.imp.original_range(node)
    }

    pub fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
        self.imp.diagnostics_display_range(diagnostics)
    }

    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
        self.imp.ancestors_with_macros(node)
    }

    pub fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        self.imp.ancestors_at_offset_with_macros(node, offset)
    }

    /// Find an `AstNode` by offset inside a `SyntaxNode`; if it is inside a *MacroFile*,
    /// search up until a node of the target `AstNode` type is found.
    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

    /// Find an `AstNode` by offset inside a `SyntaxNode`; if it is inside a *MacroCall*,
    /// descend into the expansion and find the node again.
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        if let Some(it) = find_node_at_offset(&node, offset) {
            return Some(it);
        }

        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
    }

    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
        self.imp.type_of_expr(expr)
    }

    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
        self.imp.type_of_pat(pat)
    }

    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
        self.imp.type_of_self(param)
    }

    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
        self.imp.resolve_method_call(call).map(Function::from)
    }

    pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
        self.imp.resolve_method_call_as_callable(call)
    }

    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
        self.imp.resolve_field(field)
    }

    pub fn resolve_record_field(
        &self,
        field: &ast::RecordExprField,
    ) -> Option<(Field, Option<Local>)> {
        self.imp.resolve_record_field(field)
    }

    pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> {
        self.imp.resolve_record_field_pat(field)
    }

    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
        self.imp.resolve_macro_call(macro_call)
    }

    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.imp.resolve_path(path)
    }

    pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
        self.imp.resolve_extern_crate(extern_crate)
    }

    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
        self.imp.resolve_variant(record_lit).map(VariantDef::from)
    }

    pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
        self.imp.lower_path(path)
    }

    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
        self.imp.resolve_bind_pat_to_const(pat)
    }

    // FIXME: use this instead?
    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;

    pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
        self.imp.record_literal_missing_fields(literal)
    }

    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
        self.imp.record_pattern_missing_fields(pattern)
    }

    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
        let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
        T::to_def(&self.imp, src)
    }

    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
        self.imp.to_module_def(file)
    }

    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
        self.imp.scope(node)
    }

    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
        self.imp.scope_at_offset(node, offset)
    }

    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
        self.imp.scope_for_def(def)
    }

    pub fn assert_contains_node(&self, node: &SyntaxNode) {
        self.imp.assert_contains_node(node)
    }

    pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool {
        self.imp.is_unsafe_method_call(method_call_expr)
    }

    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
        self.imp.is_unsafe_ref_expr(ref_expr)
    }

    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
        self.imp.is_unsafe_ident_pat(ident_pat)
    }
}
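
// A rough usage sketch of the API above (hypothetical caller; `db`, `file_id`
// and `find_some_expr` are assumed to come from the surrounding IDE layer):
//
//     let sema = Semantics::new(db);
//     let source_file = sema.parse(file_id);
//     let expr: ast::Expr = find_some_expr(&source_file);
//     let ty = sema.type_of_expr(&expr);
//
// Nodes handed back to `Semantics` must originate from `parse` (or one of the
// expansion helpers); `find_file`/`cache` below enforce this with a panic.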

impl<'db> SemanticsImpl<'db> {
    fn new(db: &'db dyn HirDatabase) -> Self {
        SemanticsImpl {
            db,
            s2d_cache: Default::default(),
            cache: Default::default(),
            expansion_info_cache: Default::default(),
        }
    }

    fn parse(&self, file_id: FileId) -> ast::SourceFile {
        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), file_id.into());
        tree
    }

    fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
        let file_id = sa.expand(self.db, macro_call)?;
        let node = self.db.parse_or_expand(file_id)?;
        self.cache(node.clone(), file_id);
        Some(node)
    }

    fn expand_hypothetical(
        &self,
        actual_macro_call: &ast::MacroCall,
        hypothetical_args: &ast::TokenTree,
        token_to_map: SyntaxToken,
    ) -> Option<(SyntaxNode, SyntaxToken)> {
        let macro_call =
            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
        let krate = sa.resolver.krate()?;
        let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
            sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
        })?;
        hir_expand::db::expand_hypothetical(
            self.db.upcast(),
            macro_call_id,
            hypothetical_args,
            token_to_map,
        )
    }

    fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
        let _p = profile::span("descend_into_macros");
        let parent = token.parent();
        let parent = self.find_file(parent);
        let sa = self.analyze2(parent.as_ref(), None);

        let token = successors(Some(parent.with_value(token)), |token| {
            self.db.check_canceled();
            let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
            let tt = macro_call.token_tree()?;
            if !tt.syntax().text_range().contains_range(token.value.text_range()) {
                return None;
            }
            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
            let token = self
                .expansion_info_cache
                .borrow_mut()
                .entry(file_id)
                .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
                .as_ref()?
                .map_token_down(token.as_ref())?;

            self.cache(find_root(&token.value.parent()), token.file_id);

            Some(token)
        })
        .last()
        .unwrap();

        token.value
    }
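
    // A worked example for the loop above (hypothetical input): for the token
    // `0` inside `foo!(0 + 1)`, each step maps the token into the expansion of
    // the innermost enclosing macro call, repeating until no enclosing macro
    // call remains; the last successfully mapped token is returned.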

    fn descend_node_at_offset(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        // Handle macro token cases
        node.token_at_offset(offset)
            .map(|token| self.descend_into_macros(token))
            .map(|it| self.ancestors_with_macros(it.parent()))
            .flatten()
    }

    fn original_range(&self, node: &SyntaxNode) -> FileRange {
        let node = self.find_file(node.clone());
        original_range(self.db, node.as_ref())
    }

    fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
        let src = diagnostics.display_source();
        let root = self.db.parse_or_expand(src.file_id).unwrap();
        let node = src.value.to_node(&root);
        self.cache(root, src.file_id);
        original_range(self.db, src.with_value(&node))
    }

    fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
        let node = self.find_file(node);
        node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
    }

    fn ancestors_at_offset_with_macros(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        node.token_at_offset(offset)
            .map(|token| self.ancestors_with_macros(token.parent()))
            .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
    }

    fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
        self.analyze(expr.syntax()).type_of_expr(self.db, &expr)
    }

    fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
        self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
    }

    fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
        self.analyze(param.syntax()).type_of_self(self.db, &param)
    }

    fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
        self.analyze(call.syntax()).resolve_method_call(self.db, call)
    }

    fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
        // FIXME: this erases Substs
        let func = self.resolve_method_call(call)?;
        let ty = self.db.value_ty(func.into());
        let resolver = self.analyze(call.syntax()).resolver;
        let ty = Type::new_with_resolver(self.db, &resolver, ty.value)?;
        let mut res = ty.as_callable(self.db)?;
        res.is_bound_method = true;
        Some(res)
    }

    fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
        self.analyze(field.syntax()).resolve_field(self.db, field)
    }

    fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, Option<Local>)> {
        self.analyze(field.syntax()).resolve_record_field(self.db, field)
    }

    fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> {
        self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
    }

    fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
        let sa = self.analyze(macro_call.syntax());
        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
        sa.resolve_macro_call(self.db, macro_call)
    }

    fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
        self.analyze(path.syntax()).resolve_path(self.db, path)
    }

    fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
        let krate = self.scope(extern_crate.syntax()).krate()?;
        krate.dependencies(self.db).into_iter().find_map(|dep| {
            if dep.name == extern_crate.name_ref()?.as_name() {
                Some(dep.krate)
            } else {
                None
            }
        })
    }

    fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
        self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
    }

    fn lower_path(&self, path: &ast::Path) -> Option<Path> {
        let src = self.find_file(path.syntax().clone());
        Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into()))
    }

    fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
        self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
    }

    fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
        self.analyze(literal.syntax())
            .record_literal_missing_fields(self.db, literal)
            .unwrap_or_default()
    }

    fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
        self.analyze(pattern.syntax())
            .record_pattern_missing_fields(self.db, pattern)
            .unwrap_or_default()
    }

    fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
        let mut cache = self.s2d_cache.borrow_mut();
        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
        f(&mut ctx)
    }

    fn to_module_def(&self, file: FileId) -> Option<Module> {
        self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from)
    }

    fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
        let node = self.find_file(node.clone());
        let resolver = self.analyze2(node.as_ref(), None).resolver;
        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
    }

    fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
        let node = self.find_file(node.clone());
        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
    }

    fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
        let file_id = self.db.lookup_intern_trait(def.id).id.file_id;
        let resolver = def.id.resolver(self.db.upcast());
        SemanticsScope { db: self.db, file_id, resolver }
    }

    fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
        let src = self.find_file(node.clone());
        self.analyze2(src.as_ref(), None)
    }

    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
        let _p = profile::span("Semantics::analyze2");

        let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
            Some(it) => it,
            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
        };

        let resolver = match container {
            ChildContainer::DefWithBodyId(def) => {
                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
            }
            ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
            ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
            ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
            ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
            ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
            ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
            ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
        };
        SourceAnalyzer::new_for_resolver(resolver, src)
    }

    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
        assert!(root_node.parent().is_none());
        let mut cache = self.cache.borrow_mut();
        let prev = cache.insert(root_node, file_id);
        assert!(prev == None || prev == Some(file_id))
    }

    fn assert_contains_node(&self, node: &SyntaxNode) {
        self.find_file(node.clone());
    }

    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
        let cache = self.cache.borrow();
        cache.get(root_node).copied()
    }

    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
        let root_node = find_root(&node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
                "\n\nFailed to lookup {:?} in this Semantics.\n\
                Make sure to use only query nodes, derived from this instance of Semantics.\n\
                root node: {:?}\n\
                known nodes: {}\n\n",
                node,
                root_node,
                self.cache
                    .borrow()
                    .keys()
                    .map(|it| format!("{:?}", it))
                    .collect::<Vec<_>>()
                    .join(", ")
            )
        });
        InFile::new(file_id, node)
    }

    pub fn is_unsafe_method_call(&self, method_call_expr: ast::MethodCallExpr) -> bool {
        method_call_expr
            .expr()
            .and_then(|expr| {
                let field_expr = if let ast::Expr::FieldExpr(field_expr) = expr {
                    field_expr
                } else {
                    return None;
                };
                let ty = self.type_of_expr(&field_expr.expr()?)?;
                if !ty.is_packed(self.db) {
                    return None;
                }

                let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
                let is_unsafe = func.has_self_param(self.db)
                    && matches!(func.params(self.db).first(), Some(TypeRef::Reference(..)));
                Some(is_unsafe)
            })
            .unwrap_or(false)
    }

    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
        ref_expr
            .expr()
            .and_then(|expr| {
                let field_expr = match expr {
                    ast::Expr::FieldExpr(field_expr) => field_expr,
                    _ => return None,
                };
                let expr = field_expr.expr()?;
                self.type_of_expr(&expr)
            })
            // Binding a reference to a packed type is possibly unsafe.
            .map(|ty| ty.is_packed(self.db))
            .unwrap_or(false)

        // FIXME This needs layout computation to be correct. It will highlight
        // more than it should with the current implementation.
    }

    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
        if ident_pat.ref_token().is_none() {
            return false;
        }

        ident_pat
            .syntax()
            .parent()
            .and_then(|parent| {
                // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
                // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
                // so this tries to lookup the `IdentPat` anywhere along that structure to the
                // `RecordPat` so we can get the containing type.
                let record_pat = ast::RecordPatField::cast(parent.clone())
                    .and_then(|record_pat| record_pat.syntax().parent())
                    .or_else(|| Some(parent.clone()))
                    .and_then(|parent| {
                        ast::RecordPatFieldList::cast(parent)?
                            .syntax()
                            .parent()
                            .and_then(ast::RecordPat::cast)
                    });

                // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if
                // this is initialized from a `FieldExpr`.
                if let Some(record_pat) = record_pat {
                    self.type_of_pat(&ast::Pat::RecordPat(record_pat))
                } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
                    let field_expr = match let_stmt.initializer()? {
                        ast::Expr::FieldExpr(field_expr) => field_expr,
                        _ => return None,
                    };

                    self.type_of_expr(&field_expr.expr()?)
                } else {
                    None
                }
            })
            // Binding a reference to a packed type is possibly unsafe.
            .map(|ty| ty.is_packed(self.db))
            .unwrap_or(false)
    }
}
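
// Illustration of the packed-field cases the `is_unsafe_*` helpers above look
// for (hypothetical snippet, not part of this file):
//
//     #[repr(packed)]
//     struct P { field: u32 }
//
//     let p = P { field: 92 };
//     let r = &p.field;           // flagged by `is_unsafe_ref_expr`
//     let P { ref field } = p;    // flagged by `is_unsafe_ident_pat`
//
// References to fields of `#[repr(packed)]` types may be misaligned, which is
// why these helpers report them.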

pub trait ToDef: AstNode + Clone {
    type Def;

    fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
}

macro_rules! to_def_impls {
    ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
        impl ToDef for $ast {
            type Def = $def;
            fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
                sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
            }
        }
    )*}
}

to_def_impls![
    (crate::Module, ast::Module, module_to_def),
    (crate::Struct, ast::Struct, struct_to_def),
    (crate::Enum, ast::Enum, enum_to_def),
    (crate::Union, ast::Union, union_to_def),
    (crate::Trait, ast::Trait, trait_to_def),
    (crate::ImplDef, ast::Impl, impl_to_def),
    (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
    (crate::Const, ast::Const, const_to_def),
    (crate::Static, ast::Static, static_to_def),
    (crate::Function, ast::Fn, fn_to_def),
    (crate::Field, ast::RecordField, record_field_to_def),
    (crate::Field, ast::TupleField, tuple_field_to_def),
    (crate::EnumVariant, ast::Variant, enum_variant_to_def),
    (crate::TypeParam, ast::TypeParam, type_param_to_def),
    (crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros
    (crate::Local, ast::IdentPat, bind_pat_to_def),
];
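
// For a single triple above, `to_def_impls!` expands to roughly the following
// (sketch of the generated code for `(crate::Function, ast::Fn, fn_to_def)`):
//
//     impl ToDef for ast::Fn {
//         type Def = crate::Function;
//         fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
//             sema.with_ctx(|ctx| ctx.fn_to_def(src)).map(<crate::Function>::from)
//         }
//     }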

fn find_root(node: &SyntaxNode) -> SyntaxNode {
    node.ancestors().last().unwrap()
}

#[derive(Debug)]
pub struct SemanticsScope<'a> {
    pub db: &'a dyn HirDatabase,
    file_id: HirFileId,
    resolver: Resolver,
}

impl<'a> SemanticsScope<'a> {
    pub fn module(&self) -> Option<Module> {
        Some(Module { id: self.resolver.module()? })
    }

    pub fn krate(&self) -> Option<Crate> {
        Some(Crate { id: self.resolver.krate()? })
    }

    /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
    // FIXME: rename to visible_traits to not repeat scope?
    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
        let resolver = &self.resolver;
        resolver.traits_in_scope(self.db.upcast())
    }

    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
        let resolver = &self.resolver;

        resolver.process_all_names(self.db.upcast(), &mut |name, def| {
            let def = match def {
                resolver::ScopeDef::PerNs(it) => {
                    let items = ScopeDef::all_items(it);
                    for item in items {
                        f(name.clone(), item);
                    }
                    return;
                }
                resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
                resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
                resolver::ScopeDef::Local(pat_id) => {
                    let parent = resolver.body_owner().unwrap().into();
                    ScopeDef::Local(Local { parent, pat_id })
                }
            };
            f(name, def)
        })
    }

    /// Resolve a path as if it was written at the given scope. This is
    /// necessarily a heuristic, as it doesn't take hygiene into account.
    pub fn resolve_hypothetical(&self, path: &ast::Path) -> Option<PathResolution> {
        let hygiene = Hygiene::new(self.db.upcast(), self.file_id);
        let path = Path::from_src(path.clone(), &hygiene)?;
        self.resolve_hir_path(&path)
    }

    pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
        resolve_hir_path(self.db, &self.resolver, path)
    }

    /// Resolves a path where we know it is a qualifier of another path.
    ///
    /// For example, if we have:
    /// ```
    /// mod my {
    ///     pub mod foo {
    ///         struct Bar;
    ///     }
    ///
    ///     pub fn foo() {}
    /// }
    /// ```
    /// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
    pub fn resolve_hir_path_qualifier(&self, path: &Path) -> Option<PathResolution> {
        resolve_hir_path_qualifier(self.db, &self.resolver, path)
    }
}
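
// A rough usage sketch for `SemanticsScope` (hypothetical caller; `sema` and
// `node` are assumed to come from the surrounding IDE layer):
//
//     let scope = sema.scope(&node);
//     scope.process_all_names(&mut |name, def| {
//         // e.g. turn every visible `name`/`def` pair into a completion item
//     });
//     let visible_traits = scope.traits_in_scope();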

// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
    if let Some(range) = original_range_opt(db, node) {
        let original_file = range.file_id.original_file(db.upcast());
        if range.file_id == original_file.into() {
            return FileRange { file_id: original_file, range: range.value };
        }

        log::error!("Failed to map up more for {:?}", range);
        return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value };
    }

    // Fall back to whole macro call
    if let Some(expansion) = node.file_id.expansion_info(db.upcast()) {
        if let Some(call_node) = expansion.call_node() {
            return FileRange {
                file_id: call_node.file_id.original_file(db.upcast()),
                range: call_node.value.text_range(),
            };
        }
    }

    FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() }
}
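
// For instance (hypothetical situation): for a node that only exists inside
// the expansion of `foo!(...)` and whose tokens cannot be mapped back
// individually, the fallback above reports the range of the whole `foo!(...)`
// call in the original file.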

fn original_range_opt(
    db: &dyn HirDatabase,
    node: InFile<&SyntaxNode>,
) -> Option<InFile<TextRange>> {
    let expansion = node.file_id.expansion_info(db.upcast())?;

    // Does the input node have only one token?
    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;

    Some(node.value.descendants().find_map(|it| {
        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
        let first = ascend_call_token(db, &expansion, node.with_value(first))?;

        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
        let last = ascend_call_token(db, &expansion, node.with_value(last))?;

        if (!single && first == last) || (first.file_id != last.file_id) {
            return None;
        }

        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
    })?)
}

fn ascend_call_token(
    db: &dyn HirDatabase,
    expansion: &ExpansionInfo,
    token: InFile<SyntaxToken>,
) -> Option<InFile<SyntaxToken>> {
    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
    if origin != Origin::Call {
        return None;
    }
    if let Some(info) = mapped.file_id.expansion_info(db.upcast()) {
        return ascend_call_token(db, &info, mapped);
    }
    Some(mapped)
}