Diffstat (limited to 'crates/hir/src/semantics.rs')
-rw-r--r-- | crates/hir/src/semantics.rs | 819
1 file changed, 819 insertions(+), 0 deletions(-)
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
new file mode 100644
index 000000000..1594d4f0f
--- /dev/null
+++ b/crates/hir/src/semantics.rs
@@ -0,0 +1,819 @@
1 | //! See `Semantics`. | ||
2 | |||
3 | mod source_to_def; | ||
4 | |||
5 | use std::{cell::RefCell, fmt, iter::successors}; | ||
6 | |||
7 | use base_db::{FileId, FileRange}; | ||
8 | use hir_def::{ | ||
9 | resolver::{self, HasResolver, Resolver, TypeNs}, | ||
10 | AsMacroCall, FunctionId, TraitId, VariantId, | ||
11 | }; | ||
12 | use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo}; | ||
13 | use hir_ty::associated_type_shorthand_candidates; | ||
14 | use itertools::Itertools; | ||
15 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
16 | use syntax::{ | ||
17 | algo::{find_node_at_offset, skip_trivia_token}, | ||
18 | ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextSize, | ||
19 | }; | ||
20 | |||
21 | use crate::{ | ||
22 | code_model::Access, | ||
23 | db::HirDatabase, | ||
24 | diagnostics::Diagnostic, | ||
25 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, | ||
26 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, | ||
27 | AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, | ||
28 | Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, VariantDef, | ||
29 | }; | ||
30 | |||
31 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
32 | pub enum PathResolution { | ||
33 | /// An item | ||
34 | Def(ModuleDef), | ||
35 | /// A local binding (only value namespace) | ||
36 | Local(Local), | ||
37 | /// A generic parameter | ||
38 | TypeParam(TypeParam), | ||
39 | SelfType(ImplDef), | ||
40 | Macro(MacroDef), | ||
41 | AssocItem(AssocItem), | ||
42 | } | ||
43 | |||
44 | impl PathResolution { | ||
45 | fn in_type_ns(&self) -> Option<TypeNs> { | ||
46 | match self { | ||
47 | PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), | ||
48 | PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { | ||
49 | Some(TypeNs::BuiltinType(*builtin)) | ||
50 | } | ||
51 | PathResolution::Def(ModuleDef::Const(_)) | ||
52 | | PathResolution::Def(ModuleDef::EnumVariant(_)) | ||
53 | | PathResolution::Def(ModuleDef::Function(_)) | ||
54 | | PathResolution::Def(ModuleDef::Module(_)) | ||
55 | | PathResolution::Def(ModuleDef::Static(_)) | ||
56 | | PathResolution::Def(ModuleDef::Trait(_)) => None, | ||
57 | PathResolution::Def(ModuleDef::TypeAlias(alias)) => { | ||
58 | Some(TypeNs::TypeAliasId((*alias).into())) | ||
59 | } | ||
60 | PathResolution::Local(_) | PathResolution::Macro(_) => None, | ||
61 | PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), | ||
62 | PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), | ||
63 | PathResolution::AssocItem(AssocItem::Const(_)) | ||
64 | | PathResolution::AssocItem(AssocItem::Function(_)) => None, | ||
65 | PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => { | ||
66 | Some(TypeNs::TypeAliasId((*alias).into())) | ||
67 | } | ||
68 | } | ||
69 | } | ||
70 | |||
71 | /// Calls `cb` on each associated type that may be specified after this path (using | ||
72 | /// `Ty::Assoc` syntax), returning the first non-`None` result. | ||
73 | pub fn assoc_type_shorthand_candidates<R>( | ||
74 | &self, | ||
75 | db: &dyn HirDatabase, | ||
76 | mut cb: impl FnMut(TypeAlias) -> Option<R>, | ||
77 | ) -> Option<R> { | ||
78 | associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into())) | ||
79 | } | ||
80 | } | ||
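The two methods above are typically driven from a resolution produced by `Semantics::resolve_path` further down. A minimal sketch of a caller, assuming `TypeAlias::name` from this crate's code model; the `find_assoc_type_named` helper is hypothetical and not part of this file:

fn find_assoc_type_named(
    db: &dyn HirDatabase,
    res: &PathResolution,
    name: &str,
) -> Option<TypeAlias> {
    // Visit the associated types reachable as `Path::Assoc` shorthand after this path and
    // keep the first whose name matches; resolutions outside the type namespace yield None.
    res.assoc_type_shorthand_candidates(db, |alias| {
        if alias.name(db).to_string() == name {
            Some(alias)
        } else {
            None
        }
    })
}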
81 | |||
82 | /// Primary API to get semantic information, like types, from syntax trees. | ||
83 | pub struct Semantics<'db, DB> { | ||
84 | pub db: &'db DB, | ||
85 | imp: SemanticsImpl<'db>, | ||
86 | } | ||
87 | |||
88 | pub struct SemanticsImpl<'db> { | ||
89 | pub db: &'db dyn HirDatabase, | ||
90 | s2d_cache: RefCell<SourceToDefCache>, | ||
91 | expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>, | ||
92 | cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, | ||
93 | } | ||
94 | |||
95 | impl<DB> fmt::Debug for Semantics<'_, DB> { | ||
96 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
97 | write!(f, "Semantics {{ ... }}") | ||
98 | } | ||
99 | } | ||
100 | |||
101 | impl<'db, DB: HirDatabase> Semantics<'db, DB> { | ||
102 | pub fn new(db: &DB) -> Semantics<DB> { | ||
103 | let impl_ = SemanticsImpl::new(db); | ||
104 | Semantics { db, imp: impl_ } | ||
105 | } | ||
106 | |||
107 | pub fn parse(&self, file_id: FileId) -> ast::SourceFile { | ||
108 | self.imp.parse(file_id) | ||
109 | } | ||
110 | |||
111 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | ||
112 | self.imp.expand(macro_call) | ||
113 | } | ||
114 | pub fn speculative_expand( | ||
115 | &self, | ||
116 | actual_macro_call: &ast::MacroCall, | ||
117 | hypothetical_args: &ast::TokenTree, | ||
118 | token_to_map: SyntaxToken, | ||
119 | ) -> Option<(SyntaxNode, SyntaxToken)> { | ||
120 | self.imp.speculative_expand(actual_macro_call, hypothetical_args, token_to_map) | ||
121 | } | ||
122 | |||
123 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | ||
124 | self.imp.descend_into_macros(token) | ||
125 | } | ||
126 | |||
127 | pub fn descend_node_at_offset<N: ast::AstNode>( | ||
128 | &self, | ||
129 | node: &SyntaxNode, | ||
130 | offset: TextSize, | ||
131 | ) -> Option<N> { | ||
132 | self.imp.descend_node_at_offset(node, offset).find_map(N::cast) | ||
133 | } | ||
134 | |||
135 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { | ||
136 | self.imp.original_range(node) | ||
137 | } | ||
138 | |||
139 | pub fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { | ||
140 | self.imp.diagnostics_display_range(diagnostics) | ||
141 | } | ||
142 | |||
143 | pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
144 | self.imp.ancestors_with_macros(node) | ||
145 | } | ||
146 | |||
147 | pub fn ancestors_at_offset_with_macros( | ||
148 | &self, | ||
149 | node: &SyntaxNode, | ||
150 | offset: TextSize, | ||
151 | ) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
152 | self.imp.ancestors_at_offset_with_macros(node, offset) | ||
153 | } | ||
154 | |||
155 | /// Find an AstNode by offset inside a SyntaxNode; if it is inside a *MacroFile*, | ||
156 | /// search upwards until a node of the target AstNode type is found. | ||
157 | pub fn find_node_at_offset_with_macros<N: AstNode>( | ||
158 | &self, | ||
159 | node: &SyntaxNode, | ||
160 | offset: TextSize, | ||
161 | ) -> Option<N> { | ||
162 | self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) | ||
163 | } | ||
164 | |||
165 | /// Find an AstNode by offset inside a SyntaxNode; if it is inside a *MacroCall*, | ||
166 | /// descend into the expansion and search again. | ||
167 | pub fn find_node_at_offset_with_descend<N: AstNode>( | ||
168 | &self, | ||
169 | node: &SyntaxNode, | ||
170 | offset: TextSize, | ||
171 | ) -> Option<N> { | ||
172 | if let Some(it) = find_node_at_offset(&node, offset) { | ||
173 | return Some(it); | ||
174 | } | ||
175 | |||
176 | self.imp.descend_node_at_offset(node, offset).find_map(N::cast) | ||
177 | } | ||
178 | |||
179 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | ||
180 | self.imp.type_of_expr(expr) | ||
181 | } | ||
182 | |||
183 | pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> { | ||
184 | self.imp.type_of_pat(pat) | ||
185 | } | ||
186 | |||
187 | pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> { | ||
188 | self.imp.type_of_self(param) | ||
189 | } | ||
190 | |||
191 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | ||
192 | self.imp.resolve_method_call(call).map(Function::from) | ||
193 | } | ||
194 | |||
195 | pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> { | ||
196 | self.imp.resolve_method_call_as_callable(call) | ||
197 | } | ||
198 | |||
199 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> { | ||
200 | self.imp.resolve_field(field) | ||
201 | } | ||
202 | |||
203 | pub fn resolve_record_field( | ||
204 | &self, | ||
205 | field: &ast::RecordExprField, | ||
206 | ) -> Option<(Field, Option<Local>)> { | ||
207 | self.imp.resolve_record_field(field) | ||
208 | } | ||
209 | |||
210 | pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> { | ||
211 | self.imp.resolve_record_field_pat(field) | ||
212 | } | ||
213 | |||
214 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { | ||
215 | self.imp.resolve_macro_call(macro_call) | ||
216 | } | ||
217 | |||
218 | pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> { | ||
219 | self.imp.resolve_path(path) | ||
220 | } | ||
221 | |||
222 | pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> { | ||
223 | self.imp.resolve_extern_crate(extern_crate) | ||
224 | } | ||
225 | |||
226 | pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> { | ||
227 | self.imp.resolve_variant(record_lit).map(VariantDef::from) | ||
228 | } | ||
229 | |||
230 | pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { | ||
231 | self.imp.resolve_bind_pat_to_const(pat) | ||
232 | } | ||
233 | |||
234 | // FIXME: use this instead? | ||
235 | // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; | ||
236 | |||
237 | pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { | ||
238 | self.imp.record_literal_missing_fields(literal) | ||
239 | } | ||
240 | |||
241 | pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { | ||
242 | self.imp.record_pattern_missing_fields(pattern) | ||
243 | } | ||
244 | |||
245 | pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { | ||
246 | let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned(); | ||
247 | T::to_def(&self.imp, src) | ||
248 | } | ||
249 | |||
250 | pub fn to_module_def(&self, file: FileId) -> Option<Module> { | ||
251 | self.imp.to_module_def(file) | ||
252 | } | ||
253 | |||
254 | pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { | ||
255 | self.imp.scope(node) | ||
256 | } | ||
257 | |||
258 | pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { | ||
259 | self.imp.scope_at_offset(node, offset) | ||
260 | } | ||
261 | |||
262 | pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { | ||
263 | self.imp.scope_for_def(def) | ||
264 | } | ||
265 | |||
266 | pub fn assert_contains_node(&self, node: &SyntaxNode) { | ||
267 | self.imp.assert_contains_node(node) | ||
268 | } | ||
269 | |||
270 | pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool { | ||
271 | self.imp.is_unsafe_method_call(method_call_expr) | ||
272 | } | ||
273 | |||
274 | pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { | ||
275 | self.imp.is_unsafe_ref_expr(ref_expr) | ||
276 | } | ||
277 | |||
278 | pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { | ||
279 | self.imp.is_unsafe_ident_pat(ident_pat) | ||
280 | } | ||
281 | } | ||
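A minimal end-to-end sketch of the wrapper API above, e.g. for a hover-style feature. `RootDatabase` (the concrete `HirDatabase` implementation from `ide_db`) and the `type_at_offset` helper are assumptions for illustration; everything else comes from this file:

fn type_at_offset(db: &RootDatabase, file_id: FileId, offset: TextSize) -> Option<Type> {
    let sema = Semantics::new(db);
    // `parse` registers the file's syntax tree root with this `Semantics` instance,
    // so nodes derived from it can be passed back into the queries below.
    let source_file = sema.parse(file_id);
    // Find the innermost expression at the offset, descending into macro expansions if needed.
    let expr = sema.find_node_at_offset_with_descend::<ast::Expr>(source_file.syntax(), offset)?;
    sema.type_of_expr(&expr)
}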
282 | |||
283 | impl<'db> SemanticsImpl<'db> { | ||
284 | fn new(db: &'db dyn HirDatabase) -> Self { | ||
285 | SemanticsImpl { | ||
286 | db, | ||
287 | s2d_cache: Default::default(), | ||
288 | cache: Default::default(), | ||
289 | expansion_info_cache: Default::default(), | ||
290 | } | ||
291 | } | ||
292 | |||
293 | fn parse(&self, file_id: FileId) -> ast::SourceFile { | ||
294 | let tree = self.db.parse(file_id).tree(); | ||
295 | self.cache(tree.syntax().clone(), file_id.into()); | ||
296 | tree | ||
297 | } | ||
298 | |||
299 | fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | ||
300 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | ||
301 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); | ||
302 | let file_id = sa.expand(self.db, macro_call)?; | ||
303 | let node = self.db.parse_or_expand(file_id)?; | ||
304 | self.cache(node.clone(), file_id); | ||
305 | Some(node) | ||
306 | } | ||
307 | |||
308 | fn speculative_expand( | ||
309 | &self, | ||
310 | actual_macro_call: &ast::MacroCall, | ||
311 | hypothetical_args: &ast::TokenTree, | ||
312 | token_to_map: SyntaxToken, | ||
313 | ) -> Option<(SyntaxNode, SyntaxToken)> { | ||
314 | let macro_call = | ||
315 | self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); | ||
316 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); | ||
317 | let krate = sa.resolver.krate()?; | ||
318 | let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { | ||
319 | sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) | ||
320 | })?; | ||
321 | hir_expand::db::expand_hypothetical( | ||
322 | self.db.upcast(), | ||
323 | macro_call_id, | ||
324 | hypothetical_args, | ||
325 | token_to_map, | ||
326 | ) | ||
327 | } | ||
328 | |||
329 | fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | ||
330 | let _p = profile::span("descend_into_macros"); | ||
331 | let parent = token.parent(); | ||
332 | let parent = self.find_file(parent); | ||
333 | let sa = self.analyze2(parent.as_ref(), None); | ||
334 | |||
335 | let token = successors(Some(parent.with_value(token)), |token| { | ||
336 | self.db.check_canceled(); | ||
337 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; | ||
338 | let tt = macro_call.token_tree()?; | ||
339 | if !tt.syntax().text_range().contains_range(token.value.text_range()) { | ||
340 | return None; | ||
341 | } | ||
342 | let file_id = sa.expand(self.db, token.with_value(&macro_call))?; | ||
343 | let token = self | ||
344 | .expansion_info_cache | ||
345 | .borrow_mut() | ||
346 | .entry(file_id) | ||
347 | .or_insert_with(|| file_id.expansion_info(self.db.upcast())) | ||
348 | .as_ref()? | ||
349 | .map_token_down(token.as_ref())?; | ||
350 | |||
351 | self.cache(find_root(&token.value.parent()), token.file_id); | ||
352 | |||
353 | Some(token) | ||
354 | }) | ||
355 | .last() | ||
356 | .unwrap(); | ||
357 | |||
358 | token.value | ||
359 | } | ||
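What the descent above buys a caller, as a hedged sketch (hypothetical `type_of_token_in_expansion` helper, `RootDatabase` assumed as before): for a token written inside a macro invocation, the returned token lives in the expansion, so type queries see the code the macro actually produced.

fn type_of_token_in_expansion(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Type> {
    // Map the token from the macro call site into the (possibly nested) expansion;
    // tokens outside any macro call are returned unchanged.
    let descended = sema.descend_into_macros(token);
    let expr = descended.parent().ancestors().find_map(ast::Expr::cast)?;
    sema.type_of_expr(&expr)
}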
360 | |||
361 | fn descend_node_at_offset( | ||
362 | &self, | ||
363 | node: &SyntaxNode, | ||
364 | offset: TextSize, | ||
365 | ) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
366 | // Handle macro token cases | ||
367 | node.token_at_offset(offset) | ||
368 | .map(|token| self.descend_into_macros(token)) | ||
369 | .map(|it| self.ancestors_with_macros(it.parent())) | ||
370 | .flatten() | ||
371 | } | ||
372 | |||
373 | fn original_range(&self, node: &SyntaxNode) -> FileRange { | ||
374 | let node = self.find_file(node.clone()); | ||
375 | original_range(self.db, node.as_ref()) | ||
376 | } | ||
377 | |||
378 | fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { | ||
379 | let src = diagnostics.display_source(); | ||
380 | let root = self.db.parse_or_expand(src.file_id).unwrap(); | ||
381 | let node = src.value.to_node(&root); | ||
382 | self.cache(root, src.file_id); | ||
383 | original_range(self.db, src.with_value(&node)) | ||
384 | } | ||
385 | |||
386 | fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
387 | let node = self.find_file(node); | ||
388 | node.ancestors_with_macros(self.db.upcast()).map(|it| it.value) | ||
389 | } | ||
390 | |||
391 | fn ancestors_at_offset_with_macros( | ||
392 | &self, | ||
393 | node: &SyntaxNode, | ||
394 | offset: TextSize, | ||
395 | ) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
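// Editorial note: token_at_offset may yield up to two tokens (one on each side of the
// offset). Each token's ancestor chain grows from smallest to largest node, so merging
// the chains with a "smaller range first" comparator keeps the most specific nodes at
// the front, which is what find_node_at_offset_with_macros relies on.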
396 | node.token_at_offset(offset) | ||
397 | .map(|token| self.ancestors_with_macros(token.parent())) | ||
398 | .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) | ||
399 | } | ||
400 | |||
401 | fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | ||
402 | self.analyze(expr.syntax()).type_of_expr(self.db, &expr) | ||
403 | } | ||
404 | |||
405 | fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> { | ||
406 | self.analyze(pat.syntax()).type_of_pat(self.db, &pat) | ||
407 | } | ||
408 | |||
409 | fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> { | ||
410 | self.analyze(param.syntax()).type_of_self(self.db, &param) | ||
411 | } | ||
412 | |||
413 | fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> { | ||
414 | self.analyze(call.syntax()).resolve_method_call(self.db, call) | ||
415 | } | ||
416 | |||
417 | fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> { | ||
418 | // FIXME: this erases Substs | ||
419 | let func = self.resolve_method_call(call)?; | ||
420 | let ty = self.db.value_ty(func.into()); | ||
421 | let resolver = self.analyze(call.syntax()).resolver; | ||
422 | let ty = Type::new_with_resolver(self.db, &resolver, ty.value)?; | ||
423 | let mut res = ty.as_callable(self.db)?; | ||
424 | res.is_bound_method = true; | ||
425 | Some(res) | ||
426 | } | ||
427 | |||
428 | fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> { | ||
429 | self.analyze(field.syntax()).resolve_field(self.db, field) | ||
430 | } | ||
431 | |||
432 | fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, Option<Local>)> { | ||
433 | self.analyze(field.syntax()).resolve_record_field(self.db, field) | ||
434 | } | ||
435 | |||
436 | fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> { | ||
437 | self.analyze(field.syntax()).resolve_record_field_pat(self.db, field) | ||
438 | } | ||
439 | |||
440 | fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { | ||
441 | let sa = self.analyze(macro_call.syntax()); | ||
442 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | ||
443 | sa.resolve_macro_call(self.db, macro_call) | ||
444 | } | ||
445 | |||
446 | fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> { | ||
447 | self.analyze(path.syntax()).resolve_path(self.db, path) | ||
448 | } | ||
449 | |||
450 | fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> { | ||
451 | let krate = self.scope(extern_crate.syntax()).krate()?; | ||
452 | krate.dependencies(self.db).into_iter().find_map(|dep| { | ||
453 | if dep.name == extern_crate.name_ref()?.as_name() { | ||
454 | Some(dep.krate) | ||
455 | } else { | ||
456 | None | ||
457 | } | ||
458 | }) | ||
459 | } | ||
460 | |||
461 | fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> { | ||
462 | self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) | ||
463 | } | ||
464 | |||
465 | fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> { | ||
466 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) | ||
467 | } | ||
468 | |||
469 | fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { | ||
470 | self.analyze(literal.syntax()) | ||
471 | .record_literal_missing_fields(self.db, literal) | ||
472 | .unwrap_or_default() | ||
473 | } | ||
474 | |||
475 | fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { | ||
476 | self.analyze(pattern.syntax()) | ||
477 | .record_pattern_missing_fields(self.db, pattern) | ||
478 | .unwrap_or_default() | ||
479 | } | ||
480 | |||
481 | fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T { | ||
482 | let mut cache = self.s2d_cache.borrow_mut(); | ||
483 | let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; | ||
484 | f(&mut ctx) | ||
485 | } | ||
486 | |||
487 | fn to_module_def(&self, file: FileId) -> Option<Module> { | ||
488 | self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from) | ||
489 | } | ||
490 | |||
491 | fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { | ||
492 | let node = self.find_file(node.clone()); | ||
493 | let resolver = self.analyze2(node.as_ref(), None).resolver; | ||
494 | SemanticsScope { db: self.db, file_id: node.file_id, resolver } | ||
495 | } | ||
496 | |||
497 | fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { | ||
498 | let node = self.find_file(node.clone()); | ||
499 | let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; | ||
500 | SemanticsScope { db: self.db, file_id: node.file_id, resolver } | ||
501 | } | ||
502 | |||
503 | fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { | ||
504 | let file_id = self.db.lookup_intern_trait(def.id).id.file_id; | ||
505 | let resolver = def.id.resolver(self.db.upcast()); | ||
506 | SemanticsScope { db: self.db, file_id, resolver } | ||
507 | } | ||
508 | |||
509 | fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer { | ||
510 | let src = self.find_file(node.clone()); | ||
511 | self.analyze2(src.as_ref(), None) | ||
512 | } | ||
513 | |||
514 | fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer { | ||
515 | let _p = profile::span("Semantics::analyze2"); | ||
516 | |||
517 | let container = match self.with_ctx(|ctx| ctx.find_container(src)) { | ||
518 | Some(it) => it, | ||
519 | None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src), | ||
520 | }; | ||
521 | |||
522 | let resolver = match container { | ||
523 | ChildContainer::DefWithBodyId(def) => { | ||
524 | return SourceAnalyzer::new_for_body(self.db, def, src, offset) | ||
525 | } | ||
526 | ChildContainer::TraitId(it) => it.resolver(self.db.upcast()), | ||
527 | ChildContainer::ImplId(it) => it.resolver(self.db.upcast()), | ||
528 | ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()), | ||
529 | ChildContainer::EnumId(it) => it.resolver(self.db.upcast()), | ||
530 | ChildContainer::VariantId(it) => it.resolver(self.db.upcast()), | ||
531 | ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()), | ||
532 | ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()), | ||
533 | }; | ||
534 | SourceAnalyzer::new_for_resolver(resolver, src) | ||
535 | } | ||
536 | |||
537 | fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { | ||
538 | assert!(root_node.parent().is_none()); | ||
539 | let mut cache = self.cache.borrow_mut(); | ||
540 | let prev = cache.insert(root_node, file_id); | ||
541 | assert!(prev == None || prev == Some(file_id)) | ||
542 | } | ||
543 | |||
544 | fn assert_contains_node(&self, node: &SyntaxNode) { | ||
545 | self.find_file(node.clone()); | ||
546 | } | ||
547 | |||
548 | fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> { | ||
549 | let cache = self.cache.borrow(); | ||
550 | cache.get(root_node).copied() | ||
551 | } | ||
552 | |||
553 | fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> { | ||
554 | let root_node = find_root(&node); | ||
555 | let file_id = self.lookup(&root_node).unwrap_or_else(|| { | ||
556 | panic!( | ||
557 | "\n\nFailed to lookup {:?} in this Semantics.\n\ | ||
558 | Make sure to use only query nodes, derived from this instance of Semantics.\n\ | ||
559 | root node: {:?}\n\ | ||
560 | known nodes: {}\n\n", | ||
561 | node, | ||
562 | root_node, | ||
563 | self.cache | ||
564 | .borrow() | ||
565 | .keys() | ||
566 | .map(|it| format!("{:?}", it)) | ||
567 | .collect::<Vec<_>>() | ||
568 | .join(", ") | ||
569 | ) | ||
570 | }); | ||
571 | InFile::new(file_id, node) | ||
572 | } | ||
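An editorial note on the panic above: `find_file` only knows about syntax roots that were registered through this instance's caching paths (`parse`, `expand`, `descend_into_macros`, `diagnostics_display_range`). A hypothetical misuse, shown as a comment because it is exactly what must not be written:

// let sema = Semantics::new(db);
// let fresh_tree = db.parse(file_id).tree();            // re-parsed directly, root not cached
// sema.type_of_expr(&expr_from(fresh_tree.syntax()));   // panics in find_file
// Obtain the tree via `sema.parse(file_id)` instead, so the root is cached first.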
573 | |||
574 | fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool { | ||
575 | method_call_expr | ||
576 | .receiver() | ||
577 | .and_then(|expr| { | ||
578 | let field_expr = match expr { | ||
579 | ast::Expr::FieldExpr(field_expr) => field_expr, | ||
580 | _ => return None, | ||
581 | }; | ||
582 | let ty = self.type_of_expr(&field_expr.expr()?)?; | ||
583 | if !ty.is_packed(self.db) { | ||
584 | return None; | ||
585 | } | ||
586 | |||
587 | let func = self.resolve_method_call(&method_call_expr).map(Function::from)?; | ||
588 | let res = match func.self_param(self.db)?.access(self.db) { | ||
589 | Access::Shared | Access::Exclusive => true, | ||
590 | Access::Owned => false, | ||
591 | }; | ||
592 | Some(res) | ||
593 | }) | ||
594 | .unwrap_or(false) | ||
595 | } | ||
596 | |||
597 | fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool { | ||
598 | ref_expr | ||
599 | .expr() | ||
600 | .and_then(|expr| { | ||
601 | let field_expr = match expr { | ||
602 | ast::Expr::FieldExpr(field_expr) => field_expr, | ||
603 | _ => return None, | ||
604 | }; | ||
605 | let expr = field_expr.expr()?; | ||
606 | self.type_of_expr(&expr) | ||
607 | }) | ||
608 | // Binding a reference to a packed type is possibly unsafe. | ||
609 | .map(|ty| ty.is_packed(self.db)) | ||
610 | .unwrap_or(false) | ||
611 | |||
612 | // FIXME This needs layout computation to be correct. It will highlight | ||
613 | // more than it should with the current implementation. | ||
614 | } | ||
615 | |||
616 | fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { | ||
617 | if ident_pat.ref_token().is_none() { | ||
618 | return false; | ||
619 | } | ||
620 | |||
621 | ident_pat | ||
622 | .syntax() | ||
623 | .parent() | ||
624 | .and_then(|parent| { | ||
625 | // Within a `RecordPat`, an `IdentPat` can sit directly under a `RecordPatField` or under a | ||
626 | // `RecordPatFieldList`. A `RecordPatField` itself lives under a `RecordPatFieldList`, | ||
627 | // so this walks up from the `IdentPat` through either structure to reach the | ||
628 | // `RecordPat` so we can get the containing type. | ||
629 | let record_pat = ast::RecordPatField::cast(parent.clone()) | ||
630 | .and_then(|record_pat| record_pat.syntax().parent()) | ||
631 | .or_else(|| Some(parent.clone())) | ||
632 | .and_then(|parent| { | ||
633 | ast::RecordPatFieldList::cast(parent)? | ||
634 | .syntax() | ||
635 | .parent() | ||
636 | .and_then(ast::RecordPat::cast) | ||
637 | }); | ||
638 | |||
639 | // If this doesn't match a `RecordPat`, fall back to a `LetStmt` to see if | ||
640 | // this is initialized from a `FieldExpr`. | ||
641 | if let Some(record_pat) = record_pat { | ||
642 | self.type_of_pat(&ast::Pat::RecordPat(record_pat)) | ||
643 | } else if let Some(let_stmt) = ast::LetStmt::cast(parent) { | ||
644 | let field_expr = match let_stmt.initializer()? { | ||
645 | ast::Expr::FieldExpr(field_expr) => field_expr, | ||
646 | _ => return None, | ||
647 | }; | ||
648 | |||
649 | self.type_of_expr(&field_expr.expr()?) | ||
650 | } else { | ||
651 | None | ||
652 | } | ||
653 | }) | ||
654 | // Binding a reference to a packed type is possibly unsafe. | ||
655 | .map(|ty| ty.is_packed(self.db)) | ||
656 | .unwrap_or(false) | ||
657 | } | ||
658 | } | ||
659 | |||
660 | pub trait ToDef: AstNode + Clone { | ||
661 | type Def; | ||
662 | |||
663 | fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>; | ||
664 | } | ||
665 | |||
666 | macro_rules! to_def_impls { | ||
667 | ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( | ||
668 | impl ToDef for $ast { | ||
669 | type Def = $def; | ||
670 | fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> { | ||
671 | sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) | ||
672 | } | ||
673 | } | ||
674 | )*} | ||
675 | } | ||
676 | |||
677 | to_def_impls![ | ||
678 | (crate::Module, ast::Module, module_to_def), | ||
679 | (crate::Struct, ast::Struct, struct_to_def), | ||
680 | (crate::Enum, ast::Enum, enum_to_def), | ||
681 | (crate::Union, ast::Union, union_to_def), | ||
682 | (crate::Trait, ast::Trait, trait_to_def), | ||
683 | (crate::ImplDef, ast::Impl, impl_to_def), | ||
684 | (crate::TypeAlias, ast::TypeAlias, type_alias_to_def), | ||
685 | (crate::Const, ast::Const, const_to_def), | ||
686 | (crate::Static, ast::Static, static_to_def), | ||
687 | (crate::Function, ast::Fn, fn_to_def), | ||
688 | (crate::Field, ast::RecordField, record_field_to_def), | ||
689 | (crate::Field, ast::TupleField, tuple_field_to_def), | ||
690 | (crate::EnumVariant, ast::Variant, enum_variant_to_def), | ||
691 | (crate::TypeParam, ast::TypeParam, type_param_to_def), | ||
692 | (crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros | ||
693 | (crate::Local, ast::IdentPat, bind_pat_to_def), | ||
694 | ]; | ||
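A sketch of how these `ToDef` impls are consumed through `Semantics::to_def` (hypothetical `function_def` helper; `RootDatabase` assumed as above):

fn function_def(sema: &Semantics<RootDatabase>, fn_node: &ast::Fn) -> Option<Function> {
    // Uses the `(crate::Function, ast::Fn, fn_to_def)` mapping generated above.
    // `fn_node` must come from a tree obtained through this `Semantics` (e.g. via `parse`).
    sema.to_def(fn_node)
}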
695 | |||
696 | fn find_root(node: &SyntaxNode) -> SyntaxNode { | ||
697 | node.ancestors().last().unwrap() | ||
698 | } | ||
699 | |||
700 | #[derive(Debug)] | ||
701 | pub struct SemanticsScope<'a> { | ||
702 | pub db: &'a dyn HirDatabase, | ||
703 | file_id: HirFileId, | ||
704 | resolver: Resolver, | ||
705 | } | ||
706 | |||
707 | impl<'a> SemanticsScope<'a> { | ||
708 | pub fn module(&self) -> Option<Module> { | ||
709 | Some(Module { id: self.resolver.module()? }) | ||
710 | } | ||
711 | |||
712 | pub fn krate(&self) -> Option<Crate> { | ||
713 | Some(Crate { id: self.resolver.krate()? }) | ||
714 | } | ||
715 | |||
716 | /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type | ||
717 | // FIXME: rename to visible_traits to not repeat scope? | ||
718 | pub fn traits_in_scope(&self) -> FxHashSet<TraitId> { | ||
719 | let resolver = &self.resolver; | ||
720 | resolver.traits_in_scope(self.db.upcast()) | ||
721 | } | ||
722 | |||
723 | pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { | ||
724 | let resolver = &self.resolver; | ||
725 | |||
726 | resolver.process_all_names(self.db.upcast(), &mut |name, def| { | ||
727 | let def = match def { | ||
728 | resolver::ScopeDef::PerNs(it) => { | ||
729 | let items = ScopeDef::all_items(it); | ||
730 | for item in items { | ||
731 | f(name.clone(), item); | ||
732 | } | ||
733 | return; | ||
734 | } | ||
735 | resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()), | ||
736 | resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()), | ||
737 | resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }), | ||
738 | resolver::ScopeDef::Local(pat_id) => { | ||
739 | let parent = resolver.body_owner().unwrap().into(); | ||
740 | ScopeDef::Local(Local { parent, pat_id }) | ||
741 | } | ||
742 | }; | ||
743 | f(name, def) | ||
744 | }) | ||
745 | } | ||
746 | |||
747 | /// Resolve a path as if it were written at the given scope. This is | ||
748 | /// necessarily a heuristic, as it doesn't take hygiene into account. | ||
749 | pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> { | ||
750 | let hygiene = Hygiene::new(self.db.upcast(), self.file_id); | ||
751 | let path = Path::from_src(path.clone(), &hygiene)?; | ||
752 | resolve_hir_path(self.db, &self.resolver, &path) | ||
753 | } | ||
754 | } | ||
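A sketch of a completion-style consumer of `SemanticsScope` (hypothetical `names_in_scope` helper; `RootDatabase` assumed as above):

fn names_in_scope(sema: &Semantics<RootDatabase>, node: &SyntaxNode, offset: TextSize) -> Vec<Name> {
    let scope = sema.scope_at_offset(node, offset);
    let mut names = Vec::new();
    // Each visible binding is reported through the callback; PerNs entries may invoke it
    // more than once for the same name (one call per namespace item).
    scope.process_all_names(&mut |name, _def| names.push(name));
    names
}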
755 | |||
756 | // FIXME: Change `HasSource` trait to work with `Semantics` and remove this? | ||
757 | pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange { | ||
758 | if let Some(range) = original_range_opt(db, node) { | ||
759 | let original_file = range.file_id.original_file(db.upcast()); | ||
760 | if range.file_id == original_file.into() { | ||
761 | return FileRange { file_id: original_file, range: range.value }; | ||
762 | } | ||
763 | |||
764 | log::error!("Failed to map up more for {:?}", range); | ||
765 | return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value }; | ||
766 | } | ||
767 | |||
768 | // Fall back to whole macro call | ||
769 | if let Some(expansion) = node.file_id.expansion_info(db.upcast()) { | ||
770 | if let Some(call_node) = expansion.call_node() { | ||
771 | return FileRange { | ||
772 | file_id: call_node.file_id.original_file(db.upcast()), | ||
773 | range: call_node.value.text_range(), | ||
774 | }; | ||
775 | } | ||
776 | } | ||
777 | |||
778 | FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() } | ||
779 | } | ||
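The fallback order above, spelled out for a node obtained from `Semantics::expand` (illustrative comment only):

// let expanded = sema.expand(&macro_call)?;     // node inside the expansion
// let range = sema.original_range(&expanded);
// `range` points into the file containing `macro_call`: either the precise call-site span
// mapped up token-by-token, or, if that mapping fails, the span of the whole macro call;
// nodes that were never produced by a macro simply keep their own file range.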
780 | |||
781 | fn original_range_opt( | ||
782 | db: &dyn HirDatabase, | ||
783 | node: InFile<&SyntaxNode>, | ||
784 | ) -> Option<InFile<TextRange>> { | ||
785 | let expansion = node.file_id.expansion_info(db.upcast())?; | ||
786 | |||
787 | // Does the input node consist of a single token? | ||
788 | let single = skip_trivia_token(node.value.first_token()?, Direction::Next)? | ||
789 | == skip_trivia_token(node.value.last_token()?, Direction::Prev)?; | ||
790 | |||
791 | Some(node.value.descendants().find_map(|it| { | ||
792 | let first = skip_trivia_token(it.first_token()?, Direction::Next)?; | ||
793 | let first = ascend_call_token(db, &expansion, node.with_value(first))?; | ||
794 | |||
795 | let last = skip_trivia_token(it.last_token()?, Direction::Prev)?; | ||
796 | let last = ascend_call_token(db, &expansion, node.with_value(last))?; | ||
797 | |||
798 | if (!single && first == last) || (first.file_id != last.file_id) { | ||
799 | return None; | ||
800 | } | ||
801 | |||
802 | Some(first.with_value(first.value.text_range().cover(last.value.text_range()))) | ||
803 | })?) | ||
804 | } | ||
805 | |||
806 | fn ascend_call_token( | ||
807 | db: &dyn HirDatabase, | ||
808 | expansion: &ExpansionInfo, | ||
809 | token: InFile<SyntaxToken>, | ||
810 | ) -> Option<InFile<SyntaxToken>> { | ||
811 | let (mapped, origin) = expansion.map_token_up(token.as_ref())?; | ||
812 | if origin != Origin::Call { | ||
813 | return None; | ||
814 | } | ||
815 | if let Some(info) = mapped.file_id.expansion_info(db.upcast()) { | ||
816 | return ascend_call_token(db, &info, mapped); | ||
817 | } | ||
818 | Some(mapped) | ||
819 | } | ||