Diffstat (limited to 'crates/ra_hir/src')
-rw-r--r--   crates/ra_hir/src/code_model.rs |  22
-rw-r--r--   crates/ra_hir/src/db.rs         |   8
-rw-r--r--   crates/ra_hir/src/semantics.rs  | 269
3 files changed, 221 insertions, 78 deletions
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 479c82fa4..1b3525011 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -31,7 +31,7 @@ use hir_ty::{
     ApplicationTy, Canonical, GenericPredicate, InEnvironment, Substs, TraitEnvironment, Ty,
     TyDefId, TypeCtor,
 };
-use ra_db::{CrateId, CrateName, Edition, FileId};
+use ra_db::{CrateId, Edition, FileId};
 use ra_prof::profile;
 use ra_syntax::ast::{self, AttrsOwner, NameOwner};
 use rustc_hash::FxHashSet;
@@ -94,8 +94,8 @@ impl Crate {
         db.crate_graph()[self.id].edition
     }
 
-    pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateName> {
-        db.crate_graph()[self.id].display_name.as_ref().cloned()
+    pub fn display_name(self, db: &dyn HirDatabase) -> Option<String> {
+        db.crate_graph()[self.id].display_name.clone()
     }
 
     pub fn query_external_importables(
@@ -1053,12 +1053,14 @@ pub struct ImplDef {
 
 impl ImplDef {
     pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<ImplDef> {
-        let impls = db.impls_in_crate(krate.id);
-        impls.all_impls().map(Self::from).collect()
+        let inherent = db.inherent_impls_in_crate(krate.id);
+        let trait_ = db.trait_impls_in_crate(krate.id);
+
+        inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
     }
     pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> {
-        let impls = db.impls_in_crate(krate.id);
-        impls.lookup_impl_defs_for_trait(trait_.id).map(Self::from).collect()
+        let impls = db.trait_impls_in_crate(krate.id);
+        impls.for_trait(trait_.id).map(Self::from).collect()
     }
 
     pub fn target_trait(self, db: &dyn HirDatabase) -> Option<TypeRef> {
@@ -1303,10 +1305,10 @@ impl Type {
         mut callback: impl FnMut(AssocItem) -> Option<T>,
     ) -> Option<T> {
         for krate in self.ty.value.def_crates(db, krate.id)? {
-            let impls = db.impls_in_crate(krate);
+            let impls = db.inherent_impls_in_crate(krate);
 
-            for impl_def in impls.lookup_impl_defs(&self.ty.value) {
-                for &item in db.impl_data(impl_def).items.iter() {
+            for impl_def in impls.for_self_ty(&self.ty.value) {
+                for &item in db.impl_data(*impl_def).items.iter() {
                     if let Some(result) = callback(item.into()) {
                         return Some(result);
                     }
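
Editor's note: the hunks above replace the single impls_in_crate query with two finer-grained ones, inherent_impls_in_crate (looked up via for_self_ty / all_impls) and trait_impls_in_crate (looked up via for_trait / all_impls). Below is a minimal sketch of consuming the split queries, assuming it lives inside ra_hir where the HirDatabase queries shown above are in scope; the helper and its name are illustrative, not part of this commit.

    // Illustrative only: counts impls using the new split queries.
    fn count_impls(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> (usize, usize) {
        // Inherent impls (no trait) now come from their own query, indexed by self type.
        let inherent = db.inherent_impls_in_crate(krate.id);
        // Trait impls live in a separate query, indexed by the implemented trait.
        let trait_impls = db.trait_impls_in_crate(krate.id);

        let inherent_count = inherent.all_impls().count();
        let for_this_trait = trait_impls.for_trait(trait_.id).count();
        (inherent_count, for_this_trait)
    }

Code that previously needed every impl in a crate, like ImplDef::all_in_crate above, now chains inherent.all_impls() with trait_impls.all_impls() instead of calling a single all_impls() on the old combined result.
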
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index bb67952de..cb48ca065 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -16,10 +16,10 @@ pub use hir_expand::db::{
 pub use hir_ty::db::{
     AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery,
     GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase,
-    HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsFromDepsQuery,
-    ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery,
-    InternTypeCtorQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery,
-    TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
+    HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, InferQueryQuery,
+    InherentImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery, InternTypeCtorQuery,
+    InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery, TraitDatumQuery,
+    TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
 };
 
 #[test]
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 6a49c424a..3d78f71c1 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -83,6 +83,11 @@ impl PathResolution {
 /// Primary API to get semantic information, like types, from syntax trees.
 pub struct Semantics<'db, DB> {
     pub db: &'db DB,
+    imp: SemanticsImpl<'db>,
+}
+
+pub struct SemanticsImpl<'db> {
+    pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
 }
@@ -95,20 +100,180 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
 
 impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn new(db: &DB) -> Semantics<DB> {
-        Semantics { db, s2d_cache: Default::default(), cache: Default::default() }
+        let impl_ = SemanticsImpl::new(db);
+        Semantics { db, imp: impl_ }
     }
 
     pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
-        let tree = self.db.parse(file_id).tree();
-        self.cache(tree.syntax().clone(), file_id.into());
-        tree
+        self.imp.parse(file_id)
     }
 
     pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
         let file_id = d.source().file_id;
         let root = self.db.parse_or_expand(file_id).unwrap();
-        self.cache(root, file_id);
-        d.ast(self.db)
+        self.imp.cache(root, file_id);
+        d.ast(self.db.upcast())
+    }
+
+    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+        self.imp.expand(macro_call)
+    }
+
+    pub fn expand_hypothetical(
+        &self,
+        actual_macro_call: &ast::MacroCall,
+        hypothetical_args: &ast::TokenTree,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
+    }
+
+    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token)
+    }
+
+    pub fn descend_node_at_offset<N: ast::AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+    }
+
+    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+        self.imp.original_range(node)
+    }
+
+    pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
+        self.imp.diagnostics_range(diagnostics)
+    }
+
+    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+        self.imp.ancestors_with_macros(node)
+    }
+
+    pub fn ancestors_at_offset_with_macros(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        self.imp.ancestors_at_offset_with_macros(node, offset)
+    }
+
+    /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
+    /// search up until it is of the target AstNode type
+    pub fn find_node_at_offset_with_macros<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+    }
+
+    /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_node_at_offset_with_descend<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        if let Some(it) = find_node_at_offset(&node, offset) {
+            return Some(it);
+        }
+
+        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+    }
+
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
+        self.imp.type_of_expr(expr)
+    }
+
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
+        self.imp.type_of_pat(pat)
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        self.imp.resolve_method_call(call)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+        self.imp.resolve_field(field)
+    }
+
+    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<(Field, Option<Local>)> {
+        self.imp.resolve_record_field(field)
+    }
+
+    pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> {
+        self.imp.resolve_record_field_pat(field)
+    }
+
+    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
+        self.imp.resolve_macro_call(macro_call)
+    }
+
+    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+        self.imp.resolve_path(path)
+    }
+
+    pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> {
+        self.imp.resolve_variant(record_lit)
+    }
+
+    pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
+        self.imp.lower_path(path)
+    }
+
+    pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
+        self.imp.resolve_bind_pat_to_const(pat)
+    }
+
+    // FIXME: use this instead?
+    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
+
+    pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
+        self.imp.record_literal_missing_fields(literal)
+    }
+
+    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+        self.imp.record_pattern_missing_fields(pattern)
+    }
+
+    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+        let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
+        T::to_def(&self.imp, src)
+    }
+
+    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+        self.imp.to_module_def(file)
+    }
+
+    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
+        self.imp.scope(node)
+    }
+
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
+        self.imp.scope_at_offset(node, offset)
+    }
+
+    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+        self.imp.scope_for_def(def)
+    }
+
+    pub fn assert_contains_node(&self, node: &SyntaxNode) {
+        self.imp.assert_contains_node(node)
+    }
+}
+
+impl<'db> SemanticsImpl<'db> {
+    pub fn new(db: &'db dyn HirDatabase) -> Self {
+        Self { db, s2d_cache: Default::default(), cache: Default::default() }
+    }
+
+    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+        let tree = self.db.parse(file_id).tree();
+        self.cache(tree.syntax().clone(), file_id.into());
+        tree
     }
 
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
@@ -130,9 +295,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
         let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
         let krate = sa.resolver.krate()?;
-        let macro_call_id = macro_call
-            .as_call_id(self.db, krate, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
-        hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map)
+        let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+            sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
+        })?;
+        hir_expand::db::expand_hypothetical(
+            self.db.upcast(),
+            macro_call_id,
+            hypothetical_args,
+            token_to_map,
+        )
     }
 
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
@@ -147,7 +318,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
                 return None;
             }
             let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-            let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
+            let token = file_id.expansion_info(self.db.upcast())?.map_token_down(token.as_ref())?;
 
             self.cache(find_root(&token.value.parent()), token.file_id);
 
@@ -159,15 +330,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         token.value
     }
 
-    pub fn descend_node_at_offset<N: ast::AstNode>(
+    pub fn descend_node_at_offset(
         &self,
         node: &SyntaxNode,
         offset: TextSize,
-    ) -> Option<N> {
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
             .map(|token| self.descend_into_macros(token))
-            .find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast))
+            .map(|it| self.ancestors_with_macros(it.parent()))
+            .flatten()
     }
 
     pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
@@ -184,7 +356,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         let node = self.find_file(node);
-        node.ancestors_with_macros(self.db).map(|it| it.value)
+        node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
     }
 
     pub fn ancestors_at_offset_with_macros(
@@ -197,29 +369,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
     }
 
-    /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
-    /// search up until it is of the target AstNode type
-    pub fn find_node_at_offset_with_macros<N: AstNode>(
-        &self,
-        node: &SyntaxNode,
-        offset: TextSize,
-    ) -> Option<N> {
-        self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
-    }
-
-    /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
-    /// descend it and find again
-    pub fn find_node_at_offset_with_descend<N: AstNode>(
-        &self,
-        node: &SyntaxNode,
-        offset: TextSize,
-    ) -> Option<N> {
-        if let Some(it) = find_node_at_offset(&node, offset) {
-            return Some(it);
-        }
-        self.descend_node_at_offset(&node, offset)
-    }
-
     pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.analyze(expr.syntax()).type_of(self.db, &expr)
     }
@@ -267,9 +416,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
     }
 
-    // FIXME: use this instead?
-    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
-
     pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
         self.analyze(literal.syntax())
             .record_literal_missing_fields(self.db, literal)
@@ -282,11 +428,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             .unwrap_or_default()
     }
 
-    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
-        let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
-        T::to_def(self, src)
-    }
-
     fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
         let mut cache = self.s2d_cache.borrow_mut();
         let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
@@ -297,20 +438,20 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from)
     }
 
-    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
+    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
         let node = self.find_file(node.clone());
         let resolver = self.analyze2(node.as_ref(), None).resolver;
         SemanticsScope { db: self.db, resolver }
     }
 
-    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db, DB> {
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
         let node = self.find_file(node.clone());
         let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
         SemanticsScope { db: self.db, resolver }
     }
 
-    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
-        let resolver = def.id.resolver(self.db);
+    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+        let resolver = def.id.resolver(self.db.upcast());
         SemanticsScope { db: self.db, resolver }
     }
 
@@ -331,17 +472,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             ChildContainer::DefWithBodyId(def) => {
                 return SourceAnalyzer::new_for_body(self.db, def, src, offset)
             }
-            ChildContainer::TraitId(it) => it.resolver(self.db),
-            ChildContainer::ImplId(it) => it.resolver(self.db),
-            ChildContainer::ModuleId(it) => it.resolver(self.db),
-            ChildContainer::EnumId(it) => it.resolver(self.db),
-            ChildContainer::VariantId(it) => it.resolver(self.db),
-            ChildContainer::GenericDefId(it) => it.resolver(self.db),
+            ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
         SourceAnalyzer::new_for_resolver(resolver, src)
     }
 
-    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+    pub fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
         assert!(root_node.parent().is_none());
         let mut cache = self.cache.borrow_mut();
         let prev = cache.insert(root_node, file_id);
@@ -357,7 +498,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         cache.get(root_node).copied()
     }
 
-    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
+    pub fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
         let root_node = find_root(&node);
         let file_id = self.lookup(&root_node).unwrap_or_else(|| {
             panic!(
@@ -382,14 +523,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 pub trait ToDef: AstNode + Clone {
     type Def;
 
-    fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def>;
+    fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
 }
 
 macro_rules! to_def_impls {
     ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
         impl ToDef for $ast {
             type Def = $def;
-            fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def> {
+            fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
                 sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
             }
         }
@@ -419,12 +560,12 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode {
     node.ancestors().last().unwrap()
 }
 
-pub struct SemanticsScope<'a, DB> {
-    pub db: &'a DB,
+pub struct SemanticsScope<'a> {
+    pub db: &'a dyn HirDatabase,
     resolver: Resolver,
 }
 
-impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
+impl<'a> SemanticsScope<'a> {
     pub fn module(&self) -> Option<Module> {
         Some(Module { id: self.resolver.module()? })
     }
@@ -433,13 +574,13 @@ impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
     // FIXME: rename to visible_traits to not repeat scope?
     pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
         let resolver = &self.resolver;
-        resolver.traits_in_scope(self.db)
+        resolver.traits_in_scope(self.db.upcast())
     }
 
     pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
         let resolver = &self.resolver;
 
-        resolver.process_all_names(self.db, &mut |name, def| {
+        resolver.process_all_names(self.db.upcast(), &mut |name, def| {
             let def = match def {
                 resolver::ScopeDef::PerNs(it) => {
                     let items = ScopeDef::all_items(it);
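
Editor's note: the semantics.rs change turns the public, generic Semantics<'db, DB> into a thin facade over a new SemanticsImpl<'db> that stores &'db dyn HirDatabase, so the bulk of the logic is dispatched dynamically rather than monomorphized for each concrete database type, while SemanticsScope simply drops its DB parameter. A usage sketch under that reading follows; RootDatabase is an assumed stand-in for whatever concrete HirDatabase implementation the caller already has, and the helper itself is illustrative, not part of this commit.

    // Illustrative only: callers keep the same surface after the refactor.
    fn type_at_offset(db: &RootDatabase, file_id: FileId, offset: TextSize) -> Option<Type> {
        // The facade is constructed exactly as before...
        let sema = Semantics::new(db);
        // ...and each call below is now a one-line delegation to SemanticsImpl.
        let file = sema.parse(file_id);
        let expr: ast::Expr = sema.find_node_at_offset_with_descend(file.syntax(), offset)?;
        sema.type_of_expr(&expr)
    }
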