-rw-r--r--   crates/ra_hir/src/semantics.rs | 253
-rw-r--r--   crates/ra_ide_db/src/lib.rs    |   8
2 files changed, 204 insertions(+), 57 deletions(-)
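This change splits the public semantics API in two: the generic `Semantics<'db, DB>` keeps its `&'db DB` field but now only forwards to a new non-generic `SemanticsImpl<'db>` that stores `&'db dyn HirDatabase`, and `RootDatabase` gains an `Upcast<dyn HirDatabase>` impl so call sites can convert the concrete database via `self.db.upcast()`. The sketch below is not taken from the crate; it is a minimal, self-contained illustration of that delegation pattern, with `Database`, `SemanticsCore`, and `MockDb` as invented stand-in names.

// Illustrative sketch only: a thin generic wrapper owning a non-generic core that
// works through `&dyn Trait`, so the method bodies are compiled once rather than
// once per concrete database type.
trait Database {
    fn file_text(&self, file_id: u32) -> String;
}

// Non-generic core: all real logic lives here, behind a trait object.
struct SemanticsCore<'db> {
    db: &'db dyn Database,
}

impl<'db> SemanticsCore<'db> {
    fn new(db: &'db dyn Database) -> Self {
        SemanticsCore { db }
    }

    fn parse(&self, file_id: u32) -> String {
        self.db.file_text(file_id)
    }
}

// Thin generic facade: keeps the ergonomic `&DB` API, but only delegates.
struct Semantics<'db, DB> {
    pub db: &'db DB,
    imp: SemanticsCore<'db>,
}

impl<'db, DB: Database> Semantics<'db, DB> {
    fn new(db: &'db DB) -> Self {
        // `&DB` coerces to `&dyn Database` here, mirroring `SemanticsImpl::new(db)`.
        Semantics { db, imp: SemanticsCore::new(db) }
    }

    fn parse(&self, file_id: u32) -> String {
        self.imp.parse(file_id)
    }
}

struct MockDb;

impl Database for MockDb {
    fn file_text(&self, _file_id: u32) -> String {
        "fn main() {}".to_owned()
    }
}

fn main() {
    let db = MockDb;
    let sema = Semantics::new(&db);
    assert_eq!(sema.parse(0), "fn main() {}");
}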
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 810c49d6f..3d78f71c1 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -83,6 +83,11 @@ impl PathResolution {
 /// Primary API to get semantic information, like types, from syntax trees.
 pub struct Semantics<'db, DB> {
     pub db: &'db DB,
+    imp: SemanticsImpl<'db>,
+}
+
+pub struct SemanticsImpl<'db> {
+    pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
 }
@@ -95,20 +100,180 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
 
 impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn new(db: &DB) -> Semantics<DB> {
-        Semantics { db, s2d_cache: Default::default(), cache: Default::default() }
+        let impl_ = SemanticsImpl::new(db);
+        Semantics { db, imp: impl_ }
     }
 
     pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
-        let tree = self.db.parse(file_id).tree();
-        self.cache(tree.syntax().clone(), file_id.into());
-        tree
+        self.imp.parse(file_id)
     }
 
     pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
         let file_id = d.source().file_id;
         let root = self.db.parse_or_expand(file_id).unwrap();
-        self.cache(root, file_id);
-        d.ast(self.db)
+        self.imp.cache(root, file_id);
+        d.ast(self.db.upcast())
+    }
+
+    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+        self.imp.expand(macro_call)
+    }
+
+    pub fn expand_hypothetical(
+        &self,
+        actual_macro_call: &ast::MacroCall,
+        hypothetical_args: &ast::TokenTree,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
+    }
+
+    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros(token)
+    }
+
+    pub fn descend_node_at_offset<N: ast::AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+    }
+
+    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+        self.imp.original_range(node)
+    }
+
+    pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
+        self.imp.diagnostics_range(diagnostics)
+    }
+
+    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+        self.imp.ancestors_with_macros(node)
+    }
+
+    pub fn ancestors_at_offset_with_macros(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        self.imp.ancestors_at_offset_with_macros(node, offset)
+    }
+
+    /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
+    /// search up until it is of the target AstNode type
+    pub fn find_node_at_offset_with_macros<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+    }
+
+    /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_node_at_offset_with_descend<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        if let Some(it) = find_node_at_offset(&node, offset) {
+            return Some(it);
+        }
+
+        self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
+    }
+
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
+        self.imp.type_of_expr(expr)
+    }
+
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
+        self.imp.type_of_pat(pat)
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        self.imp.resolve_method_call(call)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+        self.imp.resolve_field(field)
+    }
+
+    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<(Field, Option<Local>)> {
+        self.imp.resolve_record_field(field)
+    }
+
+    pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> {
+        self.imp.resolve_record_field_pat(field)
+    }
+
+    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
+        self.imp.resolve_macro_call(macro_call)
+    }
+
+    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+        self.imp.resolve_path(path)
+    }
+
+    pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> {
+        self.imp.resolve_variant(record_lit)
+    }
+
+    pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
+        self.imp.lower_path(path)
+    }
+
+    pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
+        self.imp.resolve_bind_pat_to_const(pat)
+    }
+
+    // FIXME: use this instead?
+    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
+
+    pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
+        self.imp.record_literal_missing_fields(literal)
+    }
+
+    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+        self.imp.record_pattern_missing_fields(pattern)
+    }
+
+    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+        let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
+        T::to_def(&self.imp, src)
+    }
+
+    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+        self.imp.to_module_def(file)
+    }
+
+    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
+        self.imp.scope(node)
+    }
+
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
+        self.imp.scope_at_offset(node, offset)
+    }
+
+    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+        self.imp.scope_for_def(def)
+    }
+
+    pub fn assert_contains_node(&self, node: &SyntaxNode) {
+        self.imp.assert_contains_node(node)
+    }
+}
+
+impl<'db> SemanticsImpl<'db> {
+    pub fn new(db: &'db dyn HirDatabase) -> Self {
+        Self { db, s2d_cache: Default::default(), cache: Default::default() }
+    }
+
+    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+        let tree = self.db.parse(file_id).tree();
+        self.cache(tree.syntax().clone(), file_id.into());
+        tree
     }
 
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
@@ -130,9 +295,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
         let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
         let krate = sa.resolver.krate()?;
-        let macro_call_id = macro_call
-            .as_call_id(self.db, krate, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
-        hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map)
+        let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+            sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
+        })?;
+        hir_expand::db::expand_hypothetical(
+            self.db.upcast(),
+            macro_call_id,
+            hypothetical_args,
+            token_to_map,
+        )
     }
 
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
@@ -147,7 +318,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
                 return None;
             }
             let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-            let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
+            let token = file_id.expansion_info(self.db.upcast())?.map_token_down(token.as_ref())?;
 
             self.cache(find_root(&token.value.parent()), token.file_id);
 
@@ -159,15 +330,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         token.value
     }
 
-    pub fn descend_node_at_offset<N: ast::AstNode>(
+    pub fn descend_node_at_offset(
         &self,
         node: &SyntaxNode,
         offset: TextSize,
-    ) -> Option<N> {
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
         // Handle macro token cases
         node.token_at_offset(offset)
             .map(|token| self.descend_into_macros(token))
-            .find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast))
+            .map(|it| self.ancestors_with_macros(it.parent()))
+            .flatten()
     }
 
     pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
@@ -184,7 +356,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         let node = self.find_file(node);
-        node.ancestors_with_macros(self.db).map(|it| it.value)
+        node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
     }
 
     pub fn ancestors_at_offset_with_macros(
@@ -197,29 +369,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
     }
 
-    /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
-    /// search up until it is of the target AstNode type
-    pub fn find_node_at_offset_with_macros<N: AstNode>(
-        &self,
-        node: &SyntaxNode,
-        offset: TextSize,
-    ) -> Option<N> {
-        self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
-    }
-
-    /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
-    /// descend it and find again
-    pub fn find_node_at_offset_with_descend<N: AstNode>(
-        &self,
-        node: &SyntaxNode,
-        offset: TextSize,
-    ) -> Option<N> {
-        if let Some(it) = find_node_at_offset(&node, offset) {
-            return Some(it);
-        }
-        self.descend_node_at_offset(&node, offset)
-    }
-
     pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.analyze(expr.syntax()).type_of(self.db, &expr)
     }
@@ -267,9 +416,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
     }
 
-    // FIXME: use this instead?
-    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
-
     pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
         self.analyze(literal.syntax())
             .record_literal_missing_fields(self.db, literal)
@@ -282,11 +428,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             .unwrap_or_default()
     }
 
-    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
-        let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
-        T::to_def(self, src)
-    }
-
     fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
         let mut cache = self.s2d_cache.borrow_mut();
         let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
@@ -310,7 +451,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     }
 
     pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
-        let resolver = def.id.resolver(self.db);
+        let resolver = def.id.resolver(self.db.upcast());
         SemanticsScope { db: self.db, resolver }
     }
 
@@ -331,17 +472,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             ChildContainer::DefWithBodyId(def) => {
                 return SourceAnalyzer::new_for_body(self.db, def, src, offset)
             }
-            ChildContainer::TraitId(it) => it.resolver(self.db),
-            ChildContainer::ImplId(it) => it.resolver(self.db),
-            ChildContainer::ModuleId(it) => it.resolver(self.db),
-            ChildContainer::EnumId(it) => it.resolver(self.db),
-            ChildContainer::VariantId(it) => it.resolver(self.db),
-            ChildContainer::GenericDefId(it) => it.resolver(self.db),
+            ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+            ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
         SourceAnalyzer::new_for_resolver(resolver, src)
     }
 
-    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+    pub fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
         assert!(root_node.parent().is_none());
         let mut cache = self.cache.borrow_mut();
         let prev = cache.insert(root_node, file_id);
@@ -357,7 +498,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         cache.get(root_node).copied()
     }
 
-    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
+    pub fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
        let root_node = find_root(&node);
        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
            panic!(
@@ -382,14 +523,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 pub trait ToDef: AstNode + Clone {
     type Def;
 
-    fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def>;
+    fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
 }
 
 macro_rules! to_def_impls {
     ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
         impl ToDef for $ast {
             type Def = $def;
-            fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def> {
+            fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
                 sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
             }
         }
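One detail worth noting in the hunks above: methods that are generic over an AST node type (`descend_node_at_offset<N>`, `find_node_at_offset_with_macros<N>`, `find_node_at_offset_with_descend<N>`) stay on the generic `Semantics` wrapper, while `SemanticsImpl::descend_node_at_offset` now returns a plain `impl Iterator<Item = SyntaxNode>` and the `N::cast` filtering happens in the wrapper. The following stand-alone sketch shows that split; `SyntaxNode`, `AstNode`, `Core`, and `Wrapper` are simplified stand-ins, not the real ra_syntax or ra_hir types.

// Sketch: keep the generic entry point on a thin wrapper, return a plain iterator
// from the non-generic core so the core stays usable behind `&dyn`-based code.
#[derive(Clone, Debug, PartialEq)]
struct SyntaxNode {
    kind: &'static str,
}

trait AstNode: Sized {
    fn cast(node: SyntaxNode) -> Option<Self>;
}

struct FnDef(SyntaxNode);

impl AstNode for FnDef {
    fn cast(node: SyntaxNode) -> Option<Self> {
        if node.kind == "FN_DEF" {
            Some(FnDef(node))
        } else {
            None
        }
    }
}

struct Core;

impl Core {
    // Non-generic: compiled once, independent of the caller's node type.
    fn descend_node_at_offset(&self, _offset: usize) -> impl Iterator<Item = SyntaxNode> + '_ {
        vec![SyntaxNode { kind: "NAME" }, SyntaxNode { kind: "FN_DEF" }].into_iter()
    }
}

struct Wrapper {
    imp: Core,
}

impl Wrapper {
    // Generic part stays in the wrapper, like the diff's
    // `self.imp.descend_node_at_offset(node, offset).find_map(N::cast)`.
    fn descend_node_at_offset<N: AstNode>(&self, offset: usize) -> Option<N> {
        self.imp.descend_node_at_offset(offset).find_map(N::cast)
    }
}

fn main() {
    let w = Wrapper { imp: Core };
    let fn_def: Option<FnDef> = w.descend_node_at_offset(0);
    assert_eq!(fn_def.unwrap().0.kind, "FN_DEF");
}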
diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs
index a808de4f1..c78071ad6 100644
--- a/crates/ra_ide_db/src/lib.rs
+++ b/crates/ra_ide_db/src/lib.rs
@@ -13,7 +13,7 @@ mod wasm_shims;
 
 use std::sync::Arc;
 
-use hir::db::{AstDatabase, DefDatabase};
+use hir::db::{AstDatabase, DefDatabase, HirDatabase};
 use ra_db::{
     salsa::{self, Database, Durability},
     Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase,
@@ -52,6 +52,12 @@ impl Upcast<dyn DefDatabase> for RootDatabase {
     }
 }
 
+impl Upcast<dyn HirDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
+        &*self
+    }
+}
+
 impl FileLoader for RootDatabase {
     fn file_text(&self, file_id: FileId) -> Arc<String> {
         FileLoaderDelegate(self).file_text(file_id)
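The `self.db.upcast()` calls in the first file rely on the `Upcast` trait that this second file implements for `dyn HirDatabase`. The sketch below restates the pattern in simplified form: the trait and the database traits are re-declared locally and reduced to empty markers, so only the upcasting mechanics carry over; none of this is the actual ra_db source.

// Sketch, assuming an Upcast trait of roughly this shape: a concrete database type
// can be handed to any code that only wants the narrower `dyn HirDatabase` view.
trait DefDatabase {}
trait HirDatabase: DefDatabase {}

trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

struct RootDatabase;

impl DefDatabase for RootDatabase {}
impl HirDatabase for RootDatabase {}

// Mirrors the added impl in the hunk above: `&*self` reborrows the concrete
// `&RootDatabase` and lets it coerce to the `&dyn HirDatabase` trait object.
impl Upcast<dyn HirDatabase> for RootDatabase {
    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
        &*self
    }
}

fn takes_hir_db(_db: &dyn HirDatabase) {}

fn main() {
    let db = RootDatabase;
    // This is what a call like `self.db.upcast()` resolves to at the call sites
    // rewritten in semantics.rs.
    takes_hir_db(db.upcast());
}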