Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_assists/src/fill_match_arms.rs                    |  4
-rw-r--r--  crates/ra_hir/src/code_model_api.rs                         | 16
-rw-r--r--  crates/ra_hir/src/code_model_impl/module.rs                 |  2
-rw-r--r--  crates/ra_hir/src/db.rs                                     | 25
-rw-r--r--  crates/ra_hir/src/expr.rs                                   | 93
-rw-r--r--  crates/ra_hir/src/expr/scope.rs                             | 36
-rw-r--r--  crates/ra_hir/src/lib.rs                                    |  2
-rw-r--r--  crates/ra_hir/src/nameres.rs                                |  2
-rw-r--r--  crates/ra_hir/src/nameres/lower.rs                          | 13
-rw-r--r--  crates/ra_hir/src/source_binder.rs                          |  2
-rw-r--r--  crates/ra_hir/src/ty.rs                                     | 29
-rw-r--r--  crates/ra_hir/src/ty/tests.rs                               | 10
-rw-r--r--  crates/ra_ide_api/src/change.rs                             |  5
-rw-r--r--  crates/ra_ide_api/src/completion/complete_dot.rs           |  4
-rw-r--r--  crates/ra_ide_api/src/completion/complete_struct_literal.rs|  4
-rw-r--r--  crates/ra_ide_api/src/goto_definition.rs                   | 17
-rw-r--r--  crates/ra_ide_api/src/hover.rs                             |  8
17 files changed, 110 insertions, 162 deletions
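The pattern behind most of the hunks below: `BodySyntaxMapping` becomes a standalone `BodySourceMap`, the expensive lowering step now returns a `(value, source map)` pair from a single query, and the narrower queries (`body_hir`, `lower_module`, `Function::body_source_map`) just project one half of the cached pair. A minimal, self-contained sketch of that shape — plain functions stand in for the salsa queries, and the placeholder types are not the real hir definitions:

```rust
use std::sync::Arc;

// Placeholders only; the real Body/BodySourceMap live in crates/ra_hir/src/expr.rs.
struct Body;
struct BodySourceMap;

// Stands in for `body_with_source_map_query`: build both halves in one pass.
fn body_with_source_map() -> (Arc<Body>, Arc<BodySourceMap>) {
    (Arc::new(Body), Arc::new(BodySourceMap))
}

// Stand-ins for `body_hir_query` and `Function::body_source_map`: cheap projections
// of the cached pair, so callers that only need one half don't pay twice.
fn body_hir() -> Arc<Body> {
    body_with_source_map().0
}

fn body_source_map() -> Arc<BodySourceMap> {
    body_with_source_map().1
}

fn main() {
    let _body = body_hir();
    let _map = body_source_map();
}
```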
diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs
index 30020b56e..6a22b0af5 100644
--- a/crates/ra_assists/src/fill_match_arms.rs
+++ b/crates/ra_assists/src/fill_match_arms.rs
@@ -23,8 +23,8 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     let function =
         source_binder::function_from_child_node(ctx.db, ctx.frange.file_id, expr.syntax())?;
     let infer_result = function.infer(ctx.db);
-    let syntax_mapping = function.body_syntax_mapping(ctx.db);
-    let node_expr = syntax_mapping.node_expr(expr)?;
+    let source_map = function.body_source_map(ctx.db);
+    let node_expr = source_map.node_expr(expr)?;
     let match_expr_ty = infer_result[node_expr].clone();
     let enum_def = match match_expr_ty {
         Ty::Adt { def_id: AdtDef::Enum(e), .. } => e,
diff --git a/crates/ra_hir/src/code_model_api.rs b/crates/ra_hir/src/code_model_api.rs
index da0f1ec94..3ac146950 100644
--- a/crates/ra_hir/src/code_model_api.rs
+++ b/crates/ra_hir/src/code_model_api.rs
@@ -5,11 +5,11 @@ use ra_db::{CrateId, SourceRootId, Edition};
 use ra_syntax::{ast::self, TreeArc, SyntaxNode};
 
 use crate::{
-    Name, ScopesWithSyntaxMapping, Ty, HirFileId,
+    Name, ScopesWithSourceMap, Ty, HirFileId,
     HirDatabase, PersistentHirDatabase,
     type_ref::TypeRef,
     nameres::{ModuleScope, Namespace, lower::ImportId},
-    expr::{Body, BodySyntaxMapping},
+    expr::{Body, BodySourceMap},
     ty::InferenceResult,
     adt::{EnumVariantId, StructFieldId, VariantDef},
     generics::GenericParams,
@@ -191,7 +191,7 @@ impl Module {
     }
 
     pub fn declarations(self, db: &impl HirDatabase) -> Vec<ModuleDef> {
-        let (lowered_module, _) = db.lower_module(self);
+        let lowered_module = db.lower_module(self);
         lowered_module
             .declarations
             .values()
@@ -483,8 +483,8 @@ impl Function {
         self.signature(db).name.clone()
     }
 
-    pub fn body_syntax_mapping(&self, db: &impl HirDatabase) -> Arc<BodySyntaxMapping> {
-        db.body_syntax_mapping(*self)
+    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+        db.body_with_source_map(*self).1
     }
 
     pub fn body(&self, db: &impl HirDatabase) -> Arc<Body> {
@@ -495,10 +495,10 @@
         db.type_for_def((*self).into(), Namespace::Values)
     }
 
-    pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSyntaxMapping {
+    pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSourceMap {
         let scopes = db.expr_scopes(*self);
-        let syntax_mapping = db.body_syntax_mapping(*self);
-        ScopesWithSyntaxMapping { scopes, syntax_mapping }
+        let source_map = db.body_with_source_map(*self).1;
+        ScopesWithSourceMap { scopes, source_map }
     }
 
     pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
diff --git a/crates/ra_hir/src/code_model_impl/module.rs b/crates/ra_hir/src/code_model_impl/module.rs
index 2d3058afd..437f96942 100644
--- a/crates/ra_hir/src/code_model_impl/module.rs
+++ b/crates/ra_hir/src/code_model_impl/module.rs
@@ -47,7 +47,7 @@ impl Module {
         db: &impl HirDatabase,
         import: ImportId,
     ) -> TreeArc<ast::PathSegment> {
-        let source_map = db.lower_module_source_map(*self);
+        let (_, source_map) = db.lower_module_with_source_map(*self);
         let (_, source) = self.definition_source(db);
         source_map.get(&source, import)
     }
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index ec848f1b2..21d22aa7f 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -48,14 +48,14 @@ pub trait PersistentHirDatabase: SourceDatabase + AsRef<HirInterner> {
         delc_id: Option<SourceFileItemId>,
     ) -> Arc<Vec<crate::module_tree::Submodule>>;
 
-    #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)]
-    fn lower_module(&self, module: Module) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
-
-    #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_module_query)]
-    fn lower_module_module(&self, module: Module) -> Arc<LoweredModule>;
+    #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_with_source_map_query)]
+    fn lower_module_with_source_map(
+        &self,
+        module: Module,
+    ) -> (Arc<LoweredModule>, Arc<ImportSourceMap>);
 
-    #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_source_map_query)]
-    fn lower_module_source_map(&self, module: Module) -> Arc<ImportSourceMap>;
+    #[salsa::invoke(crate::nameres::lower::LoweredModule::lower_module_query)]
+    fn lower_module(&self, module: Module) -> Arc<LoweredModule>;
 
     #[salsa::invoke(crate::nameres::ItemMap::item_map_query)]
     fn item_map(&self, krate: Crate) -> Arc<ItemMap>;
@@ -105,11 +105,14 @@ pub trait HirDatabase: PersistentHirDatabase {
     #[salsa::invoke(crate::ty::type_for_field)]
     fn type_for_field(&self, field: StructField) -> Ty;
 
-    #[salsa::invoke(crate::expr::body_hir)]
-    fn body_hir(&self, func: Function) -> Arc<crate::expr::Body>;
+    #[salsa::invoke(crate::expr::body_with_source_map_query)]
+    fn body_with_source_map(
+        &self,
+        func: Function,
+    ) -> (Arc<crate::expr::Body>, Arc<crate::expr::BodySourceMap>);
 
-    #[salsa::invoke(crate::expr::body_syntax_mapping)]
-    fn body_syntax_mapping(&self, func: Function) -> Arc<crate::expr::BodySyntaxMapping>;
+    #[salsa::invoke(crate::expr::body_hir_query)]
+    fn body_hir(&self, func: Function) -> Arc<crate::expr::Body>;
 
     #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)]
     fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>;
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index aa39d28ed..6c7489e63 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -16,7 +16,7 @@ use crate::{
 };
 use crate::{ path::GenericArgs, ty::primitive::{UintTy, UncertainIntTy, UncertainFloatTy}};
 
-pub use self::scope::{ExprScopes, ScopesWithSyntaxMapping, ScopeEntryWithSyntax};
+pub use self::scope::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax};
 
 pub(crate) mod scope;
 
@@ -48,13 +48,12 @@ pub struct Body {
 /// expression containing it; but for type inference etc., we want to operate on
 /// a structure that is agnostic to the actual positions of expressions in the
 /// file, so that we don't recompute types whenever some whitespace is typed.
-#[derive(Debug, Eq, PartialEq)]
-pub struct BodySyntaxMapping {
-    body: Arc<Body>,
-    expr_syntax_mapping: FxHashMap<SyntaxNodePtr, ExprId>,
-    expr_syntax_mapping_back: ArenaMap<ExprId, SyntaxNodePtr>,
-    pat_syntax_mapping: FxHashMap<SyntaxNodePtr, PatId>,
-    pat_syntax_mapping_back: ArenaMap<PatId, SyntaxNodePtr>,
+#[derive(Default, Debug, Eq, PartialEq)]
+pub struct BodySourceMap {
+    expr_map: FxHashMap<SyntaxNodePtr, ExprId>,
+    expr_map_back: ArenaMap<ExprId, SyntaxNodePtr>,
+    pat_map: FxHashMap<SyntaxNodePtr, PatId>,
+    pat_map_back: ArenaMap<PatId, SyntaxNodePtr>,
 }
 
 impl Body {
@@ -77,10 +76,6 @@ impl Body {
     pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> {
         self.pats.iter()
     }
-
-    pub fn syntax_mapping(&self, db: &impl HirDatabase) -> Arc<BodySyntaxMapping> {
-        db.body_syntax_mapping(self.owner)
-    }
 }
 
 // needs arbitrary_self_types to be a method... or maybe move to the def?
@@ -119,33 +114,29 @@ impl Index<PatId> for Body {
     }
 }
 
-impl BodySyntaxMapping {
+impl BodySourceMap {
     pub fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
-        self.expr_syntax_mapping_back.get(expr).cloned()
+        self.expr_map_back.get(expr).cloned()
     }
 
     pub fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
-        self.expr_syntax_mapping.get(&ptr).cloned()
+        self.expr_map.get(&ptr).cloned()
     }
 
     pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
-        self.expr_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned()
+        self.expr_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
     }
 
     pub fn pat_syntax(&self, pat: PatId) -> Option<SyntaxNodePtr> {
-        self.pat_syntax_mapping_back.get(pat).cloned()
+        self.pat_map_back.get(pat).cloned()
     }
 
     pub fn syntax_pat(&self, ptr: SyntaxNodePtr) -> Option<PatId> {
-        self.pat_syntax_mapping.get(&ptr).cloned()
+        self.pat_map.get(&ptr).cloned()
     }
 
     pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
-        self.pat_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned()
-    }
-
-    pub fn body(&self) -> &Arc<Body> {
-        &self.body
+        self.pat_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
     }
 }
 
@@ -467,18 +458,11 @@
 
 // Queries
 
-pub(crate) fn body_hir(db: &impl HirDatabase, func: Function) -> Arc<Body> {
-    Arc::clone(&body_syntax_mapping(db, func).body)
-}
-
 struct ExprCollector {
     owner: Function,
     exprs: Arena<ExprId, Expr>,
     pats: Arena<PatId, Pat>,
-    expr_syntax_mapping: FxHashMap<SyntaxNodePtr, ExprId>,
-    expr_syntax_mapping_back: ArenaMap<ExprId, SyntaxNodePtr>,
-    pat_syntax_mapping: FxHashMap<SyntaxNodePtr, PatId>,
-    pat_syntax_mapping_back: ArenaMap<PatId, SyntaxNodePtr>,
+    source_map: BodySourceMap,
     params: Vec<PatId>,
     body_expr: Option<ExprId>,
 }
@@ -489,10 +473,7 @@ impl ExprCollector {
             owner,
             exprs: Arena::default(),
             pats: Arena::default(),
-            expr_syntax_mapping: FxHashMap::default(),
-            expr_syntax_mapping_back: ArenaMap::default(),
-            pat_syntax_mapping: FxHashMap::default(),
-            pat_syntax_mapping_back: ArenaMap::default(),
+            source_map: BodySourceMap::default(),
             params: Vec::new(),
             body_expr: None,
         }
@@ -500,15 +481,15 @@
 
     fn alloc_expr(&mut self, expr: Expr, syntax_ptr: SyntaxNodePtr) -> ExprId {
         let id = self.exprs.alloc(expr);
-        self.expr_syntax_mapping.insert(syntax_ptr, id);
-        self.expr_syntax_mapping_back.insert(id, syntax_ptr);
+        self.source_map.expr_map.insert(syntax_ptr, id);
+        self.source_map.expr_map_back.insert(id, syntax_ptr);
         id
     }
 
     fn alloc_pat(&mut self, pat: Pat, syntax_ptr: SyntaxNodePtr) -> PatId {
         let id = self.pats.alloc(pat);
-        self.pat_syntax_mapping.insert(syntax_ptr, id);
-        self.pat_syntax_mapping_back.insert(id, syntax_ptr);
+        self.source_map.pat_map.insert(syntax_ptr, id);
+        self.source_map.pat_map_back.insert(id, syntax_ptr);
         id
     }
 
@@ -639,7 +620,7 @@ impl ExprCollector {
             ast::ExprKind::ParenExpr(e) => {
                 let inner = self.collect_expr_opt(e.expr());
                 // make the paren expr point to the inner expression as well
-                self.expr_syntax_mapping.insert(syntax_ptr, inner);
+                self.source_map.expr_map.insert(syntax_ptr, inner);
                 inner
             }
             ast::ExprKind::ReturnExpr(e) => {
@@ -660,9 +641,11 @@ impl ExprCollector {
                 } else if let Some(nr) = field.name_ref() {
                     // field shorthand
                     let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr)));
-                    self.expr_syntax_mapping
+                    self.source_map
+                        .expr_map
                         .insert(SyntaxNodePtr::new(nr.syntax()), id);
-                    self.expr_syntax_mapping_back
+                    self.source_map
+                        .expr_map_back
                         .insert(id, SyntaxNodePtr::new(nr.syntax()));
                     id
                 } else {
@@ -910,7 +893,7 @@
         self.body_expr = Some(body);
     }
 
-    fn into_body_syntax_mapping(self) -> BodySyntaxMapping {
+    fn finish(self) -> (Body, BodySourceMap) {
         let body = Body {
             owner: self.owner,
             exprs: self.exprs,
@@ -918,28 +901,30 @@
             params: self.params,
             body_expr: self.body_expr.expect("A body should have been collected"),
         };
-        BodySyntaxMapping {
-            body: Arc::new(body),
-            expr_syntax_mapping: self.expr_syntax_mapping,
-            expr_syntax_mapping_back: self.expr_syntax_mapping_back,
-            pat_syntax_mapping: self.pat_syntax_mapping,
-            pat_syntax_mapping_back: self.pat_syntax_mapping_back,
-        }
+        (body, self.source_map)
     }
 }
 
-pub(crate) fn body_syntax_mapping(db: &impl HirDatabase, func: Function) -> Arc<BodySyntaxMapping> {
+pub(crate) fn body_with_source_map_query(
+    db: &impl HirDatabase,
+    func: Function,
+) -> (Arc<Body>, Arc<BodySourceMap>) {
     let mut collector = ExprCollector::new(func);
 
     // TODO: consts, etc.
     collector.collect_fn_body(&func.source(db).1);
 
-    Arc::new(collector.into_body_syntax_mapping())
+    let (body, source_map) = collector.finish();
+    (Arc::new(body), Arc::new(source_map))
+}
+
+pub(crate) fn body_hir_query(db: &impl HirDatabase, func: Function) -> Arc<Body> {
+    db.body_with_source_map(func).0
 }
 
 #[cfg(test)]
-pub(crate) fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> BodySyntaxMapping {
+fn collect_fn_body_syntax(function: Function, node: &ast::FnDef) -> (Body, BodySourceMap) {
     let mut collector = ExprCollector::new(function);
     collector.collect_fn_body(node);
-    collector.into_body_syntax_mapping()
+    collector.finish()
 }
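The IDE-side hunks (fill_match_arms above, completion/goto/hover below) all follow the same lookup sequence against the renamed API. A hedged sketch of that sequence, assuming the surrounding assist/completion context supplies `db`, `function`, and `expr`; this is illustrative only, not a standalone compilable example:

```rust
// Sketch only: mirrors how the ra_assists / ra_ide_api hunks consume the new API.
use hir::{Function, HirDatabase, Ty};
use ra_syntax::ast;

fn type_of_expr(db: &impl HirDatabase, function: Function, expr: &ast::Expr) -> Option<Ty> {
    let infer_result = function.infer(db);         // Arc<InferenceResult>
    let source_map = function.body_source_map(db); // was function.body_syntax_mapping(db)
    let expr_id = source_map.node_expr(expr)?;     // map the ast node to its ExprId
    Some(infer_result[expr_id].clone())            // inference results are indexed by ExprId
}
```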
diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs
index bb8d50db8..81fbc509e 100644
--- a/crates/ra_hir/src/expr/scope.rs
+++ b/crates/ra_hir/src/expr/scope.rs
@@ -11,7 +11,7 @@ use ra_arena::{Arena, RawId, impl_arena_id};
 
 use crate::{
     Name, AsName, Function,
-    expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySyntaxMapping},
+    expr::{PatId, ExprId, Pat, Expr, Body, Statement, BodySourceMap},
     HirDatabase,
 };
 
@@ -108,8 +108,8 @@ impl ExprScopes {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ScopesWithSyntaxMapping {
-    pub syntax_mapping: Arc<BodySyntaxMapping>,
+pub struct ScopesWithSourceMap {
+    pub source_map: Arc<BodySourceMap>,
     pub scopes: Arc<ExprScopes>,
 }
 
@@ -129,7 +129,7 @@ impl ScopeEntryWithSyntax {
     }
 }
 
-impl ScopesWithSyntaxMapping {
+impl ScopesWithSourceMap {
     fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
         generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
     }
@@ -138,7 +138,7 @@ impl ScopesWithSyntaxMapping {
         self.scopes
             .scope_for
             .iter()
-            .filter_map(|(id, scope)| Some((self.syntax_mapping.expr_syntax(*id)?, scope)))
+            .filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
             // find containing scope
             .min_by_key(|(ptr, _scope)| {
                 (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
@@ -155,7 +155,7 @@
             .scopes
             .scope_for
             .iter()
-            .filter_map(|(id, scope)| Some((self.syntax_mapping.expr_syntax(*id)?, scope)))
+            .filter_map(|(id, scope)| Some((self.source_map.expr_syntax(*id)?, scope)))
            .map(|(ptr, scope)| (ptr.range(), scope))
            .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
 
@@ -185,7 +185,7 @@
         ret.and_then(|entry| {
             Some(ScopeEntryWithSyntax {
                 name: entry.name().clone(),
-                ptr: self.syntax_mapping.pat_syntax(entry.pat())?,
+                ptr: self.source_map.pat_syntax(entry.pat())?,
             })
         })
     }
@@ -211,7 +211,7 @@
     pub fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
         node.ancestors()
             .map(SyntaxNodePtr::new)
-            .filter_map(|ptr| self.syntax_mapping.syntax_expr(ptr))
+            .filter_map(|ptr| self.source_map.syntax_expr(ptr))
             .find_map(|it| self.scopes.scope_for(it))
     }
 }
@@ -316,12 +316,10 @@ mod tests {
         let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
         let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
         let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) };
-        let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
-        let scopes = ExprScopes::new(Arc::clone(body_hir.body()));
-        let scopes = ScopesWithSyntaxMapping {
-            scopes: Arc::new(scopes),
-            syntax_mapping: Arc::new(body_hir),
-        };
+        let (body, source_map) = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
+        let scopes = ExprScopes::new(Arc::new(body));
+        let scopes =
+            ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
         let actual = scopes
             .scope_chain(marker.syntax())
             .flat_map(|scope| scopes.scopes.entries(scope))
@@ -417,12 +415,10 @@
         let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
 
         let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) };
-        let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
-        let scopes = ExprScopes::new(Arc::clone(body_hir.body()));
-        let scopes = ScopesWithSyntaxMapping {
-            scopes: Arc::new(scopes),
-            syntax_mapping: Arc::new(body_hir),
-        };
+        let (body, source_map) = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
+        let scopes = ExprScopes::new(Arc::new(body));
+        let scopes =
+            ScopesWithSourceMap { scopes: Arc::new(scopes), source_map: Arc::new(source_map) };
         let local_name_entry = scopes.resolve_local_name(name_ref).unwrap();
         let local_name = local_name_entry.ptr();
         assert_eq!(local_name.range(), expected_name.syntax().range());
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index edc1b4f57..a6e744ea7 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -60,7 +60,7 @@ pub use self::{
     impl_block::{ImplBlock, ImplItem},
     docs::{Docs, Documentation},
     adt::AdtDef,
-    expr::{ExprScopes, ScopesWithSyntaxMapping, ScopeEntryWithSyntax},
+    expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
     resolve::{Resolver, Resolution},
 };
 
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs
index ac390c5db..73919ee37 100644
--- a/crates/ra_hir/src/nameres.rs
+++ b/crates/ra_hir/src/nameres.rs
@@ -481,7 +481,7 @@ impl ItemMap {
         let module_tree = db.module_tree(krate);
         let input = module_tree
             .modules()
-            .map(|module_id| (module_id, db.lower_module_module(Module { krate, module_id })))
+            .map(|module_id| (module_id, db.lower_module(Module { krate, module_id })))
             .collect::<FxHashMap<_, _>>();
 
         let resolver = Resolver::new(db, &input, krate);
diff --git a/crates/ra_hir/src/nameres/lower.rs b/crates/ra_hir/src/nameres/lower.rs
index 2bc3eb60c..56262ad6d 100644
--- a/crates/ra_hir/src/nameres/lower.rs
+++ b/crates/ra_hir/src/nameres/lower.rs
@@ -60,21 +60,14 @@
 }
 
 impl LoweredModule {
-    pub(crate) fn lower_module_module_query(
+    pub(crate) fn lower_module_query(
         db: &impl PersistentHirDatabase,
         module: Module,
     ) -> Arc<LoweredModule> {
-        db.lower_module(module).0
-    }
-
-    pub(crate) fn lower_module_source_map_query(
-        db: &impl PersistentHirDatabase,
-        module: Module,
-    ) -> Arc<ImportSourceMap> {
-        db.lower_module(module).1
+        db.lower_module_with_source_map(module).0
     }
 
-    pub(crate) fn lower_module_query(
+    pub(crate) fn lower_module_with_source_map_query(
         db: &impl PersistentHirDatabase,
         module: Module,
     ) -> (Arc<LoweredModule>, Arc<ImportSourceMap>) {
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 625a2ce45..ea20cd15a 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -157,7 +157,7 @@ pub fn macro_symbols(db: &impl HirDatabase, file_id: FileId) -> Vec<(SmolStr, Te
         Some(it) => it,
         None => return Vec::new(),
     };
-    let items = db.lower_module_module(module);
+    let items = db.lower_module(module);
     let mut res = Vec::new();
 
     for macro_call_id in items
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index e505c86e3..78270a9b7 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -54,7 +54,7 @@ pub enum Ty {
     /// The pointee of an array slice. Written as `[T]`.
     Slice(Arc<Ty>),
 
-    // An array with the given length. Written as `[T; n]`.
+    /// An array with the given length. Written as `[T; n]`.
     Array(Arc<Ty>),
 
     /// A raw pointer. Written as `*mut T` or `*const T`
@@ -97,36 +97,12 @@
     /// ```
     FnPtr(Arc<FnSig>),
 
-    // rustc has a separate type for each function, which just coerces to the
-    // above function pointer type. Once we implement generics, we will probably
-    // need this as well.
-
-    // A trait, defined with `dyn Trait`.
-    // Dynamic(),
-
-    // The anonymous type of a closure. Used to represent the type of
-    // `|a| a`.
-    // Closure(DefId, ClosureSubsts<'tcx>),
-
-    // The anonymous type of a generator. Used to represent the type of
-    // `|a| yield a`.
-    // Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability),
-
-    // A type representing the types stored inside a generator.
-    // This should only appear in GeneratorInteriors.
-    // GeneratorWitness(Binder<&'tcx List<Ty<'tcx>>>),
     /// The never type `!`.
     Never,
 
     /// A tuple type. For example, `(i32, bool)`.
     Tuple(Arc<[Ty]>),
 
-    // The projection of an associated type. For example,
-    // `<T as Trait<..>>::N`.pub
-    // Projection(ProjectionTy),
-
-    // Opaque (`impl Trait`) type found in a return type.
-    // Opaque(DefId, Substs),
     /// A type parameter; for example, `T` in `fn f<T>(x: T) {}
     Param {
         /// The index of the parameter (starting with parameters from the
@@ -304,9 +280,6 @@ impl Ty {
                 if (idx as usize) < substs.0.len() {
                     substs.0[idx as usize].clone()
                 } else {
-                    // TODO: does this indicate a bug? i.e. should we always
-                    // have substs for all type params? (they might contain the
-                    // params themselves again...)
                     Ty::Param { idx, name }
                 }
             }
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index 8de46a29e..2fdfb54f4 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -1045,11 +1045,11 @@ fn test() {
 
 fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
     let func = source_binder::function_from_position(db, pos).unwrap();
-    let body_syntax_mapping = func.body_syntax_mapping(db);
+    let body_source_map = func.body_source_map(db);
     let inference_result = func.infer(db);
     let (_, syntax) = func.source(db);
     let node = algo::find_node_at_offset::<ast::Expr>(syntax.syntax(), pos.offset).unwrap();
-    let expr = body_syntax_mapping.node_expr(node).unwrap();
+    let expr = body_source_map.node_expr(node).unwrap();
     let ty = &inference_result[expr];
     ty.to_string()
 }
@@ -1061,17 +1061,17 @@ fn infer(content: &str) -> String {
     for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
         let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap();
         let inference_result = func.infer(&db);
-        let body_syntax_mapping = func.body_syntax_mapping(&db);
+        let body_source_map = func.body_source_map(&db);
         let mut types = Vec::new();
         for (pat, ty) in inference_result.type_of_pat.iter() {
-            let syntax_ptr = match body_syntax_mapping.pat_syntax(pat) {
+            let syntax_ptr = match body_source_map.pat_syntax(pat) {
                 Some(sp) => sp,
                 None => continue,
             };
             types.push((syntax_ptr, ty));
         }
         for (expr, ty) in inference_result.type_of_expr.iter() {
-            let syntax_ptr = match body_syntax_mapping.expr_syntax(expr) {
+            let syntax_ptr = match body_source_map.expr_syntax(expr) {
                 Some(sp) => sp,
                 None => continue,
             };
diff --git a/crates/ra_ide_api/src/change.rs b/crates/ra_ide_api/src/change.rs
index 3f041f9c3..0c90ed5b5 100644
--- a/crates/ra_ide_api/src/change.rs
+++ b/crates/ra_ide_api/src/change.rs
@@ -223,8 +223,7 @@ impl RootDatabase {
         self.query(hir::db::FileItemsQuery).sweep(sweep);
         self.query(hir::db::FileItemQuery).sweep(sweep);
 
-        self.query(hir::db::LowerModuleQuery).sweep(sweep);
-        self.query(hir::db::LowerModuleSourceMapQuery).sweep(sweep);
-        self.query(hir::db::BodySyntaxMappingQuery).sweep(sweep);
+        self.query(hir::db::LowerModuleWithSourceMapQuery).sweep(sweep);
+        self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
     }
 }
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs
index d5ad2e79f..94c66be31 100644
--- a/crates/ra_ide_api/src/completion/complete_dot.rs
+++ b/crates/ra_ide_api/src/completion/complete_dot.rs
@@ -9,8 +9,8 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
         _ => return,
     };
     let infer_result = function.infer(ctx.db);
-    let syntax_mapping = function.body_syntax_mapping(ctx.db);
-    let expr = match syntax_mapping.node_expr(receiver) {
+    let source_map = function.body_source_map(ctx.db);
+    let expr = match source_map.node_expr(receiver) {
         Some(expr) => expr,
         None => return,
     };
diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
index afb092f59..6bef9624e 100644
--- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs
+++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
@@ -9,8 +9,8 @@ pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionCon
         _ => return,
     };
     let infer_result = function.infer(ctx.db);
-    let syntax_mapping = function.body_syntax_mapping(ctx.db);
-    let expr = match syntax_mapping.node_expr(struct_lit.into()) {
+    let source_map = function.body_source_map(ctx.db);
+    let expr = match source_map.node_expr(struct_lit.into()) {
         Some(expr) => expr,
         None => return,
     };
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs
index 6fa430754..9ec179593 100644
--- a/crates/ra_ide_api/src/goto_definition.rs
+++ b/crates/ra_ide_api/src/goto_definition.rs
@@ -54,10 +54,10 @@ pub(crate) fn reference_definition(
     if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
         tested_by!(goto_definition_works_for_methods);
         let infer_result = function.infer(db);
-        let syntax_mapping = function.body_syntax_mapping(db);
+        let source_map = function.body_source_map(db);
         let expr = ast::Expr::cast(method_call.syntax()).unwrap();
         if let Some(func) =
-            syntax_mapping.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
+            source_map.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
         {
             return Exact(NavigationTarget::from_function(db, func));
         };
@@ -66,10 +66,10 @@
     if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
         tested_by!(goto_definition_works_for_fields);
         let infer_result = function.infer(db);
-        let syntax_mapping = function.body_syntax_mapping(db);
+        let source_map = function.body_source_map(db);
         let expr = ast::Expr::cast(field_expr.syntax()).unwrap();
         if let Some(field) =
-            syntax_mapping.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
+            source_map.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
         {
             return Exact(NavigationTarget::from_field(db, field));
         };
@@ -80,11 +80,11 @@
         tested_by!(goto_definition_works_for_named_fields);
 
         let infer_result = function.infer(db);
-        let syntax_mapping = function.body_syntax_mapping(db);
+        let source_map = function.body_source_map(db);
 
         let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);
 
-        if let Some(expr) = struct_lit.and_then(|lit| syntax_mapping.node_expr(lit.into())) {
+        if let Some(expr) = struct_lit.and_then(|lit| source_map.node_expr(lit.into())) {
             let ty = infer_result[expr].clone();
             if let hir::Ty::Adt { def_id, .. } = ty {
                 if let hir::AdtDef::Struct(s) = def_id {
@@ -109,9 +109,8 @@
         Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
         Some(Resolution::LocalBinding(pat)) => {
             let body = resolver.body().expect("no body for local binding");
-            let syntax_mapping = body.syntax_mapping(db);
-            let ptr =
-                syntax_mapping.pat_syntax(pat).expect("pattern not found in syntax mapping");
+            let source_map = body.owner().body_source_map(db);
+            let ptr = source_map.pat_syntax(pat).expect("pattern not found in syntax mapping");
             let name =
                 path.as_ident().cloned().expect("local binding from a multi-segment path");
             let nav = NavigationTarget::from_scope_entry(file_id, name, ptr);
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index d05da5464..4722206e2 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -50,7 +50,7 @@ impl HoverResult {
     /// for displaying in a UI
     pub fn to_markup(&self) -> String {
         let mut markup = if !self.exact {
-            let mut msg = String::from("Failed to exactly resolve the symbol. This is probably because rust_analyzer does not yet support glob imports or traits.");
+            let mut msg = String::from("Failed to exactly resolve the symbol. This is probably because rust_analyzer does not yet support traits.");
             if !self.results.is_empty() {
                 msg.push_str(" \nThese items were found instead:");
             }
@@ -132,10 +132,10 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
     let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
     let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
     let infer = function.infer(db);
-    let syntax_mapping = function.body_syntax_mapping(db);
-    if let Some(expr) = ast::Expr::cast(node).and_then(|e| syntax_mapping.node_expr(e)) {
+    let source_map = function.body_source_map(db);
+    if let Some(expr) = ast::Expr::cast(node).and_then(|e| source_map.node_expr(e)) {
         Some(infer[expr].to_string())
-    } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| syntax_mapping.node_pat(p)) {
+    } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| source_map.node_pat(p)) {
         Some(infer[pat].to_string())
     } else {
         None