 -rw-r--r--  crates/ra_cli/src/analysis_stats.rs  |  7
 -rw-r--r--  crates/ra_hir/src/code_model/src.rs  | 31
 -rw-r--r--  crates/ra_hir/src/expr.rs            | 29
 -rw-r--r--  crates/ra_hir/src/expr/lower.rs      | 16
 -rw-r--r--  crates/ra_hir/src/expr/validation.rs | 47
 -rw-r--r--  crates/ra_hir/src/source_binder.rs   | 29
 -rw-r--r--  crates/ra_hir/src/ty/tests.rs        | 33
 -rw-r--r--  crates/ra_syntax/src/ptr.rs          |  5
 8 files changed, 127 insertions, 70 deletions
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index d355fa2e8..5c0a9dd98 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -110,9 +110,12 @@ pub fn run(verbose: bool, memory_usage: bool, path: &Path, only: Option<&str>) -
                 let original_file = src.file_id.original_file(db);
                 let path = db.file_relative_path(original_file);
                 let line_index = host.analysis().file_line_index(original_file).unwrap();
+                let text_range = src
+                    .ast
+                    .either(|it| it.syntax().text_range(), |it| it.syntax().text_range());
                 let (start, end) = (
-                    line_index.line_col(src.ast.syntax().text_range().start()),
-                    line_index.line_col(src.ast.syntax().text_range().end()),
+                    line_index.line_col(text_range.start()),
+                    line_index.line_col(text_range.end()),
                 );
                 bar.println(format!(
                     "{} {}:{}-{}:{}: Expected {}, got {}",
diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs
index 7c9454c0b..b9ffb0c7a 100644
--- a/crates/ra_hir/src/code_model/src.rs
+++ b/crates/ra_hir/src/code_model/src.rs
@@ -1,11 +1,15 @@
-use ra_syntax::ast::{self, AstNode};
+use ra_syntax::{
+    ast::{self, AstNode},
+    SyntaxNode,
+};
 
 use crate::{
-    ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function,
-    HasBody, HirDatabase, HirFileId, MacroDef, Module, ModuleSource, Static, Struct, StructField,
-    Trait, TypeAlias, Union,
+    ids::AstItemDef, AstDatabase, Const, DefDatabase, Either, Enum, EnumVariant, FieldSource,
+    Function, HasBody, HirDatabase, HirFileId, MacroDef, Module, ModuleSource, Static, Struct,
+    StructField, Trait, TypeAlias, Union,
 };
 
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub struct Source<T> {
     pub file_id: HirFileId,
     pub ast: T,
@@ -16,6 +20,15 @@ pub trait HasSource {
     fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<Self::Ast>;
 }
 
+impl<T> Source<T> {
+    pub(crate) fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
+        Source { file_id: self.file_id, ast: f(self.ast) }
+    }
+    pub(crate) fn file_syntax(&self, db: &impl AstDatabase) -> SyntaxNode {
+        db.parse_or_expand(self.file_id).expect("source created from invalid file")
+    }
+}
+
 /// NB: Module is !HasSource, because it has two source nodes at the same time:
 /// definition and declaration.
 impl Module {
@@ -117,12 +130,12 @@ where
         self,
         db: &impl HirDatabase,
         expr_id: crate::expr::ExprId,
-    ) -> Option<Source<ast::Expr>> {
+    ) -> Option<Source<Either<ast::Expr, ast::RecordField>>> {
         let source_map = self.body_source_map(db);
-        let expr_syntax = source_map.expr_syntax(expr_id)?.a()?;
-        let source = self.source(db);
-        let ast = expr_syntax.to_node(&source.ast.syntax());
-        Some(Source { file_id: source.file_id, ast })
+        let source_ptr = source_map.expr_syntax(expr_id)?;
+        let root = source_ptr.file_syntax(db);
+        let source = source_ptr.map(|ast| ast.map(|it| it.to_node(&root), |it| it.to_node(&root)));
+        Some(source)
     }
 }
 
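
The core of this change is the generic `Source<T>` wrapper: a value paired with the `HirFileId` it came from, combined with `Either` so an expression can map back to either an `ast::Expr` or the `ast::RecordField` of a shorthand struct initializer. Below is a minimal, self-contained sketch of that shape; `FileId`, the local `Either`, and the string payloads are simplified stand-ins invented for illustration, not the real ra_hir/ra_syntax types.

    // Simplified stand-ins for HirFileId and a couple of AST node kinds.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileId(u32);

    #[derive(Debug, Clone, Copy)]
    enum Either<A, B> {
        A(A),
        B(B),
    }

    impl<A, B> Either<A, B> {
        // Collapse both variants into one result, like `Either::either` in the diff.
        fn either<R>(self, f: impl FnOnce(A) -> R, g: impl FnOnce(B) -> R) -> R {
            match self {
                Either::A(a) => f(a),
                Either::B(b) => g(b),
            }
        }
    }

    // The `Source<T>` wrapper: a value plus the file it originates from.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Source<T> {
        file_id: FileId,
        ast: T,
    }

    impl<T> Source<T> {
        // Transform the payload while preserving the originating file, which is
        // how `expr_source` turns a stored pointer into a resolved node.
        fn map<U>(self, f: impl FnOnce(T) -> U) -> Source<U> {
            Source { file_id: self.file_id, ast: f(self.ast) }
        }
    }

    fn main() {
        // An expression recorded as either a normal expr or a record-field shorthand.
        let expr: Source<Either<&str, &str>> =
            Source { file_id: FileId(1), ast: Either::A("a + b") };
        let shorthand: Source<Either<&str, &str>> =
            Source { file_id: FileId(2), ast: Either::B("x") };
        // Map over the payload without losing track of the file id.
        let text = expr.map(|ast| ast.either(str::to_string, str::to_string));
        assert_eq!(text.file_id, FileId(1));
        assert_eq!(shorthand.map(|ast| ast.either(str::to_string, str::to_string)).ast, "x");
        println!("{:?} contains {}", text.file_id, text.ast);
    }
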
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 5f6a4b320..fc21e269f 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -12,7 +12,7 @@ use crate::{
     path::GenericArgs,
     ty::primitive::{UncertainFloatTy, UncertainIntTy},
     type_ref::{Mutability, TypeRef},
-    DefWithBody, Either, HasSource, HirDatabase, Name, Path, Resolver,
+    DefWithBody, Either, HasSource, HirDatabase, Name, Path, Resolver, Source,
 };
 
 pub use self::scope::ExprScopes;
@@ -43,23 +43,32 @@ pub struct Body {
     body_expr: ExprId,
 }
 
+type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
+type ExprSource = Source<ExprPtr>;
+
+type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
+type PatSource = Source<PatPtr>;
+
 /// An item body together with the mapping from syntax nodes to HIR expression
 /// IDs. This is needed to go from e.g. a position in a file to the HIR
 /// expression containing it; but for type inference etc., we want to operate on
 /// a structure that is agnostic to the actual positions of expressions in the
 /// file, so that we don't recompute types whenever some whitespace is typed.
+///
+/// One complication here is that, due to macro expansion, a single `Body` might
+/// be spread across several files. So, for each ExprId and PatId, we record
+/// both the HirFileId and the position inside the file. However, we only store
+/// AST -> ExprId mapping for non-macro files, as it is not clear how to handle
+/// this properly for macros.
 #[derive(Default, Debug, Eq, PartialEq)]
 pub struct BodySourceMap {
     expr_map: FxHashMap<ExprPtr, ExprId>,
-    expr_map_back: ArenaMap<ExprId, ExprPtr>,
+    expr_map_back: ArenaMap<ExprId, ExprSource>,
     pat_map: FxHashMap<PatPtr, PatId>,
-    pat_map_back: ArenaMap<PatId, PatPtr>,
+    pat_map_back: ArenaMap<PatId, PatSource>,
     field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
 }
 
-type ExprPtr = Either<AstPtr<ast::Expr>, AstPtr<ast::RecordField>>;
-type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
-
 impl Body {
     pub fn params(&self) -> &[PatId] {
         &self.params
@@ -123,16 +132,16 @@ impl Index<PatId> for Body {
 }
 
 impl BodySourceMap {
-    pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<ExprPtr> {
-        self.expr_map_back.get(expr).cloned()
+    pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<ExprSource> {
+        self.expr_map_back.get(expr).copied()
     }
 
     pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
         self.expr_map.get(&Either::A(AstPtr::new(node))).cloned()
     }
 
-    pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
-        self.pat_map_back.get(pat).cloned()
+    pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatSource> {
+        self.pat_map_back.get(pat).copied()
     }
 
     pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
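
The new doc comment describes an asymmetry: `expr_map_back`/`pat_map_back` always record the originating `HirFileId` alongside the pointer, while the forward AST -> id maps are only filled for the original (non-macro) file. A rough sketch of that bookkeeping with plain hash maps; `SourceMap`, `Ptr`, and `FileId` here are hypothetical stand-ins for the arena-based types in the crate.

    use std::collections::HashMap;

    // Stand-ins for the real arena ids, AST pointers and file ids.
    type ExprId = u32;

    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    struct Ptr(u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileId(u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Source<T> {
        file_id: FileId,
        ast: T,
    }

    #[derive(Default)]
    struct SourceMap {
        // AST -> id: only for nodes in the original file the user actually typed.
        expr_map: HashMap<Ptr, ExprId>,
        // id -> (file, AST): always recorded, so HIR can point back into macro files too.
        expr_map_back: HashMap<ExprId, Source<Ptr>>,
    }

    impl SourceMap {
        fn alloc(&mut self, id: ExprId, ptr: Ptr, current: FileId, original: FileId) {
            if current == original {
                self.expr_map.insert(ptr, id);
            }
            self.expr_map_back.insert(id, Source { file_id: current, ast: ptr });
        }
    }

    fn main() {
        let (original, macro_file) = (FileId(0), FileId(1));
        let mut map = SourceMap::default();
        map.alloc(0, Ptr(10), original, original);
        map.alloc(1, Ptr(20), macro_file, original); // expression produced by a macro expansion
        assert!(map.expr_map.get(&Ptr(20)).is_none()); // no forward entry for macro code
        assert_eq!(map.expr_map_back[&1].file_id, macro_file); // but the back entry knows its file
    }

The same shape is what lets the real `expr_syntax`/`pat_syntax` hand back an `ExprSource`/`PatSource` by value with `.copied()` instead of `.cloned()`: the diff adds `Copy` to `Source<T>`, and the pointer payloads are already `Copy`.
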
diff --git a/crates/ra_hir/src/expr/lower.rs b/crates/ra_hir/src/expr/lower.rs
index 7b3e55b7e..6afd80989 100644
--- a/crates/ra_hir/src/expr/lower.rs
+++ b/crates/ra_hir/src/expr/lower.rs
@@ -14,7 +14,7 @@ use crate::{
     ty::primitive::{FloatTy, IntTy, UncertainFloatTy, UncertainIntTy},
     type_ref::TypeRef,
     DefWithBody, Either, HirDatabase, HirFileId, MacroCallLoc, MacroFileKind, Mutability, Path,
-    Resolver,
+    Resolver, Source,
 };
 
 use super::{
@@ -103,11 +103,13 @@ where
         let id = self.body.exprs.alloc(expr);
         if self.current_file_id == self.original_file_id {
             self.source_map.expr_map.insert(ptr, id);
-            self.source_map.expr_map_back.insert(id, ptr);
         }
+        self.source_map
+            .expr_map_back
+            .insert(id, Source { file_id: self.current_file_id, ast: ptr });
         id
     }
-    // deshugared exprs don't have ptr, that's wrong and should be fixed
+    // desugared exprs don't have ptr, that's wrong and should be fixed
     // somehow.
     fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
         self.body.exprs.alloc(expr)
@@ -117,18 +119,18 @@ where
         let id = self.body.exprs.alloc(expr);
         if self.current_file_id == self.original_file_id {
             self.source_map.expr_map.insert(ptr, id);
-            self.source_map.expr_map_back.insert(id, ptr);
         }
+        self.source_map
+            .expr_map_back
+            .insert(id, Source { file_id: self.current_file_id, ast: ptr });
         id
     }
     fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
         let id = self.body.pats.alloc(pat);
-
         if self.current_file_id == self.original_file_id {
             self.source_map.pat_map.insert(ptr, id);
-            self.source_map.pat_map_back.insert(id, ptr);
         }
-
+        self.source_map.pat_map_back.insert(id, Source { file_id: self.current_file_id, ast: ptr });
         id
     }
 
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs
index 6fdaf1fce..1202913e2 100644
--- a/crates/ra_hir/src/expr/validation.rs
+++ b/crates/ra_hir/src/expr/validation.rs
@@ -1,9 +1,8 @@
 use std::sync::Arc;
 
-use ra_syntax::ast::{self, AstNode};
+use ra_syntax::ast;
 use rustc_hash::FxHashSet;
 
-use super::{Expr, ExprId, RecordLitField};
 use crate::{
     adt::AdtDef,
     diagnostics::{DiagnosticSink, MissingFields, MissingOkInTailExpr},
@@ -11,9 +10,11 @@ use crate::{
     name,
     path::{PathKind, PathSegment},
     ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
-    Function, HasSource, HirDatabase, ModuleDef, Name, Path, PerNs, Resolution,
+    Function, HirDatabase, ModuleDef, Name, Path, PerNs, Resolution,
 };
 
+use super::{Expr, ExprId, RecordLitField};
+
 pub(crate) struct ExprValidator<'a, 'b: 'a> {
     func: Function,
     infer: Arc<InferenceResult>,
@@ -78,25 +79,20 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
             return;
         }
         let source_map = self.func.body_source_map(db);
-        let file_id = self.func.source(db).file_id;
-        let parse = db.parse(file_id.original_file(db));
-        let source_file = parse.tree();
-        if let Some(field_list_node) = source_map
-            .expr_syntax(id)
-            .and_then(|ptr| ptr.a())
-            .map(|ptr| ptr.to_node(source_file.syntax()))
-            .and_then(|expr| match expr {
-                ast::Expr::RecordLit(it) => Some(it),
-                _ => None,
-            })
-            .and_then(|lit| lit.record_field_list())
-        {
-            let field_list_ptr = AstPtr::new(&field_list_node);
-            self.sink.push(MissingFields {
-                file: file_id,
-                field_list: field_list_ptr,
-                missed_fields,
-            })
+
+        if let Some(source_ptr) = source_map.expr_syntax(id) {
+            if let Some(expr) = source_ptr.ast.a() {
+                let root = source_ptr.file_syntax(db);
+                if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
+                    if let Some(field_list) = record_lit.record_field_list() {
+                        self.sink.push(MissingFields {
+                            file: source_ptr.file_id,
+                            field_list: AstPtr::new(&field_list),
+                            missed_fields,
+                        })
+                    }
+                }
+            }
         }
     }
 
@@ -136,10 +132,11 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
 
         if params.len() == 2 && &params[0] == &mismatch.actual {
             let source_map = self.func.body_source_map(db);
-            let file_id = self.func.source(db).file_id;
 
-            if let Some(expr) = source_map.expr_syntax(id).and_then(|n| n.a()) {
-                self.sink.push(MissingOkInTailExpr { file: file_id, expr });
+            if let Some(source_ptr) = source_map.expr_syntax(id) {
+                if let Some(expr) = source_ptr.ast.a() {
+                    self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
+                }
             }
         }
     }
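
The validator now follows one pattern for every diagnostic: fetch the `ExprSource`, keep the `ast::Expr` side of the `Either`, obtain the root of the *originating* file via `file_syntax`, and only then resolve the pointer to a node. A toy version of that flow, where `parse_root`, `NodePtr`, and the string-slicing `to_node` are invented stand-ins rather than the real rowan-based API:

    // Stand-ins: a file id, a by-range "pointer", and a wrapper carrying the file id.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileId(u32);

    #[derive(Debug, Clone, Copy)]
    struct NodePtr {
        start: usize,
        end: usize,
    }

    #[derive(Debug, Clone, Copy)]
    struct Source<T> {
        file_id: FileId,
        ast: T,
    }

    // Stand-in for `db.parse_or_expand(file_id)`: hand back the "root" of that file.
    fn parse_root(file_id: FileId) -> String {
        match file_id.0 {
            0 => "x + foo(y)".to_string(), // the original source file
            _ => "x + 0".to_string(),      // pretend macro expansion output
        }
    }

    impl NodePtr {
        // Stand-in for `AstPtr::to_node(&root)`: resolve the pointer against a root.
        fn to_node(self, root: &str) -> &str {
            &root[self.start..self.end]
        }
    }

    fn main() {
        // What `source_map.expr_syntax(id)` conceptually returns for some expression.
        let source_ptr = Source { file_id: FileId(0), ast: NodePtr { start: 4, end: 10 } };
        // Resolve against the root of the file the expression actually came from;
        // carrying `file_id` along is what keeps this correct under macro expansion.
        let root = parse_root(source_ptr.file_id);
        let node = source_ptr.ast.to_node(&root);
        assert_eq!(node, "foo(y)");
        println!("diagnostic would attach to `{}` in {:?}", node, source_ptr.file_id);
    }
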
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index e5f4d11a6..fdbe5e8b0 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -228,7 +228,7 @@ impl SourceAnalyzer {
         let scopes = db.expr_scopes(def);
         let scope = match offset {
             None => scope_for(&scopes, &source_map, &node),
-            Some(offset) => scope_for_offset(&scopes, &source_map, offset),
+            Some(offset) => scope_for_offset(&scopes, &source_map, file_id.into(), offset),
         };
         let resolver = expr::resolver_for_scope(def.body(db), db, scope);
         SourceAnalyzer {
@@ -330,6 +330,7 @@ impl SourceAnalyzer {
                     .body_source_map
                     .as_ref()?
                     .pat_syntax(it)?
+                    .ast // FIXME: ignoring file_id here is definitely wrong
                     .map_a(|ptr| ptr.cast::<ast::BindPat>().unwrap());
                 PathResolution::LocalBinding(pat_ptr)
             }
@@ -354,7 +355,7 @@ impl SourceAnalyzer {
         ret.and_then(|entry| {
             Some(ScopeEntryWithSyntax {
                 name: entry.name().clone(),
-                ptr: source_map.pat_syntax(entry.pat())?,
+                ptr: source_map.pat_syntax(entry.pat())?.ast,
             })
         })
     }
@@ -470,20 +471,27 @@ fn scope_for(
 fn scope_for_offset(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
+    file_id: HirFileId,
     offset: TextUnit,
 ) -> Option<ScopeId> {
     scopes
         .scope_by_expr()
         .iter()
         .filter_map(|(id, scope)| {
-            let ast_ptr = source_map.expr_syntax(*id)?.a()?;
-            Some((ast_ptr.syntax_node_ptr(), scope))
+            let source = source_map.expr_syntax(*id)?;
+            // FIXME: correctly handle macro expansion
+            if source.file_id != file_id {
+                return None;
+            }
+            let syntax_node_ptr =
+                source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
+            Some((syntax_node_ptr, scope))
         })
         // find containing scope
         .min_by_key(|(ptr, _scope)| {
             (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
         })
-        .map(|(ptr, scope)| adjust(scopes, source_map, ptr, offset).unwrap_or(*scope))
+        .map(|(ptr, scope)| adjust(scopes, source_map, ptr, file_id, offset).unwrap_or(*scope))
 }
 
 // XXX: during completion, cursor might be outside of any particular
@@ -492,6 +500,7 @@ fn adjust(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
     ptr: SyntaxNodePtr,
+    file_id: HirFileId,
     offset: TextUnit,
 ) -> Option<ScopeId> {
     let r = ptr.range();
@@ -499,8 +508,14 @@ fn adjust(
         .scope_by_expr()
         .iter()
         .filter_map(|(id, scope)| {
-            let ast_ptr = source_map.expr_syntax(*id)?.a()?;
-            Some((ast_ptr.syntax_node_ptr(), scope))
+            let source = source_map.expr_syntax(*id)?;
+            // FIXME: correctly handle macro expansion
+            if source.file_id != file_id {
+                return None;
+            }
+            let syntax_node_ptr =
+                source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
+            Some((syntax_node_ptr, scope))
         })
         .map(|(ptr, scope)| (ptr.range(), scope))
         .filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
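
Both `scope_for_offset` and `adjust` now take the `HirFileId` of the file the cursor is in and skip any expression recorded for a different file, since text offsets are only comparable within a single file (the FIXME notes that macro expansion still needs a real answer). A condensed sketch of the filter-then-innermost-containing-range selection, using simplified stand-in types:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct FileId(u32);

    #[derive(Debug, Clone, Copy)]
    struct Range {
        start: u32,
        end: u32,
    }

    impl Range {
        fn len(self) -> u32 {
            self.end - self.start
        }
        fn contains(self, offset: u32) -> bool {
            self.start <= offset && offset <= self.end
        }
    }

    // (file the expression lives in, its text range, an opaque scope id)
    type Entry = (FileId, Range, u32);

    // Pick the innermost scope whose range contains `offset`, ignoring entries
    // from other files (e.g. macro-expanded code), mirroring the diff's FIXME.
    fn scope_for_offset(entries: &[Entry], file_id: FileId, offset: u32) -> Option<u32> {
        entries
            .iter()
            .filter(|(f, _, _)| *f == file_id)
            .min_by_key(|(_, range, _)| (!range.contains(offset), range.len()))
            .map(|(_, _, scope)| *scope)
    }

    fn main() {
        let entries = [
            (FileId(0), Range { start: 0, end: 100 }, 1), // whole body
            (FileId(0), Range { start: 10, end: 30 }, 2), // inner block
            (FileId(1), Range { start: 0, end: 5 }, 3),   // from a macro file: ignored
        ];
        assert_eq!(scope_for_offset(&entries, FileId(0), 20), Some(2));
        assert_eq!(scope_for_offset(&entries, FileId(0), 50), Some(1));
    }

In the real code the candidates come from `scopes.scope_by_expr()` and the pointer ranges, but the selection is the same `min_by_key` over (does-not-contain, range length).
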
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index d344ab12e..cde9801f6 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -2793,6 +2793,10 @@ fn main() {
 }
 "#),
     @r###"
+    ![0; 17) '{Foo(v...,2,])}': Foo
+    ![1; 4) 'Foo': Foo({unknown}) -> Foo
+    ![1; 16) 'Foo(vec![1,2,])': Foo
+    ![5; 15) 'vec![1,2,]': {unknown}
     [156; 182) '{ ...,2); }': ()
     [166; 167) 'x': Foo
     "###
@@ -3566,7 +3570,6 @@ fn infer(content: &str) -> String {
     let source_file = db.parse(file_id).ok().unwrap();
 
     let mut acc = String::new();
-    // acc.push_str("\n");
 
     let mut infer_def = |inference_result: Arc<InferenceResult>,
                          body_source_map: Arc<BodySourceMap>| {
@@ -3574,7 +3577,9 @@ fn infer(content: &str) -> String {
 
         for (pat, ty) in inference_result.type_of_pat.iter() {
             let syntax_ptr = match body_source_map.pat_syntax(pat) {
-                Some(sp) => sp.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()),
+                Some(sp) => {
+                    sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()))
+                }
                 None => continue,
             };
             types.push((syntax_ptr, ty));
@@ -3582,22 +3587,34 @@ fn infer(content: &str) -> String {
 
         for (expr, ty) in inference_result.type_of_expr.iter() {
             let syntax_ptr = match body_source_map.expr_syntax(expr) {
-                Some(sp) => sp.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()),
+                Some(sp) => {
+                    sp.map(|ast| ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()))
+                }
                 None => continue,
             };
             types.push((syntax_ptr, ty));
         }
 
         // sort ranges for consistency
-        types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end()));
-        for (syntax_ptr, ty) in &types {
-            let node = syntax_ptr.to_node(source_file.syntax());
+        types.sort_by_key(|(src_ptr, _)| (src_ptr.ast.range().start(), src_ptr.ast.range().end()));
+        for (src_ptr, ty) in &types {
+            let node = src_ptr.ast.to_node(&src_ptr.file_syntax(&db));
+
             let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
                 (self_param.self_kw_token().text_range(), "self".to_string())
             } else {
-                (syntax_ptr.range(), node.text().to_string().replace("\n", " "))
+                (src_ptr.ast.range(), node.text().to_string().replace("\n", " "))
             };
-            write!(acc, "{} '{}': {}\n", range, ellipsize(text, 15), ty.display(&db)).unwrap();
+            let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
+            write!(
+                acc,
+                "{}{} '{}': {}\n",
+                macro_prefix,
+                range,
+                ellipsize(text, 15),
+                ty.display(&db)
+            )
+            .unwrap();
         }
     };
 
diff --git a/crates/ra_syntax/src/ptr.rs b/crates/ra_syntax/src/ptr.rs
index 80e55d2aa..992034ef0 100644
--- a/crates/ra_syntax/src/ptr.rs
+++ b/crates/ra_syntax/src/ptr.rs
@@ -15,8 +15,9 @@ impl SyntaxNodePtr {
         SyntaxNodePtr { range: node.text_range(), kind: node.kind() }
     }
 
-    pub fn to_node(self, parent: &SyntaxNode) -> SyntaxNode {
-        successors(Some(parent.clone()), |node| {
+    pub fn to_node(self, root: &SyntaxNode) -> SyntaxNode {
+        assert!(root.parent().is_none());
+        successors(Some(root.clone()), |node| {
             node.children().find(|it| self.range.is_subrange(&it.text_range()))
         })
         .find(|it| it.text_range() == self.range && it.kind() == self.kind)
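
`to_node` now asserts that it is handed the root of the tree and walks down from there, at each step descending into the child whose range still contains the pointer's range, until it finds a node with the exact range (and, in the real code, kind). A self-contained sketch of that descent over a toy tree; `Node` and `NodePtr` below are invented stand-ins, not rowan's `SyntaxNode`/`SyntaxNodePtr`:

    // A toy syntax node: a text range plus children, enough to show the descent.
    #[derive(Debug)]
    struct Node {
        start: u32,
        end: u32,
        children: Vec<Node>,
    }

    #[derive(Debug, Clone, Copy)]
    struct NodePtr {
        start: u32,
        end: u32,
    }

    impl NodePtr {
        // Mirror of `SyntaxNodePtr::to_node`: starting at the root, keep stepping
        // into the child whose range is a superrange of the pointer, until the
        // ranges match exactly.
        fn to_node(self, root: &Node) -> Option<&Node> {
            let mut current = root;
            loop {
                if current.start == self.start && current.end == self.end {
                    return Some(current);
                }
                match current
                    .children
                    .iter()
                    .find(|c| c.start <= self.start && self.end <= c.end)
                {
                    Some(child) => current = child,
                    None => return None,
                }
            }
        }
    }

    fn main() {
        let leaf = Node { start: 12, end: 15, children: vec![] };
        let right = Node { start: 10, end: 20, children: vec![leaf] };
        let left = Node { start: 0, end: 10, children: vec![] };
        let root = Node { start: 0, end: 20, children: vec![left, right] };

        let ptr = NodePtr { start: 12, end: 15 };
        let node = ptr.to_node(&root).expect("pointer should resolve against the tree root");
        assert_eq!((node.start, node.end), (12, 15));
    }

Starting from the root (rather than an arbitrary ancestor) is what makes the pointer resolvable against any file's syntax tree, which is exactly what `Source::file_syntax` provides elsewhere in this commit.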