author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-11-15 23:12:59 +0000
committer  GitHub <[email protected]>  2019-11-15 23:12:59 +0000
commit     d9d99369b2765eaef7f49cd519990769191c3381 (patch)
tree       5685e9db16e4a35c7ff3158d5c09becf9a45b31d /crates
parent     69f3b01dc5dd70d9bdf6de4d859ad593a689395d (diff)
parent     d898ecb8f2c19eb041bcb27c7ce9edd9d891f2c2 (diff)
Merge #2271
2271: Force passing Source when creating a SourceAnalyzer r=matklad a=matklad
Co-authored-by: Aleksey Kladov <[email protected]>
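
In short, SourceAnalyzer::new no longer takes a FileId and a &SyntaxNode as separate arguments; callers must bundle them into a hir::Source first. A minimal caller-side sketch of the new call shape (the helper name analyzer_for and the exact import paths are illustrative, not part of this patch):

    use hir::db::HirDatabase;
    use ra_db::FileId;
    use ra_syntax::SyntaxNode;

    // Hypothetical helper showing the new call shape.
    fn analyzer_for(db: &impl HirDatabase, file_id: FileId, node: &SyntaxNode) -> hir::SourceAnalyzer {
        // Before this change: hir::SourceAnalyzer::new(db, file_id, node, None)
        hir::SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None)
    }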
Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs                      |  2
-rw-r--r--  crates/ra_hir/src/source_binder.rs                       | 51
-rw-r--r--  crates/ra_hir/src/ty/tests.rs                            | 17
-rw-r--r--  crates/ra_ide_api/src/call_info.rs                       |  6
-rw-r--r--  crates/ra_ide_api/src/completion/completion_context.rs   |  7
-rw-r--r--  crates/ra_ide_api/src/goto_type_definition.rs            |  3
-rw-r--r--  crates/ra_ide_api/src/hover.rs                           |  3
-rw-r--r--  crates/ra_ide_api/src/inlay_hints.rs                     |  7
-rw-r--r--  crates/ra_ide_api/src/references/classify.rs             |  3
-rw-r--r--  crates/ra_syntax/src/lib.rs                              |  6
10 files changed, 51 insertions, 54 deletions
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 71f7ce1b1..0ea84d548 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -117,7 +117,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         node: &SyntaxNode,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        SourceAnalyzer::new(self.db, self.frange.file_id, node, offset)
+        SourceAnalyzer::new(self.db, hir::Source::new(self.frange.file_id.into(), node), offset)
     }
 
     pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 540ddd0b5..5764dc26d 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -12,7 +12,6 @@ use hir_def::{
     path::known,
 };
 use hir_expand::{name::AsName, Source};
-use ra_db::FileId;
 use ra_syntax::{
     ast::{self, AstNode},
     match_ast, AstPtr,
@@ -30,38 +29,32 @@ use crate::{
     HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,
 };
 
-fn try_get_resolver_for_node(
-    db: &impl HirDatabase,
-    file_id: FileId,
-    node: &SyntaxNode,
-) -> Option<Resolver> {
+fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
     match_ast! {
-        match node {
+        match (node.ast) {
             ast::Module(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(crate::Module::from_declaration(db, src)?.resolver(db))
             },
             ast::SourceFile(it) => {
-                let src =
-                    crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(it) };
+                let src = node.with_ast(crate::ModuleSource::SourceFile(it));
                 Some(crate::Module::from_definition(db, src)?.resolver(db))
            },
             ast::StructDef(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(Struct::from_source(db, src)?.resolver(db))
             },
             ast::EnumDef(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(Enum::from_source(db, src)?.resolver(db))
             },
-            _ => {
-                if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
-                    Some(def_with_body_from_child_node(db, Source::new(file_id.into(), node))?.resolver(db))
-                } else {
-                    // FIXME add missing cases
-                    None
+            _ => match node.ast.kind() {
+                FN_DEF | CONST_DEF | STATIC_DEF => {
+                    Some(def_with_body_from_child_node(db, node)?.resolver(db))
                 }
-            },
+                // FIXME add missing cases
+                _ => None
+            }
         }
     }
 }
@@ -90,7 +83,6 @@ fn def_with_body_from_child_node(
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
 pub struct SourceAnalyzer {
-    // FIXME: this doesn't handle macros at all
     file_id: HirFileId,
     resolver: Resolver,
     body_owner: Option<DefWithBody>,
@@ -137,20 +129,16 @@ pub struct ReferenceDescriptor {
 impl SourceAnalyzer {
     pub fn new(
         db: &impl HirDatabase,
-        file_id: FileId,
-        node: &SyntaxNode,
+        node: Source<&SyntaxNode>,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let node_source = Source::new(file_id.into(), node);
-        let def_with_body = def_with_body_from_child_node(db, node_source);
+        let def_with_body = def_with_body_from_child_node(db, node);
         if let Some(def) = def_with_body {
             let source_map = def.body_source_map(db);
             let scopes = def.expr_scopes(db);
             let scope = match offset {
-                None => scope_for(&scopes, &source_map, node_source),
-                Some(offset) => {
-                    scope_for_offset(&scopes, &source_map, Source::new(file_id.into(), offset))
-                }
+                None => scope_for(&scopes, &source_map, node),
+                Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)),
             };
             let resolver = expr::resolver_for_scope(db, def, scope);
             SourceAnalyzer {
@@ -159,19 +147,20 @@ impl SourceAnalyzer {
                 body_source_map: Some(source_map),
                 infer: Some(def.infer(db)),
                 scopes: Some(scopes),
-                file_id: file_id.into(),
+                file_id: node.file_id,
             }
         } else {
             SourceAnalyzer {
                 resolver: node
+                    .ast
                     .ancestors()
-                    .find_map(|node| try_get_resolver_for_node(db, file_id, &node))
+                    .find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
                     .unwrap_or_default(),
                 body_owner: None,
                 body_source_map: None,
                 infer: None,
                 scopes: None,
-                file_id: file_id.into(),
+                file_id: node.file_id,
             }
         }
     }
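
For reference, the Source value that SourceAnalyzer::new now receives pairs a payload with the file it came from; with_ast keeps the file id and swaps the payload, which is how the match arms above rewrap `it`. A rough, self-contained sketch inferred from its use in this diff (not the exact hir_expand definition; the u32 file id stands in for HirFileId):

    // Sketch only: approximates the shape of hir_expand::Source<T> as used above.
    #[derive(Clone, Copy, Debug)]
    pub struct Source<T> {
        pub file_id: u32, // stand-in for HirFileId
        pub ast: T,
    }

    impl<T> Source<T> {
        pub fn new(file_id: u32, ast: T) -> Source<T> {
            Source { file_id, ast }
        }
        // Keep the file id, replace the payload.
        pub fn with_ast<U>(&self, ast: U) -> Source<U> {
            Source { file_id: self.file_id, ast }
        }
    }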
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index fe9346c78..9a26e02fa 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -1,3 +1,6 @@
+mod never_type;
+mod coercion;
+
 use std::fmt::Write;
 use std::sync::Arc;
 
@@ -11,7 +14,7 @@ use ra_syntax::{
 use test_utils::covers;
 
 use crate::{
-    expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult,
+    expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
     SourceAnalyzer,
 };
 
@@ -19,9 +22,6 @@ use crate::{
 // against snapshots of the expected results using insta. Use cargo-insta to
 // update the snapshots.
 
-mod never_type;
-mod coercion;
-
 #[test]
 fn cfg_impl_block() {
     let (db, pos) = TestDB::with_position(
@@ -4609,7 +4609,8 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
 fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
     let file = db.parse(pos.file_id).ok().unwrap();
     let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
-    let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
+    let analyzer =
+        SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
     let ty = analyzer.type_of(db, &expr).unwrap();
     ty.display(db).to_string()
 }
@@ -4674,7 +4675,7 @@ fn infer(content: &str) -> String {
 
     for node in source_file.syntax().descendants() {
         if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
-            let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
+            let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
             infer_def(analyzer.inference_result(), analyzer.body_source_map());
         }
     }
@@ -4715,7 +4716,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     let file = db.parse(pos.file_id).ok().unwrap();
     let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
     let events = db.log_executed(|| {
-        SourceAnalyzer::new(&db, pos.file_id, &node, None);
+        SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
     });
     assert!(format!("{:?}", events).contains("infer"))
 }
@@ -4735,7 +4736,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     let file = db.parse(pos.file_id).ok().unwrap();
     let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
     let events = db.log_executed(|| {
-        SourceAnalyzer::new(&db, pos.file_id, &node, None);
+        SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
     });
     assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
 }
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
index 3572825b5..41ee81511 100644
--- a/crates/ra_ide_api/src/call_info.rs
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -19,7 +19,11 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
     let name_ref = calling_node.name_ref()?;
 
-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
+    let analyzer = hir::SourceAnalyzer::new(
+        db,
+        hir::Source::new(position.file_id.into(), name_ref.syntax()),
+        None,
+    );
     let (mut call_info, has_self) = match &calling_node {
         FnCallNode::CallExpr(expr) => {
             //FIXME: apply subst
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index 64cbc0f98..0906a4e1b 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -58,8 +58,11 @@ impl<'a> CompletionContext<'a> {
         );
         let token =
             original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
-        let analyzer =
-            hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
+        let analyzer = hir::SourceAnalyzer::new(
+            db,
+            hir::Source::new(position.file_id.into(), &token.parent()),
+            Some(position.offset),
+        );
         let mut ctx = CompletionContext {
             db,
             analyzer,
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs
index 71146591d..2327cb1e7 100644
--- a/crates/ra_ide_api/src/goto_type_definition.rs
+++ b/crates/ra_ide_api/src/goto_type_definition.rs
@@ -18,7 +18,8 @@ pub(crate) fn goto_type_definition(
             .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
     })?;
 
-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);
+    let analyzer =
+        hir::SourceAnalyzer::new(db, hir::Source::new(position.file_id.into(), &node), None);
 
     let ty: hir::Ty = if let Some(ty) =
         ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index 07d511fb3..92b4b1f79 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -230,7 +230,8 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
         .ancestors()
         .take_while(|it| it.text_range() == leaf_node.text_range())
         .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
-    let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
+    let analyzer =
+        hir::SourceAnalyzer::new(db, hir::Source::new(frange.file_id.into(), &node), None);
     let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
     {
         ty
diff --git a/crates/ra_ide_api/src/inlay_hints.rs b/crates/ra_ide_api/src/inlay_hints.rs
index 2ff10b89a..0cd959848 100644
--- a/crates/ra_ide_api/src/inlay_hints.rs
+++ b/crates/ra_ide_api/src/inlay_hints.rs
@@ -32,6 +32,7 @@ fn get_inlay_hints(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<Vec<InlayHint>> {
+    let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None);
     match_ast! {
         match node {
             ast::LetStmt(it) => {
@@ -39,11 +40,9 @@ fn get_inlay_hints(
                     return None;
                 }
                 let pat = it.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, false))
             },
             ast::LambdaExpr(it) => {
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 it.param_list().map(|param_list| {
                     param_list
                         .params()
@@ -56,21 +55,17 @@ fn get_inlay_hints(
             },
             ast::ForExpr(it) => {
                 let pat = it.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, false))
             },
             ast::IfExpr(it) => {
                 let pat = it.condition()?.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, true))
             },
             ast::WhileExpr(it) => {
                 let pat = it.condition()?.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, true))
             },
             ast::MatchArmList(it) => {
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(
                     it
                         .arms()
diff --git a/crates/ra_ide_api/src/references/classify.rs b/crates/ra_ide_api/src/references/classify.rs
index 0eeaa7f38..f12b58cb9 100644
--- a/crates/ra_ide_api/src/references/classify.rs
+++ b/crates/ra_ide_api/src/references/classify.rs
@@ -129,7 +129,8 @@ pub(crate) fn classify_name_ref(
     let _p = profile("classify_name_ref");
 
     let parent = name_ref.syntax().parent()?;
-    let analyzer = SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
+    let analyzer =
+        SourceAnalyzer::new(db, hir::Source::new(file_id.into(), name_ref.syntax()), None);
 
     if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
         tested_by!(goto_definition_works_for_methods);
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs
index 5dcb6a95a..9931fec84 100644
--- a/crates/ra_syntax/src/lib.rs
+++ b/crates/ra_syntax/src/lib.rs
@@ -176,9 +176,11 @@ impl SourceFile {
 /// ```
 #[macro_export]
 macro_rules! match_ast {
-    (match $node:ident {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
         $( ast::$ast:ident($it:ident) => $res:block, )*
-        _ => $catch_all:expr,
+        _ => $catch_all:expr $(,)?
     }) => {{
         $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
         { $catch_all }
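
The match_ast! change above adds a second arm so the macro accepts an arbitrary expression (for example `node.ast` in source_binder.rs), not just a bare identifier, and `$(,)?` makes the trailing comma after the catch-all arm optional. A minimal usage sketch, assuming the ra_syntax crate from this tree is in scope; declared_name is a hypothetical helper, not part of this patch:

    use ra_syntax::{ast, ast::NameOwner, match_ast, SyntaxNode};

    // Any expression can now appear in the parenthesized form of the macro.
    fn declared_name(node: &SyntaxNode) -> Option<ast::Name> {
        match_ast! {
            match (node) {
                ast::Module(it) => { it.name() },
                ast::StructDef(it) => { it.name() },
                _ => None
            }
        }
    }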