Diffstat (limited to 'crates/ra_hir')
 crates/ra_hir/src/source_binder.rs | 51
 crates/ra_hir/src/ty/tests.rs      | 17
 2 files changed, 29 insertions, 39 deletions
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 540ddd0b5..5764dc26d 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -12,7 +12,6 @@ use hir_def::{
     path::known,
 };
 use hir_expand::{name::AsName, Source};
-use ra_db::FileId;
 use ra_syntax::{
     ast::{self, AstNode},
     match_ast, AstPtr,
@@ -30,38 +29,32 @@ use crate::{
     HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,
 };
 
-fn try_get_resolver_for_node(
-    db: &impl HirDatabase,
-    file_id: FileId,
-    node: &SyntaxNode,
-) -> Option<Resolver> {
+fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
     match_ast! {
-        match node {
+        match (node.ast) {
             ast::Module(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(crate::Module::from_declaration(db, src)?.resolver(db))
             },
             ast::SourceFile(it) => {
-                let src =
-                    crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(it) };
+                let src = node.with_ast(crate::ModuleSource::SourceFile(it));
                 Some(crate::Module::from_definition(db, src)?.resolver(db))
             },
             ast::StructDef(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(Struct::from_source(db, src)?.resolver(db))
             },
             ast::EnumDef(it) => {
-                let src = crate::Source { file_id: file_id.into(), ast: it };
+                let src = node.with_ast(it);
                 Some(Enum::from_source(db, src)?.resolver(db))
             },
-            _ => {
-                if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
-                    Some(def_with_body_from_child_node(db, Source::new(file_id.into(), node))?.resolver(db))
-                } else {
-                    // FIXME add missing cases
-                    None
+            _ => match node.ast.kind() {
+                FN_DEF | CONST_DEF | STATIC_DEF => {
+                    Some(def_with_body_from_child_node(db, node)?.resolver(db))
                 }
-            },
+                // FIXME add missing cases
+                _ => None
+            }
         }
     }
 }
@@ -90,7 +83,6 @@ fn def_with_body_from_child_node(
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
 pub struct SourceAnalyzer {
-    // FIXME: this doesn't handle macros at all
     file_id: HirFileId,
     resolver: Resolver,
     body_owner: Option<DefWithBody>,
@@ -137,20 +129,16 @@ pub struct ReferenceDescriptor {
 impl SourceAnalyzer {
     pub fn new(
         db: &impl HirDatabase,
-        file_id: FileId,
-        node: &SyntaxNode,
+        node: Source<&SyntaxNode>,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let node_source = Source::new(file_id.into(), node);
-        let def_with_body = def_with_body_from_child_node(db, node_source);
+        let def_with_body = def_with_body_from_child_node(db, node);
         if let Some(def) = def_with_body {
             let source_map = def.body_source_map(db);
             let scopes = def.expr_scopes(db);
             let scope = match offset {
-                None => scope_for(&scopes, &source_map, node_source),
-                Some(offset) => {
-                    scope_for_offset(&scopes, &source_map, Source::new(file_id.into(), offset))
-                }
+                None => scope_for(&scopes, &source_map, node),
+                Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)),
             };
             let resolver = expr::resolver_for_scope(db, def, scope);
             SourceAnalyzer {
@@ -159,19 +147,20 @@ impl SourceAnalyzer {
                 body_source_map: Some(source_map),
                 infer: Some(def.infer(db)),
                 scopes: Some(scopes),
-                file_id: file_id.into(),
+                file_id: node.file_id,
             }
         } else {
             SourceAnalyzer {
                 resolver: node
+                    .ast
                     .ancestors()
-                    .find_map(|node| try_get_resolver_for_node(db, file_id, &node))
+                    .find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
                     .unwrap_or_default(),
                 body_owner: None,
                 body_source_map: None,
                 infer: None,
                 scopes: None,
-                file_id: file_id.into(),
+                file_id: node.file_id,
             }
         }
     }
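
Note: after this change, SourceAnalyzer::new and try_get_resolver_for_node no longer take a FileId plus a bare &SyntaxNode; callers pass a single Source<&SyntaxNode> that carries both. A minimal sketch of an adapted call site, assuming `db`, `file_id`, and `node` are already in scope (variable names are illustrative, not from the patch):

    // Before: SourceAnalyzer::new(db, file_id, &node, None)
    // After: bundle the node with its originating file into a Source first.
    let src = Source::new(file_id.into(), &node);
    let analyzer = SourceAnalyzer::new(db, src, None);
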
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index fe9346c78..9a26e02fa 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -1,3 +1,6 @@
+mod never_type;
+mod coercion;
+
 use std::fmt::Write;
 use std::sync::Arc;
 
@@ -11,7 +14,7 @@ use ra_syntax::{
 use test_utils::covers;
 
 use crate::{
-    expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult,
+    expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
     SourceAnalyzer,
 };
 
@@ -19,9 +22,6 @@ use crate::{
 // against snapshots of the expected results using insta. Use cargo-insta to
 // update the snapshots.
 
-mod never_type;
-mod coercion;
-
 #[test]
 fn cfg_impl_block() {
     let (db, pos) = TestDB::with_position(
@@ -4609,7 +4609,8 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
 fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
     let file = db.parse(pos.file_id).ok().unwrap();
     let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
-    let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
+    let analyzer =
+        SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
     let ty = analyzer.type_of(db, &expr).unwrap();
     ty.display(db).to_string()
 }
@@ -4674,7 +4675,7 @@ fn infer(content: &str) -> String {
 
     for node in source_file.syntax().descendants() {
         if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
-            let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
+            let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
             infer_def(analyzer.inference_result(), analyzer.body_source_map());
         }
     }
@@ -4715,7 +4716,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
         let file = db.parse(pos.file_id).ok().unwrap();
         let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
-            SourceAnalyzer::new(&db, pos.file_id, &node, None);
+            SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
         });
         assert!(format!("{:?}", events).contains("infer"))
     }
@@ -4735,7 +4736,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
         let file = db.parse(pos.file_id).ok().unwrap();
         let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
         let events = db.log_executed(|| {
-            SourceAnalyzer::new(&db, pos.file_id, &node, None);
+            SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
         });
         assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
     }
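
Note: the patch leans on two Source helpers that appear throughout the hunks above: Source::new(file_id, ast) to build a wrapper, and with_ast to re-wrap a new payload under the same file_id. A rough sketch of the shape of with_ast, assuming Source is the { file_id, ast } pair from hir_expand (the authoritative definition lives there, not in this diff):

    // Illustrative only: keep the original file_id, swap in a new payload.
    impl<T> Source<T> {
        pub fn with_ast<U>(&self, ast: U) -> Source<U> {
            Source { file_id: self.file_id, ast }
        }
    }
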