author     Aleksey Kladov <[email protected]>  2020-02-18 17:35:10 +0000
committer  Aleksey Kladov <[email protected]>  2020-02-26 11:55:50 +0000
commit     c3a4c4429de83450654795534e64e878a774a088 (patch)
tree       12d89798f61b276f8bd640db07276a7d4e92b1c2 /crates
parent     04deae3dba7c9b7054f7a1d64e4b93a05aecc132 (diff)
Refactor primary IDE API
This introduces the new type -- Semantics. Semantics maps SyntaxNodes to various kinds of semantic info, such as types, name resolution, or macro expansions. To do so, Semantics maintains a HashMap which maps every node it has seen to the file from which that node originated. This is enough to get all the necessary hir bits just from syntax.
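
For orientation, here is a minimal sketch of how a client such as an assist is expected to use the new API after this change. It is assembled from the call sites touched in this diff; the wrapper function and its name are illustrative only and not part of the commit:

    use hir::Semantics;
    use ra_db::FileId;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode};

    // Hypothetical helper: report the inferred type of the first expression in a file.
    fn first_expr_type(db: &RootDatabase, file_id: FileId) -> Option<String> {
        // One Semantics per analysis "session"; it records which file every
        // syntax tree it hands out came from.
        let sema = Semantics::new(db);
        // Parse through Semantics so the returned tree lands in its node -> file cache.
        let source_file = sema.parse(file_id);
        // Nodes taken from that tree can now be queried for semantic info directly,
        // without constructing a SourceAnalyzer by hand.
        let expr = source_file.syntax().descendants().find_map(ast::Expr::cast)?;
        let ty = sema.type_of_expr(&expr)?;
        Some(ty.display(db).to_string())
    }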
Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs | 40
-rw-r--r--  crates/ra_assists/src/ast_transform.rs | 65
-rw-r--r--  crates/ra_assists/src/handlers/add_explicit_type.rs | 5
-rw-r--r--  crates/ra_assists/src/handlers/add_missing_impl_members.rs | 29
-rw-r--r--  crates/ra_assists/src/handlers/add_new.rs | 11
-rw-r--r--  crates/ra_assists/src/handlers/auto_import.rs | 42
-rw-r--r--  crates/ra_assists/src/handlers/fill_match_arms.rs | 23
-rw-r--r--  crates/ra_assists/src/handlers/inline_local_variable.rs | 3
-rw-r--r--  crates/ra_assists/src/lib.rs | 11
-rw-r--r--  crates/ra_assists/src/utils.rs | 24
-rw-r--r--  crates/ra_hir/src/lib.rs | 5
-rw-r--r--  crates/ra_hir/src/semantics.rs | 335
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs | 194
-rw-r--r--  crates/ra_hir/src/source_binder.rs | 161
-rw-r--r--  crates/ra_ide/src/call_hierarchy.rs | 42
-rw-r--r--  crates/ra_ide/src/call_info.rs | 27
-rw-r--r--  crates/ra_ide/src/completion.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_dot.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_macro_in_item_position.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_path.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_pattern.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_postfix.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_record_literal.rs | 5
-rw-r--r--  crates/ra_ide/src/completion/complete_record_pattern.rs | 5
-rw-r--r--  crates/ra_ide/src/completion/complete_scope.rs | 4
-rw-r--r--  crates/ra_ide/src/completion/complete_trait_impl.rs | 33
-rw-r--r--  crates/ra_ide/src/completion/completion_context.rs | 65
-rw-r--r--  crates/ra_ide/src/diagnostics.rs | 9
-rw-r--r--  crates/ra_ide/src/display/navigation_target.rs | 8
-rw-r--r--  crates/ra_ide/src/expand.rs | 102
-rw-r--r--  crates/ra_ide/src/expand_macro.rs | 29
-rw-r--r--  crates/ra_ide/src/extend_selection.rs | 51
-rw-r--r--  crates/ra_ide/src/goto_definition.rs | 39
-rw-r--r--  crates/ra_ide/src/goto_type_definition.rs | 36
-rw-r--r--  crates/ra_ide/src/hover.rs | 45
-rw-r--r--  crates/ra_ide/src/impls.rs | 49
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs | 48
-rw-r--r--  crates/ra_ide/src/lib.rs | 5
-rw-r--r--  crates/ra_ide/src/marks.rs | 1
-rw-r--r--  crates/ra_ide/src/parent_module.rs | 17
-rw-r--r--  crates/ra_ide/src/references.rs | 130
-rw-r--r--  crates/ra_ide/src/references/classify.rs | 30
-rw-r--r--  crates/ra_ide/src/references/rename.rs | 28
-rw-r--r--  crates/ra_ide/src/runnables.rs | 50
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html | 12
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs | 87
-rw-r--r--  crates/ra_ide_db/src/defs.rs | 50
-rw-r--r--  crates/ra_ide_db/src/imports_locator.rs | 26
-rw-r--r--  crates/ra_syntax/src/algo.rs | 7
49 files changed, 1027 insertions, 979 deletions
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 5aab5fb8b..c25d2e323 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -1,6 +1,6 @@
 //! This module defines `AssistCtx` -- the API surface that is exposed to assists.
-use hir::{InFile, SourceAnalyzer, SourceBinder};
-use ra_db::{FileRange, SourceDatabase};
+use hir::Semantics;
+use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
@@ -74,29 +74,23 @@ pub(crate) type AssistHandler = fn(AssistCtx) -> Option<Assist>;
 /// Note, however, that we don't actually use such two-phase logic at the
 /// moment, because the LSP API is pretty awkward in this place, and it's much
 /// easier to just compute the edit eagerly :-)
-#[derive(Debug)]
+#[derive(Clone)]
 pub(crate) struct AssistCtx<'a> {
+    pub(crate) sema: &'a Semantics<'a, RootDatabase>,
     pub(crate) db: &'a RootDatabase,
     pub(crate) frange: FileRange,
     source_file: SourceFile,
     should_compute_edit: bool,
 }

-impl Clone for AssistCtx<'_> {
-    fn clone(&self) -> Self {
-        AssistCtx {
-            db: self.db,
-            frange: self.frange,
-            source_file: self.source_file.clone(),
-            should_compute_edit: self.should_compute_edit,
-        }
-    }
-}
-
 impl<'a> AssistCtx<'a> {
-    pub fn new(db: &RootDatabase, frange: FileRange, should_compute_edit: bool) -> AssistCtx {
-        let parse = db.parse(frange.file_id);
-        AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit }
+    pub fn new(
+        sema: &'a Semantics<'a, RootDatabase>,
+        frange: FileRange,
+        should_compute_edit: bool,
+    ) -> AssistCtx<'a> {
+        let source_file = sema.parse(frange.file_id);
+        AssistCtx { sema, db: sema.db, frange, source_file, should_compute_edit }
     }

     pub(crate) fn add_assist(
@@ -138,18 +132,6 @@ impl<'a> AssistCtx<'a> {
     pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }
-    pub(crate) fn source_binder(&self) -> SourceBinder<'a, RootDatabase> {
-        SourceBinder::new(self.db)
-    }
-    pub(crate) fn source_analyzer(
-        &self,
-        node: &SyntaxNode,
-        offset: Option<TextUnit>,
-    ) -> SourceAnalyzer {
-        let src = InFile::new(self.frange.file_id.into(), node);
-        self.source_binder().analyze(src, offset)
-    }
-
     pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
     }
diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs
index c6d15af5f..7846e9798 100644
--- a/crates/ra_assists/src/ast_transform.rs
+++ b/crates/ra_assists/src/ast_transform.rs
@@ -1,15 +1,12 @@
 //! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined.
 use rustc_hash::FxHashMap;

-use hir::{InFile, PathResolution};
+use hir::{PathResolution, SemanticsScope};
 use ra_ide_db::RootDatabase;
 use ra_syntax::ast::{self, AstNode};

 pub trait AstTransform<'a> {
-    fn get_substitution(
-        &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode>;
+    fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;

     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a>;
     fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a>
@@ -23,10 +20,7 @@ pub trait AstTransform<'a> {
 struct NullTransformer;

 impl<'a> AstTransform<'a> for NullTransformer {
-    fn get_substitution(
-        &self,
-        _node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode> {
+    fn get_substitution(&self, _node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
         None
     }
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -35,14 +29,16 @@ impl<'a> AstTransform<'a> for NullTransformer {
 }

 pub struct SubstituteTypeParams<'a> {
-    db: &'a RootDatabase,
+    source_scope: &'a SemanticsScope<'a, RootDatabase>,
     substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }

 impl<'a> SubstituteTypeParams<'a> {
     pub fn for_trait_impl(
+        source_scope: &'a SemanticsScope<'a, RootDatabase>,
         db: &'a RootDatabase,
+        // FIXME: there's implicit invariant that `trait_` and `source_scope` match...
         trait_: hir::Trait,
         impl_block: ast::ImplBlock,
     ) -> SubstituteTypeParams<'a> {
@@ -56,7 +52,7 @@ impl<'a> SubstituteTypeParams<'a> {
             .zip(substs.into_iter())
             .collect();
         return SubstituteTypeParams {
-            db,
+            source_scope,
             substs: substs_by_param,
             previous: Box::new(NullTransformer),
         };
@@ -80,15 +76,15 @@ impl<'a> SubstituteTypeParams<'a> {
     }
     fn get_substitution_inner(
         &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
+        node: &ra_syntax::SyntaxNode,
     ) -> Option<ra_syntax::SyntaxNode> {
-        let type_ref = ast::TypeRef::cast(node.value.clone())?;
+        let type_ref = ast::TypeRef::cast(node.clone())?;
         let path = match &type_ref {
             ast::TypeRef::PathType(path_type) => path_type.path()?,
             _ => return None,
         };
-        let analyzer = hir::SourceAnalyzer::new(self.db, node, None);
-        let resolution = analyzer.resolve_path(self.db, &path)?;
+        let path = hir::Path::from_ast(path)?;
+        let resolution = self.source_scope.resolve_hir_path(&path)?;
         match resolution {
             hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()),
             _ => None,
@@ -97,10 +93,7 @@ impl<'a> SubstituteTypeParams<'a> {
 }

 impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
-    fn get_substitution(
-        &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode> {
+    fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
         self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
     }
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -109,29 +102,34 @@ impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
 }

 pub struct QualifyPaths<'a> {
+    target_scope: &'a SemanticsScope<'a, RootDatabase>,
+    source_scope: &'a SemanticsScope<'a, RootDatabase>,
     db: &'a RootDatabase,
-    from: Option<hir::Module>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }

 impl<'a> QualifyPaths<'a> {
-    pub fn new(db: &'a RootDatabase, from: Option<hir::Module>) -> Self {
-        Self { db, from, previous: Box::new(NullTransformer) }
+    pub fn new(
+        target_scope: &'a SemanticsScope<'a, RootDatabase>,
+        source_scope: &'a SemanticsScope<'a, RootDatabase>,
+        db: &'a RootDatabase,
+    ) -> Self {
+        Self { target_scope, source_scope, db, previous: Box::new(NullTransformer) }
     }

     fn get_substitution_inner(
         &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
+        node: &ra_syntax::SyntaxNode,
     ) -> Option<ra_syntax::SyntaxNode> {
         // FIXME handle value ns?
-        let from = self.from?;
-        let p = ast::Path::cast(node.value.clone())?;
+        let from = self.target_scope.module()?;
+        let p = ast::Path::cast(node.clone())?;
         if p.segment().and_then(|s| s.param_list()).is_some() {
             // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
             return None;
         }
-        let analyzer = hir::SourceAnalyzer::new(self.db, node, None);
-        let resolution = analyzer.resolve_path(self.db, &p)?;
+        let hir_path = hir::Path::from_ast(p.clone());
+        let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
         match resolution {
             PathResolution::Def(def) => {
                 let found_path = from.find_use_path(self.db, def)?;
@@ -140,7 +138,7 @@ impl<'a> QualifyPaths<'a> {
                 let type_args = p
                     .segment()
                     .and_then(|s| s.type_arg_list())
-                    .map(|arg_list| apply(self, node.with_value(arg_list)));
+                    .map(|arg_list| apply(self, arg_list));
                 if let Some(type_args) = type_args {
                     let last_segment = path.segment().unwrap();
                     path = path.with_segment(last_segment.with_type_args(type_args))
@@ -157,11 +155,11 @@ impl<'a> QualifyPaths<'a> {
     }
 }

-pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>) -> N {
-    let syntax = node.value.syntax();
+pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
+    let syntax = node.syntax();
     let result = ra_syntax::algo::replace_descendants(syntax, &|element| match element {
         ra_syntax::SyntaxElement::Node(n) => {
-            let replacement = transformer.get_substitution(node.with_value(&n))?;
+            let replacement = transformer.get_substitution(&n)?;
             Some(replacement.into())
         }
         _ => None,
@@ -170,10 +168,7 @@ pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>
 }

 impl<'a> AstTransform<'a> for QualifyPaths<'a> {
-    fn get_substitution(
-        &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode> {
+    fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
         self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
     }
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
diff --git a/crates/ra_assists/src/handlers/add_explicit_type.rs b/crates/ra_assists/src/handlers/add_explicit_type.rs
index 2cb9d2f48..a63ef48b1 100644
--- a/crates/ra_assists/src/handlers/add_explicit_type.rs
+++ b/crates/ra_assists/src/handlers/add_explicit_type.rs
@@ -51,14 +51,13 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> {
         }
     }
     // Infer type
-    let db = ctx.db;
-    let analyzer = ctx.source_analyzer(stmt.syntax(), None);
-    let ty = analyzer.type_of(db, &expr)?;
+    let ty = ctx.sema.type_of_expr(&expr)?;
     // Assist not applicable if the type is unknown
     if ty.contains_unknown() {
         return None;
     }

+    let db = ctx.db;
     ctx.add_assist(
         AssistId("add_explicit_type"),
         format!("Insert explicit type '{}'", ty.display(db)),
diff --git a/crates/ra_assists/src/handlers/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
index ab21388c8..4005014bd 100644
--- a/crates/ra_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
@@ -1,4 +1,4 @@
-use hir::{HasSource, InFile};
+use hir::HasSource;
 use ra_syntax::{
     ast::{self, edit, make, AstNode, NameOwner},
     SmolStr,
@@ -104,9 +104,7 @@ fn add_missing_impl_members_inner(
     let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
     let impl_item_list = impl_node.item_list()?;

-    let analyzer = ctx.source_analyzer(impl_node.syntax(), None);
-
-    let trait_ = resolve_target_trait(ctx.db, &analyzer, &impl_node)?;
+    let trait_ = resolve_target_trait(&ctx.sema, &impl_node)?;

     let def_name = |item: &ast::ImplItem| -> Option<SmolStr> {
         match item {
@@ -117,7 +115,7 @@ fn add_missing_impl_members_inner(
             .map(|it| it.text().clone())
     };

-    let missing_items = get_missing_impl_items(ctx.db, &analyzer, &impl_node)
+    let missing_items = get_missing_impl_items(&ctx.sema, &impl_node)
         .iter()
         .map(|i| match i {
             hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value),
@@ -138,23 +136,17 @@ fn add_missing_impl_members_inner(
         return None;
     }

-    let db = ctx.db;
-    let file_id = ctx.frange.file_id;
-    let trait_file_id = trait_.source(db).file_id;
+    let sema = ctx.sema;

     ctx.add_assist(AssistId(assist_id), label, |edit| {
         let n_existing_items = impl_item_list.impl_items().count();
-        let module = hir::SourceAnalyzer::new(
-            db,
-            hir::InFile::new(file_id.into(), impl_node.syntax()),
-            None,
-        )
-        .module();
-        let ast_transform = QualifyPaths::new(db, module)
-            .or(SubstituteTypeParams::for_trait_impl(db, trait_, impl_node));
+        let source_scope = sema.scope_for_def(trait_);
+        let target_scope = sema.scope(impl_item_list.syntax());
+        let ast_transform = QualifyPaths::new(&target_scope, &source_scope, sema.db)
+            .or(SubstituteTypeParams::for_trait_impl(&source_scope, sema.db, trait_, impl_node));
         let items = missing_items
             .into_iter()
-            .map(|it| ast_transform::apply(&*ast_transform, InFile::new(trait_file_id, it)))
+            .map(|it| ast_transform::apply(&*ast_transform, it))
             .map(|it| match it {
                 ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)),
                 _ => it,
@@ -181,9 +173,10 @@ fn add_body(fn_def: ast::FnDef) -> ast::FnDef {

 #[cfg(test)]
 mod tests {
-    use super::*;
     use crate::helpers::{check_assist, check_assist_not_applicable};

+    use super::*;
+
     #[test]
     fn test_add_missing_impl_members() {
         check_assist(
diff --git a/crates/ra_assists/src/handlers/add_new.rs b/crates/ra_assists/src/handlers/add_new.rs
index dd070e8ec..166e907fb 100644
--- a/crates/ra_assists/src/handlers/add_new.rs
+++ b/crates/ra_assists/src/handlers/add_new.rs
@@ -1,5 +1,5 @@
 use format_buf::format;
-use hir::{Adt, InFile};
+use hir::Adt;
 use join_to_string::join;
 use ra_syntax::{
     ast::{
@@ -133,16 +133,11 @@ fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<a
     let module = strukt.syntax().ancestors().find(|node| {
         ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
     })?;
-    let mut sb = ctx.source_binder();

-    let struct_def = {
-        let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
-        sb.to_def(src)?
-    };
+    let struct_def = ctx.sema.to_def(strukt)?;

     let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
-        let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
-        let blk = sb.to_def(src)?;
+        let blk = ctx.sema.to_def(&impl_blk)?;

         // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
         // (we currently use the wrong type parameter)
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs
index c4aea2a06..edf0cf6d0 100644
--- a/crates/ra_assists/src/handlers/auto_import.rs
+++ b/crates/ra_assists/src/handlers/auto_import.rs
@@ -3,8 +3,8 @@ use crate::{
     insert_use_statement, AssistId,
 };
 use hir::{
-    db::HirDatabase, AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution,
-    SourceAnalyzer, Trait, Type,
+    AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
+    Type,
 };
 use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
 use ra_prof::profile;
@@ -78,14 +78,9 @@ impl AutoImportAssets {

     fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> {
         let syntax_under_caret = method_call.syntax().to_owned();
-        let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
-        let module_with_name_to_import = source_analyzer.module()?;
+        let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
         Some(Self {
-            import_candidate: ImportCandidate::for_method_call(
-                &method_call,
-                &source_analyzer,
-                ctx.db,
-            )?,
+            import_candidate: ImportCandidate::for_method_call(&ctx.sema, &method_call)?,
             module_with_name_to_import,
             syntax_under_caret,
         })
@@ -97,14 +92,9 @@ impl AutoImportAssets {
             return None;
         }

-        let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
-        let module_with_name_to_import = source_analyzer.module()?;
+        let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
         Some(Self {
-            import_candidate: ImportCandidate::for_regular_path(
-                &path_under_caret,
-                &source_analyzer,
-                ctx.db,
-            )?,
+            import_candidate: ImportCandidate::for_regular_path(&ctx.sema, &path_under_caret)?,
             module_with_name_to_import,
             syntax_under_caret,
         })
@@ -229,25 +219,23 @@ enum ImportCandidate {

 impl ImportCandidate {
     fn for_method_call(
+        sema: &Semantics<RootDatabase>,
         method_call: &ast::MethodCallExpr,
-        source_analyzer: &SourceAnalyzer,
-        db: &impl HirDatabase,
     ) -> Option<Self> {
-        if source_analyzer.resolve_method_call(method_call).is_some() {
+        if sema.resolve_method_call(method_call).is_some() {
             return None;
         }
         Some(Self::TraitMethod(
-            source_analyzer.type_of(db, &method_call.expr()?)?,
+            sema.type_of_expr(&method_call.expr()?)?,
             method_call.name_ref()?.syntax().to_string(),
         ))
     }

     fn for_regular_path(
+        sema: &Semantics<RootDatabase>,
         path_under_caret: &ast::Path,
-        source_analyzer: &SourceAnalyzer,
-        db: &impl HirDatabase,
     ) -> Option<Self> {
-        if source_analyzer.resolve_path(db, path_under_caret).is_some() {
+        if sema.resolve_path(path_under_caret).is_some() {
             return None;
         }

@@ -256,17 +244,15 @@ impl ImportCandidate {
             let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
             let qualifier_start_path =
                 qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
-            if let Some(qualifier_start_resolution) =
-                source_analyzer.resolve_path(db, &qualifier_start_path)
-            {
+            if let Some(qualifier_start_resolution) = sema.resolve_path(&qualifier_start_path) {
                 let qualifier_resolution = if qualifier_start_path == qualifier {
                     qualifier_start_resolution
                 } else {
-                    source_analyzer.resolve_path(db, &qualifier)?
+                    sema.resolve_path(&qualifier)?
                 };
                 if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution {
                     Some(ImportCandidate::TraitAssocItem(
-                        assoc_item_path.ty(db),
+                        assoc_item_path.ty(sema.db),
                         segment.syntax().to_string(),
                     ))
                 } else {
diff --git a/crates/ra_assists/src/handlers/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs
index ae2437ed3..e5d8c639d 100644
--- a/crates/ra_assists/src/handlers/fill_match_arms.rs
+++ b/crates/ra_assists/src/handlers/fill_match_arms.rs
@@ -2,10 +2,11 @@

 use std::iter;

-use hir::{db::HirDatabase, Adt, HasSource};
+use hir::{db::HirDatabase, Adt, HasSource, Semantics};
 use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner};

 use crate::{Assist, AssistCtx, AssistId};
+use ra_ide_db::RootDatabase;

 // Assist: fill_match_arms
 //
@@ -46,10 +47,9 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> {
     };

     let expr = match_expr.expr()?;
-    let (enum_def, module) = {
-        let analyzer = ctx.source_analyzer(expr.syntax(), None);
-        (resolve_enum_def(ctx.db, &analyzer, &expr)?, analyzer.module()?)
-    };
+    let enum_def = resolve_enum_def(&ctx.sema, &expr)?;
+    let module = ctx.sema.scope(expr.syntax()).module()?;
+
     let variants = enum_def.variants(ctx.db);
     if variants.is_empty() {
         return None;
@@ -81,18 +81,11 @@ fn is_trivial(arm: &ast::MatchArm) -> bool {
     }
 }

-fn resolve_enum_def(
-    db: &impl HirDatabase,
-    analyzer: &hir::SourceAnalyzer,
-    expr: &ast::Expr,
-) -> Option<hir::Enum> {
-    let expr_ty = analyzer.type_of(db, &expr)?;
-
-    let result = expr_ty.autoderef(db).find_map(|ty| match ty.as_adt() {
+fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<hir::Enum> {
+    sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(e),
         _ => None,
-    });
-    result
+    })
 }

 fn build_pat(
diff --git a/crates/ra_assists/src/handlers/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs
index 91b588243..53a72309b 100644
--- a/crates/ra_assists/src/handlers/inline_local_variable.rs
+++ b/crates/ra_assists/src/handlers/inline_local_variable.rs
@@ -44,8 +44,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
     } else {
         let_stmt.syntax().text_range()
     };
-    let analyzer = ctx.source_analyzer(bind_pat.syntax(), None);
-    let refs = analyzer.find_all_refs(&bind_pat);
+    let refs = ctx.sema.find_all_refs(&bind_pat);
     if refs.is_empty() {
         return None;
     };
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index 79fe43aa4..c28a9b92b 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -19,6 +19,7 @@ use ra_text_edit::TextEdit;

 pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
 pub use crate::handlers::replace_qualified_name_with_use::insert_use_statement;
+use hir::Semantics;

 /// Unique identifier of the assist, should not be shown to the user
 /// directly.
@@ -63,7 +64,8 @@ pub struct ResolvedAssist {
 /// Assists are returned in the "unresolved" state, that is only labels are
 /// returned, without actual edits.
 pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> {
-    let ctx = AssistCtx::new(db, range, false);
+    let sema = Semantics::new(db);
+    let ctx = AssistCtx::new(&sema, range, false);
     handlers::all()
         .iter()
         .filter_map(|f| f(ctx.clone()))
@@ -77,7 +79,8 @@ pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabe
 /// Assists are returned in the "resolved" state, that is with edit fully
 /// computed.
 pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> {
-    let ctx = AssistCtx::new(db, range, true);
+    let sema = Semantics::new(db);
+    let ctx = AssistCtx::new(&sema, range, true);
     let mut a = handlers::all()
         .iter()
         .filter_map(|f| f(ctx.clone()))
@@ -165,6 +168,7 @@ mod helpers {
     use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset};

     use crate::{AssistCtx, AssistHandler};
+    use hir::Semantics;

     pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
         let (mut db, file_id) = RootDatabase::with_single_file(text);
@@ -202,7 +206,8 @@ mod helpers {

         let (db, file_id) = with_single_file(&before);
         let frange = FileRange { file_id, range };
-        let assist_ctx = AssistCtx::new(&db, frange, true);
+        let sema = Semantics::new(&db);
+        let assist_ctx = AssistCtx::new(&sema, frange, true);

         match (assist(assist_ctx), expected) {
             (Some(assist), ExpectedResult::After(after)) => {
diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs
index 6ff44c95c..92d3ed471 100644
--- a/crates/ra_assists/src/utils.rs
+++ b/crates/ra_assists/src/utils.rs
@@ -1,16 +1,15 @@
 //! Assorted functions shared by several assists.

+use hir::Semantics;
+use ra_ide_db::RootDatabase;
 use ra_syntax::{
     ast::{self, make, NameOwner},
     AstNode, T,
 };
-
-use hir::db::HirDatabase;
 use rustc_hash::FxHashSet;

 pub fn get_missing_impl_items(
-    db: &impl HirDatabase,
-    analyzer: &hir::SourceAnalyzer,
+    sema: &Semantics<RootDatabase>,
     impl_block: &ast::ImplBlock,
 ) -> Vec<hir::AssocItem> {
     // Names must be unique between constants and functions. However, type aliases
@@ -42,15 +41,17 @@ pub fn get_missing_impl_items(
         }
     }

-    resolve_target_trait(db, analyzer, impl_block).map_or(vec![], |target_trait| {
+    resolve_target_trait(sema, impl_block).map_or(vec![], |target_trait| {
         target_trait
-            .items(db)
+            .items(sema.db)
             .iter()
             .filter(|i| match i {
-                hir::AssocItem::Function(f) => !impl_fns_consts.contains(&f.name(db).to_string()),
-                hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(db).to_string()),
+                hir::AssocItem::Function(f) => {
+                    !impl_fns_consts.contains(&f.name(sema.db).to_string())
+                }
+                hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()),
                 hir::AssocItem::Const(c) => c
-                    .name(db)
+                    .name(sema.db)
                     .map(|n| !impl_fns_consts.contains(&n.to_string()))
                     .unwrap_or_default(),
             })
@@ -60,8 +61,7 @@ pub fn get_missing_impl_items(
 }

 pub(crate) fn resolve_target_trait(
-    db: &impl HirDatabase,
-    analyzer: &hir::SourceAnalyzer,
+    sema: &Semantics<RootDatabase>,
     impl_block: &ast::ImplBlock,
 ) -> Option<hir::Trait> {
     let ast_path = impl_block
@@ -70,7 +70,7 @@ pub(crate) fn resolve_target_trait(
         .and_then(ast::PathType::cast)?
         .path()?;

-    match analyzer.resolve_path(db, &ast_path) {
+    match sema.resolve_path(&ast_path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
         _ => None,
     }
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 7a9745ebe..004a2185f 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -26,6 +26,7 @@ macro_rules! impl_froms {
     }
 }

+mod semantics;
 pub mod db;
 pub mod source_analyzer;
 pub mod source_binder;
@@ -45,8 +46,8 @@ pub use crate::{
         StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
     },
     has_source::HasSource,
-    source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
-    source_binder::SourceBinder,
+    semantics::{original_range, Semantics, SemanticsScope},
+    source_analyzer::{PathResolution, ScopeEntryWithSyntax},
 };

 pub use hir_def::{
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
new file mode 100644
index 000000000..22a7e7588
--- /dev/null
+++ b/crates/ra_hir/src/semantics.rs
@@ -0,0 +1,335 @@
+//! See `Semantics`.
+
+use std::{cell::RefCell, fmt, iter::successors};
+
+use hir_def::{
+    resolver::{self, HasResolver, Resolver},
+    TraitId,
+};
+use ra_db::{FileId, FileRange};
+use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange, TextUnit};
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::{
+    db::HirDatabase,
+    source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
+    source_binder::{ChildContainer, SourceBinder, ToDef},
+    Function, HirFileId, InFile, Local, MacroDef, Module, Name, Origin, Path, PathResolution,
+    ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
+};
+use ra_prof::profile;
+
+/// Primary API to get semantic information, like types, from syntax trees.
+pub struct Semantics<'db, DB> {
+    pub db: &'db DB,
+    pub(crate) sb: RefCell<SourceBinder>,
+    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+}
+
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Semantics {{ ... }}")
+    }
+}
+
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+    pub fn new(db: &DB) -> Semantics<DB> {
+        let sb = RefCell::new(SourceBinder::new());
+        Semantics { db, sb, cache: RefCell::default() }
+    }
+
+    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+        let tree = self.db.parse(file_id).tree();
+        self.cache(tree.syntax().clone(), file_id.into());
+        tree
+    }
+
+    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
+        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let file_id = sa.expand(self.db, macro_call)?;
+        let node = self.db.parse_or_expand(file_id)?;
+        self.cache(node.clone(), file_id);
+        Some(node)
+    }
+
+    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        let parent = token.parent();
+        let parent = self.find_file(parent);
+        let sa = self.analyze2(parent.as_ref(), None);
+
+        let token = successors(Some(parent.with_value(token)), |token| {
+            let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
+            let tt = macro_call.token_tree()?;
+            if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
+                return None;
+            }
+            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+            let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
+
+            self.cache(find_root(&token.value.parent()), token.file_id);
+
+            Some(token)
+        })
+        .last()
+        .unwrap();
+
+        token.value
+    }
+
+    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+        let node = self.find_file(node.clone());
+        original_range(self.db, node.as_ref())
+    }
+
+    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+        let node = self.find_file(node);
+        node.ancestors_with_macros(self.db).map(|it| it.value)
+    }
+
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
+        self.analyze(expr.syntax()).type_of(self.db, &expr)
+    }
+
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
+        self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        self.analyze(call.syntax()).resolve_method_call(call)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> {
+        self.analyze(field.syntax()).resolve_field(field)
+    }
+
+    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> {
+        self.analyze(field.syntax()).resolve_record_field(field)
+    }
+
+    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
+        self.analyze(record_lit.syntax()).resolve_record_literal(record_lit)
+    }
+
+    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
+        self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
+    }
+
+    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
+        let sa = self.analyze(macro_call.syntax());
+        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
+        sa.resolve_macro_call(self.db, macro_call)
+    }
+
+    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+        self.analyze(path.syntax()).resolve_path(self.db, path)
+    }
+
+    // FIXME: use this instead?
+    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
+
+    pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> {
+        let src = self.find_file(src.syntax().clone()).with_value(src.clone());
+        let mut sb = self.sb.borrow_mut();
+        T::to_def(self.db, &mut sb, src)
+    }
+
+    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+        let mut sb = self.sb.borrow_mut();
+        sb.to_module_def(self.db, file)
+    }
+
+    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
+        let node = self.find_file(node.clone());
+        let resolver = self.analyze2(node.as_ref(), None).resolver;
+        SemanticsScope { db: self.db, resolver }
+    }
+
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
+        let node = self.find_file(node.clone());
+        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
+        SemanticsScope { db: self.db, resolver }
+    }
+
+    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
+        let resolver = def.id.resolver(self.db);
+        SemanticsScope { db: self.db, resolver }
+    }
+
+    // FIXME: we only use this in `inline_local_variable` assist, ideally, we
+    // should switch to general reference search infra there.
+    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
+        self.analyze(pat.syntax()).find_all_refs(pat)
+    }
+
+    fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
+        let src = self.find_file(node.clone());
+        self.analyze2(src.as_ref(), None)
+    }
+
+    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
+        let _p = profile("Semantics::analyze2");
+
+        let container = match self.sb.borrow_mut().find_container(self.db, src) {
+            Some(it) => it,
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+        };
+
+        let resolver = match container {
+            ChildContainer::DefWithBodyId(def) => {
+                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+            }
+            ChildContainer::TraitId(it) => it.resolver(self.db),
+            ChildContainer::ImplId(it) => it.resolver(self.db),
+            ChildContainer::ModuleId(it) => it.resolver(self.db),
+            ChildContainer::EnumId(it) => it.resolver(self.db),
+            ChildContainer::VariantId(it) => it.resolver(self.db),
+            ChildContainer::GenericDefId(it) => it.resolver(self.db),
+        };
+        SourceAnalyzer::new_for_resolver(resolver, src)
+    }
+
+    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+        assert!(root_node.parent().is_none());
+        let mut cache = self.cache.borrow_mut();
+        let prev = cache.insert(root_node, file_id);
+        assert!(prev == None || prev == Some(file_id))
+    }
+
+    pub fn assert_contains_node(&self, node: &SyntaxNode) {
+        self.find_file(node.clone());
+    }
+
+    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+        let cache = self.cache.borrow();
+        cache.get(root_node).copied()
+    }
+
+    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
+        let root_node = find_root(&node);
+        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+            panic!(
+                "\n\nFailed to lookup {:?} in this Semantics.\n\
+                 Make sure to use only query nodes, derived from this instance of Semantics.\n\
+                 root node: {:?}\n\
+                 known nodes: {}\n\n",
+                node,
+                root_node,
+                self.cache
+                    .borrow()
+                    .keys()
+                    .map(|it| format!("{:?}", it))
+                    .collect::<Vec<_>>()
+                    .join(", ")
+            )
+        });
+        InFile::new(file_id, node)
+    }
+}
+
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+    node.ancestors().last().unwrap()
+}
+
+pub struct SemanticsScope<'a, DB> {
+    pub db: &'a DB,
+    resolver: Resolver,
+}
+
+impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
+    pub fn module(&self) -> Option<Module> {
+        Some(Module { id: self.resolver.module()? })
+    }
+
+    /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
+    // FIXME: rename to visible_traits to not repeat scope?
+    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+        let resolver = &self.resolver;
+        resolver.traits_in_scope(self.db)
+    }
+
+    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+        let resolver = &self.resolver;
+
+        resolver.process_all_names(self.db, &mut |name, def| {
+            let def = match def {
+                resolver::ScopeDef::PerNs(it) => it.into(),
+                resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+                resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
+                resolver::ScopeDef::Local(pat_id) => {
+                    let parent = resolver.body_owner().unwrap().into();
+                    ScopeDef::Local(Local { parent, pat_id })
+                }
+            };
+            f(name, def)
+        })
+    }
+
+    pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
+        resolve_hir_path(self.db, &self.resolver, path)
+    }
+}
+
+// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
+pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
+    if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
+        return range;
+    }
+
+    if let Some(expansion) = node.file_id.expansion_info(db) {
+        if let Some(call_node) = expansion.call_node() {
+            return FileRange {
+                file_id: call_node.file_id.original_file(db),
+                range: call_node.value.text_range(),
+            };
+        }
+    }
+
+    FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
+}
+
+fn original_range_and_origin(
+    db: &impl HirDatabase,
+    node: InFile<&SyntaxNode>,
+) -> Option<(FileRange, Origin)> {
+    let expansion = node.file_id.expansion_info(db)?;
+
+    // the input node has only one token ?
+    let single = node.value.first_token()? == node.value.last_token()?;
+
+    // FIXME: We should handle recurside macro expansions
+    let (range, origin) = node.value.descendants().find_map(|it| {
+        let first = it.first_token()?;
+        let last = it.last_token()?;
+
+        if !single && first == last {
+            return None;
+        }
+
+        // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
+        let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
+        let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;
+
+        if first.file_id != last.file_id || first_origin != last_origin {
+            return None;
+        }
+
+        // FIXME: Add union method in TextRange
+        Some((
+            first.with_value(union_range(first.value.text_range(), last.value.text_range())),
+            first_origin,
+        ))
+    })?;
+
+    return Some((
+        FileRange { file_id: range.file_id.original_file(db), range: range.value },
+        origin,
+    ));
+
+    fn union_range(a: TextRange, b: TextRange) -> TextRange {
+        let start = a.start().min(b.start());
+        let end = a.end().max(b.end());
+        TextRange::from_to(start, end)
+    }
+}
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index efa3f8a79..bff1ecd14 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -14,29 +14,27 @@ use hir_def::{
         BodySourceMap,
     },
     expr::{ExprId, PatId},
-    resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
-    AsMacroCall, DefWithBodyId, TraitId,
+    resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+    AsMacroCall, DefWithBodyId,
 };
-use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
+use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
 use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
 use ra_syntax::{
     ast::{self, AstNode},
-    AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
+    AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
 };
-use rustc_hash::FxHashSet;

 use crate::{
-    db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
-    ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    db::HirDatabase, Adt, Const, EnumVariant, Function, Local, MacroDef, Name, Path, Static,
+    Struct, Trait, Type, TypeAlias, TypeParam,
 };

 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
-pub struct SourceAnalyzer {
+pub(crate) struct SourceAnalyzer {
     file_id: HirFileId,
-    resolver: Resolver,
-    body_owner: Option<DefWithBody>,
+    pub(crate) resolver: Resolver,
     body_source_map: Option<Arc<BodySourceMap>>,
     infer: Option<Arc<InferenceResult>>,
     scopes: Option<Arc<ExprScopes>>,
@@ -77,35 +75,7 @@ pub struct ReferenceDescriptor {
     pub name: String,
 }

-#[derive(Debug)]
-pub struct Expansion {
-    macro_call_id: MacroCallId,
-}
-
-impl Expansion {
-    pub fn map_token_down(
-        &self,
-        db: &impl HirDatabase,
-        token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
-        let exp_info = self.file_id().expansion_info(db)?;
-        exp_info.map_token_down(token)
-    }
-
-    pub fn file_id(&self) -> HirFileId {
-        self.macro_call_id.as_file()
-    }
-}
-
 impl SourceAnalyzer {
-    pub fn new(
-        db: &impl HirDatabase,
-        node: InFile<&SyntaxNode>,
-        offset: Option<TextUnit>,
-    ) -> SourceAnalyzer {
-        crate::source_binder::SourceBinder::new(db).analyze(node, offset)
-    }
-
     pub(crate) fn new_for_body(
         db: &impl HirDatabase,
         def: DefWithBodyId,
@@ -121,7 +91,6 @@ impl SourceAnalyzer {
         let resolver = resolver_for_scope(db, def, scope);
         SourceAnalyzer {
             resolver,
-            body_owner: Some(def.into()),
             body_source_map: Some(source_map),
             infer: Some(db.infer(def)),
             scopes: Some(scopes),
@@ -135,7 +104,6 @@ impl SourceAnalyzer {
     ) -> SourceAnalyzer {
         SourceAnalyzer {
             resolver,
-            body_owner: None,
             body_source_map: None,
             infer: None,
             scopes: None,
@@ -143,10 +111,6 @@ impl SourceAnalyzer {
         }
     }

-    pub fn module(&self) -> Option<crate::code_model::Module> {
-        Some(crate::code_model::Module { id: self.resolver.module()? })
-    }
-
     fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
         let src = InFile { file_id: self.file_id, value: expr };
         self.body_source_map.as_ref()?.node_expr(src)
@@ -180,7 +144,7 @@ impl SourceAnalyzer {
         TraitEnvironment::lower(db, &self.resolver)
     }

-    pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
+    pub(crate) fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
         let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) {
             self.body_source_map.as_ref()?.node_expr(expr.as_ref())?
         } else {
@@ -192,24 +156,27 @@ impl SourceAnalyzer {
         Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
     }

-    pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
+    pub(crate) fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
         let pat_id = self.pat_id(pat)?;
         let ty = self.infer.as_ref()?[pat_id].clone();
         let environment = self.trait_env(db);
         Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
     }

-    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+    pub(crate) fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
         let expr_id = self.expr_id(&call.clone().into())?;
         self.infer.as_ref()?.method_resolution(expr_id).map(Function::from)
     }

-    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
+    pub(crate) fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
         let expr_id = self.expr_id(&field.clone().into())?;
         self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
     }

-    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
+    pub(crate) fn resolve_record_field(
+        &self,
+        field: &ast::RecordField,
+    ) -> Option<crate::StructField> {
         let expr_id = match field.expr() {
             Some(it) => self.expr_id(&it)?,
             None => {
@@ -220,17 +187,23 @@ impl SourceAnalyzer {
         self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into())
     }

-    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
+    pub(crate) fn resolve_record_literal(
+        &self,
+        record_lit: &ast::RecordLit,
+    ) -> Option<crate::VariantDef> {
         let expr_id = self.expr_id(&record_lit.clone().into())?;
         self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
     }

-    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> {
+    pub(crate) fn resolve_record_pattern(
+        &self,
+        record_pat: &ast::RecordPat,
+    ) -> Option<crate::VariantDef> {
         let pat_id = self.pat_id(&record_pat.clone().into())?;
         self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
     }

-    pub fn resolve_macro_call(
+    pub(crate) fn resolve_macro_call(
         &self,
         db: &impl HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
@@ -240,52 +213,11 @@ impl SourceAnalyzer {
         self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into())
     }

-    pub fn resolve_hir_path(
+    pub(crate) fn resolve_path(
         &self,
         db: &impl HirDatabase,
-        path: &crate::Path,
+        path: &ast::Path,
     ) -> Option<PathResolution> {
-        let types =
-            self.resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
-                TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
-                TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
-                TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
-                    PathResolution::Def(Adt::from(it).into())
-                }
-                TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
-                TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
-                TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
-                TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
-            });
-        let values =
-            self.resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
-                let res = match val {
-                    ValueNs::LocalBinding(pat_id) => {
-                        let var = Local { parent: self.body_owner?, pat_id };
-                        PathResolution::Local(var)
-                    }
-                    ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
-                    ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
-                    ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
-                    ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
-                    ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
-                };
-                Some(res)
-            });
-
-        let items = self
-            .resolver
-            .resolve_module_path_in_items(db, path.mod_path())
-            .take_types()
-            .map(|it| PathResolution::Def(it.into()));
-        types.or(values).or(items).or_else(|| {
-            self.resolver
283 .resolve_path_as_macro(db, path.mod_path())
284 .map(|def| PathResolution::Macro(def.into()))
285 })
286 }
287
288 pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
289 if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { 221 if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
290 let expr_id = self.expr_id(&path_expr.into())?; 222 let expr_id = self.expr_id(&path_expr.into())?;
291 if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { 223 if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
@@ -300,7 +232,7 @@ impl SourceAnalyzer {
300 } 232 }
301 // This must be a normal source file rather than a macro file. 233 // This must be a normal source file rather than a macro file.
302 let hir_path = crate::Path::from_ast(path.clone())?; 234 let hir_path = crate::Path::from_ast(path.clone())?;
303 self.resolve_hir_path(db, &hir_path) 235 resolve_hir_path(db, &self.resolver, &hir_path)
304 } 236 }
305 237
306 fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> { 238 fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
@@ -315,25 +247,9 @@ impl SourceAnalyzer {
315 }) 247 })
316 } 248 }
317 249
318 pub fn process_all_names(&self, db: &impl HirDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
319 self.resolver.process_all_names(db, &mut |name, def| {
320 let def = match def {
321 resolver::ScopeDef::PerNs(it) => it.into(),
322 resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
323 resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
324 resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
325 resolver::ScopeDef::Local(pat_id) => {
326 let parent = self.resolver.body_owner().unwrap().into();
327 ScopeDef::Local(Local { parent, pat_id })
328 }
329 };
330 f(name, def)
331 })
332 }
333
334 // FIXME: we only use this in `inline_local_variable` assist, ideally, we 250 // FIXME: we only use this in `inline_local_variable` assist, ideally, we
335 // should switch to general reference search infra there. 251 // should switch to general reference search infra there.
336 pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> { 252 pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
337 let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); 253 let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
338 let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone()))); 254 let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone())));
339 fn_def 255 fn_def
@@ -351,19 +267,14 @@ impl SourceAnalyzer {
351 .collect() 267 .collect()
352 } 268 }
353 269
354 /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type 270 pub(crate) fn expand(
355 pub fn traits_in_scope(&self, db: &impl HirDatabase) -> FxHashSet<TraitId> {
356 self.resolver.traits_in_scope(db)
357 }
358
359 pub fn expand(
360 &self, 271 &self,
361 db: &impl HirDatabase, 272 db: &impl HirDatabase,
362 macro_call: InFile<&ast::MacroCall>, 273 macro_call: InFile<&ast::MacroCall>,
363 ) -> Option<Expansion> { 274 ) -> Option<HirFileId> {
364 let macro_call_id = 275 let macro_call_id =
365 macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?; 276 macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?;
366 Some(Expansion { macro_call_id }) 277 Some(macro_call_id.as_file())
367 } 278 }
368} 279}
369 280
@@ -409,6 +320,47 @@ fn scope_for_offset(
409 }) 320 })
410} 321}
411 322
323pub(crate) fn resolve_hir_path(
324 db: &impl HirDatabase,
325 resolver: &Resolver,
326 path: &crate::Path,
327) -> Option<PathResolution> {
328 let types = resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
329 TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
330 TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
331 TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()),
332 TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
333 TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
334 TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
335 TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
336 });
337 let body_owner = resolver.body_owner();
338 let values = resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
339 let res = match val {
340 ValueNs::LocalBinding(pat_id) => {
341 let var = Local { parent: body_owner?.into(), pat_id };
342 PathResolution::Local(var)
343 }
344 ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
345 ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
346 ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
347 ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
348 ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
349 };
350 Some(res)
351 });
352
353 let items = resolver
354 .resolve_module_path_in_items(db, path.mod_path())
355 .take_types()
356 .map(|it| PathResolution::Def(it.into()));
357 types.or(values).or(items).or_else(|| {
358 resolver
359 .resolve_path_as_macro(db, path.mod_path())
360 .map(|def| PathResolution::Macro(def.into()))
361 })
362}
363
412// XXX: during completion, cursor might be outside of any particular 364// XXX: during completion, cursor might be outside of any particular
413// expression. Try to figure out the correct scope... 365// expression. Try to figure out the correct scope...
414fn adjust( 366fn adjust(
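The hunk above hoists path resolution out of `SourceAnalyzer`: `resolve_hir_path` becomes a free, crate-private function that takes the `Resolver` explicitly, while the remaining `resolve_path` method just lowers the `ast::Path` and delegates. A minimal sketch of the new call shape, written as if inside source_analyzer.rs where `Resolver`, `PathResolution`, and `resolve_hir_path` are already in scope (the helper name is illustrative, not part of the commit):

    fn resolve_ast_path(
        db: &impl HirDatabase,
        resolver: &Resolver,
        path: &ast::Path,
    ) -> Option<PathResolution> {
        // Lower the syntactic path to a hir path, then resolve it against the
        // supplied resolver -- no SourceAnalyzer instance is required anymore.
        let hir_path = crate::Path::from_ast(path.clone())?;
        resolve_hir_path(db, resolver, &hir_path)
    }

This is presumably how `ctx.scope().resolve_hir_path(&path)` in complete_path.rs further down reuses the same resolution logic without building an analyzer.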
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index f3150f578..0b8a641f9 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -5,112 +5,85 @@ use hir_def::{
5 child_by_source::ChildBySource, 5 child_by_source::ChildBySource,
6 dyn_map::DynMap, 6 dyn_map::DynMap,
7 keys::{self, Key}, 7 keys::{self, Key},
8 resolver::{HasResolver, Resolver},
9 ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId, 8 ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId,
10 StaticId, StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId, 9 StaticId, StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
11}; 10};
12use hir_expand::{name::AsName, AstId, InFile, MacroDefId, MacroDefKind}; 11use hir_expand::{name::AsName, AstId, InFile, MacroDefId, MacroDefKind};
12use ra_db::FileId;
13use ra_prof::profile; 13use ra_prof::profile;
14use ra_syntax::{ 14use ra_syntax::{
15 ast::{self, NameOwner}, 15 ast::{self, NameOwner},
16 match_ast, AstNode, SyntaxNode, TextUnit, 16 match_ast, AstNode, SyntaxNode,
17}; 17};
18use rustc_hash::FxHashMap; 18use rustc_hash::FxHashMap;
19 19
20use crate::{db::HirDatabase, Local, Module, SourceAnalyzer, TypeParam}; 20use crate::{db::HirDatabase, Local, Module, TypeParam};
21use ra_db::FileId;
22 21
23pub struct SourceBinder<'a, DB> { 22pub struct SourceBinder {
24 pub db: &'a DB,
25 child_by_source_cache: FxHashMap<ChildContainer, DynMap>, 23 child_by_source_cache: FxHashMap<ChildContainer, DynMap>,
26} 24}
27 25
28impl<DB: HirDatabase> SourceBinder<'_, DB> { 26impl SourceBinder {
29 pub fn new(db: &DB) -> SourceBinder<DB> { 27 pub(crate) fn new() -> SourceBinder {
30 SourceBinder { db, child_by_source_cache: FxHashMap::default() } 28 SourceBinder { child_by_source_cache: FxHashMap::default() }
31 }
32
33 pub fn analyze(
34 &mut self,
35 src: InFile<&SyntaxNode>,
36 offset: Option<TextUnit>,
37 ) -> SourceAnalyzer {
38 let _p = profile("SourceBinder::analyzer");
39 let container = match self.find_container(src) {
40 Some(it) => it,
41 None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
42 };
43
44 let resolver = match container {
45 ChildContainer::DefWithBodyId(def) => {
46 return SourceAnalyzer::new_for_body(self.db, def, src, offset)
47 }
48 ChildContainer::TraitId(it) => it.resolver(self.db),
49 ChildContainer::ImplId(it) => it.resolver(self.db),
50 ChildContainer::ModuleId(it) => it.resolver(self.db),
51 ChildContainer::EnumId(it) => it.resolver(self.db),
52 ChildContainer::VariantId(it) => it.resolver(self.db),
53 ChildContainer::GenericDefId(it) => it.resolver(self.db),
54 };
55 SourceAnalyzer::new_for_resolver(resolver, src)
56 } 29 }
57 30
58 pub fn to_def<T: ToDef>(&mut self, src: InFile<T>) -> Option<T::Def> { 31 pub(crate) fn to_module_def(&mut self, db: &impl HirDatabase, file: FileId) -> Option<Module> {
59 T::to_def(self, src)
60 }
61
62 pub fn to_module_def(&mut self, file: FileId) -> Option<Module> {
63 let _p = profile("SourceBinder::to_module_def"); 32 let _p = profile("SourceBinder::to_module_def");
64 let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| { 33 let (krate, local_id) = db.relevant_crates(file).iter().find_map(|&crate_id| {
65 let crate_def_map = self.db.crate_def_map(crate_id); 34 let crate_def_map = db.crate_def_map(crate_id);
66 let local_id = crate_def_map.modules_for_file(file).next()?; 35 let local_id = crate_def_map.modules_for_file(file).next()?;
67 Some((crate_id, local_id)) 36 Some((crate_id, local_id))
68 })?; 37 })?;
69 Some(Module { id: ModuleId { krate, local_id } }) 38 Some(Module { id: ModuleId { krate, local_id } })
70 } 39 }
71 40
72 fn to_id<T: ToId>(&mut self, src: InFile<T>) -> Option<T::ID> { 41 fn to_id<T: ToId>(&mut self, db: &impl HirDatabase, src: InFile<T>) -> Option<T::ID> {
73 T::to_id(self, src) 42 T::to_id(db, self, src)
74 } 43 }
75 44
76 fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> { 45 pub(crate) fn find_container(
77 for container in src.cloned().ancestors_with_macros(self.db).skip(1) { 46 &mut self,
47 db: &impl HirDatabase,
48 src: InFile<&SyntaxNode>,
49 ) -> Option<ChildContainer> {
50 for container in src.cloned().ancestors_with_macros(db).skip(1) {
78 let res: ChildContainer = match_ast! { 51 let res: ChildContainer = match_ast! {
79 match (container.value) { 52 match (container.value) {
80 ast::TraitDef(it) => { 53 ast::TraitDef(it) => {
81 let def: TraitId = self.to_id(container.with_value(it))?; 54 let def: TraitId = self.to_id(db, container.with_value(it))?;
82 def.into() 55 def.into()
83 }, 56 },
84 ast::ImplBlock(it) => { 57 ast::ImplBlock(it) => {
85 let def: ImplId = self.to_id(container.with_value(it))?; 58 let def: ImplId = self.to_id(db, container.with_value(it))?;
86 def.into() 59 def.into()
87 }, 60 },
88 ast::FnDef(it) => { 61 ast::FnDef(it) => {
89 let def: FunctionId = self.to_id(container.with_value(it))?; 62 let def: FunctionId = self.to_id(db, container.with_value(it))?;
90 DefWithBodyId::from(def).into() 63 DefWithBodyId::from(def).into()
91 }, 64 },
92 ast::StaticDef(it) => { 65 ast::StaticDef(it) => {
93 let def: StaticId = self.to_id(container.with_value(it))?; 66 let def: StaticId = self.to_id(db, container.with_value(it))?;
94 DefWithBodyId::from(def).into() 67 DefWithBodyId::from(def).into()
95 }, 68 },
96 ast::ConstDef(it) => { 69 ast::ConstDef(it) => {
97 let def: ConstId = self.to_id(container.with_value(it))?; 70 let def: ConstId = self.to_id(db, container.with_value(it))?;
98 DefWithBodyId::from(def).into() 71 DefWithBodyId::from(def).into()
99 }, 72 },
100 ast::EnumDef(it) => { 73 ast::EnumDef(it) => {
101 let def: EnumId = self.to_id(container.with_value(it))?; 74 let def: EnumId = self.to_id(db, container.with_value(it))?;
102 def.into() 75 def.into()
103 }, 76 },
104 ast::StructDef(it) => { 77 ast::StructDef(it) => {
105 let def: StructId = self.to_id(container.with_value(it))?; 78 let def: StructId = self.to_id(db, container.with_value(it))?;
106 VariantId::from(def).into() 79 VariantId::from(def).into()
107 }, 80 },
108 ast::UnionDef(it) => { 81 ast::UnionDef(it) => {
109 let def: UnionId = self.to_id(container.with_value(it))?; 82 let def: UnionId = self.to_id(db, container.with_value(it))?;
110 VariantId::from(def).into() 83 VariantId::from(def).into()
111 }, 84 },
112 ast::Module(it) => { 85 ast::Module(it) => {
113 let def: ModuleId = self.to_id(container.with_value(it))?; 86 let def: ModuleId = self.to_id(db, container.with_value(it))?;
114 def.into() 87 def.into()
115 }, 88 },
116 _ => { continue }, 89 _ => { continue },
@@ -119,12 +92,11 @@ impl<DB: HirDatabase> SourceBinder<'_, DB> {
119 return Some(res); 92 return Some(res);
120 } 93 }
121 94
122 let c = self.to_module_def(src.file_id.original_file(self.db))?; 95 let c = self.to_module_def(db, src.file_id.original_file(db))?;
123 Some(c.id.into()) 96 Some(c.id.into())
124 } 97 }
125 98
126 fn child_by_source(&mut self, container: ChildContainer) -> &DynMap { 99 fn child_by_source(&mut self, db: &impl HirDatabase, container: ChildContainer) -> &DynMap {
127 let db = self.db;
128 self.child_by_source_cache.entry(container).or_insert_with(|| match container { 100 self.child_by_source_cache.entry(container).or_insert_with(|| match container {
129 ChildContainer::DefWithBodyId(it) => it.child_by_source(db), 101 ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
130 ChildContainer::ModuleId(it) => it.child_by_source(db), 102 ChildContainer::ModuleId(it) => it.child_by_source(db),
@@ -137,16 +109,20 @@ impl<DB: HirDatabase> SourceBinder<'_, DB> {
137 } 109 }
138} 110}
139 111
140pub trait ToId: Sized { 112pub(crate) trait ToId: Sized {
141 type ID: Sized + Copy + 'static; 113 type ID: Sized + Copy + 'static;
142 fn to_id<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) 114 fn to_id<DB: HirDatabase>(
143 -> Option<Self::ID>; 115 db: &DB,
116 sb: &mut SourceBinder,
117 src: InFile<Self>,
118 ) -> Option<Self::ID>;
144} 119}
145 120
146pub trait ToDef: Sized + AstNode + 'static { 121pub trait ToDef: Sized + AstNode + 'static {
147 type Def; 122 type Def;
148 fn to_def<DB: HirDatabase>( 123 fn to_def<DB: HirDatabase>(
149 sb: &mut SourceBinder<'_, DB>, 124 db: &DB,
125 sb: &mut SourceBinder,
150 src: InFile<Self>, 126 src: InFile<Self>,
151 ) -> Option<Self::Def>; 127 ) -> Option<Self::Def>;
152} 128}
@@ -155,9 +131,9 @@ macro_rules! to_def_impls {
155 ($(($def:path, $ast:path)),* ,) => {$( 131 ($(($def:path, $ast:path)),* ,) => {$(
156 impl ToDef for $ast { 132 impl ToDef for $ast {
157 type Def = $def; 133 type Def = $def;
158 fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) 134 fn to_def<DB: HirDatabase>(db: &DB, sb: &mut SourceBinder, src: InFile<Self>)
159 -> Option<Self::Def> 135 -> Option<Self::Def>
160 { sb.to_id(src).map(Into::into) } 136 { sb.to_id(db, src).map(Into::into) }
161 } 137 }
162 )*} 138 )*}
163} 139}
@@ -179,7 +155,7 @@ to_def_impls![
179]; 155];
180 156
181#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] 157#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
182enum ChildContainer { 158pub(crate) enum ChildContainer {
183 DefWithBodyId(DefWithBodyId), 159 DefWithBodyId(DefWithBodyId),
184 ModuleId(ModuleId), 160 ModuleId(ModuleId),
185 TraitId(TraitId), 161 TraitId(TraitId),
@@ -201,7 +177,7 @@ impl_froms! {
201 GenericDefId 177 GenericDefId
202} 178}
203 179
204pub trait ToIdByKey: Sized + AstNode + 'static { 180pub(crate) trait ToIdByKey: Sized + AstNode + 'static {
205 type ID: Sized + Copy + 'static; 181 type ID: Sized + Copy + 'static;
206 const KEY: Key<Self, Self::ID>; 182 const KEY: Key<Self, Self::ID>;
207} 183}
@@ -209,11 +185,11 @@ pub trait ToIdByKey: Sized + AstNode + 'static {
209impl<T: ToIdByKey> ToId for T { 185impl<T: ToIdByKey> ToId for T {
210 type ID = <T as ToIdByKey>::ID; 186 type ID = <T as ToIdByKey>::ID;
211 fn to_id<DB: HirDatabase>( 187 fn to_id<DB: HirDatabase>(
212 sb: &mut SourceBinder<'_, DB>, 188 db: &DB,
189 sb: &mut SourceBinder,
213 src: InFile<Self>, 190 src: InFile<Self>,
214 ) -> Option<Self::ID> { 191 ) -> Option<Self::ID> {
215 let container = sb.find_container(src.as_ref().map(|it| it.syntax()))?; 192 let container = sb.find_container(db, src.as_ref().map(|it| it.syntax()))?;
216 let db = sb.db;
217 let dyn_map = 193 let dyn_map =
218 &*sb.child_by_source_cache.entry(container).or_insert_with(|| match container { 194 &*sb.child_by_source_cache.entry(container).or_insert_with(|| match container {
219 ChildContainer::DefWithBodyId(it) => it.child_by_source(db), 195 ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
@@ -255,15 +231,15 @@ to_id_key_impls![
255impl ToId for ast::MacroCall { 231impl ToId for ast::MacroCall {
256 type ID = MacroDefId; 232 type ID = MacroDefId;
257 fn to_id<DB: HirDatabase>( 233 fn to_id<DB: HirDatabase>(
258 sb: &mut SourceBinder<'_, DB>, 234 db: &DB,
235 sb: &mut SourceBinder,
259 src: InFile<Self>, 236 src: InFile<Self>,
260 ) -> Option<Self::ID> { 237 ) -> Option<Self::ID> {
261 let kind = MacroDefKind::Declarative; 238 let kind = MacroDefKind::Declarative;
262 239
263 let krate = sb.to_module_def(src.file_id.original_file(sb.db))?.id.krate; 240 let krate = sb.to_module_def(db, src.file_id.original_file(db))?.id.krate;
264 241
265 let ast_id = 242 let ast_id = Some(AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value)));
266 Some(AstId::new(src.file_id, sb.db.ast_id_map(src.file_id).ast_id(&src.value)));
267 243
268 Some(MacroDefId { krate: Some(krate), ast_id, kind }) 244 Some(MacroDefId { krate: Some(krate), ast_id, kind })
269 } 245 }
@@ -272,20 +248,20 @@ impl ToId for ast::MacroCall {
272impl ToDef for ast::BindPat { 248impl ToDef for ast::BindPat {
273 type Def = Local; 249 type Def = Local;
274 250
275 fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) -> Option<Local> { 251 fn to_def<DB: HirDatabase>(db: &DB, sb: &mut SourceBinder, src: InFile<Self>) -> Option<Local> {
276 let file_id = src.file_id; 252 let file_id = src.file_id;
277 let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| { 253 let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| {
278 let res = match_ast! { 254 let res = match_ast! {
279 match it { 255 match it {
280 ast::ConstDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 256 ast::ConstDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
281 ast::StaticDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 257 ast::StaticDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
282 ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 258 ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
283 _ => return None, 259 _ => return None,
284 } 260 }
285 }; 261 };
286 Some(res) 262 Some(res)
287 })?; 263 })?;
288 let (_body, source_map) = sb.db.body_with_source_map(parent); 264 let (_body, source_map) = db.body_with_source_map(parent);
289 let src = src.map(ast::Pat::from); 265 let src = src.map(ast::Pat::from);
290 let pat_id = source_map.node_pat(src.as_ref())?; 266 let pat_id = source_map.node_pat(src.as_ref())?;
291 Some(Local { parent: parent.into(), pat_id }) 267 Some(Local { parent: parent.into(), pat_id })
@@ -296,26 +272,26 @@ impl ToDef for ast::TypeParam {
296 type Def = TypeParam; 272 type Def = TypeParam;
297 273
298 fn to_def<DB: HirDatabase>( 274 fn to_def<DB: HirDatabase>(
299 sb: &mut SourceBinder<'_, DB>, 275 db: &DB,
276 sb: &mut SourceBinder,
300 src: InFile<ast::TypeParam>, 277 src: InFile<ast::TypeParam>,
301 ) -> Option<TypeParam> { 278 ) -> Option<TypeParam> {
302 let mut sb = SourceBinder::new(sb.db);
303 let file_id = src.file_id; 279 let file_id = src.file_id;
304 let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| { 280 let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| {
305 let res = match_ast! { 281 let res = match_ast! {
306 match it { 282 match it {
307 ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 283 ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
308 ast::StructDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 284 ast::StructDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
309 ast::EnumDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 285 ast::EnumDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
310 ast::TraitDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 286 ast::TraitDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
311 ast::TypeAliasDef(value) => { sb.to_id(InFile { value, file_id})?.into() }, 287 ast::TypeAliasDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
312 ast::ImplBlock(value) => { sb.to_id(InFile { value, file_id})?.into() }, 288 ast::ImplBlock(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
313 _ => return None, 289 _ => return None,
314 } 290 }
315 }; 291 };
316 Some(res) 292 Some(res)
317 })?; 293 })?;
318 let &id = sb.child_by_source(parent.into())[keys::TYPE_PARAM].get(&src)?; 294 let &id = sb.child_by_source(db, parent.into())[keys::TYPE_PARAM].get(&src)?;
319 Some(TypeParam { id }) 295 Some(TypeParam { id })
320 } 296 }
321} 297}
@@ -324,7 +300,8 @@ impl ToId for ast::Module {
324 type ID = ModuleId; 300 type ID = ModuleId;
325 301
326 fn to_id<DB: HirDatabase>( 302 fn to_id<DB: HirDatabase>(
327 sb: &mut SourceBinder<'_, DB>, 303 db: &DB,
304 sb: &mut SourceBinder,
328 src: InFile<ast::Module>, 305 src: InFile<ast::Module>,
329 ) -> Option<ModuleId> { 306 ) -> Option<ModuleId> {
330 { 307 {
@@ -333,7 +310,7 @@ impl ToId for ast::Module {
333 .as_ref() 310 .as_ref()
334 .map(|it| it.syntax()) 311 .map(|it| it.syntax())
335 .cloned() 312 .cloned()
336 .ancestors_with_macros(sb.db) 313 .ancestors_with_macros(db)
337 .skip(1) 314 .skip(1)
338 .find_map(|it| { 315 .find_map(|it| {
339 let m = ast::Module::cast(it.value.clone())?; 316 let m = ast::Module::cast(it.value.clone())?;
@@ -341,15 +318,15 @@ impl ToId for ast::Module {
341 }); 318 });
342 319
343 let parent_module = match parent_declaration { 320 let parent_module = match parent_declaration {
344 Some(parent_declaration) => sb.to_id(parent_declaration)?, 321 Some(parent_declaration) => sb.to_id(db, parent_declaration)?,
345 None => { 322 None => {
346 let file_id = src.file_id.original_file(sb.db); 323 let file_id = src.file_id.original_file(db);
347 sb.to_module_def(file_id)?.id 324 sb.to_module_def(db, file_id)?.id
348 } 325 }
349 }; 326 };
350 327
351 let child_name = src.value.name()?.as_name(); 328 let child_name = src.value.name()?.as_name();
352 let def_map = sb.db.crate_def_map(parent_module.krate); 329 let def_map = db.crate_def_map(parent_module.krate);
353 let child_id = *def_map[parent_module.local_id].children.get(&child_name)?; 330 let child_id = *def_map[parent_module.local_id].children.get(&child_name)?;
354 Some(ModuleId { krate: parent_module.krate, local_id: child_id }) 331 Some(ModuleId { krate: parent_module.krate, local_id: child_id })
355 } 332 }
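The net effect on `SourceBinder` is that it no longer captures the database: `new()` takes no arguments, and `to_module_def`, `to_id`, `find_container`, and the `ToDef`/`ToId` traits all receive `db: &impl HirDatabase` per call. A sketch of the changed call shape, written as if at the bottom of source_binder.rs (the wrapper function is illustrative only; most of these items are now `pub(crate)`):

    fn module_of(db: &impl HirDatabase, file: FileId) -> Option<Module> {
        // was: let mut sb = SourceBinder::new(db); sb.to_module_def(file)
        let mut sb = SourceBinder::new();
        sb.to_module_def(db, file)
    }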
diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs
index 51ac59a71..b00b6d431 100644
--- a/crates/ra_ide/src/call_hierarchy.rs
+++ b/crates/ra_ide/src/call_hierarchy.rs
@@ -2,13 +2,13 @@
2 2
3use indexmap::IndexMap; 3use indexmap::IndexMap;
4 4
5use hir::db::AstDatabase; 5use hir::Semantics;
6use ra_ide_db::RootDatabase; 6use ra_ide_db::RootDatabase;
7use ra_syntax::{ast, match_ast, AstNode, TextRange}; 7use ra_syntax::{ast, match_ast, AstNode, TextRange};
8 8
9use crate::{ 9use crate::{
10 call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition, 10 call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition,
11 references, FilePosition, NavigationTarget, RangeInfo, 11 NavigationTarget, RangeInfo,
12}; 12};
13 13
14#[derive(Debug, Clone)] 14#[derive(Debug, Clone)]
@@ -38,30 +38,31 @@ pub(crate) fn call_hierarchy(
38} 38}
39 39
40pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { 40pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
41 let sema = Semantics::new(db);
41 // 1. Find all refs 42 // 1. Find all refs
42 // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. 43 // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply.
43 // 3. Add ranges relative to the start of the fndef. 44 // 3. Add ranges relative to the start of the fndef.
44 let refs = references::find_all_refs(db, position, None)?; 45 let refs = references::find_all_refs(db, position, None)?;
45 46
46 let mut calls = CallLocations::default(); 47 let mut calls = CallLocations::default();
47 let mut sb = hir::SourceBinder::new(db);
48 48
49 for reference in refs.info.references() { 49 for reference in refs.info.references() {
50 let file_id = reference.file_range.file_id; 50 let file_id = reference.file_range.file_id;
51 let file = db.parse_or_expand(file_id.into())?; 51 let file = sema.parse(file_id);
52 let file = file.syntax();
52 let token = file.token_at_offset(reference.file_range.range.start()).next()?; 53 let token = file.token_at_offset(reference.file_range.range.start()).next()?;
53 let token = descend_into_macros(db, file_id, token); 54 let token = sema.descend_into_macros(token);
54 let syntax = token.value.parent(); 55 let syntax = token.parent();
55 56
56 // This target is the containing function 57 // This target is the containing function
57 if let Some(nav) = syntax.ancestors().find_map(|node| { 58 if let Some(nav) = syntax.ancestors().find_map(|node| {
58 match_ast! { 59 match_ast! {
59 match node { 60 match node {
60 ast::FnDef(it) => { 61 ast::FnDef(it) => {
61 let def = sb.to_def(token.with_value(it))?; 62 let def = sema.to_def(&it)?;
62 Some(def.to_nav(sb.db)) 63 Some(def.to_nav(sema.db))
63 }, 64 },
64 _ => { None }, 65 _ => None,
65 } 66 }
66 } 67 }
67 }) { 68 }) {
@@ -74,11 +75,13 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
74} 75}
75 76
76pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { 77pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
78 let sema = Semantics::new(db);
77 let file_id = position.file_id; 79 let file_id = position.file_id;
78 let file = db.parse_or_expand(file_id.into())?; 80 let file = sema.parse(file_id);
81 let file = file.syntax();
79 let token = file.token_at_offset(position.offset).next()?; 82 let token = file.token_at_offset(position.offset).next()?;
80 let token = descend_into_macros(db, file_id, token); 83 let token = sema.descend_into_macros(token);
81 let syntax = token.value.parent(); 84 let syntax = token.parent();
82 85
83 let mut calls = CallLocations::default(); 86 let mut calls = CallLocations::default();
84 87
@@ -87,14 +90,11 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
87 .filter_map(|node| FnCallNode::with_node_exact(&node)) 90 .filter_map(|node| FnCallNode::with_node_exact(&node))
88 .filter_map(|call_node| { 91 .filter_map(|call_node| {
89 let name_ref = call_node.name_ref()?; 92 let name_ref = call_node.name_ref()?;
90 let name_ref = token.with_value(name_ref.syntax());
91
92 let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
93 93
94 if let Some(func_target) = match &call_node { 94 if let Some(func_target) = match &call_node {
95 FnCallNode::CallExpr(expr) => { 95 FnCallNode::CallExpr(expr) => {
96 //FIXME: Type::as_callable is broken 96 //FIXME: Type::as_callable is broken
97 let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; 97 let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
98 match callable_def { 98 match callable_def {
99 hir::CallableDef::FunctionId(it) => { 99 hir::CallableDef::FunctionId(it) => {
100 let fn_def: hir::Function = it.into(); 100 let fn_def: hir::Function = it.into();
@@ -105,15 +105,15 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
105 } 105 }
106 } 106 }
107 FnCallNode::MethodCallExpr(expr) => { 107 FnCallNode::MethodCallExpr(expr) => {
108 let function = analyzer.resolve_method_call(&expr)?; 108 let function = sema.resolve_method_call(&expr)?;
109 Some(function.to_nav(db)) 109 Some(function.to_nav(db))
110 } 110 }
111 FnCallNode::MacroCallExpr(expr) => { 111 FnCallNode::MacroCallExpr(macro_call) => {
112 let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; 112 let macro_def = sema.resolve_macro_call(&macro_call)?;
113 Some(macro_def.to_nav(db)) 113 Some(macro_def.to_nav(db))
114 } 114 }
115 } { 115 } {
116 Some((func_target, name_ref.value.text_range())) 116 Some((func_target, name_ref.syntax().text_range()))
117 } else { 117 } else {
118 None 118 None
119 } 119 }
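call_hierarchy.rs shows the new front end in miniature: one `Semantics` per request, files parsed through it, tokens mapped into macro expansions with `descend_into_macros`, and resolution questions asked of `sema` directly instead of a hand-built `SourceAnalyzer`. A condensed sketch of that flow, assuming it sits inside ra_ide (the helper name and its return type are illustrative, not part of the commit):

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode};

    use crate::FilePosition;

    fn resolve_method_at(db: &RootDatabase, position: FilePosition) -> Option<hir::Function> {
        let sema = Semantics::new(db);
        let file = sema.parse(position.file_id);
        // Map the cursor token into macro expansions before walking the tree.
        let token = file.syntax().token_at_offset(position.offset).next()?;
        let token = sema.descend_into_macros(token);
        let call = token.parent().ancestors().find_map(ast::MethodCallExpr::cast)?;
        sema.resolve_method_call(&call)
    }

Note that `descend_into_macros` now returns a plain `SyntaxToken` rather than an `InFile<SyntaxToken>`, so the `token.value.parent()` / `with_value` plumbing from the old code disappears.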
diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs
index 7c6322cb4..9a1fc0d35 100644
--- a/crates/ra_ide/src/call_info.rs
+++ b/crates/ra_ide/src/call_info.rs
@@ -1,5 +1,5 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2use hir::db::AstDatabase; 2use hir::Semantics;
3use ra_ide_db::RootDatabase; 3use ra_ide_db::RootDatabase;
4use ra_syntax::{ 4use ra_syntax::{
5 ast::{self, ArgListOwner}, 5 ast::{self, ArgListOwner},
@@ -7,24 +7,23 @@ use ra_syntax::{
7}; 7};
8use test_utils::tested_by; 8use test_utils::tested_by;
9 9
10use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature}; 10use crate::{CallInfo, FilePosition, FunctionSignature};
11 11
12/// Computes parameter information for the given call expression. 12/// Computes parameter information for the given call expression.
13pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { 13pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
14 let file = db.parse_or_expand(position.file_id.into())?; 14 let sema = Semantics::new(db);
15 let file = sema.parse(position.file_id);
16 let file = file.syntax();
15 let token = file.token_at_offset(position.offset).next()?; 17 let token = file.token_at_offset(position.offset).next()?;
16 let token = descend_into_macros(db, position.file_id, token); 18 let token = sema.descend_into_macros(token);
17 19
18 // Find the calling expression and its NameRef 20 // Find the calling expression and its NameRef
19 let calling_node = FnCallNode::with_node(&token.value.parent())?; 21 let calling_node = FnCallNode::with_node(&token.parent())?;
20 let name_ref = calling_node.name_ref()?;
21 let name_ref = token.with_value(name_ref.syntax());
22 22
23 let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
24 let (mut call_info, has_self) = match &calling_node { 23 let (mut call_info, has_self) = match &calling_node {
25 FnCallNode::CallExpr(expr) => { 24 FnCallNode::CallExpr(call) => {
26 //FIXME: Type::as_callable is broken 25 //FIXME: Type::as_callable is broken
27 let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; 26 let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?;
28 match callable_def { 27 match callable_def {
29 hir::CallableDef::FunctionId(it) => { 28 hir::CallableDef::FunctionId(it) => {
30 let fn_def = it.into(); 29 let fn_def = it.into();
@@ -36,12 +35,12 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
36 } 35 }
37 } 36 }
38 } 37 }
39 FnCallNode::MethodCallExpr(expr) => { 38 FnCallNode::MethodCallExpr(method_call) => {
40 let function = analyzer.resolve_method_call(&expr)?; 39 let function = sema.resolve_method_call(&method_call)?;
41 (CallInfo::with_fn(db, function), function.has_self_param(db)) 40 (CallInfo::with_fn(db, function), function.has_self_param(db))
42 } 41 }
43 FnCallNode::MacroCallExpr(expr) => { 42 FnCallNode::MacroCallExpr(macro_call) => {
44 let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; 43 let macro_def = sema.resolve_macro_call(&macro_call)?;
45 (CallInfo::with_macro(db, macro_def)?, false) 44 (CallInfo::with_macro(db, macro_def)?, false)
46 } 45 }
47 }; 46 };
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index 4bdc6ba23..c378c2c62 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -17,7 +17,6 @@ mod complete_postfix;
17mod complete_macro_in_item_position; 17mod complete_macro_in_item_position;
18mod complete_trait_impl; 18mod complete_trait_impl;
19 19
20use ra_db::SourceDatabase;
21use ra_ide_db::RootDatabase; 20use ra_ide_db::RootDatabase;
22 21
23#[cfg(test)] 22#[cfg(test)]
@@ -57,8 +56,7 @@ pub use crate::completion::completion_item::{
57/// identifier prefix/fuzzy match should be done higher in the stack, together 56/// identifier prefix/fuzzy match should be done higher in the stack, together
58/// with ordering of completions (currently this is done by the client). 57/// with ordering of completions (currently this is done by the client).
59pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> { 58pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> {
60 let original_parse = db.parse(position.file_id); 59 let ctx = CompletionContext::new(db, position)?;
61 let ctx = CompletionContext::new(db, &original_parse, position)?;
62 60
63 let mut acc = Completions::default(); 61 let mut acc = Completions::default();
64 62
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs
index 2ca78c927..a6e0158b2 100644
--- a/crates/ra_ide/src/completion/complete_dot.rs
+++ b/crates/ra_ide/src/completion/complete_dot.rs
@@ -16,7 +16,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
16 _ => return, 16 _ => return,
17 }; 17 };
18 18
19 let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { 19 let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
20 Some(ty) => ty, 20 Some(ty) => ty,
21 _ => return, 21 _ => return,
22 }; 22 };
@@ -55,7 +55,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
55fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { 55fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
56 if let Some(krate) = ctx.module.map(|it| it.krate()) { 56 if let Some(krate) = ctx.module.map(|it| it.krate()) {
57 let mut seen_methods = FxHashSet::default(); 57 let mut seen_methods = FxHashSet::default();
58 let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db); 58 let traits_in_scope = ctx.scope().traits_in_scope();
59 receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { 59 receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
60 if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) { 60 if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
61 acc.add_function(ctx, func); 61 acc.add_function(ctx, func);
diff --git a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
index faadd1e3f..1866d9e6c 100644
--- a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
+++ b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
@@ -5,7 +5,7 @@ use crate::completion::{CompletionContext, Completions};
5pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { 5pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) {
6 // Show only macros in top level. 6 // Show only macros in top level.
7 if ctx.is_new_item { 7 if ctx.is_new_item {
8 ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { 8 ctx.scope().process_all_names(&mut |name, res| {
9 if let hir::ScopeDef::MacroDef(mac) = res { 9 if let hir::ScopeDef::MacroDef(mac) = res {
10 acc.add_macro(ctx, Some(name.to_string()), mac); 10 acc.add_macro(ctx, Some(name.to_string()), mac);
11 } 11 }
diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs
index 2d7f09a6c..c626e90cc 100644
--- a/crates/ra_ide/src/completion/complete_path.rs
+++ b/crates/ra_ide/src/completion/complete_path.rs
@@ -11,7 +11,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
11 Some(path) => path.clone(), 11 Some(path) => path.clone(),
12 _ => return, 12 _ => return,
13 }; 13 };
14 let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) { 14 let def = match ctx.scope().resolve_hir_path(&path) {
15 Some(PathResolution::Def(def)) => def, 15 Some(PathResolution::Def(def)) => def,
16 _ => return, 16 _ => return,
17 }; 17 };
@@ -49,7 +49,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
49 // FIXME: complete T::AssocType 49 // FIXME: complete T::AssocType
50 let krate = ctx.module.map(|m| m.krate()); 50 let krate = ctx.module.map(|m| m.krate());
51 if let Some(krate) = krate { 51 if let Some(krate) = krate {
52 let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db); 52 let traits_in_scope = ctx.scope().traits_in_scope();
53 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { 53 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
54 match item { 54 match item {
55 hir::AssocItem::Function(func) => { 55 hir::AssocItem::Function(func) => {
diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs
index fd03b1c40..c2c6ca002 100644
--- a/crates/ra_ide/src/completion/complete_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_pattern.rs
@@ -9,7 +9,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
9 } 9 }
10 // FIXME: ideally, we should look at the type we are matching against and 10 // FIXME: ideally, we should look at the type we are matching against and
11 // suggest variants + auto-imports 11 // suggest variants + auto-imports
12 ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { 12 ctx.scope().process_all_names(&mut |name, res| {
13 let def = match &res { 13 let def = match &res {
14 hir::ScopeDef::ModuleDef(def) => def, 14 hir::ScopeDef::ModuleDef(def) => def,
15 _ => return, 15 _ => return,
diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs
index 5470dc291..8a74f993a 100644
--- a/crates/ra_ide/src/completion/complete_postfix.rs
+++ b/crates/ra_ide/src/completion/complete_postfix.rs
@@ -29,7 +29,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
29 dot_receiver.syntax().text().to_string() 29 dot_receiver.syntax().text().to_string()
30 }; 30 };
31 31
32 let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { 32 let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
33 Some(it) => it, 33 Some(it) => it,
34 None => return, 34 None => return,
35 }; 35 };
diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs
index 577c394d2..f98353d76 100644
--- a/crates/ra_ide/src/completion/complete_record_literal.rs
+++ b/crates/ra_ide/src/completion/complete_record_literal.rs
@@ -5,10 +5,7 @@ use crate::completion::{CompletionContext, Completions};
5/// Complete fields in record literals. 5/// Complete fields in record literals.
6pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { 6pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) {
7 let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| { 7 let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| {
8 Some(( 8 Some((ctx.sema.type_of_expr(&it.clone().into())?, ctx.sema.resolve_record_literal(it)?))
9 ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
10 ctx.analyzer.resolve_record_literal(it)?,
11 ))
12 }) { 9 }) {
13 Some(it) => it, 10 Some(it) => it,
14 _ => return, 11 _ => return,
diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs
index a56c7e3a1..9bdeae49f 100644
--- a/crates/ra_ide/src/completion/complete_record_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_record_pattern.rs
@@ -4,10 +4,7 @@ use crate::completion::{CompletionContext, Completions};
4 4
5pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) { 5pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) {
6 let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| { 6 let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| {
7 Some(( 7 Some((ctx.sema.type_of_pat(&it.clone().into())?, ctx.sema.resolve_record_pattern(it)?))
8 ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
9 ctx.analyzer.resolve_record_pattern(it)?,
10 ))
11 }) { 8 }) {
12 Some(it) => it, 9 Some(it) => it,
13 _ => return, 10 _ => return,
diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs
index e2ee86dd1..aad016d4a 100644
--- a/crates/ra_ide/src/completion/complete_scope.rs
+++ b/crates/ra_ide/src/completion/complete_scope.rs
@@ -7,9 +7,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
7 return; 7 return;
8 } 8 }
9 9
10 ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { 10 ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res));
11 acc.add_resolution(ctx, name.to_string(), &res)
12 });
13} 11}
14 12
15#[cfg(test)] 13#[cfg(test)]
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs
index 83628e35c..9a27c164b 100644
--- a/crates/ra_ide/src/completion/complete_trait_impl.rs
+++ b/crates/ra_ide/src/completion/complete_trait_impl.rs
@@ -64,11 +64,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
64 if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) { 64 if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) {
65 match trigger.kind() { 65 match trigger.kind() {
66 SyntaxKind::FN_DEF => { 66 SyntaxKind::FN_DEF => {
67 for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) 67 for missing_fn in
68 .iter() 68 get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
69 .filter_map(|item| match item { 69 match item {
70 hir::AssocItem::Function(fn_item) => Some(fn_item), 70 hir::AssocItem::Function(fn_item) => Some(fn_item),
71 _ => None, 71 _ => None,
72 }
72 }) 73 })
73 { 74 {
74 add_function_impl(&trigger, acc, ctx, &missing_fn); 75 add_function_impl(&trigger, acc, ctx, &missing_fn);
@@ -76,11 +77,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
76 } 77 }
77 78
78 SyntaxKind::TYPE_ALIAS_DEF => { 79 SyntaxKind::TYPE_ALIAS_DEF => {
79 for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) 80 for missing_fn in
80 .iter() 81 get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
81 .filter_map(|item| match item { 82 match item {
82 hir::AssocItem::TypeAlias(type_item) => Some(type_item), 83 hir::AssocItem::TypeAlias(type_item) => Some(type_item),
83 _ => None, 84 _ => None,
85 }
84 }) 86 })
85 { 87 {
86 add_type_alias_impl(&trigger, acc, ctx, &missing_fn); 88 add_type_alias_impl(&trigger, acc, ctx, &missing_fn);
@@ -88,11 +90,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
88 } 90 }
89 91
90 SyntaxKind::CONST_DEF => { 92 SyntaxKind::CONST_DEF => {
91 for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) 93 for missing_fn in
92 .iter() 94 get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
93 .filter_map(|item| match item { 95 match item {
94 hir::AssocItem::Const(const_item) => Some(const_item), 96 hir::AssocItem::Const(const_item) => Some(const_item),
95 _ => None, 97 _ => None,
98 }
96 }) 99 })
97 { 100 {
98 add_const_impl(&trigger, acc, ctx, &missing_fn); 101 add_const_impl(&trigger, acc, ctx, &missing_fn);
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs
index 8678a3234..81321a897 100644
--- a/crates/ra_ide/src/completion/completion_context.rs
+++ b/crates/ra_ide/src/completion/completion_context.rs
@@ -1,9 +1,11 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{Semantics, SemanticsScope};
4use ra_db::SourceDatabase;
3use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
4use ra_syntax::{ 6use ra_syntax::{
5 algo::{find_covering_element, find_node_at_offset}, 7 algo::{find_covering_element, find_node_at_offset},
6 ast, AstNode, Parse, SourceFile, 8 ast, AstNode, SourceFile,
7 SyntaxKind::*, 9 SyntaxKind::*,
8 SyntaxNode, SyntaxToken, TextRange, TextUnit, 10 SyntaxNode, SyntaxToken, TextRange, TextUnit,
9}; 11};
@@ -15,8 +17,8 @@ use crate::FilePosition;
15/// exactly is the cursor, syntax-wise. 17/// exactly is the cursor, syntax-wise.
16#[derive(Debug)] 18#[derive(Debug)]
17pub(crate) struct CompletionContext<'a> { 19pub(crate) struct CompletionContext<'a> {
20 pub(super) sema: Semantics<'a, RootDatabase>,
18 pub(super) db: &'a RootDatabase, 21 pub(super) db: &'a RootDatabase,
19 pub(super) analyzer: hir::SourceAnalyzer,
20 pub(super) offset: TextUnit, 22 pub(super) offset: TextUnit,
21 pub(super) token: SyntaxToken, 23 pub(super) token: SyntaxToken,
22 pub(super) module: Option<hir::Module>, 24 pub(super) module: Option<hir::Module>,
@@ -51,20 +53,26 @@ pub(crate) struct CompletionContext<'a> {
51impl<'a> CompletionContext<'a> { 53impl<'a> CompletionContext<'a> {
52 pub(super) fn new( 54 pub(super) fn new(
53 db: &'a RootDatabase, 55 db: &'a RootDatabase,
54 original_parse: &'a Parse<ast::SourceFile>,
55 position: FilePosition, 56 position: FilePosition,
56 ) -> Option<CompletionContext<'a>> { 57 ) -> Option<CompletionContext<'a>> {
57 let mut sb = hir::SourceBinder::new(db); 58 let sema = Semantics::new(db);
58 let module = sb.to_module_def(position.file_id); 59
59 let token = 60 let original_file = sema.parse(position.file_id);
60 original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?; 61
61 let analyzer = sb.analyze( 62 // Insert a fake ident to get a valid parse tree. We will use this file
62 hir::InFile::new(position.file_id.into(), &token.parent()), 63 // to determine context, though the original_file will be used for
63 Some(position.offset), 64 // actual completion.
64 ); 65 let file_with_fake_ident = {
66 let parse = db.parse(position.file_id);
67 let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
68 parse.reparse(&edit).tree()
69 };
70
71 let module = sema.to_module_def(position.file_id);
72 let token = original_file.syntax().token_at_offset(position.offset).left_biased()?;
65 let mut ctx = CompletionContext { 73 let mut ctx = CompletionContext {
74 sema,
66 db, 75 db,
67 analyzer,
68 token, 76 token,
69 offset: position.offset, 77 offset: position.offset,
70 module, 78 module,
@@ -87,7 +95,7 @@ impl<'a> CompletionContext<'a> {
87 has_type_args: false, 95 has_type_args: false,
88 dot_receiver_is_ambiguous_float_literal: false, 96 dot_receiver_is_ambiguous_float_literal: false,
89 }; 97 };
90 ctx.fill(&original_parse, position.offset); 98 ctx.fill(&original_file, file_with_fake_ident, position.offset);
91 Some(ctx) 99 Some(ctx)
92 } 100 }
93 101
@@ -100,29 +108,33 @@ impl<'a> CompletionContext<'a> {
100 } 108 }
101 } 109 }
102 110
103 fn fill(&mut self, original_parse: &'a Parse<ast::SourceFile>, offset: TextUnit) { 111 pub(crate) fn scope(&self) -> SemanticsScope<'_, RootDatabase> {
104 // Insert a fake ident to get a valid parse tree. We will use this file 112 self.sema.scope_at_offset(&self.token.parent(), self.offset)
105 // to determine context, though the original_file will be used for 113 }
106 // actual completion.
107 let file = {
108 let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
109 original_parse.reparse(&edit).tree()
110 };
111 114
115 fn fill(
116 &mut self,
117 original_file: &ast::SourceFile,
118 file_with_fake_ident: ast::SourceFile,
119 offset: TextUnit,
120 ) {
112 // First, let's try to complete a reference to some declaration. 121 // First, let's try to complete a reference to some declaration.
113 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) { 122 if let Some(name_ref) =
123 find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
124 {
114 // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. 125 // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
115 // See RFC#1685. 126 // See RFC#1685.
116 if is_node::<ast::Param>(name_ref.syntax()) { 127 if is_node::<ast::Param>(name_ref.syntax()) {
117 self.is_param = true; 128 self.is_param = true;
118 return; 129 return;
119 } 130 }
120 self.classify_name_ref(original_parse.tree(), name_ref); 131 self.classify_name_ref(original_file, name_ref);
121 } 132 }
122 133
123 // Otherwise, see if this is a declaration. We can use heuristics to 134 // Otherwise, see if this is a declaration. We can use heuristics to
124 // suggest declaration names, see `CompletionKind::Magic`. 135 // suggest declaration names, see `CompletionKind::Magic`.
125 if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) { 136 if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset)
137 {
126 if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { 138 if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
127 let parent = bind_pat.syntax().parent(); 139 let parent = bind_pat.syntax().parent();
128 if parent.clone().and_then(ast::MatchArm::cast).is_some() 140 if parent.clone().and_then(ast::MatchArm::cast).is_some()
@@ -136,13 +148,12 @@ impl<'a> CompletionContext<'a> {
136 return; 148 return;
137 } 149 }
138 if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { 150 if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
139 self.record_lit_pat = 151 self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset);
140 find_node_at_offset(original_parse.tree().syntax(), self.offset);
141 } 152 }
142 } 153 }
143 } 154 }
144 155
145 fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) { 156 fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) {
146 self.name_ref_syntax = 157 self.name_ref_syntax =
147 find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); 158 find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
148 let name_range = name_ref.syntax().text_range(); 159 let name_range = name_ref.syntax().text_range();
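`CompletionContext` now owns the `Semantics` session and derives a `SemanticsScope` on demand through `scope()`, so the completion modules above ask `ctx.scope()` for name resolution and `ctx.sema` for types instead of the removed `ctx.analyzer`. A small sketch of that consumer side (the function name is hypothetical; the calls mirror complete_scope.rs and complete_dot.rs):

    fn complete_in_scope(acc: &mut Completions, ctx: &CompletionContext) {
        // Everything nameable at the cursor position, via the lazily built scope.
        ctx.scope().process_all_names(&mut |name, res| {
            acc.add_resolution(ctx, name.to_string(), &res)
        });
        // Traits visible here, e.g. for method-call candidates.
        let _traits_in_scope = ctx.scope().traits_in_scope();
    }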
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs
index 9cf86b26d..a52f7fdd9 100644
--- a/crates/ra_ide/src/diagnostics.rs
+++ b/crates/ra_ide/src/diagnostics.rs
@@ -2,7 +2,10 @@
2 2
3use std::cell::RefCell; 3use std::cell::RefCell;
4 4
5use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; 5use hir::{
6 diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink},
7 Semantics,
8};
6use itertools::Itertools; 9use itertools::Itertools;
7use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; 10use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt};
8use ra_ide_db::RootDatabase; 11use ra_ide_db::RootDatabase;
@@ -24,7 +27,7 @@ pub enum Severity {
24 27
25pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> { 28pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
26 let _p = profile("diagnostics"); 29 let _p = profile("diagnostics");
27 let mut sb = hir::SourceBinder::new(db); 30 let sema = Semantics::new(db);
28 let parse = db.parse(file_id); 31 let parse = db.parse(file_id);
29 let mut res = Vec::new(); 32 let mut res = Vec::new();
30 33
@@ -110,7 +113,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
110 fix: Some(fix), 113 fix: Some(fix),
111 }) 114 })
112 }); 115 });
113 if let Some(m) = sb.to_module_def(file_id) { 116 if let Some(m) = sema.to_module_def(file_id) {
114 m.diagnostics(db, &mut sink); 117 m.diagnostics(db, &mut sink);
115 }; 118 };
116 drop(sink); 119 drop(sink);
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs
index c9d0058a6..5afb23764 100644
--- a/crates/ra_ide/src/display/navigation_target.rs
+++ b/crates/ra_ide/src/display/navigation_target.rs
@@ -1,7 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use either::Either; 3use either::Either;
4use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; 4use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource};
5use ra_db::{FileId, SourceDatabase}; 5use ra_db::{FileId, SourceDatabase};
6use ra_ide_db::RootDatabase; 6use ra_ide_db::RootDatabase;
7use ra_syntax::{ 7use ra_syntax::{
@@ -11,7 +11,11 @@ use ra_syntax::{
11 TextRange, 11 TextRange,
12}; 12};
13 13
14use crate::{expand::original_range, references::NameDefinition, FileSymbol}; 14use crate::{
15 // expand::original_range,
16 references::NameDefinition,
17 FileSymbol,
18};
15 19
16use super::short_label::ShortLabel; 20use super::short_label::ShortLabel;
17 21
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs
deleted file mode 100644
index 9f3aaa3a3..000000000
--- a/crates/ra_ide/src/expand.rs
+++ /dev/null
@@ -1,102 +0,0 @@
1//! Utilities to work with files, produced by macros.
2use std::iter::successors;
3
4use hir::{InFile, Origin};
5use ra_db::FileId;
6use ra_ide_db::RootDatabase;
7use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange};
8
9use crate::FileRange;
10
11pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange {
12 if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
13 return range;
14 }
15
16 if let Some(expansion) = node.file_id.expansion_info(db) {
17 if let Some(call_node) = expansion.call_node() {
18 return FileRange {
19 file_id: call_node.file_id.original_file(db),
20 range: call_node.value.text_range(),
21 };
22 }
23 }
24
25 FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
26}
27
28fn original_range_and_origin(
29 db: &RootDatabase,
30 node: InFile<&SyntaxNode>,
31) -> Option<(FileRange, Origin)> {
32 let expansion = node.file_id.expansion_info(db)?;
33
34 // the input node has only one token ?
35 let single = node.value.first_token()? == node.value.last_token()?;
36
37 // FIXME: We should handle recurside macro expansions
38 let (range, origin) = node.value.descendants().find_map(|it| {
39 let first = it.first_token()?;
40 let last = it.last_token()?;
41
42 if !single && first == last {
43 return None;
44 }
45
46 // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
47 let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
48 let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;
49
50 if first.file_id != last.file_id || first_origin != last_origin {
51 return None;
52 }
53
54 // FIXME: Add union method in TextRange
55 Some((
56 first.with_value(union_range(first.value.text_range(), last.value.text_range())),
57 first_origin,
58 ))
59 })?;
60
61 return Some((
62 FileRange { file_id: range.file_id.original_file(db), range: range.value },
63 origin,
64 ));
65
66 fn union_range(a: TextRange, b: TextRange) -> TextRange {
67 let start = a.start().min(b.start());
68 let end = a.end().max(b.end());
69 TextRange::from_to(start, end)
70 }
71}
72
73pub(crate) fn descend_into_macros(
74 db: &RootDatabase,
75 file_id: FileId,
76 token: SyntaxToken,
77) -> InFile<SyntaxToken> {
78 let src = InFile::new(file_id.into(), token);
79
80 let source_analyzer =
81 hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None);
82
83 descend_into_macros_with_analyzer(db, &source_analyzer, src)
84}
85
86pub(crate) fn descend_into_macros_with_analyzer(
87 db: &RootDatabase,
88 source_analyzer: &hir::SourceAnalyzer,
89 src: InFile<SyntaxToken>,
90) -> InFile<SyntaxToken> {
91 successors(Some(src), |token| {
92 let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
93 let tt = macro_call.token_tree()?;
94 if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
95 return None;
96 }
97 let exp = source_analyzer.expand(db, token.with_value(&macro_call))?;
98 exp.map_token_down(db, token.as_ref())
99 })
100 .last()
101 .unwrap()
102}
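Note: the two helpers deleted above survive as methods on Semantics, which remembers the originating file for every node it hands out. A sketch of the replacement call sites, assuming the FileRange import path shown here:

    use hir::Semantics;
    use ra_db::FileRange;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{SyntaxNode, SyntaxToken};

    // Maps a token inside a macro call down into the expansion, if any;
    // otherwise returns the token unchanged.
    fn descend(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> SyntaxToken {
        sema.descend_into_macros(token)
    }

    // Maps a (possibly macro-generated) node back to a range in the file the
    // user actually edits.
    fn range_in_original_file(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> FileRange {
        sema.original_range(node)
    }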
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs
index af2783bef..f2814e684 100644
--- a/crates/ra_ide/src/expand_macro.rs
+++ b/crates/ra_ide/src/expand_macro.rs
@@ -1,7 +1,6 @@
1//! This modules implements "expand macro" functionality in the IDE 1//! This modules implements "expand macro" functionality in the IDE
2 2
3use hir::db::AstDatabase; 3use hir::Semantics;
4use ra_db::SourceDatabase;
5use ra_ide_db::RootDatabase; 4use ra_ide_db::RootDatabase;
6use ra_syntax::{ 5use ra_syntax::{
7 algo::{find_node_at_offset, replace_descendants}, 6 algo::{find_node_at_offset, replace_descendants},
@@ -17,13 +16,12 @@ pub struct ExpandedMacro {
17} 16}
18 17
19pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> { 18pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
20 let parse = db.parse(position.file_id); 19 let sema = Semantics::new(db);
21 let file = parse.tree(); 20 let file = sema.parse(position.file_id);
22 let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?; 21 let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?;
23 let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; 22 let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;
24 23
25 let source = hir::InFile::new(position.file_id.into(), mac.syntax()); 24 let expanded = expand_macro_recur(&sema, &mac)?;
26 let expanded = expand_macro_recur(db, source, source.with_value(&mac))?;
27 25
28 // FIXME: 26 // FIXME:
29 // macro expansion may lose all white space information 27 // macro expansion may lose all white space information
@@ -33,21 +31,16 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
33} 31}
34 32
35fn expand_macro_recur( 33fn expand_macro_recur(
36 db: &RootDatabase, 34 sema: &Semantics<RootDatabase>,
37 source: hir::InFile<&SyntaxNode>, 35 macro_call: &ast::MacroCall,
38 macro_call: hir::InFile<&ast::MacroCall>,
39) -> Option<SyntaxNode> { 36) -> Option<SyntaxNode> {
40 let analyzer = hir::SourceAnalyzer::new(db, source, None); 37 let mut expanded = sema.expand(macro_call)?;
41 let expansion = analyzer.expand(db, macro_call)?;
42 let macro_file_id = expansion.file_id();
43 let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
44 38
45 let children = expanded.descendants().filter_map(ast::MacroCall::cast); 39 let children = expanded.descendants().filter_map(ast::MacroCall::cast);
46 let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default(); 40 let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default();
47 41
48 for child in children.into_iter() { 42 for child in children.into_iter() {
49 let node = hir::InFile::new(macro_file_id, &child); 43 if let Some(new_node) = expand_macro_recur(sema, &child) {
50 if let Some(new_node) = expand_macro_recur(db, source, node) {
51 // Replace the whole node if it is root 44 // Replace the whole node if it is root
52 // `replace_descendants` will not replace the parent node 45 // `replace_descendants` will not replace the parent node
53 // but `SyntaxNode::descendants include itself 46 // but `SyntaxNode::descendants include itself
@@ -120,10 +113,12 @@ fn insert_whitespaces(syn: SyntaxNode) -> String {
120 113
121#[cfg(test)] 114#[cfg(test)]
122mod tests { 115mod tests {
123 use super::*;
124 use crate::mock_analysis::analysis_and_position;
125 use insta::assert_snapshot; 116 use insta::assert_snapshot;
126 117
118 use crate::mock_analysis::analysis_and_position;
119
120 use super::*;
121
127 fn check_expand_macro(fixture: &str) -> ExpandedMacro { 122 fn check_expand_macro(fixture: &str) -> ExpandedMacro {
128 let (analysis, pos) = analysis_and_position(fixture); 123 let (analysis, pos) = analysis_and_position(fixture);
129 analysis.expand_macro(pos).unwrap().unwrap() 124 analysis.expand_macro(pos).unwrap().unwrap()
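Note: the recursion collapses to one Semantics call per macro invocation because the expansion is an ordinary syntax tree. A sketch of the core loop; the real function also splices each expanded child back into its parent with replace_descendants, which is omitted here:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode, SyntaxNode};

    // Expand one macro call, then visit the macro calls visible inside the
    // expansion and expand them too.
    fn expand_one(sema: &Semantics<RootDatabase>, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
        let expanded = sema.expand(macro_call)?;
        for child in expanded.descendants().filter_map(ast::MacroCall::cast) {
            let _nested = expand_one(sema, &child);
        }
        Some(expanded)
    }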
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs
index 1e7d0621a..86e6f12d7 100644
--- a/crates/ra_ide/src/extend_selection.rs
+++ b/crates/ra_ide/src/extend_selection.rs
@@ -2,26 +2,26 @@
2 2
3use std::iter::successors; 3use std::iter::successors;
4 4
5use hir::db::AstDatabase; 5use hir::Semantics;
6use ra_db::SourceDatabase;
7use ra_ide_db::RootDatabase; 6use ra_ide_db::RootDatabase;
8use ra_syntax::{ 7use ra_syntax::{
9 algo::find_covering_element, 8 algo::{self, find_covering_element},
10 ast::{self, AstNode, AstToken}, 9 ast::{self, AstNode, AstToken},
11 Direction, NodeOrToken, SyntaxElement, 10 Direction, NodeOrToken,
12 SyntaxKind::{self, *}, 11 SyntaxKind::{self, *},
13 SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, 12 SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
14}; 13};
15 14
16use crate::{expand::descend_into_macros, FileId, FileRange}; 15use crate::FileRange;
17 16
18pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { 17pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
19 let src = db.parse(frange.file_id).tree(); 18 let sema = Semantics::new(db);
20 try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range) 19 let src = sema.parse(frange.file_id);
20 try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
21} 21}
22 22
23fn try_extend_selection( 23fn try_extend_selection(
24 db: &RootDatabase, 24 sema: &Semantics<RootDatabase>,
25 root: &SyntaxNode, 25 root: &SyntaxNode,
26 frange: FileRange, 26 frange: FileRange,
27) -> Option<TextRange> { 27) -> Option<TextRange> {
@@ -86,7 +86,7 @@ fn try_extend_selection(
86 // if we are in single token_tree, we maybe live in macro or attr 86 // if we are in single token_tree, we maybe live in macro or attr
87 if node.kind() == TOKEN_TREE { 87 if node.kind() == TOKEN_TREE {
88 if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { 88 if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
89 if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) { 89 if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
90 return Some(range); 90 return Some(range);
91 } 91 }
92 } 92 }
@@ -96,7 +96,7 @@ fn try_extend_selection(
96 return Some(node.text_range()); 96 return Some(node.text_range());
97 } 97 }
98 98
99 let node = shallowest_node(&node.into()).unwrap(); 99 let node = shallowest_node(&node.into());
100 100
101 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { 101 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
102 if let Some(range) = extend_list_item(&node) { 102 if let Some(range) = extend_list_item(&node) {
@@ -108,8 +108,7 @@ fn try_extend_selection(
108} 108}
109 109
110fn extend_tokens_from_range( 110fn extend_tokens_from_range(
111 db: &RootDatabase, 111 sema: &Semantics<RootDatabase>,
112 file_id: FileId,
113 macro_call: ast::MacroCall, 112 macro_call: ast::MacroCall,
114 original_range: TextRange, 113 original_range: TextRange,
115) -> Option<TextRange> { 114) -> Option<TextRange> {
@@ -130,25 +129,21 @@ fn extend_tokens_from_range(
130 } 129 }
131 130
132 // compute original mapped token range 131 // compute original mapped token range
133 let expanded = { 132 let extended = {
134 let first_node = descend_into_macros(db, file_id, first_token.clone()); 133 let fst_expanded = sema.descend_into_macros(first_token.clone());
135 let first_node = first_node.map(|it| it.text_range()); 134 let lst_expanded = sema.descend_into_macros(last_token.clone());
136 135 let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
137 let last_node = descend_into_macros(db, file_id, last_token.clone()); 136 lca = shallowest_node(&lca);
138 if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id { 137 if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
139 return None; 138 lca = lca.parent()?;
140 } 139 }
141 first_node.map(|it| union_range(it, last_node.value.text_range())) 140 lca
142 }; 141 };
143 142
144 // Compute parent node range 143 // Compute parent node range
145 let src = db.parse_or_expand(expanded.file_id)?;
146 let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
147
148 let validate = |token: &SyntaxToken| { 144 let validate = |token: &SyntaxToken| {
149 let node = descend_into_macros(db, file_id, token.clone()); 145 let expanded = sema.descend_into_macros(token.clone());
150 node.file_id == expanded.file_id 146 algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
151 && node.value.text_range().is_subrange(&parent.text_range())
152 }; 147 };
153 148
154 // Find the first and last text range under expanded parent 149 // Find the first and last text range under expanded parent
@@ -191,8 +186,8 @@ fn union_range(range: TextRange, r: TextRange) -> TextRange {
191} 186}
192 187
193/// Find the shallowest node with same range, which allows us to traverse siblings. 188/// Find the shallowest node with same range, which allows us to traverse siblings.
194fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> { 189fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
195 node.ancestors().take_while(|n| n.text_range() == node.text_range()).last() 190 node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
196} 191}
197 192
198fn extend_single_word_in_comment_or_string( 193fn extend_single_word_in_comment_or_string(
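Note: instead of comparing the file ids of separately mapped nodes, the new code descends both boundary tokens into the expansion and joins them with a least common ancestor. A sketch of that step; the real code additionally reduces the result to the shallowest node with the same range and may step to its parent:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{algo, SyntaxNode, SyntaxToken};

    // Map both boundary tokens through macros and join them in the expansion.
    fn extended_parent(
        sema: &Semantics<RootDatabase>,
        first_token: SyntaxToken,
        last_token: SyntaxToken,
    ) -> Option<SyntaxNode> {
        let fst = sema.descend_into_macros(first_token);
        let lst = sema.descend_into_macros(last_token);
        algo::least_common_ancestor(&fst.parent(), &lst.parent())
    }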
diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs
index feff1ec3f..6053c1bb6 100644
--- a/crates/ra_ide/src/goto_definition.rs
+++ b/crates/ra_ide/src/goto_definition.rs
@@ -1,7 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{db::AstDatabase, InFile, SourceBinder}; 3use hir::Semantics;
4use ra_ide_db::{symbol_index, RootDatabase}; 4use ra_ide_db::{defs::classify_name, symbol_index, RootDatabase};
5use ra_syntax::{ 5use ra_syntax::{
6 ast::{self}, 6 ast::{self},
7 match_ast, AstNode, 7 match_ast, AstNode,
@@ -11,8 +11,7 @@ use ra_syntax::{
11 11
12use crate::{ 12use crate::{
13 display::{ToNav, TryToNav}, 13 display::{ToNav, TryToNav},
14 expand::descend_into_macros, 14 references::classify_name_ref,
15 references::{classify_name, classify_name_ref},
16 FilePosition, NavigationTarget, RangeInfo, 15 FilePosition, NavigationTarget, RangeInfo,
17}; 16};
18 17
@@ -20,18 +19,18 @@ pub(crate) fn goto_definition(
20 db: &RootDatabase, 19 db: &RootDatabase,
21 position: FilePosition, 20 position: FilePosition,
22) -> Option<RangeInfo<Vec<NavigationTarget>>> { 21) -> Option<RangeInfo<Vec<NavigationTarget>>> {
23 let file = db.parse_or_expand(position.file_id.into())?; 22 let sema = Semantics::new(db);
23 let file = sema.parse(position.file_id).syntax().clone();
24 let original_token = pick_best(file.token_at_offset(position.offset))?; 24 let original_token = pick_best(file.token_at_offset(position.offset))?;
25 let token = descend_into_macros(db, position.file_id, original_token.clone()); 25 let token = sema.descend_into_macros(original_token.clone());
26 26
27 let mut sb = SourceBinder::new(db);
28 let nav_targets = match_ast! { 27 let nav_targets = match_ast! {
29 match (token.value.parent()) { 28 match (token.parent()) {
30 ast::NameRef(name_ref) => { 29 ast::NameRef(name_ref) => {
31 reference_definition(&mut sb, token.with_value(&name_ref)).to_vec() 30 reference_definition(&sema, &name_ref).to_vec()
32 }, 31 },
33 ast::Name(name) => { 32 ast::Name(name) => {
34 name_definition(&mut sb, token.with_value(&name))? 33 name_definition(&sema, &name)?
35 }, 34 },
36 _ => return None, 35 _ => return None,
37 } 36 }
@@ -68,33 +67,33 @@ impl ReferenceResult {
68} 67}
69 68
70pub(crate) fn reference_definition( 69pub(crate) fn reference_definition(
71 sb: &mut SourceBinder<RootDatabase>, 70 sema: &Semantics<RootDatabase>,
72 name_ref: InFile<&ast::NameRef>, 71 name_ref: &ast::NameRef,
73) -> ReferenceResult { 72) -> ReferenceResult {
74 use self::ReferenceResult::*; 73 use self::ReferenceResult::*;
75 74
76 let name_kind = classify_name_ref(sb, name_ref); 75 let name_kind = classify_name_ref(sema, name_ref);
77 if let Some(def) = name_kind { 76 if let Some(def) = name_kind {
78 return match def.try_to_nav(sb.db) { 77 return match def.try_to_nav(sema.db) {
79 Some(nav) => ReferenceResult::Exact(nav), 78 Some(nav) => ReferenceResult::Exact(nav),
80 None => ReferenceResult::Approximate(Vec::new()), 79 None => ReferenceResult::Approximate(Vec::new()),
81 }; 80 };
82 } 81 }
83 82
84 // Fallback index based approach: 83 // Fallback index based approach:
85 let navs = symbol_index::index_resolve(sb.db, name_ref.value) 84 let navs = symbol_index::index_resolve(sema.db, name_ref)
86 .into_iter() 85 .into_iter()
87 .map(|s| s.to_nav(sb.db)) 86 .map(|s| s.to_nav(sema.db))
88 .collect(); 87 .collect();
89 Approximate(navs) 88 Approximate(navs)
90} 89}
91 90
92fn name_definition( 91fn name_definition(
93 sb: &mut SourceBinder<RootDatabase>, 92 sema: &Semantics<RootDatabase>,
94 name: InFile<&ast::Name>, 93 name: &ast::Name,
95) -> Option<Vec<NavigationTarget>> { 94) -> Option<Vec<NavigationTarget>> {
96 let def = classify_name(sb, name)?; 95 let def = classify_name(sema, name)?;
97 let nav = def.try_to_nav(sb.db)?; 96 let nav = def.try_to_nav(sema.db)?;
98 Some(vec![nav]) 97 Some(vec![nav])
99} 98}
100 99
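Note: with Semantics the classification works on plain AST nodes, no InFile wrapper and no mutable binder. A sketch of the whole lookup pipeline in miniature; right_biased() stands in for the crate's pick_best heuristic and is an assumption:

    use hir::Semantics;
    use ra_db::FilePosition;
    use ra_ide_db::{defs::{classify_name, NameDefinition}, RootDatabase};
    use ra_syntax::{ast, AstNode};

    // Parse, pick the token, map it through macros, classify the AST node.
    fn definition_at(db: &RootDatabase, position: FilePosition) -> Option<NameDefinition> {
        let sema = Semantics::new(db);
        let file = sema.parse(position.file_id);
        let token = file.syntax().token_at_offset(position.offset).right_biased()?;
        let token = sema.descend_into_macros(token);
        let name = token.parent().ancestors().find_map(ast::Name::cast)?;
        classify_name(&sema, &name)
    }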
diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs
index 69940fc36..869a4708b 100644
--- a/crates/ra_ide/src/goto_type_definition.rs
+++ b/crates/ra_ide/src/goto_type_definition.rs
@@ -1,31 +1,31 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::db::AstDatabase;
4use ra_ide_db::RootDatabase; 3use ra_ide_db::RootDatabase;
5use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; 4use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
6 5
7use crate::{ 6use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo};
8 display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo,
9};
10 7
11pub(crate) fn goto_type_definition( 8pub(crate) fn goto_type_definition(
12 db: &RootDatabase, 9 db: &RootDatabase,
13 position: FilePosition, 10 position: FilePosition,
14) -> Option<RangeInfo<Vec<NavigationTarget>>> { 11) -> Option<RangeInfo<Vec<NavigationTarget>>> {
15 let file = db.parse_or_expand(position.file_id.into())?; 12 let sema = hir::Semantics::new(db);
16 let token = pick_best(file.token_at_offset(position.offset))?; 13
17 let token = descend_into_macros(db, position.file_id, token); 14 let file: ast::SourceFile = sema.parse(position.file_id);
18 15 let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
19 let node = token 16 let token: SyntaxToken = sema.descend_into_macros(token);
20 .value 17
21 .ancestors() 18 let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
22 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; 19 let ty = match_ast! {
23 20 match node {
24 let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None); 21 ast::Expr(expr) => { sema.type_of_expr(&expr)? },
22 ast::Pat(pat) => { sema.type_of_pat(&pat)? },
23 _ => { return None },
24 }
25 };
25 26
26 let ty: hir::Type = ast::Expr::cast(node.clone()) 27 Some((ty, node))
27 .and_then(|e| analyzer.type_of(db, &e)) 28 })?;
28 .or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?;
29 29
30 let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; 30 let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?;
31 31
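Note: the type lookup now walks ancestors across macro boundaries and asks Semantics for the type of the first expression or pattern it meets. A sketch of that search, mirroring the hunk above:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, match_ast, AstNode, SyntaxToken};

    // First enclosing expression or pattern that has a type, crossing macro
    // boundaries via ancestors_with_macros.
    fn type_under_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<hir::Type> {
        sema.ancestors_with_macros(token.parent()).find_map(|node| {
            match_ast! {
                match node {
                    ast::Expr(it) => { sema.type_of_expr(&it) },
                    ast::Pat(it) => { sema.type_of_pat(&it) },
                    _ => { None },
                }
            }
        })
    }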
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs
index 1c6ca36df..ace33c079 100644
--- a/crates/ra_ide/src/hover.rs
+++ b/crates/ra_ide/src/hover.rs
@@ -1,8 +1,10 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder}; 3use hir::{Adt, HasSource, HirDisplay, Semantics};
4use ra_db::SourceDatabase; 4use ra_ide_db::{
5use ra_ide_db::{defs::NameDefinition, RootDatabase}; 5 defs::{classify_name, NameDefinition},
6 RootDatabase,
7};
6use ra_syntax::{ 8use ra_syntax::{
7 algo::find_covering_element, 9 algo::find_covering_element,
8 ast::{self, DocCommentsOwner}, 10 ast::{self, DocCommentsOwner},
@@ -13,8 +15,7 @@ use ra_syntax::{
13 15
14use crate::{ 16use crate::{
15 display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, 17 display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel},
16 expand::{descend_into_macros, original_range}, 18 references::classify_name_ref,
17 references::{classify_name, classify_name_ref},
18 FilePosition, FileRange, RangeInfo, 19 FilePosition, FileRange, RangeInfo,
19}; 20};
20 21
@@ -143,25 +144,25 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: NameDefinition) -> Option<S
143} 144}
144 145
145pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { 146pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
146 let file = db.parse_or_expand(position.file_id.into())?; 147 let sema = Semantics::new(db);
148 let file = sema.parse(position.file_id).syntax().clone();
147 let token = pick_best(file.token_at_offset(position.offset))?; 149 let token = pick_best(file.token_at_offset(position.offset))?;
148 let token = descend_into_macros(db, position.file_id, token); 150 let token = sema.descend_into_macros(token);
149 151
150 let mut res = HoverResult::new(); 152 let mut res = HoverResult::new();
151 153
152 let mut sb = SourceBinder::new(db);
153 if let Some((node, name_kind)) = match_ast! { 154 if let Some((node, name_kind)) = match_ast! {
154 match (token.value.parent()) { 155 match (token.parent()) {
155 ast::NameRef(name_ref) => { 156 ast::NameRef(name_ref) => {
156 classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().clone(), d)) 157 classify_name_ref(&sema, &name_ref).map(|d| (name_ref.syntax().clone(), d))
157 }, 158 },
158 ast::Name(name) => { 159 ast::Name(name) => {
159 classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().clone(), d)) 160 classify_name(&sema, &name).map(|d| (name.syntax().clone(), d))
160 }, 161 },
161 _ => None, 162 _ => None,
162 } 163 }
163 } { 164 } {
164 let range = original_range(db, token.with_value(&node)).range; 165 let range = sema.original_range(&node).range;
165 res.extend(hover_text_from_name_kind(db, name_kind)); 166 res.extend(hover_text_from_name_kind(db, name_kind));
166 167
167 if !res.is_empty() { 168 if !res.is_empty() {
@@ -170,11 +171,10 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
170 } 171 }
171 172
172 let node = token 173 let node = token
173 .value
174 .ancestors() 174 .ancestors()
175 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; 175 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
176 176
177 let frange = original_range(db, token.with_value(&node)); 177 let frange = sema.original_range(&node);
178 res.extend(type_of(db, frange).map(rust_code_markup)); 178 res.extend(type_of(db, frange).map(rust_code_markup));
179 if res.is_empty() { 179 if res.is_empty() {
180 return None; 180 return None;
@@ -197,19 +197,17 @@ fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
197} 197}
198 198
199pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { 199pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
200 let parse = db.parse(frange.file_id); 200 let sema = Semantics::new(db);
201 let leaf_node = find_covering_element(parse.tree().syntax(), frange.range); 201 let source_file = sema.parse(frange.file_id);
202 let leaf_node = find_covering_element(source_file.syntax(), frange.range);
202 // if we picked identifier, expand to pattern/expression 203 // if we picked identifier, expand to pattern/expression
203 let node = leaf_node 204 let node = leaf_node
204 .ancestors() 205 .ancestors()
205 .take_while(|it| it.text_range() == leaf_node.text_range()) 206 .take_while(|it| it.text_range() == leaf_node.text_range())
206 .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; 207 .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
207 let analyzer = 208 let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| sema.type_of_expr(&e)) {
208 hir::SourceAnalyzer::new(db, hir::InFile::new(frange.file_id.into(), &node), None);
209 let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
210 {
211 ty 209 ty
212 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) { 210 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| sema.type_of_pat(&p)) {
213 ty 211 ty
214 } else { 212 } else {
215 return None; 213 return None;
@@ -219,11 +217,12 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
219 217
220#[cfg(test)] 218#[cfg(test)]
221mod tests { 219mod tests {
220 use ra_db::FileLoader;
221 use ra_syntax::TextRange;
222
222 use crate::mock_analysis::{ 223 use crate::mock_analysis::{
223 analysis_and_position, single_file_with_position, single_file_with_range, 224 analysis_and_position, single_file_with_position, single_file_with_range,
224 }; 225 };
225 use ra_db::FileLoader;
226 use ra_syntax::TextRange;
227 226
228 fn trim_markup(s: &str) -> &str { 227 fn trim_markup(s: &str) -> &str {
229 s.trim_start_matches("```rust\n").trim_end_matches("\n```") 228 s.trim_start_matches("```rust\n").trim_end_matches("\n```")
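Note: type_of in hover keeps its expression-or-pattern fallback, just routed through Semantics instead of a per-node SourceAnalyzer. A sketch of that fallback, with the HirDisplay formatting left out:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode, SyntaxNode};

    // Try the node as an expression first, then as a pattern.
    fn type_of_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> Option<hir::Type> {
        if let Some(expr) = ast::Expr::cast(node.clone()) {
            return sema.type_of_expr(&expr);
        }
        let pat = ast::Pat::cast(node.clone())?;
        sema.type_of_pat(&pat)
    }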
diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs
index 64a2dadc8..bf82b2a16 100644
--- a/crates/ra_ide/src/impls.rs
+++ b/crates/ra_ide/src/impls.rs
@@ -1,7 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{Crate, ImplBlock, SourceBinder}; 3use hir::{Crate, ImplBlock, Semantics};
4use ra_db::SourceDatabase;
5use ra_ide_db::RootDatabase; 4use ra_ide_db::RootDatabase;
6use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; 5use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
7 6
@@ -11,21 +10,21 @@ pub(crate) fn goto_implementation(
11 db: &RootDatabase, 10 db: &RootDatabase,
12 position: FilePosition, 11 position: FilePosition,
13) -> Option<RangeInfo<Vec<NavigationTarget>>> { 12) -> Option<RangeInfo<Vec<NavigationTarget>>> {
14 let parse = db.parse(position.file_id); 13 let sema = Semantics::new(db);
15 let syntax = parse.tree().syntax().clone(); 14 let source_file = sema.parse(position.file_id);
16 let mut sb = SourceBinder::new(db); 15 let syntax = source_file.syntax().clone();
17 16
18 let krate = sb.to_module_def(position.file_id)?.krate(); 17 let krate = sema.to_module_def(position.file_id)?.krate();
19 18
20 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) { 19 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
21 return Some(RangeInfo::new( 20 return Some(RangeInfo::new(
22 nominal_def.syntax().text_range(), 21 nominal_def.syntax().text_range(),
23 impls_for_def(&mut sb, position, &nominal_def, krate)?, 22 impls_for_def(&sema, &nominal_def, krate)?,
24 )); 23 ));
25 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) { 24 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
26 return Some(RangeInfo::new( 25 return Some(RangeInfo::new(
27 trait_def.syntax().text_range(), 26 trait_def.syntax().text_range(),
28 impls_for_trait(&mut sb, position, &trait_def, krate)?, 27 impls_for_trait(&sema, &trait_def, krate)?,
29 )); 28 ));
30 } 29 }
31 30
@@ -33,49 +32,37 @@ pub(crate) fn goto_implementation(
33} 32}
34 33
35fn impls_for_def( 34fn impls_for_def(
36 sb: &mut SourceBinder<RootDatabase>, 35 sema: &Semantics<RootDatabase>,
37 position: FilePosition,
38 node: &ast::NominalDef, 36 node: &ast::NominalDef,
39 krate: Crate, 37 krate: Crate,
40) -> Option<Vec<NavigationTarget>> { 38) -> Option<Vec<NavigationTarget>> {
41 let ty = match node { 39 let ty = match node {
42 ast::NominalDef::StructDef(def) => { 40 ast::NominalDef::StructDef(def) => sema.to_def(def)?.ty(sema.db),
43 let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; 41 ast::NominalDef::EnumDef(def) => sema.to_def(def)?.ty(sema.db),
44 sb.to_def(src)?.ty(sb.db) 42 ast::NominalDef::UnionDef(def) => sema.to_def(def)?.ty(sema.db),
45 }
46 ast::NominalDef::EnumDef(def) => {
47 let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
48 sb.to_def(src)?.ty(sb.db)
49 }
50 ast::NominalDef::UnionDef(def) => {
51 let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
52 sb.to_def(src)?.ty(sb.db)
53 }
54 }; 43 };
55 44
56 let impls = ImplBlock::all_in_crate(sb.db, krate); 45 let impls = ImplBlock::all_in_crate(sema.db, krate);
57 46
58 Some( 47 Some(
59 impls 48 impls
60 .into_iter() 49 .into_iter()
61 .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sb.db))) 50 .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db)))
62 .map(|imp| imp.to_nav(sb.db)) 51 .map(|imp| imp.to_nav(sema.db))
63 .collect(), 52 .collect(),
64 ) 53 )
65} 54}
66 55
67fn impls_for_trait( 56fn impls_for_trait(
68 sb: &mut SourceBinder<RootDatabase>, 57 sema: &Semantics<RootDatabase>,
69 position: FilePosition,
70 node: &ast::TraitDef, 58 node: &ast::TraitDef,
71 krate: Crate, 59 krate: Crate,
72) -> Option<Vec<NavigationTarget>> { 60) -> Option<Vec<NavigationTarget>> {
73 let src = hir::InFile { file_id: position.file_id.into(), value: node.clone() }; 61 let tr = sema.to_def(node)?;
74 let tr = sb.to_def(src)?;
75 62
76 let impls = ImplBlock::for_trait(sb.db, krate, tr); 63 let impls = ImplBlock::for_trait(sema.db, krate, tr);
77 64
78 Some(impls.into_iter().map(|imp| imp.to_nav(sb.db)).collect()) 65 Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect())
79} 66}
80 67
81#[cfg(test)] 68#[cfg(test)]
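Note: to_def now works directly on the AST definition, so the three near-identical InFile constructions collapse to one call per variant. A sketch of the struct case, returning the matching impl blocks instead of navigation targets:

    use hir::{Crate, ImplBlock, Semantics};
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;

    // AST definition -> hir type -> impls whose target type matches.
    fn impls_of_struct(
        sema: &Semantics<RootDatabase>,
        def: &ast::StructDef,
        krate: Crate,
    ) -> Vec<ImplBlock> {
        let ty = match sema.to_def(def) {
            Some(it) => it.ty(sema.db),
            None => return Vec::new(),
        };
        ImplBlock::all_in_crate(sema.db, krate)
            .into_iter()
            .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db)))
            .collect()
    }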
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index b42aa1523..35e3f782d 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -1,12 +1,11 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{Adt, HirDisplay, SourceAnalyzer, SourceBinder, Type}; 3use hir::{Adt, HirDisplay, Semantics, Type};
4use once_cell::unsync::Lazy;
5use ra_ide_db::RootDatabase; 4use ra_ide_db::RootDatabase;
6use ra_prof::profile; 5use ra_prof::profile;
7use ra_syntax::{ 6use ra_syntax::{
8 ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, 7 ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner},
9 match_ast, SmolStr, SourceFile, SyntaxNode, TextRange, 8 match_ast, SmolStr, SyntaxNode, TextRange,
10}; 9};
11 10
12use crate::{FileId, FunctionSignature}; 11use crate::{FileId, FunctionSignature};
@@ -27,38 +26,36 @@ pub struct InlayHint {
27pub(crate) fn inlay_hints( 26pub(crate) fn inlay_hints(
28 db: &RootDatabase, 27 db: &RootDatabase,
29 file_id: FileId, 28 file_id: FileId,
30 file: &SourceFile,
31 max_inlay_hint_length: Option<usize>, 29 max_inlay_hint_length: Option<usize>,
32) -> Vec<InlayHint> { 30) -> Vec<InlayHint> {
33 let mut sb = SourceBinder::new(db); 31 let sema = Semantics::new(db);
32 let file = sema.parse(file_id);
34 let mut res = Vec::new(); 33 let mut res = Vec::new();
35 for node in file.syntax().descendants() { 34 for node in file.syntax().descendants() {
36 get_inlay_hints(&mut res, &mut sb, file_id, &node, max_inlay_hint_length); 35 get_inlay_hints(&mut res, &sema, &node, max_inlay_hint_length);
37 } 36 }
38 res 37 res
39} 38}
40 39
41fn get_inlay_hints( 40fn get_inlay_hints(
42 acc: &mut Vec<InlayHint>, 41 acc: &mut Vec<InlayHint>,
43 sb: &mut SourceBinder<RootDatabase>, 42 sema: &Semantics<RootDatabase>,
44 file_id: FileId,
45 node: &SyntaxNode, 43 node: &SyntaxNode,
46 max_inlay_hint_length: Option<usize>, 44 max_inlay_hint_length: Option<usize>,
47) -> Option<()> { 45) -> Option<()> {
48 let _p = profile("get_inlay_hints"); 46 let _p = profile("get_inlay_hints");
49 let db = sb.db; 47 let db = sema.db;
50 let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None));
51 match_ast! { 48 match_ast! {
52 match node { 49 match node {
53 ast::CallExpr(it) => { 50 ast::CallExpr(it) => {
54 get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); 51 get_param_name_hints(acc, sema, ast::Expr::from(it));
55 }, 52 },
56 ast::MethodCallExpr(it) => { 53 ast::MethodCallExpr(it) => {
57 get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); 54 get_param_name_hints(acc, sema, ast::Expr::from(it));
58 }, 55 },
59 ast::BindPat(it) => { 56 ast::BindPat(it) => {
60 let pat = ast::Pat::from(it.clone()); 57 let pat = ast::Pat::from(it.clone());
61 let ty = analyzer.type_of_pat(db, &pat)?; 58 let ty = sema.type_of_pat(&pat)?;
62 59
63 if should_not_display_type_hint(db, &it, &ty) { 60 if should_not_display_type_hint(db, &it, &ty) {
64 return None; 61 return None;
@@ -125,8 +122,7 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_
125 122
126fn get_param_name_hints( 123fn get_param_name_hints(
127 acc: &mut Vec<InlayHint>, 124 acc: &mut Vec<InlayHint>,
128 db: &RootDatabase, 125 sema: &Semantics<RootDatabase>,
129 analyzer: &SourceAnalyzer,
130 expr: ast::Expr, 126 expr: ast::Expr,
131) -> Option<()> { 127) -> Option<()> {
132 let args = match &expr { 128 let args = match &expr {
@@ -138,7 +134,7 @@ fn get_param_name_hints(
138 // we need args len to determine whether to skip or not the &self parameter 134 // we need args len to determine whether to skip or not the &self parameter
139 .collect::<Vec<_>>(); 135 .collect::<Vec<_>>();
140 136
141 let fn_signature = get_fn_signature(db, analyzer, &expr)?; 137 let fn_signature = get_fn_signature(sema, &expr)?;
142 let n_params_to_skip = 138 let n_params_to_skip =
143 if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() { 139 if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() {
144 1 140 1
@@ -184,28 +180,26 @@ fn should_show_param_hint(
184 true 180 true
185} 181}
186 182
187fn get_fn_signature( 183fn get_fn_signature(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<FunctionSignature> {
188 db: &RootDatabase,
189 analyzer: &SourceAnalyzer,
190 expr: &ast::Expr,
191) -> Option<FunctionSignature> {
192 match expr { 184 match expr {
193 ast::Expr::CallExpr(expr) => { 185 ast::Expr::CallExpr(expr) => {
194 // FIXME: Type::as_callable is broken for closures 186 // FIXME: Type::as_callable is broken for closures
195 let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; 187 let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
196 match callable_def { 188 match callable_def {
197 hir::CallableDef::FunctionId(it) => { 189 hir::CallableDef::FunctionId(it) => {
198 Some(FunctionSignature::from_hir(db, it.into())) 190 Some(FunctionSignature::from_hir(sema.db, it.into()))
191 }
192 hir::CallableDef::StructId(it) => {
193 FunctionSignature::from_struct(sema.db, it.into())
199 } 194 }
200 hir::CallableDef::StructId(it) => FunctionSignature::from_struct(db, it.into()),
201 hir::CallableDef::EnumVariantId(it) => { 195 hir::CallableDef::EnumVariantId(it) => {
202 FunctionSignature::from_enum_variant(db, it.into()) 196 FunctionSignature::from_enum_variant(sema.db, it.into())
203 } 197 }
204 } 198 }
205 } 199 }
206 ast::Expr::MethodCallExpr(expr) => { 200 ast::Expr::MethodCallExpr(expr) => {
207 let fn_def = analyzer.resolve_method_call(&expr)?; 201 let fn_def = sema.resolve_method_call(&expr)?;
208 Some(FunctionSignature::from_hir(db, fn_def)) 202 Some(FunctionSignature::from_hir(sema.db, fn_def))
209 } 203 }
210 _ => None, 204 _ => None,
211 } 205 }
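Note: the lazily-built SourceAnalyzer is gone; the hint code asks Semantics directly for the two facts it needs. A sketch of those two queries; the hir::Function and hir::Type return types follow the calls in the hunk and are an assumption:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;

    // Type hint for a binding pattern.
    fn bind_pat_type(sema: &Semantics<RootDatabase>, pat: &ast::BindPat) -> Option<hir::Type> {
        sema.type_of_pat(&ast::Pat::from(pat.clone()))
    }

    // Parameter-name hints need the resolved target of a method call.
    fn method_target(sema: &Semantics<RootDatabase>, call: &ast::MethodCallExpr) -> Option<hir::Function> {
        sema.resolve_method_call(call)
    }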
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index d22870669..f31d3c295 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -35,7 +35,6 @@ mod typing;
35mod matching_brace; 35mod matching_brace;
36mod display; 36mod display;
37mod inlay_hints; 37mod inlay_hints;
38mod expand;
39mod expand_macro; 38mod expand_macro;
40mod ssr; 39mod ssr;
41 40
@@ -319,9 +318,7 @@ impl Analysis {
319 file_id: FileId, 318 file_id: FileId,
320 max_inlay_hint_length: Option<usize>, 319 max_inlay_hint_length: Option<usize>,
321 ) -> Cancelable<Vec<InlayHint>> { 320 ) -> Cancelable<Vec<InlayHint>> {
322 self.with_db(|db| { 321 self.with_db(|db| inlay_hints::inlay_hints(db, file_id, max_inlay_hint_length))
323 inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length)
324 })
325 } 322 }
326 323
327 /// Returns the set of folding ranges. 324 /// Returns the set of folding ranges.
diff --git a/crates/ra_ide/src/marks.rs b/crates/ra_ide/src/marks.rs
index bcb67e373..7b8b727b4 100644
--- a/crates/ra_ide/src/marks.rs
+++ b/crates/ra_ide/src/marks.rs
@@ -11,4 +11,5 @@ test_utils::marks!(
11 call_info_bad_offset 11 call_info_bad_offset
12 dont_complete_current_use 12 dont_complete_current_use
13 test_resolve_parent_module_on_module_decl 13 test_resolve_parent_module_on_module_decl
14 search_filters_by_range
14); 15);
diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs
index af14d6ab3..2c4bdb039 100644
--- a/crates/ra_ide/src/parent_module.rs
+++ b/crates/ra_ide/src/parent_module.rs
@@ -1,6 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use ra_db::{CrateId, FileId, FilePosition, SourceDatabase}; 3use hir::Semantics;
4use ra_db::{CrateId, FileId, FilePosition};
4use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
5use ra_syntax::{ 6use ra_syntax::{
6 algo::find_node_at_offset, 7 algo::find_node_at_offset,
@@ -13,10 +14,10 @@ use crate::NavigationTarget;
13/// This returns `Vec` because a module may be included from several places. We 14/// This returns `Vec` because a module may be included from several places. We
14/// don't handle this case yet though, so the Vec has length at most one. 15/// don't handle this case yet though, so the Vec has length at most one.
15pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { 16pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
16 let mut sb = hir::SourceBinder::new(db); 17 let sema = Semantics::new(db);
17 let parse = db.parse(position.file_id); 18 let source_file = sema.parse(position.file_id);
18 19
19 let mut module = find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset); 20 let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
20 21
21 // If cursor is literally on `mod foo`, go to the grandpa. 22 // If cursor is literally on `mod foo`, go to the grandpa.
22 if let Some(m) = &module { 23 if let Some(m) = &module {
@@ -30,8 +31,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
30 } 31 }
31 32
32 let module = match module { 33 let module = match module {
33 Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)), 34 Some(module) => sema.to_def(&module),
34 None => sb.to_module_def(position.file_id), 35 None => sema.to_module_def(position.file_id),
35 }; 36 };
36 let module = match module { 37 let module = match module {
37 None => return Vec::new(), 38 None => return Vec::new(),
@@ -43,8 +44,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
43 44
44/// Returns `Vec` for the same reason as `parent_module` 45/// Returns `Vec` for the same reason as `parent_module`
45pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { 46pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
46 let mut sb = hir::SourceBinder::new(db); 47 let sema = Semantics::new(db);
47 let module = match sb.to_module_def(file_id) { 48 let module = match sema.to_module_def(file_id) {
48 Some(it) => it, 49 Some(it) => it,
49 None => return Vec::new(), 50 None => return Vec::new(),
50 }; 51 };
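Note: the module is resolved either from the `mod` item under the cursor or from the file as a whole, both through the same Semantics handle. A sketch of those two branches:

    use hir::Semantics;
    use ra_db::FilePosition;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};

    // `mod` item under the cursor if there is one, else the module owning the file.
    fn module_at(db: &RootDatabase, position: FilePosition) -> Option<hir::Module> {
        let sema = Semantics::new(db);
        let source_file = sema.parse(position.file_id);
        match find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset) {
            Some(module) => sema.to_def(&module),
            None => sema.to_module_def(position.file_id),
        }
    }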
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs
index aadc2dbcb..baa8a4d29 100644
--- a/crates/ra_ide/src/references.rs
+++ b/crates/ra_ide/src/references.rs
@@ -13,25 +13,22 @@ mod classify;
13mod rename; 13mod rename;
14mod search_scope; 14mod search_scope;
15 15
16use crate::expand::descend_into_macros_with_analyzer; 16use hir::Semantics;
17use hir::{InFile, SourceBinder};
18use once_cell::unsync::Lazy; 17use once_cell::unsync::Lazy;
19use ra_db::{SourceDatabase, SourceDatabaseExt}; 18use ra_db::SourceDatabaseExt;
20use ra_ide_db::RootDatabase; 19use ra_ide_db::RootDatabase;
21use ra_prof::profile; 20use ra_prof::profile;
22use ra_syntax::{ 21use ra_syntax::{
23 algo::find_node_at_offset, 22 algo::find_node_at_offset,
24 ast::{self, NameOwner}, 23 ast::{self, NameOwner},
25 match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, 24 match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset,
26}; 25};
26use test_utils::tested_by;
27 27
28use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; 28use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo};
29 29
30pub(crate) use self::{ 30pub(crate) use self::{classify::classify_name_ref, rename::rename};
31 classify::{classify_name, classify_name_ref}, 31pub(crate) use ra_ide_db::defs::{classify_name, NameDefinition};
32 rename::rename,
33};
34pub(crate) use ra_ide_db::defs::NameDefinition;
35 32
36pub use self::search_scope::SearchScope; 33pub use self::search_scope::SearchScope;
37 34
@@ -114,8 +111,8 @@ pub(crate) fn find_all_refs(
114 position: FilePosition, 111 position: FilePosition,
115 search_scope: Option<SearchScope>, 112 search_scope: Option<SearchScope>,
116) -> Option<RangeInfo<ReferenceSearchResult>> { 113) -> Option<RangeInfo<ReferenceSearchResult>> {
117 let parse = db.parse(position.file_id); 114 let sema = Semantics::new(db);
118 let syntax = parse.tree().syntax().clone(); 115 let syntax = sema.parse(position.file_id).syntax().clone();
119 116
120 let (opt_name, search_kind) = 117 let (opt_name, search_kind) =
121 if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { 118 if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) {
@@ -124,7 +121,7 @@ pub(crate) fn find_all_refs(
124 (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) 121 (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other)
125 }; 122 };
126 123
127 let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?; 124 let RangeInfo { range, info: (name, def) } = find_name(&sema, &syntax, position, opt_name)?;
128 let declaration = def.try_to_nav(db)?; 125 let declaration = def.try_to_nav(db)?;
129 126
130 let search_scope = { 127 let search_scope = {
@@ -152,19 +149,18 @@ pub(crate) fn find_all_refs(
152} 149}
153 150
154fn find_name( 151fn find_name(
155 db: &RootDatabase, 152 sema: &Semantics<RootDatabase>,
156 syntax: &SyntaxNode, 153 syntax: &SyntaxNode,
157 position: FilePosition, 154 position: FilePosition,
158 opt_name: Option<ast::Name>, 155 opt_name: Option<ast::Name>,
159) -> Option<RangeInfo<(String, NameDefinition)>> { 156) -> Option<RangeInfo<(String, NameDefinition)>> {
160 let mut sb = SourceBinder::new(db);
161 if let Some(name) = opt_name { 157 if let Some(name) = opt_name {
162 let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; 158 let def = classify_name(sema, &name)?;
163 let range = name.syntax().text_range(); 159 let range = name.syntax().text_range();
164 return Some(RangeInfo::new(range, (name.text().to_string(), def))); 160 return Some(RangeInfo::new(range, (name.text().to_string(), def)));
165 } 161 }
166 let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?; 162 let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
167 let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?; 163 let def = classify_name_ref(sema, &name_ref)?;
168 let range = name_ref.syntax().text_range(); 164 let range = name_ref.syntax().text_range();
169 Some(RangeInfo::new(range, (name_ref.text().to_string(), def))) 165 Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
170} 166}
@@ -182,64 +178,53 @@ fn process_definition(
182 178
183 for (file_id, search_range) in scope { 179 for (file_id, search_range) in scope {
184 let text = db.file_text(file_id); 180 let text = db.file_text(file_id);
181 let search_range =
182 search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text)));
185 183
186 let parse = Lazy::new(|| SourceFile::parse(&text)); 184 let sema = Semantics::new(db);
187 let mut sb = Lazy::new(|| SourceBinder::new(db)); 185 let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
188 let mut analyzer = None;
189 186
190 for (idx, _) in text.match_indices(pat) { 187 for (idx, _) in text.match_indices(pat) {
191 let offset = TextUnit::from_usize(idx); 188 let offset = TextUnit::from_usize(idx);
189 if !search_range.contains_inclusive(offset) {
190 tested_by!(search_filters_by_range);
191 continue;
192 }
192 193
193 let (name_ref, range) = if let Some(name_ref) = 194 let name_ref =
194 find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset) 195 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) {
195 { 196 name_ref
196 let range = name_ref.syntax().text_range();
197 (InFile::new(file_id.into(), name_ref), range)
198 } else {
199 // Handle macro token cases
200 let t = match parse.tree().syntax().token_at_offset(offset) {
201 TokenAtOffset::None => continue,
202 TokenAtOffset::Single(t) => t,
203 TokenAtOffset::Between(_, t) => t,
204 };
205 let range = t.text_range();
206 let analyzer = analyzer.get_or_insert_with(|| {
207 sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None)
208 });
209 let expanded = descend_into_macros_with_analyzer(
210 db,
211 &analyzer,
212 InFile::new(file_id.into(), t),
213 );
214 if let Some(token) = ast::NameRef::cast(expanded.value.parent()) {
215 (expanded.with_value(token), range)
216 } else { 197 } else {
217 continue; 198 // Handle macro token cases
218 } 199 let token = match tree.token_at_offset(offset) {
219 }; 200 TokenAtOffset::None => continue,
201 TokenAtOffset::Single(t) => t,
202 TokenAtOffset::Between(_, t) => t,
203 };
204 let expanded = sema.descend_into_macros(token);
205 match ast::NameRef::cast(expanded.parent()) {
206 Some(name_ref) => name_ref,
207 _ => continue,
208 }
209 };
220 210
221 if let Some(search_range) = search_range {
222 if !range.is_subrange(&search_range) {
223 continue;
224 }
225 }
226 // FIXME: reuse sb 211 // FIXME: reuse sb
227 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 212 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
228 213
229 if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) { 214 if let Some(d) = classify_name_ref(&sema, &name_ref) {
230 if d == def { 215 if d == def {
231 let kind = if is_record_lit_name_ref(&name_ref.value) 216 let kind =
232 || is_call_expr_name_ref(&name_ref.value) 217 if is_record_lit_name_ref(&name_ref) || is_call_expr_name_ref(&name_ref) {
233 { 218 ReferenceKind::StructLiteral
234 ReferenceKind::StructLiteral 219 } else {
235 } else { 220 ReferenceKind::Other
236 ReferenceKind::Other 221 };
237 }; 222
238 223 let file_range = sema.original_range(name_ref.syntax());
239 refs.push(Reference { 224 refs.push(Reference {
240 file_range: FileRange { file_id, range }, 225 file_range,
241 kind, 226 kind,
242 access: reference_access(&d, &name_ref.value), 227 access: reference_access(&d, &name_ref),
243 }); 228 });
244 } 229 }
245 } 230 }
@@ -348,6 +333,8 @@ fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool {
348 333
349#[cfg(test)] 334#[cfg(test)]
350mod tests { 335mod tests {
336 use test_utils::covers;
337
351 use crate::{ 338 use crate::{
352 mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis}, 339 mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis},
353 Declaration, Reference, ReferenceSearchResult, SearchScope, 340 Declaration, Reference, ReferenceSearchResult, SearchScope,
@@ -456,6 +443,27 @@ mod tests {
456 } 443 }
457 444
458 #[test] 445 #[test]
446 fn search_filters_by_range() {
447 covers!(search_filters_by_range);
448 let code = r#"
449 fn foo() {
450 let spam<|> = 92;
451 spam + spam
452 }
453 fn bar() {
454 let spam = 92;
455 spam + spam
456 }
457 "#;
458 let refs = get_all_refs(code);
459 check_result(
460 refs,
461 "spam BIND_PAT FileId(1) [44; 48) Other Write",
462 &["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"],
463 );
464 }
465
466 #[test]
459 fn test_find_all_refs_for_param_inside() { 467 fn test_find_all_refs_for_param_inside() {
460 let code = r#" 468 let code = r#"
461 fn foo(i : u32) -> u32 { 469 fn foo(i : u32) -> u32 {
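Note: the search-scope filter moves to the front of the loop; a missing range now means "the whole file", and out-of-range candidates are skipped before any name resolution happens, which is what the new search_filters_by_range test covers. A sketch of just that filter:

    use ra_syntax::{TextRange, TextUnit};

    // Default the scope to the full file, then test the candidate offset.
    fn in_scope(search_range: Option<TextRange>, text: &str, offset: TextUnit) -> bool {
        let search_range =
            search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(text)));
        search_range.contains_inclusive(offset)
    }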
diff --git a/crates/ra_ide/src/references/classify.rs b/crates/ra_ide/src/references/classify.rs
index 478e18871..91b21429a 100644
--- a/crates/ra_ide/src/references/classify.rs
+++ b/crates/ra_ide/src/references/classify.rs
@@ -1,34 +1,32 @@
1//! Functions that are used to classify an element from its definition or reference. 1//! Functions that are used to classify an element from its definition or reference.
2 2
3use hir::{InFile, PathResolution, SourceBinder}; 3use hir::{PathResolution, Semantics};
4use ra_ide_db::defs::NameDefinition;
5use ra_ide_db::RootDatabase;
4use ra_prof::profile; 6use ra_prof::profile;
5use ra_syntax::{ast, AstNode}; 7use ra_syntax::{ast, AstNode};
6use test_utils::tested_by; 8use test_utils::tested_by;
7 9
8use super::NameDefinition; 10pub use ra_ide_db::defs::{from_module_def, from_struct_field};
9use ra_ide_db::RootDatabase;
10
11pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field};
12 11
13pub(crate) fn classify_name_ref( 12pub(crate) fn classify_name_ref(
14 sb: &mut SourceBinder<RootDatabase>, 13 sema: &Semantics<RootDatabase>,
15 name_ref: InFile<&ast::NameRef>, 14 name_ref: &ast::NameRef,
16) -> Option<NameDefinition> { 15) -> Option<NameDefinition> {
17 let _p = profile("classify_name_ref"); 16 let _p = profile("classify_name_ref");
18 17
19 let parent = name_ref.value.syntax().parent()?; 18 let parent = name_ref.syntax().parent()?;
20 let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None);
21 19
22 if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { 20 if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
23 tested_by!(goto_def_for_methods); 21 tested_by!(goto_def_for_methods);
24 if let Some(func) = analyzer.resolve_method_call(&method_call) { 22 if let Some(func) = sema.resolve_method_call(&method_call) {
25 return Some(from_module_def(func.into())); 23 return Some(from_module_def(func.into()));
26 } 24 }
27 } 25 }
28 26
29 if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { 27 if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
30 tested_by!(goto_def_for_fields); 28 tested_by!(goto_def_for_fields);
31 if let Some(field) = analyzer.resolve_field(&field_expr) { 29 if let Some(field) = sema.resolve_field(&field_expr) {
32 return Some(from_struct_field(field)); 30 return Some(from_struct_field(field));
33 } 31 }
34 } 32 }
@@ -36,22 +34,20 @@ pub(crate) fn classify_name_ref(
36 if let Some(record_field) = ast::RecordField::cast(parent.clone()) { 34 if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
37 tested_by!(goto_def_for_record_fields); 35 tested_by!(goto_def_for_record_fields);
38 tested_by!(goto_def_for_field_init_shorthand); 36 tested_by!(goto_def_for_field_init_shorthand);
39 if let Some(field_def) = analyzer.resolve_record_field(&record_field) { 37 if let Some(field_def) = sema.resolve_record_field(&record_field) {
40 return Some(from_struct_field(field_def)); 38 return Some(from_struct_field(field_def));
41 } 39 }
42 } 40 }
43 41
44 if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { 42 if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
45 tested_by!(goto_def_for_macros); 43 tested_by!(goto_def_for_macros);
46 if let Some(macro_def) = 44 if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
47 analyzer.resolve_macro_call(sb.db, name_ref.with_value(&macro_call))
48 {
49 return Some(NameDefinition::Macro(macro_def)); 45 return Some(NameDefinition::Macro(macro_def));
50 } 46 }
51 } 47 }
52 48
53 let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?; 49 let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
54 let resolved = analyzer.resolve_path(sb.db, &path)?; 50 let resolved = sema.resolve_path(&path)?;
55 let res = match resolved { 51 let res = match resolved {
56 PathResolution::Def(def) => from_module_def(def), 52 PathResolution::Def(def) => from_module_def(def),
57 PathResolution::AssocItem(item) => { 53 PathResolution::AssocItem(item) => {
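Note: every classification branch now calls a Semantics resolver on a plain AST node. A sketch of the final path fallback, handling only the `Def` case shown in this hunk:

    use hir::{PathResolution, Semantics};
    use ra_ide_db::defs::{from_module_def, NameDefinition};
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode};

    // Walk up to the enclosing path and resolve it through Semantics.
    fn classify_via_path(
        sema: &Semantics<RootDatabase>,
        name_ref: &ast::NameRef,
    ) -> Option<NameDefinition> {
        let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
        match sema.resolve_path(&path)? {
            PathResolution::Def(def) => Some(from_module_def(def)),
            _ => None,
        }
    }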
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index bdb90020b..5b4bcf434 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -1,7 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::ModuleSource; 3use hir::{ModuleSource, Semantics};
4use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; 4use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt};
5use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
6use ra_syntax::{ 6use ra_syntax::{
7 algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, 7 algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode,
@@ -24,15 +24,16 @@ pub(crate) fn rename(
24 _ => return None, 24 _ => return None,
25 } 25 }
26 26
27 let parse = db.parse(position.file_id); 27 let sema = Semantics::new(db);
28 let source_file = sema.parse(position.file_id);
28 if let Some((ast_name, ast_module)) = 29 if let Some((ast_name, ast_module)) =
29 find_name_and_module_at_offset(parse.tree().syntax(), position) 30 find_name_and_module_at_offset(source_file.syntax(), position)
30 { 31 {
31 let range = ast_name.syntax().text_range(); 32 let range = ast_name.syntax().text_range();
32 rename_mod(db, &ast_name, &ast_module, position, new_name) 33 rename_mod(&sema, &ast_name, &ast_module, position, new_name)
33 .map(|info| RangeInfo::new(range, info)) 34 .map(|info| RangeInfo::new(range, info))
34 } else { 35 } else {
35 rename_reference(db, position, new_name) 36 rename_reference(sema.db, position, new_name)
36 } 37 }
37} 38}
38 39
@@ -54,7 +55,7 @@ fn source_edit_from_file_id_range(
54} 55}
55 56
56fn rename_mod( 57fn rename_mod(
57 db: &RootDatabase, 58 sema: &Semantics<RootDatabase>,
58 ast_name: &ast::Name, 59 ast_name: &ast::Name,
59 ast_module: &ast::Module, 60 ast_module: &ast::Module,
60 position: FilePosition, 61 position: FilePosition,
@@ -62,13 +63,12 @@ fn rename_mod(
62) -> Option<SourceChange> { 63) -> Option<SourceChange> {
63 let mut source_file_edits = Vec::new(); 64 let mut source_file_edits = Vec::new();
64 let mut file_system_edits = Vec::new(); 65 let mut file_system_edits = Vec::new();
65 let module_src = hir::InFile { file_id: position.file_id.into(), value: ast_module.clone() }; 66 if let Some(module) = sema.to_def(ast_module) {
66 if let Some(module) = hir::SourceBinder::new(db).to_def(module_src) { 67 let src = module.definition_source(sema.db);
67 let src = module.definition_source(db); 68 let file_id = src.file_id.original_file(sema.db);
68 let file_id = src.file_id.original_file(db);
69 match src.value { 69 match src.value {
70 ModuleSource::SourceFile(..) => { 70 ModuleSource::SourceFile(..) => {
71 let mod_path: RelativePathBuf = db.file_relative_path(file_id); 71 let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id);
72 // mod is defined in path/to/dir/mod.rs 72 // mod is defined in path/to/dir/mod.rs
73 let dst_path = if mod_path.file_stem() == Some("mod") { 73 let dst_path = if mod_path.file_stem() == Some("mod") {
74 mod_path 74 mod_path
@@ -82,7 +82,7 @@ fn rename_mod(
82 if let Some(path) = dst_path { 82 if let Some(path) = dst_path {
83 let move_file = FileSystemEdit::MoveFile { 83 let move_file = FileSystemEdit::MoveFile {
84 src: file_id, 84 src: file_id,
85 dst_source_root: db.file_source_root(position.file_id), 85 dst_source_root: sema.db.file_source_root(position.file_id),
86 dst_path: path, 86 dst_path: path,
87 }; 87 };
88 file_system_edits.push(move_file); 88 file_system_edits.push(move_file);
@@ -98,7 +98,7 @@ fn rename_mod(
98 }; 98 };
99 source_file_edits.push(edit); 99 source_file_edits.push(edit);
100 100
101 if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(db, position, None) { 101 if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) {
102 let ref_edits = refs.references.into_iter().map(|reference| { 102 let ref_edits = refs.references.into_iter().map(|reference| {
103 source_edit_from_file_id_range( 103 source_edit_from_file_id_range(
104 reference.file_range.file_id, 104 reference.file_range.file_id,
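The rename.rs hunk above shows the shape that repeats through this patch: the entry point builds one Semantics session, parses the file through it, and hands &sema (or sema.db where only the database is needed) to the helpers, so rename_mod no longer takes a bare &RootDatabase. A minimal sketch of that shape, assuming a hypothetical module_at_offset helper that is not part of the patch:

    use hir::Semantics;
    use ra_db::FileId;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{algo::find_node_at_offset, ast, AstNode, TextUnit};

    // Hypothetical helper, not in the patch: resolve the `ast::Module` under the
    // cursor to its `hir::Module` the way the new `rename_mod` does.
    fn module_at_offset(db: &RootDatabase, file_id: FileId, offset: TextUnit) -> Option<hir::Module> {
        let sema = Semantics::new(db);          // one session per IDE request
        let source_file = sema.parse(file_id);  // parse through `sema`, not `db.parse`
        let ast_module = find_node_at_offset::<ast::Module>(source_file.syntax(), offset)?;
        sema.to_def(&ast_module)                // plain `&ast` node, no `InFile` wrapping
    }

Compare the removed lines above, where the same lookup went through hir::SourceBinder::new(db).to_def(...) with an explicit hir::InFile wrapper and manual file bookkeeping.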
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs
index be2a67d0a..74877e90f 100644
--- a/crates/ra_ide/src/runnables.rs
+++ b/crates/ra_ide/src/runnables.rs
@@ -1,8 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{InFile, SourceBinder}; 3use hir::Semantics;
4use itertools::Itertools; 4use itertools::Itertools;
5use ra_db::SourceDatabase;
6use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
7use ra_syntax::{ 6use ra_syntax::{
8 ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, 7 ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner},
@@ -42,46 +41,33 @@ pub enum RunnableKind {
42} 41}
43 42
44pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { 43pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
45 let parse = db.parse(file_id); 44 let sema = Semantics::new(db);
46 let mut sb = SourceBinder::new(db); 45 let source_file = sema.parse(file_id);
47 parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect() 46 source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
48} 47}
49 48
50fn runnable( 49fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
51 db: &RootDatabase,
52 source_binder: &mut SourceBinder<RootDatabase>,
53 file_id: FileId,
54 item: SyntaxNode,
55) -> Option<Runnable> {
56 match_ast! { 50 match_ast! {
57 match item { 51 match item {
58 ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) }, 52 ast::FnDef(it) => { runnable_fn(sema, it) },
59 ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) }, 53 ast::Module(it) => { runnable_mod(sema, it) },
60 _ => { None }, 54 _ => None,
61 } 55 }
62 } 56 }
63} 57}
64 58
65fn runnable_fn( 59fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
66 db: &RootDatabase,
67 source_binder: &mut SourceBinder<RootDatabase>,
68 file_id: FileId,
69 fn_def: ast::FnDef,
70) -> Option<Runnable> {
71 let name_string = fn_def.name()?.text().to_string(); 60 let name_string = fn_def.name()?.text().to_string();
72 61
73 let kind = if name_string == "main" { 62 let kind = if name_string == "main" {
74 RunnableKind::Bin 63 RunnableKind::Bin
75 } else { 64 } else {
76 let test_id = if let Some(module) = source_binder 65 let test_id = if let Some(module) = sema.to_def(&fn_def).map(|def| def.module(sema.db)) {
77 .to_def(InFile::new(file_id.into(), fn_def.clone()))
78 .map(|def| def.module(db))
79 {
80 let path = module 66 let path = module
81 .path_to_root(db) 67 .path_to_root(sema.db)
82 .into_iter() 68 .into_iter()
83 .rev() 69 .rev()
84 .filter_map(|it| it.name(db)) 70 .filter_map(|it| it.name(sema.db))
85 .map(|name| name.to_string()) 71 .map(|name| name.to_string())
86 .chain(std::iter::once(name_string)) 72 .chain(std::iter::once(name_string))
87 .join("::"); 73 .join("::");
@@ -115,12 +101,7 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
115 .any(|attribute_text| attribute_text.contains("test")) 101 .any(|attribute_text| attribute_text.contains("test"))
116} 102}
117 103
118fn runnable_mod( 104fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
119 db: &RootDatabase,
120 source_binder: &mut SourceBinder<RootDatabase>,
121 file_id: FileId,
122 module: ast::Module,
123) -> Option<Runnable> {
124 let has_test_function = module 105 let has_test_function = module
125 .item_list()? 106 .item_list()?
126 .items() 107 .items()
@@ -133,9 +114,10 @@ fn runnable_mod(
133 return None; 114 return None;
134 } 115 }
135 let range = module.syntax().text_range(); 116 let range = module.syntax().text_range();
136 let module = source_binder.to_def(InFile::new(file_id.into(), module))?; 117 let module = sema.to_def(&module)?;
137 118
138 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); 119 let path =
120 module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
139 Some(Runnable { range, kind: RunnableKind::TestMod { path } }) 121 Some(Runnable { range, kind: RunnableKind::TestMod { path } })
140} 122}
141 123
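In runnables.rs the helpers drop their db, source_binder, and file_id parameters and keep only &sema plus the AST node; the module behind a function comes straight from sema.to_def. A hedged sketch of the path computation inside the new runnable_fn, pulled out into an invented test_path helper:

    use hir::Semantics;
    use itertools::Itertools;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast::{self, NameOwner};

    // Invented helper: build the `module::submodule::fn_name` test id the way the
    // new `runnable_fn` does, going ast::FnDef -> hir::Function -> hir::Module.
    fn test_path(sema: &Semantics<RootDatabase>, fn_def: &ast::FnDef) -> Option<String> {
        let name = fn_def.name()?.text().to_string();
        let module = sema.to_def(fn_def)?.module(sema.db);
        let path = module
            .path_to_root(sema.db)
            .into_iter()
            .rev()
            .filter_map(|it| it.name(sema.db))
            .map(|it| it.to_string())
            .chain(std::iter::once(name))
            .join("::");
        Some(path)
    }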
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index 95f038f00..d6a7da953 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -25,14 +25,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
25.keyword\.control { color: #F0DFAF; font-weight: bold; } 25.keyword\.control { color: #F0DFAF; font-weight: bold; }
26</style> 26</style>
27<pre><code><span class="keyword">fn</span> <span class="function">main</span>() { 27<pre><code><span class="keyword">fn</span> <span class="function">main</span>() {
28 <span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; 28 <span class="keyword">let</span> <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string">"hello"</span>;
29 <span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); 29 <span class="keyword">let</span> <span class="variable" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
30 <span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); 30 <span class="keyword">let</span> <span class="variable" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
31 31
32 <span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>; 32 <span class="keyword">let</span> <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string">"other color please!"</span>;
33 <span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string(); 33 <span class="keyword">let</span> <span class="variable" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string();
34} 34}
35 35
36<span class="keyword">fn</span> <span class="function">bar</span>() { 36<span class="keyword">fn</span> <span class="function">bar</span>() {
37 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; 37 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string">"hello"</span>;
38}</code></pre> \ No newline at end of file 38}</code></pre> \ No newline at end of file
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 9bc3ad448..987476d2c 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -1,8 +1,11 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder}; 3use hir::{Name, Semantics};
4use ra_db::SourceDatabase; 4use ra_db::SourceDatabase;
5use ra_ide_db::{defs::NameDefinition, RootDatabase}; 5use ra_ide_db::{
6 defs::{classify_name, NameDefinition},
7 RootDatabase,
8};
6use ra_prof::profile; 9use ra_prof::profile;
7use ra_syntax::{ 10use ra_syntax::{
8 ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, 11 ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
@@ -10,11 +13,7 @@ use ra_syntax::{
10}; 13};
11use rustc_hash::FxHashMap; 14use rustc_hash::FxHashMap;
12 15
13use crate::{ 16use crate::{references::classify_name_ref, FileId};
14 expand::descend_into_macros_with_analyzer,
15 references::{classify_name, classify_name_ref},
16 FileId,
17};
18 17
19pub mod tags { 18pub mod tags {
20 pub const FIELD: &str = "field"; 19 pub const FIELD: &str = "field";
@@ -73,14 +72,11 @@ pub(crate) fn highlight(
73 range: Option<TextRange>, 72 range: Option<TextRange>,
74) -> Vec<HighlightedRange> { 73) -> Vec<HighlightedRange> {
75 let _p = profile("highlight"); 74 let _p = profile("highlight");
75 let sema = Semantics::new(db);
76 let root = sema.parse(file_id).syntax().clone();
76 77
77 let parse = db.parse(file_id);
78 let root = parse.tree().syntax().clone();
79
80 let mut sb = SourceBinder::new(db);
81 let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); 78 let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
82 let mut res = Vec::new(); 79 let mut res = Vec::new();
83 let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None);
84 80
85 let mut in_macro_call = None; 81 let mut in_macro_call = None;
86 82
@@ -105,7 +101,7 @@ pub(crate) fn highlight(
105 match node.kind() { 101 match node.kind() {
106 MACRO_CALL => { 102 MACRO_CALL => {
107 in_macro_call = Some(node.clone()); 103 in_macro_call = Some(node.clone());
108 if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) { 104 if let Some(range) = highlight_macro(node) {
109 res.push(HighlightedRange { 105 res.push(HighlightedRange {
110 range, 106 range,
111 tag: tags::MACRO, 107 tag: tags::MACRO,
@@ -116,10 +112,9 @@ pub(crate) fn highlight(
116 _ if in_macro_call.is_some() => { 112 _ if in_macro_call.is_some() => {
117 if let Some(token) = node.as_token() { 113 if let Some(token) = node.as_token() {
118 if let Some((tag, binding_hash)) = highlight_token_tree( 114 if let Some((tag, binding_hash)) = highlight_token_tree(
119 &mut sb, 115 &sema,
120 &analyzer,
121 &mut bindings_shadow_count, 116 &mut bindings_shadow_count,
122 InFile::new(file_id.into(), token.clone()), 117 token.clone(),
123 ) { 118 ) {
124 res.push(HighlightedRange { 119 res.push(HighlightedRange {
125 range: node.text_range(), 120 range: node.text_range(),
@@ -130,11 +125,9 @@ pub(crate) fn highlight(
130 } 125 }
131 } 126 }
132 _ => { 127 _ => {
133 if let Some((tag, binding_hash)) = highlight_node( 128 if let Some((tag, binding_hash)) =
134 &mut sb, 129 highlight_node(&sema, &mut bindings_shadow_count, node.clone())
135 &mut bindings_shadow_count, 130 {
136 InFile::new(file_id.into(), node.clone()),
137 ) {
138 res.push(HighlightedRange { 131 res.push(HighlightedRange {
139 range: node.text_range(), 132 range: node.text_range(),
140 tag, 133 tag,
@@ -161,8 +154,8 @@ pub(crate) fn highlight(
161 res 154 res
162} 155}
163 156
164fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> { 157fn highlight_macro(node: SyntaxElement) -> Option<TextRange> {
165 let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?; 158 let macro_call = ast::MacroCall::cast(node.as_node()?.clone())?;
166 let path = macro_call.path()?; 159 let path = macro_call.path()?;
167 let name_ref = path.segment()?.name_ref()?; 160 let name_ref = path.segment()?.name_ref()?;
168 161
@@ -179,35 +172,34 @@ fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> {
179} 172}
180 173
181fn highlight_token_tree( 174fn highlight_token_tree(
182 sb: &mut SourceBinder<RootDatabase>, 175 sema: &Semantics<RootDatabase>,
183 analyzer: &SourceAnalyzer,
184 bindings_shadow_count: &mut FxHashMap<Name, u32>, 176 bindings_shadow_count: &mut FxHashMap<Name, u32>,
185 token: InFile<SyntaxToken>, 177 token: SyntaxToken,
186) -> Option<(&'static str, Option<u64>)> { 178) -> Option<(&'static str, Option<u64>)> {
187 if token.value.parent().kind() != TOKEN_TREE { 179 if token.parent().kind() != TOKEN_TREE {
188 return None; 180 return None;
189 } 181 }
190 let token = descend_into_macros_with_analyzer(sb.db, analyzer, token); 182 let token = sema.descend_into_macros(token.clone());
191 let expanded = { 183 let expanded = {
192 let parent = token.value.parent(); 184 let parent = token.parent();
193 // We only care Name and Name_ref 185 // We only care Name and Name_ref
194 match (token.value.kind(), parent.kind()) { 186 match (token.kind(), parent.kind()) {
195 (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()), 187 (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
196 _ => token.map(|it| it.into()), 188 _ => token.into(),
197 } 189 }
198 }; 190 };
199 191
200 highlight_node(sb, bindings_shadow_count, expanded) 192 highlight_node(sema, bindings_shadow_count, expanded)
201} 193}
202 194
203fn highlight_node( 195fn highlight_node(
204 sb: &mut SourceBinder<RootDatabase>, 196 sema: &Semantics<RootDatabase>,
205 bindings_shadow_count: &mut FxHashMap<Name, u32>, 197 bindings_shadow_count: &mut FxHashMap<Name, u32>,
206 node: InFile<SyntaxElement>, 198 node: SyntaxElement,
207) -> Option<(&'static str, Option<u64>)> { 199) -> Option<(&'static str, Option<u64>)> {
208 let db = sb.db; 200 let db = sema.db;
209 let mut binding_hash = None; 201 let mut binding_hash = None;
210 let tag = match node.value.kind() { 202 let tag = match node.kind() {
211 FN_DEF => { 203 FN_DEF => {
212 bindings_shadow_count.clear(); 204 bindings_shadow_count.clear();
213 return None; 205 return None;
@@ -216,19 +208,18 @@ fn highlight_node(
216 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, 208 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING,
217 ATTR => tags::LITERAL_ATTRIBUTE, 209 ATTR => tags::LITERAL_ATTRIBUTE,
218 // Special-case field init shorthand 210 // Special-case field init shorthand
219 NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, 211 NAME_REF if node.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD,
220 NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None, 212 NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => return None,
221 NAME_REF => { 213 NAME_REF => {
222 let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); 214 let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap();
223 let name_kind = classify_name_ref(sb, node.with_value(&name_ref)); 215 let name_kind = classify_name_ref(sema, &name_ref);
224 match name_kind { 216 match name_kind {
225 Some(name_kind) => { 217 Some(name_kind) => {
226 if let NameDefinition::Local(local) = &name_kind { 218 if let NameDefinition::Local(local) = &name_kind {
227 if let Some(name) = local.name(db) { 219 if let Some(name) = local.name(db) {
228 let shadow_count = 220 let shadow_count =
229 bindings_shadow_count.entry(name.clone()).or_default(); 221 bindings_shadow_count.entry(name.clone()).or_default();
230 binding_hash = 222 binding_hash = Some(calc_binding_hash(&name, *shadow_count))
231 Some(calc_binding_hash(node.file_id, &name, *shadow_count))
232 } 223 }
233 }; 224 };
234 225
@@ -238,14 +229,14 @@ fn highlight_node(
238 } 229 }
239 } 230 }
240 NAME => { 231 NAME => {
241 let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap(); 232 let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap();
242 let name_kind = classify_name(sb, node.with_value(&name)); 233 let name_kind = classify_name(sema, &name);
243 234
244 if let Some(NameDefinition::Local(local)) = &name_kind { 235 if let Some(NameDefinition::Local(local)) = &name_kind {
245 if let Some(name) = local.name(db) { 236 if let Some(name) = local.name(db) {
246 let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); 237 let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
247 *shadow_count += 1; 238 *shadow_count += 1;
248 binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count)) 239 binding_hash = Some(calc_binding_hash(&name, *shadow_count))
249 } 240 }
250 }; 241 };
251 242
@@ -272,7 +263,7 @@ fn highlight_node(
272 263
273 return Some((tag, binding_hash)); 264 return Some((tag, binding_hash));
274 265
275 fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 { 266 fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 {
276 fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { 267 fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
277 use std::{collections::hash_map::DefaultHasher, hash::Hasher}; 268 use std::{collections::hash_map::DefaultHasher, hash::Hasher};
278 269
@@ -281,7 +272,7 @@ fn highlight_node(
281 hasher.finish() 272 hasher.finish()
282 } 273 }
283 274
284 hash((file_id, name, shadow_count)) 275 hash((name, shadow_count))
285 } 276 }
286} 277}
287 278
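calc_binding_hash now hashes only the name and shadow count; file_id is gone from the tuple, which is why every data-binding-hash value in the rainbow_highlighting.html snapshot above changed. A standalone sketch of the scheme, with an invented binding_hash that substitutes &str for hir::Name so it runs on std alone:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Same shape as the new `calc_binding_hash`: the hash (and hence the rainbow
    // color) is a function of the binding's name and shadow count alone.
    fn binding_hash(name: &str, shadow_count: u32) -> u64 {
        let mut hasher = DefaultHasher::new();
        (name, shadow_count).hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        // Identical bindings hash the same even across files now...
        assert_eq!(binding_hash("hello", 0), binding_hash("hello", 0));
        // ...while a shadowing `let x` still gets a fresh color.
        assert_ne!(binding_hash("x", 0), binding_hash("x", 1));
    }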
diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs
index 04c214624..e10e72f71 100644
--- a/crates/ra_ide_db/src/defs.rs
+++ b/crates/ra_ide_db/src/defs.rs
@@ -6,8 +6,8 @@
6// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). 6// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
7 7
8use hir::{ 8use hir::{
9 Adt, FieldSource, HasSource, ImplBlock, InFile, Local, MacroDef, Module, ModuleDef, 9 Adt, FieldSource, HasSource, ImplBlock, Local, MacroDef, Module, ModuleDef, Semantics,
10 SourceBinder, StructField, TypeParam, 10 StructField, TypeParam,
11}; 11};
12use ra_prof::profile; 12use ra_prof::profile;
13use ra_syntax::{ 13use ra_syntax::{
@@ -68,78 +68,62 @@ impl NameDefinition {
68 } 68 }
69} 69}
70 70
71pub fn classify_name( 71pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<NameDefinition> {
72 sb: &mut SourceBinder<RootDatabase>,
73 name: InFile<&ast::Name>,
74) -> Option<NameDefinition> {
75 let _p = profile("classify_name"); 72 let _p = profile("classify_name");
76 let parent = name.value.syntax().parent()?; 73 let parent = name.syntax().parent()?;
77 74
78 match_ast! { 75 match_ast! {
79 match parent { 76 match parent {
80 ast::BindPat(it) => { 77 ast::BindPat(it) => {
81 let src = name.with_value(it); 78 let local = sema.to_def(&it)?;
82 let local = sb.to_def(src)?;
83 Some(NameDefinition::Local(local)) 79 Some(NameDefinition::Local(local))
84 }, 80 },
85 ast::RecordFieldDef(it) => { 81 ast::RecordFieldDef(it) => {
86 let src = name.with_value(it); 82 let field: hir::StructField = sema.to_def(&it)?;
87 let field: hir::StructField = sb.to_def(src)?;
88 Some(from_struct_field(field)) 83 Some(from_struct_field(field))
89 }, 84 },
90 ast::Module(it) => { 85 ast::Module(it) => {
91 let def = sb.to_def(name.with_value(it))?; 86 let def = sema.to_def(&it)?;
92 Some(from_module_def(def.into())) 87 Some(from_module_def(def.into()))
93 }, 88 },
94 ast::StructDef(it) => { 89 ast::StructDef(it) => {
95 let src = name.with_value(it); 90 let def: hir::Struct = sema.to_def(&it)?;
96 let def: hir::Struct = sb.to_def(src)?;
97 Some(from_module_def(def.into())) 91 Some(from_module_def(def.into()))
98 }, 92 },
99 ast::EnumDef(it) => { 93 ast::EnumDef(it) => {
100 let src = name.with_value(it); 94 let def: hir::Enum = sema.to_def(&it)?;
101 let def: hir::Enum = sb.to_def(src)?;
102 Some(from_module_def(def.into())) 95 Some(from_module_def(def.into()))
103 }, 96 },
104 ast::TraitDef(it) => { 97 ast::TraitDef(it) => {
105 let src = name.with_value(it); 98 let def: hir::Trait = sema.to_def(&it)?;
106 let def: hir::Trait = sb.to_def(src)?;
107 Some(from_module_def(def.into())) 99 Some(from_module_def(def.into()))
108 }, 100 },
109 ast::StaticDef(it) => { 101 ast::StaticDef(it) => {
110 let src = name.with_value(it); 102 let def: hir::Static = sema.to_def(&it)?;
111 let def: hir::Static = sb.to_def(src)?;
112 Some(from_module_def(def.into())) 103 Some(from_module_def(def.into()))
113 }, 104 },
114 ast::EnumVariant(it) => { 105 ast::EnumVariant(it) => {
115 let src = name.with_value(it); 106 let def: hir::EnumVariant = sema.to_def(&it)?;
116 let def: hir::EnumVariant = sb.to_def(src)?;
117 Some(from_module_def(def.into())) 107 Some(from_module_def(def.into()))
118 }, 108 },
119 ast::FnDef(it) => { 109 ast::FnDef(it) => {
120 let src = name.with_value(it); 110 let def: hir::Function = sema.to_def(&it)?;
121 let def: hir::Function = sb.to_def(src)?;
122 Some(from_module_def(def.into())) 111 Some(from_module_def(def.into()))
123 }, 112 },
124 ast::ConstDef(it) => { 113 ast::ConstDef(it) => {
125 let src = name.with_value(it); 114 let def: hir::Const = sema.to_def(&it)?;
126 let def: hir::Const = sb.to_def(src)?;
127 Some(from_module_def(def.into())) 115 Some(from_module_def(def.into()))
128 }, 116 },
129 ast::TypeAliasDef(it) => { 117 ast::TypeAliasDef(it) => {
130 let src = name.with_value(it); 118 let def: hir::TypeAlias = sema.to_def(&it)?;
131 let def: hir::TypeAlias = sb.to_def(src)?;
132 Some(from_module_def(def.into())) 119 Some(from_module_def(def.into()))
133 }, 120 },
134 ast::MacroCall(it) => { 121 ast::MacroCall(it) => {
135 let src = name.with_value(it); 122 let def = sema.to_def(&it)?;
136 let def = sb.to_def(src.clone())?;
137
138 Some(NameDefinition::Macro(def)) 123 Some(NameDefinition::Macro(def))
139 }, 124 },
140 ast::TypeParam(it) => { 125 ast::TypeParam(it) => {
141 let src = name.with_value(it); 126 let def = sema.to_def(&it)?;
142 let def = sb.to_def(src)?;
143 Some(NameDefinition::TypeParam(def)) 127 Some(NameDefinition::TypeParam(def))
144 }, 128 },
145 _ => None, 129 _ => None,
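classify_name becomes a free function over &Semantics and a plain &ast::Name, and each match arm collapses to a single sema.to_def(&it) call. A hedged caller-side sketch; the first_name_is_local wrapper is invented for illustration:

    use hir::Semantics;
    use ra_db::FileId;
    use ra_ide_db::{
        defs::{classify_name, NameDefinition},
        RootDatabase,
    };
    use ra_syntax::{ast, AstNode};

    // Invented wrapper: report whether the first `ast::Name` in a file names a
    // local binding, using the new `classify_name(&sema, &name)` shape.
    fn first_name_is_local(db: &RootDatabase, file_id: FileId) -> bool {
        let sema = Semantics::new(db);
        let source_file = sema.parse(file_id);
        source_file
            .syntax()
            .descendants()
            .filter_map(ast::Name::cast)
            .next()
            .and_then(|name| classify_name(&sema, &name))
            .map_or(false, |def| match def {
                NameDefinition::Local(_) => true,
                _ => false,
            })
    }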
diff --git a/crates/ra_ide_db/src/imports_locator.rs b/crates/ra_ide_db/src/imports_locator.rs
index b8dd358a9..e590d2a5c 100644
--- a/crates/ra_ide_db/src/imports_locator.rs
+++ b/crates/ra_ide_db/src/imports_locator.rs
@@ -1,7 +1,7 @@
1//! This module contains an import search functionality that is provided to the ra_assists module. 1//! This module contains an import search functionality that is provided to the ra_assists module.
2//! Later, this should be moved away to a separate crate that is accessible from the ra_assists module. 2//! Later, this should be moved away to a separate crate that is accessible from the ra_assists module.
3 3
4use hir::{db::HirDatabase, ModuleDef, SourceBinder}; 4use hir::{ModuleDef, Semantics};
5use ra_prof::profile; 5use ra_prof::profile;
6use ra_syntax::{ast, AstNode, SyntaxKind::NAME}; 6use ra_syntax::{ast, AstNode, SyntaxKind::NAME};
7 7
@@ -12,17 +12,17 @@ use crate::{
12}; 12};
13 13
14pub struct ImportsLocator<'a> { 14pub struct ImportsLocator<'a> {
15 source_binder: SourceBinder<'a, RootDatabase>, 15 sema: Semantics<'a, RootDatabase>,
16} 16}
17 17
18impl<'a> ImportsLocator<'a> { 18impl<'a> ImportsLocator<'a> {
19 pub fn new(db: &'a RootDatabase) -> Self { 19 pub fn new(db: &'a RootDatabase) -> Self {
20 Self { source_binder: SourceBinder::new(db) } 20 Self { sema: Semantics::new(db) }
21 } 21 }
22 22
23 pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> { 23 pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> {
24 let _p = profile("search_for_imports"); 24 let _p = profile("search_for_imports");
25 let db = self.source_binder.db; 25 let db = self.sema.db;
26 26
27 let project_results = { 27 let project_results = {
28 let mut query = Query::new(name_to_import.to_string()); 28 let mut query = Query::new(name_to_import.to_string());
@@ -41,7 +41,7 @@ impl<'a> ImportsLocator<'a> {
41 project_results 41 project_results
42 .into_iter() 42 .into_iter()
43 .chain(lib_results.into_iter()) 43 .chain(lib_results.into_iter())
44 .filter_map(|import_candidate| self.get_name_definition(db, &import_candidate)) 44 .filter_map(|import_candidate| self.get_name_definition(&import_candidate))
45 .filter_map(|name_definition_to_import| match name_definition_to_import { 45 .filter_map(|name_definition_to_import| match name_definition_to_import {
46 NameDefinition::ModuleDef(module_def) => Some(module_def), 46 NameDefinition::ModuleDef(module_def) => Some(module_def),
47 _ => None, 47 _ => None,
@@ -49,22 +49,16 @@ impl<'a> ImportsLocator<'a> {
49 .collect() 49 .collect()
50 } 50 }
51 51
52 fn get_name_definition( 52 fn get_name_definition(&mut self, import_candidate: &FileSymbol) -> Option<NameDefinition> {
53 &mut self,
54 db: &impl HirDatabase,
55 import_candidate: &FileSymbol,
56 ) -> Option<NameDefinition> {
57 let _p = profile("get_name_definition"); 53 let _p = profile("get_name_definition");
58 let file_id = import_candidate.file_id.into(); 54 let file_id = import_candidate.file_id;
59 let candidate_node = import_candidate.ptr.to_node(&db.parse_or_expand(file_id)?); 55
56 let candidate_node = import_candidate.ptr.to_node(self.sema.parse(file_id).syntax());
60 let candidate_name_node = if candidate_node.kind() != NAME { 57 let candidate_name_node = if candidate_node.kind() != NAME {
61 candidate_node.children().find(|it| it.kind() == NAME)? 58 candidate_node.children().find(|it| it.kind() == NAME)?
62 } else { 59 } else {
63 candidate_node 60 candidate_node
64 }; 61 };
65 classify_name( 62 classify_name(&self.sema, &ast::Name::cast(candidate_name_node)?)
66 &mut self.source_binder,
67 hir::InFile { file_id, value: &ast::Name::cast(candidate_name_node)? },
68 )
69 } 63 }
70} 64}
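ImportsLocator keeps its public surface (new plus find_imports); only the internals switch from SourceBinder to Semantics, so callers are unaffected. A small caller-side sketch (the import_candidates wrapper is invented, and the module path for ImportsLocator is an assumption):

    use hir::ModuleDef;
    use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};

    // Invented caller: look up importable definitions by simple name, as the
    // auto_import assist does through this type.
    fn import_candidates(db: &RootDatabase, name: &str) -> Vec<ModuleDef> {
        let mut locator = ImportsLocator::new(db); // find_imports still takes &mut self
        locator.find_imports(name)
    }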
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 21fca99a6..f14bcbb35 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -4,7 +4,7 @@ use std::ops::RangeInclusive;
4 4
5use itertools::Itertools; 5use itertools::Itertools;
6use ra_text_edit::TextEditBuilder; 6use ra_text_edit::TextEditBuilder;
7use rustc_hash::FxHashMap; 7use rustc_hash::{FxHashMap, FxHashSet};
8 8
9use crate::{ 9use crate::{
10 AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit, 10 AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
@@ -56,6 +56,11 @@ pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxEleme
56 root.covering_element(range) 56 root.covering_element(range)
57} 57}
58 58
59pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
60 let u_ancestors = u.ancestors().collect::<FxHashSet<SyntaxNode>>();
61 v.ancestors().find(|it| u_ancestors.contains(it))
62}
63
59#[derive(Debug, PartialEq, Eq, Clone, Copy)] 64#[derive(Debug, PartialEq, Eq, Clone, Copy)]
60pub enum InsertPosition<T> { 65pub enum InsertPosition<T> {
61 First, 66 First,
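The new algo::least_common_ancestor collects u's ancestors into a set and returns the first ancestor of v found in that set; since ancestors() starts with the node itself, this also covers the case where one node contains the other. A standalone sketch of what it computes:

    use ra_syntax::{algo, ast, AstNode, SourceFile};

    fn main() {
        let parse = SourceFile::parse("fn foo() { let x = 1; let y = 2; }");
        let file = parse.tree();
        let mut lets = file.syntax().descendants().filter_map(ast::LetStmt::cast);
        let (a, b) = (lets.next().unwrap(), lets.next().unwrap());
        // The result is an ancestor of both `let` statements (here, the block
        // that forms the fn body).
        let lca = algo::least_common_ancestor(a.syntax(), b.syntax()).unwrap();
        assert!(a.syntax().ancestors().any(|it| it == lca));
        assert!(b.syntax().ancestors().any(|it| it == lca));
    }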