diff options
author | Aleksey Kladov <[email protected]> | 2020-02-18 17:35:10 +0000 |
---|---|---|
committer | Aleksey Kladov <[email protected]> | 2020-02-26 11:55:50 +0000 |
commit | c3a4c4429de83450654795534e64e878a774a088 (patch) | |
tree | 12d89798f61b276f8bd640db07276a7d4e92b1c2 /crates/ra_ide | |
parent | 04deae3dba7c9b7054f7a1d64e4b93a05aecc132 (diff) |
Refactor primary IDE API
This introduces the new type -- Semantics.
Semantics maps SyntaxNodes to various semantic info, such as types,
name resolution, or macro expansions.
To do so, Semantics maintains a HashMap that maps every node it has seen
to the file from which the node originated. This is enough to get all
the necessary hir bits just from syntax.
Diffstat (limited to 'crates/ra_ide')
32 files changed, 414 insertions, 561 deletions
diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs index 51ac59a71..b00b6d431 100644 --- a/crates/ra_ide/src/call_hierarchy.rs +++ b/crates/ra_ide/src/call_hierarchy.rs | |||
@@ -2,13 +2,13 @@ | |||
2 | 2 | ||
3 | use indexmap::IndexMap; | 3 | use indexmap::IndexMap; |
4 | 4 | ||
5 | use hir::db::AstDatabase; | 5 | use hir::Semantics; |
6 | use ra_ide_db::RootDatabase; | 6 | use ra_ide_db::RootDatabase; |
7 | use ra_syntax::{ast, match_ast, AstNode, TextRange}; | 7 | use ra_syntax::{ast, match_ast, AstNode, TextRange}; |
8 | 8 | ||
9 | use crate::{ | 9 | use crate::{ |
10 | call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition, | 10 | call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition, |
11 | references, FilePosition, NavigationTarget, RangeInfo, | 11 | NavigationTarget, RangeInfo, |
12 | }; | 12 | }; |
13 | 13 | ||
14 | #[derive(Debug, Clone)] | 14 | #[derive(Debug, Clone)] |
@@ -38,30 +38,31 @@ pub(crate) fn call_hierarchy( | |||
38 | } | 38 | } |
39 | 39 | ||
40 | pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { | 40 | pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { |
41 | let sema = Semantics::new(db); | ||
41 | // 1. Find all refs | 42 | // 1. Find all refs |
42 | // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. | 43 | // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply. |
43 | // 3. Add ranges relative to the start of the fndef. | 44 | // 3. Add ranges relative to the start of the fndef. |
44 | let refs = references::find_all_refs(db, position, None)?; | 45 | let refs = references::find_all_refs(db, position, None)?; |
45 | 46 | ||
46 | let mut calls = CallLocations::default(); | 47 | let mut calls = CallLocations::default(); |
47 | let mut sb = hir::SourceBinder::new(db); | ||
48 | 48 | ||
49 | for reference in refs.info.references() { | 49 | for reference in refs.info.references() { |
50 | let file_id = reference.file_range.file_id; | 50 | let file_id = reference.file_range.file_id; |
51 | let file = db.parse_or_expand(file_id.into())?; | 51 | let file = sema.parse(file_id); |
52 | let file = file.syntax(); | ||
52 | let token = file.token_at_offset(reference.file_range.range.start()).next()?; | 53 | let token = file.token_at_offset(reference.file_range.range.start()).next()?; |
53 | let token = descend_into_macros(db, file_id, token); | 54 | let token = sema.descend_into_macros(token); |
54 | let syntax = token.value.parent(); | 55 | let syntax = token.parent(); |
55 | 56 | ||
56 | // This target is the containing function | 57 | // This target is the containing function |
57 | if let Some(nav) = syntax.ancestors().find_map(|node| { | 58 | if let Some(nav) = syntax.ancestors().find_map(|node| { |
58 | match_ast! { | 59 | match_ast! { |
59 | match node { | 60 | match node { |
60 | ast::FnDef(it) => { | 61 | ast::FnDef(it) => { |
61 | let def = sb.to_def(token.with_value(it))?; | 62 | let def = sema.to_def(&it)?; |
62 | Some(def.to_nav(sb.db)) | 63 | Some(def.to_nav(sema.db)) |
63 | }, | 64 | }, |
64 | _ => { None }, | 65 | _ => None, |
65 | } | 66 | } |
66 | } | 67 | } |
67 | }) { | 68 | }) { |
@@ -74,11 +75,13 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio | |||
74 | } | 75 | } |
75 | 76 | ||
76 | pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { | 77 | pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> { |
78 | let sema = Semantics::new(db); | ||
77 | let file_id = position.file_id; | 79 | let file_id = position.file_id; |
78 | let file = db.parse_or_expand(file_id.into())?; | 80 | let file = sema.parse(file_id); |
81 | let file = file.syntax(); | ||
79 | let token = file.token_at_offset(position.offset).next()?; | 82 | let token = file.token_at_offset(position.offset).next()?; |
80 | let token = descend_into_macros(db, file_id, token); | 83 | let token = sema.descend_into_macros(token); |
81 | let syntax = token.value.parent(); | 84 | let syntax = token.parent(); |
82 | 85 | ||
83 | let mut calls = CallLocations::default(); | 86 | let mut calls = CallLocations::default(); |
84 | 87 | ||
@@ -87,14 +90,11 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio | |||
87 | .filter_map(|node| FnCallNode::with_node_exact(&node)) | 90 | .filter_map(|node| FnCallNode::with_node_exact(&node)) |
88 | .filter_map(|call_node| { | 91 | .filter_map(|call_node| { |
89 | let name_ref = call_node.name_ref()?; | 92 | let name_ref = call_node.name_ref()?; |
90 | let name_ref = token.with_value(name_ref.syntax()); | ||
91 | |||
92 | let analyzer = hir::SourceAnalyzer::new(db, name_ref, None); | ||
93 | 93 | ||
94 | if let Some(func_target) = match &call_node { | 94 | if let Some(func_target) = match &call_node { |
95 | FnCallNode::CallExpr(expr) => { | 95 | FnCallNode::CallExpr(expr) => { |
96 | //FIXME: Type::as_callable is broken | 96 | //FIXME: Type::as_callable is broken |
97 | let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; | 97 | let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?; |
98 | match callable_def { | 98 | match callable_def { |
99 | hir::CallableDef::FunctionId(it) => { | 99 | hir::CallableDef::FunctionId(it) => { |
100 | let fn_def: hir::Function = it.into(); | 100 | let fn_def: hir::Function = it.into(); |
@@ -105,15 +105,15 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio | |||
105 | } | 105 | } |
106 | } | 106 | } |
107 | FnCallNode::MethodCallExpr(expr) => { | 107 | FnCallNode::MethodCallExpr(expr) => { |
108 | let function = analyzer.resolve_method_call(&expr)?; | 108 | let function = sema.resolve_method_call(&expr)?; |
109 | Some(function.to_nav(db)) | 109 | Some(function.to_nav(db)) |
110 | } | 110 | } |
111 | FnCallNode::MacroCallExpr(expr) => { | 111 | FnCallNode::MacroCallExpr(macro_call) => { |
112 | let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; | 112 | let macro_def = sema.resolve_macro_call(¯o_call)?; |
113 | Some(macro_def.to_nav(db)) | 113 | Some(macro_def.to_nav(db)) |
114 | } | 114 | } |
115 | } { | 115 | } { |
116 | Some((func_target, name_ref.value.text_range())) | 116 | Some((func_target, name_ref.syntax().text_range())) |
117 | } else { | 117 | } else { |
118 | None | 118 | None |
119 | } | 119 | } |
diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs index 7c6322cb4..9a1fc0d35 100644 --- a/crates/ra_ide/src/call_info.rs +++ b/crates/ra_ide/src/call_info.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | use hir::db::AstDatabase; | 2 | use hir::Semantics; |
3 | use ra_ide_db::RootDatabase; | 3 | use ra_ide_db::RootDatabase; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | ast::{self, ArgListOwner}, | 5 | ast::{self, ArgListOwner}, |
@@ -7,24 +7,23 @@ use ra_syntax::{ | |||
7 | }; | 7 | }; |
8 | use test_utils::tested_by; | 8 | use test_utils::tested_by; |
9 | 9 | ||
10 | use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature}; | 10 | use crate::{CallInfo, FilePosition, FunctionSignature}; |
11 | 11 | ||
12 | /// Computes parameter information for the given call expression. | 12 | /// Computes parameter information for the given call expression. |
13 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { | 13 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { |
14 | let file = db.parse_or_expand(position.file_id.into())?; | 14 | let sema = Semantics::new(db); |
15 | let file = sema.parse(position.file_id); | ||
16 | let file = file.syntax(); | ||
15 | let token = file.token_at_offset(position.offset).next()?; | 17 | let token = file.token_at_offset(position.offset).next()?; |
16 | let token = descend_into_macros(db, position.file_id, token); | 18 | let token = sema.descend_into_macros(token); |
17 | 19 | ||
18 | // Find the calling expression and it's NameRef | 20 | // Find the calling expression and it's NameRef |
19 | let calling_node = FnCallNode::with_node(&token.value.parent())?; | 21 | let calling_node = FnCallNode::with_node(&token.parent())?; |
20 | let name_ref = calling_node.name_ref()?; | ||
21 | let name_ref = token.with_value(name_ref.syntax()); | ||
22 | 22 | ||
23 | let analyzer = hir::SourceAnalyzer::new(db, name_ref, None); | ||
24 | let (mut call_info, has_self) = match &calling_node { | 23 | let (mut call_info, has_self) = match &calling_node { |
25 | FnCallNode::CallExpr(expr) => { | 24 | FnCallNode::CallExpr(call) => { |
26 | //FIXME: Type::as_callable is broken | 25 | //FIXME: Type::as_callable is broken |
27 | let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; | 26 | let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?; |
28 | match callable_def { | 27 | match callable_def { |
29 | hir::CallableDef::FunctionId(it) => { | 28 | hir::CallableDef::FunctionId(it) => { |
30 | let fn_def = it.into(); | 29 | let fn_def = it.into(); |
@@ -36,12 +35,12 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal | |||
36 | } | 35 | } |
37 | } | 36 | } |
38 | } | 37 | } |
39 | FnCallNode::MethodCallExpr(expr) => { | 38 | FnCallNode::MethodCallExpr(method_call) => { |
40 | let function = analyzer.resolve_method_call(&expr)?; | 39 | let function = sema.resolve_method_call(&method_call)?; |
41 | (CallInfo::with_fn(db, function), function.has_self_param(db)) | 40 | (CallInfo::with_fn(db, function), function.has_self_param(db)) |
42 | } | 41 | } |
43 | FnCallNode::MacroCallExpr(expr) => { | 42 | FnCallNode::MacroCallExpr(macro_call) => { |
44 | let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?; | 43 | let macro_def = sema.resolve_macro_call(¯o_call)?; |
45 | (CallInfo::with_macro(db, macro_def)?, false) | 44 | (CallInfo::with_macro(db, macro_def)?, false) |
46 | } | 45 | } |
47 | }; | 46 | }; |
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs index 4bdc6ba23..c378c2c62 100644 --- a/crates/ra_ide/src/completion.rs +++ b/crates/ra_ide/src/completion.rs | |||
@@ -17,7 +17,6 @@ mod complete_postfix; | |||
17 | mod complete_macro_in_item_position; | 17 | mod complete_macro_in_item_position; |
18 | mod complete_trait_impl; | 18 | mod complete_trait_impl; |
19 | 19 | ||
20 | use ra_db::SourceDatabase; | ||
21 | use ra_ide_db::RootDatabase; | 20 | use ra_ide_db::RootDatabase; |
22 | 21 | ||
23 | #[cfg(test)] | 22 | #[cfg(test)] |
@@ -57,8 +56,7 @@ pub use crate::completion::completion_item::{ | |||
57 | /// identifier prefix/fuzzy match should be done higher in the stack, together | 56 | /// identifier prefix/fuzzy match should be done higher in the stack, together |
58 | /// with ordering of completions (currently this is done by the client). | 57 | /// with ordering of completions (currently this is done by the client). |
59 | pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> { | 58 | pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> { |
60 | let original_parse = db.parse(position.file_id); | 59 | let ctx = CompletionContext::new(db, position)?; |
61 | let ctx = CompletionContext::new(db, &original_parse, position)?; | ||
62 | 60 | ||
63 | let mut acc = Completions::default(); | 61 | let mut acc = Completions::default(); |
64 | 62 | ||
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs index 2ca78c927..a6e0158b2 100644 --- a/crates/ra_ide/src/completion/complete_dot.rs +++ b/crates/ra_ide/src/completion/complete_dot.rs | |||
@@ -16,7 +16,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { | |||
16 | _ => return, | 16 | _ => return, |
17 | }; | 17 | }; |
18 | 18 | ||
19 | let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { | 19 | let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { |
20 | Some(ty) => ty, | 20 | Some(ty) => ty, |
21 | _ => return, | 21 | _ => return, |
22 | }; | 22 | }; |
@@ -55,7 +55,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty | |||
55 | fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { | 55 | fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { |
56 | if let Some(krate) = ctx.module.map(|it| it.krate()) { | 56 | if let Some(krate) = ctx.module.map(|it| it.krate()) { |
57 | let mut seen_methods = FxHashSet::default(); | 57 | let mut seen_methods = FxHashSet::default(); |
58 | let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db); | 58 | let traits_in_scope = ctx.scope().traits_in_scope(); |
59 | receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { | 59 | receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { |
60 | if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) { | 60 | if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) { |
61 | acc.add_function(ctx, func); | 61 | acc.add_function(ctx, func); |
diff --git a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs index faadd1e3f..1866d9e6c 100644 --- a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs +++ b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs | |||
@@ -5,7 +5,7 @@ use crate::completion::{CompletionContext, Completions}; | |||
5 | pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { | 5 | pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) { |
6 | // Show only macros in top level. | 6 | // Show only macros in top level. |
7 | if ctx.is_new_item { | 7 | if ctx.is_new_item { |
8 | ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { | 8 | ctx.scope().process_all_names(&mut |name, res| { |
9 | if let hir::ScopeDef::MacroDef(mac) = res { | 9 | if let hir::ScopeDef::MacroDef(mac) = res { |
10 | acc.add_macro(ctx, Some(name.to_string()), mac); | 10 | acc.add_macro(ctx, Some(name.to_string()), mac); |
11 | } | 11 | } |
diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs index 2d7f09a6c..c626e90cc 100644 --- a/crates/ra_ide/src/completion/complete_path.rs +++ b/crates/ra_ide/src/completion/complete_path.rs | |||
@@ -11,7 +11,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { | |||
11 | Some(path) => path.clone(), | 11 | Some(path) => path.clone(), |
12 | _ => return, | 12 | _ => return, |
13 | }; | 13 | }; |
14 | let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) { | 14 | let def = match ctx.scope().resolve_hir_path(&path) { |
15 | Some(PathResolution::Def(def)) => def, | 15 | Some(PathResolution::Def(def)) => def, |
16 | _ => return, | 16 | _ => return, |
17 | }; | 17 | }; |
@@ -49,7 +49,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) { | |||
49 | // FIXME: complete T::AssocType | 49 | // FIXME: complete T::AssocType |
50 | let krate = ctx.module.map(|m| m.krate()); | 50 | let krate = ctx.module.map(|m| m.krate()); |
51 | if let Some(krate) = krate { | 51 | if let Some(krate) = krate { |
52 | let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db); | 52 | let traits_in_scope = ctx.scope().traits_in_scope(); |
53 | ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { | 53 | ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { |
54 | match item { | 54 | match item { |
55 | hir::AssocItem::Function(func) => { | 55 | hir::AssocItem::Function(func) => { |
diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs index fd03b1c40..c2c6ca002 100644 --- a/crates/ra_ide/src/completion/complete_pattern.rs +++ b/crates/ra_ide/src/completion/complete_pattern.rs | |||
@@ -9,7 +9,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) { | |||
9 | } | 9 | } |
10 | // FIXME: ideally, we should look at the type we are matching against and | 10 | // FIXME: ideally, we should look at the type we are matching against and |
11 | // suggest variants + auto-imports | 11 | // suggest variants + auto-imports |
12 | ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { | 12 | ctx.scope().process_all_names(&mut |name, res| { |
13 | let def = match &res { | 13 | let def = match &res { |
14 | hir::ScopeDef::ModuleDef(def) => def, | 14 | hir::ScopeDef::ModuleDef(def) => def, |
15 | _ => return, | 15 | _ => return, |
diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs index 5470dc291..8a74f993a 100644 --- a/crates/ra_ide/src/completion/complete_postfix.rs +++ b/crates/ra_ide/src/completion/complete_postfix.rs | |||
@@ -29,7 +29,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { | |||
29 | dot_receiver.syntax().text().to_string() | 29 | dot_receiver.syntax().text().to_string() |
30 | }; | 30 | }; |
31 | 31 | ||
32 | let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) { | 32 | let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) { |
33 | Some(it) => it, | 33 | Some(it) => it, |
34 | None => return, | 34 | None => return, |
35 | }; | 35 | }; |
diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs index 577c394d2..f98353d76 100644 --- a/crates/ra_ide/src/completion/complete_record_literal.rs +++ b/crates/ra_ide/src/completion/complete_record_literal.rs | |||
@@ -5,10 +5,7 @@ use crate::completion::{CompletionContext, Completions}; | |||
5 | /// Complete fields in fields literals. | 5 | /// Complete fields in fields literals. |
6 | pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { | 6 | pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) { |
7 | let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| { | 7 | let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| { |
8 | Some(( | 8 | Some((ctx.sema.type_of_expr(&it.clone().into())?, ctx.sema.resolve_record_literal(it)?)) |
9 | ctx.analyzer.type_of(ctx.db, &it.clone().into())?, | ||
10 | ctx.analyzer.resolve_record_literal(it)?, | ||
11 | )) | ||
12 | }) { | 9 | }) { |
13 | Some(it) => it, | 10 | Some(it) => it, |
14 | _ => return, | 11 | _ => return, |
diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs index a56c7e3a1..9bdeae49f 100644 --- a/crates/ra_ide/src/completion/complete_record_pattern.rs +++ b/crates/ra_ide/src/completion/complete_record_pattern.rs | |||
@@ -4,10 +4,7 @@ use crate::completion::{CompletionContext, Completions}; | |||
4 | 4 | ||
5 | pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) { | 5 | pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) { |
6 | let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| { | 6 | let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| { |
7 | Some(( | 7 | Some((ctx.sema.type_of_pat(&it.clone().into())?, ctx.sema.resolve_record_pattern(it)?)) |
8 | ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?, | ||
9 | ctx.analyzer.resolve_record_pattern(it)?, | ||
10 | )) | ||
11 | }) { | 8 | }) { |
12 | Some(it) => it, | 9 | Some(it) => it, |
13 | _ => return, | 10 | _ => return, |
diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs index e2ee86dd1..aad016d4a 100644 --- a/crates/ra_ide/src/completion/complete_scope.rs +++ b/crates/ra_ide/src/completion/complete_scope.rs | |||
@@ -7,9 +7,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { | |||
7 | return; | 7 | return; |
8 | } | 8 | } |
9 | 9 | ||
10 | ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { | 10 | ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res)); |
11 | acc.add_resolution(ctx, name.to_string(), &res) | ||
12 | }); | ||
13 | } | 11 | } |
14 | 12 | ||
15 | #[cfg(test)] | 13 | #[cfg(test)] |
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs index 83628e35c..9a27c164b 100644 --- a/crates/ra_ide/src/completion/complete_trait_impl.rs +++ b/crates/ra_ide/src/completion/complete_trait_impl.rs | |||
@@ -64,11 +64,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext | |||
64 | if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) { | 64 | if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) { |
65 | match trigger.kind() { | 65 | match trigger.kind() { |
66 | SyntaxKind::FN_DEF => { | 66 | SyntaxKind::FN_DEF => { |
67 | for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) | 67 | for missing_fn in |
68 | .iter() | 68 | get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| { |
69 | .filter_map(|item| match item { | 69 | match item { |
70 | hir::AssocItem::Function(fn_item) => Some(fn_item), | 70 | hir::AssocItem::Function(fn_item) => Some(fn_item), |
71 | _ => None, | 71 | _ => None, |
72 | } | ||
72 | }) | 73 | }) |
73 | { | 74 | { |
74 | add_function_impl(&trigger, acc, ctx, &missing_fn); | 75 | add_function_impl(&trigger, acc, ctx, &missing_fn); |
@@ -76,11 +77,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext | |||
76 | } | 77 | } |
77 | 78 | ||
78 | SyntaxKind::TYPE_ALIAS_DEF => { | 79 | SyntaxKind::TYPE_ALIAS_DEF => { |
79 | for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) | 80 | for missing_fn in |
80 | .iter() | 81 | get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| { |
81 | .filter_map(|item| match item { | 82 | match item { |
82 | hir::AssocItem::TypeAlias(type_item) => Some(type_item), | 83 | hir::AssocItem::TypeAlias(type_item) => Some(type_item), |
83 | _ => None, | 84 | _ => None, |
85 | } | ||
84 | }) | 86 | }) |
85 | { | 87 | { |
86 | add_type_alias_impl(&trigger, acc, ctx, &missing_fn); | 88 | add_type_alias_impl(&trigger, acc, ctx, &missing_fn); |
@@ -88,11 +90,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext | |||
88 | } | 90 | } |
89 | 91 | ||
90 | SyntaxKind::CONST_DEF => { | 92 | SyntaxKind::CONST_DEF => { |
91 | for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block) | 93 | for missing_fn in |
92 | .iter() | 94 | get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| { |
93 | .filter_map(|item| match item { | 95 | match item { |
94 | hir::AssocItem::Const(const_item) => Some(const_item), | 96 | hir::AssocItem::Const(const_item) => Some(const_item), |
95 | _ => None, | 97 | _ => None, |
98 | } | ||
96 | }) | 99 | }) |
97 | { | 100 | { |
98 | add_const_impl(&trigger, acc, ctx, &missing_fn); | 101 | add_const_impl(&trigger, acc, ctx, &missing_fn); |
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs index 8678a3234..81321a897 100644 --- a/crates/ra_ide/src/completion/completion_context.rs +++ b/crates/ra_ide/src/completion/completion_context.rs | |||
@@ -1,9 +1,11 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{Semantics, SemanticsScope}; | ||
4 | use ra_db::SourceDatabase; | ||
3 | use ra_ide_db::RootDatabase; | 5 | use ra_ide_db::RootDatabase; |
4 | use ra_syntax::{ | 6 | use ra_syntax::{ |
5 | algo::{find_covering_element, find_node_at_offset}, | 7 | algo::{find_covering_element, find_node_at_offset}, |
6 | ast, AstNode, Parse, SourceFile, | 8 | ast, AstNode, SourceFile, |
7 | SyntaxKind::*, | 9 | SyntaxKind::*, |
8 | SyntaxNode, SyntaxToken, TextRange, TextUnit, | 10 | SyntaxNode, SyntaxToken, TextRange, TextUnit, |
9 | }; | 11 | }; |
@@ -15,8 +17,8 @@ use crate::FilePosition; | |||
15 | /// exactly is the cursor, syntax-wise. | 17 | /// exactly is the cursor, syntax-wise. |
16 | #[derive(Debug)] | 18 | #[derive(Debug)] |
17 | pub(crate) struct CompletionContext<'a> { | 19 | pub(crate) struct CompletionContext<'a> { |
20 | pub(super) sema: Semantics<'a, RootDatabase>, | ||
18 | pub(super) db: &'a RootDatabase, | 21 | pub(super) db: &'a RootDatabase, |
19 | pub(super) analyzer: hir::SourceAnalyzer, | ||
20 | pub(super) offset: TextUnit, | 22 | pub(super) offset: TextUnit, |
21 | pub(super) token: SyntaxToken, | 23 | pub(super) token: SyntaxToken, |
22 | pub(super) module: Option<hir::Module>, | 24 | pub(super) module: Option<hir::Module>, |
@@ -51,20 +53,26 @@ pub(crate) struct CompletionContext<'a> { | |||
51 | impl<'a> CompletionContext<'a> { | 53 | impl<'a> CompletionContext<'a> { |
52 | pub(super) fn new( | 54 | pub(super) fn new( |
53 | db: &'a RootDatabase, | 55 | db: &'a RootDatabase, |
54 | original_parse: &'a Parse<ast::SourceFile>, | ||
55 | position: FilePosition, | 56 | position: FilePosition, |
56 | ) -> Option<CompletionContext<'a>> { | 57 | ) -> Option<CompletionContext<'a>> { |
57 | let mut sb = hir::SourceBinder::new(db); | 58 | let sema = Semantics::new(db); |
58 | let module = sb.to_module_def(position.file_id); | 59 | |
59 | let token = | 60 | let original_file = sema.parse(position.file_id); |
60 | original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?; | 61 | |
61 | let analyzer = sb.analyze( | 62 | // Insert a fake ident to get a valid parse tree. We will use this file |
62 | hir::InFile::new(position.file_id.into(), &token.parent()), | 63 | // to determine context, though the original_file will be used for |
63 | Some(position.offset), | 64 | // actual completion. |
64 | ); | 65 | let file_with_fake_ident = { |
66 | let parse = db.parse(position.file_id); | ||
67 | let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string()); | ||
68 | parse.reparse(&edit).tree() | ||
69 | }; | ||
70 | |||
71 | let module = sema.to_module_def(position.file_id); | ||
72 | let token = original_file.syntax().token_at_offset(position.offset).left_biased()?; | ||
65 | let mut ctx = CompletionContext { | 73 | let mut ctx = CompletionContext { |
74 | sema, | ||
66 | db, | 75 | db, |
67 | analyzer, | ||
68 | token, | 76 | token, |
69 | offset: position.offset, | 77 | offset: position.offset, |
70 | module, | 78 | module, |
@@ -87,7 +95,7 @@ impl<'a> CompletionContext<'a> { | |||
87 | has_type_args: false, | 95 | has_type_args: false, |
88 | dot_receiver_is_ambiguous_float_literal: false, | 96 | dot_receiver_is_ambiguous_float_literal: false, |
89 | }; | 97 | }; |
90 | ctx.fill(&original_parse, position.offset); | 98 | ctx.fill(&original_file, file_with_fake_ident, position.offset); |
91 | Some(ctx) | 99 | Some(ctx) |
92 | } | 100 | } |
93 | 101 | ||
@@ -100,29 +108,33 @@ impl<'a> CompletionContext<'a> { | |||
100 | } | 108 | } |
101 | } | 109 | } |
102 | 110 | ||
103 | fn fill(&mut self, original_parse: &'a Parse<ast::SourceFile>, offset: TextUnit) { | 111 | pub(crate) fn scope(&self) -> SemanticsScope<'_, RootDatabase> { |
104 | // Insert a fake ident to get a valid parse tree. We will use this file | 112 | self.sema.scope_at_offset(&self.token.parent(), self.offset) |
105 | // to determine context, though the original_file will be used for | 113 | } |
106 | // actual completion. | ||
107 | let file = { | ||
108 | let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); | ||
109 | original_parse.reparse(&edit).tree() | ||
110 | }; | ||
111 | 114 | ||
115 | fn fill( | ||
116 | &mut self, | ||
117 | original_file: &ast::SourceFile, | ||
118 | file_with_fake_ident: ast::SourceFile, | ||
119 | offset: TextUnit, | ||
120 | ) { | ||
112 | // First, let's try to complete a reference to some declaration. | 121 | // First, let's try to complete a reference to some declaration. |
113 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) { | 122 | if let Some(name_ref) = |
123 | find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset) | ||
124 | { | ||
114 | // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. | 125 | // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. |
115 | // See RFC#1685. | 126 | // See RFC#1685. |
116 | if is_node::<ast::Param>(name_ref.syntax()) { | 127 | if is_node::<ast::Param>(name_ref.syntax()) { |
117 | self.is_param = true; | 128 | self.is_param = true; |
118 | return; | 129 | return; |
119 | } | 130 | } |
120 | self.classify_name_ref(original_parse.tree(), name_ref); | 131 | self.classify_name_ref(original_file, name_ref); |
121 | } | 132 | } |
122 | 133 | ||
123 | // Otherwise, see if this is a declaration. We can use heuristics to | 134 | // Otherwise, see if this is a declaration. We can use heuristics to |
124 | // suggest declaration names, see `CompletionKind::Magic`. | 135 | // suggest declaration names, see `CompletionKind::Magic`. |
125 | if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) { | 136 | if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset) |
137 | { | ||
126 | if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { | 138 | if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { |
127 | let parent = bind_pat.syntax().parent(); | 139 | let parent = bind_pat.syntax().parent(); |
128 | if parent.clone().and_then(ast::MatchArm::cast).is_some() | 140 | if parent.clone().and_then(ast::MatchArm::cast).is_some() |
@@ -136,13 +148,12 @@ impl<'a> CompletionContext<'a> { | |||
136 | return; | 148 | return; |
137 | } | 149 | } |
138 | if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { | 150 | if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { |
139 | self.record_lit_pat = | 151 | self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset); |
140 | find_node_at_offset(original_parse.tree().syntax(), self.offset); | ||
141 | } | 152 | } |
142 | } | 153 | } |
143 | } | 154 | } |
144 | 155 | ||
145 | fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) { | 156 | fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) { |
146 | self.name_ref_syntax = | 157 | self.name_ref_syntax = |
147 | find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); | 158 | find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); |
148 | let name_range = name_ref.syntax().text_range(); | 159 | let name_range = name_ref.syntax().text_range(); |
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs index 9cf86b26d..a52f7fdd9 100644 --- a/crates/ra_ide/src/diagnostics.rs +++ b/crates/ra_ide/src/diagnostics.rs | |||
@@ -2,7 +2,10 @@ | |||
2 | 2 | ||
3 | use std::cell::RefCell; | 3 | use std::cell::RefCell; |
4 | 4 | ||
5 | use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; | 5 | use hir::{ |
6 | diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}, | ||
7 | Semantics, | ||
8 | }; | ||
6 | use itertools::Itertools; | 9 | use itertools::Itertools; |
7 | use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; | 10 | use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; |
8 | use ra_ide_db::RootDatabase; | 11 | use ra_ide_db::RootDatabase; |
@@ -24,7 +27,7 @@ pub enum Severity { | |||
24 | 27 | ||
25 | pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> { | 28 | pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> { |
26 | let _p = profile("diagnostics"); | 29 | let _p = profile("diagnostics"); |
27 | let mut sb = hir::SourceBinder::new(db); | 30 | let sema = Semantics::new(db); |
28 | let parse = db.parse(file_id); | 31 | let parse = db.parse(file_id); |
29 | let mut res = Vec::new(); | 32 | let mut res = Vec::new(); |
30 | 33 | ||
@@ -110,7 +113,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> | |||
110 | fix: Some(fix), | 113 | fix: Some(fix), |
111 | }) | 114 | }) |
112 | }); | 115 | }); |
113 | if let Some(m) = sb.to_module_def(file_id) { | 116 | if let Some(m) = sema.to_module_def(file_id) { |
114 | m.diagnostics(db, &mut sink); | 117 | m.diagnostics(db, &mut sink); |
115 | }; | 118 | }; |
116 | drop(sink); | 119 | drop(sink); |
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs index c9d0058a6..5afb23764 100644 --- a/crates/ra_ide/src/display/navigation_target.rs +++ b/crates/ra_ide/src/display/navigation_target.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use either::Either; | 3 | use either::Either; |
4 | use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; | 4 | use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource}; |
5 | use ra_db::{FileId, SourceDatabase}; | 5 | use ra_db::{FileId, SourceDatabase}; |
6 | use ra_ide_db::RootDatabase; | 6 | use ra_ide_db::RootDatabase; |
7 | use ra_syntax::{ | 7 | use ra_syntax::{ |
@@ -11,7 +11,11 @@ use ra_syntax::{ | |||
11 | TextRange, | 11 | TextRange, |
12 | }; | 12 | }; |
13 | 13 | ||
14 | use crate::{expand::original_range, references::NameDefinition, FileSymbol}; | 14 | use crate::{ |
15 | // expand::original_range, | ||
16 | references::NameDefinition, | ||
17 | FileSymbol, | ||
18 | }; | ||
15 | 19 | ||
16 | use super::short_label::ShortLabel; | 20 | use super::short_label::ShortLabel; |
17 | 21 | ||
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs deleted file mode 100644 index 9f3aaa3a3..000000000 --- a/crates/ra_ide/src/expand.rs +++ /dev/null | |||
@@ -1,102 +0,0 @@ | |||
1 | //! Utilities to work with files, produced by macros. | ||
2 | use std::iter::successors; | ||
3 | |||
4 | use hir::{InFile, Origin}; | ||
5 | use ra_db::FileId; | ||
6 | use ra_ide_db::RootDatabase; | ||
7 | use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange}; | ||
8 | |||
9 | use crate::FileRange; | ||
10 | |||
11 | pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange { | ||
12 | if let Some((range, Origin::Call)) = original_range_and_origin(db, node) { | ||
13 | return range; | ||
14 | } | ||
15 | |||
16 | if let Some(expansion) = node.file_id.expansion_info(db) { | ||
17 | if let Some(call_node) = expansion.call_node() { | ||
18 | return FileRange { | ||
19 | file_id: call_node.file_id.original_file(db), | ||
20 | range: call_node.value.text_range(), | ||
21 | }; | ||
22 | } | ||
23 | } | ||
24 | |||
25 | FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() } | ||
26 | } | ||
27 | |||
28 | fn original_range_and_origin( | ||
29 | db: &RootDatabase, | ||
30 | node: InFile<&SyntaxNode>, | ||
31 | ) -> Option<(FileRange, Origin)> { | ||
32 | let expansion = node.file_id.expansion_info(db)?; | ||
33 | |||
34 | // does the input node have only one token? | ||
35 | let single = node.value.first_token()? == node.value.last_token()?; | ||
36 | |||
37 | // FIXME: We should handle recursive macro expansions | ||
38 | let (range, origin) = node.value.descendants().find_map(|it| { | ||
39 | let first = it.first_token()?; | ||
40 | let last = it.last_token()?; | ||
41 | |||
42 | if !single && first == last { | ||
43 | return None; | ||
44 | } | ||
45 | |||
46 | // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens | ||
47 | let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?; | ||
48 | let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?; | ||
49 | |||
50 | if first.file_id != last.file_id || first_origin != last_origin { | ||
51 | return None; | ||
52 | } | ||
53 | |||
54 | // FIXME: Add union method in TextRange | ||
55 | Some(( | ||
56 | first.with_value(union_range(first.value.text_range(), last.value.text_range())), | ||
57 | first_origin, | ||
58 | )) | ||
59 | })?; | ||
60 | |||
61 | return Some(( | ||
62 | FileRange { file_id: range.file_id.original_file(db), range: range.value }, | ||
63 | origin, | ||
64 | )); | ||
65 | |||
66 | fn union_range(a: TextRange, b: TextRange) -> TextRange { | ||
67 | let start = a.start().min(b.start()); | ||
68 | let end = a.end().max(b.end()); | ||
69 | TextRange::from_to(start, end) | ||
70 | } | ||
71 | } | ||
72 | |||
73 | pub(crate) fn descend_into_macros( | ||
74 | db: &RootDatabase, | ||
75 | file_id: FileId, | ||
76 | token: SyntaxToken, | ||
77 | ) -> InFile<SyntaxToken> { | ||
78 | let src = InFile::new(file_id.into(), token); | ||
79 | |||
80 | let source_analyzer = | ||
81 | hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None); | ||
82 | |||
83 | descend_into_macros_with_analyzer(db, &source_analyzer, src) | ||
84 | } | ||
85 | |||
86 | pub(crate) fn descend_into_macros_with_analyzer( | ||
87 | db: &RootDatabase, | ||
88 | source_analyzer: &hir::SourceAnalyzer, | ||
89 | src: InFile<SyntaxToken>, | ||
90 | ) -> InFile<SyntaxToken> { | ||
91 | successors(Some(src), |token| { | ||
92 | let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?; | ||
93 | let tt = macro_call.token_tree()?; | ||
94 | if !token.value.text_range().is_subrange(&tt.syntax().text_range()) { | ||
95 | return None; | ||
96 | } | ||
97 | let exp = source_analyzer.expand(db, token.with_value(¯o_call))?; | ||
98 | exp.map_token_down(db, token.as_ref()) | ||
99 | }) | ||
100 | .last() | ||
101 | .unwrap() | ||
102 | } | ||
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs index af2783bef..f2814e684 100644 --- a/crates/ra_ide/src/expand_macro.rs +++ b/crates/ra_ide/src/expand_macro.rs | |||
@@ -1,7 +1,6 @@ | |||
1 | //! This modules implements "expand macro" functionality in the IDE | 1 | //! This modules implements "expand macro" functionality in the IDE |
2 | 2 | ||
3 | use hir::db::AstDatabase; | 3 | use hir::Semantics; |
4 | use ra_db::SourceDatabase; | ||
5 | use ra_ide_db::RootDatabase; | 4 | use ra_ide_db::RootDatabase; |
6 | use ra_syntax::{ | 5 | use ra_syntax::{ |
7 | algo::{find_node_at_offset, replace_descendants}, | 6 | algo::{find_node_at_offset, replace_descendants}, |
@@ -17,13 +16,12 @@ pub struct ExpandedMacro { | |||
17 | } | 16 | } |
18 | 17 | ||
19 | pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> { | 18 | pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> { |
20 | let parse = db.parse(position.file_id); | 19 | let sema = Semantics::new(db); |
21 | let file = parse.tree(); | 20 | let file = sema.parse(position.file_id); |
22 | let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?; | 21 | let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?; |
23 | let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; | 22 | let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?; |
24 | 23 | ||
25 | let source = hir::InFile::new(position.file_id.into(), mac.syntax()); | 24 | let expanded = expand_macro_recur(&sema, &mac)?; |
26 | let expanded = expand_macro_recur(db, source, source.with_value(&mac))?; | ||
27 | 25 | ||
28 | // FIXME: | 26 | // FIXME: |
29 | // macro expansion may lose all white space information | 27 | // macro expansion may lose all white space information |
@@ -33,21 +31,16 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< | |||
33 | } | 31 | } |
34 | 32 | ||
35 | fn expand_macro_recur( | 33 | fn expand_macro_recur( |
36 | db: &RootDatabase, | 34 | sema: &Semantics<RootDatabase>, |
37 | source: hir::InFile<&SyntaxNode>, | 35 | macro_call: &ast::MacroCall, |
38 | macro_call: hir::InFile<&ast::MacroCall>, | ||
39 | ) -> Option<SyntaxNode> { | 36 | ) -> Option<SyntaxNode> { |
40 | let analyzer = hir::SourceAnalyzer::new(db, source, None); | 37 | let mut expanded = sema.expand(macro_call)?; |
41 | let expansion = analyzer.expand(db, macro_call)?; | ||
42 | let macro_file_id = expansion.file_id(); | ||
43 | let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?; | ||
44 | 38 | ||
45 | let children = expanded.descendants().filter_map(ast::MacroCall::cast); | 39 | let children = expanded.descendants().filter_map(ast::MacroCall::cast); |
46 | let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default(); | 40 | let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default(); |
47 | 41 | ||
48 | for child in children.into_iter() { | 42 | for child in children.into_iter() { |
49 | let node = hir::InFile::new(macro_file_id, &child); | 43 | if let Some(new_node) = expand_macro_recur(sema, &child) { |
50 | if let Some(new_node) = expand_macro_recur(db, source, node) { | ||
51 | // Replace the whole node if it is root | 44 | // Replace the whole node if it is root |
52 | // `replace_descendants` will not replace the parent node | 45 | // `replace_descendants` will not replace the parent node |
53 | // but `SyntaxNode::descendants` includes itself | 46 | // but `SyntaxNode::descendants` includes itself |
@@ -120,10 +113,12 @@ fn insert_whitespaces(syn: SyntaxNode) -> String { | |||
120 | 113 | ||
121 | #[cfg(test)] | 114 | #[cfg(test)] |
122 | mod tests { | 115 | mod tests { |
123 | use super::*; | ||
124 | use crate::mock_analysis::analysis_and_position; | ||
125 | use insta::assert_snapshot; | 116 | use insta::assert_snapshot; |
126 | 117 | ||
118 | use crate::mock_analysis::analysis_and_position; | ||
119 | |||
120 | use super::*; | ||
121 | |||
127 | fn check_expand_macro(fixture: &str) -> ExpandedMacro { | 122 | fn check_expand_macro(fixture: &str) -> ExpandedMacro { |
128 | let (analysis, pos) = analysis_and_position(fixture); | 123 | let (analysis, pos) = analysis_and_position(fixture); |
129 | analysis.expand_macro(pos).unwrap().unwrap() | 124 | analysis.expand_macro(pos).unwrap().unwrap() |
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs index 1e7d0621a..86e6f12d7 100644 --- a/crates/ra_ide/src/extend_selection.rs +++ b/crates/ra_ide/src/extend_selection.rs | |||
@@ -2,26 +2,26 @@ | |||
2 | 2 | ||
3 | use std::iter::successors; | 3 | use std::iter::successors; |
4 | 4 | ||
5 | use hir::db::AstDatabase; | 5 | use hir::Semantics; |
6 | use ra_db::SourceDatabase; | ||
7 | use ra_ide_db::RootDatabase; | 6 | use ra_ide_db::RootDatabase; |
8 | use ra_syntax::{ | 7 | use ra_syntax::{ |
9 | algo::find_covering_element, | 8 | algo::{self, find_covering_element}, |
10 | ast::{self, AstNode, AstToken}, | 9 | ast::{self, AstNode, AstToken}, |
11 | Direction, NodeOrToken, SyntaxElement, | 10 | Direction, NodeOrToken, |
12 | SyntaxKind::{self, *}, | 11 | SyntaxKind::{self, *}, |
13 | SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, | 12 | SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T, |
14 | }; | 13 | }; |
15 | 14 | ||
16 | use crate::{expand::descend_into_macros, FileId, FileRange}; | 15 | use crate::FileRange; |
17 | 16 | ||
18 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | 17 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { |
19 | let src = db.parse(frange.file_id).tree(); | 18 | let sema = Semantics::new(db); |
20 | try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range) | 19 | let src = sema.parse(frange.file_id); |
20 | try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range) | ||
21 | } | 21 | } |
22 | 22 | ||
23 | fn try_extend_selection( | 23 | fn try_extend_selection( |
24 | db: &RootDatabase, | 24 | sema: &Semantics<RootDatabase>, |
25 | root: &SyntaxNode, | 25 | root: &SyntaxNode, |
26 | frange: FileRange, | 26 | frange: FileRange, |
27 | ) -> Option<TextRange> { | 27 | ) -> Option<TextRange> { |
@@ -86,7 +86,7 @@ fn try_extend_selection( | |||
86 | // if we are in single token_tree, we maybe live in macro or attr | 86 | // if we are in single token_tree, we maybe live in macro or attr |
87 | if node.kind() == TOKEN_TREE { | 87 | if node.kind() == TOKEN_TREE { |
88 | if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { | 88 | if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) { |
89 | if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) { | 89 | if let Some(range) = extend_tokens_from_range(sema, macro_call, range) { |
90 | return Some(range); | 90 | return Some(range); |
91 | } | 91 | } |
92 | } | 92 | } |
@@ -96,7 +96,7 @@ fn try_extend_selection( | |||
96 | return Some(node.text_range()); | 96 | return Some(node.text_range()); |
97 | } | 97 | } |
98 | 98 | ||
99 | let node = shallowest_node(&node.into()).unwrap(); | 99 | let node = shallowest_node(&node.into()); |
100 | 100 | ||
101 | if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { | 101 | if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { |
102 | if let Some(range) = extend_list_item(&node) { | 102 | if let Some(range) = extend_list_item(&node) { |
@@ -108,8 +108,7 @@ fn try_extend_selection( | |||
108 | } | 108 | } |
109 | 109 | ||
110 | fn extend_tokens_from_range( | 110 | fn extend_tokens_from_range( |
111 | db: &RootDatabase, | 111 | sema: &Semantics<RootDatabase>, |
112 | file_id: FileId, | ||
113 | macro_call: ast::MacroCall, | 112 | macro_call: ast::MacroCall, |
114 | original_range: TextRange, | 113 | original_range: TextRange, |
115 | ) -> Option<TextRange> { | 114 | ) -> Option<TextRange> { |
@@ -130,25 +129,21 @@ fn extend_tokens_from_range( | |||
130 | } | 129 | } |
131 | 130 | ||
132 | // compute original mapped token range | 131 | // compute original mapped token range |
133 | let expanded = { | 132 | let extended = { |
134 | let first_node = descend_into_macros(db, file_id, first_token.clone()); | 133 | let fst_expanded = sema.descend_into_macros(first_token.clone()); |
135 | let first_node = first_node.map(|it| it.text_range()); | 134 | let lst_expanded = sema.descend_into_macros(last_token.clone()); |
136 | 135 | let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?; | |
137 | let last_node = descend_into_macros(db, file_id, last_token.clone()); | 136 | lca = shallowest_node(&lca); |
138 | if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id { | 137 | if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) { |
139 | return None; | 138 | lca = lca.parent()?; |
140 | } | 139 | } |
141 | first_node.map(|it| union_range(it, last_node.value.text_range())) | 140 | lca |
142 | }; | 141 | }; |
143 | 142 | ||
144 | // Compute parent node range | 143 | // Compute parent node range |
145 | let src = db.parse_or_expand(expanded.file_id)?; | ||
146 | let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?; | ||
147 | |||
148 | let validate = |token: &SyntaxToken| { | 144 | let validate = |token: &SyntaxToken| { |
149 | let node = descend_into_macros(db, file_id, token.clone()); | 145 | let expanded = sema.descend_into_macros(token.clone()); |
150 | node.file_id == expanded.file_id | 146 | algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended) |
151 | && node.value.text_range().is_subrange(&parent.text_range()) | ||
152 | }; | 147 | }; |
153 | 148 | ||
154 | // Find the first and last text range under expanded parent | 149 | // Find the first and last text range under expanded parent |
@@ -191,8 +186,8 @@ fn union_range(range: TextRange, r: TextRange) -> TextRange { | |||
191 | } | 186 | } |
192 | 187 | ||
193 | /// Find the shallowest node with same range, which allows us to traverse siblings. | 188 | /// Find the shallowest node with same range, which allows us to traverse siblings. |
194 | fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> { | 189 | fn shallowest_node(node: &SyntaxNode) -> SyntaxNode { |
195 | node.ancestors().take_while(|n| n.text_range() == node.text_range()).last() | 190 | node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap() |
196 | } | 191 | } |
197 | 192 | ||
198 | fn extend_single_word_in_comment_or_string( | 193 | fn extend_single_word_in_comment_or_string( |
diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs index feff1ec3f..6053c1bb6 100644 --- a/crates/ra_ide/src/goto_definition.rs +++ b/crates/ra_ide/src/goto_definition.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{db::AstDatabase, InFile, SourceBinder}; | 3 | use hir::Semantics; |
4 | use ra_ide_db::{symbol_index, RootDatabase}; | 4 | use ra_ide_db::{defs::classify_name, symbol_index, RootDatabase}; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | ast::{self}, | 6 | ast::{self}, |
7 | match_ast, AstNode, | 7 | match_ast, AstNode, |
@@ -11,8 +11,7 @@ use ra_syntax::{ | |||
11 | 11 | ||
12 | use crate::{ | 12 | use crate::{ |
13 | display::{ToNav, TryToNav}, | 13 | display::{ToNav, TryToNav}, |
14 | expand::descend_into_macros, | 14 | references::classify_name_ref, |
15 | references::{classify_name, classify_name_ref}, | ||
16 | FilePosition, NavigationTarget, RangeInfo, | 15 | FilePosition, NavigationTarget, RangeInfo, |
17 | }; | 16 | }; |
18 | 17 | ||
@@ -20,18 +19,18 @@ pub(crate) fn goto_definition( | |||
20 | db: &RootDatabase, | 19 | db: &RootDatabase, |
21 | position: FilePosition, | 20 | position: FilePosition, |
22 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 21 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
23 | let file = db.parse_or_expand(position.file_id.into())?; | 22 | let sema = Semantics::new(db); |
23 | let file = sema.parse(position.file_id).syntax().clone(); | ||
24 | let original_token = pick_best(file.token_at_offset(position.offset))?; | 24 | let original_token = pick_best(file.token_at_offset(position.offset))?; |
25 | let token = descend_into_macros(db, position.file_id, original_token.clone()); | 25 | let token = sema.descend_into_macros(original_token.clone()); |
26 | 26 | ||
27 | let mut sb = SourceBinder::new(db); | ||
28 | let nav_targets = match_ast! { | 27 | let nav_targets = match_ast! { |
29 | match (token.value.parent()) { | 28 | match (token.parent()) { |
30 | ast::NameRef(name_ref) => { | 29 | ast::NameRef(name_ref) => { |
31 | reference_definition(&mut sb, token.with_value(&name_ref)).to_vec() | 30 | reference_definition(&sema, &name_ref).to_vec() |
32 | }, | 31 | }, |
33 | ast::Name(name) => { | 32 | ast::Name(name) => { |
34 | name_definition(&mut sb, token.with_value(&name))? | 33 | name_definition(&sema, &name)? |
35 | }, | 34 | }, |
36 | _ => return None, | 35 | _ => return None, |
37 | } | 36 | } |
@@ -68,33 +67,33 @@ impl ReferenceResult { | |||
68 | } | 67 | } |
69 | 68 | ||
70 | pub(crate) fn reference_definition( | 69 | pub(crate) fn reference_definition( |
71 | sb: &mut SourceBinder<RootDatabase>, | 70 | sema: &Semantics<RootDatabase>, |
72 | name_ref: InFile<&ast::NameRef>, | 71 | name_ref: &ast::NameRef, |
73 | ) -> ReferenceResult { | 72 | ) -> ReferenceResult { |
74 | use self::ReferenceResult::*; | 73 | use self::ReferenceResult::*; |
75 | 74 | ||
76 | let name_kind = classify_name_ref(sb, name_ref); | 75 | let name_kind = classify_name_ref(sema, name_ref); |
77 | if let Some(def) = name_kind { | 76 | if let Some(def) = name_kind { |
78 | return match def.try_to_nav(sb.db) { | 77 | return match def.try_to_nav(sema.db) { |
79 | Some(nav) => ReferenceResult::Exact(nav), | 78 | Some(nav) => ReferenceResult::Exact(nav), |
80 | None => ReferenceResult::Approximate(Vec::new()), | 79 | None => ReferenceResult::Approximate(Vec::new()), |
81 | }; | 80 | }; |
82 | } | 81 | } |
83 | 82 | ||
84 | // Fallback index based approach: | 83 | // Fallback index based approach: |
85 | let navs = symbol_index::index_resolve(sb.db, name_ref.value) | 84 | let navs = symbol_index::index_resolve(sema.db, name_ref) |
86 | .into_iter() | 85 | .into_iter() |
87 | .map(|s| s.to_nav(sb.db)) | 86 | .map(|s| s.to_nav(sema.db)) |
88 | .collect(); | 87 | .collect(); |
89 | Approximate(navs) | 88 | Approximate(navs) |
90 | } | 89 | } |
91 | 90 | ||
92 | fn name_definition( | 91 | fn name_definition( |
93 | sb: &mut SourceBinder<RootDatabase>, | 92 | sema: &Semantics<RootDatabase>, |
94 | name: InFile<&ast::Name>, | 93 | name: &ast::Name, |
95 | ) -> Option<Vec<NavigationTarget>> { | 94 | ) -> Option<Vec<NavigationTarget>> { |
96 | let def = classify_name(sb, name)?; | 95 | let def = classify_name(sema, name)?; |
97 | let nav = def.try_to_nav(sb.db)?; | 96 | let nav = def.try_to_nav(sema.db)?; |
98 | Some(vec![nav]) | 97 | Some(vec![nav]) |
99 | } | 98 | } |
100 | 99 | ||
diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs index 69940fc36..869a4708b 100644 --- a/crates/ra_ide/src/goto_type_definition.rs +++ b/crates/ra_ide/src/goto_type_definition.rs | |||
@@ -1,31 +1,31 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::db::AstDatabase; | ||
4 | use ra_ide_db::RootDatabase; | 3 | use ra_ide_db::RootDatabase; |
5 | use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; | 4 | use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; |
6 | 5 | ||
7 | use crate::{ | 6 | use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo}; |
8 | display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo, | ||
9 | }; | ||
10 | 7 | ||
11 | pub(crate) fn goto_type_definition( | 8 | pub(crate) fn goto_type_definition( |
12 | db: &RootDatabase, | 9 | db: &RootDatabase, |
13 | position: FilePosition, | 10 | position: FilePosition, |
14 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 11 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
15 | let file = db.parse_or_expand(position.file_id.into())?; | 12 | let sema = hir::Semantics::new(db); |
16 | let token = pick_best(file.token_at_offset(position.offset))?; | 13 | |
17 | let token = descend_into_macros(db, position.file_id, token); | 14 | let file: ast::SourceFile = sema.parse(position.file_id); |
18 | 15 | let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?; | |
19 | let node = token | 16 | let token: SyntaxToken = sema.descend_into_macros(token); |
20 | .value | 17 | |
21 | .ancestors() | 18 | let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| { |
22 | .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; | 19 | let ty = match_ast! { |
23 | 20 | match node { | |
24 | let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None); | 21 | ast::Expr(expr) => { sema.type_of_expr(&expr)? }, |
22 | ast::Pat(pat) => { sema.type_of_pat(&pat)? }, | ||
23 | _ => { return None }, | ||
24 | } | ||
25 | }; | ||
25 | 26 | ||
26 | let ty: hir::Type = ast::Expr::cast(node.clone()) | 27 | Some((ty, node)) |
27 | .and_then(|e| analyzer.type_of(db, &e)) | 28 | })?; |
28 | .or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?; | ||
29 | 29 | ||
30 | let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; | 30 | let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; |
31 | 31 | ||
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs index 1c6ca36df..ace33c079 100644 --- a/crates/ra_ide/src/hover.rs +++ b/crates/ra_ide/src/hover.rs | |||
@@ -1,8 +1,10 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder}; | 3 | use hir::{Adt, HasSource, HirDisplay, Semantics}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_ide_db::{ |
5 | use ra_ide_db::{defs::NameDefinition, RootDatabase}; | 5 | defs::{classify_name, NameDefinition}, |
6 | RootDatabase, | ||
7 | }; | ||
6 | use ra_syntax::{ | 8 | use ra_syntax::{ |
7 | algo::find_covering_element, | 9 | algo::find_covering_element, |
8 | ast::{self, DocCommentsOwner}, | 10 | ast::{self, DocCommentsOwner}, |
@@ -13,8 +15,7 @@ use ra_syntax::{ | |||
13 | 15 | ||
14 | use crate::{ | 16 | use crate::{ |
15 | display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, | 17 | display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, |
16 | expand::{descend_into_macros, original_range}, | 18 | references::classify_name_ref, |
17 | references::{classify_name, classify_name_ref}, | ||
18 | FilePosition, FileRange, RangeInfo, | 19 | FilePosition, FileRange, RangeInfo, |
19 | }; | 20 | }; |
20 | 21 | ||
@@ -143,25 +144,25 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: NameDefinition) -> Option<S | |||
143 | } | 144 | } |
144 | 145 | ||
145 | pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { | 146 | pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { |
146 | let file = db.parse_or_expand(position.file_id.into())?; | 147 | let sema = Semantics::new(db); |
148 | let file = sema.parse(position.file_id).syntax().clone(); | ||
147 | let token = pick_best(file.token_at_offset(position.offset))?; | 149 | let token = pick_best(file.token_at_offset(position.offset))?; |
148 | let token = descend_into_macros(db, position.file_id, token); | 150 | let token = sema.descend_into_macros(token); |
149 | 151 | ||
150 | let mut res = HoverResult::new(); | 152 | let mut res = HoverResult::new(); |
151 | 153 | ||
152 | let mut sb = SourceBinder::new(db); | ||
153 | if let Some((node, name_kind)) = match_ast! { | 154 | if let Some((node, name_kind)) = match_ast! { |
154 | match (token.value.parent()) { | 155 | match (token.parent()) { |
155 | ast::NameRef(name_ref) => { | 156 | ast::NameRef(name_ref) => { |
156 | classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().clone(), d)) | 157 | classify_name_ref(&sema, &name_ref).map(|d| (name_ref.syntax().clone(), d)) |
157 | }, | 158 | }, |
158 | ast::Name(name) => { | 159 | ast::Name(name) => { |
159 | classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().clone(), d)) | 160 | classify_name(&sema, &name).map(|d| (name.syntax().clone(), d)) |
160 | }, | 161 | }, |
161 | _ => None, | 162 | _ => None, |
162 | } | 163 | } |
163 | } { | 164 | } { |
164 | let range = original_range(db, token.with_value(&node)).range; | 165 | let range = sema.original_range(&node).range; |
165 | res.extend(hover_text_from_name_kind(db, name_kind)); | 166 | res.extend(hover_text_from_name_kind(db, name_kind)); |
166 | 167 | ||
167 | if !res.is_empty() { | 168 | if !res.is_empty() { |
@@ -170,11 +171,10 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
170 | } | 171 | } |
171 | 172 | ||
172 | let node = token | 173 | let node = token |
173 | .value | ||
174 | .ancestors() | 174 | .ancestors() |
175 | .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; | 175 | .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; |
176 | 176 | ||
177 | let frange = original_range(db, token.with_value(&node)); | 177 | let frange = sema.original_range(&node); |
178 | res.extend(type_of(db, frange).map(rust_code_markup)); | 178 | res.extend(type_of(db, frange).map(rust_code_markup)); |
179 | if res.is_empty() { | 179 | if res.is_empty() { |
180 | return None; | 180 | return None; |
@@ -197,19 +197,17 @@ fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> { | |||
197 | } | 197 | } |
198 | 198 | ||
199 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { | 199 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { |
200 | let parse = db.parse(frange.file_id); | 200 | let sema = Semantics::new(db); |
201 | let leaf_node = find_covering_element(parse.tree().syntax(), frange.range); | 201 | let source_file = sema.parse(frange.file_id); |
202 | let leaf_node = find_covering_element(source_file.syntax(), frange.range); | ||
202 | // if we picked identifier, expand to pattern/expression | 203 | // if we picked identifier, expand to pattern/expression |
203 | let node = leaf_node | 204 | let node = leaf_node |
204 | .ancestors() | 205 | .ancestors() |
205 | .take_while(|it| it.text_range() == leaf_node.text_range()) | 206 | .take_while(|it| it.text_range() == leaf_node.text_range()) |
206 | .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; | 207 | .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; |
207 | let analyzer = | 208 | let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| sema.type_of_expr(&e)) { |
208 | hir::SourceAnalyzer::new(db, hir::InFile::new(frange.file_id.into(), &node), None); | ||
209 | let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) | ||
210 | { | ||
211 | ty | 209 | ty |
212 | } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) { | 210 | } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| sema.type_of_pat(&p)) { |
213 | ty | 211 | ty |
214 | } else { | 212 | } else { |
215 | return None; | 213 | return None; |
@@ -219,11 +217,12 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { | |||
219 | 217 | ||
220 | #[cfg(test)] | 218 | #[cfg(test)] |
221 | mod tests { | 219 | mod tests { |
220 | use ra_db::FileLoader; | ||
221 | use ra_syntax::TextRange; | ||
222 | |||
222 | use crate::mock_analysis::{ | 223 | use crate::mock_analysis::{ |
223 | analysis_and_position, single_file_with_position, single_file_with_range, | 224 | analysis_and_position, single_file_with_position, single_file_with_range, |
224 | }; | 225 | }; |
225 | use ra_db::FileLoader; | ||
226 | use ra_syntax::TextRange; | ||
227 | 226 | ||
228 | fn trim_markup(s: &str) -> &str { | 227 | fn trim_markup(s: &str) -> &str { |
229 | s.trim_start_matches("```rust\n").trim_end_matches("\n```") | 228 | s.trim_start_matches("```rust\n").trim_end_matches("\n```") |
diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs index 64a2dadc8..bf82b2a16 100644 --- a/crates/ra_ide/src/impls.rs +++ b/crates/ra_ide/src/impls.rs | |||
@@ -1,7 +1,6 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{Crate, ImplBlock, SourceBinder}; | 3 | use hir::{Crate, ImplBlock, Semantics}; |
4 | use ra_db::SourceDatabase; | ||
5 | use ra_ide_db::RootDatabase; | 4 | use ra_ide_db::RootDatabase; |
6 | use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; | 5 | use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; |
7 | 6 | ||
@@ -11,21 +10,21 @@ pub(crate) fn goto_implementation( | |||
11 | db: &RootDatabase, | 10 | db: &RootDatabase, |
12 | position: FilePosition, | 11 | position: FilePosition, |
13 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 12 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
14 | let parse = db.parse(position.file_id); | 13 | let sema = Semantics::new(db); |
15 | let syntax = parse.tree().syntax().clone(); | 14 | let source_file = sema.parse(position.file_id); |
16 | let mut sb = SourceBinder::new(db); | 15 | let syntax = source_file.syntax().clone(); |
17 | 16 | ||
18 | let krate = sb.to_module_def(position.file_id)?.krate(); | 17 | let krate = sema.to_module_def(position.file_id)?.krate(); |
19 | 18 | ||
20 | if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) { | 19 | if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) { |
21 | return Some(RangeInfo::new( | 20 | return Some(RangeInfo::new( |
22 | nominal_def.syntax().text_range(), | 21 | nominal_def.syntax().text_range(), |
23 | impls_for_def(&mut sb, position, &nominal_def, krate)?, | 22 | impls_for_def(&sema, &nominal_def, krate)?, |
24 | )); | 23 | )); |
25 | } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) { | 24 | } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) { |
26 | return Some(RangeInfo::new( | 25 | return Some(RangeInfo::new( |
27 | trait_def.syntax().text_range(), | 26 | trait_def.syntax().text_range(), |
28 | impls_for_trait(&mut sb, position, &trait_def, krate)?, | 27 | impls_for_trait(&sema, &trait_def, krate)?, |
29 | )); | 28 | )); |
30 | } | 29 | } |
31 | 30 | ||
@@ -33,49 +32,37 @@ pub(crate) fn goto_implementation( | |||
33 | } | 32 | } |
34 | 33 | ||
35 | fn impls_for_def( | 34 | fn impls_for_def( |
36 | sb: &mut SourceBinder<RootDatabase>, | 35 | sema: &Semantics<RootDatabase>, |
37 | position: FilePosition, | ||
38 | node: &ast::NominalDef, | 36 | node: &ast::NominalDef, |
39 | krate: Crate, | 37 | krate: Crate, |
40 | ) -> Option<Vec<NavigationTarget>> { | 38 | ) -> Option<Vec<NavigationTarget>> { |
41 | let ty = match node { | 39 | let ty = match node { |
42 | ast::NominalDef::StructDef(def) => { | 40 | ast::NominalDef::StructDef(def) => sema.to_def(def)?.ty(sema.db), |
43 | let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; | 41 | ast::NominalDef::EnumDef(def) => sema.to_def(def)?.ty(sema.db), |
44 | sb.to_def(src)?.ty(sb.db) | 42 | ast::NominalDef::UnionDef(def) => sema.to_def(def)?.ty(sema.db), |
45 | } | ||
46 | ast::NominalDef::EnumDef(def) => { | ||
47 | let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; | ||
48 | sb.to_def(src)?.ty(sb.db) | ||
49 | } | ||
50 | ast::NominalDef::UnionDef(def) => { | ||
51 | let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; | ||
52 | sb.to_def(src)?.ty(sb.db) | ||
53 | } | ||
54 | }; | 43 | }; |
55 | 44 | ||
56 | let impls = ImplBlock::all_in_crate(sb.db, krate); | 45 | let impls = ImplBlock::all_in_crate(sema.db, krate); |
57 | 46 | ||
58 | Some( | 47 | Some( |
59 | impls | 48 | impls |
60 | .into_iter() | 49 | .into_iter() |
61 | .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sb.db))) | 50 | .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db))) |
62 | .map(|imp| imp.to_nav(sb.db)) | 51 | .map(|imp| imp.to_nav(sema.db)) |
63 | .collect(), | 52 | .collect(), |
64 | ) | 53 | ) |
65 | } | 54 | } |
66 | 55 | ||
67 | fn impls_for_trait( | 56 | fn impls_for_trait( |
68 | sb: &mut SourceBinder<RootDatabase>, | 57 | sema: &Semantics<RootDatabase>, |
69 | position: FilePosition, | ||
70 | node: &ast::TraitDef, | 58 | node: &ast::TraitDef, |
71 | krate: Crate, | 59 | krate: Crate, |
72 | ) -> Option<Vec<NavigationTarget>> { | 60 | ) -> Option<Vec<NavigationTarget>> { |
73 | let src = hir::InFile { file_id: position.file_id.into(), value: node.clone() }; | 61 | let tr = sema.to_def(node)?; |
74 | let tr = sb.to_def(src)?; | ||
75 | 62 | ||
76 | let impls = ImplBlock::for_trait(sb.db, krate, tr); | 63 | let impls = ImplBlock::for_trait(sema.db, krate, tr); |
77 | 64 | ||
78 | Some(impls.into_iter().map(|imp| imp.to_nav(sb.db)).collect()) | 65 | Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect()) |
79 | } | 66 | } |
80 | 67 | ||
81 | #[cfg(test)] | 68 | #[cfg(test)] |
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs index b42aa1523..35e3f782d 100644 --- a/crates/ra_ide/src/inlay_hints.rs +++ b/crates/ra_ide/src/inlay_hints.rs | |||
@@ -1,12 +1,11 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{Adt, HirDisplay, SourceAnalyzer, SourceBinder, Type}; | 3 | use hir::{Adt, HirDisplay, Semantics, Type}; |
4 | use once_cell::unsync::Lazy; | ||
5 | use ra_ide_db::RootDatabase; | 4 | use ra_ide_db::RootDatabase; |
6 | use ra_prof::profile; | 5 | use ra_prof::profile; |
7 | use ra_syntax::{ | 6 | use ra_syntax::{ |
8 | ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, | 7 | ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, |
9 | match_ast, SmolStr, SourceFile, SyntaxNode, TextRange, | 8 | match_ast, SmolStr, SyntaxNode, TextRange, |
10 | }; | 9 | }; |
11 | 10 | ||
12 | use crate::{FileId, FunctionSignature}; | 11 | use crate::{FileId, FunctionSignature}; |
@@ -27,38 +26,36 @@ pub struct InlayHint { | |||
27 | pub(crate) fn inlay_hints( | 26 | pub(crate) fn inlay_hints( |
28 | db: &RootDatabase, | 27 | db: &RootDatabase, |
29 | file_id: FileId, | 28 | file_id: FileId, |
30 | file: &SourceFile, | ||
31 | max_inlay_hint_length: Option<usize>, | 29 | max_inlay_hint_length: Option<usize>, |
32 | ) -> Vec<InlayHint> { | 30 | ) -> Vec<InlayHint> { |
33 | let mut sb = SourceBinder::new(db); | 31 | let sema = Semantics::new(db); |
32 | let file = sema.parse(file_id); | ||
34 | let mut res = Vec::new(); | 33 | let mut res = Vec::new(); |
35 | for node in file.syntax().descendants() { | 34 | for node in file.syntax().descendants() { |
36 | get_inlay_hints(&mut res, &mut sb, file_id, &node, max_inlay_hint_length); | 35 | get_inlay_hints(&mut res, &sema, &node, max_inlay_hint_length); |
37 | } | 36 | } |
38 | res | 37 | res |
39 | } | 38 | } |
40 | 39 | ||
41 | fn get_inlay_hints( | 40 | fn get_inlay_hints( |
42 | acc: &mut Vec<InlayHint>, | 41 | acc: &mut Vec<InlayHint>, |
43 | sb: &mut SourceBinder<RootDatabase>, | 42 | sema: &Semantics<RootDatabase>, |
44 | file_id: FileId, | ||
45 | node: &SyntaxNode, | 43 | node: &SyntaxNode, |
46 | max_inlay_hint_length: Option<usize>, | 44 | max_inlay_hint_length: Option<usize>, |
47 | ) -> Option<()> { | 45 | ) -> Option<()> { |
48 | let _p = profile("get_inlay_hints"); | 46 | let _p = profile("get_inlay_hints"); |
49 | let db = sb.db; | 47 | let db = sema.db; |
50 | let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None)); | ||
51 | match_ast! { | 48 | match_ast! { |
52 | match node { | 49 | match node { |
53 | ast::CallExpr(it) => { | 50 | ast::CallExpr(it) => { |
54 | get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); | 51 | get_param_name_hints(acc, sema, ast::Expr::from(it)); |
55 | }, | 52 | }, |
56 | ast::MethodCallExpr(it) => { | 53 | ast::MethodCallExpr(it) => { |
57 | get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); | 54 | get_param_name_hints(acc, sema, ast::Expr::from(it)); |
58 | }, | 55 | }, |
59 | ast::BindPat(it) => { | 56 | ast::BindPat(it) => { |
60 | let pat = ast::Pat::from(it.clone()); | 57 | let pat = ast::Pat::from(it.clone()); |
61 | let ty = analyzer.type_of_pat(db, &pat)?; | 58 | let ty = sema.type_of_pat(&pat)?; |
62 | 59 | ||
63 | if should_not_display_type_hint(db, &it, &ty) { | 60 | if should_not_display_type_hint(db, &it, &ty) { |
64 | return None; | 61 | return None; |
@@ -125,8 +122,7 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_ | |||
125 | 122 | ||
126 | fn get_param_name_hints( | 123 | fn get_param_name_hints( |
127 | acc: &mut Vec<InlayHint>, | 124 | acc: &mut Vec<InlayHint>, |
128 | db: &RootDatabase, | 125 | sema: &Semantics<RootDatabase>, |
129 | analyzer: &SourceAnalyzer, | ||
130 | expr: ast::Expr, | 126 | expr: ast::Expr, |
131 | ) -> Option<()> { | 127 | ) -> Option<()> { |
132 | let args = match &expr { | 128 | let args = match &expr { |
@@ -138,7 +134,7 @@ fn get_param_name_hints( | |||
138 | // we need args len to determine whether to skip or not the &self parameter | 134 | // we need args len to determine whether to skip or not the &self parameter |
139 | .collect::<Vec<_>>(); | 135 | .collect::<Vec<_>>(); |
140 | 136 | ||
141 | let fn_signature = get_fn_signature(db, analyzer, &expr)?; | 137 | let fn_signature = get_fn_signature(sema, &expr)?; |
142 | let n_params_to_skip = | 138 | let n_params_to_skip = |
143 | if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() { | 139 | if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() { |
144 | 1 | 140 | 1 |
@@ -184,28 +180,26 @@ fn should_show_param_hint( | |||
184 | true | 180 | true |
185 | } | 181 | } |
186 | 182 | ||
187 | fn get_fn_signature( | 183 | fn get_fn_signature(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<FunctionSignature> { |
188 | db: &RootDatabase, | ||
189 | analyzer: &SourceAnalyzer, | ||
190 | expr: &ast::Expr, | ||
191 | ) -> Option<FunctionSignature> { | ||
192 | match expr { | 184 | match expr { |
193 | ast::Expr::CallExpr(expr) => { | 185 | ast::Expr::CallExpr(expr) => { |
194 | // FIXME: Type::as_callable is broken for closures | 186 | // FIXME: Type::as_callable is broken for closures |
195 | let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; | 187 | let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?; |
196 | match callable_def { | 188 | match callable_def { |
197 | hir::CallableDef::FunctionId(it) => { | 189 | hir::CallableDef::FunctionId(it) => { |
198 | Some(FunctionSignature::from_hir(db, it.into())) | 190 | Some(FunctionSignature::from_hir(sema.db, it.into())) |
191 | } | ||
192 | hir::CallableDef::StructId(it) => { | ||
193 | FunctionSignature::from_struct(sema.db, it.into()) | ||
199 | } | 194 | } |
200 | hir::CallableDef::StructId(it) => FunctionSignature::from_struct(db, it.into()), | ||
201 | hir::CallableDef::EnumVariantId(it) => { | 195 | hir::CallableDef::EnumVariantId(it) => { |
202 | FunctionSignature::from_enum_variant(db, it.into()) | 196 | FunctionSignature::from_enum_variant(sema.db, it.into()) |
203 | } | 197 | } |
204 | } | 198 | } |
205 | } | 199 | } |
206 | ast::Expr::MethodCallExpr(expr) => { | 200 | ast::Expr::MethodCallExpr(expr) => { |
207 | let fn_def = analyzer.resolve_method_call(&expr)?; | 201 | let fn_def = sema.resolve_method_call(&expr)?; |
208 | Some(FunctionSignature::from_hir(db, fn_def)) | 202 | Some(FunctionSignature::from_hir(sema.db, fn_def)) |
209 | } | 203 | } |
210 | _ => None, | 204 | _ => None, |
211 | } | 205 | } |
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs index d22870669..f31d3c295 100644 --- a/crates/ra_ide/src/lib.rs +++ b/crates/ra_ide/src/lib.rs | |||
@@ -35,7 +35,6 @@ mod typing; | |||
35 | mod matching_brace; | 35 | mod matching_brace; |
36 | mod display; | 36 | mod display; |
37 | mod inlay_hints; | 37 | mod inlay_hints; |
38 | mod expand; | ||
39 | mod expand_macro; | 38 | mod expand_macro; |
40 | mod ssr; | 39 | mod ssr; |
41 | 40 | ||
@@ -319,9 +318,7 @@ impl Analysis { | |||
319 | file_id: FileId, | 318 | file_id: FileId, |
320 | max_inlay_hint_length: Option<usize>, | 319 | max_inlay_hint_length: Option<usize>, |
321 | ) -> Cancelable<Vec<InlayHint>> { | 320 | ) -> Cancelable<Vec<InlayHint>> { |
322 | self.with_db(|db| { | 321 | self.with_db(|db| inlay_hints::inlay_hints(db, file_id, max_inlay_hint_length)) |
323 | inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length) | ||
324 | }) | ||
325 | } | 322 | } |
326 | 323 | ||
327 | /// Returns the set of folding ranges. | 324 | /// Returns the set of folding ranges. |
diff --git a/crates/ra_ide/src/marks.rs b/crates/ra_ide/src/marks.rs index bcb67e373..7b8b727b4 100644 --- a/crates/ra_ide/src/marks.rs +++ b/crates/ra_ide/src/marks.rs | |||
@@ -11,4 +11,5 @@ test_utils::marks!( | |||
11 | call_info_bad_offset | 11 | call_info_bad_offset |
12 | dont_complete_current_use | 12 | dont_complete_current_use |
13 | test_resolve_parent_module_on_module_decl | 13 | test_resolve_parent_module_on_module_decl |
14 | search_filters_by_range | ||
14 | ); | 15 | ); |
diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs index af14d6ab3..2c4bdb039 100644 --- a/crates/ra_ide/src/parent_module.rs +++ b/crates/ra_ide/src/parent_module.rs | |||
@@ -1,6 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_db::{CrateId, FileId, FilePosition, SourceDatabase}; | 3 | use hir::Semantics; |
4 | use ra_db::{CrateId, FileId, FilePosition}; | ||
4 | use ra_ide_db::RootDatabase; | 5 | use ra_ide_db::RootDatabase; |
5 | use ra_syntax::{ | 6 | use ra_syntax::{ |
6 | algo::find_node_at_offset, | 7 | algo::find_node_at_offset, |
@@ -13,10 +14,10 @@ use crate::NavigationTarget; | |||
13 | /// This returns `Vec` because a module may be included from several places. We | 14 | /// This returns `Vec` because a module may be included from several places. We |
14 | /// don't handle this case yet though, so the Vec has length at most one. | 15 | /// don't handle this case yet though, so the Vec has length at most one. |
15 | pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { | 16 | pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { |
16 | let mut sb = hir::SourceBinder::new(db); | 17 | let sema = Semantics::new(db); |
17 | let parse = db.parse(position.file_id); | 18 | let source_file = sema.parse(position.file_id); |
18 | 19 | ||
19 | let mut module = find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset); | 20 | let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset); |
20 | 21 | ||
21 | // If cursor is literally on `mod foo`, go to the grandpa. | 22 | // If cursor is literally on `mod foo`, go to the grandpa. |
22 | if let Some(m) = &module { | 23 | if let Some(m) = &module { |
@@ -30,8 +31,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na | |||
30 | } | 31 | } |
31 | 32 | ||
32 | let module = match module { | 33 | let module = match module { |
33 | Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)), | 34 | Some(module) => sema.to_def(&module), |
34 | None => sb.to_module_def(position.file_id), | 35 | None => sema.to_module_def(position.file_id), |
35 | }; | 36 | }; |
36 | let module = match module { | 37 | let module = match module { |
37 | None => return Vec::new(), | 38 | None => return Vec::new(), |
@@ -43,8 +44,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na | |||
43 | 44 | ||
44 | /// Returns `Vec` for the same reason as `parent_module` | 45 | /// Returns `Vec` for the same reason as `parent_module` |
45 | pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { | 46 | pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { |
46 | let mut sb = hir::SourceBinder::new(db); | 47 | let sema = Semantics::new(db); |
47 | let module = match sb.to_module_def(file_id) { | 48 | let module = match sema.to_module_def(file_id) { |
48 | Some(it) => it, | 49 | Some(it) => it, |
49 | None => return Vec::new(), | 50 | None => return Vec::new(), |
50 | }; | 51 | }; |
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs index aadc2dbcb..baa8a4d29 100644 --- a/crates/ra_ide/src/references.rs +++ b/crates/ra_ide/src/references.rs | |||
@@ -13,25 +13,22 @@ mod classify; | |||
13 | mod rename; | 13 | mod rename; |
14 | mod search_scope; | 14 | mod search_scope; |
15 | 15 | ||
16 | use crate::expand::descend_into_macros_with_analyzer; | 16 | use hir::Semantics; |
17 | use hir::{InFile, SourceBinder}; | ||
18 | use once_cell::unsync::Lazy; | 17 | use once_cell::unsync::Lazy; |
19 | use ra_db::{SourceDatabase, SourceDatabaseExt}; | 18 | use ra_db::SourceDatabaseExt; |
20 | use ra_ide_db::RootDatabase; | 19 | use ra_ide_db::RootDatabase; |
21 | use ra_prof::profile; | 20 | use ra_prof::profile; |
22 | use ra_syntax::{ | 21 | use ra_syntax::{ |
23 | algo::find_node_at_offset, | 22 | algo::find_node_at_offset, |
24 | ast::{self, NameOwner}, | 23 | ast::{self, NameOwner}, |
25 | match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, | 24 | match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, |
26 | }; | 25 | }; |
26 | use test_utils::tested_by; | ||
27 | 27 | ||
28 | use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; | 28 | use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; |
29 | 29 | ||
30 | pub(crate) use self::{ | 30 | pub(crate) use self::{classify::classify_name_ref, rename::rename}; |
31 | classify::{classify_name, classify_name_ref}, | 31 | pub(crate) use ra_ide_db::defs::{classify_name, NameDefinition}; |
32 | rename::rename, | ||
33 | }; | ||
34 | pub(crate) use ra_ide_db::defs::NameDefinition; | ||
35 | 32 | ||
36 | pub use self::search_scope::SearchScope; | 33 | pub use self::search_scope::SearchScope; |
37 | 34 | ||
@@ -114,8 +111,8 @@ pub(crate) fn find_all_refs( | |||
114 | position: FilePosition, | 111 | position: FilePosition, |
115 | search_scope: Option<SearchScope>, | 112 | search_scope: Option<SearchScope>, |
116 | ) -> Option<RangeInfo<ReferenceSearchResult>> { | 113 | ) -> Option<RangeInfo<ReferenceSearchResult>> { |
117 | let parse = db.parse(position.file_id); | 114 | let sema = Semantics::new(db); |
118 | let syntax = parse.tree().syntax().clone(); | 115 | let syntax = sema.parse(position.file_id).syntax().clone(); |
119 | 116 | ||
120 | let (opt_name, search_kind) = | 117 | let (opt_name, search_kind) = |
121 | if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { | 118 | if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { |
@@ -124,7 +121,7 @@ pub(crate) fn find_all_refs( | |||
124 | (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) | 121 | (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) |
125 | }; | 122 | }; |
126 | 123 | ||
127 | let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?; | 124 | let RangeInfo { range, info: (name, def) } = find_name(&sema, &syntax, position, opt_name)?; |
128 | let declaration = def.try_to_nav(db)?; | 125 | let declaration = def.try_to_nav(db)?; |
129 | 126 | ||
130 | let search_scope = { | 127 | let search_scope = { |
@@ -152,19 +149,18 @@ pub(crate) fn find_all_refs( | |||
152 | } | 149 | } |
153 | 150 | ||
154 | fn find_name( | 151 | fn find_name( |
155 | db: &RootDatabase, | 152 | sema: &Semantics<RootDatabase>, |
156 | syntax: &SyntaxNode, | 153 | syntax: &SyntaxNode, |
157 | position: FilePosition, | 154 | position: FilePosition, |
158 | opt_name: Option<ast::Name>, | 155 | opt_name: Option<ast::Name>, |
159 | ) -> Option<RangeInfo<(String, NameDefinition)>> { | 156 | ) -> Option<RangeInfo<(String, NameDefinition)>> { |
160 | let mut sb = SourceBinder::new(db); | ||
161 | if let Some(name) = opt_name { | 157 | if let Some(name) = opt_name { |
162 | let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; | 158 | let def = classify_name(sema, &name)?; |
163 | let range = name.syntax().text_range(); | 159 | let range = name.syntax().text_range(); |
164 | return Some(RangeInfo::new(range, (name.text().to_string(), def))); | 160 | return Some(RangeInfo::new(range, (name.text().to_string(), def))); |
165 | } | 161 | } |
166 | let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?; | 162 | let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?; |
167 | let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?; | 163 | let def = classify_name_ref(sema, &name_ref)?; |
168 | let range = name_ref.syntax().text_range(); | 164 | let range = name_ref.syntax().text_range(); |
169 | Some(RangeInfo::new(range, (name_ref.text().to_string(), def))) | 165 | Some(RangeInfo::new(range, (name_ref.text().to_string(), def))) |
170 | } | 166 | } |
@@ -182,64 +178,53 @@ fn process_definition( | |||
182 | 178 | ||
183 | for (file_id, search_range) in scope { | 179 | for (file_id, search_range) in scope { |
184 | let text = db.file_text(file_id); | 180 | let text = db.file_text(file_id); |
181 | let search_range = | ||
182 | search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text))); | ||
185 | 183 | ||
186 | let parse = Lazy::new(|| SourceFile::parse(&text)); | 184 | let sema = Semantics::new(db); |
187 | let mut sb = Lazy::new(|| SourceBinder::new(db)); | 185 | let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); |
188 | let mut analyzer = None; | ||
189 | 186 | ||
190 | for (idx, _) in text.match_indices(pat) { | 187 | for (idx, _) in text.match_indices(pat) { |
191 | let offset = TextUnit::from_usize(idx); | 188 | let offset = TextUnit::from_usize(idx); |
189 | if !search_range.contains_inclusive(offset) { | ||
190 | tested_by!(search_filters_by_range); | ||
191 | continue; | ||
192 | } | ||
192 | 193 | ||
193 | let (name_ref, range) = if let Some(name_ref) = | 194 | let name_ref = |
194 | find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset) | 195 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) { |
195 | { | 196 | name_ref |
196 | let range = name_ref.syntax().text_range(); | ||
197 | (InFile::new(file_id.into(), name_ref), range) | ||
198 | } else { | ||
199 | // Handle macro token cases | ||
200 | let t = match parse.tree().syntax().token_at_offset(offset) { | ||
201 | TokenAtOffset::None => continue, | ||
202 | TokenAtOffset::Single(t) => t, | ||
203 | TokenAtOffset::Between(_, t) => t, | ||
204 | }; | ||
205 | let range = t.text_range(); | ||
206 | let analyzer = analyzer.get_or_insert_with(|| { | ||
207 | sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None) | ||
208 | }); | ||
209 | let expanded = descend_into_macros_with_analyzer( | ||
210 | db, | ||
211 | &analyzer, | ||
212 | InFile::new(file_id.into(), t), | ||
213 | ); | ||
214 | if let Some(token) = ast::NameRef::cast(expanded.value.parent()) { | ||
215 | (expanded.with_value(token), range) | ||
216 | } else { | 197 | } else { |
217 | continue; | 198 | // Handle macro token cases |
218 | } | 199 | let token = match tree.token_at_offset(offset) { |
219 | }; | 200 | TokenAtOffset::None => continue, |
201 | TokenAtOffset::Single(t) => t, | ||
202 | TokenAtOffset::Between(_, t) => t, | ||
203 | }; | ||
204 | let expanded = sema.descend_into_macros(token); | ||
205 | match ast::NameRef::cast(expanded.parent()) { | ||
206 | Some(name_ref) => name_ref, | ||
207 | _ => continue, | ||
208 | } | ||
209 | }; | ||
220 | 210 | ||
221 | if let Some(search_range) = search_range { | ||
222 | if !range.is_subrange(&search_range) { | ||
223 | continue; | ||
224 | } | ||
225 | } | ||
226 | // FIXME: reuse sb | 211 | // FIXME: reuse sb |
227 | // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 | 212 | // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 |
228 | 213 | ||
229 | if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) { | 214 | if let Some(d) = classify_name_ref(&sema, &name_ref) { |
230 | if d == def { | 215 | if d == def { |
231 | let kind = if is_record_lit_name_ref(&name_ref.value) | 216 | let kind = |
232 | || is_call_expr_name_ref(&name_ref.value) | 217 | if is_record_lit_name_ref(&name_ref) || is_call_expr_name_ref(&name_ref) { |
233 | { | 218 | ReferenceKind::StructLiteral |
234 | ReferenceKind::StructLiteral | 219 | } else { |
235 | } else { | 220 | ReferenceKind::Other |
236 | ReferenceKind::Other | 221 | }; |
237 | }; | 222 | |
238 | 223 | let file_range = sema.original_range(name_ref.syntax()); | |
239 | refs.push(Reference { | 224 | refs.push(Reference { |
240 | file_range: FileRange { file_id, range }, | 225 | file_range, |
241 | kind, | 226 | kind, |
242 | access: reference_access(&d, &name_ref.value), | 227 | access: reference_access(&d, &name_ref), |
243 | }); | 228 | }); |
244 | } | 229 | } |
245 | } | 230 | } |
@@ -348,6 +333,8 @@ fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { | |||
348 | 333 | ||
349 | #[cfg(test)] | 334 | #[cfg(test)] |
350 | mod tests { | 335 | mod tests { |
336 | use test_utils::covers; | ||
337 | |||
351 | use crate::{ | 338 | use crate::{ |
352 | mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis}, | 339 | mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis}, |
353 | Declaration, Reference, ReferenceSearchResult, SearchScope, | 340 | Declaration, Reference, ReferenceSearchResult, SearchScope, |
@@ -456,6 +443,27 @@ mod tests { | |||
456 | } | 443 | } |
457 | 444 | ||
458 | #[test] | 445 | #[test] |
446 | fn search_filters_by_range() { | ||
447 | covers!(search_filters_by_range); | ||
448 | let code = r#" | ||
449 | fn foo() { | ||
450 | let spam<|> = 92; | ||
451 | spam + spam | ||
452 | } | ||
453 | fn bar() { | ||
454 | let spam = 92; | ||
455 | spam + spam | ||
456 | } | ||
457 | "#; | ||
458 | let refs = get_all_refs(code); | ||
459 | check_result( | ||
460 | refs, | ||
461 | "spam BIND_PAT FileId(1) [44; 48) Other Write", | ||
462 | &["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"], | ||
463 | ); | ||
464 | } | ||
465 | |||
466 | #[test] | ||
459 | fn test_find_all_refs_for_param_inside() { | 467 | fn test_find_all_refs_for_param_inside() { |
460 | let code = r#" | 468 | let code = r#" |
461 | fn foo(i : u32) -> u32 { | 469 | fn foo(i : u32) -> u32 { |
diff --git a/crates/ra_ide/src/references/classify.rs b/crates/ra_ide/src/references/classify.rs index 478e18871..91b21429a 100644 --- a/crates/ra_ide/src/references/classify.rs +++ b/crates/ra_ide/src/references/classify.rs | |||
@@ -1,34 +1,32 @@ | |||
1 | //! Functions that are used to classify an element from its definition or reference. | 1 | //! Functions that are used to classify an element from its definition or reference. |
2 | 2 | ||
3 | use hir::{InFile, PathResolution, SourceBinder}; | 3 | use hir::{PathResolution, Semantics}; |
4 | use ra_ide_db::defs::NameDefinition; | ||
5 | use ra_ide_db::RootDatabase; | ||
4 | use ra_prof::profile; | 6 | use ra_prof::profile; |
5 | use ra_syntax::{ast, AstNode}; | 7 | use ra_syntax::{ast, AstNode}; |
6 | use test_utils::tested_by; | 8 | use test_utils::tested_by; |
7 | 9 | ||
8 | use super::NameDefinition; | 10 | pub use ra_ide_db::defs::{from_module_def, from_struct_field}; |
9 | use ra_ide_db::RootDatabase; | ||
10 | |||
11 | pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field}; | ||
12 | 11 | ||
13 | pub(crate) fn classify_name_ref( | 12 | pub(crate) fn classify_name_ref( |
14 | sb: &mut SourceBinder<RootDatabase>, | 13 | sema: &Semantics<RootDatabase>, |
15 | name_ref: InFile<&ast::NameRef>, | 14 | name_ref: &ast::NameRef, |
16 | ) -> Option<NameDefinition> { | 15 | ) -> Option<NameDefinition> { |
17 | let _p = profile("classify_name_ref"); | 16 | let _p = profile("classify_name_ref"); |
18 | 17 | ||
19 | let parent = name_ref.value.syntax().parent()?; | 18 | let parent = name_ref.syntax().parent()?; |
20 | let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None); | ||
21 | 19 | ||
22 | if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { | 20 | if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { |
23 | tested_by!(goto_def_for_methods); | 21 | tested_by!(goto_def_for_methods); |
24 | if let Some(func) = analyzer.resolve_method_call(&method_call) { | 22 | if let Some(func) = sema.resolve_method_call(&method_call) { |
25 | return Some(from_module_def(func.into())); | 23 | return Some(from_module_def(func.into())); |
26 | } | 24 | } |
27 | } | 25 | } |
28 | 26 | ||
29 | if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { | 27 | if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { |
30 | tested_by!(goto_def_for_fields); | 28 | tested_by!(goto_def_for_fields); |
31 | if let Some(field) = analyzer.resolve_field(&field_expr) { | 29 | if let Some(field) = sema.resolve_field(&field_expr) { |
32 | return Some(from_struct_field(field)); | 30 | return Some(from_struct_field(field)); |
33 | } | 31 | } |
34 | } | 32 | } |
@@ -36,22 +34,20 @@ pub(crate) fn classify_name_ref( | |||
36 | if let Some(record_field) = ast::RecordField::cast(parent.clone()) { | 34 | if let Some(record_field) = ast::RecordField::cast(parent.clone()) { |
37 | tested_by!(goto_def_for_record_fields); | 35 | tested_by!(goto_def_for_record_fields); |
38 | tested_by!(goto_def_for_field_init_shorthand); | 36 | tested_by!(goto_def_for_field_init_shorthand); |
39 | if let Some(field_def) = analyzer.resolve_record_field(&record_field) { | 37 | if let Some(field_def) = sema.resolve_record_field(&record_field) { |
40 | return Some(from_struct_field(field_def)); | 38 | return Some(from_struct_field(field_def)); |
41 | } | 39 | } |
42 | } | 40 | } |
43 | 41 | ||
44 | if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { | 42 | if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { |
45 | tested_by!(goto_def_for_macros); | 43 | tested_by!(goto_def_for_macros); |
46 | if let Some(macro_def) = | 44 | if let Some(macro_def) = sema.resolve_macro_call(¯o_call) { |
47 | analyzer.resolve_macro_call(sb.db, name_ref.with_value(¯o_call)) | ||
48 | { | ||
49 | return Some(NameDefinition::Macro(macro_def)); | 45 | return Some(NameDefinition::Macro(macro_def)); |
50 | } | 46 | } |
51 | } | 47 | } |
52 | 48 | ||
53 | let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?; | 49 | let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?; |
54 | let resolved = analyzer.resolve_path(sb.db, &path)?; | 50 | let resolved = sema.resolve_path(&path)?; |
55 | let res = match resolved { | 51 | let res = match resolved { |
56 | PathResolution::Def(def) => from_module_def(def), | 52 | PathResolution::Def(def) => from_module_def(def), |
57 | PathResolution::AssocItem(item) => { | 53 | PathResolution::AssocItem(item) => { |
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs index bdb90020b..5b4bcf434 100644 --- a/crates/ra_ide/src/references/rename.rs +++ b/crates/ra_ide/src/references/rename.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::ModuleSource; | 3 | use hir::{ModuleSource, Semantics}; |
4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; | 4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt}; |
5 | use ra_ide_db::RootDatabase; | 5 | use ra_ide_db::RootDatabase; |
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, | 7 | algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, |
@@ -24,15 +24,16 @@ pub(crate) fn rename( | |||
24 | _ => return None, | 24 | _ => return None, |
25 | } | 25 | } |
26 | 26 | ||
27 | let parse = db.parse(position.file_id); | 27 | let sema = Semantics::new(db); |
28 | let source_file = sema.parse(position.file_id); | ||
28 | if let Some((ast_name, ast_module)) = | 29 | if let Some((ast_name, ast_module)) = |
29 | find_name_and_module_at_offset(parse.tree().syntax(), position) | 30 | find_name_and_module_at_offset(source_file.syntax(), position) |
30 | { | 31 | { |
31 | let range = ast_name.syntax().text_range(); | 32 | let range = ast_name.syntax().text_range(); |
32 | rename_mod(db, &ast_name, &ast_module, position, new_name) | 33 | rename_mod(&sema, &ast_name, &ast_module, position, new_name) |
33 | .map(|info| RangeInfo::new(range, info)) | 34 | .map(|info| RangeInfo::new(range, info)) |
34 | } else { | 35 | } else { |
35 | rename_reference(db, position, new_name) | 36 | rename_reference(sema.db, position, new_name) |
36 | } | 37 | } |
37 | } | 38 | } |
38 | 39 | ||
@@ -54,7 +55,7 @@ fn source_edit_from_file_id_range( | |||
54 | } | 55 | } |
55 | 56 | ||
56 | fn rename_mod( | 57 | fn rename_mod( |
57 | db: &RootDatabase, | 58 | sema: &Semantics<RootDatabase>, |
58 | ast_name: &ast::Name, | 59 | ast_name: &ast::Name, |
59 | ast_module: &ast::Module, | 60 | ast_module: &ast::Module, |
60 | position: FilePosition, | 61 | position: FilePosition, |
@@ -62,13 +63,12 @@ fn rename_mod( | |||
62 | ) -> Option<SourceChange> { | 63 | ) -> Option<SourceChange> { |
63 | let mut source_file_edits = Vec::new(); | 64 | let mut source_file_edits = Vec::new(); |
64 | let mut file_system_edits = Vec::new(); | 65 | let mut file_system_edits = Vec::new(); |
65 | let module_src = hir::InFile { file_id: position.file_id.into(), value: ast_module.clone() }; | 66 | if let Some(module) = sema.to_def(ast_module) { |
66 | if let Some(module) = hir::SourceBinder::new(db).to_def(module_src) { | 67 | let src = module.definition_source(sema.db); |
67 | let src = module.definition_source(db); | 68 | let file_id = src.file_id.original_file(sema.db); |
68 | let file_id = src.file_id.original_file(db); | ||
69 | match src.value { | 69 | match src.value { |
70 | ModuleSource::SourceFile(..) => { | 70 | ModuleSource::SourceFile(..) => { |
71 | let mod_path: RelativePathBuf = db.file_relative_path(file_id); | 71 | let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id); |
72 | // mod is defined in path/to/dir/mod.rs | 72 | // mod is defined in path/to/dir/mod.rs |
73 | let dst_path = if mod_path.file_stem() == Some("mod") { | 73 | let dst_path = if mod_path.file_stem() == Some("mod") { |
74 | mod_path | 74 | mod_path |
@@ -82,7 +82,7 @@ fn rename_mod( | |||
82 | if let Some(path) = dst_path { | 82 | if let Some(path) = dst_path { |
83 | let move_file = FileSystemEdit::MoveFile { | 83 | let move_file = FileSystemEdit::MoveFile { |
84 | src: file_id, | 84 | src: file_id, |
85 | dst_source_root: db.file_source_root(position.file_id), | 85 | dst_source_root: sema.db.file_source_root(position.file_id), |
86 | dst_path: path, | 86 | dst_path: path, |
87 | }; | 87 | }; |
88 | file_system_edits.push(move_file); | 88 | file_system_edits.push(move_file); |
@@ -98,7 +98,7 @@ fn rename_mod( | |||
98 | }; | 98 | }; |
99 | source_file_edits.push(edit); | 99 | source_file_edits.push(edit); |
100 | 100 | ||
101 | if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(db, position, None) { | 101 | if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) { |
102 | let ref_edits = refs.references.into_iter().map(|reference| { | 102 | let ref_edits = refs.references.into_iter().map(|reference| { |
103 | source_edit_from_file_id_range( | 103 | source_edit_from_file_id_range( |
104 | reference.file_range.file_id, | 104 | reference.file_range.file_id, |
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs index be2a67d0a..74877e90f 100644 --- a/crates/ra_ide/src/runnables.rs +++ b/crates/ra_ide/src/runnables.rs | |||
@@ -1,8 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{InFile, SourceBinder}; | 3 | use hir::Semantics; |
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | use ra_db::SourceDatabase; | ||
6 | use ra_ide_db::RootDatabase; | 5 | use ra_ide_db::RootDatabase; |
7 | use ra_syntax::{ | 6 | use ra_syntax::{ |
8 | ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, | 7 | ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, |
@@ -42,46 +41,33 @@ pub enum RunnableKind { | |||
42 | } | 41 | } |
43 | 42 | ||
44 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { | 43 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { |
45 | let parse = db.parse(file_id); | 44 | let sema = Semantics::new(db); |
46 | let mut sb = SourceBinder::new(db); | 45 | let source_file = sema.parse(file_id); |
47 | parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect() | 46 | source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect() |
48 | } | 47 | } |
49 | 48 | ||
50 | fn runnable( | 49 | fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> { |
51 | db: &RootDatabase, | ||
52 | source_binder: &mut SourceBinder<RootDatabase>, | ||
53 | file_id: FileId, | ||
54 | item: SyntaxNode, | ||
55 | ) -> Option<Runnable> { | ||
56 | match_ast! { | 50 | match_ast! { |
57 | match item { | 51 | match item { |
58 | ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) }, | 52 | ast::FnDef(it) => { runnable_fn(sema, it) }, |
59 | ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) }, | 53 | ast::Module(it) => { runnable_mod(sema, it) }, |
60 | _ => { None }, | 54 | _ => None, |
61 | } | 55 | } |
62 | } | 56 | } |
63 | } | 57 | } |
64 | 58 | ||
65 | fn runnable_fn( | 59 | fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> { |
66 | db: &RootDatabase, | ||
67 | source_binder: &mut SourceBinder<RootDatabase>, | ||
68 | file_id: FileId, | ||
69 | fn_def: ast::FnDef, | ||
70 | ) -> Option<Runnable> { | ||
71 | let name_string = fn_def.name()?.text().to_string(); | 60 | let name_string = fn_def.name()?.text().to_string(); |
72 | 61 | ||
73 | let kind = if name_string == "main" { | 62 | let kind = if name_string == "main" { |
74 | RunnableKind::Bin | 63 | RunnableKind::Bin |
75 | } else { | 64 | } else { |
76 | let test_id = if let Some(module) = source_binder | 65 | let test_id = if let Some(module) = sema.to_def(&fn_def).map(|def| def.module(sema.db)) { |
77 | .to_def(InFile::new(file_id.into(), fn_def.clone())) | ||
78 | .map(|def| def.module(db)) | ||
79 | { | ||
80 | let path = module | 66 | let path = module |
81 | .path_to_root(db) | 67 | .path_to_root(sema.db) |
82 | .into_iter() | 68 | .into_iter() |
83 | .rev() | 69 | .rev() |
84 | .filter_map(|it| it.name(db)) | 70 | .filter_map(|it| it.name(sema.db)) |
85 | .map(|name| name.to_string()) | 71 | .map(|name| name.to_string()) |
86 | .chain(std::iter::once(name_string)) | 72 | .chain(std::iter::once(name_string)) |
87 | .join("::"); | 73 | .join("::"); |
@@ -115,12 +101,7 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool { | |||
115 | .any(|attribute_text| attribute_text.contains("test")) | 101 | .any(|attribute_text| attribute_text.contains("test")) |
116 | } | 102 | } |
117 | 103 | ||
118 | fn runnable_mod( | 104 | fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> { |
119 | db: &RootDatabase, | ||
120 | source_binder: &mut SourceBinder<RootDatabase>, | ||
121 | file_id: FileId, | ||
122 | module: ast::Module, | ||
123 | ) -> Option<Runnable> { | ||
124 | let has_test_function = module | 105 | let has_test_function = module |
125 | .item_list()? | 106 | .item_list()? |
126 | .items() | 107 | .items() |
@@ -133,9 +114,10 @@ fn runnable_mod( | |||
133 | return None; | 114 | return None; |
134 | } | 115 | } |
135 | let range = module.syntax().text_range(); | 116 | let range = module.syntax().text_range(); |
136 | let module = source_binder.to_def(InFile::new(file_id.into(), module))?; | 117 | let module = sema.to_def(&module)?; |
137 | 118 | ||
138 | let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); | 119 | let path = |
120 | module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::"); | ||
139 | Some(Runnable { range, kind: RunnableKind::TestMod { path } }) | 121 | Some(Runnable { range, kind: RunnableKind::TestMod { path } }) |
140 | } | 122 | } |
141 | 123 | ||
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html index 95f038f00..d6a7da953 100644 --- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html | |||
@@ -25,14 +25,14 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .keyword\.control { color: #F0DFAF; font-weight: bold; } | 25 | .keyword\.control { color: #F0DFAF; font-weight: bold; } |
26 | </style> | 26 | </style> |
27 | <pre><code><span class="keyword">fn</span> <span class="function">main</span>() { | 27 | <pre><code><span class="keyword">fn</span> <span class="function">main</span>() { |
28 | <span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; | 28 | <span class="keyword">let</span> <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string">"hello"</span>; |
29 | <span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); | 29 | <span class="keyword">let</span> <span class="variable" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string(); |
30 | <span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); | 30 | <span class="keyword">let</span> <span class="variable" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string(); |
31 | 31 | ||
32 | <span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>; | 32 | <span class="keyword">let</span> <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string">"other color please!"</span>; |
33 | <span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string(); | 33 | <span class="keyword">let</span> <span class="variable" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string(); |
34 | } | 34 | } |
35 | 35 | ||
36 | <span class="keyword">fn</span> <span class="function">bar</span>() { | 36 | <span class="keyword">fn</span> <span class="function">bar</span>() { |
37 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; | 37 | <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string">"hello"</span>; |
38 | }</code></pre> \ No newline at end of file | 38 | }</code></pre> \ No newline at end of file |
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index 9bc3ad448..987476d2c 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs | |||
@@ -1,8 +1,11 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder}; | 3 | use hir::{Name, Semantics}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_db::SourceDatabase; |
5 | use ra_ide_db::{defs::NameDefinition, RootDatabase}; | 5 | use ra_ide_db::{ |
6 | defs::{classify_name, NameDefinition}, | ||
7 | RootDatabase, | ||
8 | }; | ||
6 | use ra_prof::profile; | 9 | use ra_prof::profile; |
7 | use ra_syntax::{ | 10 | use ra_syntax::{ |
8 | ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, | 11 | ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, |
@@ -10,11 +13,7 @@ use ra_syntax::{ | |||
10 | }; | 13 | }; |
11 | use rustc_hash::FxHashMap; | 14 | use rustc_hash::FxHashMap; |
12 | 15 | ||
13 | use crate::{ | 16 | use crate::{references::classify_name_ref, FileId}; |
14 | expand::descend_into_macros_with_analyzer, | ||
15 | references::{classify_name, classify_name_ref}, | ||
16 | FileId, | ||
17 | }; | ||
18 | 17 | ||
19 | pub mod tags { | 18 | pub mod tags { |
20 | pub const FIELD: &str = "field"; | 19 | pub const FIELD: &str = "field"; |
@@ -73,14 +72,11 @@ pub(crate) fn highlight( | |||
73 | range: Option<TextRange>, | 72 | range: Option<TextRange>, |
74 | ) -> Vec<HighlightedRange> { | 73 | ) -> Vec<HighlightedRange> { |
75 | let _p = profile("highlight"); | 74 | let _p = profile("highlight"); |
75 | let sema = Semantics::new(db); | ||
76 | let root = sema.parse(file_id).syntax().clone(); | ||
76 | 77 | ||
77 | let parse = db.parse(file_id); | ||
78 | let root = parse.tree().syntax().clone(); | ||
79 | |||
80 | let mut sb = SourceBinder::new(db); | ||
81 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); | 78 | let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); |
82 | let mut res = Vec::new(); | 79 | let mut res = Vec::new(); |
83 | let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None); | ||
84 | 80 | ||
85 | let mut in_macro_call = None; | 81 | let mut in_macro_call = None; |
86 | 82 | ||
@@ -105,7 +101,7 @@ pub(crate) fn highlight( | |||
105 | match node.kind() { | 101 | match node.kind() { |
106 | MACRO_CALL => { | 102 | MACRO_CALL => { |
107 | in_macro_call = Some(node.clone()); | 103 | in_macro_call = Some(node.clone()); |
108 | if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) { | 104 | if let Some(range) = highlight_macro(node) { |
109 | res.push(HighlightedRange { | 105 | res.push(HighlightedRange { |
110 | range, | 106 | range, |
111 | tag: tags::MACRO, | 107 | tag: tags::MACRO, |
@@ -116,10 +112,9 @@ pub(crate) fn highlight( | |||
116 | _ if in_macro_call.is_some() => { | 112 | _ if in_macro_call.is_some() => { |
117 | if let Some(token) = node.as_token() { | 113 | if let Some(token) = node.as_token() { |
118 | if let Some((tag, binding_hash)) = highlight_token_tree( | 114 | if let Some((tag, binding_hash)) = highlight_token_tree( |
119 | &mut sb, | 115 | &sema, |
120 | &analyzer, | ||
121 | &mut bindings_shadow_count, | 116 | &mut bindings_shadow_count, |
122 | InFile::new(file_id.into(), token.clone()), | 117 | token.clone(), |
123 | ) { | 118 | ) { |
124 | res.push(HighlightedRange { | 119 | res.push(HighlightedRange { |
125 | range: node.text_range(), | 120 | range: node.text_range(), |
@@ -130,11 +125,9 @@ pub(crate) fn highlight( | |||
130 | } | 125 | } |
131 | } | 126 | } |
132 | _ => { | 127 | _ => { |
133 | if let Some((tag, binding_hash)) = highlight_node( | 128 | if let Some((tag, binding_hash)) = |
134 | &mut sb, | 129 | highlight_node(&sema, &mut bindings_shadow_count, node.clone()) |
135 | &mut bindings_shadow_count, | 130 | { |
136 | InFile::new(file_id.into(), node.clone()), | ||
137 | ) { | ||
138 | res.push(HighlightedRange { | 131 | res.push(HighlightedRange { |
139 | range: node.text_range(), | 132 | range: node.text_range(), |
140 | tag, | 133 | tag, |
@@ -161,8 +154,8 @@ pub(crate) fn highlight( | |||
161 | res | 154 | res |
162 | } | 155 | } |
163 | 156 | ||
164 | fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> { | 157 | fn highlight_macro(node: SyntaxElement) -> Option<TextRange> { |
165 | let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?; | 158 | let macro_call = ast::MacroCall::cast(node.as_node()?.clone())?; |
166 | let path = macro_call.path()?; | 159 | let path = macro_call.path()?; |
167 | let name_ref = path.segment()?.name_ref()?; | 160 | let name_ref = path.segment()?.name_ref()?; |
168 | 161 | ||
@@ -179,35 +172,34 @@ fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> { | |||
179 | } | 172 | } |
180 | 173 | ||
181 | fn highlight_token_tree( | 174 | fn highlight_token_tree( |
182 | sb: &mut SourceBinder<RootDatabase>, | 175 | sema: &Semantics<RootDatabase>, |
183 | analyzer: &SourceAnalyzer, | ||
184 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | 176 | bindings_shadow_count: &mut FxHashMap<Name, u32>, |
185 | token: InFile<SyntaxToken>, | 177 | token: SyntaxToken, |
186 | ) -> Option<(&'static str, Option<u64>)> { | 178 | ) -> Option<(&'static str, Option<u64>)> { |
187 | if token.value.parent().kind() != TOKEN_TREE { | 179 | if token.parent().kind() != TOKEN_TREE { |
188 | return None; | 180 | return None; |
189 | } | 181 | } |
190 | let token = descend_into_macros_with_analyzer(sb.db, analyzer, token); | 182 | let token = sema.descend_into_macros(token.clone()); |
191 | let expanded = { | 183 | let expanded = { |
192 | let parent = token.value.parent(); | 184 | let parent = token.parent(); |
193 | // We only care Name and Name_ref | 185 | // We only care Name and Name_ref |
194 | match (token.value.kind(), parent.kind()) { | 186 | match (token.kind(), parent.kind()) { |
195 | (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()), | 187 | (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(), |
196 | _ => token.map(|it| it.into()), | 188 | _ => token.into(), |
197 | } | 189 | } |
198 | }; | 190 | }; |
199 | 191 | ||
200 | highlight_node(sb, bindings_shadow_count, expanded) | 192 | highlight_node(sema, bindings_shadow_count, expanded) |
201 | } | 193 | } |
202 | 194 | ||
203 | fn highlight_node( | 195 | fn highlight_node( |
204 | sb: &mut SourceBinder<RootDatabase>, | 196 | sema: &Semantics<RootDatabase>, |
205 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | 197 | bindings_shadow_count: &mut FxHashMap<Name, u32>, |
206 | node: InFile<SyntaxElement>, | 198 | node: SyntaxElement, |
207 | ) -> Option<(&'static str, Option<u64>)> { | 199 | ) -> Option<(&'static str, Option<u64>)> { |
208 | let db = sb.db; | 200 | let db = sema.db; |
209 | let mut binding_hash = None; | 201 | let mut binding_hash = None; |
210 | let tag = match node.value.kind() { | 202 | let tag = match node.kind() { |
211 | FN_DEF => { | 203 | FN_DEF => { |
212 | bindings_shadow_count.clear(); | 204 | bindings_shadow_count.clear(); |
213 | return None; | 205 | return None; |
@@ -216,19 +208,18 @@ fn highlight_node( | |||
216 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, | 208 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, |
217 | ATTR => tags::LITERAL_ATTRIBUTE, | 209 | ATTR => tags::LITERAL_ATTRIBUTE, |
218 | // Special-case field init shorthand | 210 | // Special-case field init shorthand |
219 | NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, | 211 | NAME_REF if node.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, |
220 | NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None, | 212 | NAME_REF if node.ancestors().any(|it| it.kind() == ATTR) => return None, |
221 | NAME_REF => { | 213 | NAME_REF => { |
222 | let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); | 214 | let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); |
223 | let name_kind = classify_name_ref(sb, node.with_value(&name_ref)); | 215 | let name_kind = classify_name_ref(sema, &name_ref); |
224 | match name_kind { | 216 | match name_kind { |
225 | Some(name_kind) => { | 217 | Some(name_kind) => { |
226 | if let NameDefinition::Local(local) = &name_kind { | 218 | if let NameDefinition::Local(local) = &name_kind { |
227 | if let Some(name) = local.name(db) { | 219 | if let Some(name) = local.name(db) { |
228 | let shadow_count = | 220 | let shadow_count = |
229 | bindings_shadow_count.entry(name.clone()).or_default(); | 221 | bindings_shadow_count.entry(name.clone()).or_default(); |
230 | binding_hash = | 222 | binding_hash = Some(calc_binding_hash(&name, *shadow_count)) |
231 | Some(calc_binding_hash(node.file_id, &name, *shadow_count)) | ||
232 | } | 223 | } |
233 | }; | 224 | }; |
234 | 225 | ||
@@ -238,14 +229,14 @@ fn highlight_node( | |||
238 | } | 229 | } |
239 | } | 230 | } |
240 | NAME => { | 231 | NAME => { |
241 | let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap(); | 232 | let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap(); |
242 | let name_kind = classify_name(sb, node.with_value(&name)); | 233 | let name_kind = classify_name(sema, &name); |
243 | 234 | ||
244 | if let Some(NameDefinition::Local(local)) = &name_kind { | 235 | if let Some(NameDefinition::Local(local)) = &name_kind { |
245 | if let Some(name) = local.name(db) { | 236 | if let Some(name) = local.name(db) { |
246 | let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); | 237 | let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); |
247 | *shadow_count += 1; | 238 | *shadow_count += 1; |
248 | binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count)) | 239 | binding_hash = Some(calc_binding_hash(&name, *shadow_count)) |
249 | } | 240 | } |
250 | }; | 241 | }; |
251 | 242 | ||
@@ -272,7 +263,7 @@ fn highlight_node( | |||
272 | 263 | ||
273 | return Some((tag, binding_hash)); | 264 | return Some((tag, binding_hash)); |
274 | 265 | ||
275 | fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 { | 266 | fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 { |
276 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | 267 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { |
277 | use std::{collections::hash_map::DefaultHasher, hash::Hasher}; | 268 | use std::{collections::hash_map::DefaultHasher, hash::Hasher}; |
278 | 269 | ||
@@ -281,7 +272,7 @@ fn highlight_node( | |||
281 | hasher.finish() | 272 | hasher.finish() |
282 | } | 273 | } |
283 | 274 | ||
284 | hash((file_id, name, shadow_count)) | 275 | hash((name, shadow_count)) |
285 | } | 276 | } |
286 | } | 277 | } |
287 | 278 | ||