Diffstat (limited to 'crates/ra_ide')
-rw-r--r--  crates/ra_ide/Cargo.toml                                          |   4
-rw-r--r--  crates/ra_ide/src/call_hierarchy.rs                               |  42
-rw-r--r--  crates/ra_ide/src/call_info.rs                                    |  61
-rw-r--r--  crates/ra_ide/src/completion.rs                                   |   4
-rw-r--r--  crates/ra_ide/src/completion/complete_dot.rs                      |   4
-rw-r--r--  crates/ra_ide/src/completion/complete_macro_in_item_position.rs   |   2
-rw-r--r--  crates/ra_ide/src/completion/complete_path.rs                     |   4
-rw-r--r--  crates/ra_ide/src/completion/complete_pattern.rs                  |   2
-rw-r--r--  crates/ra_ide/src/completion/complete_postfix.rs                  |   2
-rw-r--r--  crates/ra_ide/src/completion/complete_record_literal.rs           |   5
-rw-r--r--  crates/ra_ide/src/completion/complete_record_pattern.rs           |   5
-rw-r--r--  crates/ra_ide/src/completion/complete_scope.rs                    |   4
-rw-r--r--  crates/ra_ide/src/completion/complete_trait_impl.rs               |  33
-rw-r--r--  crates/ra_ide/src/completion/completion_context.rs                |  65
-rw-r--r--  crates/ra_ide/src/diagnostics.rs                                  |   9
-rw-r--r--  crates/ra_ide/src/display/navigation_target.rs                    |   8
-rw-r--r--  crates/ra_ide/src/expand.rs                                       | 102
-rw-r--r--  crates/ra_ide/src/expand_macro.rs                                 |  31
-rw-r--r--  crates/ra_ide/src/extend_selection.rs                             |  81
-rw-r--r--  crates/ra_ide/src/goto_definition.rs                              |  42
-rw-r--r--  crates/ra_ide/src/goto_type_definition.rs                         |  36
-rw-r--r--  crates/ra_ide/src/hover.rs                                        | 198
-rw-r--r--  crates/ra_ide/src/impls.rs                                        |  49
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs                                  |  48
-rw-r--r--  crates/ra_ide/src/lib.rs                                          |  23
-rw-r--r--  crates/ra_ide/src/marks.rs                                        |   1
-rw-r--r--  crates/ra_ide/src/mock_analysis.rs                                |  16
-rw-r--r--  crates/ra_ide/src/parent_module.rs                                |  17
-rw-r--r--  crates/ra_ide/src/references.rs                                   | 130
-rw-r--r--  crates/ra_ide/src/references/classify.rs                          |  30
-rw-r--r--  crates/ra_ide/src/references/rename.rs                            |  28
-rw-r--r--  crates/ra_ide/src/runnables.rs                                    |  50
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html                     |  74
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html             |  36
-rw-r--r--  crates/ra_ide/src/ssr.rs                                          | 141
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs                          | 632
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/html.rs                     | 106
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/tags.rs                     | 175
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/tests.rs                    | 133
39 files changed, 1359 insertions, 1074 deletions
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml
index 3407d2598..7625fc8c8 100644
--- a/crates/ra_ide/Cargo.toml
+++ b/crates/ra_ide/Cargo.toml
@@ -17,11 +17,7 @@ indexmap = "1.3.2"
 itertools = "0.8.2"
 join_to_string = "0.1.3"
 log = "0.4.8"
-rayon = "1.3.0"
-fst = { version = "0.3.5", default-features = false }
 rustc-hash = "1.1.0"
-unicase = "2.6.0"
-superslice = "1.0.0"
 rand = { version = "0.7.3", features = ["small_rng"] }
 once_cell = "1.3.1"
 
diff --git a/crates/ra_ide/src/call_hierarchy.rs b/crates/ra_ide/src/call_hierarchy.rs
index 51ac59a71..b00b6d431 100644
--- a/crates/ra_ide/src/call_hierarchy.rs
+++ b/crates/ra_ide/src/call_hierarchy.rs
@@ -2,13 +2,13 @@
 
 use indexmap::IndexMap;
 
-use hir::db::AstDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{ast, match_ast, AstNode, TextRange};
 
 use crate::{
-    call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition,
-    references, FilePosition, NavigationTarget, RangeInfo,
+    call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition,
+    NavigationTarget, RangeInfo,
 };
 
 #[derive(Debug, Clone)]
@@ -38,30 +38,31 @@ pub(crate) fn call_hierarchy(
 }
 
 pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+    let sema = Semantics::new(db);
     // 1. Find all refs
     // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply.
     // 3. Add ranges relative to the start of the fndef.
     let refs = references::find_all_refs(db, position, None)?;
 
     let mut calls = CallLocations::default();
-    let mut sb = hir::SourceBinder::new(db);
 
     for reference in refs.info.references() {
         let file_id = reference.file_range.file_id;
-        let file = db.parse_or_expand(file_id.into())?;
+        let file = sema.parse(file_id);
+        let file = file.syntax();
         let token = file.token_at_offset(reference.file_range.range.start()).next()?;
-        let token = descend_into_macros(db, file_id, token);
-        let syntax = token.value.parent();
+        let token = sema.descend_into_macros(token);
+        let syntax = token.parent();
 
         // This target is the containing function
         if let Some(nav) = syntax.ancestors().find_map(|node| {
             match_ast! {
                 match node {
                     ast::FnDef(it) => {
-                        let def = sb.to_def(token.with_value(it))?;
-                        Some(def.to_nav(sb.db))
+                        let def = sema.to_def(&it)?;
+                        Some(def.to_nav(sema.db))
                     },
-                    _ => { None },
+                    _ => None,
                 }
             }
         }) {
@@ -74,11 +75,13 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
 }
 
 pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+    let sema = Semantics::new(db);
     let file_id = position.file_id;
-    let file = db.parse_or_expand(file_id.into())?;
+    let file = sema.parse(file_id);
+    let file = file.syntax();
     let token = file.token_at_offset(position.offset).next()?;
-    let token = descend_into_macros(db, file_id, token);
-    let syntax = token.value.parent();
+    let token = sema.descend_into_macros(token);
+    let syntax = token.parent();
 
     let mut calls = CallLocations::default();
 
@@ -87,14 +90,11 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
         .filter_map(|node| FnCallNode::with_node_exact(&node))
         .filter_map(|call_node| {
             let name_ref = call_node.name_ref()?;
-            let name_ref = token.with_value(name_ref.syntax());
-
-            let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
 
             if let Some(func_target) = match &call_node {
                 FnCallNode::CallExpr(expr) => {
                     //FIXME: Type::as_callable is broken
-                    let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
+                    let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
                     match callable_def {
                         hir::CallableDef::FunctionId(it) => {
                             let fn_def: hir::Function = it.into();
@@ -105,15 +105,15 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
                     }
                 }
                 FnCallNode::MethodCallExpr(expr) => {
-                    let function = analyzer.resolve_method_call(&expr)?;
+                    let function = sema.resolve_method_call(&expr)?;
                     Some(function.to_nav(db))
                 }
-                FnCallNode::MacroCallExpr(expr) => {
-                    let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
+                FnCallNode::MacroCallExpr(macro_call) => {
+                    let macro_def = sema.resolve_macro_call(&macro_call)?;
                     Some(macro_def.to_nav(db))
                 }
             } {
-                Some((func_target, name_ref.value.text_range()))
+                Some((func_target, name_ref.syntax().text_range()))
             } else {
                 None
             }
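
Every hunk in call_hierarchy.rs follows the same migration: build one hir::Semantics per request and route parsing, macro descent, and syntax-to-def mapping through it, instead of threading hir::SourceBinder / hir::SourceAnalyzer and InFile-wrapped tokens around. Below is a minimal sketch of the resulting entry-point shape, distilled from the hunks above; the helper name and body are illustrative only, not code added by this commit:

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, AstNode};

    use crate::{display::ToNav, FilePosition, NavigationTarget};

    // Hypothetical helper, shown only to illustrate the Semantics-based pattern.
    fn enclosing_fn_nav(db: &RootDatabase, position: FilePosition) -> Option<NavigationTarget> {
        // One Semantics per request; it caches parses and macro expansions internally.
        let sema = Semantics::new(db);
        let file = sema.parse(position.file_id);
        let token = file.syntax().token_at_offset(position.offset).next()?;
        // Tokens now come back as plain SyntaxTokens, with no InFile wrapper to unwrap.
        let token = sema.descend_into_macros(token);
        let fn_def = token.parent().ancestors().find_map(ast::FnDef::cast)?;
        // to_def maps syntax back into a hir definition, replacing SourceBinder::to_def.
        let def = sema.to_def(&fn_def)?;
        Some(def.to_nav(sema.db))
    }
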
diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs
index 7c6322cb4..2b35a3803 100644
--- a/crates/ra_ide/src/call_info.rs
+++ b/crates/ra_ide/src/call_info.rs
@@ -1,48 +1,55 @@
 //! FIXME: write short doc here
-use hir::db::AstDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     ast::{self, ArgListOwner},
-    match_ast, AstNode, SyntaxNode,
+    match_ast, AstNode, SyntaxNode, SyntaxToken,
 };
 use test_utils::tested_by;
 
-use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature};
+use crate::{CallInfo, FilePosition, FunctionSignature};
 
 /// Computes parameter information for the given call expression.
 pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
-    let file = db.parse_or_expand(position.file_id.into())?;
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id);
+    let file = file.syntax();
     let token = file.token_at_offset(position.offset).next()?;
-    let token = descend_into_macros(db, position.file_id, token);
+    let token = sema.descend_into_macros(token);
+    call_info_for_token(&sema, token)
+}
 
+pub(crate) fn call_info_for_token(
+    sema: &Semantics<RootDatabase>,
+    token: SyntaxToken,
+) -> Option<CallInfo> {
     // Find the calling expression and it's NameRef
-    let calling_node = FnCallNode::with_node(&token.value.parent())?;
-    let name_ref = calling_node.name_ref()?;
-    let name_ref = token.with_value(name_ref.syntax());
+    let calling_node = FnCallNode::with_node(&token.parent())?;
 
-    let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
     let (mut call_info, has_self) = match &calling_node {
-        FnCallNode::CallExpr(expr) => {
+        FnCallNode::CallExpr(call) => {
             //FIXME: Type::as_callable is broken
-            let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
+            let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?;
             match callable_def {
                 hir::CallableDef::FunctionId(it) => {
                     let fn_def = it.into();
-                    (CallInfo::with_fn(db, fn_def), fn_def.has_self_param(db))
+                    (CallInfo::with_fn(sema.db, fn_def), fn_def.has_self_param(sema.db))
+                }
+                hir::CallableDef::StructId(it) => {
+                    (CallInfo::with_struct(sema.db, it.into())?, false)
                 }
-                hir::CallableDef::StructId(it) => (CallInfo::with_struct(db, it.into())?, false),
                 hir::CallableDef::EnumVariantId(it) => {
-                    (CallInfo::with_enum_variant(db, it.into())?, false)
+                    (CallInfo::with_enum_variant(sema.db, it.into())?, false)
                 }
             }
         }
-        FnCallNode::MethodCallExpr(expr) => {
-            let function = analyzer.resolve_method_call(&expr)?;
-            (CallInfo::with_fn(db, function), function.has_self_param(db))
+        FnCallNode::MethodCallExpr(method_call) => {
+            let function = sema.resolve_method_call(&method_call)?;
+            (CallInfo::with_fn(sema.db, function), function.has_self_param(sema.db))
         }
-        FnCallNode::MacroCallExpr(expr) => {
-            let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
-            (CallInfo::with_macro(db, macro_def)?, false)
+        FnCallNode::MacroCallExpr(macro_call) => {
+            let macro_def = sema.resolve_macro_call(&macro_call)?;
+            (CallInfo::with_macro(sema.db, macro_def)?, false)
         }
     };
 
@@ -62,7 +69,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     let num_args_at_callsite = arg_list.args().count();
 
     let arg_list_range = arg_list.syntax().text_range();
-    if !arg_list_range.contains_inclusive(position.offset) {
+    if !arg_list_range.contains_inclusive(token.text_range().start()) {
         tested_by!(call_info_bad_offset);
         return None;
     }
@@ -71,7 +78,9 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
             num_args_at_callsite,
             arg_list
                 .args()
-                .take_while(|arg| arg.syntax().text_range().end() < position.offset)
+                .take_while(|arg| {
+                    arg.syntax().text_range().end() < token.text_range().start()
+                })
                 .count(),
         );
 
@@ -101,7 +110,13 @@ impl FnCallNode {
         match_ast! {
             match node {
                 ast::CallExpr(it) => { Some(FnCallNode::CallExpr(it)) },
-                ast::MethodCallExpr(it) => { Some(FnCallNode::MethodCallExpr(it)) },
+                ast::MethodCallExpr(it) => {
+                    let arg_list = it.arg_list()?;
+                    if !syntax.text_range().is_subrange(&arg_list.syntax().text_range()) {
+                        return None;
+                    }
+                    Some(FnCallNode::MethodCallExpr(it))
+                },
                 ast::MacroCall(it) => { Some(FnCallNode::MacroCallExpr(it)) },
                 _ => { None },
             }
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index 4bdc6ba23..c378c2c62 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -17,7 +17,6 @@ mod complete_postfix;
 mod complete_macro_in_item_position;
 mod complete_trait_impl;
 
-use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 
 #[cfg(test)]
@@ -57,8 +56,7 @@ pub use crate::completion::completion_item::{
 /// identifier prefix/fuzzy match should be done higher in the stack, together
 /// with ordering of completions (currently this is done by the client).
 pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> {
-    let original_parse = db.parse(position.file_id);
-    let ctx = CompletionContext::new(db, &original_parse, position)?;
+    let ctx = CompletionContext::new(db, position)?;
 
     let mut acc = Completions::default();
 
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs
index 2ca78c927..a6e0158b2 100644
--- a/crates/ra_ide/src/completion/complete_dot.rs
+++ b/crates/ra_ide/src/completion/complete_dot.rs
@@ -16,7 +16,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
         _ => return,
     };
 
-    let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
+    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
         Some(ty) => ty,
         _ => return,
     };
@@ -55,7 +55,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
 fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
     if let Some(krate) = ctx.module.map(|it| it.krate()) {
         let mut seen_methods = FxHashSet::default();
-        let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db);
+        let traits_in_scope = ctx.scope().traits_in_scope();
         receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
             if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
                 acc.add_function(ctx, func);
diff --git a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
index faadd1e3f..1866d9e6c 100644
--- a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
+++ b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
@@ -5,7 +5,7 @@ use crate::completion::{CompletionContext, Completions};
 pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) {
     // Show only macros in top level.
     if ctx.is_new_item {
-        ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
+        ctx.scope().process_all_names(&mut |name, res| {
             if let hir::ScopeDef::MacroDef(mac) = res {
                 acc.add_macro(ctx, Some(name.to_string()), mac);
             }
diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs
index 2d7f09a6c..c626e90cc 100644
--- a/crates/ra_ide/src/completion/complete_path.rs
+++ b/crates/ra_ide/src/completion/complete_path.rs
@@ -11,7 +11,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
         Some(path) => path.clone(),
         _ => return,
     };
-    let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) {
+    let def = match ctx.scope().resolve_hir_path(&path) {
         Some(PathResolution::Def(def)) => def,
         _ => return,
     };
@@ -49,7 +49,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
             // FIXME: complete T::AssocType
             let krate = ctx.module.map(|m| m.krate());
             if let Some(krate) = krate {
-                let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db);
+                let traits_in_scope = ctx.scope().traits_in_scope();
                 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
                     match item {
                         hir::AssocItem::Function(func) => {
diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs
index fd03b1c40..c2c6ca002 100644
--- a/crates/ra_ide/src/completion/complete_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_pattern.rs
@@ -9,7 +9,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
     }
     // FIXME: ideally, we should look at the type we are matching against and
     // suggest variants + auto-imports
-    ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
+    ctx.scope().process_all_names(&mut |name, res| {
         let def = match &res {
             hir::ScopeDef::ModuleDef(def) => def,
             _ => return,
diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs
index 5470dc291..8a74f993a 100644
--- a/crates/ra_ide/src/completion/complete_postfix.rs
+++ b/crates/ra_ide/src/completion/complete_postfix.rs
@@ -29,7 +29,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         dot_receiver.syntax().text().to_string()
     };
 
-    let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
+    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
        Some(it) => it,
        None => return,
    };
diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs
index 577c394d2..f98353d76 100644
--- a/crates/ra_ide/src/completion/complete_record_literal.rs
+++ b/crates/ra_ide/src/completion/complete_record_literal.rs
@@ -5,10 +5,7 @@ use crate::completion::{CompletionContext, Completions};
 /// Complete fields in fields literals.
 pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| {
-        Some((
-            ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
-            ctx.analyzer.resolve_record_literal(it)?,
-        ))
+        Some((ctx.sema.type_of_expr(&it.clone().into())?, ctx.sema.resolve_record_literal(it)?))
     }) {
         Some(it) => it,
         _ => return,
diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs
index a56c7e3a1..9bdeae49f 100644
--- a/crates/ra_ide/src/completion/complete_record_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_record_pattern.rs
@@ -4,10 +4,7 @@ use crate::completion::{CompletionContext, Completions};
 
 pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| {
-        Some((
-            ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
-            ctx.analyzer.resolve_record_pattern(it)?,
-        ))
+        Some((ctx.sema.type_of_pat(&it.clone().into())?, ctx.sema.resolve_record_pattern(it)?))
     }) {
         Some(it) => it,
         _ => return,
diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs
index e2ee86dd1..aad016d4a 100644
--- a/crates/ra_ide/src/completion/complete_scope.rs
+++ b/crates/ra_ide/src/completion/complete_scope.rs
@@ -7,9 +7,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
         return;
     }
 
-    ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
-        acc.add_resolution(ctx, name.to_string(), &res)
-    });
+    ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res));
 }
 
 #[cfg(test)]
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs
index 83628e35c..9a27c164b 100644
--- a/crates/ra_ide/src/completion/complete_trait_impl.rs
+++ b/crates/ra_ide/src/completion/complete_trait_impl.rs
@@ -64,11 +64,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
     if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) {
         match trigger.kind() {
             SyntaxKind::FN_DEF => {
-                for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
-                    .iter()
-                    .filter_map(|item| match item {
+                for missing_fn in
+                    get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
+                        match item {
                         hir::AssocItem::Function(fn_item) => Some(fn_item),
                         _ => None,
+                        }
                     })
                 {
                     add_function_impl(&trigger, acc, ctx, &missing_fn);
@@ -76,11 +77,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
             }
 
             SyntaxKind::TYPE_ALIAS_DEF => {
-                for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
-                    .iter()
-                    .filter_map(|item| match item {
+                for missing_fn in
+                    get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
+                        match item {
                         hir::AssocItem::TypeAlias(type_item) => Some(type_item),
                         _ => None,
+                        }
                     })
                 {
                     add_type_alias_impl(&trigger, acc, ctx, &missing_fn);
@@ -88,11 +90,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
             }
 
             SyntaxKind::CONST_DEF => {
-                for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
-                    .iter()
-                    .filter_map(|item| match item {
+                for missing_fn in
+                    get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
+                        match item {
                         hir::AssocItem::Const(const_item) => Some(const_item),
                         _ => None,
+                        }
                     })
                 {
                     add_const_impl(&trigger, acc, ctx, &missing_fn);
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs
index 8678a3234..81321a897 100644
--- a/crates/ra_ide/src/completion/completion_context.rs
+++ b/crates/ra_ide/src/completion/completion_context.rs
@@ -1,9 +1,11 @@
 //! FIXME: write short doc here
 
+use hir::{Semantics, SemanticsScope};
+use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::{find_covering_element, find_node_at_offset},
-    ast, AstNode, Parse, SourceFile,
+    ast, AstNode, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
@@ -15,8 +17,8 @@ use crate::FilePosition;
 /// exactly is the cursor, syntax-wise.
 #[derive(Debug)]
 pub(crate) struct CompletionContext<'a> {
+    pub(super) sema: Semantics<'a, RootDatabase>,
     pub(super) db: &'a RootDatabase,
-    pub(super) analyzer: hir::SourceAnalyzer,
     pub(super) offset: TextUnit,
     pub(super) token: SyntaxToken,
     pub(super) module: Option<hir::Module>,
@@ -51,20 +53,26 @@ pub(crate) struct CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a RootDatabase,
-        original_parse: &'a Parse<ast::SourceFile>,
         position: FilePosition,
     ) -> Option<CompletionContext<'a>> {
-        let mut sb = hir::SourceBinder::new(db);
-        let module = sb.to_module_def(position.file_id);
-        let token =
-            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
-        let analyzer = sb.analyze(
-            hir::InFile::new(position.file_id.into(), &token.parent()),
-            Some(position.offset),
-        );
+        let sema = Semantics::new(db);
+
+        let original_file = sema.parse(position.file_id);
+
+        // Insert a fake ident to get a valid parse tree. We will use this file
+        // to determine context, though the original_file will be used for
+        // actual completion.
+        let file_with_fake_ident = {
+            let parse = db.parse(position.file_id);
+            let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
+            parse.reparse(&edit).tree()
+        };
+
+        let module = sema.to_module_def(position.file_id);
+        let token = original_file.syntax().token_at_offset(position.offset).left_biased()?;
         let mut ctx = CompletionContext {
+            sema,
             db,
-            analyzer,
             token,
             offset: position.offset,
             module,
@@ -87,7 +95,7 @@ impl<'a> CompletionContext<'a> {
             has_type_args: false,
             dot_receiver_is_ambiguous_float_literal: false,
         };
-        ctx.fill(&original_parse, position.offset);
+        ctx.fill(&original_file, file_with_fake_ident, position.offset);
         Some(ctx)
     }
 
@@ -100,29 +108,33 @@ impl<'a> CompletionContext<'a> {
         }
     }
 
-    fn fill(&mut self, original_parse: &'a Parse<ast::SourceFile>, offset: TextUnit) {
-        // Insert a fake ident to get a valid parse tree. We will use this file
-        // to determine context, though the original_file will be used for
-        // actual completion.
-        let file = {
-            let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
-            original_parse.reparse(&edit).tree()
-        };
+    pub(crate) fn scope(&self) -> SemanticsScope<'_, RootDatabase> {
+        self.sema.scope_at_offset(&self.token.parent(), self.offset)
+    }
 
+    fn fill(
+        &mut self,
+        original_file: &ast::SourceFile,
+        file_with_fake_ident: ast::SourceFile,
+        offset: TextUnit,
+    ) {
         // First, let's try to complete a reference to some declaration.
-        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
+        if let Some(name_ref) =
+            find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
+        {
             // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
             // See RFC#1685.
             if is_node::<ast::Param>(name_ref.syntax()) {
                 self.is_param = true;
                 return;
             }
-            self.classify_name_ref(original_parse.tree(), name_ref);
+            self.classify_name_ref(original_file, name_ref);
         }
 
         // Otherwise, see if this is a declaration. We can use heuristics to
         // suggest declaration names, see `CompletionKind::Magic`.
-        if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
+        if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset)
+        {
             if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                 let parent = bind_pat.syntax().parent();
                 if parent.clone().and_then(ast::MatchArm::cast).is_some()
@@ -136,13 +148,12 @@ impl<'a> CompletionContext<'a> {
                 return;
             }
             if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
-                self.record_lit_pat =
-                    find_node_at_offset(original_parse.tree().syntax(), self.offset);
+                self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset);
             }
         }
     }
 
-    fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
+    fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) {
         self.name_ref_syntax =
             find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
         let name_range = name_ref.syntax().text_range();
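
The most interesting part of the completion_context.rs change is that the "fake ident" reparse now happens up front in CompletionContext::new rather than inside fill: the original tree drives the actual completions, while a copy with an identifier inserted at the cursor gives the classifier a well-formed node to inspect. A standalone sketch of that step, under the assumption that AtomTextEdit still comes from ra_text_edit as in this file's existing imports (the helper name is illustrative, not part of the change):

    use ra_db::SourceDatabase;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;
    use ra_text_edit::AtomTextEdit;

    use crate::FilePosition;

    // Hypothetical helper illustrating the fake-ident reparse used by CompletionContext::new.
    fn parse_with_fake_ident(db: &RootDatabase, position: FilePosition) -> ast::SourceFile {
        let parse = db.parse(position.file_id);
        // Turning `foo.<cursor>` into `foo.intellijRulezz` yields a tree where the cursor
        // sits on a valid NameRef, so find_node_at_offset can classify the position reliably.
        let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
        parse.reparse(&edit).tree()
    }
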
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs
index 9cf86b26d..a52f7fdd9 100644
--- a/crates/ra_ide/src/diagnostics.rs
+++ b/crates/ra_ide/src/diagnostics.rs
@@ -2,7 +2,10 @@
 
 use std::cell::RefCell;
 
-use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink};
+use hir::{
+    diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink},
+    Semantics,
+};
 use itertools::Itertools;
 use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt};
 use ra_ide_db::RootDatabase;
@@ -24,7 +27,7 @@ pub enum Severity {
 
 pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
     let _p = profile("diagnostics");
-    let mut sb = hir::SourceBinder::new(db);
+    let sema = Semantics::new(db);
     let parse = db.parse(file_id);
     let mut res = Vec::new();
 
@@ -110,7 +113,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
             fix: Some(fix),
         })
     });
-    if let Some(m) = sb.to_module_def(file_id) {
+    if let Some(m) = sema.to_module_def(file_id) {
         m.diagnostics(db, &mut sink);
     };
     drop(sink);
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs
index c9d0058a6..5afb23764 100644
--- a/crates/ra_ide/src/display/navigation_target.rs
+++ b/crates/ra_ide/src/display/navigation_target.rs
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
 use either::Either;
-use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource};
+use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource};
 use ra_db::{FileId, SourceDatabase};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
@@ -11,7 +11,11 @@ use ra_syntax::{
     TextRange,
 };
 
-use crate::{expand::original_range, references::NameDefinition, FileSymbol};
+use crate::{
+    // expand::original_range,
+    references::NameDefinition,
+    FileSymbol,
+};
 
 use super::short_label::ShortLabel;
 
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs
deleted file mode 100644
index 9f3aaa3a3..000000000
--- a/crates/ra_ide/src/expand.rs
+++ /dev/null
@@ -1,102 +0,0 @@
-//! Utilities to work with files, produced by macros.
-use std::iter::successors;
-
-use hir::{InFile, Origin};
-use ra_db::FileId;
-use ra_ide_db::RootDatabase;
-use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange};
-
-use crate::FileRange;
-
-pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange {
-    if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
-        return range;
-    }
-
-    if let Some(expansion) = node.file_id.expansion_info(db) {
-        if let Some(call_node) = expansion.call_node() {
-            return FileRange {
-                file_id: call_node.file_id.original_file(db),
-                range: call_node.value.text_range(),
-            };
-        }
-    }
-
-    FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
-}
-
-fn original_range_and_origin(
-    db: &RootDatabase,
-    node: InFile<&SyntaxNode>,
-) -> Option<(FileRange, Origin)> {
-    let expansion = node.file_id.expansion_info(db)?;
-
-    // the input node has only one token ?
-    let single = node.value.first_token()? == node.value.last_token()?;
-
-    // FIXME: We should handle recurside macro expansions
-    let (range, origin) = node.value.descendants().find_map(|it| {
-        let first = it.first_token()?;
-        let last = it.last_token()?;
-
-        if !single && first == last {
-            return None;
-        }
-
-        // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
-        let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
-        let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;
-
-        if first.file_id != last.file_id || first_origin != last_origin {
-            return None;
-        }
-
-        // FIXME: Add union method in TextRange
-        Some((
-            first.with_value(union_range(first.value.text_range(), last.value.text_range())),
-            first_origin,
-        ))
-    })?;
-
-    return Some((
-        FileRange { file_id: range.file_id.original_file(db), range: range.value },
-        origin,
-    ));
-
-    fn union_range(a: TextRange, b: TextRange) -> TextRange {
-        let start = a.start().min(b.start());
-        let end = a.end().max(b.end());
-        TextRange::from_to(start, end)
-    }
-}
-
-pub(crate) fn descend_into_macros(
-    db: &RootDatabase,
-    file_id: FileId,
-    token: SyntaxToken,
-) -> InFile<SyntaxToken> {
-    let src = InFile::new(file_id.into(), token);
-
-    let source_analyzer =
-        hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None);
-
-    descend_into_macros_with_analyzer(db, &source_analyzer, src)
-}
-
-pub(crate) fn descend_into_macros_with_analyzer(
-    db: &RootDatabase,
-    source_analyzer: &hir::SourceAnalyzer,
-    src: InFile<SyntaxToken>,
-) -> InFile<SyntaxToken> {
-    successors(Some(src), |token| {
-        let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
-        let tt = macro_call.token_tree()?;
-        if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
-            return None;
-        }
-        let exp = source_analyzer.expand(db, token.with_value(&macro_call))?;
-        exp.map_token_down(db, token.as_ref())
-    })
-    .last()
-    .unwrap()
-}
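
The two jobs of the deleted expand.rs module did not disappear: descending a token into macro expansions is now Semantics::descend_into_macros, used throughout this commit, and mapping ranges back to the original file is hir::original_range, which navigation_target.rs above now imports from hir. A hedged sketch of the replacement call shape (the helper name is illustrative):

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{SyntaxNode, SyntaxToken};

    // Hypothetical helper: the old descend_into_macros(db, file_id, token).value.parent()
    // sequence collapses into two calls on Semantics.
    fn node_in_expansion(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> SyntaxNode {
        sema.descend_into_macros(token).parent()
    }
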
diff --git a/crates/ra_ide/src/expand_macro.rs b/crates/ra_ide/src/expand_macro.rs
index af2783bef..5a079de27 100644
--- a/crates/ra_ide/src/expand_macro.rs
+++ b/crates/ra_ide/src/expand_macro.rs
@@ -1,7 +1,6 @@
 //! This modules implements "expand macro" functionality in the IDE
 
-use hir::db::AstDatabase;
-use ra_db::SourceDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::{find_node_at_offset, replace_descendants},
@@ -17,13 +16,12 @@ pub struct ExpandedMacro {
 }
 
 pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
-    let parse = db.parse(position.file_id);
-    let file = parse.tree();
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id);
     let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?;
     let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;
 
-    let source = hir::InFile::new(position.file_id.into(), mac.syntax());
-    let expanded = expand_macro_recur(db, source, source.with_value(&mac))?;
+    let expanded = expand_macro_recur(&sema, &mac)?;
 
     // FIXME:
     // macro expansion may lose all white space information
@@ -33,21 +31,16 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 }
 
 fn expand_macro_recur(
-    db: &RootDatabase,
-    source: hir::InFile<&SyntaxNode>,
-    macro_call: hir::InFile<&ast::MacroCall>,
+    sema: &Semantics<RootDatabase>,
+    macro_call: &ast::MacroCall,
 ) -> Option<SyntaxNode> {
-    let analyzer = hir::SourceAnalyzer::new(db, source, None);
-    let expansion = analyzer.expand(db, macro_call)?;
-    let macro_file_id = expansion.file_id();
-    let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
+    let mut expanded = sema.expand(macro_call)?;
 
     let children = expanded.descendants().filter_map(ast::MacroCall::cast);
     let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default();
 
     for child in children.into_iter() {
-        let node = hir::InFile::new(macro_file_id, &child);
-        if let Some(new_node) = expand_macro_recur(db, source, node) {
+        if let Some(new_node) = expand_macro_recur(sema, &child) {
             // Replace the whole node if it is root
             // `replace_descendants` will not replace the parent node
             // but `SyntaxNode::descendants include itself
@@ -59,7 +52,7 @@ fn expand_macro_recur(
         }
     }
 
-    Some(replace_descendants(&expanded, &|n| replaces.get(n).cloned()))
+    Some(replace_descendants(&expanded, |n| replaces.get(n).cloned()))
 }
 
 // FIXME: It would also be cool to share logic here and in the mbe tests,
@@ -120,10 +113,12 @@ fn insert_whitespaces(syn: SyntaxNode) -> String {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
-    use crate::mock_analysis::analysis_and_position;
     use insta::assert_snapshot;
 
+    use crate::mock_analysis::analysis_and_position;
+
+    use super::*;
+
     fn check_expand_macro(fixture: &str) -> ExpandedMacro {
         let (analysis, pos) = analysis_and_position(fixture);
         analysis.expand_macro(pos).unwrap().unwrap()
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs
index 1e7d0621a..f5a063351 100644
--- a/crates/ra_ide/src/extend_selection.rs
+++ b/crates/ra_ide/src/extend_selection.rs
@@ -2,26 +2,26 @@
 
 use std::iter::successors;
 
-use hir::db::AstDatabase;
-use ra_db::SourceDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
-    algo::find_covering_element,
+    algo::{self, find_covering_element, skip_trivia_token},
     ast::{self, AstNode, AstToken},
-    Direction, NodeOrToken, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
-use crate::{expand::descend_into_macros, FileId, FileRange};
+use crate::FileRange;
 
 pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
-    let src = db.parse(frange.file_id).tree();
-    try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range)
+    let sema = Semantics::new(db);
+    let src = sema.parse(frange.file_id);
+    try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
 }
 
 fn try_extend_selection(
-    db: &RootDatabase,
+    sema: &Semantics<RootDatabase>,
     root: &SyntaxNode,
     frange: FileRange,
 ) -> Option<TextRange> {
@@ -86,7 +86,7 @@ fn try_extend_selection(
     // if we are in single token_tree, we maybe live in macro or attr
     if node.kind() == TOKEN_TREE {
         if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
-            if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) {
+            if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
                 return Some(range);
             }
         }
@@ -96,7 +96,7 @@ fn try_extend_selection(
         return Some(node.text_range());
     }
 
-    let node = shallowest_node(&node.into()).unwrap();
+    let node = shallowest_node(&node.into());
 
     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
         if let Some(range) = extend_list_item(&node) {
@@ -108,8 +108,7 @@ fn try_extend_selection(
 }
 
 fn extend_tokens_from_range(
-    db: &RootDatabase,
-    file_id: FileId,
+    sema: &Semantics<RootDatabase>,
     macro_call: ast::MacroCall,
     original_range: TextRange,
 ) -> Option<TextRange> {
@@ -119,54 +118,50 @@ fn extend_tokens_from_range(
         NodeOrToken::Token(it) => (it.clone(), it),
     };
 
-    let mut first_token = skip_whitespace(first_token, Direction::Next)?;
-    let mut last_token = skip_whitespace(last_token, Direction::Prev)?;
+    let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
+    let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
 
     while !first_token.text_range().is_subrange(&original_range) {
-        first_token = skip_whitespace(first_token.next_token()?, Direction::Next)?;
+        first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
     }
     while !last_token.text_range().is_subrange(&original_range) {
-        last_token = skip_whitespace(last_token.prev_token()?, Direction::Prev)?;
+        last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
     }
 
     // compute original mapped token range
-    let expanded = {
-        let first_node = descend_into_macros(db, file_id, first_token.clone());
-        let first_node = first_node.map(|it| it.text_range());
-
-        let last_node = descend_into_macros(db, file_id, last_token.clone());
-        if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
-            return None;
+    let extended = {
+        let fst_expanded = sema.descend_into_macros(first_token.clone());
+        let lst_expanded = sema.descend_into_macros(last_token.clone());
+        let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
+        lca = shallowest_node(&lca);
+        if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
+            lca = lca.parent()?;
         }
-        first_node.map(|it| union_range(it, last_node.value.text_range()))
+        lca
     };
 
     // Compute parent node range
-    let src = db.parse_or_expand(expanded.file_id)?;
-    let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
-
     let validate = |token: &SyntaxToken| {
-        let node = descend_into_macros(db, file_id, token.clone());
-        node.file_id == expanded.file_id
-            && node.value.text_range().is_subrange(&parent.text_range())
+        let expanded = sema.descend_into_macros(token.clone());
+        algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
     };
 
     // Find the first and last text range under expanded parent
     let first = successors(Some(first_token), |token| {
         let token = token.prev_token()?;
-        skip_whitespace(token, Direction::Prev)
+        skip_trivia_token(token, Direction::Prev)
     })
     .take_while(validate)
     .last()?;
 
     let last = successors(Some(last_token), |token| {
         let token = token.next_token()?;
-        skip_whitespace(token, Direction::Next)
+        skip_trivia_token(token, Direction::Next)
     })
     .take_while(validate)
     .last()?;
 
-    let range = union_range(first.text_range(), last.text_range());
+    let range = first.text_range().extend_to(&last.text_range());
     if original_range.is_subrange(&range) && original_range != range {
         Some(range)
     } else {
@@ -174,25 +169,9 @@ fn extend_tokens_from_range(
     }
 }
 
-fn skip_whitespace(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
-    while token.kind() == WHITESPACE {
-        token = match direction {
-            Direction::Next => token.next_token()?,
-            Direction::Prev => token.prev_token()?,
-        }
-    }
-    Some(token)
-}
-
-fn union_range(range: TextRange, r: TextRange) -> TextRange {
-    let start = range.start().min(r.start());
-    let end = range.end().max(r.end());
-    TextRange::from_to(start, end)
-}
-
 /// Find the shallowest node with same range, which allows us to traverse siblings.
-fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> {
-    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last()
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
+    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
 }
 
 fn extend_single_word_in_comment_or_string(
diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs
index feff1ec3f..621ab982c 100644
--- a/crates/ra_ide/src/goto_definition.rs
+++ b/crates/ra_ide/src/goto_definition.rs
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
-use hir::{db::AstDatabase, InFile, SourceBinder};
-use ra_ide_db::{symbol_index, RootDatabase};
+use hir::Semantics;
+use ra_ide_db::{defs::classify_name, symbol_index, RootDatabase};
 use ra_syntax::{
     ast::{self},
     match_ast, AstNode,
@@ -11,8 +11,7 @@ use ra_syntax::{
 
 use crate::{
     display::{ToNav, TryToNav},
-    expand::descend_into_macros,
-    references::{classify_name, classify_name_ref},
+    references::classify_name_ref,
     FilePosition, NavigationTarget, RangeInfo,
 };
 
@@ -20,18 +19,20 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let file = db.parse_or_expand(position.file_id.into())?;
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id).syntax().clone();
     let original_token = pick_best(file.token_at_offset(position.offset))?;
-    let token = descend_into_macros(db, position.file_id, original_token.clone());
+    let token = sema.descend_into_macros(original_token.clone());
 
-    let mut sb = SourceBinder::new(db);
     let nav_targets = match_ast! {
-        match (token.value.parent()) {
+        match (token.parent()) {
             ast::NameRef(name_ref) => {
-                reference_definition(&mut sb, token.with_value(&name_ref)).to_vec()
+                reference_definition(&sema, &name_ref).to_vec()
             },
             ast::Name(name) => {
-                name_definition(&mut sb, token.with_value(&name))?
+                let def = classify_name(&sema, &name)?.definition();
+                let nav = def.try_to_nav(sema.db)?;
+                vec![nav]
             },
             _ => return None,
         }
@@ -68,36 +69,27 @@ impl ReferenceResult {
 }
 
 pub(crate) fn reference_definition(
-    sb: &mut SourceBinder<RootDatabase>,
-    name_ref: InFile<&ast::NameRef>,
+    sema: &Semantics<RootDatabase>,
+    name_ref: &ast::NameRef,
 ) -> ReferenceResult {
     use self::ReferenceResult::*;
 
-    let name_kind = classify_name_ref(sb, name_ref);
+    let name_kind = classify_name_ref(sema, name_ref);
     if let Some(def) = name_kind {
-        return match def.try_to_nav(sb.db) {
+        return match def.try_to_nav(sema.db) {
             Some(nav) => ReferenceResult::Exact(nav),
             None => ReferenceResult::Approximate(Vec::new()),
         };
     }
 
     // Fallback index based approach:
-    let navs = symbol_index::index_resolve(sb.db, name_ref.value)
+    let navs = symbol_index::index_resolve(sema.db, name_ref)
         .into_iter()
-        .map(|s| s.to_nav(sb.db))
+        .map(|s| s.to_nav(sema.db))
         .collect();
     Approximate(navs)
 }
 
-fn name_definition(
-    sb: &mut SourceBinder<RootDatabase>,
-    name: InFile<&ast::Name>,
-) -> Option<Vec<NavigationTarget>> {
-    let def = classify_name(sb, name)?;
-    let nav = def.try_to_nav(sb.db)?;
-    Some(vec![nav])
-}
-
 #[cfg(test)]
 mod tests {
     use test_utils::{assert_eq_text, covers};
diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs
index 69940fc36..869a4708b 100644
--- a/crates/ra_ide/src/goto_type_definition.rs
+++ b/crates/ra_ide/src/goto_type_definition.rs
@@ -1,31 +1,31 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::db::AstDatabase;
4use ra_ide_db::RootDatabase; 3use ra_ide_db::RootDatabase;
5use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; 4use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
6 5
7use crate::{ 6use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo};
8 display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo,
9};
10 7
11pub(crate) fn goto_type_definition( 8pub(crate) fn goto_type_definition(
12 db: &RootDatabase, 9 db: &RootDatabase,
13 position: FilePosition, 10 position: FilePosition,
14) -> Option<RangeInfo<Vec<NavigationTarget>>> { 11) -> Option<RangeInfo<Vec<NavigationTarget>>> {
15 let file = db.parse_or_expand(position.file_id.into())?; 12 let sema = hir::Semantics::new(db);
16 let token = pick_best(file.token_at_offset(position.offset))?; 13
17 let token = descend_into_macros(db, position.file_id, token); 14 let file: ast::SourceFile = sema.parse(position.file_id);
18 15 let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
19 let node = token 16 let token: SyntaxToken = sema.descend_into_macros(token);
20 .value 17
21 .ancestors() 18 let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
22 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; 19 let ty = match_ast! {
23 20 match node {
24 let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None); 21 ast::Expr(expr) => { sema.type_of_expr(&expr)? },
22 ast::Pat(pat) => { sema.type_of_pat(&pat)? },
23 _ => { return None },
24 }
25 };
25 26
26 let ty: hir::Type = ast::Expr::cast(node.clone()) 27 Some((ty, node))
27 .and_then(|e| analyzer.type_of(db, &e)) 28 })?;
28 .or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?;
29 29
30 let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; 30 let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?;
31 31
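
The new goto_type_definition walks sema.ancestors_with_macros and lets match_ast! pick between expressions and patterns. A minimal sketch of that lookup in isolation, assuming only the Semantics methods shown above (type_at_token is a made-up name):

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::{ast, match_ast, AstNode, SyntaxToken};

    // Hypothetical helper: find the nearest enclosing expression or pattern
    // that has a type, walking out of macro expansions when necessary.
    fn type_at_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<hir::Type> {
        sema.ancestors_with_macros(token.parent()).find_map(|node| {
            let ty = match_ast! {
                match node {
                    ast::Expr(it) => { sema.type_of_expr(&it)? },
                    ast::Pat(it) => { sema.type_of_pat(&it)? },
                    _ => { return None },
                }
            };
            Some(ty)
        })
    }
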
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs
index 1c6ca36df..5073bb1cf 100644
--- a/crates/ra_ide/src/hover.rs
+++ b/crates/ra_ide/src/hover.rs
@@ -1,10 +1,11 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder}; 3use hir::{Adt, HasSource, HirDisplay, Semantics};
4use ra_db::SourceDatabase; 4use ra_ide_db::{
5use ra_ide_db::{defs::NameDefinition, RootDatabase}; 5 defs::{classify_name, NameDefinition},
6 RootDatabase,
7};
6use ra_syntax::{ 8use ra_syntax::{
7 algo::find_covering_element,
8 ast::{self, DocCommentsOwner}, 9 ast::{self, DocCommentsOwner},
9 match_ast, AstNode, 10 match_ast, AstNode,
10 SyntaxKind::*, 11 SyntaxKind::*,
@@ -13,9 +14,8 @@ use ra_syntax::{
13 14
14use crate::{ 15use crate::{
15 display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, 16 display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel},
16 expand::{descend_into_macros, original_range}, 17 references::classify_name_ref,
17 references::{classify_name, classify_name_ref}, 18 FilePosition, RangeInfo,
18 FilePosition, FileRange, RangeInfo,
19}; 19};
20 20
21/// Contains the results when hovering over an item 21/// Contains the results when hovering over an item
@@ -143,25 +143,25 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: NameDefinition) -> Option<S
143} 143}
144 144
145pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { 145pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
146 let file = db.parse_or_expand(position.file_id.into())?; 146 let sema = Semantics::new(db);
147 let file = sema.parse(position.file_id).syntax().clone();
147 let token = pick_best(file.token_at_offset(position.offset))?; 148 let token = pick_best(file.token_at_offset(position.offset))?;
148 let token = descend_into_macros(db, position.file_id, token); 149 let token = sema.descend_into_macros(token);
149 150
150 let mut res = HoverResult::new(); 151 let mut res = HoverResult::new();
151 152
152 let mut sb = SourceBinder::new(db);
153 if let Some((node, name_kind)) = match_ast! { 153 if let Some((node, name_kind)) = match_ast! {
154 match (token.value.parent()) { 154 match (token.parent()) {
155 ast::NameRef(name_ref) => { 155 ast::NameRef(name_ref) => {
156 classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().clone(), d)) 156 classify_name_ref(&sema, &name_ref).map(|d| (name_ref.syntax().clone(), d))
157 }, 157 },
158 ast::Name(name) => { 158 ast::Name(name) => {
159 classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().clone(), d)) 159 classify_name(&sema, &name).map(|d| (name.syntax().clone(), d.definition()))
160 }, 160 },
161 _ => None, 161 _ => None,
162 } 162 }
163 } { 163 } {
164 let range = original_range(db, token.with_value(&node)).range; 164 let range = sema.original_range(&node).range;
165 res.extend(hover_text_from_name_kind(db, name_kind)); 165 res.extend(hover_text_from_name_kind(db, name_kind));
166 166
167 if !res.is_empty() { 167 if !res.is_empty() {
@@ -170,17 +170,28 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
170 } 170 }
171 171
172 let node = token 172 let node = token
173 .value
174 .ancestors() 173 .ancestors()
175 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?; 174 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
176 175
177 let frange = original_range(db, token.with_value(&node)); 176 let ty = match_ast! {
178 res.extend(type_of(db, frange).map(rust_code_markup)); 177 match node {
179 if res.is_empty() { 178 ast::MacroCall(_it) => {
180 return None; 179 // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve.
181 } 180 // (e.g expanding a builtin macro). So we give up here.
182 let range = node.text_range(); 181 return None;
182 },
183 ast::Expr(it) => {
184 sema.type_of_expr(&it)
185 },
186 ast::Pat(it) => {
187 sema.type_of_pat(&it)
188 },
189 _ => None,
190 }
191 }?;
183 192
193 res.extend(Some(rust_code_markup(ty.display_truncated(db, None).to_string())));
194 let range = sema.original_range(&node).range;
184 Some(RangeInfo::new(range, res)) 195 Some(RangeInfo::new(range, res))
185} 196}
186 197
@@ -196,35 +207,13 @@ fn pick_best(tokens: TokenAtOffset<SyntaxToken>) -> Option<SyntaxToken> {
196 } 207 }
197} 208}
198 209
199pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
200 let parse = db.parse(frange.file_id);
201 let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
202 // if we picked identifier, expand to pattern/expression
203 let node = leaf_node
204 .ancestors()
205 .take_while(|it| it.text_range() == leaf_node.text_range())
206 .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
207 let analyzer =
208 hir::SourceAnalyzer::new(db, hir::InFile::new(frange.file_id.into(), &node), None);
209 let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
210 {
211 ty
212 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
213 ty
214 } else {
215 return None;
216 };
217 Some(ty.display_truncated(db, None).to_string())
218}
219
220#[cfg(test)] 210#[cfg(test)]
221mod tests { 211mod tests {
222 use crate::mock_analysis::{
223 analysis_and_position, single_file_with_position, single_file_with_range,
224 };
225 use ra_db::FileLoader; 212 use ra_db::FileLoader;
226 use ra_syntax::TextRange; 213 use ra_syntax::TextRange;
227 214
215 use crate::mock_analysis::{analysis_and_position, single_file_with_position};
216
228 fn trim_markup(s: &str) -> &str { 217 fn trim_markup(s: &str) -> &str {
229 s.trim_start_matches("```rust\n").trim_end_matches("\n```") 218 s.trim_start_matches("```rust\n").trim_end_matches("\n```")
230 } 219 }
@@ -251,6 +240,11 @@ mod tests {
251 content[hover.range].to_string() 240 content[hover.range].to_string()
252 } 241 }
253 242
243 fn check_hover_no_result(fixture: &str) {
244 let (analysis, position) = analysis_and_position(fixture);
245 assert!(analysis.hover(position).unwrap().is_none());
246 }
247
254 #[test] 248 #[test]
255 fn hover_shows_type_of_an_expression() { 249 fn hover_shows_type_of_an_expression() {
256 let (analysis, position) = single_file_with_position( 250 let (analysis, position) = single_file_with_position(
@@ -511,37 +505,6 @@ fn func(foo: i32) { if true { <|>foo; }; }
511 } 505 }
512 506
513 #[test] 507 #[test]
514 fn test_type_of_for_function() {
515 let (analysis, range) = single_file_with_range(
516 "
517 pub fn foo() -> u32 { 1 };
518
519 fn main() {
520 let foo_test = <|>foo()<|>;
521 }
522 ",
523 );
524
525 let type_name = analysis.type_of(range).unwrap().unwrap();
526 assert_eq!("u32", &type_name);
527 }
528
529 #[test]
530 fn test_type_of_for_expr() {
531 let (analysis, range) = single_file_with_range(
532 "
533 fn main() {
534 let foo: usize = 1;
535 let bar = <|>1 + foo<|>;
536 }
537 ",
538 );
539
540 let type_name = analysis.type_of(range).unwrap().unwrap();
541 assert_eq!("usize", &type_name);
542 }
543
544 #[test]
545 fn test_hover_infer_associated_method_result() { 508 fn test_hover_infer_associated_method_result() {
546 let (analysis, position) = single_file_with_position( 509 let (analysis, position) = single_file_with_position(
547 " 510 "
@@ -755,6 +718,89 @@ fn func(foo: i32) { if true { <|>foo; }; }
755 } 718 }
756 719
757 #[test] 720 #[test]
721 fn test_hover_through_expr_in_macro_recursive() {
722 let hover_on = check_hover_result(
723 "
724 //- /lib.rs
725 macro_rules! id_deep {
726 ($($tt:tt)*) => { $($tt)* }
727 }
728 macro_rules! id {
729 ($($tt:tt)*) => { id_deep!($($tt)*) }
730 }
731 fn foo(bar:u32) {
732 let a = id!(ba<|>r);
733 }
734 ",
735 &["u32"],
736 );
737
738 assert_eq!(hover_on, "bar")
739 }
740
741 #[test]
742 fn test_hover_through_func_in_macro_recursive() {
743 let hover_on = check_hover_result(
744 "
745 //- /lib.rs
746 macro_rules! id_deep {
747 ($($tt:tt)*) => { $($tt)* }
748 }
749 macro_rules! id {
750 ($($tt:tt)*) => { id_deep!($($tt)*) }
751 }
752 fn bar() -> u32 {
753 0
754 }
755 fn foo() {
756 let a = id!([0u32, bar(<|>)] );
757 }
758 ",
759 &["u32"],
760 );
761
762 assert_eq!(hover_on, "bar()")
763 }
764
765 #[test]
766 fn test_hover_through_literal_string_in_macro() {
767 let hover_on = check_hover_result(
768 r#"
769 //- /lib.rs
770 macro_rules! arr {
771 ($($tt:tt)*) => { [$($tt)*)] }
772 }
773 fn foo() {
774 let mastered_for_itunes = "";
775 let _ = arr!("Tr<|>acks", &mastered_for_itunes);
776 }
777 "#,
778 &["&str"],
779 );
780
781 assert_eq!(hover_on, "\"Tracks\"");
782 }
783
784 #[test]
785 fn test_hover_through_literal_string_in_builtin_macro() {
786 check_hover_no_result(
787 r#"
788 //- /lib.rs
789 #[rustc_builtin_macro]
790 macro_rules! assert {
791 ($cond:expr) => {{ /* compiler built-in */ }};
792 ($cond:expr,) => {{ /* compiler built-in */ }};
793 ($cond:expr, $($arg:tt)+) => {{ /* compiler built-in */ }};
794 }
795
796 fn foo() {
797 assert!("hel<|>lo");
798 }
799 "#,
800 );
801 }
802
803 #[test]
758 fn test_hover_non_ascii_space_doc() { 804 fn test_hover_non_ascii_space_doc() {
759 check_hover_result( 805 check_hover_result(
760 " 806 "
diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs
index 64a2dadc8..bf82b2a16 100644
--- a/crates/ra_ide/src/impls.rs
+++ b/crates/ra_ide/src/impls.rs
@@ -1,7 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{Crate, ImplBlock, SourceBinder}; 3use hir::{Crate, ImplBlock, Semantics};
4use ra_db::SourceDatabase;
5use ra_ide_db::RootDatabase; 4use ra_ide_db::RootDatabase;
6use ra_syntax::{algo::find_node_at_offset, ast, AstNode}; 5use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
7 6
@@ -11,21 +10,21 @@ pub(crate) fn goto_implementation(
11 db: &RootDatabase, 10 db: &RootDatabase,
12 position: FilePosition, 11 position: FilePosition,
13) -> Option<RangeInfo<Vec<NavigationTarget>>> { 12) -> Option<RangeInfo<Vec<NavigationTarget>>> {
14 let parse = db.parse(position.file_id); 13 let sema = Semantics::new(db);
15 let syntax = parse.tree().syntax().clone(); 14 let source_file = sema.parse(position.file_id);
16 let mut sb = SourceBinder::new(db); 15 let syntax = source_file.syntax().clone();
17 16
18 let krate = sb.to_module_def(position.file_id)?.krate(); 17 let krate = sema.to_module_def(position.file_id)?.krate();
19 18
20 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) { 19 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
21 return Some(RangeInfo::new( 20 return Some(RangeInfo::new(
22 nominal_def.syntax().text_range(), 21 nominal_def.syntax().text_range(),
23 impls_for_def(&mut sb, position, &nominal_def, krate)?, 22 impls_for_def(&sema, &nominal_def, krate)?,
24 )); 23 ));
25 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) { 24 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
26 return Some(RangeInfo::new( 25 return Some(RangeInfo::new(
27 trait_def.syntax().text_range(), 26 trait_def.syntax().text_range(),
28 impls_for_trait(&mut sb, position, &trait_def, krate)?, 27 impls_for_trait(&sema, &trait_def, krate)?,
29 )); 28 ));
30 } 29 }
31 30
@@ -33,49 +32,37 @@ pub(crate) fn goto_implementation(
33} 32}
34 33
35fn impls_for_def( 34fn impls_for_def(
36 sb: &mut SourceBinder<RootDatabase>, 35 sema: &Semantics<RootDatabase>,
37 position: FilePosition,
38 node: &ast::NominalDef, 36 node: &ast::NominalDef,
39 krate: Crate, 37 krate: Crate,
40) -> Option<Vec<NavigationTarget>> { 38) -> Option<Vec<NavigationTarget>> {
41 let ty = match node { 39 let ty = match node {
42 ast::NominalDef::StructDef(def) => { 40 ast::NominalDef::StructDef(def) => sema.to_def(def)?.ty(sema.db),
43 let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() }; 41 ast::NominalDef::EnumDef(def) => sema.to_def(def)?.ty(sema.db),
44 sb.to_def(src)?.ty(sb.db) 42 ast::NominalDef::UnionDef(def) => sema.to_def(def)?.ty(sema.db),
45 }
46 ast::NominalDef::EnumDef(def) => {
47 let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
48 sb.to_def(src)?.ty(sb.db)
49 }
50 ast::NominalDef::UnionDef(def) => {
51 let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
52 sb.to_def(src)?.ty(sb.db)
53 }
54 }; 43 };
55 44
56 let impls = ImplBlock::all_in_crate(sb.db, krate); 45 let impls = ImplBlock::all_in_crate(sema.db, krate);
57 46
58 Some( 47 Some(
59 impls 48 impls
60 .into_iter() 49 .into_iter()
61 .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sb.db))) 50 .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db)))
62 .map(|imp| imp.to_nav(sb.db)) 51 .map(|imp| imp.to_nav(sema.db))
63 .collect(), 52 .collect(),
64 ) 53 )
65} 54}
66 55
67fn impls_for_trait( 56fn impls_for_trait(
68 sb: &mut SourceBinder<RootDatabase>, 57 sema: &Semantics<RootDatabase>,
69 position: FilePosition,
70 node: &ast::TraitDef, 58 node: &ast::TraitDef,
71 krate: Crate, 59 krate: Crate,
72) -> Option<Vec<NavigationTarget>> { 60) -> Option<Vec<NavigationTarget>> {
73 let src = hir::InFile { file_id: position.file_id.into(), value: node.clone() }; 61 let tr = sema.to_def(node)?;
74 let tr = sb.to_def(src)?;
75 62
76 let impls = ImplBlock::for_trait(sb.db, krate, tr); 63 let impls = ImplBlock::for_trait(sema.db, krate, tr);
77 64
78 Some(impls.into_iter().map(|imp| imp.to_nav(sb.db)).collect()) 65 Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect())
79} 66}
80 67
81#[cfg(test)] 68#[cfg(test)]
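
impls.rs shows the other recurring simplification: to_def no longer needs an InFile wrapper because Semantics remembers which file a node was parsed from. A one-function sketch of that pattern (struct_ty is a hypothetical name):

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;

    // Hypothetical illustration: resolve an AST struct definition to its HIR
    // counterpart and ask for its type, without threading a FilePosition.
    fn struct_ty(sema: &Semantics<RootDatabase>, def: &ast::StructDef) -> Option<hir::Type> {
        Some(sema.to_def(def)?.ty(sema.db))
    }
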
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index b42aa1523..35e3f782d 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -1,12 +1,11 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{Adt, HirDisplay, SourceAnalyzer, SourceBinder, Type}; 3use hir::{Adt, HirDisplay, Semantics, Type};
4use once_cell::unsync::Lazy;
5use ra_ide_db::RootDatabase; 4use ra_ide_db::RootDatabase;
6use ra_prof::profile; 5use ra_prof::profile;
7use ra_syntax::{ 6use ra_syntax::{
8 ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner}, 7 ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner},
9 match_ast, SmolStr, SourceFile, SyntaxNode, TextRange, 8 match_ast, SmolStr, SyntaxNode, TextRange,
10}; 9};
11 10
12use crate::{FileId, FunctionSignature}; 11use crate::{FileId, FunctionSignature};
@@ -27,38 +26,36 @@ pub struct InlayHint {
27pub(crate) fn inlay_hints( 26pub(crate) fn inlay_hints(
28 db: &RootDatabase, 27 db: &RootDatabase,
29 file_id: FileId, 28 file_id: FileId,
30 file: &SourceFile,
31 max_inlay_hint_length: Option<usize>, 29 max_inlay_hint_length: Option<usize>,
32) -> Vec<InlayHint> { 30) -> Vec<InlayHint> {
33 let mut sb = SourceBinder::new(db); 31 let sema = Semantics::new(db);
32 let file = sema.parse(file_id);
34 let mut res = Vec::new(); 33 let mut res = Vec::new();
35 for node in file.syntax().descendants() { 34 for node in file.syntax().descendants() {
36 get_inlay_hints(&mut res, &mut sb, file_id, &node, max_inlay_hint_length); 35 get_inlay_hints(&mut res, &sema, &node, max_inlay_hint_length);
37 } 36 }
38 res 37 res
39} 38}
40 39
41fn get_inlay_hints( 40fn get_inlay_hints(
42 acc: &mut Vec<InlayHint>, 41 acc: &mut Vec<InlayHint>,
43 sb: &mut SourceBinder<RootDatabase>, 42 sema: &Semantics<RootDatabase>,
44 file_id: FileId,
45 node: &SyntaxNode, 43 node: &SyntaxNode,
46 max_inlay_hint_length: Option<usize>, 44 max_inlay_hint_length: Option<usize>,
47) -> Option<()> { 45) -> Option<()> {
48 let _p = profile("get_inlay_hints"); 46 let _p = profile("get_inlay_hints");
49 let db = sb.db; 47 let db = sema.db;
50 let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None));
51 match_ast! { 48 match_ast! {
52 match node { 49 match node {
53 ast::CallExpr(it) => { 50 ast::CallExpr(it) => {
54 get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); 51 get_param_name_hints(acc, sema, ast::Expr::from(it));
55 }, 52 },
56 ast::MethodCallExpr(it) => { 53 ast::MethodCallExpr(it) => {
57 get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it)); 54 get_param_name_hints(acc, sema, ast::Expr::from(it));
58 }, 55 },
59 ast::BindPat(it) => { 56 ast::BindPat(it) => {
60 let pat = ast::Pat::from(it.clone()); 57 let pat = ast::Pat::from(it.clone());
61 let ty = analyzer.type_of_pat(db, &pat)?; 58 let ty = sema.type_of_pat(&pat)?;
62 59
63 if should_not_display_type_hint(db, &it, &ty) { 60 if should_not_display_type_hint(db, &it, &ty) {
64 return None; 61 return None;
@@ -125,8 +122,7 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_
125 122
126fn get_param_name_hints( 123fn get_param_name_hints(
127 acc: &mut Vec<InlayHint>, 124 acc: &mut Vec<InlayHint>,
128 db: &RootDatabase, 125 sema: &Semantics<RootDatabase>,
129 analyzer: &SourceAnalyzer,
130 expr: ast::Expr, 126 expr: ast::Expr,
131) -> Option<()> { 127) -> Option<()> {
132 let args = match &expr { 128 let args = match &expr {
@@ -138,7 +134,7 @@ fn get_param_name_hints(
138 // we need args len to determine whether to skip or not the &self parameter 134 // we need args len to determine whether to skip or not the &self parameter
139 .collect::<Vec<_>>(); 135 .collect::<Vec<_>>();
140 136
141 let fn_signature = get_fn_signature(db, analyzer, &expr)?; 137 let fn_signature = get_fn_signature(sema, &expr)?;
142 let n_params_to_skip = 138 let n_params_to_skip =
143 if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() { 139 if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() {
144 1 140 1
@@ -184,28 +180,26 @@ fn should_show_param_hint(
184 true 180 true
185} 181}
186 182
187fn get_fn_signature( 183fn get_fn_signature(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<FunctionSignature> {
188 db: &RootDatabase,
189 analyzer: &SourceAnalyzer,
190 expr: &ast::Expr,
191) -> Option<FunctionSignature> {
192 match expr { 184 match expr {
193 ast::Expr::CallExpr(expr) => { 185 ast::Expr::CallExpr(expr) => {
194 // FIXME: Type::as_callable is broken for closures 186 // FIXME: Type::as_callable is broken for closures
195 let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; 187 let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
196 match callable_def { 188 match callable_def {
197 hir::CallableDef::FunctionId(it) => { 189 hir::CallableDef::FunctionId(it) => {
198 Some(FunctionSignature::from_hir(db, it.into())) 190 Some(FunctionSignature::from_hir(sema.db, it.into()))
191 }
192 hir::CallableDef::StructId(it) => {
193 FunctionSignature::from_struct(sema.db, it.into())
199 } 194 }
200 hir::CallableDef::StructId(it) => FunctionSignature::from_struct(db, it.into()),
201 hir::CallableDef::EnumVariantId(it) => { 195 hir::CallableDef::EnumVariantId(it) => {
202 FunctionSignature::from_enum_variant(db, it.into()) 196 FunctionSignature::from_enum_variant(sema.db, it.into())
203 } 197 }
204 } 198 }
205 } 199 }
206 ast::Expr::MethodCallExpr(expr) => { 200 ast::Expr::MethodCallExpr(expr) => {
207 let fn_def = analyzer.resolve_method_call(&expr)?; 201 let fn_def = sema.resolve_method_call(&expr)?;
208 Some(FunctionSignature::from_hir(db, fn_def)) 202 Some(FunctionSignature::from_hir(sema.db, fn_def))
209 } 203 }
210 _ => None, 204 _ => None,
211 } 205 }
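
In inlay_hints.rs the lazily built SourceAnalyzer disappears and resolution goes straight through Semantics. A small in-crate sketch under that assumption (method_signature is a hypothetical name; FunctionSignature is the crate-internal type already used in this hunk):

    use hir::Semantics;
    use ra_ide_db::RootDatabase;
    use ra_syntax::ast;

    use crate::FunctionSignature;

    // Hypothetical helper: resolve a method call through Semantics and render
    // the signature of the resolved function, as get_fn_signature does above.
    fn method_signature(
        sema: &Semantics<RootDatabase>,
        call: &ast::MethodCallExpr,
    ) -> Option<FunctionSignature> {
        let fn_def = sema.resolve_method_call(call)?;
        Some(FunctionSignature::from_hir(sema.db, fn_def))
    }
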
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index 82e10bc7e..f61028f78 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -35,7 +35,6 @@ mod typing;
35mod matching_brace; 35mod matching_brace;
36mod display; 36mod display;
37mod inlay_hints; 37mod inlay_hints;
38mod expand;
39mod expand_macro; 38mod expand_macro;
40mod ssr; 39mod ssr;
41 40
@@ -75,7 +74,9 @@ pub use crate::{
75 runnables::{Runnable, RunnableKind, TestId}, 74 runnables::{Runnable, RunnableKind, TestId},
76 source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, 75 source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
77 ssr::SsrError, 76 ssr::SsrError,
78 syntax_highlighting::{tags, HighlightedRange}, 77 syntax_highlighting::{
78 Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange,
79 },
79}; 80};
80 81
81pub use hir::Documentation; 82pub use hir::Documentation;
@@ -319,9 +320,7 @@ impl Analysis {
319 file_id: FileId, 320 file_id: FileId,
320 max_inlay_hint_length: Option<usize>, 321 max_inlay_hint_length: Option<usize>,
321 ) -> Cancelable<Vec<InlayHint>> { 322 ) -> Cancelable<Vec<InlayHint>> {
322 self.with_db(|db| { 323 self.with_db(|db| inlay_hints::inlay_hints(db, file_id, max_inlay_hint_length))
323 inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length)
324 })
325 } 324 }
326 325
327 /// Returns the set of folding ranges. 326 /// Returns the set of folding ranges.
@@ -425,9 +424,14 @@ impl Analysis {
425 self.with_db(|db| runnables::runnables(db, file_id)) 424 self.with_db(|db| runnables::runnables(db, file_id))
426 } 425 }
427 426
428 /// Computes syntax highlighting for the given file. 427 /// Computes syntax highlighting for the given file
429 pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { 428 pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
430 self.with_db(|db| syntax_highlighting::highlight(db, file_id)) 429 self.with_db(|db| syntax_highlighting::highlight(db, file_id, None))
430 }
431
432 /// Computes syntax highlighting for the given file range.
433 pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
434 self.with_db(|db| syntax_highlighting::highlight(db, frange.file_id, Some(frange.range)))
431 } 435 }
432 436
433 /// Computes syntax highlighting for the given file. 437 /// Computes syntax highlighting for the given file.
@@ -451,11 +455,6 @@ impl Analysis {
451 self.with_db(|db| diagnostics::diagnostics(db, file_id)) 455 self.with_db(|db| diagnostics::diagnostics(db, file_id))
452 } 456 }
453 457
454 /// Computes the type of the expression at the given position.
455 pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> {
456 self.with_db(|db| hover::type_of(db, frange))
457 }
458
459 /// Returns the edit required to rename reference at the position to the new 458 /// Returns the edit required to rename reference at the position to the new
460 /// name. 459 /// name.
461 pub fn rename( 460 pub fn rename(
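
lib.rs gains highlight_range next to highlight and drops Analysis::type_of. A hedged usage sketch, written as an in-crate test against the fixture helpers shown elsewhere in this diff (module, test name and assertion are illustrative only):

    #[cfg(test)]
    mod highlight_range_example {
        use crate::mock_analysis::single_file_with_range;

        #[test]
        fn highlights_only_the_selection() {
            let (analysis, frange) = single_file_with_range(
                "
                fn main() {
                    let x = <|>1 + 2<|>;
                }
                ",
            );
            // Restrict highlighting to the marked range instead of the whole file.
            let ranges = analysis.highlight_range(frange).unwrap();
            assert!(!ranges.is_empty());
        }
    }
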
diff --git a/crates/ra_ide/src/marks.rs b/crates/ra_ide/src/marks.rs
index bcb67e373..7b8b727b4 100644
--- a/crates/ra_ide/src/marks.rs
+++ b/crates/ra_ide/src/marks.rs
@@ -11,4 +11,5 @@ test_utils::marks!(
11 call_info_bad_offset 11 call_info_bad_offset
12 dont_complete_current_use 12 dont_complete_current_use
13 test_resolve_parent_module_on_module_decl 13 test_resolve_parent_module_on_module_decl
14 search_filters_by_range
14); 15);
diff --git a/crates/ra_ide/src/mock_analysis.rs b/crates/ra_ide/src/mock_analysis.rs
index 081aaee8c..f4cd6deb7 100644
--- a/crates/ra_ide/src/mock_analysis.rs
+++ b/crates/ra_ide/src/mock_analysis.rs
@@ -124,28 +124,28 @@ impl MockAnalysis {
124} 124}
125 125
126/// Creates analysis from a multi-file fixture, returns positions marked with <|>. 126/// Creates analysis from a multi-file fixture, returns positions marked with <|>.
127pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) { 127pub fn analysis_and_position(ra_fixture: &str) -> (Analysis, FilePosition) {
128 let (mock, position) = MockAnalysis::with_files_and_position(fixture); 128 let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture);
129 (mock.analysis(), position) 129 (mock.analysis(), position)
130} 130}
131 131
132/// Creates analysis for a single file. 132/// Creates analysis for a single file.
133pub fn single_file(code: &str) -> (Analysis, FileId) { 133pub fn single_file(ra_fixture: &str) -> (Analysis, FileId) {
134 let mut mock = MockAnalysis::new(); 134 let mut mock = MockAnalysis::new();
135 let file_id = mock.add_file("/main.rs", code); 135 let file_id = mock.add_file("/main.rs", ra_fixture);
136 (mock.analysis(), file_id) 136 (mock.analysis(), file_id)
137} 137}
138 138
139/// Creates analysis for a single file, returns position marked with <|>. 139/// Creates analysis for a single file, returns position marked with <|>.
140pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) { 140pub fn single_file_with_position(ra_fixture: &str) -> (Analysis, FilePosition) {
141 let mut mock = MockAnalysis::new(); 141 let mut mock = MockAnalysis::new();
142 let pos = mock.add_file_with_position("/main.rs", code); 142 let pos = mock.add_file_with_position("/main.rs", ra_fixture);
143 (mock.analysis(), pos) 143 (mock.analysis(), pos)
144} 144}
145 145
146/// Creates analysis for a single file, returns range marked with a pair of <|>. 146/// Creates analysis for a single file, returns range marked with a pair of <|>.
147pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) { 147pub fn single_file_with_range(ra_fixture: &str) -> (Analysis, FileRange) {
148 let mut mock = MockAnalysis::new(); 148 let mut mock = MockAnalysis::new();
149 let pos = mock.add_file_with_range("/main.rs", code); 149 let pos = mock.add_file_with_range("/main.rs", ra_fixture);
150 (mock.analysis(), pos) 150 (mock.analysis(), pos)
151} 151}
diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs
index af14d6ab3..2c4bdb039 100644
--- a/crates/ra_ide/src/parent_module.rs
+++ b/crates/ra_ide/src/parent_module.rs
@@ -1,6 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use ra_db::{CrateId, FileId, FilePosition, SourceDatabase}; 3use hir::Semantics;
4use ra_db::{CrateId, FileId, FilePosition};
4use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
5use ra_syntax::{ 6use ra_syntax::{
6 algo::find_node_at_offset, 7 algo::find_node_at_offset,
@@ -13,10 +14,10 @@ use crate::NavigationTarget;
13/// This returns `Vec` because a module may be included from several places. We 14/// This returns `Vec` because a module may be included from several places. We
14/// don't handle this case yet though, so the Vec has length at most one. 15/// don't handle this case yet though, so the Vec has length at most one.
15pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> { 16pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
16 let mut sb = hir::SourceBinder::new(db); 17 let sema = Semantics::new(db);
17 let parse = db.parse(position.file_id); 18 let source_file = sema.parse(position.file_id);
18 19
19 let mut module = find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset); 20 let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
20 21
21 // If cursor is literally on `mod foo`, go to the grandpa. 22 // If cursor is literally on `mod foo`, go to the grandpa.
22 if let Some(m) = &module { 23 if let Some(m) = &module {
@@ -30,8 +31,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
30 } 31 }
31 32
32 let module = match module { 33 let module = match module {
33 Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)), 34 Some(module) => sema.to_def(&module),
34 None => sb.to_module_def(position.file_id), 35 None => sema.to_module_def(position.file_id),
35 }; 36 };
36 let module = match module { 37 let module = match module {
37 None => return Vec::new(), 38 None => return Vec::new(),
@@ -43,8 +44,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
43 44
44/// Returns `Vec` for the same reason as `parent_module` 45/// Returns `Vec` for the same reason as `parent_module`
45pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> { 46pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
46 let mut sb = hir::SourceBinder::new(db); 47 let sema = Semantics::new(db);
47 let module = match sb.to_module_def(file_id) { 48 let module = match sema.to_module_def(file_id) {
48 Some(it) => it, 49 Some(it) => it,
49 None => return Vec::new(), 50 None => return Vec::new(),
50 }; 51 };
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs
index aadc2dbcb..f763013ae 100644
--- a/crates/ra_ide/src/references.rs
+++ b/crates/ra_ide/src/references.rs
@@ -13,25 +13,22 @@ mod classify;
13mod rename; 13mod rename;
14mod search_scope; 14mod search_scope;
15 15
16use crate::expand::descend_into_macros_with_analyzer; 16use hir::Semantics;
17use hir::{InFile, SourceBinder};
18use once_cell::unsync::Lazy; 17use once_cell::unsync::Lazy;
19use ra_db::{SourceDatabase, SourceDatabaseExt}; 18use ra_db::SourceDatabaseExt;
20use ra_ide_db::RootDatabase; 19use ra_ide_db::RootDatabase;
21use ra_prof::profile; 20use ra_prof::profile;
22use ra_syntax::{ 21use ra_syntax::{
23 algo::find_node_at_offset, 22 algo::find_node_at_offset,
24 ast::{self, NameOwner}, 23 ast::{self, NameOwner},
25 match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset, 24 match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset,
26}; 25};
26use test_utils::tested_by;
27 27
28use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo}; 28use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo};
29 29
30pub(crate) use self::{ 30pub(crate) use self::{classify::classify_name_ref, rename::rename};
31 classify::{classify_name, classify_name_ref}, 31pub(crate) use ra_ide_db::defs::{classify_name, NameDefinition};
32 rename::rename,
33};
34pub(crate) use ra_ide_db::defs::NameDefinition;
35 32
36pub use self::search_scope::SearchScope; 33pub use self::search_scope::SearchScope;
37 34
@@ -114,8 +111,8 @@ pub(crate) fn find_all_refs(
114 position: FilePosition, 111 position: FilePosition,
115 search_scope: Option<SearchScope>, 112 search_scope: Option<SearchScope>,
116) -> Option<RangeInfo<ReferenceSearchResult>> { 113) -> Option<RangeInfo<ReferenceSearchResult>> {
117 let parse = db.parse(position.file_id); 114 let sema = Semantics::new(db);
118 let syntax = parse.tree().syntax().clone(); 115 let syntax = sema.parse(position.file_id).syntax().clone();
119 116
120 let (opt_name, search_kind) = 117 let (opt_name, search_kind) =
121 if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) { 118 if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) {
@@ -124,7 +121,7 @@ pub(crate) fn find_all_refs(
124 (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other) 121 (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other)
125 }; 122 };
126 123
127 let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?; 124 let RangeInfo { range, info: (name, def) } = find_name(&sema, &syntax, position, opt_name)?;
128 let declaration = def.try_to_nav(db)?; 125 let declaration = def.try_to_nav(db)?;
129 126
130 let search_scope = { 127 let search_scope = {
@@ -152,19 +149,18 @@ pub(crate) fn find_all_refs(
152} 149}
153 150
154fn find_name( 151fn find_name(
155 db: &RootDatabase, 152 sema: &Semantics<RootDatabase>,
156 syntax: &SyntaxNode, 153 syntax: &SyntaxNode,
157 position: FilePosition, 154 position: FilePosition,
158 opt_name: Option<ast::Name>, 155 opt_name: Option<ast::Name>,
159) -> Option<RangeInfo<(String, NameDefinition)>> { 156) -> Option<RangeInfo<(String, NameDefinition)>> {
160 let mut sb = SourceBinder::new(db);
161 if let Some(name) = opt_name { 157 if let Some(name) = opt_name {
162 let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?; 158 let def = classify_name(sema, &name)?.definition();
163 let range = name.syntax().text_range(); 159 let range = name.syntax().text_range();
164 return Some(RangeInfo::new(range, (name.text().to_string(), def))); 160 return Some(RangeInfo::new(range, (name.text().to_string(), def)));
165 } 161 }
166 let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?; 162 let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
167 let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?; 163 let def = classify_name_ref(sema, &name_ref)?;
168 let range = name_ref.syntax().text_range(); 164 let range = name_ref.syntax().text_range();
169 Some(RangeInfo::new(range, (name_ref.text().to_string(), def))) 165 Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
170} 166}
@@ -182,64 +178,53 @@ fn process_definition(
182 178
183 for (file_id, search_range) in scope { 179 for (file_id, search_range) in scope {
184 let text = db.file_text(file_id); 180 let text = db.file_text(file_id);
181 let search_range =
182 search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text)));
185 183
186 let parse = Lazy::new(|| SourceFile::parse(&text)); 184 let sema = Semantics::new(db);
187 let mut sb = Lazy::new(|| SourceBinder::new(db)); 185 let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
188 let mut analyzer = None;
189 186
190 for (idx, _) in text.match_indices(pat) { 187 for (idx, _) in text.match_indices(pat) {
191 let offset = TextUnit::from_usize(idx); 188 let offset = TextUnit::from_usize(idx);
189 if !search_range.contains_inclusive(offset) {
190 tested_by!(search_filters_by_range);
191 continue;
192 }
192 193
193 let (name_ref, range) = if let Some(name_ref) = 194 let name_ref =
194 find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset) 195 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) {
195 { 196 name_ref
196 let range = name_ref.syntax().text_range();
197 (InFile::new(file_id.into(), name_ref), range)
198 } else {
199 // Handle macro token cases
200 let t = match parse.tree().syntax().token_at_offset(offset) {
201 TokenAtOffset::None => continue,
202 TokenAtOffset::Single(t) => t,
203 TokenAtOffset::Between(_, t) => t,
204 };
205 let range = t.text_range();
206 let analyzer = analyzer.get_or_insert_with(|| {
207 sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None)
208 });
209 let expanded = descend_into_macros_with_analyzer(
210 db,
211 &analyzer,
212 InFile::new(file_id.into(), t),
213 );
214 if let Some(token) = ast::NameRef::cast(expanded.value.parent()) {
215 (expanded.with_value(token), range)
216 } else { 197 } else {
217 continue; 198 // Handle macro token cases
218 } 199 let token = match tree.token_at_offset(offset) {
219 }; 200 TokenAtOffset::None => continue,
201 TokenAtOffset::Single(t) => t,
202 TokenAtOffset::Between(_, t) => t,
203 };
204 let expanded = sema.descend_into_macros(token);
205 match ast::NameRef::cast(expanded.parent()) {
206 Some(name_ref) => name_ref,
207 _ => continue,
208 }
209 };
220 210
221 if let Some(search_range) = search_range {
222 if !range.is_subrange(&search_range) {
223 continue;
224 }
225 }
226 // FIXME: reuse sb 211 // FIXME: reuse sb
227 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 212 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
228 213
229 if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) { 214 if let Some(d) = classify_name_ref(&sema, &name_ref) {
230 if d == def { 215 if d == def {
231 let kind = if is_record_lit_name_ref(&name_ref.value) 216 let kind =
232 || is_call_expr_name_ref(&name_ref.value) 217 if is_record_lit_name_ref(&name_ref) || is_call_expr_name_ref(&name_ref) {
233 { 218 ReferenceKind::StructLiteral
234 ReferenceKind::StructLiteral 219 } else {
235 } else { 220 ReferenceKind::Other
236 ReferenceKind::Other 221 };
237 }; 222
238 223 let file_range = sema.original_range(name_ref.syntax());
239 refs.push(Reference { 224 refs.push(Reference {
240 file_range: FileRange { file_id, range }, 225 file_range,
241 kind, 226 kind,
242 access: reference_access(&d, &name_ref.value), 227 access: reference_access(&d, &name_ref),
243 }); 228 });
244 } 229 }
245 } 230 }
@@ -348,6 +333,8 @@ fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool {
348 333
349#[cfg(test)] 334#[cfg(test)]
350mod tests { 335mod tests {
336 use test_utils::covers;
337
351 use crate::{ 338 use crate::{
352 mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis}, 339 mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis},
353 Declaration, Reference, ReferenceSearchResult, SearchScope, 340 Declaration, Reference, ReferenceSearchResult, SearchScope,
@@ -456,6 +443,27 @@ mod tests {
456 } 443 }
457 444
458 #[test] 445 #[test]
446 fn search_filters_by_range() {
447 covers!(search_filters_by_range);
448 let code = r#"
449 fn foo() {
450 let spam<|> = 92;
451 spam + spam
452 }
453 fn bar() {
454 let spam = 92;
455 spam + spam
456 }
457 "#;
458 let refs = get_all_refs(code);
459 check_result(
460 refs,
461 "spam BIND_PAT FileId(1) [44; 48) Other Write",
462 &["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"],
463 );
464 }
465
466 #[test]
459 fn test_find_all_refs_for_param_inside() { 467 fn test_find_all_refs_for_param_inside() {
460 let code = r#" 468 let code = r#"
461 fn foo(i : u32) -> u32 { 469 fn foo(i : u32) -> u32 {
diff --git a/crates/ra_ide/src/references/classify.rs b/crates/ra_ide/src/references/classify.rs
index 478e18871..91b21429a 100644
--- a/crates/ra_ide/src/references/classify.rs
+++ b/crates/ra_ide/src/references/classify.rs
@@ -1,34 +1,32 @@
1//! Functions that are used to classify an element from its definition or reference. 1//! Functions that are used to classify an element from its definition or reference.
2 2
3use hir::{InFile, PathResolution, SourceBinder}; 3use hir::{PathResolution, Semantics};
4use ra_ide_db::defs::NameDefinition;
5use ra_ide_db::RootDatabase;
4use ra_prof::profile; 6use ra_prof::profile;
5use ra_syntax::{ast, AstNode}; 7use ra_syntax::{ast, AstNode};
6use test_utils::tested_by; 8use test_utils::tested_by;
7 9
8use super::NameDefinition; 10pub use ra_ide_db::defs::{from_module_def, from_struct_field};
9use ra_ide_db::RootDatabase;
10
11pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field};
12 11
13pub(crate) fn classify_name_ref( 12pub(crate) fn classify_name_ref(
14 sb: &mut SourceBinder<RootDatabase>, 13 sema: &Semantics<RootDatabase>,
15 name_ref: InFile<&ast::NameRef>, 14 name_ref: &ast::NameRef,
16) -> Option<NameDefinition> { 15) -> Option<NameDefinition> {
17 let _p = profile("classify_name_ref"); 16 let _p = profile("classify_name_ref");
18 17
19 let parent = name_ref.value.syntax().parent()?; 18 let parent = name_ref.syntax().parent()?;
20 let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None);
21 19
22 if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) { 20 if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
23 tested_by!(goto_def_for_methods); 21 tested_by!(goto_def_for_methods);
24 if let Some(func) = analyzer.resolve_method_call(&method_call) { 22 if let Some(func) = sema.resolve_method_call(&method_call) {
25 return Some(from_module_def(func.into())); 23 return Some(from_module_def(func.into()));
26 } 24 }
27 } 25 }
28 26
29 if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { 27 if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
30 tested_by!(goto_def_for_fields); 28 tested_by!(goto_def_for_fields);
31 if let Some(field) = analyzer.resolve_field(&field_expr) { 29 if let Some(field) = sema.resolve_field(&field_expr) {
32 return Some(from_struct_field(field)); 30 return Some(from_struct_field(field));
33 } 31 }
34 } 32 }
@@ -36,22 +34,20 @@ pub(crate) fn classify_name_ref(
36 if let Some(record_field) = ast::RecordField::cast(parent.clone()) { 34 if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
37 tested_by!(goto_def_for_record_fields); 35 tested_by!(goto_def_for_record_fields);
38 tested_by!(goto_def_for_field_init_shorthand); 36 tested_by!(goto_def_for_field_init_shorthand);
39 if let Some(field_def) = analyzer.resolve_record_field(&record_field) { 37 if let Some(field_def) = sema.resolve_record_field(&record_field) {
40 return Some(from_struct_field(field_def)); 38 return Some(from_struct_field(field_def));
41 } 39 }
42 } 40 }
43 41
44 if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { 42 if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
45 tested_by!(goto_def_for_macros); 43 tested_by!(goto_def_for_macros);
46 if let Some(macro_def) = 44 if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
47 analyzer.resolve_macro_call(sb.db, name_ref.with_value(&macro_call))
48 {
49 return Some(NameDefinition::Macro(macro_def)); 45 return Some(NameDefinition::Macro(macro_def));
50 } 46 }
51 } 47 }
52 48
53 let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?; 49 let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
54 let resolved = analyzer.resolve_path(sb.db, &path)?; 50 let resolved = sema.resolve_path(&path)?;
55 let res = match resolved { 51 let res = match resolved {
56 PathResolution::Def(def) => from_module_def(def), 52 PathResolution::Def(def) => from_module_def(def),
57 PathResolution::AssocItem(item) => { 53 PathResolution::AssocItem(item) => {
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index bdb90020b..5b4bcf434 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -1,7 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::ModuleSource; 3use hir::{ModuleSource, Semantics};
4use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; 4use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt};
5use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
6use ra_syntax::{ 6use ra_syntax::{
7 algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, 7 algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode,
@@ -24,15 +24,16 @@ pub(crate) fn rename(
24 _ => return None, 24 _ => return None,
25 } 25 }
26 26
27 let parse = db.parse(position.file_id); 27 let sema = Semantics::new(db);
28 let source_file = sema.parse(position.file_id);
28 if let Some((ast_name, ast_module)) = 29 if let Some((ast_name, ast_module)) =
29 find_name_and_module_at_offset(parse.tree().syntax(), position) 30 find_name_and_module_at_offset(source_file.syntax(), position)
30 { 31 {
31 let range = ast_name.syntax().text_range(); 32 let range = ast_name.syntax().text_range();
32 rename_mod(db, &ast_name, &ast_module, position, new_name) 33 rename_mod(&sema, &ast_name, &ast_module, position, new_name)
33 .map(|info| RangeInfo::new(range, info)) 34 .map(|info| RangeInfo::new(range, info))
34 } else { 35 } else {
35 rename_reference(db, position, new_name) 36 rename_reference(sema.db, position, new_name)
36 } 37 }
37} 38}
38 39
@@ -54,7 +55,7 @@ fn source_edit_from_file_id_range(
54} 55}
55 56
56fn rename_mod( 57fn rename_mod(
57 db: &RootDatabase, 58 sema: &Semantics<RootDatabase>,
58 ast_name: &ast::Name, 59 ast_name: &ast::Name,
59 ast_module: &ast::Module, 60 ast_module: &ast::Module,
60 position: FilePosition, 61 position: FilePosition,
@@ -62,13 +63,12 @@ fn rename_mod(
62) -> Option<SourceChange> { 63) -> Option<SourceChange> {
63 let mut source_file_edits = Vec::new(); 64 let mut source_file_edits = Vec::new();
64 let mut file_system_edits = Vec::new(); 65 let mut file_system_edits = Vec::new();
65 let module_src = hir::InFile { file_id: position.file_id.into(), value: ast_module.clone() }; 66 if let Some(module) = sema.to_def(ast_module) {
66 if let Some(module) = hir::SourceBinder::new(db).to_def(module_src) { 67 let src = module.definition_source(sema.db);
67 let src = module.definition_source(db); 68 let file_id = src.file_id.original_file(sema.db);
68 let file_id = src.file_id.original_file(db);
69 match src.value { 69 match src.value {
70 ModuleSource::SourceFile(..) => { 70 ModuleSource::SourceFile(..) => {
71 let mod_path: RelativePathBuf = db.file_relative_path(file_id); 71 let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id);
72 // mod is defined in path/to/dir/mod.rs 72 // mod is defined in path/to/dir/mod.rs
73 let dst_path = if mod_path.file_stem() == Some("mod") { 73 let dst_path = if mod_path.file_stem() == Some("mod") {
74 mod_path 74 mod_path
@@ -82,7 +82,7 @@ fn rename_mod(
82 if let Some(path) = dst_path { 82 if let Some(path) = dst_path {
83 let move_file = FileSystemEdit::MoveFile { 83 let move_file = FileSystemEdit::MoveFile {
84 src: file_id, 84 src: file_id,
85 dst_source_root: db.file_source_root(position.file_id), 85 dst_source_root: sema.db.file_source_root(position.file_id),
86 dst_path: path, 86 dst_path: path,
87 }; 87 };
88 file_system_edits.push(move_file); 88 file_system_edits.push(move_file);
@@ -98,7 +98,7 @@ fn rename_mod(
98 }; 98 };
99 source_file_edits.push(edit); 99 source_file_edits.push(edit);
100 100
101 if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(db, position, None) { 101 if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) {
102 let ref_edits = refs.references.into_iter().map(|reference| { 102 let ref_edits = refs.references.into_iter().map(|reference| {
103 source_edit_from_file_id_range( 103 source_edit_from_file_id_range(
104 reference.file_range.file_id, 104 reference.file_range.file_id,
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs
index be2a67d0a..74877e90f 100644
--- a/crates/ra_ide/src/runnables.rs
+++ b/crates/ra_ide/src/runnables.rs
@@ -1,8 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{InFile, SourceBinder}; 3use hir::Semantics;
4use itertools::Itertools; 4use itertools::Itertools;
5use ra_db::SourceDatabase;
6use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
7use ra_syntax::{ 6use ra_syntax::{
8 ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner}, 7 ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner},
@@ -42,46 +41,33 @@ pub enum RunnableKind {
42} 41}
43 42
44pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { 43pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
45 let parse = db.parse(file_id); 44 let sema = Semantics::new(db);
46 let mut sb = SourceBinder::new(db); 45 let source_file = sema.parse(file_id);
47 parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect() 46 source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
48} 47}
49 48
50fn runnable( 49fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
51 db: &RootDatabase,
52 source_binder: &mut SourceBinder<RootDatabase>,
53 file_id: FileId,
54 item: SyntaxNode,
55) -> Option<Runnable> {
56 match_ast! { 50 match_ast! {
57 match item { 51 match item {
58 ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) }, 52 ast::FnDef(it) => { runnable_fn(sema, it) },
59 ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) }, 53 ast::Module(it) => { runnable_mod(sema, it) },
60 _ => { None }, 54 _ => None,
61 } 55 }
62 } 56 }
63} 57}
64 58
65fn runnable_fn( 59fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
66 db: &RootDatabase,
67 source_binder: &mut SourceBinder<RootDatabase>,
68 file_id: FileId,
69 fn_def: ast::FnDef,
70) -> Option<Runnable> {
71 let name_string = fn_def.name()?.text().to_string(); 60 let name_string = fn_def.name()?.text().to_string();
72 61
73 let kind = if name_string == "main" { 62 let kind = if name_string == "main" {
74 RunnableKind::Bin 63 RunnableKind::Bin
75 } else { 64 } else {
76 let test_id = if let Some(module) = source_binder 65 let test_id = if let Some(module) = sema.to_def(&fn_def).map(|def| def.module(sema.db)) {
77 .to_def(InFile::new(file_id.into(), fn_def.clone()))
78 .map(|def| def.module(db))
79 {
80 let path = module 66 let path = module
81 .path_to_root(db) 67 .path_to_root(sema.db)
82 .into_iter() 68 .into_iter()
83 .rev() 69 .rev()
84 .filter_map(|it| it.name(db)) 70 .filter_map(|it| it.name(sema.db))
85 .map(|name| name.to_string()) 71 .map(|name| name.to_string())
86 .chain(std::iter::once(name_string)) 72 .chain(std::iter::once(name_string))
87 .join("::"); 73 .join("::");
@@ -115,12 +101,7 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
115 .any(|attribute_text| attribute_text.contains("test")) 101 .any(|attribute_text| attribute_text.contains("test"))
116} 102}
117 103
118fn runnable_mod( 104fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
119 db: &RootDatabase,
120 source_binder: &mut SourceBinder<RootDatabase>,
121 file_id: FileId,
122 module: ast::Module,
123) -> Option<Runnable> {
124 let has_test_function = module 105 let has_test_function = module
125 .item_list()? 106 .item_list()?
126 .items() 107 .items()
@@ -133,9 +114,10 @@ fn runnable_mod(
133 return None; 114 return None;
134 } 115 }
135 let range = module.syntax().text_range(); 116 let range = module.syntax().text_range();
136 let module = source_binder.to_def(InFile::new(file_id.into(), module))?; 117 let module = sema.to_def(&module)?;
137 118
138 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); 119 let path =
120 module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
139 Some(Runnable { range, kind: RunnableKind::TestMod { path } }) 121 Some(Runnable { range, kind: RunnableKind::TestMod { path } })
140} 122}
141 123
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index a02dbaf2f..495b07f69 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -3,70 +3,78 @@
3body { margin: 0; } 3body { margin: 0; }
4pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; } 4pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
5 5
6.lifetime { color: #DFAF8F; font-style: italic; }
6.comment { color: #7F9F7F; } 7.comment { color: #7F9F7F; }
7.string { color: #CC9393; } 8.struct, .enum { color: #7CB8BB; }
9.enum_variant { color: #BDE0F3; }
10.string_literal { color: #CC9393; }
8.field { color: #94BFF3; } 11.field { color: #94BFF3; }
9.function { color: #93E0E3; } 12.function { color: #93E0E3; }
10.parameter { color: #94BFF3; } 13.parameter { color: #94BFF3; }
11.text { color: #DCDCCC; } 14.text { color: #DCDCCC; }
12.type { color: #7CB8BB; } 15.type { color: #7CB8BB; }
13.type\.builtin { color: #8CD0D3; } 16.builtin_type { color: #8CD0D3; }
14.type\.param { color: #20999D; } 17.type_param { color: #DFAF8F; }
15.attribute { color: #94BFF3; } 18.attribute { color: #94BFF3; }
16.literal { color: #BFEBBF; } 19.numeric_literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 20.macro { color: #94BFF3; }
19.module { color: #AFD8AF; } 21.module { color: #AFD8AF; }
20.variable { color: #DCDCCC; } 22.variable { color: #DCDCCC; }
21.variable\.mut { color: #DCDCCC; text-decoration: underline; } 23.mutable { text-decoration: underline; }
22 24
23.keyword { color: #F0DFAF; } 25.keyword { color: #F0DFAF; font-weight: bold; }
24.keyword\.unsafe { color: #DFAF8F; } 26.keyword.unsafe { color: #BC8383; font-weight: bold; }
25.keyword\.control { color: #F0DFAF; font-weight: bold; } 27.control { font-style: italic; }
26</style> 28</style>
27<pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span> 29<pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span>
28<span class="keyword">struct</span> <span class="type">Foo</span> { 30<span class="keyword">struct</span> <span class="struct declaration">Foo</span> {
29 <span class="keyword">pub</span> <span class="field">x</span>: <span class="type.builtin">i32</span>, 31 <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>,
30 <span class="keyword">pub</span> <span class="field">y</span>: <span class="type.builtin">i32</span>, 32 <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>,
31} 33}
32 34
33<span class="keyword">fn</span> <span class="function">foo</span>&lt;<span class="type.param">T</span>&gt;() -&gt; <span class="type.param">T</span> { 35<span class="keyword">fn</span> <span class="function declaration">foo</span>&lt;<span class="lifetime declaration">'a</span>, <span class="type_param declaration">T</span>&gt;() -&gt; <span class="type_param">T</span> {
34 <span class="macro">unimplemented</span><span class="macro">!</span>(); 36 <span class="function">foo</span>::&lt;<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>&gt;()
35 <span class="function">foo</span>::&lt;<span class="type.builtin">i32</span>&gt;();
36} 37}
37 38
38<span class="macro">macro_rules</span><span class="macro">!</span> def_fn { 39<span class="macro">macro_rules</span><span class="macro">!</span> def_fn {
39 ($($tt:tt)*) =&gt; {$($tt)*} 40 ($($tt:tt)*) =&gt; {$($tt)*}
40} 41}
41 42
42<span class="macro">def_fn</span><span class="macro">!</span>{ 43<span class="macro">def_fn</span><span class="macro">!</span> {
43 <span class="keyword">fn</span> <span class="function">bar</span>() -&gt; <span class="type.builtin">u32</span> { 44 <span class="keyword">fn</span> <span class="function declaration">bar</span>() -&gt; <span class="builtin_type">u32</span> {
44 <span class="literal.numeric">100</span> 45 <span class="numeric_literal">100</span>
45 } 46 }
46} 47}
47 48
48<span class="comment">// comment</span> 49<span class="comment">// comment</span>
49<span class="keyword">fn</span> <span class="function">main</span>() { 50<span class="keyword">fn</span> <span class="function declaration">main</span>() {
50 <span class="macro">println</span><span class="macro">!</span>(<span class="string">"Hello, {}!"</span>, <span class="literal.numeric">92</span>); 51 <span class="macro">println</span><span class="macro">!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);
51 52
52 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut">vec</span> = Vec::new(); 53 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new();
53 <span class="keyword.control">if</span> <span class="keyword">true</span> { 54 <span class="keyword control">if</span> <span class="keyword">true</span> {
54 <span class="keyword">let</span> <span class="variable">x</span> = <span class="literal.numeric">92</span>; 55 <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>;
55 <span class="variable.mut">vec</span>.push(<span class="type">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="literal.numeric">1</span> }); 56 <span class="variable mutable">vec</span>.push(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> });
56 } 57 }
57 <span class="keyword.unsafe">unsafe</span> { <span class="variable.mut">vec</span>.set_len(<span class="literal.numeric">0</span>); } 58 <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.set_len(<span class="numeric_literal">0</span>); }
58 59
59 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut">x</span> = <span class="literal.numeric">42</span>; 60 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>;
60 <span class="keyword">let</span> <span class="variable.mut">y</span> = &<span class="keyword">mut</span> <span class="variable.mut">x</span>; 61 <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>;
61 <span class="keyword">let</span> <span class="variable">z</span> = &<span class="variable.mut">y</span>; 62 <span class="keyword">let</span> <span class="variable declaration">z</span> = &<span class="variable mutable">y</span>;
62 63
63 <span class="variable.mut">y</span>; 64 <span class="variable mutable">y</span>;
64} 65}
65 66
66<span class="keyword">enum</span> <span class="type">E</span>&lt;<span class="type.param">X</span>&gt; { 67<span class="keyword">enum</span> <span class="enum declaration">Option</span>&lt;<span class="type_param declaration">T</span>&gt; {
67 <span class="constant">V</span>(<span class="type.param">X</span>) 68 <span class="enum_variant declaration">Some</span>(<span class="type_param">T</span>),
69 <span class="enum_variant declaration">None</span>,
68} 70}
71<span class="keyword">use</span> <span class="enum">Option</span>::*;
69 72
70<span class="keyword">impl</span>&lt;<span class="type.param">X</span>&gt; <span class="type">E</span>&lt;<span class="type.param">X</span>&gt; { 73<span class="keyword">impl</span>&lt;<span class="type_param declaration">T</span>&gt; <span class="enum">Option</span>&lt;<span class="type_param">T</span>&gt; {
71 <span class="keyword">fn</span> <span class="function">new</span>&lt;<span class="type.param">T</span>&gt;() -&gt; <span class="type">E</span>&lt;<span class="type.param">T</span>&gt; {} 74 <span class="keyword">fn</span> <span class="function declaration">and</span>&lt;<span class="type_param declaration">U</span>&gt;(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span>&lt;<span class="type_param">U</span>&gt;) -&gt; <span class="enum">Option</span>&lt;(<span class="type_param">T</span>, <span class="type_param">U</span>)&gt; {
75 <span class="keyword control">match</span> <span class="variable">other</span> {
76 <span class="enum_variant">None</span> =&gt; <span class="macro">unimplemented</span><span class="macro">!</span>(),
77 <span class="variable declaration">Nope</span> =&gt; <span class="variable">Nope</span>,
78 }
79 }
72}</code></pre> \ No newline at end of file 80}</code></pre> \ No newline at end of file
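
The regenerated snapshot above reflects the new class scheme: the old dotted classes (type.builtin, type.param, variable.mut) become a flat tag plus space-separated modifier classes (builtin_type, type_param, struct declaration, variable declaration mutable), so a stylesheet can target a modifier such as mutable independently of the tag it is attached to. A minimal sketch of how a tag and its modifiers turn into such a class list; the function and names here are illustrative, not rust-analyzer's API:

    // Sketch: render a highlight tag plus modifiers as the space-separated
    // class list seen in the snapshot ("struct declaration", ...).
    fn class_list(tag: &str, modifiers: &[&str]) -> String {
        std::iter::once(tag).chain(modifiers.iter().copied()).collect::<Vec<_>>().join(" ")
    }

    fn main() {
        assert_eq!(class_list("struct", &["declaration"]), "struct declaration");
        assert_eq!(
            class_list("variable", &["declaration", "mutable"]),
            "variable declaration mutable"
        );
    }
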
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index 95f038f00..dddbfc0dd 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -3,36 +3,38 @@
3body { margin: 0; } 3body { margin: 0; }
4pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; } 4pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
5 5
6.lifetime { color: #DFAF8F; font-style: italic; }
6.comment { color: #7F9F7F; } 7.comment { color: #7F9F7F; }
7.string { color: #CC9393; } 8.struct, .enum { color: #7CB8BB; }
9.enum_variant { color: #BDE0F3; }
10.string_literal { color: #CC9393; }
8.field { color: #94BFF3; } 11.field { color: #94BFF3; }
9.function { color: #93E0E3; } 12.function { color: #93E0E3; }
10.parameter { color: #94BFF3; } 13.parameter { color: #94BFF3; }
11.text { color: #DCDCCC; } 14.text { color: #DCDCCC; }
12.type { color: #7CB8BB; } 15.type { color: #7CB8BB; }
13.type\.builtin { color: #8CD0D3; } 16.builtin_type { color: #8CD0D3; }
14.type\.param { color: #20999D; } 17.type_param { color: #DFAF8F; }
15.attribute { color: #94BFF3; } 18.attribute { color: #94BFF3; }
16.literal { color: #BFEBBF; } 19.numeric_literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 20.macro { color: #94BFF3; }
19.module { color: #AFD8AF; } 21.module { color: #AFD8AF; }
20.variable { color: #DCDCCC; } 22.variable { color: #DCDCCC; }
21.variable\.mut { color: #DCDCCC; text-decoration: underline; } 23.mutable { text-decoration: underline; }
22 24
23.keyword { color: #F0DFAF; } 25.keyword { color: #F0DFAF; font-weight: bold; }
24.keyword\.unsafe { color: #DFAF8F; } 26.keyword.unsafe { color: #BC8383; font-weight: bold; }
25.keyword\.control { color: #F0DFAF; font-weight: bold; } 27.control { font-style: italic; }
26</style> 28</style>
27<pre><code><span class="keyword">fn</span> <span class="function">main</span>() { 29<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() {
28 <span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; 30 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>;
29 <span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); 31 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
30 <span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string(); 32 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
31 33
32 <span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>; 34 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>;
33 <span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string(); 35 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string();
34} 36}
35 37
36<span class="keyword">fn</span> <span class="function">bar</span>() { 38<span class="keyword">fn</span> <span class="function declaration">bar</span>() {
37 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>; 39 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>;
38}</code></pre> \ No newline at end of file 40}</code></pre> \ No newline at end of file
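
The rainbow snapshot shows the binding-coloring scheme the rewrite keeps: each local binding is hashed from its name and shadow count (calc_binding_hash now hashes only those two, no longer the file id, which is why the hashes in the snapshot changed), and the HTML backend derives a deterministic HSL color from that hash, so every use of one binding shares a color while a shadowing rebinding gets a fresh one. A condensed, self-contained sketch of the idea; the real color choice seeds rand::SmallRng with the hash (see html.rs further down), and the modulo arithmetic below is only a stand-in:

    // Sketch: hash (name, shadow_count) and derive a stable HSL color from it,
    // matching the data-binding-hash / style attributes in the snapshot.
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    fn binding_hash(name: &str, shadow_count: u32) -> u64 {
        let mut hasher = DefaultHasher::new();
        (name, shadow_count).hash(&mut hasher);
        hasher.finish()
    }

    fn rainbowify(seed: u64) -> String {
        // Stand-in for the SmallRng version: hue 0..361, saturation 42..99,
        // lightness 40..91, the same ranges the real code draws from.
        format!("hsl({},{}%,{}%)", seed % 361, 42 + seed % 57, 40 + seed % 51)
    }

    fn main() {
        let first = binding_hash("hello", 1);
        let shadowed = binding_hash("hello", 2); // a later `let hello = ...`
        println!("{} / {}", rainbowify(first), rainbowify(shadowed));
        assert_eq!(rainbowify(first), rainbowify(first)); // same binding, same color
    }
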
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
index 902c29fc6..c011a2e74 100644
--- a/crates/ra_ide/src/ssr.rs
+++ b/crates/ra_ide/src/ssr.rs
@@ -3,9 +3,8 @@
3use crate::source_change::SourceFileEdit; 3use crate::source_change::SourceFileEdit;
4use ra_ide_db::RootDatabase; 4use ra_ide_db::RootDatabase;
5use ra_syntax::ast::make::expr_from_text; 5use ra_syntax::ast::make::expr_from_text;
6use ra_syntax::AstNode; 6use ra_syntax::ast::{AstToken, Comment};
7use ra_syntax::SyntaxElement; 7use ra_syntax::{AstNode, SyntaxElement, SyntaxNode};
8use ra_syntax::SyntaxNode;
9use ra_text_edit::{TextEdit, TextEditBuilder}; 8use ra_text_edit::{TextEdit, TextEditBuilder};
10use rustc_hash::FxHashMap; 9use rustc_hash::FxHashMap;
11use std::collections::HashMap; 10use std::collections::HashMap;
@@ -72,6 +71,7 @@ type Binding = HashMap<Var, SyntaxNode>;
72struct Match { 71struct Match {
73 place: SyntaxNode, 72 place: SyntaxNode,
74 binding: Binding, 73 binding: Binding,
74 ignored_comments: Vec<Comment>,
75} 75}
76 76
77#[derive(Debug)] 77#[derive(Debug)]
@@ -179,44 +179,61 @@ fn find(pattern: &SsrPattern, code: &SyntaxNode) -> SsrMatches {
179 pattern: &SyntaxElement, 179 pattern: &SyntaxElement,
180 code: &SyntaxElement, 180 code: &SyntaxElement,
181 placeholders: &[Var], 181 placeholders: &[Var],
182 match_: &mut Match, 182 mut match_: Match,
183 ) -> bool { 183 ) -> Option<Match> {
184 match (pattern, code) { 184 match (pattern, code) {
185 (SyntaxElement::Token(ref pattern), SyntaxElement::Token(ref code)) => { 185 (SyntaxElement::Token(ref pattern), SyntaxElement::Token(ref code)) => {
186 pattern.text() == code.text() 186 if pattern.text() == code.text() {
187 Some(match_)
188 } else {
189 None
190 }
187 } 191 }
188 (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => { 192 (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => {
189 if placeholders.iter().any(|n| n.0.as_str() == pattern.text()) { 193 if placeholders.iter().any(|n| n.0.as_str() == pattern.text()) {
190 match_.binding.insert(Var(pattern.text().to_string()), code.clone()); 194 match_.binding.insert(Var(pattern.text().to_string()), code.clone());
191 true 195 Some(match_)
192 } else { 196 } else {
193 pattern.green().children().count() == code.green().children().count() 197 let mut pattern_children = pattern
194 && pattern 198 .children_with_tokens()
195 .children_with_tokens() 199 .filter(|element| !element.kind().is_trivia());
196 .zip(code.children_with_tokens()) 200 let mut code_children =
197 .all(|(a, b)| check(&a, &b, placeholders, match_)) 201 code.children_with_tokens().filter(|element| !element.kind().is_trivia());
202 let new_ignored_comments = code.children_with_tokens().filter_map(|element| {
203 element.as_token().and_then(|token| Comment::cast(token.clone()))
204 });
205 match_.ignored_comments.extend(new_ignored_comments);
206 let match_from_children = pattern_children
207 .by_ref()
208 .zip(code_children.by_ref())
209 .fold(Some(match_), |accum, (a, b)| {
210 accum.and_then(|match_| check(&a, &b, placeholders, match_))
211 });
212 match_from_children.and_then(|match_| {
213 if pattern_children.count() == 0 && code_children.count() == 0 {
214 Some(match_)
215 } else {
216 None
217 }
218 })
198 } 219 }
199 } 220 }
200 _ => false, 221 _ => None,
201 } 222 }
202 } 223 }
203 let kind = pattern.pattern.kind(); 224 let kind = pattern.pattern.kind();
204 let matches = code 225 let matches = code
205 .descendants_with_tokens() 226 .descendants()
206 .filter(|n| n.kind() == kind) 227 .filter(|n| n.kind() == kind)
207 .filter_map(|code| { 228 .filter_map(|code| {
208 let mut match_ = 229 let match_ =
209 Match { place: code.as_node().unwrap().clone(), binding: HashMap::new() }; 230 Match { place: code.clone(), binding: HashMap::new(), ignored_comments: vec![] };
210 if check( 231 check(
211 &SyntaxElement::from(pattern.pattern.clone()), 232 &SyntaxElement::from(pattern.pattern.clone()),
212 &code, 233 &SyntaxElement::from(code),
213 &pattern.vars, 234 &pattern.vars,
214 &mut match_, 235 match_,
215 ) { 236 )
216 Some(match_)
217 } else {
218 None
219 }
220 }) 237 })
221 .collect(); 238 .collect();
222 SsrMatches { matches } 239 SsrMatches { matches }
@@ -225,18 +242,28 @@ fn find(pattern: &SsrPattern, code: &SyntaxNode) -> SsrMatches {
225fn replace(matches: &SsrMatches, template: &SsrTemplate) -> TextEdit { 242fn replace(matches: &SsrMatches, template: &SsrTemplate) -> TextEdit {
226 let mut builder = TextEditBuilder::default(); 243 let mut builder = TextEditBuilder::default();
227 for match_ in &matches.matches { 244 for match_ in &matches.matches {
228 builder.replace(match_.place.text_range(), render_replace(&match_.binding, template)); 245 builder.replace(
246 match_.place.text_range(),
247 render_replace(&match_.binding, &match_.ignored_comments, template),
248 );
229 } 249 }
230 builder.finish() 250 builder.finish()
231} 251}
232 252
233fn render_replace(binding: &Binding, template: &SsrTemplate) -> String { 253fn render_replace(
254 binding: &Binding,
255 ignored_comments: &Vec<Comment>,
256 template: &SsrTemplate,
257) -> String {
234 let mut builder = TextEditBuilder::default(); 258 let mut builder = TextEditBuilder::default();
235 for element in template.template.descendants() { 259 for element in template.template.descendants() {
236 if let Some(var) = template.placeholders.get(&element) { 260 if let Some(var) = template.placeholders.get(&element) {
237 builder.replace(element.text_range(), binding[var].to_string()) 261 builder.replace(element.text_range(), binding[var].to_string())
238 } 262 }
239 } 263 }
264 for comment in ignored_comments {
265 builder.insert(template.template.text_range().end(), comment.syntax().to_string())
266 }
240 builder.finish().apply(&template.template.text().to_string()) 267 builder.finish().apply(&template.template.text().to_string())
241} 268}
242 269
@@ -325,4 +352,66 @@ mod tests {
325 let edit = replace(&matches, &query.template); 352 let edit = replace(&matches, &query.template);
326 assert_eq!(edit.apply(input), "fn main() { bar(1+2); }"); 353 assert_eq!(edit.apply(input), "fn main() { bar(1+2); }");
327 } 354 }
355
356 fn assert_ssr_transform(query: &str, input: &str, result: &str) {
357 let query: SsrQuery = query.parse().unwrap();
358 let code = SourceFile::parse(input).tree();
359 let matches = find(&query.pattern, code.syntax());
360 let edit = replace(&matches, &query.template);
361 assert_eq!(edit.apply(input), result);
362 }
363
364 #[test]
365 fn ssr_function_to_method() {
366 assert_ssr_transform(
367 "my_function($a:expr, $b:expr) ==>> ($a).my_method($b)",
368 "loop { my_function( other_func(x, y), z + w) }",
369 "loop { (other_func(x, y)).my_method(z + w) }",
370 )
371 }
372
373 #[test]
374 fn ssr_nested_function() {
375 assert_ssr_transform(
376 "foo($a:expr, $b:expr, $c:expr) ==>> bar($c, baz($a, $b))",
377 "fn main { foo (x + value.method(b), x+y-z, true && false) }",
378 "fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }",
379 )
380 }
381
382 #[test]
383 fn ssr_expected_spacing() {
384 assert_ssr_transform(
385 "foo($x:expr) + bar() ==>> bar($x)",
386 "fn main() { foo(5) + bar() }",
387 "fn main() { bar(5) }",
388 );
389 }
390
391 #[test]
392 fn ssr_with_extra_space() {
393 assert_ssr_transform(
394 "foo($x:expr ) + bar() ==>> bar($x)",
395 "fn main() { foo( 5 ) +bar( ) }",
396 "fn main() { bar(5) }",
397 );
398 }
399
400 #[test]
401 fn ssr_keeps_nested_comment() {
402 assert_ssr_transform(
403 "foo($x:expr) ==>> bar($x)",
404 "fn main() { foo(other(5 /* using 5 */)) }",
405 "fn main() { bar(other(5 /* using 5 */)) }",
406 )
407 }
408
409 #[test]
410 fn ssr_keeps_comment() {
411 assert_ssr_transform(
412 "foo($x:expr) ==>> bar($x)",
413 "fn main() { foo(5 /* using 5 */) }",
414 "fn main() { bar(5)/* using 5 */ }",
415 )
416 }
328} 417}
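
The matcher rework above replaces the &mut Match plus bool protocol with ownership threading: check consumes the Match and returns Option<Match>, children are compared with trivia filtered out by folding that Option through the zipped child iterators, and comments encountered in the matched code are collected into ignored_comments so that replace can re-attach them after the rendered template (the ssr_keeps_comment test pins that behaviour). A toy, self-contained sketch of the Option-threading fold over a simplified tree; Node, Binding and the $-placeholder convention are stand-ins rather than the ra_syntax types:

    // Toy version of `check`: placeholders bind whole subtrees, other nodes must
    // match child by child, and a failure anywhere collapses the fold to None.
    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    enum Node {
        Leaf(String),
        Branch(Vec<Node>),
    }

    type Binding = HashMap<String, Node>;

    fn check(pattern: &Node, code: &Node, placeholders: &[String], mut binding: Binding) -> Option<Binding> {
        match (pattern, code) {
            // A placeholder matches anything and records what it matched.
            (Node::Leaf(p), _) if placeholders.contains(p) => {
                binding.insert(p.clone(), code.clone());
                Some(binding)
            }
            (Node::Leaf(p), Node::Leaf(c)) => {
                if p == c {
                    Some(binding)
                } else {
                    None
                }
            }
            (Node::Branch(ps), Node::Branch(cs)) => {
                if ps.len() != cs.len() {
                    return None;
                }
                // Thread the binding through the children, failing fast on None.
                ps.iter().zip(cs).fold(Some(binding), |acc, (p, c)| {
                    acc.and_then(|b| check(p, c, placeholders, b))
                })
            }
            _ => None,
        }
    }

    fn main() {
        // Roughly: pattern "foo($x)" against code "foo(1 + 2)".
        let pattern = Node::Branch(vec![Node::Leaf("foo".into()), Node::Leaf("$x".into())]);
        let code = Node::Branch(vec![
            Node::Leaf("foo".into()),
            Node::Branch(vec!["1", "+", "2"].into_iter().map(|s| Node::Leaf(s.into())).collect()),
        ]);
        let binding = check(&pattern, &code, &["$x".to_string()], Binding::new()).unwrap();
        println!("$x bound to {:?}", binding["$x"]);
    }
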
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 812229b4e..b94b6a022 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -1,134 +1,141 @@
1//! FIXME: write short doc here 1//! Implements syntax highlighting.
2 2
3use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder}; 3mod tags;
4use ra_db::SourceDatabase; 4mod html;
5use ra_ide_db::{defs::NameDefinition, RootDatabase}; 5#[cfg(test)]
6mod tests;
7
8use hir::{Name, Semantics};
9use ra_ide_db::{
10 defs::{classify_name, NameClass, NameDefinition},
11 RootDatabase,
12};
6use ra_prof::profile; 13use ra_prof::profile;
7use ra_syntax::{ 14use ra_syntax::{
8 ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange, 15 ast::{self, HasQuotes, HasStringValue},
9 WalkEvent, T, 16 AstNode, AstToken, Direction, NodeOrToken, SyntaxElement,
17 SyntaxKind::*,
18 SyntaxToken, TextRange, WalkEvent, T,
10}; 19};
11use rustc_hash::FxHashMap; 20use rustc_hash::FxHashMap;
12 21
13use crate::{ 22use crate::{call_info::call_info_for_token, references::classify_name_ref, Analysis, FileId};
14 expand::descend_into_macros_with_analyzer,
15 references::{classify_name, classify_name_ref},
16 FileId,
17};
18 23
19pub mod tags { 24pub(crate) use html::highlight_as_html;
20 pub const FIELD: &str = "field"; 25pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag};
21 pub const FUNCTION: &str = "function";
22 pub const MODULE: &str = "module";
23 pub const CONSTANT: &str = "constant";
24 pub const MACRO: &str = "macro";
25
26 pub const VARIABLE: &str = "variable";
27 pub const VARIABLE_MUT: &str = "variable.mut";
28
29 pub const TYPE: &str = "type";
30 pub const TYPE_BUILTIN: &str = "type.builtin";
31 pub const TYPE_SELF: &str = "type.self";
32 pub const TYPE_PARAM: &str = "type.param";
33 pub const TYPE_LIFETIME: &str = "type.lifetime";
34
35 pub const LITERAL_BYTE: &str = "literal.byte";
36 pub const LITERAL_NUMERIC: &str = "literal.numeric";
37 pub const LITERAL_CHAR: &str = "literal.char";
38
39 pub const LITERAL_COMMENT: &str = "comment";
40 pub const LITERAL_STRING: &str = "string";
41 pub const LITERAL_ATTRIBUTE: &str = "attribute";
42
43 pub const KEYWORD: &str = "keyword";
44 pub const KEYWORD_UNSAFE: &str = "keyword.unsafe";
45 pub const KEYWORD_CONTROL: &str = "keyword.control";
46}
47 26
48#[derive(Debug)] 27#[derive(Debug)]
49pub struct HighlightedRange { 28pub struct HighlightedRange {
50 pub range: TextRange, 29 pub range: TextRange,
51 pub tag: &'static str, 30 pub highlight: Highlight,
52 pub binding_hash: Option<u64>, 31 pub binding_hash: Option<u64>,
53} 32}
54 33
55fn is_control_keyword(kind: SyntaxKind) -> bool { 34pub(crate) fn highlight(
56 match kind { 35 db: &RootDatabase,
57 T![for] 36 file_id: FileId,
58 | T![loop] 37 range_to_highlight: Option<TextRange>,
59 | T![while] 38) -> Vec<HighlightedRange> {
60 | T![continue]
61 | T![break]
62 | T![if]
63 | T![else]
64 | T![match]
65 | T![return] => true,
66 _ => false,
67 }
68}
69
70pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
71 let _p = profile("highlight"); 39 let _p = profile("highlight");
72 let parse = db.parse(file_id); 40 let sema = Semantics::new(db);
73 let root = parse.tree().syntax().clone(); 41
42 // Determine the root based on the given range.
43 let (root, range_to_highlight) = {
44 let source_file = sema.parse(file_id);
45 match range_to_highlight {
46 Some(range) => {
47 let node = match source_file.syntax().covering_element(range) {
48 NodeOrToken::Node(it) => it,
49 NodeOrToken::Token(it) => it.parent(),
50 };
51 (node, range)
52 }
53 None => (source_file.syntax().clone(), source_file.syntax().text_range()),
54 }
55 };
74 56
75 let mut sb = SourceBinder::new(db);
76 let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default(); 57 let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
77 let mut res = Vec::new(); 58 let mut res = Vec::new();
78 let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None);
79 59
80 let mut in_macro_call = None; 60 let mut current_macro_call: Option<ast::MacroCall> = None;
81 61
62 // Walk all nodes, keeping track of whether we are inside a macro or not.
63 // If in macro, expand it first and highlight the expanded code.
82 for event in root.preorder_with_tokens() { 64 for event in root.preorder_with_tokens() {
83 match event { 65 let event_range = match &event {
84 WalkEvent::Enter(node) => match node.kind() { 66 WalkEvent::Enter(it) => it.text_range(),
85 MACRO_CALL => { 67 WalkEvent::Leave(it) => it.text_range(),
86 in_macro_call = Some(node.clone()); 68 };
87 if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) { 69
88 res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None }); 70 // Element outside of the viewport, no need to highlight
89 } 71 if range_to_highlight.intersection(&event_range).is_none() {
90 } 72 continue;
91 _ if in_macro_call.is_some() => { 73 }
92 if let Some(token) = node.as_token() { 74
93 if let Some((tag, binding_hash)) = highlight_token_tree( 75 // Track "inside macro" state
94 &mut sb, 76 match event.clone().map(|it| it.into_node().and_then(ast::MacroCall::cast)) {
95 &analyzer, 77 WalkEvent::Enter(Some(mc)) => {
96 &mut bindings_shadow_count, 78 current_macro_call = Some(mc.clone());
97 InFile::new(file_id.into(), token.clone()), 79 if let Some(range) = macro_call_range(&mc) {
98 ) { 80 res.push(HighlightedRange {
99 res.push(HighlightedRange { 81 range,
100 range: node.text_range(), 82 highlight: HighlightTag::Macro.into(),
101 tag, 83 binding_hash: None,
102 binding_hash, 84 });
103 });
104 }
105 }
106 }
107 _ => {
108 if let Some((tag, binding_hash)) = highlight_node(
109 &mut sb,
110 &mut bindings_shadow_count,
111 InFile::new(file_id.into(), node.clone()),
112 ) {
113 res.push(HighlightedRange { range: node.text_range(), tag, binding_hash });
114 }
115 }
116 },
117 WalkEvent::Leave(node) => {
118 if let Some(m) = in_macro_call.as_ref() {
119 if *m == node {
120 in_macro_call = None;
121 }
122 } 85 }
86 continue;
123 } 87 }
88 WalkEvent::Leave(Some(mc)) => {
89 assert!(current_macro_call == Some(mc));
90 current_macro_call = None;
91 continue;
92 }
93 _ => (),
94 }
95
96 let element = match event {
97 WalkEvent::Enter(it) => it,
98 WalkEvent::Leave(_) => continue,
99 };
100
101 let range = element.text_range();
102
103 let element_to_highlight = if current_macro_call.is_some() {
104 // Inside a macro -- expand it first
105 let token = match element.clone().into_token() {
106 Some(it) if it.parent().kind() == TOKEN_TREE => it,
107 _ => continue,
108 };
109 let token = sema.descend_into_macros(token.clone());
110 let parent = token.parent();
111 // We only care about Name and Name_ref
112 match (token.kind(), parent.kind()) {
113 (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
114 _ => token.into(),
115 }
116 } else {
117 element.clone()
118 };
119
120 if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) {
121 let expanded = element_to_highlight.as_token().unwrap().clone();
122 if highlight_injection(&mut res, &sema, token, expanded).is_some() {
123 eprintln!("res = {:?}", res);
124 continue;
125 }
126 }
127
128 if let Some((highlight, binding_hash)) =
129 highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight)
130 {
131 res.push(HighlightedRange { range, highlight, binding_hash });
124 } 132 }
125 } 133 }
126 134
127 res 135 res
128} 136}
129 137
130fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> { 138fn macro_call_range(macro_call: &ast::MacroCall) -> Option<TextRange> {
131 let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?;
132 let path = macro_call.path()?; 139 let path = macro_call.path()?;
133 let name_ref = path.segment()?.name_ref()?; 140 let name_ref = path.segment()?.name_ref()?;
134 141
@@ -144,101 +151,100 @@ fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> {
144 Some(TextRange::from_to(range_start, range_end)) 151 Some(TextRange::from_to(range_start, range_end))
145} 152}
146 153
147fn highlight_token_tree( 154fn highlight_element(
148 sb: &mut SourceBinder<RootDatabase>, 155 sema: &Semantics<RootDatabase>,
149 analyzer: &SourceAnalyzer,
150 bindings_shadow_count: &mut FxHashMap<Name, u32>,
151 token: InFile<SyntaxToken>,
152) -> Option<(&'static str, Option<u64>)> {
153 if token.value.parent().kind() != TOKEN_TREE {
154 return None;
155 }
156 let token = descend_into_macros_with_analyzer(sb.db, analyzer, token);
157 let expanded = {
158 let parent = token.value.parent();
159 // We only care Name and Name_ref
160 match (token.value.kind(), parent.kind()) {
161 (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()),
162 _ => token.map(|it| it.into()),
163 }
164 };
165
166 highlight_node(sb, bindings_shadow_count, expanded)
167}
168
169fn highlight_node(
170 sb: &mut SourceBinder<RootDatabase>,
171 bindings_shadow_count: &mut FxHashMap<Name, u32>, 156 bindings_shadow_count: &mut FxHashMap<Name, u32>,
172 node: InFile<SyntaxElement>, 157 element: SyntaxElement,
173) -> Option<(&'static str, Option<u64>)> { 158) -> Option<(Highlight, Option<u64>)> {
174 let db = sb.db; 159 let db = sema.db;
175 let mut binding_hash = None; 160 let mut binding_hash = None;
176 let tag = match node.value.kind() { 161 let highlight: Highlight = match element.kind() {
177 FN_DEF => { 162 FN_DEF => {
178 bindings_shadow_count.clear(); 163 bindings_shadow_count.clear();
179 return None; 164 return None;
180 } 165 }
181 COMMENT => tags::LITERAL_COMMENT, 166
182 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING, 167 // Highlight definitions depending on the "type" of the definition.
183 ATTR => tags::LITERAL_ATTRIBUTE, 168 NAME => {
184 // Special-case field init shorthand 169 let name = element.into_node().and_then(ast::Name::cast).unwrap();
185 NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD, 170 let name_kind = classify_name(sema, &name);
186 NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None, 171
187 NAME_REF => { 172 if let Some(NameClass::NameDefinition(NameDefinition::Local(local))) = &name_kind {
188 let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap(); 173 if let Some(name) = local.name(db) {
189 let name_kind = classify_name_ref(sb, node.with_value(&name_ref)); 174 let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
175 *shadow_count += 1;
176 binding_hash = Some(calc_binding_hash(&name, *shadow_count))
177 }
178 };
179
190 match name_kind { 180 match name_kind {
191 Some(name_kind) => { 181 Some(NameClass::NameDefinition(def)) => {
192 if let NameDefinition::Local(local) = &name_kind { 182 highlight_name(db, def) | HighlightModifier::Definition
193 if let Some(name) = local.name(db) {
194 let shadow_count =
195 bindings_shadow_count.entry(name.clone()).or_default();
196 binding_hash =
197 Some(calc_binding_hash(node.file_id, &name, *shadow_count))
198 }
199 };
200
201 highlight_name(db, name_kind)
202 } 183 }
203 _ => return None, 184 Some(NameClass::ConstReference(def)) => highlight_name(db, def),
185 None => highlight_name_by_syntax(name) | HighlightModifier::Definition,
204 } 186 }
205 } 187 }
206 NAME => {
207 let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap();
208 let name_kind = classify_name(sb, node.with_value(&name));
209 188
210 if let Some(NameDefinition::Local(local)) = &name_kind { 189 // Highlight references like the definitions they resolve to
190
191 // Special-case field init shorthand
192 NAME_REF if element.parent().and_then(ast::RecordField::cast).is_some() => {
193 HighlightTag::Field.into()
194 }
195 NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None,
196 NAME_REF => {
197 let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap();
198 let name_kind = classify_name_ref(sema, &name_ref)?;
199
200 if let NameDefinition::Local(local) = &name_kind {
211 if let Some(name) = local.name(db) { 201 if let Some(name) = local.name(db) {
212 let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); 202 let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
213 *shadow_count += 1; 203 binding_hash = Some(calc_binding_hash(&name, *shadow_count))
214 binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count))
215 } 204 }
216 }; 205 };
217 206
218 match name_kind { 207 highlight_name(db, name_kind)
219 Some(name_kind) => highlight_name(db, name_kind), 208 }
220 None => name.syntax().parent().map_or(tags::FUNCTION, |x| match x.kind() { 209
221 STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => tags::TYPE, 210 // Simple token-based highlighting
222 TYPE_PARAM => tags::TYPE_PARAM, 211 COMMENT => HighlightTag::Comment.into(),
223 RECORD_FIELD_DEF => tags::FIELD, 212 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(),
224 _ => tags::FUNCTION, 213 ATTR => HighlightTag::Attribute.into(),
225 }), 214 INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(),
215 BYTE => HighlightTag::ByteLiteral.into(),
216 CHAR => HighlightTag::CharLiteral.into(),
217 LIFETIME => {
218 let h = Highlight::new(HighlightTag::Lifetime);
219 dbg!(match element.parent().map(|it| it.kind()) {
220 Some(LIFETIME_PARAM) | Some(LABEL) => h | HighlightModifier::Definition,
221 _ => h,
222 })
223 }
224
225 k if k.is_keyword() => {
226 let h = Highlight::new(HighlightTag::Keyword);
227 match k {
228 T![break]
229 | T![continue]
230 | T![else]
231 | T![for]
232 | T![if]
233 | T![loop]
234 | T![match]
235 | T![return]
236 | T![while] => h | HighlightModifier::Control,
237 T![unsafe] => h | HighlightModifier::Unsafe,
238 _ => h,
226 } 239 }
227 } 240 }
228 INT_NUMBER | FLOAT_NUMBER => tags::LITERAL_NUMERIC,
229 BYTE => tags::LITERAL_BYTE,
230 CHAR => tags::LITERAL_CHAR,
231 LIFETIME => tags::TYPE_LIFETIME,
232 T![unsafe] => tags::KEYWORD_UNSAFE,
233 k if is_control_keyword(k) => tags::KEYWORD_CONTROL,
234 k if k.is_keyword() => tags::KEYWORD,
235 241
236 _ => return None, 242 _ => return None,
237 }; 243 };
238 244
239 return Some((tag, binding_hash)); 245 return Some((highlight, binding_hash));
240 246
241 fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 { 247 fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 {
242 fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { 248 fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
243 use std::{collections::hash_map::DefaultHasher, hash::Hasher}; 249 use std::{collections::hash_map::DefaultHasher, hash::Hasher};
244 250
@@ -247,232 +253,98 @@ fn highlight_node(
247 hasher.finish() 253 hasher.finish()
248 } 254 }
249 255
250 hash((file_id, name, shadow_count)) 256 hash((name, shadow_count))
251 }
252}
253
254pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
255 let parse = db.parse(file_id);
256
257 fn rainbowify(seed: u64) -> String {
258 use rand::prelude::*;
259 let mut rng = SmallRng::seed_from_u64(seed);
260 format!(
261 "hsl({h},{s}%,{l}%)",
262 h = rng.gen_range::<u16, _, _>(0, 361),
263 s = rng.gen_range::<u16, _, _>(42, 99),
264 l = rng.gen_range::<u16, _, _>(40, 91),
265 )
266 }
267
268 let mut ranges = highlight(db, file_id);
269 ranges.sort_by_key(|it| it.range.start());
270 // quick non-optimal heuristic to intersect token ranges and highlighted ranges
271 let mut frontier = 0;
272 let mut could_intersect: Vec<&HighlightedRange> = Vec::new();
273
274 let mut buf = String::new();
275 buf.push_str(&STYLE);
276 buf.push_str("<pre><code>");
277 let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
278 for token in tokens {
279 could_intersect.retain(|it| token.text_range().start() <= it.range.end());
280 while let Some(r) = ranges.get(frontier) {
281 if r.range.start() <= token.text_range().end() {
282 could_intersect.push(r);
283 frontier += 1;
284 } else {
285 break;
286 }
287 }
288 let text = html_escape(&token.text());
289 let ranges = could_intersect
290 .iter()
291 .filter(|it| token.text_range().is_subrange(&it.range))
292 .collect::<Vec<_>>();
293 if ranges.is_empty() {
294 buf.push_str(&text);
295 } else {
296 let classes = ranges.iter().map(|x| x.tag).collect::<Vec<_>>().join(" ");
297 let binding_hash = ranges.first().and_then(|x| x.binding_hash);
298 let color = match (rainbow, binding_hash) {
299 (true, Some(hash)) => format!(
300 " data-binding-hash=\"{}\" style=\"color: {};\"",
301 hash,
302 rainbowify(hash)
303 ),
304 _ => "".into(),
305 };
306 buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
307 }
308 } 257 }
309 buf.push_str("</code></pre>");
310 buf
311} 258}
312 259
313fn highlight_name(db: &RootDatabase, def: NameDefinition) -> &'static str { 260fn highlight_name(db: &RootDatabase, def: NameDefinition) -> Highlight {
314 match def { 261 match def {
315 NameDefinition::Macro(_) => tags::MACRO, 262 NameDefinition::Macro(_) => HighlightTag::Macro,
316 NameDefinition::StructField(_) => tags::FIELD, 263 NameDefinition::StructField(_) => HighlightTag::Field,
317 NameDefinition::ModuleDef(hir::ModuleDef::Module(_)) => tags::MODULE, 264 NameDefinition::ModuleDef(def) => match def {
318 NameDefinition::ModuleDef(hir::ModuleDef::Function(_)) => tags::FUNCTION, 265 hir::ModuleDef::Module(_) => HighlightTag::Module,
319 NameDefinition::ModuleDef(hir::ModuleDef::Adt(_)) => tags::TYPE, 266 hir::ModuleDef::Function(_) => HighlightTag::Function,
320 NameDefinition::ModuleDef(hir::ModuleDef::EnumVariant(_)) => tags::CONSTANT, 267 hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HighlightTag::Struct,
321 NameDefinition::ModuleDef(hir::ModuleDef::Const(_)) => tags::CONSTANT, 268 hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HighlightTag::Enum,
322 NameDefinition::ModuleDef(hir::ModuleDef::Static(_)) => tags::CONSTANT, 269 hir::ModuleDef::Adt(hir::Adt::Union(_)) => HighlightTag::Union,
323 NameDefinition::ModuleDef(hir::ModuleDef::Trait(_)) => tags::TYPE, 270 hir::ModuleDef::EnumVariant(_) => HighlightTag::EnumVariant,
324 NameDefinition::ModuleDef(hir::ModuleDef::TypeAlias(_)) => tags::TYPE, 271 hir::ModuleDef::Const(_) => HighlightTag::Constant,
325 NameDefinition::ModuleDef(hir::ModuleDef::BuiltinType(_)) => tags::TYPE_BUILTIN, 272 hir::ModuleDef::Static(_) => HighlightTag::Static,
326 NameDefinition::SelfType(_) => tags::TYPE_SELF, 273 hir::ModuleDef::Trait(_) => HighlightTag::Trait,
327 NameDefinition::TypeParam(_) => tags::TYPE_PARAM, 274 hir::ModuleDef::TypeAlias(_) => HighlightTag::TypeAlias,
275 hir::ModuleDef::BuiltinType(_) => HighlightTag::BuiltinType,
276 },
277 NameDefinition::SelfType(_) => HighlightTag::SelfType,
278 NameDefinition::TypeParam(_) => HighlightTag::TypeParam,
279 // FIXME: distinguish between locals and parameters
328 NameDefinition::Local(local) => { 280 NameDefinition::Local(local) => {
281 let mut h = Highlight::new(HighlightTag::Local);
329 if local.is_mut(db) || local.ty(db).is_mutable_reference() { 282 if local.is_mut(db) || local.ty(db).is_mutable_reference() {
330 tags::VARIABLE_MUT 283 h |= HighlightModifier::Mutable;
331 } else {
332 tags::VARIABLE
333 } 284 }
285 return h;
334 } 286 }
335 } 287 }
288 .into()
336} 289}
337 290
338//FIXME: like, real html escaping 291fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
339fn html_escape(text: &str) -> String { 292 let default = HighlightTag::Function.into();
340 text.replace("<", "&lt;").replace(">", "&gt;")
341}
342
343const STYLE: &str = "
344<style>
345body { margin: 0; }
346pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
347
348.comment { color: #7F9F7F; }
349.string { color: #CC9393; }
350.field { color: #94BFF3; }
351.function { color: #93E0E3; }
352.parameter { color: #94BFF3; }
353.text { color: #DCDCCC; }
354.type { color: #7CB8BB; }
355.type\\.builtin { color: #8CD0D3; }
356.type\\.param { color: #20999D; }
357.attribute { color: #94BFF3; }
358.literal { color: #BFEBBF; }
359.literal\\.numeric { color: #6A8759; }
360.macro { color: #94BFF3; }
361.module { color: #AFD8AF; }
362.variable { color: #DCDCCC; }
363.variable\\.mut { color: #DCDCCC; text-decoration: underline; }
364
365.keyword { color: #F0DFAF; }
366.keyword\\.unsafe { color: #DFAF8F; }
367.keyword\\.control { color: #F0DFAF; font-weight: bold; }
368</style>
369";
370
371#[cfg(test)]
372mod tests {
373 use std::fs;
374
375 use test_utils::{assert_eq_text, project_dir, read_text};
376
377 use crate::mock_analysis::{single_file, MockAnalysis};
378
379 #[test]
380 fn test_highlighting() {
381 let (analysis, file_id) = single_file(
382 r#"
383#[derive(Clone, Debug)]
384struct Foo {
385 pub x: i32,
386 pub y: i32,
387}
388
389fn foo<T>() -> T {
390 unimplemented!();
391 foo::<i32>();
392}
393 293
394macro_rules! def_fn { 294 let parent = match name.syntax().parent() {
395 ($($tt:tt)*) => {$($tt)*} 295 Some(it) => it,
396} 296 _ => return default,
297 };
397 298
398def_fn!{ 299 match parent.kind() {
399 fn bar() -> u32 { 300 STRUCT_DEF => HighlightTag::Struct.into(),
400 100 301 ENUM_DEF => HighlightTag::Enum.into(),
302 UNION_DEF => HighlightTag::Union.into(),
303 TRAIT_DEF => HighlightTag::Trait.into(),
304 TYPE_ALIAS_DEF => HighlightTag::TypeAlias.into(),
305 TYPE_PARAM => HighlightTag::TypeParam.into(),
306 RECORD_FIELD_DEF => HighlightTag::Field.into(),
307 _ => default,
401 } 308 }
402} 309}
403 310
404// comment 311fn highlight_injection(
405fn main() { 312 acc: &mut Vec<HighlightedRange>,
406 println!("Hello, {}!", 92); 313 sema: &Semantics<RootDatabase>,
407 314 literal: ast::RawString,
408 let mut vec = Vec::new(); 315 expanded: SyntaxToken,
409 if true { 316) -> Option<()> {
410 let x = 92; 317 let call_info = call_info_for_token(&sema, expanded)?;
411 vec.push(Foo { x, y: 1 }); 318 let idx = call_info.active_parameter?;
319 let name = call_info.signature.parameter_names.get(idx)?;
320 if name != "ra_fixture" {
321 return None;
412 } 322 }
413 unsafe { vec.set_len(0); } 323 let value = literal.value()?;
414 324 let (analysis, tmp_file_id) = Analysis::from_single_file(value);
415 let mut x = 42; 325
416 let y = &mut x; 326 if let Some(range) = literal.open_quote_text_range() {
417 let z = &y; 327 acc.push(HighlightedRange {
418 328 range,
419 y; 329 highlight: HighlightTag::StringLiteral.into(),
420} 330 binding_hash: None,
421 331 })
422enum E<X> {
423 V(X)
424}
425
426impl<X> E<X> {
427 fn new<T>() -> E<T> {}
428}
429"#
430 .trim(),
431 );
432 let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlighting.html");
433 let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
434 let expected_html = &read_text(&dst_file);
435 fs::write(dst_file, &actual_html).unwrap();
436 assert_eq_text!(expected_html, actual_html);
437 } 332 }
438 333
439 #[test] 334 for mut h in analysis.highlight(tmp_file_id).unwrap() {
440 fn test_rainbow_highlighting() { 335 if let Some(r) = literal.map_range_up(h.range) {
441 let (analysis, file_id) = single_file( 336 h.range = r;
442 r#" 337 acc.push(h)
443fn main() { 338 }
444 let hello = "hello";
445 let x = hello.to_string();
446 let y = hello.to_string();
447
448 let x = "other color please!";
449 let y = x.to_string();
450}
451
452fn bar() {
453 let mut hello = "hello";
454}
455"#
456 .trim(),
457 );
458 let dst_file = project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html");
459 let actual_html = &analysis.highlight_as_html(file_id, true).unwrap();
460 let expected_html = &read_text(&dst_file);
461 fs::write(dst_file, &actual_html).unwrap();
462 assert_eq_text!(expected_html, actual_html);
463 } 339 }
464 340
465 #[test] 341 if let Some(range) = literal.close_quote_text_range() {
466 fn accidentally_quadratic() { 342 acc.push(HighlightedRange {
467 let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic"); 343 range,
468 let src = fs::read_to_string(file).unwrap(); 344 highlight: HighlightTag::StringLiteral.into(),
469 345 binding_hash: None,
470 let mut mock = MockAnalysis::new(); 346 })
471 let file_id = mock.add_file("/main.rs", &src);
472 let host = mock.analysis_host();
473
474 // let t = std::time::Instant::now();
475 let _ = host.analysis().highlight(file_id).unwrap();
476 // eprintln!("elapsed: {:?}", t.elapsed());
477 } 347 }
348
349 Some(())
478} 350}
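
Two structural changes stand out in highlight() above: it now takes an optional range, so callers can ask for just the visible part of a file, and macro handling moved from the separate highlight_token_tree path into the main walk, where tokens inside a macro call are descended into the expansion via Semantics before being classified. The range support is simply an intersection test applied to every walk event before any per-element work happens; a minimal sketch of that early-out over plain ranges (Range and the element list are stand-ins for ra_syntax's TextRange and syntax elements):

    // Sketch of the viewport early-out: skip any element whose range does not
    // intersect the requested range, before classifying it.
    #[derive(Clone, Copy)]
    struct Range {
        start: u32,
        end: u32,
    }

    impl Range {
        fn intersects(self, other: Range) -> bool {
            self.start < other.end && other.start < self.end
        }
    }

    fn highlight_kinds(elements: &[(Range, &'static str)], viewport: Option<Range>) -> Vec<&'static str> {
        let viewport = viewport.unwrap_or(Range { start: 0, end: u32::MAX });
        elements
            .iter()
            .filter(|(range, _)| range.intersects(viewport)) // cheap skip, as in the walker loop
            .map(|&(_, kind)| kind)
            .collect()
    }

    fn main() {
        let elements = [
            (Range { start: 0, end: 2 }, "keyword"),
            (Range { start: 3, end: 7 }, "function"),
            (Range { start: 20, end: 27 }, "comment"),
        ];
        assert_eq!(highlight_kinds(&elements, None).len(), 3);
        let viewport = Range { start: 0, end: 10 };
        assert_eq!(highlight_kinds(&elements, Some(viewport)), ["keyword", "function"]);
    }
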
diff --git a/crates/ra_ide/src/syntax_highlighting/html.rs b/crates/ra_ide/src/syntax_highlighting/html.rs
new file mode 100644
index 000000000..e13766c9d
--- /dev/null
+++ b/crates/ra_ide/src/syntax_highlighting/html.rs
@@ -0,0 +1,106 @@
1//! Renders a bit of code as HTML.
2
3use ra_db::SourceDatabase;
4use ra_syntax::AstNode;
5
6use crate::{FileId, HighlightedRange, RootDatabase};
7
8use super::highlight;
9
10pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
11 let parse = db.parse(file_id);
12
13 fn rainbowify(seed: u64) -> String {
14 use rand::prelude::*;
15 let mut rng = SmallRng::seed_from_u64(seed);
16 format!(
17 "hsl({h},{s}%,{l}%)",
18 h = rng.gen_range::<u16, _, _>(0, 361),
19 s = rng.gen_range::<u16, _, _>(42, 99),
20 l = rng.gen_range::<u16, _, _>(40, 91),
21 )
22 }
23
24 let mut ranges = highlight(db, file_id, None);
25 ranges.sort_by_key(|it| it.range.start());
26 // quick non-optimal heuristic to intersect token ranges and highlighted ranges
27 let mut frontier = 0;
28 let mut could_intersect: Vec<&HighlightedRange> = Vec::new();
29
30 let mut buf = String::new();
31 buf.push_str(&STYLE);
32 buf.push_str("<pre><code>");
33 let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
34 for token in tokens {
35 could_intersect.retain(|it| token.text_range().start() <= it.range.end());
36 while let Some(r) = ranges.get(frontier) {
37 if r.range.start() <= token.text_range().end() {
38 could_intersect.push(r);
39 frontier += 1;
40 } else {
41 break;
42 }
43 }
44 let text = html_escape(&token.text());
45 let ranges = could_intersect
46 .iter()
47 .filter(|it| token.text_range().is_subrange(&it.range))
48 .collect::<Vec<_>>();
49 if ranges.is_empty() {
50 buf.push_str(&text);
51 } else {
52 let classes = ranges
53 .iter()
54 .map(|it| it.highlight.to_string().replace('.', " "))
55 .collect::<Vec<_>>()
56 .join(" ");
57 let binding_hash = ranges.first().and_then(|x| x.binding_hash);
58 let color = match (rainbow, binding_hash) {
59 (true, Some(hash)) => format!(
60 " data-binding-hash=\"{}\" style=\"color: {};\"",
61 hash,
62 rainbowify(hash)
63 ),
64 _ => "".into(),
65 };
66 buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
67 }
68 }
69 buf.push_str("</code></pre>");
70 buf
71}
72
73//FIXME: like, real html escaping
74fn html_escape(text: &str) -> String {
75 text.replace("<", "&lt;").replace(">", "&gt;")
76}
77
78const STYLE: &str = "
79<style>
80body { margin: 0; }
81pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
82
83.lifetime { color: #DFAF8F; font-style: italic; }
84.comment { color: #7F9F7F; }
85.struct, .enum { color: #7CB8BB; }
86.enum_variant { color: #BDE0F3; }
87.string_literal { color: #CC9393; }
88.field { color: #94BFF3; }
89.function { color: #93E0E3; }
90.parameter { color: #94BFF3; }
91.text { color: #DCDCCC; }
92.type { color: #7CB8BB; }
93.builtin_type { color: #8CD0D3; }
94.type_param { color: #DFAF8F; }
95.attribute { color: #94BFF3; }
96.numeric_literal { color: #BFEBBF; }
97.macro { color: #94BFF3; }
98.module { color: #AFD8AF; }
99.variable { color: #DCDCCC; }
100.mutable { text-decoration: underline; }
101
102.keyword { color: #F0DFAF; font-weight: bold; }
103.keyword.unsafe { color: #BC8383; font-weight: bold; }
104.control { font-style: italic; }
105</style>
106";
diff --git a/crates/ra_ide/src/syntax_highlighting/tags.rs b/crates/ra_ide/src/syntax_highlighting/tags.rs
new file mode 100644
index 000000000..8835a5de2
--- /dev/null
+++ b/crates/ra_ide/src/syntax_highlighting/tags.rs
@@ -0,0 +1,175 @@
1//! Defines token tags we use for syntax highlighting.
2//! A tag is not unlike a CSS class.
3
4use std::{fmt, ops};
5
6#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
7pub struct Highlight {
8 pub tag: HighlightTag,
9 pub modifiers: HighlightModifiers,
10}
11
12#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
13pub struct HighlightModifiers(u32);
14
15#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
16pub enum HighlightTag {
17 Attribute,
18 BuiltinType,
19 ByteLiteral,
20 CharLiteral,
21 Comment,
22 Constant,
23 Enum,
24 EnumVariant,
25 Field,
26 Function,
27 Keyword,
28 Lifetime,
29 Macro,
30 Module,
31 NumericLiteral,
32 SelfType,
33 Static,
34 StringLiteral,
35 Struct,
36 Trait,
37 TypeAlias,
38 TypeParam,
39 Union,
40 Local,
41}
42
43#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
44#[repr(u8)]
45pub enum HighlightModifier {
46 /// Used with keywords like `if` and `break`.
47 Control = 0,
48 /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is
49 /// not.
50 Definition,
51 Mutable,
52 Unsafe,
53}
54
55impl HighlightTag {
56 fn as_str(self) -> &'static str {
57 match self {
58 HighlightTag::Attribute => "attribute",
59 HighlightTag::BuiltinType => "builtin_type",
60 HighlightTag::ByteLiteral => "byte_literal",
61 HighlightTag::CharLiteral => "char_literal",
62 HighlightTag::Comment => "comment",
63 HighlightTag::Constant => "constant",
64 HighlightTag::Enum => "enum",
65 HighlightTag::EnumVariant => "enum_variant",
66 HighlightTag::Field => "field",
67 HighlightTag::Function => "function",
68 HighlightTag::Keyword => "keyword",
69 HighlightTag::Lifetime => "lifetime",
70 HighlightTag::Macro => "macro",
71 HighlightTag::Module => "module",
72 HighlightTag::NumericLiteral => "numeric_literal",
73 HighlightTag::SelfType => "self_type",
74 HighlightTag::Static => "static",
75 HighlightTag::StringLiteral => "string_literal",
76 HighlightTag::Struct => "struct",
77 HighlightTag::Trait => "trait",
78 HighlightTag::TypeAlias => "type_alias",
79 HighlightTag::TypeParam => "type_param",
80 HighlightTag::Union => "union",
81 HighlightTag::Local => "variable",
82 }
83 }
84}
85
86impl fmt::Display for HighlightTag {
87 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
88 fmt::Display::fmt(self.as_str(), f)
89 }
90}
91
92impl HighlightModifier {
93 const ALL: &'static [HighlightModifier] = &[
94 HighlightModifier::Control,
95 HighlightModifier::Definition,
96 HighlightModifier::Mutable,
97 HighlightModifier::Unsafe,
98 ];
99
100 fn as_str(self) -> &'static str {
101 match self {
102 HighlightModifier::Control => "control",
103 HighlightModifier::Definition => "declaration",
104 HighlightModifier::Mutable => "mutable",
105 HighlightModifier::Unsafe => "unsafe",
106 }
107 }
108
109 fn mask(self) -> u32 {
110 1 << (self as u32)
111 }
112}
113
114impl fmt::Display for HighlightModifier {
115 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
116 fmt::Display::fmt(self.as_str(), f)
117 }
118}
119
120impl fmt::Display for Highlight {
121 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
122 write!(f, "{}", self.tag)?;
123 for modifier in self.modifiers.iter() {
124 write!(f, ".{}", modifier)?
125 }
126 Ok(())
127 }
128}
129
130impl From<HighlightTag> for Highlight {
131 fn from(tag: HighlightTag) -> Highlight {
132 Highlight::new(tag)
133 }
134}
135
136impl Highlight {
137 pub(crate) fn new(tag: HighlightTag) -> Highlight {
138 Highlight { tag, modifiers: HighlightModifiers::default() }
139 }
140}
141
142impl ops::BitOr<HighlightModifier> for HighlightTag {
143 type Output = Highlight;
144
145 fn bitor(self, rhs: HighlightModifier) -> Highlight {
146 Highlight::new(self) | rhs
147 }
148}
149
150impl ops::BitOrAssign<HighlightModifier> for HighlightModifiers {
151 fn bitor_assign(&mut self, rhs: HighlightModifier) {
152 self.0 |= rhs.mask();
153 }
154}
155
156impl ops::BitOrAssign<HighlightModifier> for Highlight {
157 fn bitor_assign(&mut self, rhs: HighlightModifier) {
158 self.modifiers |= rhs;
159 }
160}
161
162impl ops::BitOr<HighlightModifier> for Highlight {
163 type Output = Highlight;
164
165 fn bitor(mut self, rhs: HighlightModifier) -> Highlight {
166 self |= rhs;
167 self
168 }
169}
170
171impl HighlightModifiers {
172 pub fn iter(self) -> impl Iterator<Item = HighlightModifier> {
173 HighlightModifier::ALL.iter().copied().filter(move |it| self.0 & it.mask() == it.mask())
174 }
175}
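
HighlightModifiers is a plain bitset: each HighlightModifier owns the bit 1 << (self as u32), BitOrAssign sets it, and iter() recovers the active modifiers by filtering HighlightModifier::ALL against the mask. Display for Highlight then prints the tag followed by one .modifier segment per set bit, which is exactly the field.declaration string the tests assert on and the string html.rs splits into CSS classes. A trimmed restatement of that pattern that compiles on its own (a subset of the real variants, same mechanics):

    // Trimmed version of the HighlightModifiers bitset: each modifier owns one
    // bit, |= sets it, and iter() filters the full list against the mask.
    use std::{fmt, ops};

    #[derive(Clone, Copy)]
    #[repr(u8)]
    enum Modifier {
        Control = 0,
        Definition,
        Mutable,
        Unsafe,
    }

    impl Modifier {
        const ALL: &'static [Modifier] =
            &[Modifier::Control, Modifier::Definition, Modifier::Mutable, Modifier::Unsafe];

        fn as_str(self) -> &'static str {
            match self {
                Modifier::Control => "control",
                Modifier::Definition => "declaration",
                Modifier::Mutable => "mutable",
                Modifier::Unsafe => "unsafe",
            }
        }

        fn mask(self) -> u32 {
            1 << (self as u32)
        }
    }

    #[derive(Default, Clone, Copy)]
    struct Modifiers(u32);

    impl ops::BitOrAssign<Modifier> for Modifiers {
        fn bitor_assign(&mut self, rhs: Modifier) {
            self.0 |= rhs.mask();
        }
    }

    impl Modifiers {
        fn iter(self) -> impl Iterator<Item = Modifier> {
            Modifier::ALL.iter().copied().filter(move |m| self.0 & m.mask() == m.mask())
        }
    }

    impl fmt::Display for Modifiers {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            for m in self.iter() {
                write!(f, ".{}", m.as_str())?;
            }
            Ok(())
        }
    }

    fn main() {
        let mut mods = Modifiers::default();
        mods |= Modifier::Definition;
        mods |= Modifier::Mutable;
        // Prints "variable.declaration.mutable", the kind of string the tests check.
        println!("variable{}", mods);
    }
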
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs
new file mode 100644
index 000000000..98c030791
--- /dev/null
+++ b/crates/ra_ide/src/syntax_highlighting/tests.rs
@@ -0,0 +1,133 @@
1use std::fs;
2
3use test_utils::{assert_eq_text, project_dir, read_text};
4
5use crate::{
6 mock_analysis::{single_file, MockAnalysis},
7 FileRange, TextRange,
8};
9
10#[test]
11fn test_highlighting() {
12 let (analysis, file_id) = single_file(
13 r#"
14#[derive(Clone, Debug)]
15struct Foo {
16 pub x: i32,
17 pub y: i32,
18}
19
20fn foo<'a, T>() -> T {
21 foo::<'a, i32>()
22}
23
24macro_rules! def_fn {
25 ($($tt:tt)*) => {$($tt)*}
26}
27
28def_fn! {
29 fn bar() -> u32 {
30 100
31 }
32}
33
34// comment
35fn main() {
36 println!("Hello, {}!", 92);
37
38 let mut vec = Vec::new();
39 if true {
40 let x = 92;
41 vec.push(Foo { x, y: 1 });
42 }
43 unsafe { vec.set_len(0); }
44
45 let mut x = 42;
46 let y = &mut x;
47 let z = &y;
48
49 y;
50}
51
52enum Option<T> {
53 Some(T),
54 None,
55}
56use Option::*;
57
58impl<T> Option<T> {
59 fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
60 match other {
61 None => unimplemented!(),
62 Nope => Nope,
63 }
64 }
65}
66"#
67 .trim(),
68 );
69 let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlighting.html");
70 let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
71 let expected_html = &read_text(&dst_file);
72 fs::write(dst_file, &actual_html).unwrap();
73 assert_eq_text!(expected_html, actual_html);
74}
75
76#[test]
77fn test_rainbow_highlighting() {
78 let (analysis, file_id) = single_file(
79 r#"
80fn main() {
81 let hello = "hello";
82 let x = hello.to_string();
83 let y = hello.to_string();
84
85 let x = "other color please!";
86 let y = x.to_string();
87}
88
89fn bar() {
90 let mut hello = "hello";
91}
92"#
93 .trim(),
94 );
95 let dst_file = project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html");
96 let actual_html = &analysis.highlight_as_html(file_id, true).unwrap();
97 let expected_html = &read_text(&dst_file);
98 fs::write(dst_file, &actual_html).unwrap();
99 assert_eq_text!(expected_html, actual_html);
100}
101
102#[test]
103fn accidentally_quadratic() {
104 let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic");
105 let src = fs::read_to_string(file).unwrap();
106
107 let mut mock = MockAnalysis::new();
108 let file_id = mock.add_file("/main.rs", &src);
109 let host = mock.analysis_host();
110
111 // let t = std::time::Instant::now();
112 let _ = host.analysis().highlight(file_id).unwrap();
113 // eprintln!("elapsed: {:?}", t.elapsed());
114}
115
116#[test]
117fn test_ranges() {
118 let (analysis, file_id) = single_file(
119 r#"
120 #[derive(Clone, Debug)]
121 struct Foo {
122 pub x: i32,
123 pub y: i32,
124 }"#,
125 );
126
127 // The "x"
128 let highlights = &analysis
129 .highlight_range(FileRange { file_id, range: TextRange::offset_len(82.into(), 1.into()) })
130 .unwrap();
131
132 assert_eq!(&highlights[0].highlight.to_string(), "field.declaration");
133}
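
The two HTML tests above are self-updating snapshot tests: they render the fixture, read the stored .html file as the expected value, overwrite it with the fresh output, and only then compare, so an intentional change in highlighting is a single failing run plus an updated snapshot to commit. test_ranges instead drives the new highlight_range entry point and asserts on the rendered Highlight string. A sketch of that write-then-compare pattern in plain std; the path handling and seeding are illustrative, not the test_utils helpers:

    // Sketch of the self-updating snapshot check used by the highlighting tests.
    use std::{fs, path::Path};

    fn check_snapshot(path: &Path, actual: &str) {
        let expected = fs::read_to_string(path).unwrap_or_default();
        // Refresh the file unconditionally, so accepting a change is just
        // "re-run the test and commit the updated snapshot".
        fs::write(path, actual).unwrap();
        assert_eq!(expected, actual, "snapshot {:?} changed", path);
    }

    fn main() {
        let path = std::env::temp_dir().join("highlighting_demo.html");
        let actual = "<pre><code>fn main() {}</code></pre>";
        fs::write(&path, actual).unwrap(); // seed the snapshot so the demo passes
        check_snapshot(&path, actual);
        println!("snapshot matches");
    }
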