path: root/crates/ra_ide_api
author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-07-19 12:15:55 +0100
committer  bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2019-07-19 12:15:55 +0100
commit     f209843e31af7f0e0212aa28ffec2efad2a70c6f (patch)
tree       548227da78a3bea644f57714d075410c0bdf7469 /crates/ra_ide_api
parent     58d4983ba5745975446d60f2886d96f8d2adf0f2 (diff)
parent     d4a66166c002f0a49e41d856a49cb5685ac93202 (diff)
Merge #1545
1545: migrate ra_syntax to the new rowan API r=matklad a=matklad

Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ra_ide_api')
-rw-r--r--  crates/ra_ide_api/src/call_info.rs                           |  28
-rw-r--r--  crates/ra_ide_api/src/completion/complete_dot.rs             |   9
-rw-r--r--  crates/ra_ide_api/src/completion/complete_fn_param.rs        |   7
-rw-r--r--  crates/ra_ide_api/src/completion/complete_keyword.rs         |  16
-rw-r--r--  crates/ra_ide_api/src/completion/complete_postfix.rs         |   7
-rw-r--r--  crates/ra_ide_api/src/completion/complete_scope.rs           |   4
-rw-r--r--  crates/ra_ide_api/src/completion/complete_struct_literal.rs  |   4
-rw-r--r--  crates/ra_ide_api/src/completion/completion_context.rs       |  26
-rw-r--r--  crates/ra_ide_api/src/diagnostics.rs                         |  16
-rw-r--r--  crates/ra_ide_api/src/display/function_signature.rs          |   2
-rw-r--r--  crates/ra_ide_api/src/display/navigation_target.rs           |  56
-rw-r--r--  crates/ra_ide_api/src/display/structure.rs                   |  50
-rw-r--r--  crates/ra_ide_api/src/extend_selection.rs                    |  28
-rw-r--r--  crates/ra_ide_api/src/folding_ranges.rs                      |  48
-rw-r--r--  crates/ra_ide_api/src/goto_definition.rs                     | 106
-rw-r--r--  crates/ra_ide_api/src/goto_type_definition.rs                |  10
-rw-r--r--  crates/ra_ide_api/src/hover.rs                               |  51
-rw-r--r--  crates/ra_ide_api/src/impls.rs                               |  10
-rw-r--r--  crates/ra_ide_api/src/join_lines.rs                          |  23
-rw-r--r--  crates/ra_ide_api/src/lib.rs                                 |  12
-rw-r--r--  crates/ra_ide_api/src/matching_brace.rs                      |   2
-rw-r--r--  crates/ra_ide_api/src/name_ref_kind.rs                       |  10
-rw-r--r--  crates/ra_ide_api/src/references.rs                          |  35
-rw-r--r--  crates/ra_ide_api/src/runnables.rs                           |  10
-rw-r--r--  crates/ra_ide_api/src/status.rs                              |   2
-rw-r--r--  crates/ra_ide_api/src/symbol_index.rs                        |  10
-rw-r--r--  crates/ra_ide_api/src/syntax_highlighting.rs                 |  25
-rw-r--r--  crates/ra_ide_api/src/syntax_tree.rs                         |   6
-rw-r--r--  crates/ra_ide_api/src/typing.rs                              |  19
29 files changed, 342 insertions, 290 deletions
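The hunks below all follow one pattern: the merge moves ra_ide_api from rowan's old borrowed-node API (lifetime-parameterized syntax and AST types such as &'a ast::FnDef, SyntaxToken<'a>, or TreeArc<N>) to the new API in which nodes are owned, cheaply clonable values, so call sites now take &node and call .clone() where they previously copied a reference. The following is a minimal, self-contained sketch of that ownership model, not the real rowan/ra_syntax implementation; the SyntaxNode struct, its green field, and name_ref below are illustrative stand-ins chosen to mirror the signature changes in the hunks.

// Toy model of an owned, reference-counted syntax node (NOT rowan's real types).
use std::rc::Rc;

#[derive(Clone, Debug)]
struct SyntaxNode {
    // Shared, immutable tree data; cloning a node only bumps the refcount.
    green: Rc<String>,
}

// Old style (before this merge): hand out borrows tied to the tree's lifetime.
//   fn name_ref<'a>(node: &'a SyntaxNode) -> Option<&'a SyntaxNode>
//
// New style (after this merge): take &node and return owned, clonable handles.
fn name_ref(node: &SyntaxNode) -> Option<SyntaxNode> {
    // A real implementation would walk child nodes; here we just clone the handle.
    Some(node.clone())
}

fn main() {
    let root = SyntaxNode { green: Rc::new("fn main() {}".to_string()) };
    // Callers pass `&node` and clone where they need to keep a copy, mirroring
    // the `parse.tree().syntax().clone()` pattern in call_info.rs below.
    let copy = name_ref(&root).unwrap();
    println!("{} handles share one green tree", Rc::strong_count(&copy.green));
}

Because such handles are cheap to clone, the diff can also replace collections of references (for example FxHashSet<&'a SyntaxNode>) with collections of owned nodes without changing the algorithms around them.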
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
index 11dea7c14..270499612 100644
--- a/crates/ra_ide_api/src/call_info.rs
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature};
 /// Computes parameter information for the given call expression.
 pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
     let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax();
+    let syntax = parse.tree().syntax().clone();
 
     // Find the calling expression and it's NameRef
-    let calling_node = FnCallNode::with_node(syntax, position.offset)?;
+    let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
     let name_ref = calling_node.name_ref()?;
 
     let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
-    let function = match calling_node {
+    let function = match &calling_node {
         FnCallNode::CallExpr(expr) => {
             //FIXME: apply subst
-            let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?;
+            let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
             match callable_def {
                 hir::CallableDef::Function(it) => it,
                 //FIXME: handle other callables
                 _ => return None,
             }
         }
-        FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?,
+        FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?,
     };
 
     let mut call_info = CallInfo::new(db, function);
@@ -73,13 +73,13 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     Some(call_info)
 }
 
-enum FnCallNode<'a> {
-    CallExpr(&'a ast::CallExpr),
-    MethodCallExpr(&'a ast::MethodCallExpr),
+enum FnCallNode {
+    CallExpr(ast::CallExpr),
+    MethodCallExpr(ast::MethodCallExpr),
 }
 
-impl<'a> FnCallNode<'a> {
-    fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
+impl FnCallNode {
+    fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> {
         if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
             return Some(FnCallNode::CallExpr(expr));
         }
@@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> {
         None
     }
 
-    fn name_ref(&self) -> Option<&'a ast::NameRef> {
-        match *self {
+    fn name_ref(&self) -> Option<ast::NameRef> {
+        match self {
             FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
                 ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
                 _ => return None,
@@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> {
         }
     }
 
-    fn arg_list(&self) -> Option<&'a ast::ArgList> {
-        match *self {
+    fn arg_list(&self) -> Option<ast::ArgList> {
+        match self {
             FnCallNode::CallExpr(expr) => expr.arg_list(),
             FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
         }
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs
index a5f071442..536ba36df 100644
--- a/crates/ra_ide_api/src/completion/complete_dot.rs
+++ b/crates/ra_ide_api/src/completion/complete_dot.rs
@@ -5,10 +5,11 @@ use rustc_hash::FxHashSet;
 
 /// Complete dot accesses, i.e. fields or methods (currently only fields).
 pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
-    let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
-        Some(it) => it,
-        None => return,
-    };
+    let receiver_ty =
+        match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
+            Some(it) => it,
+            None => return,
+        };
     if !ctx.is_call {
         complete_fields(acc, ctx, receiver_ty.clone());
     }
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs
index 5a117c485..0887ef1f6 100644
--- a/crates/ra_ide_api/src/completion/complete_fn_param.rs
+++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs
@@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
         let _ = visitor_ctx(&mut params)
             .visit::<ast::SourceFile, _>(process)
             .visit::<ast::ItemList, _>(process)
-            .accept(node);
+            .accept(&node);
     }
     params
         .into_iter()
@@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
             .add_to(acc)
     });
 
-    fn process<'a, N: ast::FnDefOwner>(
-        node: &'a N,
-        params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
-    ) {
+    fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) {
         node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each(
             |param| {
                 let text = param.syntax().text().to_string();
diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs
index 034ed934d..4cf34eff8 100644
--- a/crates/ra_ide_api/src/completion/complete_keyword.rs
+++ b/crates/ra_ide_api/src/completion/complete_keyword.rs
@@ -52,7 +52,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
         return;
     }
 
-    let fn_def = match ctx.function_syntax {
+    let fn_def = match &ctx.function_syntax {
         Some(it) => it,
         None => return,
     };
@@ -65,7 +65,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
         acc.add(keyword(ctx, "else", "else {$0}"));
         acc.add(keyword(ctx, "else if", "else if $0 {}"));
     }
-    if is_in_loop_body(ctx.token) {
+    if is_in_loop_body(&ctx.token) {
         if ctx.can_be_stmt {
             acc.add(keyword(ctx, "continue", "continue;"));
             acc.add(keyword(ctx, "break", "break;"));
@@ -74,19 +74,19 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
             acc.add(keyword(ctx, "break", "break"));
         }
     }
-    acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt));
+    acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt));
 }
 
-fn is_in_loop_body(leaf: SyntaxToken) -> bool {
+fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
     for node in leaf.parent().ancestors() {
         if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
             break;
         }
         let loop_body = visitor()
-            .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
-            .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
-            .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
-            .accept(node);
+            .visit::<ast::ForExpr, _>(|it| it.loop_body())
+            .visit::<ast::WhileExpr, _>(|it| it.loop_body())
+            .visit::<ast::LoopExpr, _>(|it| it.loop_body())
+            .accept(&node);
         if let Some(Some(body)) = loop_body {
             if leaf.range().is_subrange(&body.syntax().range()) {
                 return true;
diff --git a/crates/ra_ide_api/src/completion/complete_postfix.rs b/crates/ra_ide_api/src/completion/complete_postfix.rs
index 4f5062214..c75b1c159 100644
--- a/crates/ra_ide_api/src/completion/complete_postfix.rs
+++ b/crates/ra_ide_api/src/completion/complete_postfix.rs
@@ -11,7 +11,8 @@ use ra_text_edit::TextEditBuilder;
 
 fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
     let edit = {
-        let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range();
+        let receiver_range =
+            ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range();
         let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
         let mut builder = TextEditBuilder::default();
         builder.replace(delete_range, snippet.to_string());
@@ -38,9 +39,9 @@ fn is_bool_or_unknown(ty: Option<Ty>) -> bool {
 }
 
 pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
-    if let Some(dot_receiver) = ctx.dot_receiver {
+    if let Some(dot_receiver) = &ctx.dot_receiver {
         let receiver_text = dot_receiver.syntax().text().to_string();
-        let receiver_ty = ctx.analyzer.type_of(ctx.db, dot_receiver);
+        let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver);
         if is_bool_or_unknown(receiver_ty) {
             postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text))
                 .add_to(acc);
diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs
index 1ba871257..f92034055 100644
--- a/crates/ra_ide_api/src/completion/complete_scope.rs
+++ b/crates/ra_ide_api/src/completion/complete_scope.rs
@@ -20,8 +20,8 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
                 let mut builder = TextEditBuilder::default();
                 builder.replace(ctx.source_range(), name.to_string());
                 auto_import::auto_import_text_edit(
-                    ctx.token.parent(),
-                    ctx.token.parent(),
+                    &ctx.token.parent(),
+                    &ctx.token.parent(),
                     &path,
                     &mut builder,
                 );
diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
index b6216f857..9410f740f 100644
--- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs
+++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs
@@ -4,8 +4,8 @@ use crate::completion::{CompletionContext, Completions};
 
 /// Complete fields in fields literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
-    let (ty, variant) = match ctx.struct_lit_syntax.and_then(|it| {
-        Some((ctx.analyzer.type_of(ctx.db, it.into())?, ctx.analyzer.resolve_variant(it)?))
+    let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
+        Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
     }) {
         Some(it) => it,
         _ => return,
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index 4aa84751f..b803271ab 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -16,11 +16,11 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) db: &'a db::RootDatabase,
     pub(super) analyzer: hir::SourceAnalyzer,
     pub(super) offset: TextUnit,
-    pub(super) token: SyntaxToken<'a>,
+    pub(super) token: SyntaxToken,
     pub(super) module: Option<hir::Module>,
-    pub(super) function_syntax: Option<&'a ast::FnDef>,
-    pub(super) use_item_syntax: Option<&'a ast::UseItem>,
-    pub(super) struct_lit_syntax: Option<&'a ast::StructLit>,
+    pub(super) function_syntax: Option<ast::FnDef>,
+    pub(super) use_item_syntax: Option<ast::UseItem>,
+    pub(super) struct_lit_syntax: Option<ast::StructLit>,
     pub(super) is_param: bool,
     /// If a name-binding or reference to a const in a pattern.
     /// Irrefutable patterns (like let) are excluded.
@@ -35,7 +35,7 @@ pub(crate) struct CompletionContext<'a> {
     /// Something is typed at the "top" level, in module or impl/trait.
     pub(super) is_new_item: bool,
     /// The receiver if this is a field or method access, i.e. writing something.<|>
-    pub(super) dot_receiver: Option<&'a ast::Expr>,
+    pub(super) dot_receiver: Option<ast::Expr>,
     /// If this is a call (method or function) in particular, i.e. the () are already there.
     pub(super) is_call: bool,
 }
@@ -50,7 +50,7 @@ impl<'a> CompletionContext<'a> {
         let token =
             find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
         let analyzer =
-            hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset));
+            hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
             db,
             analyzer,
@@ -109,7 +109,7 @@ impl<'a> CompletionContext<'a> {
         if is_node::<ast::BindPat>(name.syntax()) {
             let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
             let parent = bind_pat.syntax().parent();
-            if parent.and_then(ast::MatchArm::cast).is_some()
+            if parent.clone().and_then(ast::MatchArm::cast).is_some()
                 || parent.and_then(ast::Condition::cast).is_some()
             {
                 self.is_pat_binding = true;
@@ -122,7 +122,7 @@ impl<'a> CompletionContext<'a> {
         }
     }
 
-    fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) {
+    fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
         let name_range = name_ref.syntax().range();
         if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() {
             self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);
@@ -153,7 +153,7 @@ impl<'a> CompletionContext<'a> {
             None => return,
         };
 
-        if let Some(segment) = ast::PathSegment::cast(parent) {
+        if let Some(segment) = ast::PathSegment::cast(parent.clone()) {
             let path = segment.parent_path();
             self.is_call = path
                 .syntax()
@@ -162,7 +162,7 @@ impl<'a> CompletionContext<'a> {
                 .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast))
                 .is_some();
 
-            if let Some(mut path) = hir::Path::from_ast(path) {
+            if let Some(mut path) = hir::Path::from_ast(path.clone()) {
                 if !path.is_ident() {
                     path.segments.pop().unwrap();
                     self.path_prefix = Some(path);
@@ -179,7 +179,7 @@ impl<'a> CompletionContext<'a> {
                 .syntax()
                 .ancestors()
                 .find_map(|node| {
                     if let Some(stmt) = ast::ExprStmt::cast(node) {
-                    if let Some(stmt) = ast::ExprStmt::cast(node) {
+                    if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
                         return Some(stmt.syntax().range() == name_ref.syntax().range());
                     }
                     if let Some(block) = ast::Block::cast(node) {
@@ -203,7 +203,7 @@ impl<'a> CompletionContext<'a> {
                 }
             }
         }
-        if let Some(field_expr) = ast::FieldExpr::cast(parent) {
+        if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
            // The receiver comes before the point of insertion of the fake
            // ident, so it should have the same range in the non-modified file
            self.dot_receiver = field_expr
@@ -222,7 +222,7 @@ impl<'a> CompletionContext<'a> {
     }
 }
 
-fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
+fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
     find_covering_element(syntax, range).ancestors().find_map(N::cast)
 }
 
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs
index 3f5b9e0a0..affbad6cd 100644
--- a/crates/ra_ide_api/src/diagnostics.rs
+++ b/crates/ra_ide_api/src/diagnostics.rs
@@ -35,8 +35,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
     }));
 
     for node in parse.tree().syntax().descendants() {
-        check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
-        check_struct_shorthand_initialization(&mut res, file_id, node);
+        check_unnecessary_braces_in_use_statement(&mut res, file_id, &node);
+        check_struct_shorthand_initialization(&mut res, file_id, &node);
     }
     let res = RefCell::new(res);
     let mut sink = DiagnosticSink::new(|d| {
@@ -60,7 +60,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
         })
         .on::<hir::diagnostics::MissingFields, _>(|d| {
             let node = d.ast(db);
-            let mut ast_editor = AstEditor::new(&*node);
+            let mut ast_editor = AstEditor::new(node);
             for f in d.missed_fields.iter() {
                 ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f));
             }
@@ -94,11 +94,11 @@ fn check_unnecessary_braces_in_use_statement(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<()> {
-    let use_tree_list = ast::UseTreeList::cast(node)?;
+    let use_tree_list = ast::UseTreeList::cast(node.clone())?;
     if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
         let range = use_tree_list.syntax().range();
         let edit =
-            text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree)
+            text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
                 .unwrap_or_else(|| {
                     let to_replace = single_use_tree.syntax().text().to_string();
                     let mut edit_builder = TextEditBuilder::default();
@@ -141,7 +141,7 @@ fn check_struct_shorthand_initialization(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<()> {
-    let struct_lit = ast::StructLit::cast(node)?;
+    let struct_lit = ast::StructLit::cast(node.clone())?;
     let named_field_list = struct_lit.named_field_list()?;
     for named_field in named_field_list.fields() {
         if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) {
@@ -184,7 +184,7 @@ mod tests {
         let parse = SourceFile::parse(code);
         let mut diagnostics = Vec::new();
         for node in parse.tree().syntax().descendants() {
-            func(&mut diagnostics, FileId(0), node);
+            func(&mut diagnostics, FileId(0), &node);
         }
         assert!(diagnostics.is_empty());
     }
@@ -193,7 +193,7 @@ mod tests {
         let parse = SourceFile::parse(before);
         let mut diagnostics = Vec::new();
         for node in parse.tree().syntax().descendants() {
-            func(&mut diagnostics, FileId(0), node);
+            func(&mut diagnostics, FileId(0), &node);
         }
         let diagnostic =
             diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
diff --git a/crates/ra_ide_api/src/display/function_signature.rs b/crates/ra_ide_api/src/display/function_signature.rs
index e7ad5a0d1..644a4532b 100644
--- a/crates/ra_ide_api/src/display/function_signature.rs
+++ b/crates/ra_ide_api/src/display/function_signature.rs
@@ -38,7 +38,7 @@ impl FunctionSignature {
     pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
         let doc = function.docs(db);
         let ast_node = function.source(db).ast;
-        FunctionSignature::from(&*ast_node).with_doc_opt(doc)
+        FunctionSignature::from(&ast_node).with_doc_opt(doc)
     }
 }
 
diff --git a/crates/ra_ide_api/src/display/navigation_target.rs b/crates/ra_ide_api/src/display/navigation_target.rs
index 20a8d418e..8cc853dd1 100644
--- a/crates/ra_ide_api/src/display/navigation_target.rs
+++ b/crates/ra_ide_api/src/display/navigation_target.rs
@@ -5,7 +5,7 @@ use ra_syntax::{
     ast::{self, DocCommentsOwner},
     AstNode, AstPtr, SmolStr,
     SyntaxKind::{self, NAME},
-    SyntaxNode, TextRange, TreeArc,
+    SyntaxNode, TextRange,
 };
 
 use super::short_label::ShortLabel;
@@ -169,7 +169,7 @@ impl NavigationTarget {
         let file_id = src.file_id.original_file(db);
         match src.ast {
             FieldSource::Named(it) => {
-                NavigationTarget::from_named(file_id, &*it, it.doc_comment_text(), it.short_label())
+                NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label())
             }
             FieldSource::Pos(it) => {
                 NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None)
@@ -179,13 +179,13 @@ impl NavigationTarget {
 
     pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget
     where
-        D: HasSource<Ast = TreeArc<A>>,
+        D: HasSource<Ast = A>,
         A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
     {
         let src = def.source(db);
         NavigationTarget::from_named(
             src.file_id.original_file(db),
-            &*src.ast,
+            &src.ast,
             src.ast.doc_comment_text(),
             src.ast.short_label(),
         )
@@ -249,7 +249,7 @@ impl NavigationTarget {
         log::debug!("nav target {}", src.ast.syntax().debug_dump());
         NavigationTarget::from_named(
             src.file_id.original_file(db),
-            &*src.ast,
+            &src.ast,
             src.ast.doc_comment_text(),
             None,
         )
@@ -318,22 +318,18 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option
     let parse = db.parse(symbol.file_id);
     let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();
 
-    fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> {
-        node.doc_comment_text()
-    }
-
     visitor()
-        .visit(doc_comments::<ast::FnDef>)
-        .visit(doc_comments::<ast::StructDef>)
-        .visit(doc_comments::<ast::EnumDef>)
-        .visit(doc_comments::<ast::TraitDef>)
-        .visit(doc_comments::<ast::Module>)
-        .visit(doc_comments::<ast::TypeAliasDef>)
-        .visit(doc_comments::<ast::ConstDef>)
-        .visit(doc_comments::<ast::StaticDef>)
-        .visit(doc_comments::<ast::NamedFieldDef>)
-        .visit(doc_comments::<ast::EnumVariant>)
-        .visit(doc_comments::<ast::MacroCall>)
+        .visit(|it: ast::FnDef| it.doc_comment_text())
+        .visit(|it: ast::StructDef| it.doc_comment_text())
+        .visit(|it: ast::EnumDef| it.doc_comment_text())
+        .visit(|it: ast::TraitDef| it.doc_comment_text())
+        .visit(|it: ast::Module| it.doc_comment_text())
+        .visit(|it: ast::TypeAliasDef| it.doc_comment_text())
+        .visit(|it: ast::ConstDef| it.doc_comment_text())
+        .visit(|it: ast::StaticDef| it.doc_comment_text())
+        .visit(|it: ast::NamedFieldDef| it.doc_comment_text())
+        .visit(|it: ast::EnumVariant| it.doc_comment_text())
+        .visit(|it: ast::MacroCall| it.doc_comment_text())
         .accept(&node)?
 }
 
@@ -345,15 +341,15 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) ->
     let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();
 
     visitor()
-        .visit(|node: &ast::FnDef| node.short_label())
-        .visit(|node: &ast::StructDef| node.short_label())
-        .visit(|node: &ast::EnumDef| node.short_label())
-        .visit(|node: &ast::TraitDef| node.short_label())
-        .visit(|node: &ast::Module| node.short_label())
-        .visit(|node: &ast::TypeAliasDef| node.short_label())
-        .visit(|node: &ast::ConstDef| node.short_label())
-        .visit(|node: &ast::StaticDef| node.short_label())
-        .visit(|node: &ast::NamedFieldDef| node.short_label())
-        .visit(|node: &ast::EnumVariant| node.short_label())
+        .visit(|node: ast::FnDef| node.short_label())
+        .visit(|node: ast::StructDef| node.short_label())
+        .visit(|node: ast::EnumDef| node.short_label())
+        .visit(|node: ast::TraitDef| node.short_label())
+        .visit(|node: ast::Module| node.short_label())
+        .visit(|node: ast::TypeAliasDef| node.short_label())
+        .visit(|node: ast::ConstDef| node.short_label())
+        .visit(|node: ast::StaticDef| node.short_label())
+        .visit(|node: ast::NamedFieldDef| node.short_label())
+        .visit(|node: ast::EnumVariant| node.short_label())
        .accept(&node)?
 }
diff --git a/crates/ra_ide_api/src/display/structure.rs b/crates/ra_ide_api/src/display/structure.rs
index 638484a9b..2e183d2f6 100644
--- a/crates/ra_ide_api/src/display/structure.rs
+++ b/crates/ra_ide_api/src/display/structure.rs
@@ -24,14 +24,14 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
     for event in file.syntax().preorder() {
         match event {
             WalkEvent::Enter(node) => {
-                if let Some(mut symbol) = structure_node(node) {
+                if let Some(mut symbol) = structure_node(&node) {
                     symbol.parent = stack.last().copied();
                     stack.push(res.len());
                     res.push(symbol);
                 }
             }
             WalkEvent::Leave(node) => {
-                if structure_node(node).is_some() {
+                if structure_node(&node).is_some() {
                     stack.pop().unwrap();
                 }
             }
@@ -41,19 +41,20 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
 }
 
 fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
-    fn decl<N: NameOwner + AttrsOwner>(node: &N) -> Option<StructureNode> {
+    fn decl<N: NameOwner + AttrsOwner>(node: N) -> Option<StructureNode> {
         decl_with_detail(node, None)
     }
 
     fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>(
-        node: &N,
+        node: N,
     ) -> Option<StructureNode> {
-        decl_with_type_ref(node, node.ascribed_type())
+        let ty = node.ascribed_type();
+        decl_with_type_ref(node, ty)
     }
 
     fn decl_with_type_ref<N: NameOwner + AttrsOwner>(
-        node: &N,
-        type_ref: Option<&ast::TypeRef>,
+        node: N,
+        type_ref: Option<ast::TypeRef>,
     ) -> Option<StructureNode> {
         let detail = type_ref.map(|type_ref| {
             let mut detail = String::new();
@@ -64,7 +65,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
     }
 
     fn decl_with_detail<N: NameOwner + AttrsOwner>(
-        node: &N,
+        node: N,
         detail: Option<String>,
     ) -> Option<StructureNode> {
         let name = node.name()?;
@@ -82,22 +83,24 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
 
     fn collapse_ws(node: &SyntaxNode, output: &mut String) {
         let mut can_insert_ws = false;
-        for line in node.text().chunks().flat_map(|chunk| chunk.lines()) {
-            let line = line.trim();
-            if line.is_empty() {
-                if can_insert_ws {
-                    output.push_str(" ");
-                    can_insert_ws = false;
+        for chunk in node.text().chunks() {
+            for line in chunk.lines() {
+                let line = line.trim();
+                if line.is_empty() {
+                    if can_insert_ws {
+                        output.push_str(" ");
+                        can_insert_ws = false;
+                    }
+                } else {
+                    output.push_str(line);
+                    can_insert_ws = true;
                 }
-            } else {
-                output.push_str(line);
-                can_insert_ws = true;
             }
         }
     }
 
     visitor()
-        .visit(|fn_def: &ast::FnDef| {
+        .visit(|fn_def: ast::FnDef| {
             let mut detail = String::from("fn");
             if let Some(type_param_list) = fn_def.type_param_list() {
                 collapse_ws(type_param_list.syntax(), &mut detail);
@@ -117,11 +120,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
         .visit(decl::<ast::EnumVariant>)
         .visit(decl::<ast::TraitDef>)
         .visit(decl::<ast::Module>)
-        .visit(|td: &ast::TypeAliasDef| decl_with_type_ref(td, td.type_ref()))
+        .visit(|td: ast::TypeAliasDef| {
+            let ty = td.type_ref();
+            decl_with_type_ref(td, ty)
+        })
         .visit(decl_with_ascription::<ast::NamedFieldDef>)
         .visit(decl_with_ascription::<ast::ConstDef>)
         .visit(decl_with_ascription::<ast::StaticDef>)
-        .visit(|im: &ast::ImplBlock| {
+        .visit(|im: ast::ImplBlock| {
             let target_type = im.target_type()?;
             let target_trait = im.target_trait();
             let label = match target_trait {
@@ -142,14 +148,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
             };
             Some(node)
         })
-        .visit(|mc: &ast::MacroCall| {
+        .visit(|mc: ast::MacroCall| {
             let first_token = mc.syntax().first_token().unwrap();
             if first_token.text().as_str() != "macro_rules" {
                 return None;
             }
             decl(mc)
         })
-        .accept(node)?
+        .accept(&node)?
 }
 
 #[cfg(test)]
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs
index 491b15702..8c49960f5 100644
--- a/crates/ra_ide_api/src/extend_selection.rs
+++ b/crates/ra_ide_api/src/extend_selection.rs
@@ -42,7 +42,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         TokenAtOffset::None => return None,
         TokenAtOffset::Single(l) => {
             if string_kinds.contains(&l.kind()) {
-                extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range())
+                extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range())
             } else {
                 l.range()
             }
@@ -56,7 +56,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
     if token.range() != range {
         return Some(token.range());
     }
-    if let Some(comment) = ast::Comment::cast(token) {
+    if let Some(comment) = ast::Comment::cast(token.clone()) {
         if let Some(range) = extend_comments(comment) {
             return Some(range);
         }
@@ -73,7 +73,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
     let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
 
     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
-        if let Some(range) = extend_list_item(node) {
+        if let Some(range) = extend_list_item(&node) {
             return Some(range);
         }
     }
@@ -82,7 +82,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 }
 
 fn extend_single_word_in_comment_or_string(
-    leaf: SyntaxToken,
+    leaf: &SyntaxToken,
     offset: TextUnit,
 ) -> Option<TextRange> {
     let text: &str = leaf.text();
@@ -131,9 +131,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange
     ws.range()
 }
 
-fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> {
-    return if priority(r) > priority(l) { r } else { l };
-    fn priority(n: SyntaxToken) -> usize {
+fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
+    return if priority(&r) > priority(&l) { r } else { l };
+    fn priority(n: &SyntaxToken) -> usize {
         match n.kind() {
             WHITESPACE => 0,
             IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2,
@@ -156,7 +156,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
                 SyntaxElement::Token(it) => is_single_line_ws(it),
             })
            .next()
-            .and_then(|it| it.as_token())
+            .and_then(|it| it.as_token().cloned())
            .filter(|node| node.kind() == T![,])
     }
@@ -167,7 +167,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
        // Include any following whitespace when comma if after list item.
        let final_node = comma_node
            .next_sibling_or_token()
-            .and_then(|it| it.as_token())
+            .and_then(|it| it.as_token().cloned())
            .filter(|node| is_single_line_ws(node))
            .unwrap_or(comma_node);
 
@@ -178,8 +178,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
 }
 
 fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
-    let prev = adj_comments(comment, Direction::Prev);
-    let next = adj_comments(comment, Direction::Next);
+    let prev = adj_comments(&comment, Direction::Prev);
+    let next = adj_comments(&comment, Direction::Next);
     if prev != next {
         Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
     } else {
@@ -187,14 +187,14 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
     }
 }
 
-fn adj_comments(comment: ast::Comment, dir: Direction) -> ast::Comment {
-    let mut res = comment;
+fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
+    let mut res = comment.clone();
     for element in comment.syntax().siblings_with_tokens(dir) {
         let token = match element.as_token() {
             None => break,
             Some(token) => token,
         };
-        if let Some(c) = ast::Comment::cast(token) {
+        if let Some(c) = ast::Comment::cast(token.clone()) {
             res = c
         } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
             break;
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs
index 9d4855a64..c2b981aed 100644
--- a/crates/ra_ide_api/src/folding_ranges.rs
+++ b/crates/ra_ide_api/src/folding_ranges.rs
@@ -30,7 +30,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
     for element in file.syntax().descendants_with_tokens() {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(element.kind()) {
-            let is_multiline = match element {
+            let is_multiline = match &element {
                 SyntaxElement::Node(node) => node.text().contains('\n'),
                 SyntaxElement::Token(token) => token.text().contains('\n'),
             };
@@ -56,7 +56,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
             SyntaxElement::Node(node) => {
                 // Fold groups of imports
                 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
-                    if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
+                    if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
                         res.push(Fold { range, kind: FoldKind::Imports })
                     }
                 }
@@ -65,7 +65,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
                 if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node)
                 {
                     if let Some(range) =
-                        contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
+                        contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods)
                     {
                         res.push(Fold { range, kind: FoldKind::Mods })
                     }
@@ -88,24 +88,24 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
 }
 
 fn has_visibility(node: &SyntaxNode) -> bool {
-    ast::Module::cast(node).and_then(|m| m.visibility()).is_some()
+    ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some()
 }
 
-fn contiguous_range_for_group<'a>(
-    first: &'a SyntaxNode,
-    visited: &mut FxHashSet<&'a SyntaxNode>,
+fn contiguous_range_for_group(
+    first: &SyntaxNode,
+    visited: &mut FxHashSet<SyntaxNode>,
 ) -> Option<TextRange> {
     contiguous_range_for_group_unless(first, |_| false, visited)
 }
 
-fn contiguous_range_for_group_unless<'a>(
-    first: &'a SyntaxNode,
-    unless: impl Fn(&'a SyntaxNode) -> bool,
-    visited: &mut FxHashSet<&'a SyntaxNode>,
+fn contiguous_range_for_group_unless(
+    first: &SyntaxNode,
+    unless: impl Fn(&SyntaxNode) -> bool,
+    visited: &mut FxHashSet<SyntaxNode>,
 ) -> Option<TextRange> {
-    visited.insert(first);
+    visited.insert(first.clone());
 
-    let mut last = first;
+    let mut last = first.clone();
     for element in first.siblings_with_tokens(Direction::Next) {
         let node = match element {
             SyntaxElement::Token(token) => {
@@ -123,15 +123,15 @@ fn contiguous_range_for_group_unless<'a>(
         };
 
         // Stop if we find a node that doesn't belong to the group
-        if node.kind() != first.kind() || unless(node) {
+        if node.kind() != first.kind() || unless(&node) {
             break;
         }
 
-        visited.insert(node);
+        visited.insert(node.clone());
         last = node;
     }
 
-    if first != last {
+    if first != &last {
         Some(TextRange::from_to(first.range().start(), last.range().end()))
     } else {
         // The group consists of only one element, therefore it cannot be folded
@@ -139,11 +139,11 @@ fn contiguous_range_for_group_unless<'a>(
     }
 }
 
-fn contiguous_range_for_comment<'a>(
-    first: ast::Comment<'a>,
-    visited: &mut FxHashSet<ast::Comment<'a>>,
+fn contiguous_range_for_comment(
+    first: ast::Comment,
+    visited: &mut FxHashSet<ast::Comment>,
 ) -> Option<TextRange> {
-    visited.insert(first);
+    visited.insert(first.clone());
 
     // Only fold comments of the same flavor
     let group_kind = first.kind();
@@ -151,11 +151,11 @@ fn contiguous_range_for_comment<'a>(
         return None;
     }
 
-    let mut last = first;
+    let mut last = first.clone();
     for element in first.syntax().siblings_with_tokens(Direction::Next) {
         match element {
             SyntaxElement::Token(token) => {
-                if let Some(ws) = ast::Whitespace::cast(token) {
+                if let Some(ws) = ast::Whitespace::cast(token.clone()) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines
                         continue;
@@ -163,7 +163,7 @@ fn contiguous_range_for_comment<'a>(
                 }
                 if let Some(c) = ast::Comment::cast(token) {
                     if c.kind() == group_kind {
-                        visited.insert(c);
+                        visited.insert(c.clone());
                         last = c;
                         continue;
                     }
@@ -193,7 +193,7 @@ mod tests {
     fn do_check(text: &str, fold_kinds: &[FoldKind]) {
         let (ranges, text) = extract_ranges(text, "fold");
         let parse = SourceFile::parse(&text);
-        let folds = folding_ranges(parse.tree());
+        let folds = folding_ranges(&parse.tree());
 
         assert_eq!(
             folds.len(),
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs
index 1066bf155..82b5e3b5e 100644
--- a/crates/ra_ide_api/src/goto_definition.rs
+++ b/crates/ra_ide_api/src/goto_definition.rs
@@ -20,13 +20,13 @@ pub(crate) fn goto_definition(
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax();
-    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
-        let navs = reference_definition(db, position.file_id, name_ref).to_vec();
+    let syntax = parse.tree().syntax().clone();
+    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) {
+        let navs = reference_definition(db, position.file_id, &name_ref).to_vec();
         return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec()));
     }
-    if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
-        let navs = name_definition(db, position.file_id, name)?;
+    if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
+        let navs = name_definition(db, position.file_id, &name)?;
         return Some(RangeInfo::new(name.syntax().range(), navs));
     }
     None
@@ -94,7 +94,7 @@ pub(crate) fn name_definition(
 ) -> Option<Vec<NavigationTarget>> {
     let parent = name.syntax().parent()?;
 
-    if let Some(module) = ast::Module::cast(&parent) {
+    if let Some(module) = ast::Module::cast(parent.clone()) {
         if module.has_semi() {
             if let Some(child_module) =
                 hir::source_binder::module_from_declaration(db, file_id, module)
@@ -114,38 +114,88 @@ pub(crate) fn name_definition(
 
 fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
     visitor()
-        .visit(|node: &ast::StructDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::StructDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::EnumDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::EnumDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::EnumVariant| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::EnumVariant| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::FnDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::FnDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::TypeAliasDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::TypeAliasDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::ConstDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::ConstDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::StaticDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::StaticDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::TraitDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::TraitDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::NamedFieldDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::NamedFieldDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::Module| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::Module| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::MacroCall| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), None)
+        .visit(|node: ast::MacroCall| {
+            NavigationTarget::from_named(file_id, &node, node.doc_comment_text(), None)
        })
        .accept(node)
 }
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs
index 6ce5e214f..fc4b6e1af 100644
--- a/crates/ra_ide_api/src/goto_type_definition.rs
+++ b/crates/ra_ide_api/src/goto_type_definition.rs
@@ -13,15 +13,17 @@ pub(crate) fn goto_type_definition(
13 token 13 token
14 .parent() 14 .parent()
15 .ancestors() 15 .ancestors()
16 .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) 16 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
17 })?; 17 })?;
18 18
19 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, node, None); 19 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);
20 20
21 let ty: hir::Ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) 21 let ty: hir::Ty = if let Some(ty) =
22 ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
22 { 23 {
23 ty 24 ty
24 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { 25 } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p))
26 {
25 ty 27 ty
26 } else { 28 } else {
27 return None; 29 return None;
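As the hunk above shows, `cast` now consumes an owned `SyntaxNode`, so probing the same node for several AST types means cloning it first (clones are cheap handle copies), and analyzer methods take the typed node by reference. A small sketch of the same pattern against the `ra_syntax` surface used in this diff (hypothetical function name, syntax only, no `hir` involved):

    use ra_syntax::{ast, AstNode, SourceFile};

    // Find the first node that is either an expression or a pattern and return its text.
    fn first_expr_or_pat_text(source: &str) -> Option<String> {
        let file: SourceFile = SourceFile::parse(source).tree();
        let node = file
            .syntax()
            .descendants()
            .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
        Some(node.text().to_string())
    }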
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index 253d21f48..e503bf6a9 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -6,7 +6,7 @@ use ra_syntax::{
6 visit::{visitor, Visitor}, 6 visit::{visitor, Visitor},
7 }, 7 },
8 ast::{self, DocCommentsOwner}, 8 ast::{self, DocCommentsOwner},
9 AstNode, TreeArc, 9 AstNode,
10}; 10};
11 11
12use crate::{ 12use crate::{
@@ -104,7 +104,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
104 104
105 let mut no_fallback = false; 105 let mut no_fallback = false;
106 106
107 match classify_name_ref(db, &analyzer, name_ref) { 107 match classify_name_ref(db, &analyzer, &name_ref) {
108 Some(Method(it)) => res.extend(from_def_source(db, it)), 108 Some(Method(it)) => res.extend(from_def_source(db, it)),
109 Some(Macro(it)) => { 109 Some(Macro(it)) => {
110 let src = it.source(db); 110 let src = it.source(db);
@@ -163,7 +163,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
163 163
164 if res.is_empty() && !no_fallback { 164 if res.is_empty() && !no_fallback {
165 // Fallback index based approach: 165 // Fallback index based approach:
166 let symbols = crate::symbol_index::index_resolve(db, name_ref); 166 let symbols = crate::symbol_index::index_resolve(db, &name_ref);
167 for sym in symbols { 167 for sym in symbols {
168 let docs = docs_from_symbol(db, &sym); 168 let docs = docs_from_symbol(db, &sym);
169 let desc = description_from_symbol(db, &sym); 169 let desc = description_from_symbol(db, &sym);
@@ -177,34 +177,32 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
177 } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) { 177 } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
178 if let Some(parent) = name.syntax().parent() { 178 if let Some(parent) = name.syntax().parent() {
179 let text = visitor() 179 let text = visitor()
180 .visit(|node: &ast::StructDef| { 180 .visit(|node: ast::StructDef| {
181 hover_text(node.doc_comment_text(), node.short_label()) 181 hover_text(node.doc_comment_text(), node.short_label())
182 }) 182 })
183 .visit(|node: &ast::EnumDef| { 183 .visit(|node: ast::EnumDef| hover_text(node.doc_comment_text(), node.short_label()))
184 .visit(|node: ast::EnumVariant| {
184 hover_text(node.doc_comment_text(), node.short_label()) 185 hover_text(node.doc_comment_text(), node.short_label())
185 }) 186 })
186 .visit(|node: &ast::EnumVariant| { 187 .visit(|node: ast::FnDef| hover_text(node.doc_comment_text(), node.short_label()))
188 .visit(|node: ast::TypeAliasDef| {
187 hover_text(node.doc_comment_text(), node.short_label()) 189 hover_text(node.doc_comment_text(), node.short_label())
188 }) 190 })
189 .visit(|node: &ast::FnDef| hover_text(node.doc_comment_text(), node.short_label())) 191 .visit(|node: ast::ConstDef| {
190 .visit(|node: &ast::TypeAliasDef| {
191 hover_text(node.doc_comment_text(), node.short_label()) 192 hover_text(node.doc_comment_text(), node.short_label())
192 }) 193 })
193 .visit(|node: &ast::ConstDef| { 194 .visit(|node: ast::StaticDef| {
194 hover_text(node.doc_comment_text(), node.short_label()) 195 hover_text(node.doc_comment_text(), node.short_label())
195 }) 196 })
196 .visit(|node: &ast::StaticDef| { 197 .visit(|node: ast::TraitDef| {
197 hover_text(node.doc_comment_text(), node.short_label()) 198 hover_text(node.doc_comment_text(), node.short_label())
198 }) 199 })
199 .visit(|node: &ast::TraitDef| { 200 .visit(|node: ast::NamedFieldDef| {
200 hover_text(node.doc_comment_text(), node.short_label()) 201 hover_text(node.doc_comment_text(), node.short_label())
201 }) 202 })
202 .visit(|node: &ast::NamedFieldDef| { 203 .visit(|node: ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
203 hover_text(node.doc_comment_text(), node.short_label()) 204 .visit(|node: ast::MacroCall| hover_text(node.doc_comment_text(), None))
204 }) 205 .accept(&parent);
205 .visit(|node: &ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
206 .visit(|node: &ast::MacroCall| hover_text(node.doc_comment_text(), None))
207 .accept(parent);
208 206
209 if let Some(text) = text { 207 if let Some(text) = text {
210 res.extend(text); 208 res.extend(text);
@@ -217,8 +215,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
217 } 215 }
218 216
219 if range.is_none() { 217 if range.is_none() {
220 let node = ancestors_at_offset(file.syntax(), position.offset) 218 let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| {
221 .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())?; 219 ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
220 })?;
222 let frange = FileRange { file_id: position.file_id, range: node.range() }; 221 let frange = FileRange { file_id: position.file_id, range: node.range() };
223 res.extend(type_of(db, frange).map(rust_code_markup)); 222 res.extend(type_of(db, frange).map(rust_code_markup));
224 range = Some(node.range()); 223 range = Some(node.range());
@@ -233,7 +232,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
233 232
234 fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String> 233 fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
235 where 234 where
236 D: HasSource<Ast = TreeArc<A>>, 235 D: HasSource<Ast = A>,
237 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, 236 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
238 { 237 {
239 let src = def.source(db); 238 let src = def.source(db);
@@ -243,17 +242,17 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
243 242
244pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { 243pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
245 let parse = db.parse(frange.file_id); 244 let parse = db.parse(frange.file_id);
246 let syntax = parse.tree().syntax(); 245 let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
247 let leaf_node = find_covering_element(syntax, frange.range);
248 // if we picked identifier, expand to pattern/expression 246 // if we picked identifier, expand to pattern/expression
249 let node = leaf_node 247 let node = leaf_node
250 .ancestors() 248 .ancestors()
251 .take_while(|it| it.range() == leaf_node.range()) 249 .take_while(|it| it.range() == leaf_node.range())
252 .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?; 250 .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
253 let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None); 251 let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
254 let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) { 252 let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
253 {
255 ty 254 ty
256 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { 255 } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
257 ty 256 ty
258 } else { 257 } else {
259 return None; 258 return None;
diff --git a/crates/ra_ide_api/src/impls.rs b/crates/ra_ide_api/src/impls.rs
index 6d69f36aa..15999d433 100644
--- a/crates/ra_ide_api/src/impls.rs
+++ b/crates/ra_ide_api/src/impls.rs
@@ -9,19 +9,19 @@ pub(crate) fn goto_implementation(
9 position: FilePosition, 9 position: FilePosition,
10) -> Option<RangeInfo<Vec<NavigationTarget>>> { 10) -> Option<RangeInfo<Vec<NavigationTarget>>> {
11 let parse = db.parse(position.file_id); 11 let parse = db.parse(position.file_id);
12 let syntax = parse.tree().syntax(); 12 let syntax = parse.tree().syntax().clone();
13 13
14 let module = source_binder::module_from_position(db, position)?; 14 let module = source_binder::module_from_position(db, position)?;
15 15
16 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(syntax, position.offset) { 16 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
17 return Some(RangeInfo::new( 17 return Some(RangeInfo::new(
18 nominal_def.syntax().range(), 18 nominal_def.syntax().range(),
19 impls_for_def(db, nominal_def, module)?, 19 impls_for_def(db, &nominal_def, module)?,
20 )); 20 ));
21 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(syntax, position.offset) { 21 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
22 return Some(RangeInfo::new( 22 return Some(RangeInfo::new(
23 trait_def.syntax().range(), 23 trait_def.syntax().range(),
24 impls_for_trait(db, trait_def, module)?, 24 impls_for_trait(db, &trait_def, module)?,
25 )); 25 ));
26 } 26 }
27 27
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs
index e20cb1370..50bcfb5b7 100644
--- a/crates/ra_ide_api/src/join_lines.rs
+++ b/crates/ra_ide_api/src/join_lines.rs
@@ -27,7 +27,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
27 SyntaxElement::Token(token) => token.parent(), 27 SyntaxElement::Token(token) => token.parent(),
28 }; 28 };
29 let mut edit = TextEditBuilder::default(); 29 let mut edit = TextEditBuilder::default();
30 for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) { 30 for token in node.descendants_with_tokens().filter_map(|it| it.as_token().cloned()) {
31 let range = match range.intersection(&token.range()) { 31 let range = match range.intersection(&token.range()) {
32 Some(range) => range, 32 Some(range) => range,
33 None => continue, 33 None => continue,
@@ -37,7 +37,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
37 let pos: TextUnit = (pos as u32).into(); 37 let pos: TextUnit = (pos as u32).into();
38 let off = token.range().start() + range.start() + pos; 38 let off = token.range().start() + range.start() + pos;
39 if !edit.invalidates_offset(off) { 39 if !edit.invalidates_offset(off) {
40 remove_newline(&mut edit, token, off); 40 remove_newline(&mut edit, &token, off);
41 } 41 }
42 } 42 }
43 } 43 }
@@ -45,7 +45,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
45 edit.finish() 45 edit.finish()
46} 46}
47 47
48fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) { 48fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) {
49 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { 49 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
50 // The node is either the first or the last in the file 50 // The node is either the first or the last in the file
51 let suff = &token.text()[TextRange::from_to( 51 let suff = &token.text()[TextRange::from_to(
@@ -98,9 +98,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
98 TextRange::from_to(prev.range().start(), token.range().end()), 98 TextRange::from_to(prev.range().start(), token.range().end()),
99 space.to_string(), 99 space.to_string(),
100 ); 100 );
101 } else if let (Some(_), Some(next)) = 101 } else if let (Some(_), Some(next)) = (
102 (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast)) 102 prev.as_token().cloned().and_then(ast::Comment::cast),
103 { 103 next.as_token().cloned().and_then(ast::Comment::cast),
104 ) {
104 // Removes: newline (incl. surrounding whitespace), start of the next comment 105 // Removes: newline (incl. surrounding whitespace), start of the next comment
105 edit.delete(TextRange::from_to( 106 edit.delete(TextRange::from_to(
106 token.range().start(), 107 token.range().start(),
@@ -113,16 +114,16 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
113} 114}
114 115
115fn has_comma_after(node: &SyntaxNode) -> bool { 116fn has_comma_after(node: &SyntaxNode) -> bool {
116 match non_trivia_sibling(node.into(), Direction::Next) { 117 match non_trivia_sibling(node.clone().into(), Direction::Next) {
117 Some(n) => n.kind() == T![,], 118 Some(n) => n.kind() == T![,],
118 _ => false, 119 _ => false,
119 } 120 }
120} 121}
121 122
122fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { 123fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
123 let block = ast::Block::cast(token.parent())?; 124 let block = ast::Block::cast(token.parent())?;
124 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; 125 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
125 let expr = extract_trivial_expression(block)?; 126 let expr = extract_trivial_expression(&block)?;
126 127
127 let block_range = block_expr.syntax().range(); 128 let block_range = block_expr.syntax().range();
128 let mut buf = expr.syntax().text().to_string(); 129 let mut buf = expr.syntax().text().to_string();
@@ -139,7 +140,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Opt
139 Some(()) 140 Some(())
140} 141}
141 142
142fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { 143fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
143 let use_tree_list = ast::UseTreeList::cast(token.parent())?; 144 let use_tree_list = ast::UseTreeList::cast(token.parent())?;
144 let (tree,) = use_tree_list.use_trees().collect_tuple()?; 145 let (tree,) = use_tree_list.use_trees().collect_tuple()?;
145 edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string()); 146 edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());
@@ -504,7 +505,7 @@ fn foo() {
504 fn check_join_lines_sel(before: &str, after: &str) { 505 fn check_join_lines_sel(before: &str, after: &str) {
505 let (sel, before) = extract_range(before); 506 let (sel, before) = extract_range(before);
506 let parse = SourceFile::parse(&before); 507 let parse = SourceFile::parse(&before);
507 let result = join_lines(parse.tree(), sel); 508 let result = join_lines(&parse.tree(), sel);
508 let actual = result.apply(&before); 509 let actual = result.apply(&before);
509 assert_eq_text!(after, &actual); 510 assert_eq_text!(after, &actual);
510 } 511 }
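`join_lines` illustrates the token side of the change: element iterators yield `SyntaxElement`s whose `as_token()` returns a borrowed `&SyntaxToken`, so obtaining an owned token now needs `.cloned()`, and the helpers take `&SyntaxToken` instead of a by-value copyable token. Roughly, collecting the whitespace tokens under a node looks like this (hypothetical helper name; `WHITESPACE` is the same `SyntaxKind` checked in the hunk above):

    use ra_syntax::{SyntaxKind::WHITESPACE, SyntaxNode, SyntaxToken};

    // Collect owned whitespace tokens under `node`; `as_token()` only borrows, hence `.cloned()`.
    fn whitespace_tokens(node: &SyntaxNode) -> Vec<SyntaxToken> {
        node.descendants_with_tokens()
            .filter_map(|element| element.as_token().cloned())
            .filter(|token| token.kind() == WHITESPACE)
            .collect()
    }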
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs
index 9f3b18d9d..c54d574bc 100644
--- a/crates/ra_ide_api/src/lib.rs
+++ b/crates/ra_ide_api/src/lib.rs
@@ -50,7 +50,7 @@ use ra_db::{
50 salsa::{self, ParallelDatabase}, 50 salsa::{self, ParallelDatabase},
51 CheckCanceled, SourceDatabase, 51 CheckCanceled, SourceDatabase,
52}; 52};
53use ra_syntax::{SourceFile, TextRange, TextUnit, TreeArc}; 53use ra_syntax::{SourceFile, TextRange, TextUnit};
54use ra_text_edit::TextEdit; 54use ra_text_edit::TextEdit;
55use relative_path::RelativePathBuf; 55use relative_path::RelativePathBuf;
56 56
@@ -325,8 +325,8 @@ impl Analysis {
325 } 325 }
326 326
327 /// Gets the syntax tree of the file. 327 /// Gets the syntax tree of the file.
328 pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> { 328 pub fn parse(&self, file_id: FileId) -> SourceFile {
329 self.db.parse(file_id).tree().to_owned() 329 self.db.parse(file_id).tree()
330 } 330 }
331 331
332 /// Gets the file's `LineIndex`: data structure to convert between absolute 332 /// Gets the file's `LineIndex`: data structure to convert between absolute
@@ -360,7 +360,7 @@ impl Analysis {
360 let parse = self.db.parse(frange.file_id); 360 let parse = self.db.parse(frange.file_id);
361 let file_edit = SourceFileEdit { 361 let file_edit = SourceFileEdit {
362 file_id: frange.file_id, 362 file_id: frange.file_id,
363 edit: join_lines::join_lines(parse.tree(), frange.range), 363 edit: join_lines::join_lines(&parse.tree(), frange.range),
364 }; 364 };
365 SourceChange::source_file_edit("join lines", file_edit) 365 SourceChange::source_file_edit("join lines", file_edit)
366 } 366 }
@@ -393,13 +393,13 @@ impl Analysis {
393 /// file outline. 393 /// file outline.
394 pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { 394 pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
395 let parse = self.db.parse(file_id); 395 let parse = self.db.parse(file_id);
396 file_structure(parse.tree()) 396 file_structure(&parse.tree())
397 } 397 }
398 398
399 /// Returns the set of folding ranges. 399 /// Returns the set of folding ranges.
400 pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { 400 pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
401 let parse = self.db.parse(file_id); 401 let parse = self.db.parse(file_id);
402 folding_ranges::folding_ranges(parse.tree()) 402 folding_ranges::folding_ranges(&parse.tree())
403 } 403 }
404 404
405 /// Fuzzy searches for a symbol. 405 /// Fuzzy searches for a symbol.
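With `TreeArc` gone from `ra_syntax`, the public `Analysis::parse` hands back an owned `SourceFile` directly, and the internal helpers take `&SourceFile` instead of a lifetime-carrying reference. A caller-side sketch (the `FileId` re-export from `ra_ide_api` and the helper name are assumptions, not shown in this diff):

    use ra_ide_api::{Analysis, FileId};
    use ra_syntax::{ast, AstNode, SourceFile};

    // Count the `fn` items in a file through the public API.
    fn count_fns(analysis: &Analysis, file_id: FileId) -> usize {
        let file: SourceFile = analysis.parse(file_id); // owned tree, no TreeArc<SourceFile>
        file.syntax().descendants().filter_map(ast::FnDef::cast).count()
    }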
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs
index 455a5c891..102327fd7 100644
--- a/crates/ra_ide_api/src/matching_brace.rs
+++ b/crates/ra_ide_api/src/matching_brace.rs
@@ -26,7 +26,7 @@ mod tests {
26 fn do_check(before: &str, after: &str) { 26 fn do_check(before: &str, after: &str) {
27 let (pos, before) = extract_offset(before); 27 let (pos, before) = extract_offset(before);
28 let parse = SourceFile::parse(&before); 28 let parse = SourceFile::parse(&before);
29 let new_pos = match matching_brace(parse.tree(), pos) { 29 let new_pos = match matching_brace(&parse.tree(), pos) {
30 None => pos, 30 None => pos,
31 Some(pos) => pos, 31 Some(pos) => pos,
32 }; 32 };
diff --git a/crates/ra_ide_api/src/name_ref_kind.rs b/crates/ra_ide_api/src/name_ref_kind.rs
index 67381c9c8..6832acf5d 100644
--- a/crates/ra_ide_api/src/name_ref_kind.rs
+++ b/crates/ra_ide_api/src/name_ref_kind.rs
@@ -26,7 +26,7 @@ pub(crate) fn classify_name_ref(
26 // Check if it is a method 26 // Check if it is a method
27 if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) { 27 if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
28 tested_by!(goto_definition_works_for_methods); 28 tested_by!(goto_definition_works_for_methods);
29 if let Some(func) = analyzer.resolve_method_call(method_call) { 29 if let Some(func) = analyzer.resolve_method_call(&method_call) {
30 return Some(Method(func)); 30 return Some(Method(func));
31 } 31 }
32 } 32 }
@@ -40,7 +40,7 @@ pub(crate) fn classify_name_ref(
40 .and_then(ast::MacroCall::cast) 40 .and_then(ast::MacroCall::cast)
41 { 41 {
42 tested_by!(goto_definition_works_for_macros); 42 tested_by!(goto_definition_works_for_macros);
43 if let Some(mac) = analyzer.resolve_macro_call(db, macro_call) { 43 if let Some(mac) = analyzer.resolve_macro_call(db, &macro_call) {
44 return Some(Macro(mac)); 44 return Some(Macro(mac));
45 } 45 }
46 } 46 }
@@ -48,7 +48,7 @@ pub(crate) fn classify_name_ref(
48 // It could also be a field access 48 // It could also be a field access
49 if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) { 49 if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
50 tested_by!(goto_definition_works_for_fields); 50 tested_by!(goto_definition_works_for_fields);
51 if let Some(field) = analyzer.resolve_field(field_expr) { 51 if let Some(field) = analyzer.resolve_field(&field_expr) {
52 return Some(FieldAccess(field)); 52 return Some(FieldAccess(field));
53 }; 53 };
54 } 54 }
@@ -59,7 +59,7 @@ pub(crate) fn classify_name_ref(
59 59
60 let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast); 60 let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);
61 61
62 if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) { 62 if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, &lit.into())) {
63 if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() { 63 if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
64 let hir_path = hir::Path::from_name_ref(name_ref); 64 let hir_path = hir::Path::from_name_ref(name_ref);
65 let hir_name = hir_path.as_ident().unwrap(); 65 let hir_name = hir_path.as_ident().unwrap();
@@ -73,7 +73,7 @@ pub(crate) fn classify_name_ref(
73 73
74 // General case, a path or a local: 74 // General case, a path or a local:
75 if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) { 75 if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
76 if let Some(resolved) = analyzer.resolve_path(db, path) { 76 if let Some(resolved) = analyzer.resolve_path(db, &path) {
77 return match resolved { 77 return match resolved {
78 hir::PathResolution::Def(def) => Some(Def(def)), 78 hir::PathResolution::Def(def) => Some(Def(def)),
79 hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)), 79 hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)),
diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs
index 0af1ae811..1c4cd49dc 100644
--- a/crates/ra_ide_api/src/references.rs
+++ b/crates/ra_ide_api/src/references.rs
@@ -50,11 +50,11 @@ pub(crate) fn find_all_refs(
50 position: FilePosition, 50 position: FilePosition,
51) -> Option<ReferenceSearchResult> { 51) -> Option<ReferenceSearchResult> {
52 let parse = db.parse(position.file_id); 52 let parse = db.parse(position.file_id);
53 let (binding, analyzer) = find_binding(db, parse.tree(), position)?; 53 let (binding, analyzer) = find_binding(db, &parse.tree(), position)?;
54 let declaration = NavigationTarget::from_bind_pat(position.file_id, binding); 54 let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding);
55 55
56 let references = analyzer 56 let references = analyzer
57 .find_all_refs(binding) 57 .find_all_refs(&binding)
58 .into_iter() 58 .into_iter()
59 .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range }) 59 .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range })
60 .collect::<Vec<_>>(); 60 .collect::<Vec<_>>();
@@ -63,9 +63,9 @@ pub(crate) fn find_all_refs(
63 63
64 fn find_binding<'a>( 64 fn find_binding<'a>(
65 db: &RootDatabase, 65 db: &RootDatabase,
66 source_file: &'a SourceFile, 66 source_file: &SourceFile,
67 position: FilePosition, 67 position: FilePosition,
68 ) -> Option<(&'a ast::BindPat, hir::SourceAnalyzer)> { 68 ) -> Option<(ast::BindPat, hir::SourceAnalyzer)> {
69 let syntax = source_file.syntax(); 69 let syntax = source_file.syntax();
70 if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) { 70 if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
71 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None); 71 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None);
@@ -73,7 +73,7 @@ pub(crate) fn find_all_refs(
73 }; 73 };
74 let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?; 74 let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
75 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); 75 let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
76 let resolved = analyzer.resolve_local_name(name_ref)?; 76 let resolved = analyzer.resolve_local_name(&name_ref)?;
77 if let Either::A(ptr) = resolved.ptr() { 77 if let Either::A(ptr) = resolved.ptr() {
78 if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() { 78 if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() {
79 return Some((binding, analyzer)); 79 return Some((binding, analyzer));
@@ -89,10 +89,10 @@ pub(crate) fn rename(
89 new_name: &str, 89 new_name: &str,
90) -> Option<SourceChange> { 90) -> Option<SourceChange> {
91 let parse = db.parse(position.file_id); 91 let parse = db.parse(position.file_id);
92 let syntax = parse.tree().syntax(); 92 if let Some((ast_name, ast_module)) =
93 93 find_name_and_module_at_offset(parse.tree().syntax(), position)
94 if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { 94 {
95 rename_mod(db, ast_name, ast_module, position, new_name) 95 rename_mod(db, &ast_name, &ast_module, position, new_name)
96 } else { 96 } else {
97 rename_reference(db, position, new_name) 97 rename_reference(db, position, new_name)
98 } 98 }
@@ -101,14 +101,10 @@ pub(crate) fn rename(
101fn find_name_and_module_at_offset( 101fn find_name_and_module_at_offset(
102 syntax: &SyntaxNode, 102 syntax: &SyntaxNode,
103 position: FilePosition, 103 position: FilePosition,
104) -> Option<(&ast::Name, &ast::Module)> { 104) -> Option<(ast::Name, ast::Module)> {
105 let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset); 105 let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset)?;
106 let ast_name_parent = ast::Module::cast(ast_name?.syntax().parent()?); 106 let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?;
107 107 Some((ast_name, ast_module))
108 if let (Some(ast_module), Some(name)) = (ast_name_parent, ast_name) {
109 return Some((name, ast_module));
110 }
111 None
112} 108}
113 109
114fn source_edit_from_fileid_range( 110fn source_edit_from_fileid_range(
@@ -135,7 +131,8 @@ fn rename_mod(
135) -> Option<SourceChange> { 131) -> Option<SourceChange> {
136 let mut source_file_edits = Vec::new(); 132 let mut source_file_edits = Vec::new();
137 let mut file_system_edits = Vec::new(); 133 let mut file_system_edits = Vec::new();
138 if let Some(module) = source_binder::module_from_declaration(db, position.file_id, &ast_module) 134 if let Some(module) =
135 source_binder::module_from_declaration(db, position.file_id, ast_module.clone())
139 { 136 {
140 let src = module.definition_source(db); 137 let src = module.definition_source(db);
141 let file_id = src.file_id.as_original_file(); 138 let file_id = src.file_id.as_original_file();
diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs
index 8cb859b37..200958434 100644
--- a/crates/ra_ide_api/src/runnables.rs
+++ b/crates/ra_ide_api/src/runnables.rs
@@ -26,8 +26,8 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
26 parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() 26 parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
27} 27}
28 28
29fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> { 29fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> {
30 if let Some(fn_def) = ast::FnDef::cast(item) { 30 if let Some(fn_def) = ast::FnDef::cast(item.clone()) {
31 runnable_fn(fn_def) 31 runnable_fn(fn_def)
32 } else if let Some(m) = ast::Module::cast(item) { 32 } else if let Some(m) = ast::Module::cast(item) {
33 runnable_mod(db, file_id, m) 33 runnable_mod(db, file_id, m)
@@ -36,8 +36,8 @@ fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Run
36 } 36 }
37} 37}
38 38
39fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> { 39fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> {
40 let name = fn_def.name()?.text(); 40 let name = fn_def.name()?.text().clone();
41 let kind = if name == "main" { 41 let kind = if name == "main" {
42 RunnableKind::Bin 42 RunnableKind::Bin
43 } else if fn_def.has_atom_attr("test") { 43 } else if fn_def.has_atom_attr("test") {
@@ -50,7 +50,7 @@ fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
50 Some(Runnable { range: fn_def.syntax().range(), kind }) 50 Some(Runnable { range: fn_def.syntax().range(), kind })
51} 51}
52 52
53fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> { 53fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> {
54 let has_test_function = module 54 let has_test_function = module
55 .item_list()? 55 .item_list()?
56 .items() 56 .items()
diff --git a/crates/ra_ide_api/src/status.rs b/crates/ra_ide_api/src/status.rs
index a31e15245..d533d1742 100644
--- a/crates/ra_ide_api/src/status.rs
+++ b/crates/ra_ide_api/src/status.rs
@@ -104,7 +104,7 @@ impl FromIterator<TableEntry<MacroFile, Option<Parse<SyntaxNode>>>> for SyntaxTr
104 let mut res = SyntaxTreeStats::default(); 104 let mut res = SyntaxTreeStats::default();
105 for entry in iter { 105 for entry in iter {
106 res.total += 1; 106 res.total += 1;
107 if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.tree().to_owned()) { 107 if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.syntax_node()) {
108 res.retained += 1; 108 res.retained += 1;
109 res.retained_size += tree.memory_size_of_subtree(); 109 res.retained_size += tree.memory_size_of_subtree();
110 } 110 }
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs
index 9b3a45319..e784b5f69 100644
--- a/crates/ra_ide_api/src/symbol_index.rs
+++ b/crates/ra_ide_api/src/symbol_index.rs
@@ -61,7 +61,7 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex>
61 db.check_canceled(); 61 db.check_canceled();
62 let parse = db.parse(file_id); 62 let parse = db.parse(file_id);
63 63
64 let symbols = source_file_to_file_symbols(parse.tree(), file_id); 64 let symbols = source_file_to_file_symbols(&parse.tree(), file_id);
65 65
66 // FIXME: add macros here 66 // FIXME: add macros here
67 67
@@ -173,7 +173,7 @@ impl SymbolIndex {
173 files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>, 173 files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>,
174 ) -> SymbolIndex { 174 ) -> SymbolIndex {
175 let symbols = files 175 let symbols = files
176 .flat_map(|(file_id, file)| source_file_to_file_symbols(file.tree(), file_id)) 176 .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id))
177 .collect::<Vec<_>>(); 177 .collect::<Vec<_>>();
178 SymbolIndex::new(symbols) 178 SymbolIndex::new(symbols)
179 } 179 }
@@ -249,7 +249,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
249 for event in source_file.syntax().preorder() { 249 for event in source_file.syntax().preorder() {
250 match event { 250 match event {
251 WalkEvent::Enter(node) => { 251 WalkEvent::Enter(node) => {
252 if let Some(mut symbol) = to_file_symbol(node, file_id) { 252 if let Some(mut symbol) = to_file_symbol(&node, file_id) {
253 symbol.container_name = stack.last().cloned(); 253 symbol.container_name = stack.last().cloned();
254 254
255 stack.push(symbol.name.clone()); 255 stack.push(symbol.name.clone());
@@ -258,7 +258,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
258 } 258 }
259 259
260 WalkEvent::Leave(node) => { 260 WalkEvent::Leave(node) => {
261 if to_symbol(node).is_some() { 261 if to_symbol(&node).is_some() {
262 stack.pop(); 262 stack.pop();
263 } 263 }
264 } 264 }
@@ -269,7 +269,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
269} 269}
270 270
271fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { 271fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
272 fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { 272 fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
273 let name = node.name()?; 273 let name = node.name()?;
274 let name_range = name.syntax().range(); 274 let name_range = name.syntax().range();
275 let name = name.text().clone(); 275 let name = name.text().clone();
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs
index eb392d5da..16a728789 100644
--- a/crates/ra_ide_api/src/syntax_highlighting.rs
+++ b/crates/ra_ide_api/src/syntax_highlighting.rs
@@ -31,8 +31,8 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
31 } 31 }
32} 32}
33 33
34fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &ast::Pat) -> bool { 34fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: ast::Pat) -> bool {
35 let ty = analyzer.type_of_pat(db, pat).unwrap_or(Ty::Unknown); 35 let ty = analyzer.type_of_pat(db, &pat).unwrap_or(Ty::Unknown);
36 let is_ty_mut = { 36 let is_ty_mut = {
37 if let Some((_, mutability)) = ty.as_reference() { 37 if let Some((_, mutability)) = ty.as_reference() {
38 match mutability { 38 match mutability {
@@ -55,7 +55,7 @@ fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &
55pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { 55pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
56 let _p = profile("highlight"); 56 let _p = profile("highlight");
57 let parse = db.parse(file_id); 57 let parse = db.parse(file_id);
58 let root = parse.tree().syntax(); 58 let root = parse.tree().syntax().clone();
59 59
60 fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 { 60 fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 {
61 fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { 61 fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
@@ -70,6 +70,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
70 } 70 }
71 71
72 // Visited nodes to handle highlighting priorities 72 // Visited nodes to handle highlighting priorities
73 // FIXME: retain only ranges here
73 let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default(); 74 let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default();
74 let mut bindings_shadow_count: FxHashMap<SmolStr, u32> = FxHashMap::default(); 75 let mut bindings_shadow_count: FxHashMap<SmolStr, u32> = FxHashMap::default();
75 76
@@ -84,14 +85,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
84 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string", 85 STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string",
85 ATTR => "attribute", 86 ATTR => "attribute",
86 NAME_REF => { 87 NAME_REF => {
87 if let Some(name_ref) = node.as_node().and_then(ast::NameRef::cast) { 88 if let Some(name_ref) = node.as_node().cloned().and_then(ast::NameRef::cast) {
88 // FIXME: revisit this after #1340 89 // FIXME: revisit this after #1340
89 use crate::name_ref_kind::{classify_name_ref, NameRefKind::*}; 90 use crate::name_ref_kind::{classify_name_ref, NameRefKind::*};
90 use hir::{ImplItem, ModuleDef}; 91 use hir::{ImplItem, ModuleDef};
91 92
92 // FIXME: try to reuse the SourceAnalyzers 93 // FIXME: try to reuse the SourceAnalyzers
93 let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); 94 let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
94 match classify_name_ref(db, &analyzer, name_ref) { 95 match classify_name_ref(db, &analyzer, &name_ref) {
95 Some(Method(_)) => "function", 96 Some(Method(_)) => "function",
96 Some(Macro(_)) => "macro", 97 Some(Macro(_)) => "macro",
97 Some(FieldAccess(_)) => "field", 98 Some(FieldAccess(_)) => "field",
@@ -113,13 +114,13 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
113 Some(Pat(ptr)) => { 114 Some(Pat(ptr)) => {
114 binding_hash = Some({ 115 binding_hash = Some({
115 let text = 116 let text =
116 ptr.syntax_node_ptr().to_node(root).text().to_smol_string(); 117 ptr.syntax_node_ptr().to_node(&root).text().to_smol_string();
117 let shadow_count = 118 let shadow_count =
118 bindings_shadow_count.entry(text.clone()).or_default(); 119 bindings_shadow_count.entry(text.clone()).or_default();
119 calc_binding_hash(file_id, &text, *shadow_count) 120 calc_binding_hash(file_id, &text, *shadow_count)
120 }); 121 });
121 122
122 if is_variable_mutable(db, &analyzer, ptr.to_node(root)) { 123 if is_variable_mutable(db, &analyzer, ptr.to_node(&root)) {
123 "variable.mut" 124 "variable.mut"
124 } else { 125 } else {
125 "variable" 126 "variable"
@@ -134,7 +135,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
134 } 135 }
135 } 136 }
136 NAME => { 137 NAME => {
137 if let Some(name) = node.as_node().and_then(ast::Name::cast) { 138 if let Some(name) = node.as_node().cloned().and_then(ast::Name::cast) {
138 let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None); 139 let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None);
139 if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) { 140 if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) {
140 binding_hash = Some({ 141 binding_hash = Some({
@@ -176,12 +177,11 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
176 k if is_control_keyword(k) => "keyword.control", 177 k if is_control_keyword(k) => "keyword.control",
177 k if k.is_keyword() => "keyword", 178 k if k.is_keyword() => "keyword",
178 _ => { 179 _ => {
179 // let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); 180 if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) {
180 if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) {
181 if let Some(path) = macro_call.path() { 181 if let Some(path) = macro_call.path() {
182 if let Some(segment) = path.segment() { 182 if let Some(segment) = path.segment() {
183 if let Some(name_ref) = segment.name_ref() { 183 if let Some(name_ref) = segment.name_ref() {
184 highlighted.insert(name_ref.syntax().into()); 184 highlighted.insert(name_ref.syntax().clone().into());
185 let range_start = name_ref.syntax().range().start(); 185 let range_start = name_ref.syntax().range().start();
186 let mut range_end = name_ref.syntax().range().end(); 186 let mut range_end = name_ref.syntax().range().end();
187 for sibling in path.syntax().siblings_with_tokens(Direction::Next) { 187 for sibling in path.syntax().siblings_with_tokens(Direction::Next) {
@@ -230,7 +230,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
230 let mut buf = String::new(); 230 let mut buf = String::new();
231 buf.push_str(&STYLE); 231 buf.push_str(&STYLE);
232 buf.push_str("<pre><code>"); 232 buf.push_str("<pre><code>");
233 let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token()); 233 let tokens =
234 parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token().cloned());
234 for token in tokens { 235 for token in tokens {
235 could_intersect.retain(|it| token.range().start() <= it.range.end()); 236 could_intersect.retain(|it| token.range().start() <= it.range.end());
236 while let Some(r) = ranges.get(frontier) { 237 while let Some(r) = ranges.get(frontier) {
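In the highlighter, both directions of the node/element conversion now involve a clone: `as_node()` on a `SyntaxElement` borrows, so `.cloned()` is needed before `cast`, and turning a borrowed `&SyntaxNode` back into an element goes through `.clone().into()`. The same idea in isolation (hypothetical helper; a `Vec` stands in for the `FxHashSet` used above):

    use ra_syntax::{ast, AstNode, SyntaxElement};

    // Record a name reference in a list of highlighted elements.
    fn mark_name_ref(element: &SyntaxElement, highlighted: &mut Vec<SyntaxElement>) -> bool {
        if let Some(name_ref) = element.as_node().cloned().and_then(ast::NameRef::cast) {
            highlighted.push(name_ref.syntax().clone().into());
            true
        } else {
            false
        }
    }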
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs
index b3e08c041..3d7373d02 100644
--- a/crates/ra_ide_api/src/syntax_tree.rs
+++ b/crates/ra_ide_api/src/syntax_tree.rs
@@ -18,7 +18,7 @@ pub(crate) fn syntax_tree(
18 let node = match algo::find_covering_element(parse.tree().syntax(), text_range) { 18 let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
19 SyntaxElement::Node(node) => node, 19 SyntaxElement::Node(node) => node,
20 SyntaxElement::Token(token) => { 20 SyntaxElement::Token(token) => {
21 if let Some(tree) = syntax_tree_for_string(token, text_range) { 21 if let Some(tree) = syntax_tree_for_string(&token, text_range) {
22 return tree; 22 return tree;
23 } 23 }
24 token.parent() 24 token.parent()
@@ -33,7 +33,7 @@ pub(crate) fn syntax_tree(
33 33
34/// Attempts parsing the selected contents of a string literal 34/// Attempts parsing the selected contents of a string literal
35/// as rust syntax and returns its syntax tree 35/// as rust syntax and returns its syntax tree
36fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> { 36fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
37 // When the range is inside a string 37 // When the range is inside a string
38 // we'll attempt parsing it as rust syntax 38 // we'll attempt parsing it as rust syntax
39 // to provide the syntax tree of the contents of the string 39 // to provide the syntax tree of the contents of the string
@@ -43,7 +43,7 @@ fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<S
43 } 43 }
44} 44}
45 45
46fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> { 46fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
47 // Range of the full node 47 // Range of the full node
48 let node_range = node.range(); 48 let node_range = node.range();
49 let text = node.text().to_string(); 49 let text = node.text().to_string();
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs
index 01eb32b2f..ad0ababcc 100644
--- a/crates/ra_ide_api/src/typing.rs
+++ b/crates/ra_ide_api/src/typing.rs
@@ -1,15 +1,16 @@
1use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
2use ra_db::{FilePosition, SourceDatabase}; 1use ra_db::{FilePosition, SourceDatabase};
3use ra_fmt::leading_indent; 2use ra_fmt::leading_indent;
4use ra_syntax::{ 3use ra_syntax::{
5 algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset}, 4 algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
6 ast::{self, AstToken}, 5 ast::{self, AstToken},
7 AstNode, SourceFile, 6 AstNode, SmolStr, SourceFile,
8 SyntaxKind::*, 7 SyntaxKind::*,
9 SyntaxToken, TextRange, TextUnit, 8 SyntaxToken, TextRange, TextUnit,
10}; 9};
11use ra_text_edit::{TextEdit, TextEditBuilder}; 10use ra_text_edit::{TextEdit, TextEditBuilder};
12 11
12use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
13
13pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { 14pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
14 let parse = db.parse(position.file_id); 15 let parse = db.parse(position.file_id);
15 let file = parse.tree(); 16 let file = parse.tree();
@@ -43,15 +44,15 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
43 ) 44 )
44} 45}
45 46
46fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> { 47fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
47 let ws = match find_token_at_offset(file.syntax(), token.range().start()) { 48 let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
48 TokenAtOffset::Between(l, r) => { 49 TokenAtOffset::Between(l, r) => {
49 assert!(r == token); 50 assert!(r == *token);
50 l 51 l
51 } 52 }
52 TokenAtOffset::Single(n) => { 53 TokenAtOffset::Single(n) => {
53 assert!(n == token); 54 assert!(n == *token);
54 return Some(""); 55 return Some("".into());
55 } 56 }
56 TokenAtOffset::None => unreachable!(), 57 TokenAtOffset::None => unreachable!(),
57 }; 58 };
@@ -60,12 +61,12 @@ fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str>
60 } 61 }
61 let text = ws.text(); 62 let text = ws.text();
62 let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0); 63 let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
63 Some(&text[pos..]) 64 Some(text[pos..].into())
64} 65}
65 66
66pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> { 67pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
67 assert_eq!(file.syntax().text().char_at(eq_offset), Some('=')); 68 assert_eq!(file.syntax().text().char_at(eq_offset), Some('='));
68 let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?; 69 let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
69 if let_stmt.has_semi() { 70 if let_stmt.has_semi() {
70 return None; 71 return None;
71 } 72 }
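The `node_indent` rewrite is representative of functions that used to return text borrowed from the tree: a `SyntaxToken` no longer borrows from a `SourceFile`, so the `&'a str` tied to the file's lifetime becomes an owned `SmolStr`. The same idea in isolation (hypothetical helper; `SmolStr` is re-exported by `ra_syntax`, as the import hunk above shows):

    use ra_syntax::{SmolStr, SyntaxToken};

    // Text after the last newline of a whitespace token, returned as owned data
    // instead of a slice borrowed from the tree.
    fn trailing_indent(ws: &SyntaxToken) -> SmolStr {
        let text = ws.text();
        let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
        text[pos..].into()
    }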
@@ -141,7 +142,7 @@ mod tests {
141 edit.insert(offset, "=".to_string()); 142 edit.insert(offset, "=".to_string());
142 let before = edit.finish().apply(&before); 143 let before = edit.finish().apply(&before);
143 let parse = SourceFile::parse(&before); 144 let parse = SourceFile::parse(&before);
144 if let Some(result) = on_eq_typed(parse.tree(), offset) { 145 if let Some(result) = on_eq_typed(&parse.tree(), offset) {
145 let actual = result.apply(&before); 146 let actual = result.apply(&before);
146 assert_eq_text!(after, &actual); 147 assert_eq_text!(after, &actual);
147 } else { 148 } else {