path: root/crates/ra_ide_api
author     bors[bot] <bors[bot]@users.noreply.github.com>   2019-04-01 10:30:25 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>   2019-04-01 10:30:25 +0100
commit     42a883f06c28ddeab22e5703a578f19110dde7f3 (patch)
tree       fe57697b54ccfb791fe96c13cb553a8570516270 /crates/ra_ide_api
parent     dec9bde10868b5e459535449476d17a6a0987b3e (diff)
parent     9e213385c9d06db3c8ca20812779e2b8f8ad2c71 (diff)
Merge #1078

1078: rewrite syntax trees r=matklad a=matklad

Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ra_ide_api')
-rw-r--r--  crates/ra_ide_api/src/completion.rs                       23
-rw-r--r--  crates/ra_ide_api/src/completion/complete_fn_param.rs      2
-rw-r--r--  crates/ra_ide_api/src/completion/complete_keyword.rs       8
-rw-r--r--  crates/ra_ide_api/src/completion/completion_context.rs    22
-rw-r--r--  crates/ra_ide_api/src/diagnostics.rs                       6
-rw-r--r--  crates/ra_ide_api/src/extend_selection.rs                144
-rw-r--r--  crates/ra_ide_api/src/folding_ranges.rs                  134
-rw-r--r--  crates/ra_ide_api/src/hover.rs                            14
-rw-r--r--  crates/ra_ide_api/src/join_lines.rs                       79
-rw-r--r--  crates/ra_ide_api/src/matching_brace.rs                    9
-rw-r--r--  crates/ra_ide_api/src/syntax_highlighting.rs              12
-rw-r--r--  crates/ra_ide_api/src/syntax_tree.rs                      33
-rw-r--r--  crates/ra_ide_api/src/typing.rs                           32
13 files changed, 274 insertions, 244 deletions
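
The hunks below all make the same kind of change: ra_ide_api stops treating every leaf as a SyntaxNode and moves to an API that separates tokens from nodes. find_leaf_at_offset/LeafAtOffset become find_token_at_offset/TokenAtOffset, children() becomes children_with_tokens(), and covering queries hand back a SyntaxElement that is either a node or a token. What follows is a minimal, self-contained Rust sketch of that split, written for this page; the types are simplified stand-ins and not the real ra_syntax definitions.

// Simplified stand-ins for the real ra_syntax types; only the shape of the
// node/token/element split is meant to match the diff below.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TextRange { start: u32, end: u32 }

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SyntaxKind { Whitespace, Comment, Ident, FnDef }

// A token is a leaf that carries text; a node owns a mix of child nodes and tokens.
#[derive(Debug)]
struct SyntaxToken { kind: SyntaxKind, range: TextRange, text: String }

#[derive(Debug)]
struct SyntaxNode { kind: SyntaxKind, range: TextRange, children: Vec<SyntaxElement> }

// What covering/descendant queries hand back in this sketch: each call site
// matches on it, much like the hunks in extend_selection.rs, folding_ranges.rs
// and syntax_tree.rs below do.
#[derive(Debug)]
enum SyntaxElement {
    Node(SyntaxNode),
    Token(SyntaxToken),
}

impl SyntaxElement {
    // kind() and range() exist on both variants, so code that only needs those
    // can stay uniform; anything node-specific goes through the token's parent.
    fn kind(&self) -> SyntaxKind {
        match self {
            SyntaxElement::Node(n) => n.kind,
            SyntaxElement::Token(t) => t.kind,
        }
    }
    fn range(&self) -> TextRange {
        match self {
            SyntaxElement::Node(n) => n.range,
            SyntaxElement::Token(t) => t.range,
        }
    }
}

fn main() {
    let ident = SyntaxToken {
        kind: SyntaxKind::Ident,
        range: TextRange { start: 0, end: 3 },
        text: "foo".to_string(),
    };
    let element = SyntaxElement::Token(ident);
    println!("{:?} covering {:?}", element.kind(), element.range());
}
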
diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs
index 639942f7b..a846a7a3c 100644
--- a/crates/ra_ide_api/src/completion.rs
+++ b/crates/ra_ide_api/src/completion.rs
@@ -13,7 +13,7 @@ mod complete_scope;
13mod complete_postfix; 13mod complete_postfix;
14 14
15use ra_db::SourceDatabase; 15use ra_db::SourceDatabase;
16use ra_syntax::ast::{self, AstNode}; 16use ra_syntax::{ast::{self, AstNode}, SyntaxKind::{ATTR, COMMENT}};
17 17
18use crate::{ 18use crate::{
19 db, 19 db,
@@ -76,11 +76,10 @@ pub fn function_label(node: &ast::FnDef) -> Option<String> {
76 let body_range = body.syntax().range(); 76 let body_range = body.syntax().range();
77 let label: String = node 77 let label: String = node
78 .syntax() 78 .syntax()
79 .children() 79 .children_with_tokens()
80 .filter(|child| !child.range().is_subrange(&body_range)) // Filter out body 80 .filter(|child| !child.range().is_subrange(&body_range)) // Filter out body
81 .filter(|child| ast::Comment::cast(child).is_none()) // Filter out comments 81 .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR)) // Filter out comments and attrs
82 .filter(|child| ast::Attr::cast(child).is_none()) // Filter out attributes 82 .map(|node| node.to_string())
83 .map(|node| node.text().to_string())
84 .collect(); 83 .collect();
85 label 84 label
86 } else { 85 } else {
@@ -93,10 +92,9 @@ pub fn function_label(node: &ast::FnDef) -> Option<String> {
93pub fn const_label(node: &ast::ConstDef) -> String { 92pub fn const_label(node: &ast::ConstDef) -> String {
94 let label: String = node 93 let label: String = node
95 .syntax() 94 .syntax()
96 .children() 95 .children_with_tokens()
97 .filter(|child| ast::Comment::cast(child).is_none()) 96 .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR))
98 .filter(|child| ast::Attr::cast(child).is_none()) 97 .map(|node| node.to_string())
99 .map(|node| node.text().to_string())
100 .collect(); 98 .collect();
101 99
102 label.trim().to_owned() 100 label.trim().to_owned()
@@ -105,10 +103,9 @@ pub fn const_label(node: &ast::ConstDef) -> String {
105pub fn type_label(node: &ast::TypeAliasDef) -> String { 103pub fn type_label(node: &ast::TypeAliasDef) -> String {
106 let label: String = node 104 let label: String = node
107 .syntax() 105 .syntax()
108 .children() 106 .children_with_tokens()
109 .filter(|child| ast::Comment::cast(child).is_none()) 107 .filter(|child| !(child.kind() == COMMENT || child.kind() == ATTR))
110 .filter(|child| ast::Attr::cast(child).is_none()) 108 .map(|node| node.to_string())
111 .map(|node| node.text().to_string())
112 .collect(); 109 .collect();
113 110
114 label.trim().to_owned() 111 label.trim().to_owned()
diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs
index ffdc744b2..f87ccdeb9 100644
--- a/crates/ra_ide_api/src/completion/complete_fn_param.rs
+++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs
@@ -17,7 +17,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
17 } 17 }
18 18
19 let mut params = FxHashMap::default(); 19 let mut params = FxHashMap::default();
20 for node in ctx.leaf.ancestors() { 20 for node in ctx.token.parent().ancestors() {
21 let _ = visitor_ctx(&mut params) 21 let _ = visitor_ctx(&mut params)
22 .visit::<ast::SourceFile, _>(process) 22 .visit::<ast::SourceFile, _>(process)
23 .visit::<ast::ItemList, _>(process) 23 .visit::<ast::ItemList, _>(process)
diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs
index 841c0c554..718b83418 100644
--- a/crates/ra_ide_api/src/completion/complete_keyword.rs
+++ b/crates/ra_ide_api/src/completion/complete_keyword.rs
@@ -2,7 +2,7 @@ use ra_syntax::{
2 algo::visit::{visitor, Visitor}, 2 algo::visit::{visitor, Visitor},
3 AstNode, 3 AstNode,
4 ast::{self, LoopBodyOwner}, 4 ast::{self, LoopBodyOwner},
5 SyntaxKind::*, SyntaxNode, 5 SyntaxKind::*, SyntaxToken,
6}; 6};
7 7
8use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind}; 8use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind};
@@ -62,7 +62,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
62 acc.add(keyword(ctx, "else", "else {$0}")); 62 acc.add(keyword(ctx, "else", "else {$0}"));
63 acc.add(keyword(ctx, "else if", "else if $0 {}")); 63 acc.add(keyword(ctx, "else if", "else if $0 {}"));
64 } 64 }
65 if is_in_loop_body(ctx.leaf) { 65 if is_in_loop_body(ctx.token) {
66 if ctx.can_be_stmt { 66 if ctx.can_be_stmt {
67 acc.add(keyword(ctx, "continue", "continue;")); 67 acc.add(keyword(ctx, "continue", "continue;"));
68 acc.add(keyword(ctx, "break", "break;")); 68 acc.add(keyword(ctx, "break", "break;"));
@@ -74,8 +74,8 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
74 acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt)); 74 acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt));
75} 75}
76 76
77fn is_in_loop_body(leaf: &SyntaxNode) -> bool { 77fn is_in_loop_body(leaf: SyntaxToken) -> bool {
78 for node in leaf.ancestors() { 78 for node in leaf.parent().ancestors() {
79 if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { 79 if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
80 break; 80 break;
81 } 81 }
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index 724d0dfbf..65dffa470 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -1,8 +1,8 @@
1use ra_text_edit::AtomTextEdit; 1use ra_text_edit::AtomTextEdit;
2use ra_syntax::{ 2use ra_syntax::{
3 AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, 3 AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken,
4 ast, 4 ast,
5 algo::{find_leaf_at_offset, find_covering_node, find_node_at_offset}, 5 algo::{find_token_at_offset, find_covering_element, find_node_at_offset},
6 SyntaxKind::*, 6 SyntaxKind::*,
7}; 7};
8use hir::{source_binder, Resolver}; 8use hir::{source_binder, Resolver};
@@ -15,7 +15,7 @@ use crate::{db, FilePosition};
15pub(crate) struct CompletionContext<'a> { 15pub(crate) struct CompletionContext<'a> {
16 pub(super) db: &'a db::RootDatabase, 16 pub(super) db: &'a db::RootDatabase,
17 pub(super) offset: TextUnit, 17 pub(super) offset: TextUnit,
18 pub(super) leaf: &'a SyntaxNode, 18 pub(super) token: SyntaxToken<'a>,
19 pub(super) resolver: Resolver, 19 pub(super) resolver: Resolver,
20 pub(super) module: Option<hir::Module>, 20 pub(super) module: Option<hir::Module>,
21 pub(super) function: Option<hir::Function>, 21 pub(super) function: Option<hir::Function>,
@@ -49,10 +49,10 @@ impl<'a> CompletionContext<'a> {
49 ) -> Option<CompletionContext<'a>> { 49 ) -> Option<CompletionContext<'a>> {
50 let resolver = source_binder::resolver_for_position(db, position); 50 let resolver = source_binder::resolver_for_position(db, position);
51 let module = source_binder::module_from_position(db, position); 51 let module = source_binder::module_from_position(db, position);
52 let leaf = find_leaf_at_offset(original_file.syntax(), position.offset).left_biased()?; 52 let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
53 let mut ctx = CompletionContext { 53 let mut ctx = CompletionContext {
54 db, 54 db,
55 leaf, 55 token,
56 offset: position.offset, 56 offset: position.offset,
57 resolver, 57 resolver,
58 module, 58 module,
@@ -76,9 +76,9 @@ impl<'a> CompletionContext<'a> {
76 76
77 // The range of the identifier that is being completed. 77 // The range of the identifier that is being completed.
78 pub(crate) fn source_range(&self) -> TextRange { 78 pub(crate) fn source_range(&self) -> TextRange {
79 match self.leaf.kind() { 79 match self.token.kind() {
80 // workaroud when completion is triggered by trigger characters. 80 // workaroud when completion is triggered by trigger characters.
81 IDENT => self.leaf.range(), 81 IDENT => self.token.range(),
82 _ => TextRange::offset_len(self.offset, 0.into()), 82 _ => TextRange::offset_len(self.offset, 0.into()),
83 } 83 }
84 } 84 }
@@ -139,10 +139,11 @@ impl<'a> CompletionContext<'a> {
139 _ => (), 139 _ => (),
140 } 140 }
141 141
142 self.use_item_syntax = self.leaf.ancestors().find_map(ast::UseItem::cast); 142 self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast);
143 143
144 self.function_syntax = self 144 self.function_syntax = self
145 .leaf 145 .token
146 .parent()
146 .ancestors() 147 .ancestors()
147 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) 148 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
148 .find_map(ast::FnDef::cast); 149 .find_map(ast::FnDef::cast);
@@ -224,8 +225,7 @@ impl<'a> CompletionContext<'a> {
224} 225}
225 226
226fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { 227fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
227 let node = find_covering_node(syntax, range); 228 find_covering_element(syntax, range).ancestors().find_map(N::cast)
228 node.ancestors().find_map(N::cast)
229} 229}
230 230
231fn is_node<N: AstNode>(node: &SyntaxNode) -> bool { 231fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs
index 5a78e94d8..2dfaa0045 100644
--- a/crates/ra_ide_api/src/diagnostics.rs
+++ b/crates/ra_ide_api/src/diagnostics.rs
@@ -106,8 +106,10 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
106 single_use_tree: &ast::UseTree, 106 single_use_tree: &ast::UseTree,
107) -> Option<TextEdit> { 107) -> Option<TextEdit> {
108 let use_tree_list_node = single_use_tree.syntax().parent()?; 108 let use_tree_list_node = single_use_tree.syntax().parent()?;
109 if single_use_tree.path()?.segment()?.syntax().first_child()?.kind() == SyntaxKind::SELF_KW { 109 if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind()
110 let start = use_tree_list_node.prev_sibling()?.range().start(); 110 == SyntaxKind::SELF_KW
111 {
112 let start = use_tree_list_node.prev_sibling_or_token()?.range().start();
111 let end = use_tree_list_node.range().end(); 113 let end = use_tree_list_node.range().end();
112 let range = TextRange::from_to(start, end); 114 let range = TextRange::from_to(start, end);
113 let mut edit_builder = TextEditBuilder::default(); 115 let mut edit_builder = TextEditBuilder::default();
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs
index 63879a0b5..e743bf0fe 100644
--- a/crates/ra_ide_api/src/extend_selection.rs
+++ b/crates/ra_ide_api/src/extend_selection.rs
@@ -1,8 +1,9 @@
1use ra_db::SourceDatabase; 1use ra_db::SourceDatabase;
2use ra_syntax::{ 2use ra_syntax::{
3 Direction, SyntaxNode, TextRange, TextUnit, AstNode, 3 Direction, SyntaxNode, TextRange, TextUnit, AstNode, SyntaxElement,
4 algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset}, 4 algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
5 SyntaxKind::*, 5 SyntaxKind::*, SyntaxToken,
6 ast::Comment,
6}; 7};
7 8
8use crate::{FileRange, db::RootDatabase}; 9use crate::{FileRange, db::RootDatabase};
@@ -32,53 +33,58 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
32 33
33 if range.is_empty() { 34 if range.is_empty() {
34 let offset = range.start(); 35 let offset = range.start();
35 let mut leaves = find_leaf_at_offset(root, offset); 36 let mut leaves = find_token_at_offset(root, offset);
36 if leaves.clone().all(|it| it.kind() == WHITESPACE) { 37 if leaves.clone().all(|it| it.kind() == WHITESPACE) {
37 return Some(extend_ws(root, leaves.next()?, offset)); 38 return Some(extend_ws(root, leaves.next()?, offset));
38 } 39 }
39 let leaf_range = match leaves { 40 let leaf_range = match leaves {
40 LeafAtOffset::None => return None, 41 TokenAtOffset::None => return None,
41 LeafAtOffset::Single(l) => { 42 TokenAtOffset::Single(l) => {
42 if string_kinds.contains(&l.kind()) { 43 if string_kinds.contains(&l.kind()) {
43 extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range()) 44 extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range())
44 } else { 45 } else {
45 l.range() 46 l.range()
46 } 47 }
47 } 48 }
48 LeafAtOffset::Between(l, r) => pick_best(l, r).range(), 49 TokenAtOffset::Between(l, r) => pick_best(l, r).range(),
49 }; 50 };
50 return Some(leaf_range); 51 return Some(leaf_range);
51 }; 52 };
52 let node = find_covering_node(root, range); 53 let node = match find_covering_element(root, range) {
54 SyntaxElement::Token(token) => {
55 if token.range() != range {
56 return Some(token.range());
57 }
58 if let Some(comment) = Comment::cast(token) {
59 if let Some(range) = extend_comments(comment) {
60 return Some(range);
61 }
62 }
63 token.parent()
64 }
65 SyntaxElement::Node(node) => node,
66 };
67 if node.range() != range {
68 return Some(node.range());
69 }
53 70
54 // Using shallowest node with same range allows us to traverse siblings. 71 // Using shallowest node with same range allows us to traverse siblings.
55 let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap(); 72 let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
56 73
57 if range == node.range() { 74 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
58 if string_kinds.contains(&node.kind()) { 75 if let Some(range) = extend_list_item(node) {
59 if let Some(range) = extend_comments(node) { 76 return Some(range);
60 return Some(range);
61 }
62 }
63
64 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
65 if let Some(range) = extend_list_item(node) {
66 return Some(range);
67 }
68 } 77 }
69 } 78 }
70 79
71 match node.ancestors().skip_while(|n| n.range() == range).next() { 80 node.parent().map(|it| it.range())
72 None => None,
73 Some(parent) => Some(parent.range()),
74 }
75} 81}
76 82
77fn extend_single_word_in_comment_or_string( 83fn extend_single_word_in_comment_or_string(
78 leaf: &SyntaxNode, 84 leaf: SyntaxToken,
79 offset: TextUnit, 85 offset: TextUnit,
80) -> Option<TextRange> { 86) -> Option<TextRange> {
81 let text: &str = leaf.leaf_text()?; 87 let text: &str = leaf.text();
82 let cursor_position: u32 = (offset - leaf.range().start()).into(); 88 let cursor_position: u32 = (offset - leaf.range().start()).into();
83 89
84 let (before, after) = text.split_at(cursor_position as usize); 90 let (before, after) = text.split_at(cursor_position as usize);
@@ -101,14 +107,14 @@ fn extend_single_word_in_comment_or_string(
101 } 107 }
102} 108}
103 109
104fn extend_ws(root: &SyntaxNode, ws: &SyntaxNode, offset: TextUnit) -> TextRange { 110fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange {
105 let ws_text = ws.leaf_text().unwrap(); 111 let ws_text = ws.text();
106 let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start(); 112 let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start();
107 let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start(); 113 let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
108 let ws_suffix = &ws_text.as_str()[suffix]; 114 let ws_suffix = &ws_text.as_str()[suffix];
109 let ws_prefix = &ws_text.as_str()[prefix]; 115 let ws_prefix = &ws_text.as_str()[prefix];
110 if ws_text.contains('\n') && !ws_suffix.contains('\n') { 116 if ws_text.contains('\n') && !ws_suffix.contains('\n') {
111 if let Some(node) = ws.next_sibling() { 117 if let Some(node) = ws.next_sibling_or_token() {
112 let start = match ws_prefix.rfind('\n') { 118 let start = match ws_prefix.rfind('\n') {
113 Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32), 119 Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
114 None => node.range().start(), 120 None => node.range().start(),
@@ -124,9 +130,9 @@ fn extend_ws(root: &SyntaxNode, ws: &SyntaxNode, offset: TextUnit) -> TextRange
124 ws.range() 130 ws.range()
125} 131}
126 132
127fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode { 133fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> {
128 return if priority(r) > priority(l) { r } else { l }; 134 return if priority(r) > priority(l) { r } else { l };
129 fn priority(n: &SyntaxNode) -> usize { 135 fn priority(n: SyntaxToken) -> usize {
130 match n.kind() { 136 match n.kind() {
131 WHITESPACE => 0, 137 WHITESPACE => 0,
132 IDENT | SELF_KW | SUPER_KW | CRATE_KW | LIFETIME => 2, 138 IDENT | SELF_KW | SUPER_KW | CRATE_KW | LIFETIME => 2,
@@ -137,54 +143,60 @@ fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode {
137 143
138/// Extend list item selection to include nearby comma and whitespace. 144/// Extend list item selection to include nearby comma and whitespace.
139fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { 145fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
140 fn is_single_line_ws(node: &SyntaxNode) -> bool { 146 fn is_single_line_ws(node: &SyntaxToken) -> bool {
141 node.kind() == WHITESPACE && !node.leaf_text().unwrap().contains('\n') 147 node.kind() == WHITESPACE && !node.text().contains('\n')
142 } 148 }
143 149
144 fn nearby_comma(node: &SyntaxNode, dir: Direction) -> Option<&SyntaxNode> { 150 fn nearby_comma(node: &SyntaxNode, dir: Direction) -> Option<SyntaxToken> {
145 node.siblings(dir) 151 node.siblings_with_tokens(dir)
146 .skip(1) 152 .skip(1)
147 .skip_while(|node| is_single_line_ws(node)) 153 .skip_while(|node| match node {
154 SyntaxElement::Node(_) => false,
155 SyntaxElement::Token(it) => is_single_line_ws(it),
156 })
148 .next() 157 .next()
158 .and_then(|it| it.as_token())
149 .filter(|node| node.kind() == COMMA) 159 .filter(|node| node.kind() == COMMA)
150 } 160 }
151 161
152 if let Some(comma_node) = nearby_comma(node, Direction::Prev) { 162 if let Some(comma_node) = nearby_comma(node, Direction::Prev) {
153 return Some(TextRange::from_to(comma_node.range().start(), node.range().end())); 163 return Some(TextRange::from_to(comma_node.range().start(), node.range().end()));
154 } 164 }
155
156 if let Some(comma_node) = nearby_comma(node, Direction::Next) { 165 if let Some(comma_node) = nearby_comma(node, Direction::Next) {
157 // Include any following whitespace when comma if after list item. 166 // Include any following whitespace when comma if after list item.
158 let final_node = comma_node 167 let final_node = comma_node
159 .siblings(Direction::Next) 168 .next_sibling_or_token()
160 .skip(1) 169 .and_then(|it| it.as_token())
161 .next()
162 .filter(|node| is_single_line_ws(node)) 170 .filter(|node| is_single_line_ws(node))
163 .unwrap_or(comma_node); 171 .unwrap_or(comma_node);
164 172
165 return Some(TextRange::from_to(node.range().start(), final_node.range().end())); 173 return Some(TextRange::from_to(node.range().start(), final_node.range().end()));
166 } 174 }
167 175
168 return None; 176 None
169} 177}
170 178
171fn extend_comments(node: &SyntaxNode) -> Option<TextRange> { 179fn extend_comments(comment: Comment) -> Option<TextRange> {
172 let prev = adj_comments(node, Direction::Prev); 180 let prev = adj_comments(comment, Direction::Prev);
173 let next = adj_comments(node, Direction::Next); 181 let next = adj_comments(comment, Direction::Next);
174 if prev != next { 182 if prev != next {
175 Some(TextRange::from_to(prev.range().start(), next.range().end())) 183 Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
176 } else { 184 } else {
177 None 185 None
178 } 186 }
179} 187}
180 188
181fn adj_comments(node: &SyntaxNode, dir: Direction) -> &SyntaxNode { 189fn adj_comments(comment: Comment, dir: Direction) -> Comment {
182 let mut res = node; 190 let mut res = comment;
183 for node in node.siblings(dir) { 191 for element in comment.syntax().siblings_with_tokens(dir) {
184 match node.kind() { 192 let token = match element.as_token() {
185 COMMENT => res = node, 193 None => break,
186 WHITESPACE if !node.leaf_text().unwrap().as_str().contains("\n\n") => (), 194 Some(token) => token,
187 _ => break, 195 };
196 if let Some(c) = Comment::cast(token) {
197 res = c
198 } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
199 break;
188 } 200 }
189 } 201 }
190 res 202 res
@@ -308,23 +320,13 @@ fn bar(){}
308/* 320/*
309foo 321foo
310_bar1<|>*/ 322_bar1<|>*/
311 "#, 323"#,
312 &["_bar1", "/*\nfoo\n_bar1*/"], 324 &["_bar1", "/*\nfoo\n_bar1*/"],
313 ); 325 );
314 326
315 do_check( 327 do_check(r#"//!<|>foo_2 bar"#, &["foo_2", "//!foo_2 bar"]);
316 r#"
317//!<|>foo_2 bar
318 "#,
319 &["foo_2", "//!foo_2 bar"],
320 );
321 328
322 do_check( 329 do_check(r#"/<|>/foo bar"#, &["//foo bar"]);
323 r#"
324/<|>/foo bar
325 "#,
326 &["//foo bar"],
327 );
328 } 330 }
329 331
330 #[test] 332 #[test]
@@ -332,13 +334,13 @@ _bar1<|>*/
332 do_check( 334 do_check(
333 r#" 335 r#"
334fn main() { foo<|>+bar;} 336fn main() { foo<|>+bar;}
335 "#, 337"#,
336 &["foo", "foo+bar"], 338 &["foo", "foo+bar"],
337 ); 339 );
338 do_check( 340 do_check(
339 r#" 341 r#"
340fn main() { foo+<|>bar;} 342fn main() { foo+<|>bar;}
341 "#, 343"#,
342 &["bar", "foo+bar"], 344 &["bar", "foo+bar"],
343 ); 345 );
344 } 346 }
@@ -355,11 +357,11 @@ fn main() { foo+<|>bar;}
355 do_check( 357 do_check(
356 r#" 358 r#"
357impl S { 359impl S {
358 fn foo() { 360fn foo() {
359 // hel<|>lo world 361// hel<|>lo world
360 }
361} 362}
362 "#, 363}
364"#,
363 &["hello", "// hello world"], 365 &["hello", "// hello world"],
364 ); 366 );
365 } 367 }
@@ -371,7 +373,7 @@ impl S {
371fn bar(){} 373fn bar(){}
372 374
373" fn f<|>oo() {" 375" fn f<|>oo() {"
374 "#, 376"#,
375 &["foo", "\" fn foo() {\""], 377 &["foo", "\" fn foo() {\""],
376 ); 378 );
377 } 379 }
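
In the extend_selection.rs hunks above, TokenAtOffset::Between(l, r) => pick_best(l, r) decides which of the two tokens around the cursor to extend over. Below is a standalone sketch of that priority tie-break (prefer identifiers and keywords over punctuation, and punctuation over whitespace); Kind and Token are simplified stand-ins, not the real SyntaxKind/SyntaxToken.

// Sketch of the "cursor exactly between two tokens" tie-break used above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind { Whitespace, Punct, Ident }

#[derive(Debug, Clone, Copy)]
struct Token { kind: Kind, text: &'static str }

fn pick_best(l: Token, r: Token) -> Token {
    fn priority(t: Token) -> usize {
        match t.kind {
            Kind::Whitespace => 0,
            Kind::Ident => 2,
            Kind::Punct => 1,
        }
    }
    if priority(r) > priority(l) { r } else { l }
}

fn main() {
    let ws = Token { kind: Kind::Whitespace, text: " " };
    let id = Token { kind: Kind::Ident, text: "foo" };
    // Cursor sits between a space and an identifier: extend over the identifier.
    assert_eq!(pick_best(ws, id).text, "foo");
    println!("picked {:?}", pick_best(ws, id));
}
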
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs
index b96145f05..a6fe8a5d5 100644
--- a/crates/ra_ide_api/src/folding_ranges.rs
+++ b/crates/ra_ide_api/src/folding_ranges.rs
@@ -1,9 +1,9 @@
1use rustc_hash::FxHashSet; 1use rustc_hash::FxHashSet;
2 2
3use ra_syntax::{ 3use ra_syntax::{
4 AstNode, Direction, SourceFile, SyntaxNode, TextRange, 4 AstNode, SourceFile, SyntaxNode, TextRange, Direction, SyntaxElement,
5 SyntaxKind::{self, *}, 5 SyntaxKind::{self, *},
6 ast::{self, VisibilityOwner}, 6 ast::{self, VisibilityOwner, Comment},
7}; 7};
8 8
9#[derive(Debug, PartialEq, Eq)] 9#[derive(Debug, PartialEq, Eq)]
@@ -26,34 +26,49 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
26 let mut visited_imports = FxHashSet::default(); 26 let mut visited_imports = FxHashSet::default();
27 let mut visited_mods = FxHashSet::default(); 27 let mut visited_mods = FxHashSet::default();
28 28
29 for node in file.syntax().descendants() { 29 for element in file.syntax().descendants_with_tokens() {
30 // Fold items that span multiple lines 30 // Fold items that span multiple lines
31 if let Some(kind) = fold_kind(node.kind()) { 31 if let Some(kind) = fold_kind(element.kind()) {
32 if node.text().contains('\n') { 32 let is_multiline = match element {
33 res.push(Fold { range: node.range(), kind }); 33 SyntaxElement::Node(node) => node.text().contains('\n'),
34 SyntaxElement::Token(token) => token.text().contains('\n'),
35 };
36 if is_multiline {
37 res.push(Fold { range: element.range(), kind });
38 continue;
34 } 39 }
35 } 40 }
36 41
37 // Fold groups of comments 42 match element {
38 if node.kind() == COMMENT && !visited_comments.contains(&node) { 43 SyntaxElement::Token(token) => {
39 if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) { 44 // Fold groups of comments
40 res.push(Fold { range, kind: FoldKind::Comment }) 45 if let Some(comment) = ast::Comment::cast(token) {
46 if !visited_comments.contains(&comment) {
47 if let Some(range) =
48 contiguous_range_for_comment(comment, &mut visited_comments)
49 {
50 res.push(Fold { range, kind: FoldKind::Comment })
51 }
52 }
53 }
41 } 54 }
42 } 55 SyntaxElement::Node(node) => {
43 56 // Fold groups of imports
44 // Fold groups of imports 57 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
45 if node.kind() == USE_ITEM && !visited_imports.contains(&node) { 58 if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
46 if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { 59 res.push(Fold { range, kind: FoldKind::Imports })
47 res.push(Fold { range, kind: FoldKind::Imports }) 60 }
48 } 61 }
49 } 62
50 63 // Fold groups of mods
51 // Fold groups of mods 64 if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node)
52 if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) { 65 {
53 if let Some(range) = 66 if let Some(range) =
54 contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods) 67 contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
55 { 68 {
56 res.push(Fold { range, kind: FoldKind::Mods }) 69 res.push(Fold { range, kind: FoldKind::Mods })
70 }
71 }
57 } 72 }
58 } 73 }
59 } 74 }
@@ -90,16 +105,21 @@ fn contiguous_range_for_group_unless<'a>(
90 visited.insert(first); 105 visited.insert(first);
91 106
92 let mut last = first; 107 let mut last = first;
93 for node in first.siblings(Direction::Next) { 108 for element in first.siblings_with_tokens(Direction::Next) {
94 if let Some(ws) = ast::Whitespace::cast(node) { 109 let node = match element {
95 // There is a blank line, which means that the group ends here 110 SyntaxElement::Token(token) => {
96 if ws.count_newlines_lazy().take(2).count() == 2 { 111 if let Some(ws) = ast::Whitespace::cast(token) {
112 if !ws.spans_multiple_lines() {
113 // Ignore whitespace without blank lines
114 continue;
115 }
116 }
117 // There is a blank line or another token, which means that the
118 // group ends here
97 break; 119 break;
98 } 120 }
99 121 SyntaxElement::Node(node) => node,
100 // Ignore whitespace without blank lines 122 };
101 continue;
102 }
103 123
104 // Stop if we find a node that doesn't belong to the group 124 // Stop if we find a node that doesn't belong to the group
105 if node.kind() != first.kind() || unless(node) { 125 if node.kind() != first.kind() || unless(node) {
@@ -119,40 +139,42 @@ fn contiguous_range_for_group_unless<'a>(
119} 139}
120 140
121fn contiguous_range_for_comment<'a>( 141fn contiguous_range_for_comment<'a>(
122 first: &'a SyntaxNode, 142 first: Comment<'a>,
123 visited: &mut FxHashSet<&'a SyntaxNode>, 143 visited: &mut FxHashSet<Comment<'a>>,
124) -> Option<TextRange> { 144) -> Option<TextRange> {
125 visited.insert(first); 145 visited.insert(first);
126 146
127 // Only fold comments of the same flavor 147 // Only fold comments of the same flavor
128 let group_flavor = ast::Comment::cast(first)?.flavor(); 148 let group_flavor = first.flavor();
129 149
130 let mut last = first; 150 let mut last = first;
131 for node in first.siblings(Direction::Next) { 151 for element in first.syntax().siblings_with_tokens(Direction::Next) {
132 if let Some(ws) = ast::Whitespace::cast(node) { 152 match element {
133 // There is a blank line, which means the group ends here 153 SyntaxElement::Token(token) => {
134 if ws.count_newlines_lazy().take(2).count() == 2 { 154 if let Some(ws) = ast::Whitespace::cast(token) {
155 if !ws.spans_multiple_lines() {
156 // Ignore whitespace without blank lines
157 continue;
158 }
159 }
160 if let Some(c) = Comment::cast(token) {
161 if c.flavor() == group_flavor {
162 visited.insert(c);
163 last = c;
164 continue;
165 }
166 }
167 // The comment group ends because either:
168 // * An element of a different kind was reached
169 // * A comment of a different flavor was reached
135 break; 170 break;
136 } 171 }
137 172 SyntaxElement::Node(_) => break,
138 // Ignore whitespace without blank lines 173 };
139 continue;
140 }
141
142 match ast::Comment::cast(node) {
143 Some(next_comment) if next_comment.flavor() == group_flavor => {
144 visited.insert(node);
145 last = node;
146 }
147 // The comment group ends because either:
148 // * An element of a different kind was reached
149 // * A comment of a different flavor was reached
150 _ => break,
151 }
152 } 174 }
153 175
154 if first != last { 176 if first != last {
155 Some(TextRange::from_to(first.range().start(), last.range().end())) 177 Some(TextRange::from_to(first.syntax().range().start(), last.syntax().range().end()))
156 } else { 178 } else {
157 // The group consists of only one element, therefore it cannot be folded 179 // The group consists of only one element, therefore it cannot be folded
158 None 180 None
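
The folding_ranges.rs hunk above grows a comment fold group by walking siblings_with_tokens: whitespace without a blank line is skipped, a comment of the same flavor extends the group, and anything else ends it. The sketch below shows just that grouping rule over plain data; Piece and the flavor strings are hypothetical stand-ins, not the real token types.

// Standalone sketch of the comment-grouping rule used by the folding code above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Piece<'a> {
    Comment { flavor: &'a str, line: usize },
    Whitespace { has_blank_line: bool },
    Other,
}

/// Returns the line of the last comment in the group that starts at `pieces[0]`.
fn last_in_comment_group(pieces: &[Piece<'_>]) -> usize {
    let (first_flavor, mut last_line) = match pieces[0] {
        Piece::Comment { flavor, line } => (flavor, line),
        _ => panic!("group must start at a comment"),
    };
    for piece in &pieces[1..] {
        match *piece {
            // Whitespace without a blank line does not break the group.
            Piece::Whitespace { has_blank_line: false } => continue,
            // Same-flavor comment extends the group.
            Piece::Comment { flavor, line } if flavor == first_flavor => last_line = line,
            // Blank line, different flavor, or any other element ends the group.
            _ => break,
        }
    }
    last_line
}

fn main() {
    let pieces = [
        Piece::Comment { flavor: "//", line: 0 },
        Piece::Whitespace { has_blank_line: false },
        Piece::Comment { flavor: "//", line: 1 },
        Piece::Whitespace { has_blank_line: true }, // blank line ends the group
        Piece::Comment { flavor: "//", line: 3 },
    ];
    assert_eq!(last_in_comment_group(&pieces), 1);
    println!("group ends at line {}", last_in_comment_group(&pieces));
}
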
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index 3206e68b9..bfa7cd67a 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -1,7 +1,7 @@
1use ra_db::SourceDatabase; 1use ra_db::SourceDatabase;
2use ra_syntax::{ 2use ra_syntax::{
3 AstNode, SyntaxNode, TreeArc, ast::{self, NameOwner, VisibilityOwner, TypeAscriptionOwner}, 3 AstNode, SyntaxNode, TreeArc, ast::{self, NameOwner, VisibilityOwner, TypeAscriptionOwner},
4 algo::{find_covering_node, find_node_at_offset, find_leaf_at_offset, visit::{visitor, Visitor}}, 4 algo::{find_covering_element, find_node_at_offset, find_token_at_offset, visit::{visitor, Visitor}},
5}; 5};
6use hir::HirDisplay; 6use hir::HirDisplay;
7 7
@@ -104,8 +104,11 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
104 } 104 }
105 105
106 if range.is_none() { 106 if range.is_none() {
107 let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| { 107 let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| {
108 leaf.ancestors().find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) 108 token
109 .parent()
110 .ancestors()
111 .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
109 })?; 112 })?;
110 let frange = FileRange { file_id: position.file_id, range: node.range() }; 113 let frange = FileRange { file_id: position.file_id, range: node.range() };
111 res.extend(type_of(db, frange).map(rust_code_markup)); 114 res.extend(type_of(db, frange).map(rust_code_markup));
@@ -123,13 +126,12 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
123pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { 126pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
124 let file = db.parse(frange.file_id); 127 let file = db.parse(frange.file_id);
125 let syntax = file.syntax(); 128 let syntax = file.syntax();
126 let leaf_node = find_covering_node(syntax, frange.range); 129 let leaf_node = find_covering_element(syntax, frange.range);
127 // if we picked identifier, expand to pattern/expression 130 // if we picked identifier, expand to pattern/expression
128 let node = leaf_node 131 let node = leaf_node
129 .ancestors() 132 .ancestors()
130 .take_while(|it| it.range() == leaf_node.range()) 133 .take_while(|it| it.range() == leaf_node.range())
131 .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some()) 134 .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
132 .unwrap_or(leaf_node);
133 let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?; 135 let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
134 let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?; 136 let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
135 let infer = function.infer(db); 137 let infer = function.infer(db);
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs
index 8fb3eaa06..57b6f8384 100644
--- a/crates/ra_ide_api/src/join_lines.rs
+++ b/crates/ra_ide_api/src/join_lines.rs
@@ -1,8 +1,8 @@
1use itertools::Itertools; 1use itertools::Itertools;
2use ra_syntax::{ 2use ra_syntax::{
3 SourceFile, TextRange, TextUnit, AstNode, SyntaxNode, 3 SourceFile, TextRange, TextUnit, AstNode, SyntaxNode, SyntaxElement, SyntaxToken,
4 SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK}, 4 SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK},
5 algo::{find_covering_node, non_trivia_sibling}, 5 algo::{find_covering_element, non_trivia_sibling},
6 ast, 6 ast,
7 Direction, 7 Direction,
8}; 8};
@@ -24,22 +24,22 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
24 range 24 range
25 }; 25 };
26 26
27 let node = find_covering_node(file.syntax(), range); 27 let node = match find_covering_element(file.syntax(), range) {
28 SyntaxElement::Node(node) => node,
29 SyntaxElement::Token(token) => token.parent(),
30 };
28 let mut edit = TextEditBuilder::default(); 31 let mut edit = TextEditBuilder::default();
29 for node in node.descendants() { 32 for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) {
30 let text = match node.leaf_text() { 33 let range = match range.intersection(&token.range()) {
31 Some(text) => text,
32 None => continue,
33 };
34 let range = match range.intersection(&node.range()) {
35 Some(range) => range, 34 Some(range) => range,
36 None => continue, 35 None => continue,
37 } - node.range().start(); 36 } - token.range().start();
37 let text = token.text();
38 for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') { 38 for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
39 let pos: TextUnit = (pos as u32).into(); 39 let pos: TextUnit = (pos as u32).into();
40 let off = node.range().start() + range.start() + pos; 40 let off = token.range().start() + range.start() + pos;
41 if !edit.invalidates_offset(off) { 41 if !edit.invalidates_offset(off) {
42 remove_newline(&mut edit, node, text.as_str(), off); 42 remove_newline(&mut edit, token, off);
43 } 43 }
44 } 44 }
45 } 45 }
@@ -47,17 +47,12 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
47 edit.finish() 47 edit.finish()
48} 48}
49 49
50fn remove_newline( 50fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) {
51 edit: &mut TextEditBuilder, 51 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
52 node: &SyntaxNode,
53 node_text: &str,
54 offset: TextUnit,
55) {
56 if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 {
57 // The node is either the first or the last in the file 52 // The node is either the first or the last in the file
58 let suff = &node_text[TextRange::from_to( 53 let suff = &token.text()[TextRange::from_to(
59 offset - node.range().start() + TextUnit::of_char('\n'), 54 offset - token.range().start() + TextUnit::of_char('\n'),
60 TextUnit::of_str(node_text), 55 TextUnit::of_str(token.text()),
61 )]; 56 )];
62 let spaces = suff.bytes().take_while(|&b| b == b' ').count(); 57 let spaces = suff.bytes().take_while(|&b| b == b' ').count();
63 58
@@ -74,7 +69,7 @@ fn remove_newline(
74 // ``` 69 // ```
75 // 70 //
76 // into `my_function(<some-expr>)` 71 // into `my_function(<some-expr>)`
77 if join_single_expr_block(edit, node).is_some() { 72 if join_single_expr_block(edit, token).is_some() {
78 return; 73 return;
79 } 74 }
80 // ditto for 75 // ditto for
@@ -84,44 +79,50 @@ fn remove_newline(
84 // bar 79 // bar
85 // }; 80 // };
86 // ``` 81 // ```
87 if join_single_use_tree(edit, node).is_some() { 82 if join_single_use_tree(edit, token).is_some() {
88 return; 83 return;
89 } 84 }
90 85
91 // The node is between two other nodes 86 // The node is between two other nodes
92 let prev = node.prev_sibling().unwrap(); 87 let prev = token.prev_sibling_or_token().unwrap();
93 let next = node.next_sibling().unwrap(); 88 let next = token.next_sibling_or_token().unwrap();
94 if is_trailing_comma(prev.kind(), next.kind()) { 89 if is_trailing_comma(prev.kind(), next.kind()) {
95 // Removes: trailing comma, newline (incl. surrounding whitespace) 90 // Removes: trailing comma, newline (incl. surrounding whitespace)
96 edit.delete(TextRange::from_to(prev.range().start(), node.range().end())); 91 edit.delete(TextRange::from_to(prev.range().start(), token.range().end()));
97 } else if prev.kind() == COMMA && next.kind() == R_CURLY { 92 } else if prev.kind() == COMMA && next.kind() == R_CURLY {
98 // Removes: comma, newline (incl. surrounding whitespace) 93 // Removes: comma, newline (incl. surrounding whitespace)
99 let space = if let Some(left) = prev.prev_sibling() { compute_ws(left, next) } else { " " }; 94 let space = if let Some(left) = prev.prev_sibling_or_token() {
95 compute_ws(left.kind(), next.kind())
96 } else {
97 " "
98 };
100 edit.replace( 99 edit.replace(
101 TextRange::from_to(prev.range().start(), node.range().end()), 100 TextRange::from_to(prev.range().start(), token.range().end()),
102 space.to_string(), 101 space.to_string(),
103 ); 102 );
104 } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) { 103 } else if let (Some(_), Some(next)) =
104 (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast))
105 {
105 // Removes: newline (incl. surrounding whitespace), start of the next comment 106 // Removes: newline (incl. surrounding whitespace), start of the next comment
106 edit.delete(TextRange::from_to( 107 edit.delete(TextRange::from_to(
107 node.range().start(), 108 token.range().start(),
108 next.syntax().range().start() + TextUnit::of_str(next.prefix()), 109 next.syntax().range().start() + TextUnit::of_str(next.prefix()),
109 )); 110 ));
110 } else { 111 } else {
111 // Remove newline but add a computed amount of whitespace characters 112 // Remove newline but add a computed amount of whitespace characters
112 edit.replace(node.range(), compute_ws(prev, next).to_string()); 113 edit.replace(token.range(), compute_ws(prev.kind(), next.kind()).to_string());
113 } 114 }
114} 115}
115 116
116fn has_comma_after(node: &SyntaxNode) -> bool { 117fn has_comma_after(node: &SyntaxNode) -> bool {
117 match non_trivia_sibling(node, Direction::Next) { 118 match non_trivia_sibling(node.into(), Direction::Next) {
118 Some(n) => n.kind() == COMMA, 119 Some(n) => n.kind() == COMMA,
119 _ => false, 120 _ => false,
120 } 121 }
121} 122}
122 123
123fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { 124fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
124 let block = ast::Block::cast(node.parent()?)?; 125 let block = ast::Block::cast(token.parent())?;
125 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; 126 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
126 let expr = extract_trivial_expression(block)?; 127 let expr = extract_trivial_expression(block)?;
127 128
@@ -140,8 +141,8 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Opti
140 Some(()) 141 Some(())
141} 142}
142 143
143fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> { 144fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
144 let use_tree_list = ast::UseTreeList::cast(node.parent()?)?; 145 let use_tree_list = ast::UseTreeList::cast(token.parent())?;
145 let (tree,) = use_tree_list.use_trees().collect_tuple()?; 146 let (tree,) = use_tree_list.use_trees().collect_tuple()?;
146 edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string()); 147 edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());
147 Some(()) 148 Some(())
@@ -401,13 +402,13 @@ use ra_syntax::{
401 r" 402 r"
402use ra_syntax::{ 403use ra_syntax::{
403 algo::<|>{ 404 algo::<|>{
404 find_leaf_at_offset, 405 find_token_at_offset,
405 }, 406 },
406 ast, 407 ast,
407};", 408};",
408 r" 409 r"
409use ra_syntax::{ 410use ra_syntax::{
410 algo::<|>find_leaf_at_offset, 411 algo::<|>find_token_at_offset,
411 ast, 412 ast,
412};", 413};",
413 ); 414 );
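
The join_lines.rs hunk above finds the newlines to remove by intersecting the requested range with each token's range and scanning only the overlapping slice of the token text. A self-contained sketch of that clipping step, with a plain Range type instead of the real TextRange:

// Sketch of "clip the selection against a token, then scan for newlines".
#[derive(Debug, Clone, Copy)]
struct Range { start: usize, end: usize } // half-open [start, end)

fn intersection(a: Range, b: Range) -> Option<Range> {
    let start = a.start.max(b.start);
    let end = a.end.min(b.end);
    if start <= end { Some(Range { start, end }) } else { None }
}

fn newline_offsets(token_text: &str, token_start: usize, selection: Range) -> Vec<usize> {
    let token_range = Range { start: token_start, end: token_start + token_text.len() };
    let Some(clipped) = intersection(selection, token_range) else { return Vec::new() };
    // Work in token-local coordinates, then translate back to file offsets.
    let local = &token_text[clipped.start - token_start..clipped.end - token_start];
    local
        .bytes()
        .enumerate()
        .filter(|&(_, b)| b == b'\n')
        .map(|(pos, _)| clipped.start + pos)
        .collect()
}

fn main() {
    // A whitespace token "  \n  " starting at file offset 10, selection covers it.
    let offsets = newline_offsets("  \n  ", 10, Range { start: 0, end: 100 });
    assert_eq!(offsets, vec![12]);
    println!("{:?}", offsets);
}
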
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs
index d1405f14f..bebd16a69 100644
--- a/crates/ra_ide_api/src/matching_brace.rs
+++ b/crates/ra_ide_api/src/matching_brace.rs
@@ -1,6 +1,6 @@
1use ra_syntax::{ 1use ra_syntax::{
2 SourceFile, TextUnit, 2 SourceFile, TextUnit,
3 algo::find_leaf_at_offset, 3 algo::find_token_at_offset,
4 SyntaxKind::{self, *}, 4 SyntaxKind::{self, *},
5 ast::AstNode, 5 ast::AstNode,
6}; 6};
@@ -8,15 +8,15 @@ use ra_syntax::{
8pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { 8pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
9 const BRACES: &[SyntaxKind] = 9 const BRACES: &[SyntaxKind] =
10 &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE]; 10 &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE];
11 let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset) 11 let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
12 .filter_map(|node| { 12 .filter_map(|node| {
13 let idx = BRACES.iter().position(|&brace| brace == node.kind())?; 13 let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
14 Some((node, idx)) 14 Some((node, idx))
15 }) 15 })
16 .next()?; 16 .next()?;
17 let parent = brace_node.parent()?; 17 let parent = brace_node.parent();
18 let matching_kind = BRACES[brace_idx ^ 1]; 18 let matching_kind = BRACES[brace_idx ^ 1];
19 let matching_node = parent.children().find(|node| node.kind() == matching_kind)?; 19 let matching_node = parent.children_with_tokens().find(|node| node.kind() == matching_kind)?;
20 Some(matching_node.range().start()) 20 Some(matching_node.range().start())
21} 21}
22 22
@@ -41,5 +41,4 @@ mod tests {
41 41
42 do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }"); 42 do_check("struct Foo { a: i32, }<|>", "struct Foo <|>{ a: i32, }");
43 } 43 }
44
45} 44}
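
The matching_brace.rs hunk above keeps the trick BRACES[brace_idx ^ 1]: the table lists each opening brace directly before its closing brace, so XOR-ing the index with 1 flips 0 and 1, 2 and 3, and so on. A standalone demonstration with plain chars rather than SyntaxKind values:

// Opening braces sit at even indices, their partners at the following odd index.
const BRACES: &[char] = &['{', '}', '[', ']', '(', ')', '<', '>'];

fn matching_brace(c: char) -> Option<char> {
    let idx = BRACES.iter().position(|&b| b == c)?;
    // idx ^ 1 maps 0<->1, 2<->3, 4<->5, 6<->7: the paired brace.
    Some(BRACES[idx ^ 1])
}

fn main() {
    assert_eq!(matching_brace('{'), Some('}'));
    assert_eq!(matching_brace(')'), Some('('));
    assert_eq!(matching_brace('x'), None);
    println!("ok");
}
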
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs
index a0c5e78ad..d9a28d2b5 100644
--- a/crates/ra_ide_api/src/syntax_highlighting.rs
+++ b/crates/ra_ide_api/src/syntax_highlighting.rs
@@ -1,6 +1,6 @@
1use rustc_hash::FxHashSet; 1use rustc_hash::FxHashSet;
2 2
3use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*}; 3use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*, SyntaxElement};
4use ra_db::SourceDatabase; 4use ra_db::SourceDatabase;
5 5
6use crate::{FileId, db::RootDatabase}; 6use crate::{FileId, db::RootDatabase};
@@ -15,9 +15,9 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
15 let source_file = db.parse(file_id); 15 let source_file = db.parse(file_id);
16 16
17 // Visited nodes to handle highlighting priorities 17 // Visited nodes to handle highlighting priorities
18 let mut highlighted = FxHashSet::default(); 18 let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default();
19 let mut res = Vec::new(); 19 let mut res = Vec::new();
20 for node in source_file.syntax().descendants() { 20 for node in source_file.syntax().descendants_with_tokens() {
21 if highlighted.contains(&node) { 21 if highlighted.contains(&node) {
22 continue; 22 continue;
23 } 23 }
@@ -31,14 +31,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
31 LIFETIME => "parameter", 31 LIFETIME => "parameter",
32 k if k.is_keyword() => "keyword", 32 k if k.is_keyword() => "keyword",
33 _ => { 33 _ => {
34 if let Some(macro_call) = ast::MacroCall::cast(node) { 34 if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) {
35 if let Some(path) = macro_call.path() { 35 if let Some(path) = macro_call.path() {
36 if let Some(segment) = path.segment() { 36 if let Some(segment) = path.segment() {
37 if let Some(name_ref) = segment.name_ref() { 37 if let Some(name_ref) = segment.name_ref() {
38 highlighted.insert(name_ref.syntax()); 38 highlighted.insert(name_ref.syntax().into());
39 let range_start = name_ref.syntax().range().start(); 39 let range_start = name_ref.syntax().range().start();
40 let mut range_end = name_ref.syntax().range().end(); 40 let mut range_end = name_ref.syntax().range().end();
41 for sibling in path.syntax().siblings(Direction::Next) { 41 for sibling in path.syntax().siblings_with_tokens(Direction::Next) {
42 match sibling.kind() { 42 match sibling.kind() {
43 EXCL | IDENT => range_end = sibling.range().end(), 43 EXCL | IDENT => range_end = sibling.range().end(),
44 _ => (), 44 _ => (),
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs
index 276f8a8c8..a4e4c3dbe 100644
--- a/crates/ra_ide_api/src/syntax_tree.rs
+++ b/crates/ra_ide_api/src/syntax_tree.rs
@@ -1,8 +1,9 @@
1use ra_db::SourceDatabase; 1use ra_db::SourceDatabase;
2use crate::db::RootDatabase; 2use crate::db::RootDatabase;
3use ra_syntax::{ 3use ra_syntax::{
4 SourceFile, SyntaxNode, TextRange, AstNode, 4 SourceFile, TextRange, AstNode, SyntaxToken, SyntaxElement,
5 algo::{self, visit::{visitor, Visitor}}, ast::{self, AstToken} 5 algo,
6 SyntaxKind::{STRING, RAW_STRING},
6}; 7};
7 8
8pub use ra_db::FileId; 9pub use ra_db::FileId;
@@ -14,11 +15,15 @@ pub(crate) fn syntax_tree(
14) -> String { 15) -> String {
15 if let Some(text_range) = text_range { 16 if let Some(text_range) = text_range {
16 let file = db.parse(file_id); 17 let file = db.parse(file_id);
17 let node = algo::find_covering_node(file.syntax(), text_range); 18 let node = match algo::find_covering_element(file.syntax(), text_range) {
18 19 SyntaxElement::Node(node) => node,
19 if let Some(tree) = syntax_tree_for_string(node, text_range) { 20 SyntaxElement::Token(token) => {
20 return tree; 21 if let Some(tree) = syntax_tree_for_string(token, text_range) {
21 } 22 return tree;
23 }
24 token.parent()
25 }
26 };
22 27
23 node.debug_dump() 28 node.debug_dump()
24 } else { 29 } else {
@@ -28,19 +33,19 @@ pub(crate) fn syntax_tree(
28 33
29/// Attempts parsing the selected contents of a string literal 34/// Attempts parsing the selected contents of a string literal
30/// as rust syntax and returns its syntax tree 35/// as rust syntax and returns its syntax tree
31fn syntax_tree_for_string(node: &SyntaxNode, text_range: TextRange) -> Option<String> { 36fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> {
32 // When the range is inside a string 37 // When the range is inside a string
33 // we'll attempt parsing it as rust syntax 38 // we'll attempt parsing it as rust syntax
34 // to provide the syntax tree of the contents of the string 39 // to provide the syntax tree of the contents of the string
35 visitor() 40 match token.kind() {
36 .visit(|node: &ast::String| syntax_tree_for_token(node, text_range)) 41 STRING | RAW_STRING => syntax_tree_for_token(token, text_range),
37 .visit(|node: &ast::RawString| syntax_tree_for_token(node, text_range)) 42 _ => None,
38 .accept(node)? 43 }
39} 44}
40 45
41fn syntax_tree_for_token<T: AstToken>(node: &T, text_range: TextRange) -> Option<String> { 46fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> {
42 // Range of the full node 47 // Range of the full node
43 let node_range = node.syntax().range(); 48 let node_range = node.range();
44 let text = node.text().to_string(); 49 let text = node.text().to_string();
45 50
46 // We start at some point inside the node 51 // We start at some point inside the node
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs
index 501d44dbb..4510d663d 100644
--- a/crates/ra_ide_api/src/typing.rs
+++ b/crates/ra_ide_api/src/typing.rs
@@ -1,8 +1,8 @@
1use ra_syntax::{ 1use ra_syntax::{
2 AstNode, SourceFile, SyntaxKind::*, 2 AstNode, SourceFile, SyntaxKind::*,
3 SyntaxNode, TextUnit, TextRange, 3 TextUnit, TextRange, SyntaxToken,
4 algo::{find_node_at_offset, find_leaf_at_offset, LeafAtOffset}, 4 algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
5 ast::{self, AstToken}, 5 ast::{self},
6}; 6};
7use ra_fmt::leading_indent; 7use ra_fmt::leading_indent;
8use ra_text_edit::{TextEdit, TextEditBuilder}; 8use ra_text_edit::{TextEdit, TextEditBuilder};
@@ -11,11 +11,11 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
11 11
12pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { 12pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
13 let file = db.parse(position.file_id); 13 let file = db.parse(position.file_id);
14 let comment = find_leaf_at_offset(file.syntax(), position.offset) 14 let comment = find_token_at_offset(file.syntax(), position.offset)
15 .left_biased() 15 .left_biased()
16 .and_then(ast::Comment::cast)?; 16 .and_then(ast::Comment::cast)?;
17 17
18 if let ast::CommentFlavor::Multiline = comment.flavor() { 18 if comment.flavor() == ast::CommentFlavor::Multiline {
19 return None; 19 return None;
20 } 20 }
21 21
@@ -41,23 +41,23 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
41 ) 41 )
42} 42}
43 43
44fn node_indent<'a>(file: &'a SourceFile, node: &SyntaxNode) -> Option<&'a str> { 44fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> {
45 let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) { 45 let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
46 LeafAtOffset::Between(l, r) => { 46 TokenAtOffset::Between(l, r) => {
47 assert!(r == node); 47 assert!(r == token);
48 l 48 l
49 } 49 }
50 LeafAtOffset::Single(n) => { 50 TokenAtOffset::Single(n) => {
51 assert!(n == node); 51 assert!(n == token);
52 return Some(""); 52 return Some("");
53 } 53 }
54 LeafAtOffset::None => unreachable!(), 54 TokenAtOffset::None => unreachable!(),
55 }; 55 };
56 if ws.kind() != WHITESPACE { 56 if ws.kind() != WHITESPACE {
57 return None; 57 return None;
58 } 58 }
59 let text = ws.leaf_text().unwrap(); 59 let text = ws.text();
60 let pos = text.as_str().rfind('\n').map(|it| it + 1).unwrap_or(0); 60 let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
61 Some(&text[pos..]) 61 Some(&text[pos..])
62} 62}
63 63
@@ -88,7 +88,7 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
88 let file = db.parse(position.file_id); 88 let file = db.parse(position.file_id);
89 assert_eq!(file.syntax().text().char_at(position.offset), Some('.')); 89 assert_eq!(file.syntax().text().char_at(position.offset), Some('.'));
90 90
91 let whitespace = find_leaf_at_offset(file.syntax(), position.offset) 91 let whitespace = find_token_at_offset(file.syntax(), position.offset)
92 .left_biased() 92 .left_biased()
93 .and_then(ast::Whitespace::cast)?; 93 .and_then(ast::Whitespace::cast)?;
94 94
@@ -100,7 +100,7 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<
100 let current_indent_len = TextUnit::of_str(current_indent); 100 let current_indent_len = TextUnit::of_str(current_indent);
101 101
102 // Make sure dot is a part of call chain 102 // Make sure dot is a part of call chain
103 let field_expr = whitespace.syntax().parent().and_then(ast::FieldExpr::cast)?; 103 let field_expr = ast::FieldExpr::cast(whitespace.syntax().parent())?;
104 let prev_indent = leading_indent(field_expr.syntax())?; 104 let prev_indent = leading_indent(field_expr.syntax())?;
105 let target_indent = format!(" {}", prev_indent); 105 let target_indent = format!(" {}", prev_indent);
106 let target_indent_len = TextUnit::of_str(&target_indent); 106 let target_indent_len = TextUnit::of_str(&target_indent);
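
The node_indent helper in the typing.rs hunk above answers "what indentation sits in front of this token?" by inspecting the whitespace to its left up to the last newline. The sketch below does the same over a plain string; the function name and the empty-string fallback are simplifications, not the real token-based code.

// Sketch: the indent of a token is whatever follows the last newline before it,
// provided that prefix is made up of spaces and tabs only.
fn indent_before(text: &str, token_start: usize) -> &str {
    let prefix = &text[..token_start];
    let line_start = prefix.rfind('\n').map(|i| i + 1).unwrap_or(0);
    let candidate = &prefix[line_start..];
    if candidate.chars().all(|c| c == ' ' || c == '\t') {
        candidate
    } else {
        // Something other than indentation precedes the token on its line.
        ""
    }
}

fn main() {
    let text = "fn main() {\n    let x = 1;\n}";
    let offset_of_let = text.find("let").unwrap();
    assert_eq!(indent_before(text, offset_of_let), "    ");
    println!("indent is {:?}", indent_before(text, offset_of_let));
}
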