Diffstat (limited to 'crates/ra_ide/src/extend_selection.rs')
-rw-r--r--  crates/ra_ide/src/extend_selection.rs | 99
1 file changed, 40 insertions(+), 59 deletions(-)
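
This patch swaps the hand-rolled `skip_whitespace`/`union_range` helpers for `ra_syntax::algo::skip_trivia_token` and `TextRange::extend_to`, and routes macro descent through `hir::Semantics` instead of raw database calls. Below is a minimal sketch of the token-walking pattern the new `extend_tokens_from_range` relies on, assuming the `ra_syntax` crate at this revision is available as a dependency; the demo text, the chosen offset, and the `main` harness are illustrative and not part of the patch.

    // Illustrative only: walk outward from a token, skipping trivia, the way the
    // patched extend_tokens_from_range does with `successors` + `skip_trivia_token`.
    use std::iter::successors;

    use ra_syntax::{
        algo::{find_covering_element, skip_trivia_token},
        ast::AstNode,
        Direction, NodeOrToken, SourceFile, TextRange, TextUnit,
    };

    fn main() {
        let text = "fn main() { let x = 1 + 2; }";
        let file = SourceFile::parse(text).tree();

        // Cover the `1` literal (offset 20, length 1) and take the token under it.
        let range = TextRange::offset_len(TextUnit::from(20u32), TextUnit::from(1u32));
        let token = match find_covering_element(file.syntax(), range) {
            NodeOrToken::Token(it) => it,
            NodeOrToken::Node(it) => it.first_token().unwrap(),
        };

        // Walk rightwards over non-trivia tokens; the last one reached is `}`.
        let last = successors(Some(token), |t| {
            let t = t.next_token()?;
            skip_trivia_token(t, Direction::Next)
        })
        .last()
        .unwrap();

        println!("last non-trivia token: {}", last.text());
    }

The real code additionally filters the walk with `take_while(validate)`, which checks that each descended token still lands under the common ancestor computed from the macro-expanded first and last tokens.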
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs
index 4757d8e22..f5a063351 100644
--- a/crates/ra_ide/src/extend_selection.rs
+++ b/crates/ra_ide/src/extend_selection.rs
@@ -2,26 +2,26 @@
 
 use std::iter::successors;
 
-use hir::db::AstDatabase;
-use ra_db::SourceDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
-    algo::find_covering_element,
+    algo::{self, find_covering_element, skip_trivia_token},
     ast::{self, AstNode, AstToken},
-    Direction, NodeOrToken, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
-use crate::{expand::descend_into_macros, FileId, FileRange};
+use crate::FileRange;
 
 pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
-    let src = db.parse(frange.file_id).tree();
-    try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range)
+    let sema = Semantics::new(db);
+    let src = sema.parse(frange.file_id);
+    try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
 }
 
 fn try_extend_selection(
-    db: &RootDatabase,
+    sema: &Semantics<RootDatabase>,
     root: &SyntaxNode,
     frange: FileRange,
 ) -> Option<TextRange> {
@@ -86,7 +86,7 @@ fn try_extend_selection(
     // if we are in single token_tree, we maybe live in macro or attr
     if node.kind() == TOKEN_TREE {
         if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
-            if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) {
+            if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
                 return Some(range);
             }
         }
@@ -96,7 +96,7 @@ fn try_extend_selection(
         return Some(node.text_range());
     }
 
-    let node = shallowest_node(&node.into()).unwrap();
+    let node = shallowest_node(&node.into());
 
     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
         if let Some(range) = extend_list_item(&node) {
@@ -108,8 +108,7 @@ fn try_extend_selection(
 }
 
 fn extend_tokens_from_range(
-    db: &RootDatabase,
-    file_id: FileId,
+    sema: &Semantics<RootDatabase>,
     macro_call: ast::MacroCall,
     original_range: TextRange,
 ) -> Option<TextRange> {
@@ -119,54 +118,50 @@ fn extend_tokens_from_range(
         NodeOrToken::Token(it) => (it.clone(), it),
     };
 
-    let mut first_token = skip_whitespace(first_token, Direction::Next)?;
-    let mut last_token = skip_whitespace(last_token, Direction::Prev)?;
+    let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
+    let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
 
     while !first_token.text_range().is_subrange(&original_range) {
-        first_token = skip_whitespace(first_token.next_token()?, Direction::Next)?;
+        first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
     }
     while !last_token.text_range().is_subrange(&original_range) {
-        last_token = skip_whitespace(last_token.prev_token()?, Direction::Prev)?;
+        last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
    }
 
     // compute original mapped token range
-    let expanded = {
-        let first_node = descend_into_macros(db, file_id, first_token.clone());
-        let first_node = first_node.map(|it| it.text_range());
-
-        let last_node = descend_into_macros(db, file_id, last_token.clone());
-        if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
-            return None;
-        }
-        first_node.map(|it| union_range(it, last_node.value.text_range()))
+    let extended = {
+        let fst_expanded = sema.descend_into_macros(first_token.clone());
+        let lst_expanded = sema.descend_into_macros(last_token.clone());
+        let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
+        lca = shallowest_node(&lca);
+        if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
+            lca = lca.parent()?;
+        }
+        lca
     };
 
     // Compute parent node range
-    let src = db.parse_or_expand(expanded.file_id)?;
-    let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
-
-    let validate = |token: SyntaxToken| {
-        let node = descend_into_macros(db, file_id, token.clone());
-        if node.file_id == expanded.file_id
-            && node.value.text_range().is_subrange(&parent.text_range())
-        {
-            Some(token)
-        } else {
-            None
-        }
+    let validate = |token: &SyntaxToken| {
+        let expanded = sema.descend_into_macros(token.clone());
+        algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
     };
 
     // Find the first and last text range under expanded parent
     let first = successors(Some(first_token), |token| {
-        validate(skip_whitespace(token.prev_token()?, Direction::Prev)?)
+        let token = token.prev_token()?;
+        skip_trivia_token(token, Direction::Prev)
     })
+    .take_while(validate)
     .last()?;
+
     let last = successors(Some(last_token), |token| {
-        validate(skip_whitespace(token.next_token()?, Direction::Next)?)
+        let token = token.next_token()?;
+        skip_trivia_token(token, Direction::Next)
     })
+    .take_while(validate)
     .last()?;
 
-    let range = union_range(first.text_range(), last.text_range());
+    let range = first.text_range().extend_to(&last.text_range());
     if original_range.is_subrange(&range) && original_range != range {
         Some(range)
     } else {
@@ -174,25 +169,9 @@ fn extend_tokens_from_range(
     }
 }
 
-fn skip_whitespace(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
-    while token.kind() == WHITESPACE {
-        token = match direction {
-            Direction::Next => token.next_token()?,
-            Direction::Prev => token.prev_token()?,
-        }
-    }
-    Some(token)
-}
-
-fn union_range(range: TextRange, r: TextRange) -> TextRange {
-    let start = range.start().min(r.start());
-    let end = range.end().max(r.end());
-    TextRange::from_to(start, end)
-}
-
 /// Find the shallowest node with same range, which allows us to traverse siblings.
-fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> {
-    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last()
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
+    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
 }
 
 fn extend_single_word_in_comment_or_string(
@@ -334,10 +313,12 @@ fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
-    use crate::mock_analysis::single_file;
     use test_utils::extract_offset;
 
+    use crate::mock_analysis::single_file;
+
+    use super::*;
+
     fn do_check(before: &str, afters: &[&str]) {
         let (cursor, before) = extract_offset(before);
         let (analysis, file_id) = single_file(&before);