path: root/crates/ra_ide/src
author   Edwin Cheng <[email protected]>   2020-01-08 20:03:50 +0000
committer   Edwin Cheng <[email protected]>   2020-01-12 12:25:58 +0000
commit   caed836e417a239ae1e384f7e977352b846a3804 (patch)
tree     a820c24381d7e9cf2d1eb3a5aa87c687ffb8ac35 /crates/ra_ide/src
parent   b30e6a7b56942f31cbd7b9fdb78925bf5b65b247 (diff)
Use first and last token only
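
The idea of the change: rather than folding the macro-mapped range of every token inside the selection, only the first and the last token that fall inside the original range are descended into the macro expansion, and their ranges are unioned. The snippet below is a minimal, self-contained sketch of that idea under simplified assumptions: plain (start, end) tuples stand in for rust-analyzer's TextRange/SyntaxToken, and the names (Range, extend_by_first_and_last, is_subrange, union_range) are illustrative, not the ra_ide API.

    // Illustrative sketch only: plain (start, end) tuples stand in for
    // rust-analyzer's TextRange, and the slice of token ranges stands in for
    // the non-whitespace tokens collected under the MacroCall node.
    type Range = (u32, u32);

    fn is_subrange(inner: Range, outer: Range) -> bool {
        outer.0 <= inner.0 && inner.1 <= outer.1
    }

    fn union_range(a: Range, b: Range) -> Range {
        (a.0.min(b.0), a.1.max(b.1))
    }

    // Union only the first and the last token that lie inside `original_range`,
    // instead of folding over every token in between.
    fn extend_by_first_and_last(tokens: &[Range], original_range: Range) -> Option<Range> {
        let first = tokens.iter().copied().find(|&t| is_subrange(t, original_range))?;
        let last = tokens.iter().copied().rev().find(|&t| is_subrange(t, original_range))?;
        Some(union_range(first, last))
    }

    fn main() {
        // Token ranges sorted by start offset, e.g. for `foo! (bar.baz)`.
        let tokens = [(0, 3), (3, 4), (5, 6), (6, 9), (9, 10), (10, 13), (13, 14)];
        // A selection covering `bar.baz` is the union of its first and last token.
        assert_eq!(extend_by_first_and_last(&tokens, (6, 13)), Some((6, 13)));
    }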
Diffstat (limited to 'crates/ra_ide/src')
-rw-r--r--   crates/ra_ide/src/extend_selection.rs   91
1 file changed, 56 insertions(+), 35 deletions(-)
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs
index a9ad4b476..dc1a625ed 100644
--- a/crates/ra_ide/src/extend_selection.rs
+++ b/crates/ra_ide/src/extend_selection.rs
@@ -110,48 +110,69 @@ fn extend_tokens_from_range(
     macro_call: ast::MacroCall,
     original_range: TextRange,
 ) -> Option<TextRange> {
-    // compute original mapped token range
-    let mut expanded = None;
-    let range = macro_call
+    // Find all non-whitespace tokens under MacroCall
+    let all_tokens: Vec<_> = macro_call
         .syntax()
         .descendants_with_tokens()
-        .filter_map(|n| match n {
-            NodeOrToken::Token(token) if token.text_range().is_subrange(&original_range) => {
-                let node = descend_into_macros(db, file_id, token);
-                match node.file_id {
-                    it if it == file_id.into() => None,
-                    it if expanded.is_none() || expanded == Some(it) => {
-                        expanded = Some(it.into());
-                        Some(node.value.text_range())
-                    }
-                    _ => None,
-                }
+        .filter_map(|n| {
+            let token = n.as_token()?;
+            if token.kind() == WHITESPACE {
+                None
+            } else {
+                Some(token.clone())
             }
-            _ => None,
         })
-        .fold1(|x, y| union_range(x, y))?;
-
-    let expanded = expanded?;
-    let src = db.parse_or_expand(expanded)?;
-    let parent = shallowest_node(&find_covering_element(&src, range))?.parent()?;
-    // compute parent mapped token range
-    let range = macro_call
-        .syntax()
-        .descendants_with_tokens()
-        .filter_map(|n| match n {
-            NodeOrToken::Token(token) => {
-                let node = descend_into_macros(db, file_id, token.clone());
-                if node.file_id == expanded
-                    && node.value.text_range().is_subrange(&parent.text_range())
-                {
-                    Some(token.text_range())
+        .sorted_by(|a, b| Ord::cmp(&a.text_range().start(), &b.text_range().start()))
+        .collect();
+
+    // Get all indices which is in original range
+    let indices: Vec<_> =
+        all_tokens
+            .iter()
+            .enumerate()
+            .filter_map(|(i, token)| {
+                if token.text_range().is_subrange(&original_range) {
+                    Some(i)
                 } else {
                     None
                 }
-            }
-            _ => None,
-        })
-        .fold1(|x, y| union_range(x, y))?;
+            })
+            .collect();
+
+    // Compute the first and last token index in original_range
+    let first_idx = *indices.iter().min_by_key(|&&idx| all_tokens[idx].text_range().start())?;
+    let last_idx = *indices.iter().max_by_key(|&&idx| all_tokens[idx].text_range().end())?;
+
+    // compute original mapped token range
+    let expanded = {
+        let first_node = descend_into_macros(db, file_id, all_tokens[first_idx].clone());
+        let first_node = first_node.map(|it| it.text_range());
+
+        let last_node = descend_into_macros(db, file_id, all_tokens[last_idx].clone());
+        if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
+            return None;
+        }
+        first_node.map(|it| union_range(it, last_node.value.text_range()))
+    };
+
+    // Compute parent node range
+    let src = db.parse_or_expand(expanded.file_id)?;
+    let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
+
+    let validate = |&idx: &usize| {
+        let token: &SyntaxToken = &all_tokens[idx];
+        let node = descend_into_macros(db, file_id, token.clone());
+
+        node.file_id == expanded.file_id
+            && node.value.text_range().is_subrange(&parent.text_range())
+    };
+
+    // Find the first and last text range under expanded parent
+    let first = (0..=first_idx).rev().take_while(validate).last()?;
+    let last = (last_idx..all_tokens.len()).take_while(validate).last()?;
+
+    let range = union_range(all_tokens[first].text_range(), all_tokens[last].text_range());
+
     if original_range.is_subrange(&range) && original_range != range {
         Some(range)
     } else {