Diffstat (limited to 'crates/ra_ide/src')
-rw-r--r--   crates/ra_ide/src/display/navigation_target.rs |   6
-rw-r--r--   crates/ra_ide/src/extend_selection.rs           | 168
-rw-r--r--   crates/ra_ide/src/impls.rs                      |  12
3 files changed, 170 insertions, 16 deletions
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs
index f2e45fa31..b2af3479c 100644
--- a/crates/ra_ide/src/display/navigation_target.rs
+++ b/crates/ra_ide/src/display/navigation_target.rs
@@ -251,7 +251,11 @@ impl ToNav for hir::Module {
 impl ToNav for hir::ImplBlock {
     fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
         let src = self.source(db);
-        let frange = original_range(db, src.as_ref().map(|it| it.syntax()));
+        let frange = if let Some(item) = self.is_builtin_derive(db) {
+            original_range(db, item.syntax())
+        } else {
+            original_range(db, src.as_ref().map(|it| it.syntax()))
+        };
 
         NavigationTarget::from_syntax(
             frange.file_id,
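Aside: the change above covers impls synthesized by builtin derives, which have no impl block of their own in the user's file. When `is_builtin_derive` reports the item carrying the derive, navigation uses that item's range instead of the macro-generated source. Below is a minimal sketch of that fallback shape only; the types are hypothetical stand-ins, not the real hir/ra_ide signatures.

// Hypothetical, simplified types; only the control flow mirrors the patch above.
#[derive(Clone, Copy, Debug, PartialEq)]
struct FileRange {
    file_id: u32,
    start: u32,
    end: u32,
}

enum ImplOrigin {
    /// The impl block is written out in source: navigate to it directly.
    Source(FileRange),
    /// The impl is generated by a builtin derive (e.g. `#[derive(Copy)]`):
    /// navigate to the item carrying the derive instead.
    BuiltinDerive(FileRange),
}

fn nav_range(origin: ImplOrigin) -> FileRange {
    match origin {
        // Corresponds to the `if let Some(item) = self.is_builtin_derive(db)` arm.
        ImplOrigin::BuiltinDerive(item) => item,
        // Corresponds to the pre-existing `original_range(db, src...)` path.
        ImplOrigin::Source(impl_block) => impl_block,
    }
}

fn main() {
    let derived = ImplOrigin::BuiltinDerive(FileRange { file_id: 1, start: 0, end: 15 });
    let written = ImplOrigin::Source(FileRange { file_id: 1, start: 16, end: 40 });
    // A derive-generated impl reports the range of the derive attribute itself,
    // while a hand-written impl still reports its own range.
    assert_eq!(nav_range(derived), FileRange { file_id: 1, start: 0, end: 15 });
    assert_eq!(nav_range(written), FileRange { file_id: 1, start: 16, end: 40 });
}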
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs
index 1ec41a117..70b6fde82 100644
--- a/crates/ra_ide/src/extend_selection.rs
+++ b/crates/ra_ide/src/extend_selection.rs
@@ -4,20 +4,27 @@ use ra_db::SourceDatabase;
 use ra_syntax::{
     algo::find_covering_element,
     ast::{self, AstNode, AstToken},
-    Direction, NodeOrToken,
+    Direction, NodeOrToken, SyntaxElement,
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
-use crate::{db::RootDatabase, FileRange};
+use crate::{db::RootDatabase, expand::descend_into_macros, FileId, FileRange};
+use hir::db::AstDatabase;
+use std::iter::successors;
 
-// FIXME: restore macro support
 pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
-    let parse = db.parse(frange.file_id);
-    try_extend_selection(parse.tree().syntax(), frange.range).unwrap_or(frange.range)
+    let src = db.parse(frange.file_id).tree();
+    try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range)
 }
 
-fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange> {
+fn try_extend_selection(
+    db: &RootDatabase,
+    root: &SyntaxNode,
+    frange: FileRange,
+) -> Option<TextRange> {
+    let range = frange.range;
+
     let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING];
     let list_kinds = [
         RECORD_FIELD_PAT_LIST,
@@ -72,12 +79,21 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         }
         NodeOrToken::Node(node) => node,
     };
+
+    // if we are in single token_tree, we maybe live in macro or attr
+    if node.kind() == TOKEN_TREE {
+        if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
+            if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) {
+                return Some(range);
+            }
+        }
+    }
+
     if node.text_range() != range {
         return Some(node.text_range());
     }
 
-    // Using shallowest node with same range allows us to traverse siblings.
-    let node = node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap();
+    let node = shallowest_node(&node.into()).unwrap();
 
     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
         if let Some(range) = extend_list_item(&node) {
@@ -88,6 +104,94 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
     node.parent().map(|it| it.text_range())
 }
 
+fn extend_tokens_from_range(
+    db: &RootDatabase,
+    file_id: FileId,
+    macro_call: ast::MacroCall,
+    original_range: TextRange,
+) -> Option<TextRange> {
+    let src = find_covering_element(&macro_call.syntax(), original_range);
+    let (first_token, last_token) = match src {
+        NodeOrToken::Node(it) => (it.first_token()?, it.last_token()?),
+        NodeOrToken::Token(it) => (it.clone(), it),
+    };
+
+    let mut first_token = skip_whitespace(first_token, Direction::Next)?;
+    let mut last_token = skip_whitespace(last_token, Direction::Prev)?;
+
+    while !first_token.text_range().is_subrange(&original_range) {
+        first_token = skip_whitespace(first_token.next_token()?, Direction::Next)?;
+    }
+    while !last_token.text_range().is_subrange(&original_range) {
+        last_token = skip_whitespace(last_token.prev_token()?, Direction::Prev)?;
+    }
+
+    // compute original mapped token range
+    let expanded = {
+        let first_node = descend_into_macros(db, file_id, first_token.clone());
+        let first_node = first_node.map(|it| it.text_range());
+
+        let last_node = descend_into_macros(db, file_id, last_token.clone());
+        if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
+            return None;
+        }
+        first_node.map(|it| union_range(it, last_node.value.text_range()))
+    };
+
+    // Compute parent node range
+    let src = db.parse_or_expand(expanded.file_id)?;
+    let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
+
+    let validate = |token: SyntaxToken| {
+        let node = descend_into_macros(db, file_id, token.clone());
+        if node.file_id == expanded.file_id
+            && node.value.text_range().is_subrange(&parent.text_range())
+        {
+            Some(token)
+        } else {
+            None
+        }
+    };
+
+    // Find the first and last text range under expanded parent
+    let first = successors(Some(first_token), |token| {
+        validate(skip_whitespace(token.prev_token()?, Direction::Prev)?)
+    })
+    .last()?;
+    let last = successors(Some(last_token), |token| {
+        validate(skip_whitespace(token.next_token()?, Direction::Next)?)
+    })
+    .last()?;
+
+    let range = union_range(first.text_range(), last.text_range());
+    if original_range.is_subrange(&range) && original_range != range {
+        Some(range)
+    } else {
+        None
+    }
+}
+
+fn skip_whitespace(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+    while token.kind() == WHITESPACE {
+        token = match direction {
+            Direction::Next => token.next_token()?,
+            Direction::Prev => token.prev_token()?,
+        }
+    }
+    Some(token)
+}
+
+fn union_range(range: TextRange, r: TextRange) -> TextRange {
+    let start = range.start().min(r.start());
+    let end = range.end().max(r.end());
+    TextRange::from_to(start, end)
+}
+
+/// Find the shallowest node with same range, which allows us to traverse siblings.
+fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> {
+    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last()
+}
+
 fn extend_single_word_in_comment_or_string(
     leaf: &SyntaxToken,
     offset: TextUnit,
@@ -227,18 +331,19 @@ fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
 
 #[cfg(test)]
 mod tests {
-    use ra_syntax::{AstNode, SourceFile};
-    use test_utils::extract_offset;
-
     use super::*;
+    use crate::mock_analysis::single_file;
+    use test_utils::extract_offset;
 
     fn do_check(before: &str, afters: &[&str]) {
         let (cursor, before) = extract_offset(before);
-        let parse = SourceFile::parse(&before);
-        let mut range = TextRange::offset_len(cursor, 0.into());
+        let (analysis, file_id) = single_file(&before);
+        let range = TextRange::offset_len(cursor, 0.into());
+        let mut frange = FileRange { file_id: file_id, range };
+
         for &after in afters {
-            range = try_extend_selection(parse.tree().syntax(), range).unwrap();
-            let actual = &before[range];
+            frange.range = analysis.extend_selection(frange).unwrap();
+            let actual = &before[frange.range];
             assert_eq!(after, actual);
         }
     }
@@ -503,4 +608,37 @@ fn main() { let var = (
             ],
         );
     }
+
+    #[test]
+    fn extend_selection_inside_macros() {
+        do_check(
+            r#"macro_rules! foo { ($item:item) => {$item} }
+foo!{fn hello(na<|>me:usize){}}"#,
+            &[
+                "name",
+                "name:usize",
+                "(name:usize)",
+                "fn hello(name:usize){}",
+                "{fn hello(name:usize){}}",
+                "foo!{fn hello(name:usize){}}",
+            ],
+        );
+    }
+
+    #[test]
+    fn extend_selection_inside_recur_macros() {
+        do_check(
+            r#" macro_rules! foo2 { ($item:item) => {$item} }
+macro_rules! foo { ($item:item) => {foo2!($item);} }
+foo!{fn hello(na<|>me:usize){}}"#,
+            &[
+                "name",
+                "name:usize",
+                "(name:usize)",
+                "fn hello(name:usize){}",
+                "{fn hello(name:usize){}}",
+                "foo!{fn hello(name:usize){}}",
+            ],
+        );
+    }
 }
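Aside: the heart of the restored macro support is `extend_tokens_from_range`. It maps the selection's edge tokens into the macro expansion with `descend_into_macros`, takes the parent of the shallowest covering node in the expanded file as a bound, and then grows the selection token by token in the original file for as long as each candidate token still maps inside that bound, finally unioning the accepted token ranges. The sketch below illustrates only that grow-and-union loop over plain integer ranges; the token ranges and the `parent` bound are made-up illustrative values, not output of ra_syntax.

use std::iter::successors;

// Hypothetical stand-in for TextRange: a half-open byte range.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Range {
    start: u32,
    end: u32,
}

impl Range {
    fn union(self, other: Range) -> Range {
        Range { start: self.start.min(other.start), end: self.end.max(other.end) }
    }
    fn is_subrange(self, outer: Range) -> bool {
        outer.start <= self.start && self.end <= outer.end
    }
}

fn main() {
    // Rough token ranges of `foo!{fn hello(name...` (illustrative values).
    let tokens = [
        Range { start: 0, end: 3 },   // foo
        Range { start: 3, end: 4 },   // !
        Range { start: 4, end: 5 },   // {
        Range { start: 5, end: 7 },   // fn
        Range { start: 8, end: 13 },  // hello
        Range { start: 13, end: 14 }, // (
        Range { start: 14, end: 18 }, // name
    ];
    // Assumed bound from the expanded parent node: it covers the braced
    // token tree but not the macro path or the `!`.
    let parent = Range { start: 4, end: 18 };

    // Mirror the successors() walk from the patch: starting at the token
    // under the cursor, keep stepping left while the previous token still
    // falls inside the parent bound, then take the last accepted token.
    let cursor_idx = 6; // `name`
    let first = successors(Some(cursor_idx), |&i| {
        let prev = i.checked_sub(1)?;
        if tokens[prev].is_subrange(parent) {
            Some(prev)
        } else {
            None
        }
    })
    .last()
    .unwrap();

    // Union the leftmost accepted token with the cursor token, as the patch
    // does with union_range on the first and last tokens.
    let extended = tokens[first].union(tokens[cursor_idx]);
    assert_eq!(extended, Range { start: 4, end: 18 });
}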
diff --git a/crates/ra_ide/src/impls.rs b/crates/ra_ide/src/impls.rs
index 9b165ee2a..31195036e 100644
--- a/crates/ra_ide/src/impls.rs
+++ b/crates/ra_ide/src/impls.rs
@@ -203,4 +203,16 @@ mod tests {
             ],
         );
     }
+
+    #[test]
+    fn goto_implementation_to_builtin_derive() {
+        check_goto(
+            "
+            //- /lib.rs
+            #[derive(Copy)]
+            struct Foo<|>;
+            ",
+            &["impl IMPL_BLOCK FileId(1) [0; 15)"],
+        );
+    }
 }
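Aside: the expected target range `[0; 15)` is exactly the span of `#[derive(Copy)]` in the fixture, so goto implementation on `Foo` now lands on the derive attribute that produced the impl. The fixture is never compiled; a standalone, compilable version of the same setup also needs `Clone`, since `Copy` requires it:

// Standalone version of the scenario in goto_implementation_to_builtin_derive.
// A real crate must derive Clone as well, because Copy has Clone as a supertrait;
// the test fixture above is only parsed, so it can get away with Copy alone.
#[derive(Copy, Clone)]
struct Foo;

fn main() {
    let a = Foo;
    let b = a; // `a` is copied, not moved
    let _ = (a, b); // both bindings remain usable because Foo is Copy
}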