author     bors[bot] <bors[bot]@users.noreply.github.com>    2018-10-23 14:28:05 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>    2018-10-23 14:28:05 +0100
commit     ed736b1ac1d75eaea650d5d953950f0aed947865
tree       0368291df52fd3ca9ebc13ddc33154813e6472c6
parent     0043d7e9c7f27af95cbf299d5b92ce28ba9945ab
parent     7ee10b073e4a17192bf1d12f6ea06efb293d6dfe
Merge #153
153: Restore folding import groups r=matklad a=aochagavia
Closes #133
Co-authored-by: Adolfo Ochagavía <[email protected]>
 crates/ra_editor/src/folding_ranges.rs | 103
 1 file changed, 97 insertions(+), 6 deletions(-)
diff --git a/crates/ra_editor/src/folding_ranges.rs b/crates/ra_editor/src/folding_ranges.rs
index d0d4ed3d3..8b79ea874 100644
--- a/crates/ra_editor/src/folding_ranges.rs
+++ b/crates/ra_editor/src/folding_ranges.rs
@@ -21,6 +21,7 @@ pub struct Fold {
 pub fn folding_ranges(file: &File) -> Vec<Fold> {
     let mut res = vec![];
     let mut visited_comments = FxHashSet::default();
+    let mut visited_imports = FxHashSet::default();
 
     for node in file.syntax().descendants() {
         // Fold items that span multiple lines
@@ -33,11 +34,8 @@ pub fn folding_ranges(file: &File) -> Vec<Fold> {
             }
         }
 
-        // Also fold groups of comments
-        if visited_comments.contains(&node) {
-            continue;
-        }
-        if node.kind() == COMMENT {
+        // Fold groups of comments
+        if node.kind() == COMMENT && !visited_comments.contains(&node) {
             if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) {
                 res.push(Fold {
                     range,
@@ -45,6 +43,16 @@ pub fn folding_ranges(file: &File) -> Vec<Fold> {
                 })
             }
         }
+
+        // Fold groups of imports
+        if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
+            if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
+                res.push(Fold {
+                    range,
+                    kind: FoldKind::Imports,
+                })
+            }
+        }
     }
 
     res
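Note: the loop now tracks visited `use` items the same way it already tracks visited comments, so a whole import group is folded once, starting from its first item. The sketch below illustrates the same grouping rule (consecutive `use` items fold together until a blank line or a different kind of item) on plain source lines; it is a simplified stand-in, `import_fold_ranges` is a hypothetical name, and it does not use the rust-analyzer syntax-tree API.

```rust
// Illustration only: fold runs of two or more `use` lines that are not
// separated by a blank line. Line indices stand in for `TextRange`.
fn import_fold_ranges(src: &str) -> Vec<(usize, usize)> {
    let mut folds = Vec::new();
    let mut group_start: Option<usize> = None;
    let mut last_use = 0;

    for (idx, line) in src.lines().enumerate() {
        if line.trim_start().starts_with("use ") {
            if group_start.is_none() {
                group_start = Some(idx);
            }
            last_use = idx;
        } else if let Some(start) = group_start.take() {
            // Any non-`use` line (blank line, comment, other item) ends the group;
            // only groups with at least two `use` lines produce a fold.
            if last_use > start {
                folds.push((start, last_use));
            }
        }
    }
    // Close a group that runs to the end of the file.
    if let Some(start) = group_start {
        if last_use > start {
            folds.push((start, last_use));
        }
    }
    folds
}

fn main() {
    let src = "use std::str;\nuse std::vec;\n\nuse std::mem;\nfn main() {}\n";
    // Lines 0 and 1 fold together; the lone `use std::mem;` after the blank line does not.
    assert_eq!(import_fold_ranges(src), vec![(0, 1)]);
}
```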
@@ -74,6 +82,44 @@ fn has_newline(node: SyntaxNodeRef) -> bool {
     false
 }
 
+fn contiguous_range_for_group<'a>(
+    first: SyntaxNodeRef<'a>,
+    visited: &mut FxHashSet<SyntaxNodeRef<'a>>,
+) -> Option<TextRange> {
+    visited.insert(first);
+
+    let mut last = first;
+    for node in first.siblings(Direction::Next) {
+        if let Some(ws) = ast::Whitespace::cast(node) {
+            // There is a blank line, which means that the group ends here
+            if ws.count_newlines_lazy().take(2).count() == 2 {
+                break;
+            }
+
+            // Ignore whitespace without blank lines
+            continue;
+        }
+
+        // Stop if we find a node that doesn't belong to the group
+        if node.kind() != first.kind() {
+            break;
+        }
+
+        visited.insert(node);
+        last = node;
+    }
+
+    if first != last {
+        Some(TextRange::from_to(
+            first.range().start(),
+            last.range().end(),
+        ))
+    } else {
+        // The group consists of only one element, therefore it cannot be folded
+        None
+    }
+}
+
 fn contiguous_range_for_comment<'a>(
     first: SyntaxNodeRef<'a>,
     visited: &mut FxHashSet<SyntaxNodeRef<'a>>,
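Note: the whitespace check `ws.count_newlines_lazy().take(2).count() == 2` asks whether the gap between two siblings contains at least two newlines, i.e. a blank line, without walking the whole token. The sketch below shows the same idea on an ordinary string slice; `contains_blank_line` is an illustrative helper, not part of the crate.

```rust
// Returns true when the whitespace between two items spans a blank line,
// i.e. contains at least two newlines.
fn contains_blank_line(ws: &str) -> bool {
    // `take(2)` lets the scan stop as soon as a second newline is found.
    ws.chars().filter(|&c| c == '\n').take(2).count() == 2
}

fn main() {
    assert!(!contains_blank_line("\n    "));    // single newline: same group
    assert!(contains_blank_line("\n\n"));       // blank line: group ends
    assert!(contains_blank_line("\n  \n  \n")); // multiple blank lines count too
}
```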
@@ -128,7 +174,8 @@ mod tests {
         let file = File::parse(&text);
         let folds = folding_ranges(&file);
 
-        assert_eq!(folds.len(), ranges.len());
+        assert_eq!(folds.len(), ranges.len(), "The amount of folds is different than the expected amount");
+        assert_eq!(folds.len(), fold_kinds.len(), "The amount of fold kinds is different than the expected amount");
         for ((fold, range), fold_kind) in folds
             .into_iter()
             .zip(ranges.into_iter())
@@ -186,4 +233,48 @@ fn main() {
         do_check(text, folds);
     }
 
+    #[test]
+    fn test_fold_import_groups() {
+        let text = r#"
+<|>use std::str;
+use std::vec;
+use std::io as iop;<|>
+
+<|>use std::mem;
+use std::f64;<|>
+
+use std::collections::HashMap;
+// Some random comment
+use std::collections::VecDeque;
+
+fn main() {
+}"#;
+
+        let folds = &[FoldKind::Imports, FoldKind::Imports];
+        do_check(text, folds);
+    }
+
+    #[test]
+    fn test_fold_import_and_groups() {
+        let text = r#"
+<|>use std::str;
+use std::vec;
+use std::io as iop;<|>
+
+<|>use std::mem;
+use std::f64;<|>
+
+<|>use std::collections::{
+    HashMap,
+    VecDeque,
+};<|>
+// Some random comment
+
+fn main() {
+}"#;
+
+        let folds = &[FoldKind::Imports, FoldKind::Imports, FoldKind::Imports];
+        do_check(text, folds);
+    }
+
 }
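Note: the test fixtures above mark each expected fold span with a pair of `<|>` markers, and `do_check` compares the output of `folding_ranges` against those spans and the expected `FoldKind`s. The `do_check` helper itself is defined earlier in the file and is not part of this diff; the sketch below is a hypothetical reconstruction of how such markers could be stripped from a fixture and turned into ranges, with `extract_marked_ranges` being an invented name for illustration.

```rust
// Hypothetical sketch of the fixture convention: strip `<|>` markers and
// return the clean text plus the (start, end) byte offsets of each marked span.
fn extract_marked_ranges(fixture: &str) -> (String, Vec<(usize, usize)>) {
    const MARKER: &str = "<|>";
    let mut clean = String::with_capacity(fixture.len());
    let mut ranges = Vec::new();
    let mut open: Option<usize> = None;
    let mut rest = fixture;

    while let Some(pos) = rest.find(MARKER) {
        clean.push_str(&rest[..pos]);
        match open.take() {
            None => open = Some(clean.len()),                  // opening marker
            Some(start) => ranges.push((start, clean.len())),  // closing marker
        }
        rest = &rest[pos + MARKER.len()..];
    }
    clean.push_str(rest);
    assert!(open.is_none(), "unbalanced <|> markers in fixture");
    (clean, ranges)
}

fn main() {
    let (text, ranges) =
        extract_marked_ranges("<|>use std::str;\nuse std::vec;<|>\n\nfn main() {}");
    assert_eq!(text, "use std::str;\nuse std::vec;\n\nfn main() {}");
    assert_eq!(ranges, vec![(0, 27)]);
}
```

The expected ranges extracted this way would then be compared pairwise against the `Fold` values returned by `folding_ranges`, which is consistent with the two `assert_eq!` calls visible in the hunk at line 174 of the new file.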