Diffstat (limited to 'crates/ra_editor')

 crates/ra_editor/Cargo.toml            |   1
 crates/ra_editor/src/completion.rs     |   8
 crates/ra_editor/src/folding_ranges.rs |   6
 crates/ra_editor/src/lib.rs            |  19
 crates/ra_editor/src/scope/fn_scope.rs |  14
 crates/ra_editor/src/typing.rs         | 195
 6 files changed, 155 insertions(+), 88 deletions(-)
diff --git a/crates/ra_editor/Cargo.toml b/crates/ra_editor/Cargo.toml
index 40e3254ff..91cefc8d7 100644
--- a/crates/ra_editor/Cargo.toml
+++ b/crates/ra_editor/Cargo.toml
@@ -8,6 +8,7 @@ publish = false
 itertools = "0.7.8"
 superslice = "0.1.0"
 join_to_string = "0.1.1"
+rustc-hash = "1.0"
 
 ra_syntax = { path = "../ra_syntax" }
 
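The new rustc-hash dependency supplies FxHashMap and FxHashSet, which the remaining files substitute for std's HashMap and HashSet. FxHasher gives up SipHash's HashDoS resistance in exchange for speed, which is fine here since the keys are syntax nodes and identifiers, never untrusted input. The swap is mechanical except for construction; a minimal sketch (the map contents are illustrative, not taken from the diff):

    use rustc_hash::{FxHashMap, FxHashSet};

    // FxHashMap/FxHashSet are aliases for HashMap/HashSet with a custom
    // hasher, so `new()` (only defined for the default RandomState hasher)
    // is unavailable and construction goes through `default()`.
    let mut params: FxHashMap<String, u32> = FxHashMap::default();
    params.insert("foo".to_string(), 1);

    let mut shadowed: FxHashSet<&str> = FxHashSet::default();
    assert!(shadowed.insert("x"));  // first insertion returns true
    assert!(!shadowed.insert("x")); // duplicates return false
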
diff --git a/crates/ra_editor/src/completion.rs b/crates/ra_editor/src/completion.rs
index 570d72d66..20b8484b3 100644
--- a/crates/ra_editor/src/completion.rs
+++ b/crates/ra_editor/src/completion.rs
@@ -1,4 +1,4 @@
-use std::collections::{HashSet, HashMap};
+use rustc_hash::{FxHashMap, FxHashSet};
 
 use ra_syntax::{
     File, TextUnit, AstNode, SyntaxNodeRef, SyntaxKind::*,
@@ -96,7 +96,7 @@ fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<CompletionItem>) {
 }
 
 fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
-    let mut params = HashMap::new();
+    let mut params = FxHashMap::default();
     for node in ctx.ancestors() {
         let _ = visitor_ctx(&mut params)
             .visit::<ast::Root, _>(process)
@@ -114,7 +114,7 @@ fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
            })
        });
 
-    fn process<'a, N: ast::FnDefOwner<'a>>(node: N, params: &mut HashMap<String, (u32, ast::Param<'a>)>) {
+    fn process<'a, N: ast::FnDefOwner<'a>>(node: N, params: &mut FxHashMap<String, (u32, ast::Param<'a>)>) {
        node.functions()
            .filter_map(|it| it.param_list())
            .flat_map(|it| it.params())
@@ -232,7 +232,7 @@ fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
 }
 
 fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
-    let mut shadowed = HashSet::new();
+    let mut shadowed = FxHashSet::default();
     acc.extend(
         scopes.scope_chain(name_ref.syntax())
             .flat_map(|scope| scopes.entries(scope).iter())
diff --git a/crates/ra_editor/src/folding_ranges.rs b/crates/ra_editor/src/folding_ranges.rs
index 733512368..3aabd54ae 100644
--- a/crates/ra_editor/src/folding_ranges.rs
+++ b/crates/ra_editor/src/folding_ranges.rs
@@ -1,4 +1,4 @@
-use std::collections::HashSet;
+use rustc_hash::FxHashSet;
 
 use ra_syntax::{
     File, TextRange, SyntaxNodeRef,
@@ -20,7 +20,7 @@ pub struct Fold {
 
 pub fn folding_ranges(file: &File) -> Vec<Fold> {
     let mut res = vec![];
-    let mut visited = HashSet::new();
+    let mut visited = FxHashSet::default();
 
     for node in file.syntax().descendants() {
         if visited.contains(&node) {
@@ -56,7 +56,7 @@ pub fn folding_ranges(file: &File) -> Vec<Fold> {
 fn contiguous_range_for<'a>(
     kind: SyntaxKind,
     node: SyntaxNodeRef<'a>,
-    visited: &mut HashSet<SyntaxNodeRef<'a>>,
+    visited: &mut FxHashSet<SyntaxNodeRef<'a>>,
 ) -> Option<TextRange> {
     visited.insert(node);
 
diff --git a/crates/ra_editor/src/lib.rs b/crates/ra_editor/src/lib.rs
index fe0045378..710afc65d 100644
--- a/crates/ra_editor/src/lib.rs
+++ b/crates/ra_editor/src/lib.rs
@@ -2,6 +2,7 @@ extern crate ra_syntax;
 extern crate superslice;
 extern crate itertools;
 extern crate join_to_string;
+extern crate rustc_hash;
 #[cfg(test)]
 #[macro_use]
 extern crate test_utils as _test_utils;
@@ -88,7 +89,6 @@ pub fn highlight(file: &File) -> Vec<HighlightedRange> {
     let mut res = Vec::new();
     for node in file.syntax().descendants() {
         let tag = match node.kind() {
-            ERROR => "error",
             COMMENT | DOC_COMMENT => "comment",
             STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string",
             ATTR => "attribute",
@@ -108,21 +108,10 @@ pub fn highlight(file: &File) -> Vec<HighlightedRange> {
 }
 
 pub fn diagnostics(file: &File) -> Vec<Diagnostic> {
-    let mut res = Vec::new();
-
-    for node in file.syntax().descendants() {
-        if node.kind() == ERROR {
-            res.push(Diagnostic {
-                range: node.range(),
-                msg: "Syntax Error".to_string(),
-            });
-        }
-    }
-    res.extend(file.errors().into_iter().map(|err| Diagnostic {
-        range: TextRange::offset_len(err.offset, 1.into()),
-        msg: err.msg,
-    }));
-    res
+    file.errors().into_iter().map(|err| Diagnostic {
+        range: TextRange::offset_len(err.offset, 1.into()),
+        msg: "Syntax Error: ".to_string() + &err.msg,
+    }).collect()
 }
 
 pub fn syntax_tree(file: &File) -> String {
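Dedicated ERROR-node diagnostics (and the ERROR highlight tag) are gone; diagnostics() now just maps the parser's own error list and folds the "Syntax Error: " prefix into each message. A hedged sketch of how a caller might consume the result, assuming a parsed File from ra_syntax (the printing loop is illustrative; the range/msg fields come from the diff):

    for d in diagnostics(&file) {
        // Each diagnostic covers a one-character range at the error offset and
        // carries a message of the form "Syntax Error: <parser message>".
        eprintln!("{:?}: {}", d.range, d.msg);
    }
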
diff --git a/crates/ra_editor/src/scope/fn_scope.rs b/crates/ra_editor/src/scope/fn_scope.rs
index 65d85279f..9a48bda02 100644
--- a/crates/ra_editor/src/scope/fn_scope.rs
+++ b/crates/ra_editor/src/scope/fn_scope.rs
@@ -1,7 +1,5 @@
-use std::{
-    fmt,
-    collections::HashMap,
-};
+use std::fmt;
+use rustc_hash::FxHashMap;
 
 use ra_syntax::{
     SyntaxNodeRef, SyntaxNode, SmolStr, AstNode,
@@ -15,7 +13,7 @@ type ScopeId = usize;
 pub struct FnScopes {
     pub self_param: Option<SyntaxNode>,
     scopes: Vec<ScopeData>,
-    scope_for: HashMap<SyntaxNode, ScopeId>,
+    scope_for: FxHashMap<SyntaxNode, ScopeId>,
 }
 
 impl FnScopes {
@@ -25,7 +23,7 @@ impl FnScopes {
                 .and_then(|it| it.self_param())
                 .map(|it| it.syntax().owned()),
             scopes: Vec::new(),
-            scope_for: HashMap::new()
+            scope_for: FxHashMap::default()
         };
         let root = scopes.root_scope();
         scopes.add_params_bindings(root, fn_def.param_list());
@@ -242,9 +240,9 @@ struct ScopeData {
 }
 
 pub fn resolve_local_name<'a>(name_ref: ast::NameRef, scopes: &'a FnScopes) -> Option<&'a ScopeEntry> {
-    use std::collections::HashSet;
+    use rustc_hash::FxHashSet;
 
-    let mut shadowed = HashSet::new();
+    let mut shadowed = FxHashSet::default();
     let ret = scopes.scope_chain(name_ref.syntax())
         .flat_map(|scope| scopes.entries(scope).iter())
         .filter(|entry| shadowed.insert(entry.name()))
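resolve_local_name keeps its shadowing trick on the new set type: the scope chain is walked innermost-first, and shadowed.insert(entry.name()) returns false for any name already seen, so the filter keeps only the innermost binding of each name. A standalone sketch of the same pattern, detached from the FnScopes types (the data is illustrative):

    use rustc_hash::FxHashSet;

    // Entries ordered innermost-scope-first, the order scope_chain() yields them in.
    let entries = vec![("x", "inner"), ("y", "inner"), ("x", "outer")];

    let mut shadowed = FxHashSet::default();
    let visible: Vec<_> = entries
        .into_iter()
        // insert() returns false once a name has been recorded, so the
        // outer `x` is dropped in favour of the inner one.
        .filter(|(name, _)| shadowed.insert(*name))
        .collect();

    assert_eq!(visible, vec![("x", "inner"), ("y", "inner")]);
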
diff --git a/crates/ra_editor/src/typing.rs b/crates/ra_editor/src/typing.rs
index 3384389d1..1dc658f9b 100644
--- a/crates/ra_editor/src/typing.rs
+++ b/crates/ra_editor/src/typing.rs
@@ -30,6 +30,7 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
     } else {
         range
     };
+
     let node = find_covering_node(file.syntax(), range);
     let mut edit = EditBuilder::new();
     for node in node.descendants() {
@@ -57,14 +58,19 @@ pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
 }
 
 pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
-    let comment = find_leaf_at_offset(file.syntax(), offset).left_biased().filter(|it| it.kind() == COMMENT)?;
-    let prefix = comment_preffix(comment)?;
-    if offset < comment.range().start() + TextUnit::of_str(prefix) {
+    let comment = find_leaf_at_offset(file.syntax(), offset).left_biased().and_then(|it| ast::Comment::cast(it))?;
+
+    if let ast::CommentFlavor::Multiline = comment.flavor() {
+        return None;
+    }
+
+    let prefix = comment.prefix();
+    if offset < comment.syntax().range().start() + TextUnit::of_str(prefix) + TextUnit::from(1) {
         return None;
     }
 
-    let indent = node_indent(file, comment)?;
-    let inserted = format!("\n{}{}", indent, prefix);
+    let indent = node_indent(file, comment.syntax())?;
+    let inserted = format!("\n{}{} ", indent, prefix);
     let cursor_position = offset + TextUnit::of_str(&inserted);
     let mut edit = EditBuilder::new();
     edit.insert(offset, inserted);
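on_enter now goes through the typed ast::Comment wrapper instead of raw COMMENT tokens: multiline /* ... */ comments bail out early, the prefix and indent come from the comment itself, and the trailing space is appended by the format! call rather than baked into the prefix. A rough sketch of the inserted text, assuming a doc comment indented by four spaces (the concrete strings are illustrative):

    // Mirrors the construction in the diff: "\n" + indent + prefix + " ".
    let indent = "    ";   // leading whitespace on the comment's line
    let prefix = "///";    // as reported by ast::Comment::prefix()
    let inserted = format!("\n{}{} ", indent, prefix);
    assert_eq!(inserted, "\n    /// ");
    // The cursor then lands at offset + TextUnit::of_str(&inserted),
    // i.e. right after the freshly inserted "/// ".
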
@@ -74,20 +80,6 @@ pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
     })
 }
 
-fn comment_preffix(comment: SyntaxNodeRef) -> Option<&'static str> {
-    let text = comment.leaf_text().unwrap();
-    let res = if text.starts_with("///") {
-        "/// "
-    } else if text.starts_with("//!") {
-        "//! "
-    } else if text.starts_with("//") {
-        "// "
-    } else {
-        return None;
-    };
-    Some(res)
-}
-
 fn node_indent<'a>(file: &'a File, node: SyntaxNodeRef) -> Option<&'a str> {
     let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) {
         LeafAtOffset::Between(l, r) => {
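The ad-hoc comment_preffix helper (note the typo in its name) is deleted; its prefix detection now lives behind ast::Comment::prefix(), with the trailing space handled by the caller. For reference, the removed logic boiled down to the following sketch, reconstructed from the deleted lines (the helper name is hypothetical, and the trailing spaces are dropped to match how the new code appends " " itself):

    // Order matters: "///" and "//!" must be tested before the plain "//".
    fn comment_prefix(text: &str) -> Option<&'static str> {
        if text.starts_with("///") {
            Some("///")
        } else if text.starts_with("//!") {
            Some("//!")
        } else if text.starts_with("//") {
            Some("//")
        } else {
            None
        }
    }
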
@@ -139,41 +131,60 @@ fn remove_newline(
     node_text: &str,
     offset: TextUnit,
 ) {
-    if node.kind() == WHITESPACE && node_text.bytes().filter(|&b| b == b'\n').count() == 1 {
-        if join_single_expr_block(edit, node).is_some() {
-            return
-        }
-        match (node.prev_sibling(), node.next_sibling()) {
-            (Some(prev), Some(next)) => {
-                let range = TextRange::from_to(prev.range().start(), node.range().end());
-                if is_trailing_comma(prev.kind(), next.kind()) {
-                    edit.delete(range);
-                } else if no_space_required(prev.kind(), next.kind()) {
-                    edit.delete(node.range());
-                } else if prev.kind() == COMMA && next.kind() == R_CURLY {
-                    edit.replace(range, " ".to_string());
-                } else {
-                    edit.replace(
-                        node.range(),
-                        compute_ws(prev, next).to_string(),
-                    );
-                }
-                return;
-            }
-            _ => (),
-        }
+    if node.kind() != WHITESPACE || node_text.bytes().filter(|&b| b == b'\n').count() != 1 {
+        // The node is either the first or the last in the file
+        let suff = &node_text[TextRange::from_to(
+            offset - node.range().start() + TextUnit::of_char('\n'),
+            TextUnit::of_str(node_text),
+        )];
+        let spaces = suff.bytes().take_while(|&b| b == b' ').count();
+
+        edit.replace(
+            TextRange::offset_len(offset, ((spaces + 1) as u32).into()),
+            " ".to_string(),
+        );
+        return;
     }
 
-    let suff = &node_text[TextRange::from_to(
-        offset - node.range().start() + TextUnit::of_char('\n'),
-        TextUnit::of_str(node_text),
-    )];
-    let spaces = suff.bytes().take_while(|&b| b == b' ').count();
+    // Special case that turns something like:
+    //
+    // ```
+    // my_function({<|>
+    //    <some-expr>
+    // })
+    // ```
+    //
+    // into `my_function(<some-expr>)`
+    if join_single_expr_block(edit, node).is_some() {
+        return
+    }
 
-    edit.replace(
-        TextRange::offset_len(offset, ((spaces + 1) as u32).into()),
-        " ".to_string(),
-    );
+    // The node is between two other nodes
+    let prev = node.prev_sibling().unwrap();
+    let next = node.next_sibling().unwrap();
+    if is_trailing_comma(prev.kind(), next.kind()) {
+        // Removes: trailing comma, newline (incl. surrounding whitespace)
+        edit.delete(TextRange::from_to(prev.range().start(), node.range().end()));
+    } else if prev.kind() == COMMA && next.kind() == R_CURLY {
+        // Removes: comma, newline (incl. surrounding whitespace)
+        // Adds: a single whitespace
+        edit.replace(
+            TextRange::from_to(prev.range().start(), node.range().end()),
+            " ".to_string()
+        );
+    } else if let (Some(_), Some(next)) = (ast::Comment::cast(prev), ast::Comment::cast(next)) {
+        // Removes: newline (incl. surrounding whitespace), start of the next comment
+        edit.delete(TextRange::from_to(
+            node.range().start(),
+            next.syntax().range().start() + TextUnit::of_str(next.prefix())
+        ));
+    } else {
+        // Remove newline but add a computed amount of whitespace characters
+        edit.replace(
+            node.range(),
+            compute_ws(prev, next).to_string(),
+        );
+    }
 }
 
 fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool {
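The rewritten remove_newline turns the original nested condition into an early return and then handles each join case in a flat if/else chain. The new branch for two adjacent comments deletes the newline (plus its surrounding whitespace) together with the next comment's prefix, which is what makes `// Hello` and `// world!` collapse into one comment. A string-level model of that branch, assuming the whitespace node sits between the two comments (the real code edits the syntax tree, not a string):

    let text = "// Hello\n    // world!";
    // Delete from the start of the whitespace node ...
    let ws_start = text.find('\n').unwrap();
    // ... up to and including the next comment's "//" prefix.
    let next_prefix_end = text.find("// world").unwrap() + "//".len();
    let joined = format!("{}{}", &text[..ws_start], &text[next_prefix_end..]);
    assert_eq!(joined, "// Hello world!");
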
@@ -183,13 +194,6 @@ fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool {
     }
 }
 
-fn no_space_required(left: SyntaxKind, right: SyntaxKind) -> bool {
-    match (left, right) {
-        (_, DOT) => true,
-        _ => false
-    }
-}
-
 fn join_single_expr_block(
     edit: &mut EditBuilder,
     node: SyntaxNodeRef,
@@ -231,6 +235,7 @@ fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str {
     }
     match right.kind() {
         R_PAREN | R_BRACK => return "",
+        DOT => return "",
         _ => (),
     }
     " "
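With no_space_required gone, its single DOT case moves straight into compute_ws, so joining a line that continues with a method call yields `foo.bar()` rather than `foo .bar()`. A simplified model of the right-hand-side decision shown in the diff (strings stand in for SyntaxKind here; only these arms are taken from the diff):

    fn ws_before(right_kind: &str) -> &'static str {
        match right_kind {
            // No space before `)`, `]`, or the leading `.` of a method chain.
            "R_PAREN" | "R_BRACK" | "DOT" => "",
            _ => " ",
        }
    }

    assert_eq!(ws_before("DOT"), "");
    assert_eq!(ws_before("IDENT"), " ");
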
@@ -291,6 +296,80 @@ fn foo() {
 }");
 
 
+    #[test]
+    fn test_join_lines_normal_comments() {
+        check_join_lines(r"
+fn foo() {
+    // Hello<|>
+    // world!
+}
+", r"
+fn foo() {
+    // Hello<|> world!
+}
+");
+    }
+
+    #[test]
+    fn test_join_lines_doc_comments() {
+        check_join_lines(r"
+fn foo() {
+    /// Hello<|>
+    /// world!
+}
+", r"
+fn foo() {
+    /// Hello<|> world!
+}
+");
+    }
+
+    #[test]
+    fn test_join_lines_mod_comments() {
+        check_join_lines(r"
+fn foo() {
+    //! Hello<|>
+    //! world!
+}
+", r"
+fn foo() {
+    //! Hello<|> world!
+}
+");
+    }
+
+    #[test]
+    fn test_join_lines_multiline_comments_1() {
+        check_join_lines(r"
+fn foo() {
+    // Hello<|>
+    /* world! */
+}
+", r"
+fn foo() {
+    // Hello<|> world! */
+}
+");
+    }
+
+    #[test]
+    fn test_join_lines_multiline_comments_2() {
+        check_join_lines(r"
+fn foo() {
+    // The<|>
+    /* quick
+    brown
+    fox! */
+}
+", r"
+fn foo() {
+    // The<|> quick
+    brown
+    fox! */
+}
+");
+    }
+
     fn check_join_lines_sel(before: &str, after: &str) {
         let (sel, before) = extract_range(before);
         let file = File::parse(&before);