author      Aleksey Kladov <[email protected]>   2020-08-13 16:42:52 +0100
committer   Aleksey Kladov <[email protected]>   2020-08-13 16:58:27 +0100
commit      1b0c7701cc97cd7bef8bb9729011d4cf291a60c5 (patch)
tree        b69f0c9947d9cec522ce835d7213b21075fe6dcf /crates/ide/src/typing
parent      fc34403018079ea053f26d0a31b7517053c7dd8c (diff)
Rename ra_ide -> ide
Diffstat (limited to 'crates/ide/src/typing')
-rw-r--r--  crates/ide/src/typing/on_enter.rs  256
1 file changed, 256 insertions, 0 deletions
diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs
new file mode 100644
index 000000000..f7d46146c
--- /dev/null
+++ b/crates/ide/src/typing/on_enter.rs
@@ -0,0 +1,256 @@
//! Handles the `Enter` key press. At the moment, this only continues
//! comments, but should handle indentation some time in the future as well.
3 | |||
4 | use base_db::{FilePosition, SourceDatabase}; | ||
5 | use ide_db::RootDatabase; | ||
6 | use syntax::{ | ||
7 | ast::{self, AstToken}, | ||
8 | AstNode, SmolStr, SourceFile, | ||
9 | SyntaxKind::*, | ||
10 | SyntaxToken, TextRange, TextSize, TokenAtOffset, | ||
11 | }; | ||
12 | use test_utils::mark; | ||
13 | use text_edit::TextEdit; | ||
14 | |||
// Feature: On Enter
//
// rust-analyzer can override the kbd:[Enter] key to make it smarter:
//
// - kbd:[Enter] inside triple-slash comments automatically inserts `///`
// - kbd:[Enter] in the middle of, or after a trailing space in, a `//` comment inserts `//`
//
// This action needs to be assigned to a shortcut explicitly.
//
// VS Code::
//
// Add the following to `keybindings.json`:
// [source,json]
// ----
// {
//     "key": "Enter",
//     "command": "rust-analyzer.onEnter",
//     "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
// }
// ----
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
    let parse = db.parse(position.file_id);
    let file = parse.tree();
    let comment = file
        .syntax()
        .token_at_offset(position.offset)
        .left_biased()
        .and_then(ast::Comment::cast)?;

    if comment.kind().shape.is_block() {
        return None;
    }

    let prefix = comment.prefix();
    let comment_range = comment.syntax().text_range();
    if position.offset < comment_range.start() + TextSize::of(prefix) {
        return None;
    }

    let mut remove_last_space = false;
    // Continuing single-line non-doc comments (like this one :) ) is annoying
    if prefix == "//" && comment_range.end() == position.offset {
        if comment.text().ends_with(' ') {
            mark::hit!(continues_end_of_line_comment_with_space);
            remove_last_space = true;
        } else if !followed_by_comment(&comment) {
            return None;
        }
    }

    let indent = node_indent(&file, comment.syntax())?;
    let inserted = format!("\n{}{} $0", indent, prefix);
    let delete = if remove_last_space {
        TextRange::new(position.offset - TextSize::of(' '), position.offset)
    } else {
        TextRange::empty(position.offset)
    };
    let edit = TextEdit::replace(delete, inserted);
    Some(edit)
}

fn followed_by_comment(comment: &ast::Comment) -> bool {
    let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) {
        Some(it) => it,
        None => return false,
    };
    if ws.spans_multiple_lines() {
        return false;
    }
    ws.syntax().next_token().and_then(ast::Comment::cast).is_some()
}

fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
        TokenAtOffset::Between(l, r) => {
            assert!(r == *token);
            l
        }
        TokenAtOffset::Single(n) => {
            assert!(n == *token);
            return Some("".into());
        }
        TokenAtOffset::None => unreachable!(),
    };
    if ws.kind() != WHITESPACE {
        return None;
    }
    let text = ws.text();
    let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
    Some(text[pos..].into())
}

#[cfg(test)]
mod tests {
    use stdx::trim_indent;
    use test_utils::{assert_eq_text, mark};

    use crate::mock_analysis::analysis_and_position;

    fn apply_on_enter(before: &str) -> Option<String> {
        let (analysis, position) = analysis_and_position(&before);
        let result = analysis.on_enter(position).unwrap()?;

        let mut actual = analysis.file_text(position.file_id).unwrap().to_string();
        result.apply(&mut actual);
        Some(actual)
    }

    fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) {
        let ra_fixture_after = &trim_indent(ra_fixture_after);
        let actual = apply_on_enter(ra_fixture_before).unwrap();
        assert_eq_text!(ra_fixture_after, &actual);
    }

    fn do_check_noop(ra_fixture_text: &str) {
        assert!(apply_on_enter(ra_fixture_text).is_none())
    }

    #[test]
    fn continues_doc_comment() {
        do_check(
            r"
/// Some docs<|>
fn foo() {
}
",
            r"
/// Some docs
/// $0
fn foo() {
}
",
        );

        do_check(
            r"
impl S {
    /// Some<|> docs.
    fn foo() {}
}
",
            r"
impl S {
    /// Some
    /// $0 docs.
    fn foo() {}
}
",
        );

        do_check(
            r"
///<|> Some docs
fn foo() {
}
",
            r"
///
/// $0 Some docs
fn foo() {
}
",
        );
    }

    #[test]
    fn does_not_continue_before_doc_comment() {
        do_check_noop(r"<|>//! docz");
    }

    #[test]
    fn continues_code_comment_in_the_middle_of_line() {
        do_check(
            r"
fn main() {
    // Fix<|> me
    let x = 1 + 1;
}
",
            r"
fn main() {
    // Fix
    // $0 me
    let x = 1 + 1;
}
",
        );
    }

    #[test]
    fn continues_code_comment_in_the_middle_several_lines() {
        do_check(
            r"
fn main() {
    // Fix<|>
    // me
    let x = 1 + 1;
}
",
            r"
fn main() {
    // Fix
    // $0
    // me
    let x = 1 + 1;
}
",
        );
    }

    #[test]
    fn does_not_continue_end_of_line_comment() {
        do_check_noop(
            r"
fn main() {
    // Fix me<|>
    let x = 1 + 1;
}
",
        );
    }

    #[test]
    fn continues_end_of_line_comment_with_space() {
        mark::check!(continues_end_of_line_comment_with_space);
        do_check(
            r#"
fn main() {
    // Fix me <|>
    let x = 1 + 1;
}
"#,
            r#"
fn main() {
    // Fix me
    // $0
    let x = 1 + 1;
}
"#,
        );
    }
}
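
For context, here is a minimal sketch, not part of this commit, of how a caller might drive the handler through the public `Analysis::on_enter` wrapper that the tests above use. The import paths, the `apply_enter` helper name, and the buffer handling are illustrative assumptions; a real editor integration sends the edit back over the `rust-analyzer.onEnter` command rather than patching a local string.

// Hypothetical helper (illustration only): applies the on-enter edit to an
// in-memory copy of the file, the way the tests above do.
use ide::{Analysis, FilePosition};

fn apply_enter(analysis: &Analysis, position: FilePosition, buffer: &mut String) -> bool {
    match analysis.on_enter(position) {
        Ok(Some(edit)) => {
            // The inserted text contains a `$0` snippet marker; a real client
            // strips it and moves the caret there instead of keeping it in the text.
            edit.apply(buffer);
            true
        }
        // `Ok(None)`: the cursor was not in a continuable `//` or `///` comment.
        // `Err(_)`: the analysis was cancelled.
        _ => false,
    }
}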