diff options
author | Aleksey Kladov <[email protected]> | 2019-02-20 12:47:32 +0000 |
---|---|---|
committer | Aleksey Kladov <[email protected]> | 2019-02-20 12:47:32 +0000 |
commit | 5222b8aba3b1c2c68706aacf6869423a8e4fe6d5 (patch) | |
tree | c8a6e999b8ac5f1f29bde86a2e0b3a53466bb369 /crates/ra_syntax/src/reparsing.rs | |
parent | 9d0cda4bc84350961f3884e75a1c20e62c449ede (diff) |
move all parsing related bits to a separate module
Diffstat (limited to 'crates/ra_syntax/src/reparsing.rs')
-rw-r--r-- | crates/ra_syntax/src/reparsing.rs | 364 |
1 files changed, 0 insertions, 364 deletions
diff --git a/crates/ra_syntax/src/reparsing.rs b/crates/ra_syntax/src/reparsing.rs deleted file mode 100644 index dd751465c..000000000 --- a/crates/ra_syntax/src/reparsing.rs +++ /dev/null | |||
@@ -1,364 +0,0 @@ | |||
1 | use crate::algo; | ||
2 | use crate::grammar; | ||
3 | use crate::lexer::{tokenize, Token}; | ||
4 | use crate::parser_api::Parser; | ||
5 | use crate::parser_impl; | ||
6 | use crate::syntax_node::{self, GreenNode, SyntaxError, SyntaxNode}; | ||
7 | use crate::{SyntaxKind::*, TextRange, TextUnit}; | ||
8 | use ra_text_edit::AtomTextEdit; | ||
9 | |||
10 | pub(crate) fn incremental_reparse( | ||
11 | node: &SyntaxNode, | ||
12 | edit: &AtomTextEdit, | ||
13 | errors: Vec<SyntaxError>, | ||
14 | ) -> Option<(GreenNode, Vec<SyntaxError>)> { | ||
15 | let (node, green, new_errors) = | ||
16 | reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?; | ||
17 | let green_root = node.replace_with(green); | ||
18 | let errors = merge_errors(errors, new_errors, node, edit); | ||
19 | Some((green_root, errors)) | ||
20 | } | ||
21 | |||
22 | fn reparse_leaf<'node>( | ||
23 | node: &'node SyntaxNode, | ||
24 | edit: &AtomTextEdit, | ||
25 | ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> { | ||
26 | let node = algo::find_covering_node(node, edit.delete); | ||
27 | match node.kind() { | ||
28 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { | ||
29 | let text = get_text_after_edit(node, &edit); | ||
30 | let tokens = tokenize(&text); | ||
31 | let token = match tokens[..] { | ||
32 | [token] if token.kind == node.kind() => token, | ||
33 | _ => return None, | ||
34 | }; | ||
35 | |||
36 | if token.kind == IDENT && is_contextual_kw(&text) { | ||
37 | return None; | ||
38 | } | ||
39 | |||
40 | let green = GreenNode::new_leaf(node.kind(), text.into()); | ||
41 | let new_errors = vec![]; | ||
42 | Some((node, green, new_errors)) | ||
43 | } | ||
44 | _ => None, | ||
45 | } | ||
46 | } | ||
47 | |||
48 | fn reparse_block<'node>( | ||
49 | node: &'node SyntaxNode, | ||
50 | edit: &AtomTextEdit, | ||
51 | ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> { | ||
52 | let (node, reparser) = find_reparsable_node(node, edit.delete)?; | ||
53 | let text = get_text_after_edit(node, &edit); | ||
54 | let tokens = tokenize(&text); | ||
55 | if !is_balanced(&tokens) { | ||
56 | return None; | ||
57 | } | ||
58 | let (green, new_errors) = | ||
59 | parser_impl::parse_with(syntax_node::GreenBuilder::new(), &text, &tokens, reparser); | ||
60 | Some((node, green, new_errors)) | ||
61 | } | ||
62 | |||
63 | fn get_text_after_edit(node: &SyntaxNode, edit: &AtomTextEdit) -> String { | ||
64 | let edit = AtomTextEdit::replace(edit.delete - node.range().start(), edit.insert.clone()); | ||
65 | edit.apply(node.text().to_string()) | ||
66 | } | ||
67 | |||
/// Returns `true` for identifiers that double as contextual keywords.
/// Such a leaf lexes as a plain IDENT, but the surrounding context decides
/// whether it is actually a keyword — so it is unsafe to reparse in isolation.
fn is_contextual_kw(text: &str) -> bool {
    ["auto", "default", "union"].contains(&text)
}
74 | |||
75 | type ParseFn = fn(&mut Parser); | ||
76 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, ParseFn)> { | ||
77 | let node = algo::find_covering_node(node, range); | ||
78 | return node.ancestors().filter_map(|node| reparser(node).map(|r| (node, r))).next(); | ||
79 | |||
80 | fn reparser(node: &SyntaxNode) -> Option<ParseFn> { | ||
81 | let res = match node.kind() { | ||
82 | BLOCK => grammar::block, | ||
83 | NAMED_FIELD_DEF_LIST => grammar::named_field_def_list, | ||
84 | NAMED_FIELD_LIST => grammar::named_field_list, | ||
85 | ENUM_VARIANT_LIST => grammar::enum_variant_list, | ||
86 | MATCH_ARM_LIST => grammar::match_arm_list, | ||
87 | USE_TREE_LIST => grammar::use_tree_list, | ||
88 | EXTERN_ITEM_LIST => grammar::extern_item_list, | ||
89 | TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree, | ||
90 | ITEM_LIST => { | ||
91 | let parent = node.parent().unwrap(); | ||
92 | match parent.kind() { | ||
93 | IMPL_BLOCK => grammar::impl_item_list, | ||
94 | TRAIT_DEF => grammar::trait_item_list, | ||
95 | MODULE => grammar::mod_item_list, | ||
96 | _ => return None, | ||
97 | } | ||
98 | } | ||
99 | _ => return None, | ||
100 | }; | ||
101 | Some(res) | ||
102 | } | ||
103 | } | ||
104 | |||
105 | fn is_balanced(tokens: &[Token]) -> bool { | ||
106 | if tokens.is_empty() | ||
107 | || tokens.first().unwrap().kind != L_CURLY | ||
108 | || tokens.last().unwrap().kind != R_CURLY | ||
109 | { | ||
110 | return false; | ||
111 | } | ||
112 | let mut balance = 0usize; | ||
113 | for t in tokens.iter() { | ||
114 | match t.kind { | ||
115 | L_CURLY => balance += 1, | ||
116 | R_CURLY => { | ||
117 | balance = match balance.checked_sub(1) { | ||
118 | Some(b) => b, | ||
119 | None => return false, | ||
120 | } | ||
121 | } | ||
122 | _ => (), | ||
123 | } | ||
124 | } | ||
125 | balance == 0 | ||
126 | } | ||
127 | |||
128 | fn merge_errors( | ||
129 | old_errors: Vec<SyntaxError>, | ||
130 | new_errors: Vec<SyntaxError>, | ||
131 | old_node: &SyntaxNode, | ||
132 | edit: &AtomTextEdit, | ||
133 | ) -> Vec<SyntaxError> { | ||
134 | let mut res = Vec::new(); | ||
135 | for e in old_errors { | ||
136 | if e.offset() <= old_node.range().start() { | ||
137 | res.push(e) | ||
138 | } else if e.offset() >= old_node.range().end() { | ||
139 | res.push(e.add_offset(TextUnit::of_str(&edit.insert) - edit.delete.len())); | ||
140 | } | ||
141 | } | ||
142 | for e in new_errors { | ||
143 | res.push(e.add_offset(old_node.range().start())); | ||
144 | } | ||
145 | res | ||
146 | } | ||
147 | |||
#[cfg(test)]
mod tests {
    use test_utils::{extract_range, assert_eq_text};

    use crate::{SourceFile, AstNode, utils::dump_tree};
    use super::*;

    // Shared driver: `before` contains a `<|>...<|>` marked range that is
    // replaced with `replace_with`. The edited text is parsed from scratch and
    // also reparsed incrementally via `reparser`; both trees must dump
    // identically. `reparser` is expected to succeed on every fixture here.
    fn do_check<F>(before: &str, replace_with: &str, reparser: F)
    where
        for<'a> F: Fn(
            &'a SyntaxNode,
            &AtomTextEdit,
        ) -> Option<(&'a SyntaxNode, GreenNode, Vec<SyntaxError>)>,
    {
        let (range, before) = extract_range(before);
        let edit = AtomTextEdit::replace(range, replace_with.to_owned());
        let after = edit.apply(before.clone());

        let fully_reparsed = SourceFile::parse(&after);
        let incrementally_reparsed = {
            let f = SourceFile::parse(&before);
            let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
            let (node, green, new_errors) =
                reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
            let green_root = node.replace_with(green);
            let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
            SourceFile::new(green_root, errors)
        };

        assert_eq_text!(
            &dump_tree(fully_reparsed.syntax()),
            &dump_tree(incrementally_reparsed.syntax()),
        )
    }

    // Edits that land inside reparsable braced subtrees: blocks, field lists,
    // enum variants, match arms, use trees, extern blocks, token trees, and
    // item lists — including fixtures with pre-existing syntax errors.
    #[test]
    fn reparse_block_tests() {
        let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);

        do_check(
            r"
fn foo() {
    let x = foo + <|>bar<|>
}
",
            "baz",
        );
        do_check(
            r"
fn foo() {
    let x = foo<|> + bar<|>
}
",
            "baz",
        );
        do_check(
            r"
struct Foo {
    f: foo<|><|>
}
",
            ",\n    g: (),",
        );
        do_check(
            r"
fn foo {
    let;
    1 + 1;
    <|>92<|>;
}
",
            "62",
        );
        do_check(
            r"
mod foo {
    fn <|><|>
}
",
            "bar",
        );
        do_check(
            r"
trait Foo {
    type <|>Foo<|>;
}
",
            "Output",
        );
        do_check(
            r"
impl IntoIterator<Item=i32> for Foo {
    f<|><|>
}
",
            "n next(",
        );
        do_check(
            r"
use a::b::{foo,<|>,bar<|>};
",
            "baz",
        );
        do_check(
            r"
pub enum A {
    Foo<|><|>
}
",
            "\nBar;\n",
        );
        do_check(
            r"
foo!{a, b<|><|> d}
",
            ", c[3]",
        );
        do_check(
            r"
fn foo() {
    vec![<|><|>]
}
",
            "123",
        );
        do_check(
            r"
extern {
    fn<|>;<|>
}
",
            " exit(code: c_int)",
        );
    }

    // Edits confined to a single "soft" leaf: whitespace, comments,
    // identifiers, and (raw) string literals.
    #[test]
    fn reparse_leaf_tests() {
        let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);

        do_check(
            r"<|><|>
fn foo() -> i32 { 1 }
",
            "\n\n\n \n",
        );
        do_check(
            r"
fn foo() -> <|><|> {}
",
            " \n",
        );
        do_check(
            r"
fn <|>foo<|>() -> i32 { 1 }
",
            "bar",
        );
        do_check(
            r"
fn foo<|><|>foo() {  }
",
            "bar",
        );
        do_check(
            r"
fn foo /* <|><|> */ () {}
",
            "some comment",
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            "    \t\t\n\n",
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            "    \t\t\n\n",
        );
        do_check(
            r"
/// foo <|><|>omment
mod { }
",
            "c",
        );
        do_check(
            r#"
fn -> &str { "Hello<|><|>" }
"#,
            ", world",
        );
        do_check(
            r#"
fn -> &str { // "Hello<|><|>"
"#,
            ", world",
        );
        do_check(
            r##"
fn -> &str { r#"Hello<|><|>"#
"##,
            ", world",
        );
        do_check(
            r"
#[derive(<|>Copy<|>)]
enum Foo {

}
",
            "Clone",
        );
    }
}