diff options
Diffstat (limited to 'crates/ra_syntax/src/parsing/reparsing.rs')
-rw-r--r-- | crates/ra_syntax/src/parsing/reparsing.rs | 84 |
1 file changed, 67 insertions, 17 deletions
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index a86da0675..aad70d015 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -27,8 +27,8 @@ pub(crate) fn incremental_reparse( | |||
27 | edit: &AtomTextEdit, | 27 | edit: &AtomTextEdit, |
28 | errors: Vec<SyntaxError>, | 28 | errors: Vec<SyntaxError>, |
29 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { | 29 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
30 | if let Some((green, old_range)) = reparse_token(node, &edit) { | 30 | if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) { |
31 | return Some((green, merge_errors(errors, Vec::new(), old_range, edit), old_range)); | 31 | return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range)); |
32 | } | 32 | } |
33 | 33 | ||
34 | if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) { | 34 | if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) { |
@@ -40,7 +40,7 @@ pub(crate) fn incremental_reparse( | |||
40 | fn reparse_token<'node>( | 40 | fn reparse_token<'node>( |
41 | root: &'node SyntaxNode, | 41 | root: &'node SyntaxNode, |
42 | edit: &AtomTextEdit, | 42 | edit: &AtomTextEdit, |
43 | ) -> Option<(GreenNode, TextRange)> { | 43 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
44 | let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); | 44 | let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); |
45 | let prev_token_kind = prev_token.kind(); | 45 | let prev_token_kind = prev_token.kind(); |
46 | match prev_token_kind { | 46 | match prev_token_kind { |
@@ -54,7 +54,7 @@ fn reparse_token<'node>( | |||
54 | } | 54 | } |
55 | 55 | ||
56 | let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit); | 56 | let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit); |
57 | let (new_token_kind, _error) = lex_single_syntax_kind(&new_text)?; | 57 | let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?; |
58 | 58 | ||
59 | if new_token_kind != prev_token_kind | 59 | if new_token_kind != prev_token_kind |
60 | || (new_token_kind == IDENT && is_contextual_kw(&new_text)) | 60 | || (new_token_kind == IDENT && is_contextual_kw(&new_text)) |
@@ -76,7 +76,11 @@ fn reparse_token<'node>( | |||
76 | 76 | ||
77 | let new_token = | 77 | let new_token = |
78 | GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into()); | 78 | GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into()); |
79 | Some((prev_token.replace_with(new_token), prev_token.text_range())) | 79 | Some(( |
80 | prev_token.replace_with(new_token), | ||
81 | new_err.into_iter().collect(), | ||
82 | prev_token.text_range(), | ||
83 | )) | ||
80 | } | 84 | } |
81 | _ => None, | 85 | _ => None, |
82 | } | 86 | } |
@@ -87,7 +91,7 @@ fn reparse_block<'node>( | |||
87 | edit: &AtomTextEdit, | 91 | edit: &AtomTextEdit, |
88 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { | 92 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
89 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; | 93 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; |
90 | let text = get_text_after_edit(node.clone().into(), &edit); | 94 | let text = get_text_after_edit(node.clone().into(), edit); |
91 | 95 | ||
92 | let (tokens, new_lexer_errors) = tokenize(&text); | 96 | let (tokens, new_lexer_errors) = tokenize(&text); |
93 | if !is_balanced(&tokens) { | 97 | if !is_balanced(&tokens) { |
@@ -162,20 +166,27 @@ fn is_balanced(tokens: &[Token]) -> bool { | |||
162 | fn merge_errors( | 166 | fn merge_errors( |
163 | old_errors: Vec<SyntaxError>, | 167 | old_errors: Vec<SyntaxError>, |
164 | new_errors: Vec<SyntaxError>, | 168 | new_errors: Vec<SyntaxError>, |
165 | old_range: TextRange, | 169 | range_before_reparse: TextRange, |
166 | edit: &AtomTextEdit, | 170 | edit: &AtomTextEdit, |
167 | ) -> Vec<SyntaxError> { | 171 | ) -> Vec<SyntaxError> { |
168 | let mut res = Vec::new(); | 172 | let mut res = Vec::new(); |
169 | for e in old_errors { | 173 | |
170 | if e.offset() <= old_range.start() { | 174 | for old_err in old_errors { |
171 | res.push(e) | 175 | let old_err_range = old_err.range(); |
172 | } else if e.offset() >= old_range.end() { | 176 | // FIXME: verify that previously using .start() here was in fact a mistake |
173 | res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len())); | 177 | if old_err_range.end() <= range_before_reparse.start() { |
178 | res.push(old_err); | ||
179 | } else if old_err_range.start() >= range_before_reparse.end() { | ||
180 | let inserted_len = TextUnit::of_str(&edit.insert); | ||
181 | res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len())); | ||
182 | // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug) | ||
174 | } | 183 | } |
175 | } | 184 | } |
176 | for e in new_errors { | 185 | res.extend(new_errors.into_iter().map(|new_err| { |
177 | res.push(e.add_offset(old_range.start(), 0.into())); | 186 | // fighting borrow checker with a variable ;) |
178 | } | 187 | let offseted_range = new_err.range() + range_before_reparse.start(); |
188 | new_err.with_range(offseted_range) | ||
189 | })); | ||
179 | res | 190 | res |
180 | } | 191 | } |
181 | 192 | ||
@@ -193,9 +204,9 @@ mod tests { | |||
193 | 204 | ||
194 | let fully_reparsed = SourceFile::parse(&after); | 205 | let fully_reparsed = SourceFile::parse(&after); |
195 | let incrementally_reparsed: Parse<SourceFile> = { | 206 | let incrementally_reparsed: Parse<SourceFile> = { |
196 | let f = SourceFile::parse(&before); | 207 | let before = SourceFile::parse(&before); |
197 | let (green, new_errors, range) = | 208 | let (green, new_errors, range) = |
198 | incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap(); | 209 | incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap(); |
199 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); | 210 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); |
200 | Parse::new(green, new_errors) | 211 | Parse::new(green, new_errors) |
201 | }; | 212 | }; |
@@ -204,6 +215,7 @@ mod tests { | |||
204 | &format!("{:#?}", fully_reparsed.tree().syntax()), | 215 | &format!("{:#?}", fully_reparsed.tree().syntax()), |
205 | &format!("{:#?}", incrementally_reparsed.tree().syntax()), | 216 | &format!("{:#?}", incrementally_reparsed.tree().syntax()), |
206 | ); | 217 | ); |
218 | assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors()); | ||
207 | } | 219 | } |
208 | 220 | ||
209 | #[test] // FIXME: some test here actually test token reparsing | 221 | #[test] // FIXME: some test here actually test token reparsing |
@@ -402,4 +414,42 @@ enum Foo { | |||
402 | 4, | 414 | 4, |
403 | ); | 415 | ); |
404 | } | 416 | } |
417 | |||
418 | #[test] | ||
419 | fn reparse_str_token_with_error_unchanged() { | ||
420 | do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24); | ||
421 | } | ||
422 | |||
423 | #[test] | ||
424 | fn reparse_str_token_with_error_fixed() { | ||
425 | do_check(r#""unterinated<|><|>"#, "\"", 12); | ||
426 | } | ||
427 | |||
428 | #[test] | ||
429 | fn reparse_block_with_error_in_middle_unchanged() { | ||
430 | do_check( | ||
431 | r#"fn main() { | ||
432 | if {} | ||
433 | 32 + 4<|><|> | ||
434 | return | ||
435 | if {} | ||
436 | }"#, | ||
437 | "23", | ||
438 | 105, | ||
439 | ) | ||
440 | } | ||
441 | |||
442 | #[test] | ||
443 | fn reparse_block_with_error_in_middle_fixed() { | ||
444 | do_check( | ||
445 | r#"fn main() { | ||
446 | if {} | ||
447 | 32 + 4<|><|> | ||
448 | return | ||
449 | if {} | ||
450 | }"#, | ||
451 | ";", | ||
452 | 105, | ||
453 | ) | ||
454 | } | ||
405 | } | 455 | } |