author     Veetaha <[email protected]>    2020-02-18 00:11:16 +0000
committer  Veetaha <[email protected]>    2020-02-18 00:11:16 +0000
commit     053ccf4121797e4e559e3225d46d3f23cb1ad70b (patch)
tree       8037fccd14a1d8961d3996af9276484cb617e472 /crates
parent     fc5e7b8807f6d438028389b030dfae00965a8cd5 (diff)
ra_syntax: fix merging of errors during reparsing; reparse_token() now reports errors as well
Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs  57
1 file changed, 50 insertions, 7 deletions
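In short: the single-token fast path of incremental reparsing used to discard the errors produced while relexing the edited token, passing Vec::new() to merge_errors(), so the merged error list could disagree with a full reparse. reparse_token() now returns those errors and incremental_reparse() merges them. The signature change, as it appears in the hunks below:

    // before
    fn reparse_token<'node>(root: &'node SyntaxNode, edit: &AtomTextEdit)
        -> Option<(GreenNode, TextRange)>
    // after
    fn reparse_token<'node>(root: &'node SyntaxNode, edit: &AtomTextEdit)
        -> Option<(GreenNode, Vec<SyntaxError>, TextRange)>

The test harness gains an assertion that incremental and full reparsing agree on the error list, and four new tests exercise tokens and blocks that contain errors.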
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index 57453e220..aad70d015 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -27,8 +27,8 @@ pub(crate) fn incremental_reparse(
     edit: &AtomTextEdit,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, old_range)) = reparse_token(node, &edit) {
-        return Some((green, merge_errors(errors, Vec::new(), old_range, edit), old_range));
+    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+        return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
     if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
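For context, merge_errors() (called above but not shown in this diff) has to combine the errors from the untouched parts of the old tree with the errors from the freshly reparsed fragment, adjusting offsets across the edit. The sketch below is a deliberately simplified, hypothetical model of that bookkeeping, using plain (offset, message) pairs instead of the crate's SyntaxError type; it is not the actual merge_errors() implementation, only an illustration of why the fast path must pass its real new_errors rather than Vec::new():

    // Simplified, hypothetical model of error merging (not ra_syntax's code).
    // `old_range` is the range of the node that was reparsed in the *old* text;
    // the edit replaces `deleted_len` bytes inside it with `inserted_len` bytes.
    fn merge_errors_sketch(
        old_errors: Vec<(usize, &'static str)>,
        new_errors: Vec<(usize, &'static str)>,
        old_range: std::ops::Range<usize>,
        deleted_len: usize,
        inserted_len: usize,
    ) -> Vec<(usize, &'static str)> {
        let mut res = Vec::new();
        for (offset, msg) in old_errors {
            if offset < old_range.start {
                // Errors before the reparsed fragment keep their positions.
                res.push((offset, msg));
            } else if offset >= old_range.end {
                // Errors after it shift by the net change in text length
                // (no underflow: the edit lies entirely before `offset`).
                res.push((offset + inserted_len - deleted_len, msg));
            }
            // Errors inside the fragment are dropped: the fresh reparse owns them.
        }
        for (offset, msg) in new_errors {
            // Assuming errors from the reparsed fragment are relative to it,
            // translate them into whole-file offsets.
            res.push((old_range.start + offset, msg));
        }
        res
    }

If the fast path hands merge_errors() an empty Vec, an unterminated string or similar error introduced by the edit simply vanishes from the incremental result, which is exactly what the new tests at the bottom of the file catch.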
@@ -40,7 +40,7 @@ pub(crate) fn incremental_reparse(
 fn reparse_token<'node>(
     root: &'node SyntaxNode,
     edit: &AtomTextEdit,
-) -> Option<(GreenNode, TextRange)> {
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
     let prev_token_kind = prev_token.kind();
     match prev_token_kind {
@@ -54,7 +54,7 @@ fn reparse_token<'node>(
             }
 
             let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
-            let (new_token_kind, _error) = lex_single_syntax_kind(&new_text)?;
+            let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
 
             if new_token_kind != prev_token_kind
                 || (new_token_kind == IDENT && is_contextual_kw(&new_text))
@@ -76,7 +76,11 @@ fn reparse_token<'node>(
 
             let new_token =
                 GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
-            Some((prev_token.replace_with(new_token), prev_token.text_range()))
+            Some((
+                prev_token.replace_with(new_token),
+                new_err.into_iter().collect(),
+                prev_token.text_range(),
+            ))
         }
         _ => None,
     }
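The new_err value destructured from lex_single_syntax_kind() appears to be an Option (a single relexed token can produce at most one error), and new_err.into_iter().collect() is the idiomatic way to turn that into the Vec<SyntaxError> the new return type asks for, since an Option iterates over zero or one items. A standalone illustration of just that conversion:

    fn main() {
        // Option<T> implements IntoIterator, yielding zero or one items,
        // so `collect` turns it into an empty or single-element Vec.
        let some_err: Option<&str> = Some("unterminated string literal");
        let no_err: Option<&str> = None;

        let one: Vec<&str> = some_err.into_iter().collect();
        let zero: Vec<&str> = no_err.into_iter().collect();

        assert_eq!(one, vec!["unterminated string literal"]);
        assert!(zero.is_empty());
    }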
@@ -200,9 +204,9 @@ mod tests {
 
         let fully_reparsed = SourceFile::parse(&after);
         let incrementally_reparsed: Parse<SourceFile> = {
-            let f = SourceFile::parse(&before);
+            let before = SourceFile::parse(&before);
             let (green, new_errors, range) =
-                incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap();
+                incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
             assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
             Parse::new(green, new_errors)
         };
@@ -211,6 +215,7 @@ mod tests {
             &format!("{:#?}", fully_reparsed.tree().syntax()),
             &format!("{:#?}", incrementally_reparsed.tree().syntax()),
         );
+        assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors());
     }
 
     #[test] // FIXME: some test here actually test token reparsing
@@ -409,4 +414,42 @@ enum Foo {
             4,
         );
     }
+
+    #[test]
+    fn reparse_str_token_with_error_unchanged() {
+        do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24);
+    }
+
+    #[test]
+    fn reparse_str_token_with_error_fixed() {
+        do_check(r#""unterinated<|><|>"#, "\"", 12);
+    }
+
+    #[test]
+    fn reparse_block_with_error_in_middle_unchanged() {
+        do_check(
+            r#"fn main() {
+                if {}
+                32 + 4<|><|>
+                return
+                if {}
+            }"#,
+            "23",
+            105,
+        )
+    }
+
+    #[test]
+    fn reparse_block_with_error_in_middle_fixed() {
+        do_check(
+            r#"fn main() {
+                if {}
+                32 + 4<|><|>
+                return
+                if {}
+            }"#,
+            ";",
+            105,
+        )
+    }
 }
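The new tests reuse the do_check() harness from earlier in the file: the two <|> markers in the first argument delimit the range the edit deletes, the second argument is the text inserted in its place, and the third is the expected length of the reparsed fragment (checked by the "reparsed fragment has wrong length" assertion above). A hypothetical stand-in for the marker-extraction step, only to spell out that convention (the real tests use the crate's fixture utilities):

    // Hypothetical helper: strip the two `<|>` markers and return the clean
    // text plus the byte range they delimited, which becomes the edit's
    // deleted range.
    fn extract_marked_range(marked: &str) -> (String, std::ops::Range<usize>) {
        let start = marked.find("<|>").expect("first <|> marker");
        let rest = &marked[start + 3..];
        let len = rest.find("<|>").expect("second <|> marker");

        let mut clean = String::new();
        clean.push_str(&marked[..start]);
        clean.push_str(&rest[..len]);
        clean.push_str(&rest[len + 3..]);
        (clean, start..start + len)
    }

    fn main() {
        // From `reparse_str_token_with_error_fixed` above: an empty deletion
        // right after the unterminated literal, into which `"` is inserted.
        let (text, range) = extract_marked_range(r#""unterinated<|><|>"#);
        assert_eq!(text, r#""unterinated"#);
        assert_eq!(range, 12..12);
    }

The "_unchanged" tests edit the text without fixing the error and expect the incremental result to still carry it; the "_fixed" tests repair the error (closing the string, adding the missing semicolon) and expect it to disappear, in both cases matching a full reparse thanks to the new assertion in do_check().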