Diffstat (limited to 'crates/ra_syntax/src/parsing')
-rw-r--r--  crates/ra_syntax/src/parsing/lexer.rs      1
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs  24
2 files changed, 20 insertions, 5 deletions
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs
index f9362120e..36e841609 100644
--- a/crates/ra_syntax/src/parsing/lexer.rs
+++ b/crates/ra_syntax/src/parsing/lexer.rs
@@ -195,6 +195,7 @@ fn scan_ident(c: char, ptr: &mut Ptr) -> SyntaxKind {
             ptr.bump();
             true
         }
+        ('_', None) => return UNDERSCORE,
         ('_', Some(c)) if !is_ident_continue(c) => return UNDERSCORE,
         _ => false,
     };
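
Note on the lexer hunk: the new `('_', None)` arm covers a lone `_` at the very end of the input. Previously only `('_', Some(c))` was special-cased, so a trailing underscore fell through to `_ => false` and was lexed as an IDENT rather than UNDERSCORE. A minimal sketch of the intended behaviour, using the crate's `tokenize` helper (the test itself is hypothetical and not part of this commit):

    #[test]
    fn trailing_underscore_lexes_as_underscore() {
        // A lone `_` at the end of input should be an UNDERSCORE token...
        let tokens = tokenize("_");
        assert_eq!(tokens.len(), 1);
        assert_eq!(tokens[0].kind, UNDERSCORE);

        // ...while `_x` is still a single IDENT.
        let tokens = tokenize("_x");
        assert_eq!(tokens.len(), 1);
        assert_eq!(tokens[0].kind, IDENT);
    }
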
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index ba77a3b6c..7e7f914f5 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -33,12 +33,19 @@ pub(crate) fn incremental_reparse(
 }
 
 fn reparse_leaf<'node>(
-    node: &'node SyntaxNode,
+    root: &'node SyntaxNode,
     edit: &AtomTextEdit,
 ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
-    let node = algo::find_covering_node(node, edit.delete);
+    let node = algo::find_covering_node(root, edit.delete);
     match node.kind() {
         WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
+            if node.kind() == WHITESPACE || node.kind() == COMMENT {
+                // removing a new line may extend the previous token
+                if node.text().to_string()[edit.delete - node.range().start()].contains('\n') {
+                    return None;
+                }
+            }
+
             let text = get_text_after_edit(node, &edit);
             let tokens = tokenize(&text);
             let token = match tokens[..] {
@@ -50,6 +57,13 @@ fn reparse_leaf<'node>(
                 return None;
             }
 
+            if let Some(next_char) = root.text().char_at(node.range().end()) {
+                let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char));
+                if tokens_with_next_char.len() == 1 {
+                    return None;
+                }
+            }
+
             let green = GreenNode::new_leaf(node.kind(), text.into());
             let new_errors = vec![];
             Some((node, green, new_errors))
@@ -104,7 +118,7 @@ fn is_balanced(tokens: &[Token]) -> bool {
         return false;
     }
     let mut balance = 0usize;
-    for t in tokens.iter() {
+    for t in &tokens[1..tokens.len() - 1] {
         match t.kind {
             L_CURLY => balance += 1,
             R_CURLY => {
@@ -130,11 +144,11 @@ fn merge_errors(
         if e.offset() <= old_node.range().start() {
             res.push(e)
         } else if e.offset() >= old_node.range().end() {
-            res.push(e.add_offset(TextUnit::of_str(&edit.insert) - edit.delete.len()));
+            res.push(e.add_offset(TextUnit::of_str(&edit.insert), edit.delete.len()));
         }
     }
     for e in new_errors {
-        res.push(e.add_offset(old_node.range().start()));
+        res.push(e.add_offset(old_node.range().start(), 0.into()));
     }
     res
 }
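
Note on the reparsing hunks: reparse_leaf now rejects a single-leaf reparse in two extra cases. An edit into WHITESPACE or COMMENT that removes a newline is refused, because without the newline the preceding token (for example a `//` comment) could extend over the text that follows. In addition, the edited text is re-tokenized together with the character immediately after the leaf; if the two would fuse into a single token, the leaf cannot be patched in isolation and the caller falls back to reparsing a larger node. A small illustration of that second check, using the crate's `tokenize` helper (the test itself is hypothetical and not part of this commit):

    #[test]
    fn edited_leaf_must_not_merge_with_next_char() {
        // An unterminated string literal swallows everything up to the end of
        // the input, so `"abc` re-tokenizes as a single STRING token...
        assert_eq!(tokenize("\"abc").len(), 1);
        // ...and so does `"abc` glued to the character that follows the leaf.
        // A single combined token means the edited leaf would merge with its
        // neighbour, so reparse_leaf returns None instead of patching it.
        assert_eq!(tokenize("\"abc;").len(), 1);
    }

The remaining hunks belong to the same fix: the balance loop in is_balanced now iterates over `&tokens[1..tokens.len() - 1]`, skipping the first and last tokens (presumably the enclosing curly braces checked just above), and the SyntaxError add_offset call now takes the inserted and deleted lengths as separate arguments rather than their difference, presumably to avoid underflow when the deletion is longer than the insertion.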