path: root/crates/ra_syntax/src/parsing/reparsing.rs
author    bors[bot] <bors[bot]@users.noreply.github.com> 2019-02-20 13:50:29 +0000
committer bors[bot] <bors[bot]@users.noreply.github.com> 2019-02-20 13:50:29 +0000
commit    96899f8278b787280bd07d9ac9dce29a610ce40d (patch)
tree      d02a22f02908fd3c89e50845a06a89b997220fc2 /crates/ra_syntax/src/parsing/reparsing.rs
parent    5b617e3bf8252887a3eb1ce76d4b62cbee74e551 (diff)
parent    86a67dce25f11ba9803a5727f77c02fd1f49e2c0 (diff)
Merge #861

861: Move parsing to a separate module r=matklad a=matklad

Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/ra_syntax/src/parsing/reparsing.rs')
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs  349
1 file changed, 349 insertions, 0 deletions
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
new file mode 100644
index 000000000..edf3fa291
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -0,0 +1,349 @@
use crate::{
    SyntaxKind::*, TextRange, TextUnit,
    algo,
    syntax_node::{GreenNode, SyntaxNode},
    syntax_error::SyntaxError,
    parsing::{
        grammar,
        parser_impl,
        builder::GreenBuilder,
        parser_api::Parser,
        lexer::{tokenize, Token},
    }
};

use ra_text_edit::AtomTextEdit;

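// Entry point for incremental reparsing: try the cheap single-leaf strategy
// first, then fall back to reparsing the smallest enclosing block that covers
// the edit. Returns the new green root plus the merged error list, or `None`
// if the edit cannot be handled incrementally (callers then do a full reparse).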
pub(crate) fn incremental_reparse(
    node: &SyntaxNode,
    edit: &AtomTextEdit,
    errors: Vec<SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>)> {
    let (node, green, new_errors) =
        reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?;
    let green_root = node.replace_with(green);
    let errors = merge_errors(errors, new_errors, node, edit);
    Some((green_root, errors))
}

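// Fast path: if the edit is covered by a single "simple" token (whitespace,
// comment, identifier, or string), re-lex just that token's text and splice in
// a new leaf. Bails out if the edited text no longer lexes to one token of the
// same kind, or if the new identifier is a contextual keyword.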
fn reparse_leaf<'node>(
    node: &'node SyntaxNode,
    edit: &AtomTextEdit,
) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
    let node = algo::find_covering_node(node, edit.delete);
    match node.kind() {
        WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
            let text = get_text_after_edit(node, &edit);
            let tokens = tokenize(&text);
            let token = match tokens[..] {
                [token] if token.kind == node.kind() => token,
                _ => return None,
            };

            if token.kind == IDENT && is_contextual_kw(&text) {
                return None;
            }

            let green = GreenNode::new_leaf(node.kind(), text.into());
            let new_errors = vec![];
            Some((node, green, new_errors))
        }
        _ => None,
    }
}

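// Slower path: find the smallest ancestor with a dedicated reparsing function
// (see `find_reparsable_node`), re-tokenize its edited text, and reparse it in
// isolation. The brace-balance check rejects edits that leave the block's
// curly braces unbalanced.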
fn reparse_block<'node>(
    node: &'node SyntaxNode,
    edit: &AtomTextEdit,
) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
    let (node, reparser) = find_reparsable_node(node, edit.delete)?;
    let text = get_text_after_edit(node, &edit);
    let tokens = tokenize(&text);
    if !is_balanced(&tokens) {
        return None;
    }
    let (green, new_errors) =
        parser_impl::parse_with(GreenBuilder::new(), &text, &tokens, reparser);
    Some((node, green, new_errors))
}

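// Apply the edit to the text of `node` alone, shifting the edit's range from
// file coordinates into node-local coordinates first.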
fn get_text_after_edit(node: &SyntaxNode, edit: &AtomTextEdit) -> String {
    let edit = AtomTextEdit::replace(edit.delete - node.range().start(), edit.insert.clone());
    edit.apply(node.text().to_string())
}

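// Identifiers that act as keywords only in certain positions; producing one of
// these may change how surrounding nodes parse, so the leaf fast path refuses
// to handle such an edit.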
fn is_contextual_kw(text: &str) -> bool {
    match text {
        "auto" | "default" | "union" => true,
        _ => false,
    }
}

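// Walk up from the node covering `range` and return the first ancestor for
// which the grammar provides a standalone reparsing function.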
fn find_reparsable_node(
    node: &SyntaxNode,
    range: TextRange,
) -> Option<(&SyntaxNode, fn(&mut Parser))> {
    let node = algo::find_covering_node(node, range);
    node.ancestors().find_map(|node| grammar::reparser(node).map(|r| (node, r)))
}

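// A token stream is accepted for block reparsing only if it starts with `{`,
// ends with `}`, and all curly braces in between are balanced.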
fn is_balanced(tokens: &[Token]) -> bool {
    if tokens.is_empty()
        || tokens.first().unwrap().kind != L_CURLY
        || tokens.last().unwrap().kind != R_CURLY
    {
        return false;
    }
    let mut balance = 0usize;
    for t in tokens.iter() {
        match t.kind {
            L_CURLY => balance += 1,
            R_CURLY => {
                balance = match balance.checked_sub(1) {
                    Some(b) => b,
                    None => return false,
                }
            }
            _ => (),
        }
    }
    balance == 0
}

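// Combine errors from the old tree with errors from the reparse: old errors
// before the reparsed node are kept as-is, old errors after it are shifted by
// the edit's length delta, errors inside it are dropped, and the new errors
// (relative to the reparsed node) are offset back to file coordinates.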
fn merge_errors(
    old_errors: Vec<SyntaxError>,
    new_errors: Vec<SyntaxError>,
    old_node: &SyntaxNode,
    edit: &AtomTextEdit,
) -> Vec<SyntaxError> {
    let mut res = Vec::new();
    for e in old_errors {
        if e.offset() <= old_node.range().start() {
            res.push(e)
        } else if e.offset() >= old_node.range().end() {
            res.push(e.add_offset(TextUnit::of_str(&edit.insert) - edit.delete.len()));
        }
    }
    for e in new_errors {
        res.push(e.add_offset(old_node.range().start()));
    }
    res
}

#[cfg(test)]
mod tests {
    use test_utils::{extract_range, assert_eq_text};

    use crate::{SourceFile, AstNode, utils::dump_tree};
    use super::*;

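    // Shared harness: `before` contains a range delimited by `<|>` markers
    // that gets replaced with `replace_with`; the tree produced by the
    // incremental reparser must match a full reparse of the edited text.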
    fn do_check<F>(before: &str, replace_with: &str, reparser: F)
    where
        for<'a> F: Fn(
            &'a SyntaxNode,
            &AtomTextEdit,
        ) -> Option<(&'a SyntaxNode, GreenNode, Vec<SyntaxError>)>,
    {
        let (range, before) = extract_range(before);
        let edit = AtomTextEdit::replace(range, replace_with.to_owned());
        let after = edit.apply(before.clone());

        let fully_reparsed = SourceFile::parse(&after);
        let incrementally_reparsed = {
            let f = SourceFile::parse(&before);
            let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
            let (node, green, new_errors) =
                reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
            let green_root = node.replace_with(green);
            let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
            SourceFile::new(green_root, errors)
        };

        assert_eq_text!(
            &dump_tree(fully_reparsed.syntax()),
            &dump_tree(incrementally_reparsed.syntax()),
        )
    }

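    // Edits that should be picked up by `reparse_block`.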
    #[test]
    fn reparse_block_tests() {
        let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);

        do_check(
            r"
fn foo() {
    let x = foo + <|>bar<|>
}
",
            "baz",
        );
        do_check(
            r"
fn foo() {
    let x = foo<|> + bar<|>
}
",
            "baz",
        );
        do_check(
            r"
struct Foo {
    f: foo<|><|>
}
",
            ",\n g: (),",
        );
        do_check(
            r"
fn foo {
    let;
    1 + 1;
    <|>92<|>;
}
",
            "62",
        );
        do_check(
            r"
mod foo {
    fn <|><|>
}
",
            "bar",
        );
        do_check(
            r"
trait Foo {
    type <|>Foo<|>;
}
",
            "Output",
        );
        do_check(
            r"
impl IntoIterator<Item=i32> for Foo {
    f<|><|>
}
",
            "n next(",
        );
        do_check(
            r"
use a::b::{foo,<|>,bar<|>};
    ",
            "baz",
        );
        do_check(
            r"
pub enum A {
    Foo<|><|>
}
",
            "\nBar;\n",
        );
        do_check(
            r"
foo!{a, b<|><|> d}
",
            ", c[3]",
        );
        do_check(
            r"
fn foo() {
    vec![<|><|>]
}
",
            "123",
        );
        do_check(
            r"
extern {
    fn<|>;<|>
}
",
            " exit(code: c_int)",
        );
    }

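    // Edits confined to a single whitespace, comment, identifier, or string
    // token, which `reparse_leaf` should handle.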
    #[test]
    fn reparse_leaf_tests() {
        let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);

        do_check(
            r"<|><|>
fn foo() -> i32 { 1 }
",
            "\n\n\n \n",
        );
        do_check(
            r"
fn foo() -> <|><|> {}
",
            " \n",
        );
        do_check(
            r"
fn <|>foo<|>() -> i32 { 1 }
",
            "bar",
        );
        do_check(
            r"
fn foo<|><|>foo() { }
",
            "bar",
        );
        do_check(
            r"
fn foo /* <|><|> */ () {}
",
            "some comment",
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            " \t\t\n\n",
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            " \t\t\n\n",
        );
        do_check(
            r"
/// foo <|><|>omment
mod { }
",
            "c",
        );
        do_check(
            r#"
fn -> &str { "Hello<|><|>" }
"#,
            ", world",
        );
        do_check(
            r#"
fn -> &str { // "Hello<|><|>"
"#,
            ", world",
        );
        do_check(
            r##"
fn -> &str { r#"Hello<|><|>"#
"##,
            ", world",
        );
        do_check(
            r"
#[derive(<|>Copy<|>)]
enum Foo {

}
",
            "Clone",
        );
    }
}