Diffstat (limited to 'crates/libsyntax2')
-rw-r--r--  crates/libsyntax2/src/lib.rs         | 175
-rw-r--r--  crates/libsyntax2/src/reparsing.rs   | 188
-rw-r--r--  crates/libsyntax2/src/text_utils.rs  |   7
3 files changed, 200 insertions, 170 deletions
diff --git a/crates/libsyntax2/src/lib.rs b/crates/libsyntax2/src/lib.rs
index 7c9fbc421..014cdafee 100644
--- a/crates/libsyntax2/src/lib.rs
+++ b/crates/libsyntax2/src/lib.rs
@@ -35,6 +35,7 @@ mod token_set;
 mod parser_api;
 mod grammar;
 mod parser_impl;
+mod reparsing;
 
 mod syntax_kinds;
 mod yellow;
@@ -49,12 +50,11 @@ pub use {
     lexer::{tokenize, Token},
     syntax_kinds::SyntaxKind,
     yellow::{SyntaxNode, SyntaxNodeRef, OwnedRoot, RefRoot, TreeRoot, SyntaxError},
+    reparsing::AtomEdit,
 };
 
 use {
-    SyntaxKind::*,
     yellow::{GreenNode, SyntaxRoot},
-    parser_api::Parser,
 };
 
 #[derive(Clone, Debug)]
@@ -82,55 +82,11 @@ impl File {
         self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
     }
     pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
-        let (node, green, new_errors) =
-            self.reparse_leaf(&edit).or_else(|| self.reparse_block(&edit))?;
-
-        let green_root = node.replace_with(green);
-        let errors = merge_errors(self.errors(), new_errors, node, edit);
-        Some(File::new(green_root, errors))
-    }
-    fn reparse_leaf(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
-        let node = algo::find_covering_node(self.syntax(), edit.delete);
-        match node.kind() {
-            | WHITESPACE
-            | COMMENT
-            | DOC_COMMENT
-            | IDENT
-            | STRING
-            | RAW_STRING => {
-                let text = get_text_after_edit(node, &edit);
-                let tokens = tokenize(&text);
-                let token = match tokens[..] {
-                    [token] if token.kind == node.kind() => token,
-                    _ => return None,
-                };
-
-                if token.kind == IDENT && is_contextual_kw(&text) {
-                    return None;
-                }
-
-                let green = GreenNode::new_leaf(node.kind(), &text);
-                let new_errors = vec![];
-                Some((node, green, new_errors))
-            },
-            _ => None,
-        }
-    }
-    fn reparse_block(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
-        let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
-        let text = get_text_after_edit(node, &edit);
-        let tokens = tokenize(&text);
-        if !is_balanced(&tokens) {
-            return None;
-        }
-        let (green, new_errors) =
-            parser_impl::parse_with::<yellow::GreenBuilder>(
-                &text, &tokens, reparser,
-            );
-        Some((node, green, new_errors))
+        reparsing::incremental_reparse(self.syntax(), edit, self.errors())
+            .map(|(green_node, errors)| File::new(green_node, errors))
     }
     fn full_reparse(&self, edit: &AtomEdit) -> File {
-        let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
+        let text = text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
         File::parse(&text)
     }
     pub fn ast(&self) -> ast::Root {
@@ -143,124 +99,3 @@ impl File {
         self.syntax().root.syntax_root().errors.clone()
     }
 }
-
-#[derive(Debug, Clone)]
-pub struct AtomEdit {
-    pub delete: TextRange,
-    pub insert: String,
-}
-
-impl AtomEdit {
-    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
-        AtomEdit { delete: range, insert: replace_with }
-    }
-
-    pub fn delete(range: TextRange) -> AtomEdit {
-        AtomEdit::replace(range, String::new())
-    }
-
-    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
-        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
-    }
-}
-
-fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
-    replace_range(
-        node.text().to_string(),
-        edit.delete - node.range().start(),
-        &edit.insert,
-    )
-}
-
-fn is_contextual_kw(text: &str) -> bool {
-    match text {
-        | "auto"
-        | "default"
-        | "union" => true,
-        _ => false,
-    }
-}
-
-fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
-    let node = algo::find_covering_node(node, range);
-    return algo::ancestors(node)
-        .filter_map(|node| reparser(node).map(|r| (node, r)))
-        .next();
-
-    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
-        let res = match node.kind() {
-            BLOCK => grammar::block,
-            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
-            NAMED_FIELD_LIST => grammar::named_field_list,
-            ENUM_VARIANT_LIST => grammar::enum_variant_list,
-            MATCH_ARM_LIST => grammar::match_arm_list,
-            USE_TREE_LIST => grammar::use_tree_list,
-            EXTERN_ITEM_LIST => grammar::extern_item_list,
-            TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree,
-            ITEM_LIST => {
-                let parent = node.parent().unwrap();
-                match parent.kind() {
-                    IMPL_ITEM => grammar::impl_item_list,
-                    TRAIT_DEF => grammar::trait_item_list,
-                    MODULE => grammar::mod_item_list,
-                    _ => return None,
-                }
-            },
-            _ => return None,
-        };
-        Some(res)
-    }
-}
-
-pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
-    let start = u32::from(range.start()) as usize;
-    let end = u32::from(range.end()) as usize;
-    text.replace_range(start..end, replace_with);
-    text
-}
-
-fn is_balanced(tokens: &[Token]) -> bool {
-    if tokens.len() == 0
-        || tokens.first().unwrap().kind != L_CURLY
-        || tokens.last().unwrap().kind != R_CURLY {
-        return false
-    }
-    let mut balance = 0usize;
-    for t in tokens.iter() {
-        match t.kind {
-            L_CURLY => balance += 1,
-            R_CURLY => balance = match balance.checked_sub(1) {
-                Some(b) => b,
-                None => return false,
-            },
-            _ => (),
-        }
-    }
-    balance == 0
-}
-
-fn merge_errors(
-    old_errors: Vec<SyntaxError>,
-    new_errors: Vec<SyntaxError>,
-    old_node: SyntaxNodeRef,
-    edit: &AtomEdit,
-) -> Vec<SyntaxError> {
-    let mut res = Vec::new();
-    for e in old_errors {
-        if e.offset <= old_node.range().start() {
-            res.push(e)
-        } else if e.offset >= old_node.range().end() {
-            res.push(SyntaxError {
-                msg: e.msg,
-                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
-            })
-        }
-    }
-    for e in new_errors {
-        res.push(SyntaxError {
-            msg: e.msg,
-            offset: e.offset + old_node.range().start(),
-        })
-    }
-    res
-}
diff --git a/crates/libsyntax2/src/reparsing.rs b/crates/libsyntax2/src/reparsing.rs
new file mode 100644
index 000000000..723ea2b8b
--- /dev/null
+++ b/crates/libsyntax2/src/reparsing.rs
@@ -0,0 +1,188 @@
+use algo;
+use grammar;
+use lexer::{tokenize, Token};
+use text_unit::{TextRange, TextUnit};
+use yellow::{self, SyntaxNodeRef, GreenNode, SyntaxError};
+use parser_impl;
+use parser_api::Parser;
+use {
+    SyntaxKind::*,
+};
+use text_utils::replace_range;
+
+#[derive(Debug, Clone)]
+pub struct AtomEdit {
+    pub delete: TextRange,
+    pub insert: String,
+}
+
+impl AtomEdit {
+    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
+        AtomEdit { delete: range, insert: replace_with }
+    }
+
+    pub fn delete(range: TextRange) -> AtomEdit {
+        AtomEdit::replace(range, String::new())
+    }
+
+    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
+        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+    }
+}
+
+pub(crate) fn incremental_reparse(
+    node: SyntaxNodeRef,
+    edit: &AtomEdit,
+    errors: Vec<SyntaxError>,
+) -> Option<(GreenNode, Vec<SyntaxError>)> {
+    let (node, green, new_errors) =
+        reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?;
+    let green_root = node.replace_with(green);
+    let errors = merge_errors(errors, new_errors, node, edit);
+    Some((green_root, errors))
+}
+
+fn reparse_leaf<'node>(
+    node: SyntaxNodeRef<'node>,
+    edit: &AtomEdit,
+) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
+    let node = algo::find_covering_node(node, edit.delete);
+    match node.kind() {
+        | WHITESPACE
+        | COMMENT
+        | DOC_COMMENT
+        | IDENT
+        | STRING
+        | RAW_STRING => {
+            let text = get_text_after_edit(node, &edit);
+            let tokens = tokenize(&text);
+            let token = match tokens[..] {
+                [token] if token.kind == node.kind() => token,
+                _ => return None,
+            };
+
+            if token.kind == IDENT && is_contextual_kw(&text) {
+                return None;
+            }
+
+            let green = GreenNode::new_leaf(node.kind(), &text);
+            let new_errors = vec![];
+            Some((node, green, new_errors))
+        }
+        _ => None,
+    }
+}
+
+fn reparse_block<'node>(
+    node: SyntaxNodeRef<'node>,
+    edit: &AtomEdit,
+) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
+    let (node, reparser) = find_reparsable_node(node, edit.delete)?;
+    let text = get_text_after_edit(node, &edit);
+    let tokens = tokenize(&text);
+    if !is_balanced(&tokens) {
+        return None;
+    }
+    let (green, new_errors) =
+        parser_impl::parse_with::<yellow::GreenBuilder>(
+            &text, &tokens, reparser,
+        );
+    Some((node, green, new_errors))
+}
+
+fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
+    replace_range(
+        node.text().to_string(),
+        edit.delete - node.range().start(),
+        &edit.insert,
+    )
+}
+
+fn is_contextual_kw(text: &str) -> bool {
+    match text {
+        | "auto"
+        | "default"
+        | "union" => true,
+        _ => false,
+    }
+}
+
+fn find_reparsable_node<'node>(
+    node: SyntaxNodeRef<'node>,
+    range: TextRange,
+) -> Option<(SyntaxNodeRef<'node>, fn(&mut Parser))> {
+    let node = algo::find_covering_node(node, range);
+    return algo::ancestors(node)
+        .filter_map(|node| reparser(node).map(|r| (node, r)))
+        .next();
+
+    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
+        let res = match node.kind() {
+            BLOCK => grammar::block,
+            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
+            NAMED_FIELD_LIST => grammar::named_field_list,
+            ENUM_VARIANT_LIST => grammar::enum_variant_list,
+            MATCH_ARM_LIST => grammar::match_arm_list,
+            USE_TREE_LIST => grammar::use_tree_list,
+            EXTERN_ITEM_LIST => grammar::extern_item_list,
+            TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree,
+            ITEM_LIST => {
+                let parent = node.parent().unwrap();
+                match parent.kind() {
+                    IMPL_ITEM => grammar::impl_item_list,
+                    TRAIT_DEF => grammar::trait_item_list,
+                    MODULE => grammar::mod_item_list,
+                    _ => return None,
+                }
+            }
+            _ => return None,
+        };
+        Some(res)
+    }
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+    if tokens.len() == 0
+        || tokens.first().unwrap().kind != L_CURLY
+        || tokens.last().unwrap().kind != R_CURLY {
+        return false;
+    }
+    let mut balance = 0usize;
+    for t in tokens.iter() {
+        match t.kind {
+            L_CURLY => balance += 1,
+            R_CURLY => balance = match balance.checked_sub(1) {
+                Some(b) => b,
+                None => return false,
+            },
+            _ => (),
+        }
+    }
+    balance == 0
+}
+
+fn merge_errors(
+    old_errors: Vec<SyntaxError>,
+    new_errors: Vec<SyntaxError>,
+    old_node: SyntaxNodeRef,
+    edit: &AtomEdit,
+) -> Vec<SyntaxError> {
+    let mut res = Vec::new();
+    for e in old_errors {
+        if e.offset <= old_node.range().start() {
+            res.push(e)
+        } else if e.offset >= old_node.range().end() {
+            res.push(SyntaxError {
+                msg: e.msg,
+                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
+            })
+        }
+    }
+    for e in new_errors {
+        res.push(SyntaxError {
+            msg: e.msg,
+            offset: e.offset + old_node.range().start(),
+        })
+    }
+    res
+}
diff --git a/crates/libsyntax2/src/text_utils.rs b/crates/libsyntax2/src/text_utils.rs
index e3d73888f..58ae1e43e 100644
--- a/crates/libsyntax2/src/text_utils.rs
+++ b/crates/libsyntax2/src/text_utils.rs
@@ -17,3 +17,10 @@ pub fn intersect(r1: TextRange, r2: TextRange) -> Option<TextRange> {
         None
     }
 }
+
+pub fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+    let start = u32::from(range.start()) as usize;
+    let end = u32::from(range.end()) as usize;
+    text.replace_range(start..end, replace_with);
+    text
+}
\ No newline at end of file
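
Below is a minimal, illustrative usage sketch of the reorganized API; it is not part of the commit. It assumes that TextUnit (from the text_unit crate) is re-exported from libsyntax2 alongside File and the newly re-exported reparsing::AtomEdit, and uses File::parse as shown in full_reparse above.

// Illustrative sketch only; the source text and edit offset are made up.
extern crate libsyntax2;

use libsyntax2::{AtomEdit, File, TextUnit};

fn main() {
    let file = File::parse("fn foo() { 1 + 1 }");
    // Insert "1 + " at offset 11, inside the block body. The edited range is
    // covered by a reparsable BLOCK node, so incremental_reparse can re-run
    // grammar::block on just that block instead of reparsing the whole file.
    let edit = AtomEdit::insert(TextUnit::from(11), "1 + ".to_string());
    let reparsed = file
        .incremental_reparse(&edit)
        // Mirror File::reparse above: fall back to a full parse of the edited
        // text when the edit cannot be handled incrementally.
        .unwrap_or_else(|| File::parse("fn foo() { 1 + 1 + 1 }"));
    let _ = reparsed;
}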