Diffstat (limited to 'crates/libsyntax2/src/lib.rs')
 crates/libsyntax2/src/lib.rs | 132
 1 file changed, 9 insertions(+), 123 deletions(-)
diff --git a/crates/libsyntax2/src/lib.rs b/crates/libsyntax2/src/lib.rs
index e761fa358..eb271762e 100644
--- a/crates/libsyntax2/src/lib.rs
+++ b/crates/libsyntax2/src/lib.rs
@@ -27,6 +27,10 @@ extern crate parking_lot;
 extern crate smol_str;
 extern crate text_unit;
 
+#[cfg(test)]
+#[macro_use]
+extern crate test_utils;
+
 pub mod algo;
 pub mod ast;
 mod lexer;
@@ -35,6 +39,7 @@ mod token_set;
 mod parser_api;
 mod grammar;
 mod parser_impl;
+mod reparsing;
 
 mod syntax_kinds;
 mod yellow;
@@ -49,12 +54,11 @@ pub use {
     lexer::{tokenize, Token},
     syntax_kinds::SyntaxKind,
     yellow::{SyntaxNode, SyntaxNodeRef, OwnedRoot, RefRoot, TreeRoot, SyntaxError},
+    reparsing::AtomEdit,
 };
 
 use {
-    SyntaxKind::*,
     yellow::{GreenNode, SyntaxRoot},
-    parser_api::Parser,
 };
 
 #[derive(Clone, Debug, Hash)]
@@ -82,25 +86,11 @@ impl File {
         self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
     }
     pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
-        let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
-        let text = replace_range(
-            node.text().to_string(),
-            edit.delete - node.range().start(),
-            &edit.insert,
-        );
-        let tokens = tokenize(&text);
-        if !is_balanced(&tokens) {
-            return None;
-        }
-        let (green, new_errors) = parser_impl::parse_with::<yellow::GreenBuilder>(
-            &text, &tokens, reparser,
-        );
-        let green_root = node.replace_with(green);
-        let errors = merge_errors(self.errors(), new_errors, node, edit);
-        Some(File::new(green_root, errors))
+        reparsing::incremental_reparse(self.syntax(), edit, self.errors())
+            .map(|(green_node, errors)| File::new(green_node, errors))
     }
     fn full_reparse(&self, edit: &AtomEdit) -> File {
-        let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
+        let text = text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
         File::parse(&text)
     }
     pub fn ast(&self) -> ast::Root {
@@ -113,107 +103,3 @@ impl File {
         self.syntax().root.syntax_root().errors.clone()
     }
 }
-
-#[derive(Debug, Clone)]
-pub struct AtomEdit {
-    pub delete: TextRange,
-    pub insert: String,
-}
-
-impl AtomEdit {
-    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
-        AtomEdit { delete: range, insert: replace_with }
-    }
-
-    pub fn delete(range: TextRange) -> AtomEdit {
-        AtomEdit::replace(range, String::new())
-    }
-
-    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
-        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
-    }
-}
-
-fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
-    let node = algo::find_covering_node(node, range);
-    return algo::ancestors(node)
-        .filter_map(|node| reparser(node).map(|r| (node, r)))
-        .next();
-
-    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
-        let res = match node.kind() {
-            BLOCK => grammar::block,
-            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
-            NAMED_FIELD_LIST => grammar::named_field_list,
-            ENUM_VARIANT_LIST => grammar::enum_variant_list,
-            MATCH_ARM_LIST => grammar::match_arm_list,
-            USE_TREE_LIST => grammar::use_tree_list,
-            EXTERN_ITEM_LIST => grammar::extern_item_list,
-            TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree,
-            ITEM_LIST => {
-                let parent = node.parent().unwrap();
-                match parent.kind() {
-                    IMPL_ITEM => grammar::impl_item_list,
-                    TRAIT_DEF => grammar::trait_item_list,
-                    MODULE => grammar::mod_item_list,
-                    _ => return None,
-                }
-            },
-            _ => return None,
-        };
-        Some(res)
-    }
-}
-
-pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
-    let start = u32::from(range.start()) as usize;
-    let end = u32::from(range.end()) as usize;
-    text.replace_range(start..end, replace_with);
-    text
-}
-
-fn is_balanced(tokens: &[Token]) -> bool {
-    if tokens.len() == 0
-        || tokens.first().unwrap().kind != L_CURLY
-        || tokens.last().unwrap().kind != R_CURLY {
-        return false
-    }
-    let mut balance = 0usize;
-    for t in tokens.iter() {
-        match t.kind {
-            L_CURLY => balance += 1,
-            R_CURLY => balance = match balance.checked_sub(1) {
-                Some(b) => b,
-                None => return false,
-            },
-            _ => (),
-        }
-    }
-    balance == 0
-}
-
-fn merge_errors(
-    old_errors: Vec<SyntaxError>,
-    new_errors: Vec<SyntaxError>,
-    old_node: SyntaxNodeRef,
-    edit: &AtomEdit,
-) -> Vec<SyntaxError> {
-    let mut res = Vec::new();
-    for e in old_errors {
-        if e.offset < old_node.range().start() {
-            res.push(e)
-        } else if e.offset > old_node.range().end() {
-            res.push(SyntaxError {
-                msg: e.msg,
-                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
-            })
-        }
-    }
-    for e in new_errors {
-        res.push(SyntaxError {
-            msg: e.msg,
-            offset: e.offset + old_node.range().start(),
-        })
-    }
-    res
-}
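
For orientation, below is a minimal, hedged usage sketch of the API surface this diff touches: File::parse, the public File::incremental_reparse, File::ast, and the newly re-exported reparsing::AtomEdit. TextRange::offset_len and the TextUnit conversion are assumed to come from the text_unit crate, as the removed code suggests; the sample source text and offsets are illustrative only and not part of this commit.

// Illustrative sketch only -- not part of this commit. It exercises the items
// visible in the diff above; anything not shown there (the sample text, the
// concrete offsets, the extern-crate setup) is an assumption.
extern crate libsyntax2;
extern crate text_unit;

use libsyntax2::{AtomEdit, File};
use text_unit::{TextRange, TextUnit};

fn main() {
    // Parse an initial file.
    let file = File::parse("fn foo() { 1 + 1 }");

    // Describe a single atomic edit: replace the five bytes starting at
    // offset 11 (`1 + 1`) with `2 * 3`.
    let edit = AtomEdit::replace(
        TextRange::offset_len(TextUnit::from(11), TextUnit::from(5)),
        "2 * 3".to_string(),
    );

    // The public entry point kept by this diff: it returns None when the
    // edited node cannot be reparsed in isolation (the wrapper shown as
    // context in the fourth hunk then falls back to a full reparse).
    if let Some(new_file) = file.incremental_reparse(&edit) {
        let _root = new_file.ast();
    }
}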