From a29211918b728fb83246bfcb43d2ad9c79e182fb Mon Sep 17 00:00:00 2001
From: darksv
Date: Sat, 15 Sep 2018 13:35:55 +0200
Subject: create separated mod for reparsing functionality

---
 crates/libsyntax2/src/lib.rs | 175 ++-----------------------------------------
 1 file changed, 5 insertions(+), 170 deletions(-)

(limited to 'crates/libsyntax2/src/lib.rs')

diff --git a/crates/libsyntax2/src/lib.rs b/crates/libsyntax2/src/lib.rs
index 7c9fbc421..014cdafee 100644
--- a/crates/libsyntax2/src/lib.rs
+++ b/crates/libsyntax2/src/lib.rs
@@ -35,6 +35,7 @@ mod token_set;
 mod parser_api;
 mod grammar;
 mod parser_impl;
+mod reparsing;
 
 mod syntax_kinds;
 mod yellow;
@@ -49,12 +50,11 @@ pub use {
     lexer::{tokenize, Token},
     syntax_kinds::SyntaxKind,
     yellow::{SyntaxNode, SyntaxNodeRef, OwnedRoot, RefRoot, TreeRoot, SyntaxError},
+    reparsing::AtomEdit,
 };
 
 use {
-    SyntaxKind::*,
     yellow::{GreenNode, SyntaxRoot},
-    parser_api::Parser,
 };
 
 #[derive(Clone, Debug)]
@@ -82,55 +82,11 @@ impl File {
         self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
     }
     pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
-        let (node, green, new_errors) =
-            self.reparse_leaf(&edit).or_else(|| self.reparse_block(&edit))?;
-
-        let green_root = node.replace_with(green);
-        let errors = merge_errors(self.errors(), new_errors, node, edit);
-        Some(File::new(green_root, errors))
-    }
-    fn reparse_leaf(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
-        let node = algo::find_covering_node(self.syntax(), edit.delete);
-        match node.kind() {
-            | WHITESPACE
-            | COMMENT
-            | DOC_COMMENT
-            | IDENT
-            | STRING
-            | RAW_STRING => {
-                let text = get_text_after_edit(node, &edit);
-                let tokens = tokenize(&text);
-                let token = match tokens[..] {
-                    [token] if token.kind == node.kind() => token,
-                    _ => return None,
-                };
-
-                if token.kind == IDENT && is_contextual_kw(&text) {
-                    return None;
-                }
-
-                let green = GreenNode::new_leaf(node.kind(), &text);
-                let new_errors = vec![];
-                Some((node, green, new_errors))
-            },
-            _ => None,
-        }
-    }
-    fn reparse_block(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
-        let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
-        let text = get_text_after_edit(node, &edit);
-        let tokens = tokenize(&text);
-        if !is_balanced(&tokens) {
-            return None;
-        }
-        let (green, new_errors) =
-            parser_impl::parse_with::<yellow::GreenBuilder>(
-                &text, &tokens, reparser,
-            );
-        Some((node, green, new_errors))
+        reparsing::incremental_reparse(self.syntax(), edit, self.errors())
+            .map(|(green_node, errors)| File::new(green_node, errors))
     }
     fn full_reparse(&self, edit: &AtomEdit) -> File {
-        let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
+        let text = text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
         File::parse(&text)
     }
     pub fn ast(&self) -> ast::Root {
@@ -143,124 +99,3 @@ impl File {
         self.syntax().root.syntax_root().errors.clone()
     }
 }
-
-#[derive(Debug, Clone)]
-pub struct AtomEdit {
-    pub delete: TextRange,
-    pub insert: String,
-}
-
-impl AtomEdit {
-    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
-        AtomEdit { delete: range, insert: replace_with }
-    }
-
-    pub fn delete(range: TextRange) -> AtomEdit {
-        AtomEdit::replace(range, String::new())
-    }
-
-    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
-        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
-    }
-}
-
-fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
-    replace_range(
-        node.text().to_string(),
-        edit.delete - node.range().start(),
-        &edit.insert,
-    )
-}
-
-fn is_contextual_kw(text: &str) -> bool {
-    match text {
-        | "auto"
-        | "default"
-        | "union" => true,
-        _ => false,
-    }
-}
-
-fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
-    let node = algo::find_covering_node(node, range);
-    return algo::ancestors(node)
-        .filter_map(|node| reparser(node).map(|r| (node, r)))
-        .next();
-
-    fn reparser(node: SyntaxNodeRef) -> Option<fn(&mut Parser)> {
-        let res = match node.kind() {
-            BLOCK => grammar::block,
-            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
-            NAMED_FIELD_LIST => grammar::named_field_list,
-            ENUM_VARIANT_LIST => grammar::enum_variant_list,
-            MATCH_ARM_LIST => grammar::match_arm_list,
-            USE_TREE_LIST => grammar::use_tree_list,
-            EXTERN_ITEM_LIST => grammar::extern_item_list,
-            TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree,
-            ITEM_LIST => {
-                let parent = node.parent().unwrap();
-                match parent.kind() {
-                    IMPL_ITEM => grammar::impl_item_list,
-                    TRAIT_DEF => grammar::trait_item_list,
-                    MODULE => grammar::mod_item_list,
-                    _ => return None,
-                }
-            },
-            _ => return None,
-        };
-        Some(res)
-    }
-}
-
-pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
-    let start = u32::from(range.start()) as usize;
-    let end = u32::from(range.end()) as usize;
-    text.replace_range(start..end, replace_with);
-    text
-}
-
-fn is_balanced(tokens: &[Token]) -> bool {
-    if tokens.len() == 0
-        || tokens.first().unwrap().kind != L_CURLY
-        || tokens.last().unwrap().kind != R_CURLY {
-        return false
-    }
-    let mut balance = 0usize;
-    for t in tokens.iter() {
-        match t.kind {
-            L_CURLY => balance += 1,
-            R_CURLY => balance = match balance.checked_sub(1) {
-                Some(b) => b,
-                None => return false,
-            },
-            _ => (),
-        }
-    }
-    balance == 0
-}
-
-fn merge_errors(
-    old_errors: Vec<SyntaxError>,
-    new_errors: Vec<SyntaxError>,
-    old_node: SyntaxNodeRef,
-    edit: &AtomEdit,
-) -> Vec<SyntaxError> {
-    let mut res = Vec::new();
-    for e in old_errors {
-        if e.offset <= old_node.range().start() {
-            res.push(e)
-        } else if e.offset >= old_node.range().end() {
-            res.push(SyntaxError {
-                msg: e.msg,
-                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
-            })
-        }
-    }
-    for e in new_errors {
-        res.push(SyntaxError {
-            msg: e.msg,
-            offset: e.offset + old_node.range().start(),
-        })
-    }
-    res
-}
--
cgit v1.2.3