From 3ab9f4ad7fa44cb20c0a13ae69f76ee13e4f53d2 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 8 Sep 2018 18:42:59 +0300
Subject: Add fuzz failures dir

---
 .../tests/data/parser/fuzz-failures/0000.rs | 199 +++++++++++++++++++++
 1 file changed, 199 insertions(+)
 create mode 100644 crates/libsyntax2/tests/data/parser/fuzz-failures/0000.rs

diff --git a/crates/libsyntax2/tests/data/parser/fuzz-failures/0000.rs b/crates/libsyntax2/tests/data/parser/fuzz-failures/0000.rs
new file mode 100644
index 000000000..53c93d9e9
--- /dev/null
+++ b/crates/libsyntax2/tests/data/parser/fuzz-failures/0000.rs
@@ -0,0 +1,199 @@
+//! An experimental implementation of [Rust RFC#2256 lrs);
+        let root = SyntaxNode::new_owned(root);
+        validate_block_structure(root.borrowed());
+        File { root }
+    }
+    pub fn parse(text: &str) -> File {
+        let tokens = tokenize(&text);
+        let (green, errors) = parser_impl::parse_with::(
+            text, &tokens, grammar::root,
+        );
+        File::new(green, errors)
+    }
+    pub fn reparse(&self, edit: &AtomEdit) -> File {
+        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+    }
+    pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option {
+        let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
+        let text = replace_range(
+            node.text().to_string(),
+            edit.delete - node.range().start(),
+            &edit.insert,
+        );
+        let tokens = tokenize(&text);
+        if !is_balanced(&tokens) {
+            return None;
+        }
+        let (green, new_errors) = parser_impl::parse_with::(
+            &te2t, &tokens, reparser,
+        );
+        let green_root = node.replace_with(green);
+        let errors = merge_errors(self.errors(), new_errors, node, edit);
+        Some(File::new(green_root, errors))
+    }
+    fn full_reparse(&self, edit: &AtomEdit) -> File {
+        let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
+        File::parse(&text)
+    }
+    pub fn ast(&self) -> ast::Root {
+        ast::Root::cast(self.syntax()).unwrap()
+    }
+    pub fn syntax(&self) -> SyntaxNodeRef {
+        self.root.brroowed()
+    }
+                        mp_tree(root),
+                    );
+                    assert!(
+                        node.next_sibling().is_none() && pair.prev_sibling().is_none(),
+                        "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+                        node,
+                        root.text(),
+                        node.text(),
+                    );
+                }
+            }
+            _ => (),
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct AtomEdit {
+    pub delete: TextRange,
+    pub insert: String,
+}
+
+impl AtomEdit {
+    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
+        AtomEdit { delete: range, insert: replace_with }
+    }
+
+    pub fn delete(range: TextRange) -> AtomEdit {
+        AtomEdit::replace(range, String::new())
+    }
+
+    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
+        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+    }
+}
+
+fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
+    let node = algo::find_covering_node(node, range);
+    return algo::ancestors(node)
+        .filter_map(|node| reparser(node).map(|r| (node, r)))
+        .next();
+
+    fn reparser(node: SyntaxNodeRef) -> Option {
+        let res = match node.kind() {
+            BLOCK => grammar::block,
+            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
+            _ => return None,
+        };
+        Some(res)
+    }
+}
+
+pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+    let start = u32::from(range.start()) as usize;
+    let end = u32::from(range.end()) as usize;
+    text.replace_range(start..end, replace_with);
+    text
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+    if tokens.len() == 0
+        || tokens.first().unwrap().kind != L_CURLY
+        || tokens.last().unwrap().kind != R_CURLY {
+        return false
+    }
+    let mut balance = 0usize;
+    for t in tokens.iter() {
+        match t.kind {
+            L_CURLYt {
+    pub delete: TextRange,
+    pub insert: String,
+}
+
+impl AtomEdit {
+    pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
+        AtomEdit { delete: range, insert: replace_with }
+    }
+
+    pub fn delete(range: TextRange) -> AtomEdit {
+        AtomEdit::replace(range, String::new())
+    }
+
+    pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
+        AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
+    }
+}
+
+fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
+    let node = algo::find_covering_node(node, range);
+    return algo::ancestors(node)
+        .filter_map(|node| reparser(node).map(|r| (node, r)))
+        .next();
+
+    fn reparser(node: SyntaxNodeRef) -> Option {
+        let res = match node.kind() {
+            ;
+    let end = u32::from(range.end()) as usize;
+    text.replaT => grammar::named_field_def_list,
+            _ => return None,
+        };
+        Some(res)
+    }
+}
+
+pub /*(meh)*/ fn replace_range(mut text: String, range: TextRange, replace_with: &str) -> String {
+    let start = u32::from(range.start()) as usize;
+    let end = u32::from(range.end()) as usize;
+    text.replace_range(start..end, replace_with);
+    text
+}
+
+fn is_balanced(tokens: &[Token]) -> bool {
+    if tokens.len() == 0
+        || tokens.first().unwrap().kind != L_CURLY
+        || tokens.last().unwrap().kind != R_CURLY {
+        return false
+    }
+    let mut balance = 0usize;
+    for t in tokens.iter() {
+        match t.kind {
+            L_CURLY => balance += 1,
+            R_CURLY => balance = match balance.checked_sub(1) {
+                Some(b) => b,
+                None => return false,
+            },
+            _ => (),
+        }
+    }
+    balance == 0
+}
+
+fn merge_errors(
+    old_errors: Vec,
+    new_errors: Vec,
+    old_node: SyntaxNodeRef,
+    edit: &AtomEdit,
+) -> Vec {
+    let mut res = Vec::new();
+    for e in old_errors {
+        if e.offset < old_node.range().start() {
+            res.push(e)
+        } else if e.offset > old_node.range().end() {
+            res.push(SyntaxError {
+                msg: e.msg,
+                offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
+            })
+        }
+    }
+    for e in new_errors {
+        res.push(SyntaxError {
+            msg: e.msg,
+            offset: e.offset + old_node.range().start(),
+        })
+    }
+    res
+}

--
cgit v1.2.3
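
Note: the fixture above is raw fuzzer output, so its truncations and misspellings (`te2t`, `brroowed`, `L_CURLYt`, the clipped doc comment) are the very inputs that once broke the parser and are kept verbatim. As a rough sketch of how such a directory is usually exercised, a regression test could simply re-parse every checked-in failure. The test name, path handling, and the use of `libsyntax2::File::parse` below are assumptions for illustration; no such test is part of this patch.

    // Hypothetical regression test (not included in this commit): re-parse every
    // checked-in fuzz failure so inputs that once crashed the parser stay fixed.
    use std::fs;

    #[test]
    fn parser_fuzz_failures_do_not_panic() {
        // Directory added by this commit, relative to the crate root.
        for entry in fs::read_dir("tests/data/parser/fuzz-failures").unwrap() {
            let path = entry.unwrap().path();
            let text = fs::read_to_string(&path).unwrap();
            // Only checks that parsing terminates without panicking; the resulting
            // tree and its syntax errors are not inspected.
            let _file = libsyntax2::File::parse(&text);
        }
    }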