From f5bf1a9650089ec7bd0a4d3fb69706fab06da308 Mon Sep 17 00:00:00 2001
From: Edwin Cheng
Date: Sun, 28 Feb 2021 13:06:17 +0800
Subject: Fix builtin macros split exprs on comma

---
 crates/mbe/src/expander/matcher.rs | 68 +--------------------------------
 crates/mbe/src/lib.rs              |  4 +-
 crates/mbe/src/syntax_bridge.rs    | 39 ++++++++++++++++++-
 crates/mbe/src/tt_iter.rs          | 77 ++++++++++++++++++++++++++++++++++++++
 4 files changed, 119 insertions(+), 69 deletions(-)

(limited to 'crates/mbe')

diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 800931cd1..e3bd4c09a 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -3,15 +3,13 @@
 use crate::{
     expander::{Binding, Bindings, Fragment},
     parser::{Op, RepeatKind, Separator},
-    subtree_source::SubtreeTokenSource,
     tt_iter::TtIter,
     ExpandError, MetaTemplate,
 };
 
 use super::ExpandResult;
-use parser::{FragmentKind::*, TreeSink};
-use syntax::{SmolStr, SyntaxKind};
-use tt::buffer::{Cursor, TokenBuffer};
+use parser::FragmentKind::*;
+use syntax::SmolStr;
 
 impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
@@ -409,68 +407,6 @@ impl<'a> TtIter<'a> {
             .into())
     }
 
-    fn expect_fragment(
-        &mut self,
-        fragment_kind: parser::FragmentKind,
-    ) -> ExpandResult<Option<tt::TokenTree>> {
-        struct OffsetTokenSink<'a> {
-            cursor: Cursor<'a>,
-            error: bool,
-        }
-
-        impl<'a> TreeSink for OffsetTokenSink<'a> {
-            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
-                if kind == SyntaxKind::LIFETIME_IDENT {
-                    n_tokens = 2;
-                }
-                for _ in 0..n_tokens {
-                    self.cursor = self.cursor.bump_subtree();
-                }
-            }
-            fn start_node(&mut self, _kind: SyntaxKind) {}
-            fn finish_node(&mut self) {}
-            fn error(&mut self, _error: parser::ParseError) {
-                self.error = true;
-            }
-        }
-
-        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
-        let mut src = SubtreeTokenSource::new(&buffer);
-        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
-
-        parser::parse_fragment(&mut src, &mut sink, fragment_kind);
-
-        let mut err = None;
-        if !sink.cursor.is_root() || sink.error {
-            err = Some(err!("expected {:?}", fragment_kind));
-        }
-
-        let mut curr = buffer.begin();
-        let mut res = vec![];
-
-        if sink.cursor.is_root() {
-            while curr != sink.cursor {
-                if let Some(token) = curr.token_tree() {
-                    res.push(token);
-                }
-                curr = curr.bump();
-            }
-        }
-        self.inner = self.inner.as_slice()[res.len()..].iter();
-        if res.len() == 0 && err.is_none() {
-            err = Some(err!("no tokens consumed"));
-        }
-        let res = match res.len() {
-            1 => Some(res[0].cloned()),
-            0 => None,
-            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: None,
-                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
-            })),
-        };
-        ExpandResult { value: res, err }
-    }
-
     fn eat_vis(&mut self) -> Option<tt::TokenTree> {
         let mut fork = self.clone();
         match fork.expect_fragment(Visibility) {
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 6b4a4eb53..4c298f85f 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -65,8 +65,8 @@ impl fmt::Display for ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
-    TokenMap,
+    ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+    token_tree_to_syntax_node, TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition.
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 0cdc175be..5a91781fc 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,8 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
+use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum TokenTextRange {
@@ -112,6 +112,43 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
     Some((subtree, conv.id_alloc.map))
 }
 
+/// Split token tree with separate expr: $($e:expr)SEP*
+pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+    if tt.token_trees.is_empty() {
+        return Vec::new();
+    }
+
+    let mut iter = TtIter::new(tt);
+    let mut res = Vec::new();
+
+    while iter.peek_n(0).is_some() {
+        let expanded = iter.expect_fragment(FragmentKind::Expr);
+        if expanded.err.is_some() {
+            break;
+        }
+
+        res.push(match expanded.value {
+            None => break,
+            Some(tt @ tt::TokenTree::Leaf(_)) => {
+                tt::Subtree { delimiter: None, token_trees: vec![tt.into()] }
+            }
+            Some(tt::TokenTree::Subtree(tt)) => tt,
+        });
+
+        let mut fork = iter.clone();
+        if fork.expect_char(sep).is_err() {
+            break;
+        }
+        iter = fork;
+    }
+
+    if iter.peek_n(0).is_some() {
+        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+    }
+
+    res
+}
+
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 46c420718..a362d31fc 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -1,5 +1,20 @@
 //! FIXME: write short doc here
 
+use crate::{subtree_source::SubtreeTokenSource, ExpandError, ExpandResult};
+
+use parser::TreeSink;
+use syntax::SyntaxKind;
+use tt::buffer::{Cursor, TokenBuffer};
+
+macro_rules! err {
+    () => {
+        ExpandError::BindingError(format!(""))
+    };
+    ($($tt:tt)*) => {
+        ExpandError::BindingError(format!($($tt)*))
+    };
+}
+
 #[derive(Debug, Clone)]
 pub(crate) struct TtIter<'a> {
     pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
@@ -56,6 +71,68 @@ impl<'a> TtIter<'a> {
         }
     }
 
+    pub(crate) fn expect_fragment(
+        &mut self,
+        fragment_kind: parser::FragmentKind,
+    ) -> ExpandResult<Option<tt::TokenTree>> {
+        struct OffsetTokenSink<'a> {
+            cursor: Cursor<'a>,
+            error: bool,
+        }
+
+        impl<'a> TreeSink for OffsetTokenSink<'a> {
+            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+                if kind == SyntaxKind::LIFETIME_IDENT {
+                    n_tokens = 2;
+                }
+                for _ in 0..n_tokens {
+                    self.cursor = self.cursor.bump_subtree();
+                }
+            }
+            fn start_node(&mut self, _kind: SyntaxKind) {}
+            fn finish_node(&mut self) {}
+            fn error(&mut self, _error: parser::ParseError) {
+                self.error = true;
+            }
+        }
+
+        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
+        let mut src = SubtreeTokenSource::new(&buffer);
+        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
+
+        parser::parse_fragment(&mut src, &mut sink, fragment_kind);
+
+        let mut err = None;
+        if !sink.cursor.is_root() || sink.error {
+            err = Some(err!("expected {:?}", fragment_kind));
+        }
+
+        let mut curr = buffer.begin();
+        let mut res = vec![];
+
+        if sink.cursor.is_root() {
+            while curr != sink.cursor {
+                if let Some(token) = curr.token_tree() {
+                    res.push(token);
+                }
+                curr = curr.bump();
+            }
+        }
+        self.inner = self.inner.as_slice()[res.len()..].iter();
+        if res.len() == 0 && err.is_none() {
+            err = Some(err!("no tokens consumed"));
+        }
+        let res = match res.len() {
+            1 => Some(res[0].cloned()),
+            0 => None,
+            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
+                delimiter: None,
+                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
+            })),
+        };
+        ExpandResult { value: res, err }
+    }
+
     pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> {
         self.inner.as_slice().get(n)
     }
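
The new `mbe::parse_exprs_with_sep` re-export exists so that builtin-macro expansion can split its input into whole expressions at top-level separators, using real expression parsing, instead of cutting at every comma token. A minimal usage sketch, not part of the patch: it assumes code running inside the rust-analyzer workspace with the `mbe` crate as a dependency, and the input string and expected count are illustrative only.

fn main() {
    // Lex the raw text into a single token tree plus its token map.
    let (tt, _token_map) =
        mbe::parse_to_token_tree("foo(1, 2), [3, 4], 5 + 6").expect("lexing failed");

    // Split at top-level commas only: the commas inside `(...)` and `[...]`
    // stay within their own expressions and do not cause extra splits.
    let args = mbe::parse_exprs_with_sep(&tt, ',');
    assert_eq!(args.len(), 3);
}

Note that when an expression fails to parse or a separator is missing, the loop in `parse_exprs_with_sep` stops and pushes whatever tokens remain as one final undelimited subtree, so no input is silently dropped.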
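
Both `parse_exprs_with_sep` and the `eat_vis` helper that stays behind in matcher.rs rely on the same lookahead idiom on `TtIter`: clone the iterator, try to consume from the clone, and overwrite the original only when that succeeds. `TtIter` itself is `pub(crate)`, so the self-contained sketch below demonstrates the same fork-and-commit idiom with a plain `std::slice::Iter`; the function and variable names are invented for illustration.

// Try to consume `sep`; advance the caller's iterator only on success.
fn eat_sep(iter: &mut std::slice::Iter<'_, char>, sep: char) -> bool {
    // Cloning a slice iterator is cheap: it copies a pair of pointers.
    let mut fork = iter.clone();
    match fork.next() {
        Some(&c) if c == sep => {
            *iter = fork; // commit the fork, consuming the separator
            true
        }
        _ => false, // roll back: the caller's iterator is untouched
    }
}

fn main() {
    let tokens = [',', 'x'];
    let mut iter = tokens.iter();
    assert!(eat_sep(&mut iter, ','));    // consumes the leading ','
    assert!(!eat_sep(&mut iter, ','));   // 'x' is not a separator, no progress
    assert_eq!(iter.next(), Some(&'x')); // 'x' is still available
}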