From 299d97b6d98cec673ff056c188ac45a17febc7d4 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Wed, 24 Apr 2019 23:01:32 +0800 Subject: Add handling `token` seperator in mbe --- crates/ra_mbe/src/lib.rs | 216 +++++------------------------------- crates/ra_mbe/src/mbe_expander.rs | 49 ++++++-- crates/ra_mbe/src/mbe_parser.rs | 40 ++++--- crates/ra_mbe/src/subtree_source.rs | 22 ++-- crates/ra_mbe/src/syntax_bridge.rs | 25 ++++- crates/ra_mbe/src/tt_cursor.rs | 91 +++++++++++++++ 6 files changed, 217 insertions(+), 226 deletions(-) (limited to 'crates/ra_mbe/src') diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs index d7b18dd0f..7ebba807c 100644 --- a/crates/ra_mbe/src/lib.rs +++ b/crates/ra_mbe/src/lib.rs @@ -24,6 +24,7 @@ mod subtree_source; mod subtree_parser; use ra_syntax::SmolStr; +use smallvec::SmallVec; pub use tt::{Delimiter, Punct}; @@ -98,11 +99,18 @@ pub(crate) struct Subtree { pub(crate) token_trees: Vec, } +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) enum Separator { + Literal(tt::Literal), + Ident(tt::Ident), + Puncts(SmallVec<[tt::Punct; 3]>), +} + #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct Repeat { pub(crate) subtree: Subtree, pub(crate) kind: RepeatKind, - pub(crate) separator: Option, + pub(crate) separator: Option, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -175,8 +183,8 @@ impl_froms!(TokenTree: Leaf, Subtree); let expansion = rules.expand(&invocation_tt).unwrap(); assert_eq!( expansion.to_string(), - "impl From < Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree :: Leaf (it)}} \ - impl From < Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree :: Subtree (it)}}" + "impl From for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \ + impl From for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}" ) } @@ -384,7 +392,7 @@ impl_froms!(TokenTree: Leaf, Subtree); "#, ); - assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar ()}"); + assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ;bar ()}"); } #[test] @@ -416,6 +424,18 @@ impl_froms!(TokenTree: Leaf, Subtree); assert_expansion(&rules, "foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}"); } + #[test] + fn test_match_group_with_multichar_sep() { + let rules = create_rules( + r#" + macro_rules! foo { + (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} ); + }"#, + ); + + assert_expansion(&rules, "foo! (fn baz {true true} )", "fn baz () -> bool {true &&true}"); + } + #[test] fn test_expand_to_item_list() { let rules = create_rules( @@ -597,7 +617,7 @@ MACRO_ITEMS@[0; 40) assert_expansion( &rules, "foo! { bar::::baz:: }", - "fn foo () {let a = bar :: < u8 > :: baz :: < u8 > ;}", + "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}", ); } @@ -891,7 +911,7 @@ MACRO_ITEMS@[0; 40) } "#, ); - assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref < 'a > {s : & 'a str}"#); + assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#); } #[test] @@ -1063,7 +1083,7 @@ macro_rules! int_base { ); assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#, - "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt :: Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}" + "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . 
fmt_int (* self as usize , f)}}" ); } @@ -1140,186 +1160,4 @@ impl_fn_for_zst ! { assert_expansion(&rules, r#"impl_nonzero_fmt ! { # [ stable ( feature = "nonzero" , since = "1.28.0" ) ] ( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for NonZeroU8 }"#, "fn foo () {}"); } - - #[test] - fn test_tuple_impls() { - // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12 - let rules = create_rules( - r#" - macro_rules! tuple_impls { - ($( - $Tuple:ident { - $(($idx:tt) -> $T:ident)+ - } - )+) => { - $( - #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T:PartialEq),+> PartialEq for ($($T,)+) where last_type!($($T,)+): ?Sized { - #[inline] - fn eq(&self, other: &($($T,)+)) -> bool { - $(self.$idx == other.$idx)&&+ - } - #[inline] - fn ne(&self, other: &($($T,)+)) -> bool { - $(self.$idx != other.$idx)||+ - } - } - - #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T:Eq),+> Eq for ($($T,)+) where last_type!($($T,)+): ?Sized {} - - #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) - where last_type!($($T,)+): ?Sized { - #[inline] - fn partial_cmp(&self, other: &($($T,)+)) -> Option { - lexical_partial_cmp!($(self.$idx, other.$idx),+) - } - #[inline] - fn lt(&self, other: &($($T,)+)) -> bool { - lexical_ord!(lt, $(self.$idx, other.$idx),+) - } - #[inline] - fn le(&self, other: &($($T,)+)) -> bool { - lexical_ord!(le, $(self.$idx, other.$idx),+) - } - #[inline] - fn ge(&self, other: &($($T,)+)) -> bool { - lexical_ord!(ge, $(self.$idx, other.$idx),+) - } - #[inline] - fn gt(&self, other: &($($T,)+)) -> bool { - lexical_ord!(gt, $(self.$idx, other.$idx),+) - } - } - - #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T:Ord),+> Ord for ($($T,)+) where last_type!($($T,)+): ?Sized { - #[inline] - fn cmp(&self, other: &($($T,)+)) -> Ordering { - lexical_cmp!($(self.$idx, other.$idx),+) - } - } - - #[stable(feature = "rust1", since = "1.0.0")] - impl<$($T:Default),+> Default for ($($T,)+) { - #[inline] - fn default() -> ($($T,)+) { - ($({ let x: $T = Default::default(); x},)+) - } - } - )+ - } -}"#, - ); - - assert_expansion( - &rules, - r#"tuple_impls ! 
{ - Tuple1 { - ( 0 ) -> A - } - Tuple2 { - ( 0 ) -> A - ( 1 ) -> B - } - Tuple3 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - } - Tuple4 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - } - Tuple5 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - } - Tuple6 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - } - Tuple7 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - ( 6 ) -> G - } - Tuple8 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - ( 6 ) -> G - ( 7 ) -> H - } - Tuple9 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - ( 6 ) -> G - ( 7 ) -> H - ( 8 ) -> I - } - Tuple10 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - ( 6 ) -> G - ( 7 ) -> H - ( 8 ) -> I - ( 9 ) -> J - } - Tuple11 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - ( 6 ) -> G - ( 7 ) -> H - ( 8 ) -> I - ( 9 ) -> J - ( 10 ) -> K - } - Tuple12 { - ( 0 ) -> A - ( 1 ) -> B - ( 2 ) -> C - ( 3 ) -> D - ( 4 ) -> E - ( 5 ) -> F - ( 6 ) -> G - ( 7 ) -> H - ( 8 ) -> I - ( 9 ) -> J - ( 10 ) -> K - ( 11 ) -> L - } -}"#, - "fn foo () {}", - ); - } } diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs index 91b6db522..7411dd8b1 100644 --- a/crates/ra_mbe/src/mbe_expander.rs +++ b/crates/ra_mbe/src/mbe_expander.rs @@ -196,6 +196,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result { let literal = input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone(); + res.inner.insert( text.clone(), Binding::Simple(tt::Leaf::from(literal).into()), @@ -210,7 +211,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result { - if input.eat_punct() != Some(punct) { + if !input.eat_punct().map(|p| p.char == punct.char).unwrap_or(false) { return Err(ExpandError::UnexpectedToken); } } @@ -246,8 +247,23 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result a.text == b.text, + (Literal(ref a), Literal(ref b)) => a.text == b.text, + (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => { + let a_iter = a.iter().map(|a| a.char); + let b_iter = b.iter().map(|b| b.char); + a_iter.eq(b_iter) + } + _ => false, + }) + .unwrap_or(false) + { input.rollback(memento); break; } @@ -328,7 +344,7 @@ fn expand_tt( // Dirty hack to make macro-expansion terminate. 
// This should be replaced by a propper macro-by-example implementation let mut limit = 128; - let mut has_sep = false; + let mut has_seps = 0; while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) { limit -= 1; @@ -339,15 +355,28 @@ fn expand_tt( nesting.push(idx + 1); token_trees.push(reduce_single_token(t).into()); - if let Some(sep) = repeat.separator { - let punct = - tt::Leaf::from(tt::Punct { char: sep, spacing: tt::Spacing::Alone }); - token_trees.push(punct.into()); - has_sep = true; + if let Some(ref sep) = repeat.separator { + match sep { + crate::Separator::Ident(ident) => { + has_seps = 1; + token_trees.push(tt::Leaf::from(ident.clone()).into()); + } + crate::Separator::Literal(lit) => { + has_seps = 1; + token_trees.push(tt::Leaf::from(lit.clone()).into()); + } + + crate::Separator::Puncts(puncts) => { + has_seps = puncts.len(); + for punct in puncts { + token_trees.push(tt::Leaf::from(*punct).into()); + } + } + } } } nesting.pop().unwrap(); - if has_sep { + for _ in 0..has_seps { token_trees.pop(); } diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs index 0710062d9..c7ab463e2 100644 --- a/crates/ra_mbe/src/mbe_parser.rs +++ b/crates/ra_mbe/src/mbe_parser.rs @@ -74,18 +74,11 @@ fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result Result { - let subtree = p.eat_subtree().unwrap(); - let mut subtree = parse_subtree(subtree, transcriber)?; - subtree.delimiter = crate::Delimiter::None; - let sep = p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?; - let (separator, rep) = match sep.char { - '*' | '+' | '?' => (None, sep.char), - char => { - (Some(char), p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?.char) - } - }; - +fn mk_repeat( + rep: char, + subtree: crate::Subtree, + separator: Option, +) -> Result { let kind = match rep { '*' => crate::RepeatKind::ZeroOrMore, '+' => crate::RepeatKind::OneOrMore, @@ -95,6 +88,27 @@ fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result Result { + let subtree = p.eat_subtree().unwrap(); + let mut subtree = parse_subtree(subtree, transcriber)?; + subtree.delimiter = crate::Delimiter::None; + + if let Some(rep) = p.at_punct() { + match rep.char { + '*' | '+' | '?' 
=> { + p.bump(); + return mk_repeat(rep.char, subtree, None); + } + _ => {} + } + } + + let sep = p.eat_seperator().ok_or(ParseError::Expected(String::from("separator")))?; + let rep = p.eat_punct().ok_or(ParseError::Expected(String::from("repeat")))?; + + mk_repeat(rep.char, subtree, Some(sep)) +} + #[cfg(test)] mod tests { use ra_syntax::{ast, AstNode}; @@ -109,7 +123,7 @@ mod tests { is_valid("($i:ident) => ()"); expect_err("$i:ident => ()", "subtree"); expect_err("($i:ident) ()", "`=`"); - expect_err("($($i:ident)_) => ()", "separator"); + expect_err("($($i:ident)_) => ()", "repeat"); } fn expect_err(macro_body: &str, expected: &str) { diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 3229cfa8f..e979777fe 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs @@ -342,7 +342,7 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { } } -struct TokenPeek<'a, I> +pub(crate) struct TokenPeek<'a, I> where I: Iterator, { @@ -365,7 +365,7 @@ where TokenPeek { iter: itertools::multipeek(iter) } } - fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { + pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { if p.spacing != tt::Spacing::Joint { return None; } @@ -375,7 +375,7 @@ where Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint)) } - fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { + pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { self.current_punct2(p).and_then(|((p0, p1), last_joint)| { if !last_joint { None @@ -437,12 +437,16 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { } fn convert_literal(l: &tt::Literal) -> TtToken { - TtToken { - kind: classify_literal(&l.text).unwrap().kind, - is_joint_to_next: false, - text: l.text.clone(), - n_tokens: 1, - } + let kind = classify_literal(&l.text) + .map(|tkn| tkn.kind) + .or_else(|| match l.text.as_ref() { + "true" => Some(SyntaxKind::TRUE_KW), + "false" => Some(SyntaxKind::FALSE_KW), + _ => None, + }) + .unwrap(); + + TtToken { kind, is_joint_to_next: false, text: l.text.clone(), n_tokens: 1 } } fn convert_ident(ident: &tt::Ident) -> TtToken { diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 38a481029..9cca19dbb 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -133,7 +133,9 @@ fn convert_tt( }; let mut token_trees = Vec::new(); - for child in tt.children_with_tokens().skip(skip_first as usize) { + let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable(); + + while let Some(child) = child_iter.next() { if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia() { continue; @@ -152,12 +154,25 @@ fn convert_tt( prev = Some(char) } if let Some(char) = prev { - token_trees.push( - tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into(), - ); + let spacing = match child_iter.peek() { + Some(SyntaxElement::Token(token)) => { + if token.kind().is_punct() { + tt::Spacing::Joint + } else { + tt::Spacing::Alone + } + } + _ => tt::Spacing::Alone, + }; + + token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); } } else { - let child: tt::TokenTree = if token.kind().is_keyword() + let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW + || token.kind() == SyntaxKind::FALSE_KW + { + tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() + 
} else if token.kind().is_keyword() || token.kind() == IDENT || token.kind() == LIFETIME { diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs index 87bcf8b0d..343416421 100644 --- a/crates/ra_mbe/src/tt_cursor.rs +++ b/crates/ra_mbe/src/tt_cursor.rs @@ -1,5 +1,7 @@ use crate::ParseError; use crate::subtree_parser::Parser; +use crate::subtree_source::TokenPeek; +use smallvec::{SmallVec, smallvec}; #[derive(Clone)] pub(crate) struct TtCursor<'a> { @@ -162,6 +164,95 @@ impl<'a> TtCursor<'a> { } } + fn eat_punct3(&mut self, p: &tt::Punct) -> Option> { + let sec = self.eat_punct()?.clone(); + let third = self.eat_punct()?.clone(); + Some(smallvec![p.clone(), sec, third]) + } + + fn eat_punct2(&mut self, p: &tt::Punct) -> Option> { + let sec = self.eat_punct()?.clone(); + Some(smallvec![p.clone(), sec]) + } + + fn eat_multi_char_punct<'b, I>( + &mut self, + p: &tt::Punct, + iter: &mut TokenPeek<'b, I>, + ) -> Option> + where + I: Iterator, + { + if let Some((m, _)) = iter.current_punct3(p) { + if let r @ Some(_) = match m { + ('<', '<', '=') | ('>', '>', '=') | ('.', '.', '.') | ('.', '.', '=') => { + self.eat_punct3(p) + } + _ => None, + } { + return r; + } + } + + if let Some((m, _)) = iter.current_punct2(p) { + if let r @ Some(_) = match m { + ('<', '=') + | ('>', '=') + | ('+', '=') + | ('-', '=') + | ('|', '=') + | ('&', '=') + | ('^', '=') + | ('/', '=') + | ('*', '=') + | ('%', '=') + | ('&', '&') + | ('|', '|') + | ('<', '<') + | ('>', '>') + | ('-', '>') + | ('!', '=') + | ('=', '>') + | ('=', '=') + | ('.', '.') + | (':', ':') => self.eat_punct2(p), + + _ => None, + } { + return r; + } + } + + None + } + + pub(crate) fn eat_seperator(&mut self) -> Option { + match self.eat()? { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + Some(crate::Separator::Literal(lit.clone())) + } + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + Some(crate::Separator::Ident(ident.clone())) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + match punct.char { + '*' | '+' | '?' => return None, + _ => {} + }; + + // FIXME: The parser is only handle some compositeable punct, + // But at this phase, some punct still is jointed. + // So we by pass that check here. + let mut peekable = TokenPeek::new(self.subtree.token_trees[self.pos..].iter()); + let puncts = self.eat_multi_char_punct(punct, &mut peekable); + let puncts = puncts.unwrap_or_else(|| smallvec![punct.clone()]); + + Some(crate::Separator::Puncts(puncts)) + } + _ => None, + } + } + #[must_use] pub(crate) fn save(&self) -> TtCursorMemento { TtCursorMemento { pos: self.pos } -- cgit v1.2.3
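
For context, here is a minimal sketch in plain Rust (outside the ra_mbe test harness, using only standard `macro_rules!`) of the macro shape this commit adds support for: the transcriber repetition `$($i)&&*` uses the multi-character punct `&&` as its separator, and the matched fragments are the keyword literals `true`/`false`, which the syntax bridge now converts to `tt::Literal`. It mirrors the new `test_match_group_with_multichar_sep` test above.

```rust
// Sketch of the macro pattern exercised by test_match_group_with_multichar_sep:
// a repetition whose separator is the multi-char punct `&&`.
macro_rules! foo {
    (fn $name:ident { $($i:literal)* }) => {
        fn $name() -> bool { $($i)&&* }
    };
}

// Expands to `fn baz() -> bool { true && true }`.
foo!(fn baz { true true });

fn main() {
    assert!(baz());
}
```

Expanding this with rustc is what the mbe expander has to reproduce: the separator is pushed once per repetition step and the trailing separator tokens are popped at the end, which is why `has_sep` becomes a counter (`has_seps`) in `mbe_expander.rs`.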