From 74e846b9ecffd819af3109c50e48517b560b17cf Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Thu, 11 Apr 2019 11:02:41 +0800 Subject: Add L_DOLLAR and R_DOLLAR --- crates/ra_parser/src/syntax_kind/generated.rs | 4 ++++ crates/ra_syntax/src/grammar.ron | 2 ++ 2 files changed, 6 insertions(+) diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs index 547af1b27..b16177de2 100644 --- a/crates/ra_parser/src/syntax_kind/generated.rs +++ b/crates/ra_parser/src/syntax_kind/generated.rs @@ -119,6 +119,8 @@ pub enum SyntaxKind { LIFETIME, COMMENT, SHEBANG, + L_DOLLAR, + R_DOLLAR, SOURCE_FILE, STRUCT_DEF, ENUM_DEF, @@ -460,6 +462,8 @@ impl SyntaxKind { LIFETIME => &SyntaxInfo { name: "LIFETIME" }, COMMENT => &SyntaxInfo { name: "COMMENT" }, SHEBANG => &SyntaxInfo { name: "SHEBANG" }, + L_DOLLAR => &SyntaxInfo { name: "L_DOLLAR" }, + R_DOLLAR => &SyntaxInfo { name: "R_DOLLAR" }, SOURCE_FILE => &SyntaxInfo { name: "SOURCE_FILE" }, STRUCT_DEF => &SyntaxInfo { name: "STRUCT_DEF" }, ENUM_DEF => &SyntaxInfo { name: "ENUM_DEF" }, diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron index 0a35e25d5..b41241287 100644 --- a/crates/ra_syntax/src/grammar.ron +++ b/crates/ra_syntax/src/grammar.ron @@ -118,6 +118,8 @@ Grammar( "LIFETIME", "COMMENT", "SHEBANG", + "L_DOLLAR", + "R_DOLLAR", ], nodes: [ "SOURCE_FILE", -- cgit v1.2.3 From f66300ccd1e6ef05b633cda06c87f913d1c91a1e Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Sat, 13 Apr 2019 01:50:05 +0800 Subject: Remove skip Delimiter::None and handle Dollars --- crates/ra_mbe/src/lib.rs | 57 +++++- crates/ra_mbe/src/subtree_parser.rs | 3 +- crates/ra_mbe/src/subtree_source.rs | 354 ++++++++++++++++-------------------- crates/ra_mbe/src/syntax_bridge.rs | 47 +++-- crates/ra_parser/src/parser.rs | 12 ++ 5 files changed, 255 insertions(+), 218 deletions(-) diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs index a21ea4dbc..4126854d1 100644 --- a/crates/ra_mbe/src/lib.rs +++ b/crates/ra_mbe/src/lib.rs @@ -39,7 +39,7 @@ pub enum ExpandError { BindingError(String), } -pub use crate::syntax_bridge::{ast_to_token_tree, token_tree_to_ast_item_list}; +pub use crate::syntax_bridge::{ast_to_token_tree, token_tree_to_ast_item_list, syntax_node_to_token_tree}; /// This struct contains AST for a single `macro_rules` definition. What might /// be very confusing is that AST has almost exactly the same shape as @@ -192,6 +192,15 @@ impl_froms!(TokenTree: Leaf, Subtree); pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { let expanded = expand(rules, invocation); assert_eq!(expanded.to_string(), expansion); + + let tree = token_tree_to_ast_item_list(&expanded); + + // Eat all white space by parse it back and forth + let expansion = ast::SourceFile::parse(expansion); + let expansion = syntax_node_to_token_tree(expansion.syntax()).unwrap().0; + let file = token_tree_to_ast_item_list(&expansion); + + assert_eq!(tree.syntax().debug_dump().trim(), file.syntax().debug_dump().trim()); } #[test] @@ -287,6 +296,36 @@ impl_froms!(TokenTree: Leaf, Subtree); assert_expansion(&rules, "foo! { Foo,# Bar }", "struct Foo ; struct Bar ;"); } + #[test] + fn test_match_group_pattern_with_multiple_defs() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ ($ i:ident),*) => ( struct Bar { $ ( + fn $ i {} + )*} ); + } +"#, + ); + + assert_expansion(&rules, "foo! 
{ foo, bar }", "struct Bar {fn foo {} fn bar {}}"); + } + + #[test] + fn test_match_group_pattern_with_multiple_statement() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ ($ i:ident),*) => ( fn baz { $ ( + $ i (); + )*} ); + } +"#, + ); + + assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar () ;}"); + } + #[test] fn expand_to_item_list() { let rules = create_rules( @@ -415,7 +454,7 @@ SOURCE_FILE@[0; 40) assert_expansion( &rules, "foo! { bar::::baz:: }", - "fn foo () {let a = bar ::< u8 > ::baz ::< u8 > ;}", + "fn foo () {let a = bar :: < u8 > :: baz :: < u8 > ;}", ); } @@ -432,4 +471,18 @@ SOURCE_FILE@[0; 40) ); assert_expansion(&rules, "foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); } + + #[test] + fn test_path_with_path() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ i:path) => { + fn foo() { let a = $ i :: bar; } + } + } +"#, + ); + assert_expansion(&rules, "foo! { foo }", "fn foo () {let a = foo :: bar ;}"); + } } diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs index ce39a40bb..164240d92 100644 --- a/crates/ra_mbe/src/subtree_parser.rs +++ b/crates/ra_mbe/src/subtree_parser.rs @@ -34,8 +34,7 @@ impl<'a> Parser<'a> { where F: FnOnce(&dyn TokenSource, &mut dyn TreeSink), { - let mut src = SubtreeTokenSource::new(self.subtree); - src.start_from_nth(*self.cur_pos); + let mut src = SubtreeTokenSource::new(&self.subtree.token_trees[*self.cur_pos..]); let mut sink = OffsetTokenSink { token_pos: 0 }; f(&src, &mut sink); diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 4b37c2bda..6aa20057e 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs @@ -2,6 +2,64 @@ use ra_parser::{TokenSource}; use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*}; use std::cell::{RefCell}; +// A Sequece of Token, +#[derive(Debug, Clone, Eq, PartialEq)] +pub(super) enum TokenSeq<'a> { + Subtree(&'a tt::Subtree), + Seq(&'a [tt::TokenTree]), +} + +impl<'a> From<&'a tt::Subtree> for TokenSeq<'a> { + fn from(s: &'a tt::Subtree) -> TokenSeq<'a> { + TokenSeq::Subtree(s) + } +} + +impl<'a> From<&'a [tt::TokenTree]> for TokenSeq<'a> { + fn from(s: &'a [tt::TokenTree]) -> TokenSeq<'a> { + TokenSeq::Seq(s) + } +} + +enum DelimToken<'a> { + Delim(&'a tt::Delimiter, bool), + Token(&'a tt::TokenTree), + End, +} + +impl<'a> TokenSeq<'a> { + fn get(&self, pos: usize) -> DelimToken<'a> { + match self { + TokenSeq::Subtree(subtree) => { + let len = subtree.token_trees.len() + 2; + match pos { + p if p >= len => DelimToken::End, + p if p == len - 1 => DelimToken::Delim(&subtree.delimiter, true), + 0 => DelimToken::Delim(&subtree.delimiter, false), + p => DelimToken::Token(&subtree.token_trees[p - 1]), + } + } + TokenSeq::Seq(tokens) => { + tokens.get(pos).map(DelimToken::Token).unwrap_or(DelimToken::End) + } + } + } + + fn len(&self) -> usize { + match self { + TokenSeq::Subtree(subtree) => subtree.token_trees.len() + 2, + TokenSeq::Seq(tokens) => tokens.len(), + } + } + + fn child_slice(&self) -> &[tt::TokenTree] { + match self { + TokenSeq::Subtree(subtree) => &subtree.token_trees, + TokenSeq::Seq(tokens) => &tokens, + } + } +} + #[derive(Debug, Clone, Eq, PartialEq)] struct TtToken { pub kind: SyntaxKind, @@ -12,29 +70,27 @@ struct TtToken { #[derive(Debug, Clone, Eq, PartialEq)] enum WalkCursor { - DelimiterBegin(Option), - Token(usize, Option), - DelimiterEnd(Option), + Token(usize, TtToken), Eof, } #[derive(Debug)] struct 
SubTreeWalker<'a> { pos: usize, - stack: Vec<(&'a tt::Subtree, Option)>, + stack: Vec<(TokenSeq<'a>, usize)>, cursor: WalkCursor, last_steps: Vec, - subtree: &'a tt::Subtree, + ts: TokenSeq<'a>, } impl<'a> SubTreeWalker<'a> { - fn new(subtree: &tt::Subtree) -> SubTreeWalker { + fn new(ts: TokenSeq<'a>) -> SubTreeWalker { let mut res = SubTreeWalker { pos: 0, stack: vec![], cursor: WalkCursor::Eof, last_steps: vec![], - subtree, + ts, }; res.reset(); @@ -47,209 +103,106 @@ impl<'a> SubTreeWalker<'a> { fn reset(&mut self) { self.pos = 0; - self.stack = vec![(self.subtree, None)]; - self.cursor = WalkCursor::DelimiterBegin(convert_delim(self.subtree.delimiter, false)); + self.stack = vec![]; self.last_steps = vec![]; - while self.is_empty_delimiter() { - self.forward_unchecked(); - } - } - - // This funciton will fast forward the cursor, - // Such that backward will stop at `start_pos` point - fn start_from_nth(&mut self, start_pos: usize) { - self.reset(); - self.pos = start_pos; - self.cursor = self.walk_token(start_pos, 0, false); - - while self.is_empty_delimiter() { - self.forward_unchecked(); + self.cursor = match self.ts.get(0) { + DelimToken::Token(token) => match token { + tt::TokenTree::Subtree(subtree) => { + WalkCursor::Token(0, convert_delim(subtree.delimiter, false)) + } + tt::TokenTree::Leaf(leaf) => { + let next_tokens = self.ts.child_slice(); + WalkCursor::Token(0, convert_leaf(&next_tokens, leaf)) + } + }, + DelimToken::Delim(delim, is_end) => { + assert!(!is_end); + WalkCursor::Token(0, convert_delim(*delim, false)) + } + DelimToken::End => WalkCursor::Eof, } } fn current(&self) -> Option<&TtToken> { match &self.cursor { - WalkCursor::DelimiterBegin(t) => t.as_ref(), - WalkCursor::Token(_, t) => t.as_ref(), - WalkCursor::DelimiterEnd(t) => t.as_ref(), + WalkCursor::Token(_, t) => Some(t), WalkCursor::Eof => None, } } - fn is_empty_delimiter(&self) -> bool { - match &self.cursor { - WalkCursor::DelimiterBegin(None) => true, - WalkCursor::DelimiterEnd(None) => true, - _ => false, - } + fn top(&self) -> &TokenSeq { + self.stack.last().map(|(t, _)| t).unwrap_or(&self.ts) } - /// Move cursor backward by 1 step with empty checking + /// Move cursor backward by 1 step fn backward(&mut self) { if self.last_steps.is_empty() { return; } + self.pos -= 1; - loop { - self.backward_unchecked(); - // Skip Empty delimiter - if self.last_steps.is_empty() || !self.is_empty_delimiter() { - break; - } - } + let last_step = self.last_steps.pop().unwrap(); - // Move forward if it is empty delimiter - if self.last_steps.is_empty() { - while self.is_empty_delimiter() { - self.forward_unchecked(); + self.cursor = match self.cursor { + WalkCursor::Token(idx, _) => self.walk_token(idx, last_step, true), + WalkCursor::Eof => { + let len = self.top().len(); + self.walk_token(len, last_step, true) } } } - /// Move cursor backward by 1 step without empty check - /// - /// Depends on the current state of cursor: - /// - /// * Delimiter Begin => Pop the stack, goto last walking token (`walk_token`) - /// * Token => Goto prev token (`walk_token`) - /// * Delimiter End => Goto the last child token (`walk_token`) - /// * Eof => push the root subtree, and set it as Delimiter End - fn backward_unchecked(&mut self) { - if self.last_steps.is_empty() { - return; - } - - let last_step = self.last_steps.pop().unwrap(); - let do_walk_token = match self.cursor { - WalkCursor::DelimiterBegin(_) => None, - WalkCursor::Token(u, _) => Some(u), - WalkCursor::DelimiterEnd(_) => { - let (top, _) = self.stack.last().unwrap(); 
- Some(top.token_trees.len()) - } - WalkCursor::Eof => None, - }; - - self.cursor = match do_walk_token { - Some(u) => self.walk_token(u, last_step, true), - None => match self.cursor { - WalkCursor::Eof => { - self.stack.push((self.subtree, None)); - WalkCursor::DelimiterEnd(convert_delim( - self.stack.last().unwrap().0.delimiter, - true, - )) - } - _ => { - let (_, last_top_cursor) = self.stack.pop().unwrap(); - assert!(!self.stack.is_empty()); - - self.walk_token(last_top_cursor.unwrap(), last_step, true) - } - }, - }; - } - - /// Move cursor forward by 1 step with empty checking + /// Move cursor forward by 1 step fn forward(&mut self) { if self.is_eof() { return; } - self.pos += 1; - loop { - self.forward_unchecked(); - if !self.is_empty_delimiter() { - break; - } - } - } - - /// Move cursor forward by 1 step without empty checking - /// - /// Depends on the current state of cursor: - /// - /// * Delimiter Begin => Goto the first child token (`walk_token`) - /// * Token => Goto next token (`walk_token`) - /// * Delimiter End => Pop the stack, goto last walking token (`walk_token`) - /// - fn forward_unchecked(&mut self) { - if self.is_eof() { - return; - } let step = self.current().map(|x| x.n_tokens).unwrap_or(1); self.last_steps.push(step); - let do_walk_token = match self.cursor { - WalkCursor::DelimiterBegin(_) => Some((0, 0)), - WalkCursor::Token(u, _) => Some((u, step)), - WalkCursor::DelimiterEnd(_) => None, - _ => unreachable!(), - }; - - self.cursor = match do_walk_token { - Some((u, step)) => self.walk_token(u, step, false), - None => { - let (_, last_top_idx) = self.stack.pop().unwrap(); - match self.stack.last() { - Some(_) => self.walk_token(last_top_idx.unwrap(), 1, false), - None => WalkCursor::Eof, - } - } - }; + if let WalkCursor::Token(u, _) = self.cursor { + self.cursor = self.walk_token(u, step, false) + } } /// Traversal child token - /// Depends on the new position, it returns: - /// - /// * new position < 0 => DelimiterBegin - /// * new position > token_tree.len() => DelimiterEnd - /// * if new position is a subtree, depends on traversal direction: - /// ** backward => DelimiterEnd - /// ** forward => DelimiterBegin - /// * if new psoition is a leaf, return walk_leaf() fn walk_token(&mut self, pos: usize, offset: usize, backward: bool) -> WalkCursor { - let (top, _) = self.stack.last().unwrap(); + let top = self.stack.last().map(|(t, _)| t).unwrap_or(&self.ts); if backward && pos < offset { - return WalkCursor::DelimiterBegin(convert_delim( - self.stack.last().unwrap().0.delimiter, - false, - )); - } - - if !backward && pos + offset >= top.token_trees.len() { - return WalkCursor::DelimiterEnd(convert_delim( - self.stack.last().unwrap().0.delimiter, - true, - )); + let (_, last_idx) = self.stack.pop().unwrap(); + return self.walk_token(last_idx, offset, backward); } let pos = if backward { pos - offset } else { pos + offset }; - match &top.token_trees[pos] { - tt::TokenTree::Subtree(subtree) => { - self.stack.push((subtree, Some(pos))); - let delim = convert_delim(self.stack.last().unwrap().0.delimiter, backward); - if backward { - WalkCursor::DelimiterEnd(delim) - } else { - WalkCursor::DelimiterBegin(delim) + match top.get(pos) { + DelimToken::Token(token) => match token { + tt::TokenTree::Subtree(subtree) => { + let ts = TokenSeq::from(subtree); + let new_idx = if backward { ts.len() - 1 } else { 0 }; + self.stack.push((ts, pos)); + WalkCursor::Token(new_idx, convert_delim(subtree.delimiter, backward)) + } + tt::TokenTree::Leaf(leaf) => { + let next_tokens = 
top.child_slice(); + WalkCursor::Token(pos, convert_leaf(&next_tokens[pos..], leaf)) } + }, + DelimToken::Delim(delim, is_end) => { + WalkCursor::Token(pos, convert_delim(*delim, is_end)) } - tt::TokenTree::Leaf(leaf) => WalkCursor::Token(pos, Some(self.walk_leaf(leaf, pos))), - } - } - - fn walk_leaf(&mut self, leaf: &tt::Leaf, pos: usize) -> TtToken { - match leaf { - tt::Leaf::Literal(l) => convert_literal(l), - tt::Leaf::Ident(ident) => convert_ident(ident), - tt::Leaf::Punct(punct) => { - let (top, _) = self.stack.last().unwrap(); - convert_punct(punct, top, pos) + DelimToken::End => { + // it is the top level + if let Some((_, last_idx)) = self.stack.pop() { + assert!(!backward); + self.walk_token(last_idx, offset, backward) + } else { + WalkCursor::Eof + } } } } @@ -263,27 +216,20 @@ pub(crate) trait Querier { #[derive(Debug)] pub(crate) struct WalkerOwner<'a> { walker: RefCell>, - offset: usize, } impl<'a> WalkerOwner<'a> { - fn new(subtree: &'a tt::Subtree) -> Self { - WalkerOwner { walker: RefCell::new(SubTreeWalker::new(subtree)), offset: 0 } + fn new>>(ts: I) -> Self { + WalkerOwner { walker: RefCell::new(SubTreeWalker::new(ts.into())) } } fn get<'b>(&self, pos: usize) -> Option { - self.set_walker_pos(pos); + self.set_pos(pos); let walker = self.walker.borrow(); walker.current().cloned() } - fn start_from_nth(&mut self, pos: usize) { - self.offset = pos; - self.walker.borrow_mut().start_from_nth(pos); - } - - fn set_walker_pos(&self, mut pos: usize) { - pos += self.offset; + fn set_pos(&self, pos: usize) { let mut walker = self.walker.borrow_mut(); while pos > walker.pos && !walker.is_eof() { walker.forward(); @@ -294,19 +240,26 @@ impl<'a> WalkerOwner<'a> { } fn collect_token_trees(&mut self, n: usize) -> Vec<&tt::TokenTree> { - self.start_from_nth(self.offset); - let mut res = vec![]; let mut walker = self.walker.borrow_mut(); + walker.reset(); - while walker.pos - self.offset < n { + while walker.pos < n { if let WalkCursor::Token(u, tt) = &walker.cursor { - if walker.stack.len() == 1 { - // We only collect the topmost child - res.push(&walker.stack[0].0.token_trees[*u]); - if let Some(tt) = tt { - for i in 0..tt.n_tokens - 1 { - res.push(&walker.stack[0].0.token_trees[u + i]); + // We only collect the topmost child + if walker.stack.len() == 0 { + for i in 0..tt.n_tokens { + if let DelimToken::Token(token) = walker.ts.get(u + i) { + res.push(token); + } + } + } else if walker.stack.len() == 1 { + if let DelimToken::Delim(_, is_end) = walker.ts.get(*u) { + if !is_end { + let (_, last_idx) = &walker.stack[0]; + if let DelimToken::Token(token) = walker.ts.get(*last_idx) { + res.push(token); + } } } } @@ -331,12 +284,8 @@ pub(crate) struct SubtreeTokenSource<'a> { } impl<'a> SubtreeTokenSource<'a> { - pub fn new(subtree: &tt::Subtree) -> SubtreeTokenSource { - SubtreeTokenSource { walker: WalkerOwner::new(subtree) } - } - - pub fn start_from_nth(&mut self, n: usize) { - self.walker.start_from_nth(n); + pub fn new>>(ts: I) -> SubtreeTokenSource<'a> { + SubtreeTokenSource { walker: WalkerOwner::new(ts) } } pub fn querier<'b>(&'a self) -> &'b WalkerOwner<'a> @@ -467,18 +416,18 @@ where None } -fn convert_delim(d: tt::Delimiter, closing: bool) -> Option { +fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { let (kinds, texts) = match d { tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"), tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"), - tt::Delimiter::None => return None, + tt::Delimiter::None => 
([L_DOLLAR, R_DOLLAR], ""), }; let idx = closing as usize; let kind = kinds[idx]; - let text = &texts[idx..texts.len() - (1 - idx)]; - Some(TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text), n_tokens: 1 }) + let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" }; + TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text), n_tokens: 1 } } fn convert_literal(l: &tt::Literal) -> TtToken { @@ -495,8 +444,9 @@ fn convert_ident(ident: &tt::Ident) -> TtToken { TtToken { kind, is_joint_to_next: false, text: ident.text.clone(), n_tokens: 1 } } -fn convert_punct(p: &tt::Punct, parent: &tt::Subtree, next: usize) -> TtToken { - let iter = parent.token_trees[next + 1..].iter(); +fn convert_punct(p: &tt::Punct, next_tokens: &[tt::TokenTree]) -> TtToken { + let mut iter = next_tokens.iter(); + iter.next(); let mut peek = TokenPeek::new(iter); if let Some((kind, is_joint_to_next, text, size)) = convert_multi_char_punct(p, &mut peek) { @@ -519,3 +469,11 @@ fn convert_punct(p: &tt::Punct, parent: &tt::Subtree, next: usize) -> TtToken { TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text, n_tokens: 1 } } } + +fn convert_leaf(tokens: &[tt::TokenTree], leaf: &tt::Leaf) -> TtToken { + match leaf { + tt::Leaf::Literal(l) => convert_literal(l), + tt::Leaf::Ident(ident) => convert_ident(ident), + tt::Leaf::Punct(punct) => convert_punct(punct, tokens), + } +} diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 19c17bd55..28ded7870 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -22,6 +22,14 @@ pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap) Some((tt, token_map)) } +/// Convert the syntax node to a `TokenTree` (what macro +/// will consume). 
+pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> { + let mut token_map = TokenMap::default(); + let tt = convert_tt(&mut token_map, node.range().start(), node)?; + Some((tt, token_map)) +} + /// Parses the token tree (result of macro expansion) as a sequence of items pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc { let token_source = SubtreeTokenSource::new(tt); @@ -51,15 +59,17 @@ fn convert_tt( ) -> Option { let first_child = tt.first_child_or_token()?; let last_child = tt.last_child_or_token()?; - let delimiter = match (first_child.kind(), last_child.kind()) { - (L_PAREN, R_PAREN) => tt::Delimiter::Parenthesis, - (L_CURLY, R_CURLY) => tt::Delimiter::Brace, - (L_BRACK, R_BRACK) => tt::Delimiter::Bracket, - _ => return None, + let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) { + (L_PAREN, R_PAREN) => (tt::Delimiter::Parenthesis, true), + (L_CURLY, R_CURLY) => (tt::Delimiter::Brace, true), + (L_BRACK, R_BRACK) => (tt::Delimiter::Bracket, true), + _ => (tt::Delimiter::None, false), }; + let mut token_trees = Vec::new(); - for child in tt.children_with_tokens().skip(1) { - if child == first_child || child == last_child || child.kind().is_trivia() { + for child in tt.children_with_tokens().skip(skip_first as usize) { + if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia() + { continue; } match child { @@ -127,6 +137,11 @@ impl<'a, Q: Querier> TtTreeSink<'a, Q> { impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { + if kind == L_DOLLAR || kind == R_DOLLAR { + self.token_pos += n_tokens as usize; + return; + } + for _ in 0..n_tokens { self.buf += &self.src_querier.token(self.token_pos).1; self.token_pos += 1; @@ -176,19 +191,19 @@ mod tests { let query = tt_src.querier(); - // [{] + // [${] // [let] [a] [=] ['c'] [;] - assert_eq!(query.token(1 + 3).1, "'c'"); - assert_eq!(query.token(1 + 3).0, CHAR); + assert_eq!(query.token(2 + 3).1, "'c'"); + assert_eq!(query.token(2 + 3).0, CHAR); // [let] [c] [=] [1000] [;] - assert_eq!(query.token(1 + 5 + 3).1, "1000"); - assert_eq!(query.token(1 + 5 + 3).0, INT_NUMBER); + assert_eq!(query.token(2 + 5 + 3).1, "1000"); + assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER); // [let] [f] [=] [12E+99_f64] [;] - assert_eq!(query.token(1 + 10 + 3).1, "12E+99_f64"); - assert_eq!(query.token(1 + 10 + 3).0, FLOAT_NUMBER); + assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64"); + assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER); // [let] [s] [=] ["rust1"] [;] - assert_eq!(query.token(1 + 15 + 3).1, "\"rust1\""); - assert_eq!(query.token(1 + 15 + 3).0, STRING); + assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\""); + assert_eq!(query.token(2 + 15 + 3).0, STRING); } } diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs index 56f8b7126..3cb57ed9c 100644 --- a/crates/ra_parser/src/parser.rs +++ b/crates/ra_parser/src/parser.rs @@ -99,6 +99,8 @@ impl<'t> Parser<'t> { /// consumed between the `start` and the corresponding `Marker::complete` /// belong to the same node. 
pub(crate) fn start(&mut self) -> Marker { + self.eat_dollars(); + let pos = self.events.len() as u32; self.push_event(Event::tombstone()); Marker::new(pos) @@ -180,13 +182,23 @@ impl<'t> Parser<'t> { } fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { + self.eat_dollars(); self.token_pos += usize::from(n_raw_tokens); self.push_event(Event::Token { kind, n_raw_tokens }); + self.eat_dollars(); } fn push_event(&mut self, event: Event) { self.events.push(event) } + + fn eat_dollars(&mut self) { + while self.nth(0) == SyntaxKind::L_DOLLAR || self.nth(0) == SyntaxKind::R_DOLLAR { + let kind = self.nth(0); + self.token_pos += 1; + self.push_event(Event::Token { kind, n_raw_tokens: 1 }); + } + } } /// See `Parser::start`. -- cgit v1.2.3 From 6646d49f238bb92d55fcb4900830f19faa2994a5 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Sat, 13 Apr 2019 18:38:31 +0800 Subject: Fix bug and add expr , pat , ty matcher --- crates/ra_mbe/src/lib.rs | 97 +++++++++++++++++++++++++++++ crates/ra_mbe/src/mbe_expander.rs | 13 ++++ crates/ra_mbe/src/subtree_parser.rs | 12 ++++ crates/ra_mbe/src/subtree_source.rs | 14 ++++- crates/ra_mbe/src/tt_cursor.rs | 15 +++++ crates/ra_parser/src/grammar.rs | 12 ++++ crates/ra_parser/src/grammar/expressions.rs | 50 ++++++++++++--- crates/ra_parser/src/grammar/patterns.rs | 2 +- crates/ra_parser/src/lib.rs | 33 +++++++--- crates/ra_parser/src/parser.rs | 92 +++++++++++++++++++++++---- 10 files changed, 307 insertions(+), 33 deletions(-) diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs index 4126854d1..a530f3b03 100644 --- a/crates/ra_mbe/src/lib.rs +++ b/crates/ra_mbe/src/lib.rs @@ -189,6 +189,14 @@ impl_froms!(TokenTree: Leaf, Subtree); rules.expand(&invocation_tt).unwrap() } + pub(crate) fn expand_to_syntax( + rules: &MacroRules, + invocation: &str, + ) -> ra_syntax::TreeArc { + let expanded = expand(rules, invocation); + token_tree_to_ast_item_list(&expanded) + } + pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { let expanded = expand(rules, invocation); assert_eq!(expanded.to_string(), expansion); @@ -485,4 +493,93 @@ SOURCE_FILE@[0; 40) ); assert_expansion(&rules, "foo! { foo }", "fn foo () {let a = foo :: bar ;}"); } + + #[test] + fn test_expr() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ i:expr) => { + fn bar() { $ i; } + } + } +"#, + ); + + assert_expansion( + &rules, + "foo! { 2 + 2 * baz(3).quux() }", + "fn bar () {2 + 2 * baz (3) . quux () ;}", + ); + } + + #[test] + fn test_expr_order() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ i:expr) => { + fn bar() { $ i * 2; } + } + } +"#, + ); + + assert_eq!( + expand_to_syntax(&rules, "foo! { 1 + 1 }").syntax().debug_dump().trim(), + r#"SOURCE_FILE@[0; 15) + FN_DEF@[0; 15) + FN_KW@[0; 2) "fn" + NAME@[2; 5) + IDENT@[2; 5) "bar" + PARAM_LIST@[5; 7) + L_PAREN@[5; 6) "(" + R_PAREN@[6; 7) ")" + BLOCK@[7; 15) + L_CURLY@[7; 8) "{" + EXPR_STMT@[8; 14) + BIN_EXPR@[8; 13) + BIN_EXPR@[8; 11) + LITERAL@[8; 9) + INT_NUMBER@[8; 9) "1" + PLUS@[9; 10) "+" + LITERAL@[10; 11) + INT_NUMBER@[10; 11) "1" + STAR@[11; 12) "*" + LITERAL@[12; 13) + INT_NUMBER@[12; 13) "2" + SEMI@[13; 14) ";" + R_CURLY@[14; 15) "}""#, + ); + } + + #[test] + fn test_ty() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ i:ty) => ( + fn bar() -> $ i { unimplemented!() } + ) + } +"#, + ); + assert_expansion( + &rules, + "foo! { Baz }", + "fn bar () -> Baz < u8 > {unimplemented ! 
()}", + ); + } + + #[test] + fn test_pat_() { + let rules = create_rules( + r#" + macro_rules! foo { + ($ i:pat) => { fn foo() { let $ i; } } + } +"#, + ); + assert_expansion(&rules, "foo! { (a, b) }", "fn foo () {let (a , b) ;}"); + } } diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs index ce41d7225..7a259f338 100644 --- a/crates/ra_mbe/src/mbe_expander.rs +++ b/crates/ra_mbe/src/mbe_expander.rs @@ -144,6 +144,19 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result { + let expr = + input.eat_expr().ok_or(ExpandError::UnexpectedToken)?.clone(); + res.inner.insert(text.clone(), Binding::Simple(expr.into())); + } + "ty" => { + let ty = input.eat_ty().ok_or(ExpandError::UnexpectedToken)?.clone(); + res.inner.insert(text.clone(), Binding::Simple(ty.into())); + } + "pat" => { + let pat = input.eat_pat().ok_or(ExpandError::UnexpectedToken)?.clone(); + res.inner.insert(text.clone(), Binding::Simple(pat.into())); + } _ => return Err(ExpandError::UnexpectedToken), } } diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs index 164240d92..13d5d2169 100644 --- a/crates/ra_mbe/src/subtree_parser.rs +++ b/crates/ra_mbe/src/subtree_parser.rs @@ -30,6 +30,18 @@ impl<'a> Parser<'a> { self.parse(ra_parser::parse_path) } + pub fn parse_expr(self) -> Option { + self.parse(ra_parser::parse_expr) + } + + pub fn parse_ty(self) -> Option { + self.parse(ra_parser::parse_ty) + } + + pub fn parse_pat(self) -> Option { + self.parse(ra_parser::parse_pat) + } + fn parse(self, f: F) -> Option where F: FnOnce(&dyn TokenSource, &mut dyn TreeSink), diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 6aa20057e..0a070b46a 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs @@ -109,6 +109,8 @@ impl<'a> SubTreeWalker<'a> { self.cursor = match self.ts.get(0) { DelimToken::Token(token) => match token { tt::TokenTree::Subtree(subtree) => { + let ts = TokenSeq::from(subtree); + self.stack.push((ts, 0)); WalkCursor::Token(0, convert_delim(subtree.delimiter, false)) } tt::TokenTree::Leaf(leaf) => { @@ -254,7 +256,7 @@ impl<'a> WalkerOwner<'a> { } } } else if walker.stack.len() == 1 { - if let DelimToken::Delim(_, is_end) = walker.ts.get(*u) { + if let DelimToken::Delim(_, is_end) = walker.top().get(*u) { if !is_end { let (_, last_idx) = &walker.stack[0]; if let DelimToken::Token(token) = walker.ts.get(*last_idx) { @@ -310,10 +312,16 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { } } fn is_token_joint_to_next(&self, pos: usize) -> bool { - self.walker.get(pos).unwrap().is_joint_to_next + match self.walker.get(pos) { + Some(t) => t.is_joint_to_next, + _ => false, + } } fn is_keyword(&self, pos: usize, kw: &str) -> bool { - self.walker.get(pos).unwrap().text == *kw + match self.walker.get(pos) { + Some(t) => t.text == *kw, + _ => false, + } } } diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs index d29faa77c..f6cefe087 100644 --- a/crates/ra_mbe/src/tt_cursor.rs +++ b/crates/ra_mbe/src/tt_cursor.rs @@ -84,6 +84,21 @@ impl<'a> TtCursor<'a> { parser.parse_path() } + pub(crate) fn eat_expr(&mut self) -> Option { + let parser = Parser::new(&mut self.pos, self.subtree); + parser.parse_expr() + } + + pub(crate) fn eat_ty(&mut self) -> Option { + let parser = Parser::new(&mut self.pos, self.subtree); + parser.parse_ty() + } + + pub(crate) fn eat_pat(&mut self) -> Option { + let parser = Parser::new(&mut self.pos, self.subtree); + 
parser.parse_pat() + } + pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> { if self.at_char(char) { self.bump(); diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs index c5f510e6b..5a7a55141 100644 --- a/crates/ra_parser/src/grammar.rs +++ b/crates/ra_parser/src/grammar.rs @@ -53,6 +53,18 @@ pub(crate) fn path(p: &mut Parser) { paths::type_path(p); } +pub(crate) fn expr(p: &mut Parser) { + expressions::expr(p); +} + +pub(crate) fn type_(p: &mut Parser) { + types::type_(p) +} + +pub(crate) fn pattern(p: &mut Parser) { + patterns::pattern(p) +} + pub(crate) fn reparser( node: SyntaxKind, first_child: Option, diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs index 9b38b0a31..295577325 100644 --- a/crates/ra_parser/src/grammar/expressions.rs +++ b/crates/ra_parser/src/grammar/expressions.rs @@ -8,17 +8,20 @@ const EXPR_FIRST: TokenSet = LHS_FIRST; pub(super) fn expr(p: &mut Parser) -> BlockLike { let r = Restrictions { forbid_structs: false, prefer_stmt: false }; - expr_bp(p, r, 1).1 + let mut dollar_lvl = 0; + expr_bp(p, r, 1, &mut dollar_lvl).1 } pub(super) fn expr_stmt(p: &mut Parser) -> (Option, BlockLike) { let r = Restrictions { forbid_structs: false, prefer_stmt: true }; - expr_bp(p, r, 1) + let mut dollar_lvl = 0; + expr_bp(p, r, 1, &mut dollar_lvl) } fn expr_no_struct(p: &mut Parser) { let r = Restrictions { forbid_structs: true, prefer_stmt: false }; - expr_bp(p, r, 1); + let mut dollar_lvl = 0; + expr_bp(p, r, 1, &mut dollar_lvl); } // test block @@ -206,8 +209,23 @@ fn current_op(p: &Parser) -> (u8, Op) { } // Parses expression with binding power of at least bp. -fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option, BlockLike) { - let mut lhs = match lhs(p, r) { +fn expr_bp( + p: &mut Parser, + r: Restrictions, + mut bp: u8, + dollar_lvl: &mut usize, +) -> (Option, BlockLike) { + // `newly_dollar_open` is a flag indicated that dollar is just closed after lhs, e.g. + // `$1$ + a` + // We use this flag to skip handling it. 
+ let mut newly_dollar_open = false; + + if p.at_l_dollar() { + *dollar_lvl += p.eat_l_dollars(); + newly_dollar_open = true; + } + + let mut lhs = match lhs(p, r, dollar_lvl) { Some((lhs, blocklike)) => { // test stmt_bin_expr_ambiguity // fn foo() { @@ -223,6 +241,15 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option, }; loop { + if *dollar_lvl > 0 && p.at_r_dollar() { + *dollar_lvl -= p.eat_r_dollars(*dollar_lvl); + if !newly_dollar_open { + // We "pump" bp for make it highest priority + bp = 255; + } + newly_dollar_open = false; + } + let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ; let (op_bp, op) = current_op(p); if op_bp < bp { @@ -235,7 +262,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option, p.bump_compound(kind, n); } } - expr_bp(p, r, op_bp + 1); + + expr_bp(p, r, op_bp + 1, dollar_lvl); lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); } (Some(lhs), BlockLike::NotBlock) @@ -244,7 +272,11 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option, const LHS_FIRST: TokenSet = atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]); -fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { +fn lhs( + p: &mut Parser, + r: Restrictions, + dollar_lvl: &mut usize, +) -> Option<(CompletedMarker, BlockLike)> { let m; let kind = match p.current() { // test ref_expr @@ -275,7 +307,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> m = p.start(); p.bump(); if p.at_ts(EXPR_FIRST) { - expr_bp(p, r, 2); + expr_bp(p, r, 2, dollar_lvl); } return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock)); } @@ -287,7 +319,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> )); } }; - expr_bp(p, r, 255); + expr_bp(p, r, 255, dollar_lvl); Some((m.complete(p, kind), BlockLike::NotBlock)) } diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs index 9a307559b..03fa9b71e 100644 --- a/crates/ra_parser/src/grammar/patterns.rs +++ b/crates/ra_parser/src/grammar/patterns.rs @@ -5,7 +5,7 @@ pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE, MINUS]); pub(super) fn pattern(p: &mut Parser) { - pattern_r(p, PAT_RECOVERY_SET) + pattern_r(p, PAT_RECOVERY_SET); } /// Parses a pattern list separated by pipes `|` diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs index 3ceeeebd7..56755c394 100644 --- a/crates/ra_parser/src/lib.rs +++ b/crates/ra_parser/src/lib.rs @@ -53,20 +53,39 @@ pub trait TreeSink { fn error(&mut self, error: ParseError); } -/// Parse given tokens into the given sink as a rust file. -pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { +fn parse_from_tokens(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F) +where + F: FnOnce(&mut parser::Parser), +{ let mut p = parser::Parser::new(token_source); - grammar::root(&mut p); + f(&mut p); let events = p.finish(); event::process(tree_sink, events); } +/// Parse given tokens into the given sink as a rust file. 
+pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { + parse_from_tokens(token_source, tree_sink, grammar::root); +} + /// Parse given tokens into the given sink as a path pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { - let mut p = parser::Parser::new(token_source); - grammar::path(&mut p); - let events = p.finish(); - event::process(tree_sink, events); + parse_from_tokens(token_source, tree_sink, grammar::path); +} + +/// Parse given tokens into the given sink as a expression +pub fn parse_expr(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { + parse_from_tokens(token_source, tree_sink, grammar::expr); +} + +/// Parse given tokens into the given sink as a ty +pub fn parse_ty(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { + parse_from_tokens(token_source, tree_sink, grammar::type_); +} + +/// Parse given tokens into the given sink as a pattern +pub fn parse_pat(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) { + parse_from_tokens(token_source, tree_sink, grammar::pattern); } /// A parsing function for a specific braced-block. diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs index 3cb57ed9c..71f1f8b30 100644 --- a/crates/ra_parser/src/parser.rs +++ b/crates/ra_parser/src/parser.rs @@ -45,8 +45,9 @@ impl<'t> Parser<'t> { /// /// Useful for parsing things like `>>`. pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> { - let c1 = self.token_source.token_kind(self.token_pos); - let c2 = self.token_source.token_kind(self.token_pos + 1); + let c1 = self.nth(0); + let c2 = self.nth(1); + if self.token_source.is_token_joint_to_next(self.token_pos) { Some((c1, c2)) } else { @@ -59,9 +60,9 @@ impl<'t> Parser<'t> { /// /// Useful for parsing things like `=>>`. pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> { - let c1 = self.token_source.token_kind(self.token_pos); - let c2 = self.token_source.token_kind(self.token_pos + 1); - let c3 = self.token_source.token_kind(self.token_pos + 2); + let c1 = self.nth(0); + let c2 = self.nth(1); + let c3 = self.nth(2); if self.token_source.is_token_joint_to_next(self.token_pos) && self.token_source.is_token_joint_to_next(self.token_pos + 1) { @@ -77,7 +78,23 @@ impl<'t> Parser<'t> { let steps = self.steps.get(); assert!(steps <= 10_000_000, "the parser seems stuck"); self.steps.set(steps + 1); - self.token_source.token_kind(self.token_pos + n) + + // It is beecause the Dollar will appear between nth + // Following code skips through it + let mut non_dollars_count = 0; + let mut i = 0; + + loop { + let kind = self.token_source.token_kind(self.token_pos + i); + i += 1; + + match kind { + EOF => return EOF, + SyntaxKind::L_DOLLAR | SyntaxKind::R_DOLLAR => {} + _ if non_dollars_count == n => return kind, + _ => non_dollars_count += 1, + } + } } /// Checks if the current token is `kind`. @@ -99,8 +116,6 @@ impl<'t> Parser<'t> { /// consumed between the `start` and the corresponding `Marker::complete` /// belong to the same node. 
pub(crate) fn start(&mut self) -> Marker { - self.eat_dollars(); - let pos = self.events.len() as u32; self.push_event(Event::tombstone()); Marker::new(pos) @@ -185,7 +200,6 @@ impl<'t> Parser<'t> { self.eat_dollars(); self.token_pos += usize::from(n_raw_tokens); self.push_event(Event::Token { kind, n_raw_tokens }); - self.eat_dollars(); } fn push_event(&mut self, event: Event) { @@ -193,12 +207,64 @@ impl<'t> Parser<'t> { } fn eat_dollars(&mut self) { - while self.nth(0) == SyntaxKind::L_DOLLAR || self.nth(0) == SyntaxKind::R_DOLLAR { - let kind = self.nth(0); - self.token_pos += 1; - self.push_event(Event::Token { kind, n_raw_tokens: 1 }); + loop { + match self.token_source.token_kind(self.token_pos) { + k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => { + self.token_pos += 1; + self.push_event(Event::Token { kind: k, n_raw_tokens: 1 }); + } + _ => { + return; + } + } + } + } + + pub(crate) fn eat_l_dollars(&mut self) -> usize { + let mut ate_count = 0; + loop { + match self.token_source.token_kind(self.token_pos) { + k @ SyntaxKind::L_DOLLAR => { + self.token_pos += 1; + self.push_event(Event::Token { kind: k, n_raw_tokens: 1 }); + ate_count += 1; + } + _ => { + return ate_count; + } + } } } + + pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize { + let mut ate_count = 0; + loop { + match self.token_source.token_kind(self.token_pos) { + k @ SyntaxKind::R_DOLLAR => { + self.token_pos += 1; + self.push_event(Event::Token { kind: k, n_raw_tokens: 1 }); + ate_count += 1; + + if max_count >= ate_count { + return ate_count; + } + } + _ => { + return ate_count; + } + } + } + } + + pub(crate) fn at_l_dollar(&self) -> bool { + let kind = self.token_source.token_kind(self.token_pos); + (kind == SyntaxKind::L_DOLLAR) + } + + pub(crate) fn at_r_dollar(&self) -> bool { + let kind = self.token_source.token_kind(self.token_pos); + (kind == SyntaxKind::R_DOLLAR) + } } /// See `Parser::start`. -- cgit v1.2.3
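
The third patch's `test_expr_order` shows why the invisible `L_DOLLAR`/`R_DOLLAR` tokens introduced in the first patch matter: the rule `fn bar() { $i * 2; }` invoked with `1 + 1` has to parse as `(1 + 1) * 2`, not `1 + 1 * 2`, even though the expanded token stream is flat. Below is a minimal, self-contained sketch of that grouping effect, not rust-analyzer code: the toy parser simply recurses when it meets the invisible open delimiter, whereas `expr_bp` above keeps its flat Pratt loop and instead pumps the binding power to 255 after eating an `R_DOLLAR`, but the resulting precedence is the same.

// Toy illustration of invisible delimiters around a substituted fragment.
// `$i * 2` with `$i = 1 + 1` must evaluate like `(1 + 1) * 2`.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok {
    Num(i64),
    Plus,
    Star,
    LDollar, // invisible open delimiter (like L_DOLLAR)
    RDollar, // invisible close delimiter (like R_DOLLAR)
}

struct P {
    toks: Vec<Tok>,
    pos: usize,
}

impl P {
    fn peek(&self) -> Option<Tok> {
        self.toks.get(self.pos).copied()
    }

    fn bump(&mut self) -> Option<Tok> {
        let t = self.peek();
        self.pos += 1;
        t
    }

    // Pratt-style loop. A fragment wrapped in LDollar..RDollar is parsed as a
    // single operand, so operators outside it cannot steal its operands.
    fn expr(&mut self, min_bp: u8) -> i64 {
        let mut lhs = match self.bump() {
            Some(Tok::Num(n)) => n,
            Some(Tok::LDollar) => {
                let v = self.expr(0); // parse the whole fragment as one unit
                assert_eq!(self.bump(), Some(Tok::RDollar));
                v
            }
            t => panic!("unexpected token {:?}", t),
        };
        loop {
            let (bp, op) = match self.peek() {
                Some(Tok::Plus) => (1, Tok::Plus),
                Some(Tok::Star) => (2, Tok::Star),
                _ => break, // RDollar or end of input stops the loop
            };
            if bp < min_bp {
                break;
            }
            self.bump();
            let rhs = self.expr(bp + 1);
            lhs = match op {
                Tok::Plus => lhs + rhs,
                Tok::Star => lhs * rhs,
                _ => unreachable!(),
            };
        }
        lhs
    }
}

fn main() {
    // With the invisible delimiters the fragment stays grouped: (1 + 1) * 2 = 4.
    let grouped = vec![
        Tok::LDollar, Tok::Num(1), Tok::Plus, Tok::Num(1), Tok::RDollar,
        Tok::Star, Tok::Num(2),
    ];
    // Without them the tokens merge into the outer expression: 1 + 1 * 2 = 3.
    let flat = vec![Tok::Num(1), Tok::Plus, Tok::Num(1), Tok::Star, Tok::Num(2)];

    let mut p = P { toks: grouped, pos: 0 };
    assert_eq!(p.expr(0), 4);
    let mut q = P { toks: flat, pos: 0 };
    assert_eq!(q.expr(0), 3);
    println!("ok");
}

Either way the substituted fragment behaves as a single operand while contributing no real tokens to the final tree, which is why `TtTreeSink::token` and the `eat_dollars` helpers in `Parser` above simply skip over `L_DOLLAR` and `R_DOLLAR`.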