author     Edwin Cheng <[email protected]>    2019-04-13 11:38:31 +0100
committer  Edwin Cheng <[email protected]>    2019-04-14 04:42:20 +0100
commit     6646d49f238bb92d55fcb4900830f19faa2994a5 (patch)
tree       1a7091b6e104abedcc086e99f560f36010aefe5f
parent     f66300ccd1e6ef05b633cda06c87f913d1c91a1e (diff)

Fix bug and add expr, pat, ty matcher
Diffstat:
 crates/ra_mbe/src/lib.rs                    | 97
 crates/ra_mbe/src/mbe_expander.rs           | 13
 crates/ra_mbe/src/subtree_parser.rs         | 12
 crates/ra_mbe/src/subtree_source.rs         | 14
 crates/ra_mbe/src/tt_cursor.rs              | 15
 crates/ra_parser/src/grammar.rs             | 12
 crates/ra_parser/src/grammar/expressions.rs | 50
 crates/ra_parser/src/grammar/patterns.rs    |  2
 crates/ra_parser/src/lib.rs                 | 33
 crates/ra_parser/src/parser.rs              | 92
 10 files changed, 307 insertions, 33 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 4126854d1..a530f3b03 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -189,6 +189,14 @@ impl_froms!(TokenTree: Leaf, Subtree);
         rules.expand(&invocation_tt).unwrap()
     }
 
+    pub(crate) fn expand_to_syntax(
+        rules: &MacroRules,
+        invocation: &str,
+    ) -> ra_syntax::TreeArc<ast::SourceFile> {
+        let expanded = expand(rules, invocation);
+        token_tree_to_ast_item_list(&expanded)
+    }
+
     pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
         let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
@@ -485,4 +493,93 @@ SOURCE_FILE@[0; 40)
         );
         assert_expansion(&rules, "foo! { foo }", "fn foo () {let a = foo :: bar ;}");
     }
+
+    #[test]
+    fn test_expr() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:expr) => {
+                fn bar() { $ i; }
+            }
+        }
+"#,
+        );
+
+        assert_expansion(
+            &rules,
+            "foo! { 2 + 2 * baz(3).quux() }",
+            "fn bar () {2 + 2 * baz (3) . quux () ;}",
+        );
+    }
+
+    #[test]
+    fn test_expr_order() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:expr) => {
+                fn bar() { $ i * 2; }
+            }
+        }
+"#,
+        );
+
+        assert_eq!(
+            expand_to_syntax(&rules, "foo! { 1 + 1 }").syntax().debug_dump().trim(),
+            r#"SOURCE_FILE@[0; 15)
+  FN_DEF@[0; 15)
+    FN_KW@[0; 2) "fn"
+    NAME@[2; 5)
+      IDENT@[2; 5) "bar"
+    PARAM_LIST@[5; 7)
+      L_PAREN@[5; 6) "("
+      R_PAREN@[6; 7) ")"
+    BLOCK@[7; 15)
+      L_CURLY@[7; 8) "{"
+      EXPR_STMT@[8; 14)
+        BIN_EXPR@[8; 13)
+          BIN_EXPR@[8; 11)
+            LITERAL@[8; 9)
+              INT_NUMBER@[8; 9) "1"
+            PLUS@[9; 10) "+"
+            LITERAL@[10; 11)
+              INT_NUMBER@[10; 11) "1"
+          STAR@[11; 12) "*"
+          LITERAL@[12; 13)
+            INT_NUMBER@[12; 13) "2"
+        SEMI@[13; 14) ";"
+      R_CURLY@[14; 15) "}""#,
+        );
+    }
+
+    #[test]
+    fn test_ty() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:ty) => (
+                fn bar() -> $ i { unimplemented!() }
+            )
+        }
+"#,
+        );
+        assert_expansion(
+            &rules,
+            "foo! { Baz<u8> }",
+            "fn bar () -> Baz < u8 > {unimplemented ! ()}",
+        );
+    }
+
+    #[test]
+    fn test_pat_() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:pat) => { fn foo() { let $ i; } }
+        }
+"#,
+        );
+        assert_expansion(&rules, "foo! { (a, b) }", "fn foo () {let (a , b) ;}");
+    }
 }
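
The four tests above pin down what each new fragment matcher should produce. As a standalone cross-check that runs on any Rust toolchain (it is not part of the commit), the same fragment specifiers behave like this with ordinary macro_rules!, and test_expr_order's expected tree corresponds to (1 + 1) * 2 rather than 1 + 1 * 2:

// Standalone illustration (not this commit's code): the same fragment specifiers,
// exercised with ordinary macro_rules!, mirroring the tests added above.
macro_rules! with_expr { ($i:expr) => { fn bar() -> i32 { $i } }; }
macro_rules! with_order { ($i:expr) => { fn order() -> i32 { $i * 2 } }; }
macro_rules! with_ty { ($i:ty) => { fn baz() -> $i { Default::default() } }; }
macro_rules! with_pat { ($p:pat, $e:expr) => { fn quux() -> i32 { let $p = (1, 2); $e } }; }

with_expr!(2 + 2 * 3);
with_order!(1 + 1); // the captured expr stays grouped, as in test_expr_order
with_ty!(u8);
with_pat!((a, b), a + b);

fn main() {
    assert_eq!(bar(), 8);
    assert_eq!(order(), 4); // (1 + 1) * 2, not 1 + (1 * 2) = 3
    assert_eq!(baz(), 0u8);
    assert_eq!(quux(), 3);
}
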
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index ce41d7225..7a259f338 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -144,6 +144,19 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                                input.eat_path().ok_or(ExpandError::UnexpectedToken)?.clone();
                            res.inner.insert(text.clone(), Binding::Simple(path.into()));
                        }
+                        "expr" => {
+                            let expr =
+                                input.eat_expr().ok_or(ExpandError::UnexpectedToken)?.clone();
+                            res.inner.insert(text.clone(), Binding::Simple(expr.into()));
+                        }
+                        "ty" => {
+                            let ty = input.eat_ty().ok_or(ExpandError::UnexpectedToken)?.clone();
+                            res.inner.insert(text.clone(), Binding::Simple(ty.into()));
+                        }
+                        "pat" => {
+                            let pat = input.eat_pat().ok_or(ExpandError::UnexpectedToken)?.clone();
+                            res.inner.insert(text.clone(), Binding::Simple(pat.into()));
+                        }
                         _ => return Err(ExpandError::UnexpectedToken),
                     }
                 }
diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs
index 164240d92..13d5d2169 100644
--- a/crates/ra_mbe/src/subtree_parser.rs
+++ b/crates/ra_mbe/src/subtree_parser.rs
@@ -30,6 +30,18 @@ impl<'a> Parser<'a> {
         self.parse(ra_parser::parse_path)
     }
 
+    pub fn parse_expr(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_expr)
+    }
+
+    pub fn parse_ty(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_ty)
+    }
+
+    pub fn parse_pat(self) -> Option<tt::TokenTree> {
+        self.parse(ra_parser::parse_pat)
+    }
+
     fn parse<F>(self, f: F) -> Option<tt::TokenTree>
     where
         F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 6aa20057e..0a070b46a 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -109,6 +109,8 @@ impl<'a> SubTreeWalker<'a> {
         self.cursor = match self.ts.get(0) {
             DelimToken::Token(token) => match token {
                 tt::TokenTree::Subtree(subtree) => {
+                    let ts = TokenSeq::from(subtree);
+                    self.stack.push((ts, 0));
                     WalkCursor::Token(0, convert_delim(subtree.delimiter, false))
                 }
                 tt::TokenTree::Leaf(leaf) => {
@@ -254,7 +256,7 @@ impl<'a> WalkerOwner<'a> {
                 }
             }
         } else if walker.stack.len() == 1 {
-            if let DelimToken::Delim(_, is_end) = walker.ts.get(*u) {
+            if let DelimToken::Delim(_, is_end) = walker.top().get(*u) {
                 if !is_end {
                     let (_, last_idx) = &walker.stack[0];
                     if let DelimToken::Token(token) = walker.ts.get(*last_idx) {
@@ -310,10 +312,16 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
         }
     }
     fn is_token_joint_to_next(&self, pos: usize) -> bool {
-        self.walker.get(pos).unwrap().is_joint_to_next
+        match self.walker.get(pos) {
+            Some(t) => t.is_joint_to_next,
+            _ => false,
+        }
     }
     fn is_keyword(&self, pos: usize, kw: &str) -> bool {
-        self.walker.get(pos).unwrap().text == *kw
+        match self.walker.get(pos) {
+            Some(t) => t.text == *kw,
+            _ => false,
+        }
     }
 }
 
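
The two TokenSource methods above now answer false for out-of-range positions instead of panicking on unwrap(). A minimal standalone sketch of that defaulting pattern, using hypothetical stand-in types rather than the real walker:

// Hypothetical stand-in for the walker's token descriptor; only the Option-based
// defaulting pattern from the hunk above is being illustrated.
struct TokenDesc {
    is_joint_to_next: bool,
    text: String,
}

fn is_token_joint_to_next(tokens: &[TokenDesc], pos: usize) -> bool {
    match tokens.get(pos) {
        Some(t) => t.is_joint_to_next,
        _ => false, // out of range: previously `.unwrap()` would panic here
    }
}

fn is_keyword(tokens: &[TokenDesc], pos: usize, kw: &str) -> bool {
    match tokens.get(pos) {
        Some(t) => t.text == *kw,
        _ => false,
    }
}

fn main() {
    let toks = vec![TokenDesc { is_joint_to_next: true, text: "fn".to_string() }];
    assert!(is_token_joint_to_next(&toks, 0));
    assert!(is_keyword(&toks, 0, "fn"));
    assert!(!is_keyword(&toks, 5, "fn")); // lookahead past the end now just answers "no"
}
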
diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs
index d29faa77c..f6cefe087 100644
--- a/crates/ra_mbe/src/tt_cursor.rs
+++ b/crates/ra_mbe/src/tt_cursor.rs
@@ -84,6 +84,21 @@ impl<'a> TtCursor<'a> {
         parser.parse_path()
     }
 
+    pub(crate) fn eat_expr(&mut self) -> Option<tt::TokenTree> {
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_expr()
+    }
+
+    pub(crate) fn eat_ty(&mut self) -> Option<tt::TokenTree> {
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_ty()
+    }
+
+    pub(crate) fn eat_pat(&mut self) -> Option<tt::TokenTree> {
+        let parser = Parser::new(&mut self.pos, self.subtree);
+        parser.parse_pat()
+    }
+
     pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> {
         if self.at_char(char) {
             self.bump();
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs
index c5f510e6b..5a7a55141 100644
--- a/crates/ra_parser/src/grammar.rs
+++ b/crates/ra_parser/src/grammar.rs
@@ -53,6 +53,18 @@ pub(crate) fn path(p: &mut Parser) {
     paths::type_path(p);
 }
 
+pub(crate) fn expr(p: &mut Parser) {
+    expressions::expr(p);
+}
+
+pub(crate) fn type_(p: &mut Parser) {
+    types::type_(p)
+}
+
+pub(crate) fn pattern(p: &mut Parser) {
+    patterns::pattern(p)
+}
+
 pub(crate) fn reparser(
     node: SyntaxKind,
     first_child: Option<SyntaxKind>,
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs
index 9b38b0a31..295577325 100644
--- a/crates/ra_parser/src/grammar/expressions.rs
+++ b/crates/ra_parser/src/grammar/expressions.rs
@@ -8,17 +8,20 @@ const EXPR_FIRST: TokenSet = LHS_FIRST;
 
 pub(super) fn expr(p: &mut Parser) -> BlockLike {
     let r = Restrictions { forbid_structs: false, prefer_stmt: false };
-    expr_bp(p, r, 1).1
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl).1
 }
 
 pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) {
     let r = Restrictions { forbid_structs: false, prefer_stmt: true };
-    expr_bp(p, r, 1)
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl)
 }
 
 fn expr_no_struct(p: &mut Parser) {
     let r = Restrictions { forbid_structs: true, prefer_stmt: false };
-    expr_bp(p, r, 1);
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl);
 }
 
 // test block
@@ -206,8 +209,23 @@ fn current_op(p: &Parser) -> (u8, Op) {
 }
 
 // Parses expression with binding power of at least bp.
-fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
-    let mut lhs = match lhs(p, r) {
+fn expr_bp(
+    p: &mut Parser,
+    r: Restrictions,
+    mut bp: u8,
+    dollar_lvl: &mut usize,
+) -> (Option<CompletedMarker>, BlockLike) {
+    // `newly_dollar_open` is a flag indicating that a dollar group closes right after the lhs, e.g.
+    // `$1$ + a`
+    // We use this flag to skip handling it.
+    let mut newly_dollar_open = false;
+
+    if p.at_l_dollar() {
+        *dollar_lvl += p.eat_l_dollars();
+        newly_dollar_open = true;
+    }
+
+    let mut lhs = match lhs(p, r, dollar_lvl) {
         Some((lhs, blocklike)) => {
             // test stmt_bin_expr_ambiguity
             // fn foo() {
@@ -223,6 +241,15 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
     };
 
     loop {
+        if *dollar_lvl > 0 && p.at_r_dollar() {
+            *dollar_lvl -= p.eat_r_dollars(*dollar_lvl);
+            if !newly_dollar_open {
+                // We "pump" bp to give it the highest priority
+                bp = 255;
+            }
+            newly_dollar_open = false;
+        }
+
         let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ;
         let (op_bp, op) = current_op(p);
         if op_bp < bp {
@@ -235,7 +262,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
                 p.bump_compound(kind, n);
             }
         }
-        expr_bp(p, r, op_bp + 1);
+
+        expr_bp(p, r, op_bp + 1, dollar_lvl);
         lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
     }
     (Some(lhs), BlockLike::NotBlock)
@@ -244,7 +272,11 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
 const LHS_FIRST: TokenSet =
     atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
 
-fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
+fn lhs(
+    p: &mut Parser,
+    r: Restrictions,
+    dollar_lvl: &mut usize,
+) -> Option<(CompletedMarker, BlockLike)> {
     let m;
     let kind = match p.current() {
         // test ref_expr
@@ -275,7 +307,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             m = p.start();
             p.bump();
             if p.at_ts(EXPR_FIRST) {
-                expr_bp(p, r, 2);
+                expr_bp(p, r, 2, dollar_lvl);
             }
             return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
         }
@@ -287,7 +319,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             ));
         }
     };
-    expr_bp(p, r, 255);
+    expr_bp(p, r, 255, dollar_lvl);
     Some((m.complete(p, kind), BlockLike::NotBlock))
 }
 
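
The expr_bp changes thread a dollar_lvl counter through the Pratt loop: an L_DOLLAR before the lhs opens a captured fragment, and when the matching R_DOLLAR is consumed after the lhs the binding power is pumped to 255 so the fragment cannot be torn apart by a higher-precedence operator to its right. A self-contained sketch of that idea (illustrative only, not the ra_parser code; Tok and P are made-up stand-ins):

// `LDollar`/`RDollar` stand in for the invisible L_DOLLAR/R_DOLLAR tokens that wrap a
// captured `$i:expr` fragment in the macro expansion.
#[derive(Clone, Copy, PartialEq)]
enum Tok { LDollar, RDollar, Num(i64), Plus, Star, Eof }

struct P { toks: Vec<Tok>, pos: usize }

impl P {
    fn current(&self) -> Tok { *self.toks.get(self.pos).unwrap_or(&Tok::Eof) }
    fn bump(&mut self) { self.pos += 1 }

    // Pratt loop with the same "pump bp after a closing dollar" trick as the hunk above.
    fn expr_bp(&mut self, mut bp: u8, dollar_lvl: &mut usize) -> i64 {
        let mut newly_dollar_open = false;
        if self.current() == Tok::LDollar {
            self.bump();
            *dollar_lvl += 1;
            newly_dollar_open = true;
        }
        let mut lhs = match self.current() {
            Tok::Num(n) => { self.bump(); n }
            _ => panic!("expected a number"),
        };
        loop {
            if *dollar_lvl > 0 && self.current() == Tok::RDollar {
                self.bump();
                *dollar_lvl -= 1;
                if !newly_dollar_open {
                    bp = 255; // the captured fragment is closed: treat it as one operand
                }
                newly_dollar_open = false;
            }
            let (op_bp, op) = match self.current() {
                Tok::Plus => (1, Tok::Plus),
                Tok::Star => (2, Tok::Star),
                _ => return lhs,
            };
            if op_bp < bp {
                return lhs;
            }
            self.bump();
            let rhs = self.expr_bp(op_bp + 1, dollar_lvl);
            lhs = if op == Tok::Plus { lhs + rhs } else { lhs * rhs };
        }
    }
}

fn main() {
    // Token stream for the expansion of `foo! { 1 + 1 }` with rhs `$i * 2`:  $ 1 + 1 $ * 2
    let toks = vec![
        Tok::LDollar, Tok::Num(1), Tok::Plus, Tok::Num(1), Tok::RDollar, Tok::Star, Tok::Num(2),
    ];
    let mut p = P { toks, pos: 0 };
    let mut dollar_lvl = 0;
    assert_eq!(p.expr_bp(1, &mut dollar_lvl), 4); // (1 + 1) * 2, not 1 + (1 * 2) = 3
}
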
diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs
index 9a307559b..03fa9b71e 100644
--- a/crates/ra_parser/src/grammar/patterns.rs
+++ b/crates/ra_parser/src/grammar/patterns.rs
@@ -5,7 +5,7 @@ pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
     .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE, MINUS]);
 
 pub(super) fn pattern(p: &mut Parser) {
-    pattern_r(p, PAT_RECOVERY_SET)
+    pattern_r(p, PAT_RECOVERY_SET);
 }
 
 /// Parses a pattern list separated by pipes `|`
diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs
index 3ceeeebd7..56755c394 100644
--- a/crates/ra_parser/src/lib.rs
+++ b/crates/ra_parser/src/lib.rs
@@ -53,20 +53,39 @@ pub trait TreeSink {
     fn error(&mut self, error: ParseError);
 }
 
-/// Parse given tokens into the given sink as a rust file.
-pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+fn parse_from_tokens<F>(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
+where
+    F: FnOnce(&mut parser::Parser),
+{
     let mut p = parser::Parser::new(token_source);
-    grammar::root(&mut p);
+    f(&mut p);
     let events = p.finish();
     event::process(tree_sink, events);
 }
 
+/// Parse given tokens into the given sink as a rust file.
+pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::root);
+}
+
 /// Parse given tokens into the given sink as a path
 pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    let mut p = parser::Parser::new(token_source);
-    grammar::path(&mut p);
-    let events = p.finish();
-    event::process(tree_sink, events);
+    parse_from_tokens(token_source, tree_sink, grammar::path);
+}
+
+/// Parse given tokens into the given sink as an expression
+pub fn parse_expr(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::expr);
+}
+
+/// Parse given tokens into the given sink as a ty
+pub fn parse_ty(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::type_);
+}
+
+/// Parse given tokens into the given sink as a pattern
+pub fn parse_pat(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::pattern);
 }
 
 /// A parsing function for a specific braced-block.
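
The lib.rs change factors the repeated new-parser/run/finish/process sequence into parse_from_tokens, so each public parse_* function only selects a grammar entry point. A small standalone sketch of the same shape, with hypothetical stand-in types (not the ra_parser API):

// `Parser`, `root`, `expr` and the Vec-based sink are illustrative stand-ins; only the
// shared-driver shape from the hunk above is being shown.
struct Parser {
    events: Vec<&'static str>,
}

fn root(p: &mut Parser) { p.events.push("root") }
fn expr(p: &mut Parser) { p.events.push("expr") }

fn parse_from_tokens<F>(sink: &mut Vec<&'static str>, f: F)
where
    F: FnOnce(&mut Parser),
{
    let mut p = Parser { events: Vec::new() };
    f(&mut p); // the only step that differs between parse, parse_path, parse_expr, ...
    sink.extend(p.events); // stands in for event::process(tree_sink, events)
}

fn parse(sink: &mut Vec<&'static str>) { parse_from_tokens(sink, root) }
fn parse_expr(sink: &mut Vec<&'static str>) { parse_from_tokens(sink, expr) }

fn main() {
    let mut sink = Vec::new();
    parse(&mut sink);
    parse_expr(&mut sink);
    assert_eq!(sink, ["root", "expr"]);
}
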
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs
index 3cb57ed9c..71f1f8b30 100644
--- a/crates/ra_parser/src/parser.rs
+++ b/crates/ra_parser/src/parser.rs
@@ -45,8 +45,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `>>`.
     pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.token_pos);
-        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        let c1 = self.nth(0);
+        let c2 = self.nth(1);
+
         if self.token_source.is_token_joint_to_next(self.token_pos) {
             Some((c1, c2))
         } else {
@@ -59,9 +60,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `=>>`.
     pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.token_pos);
-        let c2 = self.token_source.token_kind(self.token_pos + 1);
-        let c3 = self.token_source.token_kind(self.token_pos + 2);
+        let c1 = self.nth(0);
+        let c2 = self.nth(1);
+        let c3 = self.nth(2);
         if self.token_source.is_token_joint_to_next(self.token_pos)
             && self.token_source.is_token_joint_to_next(self.token_pos + 1)
         {
@@ -77,7 +78,23 @@ impl<'t> Parser<'t> {
         let steps = self.steps.get();
         assert!(steps <= 10_000_000, "the parser seems stuck");
         self.steps.set(steps + 1);
-        self.token_source.token_kind(self.token_pos + n)
+
+        // This is because L_DOLLAR and R_DOLLAR can appear in between;
+        // the following code skips through them
+        let mut non_dollars_count = 0;
+        let mut i = 0;
+
+        loop {
+            let kind = self.token_source.token_kind(self.token_pos + i);
+            i += 1;
+
+            match kind {
+                EOF => return EOF,
+                SyntaxKind::L_DOLLAR | SyntaxKind::R_DOLLAR => {}
+                _ if non_dollars_count == n => return kind,
+                _ => non_dollars_count += 1,
+            }
+        }
     }
 
     /// Checks if the current token is `kind`.
@@ -99,8 +116,6 @@ impl<'t> Parser<'t> {
     /// consumed between the `start` and the corresponding `Marker::complete`
     /// belong to the same node.
     pub(crate) fn start(&mut self) -> Marker {
-        self.eat_dollars();
-
         let pos = self.events.len() as u32;
         self.push_event(Event::tombstone());
         Marker::new(pos)
@@ -185,7 +200,6 @@ impl<'t> Parser<'t> {
         self.eat_dollars();
         self.token_pos += usize::from(n_raw_tokens);
         self.push_event(Event::Token { kind, n_raw_tokens });
-        self.eat_dollars();
     }
 
     fn push_event(&mut self, event: Event) {
@@ -193,12 +207,64 @@ impl<'t> Parser<'t> {
     }
 
     fn eat_dollars(&mut self) {
-        while self.nth(0) == SyntaxKind::L_DOLLAR || self.nth(0) == SyntaxKind::R_DOLLAR {
-            let kind = self.nth(0);
-            self.token_pos += 1;
-            self.push_event(Event::Token { kind, n_raw_tokens: 1 });
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                }
+                _ => {
+                    return;
+                }
+            }
+        }
+    }
+
+    pub(crate) fn eat_l_dollars(&mut self) -> usize {
+        let mut ate_count = 0;
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::L_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                    ate_count += 1;
+                }
+                _ => {
+                    return ate_count;
+                }
+            }
         }
     }
+
+    pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
+        let mut ate_count = 0;
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::R_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                    ate_count += 1;
+
+                    if max_count >= ate_count {
+                        return ate_count;
+                    }
+                }
+                _ => {
+                    return ate_count;
+                }
+            }
+        }
+    }
+
+    pub(crate) fn at_l_dollar(&self) -> bool {
+        let kind = self.token_source.token_kind(self.token_pos);
+        (kind == SyntaxKind::L_DOLLAR)
+    }
+
+    pub(crate) fn at_r_dollar(&self) -> bool {
+        let kind = self.token_source.token_kind(self.token_pos);
+        (kind == SyntaxKind::R_DOLLAR)
+    }
 }
 
 /// See `Parser::start`.
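
Parser::nth now counts only non-dollar tokens, so L_DOLLAR/R_DOLLAR markers are invisible to lookahead. The skipping logic can be checked in isolation; the following standalone sketch (illustrative names, not the ra_parser API) mirrors the loop added above:

// `Kind` and `nth_visible` are hypothetical stand-ins: `nth_visible(tokens, n)` returns
// the n-th token kind after skipping the invisible dollar markers, or Eof if it runs out.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Kind { LDollar, RDollar, IntNumber, Plus, Star, Eof }

fn nth_visible(tokens: &[Kind], n: usize) -> Kind {
    let mut non_dollars_count = 0;
    let mut i = 0;
    loop {
        let kind = tokens.get(i).copied().unwrap_or(Kind::Eof);
        i += 1;
        match kind {
            Kind::Eof => return Kind::Eof,
            Kind::LDollar | Kind::RDollar => {}          // markers are invisible to lookahead
            _ if non_dollars_count == n => return kind,  // n-th visible token found
            _ => non_dollars_count += 1,
        }
    }
}

fn main() {
    // Token stream for `$ 1 + 1 $ * 2`, as the macro expander would emit it.
    let tokens = [
        Kind::LDollar, Kind::IntNumber, Kind::Plus, Kind::IntNumber,
        Kind::RDollar, Kind::Star, Kind::IntNumber,
    ];
    assert_eq!(nth_visible(&tokens, 0), Kind::IntNumber);
    assert_eq!(nth_visible(&tokens, 3), Kind::Star);
    assert_eq!(nth_visible(&tokens, 10), Kind::Eof);
}
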