Diffstat (limited to 'crates/mbe/src')
 crates/mbe/src/benchmark.rs        | 211
 crates/mbe/src/expander/matcher.rs |  68
 crates/mbe/src/lib.rs              |   7
 crates/mbe/src/syntax_bridge.rs    |  41
 crates/mbe/src/tests.rs            |  23
 crates/mbe/src/tt_iter.rs          |  77
 6 files changed, 357 insertions(+), 70 deletions(-)
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
new file mode 100644
index 000000000..0d0acd589
--- /dev/null
+++ b/crates/mbe/src/benchmark.rs
@@ -0,0 +1,211 @@
+//! This module adds real-world mbe examples for benchmark tests.
+
+use rustc_hash::FxHashMap;
+use syntax::{
+    ast::{self, NameOwner},
+    AstNode, SmolStr,
+};
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::{
+    ast_to_token_tree,
+    parser::{Op, RepeatKind, Separator},
+    MacroRules,
+};
+
+#[test]
+fn benchmark_parse_macro_rules() {
+    if skip_slow_tests() {
+        return;
+    }
+    let rules = macro_rules_fixtures_tt();
+    let hash: usize = {
+        let _pt = bench("mbe parse macro rules");
+        rules.values().map(|it| MacroRules::parse(it).unwrap().rules.len()).sum()
+    };
+    assert_eq!(hash, 1144);
+}
+
+#[test]
+fn benchmark_expand_macro_rules() {
+    if skip_slow_tests() {
+        return;
+    }
+    let rules = macro_rules_fixtures();
+    let invocations = invocation_fixtures(&rules);
+
+    let hash: usize = {
+        let _pt = bench("mbe expand macro rules");
+        invocations
+            .into_iter()
+            .map(|(id, tt)| {
+                let res = rules[&id].expand(&tt);
+                if res.err.is_some() {
+                    // FIXME:
+                    // Currently `invocation_fixtures` generates some syntactically correct
+                    // invocations that cannot be expanded by mbe. We ignore errors here.
+                    // See: https://github.com/rust-analyzer/rust-analyzer/issues/4777
+                    eprintln!("err from {} {:?}", id, res.err);
+                }
+                res.value.token_trees.len()
+            })
+            .sum()
+    };
+    assert_eq!(hash, 66995);
+}
+
+fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> {
+    macro_rules_fixtures_tt()
+        .into_iter()
+        .map(|(id, tt)| (id, MacroRules::parse(&tt).unwrap()))
+        .collect()
+}
+
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+    let fixture = bench_fixture::numerous_macro_rules();
+    let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
+
+    source_file
+        .syntax()
+        .descendants()
+        .filter_map(ast::MacroRules::cast)
+        .map(|rule| {
+            let id = rule.name().unwrap().to_string();
+            let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap()).unwrap();
+            (id, def_tt)
+        })
+        .collect()
+}
+
+// Generate random invocation fixtures from the rules
+fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> {
+    let mut seed = 123456789;
+    let mut res = Vec::new();
+
+    for (name, it) in rules {
+        for rule in &it.rules {
+            // Generate twice
+            for _ in 0..2 {
+                let mut subtree = tt::Subtree::default();
+                for op in rule.lhs.iter() {
+                    collect_from_op(op, &mut subtree, &mut seed);
+                }
+                res.push((name.clone(), subtree));
+            }
+        }
+    }
+    return res;
+
+    fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+        return match op {
+            Op::Var { kind, .. } => match kind.as_ref().map(|it| it.as_str()) {
+                Some("ident") => parent.token_trees.push(make_ident("foo")),
+                Some("ty") => parent.token_trees.push(make_ident("Foo")),
+                Some("tt") => parent.token_trees.push(make_ident("foo")),
+                Some("vis") => parent.token_trees.push(make_ident("pub")),
+                Some("pat") => parent.token_trees.push(make_ident("foo")),
+                Some("path") => parent.token_trees.push(make_ident("foo")),
+                Some("literal") => parent.token_trees.push(make_literal("1")),
+                Some("expr") => parent.token_trees.push(make_ident("foo").into()),
+                Some("lifetime") => {
+                    parent.token_trees.push(make_punct('\''));
+                    parent.token_trees.push(make_ident("a"));
+                }
+                Some("block") => {
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
+                }
+                Some("item") => {
+                    parent.token_trees.push(make_ident("fn"));
+                    parent.token_trees.push(make_ident("foo"));
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
+                }
+                Some("meta") => {
+                    parent.token_trees.push(make_ident("foo"));
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+                }
+
+                None => (),
+                Some(kind) => panic!("Unhandled kind {}", kind),
+            },
+            Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
+            Op::Repeat { tokens, kind, separator } => {
+                let max = 10;
+                let cnt = match kind {
+                    RepeatKind::ZeroOrMore => rand(seed) % max,
+                    RepeatKind::OneOrMore => 1 + rand(seed) % max,
+                    RepeatKind::ZeroOrOne => rand(seed) % 2,
+                };
+                for i in 0..cnt {
+                    for it in tokens.iter() {
+                        collect_from_op(it, parent, seed);
+                    }
+                    if i + 1 != cnt {
+                        if let Some(sep) = separator {
+                            match sep {
+                                Separator::Literal(it) => parent
+                                    .token_trees
+                                    .push(tt::Leaf::Literal(it.clone().into()).into()),
+                                Separator::Ident(it) => parent
+                                    .token_trees
+                                    .push(tt::Leaf::Ident(it.clone().into()).into()),
+                                Separator::Puncts(puncts) => {
+                                    for it in puncts {
+                                        parent
+                                            .token_trees
+                                            .push(tt::Leaf::Punct(it.clone().into()).into())
+                                    }
+                                }
+                            };
+                        }
+                    }
+                }
+            }
+            Op::Subtree { tokens, delimiter } => {
+                let mut subtree =
+                    tt::Subtree { delimiter: delimiter.clone(), token_trees: Vec::new() };
+                tokens.iter().for_each(|it| {
+                    collect_from_op(it, &mut subtree, seed);
+                });
+                parent.token_trees.push(subtree.into());
+            }
+        };
+
+        // Simple linear congruential generator for deterministic results
+        fn rand(seed: &mut usize) -> usize {
+            let a = 1664525;
+            let c = 1013904223;
+            *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
+            return *seed;
+        }
+        fn make_ident(ident: &str) -> tt::TokenTree {
+            tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
+                .into()
+        }
+        fn make_punct(char: char) -> tt::TokenTree {
+            tt::Leaf::Punct(tt::Punct {
+                id: tt::TokenId::unspecified(),
+                char,
+                spacing: tt::Spacing::Alone,
+            })
+            .into()
+        }
+        fn make_literal(lit: &str) -> tt::TokenTree {
+            tt::Leaf::Literal(tt::Literal {
+                id: tt::TokenId::unspecified(),
+                text: SmolStr::new(lit),
+            })
+            .into()
+        }
+        fn make_subtree(
+            kind: tt::DelimiterKind,
+            token_trees: Option<Vec<tt::TokenTree>>,
+        ) -> tt::TokenTree {
+            tt::Subtree {
+                delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
+                token_trees: token_trees.unwrap_or_default(),
+            }
+            .into()
+        }
+    }
+}
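Note on the fixture generator above: it seeds its own linear congruential generator, so the "random" invocations are fully reproducible and the asserted totals (1144 parsed rules, 66995 expanded token trees) stay stable across runs. The following standalone sketch shows the same scheme in isolation; the LCG constants match the diff, while `pick_repeat_count` is a hypothetical helper written for illustration, not part of the crate:

    // Deterministic LCG, same constants as `invocation_fixtures` above.
    fn rand(seed: &mut usize) -> usize {
        let a = 1664525;
        let c = 1013904223;
        *seed = seed.wrapping_mul(a).wrapping_add(c);
        *seed
    }

    // Hypothetical helper: how a repeat count could be drawn for `$(...)*` vs `$(...)+`.
    fn pick_repeat_count(seed: &mut usize, one_or_more: bool) -> usize {
        let max = 10;
        if one_or_more { 1 + rand(seed) % max } else { rand(seed) % max }
    }

    fn main() {
        let mut seed = 123456789;
        // The same seed always yields the same sequence of counts,
        // which is what keeps the benchmark totals comparable between runs.
        let counts: Vec<usize> = (0..5).map(|_| pick_repeat_count(&mut seed, false)).collect();
        println!("{:?}", counts);
    }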
diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs
index 800931cd1..e3bd4c09a 100644
--- a/crates/mbe/src/expander/matcher.rs
+++ b/crates/mbe/src/expander/matcher.rs
@@ -3,15 +3,13 @@
 use crate::{
     expander::{Binding, Bindings, Fragment},
     parser::{Op, RepeatKind, Separator},
-    subtree_source::SubtreeTokenSource,
     tt_iter::TtIter,
     ExpandError, MetaTemplate,
 };
 
 use super::ExpandResult;
-use parser::{FragmentKind::*, TreeSink};
-use syntax::{SmolStr, SyntaxKind};
-use tt::buffer::{Cursor, TokenBuffer};
+use parser::FragmentKind::*;
+use syntax::SmolStr;
 
 impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
@@ -409,68 +407,6 @@ impl<'a> TtIter<'a> {
         .into())
     }
 
-    fn expect_fragment(
-        &mut self,
-        fragment_kind: parser::FragmentKind,
-    ) -> ExpandResult<Option<tt::TokenTree>> {
-        struct OffsetTokenSink<'a> {
-            cursor: Cursor<'a>,
-            error: bool,
-        }
-
-        impl<'a> TreeSink for OffsetTokenSink<'a> {
-            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
-                if kind == SyntaxKind::LIFETIME_IDENT {
-                    n_tokens = 2;
-                }
-                for _ in 0..n_tokens {
-                    self.cursor = self.cursor.bump_subtree();
-                }
-            }
-            fn start_node(&mut self, _kind: SyntaxKind) {}
-            fn finish_node(&mut self) {}
-            fn error(&mut self, _error: parser::ParseError) {
-                self.error = true;
-            }
-        }
-
-        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
-        let mut src = SubtreeTokenSource::new(&buffer);
-        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
-
-        parser::parse_fragment(&mut src, &mut sink, fragment_kind);
-
-        let mut err = None;
-        if !sink.cursor.is_root() || sink.error {
-            err = Some(err!("expected {:?}", fragment_kind));
-        }
-
-        let mut curr = buffer.begin();
-        let mut res = vec![];
-
-        if sink.cursor.is_root() {
-            while curr != sink.cursor {
-                if let Some(token) = curr.token_tree() {
-                    res.push(token);
-                }
-                curr = curr.bump();
-            }
-        }
-        self.inner = self.inner.as_slice()[res.len()..].iter();
-        if res.len() == 0 && err.is_none() {
-            err = Some(err!("no tokens consumed"));
-        }
-        let res = match res.len() {
-            1 => Some(res[0].cloned()),
-            0 => None,
-            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: None,
-                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
-            })),
-        };
-        ExpandResult { value: res, err }
-    }
-
     fn eat_vis(&mut self) -> Option<tt::TokenTree> {
         let mut fork = self.clone();
         match fork.expect_fragment(Visibility) {
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index d80bd7a33..4c298f85f 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -12,6 +12,9 @@ mod subtree_source;
 #[cfg(test)]
 mod tests;
 
+#[cfg(test)]
+mod benchmark;
+
 use std::fmt;
 
 use test_utils::mark;
@@ -62,8 +65,8 @@ impl fmt::Display for ExpandError {
 }
 
 pub use crate::syntax_bridge::{
-    ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
-    TokenMap,
+    ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
+    token_tree_to_syntax_node, TokenMap,
 };
 
 /// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 0cdc175be..aacae1026 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,8 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
+use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum TokenTextRange {
@@ -112,6 +112,43 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
     Some((subtree, conv.id_alloc.map))
 }
 
+/// Split a token tree of the form `$($e:expr)SEP*` into its separate exprs.
+pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+    if tt.token_trees.is_empty() {
+        return Vec::new();
+    }
+
+    let mut iter = TtIter::new(tt);
+    let mut res = Vec::new();
+
+    while iter.peek_n(0).is_some() {
+        let expanded = iter.expect_fragment(FragmentKind::Expr);
+        if expanded.err.is_some() {
+            break;
+        }
+
+        res.push(match expanded.value {
+            None => break,
+            Some(tt @ tt::TokenTree::Leaf(_)) => {
+                tt::Subtree { delimiter: None, token_trees: vec![tt.into()] }
+            }
+            Some(tt::TokenTree::Subtree(tt)) => tt,
+        });
+
+        let mut fork = iter.clone();
+        if fork.expect_char(sep).is_err() {
+            break;
+        }
+        iter = fork;
+    }
+
+    if iter.peek_n(0).is_some() {
+        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+    }
+
+    res
+}
+
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
@@ -176,7 +213,7 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
 
     // Quote the string
     // Note that `tt::Literal` expects an escaped string
-    let text = format!("{:?}", text.escape_default().to_string());
+    let text = format!("{:?}", text.escape_debug().to_string());
     text.into()
 }
 
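The one-character switch from `escape_default` to `escape_debug` is what makes the new non-Latin doc-comment test below pass: `escape_default` rewrites every non-ASCII character as a `\u{...}` escape, so CJK doc comments were emitted garbled, whereas `escape_debug` escapes only what a debug string actually requires and passes printable Unicode through. A minimal std-only illustration of the difference:

    fn main() {
        let text = "五十弦";
        // escape_default escapes every non-ASCII char as \u{...},
        // which is why non-Latin doc comments used to come out mangled:
        assert_eq!(text.escape_default().to_string(), "\\u{4e94}\\u{5341}\\u{5f26}");
        // escape_debug leaves printable Unicode untouched:
        assert_eq!(text.escape_debug().to_string(), text);
    }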
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index 1c467facd..f1eadcd1e 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -970,6 +970,29 @@ fn test_meta_doc_comments() {
 }
 
 #[test]
+fn test_meta_doc_comments_non_latin() {
+    parse_macro(
+        r#"
+macro_rules! foo {
+    ($(#[$ i:meta])+) => (
+        $(#[$ i])+
+        fn bar() {}
+    )
+}
+"#,
+    )
+    .assert_expand_items(
+        r#"foo! {
+    /// 錦瑟無端五十弦,一弦一柱思華年。
+    /**
+ 莊生曉夢迷蝴蝶,望帝春心託杜鵑。
+ */
+}"#,
+        "# [doc = \" 錦瑟無端五十弦,一弦一柱思華年。\"] # [doc = \"\\\\n 莊生曉夢迷蝴蝶,望帝春心託杜鵑。\\\\n \"] fn bar () {}",
+    );
+}
+
+#[test]
 fn test_tt_block() {
     parse_macro(
         r#"
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs
index 46c420718..a362d31fc 100644
--- a/crates/mbe/src/tt_iter.rs
+++ b/crates/mbe/src/tt_iter.rs
@@ -1,5 +1,20 @@
 //! FIXME: write short doc here
 
+use crate::{subtree_source::SubtreeTokenSource, ExpandError, ExpandResult};
+
+use parser::TreeSink;
+use syntax::SyntaxKind;
+use tt::buffer::{Cursor, TokenBuffer};
+
+macro_rules! err {
+    () => {
+        ExpandError::BindingError(format!(""))
+    };
+    ($($tt:tt)*) => {
+        ExpandError::BindingError(format!($($tt)*))
+    };
+}
+
 #[derive(Debug, Clone)]
 pub(crate) struct TtIter<'a> {
     pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
@@ -56,6 +71,68 @@ impl<'a> TtIter<'a> {
         }
     }
 
+    pub(crate) fn expect_fragment(
+        &mut self,
+        fragment_kind: parser::FragmentKind,
+    ) -> ExpandResult<Option<tt::TokenTree>> {
+        struct OffsetTokenSink<'a> {
+            cursor: Cursor<'a>,
+            error: bool,
+        }
+
+        impl<'a> TreeSink for OffsetTokenSink<'a> {
+            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+                if kind == SyntaxKind::LIFETIME_IDENT {
+                    n_tokens = 2;
+                }
+                for _ in 0..n_tokens {
+                    self.cursor = self.cursor.bump_subtree();
+                }
+            }
+            fn start_node(&mut self, _kind: SyntaxKind) {}
+            fn finish_node(&mut self) {}
+            fn error(&mut self, _error: parser::ParseError) {
+                self.error = true;
+            }
+        }
+
+        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
+        let mut src = SubtreeTokenSource::new(&buffer);
+        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
+
+        parser::parse_fragment(&mut src, &mut sink, fragment_kind);
+
+        let mut err = None;
+        if !sink.cursor.is_root() || sink.error {
+            err = Some(err!("expected {:?}", fragment_kind));
+        }
+
+        let mut curr = buffer.begin();
+        let mut res = vec![];
+
+        if sink.cursor.is_root() {
+            while curr != sink.cursor {
+                if let Some(token) = curr.token_tree() {
+                    res.push(token);
+                }
+                curr = curr.bump();
+            }
+        }
+        self.inner = self.inner.as_slice()[res.len()..].iter();
+        if res.len() == 0 && err.is_none() {
+            err = Some(err!("no tokens consumed"));
+        }
+        let res = match res.len() {
+            1 => Some(res[0].cloned()),
+            0 => None,
+            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
+                delimiter: None,
+                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
+            })),
+        };
+        ExpandResult { value: res, err }
+    }
+
     pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> {
         self.inner.as_slice().get(n)
     }
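Taken together: `expect_fragment` consumes the longest prefix of the remaining tokens that parses as the requested fragment kind, advancing the iterator past exactly the consumed tokens, and `parse_exprs_with_sep` drives it in a loop, forking the iterator to probe for the separator so a failed probe consumes nothing. A sketch of how the two compose, using only the APIs as they appear in this diff (`parse_to_token_tree` and `parse_exprs_with_sep` are both re-exported from the `mbe` crate); the expected count of 3 is an assumption about how the expr grammar splits this particular input:

    use mbe::{parse_exprs_with_sep, parse_to_token_tree};

    fn main() {
        // Turn source text into a token tree, then split it on ','.
        let (tt, _token_map) = parse_to_token_tree("1 + 2, foo(3), bar").unwrap();
        let exprs = parse_exprs_with_sep(&tt, ',');
        // Expect one delimiter-less subtree per comma-separated expr.
        assert_eq!(exprs.len(), 3);
    }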