diff options
author | Zac Pullar-Strecker <[email protected]> | 2020-08-24 10:19:53 +0100 |
---|---|---|
committer | Zac Pullar-Strecker <[email protected]> | 2020-08-24 10:20:13 +0100 |
commit | 7bbca7a1b3f9293d2f5cc5745199bc5f8396f2f0 (patch) | |
tree | bdb47765991cb973b2cd5481a088fac636bd326c /crates/mbe | |
parent | ca464650eeaca6195891199a93f4f76cf3e7e697 (diff) | |
parent | e65d48d1fb3d4d91d9dc1148a7a836ff5c9a3c87 (diff) |
Merge remote-tracking branch 'upstream/master' into 503-hover-doc-links
Diffstat (limited to 'crates/mbe')
-rw-r--r-- | crates/mbe/Cargo.toml | 21 | ||||
-rw-r--r-- | crates/mbe/src/lib.rs | 278 | ||||
-rw-r--r-- | crates/mbe/src/mbe_expander.rs | 180 | ||||
-rw-r--r-- | crates/mbe/src/mbe_expander/matcher.rs | 477 | ||||
-rw-r--r-- | crates/mbe/src/mbe_expander/transcriber.rs | 254 | ||||
-rw-r--r-- | crates/mbe/src/parser.rs | 184 | ||||
-rw-r--r-- | crates/mbe/src/subtree_source.rs | 197 | ||||
-rw-r--r-- | crates/mbe/src/syntax_bridge.rs | 832 | ||||
-rw-r--r-- | crates/mbe/src/tests.rs | 1898 | ||||
-rw-r--r-- | crates/mbe/src/tt_iter.rs | 75 |
10 files changed, 4396 insertions, 0 deletions
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml new file mode 100644 index 000000000..1aba8b7c4 --- /dev/null +++ b/crates/mbe/Cargo.toml | |||
@@ -0,0 +1,21 @@ | |||
# Manifest for the `mbe` (Macro By Example) crate: rust-analyzer's
# implementation of `macro_rules!` parsing and expansion.
[package]
name = "mbe"
version = "0.0.0"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"

[lib]
# The crate docs contain `not_rust` pseudo-code examples, so doctests are off.
doctest = false

[dependencies]
rustc-hash = "1.1.0"
smallvec = "1.2.0"
log = "0.4.8"

# Sibling workspace crates: syntax trees, the parser, and the token-tree types.
syntax = { path = "../syntax" }
parser = { path = "../parser" }
tt = { path = "../tt" }

[dev-dependencies]
test_utils = { path = "../test_utils" }
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs new file mode 100644 index 000000000..f854ca09a --- /dev/null +++ b/crates/mbe/src/lib.rs | |||
@@ -0,0 +1,278 @@ | |||
1 | //! `mbe` (short for Macro By Example) crate contains code for handling | ||
2 | //! `macro_rules` macros. It uses `TokenTree` (from `tt` package) as the | ||
3 | //! interface, although it contains some code to bridge `SyntaxNode`s and | ||
4 | //! `TokenTree`s as well! | ||
5 | |||
6 | mod parser; | ||
7 | mod mbe_expander; | ||
8 | mod syntax_bridge; | ||
9 | mod tt_iter; | ||
10 | mod subtree_source; | ||
11 | |||
12 | #[cfg(test)] | ||
13 | mod tests; | ||
14 | |||
15 | pub use tt::{Delimiter, Punct}; | ||
16 | |||
17 | use crate::{ | ||
18 | parser::{parse_pattern, Op}, | ||
19 | tt_iter::TtIter, | ||
20 | }; | ||
21 | |||
/// Errors produced while parsing a `macro_rules!` definition into `MacroRules`.
#[derive(Debug, PartialEq, Eq)]
pub enum ParseError {
    /// A specific token or construct was expected; the payload is a
    /// human-readable description of what was expected.
    Expected(String),
    /// A repetition (`$(...)*`) could match the empty token sequence, which
    /// would make expansion ambiguous/non-terminating (see `validate`).
    // NOTE(review): variant name has a typo ("Emtpy" -> "Empty"), but it is
    // public API, so renaming would break callers elsewhere in the workspace.
    RepetitionEmtpyTokenTree,
}
27 | |||
/// Errors produced while expanding a macro invocation against parsed rules.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ExpandError {
    /// No rule's left-hand side matched the invocation's token tree.
    NoMatchingRule,
    /// A token in the invocation did not match the pattern.
    UnexpectedToken,
    /// A meta-variable capture could not be created or resolved; the payload
    /// describes the problem.
    BindingError(String),
    /// Token-tree <-> syntax-tree conversion failed.
    ConversionError,
    /// A repetition operator (`$(...)`) was malformed.
    InvalidRepeat,
    /// A procedural-macro expansion failed; wraps the underlying `tt` error.
    ProcMacroError(tt::ExpansionError),
}
37 | |||
// Allows `?` to convert proc-macro expansion failures into `ExpandError`.
impl From<tt::ExpansionError> for ExpandError {
    fn from(it: tt::ExpansionError) -> Self {
        ExpandError::ProcMacroError(it)
    }
}
43 | |||
44 | pub use crate::syntax_bridge::{ | ||
45 | ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, | ||
46 | TokenMap, | ||
47 | }; | ||
48 | |||
/// This struct contains AST for a single `macro_rules` definition. What might
/// be very confusing is that AST has almost exactly the same shape as
/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct MacroRules {
    /// The individual `lhs => rhs` arms, tried in order during expansion.
    rules: Vec<Rule>,
    /// Highest id of the token we have in TokenMap
    shift: Shift,
}
59 | |||
/// A single `lhs => rhs` arm of a `macro_rules!` definition, with the
/// enclosing delimiters of both sides stripped (see `Rule::parse`).
#[derive(Clone, Debug, PartialEq, Eq)]
struct Rule {
    lhs: tt::Subtree,
    rhs: tt::Subtree,
}
65 | |||
/// Offset added to all token ids of an invocation so they never collide with
/// ids already used inside the macro definition (see `Shift::new`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Shift(u32);
68 | |||
69 | impl Shift { | ||
70 | fn new(tt: &tt::Subtree) -> Shift { | ||
71 | // Note that TokenId is started from zero, | ||
72 | // We have to add 1 to prevent duplication. | ||
73 | let value = max_id(tt).map_or(0, |it| it + 1); | ||
74 | return Shift(value); | ||
75 | |||
76 | // Find the max token id inside a subtree | ||
77 | fn max_id(subtree: &tt::Subtree) -> Option<u32> { | ||
78 | subtree | ||
79 | .token_trees | ||
80 | .iter() | ||
81 | .filter_map(|tt| match tt { | ||
82 | tt::TokenTree::Subtree(subtree) => { | ||
83 | let tree_id = max_id(subtree); | ||
84 | match subtree.delimiter { | ||
85 | Some(it) if it.id != tt::TokenId::unspecified() => { | ||
86 | Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0))) | ||
87 | } | ||
88 | _ => tree_id, | ||
89 | } | ||
90 | } | ||
91 | tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) | ||
92 | if ident.id != tt::TokenId::unspecified() => | ||
93 | { | ||
94 | Some(ident.id.0) | ||
95 | } | ||
96 | _ => None, | ||
97 | }) | ||
98 | .max() | ||
99 | } | ||
100 | } | ||
101 | |||
102 | /// Shift given TokenTree token id | ||
103 | fn shift_all(self, tt: &mut tt::Subtree) { | ||
104 | for t in tt.token_trees.iter_mut() { | ||
105 | match t { | ||
106 | tt::TokenTree::Leaf(leaf) => match leaf { | ||
107 | tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id), | ||
108 | tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id), | ||
109 | tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id), | ||
110 | }, | ||
111 | tt::TokenTree::Subtree(tt) => { | ||
112 | if let Some(it) = tt.delimiter.as_mut() { | ||
113 | it.id = self.shift(it.id); | ||
114 | }; | ||
115 | self.shift_all(tt) | ||
116 | } | ||
117 | } | ||
118 | } | ||
119 | } | ||
120 | |||
121 | fn shift(self, id: tt::TokenId) -> tt::TokenId { | ||
122 | if id == tt::TokenId::unspecified() { | ||
123 | return id; | ||
124 | } | ||
125 | tt::TokenId(id.0 + self.0) | ||
126 | } | ||
127 | |||
128 | fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> { | ||
129 | id.0.checked_sub(self.0).map(tt::TokenId) | ||
130 | } | ||
131 | } | ||
132 | |||
/// Which side of an expansion a token id came from: the macro definition
/// (`Def`) or the call site (`Call`). Returned by `MacroRules::map_id_up`.
#[derive(Debug, Eq, PartialEq)]
pub enum Origin {
    Def,
    Call,
}
138 | |||
139 | impl MacroRules { | ||
140 | pub fn parse(tt: &tt::Subtree) -> Result<MacroRules, ParseError> { | ||
141 | // Note: this parsing can be implemented using mbe machinery itself, by | ||
142 | // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing | ||
143 | // manually seems easier. | ||
144 | let mut src = TtIter::new(tt); | ||
145 | let mut rules = Vec::new(); | ||
146 | while src.len() > 0 { | ||
147 | let rule = Rule::parse(&mut src)?; | ||
148 | rules.push(rule); | ||
149 | if let Err(()) = src.expect_char(';') { | ||
150 | if src.len() > 0 { | ||
151 | return Err(ParseError::Expected("expected `:`".to_string())); | ||
152 | } | ||
153 | break; | ||
154 | } | ||
155 | } | ||
156 | |||
157 | for rule in rules.iter() { | ||
158 | validate(&rule.lhs)?; | ||
159 | } | ||
160 | |||
161 | Ok(MacroRules { rules, shift: Shift::new(tt) }) | ||
162 | } | ||
163 | |||
164 | pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> { | ||
165 | // apply shift | ||
166 | let mut tt = tt.clone(); | ||
167 | self.shift.shift_all(&mut tt); | ||
168 | mbe_expander::expand(self, &tt) | ||
169 | } | ||
170 | |||
171 | pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { | ||
172 | self.shift.shift(id) | ||
173 | } | ||
174 | |||
175 | pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) { | ||
176 | match self.shift.unshift(id) { | ||
177 | Some(id) => (id, Origin::Call), | ||
178 | None => (id, Origin::Def), | ||
179 | } | ||
180 | } | ||
181 | } | ||
182 | |||
183 | impl Rule { | ||
184 | fn parse(src: &mut TtIter) -> Result<Rule, ParseError> { | ||
185 | let mut lhs = src | ||
186 | .expect_subtree() | ||
187 | .map_err(|()| ParseError::Expected("expected subtree".to_string()))? | ||
188 | .clone(); | ||
189 | lhs.delimiter = None; | ||
190 | src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?; | ||
191 | src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?; | ||
192 | let mut rhs = src | ||
193 | .expect_subtree() | ||
194 | .map_err(|()| ParseError::Expected("expected subtree".to_string()))? | ||
195 | .clone(); | ||
196 | rhs.delimiter = None; | ||
197 | Ok(crate::Rule { lhs, rhs }) | ||
198 | } | ||
199 | } | ||
200 | |||
201 | fn to_parse_error(e: ExpandError) -> ParseError { | ||
202 | let msg = match e { | ||
203 | ExpandError::InvalidRepeat => "invalid repeat".to_string(), | ||
204 | _ => "invalid macro definition".to_string(), | ||
205 | }; | ||
206 | ParseError::Expected(msg) | ||
207 | } | ||
208 | |||
/// Checks a rule's left-hand side for repetitions that could match the empty
/// token sequence; such patterns are rejected with
/// `ParseError::RepetitionEmtpyTokenTree` since expansion on them could loop.
fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> {
    for op in parse_pattern(pattern) {
        let op = op.map_err(to_parse_error)?;

        match op {
            // Recurse into plain subtrees.
            Op::TokenTree(tt::TokenTree::Subtree(subtree)) => validate(subtree)?,
            Op::Repeat { subtree, separator, .. } => {
                // Checks that no repetition which could match an empty token
                // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558

                // A separator guarantees at least one consumed token per
                // iteration, so only separator-less repetitions can be empty.
                if separator.is_none() {
                    // Empty-matching iff *every* element of the repetition
                    // body can itself match nothing.
                    if parse_pattern(subtree).all(|child_op| {
                        match child_op.map_err(to_parse_error) {
                            Ok(Op::Var { kind, .. }) => {
                                // vis is optional
                                if kind.map_or(false, |it| it == "vis") {
                                    return true;
                                }
                            }
                            Ok(Op::Repeat { kind, .. }) => {
                                // Nested `*`/`?` repetitions may match zero times.
                                return matches!(
                                    kind,
                                    parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne
                                )
                            }
                            _ => {}
                        }
                        false
                    }) {
                        return Err(ParseError::RepetitionEmtpyTokenTree);
                    }
                }
                validate(subtree)?
            }
            _ => (),
        }
    }
    Ok(())
}
248 | |||
/// The result of a (possibly partial) expansion: a best-effort value paired
/// with an optional error. Unlike `Result`, a value is produced even on error.
#[derive(Debug)]
pub struct ExpandResult<T>(pub T, pub Option<ExpandError>);
251 | |||
252 | impl<T> ExpandResult<T> { | ||
253 | pub fn ok(t: T) -> ExpandResult<T> { | ||
254 | ExpandResult(t, None) | ||
255 | } | ||
256 | |||
257 | pub fn only_err(err: ExpandError) -> ExpandResult<T> | ||
258 | where | ||
259 | T: Default, | ||
260 | { | ||
261 | ExpandResult(Default::default(), Some(err)) | ||
262 | } | ||
263 | |||
264 | pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> { | ||
265 | ExpandResult(f(self.0), self.1) | ||
266 | } | ||
267 | |||
268 | pub fn result(self) -> Result<T, ExpandError> { | ||
269 | self.1.map(Err).unwrap_or(Ok(self.0)) | ||
270 | } | ||
271 | } | ||
272 | |||
273 | impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> { | ||
274 | fn from(result: Result<T, ExpandError>) -> ExpandResult<T> { | ||
275 | result | ||
276 | .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None)) | ||
277 | } | ||
278 | } | ||
diff --git a/crates/mbe/src/mbe_expander.rs b/crates/mbe/src/mbe_expander.rs new file mode 100644 index 000000000..1ad8b9f8a --- /dev/null +++ b/crates/mbe/src/mbe_expander.rs | |||
@@ -0,0 +1,180 @@ | |||
1 | //! This module takes a (parsed) definition of `macro_rules` invocation, a | ||
2 | //! `tt::TokenTree` representing an argument of macro invocation, and produces a | ||
3 | //! `tt::TokenTree` for the result of the expansion. | ||
4 | |||
5 | mod matcher; | ||
6 | mod transcriber; | ||
7 | |||
8 | use rustc_hash::FxHashMap; | ||
9 | use syntax::SmolStr; | ||
10 | |||
11 | use crate::{ExpandError, ExpandResult}; | ||
12 | |||
/// Expands `input` against all of `rules`' arms, picking the best match.
pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
    expand_rules(&rules.rules, input)
}
16 | |||
/// Tries each rule in order and returns the first error-free transcription;
/// otherwise falls back to the "best" partial match (fewest unmatched token
/// trees, then fewest errors) so a usable tree is still produced.
fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
    // Best partial match seen so far, if any.
    let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
    for rule in rules {
        let new_match = match matcher::match_(&rule.lhs, input) {
            Ok(m) => m,
            Err(_e) => {
                // error in pattern parsing
                continue;
            }
        };
        if new_match.err.is_none() {
            // If we find a rule that applies without errors, we're done.
            // Unconditionally returning the transcription here makes the
            // `test_repeat_bad_var` test fail.
            let ExpandResult(res, transcribe_err) =
                transcriber::transcribe(&rule.rhs, &new_match.bindings);
            if transcribe_err.is_none() {
                return ExpandResult::ok(res);
            }
        }
        // Use the rule if we matched more tokens, or had fewer errors
        if let Some((prev_match, _)) = &match_ {
            if (new_match.unmatched_tts, new_match.err_count)
                < (prev_match.unmatched_tts, prev_match.err_count)
            {
                match_ = Some((new_match, rule));
            }
        } else {
            match_ = Some((new_match, rule));
        }
    }
    if let Some((match_, rule)) = match_ {
        // if we got here, there was no match without errors
        let ExpandResult(result, transcribe_err) =
            transcriber::transcribe(&rule.rhs, &match_.bindings);
        ExpandResult(result, match_.err.or(transcribe_err))
    } else {
        ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule))
    }
}
57 | |||
58 | /// The actual algorithm for expansion is not too hard, but is pretty tricky. | ||
59 | /// `Bindings` structure is the key to understanding what we are doing here. | ||
60 | /// | ||
61 | /// On the high level, it stores mapping from meta variables to the bits of | ||
62 | /// syntax it should be substituted with. For example, if `$e:expr` is matched | ||
63 | /// with `1 + 1` by macro_rules, the `Binding` will store `$e -> 1 + 1`. | ||
64 | /// | ||
65 | /// The tricky bit is dealing with repetitions (`$()*`). Consider this example: | ||
66 | /// | ||
67 | /// ```not_rust | ||
68 | /// macro_rules! foo { | ||
69 | /// ($($ i:ident $($ e:expr),*);*) => { | ||
70 | /// $(fn $ i() { $($ e);*; })* | ||
71 | /// } | ||
72 | /// } | ||
73 | /// foo! { foo 1,2,3; bar 4,5,6 } | ||
74 | /// ``` | ||
75 | /// | ||
76 | /// Here, the `$i` meta variable is matched first with `foo` and then with | ||
77 | /// `bar`, and `$e` is matched in turn with `1`, `2`, `3`, `4`, `5`, `6`. | ||
78 | /// | ||
79 | /// To represent such "multi-mappings", we use a recursive structures: we map | ||
80 | /// variables not to values, but to *lists* of values or other lists (that is, | ||
81 | /// to the trees). | ||
82 | /// | ||
83 | /// For the above example, the bindings would store | ||
84 | /// | ||
85 | /// ```not_rust | ||
86 | /// i -> [foo, bar] | ||
87 | /// e -> [[1, 2, 3], [4, 5, 6]] | ||
88 | /// ``` | ||
89 | /// | ||
90 | /// We construct `Bindings` in the `match_lhs`. The interesting case is | ||
91 | /// `TokenTree::Repeat`, where we use `push_nested` to create the desired | ||
92 | /// nesting structure. | ||
93 | /// | ||
94 | /// The other side of the puzzle is `expand_subtree`, where we use the bindings | ||
95 | /// to substitute meta variables in the output template. When expanding, we | ||
96 | /// maintain a `nesting` stack of indices which tells us which occurrence from | ||
97 | /// the `Bindings` we should take. We push to the stack when we enter a | ||
98 | /// repetition. | ||
99 | /// | ||
100 | /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to | ||
101 | /// `tt::TokenTree`, where the index to select a particular `TokenTree` among | ||
102 | /// many is not a plain `usize`, but an `&[usize]`. | ||
#[derive(Debug, Default)]
struct Bindings {
    // Maps a meta-variable name to its (possibly nested) captured fragments;
    // see the module-level doc comment above for the nesting scheme.
    inner: FxHashMap<SmolStr, Binding>,
}
107 | |||
/// One node of the nested capture tree described on `Bindings`.
#[derive(Debug)]
enum Binding {
    /// A leaf: the tokens captured for a meta variable at this level.
    Fragment(Fragment),
    /// One level of repetition: an entry per iteration of `$(...)`.
    Nested(Vec<Binding>),
    /// A variable that matched zero times inside a repetition.
    Empty,
}
114 | |||
/// The tokens captured for a single meta variable.
#[derive(Debug, Clone)]
enum Fragment {
    /// token fragments are just copy-pasted into the output
    Tokens(tt::TokenTree),
    /// Ast fragments are inserted with fake delimiters, so as to make things
    /// like `$i * 2` where `$i = 1 + 1` work as expected.
    Ast(tt::TokenTree),
}
123 | |||
#[cfg(test)]
mod tests {
    use syntax::{ast, AstNode};

    use super::*;
    use crate::ast_to_token_tree;

    #[test]
    fn test_expand_rule() {
        // `$i` is captured under a repetition, so using it as a simple
        // (non-repeated) binding in the transcription must fail.
        assert_err(
            "($($i:ident);*) => ($i)",
            "foo!{a}",
            ExpandError::BindingError(String::from(
                "expected simple binding, found nested binding `i`",
            )),
        );

        // FIXME:
        // Add an err test case for ($($i:ident)) => ($())
    }

    /// Asserts that expanding `invocation` against `macro_body` yields `err`.
    fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) {
        assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err));
    }

    /// Wraps a rule list in a full `macro_rules! foo { ... }` definition.
    fn format_macro(macro_body: &str) -> String {
        format!(
            "
        macro_rules! foo {{
            {}
        }}
",
            macro_body
        )
    }

    /// Parses the first `macro_rules!` in `macro_definition` into rules.
    fn create_rules(macro_definition: &str) -> crate::MacroRules {
        let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
        let macro_definition =
            source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

        let (definition_tt, _) =
            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
        crate::MacroRules::parse(&definition_tt).unwrap()
    }

    /// Expands the first macro call found in `invocation` with `rules`.
    fn expand_first(rules: &crate::MacroRules, invocation: &str) -> ExpandResult<tt::Subtree> {
        let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
        let macro_invocation =
            source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

        let (invocation_tt, _) =
            ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();

        expand_rules(&rules.rules, &invocation_tt)
    }
}
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs new file mode 100644 index 000000000..b698b9832 --- /dev/null +++ b/crates/mbe/src/mbe_expander/matcher.rs | |||
@@ -0,0 +1,477 @@ | |||
1 | //! FIXME: write short doc here | ||
2 | |||
3 | use crate::{ | ||
4 | mbe_expander::{Binding, Bindings, Fragment}, | ||
5 | parser::{parse_pattern, Op, RepeatKind, Separator}, | ||
6 | subtree_source::SubtreeTokenSource, | ||
7 | tt_iter::TtIter, | ||
8 | ExpandError, | ||
9 | }; | ||
10 | |||
11 | use super::ExpandResult; | ||
12 | use parser::{FragmentKind::*, TreeSink}; | ||
13 | use syntax::{SmolStr, SyntaxKind}; | ||
14 | use tt::buffer::{Cursor, TokenBuffer}; | ||
15 | |||
16 | impl Bindings { | ||
17 | fn push_optional(&mut self, name: &SmolStr) { | ||
18 | // FIXME: Do we have a better way to represent an empty token ? | ||
19 | // Insert an empty subtree for empty token | ||
20 | let tt = tt::Subtree::default().into(); | ||
21 | self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); | ||
22 | } | ||
23 | |||
24 | fn push_empty(&mut self, name: &SmolStr) { | ||
25 | self.inner.insert(name.clone(), Binding::Empty); | ||
26 | } | ||
27 | |||
28 | fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> { | ||
29 | for (key, value) in nested.inner { | ||
30 | if !self.inner.contains_key(&key) { | ||
31 | self.inner.insert(key.clone(), Binding::Nested(Vec::new())); | ||
32 | } | ||
33 | match self.inner.get_mut(&key) { | ||
34 | Some(Binding::Nested(it)) => { | ||
35 | // insert empty nested bindings before this one | ||
36 | while it.len() < idx { | ||
37 | it.push(Binding::Nested(vec![])); | ||
38 | } | ||
39 | it.push(value); | ||
40 | } | ||
41 | _ => { | ||
42 | return Err(ExpandError::BindingError(format!( | ||
43 | "could not find binding `{}`", | ||
44 | key | ||
45 | ))); | ||
46 | } | ||
47 | } | ||
48 | } | ||
49 | Ok(()) | ||
50 | } | ||
51 | } | ||
52 | |||
/// Shorthand for building an `ExpandError::BindingError` with an optional
/// `format!`-style message. The empty arm exists because `format!()` with no
/// arguments would not compile.
macro_rules! err {
    () => {
        ExpandError::BindingError(format!(""))
    };
    ($($tt:tt)*) => {
        ExpandError::BindingError(format!($($tt)*))
    };
}
61 | |||
/// Outcome of matching an invocation against one rule's left-hand side.
#[derive(Debug, Default)]
pub(super) struct Match {
    /// Meta-variable captures accumulated during the match.
    pub bindings: Bindings,
    /// We currently just keep the first error and count the rest to compare matches.
    pub err: Option<ExpandError>,
    pub err_count: usize,
    /// How many top-level token trees were left to match.
    pub unmatched_tts: usize,
}
71 | |||
72 | impl Match { | ||
73 | pub fn add_err(&mut self, err: ExpandError) { | ||
74 | let prev_err = self.err.take(); | ||
75 | self.err = prev_err.or(Some(err)); | ||
76 | self.err_count += 1; | ||
77 | } | ||
78 | } | ||
79 | |||
80 | // General note: These functions have two channels to return errors, a `Result` | ||
81 | // return value and the `&mut Match`. The returned Result is for pattern parsing | ||
82 | // errors; if a branch of the macro definition doesn't parse, it doesn't make | ||
83 | // sense to try using it. Matching errors are added to the `Match`. It might | ||
84 | // make sense to make pattern parsing a separate step? | ||
85 | |||
86 | pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Match, ExpandError> { | ||
87 | assert!(pattern.delimiter == None); | ||
88 | |||
89 | let mut res = Match::default(); | ||
90 | let mut src = TtIter::new(src); | ||
91 | |||
92 | match_subtree(&mut res, pattern, &mut src)?; | ||
93 | |||
94 | if src.len() > 0 { | ||
95 | res.unmatched_tts += src.len(); | ||
96 | res.add_err(err!("leftover tokens")); | ||
97 | } | ||
98 | |||
99 | Ok(res) | ||
100 | } | ||
101 | |||
/// Matches the ops of `pattern` against `src` one by one, recording captures
/// and recoverable mismatches in `res`. `Err` is reserved for unparseable
/// patterns (see the general note above `match_`).
fn match_subtree(
    res: &mut Match,
    pattern: &tt::Subtree,
    src: &mut TtIter,
) -> Result<(), ExpandError> {
    for op in parse_pattern(pattern) {
        match op? {
            // A literal token in the pattern must match the same kind and
            // same text/char in the input.
            Op::TokenTree(tt::TokenTree::Leaf(lhs)) => {
                let rhs = match src.expect_leaf() {
                    Ok(l) => l,
                    Err(()) => {
                        res.add_err(err!("expected leaf: `{}`", lhs));
                        continue;
                    }
                };
                match (lhs, rhs) {
                    (
                        tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
                        tt::Leaf::Punct(tt::Punct { char: rhs, .. }),
                    ) if lhs == rhs => (),
                    (
                        tt::Leaf::Ident(tt::Ident { text: lhs, .. }),
                        tt::Leaf::Ident(tt::Ident { text: rhs, .. }),
                    ) if lhs == rhs => (),
                    (
                        tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
                        tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
                    ) if lhs == rhs => (),
                    _ => {
                        res.add_err(ExpandError::UnexpectedToken);
                    }
                }
            }
            // A subtree in the pattern must match a subtree with the same
            // delimiter kind, recursively, consuming it entirely.
            Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
                let rhs = match src.expect_subtree() {
                    Ok(s) => s,
                    Err(()) => {
                        res.add_err(err!("expected subtree"));
                        continue;
                    }
                };
                if lhs.delimiter_kind() != rhs.delimiter_kind() {
                    res.add_err(err!("mismatched delimiter"));
                    continue;
                }
                let mut src = TtIter::new(rhs);
                match_subtree(res, lhs, &mut src)?;
                if src.len() > 0 {
                    res.add_err(err!("leftover tokens"));
                }
            }
            // `$name:kind` — a fragment kind is mandatory in patterns.
            Op::Var { name, kind } => {
                let kind = match kind {
                    Some(k) => k,
                    None => {
                        res.add_err(ExpandError::UnexpectedToken);
                        continue;
                    }
                };
                let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src);
                match matched {
                    Some(fragment) => {
                        res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment));
                    }
                    // No tokens but no error either: the fragment is optional
                    // (e.g. `vis`); bind it to an empty subtree.
                    None if match_err.is_none() => res.bindings.push_optional(name),
                    _ => {}
                }
                if let Some(err) = match_err {
                    res.add_err(err);
                }
            }
            Op::Repeat { subtree, kind, separator } => {
                match_repeat(res, subtree, kind, separator, src)?;
            }
        }
    }
    Ok(())
}
180 | |||
impl<'a> TtIter<'a> {
    /// Consumes a repetition separator if the upcoming tokens match it;
    /// returns whether it was eaten. Works on a fork so a failed attempt
    /// consumes nothing.
    fn eat_separator(&mut self, separator: &Separator) -> bool {
        let mut fork = self.clone();
        let ok = match separator {
            Separator::Ident(lhs) => match fork.expect_ident() {
                Ok(rhs) => rhs.text == lhs.text,
                _ => false,
            },
            Separator::Literal(lhs) => match fork.expect_literal() {
                Ok(rhs) => match rhs {
                    tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
                    tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
                    tt::Leaf::Punct(_) => false,
                },
                _ => false,
            },
            // Multi-punct separators (e.g. `=>`) must match char by char.
            Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() {
                Ok(rhs) => rhs.char == lhs.char,
                _ => false,
            }),
        };
        if ok {
            *self = fork;
        }
        ok
    }

    /// Consumes one `tt` fragment: a lifetime, a compound operator glued into
    /// a single delimiter-less subtree, or a plain token tree.
    pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
        // `'ident` is two tokens at this level but one `tt` fragment.
        match self.peek_n(0) {
            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
                return self.expect_lifetime();
            }
            _ => (),
        }

        let tt = self.next().ok_or_else(|| ())?.clone();
        // Only a joint-spaced punct can start a compound operator.
        let punct = match tt {
            tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
                punct
            }
            _ => return Ok(tt),
        };

        // Peek up to two more puncts to recognise 2- and 3-char operators.
        let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
            (
                Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))),
                Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))),
            ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)),
            (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None),
            _ => return Ok(tt),
        };

        match (punct.char, second, third) {
            // Three-character operators: `...`, `..=`, `<<=`, `>>=`.
            ('.', '.', Some('.'))
            | ('.', '.', Some('='))
            | ('<', '<', Some('='))
            | ('>', '>', Some('=')) => {
                let tt2 = self.next().unwrap().clone();
                let tt3 = self.next().unwrap().clone();
                Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into())
            }
            // Two-character operators.
            ('-', '=', None)
            | ('-', '>', None)
            | (':', ':', None)
            | ('!', '=', None)
            | ('.', '.', None)
            | ('*', '=', None)
            | ('/', '=', None)
            | ('&', '&', None)
            | ('&', '=', None)
            | ('%', '=', None)
            | ('^', '=', None)
            | ('+', '=', None)
            | ('<', '<', None)
            | ('<', '=', None)
            | ('=', '=', None)
            | ('=', '>', None)
            | ('>', '=', None)
            | ('>', '>', None)
            | ('|', '=', None)
            | ('|', '|', None) => {
                let tt2 = self.next().unwrap().clone();
                Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into())
            }
            _ => Ok(tt),
        }
    }

    /// Consumes `'ident` as a single delimiter-less subtree: a lifetime is
    /// two leaves in `tt`, but one fragment for matching purposes.
    pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
        let punct = self.expect_punct()?;
        if punct.char != '\'' {
            return Err(());
        }
        let ident = self.expect_ident()?;

        Ok(tt::Subtree {
            delimiter: None,
            token_trees: vec![
                tt::Leaf::Punct(*punct).into(),
                tt::Leaf::Ident(ident.clone()).into(),
            ],
        }
        .into())
    }

    /// Parses a full syntactic fragment (expr, ty, pat, ...) from the
    /// remaining tokens using the real parser, consuming exactly the tokens
    /// the parser accepted.
    pub(crate) fn expect_fragment(
        &mut self,
        fragment_kind: parser::FragmentKind,
    ) -> ExpandResult<Option<tt::TokenTree>> {
        // Sink that only tracks how far the parser advanced and whether it
        // reported any error; the tree itself is discarded.
        pub(crate) struct OffsetTokenSink<'a> {
            pub(crate) cursor: Cursor<'a>,
            pub(crate) error: bool,
        }

        impl<'a> TreeSink for OffsetTokenSink<'a> {
            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
                // A lifetime is reported as one token but occupies two
                // token-tree leaves (`'` + ident).
                if kind == SyntaxKind::LIFETIME {
                    n_tokens = 2;
                }
                for _ in 0..n_tokens {
                    self.cursor = self.cursor.bump_subtree();
                }
            }
            fn start_node(&mut self, _kind: SyntaxKind) {}
            fn finish_node(&mut self) {}
            fn error(&mut self, _error: parser::ParseError) {
                self.error = true;
            }
        }

        let buffer = TokenBuffer::new(&self.inner.as_slice());
        let mut src = SubtreeTokenSource::new(&buffer);
        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };

        parser::parse_fragment(&mut src, &mut sink, fragment_kind);

        // The parser must stop at the top level (not inside a subtree) and
        // report no errors for the fragment to count as matched.
        let mut err = None;
        if !sink.cursor.is_root() || sink.error {
            err = Some(err!("expected {:?}", fragment_kind));
        }

        // Collect the top-level token trees the parser consumed.
        let mut curr = buffer.begin();
        let mut res = vec![];

        if sink.cursor.is_root() {
            while curr != sink.cursor {
                if let Some(token) = curr.token_tree() {
                    res.push(token);
                }
                curr = curr.bump();
            }
        }
        // Advance self past the consumed tokens.
        self.inner = self.inner.as_slice()[res.len()..].iter();
        if res.len() == 0 && err.is_none() {
            err = Some(err!("no tokens consumed"));
        }
        // A multi-token fragment is wrapped in a delimiter-less subtree.
        let res = match res.len() {
            1 => Some(res[0].clone()),
            0 => None,
            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
                delimiter: None,
                token_trees: res.into_iter().cloned().collect(),
            })),
        };
        ExpandResult(res, err)
    }

    /// Tries to parse a visibility; on failure consumes nothing and returns
    /// `None` (visibility is always optional).
    pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> {
        let mut fork = self.clone();
        match fork.expect_fragment(Visibility) {
            ExpandResult(tt, None) => {
                *self = fork;
                tt
            }
            ExpandResult(_, Some(_)) => None,
        }
    }
}
359 | |||
/// Matches a `$(...)sep op` repetition: runs the inner pattern repeatedly,
/// pushing each iteration's bindings one nesting level deeper.
pub(super) fn match_repeat(
    res: &mut Match,
    pattern: &tt::Subtree,
    kind: RepeatKind,
    separator: Option<Separator>,
    src: &mut TtIter,
) -> Result<(), ExpandError> {
    // Dirty hack to make macro-expansion terminate.
    // This should be replaced by a proper macro-by-example implementation
    let mut limit = 65536;
    let mut counter = 0;

    for i in 0.. {
        let mut fork = src.clone();

        // From the second iteration on, a declared separator must precede
        // each repetition element.
        if let Some(separator) = &separator {
            if i != 0 && !fork.eat_separator(separator) {
                break;
            }
        }

        let mut nested = Match::default();
        match_subtree(&mut nested, pattern, &mut fork)?;
        if nested.err.is_none() {
            limit -= 1;
            if limit == 0 {
                log::warn!(
                    "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
                    pattern,
                    src,
                    kind,
                    separator
                );
                break;
            }
            // Commit the fork only after a successful iteration.
            *src = fork;

            if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
                res.add_err(err);
            }
            counter += 1;
            // `?` matches at most once.
            if counter == 1 {
                if let RepeatKind::ZeroOrOne = kind {
                    break;
                }
            }
        } else {
            break;
        }
    }

    match (kind, counter) {
        // `+` requires at least one iteration.
        (RepeatKind::OneOrMore, 0) => {
            res.add_err(ExpandError::UnexpectedToken);
        }
        (_, 0) => {
            // Collect all empty variables in subtrees
            let mut vars = Vec::new();
            collect_vars(&mut vars, pattern)?;
            for var in vars {
                res.bindings.push_empty(&var)
            }
        }
        _ => (),
    }
    Ok(())
}
427 | |||
/// Matches a single `$var:kind` meta variable against the input, dispatching
/// to the full parser for syntactic fragments and to token-level helpers for
/// `ident`/`tt`/`lifetime`/`literal`/`vis`.
fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
    let fragment = match kind {
        "path" => Path,
        "expr" => Expr,
        "ty" => Type,
        "pat" => Pattern,
        "stmt" => Statement,
        "block" => Block,
        "meta" => MetaItem,
        "item" => Item,
        _ => {
            // Token-level fragment kinds that don't need the full parser.
            let tt_result = match kind {
                "ident" => input
                    .expect_ident()
                    .map(|ident| Some(tt::Leaf::from(ident.clone()).into()))
                    .map_err(|()| err!("expected ident")),
                "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
                "lifetime" => input
                    .expect_lifetime()
                    .map(|tt| Some(tt))
                    .map_err(|()| err!("expected lifetime")),
                "literal" => input
                    .expect_literal()
                    .map(|literal| Some(tt::Leaf::from(literal.clone()).into()))
                    .map_err(|()| err!()),
                // `vis` is optional
                "vis" => match input.eat_vis() {
                    Some(vis) => Ok(Some(vis)),
                    None => Ok(None),
                },
                _ => Err(ExpandError::UnexpectedToken),
            };
            return tt_result.map(|it| it.map(Fragment::Tokens)).into();
        }
    };
    let result = input.expect_fragment(fragment);
    // `expr` captures are substituted with fake delimiters (see `Fragment`).
    result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
}
466 | |||
467 | fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &tt::Subtree) -> Result<(), ExpandError> { | ||
468 | for op in parse_pattern(pattern) { | ||
469 | match op? { | ||
470 | Op::Var { name, .. } => buf.push(name.clone()), | ||
471 | Op::TokenTree(tt::TokenTree::Leaf(_)) => (), | ||
472 | Op::TokenTree(tt::TokenTree::Subtree(subtree)) => collect_vars(buf, subtree)?, | ||
473 | Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?, | ||
474 | } | ||
475 | } | ||
476 | Ok(()) | ||
477 | } | ||
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs new file mode 100644 index 000000000..c9525c5bf --- /dev/null +++ b/crates/mbe/src/mbe_expander/transcriber.rs | |||
@@ -0,0 +1,254 @@ | |||
1 | //! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like | ||
2 | //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` | ||
3 | |||
4 | use syntax::SmolStr; | ||
5 | |||
6 | use super::ExpandResult; | ||
7 | use crate::{ | ||
8 | mbe_expander::{Binding, Bindings, Fragment}, | ||
9 | parser::{parse_template, Op, RepeatKind, Separator}, | ||
10 | ExpandError, | ||
11 | }; | ||
12 | |||
impl Bindings {
    /// Returns `true` if a binding named `name` was captured by the matcher.
    fn contains(&self, name: &str) -> bool {
        self.inner.contains_key(name)
    }

    /// Resolves `name` at the current repetition depth.
    ///
    /// Each `NestingState` in `nesting` corresponds to one enclosing `$(...)`
    /// repetition; the binding is unwrapped one `Binding::Nested` level per
    /// state, using that state's `idx` to pick the current iteration's value.
    /// As a side effect this marks every visited state as `hit`, and sets
    /// `at_end` when the indexed value is exhausted — `expand_repeat` reads
    /// both flags to decide when to stop looping.
    fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> {
        let mut b = self.inner.get(name).ok_or_else(|| {
            ExpandError::BindingError(format!("could not find binding `{}`", name))
        })?;
        for nesting_state in nesting.iter_mut() {
            nesting_state.hit = true;
            b = match b {
                // Binding is shallower than the nesting depth: reuse the
                // same fragment for every iteration.
                Binding::Fragment(_) => break,
                Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| {
                    nesting_state.at_end = true;
                    ExpandError::BindingError(format!("could not find nested binding `{}`", name))
                })?,
                Binding::Empty => {
                    nesting_state.at_end = true;
                    return Err(ExpandError::BindingError(format!(
                        "could not find empty binding `{}`",
                        name
                    )));
                }
            };
        }
        // After unwrapping, only a plain fragment is a valid result.
        match b {
            Binding::Fragment(it) => Ok(it),
            Binding::Nested(_) => Err(ExpandError::BindingError(format!(
                "expected simple binding, found nested binding `{}`",
                name
            ))),
            Binding::Empty => Err(ExpandError::BindingError(format!(
                "expected simple binding, found empty binding `{}`",
                name
            ))),
        }
    }
}
52 | |||
53 | pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult<tt::Subtree> { | ||
54 | assert!(template.delimiter == None); | ||
55 | let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; | ||
56 | let mut arena: Vec<tt::TokenTree> = Vec::new(); | ||
57 | expand_subtree(&mut ctx, template, &mut arena) | ||
58 | } | ||
59 | |||
/// Per-repetition-level iteration state; one entry per enclosing `$(...)`.
#[derive(Debug)]
struct NestingState {
    /// Index of the current iteration within this repetition level.
    idx: usize,
    /// `hit` is currently necessary to tell `expand_repeat` if it should stop
    /// because there is no variable in use by the current repetition
    hit: bool,
    /// `at_end` is currently necessary to tell `expand_repeat` if it should stop
    /// because there is no more value available for the current repetition
    at_end: bool,
}
70 | |||
/// Mutable state threaded through one transcription: the captured bindings
/// plus the stack of active repetition levels.
#[derive(Debug)]
struct ExpandCtx<'a> {
    bindings: &'a Bindings,
    nesting: Vec<NestingState>,
}
76 | |||
77 | fn expand_subtree( | ||
78 | ctx: &mut ExpandCtx, | ||
79 | template: &tt::Subtree, | ||
80 | arena: &mut Vec<tt::TokenTree>, | ||
81 | ) -> ExpandResult<tt::Subtree> { | ||
82 | // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation | ||
83 | let start_elements = arena.len(); | ||
84 | let mut err = None; | ||
85 | for op in parse_template(template) { | ||
86 | let op = match op { | ||
87 | Ok(op) => op, | ||
88 | Err(e) => { | ||
89 | err = Some(e); | ||
90 | break; | ||
91 | } | ||
92 | }; | ||
93 | match op { | ||
94 | Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), | ||
95 | Op::TokenTree(tt::TokenTree::Subtree(tt)) => { | ||
96 | let ExpandResult(tt, e) = expand_subtree(ctx, tt, arena); | ||
97 | err = err.or(e); | ||
98 | arena.push(tt.into()); | ||
99 | } | ||
100 | Op::Var { name, kind: _ } => { | ||
101 | let ExpandResult(fragment, e) = expand_var(ctx, name); | ||
102 | err = err.or(e); | ||
103 | push_fragment(arena, fragment); | ||
104 | } | ||
105 | Op::Repeat { subtree, kind, separator } => { | ||
106 | let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator, arena); | ||
107 | err = err.or(e); | ||
108 | push_fragment(arena, fragment) | ||
109 | } | ||
110 | } | ||
111 | } | ||
112 | // drain the elements added in this instance of expand_subtree | ||
113 | let tts = arena.drain(start_elements..arena.len()).collect(); | ||
114 | ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err) | ||
115 | } | ||
116 | |||
117 | fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> { | ||
118 | if v == "crate" { | ||
119 | // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. | ||
120 | let tt = | ||
121 | tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }) | ||
122 | .into(); | ||
123 | ExpandResult::ok(Fragment::Tokens(tt)) | ||
124 | } else if !ctx.bindings.contains(v) { | ||
125 | // Note that it is possible to have a `$var` inside a macro which is not bound. | ||
126 | // For example: | ||
127 | // ``` | ||
128 | // macro_rules! foo { | ||
129 | // ($a:ident, $b:ident, $c:tt) => { | ||
130 | // macro_rules! bar { | ||
131 | // ($bi:ident) => { | ||
132 | // fn $bi() -> u8 {$c} | ||
133 | // } | ||
134 | // } | ||
135 | // } | ||
136 | // ``` | ||
137 | // We just treat it a normal tokens | ||
138 | let tt = tt::Subtree { | ||
139 | delimiter: None, | ||
140 | token_trees: vec![ | ||
141 | tt::Leaf::from(tt::Punct { | ||
142 | char: '$', | ||
143 | spacing: tt::Spacing::Alone, | ||
144 | id: tt::TokenId::unspecified(), | ||
145 | }) | ||
146 | .into(), | ||
147 | tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() }) | ||
148 | .into(), | ||
149 | ], | ||
150 | } | ||
151 | .into(); | ||
152 | ExpandResult::ok(Fragment::Tokens(tt)) | ||
153 | } else { | ||
154 | ctx.bindings.get(&v, &mut ctx.nesting).map_or_else( | ||
155 | |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)), | ||
156 | |b| ExpandResult::ok(b.clone()), | ||
157 | ) | ||
158 | } | ||
159 | } | ||
160 | |||
/// Expands a `$(...)sep kind` repetition: re-expands `template` once per
/// iteration of the captured bindings, interleaving the separator tokens.
///
/// Returns the concatenated tokens as a delimiter-less subtree; a `+`
/// repetition that produced zero iterations reports `UnexpectedToken`.
fn expand_repeat(
    ctx: &mut ExpandCtx,
    template: &tt::Subtree,
    kind: RepeatKind,
    separator: Option<Separator>,
    arena: &mut Vec<tt::TokenTree>,
) -> ExpandResult<Fragment> {
    let mut buf: Vec<tt::TokenTree> = Vec::new();
    // Push a fresh nesting level; `Bindings::get` flips its `hit`/`at_end`
    // flags while this loop runs.
    ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
    // Dirty hack to make macro-expansion terminate.
    // This should be replaced by a proper macro-by-example implementation
    let limit = 65536;
    let mut has_seps = 0;
    let mut counter = 0;

    loop {
        let ExpandResult(mut t, e) = expand_subtree(ctx, template, arena);
        let nesting_state = ctx.nesting.last_mut().unwrap();
        // Stop when this level ran out of values (`at_end`), or when the body
        // used no variable at all (`!hit`) and so would never terminate.
        if nesting_state.at_end || !nesting_state.hit {
            break;
        }
        nesting_state.idx += 1;
        nesting_state.hit = false;

        counter += 1;
        if counter == limit {
            log::warn!(
                "expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}",
                template,
                ctx
            );
            break;
        }

        // Skip erroneous iterations, but keep consuming values.
        if e.is_some() {
            continue;
        }

        t.delimiter = None;
        push_subtree(&mut buf, t);

        if let Some(ref sep) = separator {
            // `has_seps` records how many separator tokens each iteration
            // appends, so the trailing separator can be popped off below.
            match sep {
                Separator::Ident(ident) => {
                    has_seps = 1;
                    buf.push(tt::Leaf::from(ident.clone()).into());
                }
                Separator::Literal(lit) => {
                    has_seps = 1;
                    buf.push(tt::Leaf::from(lit.clone()).into());
                }

                Separator::Puncts(puncts) => {
                    has_seps = puncts.len();
                    for punct in puncts {
                        buf.push(tt::Leaf::from(*punct).into());
                    }
                }
            }
        }

        // `?` repeats at most once.
        if RepeatKind::ZeroOrOne == kind {
            break;
        }
    }

    ctx.nesting.pop().unwrap();
    // Remove the separator trailing the final iteration.
    for _ in 0..has_seps {
        buf.pop();
    }

    // Check if it is a single token subtree without any delimiter
    // e.g {Delimiter:None> ['>'] /Delimiter:None>}
    let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();

    if RepeatKind::OneOrMore == kind && counter == 0 {
        return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken));
    }
    ExpandResult::ok(Fragment::Tokens(tt))
}
241 | |||
242 | fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) { | ||
243 | match fragment { | ||
244 | Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), | ||
245 | Fragment::Tokens(tt) | Fragment::Ast(tt) => buf.push(tt), | ||
246 | } | ||
247 | } | ||
248 | |||
249 | fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) { | ||
250 | match tt.delimiter { | ||
251 | None => buf.extend(tt.token_trees), | ||
252 | _ => buf.push(tt.into()), | ||
253 | } | ||
254 | } | ||
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs new file mode 100644 index 000000000..6b46a1673 --- /dev/null +++ b/crates/mbe/src/parser.rs | |||
@@ -0,0 +1,184 @@ | |||
1 | //! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token | ||
2 | //! trees. | ||
3 | |||
4 | use smallvec::SmallVec; | ||
5 | use syntax::SmolStr; | ||
6 | |||
7 | use crate::{tt_iter::TtIter, ExpandError}; | ||
8 | |||
/// A single parsed element of a macro-rules pattern or template: a
/// `$name:kind` variable, a `$(...)` repetition, or a plain token tree.
#[derive(Debug)]
pub(crate) enum Op<'a> {
    // `kind` is `None` in templates, where `$name` carries no fragment
    // specifier (see `eat_fragment_kind`).
    Var { name: &'a SmolStr, kind: Option<&'a SmolStr> },
    Repeat { subtree: &'a tt::Subtree, kind: RepeatKind, separator: Option<Separator> },
    TokenTree(&'a tt::TokenTree),
}
15 | |||
/// The repetition operator of a `$(...)` group: `*`, `+`, or `?`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum RepeatKind {
    ZeroOrMore,
    OneOrMore,
    ZeroOrOne,
}
22 | |||
/// The separator between repetition iterations, e.g. the `,` in
/// `$($x:expr),*`: a single literal, a single ident, or a short
/// punctuation sequence (at most three puncts, see `parse_repeat`).
#[derive(Clone, Debug, Eq)]
pub(crate) enum Separator {
    Literal(tt::Literal),
    Ident(tt::Ident),
    Puncts(SmallVec<[tt::Punct; 3]>),
}
29 | |||
30 | // Note that when we compare a Separator, we just care about its textual value. | ||
31 | impl PartialEq for Separator { | ||
32 | fn eq(&self, other: &Separator) -> bool { | ||
33 | use Separator::*; | ||
34 | |||
35 | match (self, other) { | ||
36 | (Ident(ref a), Ident(ref b)) => a.text == b.text, | ||
37 | (Literal(ref a), Literal(ref b)) => a.text == b.text, | ||
38 | (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => { | ||
39 | let a_iter = a.iter().map(|a| a.char); | ||
40 | let b_iter = b.iter().map(|b| b.char); | ||
41 | a_iter.eq(b_iter) | ||
42 | } | ||
43 | _ => false, | ||
44 | } | ||
45 | } | ||
46 | } | ||
47 | |||
/// Parses the right-hand side of a macro-rules rule, lazily yielding one
/// `Op` per top-level element. In template mode, `$name` carries no
/// fragment specifier.
pub(crate) fn parse_template(
    template: &tt::Subtree,
) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
    parse_inner(template, Mode::Template)
}
53 | |||
/// Parses the left-hand side of a macro-rules rule, lazily yielding one
/// `Op` per top-level element. In pattern mode, `$name` is followed by a
/// `:kind` fragment specifier.
pub(crate) fn parse_pattern(
    pattern: &tt::Subtree,
) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
    parse_inner(pattern, Mode::Pattern)
}
59 | |||
/// Distinguishes pattern parsing (`$name` takes a `:kind` fragment
/// specifier) from template parsing (it does not).
#[derive(Clone, Copy)]
enum Mode {
    Pattern,
    Template,
}
65 | |||
66 | fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> { | ||
67 | let mut src = TtIter::new(src); | ||
68 | std::iter::from_fn(move || { | ||
69 | let first = src.next()?; | ||
70 | Some(next_op(first, &mut src, mode)) | ||
71 | }) | ||
72 | } | ||
73 | |||
/// Produces an `ExpandError` for a diagnostic message. The message tokens
/// are currently discarded — every error collapses to `UnexpectedToken`.
macro_rules! err {
    ($($tt:tt)*) => {
        ExpandError::UnexpectedToken
    };
}
79 | |||
/// Early-returns from the enclosing function with `Err(err!(...))`.
macro_rules! bail {
    ($($tt:tt)*) => {
        return Err(err!($($tt)*))
    };
}
85 | |||
/// Parses one operation starting at `first`, consuming further tokens from
/// `src` as needed (`$`-prefixed constructs span several tokens).
fn next_op<'a>(
    first: &'a tt::TokenTree,
    src: &mut TtIter<'a>,
    mode: Mode,
) -> Result<Op<'a>, ExpandError> {
    let res = match first {
        tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. })) => {
            // Note that the '$' itself is a valid token inside macro_rules.
            let second = match src.next() {
                None => return Ok(Op::TokenTree(first)),
                Some(it) => it,
            };
            match second {
                // `$(...)`: a repetition; separator and operator follow the group.
                tt::TokenTree::Subtree(subtree) => {
                    let (separator, kind) = parse_repeat(src)?;
                    Op::Repeat { subtree, separator, kind }
                }
                tt::TokenTree::Leaf(leaf) => match leaf {
                    tt::Leaf::Punct(..) => return Err(ExpandError::UnexpectedToken),
                    // `$ident`: a metavariable (with `:kind` in pattern mode).
                    tt::Leaf::Ident(ident) => {
                        let name = &ident.text;
                        let kind = eat_fragment_kind(src, mode)?;
                        Op::Var { name, kind }
                    }
                    tt::Leaf::Literal(lit) => {
                        // `true`/`false` lex as literals but are accepted as
                        // metavariable names.
                        if is_boolean_literal(lit) {
                            let name = &lit.text;
                            let kind = eat_fragment_kind(src, mode)?;
                            Op::Var { name, kind }
                        } else {
                            bail!("bad var 2");
                        }
                    }
                },
            }
        }
        // Anything not introduced by `$` is passed through verbatim.
        tt => Op::TokenTree(tt),
    };
    Ok(res)
}
126 | |||
127 | fn eat_fragment_kind<'a>( | ||
128 | src: &mut TtIter<'a>, | ||
129 | mode: Mode, | ||
130 | ) -> Result<Option<&'a SmolStr>, ExpandError> { | ||
131 | if let Mode::Pattern = mode { | ||
132 | src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?; | ||
133 | let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?; | ||
134 | return Ok(Some(&ident.text)); | ||
135 | }; | ||
136 | Ok(None) | ||
137 | } | ||
138 | |||
139 | fn is_boolean_literal(lit: &tt::Literal) -> bool { | ||
140 | matches!(lit.text.as_str(), "true" | "false") | ||
141 | } | ||
142 | |||
/// Parses the tail of a repetition — `sep? (*|+|?)` — after the `$(...)`
/// group has been consumed.
///
/// The separator may be a single ident, a single literal, or up to three
/// punctuation characters; the first `*`/`+`/`?` punct ends the parse.
fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), ExpandError> {
    // Starts as an empty punct sequence, meaning "no separator seen yet".
    let mut separator = Separator::Puncts(SmallVec::new());
    for tt in src {
        let tt = match tt {
            tt::TokenTree::Leaf(leaf) => leaf,
            tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat),
        };
        // Has any separator token been collected so far?
        let has_sep = match &separator {
            Separator::Puncts(puncts) => !puncts.is_empty(),
            _ => true,
        };
        match tt {
            // An ident/literal separator must be the only separator token.
            tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
                return Err(ExpandError::InvalidRepeat)
            }
            tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
            tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
            tt::Leaf::Punct(punct) => {
                let repeat_kind = match punct.char {
                    '*' => RepeatKind::ZeroOrMore,
                    '+' => RepeatKind::OneOrMore,
                    '?' => RepeatKind::ZeroOrOne,
                    _ => {
                        // Not a repetition operator: accumulate as part of a
                        // punct separator (capped at three characters).
                        match &mut separator {
                            Separator::Puncts(puncts) => {
                                if puncts.len() == 3 {
                                    return Err(ExpandError::InvalidRepeat);
                                }
                                puncts.push(punct.clone())
                            }
                            _ => return Err(ExpandError::InvalidRepeat),
                        }
                        continue;
                    }
                };
                let separator = if has_sep { Some(separator) } else { None };
                return Ok((separator, repeat_kind));
            }
        }
    }
    // Ran out of tokens without seeing `*`, `+`, or `?`.
    Err(ExpandError::InvalidRepeat)
}
diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs new file mode 100644 index 000000000..41461b315 --- /dev/null +++ b/crates/mbe/src/subtree_source.rs | |||
@@ -0,0 +1,197 @@ | |||
1 | //! FIXME: write short doc here | ||
2 | |||
3 | use parser::{Token, TokenSource}; | ||
4 | use std::cell::{Cell, Ref, RefCell}; | ||
5 | use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; | ||
6 | use tt::buffer::{Cursor, TokenBuffer}; | ||
7 | |||
/// A parser-facing token converted from a `tt` leaf or delimiter: its syntax
/// kind, whether it is joint to the following token, and its text.
#[derive(Debug, Clone, Eq, PartialEq)]
struct TtToken {
    pub kind: SyntaxKind,
    pub is_joint_to_next: bool,
    pub text: SmolStr,
}
14 | |||
/// A `TokenSource` over a `tt::TokenBuffer`, feeding macro token trees to
/// the parser. Tokens are converted lazily and memoized.
pub(crate) struct SubtreeTokenSource<'a> {
    // Cursor into the buffer, positioned just past the last token cached.
    cached_cursor: Cell<Cursor<'a>>,
    // Memoized converted tokens; `None` marks positions at/past EOF.
    cached: RefCell<Vec<Option<TtToken>>>,
    // Current token and its index into `cached`.
    curr: (Token, usize),
}
20 | |||
impl<'a> SubtreeTokenSource<'a> {
    // Helper function used in test
    #[cfg(test)]
    pub fn text(&self) -> SmolStr {
        // Text of the current token, or "" when the source is at EOF.
        match *self.get(self.curr.1) {
            Some(ref tt) => tt.text.clone(),
            _ => SmolStr::new(""),
        }
    }
}
31 | |||
impl<'a> SubtreeTokenSource<'a> {
    /// Creates a token source positioned at the start of `buffer`.
    pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
        let cursor = buffer.begin();

        let mut res = SubtreeTokenSource {
            // Placeholder; replaced with the real first token just below.
            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
            cached_cursor: Cell::new(cursor),
            cached: RefCell::new(Vec::with_capacity(10)),
        };
        res.curr = (res.mk_token(0), 0);
        res
    }

    // Builds the parser-facing `Token` at position `pos`; EOF past the end.
    fn mk_token(&self, pos: usize) -> Token {
        match *self.get(pos) {
            Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
            None => Token { kind: EOF, is_jointed_to_next: false },
        }
    }

    // Returns the token at `pos`, lazily converting and caching tokens from
    // the underlying buffer until `pos` is covered.
    fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
        // Recognizes a `'` punct followed by an ident as one LIFETIME token,
        // returning the cursor past the ident and the combined text.
        fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
            let tkn = c.token_tree();

            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
                if punct.char == '\'' {
                    let next = c.bump();
                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
                        let res_cursor = next.bump();
                        let text = SmolStr::new("'".to_string() + &ident.to_string());

                        return Some((res_cursor, text));
                    } else {
                        panic!("Next token must be ident : {:#?}", next.token_tree());
                    }
                }
            }

            None
        }

        // Fast path: the requested position is already cached.
        if pos < self.cached.borrow().len() {
            return Ref::map(self.cached.borrow(), |c| &c[pos]);
        }

        {
            let mut cached = self.cached.borrow_mut();
            while pos >= cached.len() {
                let cursor = self.cached_cursor.get();
                if cursor.eof() {
                    // Pad with `None` so every position up to `pos` reads EOF.
                    cached.push(None);
                    continue;
                }

                if let Some((curr, text)) = is_lifetime(cursor) {
                    cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text }));
                    self.cached_cursor.set(curr);
                    continue;
                }

                match cursor.token_tree() {
                    Some(tt::TokenTree::Leaf(leaf)) => {
                        cached.push(Some(convert_leaf(&leaf)));
                        self.cached_cursor.set(cursor.bump());
                    }
                    Some(tt::TokenTree::Subtree(subtree)) => {
                        // Descend into the subtree and emit its open delimiter.
                        self.cached_cursor.set(cursor.subtree().unwrap());
                        cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
                    }
                    None => {
                        // End of a subtree: emit its close delimiter and step out.
                        if let Some(subtree) = cursor.end() {
                            cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
                            self.cached_cursor.set(cursor.bump());
                        }
                    }
                }
            }
        }

        Ref::map(self.cached.borrow(), |c| &c[pos])
    }
}
114 | |||
115 | impl<'a> TokenSource for SubtreeTokenSource<'a> { | ||
116 | fn current(&self) -> Token { | ||
117 | self.curr.0 | ||
118 | } | ||
119 | |||
120 | /// Lookahead n token | ||
121 | fn lookahead_nth(&self, n: usize) -> Token { | ||
122 | self.mk_token(self.curr.1 + n) | ||
123 | } | ||
124 | |||
125 | /// bump cursor to next token | ||
126 | fn bump(&mut self) { | ||
127 | if self.current().kind == EOF { | ||
128 | return; | ||
129 | } | ||
130 | |||
131 | self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1); | ||
132 | } | ||
133 | |||
134 | /// Is the current token a specified keyword? | ||
135 | fn is_keyword(&self, kw: &str) -> bool { | ||
136 | match *self.get(self.curr.1) { | ||
137 | Some(ref t) => t.text == *kw, | ||
138 | _ => false, | ||
139 | } | ||
140 | } | ||
141 | } | ||
142 | |||
143 | fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken { | ||
144 | let (kinds, texts) = match d { | ||
145 | Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"), | ||
146 | Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"), | ||
147 | Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"), | ||
148 | None => ([L_DOLLAR, R_DOLLAR], ""), | ||
149 | }; | ||
150 | |||
151 | let idx = closing as usize; | ||
152 | let kind = kinds[idx]; | ||
153 | let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" }; | ||
154 | TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } | ||
155 | } | ||
156 | |||
157 | fn convert_literal(l: &tt::Literal) -> TtToken { | ||
158 | let kind = lex_single_syntax_kind(&l.text) | ||
159 | .map(|(kind, _error)| kind) | ||
160 | .filter(|kind| kind.is_literal()) | ||
161 | .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l)); | ||
162 | |||
163 | TtToken { kind, is_joint_to_next: false, text: l.text.clone() } | ||
164 | } | ||
165 | |||
166 | fn convert_ident(ident: &tt::Ident) -> TtToken { | ||
167 | let kind = match ident.text.as_ref() { | ||
168 | "true" => T![true], | ||
169 | "false" => T![false], | ||
170 | i if i.starts_with('\'') => LIFETIME, | ||
171 | _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT), | ||
172 | }; | ||
173 | |||
174 | TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } | ||
175 | } | ||
176 | |||
177 | fn convert_punct(p: tt::Punct) -> TtToken { | ||
178 | let kind = match SyntaxKind::from_char(p.char) { | ||
179 | None => panic!("{:#?} is not a valid punct", p), | ||
180 | Some(kind) => kind, | ||
181 | }; | ||
182 | |||
183 | let text = { | ||
184 | let mut buf = [0u8; 4]; | ||
185 | let s: &str = p.char.encode_utf8(&mut buf); | ||
186 | SmolStr::new(s) | ||
187 | }; | ||
188 | TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } | ||
189 | } | ||
190 | |||
191 | fn convert_leaf(leaf: &tt::Leaf) -> TtToken { | ||
192 | match leaf { | ||
193 | tt::Leaf::Literal(l) => convert_literal(l), | ||
194 | tt::Leaf::Ident(ident) => convert_ident(ident), | ||
195 | tt::Leaf::Punct(punct) => convert_punct(*punct), | ||
196 | } | ||
197 | } | ||
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs new file mode 100644 index 000000000..a8ad917fb --- /dev/null +++ b/crates/mbe/src/syntax_bridge.rs | |||
@@ -0,0 +1,832 @@ | |||
1 | //! FIXME: write short doc here | ||
2 | |||
3 | use parser::{FragmentKind, ParseError, TreeSink}; | ||
4 | use rustc_hash::FxHashMap; | ||
5 | use syntax::{ | ||
6 | ast::{self, make::tokens::doc_comment}, | ||
7 | tokenize, AstToken, Parse, SmolStr, SyntaxKind, | ||
8 | SyntaxKind::*, | ||
9 | SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T, | ||
10 | }; | ||
11 | use tt::buffer::{Cursor, TokenBuffer}; | ||
12 | |||
13 | use crate::subtree_source::SubtreeTokenSource; | ||
14 | use crate::ExpandError; | ||
15 | |||
/// The source range recorded for a token id: either a single token's range,
/// or the (open, close) ranges of a delimiter pair.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenTextRange {
    Token(TextRange),
    Delimiter(TextRange, TextRange),
}
21 | |||
22 | impl TokenTextRange { | ||
23 | pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { | ||
24 | match self { | ||
25 | TokenTextRange::Token(it) => Some(it), | ||
26 | TokenTextRange::Delimiter(open, close) => match kind { | ||
27 | T!['{'] | T!['('] | T!['['] => Some(open), | ||
28 | T!['}'] | T![')'] | T![']'] => Some(close), | ||
29 | _ => None, | ||
30 | }, | ||
31 | } | ||
32 | } | ||
33 | } | ||
34 | |||
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct TokenMap {
    /// Maps `tt::TokenId` to the *relative* source range.
    /// Lookups in both directions are linear scans over this list.
    entries: Vec<(tt::TokenId, TokenTextRange)>,
}
41 | |||
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
/// will consume).
///
/// Returns the subtree together with a `TokenMap` relating token ids back to
/// source ranges; see `syntax_node_to_token_tree`.
pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> {
    syntax_node_to_token_tree(ast.syntax())
}
47 | |||
48 | /// Convert the syntax node to a `TokenTree` (what macro | ||
49 | /// will consume). | ||
50 | pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> { | ||
51 | let global_offset = node.text_range().start(); | ||
52 | let mut c = Convertor::new(node, global_offset); | ||
53 | let subtree = c.go()?; | ||
54 | Some((subtree, c.id_alloc.map)) | ||
55 | } | ||
56 | |||
57 | // The following items are what `rustc` macro can be parsed into : | ||
58 | // link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141 | ||
59 | // * Expr(P<ast::Expr>) -> token_tree_to_expr | ||
60 | // * Pat(P<ast::Pat>) -> token_tree_to_pat | ||
61 | // * Ty(P<ast::Ty>) -> token_tree_to_ty | ||
62 | // * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts | ||
63 | // * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items | ||
64 | // | ||
65 | // * TraitItems(SmallVec<[ast::TraitItem; 1]>) | ||
66 | // * AssocItems(SmallVec<[ast::AssocItem; 1]>) | ||
67 | // * ForeignItems(SmallVec<[ast::ForeignItem; 1]> | ||
68 | |||
/// Parses `tt` back into a syntax tree for the given fragment kind.
///
/// Returns the parse together with a `TokenMap` relating the new tree's
/// ranges back to `tt`'s token ids; fails with `ConversionError` when the
/// tokens do not form exactly one tree of that kind.
pub fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    fragment_kind: FragmentKind,
) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
    // `tmp` keeps a wrapped copy of a delimited subtree alive long enough for
    // the `tokens` slice borrowed below.
    let tmp;
    let tokens = match tt {
        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
        _ => {
            tmp = [tt.clone().into()];
            &tmp[..]
        }
    };
    let buffer = TokenBuffer::new(&tokens);
    let mut token_source = SubtreeTokenSource::new(&buffer);
    let mut tree_sink = TtTreeSink::new(buffer.begin());
    parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
    // A fragment must parse as exactly one root node.
    if tree_sink.roots.len() != 1 {
        return Err(ExpandError::ConversionError);
    }
    //FIXME: would be cool to report errors
    let (parse, range_map) = tree_sink.finish();
    Ok((parse, range_map))
}
92 | |||
93 | /// Convert a string to a `TokenTree` | ||
94 | pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { | ||
95 | let (tokens, errors) = tokenize(text); | ||
96 | if !errors.is_empty() { | ||
97 | return None; | ||
98 | } | ||
99 | |||
100 | let mut conv = RawConvertor { | ||
101 | text, | ||
102 | offset: TextSize::default(), | ||
103 | inner: tokens.iter(), | ||
104 | id_alloc: TokenIdAlloc { | ||
105 | map: Default::default(), | ||
106 | global_offset: TextSize::default(), | ||
107 | next_id: 0, | ||
108 | }, | ||
109 | }; | ||
110 | |||
111 | let subtree = conv.go()?; | ||
112 | Some((subtree, conv.id_alloc.map)) | ||
113 | } | ||
114 | |||
115 | impl TokenMap { | ||
116 | pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { | ||
117 | let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { | ||
118 | TokenTextRange::Token(it) => *it == relative_range, | ||
119 | TokenTextRange::Delimiter(open, close) => { | ||
120 | *open == relative_range || *close == relative_range | ||
121 | } | ||
122 | })?; | ||
123 | Some(token_id) | ||
124 | } | ||
125 | |||
126 | pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> { | ||
127 | let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; | ||
128 | Some(range) | ||
129 | } | ||
130 | |||
131 | fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { | ||
132 | self.entries.push((token_id, TokenTextRange::Token(relative_range))); | ||
133 | } | ||
134 | |||
135 | fn insert_delim( | ||
136 | &mut self, | ||
137 | token_id: tt::TokenId, | ||
138 | open_relative_range: TextRange, | ||
139 | close_relative_range: TextRange, | ||
140 | ) -> usize { | ||
141 | let res = self.entries.len(); | ||
142 | self.entries | ||
143 | .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range))); | ||
144 | res | ||
145 | } | ||
146 | |||
147 | fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { | ||
148 | let (_, token_text_range) = &mut self.entries[idx]; | ||
149 | if let TokenTextRange::Delimiter(dim, _) = token_text_range { | ||
150 | *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range); | ||
151 | } | ||
152 | } | ||
153 | |||
154 | fn remove_delim(&mut self, idx: usize) { | ||
155 | // FIXME: This could be accidently quadratic | ||
156 | self.entries.remove(idx); | ||
157 | } | ||
158 | } | ||
159 | |||
160 | /// Returns the textual content of a doc comment block as a quoted string | ||
161 | /// That is, strips leading `///` (or `/**`, etc) | ||
162 | /// and strips the ending `*/` | ||
163 | /// And then quote the string, which is needed to convert to `tt::Literal` | ||
164 | fn doc_comment_text(comment: &ast::Comment) -> SmolStr { | ||
165 | let prefix_len = comment.prefix().len(); | ||
166 | let mut text = &comment.text()[prefix_len..]; | ||
167 | |||
168 | // Remove ending "*/" | ||
169 | if comment.kind().shape == ast::CommentShape::Block { | ||
170 | text = &text[0..text.len() - 2]; | ||
171 | } | ||
172 | |||
173 | // Quote the string | ||
174 | // Note that `tt::Literal` expect an escaped string | ||
175 | let text = format!("{:?}", text.escape_default().to_string()); | ||
176 | text.into() | ||
177 | } | ||
178 | |||
179 | fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> { | ||
180 | let comment = ast::Comment::cast(token.clone())?; | ||
181 | let doc = comment.kind().doc?; | ||
182 | |||
183 | // Make `doc="\" Comments\"" | ||
184 | let mut meta_tkns = Vec::new(); | ||
185 | meta_tkns.push(mk_ident("doc")); | ||
186 | meta_tkns.push(mk_punct('=')); | ||
187 | meta_tkns.push(mk_doc_literal(&comment)); | ||
188 | |||
189 | // Make `#![]` | ||
190 | let mut token_trees = Vec::new(); | ||
191 | token_trees.push(mk_punct('#')); | ||
192 | if let ast::CommentPlacement::Inner = doc { | ||
193 | token_trees.push(mk_punct('!')); | ||
194 | } | ||
195 | token_trees.push(tt::TokenTree::from(tt::Subtree { | ||
196 | delimiter: Some(tt::Delimiter { | ||
197 | kind: tt::DelimiterKind::Bracket, | ||
198 | id: tt::TokenId::unspecified(), | ||
199 | }), | ||
200 | token_trees: meta_tkns, | ||
201 | })); | ||
202 | |||
203 | return Some(token_trees); | ||
204 | |||
205 | // Helper functions | ||
206 | fn mk_ident(s: &str) -> tt::TokenTree { | ||
207 | tt::TokenTree::from(tt::Leaf::from(tt::Ident { | ||
208 | text: s.into(), | ||
209 | id: tt::TokenId::unspecified(), | ||
210 | })) | ||
211 | } | ||
212 | |||
213 | fn mk_punct(c: char) -> tt::TokenTree { | ||
214 | tt::TokenTree::from(tt::Leaf::from(tt::Punct { | ||
215 | char: c, | ||
216 | spacing: tt::Spacing::Alone, | ||
217 | id: tt::TokenId::unspecified(), | ||
218 | })) | ||
219 | } | ||
220 | |||
221 | fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree { | ||
222 | let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() }; | ||
223 | |||
224 | tt::TokenTree::from(tt::Leaf::from(lit)) | ||
225 | } | ||
226 | } | ||
227 | |||
/// Allocates fresh, sequential `tt::TokenId`s and records, in a `TokenMap`,
/// the source text range each id maps back to.
struct TokenIdAlloc {
    // Accumulated id -> relative-text-range mapping.
    map: TokenMap,
    // Offset subtracted from absolute ranges to make stored ranges relative.
    global_offset: TextSize,
    // Next id to hand out; incremented on every allocation.
    next_id: u32,
}
233 | |||
234 | impl TokenIdAlloc { | ||
235 | fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId { | ||
236 | let relative_range = absolute_range - self.global_offset; | ||
237 | let token_id = tt::TokenId(self.next_id); | ||
238 | self.next_id += 1; | ||
239 | self.map.insert(token_id, relative_range); | ||
240 | token_id | ||
241 | } | ||
242 | |||
243 | fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) { | ||
244 | let token_id = tt::TokenId(self.next_id); | ||
245 | self.next_id += 1; | ||
246 | let idx = self.map.insert_delim( | ||
247 | token_id, | ||
248 | open_abs_range - self.global_offset, | ||
249 | open_abs_range - self.global_offset, | ||
250 | ); | ||
251 | (token_id, idx) | ||
252 | } | ||
253 | |||
254 | fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) { | ||
255 | match close_abs_range { | ||
256 | None => { | ||
257 | self.map.remove_delim(idx); | ||
258 | } | ||
259 | Some(close) => { | ||
260 | self.map.update_close_delim(idx, close - self.global_offset); | ||
261 | } | ||
262 | } | ||
263 | } | ||
264 | } | ||
265 | |||
/// A raw-token (straight from the lexer) convertor: walks a slice of
/// `RawToken`s alongside the original text, feeding the `TokenConvertor`
/// machinery.
struct RawConvertor<'a> {
    // The full source text the tokens were lexed from.
    text: &'a str,
    // Absolute offset of the next token within `text`.
    offset: TextSize,
    id_alloc: TokenIdAlloc,
    inner: std::slice::Iter<'a, RawToken>,
}
273 | |||
/// Abstraction over a single source token, so the conversion logic can work
/// both on raw lexer tokens and on syntax-tree tokens.
trait SrcToken: std::fmt::Debug {
    fn kind(&self) -> SyntaxKind;

    // Single-char view of the token (used for punctuation), if available.
    fn to_char(&self) -> Option<char>;

    fn to_text(&self) -> SmolStr;
}
281 | |||
/// Shared driver that turns a stream of source tokens into a `tt::Subtree`.
///
/// Implementors supply token access (`bump`/`peek`), doc-comment conversion
/// and id allocation; the default methods `go`/`collect_leaf` contain the
/// conversion logic itself.
trait TokenConvertor {
    type Token: SrcToken;

    /// Converts all remaining tokens. Returns `None` for empty input; a sole
    /// top-level subtree is returned unwrapped rather than re-nested.
    fn go(&mut self) -> Option<tt::Subtree> {
        let mut subtree = tt::Subtree::default();
        subtree.delimiter = None;
        while self.peek().is_some() {
            self.collect_leaf(&mut subtree.token_trees);
        }
        if subtree.token_trees.is_empty() {
            return None;
        }
        if subtree.token_trees.len() == 1 {
            if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
                return Some(first.clone());
            }
        }
        Some(subtree)
    }

    /// Converts the next token — and, when it opens a delimiter, everything
    /// up to the matching close — pushing the produced trees onto `result`.
    fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
        let (token, range) = match self.bump() {
            None => return,
            Some(it) => it,
        };

        let k: SyntaxKind = token.kind();
        if k == COMMENT {
            // Comments survive only if they are doc comments, which become
            // `#[doc = ...]` attribute token trees.
            if let Some(tokens) = self.convert_doc_comment(&token) {
                result.extend(tokens);
            }
            return;
        }

        result.push(if k.is_punct() {
            assert_eq!(range.len(), TextSize::of('.'));
            let delim = match k {
                T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
                T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
                T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])),
                _ => None,
            };

            if let Some((kind, closed)) = delim {
                // Open delimiter: recursively collect until the close token.
                let mut subtree = tt::Subtree::default();
                let (id, idx) = self.id_alloc().open_delim(range);
                subtree.delimiter = Some(tt::Delimiter { kind, id });

                while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
                    self.collect_leaf(&mut subtree.token_trees);
                }
                let last_range = match self.bump() {
                    None => {
                        // For error resilience, we insert a char punct for the opening delim here
                        // and splice the collected children into the parent.
                        self.id_alloc().close_delim(idx, None);
                        let leaf: tt::Leaf = tt::Punct {
                            id: self.id_alloc().alloc(range),
                            char: token.to_char().unwrap(),
                            spacing: tt::Spacing::Alone,
                        }
                        .into();
                        result.push(leaf.into());
                        result.extend(subtree.token_trees);
                        return;
                    }
                    Some(it) => it.1,
                };
                self.id_alloc().close_delim(idx, Some(last_range));
                subtree.into()
            } else {
                // Plain punct: spacing is `Joint` only when the next token is
                // punctuation that is neither trivia nor an open delimiter.
                let spacing = match self.peek() {
                    Some(next)
                        if next.kind().is_trivia()
                            || next.kind() == T!['[']
                            || next.kind() == T!['{']
                            || next.kind() == T!['('] =>
                    {
                        tt::Spacing::Alone
                    }
                    Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
                    _ => tt::Spacing::Alone,
                };
                let char = match token.to_char() {
                    Some(c) => c,
                    None => {
                        panic!("Token from lexer must be single char: token = {:#?}", token);
                    }
                };
                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
            }
        } else {
            macro_rules! make_leaf {
                ($i:ident) => {
                    tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into()
                };
            }
            let leaf: tt::Leaf = match k {
                T![true] | T![false] => make_leaf!(Ident),
                IDENT => make_leaf!(Ident),
                k if k.is_keyword() => make_leaf!(Ident),
                k if k.is_literal() => make_leaf!(Literal),
                LIFETIME => {
                    // A lifetime is split into a joint apostrophe punct plus
                    // an identifier, each with its own id and sub-range.
                    let char_unit = TextSize::of('\'');
                    let r = TextRange::at(range.start(), char_unit);
                    let apostrophe = tt::Leaf::from(tt::Punct {
                        char: '\'',
                        spacing: tt::Spacing::Joint,
                        id: self.id_alloc().alloc(r),
                    });
                    result.push(apostrophe.into());

                    let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text()[1..]),
                        id: self.id_alloc().alloc(r),
                    });
                    result.push(ident.into());
                    return;
                }
                // Anything else (trivia etc.) is dropped.
                _ => return,
            };

            leaf.into()
        });
    }

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;

    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;

    fn peek(&self) -> Option<Self::Token>;

    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
416 | |||
417 | impl<'a> SrcToken for (RawToken, &'a str) { | ||
418 | fn kind(&self) -> SyntaxKind { | ||
419 | self.0.kind | ||
420 | } | ||
421 | |||
422 | fn to_char(&self) -> Option<char> { | ||
423 | self.1.chars().next() | ||
424 | } | ||
425 | |||
426 | fn to_text(&self) -> SmolStr { | ||
427 | self.1.into() | ||
428 | } | ||
429 | } | ||
430 | |||
431 | impl RawConvertor<'_> {} | ||
432 | |||
433 | impl<'a> TokenConvertor for RawConvertor<'a> { | ||
434 | type Token = (RawToken, &'a str); | ||
435 | |||
436 | fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> { | ||
437 | convert_doc_comment(&doc_comment(token.1)) | ||
438 | } | ||
439 | |||
440 | fn bump(&mut self) -> Option<(Self::Token, TextRange)> { | ||
441 | let token = self.inner.next()?; | ||
442 | let range = TextRange::at(self.offset, token.len); | ||
443 | self.offset += token.len; | ||
444 | |||
445 | Some(((*token, &self.text[range]), range)) | ||
446 | } | ||
447 | |||
448 | fn peek(&self) -> Option<Self::Token> { | ||
449 | let token = self.inner.as_slice().get(0).cloned(); | ||
450 | |||
451 | token.map(|it| { | ||
452 | let range = TextRange::at(self.offset, it.len); | ||
453 | (it, &self.text[range]) | ||
454 | }) | ||
455 | } | ||
456 | |||
457 | fn id_alloc(&mut self) -> &mut TokenIdAlloc { | ||
458 | &mut self.id_alloc | ||
459 | } | ||
460 | } | ||
461 | |||
/// Converts the tokens of a `SyntaxNode` into a `tt::Subtree`, splitting
/// multi-char punctuation into single-char punct tokens along the way.
struct Convertor {
    id_alloc: TokenIdAlloc,
    // Next syntax token to emit, if any.
    current: Option<SyntaxToken>,
    // Range of the node being converted; tokens outside it are ignored.
    range: TextRange,
    // When a multi-char punct token is being split, the token plus the offset
    // of the char most recently handed out.
    punct_offset: Option<(SyntaxToken, TextSize)>,
}
468 | |||
469 | impl Convertor { | ||
470 | fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor { | ||
471 | Convertor { | ||
472 | id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, | ||
473 | current: node.first_token(), | ||
474 | range: node.text_range(), | ||
475 | punct_offset: None, | ||
476 | } | ||
477 | } | ||
478 | } | ||
479 | |||
/// A token originating from a syntax tree.
///
/// NOTE(review): `Ordiniary` is a typo for `Ordinary`; it is kept as-is here
/// because the variant name is referenced by other impls in this file.
#[derive(Debug)]
enum SynToken {
    // A whole syntax token.
    Ordiniary(SyntaxToken),
    // A single char at the given offset inside a (possibly multi-char)
    // punctuation token.
    Punch(SyntaxToken, TextSize),
}
485 | |||
486 | impl SynToken { | ||
487 | fn token(&self) -> &SyntaxToken { | ||
488 | match self { | ||
489 | SynToken::Ordiniary(it) => it, | ||
490 | SynToken::Punch(it, _) => it, | ||
491 | } | ||
492 | } | ||
493 | } | ||
494 | |||
495 | impl SrcToken for SynToken { | ||
496 | fn kind(&self) -> SyntaxKind { | ||
497 | self.token().kind() | ||
498 | } | ||
499 | fn to_char(&self) -> Option<char> { | ||
500 | match self { | ||
501 | SynToken::Ordiniary(_) => None, | ||
502 | SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), | ||
503 | } | ||
504 | } | ||
505 | fn to_text(&self) -> SmolStr { | ||
506 | self.token().text().clone() | ||
507 | } | ||
508 | } | ||
509 | |||
impl TokenConvertor for Convertor {
    type Token = SynToken;
    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
        convert_doc_comment(token.token())
    }

    /// Advances by one char: continues splitting a multi-char punct token if
    /// one is in progress, otherwise moves to the next syntax token.
    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        // Mid-split: hand out the next char of the current punct token.
        if let Some((punct, offset)) = self.punct_offset.clone() {
            if usize::from(offset) + 1 < punct.text().len() {
                let offset = offset + TextSize::of('.');
                let range = punct.text_range();
                self.punct_offset = Some((punct.clone(), offset));
                let range = TextRange::at(range.start() + offset, TextSize::of('.'));
                return Some((SynToken::Punch(punct, offset), range));
            }
        }

        let curr = self.current.clone()?;
        // Stop at tokens outside the node being converted.
        if !&self.range.contains_range(curr.text_range()) {
            return None;
        }
        self.current = curr.next_token();

        let token = if curr.kind().is_punct() {
            // Begin splitting: emit only the first char of the punct token.
            let range = curr.text_range();
            let range = TextRange::at(range.start(), TextSize::of('.'));
            self.punct_offset = Some((curr.clone(), 0.into()));
            (SynToken::Punch(curr, 0.into()), range)
        } else {
            self.punct_offset = None;
            let range = curr.text_range();
            (SynToken::Ordiniary(curr), range)
        };

        Some(token)
    }

    /// Like `bump` but without consuming: peeks the next split char, or the
    /// next syntax token within range.
    fn peek(&self) -> Option<Self::Token> {
        if let Some((punct, mut offset)) = self.punct_offset.clone() {
            offset = offset + TextSize::of('.');
            if usize::from(offset) < punct.text().len() {
                return Some(SynToken::Punch(punct, offset));
            }
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }

        let token = if curr.kind().is_punct() {
            SynToken::Punch(curr, 0.into())
        } else {
            SynToken::Ordiniary(curr)
        };
        Some(token)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}
572 | |||
/// A `TreeSink` that renders token trees back into text, feeds that text to a
/// `SyntaxTreeBuilder`, and records a `TokenMap` along the way.
struct TtTreeSink<'a> {
    // Text of the tokens emitted since the last `token()` call.
    buf: String,
    cursor: Cursor<'a>,
    // Maps an open delimiter's token id to the text position where it was
    // emitted, so it can later be paired with its close position.
    open_delims: FxHashMap<tt::TokenId, TextSize>,
    text_pos: TextSize,
    inner: SyntaxTreeBuilder,
    token_map: TokenMap,

    // Number of roots.
    // Used to detect an ill-formed tree that does not have a single root.
    roots: smallvec::SmallVec<[usize; 1]>,
}
585 | |||
586 | impl<'a> TtTreeSink<'a> { | ||
587 | fn new(cursor: Cursor<'a>) -> Self { | ||
588 | TtTreeSink { | ||
589 | buf: String::new(), | ||
590 | cursor, | ||
591 | open_delims: FxHashMap::default(), | ||
592 | text_pos: 0.into(), | ||
593 | inner: SyntaxTreeBuilder::default(), | ||
594 | roots: smallvec::SmallVec::new(), | ||
595 | token_map: TokenMap::default(), | ||
596 | } | ||
597 | } | ||
598 | |||
599 | fn finish(self) -> (Parse<SyntaxNode>, TokenMap) { | ||
600 | (self.inner.finish(), self.token_map) | ||
601 | } | ||
602 | } | ||
603 | |||
604 | fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr { | ||
605 | let texts = match d { | ||
606 | Some(tt::DelimiterKind::Parenthesis) => "()", | ||
607 | Some(tt::DelimiterKind::Brace) => "{}", | ||
608 | Some(tt::DelimiterKind::Bracket) => "[]", | ||
609 | None => return "".into(), | ||
610 | }; | ||
611 | |||
612 | let idx = closing as usize; | ||
613 | let text = &texts[idx..texts.len() - (1 - idx)]; | ||
614 | text.into() | ||
615 | } | ||
616 | |||
impl<'a> TreeSink for TtTreeSink<'a> {
    /// Emits `n_tokens` token trees from the cursor as one token of `kind`,
    /// advancing `text_pos` and recording ranges in the token map.
    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
        if kind == L_DOLLAR || kind == R_DOLLAR {
            // Pseudo-delimiters produce no text; skip the whole subtree.
            self.cursor = self.cursor.bump_subtree();
            return;
        }
        if kind == LIFETIME {
            // A lifetime was split into an apostrophe + ident pair upstream.
            n_tokens = 2;
        }

        let mut last = self.cursor;
        for _ in 0..n_tokens {
            if self.cursor.eof() {
                break;
            }
            last = self.cursor;
            let text: SmolStr = match self.cursor.token_tree() {
                Some(tt::TokenTree::Leaf(leaf)) => {
                    // Mark the range if needed
                    let (text, id) = match leaf {
                        tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
                        tt::Leaf::Punct(punct) => {
                            (SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id)
                        }
                        tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
                    };
                    let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
                    self.token_map.insert(id, range);
                    self.cursor = self.cursor.bump();
                    text
                }
                Some(tt::TokenTree::Subtree(subtree)) => {
                    // Descend into the subtree, remembering where its open
                    // delimiter was emitted so the close can be paired later.
                    self.cursor = self.cursor.subtree().unwrap();
                    if let Some(id) = subtree.delimiter.map(|it| it.id) {
                        self.open_delims.insert(id, self.text_pos);
                    }
                    delim_to_str(subtree.delimiter_kind(), false)
                }
                None => {
                    // End of a subtree: emit the close delimiter and record
                    // the open/close range pair in the token map.
                    if let Some(parent) = self.cursor.end() {
                        self.cursor = self.cursor.bump();
                        if let Some(id) = parent.delimiter.map(|it| it.id) {
                            if let Some(open_delim) = self.open_delims.get(&id) {
                                let open_range = TextRange::at(*open_delim, TextSize::of('('));
                                let close_range = TextRange::at(self.text_pos, TextSize::of('('));
                                self.token_map.insert_delim(id, open_range, close_range);
                            }
                        }
                        delim_to_str(parent.delimiter_kind(), true)
                    } else {
                        continue;
                    }
                }
            };
            self.buf += &text;
            self.text_pos += TextSize::of(text.as_str());
        }

        let text = SmolStr::new(self.buf.as_str());
        self.buf.clear();
        self.inner.token(kind, text);

        // Add whitespace between adjacent puncts
        let next = last.bump();
        if let (
            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
        ) = (last.token_tree(), next.token_tree())
        {
            // Note: We always assume the semi-colon would be the last token in
            // other parts of RA such that we don't add whitespace here.
            if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
                self.inner.token(WHITESPACE, " ".into());
                self.text_pos += TextSize::of(' ');
            }
        }
    }

    fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(kind);

        // Track per-root depth to detect a multi-root (ill-formed) tree.
        match self.roots.last_mut() {
            None | Some(0) => self.roots.push(1),
            Some(ref mut n) => **n += 1,
        };
    }

    fn finish_node(&mut self) {
        self.inner.finish_node();
        *self.roots.last_mut().unwrap() -= 1;
    }

    fn error(&mut self, error: ParseError) {
        self.inner.error(error, self.text_pos)
    }
}
713 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::parse_macro;
    use parser::TokenSource;
    use syntax::{
        algo::{insert_children, InsertPosition},
        ast::AstNode,
    };

    // Literal tokens must survive the round trip through
    // `SubtreeTokenSource` with kind and text intact.
    #[test]
    fn convert_tt_token_source() {
        let expansion = parse_macro(
            r#"
            macro_rules! literals {
                ($i:ident) => {
                    {
                        let a = 'c';
                        let c = 1000;
                        let f = 12E+99_f64;
                        let s = "rust1";
                    }
                }
            }
            "#,
        )
        .expand_tt("literals!(foo);");
        let tts = &[expansion.into()];
        let buffer = tt::buffer::TokenBuffer::new(tts);
        let mut tt_src = SubtreeTokenSource::new(&buffer);
        let mut tokens = vec![];
        while tt_src.current().kind != EOF {
            tokens.push((tt_src.current().kind, tt_src.text()));
            tt_src.bump();
        }

        // [${]
        // [let] [a] [=] ['c'] [;]
        assert_eq!(tokens[2 + 3].1, "'c'");
        assert_eq!(tokens[2 + 3].0, CHAR);
        // [let] [c] [=] [1000] [;]
        assert_eq!(tokens[2 + 5 + 3].1, "1000");
        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
        // [let] [f] [=] [12E+99_f64] [;]
        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);

        // [let] [s] [=] ["rust1"] [;]
        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
        assert_eq!(tokens[2 + 15 + 3].0, STRING);
    }

    // A sequence of statements is not a valid expression fragment.
    #[test]
    fn stmts_token_trees_to_expr_is_err() {
        let expansion = parse_macro(
            r#"
            macro_rules! stmts {
                () => {
                    let a = 0;
                    let b = 0;
                    let c = 0;
                    let d = 0;
                }
            }
            "#,
        )
        .expand_tt("stmts!();");
        assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
    }

    // Trailing whitespace inside a token tree must not confuse delimiter
    // detection in `ast_to_token_tree`.
    #[test]
    fn test_token_tree_last_child_is_white_space() {
        let source_file = ast::SourceFile::parse("f!({} );").ok().unwrap();
        let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
        let token_tree = macro_call.token_tree().unwrap();

        // Token Tree now is :
        // TokenTree
        // - T!['(']
        // - TokenTree
        //   - T!['{']
        //   - T!['}']
        // - WHITE_SPACE
        // - T![')']

        let rbrace =
            token_tree.syntax().descendants_with_tokens().find(|it| it.kind() == T!['}']).unwrap();
        let space = token_tree
            .syntax()
            .descendants_with_tokens()
            .find(|it| it.kind() == SyntaxKind::WHITESPACE)
            .unwrap();

        // Reorder the whitespace, such that it ends up inside the inner token-tree.
        let token_tree = insert_children(
            &rbrace.parent().unwrap(),
            InsertPosition::Last,
            std::iter::once(space),
        );

        // Token Tree now is :
        // TokenTree
        // - T!['{']
        // - T!['}']
        // - WHITE_SPACE
        let token_tree = ast::TokenTree::cast(token_tree).unwrap();
        let tt = ast_to_token_tree(&token_tree).unwrap().0;

        assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
    }

    // Multi-char punctuation (`::`) must round-trip through token trees.
    #[test]
    fn test_token_tree_multi_char_punct() {
        let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
        let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap();
        let tt = ast_to_token_tree(&struct_def).unwrap().0;
        token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
    }
}
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs new file mode 100644 index 000000000..0796ceee1 --- /dev/null +++ b/crates/mbe/src/tests.rs | |||
@@ -0,0 +1,1898 @@ | |||
1 | use std::fmt::Write; | ||
2 | |||
3 | use ::parser::FragmentKind; | ||
4 | use syntax::{ast, AstNode, NodeOrToken, SyntaxKind::IDENT, SyntaxNode, WalkEvent, T}; | ||
5 | use test_utils::assert_eq_text; | ||
6 | |||
7 | use super::*; | ||
8 | |||
mod rule_parsing {
    use syntax::{ast, AstNode};

    use crate::ast_to_token_tree;

    use super::*;

    // Arms that `MacroRules::parse` must accept.
    #[test]
    fn test_valid_arms() {
        fn check(macro_body: &str) {
            let m = parse_macro_arm(macro_body);
            m.unwrap();
        }

        check("($i:ident) => ()");
        check("($($i:ident)*) => ($_)");
        check("($($true:ident)*) => ($true)");
        check("($($false:ident)*) => ($false)");
        check("($) => ($)");
    }

    // Arms that must be rejected, paired with the expected error message.
    #[test]
    fn test_invalid_arms() {
        fn check(macro_body: &str, err: &str) {
            let m = parse_macro_arm(macro_body);
            assert_eq!(m, Err(ParseError::Expected(String::from(err))));
        }

        check("invalid", "expected subtree");

        check("$i:ident => ()", "expected subtree");
        check("($i:ident) ()", "expected `=`");
        check("($($i:ident)_) => ()", "invalid repeat");

        check("($i) => ($i)", "invalid macro definition");
        check("($i:) => ($i)", "invalid macro definition");
    }

    // Wraps `arm_definition` in a `macro_rules!` item and parses it.
    fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError> {
        let macro_definition = format!(" macro_rules! m {{ {} }} ", arm_definition);
        let source_file = ast::SourceFile::parse(&macro_definition).ok().unwrap();
        let macro_definition =
            source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

        let (definition_tt, _) =
            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
        crate::MacroRules::parse(&definition_tt)
    }
}
58 | |||
59 | // Good first issue (although a slightly challenging one): | ||
60 | // | ||
61 | // * Pick a random test from here | ||
62 | // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt | ||
63 | // * Port the test to rust and add it to this module | ||
64 | // * Make it pass :-) | ||
65 | |||
// Token ids of expansion output must be shifted past the ids of the macro
// definition and the call-site argument tokens.
#[test]
fn test_token_id_shift() {
    let expansion = parse_macro(
        r#"
macro_rules! foobar {
    ($e:ident) => { foo bar $e }
}
"#,
    )
    .expand_tt("foobar!(baz);");

    // Extracts the id of an ident leaf, if `t` is one.
    fn get_id(t: &tt::TokenTree) -> Option<u32> {
        if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = t {
            return Some(ident.id.0);
        }
        None
    }

    assert_eq!(expansion.token_trees.len(), 3);
    // {($e:ident) => { foo bar $e }}
    // 012345      67 8 9   T    12
    assert_eq!(get_id(&expansion.token_trees[0]), Some(9));
    assert_eq!(get_id(&expansion.token_trees[1]), Some(10));

    // The input args of macro call include parentheses:
    // (baz)
    // So baz should be 12+1+1
    assert_eq!(get_id(&expansion.token_trees[2]), Some(14));
}
95 | |||
// Ids recorded in the `TokenMap` must resolve to the right text ranges in
// the expanded syntax tree.
#[test]
fn test_token_map() {
    let expanded = parse_macro(
        r#"
macro_rules! foobar {
    ($e:ident) => { fn $e() {} }
}
"#,
    )
    .expand_tt("foobar!(baz);");

    let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
    let content = node.syntax_node().to_string();

    let get_text = |id, kind| -> String {
        content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
    };

    assert_eq!(expanded.token_trees.len(), 4);
    // {($e:ident) => { fn $e() {} }}
    // 012345      67 8 9  T12     3

    assert_eq!(get_text(tt::TokenId(9), IDENT), "fn");
    assert_eq!(get_text(tt::TokenId(12), T!['(']), "(");
    assert_eq!(get_text(tt::TokenId(13), T!['{']), "{");
}
122 | |||
// Repetition expansion checked against the expected textual output.
#[test]
fn test_convert_tt() {
    parse_macro(r#"
macro_rules! impl_froms {
    ($e:ident: $($v:ident),*) => {
        $(
            impl From<$v> for $e {
                fn from(it: $v) -> $e {
                    $e::$v(it)
                }
            }
        )*
    }
}
"#)
    .assert_expand_tt(
        "impl_froms!(TokenTree: Leaf, Subtree);",
        "impl From <Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \
         impl From <Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}"
    );
}
144 | |||
// Repetition expansion checked against the expected token-tree debug dump
// (each line: kind, text, token id).
// NOTE(review): indentation inside the expected dump below may have been
// flattened by extraction — verify against the original file.
#[test]
fn test_convert_tt2() {
    parse_macro(
        r#"
macro_rules! impl_froms {
    ($e:ident: $($v:ident),*) => {
        $(
            impl From<$v> for $e {
                fn from(it: $v) -> $e {
                    $e::$v(it)
                }
            }
        )*
    }
}
"#,
    )
    .assert_expand(
        "impl_froms!(TokenTree: Leaf, Subtree);",
        r#"
SUBTREE $
IDENT impl 20
IDENT From 21
PUNCH < [joint] 22
IDENT Leaf 53
PUNCH > [alone] 25
IDENT for 26
IDENT TokenTree 51
SUBTREE {} 29
IDENT fn 30
IDENT from 31
SUBTREE () 32
IDENT it 33
PUNCH : [alone] 34
IDENT Leaf 53
PUNCH - [joint] 37
PUNCH > [alone] 38
IDENT TokenTree 51
SUBTREE {} 41
IDENT TokenTree 51
PUNCH : [joint] 44
PUNCH : [joint] 45
IDENT Leaf 53
SUBTREE () 48
IDENT it 49
IDENT impl 20
IDENT From 21
PUNCH < [joint] 22
IDENT Subtree 55
PUNCH > [alone] 25
IDENT for 26
IDENT TokenTree 51
SUBTREE {} 29
IDENT fn 30
IDENT from 31
SUBTREE () 32
IDENT it 33
PUNCH : [alone] 34
IDENT Subtree 55
PUNCH - [joint] 37
PUNCH > [alone] 38
IDENT TokenTree 51
SUBTREE {} 41
IDENT TokenTree 51
PUNCH : [joint] 44
PUNCH : [joint] 45
IDENT Subtree 55
SUBTREE () 48
IDENT it 49
"#,
    );
}
217 | |||
// A lifetime token must be split into a joint apostrophe punct followed by
// an identifier, each with its own token id.
// NOTE(review): indentation inside the expected dump below may have been
// flattened by extraction — verify against the original file.
#[test]
fn test_lifetime_split() {
    parse_macro(
        r#"
macro_rules! foo {
    ($($t:tt)*) => { $($t)*}
}
"#,
    )
    .assert_expand(
        r#"foo!(static bar: &'static str = "hello";);"#,
        r#"
SUBTREE $
IDENT static 17
IDENT bar 18
PUNCH : [alone] 19
PUNCH & [alone] 20
PUNCH ' [joint] 21
IDENT static 22
IDENT str 23
PUNCH = [alone] 24
LITERAL "hello" 25
PUNCH ; [joint] 26
"#,
    );
}
244 | |||
// An `expr` fragment must expand as a single unit: `$i * 2` with `$i = 1+1`
// parses as `(1 + 1) * 2`, verified via the full syntax-tree dump.
// NOTE(review): the expected dump below appears mangled — node kinds and
// ranges have been replaced by "[email protected]", most likely an artifact of
// text extraction. Verify the golden string against the original file.
#[test]
fn test_expr_order() {
    let expanded = parse_macro(
        r#"
macro_rules! foo {
    ($ i:expr) => {
         fn bar() { $ i * 2; }
    }
}
"#,
    )
    .expand_items("foo! { 1 + 1}");

    let dump = format!("{:#?}", expanded);
    assert_eq_text!(
        dump.trim(),
        r#"[email protected]
[email protected]
[email protected] "fn"
[email protected]
[email protected] "bar"
[email protected]
[email protected] "("
[email protected] ")"
[email protected]
[email protected] "{"
[email protected]
[email protected]
[email protected]
[email protected]
[email protected] "1"
[email protected] "+"
[email protected]
[email protected] "1"
[email protected] "*"
[email protected]
[email protected] "2"
[email protected] ";"
[email protected] "}""#,
    );
}
286 | |||
// The first token of an arm's pattern decides which arm matches.
#[test]
fn test_fail_match_pattern_by_first_token() {
    parse_macro(
        r#"
macro_rules! foo {
    ($ i:ident) => (
        mod $ i {}
    );
    (= $ i:ident) => (
        fn $ i() {}
    );
    (+ $ i:ident) => (
        struct $ i;
    )
}
"#,
    )
    .assert_expand_items("foo! { foo }", "mod foo {}")
    .assert_expand_items("foo! { = bar }", "fn bar () {}")
    .assert_expand_items("foo! { + Baz }", "struct Baz ;");
}
308 | |||
// The last token of an arm's pattern decides which arm matches.
#[test]
fn test_fail_match_pattern_by_last_token() {
    parse_macro(
        r#"
macro_rules! foo {
    ($ i:ident) => (
        mod $ i {}
    );
    ($ i:ident =) => (
        fn $ i() {}
    );
    ($ i:ident +) => (
        struct $ i;
    )
}
"#,
    )
    .assert_expand_items("foo! { foo }", "mod foo {}")
    .assert_expand_items("foo! { bar = }", "fn bar () {}")
    .assert_expand_items("foo! { Baz + }", "struct Baz ;");
}
330 | |||
// A literal keyword-like token in the pattern decides which arm matches.
#[test]
fn test_fail_match_pattern_by_word_token() {
    parse_macro(
        r#"
macro_rules! foo {
    ($ i:ident) => (
        mod $ i {}
    );
    (spam $ i:ident) => (
        fn $ i() {}
    );
    (eggs $ i:ident) => (
        struct $ i;
    )
}
"#,
    )
    .assert_expand_items("foo! { foo }", "mod foo {}")
    .assert_expand_items("foo! { spam bar }", "fn bar () {}")
    .assert_expand_items("foo! { eggs Baz }", "struct Baz ;");
}
352 | |||
// Repetition separators (`,` vs `#`) and separator-like literal tokens
// distinguish otherwise identical arms.
#[test]
fn test_match_group_pattern_by_separator_token() {
    parse_macro(
        r#"
macro_rules! foo {
    ($ ($ i:ident),*) => ($ (
        mod $ i {}
    )*);
    ($ ($ i:ident)#*) => ($ (
        fn $ i() {}
    )*);
    ($ i:ident ,# $ j:ident) => (
        struct $ i;
        struct $ j;
    )
}
"#,
    )
    .assert_expand_items("foo! { foo, bar }", "mod foo {} mod bar {}")
    .assert_expand_items("foo! { foo# bar }", "fn foo () {} fn bar () {}")
    .assert_expand_items("foo! { Foo,# Bar }", "struct Foo ; struct Bar ;");
}
375 | |||
376 | #[test] | ||
377 | fn test_match_group_pattern_with_multiple_defs() { | ||
378 | parse_macro( | ||
379 | r#" | ||
380 | macro_rules! foo { | ||
381 | ($ ($ i:ident),*) => ( struct Bar { $ ( | ||
382 | fn $ i {} | ||
383 | )*} ); | ||
384 | } | ||
385 | "#, | ||
386 | ) | ||
387 | .assert_expand_items("foo! { foo, bar }", "struct Bar {fn foo {} fn bar {}}"); | ||
388 | } | ||
389 | |||
390 | #[test] | ||
391 | fn test_match_group_pattern_with_multiple_statement() { | ||
392 | parse_macro( | ||
393 | r#" | ||
394 | macro_rules! foo { | ||
395 | ($ ($ i:ident),*) => ( fn baz { $ ( | ||
396 | $ i (); | ||
397 | )*} ); | ||
398 | } | ||
399 | "#, | ||
400 | ) | ||
401 | .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ; bar () ;}"); | ||
402 | } | ||
403 | |||
404 | #[test] | ||
405 | fn test_match_group_pattern_with_multiple_statement_without_semi() { | ||
406 | parse_macro( | ||
407 | r#" | ||
408 | macro_rules! foo { | ||
409 | ($ ($ i:ident),*) => ( fn baz { $ ( | ||
410 | $i() | ||
411 | );*} ); | ||
412 | } | ||
413 | "#, | ||
414 | ) | ||
415 | .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ;bar ()}"); | ||
416 | } | ||
417 | |||
418 | #[test] | ||
419 | fn test_match_group_empty_fixed_token() { | ||
420 | parse_macro( | ||
421 | r#" | ||
422 | macro_rules! foo { | ||
423 | ($ ($ i:ident)* #abc) => ( fn baz { $ ( | ||
424 | $ i (); | ||
425 | )*} ); | ||
426 | } | ||
427 | "#, | ||
428 | ) | ||
429 | .assert_expand_items("foo! {#abc}", "fn baz {}"); | ||
430 | } | ||
431 | |||
432 | #[test] | ||
433 | fn test_match_group_in_subtree() { | ||
434 | parse_macro( | ||
435 | r#" | ||
436 | macro_rules! foo { | ||
437 | (fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ ( | ||
438 | $ i (); | ||
439 | )*} ); | ||
440 | }"#, | ||
441 | ) | ||
442 | .assert_expand_items("foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}"); | ||
443 | } | ||
444 | |||
445 | #[test] | ||
446 | fn test_match_group_with_multichar_sep() { | ||
447 | parse_macro( | ||
448 | r#" | ||
449 | macro_rules! foo { | ||
450 | (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} ); | ||
451 | }"#, | ||
452 | ) | ||
453 | .assert_expand_items("foo! (fn baz {true true} );", "fn baz () -> bool {true &&true}"); | ||
454 | } | ||
455 | |||
456 | #[test] | ||
457 | fn test_match_group_zero_match() { | ||
458 | parse_macro( | ||
459 | r#" | ||
460 | macro_rules! foo { | ||
461 | ( $($i:ident)* ) => (); | ||
462 | }"#, | ||
463 | ) | ||
464 | .assert_expand_items("foo! ();", ""); | ||
465 | } | ||
466 | |||
467 | #[test] | ||
468 | fn test_match_group_in_group() { | ||
469 | parse_macro( | ||
470 | r#" | ||
471 | macro_rules! foo { | ||
472 | { $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* ); | ||
473 | }"#, | ||
474 | ) | ||
475 | .assert_expand_items("foo! ( (a b) );", "(a b)"); | ||
476 | } | ||
477 | |||
478 | #[test] | ||
479 | fn test_expand_to_item_list() { | ||
480 | let tree = parse_macro( | ||
481 | " | ||
482 | macro_rules! structs { | ||
483 | ($($i:ident),*) => { | ||
484 | $(struct $i { field: u32 } )* | ||
485 | } | ||
486 | } | ||
487 | ", | ||
488 | ) | ||
489 | .expand_items("structs!(Foo, Bar);"); | ||
490 | assert_eq!( | ||
491 | format!("{:#?}", tree).trim(), | ||
492 | r#" | ||
493 | [email protected] | ||
494 | [email protected] | ||
495 | [email protected] "struct" | ||
496 | [email protected] | ||
497 | [email protected] "Foo" | ||
498 | [email protected] | ||
499 | [email protected] "{" | ||
500 | [email protected] | ||
501 | [email protected] | ||
502 | [email protected] "field" | ||
503 | [email protected] ":" | ||
504 | [email protected] | ||
505 | [email protected] | ||
506 | [email protected] | ||
507 | [email protected] | ||
508 | [email protected] "u32" | ||
509 | [email protected] "}" | ||
510 | [email protected] | ||
511 | [email protected] "struct" | ||
512 | [email protected] | ||
513 | [email protected] "Bar" | ||
514 | [email protected] | ||
515 | [email protected] "{" | ||
516 | [email protected] | ||
517 | [email protected] | ||
518 | [email protected] "field" | ||
519 | [email protected] ":" | ||
520 | [email protected] | ||
521 | [email protected] | ||
522 | [email protected] | ||
523 | [email protected] | ||
524 | [email protected] "u32" | ||
525 | [email protected] "}""# | ||
526 | .trim() | ||
527 | ); | ||
528 | } | ||
529 | |||
530 | fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { | ||
531 | if let tt::TokenTree::Subtree(subtree) = tt { | ||
532 | return &subtree; | ||
533 | } | ||
534 | unreachable!("It is not a subtree"); | ||
535 | } | ||
536 | fn to_literal(tt: &tt::TokenTree) -> &tt::Literal { | ||
537 | if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt { | ||
538 | return lit; | ||
539 | } | ||
540 | unreachable!("It is not a literal"); | ||
541 | } | ||
542 | |||
543 | fn to_punct(tt: &tt::TokenTree) -> &tt::Punct { | ||
544 | if let tt::TokenTree::Leaf(tt::Leaf::Punct(lit)) = tt { | ||
545 | return lit; | ||
546 | } | ||
547 | unreachable!("It is not a Punct"); | ||
548 | } | ||
549 | |||
550 | #[test] | ||
551 | fn test_expand_literals_to_token_tree() { | ||
552 | let expansion = parse_macro( | ||
553 | r#" | ||
554 | macro_rules! literals { | ||
555 | ($i:ident) => { | ||
556 | { | ||
557 | let a = 'c'; | ||
558 | let c = 1000; | ||
559 | let f = 12E+99_f64; | ||
560 | let s = "rust1"; | ||
561 | } | ||
562 | } | ||
563 | } | ||
564 | "#, | ||
565 | ) | ||
566 | .expand_tt("literals!(foo);"); | ||
567 | let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; | ||
568 | |||
569 | // [let] [a] [=] ['c'] [;] | ||
570 | assert_eq!(to_literal(&stm_tokens[3]).text, "'c'"); | ||
571 | // [let] [c] [=] [1000] [;] | ||
572 | assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000"); | ||
573 | // [let] [f] [=] [12E+99_f64] [;] | ||
574 | assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64"); | ||
575 | // [let] [s] [=] ["rust1"] [;] | ||
576 | assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\""); | ||
577 | } | ||
578 | |||
579 | #[test] | ||
580 | fn test_attr_to_token_tree() { | ||
581 | let expansion = parse_to_token_tree_by_syntax( | ||
582 | r#" | ||
583 | #[derive(Copy)] | ||
584 | struct Foo; | ||
585 | "#, | ||
586 | ); | ||
587 | |||
588 | assert_eq!(to_punct(&expansion.token_trees[0]).char, '#'); | ||
589 | assert_eq!( | ||
590 | to_subtree(&expansion.token_trees[1]).delimiter_kind(), | ||
591 | Some(tt::DelimiterKind::Bracket) | ||
592 | ); | ||
593 | } | ||
594 | |||
595 | #[test] | ||
596 | fn test_two_idents() { | ||
597 | parse_macro( | ||
598 | r#" | ||
599 | macro_rules! foo { | ||
600 | ($ i:ident, $ j:ident) => { | ||
601 | fn foo() { let a = $ i; let b = $j; } | ||
602 | } | ||
603 | } | ||
604 | "#, | ||
605 | ) | ||
606 | .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); | ||
607 | } | ||
608 | |||
609 | #[test] | ||
610 | fn test_tt_to_stmts() { | ||
611 | let stmts = parse_macro( | ||
612 | r#" | ||
613 | macro_rules! foo { | ||
614 | () => { | ||
615 | let a = 0; | ||
616 | a = 10 + 1; | ||
617 | a | ||
618 | } | ||
619 | } | ||
620 | "#, | ||
621 | ) | ||
622 | .expand_statements("foo!{}"); | ||
623 | |||
624 | assert_eq!( | ||
625 | format!("{:#?}", stmts).trim(), | ||
626 | r#"[email protected] | ||
627 | [email protected] | ||
628 | [email protected] "let" | ||
629 | [email protected] | ||
630 | [email protected] | ||
631 | [email protected] "a" | ||
632 | [email protected] "=" | ||
633 | [email protected] | ||
634 | [email protected] "0" | ||
635 | [email protected] ";" | ||
636 | [email protected] | ||
637 | [email protected] | ||
638 | [email protected] | ||
639 | [email protected] | ||
640 | [email protected] | ||
641 | [email protected] | ||
642 | [email protected] "a" | ||
643 | [email protected] "=" | ||
644 | [email protected] | ||
645 | [email protected] | ||
646 | [email protected] "10" | ||
647 | [email protected] "+" | ||
648 | [email protected] | ||
649 | [email protected] "1" | ||
650 | [email protected] ";" | ||
651 | [email protected] | ||
652 | [email protected] | ||
653 | [email protected] | ||
654 | [email protected] | ||
655 | [email protected] | ||
656 | [email protected] "a""#, | ||
657 | ); | ||
658 | } | ||
659 | |||
660 | #[test] | ||
661 | fn test_match_literal() { | ||
662 | parse_macro( | ||
663 | r#" | ||
664 | macro_rules! foo { | ||
665 | ('(') => { | ||
666 | fn foo() {} | ||
667 | } | ||
668 | } | ||
669 | "#, | ||
670 | ) | ||
671 | .assert_expand_items("foo! ['('];", "fn foo () {}"); | ||
672 | } | ||
673 | |||
674 | // The following tests are port from intellij-rust directly | ||
675 | // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt | ||
676 | |||
677 | #[test] | ||
678 | fn test_path() { | ||
679 | parse_macro( | ||
680 | r#" | ||
681 | macro_rules! foo { | ||
682 | ($ i:path) => { | ||
683 | fn foo() { let a = $ i; } | ||
684 | } | ||
685 | } | ||
686 | "#, | ||
687 | ) | ||
688 | .assert_expand_items("foo! { foo }", "fn foo () {let a = foo ;}") | ||
689 | .assert_expand_items( | ||
690 | "foo! { bar::<u8>::baz::<u8> }", | ||
691 | "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}", | ||
692 | ); | ||
693 | } | ||
694 | |||
695 | #[test] | ||
696 | fn test_two_paths() { | ||
697 | parse_macro( | ||
698 | r#" | ||
699 | macro_rules! foo { | ||
700 | ($ i:path, $ j:path) => { | ||
701 | fn foo() { let a = $ i; let b = $j; } | ||
702 | } | ||
703 | } | ||
704 | "#, | ||
705 | ) | ||
706 | .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}"); | ||
707 | } | ||
708 | |||
709 | #[test] | ||
710 | fn test_path_with_path() { | ||
711 | parse_macro( | ||
712 | r#" | ||
713 | macro_rules! foo { | ||
714 | ($ i:path) => { | ||
715 | fn foo() { let a = $ i :: bar; } | ||
716 | } | ||
717 | } | ||
718 | "#, | ||
719 | ) | ||
720 | .assert_expand_items("foo! { foo }", "fn foo () {let a = foo :: bar ;}"); | ||
721 | } | ||
722 | |||
723 | #[test] | ||
724 | fn test_expr() { | ||
725 | parse_macro( | ||
726 | r#" | ||
727 | macro_rules! foo { | ||
728 | ($ i:expr) => { | ||
729 | fn bar() { $ i; } | ||
730 | } | ||
731 | } | ||
732 | "#, | ||
733 | ) | ||
734 | .assert_expand_items( | ||
735 | "foo! { 2 + 2 * baz(3).quux() }", | ||
736 | "fn bar () {2 + 2 * baz (3) . quux () ;}", | ||
737 | ); | ||
738 | } | ||
739 | |||
740 | #[test] | ||
741 | fn test_last_expr() { | ||
742 | parse_macro( | ||
743 | r#" | ||
744 | macro_rules! vec { | ||
745 | ($($item:expr),*) => { | ||
746 | { | ||
747 | let mut v = Vec::new(); | ||
748 | $( | ||
749 | v.push($item); | ||
750 | )* | ||
751 | v | ||
752 | } | ||
753 | }; | ||
754 | } | ||
755 | "#, | ||
756 | ) | ||
757 | .assert_expand_items( | ||
758 | "vec!(1,2,3);", | ||
759 | "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}", | ||
760 | ); | ||
761 | } | ||
762 | |||
763 | #[test] | ||
764 | fn test_ty() { | ||
765 | parse_macro( | ||
766 | r#" | ||
767 | macro_rules! foo { | ||
768 | ($ i:ty) => ( | ||
769 | fn bar() -> $ i { unimplemented!() } | ||
770 | ) | ||
771 | } | ||
772 | "#, | ||
773 | ) | ||
774 | .assert_expand_items("foo! { Baz<u8> }", "fn bar () -> Baz < u8 > {unimplemented ! ()}"); | ||
775 | } | ||
776 | |||
777 | #[test] | ||
778 | fn test_ty_with_complex_type() { | ||
779 | parse_macro( | ||
780 | r#" | ||
781 | macro_rules! foo { | ||
782 | ($ i:ty) => ( | ||
783 | fn bar() -> $ i { unimplemented!() } | ||
784 | ) | ||
785 | } | ||
786 | "#, | ||
787 | ) | ||
788 | // Reference lifetime struct with generic type | ||
789 | .assert_expand_items( | ||
790 | "foo! { &'a Baz<u8> }", | ||
791 | "fn bar () -> & 'a Baz < u8 > {unimplemented ! ()}", | ||
792 | ) | ||
793 | // extern "Rust" func type | ||
794 | .assert_expand_items( | ||
795 | r#"foo! { extern "Rust" fn() -> Ret }"#, | ||
796 | r#"fn bar () -> extern "Rust" fn () -> Ret {unimplemented ! ()}"#, | ||
797 | ); | ||
798 | } | ||
799 | |||
800 | #[test] | ||
801 | fn test_pat_() { | ||
802 | parse_macro( | ||
803 | r#" | ||
804 | macro_rules! foo { | ||
805 | ($ i:pat) => { fn foo() { let $ i; } } | ||
806 | } | ||
807 | "#, | ||
808 | ) | ||
809 | .assert_expand_items("foo! { (a, b) }", "fn foo () {let (a , b) ;}"); | ||
810 | } | ||
811 | |||
812 | #[test] | ||
813 | fn test_stmt() { | ||
814 | parse_macro( | ||
815 | r#" | ||
816 | macro_rules! foo { | ||
817 | ($ i:stmt) => ( | ||
818 | fn bar() { $ i; } | ||
819 | ) | ||
820 | } | ||
821 | "#, | ||
822 | ) | ||
823 | .assert_expand_items("foo! { 2 }", "fn bar () {2 ;}") | ||
824 | .assert_expand_items("foo! { let a = 0 }", "fn bar () {let a = 0 ;}"); | ||
825 | } | ||
826 | |||
827 | #[test] | ||
828 | fn test_single_item() { | ||
829 | parse_macro( | ||
830 | r#" | ||
831 | macro_rules! foo { | ||
832 | ($ i:item) => ( | ||
833 | $ i | ||
834 | ) | ||
835 | } | ||
836 | "#, | ||
837 | ) | ||
838 | .assert_expand_items("foo! {mod c {}}", "mod c {}"); | ||
839 | } | ||
840 | |||
841 | #[test] | ||
842 | fn test_all_items() { | ||
843 | parse_macro( | ||
844 | r#" | ||
845 | macro_rules! foo { | ||
846 | ($ ($ i:item)*) => ($ ( | ||
847 | $ i | ||
848 | )*) | ||
849 | } | ||
850 | "#, | ||
851 | ). | ||
852 | assert_expand_items( | ||
853 | r#" | ||
854 | foo! { | ||
855 | extern crate a; | ||
856 | mod b; | ||
857 | mod c {} | ||
858 | use d; | ||
859 | const E: i32 = 0; | ||
860 | static F: i32 = 0; | ||
861 | impl G {} | ||
862 | struct H; | ||
863 | enum I { Foo } | ||
864 | trait J {} | ||
865 | fn h() {} | ||
866 | extern {} | ||
867 | type T = u8; | ||
868 | } | ||
869 | "#, | ||
870 | r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#, | ||
871 | ); | ||
872 | } | ||
873 | |||
874 | #[test] | ||
875 | fn test_block() { | ||
876 | parse_macro( | ||
877 | r#" | ||
878 | macro_rules! foo { | ||
879 | ($ i:block) => { fn foo() $ i } | ||
880 | } | ||
881 | "#, | ||
882 | ) | ||
883 | .assert_expand_statements("foo! { { 1; } }", "fn foo () {1 ;}"); | ||
884 | } | ||
885 | |||
886 | #[test] | ||
887 | fn test_meta() { | ||
888 | parse_macro( | ||
889 | r#" | ||
890 | macro_rules! foo { | ||
891 | ($ i:meta) => ( | ||
892 | #[$ i] | ||
893 | fn bar() {} | ||
894 | ) | ||
895 | } | ||
896 | "#, | ||
897 | ) | ||
898 | .assert_expand_items( | ||
899 | r#"foo! { cfg(target_os = "windows") }"#, | ||
900 | r#"# [cfg (target_os = "windows")] fn bar () {}"#, | ||
901 | ); | ||
902 | } | ||
903 | |||
904 | #[test] | ||
905 | fn test_meta_doc_comments() { | ||
906 | parse_macro( | ||
907 | r#" | ||
908 | macro_rules! foo { | ||
909 | ($(#[$ i:meta])+) => ( | ||
910 | $(#[$ i])+ | ||
911 | fn bar() {} | ||
912 | ) | ||
913 | } | ||
914 | "#, | ||
915 | ). | ||
916 | assert_expand_items( | ||
917 | r#"foo! { | ||
918 | /// Single Line Doc 1 | ||
919 | /** | ||
920 | MultiLines Doc | ||
921 | */ | ||
922 | }"#, | ||
923 | "# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}", | ||
924 | ); | ||
925 | } | ||
926 | |||
927 | #[test] | ||
928 | fn test_tt_block() { | ||
929 | parse_macro( | ||
930 | r#" | ||
931 | macro_rules! foo { | ||
932 | ($ i:tt) => { fn foo() $ i } | ||
933 | } | ||
934 | "#, | ||
935 | ) | ||
936 | .assert_expand_items(r#"foo! { { 1; } }"#, r#"fn foo () {1 ;}"#); | ||
937 | } | ||
938 | |||
939 | #[test] | ||
940 | fn test_tt_group() { | ||
941 | parse_macro( | ||
942 | r#" | ||
943 | macro_rules! foo { | ||
944 | ($($ i:tt)*) => { $($ i)* } | ||
945 | } | ||
946 | "#, | ||
947 | ) | ||
948 | .assert_expand_items(r#"foo! { fn foo() {} }"#, r#"fn foo () {}"#); | ||
949 | } | ||
950 | |||
951 | #[test] | ||
952 | fn test_tt_composite() { | ||
953 | parse_macro( | ||
954 | r#" | ||
955 | macro_rules! foo { | ||
956 | ($i:tt) => { 0 } | ||
957 | } | ||
958 | "#, | ||
959 | ) | ||
960 | .assert_expand_items(r#"foo! { => }"#, r#"0"#); | ||
961 | } | ||
962 | |||
963 | #[test] | ||
964 | fn test_tt_composite2() { | ||
965 | let node = parse_macro( | ||
966 | r#" | ||
967 | macro_rules! foo { | ||
968 | ($($tt:tt)*) => { abs!(=> $($tt)*) } | ||
969 | } | ||
970 | "#, | ||
971 | ) | ||
972 | .expand_items(r#"foo!{#}"#); | ||
973 | |||
974 | let res = format!("{:#?}", &node); | ||
975 | assert_eq_text!( | ||
976 | res.trim(), | ||
977 | r###"[email protected] | ||
978 | [email protected] | ||
979 | [email protected] | ||
980 | [email protected] | ||
981 | [email protected] | ||
982 | [email protected] "abs" | ||
983 | [email protected] "!" | ||
984 | [email protected] | ||
985 | [email protected] "(" | ||
986 | [email protected] "=" | ||
987 | [email protected] ">" | ||
988 | [email protected] " " | ||
989 | [email protected] "#" | ||
990 | [email protected] ")""### | ||
991 | ); | ||
992 | } | ||
993 | |||
994 | #[test] | ||
995 | fn test_lifetime() { | ||
996 | parse_macro( | ||
997 | r#" | ||
998 | macro_rules! foo { | ||
999 | ($ lt:lifetime) => { struct Ref<$ lt>{ s: &$ lt str } } | ||
1000 | } | ||
1001 | "#, | ||
1002 | ) | ||
1003 | .assert_expand_items(r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#); | ||
1004 | } | ||
1005 | |||
1006 | #[test] | ||
1007 | fn test_literal() { | ||
1008 | parse_macro( | ||
1009 | r#" | ||
1010 | macro_rules! foo { | ||
1011 | ($ type:ty $ lit:literal) => { const VALUE: $ type = $ lit;}; | ||
1012 | } | ||
1013 | "#, | ||
1014 | ) | ||
1015 | .assert_expand_items(r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#); | ||
1016 | } | ||
1017 | |||
1018 | #[test] | ||
1019 | fn test_boolean_is_ident() { | ||
1020 | parse_macro( | ||
1021 | r#" | ||
1022 | macro_rules! foo { | ||
1023 | ($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); }; | ||
1024 | } | ||
1025 | "#, | ||
1026 | ) | ||
1027 | .assert_expand( | ||
1028 | r#"foo!(true,false);"#, | ||
1029 | r#" | ||
1030 | SUBTREE $ | ||
1031 | IDENT const 14 | ||
1032 | IDENT VALUE 15 | ||
1033 | PUNCH : [alone] 16 | ||
1034 | SUBTREE () 17 | ||
1035 | IDENT bool 18 | ||
1036 | PUNCH , [alone] 19 | ||
1037 | IDENT bool 20 | ||
1038 | PUNCH = [alone] 21 | ||
1039 | SUBTREE () 22 | ||
1040 | IDENT true 29 | ||
1041 | PUNCH , [joint] 25 | ||
1042 | IDENT false 31 | ||
1043 | PUNCH ; [alone] 28 | ||
1044 | "#, | ||
1045 | ); | ||
1046 | } | ||
1047 | |||
1048 | #[test] | ||
1049 | fn test_vis() { | ||
1050 | parse_macro( | ||
1051 | r#" | ||
1052 | macro_rules! foo { | ||
1053 | ($ vis:vis $ name:ident) => { $ vis fn $ name() {}}; | ||
1054 | } | ||
1055 | "#, | ||
1056 | ) | ||
1057 | .assert_expand_items(r#"foo!(pub foo);"#, r#"pub fn foo () {}"#) | ||
1058 | // test optional cases | ||
1059 | .assert_expand_items(r#"foo!(foo);"#, r#"fn foo () {}"#); | ||
1060 | } | ||
1061 | |||
1062 | #[test] | ||
1063 | fn test_inner_macro_rules() { | ||
1064 | parse_macro( | ||
1065 | r#" | ||
1066 | macro_rules! foo { | ||
1067 | ($a:ident, $b:ident, $c:tt) => { | ||
1068 | |||
1069 | macro_rules! bar { | ||
1070 | ($bi:ident) => { | ||
1071 | fn $bi() -> u8 {$c} | ||
1072 | } | ||
1073 | } | ||
1074 | |||
1075 | bar!($a); | ||
1076 | fn $b() -> u8 {$c} | ||
1077 | } | ||
1078 | } | ||
1079 | "#, | ||
1080 | ). | ||
1081 | assert_expand_items( | ||
1082 | r#"foo!(x,y, 1);"#, | ||
1083 | r#"macro_rules ! bar {($ bi : ident) => {fn $ bi () -> u8 {1}}} bar ! (x) ; fn y () -> u8 {1}"#, | ||
1084 | ); | ||
1085 | } | ||
1086 | |||
1087 | // The following tests are based on real world situations | ||
1088 | #[test] | ||
1089 | fn test_vec() { | ||
1090 | let fixture = parse_macro( | ||
1091 | r#" | ||
1092 | macro_rules! vec { | ||
1093 | ($($item:expr),*) => { | ||
1094 | { | ||
1095 | let mut v = Vec::new(); | ||
1096 | $( | ||
1097 | v.push($item); | ||
1098 | )* | ||
1099 | v | ||
1100 | } | ||
1101 | }; | ||
1102 | } | ||
1103 | "#, | ||
1104 | ); | ||
1105 | fixture | ||
1106 | .assert_expand_items(r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#) | ||
1107 | .assert_expand_items( | ||
1108 | r#"vec![1u32,2];"#, | ||
1109 | r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#, | ||
1110 | ); | ||
1111 | |||
1112 | let tree = fixture.expand_expr(r#"vec![1u32,2];"#); | ||
1113 | |||
1114 | assert_eq!( | ||
1115 | format!("{:#?}", tree).trim(), | ||
1116 | r#"[email protected] | ||
1117 | [email protected] "{" | ||
1118 | [email protected] | ||
1119 | [email protected] "let" | ||
1120 | [email protected] | ||
1121 | [email protected] "mut" | ||
1122 | [email protected] | ||
1123 | [email protected] "v" | ||
1124 | [email protected] "=" | ||
1125 | [email protected] | ||
1126 | [email protected] | ||
1127 | [email protected] | ||
1128 | [email protected] | ||
1129 | [email protected] | ||
1130 | [email protected] | ||
1131 | [email protected] "Vec" | ||
1132 | [email protected] "::" | ||
1133 | [email protected] | ||
1134 | [email protected] | ||
1135 | [email protected] "new" | ||
1136 | [email protected] | ||
1137 | [email protected] "(" | ||
1138 | [email protected] ")" | ||
1139 | [email protected] ";" | ||
1140 | [email protected] | ||
1141 | [email protected] | ||
1142 | [email protected] | ||
1143 | [email protected] | ||
1144 | [email protected] | ||
1145 | [email protected] | ||
1146 | [email protected] "v" | ||
1147 | [email protected] "." | ||
1148 | [email protected] | ||
1149 | [email protected] "push" | ||
1150 | [email protected] | ||
1151 | [email protected] "(" | ||
1152 | [email protected] | ||
1153 | [email protected] "1u32" | ||
1154 | [email protected] ")" | ||
1155 | [email protected] ";" | ||
1156 | [email protected] | ||
1157 | [email protected] | ||
1158 | [email protected] | ||
1159 | [email protected] | ||
1160 | [email protected] | ||
1161 | [email protected] | ||
1162 | [email protected] "v" | ||
1163 | [email protected] "." | ||
1164 | [email protected] | ||
1165 | [email protected] "push" | ||
1166 | [email protected] | ||
1167 | [email protected] "(" | ||
1168 | [email protected] | ||
1169 | [email protected] "2" | ||
1170 | [email protected] ")" | ||
1171 | [email protected] ";" | ||
1172 | [email protected] | ||
1173 | [email protected] | ||
1174 | [email protected] | ||
1175 | [email protected] | ||
1176 | [email protected] "v" | ||
1177 | [email protected] "}""# | ||
1178 | ); | ||
1179 | } | ||
1180 | |||
1181 | #[test] | ||
1182 | fn test_winapi_struct() { | ||
1183 | // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366 | ||
1184 | |||
1185 | parse_macro( | ||
1186 | r#" | ||
1187 | macro_rules! STRUCT { | ||
1188 | ($(#[$attrs:meta])* struct $name:ident { | ||
1189 | $($field:ident: $ftype:ty,)+ | ||
1190 | }) => ( | ||
1191 | #[repr(C)] #[derive(Copy)] $(#[$attrs])* | ||
1192 | pub struct $name { | ||
1193 | $(pub $field: $ftype,)+ | ||
1194 | } | ||
1195 | impl Clone for $name { | ||
1196 | #[inline] | ||
1197 | fn clone(&self) -> $name { *self } | ||
1198 | } | ||
1199 | #[cfg(feature = "impl-default")] | ||
1200 | impl Default for $name { | ||
1201 | #[inline] | ||
1202 | fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } } | ||
1203 | } | ||
1204 | ); | ||
1205 | } | ||
1206 | "#, | ||
1207 | ). | ||
1208 | // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs | ||
1209 | assert_expand_items(r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#, | ||
1210 | "# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}" | ||
1211 | ) | ||
1212 | .assert_expand_items(r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#, | ||
1213 | "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}" | ||
1214 | ); | ||
1215 | } | ||
1216 | |||
1217 | #[test] | ||
1218 | fn test_int_base() { | ||
1219 | parse_macro( | ||
1220 | r#" | ||
1221 | macro_rules! int_base { | ||
1222 | ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => { | ||
1223 | #[stable(feature = "rust1", since = "1.0.0")] | ||
1224 | impl fmt::$Trait for $T { | ||
1225 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
1226 | $Radix.fmt_int(*self as $U, f) | ||
1227 | } | ||
1228 | } | ||
1229 | } | ||
1230 | } | ||
1231 | "#, | ||
1232 | ).assert_expand_items(r#" int_base!{Binary for isize as usize -> Binary}"#, | ||
1233 | "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}" | ||
1234 | ); | ||
1235 | } | ||
1236 | |||
1237 | #[test] | ||
1238 | fn test_generate_pattern_iterators() { | ||
1239 | // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs | ||
1240 | parse_macro( | ||
1241 | r#" | ||
1242 | macro_rules! generate_pattern_iterators { | ||
1243 | { double ended; with $(#[$common_stability_attribute:meta])*, | ||
1244 | $forward_iterator:ident, | ||
1245 | $reverse_iterator:ident, $iterty:ty | ||
1246 | } => { | ||
1247 | fn foo(){} | ||
1248 | } | ||
1249 | } | ||
1250 | "#, | ||
1251 | ).assert_expand_items( | ||
1252 | r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#, | ||
1253 | "fn foo () {}", | ||
1254 | ); | ||
1255 | } | ||
1256 | |||
1257 | #[test] | ||
1258 | fn test_impl_fn_for_zst() { | ||
1259 | // from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs | ||
1260 | parse_macro( | ||
1261 | r#" | ||
1262 | macro_rules! impl_fn_for_zst { | ||
1263 | { $( $( #[$attr: meta] )* | ||
1264 | struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn = | ||
1265 | |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty | ||
1266 | $body: block; )+ | ||
1267 | } => { | ||
1268 | $( | ||
1269 | $( #[$attr] )* | ||
1270 | struct $Name; | ||
1271 | |||
1272 | impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name { | ||
1273 | #[inline] | ||
1274 | extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy { | ||
1275 | $body | ||
1276 | } | ||
1277 | } | ||
1278 | |||
1279 | impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name { | ||
1280 | #[inline] | ||
1281 | extern "rust-call" fn call_mut( | ||
1282 | &mut self, | ||
1283 | ($( $arg, )*): ($( $ArgTy, )*) | ||
1284 | ) -> $ReturnTy { | ||
1285 | Fn::call(&*self, ($( $arg, )*)) | ||
1286 | } | ||
1287 | } | ||
1288 | |||
1289 | impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name { | ||
1290 | type Output = $ReturnTy; | ||
1291 | |||
1292 | #[inline] | ||
1293 | extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy { | ||
1294 | Fn::call(&self, ($( $arg, )*)) | ||
1295 | } | ||
1296 | } | ||
1297 | )+ | ||
1298 | } | ||
1299 | } | ||
1300 | "#, | ||
1301 | ).assert_expand_items(r#" | ||
1302 | impl_fn_for_zst ! { | ||
1303 | # [ derive ( Clone ) ] | ||
1304 | struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug { | ||
1305 | c . escape_debug_ext ( false ) | ||
1306 | } ; | ||
1307 | |||
1308 | # [ derive ( Clone ) ] | ||
1309 | struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode { | ||
1310 | c . escape_unicode ( ) | ||
1311 | } ; | ||
1312 | # [ derive ( Clone ) ] | ||
1313 | struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault { | ||
1314 | c . escape_default ( ) | ||
1315 | } ; | ||
1316 | } | ||
1317 | "#, | ||
1318 | "# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}" | ||
1319 | ); | ||
1320 | } | ||
1321 | |||
1322 | #[test] | ||
1323 | fn test_impl_nonzero_fmt() { | ||
1324 | // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12 | ||
1325 | parse_macro( | ||
1326 | r#" | ||
1327 | macro_rules! impl_nonzero_fmt { | ||
1328 | ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => { | ||
1329 | fn foo () {} | ||
1330 | } | ||
1331 | } | ||
1332 | "#, | ||
1333 | ).assert_expand_items( | ||
1334 | r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#, | ||
1335 | "fn foo () {}", | ||
1336 | ); | ||
1337 | } | ||
1338 | |||
1339 | #[test] | ||
1340 | fn test_cfg_if_items() { | ||
1341 | // from https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986 | ||
1342 | parse_macro( | ||
1343 | r#" | ||
1344 | macro_rules! __cfg_if_items { | ||
1345 | (($($not:meta,)*) ; ) => {}; | ||
1346 | (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => { | ||
1347 | __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* } | ||
1348 | } | ||
1349 | } | ||
1350 | "#, | ||
1351 | ).assert_expand_items( | ||
1352 | r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#, | ||
1353 | "__cfg_if_items ! {(rustdoc ,) ;}", | ||
1354 | ); | ||
1355 | } | ||
1356 | |||
#[test]
fn test_cfg_if_main() {
    // from https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9
    let fixture = parse_macro(
        r#"
macro_rules! cfg_if {
    ($(
        if #[cfg($($meta:meta),*)] { $($it:item)* }
    ) else * else {
        $($it2:item)*
    }) => {
        __cfg_if_items! {
            () ;
            $( ( ($($meta),*) ($($it)*) ), )*
            ( () ($($it2)*) ),
        }
    };

    // Internal macro to Apply a cfg attribute to a list of items
    (@__apply $m:meta, $($it:item)*) => {
        $(#[$m] $it)*
    };
}
"#,
    );

    // First rule: the whole if/else-if/else chain is flattened into a single
    // __cfg_if_items! call.
    let chain = r#"
cfg_if ! {
    if # [ cfg ( target_env = "msvc" ) ] {
        // no extra unwinder support needed
    } else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] {
        // no unwinder on the system!
    } else {
        mod libunwind ;
        pub use libunwind :: * ;
    }
}
"#;
    fixture
        .assert_expand_items(
            chain,
            "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}",
        )
        // Second rule: `@__apply` with no trailing items expands to nothing.
        .assert_expand_items(
            r#"
cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
"#,
            "",
        );
}
1401 | |||
#[test]
fn test_proptest_arbitrary() {
    // from https://github.com/AltSysrq/proptest/blob/d1c4b049337d2f75dd6f49a095115f7c532e5129/proptest/src/arbitrary/macros.rs#L16
    let fixture = parse_macro(
        r#"
macro_rules! arbitrary {
    ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
        $args: ident => $logic: expr) => {
        impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
            type Parameters = $params;
            type Strategy = $strat;
            fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
                $logic
            }
        }
    };

}"#,
    );
    // `$crate` in the transcriber must come through the expansion verbatim.
    let call = r#"arbitrary ! ( [ A : Arbitrary ]
Vec < A > ,
VecStrategy < A :: Strategy > ,
RangedParams1 < A :: Parameters > ;
args => { let product_unpack ! [ range , a ] = args ; vec ( any_with :: < A > ( a ) , range ) }
) ;"#;
    fixture.assert_expand_items(
        call,
        "impl <A : Arbitrary > $crate :: arbitrary :: Arbitrary for Vec < A > {type Parameters = RangedParams1 < A :: Parameters > ; type Strategy = VecStrategy < A :: Strategy > ; fn arbitrary_with (args : Self :: Parameters) -> Self :: Strategy {{let product_unpack ! [range , a] = args ; vec (any_with :: < A > (a) , range)}}}",
    );
}
1429 | |||
#[test]
fn test_old_ridl() {
    // This is from winapi 2.8, which do not have a link from github
    let fixture = parse_macro(
        r#"
#[macro_export]
macro_rules! RIDL {
    (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
        {$(
            fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
        ),+}
    ) => {
        impl $interface {
            $(pub unsafe fn $method(&mut self) -> $rtr {
                ((*self.lpVtbl).$method)(self $(,$p)*)
            })+
        }
    };
}"#,
    );
    let expanded = fixture.expand_tt(
        r#"
RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
    fn GetDataSize(&mut self) -> UINT
}}"#,
    );

    assert_eq!(
        expanded.to_string(),
        "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}"
    );
}
1457 | |||
#[test]
fn test_quick_error() {
    let fixture = parse_macro(
        r#"
macro_rules! quick_error {

    (SORT [enum $name:ident $( #[$meta:meta] )*]
        items [$($( #[$imeta:meta] )*
            => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*]
                {$( $ifuncs:tt )*} )* ]
        buf [ ]
        queue [ ]
    ) => {
        quick_error!(ENUMINITION [enum $name $( #[$meta] )*]
            body []
            queue [$(
                $( #[$imeta] )*
                =>
                $iitem: $imode [$( $ivar: $ityp ),*]
            )*]
        );
    };

}
"#,
    );
    // The SORT rule re-emits a quick_error! call with the parsed items moved
    // into the `queue` bucket (and their `{...}` bodies dropped).
    let expanded = fixture.expand_tt(
        r#"
quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [
    => One : UNIT [] {}
    => Two : TUPLE [s :String] {display ("two: {}" , s) from ()}
] buf [] queue []) ;
"#,
    );

    assert_eq!(
        expanded.to_string(),
        "quick_error ! (ENUMINITION [enum Wrapped # [derive (Debug)]] body [] queue [=> One : UNIT [] => Two : TUPLE [s : String]]) ;"
    );
}
1495 | |||
// Regression test for empty repetitions nested inside empty repetitions: every
// inner `$( ... )*` group in this fixture may legitimately bind zero fragments,
// and the expansion must still terminate with the header-only impl asserted below.
#[test]
fn test_empty_repeat_vars_in_empty_repeat_vars() {
    parse_macro(
        r#"
macro_rules! delegate_impl {
    ([$self_type:ident, $self_wrap:ty, $self_map:ident]
    pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {

        // "Escaped" associated types. Stripped before making the `trait`
        // itself, but forwarded when delegating impls.
        $(
        @escape [type $assoc_name_ext:ident]
        // Associated types. Forwarded.
        )*
        $(
        @section type
        $(
            $(#[$_assoc_attr:meta])*
            type $assoc_name:ident $(: $assoc_bound:ty)*;
        )+
        )*
        // Methods. Forwarded. Using $self_map!(self) around the self argument.
        // Methods must use receiver `self` or explicit type like `self: &Self`
        // &self and &mut self are _not_ supported.
        $(
        @section self
        $(
            $(#[$_method_attr:meta])*
            fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty;
        )+
        )*
        // Arbitrary tail that is ignored when forwarding.
        $(
        @section nodelegate
        $($tail:tt)*
        )*
    }) => {
        impl<> $name for $self_wrap where $self_type: $name {
            $(
            $(
                fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
                    $self_map!(self).$method_name($($marg),*)
                }
            )*
            )*
        }
    }
}
"#,
    ).assert_expand_items(
        // The invocation declares a type section but no methods, so the method
        // repetition expands zero times and the impl body comes out empty.
        r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#,
        "impl <> Data for & \'a mut G where G : Data {}",
    );
}
1550 | |||
#[test]
fn expr_interpolation() {
    // An `$expr` fragment is re-inserted into the output as a single token
    // (no spaces), hence the compact `map(x+foo)` rendering.
    let fixture = parse_macro(
        r#"
macro_rules! id {
    ($expr:expr) => {
        map($expr)
    }
}
"#,
    );
    let expanded = fixture.expand_expr("id!(x + foo);");
    assert_eq!(expanded.to_string(), "map(x+foo)");
}
1566 | |||
/// Test harness around a parsed `macro_rules!` definition; the methods in the
/// `impl` below expand invocations against it and compare results.
pub(crate) struct MacroFixture {
    // The parsed rules every `expand_*` / `assert_*` helper runs against.
    rules: MacroRules,
}
1570 | |||
1571 | impl MacroFixture { | ||
1572 | pub(crate) fn expand_tt(&self, invocation: &str) -> tt::Subtree { | ||
1573 | self.try_expand_tt(invocation).unwrap() | ||
1574 | } | ||
1575 | |||
1576 | fn try_expand_tt(&self, invocation: &str) -> Result<tt::Subtree, ExpandError> { | ||
1577 | let source_file = ast::SourceFile::parse(invocation).tree(); | ||
1578 | let macro_invocation = | ||
1579 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | ||
1580 | |||
1581 | let (invocation_tt, _) = ast_to_token_tree(¯o_invocation.token_tree().unwrap()) | ||
1582 | .ok_or_else(|| ExpandError::ConversionError)?; | ||
1583 | |||
1584 | self.rules.expand(&invocation_tt).result() | ||
1585 | } | ||
1586 | |||
1587 | fn assert_expand_err(&self, invocation: &str, err: &ExpandError) { | ||
1588 | assert_eq!(self.try_expand_tt(invocation).as_ref(), Err(err)); | ||
1589 | } | ||
1590 | |||
1591 | fn expand_items(&self, invocation: &str) -> SyntaxNode { | ||
1592 | let expanded = self.expand_tt(invocation); | ||
1593 | token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node() | ||
1594 | } | ||
1595 | |||
1596 | fn expand_statements(&self, invocation: &str) -> SyntaxNode { | ||
1597 | let expanded = self.expand_tt(invocation); | ||
1598 | token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node() | ||
1599 | } | ||
1600 | |||
1601 | fn expand_expr(&self, invocation: &str) -> SyntaxNode { | ||
1602 | let expanded = self.expand_tt(invocation); | ||
1603 | token_tree_to_syntax_node(&expanded, FragmentKind::Expr).unwrap().0.syntax_node() | ||
1604 | } | ||
1605 | |||
1606 | fn assert_expand_tt(&self, invocation: &str, expected: &str) { | ||
1607 | let expansion = self.expand_tt(invocation); | ||
1608 | assert_eq!(expansion.to_string(), expected); | ||
1609 | } | ||
1610 | |||
1611 | fn assert_expand(&self, invocation: &str, expected: &str) { | ||
1612 | let expansion = self.expand_tt(invocation); | ||
1613 | let actual = format!("{:?}", expansion); | ||
1614 | test_utils::assert_eq_text!(&actual.trim(), &expected.trim()); | ||
1615 | } | ||
1616 | |||
1617 | fn assert_expand_items(&self, invocation: &str, expected: &str) -> &MacroFixture { | ||
1618 | self.assert_expansion(FragmentKind::Items, invocation, expected); | ||
1619 | self | ||
1620 | } | ||
1621 | |||
1622 | fn assert_expand_statements(&self, invocation: &str, expected: &str) -> &MacroFixture { | ||
1623 | self.assert_expansion(FragmentKind::Statements, invocation, expected); | ||
1624 | self | ||
1625 | } | ||
1626 | |||
1627 | fn assert_expansion(&self, kind: FragmentKind, invocation: &str, expected: &str) { | ||
1628 | let expanded = self.expand_tt(invocation); | ||
1629 | assert_eq!(expanded.to_string(), expected); | ||
1630 | |||
1631 | let expected = expected.replace("$crate", "C_C__C"); | ||
1632 | |||
1633 | // wrap the given text to a macro call | ||
1634 | let expected = { | ||
1635 | let wrapped = format!("wrap_macro!( {} )", expected); | ||
1636 | let wrapped = ast::SourceFile::parse(&wrapped); | ||
1637 | let wrapped = | ||
1638 | wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); | ||
1639 | let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0; | ||
1640 | wrapped.delimiter = None; | ||
1641 | wrapped | ||
1642 | }; | ||
1643 | |||
1644 | let expanded_tree = token_tree_to_syntax_node(&expanded, kind).unwrap().0.syntax_node(); | ||
1645 | let expanded_tree = debug_dump_ignore_spaces(&expanded_tree).trim().to_string(); | ||
1646 | |||
1647 | let expected_tree = token_tree_to_syntax_node(&expected, kind).unwrap().0.syntax_node(); | ||
1648 | let expected_tree = debug_dump_ignore_spaces(&expected_tree).trim().to_string(); | ||
1649 | |||
1650 | let expected_tree = expected_tree.replace("C_C__C", "$crate"); | ||
1651 | assert_eq!( | ||
1652 | expanded_tree, expected_tree, | ||
1653 | "\nleft:\n{}\nright:\n{}", | ||
1654 | expanded_tree, expected_tree, | ||
1655 | ); | ||
1656 | } | ||
1657 | } | ||
1658 | |||
1659 | fn parse_macro_to_tt(ra_fixture: &str) -> tt::Subtree { | ||
1660 | let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); | ||
1661 | let macro_definition = | ||
1662 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | ||
1663 | |||
1664 | let (definition_tt, _) = ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap(); | ||
1665 | |||
1666 | let parsed = parse_to_token_tree( | ||
1667 | &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()], | ||
1668 | ) | ||
1669 | .unwrap() | ||
1670 | .0; | ||
1671 | assert_eq!(definition_tt, parsed); | ||
1672 | |||
1673 | definition_tt | ||
1674 | } | ||
1675 | |||
1676 | pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture { | ||
1677 | let definition_tt = parse_macro_to_tt(ra_fixture); | ||
1678 | let rules = MacroRules::parse(&definition_tt).unwrap(); | ||
1679 | MacroFixture { rules } | ||
1680 | } | ||
1681 | |||
1682 | pub(crate) fn parse_macro_error(ra_fixture: &str) -> ParseError { | ||
1683 | let definition_tt = parse_macro_to_tt(ra_fixture); | ||
1684 | |||
1685 | match MacroRules::parse(&definition_tt) { | ||
1686 | Ok(_) => panic!("Expect error"), | ||
1687 | Err(err) => err, | ||
1688 | } | ||
1689 | } | ||
1690 | |||
1691 | pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree { | ||
1692 | let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap(); | ||
1693 | let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0; | ||
1694 | |||
1695 | let parsed = parse_to_token_tree(ra_fixture).unwrap().0; | ||
1696 | assert_eq!(tt, parsed); | ||
1697 | |||
1698 | parsed | ||
1699 | } | ||
1700 | |||
1701 | fn debug_dump_ignore_spaces(node: &syntax::SyntaxNode) -> String { | ||
1702 | let mut level = 0; | ||
1703 | let mut buf = String::new(); | ||
1704 | macro_rules! indent { | ||
1705 | () => { | ||
1706 | for _ in 0..level { | ||
1707 | buf.push_str(" "); | ||
1708 | } | ||
1709 | }; | ||
1710 | } | ||
1711 | |||
1712 | for event in node.preorder_with_tokens() { | ||
1713 | match event { | ||
1714 | WalkEvent::Enter(element) => { | ||
1715 | match element { | ||
1716 | NodeOrToken::Node(node) => { | ||
1717 | indent!(); | ||
1718 | writeln!(buf, "{:?}", node.kind()).unwrap(); | ||
1719 | } | ||
1720 | NodeOrToken::Token(token) => match token.kind() { | ||
1721 | syntax::SyntaxKind::WHITESPACE => {} | ||
1722 | _ => { | ||
1723 | indent!(); | ||
1724 | writeln!(buf, "{:?}", token.kind()).unwrap(); | ||
1725 | } | ||
1726 | }, | ||
1727 | } | ||
1728 | level += 1; | ||
1729 | } | ||
1730 | WalkEvent::Leave(_) => level -= 1, | ||
1731 | } | ||
1732 | } | ||
1733 | |||
1734 | buf | ||
1735 | } | ||
1736 | |||
#[test]
fn test_issue_2520() {
    let macro_fixture = parse_macro(
        r#"
macro_rules! my_macro {
    {
        ( $(
            $( [] $sname:ident : $stype:ty )?
            $( [$expr:expr] $nname:ident : $ntype:ty )?
        ),* )
    } => {
        Test {
            $(
                $( $sname, )?
            )*
        }
    };
}
"#,
    );

    // Only the `[]`-prefixed bindings contribute an output field; the
    // `[$expr]` arm matches `s` but transcribes nothing for it.
    macro_fixture.assert_expand_items(
        r#"my_macro ! {
    ([] p1 : u32 , [|_| S0K0] s : S0K0 , [] k0 : i32)
}"#,
        "Test {p1 , k0 ,}",
    );
}
1765 | |||
#[test]
fn test_issue_3861() {
    let macro_fixture = parse_macro(
        r#"
macro_rules! rgb_color {
    ($p:expr, $t: ty) => {
        pub fn new() {
            let _ = 0 as $t << $p;
        }
    };
}
"#,
    );

    // Regression test: expanding `as $t << $p` must not panic (no result
    // comparison — surviving the re-parse is the assertion).
    macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#);
}
1782 | |||
#[test]
fn test_repeat_bad_var() {
    // FIXME: the second rule of the macro should be removed and an error about
    // `$( $c )+` raised
    let fixture = parse_macro(
        r#"
macro_rules! foo {
    ($( $b:ident )+) => {
        $( $c )+
    };
    ($( $b:ident )+) => {
        $( $b )+
    }
}
"#,
    );
    fixture.assert_expand_items("foo!(b0 b1);", "b0 b1");
}
1801 | |||
// Checks token boundaries in the expansion by comparing the full debug dump
// (which the companion helper renders whitespace-insensitively): there must be
// no spurious space after the `;` of each generated `mod` item.
//
// NOTE(review): the expected dump below appears mangled by the diff rendering
// this file was recovered from — every node span (`KIND@a..b`) reads
// "[email protected]" and the tree indentation is lost. Restore the literal from the
// upstream rust-analyzer sources before relying on this test.
#[test]
fn test_no_space_after_semi_colon() {
    let expanded = parse_macro(
        r#"
macro_rules! with_std { ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*) }
"#,
    )
    .expand_items(r#"with_std! {mod m;mod f;}"#);

    let dump = format!("{:#?}", expanded);
    assert_eq_text!(
        dump.trim(),
        r###"[email protected]
[email protected]
[email protected]
[email protected] "#"
[email protected] "["
[email protected]
[email protected]
[email protected]
[email protected] "cfg"
[email protected]
[email protected] "("
[email protected] "feature"
[email protected] "="
[email protected] "\"std\""
[email protected] ")"
[email protected] "]"
[email protected] "mod"
[email protected]
[email protected] "m"
[email protected] ";"
[email protected]
[email protected]
[email protected] "#"
[email protected] "["
[email protected]
[email protected]
[email protected]
[email protected] "cfg"
[email protected]
[email protected] "("
[email protected] "feature"
[email protected] "="
[email protected] "\"std\""
[email protected] ")"
[email protected] "]"
[email protected] "mod"
[email protected]
[email protected] "f"
[email protected] ";""###,
    );
}
1855 | |||
// https://github.com/rust-lang/rust/blob/master/src/test/ui/issues/issue-57597.rs
#[test]
fn test_rustc_issue_57597() {
    // Every nesting of `?`/`*`/`+` repetitions around a possibly-empty inner
    // repetition must be rejected at definition time.
    let cases: &[&str] = &[
        "macro_rules! foo { ($($($i:ident)?)+) => {}; }",
        "macro_rules! foo { ($($($i:ident)?)*) => {}; }",
        "macro_rules! foo { ($($($i:ident)?)?) => {}; }",
        "macro_rules! foo { ($($($($i:ident)?)?)?) => {}; }",
        "macro_rules! foo { ($($($($i:ident)*)?)?) => {}; }",
        "macro_rules! foo { ($($($($i:ident)?)*)?) => {}; }",
        "macro_rules! foo { ($($($($i:ident)?)?)*) => {}; }",
        "macro_rules! foo { ($($($($i:ident)*)*)?) => {}; }",
        "macro_rules! foo { ($($($($i:ident)?)*)*) => {}; }",
        "macro_rules! foo { ($($($($i:ident)?)*)+) => {}; }",
        "macro_rules! foo { ($($($($i:ident)+)?)*) => {}; }",
        "macro_rules! foo { ($($($($i:ident)+)*)?) => {}; }",
    ];
    for &fixture in cases {
        // `RepetitionEmtpyTokenTree` is the variant's actual (typo'd) name in
        // the `ParseError` enum.
        assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmtpyTokenTree);
    }
}
1876 | |||
#[test]
fn test_expand_bad_literal() {
    let fixture = parse_macro(
        r#"
macro_rules! foo { ($i:literal) => {}; }
"#,
    );
    // `&k"` is not a literal token, so binding `$i:literal` must fail.
    fixture.assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".into()));
}
1886 | |||
#[test]
fn test_empty_comments() {
    let fixture = parse_macro(
        r#"
macro_rules! one_arg_macro { ($fmt:expr) => (); }
"#,
    );
    // A bare comment carries no tokens, so there is no expression to bind.
    fixture.assert_expand_err(
        r#"one_arg_macro!(/**/)"#,
        &ExpandError::BindingError("expected Expr".into()),
    );
}
diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs new file mode 100644 index 000000000..46c420718 --- /dev/null +++ b/crates/mbe/src/tt_iter.rs | |||
@@ -0,0 +1,75 @@ | |||
//! A cloneable cursor over the children of a `tt::Subtree`, with `expect_*`
//! helpers for consuming one token of a particular kind at a time.
2 | |||
/// Iterator over the direct children of a `tt::Subtree`.
///
/// Cloning is cheap (it copies a slice iterator), so callers can parse
/// speculatively and roll back by keeping the original iterator around.
#[derive(Debug, Clone)]
pub(crate) struct TtIter<'a> {
    pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
}
7 | |||
8 | impl<'a> TtIter<'a> { | ||
9 | pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> { | ||
10 | TtIter { inner: subtree.token_trees.iter() } | ||
11 | } | ||
12 | |||
13 | pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> { | ||
14 | match self.next() { | ||
15 | Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) if *c == char => { | ||
16 | Ok(()) | ||
17 | } | ||
18 | _ => Err(()), | ||
19 | } | ||
20 | } | ||
21 | |||
22 | pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> { | ||
23 | match self.next() { | ||
24 | Some(tt::TokenTree::Subtree(it)) => Ok(it), | ||
25 | _ => Err(()), | ||
26 | } | ||
27 | } | ||
28 | |||
29 | pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> { | ||
30 | match self.next() { | ||
31 | Some(tt::TokenTree::Leaf(it)) => Ok(it), | ||
32 | _ => Err(()), | ||
33 | } | ||
34 | } | ||
35 | |||
36 | pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> { | ||
37 | match self.expect_leaf()? { | ||
38 | tt::Leaf::Ident(it) => Ok(it), | ||
39 | _ => Err(()), | ||
40 | } | ||
41 | } | ||
42 | |||
43 | pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> { | ||
44 | let it = self.expect_leaf()?; | ||
45 | match it { | ||
46 | tt::Leaf::Literal(_) => Ok(it), | ||
47 | tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it), | ||
48 | _ => Err(()), | ||
49 | } | ||
50 | } | ||
51 | |||
52 | pub(crate) fn expect_punct(&mut self) -> Result<&'a tt::Punct, ()> { | ||
53 | match self.expect_leaf()? { | ||
54 | tt::Leaf::Punct(it) => Ok(it), | ||
55 | _ => Err(()), | ||
56 | } | ||
57 | } | ||
58 | |||
59 | pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> { | ||
60 | self.inner.as_slice().get(n) | ||
61 | } | ||
62 | } | ||
63 | |||
64 | impl<'a> Iterator for TtIter<'a> { | ||
65 | type Item = &'a tt::TokenTree; | ||
66 | fn next(&mut self) -> Option<Self::Item> { | ||
67 | self.inner.next() | ||
68 | } | ||
69 | |||
70 | fn size_hint(&self) -> (usize, Option<usize>) { | ||
71 | self.inner.size_hint() | ||
72 | } | ||
73 | } | ||
74 | |||
75 | impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {} | ||