aboutsummaryrefslogtreecommitdiff
path: root/crates/ra_mbe
diff options
context:
space:
mode:
Diffstat (limited to 'crates/ra_mbe')
-rw-r--r--crates/ra_mbe/Cargo.toml20
-rw-r--r--crates/ra_mbe/src/lib.rs278
-rw-r--r--crates/ra_mbe/src/mbe_expander.rs180
-rw-r--r--crates/ra_mbe/src/mbe_expander/matcher.rs477
-rw-r--r--crates/ra_mbe/src/mbe_expander/transcriber.rs254
-rw-r--r--crates/ra_mbe/src/parser.rs184
-rw-r--r--crates/ra_mbe/src/subtree_source.rs197
-rw-r--r--crates/ra_mbe/src/syntax_bridge.rs832
-rw-r--r--crates/ra_mbe/src/tests.rs1897
-rw-r--r--crates/ra_mbe/src/tt_iter.rs75
10 files changed, 0 insertions, 4394 deletions
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml
deleted file mode 100644
index a26746a19..000000000
--- a/crates/ra_mbe/Cargo.toml
+++ /dev/null
@@ -1,20 +0,0 @@
1[package]
2edition = "2018"
3name = "ra_mbe"
4version = "0.1.0"
5authors = ["rust-analyzer developers"]
6license = "MIT OR Apache-2.0"
7
8[lib]
9doctest = false
10
11[dependencies]
12ra_syntax = { path = "../ra_syntax" }
13ra_parser = { path = "../ra_parser" }
14tt = { path = "../ra_tt", package = "ra_tt" }
15rustc-hash = "1.1.0"
16smallvec = "1.2.0"
17log = "0.4.8"
18
19[dev-dependencies]
20test_utils = { path = "../test_utils" }
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
deleted file mode 100644
index dec7ba22e..000000000
--- a/crates/ra_mbe/src/lib.rs
+++ /dev/null
@@ -1,278 +0,0 @@
1//! `mbe` (short for Macro By Example) crate contains code for handling
2//! `macro_rules` macros. It uses `TokenTree` (from `ra_tt` package) as the
3//! interface, although it contains some code to bridge `SyntaxNode`s and
4//! `TokenTree`s as well!
5
6mod parser;
7mod mbe_expander;
8mod syntax_bridge;
9mod tt_iter;
10mod subtree_source;
11
12#[cfg(test)]
13mod tests;
14
15pub use tt::{Delimiter, Punct};
16
17use crate::{
18 parser::{parse_pattern, Op},
19 tt_iter::TtIter,
20};
21
22#[derive(Debug, PartialEq, Eq)]
23pub enum ParseError {
24 Expected(String),
25 RepetitionEmtpyTokenTree,
26}
27
28#[derive(Debug, PartialEq, Eq, Clone)]
29pub enum ExpandError {
30 NoMatchingRule,
31 UnexpectedToken,
32 BindingError(String),
33 ConversionError,
34 InvalidRepeat,
35 ProcMacroError(tt::ExpansionError),
36}
37
38impl From<tt::ExpansionError> for ExpandError {
39 fn from(it: tt::ExpansionError) -> Self {
40 ExpandError::ProcMacroError(it)
41 }
42}
43
44pub use crate::syntax_bridge::{
45 ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
46 TokenMap,
47};
48
49/// This struct contains AST for a single `macro_rules` definition. What might
50/// be very confusing is that AST has almost exactly the same shape as
51/// `tt::TokenTree`, but there's a crucial difference: in macro rules, `$ident`
52/// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
53#[derive(Clone, Debug, PartialEq, Eq)]
54pub struct MacroRules {
55 rules: Vec<Rule>,
56 /// Highest id of the token we have in TokenMap
57 shift: Shift,
58}
59
60#[derive(Clone, Debug, PartialEq, Eq)]
61struct Rule {
62 lhs: tt::Subtree,
63 rhs: tt::Subtree,
64}
65
66#[derive(Clone, Copy, Debug, PartialEq, Eq)]
67struct Shift(u32);
68
69impl Shift {
70 fn new(tt: &tt::Subtree) -> Shift {
71 // Note that TokenId is started from zero,
72 // We have to add 1 to prevent duplication.
73 let value = max_id(tt).map_or(0, |it| it + 1);
74 return Shift(value);
75
76 // Find the max token id inside a subtree
77 fn max_id(subtree: &tt::Subtree) -> Option<u32> {
78 subtree
79 .token_trees
80 .iter()
81 .filter_map(|tt| match tt {
82 tt::TokenTree::Subtree(subtree) => {
83 let tree_id = max_id(subtree);
84 match subtree.delimiter {
85 Some(it) if it.id != tt::TokenId::unspecified() => {
86 Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
87 }
88 _ => tree_id,
89 }
90 }
91 tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
92 if ident.id != tt::TokenId::unspecified() =>
93 {
94 Some(ident.id.0)
95 }
96 _ => None,
97 })
98 .max()
99 }
100 }
101
102 /// Shift given TokenTree token id
103 fn shift_all(self, tt: &mut tt::Subtree) {
104 for t in tt.token_trees.iter_mut() {
105 match t {
106 tt::TokenTree::Leaf(leaf) => match leaf {
107 tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id),
108 tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id),
109 tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id),
110 },
111 tt::TokenTree::Subtree(tt) => {
112 if let Some(it) = tt.delimiter.as_mut() {
113 it.id = self.shift(it.id);
114 };
115 self.shift_all(tt)
116 }
117 }
118 }
119 }
120
121 fn shift(self, id: tt::TokenId) -> tt::TokenId {
122 if id == tt::TokenId::unspecified() {
123 return id;
124 }
125 tt::TokenId(id.0 + self.0)
126 }
127
128 fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
129 id.0.checked_sub(self.0).map(tt::TokenId)
130 }
131}
132
133#[derive(Debug, Eq, PartialEq)]
134pub enum Origin {
135 Def,
136 Call,
137}
138
139impl MacroRules {
140 pub fn parse(tt: &tt::Subtree) -> Result<MacroRules, ParseError> {
141 // Note: this parsing can be implemented using mbe machinery itself, by
142 // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
143 // manually seems easier.
144 let mut src = TtIter::new(tt);
145 let mut rules = Vec::new();
146 while src.len() > 0 {
147 let rule = Rule::parse(&mut src)?;
148 rules.push(rule);
149 if let Err(()) = src.expect_char(';') {
150 if src.len() > 0 {
151 return Err(ParseError::Expected("expected `:`".to_string()));
152 }
153 break;
154 }
155 }
156
157 for rule in rules.iter() {
158 validate(&rule.lhs)?;
159 }
160
161 Ok(MacroRules { rules, shift: Shift::new(tt) })
162 }
163
164 pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> {
165 // apply shift
166 let mut tt = tt.clone();
167 self.shift.shift_all(&mut tt);
168 mbe_expander::expand(self, &tt)
169 }
170
171 pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
172 self.shift.shift(id)
173 }
174
175 pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
176 match self.shift.unshift(id) {
177 Some(id) => (id, Origin::Call),
178 None => (id, Origin::Def),
179 }
180 }
181}
182
183impl Rule {
184 fn parse(src: &mut TtIter) -> Result<Rule, ParseError> {
185 let mut lhs = src
186 .expect_subtree()
187 .map_err(|()| ParseError::Expected("expected subtree".to_string()))?
188 .clone();
189 lhs.delimiter = None;
190 src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?;
191 src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?;
192 let mut rhs = src
193 .expect_subtree()
194 .map_err(|()| ParseError::Expected("expected subtree".to_string()))?
195 .clone();
196 rhs.delimiter = None;
197 Ok(crate::Rule { lhs, rhs })
198 }
199}
200
201fn to_parse_error(e: ExpandError) -> ParseError {
202 let msg = match e {
203 ExpandError::InvalidRepeat => "invalid repeat".to_string(),
204 _ => "invalid macro definition".to_string(),
205 };
206 ParseError::Expected(msg)
207}
208
209fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> {
210 for op in parse_pattern(pattern) {
211 let op = op.map_err(to_parse_error)?;
212
213 match op {
214 Op::TokenTree(tt::TokenTree::Subtree(subtree)) => validate(subtree)?,
215 Op::Repeat { subtree, separator, .. } => {
216 // Checks that no repetition which could match an empty token
217 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
218
219 if separator.is_none() {
220 if parse_pattern(subtree).all(|child_op| {
221 match child_op.map_err(to_parse_error) {
222 Ok(Op::Var { kind, .. }) => {
223 // vis is optional
224 if kind.map_or(false, |it| it == "vis") {
225 return true;
226 }
227 }
228 Ok(Op::Repeat { kind, .. }) => {
229 return matches!(
230 kind,
231 parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne
232 )
233 }
234 _ => {}
235 }
236 false
237 }) {
238 return Err(ParseError::RepetitionEmtpyTokenTree);
239 }
240 }
241 validate(subtree)?
242 }
243 _ => (),
244 }
245 }
246 Ok(())
247}
248
249#[derive(Debug)]
250pub struct ExpandResult<T>(pub T, pub Option<ExpandError>);
251
252impl<T> ExpandResult<T> {
253 pub fn ok(t: T) -> ExpandResult<T> {
254 ExpandResult(t, None)
255 }
256
257 pub fn only_err(err: ExpandError) -> ExpandResult<T>
258 where
259 T: Default,
260 {
261 ExpandResult(Default::default(), Some(err))
262 }
263
264 pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
265 ExpandResult(f(self.0), self.1)
266 }
267
268 pub fn result(self) -> Result<T, ExpandError> {
269 self.1.map(Err).unwrap_or(Ok(self.0))
270 }
271}
272
273impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> {
274 fn from(result: Result<T, ExpandError>) -> ExpandResult<T> {
275 result
276 .map_or_else(|e| ExpandResult(Default::default(), Some(e)), |it| ExpandResult(it, None))
277 }
278}
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
deleted file mode 100644
index b1eacf124..000000000
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ /dev/null
@@ -1,180 +0,0 @@
1//! This module takes a (parsed) definition of `macro_rules` invocation, a
2//! `tt::TokenTree` representing an argument of macro invocation, and produces a
3//! `tt::TokenTree` for the result of the expansion.
4
5mod matcher;
6mod transcriber;
7
8use ra_syntax::SmolStr;
9use rustc_hash::FxHashMap;
10
11use crate::{ExpandError, ExpandResult};
12
13pub(crate) fn expand(rules: &crate::MacroRules, input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
14 expand_rules(&rules.rules, input)
15}
16
17fn expand_rules(rules: &[crate::Rule], input: &tt::Subtree) -> ExpandResult<tt::Subtree> {
18 let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
19 for rule in rules {
20 let new_match = match matcher::match_(&rule.lhs, input) {
21 Ok(m) => m,
22 Err(_e) => {
23 // error in pattern parsing
24 continue;
25 }
26 };
27 if new_match.err.is_none() {
28 // If we find a rule that applies without errors, we're done.
29 // Unconditionally returning the transcription here makes the
30 // `test_repeat_bad_var` test fail.
31 let ExpandResult(res, transcribe_err) =
32 transcriber::transcribe(&rule.rhs, &new_match.bindings);
33 if transcribe_err.is_none() {
34 return ExpandResult::ok(res);
35 }
36 }
37 // Use the rule if we matched more tokens, or had fewer errors
38 if let Some((prev_match, _)) = &match_ {
39 if (new_match.unmatched_tts, new_match.err_count)
40 < (prev_match.unmatched_tts, prev_match.err_count)
41 {
42 match_ = Some((new_match, rule));
43 }
44 } else {
45 match_ = Some((new_match, rule));
46 }
47 }
48 if let Some((match_, rule)) = match_ {
49 // if we got here, there was no match without errors
50 let ExpandResult(result, transcribe_err) =
51 transcriber::transcribe(&rule.rhs, &match_.bindings);
52 ExpandResult(result, match_.err.or(transcribe_err))
53 } else {
54 ExpandResult(tt::Subtree::default(), Some(ExpandError::NoMatchingRule))
55 }
56}
57
58/// The actual algorithm for expansion is not too hard, but is pretty tricky.
59/// `Bindings` structure is the key to understanding what we are doing here.
60///
61/// On the high level, it stores mapping from meta variables to the bits of
62/// syntax it should be substituted with. For example, if `$e:expr` is matched
63/// with `1 + 1` by macro_rules, the `Binding` will store `$e -> 1 + 1`.
64///
65/// The tricky bit is dealing with repetitions (`$()*`). Consider this example:
66///
67/// ```not_rust
68/// macro_rules! foo {
69/// ($($ i:ident $($ e:expr),*);*) => {
70/// $(fn $ i() { $($ e);*; })*
71/// }
72/// }
73/// foo! { foo 1,2,3; bar 4,5,6 }
74/// ```
75///
76/// Here, the `$i` meta variable is matched first with `foo` and then with
77/// `bar`, and `$e` is matched in turn with `1`, `2`, `3`, `4`, `5`, `6`.
78///
79/// To represent such "multi-mappings", we use a recursive structures: we map
80/// variables not to values, but to *lists* of values or other lists (that is,
81/// to the trees).
82///
83/// For the above example, the bindings would store
84///
85/// ```not_rust
86/// i -> [foo, bar]
87/// e -> [[1, 2, 3], [4, 5, 6]]
88/// ```
89///
90/// We construct `Bindings` in the `match_lhs`. The interesting case is
91/// `TokenTree::Repeat`, where we use `push_nested` to create the desired
92/// nesting structure.
93///
94/// The other side of the puzzle is `expand_subtree`, where we use the bindings
95/// to substitute meta variables in the output template. When expanding, we
96/// maintain a `nesting` stack of indices which tells us which occurrence from
97/// the `Bindings` we should take. We push to the stack when we enter a
98/// repetition.
99///
100/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
101/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
102/// many is not a plain `usize`, but an `&[usize]`.
103#[derive(Debug, Default)]
104struct Bindings {
105 inner: FxHashMap<SmolStr, Binding>,
106}
107
108#[derive(Debug)]
109enum Binding {
110 Fragment(Fragment),
111 Nested(Vec<Binding>),
112 Empty,
113}
114
115#[derive(Debug, Clone)]
116enum Fragment {
117 /// token fragments are just copy-pasted into the output
118 Tokens(tt::TokenTree),
119 /// Ast fragments are inserted with fake delimiters, so as to make things
120 /// like `$i * 2` where `$i = 1 + 1` work as expectd.
121 Ast(tt::TokenTree),
122}
123
124#[cfg(test)]
125mod tests {
126 use ra_syntax::{ast, AstNode};
127
128 use super::*;
129 use crate::ast_to_token_tree;
130
131 #[test]
132 fn test_expand_rule() {
133 assert_err(
134 "($($i:ident);*) => ($i)",
135 "foo!{a}",
136 ExpandError::BindingError(String::from(
137 "expected simple binding, found nested binding `i`",
138 )),
139 );
140
141 // FIXME:
142 // Add an err test case for ($($i:ident)) => ($())
143 }
144
145 fn assert_err(macro_body: &str, invocation: &str, err: ExpandError) {
146 assert_eq!(expand_first(&create_rules(&format_macro(macro_body)), invocation).1, Some(err));
147 }
148
149 fn format_macro(macro_body: &str) -> String {
150 format!(
151 "
152 macro_rules! foo {{
153 {}
154 }}
155",
156 macro_body
157 )
158 }
159
160 fn create_rules(macro_definition: &str) -> crate::MacroRules {
161 let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
162 let macro_definition =
163 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
164
165 let (definition_tt, _) =
166 ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
167 crate::MacroRules::parse(&definition_tt).unwrap()
168 }
169
170 fn expand_first(rules: &crate::MacroRules, invocation: &str) -> ExpandResult<tt::Subtree> {
171 let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
172 let macro_invocation =
173 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
174
175 let (invocation_tt, _) =
176 ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
177
178 expand_rules(&rules.rules, &invocation_tt)
179 }
180}
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs
deleted file mode 100644
index f9e515b81..000000000
--- a/crates/ra_mbe/src/mbe_expander/matcher.rs
+++ /dev/null
@@ -1,477 +0,0 @@
1//! FIXME: write short doc here
2
3use crate::{
4 mbe_expander::{Binding, Bindings, Fragment},
5 parser::{parse_pattern, Op, RepeatKind, Separator},
6 subtree_source::SubtreeTokenSource,
7 tt_iter::TtIter,
8 ExpandError,
9};
10
11use super::ExpandResult;
12use ra_parser::{FragmentKind::*, TreeSink};
13use ra_syntax::{SmolStr, SyntaxKind};
14use tt::buffer::{Cursor, TokenBuffer};
15
16impl Bindings {
17 fn push_optional(&mut self, name: &SmolStr) {
18 // FIXME: Do we have a better way to represent an empty token ?
19 // Insert an empty subtree for empty token
20 let tt = tt::Subtree::default().into();
21 self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
22 }
23
24 fn push_empty(&mut self, name: &SmolStr) {
25 self.inner.insert(name.clone(), Binding::Empty);
26 }
27
28 fn push_nested(&mut self, idx: usize, nested: Bindings) -> Result<(), ExpandError> {
29 for (key, value) in nested.inner {
30 if !self.inner.contains_key(&key) {
31 self.inner.insert(key.clone(), Binding::Nested(Vec::new()));
32 }
33 match self.inner.get_mut(&key) {
34 Some(Binding::Nested(it)) => {
35 // insert empty nested bindings before this one
36 while it.len() < idx {
37 it.push(Binding::Nested(vec![]));
38 }
39 it.push(value);
40 }
41 _ => {
42 return Err(ExpandError::BindingError(format!(
43 "could not find binding `{}`",
44 key
45 )));
46 }
47 }
48 }
49 Ok(())
50 }
51}
52
53macro_rules! err {
54 () => {
55 ExpandError::BindingError(format!(""))
56 };
57 ($($tt:tt)*) => {
58 ExpandError::BindingError(format!($($tt)*))
59 };
60}
61
62#[derive(Debug, Default)]
63pub(super) struct Match {
64 pub bindings: Bindings,
65 /// We currently just keep the first error and count the rest to compare matches.
66 pub err: Option<ExpandError>,
67 pub err_count: usize,
68 /// How many top-level token trees were left to match.
69 pub unmatched_tts: usize,
70}
71
72impl Match {
73 pub fn add_err(&mut self, err: ExpandError) {
74 let prev_err = self.err.take();
75 self.err = prev_err.or(Some(err));
76 self.err_count += 1;
77 }
78}
79
80// General note: These functions have two channels to return errors, a `Result`
81// return value and the `&mut Match`. The returned Result is for pattern parsing
82// errors; if a branch of the macro definition doesn't parse, it doesn't make
83// sense to try using it. Matching errors are added to the `Match`. It might
84// make sense to make pattern parsing a separate step?
85
86pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Match, ExpandError> {
87 assert!(pattern.delimiter == None);
88
89 let mut res = Match::default();
90 let mut src = TtIter::new(src);
91
92 match_subtree(&mut res, pattern, &mut src)?;
93
94 if src.len() > 0 {
95 res.unmatched_tts += src.len();
96 res.add_err(err!("leftover tokens"));
97 }
98
99 Ok(res)
100}
101
102fn match_subtree(
103 res: &mut Match,
104 pattern: &tt::Subtree,
105 src: &mut TtIter,
106) -> Result<(), ExpandError> {
107 for op in parse_pattern(pattern) {
108 match op? {
109 Op::TokenTree(tt::TokenTree::Leaf(lhs)) => {
110 let rhs = match src.expect_leaf() {
111 Ok(l) => l,
112 Err(()) => {
113 res.add_err(err!("expected leaf: `{}`", lhs));
114 continue;
115 }
116 };
117 match (lhs, rhs) {
118 (
119 tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
120 tt::Leaf::Punct(tt::Punct { char: rhs, .. }),
121 ) if lhs == rhs => (),
122 (
123 tt::Leaf::Ident(tt::Ident { text: lhs, .. }),
124 tt::Leaf::Ident(tt::Ident { text: rhs, .. }),
125 ) if lhs == rhs => (),
126 (
127 tt::Leaf::Literal(tt::Literal { text: lhs, .. }),
128 tt::Leaf::Literal(tt::Literal { text: rhs, .. }),
129 ) if lhs == rhs => (),
130 _ => {
131 res.add_err(ExpandError::UnexpectedToken);
132 }
133 }
134 }
135 Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
136 let rhs = match src.expect_subtree() {
137 Ok(s) => s,
138 Err(()) => {
139 res.add_err(err!("expected subtree"));
140 continue;
141 }
142 };
143 if lhs.delimiter_kind() != rhs.delimiter_kind() {
144 res.add_err(err!("mismatched delimiter"));
145 continue;
146 }
147 let mut src = TtIter::new(rhs);
148 match_subtree(res, lhs, &mut src)?;
149 if src.len() > 0 {
150 res.add_err(err!("leftover tokens"));
151 }
152 }
153 Op::Var { name, kind } => {
154 let kind = match kind {
155 Some(k) => k,
156 None => {
157 res.add_err(ExpandError::UnexpectedToken);
158 continue;
159 }
160 };
161 let ExpandResult(matched, match_err) = match_meta_var(kind.as_str(), src);
162 match matched {
163 Some(fragment) => {
164 res.bindings.inner.insert(name.clone(), Binding::Fragment(fragment));
165 }
166 None if match_err.is_none() => res.bindings.push_optional(name),
167 _ => {}
168 }
169 if let Some(err) = match_err {
170 res.add_err(err);
171 }
172 }
173 Op::Repeat { subtree, kind, separator } => {
174 match_repeat(res, subtree, kind, separator, src)?;
175 }
176 }
177 }
178 Ok(())
179}
180
181impl<'a> TtIter<'a> {
182 fn eat_separator(&mut self, separator: &Separator) -> bool {
183 let mut fork = self.clone();
184 let ok = match separator {
185 Separator::Ident(lhs) => match fork.expect_ident() {
186 Ok(rhs) => rhs.text == lhs.text,
187 _ => false,
188 },
189 Separator::Literal(lhs) => match fork.expect_literal() {
190 Ok(rhs) => match rhs {
191 tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
192 tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
193 tt::Leaf::Punct(_) => false,
194 },
195 _ => false,
196 },
197 Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() {
198 Ok(rhs) => rhs.char == lhs.char,
199 _ => false,
200 }),
201 };
202 if ok {
203 *self = fork;
204 }
205 ok
206 }
207
208 pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
209 match self.peek_n(0) {
210 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
211 return self.expect_lifetime();
212 }
213 _ => (),
214 }
215
216 let tt = self.next().ok_or_else(|| ())?.clone();
217 let punct = match tt {
218 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
219 punct
220 }
221 _ => return Ok(tt),
222 };
223
224 let (second, third) = match (self.peek_n(0), self.peek_n(1)) {
225 (
226 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))),
227 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))),
228 ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)),
229 (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None),
230 _ => return Ok(tt),
231 };
232
233 match (punct.char, second, third) {
234 ('.', '.', Some('.'))
235 | ('.', '.', Some('='))
236 | ('<', '<', Some('='))
237 | ('>', '>', Some('=')) => {
238 let tt2 = self.next().unwrap().clone();
239 let tt3 = self.next().unwrap().clone();
240 Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into())
241 }
242 ('-', '=', None)
243 | ('-', '>', None)
244 | (':', ':', None)
245 | ('!', '=', None)
246 | ('.', '.', None)
247 | ('*', '=', None)
248 | ('/', '=', None)
249 | ('&', '&', None)
250 | ('&', '=', None)
251 | ('%', '=', None)
252 | ('^', '=', None)
253 | ('+', '=', None)
254 | ('<', '<', None)
255 | ('<', '=', None)
256 | ('=', '=', None)
257 | ('=', '>', None)
258 | ('>', '=', None)
259 | ('>', '>', None)
260 | ('|', '=', None)
261 | ('|', '|', None) => {
262 let tt2 = self.next().unwrap().clone();
263 Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into())
264 }
265 _ => Ok(tt),
266 }
267 }
268
269 pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
270 let punct = self.expect_punct()?;
271 if punct.char != '\'' {
272 return Err(());
273 }
274 let ident = self.expect_ident()?;
275
276 Ok(tt::Subtree {
277 delimiter: None,
278 token_trees: vec![
279 tt::Leaf::Punct(punct.clone()).into(),
280 tt::Leaf::Ident(ident.clone()).into(),
281 ],
282 }
283 .into())
284 }
285
286 pub(crate) fn expect_fragment(
287 &mut self,
288 fragment_kind: ra_parser::FragmentKind,
289 ) -> ExpandResult<Option<tt::TokenTree>> {
290 pub(crate) struct OffsetTokenSink<'a> {
291 pub(crate) cursor: Cursor<'a>,
292 pub(crate) error: bool,
293 }
294
295 impl<'a> TreeSink for OffsetTokenSink<'a> {
296 fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
297 if kind == SyntaxKind::LIFETIME {
298 n_tokens = 2;
299 }
300 for _ in 0..n_tokens {
301 self.cursor = self.cursor.bump_subtree();
302 }
303 }
304 fn start_node(&mut self, _kind: SyntaxKind) {}
305 fn finish_node(&mut self) {}
306 fn error(&mut self, _error: ra_parser::ParseError) {
307 self.error = true;
308 }
309 }
310
311 let buffer = TokenBuffer::new(&self.inner.as_slice());
312 let mut src = SubtreeTokenSource::new(&buffer);
313 let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
314
315 ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind);
316
317 let mut err = None;
318 if !sink.cursor.is_root() || sink.error {
319 err = Some(err!("expected {:?}", fragment_kind));
320 }
321
322 let mut curr = buffer.begin();
323 let mut res = vec![];
324
325 if sink.cursor.is_root() {
326 while curr != sink.cursor {
327 if let Some(token) = curr.token_tree() {
328 res.push(token);
329 }
330 curr = curr.bump();
331 }
332 }
333 self.inner = self.inner.as_slice()[res.len()..].iter();
334 if res.len() == 0 && err.is_none() {
335 err = Some(err!("no tokens consumed"));
336 }
337 let res = match res.len() {
338 1 => Some(res[0].clone()),
339 0 => None,
340 _ => Some(tt::TokenTree::Subtree(tt::Subtree {
341 delimiter: None,
342 token_trees: res.into_iter().cloned().collect(),
343 })),
344 };
345 ExpandResult(res, err)
346 }
347
348 pub(crate) fn eat_vis(&mut self) -> Option<tt::TokenTree> {
349 let mut fork = self.clone();
350 match fork.expect_fragment(Visibility) {
351 ExpandResult(tt, None) => {
352 *self = fork;
353 tt
354 }
355 ExpandResult(_, Some(_)) => None,
356 }
357 }
358}
359
360pub(super) fn match_repeat(
361 res: &mut Match,
362 pattern: &tt::Subtree,
363 kind: RepeatKind,
364 separator: Option<Separator>,
365 src: &mut TtIter,
366) -> Result<(), ExpandError> {
367 // Dirty hack to make macro-expansion terminate.
368 // This should be replaced by a propper macro-by-example implementation
369 let mut limit = 65536;
370 let mut counter = 0;
371
372 for i in 0.. {
373 let mut fork = src.clone();
374
375 if let Some(separator) = &separator {
376 if i != 0 && !fork.eat_separator(separator) {
377 break;
378 }
379 }
380
381 let mut nested = Match::default();
382 match_subtree(&mut nested, pattern, &mut fork)?;
383 if nested.err.is_none() {
384 limit -= 1;
385 if limit == 0 {
386 log::warn!(
387 "match_lhs exceeded repeat pattern limit => {:#?}\n{:#?}\n{:#?}\n{:#?}",
388 pattern,
389 src,
390 kind,
391 separator
392 );
393 break;
394 }
395 *src = fork;
396
397 if let Err(err) = res.bindings.push_nested(counter, nested.bindings) {
398 res.add_err(err);
399 }
400 counter += 1;
401 if counter == 1 {
402 if let RepeatKind::ZeroOrOne = kind {
403 break;
404 }
405 }
406 } else {
407 break;
408 }
409 }
410
411 match (kind, counter) {
412 (RepeatKind::OneOrMore, 0) => {
413 res.add_err(ExpandError::UnexpectedToken);
414 }
415 (_, 0) => {
416 // Collect all empty variables in subtrees
417 let mut vars = Vec::new();
418 collect_vars(&mut vars, pattern)?;
419 for var in vars {
420 res.bindings.push_empty(&var)
421 }
422 }
423 _ => (),
424 }
425 Ok(())
426}
427
428fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
429 let fragment = match kind {
430 "path" => Path,
431 "expr" => Expr,
432 "ty" => Type,
433 "pat" => Pattern,
434 "stmt" => Statement,
435 "block" => Block,
436 "meta" => MetaItem,
437 "item" => Item,
438 _ => {
439 let tt_result = match kind {
440 "ident" => input
441 .expect_ident()
442 .map(|ident| Some(tt::Leaf::from(ident.clone()).into()))
443 .map_err(|()| err!("expected ident")),
444 "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
445 "lifetime" => input
446 .expect_lifetime()
447 .map(|tt| Some(tt))
448 .map_err(|()| err!("expected lifetime")),
449 "literal" => input
450 .expect_literal()
451 .map(|literal| Some(tt::Leaf::from(literal.clone()).into()))
452 .map_err(|()| err!()),
453 // `vis` is optional
454 "vis" => match input.eat_vis() {
455 Some(vis) => Ok(Some(vis)),
456 None => Ok(None),
457 },
458 _ => Err(ExpandError::UnexpectedToken),
459 };
460 return tt_result.map(|it| it.map(Fragment::Tokens)).into();
461 }
462 };
463 let result = input.expect_fragment(fragment);
464 result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
465}
466
467fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &tt::Subtree) -> Result<(), ExpandError> {
468 for op in parse_pattern(pattern) {
469 match op? {
470 Op::Var { name, .. } => buf.push(name.clone()),
471 Op::TokenTree(tt::TokenTree::Leaf(_)) => (),
472 Op::TokenTree(tt::TokenTree::Subtree(subtree)) => collect_vars(buf, subtree)?,
473 Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?,
474 }
475 }
476 Ok(())
477}
diff --git a/crates/ra_mbe/src/mbe_expander/transcriber.rs b/crates/ra_mbe/src/mbe_expander/transcriber.rs
deleted file mode 100644
index 7c9bb4d00..000000000
--- a/crates/ra_mbe/src/mbe_expander/transcriber.rs
+++ /dev/null
@@ -1,254 +0,0 @@
1//! Transcriber takes a template, like `fn $ident() {}`, a set of bindings like
2//! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
3
4use ra_syntax::SmolStr;
5
6use super::ExpandResult;
7use crate::{
8 mbe_expander::{Binding, Bindings, Fragment},
9 parser::{parse_template, Op, RepeatKind, Separator},
10 ExpandError,
11};
12
impl Bindings {
    /// Whether any binding for metavariable `name` exists.
    fn contains(&self, name: &str) -> bool {
        self.inner.contains_key(name)
    }

    /// Resolves `name` at the current repetition depth, descending one level
    /// of nesting per entry in `nesting`. Marks each traversed level as `hit`;
    /// sets `at_end` on the level where the lookup runs out of values.
    fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> {
        let mut b = self.inner.get(name).ok_or_else(|| {
            ExpandError::BindingError(format!("could not find binding `{}`", name))
        })?;
        for nesting_state in nesting.iter_mut() {
            nesting_state.hit = true;
            b = match b {
                // A concrete fragment is usable from any deeper nesting level.
                Binding::Fragment(_) => break,
                Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| {
                    nesting_state.at_end = true;
                    ExpandError::BindingError(format!("could not find nested binding `{}`", name))
                })?,
                Binding::Empty => {
                    nesting_state.at_end = true;
                    return Err(ExpandError::BindingError(format!(
                        "could not find empty binding `{}`",
                        name
                    )));
                }
            };
        }
        match b {
            Binding::Fragment(it) => Ok(it),
            Binding::Nested(_) => Err(ExpandError::BindingError(format!(
                "expected simple binding, found nested binding `{}`",
                name
            ))),
            Binding::Empty => Err(ExpandError::BindingError(format!(
                "expected simple binding, found empty binding `{}`",
                name
            ))),
        }
    }
}
52
53pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult<tt::Subtree> {
54 assert!(template.delimiter == None);
55 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
56 let mut arena: Vec<tt::TokenTree> = Vec::new();
57 expand_subtree(&mut ctx, template, &mut arena)
58}
59
/// Per-level cursor into a nested repetition during transcription.
#[derive(Debug)]
struct NestingState {
    /// Index of the current iteration within this repetition level.
    idx: usize,
    /// `hit` is currently necessary to tell `expand_repeat` if it should stop
    /// because there is no variable in use by the current repetition
    hit: bool,
    /// `at_end` is currently necessary to tell `expand_repeat` if it should stop
    /// because there is no more value available for the current repetition
    at_end: bool,
}
70
/// Shared state threaded through transcription: the matched bindings plus the
/// stack of repetition levels currently being expanded.
#[derive(Debug)]
struct ExpandCtx<'a> {
    bindings: &'a Bindings,
    nesting: Vec<NestingState>,
}
76
/// Expands every op of `template` into `arena`, then drains what this call
/// added into a fresh subtree carrying `template`'s delimiter.
///
/// The first error encountered is remembered, but expansion continues so a
/// best-effort result is still produced.
fn expand_subtree(
    ctx: &mut ExpandCtx,
    template: &tt::Subtree,
    arena: &mut Vec<tt::TokenTree>,
) -> ExpandResult<tt::Subtree> {
    // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
    let start_elements = arena.len();
    let mut err = None;
    for op in parse_template(template) {
        let op = match op {
            Ok(op) => op,
            Err(e) => {
                err = Some(e);
                break;
            }
        };
        match op {
            Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()),
            Op::TokenTree(tt::TokenTree::Subtree(tt)) => {
                let ExpandResult(tt, e) = expand_subtree(ctx, tt, arena);
                err = err.or(e);
                arena.push(tt.into());
            }
            Op::Var { name, kind: _ } => {
                let ExpandResult(fragment, e) = expand_var(ctx, name);
                err = err.or(e);
                push_fragment(arena, fragment);
            }
            Op::Repeat { subtree, kind, separator } => {
                let ExpandResult(fragment, e) = expand_repeat(ctx, subtree, kind, separator, arena);
                err = err.or(e);
                push_fragment(arena, fragment)
            }
        }
    }
    // drain the elements added in this instance of expand_subtree
    let tts = arena.drain(start_elements..arena.len()).collect();
    ExpandResult(tt::Subtree { delimiter: template.delimiter, token_trees: tts }, err)
}
116
/// Produces the replacement tokens for a single `$v` in the template.
fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> {
    if v == "crate" {
        // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
        let tt =
            tt::Leaf::from(tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() })
                .into();
        ExpandResult::ok(Fragment::Tokens(tt))
    } else if !ctx.bindings.contains(v) {
        // Note that it is possible to have a `$var` inside a macro which is not bound.
        // For example:
        // ```
        // macro_rules! foo {
        //     ($a:ident, $b:ident, $c:tt) => {
        //         macro_rules! bar {
        //             ($bi:ident) => {
        //                 fn $bi() -> u8 {$c}
        //             }
        //         }
        //     }
        // ```
        // We just treat it a normal tokens
        let tt = tt::Subtree {
            delimiter: None,
            token_trees: vec![
                tt::Leaf::from(tt::Punct {
                    char: '$',
                    spacing: tt::Spacing::Alone,
                    id: tt::TokenId::unspecified(),
                })
                .into(),
                tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
                    .into(),
            ],
        }
        .into();
        ExpandResult::ok(Fragment::Tokens(tt))
    } else {
        // Bound variable: resolve it at the current repetition depth; on
        // failure yield empty tokens alongside the error.
        ctx.bindings.get(&v, &mut ctx.nesting).map_or_else(
            |e| ExpandResult(Fragment::Tokens(tt::TokenTree::empty()), Some(e)),
            |b| ExpandResult::ok(b.clone()),
        )
    }
}
160
161fn expand_repeat(
162 ctx: &mut ExpandCtx,
163 template: &tt::Subtree,
164 kind: RepeatKind,
165 separator: Option<Separator>,
166 arena: &mut Vec<tt::TokenTree>,
167) -> ExpandResult<Fragment> {
168 let mut buf: Vec<tt::TokenTree> = Vec::new();
169 ctx.nesting.push(NestingState { idx: 0, at_end: false, hit: false });
170 // Dirty hack to make macro-expansion terminate.
171 // This should be replaced by a proper macro-by-example implementation
172 let limit = 65536;
173 let mut has_seps = 0;
174 let mut counter = 0;
175
176 loop {
177 let ExpandResult(mut t, e) = expand_subtree(ctx, template, arena);
178 let nesting_state = ctx.nesting.last_mut().unwrap();
179 if nesting_state.at_end || !nesting_state.hit {
180 break;
181 }
182 nesting_state.idx += 1;
183 nesting_state.hit = false;
184
185 counter += 1;
186 if counter == limit {
187 log::warn!(
188 "expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}",
189 template,
190 ctx
191 );
192 break;
193 }
194
195 if e.is_some() {
196 continue;
197 }
198
199 t.delimiter = None;
200 push_subtree(&mut buf, t);
201
202 if let Some(ref sep) = separator {
203 match sep {
204 Separator::Ident(ident) => {
205 has_seps = 1;
206 buf.push(tt::Leaf::from(ident.clone()).into());
207 }
208 Separator::Literal(lit) => {
209 has_seps = 1;
210 buf.push(tt::Leaf::from(lit.clone()).into());
211 }
212
213 Separator::Puncts(puncts) => {
214 has_seps = puncts.len();
215 for punct in puncts {
216 buf.push(tt::Leaf::from(*punct).into());
217 }
218 }
219 }
220 }
221
222 if RepeatKind::ZeroOrOne == kind {
223 break;
224 }
225 }
226
227 ctx.nesting.pop().unwrap();
228 for _ in 0..has_seps {
229 buf.pop();
230 }
231
232 // Check if it is a single token subtree without any delimiter
233 // e.g {Delimiter:None> ['>'] /Delimiter:None>}
234 let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
235
236 if RepeatKind::OneOrMore == kind && counter == 0 {
237 return ExpandResult(Fragment::Tokens(tt), Some(ExpandError::UnexpectedToken));
238 }
239 ExpandResult::ok(Fragment::Tokens(tt))
240}
241
242fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
243 match fragment {
244 Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
245 Fragment::Tokens(tt) | Fragment::Ast(tt) => buf.push(tt),
246 }
247}
248
249fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
250 match tt.delimiter {
251 None => buf.extend(tt.token_trees),
252 _ => buf.push(tt.into()),
253 }
254}
diff --git a/crates/ra_mbe/src/parser.rs b/crates/ra_mbe/src/parser.rs
deleted file mode 100644
index 1e5dafbdf..000000000
--- a/crates/ra_mbe/src/parser.rs
+++ /dev/null
@@ -1,184 +0,0 @@
1//! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token
2//! trees.
3
4use ra_syntax::SmolStr;
5use smallvec::SmallVec;
6
7use crate::{tt_iter::TtIter, ExpandError};
8
/// A single operation recognized in a macro pattern or template.
#[derive(Debug)]
pub(crate) enum Op<'a> {
    /// `$name` or `$name:kind` — a metavariable with optional fragment kind.
    Var { name: &'a SmolStr, kind: Option<&'a SmolStr> },
    /// `$(...)sep*` — a repetition with optional separator.
    Repeat { subtree: &'a tt::Subtree, kind: RepeatKind, separator: Option<Separator> },
    /// A plain token passed through verbatim.
    TokenTree(&'a tt::TokenTree),
}
15
/// The repetition operator of a `$(...)` group.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum RepeatKind {
    /// `*`
    ZeroOrMore,
    /// `+`
    OneOrMore,
    /// `?`
    ZeroOrOne,
}
22
/// The separator between repetition iterations: a single ident, a single
/// literal, or up to three punctuation characters (e.g. `=>`).
#[derive(Clone, Debug, Eq)]
pub(crate) enum Separator {
    Literal(tt::Literal),
    Ident(tt::Ident),
    Puncts(SmallVec<[tt::Punct; 3]>),
}
29
30// Note that when we compare a Separator, we just care about its textual value.
31impl PartialEq for Separator {
32 fn eq(&self, other: &Separator) -> bool {
33 use Separator::*;
34
35 match (self, other) {
36 (Ident(ref a), Ident(ref b)) => a.text == b.text,
37 (Literal(ref a), Literal(ref b)) => a.text == b.text,
38 (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => {
39 let a_iter = a.iter().map(|a| a.char);
40 let b_iter = b.iter().map(|b| b.char);
41 a_iter.eq(b_iter)
42 }
43 _ => false,
44 }
45 }
46}
47
/// Parses the right-hand side (template) of a macro rule into `Op`s.
pub(crate) fn parse_template(
    template: &tt::Subtree,
) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
    parse_inner(template, Mode::Template)
}
53
/// Parses the left-hand side (pattern) of a macro rule into `Op`s.
pub(crate) fn parse_pattern(
    pattern: &tt::Subtree,
) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
    parse_inner(pattern, Mode::Pattern)
}
59
/// Which side of a rule is being parsed; fragment specifiers (`:kind`) are
/// only consumed in `Pattern` mode.
#[derive(Clone, Copy)]
enum Mode {
    Pattern,
    Template,
}
65
/// Lazily yields one `Op` per top-level construct of `src`.
fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
    let mut src = TtIter::new(src);
    std::iter::from_fn(move || {
        let first = src.next()?;
        Some(next_op(first, &mut src, mode))
    })
}
73
// Placeholder error constructor: the message tokens are currently discarded
// and a generic `UnexpectedToken` is produced.
macro_rules! err {
    ($($tt:tt)*) => {
        ExpandError::UnexpectedToken
    };
}
79
// Early-returns `Err(err!(...))` from the enclosing function.
macro_rules! bail {
    ($($tt:tt)*) => {
        return Err(err!($($tt)*))
    };
}
85
/// Parses one operation starting at `first`, consuming follow-up tokens from
/// `src` for the `$var`, `$var:kind` and `$(...)sep*` forms.
fn next_op<'a>(
    first: &'a tt::TokenTree,
    src: &mut TtIter<'a>,
    mode: Mode,
) -> Result<Op<'a>, ExpandError> {
    let res = match first {
        tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. })) => {
            // Note that the '$' itself is a valid token inside macro_rules.
            let second = match src.next() {
                None => return Ok(Op::TokenTree(first)),
                Some(it) => it,
            };
            match second {
                tt::TokenTree::Subtree(subtree) => {
                    // `$( ... )` — a repetition; separator and kind follow.
                    let (separator, kind) = parse_repeat(src)?;
                    Op::Repeat { subtree, separator, kind }
                }
                tt::TokenTree::Leaf(leaf) => match leaf {
                    tt::Leaf::Punct(..) => return Err(ExpandError::UnexpectedToken),
                    tt::Leaf::Ident(ident) => {
                        let name = &ident.text;
                        let kind = eat_fragment_kind(src, mode)?;
                        Op::Var { name, kind }
                    }
                    tt::Leaf::Literal(lit) => {
                        // `true`/`false` lex as literals but are legal
                        // metavariable names.
                        if is_boolean_literal(lit) {
                            let name = &lit.text;
                            let kind = eat_fragment_kind(src, mode)?;
                            Op::Var { name, kind }
                        } else {
                            bail!("bad var 2");
                        }
                    }
                },
            }
        }
        tt => Op::TokenTree(tt),
    };
    Ok(res)
}
126
127fn eat_fragment_kind<'a>(
128 src: &mut TtIter<'a>,
129 mode: Mode,
130) -> Result<Option<&'a SmolStr>, ExpandError> {
131 if let Mode::Pattern = mode {
132 src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?;
133 let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?;
134 return Ok(Some(&ident.text));
135 };
136 Ok(None)
137}
138
139fn is_boolean_literal(lit: &tt::Literal) -> bool {
140 matches!(lit.text.as_str(), "true" | "false")
141}
142
/// Parses the tail of a repetition — an optional separator followed by one of
/// `*`, `+`, `?` — e.g. the `),*` part of `$($x:expr),*`.
fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), ExpandError> {
    let mut separator = Separator::Puncts(SmallVec::new());
    for tt in src {
        let tt = match tt {
            tt::TokenTree::Leaf(leaf) => leaf,
            tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat),
        };
        let has_sep = match &separator {
            Separator::Puncts(puncts) => !puncts.is_empty(),
            _ => true,
        };
        match tt {
            // At most one ident/literal separator is allowed.
            tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => {
                return Err(ExpandError::InvalidRepeat)
            }
            tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()),
            tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()),
            tt::Leaf::Punct(punct) => {
                let repeat_kind = match punct.char {
                    '*' => RepeatKind::ZeroOrMore,
                    '+' => RepeatKind::OneOrMore,
                    '?' => RepeatKind::ZeroOrOne,
                    _ => {
                        // Any other punct accumulates into the (max three)
                        // punctuation separator.
                        match &mut separator {
                            Separator::Puncts(puncts) => {
                                if puncts.len() == 3 {
                                    return Err(ExpandError::InvalidRepeat);
                                }
                                puncts.push(punct.clone())
                            }
                            _ => return Err(ExpandError::InvalidRepeat),
                        }
                        continue;
                    }
                };
                let separator = if has_sep { Some(separator) } else { None };
                return Ok((separator, repeat_kind));
            }
        }
    }
    Err(ExpandError::InvalidRepeat)
}
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
deleted file mode 100644
index d7866452d..000000000
--- a/crates/ra_mbe/src/subtree_source.rs
+++ /dev/null
@@ -1,197 +0,0 @@
1//! FIXME: write short doc here
2
3use ra_parser::{Token, TokenSource};
4use ra_syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
5use std::cell::{Cell, Ref, RefCell};
6use tt::buffer::{Cursor, TokenBuffer};
7
/// A token in the form the parser consumes: a `SyntaxKind` plus its text and
/// whether it is glued to the next token.
#[derive(Debug, Clone, Eq, PartialEq)]
struct TtToken {
    pub kind: SyntaxKind,
    pub is_joint_to_next: bool,
    pub text: SmolStr,
}
14
/// A `TokenSource` over a `tt::TokenBuffer`, converting token trees to parser
/// tokens lazily. `cached` memoizes converted tokens; `curr` is the current
/// (token, position) pair.
pub(crate) struct SubtreeTokenSource<'a> {
    cached_cursor: Cell<Cursor<'a>>,
    cached: RefCell<Vec<Option<TtToken>>>,
    curr: (Token, usize),
}
20
impl<'a> SubtreeTokenSource<'a> {
    // Helper function used in test
    /// Text of the current token (empty string at EOF).
    #[cfg(test)]
    pub fn text(&self) -> SmolStr {
        match *self.get(self.curr.1) {
            Some(ref tt) => tt.text.clone(),
            _ => SmolStr::new(""),
        }
    }
}
31
impl<'a> SubtreeTokenSource<'a> {
    pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
        let cursor = buffer.begin();

        let mut res = SubtreeTokenSource {
            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
            cached_cursor: Cell::new(cursor),
            cached: RefCell::new(Vec::with_capacity(10)),
        };
        res.curr = (res.mk_token(0), 0);
        res
    }

    /// Builds the parser-facing `Token` at `pos`; EOF past the end.
    fn mk_token(&self, pos: usize) -> Token {
        match *self.get(pos) {
            Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
            None => Token { kind: EOF, is_jointed_to_next: false },
        }
    }

    /// Returns the token at `pos`, lazily converting and caching tokens from
    /// the cursor as needed. `None` entries mark positions past the end.
    fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
        // Glues a `'` punct and the following ident into one LIFETIME token.
        fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
            let tkn = c.token_tree();

            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
                if punct.char == '\'' {
                    let next = c.bump();
                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
                        let res_cursor = next.bump();
                        let text = SmolStr::new("'".to_string() + &ident.to_string());

                        return Some((res_cursor, text));
                    } else {
                        panic!("Next token must be ident : {:#?}", next.token_tree());
                    }
                }
            }

            None
        }

        if pos < self.cached.borrow().len() {
            return Ref::map(self.cached.borrow(), |c| &c[pos]);
        }

        {
            let mut cached = self.cached.borrow_mut();
            while pos >= cached.len() {
                let cursor = self.cached_cursor.get();
                if cursor.eof() {
                    // Past the end: pad with `None` until `pos` is covered.
                    cached.push(None);
                    continue;
                }

                if let Some((curr, text)) = is_lifetime(cursor) {
                    cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text }));
                    self.cached_cursor.set(curr);
                    continue;
                }

                match cursor.token_tree() {
                    Some(tt::TokenTree::Leaf(leaf)) => {
                        cached.push(Some(convert_leaf(&leaf)));
                        self.cached_cursor.set(cursor.bump());
                    }
                    Some(tt::TokenTree::Subtree(subtree)) => {
                        // Descend into the subtree, emitting its open delimiter.
                        self.cached_cursor.set(cursor.subtree().unwrap());
                        cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
                    }
                    None => {
                        // End of a subtree: emit its close delimiter and move on.
                        if let Some(subtree) = cursor.end() {
                            cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
                            self.cached_cursor.set(cursor.bump());
                        }
                    }
                }
            }
        }

        Ref::map(self.cached.borrow(), |c| &c[pos])
    }
}
114
impl<'a> TokenSource for SubtreeTokenSource<'a> {
    fn current(&self) -> Token {
        self.curr.0
    }

    /// Lookahead n token
    fn lookahead_nth(&self, n: usize) -> Token {
        self.mk_token(self.curr.1 + n)
    }

    /// bump cursor to next token
    fn bump(&mut self) {
        if self.current().kind == EOF {
            return;
        }

        self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1);
    }

    /// Is the current token a specified keyword?
    fn is_keyword(&self, kw: &str) -> bool {
        match *self.get(self.curr.1) {
            Some(ref t) => t.text == *kw,
            _ => false,
        }
    }
}
142
143fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
144 let (kinds, texts) = match d {
145 Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"),
146 Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"),
147 Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"),
148 None => ([L_DOLLAR, R_DOLLAR], ""),
149 };
150
151 let idx = closing as usize;
152 let kind = kinds[idx];
153 let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" };
154 TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) }
155}
156
/// Re-lexes a literal's text to recover its `SyntaxKind`.
/// Panics if the text does not lex as a single literal token.
fn convert_literal(l: &tt::Literal) -> TtToken {
    let kind = lex_single_syntax_kind(&l.text)
        .map(|(kind, _error)| kind)
        .filter(|kind| kind.is_literal())
        .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));

    TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
}
165
/// Classifies an identifier token: `true`/`false`, lifetimes (leading `'`),
/// keywords, or a plain IDENT.
fn convert_ident(ident: &tt::Ident) -> TtToken {
    let kind = match ident.text.as_ref() {
        "true" => T![true],
        "false" => T![false],
        i if i.starts_with('\'') => LIFETIME,
        _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
    };

    TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
}
176
177fn convert_punct(p: tt::Punct) -> TtToken {
178 let kind = match SyntaxKind::from_char(p.char) {
179 None => panic!("{:#?} is not a valid punct", p),
180 Some(kind) => kind,
181 };
182
183 let text = {
184 let mut buf = [0u8; 4];
185 let s: &str = p.char.encode_utf8(&mut buf);
186 SmolStr::new(s)
187 };
188 TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
189}
190
191fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
192 match leaf {
193 tt::Leaf::Literal(l) => convert_literal(l),
194 tt::Leaf::Ident(ident) => convert_ident(ident),
195 tt::Leaf::Punct(punct) => convert_punct(*punct),
196 }
197}
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
deleted file mode 100644
index 5fc48507f..000000000
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ /dev/null
@@ -1,832 +0,0 @@
1//! FIXME: write short doc here
2
3use ra_parser::{FragmentKind, ParseError, TreeSink};
4use ra_syntax::{
5 ast::{self, make::tokens::doc_comment},
6 tokenize, AstToken, Parse, SmolStr, SyntaxKind,
7 SyntaxKind::*,
8 SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
9};
10use rustc_hash::FxHashMap;
11use tt::buffer::{Cursor, TokenBuffer};
12
13use crate::subtree_source::SubtreeTokenSource;
14use crate::ExpandError;
15
/// The source range(s) behind a token id: a single token's range, or the pair
/// of open/close ranges of a delimiter.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenTextRange {
    Token(TextRange),
    Delimiter(TextRange, TextRange),
}
21
22impl TokenTextRange {
23 pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
24 match self {
25 TokenTextRange::Token(it) => Some(it),
26 TokenTextRange::Delimiter(open, close) => match kind {
27 T!['{'] | T!['('] | T!['['] => Some(open),
28 T!['}'] | T![')'] | T![']'] => Some(close),
29 _ => None,
30 },
31 }
32 }
33}
34
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct TokenMap {
    /// Maps `tt::TokenId` to the *relative* source range.
    /// Append-only; searched linearly in both directions.
    entries: Vec<(tt::TokenId, TokenTextRange)>,
}
41
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
/// will consume). Thin wrapper over `syntax_node_to_token_tree`.
pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> {
    syntax_node_to_token_tree(ast.syntax())
}
47
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume), along with the id→range map for the produced tokens.
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
    // Ranges in the map are made relative to the node's own start offset.
    let global_offset = node.text_range().start();
    let mut c = Convertor::new(node, global_offset);
    let subtree = c.go()?;
    Some((subtree, c.id_alloc.map))
}
56
57// The following items are what `rustc` macro can be parsed into :
58// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
59// * Expr(P<ast::Expr>) -> token_tree_to_expr
60// * Pat(P<ast::Pat>) -> token_tree_to_pat
61// * Ty(P<ast::Ty>) -> token_tree_to_ty
62// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts
63// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
64//
65// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
66// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
67// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
68
/// Parses a `TokenTree` back into a syntax node of the given `fragment_kind`,
/// returning the parse together with the token map of the result.
/// Fails with `ConversionError` unless the parse yields exactly one root.
pub fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    fragment_kind: FragmentKind,
) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
    let tmp;
    let tokens = match tt {
        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
        // Delimited subtree: wrap it so the delimiter is part of the input.
        _ => {
            tmp = [tt.clone().into()];
            &tmp[..]
        }
    };
    let buffer = TokenBuffer::new(&tokens);
    let mut token_source = SubtreeTokenSource::new(&buffer);
    let mut tree_sink = TtTreeSink::new(buffer.begin());
    ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
    if tree_sink.roots.len() != 1 {
        return Err(ExpandError::ConversionError);
    }
    //FIXME: would be cool to report errors
    let (parse, range_map) = tree_sink.finish();
    Ok((parse, range_map))
}
92
/// Convert a string to a `TokenTree`.
/// Returns `None` if the text does not lex cleanly.
pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
    let (tokens, errors) = tokenize(text);
    if !errors.is_empty() {
        return None;
    }

    let mut conv = RawConvertor {
        text,
        offset: TextSize::default(),
        inner: tokens.iter(),
        id_alloc: TokenIdAlloc {
            map: Default::default(),
            global_offset: TextSize::default(),
            next_id: 0,
        },
    };

    let subtree = conv.go()?;
    Some((subtree, conv.id_alloc.map))
}
114
impl TokenMap {
    /// Finds the id of the token whose range is exactly `relative_range`;
    /// either side of a delimiter pair matches too.
    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
            TokenTextRange::Token(it) => *it == relative_range,
            TokenTextRange::Delimiter(open, close) => {
                *open == relative_range || *close == relative_range
            }
        })?;
        Some(token_id)
    }

    /// Reverse lookup: the source range(s) recorded for `token_id`.
    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
        Some(range)
    }

    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
    }

    /// Records a delimiter pair (close range provisional); returns the entry
    /// index so the close range can be patched or the entry removed later.
    fn insert_delim(
        &mut self,
        token_id: tt::TokenId,
        open_relative_range: TextRange,
        close_relative_range: TextRange,
    ) -> usize {
        let res = self.entries.len();
        self.entries
            .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
        res
    }

    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
        let (_, token_text_range) = &mut self.entries[idx];
        if let TokenTextRange::Delimiter(dim, _) = token_text_range {
            *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
        }
    }

    fn remove_delim(&mut self, idx: usize) {
        // FIXME: This could be accidentally quadratic
        self.entries.remove(idx);
    }
}
159
/// Returns the textual content of a doc comment block as a quoted string
/// That is, strips leading `///` (or `/**`, etc)
/// and strips the ending `*/`
/// And then quote the string, which is needed to convert to `tt::Literal`
fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
    let prefix_len = comment.prefix().len();
    let mut text = &comment.text()[prefix_len..];

    // Remove ending "*/"
    if comment.kind().shape == ast::CommentShape::Block {
        text = &text[0..text.len() - 2];
    }

    // Quote the string
    // Note that `tt::Literal` expect an escaped string
    let text = format!("{:?}", text.escape_default().to_string());
    text.into()
}
178
/// Converts a doc comment token into the equivalent `#[doc = "..."]`
/// (or `#![doc = "..."]` for inner comments) attribute tokens.
/// Returns `None` for non-doc comments.
fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
    let comment = ast::Comment::cast(token.clone())?;
    let doc = comment.kind().doc?;

    // Make `doc="\" Comments\""
    let mut meta_tkns = Vec::new();
    meta_tkns.push(mk_ident("doc"));
    meta_tkns.push(mk_punct('='));
    meta_tkns.push(mk_doc_literal(&comment));

    // Make `#![]`
    let mut token_trees = Vec::new();
    token_trees.push(mk_punct('#'));
    if let ast::CommentPlacement::Inner = doc {
        token_trees.push(mk_punct('!'));
    }
    token_trees.push(tt::TokenTree::from(tt::Subtree {
        delimiter: Some(tt::Delimiter {
            kind: tt::DelimiterKind::Bracket,
            id: tt::TokenId::unspecified(),
        }),
        token_trees: meta_tkns,
    }));

    return Some(token_trees);

    // Helper functions
    fn mk_ident(s: &str) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Ident {
            text: s.into(),
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_punct(c: char) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Punct {
            char: c,
            spacing: tt::Spacing::Alone,
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
        let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };

        tt::TokenTree::from(tt::Leaf::from(lit))
    }
}
227
/// Allocates sequential `tt::TokenId`s while recording each id's source range
/// (relative to `global_offset`) in `map`.
struct TokenIdAlloc {
    map: TokenMap,
    global_offset: TextSize,
    next_id: u32,
}
233
impl TokenIdAlloc {
    /// Allocates an id for a plain token spanning `absolute_range`.
    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
        let relative_range = absolute_range - self.global_offset;
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert(token_id, relative_range);
        token_id
    }

    /// Allocates an id for a delimiter pair; the close range is provisionally
    /// set to the open range until `close_delim` patches (or removes) it.
    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        let idx = self.map.insert_delim(
            token_id,
            open_abs_range - self.global_offset,
            open_abs_range - self.global_offset,
        );
        (token_id, idx)
    }

    /// Finalizes a delimiter entry: records the real close range, or drops the
    /// entry entirely when the close token was never found (`None`).
    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
        match close_abs_range {
            None => {
                self.map.remove_delim(idx);
            }
            Some(close) => {
                self.map.update_close_delim(idx, close - self.global_offset);
            }
        }
    }
}
265
/// A raw token (straight from the lexer) convertor: walks lexer tokens over
/// the original text, tracking the current `offset` into it.
struct RawConvertor<'a> {
    text: &'a str,
    offset: TextSize,
    id_alloc: TokenIdAlloc,
    inner: std::slice::Iter<'a, RawToken>,
}
273
/// Minimal view over a source token that `TokenConvertor` needs: its kind,
/// its text, and (for punctuation) its single character.
trait SrcToken: std::fmt::Debug {
    fn kind(&self) -> SyntaxKind;

    fn to_char(&self) -> Option<char>;

    fn to_text(&self) -> SmolStr;
}
281
/// Shared driver for converting a stream of source tokens into a
/// `tt::Subtree`; implementors supply token iteration (`bump`/`peek`),
/// id allocation, and doc-comment conversion.
trait TokenConvertor {
    type Token: SrcToken;

    /// Converts the whole stream. Returns `None` for an empty stream; a
    /// single top-level subtree is returned unwrapped.
    fn go(&mut self) -> Option<tt::Subtree> {
        let mut subtree = tt::Subtree::default();
        subtree.delimiter = None;
        while self.peek().is_some() {
            self.collect_leaf(&mut subtree.token_trees);
        }
        if subtree.token_trees.is_empty() {
            return None;
        }
        if subtree.token_trees.len() == 1 {
            if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
                return Some(first.clone());
            }
        }
        Some(subtree)
    }

    /// Converts one token (recursing into delimited groups) and appends the
    /// result to `result`.
    fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
        let (token, range) = match self.bump() {
            None => return,
            Some(it) => it,
        };

        let k: SyntaxKind = token.kind();
        if k == COMMENT {
            // Doc comments become `#[doc = ...]` tokens; others are dropped.
            if let Some(tokens) = self.convert_doc_comment(&token) {
                result.extend(tokens);
            }
            return;
        }

        result.push(if k.is_punct() {
            assert_eq!(range.len(), TextSize::of('.'));
            let delim = match k {
                T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
                T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
                T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])),
                _ => None,
            };

            if let Some((kind, closed)) = delim {
                // Opening delimiter: recursively collect until the closer.
                let mut subtree = tt::Subtree::default();
                let (id, idx) = self.id_alloc().open_delim(range);
                subtree.delimiter = Some(tt::Delimiter { kind, id });

                while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
                    self.collect_leaf(&mut subtree.token_trees);
                }
                let last_range = match self.bump() {
                    None => {
                        // For error resilience, we insert an char punct for the opening delim here
                        self.id_alloc().close_delim(idx, None);
                        let leaf: tt::Leaf = tt::Punct {
                            id: self.id_alloc().alloc(range),
                            char: token.to_char().unwrap(),
                            spacing: tt::Spacing::Alone,
                        }
                        .into();
                        result.push(leaf.into());
                        result.extend(subtree.token_trees);
                        return;
                    }
                    Some(it) => it.1,
                };
                self.id_alloc().close_delim(idx, Some(last_range));
                subtree.into()
            } else {
                // Plain punct: joint iff the next token is another punct.
                let spacing = match self.peek() {
                    Some(next)
                        if next.kind().is_trivia()
                            || next.kind() == T!['[']
                            || next.kind() == T!['{']
                            || next.kind() == T!['('] =>
                    {
                        tt::Spacing::Alone
                    }
                    Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
                    _ => tt::Spacing::Alone,
                };
                let char = match token.to_char() {
                    Some(c) => c,
                    None => {
                        panic!("Token from lexer must be single char: token = {:#?}", token);
                    }
                };
                tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
            }
        } else {
            macro_rules! make_leaf {
                ($i:ident) => {
                    tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into()
                };
            }
            let leaf: tt::Leaf = match k {
                T![true] | T![false] => make_leaf!(Ident),
                IDENT => make_leaf!(Ident),
                k if k.is_keyword() => make_leaf!(Ident),
                k if k.is_literal() => make_leaf!(Literal),
                LIFETIME => {
                    // Split `'ident` into a joint `'` punct plus an ident,
                    // each with its own id/range.
                    let char_unit = TextSize::of('\'');
                    let r = TextRange::at(range.start(), char_unit);
                    let apostrophe = tt::Leaf::from(tt::Punct {
                        char: '\'',
                        spacing: tt::Spacing::Joint,
                        id: self.id_alloc().alloc(r),
                    });
                    result.push(apostrophe.into());

                    let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text()[1..]),
                        id: self.id_alloc().alloc(r),
                    });
                    result.push(ident.into());
                    return;
                }
                _ => return,
            };

            leaf.into()
        });
    }

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;

    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;

    fn peek(&self) -> Option<Self::Token>;

    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
416
// A raw lexer token paired with its text slice satisfies `SrcToken` directly.
impl<'a> SrcToken for (RawToken, &'a str) {
    fn kind(&self) -> SyntaxKind {
        self.0.kind
    }

    fn to_char(&self) -> Option<char> {
        self.1.chars().next()
    }

    fn to_text(&self) -> SmolStr {
        self.1.into()
    }
}
430
431impl RawConvertor<'_> {}
432
impl<'a> TokenConvertor for RawConvertor<'a> {
    type Token = (RawToken, &'a str);

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
        convert_doc_comment(&doc_comment(token.1))
    }

    /// Advances past the next lexer token, yielding it with its text range and
    /// moving `offset` forward.
    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        let token = self.inner.next()?;
        let range = TextRange::at(self.offset, token.len);
        self.offset += token.len;

        Some(((*token, &self.text[range]), range))
    }

    /// Looks at the next token without consuming it.
    fn peek(&self) -> Option<Self::Token> {
        let token = self.inner.as_slice().get(0).cloned();

        token.map(|it| {
            let range = TextRange::at(self.offset, it.len);
            (it, &self.text[range])
        })
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}
461
/// Walks the tokens of a `SyntaxNode` and converts them to `tt` tokens,
/// splitting multi-character punctuation into single-character puncts.
struct Convertor {
    id_alloc: TokenIdAlloc,
    // Next syntax token to emit; `None` once the node is exhausted.
    current: Option<SyntaxToken>,
    // Overall range of the node being converted; tokens outside it stop the walk.
    range: TextRange,
    // When a multi-char punct is being split: the token plus the offset of the
    // character currently being emitted.
    punct_offset: Option<(SyntaxToken, TextSize)>,
}
468
469impl Convertor {
470 fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor {
471 Convertor {
472 id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
473 current: node.first_token(),
474 range: node.text_range(),
475 punct_offset: None,
476 }
477 }
478}
479
/// A token yielded by `Convertor`: either a whole syntax token, or a single
/// character at a given offset inside a multi-character punctuation token.
// NOTE(review): `Ordiniary` is a typo for `Ordinary`; kept unchanged here
// because renaming would have to touch every use site at once.
#[derive(Debug)]
enum SynToken {
    Ordiniary(SyntaxToken),
    Punch(SyntaxToken, TextSize),
}
485
486impl SynToken {
487 fn token(&self) -> &SyntaxToken {
488 match self {
489 SynToken::Ordiniary(it) => it,
490 SynToken::Punch(it, _) => it,
491 }
492 }
493}
494
495impl SrcToken for SynToken {
496 fn kind(&self) -> SyntaxKind {
497 self.token().kind()
498 }
499 fn to_char(&self) -> Option<char> {
500 match self {
501 SynToken::Ordiniary(_) => None,
502 SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
503 }
504 }
505 fn to_text(&self) -> SmolStr {
506 self.token().text().clone()
507 }
508}
509
510impl TokenConvertor for Convertor {
511 type Token = SynToken;
512 fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
513 convert_doc_comment(token.token())
514 }
515
516 fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
517 if let Some((punct, offset)) = self.punct_offset.clone() {
518 if usize::from(offset) + 1 < punct.text().len() {
519 let offset = offset + TextSize::of('.');
520 let range = punct.text_range();
521 self.punct_offset = Some((punct.clone(), offset));
522 let range = TextRange::at(range.start() + offset, TextSize::of('.'));
523 return Some((SynToken::Punch(punct, offset), range));
524 }
525 }
526
527 let curr = self.current.clone()?;
528 if !&self.range.contains_range(curr.text_range()) {
529 return None;
530 }
531 self.current = curr.next_token();
532
533 let token = if curr.kind().is_punct() {
534 let range = curr.text_range();
535 let range = TextRange::at(range.start(), TextSize::of('.'));
536 self.punct_offset = Some((curr.clone(), 0.into()));
537 (SynToken::Punch(curr, 0.into()), range)
538 } else {
539 self.punct_offset = None;
540 let range = curr.text_range();
541 (SynToken::Ordiniary(curr), range)
542 };
543
544 Some(token)
545 }
546
547 fn peek(&self) -> Option<Self::Token> {
548 if let Some((punct, mut offset)) = self.punct_offset.clone() {
549 offset = offset + TextSize::of('.');
550 if usize::from(offset) < punct.text().len() {
551 return Some(SynToken::Punch(punct, offset));
552 }
553 }
554
555 let curr = self.current.clone()?;
556 if !self.range.contains_range(curr.text_range()) {
557 return None;
558 }
559
560 let token = if curr.kind().is_punct() {
561 SynToken::Punch(curr, 0.into())
562 } else {
563 SynToken::Ordiniary(curr)
564 };
565 Some(token)
566 }
567
568 fn id_alloc(&mut self) -> &mut TokenIdAlloc {
569 &mut self.id_alloc
570 }
571}
572
/// A `TreeSink` that renders a `tt` token stream back into a syntax tree,
/// recording the output text range of every token id in `token_map`.
struct TtTreeSink<'a> {
    // Accumulates the text of the token trees consumed by one `token()` call.
    buf: String,
    cursor: Cursor<'a>,
    // Output position of each open delimiter, keyed by its token id, so the
    // matching close delimiter can record both ranges together.
    open_delims: FxHashMap<tt::TokenId, TextSize>,
    text_pos: TextSize,
    inner: SyntaxTreeBuilder,
    token_map: TokenMap,

    // Number of roots.
    // Used to detect an ill-formed tree that does not have a single root.
    roots: smallvec::SmallVec<[usize; 1]>,
}
585
586impl<'a> TtTreeSink<'a> {
587 fn new(cursor: Cursor<'a>) -> Self {
588 TtTreeSink {
589 buf: String::new(),
590 cursor,
591 open_delims: FxHashMap::default(),
592 text_pos: 0.into(),
593 inner: SyntaxTreeBuilder::default(),
594 roots: smallvec::SmallVec::new(),
595 token_map: TokenMap::default(),
596 }
597 }
598
599 fn finish(self) -> (Parse<SyntaxNode>, TokenMap) {
600 (self.inner.finish(), self.token_map)
601 }
602}
603
604fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
605 let texts = match d {
606 Some(tt::DelimiterKind::Parenthesis) => "()",
607 Some(tt::DelimiterKind::Brace) => "{}",
608 Some(tt::DelimiterKind::Bracket) => "[]",
609 None => return "".into(),
610 };
611
612 let idx = closing as usize;
613 let text = &texts[idx..texts.len() - (1 - idx)];
614 text.into()
615}
616
impl<'a> TreeSink for TtTreeSink<'a> {
    /// Consumes `n_tokens` token trees from the cursor and emits a single
    /// syntax token of `kind`, concatenating their text.
    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
        // Pseudo-delimiters inserted by the expander: skip the whole subtree.
        if kind == L_DOLLAR || kind == R_DOLLAR {
            self.cursor = self.cursor.bump_subtree();
            return;
        }
        // A lifetime was split into apostrophe punct + ident on the way in;
        // consume both halves here to rebuild one LIFETIME token.
        if kind == LIFETIME {
            n_tokens = 2;
        }

        let mut last = self.cursor;
        for _ in 0..n_tokens {
            if self.cursor.eof() {
                break;
            }
            last = self.cursor;
            let text: SmolStr = match self.cursor.token_tree() {
                Some(tt::TokenTree::Leaf(leaf)) => {
                    // Mark the range if needed: record the leaf's output range
                    // under its token id.
                    let (text, id) = match leaf {
                        tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
                        tt::Leaf::Punct(punct) => {
                            (SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id)
                        }
                        tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
                    };
                    let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
                    self.token_map.insert(id, range);
                    self.cursor = self.cursor.bump();
                    text
                }
                Some(tt::TokenTree::Subtree(subtree)) => {
                    // Descend into the subtree and remember where its open
                    // delimiter is emitted so the close can pair with it later.
                    self.cursor = self.cursor.subtree().unwrap();
                    if let Some(id) = subtree.delimiter.map(|it| it.id) {
                        self.open_delims.insert(id, self.text_pos);
                    }
                    delim_to_str(subtree.delimiter_kind(), false)
                }
                None => {
                    // End of a subtree: emit the closing delimiter and record
                    // both delimiter ranges in the token map.
                    if let Some(parent) = self.cursor.end() {
                        self.cursor = self.cursor.bump();
                        if let Some(id) = parent.delimiter.map(|it| it.id) {
                            if let Some(open_delim) = self.open_delims.get(&id) {
                                // Delimiters are always one char wide; '(' is
                                // only used here to measure that width.
                                let open_range = TextRange::at(*open_delim, TextSize::of('('));
                                let close_range = TextRange::at(self.text_pos, TextSize::of('('));
                                self.token_map.insert_delim(id, open_range, close_range);
                            }
                        }
                        delim_to_str(parent.delimiter_kind(), true)
                    } else {
                        continue;
                    }
                }
            };
            self.buf += &text;
            self.text_pos += TextSize::of(text.as_str());
        }

        let text = SmolStr::new(self.buf.as_str());
        self.buf.clear();
        self.inner.token(kind, text);

        // Add whitespace between adjacent puncts
        let next = last.bump();
        if let (
            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
        ) = (last.token_tree(), next.token_tree())
        {
            // Note: We always assume the semi-colon would be the last token in
            // other parts of RA such that we don't add whitespace here.
            if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
                self.inner.token(WHITESPACE, " ".into());
                self.text_pos += TextSize::of(' ');
            }
        }
    }

    fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(kind);

        // Track nesting depth per root so multi-root trees can be detected.
        match self.roots.last_mut() {
            None | Some(0) => self.roots.push(1),
            Some(ref mut n) => **n += 1,
        };
    }

    fn finish_node(&mut self) {
        self.inner.finish_node();
        *self.roots.last_mut().unwrap() -= 1;
    }

    fn error(&mut self, error: ParseError) {
        self.inner.error(error, self.text_pos)
    }
}
713
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::parse_macro;
    use ra_parser::TokenSource;
    use ra_syntax::{
        algo::{insert_children, InsertPosition},
        ast::AstNode,
    };

    // Expands a macro and replays the resulting token tree through
    // `SubtreeTokenSource`, checking that literal kinds/texts survive.
    #[test]
    fn convert_tt_token_source() {
        let expansion = parse_macro(
            r#"
            macro_rules! literals {
                ($i:ident) => {
                    {
                        let a = 'c';
                        let c = 1000;
                        let f = 12E+99_f64;
                        let s = "rust1";
                    }
                }
            }
            "#,
        )
        .expand_tt("literals!(foo);");
        let tts = &[expansion.into()];
        let buffer = tt::buffer::TokenBuffer::new(tts);
        let mut tt_src = SubtreeTokenSource::new(&buffer);
        let mut tokens = vec![];
        while tt_src.current().kind != EOF {
            tokens.push((tt_src.current().kind, tt_src.text()));
            tt_src.bump();
        }

        // [${]
        // [let] [a] [=] ['c'] [;]
        assert_eq!(tokens[2 + 3].1, "'c'");
        assert_eq!(tokens[2 + 3].0, CHAR);
        // [let] [c] [=] [1000] [;]
        assert_eq!(tokens[2 + 5 + 3].1, "1000");
        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
        // [let] [f] [=] [12E+99_f64] [;]
        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);

        // [let] [s] [=] ["rust1"] [;]
        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
        assert_eq!(tokens[2 + 15 + 3].0, STRING);
    }

    // A sequence of statements is not a single expression, so converting the
    // expansion as an Expr fragment must fail.
    #[test]
    fn stmts_token_trees_to_expr_is_err() {
        let expansion = parse_macro(
            r#"
            macro_rules! stmts {
                () => {
                    let a = 0;
                    let b = 0;
                    let c = 0;
                    let d = 0;
                }
            }
            "#,
        )
        .expand_tt("stmts!();");
        assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
    }

    #[test]
    fn test_token_tree_last_child_is_white_space() {
        let source_file = ast::SourceFile::parse("f!({} );").ok().unwrap();
        let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
        let token_tree = macro_call.token_tree().unwrap();

        // Token Tree now is :
        // TokenTree
        // - T!['(']
        // - TokenTree
        //   - T!['{']
        //   - T!['}']
        // - WHITE_SPACE
        // - T![')']

        let rbrace =
            token_tree.syntax().descendants_with_tokens().find(|it| it.kind() == T!['}']).unwrap();
        let space = token_tree
            .syntax()
            .descendants_with_tokens()
            .find(|it| it.kind() == SyntaxKind::WHITESPACE)
            .unwrap();

        // reorder the whitespace, so that it ends up inside the inner token-tree.
        let token_tree = insert_children(
            &rbrace.parent().unwrap(),
            InsertPosition::Last,
            std::iter::once(space),
        );

        // Token Tree now is :
        // TokenTree
        // - T!['{']
        // - T!['}']
        // - WHITE_SPACE
        let token_tree = ast::TokenTree::cast(token_tree).unwrap();
        let tt = ast_to_token_tree(&token_tree).unwrap().0;

        assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
    }

    // Multi-char puncts like `::` must round-trip through conversion.
    #[test]
    fn test_token_tree_multi_char_punct() {
        let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
        let struct_def = source_file.syntax().descendants().find_map(ast::Struct::cast).unwrap();
        let tt = ast_to_token_tree(&struct_def).unwrap().0;
        token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
    }
}
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
deleted file mode 100644
index 286983d60..000000000
--- a/crates/ra_mbe/src/tests.rs
+++ /dev/null
@@ -1,1897 +0,0 @@
1use std::fmt::Write;
2
3use ra_parser::FragmentKind;
4use ra_syntax::{ast, AstNode, NodeOrToken, SyntaxKind::IDENT, SyntaxNode, WalkEvent, T};
5use test_utils::assert_eq_text;
6
7use super::*;
8
/// Tests for parsing the macro definition itself (the rule arms), as opposed
/// to expanding a call.
mod rule_parsing {
    use ra_syntax::{ast, AstNode};

    use super::*;
    use crate::ast_to_token_tree;

    #[test]
    fn test_valid_arms() {
        fn check(macro_body: &str) {
            let m = parse_macro_arm(macro_body);
            m.unwrap();
        }

        check("($i:ident) => ()");
        check("($($i:ident)*) => ($_)");
        check("($($true:ident)*) => ($true)");
        check("($($false:ident)*) => ($false)");
        check("($) => ($)");
    }

    #[test]
    fn test_invalid_arms() {
        fn check(macro_body: &str, err: &str) {
            let m = parse_macro_arm(macro_body);
            assert_eq!(m, Err(ParseError::Expected(String::from(err))));
        }

        check("invalid", "expected subtree");

        check("$i:ident => ()", "expected subtree");
        check("($i:ident) ()", "expected `=`");
        check("($($i:ident)_) => ()", "invalid repeat");

        check("($i) => ($i)", "invalid macro definition");
        check("($i:) => ($i)", "invalid macro definition");
    }

    // Wraps `arm_definition` in a `macro_rules!` shell and parses it into
    // `MacroRules`, returning the parser's verdict.
    fn parse_macro_arm(arm_definition: &str) -> Result<crate::MacroRules, ParseError> {
        let macro_definition = format!(" macro_rules! m {{ {} }} ", arm_definition);
        let source_file = ast::SourceFile::parse(&macro_definition).ok().unwrap();
        let macro_definition =
            source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

        let (definition_tt, _) =
            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
        crate::MacroRules::parse(&definition_tt)
    }
}
57
58// Good first issue (although a slightly challenging one):
59//
60// * Pick a random test from here
61// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt
62// * Port the test to rust and add it to this module
63// * Make it pass :-)
64
// Token ids assigned during expansion are shifted: definition-side tokens keep
// their ids, while call-site tokens are shifted past the definition's range.
#[test]
fn test_token_id_shift() {
    let expansion = parse_macro(
        r#"
macro_rules! foobar {
    ($e:ident) => { foo bar $e }
}
"#,
    )
    .expand_tt("foobar!(baz);");

    // Extracts the raw token id from an ident leaf, if `t` is one.
    fn get_id(t: &tt::TokenTree) -> Option<u32> {
        if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = t {
            return Some(ident.id.0);
        }
        None
    }

    assert_eq!(expansion.token_trees.len(), 3);
    // {($e:ident) => { foo bar $e }}
    // 012345      67 8 9   T   12
    assert_eq!(get_id(&expansion.token_trees[0]), Some(9));
    assert_eq!(get_id(&expansion.token_trees[1]), Some(10));

    // The input args of macro call include parentheses:
    // (baz)
    // So baz should be 12+1+1
    assert_eq!(get_id(&expansion.token_trees[2]), Some(14));
}
94
95#[test]
96fn test_token_map() {
97 let expanded = parse_macro(
98 r#"
99macro_rules! foobar {
100 ($e:ident) => { fn $e() {} }
101}
102"#,
103 )
104 .expand_tt("foobar!(baz);");
105
106 let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
107 let content = node.syntax_node().to_string();
108
109 let get_text = |id, kind| -> String {
110 content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
111 };
112
113 assert_eq!(expanded.token_trees.len(), 4);
114 // {($e:ident) => { fn $e() {} }}
115 // 012345 67 8 9 T12 3
116
117 assert_eq!(get_text(tt::TokenId(9), IDENT), "fn");
118 assert_eq!(get_text(tt::TokenId(12), T!['(']), "(");
119 assert_eq!(get_text(tt::TokenId(13), T!['{']), "{");
120}
121
122#[test]
123fn test_convert_tt() {
124 parse_macro(r#"
125macro_rules! impl_froms {
126 ($e:ident: $($v:ident),*) => {
127 $(
128 impl From<$v> for $e {
129 fn from(it: $v) -> $e {
130 $e::$v(it)
131 }
132 }
133 )*
134 }
135}
136"#)
137 .assert_expand_tt(
138 "impl_froms!(TokenTree: Leaf, Subtree);",
139 "impl From <Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \
140 impl From <Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}"
141 );
142}
143
144#[test]
145fn test_convert_tt2() {
146 parse_macro(
147 r#"
148macro_rules! impl_froms {
149 ($e:ident: $($v:ident),*) => {
150 $(
151 impl From<$v> for $e {
152 fn from(it: $v) -> $e {
153 $e::$v(it)
154 }
155 }
156 )*
157 }
158}
159"#,
160 )
161 .assert_expand(
162 "impl_froms!(TokenTree: Leaf, Subtree);",
163 r#"
164SUBTREE $
165 IDENT impl 20
166 IDENT From 21
167 PUNCH < [joint] 22
168 IDENT Leaf 53
169 PUNCH > [alone] 25
170 IDENT for 26
171 IDENT TokenTree 51
172 SUBTREE {} 29
173 IDENT fn 30
174 IDENT from 31
175 SUBTREE () 32
176 IDENT it 33
177 PUNCH : [alone] 34
178 IDENT Leaf 53
179 PUNCH - [joint] 37
180 PUNCH > [alone] 38
181 IDENT TokenTree 51
182 SUBTREE {} 41
183 IDENT TokenTree 51
184 PUNCH : [joint] 44
185 PUNCH : [joint] 45
186 IDENT Leaf 53
187 SUBTREE () 48
188 IDENT it 49
189 IDENT impl 20
190 IDENT From 21
191 PUNCH < [joint] 22
192 IDENT Subtree 55
193 PUNCH > [alone] 25
194 IDENT for 26
195 IDENT TokenTree 51
196 SUBTREE {} 29
197 IDENT fn 30
198 IDENT from 31
199 SUBTREE () 32
200 IDENT it 33
201 PUNCH : [alone] 34
202 IDENT Subtree 55
203 PUNCH - [joint] 37
204 PUNCH > [alone] 38
205 IDENT TokenTree 51
206 SUBTREE {} 41
207 IDENT TokenTree 51
208 PUNCH : [joint] 44
209 PUNCH : [joint] 45
210 IDENT Subtree 55
211 SUBTREE () 48
212 IDENT it 49
213"#,
214 );
215}
216
217#[test]
218fn test_lifetime_split() {
219 parse_macro(
220 r#"
221macro_rules! foo {
222 ($($t:tt)*) => { $($t)*}
223}
224"#,
225 )
226 .assert_expand(
227 r#"foo!(static bar: &'static str = "hello";);"#,
228 r#"
229SUBTREE $
230 IDENT static 17
231 IDENT bar 18
232 PUNCH : [alone] 19
233 PUNCH & [alone] 20
234 PUNCH ' [joint] 21
235 IDENT static 22
236 IDENT str 23
237 PUNCH = [alone] 24
238 LITERAL "hello" 25
239 PUNCH ; [joint] 26
240"#,
241 );
242}
243
244#[test]
245fn test_expr_order() {
246 let expanded = parse_macro(
247 r#"
248 macro_rules! foo {
249 ($ i:expr) => {
250 fn bar() { $ i * 2; }
251 }
252 }
253"#,
254 )
255 .expand_items("foo! { 1 + 1}");
256
257 let dump = format!("{:#?}", expanded);
258 assert_eq_text!(
259 dump.trim(),
260 r#"[email protected]
261 [email protected]
262 [email protected] "fn"
263 [email protected]
264 [email protected] "bar"
265 [email protected]
266 [email protected] "("
267 [email protected] ")"
268 [email protected]
269 [email protected] "{"
270 [email protected]
271 [email protected]
272 [email protected]
273 [email protected]
274 [email protected] "1"
275 [email protected] "+"
276 [email protected]
277 [email protected] "1"
278 [email protected] "*"
279 [email protected]
280 [email protected] "2"
281 [email protected] ";"
282 [email protected] "}""#,
283 );
284}
285
286#[test]
287fn test_fail_match_pattern_by_first_token() {
288 parse_macro(
289 r#"
290 macro_rules! foo {
291 ($ i:ident) => (
292 mod $ i {}
293 );
294 (= $ i:ident) => (
295 fn $ i() {}
296 );
297 (+ $ i:ident) => (
298 struct $ i;
299 )
300 }
301"#,
302 )
303 .assert_expand_items("foo! { foo }", "mod foo {}")
304 .assert_expand_items("foo! { = bar }", "fn bar () {}")
305 .assert_expand_items("foo! { + Baz }", "struct Baz ;");
306}
307
308#[test]
309fn test_fail_match_pattern_by_last_token() {
310 parse_macro(
311 r#"
312 macro_rules! foo {
313 ($ i:ident) => (
314 mod $ i {}
315 );
316 ($ i:ident =) => (
317 fn $ i() {}
318 );
319 ($ i:ident +) => (
320 struct $ i;
321 )
322 }
323"#,
324 )
325 .assert_expand_items("foo! { foo }", "mod foo {}")
326 .assert_expand_items("foo! { bar = }", "fn bar () {}")
327 .assert_expand_items("foo! { Baz + }", "struct Baz ;");
328}
329
330#[test]
331fn test_fail_match_pattern_by_word_token() {
332 parse_macro(
333 r#"
334 macro_rules! foo {
335 ($ i:ident) => (
336 mod $ i {}
337 );
338 (spam $ i:ident) => (
339 fn $ i() {}
340 );
341 (eggs $ i:ident) => (
342 struct $ i;
343 )
344 }
345"#,
346 )
347 .assert_expand_items("foo! { foo }", "mod foo {}")
348 .assert_expand_items("foo! { spam bar }", "fn bar () {}")
349 .assert_expand_items("foo! { eggs Baz }", "struct Baz ;");
350}
351
352#[test]
353fn test_match_group_pattern_by_separator_token() {
354 parse_macro(
355 r#"
356 macro_rules! foo {
357 ($ ($ i:ident),*) => ($ (
358 mod $ i {}
359 )*);
360 ($ ($ i:ident)#*) => ($ (
361 fn $ i() {}
362 )*);
363 ($ i:ident ,# $ j:ident) => (
364 struct $ i;
365 struct $ j;
366 )
367 }
368"#,
369 )
370 .assert_expand_items("foo! { foo, bar }", "mod foo {} mod bar {}")
371 .assert_expand_items("foo! { foo# bar }", "fn foo () {} fn bar () {}")
372 .assert_expand_items("foo! { Foo,# Bar }", "struct Foo ; struct Bar ;");
373}
374
375#[test]
376fn test_match_group_pattern_with_multiple_defs() {
377 parse_macro(
378 r#"
379 macro_rules! foo {
380 ($ ($ i:ident),*) => ( struct Bar { $ (
381 fn $ i {}
382 )*} );
383 }
384"#,
385 )
386 .assert_expand_items("foo! { foo, bar }", "struct Bar {fn foo {} fn bar {}}");
387}
388
389#[test]
390fn test_match_group_pattern_with_multiple_statement() {
391 parse_macro(
392 r#"
393 macro_rules! foo {
394 ($ ($ i:ident),*) => ( fn baz { $ (
395 $ i ();
396 )*} );
397 }
398"#,
399 )
400 .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ; bar () ;}");
401}
402
403#[test]
404fn test_match_group_pattern_with_multiple_statement_without_semi() {
405 parse_macro(
406 r#"
407 macro_rules! foo {
408 ($ ($ i:ident),*) => ( fn baz { $ (
409 $i()
410 );*} );
411 }
412"#,
413 )
414 .assert_expand_items("foo! { foo, bar }", "fn baz {foo () ;bar ()}");
415}
416
417#[test]
418fn test_match_group_empty_fixed_token() {
419 parse_macro(
420 r#"
421 macro_rules! foo {
422 ($ ($ i:ident)* #abc) => ( fn baz { $ (
423 $ i ();
424 )*} );
425 }
426"#,
427 )
428 .assert_expand_items("foo! {#abc}", "fn baz {}");
429}
430
431#[test]
432fn test_match_group_in_subtree() {
433 parse_macro(
434 r#"
435 macro_rules! foo {
436 (fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ (
437 $ i ();
438 )*} );
439 }"#,
440 )
441 .assert_expand_items("foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}");
442}
443
444#[test]
445fn test_match_group_with_multichar_sep() {
446 parse_macro(
447 r#"
448 macro_rules! foo {
449 (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} );
450 }"#,
451 )
452 .assert_expand_items("foo! (fn baz {true true} );", "fn baz () -> bool {true &&true}");
453}
454
// A `*` repetition must also match zero occurrences.
#[test]
fn test_match_group_zero_match() {
    parse_macro(
        r#"
        macro_rules! foo {
            ( $($i:ident)* ) => ();
        }"#,
    )
    .assert_expand_items("foo! ();", "");
}
465
466#[test]
467fn test_match_group_in_group() {
468 parse_macro(
469 r#"
470 macro_rules! foo {
471 { $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* );
472 }"#,
473 )
474 .assert_expand_items("foo! ( (a b) );", "(a b)");
475}
476
477#[test]
478fn test_expand_to_item_list() {
479 let tree = parse_macro(
480 "
481 macro_rules! structs {
482 ($($i:ident),*) => {
483 $(struct $i { field: u32 } )*
484 }
485 }
486 ",
487 )
488 .expand_items("structs!(Foo, Bar);");
489 assert_eq!(
490 format!("{:#?}", tree).trim(),
491 r#"
492[email protected]
493 [email protected]
494 [email protected] "struct"
495 [email protected]
496 [email protected] "Foo"
497 [email protected]
498 [email protected] "{"
499 [email protected]
500 [email protected]
501 [email protected] "field"
502 [email protected] ":"
503 [email protected]
504 [email protected]
505 [email protected]
506 [email protected]
507 [email protected] "u32"
508 [email protected] "}"
509 [email protected]
510 [email protected] "struct"
511 [email protected]
512 [email protected] "Bar"
513 [email protected]
514 [email protected] "{"
515 [email protected]
516 [email protected]
517 [email protected] "field"
518 [email protected] ":"
519 [email protected]
520 [email protected]
521 [email protected]
522 [email protected]
523 [email protected] "u32"
524 [email protected] "}""#
525 .trim()
526 );
527}
528
529fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
530 if let tt::TokenTree::Subtree(subtree) = tt {
531 return &subtree;
532 }
533 unreachable!("It is not a subtree");
534}
535fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
536 if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
537 return lit;
538 }
539 unreachable!("It is not a literal");
540}
541
542fn to_punct(tt: &tt::TokenTree) -> &tt::Punct {
543 if let tt::TokenTree::Leaf(tt::Leaf::Punct(lit)) = tt {
544 return lit;
545 }
546 unreachable!("It is not a Punct");
547}
548
549#[test]
550fn test_expand_literals_to_token_tree() {
551 let expansion = parse_macro(
552 r#"
553 macro_rules! literals {
554 ($i:ident) => {
555 {
556 let a = 'c';
557 let c = 1000;
558 let f = 12E+99_f64;
559 let s = "rust1";
560 }
561 }
562 }
563 "#,
564 )
565 .expand_tt("literals!(foo);");
566 let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees;
567
568 // [let] [a] [=] ['c'] [;]
569 assert_eq!(to_literal(&stm_tokens[3]).text, "'c'");
570 // [let] [c] [=] [1000] [;]
571 assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000");
572 // [let] [f] [=] [12E+99_f64] [;]
573 assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64");
574 // [let] [s] [=] ["rust1"] [;]
575 assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\"");
576}
577
578#[test]
579fn test_attr_to_token_tree() {
580 let expansion = parse_to_token_tree_by_syntax(
581 r#"
582 #[derive(Copy)]
583 struct Foo;
584 "#,
585 );
586
587 assert_eq!(to_punct(&expansion.token_trees[0]).char, '#');
588 assert_eq!(
589 to_subtree(&expansion.token_trees[1]).delimiter_kind(),
590 Some(tt::DelimiterKind::Bracket)
591 );
592}
593
594#[test]
595fn test_two_idents() {
596 parse_macro(
597 r#"
598 macro_rules! foo {
599 ($ i:ident, $ j:ident) => {
600 fn foo() { let a = $ i; let b = $j; }
601 }
602 }
603"#,
604 )
605 .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}");
606}
607
608#[test]
609fn test_tt_to_stmts() {
610 let stmts = parse_macro(
611 r#"
612 macro_rules! foo {
613 () => {
614 let a = 0;
615 a = 10 + 1;
616 a
617 }
618 }
619"#,
620 )
621 .expand_statements("foo!{}");
622
623 assert_eq!(
624 format!("{:#?}", stmts).trim(),
625 r#"[email protected]
626 [email protected]
627 [email protected] "let"
628 [email protected]
629 [email protected]
630 [email protected] "a"
631 [email protected] "="
632 [email protected]
633 [email protected] "0"
634 [email protected] ";"
635 [email protected]
636 [email protected]
637 [email protected]
638 [email protected]
639 [email protected]
640 [email protected]
641 [email protected] "a"
642 [email protected] "="
643 [email protected]
644 [email protected]
645 [email protected] "10"
646 [email protected] "+"
647 [email protected]
648 [email protected] "1"
649 [email protected] ";"
650 [email protected]
651 [email protected]
652 [email protected]
653 [email protected]
654 [email protected]
655 [email protected] "a""#,
656 );
657}
658
659#[test]
660fn test_match_literal() {
661 parse_macro(
662 r#"
663 macro_rules! foo {
664 ('(') => {
665 fn foo() {}
666 }
667 }
668"#,
669 )
670 .assert_expand_items("foo! ['('];", "fn foo () {}");
671}
672
673// The following tests are port from intellij-rust directly
674// https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt
675
676#[test]
677fn test_path() {
678 parse_macro(
679 r#"
680 macro_rules! foo {
681 ($ i:path) => {
682 fn foo() { let a = $ i; }
683 }
684 }
685"#,
686 )
687 .assert_expand_items("foo! { foo }", "fn foo () {let a = foo ;}")
688 .assert_expand_items(
689 "foo! { bar::<u8>::baz::<u8> }",
690 "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}",
691 );
692}
693
694#[test]
695fn test_two_paths() {
696 parse_macro(
697 r#"
698 macro_rules! foo {
699 ($ i:path, $ j:path) => {
700 fn foo() { let a = $ i; let b = $j; }
701 }
702 }
703"#,
704 )
705 .assert_expand_items("foo! { foo, bar }", "fn foo () {let a = foo ; let b = bar ;}");
706}
707
708#[test]
709fn test_path_with_path() {
710 parse_macro(
711 r#"
712 macro_rules! foo {
713 ($ i:path) => {
714 fn foo() { let a = $ i :: bar; }
715 }
716 }
717"#,
718 )
719 .assert_expand_items("foo! { foo }", "fn foo () {let a = foo :: bar ;}");
720}
721
722#[test]
723fn test_expr() {
724 parse_macro(
725 r#"
726 macro_rules! foo {
727 ($ i:expr) => {
728 fn bar() { $ i; }
729 }
730 }
731"#,
732 )
733 .assert_expand_items(
734 "foo! { 2 + 2 * baz(3).quux() }",
735 "fn bar () {2 + 2 * baz (3) . quux () ;}",
736 );
737}
738
739#[test]
740fn test_last_expr() {
741 parse_macro(
742 r#"
743 macro_rules! vec {
744 ($($item:expr),*) => {
745 {
746 let mut v = Vec::new();
747 $(
748 v.push($item);
749 )*
750 v
751 }
752 };
753 }
754"#,
755 )
756 .assert_expand_items(
757 "vec!(1,2,3);",
758 "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}",
759 );
760}
761
762#[test]
763fn test_ty() {
764 parse_macro(
765 r#"
766 macro_rules! foo {
767 ($ i:ty) => (
768 fn bar() -> $ i { unimplemented!() }
769 )
770 }
771"#,
772 )
773 .assert_expand_items("foo! { Baz<u8> }", "fn bar () -> Baz < u8 > {unimplemented ! ()}");
774}
775
776#[test]
777fn test_ty_with_complex_type() {
778 parse_macro(
779 r#"
780 macro_rules! foo {
781 ($ i:ty) => (
782 fn bar() -> $ i { unimplemented!() }
783 )
784 }
785"#,
786 )
787 // Reference lifetime struct with generic type
788 .assert_expand_items(
789 "foo! { &'a Baz<u8> }",
790 "fn bar () -> & 'a Baz < u8 > {unimplemented ! ()}",
791 )
792 // extern "Rust" func type
793 .assert_expand_items(
794 r#"foo! { extern "Rust" fn() -> Ret }"#,
795 r#"fn bar () -> extern "Rust" fn () -> Ret {unimplemented ! ()}"#,
796 );
797}
798
799#[test]
800fn test_pat_() {
801 parse_macro(
802 r#"
803 macro_rules! foo {
804 ($ i:pat) => { fn foo() { let $ i; } }
805 }
806"#,
807 )
808 .assert_expand_items("foo! { (a, b) }", "fn foo () {let (a , b) ;}");
809}
810
811#[test]
812fn test_stmt() {
813 parse_macro(
814 r#"
815 macro_rules! foo {
816 ($ i:stmt) => (
817 fn bar() { $ i; }
818 )
819 }
820"#,
821 )
822 .assert_expand_items("foo! { 2 }", "fn bar () {2 ;}")
823 .assert_expand_items("foo! { let a = 0 }", "fn bar () {let a = 0 ;}");
824}
825
826#[test]
827fn test_single_item() {
828 parse_macro(
829 r#"
830 macro_rules! foo {
831 ($ i:item) => (
832 $ i
833 )
834 }
835"#,
836 )
837 .assert_expand_items("foo! {mod c {}}", "mod c {}");
838}
839
840#[test]
841fn test_all_items() {
842 parse_macro(
843 r#"
844 macro_rules! foo {
845 ($ ($ i:item)*) => ($ (
846 $ i
847 )*)
848 }
849"#,
850 ).
851 assert_expand_items(
852 r#"
853 foo! {
854 extern crate a;
855 mod b;
856 mod c {}
857 use d;
858 const E: i32 = 0;
859 static F: i32 = 0;
860 impl G {}
861 struct H;
862 enum I { Foo }
863 trait J {}
864 fn h() {}
865 extern {}
866 type T = u8;
867 }
868"#,
869 r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#,
870 );
871}
872
873#[test]
874fn test_block() {
875 parse_macro(
876 r#"
877 macro_rules! foo {
878 ($ i:block) => { fn foo() $ i }
879 }
880"#,
881 )
882 .assert_expand_statements("foo! { { 1; } }", "fn foo () {1 ;}");
883}
884
885#[test]
886fn test_meta() {
887 parse_macro(
888 r#"
889 macro_rules! foo {
890 ($ i:meta) => (
891 #[$ i]
892 fn bar() {}
893 )
894 }
895"#,
896 )
897 .assert_expand_items(
898 r#"foo! { cfg(target_os = "windows") }"#,
899 r#"# [cfg (target_os = "windows")] fn bar () {}"#,
900 );
901}
902
903#[test]
904fn test_meta_doc_comments() {
905 parse_macro(
906 r#"
907 macro_rules! foo {
908 ($(#[$ i:meta])+) => (
909 $(#[$ i])+
910 fn bar() {}
911 )
912 }
913"#,
914 ).
915 assert_expand_items(
916 r#"foo! {
917 /// Single Line Doc 1
918 /**
919 MultiLines Doc
920 */
921 }"#,
922 "# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}",
923 );
924}
925
926#[test]
927fn test_tt_block() {
928 parse_macro(
929 r#"
930 macro_rules! foo {
931 ($ i:tt) => { fn foo() $ i }
932 }
933 "#,
934 )
935 .assert_expand_items(r#"foo! { { 1; } }"#, r#"fn foo () {1 ;}"#);
936}
937
938#[test]
939fn test_tt_group() {
940 parse_macro(
941 r#"
942 macro_rules! foo {
943 ($($ i:tt)*) => { $($ i)* }
944 }
945 "#,
946 )
947 .assert_expand_items(r#"foo! { fn foo() {} }"#, r#"fn foo () {}"#);
948}
949
950#[test]
951fn test_tt_composite() {
952 parse_macro(
953 r#"
954 macro_rules! foo {
955 ($i:tt) => { 0 }
956 }
957 "#,
958 )
959 .assert_expand_items(r#"foo! { => }"#, r#"0"#);
960}
961
962#[test]
963fn test_tt_composite2() {
964 let node = parse_macro(
965 r#"
966 macro_rules! foo {
967 ($($tt:tt)*) => { abs!(=> $($tt)*) }
968 }
969 "#,
970 )
971 .expand_items(r#"foo!{#}"#);
972
973 let res = format!("{:#?}", &node);
974 assert_eq_text!(
975 res.trim(),
976 r###"[email protected]
977 [email protected]
978 [email protected]
979 [email protected]
980 [email protected]
981 [email protected] "abs"
982 [email protected] "!"
983 [email protected]
984 [email protected] "("
985 [email protected] "="
986 [email protected] ">"
987 [email protected] " "
988 [email protected] "#"
989 [email protected] ")""###
990 );
991}
992
993#[test]
994fn test_lifetime() {
995 parse_macro(
996 r#"
997 macro_rules! foo {
998 ($ lt:lifetime) => { struct Ref<$ lt>{ s: &$ lt str } }
999 }
1000"#,
1001 )
1002 .assert_expand_items(r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#);
1003}
1004
1005#[test]
1006fn test_literal() {
1007 parse_macro(
1008 r#"
1009 macro_rules! foo {
1010 ($ type:ty $ lit:literal) => { const VALUE: $ type = $ lit;};
1011 }
1012"#,
1013 )
1014 .assert_expand_items(r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#);
1015}
1016
1017#[test]
1018fn test_boolean_is_ident() {
1019 parse_macro(
1020 r#"
1021 macro_rules! foo {
1022 ($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); };
1023 }
1024"#,
1025 )
1026 .assert_expand(
1027 r#"foo!(true,false);"#,
1028 r#"
1029SUBTREE $
1030 IDENT const 14
1031 IDENT VALUE 15
1032 PUNCH : [alone] 16
1033 SUBTREE () 17
1034 IDENT bool 18
1035 PUNCH , [alone] 19
1036 IDENT bool 20
1037 PUNCH = [alone] 21
1038 SUBTREE () 22
1039 IDENT true 29
1040 PUNCH , [joint] 25
1041 IDENT false 31
1042 PUNCH ; [alone] 28
1043"#,
1044 );
1045}
1046
1047#[test]
1048fn test_vis() {
1049 parse_macro(
1050 r#"
1051 macro_rules! foo {
1052 ($ vis:vis $ name:ident) => { $ vis fn $ name() {}};
1053 }
1054"#,
1055 )
1056 .assert_expand_items(r#"foo!(pub foo);"#, r#"pub fn foo () {}"#)
1057 // test optional cases
1058 .assert_expand_items(r#"foo!(foo);"#, r#"fn foo () {}"#);
1059}
1060
1061#[test]
1062fn test_inner_macro_rules() {
1063 parse_macro(
1064 r#"
1065macro_rules! foo {
1066 ($a:ident, $b:ident, $c:tt) => {
1067
1068 macro_rules! bar {
1069 ($bi:ident) => {
1070 fn $bi() -> u8 {$c}
1071 }
1072 }
1073
1074 bar!($a);
1075 fn $b() -> u8 {$c}
1076 }
1077}
1078"#,
1079 ).
1080 assert_expand_items(
1081 r#"foo!(x,y, 1);"#,
1082 r#"macro_rules ! bar {($ bi : ident) => {fn $ bi () -> u8 {1}}} bar ! (x) ; fn y () -> u8 {1}"#,
1083 );
1084}
1085
1086// The following tests are based on real world situations
1087#[test]
1088fn test_vec() {
1089 let fixture = parse_macro(
1090 r#"
1091 macro_rules! vec {
1092 ($($item:expr),*) => {
1093 {
1094 let mut v = Vec::new();
1095 $(
1096 v.push($item);
1097 )*
1098 v
1099 }
1100 };
1101}
1102"#,
1103 );
1104 fixture
1105 .assert_expand_items(r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#)
1106 .assert_expand_items(
1107 r#"vec![1u32,2];"#,
1108 r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#,
1109 );
1110
1111 let tree = fixture.expand_expr(r#"vec![1u32,2];"#);
1112
1113 assert_eq!(
1114 format!("{:#?}", tree).trim(),
1115 r#"[email protected]
1116 [email protected] "{"
1117 [email protected]
1118 [email protected] "let"
1119 [email protected]
1120 [email protected] "mut"
1121 [email protected]
1122 [email protected] "v"
1123 [email protected] "="
1124 [email protected]
1125 [email protected]
1126 [email protected]
1127 [email protected]
1128 [email protected]
1129 [email protected]
1130 [email protected] "Vec"
1131 [email protected] "::"
1132 [email protected]
1133 [email protected]
1134 [email protected] "new"
1135 [email protected]
1136 [email protected] "("
1137 [email protected] ")"
1138 [email protected] ";"
1139 [email protected]
1140 [email protected]
1141 [email protected]
1142 [email protected]
1143 [email protected]
1144 [email protected]
1145 [email protected] "v"
1146 [email protected] "."
1147 [email protected]
1148 [email protected] "push"
1149 [email protected]
1150 [email protected] "("
1151 [email protected]
1152 [email protected] "1u32"
1153 [email protected] ")"
1154 [email protected] ";"
1155 [email protected]
1156 [email protected]
1157 [email protected]
1158 [email protected]
1159 [email protected]
1160 [email protected]
1161 [email protected] "v"
1162 [email protected] "."
1163 [email protected]
1164 [email protected] "push"
1165 [email protected]
1166 [email protected] "("
1167 [email protected]
1168 [email protected] "2"
1169 [email protected] ")"
1170 [email protected] ";"
1171 [email protected]
1172 [email protected]
1173 [email protected]
1174 [email protected]
1175 [email protected] "v"
1176 [email protected] "}""#
1177 );
1178}
1179
1180#[test]
1181fn test_winapi_struct() {
1182 // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366
1183
1184 parse_macro(
1185 r#"
1186macro_rules! STRUCT {
1187 ($(#[$attrs:meta])* struct $name:ident {
1188 $($field:ident: $ftype:ty,)+
1189 }) => (
1190 #[repr(C)] #[derive(Copy)] $(#[$attrs])*
1191 pub struct $name {
1192 $(pub $field: $ftype,)+
1193 }
1194 impl Clone for $name {
1195 #[inline]
1196 fn clone(&self) -> $name { *self }
1197 }
1198 #[cfg(feature = "impl-default")]
1199 impl Default for $name {
1200 #[inline]
1201 fn default() -> $name { unsafe { $crate::_core::mem::zeroed() } }
1202 }
1203 );
1204}
1205"#,
1206 ).
1207 // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs
1208 assert_expand_items(r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#,
1209 "# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}"
1210 )
1211 .assert_expand_items(r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#,
1212 "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}"
1213 );
1214}
1215
1216#[test]
1217fn test_int_base() {
1218 parse_macro(
1219 r#"
1220macro_rules! int_base {
1221 ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
1222 #[stable(feature = "rust1", since = "1.0.0")]
1223 impl fmt::$Trait for $T {
1224 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1225 $Radix.fmt_int(*self as $U, f)
1226 }
1227 }
1228 }
1229}
1230"#,
1231 ).assert_expand_items(r#" int_base!{Binary for isize as usize -> Binary}"#,
1232 "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
1233 );
1234}
1235
1236#[test]
1237fn test_generate_pattern_iterators() {
1238 // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs
1239 parse_macro(
1240 r#"
1241macro_rules! generate_pattern_iterators {
1242 { double ended; with $(#[$common_stability_attribute:meta])*,
1243 $forward_iterator:ident,
1244 $reverse_iterator:ident, $iterty:ty
1245 } => {
1246 fn foo(){}
1247 }
1248}
1249"#,
1250 ).assert_expand_items(
1251 r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
1252 "fn foo () {}",
1253 );
1254}
1255
1256#[test]
1257fn test_impl_fn_for_zst() {
1258 // from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs
1259 parse_macro(
1260 r#"
1261macro_rules! impl_fn_for_zst {
1262 { $( $( #[$attr: meta] )*
1263 struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
1264 |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty
1265$body: block; )+
1266 } => {
1267 $(
1268 $( #[$attr] )*
1269 struct $Name;
1270
1271 impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name {
1272 #[inline]
1273 extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
1274 $body
1275 }
1276 }
1277
1278 impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name {
1279 #[inline]
1280 extern "rust-call" fn call_mut(
1281 &mut self,
1282 ($( $arg, )*): ($( $ArgTy, )*)
1283 ) -> $ReturnTy {
1284 Fn::call(&*self, ($( $arg, )*))
1285 }
1286 }
1287
1288 impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name {
1289 type Output = $ReturnTy;
1290
1291 #[inline]
1292 extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
1293 Fn::call(&self, ($( $arg, )*))
1294 }
1295 }
1296 )+
1297}
1298 }
1299"#,
1300 ).assert_expand_items(r#"
1301impl_fn_for_zst ! {
1302 # [ derive ( Clone ) ]
1303 struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug {
1304 c . escape_debug_ext ( false )
1305 } ;
1306
1307 # [ derive ( Clone ) ]
1308 struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode {
1309 c . escape_unicode ( )
1310 } ;
1311 # [ derive ( Clone ) ]
1312 struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault {
1313 c . escape_default ( )
1314 } ;
1315 }
1316"#,
1317 "# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}"
1318 );
1319}
1320
1321#[test]
1322fn test_impl_nonzero_fmt() {
1323 // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12
1324 parse_macro(
1325 r#"
1326 macro_rules! impl_nonzero_fmt {
1327 ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => {
1328 fn foo () {}
1329 }
1330 }
1331"#,
1332 ).assert_expand_items(
1333 r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
1334 "fn foo () {}",
1335 );
1336}
1337
1338#[test]
1339fn test_cfg_if_items() {
1340 // from https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986
1341 parse_macro(
1342 r#"
1343 macro_rules! __cfg_if_items {
1344 (($($not:meta,)*) ; ) => {};
1345 (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
1346 __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
1347 }
1348 }
1349"#,
1350 ).assert_expand_items(
1351 r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
1352 "__cfg_if_items ! {(rustdoc ,) ;}",
1353 );
1354}
1355
1356#[test]
1357fn test_cfg_if_main() {
1358 // from https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9
1359 parse_macro(
1360 r#"
1361 macro_rules! cfg_if {
1362 ($(
1363 if #[cfg($($meta:meta),*)] { $($it:item)* }
1364 ) else * else {
1365 $($it2:item)*
1366 }) => {
1367 __cfg_if_items! {
1368 () ;
1369 $( ( ($($meta),*) ($($it)*) ), )*
1370 ( () ($($it2)*) ),
1371 }
1372 };
1373
1374 // Internal macro to Apply a cfg attribute to a list of items
1375 (@__apply $m:meta, $($it:item)*) => {
1376 $(#[$m] $it)*
1377 };
1378 }
1379"#,
1380 ).assert_expand_items(r#"
1381cfg_if ! {
1382 if # [ cfg ( target_env = "msvc" ) ] {
1383 // no extra unwinder support needed
1384 } else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] {
1385 // no unwinder on the system!
1386 } else {
1387 mod libunwind ;
1388 pub use libunwind :: * ;
1389 }
1390 }
1391"#,
1392 "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}"
1393 ).assert_expand_items(
1394 r#"
1395cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
1396"#,
1397 "",
1398 );
1399}
1400
1401#[test]
1402fn test_proptest_arbitrary() {
1403 // from https://github.com/AltSysrq/proptest/blob/d1c4b049337d2f75dd6f49a095115f7c532e5129/proptest/src/arbitrary/macros.rs#L16
1404 parse_macro(
1405 r#"
1406macro_rules! arbitrary {
1407 ([$($bounds : tt)*] $typ: ty, $strat: ty, $params: ty;
1408 $args: ident => $logic: expr) => {
1409 impl<$($bounds)*> $crate::arbitrary::Arbitrary for $typ {
1410 type Parameters = $params;
1411 type Strategy = $strat;
1412 fn arbitrary_with($args: Self::Parameters) -> Self::Strategy {
1413 $logic
1414 }
1415 }
1416 };
1417
1418}"#,
1419 ).assert_expand_items(r#"arbitrary ! ( [ A : Arbitrary ]
1420 Vec < A > ,
1421 VecStrategy < A :: Strategy > ,
1422 RangedParams1 < A :: Parameters > ;
1423 args => { let product_unpack ! [ range , a ] = args ; vec ( any_with :: < A > ( a ) , range ) }
1424 ) ;"#,
1425 "impl <A : Arbitrary > $crate :: arbitrary :: Arbitrary for Vec < A > {type Parameters = RangedParams1 < A :: Parameters > ; type Strategy = VecStrategy < A :: Strategy > ; fn arbitrary_with (args : Self :: Parameters) -> Self :: Strategy {{let product_unpack ! [range , a] = args ; vec (any_with :: < A > (a) , range)}}}"
1426 );
1427}
1428
1429#[test]
1430fn test_old_ridl() {
1431 // This is from winapi 2.8, which do not have a link from github
1432 //
1433 let expanded = parse_macro(
1434 r#"
1435#[macro_export]
1436macro_rules! RIDL {
1437 (interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
1438 {$(
1439 fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
1440 ),+}
1441 ) => {
1442 impl $interface {
1443 $(pub unsafe fn $method(&mut self) -> $rtr {
1444 ((*self.lpVtbl).$method)(self $(,$p)*)
1445 })+
1446 }
1447 };
1448}"#,
1449 ).expand_tt(r#"
1450 RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
1451 fn GetDataSize(&mut self) -> UINT
1452 }}"#);
1453
1454 assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}");
1455}
1456
1457#[test]
1458fn test_quick_error() {
1459 let expanded = parse_macro(
1460 r#"
1461macro_rules! quick_error {
1462
1463 (SORT [enum $name:ident $( #[$meta:meta] )*]
1464 items [$($( #[$imeta:meta] )*
1465 => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*]
1466 {$( $ifuncs:tt )*} )* ]
1467 buf [ ]
1468 queue [ ]
1469 ) => {
1470 quick_error!(ENUMINITION [enum $name $( #[$meta] )*]
1471 body []
1472 queue [$(
1473 $( #[$imeta] )*
1474 =>
1475 $iitem: $imode [$( $ivar: $ityp ),*]
1476 )*]
1477 );
1478};
1479
1480}
1481"#,
1482 )
1483 .expand_tt(
1484 r#"
1485quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [
1486 => One : UNIT [] {}
1487 => Two : TUPLE [s :String] {display ("two: {}" , s) from ()}
1488 ] buf [] queue []) ;
1489"#,
1490 );
1491
1492 assert_eq!(expanded.to_string(), "quick_error ! (ENUMINITION [enum Wrapped # [derive (Debug)]] body [] queue [=> One : UNIT [] => Two : TUPLE [s : String]]) ;");
1493}
1494
1495#[test]
1496fn test_empty_repeat_vars_in_empty_repeat_vars() {
1497 parse_macro(
1498 r#"
1499macro_rules! delegate_impl {
1500 ([$self_type:ident, $self_wrap:ty, $self_map:ident]
1501 pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
1502
1503 // "Escaped" associated types. Stripped before making the `trait`
1504 // itself, but forwarded when delegating impls.
1505 $(
1506 @escape [type $assoc_name_ext:ident]
1507 // Associated types. Forwarded.
1508 )*
1509 $(
1510 @section type
1511 $(
1512 $(#[$_assoc_attr:meta])*
1513 type $assoc_name:ident $(: $assoc_bound:ty)*;
1514 )+
1515 )*
1516 // Methods. Forwarded. Using $self_map!(self) around the self argument.
1517 // Methods must use receiver `self` or explicit type like `self: &Self`
1518 // &self and &mut self are _not_ supported.
1519 $(
1520 @section self
1521 $(
1522 $(#[$_method_attr:meta])*
1523 fn $method_name:ident(self $(: $self_selftype:ty)* $(,$marg:ident : $marg_ty:ty)*) -> $mret:ty;
1524 )+
1525 )*
1526 // Arbitrary tail that is ignored when forwarding.
1527 $(
1528 @section nodelegate
1529 $($tail:tt)*
1530 )*
1531 }) => {
1532 impl<> $name for $self_wrap where $self_type: $name {
1533 $(
1534 $(
1535 fn $method_name(self $(: $self_selftype)* $(,$marg: $marg_ty)*) -> $mret {
1536 $self_map!(self).$method_name($($marg),*)
1537 }
1538 )*
1539 )*
1540 }
1541 }
1542}
1543"#,
1544 ).assert_expand_items(
1545 r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#,
1546 "impl <> Data for & \'a mut G where G : Data {}",
1547 );
1548}
1549
1550#[test]
1551fn expr_interpolation() {
1552 let expanded = parse_macro(
1553 r#"
1554 macro_rules! id {
1555 ($expr:expr) => {
1556 map($expr)
1557 }
1558 }
1559 "#,
1560 )
1561 .expand_expr("id!(x + foo);");
1562
1563 assert_eq!(expanded.to_string(), "map(x+foo)");
1564}
1565
1566pub(crate) struct MacroFixture {
1567 rules: MacroRules,
1568}
1569
1570impl MacroFixture {
1571 pub(crate) fn expand_tt(&self, invocation: &str) -> tt::Subtree {
1572 self.try_expand_tt(invocation).unwrap()
1573 }
1574
1575 fn try_expand_tt(&self, invocation: &str) -> Result<tt::Subtree, ExpandError> {
1576 let source_file = ast::SourceFile::parse(invocation).tree();
1577 let macro_invocation =
1578 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
1579
1580 let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap())
1581 .ok_or_else(|| ExpandError::ConversionError)?;
1582
1583 self.rules.expand(&invocation_tt).result()
1584 }
1585
1586 fn assert_expand_err(&self, invocation: &str, err: &ExpandError) {
1587 assert_eq!(self.try_expand_tt(invocation).as_ref(), Err(err));
1588 }
1589
1590 fn expand_items(&self, invocation: &str) -> SyntaxNode {
1591 let expanded = self.expand_tt(invocation);
1592 token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node()
1593 }
1594
1595 fn expand_statements(&self, invocation: &str) -> SyntaxNode {
1596 let expanded = self.expand_tt(invocation);
1597 token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node()
1598 }
1599
1600 fn expand_expr(&self, invocation: &str) -> SyntaxNode {
1601 let expanded = self.expand_tt(invocation);
1602 token_tree_to_syntax_node(&expanded, FragmentKind::Expr).unwrap().0.syntax_node()
1603 }
1604
1605 fn assert_expand_tt(&self, invocation: &str, expected: &str) {
1606 let expansion = self.expand_tt(invocation);
1607 assert_eq!(expansion.to_string(), expected);
1608 }
1609
1610 fn assert_expand(&self, invocation: &str, expected: &str) {
1611 let expansion = self.expand_tt(invocation);
1612 let actual = format!("{:?}", expansion);
1613 test_utils::assert_eq_text!(&actual.trim(), &expected.trim());
1614 }
1615
1616 fn assert_expand_items(&self, invocation: &str, expected: &str) -> &MacroFixture {
1617 self.assert_expansion(FragmentKind::Items, invocation, expected);
1618 self
1619 }
1620
1621 fn assert_expand_statements(&self, invocation: &str, expected: &str) -> &MacroFixture {
1622 self.assert_expansion(FragmentKind::Statements, invocation, expected);
1623 self
1624 }
1625
1626 fn assert_expansion(&self, kind: FragmentKind, invocation: &str, expected: &str) {
1627 let expanded = self.expand_tt(invocation);
1628 assert_eq!(expanded.to_string(), expected);
1629
1630 let expected = expected.replace("$crate", "C_C__C");
1631
1632 // wrap the given text to a macro call
1633 let expected = {
1634 let wrapped = format!("wrap_macro!( {} )", expected);
1635 let wrapped = ast::SourceFile::parse(&wrapped);
1636 let wrapped =
1637 wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
1638 let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0;
1639 wrapped.delimiter = None;
1640 wrapped
1641 };
1642
1643 let expanded_tree = token_tree_to_syntax_node(&expanded, kind).unwrap().0.syntax_node();
1644 let expanded_tree = debug_dump_ignore_spaces(&expanded_tree).trim().to_string();
1645
1646 let expected_tree = token_tree_to_syntax_node(&expected, kind).unwrap().0.syntax_node();
1647 let expected_tree = debug_dump_ignore_spaces(&expected_tree).trim().to_string();
1648
1649 let expected_tree = expected_tree.replace("C_C__C", "$crate");
1650 assert_eq!(
1651 expanded_tree, expected_tree,
1652 "\nleft:\n{}\nright:\n{}",
1653 expanded_tree, expected_tree,
1654 );
1655 }
1656}
1657
1658fn parse_macro_to_tt(ra_fixture: &str) -> tt::Subtree {
1659 let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
1660 let macro_definition =
1661 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
1662
1663 let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
1664
1665 let parsed = parse_to_token_tree(
1666 &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
1667 )
1668 .unwrap()
1669 .0;
1670 assert_eq!(definition_tt, parsed);
1671
1672 definition_tt
1673}
1674
1675pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture {
1676 let definition_tt = parse_macro_to_tt(ra_fixture);
1677 let rules = MacroRules::parse(&definition_tt).unwrap();
1678 MacroFixture { rules }
1679}
1680
1681pub(crate) fn parse_macro_error(ra_fixture: &str) -> ParseError {
1682 let definition_tt = parse_macro_to_tt(ra_fixture);
1683
1684 match MacroRules::parse(&definition_tt) {
1685 Ok(_) => panic!("Expect error"),
1686 Err(err) => err,
1687 }
1688}
1689
1690pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree {
1691 let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
1692 let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0;
1693
1694 let parsed = parse_to_token_tree(ra_fixture).unwrap().0;
1695 assert_eq!(tt, parsed);
1696
1697 parsed
1698}
1699
1700fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
1701 let mut level = 0;
1702 let mut buf = String::new();
1703 macro_rules! indent {
1704 () => {
1705 for _ in 0..level {
1706 buf.push_str(" ");
1707 }
1708 };
1709 }
1710
1711 for event in node.preorder_with_tokens() {
1712 match event {
1713 WalkEvent::Enter(element) => {
1714 match element {
1715 NodeOrToken::Node(node) => {
1716 indent!();
1717 writeln!(buf, "{:?}", node.kind()).unwrap();
1718 }
1719 NodeOrToken::Token(token) => match token.kind() {
1720 ra_syntax::SyntaxKind::WHITESPACE => {}
1721 _ => {
1722 indent!();
1723 writeln!(buf, "{:?}", token.kind()).unwrap();
1724 }
1725 },
1726 }
1727 level += 1;
1728 }
1729 WalkEvent::Leave(_) => level -= 1,
1730 }
1731 }
1732
1733 buf
1734}
1735
1736#[test]
1737fn test_issue_2520() {
1738 let macro_fixture = parse_macro(
1739 r#"
1740 macro_rules! my_macro {
1741 {
1742 ( $(
1743 $( [] $sname:ident : $stype:ty )?
1744 $( [$expr:expr] $nname:ident : $ntype:ty )?
1745 ),* )
1746 } => {
1747 Test {
1748 $(
1749 $( $sname, )?
1750 )*
1751 }
1752 };
1753 }
1754 "#,
1755 );
1756
1757 macro_fixture.assert_expand_items(
1758 r#"my_macro ! {
1759 ([] p1 : u32 , [|_| S0K0] s : S0K0 , [] k0 : i32)
1760 }"#,
1761 "Test {p1 , k0 ,}",
1762 );
1763}
1764
1765#[test]
1766fn test_issue_3861() {
1767 let macro_fixture = parse_macro(
1768 r#"
1769 macro_rules! rgb_color {
1770 ($p:expr, $t: ty) => {
1771 pub fn new() {
1772 let _ = 0 as $t << $p;
1773 }
1774 };
1775 }
1776 "#,
1777 );
1778
1779 macro_fixture.expand_items(r#"rgb_color!(8 + 8, u32);"#);
1780}
1781
1782#[test]
1783fn test_repeat_bad_var() {
1784 // FIXME: the second rule of the macro should be removed and an error about
1785 // `$( $c )+` raised
1786 parse_macro(
1787 r#"
1788 macro_rules! foo {
1789 ($( $b:ident )+) => {
1790 $( $c )+
1791 };
1792 ($( $b:ident )+) => {
1793 $( $b )+
1794 }
1795 }
1796 "#,
1797 )
1798 .assert_expand_items("foo!(b0 b1);", "b0 b1");
1799}
1800
1801#[test]
1802fn test_no_space_after_semi_colon() {
1803 let expanded = parse_macro(
1804 r#"
1805 macro_rules! with_std { ($($i:item)*) => ($(#[cfg(feature = "std")]$i)*) }
1806 "#,
1807 )
1808 .expand_items(r#"with_std! {mod m;mod f;}"#);
1809
1810 let dump = format!("{:#?}", expanded);
1811 assert_eq_text!(
1812 dump.trim(),
1813 r###"[email protected]
1814 [email protected]
1815 [email protected]
1816 [email protected] "#"
1817 [email protected] "["
1818 [email protected]
1819 [email protected]
1820 [email protected]
1821 [email protected] "cfg"
1822 [email protected]
1823 [email protected] "("
1824 [email protected] "feature"
1825 [email protected] "="
1826 [email protected] "\"std\""
1827 [email protected] ")"
1828 [email protected] "]"
1829 [email protected] "mod"
1830 [email protected]
1831 [email protected] "m"
1832 [email protected] ";"
1833 [email protected]
1834 [email protected]
1835 [email protected] "#"
1836 [email protected] "["
1837 [email protected]
1838 [email protected]
1839 [email protected]
1840 [email protected] "cfg"
1841 [email protected]
1842 [email protected] "("
1843 [email protected] "feature"
1844 [email protected] "="
1845 [email protected] "\"std\""
1846 [email protected] ")"
1847 [email protected] "]"
1848 [email protected] "mod"
1849 [email protected]
1850 [email protected] "f"
1851 [email protected] ";""###,
1852 );
1853}
1854
1855// https://github.com/rust-lang/rust/blob/master/src/test/ui/issues/issue-57597.rs
1856#[test]
1857fn test_rustc_issue_57597() {
1858 fn test_error(fixture: &str) {
1859 assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmtpyTokenTree);
1860 }
1861
1862 test_error("macro_rules! foo { ($($($i:ident)?)+) => {}; }");
1863 test_error("macro_rules! foo { ($($($i:ident)?)*) => {}; }");
1864 test_error("macro_rules! foo { ($($($i:ident)?)?) => {}; }");
1865 test_error("macro_rules! foo { ($($($($i:ident)?)?)?) => {}; }");
1866 test_error("macro_rules! foo { ($($($($i:ident)*)?)?) => {}; }");
1867 test_error("macro_rules! foo { ($($($($i:ident)?)*)?) => {}; }");
1868 test_error("macro_rules! foo { ($($($($i:ident)?)?)*) => {}; }");
1869 test_error("macro_rules! foo { ($($($($i:ident)*)*)?) => {}; }");
1870 test_error("macro_rules! foo { ($($($($i:ident)?)*)*) => {}; }");
1871 test_error("macro_rules! foo { ($($($($i:ident)?)*)+) => {}; }");
1872 test_error("macro_rules! foo { ($($($($i:ident)+)?)*) => {}; }");
1873 test_error("macro_rules! foo { ($($($($i:ident)+)*)?) => {}; }");
1874}
1875
1876#[test]
1877fn test_expand_bad_literal() {
1878 parse_macro(
1879 r#"
1880 macro_rules! foo { ($i:literal) => {}; }
1881 "#,
1882 )
1883 .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".into()));
1884}
1885
1886#[test]
1887fn test_empty_comments() {
1888 parse_macro(
1889 r#"
1890 macro_rules! one_arg_macro { ($fmt:expr) => (); }
1891 "#,
1892 )
1893 .assert_expand_err(
1894 r#"one_arg_macro!(/**/)"#,
1895 &ExpandError::BindingError("expected Expr".into()),
1896 );
1897}
diff --git a/crates/ra_mbe/src/tt_iter.rs b/crates/ra_mbe/src/tt_iter.rs
deleted file mode 100644
index 46c420718..000000000
--- a/crates/ra_mbe/src/tt_iter.rs
+++ /dev/null
@@ -1,75 +0,0 @@
1//! FIXME: write short doc here
2
3#[derive(Debug, Clone)]
4pub(crate) struct TtIter<'a> {
5 pub(crate) inner: std::slice::Iter<'a, tt::TokenTree>,
6}
7
8impl<'a> TtIter<'a> {
9 pub(crate) fn new(subtree: &'a tt::Subtree) -> TtIter<'a> {
10 TtIter { inner: subtree.token_trees.iter() }
11 }
12
13 pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ()> {
14 match self.next() {
15 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: c, .. }))) if *c == char => {
16 Ok(())
17 }
18 _ => Err(()),
19 }
20 }
21
22 pub(crate) fn expect_subtree(&mut self) -> Result<&'a tt::Subtree, ()> {
23 match self.next() {
24 Some(tt::TokenTree::Subtree(it)) => Ok(it),
25 _ => Err(()),
26 }
27 }
28
29 pub(crate) fn expect_leaf(&mut self) -> Result<&'a tt::Leaf, ()> {
30 match self.next() {
31 Some(tt::TokenTree::Leaf(it)) => Ok(it),
32 _ => Err(()),
33 }
34 }
35
36 pub(crate) fn expect_ident(&mut self) -> Result<&'a tt::Ident, ()> {
37 match self.expect_leaf()? {
38 tt::Leaf::Ident(it) => Ok(it),
39 _ => Err(()),
40 }
41 }
42
43 pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
44 let it = self.expect_leaf()?;
45 match it {
46 tt::Leaf::Literal(_) => Ok(it),
47 tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
48 _ => Err(()),
49 }
50 }
51
52 pub(crate) fn expect_punct(&mut self) -> Result<&'a tt::Punct, ()> {
53 match self.expect_leaf()? {
54 tt::Leaf::Punct(it) => Ok(it),
55 _ => Err(()),
56 }
57 }
58
59 pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> {
60 self.inner.as_slice().get(n)
61 }
62}
63
64impl<'a> Iterator for TtIter<'a> {
65 type Item = &'a tt::TokenTree;
66 fn next(&mut self) -> Option<Self::Item> {
67 self.inner.next()
68 }
69
70 fn size_hint(&self) -> (usize, Option<usize>) {
71 self.inner.size_hint()
72 }
73}
74
75impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {}