Diffstat (limited to 'crates/mbe')
-rw-r--r--  crates/mbe/Cargo.toml        |   3
-rw-r--r--  crates/mbe/src/benchmark.rs  | 211
-rw-r--r--  crates/mbe/src/lib.rs        |   3
3 files changed, 217 insertions, 0 deletions
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index ef0907194..0abba3584 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -19,3 +19,6 @@ parser = { path = "../parser", version = "0.0.0" }
 tt = { path = "../tt", version = "0.0.0" }
 test_utils = { path = "../test_utils", version = "0.0.0" }
 
+[dev-dependencies]
+profile = { path = "../profile", version = "0.0.0" }
+
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
new file mode 100644
index 000000000..0d0acd589
--- /dev/null
+++ b/crates/mbe/src/benchmark.rs
@@ -0,0 +1,211 @@
+//! This module adds real-world mbe examples for benchmark tests.
+
+use rustc_hash::FxHashMap;
+use syntax::{
+    ast::{self, NameOwner},
+    AstNode, SmolStr,
+};
+use test_utils::{bench, bench_fixture, skip_slow_tests};
+
+use crate::{
+    ast_to_token_tree,
+    parser::{Op, RepeatKind, Separator},
+    MacroRules,
+};
+
+#[test]
+fn benchmark_parse_macro_rules() {
+    if skip_slow_tests() {
+        return;
+    }
+    let rules = macro_rules_fixtures_tt();
+    let hash: usize = {
+        let _pt = bench("mbe parse macro rules");
+        rules.values().map(|it| MacroRules::parse(it).unwrap().rules.len()).sum()
+    };
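+    // `hash` is really the total number of rules parsed across all fixture macros;
+    // asserting a fixed value catches accidental changes to the fixture.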
+    assert_eq!(hash, 1144);
+}
+
+#[test]
+fn benchmark_expand_macro_rules() {
+    if skip_slow_tests() {
+        return;
+    }
+    let rules = macro_rules_fixtures();
+    let invocations = invocation_fixtures(&rules);
+
+    let hash: usize = {
+        let _pt = bench("mbe expand macro rules");
+        invocations
+            .into_iter()
+            .map(|(id, tt)| {
+                let res = rules[&id].expand(&tt);
+                if res.err.is_some() {
+                    // FIXME:
+                    // Currently `invocation_fixtures` generates some correct invocations that
+                    // mbe nevertheless fails to expand, so errors are only logged here.
+                    // See: https://github.com/rust-analyzer/rust-analyzer/issues/4777
+                    eprintln!("err from {} {:?}", id, res.err);
+                }
+                res.value.token_trees.len()
+            })
+            .sum()
+    };
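+    // Likewise, `hash` here is the total number of token trees produced by all expansions.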
+    assert_eq!(hash, 66995);
+}
+
+fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> {
+    macro_rules_fixtures_tt()
+        .into_iter()
+        .map(|(id, tt)| (id, MacroRules::parse(&tt).unwrap()))
+        .collect()
+}
+
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
+    let fixture = bench_fixture::numerous_macro_rules();
+    let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
+
+    source_file
+        .syntax()
+        .descendants()
+        .filter_map(ast::MacroRules::cast)
+        .map(|rule| {
+            let id = rule.name().unwrap().to_string();
+            let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap()).unwrap();
+            (id, def_tt)
+        })
+        .collect()
+}
+
+// Generate pseudo-random invocation fixtures from the parsed rules.
+fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> {
+    let mut seed = 123456789;
+    let mut res = Vec::new();
+
+    for (name, it) in rules {
+        for rule in &it.rules {
+            // Generate two invocations per rule.
+            for _ in 0..2 {
+                let mut subtree = tt::Subtree::default();
+                for op in rule.lhs.iter() {
+                    collect_from_op(op, &mut subtree, &mut seed);
+                }
+                res.push((name.clone(), subtree));
+            }
+        }
+    }
+    return res;
+
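+    // Walk one op from a rule's left-hand side and push token trees that the
+    // matcher should accept, recursing into subtrees and repeats.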
+    fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
+        return match op {
+            Op::Var { kind, .. } => match kind.as_ref().map(|it| it.as_str()) {
+                Some("ident") => parent.token_trees.push(make_ident("foo")),
+                Some("ty") => parent.token_trees.push(make_ident("Foo")),
+                Some("tt") => parent.token_trees.push(make_ident("foo")),
+                Some("vis") => parent.token_trees.push(make_ident("pub")),
+                Some("pat") => parent.token_trees.push(make_ident("foo")),
+                Some("path") => parent.token_trees.push(make_ident("foo")),
+                Some("literal") => parent.token_trees.push(make_literal("1")),
+                Some("expr") => parent.token_trees.push(make_ident("foo").into()),
+                Some("lifetime") => {
+                    parent.token_trees.push(make_punct('\''));
+                    parent.token_trees.push(make_ident("a"));
+                }
+                Some("block") => {
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
+                }
+                Some("item") => {
+                    parent.token_trees.push(make_ident("fn"));
+                    parent.token_trees.push(make_ident("foo"));
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
+                }
+                Some("meta") => {
+                    parent.token_trees.push(make_ident("foo"));
+                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
+                }
+
+                None => (),
+                Some(kind) => panic!("Unhandled kind {}", kind),
+            },
+            Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
+            Op::Repeat { tokens, kind, separator } => {
+                let max = 10;
+                let cnt = match kind {
+                    RepeatKind::ZeroOrMore => rand(seed) % max,
+                    RepeatKind::OneOrMore => 1 + rand(seed) % max,
+                    RepeatKind::ZeroOrOne => rand(seed) % 2,
+                };
+                for i in 0..cnt {
+                    for it in tokens.iter() {
+                        collect_from_op(it, parent, seed);
+                    }
+                    if i + 1 != cnt {
+                        if let Some(sep) = separator {
+                            match sep {
+                                Separator::Literal(it) => parent
+                                    .token_trees
+                                    .push(tt::Leaf::Literal(it.clone().into()).into()),
+                                Separator::Ident(it) => parent
+                                    .token_trees
+                                    .push(tt::Leaf::Ident(it.clone().into()).into()),
+                                Separator::Puncts(puncts) => {
+                                    for it in puncts {
+                                        parent
+                                            .token_trees
+                                            .push(tt::Leaf::Punct(it.clone().into()).into())
+                                    }
+                                }
+                            };
+                        }
+                    }
+                }
+            }
+            Op::Subtree { tokens, delimiter } => {
+                let mut subtree =
+                    tt::Subtree { delimiter: delimiter.clone(), token_trees: Vec::new() };
+                tokens.iter().for_each(|it| {
+                    collect_from_op(it, &mut subtree, seed);
+                });
+                parent.token_trees.push(subtree.into());
+            }
+        };
+
+        // Simple linear congruential generator for deterministic results.
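+        // The multiplier and increment are the classic 32-bit LCG constants from
+        // Numerical Recipes, so the generated fixtures are reproducible across runs.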
+        fn rand(seed: &mut usize) -> usize {
+            let a = 1664525;
+            let c = 1013904223;
+            *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
+            return *seed;
+        };
+        fn make_ident(ident: &str) -> tt::TokenTree {
+            tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
+                .into()
+        }
+        fn make_punct(char: char) -> tt::TokenTree {
+            tt::Leaf::Punct(tt::Punct {
+                id: tt::TokenId::unspecified(),
+                char,
+                spacing: tt::Spacing::Alone,
+            })
+            .into()
+        }
+        fn make_literal(lit: &str) -> tt::TokenTree {
+            tt::Leaf::Literal(tt::Literal {
+                id: tt::TokenId::unspecified(),
+                text: SmolStr::new(lit),
+            })
+            .into()
+        }
+        fn make_subtree(
+            kind: tt::DelimiterKind,
+            token_trees: Option<Vec<tt::TokenTree>>,
+        ) -> tt::TokenTree {
+            tt::Subtree {
+                delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
+                token_trees: token_trees.unwrap_or_default(),
+            }
+            .into()
+        }
+    }
+}
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index d80bd7a33..6b4a4eb53 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -12,6 +12,9 @@ mod subtree_source;
 #[cfg(test)]
 mod tests;
 
+#[cfg(test)]
+mod benchmark;
+
 use std::fmt;
 
 use test_utils::mark;