From 23dbf36c7dfcbb43a0def2642287c2fb30864a07 Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Fri, 5 Feb 2021 19:57:32 +0800 Subject: Add benchmark test for mbe --- crates/mbe/Cargo.toml | 3 + crates/mbe/src/benchmark.rs | 211 +++++++++++++++++++++++++++++++++ crates/mbe/src/lib.rs | 3 + crates/test_utils/src/bench_fixture.rs | 5 + 4 files changed, 222 insertions(+) create mode 100644 crates/mbe/src/benchmark.rs (limited to 'crates') diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index ef0907194..0abba3584 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -19,3 +19,6 @@ parser = { path = "../parser", version = "0.0.0" } tt = { path = "../tt", version = "0.0.0" } test_utils = { path = "../test_utils", version = "0.0.0" } +[dev-dependencies] +profile = { path = "../profile", version = "0.0.0" } + diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs new file mode 100644 index 000000000..0d0acd589 --- /dev/null +++ b/crates/mbe/src/benchmark.rs @@ -0,0 +1,211 @@ +//! 
This module adds real-world mbe examples for benchmark tests + +use rustc_hash::FxHashMap; +use syntax::{ + ast::{self, NameOwner}, + AstNode, SmolStr, +}; +use test_utils::{bench, bench_fixture, skip_slow_tests}; + +use crate::{ + ast_to_token_tree, + parser::{Op, RepeatKind, Separator}, + MacroRules, +}; + +#[test] +fn benchmark_parse_macro_rules() { + if skip_slow_tests() { + return; + } + let rules = macro_rules_fixtures_tt(); + let hash: usize = { + let _pt = bench("mbe parse macro rules"); + rules.values().map(|it| MacroRules::parse(it).unwrap().rules.len()).sum() + }; + assert_eq!(hash, 1144); +} + +#[test] +fn benchmark_expand_macro_rules() { + if skip_slow_tests() { + return; + } + let rules = macro_rules_fixtures(); + let invocations = invocation_fixtures(&rules); + + let hash: usize = { + let _pt = bench("mbe expand macro rules"); + invocations + .into_iter() + .map(|(id, tt)| { + let res = rules[&id].expand(&tt); + if res.err.is_some() { + // FIXME: + // Currently `invocation_fixtures` will generate some correct invocations but + // cannot be expanded by mbe. We ignore errors here. 
+ // See: https://github.com/rust-analyzer/rust-analyzer/issues/4777 + eprintln!("err from {} {:?}", id, res.err); + } + res.value.token_trees.len() + }) + .sum() + }; + assert_eq!(hash, 66995); +} + +fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> { + macro_rules_fixtures_tt() + .into_iter() + .map(|(id, tt)| (id, MacroRules::parse(&tt).unwrap())) + .collect() +} + +fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> { + let fixture = bench_fixture::numerous_macro_rules(); + let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); + + source_file + .syntax() + .descendants() + .filter_map(ast::MacroRules::cast) + .map(|rule| { + let id = rule.name().unwrap().to_string(); + let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap()).unwrap(); + (id, def_tt) + }) + .collect() +} + +// Generate random invocation fixtures from rules +fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> { + let mut seed = 123456789; + let mut res = Vec::new(); + + for (name, it) in rules { + for rule in &it.rules { + // Generate twice + for _ in 0..2 { + let mut subtree = tt::Subtree::default(); + for op in rule.lhs.iter() { + collect_from_op(op, &mut subtree, &mut seed); + } + res.push((name.clone(), subtree)); + } + } + } + return res; + + fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { + return match op { + Op::Var { kind, .. 
} => match kind.as_ref().map(|it| it.as_str()) { + Some("ident") => parent.token_trees.push(make_ident("foo")), + Some("ty") => parent.token_trees.push(make_ident("Foo")), + Some("tt") => parent.token_trees.push(make_ident("foo")), + Some("vis") => parent.token_trees.push(make_ident("pub")), + Some("pat") => parent.token_trees.push(make_ident("foo")), + Some("path") => parent.token_trees.push(make_ident("foo")), + Some("literal") => parent.token_trees.push(make_literal("1")), + Some("expr") => parent.token_trees.push(make_ident("foo").into()), + Some("lifetime") => { + parent.token_trees.push(make_punct('\'')); + parent.token_trees.push(make_ident("a")); + } + Some("block") => { + parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)) + } + Some("item") => { + parent.token_trees.push(make_ident("fn")); + parent.token_trees.push(make_ident("foo")); + parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); + parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)); + } + Some("meta") => { + parent.token_trees.push(make_ident("foo")); + parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); + } + + None => (), + Some(kind) => panic!("Unhandled kind {}", kind), + }, + Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()), + Op::Repeat { tokens, kind, separator } => { + let max = 10; + let cnt = match kind { + RepeatKind::ZeroOrMore => rand(seed) % max, + RepeatKind::OneOrMore => 1 + rand(seed) % max, + RepeatKind::ZeroOrOne => rand(seed) % 2, + }; + for i in 0..cnt { + for it in tokens.iter() { + collect_from_op(it, parent, seed); + } + if i + 1 != cnt { + if let Some(sep) = separator { + match sep { + Separator::Literal(it) => parent + .token_trees + .push(tt::Leaf::Literal(it.clone().into()).into()), + Separator::Ident(it) => parent + .token_trees + .push(tt::Leaf::Ident(it.clone().into()).into()), + Separator::Puncts(puncts) => { + for it in puncts { + parent + .token_trees + 
.push(tt::Leaf::Punct(it.clone().into()).into()) + } + } + }; + } + } + } + } + Op::Subtree { tokens, delimiter } => { + let mut subtree = + tt::Subtree { delimiter: delimiter.clone(), token_trees: Vec::new() }; + tokens.iter().for_each(|it| { + collect_from_op(it, &mut subtree, seed); + }); + parent.token_trees.push(subtree.into()); + } + }; + + // Simple linear congruential generator for deterministic result + fn rand(seed: &mut usize) -> usize { + let a = 1664525; + let c = 1013904223; + *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); + return *seed; + }; + fn make_ident(ident: &str) -> tt::TokenTree { + tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) }) + .into() + } + fn make_punct(char: char) -> tt::TokenTree { + tt::Leaf::Punct(tt::Punct { + id: tt::TokenId::unspecified(), + char, + spacing: tt::Spacing::Alone, + }) + .into() + } + fn make_literal(lit: &str) -> tt::TokenTree { + tt::Leaf::Literal(tt::Literal { + id: tt::TokenId::unspecified(), + text: SmolStr::new(lit), + }) + .into() + } + fn make_subtree( + kind: tt::DelimiterKind, + token_trees: Option<Vec<tt::TokenTree>>, + ) -> tt::TokenTree { + tt::Subtree { + delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }), + token_trees: token_trees.unwrap_or_default(), + } + .into() + } + } +} diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index d80bd7a33..6b4a4eb53 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -12,6 +12,9 @@ mod subtree_source; #[cfg(test)] mod tests; +#[cfg(test)] +mod benchmark; + use std::fmt; use test_utils::mark; diff --git a/crates/test_utils/src/bench_fixture.rs b/crates/test_utils/src/bench_fixture.rs index aa1bea9bb..d775e2cc9 100644 --- a/crates/test_utils/src/bench_fixture.rs +++ b/crates/test_utils/src/bench_fixture.rs @@ -35,3 +35,8 @@ pub fn glorious_old_parser() -> String { let path = project_dir().join("bench_data/glorious_old_parser"); fs::read_to_string(&path).unwrap() } + +pub fn 
numerous_macro_rules() -> String { + let path = project_dir().join("bench_data/numerous_macro_rules"); + fs::read_to_string(&path).unwrap() +} -- cgit v1.2.3