author    Seivan Heidari <[email protected]>  2019-11-09 15:27:40 +0000
committer Seivan Heidari <[email protected]>  2019-11-09 15:27:40 +0000
commit    529b227d42951feabf64c8c964b00e726dd92d46 (patch)
tree      d54ea1a30905d4fd8798129103bb13921fb55c94 /crates/ra_mbe/src
parent    81662d23a64c7eb7ea10cb7bbf316f17d78dc4bc (diff)
parent    9d786ea221b27fbdf7c7f7beea0611 (diff)
Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes
Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r--  crates/ra_mbe/src/lib.rs             7
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  75
-rw-r--r--  crates/ra_mbe/src/tests.rs          102
3 files changed, 111 insertions, 73 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 15f000175..8a31d1c36 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -31,8 +31,7 @@ pub enum ExpandError {
31} 31}
32 32
33pub use crate::syntax_bridge::{ 33pub use crate::syntax_bridge::{
34 ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_expr, token_tree_to_items, 34 ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
35 token_tree_to_macro_stmts, token_tree_to_pat, token_tree_to_ty,
36}; 35};
37 36
38/// This struct contains AST for a single `macro_rules` definition. What might 37/// This struct contains AST for a single `macro_rules` definition. What might
@@ -118,6 +117,10 @@ impl MacroRules {
118 shift_subtree(&mut tt, self.shift); 117 shift_subtree(&mut tt, self.shift);
119 mbe_expander::expand(self, &tt) 118 mbe_expander::expand(self, &tt)
120 } 119 }
120
121 pub fn shift(&self) -> u32 {
122 self.shift
123 }
121} 124}
122 125
123impl Rule { 126impl Rule {
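
Note: the re-export change above replaces the per-fragment entry points (token_tree_to_expr, token_tree_to_items, and friends) with the single token_tree_to_syntax_node function that takes a FragmentKind. A minimal caller sketch, assuming only the re-exports shown in this diff; the helper name expand_to_expr is made up for illustration:

    use ra_mbe::{token_tree_to_syntax_node, ExpandError};
    use ra_parser::FragmentKind;
    use ra_syntax::SyntaxNode;

    // Callers now pick the fragment kind explicitly instead of calling a
    // per-fragment conversion function.
    fn expand_to_expr(tt: &tt::Subtree) -> Result<SyntaxNode, ExpandError> {
        let (parse, _rev_map) = token_tree_to_syntax_node(tt, FragmentKind::Expr)?;
        Ok(parse.syntax_node())
    }
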
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 592fcf527..3f57ce3b5 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -1,9 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use ra_parser::{ 3use ra_parser::{FragmentKind, ParseError, TreeSink};
4 FragmentKind::{self, *},
5 ParseError, TreeSink,
6};
7use ra_syntax::{ 4use ra_syntax::{
8 ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode, 5 ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
9 SyntaxTreeBuilder, TextRange, TextUnit, T, 6 SyntaxTreeBuilder, TextRange, TextUnit, T,
@@ -14,12 +11,18 @@ use crate::subtree_source::SubtreeTokenSource;
14use crate::ExpandError; 11use crate::ExpandError;
15 12
16/// Maps `tt::TokenId` to the relative range of the original token. 13/// Maps `tt::TokenId` to the relative range of the original token.
17#[derive(Default)] 14#[derive(Debug, PartialEq, Eq, Default)]
18pub struct TokenMap { 15pub struct TokenMap {
19 /// Maps `tt::TokenId` to the *relative* source range. 16 /// Maps `tt::TokenId` to the *relative* source range.
20 tokens: Vec<TextRange>, 17 tokens: Vec<TextRange>,
21} 18}
22 19
20/// Maps relative range of the expanded syntax node to `tt::TokenId`
21#[derive(Debug, PartialEq, Eq, Default)]
22pub struct RevTokenMap {
23 pub ranges: Vec<(TextRange, tt::TokenId)>,
24}
25
23/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro 26/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
24/// will consume). 27/// will consume).
25pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> { 28pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
@@ -49,10 +52,10 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
49// * ImplItems(SmallVec<[ast::ImplItem; 1]>) 52// * ImplItems(SmallVec<[ast::ImplItem; 1]>)
50// * ForeignItems(SmallVec<[ast::ForeignItem; 1]> 53// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
51 54
52fn fragment_to_syntax_node( 55pub fn token_tree_to_syntax_node(
53 tt: &tt::Subtree, 56 tt: &tt::Subtree,
54 fragment_kind: FragmentKind, 57 fragment_kind: FragmentKind,
55) -> Result<Parse<SyntaxNode>, ExpandError> { 58) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
56 let tmp; 59 let tmp;
57 let tokens = match tt { 60 let tokens = match tt {
58 tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(), 61 tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),
@@ -69,38 +72,8 @@ fn fragment_to_syntax_node(
69 return Err(ExpandError::ConversionError); 72 return Err(ExpandError::ConversionError);
70 } 73 }
71 //FIXME: would be cool to report errors 74 //FIXME: would be cool to report errors
72 let parse = tree_sink.inner.finish(); 75 let (parse, range_map) = tree_sink.finish();
73 Ok(parse) 76 Ok((parse, range_map))
74}
75
76/// Parses the token tree (result of macro expansion) to an expression
77pub fn token_tree_to_expr(tt: &tt::Subtree) -> Result<Parse<ast::Expr>, ExpandError> {
78 let parse = fragment_to_syntax_node(tt, Expr)?;
79 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
80}
81
82/// Parses the token tree (result of macro expansion) to a Pattern
83pub fn token_tree_to_pat(tt: &tt::Subtree) -> Result<Parse<ast::Pat>, ExpandError> {
84 let parse = fragment_to_syntax_node(tt, Pattern)?;
85 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
86}
87
88/// Parses the token tree (result of macro expansion) to a Type
89pub fn token_tree_to_ty(tt: &tt::Subtree) -> Result<Parse<ast::TypeRef>, ExpandError> {
90 let parse = fragment_to_syntax_node(tt, Type)?;
91 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
92}
93
94/// Parses the token tree (result of macro expansion) as a sequence of stmts
95pub fn token_tree_to_macro_stmts(tt: &tt::Subtree) -> Result<Parse<ast::MacroStmts>, ExpandError> {
96 let parse = fragment_to_syntax_node(tt, Statements)?;
97 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
98}
99
100/// Parses the token tree (result of macro expansion) as a sequence of items
101pub fn token_tree_to_items(tt: &tt::Subtree) -> Result<Parse<ast::MacroItems>, ExpandError> {
102 let parse = fragment_to_syntax_node(tt, Items)?;
103 parse.cast().ok_or_else(|| crate::ExpandError::ConversionError)
104} 77}
105 78
106impl TokenMap { 79impl TokenMap {
@@ -116,6 +89,12 @@ impl TokenMap {
116 } 89 }
117} 90}
118 91
92impl RevTokenMap {
93 fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
94 self.ranges.push((relative_range, token_id.clone()))
95 }
96}
97
119/// Returns the textual content of a doc comment block as a quoted string 98/// Returns the textual content of a doc comment block as a quoted string
120/// That is, strips leading `///` (or `/**`, etc) 99/// That is, strips leading `///` (or `/**`, etc)
121/// and strips the ending `*/` 100/// and strips the ending `*/`
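
Note: the new RevTokenMap goes in the opposite direction from TokenMap: it records, for identifiers in the expanded tree, which range of the expansion corresponds to which original tt::TokenId. A hedged lookup sketch over its public ranges field; the function name token_id_at is an illustration, not part of this commit:

    use ra_mbe::RevTokenMap;
    use ra_syntax::TextUnit;

    // Find the original token id for a position inside the expanded node
    // by scanning the recorded (range, id) pairs.
    fn token_id_at(map: &RevTokenMap, offset: TextUnit) -> Option<tt::TokenId> {
        map.ranges
            .iter()
            .find(|(range, _)| range.contains(offset))
            .map(|(_, id)| id.clone())
    }
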
@@ -262,6 +241,7 @@ struct TtTreeSink<'a> {
262 cursor: Cursor<'a>, 241 cursor: Cursor<'a>,
263 text_pos: TextUnit, 242 text_pos: TextUnit,
264 inner: SyntaxTreeBuilder, 243 inner: SyntaxTreeBuilder,
244 range_map: RevTokenMap,
265 245
266 // Number of roots 246 // Number of roots
267 // Use for detect ill-form tree which is not single root 247 // Use for detect ill-form tree which is not single root
@@ -276,8 +256,13 @@ impl<'a> TtTreeSink<'a> {
276 text_pos: 0.into(), 256 text_pos: 0.into(),
277 inner: SyntaxTreeBuilder::default(), 257 inner: SyntaxTreeBuilder::default(),
278 roots: smallvec::SmallVec::new(), 258 roots: smallvec::SmallVec::new(),
259 range_map: RevTokenMap::default(),
279 } 260 }
280 } 261 }
262
263 fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
264 (self.inner.finish(), self.range_map)
265 }
281} 266}
282 267
283fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr { 268fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
@@ -307,6 +292,15 @@ impl<'a> TreeSink for TtTreeSink<'a> {
307 292
308 match self.cursor.token_tree() { 293 match self.cursor.token_tree() {
309 Some(tt::TokenTree::Leaf(leaf)) => { 294 Some(tt::TokenTree::Leaf(leaf)) => {
295 // Mark the range if needed
296 if let tt::Leaf::Ident(ident) = leaf {
297 if kind == IDENT {
298 let range =
299 TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
300 self.range_map.add(range, ident.id);
301 }
302 }
303
310 self.cursor = self.cursor.bump(); 304 self.cursor = self.cursor.bump();
311 self.buf += &format!("{}", leaf); 305 self.buf += &format!("{}", leaf);
312 } 306 }
@@ -337,6 +331,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
337 { 331 {
338 if curr.spacing == tt::Spacing::Alone { 332 if curr.spacing == tt::Spacing::Alone {
339 self.inner.token(WHITESPACE, " ".into()); 333 self.inner.token(WHITESPACE, " ".into());
334 self.text_pos += TextUnit::of_char(' ');
340 } 335 }
341 } 336 }
342 } 337 }
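
Note: the added text_pos += TextUnit::of_char(' ') matters because the sink synthesizes a space after Alone-spaced tokens; without advancing the offset, every range recorded afterwards would be shifted left by one character per inserted space. A small offset sketch with assumed token texts "foo" and "bar":

    use ra_syntax::{TextRange, TextUnit};

    // After emitting "foo" and a synthesized space, the next ident "bar"
    // should be recorded at [4; 7); skipping the bump would wrongly give [3; 6).
    fn offset_example() -> TextRange {
        let mut text_pos = TextUnit::from(0u32);
        text_pos += TextUnit::of_str("foo"); // 3
        text_pos += TextUnit::of_char(' ');  // 4 (the line added in this hunk)
        TextRange::offset_len(text_pos, TextUnit::of_str("bar")) // [4; 7)
    }
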
@@ -423,6 +418,6 @@ mod tests {
423 "#, 418 "#,
424 ); 419 );
425 let expansion = expand(&rules, "stmts!();"); 420 let expansion = expand(&rules, "stmts!();");
426 assert!(token_tree_to_expr(&expansion).is_err()); 421 assert!(token_tree_to_syntax_node(&expansion, FragmentKind::Expr).is_err());
427 } 422 }
428} 423}
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index a23e3afe3..0109a4d98 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1,3 +1,4 @@
1use ra_parser::FragmentKind;
1use ra_syntax::{ast, AstNode, NodeOrToken, WalkEvent}; 2use ra_syntax::{ast, AstNode, NodeOrToken, WalkEvent};
2use test_utils::assert_eq_text; 3use test_utils::assert_eq_text;
3 4
@@ -126,9 +127,9 @@ fn test_expr_order() {
126"#, 127"#,
127 ); 128 );
128 let expanded = expand(&rules, "foo! { 1 + 1}"); 129 let expanded = expand(&rules, "foo! { 1 + 1}");
129 let tree = token_tree_to_items(&expanded).unwrap().tree(); 130 let tree = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
130 131
131 let dump = format!("{:#?}", tree.syntax()); 132 let dump = format!("{:#?}", tree);
132 assert_eq_text!( 133 assert_eq_text!(
133 dump.trim(), 134 dump.trim(),
134 r#"MACRO_ITEMS@[0; 15) 135 r#"MACRO_ITEMS@[0; 15)
@@ -383,9 +384,9 @@ fn test_expand_to_item_list() {
383 ", 384 ",
384 ); 385 );
385 let expansion = expand(&rules, "structs!(Foo, Bar);"); 386 let expansion = expand(&rules, "structs!(Foo, Bar);");
386 let tree = token_tree_to_items(&expansion).unwrap().tree(); 387 let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Items).unwrap().0.syntax_node();
387 assert_eq!( 388 assert_eq!(
388 format!("{:#?}", tree.syntax()).trim(), 389 format!("{:#?}", tree).trim(),
389 r#" 390 r#"
390MACRO_ITEMS@[0; 40) 391MACRO_ITEMS@[0; 40)
391 STRUCT_DEF@[0; 20) 392 STRUCT_DEF@[0; 20)
@@ -501,10 +502,11 @@ fn test_tt_to_stmts() {
501 ); 502 );
502 503
503 let expanded = expand(&rules, "foo!{}"); 504 let expanded = expand(&rules, "foo!{}");
504 let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree(); 505 let stmts =
506 token_tree_to_syntax_node(&expanded, FragmentKind::Statements).unwrap().0.syntax_node();
505 507
506 assert_eq!( 508 assert_eq!(
507 format!("{:#?}", stmts.syntax()).trim(), 509 format!("{:#?}", stmts).trim(),
508 r#"MACRO_STMTS@[0; 15) 510 r#"MACRO_STMTS@[0; 15)
509 LET_STMT@[0; 7) 511 LET_STMT@[0; 7)
510 LET_KW@[0; 3) "let" 512 LET_KW@[0; 3) "let"
@@ -754,7 +756,10 @@ fn test_all_items() {
754 } 756 }
755"#, 757"#,
756 ); 758 );
757 assert_expansion(MacroKind::Items, &rules, r#" 759 assert_expansion(
760 MacroKind::Items,
761 &rules,
762 r#"
758 foo! { 763 foo! {
759 extern crate a; 764 extern crate a;
760 mod b; 765 mod b;
@@ -770,7 +775,9 @@ fn test_all_items() {
770 extern {} 775 extern {}
771 type T = u8; 776 type T = u8;
772 } 777 }
773"#, r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#); 778"#,
779 r#"extern crate a ; mod b ; mod c {} use d ; const E : i32 = 0 ; static F : i32 = 0 ; impl G {} struct H ; enum I {Foo} trait J {} fn h () {} extern {} type T = u8 ;"#,
780 );
774} 781}
775 782
776#[test] 783#[test]
@@ -946,10 +953,10 @@ fn test_vec() {
946 ); 953 );
947 954
948 let expansion = expand(&rules, r#"vec![1u32,2];"#); 955 let expansion = expand(&rules, r#"vec![1u32,2];"#);
949 let tree = token_tree_to_expr(&expansion).unwrap().tree(); 956 let tree = token_tree_to_syntax_node(&expansion, FragmentKind::Expr).unwrap().0.syntax_node();
950 957
951 assert_eq!( 958 assert_eq!(
952 format!("{:#?}", tree.syntax()).trim(), 959 format!("{:#?}", tree).trim(),
953 r#"BLOCK_EXPR@[0; 45) 960 r#"BLOCK_EXPR@[0; 45)
954 BLOCK@[0; 45) 961 BLOCK@[0; 45)
955 L_CURLY@[0; 1) "{" 962 L_CURLY@[0; 1) "{"
@@ -1088,8 +1095,12 @@ macro_rules! generate_pattern_iterators {
1088"#, 1095"#,
1089 ); 1096 );
1090 1097
1091 assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#, 1098 assert_expansion(
1092 "fn foo () {}"); 1099 MacroKind::Items,
1100 &rules,
1101 r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
1102 "fn foo () {}",
1103 );
1093} 1104}
1094 1105
1095#[test] 1106#[test]
@@ -1171,8 +1182,12 @@ fn test_impl_nonzero_fmt() {
1171"#, 1182"#,
1172 ); 1183 );
1173 1184
1174 assert_expansion(MacroKind::Items, &rules, r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#, 1185 assert_expansion(
1175 "fn foo () {}"); 1186 MacroKind::Items,
1187 &rules,
1188 r#"impl_nonzero_fmt! { # [stable(feature= "nonzero",since="1.28.0")] (Debug,Display,Binary,Octal,LowerHex,UpperHex) for NonZeroU8}"#,
1189 "fn foo () {}",
1190 );
1176} 1191}
1177 1192
1178#[test] 1193#[test]
@@ -1189,8 +1204,12 @@ fn test_cfg_if_items() {
1189"#, 1204"#,
1190 ); 1205 );
1191 1206
1192 assert_expansion(MacroKind::Items, &rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#, 1207 assert_expansion(
1193 "__cfg_if_items ! {(rustdoc ,) ;}"); 1208 MacroKind::Items,
1209 &rules,
1210 r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
1211 "__cfg_if_items ! {(rustdoc ,) ;}",
1212 );
1194} 1213}
1195 1214
1196#[test] 1215#[test]
@@ -1233,10 +1252,13 @@ cfg_if ! {
1233"#, 1252"#,
1234 "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}"); 1253 "__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}");
1235 1254
1236 assert_expansion(MacroKind::Items, &rules, r#" 1255 assert_expansion(
1256 MacroKind::Items,
1257 &rules,
1258 r#"
1237cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , } 1259cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
1238"#, 1260"#,
1239 "" 1261 "",
1240 ); 1262 );
1241} 1263}
1242 1264
@@ -1291,10 +1313,13 @@ macro_rules! RIDL {
1291}"#, 1313}"#,
1292 ); 1314 );
1293 1315
1294 let expanded = expand(&rules, r#" 1316 let expanded = expand(
1317 &rules,
1318 r#"
1295RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) { 1319RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID3D11DeviceChildVtbl) {
1296 fn GetDataSize(&mut self) -> UINT 1320 fn GetDataSize(&mut self) -> UINT
1297}}"#); 1321}}"#,
1322 );
1298 assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}"); 1323 assert_eq!(expanded.to_string(), "impl ID3D11Asynchronous {pub unsafe fn GetDataSize (& mut self) -> UINT {((* self . lpVtbl) .GetDataSize) (self)}}");
1299} 1324}
1300 1325
@@ -1340,7 +1365,8 @@ quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [
1340 1365
1341#[test] 1366#[test]
1342fn test_empty_repeat_vars_in_empty_repeat_vars() { 1367fn test_empty_repeat_vars_in_empty_repeat_vars() {
1343 let rules = create_rules(r#" 1368 let rules = create_rules(
1369 r#"
1344macro_rules! delegate_impl { 1370macro_rules! delegate_impl {
1345 ([$self_type:ident, $self_wrap:ty, $self_map:ident] 1371 ([$self_type:ident, $self_wrap:ty, $self_map:ident]
1346 pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* { 1372 pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
@@ -1385,9 +1411,15 @@ macro_rules! delegate_impl {
1385 } 1411 }
1386 } 1412 }
1387} 1413}
1388"#); 1414"#,
1415 );
1389 1416
1390 assert_expansion(MacroKind::Items, &rules, r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#, "impl <> Data for & \'a mut G where G : Data {}"); 1417 assert_expansion(
1418 MacroKind::Items,
1419 &rules,
1420 r#"delegate_impl ! {[G , & 'a mut G , deref] pub trait Data : GraphBase {@ section type type NodeWeight ;}}"#,
1421 "impl <> Data for & \'a mut G where G : Data {}",
1422 );
1391} 1423}
1392 1424
1393pub(crate) fn create_rules(macro_definition: &str) -> MacroRules { 1425pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
@@ -1436,22 +1468,30 @@ pub(crate) fn assert_expansion(
1436 }; 1468 };
1437 let (expanded_tree, expected_tree) = match kind { 1469 let (expanded_tree, expected_tree) = match kind {
1438 MacroKind::Items => { 1470 MacroKind::Items => {
1439 let expanded_tree = token_tree_to_items(&expanded).unwrap().tree(); 1471 let expanded_tree =
1440 let expected_tree = token_tree_to_items(&expected).unwrap().tree(); 1472 token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap().0.syntax_node();
1473 let expected_tree =
1474 token_tree_to_syntax_node(&expected, FragmentKind::Items).unwrap().0.syntax_node();
1441 1475
1442 ( 1476 (
1443 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), 1477 debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
1444 debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(), 1478 debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
1445 ) 1479 )
1446 } 1480 }
1447 1481
1448 MacroKind::Stmts => { 1482 MacroKind::Stmts => {
1449 let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree(); 1483 let expanded_tree = token_tree_to_syntax_node(&expanded, FragmentKind::Statements)
1450 let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree(); 1484 .unwrap()
1485 .0
1486 .syntax_node();
1487 let expected_tree = token_tree_to_syntax_node(&expected, FragmentKind::Statements)
1488 .unwrap()
1489 .0
1490 .syntax_node();
1451 1491
1452 ( 1492 (
1453 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(), 1493 debug_dump_ignore_spaces(&expanded_tree).trim().to_string(),
1454 debug_dump_ignore_spaces(expected_tree.syntax()).trim().to_string(), 1494 debug_dump_ignore_spaces(&expected_tree).trim().to_string(),
1455 ) 1495 )
1456 } 1496 }
1457 }; 1497 };