-rw-r--r--  crates/ra_mbe/src/lib.rs           | 61
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 29
2 files changed, 80 insertions, 10 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 768f335fa..907402f5f 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -164,14 +164,18 @@ impl_froms!(TokenTree: Leaf, Subtree);
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
-    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+    fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
         let source_file = ast::SourceFile::parse(invocation);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
         let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
 
-        let expanded = rules.expand(&invocation_tt).unwrap();
+        rules.expand(&invocation_tt).unwrap()
+    }
+
+    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+        let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
     }
 
@@ -268,4 +272,57 @@ impl_froms!(TokenTree: Leaf, Subtree);
         assert_expansion(&rules, "foo! { Foo,# Bar }", "struct Foo ; struct Bar ;");
     }
 
+    #[test]
+    fn expand_to_item_list() {
+        let rules = create_rules(
+            "
+            macro_rules! structs {
+                ($($i:ident),*) => {
+                    $(struct $i { field: u32 } )*
+                }
+            }
+            ",
+        );
+        let expansion = expand(&rules, "structs!(Foo, Bar)");
+        let tree = token_tree_to_ast_item_list(&expansion);
+        assert_eq!(
+            tree.syntax().debug_dump().trim(),
+            r#"
+SOURCE_FILE@[0; 40)
+  STRUCT_DEF@[0; 20)
+    STRUCT_KW@[0; 6)
+    NAME@[6; 9)
+      IDENT@[6; 9) "Foo"
+    NAMED_FIELD_DEF_LIST@[9; 20)
+      L_CURLY@[9; 10)
+      NAMED_FIELD_DEF@[10; 19)
+        NAME@[10; 15)
+          IDENT@[10; 15) "field"
+        COLON@[15; 16)
+        PATH_TYPE@[16; 19)
+          PATH@[16; 19)
+            PATH_SEGMENT@[16; 19)
+              NAME_REF@[16; 19)
+                IDENT@[16; 19) "u32"
+      R_CURLY@[19; 20)
+  STRUCT_DEF@[20; 40)
+    STRUCT_KW@[20; 26)
+    NAME@[26; 29)
+      IDENT@[26; 29) "Bar"
+    NAMED_FIELD_DEF_LIST@[29; 40)
+      L_CURLY@[29; 30)
+      NAMED_FIELD_DEF@[30; 39)
+        NAME@[30; 35)
+          IDENT@[30; 35) "field"
+        COLON@[35; 36)
+        PATH_TYPE@[36; 39)
+          PATH@[36; 39)
+            PATH_SEGMENT@[36; 39)
+              NAME_REF@[36; 39)
+                IDENT@[36; 39) "u32"
+      R_CURLY@[39; 40)"#
+                .trim()
+        );
+    }
+
 }
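
The test-side change above factors the shared parse-and-expand steps into an `expand` helper, so new tests can work with the expanded tt::Subtree directly instead of only comparing its string form. The following is a minimal, self-contained sketch of that helper split; `Subtree`, `MacroRules`, and `expand_invocation` here are hypothetical stand-ins, not the ra_mbe types.

// Sketch only: local stand-ins model the helper split shown in the diff above.
#[derive(Debug, PartialEq)]
struct Subtree(String);

struct MacroRules;

impl MacroRules {
    // Hypothetical stand-in for: parse the invocation, lower it to a token
    // tree, and run the macro-by-example expander on it.
    fn expand_invocation(&self, invocation: &str) -> Subtree {
        Subtree(format!("expanded({invocation})"))
    }
}

// Shared setup lives in one place and returns the expanded tree...
fn expand(rules: &MacroRules, invocation: &str) -> Subtree {
    rules.expand_invocation(invocation)
}

// ...so the string-comparison assertion becomes a thin wrapper over it.
fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
    assert_eq!(expand(rules, invocation).0, expansion);
}

fn main() {
    let rules = MacroRules;
    assert_expansion(&rules, "structs!(Foo, Bar)", "expanded(structs!(Foo, Bar))");
    // A test like expand_to_item_list calls `expand` directly and feeds the
    // result to a tree-building step instead of comparing strings.
    let tree = expand(&rules, "structs!(Foo, Bar)");
    assert_eq!(tree, Subtree("expanded(structs!(Foo, Bar))".to_string()));
}
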
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 521b96d68..3fe5abba3 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -129,17 +129,26 @@ impl TtTokenSource {
                 is_joint_to_next: false,
                 text: l.text.clone(),
             },
-            tt::Leaf::Punct(p) => Tok {
-                kind: SyntaxKind::from_char(p.char).unwrap(),
-                is_joint_to_next: p.spacing == tt::Spacing::Joint,
-                text: {
+            tt::Leaf::Punct(p) => {
+                let kind = match p.char {
+                    // the lexer may produce compound tokens for these characters
+                    '.' => DOT,
+                    ':' => COLON,
+                    '=' => EQ,
+                    '!' => EXCL,
+                    '-' => MINUS,
+                    c => SyntaxKind::from_char(c).unwrap(),
+                };
+                let text = {
                     let mut buf = [0u8; 4];
                     let s: &str = p.char.encode_utf8(&mut buf);
                     SmolStr::new(s)
-                },
-            },
+                };
+                Tok { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
+            }
             tt::Leaf::Ident(ident) => {
-                Tok { kind: IDENT, is_joint_to_next: false, text: ident.text.clone() }
+                let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
+                Tok { kind, is_joint_to_next: false, text: ident.text.clone() }
             }
         };
         self.tokens.push(tok)
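
The Punct arm above special-cases characters that the lexer would otherwise fold into compound tokens (`..`, `::`, `=>`, `!=`, `->`, ...), and the Ident arm now checks for keywords before falling back to IDENT. Below is a small self-contained sketch of the same mapping idea; the `Kind` enum and the helper names are hypothetical stand-ins for ra_syntax's SyntaxKind, and only a couple of keywords are shown.

// Sketch only: `Kind` stands in for SyntaxKind; the real code maps to DOT,
// COLON, EQ, EXCL, MINUS and uses SyntaxKind::from_char / from_keyword.
#[derive(Debug, PartialEq)]
enum Kind {
    Dot,
    Colon,
    Eq,
    Excl,
    Minus,
    Ident,
    Keyword(&'static str),
    Other(char),
}

// A punct coming out of a token tree is always a single character, so it maps
// to the single-character kind even where written source would lex as a
// compound token such as `..` or `=>`.
fn punct_kind(c: char) -> Kind {
    match c {
        '.' => Kind::Dot,
        ':' => Kind::Colon,
        '=' => Kind::Eq,
        '!' => Kind::Excl,
        '-' => Kind::Minus,
        other => Kind::Other(other),
    }
}

// Idents are re-checked against the keyword list, mirroring
// `SyntaxKind::from_keyword(..).unwrap_or(IDENT)` in the diff.
fn ident_kind(text: &str) -> Kind {
    match text {
        "struct" => Kind::Keyword("struct"),
        "fn" => Kind::Keyword("fn"),
        _ => Kind::Ident,
    }
}

fn main() {
    assert_eq!(punct_kind('.'), Kind::Dot);
    assert_eq!(punct_kind('+'), Kind::Other('+'));
    assert_eq!(ident_kind("struct"), Kind::Keyword("struct"));
    assert_eq!(ident_kind("Foo"), Kind::Ident);
}

The keyword fallback is presumably what lets the expanded `struct` idents show up as STRUCT_KW when the token tree is re-parsed into the item list checked by expand_to_item_list above.
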
@@ -161,7 +170,11 @@ impl TtTokenSource {
 
 impl TokenSource for TtTokenSource {
     fn token_kind(&self, pos: usize) -> SyntaxKind {
-        self.tokens[pos].kind
+        if let Some(tok) = self.tokens.get(pos) {
+            tok.kind
+        } else {
+            SyntaxKind::EOF
+        }
     }
     fn is_token_joint_to_next(&self, pos: usize) -> bool {
         self.tokens[pos].is_joint_to_next
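
Finally, token_kind above switches from indexing to a checked lookup, so positions past the last token report EOF instead of panicking. A minimal sketch of that pattern follows, using hypothetical stand-in types rather than the real TokenSource trait.

// Sketch only: `Kind`, `Token`, and `Tokens` are stand-ins for the real types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Kind {
    Ident,
    Eof,
}

struct Token {
    kind: Kind,
}

struct Tokens(Vec<Token>);

impl Tokens {
    // Out-of-range positions yield an EOF sentinel instead of panicking,
    // mirroring the `self.tokens.get(pos)` change in the diff above.
    fn token_kind(&self, pos: usize) -> Kind {
        self.0.get(pos).map(|t| t.kind).unwrap_or(Kind::Eof)
    }
}

fn main() {
    let toks = Tokens(vec![Token { kind: Kind::Ident }]);
    assert_eq!(toks.token_kind(0), Kind::Ident);
    assert_eq!(toks.token_kind(1), Kind::Eof);
}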