author     bors[bot] <bors[bot]@users.noreply.github.com>    2019-04-05 15:17:07 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>    2019-04-05 15:17:07 +0100
commit     a52e86f9a9a21313a1543823b92e82c0a30e0870 (patch)
tree       35d8b8b04a8e17a162fd6b95105db34919d59507 /crates/ra_mbe/src
parent     be9a44e9bad262ac5e615730e540fd434f846a0e (diff)
parent     7abc06bd576264cb6b7c8becdbd1a8c0e914463d (diff)
Merge #1112
1112: Fix literal support in token tree to ast item list r=matklad a=edwin0cheng
This PR implements the following things:
1. Expose `next_token` from `ra_parser`
2. Fix the literal conversion in `token_tree_to_ast_item_list` (a sketch of the idea follows below)
3. Add a test for the conversion
Co-authored-by: Edwin Cheng <[email protected]>
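The core of item 2 lands in `syntax_bridge.rs`: instead of hard-coding every literal as `SyntaxKind::INT_NUMBER`, the token source now derives the kind from the literal's text via `classify_literal(&l.text).unwrap().kind`. Below is a minimal standalone sketch of that idea only; the `LiteralKind` enum and `classify` helper are illustrative stand-ins, not the actual `ra_syntax` API.

```rust
// Illustrative stand-in for the classification that `classify_literal`
// performs in ra_syntax; not the real API, just the shape of the idea.
#[derive(Debug, PartialEq)]
enum LiteralKind {
    Char,
    String,
    IntNumber,
    FloatNumber,
}

// Naively classify a literal by inspecting its text. Deliberately simplistic
// (ignores hex literals, byte strings, etc.), but it covers the literals
// exercised by the new tests in this PR.
fn classify(text: &str) -> Option<LiteralKind> {
    let first = text.chars().next()?;
    Some(match first {
        '\'' => LiteralKind::Char,
        '"' => LiteralKind::String,
        c if c.is_ascii_digit() => {
            let looks_float = text.contains('.')
                || text.contains("E+")
                || text.contains("e+")
                || text.ends_with("f32")
                || text.ends_with("f64");
            if looks_float {
                LiteralKind::FloatNumber
            } else {
                LiteralKind::IntNumber
            }
        }
        _ => return None,
    })
}

fn main() {
    // The same literals the new tests check: 'c', 1000, 12E+99_f64, "rust1".
    assert_eq!(classify("'c'"), Some(LiteralKind::Char));
    assert_eq!(classify("1000"), Some(LiteralKind::IntNumber));
    assert_eq!(classify("12E+99_f64"), Some(LiteralKind::FloatNumber));
    assert_eq!(classify("\"rust1\""), Some(LiteralKind::String));
}
```

In the actual change the lexer's own classification is reused, and the new `convert_tt_token_source` test asserts the resulting kinds (`CHAR`, `INT_NUMBER`, `FLOAT_NUMBER`, `STRING`) directly.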
Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r--   crates/ra_mbe/src/lib.rs             48
-rw-r--r--   crates/ra_mbe/src/syntax_bridge.rs   47
2 files changed, 90 insertions, 5 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 93246f54a..4203929d4 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -167,7 +167,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         )
     }
 
-    fn create_rules(macro_definition: &str) -> MacroRules {
+    pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
         let source_file = ast::SourceFile::parse(macro_definition);
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -176,7 +176,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
-    fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
+    pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
         let source_file = ast::SourceFile::parse(invocation);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -186,7 +186,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         rules.expand(&invocation_tt).unwrap()
     }
 
-    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+    pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
         let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
     }
@@ -337,4 +337,46 @@ SOURCE_FILE@[0; 40)
         );
     }
 
+    #[test]
+    fn expand_literals_to_token_tree() {
+        fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
+            if let tt::TokenTree::Subtree(subtree) = tt {
+                return &subtree;
+            }
+            unreachable!("It is not a subtree");
+        }
+
+        fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
+            if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
+                return lit;
+            }
+            unreachable!("It is not a literal");
+        }
+
+        let rules = create_rules(
+            r#"
+            macro_rules! literals {
+                ($i:ident) => {
+                    {
+                        let a = 'c';
+                        let c = 1000;
+                        let f = 12E+99_f64;
+                        let s = "rust1";
+                    }
+                }
+            }
+            "#,
+        );
+        let expansion = expand(&rules, "literals!(foo)");
+        let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees;
+
+        // [let] [a] [=] ['c'] [;]
+        assert_eq!(to_literal(&stm_tokens[3]).text, "'c'");
+        // [let] [c] [=] [1000] [;]
+        assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000");
+        // [let] [f] [=] [12E+99_f64] [;]
+        assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64");
+        // [let] [s] [=] ["rust1"] [;]
+        assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\"");
+    }
 }
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 257503de8..139a0fd33 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -1,7 +1,7 @@
 use ra_parser::{TokenSource, TreeSink, ParseError};
 use ra_syntax::{
     AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
-    ast, SyntaxKind::*, TextUnit
+    ast, SyntaxKind::*, TextUnit, classify_literal
 };
 
 /// Maps `tt::TokenId` to the relative range of the original token.
@@ -103,10 +103,12 @@ fn convert_tt(
     Some(res)
 }
 
+#[derive(Debug)]
 struct TtTokenSource {
     tokens: Vec<TtToken>,
 }
 
+#[derive(Debug)]
 struct TtToken {
     kind: SyntaxKind,
     is_joint_to_next: bool,
@@ -189,7 +191,7 @@ impl TtTokenSource {
     {
         let tok = match token {
             tt::Leaf::Literal(l) => TtToken {
-                kind: SyntaxKind::INT_NUMBER, // FIXME
+                kind: classify_literal(&l.text).unwrap().kind,
                 is_joint_to_next: false,
                 text: l.text.clone(),
             },
@@ -355,3 +357,44 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         self.inner.error(error, self.text_pos)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::tests::{expand, create_rules};
+
+    #[test]
+    fn convert_tt_token_source() {
+        let rules = create_rules(
+            r#"
+            macro_rules! literals {
+                ($i:ident) => {
+                    {
+                        let a = 'c';
+                        let c = 1000;
+                        let f = 12E+99_f64;
+                        let s = "rust1";
+                    }
+                }
+            }
+            "#,
+        );
+        let expansion = expand(&rules, "literals!(foo)");
+        let tt_src = TtTokenSource::new(&expansion);
+
+        // [{]
+        // [let] [a] [=] ['c'] [;]
+        assert_eq!(tt_src.tokens[1 + 3].text, "'c'");
+        assert_eq!(tt_src.tokens[1 + 3].kind, CHAR);
+        // [let] [c] [=] [1000] [;]
+        assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000");
+        assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER);
+        // [let] [f] [=] [12E+99_f64] [;]
+        assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64");
+        assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER);
+
+        // [let] [s] [=] ["rust1"] [;]
+        assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\"");
+        assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING);
+    }
+}