diff options
| author | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-04-05 15:17:07 +0100 |
|---|---|---|
| committer | bors[bot] <bors[bot]@users.noreply.github.com> | 2019-04-05 15:17:07 +0100 |
| commit | a52e86f9a9a21313a1543823b92e82c0a30e0870 (patch) | |
| tree | 35d8b8b04a8e17a162fd6b95105db34919d59507 /crates | |
| parent | be9a44e9bad262ac5e615730e540fd434f846a0e (diff) | |
| parent | 7abc06bd576264cb6b7c8becdbd1a8c0e914463d (diff) | |
Merge #1112
1112: Fix literal support in token tree to ast item list r=matklad a=edwin0cheng
This PR implements the following things:
1. Expose `next_token` from `ra_parser`
2. Fix the literal conversion in `token_tree_to_ast_item_list`
3. Add test for the conversion
Co-authored-by: Edwin Cheng <[email protected]>
Diffstat (limited to 'crates')
| -rw-r--r-- | crates/ra_mbe/src/lib.rs | 48 | ||||
| -rw-r--r-- | crates/ra_mbe/src/syntax_bridge.rs | 47 | ||||
| -rw-r--r-- | crates/ra_syntax/src/lib.rs | 2 | ||||
| -rw-r--r-- | crates/ra_syntax/src/parsing.rs | 2 | ||||
| -rw-r--r-- | crates/ra_syntax/src/parsing/lexer.rs | 9 |
5 files changed, 101 insertions, 7 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs index 93246f54a..4203929d4 100644 --- a/crates/ra_mbe/src/lib.rs +++ b/crates/ra_mbe/src/lib.rs | |||
| @@ -167,7 +167,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
| 167 | ) | 167 | ) |
| 168 | } | 168 | } |
| 169 | 169 | ||
| 170 | fn create_rules(macro_definition: &str) -> MacroRules { | 170 | pub(crate) fn create_rules(macro_definition: &str) -> MacroRules { |
| 171 | let source_file = ast::SourceFile::parse(macro_definition); | 171 | let source_file = ast::SourceFile::parse(macro_definition); |
| 172 | let macro_definition = | 172 | let macro_definition = |
| 173 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 173 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
| @@ -176,7 +176,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
| 176 | crate::MacroRules::parse(&definition_tt).unwrap() | 176 | crate::MacroRules::parse(&definition_tt).unwrap() |
| 177 | } | 177 | } |
| 178 | 178 | ||
| 179 | fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree { | 179 | pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree { |
| 180 | let source_file = ast::SourceFile::parse(invocation); | 180 | let source_file = ast::SourceFile::parse(invocation); |
| 181 | let macro_invocation = | 181 | let macro_invocation = |
| 182 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 182 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
| @@ -186,7 +186,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
| 186 | rules.expand(&invocation_tt).unwrap() | 186 | rules.expand(&invocation_tt).unwrap() |
| 187 | } | 187 | } |
| 188 | 188 | ||
| 189 | fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { | 189 | pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { |
| 190 | let expanded = expand(rules, invocation); | 190 | let expanded = expand(rules, invocation); |
| 191 | assert_eq!(expanded.to_string(), expansion); | 191 | assert_eq!(expanded.to_string(), expansion); |
| 192 | } | 192 | } |
| @@ -337,4 +337,46 @@ SOURCE_FILE@[0; 40) | |||
| 337 | ); | 337 | ); |
| 338 | } | 338 | } |
| 339 | 339 | ||
| 340 | #[test] | ||
| 341 | fn expand_literals_to_token_tree() { | ||
| 342 | fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { | ||
| 343 | if let tt::TokenTree::Subtree(subtree) = tt { | ||
| 344 | return &subtree; | ||
| 345 | } | ||
| 346 | unreachable!("It is not a subtree"); | ||
| 347 | } | ||
| 348 | |||
| 349 | fn to_literal(tt: &tt::TokenTree) -> &tt::Literal { | ||
| 350 | if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt { | ||
| 351 | return lit; | ||
| 352 | } | ||
| 353 | unreachable!("It is not a literal"); | ||
| 354 | } | ||
| 355 | |||
| 356 | let rules = create_rules( | ||
| 357 | r#" | ||
| 358 | macro_rules! literals { | ||
| 359 | ($i:ident) => { | ||
| 360 | { | ||
| 361 | let a = 'c'; | ||
| 362 | let c = 1000; | ||
| 363 | let f = 12E+99_f64; | ||
| 364 | let s = "rust1"; | ||
| 365 | } | ||
| 366 | } | ||
| 367 | } | ||
| 368 | "#, | ||
| 369 | ); | ||
| 370 | let expansion = expand(&rules, "literals!(foo)"); | ||
| 371 | let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; | ||
| 372 | |||
| 373 | // [let] [a] [=] ['c'] [;] | ||
| 374 | assert_eq!(to_literal(&stm_tokens[3]).text, "'c'"); | ||
| 375 | // [let] [c] [=] [1000] [;] | ||
| 376 | assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000"); | ||
| 377 | // [let] [f] [=] [12E+99_f64] [;] | ||
| 378 | assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64"); | ||
| 379 | // [let] [s] [=] ["rust1"] [;] | ||
| 380 | assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\""); | ||
| 381 | } | ||
| 340 | } | 382 | } |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 257503de8..139a0fd33 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
| @@ -1,7 +1,7 @@ | |||
| 1 | use ra_parser::{TokenSource, TreeSink, ParseError}; | 1 | use ra_parser::{TokenSource, TreeSink, ParseError}; |
| 2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
| 3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, | 3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, |
| 4 | ast, SyntaxKind::*, TextUnit | 4 | ast, SyntaxKind::*, TextUnit, classify_literal |
| 5 | }; | 5 | }; |
| 6 | 6 | ||
| 7 | /// Maps `tt::TokenId` to the relative range of the original token. | 7 | /// Maps `tt::TokenId` to the relative range of the original token. |
| @@ -103,10 +103,12 @@ fn convert_tt( | |||
| 103 | Some(res) | 103 | Some(res) |
| 104 | } | 104 | } |
| 105 | 105 | ||
| 106 | #[derive(Debug)] | ||
| 106 | struct TtTokenSource { | 107 | struct TtTokenSource { |
| 107 | tokens: Vec<TtToken>, | 108 | tokens: Vec<TtToken>, |
| 108 | } | 109 | } |
| 109 | 110 | ||
| 111 | #[derive(Debug)] | ||
| 110 | struct TtToken { | 112 | struct TtToken { |
| 111 | kind: SyntaxKind, | 113 | kind: SyntaxKind, |
| 112 | is_joint_to_next: bool, | 114 | is_joint_to_next: bool, |
| @@ -189,7 +191,7 @@ impl TtTokenSource { | |||
| 189 | { | 191 | { |
| 190 | let tok = match token { | 192 | let tok = match token { |
| 191 | tt::Leaf::Literal(l) => TtToken { | 193 | tt::Leaf::Literal(l) => TtToken { |
| 192 | kind: SyntaxKind::INT_NUMBER, // FIXME | 194 | kind: classify_literal(&l.text).unwrap().kind, |
| 193 | is_joint_to_next: false, | 195 | is_joint_to_next: false, |
| 194 | text: l.text.clone(), | 196 | text: l.text.clone(), |
| 195 | }, | 197 | }, |
| @@ -355,3 +357,44 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
| 355 | self.inner.error(error, self.text_pos) | 357 | self.inner.error(error, self.text_pos) |
| 356 | } | 358 | } |
| 357 | } | 359 | } |
| 360 | |||
| 361 | #[cfg(test)] | ||
| 362 | mod tests { | ||
| 363 | use super::*; | ||
| 364 | use crate::tests::{expand, create_rules}; | ||
| 365 | |||
| 366 | #[test] | ||
| 367 | fn convert_tt_token_source() { | ||
| 368 | let rules = create_rules( | ||
| 369 | r#" | ||
| 370 | macro_rules! literals { | ||
| 371 | ($i:ident) => { | ||
| 372 | { | ||
| 373 | let a = 'c'; | ||
| 374 | let c = 1000; | ||
| 375 | let f = 12E+99_f64; | ||
| 376 | let s = "rust1"; | ||
| 377 | } | ||
| 378 | } | ||
| 379 | } | ||
| 380 | "#, | ||
| 381 | ); | ||
| 382 | let expansion = expand(&rules, "literals!(foo)"); | ||
| 383 | let tt_src = TtTokenSource::new(&expansion); | ||
| 384 | |||
| 385 | // [{] | ||
| 386 | // [let] [a] [=] ['c'] [;] | ||
| 387 | assert_eq!(tt_src.tokens[1 + 3].text, "'c'"); | ||
| 388 | assert_eq!(tt_src.tokens[1 + 3].kind, CHAR); | ||
| 389 | // [let] [c] [=] [1000] [;] | ||
| 390 | assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000"); | ||
| 391 | assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER); | ||
| 392 | // [let] [f] [=] [12E+99_f64] [;] | ||
| 393 | assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64"); | ||
| 394 | assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER); | ||
| 395 | |||
| 396 | // [let] [s] [=] ["rust1"] [;] | ||
| 397 | assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\""); | ||
| 398 | assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING); | ||
| 399 | } | ||
| 400 | } | ||
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index e1088e296..c56bc9f16 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
| @@ -40,7 +40,7 @@ pub use crate::{ | |||
| 40 | syntax_text::SyntaxText, | 40 | syntax_text::SyntaxText, |
| 41 | syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken}, | 41 | syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken}, |
| 42 | ptr::{SyntaxNodePtr, AstPtr}, | 42 | ptr::{SyntaxNodePtr, AstPtr}, |
| 43 | parsing::{tokenize, Token}, | 43 | parsing::{tokenize, classify_literal, Token}, |
| 44 | }; | 44 | }; |
| 45 | 45 | ||
| 46 | use ra_text_edit::AtomTextEdit; | 46 | use ra_text_edit::AtomTextEdit; |
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs index ad5668a65..15d69c5ab 100644 --- a/crates/ra_syntax/src/parsing.rs +++ b/crates/ra_syntax/src/parsing.rs | |||
| @@ -11,7 +11,7 @@ use crate::{ | |||
| 11 | syntax_node::GreenNode, | 11 | syntax_node::GreenNode, |
| 12 | }; | 12 | }; |
| 13 | 13 | ||
| 14 | pub use self::lexer::{tokenize, Token}; | 14 | pub use self::lexer::{tokenize, classify_literal, Token}; |
| 15 | 15 | ||
| 16 | pub(crate) use self::reparsing::incremental_reparse; | 16 | pub(crate) use self::reparsing::incremental_reparse; |
| 17 | 17 | ||
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index 36e841609..3ae42912c 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
| @@ -214,3 +214,12 @@ fn scan_literal_suffix(ptr: &mut Ptr) { | |||
| 214 | } | 214 | } |
| 215 | ptr.bump_while(is_ident_continue); | 215 | ptr.bump_while(is_ident_continue); |
| 216 | } | 216 | } |
| 217 | |||
| 218 | pub fn classify_literal(text: &str) -> Option<Token> { | ||
| 219 | let tkn = next_token(text); | ||
| 220 | if !tkn.kind.is_literal() || tkn.len.to_usize() != text.len() { | ||
| 221 | return None; | ||
| 222 | } | ||
| 223 | |||
| 224 | Some(tkn) | ||
| 225 | } | ||
