author     Edwin Cheng <[email protected]>    2019-04-05 13:58:24 +0100
committer  Edwin Cheng <[email protected]>    2019-04-05 13:58:24 +0100
commit     7abc06bd576264cb6b7c8becdbd1a8c0e914463d (patch)
tree       35d8b8b04a8e17a162fd6b95105db34919d59507 /crates
parent     1ea0238e538dc332b23698d54c02d8bd037f58bb (diff)
Add proper test for literals and fixed typo bug
Diffstat (limited to 'crates')

-rw-r--r--  crates/ra_mbe/src/lib.rs               | 11
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs     | 43
-rw-r--r--  crates/ra_syntax/src/parsing/lexer.rs  |  2

3 files changed, 51 insertions, 5 deletions
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 38f0049ed..4203929d4 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -167,7 +167,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         )
     }
 
-    fn create_rules(macro_definition: &str) -> MacroRules {
+    pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
         let source_file = ast::SourceFile::parse(macro_definition);
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -176,7 +176,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
-    fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
+    pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
         let source_file = ast::SourceFile::parse(invocation);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
@@ -186,7 +186,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         rules.expand(&invocation_tt).unwrap()
     }
 
-    fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
+    pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
         let expanded = expand(rules, invocation);
         assert_eq!(expanded.to_string(), expansion);
     }
@@ -338,7 +338,7 @@ SOURCE_FILE@[0; 40)
     }
 
     #[test]
-    fn expand_literals_to_item_list() {
+    fn expand_literals_to_token_tree() {
         fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
             if let tt::TokenTree::Subtree(subtree) = tt {
                 return &subtree;
@@ -361,6 +361,7 @@ SOURCE_FILE@[0; 40)
                         let a = 'c';
                         let c = 1000;
                         let f = 12E+99_f64;
+                        let s = "rust1";
                     }
                 }
             }
@@ -375,5 +376,7 @@ SOURCE_FILE@[0; 40)
         assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000");
         // [let] [f] [=] [12E+99_f64] [;]
         assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64");
+        // [let] [s] [=] ["rust1"] [;]
+        assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\"");
     }
 }
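
Note: the assertions in `expand_literals_to_token_tree` call a `to_literal` helper whose body lies outside the hunks above. A minimal sketch of what such a helper presumably looks like, mirroring the `to_subtree` helper shown in the diff and assuming the `tt::Leaf::Literal` variant; this reconstruction is not part of the commit:

```rust
// Hypothetical reconstruction of the elided `to_literal` helper (not in this
// commit). It unwraps a literal leaf so the test can inspect its `text` field.
fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
    if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
        return lit;
    }
    unreachable!("expected a literal leaf");
}
```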
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 9664280b5..139a0fd33 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -103,10 +103,12 @@ fn convert_tt(
     Some(res)
 }
 
+#[derive(Debug)]
 struct TtTokenSource {
     tokens: Vec<TtToken>,
 }
 
+#[derive(Debug)]
 struct TtToken {
     kind: SyntaxKind,
     is_joint_to_next: bool,
@@ -355,3 +357,44 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         self.inner.error(error, self.text_pos)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::tests::{expand, create_rules};
+
+    #[test]
+    fn convert_tt_token_source() {
+        let rules = create_rules(
+            r#"
+            macro_rules! literals {
+                ($i:ident) => {
+                    {
+                        let a = 'c';
+                        let c = 1000;
+                        let f = 12E+99_f64;
+                        let s = "rust1";
+                    }
+                }
+            }
+            "#,
+        );
+        let expansion = expand(&rules, "literals!(foo)");
+        let tt_src = TtTokenSource::new(&expansion);
+
+        // [{]
+        // [let] [a] [=] ['c'] [;]
+        assert_eq!(tt_src.tokens[1 + 3].text, "'c'");
+        assert_eq!(tt_src.tokens[1 + 3].kind, CHAR);
+        // [let] [c] [=] [1000] [;]
+        assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000");
+        assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER);
+        // [let] [f] [=] [12E+99_f64] [;]
+        assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64");
+        assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER);
+
+        // [let] [s] [=] ["rust1"] [;]
+        assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\"");
+        assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING);
+    }
+}
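
The two `#[derive(Debug)]` additions make `TtTokenSource` and `TtToken` printable, which pays off when one of the assertions in `convert_tt_token_source` fails. A purely illustrative helper, not part of the commit, assuming only the fields the test already touches (`tokens`, `kind`, `text`):

```rust
// Hypothetical debugging aid (not in the commit): dump every converted token
// so an off-by-one in the index arithmetic above is easy to spot.
fn dump_token_source(src: &TtTokenSource) {
    for (idx, tok) in src.tokens.iter().enumerate() {
        eprintln!("{:3}: {:?} {:?}", idx, tok.kind, tok.text);
    }
}
```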
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs
index e75f3aae0..3ae42912c 100644
--- a/crates/ra_syntax/src/parsing/lexer.rs
+++ b/crates/ra_syntax/src/parsing/lexer.rs
@@ -217,7 +217,7 @@ fn scan_literal_suffix(ptr: &mut Ptr) {
 
 pub fn classify_literal(text: &str) -> Option<Token> {
     let tkn = next_token(text);
-    if tkn.kind.is_literal() || tkn.len.to_usize() != text.len() {
+    if !tkn.kind.is_literal() || tkn.len.to_usize() != text.len() {
        return None;
     }
 
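
This one-character change is the "typo bug" from the commit message: without the `!`, `classify_literal` returned `None` precisely when the input was a literal. A sketch of the intended behaviour after the fix, written as a hypothetical test that is not part of the commit; the inputs mirror the literals exercised by the new ra_mbe tests above:

```rust
// Behaviour sketch (assumption, not in the commit): `classify_literal` should
// return `Some` only when the entire input is a single literal token.
#[test]
fn classify_literal_accepts_only_whole_literals() {
    assert!(classify_literal("1000").is_some());        // integer literal
    assert!(classify_literal("12E+99_f64").is_some());  // float literal
    assert!(classify_literal("\"rust1\"").is_some());   // string literal
    assert!(classify_literal("foo").is_none());         // identifier, not a literal
    assert!(classify_literal("1000 ").is_none());       // token shorter than the input
}
```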