author     bors[bot] <bors[bot]@users.noreply.github.com>  2019-04-21 22:28:06 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>  2019-04-21 22:28:06 +0100
commit     bbc5c1d24e1a641b134f634516828301e8cfc320
tree       f10fe8412874714edcc2d317ab7822b9bbf80a74
parent     ee94edc722c9649bd16bb754959ad349593045e2
parent     120bfde3c22ed662cd4d3c35e91a739a86d0e990
Merge #1189
1189: Fix #1178 r=matklad a=edwin0cheng
This PR improves and fixes `mbe`:
1. Fixed an offset bug in `SubTreeWalker`.
2. Handled the repetition matchers (`*`, `+`, `?`) properly.
3. Added the missing separator when expanding repetitions in the RHS (see the illustrative sketch below).
4. Fixed a bug in the single-token, empty-delimiter subtree case: the current `mbe_expander` creates a delimiter subtree for each expansion, which makes every punct expansion in the `tt` matcher case incorrect.
5. Fixed a bug in lifetime token handling.
6. Added more information to the log when `parse_macro` fails.
7. Added tests for the above.
Co-authored-by: Edwin Cheng <[email protected]>
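As a standalone illustration of item 3 (separators between repetitions transcribed on the RHS), here is an ordinary `macro_rules!` macro of the same shape as the new `test_match_group_pattern_with_multiple_statement_without_semi` and `test_vec` tests; the `call_all`/`run_all` names are made up for the example and are not part of this commit:

```rust
// A declarative macro whose right-hand side repeats a fragment with a `;`
// separator; a correct expander must emit the separator between the
// repeated calls, i.e. `foo() ; bar()`.
macro_rules! call_all {
    ($($f:ident),*) => {
        fn run_all() { $( $f() );* }
    };
}

fn foo() { println!("foo"); }
fn bar() { println!("bar"); }

call_all!(foo, bar);

fn main() {
    run_all(); // expands to `foo(); bar()`
}
```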
-rw-r--r--  crates/ra_hir/src/ids.rs             |  37
-rw-r--r--  crates/ra_mbe/src/lib.rs             |  89
-rw-r--r--  crates/ra_mbe/src/mbe_expander.rs    |  61
-rw-r--r--  crates/ra_mbe/src/mbe_parser.rs      |  21
-rw-r--r--  crates/ra_mbe/src/subtree_source.rs  |  20
5 files changed, 194 insertions, 34 deletions
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 2a1ed9b81..e771a311c 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -63,11 +63,15 @@ impl HirFileId {
         match file_id.0 {
             HirFileIdRepr::File(file_id) => db.parse(file_id),
             HirFileIdRepr::Macro(macro_call_id) => {
-                parse_macro(db, macro_call_id).unwrap_or_else(|| {
+                parse_macro(db, macro_call_id).unwrap_or_else(|err| {
                     // Note:
                     // The final goal we would like to make all parse_macro success,
                     // such that the following log will not call anyway.
-                    log::warn!("fail on macro_parse: {}", macro_call_id.debug_dump(db));
+                    log::warn!(
+                        "fail on macro_parse: (reason: {}) {}",
+                        err,
+                        macro_call_id.debug_dump(db)
+                    );
 
                     // returning an empty string looks fishy...
                     SourceFile::parse("")
@@ -77,14 +81,20 @@ impl HirFileId {
     }
 }
 
-fn parse_macro(db: &impl DefDatabase, macro_call_id: MacroCallId) -> Option<TreeArc<SourceFile>> {
+fn parse_macro(
+    db: &impl DefDatabase,
+    macro_call_id: MacroCallId,
+) -> Result<TreeArc<SourceFile>, String> {
     let loc = macro_call_id.loc(db);
     let macro_call = loc.ast_id.to_node(db);
-    let (macro_arg, _) = macro_call.token_tree().and_then(mbe::ast_to_token_tree)?;
-
-    let macro_rules = db.macro_def(loc.def)?;
-    let tt = macro_rules.expand(&macro_arg).ok()?;
-    Some(mbe::token_tree_to_ast_item_list(&tt))
+    let (macro_arg, _) = macro_call
+        .token_tree()
+        .and_then(mbe::ast_to_token_tree)
+        .ok_or("Fail to args in to tt::TokenTree")?;
+
+    let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
+    let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
+    Ok(mbe::token_tree_to_ast_item_list(&tt))
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
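The hunk above turns `parse_macro` from an `Option` into a `Result<_, String>` so the caller can log the reason for a failure. A minimal self-contained sketch of the same `ok_or`/`map_err`/`?` pattern (all names below are illustrative, not from ra_hir):

```rust
// Convert `Option`s into `Result`s with `ok_or`, convert foreign error
// types with `map_err`, and let `?` thread the String error to the caller.
fn lookup(key: &str) -> Option<i32> {
    if key == "answer" { Some(42) } else { None }
}

fn double(value: i32) -> Result<i32, ()> {
    value.checked_mul(2).ok_or(())
}

fn run(key: &str) -> Result<i32, String> {
    let value = lookup(key).ok_or("Fail to find key")?;
    let doubled = double(value).map_err(|err| format!("{:?}", err))?;
    Ok(doubled)
}

fn main() {
    assert_eq!(run("answer"), Ok(84));
    assert!(run("missing").is_err());
}
```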
@@ -311,11 +321,18 @@ impl MacroCallId {
     pub fn debug_dump(&self, db: &impl DefDatabase) -> String {
         let loc = self.clone().loc(db);
         let node = loc.ast_id.to_node(db);
-        let syntax_str = node.syntax().to_string();
+        let syntax_str = node.syntax().text().chunks().collect::<Vec<_>>().join(" ");
 
         // dump the file name
         let file_id: HirFileId = self.clone().into();
         let original = file_id.original_file(db);
-        format!("macro call [file: {:#?}] : {}", db.file_relative_path(original), syntax_str)
+        let macro_rules = db.macro_def(loc.def);
+
+        format!(
+            "macro call [file: {:#?}] : {}\nhas rules: {}",
+            db.file_relative_path(original),
+            syntax_str,
+            macro_rules.is_some()
+        )
     }
 }
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 9aad08db9..e78bc734b 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -220,9 +220,10 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let expansion = syntax_node_to_token_tree(expansion.syntax()).unwrap().0;
         let file = token_tree_to_macro_items(&expansion);
         let file = file.unwrap().syntax().debug_dump().trim().to_string();
-        let file = file.replace("C_C__C", "$crate");
+        let tree = tree.unwrap().syntax().debug_dump().trim().to_string();
 
-        assert_eq!(tree.unwrap().syntax().debug_dump().trim(), file,);
+        let file = file.replace("C_C__C", "$crate");
+        assert_eq!(tree, file,);
     }
 
     #[test]
@@ -349,6 +350,21 @@ impl_froms!(TokenTree: Leaf, Subtree);
     }
 
     #[test]
+    fn test_match_group_pattern_with_multiple_statement_without_semi() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ ($ i:ident),*) => ( fn baz { $ (
+                $i()
+            );*} );
+        }
+"#,
+        );
+
+        assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar () ;}");
+    }
+
+    #[test]
     fn test_match_group_empty_fixed_token() {
         let rules = create_rules(
             r#"
@@ -692,6 +708,33 @@ MACRO_ITEMS@[0; 40)
     }
 
     #[test]
+    fn test_ty_with_complex_type() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:ty) => (
+                fn bar() -> $ i { unimplemented!() }
+            )
+        }
+"#,
+        );
+
+        // Reference lifetime struct with generic type
+        assert_expansion(
+            &rules,
+            "foo! { &'a Baz<u8> }",
+            "fn bar () -> & 'a Baz < u8 > {unimplemented ! ()}",
+        );
+
+        // extern "Rust" func type
+        assert_expansion(
+            &rules,
+            r#"foo! { extern "Rust" fn() -> Ret }"#,
+            r#"fn bar () -> extern "Rust" fn () -> Ret {unimplemented ! ()}"#,
+        );
+    }
+
+    #[test]
     fn test_pat_() {
         let rules = create_rules(
             r#"
@@ -854,6 +897,26 @@ MACRO_ITEMS@[0; 40)
 
     // The following tests are based on real world situations
     #[test]
+    fn test_vec() {
+        let rules = create_rules(
+            r#"
+        macro_rules! vec {
+           ($($item:expr),*) => {
+               {
+                   let mut v = Vec::new();
+                   $(
+                       v.push($item);
+                   )*
+                   v
+               }
+           };
+        }
+"#,
+        );
+        assert_expansion(&rules, r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#);
+    }
+
+    #[test]
     fn test_winapi_struct() {
         // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366
 
@@ -886,4 +949,26 @@ macro_rules! STRUCT {
         assert_expansion(&rules, r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#,
             "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
     }
+
+    #[test]
+    fn test_int_base() {
+        let rules = create_rules(
+            r#"
+macro_rules! int_base {
+    ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
+        #[stable(feature = "rust1", since = "1.0.0")]
+        impl fmt::$Trait for $T {
+            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                $Radix.fmt_int(*self as $U, f)
+            }
+        }
+    }
+}
+"#,
+        );
+
+        assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#,
+            "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt :: Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
+        );
+    }
 }
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index 66ea76698..00fb09a3b 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -221,11 +221,13 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                 }
                 _ => return Err(ExpandError::UnexpectedToken),
            },
-            crate::TokenTree::Repeat(crate::Repeat { subtree, kind: _, separator }) => {
+            crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => {
                 // Dirty hack to make macro-expansion terminate.
                 // This should be replaced by a propper macro-by-example implementation
                 let mut limit = 128;
+                let mut counter = 0;
                 while let Ok(nested) = match_lhs(subtree, input) {
+                    counter += 1;
                     limit -= 1;
                     if limit == 0 {
                         break;
@@ -239,6 +241,17 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                         }
                     }
                 }
+
+                match kind {
+                    crate::RepeatKind::OneOrMore if counter == 0 => {
+                        return Err(ExpandError::UnexpectedToken);
+                    }
+                    crate::RepeatKind::ZeroOrOne if counter > 1 => {
+                        return Err(ExpandError::UnexpectedToken);
+                    }
+
+                    _ => {}
+                }
             }
             crate::TokenTree::Subtree(subtree) => {
                 let input_subtree =
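The new `match kind` block enforces the repetition kinds after the greedy matching loop: `$(...)+` must have matched at least once and `$(...)?` at most once. A standalone sketch of that check with simplified types (not the real ra_mbe `RepeatKind`/`ExpandError`):

```rust
// After counting how many times a repetition matched, reject rules whose
// repetition kind is violated.
#[derive(Debug, PartialEq)]
enum RepeatKind {
    ZeroOrMore, // `*`
    OneOrMore,  // `+`
    ZeroOrOne,  // `?`
}

#[derive(Debug, PartialEq)]
struct ExpandError;

fn check_repeat_count(kind: &RepeatKind, counter: usize) -> Result<(), ExpandError> {
    match kind {
        RepeatKind::OneOrMore if counter == 0 => Err(ExpandError),
        RepeatKind::ZeroOrOne if counter > 1 => Err(ExpandError),
        _ => Ok(()),
    }
}

fn main() {
    assert_eq!(check_repeat_count(&RepeatKind::OneOrMore, 0), Err(ExpandError));
    assert_eq!(check_repeat_count(&RepeatKind::ZeroOrOne, 2), Err(ExpandError));
    assert_eq!(check_repeat_count(&RepeatKind::ZeroOrMore, 0), Ok(()));
}
```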
@@ -274,6 +287,20 @@ fn expand_subtree(
     Ok(tt::Subtree { token_trees, delimiter: template.delimiter })
 }
 
+/// Reduce single token subtree to single token
+/// In `tt` matcher case, all tt tokens will be braced by a Delimiter::None
+/// which makes all sort of problems.
+fn reduce_single_token(mut subtree: tt::Subtree) -> tt::TokenTree {
+    if subtree.delimiter != tt::Delimiter::None || subtree.token_trees.len() != 1 {
+        return subtree.into();
+    }
+
+    match subtree.token_trees.pop().unwrap() {
+        tt::TokenTree::Subtree(subtree) => reduce_single_token(subtree),
+        tt::TokenTree::Leaf(token) => token.into(),
+    }
+}
+
 fn expand_tt(
     template: &crate::TokenTree,
     bindings: &Bindings,
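`reduce_single_token` is the fix for item 4 of the description: expansion wraps every substitution in a `Delimiter::None` subtree, so a single punct can end up nested inside one or more invisible subtrees. A self-contained sketch of the same collapse on a simplified token-tree type (not the real `tt` crate):

```rust
// A subtree with no delimiter (`delimited: false`, standing in for
// tt::Delimiter::None) and exactly one child collapses to that child,
// recursively; anything else is left alone.
#[derive(Debug, Clone, PartialEq)]
enum Tree {
    Leaf(char),
    Subtree { delimited: bool, children: Vec<Tree> },
}

fn reduce_single_token(tree: Tree) -> Tree {
    match tree {
        Tree::Subtree { delimited: false, mut children } if children.len() == 1 => {
            reduce_single_token(children.pop().unwrap())
        }
        other => other,
    }
}

fn main() {
    // {None {None ['>'] }} reduces to the bare '>' leaf.
    let nested = Tree::Subtree {
        delimited: false,
        children: vec![Tree::Subtree { delimited: false, children: vec![Tree::Leaf('>')] }],
    };
    assert_eq!(reduce_single_token(nested), Tree::Leaf('>'));
}
```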
@@ -282,11 +309,13 @@ fn expand_tt(
     let res: tt::TokenTree = match template {
         crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, bindings, nesting)?.into(),
         crate::TokenTree::Repeat(repeat) => {
-            let mut token_trees = Vec::new();
+            let mut token_trees: Vec<tt::TokenTree> = Vec::new();
             nesting.push(0);
             // Dirty hack to make macro-expansion terminate.
             // This should be replaced by a propper macro-by-example implementation
             let mut limit = 128;
+            let mut has_sep = false;
+
             while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) {
                 limit -= 1;
                 if limit == 0 {
@@ -294,10 +323,26 @@ fn expand_tt(
                 }
                 let idx = nesting.pop().unwrap();
                 nesting.push(idx + 1);
-                token_trees.push(t.into())
+                token_trees.push(reduce_single_token(t).into());
+
+                if let Some(sep) = repeat.separator {
+                    let punct =
+                        tt::Leaf::from(tt::Punct { char: sep, spacing: tt::Spacing::Alone });
+                    token_trees.push(punct.into());
+                    has_sep = true;
+                }
             }
             nesting.pop().unwrap();
-            tt::Subtree { token_trees, delimiter: tt::Delimiter::None }.into()
+
+            // Dirty hack for remove the last sep
+            // if it is a "," undo the push
+            if has_sep && repeat.separator.unwrap() == ',' {
+                token_trees.pop();
+            }
+
+            // Check if it is a singel token subtree without any delimiter
+            // e.g {Delimiter:None> ['>'] /Delimiter:None>}
+            reduce_single_token(tt::Subtree { token_trees, delimiter: tt::Delimiter::None })
         }
         crate::TokenTree::Leaf(leaf) => match leaf {
             crate::Leaf::Ident(ident) => {
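The loop above appends the separator after every repetition and then, for `,` only, pops the trailing one so the expansion does not end with a dangling comma (a trailing `;` is harmless for statement-like expansions, as the expected outputs in the new tests show). The same push-then-pop shape on plain strings, purely for illustration:

```rust
// Append a separator after every element, then drop the trailing one.
fn join_with_trailing_fixup(items: &[&str], sep: char) -> Vec<String> {
    let mut out: Vec<String> = Vec::new();
    let mut has_sep = false;
    for item in items {
        out.push(item.to_string());
        out.push(sep.to_string());
        has_sep = true;
    }
    // Undo the last push so the sequence does not end with a separator.
    if has_sep {
        out.pop();
    }
    out
}

fn main() {
    assert_eq!(join_with_trailing_fixup(&["a", "b"], ','), vec!["a", ",", "b"]);
    assert_eq!(join_with_trailing_fixup(&[], ','), Vec::<String>::new());
}
```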
@@ -311,7 +356,13 @@ fn expand_tt(
                     tt::Leaf::from(tt::Ident { text: "$crate".into(), id: TokenId::unspecified() })
                         .into()
                 } else {
-                    bindings.get(&v.text, nesting)?.clone()
+                    let tkn = bindings.get(&v.text, nesting)?.clone();
+
+                    if let tt::TokenTree::Subtree(subtree) = tkn {
+                        reduce_single_token(subtree)
+                    } else {
+                        tkn
+                    }
                 }
             }
             crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(),
diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs
index f37c422d3..0710062d9 100644
--- a/crates/ra_mbe/src/mbe_parser.rs
+++ b/crates/ra_mbe/src/mbe_parser.rs
@@ -20,15 +20,15 @@ pub(crate) fn parse(tt: &tt::Subtree) -> Result<crate::MacroRules, ParseError> {
 }
 
 fn parse_rule(p: &mut TtCursor) -> Result<crate::Rule, ParseError> {
-    let lhs = parse_subtree(p.eat_subtree()?)?;
+    let lhs = parse_subtree(p.eat_subtree()?, false)?;
     p.expect_char('=')?;
     p.expect_char('>')?;
-    let mut rhs = parse_subtree(p.eat_subtree()?)?;
+    let mut rhs = parse_subtree(p.eat_subtree()?, true)?;
     rhs.delimiter = crate::Delimiter::None;
     Ok(crate::Rule { lhs, rhs })
 }
 
-fn parse_subtree(tt: &tt::Subtree) -> Result<crate::Subtree, ParseError> {
+fn parse_subtree(tt: &tt::Subtree, transcriber: bool) -> Result<crate::Subtree, ParseError> {
     let mut token_trees = Vec::new();
     let mut p = TtCursor::new(tt);
     while let Some(tt) = p.eat() {
@@ -36,9 +36,9 @@ fn parse_subtree(tt: &tt::Subtree) -> Result<crate::Subtree, ParseError> {
             tt::TokenTree::Leaf(leaf) => match leaf {
                 tt::Leaf::Punct(tt::Punct { char: '$', .. }) => {
                     if p.at_ident().is_some() {
-                        crate::Leaf::from(parse_var(&mut p)?).into()
+                        crate::Leaf::from(parse_var(&mut p, transcriber)?).into()
                     } else {
-                        parse_repeat(&mut p)?.into()
+                        parse_repeat(&mut p, transcriber)?.into()
                     }
                 }
                 tt::Leaf::Punct(punct) => crate::Leaf::from(*punct).into(),
@@ -49,17 +49,17 @@ fn parse_subtree(tt: &tt::Subtree) -> Result<crate::Subtree, ParseError> {
                     crate::Leaf::from(crate::Literal { text: text.clone() }).into()
                 }
             },
-            tt::TokenTree::Subtree(subtree) => parse_subtree(&subtree)?.into(),
+            tt::TokenTree::Subtree(subtree) => parse_subtree(&subtree, transcriber)?.into(),
         };
         token_trees.push(child);
     }
     Ok(crate::Subtree { token_trees, delimiter: tt.delimiter })
 }
 
-fn parse_var(p: &mut TtCursor) -> Result<crate::Var, ParseError> {
+fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result<crate::Var, ParseError> {
     let ident = p.eat_ident().unwrap();
     let text = ident.text.clone();
-    let kind = if p.at_char(':') {
+    let kind = if !transcriber && p.at_char(':') {
         p.bump();
         if let Some(ident) = p.eat_ident() {
             Some(ident.text.clone())
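The `transcriber` flag threaded through `parse_var` matters because `$name:ident` is a fragment declaration only on the matcher side; on the transcriber side a `:` after `$name` is an ordinary token that must survive into the expansion. An illustrative `macro_rules!` case of that shape (not necessarily the exact regression this commit fixes):

```rust
// On the LHS, `$name:ident` declares a fragment of kind `ident`.
// On the RHS, the `:` after `$name` is a plain token (a field's type
// annotation here); treating `: u32` as a fragment kind would drop it.
macro_rules! make_struct {
    ($name:ident) => {
        struct Wrapper {
            $name: u32,
        }
    };
}

make_struct!(value);

fn main() {
    let w = Wrapper { value: 7 };
    assert_eq!(w.value, 7);
}
```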
@@ -70,12 +70,13 @@ fn parse_var(p: &mut TtCursor) -> Result<crate::Var, ParseError> {
     } else {
         None
     };
+
     Ok(crate::Var { text, kind })
 }
 
-fn parse_repeat(p: &mut TtCursor) -> Result<crate::Repeat, ParseError> {
+fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
     let subtree = p.eat_subtree().unwrap();
-    let mut subtree = parse_subtree(subtree)?;
+    let mut subtree = parse_subtree(subtree, transcriber)?;
     subtree.delimiter = crate::Delimiter::None;
     let sep = p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?;
     let (separator, rep) = match sep.char {
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 16a053b49..20da1e9f7 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -21,6 +21,7 @@ impl<'a> From<&'a [tt::TokenTree]> for TokenSeq<'a> {
     }
 }
 
+#[derive(Debug)]
 enum DelimToken<'a> {
     Delim(&'a tt::Delimiter, bool),
     Token(&'a tt::TokenTree),
@@ -52,10 +53,10 @@ impl<'a> TokenSeq<'a> {
         }
     }
 
-    fn child_slice(&self) -> &[tt::TokenTree] {
+    fn child_slice(&self, pos: usize) -> &[tt::TokenTree] {
         match self {
-            TokenSeq::Subtree(subtree) => &subtree.token_trees,
-            TokenSeq::Seq(tokens) => &tokens,
+            TokenSeq::Subtree(subtree) => &subtree.token_trees[pos - 1..],
+            TokenSeq::Seq(tokens) => &tokens[pos..],
         }
     }
 }
@@ -114,7 +115,7 @@ impl<'a> SubTreeWalker<'a> {
                     WalkCursor::Token(0, convert_delim(subtree.delimiter, false))
                 }
                 tt::TokenTree::Leaf(leaf) => {
-                    let next_tokens = self.ts.child_slice();
+                    let next_tokens = self.ts.child_slice(0);
                     WalkCursor::Token(0, convert_leaf(&next_tokens, leaf))
                 }
             },
@@ -190,8 +191,8 @@ impl<'a> SubTreeWalker<'a> {
                     WalkCursor::Token(new_idx, convert_delim(subtree.delimiter, backward))
                 }
                 tt::TokenTree::Leaf(leaf) => {
-                    let next_tokens = top.child_slice();
-                    WalkCursor::Token(pos, convert_leaf(&next_tokens[pos..], leaf))
+                    let next_tokens = top.child_slice(pos);
+                    WalkCursor::Token(pos, convert_leaf(&next_tokens, leaf))
                 }
             },
             DelimToken::Delim(delim, is_end) => {
@@ -429,7 +430,12 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
 }
 
 fn convert_ident(ident: &tt::Ident) -> TtToken {
-    let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
+    let kind = if let Some('\'') = ident.text.chars().next() {
+        LIFETIME
+    } else {
+        SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT)
+    };
+
     TtToken { kind, is_joint_to_next: false, text: ident.text.clone(), n_tokens: 1 }
 }
 
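`convert_ident` now classifies an ident whose text starts with `'` as a lifetime instead of falling through to `IDENT`, which is what lets expansions like `foo! { &'a Baz<u8> }` in the new `test_ty_with_complex_type` parse. A standalone sketch of that classification, with `TokenKind` as a made-up stand-in for ra_syntax's `SyntaxKind`:

```rust
// Classify an identifier-like token: a leading `'` means a lifetime,
// otherwise check the keyword table, otherwise it is a plain ident.
#[derive(Debug, PartialEq)]
enum TokenKind {
    Lifetime,
    Keyword,
    Ident,
}

fn classify_ident(text: &str) -> TokenKind {
    if let Some('\'') = text.chars().next() {
        TokenKind::Lifetime
    } else if matches!(text, "fn" | "struct" | "impl" | "for") {
        // Stand-in for SyntaxKind::from_keyword(..)
        TokenKind::Keyword
    } else {
        TokenKind::Ident
    }
}

fn main() {
    assert_eq!(classify_ident("'a"), TokenKind::Lifetime);
    assert_eq!(classify_ident("fn"), TokenKind::Keyword);
    assert_eq!(classify_ident("Baz"), TokenKind::Ident);
}
```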