 crates/ra_hir/src/ids.rs               |  42
 crates/ra_hir/src/nameres.rs           |  16
 crates/ra_hir/src/nameres/collector.rs | 165
 crates/ra_mbe/src/lib.rs               |  89
 crates/ra_mbe/src/mbe_expander.rs      |  61
 crates/ra_mbe/src/mbe_parser.rs        |  21
 crates/ra_mbe/src/subtree_parser.rs    |  13
 crates/ra_mbe/src/subtree_source.rs    |  20
 crates/ra_tt/src/lib.rs                |  16
 9 files changed, 395 insertions, 48 deletions
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 2a1ed9b81..c7849c995 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -63,11 +63,15 @@ impl HirFileId {
         match file_id.0 {
             HirFileIdRepr::File(file_id) => db.parse(file_id),
             HirFileIdRepr::Macro(macro_call_id) => {
-                parse_macro(db, macro_call_id).unwrap_or_else(|| {
+                parse_macro(db, macro_call_id).unwrap_or_else(|err| {
                     // Note:
                     // The final goal we would like to make all parse_macro success,
                     // such that the following log will not call anyway.
-                    log::warn!("fail on macro_parse: {}", macro_call_id.debug_dump(db));
+                    log::warn!(
+                        "fail on macro_parse: (reason: {}) {}",
+                        err,
+                        macro_call_id.debug_dump(db)
+                    );
 
                     // returning an empty string looks fishy...
                     SourceFile::parse("")
@@ -77,14 +81,27 @@ impl HirFileId {
     }
 }
 
-fn parse_macro(db: &impl DefDatabase, macro_call_id: MacroCallId) -> Option<TreeArc<SourceFile>> {
+fn parse_macro(
+    db: &impl DefDatabase,
+    macro_call_id: MacroCallId,
+) -> Result<TreeArc<SourceFile>, String> {
     let loc = macro_call_id.loc(db);
     let macro_call = loc.ast_id.to_node(db);
-    let (macro_arg, _) = macro_call.token_tree().and_then(mbe::ast_to_token_tree)?;
+    let (macro_arg, _) = macro_call
+        .token_tree()
+        .and_then(mbe::ast_to_token_tree)
+        .ok_or("Fail to args in to tt::TokenTree")?;
+
+    let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
+    let tt = macro_rules.expand(&macro_arg).map_err(|err| format!("{:?}", err))?;
+
+    // Set a hard limit for the expanded tt
+    let count = tt.count();
+    if count > 65536 {
+        return Err(format!("Total tokens count exceed limit : count = {}", count));
+    }
 
-    let macro_rules = db.macro_def(loc.def)?;
-    let tt = macro_rules.expand(&macro_arg).ok()?;
-    Some(mbe::token_tree_to_ast_item_list(&tt))
+    Ok(mbe::token_tree_to_ast_item_list(&tt))
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -311,11 +328,18 @@ impl MacroCallId {
     pub fn debug_dump(&self, db: &impl DefDatabase) -> String {
         let loc = self.clone().loc(db);
         let node = loc.ast_id.to_node(db);
-        let syntax_str = node.syntax().to_string();
+        let syntax_str = node.syntax().text().chunks().collect::<Vec<_>>().join(" ");
 
         // dump the file name
         let file_id: HirFileId = self.clone().into();
         let original = file_id.original_file(db);
-        format!("macro call [file: {:#?}] : {}", db.file_relative_path(original), syntax_str)
+        let macro_rules = db.macro_def(loc.def);
+
+        format!(
+            "macro call [file: {:#?}] : {}\nhas rules: {}",
+            db.file_relative_path(original),
+            syntax_str,
+            macro_rules.is_some()
+        )
     }
 }
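
Editor's note: the 65536-token cap in `parse_macro` exists because a macro can terminate at every individual step and still blow up the expanded tree. A hand-written illustration (not part of this patch) of the kind of macro the guard is aimed at:

```rust
// Editor's illustration only: each expansion of `doubling!` emits two copies of
// its input, so the nested invocation roughly doubles in size at every step.
// Without a cap like `count > 65536` in parse_macro, repeatedly expanding the
// inner call would produce an enormous token tree.
macro_rules! doubling {
    ($($t:tt)*) => {
        doubling!($($t)* $($t)*);
    };
}
```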
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs
index 39152360c..fbfff4fd7 100644
--- a/crates/ra_hir/src/nameres.rs
+++ b/crates/ra_hir/src/nameres.rs
@@ -55,7 +55,7 @@ mod tests;
 
 use std::sync::Arc;
 
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 use ra_arena::{Arena, RawId, impl_arena_id};
 use ra_db::{FileId, Edition};
 use test_utils::tested_by;
@@ -91,6 +91,19 @@ pub struct CrateDefMap {
     root: CrateModuleId,
     modules: Arena<CrateModuleId, ModuleData>,
     public_macros: FxHashMap<Name, MacroDefId>,
+
+    /// Some macros are not well-behavior, which leads to infinite loop
+    /// e.g. macro_rules! foo { ($ty:ty) => { foo!($ty); } }
+    /// We mark it down and skip it in collector
+    ///
+    /// FIXME:
+    /// Right now it only handle a poison macro in a single crate,
+    /// such that if other crate try to call that macro,
+    /// the whole process will do again until it became poisoned in that crate.
+    /// We should handle this macro set globally
+    /// However, do we want to put it as a global variable?
+    poison_macros: FxHashSet<MacroDefId>,
+
     diagnostics: Vec<DefDiagnostic>,
 }
 
@@ -195,6 +208,7 @@ impl CrateDefMap {
                 root,
                 modules,
                 public_macros: FxHashMap::default(),
+                poison_macros: FxHashSet::default(),
                 diagnostics: Vec::new(),
             }
         };
diff --git a/crates/ra_hir/src/nameres/collector.rs b/crates/ra_hir/src/nameres/collector.rs
index 6147b3219..4590a5184 100644
--- a/crates/ra_hir/src/nameres/collector.rs
+++ b/crates/ra_hir/src/nameres/collector.rs
@@ -42,12 +42,40 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
         unresolved_imports: Vec::new(),
         unexpanded_macros: Vec::new(),
         global_macro_scope: FxHashMap::default(),
-        marco_stack_count: 0,
+        macro_stack_monitor: MacroStackMonitor::default(),
     };
     collector.collect();
     collector.finish()
 }
 
+#[derive(Default)]
+struct MacroStackMonitor {
+    counts: FxHashMap<MacroDefId, u32>,
+
+    /// Mainly use for test
+    validator: Option<Box<dyn Fn(u32) -> bool>>,
+}
+
+impl MacroStackMonitor {
+    fn increase(&mut self, macro_def_id: MacroDefId) {
+        *self.counts.entry(macro_def_id).or_default() += 1;
+    }
+
+    fn decrease(&mut self, macro_def_id: MacroDefId) {
+        *self.counts.entry(macro_def_id).or_default() -= 1;
+    }
+
+    fn is_poison(&self, macro_def_id: MacroDefId) -> bool {
+        let cur = *self.counts.get(&macro_def_id).unwrap_or(&0);
+
+        if let Some(validator) = &self.validator {
+            validator(cur)
+        } else {
+            cur > 100
+        }
+    }
+}
+
 /// Walks the tree of module recursively
 struct DefCollector<DB> {
     db: DB,
@@ -59,7 +87,7 @@ struct DefCollector<DB> {
 
     /// Some macro use `$tt:tt which mean we have to handle the macro perfectly
    /// To prevent stackoverflow, we add a deep counter here for prevent that.
-    marco_stack_count: u32,
+    macro_stack_monitor: MacroStackMonitor,
 }
 
 impl<'a, DB> DefCollector<&'a DB>
@@ -317,30 +345,40 @@ where
             let def_map = self.db.crate_def_map(krate);
             if let Some(macro_id) = def_map.public_macros.get(&path.segments[1].name).cloned() {
                 let call_id = MacroCallLoc { def: macro_id, ast_id: *ast_id }.id(self.db);
-                resolved.push((*module_id, call_id));
+                resolved.push((*module_id, call_id, macro_id));
             }
             false
         });
 
-        for (module_id, macro_call_id) in resolved {
-            self.collect_macro_expansion(module_id, macro_call_id);
+        for (module_id, macro_call_id, macro_def_id) in resolved {
+            self.collect_macro_expansion(module_id, macro_call_id, macro_def_id);
         }
         res
     }
 
-    fn collect_macro_expansion(&mut self, module_id: CrateModuleId, macro_call_id: MacroCallId) {
-        self.marco_stack_count += 1;
+    fn collect_macro_expansion(
+        &mut self,
+        module_id: CrateModuleId,
+        macro_call_id: MacroCallId,
+        macro_def_id: MacroDefId,
+    ) {
+        if self.def_map.poison_macros.contains(&macro_def_id) {
+            return;
+        }
+
+        self.macro_stack_monitor.increase(macro_def_id);
 
-        if self.marco_stack_count < 300 {
+        if !self.macro_stack_monitor.is_poison(macro_def_id) {
             let file_id: HirFileId = macro_call_id.into();
             let raw_items = self.db.raw_items(file_id);
             ModCollector { def_collector: &mut *self, file_id, module_id, raw_items: &raw_items }
-                .collect(raw_items.items())
+                .collect(raw_items.items());
         } else {
             log::error!("Too deep macro expansion: {}", macro_call_id.debug_dump(self.db));
+            self.def_map.poison_macros.insert(macro_def_id);
        }
 
-        self.marco_stack_count -= 1;
+        self.macro_stack_monitor.decrease(macro_def_id);
     }
 
     fn finish(self) -> CrateDefMap {
@@ -484,7 +522,7 @@ where
         {
             let macro_call_id = MacroCallLoc { def: macro_id, ast_id }.id(self.def_collector.db);
 
-            self.def_collector.collect_macro_expansion(self.module_id, macro_call_id);
+            self.def_collector.collect_macro_expansion(self.module_id, macro_call_id, macro_id);
             return;
         }
 
@@ -530,3 +568,108 @@ fn resolve_submodule(
         None => Err(if is_dir_owner { file_mod } else { file_dir_mod }),
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use ra_db::SourceDatabase;
+
+    use crate::{Crate, mock::MockDatabase, DefDatabase};
+    use ra_arena::{Arena};
+    use super::*;
+    use rustc_hash::FxHashSet;
+
+    fn do_collect_defs(
+        db: &impl DefDatabase,
+        def_map: CrateDefMap,
+        monitor: MacroStackMonitor,
+    ) -> CrateDefMap {
+        let mut collector = DefCollector {
+            db,
+            def_map,
+            glob_imports: FxHashMap::default(),
+            unresolved_imports: Vec::new(),
+            unexpanded_macros: Vec::new(),
+            global_macro_scope: FxHashMap::default(),
+            macro_stack_monitor: monitor,
+        };
+        collector.collect();
+        collector.finish()
+    }
+
+    fn do_limited_resolve(code: &str, limit: u32, poison_limit: u32) -> CrateDefMap {
+        let (db, _source_root, _) = MockDatabase::with_single_file(&code);
+        let crate_id = db.crate_graph().iter().next().unwrap();
+        let krate = Crate { crate_id };
+
+        let def_map = {
+            let edition = krate.edition(&db);
+            let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default();
+            let root = modules.alloc(ModuleData::default());
+            CrateDefMap {
+                krate,
+                edition,
+                extern_prelude: FxHashMap::default(),
+                prelude: None,
+                root,
+                modules,
+                public_macros: FxHashMap::default(),
+                poison_macros: FxHashSet::default(),
+                diagnostics: Vec::new(),
+            }
+        };
+
+        let mut monitor = MacroStackMonitor::default();
+        monitor.validator = Some(Box::new(move |count| {
+            assert!(count < limit);
+            count >= poison_limit
+        }));
+
+        do_collect_defs(&db, def_map, monitor)
+    }
+
+    #[test]
+    fn test_macro_expand_limit_width() {
+        do_limited_resolve(
+            r#"
+        macro_rules! foo {
+            ($($ty:ty)*) => { foo!($($ty)*, $($ty)*); }
+        }
+foo!(KABOOM);
+        "#,
+            16,
+            1000,
+        );
+    }
+
+    #[test]
+    fn test_macro_expand_poisoned() {
+        let def = do_limited_resolve(
+            r#"
+        macro_rules! foo {
+            ($ty:ty) => { foo!($ty); }
+        }
+foo!(KABOOM);
+        "#,
+            100,
+            16,
+        );
+
+        assert_eq!(def.poison_macros.len(), 1);
+    }
+
+    #[test]
+    fn test_macro_expand_normal() {
+        let def = do_limited_resolve(
+            r#"
+        macro_rules! foo {
+            ($ident:ident) => { struct $ident {} }
+        }
+foo!(Bar);
+        "#,
+            16,
+            16,
+        );
+
+        assert_eq!(def.poison_macros.len(), 0);
+    }
+}
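
Editor's note: the tests above drive `MacroStackMonitor` through `do_limited_resolve`; the sketch below restates the same bookkeeping in isolation. A `String` key stands in for the salsa-interned `MacroDefId`, which cannot be constructed outside the database (an assumption made purely for illustration).

```rust
// Stand-alone sketch of the stack-monitor bookkeeping (editor's illustration;
// a String key replaces the interned MacroDefId).
use std::collections::HashMap;

#[derive(Default)]
struct Monitor {
    counts: HashMap<String, u32>,
}

impl Monitor {
    fn increase(&mut self, id: &str) {
        *self.counts.entry(id.to_string()).or_default() += 1;
    }
    fn decrease(&mut self, id: &str) {
        *self.counts.entry(id.to_string()).or_default() -= 1;
    }
    fn is_poison(&self, id: &str) -> bool {
        // Mirrors the default branch of MacroStackMonitor::is_poison; the real
        // struct also accepts an injected `validator` closure for tests.
        self.counts.get(id).copied().unwrap_or(0) > 100
    }
}

fn main() {
    let mut m = Monitor::default();
    for _ in 0..=100 {
        m.increase("foo");
    }
    // The 101st nested expansion of `foo` trips the default threshold.
    assert!(m.is_poison("foo"));
    m.decrease("foo");
}
```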
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 9aad08db9..e78bc734b 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -220,9 +220,10 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let expansion = syntax_node_to_token_tree(expansion.syntax()).unwrap().0;
         let file = token_tree_to_macro_items(&expansion);
         let file = file.unwrap().syntax().debug_dump().trim().to_string();
-        let file = file.replace("C_C__C", "$crate");
+        let tree = tree.unwrap().syntax().debug_dump().trim().to_string();
 
-        assert_eq!(tree.unwrap().syntax().debug_dump().trim(), file,);
+        let file = file.replace("C_C__C", "$crate");
+        assert_eq!(tree, file,);
     }
 
     #[test]
@@ -349,6 +350,21 @@ impl_froms!(TokenTree: Leaf, Subtree);
     }
 
     #[test]
+    fn test_match_group_pattern_with_multiple_statement_without_semi() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ ($ i:ident),*) => ( fn baz { $ (
+                $i()
+            );*} );
+        }
+"#,
+        );
+
+        assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar () ;}");
+    }
+
+    #[test]
     fn test_match_group_empty_fixed_token() {
         let rules = create_rules(
             r#"
@@ -692,6 +708,33 @@ MACRO_ITEMS@[0; 40)
     }
 
     #[test]
+    fn test_ty_with_complex_type() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ($ i:ty) => (
+                fn bar() -> $ i { unimplemented!() }
+            )
+        }
+"#,
+        );
+
+        // Reference lifetime struct with generic type
+        assert_expansion(
+            &rules,
+            "foo! { &'a Baz<u8> }",
+            "fn bar () -> & 'a Baz < u8 > {unimplemented ! ()}",
+        );
+
+        // extern "Rust" func type
+        assert_expansion(
+            &rules,
+            r#"foo! { extern "Rust" fn() -> Ret }"#,
+            r#"fn bar () -> extern "Rust" fn () -> Ret {unimplemented ! ()}"#,
+        );
+    }
+
+    #[test]
     fn test_pat_() {
         let rules = create_rules(
             r#"
@@ -854,6 +897,26 @@ MACRO_ITEMS@[0; 40)
 
     // The following tests are based on real world situations
     #[test]
+    fn test_vec() {
+        let rules = create_rules(
+            r#"
+            macro_rules! vec {
+                ($($item:expr),*) => {
+                    {
+                        let mut v = Vec::new();
+                        $(
+                            v.push($item);
+                        )*
+                        v
+                    }
+                };
+}
+"#,
+        );
+        assert_expansion(&rules, r#"vec!();"#, r#"{let mut v = Vec :: new () ; v}"#);
+    }
+
+    #[test]
     fn test_winapi_struct() {
         // from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/macros.rs#L366
 
@@ -886,4 +949,26 @@ macro_rules! STRUCT {
         assert_expansion(&rules, r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#,
         "# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
     }
+
+    #[test]
+    fn test_int_base() {
+        let rules = create_rules(
+            r#"
+macro_rules! int_base {
+    ($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
+        #[stable(feature = "rust1", since = "1.0.0")]
+        impl fmt::$Trait for $T {
+            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                $Radix.fmt_int(*self as $U, f)
+            }
+        }
+    }
+}
+"#,
+        );
+
+        assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#,
+            "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt :: Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
+        );
+    }
 }
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index 66ea76698..00fb09a3b 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -221,11 +221,13 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                 }
                 _ => return Err(ExpandError::UnexpectedToken),
             },
-            crate::TokenTree::Repeat(crate::Repeat { subtree, kind: _, separator }) => {
+            crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => {
                 // Dirty hack to make macro-expansion terminate.
                 // This should be replaced by a propper macro-by-example implementation
                 let mut limit = 128;
+                let mut counter = 0;
                 while let Ok(nested) = match_lhs(subtree, input) {
+                    counter += 1;
                     limit -= 1;
                     if limit == 0 {
                         break;
@@ -239,6 +241,17 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                         }
                     }
                 }
+
+                match kind {
+                    crate::RepeatKind::OneOrMore if counter == 0 => {
+                        return Err(ExpandError::UnexpectedToken);
+                    }
+                    crate::RepeatKind::ZeroOrOne if counter > 1 => {
+                        return Err(ExpandError::UnexpectedToken);
+                    }
+
+                    _ => {}
+                }
             }
             crate::TokenTree::Subtree(subtree) => {
                 let input_subtree =
@@ -274,6 +287,20 @@ fn expand_subtree(
     Ok(tt::Subtree { token_trees, delimiter: template.delimiter })
 }
 
+/// Reduce single token subtree to single token
+/// In `tt` matcher case, all tt tokens will be braced by a Delimiter::None
+/// which makes all sort of problems.
+fn reduce_single_token(mut subtree: tt::Subtree) -> tt::TokenTree {
+    if subtree.delimiter != tt::Delimiter::None || subtree.token_trees.len() != 1 {
+        return subtree.into();
+    }
+
+    match subtree.token_trees.pop().unwrap() {
+        tt::TokenTree::Subtree(subtree) => reduce_single_token(subtree),
+        tt::TokenTree::Leaf(token) => token.into(),
+    }
+}
+
 fn expand_tt(
     template: &crate::TokenTree,
     bindings: &Bindings,
@@ -282,11 +309,13 @@ fn expand_tt(
     let res: tt::TokenTree = match template {
        crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, bindings, nesting)?.into(),
        crate::TokenTree::Repeat(repeat) => {
-            let mut token_trees = Vec::new();
+            let mut token_trees: Vec<tt::TokenTree> = Vec::new();
            nesting.push(0);
            // Dirty hack to make macro-expansion terminate.
            // This should be replaced by a propper macro-by-example implementation
            let mut limit = 128;
+            let mut has_sep = false;
+
            while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) {
                limit -= 1;
                if limit == 0 {
@@ -294,10 +323,26 @@ fn expand_tt(
                }
                let idx = nesting.pop().unwrap();
                nesting.push(idx + 1);
-                token_trees.push(t.into())
+                token_trees.push(reduce_single_token(t).into());
+
+                if let Some(sep) = repeat.separator {
+                    let punct =
+                        tt::Leaf::from(tt::Punct { char: sep, spacing: tt::Spacing::Alone });
+                    token_trees.push(punct.into());
+                    has_sep = true;
+                }
            }
            nesting.pop().unwrap();
-            tt::Subtree { token_trees, delimiter: tt::Delimiter::None }.into()
+
+            // Dirty hack for remove the last sep
+            // if it is a "," undo the push
+            if has_sep && repeat.separator.unwrap() == ',' {
+                token_trees.pop();
+            }
+
+            // Check if it is a singel token subtree without any delimiter
+            // e.g {Delimiter:None> ['>'] /Delimiter:None>}
+            reduce_single_token(tt::Subtree { token_trees, delimiter: tt::Delimiter::None })
        }
        crate::TokenTree::Leaf(leaf) => match leaf {
            crate::Leaf::Ident(ident) => {
@@ -311,7 +356,13 @@ fn expand_tt(
                    tt::Leaf::from(tt::Ident { text: "$crate".into(), id: TokenId::unspecified() })
                        .into()
                } else {
-                    bindings.get(&v.text, nesting)?.clone()
+                    let tkn = bindings.get(&v.text, nesting)?.clone();
+
+                    if let tt::TokenTree::Subtree(subtree) = tkn {
+                        reduce_single_token(subtree)
+                    } else {
+                        tkn
+                    }
                }
            }
            crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(),
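
Editor's note: a sketch of what `reduce_single_token` is meant to do. The helper is private to `mbe_expander`, so this would live in that module's tests; the token trees are hand-built for illustration.

```rust
// Editor's sketch: a single-child Delimiter::None subtree, even a nested one,
// collapses down to the inner leaf; anything delimited or multi-token is
// returned unchanged by the early `return subtree.into()`.
#[test]
fn reduce_single_token_collapses_none_wrappers() {
    let leaf: tt::TokenTree =
        tt::Leaf::from(tt::Ident { text: "x".into(), id: tt::TokenId::unspecified() }).into();
    let inner = tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![leaf] };
    let outer = tt::Subtree { delimiter: tt::Delimiter::None, token_trees: vec![inner.into()] };

    match reduce_single_token(outer) {
        tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => assert_eq!(ident.text, "x"),
        _ => panic!("expected the inner ident leaf"),
    }
}
```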
diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs
index f37c422d3..0710062d9 100644
--- a/crates/ra_mbe/src/mbe_parser.rs
+++ b/crates/ra_mbe/src/mbe_parser.rs
@@ -20,15 +20,15 @@ pub(crate) fn parse(tt: &tt::Subtree) -> Result<crate::MacroRules, ParseError> {
 }
 
 fn parse_rule(p: &mut TtCursor) -> Result<crate::Rule, ParseError> {
-    let lhs = parse_subtree(p.eat_subtree()?)?;
+    let lhs = parse_subtree(p.eat_subtree()?, false)?;
     p.expect_char('=')?;
     p.expect_char('>')?;
-    let mut rhs = parse_subtree(p.eat_subtree()?)?;
+    let mut rhs = parse_subtree(p.eat_subtree()?, true)?;
     rhs.delimiter = crate::Delimiter::None;
     Ok(crate::Rule { lhs, rhs })
 }
 
-fn parse_subtree(tt: &tt::Subtree) -> Result<crate::Subtree, ParseError> {
+fn parse_subtree(tt: &tt::Subtree, transcriber: bool) -> Result<crate::Subtree, ParseError> {
     let mut token_trees = Vec::new();
     let mut p = TtCursor::new(tt);
     while let Some(tt) = p.eat() {
@@ -36,9 +36,9 @@ fn parse_subtree(tt: &tt::Subtree) -> Result<crate::Subtree, ParseError> {
             tt::TokenTree::Leaf(leaf) => match leaf {
                 tt::Leaf::Punct(tt::Punct { char: '$', .. }) => {
                     if p.at_ident().is_some() {
-                        crate::Leaf::from(parse_var(&mut p)?).into()
+                        crate::Leaf::from(parse_var(&mut p, transcriber)?).into()
                     } else {
-                        parse_repeat(&mut p)?.into()
+                        parse_repeat(&mut p, transcriber)?.into()
                     }
                 }
                 tt::Leaf::Punct(punct) => crate::Leaf::from(*punct).into(),
@@ -49,17 +49,17 @@ fn parse_subtree(tt: &tt::Subtree) -> Result<crate::Subtree, ParseError> {
                     crate::Leaf::from(crate::Literal { text: text.clone() }).into()
                 }
             },
-            tt::TokenTree::Subtree(subtree) => parse_subtree(&subtree)?.into(),
+            tt::TokenTree::Subtree(subtree) => parse_subtree(&subtree, transcriber)?.into(),
         };
         token_trees.push(child);
     }
     Ok(crate::Subtree { token_trees, delimiter: tt.delimiter })
 }
 
-fn parse_var(p: &mut TtCursor) -> Result<crate::Var, ParseError> {
+fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result<crate::Var, ParseError> {
     let ident = p.eat_ident().unwrap();
     let text = ident.text.clone();
-    let kind = if p.at_char(':') {
+    let kind = if !transcriber && p.at_char(':') {
         p.bump();
         if let Some(ident) = p.eat_ident() {
             Some(ident.text.clone())
@@ -70,12 +70,13 @@ fn parse_var(p: &mut TtCursor) -> Result<crate::Var, ParseError> {
     } else {
         None
     };
+
     Ok(crate::Var { text, kind })
 }
 
-fn parse_repeat(p: &mut TtCursor) -> Result<crate::Repeat, ParseError> {
+fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
     let subtree = p.eat_subtree().unwrap();
-    let mut subtree = parse_subtree(subtree)?;
+    let mut subtree = parse_subtree(subtree, transcriber)?;
     subtree.delimiter = crate::Delimiter::None;
     let sep = p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?;
     let (separator, rep) = match sep.char {
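
Editor's note: the new `transcriber` flag is there because fragment specifiers such as `$i:ident` only have meaning in the matcher. In the transcriber, `$var` followed by `:` is ordinary output and must not swallow the next identifier as a fragment kind. An illustrative macro (editor's example, not one of this patch's test cases):

```rust
// In the right-hand side, `$i: u32` is a plain type ascription. With the old
// parser, parse_var would also run on the transcriber and eat `: u32` as if
// `u32` were a fragment specifier, corrupting the expansion.
macro_rules! declare {
    ($i:ident) => {
        let $i: u32 = 0;
    };
}

fn main() {
    declare!(x);
    assert_eq!(x, 0);
}
```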
diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs
index 528aa0f8a..f07107414 100644
--- a/crates/ra_mbe/src/subtree_parser.rs
+++ b/crates/ra_mbe/src/subtree_parser.rs
@@ -5,6 +5,7 @@ use ra_syntax::{SyntaxKind};
 
 struct OffsetTokenSink {
     token_pos: usize,
+    error: bool,
 }
 
 impl TreeSink for OffsetTokenSink {
@@ -13,7 +14,9 @@ impl TreeSink for OffsetTokenSink {
     }
     fn start_node(&mut self, _kind: SyntaxKind) {}
     fn finish_node(&mut self) {}
-    fn error(&mut self, _error: ra_parser::ParseError) {}
+    fn error(&mut self, _error: ra_parser::ParseError) {
+        self.error = true;
+    }
 }
 
 pub(crate) struct Parser<'a> {
@@ -67,11 +70,15 @@ impl<'a> Parser<'a> {
         F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
     {
         let mut src = SubtreeTokenSource::new(&self.subtree.token_trees[*self.cur_pos..]);
-        let mut sink = OffsetTokenSink { token_pos: 0 };
+        let mut sink = OffsetTokenSink { token_pos: 0, error: false };
 
         f(&src, &mut sink);
 
-        self.finish(sink.token_pos, &mut src)
+        let r = self.finish(sink.token_pos, &mut src);
+        if sink.error {
+            return None;
+        }
+        r
     }
 
     fn finish(self, parsed_token: usize, src: &mut SubtreeTokenSource) -> Option<tt::TokenTree> {
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 16a053b49..20da1e9f7 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -21,6 +21,7 @@ impl<'a> From<&'a [tt::TokenTree]> for TokenSeq<'a> {
     }
 }
 
+#[derive(Debug)]
 enum DelimToken<'a> {
     Delim(&'a tt::Delimiter, bool),
     Token(&'a tt::TokenTree),
@@ -52,10 +53,10 @@ impl<'a> TokenSeq<'a> {
         }
     }
 
-    fn child_slice(&self) -> &[tt::TokenTree] {
+    fn child_slice(&self, pos: usize) -> &[tt::TokenTree] {
         match self {
-            TokenSeq::Subtree(subtree) => &subtree.token_trees,
-            TokenSeq::Seq(tokens) => &tokens,
+            TokenSeq::Subtree(subtree) => &subtree.token_trees[pos - 1..],
+            TokenSeq::Seq(tokens) => &tokens[pos..],
         }
     }
 }
@@ -114,7 +115,7 @@ impl<'a> SubTreeWalker<'a> {
                     WalkCursor::Token(0, convert_delim(subtree.delimiter, false))
                 }
                 tt::TokenTree::Leaf(leaf) => {
-                    let next_tokens = self.ts.child_slice();
+                    let next_tokens = self.ts.child_slice(0);
                     WalkCursor::Token(0, convert_leaf(&next_tokens, leaf))
                 }
             },
@@ -190,8 +191,8 @@ impl<'a> SubTreeWalker<'a> {
                         WalkCursor::Token(new_idx, convert_delim(subtree.delimiter, backward))
                     }
                     tt::TokenTree::Leaf(leaf) => {
-                        let next_tokens = top.child_slice();
-                        WalkCursor::Token(pos, convert_leaf(&next_tokens[pos..], leaf))
+                        let next_tokens = top.child_slice(pos);
+                        WalkCursor::Token(pos, convert_leaf(&next_tokens, leaf))
                     }
                 },
                 DelimToken::Delim(delim, is_end) => {
@@ -429,7 +430,12 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
 }
 
 fn convert_ident(ident: &tt::Ident) -> TtToken {
-    let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
+    let kind = if let Some('\'') = ident.text.chars().next() {
+        LIFETIME
+    } else {
+        SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT)
+    };
+
     TtToken { kind, is_joint_to_next: false, text: ident.text.clone(), n_tokens: 1 }
 }
 
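
Editor's note: a sketch of the `convert_ident` change; the function and `TtToken` are private to `subtree_source`, so picture this inside that module's tests. This LIFETIME classification is what lets the `&'a Baz<u8>` case in `test_ty_with_complex_type` above round-trip.

```rust
// Editor's sketch: an identifier whose text starts with a single quote is now
// classified as LIFETIME instead of IDENT.
#[test]
fn lifetime_ident_is_classified_as_lifetime() {
    let lifetime = tt::Ident { text: "'a".into(), id: tt::TokenId::unspecified() };
    assert_eq!(convert_ident(&lifetime).kind, LIFETIME);

    let plain = tt::Ident { text: "a".into(), id: tt::TokenId::unspecified() };
    assert_eq!(convert_ident(&plain).kind, IDENT);
}
```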
diff --git a/crates/ra_tt/src/lib.rs b/crates/ra_tt/src/lib.rs
index 0b0b9b4d2..62c5ac52a 100644
--- a/crates/ra_tt/src/lib.rs
+++ b/crates/ra_tt/src/lib.rs
@@ -149,3 +149,19 @@ impl fmt::Display for Punct {
         fmt::Display::fmt(&self.char, f)
     }
 }
+
+impl Subtree {
+    /// Count the number of tokens recursively
+    pub fn count(&self) -> usize {
+        let children_count = self
+            .token_trees
+            .iter()
+            .map(|c| match c {
+                TokenTree::Subtree(c) => c.count(),
+                _ => 0,
+            })
+            .sum::<usize>();
+
+        self.token_trees.len() + children_count
+    }
+}
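
Editor's note: a quick, hand-built check of the recursive count that backs the 65536-token guard in `parse_macro`; the `ident` helper below is the editor's, not part of `ra_tt`.

```rust
// Editor's illustration: the leaf `a`, the nested subtree itself, and the two
// leaves `b` and `c` inside it add up to four tokens.
fn ident(text: &str) -> tt::TokenTree {
    tt::Leaf::from(tt::Ident { text: text.into(), id: tt::TokenId::unspecified() }).into()
}

fn main() {
    let inner = tt::Subtree {
        delimiter: tt::Delimiter::None,
        token_trees: vec![ident("b"), ident("c")],
    };
    let outer = tt::Subtree {
        delimiter: tt::Delimiter::None,
        token_trees: vec![ident("a"), inner.into()],
    };
    assert_eq!(outer.count(), 4);
}
```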