Diffstat (limited to 'crates')
 crates/ra_hir/src/ids.rs            |  10
 crates/ra_mbe/Cargo.toml            |   2
 crates/ra_mbe/src/lib.rs            | 216
 crates/ra_mbe/src/mbe_expander.rs   | 167
 crates/ra_mbe/src/mbe_parser.rs     |  40
 crates/ra_mbe/src/subtree_source.rs |  29
 crates/ra_mbe/src/syntax_bridge.rs  |  41
 crates/ra_mbe/src/tt_cursor.rs      | 106
 crates/ra_parser/src/grammar.rs     |  17
 9 files changed, 541 insertions(+), 87 deletions(-)
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index c7849c995..b0e9b1f9a 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -128,8 +128,14 @@ pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>);
 pub(crate) fn macro_def_query(db: &impl DefDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
     let macro_call = id.0.to_node(db);
     let arg = macro_call.token_tree()?;
-    let (tt, _) = mbe::ast_to_token_tree(arg)?;
-    let rules = MacroRules::parse(&tt).ok()?;
+    let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| {
+        log::warn!("fail on macro_def to token tree: {:#?}", arg);
+        None
+    })?;
+    let rules = MacroRules::parse(&tt).ok().or_else(|| {
+        log::warn!("fail on macro_def parse: {:#?}", tt);
+        None
+    })?;
     Some(Arc::new(rules))
 }
 
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml
index 1d0c2a340..68f559295 100644
--- a/crates/ra_mbe/Cargo.toml
+++ b/crates/ra_mbe/Cargo.toml
@@ -10,3 +10,5 @@ ra_parser = { path = "../ra_parser" }
 tt = { path = "../ra_tt", package = "ra_tt" }
 itertools = "0.8.0"
 rustc-hash = "1.0.0"
+smallvec = "0.6.9"
+log = "0.4.5"
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index eedc0c5dd..7817232d6 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -24,6 +24,7 @@ mod subtree_source;
 mod subtree_parser;
 
 use ra_syntax::SmolStr;
+use smallvec::SmallVec;
 
 pub use tt::{Delimiter, Punct};
 
@@ -99,10 +100,17 @@ pub(crate) struct Subtree {
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Separator {
+    Literal(tt::Literal),
+    Ident(tt::Ident),
+    Puncts(SmallVec<[tt::Punct; 3]>),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub(crate) struct Repeat {
     pub(crate) subtree: Subtree,
     pub(crate) kind: RepeatKind,
-    pub(crate) separator: Option<char>,
+    pub(crate) separator: Option<Separator>,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -175,8 +183,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let expansion = rules.expand(&invocation_tt).unwrap();
         assert_eq!(
             expansion.to_string(),
-            "impl From < Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree :: Leaf (it)}} \
-             impl From < Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree :: Subtree (it)}}"
+            "impl From <Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \
+             impl From <Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}"
         )
     }
 
@@ -384,7 +392,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
             "#,
         );
 
-        assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar () ;}");
+        assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ;bar ()}");
     }
 
     #[test]
@@ -417,6 +425,42 @@ impl_froms!(TokenTree: Leaf, Subtree);
     }
 
     #[test]
+    fn test_match_group_with_multichar_sep() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} );
+        }"#,
+        );
+
+        assert_expansion(&rules, "foo! (fn baz {true true} )", "fn baz () -> bool {true &&true}");
+    }
+
+    #[test]
+    fn test_match_group_zero_match() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            ( $($i:ident)* ) => ();
+        }"#,
+        );
+
+        assert_expansion(&rules, "foo! ()", "");
+    }
+
+    #[test]
+    fn test_match_group_in_group() {
+        let rules = create_rules(
+            r#"
+        macro_rules! foo {
+            { $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* );
+        }"#,
+        );
+
+        assert_expansion(&rules, "foo! ( (a b) )", "(a b)");
+    }
+
+    #[test]
     fn test_expand_to_item_list() {
         let rules = create_rules(
             "
@@ -597,7 +641,7 @@ MACRO_ITEMS@[0; 40)
         assert_expansion(
             &rules,
             "foo! { bar::<u8>::baz::<u8> }",
-            "fn foo () {let a = bar :: < u8 > :: baz :: < u8 > ;}",
+            "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}",
         );
     }
 
@@ -891,7 +935,7 @@ MACRO_ITEMS@[0; 40)
         }
         "#,
         );
-        assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref < 'a > {s : & 'a str}"#);
+        assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#);
     }
 
     #[test]
@@ -1063,7 +1107,165 @@ macro_rules! int_base {
         );
 
         assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#,
-            "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt :: Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
+            "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
+        );
+    }
+
+    #[test]
+    fn test_generate_pattern_iterators() {
+        // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs
+        let rules = create_rules(
+            r#"
+macro_rules! generate_pattern_iterators {
+    { double ended; with $(#[$common_stability_attribute:meta])*,
+                        $forward_iterator:ident,
+                        $reverse_iterator:ident, $iterty:ty
+    } => {
+        fn foo(){}
+    }
+}
+"#,
+        );
+
+        assert_expansion(&rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str )"#,
+            "fn foo () {}");
+    }
+
+    #[test]
+    fn test_impl_fn_for_zst() {
+        // from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs
+        let rules = create_rules(
+            r#"
+macro_rules! impl_fn_for_zst {
+    {   $( $( #[$attr: meta] )*
+        struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
+            |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty
+            $body: block; )+
+    } => {
+        $(
+            $( #[$attr] )*
+            struct $Name;
+
+            impl $( <$( $lifetime ),+> )? Fn<($( $ArgTy, )*)> for $Name {
+                #[inline]
+                extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+                    $body
+                }
+            }
+
+            impl $( <$( $lifetime ),+> )? FnMut<($( $ArgTy, )*)> for $Name {
+                #[inline]
+                extern "rust-call" fn call_mut(
+                    &mut self,
+                    ($( $arg, )*): ($( $ArgTy, )*)
+                ) -> $ReturnTy {
+                    Fn::call(&*self, ($( $arg, )*))
+                }
+            }
+
+            impl $( <$( $lifetime ),+> )? FnOnce<($( $ArgTy, )*)> for $Name {
+                type Output = $ReturnTy;
+
+                #[inline]
+                extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+                    Fn::call(&self, ($( $arg, )*))
+                }
+            }
+        )+
+    }
+}
+"#,
+        );
+
+        assert_expansion(&rules, r#"
+impl_fn_for_zst ! {
+    # [ derive ( Clone ) ]
+    struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug {
+        c . escape_debug_ext ( false )
+    } ;
+
+    # [ derive ( Clone ) ]
+    struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode {
+        c . escape_unicode ( )
+    } ;
+    # [ derive ( Clone ) ]
+    struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault {
+        c . escape_default ( )
+    } ;
+}
+"#,
+            "# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}");
+    }
+
+    #[test]
+    fn test_impl_nonzero_fmt() {
+        // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12
+        let rules = create_rules(
+            r#"
+macro_rules! impl_nonzero_fmt {
+    ( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => {
+        fn foo() {}
+    }
+}
+"#,
         );
+
+        assert_expansion(&rules, r#"impl_nonzero_fmt ! { # [ stable ( feature = "nonzero" , since = "1.28.0" ) ] ( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for NonZeroU8 }"#,
+            "fn foo () {}");
+    }
+
+    #[test]
+    fn test_cfg_if_items() {
+        // from https://github.com/rust-lang/rust/blob/33fe1131cadba69d317156847be9a402b89f11bb/src/libstd/macros.rs#L986
+        let rules = create_rules(
+            r#"
+macro_rules! __cfg_if_items {
+    (($($not:meta,)*) ; ) => {};
+    (($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+        __cfg_if_items! { ($($not,)* $($m,)*) ; $($rest)* }
+    }
+}
+"#,
+        );
+
+        assert_expansion(&rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
+            "__cfg_if_items ! {(rustdoc , ) ; }");
+    }
+
+    #[test]
+    fn test_cfg_if_main() {
+        // from https://github.com/rust-lang/rust/blob/3d211248393686e0f73851fc7548f6605220fbe1/src/libpanic_unwind/macros.rs#L9
+        let rules = create_rules(
+            r#"
+macro_rules! cfg_if {
+    ($(
+        if #[cfg($($meta:meta),*)] { $($it:item)* }
+    ) else * else {
+        $($it2:item)*
+    }) => {
+        __cfg_if_items! {
+            () ;
+            $( ( ($($meta),*) ($($it)*) ), )*
+            ( () ($($it2)*) ),
+        }
+    }
+}
+"#,
+        );
+
+        assert_expansion(&rules, r#"
+cfg_if ! {
+    if # [ cfg ( target_env = "msvc" ) ] {
+        // no extra unwinder support needed
+    } else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] {
+        // no unwinder on the system!
+    } else {
+        mod libunwind ;
+        pub use libunwind :: * ;
+    }
+}
+"#,
+            "__cfg_if_items ! {() ; (() (mod libunwind ; pub use libunwind :: * ;)) ,}");
     }
 }
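Note on the lib.rs change above: Repeat::separator goes from Option<char> to Option<Separator> so that multi-character separators, such as the && in $($i)&&* exercised by test_match_group_with_multichar_sep, can be represented at all. Below is a rough, self-contained sketch of what that representation looks like; the types are simplified stand-ins (Vec instead of SmallVec, String instead of the tt literal/ident types), not the crate's real definitions.

// Simplified stand-ins for the new Separator representation.
#[derive(Debug, Clone, PartialEq, Eq)]
struct Punct {
    char: char,
}

#[derive(Debug, Clone, PartialEq, Eq)]
enum Separator {
    Literal(String),
    Ident(String),
    Puncts(Vec<Punct>),
}

fn main() {
    // The old Option<char> field could hold the comma, but not the two
    // joined '&' puncts of `$($i)&&*`.
    let comma = Separator::Puncts(vec![Punct { char: ',' }]);
    let and_and = Separator::Puncts(vec![Punct { char: '&' }, Punct { char: '&' }]);
    let kw = Separator::Ident("then".to_string());
    let lit = Separator::Literal("1".to_string());
    assert_ne!(comma, and_and);
    println!("{:?} {:?} {:?} {:?}", comma, and_and, kw, lit);
}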
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index 00fb09a3b..d5189b537 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -21,7 +21,10 @@ fn expand_rule(rule: &crate::Rule, input: &tt::Subtree) -> Result<tt::Subtree, E
     if !input.is_eof() {
         return Err(ExpandError::UnexpectedToken);
     }
-    expand_subtree(&rule.rhs, &bindings, &mut Vec::new())
+
+    let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new(), var_expanded: false };
+
+    expand_subtree(&rule.rhs, &mut ctx)
 }
 
 /// The actual algorithm for expansion is not too hard, but is pretty tricky.
@@ -179,10 +182,10 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                         // Enable followiing code when everything is fixed
                         // At least we can dogfood itself to not stackoverflow
                         //
-                        // "tt" => {
-                        //     let token = input.eat().ok_or(ExpandError::UnexpectedToken)?.clone();
-                        //     res.inner.insert(text.clone(), Binding::Simple(token.into()));
-                        // }
+                        "tt" => {
+                            let token = input.eat().ok_or(ExpandError::UnexpectedToken)?.clone();
+                            res.inner.insert(text.clone(), Binding::Simple(token.into()));
+                        }
                         "item" => {
                             let item =
                                 input.eat_item().ok_or(ExpandError::UnexpectedToken)?.clone();
@@ -196,6 +199,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                         "literal" => {
                             let literal =
                                 input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone();
+
                             res.inner.insert(
                                 text.clone(),
                                 Binding::Simple(tt::Leaf::from(literal).into()),
@@ -210,7 +214,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                     }
                 }
                 crate::Leaf::Punct(punct) => {
-                    if input.eat_punct() != Some(punct) {
+                    if !input.eat_punct().map(|p| p.char == punct.char).unwrap_or(false) {
                         return Err(ExpandError::UnexpectedToken);
                     }
                 }
@@ -224,20 +228,54 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
             crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => {
                 // Dirty hack to make macro-expansion terminate.
                 // This should be replaced by a propper macro-by-example implementation
-                let mut limit = 128;
+                let mut limit = 65536;
                 let mut counter = 0;
-                while let Ok(nested) = match_lhs(subtree, input) {
-                    counter += 1;
-                    limit -= 1;
-                    if limit == 0 {
-                        break;
-                    }
-                    res.push_nested(nested)?;
-                    if let Some(separator) = *separator {
-                        if !input.is_eof() {
-                            if input.eat_punct().map(|p| p.char) != Some(separator) {
-                                return Err(ExpandError::UnexpectedToken);
+
+                let mut memento = input.save();
+
+                loop {
+                    match match_lhs(subtree, input) {
+                        Ok(nested) => {
+                            counter += 1;
+                            limit -= 1;
+                            if limit == 0 {
+                                log::warn!("match_lhs excced in repeat pattern exceed limit => {:#?}\n{:#?}\n{:#?}\n{:#?}", subtree, input, kind, separator);
+                                break;
+                            }
+
+                            memento = input.save();
+                            res.push_nested(nested)?;
+                            if counter == 1 {
+                                if let crate::RepeatKind::ZeroOrOne = kind {
+                                    break;
+                                }
                             }
+
+                            if let Some(separator) = separator {
+                                use crate::Separator::*;
+
+                                if !input
+                                    .eat_seperator()
+                                    .map(|sep| match (sep, separator) {
+                                        (Ident(ref a), Ident(ref b)) => a.text == b.text,
+                                        (Literal(ref a), Literal(ref b)) => a.text == b.text,
+                                        (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => {
+                                            let a_iter = a.iter().map(|a| a.char);
+                                            let b_iter = b.iter().map(|b| b.char);
+                                            a_iter.eq(b_iter)
+                                        }
+                                        _ => false,
+                                    })
+                                    .unwrap_or(false)
+                                {
+                                    input.rollback(memento);
+                                    break;
+                                }
+                            }
+                        }
+                        Err(_) => {
+                            input.rollback(memento);
+                            break;
                         }
                     }
                 }
@@ -246,10 +284,6 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
                     crate::RepeatKind::OneOrMore if counter == 0 => {
                         return Err(ExpandError::UnexpectedToken);
                     }
-                    crate::RepeatKind::ZeroOrOne if counter > 1 => {
-                        return Err(ExpandError::UnexpectedToken);
-                    }
-
                     _ => {}
                 }
             }
@@ -273,15 +307,21 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
     Ok(res)
 }
 
+#[derive(Debug)]
+struct ExpandCtx<'a> {
+    bindings: &'a Bindings,
+    nesting: Vec<usize>,
+    var_expanded: bool,
+}
+
 fn expand_subtree(
     template: &crate::Subtree,
-    bindings: &Bindings,
-    nesting: &mut Vec<usize>,
+    ctx: &mut ExpandCtx,
 ) -> Result<tt::Subtree, ExpandError> {
     let token_trees = template
         .token_trees
         .iter()
-        .map(|it| expand_tt(it, bindings, nesting))
+        .map(|it| expand_tt(it, ctx))
         .collect::<Result<Vec<_>, ExpandError>>()?;
 
     Ok(tt::Subtree { token_trees, delimiter: template.delimiter })
@@ -303,43 +343,81 @@ fn reduce_single_token(mut subtree: tt::Subtree) -> tt::TokenTree {
 
 fn expand_tt(
     template: &crate::TokenTree,
-    bindings: &Bindings,
-    nesting: &mut Vec<usize>,
+    ctx: &mut ExpandCtx,
 ) -> Result<tt::TokenTree, ExpandError> {
     let res: tt::TokenTree = match template {
-        crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, bindings, nesting)?.into(),
+        crate::TokenTree::Subtree(subtree) => expand_subtree(subtree, ctx)?.into(),
         crate::TokenTree::Repeat(repeat) => {
             let mut token_trees: Vec<tt::TokenTree> = Vec::new();
-            nesting.push(0);
+            ctx.nesting.push(0);
             // Dirty hack to make macro-expansion terminate.
             // This should be replaced by a propper macro-by-example implementation
-            let mut limit = 128;
-            let mut has_sep = false;
+            let mut limit = 65536;
+            let mut has_seps = 0;
+            let mut counter = 0;
 
-            while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) {
+            let mut some_var_expanded = false;
+            ctx.var_expanded = false;
+
+            while let Ok(t) = expand_subtree(&repeat.subtree, ctx) {
+                // if no var expaned in the child, we count it as a fail
+                if !ctx.var_expanded {
+                    break;
+                }
+                some_var_expanded = true;
+                ctx.var_expanded = false;
+
+                counter += 1;
                 limit -= 1;
                 if limit == 0 {
+                    log::warn!(
+                        "expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}",
+                        template,
+                        ctx
+                    );
                     break;
                 }
-                let idx = nesting.pop().unwrap();
-                nesting.push(idx + 1);
+
+                let idx = ctx.nesting.pop().unwrap();
+                ctx.nesting.push(idx + 1);
                 token_trees.push(reduce_single_token(t).into());
 
-                if let Some(sep) = repeat.separator {
-                    let punct =
-                        tt::Leaf::from(tt::Punct { char: sep, spacing: tt::Spacing::Alone });
-                    token_trees.push(punct.into());
-                    has_sep = true;
+                if let Some(ref sep) = repeat.separator {
+                    match sep {
+                        crate::Separator::Ident(ident) => {
+                            has_seps = 1;
+                            token_trees.push(tt::Leaf::from(ident.clone()).into());
+                        }
+                        crate::Separator::Literal(lit) => {
+                            has_seps = 1;
+                            token_trees.push(tt::Leaf::from(lit.clone()).into());
+                        }
+
+                        crate::Separator::Puncts(puncts) => {
+                            has_seps = puncts.len();
+                            for punct in puncts {
+                                token_trees.push(tt::Leaf::from(*punct).into());
+                            }
+                        }
+                    }
+                }
+
+                if let crate::RepeatKind::ZeroOrOne = repeat.kind {
+                    break;
                 }
             }
-            nesting.pop().unwrap();
 
-            // Dirty hack for remove the last sep
-            // if it is a "," undo the push
-            if has_sep && repeat.separator.unwrap() == ',' {
+            ctx.var_expanded = some_var_expanded;
+
+            ctx.nesting.pop().unwrap();
+            for _ in 0..has_seps {
                 token_trees.pop();
             }
 
+            if crate::RepeatKind::OneOrMore == repeat.kind && counter == 0 {
+                return Err(ExpandError::UnexpectedToken);
+            }
+
             // Check if it is a singel token subtree without any delimiter
             // e.g {Delimiter:None> ['>'] /Delimiter:None>}
             reduce_single_token(tt::Subtree { token_trees, delimiter: tt::Delimiter::None })
@@ -356,7 +434,8 @@ fn expand_tt(
                 tt::Leaf::from(tt::Ident { text: "$crate".into(), id: TokenId::unspecified() })
                     .into()
             } else {
-                let tkn = bindings.get(&v.text, nesting)?.clone();
+                let tkn = ctx.bindings.get(&v.text, &ctx.nesting)?.clone();
+                ctx.var_expanded = true;
 
                 if let tt::TokenTree::Subtree(subtree) = tkn {
                     reduce_single_token(subtree)
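Note on the expander change above: repetition matching now runs in an explicit loop that saves a cursor memento after every successful match_lhs and rolls the cursor back to it when an attempt fails, instead of leaving the cursor wherever a failed while-let iteration stopped. A minimal, self-contained sketch of that save/rollback pattern follows; the toy Cursor and try_match stand in for TtCursor and match_lhs and are not the crate's real code.

// Minimal sketch of the save/rollback (memento) pattern around repetition matching.
struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

struct Memento {
    pos: usize,
}

impl<'a> Cursor<'a> {
    fn save(&self) -> Memento {
        Memento { pos: self.pos }
    }
    fn rollback(&mut self, m: Memento) {
        self.pos = m.pos;
    }
    // Stand-in for one match_lhs attempt: only "ident" tokens match here.
    fn try_match(&mut self) -> Result<&'a str, ()> {
        match self.tokens.get(self.pos) {
            Some(&t) if t == "ident" => {
                self.pos += 1;
                Ok(t)
            }
            _ => Err(()),
        }
    }
}

fn main() {
    let tokens = ["ident", "ident", ")"];
    let mut input = Cursor { tokens: &tokens, pos: 0 };
    let mut matched = Vec::new();
    let mut memento = input.save();
    loop {
        match input.try_match() {
            Ok(t) => {
                // Remember the position after each successful iteration ...
                memento = input.save();
                matched.push(t);
            }
            Err(_) => {
                // ... so a failed attempt can be undone before giving up.
                input.rollback(memento);
                break;
            }
        }
    }
    assert_eq!(matched.len(), 2);
    assert_eq!(input.pos, 2); // the ")" is left for the rest of the pattern
}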
diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs
index 0710062d9..c7ab463e2 100644
--- a/crates/ra_mbe/src/mbe_parser.rs
+++ b/crates/ra_mbe/src/mbe_parser.rs
@@ -74,18 +74,11 @@ fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result<crate::Var, ParseErr
     Ok(crate::Var { text, kind })
 }
 
-fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
-    let subtree = p.eat_subtree().unwrap();
-    let mut subtree = parse_subtree(subtree, transcriber)?;
-    subtree.delimiter = crate::Delimiter::None;
-    let sep = p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?;
-    let (separator, rep) = match sep.char {
-        '*' | '+' | '?' => (None, sep.char),
-        char => {
-            (Some(char), p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?.char)
-        }
-    };
-
+fn mk_repeat(
+    rep: char,
+    subtree: crate::Subtree,
+    separator: Option<crate::Separator>,
+) -> Result<crate::Repeat, ParseError> {
     let kind = match rep {
         '*' => crate::RepeatKind::ZeroOrMore,
         '+' => crate::RepeatKind::OneOrMore,
@@ -95,6 +88,27 @@ fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, Pa
     Ok(crate::Repeat { subtree, kind, separator })
 }
 
+fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
+    let subtree = p.eat_subtree().unwrap();
+    let mut subtree = parse_subtree(subtree, transcriber)?;
+    subtree.delimiter = crate::Delimiter::None;
+
+    if let Some(rep) = p.at_punct() {
+        match rep.char {
+            '*' | '+' | '?' => {
+                p.bump();
+                return mk_repeat(rep.char, subtree, None);
+            }
+            _ => {}
+        }
+    }
+
+    let sep = p.eat_seperator().ok_or(ParseError::Expected(String::from("separator")))?;
+    let rep = p.eat_punct().ok_or(ParseError::Expected(String::from("repeat")))?;
+
+    mk_repeat(rep.char, subtree, Some(sep))
+}
+
 #[cfg(test)]
 mod tests {
     use ra_syntax::{ast, AstNode};
@@ -109,7 +123,7 @@ mod tests {
         is_valid("($i:ident) => ()");
         expect_err("$i:ident => ()", "subtree");
        expect_err("($i:ident) ()", "`=`");
-        expect_err("($($i:ident)_) => ()", "separator");
+        expect_err("($($i:ident)_) => ()", "repeat");
     }
 
     fn expect_err(macro_body: &str, expected: &str) {
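Note on the parser change above: parse_repeat now accepts a bare *, + or ? directly and otherwise eats one separator before requiring the repeat operator, which is why the expected error for ($($i:ident)_) => () changes from "separator" to "repeat". Below is a simplified, standalone model of that decision; plain chars stand in for token trees and TtCursor, so this is an illustration of the flow rather than the real implementation.

// Simplified model of the new parse_repeat flow over the tokens that follow
// the `$( ... )` subtree.
#[derive(Debug, PartialEq)]
enum RepeatKind {
    ZeroOrMore,
    OneOrMore,
    ZeroOrOne,
}

fn mk_repeat(rep: char) -> Result<RepeatKind, String> {
    match rep {
        '*' => Ok(RepeatKind::ZeroOrMore),
        '+' => Ok(RepeatKind::OneOrMore),
        '?' => Ok(RepeatKind::ZeroOrOne),
        _ => Err(format!("expected repeat, got {}", rep)),
    }
}

fn parse_repeat(rest: &[char]) -> Result<(Option<Vec<char>>, RepeatKind), String> {
    // A bare repeat operator means there is no separator at all.
    if let Some(&c) = rest.first() {
        if matches!(c, '*' | '+' | '?') {
            return Ok((None, mk_repeat(c)?));
        }
    }
    // Otherwise eat the separator, then require the repeat operator.
    let mut sep = Vec::new();
    let mut iter = rest.iter().copied().peekable();
    while let Some(&c) = iter.peek() {
        if matches!(c, '*' | '+' | '?') {
            break;
        }
        sep.push(c);
        iter.next();
    }
    let rep = iter.next().ok_or_else(|| "expected repeat".to_string())?;
    Ok((Some(sep), mk_repeat(rep)?))
}

fn main() {
    assert_eq!(parse_repeat(&['*']), Ok((None, RepeatKind::ZeroOrMore)));
    assert_eq!(parse_repeat(&[',', '*']), Ok((Some(vec![',']), RepeatKind::ZeroOrMore)));
    assert_eq!(parse_repeat(&['&', '&', '*']), Ok((Some(vec!['&', '&']), RepeatKind::ZeroOrMore)));
    assert!(parse_repeat(&['_']).is_err()); // `($($i:ident)_) => ()` now reports "repeat"
}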
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 3229cfa8f..6255ea304 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -212,7 +212,7 @@ impl<'a> SubTreeWalker<'a> {
 }
 
 pub(crate) trait Querier {
-    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr);
+    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool);
 }
 
 // A wrapper class for ref cell
@@ -292,9 +292,10 @@ impl<'a> WalkerOwner<'a> {
 }
 
 impl<'a> Querier for WalkerOwner<'a> {
-    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr) {
-        let tkn = self.get(uidx).unwrap();
-        (tkn.kind, tkn.text)
+    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool) {
+        self.get(uidx)
+            .map(|tkn| (tkn.kind, tkn.text, tkn.is_joint_to_next))
+            .unwrap_or_else(|| (SyntaxKind::EOF, "".into(), false))
     }
 }
 
@@ -342,7 +343,7 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
     }
 }
 
-struct TokenPeek<'a, I>
+pub(crate) struct TokenPeek<'a, I>
 where
     I: Iterator<Item = &'a tt::TokenTree>,
 {
@@ -365,7 +366,7 @@ where
         TokenPeek { iter: itertools::multipeek(iter) }
     }
 
-    fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
+    pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
         if p.spacing != tt::Spacing::Joint {
             return None;
         }
@@ -375,7 +376,7 @@ where
         Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint))
     }
 
-    fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
+    pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
         self.current_punct2(p).and_then(|((p0, p1), last_joint)| {
             if !last_joint {
                 None
@@ -437,12 +438,14 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken {
 }
 
 fn convert_literal(l: &tt::Literal) -> TtToken {
-    TtToken {
-        kind: classify_literal(&l.text).unwrap().kind,
-        is_joint_to_next: false,
-        text: l.text.clone(),
-        n_tokens: 1,
-    }
+    let kind =
+        classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() {
+            "true" => SyntaxKind::TRUE_KW,
+            "false" => SyntaxKind::FALSE_KW,
+            _ => panic!("Fail to convert given literal {:#?}", &l),
+        });
+
+    TtToken { kind, is_joint_to_next: false, text: l.text.clone(), n_tokens: 1 }
 }
 
 fn convert_ident(ident: &tt::Ident) -> TtToken {
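Note on the subtree_source change above: convert_literal no longer unwraps classify_literal blindly; when classification fails, true and false are mapped to their keyword kinds instead of panicking. A small standalone sketch of that fallback is shown below; Kind and classify are simplified stand-ins for SyntaxKind and ra_syntax's classify_literal, not the real API.

// Sketch of the literal-conversion fallback for true/false.
#[derive(Debug, PartialEq)]
enum Kind {
    IntNumber,
    TrueKw,
    FalseKw,
}

fn classify(text: &str) -> Option<Kind> {
    if !text.is_empty() && text.chars().all(|c| c.is_ascii_digit()) {
        Some(Kind::IntNumber)
    } else {
        None
    }
}

fn convert_literal(text: &str) -> Kind {
    classify(text).unwrap_or_else(|| match text {
        "true" => Kind::TrueKw,
        "false" => Kind::FalseKw,
        _ => panic!("Fail to convert given literal {:?}", text),
    })
}

fn main() {
    assert_eq!(convert_literal("42"), Kind::IntNumber);
    assert_eq!(convert_literal("true"), Kind::TrueKw);
    assert_eq!(convert_literal("false"), Kind::FalseKw);
}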
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 38a481029..e0f228ce9 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -123,6 +123,11 @@ fn convert_tt(
     global_offset: TextUnit,
     tt: &SyntaxNode,
 ) -> Option<tt::Subtree> {
+    // This tree is empty
+    if tt.first_child_or_token().is_none() {
+        return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
+    }
+
     let first_child = tt.first_child_or_token()?;
     let last_child = tt.last_child_or_token()?;
     let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
@@ -133,7 +138,9 @@ fn convert_tt(
     };
 
     let mut token_trees = Vec::new();
-    for child in tt.children_with_tokens().skip(skip_first as usize) {
+    let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
+
+    while let Some(child) = child_iter.next() {
         if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia()
         {
             continue;
@@ -152,12 +159,25 @@ fn convert_tt(
                 prev = Some(char)
             }
             if let Some(char) = prev {
-                token_trees.push(
-                    tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into(),
-                );
+                let spacing = match child_iter.peek() {
+                    Some(SyntaxElement::Token(token)) => {
+                        if token.kind().is_punct() {
+                            tt::Spacing::Joint
+                        } else {
+                            tt::Spacing::Alone
+                        }
+                    }
+                    _ => tt::Spacing::Alone,
+                };
+
+                token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
             }
         } else {
-            let child: tt::TokenTree = if token.kind().is_keyword()
+            let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW
+                || token.kind() == SyntaxKind::FALSE_KW
+            {
+                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+            } else if token.kind().is_keyword()
                 || token.kind() == IDENT
                 || token.kind() == LIFETIME
             {
@@ -218,7 +238,16 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
         self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
-        self.inner.token(kind, text)
+        self.inner.token(kind, text);
+
+        // // Add a white space to token
+        // let (last_kind, _, last_joint_to_next ) = self.src_querier.token(self.token_pos-n_tokens as usize);
+        // if !last_joint_to_next && last_kind.is_punct() {
+        //     let (cur_kind, _, _ ) = self.src_querier.token(self.token_pos);
+        //     if cur_kind.is_punct() {
+        //         self.inner.token(WHITESPACE, " ".into());
+        //     }
+        // }
     }
 
     fn start_node(&mut self, kind: SyntaxKind) {
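Note on the syntax_bridge change above: when lowering punctuation, the bridge now peeks at the next sibling token and marks a punct as Joint only when that sibling is also punctuation; this joint flag is what later lets the cursor re-glue multi-character operators. A tiny self-contained illustration of the rule follows; plain string tokens stand in for syntax tokens, so this is not the real tt/ra_syntax API.

// Tiny illustration of the spacing rule: a punct is Joint when the next token
// is also a punct, otherwise Alone.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

fn spacing_for(tokens: &[&str], idx: usize) -> Spacing {
    let is_punct = |s: &str| !s.is_empty() && s.chars().all(|c| c.is_ascii_punctuation());
    match tokens.get(idx + 1) {
        Some(&next) if is_punct(next) => Spacing::Joint,
        _ => Spacing::Alone,
    }
}

fn main() {
    // bar::<u8>  ->  ':' ':' '<' "u8" '>'
    let tokens = [":", ":", "<", "u8", ">"];
    assert_eq!(spacing_for(&tokens, 0), Spacing::Joint); // ':' is glued to ':'
    assert_eq!(spacing_for(&tokens, 1), Spacing::Joint); // ':' is glued to '<'
    assert_eq!(spacing_for(&tokens, 2), Spacing::Alone); // '<' is followed by an ident
}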
diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs
index 741b5ea1c..eef642a9c 100644
--- a/crates/ra_mbe/src/tt_cursor.rs
+++ b/crates/ra_mbe/src/tt_cursor.rs
@@ -1,12 +1,18 @@
 use crate::ParseError;
 use crate::subtree_parser::Parser;
+use crate::subtree_source::TokenPeek;
+use smallvec::{SmallVec, smallvec};
 
-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub(crate) struct TtCursor<'a> {
     subtree: &'a tt::Subtree,
     pos: usize,
 }
 
+pub(crate) struct TtCursorMemento {
+    pos: usize,
+}
+
 impl<'a> TtCursor<'a> {
     pub(crate) fn new(subtree: &'a tt::Subtree) -> TtCursor<'a> {
         TtCursor { subtree, pos: 0 }
@@ -157,4 +163,102 @@ impl<'a> TtCursor<'a> {
             Err(ParseError::Expected(format!("`{}`", char)))
         }
     }
+
+    fn eat_punct3(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
+        let sec = self.eat_punct()?.clone();
+        let third = self.eat_punct()?.clone();
+        Some(smallvec![p.clone(), sec, third])
+    }
+
+    fn eat_punct2(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
+        let sec = self.eat_punct()?.clone();
+        Some(smallvec![p.clone(), sec])
+    }
+
+    fn eat_multi_char_punct<'b, I>(
+        &mut self,
+        p: &tt::Punct,
+        iter: &mut TokenPeek<'b, I>,
+    ) -> Option<SmallVec<[tt::Punct; 3]>>
+    where
+        I: Iterator<Item = &'b tt::TokenTree>,
+    {
+        if let Some((m, _)) = iter.current_punct3(p) {
+            if let r @ Some(_) = match m {
+                ('<', '<', '=') | ('>', '>', '=') | ('.', '.', '.') | ('.', '.', '=') => {
+                    self.eat_punct3(p)
+                }
+                _ => None,
+            } {
+                return r;
+            }
+        }
+
+        if let Some((m, _)) = iter.current_punct2(p) {
+            if let r @ Some(_) = match m {
+                ('<', '=')
+                | ('>', '=')
+                | ('+', '=')
+                | ('-', '=')
+                | ('|', '=')
+                | ('&', '=')
+                | ('^', '=')
+                | ('/', '=')
+                | ('*', '=')
+                | ('%', '=')
+                | ('&', '&')
+                | ('|', '|')
+                | ('<', '<')
+                | ('>', '>')
+                | ('-', '>')
+                | ('!', '=')
+                | ('=', '>')
+                | ('=', '=')
+                | ('.', '.')
+                | (':', ':') => self.eat_punct2(p),
+
+                _ => None,
+            } {
+                return r;
+            }
+        }
+
+        None
+    }
+
+    pub(crate) fn eat_seperator(&mut self) -> Option<crate::Separator> {
+        match self.eat()? {
+            tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+                Some(crate::Separator::Literal(lit.clone()))
+            }
+            tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                Some(crate::Separator::Ident(ident.clone()))
+            }
+            tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+                match punct.char {
+                    '*' | '+' | '?' => return None,
+                    _ => {}
+                };
+
+                // FIXME: The parser is only handle some compositeable punct,
+                // But at this phase, some punct still is jointed.
+                // So we by pass that check here.
+                let mut peekable = TokenPeek::new(self.subtree.token_trees[self.pos..].iter());
+                let puncts = self.eat_multi_char_punct(punct, &mut peekable);
+                let puncts = puncts.unwrap_or_else(|| smallvec![punct.clone()]);
+
+                Some(crate::Separator::Puncts(puncts))
+            }
+            _ => None,
+        }
+    }
+
+    #[must_use]
+    pub(crate) fn save(&self) -> TtCursorMemento {
+        TtCursorMemento { pos: self.pos }
+    }
+
+    pub(crate) fn rollback(&mut self, memento: TtCursorMemento) {
+        self.pos = memento.pos;
+    }
 }
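Note on the tt_cursor change above: eat_seperator uses eat_multi_char_punct to re-glue punct sequences that the bridge emitted as joined single characters back into the known multi-character operators. An abbreviated standalone sketch of that gluing idea is below; the operator table is shortened and the function is a stand-in, not the crate's real cursor code.

// Sketch: recombine two joined single-char puncts into a known operator.
fn glue2(a: char, b: char) -> Option<&'static str> {
    match (a, b) {
        ('&', '&') => Some("&&"),
        ('|', '|') => Some("||"),
        ('=', '>') => Some("=>"),
        ('-', '>') => Some("->"),
        (':', ':') => Some("::"),
        ('.', '.') => Some(".."),
        _ => None,
    }
}

fn main() {
    assert_eq!(glue2('&', '&'), Some("&&"));
    assert_eq!(glue2('=', '>'), Some("=>"));
    assert_eq!(glue2('+', '?'), None); // not a real operator, left as two separate puncts
}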
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs
index 67eae749d..a538ec081 100644
--- a/crates/ra_parser/src/grammar.rs
+++ b/crates/ra_parser/src/grammar.rs
@@ -119,7 +119,22 @@ pub(crate) fn meta_item(p: &mut Parser) {
             items::token_tree(p);
             break;
         } else {
-            p.bump();
+            // https://doc.rust-lang.org/reference/attributes.html
+            // https://doc.rust-lang.org/reference/paths.html#simple-paths
+            // The start of an meta must be a simple path
+            match p.current() {
+                IDENT | COLONCOLON | SUPER_KW | SELF_KW | CRATE_KW => p.bump(),
+                EQ => {
+                    p.bump();
+                    match p.current() {
+                        c if c.is_literal() => p.bump(),
+                        TRUE_KW | FALSE_KW => p.bump(),
+                        _ => {}
+                    }
+                    break;
+                }
+                _ => break,
+            }
         }
     }
 
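Note on the grammar change above: meta_item no longer bumps arbitrary tokens; only simple-path segments are consumed, and after an equals sign a single literal (including true/false) finishes the item. A rough standalone model of the tightened loop is shown below; Tok is a simplified stand-in for the parser's SyntaxKind, with SUPER_KW/SELF_KW/CRATE_KW omitted for brevity.

// Rough model of the tightened meta-item loop.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok {
    Ident,      // an identifier such as `feature` or `serde`
    ColonColon, // ::
    Eq,         // =
    Literal,    // "rust1", 1, true, false, ...
    Other,      // anything else
}

// Returns how many tokens the loop would bump before stopping.
fn consumed_by_meta_item(tokens: &[Tok]) -> usize {
    let mut i = 0;
    while i < tokens.len() {
        match tokens[i] {
            // the start of a meta item must be a simple path
            Tok::Ident | Tok::ColonColon => i += 1,
            Tok::Eq => {
                i += 1;
                // after =, only a single literal (incl. true/false) is taken
                if matches!(tokens.get(i), Some(Tok::Literal)) {
                    i += 1;
                }
                break;
            }
            Tok::Literal | Tok::Other => break,
        }
    }
    i
}

fn main() {
    use Tok::*;
    // feature = "rust1"  -> path segment, =, literal are all consumed
    assert_eq!(consumed_by_meta_item(&[Ident, Eq, Literal]), 3);
    // serde::Serialize   -> a bare simple path
    assert_eq!(consumed_by_meta_item(&[Ident, ColonColon, Ident]), 3);
    // a token that cannot start a simple path stops the loop immediately
    assert_eq!(consumed_by_meta_item(&[Other, Ident]), 0);
}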