author      Edwin Cheng <[email protected]>   2019-04-24 16:01:32 +0100
committer   Edwin Cheng <[email protected]>   2019-04-25 19:03:55 +0100
commit      299d97b6d98cec673ff056c188ac45a17febc7d4 (patch)
tree        caec1cdbcd6350d26ebe984b3eca177079aa02b3 /crates/ra_mbe
parent      dfab545d5df974d4a50325695a25f763b7613baf (diff)
Add handling of `token` separator in mbe
Diffstat (limited to 'crates/ra_mbe')
-rw-r--r--   crates/ra_mbe/Cargo.toml             |   1
-rw-r--r--   crates/ra_mbe/src/lib.rs             | 216
-rw-r--r--   crates/ra_mbe/src/mbe_expander.rs    |  49
-rw-r--r--   crates/ra_mbe/src/mbe_parser.rs      |  40
-rw-r--r--   crates/ra_mbe/src/subtree_source.rs  |  22
-rw-r--r--   crates/ra_mbe/src/syntax_bridge.rs   |  25
-rw-r--r--   crates/ra_mbe/src/tt_cursor.rs       |  91
7 files changed, 218 insertions, 226 deletions
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml
index 1d0c2a340..1e5ed6907 100644
--- a/crates/ra_mbe/Cargo.toml
+++ b/crates/ra_mbe/Cargo.toml
@@ -10,3 +10,4 @@ ra_parser = { path = "../ra_parser" } | |||
10 | tt = { path = "../ra_tt", package = "ra_tt" } | 10 | tt = { path = "../ra_tt", package = "ra_tt" } |
11 | itertools = "0.8.0" | 11 | itertools = "0.8.0" |
12 | rustc-hash = "1.0.0" | 12 | rustc-hash = "1.0.0" |
13 | smallvec = "0.6.9" | ||
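The new `smallvec` dependency backs the `Separator::Puncts(SmallVec<[tt::Punct; 3]>)` variant introduced in lib.rs below: a multi-character separator such as `&&` or `..=` is at most three punctuation tokens, so it can be stored inline without a heap allocation. A minimal standalone sketch of that property (using a simplified `Punct` stand-in rather than the real `tt::Punct`):

```rust
use smallvec::{smallvec, SmallVec};

// Simplified stand-in for tt::Punct, for illustration only.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Punct {
    char: char,
}

fn main() {
    // The longest punctuation separators (`..=`, `<<=`, ...) are three
    // characters, so an inline capacity of 3 never spills to the heap.
    let sep: SmallVec<[Punct; 3]> =
        smallvec![Punct { char: '.' }, Punct { char: '.' }, Punct { char: '=' }];
    assert!(!sep.spilled()); // still stored inline, no allocation
    assert_eq!(sep.len(), 3);
}
```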
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index d7b18dd0f..7ebba807c 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -24,6 +24,7 @@ mod subtree_source; | |||
24 | mod subtree_parser; | 24 | mod subtree_parser; |
25 | 25 | ||
26 | use ra_syntax::SmolStr; | 26 | use ra_syntax::SmolStr; |
27 | use smallvec::SmallVec; | ||
27 | 28 | ||
28 | pub use tt::{Delimiter, Punct}; | 29 | pub use tt::{Delimiter, Punct}; |
29 | 30 | ||
@@ -99,10 +100,17 @@ pub(crate) struct Subtree { | |||
99 | } | 100 | } |
100 | 101 | ||
101 | #[derive(Clone, Debug, PartialEq, Eq)] | 102 | #[derive(Clone, Debug, PartialEq, Eq)] |
103 | pub(crate) enum Separator { | ||
104 | Literal(tt::Literal), | ||
105 | Ident(tt::Ident), | ||
106 | Puncts(SmallVec<[tt::Punct; 3]>), | ||
107 | } | ||
108 | |||
109 | #[derive(Clone, Debug, PartialEq, Eq)] | ||
102 | pub(crate) struct Repeat { | 110 | pub(crate) struct Repeat { |
103 | pub(crate) subtree: Subtree, | 111 | pub(crate) subtree: Subtree, |
104 | pub(crate) kind: RepeatKind, | 112 | pub(crate) kind: RepeatKind, |
105 | pub(crate) separator: Option<char>, | 113 | pub(crate) separator: Option<Separator>, |
106 | } | 114 | } |
107 | 115 | ||
108 | #[derive(Clone, Debug, PartialEq, Eq)] | 116 | #[derive(Clone, Debug, PartialEq, Eq)] |
@@ -175,8 +183,8 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
175 | let expansion = rules.expand(&invocation_tt).unwrap(); | 183 | let expansion = rules.expand(&invocation_tt).unwrap(); |
176 | assert_eq!( | 184 | assert_eq!( |
177 | expansion.to_string(), | 185 | expansion.to_string(), |
178 | "impl From < Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree :: Leaf (it)}} \ | 186 | "impl From <Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \ |
179 | impl From < Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree :: Subtree (it)}}" | 187 | impl From <Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}" |
180 | ) | 188 | ) |
181 | } | 189 | } |
182 | 190 | ||
@@ -384,7 +392,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
384 | "#, | 392 | "#, |
385 | ); | 393 | ); |
386 | 394 | ||
387 | assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar ()}"); | 395 | assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ;bar ()}"); |
388 | } | 396 | } |
389 | 397 | ||
390 | #[test] | 398 | #[test] |
@@ -417,6 +425,18 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
417 | } | 425 | } |
418 | 426 | ||
419 | #[test] | 427 | #[test] |
428 | fn test_match_group_with_multichar_sep() { | ||
429 | let rules = create_rules( | ||
430 | r#" | ||
431 | macro_rules! foo { | ||
432 | (fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} ); | ||
433 | }"#, | ||
434 | ); | ||
435 | |||
436 | assert_expansion(&rules, "foo! (fn baz {true true} )", "fn baz () -> bool {true &&true}"); | ||
437 | } | ||
438 | |||
439 | #[test] | ||
420 | fn test_expand_to_item_list() { | 440 | fn test_expand_to_item_list() { |
421 | let rules = create_rules( | 441 | let rules = create_rules( |
422 | " | 442 | " |
@@ -597,7 +617,7 @@ MACRO_ITEMS@[0; 40) | |||
597 | assert_expansion( | 617 | assert_expansion( |
598 | &rules, | 618 | &rules, |
599 | "foo! { bar::<u8>::baz::<u8> }", | 619 | "foo! { bar::<u8>::baz::<u8> }", |
600 | "fn foo () {let a = bar :: < u8 > :: baz :: < u8 > ;}", | 620 | "fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}", |
601 | ); | 621 | ); |
602 | } | 622 | } |
603 | 623 | ||
@@ -891,7 +911,7 @@ MACRO_ITEMS@[0; 40) | |||
891 | } | 911 | } |
892 | "#, | 912 | "#, |
893 | ); | 913 | ); |
894 | assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref < 'a > {s : & 'a str}"#); | 914 | assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#); |
895 | } | 915 | } |
896 | 916 | ||
897 | #[test] | 917 | #[test] |
@@ -1063,7 +1083,7 @@ macro_rules! int_base { | |||
1063 | ); | 1083 | ); |
1064 | 1084 | ||
1065 | assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#, | 1085 | assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#, |
1066 | "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt :: Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}" | 1086 | "# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}" |
1067 | ); | 1087 | ); |
1068 | } | 1088 | } |
1069 | 1089 | ||
@@ -1140,186 +1160,4 @@ impl_fn_for_zst ! { | |||
1140 | assert_expansion(&rules, r#"impl_nonzero_fmt ! { # [ stable ( feature = "nonzero" , since = "1.28.0" ) ] ( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for NonZeroU8 }"#, | 1160 | assert_expansion(&rules, r#"impl_nonzero_fmt ! { # [ stable ( feature = "nonzero" , since = "1.28.0" ) ] ( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for NonZeroU8 }"#, |
1141 | "fn foo () {}"); | 1161 | "fn foo () {}"); |
1142 | } | 1162 | } |
1143 | |||
1144 | #[test] | ||
1145 | fn test_tuple_impls() { | ||
1146 | // from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12 | ||
1147 | let rules = create_rules( | ||
1148 | r#" | ||
1149 | macro_rules! tuple_impls { | ||
1150 | ($( | ||
1151 | $Tuple:ident { | ||
1152 | $(($idx:tt) -> $T:ident)+ | ||
1153 | } | ||
1154 | )+) => { | ||
1155 | $( | ||
1156 | #[stable(feature = "rust1", since = "1.0.0")] | ||
1157 | impl<$($T:PartialEq),+> PartialEq for ($($T,)+) where last_type!($($T,)+): ?Sized { | ||
1158 | #[inline] | ||
1159 | fn eq(&self, other: &($($T,)+)) -> bool { | ||
1160 | $(self.$idx == other.$idx)&&+ | ||
1161 | } | ||
1162 | #[inline] | ||
1163 | fn ne(&self, other: &($($T,)+)) -> bool { | ||
1164 | $(self.$idx != other.$idx)||+ | ||
1165 | } | ||
1166 | } | ||
1167 | |||
1168 | #[stable(feature = "rust1", since = "1.0.0")] | ||
1169 | impl<$($T:Eq),+> Eq for ($($T,)+) where last_type!($($T,)+): ?Sized {} | ||
1170 | |||
1171 | #[stable(feature = "rust1", since = "1.0.0")] | ||
1172 | impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) | ||
1173 | where last_type!($($T,)+): ?Sized { | ||
1174 | #[inline] | ||
1175 | fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> { | ||
1176 | lexical_partial_cmp!($(self.$idx, other.$idx),+) | ||
1177 | } | ||
1178 | #[inline] | ||
1179 | fn lt(&self, other: &($($T,)+)) -> bool { | ||
1180 | lexical_ord!(lt, $(self.$idx, other.$idx),+) | ||
1181 | } | ||
1182 | #[inline] | ||
1183 | fn le(&self, other: &($($T,)+)) -> bool { | ||
1184 | lexical_ord!(le, $(self.$idx, other.$idx),+) | ||
1185 | } | ||
1186 | #[inline] | ||
1187 | fn ge(&self, other: &($($T,)+)) -> bool { | ||
1188 | lexical_ord!(ge, $(self.$idx, other.$idx),+) | ||
1189 | } | ||
1190 | #[inline] | ||
1191 | fn gt(&self, other: &($($T,)+)) -> bool { | ||
1192 | lexical_ord!(gt, $(self.$idx, other.$idx),+) | ||
1193 | } | ||
1194 | } | ||
1195 | |||
1196 | #[stable(feature = "rust1", since = "1.0.0")] | ||
1197 | impl<$($T:Ord),+> Ord for ($($T,)+) where last_type!($($T,)+): ?Sized { | ||
1198 | #[inline] | ||
1199 | fn cmp(&self, other: &($($T,)+)) -> Ordering { | ||
1200 | lexical_cmp!($(self.$idx, other.$idx),+) | ||
1201 | } | ||
1202 | } | ||
1203 | |||
1204 | #[stable(feature = "rust1", since = "1.0.0")] | ||
1205 | impl<$($T:Default),+> Default for ($($T,)+) { | ||
1206 | #[inline] | ||
1207 | fn default() -> ($($T,)+) { | ||
1208 | ($({ let x: $T = Default::default(); x},)+) | ||
1209 | } | ||
1210 | } | ||
1211 | )+ | ||
1212 | } | ||
1213 | }"#, | ||
1214 | ); | ||
1215 | |||
1216 | assert_expansion( | ||
1217 | &rules, | ||
1218 | r#"tuple_impls ! { | ||
1219 | Tuple1 { | ||
1220 | ( 0 ) -> A | ||
1221 | } | ||
1222 | Tuple2 { | ||
1223 | ( 0 ) -> A | ||
1224 | ( 1 ) -> B | ||
1225 | } | ||
1226 | Tuple3 { | ||
1227 | ( 0 ) -> A | ||
1228 | ( 1 ) -> B | ||
1229 | ( 2 ) -> C | ||
1230 | } | ||
1231 | Tuple4 { | ||
1232 | ( 0 ) -> A | ||
1233 | ( 1 ) -> B | ||
1234 | ( 2 ) -> C | ||
1235 | ( 3 ) -> D | ||
1236 | } | ||
1237 | Tuple5 { | ||
1238 | ( 0 ) -> A | ||
1239 | ( 1 ) -> B | ||
1240 | ( 2 ) -> C | ||
1241 | ( 3 ) -> D | ||
1242 | ( 4 ) -> E | ||
1243 | } | ||
1244 | Tuple6 { | ||
1245 | ( 0 ) -> A | ||
1246 | ( 1 ) -> B | ||
1247 | ( 2 ) -> C | ||
1248 | ( 3 ) -> D | ||
1249 | ( 4 ) -> E | ||
1250 | ( 5 ) -> F | ||
1251 | } | ||
1252 | Tuple7 { | ||
1253 | ( 0 ) -> A | ||
1254 | ( 1 ) -> B | ||
1255 | ( 2 ) -> C | ||
1256 | ( 3 ) -> D | ||
1257 | ( 4 ) -> E | ||
1258 | ( 5 ) -> F | ||
1259 | ( 6 ) -> G | ||
1260 | } | ||
1261 | Tuple8 { | ||
1262 | ( 0 ) -> A | ||
1263 | ( 1 ) -> B | ||
1264 | ( 2 ) -> C | ||
1265 | ( 3 ) -> D | ||
1266 | ( 4 ) -> E | ||
1267 | ( 5 ) -> F | ||
1268 | ( 6 ) -> G | ||
1269 | ( 7 ) -> H | ||
1270 | } | ||
1271 | Tuple9 { | ||
1272 | ( 0 ) -> A | ||
1273 | ( 1 ) -> B | ||
1274 | ( 2 ) -> C | ||
1275 | ( 3 ) -> D | ||
1276 | ( 4 ) -> E | ||
1277 | ( 5 ) -> F | ||
1278 | ( 6 ) -> G | ||
1279 | ( 7 ) -> H | ||
1280 | ( 8 ) -> I | ||
1281 | } | ||
1282 | Tuple10 { | ||
1283 | ( 0 ) -> A | ||
1284 | ( 1 ) -> B | ||
1285 | ( 2 ) -> C | ||
1286 | ( 3 ) -> D | ||
1287 | ( 4 ) -> E | ||
1288 | ( 5 ) -> F | ||
1289 | ( 6 ) -> G | ||
1290 | ( 7 ) -> H | ||
1291 | ( 8 ) -> I | ||
1292 | ( 9 ) -> J | ||
1293 | } | ||
1294 | Tuple11 { | ||
1295 | ( 0 ) -> A | ||
1296 | ( 1 ) -> B | ||
1297 | ( 2 ) -> C | ||
1298 | ( 3 ) -> D | ||
1299 | ( 4 ) -> E | ||
1300 | ( 5 ) -> F | ||
1301 | ( 6 ) -> G | ||
1302 | ( 7 ) -> H | ||
1303 | ( 8 ) -> I | ||
1304 | ( 9 ) -> J | ||
1305 | ( 10 ) -> K | ||
1306 | } | ||
1307 | Tuple12 { | ||
1308 | ( 0 ) -> A | ||
1309 | ( 1 ) -> B | ||
1310 | ( 2 ) -> C | ||
1311 | ( 3 ) -> D | ||
1312 | ( 4 ) -> E | ||
1313 | ( 5 ) -> F | ||
1314 | ( 6 ) -> G | ||
1315 | ( 7 ) -> H | ||
1316 | ( 8 ) -> I | ||
1317 | ( 9 ) -> J | ||
1318 | ( 10 ) -> K | ||
1319 | ( 11 ) -> L | ||
1320 | } | ||
1321 | }"#, | ||
1322 | "fn foo () {}", | ||
1323 | ); | ||
1324 | } | ||
1325 | } | 1163 | } |
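Taken together, the lib.rs changes turn the repetition separator from a single `char` into a full `Separator` (an identifier, a literal, or up to three joined punctuation tokens), which is what the new `test_match_group_with_multichar_sep` test exercises. For reference, plain `rustc` accepts the same shape of macro; this self-contained example (independent of the crate's APIs) mirrors that test: the matcher takes bare literals and the transcriber re-joins them with the two-character separator `&&`.

```rust
// Mirrors test_match_group_with_multichar_sep: bare literals go in, and the
// transcriber joins them back with the multi-character separator `&&`.
macro_rules! all_true {
    ($($i:literal)*) => { $($i)&&* };
}

fn main() {
    // Expands to `true && true && true`.
    assert!(all_true!(true true true));
}
```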
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index 91b6db522..7411dd8b1 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -196,6 +196,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings, | |||
196 | "literal" => { | 196 | "literal" => { |
197 | let literal = | 197 | let literal = |
198 | input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone(); | 198 | input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone(); |
199 | |||
199 | res.inner.insert( | 200 | res.inner.insert( |
200 | text.clone(), | 201 | text.clone(), |
201 | Binding::Simple(tt::Leaf::from(literal).into()), | 202 | Binding::Simple(tt::Leaf::from(literal).into()), |
@@ -210,7 +211,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings, | |||
210 | } | 211 | } |
211 | } | 212 | } |
212 | crate::Leaf::Punct(punct) => { | 213 | crate::Leaf::Punct(punct) => { |
213 | if input.eat_punct() != Some(punct) { | 214 | if !input.eat_punct().map(|p| p.char == punct.char).unwrap_or(false) { |
214 | return Err(ExpandError::UnexpectedToken); | 215 | return Err(ExpandError::UnexpectedToken); |
215 | } | 216 | } |
216 | } | 217 | } |
@@ -246,8 +247,23 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings, | |||
246 | } | 247 | } |
247 | } | 248 | } |
248 | 249 | ||
249 | if let Some(separator) = *separator { | 250 | if let Some(separator) = separator { |
250 | if input.eat_punct().map(|p| p.char) != Some(separator) { | 251 | use crate::Separator::*; |
252 | |||
253 | if !input | ||
254 | .eat_seperator() | ||
255 | .map(|sep| match (sep, separator) { | ||
256 | (Ident(ref a), Ident(ref b)) => a.text == b.text, | ||
257 | (Literal(ref a), Literal(ref b)) => a.text == b.text, | ||
258 | (Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => { | ||
259 | let a_iter = a.iter().map(|a| a.char); | ||
260 | let b_iter = b.iter().map(|b| b.char); | ||
261 | a_iter.eq(b_iter) | ||
262 | } | ||
263 | _ => false, | ||
264 | }) | ||
265 | .unwrap_or(false) | ||
266 | { | ||
251 | input.rollback(memento); | 267 | input.rollback(memento); |
252 | break; | 268 | break; |
253 | } | 269 | } |
@@ -328,7 +344,7 @@ fn expand_tt( | |||
328 | // Dirty hack to make macro-expansion terminate. | 344 | // Dirty hack to make macro-expansion terminate. |
329 | // This should be replaced by a proper macro-by-example implementation | 345 | // This should be replaced by a proper macro-by-example implementation |
330 | let mut limit = 128; | 346 | let mut limit = 128; |
331 | let mut has_sep = false; | 347 | let mut has_seps = 0; |
332 | 348 | ||
333 | while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) { | 349 | while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) { |
334 | limit -= 1; | 350 | limit -= 1; |
@@ -339,15 +355,28 @@ fn expand_tt( | |||
339 | nesting.push(idx + 1); | 355 | nesting.push(idx + 1); |
340 | token_trees.push(reduce_single_token(t).into()); | 356 | token_trees.push(reduce_single_token(t).into()); |
341 | 357 | ||
342 | if let Some(sep) = repeat.separator { | 358 | if let Some(ref sep) = repeat.separator { |
343 | let punct = | 359 | match sep { |
344 | tt::Leaf::from(tt::Punct { char: sep, spacing: tt::Spacing::Alone }); | 360 | crate::Separator::Ident(ident) => { |
345 | token_trees.push(punct.into()); | 361 | has_seps = 1; |
346 | has_sep = true; | 362 | token_trees.push(tt::Leaf::from(ident.clone()).into()); |
363 | } | ||
364 | crate::Separator::Literal(lit) => { | ||
365 | has_seps = 1; | ||
366 | token_trees.push(tt::Leaf::from(lit.clone()).into()); | ||
367 | } | ||
368 | |||
369 | crate::Separator::Puncts(puncts) => { | ||
370 | has_seps = puncts.len(); | ||
371 | for punct in puncts { | ||
372 | token_trees.push(tt::Leaf::from(*punct).into()); | ||
373 | } | ||
374 | } | ||
375 | } | ||
347 | } | 376 | } |
348 | } | 377 | } |
349 | nesting.pop().unwrap(); | 378 | nesting.pop().unwrap(); |
350 | if has_sep { | 379 | for _ in 0..has_seps { |
351 | token_trees.pop(); | 380 | token_trees.pop(); |
352 | } | 381 | } |
353 | 382 | ||
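Two details in the expander are worth spelling out: matching now compares the consumed separator against the pattern's `Separator` variant by variant (idents by text, literals by text, punct sequences element-wise), and transcription pushes every token of the separator after each repetition, recording how many were pushed in `has_seps` so that the trailing separator can be popped once the loop ends. A simplified, self-contained sketch of that push-then-pop pattern, using plain strings and chars instead of the crate's token-tree types:

```rust
// Sketch of the transcription loop's separator handling: after every expanded
// repetition we push the separator's tokens and remember how many we pushed;
// once the loop ends, that many trailing tokens are popped so the output does
// not end with a dangling separator. This mirrors the `has_seps` counter.
fn join_with_separator(items: &[&str], sep: &[char]) -> Vec<String> {
    let mut out: Vec<String> = Vec::new();
    let mut trailing_sep_len = 0; // plays the role of `has_seps`

    for item in items {
        out.push(item.to_string());
        trailing_sep_len = sep.len();
        for &c in sep {
            out.push(c.to_string());
        }
    }

    // Drop the separator that was pushed after the final item.
    for _ in 0..trailing_sep_len {
        out.pop();
    }
    out
}

fn main() {
    // A two-character separator such as `&&` is two punct tokens.
    let tokens = join_with_separator(&["true", "true"], &['&', '&']);
    assert_eq!(tokens, vec!["true", "&", "&", "true"]);
}
```

With a multi-character separator, the old single `token_trees.pop()` would have left part of the separator behind, which is why the boolean flag became a count.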
diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs
index 0710062d9..c7ab463e2 100644
--- a/crates/ra_mbe/src/mbe_parser.rs
+++ b/crates/ra_mbe/src/mbe_parser.rs
@@ -74,18 +74,11 @@ fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result<crate::Var, ParseErr | |||
74 | Ok(crate::Var { text, kind }) | 74 | Ok(crate::Var { text, kind }) |
75 | } | 75 | } |
76 | 76 | ||
77 | fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> { | 77 | fn mk_repeat( |
78 | let subtree = p.eat_subtree().unwrap(); | 78 | rep: char, |
79 | let mut subtree = parse_subtree(subtree, transcriber)?; | 79 | subtree: crate::Subtree, |
80 | subtree.delimiter = crate::Delimiter::None; | 80 | separator: Option<crate::Separator>, |
81 | let sep = p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?; | 81 | ) -> Result<crate::Repeat, ParseError> { |
82 | let (separator, rep) = match sep.char { | ||
83 | '*' | '+' | '?' => (None, sep.char), | ||
84 | char => { | ||
85 | (Some(char), p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?.char) | ||
86 | } | ||
87 | }; | ||
88 | |||
89 | let kind = match rep { | 82 | let kind = match rep { |
90 | '*' => crate::RepeatKind::ZeroOrMore, | 83 | '*' => crate::RepeatKind::ZeroOrMore, |
91 | '+' => crate::RepeatKind::OneOrMore, | 84 | '+' => crate::RepeatKind::OneOrMore, |
@@ -95,6 +88,27 @@ fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, Pa | |||
95 | Ok(crate::Repeat { subtree, kind, separator }) | 88 | Ok(crate::Repeat { subtree, kind, separator }) |
96 | } | 89 | } |
97 | 90 | ||
91 | fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> { | ||
92 | let subtree = p.eat_subtree().unwrap(); | ||
93 | let mut subtree = parse_subtree(subtree, transcriber)?; | ||
94 | subtree.delimiter = crate::Delimiter::None; | ||
95 | |||
96 | if let Some(rep) = p.at_punct() { | ||
97 | match rep.char { | ||
98 | '*' | '+' | '?' => { | ||
99 | p.bump(); | ||
100 | return mk_repeat(rep.char, subtree, None); | ||
101 | } | ||
102 | _ => {} | ||
103 | } | ||
104 | } | ||
105 | |||
106 | let sep = p.eat_seperator().ok_or(ParseError::Expected(String::from("separator")))?; | ||
107 | let rep = p.eat_punct().ok_or(ParseError::Expected(String::from("repeat")))?; | ||
108 | |||
109 | mk_repeat(rep.char, subtree, Some(sep)) | ||
110 | } | ||
111 | |||
98 | #[cfg(test)] | 112 | #[cfg(test)] |
99 | mod tests { | 113 | mod tests { |
100 | use ra_syntax::{ast, AstNode}; | 114 | use ra_syntax::{ast, AstNode}; |
@@ -109,7 +123,7 @@ mod tests { | |||
109 | is_valid("($i:ident) => ()"); | 123 | is_valid("($i:ident) => ()"); |
110 | expect_err("$i:ident => ()", "subtree"); | 124 | expect_err("$i:ident => ()", "subtree"); |
111 | expect_err("($i:ident) ()", "`=`"); | 125 | expect_err("($i:ident) ()", "`=`"); |
112 | expect_err("($($i:ident)_) => ()", "separator"); | 126 | expect_err("($($i:ident)_) => ()", "repeat"); |
113 | } | 127 | } |
114 | 128 | ||
115 | fn expect_err(macro_body: &str, expected: &str) { | 129 | fn expect_err(macro_body: &str, expected: &str) { |
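`parse_repeat` now checks for a bare repeat operator (`*`, `+`, `?`) first and only otherwise consumes a separator followed by the operator; that is why the expected error for `($($i:ident)_) => ()` changes from "separator" to "repeat": the `_` is consumed as a separator, and what is missing afterwards is the repeat operator. A rough sketch of that control flow under simplified assumptions (a plain char cursor instead of `TtCursor`, string errors instead of `ParseError`):

```rust
// Simplified sketch of the new parse_repeat control flow; types are stand-ins,
// not the crate's real ones.
#[derive(Debug, PartialEq)]
enum RepeatKind { ZeroOrMore, OneOrMore, ZeroOrOne }

fn repeat_kind(c: char) -> Option<RepeatKind> {
    match c {
        '*' => Some(RepeatKind::ZeroOrMore),
        '+' => Some(RepeatKind::OneOrMore),
        '?' => Some(RepeatKind::ZeroOrOne),
        _ => None,
    }
}

fn parse_repeat(
    tokens: &mut std::iter::Peekable<std::slice::Iter<'_, char>>,
) -> Result<(Option<char>, RepeatKind), &'static str> {
    // Case 1: `$(...)*`, `$(...)+`, `$(...)?` with no separator.
    if let Some(&&c) = tokens.peek() {
        if let Some(kind) = repeat_kind(c) {
            tokens.next();
            return Ok((None, kind));
        }
    }
    // Case 2: `$(...)sep*` - a separator token, then the repeat operator.
    let sep = *tokens.next().ok_or("separator")?;
    let rep = *tokens.next().ok_or("repeat")?;
    let kind = repeat_kind(rep).ok_or("repeat")?;
    Ok((Some(sep), kind))
}

fn main() {
    let toks = vec![',', '*'];
    let mut it = toks.iter().peekable();
    assert_eq!(parse_repeat(&mut it), Ok((Some(','), RepeatKind::ZeroOrMore)));

    let toks = vec!['_'];
    let mut it = toks.iter().peekable();
    // `_` is consumed as a separator; the missing repeat operator is the error.
    assert_eq!(parse_repeat(&mut it), Err("repeat"));
}
```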
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 3229cfa8f..e979777fe 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -342,7 +342,7 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { | |||
342 | } | 342 | } |
343 | } | 343 | } |
344 | 344 | ||
345 | struct TokenPeek<'a, I> | 345 | pub(crate) struct TokenPeek<'a, I> |
346 | where | 346 | where |
347 | I: Iterator<Item = &'a tt::TokenTree>, | 347 | I: Iterator<Item = &'a tt::TokenTree>, |
348 | { | 348 | { |
@@ -365,7 +365,7 @@ where | |||
365 | TokenPeek { iter: itertools::multipeek(iter) } | 365 | TokenPeek { iter: itertools::multipeek(iter) } |
366 | } | 366 | } |
367 | 367 | ||
368 | fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { | 368 | pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { |
369 | if p.spacing != tt::Spacing::Joint { | 369 | if p.spacing != tt::Spacing::Joint { |
370 | return None; | 370 | return None; |
371 | } | 371 | } |
@@ -375,7 +375,7 @@ where | |||
375 | Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint)) | 375 | Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint)) |
376 | } | 376 | } |
377 | 377 | ||
378 | fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { | 378 | pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { |
379 | self.current_punct2(p).and_then(|((p0, p1), last_joint)| { | 379 | self.current_punct2(p).and_then(|((p0, p1), last_joint)| { |
380 | if !last_joint { | 380 | if !last_joint { |
381 | None | 381 | None |
@@ -437,12 +437,16 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | |||
437 | } | 437 | } |
438 | 438 | ||
439 | fn convert_literal(l: &tt::Literal) -> TtToken { | 439 | fn convert_literal(l: &tt::Literal) -> TtToken { |
440 | TtToken { | 440 | let kind = classify_literal(&l.text) |
441 | kind: classify_literal(&l.text).unwrap().kind, | 441 | .map(|tkn| tkn.kind) |
442 | is_joint_to_next: false, | 442 | .or_else(|| match l.text.as_ref() { |
443 | text: l.text.clone(), | 443 | "true" => Some(SyntaxKind::TRUE_KW), |
444 | n_tokens: 1, | 444 | "false" => Some(SyntaxKind::FALSE_KW), |
445 | } | 445 | _ => None, |
446 | }) | ||
447 | .unwrap(); | ||
448 | |||
449 | TtToken { kind, is_joint_to_next: false, text: l.text.clone(), n_tokens: 1 } | ||
446 | } | 450 | } |
447 | 451 | ||
448 | fn convert_ident(ident: &tt::Ident) -> TtToken { | 452 | fn convert_ident(ident: &tt::Ident) -> TtToken { |
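`convert_literal` used to assume `classify_literal` always succeeds; now that `true` and `false` can arrive as `tt::Literal` (see the syntax_bridge change below), it falls back to the boolean keyword kinds before unwrapping. A small standalone sketch of that fallback, with a local `SyntaxKind` stand-in and a toy classifier instead of `ra_syntax`'s:

```rust
// Text that does not lex as a numeric/string/char literal may still be the
// boolean keywords `true`/`false`, which this commit starts treating as
// literals so that `$i:literal` fragments can capture them.
#[derive(Debug, PartialEq)]
enum SyntaxKind { IntNumber, TrueKw, FalseKw }

fn classify_literal(text: &str) -> Option<SyntaxKind> {
    // Stand-in for the real lexer-based classification.
    text.parse::<i64>().ok().map(|_| SyntaxKind::IntNumber)
}

fn literal_kind(text: &str) -> Option<SyntaxKind> {
    classify_literal(text).or_else(|| match text {
        "true" => Some(SyntaxKind::TrueKw),
        "false" => Some(SyntaxKind::FalseKw),
        _ => None,
    })
}

fn main() {
    assert_eq!(literal_kind("42"), Some(SyntaxKind::IntNumber));
    assert_eq!(literal_kind("true"), Some(SyntaxKind::TrueKw));
    assert_eq!(literal_kind("bar"), None);
}
```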
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 38a481029..9cca19dbb 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -133,7 +133,9 @@ fn convert_tt( | |||
133 | }; | 133 | }; |
134 | 134 | ||
135 | let mut token_trees = Vec::new(); | 135 | let mut token_trees = Vec::new(); |
136 | for child in tt.children_with_tokens().skip(skip_first as usize) { | 136 | let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable(); |
137 | |||
138 | while let Some(child) = child_iter.next() { | ||
137 | if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia() | 139 | if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia() |
138 | { | 140 | { |
139 | continue; | 141 | continue; |
@@ -152,12 +154,25 @@ fn convert_tt( | |||
152 | prev = Some(char) | 154 | prev = Some(char) |
153 | } | 155 | } |
154 | if let Some(char) = prev { | 156 | if let Some(char) = prev { |
155 | token_trees.push( | 157 | let spacing = match child_iter.peek() { |
156 | tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into(), | 158 | Some(SyntaxElement::Token(token)) => { |
157 | ); | 159 | if token.kind().is_punct() { |
160 | tt::Spacing::Joint | ||
161 | } else { | ||
162 | tt::Spacing::Alone | ||
163 | } | ||
164 | } | ||
165 | _ => tt::Spacing::Alone, | ||
166 | }; | ||
167 | |||
168 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); | ||
158 | } | 169 | } |
159 | } else { | 170 | } else { |
160 | let child: tt::TokenTree = if token.kind().is_keyword() | 171 | let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW |
172 | || token.kind() == SyntaxKind::FALSE_KW | ||
173 | { | ||
174 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() | ||
175 | } else if token.kind().is_keyword() | ||
161 | || token.kind() == IDENT | 176 | || token.kind() == IDENT |
162 | || token.kind() == LIFETIME | 177 | || token.kind() == LIFETIME |
163 | { | 178 | { |
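The lowering changes do two things: a punctuation token is emitted as `Spacing::Joint` whenever the token that follows is also punctuation, so that `&&`, `::` or `..=` can later be reassembled into one separator, and the `true`/`false` keywords are lowered as `tt::Literal` so a `$i:literal` fragment can capture them. A simplified sketch of the spacing decision (a stand-in `Spacing` enum and plain chars instead of syntax tokens):

```rust
// A punct is `Joint` when the next token is also a punct, so that `&`
// immediately followed by `&` can later be recombined into `&&`.
#[derive(Debug, PartialEq)]
enum Spacing { Alone, Joint }

fn spacing_for(next_is_punct: Option<bool>) -> Spacing {
    match next_is_punct {
        Some(true) => Spacing::Joint,
        _ => Spacing::Alone,
    }
}

fn main() {
    // `&&x` lowered as two puncts and an identifier: the first `&` is joint
    // to the second, the second (followed by `x`) is alone.
    let chars = ['&', '&', 'x'];
    let mut spacings = Vec::new();
    let mut iter = chars.iter().peekable();
    while let Some(&c) = iter.next() {
        if c.is_ascii_punctuation() {
            let next_is_punct = iter.peek().map(|&&n| n.is_ascii_punctuation());
            spacings.push((c, spacing_for(next_is_punct)));
        }
    }
    assert_eq!(spacings, vec![('&', Spacing::Joint), ('&', Spacing::Alone)]);
}
```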
diff --git a/crates/ra_mbe/src/tt_cursor.rs b/crates/ra_mbe/src/tt_cursor.rs
index 87bcf8b0d..343416421 100644
--- a/crates/ra_mbe/src/tt_cursor.rs
+++ b/crates/ra_mbe/src/tt_cursor.rs
@@ -1,5 +1,7 @@ | |||
1 | use crate::ParseError; | 1 | use crate::ParseError; |
2 | use crate::subtree_parser::Parser; | 2 | use crate::subtree_parser::Parser; |
3 | use crate::subtree_source::TokenPeek; | ||
4 | use smallvec::{SmallVec, smallvec}; | ||
3 | 5 | ||
4 | #[derive(Clone)] | 6 | #[derive(Clone)] |
5 | pub(crate) struct TtCursor<'a> { | 7 | pub(crate) struct TtCursor<'a> { |
@@ -162,6 +164,95 @@ impl<'a> TtCursor<'a> { | |||
162 | } | 164 | } |
163 | } | 165 | } |
164 | 166 | ||
167 | fn eat_punct3(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> { | ||
168 | let sec = self.eat_punct()?.clone(); | ||
169 | let third = self.eat_punct()?.clone(); | ||
170 | Some(smallvec![p.clone(), sec, third]) | ||
171 | } | ||
172 | |||
173 | fn eat_punct2(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> { | ||
174 | let sec = self.eat_punct()?.clone(); | ||
175 | Some(smallvec![p.clone(), sec]) | ||
176 | } | ||
177 | |||
178 | fn eat_multi_char_punct<'b, I>( | ||
179 | &mut self, | ||
180 | p: &tt::Punct, | ||
181 | iter: &mut TokenPeek<'b, I>, | ||
182 | ) -> Option<SmallVec<[tt::Punct; 3]>> | ||
183 | where | ||
184 | I: Iterator<Item = &'b tt::TokenTree>, | ||
185 | { | ||
186 | if let Some((m, _)) = iter.current_punct3(p) { | ||
187 | if let r @ Some(_) = match m { | ||
188 | ('<', '<', '=') | ('>', '>', '=') | ('.', '.', '.') | ('.', '.', '=') => { | ||
189 | self.eat_punct3(p) | ||
190 | } | ||
191 | _ => None, | ||
192 | } { | ||
193 | return r; | ||
194 | } | ||
195 | } | ||
196 | |||
197 | if let Some((m, _)) = iter.current_punct2(p) { | ||
198 | if let r @ Some(_) = match m { | ||
199 | ('<', '=') | ||
200 | | ('>', '=') | ||
201 | | ('+', '=') | ||
202 | | ('-', '=') | ||
203 | | ('|', '=') | ||
204 | | ('&', '=') | ||
205 | | ('^', '=') | ||
206 | | ('/', '=') | ||
207 | | ('*', '=') | ||
208 | | ('%', '=') | ||
209 | | ('&', '&') | ||
210 | | ('|', '|') | ||
211 | | ('<', '<') | ||
212 | | ('>', '>') | ||
213 | | ('-', '>') | ||
214 | | ('!', '=') | ||
215 | | ('=', '>') | ||
216 | | ('=', '=') | ||
217 | | ('.', '.') | ||
218 | | (':', ':') => self.eat_punct2(p), | ||
219 | |||
220 | _ => None, | ||
221 | } { | ||
222 | return r; | ||
223 | } | ||
224 | } | ||
225 | |||
226 | None | ||
227 | } | ||
228 | |||
229 | pub(crate) fn eat_seperator(&mut self) -> Option<crate::Separator> { | ||
230 | match self.eat()? { | ||
231 | tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { | ||
232 | Some(crate::Separator::Literal(lit.clone())) | ||
233 | } | ||
234 | tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { | ||
235 | Some(crate::Separator::Ident(ident.clone())) | ||
236 | } | ||
237 | tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { | ||
238 | match punct.char { | ||
239 | '*' | '+' | '?' => return None, | ||
240 | _ => {} | ||
241 | }; | ||
242 | |||
243 | // FIXME: The parser only handles some composable puncts, | ||
244 | // but at this phase some puncts are still joint, | ||
245 | // so we bypass that check here. | ||
246 | let mut peekable = TokenPeek::new(self.subtree.token_trees[self.pos..].iter()); | ||
247 | let puncts = self.eat_multi_char_punct(punct, &mut peekable); | ||
248 | let puncts = puncts.unwrap_or_else(|| smallvec![punct.clone()]); | ||
249 | |||
250 | Some(crate::Separator::Puncts(puncts)) | ||
251 | } | ||
252 | _ => None, | ||
253 | } | ||
254 | } | ||
255 | |||
165 | #[must_use] | 256 | #[must_use] |
166 | pub(crate) fn save(&self) -> TtCursorMemento { | 257 | pub(crate) fn save(&self) -> TtCursorMemento { |
167 | TtCursorMemento { pos: self.pos } | 258 | TtCursorMemento { pos: self.pos } |
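`eat_seperator` is where multi-character separators are reassembled on the parsing side: identifiers and literals are taken as-is, the repeat operators `*`, `+`, `?` are rejected, and consecutive puncts are greedily recombined via `eat_multi_char_punct`, longest form first. A simplified, self-contained sketch of that idea, using plain chars instead of `tt::Punct` and only a subset of the composite operators listed in the real table:

```rust
// Greedy, longest-match-first recognition of a separator at the start of a
// punct sequence: try three characters, then two, then fall back to one.
// Repeat operators are never accepted as separators.
fn eat_separator(input: &[char]) -> Option<(String, usize)> {
    let first = *input.first()?;
    if matches!(first, '*' | '+' | '?') {
        return None; // repeat operator, not a separator
    }
    const THREE: &[&str] = &["<<=", ">>=", "...", "..="];
    const TWO: &[&str] = &["&&", "||", "==", "=>", "->", "::", ".."];

    let take = |n: usize| -> Option<String> {
        if input.len() >= n { Some(input[..n].iter().collect()) } else { None }
    };
    if let Some(s) = take(3).filter(|s| THREE.contains(&s.as_str())) {
        return Some((s, 3));
    }
    if let Some(s) = take(2).filter(|s| TWO.contains(&s.as_str())) {
        return Some((s, 2));
    }
    Some((first.to_string(), 1))
}

fn main() {
    assert_eq!(eat_separator(&['&', '&', '*']), Some(("&&".to_string(), 2)));
    assert_eq!(eat_separator(&[',', '*']), Some((",".to_string(), 1)));
    assert_eq!(eat_separator(&['*']), None);
}
```

The real implementation additionally consults `TokenPeek` spacing information, since at this stage some puncts are still joint; the sketch simply treats adjacency in the slice as joined.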