Diffstat (limited to 'crates/parser')
 crates/parser/src/grammar/expressions.rs      |  2
 crates/parser/src/grammar/expressions/atom.rs | 12
 crates/parser/src/grammar/items.rs            |  4
 crates/parser/src/grammar/paths.rs            |  2
 crates/parser/src/grammar/patterns.rs         | 17
 crates/parser/src/grammar/types.rs            |  8
 crates/parser/src/token_set.rs                | 20

7 files changed, 37 insertions(+), 28 deletions(-)
diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
index e72929f8c..5f885edfd 100644
--- a/crates/parser/src/grammar/expressions.rs
+++ b/crates/parser/src/grammar/expressions.rs
@@ -316,7 +316,7 @@ fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option<CompletedMark
 }
 
 const LHS_FIRST: TokenSet =
-    atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]);
+    atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
 
 fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index ba6dd2fbc..66a92a4e1 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -15,7 +15,7 @@ use super::*;
 // let _ = b"e";
 // let _ = br"f";
 // }
-pub(crate) const LITERAL_FIRST: TokenSet = token_set![
+pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
     TRUE_KW,
     FALSE_KW,
     INT_NUMBER,
@@ -25,8 +25,8 @@ pub(crate) const LITERAL_FIRST: TokenSet = token_set![
     STRING,
     RAW_STRING,
     BYTE_STRING,
-    RAW_BYTE_STRING
-];
+    RAW_BYTE_STRING,
+]);
 
 pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
     if !p.at_ts(LITERAL_FIRST) {
@@ -39,7 +39,7 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
 
 // E.g. for after the break in `if break {}`, this should not match
 pub(super) const ATOM_EXPR_FIRST: TokenSet =
-    LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![
+    LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
         T!['('],
         T!['{'],
         T!['['],
@@ -59,9 +59,9 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T![loop],
         T![for],
         LIFETIME,
-    ]);
+    ]));
 
-const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR];
+const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[LET_KW, R_DOLLAR]);
 
 pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     if let Some(m) = literal(p) {
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index 8fd8f3b80..22810e6fb 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -26,7 +26,7 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
     }
 }
 
-pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
+pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
     FN_KW,
     STRUCT_KW,
     ENUM_KW,
@@ -41,7 +41,7 @@ pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
     USE_KW,
     MACRO_KW,
     T![;],
-];
+]);
 
 pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool) {
     let m = p.start();
diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs
index 52562afa4..5d297e2d6 100644
--- a/crates/parser/src/grammar/paths.rs
+++ b/crates/parser/src/grammar/paths.rs
@@ -3,7 +3,7 @@
 use super::*;
 
 pub(super) const PATH_FIRST: TokenSet =
-    token_set![IDENT, T![self], T![super], T![crate], T![:], T![<]];
+    TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![:], T![<]]);
 
 pub(super) fn is_path_start(p: &Parser) -> bool {
     is_use_path_start(p) || p.at(T![<])
diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs
index 07b1d6dd5..796f206e1 100644
--- a/crates/parser/src/grammar/patterns.rs
+++ b/crates/parser/src/grammar/patterns.rs
@@ -2,9 +2,18 @@
 
 use super::*;
 
-pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
-    .union(paths::PATH_FIRST)
-    .union(token_set![T![box], T![ref], T![mut], T!['('], T!['['], T![&], T![_], T![-], T![.]]);
+pub(super) const PATTERN_FIRST: TokenSet =
+    expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+        T![box],
+        T![ref],
+        T![mut],
+        T!['('],
+        T!['['],
+        T![&],
+        T![_],
+        T![-],
+        T![.],
+    ]));
 
 pub(crate) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET);
@@ -74,7 +83,7 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
 }
 
 const PAT_RECOVERY_SET: TokenSet =
-    token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
+    TokenSet::new(&[LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]);
 
 fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     let m = match p.nth(0) {
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index 9d00eb9b9..1ea130ac5 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -2,7 +2,7 @@
 
 use super::*;
 
-pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
     T!['('],
     T!['['],
     T![<],
@@ -16,16 +16,16 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
     T![for],
     T![impl],
     T![dyn],
-]);
+]));
 
-const TYPE_RECOVERY_SET: TokenSet = token_set![
+const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
     T![')'],
     T![,],
     L_DOLLAR,
     // test_err struct_field_recover
     // struct S { f pub g: () }
     T![pub],
-];
+]);
 
 pub(crate) fn type_(p: &mut Parser) {
     type_with_bounds_cond(p, true);
diff --git a/crates/parser/src/token_set.rs b/crates/parser/src/token_set.rs
index 994017acf..a68f0144e 100644
--- a/crates/parser/src/token_set.rs
+++ b/crates/parser/src/token_set.rs
@@ -9,15 +9,21 @@ pub(crate) struct TokenSet(u128);
 impl TokenSet {
     pub(crate) const EMPTY: TokenSet = TokenSet(0);
 
-    pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet {
-        TokenSet(mask(kind))
+    pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
+        let mut res = 0u128;
+        let mut i = 0;
+        while i < kinds.len() {
+            res |= mask(kinds[i]);
+            i += 1
+        }
+        TokenSet(res)
     }
 
     pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
         TokenSet(self.0 | other.0)
     }
 
-    pub(crate) fn contains(&self, kind: SyntaxKind) -> bool {
+    pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
         self.0 & mask(kind) != 0
     }
 }
@@ -26,16 +32,10 @@ const fn mask(kind: SyntaxKind) -> u128 {
     1u128 << (kind as usize)
 }
 
-#[macro_export]
-macro_rules! token_set {
-    ($($t:expr),*) => { TokenSet::EMPTY$(.union(TokenSet::singleton($t)))* };
-    ($($t:expr),* ,) => { token_set!($($t),*) };
-}
-
 #[test]
 fn token_set_works_for_tokens() {
     use crate::SyntaxKind::*;
-    let ts = token_set![EOF, SHEBANG];
+    let ts = TokenSet::new(&[EOF, SHEBANG]);
     assert!(ts.contains(EOF));
     assert!(ts.contains(SHEBANG));
     assert!(!ts.contains(PLUS));
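
Taken together, the change deletes the token_set! macro and replaces it with a const fn TokenSet::new(&[SyntaxKind]) constructor (contains also becomes a const fn), then rewrites every FIRST/recovery-set constant in the grammar modules to the new call form. Below is a minimal, self-contained sketch of the same pattern; the SyntaxKind variants, the FOO_FIRST constant, and main are invented here for illustration and are not part of the parser crate.

    // Hypothetical, stand-alone illustration of the commit's approach; not the
    // real rust-analyzer types (those live in crates/parser/src/token_set.rs).
    #[derive(Clone, Copy)]
    enum SyntaxKind {
        Eof, // discriminants 0, 1, 2 keep the bits inside a u128
        Shebang,
        Plus,
    }

    #[derive(Clone, Copy)]
    struct TokenSet(u128);

    impl TokenSet {
        // `for` loops are not usable in a `const fn` (they go through the
        // non-const Iterator trait), so the bitset is accumulated with an
        // index and a `while` loop, mirroring the new TokenSet::new above.
        const fn new(kinds: &[SyntaxKind]) -> TokenSet {
            let mut res = 0u128;
            let mut i = 0;
            while i < kinds.len() {
                res |= 1u128 << (kinds[i] as usize);
                i += 1;
            }
            TokenSet(res)
        }

        const fn contains(&self, kind: SyntaxKind) -> bool {
            self.0 & (1u128 << (kind as usize)) != 0
        }
    }

    // Call sites can now build FIRST/recovery sets in a plain `const`, no macro needed.
    const FOO_FIRST: TokenSet = TokenSet::new(&[SyntaxKind::Eof, SyntaxKind::Shebang]);

    fn main() {
        assert!(FOO_FIRST.contains(SyntaxKind::Eof));
        assert!(!FOO_FIRST.contains(SyntaxKind::Plus));
    }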