| author | Aleksey Kladov <[email protected]> | 2019-01-18 08:02:30 +0000 |
|---|---|---|
| committer | Aleksey Kladov <[email protected]> | 2019-01-19 12:36:58 +0000 |
| commit | b82fe73d1ab9727ff650382d9c86a231b06245be | |
| tree | e919865c68f78492bab5baa574f1a35a094b0339 /crates/ra_syntax | |
| parent | b028472481df108537b60104314081b65bf51147 | |
make token set a const-fn
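The point of the change: combining `TokenSet` constants previously required the `token_set_union!` macro, which in turn needed the inner `u128` field to be `pub(crate)`. With `empty`, `singleton`, and `union` as `const fn`s, FIRST sets can be composed in `const` items with ordinary method calls, and the field can stay private. A minimal, self-contained sketch of the idea (not the crate's code: plain `u32` values stand in for `SyntaxKind`, and the constants' contents are illustrative):

```rust
// Sketch only: `u32` stands in for SyntaxKind; set contents are made up.
#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    const fn empty() -> TokenSet {
        TokenSet(0)
    }
    const fn singleton(kind: u32) -> TokenSet {
        TokenSet(1u128 << kind)
    }
    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }
    fn contains(&self, kind: u32) -> bool {
        self.0 & (1u128 << kind) != 0
    }
}

// Because `union` is a const fn, one FIRST set can be built from another
// inside a `const` item with a plain method call -- no union macro, and no
// need to expose the inner u128.
const LITERAL_FIRST: TokenSet = TokenSet::singleton(1).union(TokenSet::singleton(2));
const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(TokenSet::singleton(3));

fn main() {
    assert!(ATOM_EXPR_FIRST.contains(2));
    assert!(!ATOM_EXPR_FIRST.contains(9));
}
```

The grammar modules in the diff below switch to exactly that style, e.g. `LHS_FIRST`, `ATOM_EXPR_FIRST`, `PATTERN_FIRST`, and `TYPE_FIRST`.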
Diffstat (limited to 'crates/ra_syntax')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | crates/ra_syntax/src/grammar.rs | 2 |
| -rw-r--r-- | crates/ra_syntax/src/grammar/expressions.rs | 6 |
| -rw-r--r-- | crates/ra_syntax/src/grammar/expressions/atom.rs | 45 |
| -rw-r--r-- | crates/ra_syntax/src/grammar/params.rs | 3 |
| -rw-r--r-- | crates/ra_syntax/src/grammar/patterns.rs | 10 |
| -rw-r--r-- | crates/ra_syntax/src/grammar/types.rs | 11 |
| -rw-r--r-- | crates/ra_syntax/src/parser_api.rs | 2 |
| -rw-r--r-- | crates/ra_syntax/src/token_set.rs | 30 |

8 files changed, 52 insertions, 57 deletions
diff --git a/crates/ra_syntax/src/grammar.rs b/crates/ra_syntax/src/grammar.rs
index 06a37d648..060c0ccdf 100644
--- a/crates/ra_syntax/src/grammar.rs
+++ b/crates/ra_syntax/src/grammar.rs
@@ -150,7 +150,7 @@ fn name_r(p: &mut Parser, recovery: TokenSet) {
 }
 
 fn name(p: &mut Parser) {
-    name_r(p, TokenSet::EMPTY)
+    name_r(p, TokenSet::empty())
 }
 
 fn name_ref(p: &mut Parser) {
diff --git a/crates/ra_syntax/src/grammar/expressions.rs b/crates/ra_syntax/src/grammar/expressions.rs
index 2d1f17491..2236555e0 100644
--- a/crates/ra_syntax/src/grammar/expressions.rs
+++ b/crates/ra_syntax/src/grammar/expressions.rs
@@ -211,10 +211,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike {
     BlockLike::NotBlock
 }
 
-const LHS_FIRST: TokenSet = token_set_union![
-    token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS],
-    atom::ATOM_EXPR_FIRST,
-];
+const LHS_FIRST: TokenSet =
+    atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
 
 fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
diff --git a/crates/ra_syntax/src/grammar/expressions/atom.rs b/crates/ra_syntax/src/grammar/expressions/atom.rs
index 31b09ac5b..167a76551 100644
--- a/crates/ra_syntax/src/grammar/expressions/atom.rs
+++ b/crates/ra_syntax/src/grammar/expressions/atom.rs
@@ -36,29 +36,26 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
 }
 
 // E.g. for after the break in `if break {}`, this should not match
-pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
-    LITERAL_FIRST,
-    token_set![
-        L_PAREN,
-        L_CURLY,
-        L_BRACK,
-        PIPE,
-        MOVE_KW,
-        IF_KW,
-        WHILE_KW,
-        MATCH_KW,
-        UNSAFE_KW,
-        RETURN_KW,
-        IDENT,
-        SELF_KW,
-        SUPER_KW,
-        CRATE_KW,
-        COLONCOLON,
-        BREAK_KW,
-        CONTINUE_KW,
-        LIFETIME
-    ],
-];
+pub(super) const ATOM_EXPR_FIRST: TokenSet = LITERAL_FIRST.union(token_set![
+    L_PAREN,
+    L_CURLY,
+    L_BRACK,
+    PIPE,
+    MOVE_KW,
+    IF_KW,
+    WHILE_KW,
+    MATCH_KW,
+    UNSAFE_KW,
+    RETURN_KW,
+    IDENT,
+    SELF_KW,
+    SUPER_KW,
+    CRATE_KW,
+    COLONCOLON,
+    BREAK_KW,
+    CONTINUE_KW,
+    LIFETIME,
+]);
 
 const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];
 
@@ -363,7 +360,7 @@ pub(crate) fn match_arm_list(p: &mut Parser) {
 fn match_arm(p: &mut Parser) -> BlockLike {
     let m = p.start();
     p.eat(PIPE);
-    patterns::pattern_r(p, TokenSet::EMPTY);
+    patterns::pattern_r(p, TokenSet::empty());
     while p.eat(PIPE) {
         patterns::pattern(p);
     }
diff --git a/crates/ra_syntax/src/grammar/params.rs b/crates/ra_syntax/src/grammar/params.rs
index b71a72ca3..658fc5820 100644
--- a/crates/ra_syntax/src/grammar/params.rs
+++ b/crates/ra_syntax/src/grammar/params.rs
@@ -61,8 +61,7 @@ fn list_(p: &mut Parser, flavor: Flavor) {
     m.complete(p, PARAM_LIST);
 }
 
-const VALUE_PARAMETER_FIRST: TokenSet =
-    token_set_union![patterns::PATTERN_FIRST, types::TYPE_FIRST,];
+const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST);
 
 fn value_parameter(p: &mut Parser, flavor: Flavor) {
     let m = p.start();
diff --git a/crates/ra_syntax/src/grammar/patterns.rs b/crates/ra_syntax/src/grammar/patterns.rs
index 692ffbb8c..7820c4e02 100644
--- a/crates/ra_syntax/src/grammar/patterns.rs
+++ b/crates/ra_syntax/src/grammar/patterns.rs
@@ -1,10 +1,10 @@
 use super::*;
 
-pub(super) const PATTERN_FIRST: TokenSet = token_set_union![
-    token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE],
-    expressions::LITERAL_FIRST,
-    paths::PATH_FIRST,
-];
+pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
+    .union(paths::PATH_FIRST)
+    .union(token_set![
+        REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE
+    ]);
 
 pub(super) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET)
diff --git a/crates/ra_syntax/src/grammar/types.rs b/crates/ra_syntax/src/grammar/types.rs
index a933b986b..21d89d83b 100644
--- a/crates/ra_syntax/src/grammar/types.rs
+++ b/crates/ra_syntax/src/grammar/types.rs
@@ -1,12 +1,9 @@
 use super::*;
 
-pub(super) const TYPE_FIRST: TokenSet = token_set_union![
-    token_set![
-        L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW,
-        IMPL_KW, DYN_KW, L_ANGLE,
-    ],
-    paths::PATH_FIRST,
-];
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
+    L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW,
+    DYN_KW, L_ANGLE,
+]);
 
 const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA];
 
diff --git a/crates/ra_syntax/src/parser_api.rs b/crates/ra_syntax/src/parser_api.rs
index 3487aef85..3148371c5 100644
--- a/crates/ra_syntax/src/parser_api.rs
+++ b/crates/ra_syntax/src/parser_api.rs
@@ -112,7 +112,7 @@ impl<'t> Parser<'t> {
 
     /// Create an error node and consume the next token.
     pub(crate) fn err_and_bump(&mut self, message: &str) {
-        self.err_recover(message, TokenSet::EMPTY);
+        self.err_recover(message, TokenSet::empty());
     }
 
     /// Create an error node and consume the next token.
diff --git a/crates/ra_syntax/src/token_set.rs b/crates/ra_syntax/src/token_set.rs
index d407dfa48..b3fe633e0 100644
--- a/crates/ra_syntax/src/token_set.rs
+++ b/crates/ra_syntax/src/token_set.rs
@@ -1,30 +1,34 @@
 use crate::SyntaxKind;
 
 #[derive(Clone, Copy)]
-pub(crate) struct TokenSet(pub(crate) u128);
-
-fn mask(kind: SyntaxKind) -> u128 {
-    1u128 << (kind as usize)
-}
+pub(crate) struct TokenSet(u128);
 
 impl TokenSet {
-    pub const EMPTY: TokenSet = TokenSet(0);
+    pub const fn empty() -> TokenSet {
+        TokenSet(0)
+    }
+
+    pub const fn singleton(kind: SyntaxKind) -> TokenSet {
+        TokenSet(mask(kind))
+    }
+
+    pub const fn union(self, other: TokenSet) -> TokenSet {
+        TokenSet(self.0 | other.0)
+    }
 
     pub fn contains(&self, kind: SyntaxKind) -> bool {
         self.0 & mask(kind) != 0
     }
 }
 
+const fn mask(kind: SyntaxKind) -> u128 {
+    1u128 << (kind as usize)
+}
+
 #[macro_export]
 macro_rules! token_set {
-    ($($t:ident),*) => { TokenSet($(1u128 << ($t as usize))|*) };
+    ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
     ($($t:ident),* ,) => { token_set!($($t),*) };
 }
 
-#[macro_export]
-macro_rules! token_set_union {
-    ($($ts:expr),*) => { TokenSet($($ts.0)|*) };
-    ($($ts:expr),* ,) => { token_set_union!($($ts),*) };
-}
-
 #[test]
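Note how the rewritten `token_set!` macro expands: starting from `TokenSet::empty()`, each identifier becomes a `TokenSet::singleton` call folded in with `union`, so the result is still a constant expression; the second arm only strips a trailing comma before delegating to the first. A sketch of the expansion using the same stand-in type as above (the `IF_KW`/`WHILE_KW` values are illustrative `u32` constants, not the real `SyntaxKind` variants; compile with `rustc --test` or run inside `cargo test`):

```rust
// Sketch only: mirrors the shape of the new token_set! macro over a
// stand-in TokenSet; not the crate's actual SyntaxKind-based code.
#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    const fn empty() -> TokenSet { TokenSet(0) }
    const fn singleton(kind: u32) -> TokenSet { TokenSet(1u128 << kind) }
    const fn union(self, other: TokenSet) -> TokenSet { TokenSet(self.0 | other.0) }
    fn contains(&self, kind: u32) -> bool { self.0 & (1u128 << kind) != 0 }
}

macro_rules! token_set {
    ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* };
    ($($t:ident),* ,) => { token_set!($($t),*) };
}

// Stand-ins for SyntaxKind values.
const IF_KW: u32 = 10;
const WHILE_KW: u32 = 11;

// token_set![IF_KW, WHILE_KW] expands to
// TokenSet::empty().union(TokenSet::singleton(IF_KW)).union(TokenSet::singleton(WHILE_KW)),
// which is a valid constant expression now that every call is a const fn.
const EXPR_START: TokenSet = token_set![IF_KW, WHILE_KW];

#[test]
fn expr_start_contains_keywords() {
    assert!(EXPR_START.contains(IF_KW));
    assert!(EXPR_START.contains(WHILE_KW));
    assert!(!EXPR_START.contains(0));
}
```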