From 4b989009e3839cfc6f021d1552a46561cee6cde2 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Thu, 27 Aug 2020 18:11:33 +0200
Subject: CONST LOOPS ARE HERE

---
 crates/parser/src/grammar/expressions.rs      |  2 +-
 crates/parser/src/grammar/expressions/atom.rs | 12 ++++++------
 crates/parser/src/grammar/items.rs            |  4 ++--
 crates/parser/src/grammar/paths.rs            |  2 +-
 crates/parser/src/grammar/patterns.rs         | 17 +++++++++++++----
 crates/parser/src/grammar/types.rs            |  8 ++++----
 crates/parser/src/token_set.rs                | 20 ++++++++++----------
 xtask/src/install.rs                          |  2 +-
 8 files changed, 38 insertions(+), 29 deletions(-)

diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs
index e72929f8c..5f885edfd 100644
--- a/crates/parser/src/grammar/expressions.rs
+++ b/crates/parser/src/grammar/expressions.rs
@@ -316,7 +316,7 @@ fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option<CompletedMar
 }
 
 const LHS_FIRST: TokenSet =
-    atom::ATOM_EXPR_FIRST.union(token_set![T![&], T![*], T![!], T![.], T![-]]);
+    atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]]));
 
 fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     let m;
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index ba6dd2fbc..66a92a4e1 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -15,7 +15,7 @@ use super::*;
 //     let _ = b"e";
 //     let _ = br"f";
 // }
-pub(crate) const LITERAL_FIRST: TokenSet = token_set![
+pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
     TRUE_KW,
     FALSE_KW,
     INT_NUMBER,
@@ -25,8 +25,8 @@ pub(crate) const LITERAL_FIRST: TokenSet = token_set![
     STRING,
     RAW_STRING,
     BYTE_STRING,
-    RAW_BYTE_STRING
-];
+    RAW_BYTE_STRING,
+]);
 
 pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
     if !p.at_ts(LITERAL_FIRST) {
@@ -39,7 +39,7 @@ pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
 
 // E.g. for after the break in `if break {}`, this should not match
 pub(super) const ATOM_EXPR_FIRST: TokenSet =
-    LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![
+    LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
         T!['('],
         T!['{'],
         T!['['],
@@ -59,9 +59,9 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T![loop],
         T![for],
         LIFETIME,
-    ]);
+    ]));
 
-const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW, R_DOLLAR];
+const EXPR_RECOVERY_SET: TokenSet = TokenSet::new(&[LET_KW, R_DOLLAR]);
 
 pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
     if let Some(m) = literal(p) {
diff --git a/crates/parser/src/grammar/items.rs b/crates/parser/src/grammar/items.rs
index 8fd8f3b80..22810e6fb 100644
--- a/crates/parser/src/grammar/items.rs
+++ b/crates/parser/src/grammar/items.rs
@@ -26,7 +26,7 @@ pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) {
     }
 }
 
-pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
+pub(super) const ITEM_RECOVERY_SET: TokenSet = TokenSet::new(&[
     FN_KW,
     STRUCT_KW,
     ENUM_KW,
@@ -41,7 +41,7 @@ pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![
     USE_KW,
     MACRO_KW,
     T![;],
-];
+]);
 
 pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool) {
     let m = p.start();
diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs
index 52562afa4..5d297e2d6 100644
--- a/crates/parser/src/grammar/paths.rs
+++ b/crates/parser/src/grammar/paths.rs
@@ -3,7 +3,7 @@
 use super::*;
 
 pub(super) const PATH_FIRST: TokenSet =
-    token_set![IDENT, T![self], T![super], T![crate], T![:], T![<]];
+    TokenSet::new(&[IDENT, T![self], T![super], T![crate], T![:], T![<]]);
 
 pub(super) fn is_path_start(p: &Parser) -> bool {
     is_use_path_start(p) || p.at(T![<])
diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs
index 07b1d6dd5..796f206e1 100644
--- a/crates/parser/src/grammar/patterns.rs
+++ b/crates/parser/src/grammar/patterns.rs
@@ -2,9 +2,18 @@
 
 use super::*;
 
-pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
-    .union(paths::PATH_FIRST)
-    .union(token_set![T![box], T![ref], T![mut], T!['('], T!['['], T![&], T![_], T![-], T![.]]);
+pub(super) const PATTERN_FIRST: TokenSet =
+    expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
+        T![box],
+        T![ref],
+        T![mut],
+        T!['('],
+        T!['['],
+        T![&],
+        T![_],
+        T![-],
+        T![.],
+    ]));
 
 pub(crate) fn pattern(p: &mut Parser) {
     pattern_r(p, PAT_RECOVERY_SET);
@@ -74,7 +83,7 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
 }
 
 const PAT_RECOVERY_SET: TokenSet =
-    token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA];
+    TokenSet::new(&[LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]);
 
 fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     let m = match p.nth(0) {
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index 9d00eb9b9..1ea130ac5 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -2,7 +2,7 @@
 
 use super::*;
 
-pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
+pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(TokenSet::new(&[
     T!['('],
     T!['['],
     T![<],
@@ -16,16 +16,16 @@ pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![
     T![for],
     T![impl],
     T![dyn],
-]);
+]));
 
-const TYPE_RECOVERY_SET: TokenSet = token_set![
+const TYPE_RECOVERY_SET: TokenSet = TokenSet::new(&[
     T![')'],
     T![,],
     L_DOLLAR,
     // test_err struct_field_recover
     // struct S { f pub g: () }
     T![pub],
-];
+]);
 
 pub(crate) fn type_(p: &mut Parser) {
     type_with_bounds_cond(p, true);
diff --git a/crates/parser/src/token_set.rs b/crates/parser/src/token_set.rs
index 994017acf..a68f0144e 100644
--- a/crates/parser/src/token_set.rs
+++ b/crates/parser/src/token_set.rs
@@ -9,15 +9,21 @@ pub(crate) struct TokenSet(u128);
 impl TokenSet {
     pub(crate) const EMPTY: TokenSet = TokenSet(0);
 
-    pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet {
-        TokenSet(mask(kind))
+    pub(crate) const fn new(kinds: &[SyntaxKind]) -> TokenSet {
+        let mut res = 0u128;
+        let mut i = 0;
+        while i < kinds.len() {
+            res |= mask(kinds[i]);
+            i += 1
+        }
+        TokenSet(res)
     }
 
     pub(crate) const fn union(self, other: TokenSet) -> TokenSet {
         TokenSet(self.0 | other.0)
     }
 
-    pub(crate) fn contains(&self, kind: SyntaxKind) -> bool {
+    pub(crate) const fn contains(&self, kind: SyntaxKind) -> bool {
         self.0 & mask(kind) != 0
     }
 }
@@ -26,16 +32,10 @@ const fn mask(kind: SyntaxKind) -> u128 {
     1u128 << (kind as usize)
 }
 
-#[macro_export]
-macro_rules! token_set {
-    ($($t:expr),*) => { TokenSet::EMPTY$(.union(TokenSet::singleton($t)))* };
-    ($($t:expr),* ,) => { token_set!($($t),*) };
-}
-
 #[test]
 fn token_set_works_for_tokens() {
     use crate::SyntaxKind::*;
-    let ts = token_set![EOF, SHEBANG];
+    let ts = TokenSet::new(&[EOF, SHEBANG]);
     assert!(ts.contains(EOF));
     assert!(ts.contains(SHEBANG));
     assert!(!ts.contains(PLUS));
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index b25a6e301..d829790d7 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -7,7 +7,7 @@ use anyhow::{bail, format_err, Context, Result};
 use crate::not_bash::{pushd, run};
 
 // Latest stable, feel free to send a PR if this lags behind.
-const REQUIRED_RUST_VERSION: u32 = 43;
+const REQUIRED_RUST_VERSION: u32 = 46;
 
 pub struct InstallCmd {
     pub client: Option<ClientOpt>,
--
cgit v1.2.3
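
The heart of the patch is in crates/parser/src/token_set.rs: the recursive `token_set!` macro, which chained `union(TokenSet::singleton(..))` calls, is replaced by `TokenSet::new`, a `const fn` that folds a slice of `SyntaxKind`s into a `u128` bit-set with a plain `while` loop. That only compiles because loops in `const fn` were stabilized in Rust 1.46, which is also why `REQUIRED_RUST_VERSION` moves from 43 to 46. Below is a minimal self-contained sketch of the same pattern; the three-variant `SyntaxKind` and the `STARTERS` set are made up for illustration and stand in for the real parser definitions.

// Sketch only: SyntaxKind is reduced to three variants for illustration.
#[derive(Clone, Copy)]
enum SyntaxKind {
    Eof,
    Shebang,
    Plus,
}

#[derive(Clone, Copy)]
struct TokenSet(u128);

impl TokenSet {
    const EMPTY: TokenSet = TokenSet(0);

    // A `while` loop inside a `const fn`: legal from Rust 1.46 onwards.
    const fn new(kinds: &[SyntaxKind]) -> TokenSet {
        let mut res = 0u128;
        let mut i = 0;
        while i < kinds.len() {
            res |= 1u128 << (kinds[i] as usize);
            i += 1;
        }
        TokenSet(res)
    }

    const fn union(self, other: TokenSet) -> TokenSet {
        TokenSet(self.0 | other.0)
    }

    const fn contains(&self, kind: SyntaxKind) -> bool {
        self.0 & (1u128 << (kind as usize)) != 0
    }
}

// The whole set is computed at compile time; no macro or runtime init needed.
const STARTERS: TokenSet = TokenSet::new(&[SyntaxKind::Eof, SyntaxKind::Shebang]);

fn main() {
    assert!(STARTERS.contains(SyntaxKind::Eof));
    assert!(STARTERS.union(TokenSet::EMPTY).contains(SyntaxKind::Shebang));
    assert!(!STARTERS.contains(SyntaxKind::Plus));
}

Because `new`, `union`, and `contains` are all `const fn`, every FIRST and RECOVERY set touched in the grammar modules above stays a `const` item, and the call sites simply swap `token_set![..]` for `TokenSet::new(&[..])`.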