From d77175ce28da45960a89811f273b6c614d7a9413 Mon Sep 17 00:00:00 2001
From: Sergey Parilin
Date: Wed, 15 May 2019 12:34:48 +0300
Subject: fixed macro for brackets

---
 crates/ra_parser/src/syntax_kind/generated.rs      | 4 ++--
 crates/ra_parser/src/syntax_kind/generated.rs.tera | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

(limited to 'crates')

diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs
index 1a08cc6eb..d7926bd91 100644
--- a/crates/ra_parser/src/syntax_kind/generated.rs
+++ b/crates/ra_parser/src/syntax_kind/generated.rs
@@ -245,8 +245,8 @@ use self::SyntaxKind::*;
 macro_rules! T {
     (;) => { $crate::SyntaxKind::SEMI };
     (,) => { $crate::SyntaxKind::COMMA };
-    (() => { $crate::SyntaxKind::L_PAREN };
-    ()) => { $crate::SyntaxKind::R_PAREN };
+    ('(') => { $crate::SyntaxKind::L_PAREN };
+    (')') => { $crate::SyntaxKind::R_PAREN };
     ('{') => { $crate::SyntaxKind::L_CURLY };
     ('}') => { $crate::SyntaxKind::R_CURLY };
     ('[') => { $crate::SyntaxKind::L_BRACK };
diff --git a/crates/ra_parser/src/syntax_kind/generated.rs.tera b/crates/ra_parser/src/syntax_kind/generated.rs.tera
index ccb8ca4ba..f5abbec4b 100644
--- a/crates/ra_parser/src/syntax_kind/generated.rs.tera
+++ b/crates/ra_parser/src/syntax_kind/generated.rs.tera
@@ -36,7 +36,7 @@ use self::SyntaxKind::*;
 #[macro_export]
 macro_rules! T {
 {%- for t in concat(a=single_byte_tokens, b=multi_byte_tokens) %}
-    {%- if t.0 == '{' or t.0 == '}' or t.0 == '[' or t.0 == ']' %}
+    {%- if t.0 == '{' or t.0 == '}' or t.0 == '[' or t.0 == ']' or t.0 == '(' or t.0 == ')' %}
     ('{{t.0}}') => { $crate::SyntaxKind::{{t.1}} };
 {%- else %}
     ({{t.0}}) => { $crate::SyntaxKind::{{t.1}} };
--
cgit v1.2.3


From 993abedd77cf23ce2281b6c8e60cab49ab4fa97e Mon Sep 17 00:00:00 2001
From: Sergey Parilin
Date: Wed, 15 May 2019 15:35:47 +0300
Subject: apply T!
macro where it is possible --- crates/ra_assists/src/add_explicit_type.rs | 4 +- crates/ra_assists/src/ast_editor.rs | 14 +- crates/ra_assists/src/auto_import.rs | 7 +- crates/ra_assists/src/change_visibility.rs | 5 +- crates/ra_assists/src/flip_comma.rs | 4 +- crates/ra_assists/src/remove_dbg.rs | 8 +- crates/ra_assists/src/split_import.rs | 5 +- crates/ra_fmt/src/lib.rs | 14 +- crates/ra_ide_api/src/diagnostics.rs | 6 +- crates/ra_ide_api/src/extend_selection.rs | 2 +- crates/ra_ide_api/src/join_lines.rs | 9 +- crates/ra_ide_api/src/matching_brace.rs | 5 +- crates/ra_ide_api/src/syntax_highlighting.rs | 4 +- crates/ra_mbe/src/subtree_source.rs | 22 ++-- crates/ra_mbe/src/syntax_bridge.rs | 45 +++---- crates/ra_parser/src/event.rs | 2 +- crates/ra_parser/src/grammar.rs | 42 +++--- crates/ra_parser/src/grammar/attributes.rs | 10 +- crates/ra_parser/src/grammar/expressions.rs | 156 +++++++++++------------ crates/ra_parser/src/grammar/expressions/atom.rs | 144 ++++++++++----------- crates/ra_parser/src/grammar/items.rs | 114 ++++++++--------- crates/ra_parser/src/grammar/items/consts.rs | 10 +- crates/ra_parser/src/grammar/items/nominal.rs | 66 +++++----- crates/ra_parser/src/grammar/items/traits.rs | 38 +++--- crates/ra_parser/src/grammar/items/use_item.rs | 30 ++--- crates/ra_parser/src/grammar/params.rs | 38 +++--- crates/ra_parser/src/grammar/paths.rs | 18 +-- crates/ra_parser/src/grammar/patterns.rs | 78 ++++++------ crates/ra_parser/src/grammar/type_args.rs | 14 +- crates/ra_parser/src/grammar/type_params.rs | 46 +++---- crates/ra_parser/src/grammar/types.rs | 84 ++++++------ crates/ra_parser/src/parser.rs | 25 ++-- crates/ra_syntax/src/ast/expr_extensions.rs | 77 +++++------ crates/ra_syntax/src/ast/extensions.rs | 44 +++---- crates/ra_syntax/src/lib.rs | 9 +- crates/ra_syntax/src/parsing/lexer.rs | 25 ++-- crates/ra_syntax/src/parsing/reparsing.rs | 11 +- crates/ra_syntax/src/validation.rs | 7 +- 38 files changed, 619 insertions(+), 623 deletions(-) (limited to 'crates') diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs index cb0ac9885..f3ed74b7f 100644 --- a/crates/ra_assists/src/add_explicit_type.rs +++ b/crates/ra_assists/src/add_explicit_type.rs @@ -3,7 +3,7 @@ use hir::{ db::HirDatabase, }; use ra_syntax::{ - SyntaxKind, + T, ast::{LetStmt, PatKind, NameOwner, AstNode} }; @@ -24,7 +24,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx) -> Option< let name = pat.name()?; let name_range = name.syntax().range(); // Assist not applicable if the type has already been specified - if stmt.syntax().children_with_tokens().any(|child| child.kind() == SyntaxKind::COLON) { + if stmt.syntax().children_with_tokens().any(|child| child.kind() == T![:]) { return None; } // Infer type diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs index aa7aeaabb..9afcac01a 100644 --- a/crates/ra_assists/src/ast_editor.rs +++ b/crates/ra_assists/src/ast_editor.rs @@ -2,7 +2,7 @@ use std::{iter, ops::RangeInclusive}; use arrayvec::ArrayVec; use ra_text_edit::TextEditBuilder; -use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction}; +use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction, T}; use ra_fmt::leading_indent; use hir::Name; @@ -49,7 +49,7 @@ impl AstEditor { fn do_make_multiline(&mut self) { let l_curly = - match self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) { + match 
self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { Some(it) => it, None => return, }; @@ -124,7 +124,7 @@ impl AstEditor { if let Some(comma) = $anchor .syntax() .siblings_with_tokens(Direction::Next) - .find(|it| it.kind() == COMMA) + .find(|it| it.kind() == T![,]) { InsertPosition::After(comma) } else { @@ -154,7 +154,7 @@ impl AstEditor { } fn l_curly(&self) -> Option { - self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) + self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) } } @@ -188,7 +188,7 @@ impl AstEditor { } fn l_curly(&self) -> Option { - self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) + self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) } } @@ -290,7 +290,7 @@ fn ast_node_from_file_text(text: &str) -> TreeArc { mod tokens { use once_cell::sync::Lazy; - use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*}; + use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T}; static SOURCE_FILE: Lazy> = Lazy::new(|| SourceFile::parse(",\n; ;")); @@ -299,7 +299,7 @@ mod tokens { .syntax() .descendants_with_tokens() .filter_map(|it| it.as_token()) - .find(|it| it.kind() == COMMA) + .find(|it| it.kind() == T![,]) .unwrap() } diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs index 7c856c19b..1566cf179 100644 --- a/crates/ra_assists/src/auto_import.rs +++ b/crates/ra_assists/src/auto_import.rs @@ -2,8 +2,9 @@ use ra_text_edit::TextEditBuilder; use hir::{ self, db::HirDatabase}; use ra_syntax::{ + T, ast::{ self, NameOwner }, AstNode, SyntaxNode, Direction, TextRange, SmolStr, - SyntaxKind::{ PATH, PATH_SEGMENT, COLONCOLON, COMMA } + SyntaxKind::{ PATH, PATH_SEGMENT } }; use crate::{ AssistId, @@ -23,7 +24,7 @@ fn collect_path_segments_raw<'a>( children.next().map(|n| (n, n.kind())), ); match (first, second, third) { - (Some((subpath, PATH)), Some((_, COLONCOLON)), Some((segment, PATH_SEGMENT))) => { + (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { path = ast::Path::cast(subpath.as_node()?)?; segments.push(ast::PathSegment::cast(segment.as_node()?)?); } @@ -421,7 +422,7 @@ fn make_assist_add_in_tree_list( let last = tree_list.use_trees().last(); if let Some(last) = last { let mut buf = String::new(); - let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == COMMA); + let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]); let offset = if let Some(comma) = comma { comma.range().end() } else { diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs index c63470726..620f534b5 100644 --- a/crates/ra_assists/src/change_visibility.rs +++ b/crates/ra_assists/src/change_visibility.rs @@ -1,8 +1,9 @@ use hir::db::HirDatabase; use ra_syntax::{ + T, AstNode, SyntaxNode, TextUnit, ast::{self, VisibilityOwner, NameOwner}, - SyntaxKind::{VISIBILITY, FN_KW, MOD_KW, STRUCT_KW, ENUM_KW, TRAIT_KW, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT, WHITESPACE, COMMENT, ATTR}, + SyntaxKind::{VISIBILITY, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT, WHITESPACE, COMMENT, ATTR}, }; use crate::{AssistCtx, Assist, AssistId}; @@ -16,7 +17,7 @@ pub(crate) fn change_visibility(ctx: AssistCtx) -> Option) -> Option { let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { - FN_KW | MOD_KW | STRUCT_KW | ENUM_KW | TRAIT_KW => true, + T![fn] | T![mod] | T![struct] | T![enum] | 
T![trait] => true, _ => false, }); diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs index a9b108111..7626ffad3 100644 --- a/crates/ra_assists/src/flip_comma.rs +++ b/crates/ra_assists/src/flip_comma.rs @@ -1,14 +1,14 @@ use hir::db::HirDatabase; use ra_syntax::{ + T, Direction, - SyntaxKind::COMMA, algo::non_trivia_sibling, }; use crate::{AssistCtx, Assist, AssistId}; pub(crate) fn flip_comma(mut ctx: AssistCtx) -> Option { - let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == COMMA)?; + let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; let next = non_trivia_sibling(comma.into(), Direction::Next)?; ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs index ae9958f11..6e900f8ef 100644 --- a/crates/ra_assists/src/remove_dbg.rs +++ b/crates/ra_assists/src/remove_dbg.rs @@ -2,9 +2,7 @@ use hir::db::HirDatabase; use ra_syntax::{ ast::{self, AstNode}, TextUnit, - SyntaxKind::{ - L_PAREN, R_PAREN, L_CURLY, R_CURLY, L_BRACK, R_BRACK, EXCL - }, + T }; use crate::{AssistCtx, Assist, AssistId}; @@ -64,7 +62,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option Option Some(true), + (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) => Some(true), _ => Some(false), } } diff --git a/crates/ra_assists/src/split_import.rs b/crates/ra_assists/src/split_import.rs index 57e0efaf2..881c5ecdc 100644 --- a/crates/ra_assists/src/split_import.rs +++ b/crates/ra_assists/src/split_import.rs @@ -2,14 +2,15 @@ use std::iter::successors; use hir::db::HirDatabase; use ra_syntax::{ - TextUnit, AstNode, SyntaxKind::COLONCOLON, + T, + TextUnit, AstNode, ast, }; use crate::{AssistCtx, Assist, AssistId}; pub(crate) fn split_import(mut ctx: AssistCtx) -> Option { - let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?; + let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == T![::])?; let path = ast::Path::cast(colon_colon.parent())?; let top_path = successors(Some(path), |it| it.parent_path()).last()?; diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 603be1854..aac5a1d23 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs @@ -3,7 +3,7 @@ use std::iter::successors; use itertools::Itertools; use ra_syntax::{ - SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind, + SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind, T, ast::{self, AstNode, AstToken}, }; @@ -38,7 +38,7 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { return None; } let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { - WHITESPACE | L_CURLY | R_CURLY => false, + WHITESPACE | T!['{'] | T!['}'] => false, _ => it != &expr.syntax(), }); if non_trivial_children.count() > 0 { @@ -49,8 +49,8 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { match left { - L_PAREN | L_BRACK => return "", - L_CURLY => { + T!['('] | T!['['] => return "", + T!['{'] => { if let USE_TREE = right { return ""; } @@ -58,13 +58,13 @@ pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { _ => (), } match right { - R_PAREN | R_BRACK => return "", - R_CURLY => { + T![')'] | T![']'] => return "", + T!['}'] => { if let USE_TREE = left { return ""; } } - DOT => return "", + T![.] 
=> return "", _ => (), } " " diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index e23d178b0..9a0eb2c14 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs @@ -4,7 +4,7 @@ use itertools::Itertools; use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; use ra_db::SourceDatabase; use ra_syntax::{ - Location, SourceFile, SyntaxKind, TextRange, SyntaxNode, + T, Location, SourceFile, TextRange, SyntaxNode, ast::{self, AstNode, NamedFieldList, NamedField}, }; use ra_assists::ast_editor::{AstEditor, AstBuilder}; @@ -130,9 +130,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( single_use_tree: &ast::UseTree, ) -> Option { let use_tree_list_node = single_use_tree.syntax().parent()?; - if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() - == SyntaxKind::SELF_KW - { + if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { let start = use_tree_list_node.prev_sibling_or_token()?.range().start(); let end = use_tree_list_node.range().end(); let range = TextRange::from_to(start, end); diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 163fa8c3c..4553faad0 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs @@ -157,7 +157,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option { }) .next() .and_then(|it| it.as_token()) - .filter(|node| node.kind() == COMMA) + .filter(|node| node.kind() == T![,]) } if let Some(comma_node) = nearby_comma(node, Direction::Prev) { diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs index 598717311..4ca005466 100644 --- a/crates/ra_ide_api/src/join_lines.rs +++ b/crates/ra_ide_api/src/join_lines.rs @@ -1,7 +1,8 @@ use itertools::Itertools; use ra_syntax::{ + T, SourceFile, TextRange, TextUnit, SyntaxNode, SyntaxElement, SyntaxToken, - SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK}, + SyntaxKind::{self, WHITESPACE}, algo::{find_covering_element, non_trivia_sibling}, ast::{self, AstNode, AstToken}, Direction, @@ -89,7 +90,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn if is_trailing_comma(prev.kind(), next.kind()) { // Removes: trailing comma, newline (incl. surrounding whitespace) edit.delete(TextRange::from_to(prev.range().start(), token.range().end())); - } else if prev.kind() == COMMA && next.kind() == R_CURLY { + } else if prev.kind() == T![,] && next.kind() == T!['}'] { // Removes: comma, newline (incl. 
surrounding whitespace) let space = if let Some(left) = prev.prev_sibling_or_token() { compute_ws(left.kind(), next.kind()) @@ -116,7 +117,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn fn has_comma_after(node: &SyntaxNode) -> bool { match non_trivia_sibling(node.into(), Direction::Next) { - Some(n) => n.kind() == COMMA, + Some(n) => n.kind() == T![,], _ => false, } } @@ -150,7 +151,7 @@ fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Optio fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { match (left, right) { - (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, + (T![,], T![')']) | (T![,], T![']']) => true, _ => false, } } diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs index bebd16a69..eaa4b620c 100644 --- a/crates/ra_ide_api/src/matching_brace.rs +++ b/crates/ra_ide_api/src/matching_brace.rs @@ -1,13 +1,14 @@ use ra_syntax::{ SourceFile, TextUnit, algo::find_token_at_offset, - SyntaxKind::{self, *}, + SyntaxKind::{self}, ast::AstNode, + T }; pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option { const BRACES: &[SyntaxKind] = - &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE]; + &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]]; let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset) .filter_map(|node| { let idx = BRACES.iter().position(|&brace| brace == node.kind())?; diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index d9a28d2b5..a03b13839 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs @@ -1,6 +1,6 @@ use rustc_hash::FxHashSet; -use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*, SyntaxElement}; +use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*, SyntaxElement, T}; use ra_db::SourceDatabase; use crate::{FileId, db::RootDatabase}; @@ -40,7 +40,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec range_end = sibling.range().end(), + T![!] 
| IDENT => range_end = sibling.range().end(), _ => (), } } diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 3554dc110..c938acf64 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs @@ -1,5 +1,5 @@ use ra_parser::{TokenSource}; -use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*}; +use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; use std::cell::{RefCell}; // A Sequece of Token, @@ -284,9 +284,9 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { let (kinds, texts) = match d { - tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), - tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"), - tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"), + tt::Delimiter::Parenthesis => ([T!['('], T![')']], "()"), + tt::Delimiter::Brace => ([T!['{'], T!['}']], "{}"), + tt::Delimiter::Bracket => ([T!['['], T![']']], "[]"), tt::Delimiter::None => ([L_DOLLAR, R_DOLLAR], ""), }; @@ -299,8 +299,8 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { fn convert_literal(l: &tt::Literal) -> TtToken { let kind = classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { - "true" => SyntaxKind::TRUE_KW, - "false" => SyntaxKind::FALSE_KW, + "true" => T![true], + "false" => T![false], _ => panic!("Fail to convert given literal {:#?}", &l), }); @@ -320,11 +320,11 @@ fn convert_ident(ident: &tt::Ident) -> TtToken { fn convert_punct(p: &tt::Punct) -> TtToken { let kind = match p.char { // lexer may produce compound tokens for these ones - '.' => DOT, - ':' => COLON, - '=' => EQ, - '!' => EXCL, - '-' => MINUS, + '.' => T![.], + ':' => T![:], + '=' => T![=], + '!' 
=> T![!], + '-' => T![-], c => SyntaxKind::from_char(c).unwrap(), }; let text = { diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 5e6a6f2a1..d8e344557 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -1,7 +1,7 @@ use ra_parser::{TreeSink, ParseError}; use ra_syntax::{ AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, - ast, SyntaxKind::*, TextUnit + ast, SyntaxKind::*, TextUnit, T }; use crate::subtree_source::{SubtreeTokenSource, Querier}; @@ -211,9 +211,9 @@ fn convert_tt( let first_child = tt.first_child_or_token()?; let last_child = tt.last_child_or_token()?; let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) { - (L_PAREN, R_PAREN) => (tt::Delimiter::Parenthesis, true), - (L_CURLY, R_CURLY) => (tt::Delimiter::Brace, true), - (L_BRACK, R_BRACK) => (tt::Delimiter::Bracket, true), + (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true), + (T!['{'], T!['}']) => (tt::Delimiter::Brace, true), + (T!['['], T![']']) => (tt::Delimiter::Bracket, true), _ => (tt::Delimiter::None, false), }; @@ -248,23 +248,22 @@ fn convert_tt( token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); } else { - let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW - || token.kind() == SyntaxKind::FALSE_KW - { - tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() - } else if token.kind().is_keyword() - || token.kind() == IDENT - || token.kind() == LIFETIME - { - let relative_range = token.range() - global_offset; - let id = token_map.alloc(relative_range); - let text = token.text().clone(); - tt::Leaf::from(tt::Ident { text, id }).into() - } else if token.kind().is_literal() { - tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() - } else { - return None; - }; + let child: tt::TokenTree = + if token.kind() == T![true] || token.kind() == T![false] { + tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() + } else if token.kind().is_keyword() + || token.kind() == IDENT + || token.kind() == LIFETIME + { + let relative_range = token.range() - global_offset; + let id = token_map.alloc(relative_range); + let text = token.text().clone(); + tt::Leaf::from(tt::Ident { text, id }).into() + } else if token.kind().is_literal() { + tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() + } else { + return None; + }; token_trees.push(child); } } @@ -305,10 +304,8 @@ impl<'a, Q: Querier> TtTreeSink<'a, Q> { } fn is_delimiter(kind: SyntaxKind) -> bool { - use SyntaxKind::*; - match kind { - L_PAREN | L_BRACK | L_CURLY | R_PAREN | R_BRACK | R_CURLY => true, + T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true, _ => false, } } diff --git a/crates/ra_parser/src/event.rs b/crates/ra_parser/src/event.rs index 87cf4eca0..51beb0866 100644 --- a/crates/ra_parser/src/event.rs +++ b/crates/ra_parser/src/event.rs @@ -38,7 +38,7 @@ pub(crate) enum Event { /// The events for it would look like this: /// /// - /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH + /// START(PATH) IDENT('foo') FINISH START(PATH) T![::] IDENT('bar') FINISH /// | /\ /// | | /// +------forward-parent------+ diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs index a538ec081..cf603eba1 100644 --- a/crates/ra_parser/src/grammar.rs +++ b/crates/ra_parser/src/grammar.rs @@ -59,7 +59,7 @@ pub(crate) fn macro_stmts(p: &mut Parser) { let m = p.start(); while !p.at(EOF) { - 
if p.current() == SEMI { + if p.current() == T![;] { p.bump(); continue; } @@ -103,7 +103,7 @@ pub(crate) fn block(p: &mut Parser) { pub(crate) fn meta_item(p: &mut Parser) { fn is_delimiter(p: &mut Parser) -> bool { match p.current() { - L_CURLY | L_PAREN | L_BRACK => true, + T!['{'] | T!['('] | T!['['] => true, _ => false, } } @@ -123,12 +123,12 @@ pub(crate) fn meta_item(p: &mut Parser) { // https://doc.rust-lang.org/reference/paths.html#simple-paths // The start of an meta must be a simple path match p.current() { - IDENT | COLONCOLON | SUPER_KW | SELF_KW | CRATE_KW => p.bump(), - EQ => { + IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump(), + T![=] => { p.bump(); match p.current() { c if c.is_literal() => p.bump(), - TRUE_KW | FALSE_KW => p.bump(), + T![true] | T![false] => p.bump(), _ => {} } break; @@ -158,7 +158,7 @@ pub(crate) fn reparser( MATCH_ARM_LIST => items::match_arm_list, USE_TREE_LIST => items::use_tree_list, EXTERN_ITEM_LIST => items::extern_item_list, - TOKEN_TREE if first_child? == L_CURLY => items::token_tree, + TOKEN_TREE if first_child? == T!['{'] => items::token_tree, ITEM_LIST => match parent? { IMPL_BLOCK => items::impl_item_list, TRAIT_DEF => items::trait_item_list, @@ -184,26 +184,26 @@ impl BlockLike { pub(crate) fn opt_visibility(p: &mut Parser) -> bool { match p.current() { - PUB_KW => { + T![pub] => { let m = p.start(); p.bump(); - if p.at(L_PAREN) { + if p.at(T!['(']) { match p.nth(1) { // test crate_visibility // pub(crate) struct S; // pub(self) struct S; // pub(self) struct S; // pub(self) struct S; - CRATE_KW | SELF_KW | SUPER_KW => { + T![crate] | T![self] | T![super] => { p.bump(); p.bump(); - p.expect(R_PAREN); + p.expect(T![')']); } - IN_KW => { + T![in] => { p.bump(); p.bump(); paths::use_path(p); - p.expect(R_PAREN); + p.expect(T![')']); } _ => (), } @@ -217,7 +217,7 @@ pub(crate) fn opt_visibility(p: &mut Parser) -> bool { // // test crate_keyword_path // fn foo() { crate::foo(); } - CRATE_KW if p.nth(1) != COLONCOLON => { + T![crate] if p.nth(1) != T![::] => { let m = p.start(); p.bump(); m.complete(p, VISIBILITY); @@ -228,10 +228,10 @@ pub(crate) fn opt_visibility(p: &mut Parser) -> bool { } fn opt_alias(p: &mut Parser) { - if p.at(AS_KW) { + if p.at(T![as]) { let m = p.start(); p.bump(); - if !p.eat(UNDERSCORE) { + if !p.eat(T![_]) { name(p); } m.complete(p, ALIAS); @@ -239,7 +239,7 @@ fn opt_alias(p: &mut Parser) { } fn abi(p: &mut Parser) { - assert!(p.at(EXTERN_KW)); + assert!(p.at(T![extern])); let abi = p.start(); p.bump(); match p.current() { @@ -250,7 +250,7 @@ fn abi(p: &mut Parser) { } fn opt_fn_ret_type(p: &mut Parser) -> bool { - if p.at(THIN_ARROW) { + if p.at(T![->]) { let m = p.start(); p.bump(); types::type_(p); @@ -280,21 +280,21 @@ fn name_ref(p: &mut Parser) { let m = p.start(); p.bump(); m.complete(p, NAME_REF); - } else if p.at(SELF_KW) { + } else if p.at(T![self]) { let m = p.start(); p.bump(); - m.complete(p, SELF_KW); + m.complete(p, T![self]); } else { p.err_and_bump("expected identifier"); } } fn error_block(p: &mut Parser, message: &str) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.error(message); p.bump(); expressions::expr_block_contents(p); - p.eat(R_CURLY); + p.eat(T!['}']); m.complete(p, ERROR); } diff --git a/crates/ra_parser/src/grammar/attributes.rs b/crates/ra_parser/src/grammar/attributes.rs index cd30e8a45..20d58445f 100644 --- a/crates/ra_parser/src/grammar/attributes.rs +++ b/crates/ra_parser/src/grammar/attributes.rs @@ -1,28 +1,28 @@ use super::*; pub(super) fn 
inner_attributes(p: &mut Parser) { - while p.current() == POUND && p.nth(1) == EXCL { + while p.current() == T![#] && p.nth(1) == T![!] { attribute(p, true) } } pub(super) fn outer_attributes(p: &mut Parser) { - while p.at(POUND) { + while p.at(T![#]) { attribute(p, false) } } fn attribute(p: &mut Parser, inner: bool) { let attr = p.start(); - assert!(p.at(POUND)); + assert!(p.at(T![#])); p.bump(); if inner { - assert!(p.at(EXCL)); + assert!(p.at(T![!])); p.bump(); } - if p.at(L_BRACK) { + if p.at(T!['[']) { items::token_tree(p); } else { p.error("expected `[`"); diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs index 9fe529f53..bb6c78b5f 100644 --- a/crates/ra_parser/src/grammar/expressions.rs +++ b/crates/ra_parser/src/grammar/expressions.rs @@ -36,14 +36,14 @@ fn expr_no_struct(p: &mut Parser) { // fn c() { 1; 2; } // fn d() { 1; 2 } pub(crate) fn block(p: &mut Parser) { - if !p.at(L_CURLY) { + if !p.at(T!['{']) { p.error("expected a block"); return; } let m = p.start(); p.bump(); expr_block_contents(p); - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, BLOCK); } @@ -65,10 +65,10 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { // #[C] #[D] {} // #[D] return (); // } - let has_attrs = p.at(POUND); + let has_attrs = p.at(T![#]); attributes::outer_attributes(p); - if p.at(LET_KW) { + if p.at(T![let]) { let_stmt(p, m, with_semi); return; } @@ -90,7 +90,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { p.error(format!("attributes are not allowed on {:?}", kind)); } - if p.at(R_CURLY) { + if p.at(T!['}']) { // test attr_on_last_expr_in_block // fn foo() { // { #[A] bar!()? } @@ -121,15 +121,15 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { match with_semi { StmtWithSemi::Yes => { if blocklike.is_block() { - p.eat(SEMI); + p.eat(T![;]); } else { - p.expect(SEMI); + p.expect(T![;]); } } StmtWithSemi::No => {} StmtWithSemi::Optional => { - if p.at(SEMI) { - p.eat(SEMI); + if p.at(T![;]) { + p.eat(T![;]); } } } @@ -145,24 +145,24 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { // let d: i32 = 92; // } fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { - assert!(p.at(LET_KW)); + assert!(p.at(T![let])); p.bump(); patterns::pattern(p); - if p.at(COLON) { + if p.at(T![:]) { types::ascription(p); } - if p.eat(EQ) { + if p.eat(T![=]) { expressions::expr(p); } match with_semi { StmtWithSemi::Yes => { - p.expect(SEMI); + p.expect(T![;]); } StmtWithSemi::No => {} StmtWithSemi::Optional => { - if p.at(SEMI) { - p.eat(SEMI); + if p.at(T![;]) { + p.eat(T![;]); } } } @@ -174,12 +174,12 @@ pub(crate) fn expr_block_contents(p: &mut Parser) { // This is checked by a validator attributes::inner_attributes(p); - while !p.at(EOF) && !p.at(R_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { // test nocontentexpr // fn foo(){ // ;;;some_expr();;;;{;;;};;;;Ok(()) // } - if p.current() == SEMI { + if p.current() == T![;] { p.bump(); continue; } @@ -202,41 +202,41 @@ enum Op { fn current_op(p: &Parser) -> (u8, Op) { if let Some(t) = p.current3() { match t { - (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)), - (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)), + (T![<], T![<], T![=]) => return (1, Op::Composite(T![<<=], 3)), + (T![>], T![>], T![=]) => return (1, Op::Composite(T![>>=], 3)), _ => (), } } if let Some(t) = p.current2() { match t { - (PLUS, EQ) => return (1, Op::Composite(PLUSEQ, 2)), - (MINUS, EQ) => return (1, Op::Composite(MINUSEQ, 2)), - 
(STAR, EQ) => return (1, Op::Composite(STAREQ, 2)), - (PERCENT, EQ) => return (1, Op::Composite(PERCENTEQ, 2)), - (SLASH, EQ) => return (1, Op::Composite(SLASHEQ, 2)), - (PIPE, EQ) => return (1, Op::Composite(PIPEEQ, 2)), - (AMP, EQ) => return (1, Op::Composite(AMPEQ, 2)), - (CARET, EQ) => return (1, Op::Composite(CARETEQ, 2)), - (PIPE, PIPE) => return (3, Op::Composite(PIPEPIPE, 2)), - (AMP, AMP) => return (4, Op::Composite(AMPAMP, 2)), - (L_ANGLE, EQ) => return (5, Op::Composite(LTEQ, 2)), - (R_ANGLE, EQ) => return (5, Op::Composite(GTEQ, 2)), - (L_ANGLE, L_ANGLE) => return (9, Op::Composite(SHL, 2)), - (R_ANGLE, R_ANGLE) => return (9, Op::Composite(SHR, 2)), + (T![+], T![=]) => return (1, Op::Composite(T![+=], 2)), + (T![-], T![=]) => return (1, Op::Composite(T![-=], 2)), + (T![*], T![=]) => return (1, Op::Composite(T![*=], 2)), + (T![%], T![=]) => return (1, Op::Composite(T![%=], 2)), + (T![/], T![=]) => return (1, Op::Composite(T![/=], 2)), + (T![|], T![=]) => return (1, Op::Composite(T![|=], 2)), + (T![&], T![=]) => return (1, Op::Composite(T![&=], 2)), + (T![^], T![=]) => return (1, Op::Composite(T![^=], 2)), + (T![|], T![|]) => return (3, Op::Composite(T![||], 2)), + (T![&], T![&]) => return (4, Op::Composite(T![&&], 2)), + (T![<], T![=]) => return (5, Op::Composite(T![<=], 2)), + (T![>], T![=]) => return (5, Op::Composite(T![>=], 2)), + (T![<], T![<]) => return (9, Op::Composite(T![<<], 2)), + (T![>], T![>]) => return (9, Op::Composite(T![>>], 2)), _ => (), } } let bp = match p.current() { - EQ => 1, - DOTDOT | DOTDOTEQ => 2, - EQEQ | NEQ | L_ANGLE | R_ANGLE => 5, - PIPE => 6, - CARET => 7, - AMP => 8, - MINUS | PLUS => 10, - STAR | SLASH | PERCENT => 11, + T![=] => 1, + T![..] | T![..=] => 2, + T![==] | T![!=] | T![<] | T![>] => 5, + T![|] => 6, + T![^] => 7, + T![&] => 8, + T![-] | T![+] => 10, + T![*] | T![/] | T![%] => 11, _ => 0, }; (bp, Op::Simple) @@ -284,7 +284,7 @@ fn expr_bp( newly_dollar_open = false; } - let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ; + let is_range = p.current() == T![..] || p.current() == T![..=]; let (op_bp, op) = current_op(p); if op_bp < bp { break; @@ -318,10 +318,10 @@ fn lhs( // let _ = &1; // let _ = &mut &f(); // } - AMP => { + T![&] => { m = p.start(); p.bump(); - p.eat(MUT_KW); + p.eat(T![mut]); REF_EXPR } // test unary_expr @@ -330,14 +330,14 @@ fn lhs( // !!true; // --1; // } - STAR | EXCL | MINUS => { + T![*] | T![!] | T![-] => { m = p.start(); p.bump(); PREFIX_EXPR } // test full_range_expr // fn foo() { xs[..]; } - DOTDOT | DOTDOTEQ => { + T![..] | T![..=] => { m = p.start(); p.bump(); if p.at_ts(EXPR_FIRST) { @@ -375,21 +375,21 @@ fn postfix_expr( // [] => {} // } // } - L_PAREN if allow_calls => call_expr(p, lhs), - L_BRACK if allow_calls => index_expr(p, lhs), - DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => { + T!['('] if allow_calls => call_expr(p, lhs), + T!['['] if allow_calls => index_expr(p, lhs), + T![.] if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::]) => { method_call_expr(p, lhs) } - DOT => field_expr(p, lhs), + T![.] => field_expr(p, lhs), // test postfix_range // fn foo() { let x = 1..; } - DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => { + T![..] | T![..=] if !EXPR_FIRST.contains(p.nth(1)) => { let m = lhs.precede(p); p.bump(); m.complete(p, RANGE_EXPR) } - QUESTION => try_expr(p, lhs), - AS_KW => cast_expr(p, lhs), + T![?] 
=> try_expr(p, lhs), + T![as] => cast_expr(p, lhs), _ => break, }; allow_calls = true @@ -405,7 +405,7 @@ fn postfix_expr( // f(::func()); // } fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); let m = lhs.precede(p); arg_list(p); m.complete(p, CALL_EXPR) @@ -416,11 +416,11 @@ fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { // x[1][2]; // } fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(L_BRACK)); + assert!(p.at(T!['['])); let m = lhs.precede(p); p.bump(); expr(p); - p.expect(R_BRACK); + p.expect(T![']']); m.complete(p, INDEX_EXPR) } @@ -430,12 +430,12 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { // y.bar::(1, 2,); // } fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)); + assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::])); let m = lhs.precede(p); p.bump(); name_ref(p); type_args::opt_type_arg_list(p, true); - if p.at(L_PAREN) { + if p.at(T!['(']) { arg_list(p); } m.complete(p, METHOD_CALL_EXPR) @@ -455,7 +455,7 @@ fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { // x.0x01; // } fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(DOT)); + assert!(p.at(T![.])); let m = lhs.precede(p); p.bump(); if p.at(IDENT) { @@ -463,7 +463,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { } else if p.at(INT_NUMBER) { p.bump(); } else if p.at(FLOAT_NUMBER) { - // FIXME: How to recover and instead parse INT + DOT? + // FIXME: How to recover and instead parse INT + T![.]? p.bump(); } else { p.error("expected field name or number") @@ -476,7 +476,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { // x?; // } fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(QUESTION)); + assert!(p.at(T![?])); let m = lhs.precede(p); p.bump(); m.complete(p, TRY_EXPR) @@ -490,7 +490,7 @@ fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { // 0x36 as u8 <= 0x37; // } fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(AS_KW)); + assert!(p.at(T![as])); let m = lhs.precede(p); p.bump(); // Use type_no_bounds(), because cast expressions are not @@ -500,20 +500,20 @@ fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { } fn arg_list(p: &mut Parser) { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); let m = p.start(); p.bump(); - while !p.at(R_PAREN) && !p.at(EOF) { + while !p.at(T![')']) && !p.at(EOF) { if !p.at_ts(EXPR_FIRST) { p.error("expected expression"); break; } expr(p); - if !p.at(R_PAREN) && !p.expect(COMMA) { + if !p.at(T![')']) && !p.expect(T![,]) { break; } } - p.eat(R_PAREN); + p.eat(T![')']); m.complete(p, ARG_LIST); } @@ -525,15 +525,15 @@ fn arg_list(p: &mut Parser) { // let _ = format!(); // } fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { - assert!(paths::is_path_start(p) || p.at(L_ANGLE)); + assert!(paths::is_path_start(p) || p.at(T![<])); let m = p.start(); paths::expr_path(p); match p.current() { - L_CURLY if !r.forbid_structs => { + T!['{'] if !r.forbid_structs => { named_field_list(p); (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) } - EXCL => { + T![!] 
=> { let block_like = items::macro_call_after_excl(p); (m.complete(p, MACRO_CALL), block_like) } @@ -548,35 +548,35 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { // S { x, y: 32, ..Default::default() }; // } pub(crate) fn named_field_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); - while !p.at(EOF) && !p.at(R_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { match p.current() { // test struct_literal_field_with_attr // fn main() { // S { #[cfg(test)] field: 1 } // } - IDENT | POUND => { + IDENT | T![#] => { let m = p.start(); attributes::outer_attributes(p); name_ref(p); - if p.eat(COLON) { + if p.eat(T![:]) { expr(p); } m.complete(p, NAMED_FIELD); } - DOTDOT => { + T![..] => { p.bump(); expr(p); } - L_CURLY => error_block(p, "expected a field"), + T!['{'] => error_block(p, "expected a field"), _ => p.err_and_bump("expected identifier"), } - if !p.at(R_CURLY) { - p.expect(COMMA); + if !p.at(T!['}']) { + p.expect(T![,]); } } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, NAMED_FIELD_LIST); } diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs index 8dc7e44a9..8b1a1de49 100644 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ b/crates/ra_parser/src/grammar/expressions/atom.rs @@ -60,29 +60,29 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar if let Some(m) = literal(p) { return Some((m, BlockLike::NotBlock)); } - if paths::is_path_start(p) || p.at(L_ANGLE) { + if paths::is_path_start(p) || p.at(T![<]) { return Some(path_expr(p, r)); } let la = p.nth(1); let done = match p.current() { - L_PAREN => tuple_expr(p), - L_BRACK => array_expr(p), - PIPE => lambda_expr(p), - MOVE_KW if la == PIPE => lambda_expr(p), - ASYNC_KW if la == PIPE || (la == MOVE_KW && p.nth(2) == PIPE) => lambda_expr(p), - IF_KW => if_expr(p), + T!['('] => tuple_expr(p), + T!['['] => array_expr(p), + T![|] => lambda_expr(p), + T![move] if la == T![|] => lambda_expr(p), + T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p), + T![if] => if_expr(p), - LOOP_KW => loop_expr(p, None), - FOR_KW => for_expr(p, None), - WHILE_KW => while_expr(p, None), - LIFETIME if la == COLON => { + T![loop] => loop_expr(p, None), + T![for] => for_expr(p, None), + T![while] => while_expr(p, None), + LIFETIME if la == T![:] => { let m = p.start(); label(p); match p.current() { - LOOP_KW => loop_expr(p, Some(m)), - FOR_KW => for_expr(p, Some(m)), - WHILE_KW => while_expr(p, Some(m)), - L_CURLY => block_expr(p, Some(m)), + T![loop] => loop_expr(p, Some(m)), + T![for] => for_expr(p, Some(m)), + T![while] => while_expr(p, Some(m)), + T!['{'] => block_expr(p, Some(m)), _ => { // test_err misplaced_label_err // fn main() { @@ -94,22 +94,22 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar } } } - ASYNC_KW if la == L_CURLY || (la == MOVE_KW && p.nth(2) == L_CURLY) => { + T![async] if la == T!['{'] || (la == T![move] && p.nth(2) == T!['{']) => { let m = p.start(); p.bump(); - p.eat(MOVE_KW); + p.eat(T![move]); block_expr(p, Some(m)) } - MATCH_KW => match_expr(p), - UNSAFE_KW if la == L_CURLY => { + T![match] => match_expr(p), + T![unsafe] if la == T!['{'] => { let m = p.start(); p.bump(); block_expr(p, Some(m)) } - L_CURLY => block_expr(p, None), - RETURN_KW => return_expr(p), - CONTINUE_KW => continue_expr(p), - BREAK_KW => break_expr(p, r), + T!['{'] => block_expr(p, None), + T![return] => 
return_expr(p), + T![continue] => continue_expr(p), + T![break] => break_expr(p, r), _ => { p.err_recover("expected expression", EXPR_RECOVERY_SET); return None; @@ -129,25 +129,25 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar // (1,); // } fn tuple_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); let m = p.start(); - p.expect(L_PAREN); + p.expect(T!['(']); let mut saw_comma = false; let mut saw_expr = false; - while !p.at(EOF) && !p.at(R_PAREN) { + while !p.at(EOF) && !p.at(T![')']) { saw_expr = true; if !p.at_ts(EXPR_FIRST) { p.error("expected expression"); break; } expr(p); - if !p.at(R_PAREN) { + if !p.at(T![')']) { saw_comma = true; - p.expect(COMMA); + p.expect(T![,]); } } - p.expect(R_PAREN); + p.expect(T![')']); m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) } @@ -159,21 +159,21 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker { // [1; 2]; // } fn array_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(L_BRACK)); + assert!(p.at(T!['['])); let m = p.start(); p.bump(); - if p.eat(R_BRACK) { + if p.eat(T![']']) { return m.complete(p, ARRAY_EXPR); } expr(p); - if p.eat(SEMI) { + if p.eat(T![;]) { expr(p); - p.expect(R_BRACK); + p.expect(T![']']); return m.complete(p, ARRAY_EXPR); } - while !p.at(EOF) && !p.at(R_BRACK) { - p.expect(COMMA); - if p.at(R_BRACK) { + while !p.at(EOF) && !p.at(T![']']) { + p.expect(T![,]); + if p.at(T![']']) { break; } if !p.at_ts(EXPR_FIRST) { @@ -182,7 +182,7 @@ fn array_expr(p: &mut Parser) -> CompletedMarker { } expr(p); } - p.expect(R_BRACK); + p.expect(T![']']); m.complete(p, ARRAY_EXPR) } @@ -198,17 +198,17 @@ fn array_expr(p: &mut Parser) -> CompletedMarker { // } fn lambda_expr(p: &mut Parser) -> CompletedMarker { assert!( - p.at(PIPE) - || (p.at(MOVE_KW) && p.nth(1) == PIPE) - || (p.at(ASYNC_KW) && p.nth(1) == PIPE) - || (p.at(ASYNC_KW) && p.nth(1) == MOVE_KW && p.nth(2) == PIPE) + p.at(T![|]) + || (p.at(T![move]) && p.nth(1) == T![|]) + || (p.at(T![async]) && p.nth(1) == T![|]) + || (p.at(T![async]) && p.nth(1) == T![move] && p.nth(2) == T![|]) ); let m = p.start(); - p.eat(ASYNC_KW); - p.eat(MOVE_KW); + p.eat(T![async]); + p.eat(T![move]); params::param_list_opt_types(p); if opt_fn_ret_type(p) { - if !p.at(L_CURLY) { + if !p.at(T!['{']) { p.error("expected `{`"); } } @@ -224,14 +224,14 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker { // if S {}; // } fn if_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(IF_KW)); + assert!(p.at(T![if])); let m = p.start(); p.bump(); cond(p); block(p); - if p.at(ELSE_KW) { + if p.at(T![else]) { p.bump(); - if p.at(IF_KW) { + if p.at(T![if]) { if_expr(p); } else { block(p); @@ -247,7 +247,7 @@ fn if_expr(p: &mut Parser) -> CompletedMarker { // 'c: for x in () {} // } fn label(p: &mut Parser) { - assert!(p.at(LIFETIME) && p.nth(1) == COLON); + assert!(p.at(LIFETIME) && p.nth(1) == T![:]); let m = p.start(); p.bump(); p.bump(); @@ -259,7 +259,7 @@ fn label(p: &mut Parser) { // loop {}; // } fn loop_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(LOOP_KW)); + assert!(p.at(T![loop])); let m = m.unwrap_or_else(|| p.start()); p.bump(); block(p); @@ -272,7 +272,7 @@ fn loop_expr(p: &mut Parser, m: Option) -> CompletedMarker { // while let Some(x) = it.next() {}; // } fn while_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(WHILE_KW)); + assert!(p.at(T![while])); let m = m.unwrap_or_else(|| p.start()); p.bump(); cond(p); @@ -285,11 +285,11 @@ fn 
while_expr(p: &mut Parser, m: Option) -> CompletedMarker { // for x in [] {}; // } fn for_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(FOR_KW)); + assert!(p.at(T![for])); let m = m.unwrap_or_else(|| p.start()); p.bump(); patterns::pattern(p); - p.expect(IN_KW); + p.expect(T![in]); expr_no_struct(p); block(p); m.complete(p, FOR_EXPR) @@ -305,9 +305,9 @@ fn for_expr(p: &mut Parser, m: Option) -> CompletedMarker { // } fn cond(p: &mut Parser) { let m = p.start(); - if p.eat(LET_KW) { + if p.eat(T![let]) { patterns::pattern_list(p); - p.expect(EQ); + p.expect(T![=]); } expr_no_struct(p); m.complete(p, CONDITION); @@ -319,11 +319,11 @@ fn cond(p: &mut Parser) { // match S {}; // } fn match_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(MATCH_KW)); + assert!(p.at(T![match])); let m = p.start(); p.bump(); expr_no_struct(p); - if p.at(L_CURLY) { + if p.at(T!['{']) { match_arm_list(p); } else { p.error("expected `{`") @@ -332,9 +332,9 @@ fn match_expr(p: &mut Parser) -> CompletedMarker { } pub(crate) fn match_arm_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); - p.eat(L_CURLY); + p.eat(T!['{']); // test match_arms_inner_attribute // fn foo() { @@ -347,8 +347,8 @@ pub(crate) fn match_arm_list(p: &mut Parser) { // } attributes::inner_attributes(p); - while !p.at(EOF) && !p.at(R_CURLY) { - if p.at(L_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { + if p.at(T!['{']) { error_block(p, "expected match arm"); continue; } @@ -362,12 +362,12 @@ pub(crate) fn match_arm_list(p: &mut Parser) { // } // } if match_arm(p).is_block() { - p.eat(COMMA); - } else if !p.at(R_CURLY) { - p.expect(COMMA); + p.eat(T![,]); + } else if !p.at(T!['}']) { + p.expect(T![,]); } } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, MATCH_ARM_LIST); } @@ -399,10 +399,10 @@ fn match_arm(p: &mut Parser) -> BlockLike { attributes::outer_attributes(p); patterns::pattern_list_r(p, TokenSet::empty()); - if p.at(IF_KW) { + if p.at(T![if]) { match_guard(p); } - p.expect(FAT_ARROW); + p.expect(T![=>]); let blocklike = expr_stmt(p).1; m.complete(p, MATCH_ARM); blocklike @@ -415,7 +415,7 @@ fn match_arm(p: &mut Parser) -> BlockLike { // } // } fn match_guard(p: &mut Parser) -> CompletedMarker { - assert!(p.at(IF_KW)); + assert!(p.at(T![if])); let m = p.start(); p.bump(); expr(p); @@ -429,7 +429,7 @@ fn match_guard(p: &mut Parser) -> CompletedMarker { // 'label: {}; // } fn block_expr(p: &mut Parser, m: Option) -> CompletedMarker { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = m.unwrap_or_else(|| p.start()); block(p); m.complete(p, BLOCK_EXPR) @@ -441,7 +441,7 @@ fn block_expr(p: &mut Parser, m: Option) -> CompletedMarker { // return 92; // } fn return_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(RETURN_KW)); + assert!(p.at(T![return])); let m = p.start(); p.bump(); if p.at_ts(EXPR_FIRST) { @@ -458,7 +458,7 @@ fn return_expr(p: &mut Parser) -> CompletedMarker { // } // } fn continue_expr(p: &mut Parser) -> CompletedMarker { - assert!(p.at(CONTINUE_KW)); + assert!(p.at(T![continue])); let m = p.start(); p.bump(); p.eat(LIFETIME); @@ -475,7 +475,7 @@ fn continue_expr(p: &mut Parser) -> CompletedMarker { // } // } fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { - assert!(p.at(BREAK_KW)); + assert!(p.at(T![break])); let m = p.start(); p.bump(); p.eat(LIFETIME); @@ -486,7 +486,7 @@ fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { // for i in break {} // match break {} // } - if p.at_ts(EXPR_FIRST) && 
!(r.forbid_structs && p.at(L_CURLY)) { + if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) { expr(p); } m.complete(p, BREAK_EXPR) diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index 97f8122a9..6728e395f 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs @@ -19,7 +19,7 @@ use super::*; // struct S; pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { attributes::inner_attributes(p); - while !p.at(EOF) && !(stop_on_r_curly && p.at(R_CURLY)) { + while !p.at(EOF) && !(stop_on_r_curly && p.at(T!['}'])) { item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) } } @@ -45,20 +45,20 @@ pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemF match macro_call(p) { BlockLike::Block => (), BlockLike::NotBlock => { - p.expect(SEMI); + p.expect(T![;]); } } m.complete(p, MACRO_CALL); } else { m.abandon(p); - if p.at(L_CURLY) { + if p.at(T!['{']) { error_block(p, "expected an item"); - } else if p.at(R_CURLY) && !stop_on_r_curly { + } else if p.at(T!['}']) && !stop_on_r_curly { let e = p.start(); p.error("unmatched `}`"); p.bump(); e.complete(p, ERROR); - } else if !p.at(EOF) && !p.at(R_CURLY) { + } else if !p.at(EOF) && !p.at(T!['}']) { p.err_and_bump("expected an item"); } else { p.error("expected an item"); @@ -79,32 +79,32 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul let mut has_mods = false; // modifiers - has_mods |= p.eat(CONST_KW); + has_mods |= p.eat(T![const]); // test_err unsafe_block_in_mod // fn foo(){} unsafe { } fn bar(){} - if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY { - p.eat(UNSAFE_KW); + if p.at(T![unsafe]) && p.nth(1) != T!['{'] { + p.eat(T![unsafe]); has_mods = true; } // test_err async_without_semicolon // fn foo() { let _ = async {} } - if p.at(ASYNC_KW) && p.nth(1) != L_CURLY && p.nth(1) != MOVE_KW && p.nth(1) != PIPE { - p.eat(ASYNC_KW); + if p.at(T![async]) && p.nth(1) != T!['{'] && p.nth(1) != T![move] && p.nth(1) != T![|] { + p.eat(T![async]); has_mods = true; } - if p.at(EXTERN_KW) { + if p.at(T![extern]) { has_mods = true; abi(p); } - if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW { - p.bump_remap(AUTO_KW); + if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == T![trait] { + p.bump_remap(T![auto]); has_mods = true; } - if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW { - p.bump_remap(DEFAULT_KW); + if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == T![impl ] { + p.bump_remap(T![default]); has_mods = true; } @@ -135,7 +135,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul // test_err wrong_order_fns // async unsafe fn foo() {} // unsafe const fn bar() {} - FN_KW => { + T![fn] => { fn_def(p, flavor); m.complete(p, FN_DEF); } @@ -148,7 +148,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul // test unsafe_auto_trait // unsafe auto trait T {} - TRAIT_KW => { + T![trait] => { traits::trait_def(p); m.complete(p, TRAIT_DEF); } @@ -161,7 +161,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul // test unsafe_default_impl // unsafe default impl Foo {} - IMPL_KW => { + T![impl ] => { traits::impl_block(p); m.complete(p, IMPL_BLOCK); } @@ -186,10 +186,10 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { match p.current() { // test extern_crate // extern crate foo; - EXTERN_KW if la == CRATE_KW => extern_crate_item(p, m), 
- TYPE_KW => type_def(p, m), - MOD_KW => mod_item(p, m), - STRUCT_KW => { + T![extern] if la == T![crate] => extern_crate_item(p, m), + T![type] => type_def(p, m), + T![mod] => mod_item(p, m), + T![struct] => { // test struct_items // struct Foo; // struct Foo {} @@ -199,7 +199,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { // a: i32, // b: f32, // } - nominal::struct_def(p, m, STRUCT_KW); + nominal::struct_def(p, m, T![struct]); } IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { // test union_items @@ -208,16 +208,16 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { // a: i32, // b: f32, // } - nominal::struct_def(p, m, UNION_KW); + nominal::struct_def(p, m, T![union]); } - ENUM_KW => nominal::enum_def(p, m), - USE_KW => use_item::use_item(p, m), - CONST_KW if (la == IDENT || la == MUT_KW) => consts::const_def(p, m), - STATIC_KW => consts::static_def(p, m), + T![enum] => nominal::enum_def(p, m), + T![use] => use_item::use_item(p, m), + T![const] if (la == IDENT || la == T![mut]) => consts::const_def(p, m), + T![static] => consts::static_def(p, m), // test extern_block // extern {} - EXTERN_KW - if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) => + T![extern] + if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) => { abi(p); extern_item_list(p); @@ -225,7 +225,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { } _ => return Err(m), }; - if p.at(SEMI) { + if p.at(T![;]) { p.err_and_bump( "expected item, found `;`\n\ consider removing this semicolon", @@ -235,27 +235,27 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { } fn extern_crate_item(p: &mut Parser, m: Marker) { - assert!(p.at(EXTERN_KW)); + assert!(p.at(T![extern])); p.bump(); - assert!(p.at(CRATE_KW)); + assert!(p.at(T![crate])); p.bump(); name_ref(p); opt_alias(p); - p.expect(SEMI); + p.expect(T![;]); m.complete(p, EXTERN_CRATE_ITEM); } pub(crate) fn extern_item_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); mod_contents(p, true); - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, EXTERN_ITEM_LIST); } fn fn_def(p: &mut Parser, flavor: ItemFlavor) { - assert!(p.at(FN_KW)); + assert!(p.at(T![fn])); p.bump(); name_r(p, ITEM_RECOVERY_SET); @@ -263,7 +263,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { // fn foo(){} type_params::opt_type_param_list(p); - if p.at(L_PAREN) { + if p.at(T!['(']) { match flavor { ItemFlavor::Mod => params::param_list(p), ItemFlavor::Trait => params::param_list_opt_patterns(p), @@ -282,7 +282,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { // test fn_decl // trait T { fn foo(); } - if p.at(SEMI) { + if p.at(T![;]) { p.bump(); } else { expressions::block(p) @@ -292,7 +292,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { // test type_item // type Foo = Bar; fn type_def(p: &mut Parser, m: Marker) { - assert!(p.at(TYPE_KW)); + assert!(p.at(T![type])); p.bump(); name(p); @@ -301,7 +301,7 @@ fn type_def(p: &mut Parser, m: Marker) { // type Result = (); type_params::opt_type_param_list(p); - if p.at(COLON) { + if p.at(T![:]) { type_params::bounds(p); } @@ -309,32 +309,32 @@ fn type_def(p: &mut Parser, m: Marker) { // type Foo where Foo: Copy = (); type_params::opt_where_clause(p); - if p.eat(EQ) { + if p.eat(T![=]) { types::type_(p); } - p.expect(SEMI); + p.expect(T![;]); m.complete(p, TYPE_ALIAS_DEF); } pub(crate) fn mod_item(p: &mut 
Parser, m: Marker) { - assert!(p.at(MOD_KW)); + assert!(p.at(T![mod])); p.bump(); name(p); - if p.at(L_CURLY) { + if p.at(T!['{']) { mod_item_list(p); - } else if !p.eat(SEMI) { + } else if !p.eat(T![;]) { p.error("expected `;` or `{`"); } m.complete(p, MODULE); } pub(crate) fn mod_item_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); mod_contents(p, true); - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, ITEM_LIST); } @@ -345,16 +345,16 @@ fn macro_call(p: &mut Parser) -> BlockLike { } pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { - p.expect(EXCL); + p.expect(T![!]); if p.at(IDENT) { name(p); } match p.current() { - L_CURLY => { + T!['{'] => { token_tree(p); BlockLike::Block } - L_PAREN | L_BRACK => { + T!['('] | T!['['] => { token_tree(p); BlockLike::NotBlock } @@ -367,22 +367,22 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { pub(crate) fn token_tree(p: &mut Parser) { let closing_paren_kind = match p.current() { - L_CURLY => R_CURLY, - L_PAREN => R_PAREN, - L_BRACK => R_BRACK, + T!['{'] => T!['}'], + T!['('] => T![')'], + T!['['] => T![']'], _ => unreachable!(), }; let m = p.start(); p.bump(); while !p.at(EOF) && !p.at(closing_paren_kind) { match p.current() { - L_CURLY | L_PAREN | L_BRACK => token_tree(p), - R_CURLY => { + T!['{'] | T!['('] | T!['['] => token_tree(p), + T!['}'] => { p.error("unmatched `}`"); m.complete(p, TOKEN_TREE); return; } - R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"), + T![')'] | T![']'] => p.err_and_bump("unmatched brace"), _ => p.bump_raw(), } } diff --git a/crates/ra_parser/src/grammar/items/consts.rs b/crates/ra_parser/src/grammar/items/consts.rs index 1f802246f..b4908ebba 100644 --- a/crates/ra_parser/src/grammar/items/consts.rs +++ b/crates/ra_parser/src/grammar/items/consts.rs @@ -1,22 +1,22 @@ use super::*; pub(super) fn static_def(p: &mut Parser, m: Marker) { - const_or_static(p, m, STATIC_KW, STATIC_DEF) + const_or_static(p, m, T![static], STATIC_DEF) } pub(super) fn const_def(p: &mut Parser, m: Marker) { - const_or_static(p, m, CONST_KW, CONST_DEF) + const_or_static(p, m, T![const], CONST_DEF) } fn const_or_static(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { assert!(p.at(kw)); p.bump(); - p.eat(MUT_KW); // FIXME: validator to forbid const mut + p.eat(T![mut]); // FIXME: validator to forbid const mut name(p); types::ascription(p); - if p.eat(EQ) { + if p.eat(T![=]) { expressions::expr(p); } - p.expect(SEMI); + p.expect(T![;]); m.complete(p, def); } diff --git a/crates/ra_parser/src/grammar/items/nominal.rs b/crates/ra_parser/src/grammar/items/nominal.rs index e93bd76b8..bd4edab89 100644 --- a/crates/ra_parser/src/grammar/items/nominal.rs +++ b/crates/ra_parser/src/grammar/items/nominal.rs @@ -1,38 +1,38 @@ use super::*; pub(super) fn struct_def(p: &mut Parser, m: Marker, kind: SyntaxKind) { - assert!(p.at(STRUCT_KW) || p.at_contextual_kw("union")); + assert!(p.at(T![struct]) || p.at_contextual_kw("union")); p.bump_remap(kind); name_r(p, ITEM_RECOVERY_SET); type_params::opt_type_param_list(p); match p.current() { - WHERE_KW => { + T![where] => { type_params::opt_where_clause(p); match p.current() { - SEMI => { + T![;] => { p.bump(); } - L_CURLY => named_field_def_list(p), + T!['{'] => named_field_def_list(p), _ => { //FIXME: special case `(` error message p.error("expected `;` or `{`"); } } } - SEMI if kind == STRUCT_KW => { + T![;] if kind == T![struct] => { p.bump(); } - L_CURLY => named_field_def_list(p), - L_PAREN if 
kind == STRUCT_KW => { + T!['{'] => named_field_def_list(p), + T!['('] if kind == T![struct] => { pos_field_def_list(p); // test tuple_struct_where // struct Test(T) where T: Clone; // struct Test(T); type_params::opt_where_clause(p); - p.expect(SEMI); + p.expect(T![;]); } - _ if kind == STRUCT_KW => { + _ if kind == T![struct] => { p.error("expected `;`, `{`, or `(`"); } _ => { @@ -43,12 +43,12 @@ pub(super) fn struct_def(p: &mut Parser, m: Marker, kind: SyntaxKind) { } pub(super) fn enum_def(p: &mut Parser, m: Marker) { - assert!(p.at(ENUM_KW)); + assert!(p.at(T![enum])); p.bump(); name_r(p, ITEM_RECOVERY_SET); type_params::opt_type_param_list(p); type_params::opt_where_clause(p); - if p.at(L_CURLY) { + if p.at(T!['{']) { enum_variant_list(p); } else { p.error("expected `{`") @@ -57,11 +57,11 @@ pub(super) fn enum_def(p: &mut Parser, m: Marker) { } pub(crate) fn enum_variant_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); - while !p.at(EOF) && !p.at(R_CURLY) { - if p.at(L_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { + if p.at(T!['{']) { error_block(p, "expected enum variant"); continue; } @@ -70,9 +70,9 @@ pub(crate) fn enum_variant_list(p: &mut Parser) { if p.at(IDENT) { name(p); match p.current() { - L_CURLY => named_field_def_list(p), - L_PAREN => pos_field_def_list(p), - EQ => { + T!['{'] => named_field_def_list(p), + T!['('] => pos_field_def_list(p), + T![=] => { p.bump(); expressions::expr(p); } @@ -83,29 +83,29 @@ pub(crate) fn enum_variant_list(p: &mut Parser) { var.abandon(p); p.err_and_bump("expected enum variant"); } - if !p.at(R_CURLY) { - p.expect(COMMA); + if !p.at(T!['}']) { + p.expect(T![,]); } } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, ENUM_VARIANT_LIST); } pub(crate) fn named_field_def_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); - while !p.at(R_CURLY) && !p.at(EOF) { - if p.at(L_CURLY) { + while !p.at(T!['}']) && !p.at(EOF) { + if p.at(T!['{']) { error_block(p, "expected field"); continue; } named_field_def(p); - if !p.at(R_CURLY) { - p.expect(COMMA); + if !p.at(T!['}']) { + p.expect(T![,]); } } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, NAMED_FIELD_DEF_LIST); fn named_field_def(p: &mut Parser) { @@ -119,7 +119,7 @@ pub(crate) fn named_field_def_list(p: &mut Parser) { opt_visibility(p); if p.at(IDENT) { name(p); - p.expect(COLON); + p.expect(T![:]); types::type_(p); m.complete(p, NAMED_FIELD_DEF); } else { @@ -130,12 +130,12 @@ pub(crate) fn named_field_def_list(p: &mut Parser) { } fn pos_field_def_list(p: &mut Parser) { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); let m = p.start(); - if !p.expect(L_PAREN) { + if !p.expect(T!['(']) { return; } - while !p.at(R_PAREN) && !p.at(EOF) { + while !p.at(T![')']) && !p.at(EOF) { let m = p.start(); // test pos_field_attrs // struct S ( @@ -156,10 +156,10 @@ fn pos_field_def_list(p: &mut Parser) { types::type_(p); m.complete(p, POS_FIELD_DEF); - if !p.at(R_PAREN) { - p.expect(COMMA); + if !p.at(T![')']) { + p.expect(T![,]); } } - p.expect(R_PAREN); + p.expect(T![')']); m.complete(p, POS_FIELD_DEF_LIST); } diff --git a/crates/ra_parser/src/grammar/items/traits.rs b/crates/ra_parser/src/grammar/items/traits.rs index d03a6be0d..09ab3bfd4 100644 --- a/crates/ra_parser/src/grammar/items/traits.rs +++ b/crates/ra_parser/src/grammar/items/traits.rs @@ -4,15 +4,15 @@ use super::*; // trait T: Hash + Clone where U: Copy {} // trait X: Hash + Clone where U: Copy {} pub(super) fn 
trait_def(p: &mut Parser) { - assert!(p.at(TRAIT_KW)); + assert!(p.at(T![trait])); p.bump(); name_r(p, ITEM_RECOVERY_SET); type_params::opt_type_param_list(p); - if p.at(COLON) { + if p.at(T![:]) { type_params::bounds(p); } type_params::opt_where_clause(p); - if p.at(L_CURLY) { + if p.at(T!['{']) { trait_item_list(p); } else { p.error("expected `{`"); @@ -27,24 +27,24 @@ pub(super) fn trait_def(p: &mut Parser) { // fn bar(&self); // } pub(crate) fn trait_item_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); - while !p.at(EOF) && !p.at(R_CURLY) { - if p.at(L_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { + if p.at(T!['{']) { error_block(p, "expected an item"); continue; } item_or_macro(p, true, ItemFlavor::Trait); } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, ITEM_LIST); } // test impl_block // impl Foo {} pub(super) fn impl_block(p: &mut Parser) { - assert!(p.at(IMPL_KW)); + assert!(p.at(T![impl ])); p.bump(); if choose_type_params_over_qpath(p) { type_params::opt_type_param_list(p); @@ -55,13 +55,13 @@ pub(super) fn impl_block(p: &mut Parser) { // test impl_block_neg // impl !Send for X {} - p.eat(EXCL); + p.eat(T![!]); impl_type(p); - if p.eat(FOR_KW) { + if p.eat(T![for]) { impl_type(p); } type_params::opt_where_clause(p); - if p.at(L_CURLY) { + if p.at(T!['{']) { impl_item_list(p); } else { p.error("expected `{`"); @@ -76,7 +76,7 @@ pub(super) fn impl_block(p: &mut Parser) { // fn bar(&self) {} // } pub(crate) fn impl_item_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); // test impl_inner_attributes @@ -87,14 +87,14 @@ pub(crate) fn impl_item_list(p: &mut Parser) { // } attributes::inner_attributes(p); - while !p.at(EOF) && !p.at(R_CURLY) { - if p.at(L_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { + if p.at(T!['{']) { error_block(p, "expected an item"); continue; } item_or_macro(p, true, ItemFlavor::Mod); } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, ITEM_LIST); } @@ -114,14 +114,14 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool { // we disambiguate it in favor of generics (`impl ::absolute::Path { ... }`) // because this is what almost always expected in practice, qualified paths in impls // (`impl ::AssocTy { ... }`) aren't even allowed by type checker at the moment. 
- if !p.at(L_ANGLE) { + if !p.at(T![<]) { return false; } - if p.nth(1) == POUND || p.nth(1) == R_ANGLE { + if p.nth(1) == T![#] || p.nth(1) == T![>] { return true; } (p.nth(1) == LIFETIME || p.nth(1) == IDENT) - && (p.nth(2) == R_ANGLE || p.nth(2) == COMMA || p.nth(2) == COLON || p.nth(2) == EQ) + && (p.nth(2) == T![>] || p.nth(2) == T![,] || p.nth(2) == T![:] || p.nth(2) == T![=]) } // test_err impl_type @@ -130,7 +130,7 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool { // impl impl NotType {} // impl Trait2 for impl NotType {} pub(crate) fn impl_type(p: &mut Parser) { - if p.at(IMPL_KW) { + if p.at(T![impl ]) { p.error("expected trait or type"); return; } diff --git a/crates/ra_parser/src/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs index 908493789..c3a0b4410 100644 --- a/crates/ra_parser/src/grammar/items/use_item.rs +++ b/crates/ra_parser/src/grammar/items/use_item.rs @@ -1,10 +1,10 @@ use super::*; pub(super) fn use_item(p: &mut Parser, m: Marker) { - assert!(p.at(USE_KW)); + assert!(p.at(T![use])); p.bump(); use_tree(p); - p.expect(SEMI); + p.expect(T![;]); m.complete(p, USE_ITEM); } @@ -28,8 +28,8 @@ fn use_tree(p: &mut Parser) { // use ::*; // use some::path::{*}; // use some::path::{::*}; - (STAR, _) => p.bump(), - (COLONCOLON, STAR) => { + (T![*], _) => p.bump(), + (T![::], T![*]) => { // Parse `use ::*;`, which imports all from the crate root in Rust 2015 // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`) // but still parses and errors later: ('crate root in paths can only be used in start position') @@ -47,8 +47,8 @@ fn use_tree(p: &mut Parser) { // use {path::from::root}; // Rust 2015 // use ::{some::arbritrary::path}; // Rust 2015 // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nestnig - (L_CURLY, _) | (COLONCOLON, L_CURLY) => { - if p.at(COLONCOLON) { + (T!['{'], _) | (T![::], T!['{']) => { + if p.at(T![::]) { p.bump(); } use_tree_list(p); @@ -68,7 +68,7 @@ fn use_tree(p: &mut Parser) { _ if paths::is_path_start(p) => { paths::use_path(p); match p.current() { - AS_KW => { + T![as] => { // test use_alias // use some::path as some_name; // use some::{ @@ -80,16 +80,16 @@ fn use_tree(p: &mut Parser) { // use Trait as _; opt_alias(p); } - COLONCOLON => { + T![::] => { p.bump(); match p.current() { - STAR => { + T![*] => { p.bump(); } // test use_tree_list_after_path // use crate::{Item}; // use self::{Item}; - L_CURLY => use_tree_list(p), + T!['{'] => use_tree_list(p), _ => { // is this unreachable? 
p.error("expected `{` or `*`"); @@ -109,15 +109,15 @@ fn use_tree(p: &mut Parser) { } pub(crate) fn use_tree_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); - while !p.at(EOF) && !p.at(R_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { use_tree(p); - if !p.at(R_CURLY) { - p.expect(COMMA); + if !p.at(T!['}']) { + p.expect(T![,]); } } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, USE_TREE_LIST); } diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs index 3d3bd4cc1..723b56343 100644 --- a/crates/ra_parser/src/grammar/params.rs +++ b/crates/ra_parser/src/grammar/params.rs @@ -36,27 +36,27 @@ impl Flavor { } fn list_(p: &mut Parser, flavor: Flavor) { - let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) }; + let (bra, ket) = if flavor.type_required() { (T!['('], T![')']) } else { (T![|], T![|]) }; assert!(p.at(bra)); let m = p.start(); p.bump(); if flavor.type_required() { opt_self_param(p); } - while !p.at(EOF) && !p.at(ket) && !(flavor.type_required() && p.at(DOTDOTDOT)) { + while !p.at(EOF) && !p.at(ket) && !(flavor.type_required() && p.at(T![...])) { if !p.at_ts(VALUE_PARAMETER_FIRST) { p.error("expected value parameter"); break; } value_parameter(p, flavor); if !p.at(ket) { - p.expect(COMMA); + p.expect(T![,]); } } // test param_list_vararg // extern "C" { fn printf(format: *const i8, ...) -> i32; } if flavor.type_required() { - p.eat(DOTDOTDOT); + p.eat(T![...]); } p.expect(ket); m.complete(p, PARAM_LIST); @@ -69,7 +69,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) { match flavor { Flavor::OptionalType | Flavor::Normal => { patterns::pattern(p); - if p.at(COLON) || flavor.type_required() { + if p.at(T![:]) || flavor.type_required() { types::ascription(p) } } @@ -85,10 +85,10 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) { // trait Foo { // fn bar(_: u64, mut x: i32); // } - if (la0 == IDENT || la0 == UNDERSCORE) && la1 == COLON - || la0 == MUT_KW && la1 == IDENT && la2 == COLON - || la0 == AMP && la1 == IDENT && la2 == COLON - || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON + if (la0 == IDENT || la0 == T![_]) && la1 == T![:] + || la0 == T![mut] && la1 == IDENT && la2 == T![:] + || la0 == T![&] && la1 == IDENT && la2 == T![:] + || la0 == T![&] && la1 == T![mut] && la2 == IDENT && la3 == T![:] { patterns::pattern(p); types::ascription(p); @@ -110,16 +110,16 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) { // } fn opt_self_param(p: &mut Parser) { let m; - if p.at(SELF_KW) || p.at(MUT_KW) && p.nth(1) == SELF_KW { + if p.at(T![self]) || p.at(T![mut]) && p.nth(1) == T![self] { m = p.start(); - p.eat(MUT_KW); - p.eat(SELF_KW); + p.eat(T![mut]); + p.eat(T![self]); // test arb_self_types // impl S { // fn a(self: &Self) {} // fn b(mut self: Box) {} // } - if p.at(COLON) { + if p.at(T![:]) { types::ascription(p); } } else { @@ -127,10 +127,10 @@ fn opt_self_param(p: &mut Parser) { let la2 = p.nth(2); let la3 = p.nth(3); let n_toks = match (p.current(), la1, la2, la3) { - (AMP, SELF_KW, _, _) => 2, - (AMP, MUT_KW, SELF_KW, _) => 3, - (AMP, LIFETIME, SELF_KW, _) => 3, - (AMP, LIFETIME, MUT_KW, SELF_KW) => 4, + (T![&], T![self], _, _) => 2, + (T![&], T![mut], T![self], _) => 3, + (T![&], LIFETIME, T![self], _) => 3, + (T![&], LIFETIME, T![mut], T![self]) => 4, _ => return, }; m = p.start(); @@ -139,7 +139,7 @@ fn opt_self_param(p: &mut Parser) { } } m.complete(p, SELF_PARAM); - if !p.at(R_PAREN) { - p.expect(COMMA); + if 
!p.at(T![')']) { + p.expect(T![,]); } } diff --git a/crates/ra_parser/src/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs index 33a11886c..3537b0da1 100644 --- a/crates/ra_parser/src/grammar/paths.rs +++ b/crates/ra_parser/src/grammar/paths.rs @@ -5,7 +5,7 @@ pub(super) const PATH_FIRST: TokenSet = pub(super) fn is_path_start(p: &Parser) -> bool { match p.current() { - IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true, + IDENT | T![self] | T![super] | T![crate] | T![::] => true, _ => false, } } @@ -35,10 +35,10 @@ fn path(p: &mut Parser, mode: Mode) { let mut qual = path.complete(p, PATH); loop { let use_tree = match p.nth(1) { - STAR | L_CURLY => true, + T![*] | T!['{'] => true, _ => false, }; - if p.at(COLONCOLON) && !use_tree { + if p.at(T![::]) && !use_tree { let path = qual.precede(p); p.bump(); path_segment(p, mode, false); @@ -55,19 +55,19 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) { // test qual_paths // type X = ::Output; // fn foo() { ::default(); } - if first && p.eat(L_ANGLE) { + if first && p.eat(T![<]) { types::type_(p); - if p.eat(AS_KW) { + if p.eat(T![as]) { if is_path_start(p) { types::path_type(p); } else { p.error("expected a trait"); } } - p.expect(R_ANGLE); + p.expect(T![>]); } else { if first { - p.eat(COLONCOLON); + p.eat(T![::]); } match p.current() { IDENT => { @@ -76,7 +76,7 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) { } // test crate_path // use crate::foo; - SELF_KW | SUPER_KW | CRATE_KW => p.bump(), + T![self] | T![super] | T![crate] => p.bump(), _ => { p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); } @@ -91,7 +91,7 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) { Mode::Type => { // test path_fn_trait_args // type F = Box ()>; - if p.at(L_PAREN) { + if p.at(T!['(']) { params::param_list_opt_patterns(p); opt_fn_ret_type(p); } else { diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs index 12dd22bde..16ae9da63 100644 --- a/crates/ra_parser/src/grammar/patterns.rs +++ b/crates/ra_parser/src/grammar/patterns.rs @@ -16,10 +16,10 @@ pub(super) fn pattern_list(p: &mut Parser) { /// Parses a pattern list separated by pipes `|` /// using the given `recovery_set` pub(super) fn pattern_list_r(p: &mut Parser, recovery_set: TokenSet) { - p.eat(PIPE); + p.eat(T![|]); pattern_r(p, recovery_set); - while p.eat(PIPE) { + while p.eat(T![|]) { pattern_r(p, recovery_set); } } @@ -34,7 +34,7 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { // 200 .. 
301=> (), // } // } - if p.at(DOTDOTDOT) || p.at(DOTDOTEQ) || p.at(DOTDOT) { + if p.at(T![...]) || p.at(T![..=]) || p.at(T![..]) { let m = lhs.precede(p); p.bump(); atom_pat(p, recovery_set); @@ -44,7 +44,7 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { // fn main() { // let m!(x) = 0; // } - else if lhs.kind() == PATH_PAT && p.at(EXCL) { + else if lhs.kind() == PATH_PAT && p.at(T![!]) { let m = lhs.precede(p); items::macro_call_after_excl(p); m.complete(p, MACRO_CALL); @@ -58,9 +58,9 @@ const PAT_RECOVERY_SET: TokenSet = fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { let la0 = p.nth(0); let la1 = p.nth(1); - if la0 == REF_KW - || la0 == MUT_KW - || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY || la1 == EXCL)) + if la0 == T![ref] + || la0 == T![mut] + || (la0 == IDENT && !(la1 == T![::] || la1 == T!['('] || la1 == T!['{'] || la1 == T![!])) { return Some(bind_pat(p, true)); } @@ -73,10 +73,10 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { } let m = match la0 { - UNDERSCORE => placeholder_pat(p), - AMP => ref_pat(p), - L_PAREN => tuple_pat(p), - L_BRACK => slice_pat(p), + T![_] => placeholder_pat(p), + T![&] => ref_pat(p), + T!['('] => tuple_pat(p), + T!['['] => slice_pat(p), _ => { p.err_recover("expected pattern", recovery_set); return None; @@ -86,7 +86,7 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option { } fn is_literal_pat_start(p: &mut Parser) -> bool { - p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) + p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) || p.at_ts(expressions::LITERAL_FIRST) } @@ -102,7 +102,7 @@ fn is_literal_pat_start(p: &mut Parser) -> bool { fn literal_pat(p: &mut Parser) -> CompletedMarker { assert!(is_literal_pat_start(p)); let m = p.start(); - if p.at(MINUS) { + if p.at(T![-]) { p.bump(); } expressions::literal(p); @@ -121,11 +121,11 @@ fn path_pat(p: &mut Parser) -> CompletedMarker { let m = p.start(); paths::expr_path(p); let kind = match p.current() { - L_PAREN => { + T!['('] => { tuple_pat_fields(p); TUPLE_STRUCT_PAT } - L_CURLY => { + T!['{'] => { field_pat_list(p); STRUCT_PAT } @@ -142,10 +142,10 @@ fn path_pat(p: &mut Parser) -> CompletedMarker { // let S(_, .. , x) = (); // } fn tuple_pat_fields(p: &mut Parser) { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); p.bump(); - pat_list(p, R_PAREN); - p.expect(R_PAREN); + pat_list(p, T![')']); + p.expect(T![')']); } // test field_pat_list @@ -156,29 +156,29 @@ fn tuple_pat_fields(p: &mut Parser) { // let S { h: _, } = (); // } fn field_pat_list(p: &mut Parser) { - assert!(p.at(L_CURLY)); + assert!(p.at(T!['{'])); let m = p.start(); p.bump(); - while !p.at(EOF) && !p.at(R_CURLY) { + while !p.at(EOF) && !p.at(T!['}']) { match p.current() { - DOTDOT => p.bump(), - IDENT if p.nth(1) == COLON => field_pat(p), - L_CURLY => error_block(p, "expected ident"), + T![..] 
=> p.bump(), + IDENT if p.nth(1) == T![:] => field_pat(p), + T!['{'] => error_block(p, "expected ident"), _ => { bind_pat(p, false); } } - if !p.at(R_CURLY) { - p.expect(COMMA); + if !p.at(T!['}']) { + p.expect(T![,]); } } - p.expect(R_CURLY); + p.expect(T!['}']); m.complete(p, FIELD_PAT_LIST); } fn field_pat(p: &mut Parser) { assert!(p.at(IDENT)); - assert!(p.nth(1) == COLON); + assert!(p.nth(1) == T![:]); let m = p.start(); name(p); @@ -190,7 +190,7 @@ fn field_pat(p: &mut Parser) { // test placeholder_pat // fn main() { let _ = (); } fn placeholder_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(UNDERSCORE)); + assert!(p.at(T![_])); let m = p.start(); p.bump(); m.complete(p, PLACEHOLDER_PAT) @@ -202,10 +202,10 @@ fn placeholder_pat(p: &mut Parser) -> CompletedMarker { // let &mut b = (); // } fn ref_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(AMP)); + assert!(p.at(T![&])); let m = p.start(); p.bump(); - p.eat(MUT_KW); + p.eat(T![mut]); pattern(p); m.complete(p, REF_PAT) } @@ -215,7 +215,7 @@ fn ref_pat(p: &mut Parser) -> CompletedMarker { // let (a, b, ..) = (); // } fn tuple_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); let m = p.start(); tuple_pat_fields(p); m.complete(p, TUPLE_PAT) @@ -226,18 +226,18 @@ fn tuple_pat(p: &mut Parser) -> CompletedMarker { // let [a, b, ..] = []; // } fn slice_pat(p: &mut Parser) -> CompletedMarker { - assert!(p.at(L_BRACK)); + assert!(p.at(T!['['])); let m = p.start(); p.bump(); - pat_list(p, R_BRACK); - p.expect(R_BRACK); + pat_list(p, T![']']); + p.expect(T![']']); m.complete(p, SLICE_PAT) } fn pat_list(p: &mut Parser, ket: SyntaxKind) { while !p.at(EOF) && !p.at(ket) { match p.current() { - DOTDOT => p.bump(), + T![..] => p.bump(), _ => { if !p.at_ts(PATTERN_FIRST) { p.error("expected a pattern"); @@ -247,7 +247,7 @@ fn pat_list(p: &mut Parser, ket: SyntaxKind) { } } if !p.at(ket) { - p.expect(COMMA); + p.expect(T![,]); } } } @@ -263,10 +263,10 @@ fn pat_list(p: &mut Parser, ket: SyntaxKind) { // } fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { let m = p.start(); - p.eat(REF_KW); - p.eat(MUT_KW); + p.eat(T![ref]); + p.eat(T![mut]); name(p); - if with_at && p.eat(AT) { + if with_at && p.eat(T![@]) { pattern(p); } m.complete(p, BIND_PAT) diff --git a/crates/ra_parser/src/grammar/type_args.rs b/crates/ra_parser/src/grammar/type_args.rs index 684976b99..f391b63db 100644 --- a/crates/ra_parser/src/grammar/type_args.rs +++ b/crates/ra_parser/src/grammar/type_args.rs @@ -3,26 +3,26 @@ use super::*; pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { let m; match (colon_colon_required, p.nth(0), p.nth(1)) { - (_, COLONCOLON, L_ANGLE) => { + (_, T![::], T![<]) => { m = p.start(); p.bump(); p.bump(); } - (false, L_ANGLE, EQ) => return, - (false, L_ANGLE, _) => { + (false, T![<], T![=]) => return, + (false, T![<], _) => { m = p.start(); p.bump(); } _ => return, }; - while !p.at(EOF) && !p.at(R_ANGLE) { + while !p.at(EOF) && !p.at(T![>]) { type_arg(p); - if !p.at(R_ANGLE) && !p.expect(COMMA) { + if !p.at(T![>]) && !p.expect(T![,]) { break; } } - p.expect(R_ANGLE); + p.expect(T![>]); m.complete(p, TYPE_ARG_LIST); } @@ -35,7 +35,7 @@ fn type_arg(p: &mut Parser) { p.bump(); m.complete(p, LIFETIME_ARG); } - IDENT if p.nth(1) == EQ => { + IDENT if p.nth(1) == T![=] => { name_ref(p); p.bump(); types::type_(p); diff --git a/crates/ra_parser/src/grammar/type_params.rs b/crates/ra_parser/src/grammar/type_params.rs index 07d9b0792..4bbfed780 100644 --- 
a/crates/ra_parser/src/grammar/type_params.rs +++ b/crates/ra_parser/src/grammar/type_params.rs @@ -1,18 +1,18 @@ use super::*; pub(super) fn opt_type_param_list(p: &mut Parser) { - if !p.at(L_ANGLE) { + if !p.at(T![<]) { return; } type_param_list(p); } fn type_param_list(p: &mut Parser) { - assert!(p.at(L_ANGLE)); + assert!(p.at(T![<])); let m = p.start(); p.bump(); - while !p.at(EOF) && !p.at(R_ANGLE) { + while !p.at(EOF) && !p.at(T![>]) { let m = p.start(); // test generic_lifetime_type_attribute @@ -28,18 +28,18 @@ fn type_param_list(p: &mut Parser) { p.err_and_bump("expected type parameter") } } - if !p.at(R_ANGLE) && !p.expect(COMMA) { + if !p.at(T![>]) && !p.expect(T![,]) { break; } } - p.expect(R_ANGLE); + p.expect(T![>]); m.complete(p, TYPE_PARAM_LIST); } fn lifetime_param(p: &mut Parser, m: Marker) { assert!(p.at(LIFETIME)); p.bump(); - if p.at(COLON) { + if p.at(T![:]) { lifetime_bounds(p); } m.complete(p, LIFETIME_PARAM); @@ -48,12 +48,12 @@ fn lifetime_param(p: &mut Parser, m: Marker) { fn type_param(p: &mut Parser, m: Marker) { assert!(p.at(IDENT)); name(p); - if p.at(COLON) { + if p.at(T![:]) { bounds(p); } // test type_param_default // struct S; - if p.at(EQ) { + if p.at(T![=]) { p.bump(); types::type_(p) } @@ -63,17 +63,17 @@ fn type_param(p: &mut Parser, m: Marker) { // test type_param_bounds // struct S; pub(super) fn bounds(p: &mut Parser) { - assert!(p.at(COLON)); + assert!(p.at(T![:])); p.bump(); bounds_without_colon(p); } fn lifetime_bounds(p: &mut Parser) { - assert!(p.at(COLON)); + assert!(p.at(T![:])); p.bump(); while p.at(LIFETIME) { p.bump(); - if !p.eat(PLUS) { + if !p.eat(T![+]) { break; } } @@ -81,7 +81,7 @@ fn lifetime_bounds(p: &mut Parser) { pub(super) fn bounds_without_colon_m(p: &mut Parser, marker: Marker) -> CompletedMarker { while type_bound(p) { - if !p.eat(PLUS) { + if !p.eat(T![+]) { break; } } @@ -96,11 +96,11 @@ pub(super) fn bounds_without_colon(p: &mut Parser) { fn type_bound(p: &mut Parser) -> bool { let m = p.start(); - let has_paren = p.eat(L_PAREN); - p.eat(QUESTION); + let has_paren = p.eat(T!['(']); + p.eat(T![?]); match p.current() { LIFETIME => p.bump(), - FOR_KW => types::for_type(p), + T![for] => types::for_type(p), _ if paths::is_path_start(p) => types::path_type_(p, false), _ => { m.abandon(p); @@ -108,7 +108,7 @@ fn type_bound(p: &mut Parser) -> bool { } } if has_paren { - p.expect(R_PAREN); + p.expect(T![')']); } m.complete(p, TYPE_BOUND); @@ -124,7 +124,7 @@ fn type_bound(p: &mut Parser) -> bool { // ::Item: 'a // {} pub(super) fn opt_where_clause(p: &mut Parser) { - if !p.at(WHERE_KW) { + if !p.at(T![where]) { return; } let m = p.start(); @@ -133,7 +133,7 @@ pub(super) fn opt_where_clause(p: &mut Parser) { while is_where_predicate(p) { where_predicate(p); - let comma = p.eat(COMMA); + let comma = p.eat(T![,]); if is_where_clause_end(p) { break; @@ -150,13 +150,13 @@ pub(super) fn opt_where_clause(p: &mut Parser) { fn is_where_predicate(p: &mut Parser) -> bool { match p.current() { LIFETIME => true, - IMPL_KW => false, + T![impl ] => false, token => types::TYPE_FIRST.contains(token), } } fn is_where_clause_end(p: &mut Parser) -> bool { - p.current() == L_CURLY || p.current() == SEMI || p.current() == EQ + p.current() == T!['{'] || p.current() == T![;] || p.current() == T![=] } fn where_predicate(p: &mut Parser) { @@ -164,13 +164,13 @@ fn where_predicate(p: &mut Parser) { match p.current() { LIFETIME => { p.bump(); - if p.at(COLON) { + if p.at(T![:]) { bounds(p); } else { p.error("expected colon"); } } - IMPL_KW => { + T![impl ] 
=> { p.error("expected lifetime or type"); } _ => { @@ -181,7 +181,7 @@ fn where_predicate(p: &mut Parser) { // { } types::type_(p); - if p.at(COLON) { + if p.at(T![:]) { bounds(p); } else { p.error("expected colon"); diff --git a/crates/ra_parser/src/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs index 686c80f3c..438e3ab0e 100644 --- a/crates/ra_parser/src/grammar/types.rs +++ b/crates/ra_parser/src/grammar/types.rs @@ -17,18 +17,18 @@ pub(super) fn type_no_bounds(p: &mut Parser) { fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { match p.current() { - L_PAREN => paren_or_tuple_type(p), - EXCL => never_type(p), - STAR => pointer_type(p), - L_BRACK => array_or_slice_type(p), - AMP => reference_type(p), - UNDERSCORE => placeholder_type(p), - FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), - FOR_KW => for_type(p), - IMPL_KW => impl_trait_type(p), - DYN_KW => dyn_trait_type(p), + T!['('] => paren_or_tuple_type(p), + T![!] => never_type(p), + T![*] => pointer_type(p), + T!['['] => array_or_slice_type(p), + T![&] => reference_type(p), + T![_] => placeholder_type(p), + T![fn] | T![unsafe] | T![extern] => fn_pointer_type(p), + T![for] => for_type(p), + T![impl ] => impl_trait_type(p), + T![dyn ] => dyn_trait_type(p), // Some path types are not allowed to have bounds (no plus) - L_ANGLE => path_type_(p, allow_bounds), + T![<] => path_type_(p, allow_bounds), _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds), _ => { p.err_recover("expected type", TYPE_RECOVERY_SET); @@ -37,27 +37,27 @@ fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { } pub(super) fn ascription(p: &mut Parser) { - p.expect(COLON); + p.expect(T![:]); type_(p) } fn paren_or_tuple_type(p: &mut Parser) { - assert!(p.at(L_PAREN)); + assert!(p.at(T!['('])); let m = p.start(); p.bump(); let mut n_types: u32 = 0; let mut trailing_comma: bool = false; - while !p.at(EOF) && !p.at(R_PAREN) { + while !p.at(EOF) && !p.at(T![')']) { n_types += 1; type_(p); - if p.eat(COMMA) { + if p.eat(T![,]) { trailing_comma = true; } else { trailing_comma = false; break; } } - p.expect(R_PAREN); + p.expect(T![')']); let kind = if n_types == 1 && !trailing_comma { // test paren_type @@ -77,14 +77,14 @@ fn paren_or_tuple_type(p: &mut Parser) { // test never_type // type Never = !; fn never_type(p: &mut Parser) { - assert!(p.at(EXCL)); + assert!(p.at(T![!])); let m = p.start(); p.bump(); m.complete(p, NEVER_TYPE); } fn pointer_type(p: &mut Parser) { - assert!(p.at(STAR)); + assert!(p.at(T![*])); let m = p.start(); p.bump(); @@ -92,7 +92,7 @@ fn pointer_type(p: &mut Parser) { // test pointer_type_mut // type M = *mut (); // type C = *mut (); - MUT_KW | CONST_KW => p.bump(), + T![mut] | T![const] => p.bump(), _ => { // test_err pointer_type_no_mutability // type T = *(); @@ -108,7 +108,7 @@ fn pointer_type(p: &mut Parser) { } fn array_or_slice_type(p: &mut Parser) { - assert!(p.at(L_BRACK)); + assert!(p.at(T!['['])); let m = p.start(); p.bump(); @@ -116,17 +116,17 @@ fn array_or_slice_type(p: &mut Parser) { let kind = match p.current() { // test slice_type // type T = [()]; - R_BRACK => { + T![']'] => { p.bump(); SLICE_TYPE } // test array_type // type T = [(); 92]; - SEMI => { + T![;] => { p.bump(); expressions::expr(p); - p.expect(R_BRACK); + p.expect(T![']']); ARRAY_TYPE } // test_err array_type_missing_semi @@ -144,11 +144,11 @@ fn array_or_slice_type(p: &mut Parser) { // type B = &'static (); // type C = &mut (); fn reference_type(p: &mut Parser) { - assert!(p.at(AMP)); + assert!(p.at(T![&])); let m = 
p.start(); p.bump(); p.eat(LIFETIME); - p.eat(MUT_KW); + p.eat(T![mut]); type_no_bounds(p); m.complete(p, REFERENCE_TYPE); } @@ -156,7 +156,7 @@ fn reference_type(p: &mut Parser) { // test placeholder_type // type Placeholder = _; fn placeholder_type(p: &mut Parser) { - assert!(p.at(UNDERSCORE)); + assert!(p.at(T![_])); let m = p.start(); p.bump(); m.complete(p, PLACEHOLDER_TYPE); @@ -169,18 +169,18 @@ fn placeholder_type(p: &mut Parser) { // type D = extern "C" fn ( u8 , ... ) -> u8; fn fn_pointer_type(p: &mut Parser) { let m = p.start(); - p.eat(UNSAFE_KW); - if p.at(EXTERN_KW) { + p.eat(T![unsafe]); + if p.at(T![extern]) { abi(p); } // test_err fn_pointer_type_missing_fn // type F = unsafe (); - if !p.eat(FN_KW) { + if !p.eat(T![fn]) { m.abandon(p); p.error("expected `fn`"); return; } - if p.at(L_PAREN) { + if p.at(T!['(']) { params::param_list_opt_patterns(p); } else { p.error("expected parameters") @@ -192,9 +192,9 @@ fn fn_pointer_type(p: &mut Parser) { } pub(super) fn for_binder(p: &mut Parser) { - assert!(p.at(FOR_KW)); + assert!(p.at(T![for])); p.bump(); - if p.at(L_ANGLE) { + if p.at(T![<]) { type_params::opt_type_param_list(p); } else { p.error("expected `<`"); @@ -206,12 +206,12 @@ pub(super) fn for_binder(p: &mut Parser) { // fn foo(_t: &T) where for<'a> &'a T: Iterator {} // fn bar(_t: &T) where for<'a> &'a mut T: Iterator {} pub(super) fn for_type(p: &mut Parser) { - assert!(p.at(FOR_KW)); + assert!(p.at(T![for])); let m = p.start(); for_binder(p); match p.current() { - FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), - AMP => reference_type(p), + T![fn] | T![unsafe] | T![extern] => fn_pointer_type(p), + T![&] => reference_type(p), _ if paths::is_path_start(p) => path_type_(p, false), _ => p.error("expected a path"), } @@ -221,7 +221,7 @@ pub(super) fn for_type(p: &mut Parser) { // test impl_trait_type // type A = impl Iterator> + 'a; fn impl_trait_type(p: &mut Parser) { - assert!(p.at(IMPL_KW)); + assert!(p.at(T![impl ])); let m = p.start(); p.bump(); type_params::bounds_without_colon(p); @@ -231,7 +231,7 @@ fn impl_trait_type(p: &mut Parser) { // test dyn_trait_type // type A = dyn Iterator> + 'a; fn dyn_trait_type(p: &mut Parser) { - assert!(p.at(DYN_KW)); + assert!(p.at(T![dyn ])); let m = p.start(); p.bump(); type_params::bounds_without_colon(p); @@ -251,11 +251,11 @@ pub(super) fn path_type(p: &mut Parser) { // type A = foo!(); // type B = crate::foo!(); fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { - assert!(paths::is_path_start(p) || p.at(L_ANGLE)); + assert!(paths::is_path_start(p) || p.at(T![<])); let m = p.start(); paths::type_path(p); - let kind = if p.at(EXCL) { + let kind = if p.at(T![!]) { items::macro_call_after_excl(p); MACRO_CALL } else { @@ -270,7 +270,7 @@ fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { } pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { - assert!(paths::is_path_start(p) || p.at(L_ANGLE)); + assert!(paths::is_path_start(p) || p.at(T![<])); let m = p.start(); paths::type_path(p); @@ -286,7 +286,7 @@ pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { /// This turns a parsed PATH_TYPE optionally into a DYN_TRAIT_TYPE /// with a TYPE_BOUND_LIST fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: CompletedMarker) { - if !p.at(PLUS) { + if !p.at(T![+]) { return; } @@ -298,7 +298,7 @@ fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: Comp // This gets consumed here so it gets properly set // in the TYPE_BOUND_LIST - p.eat(PLUS); + 
p.eat(T![+]); // Parse rest of the bounds into the TYPE_BOUND_LIST let m = type_params::bounds_without_colon_m(p, m); diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs index 8eff930db..4434dfb09 100644 --- a/crates/ra_parser/src/parser.rs +++ b/crates/ra_parser/src/parser.rs @@ -6,6 +6,7 @@ use crate::{ SyntaxKind::{self, ERROR, EOF, TOMBSTONE}, TokenSource, ParseError, TokenSet, event::Event, + T }; /// `Parser` struct provides the low-level API for @@ -155,10 +156,10 @@ impl<'t> Parser<'t> { // Handle parser composites match kind { - DOTDOTDOT | DOTDOTEQ => { + T![...] | T![..=] => { self.bump_compound(kind, 3); } - DOTDOT | COLONCOLON | EQEQ | FAT_ARROW | NEQ | THIN_ARROW => { + T![..] | T![::] | T![==] | T![=>] | T![!=] | T![->] => { self.bump_compound(kind, 2); } _ => { @@ -223,7 +224,7 @@ impl<'t> Parser<'t> { /// Create an error node and consume the next token. pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { - if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) { + if self.at(T!['{']) || self.at(T!['}']) || self.at_ts(recovery) { self.error(message); } else { let m = self.start(); @@ -253,19 +254,17 @@ impl<'t> Parser<'t> { let jn2 = self.token_source.is_token_joint_to_next(self.token_pos + n + 1); let la3 = self.token_source.token_kind(self.token_pos + n + 2); - use SyntaxKind::*; - match kind { - DOT if jn1 && la2 == DOT && jn2 && la3 == DOT => Some((DOTDOTDOT, 3)), - DOT if jn1 && la2 == DOT && la3 == EQ => Some((DOTDOTEQ, 3)), - DOT if jn1 && la2 == DOT => Some((DOTDOT, 2)), + T![.] if jn1 && la2 == T![.] && jn2 && la3 == T![.] => Some((T![...], 3)), + T![.] if jn1 && la2 == T![.] && la3 == T![=] => Some((T![..=], 3)), + T![.] if jn1 && la2 == T![.] => Some((T![..], 2)), - COLON if jn1 && la2 == COLON => Some((COLONCOLON, 2)), - EQ if jn1 && la2 == EQ => Some((EQEQ, 2)), - EQ if jn1 && la2 == R_ANGLE => Some((FAT_ARROW, 2)), + T![:] if jn1 && la2 == T![:] => Some((T![::], 2)), + T![=] if jn1 && la2 == T![=] => Some((T![==], 2)), + T![=] if jn1 && la2 == T![>] => Some((T![=>], 2)), - EXCL if la2 == EQ => Some((NEQ, 2)), - MINUS if la2 == R_ANGLE => Some((THIN_ARROW, 2)), + T![!] if la2 == T![=] => Some((T![!=], 2)), + T![-] if la2 == T![>] => Some((T![->], 2)), _ => None, } } diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 9484c3b9b..17763809d 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs @@ -3,7 +3,8 @@ use crate::{ SyntaxToken, SyntaxElement, SmolStr, ast::{self, AstNode, AstChildren, children, child_opt}, - SyntaxKind::* + SyntaxKind::*, + T }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -34,7 +35,7 @@ impl ast::IfExpr { impl ast::RefExpr { pub fn is_mut(&self) -> bool { - self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) + self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) } } @@ -51,9 +52,9 @@ pub enum PrefixOp { impl ast::PrefixExpr { pub fn op_kind(&self) -> Option { match self.op_token()?.kind() { - STAR => Some(PrefixOp::Deref), - EXCL => Some(PrefixOp::Not), - MINUS => Some(PrefixOp::Neg), + T![*] => Some(PrefixOp::Deref), + T![!] 
=> Some(PrefixOp::Not), + T![-] => Some(PrefixOp::Neg), _ => None, } } @@ -133,37 +134,37 @@ impl ast::BinExpr { fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| { match c.kind() { - PIPEPIPE => Some((c, BinOp::BooleanOr)), - AMPAMP => Some((c, BinOp::BooleanAnd)), - EQEQ => Some((c, BinOp::EqualityTest)), - NEQ => Some((c, BinOp::NegatedEqualityTest)), - LTEQ => Some((c, BinOp::LesserEqualTest)), - GTEQ => Some((c, BinOp::GreaterEqualTest)), - L_ANGLE => Some((c, BinOp::LesserTest)), - R_ANGLE => Some((c, BinOp::GreaterTest)), - PLUS => Some((c, BinOp::Addition)), - STAR => Some((c, BinOp::Multiplication)), - MINUS => Some((c, BinOp::Subtraction)), - SLASH => Some((c, BinOp::Division)), - PERCENT => Some((c, BinOp::Remainder)), - SHL => Some((c, BinOp::LeftShift)), - SHR => Some((c, BinOp::RightShift)), - CARET => Some((c, BinOp::BitwiseXor)), - PIPE => Some((c, BinOp::BitwiseOr)), - AMP => Some((c, BinOp::BitwiseAnd)), - DOTDOT => Some((c, BinOp::RangeRightOpen)), - DOTDOTEQ => Some((c, BinOp::RangeRightClosed)), - EQ => Some((c, BinOp::Assignment)), - PLUSEQ => Some((c, BinOp::AddAssign)), - SLASHEQ => Some((c, BinOp::DivAssign)), - STAREQ => Some((c, BinOp::MulAssign)), - PERCENTEQ => Some((c, BinOp::RemAssign)), - SHREQ => Some((c, BinOp::ShrAssign)), - SHLEQ => Some((c, BinOp::ShlAssign)), - MINUSEQ => Some((c, BinOp::SubAssign)), - PIPEEQ => Some((c, BinOp::BitOrAssign)), - AMPEQ => Some((c, BinOp::BitAndAssign)), - CARETEQ => Some((c, BinOp::BitXorAssign)), + T![||] => Some((c, BinOp::BooleanOr)), + T![&&] => Some((c, BinOp::BooleanAnd)), + T![==] => Some((c, BinOp::EqualityTest)), + T![!=] => Some((c, BinOp::NegatedEqualityTest)), + T![<=] => Some((c, BinOp::LesserEqualTest)), + T![>=] => Some((c, BinOp::GreaterEqualTest)), + T![<] => Some((c, BinOp::LesserTest)), + T![>] => Some((c, BinOp::GreaterTest)), + T![+] => Some((c, BinOp::Addition)), + T![*] => Some((c, BinOp::Multiplication)), + T![-] => Some((c, BinOp::Subtraction)), + T![/] => Some((c, BinOp::Division)), + T![%] => Some((c, BinOp::Remainder)), + T![<<] => Some((c, BinOp::LeftShift)), + T![>>] => Some((c, BinOp::RightShift)), + T![^] => Some((c, BinOp::BitwiseXor)), + T![|] => Some((c, BinOp::BitwiseOr)), + T![&] => Some((c, BinOp::BitwiseAnd)), + T![..] 
=> Some((c, BinOp::RangeRightOpen)), + T![..=] => Some((c, BinOp::RangeRightClosed)), + T![=] => Some((c, BinOp::Assignment)), + T![+=] => Some((c, BinOp::AddAssign)), + T![/=] => Some((c, BinOp::DivAssign)), + T![*=] => Some((c, BinOp::MulAssign)), + T![%=] => Some((c, BinOp::RemAssign)), + T![>>=] => Some((c, BinOp::ShrAssign)), + T![<<=] => Some((c, BinOp::ShlAssign)), + T![-=] => Some((c, BinOp::SubAssign)), + T![|=] => Some((c, BinOp::BitOrAssign)), + T![&=] => Some((c, BinOp::BitAndAssign)), + T![^=] => Some((c, BinOp::BitXorAssign)), _ => None, } }) @@ -211,7 +212,7 @@ impl ast::ArrayExpr { } fn is_repeat(&self) -> bool { - self.syntax().children_with_tokens().any(|it| it.kind() == SEMI) + self.syntax().children_with_tokens().any(|it| it.kind() == T![;]) } } @@ -258,7 +259,7 @@ impl ast::Literal { LiteralKind::FloatNumber { suffix: suffix } } STRING | RAW_STRING => LiteralKind::String, - TRUE_KW | FALSE_KW => LiteralKind::Bool, + T![true] | T![false] => LiteralKind::Bool, BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, CHAR => LiteralKind::Char, BYTE => LiteralKind::Byte, diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs index f3466c585..f030e0df8 100644 --- a/crates/ra_syntax/src/ast/extensions.rs +++ b/crates/ra_syntax/src/ast/extensions.rs @@ -3,7 +3,7 @@ use itertools::Itertools; -use crate::{SmolStr, SyntaxToken, ast::{self, AstNode, children, child_opt}, SyntaxKind::*, SyntaxElement}; +use crate::{SmolStr, SyntaxToken, ast::{self, AstNode, children, child_opt}, SyntaxKind::*, SyntaxElement, T}; use ra_parser::SyntaxKind; impl ast::Name { @@ -32,7 +32,7 @@ impl ast::Attr { Some(prev) => prev, }; - prev.kind() == EXCL + prev.kind() == T![!] } pub fn as_atom(&self) -> Option { @@ -102,9 +102,9 @@ impl ast::PathSegment { PathSegmentKind::Name(name_ref) } else { match self.syntax().first_child_or_token()?.kind() { - SELF_KW => PathSegmentKind::SelfKw, - SUPER_KW => PathSegmentKind::SuperKw, - CRATE_KW => PathSegmentKind::CrateKw, + T![self] => PathSegmentKind::SelfKw, + T![super] => PathSegmentKind::SuperKw, + T![crate] => PathSegmentKind::CrateKw, _ => return None, } }; @@ -113,7 +113,7 @@ impl ast::PathSegment { pub fn has_colon_colon(&self) -> bool { match self.syntax.first_child_or_token().map(|s| s.kind()) { - Some(COLONCOLON) => true, + Some(T![::]) => true, _ => false, } } @@ -129,14 +129,14 @@ impl ast::Module { pub fn has_semi(&self) -> bool { match self.syntax().last_child_or_token() { None => false, - Some(node) => node.kind() == SEMI, + Some(node) => node.kind() == T![;], } } } impl ast::UseTree { pub fn has_star(&self) -> bool { - self.syntax().children_with_tokens().any(|it| it.kind() == STAR) + self.syntax().children_with_tokens().any(|it| it.kind() == T![*]) } } @@ -172,7 +172,7 @@ impl ast::ImplBlock { } pub fn is_negative(&self) -> bool { - self.syntax().children_with_tokens().any(|t| t.kind() == EXCL) + self.syntax().children_with_tokens().any(|t| t.kind() == T![!]) } } @@ -219,7 +219,7 @@ impl ast::FnDef { self.syntax() .last_child_or_token() .and_then(|it| it.as_token()) - .filter(|it| it.kind() == SEMI) + .filter(|it| it.kind() == T![;]) } } @@ -227,7 +227,7 @@ impl ast::LetStmt { pub fn has_semi(&self) -> bool { match self.syntax().last_child_or_token() { None => false, - Some(node) => node.kind() == SEMI, + Some(node) => node.kind() == T![;], } } } @@ -236,7 +236,7 @@ impl ast::ExprStmt { pub fn has_semi(&self) -> bool { match self.syntax().last_child_or_token() { None => false, - Some(node) => node.kind() 
== SEMI, + Some(node) => node.kind() == T![;], } } } @@ -270,29 +270,29 @@ impl ast::FieldExpr { impl ast::RefPat { pub fn is_mut(&self) -> bool { - self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) + self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) } } impl ast::BindPat { pub fn is_mutable(&self) -> bool { - self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) + self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) } pub fn is_ref(&self) -> bool { - self.syntax().children_with_tokens().any(|n| n.kind() == REF_KW) + self.syntax().children_with_tokens().any(|n| n.kind() == T![ref]) } } impl ast::PointerType { pub fn is_mut(&self) -> bool { - self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) + self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) } } impl ast::ReferenceType { pub fn is_mut(&self) -> bool { - self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) + self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) } } @@ -311,19 +311,19 @@ impl ast::SelfParam { self.syntax() .children_with_tokens() .filter_map(|it| it.as_token()) - .find(|it| it.kind() == SELF_KW) + .find(|it| it.kind() == T![self]) .expect("invalid tree: self param must have self") } pub fn kind(&self) -> SelfParamKind { - let borrowed = self.syntax().children_with_tokens().any(|n| n.kind() == AMP); + let borrowed = self.syntax().children_with_tokens().any(|n| n.kind() == T![&]); if borrowed { // check for a `mut` coming after the & -- `mut &self` != `&mut self` if self .syntax() .children_with_tokens() - .skip_while(|n| n.kind() != AMP) - .any(|n| n.kind() == MUT_KW) + .skip_while(|n| n.kind() != T![&]) + .any(|n| n.kind() == T![mut]) { SelfParamKind::MutRef } else { @@ -355,6 +355,6 @@ impl ast::WherePred { impl ast::TraitDef { pub fn is_auto(&self) -> bool { - self.syntax().children_with_tokens().any(|t| t.kind() == AUTO_KW) + self.syntax().children_with_tokens().any(|t| t.kind() == T![auto]) } } diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 65c65d6aa..0ceabc203 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs @@ -179,10 +179,7 @@ fn api_walkthrough() { // There's a bunch of traversal methods on `SyntaxNode`: assert_eq!(expr_syntax.parent(), Some(block.syntax())); - assert_eq!( - block.syntax().first_child_or_token().map(|it| it.kind()), - Some(SyntaxKind::L_CURLY) - ); + assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{'])); assert_eq!( expr_syntax.next_sibling_or_token().map(|it| it.kind()), Some(SyntaxKind::WHITESPACE) @@ -191,9 +188,7 @@ fn api_walkthrough() { // As well as some iterator helpers: let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); assert_eq!(f, Some(&*func)); - assert!(expr_syntax - .siblings_with_tokens(Direction::Next) - .any(|it| it.kind() == SyntaxKind::R_CURLY)); + assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}'])); assert_eq!( expr_syntax.descendants_with_tokens().count(), 8, // 5 tokens `1`, ` `, `+`, ` `, `!` diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index a3791b503..6eb96f03d 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs @@ -7,6 +7,7 @@ mod strings; use crate::{ SyntaxKind::{self, *}, TextUnit, + T, }; use self::{ @@ -90,16 +91,16 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind { match c { // Possiblily multi-byte tokens, // but we only produce 
single byte token now - // DOTDOTDOT, DOTDOT, DOTDOTEQ, DOT - '.' => return DOT, - // COLONCOLON COLON - ':' => return COLON, - // EQEQ FATARROW EQ - '=' => return EQ, - // NEQ EXCL - '!' => return EXCL, - // THIN_ARROW MINUS - '-' => return MINUS, + // T![...], T![..], T![..=], T![.] + '.' => return T![.], + // T![::] T![:] + ':' => return T![:], + // T![==] FATARROW T![=] + '=' => return T![=], + // T![!=] T![!] + '!' => return T![!], + // T![->] T![-] + '-' => return T![-], // If the character is an ident start not followed by another single // quote, then this is a lifetime name: @@ -148,8 +149,8 @@ fn scan_ident(c: char, ptr: &mut Ptr) -> SyntaxKind { ptr.bump(); true } - ('_', None) => return UNDERSCORE, - ('_', Some(c)) if !is_ident_continue(c) => return UNDERSCORE, + ('_', None) => return T![_], + ('_', Some(c)) if !is_ident_continue(c) => return T![_], _ => false, }; ptr.bump_while(is_ident_continue); diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 434f850d1..6de02a15a 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs @@ -17,7 +17,8 @@ use crate::{ text_token_source::TextTokenSource, text_tree_sink::TextTreeSink, lexer::{tokenize, Token}, - } + }, + T, }; pub(crate) fn incremental_reparse( @@ -122,16 +123,16 @@ fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxN fn is_balanced(tokens: &[Token]) -> bool { if tokens.is_empty() - || tokens.first().unwrap().kind != L_CURLY - || tokens.last().unwrap().kind != R_CURLY + || tokens.first().unwrap().kind != T!['{'] + || tokens.last().unwrap().kind != T!['}'] { return false; } let mut balance = 0usize; for t in &tokens[1..tokens.len() - 1] { match t.kind { - L_CURLY => balance += 1, - R_CURLY => { + T!['{'] => balance += 1, + T!['}'] => { balance = match balance.checked_sub(1) { Some(b) => b, None => return false, diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 11a1fb4a7..b53900a4b 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs @@ -5,9 +5,10 @@ mod field_expr; use crate::{ SourceFile, SyntaxError, AstNode, SyntaxNode, TextUnit, - SyntaxKind::{L_CURLY, R_CURLY, BYTE, BYTE_STRING, STRING, CHAR}, + SyntaxKind::{BYTE, BYTE_STRING, STRING, CHAR}, ast, algo::visit::{visitor_ctx, VisitorCtx}, + T, }; pub(crate) use unescape::EscapeError; @@ -83,8 +84,8 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) { let mut stack = Vec::new(); for node in root.descendants() { match node.kind() { - L_CURLY => stack.push(node), - R_CURLY => { + T!['{'] => stack.push(node), + T!['}'] => { if let Some(pair) = stack.pop() { assert_eq!( node.parent(), -- cgit v1.2.3
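
The hunks above all follow the same pattern: a bare SyntaxKind constant such as SEMI, COMMA or L_CURLY is replaced by a `T!` invocation that names the token by its surface syntax (`T![;]`, `T![,]`, `T!['{']`). The sketch below is a minimal, self-contained illustration of that pattern, not the real macro in ra_parser: the SyntaxKind enum is cut down to a handful of placeholder variants and only a few macro arms are shown.

    // Reduced sketch of the `T!` shorthand used throughout this patch.
    // Ordinary punctuation is matched as bare tokens; bracket characters
    // are written as char literals ('(' , ')', '{', '}') because a lone,
    // unbalanced delimiter cannot appear inside a macro_rules! matcher.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum SyntaxKind {
        SEMI,
        COMMA,
        L_PAREN,
        R_PAREN,
        L_CURLY,
        R_CURLY,
    }

    macro_rules! T {
        (;) => { $crate::SyntaxKind::SEMI };
        (,) => { $crate::SyntaxKind::COMMA };
        ('(') => { $crate::SyntaxKind::L_PAREN };
        (')') => { $crate::SyntaxKind::R_PAREN };
        ('{') => { $crate::SyntaxKind::L_CURLY };
        ('}') => { $crate::SyntaxKind::R_CURLY };
    }

    fn main() {
        // Parser code can then read `p.expect(T![;])` or `p.at(T!['{'])`
        // instead of spelling out the SyntaxKind variant by name.
        assert_eq!(T![;], SyntaxKind::SEMI);
        assert_eq!(T!['('], SyntaxKind::L_PAREN);
        assert_eq!(T!['}'], SyntaxKind::R_CURLY);
    }

In the real parser the macro covers every token kind, so the call sites changed in this commit (p.at, p.eat, p.expect, and the match arms on p.current()) keep their behaviour and only change how the token kind is spelled.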