diff options
Diffstat (limited to 'crates')
65 files changed, 1114 insertions, 988 deletions
diff --git a/crates/gen_lsp_server/Cargo.toml b/crates/gen_lsp_server/Cargo.toml index 34343e2f2..ba8bfdbd3 100644 --- a/crates/gen_lsp_server/Cargo.toml +++ b/crates/gen_lsp_server/Cargo.toml | |||
@@ -1,7 +1,7 @@ | |||
1 | [package] | 1 | [package] |
2 | edition = "2018" | 2 | edition = "2018" |
3 | name = "gen_lsp_server" | 3 | name = "gen_lsp_server" |
4 | version = "0.1.0" | 4 | version = "0.2.0" |
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | repository = "https://github.com/rust-analyzer/rust-analyzer" | 6 | repository = "https://github.com/rust-analyzer/rust-analyzer" |
7 | license = "MIT OR Apache-2.0" | 7 | license = "MIT OR Apache-2.0" |
diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs index cb0ac9885..f3ed74b7f 100644 --- a/crates/ra_assists/src/add_explicit_type.rs +++ b/crates/ra_assists/src/add_explicit_type.rs | |||
@@ -3,7 +3,7 @@ use hir::{ | |||
3 | db::HirDatabase, | 3 | db::HirDatabase, |
4 | }; | 4 | }; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | SyntaxKind, | 6 | T, |
7 | ast::{LetStmt, PatKind, NameOwner, AstNode} | 7 | ast::{LetStmt, PatKind, NameOwner, AstNode} |
8 | }; | 8 | }; |
9 | 9 | ||
@@ -24,7 +24,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option< | |||
24 | let name = pat.name()?; | 24 | let name = pat.name()?; |
25 | let name_range = name.syntax().range(); | 25 | let name_range = name.syntax().range(); |
26 | // Assist not applicable if the type has already been specified | 26 | // Assist not applicable if the type has already been specified |
27 | if stmt.syntax().children_with_tokens().any(|child| child.kind() == SyntaxKind::COLON) { | 27 | if stmt.syntax().children_with_tokens().any(|child| child.kind() == T![:]) { |
28 | return None; | 28 | return None; |
29 | } | 29 | } |
30 | // Infer type | 30 | // Infer type |
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs index aa7aeaabb..9afcac01a 100644 --- a/crates/ra_assists/src/ast_editor.rs +++ b/crates/ra_assists/src/ast_editor.rs | |||
@@ -2,7 +2,7 @@ use std::{iter, ops::RangeInclusive}; | |||
2 | 2 | ||
3 | use arrayvec::ArrayVec; | 3 | use arrayvec::ArrayVec; |
4 | use ra_text_edit::TextEditBuilder; | 4 | use ra_text_edit::TextEditBuilder; |
5 | use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction}; | 5 | use ra_syntax::{AstNode, TreeArc, ast, SyntaxKind::*, SyntaxElement, SourceFile, InsertPosition, Direction, T}; |
6 | use ra_fmt::leading_indent; | 6 | use ra_fmt::leading_indent; |
7 | use hir::Name; | 7 | use hir::Name; |
8 | 8 | ||
@@ -49,7 +49,7 @@ impl<N: AstNode> AstEditor<N> { | |||
49 | 49 | ||
50 | fn do_make_multiline(&mut self) { | 50 | fn do_make_multiline(&mut self) { |
51 | let l_curly = | 51 | let l_curly = |
52 | match self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) { | 52 | match self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { |
53 | Some(it) => it, | 53 | Some(it) => it, |
54 | None => return, | 54 | None => return, |
55 | }; | 55 | }; |
@@ -124,7 +124,7 @@ impl AstEditor<ast::NamedFieldList> { | |||
124 | if let Some(comma) = $anchor | 124 | if let Some(comma) = $anchor |
125 | .syntax() | 125 | .syntax() |
126 | .siblings_with_tokens(Direction::Next) | 126 | .siblings_with_tokens(Direction::Next) |
127 | .find(|it| it.kind() == COMMA) | 127 | .find(|it| it.kind() == T![,]) |
128 | { | 128 | { |
129 | InsertPosition::After(comma) | 129 | InsertPosition::After(comma) |
130 | } else { | 130 | } else { |
@@ -154,7 +154,7 @@ impl AstEditor<ast::NamedFieldList> { | |||
154 | } | 154 | } |
155 | 155 | ||
156 | fn l_curly(&self) -> Option<SyntaxElement> { | 156 | fn l_curly(&self) -> Option<SyntaxElement> { |
157 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) | 157 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) |
158 | } | 158 | } |
159 | } | 159 | } |
160 | 160 | ||
@@ -188,7 +188,7 @@ impl AstEditor<ast::ItemList> { | |||
188 | } | 188 | } |
189 | 189 | ||
190 | fn l_curly(&self) -> Option<SyntaxElement> { | 190 | fn l_curly(&self) -> Option<SyntaxElement> { |
191 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == L_CURLY) | 191 | self.ast().syntax().children_with_tokens().find(|it| it.kind() == T!['{']) |
192 | } | 192 | } |
193 | } | 193 | } |
194 | 194 | ||
@@ -290,7 +290,7 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> { | |||
290 | 290 | ||
291 | mod tokens { | 291 | mod tokens { |
292 | use once_cell::sync::Lazy; | 292 | use once_cell::sync::Lazy; |
293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*}; | 293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T}; |
294 | 294 | ||
295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); | 295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); |
296 | 296 | ||
@@ -299,7 +299,7 @@ mod tokens { | |||
299 | .syntax() | 299 | .syntax() |
300 | .descendants_with_tokens() | 300 | .descendants_with_tokens() |
301 | .filter_map(|it| it.as_token()) | 301 | .filter_map(|it| it.as_token()) |
302 | .find(|it| it.kind() == COMMA) | 302 | .find(|it| it.kind() == T![,]) |
303 | .unwrap() | 303 | .unwrap() |
304 | } | 304 | } |
305 | 305 | ||
diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs index 7c856c19b..1566cf179 100644 --- a/crates/ra_assists/src/auto_import.rs +++ b/crates/ra_assists/src/auto_import.rs | |||
@@ -2,8 +2,9 @@ use ra_text_edit::TextEditBuilder; | |||
2 | use hir::{ self, db::HirDatabase}; | 2 | use hir::{ self, db::HirDatabase}; |
3 | 3 | ||
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | T, | ||
5 | ast::{ self, NameOwner }, AstNode, SyntaxNode, Direction, TextRange, SmolStr, | 6 | ast::{ self, NameOwner }, AstNode, SyntaxNode, Direction, TextRange, SmolStr, |
6 | SyntaxKind::{ PATH, PATH_SEGMENT, COLONCOLON, COMMA } | 7 | SyntaxKind::{ PATH, PATH_SEGMENT } |
7 | }; | 8 | }; |
8 | use crate::{ | 9 | use crate::{ |
9 | AssistId, | 10 | AssistId, |
@@ -23,7 +24,7 @@ fn collect_path_segments_raw<'a>( | |||
23 | children.next().map(|n| (n, n.kind())), | 24 | children.next().map(|n| (n, n.kind())), |
24 | ); | 25 | ); |
25 | match (first, second, third) { | 26 | match (first, second, third) { |
26 | (Some((subpath, PATH)), Some((_, COLONCOLON)), Some((segment, PATH_SEGMENT))) => { | 27 | (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { |
27 | path = ast::Path::cast(subpath.as_node()?)?; | 28 | path = ast::Path::cast(subpath.as_node()?)?; |
28 | segments.push(ast::PathSegment::cast(segment.as_node()?)?); | 29 | segments.push(ast::PathSegment::cast(segment.as_node()?)?); |
29 | } | 30 | } |
@@ -421,7 +422,7 @@ fn make_assist_add_in_tree_list( | |||
421 | let last = tree_list.use_trees().last(); | 422 | let last = tree_list.use_trees().last(); |
422 | if let Some(last) = last { | 423 | if let Some(last) = last { |
423 | let mut buf = String::new(); | 424 | let mut buf = String::new(); |
424 | let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == COMMA); | 425 | let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]); |
425 | let offset = if let Some(comma) = comma { | 426 | let offset = if let Some(comma) = comma { |
426 | comma.range().end() | 427 | comma.range().end() |
427 | } else { | 428 | } else { |
diff --git a/crates/ra_assists/src/change_visibility.rs b/crates/ra_assists/src/change_visibility.rs index c63470726..620f534b5 100644 --- a/crates/ra_assists/src/change_visibility.rs +++ b/crates/ra_assists/src/change_visibility.rs | |||
@@ -1,8 +1,9 @@ | |||
1 | use hir::db::HirDatabase; | 1 | use hir::db::HirDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | T, | ||
3 | AstNode, SyntaxNode, TextUnit, | 4 | AstNode, SyntaxNode, TextUnit, |
4 | ast::{self, VisibilityOwner, NameOwner}, | 5 | ast::{self, VisibilityOwner, NameOwner}, |
5 | SyntaxKind::{VISIBILITY, FN_KW, MOD_KW, STRUCT_KW, ENUM_KW, TRAIT_KW, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT, WHITESPACE, COMMENT, ATTR}, | 6 | SyntaxKind::{VISIBILITY, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF, IDENT, WHITESPACE, COMMENT, ATTR}, |
6 | }; | 7 | }; |
7 | 8 | ||
8 | use crate::{AssistCtx, Assist, AssistId}; | 9 | use crate::{AssistCtx, Assist, AssistId}; |
@@ -16,7 +17,7 @@ pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi | |||
16 | 17 | ||
17 | fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 18 | fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
18 | let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { | 19 | let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { |
19 | FN_KW | MOD_KW | STRUCT_KW | ENUM_KW | TRAIT_KW => true, | 20 | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, |
20 | _ => false, | 21 | _ => false, |
21 | }); | 22 | }); |
22 | 23 | ||
diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs index a9b108111..7626ffad3 100644 --- a/crates/ra_assists/src/flip_comma.rs +++ b/crates/ra_assists/src/flip_comma.rs | |||
@@ -1,14 +1,14 @@ | |||
1 | use hir::db::HirDatabase; | 1 | use hir::db::HirDatabase; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | T, | ||
3 | Direction, | 4 | Direction, |
4 | SyntaxKind::COMMA, | ||
5 | algo::non_trivia_sibling, | 5 | algo::non_trivia_sibling, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use crate::{AssistCtx, Assist, AssistId}; | 8 | use crate::{AssistCtx, Assist, AssistId}; |
9 | 9 | ||
10 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 10 | pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
11 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == COMMA)?; | 11 | let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; |
12 | let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; | 12 | let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; |
13 | let next = non_trivia_sibling(comma.into(), Direction::Next)?; | 13 | let next = non_trivia_sibling(comma.into(), Direction::Next)?; |
14 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { | 14 | ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { |
diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs index ae9958f11..6e900f8ef 100644 --- a/crates/ra_assists/src/remove_dbg.rs +++ b/crates/ra_assists/src/remove_dbg.rs | |||
@@ -2,9 +2,7 @@ use hir::db::HirDatabase; | |||
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | ast::{self, AstNode}, | 3 | ast::{self, AstNode}, |
4 | TextUnit, | 4 | TextUnit, |
5 | SyntaxKind::{ | 5 | T |
6 | L_PAREN, R_PAREN, L_CURLY, R_CURLY, L_BRACK, R_BRACK, EXCL | ||
7 | }, | ||
8 | }; | 6 | }; |
9 | use crate::{AssistCtx, Assist, AssistId}; | 7 | use crate::{AssistCtx, Assist, AssistId}; |
10 | 8 | ||
@@ -64,7 +62,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b | |||
64 | // Make sure it is actually a dbg-macro call, dbg followed by ! | 62 | // Make sure it is actually a dbg-macro call, dbg followed by ! |
65 | let excl = path.syntax().next_sibling_or_token()?; | 63 | let excl = path.syntax().next_sibling_or_token()?; |
66 | 64 | ||
67 | if name_ref.text() != macro_name || excl.kind() != EXCL { | 65 | if name_ref.text() != macro_name || excl.kind() != T![!] { |
68 | return None; | 66 | return None; |
69 | } | 67 | } |
70 | 68 | ||
@@ -73,7 +71,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b | |||
73 | let last_child = node.last_child_or_token()?; | 71 | let last_child = node.last_child_or_token()?; |
74 | 72 | ||
75 | match (first_child.kind(), last_child.kind()) { | 73 | match (first_child.kind(), last_child.kind()) { |
76 | (L_PAREN, R_PAREN) | (L_BRACK, R_BRACK) | (L_CURLY, R_CURLY) => Some(true), | 74 | (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) => Some(true), |
77 | _ => Some(false), | 75 | _ => Some(false), |
78 | } | 76 | } |
79 | } | 77 | } |
diff --git a/crates/ra_assists/src/split_import.rs b/crates/ra_assists/src/split_import.rs index 57e0efaf2..881c5ecdc 100644 --- a/crates/ra_assists/src/split_import.rs +++ b/crates/ra_assists/src/split_import.rs | |||
@@ -2,14 +2,15 @@ use std::iter::successors; | |||
2 | 2 | ||
3 | use hir::db::HirDatabase; | 3 | use hir::db::HirDatabase; |
4 | use ra_syntax::{ | 4 | use ra_syntax::{ |
5 | TextUnit, AstNode, SyntaxKind::COLONCOLON, | 5 | T, |
6 | TextUnit, AstNode, | ||
6 | ast, | 7 | ast, |
7 | }; | 8 | }; |
8 | 9 | ||
9 | use crate::{AssistCtx, Assist, AssistId}; | 10 | use crate::{AssistCtx, Assist, AssistId}; |
10 | 11 | ||
11 | pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { | 12 | pub(crate) fn split_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { |
12 | let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?; | 13 | let colon_colon = ctx.token_at_offset().find(|leaf| leaf.kind() == T![::])?; |
13 | let path = ast::Path::cast(colon_colon.parent())?; | 14 | let path = ast::Path::cast(colon_colon.parent())?; |
14 | let top_path = successors(Some(path), |it| it.parent_path()).last()?; | 15 | let top_path = successors(Some(path), |it| it.parent_path()).last()?; |
15 | 16 | ||
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs index c95d452b3..b481ace9e 100644 --- a/crates/ra_cli/src/analysis_stats.rs +++ b/crates/ra_cli/src/analysis_stats.rs | |||
@@ -51,7 +51,10 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { | |||
51 | println!("Total modules found: {}", visited_modules.len()); | 51 | println!("Total modules found: {}", visited_modules.len()); |
52 | println!("Total declarations: {}", num_decls); | 52 | println!("Total declarations: {}", num_decls); |
53 | println!("Total functions: {}", funcs.len()); | 53 | println!("Total functions: {}", funcs.len()); |
54 | let bar = indicatif::ProgressBar::new(funcs.len() as u64); | 54 | let bar = indicatif::ProgressBar::with_draw_target( |
55 | funcs.len() as u64, | ||
56 | indicatif::ProgressDrawTarget::stderr_nohz(), | ||
57 | ); | ||
55 | bar.set_style( | 58 | bar.set_style( |
56 | indicatif::ProgressStyle::default_bar().template("{wide_bar} {pos}/{len}\n{msg}"), | 59 | indicatif::ProgressStyle::default_bar().template("{wide_bar} {pos}/{len}\n{msg}"), |
57 | ); | 60 | ); |
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs index 1cd400752..bf567721a 100644 --- a/crates/ra_db/src/lib.rs +++ b/crates/ra_db/src/lib.rs | |||
@@ -15,7 +15,7 @@ pub use crate::{ | |||
15 | }, | 15 | }, |
16 | }; | 16 | }; |
17 | 17 | ||
18 | pub trait CheckCanceled: panic::RefUnwindSafe { | 18 | pub trait CheckCanceled { |
19 | /// Aborts current query if there are pending changes. | 19 | /// Aborts current query if there are pending changes. |
20 | /// | 20 | /// |
21 | /// rust-analyzer needs to be able to answer semantic questions about the | 21 | /// rust-analyzer needs to be able to answer semantic questions about the |
@@ -36,14 +36,15 @@ pub trait CheckCanceled: panic::RefUnwindSafe { | |||
36 | Self: Sized, | 36 | Self: Sized, |
37 | F: FnOnce(&Self) -> T + panic::UnwindSafe, | 37 | F: FnOnce(&Self) -> T + panic::UnwindSafe, |
38 | { | 38 | { |
39 | panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() { | 39 | let this = panic::AssertUnwindSafe(self); |
40 | panic::catch_unwind(|| f(*this)).map_err(|err| match err.downcast::<Canceled>() { | ||
40 | Ok(canceled) => *canceled, | 41 | Ok(canceled) => *canceled, |
41 | Err(payload) => panic::resume_unwind(payload), | 42 | Err(payload) => panic::resume_unwind(payload), |
42 | }) | 43 | }) |
43 | } | 44 | } |
44 | } | 45 | } |
45 | 46 | ||
46 | impl<T: salsa::Database + panic::RefUnwindSafe> CheckCanceled for T { | 47 | impl<T: salsa::Database> CheckCanceled for T { |
47 | fn check_canceled(&self) { | 48 | fn check_canceled(&self) { |
48 | if self.salsa_runtime().is_current_revision_canceled() { | 49 | if self.salsa_runtime().is_current_revision_canceled() { |
49 | Canceled::throw() | 50 | Canceled::throw() |
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 603be1854..aac5a1d23 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | use std::iter::successors; | 3 | use std::iter::successors; |
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | use ra_syntax::{ | 5 | use ra_syntax::{ |
6 | SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind, | 6 | SyntaxNode, SyntaxKind::*, SyntaxToken, SyntaxKind, T, |
7 | ast::{self, AstNode, AstToken}, | 7 | ast::{self, AstNode, AstToken}, |
8 | }; | 8 | }; |
9 | 9 | ||
@@ -38,7 +38,7 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { | |||
38 | return None; | 38 | return None; |
39 | } | 39 | } |
40 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { | 40 | let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { |
41 | WHITESPACE | L_CURLY | R_CURLY => false, | 41 | WHITESPACE | T!['{'] | T!['}'] => false, |
42 | _ => it != &expr.syntax(), | 42 | _ => it != &expr.syntax(), |
43 | }); | 43 | }); |
44 | if non_trivial_children.count() > 0 { | 44 | if non_trivial_children.count() > 0 { |
@@ -49,8 +49,8 @@ pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { | |||
49 | 49 | ||
50 | pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { | 50 | pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { |
51 | match left { | 51 | match left { |
52 | L_PAREN | L_BRACK => return "", | 52 | T!['('] | T!['['] => return "", |
53 | L_CURLY => { | 53 | T!['{'] => { |
54 | if let USE_TREE = right { | 54 | if let USE_TREE = right { |
55 | return ""; | 55 | return ""; |
56 | } | 56 | } |
@@ -58,13 +58,13 @@ pub fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { | |||
58 | _ => (), | 58 | _ => (), |
59 | } | 59 | } |
60 | match right { | 60 | match right { |
61 | R_PAREN | R_BRACK => return "", | 61 | T![')'] | T![']'] => return "", |
62 | R_CURLY => { | 62 | T!['}'] => { |
63 | if let USE_TREE = left { | 63 | if let USE_TREE = left { |
64 | return ""; | 64 | return ""; |
65 | } | 65 | } |
66 | } | 66 | } |
67 | DOT => return "", | 67 | T![.] => return "", |
68 | _ => (), | 68 | _ => (), |
69 | } | 69 | } |
70 | " " | 70 | " " |
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 68ffcb2e9..294d047d8 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml | |||
@@ -21,10 +21,10 @@ tt = { path = "../ra_tt", package = "ra_tt" } | |||
21 | test_utils = { path = "../test_utils" } | 21 | test_utils = { path = "../test_utils" } |
22 | ra_prof = { path = "../ra_prof" } | 22 | ra_prof = { path = "../ra_prof" } |
23 | 23 | ||
24 | chalk-solve = { git = "https://github.com/rust-lang/chalk.git" } | 24 | chalk-solve = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
25 | chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git" } | 25 | chalk-rust-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
26 | chalk-ir = { git = "https://github.com/rust-lang/chalk.git" } | 26 | chalk-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
27 | 27 | ||
28 | [dev-dependencies] | 28 | [dev-dependencies] |
29 | flexi_logger = "0.11.0" | 29 | flexi_logger = "0.11.0" |
30 | insta = "0.7.0" | 30 | insta = "0.8.1" |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index 8e827d4f5..11cdf9c34 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -11,7 +11,7 @@ use crate::{ | |||
11 | DefWithBody, Trait, | 11 | DefWithBody, Trait, |
12 | ids, | 12 | ids, |
13 | nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap}, | 13 | nameres::{Namespace, ImportSourceMap, RawItems, CrateDefMap}, |
14 | ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig, TypeCtor, GenericPredicate}, | 14 | ty::{InferenceResult, Ty, method_resolution::CrateImplBlocks, TypableDef, CallableDef, FnSig, TypeCtor, GenericPredicate, Substs}, |
15 | adt::{StructData, EnumData}, | 15 | adt::{StructData, EnumData}, |
16 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, | 16 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, |
17 | generics::{GenericParams, GenericDef}, | 17 | generics::{GenericParams, GenericDef}, |
@@ -126,7 +126,7 @@ pub trait HirDatabase: DefDatabase { | |||
126 | #[salsa::invoke(ExprScopes::expr_scopes_query)] | 126 | #[salsa::invoke(ExprScopes::expr_scopes_query)] |
127 | fn expr_scopes(&self, def: DefWithBody) -> Arc<ExprScopes>; | 127 | fn expr_scopes(&self, def: DefWithBody) -> Arc<ExprScopes>; |
128 | 128 | ||
129 | #[salsa::invoke(crate::ty::infer)] | 129 | #[salsa::invoke(crate::ty::infer_query)] |
130 | fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>; | 130 | fn infer(&self, def: DefWithBody) -> Arc<InferenceResult>; |
131 | 131 | ||
132 | #[salsa::invoke(crate::ty::type_for_def)] | 132 | #[salsa::invoke(crate::ty::type_for_def)] |
@@ -141,6 +141,9 @@ pub trait HirDatabase: DefDatabase { | |||
141 | #[salsa::invoke(crate::ty::generic_predicates)] | 141 | #[salsa::invoke(crate::ty::generic_predicates)] |
142 | fn generic_predicates(&self, def: GenericDef) -> Arc<[GenericPredicate]>; | 142 | fn generic_predicates(&self, def: GenericDef) -> Arc<[GenericPredicate]>; |
143 | 143 | ||
144 | #[salsa::invoke(crate::ty::generic_defaults)] | ||
145 | fn generic_defaults(&self, def: GenericDef) -> Substs; | ||
146 | |||
144 | #[salsa::invoke(crate::expr::body_with_source_map_query)] | 147 | #[salsa::invoke(crate::expr::body_with_source_map_query)] |
145 | fn body_with_source_map( | 148 | fn body_with_source_map( |
146 | &self, | 149 | &self, |
@@ -153,7 +156,7 @@ pub trait HirDatabase: DefDatabase { | |||
153 | #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] | 156 | #[salsa::invoke(crate::ty::method_resolution::CrateImplBlocks::impls_in_crate_query)] |
154 | fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>; | 157 | fn impls_in_crate(&self, krate: Crate) -> Arc<CrateImplBlocks>; |
155 | 158 | ||
156 | #[salsa::invoke(crate::ty::traits::impls_for_trait)] | 159 | #[salsa::invoke(crate::ty::traits::impls_for_trait_query)] |
157 | fn impls_for_trait(&self, krate: Crate, trait_: Trait) -> Arc<[ImplBlock]>; | 160 | fn impls_for_trait(&self, krate: Crate, trait_: Trait) -> Arc<[ImplBlock]>; |
158 | 161 | ||
159 | /// This provides the Chalk trait solver instance. Because Chalk always | 162 | /// This provides the Chalk trait solver instance. Because Chalk always |
@@ -161,11 +164,11 @@ pub trait HirDatabase: DefDatabase { | |||
161 | /// because Chalk does its own internal caching, the solver is wrapped in a | 164 | /// because Chalk does its own internal caching, the solver is wrapped in a |
162 | /// Mutex and the query is marked volatile, to make sure the cached state is | 165 | /// Mutex and the query is marked volatile, to make sure the cached state is |
163 | /// thrown away when input facts change. | 166 | /// thrown away when input facts change. |
164 | #[salsa::invoke(crate::ty::traits::solver)] | 167 | #[salsa::invoke(crate::ty::traits::solver_query)] |
165 | #[salsa::volatile] | 168 | #[salsa::volatile] |
166 | fn solver(&self, krate: Crate) -> Arc<Mutex<crate::ty::traits::Solver>>; | 169 | fn solver(&self, krate: Crate) -> Arc<Mutex<crate::ty::traits::Solver>>; |
167 | 170 | ||
168 | #[salsa::invoke(crate::ty::traits::implements)] | 171 | #[salsa::invoke(crate::ty::traits::implements_query)] |
169 | fn implements( | 172 | fn implements( |
170 | &self, | 173 | &self, |
171 | krate: Crate, | 174 | krate: Crate, |
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs index aebed6788..3f758f283 100644 --- a/crates/ra_hir/src/expr/validation.rs +++ b/crates/ra_hir/src/expr/validation.rs | |||
@@ -5,13 +5,11 @@ use ra_syntax::ast::{AstNode, StructLit}; | |||
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | expr::AstPtr, | 7 | expr::AstPtr, |
8 | HirDatabase, | 8 | HirDatabase, Function, Name, |
9 | Function, | ||
10 | Name, | ||
11 | diagnostics::{DiagnosticSink, MissingFields}, | 9 | diagnostics::{DiagnosticSink, MissingFields}, |
12 | adt::AdtDef, | 10 | adt::AdtDef, |
13 | Path, | 11 | Path, |
14 | ty::InferenceResult | 12 | ty::InferenceResult, |
15 | }; | 13 | }; |
16 | use super::{Expr, StructLitField, ExprId}; | 14 | use super::{Expr, StructLitField, ExprId}; |
17 | 15 | ||
@@ -50,43 +48,46 @@ impl<'a, 'b> ExprValidator<'a, 'b> { | |||
50 | spread: &Option<ExprId>, | 48 | spread: &Option<ExprId>, |
51 | db: &impl HirDatabase, | 49 | db: &impl HirDatabase, |
52 | ) { | 50 | ) { |
53 | if let Some(_) = spread { | 51 | if spread.is_some() { |
54 | return; | 52 | return; |
55 | } | 53 | } |
54 | |||
55 | let struct_def = match self.infer[id].as_adt() { | ||
56 | Some((AdtDef::Struct(s), _)) => s, | ||
57 | _ => return, | ||
58 | }; | ||
59 | |||
56 | let lit_fields: FxHashSet<_> = fields.into_iter().map(|f| &f.name).collect(); | 60 | let lit_fields: FxHashSet<_> = fields.into_iter().map(|f| &f.name).collect(); |
57 | let struct_ty = &self.infer[id]; | 61 | let missed_fields: Vec<Name> = struct_def |
58 | if let Some((AdtDef::Struct(s), _)) = struct_ty.as_adt() { | 62 | .fields(db) |
59 | let missed_fields: Vec<Name> = s | 63 | .iter() |
60 | .fields(db) | 64 | .filter_map(|f| { |
61 | .iter() | 65 | let name = f.name(db); |
62 | .filter_map(|f| { | 66 | if lit_fields.contains(&name) { |
63 | let name = f.name(db); | 67 | None |
64 | if lit_fields.contains(&name) { | 68 | } else { |
65 | None | 69 | Some(name) |
66 | } else { | 70 | } |
67 | Some(name) | 71 | }) |
68 | } | 72 | .collect(); |
69 | }) | 73 | if missed_fields.is_empty() { |
70 | .collect(); | 74 | return; |
71 | if missed_fields.is_empty() { | 75 | } |
72 | return; | 76 | let source_map = self.func.body_source_map(db); |
73 | } | 77 | let file_id = self.func.source(db).0; |
74 | let source_map = self.func.body_source_map(db); | 78 | let source_file = db.parse(file_id.original_file(db)); |
75 | let file_id = self.func.source(db).0; | 79 | if let Some(field_list_node) = source_map |
76 | let source_file = db.parse(file_id.original_file(db)); | 80 | .expr_syntax(id) |
77 | if let Some(field_list_node) = source_map | 81 | .map(|ptr| ptr.to_node(source_file.syntax())) |
78 | .expr_syntax(id) | 82 | .and_then(StructLit::cast) |
79 | .map(|ptr| ptr.to_node(source_file.syntax())) | 83 | .and_then(|lit| lit.named_field_list()) |
80 | .and_then(StructLit::cast) | 84 | { |
81 | .and_then(|lit| lit.named_field_list()) | 85 | let field_list_ptr = AstPtr::new(field_list_node); |
82 | { | 86 | self.sink.push(MissingFields { |
83 | let field_list_ptr = AstPtr::new(field_list_node); | 87 | file: file_id, |
84 | self.sink.push(MissingFields { | 88 | field_list: field_list_ptr, |
85 | file: file_id, | 89 | missed_fields, |
86 | field_list: field_list_ptr, | 90 | }) |
87 | missed_fields, | ||
88 | }) | ||
89 | } | ||
90 | } | 91 | } |
91 | } | 92 | } |
92 | } | 93 | } |
diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs index c29b96f50..79a7fa23a 100644 --- a/crates/ra_hir/src/generics.rs +++ b/crates/ra_hir/src/generics.rs | |||
@@ -5,7 +5,7 @@ | |||
5 | 5 | ||
6 | use std::sync::Arc; | 6 | use std::sync::Arc; |
7 | 7 | ||
8 | use ra_syntax::ast::{self, NameOwner, TypeParamsOwner, TypeBoundsOwner}; | 8 | use ra_syntax::ast::{self, NameOwner, TypeParamsOwner, TypeBoundsOwner, DefaultTypeParamOwner}; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | db::{ HirDatabase, DefDatabase}, | 11 | db::{ HirDatabase, DefDatabase}, |
@@ -18,6 +18,7 @@ pub struct GenericParam { | |||
18 | // FIXME: give generic params proper IDs | 18 | // FIXME: give generic params proper IDs |
19 | pub(crate) idx: u32, | 19 | pub(crate) idx: u32, |
20 | pub(crate) name: Name, | 20 | pub(crate) name: Name, |
21 | pub(crate) default: Option<Path>, | ||
21 | } | 22 | } |
22 | 23 | ||
23 | /// Data about the generic parameters of a function, struct, impl, etc. | 24 | /// Data about the generic parameters of a function, struct, impl, etc. |
@@ -68,7 +69,11 @@ impl GenericParams { | |||
68 | GenericDef::Enum(it) => generics.fill(&*it.source(db).1, start), | 69 | GenericDef::Enum(it) => generics.fill(&*it.source(db).1, start), |
69 | GenericDef::Trait(it) => { | 70 | GenericDef::Trait(it) => { |
70 | // traits get the Self type as an implicit first type parameter | 71 | // traits get the Self type as an implicit first type parameter |
71 | generics.params.push(GenericParam { idx: start, name: Name::self_type() }); | 72 | generics.params.push(GenericParam { |
73 | idx: start, | ||
74 | name: Name::self_type(), | ||
75 | default: None, | ||
76 | }); | ||
72 | generics.fill(&*it.source(db).1, start + 1); | 77 | generics.fill(&*it.source(db).1, start + 1); |
73 | } | 78 | } |
74 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).1, start), | 79 | GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).1, start), |
@@ -90,7 +95,9 @@ impl GenericParams { | |||
90 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { | 95 | fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { |
91 | for (idx, type_param) in params.type_params().enumerate() { | 96 | for (idx, type_param) in params.type_params().enumerate() { |
92 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); | 97 | let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); |
93 | let param = GenericParam { idx: idx as u32 + start, name: name.clone() }; | 98 | let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); |
99 | |||
100 | let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; | ||
94 | self.params.push(param); | 101 | self.params.push(param); |
95 | 102 | ||
96 | let type_ref = TypeRef::Path(name.into()); | 103 | let type_ref = TypeRef::Path(name.into()); |
@@ -190,13 +197,13 @@ impl From<crate::adt::AdtDef> for GenericDef { | |||
190 | } | 197 | } |
191 | } | 198 | } |
192 | 199 | ||
193 | pub trait HasGenericParams { | 200 | pub trait HasGenericParams: Copy { |
194 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams>; | 201 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams>; |
195 | } | 202 | } |
196 | 203 | ||
197 | impl<T> HasGenericParams for T | 204 | impl<T> HasGenericParams for T |
198 | where | 205 | where |
199 | T: Into<GenericDef>, | 206 | T: Into<GenericDef> + Copy, |
200 | { | 207 | { |
201 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> { | 208 | fn generic_params(self, db: &impl DefDatabase) -> Arc<GenericParams> { |
202 | db.generic_params(self.into()) | 209 | db.generic_params(self.into()) |
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs index fa5882dea..b84cb7503 100644 --- a/crates/ra_hir/src/mock.rs +++ b/crates/ra_hir/src/mock.rs | |||
@@ -236,7 +236,7 @@ impl MockDatabase { | |||
236 | } | 236 | } |
237 | 237 | ||
238 | #[derive(Default)] | 238 | #[derive(Default)] |
239 | pub struct CrateGraphFixture(pub FxHashMap<String, (String, Edition, Vec<String>)>); | 239 | pub struct CrateGraphFixture(pub Vec<(String, (String, Edition, Vec<String>))>); |
240 | 240 | ||
241 | #[macro_export] | 241 | #[macro_export] |
242 | macro_rules! crate_graph { | 242 | macro_rules! crate_graph { |
@@ -246,10 +246,10 @@ macro_rules! crate_graph { | |||
246 | #[allow(unused_mut, unused_assignments)] | 246 | #[allow(unused_mut, unused_assignments)] |
247 | let mut edition = ra_db::Edition::Edition2018; | 247 | let mut edition = ra_db::Edition::Edition2018; |
248 | $(edition = ra_db::Edition::from_string($edition);)? | 248 | $(edition = ra_db::Edition::from_string($edition);)? |
249 | res.0.insert( | 249 | res.0.push(( |
250 | $crate_name.to_string(), | 250 | $crate_name.to_string(), |
251 | ($crate_path.to_string(), edition, vec![$($dep.to_string()),*]) | 251 | ($crate_path.to_string(), edition, vec![$($dep.to_string()),*]) |
252 | ); | 252 | )); |
253 | )* | 253 | )* |
254 | res | 254 | res |
255 | }} | 255 | }} |
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs index 9a999e66c..e3a82cf03 100644 --- a/crates/ra_hir/src/name.rs +++ b/crates/ra_hir/src/name.rs | |||
@@ -5,7 +5,7 @@ use ra_syntax::{ast, SmolStr}; | |||
5 | /// `Name` is a wrapper around string, which is used in hir for both references | 5 | /// `Name` is a wrapper around string, which is used in hir for both references |
6 | /// and declarations. In theory, names should also carry hygiene info, but we are | 6 | /// and declarations. In theory, names should also carry hygiene info, but we are |
7 | /// not there yet! | 7 | /// not there yet! |
8 | #[derive(Clone, PartialEq, Eq, Hash)] | 8 | #[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] |
9 | pub struct Name { | 9 | pub struct Name { |
10 | text: SmolStr, | 10 | text: SmolStr, |
11 | } | 11 | } |
diff --git a/crates/ra_hir/src/nameres/tests.rs b/crates/ra_hir/src/nameres/tests.rs index 572bd1bf7..14c8ee50b 100644 --- a/crates/ra_hir/src/nameres/tests.rs +++ b/crates/ra_hir/src/nameres/tests.rs | |||
@@ -8,7 +8,11 @@ use ra_db::SourceDatabase; | |||
8 | use test_utils::covers; | 8 | use test_utils::covers; |
9 | use insta::assert_snapshot_matches; | 9 | use insta::assert_snapshot_matches; |
10 | 10 | ||
11 | use crate::{Crate, mock::{MockDatabase, CrateGraphFixture}, nameres::Resolution}; | 11 | use crate::{ |
12 | Crate, | ||
13 | mock::{MockDatabase, CrateGraphFixture}, | ||
14 | nameres::Resolution, | ||
15 | }; | ||
12 | 16 | ||
13 | use super::*; | 17 | use super::*; |
14 | 18 | ||
@@ -25,12 +29,15 @@ fn compute_crate_def_map(fixture: &str, graph: Option<CrateGraphFixture>) -> Arc | |||
25 | fn render_crate_def_map(map: &CrateDefMap) -> String { | 29 | fn render_crate_def_map(map: &CrateDefMap) -> String { |
26 | let mut buf = String::new(); | 30 | let mut buf = String::new(); |
27 | go(&mut buf, map, "\ncrate", map.root); | 31 | go(&mut buf, map, "\ncrate", map.root); |
28 | return buf; | 32 | return buf.trim().to_string(); |
29 | 33 | ||
30 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: CrateModuleId) { | 34 | fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: CrateModuleId) { |
31 | *buf += path; | 35 | *buf += path; |
32 | *buf += "\n"; | 36 | *buf += "\n"; |
33 | for (name, res) in map.modules[module].scope.items.iter() { | 37 | |
38 | let mut entries = map.modules[module].scope.items.iter().collect::<Vec<_>>(); | ||
39 | entries.sort_by_key(|(name, _)| *name); | ||
40 | for (name, res) in entries { | ||
34 | *buf += &format!("{}: {}\n", name, dump_resolution(res)) | 41 | *buf += &format!("{}: {}\n", name, dump_resolution(res)) |
35 | } | 42 | } |
36 | for (name, child) in map.modules[module].children.iter() { | 43 | for (name, child) in map.modules[module].children.iter() { |
@@ -54,8 +61,8 @@ fn def_map(fixtute: &str) -> String { | |||
54 | render_crate_def_map(&dm) | 61 | render_crate_def_map(&dm) |
55 | } | 62 | } |
56 | 63 | ||
57 | fn def_map_with_crate_graph(fixtute: &str, graph: CrateGraphFixture) -> String { | 64 | fn def_map_with_crate_graph(fixture: &str, graph: CrateGraphFixture) -> String { |
58 | let dm = compute_crate_def_map(fixtute, Some(graph)); | 65 | let dm = compute_crate_def_map(fixture, Some(graph)); |
59 | render_crate_def_map(&dm) | 66 | render_crate_def_map(&dm) |
60 | } | 67 | } |
61 | 68 | ||
@@ -79,21 +86,20 @@ fn crate_def_map_smoke_test() { | |||
79 | ", | 86 | ", |
80 | ); | 87 | ); |
81 | assert_snapshot_matches!(map, @r###" | 88 | assert_snapshot_matches!(map, @r###" |
82 | crate | 89 | ⋮crate |
83 | V: t v | 90 | ⋮E: t |
84 | E: t | 91 | ⋮S: t v |
85 | foo: t | 92 | ⋮V: t v |
86 | S: t v | 93 | ⋮foo: t |
87 | 94 | ⋮ | |
88 | crate::foo | 95 | ⋮crate::foo |
89 | bar: t | 96 | ⋮bar: t |
90 | f: v | 97 | ⋮f: v |
91 | 98 | ⋮ | |
92 | crate::foo::bar | 99 | ⋮crate::foo::bar |
93 | Baz: t v | 100 | ⋮Baz: t v |
94 | E: t | 101 | ⋮E: t |
95 | "### | 102 | "###) |
96 | ) | ||
97 | } | 103 | } |
98 | 104 | ||
99 | #[test] | 105 | #[test] |
@@ -113,12 +119,12 @@ fn bogus_paths() { | |||
113 | ", | 119 | ", |
114 | ); | 120 | ); |
115 | assert_snapshot_matches!(map, @r###" | 121 | assert_snapshot_matches!(map, @r###" |
116 | crate | 122 | ⋮crate |
117 | foo: t | 123 | ⋮S: t v |
118 | S: t v | 124 | ⋮foo: t |
119 | 125 | ⋮ | |
120 | crate::foo | 126 | ⋮crate::foo |
121 | "### | 127 | "### |
122 | ) | 128 | ) |
123 | } | 129 | } |
124 | 130 | ||
@@ -137,13 +143,13 @@ fn use_as() { | |||
137 | ); | 143 | ); |
138 | assert_snapshot_matches!(map, | 144 | assert_snapshot_matches!(map, |
139 | @r###" | 145 | @r###" |
140 | crate | 146 | ⋮crate |
141 | Foo: t v | 147 | ⋮Foo: t v |
142 | foo: t | 148 | ⋮foo: t |
143 | 149 | ⋮ | |
144 | crate::foo | 150 | ⋮crate::foo |
145 | Baz: t v | 151 | ⋮Baz: t v |
146 | "### | 152 | "### |
147 | ); | 153 | ); |
148 | } | 154 | } |
149 | 155 | ||
@@ -164,21 +170,19 @@ fn use_trees() { | |||
164 | pub enum Quux {}; | 170 | pub enum Quux {}; |
165 | ", | 171 | ", |
166 | ); | 172 | ); |
167 | assert_snapshot_matches!(map, | 173 | assert_snapshot_matches!(map, @r###" |
168 | @r###" | 174 | ⋮crate |
169 | crate | 175 | ⋮Baz: t v |
170 | Quux: t | 176 | ⋮Quux: t |
171 | Baz: t v | 177 | ⋮foo: t |
172 | foo: t | 178 | ⋮ |
173 | 179 | ⋮crate::foo | |
174 | crate::foo | 180 | ⋮bar: t |
175 | bar: t | 181 | ⋮ |
176 | 182 | ⋮crate::foo::bar | |
177 | crate::foo::bar | 183 | ⋮Baz: t v |
178 | Quux: t | 184 | ⋮Quux: t |
179 | Baz: t v | 185 | "###); |
180 | "### | ||
181 | ); | ||
182 | } | 186 | } |
183 | 187 | ||
184 | #[test] | 188 | #[test] |
@@ -199,20 +203,18 @@ fn re_exports() { | |||
199 | pub struct Baz; | 203 | pub struct Baz; |
200 | ", | 204 | ", |
201 | ); | 205 | ); |
202 | assert_snapshot_matches!(map, | 206 | assert_snapshot_matches!(map, @r###" |
203 | @r###" | 207 | ⋮crate |
204 | crate | 208 | ⋮Baz: t v |
205 | Baz: t v | 209 | ⋮foo: t |
206 | foo: t | 210 | ⋮ |
207 | 211 | ⋮crate::foo | |
208 | crate::foo | 212 | ⋮Baz: t v |
209 | bar: t | 213 | ⋮bar: t |
210 | Baz: t v | 214 | ⋮ |
211 | 215 | ⋮crate::foo::bar | |
212 | crate::foo::bar | 216 | ⋮Baz: t v |
213 | Baz: t v | 217 | "###); |
214 | "### | ||
215 | ); | ||
216 | } | 218 | } |
217 | 219 | ||
218 | #[test] | 220 | #[test] |
@@ -237,10 +239,10 @@ fn std_prelude() { | |||
237 | }, | 239 | }, |
238 | ); | 240 | ); |
239 | assert_snapshot_matches!(map, @r###" | 241 | assert_snapshot_matches!(map, @r###" |
240 | crate | 242 | ⋮crate |
241 | Bar: t v | 243 | ⋮Bar: t v |
242 | Baz: t v | 244 | ⋮Baz: t v |
243 | "###); | 245 | "###); |
244 | } | 246 | } |
245 | 247 | ||
246 | #[test] | 248 | #[test] |
@@ -254,10 +256,10 @@ fn can_import_enum_variant() { | |||
254 | ", | 256 | ", |
255 | ); | 257 | ); |
256 | assert_snapshot_matches!(map, @r###" | 258 | assert_snapshot_matches!(map, @r###" |
257 | crate | 259 | ⋮crate |
258 | V: t v | 260 | ⋮E: t |
259 | E: t | 261 | ⋮V: t v |
260 | "### | 262 | "### |
261 | ); | 263 | ); |
262 | } | 264 | } |
263 | 265 | ||
@@ -285,20 +287,18 @@ fn edition_2015_imports() { | |||
285 | }, | 287 | }, |
286 | ); | 288 | ); |
287 | 289 | ||
288 | assert_snapshot_matches!(map, | 290 | assert_snapshot_matches!(map, @r###" |
289 | @r###" | 291 | ⋮crate |
290 | crate | 292 | ⋮bar: t |
291 | bar: t | 293 | ⋮foo: t |
292 | foo: t | 294 | ⋮ |
293 | 295 | ⋮crate::bar | |
294 | crate::bar | 296 | ⋮Bar: t v |
295 | Bar: t v | 297 | ⋮ |
296 | 298 | ⋮crate::foo | |
297 | crate::foo | 299 | ⋮Bar: t v |
298 | FromLib: t v | 300 | ⋮FromLib: t v |
299 | Bar: t v | 301 | "###); |
300 | "### | ||
301 | ); | ||
302 | } | 302 | } |
303 | 303 | ||
304 | #[test] | 304 | #[test] |
@@ -317,16 +317,14 @@ fn module_resolution_works_for_non_standard_filenames() { | |||
317 | }, | 317 | }, |
318 | ); | 318 | ); |
319 | 319 | ||
320 | assert_snapshot_matches!(map, | 320 | assert_snapshot_matches!(map, @r###" |
321 | @r###" | 321 | ⋮crate |
322 | crate | 322 | ⋮Bar: t v |
323 | Bar: t v | 323 | ⋮foo: t |
324 | foo: t | 324 | ⋮ |
325 | 325 | ⋮crate::foo | |
326 | crate::foo | 326 | ⋮Bar: t v |
327 | Bar: t v | 327 | "###); |
328 | "### | ||
329 | ); | ||
330 | } | 328 | } |
331 | 329 | ||
332 | #[test] | 330 | #[test] |
@@ -348,12 +346,10 @@ fn name_res_works_for_broken_modules() { | |||
348 | pub struct Baz; | 346 | pub struct Baz; |
349 | ", | 347 | ", |
350 | ); | 348 | ); |
351 | assert_snapshot_matches!(map, | 349 | assert_snapshot_matches!(map, @r###" |
352 | @r###" | 350 | ⋮crate |
353 | crate | 351 | ⋮Baz: _ |
354 | Baz: _ | 352 | "###); |
355 | "### | ||
356 | ); | ||
357 | } | 353 | } |
358 | 354 | ||
359 | #[test] | 355 | #[test] |
@@ -369,19 +365,17 @@ fn item_map_using_self() { | |||
369 | pub struct Baz; | 365 | pub struct Baz; |
370 | ", | 366 | ", |
371 | ); | 367 | ); |
372 | assert_snapshot_matches!(map, | 368 | assert_snapshot_matches!(map, @r###" |
373 | @r###" | 369 | ⋮crate |
374 | crate | 370 | ⋮Baz: t v |
375 | Baz: t v | 371 | ⋮foo: t |
376 | foo: t | 372 | ⋮ |
377 | 373 | ⋮crate::foo | |
378 | crate::foo | 374 | ⋮bar: t |
379 | bar: t | 375 | ⋮ |
380 | 376 | ⋮crate::foo::bar | |
381 | crate::foo::bar | 377 | ⋮Baz: t v |
382 | Baz: t v | 378 | "###); |
383 | "### | ||
384 | ); | ||
385 | } | 379 | } |
386 | 380 | ||
387 | #[test] | 381 | #[test] |
@@ -400,12 +394,10 @@ fn item_map_across_crates() { | |||
400 | }, | 394 | }, |
401 | ); | 395 | ); |
402 | 396 | ||
403 | assert_snapshot_matches!(map, | 397 | assert_snapshot_matches!(map, @r###" |
404 | @r###" | 398 | ⋮crate |
405 | crate | 399 | ⋮Baz: t v |
406 | Baz: t v | 400 | "###); |
407 | "### | ||
408 | ); | ||
409 | } | 401 | } |
410 | 402 | ||
411 | #[test] | 403 | #[test] |
@@ -430,12 +422,14 @@ fn extern_crate_rename() { | |||
430 | }, | 422 | }, |
431 | ); | 423 | ); |
432 | 424 | ||
433 | assert_snapshot_matches!(map, | 425 | assert_snapshot_matches!(map, @r###" |
434 | @r###" | 426 | ⋮crate |
435 | crate | 427 | ⋮alloc_crate: t |
436 | Arc: t v | 428 | ⋮sync: t |
437 | "### | 429 | ⋮ |
438 | ); | 430 | ⋮crate::sync |
431 | ⋮Arc: t v | |
432 | "###); | ||
439 | } | 433 | } |
440 | 434 | ||
441 | #[test] | 435 | #[test] |
@@ -462,9 +456,13 @@ fn extern_crate_rename_2015_edition() { | |||
462 | 456 | ||
463 | assert_snapshot_matches!(map, | 457 | assert_snapshot_matches!(map, |
464 | @r###" | 458 | @r###" |
465 | crate | 459 | ⋮crate |
466 | Arc: t v | 460 | ⋮alloc_crate: t |
467 | "### | 461 | ⋮sync: t |
462 | ⋮ | |
463 | ⋮crate::sync | |
464 | ⋮Arc: t v | |
465 | "### | ||
468 | ); | 466 | ); |
469 | } | 467 | } |
470 | 468 | ||
@@ -490,12 +488,10 @@ fn import_across_source_roots() { | |||
490 | }, | 488 | }, |
491 | ); | 489 | ); |
492 | 490 | ||
493 | assert_snapshot_matches!(map, | 491 | assert_snapshot_matches!(map, @r###" |
494 | @r###" | 492 | ⋮crate |
495 | crate | 493 | ⋮C: t v |
496 | C: t v | 494 | "###); |
497 | "### | ||
498 | ); | ||
499 | } | 495 | } |
500 | 496 | ||
501 | #[test] | 497 | #[test] |
@@ -519,12 +515,10 @@ fn reexport_across_crates() { | |||
519 | }, | 515 | }, |
520 | ); | 516 | ); |
521 | 517 | ||
522 | assert_snapshot_matches!(map, | 518 | assert_snapshot_matches!(map, @r###" |
523 | @r###" | 519 | ⋮crate |
524 | crate | 520 | ⋮Baz: t v |
525 | Baz: t v | 521 | "###); |
526 | "### | ||
527 | ); | ||
528 | } | 522 | } |
529 | 523 | ||
530 | #[test] | 524 | #[test] |
@@ -544,13 +538,11 @@ fn values_dont_shadow_extern_crates() { | |||
544 | }, | 538 | }, |
545 | ); | 539 | ); |
546 | 540 | ||
547 | assert_snapshot_matches!(map, | 541 | assert_snapshot_matches!(map, @r###" |
548 | @r###" | 542 | ⋮crate |
549 | crate | 543 | ⋮Bar: t v |
550 | Bar: t v | 544 | ⋮foo: v |
551 | foo: v | 545 | "###); |
552 | "### | ||
553 | ); | ||
554 | } | 546 | } |
555 | 547 | ||
556 | #[test] | 548 | #[test] |
diff --git a/crates/ra_hir/src/nameres/tests/globs.rs b/crates/ra_hir/src/nameres/tests/globs.rs index 6e50c7ff6..e1519ca6b 100644 --- a/crates/ra_hir/src/nameres/tests/globs.rs +++ b/crates/ra_hir/src/nameres/tests/globs.rs | |||
@@ -18,20 +18,20 @@ fn glob_1() { | |||
18 | ", | 18 | ", |
19 | ); | 19 | ); |
20 | assert_snapshot_matches!(map, @r###" | 20 | assert_snapshot_matches!(map, @r###" |
21 | crate | 21 | ⋮crate |
22 | bar: t | 22 | ⋮Baz: t v |
23 | Foo: t v | 23 | ⋮Foo: t v |
24 | Baz: t v | 24 | ⋮bar: t |
25 | foo: t | 25 | ⋮foo: t |
26 | 26 | ⋮ | |
27 | crate::foo | 27 | ⋮crate::foo |
28 | bar: t | 28 | ⋮Baz: t v |
29 | Foo: t v | 29 | ⋮Foo: t v |
30 | Baz: t v | 30 | ⋮bar: t |
31 | 31 | ⋮ | |
32 | crate::foo::bar | 32 | ⋮crate::foo::bar |
33 | Baz: t v | 33 | ⋮Baz: t v |
34 | "### | 34 | "### |
35 | ); | 35 | ); |
36 | } | 36 | } |
37 | 37 | ||
@@ -54,22 +54,22 @@ fn glob_2() { | |||
54 | ", | 54 | ", |
55 | ); | 55 | ); |
56 | assert_snapshot_matches!(map, @r###" | 56 | assert_snapshot_matches!(map, @r###" |
57 | crate | 57 | ⋮crate |
58 | bar: t | 58 | ⋮Baz: t v |
59 | Foo: t v | 59 | ⋮Foo: t v |
60 | Baz: t v | 60 | ⋮bar: t |
61 | foo: t | 61 | ⋮foo: t |
62 | 62 | ⋮ | |
63 | crate::foo | 63 | ⋮crate::foo |
64 | bar: t | 64 | ⋮Baz: t v |
65 | Foo: t v | 65 | ⋮Foo: t v |
66 | Baz: t v | 66 | ⋮bar: t |
67 | 67 | ⋮ | |
68 | crate::foo::bar | 68 | ⋮crate::foo::bar |
69 | bar: t | 69 | ⋮Baz: t v |
70 | Foo: t v | 70 | ⋮Foo: t v |
71 | Baz: t v | 71 | ⋮bar: t |
72 | "### | 72 | "### |
73 | ); | 73 | ); |
74 | } | 74 | } |
75 | 75 | ||
@@ -90,9 +90,9 @@ fn glob_across_crates() { | |||
90 | }, | 90 | }, |
91 | ); | 91 | ); |
92 | assert_snapshot_matches!(map, @r###" | 92 | assert_snapshot_matches!(map, @r###" |
93 | crate | 93 | ⋮crate |
94 | Baz: t v | 94 | ⋮Baz: t v |
95 | "### | 95 | "### |
96 | ); | 96 | ); |
97 | } | 97 | } |
98 | 98 | ||
@@ -109,10 +109,10 @@ fn glob_enum() { | |||
109 | ", | 109 | ", |
110 | ); | 110 | ); |
111 | assert_snapshot_matches!(map, @r###" | 111 | assert_snapshot_matches!(map, @r###" |
112 | crate | 112 | ⋮crate |
113 | Foo: t | 113 | ⋮Bar: t v |
114 | Bar: t v | 114 | ⋮Baz: t v |
115 | Baz: t v | 115 | ⋮Foo: t |
116 | "### | 116 | "### |
117 | ); | 117 | ); |
118 | } | 118 | } |
diff --git a/crates/ra_hir/src/nameres/tests/macros.rs b/crates/ra_hir/src/nameres/tests/macros.rs index 8781b026b..f7ca380ad 100644 --- a/crates/ra_hir/src/nameres/tests/macros.rs +++ b/crates/ra_hir/src/nameres/tests/macros.rs | |||
@@ -18,14 +18,14 @@ fn macro_rules_are_globally_visible() { | |||
18 | ", | 18 | ", |
19 | ); | 19 | ); |
20 | assert_snapshot_matches!(map, @r###" | 20 | assert_snapshot_matches!(map, @r###" |
21 | crate | 21 | ⋮crate |
22 | nested: t | 22 | ⋮Foo: t v |
23 | Foo: t v | 23 | ⋮nested: t |
24 | 24 | ⋮ | |
25 | crate::nested | 25 | ⋮crate::nested |
26 | Bar: t v | 26 | ⋮Bar: t v |
27 | Baz: t v | 27 | ⋮Baz: t v |
28 | "###); | 28 | "###); |
29 | } | 29 | } |
30 | 30 | ||
31 | #[test] | 31 | #[test] |
@@ -45,15 +45,15 @@ fn macro_rules_can_define_modules() { | |||
45 | ", | 45 | ", |
46 | ); | 46 | ); |
47 | assert_snapshot_matches!(map, @r###" | 47 | assert_snapshot_matches!(map, @r###" |
48 | crate | 48 | ⋮crate |
49 | n1: t | 49 | ⋮n1: t |
50 | 50 | ⋮ | |
51 | crate::n1 | 51 | ⋮crate::n1 |
52 | n2: t | 52 | ⋮n2: t |
53 | 53 | ⋮ | |
54 | crate::n1::n2 | 54 | ⋮crate::n1::n2 |
55 | X: t v | 55 | ⋮X: t v |
56 | "###); | 56 | "###); |
57 | } | 57 | } |
58 | 58 | ||
59 | #[test] | 59 | #[test] |
@@ -81,14 +81,14 @@ fn macro_rules_from_other_crates_are_visible() { | |||
81 | }, | 81 | }, |
82 | ); | 82 | ); |
83 | assert_snapshot_matches!(map, @r###" | 83 | assert_snapshot_matches!(map, @r###" |
84 | crate | 84 | ⋮crate |
85 | bar: t | 85 | ⋮Bar: t v |
86 | Foo: t v | 86 | ⋮Foo: t v |
87 | Bar: t v | 87 | ⋮bar: t |
88 | 88 | ⋮ | |
89 | crate::bar | 89 | ⋮crate::bar |
90 | bar: t | 90 | ⋮Bar: t v |
91 | Foo: t v | 91 | ⋮Foo: t v |
92 | Bar: t v | 92 | ⋮bar: t |
93 | "###); | 93 | "###); |
94 | } | 94 | } |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs index cfe07156b..3679a2242 100644 --- a/crates/ra_hir/src/ty.rs +++ b/crates/ra_hir/src/ty.rs | |||
@@ -19,8 +19,8 @@ use std::{fmt, mem}; | |||
19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; | 19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; |
20 | use display::{HirDisplay, HirFormatter}; | 20 | use display::{HirDisplay, HirFormatter}; |
21 | 21 | ||
22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates}; | 22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates, generic_defaults}; |
23 | pub(crate) use infer::{infer, InferenceResult, InferTy}; | 23 | pub(crate) use infer::{infer_query, InferenceResult, InferTy}; |
24 | pub use lower::CallableDef; | 24 | pub use lower::CallableDef; |
25 | 25 | ||
26 | /// A type constructor or type name: this might be something like the primitive | 26 | /// A type constructor or type name: this might be something like the primitive |
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs index 1e7d97f51..a48272981 100644 --- a/crates/ra_hir/src/ty/infer.rs +++ b/crates/ra_hir/src/ty/infer.rs | |||
@@ -23,6 +23,7 @@ use ena::unify::{InPlaceUnificationTable, UnifyKey, UnifyValue, NoError}; | |||
23 | use rustc_hash::FxHashMap; | 23 | use rustc_hash::FxHashMap; |
24 | 24 | ||
25 | use ra_arena::map::ArenaMap; | 25 | use ra_arena::map::ArenaMap; |
26 | use ra_prof::profile; | ||
26 | use test_utils::tested_by; | 27 | use test_utils::tested_by; |
27 | 28 | ||
28 | use crate::{ | 29 | use crate::{ |
@@ -51,7 +52,8 @@ use super::{ | |||
51 | mod unify; | 52 | mod unify; |
52 | 53 | ||
53 | /// The entry point of type inference. | 54 | /// The entry point of type inference. |
54 | pub fn infer(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> { | 55 | pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> { |
56 | let _p = profile("infer_query"); | ||
55 | db.check_canceled(); | 57 | db.check_canceled(); |
56 | let body = def.body(db); | 58 | let body = def.body(db); |
57 | let resolver = def.resolver(db); | 59 | let resolver = def.resolver(db); |
diff --git a/crates/ra_hir/src/ty/infer/unify.rs b/crates/ra_hir/src/ty/infer/unify.rs index 8ca7e957d..bc9719725 100644 --- a/crates/ra_hir/src/ty/infer/unify.rs +++ b/crates/ra_hir/src/ty/infer/unify.rs | |||
@@ -56,7 +56,12 @@ where | |||
56 | self.var_stack.pop(); | 56 | self.var_stack.pop(); |
57 | result | 57 | result |
58 | } else { | 58 | } else { |
59 | let free_var = InferTy::TypeVar(self.ctx.var_unification_table.find(inner)); | 59 | let root = self.ctx.var_unification_table.find(inner); |
60 | let free_var = match tv { | ||
61 | InferTy::TypeVar(_) => InferTy::TypeVar(root), | ||
62 | InferTy::IntVar(_) => InferTy::IntVar(root), | ||
63 | InferTy::FloatVar(_) => InferTy::FloatVar(root), | ||
64 | }; | ||
60 | let position = self.add(free_var); | 65 | let position = self.add(free_var); |
61 | Ty::Bound(position as u32) | 66 | Ty::Bound(position as u32) |
62 | } | 67 | } |
diff --git a/crates/ra_hir/src/ty/lower.rs b/crates/ra_hir/src/ty/lower.rs index 09d26ce5a..a1a2d0f6b 100644 --- a/crates/ra_hir/src/ty/lower.rs +++ b/crates/ra_hir/src/ty/lower.rs | |||
@@ -9,17 +9,18 @@ use std::sync::Arc; | |||
9 | use std::iter; | 9 | use std::iter; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
12 | Function, Struct, StructField, Enum, EnumVariant, Path, | 12 | Function, Struct, StructField, Enum, EnumVariant, Path, ModuleDef, TypeAlias, Const, Static, |
13 | ModuleDef, TypeAlias, | ||
14 | Const, Static, | ||
15 | HirDatabase, | 13 | HirDatabase, |
16 | type_ref::TypeRef, | 14 | type_ref::TypeRef, |
17 | name::KnownName, | 15 | name::KnownName, |
18 | nameres::Namespace, | 16 | nameres::Namespace, |
19 | resolve::{Resolver, Resolution}, | 17 | resolve::{Resolver, Resolution}, |
20 | path::{PathSegment, GenericArg}, | 18 | path::{PathSegment, GenericArg}, |
21 | generics::{GenericParams, HasGenericParams}, | 19 | generics::{HasGenericParams}, |
22 | adt::VariantDef, Trait, generics::{ WherePredicate, GenericDef} | 20 | adt::VariantDef, |
21 | Trait, | ||
22 | generics::{WherePredicate, GenericDef}, | ||
23 | ty::AdtDef, | ||
23 | }; | 24 | }; |
24 | use super::{Ty, primitive, FnSig, Substs, TypeCtor, TraitRef, GenericPredicate}; | 25 | use super::{Ty, primitive, FnSig, Substs, TypeCtor, TraitRef, GenericPredicate}; |
25 | 26 | ||
@@ -120,15 +121,15 @@ impl Ty { | |||
120 | segment: &PathSegment, | 121 | segment: &PathSegment, |
121 | resolved: TypableDef, | 122 | resolved: TypableDef, |
122 | ) -> Substs { | 123 | ) -> Substs { |
123 | let def_generics = match resolved { | 124 | let def_generic: Option<GenericDef> = match resolved { |
124 | TypableDef::Function(func) => func.generic_params(db), | 125 | TypableDef::Function(func) => Some(func.into()), |
125 | TypableDef::Struct(s) => s.generic_params(db), | 126 | TypableDef::Struct(s) => Some(s.into()), |
126 | TypableDef::Enum(e) => e.generic_params(db), | 127 | TypableDef::Enum(e) => Some(e.into()), |
127 | TypableDef::EnumVariant(var) => var.parent_enum(db).generic_params(db), | 128 | TypableDef::EnumVariant(var) => Some(var.parent_enum(db).into()), |
128 | TypableDef::TypeAlias(t) => t.generic_params(db), | 129 | TypableDef::TypeAlias(t) => Some(t.into()), |
129 | TypableDef::Const(_) | TypableDef::Static(_) => GenericParams::default().into(), | 130 | TypableDef::Const(_) | TypableDef::Static(_) => None, |
130 | }; | 131 | }; |
131 | substs_from_path_segment(db, resolver, segment, &def_generics, false) | 132 | substs_from_path_segment(db, resolver, segment, def_generic, false) |
132 | } | 133 | } |
133 | 134 | ||
134 | /// Collect generic arguments from a path into a `Substs`. See also | 135 | /// Collect generic arguments from a path into a `Substs`. See also |
@@ -172,10 +173,12 @@ pub(super) fn substs_from_path_segment( | |||
172 | db: &impl HirDatabase, | 173 | db: &impl HirDatabase, |
173 | resolver: &Resolver, | 174 | resolver: &Resolver, |
174 | segment: &PathSegment, | 175 | segment: &PathSegment, |
175 | def_generics: &GenericParams, | 176 | def_generic: Option<GenericDef>, |
176 | add_self_param: bool, | 177 | add_self_param: bool, |
177 | ) -> Substs { | 178 | ) -> Substs { |
178 | let mut substs = Vec::new(); | 179 | let mut substs = Vec::new(); |
180 | let def_generics = def_generic.map(|def| def.generic_params(db)).unwrap_or_default(); | ||
181 | |||
179 | let parent_param_count = def_generics.count_parent_params(); | 182 | let parent_param_count = def_generics.count_parent_params(); |
180 | substs.extend(iter::repeat(Ty::Unknown).take(parent_param_count)); | 183 | substs.extend(iter::repeat(Ty::Unknown).take(parent_param_count)); |
181 | if add_self_param { | 184 | if add_self_param { |
@@ -199,12 +202,24 @@ pub(super) fn substs_from_path_segment( | |||
199 | } | 202 | } |
200 | } | 203 | } |
201 | // add placeholders for args that were not provided | 204 | // add placeholders for args that were not provided |
202 | // FIXME: handle defaults | ||
203 | let supplied_params = substs.len(); | 205 | let supplied_params = substs.len(); |
204 | for _ in supplied_params..def_generics.count_params_including_parent() { | 206 | for _ in supplied_params..def_generics.count_params_including_parent() { |
205 | substs.push(Ty::Unknown); | 207 | substs.push(Ty::Unknown); |
206 | } | 208 | } |
207 | assert_eq!(substs.len(), def_generics.count_params_including_parent()); | 209 | assert_eq!(substs.len(), def_generics.count_params_including_parent()); |
210 | |||
211 | // handle defaults | ||
212 | if let Some(def_generic) = def_generic { | ||
213 | let default_substs = db.generic_defaults(def_generic); | ||
214 | assert_eq!(substs.len(), default_substs.len()); | ||
215 | |||
216 | for (i, default_ty) in default_substs.iter().enumerate() { | ||
217 | if substs[i] == Ty::Unknown { | ||
218 | substs[i] = default_ty.clone(); | ||
219 | } | ||
220 | } | ||
221 | } | ||
222 | |||
208 | Substs(substs.into()) | 223 | Substs(substs.into()) |
209 | } | 224 | } |
210 | 225 | ||
@@ -249,7 +264,7 @@ impl TraitRef { | |||
249 | resolved: Trait, | 264 | resolved: Trait, |
250 | ) -> Substs { | 265 | ) -> Substs { |
251 | let segment = path.segments.last().expect("path should have at least one segment"); | 266 | let segment = path.segments.last().expect("path should have at least one segment"); |
252 | substs_from_path_segment(db, resolver, segment, &resolved.generic_params(db), true) | 267 | substs_from_path_segment(db, resolver, segment, Some(resolved.into()), true) |
253 | } | 268 | } |
254 | 269 | ||
255 | pub(crate) fn for_trait(db: &impl HirDatabase, trait_: Trait) -> TraitRef { | 270 | pub(crate) fn for_trait(db: &impl HirDatabase, trait_: Trait) -> TraitRef { |
@@ -274,9 +289,9 @@ impl TraitRef { | |||
274 | pub(crate) fn type_for_def(db: &impl HirDatabase, def: TypableDef, ns: Namespace) -> Ty { | 289 | pub(crate) fn type_for_def(db: &impl HirDatabase, def: TypableDef, ns: Namespace) -> Ty { |
275 | match (def, ns) { | 290 | match (def, ns) { |
276 | (TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f), | 291 | (TypableDef::Function(f), Namespace::Values) => type_for_fn(db, f), |
277 | (TypableDef::Struct(s), Namespace::Types) => type_for_struct(db, s), | 292 | (TypableDef::Struct(s), Namespace::Types) => type_for_adt(db, s), |
278 | (TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s), | 293 | (TypableDef::Struct(s), Namespace::Values) => type_for_struct_constructor(db, s), |
279 | (TypableDef::Enum(e), Namespace::Types) => type_for_enum(db, e), | 294 | (TypableDef::Enum(e), Namespace::Types) => type_for_adt(db, e), |
280 | (TypableDef::EnumVariant(v), Namespace::Values) => type_for_enum_variant_constructor(db, v), | 295 | (TypableDef::EnumVariant(v), Namespace::Values) => type_for_enum_variant_constructor(db, v), |
281 | (TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t), | 296 | (TypableDef::TypeAlias(t), Namespace::Types) => type_for_type_alias(db, t), |
282 | (TypableDef::Const(c), Namespace::Values) => type_for_const(db, c), | 297 | (TypableDef::Const(c), Namespace::Values) => type_for_const(db, c), |
@@ -331,6 +346,22 @@ pub(crate) fn generic_predicates( | |||
331 | predicates.into() | 346 | predicates.into() |
332 | } | 347 | } |
333 | 348 | ||
349 | /// Resolve the default type params from generics | ||
350 | pub(crate) fn generic_defaults(db: &impl HirDatabase, def: GenericDef) -> Substs { | ||
351 | let resolver = def.resolver(db); | ||
352 | let generic_params = def.generic_params(db); | ||
353 | |||
354 | let defaults = generic_params | ||
355 | .params_including_parent() | ||
356 | .into_iter() | ||
357 | .map(|p| { | ||
358 | p.default.as_ref().map_or(Ty::Unknown, |path| Ty::from_hir_path(db, &resolver, path)) | ||
359 | }) | ||
360 | .collect::<Vec<_>>(); | ||
361 | |||
362 | Substs(defaults.into()) | ||
363 | } | ||
364 | |||
334 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { | 365 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { |
335 | let signature = def.signature(db); | 366 | let signature = def.signature(db); |
336 | let resolver = def.resolver(db); | 367 | let resolver = def.resolver(db); |
@@ -375,7 +406,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig { | |||
375 | .iter() | 406 | .iter() |
376 | .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) | 407 | .map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) |
377 | .collect::<Vec<_>>(); | 408 | .collect::<Vec<_>>(); |
378 | let ret = type_for_struct(db, def); | 409 | let ret = type_for_adt(db, def); |
379 | FnSig::from_params_and_return(params, ret) | 410 | FnSig::from_params_and_return(params, ret) |
380 | } | 411 | } |
381 | 412 | ||
@@ -383,7 +414,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> FnSig { | |||
383 | fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty { | 414 | fn type_for_struct_constructor(db: &impl HirDatabase, def: Struct) -> Ty { |
384 | let var_data = def.variant_data(db); | 415 | let var_data = def.variant_data(db); |
385 | if var_data.fields().is_none() { | 416 | if var_data.fields().is_none() { |
386 | return type_for_struct(db, def); // Unit struct | 417 | return type_for_adt(db, def); // Unit struct |
387 | } | 418 | } |
388 | let generics = def.generic_params(db); | 419 | let generics = def.generic_params(db); |
389 | let substs = Substs::identity(&generics); | 420 | let substs = Substs::identity(&generics); |
@@ -403,7 +434,7 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) | |||
403 | .collect::<Vec<_>>(); | 434 | .collect::<Vec<_>>(); |
404 | let generics = def.parent_enum(db).generic_params(db); | 435 | let generics = def.parent_enum(db).generic_params(db); |
405 | let substs = Substs::identity(&generics); | 436 | let substs = Substs::identity(&generics); |
406 | let ret = type_for_enum(db, def.parent_enum(db)).subst(&substs); | 437 | let ret = type_for_adt(db, def.parent_enum(db)).subst(&substs); |
407 | FnSig::from_params_and_return(params, ret) | 438 | FnSig::from_params_and_return(params, ret) |
408 | } | 439 | } |
409 | 440 | ||
@@ -411,21 +442,16 @@ fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) | |||
411 | fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty { | 442 | fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariant) -> Ty { |
412 | let var_data = def.variant_data(db); | 443 | let var_data = def.variant_data(db); |
413 | if var_data.fields().is_none() { | 444 | if var_data.fields().is_none() { |
414 | return type_for_enum(db, def.parent_enum(db)); // Unit variant | 445 | return type_for_adt(db, def.parent_enum(db)); // Unit variant |
415 | } | 446 | } |
416 | let generics = def.parent_enum(db).generic_params(db); | 447 | let generics = def.parent_enum(db).generic_params(db); |
417 | let substs = Substs::identity(&generics); | 448 | let substs = Substs::identity(&generics); |
418 | Ty::apply(TypeCtor::FnDef(def.into()), substs) | 449 | Ty::apply(TypeCtor::FnDef(def.into()), substs) |
419 | } | 450 | } |
420 | 451 | ||
421 | fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty { | 452 | fn type_for_adt(db: &impl HirDatabase, adt: impl Into<AdtDef> + HasGenericParams) -> Ty { |
422 | let generics = s.generic_params(db); | 453 | let generics = adt.generic_params(db); |
423 | Ty::apply(TypeCtor::Adt(s.into()), Substs::identity(&generics)) | 454 | Ty::apply(TypeCtor::Adt(adt.into()), Substs::identity(&generics)) |
424 | } | ||
425 | |||
426 | fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Ty { | ||
427 | let generics = s.generic_params(db); | ||
428 | Ty::apply(TypeCtor::Adt(s.into()), Substs::identity(&generics)) | ||
429 | } | 455 | } |
430 | 456 | ||
431 | fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty { | 457 | fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty { |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index f8364203d..cd24faba5 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs | |||
@@ -1449,6 +1449,35 @@ fn test() { | |||
1449 | } | 1449 | } |
1450 | 1450 | ||
1451 | #[test] | 1451 | #[test] |
1452 | fn infer_associated_method_generics_with_default_param() { | ||
1453 | assert_snapshot_matches!( | ||
1454 | infer(r#" | ||
1455 | struct Gen<T=u32> { | ||
1456 | val: T | ||
1457 | } | ||
1458 | |||
1459 | impl<T> Gen<T> { | ||
1460 | pub fn make() -> Gen<T> { | ||
1461 | loop { } | ||
1462 | } | ||
1463 | } | ||
1464 | |||
1465 | fn test() { | ||
1466 | let a = Gen::make(); | ||
1467 | } | ||
1468 | "#), | ||
1469 | @r###" | ||
1470 | [80; 104) '{ ... }': ! | ||
1471 | [90; 98) 'loop { }': ! | ||
1472 | [95; 98) '{ }': () | ||
1473 | [118; 146) '{ ...e(); }': () | ||
1474 | [128; 129) 'a': Gen<u32> | ||
1475 | [132; 141) 'Gen::make': fn make<u32>() -> Gen<T> | ||
1476 | [132; 143) 'Gen::make()': Gen<u32>"### | ||
1477 | ); | ||
1478 | } | ||
1479 | |||
1480 | #[test] | ||
1452 | fn infer_associated_method_generics_without_args() { | 1481 | fn infer_associated_method_generics_without_args() { |
1453 | assert_snapshot_matches!( | 1482 | assert_snapshot_matches!( |
1454 | infer(r#" | 1483 | infer(r#" |
diff --git a/crates/ra_hir/src/ty/traits.rs b/crates/ra_hir/src/ty/traits.rs index 7de04c044..e6c78c0d4 100644 --- a/crates/ra_hir/src/ty/traits.rs +++ b/crates/ra_hir/src/ty/traits.rs | |||
@@ -4,6 +4,7 @@ use std::sync::{Arc, Mutex}; | |||
4 | use rustc_hash::FxHashSet; | 4 | use rustc_hash::FxHashSet; |
5 | use log::debug; | 5 | use log::debug; |
6 | use chalk_ir::cast::Cast; | 6 | use chalk_ir::cast::Cast; |
7 | use ra_prof::profile; | ||
7 | 8 | ||
8 | use crate::{Crate, Trait, db::HirDatabase, ImplBlock}; | 9 | use crate::{Crate, Trait, db::HirDatabase, ImplBlock}; |
9 | use super::{TraitRef, Ty, Canonical}; | 10 | use super::{TraitRef, Ty, Canonical}; |
@@ -25,7 +26,7 @@ struct ChalkContext<'a, DB> { | |||
25 | krate: Crate, | 26 | krate: Crate, |
26 | } | 27 | } |
27 | 28 | ||
28 | pub(crate) fn solver(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver>> { | 29 | pub(crate) fn solver_query(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver>> { |
29 | // krate parameter is just so we cache a unique solver per crate | 30 | // krate parameter is just so we cache a unique solver per crate |
30 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; | 31 | let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; |
31 | debug!("Creating new solver for crate {:?}", _krate); | 32 | debug!("Creating new solver for crate {:?}", _krate); |
@@ -33,7 +34,7 @@ pub(crate) fn solver(_db: &impl HirDatabase, _krate: Crate) -> Arc<Mutex<Solver> | |||
33 | } | 34 | } |
34 | 35 | ||
35 | /// Collects impls for the given trait in the whole dependency tree of `krate`. | 36 | /// Collects impls for the given trait in the whole dependency tree of `krate`. |
36 | pub(crate) fn impls_for_trait( | 37 | pub(crate) fn impls_for_trait_query( |
37 | db: &impl HirDatabase, | 38 | db: &impl HirDatabase, |
38 | krate: Crate, | 39 | krate: Crate, |
39 | trait_: Trait, | 40 | trait_: Trait, |
@@ -60,7 +61,7 @@ fn solve( | |||
60 | let context = ChalkContext { db, krate }; | 61 | let context = ChalkContext { db, krate }; |
61 | let solver = db.solver(krate); | 62 | let solver = db.solver(krate); |
62 | debug!("solve goal: {:?}", goal); | 63 | debug!("solve goal: {:?}", goal); |
63 | let solution = solver.lock().unwrap().solve(&context, goal); | 64 | let solution = solver.lock().unwrap().solve_with_fuel(&context, goal, Some(1000)); |
64 | debug!("solve({:?}) => {:?}", goal, solution); | 65 | debug!("solve({:?}) => {:?}", goal, solution); |
65 | solution | 66 | solution |
66 | } | 67 | } |
@@ -76,11 +77,12 @@ pub enum Obligation { | |||
76 | } | 77 | } |
77 | 78 | ||
78 | /// Check using Chalk whether trait is implemented for given parameters including `Self` type. | 79 | /// Check using Chalk whether trait is implemented for given parameters including `Self` type. |
79 | pub(crate) fn implements( | 80 | pub(crate) fn implements_query( |
80 | db: &impl HirDatabase, | 81 | db: &impl HirDatabase, |
81 | krate: Crate, | 82 | krate: Crate, |
82 | trait_ref: Canonical<TraitRef>, | 83 | trait_ref: Canonical<TraitRef>, |
83 | ) -> Option<Solution> { | 84 | ) -> Option<Solution> { |
85 | let _p = profile("implements_query"); | ||
84 | let goal: chalk_ir::Goal = trait_ref.value.to_chalk(db).cast(); | 86 | let goal: chalk_ir::Goal = trait_ref.value.to_chalk(db).cast(); |
85 | debug!("goal: {:?}", goal); | 87 | debug!("goal: {:?}", goal); |
86 | let env = chalk_ir::Environment::new(); | 88 | let env = chalk_ir::Environment::new(); |
diff --git a/crates/ra_ide_api/Cargo.toml b/crates/ra_ide_api/Cargo.toml index 333706c1a..d399d5e2e 100644 --- a/crates/ra_ide_api/Cargo.toml +++ b/crates/ra_ide_api/Cargo.toml | |||
@@ -29,7 +29,7 @@ test_utils = { path = "../test_utils" } | |||
29 | ra_assists = { path = "../ra_assists" } | 29 | ra_assists = { path = "../ra_assists" } |
30 | 30 | ||
31 | [dev-dependencies] | 31 | [dev-dependencies] |
32 | insta = "0.7.0" | 32 | insta = "0.8.1" |
33 | 33 | ||
34 | [dev-dependencies.proptest] | 34 | [dev-dependencies.proptest] |
35 | version = "0.9.0" | 35 | version = "0.9.0" |
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index e23d178b0..9a0eb2c14 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs | |||
@@ -4,7 +4,7 @@ use itertools::Itertools; | |||
4 | use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; | 4 | use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; |
5 | use ra_db::SourceDatabase; | 5 | use ra_db::SourceDatabase; |
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | Location, SourceFile, SyntaxKind, TextRange, SyntaxNode, | 7 | T, Location, SourceFile, TextRange, SyntaxNode, |
8 | ast::{self, AstNode, NamedFieldList, NamedField}, | 8 | ast::{self, AstNode, NamedFieldList, NamedField}, |
9 | }; | 9 | }; |
10 | use ra_assists::ast_editor::{AstEditor, AstBuilder}; | 10 | use ra_assists::ast_editor::{AstEditor, AstBuilder}; |
@@ -130,9 +130,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement( | |||
130 | single_use_tree: &ast::UseTree, | 130 | single_use_tree: &ast::UseTree, |
131 | ) -> Option<TextEdit> { | 131 | ) -> Option<TextEdit> { |
132 | let use_tree_list_node = single_use_tree.syntax().parent()?; | 132 | let use_tree_list_node = single_use_tree.syntax().parent()?; |
133 | if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() | 133 | if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] { |
134 | == SyntaxKind::SELF_KW | ||
135 | { | ||
136 | let start = use_tree_list_node.prev_sibling_or_token()?.range().start(); | 134 | let start = use_tree_list_node.prev_sibling_or_token()?.range().start(); |
137 | let end = use_tree_list_node.range().end(); | 135 | let end = use_tree_list_node.range().end(); |
138 | let range = TextRange::from_to(start, end); | 136 | let range = TextRange::from_to(start, end); |
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 163fa8c3c..4553faad0 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs | |||
@@ -157,7 +157,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> { | |||
157 | }) | 157 | }) |
158 | .next() | 158 | .next() |
159 | .and_then(|it| it.as_token()) | 159 | .and_then(|it| it.as_token()) |
160 | .filter(|node| node.kind() == COMMA) | 160 | .filter(|node| node.kind() == T![,]) |
161 | } | 161 | } |
162 | 162 | ||
163 | if let Some(comma_node) = nearby_comma(node, Direction::Prev) { | 163 | if let Some(comma_node) = nearby_comma(node, Direction::Prev) { |
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs index 598717311..4ca005466 100644 --- a/crates/ra_ide_api/src/join_lines.rs +++ b/crates/ra_ide_api/src/join_lines.rs | |||
@@ -1,7 +1,8 @@ | |||
1 | use itertools::Itertools; | 1 | use itertools::Itertools; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | T, | ||
3 | SourceFile, TextRange, TextUnit, SyntaxNode, SyntaxElement, SyntaxToken, | 4 | SourceFile, TextRange, TextUnit, SyntaxNode, SyntaxElement, SyntaxToken, |
4 | SyntaxKind::{self, WHITESPACE, COMMA, R_CURLY, R_PAREN, R_BRACK}, | 5 | SyntaxKind::{self, WHITESPACE}, |
5 | algo::{find_covering_element, non_trivia_sibling}, | 6 | algo::{find_covering_element, non_trivia_sibling}, |
6 | ast::{self, AstNode, AstToken}, | 7 | ast::{self, AstNode, AstToken}, |
7 | Direction, | 8 | Direction, |
@@ -89,7 +90,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn | |||
89 | if is_trailing_comma(prev.kind(), next.kind()) { | 90 | if is_trailing_comma(prev.kind(), next.kind()) { |
90 | // Removes: trailing comma, newline (incl. surrounding whitespace) | 91 | // Removes: trailing comma, newline (incl. surrounding whitespace) |
91 | edit.delete(TextRange::from_to(prev.range().start(), token.range().end())); | 92 | edit.delete(TextRange::from_to(prev.range().start(), token.range().end())); |
92 | } else if prev.kind() == COMMA && next.kind() == R_CURLY { | 93 | } else if prev.kind() == T![,] && next.kind() == T!['}'] { |
93 | // Removes: comma, newline (incl. surrounding whitespace) | 94 | // Removes: comma, newline (incl. surrounding whitespace) |
94 | let space = if let Some(left) = prev.prev_sibling_or_token() { | 95 | let space = if let Some(left) = prev.prev_sibling_or_token() { |
95 | compute_ws(left.kind(), next.kind()) | 96 | compute_ws(left.kind(), next.kind()) |
@@ -116,7 +117,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn | |||
116 | 117 | ||
117 | fn has_comma_after(node: &SyntaxNode) -> bool { | 118 | fn has_comma_after(node: &SyntaxNode) -> bool { |
118 | match non_trivia_sibling(node.into(), Direction::Next) { | 119 | match non_trivia_sibling(node.into(), Direction::Next) { |
119 | Some(n) => n.kind() == COMMA, | 120 | Some(n) => n.kind() == T![,], |
120 | _ => false, | 121 | _ => false, |
121 | } | 122 | } |
122 | } | 123 | } |
@@ -150,7 +151,7 @@ fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Optio | |||
150 | 151 | ||
151 | fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { | 152 | fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool { |
152 | match (left, right) { | 153 | match (left, right) { |
153 | (COMMA, R_PAREN) | (COMMA, R_BRACK) => true, | 154 | (T![,], T![')']) | (T![,], T![']']) => true, |
154 | _ => false, | 155 | _ => false, |
155 | } | 156 | } |
156 | } | 157 | } |
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs index bebd16a69..eaa4b620c 100644 --- a/crates/ra_ide_api/src/matching_brace.rs +++ b/crates/ra_ide_api/src/matching_brace.rs | |||
@@ -1,13 +1,14 @@ | |||
1 | use ra_syntax::{ | 1 | use ra_syntax::{ |
2 | SourceFile, TextUnit, | 2 | SourceFile, TextUnit, |
3 | algo::find_token_at_offset, | 3 | algo::find_token_at_offset, |
4 | SyntaxKind::{self, *}, | 4 | SyntaxKind::{self}, |
5 | ast::AstNode, | 5 | ast::AstNode, |
6 | T | ||
6 | }; | 7 | }; |
7 | 8 | ||
8 | pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { | 9 | pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> { |
9 | const BRACES: &[SyntaxKind] = | 10 | const BRACES: &[SyntaxKind] = |
10 | &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE]; | 11 | &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]]; |
11 | let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset) | 12 | let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset) |
12 | .filter_map(|node| { | 13 | .filter_map(|node| { |
13 | let idx = BRACES.iter().position(|&brace| brace == node.kind())?; | 14 | let idx = BRACES.iter().position(|&brace| brace == node.kind())?; |
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index d9a28d2b5..2158291dc 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | use rustc_hash::FxHashSet; | 1 | use rustc_hash::FxHashSet; |
2 | 2 | ||
3 | use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind::*, SyntaxElement}; | 3 | use ra_syntax::{ast, AstNode, TextRange, Direction, SyntaxKind, SyntaxKind::*, SyntaxElement, T}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_db::SourceDatabase; |
5 | 5 | ||
6 | use crate::{FileId, db::RootDatabase}; | 6 | use crate::{FileId, db::RootDatabase}; |
@@ -11,6 +11,21 @@ pub struct HighlightedRange { | |||
11 | pub tag: &'static str, | 11 | pub tag: &'static str, |
12 | } | 12 | } |
13 | 13 | ||
14 | fn is_control_keyword(kind: SyntaxKind) -> bool { | ||
15 | match kind { | ||
16 | T![for] | ||
17 | | T![loop] | ||
18 | | T![while] | ||
19 | | T![continue] | ||
20 | | T![break] | ||
21 | | T![if] | ||
22 | | T![else] | ||
23 | | T![match] | ||
24 | | T![return] => true, | ||
25 | _ => false, | ||
26 | } | ||
27 | } | ||
28 | |||
14 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { | 29 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { |
15 | let source_file = db.parse(file_id); | 30 | let source_file = db.parse(file_id); |
16 | 31 | ||
@@ -29,6 +44,8 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
29 | NAME => "function", | 44 | NAME => "function", |
30 | INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", | 45 | INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE => "literal", |
31 | LIFETIME => "parameter", | 46 | LIFETIME => "parameter", |
47 | T![unsafe] => "keyword.unsafe", | ||
48 | k if is_control_keyword(k) => "keyword.control", | ||
32 | k if k.is_keyword() => "keyword", | 49 | k if k.is_keyword() => "keyword", |
33 | _ => { | 50 | _ => { |
34 | if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) { | 51 | if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) { |
@@ -40,7 +57,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa | |||
40 | let mut range_end = name_ref.syntax().range().end(); | 57 | let mut range_end = name_ref.syntax().range().end(); |
41 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { | 58 | for sibling in path.syntax().siblings_with_tokens(Direction::Next) { |
42 | match sibling.kind() { | 59 | match sibling.kind() { |
43 | EXCL | IDENT => range_end = sibling.range().end(), | 60 | T![!] | IDENT => range_end = sibling.range().end(), |
44 | _ => (), | 61 | _ => (), |
45 | } | 62 | } |
46 | } | 63 | } |
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs index 3a4dbb5f5..7fff8deff 100644 --- a/crates/ra_mbe/src/mbe_expander.rs +++ b/crates/ra_mbe/src/mbe_expander.rs | |||
@@ -281,7 +281,11 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings, | |||
281 | return Err(ExpandError::UnexpectedToken); | 281 | return Err(ExpandError::UnexpectedToken); |
282 | } | 282 | } |
283 | } | 283 | } |
284 | _ => return Err(ExpandError::UnexpectedToken), | 284 | crate::Leaf::Literal(literal) => { |
285 | if input.eat_literal().map(|i| &i.text) != Some(&literal.text) { | ||
286 | return Err(ExpandError::UnexpectedToken); | ||
287 | } | ||
288 | } | ||
285 | }, | 289 | }, |
286 | crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => { | 290 | crate::TokenTree::Repeat(crate::Repeat { subtree, kind, separator }) => { |
287 | // Dirty hack to make macro-expansion terminate. | 291 | // Dirty hack to make macro-expansion terminate. |
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index 3554dc110..c938acf64 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | use ra_parser::{TokenSource}; | 1 | use ra_parser::{TokenSource}; |
2 | use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*}; | 2 | use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; |
3 | use std::cell::{RefCell}; | 3 | use std::cell::{RefCell}; |
4 | 4 | ||
5 | // A Sequece of Token, | 5 | // A Sequece of Token, |
@@ -284,9 +284,9 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { | |||
284 | 284 | ||
285 | fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | 285 | fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { |
286 | let (kinds, texts) = match d { | 286 | let (kinds, texts) = match d { |
287 | tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), | 287 | tt::Delimiter::Parenthesis => ([T!['('], T![')']], "()"), |
288 | tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"), | 288 | tt::Delimiter::Brace => ([T!['{'], T!['}']], "{}"), |
289 | tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"), | 289 | tt::Delimiter::Bracket => ([T!['['], T![']']], "[]"), |
290 | tt::Delimiter::None => ([L_DOLLAR, R_DOLLAR], ""), | 290 | tt::Delimiter::None => ([L_DOLLAR, R_DOLLAR], ""), |
291 | }; | 291 | }; |
292 | 292 | ||
@@ -299,8 +299,8 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken { | |||
299 | fn convert_literal(l: &tt::Literal) -> TtToken { | 299 | fn convert_literal(l: &tt::Literal) -> TtToken { |
300 | let kind = | 300 | let kind = |
301 | classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { | 301 | classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { |
302 | "true" => SyntaxKind::TRUE_KW, | 302 | "true" => T![true], |
303 | "false" => SyntaxKind::FALSE_KW, | 303 | "false" => T![false], |
304 | _ => panic!("Fail to convert given literal {:#?}", &l), | 304 | _ => panic!("Fail to convert given literal {:#?}", &l), |
305 | }); | 305 | }); |
306 | 306 | ||
@@ -320,11 +320,11 @@ fn convert_ident(ident: &tt::Ident) -> TtToken { | |||
320 | fn convert_punct(p: &tt::Punct) -> TtToken { | 320 | fn convert_punct(p: &tt::Punct) -> TtToken { |
321 | let kind = match p.char { | 321 | let kind = match p.char { |
322 | // lexer may produce compound tokens for these ones | 322 | // lexer may produce compound tokens for these ones |
323 | '.' => DOT, | 323 | '.' => T![.], |
324 | ':' => COLON, | 324 | ':' => T![:], |
325 | '=' => EQ, | 325 | '=' => T![=], |
326 | '!' => EXCL, | 326 | '!' => T![!], |
327 | '-' => MINUS, | 327 | '-' => T![-], |
328 | c => SyntaxKind::from_char(c).unwrap(), | 328 | c => SyntaxKind::from_char(c).unwrap(), |
329 | }; | 329 | }; |
330 | let text = { | 330 | let text = { |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 5e6a6f2a1..d8e344557 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use ra_parser::{TreeSink, ParseError}; | 1 | use ra_parser::{TreeSink, ParseError}; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, | 3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, |
4 | ast, SyntaxKind::*, TextUnit | 4 | ast, SyntaxKind::*, TextUnit, T |
5 | }; | 5 | }; |
6 | 6 | ||
7 | use crate::subtree_source::{SubtreeTokenSource, Querier}; | 7 | use crate::subtree_source::{SubtreeTokenSource, Querier}; |
@@ -211,9 +211,9 @@ fn convert_tt( | |||
211 | let first_child = tt.first_child_or_token()?; | 211 | let first_child = tt.first_child_or_token()?; |
212 | let last_child = tt.last_child_or_token()?; | 212 | let last_child = tt.last_child_or_token()?; |
213 | let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) { | 213 | let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) { |
214 | (L_PAREN, R_PAREN) => (tt::Delimiter::Parenthesis, true), | 214 | (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true), |
215 | (L_CURLY, R_CURLY) => (tt::Delimiter::Brace, true), | 215 | (T!['{'], T!['}']) => (tt::Delimiter::Brace, true), |
216 | (L_BRACK, R_BRACK) => (tt::Delimiter::Bracket, true), | 216 | (T!['['], T![']']) => (tt::Delimiter::Bracket, true), |
217 | _ => (tt::Delimiter::None, false), | 217 | _ => (tt::Delimiter::None, false), |
218 | }; | 218 | }; |
219 | 219 | ||
@@ -248,23 +248,22 @@ fn convert_tt( | |||
248 | 248 | ||
249 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); | 249 | token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into()); |
250 | } else { | 250 | } else { |
251 | let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW | 251 | let child: tt::TokenTree = |
252 | || token.kind() == SyntaxKind::FALSE_KW | 252 | if token.kind() == T![true] || token.kind() == T![false] { |
253 | { | 253 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() |
254 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() | 254 | } else if token.kind().is_keyword() |
255 | } else if token.kind().is_keyword() | 255 | || token.kind() == IDENT |
256 | || token.kind() == IDENT | 256 | || token.kind() == LIFETIME |
257 | || token.kind() == LIFETIME | 257 | { |
258 | { | 258 | let relative_range = token.range() - global_offset; |
259 | let relative_range = token.range() - global_offset; | 259 | let id = token_map.alloc(relative_range); |
260 | let id = token_map.alloc(relative_range); | 260 | let text = token.text().clone(); |
261 | let text = token.text().clone(); | 261 | tt::Leaf::from(tt::Ident { text, id }).into() |
262 | tt::Leaf::from(tt::Ident { text, id }).into() | 262 | } else if token.kind().is_literal() { |
263 | } else if token.kind().is_literal() { | 263 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() |
264 | tt::Leaf::from(tt::Literal { text: token.text().clone() }).into() | 264 | } else { |
265 | } else { | 265 | return None; |
266 | return None; | 266 | }; |
267 | }; | ||
268 | token_trees.push(child); | 267 | token_trees.push(child); |
269 | } | 268 | } |
270 | } | 269 | } |
@@ -305,10 +304,8 @@ impl<'a, Q: Querier> TtTreeSink<'a, Q> { | |||
305 | } | 304 | } |
306 | 305 | ||
307 | fn is_delimiter(kind: SyntaxKind) -> bool { | 306 | fn is_delimiter(kind: SyntaxKind) -> bool { |
308 | use SyntaxKind::*; | ||
309 | |||
310 | match kind { | 307 | match kind { |
311 | L_PAREN | L_BRACK | L_CURLY | R_PAREN | R_BRACK | R_CURLY => true, | 308 | T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true, |
312 | _ => false, | 309 | _ => false, |
313 | } | 310 | } |
314 | } | 311 | } |
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs index 004faf77e..e3a5ceecf 100644 --- a/crates/ra_mbe/src/tests.rs +++ b/crates/ra_mbe/src/tests.rs | |||
@@ -575,6 +575,20 @@ fn test_tt_to_stmts() { | |||
575 | ); | 575 | ); |
576 | } | 576 | } |
577 | 577 | ||
578 | #[test] | ||
579 | fn test_match_literal() { | ||
580 | let rules = create_rules( | ||
581 | r#" | ||
582 | macro_rules! foo { | ||
583 | ('(') => { | ||
584 | fn foo() {} | ||
585 | } | ||
586 | } | ||
587 | "#, | ||
588 | ); | ||
589 | assert_expansion(MacroKind::Items, &rules, "foo! ['(']", "fn foo () {}"); | ||
590 | } | ||
591 | |||
578 | // The following tests are port from intellij-rust directly | 592 | // The following tests are port from intellij-rust directly |
579 | // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt | 593 | // https://github.com/intellij-rust/intellij-rust/blob/c4e9feee4ad46e7953b1948c112533360b6087bb/src/test/kotlin/org/rust/lang/core/macros/RsMacroExpansionTest.kt |
580 | 594 | ||
diff --git a/crates/ra_parser/src/event.rs b/crates/ra_parser/src/event.rs index 87cf4eca0..51beb0866 100644 --- a/crates/ra_parser/src/event.rs +++ b/crates/ra_parser/src/event.rs | |||
@@ -38,7 +38,7 @@ pub(crate) enum Event { | |||
38 | /// The events for it would look like this: | 38 | /// The events for it would look like this: |
39 | /// | 39 | /// |
40 | /// | 40 | /// |
41 | /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH | 41 | /// START(PATH) IDENT('foo') FINISH START(PATH) T![::] IDENT('bar') FINISH |
42 | /// | /\ | 42 | /// | /\ |
43 | /// | | | 43 | /// | | |
44 | /// +------forward-parent------+ | 44 | /// +------forward-parent------+ |
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs index a538ec081..cf603eba1 100644 --- a/crates/ra_parser/src/grammar.rs +++ b/crates/ra_parser/src/grammar.rs | |||
@@ -59,7 +59,7 @@ pub(crate) fn macro_stmts(p: &mut Parser) { | |||
59 | let m = p.start(); | 59 | let m = p.start(); |
60 | 60 | ||
61 | while !p.at(EOF) { | 61 | while !p.at(EOF) { |
62 | if p.current() == SEMI { | 62 | if p.current() == T![;] { |
63 | p.bump(); | 63 | p.bump(); |
64 | continue; | 64 | continue; |
65 | } | 65 | } |
@@ -103,7 +103,7 @@ pub(crate) fn block(p: &mut Parser) { | |||
103 | pub(crate) fn meta_item(p: &mut Parser) { | 103 | pub(crate) fn meta_item(p: &mut Parser) { |
104 | fn is_delimiter(p: &mut Parser) -> bool { | 104 | fn is_delimiter(p: &mut Parser) -> bool { |
105 | match p.current() { | 105 | match p.current() { |
106 | L_CURLY | L_PAREN | L_BRACK => true, | 106 | T!['{'] | T!['('] | T!['['] => true, |
107 | _ => false, | 107 | _ => false, |
108 | } | 108 | } |
109 | } | 109 | } |
@@ -123,12 +123,12 @@ pub(crate) fn meta_item(p: &mut Parser) { | |||
123 | // https://doc.rust-lang.org/reference/paths.html#simple-paths | 123 | // https://doc.rust-lang.org/reference/paths.html#simple-paths |
124 | // The start of an meta must be a simple path | 124 | // The start of an meta must be a simple path |
125 | match p.current() { | 125 | match p.current() { |
126 | IDENT | COLONCOLON | SUPER_KW | SELF_KW | CRATE_KW => p.bump(), | 126 | IDENT | T![::] | T![super] | T![self] | T![crate] => p.bump(), |
127 | EQ => { | 127 | T![=] => { |
128 | p.bump(); | 128 | p.bump(); |
129 | match p.current() { | 129 | match p.current() { |
130 | c if c.is_literal() => p.bump(), | 130 | c if c.is_literal() => p.bump(), |
131 | TRUE_KW | FALSE_KW => p.bump(), | 131 | T![true] | T![false] => p.bump(), |
132 | _ => {} | 132 | _ => {} |
133 | } | 133 | } |
134 | break; | 134 | break; |
@@ -158,7 +158,7 @@ pub(crate) fn reparser( | |||
158 | MATCH_ARM_LIST => items::match_arm_list, | 158 | MATCH_ARM_LIST => items::match_arm_list, |
159 | USE_TREE_LIST => items::use_tree_list, | 159 | USE_TREE_LIST => items::use_tree_list, |
160 | EXTERN_ITEM_LIST => items::extern_item_list, | 160 | EXTERN_ITEM_LIST => items::extern_item_list, |
161 | TOKEN_TREE if first_child? == L_CURLY => items::token_tree, | 161 | TOKEN_TREE if first_child? == T!['{'] => items::token_tree, |
162 | ITEM_LIST => match parent? { | 162 | ITEM_LIST => match parent? { |
163 | IMPL_BLOCK => items::impl_item_list, | 163 | IMPL_BLOCK => items::impl_item_list, |
164 | TRAIT_DEF => items::trait_item_list, | 164 | TRAIT_DEF => items::trait_item_list, |
@@ -184,26 +184,26 @@ impl BlockLike { | |||
184 | 184 | ||
185 | pub(crate) fn opt_visibility(p: &mut Parser) -> bool { | 185 | pub(crate) fn opt_visibility(p: &mut Parser) -> bool { |
186 | match p.current() { | 186 | match p.current() { |
187 | PUB_KW => { | 187 | T![pub] => { |
188 | let m = p.start(); | 188 | let m = p.start(); |
189 | p.bump(); | 189 | p.bump(); |
190 | if p.at(L_PAREN) { | 190 | if p.at(T!['(']) { |
191 | match p.nth(1) { | 191 | match p.nth(1) { |
192 | // test crate_visibility | 192 | // test crate_visibility |
193 | // pub(crate) struct S; | 193 | // pub(crate) struct S; |
194 | // pub(self) struct S; | 194 | // pub(self) struct S; |
195 | // pub(self) struct S; | 195 | // pub(self) struct S; |
196 | // pub(self) struct S; | 196 | // pub(self) struct S; |
197 | CRATE_KW | SELF_KW | SUPER_KW => { | 197 | T![crate] | T![self] | T![super] => { |
198 | p.bump(); | 198 | p.bump(); |
199 | p.bump(); | 199 | p.bump(); |
200 | p.expect(R_PAREN); | 200 | p.expect(T![')']); |
201 | } | 201 | } |
202 | IN_KW => { | 202 | T![in] => { |
203 | p.bump(); | 203 | p.bump(); |
204 | p.bump(); | 204 | p.bump(); |
205 | paths::use_path(p); | 205 | paths::use_path(p); |
206 | p.expect(R_PAREN); | 206 | p.expect(T![')']); |
207 | } | 207 | } |
208 | _ => (), | 208 | _ => (), |
209 | } | 209 | } |
@@ -217,7 +217,7 @@ pub(crate) fn opt_visibility(p: &mut Parser) -> bool { | |||
217 | // | 217 | // |
218 | // test crate_keyword_path | 218 | // test crate_keyword_path |
219 | // fn foo() { crate::foo(); } | 219 | // fn foo() { crate::foo(); } |
220 | CRATE_KW if p.nth(1) != COLONCOLON => { | 220 | T![crate] if p.nth(1) != T![::] => { |
221 | let m = p.start(); | 221 | let m = p.start(); |
222 | p.bump(); | 222 | p.bump(); |
223 | m.complete(p, VISIBILITY); | 223 | m.complete(p, VISIBILITY); |
@@ -228,10 +228,10 @@ pub(crate) fn opt_visibility(p: &mut Parser) -> bool { | |||
228 | } | 228 | } |
229 | 229 | ||
230 | fn opt_alias(p: &mut Parser) { | 230 | fn opt_alias(p: &mut Parser) { |
231 | if p.at(AS_KW) { | 231 | if p.at(T![as]) { |
232 | let m = p.start(); | 232 | let m = p.start(); |
233 | p.bump(); | 233 | p.bump(); |
234 | if !p.eat(UNDERSCORE) { | 234 | if !p.eat(T![_]) { |
235 | name(p); | 235 | name(p); |
236 | } | 236 | } |
237 | m.complete(p, ALIAS); | 237 | m.complete(p, ALIAS); |
@@ -239,7 +239,7 @@ fn opt_alias(p: &mut Parser) { | |||
239 | } | 239 | } |
240 | 240 | ||
241 | fn abi(p: &mut Parser) { | 241 | fn abi(p: &mut Parser) { |
242 | assert!(p.at(EXTERN_KW)); | 242 | assert!(p.at(T![extern])); |
243 | let abi = p.start(); | 243 | let abi = p.start(); |
244 | p.bump(); | 244 | p.bump(); |
245 | match p.current() { | 245 | match p.current() { |
@@ -250,7 +250,7 @@ fn abi(p: &mut Parser) { | |||
250 | } | 250 | } |
251 | 251 | ||
252 | fn opt_fn_ret_type(p: &mut Parser) -> bool { | 252 | fn opt_fn_ret_type(p: &mut Parser) -> bool { |
253 | if p.at(THIN_ARROW) { | 253 | if p.at(T![->]) { |
254 | let m = p.start(); | 254 | let m = p.start(); |
255 | p.bump(); | 255 | p.bump(); |
256 | types::type_(p); | 256 | types::type_(p); |
@@ -280,21 +280,21 @@ fn name_ref(p: &mut Parser) { | |||
280 | let m = p.start(); | 280 | let m = p.start(); |
281 | p.bump(); | 281 | p.bump(); |
282 | m.complete(p, NAME_REF); | 282 | m.complete(p, NAME_REF); |
283 | } else if p.at(SELF_KW) { | 283 | } else if p.at(T![self]) { |
284 | let m = p.start(); | 284 | let m = p.start(); |
285 | p.bump(); | 285 | p.bump(); |
286 | m.complete(p, SELF_KW); | 286 | m.complete(p, T![self]); |
287 | } else { | 287 | } else { |
288 | p.err_and_bump("expected identifier"); | 288 | p.err_and_bump("expected identifier"); |
289 | } | 289 | } |
290 | } | 290 | } |
291 | 291 | ||
292 | fn error_block(p: &mut Parser, message: &str) { | 292 | fn error_block(p: &mut Parser, message: &str) { |
293 | assert!(p.at(L_CURLY)); | 293 | assert!(p.at(T!['{'])); |
294 | let m = p.start(); | 294 | let m = p.start(); |
295 | p.error(message); | 295 | p.error(message); |
296 | p.bump(); | 296 | p.bump(); |
297 | expressions::expr_block_contents(p); | 297 | expressions::expr_block_contents(p); |
298 | p.eat(R_CURLY); | 298 | p.eat(T!['}']); |
299 | m.complete(p, ERROR); | 299 | m.complete(p, ERROR); |
300 | } | 300 | } |
diff --git a/crates/ra_parser/src/grammar/attributes.rs b/crates/ra_parser/src/grammar/attributes.rs index cd30e8a45..20d58445f 100644 --- a/crates/ra_parser/src/grammar/attributes.rs +++ b/crates/ra_parser/src/grammar/attributes.rs | |||
@@ -1,28 +1,28 @@ | |||
1 | use super::*; | 1 | use super::*; |
2 | 2 | ||
3 | pub(super) fn inner_attributes(p: &mut Parser) { | 3 | pub(super) fn inner_attributes(p: &mut Parser) { |
4 | while p.current() == POUND && p.nth(1) == EXCL { | 4 | while p.current() == T![#] && p.nth(1) == T![!] { |
5 | attribute(p, true) | 5 | attribute(p, true) |
6 | } | 6 | } |
7 | } | 7 | } |
8 | 8 | ||
9 | pub(super) fn outer_attributes(p: &mut Parser) { | 9 | pub(super) fn outer_attributes(p: &mut Parser) { |
10 | while p.at(POUND) { | 10 | while p.at(T![#]) { |
11 | attribute(p, false) | 11 | attribute(p, false) |
12 | } | 12 | } |
13 | } | 13 | } |
14 | 14 | ||
15 | fn attribute(p: &mut Parser, inner: bool) { | 15 | fn attribute(p: &mut Parser, inner: bool) { |
16 | let attr = p.start(); | 16 | let attr = p.start(); |
17 | assert!(p.at(POUND)); | 17 | assert!(p.at(T![#])); |
18 | p.bump(); | 18 | p.bump(); |
19 | 19 | ||
20 | if inner { | 20 | if inner { |
21 | assert!(p.at(EXCL)); | 21 | assert!(p.at(T![!])); |
22 | p.bump(); | 22 | p.bump(); |
23 | } | 23 | } |
24 | 24 | ||
25 | if p.at(L_BRACK) { | 25 | if p.at(T!['[']) { |
26 | items::token_tree(p); | 26 | items::token_tree(p); |
27 | } else { | 27 | } else { |
28 | p.error("expected `[`"); | 28 | p.error("expected `[`"); |
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs index 9fe529f53..bb6c78b5f 100644 --- a/crates/ra_parser/src/grammar/expressions.rs +++ b/crates/ra_parser/src/grammar/expressions.rs | |||
@@ -36,14 +36,14 @@ fn expr_no_struct(p: &mut Parser) { | |||
36 | // fn c() { 1; 2; } | 36 | // fn c() { 1; 2; } |
37 | // fn d() { 1; 2 } | 37 | // fn d() { 1; 2 } |
38 | pub(crate) fn block(p: &mut Parser) { | 38 | pub(crate) fn block(p: &mut Parser) { |
39 | if !p.at(L_CURLY) { | 39 | if !p.at(T!['{']) { |
40 | p.error("expected a block"); | 40 | p.error("expected a block"); |
41 | return; | 41 | return; |
42 | } | 42 | } |
43 | let m = p.start(); | 43 | let m = p.start(); |
44 | p.bump(); | 44 | p.bump(); |
45 | expr_block_contents(p); | 45 | expr_block_contents(p); |
46 | p.expect(R_CURLY); | 46 | p.expect(T!['}']); |
47 | m.complete(p, BLOCK); | 47 | m.complete(p, BLOCK); |
48 | } | 48 | } |
49 | 49 | ||
@@ -65,10 +65,10 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
65 | // #[C] #[D] {} | 65 | // #[C] #[D] {} |
66 | // #[D] return (); | 66 | // #[D] return (); |
67 | // } | 67 | // } |
68 | let has_attrs = p.at(POUND); | 68 | let has_attrs = p.at(T![#]); |
69 | attributes::outer_attributes(p); | 69 | attributes::outer_attributes(p); |
70 | 70 | ||
71 | if p.at(LET_KW) { | 71 | if p.at(T![let]) { |
72 | let_stmt(p, m, with_semi); | 72 | let_stmt(p, m, with_semi); |
73 | return; | 73 | return; |
74 | } | 74 | } |
@@ -90,7 +90,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
90 | p.error(format!("attributes are not allowed on {:?}", kind)); | 90 | p.error(format!("attributes are not allowed on {:?}", kind)); |
91 | } | 91 | } |
92 | 92 | ||
93 | if p.at(R_CURLY) { | 93 | if p.at(T!['}']) { |
94 | // test attr_on_last_expr_in_block | 94 | // test attr_on_last_expr_in_block |
95 | // fn foo() { | 95 | // fn foo() { |
96 | // { #[A] bar!()? } | 96 | // { #[A] bar!()? } |
@@ -121,15 +121,15 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
121 | match with_semi { | 121 | match with_semi { |
122 | StmtWithSemi::Yes => { | 122 | StmtWithSemi::Yes => { |
123 | if blocklike.is_block() { | 123 | if blocklike.is_block() { |
124 | p.eat(SEMI); | 124 | p.eat(T![;]); |
125 | } else { | 125 | } else { |
126 | p.expect(SEMI); | 126 | p.expect(T![;]); |
127 | } | 127 | } |
128 | } | 128 | } |
129 | StmtWithSemi::No => {} | 129 | StmtWithSemi::No => {} |
130 | StmtWithSemi::Optional => { | 130 | StmtWithSemi::Optional => { |
131 | if p.at(SEMI) { | 131 | if p.at(T![;]) { |
132 | p.eat(SEMI); | 132 | p.eat(T![;]); |
133 | } | 133 | } |
134 | } | 134 | } |
135 | } | 135 | } |
@@ -145,24 +145,24 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) { | |||
145 | // let d: i32 = 92; | 145 | // let d: i32 = 92; |
146 | // } | 146 | // } |
147 | fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { | 147 | fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) { |
148 | assert!(p.at(LET_KW)); | 148 | assert!(p.at(T![let])); |
149 | p.bump(); | 149 | p.bump(); |
150 | patterns::pattern(p); | 150 | patterns::pattern(p); |
151 | if p.at(COLON) { | 151 | if p.at(T![:]) { |
152 | types::ascription(p); | 152 | types::ascription(p); |
153 | } | 153 | } |
154 | if p.eat(EQ) { | 154 | if p.eat(T![=]) { |
155 | expressions::expr(p); | 155 | expressions::expr(p); |
156 | } | 156 | } |
157 | 157 | ||
158 | match with_semi { | 158 | match with_semi { |
159 | StmtWithSemi::Yes => { | 159 | StmtWithSemi::Yes => { |
160 | p.expect(SEMI); | 160 | p.expect(T![;]); |
161 | } | 161 | } |
162 | StmtWithSemi::No => {} | 162 | StmtWithSemi::No => {} |
163 | StmtWithSemi::Optional => { | 163 | StmtWithSemi::Optional => { |
164 | if p.at(SEMI) { | 164 | if p.at(T![;]) { |
165 | p.eat(SEMI); | 165 | p.eat(T![;]); |
166 | } | 166 | } |
167 | } | 167 | } |
168 | } | 168 | } |
@@ -174,12 +174,12 @@ pub(crate) fn expr_block_contents(p: &mut Parser) { | |||
174 | // This is checked by a validator | 174 | // This is checked by a validator |
175 | attributes::inner_attributes(p); | 175 | attributes::inner_attributes(p); |
176 | 176 | ||
177 | while !p.at(EOF) && !p.at(R_CURLY) { | 177 | while !p.at(EOF) && !p.at(T!['}']) { |
178 | // test nocontentexpr | 178 | // test nocontentexpr |
179 | // fn foo(){ | 179 | // fn foo(){ |
180 | // ;;;some_expr();;;;{;;;};;;;Ok(()) | 180 | // ;;;some_expr();;;;{;;;};;;;Ok(()) |
181 | // } | 181 | // } |
182 | if p.current() == SEMI { | 182 | if p.current() == T![;] { |
183 | p.bump(); | 183 | p.bump(); |
184 | continue; | 184 | continue; |
185 | } | 185 | } |
@@ -202,41 +202,41 @@ enum Op { | |||
202 | fn current_op(p: &Parser) -> (u8, Op) { | 202 | fn current_op(p: &Parser) -> (u8, Op) { |
203 | if let Some(t) = p.current3() { | 203 | if let Some(t) = p.current3() { |
204 | match t { | 204 | match t { |
205 | (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)), | 205 | (T![<], T![<], T![=]) => return (1, Op::Composite(T![<<=], 3)), |
206 | (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)), | 206 | (T![>], T![>], T![=]) => return (1, Op::Composite(T![>>=], 3)), |
207 | _ => (), | 207 | _ => (), |
208 | } | 208 | } |
209 | } | 209 | } |
210 | 210 | ||
211 | if let Some(t) = p.current2() { | 211 | if let Some(t) = p.current2() { |
212 | match t { | 212 | match t { |
213 | (PLUS, EQ) => return (1, Op::Composite(PLUSEQ, 2)), | 213 | (T![+], T![=]) => return (1, Op::Composite(T![+=], 2)), |
214 | (MINUS, EQ) => return (1, Op::Composite(MINUSEQ, 2)), | 214 | (T![-], T![=]) => return (1, Op::Composite(T![-=], 2)), |
215 | (STAR, EQ) => return (1, Op::Composite(STAREQ, 2)), | 215 | (T![*], T![=]) => return (1, Op::Composite(T![*=], 2)), |
216 | (PERCENT, EQ) => return (1, Op::Composite(PERCENTEQ, 2)), | 216 | (T![%], T![=]) => return (1, Op::Composite(T![%=], 2)), |
217 | (SLASH, EQ) => return (1, Op::Composite(SLASHEQ, 2)), | 217 | (T![/], T![=]) => return (1, Op::Composite(T![/=], 2)), |
218 | (PIPE, EQ) => return (1, Op::Composite(PIPEEQ, 2)), | 218 | (T![|], T![=]) => return (1, Op::Composite(T![|=], 2)), |
219 | (AMP, EQ) => return (1, Op::Composite(AMPEQ, 2)), | 219 | (T![&], T![=]) => return (1, Op::Composite(T![&=], 2)), |
220 | (CARET, EQ) => return (1, Op::Composite(CARETEQ, 2)), | 220 | (T![^], T![=]) => return (1, Op::Composite(T![^=], 2)), |
221 | (PIPE, PIPE) => return (3, Op::Composite(PIPEPIPE, 2)), | 221 | (T![|], T![|]) => return (3, Op::Composite(T![||], 2)), |
222 | (AMP, AMP) => return (4, Op::Composite(AMPAMP, 2)), | 222 | (T![&], T![&]) => return (4, Op::Composite(T![&&], 2)), |
223 | (L_ANGLE, EQ) => return (5, Op::Composite(LTEQ, 2)), | 223 | (T![<], T![=]) => return (5, Op::Composite(T![<=], 2)), |
224 | (R_ANGLE, EQ) => return (5, Op::Composite(GTEQ, 2)), | 224 | (T![>], T![=]) => return (5, Op::Composite(T![>=], 2)), |
225 | (L_ANGLE, L_ANGLE) => return (9, Op::Composite(SHL, 2)), | 225 | (T![<], T![<]) => return (9, Op::Composite(T![<<], 2)), |
226 | (R_ANGLE, R_ANGLE) => return (9, Op::Composite(SHR, 2)), | 226 | (T![>], T![>]) => return (9, Op::Composite(T![>>], 2)), |
227 | _ => (), | 227 | _ => (), |
228 | } | 228 | } |
229 | } | 229 | } |
230 | 230 | ||
231 | let bp = match p.current() { | 231 | let bp = match p.current() { |
232 | EQ => 1, | 232 | T![=] => 1, |
233 | DOTDOT | DOTDOTEQ => 2, | 233 | T![..] | T![..=] => 2, |
234 | EQEQ | NEQ | L_ANGLE | R_ANGLE => 5, | 234 | T![==] | T![!=] | T![<] | T![>] => 5, |
235 | PIPE => 6, | 235 | T![|] => 6, |
236 | CARET => 7, | 236 | T![^] => 7, |
237 | AMP => 8, | 237 | T![&] => 8, |
238 | MINUS | PLUS => 10, | 238 | T![-] | T![+] => 10, |
239 | STAR | SLASH | PERCENT => 11, | 239 | T![*] | T![/] | T![%] => 11, |
240 | _ => 0, | 240 | _ => 0, |
241 | }; | 241 | }; |
242 | (bp, Op::Simple) | 242 | (bp, Op::Simple) |
@@ -284,7 +284,7 @@ fn expr_bp( | |||
284 | newly_dollar_open = false; | 284 | newly_dollar_open = false; |
285 | } | 285 | } |
286 | 286 | ||
287 | let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ; | 287 | let is_range = p.current() == T![..] || p.current() == T![..=]; |
288 | let (op_bp, op) = current_op(p); | 288 | let (op_bp, op) = current_op(p); |
289 | if op_bp < bp { | 289 | if op_bp < bp { |
290 | break; | 290 | break; |
@@ -318,10 +318,10 @@ fn lhs( | |||
318 | // let _ = &1; | 318 | // let _ = &1; |
319 | // let _ = &mut &f(); | 319 | // let _ = &mut &f(); |
320 | // } | 320 | // } |
321 | AMP => { | 321 | T![&] => { |
322 | m = p.start(); | 322 | m = p.start(); |
323 | p.bump(); | 323 | p.bump(); |
324 | p.eat(MUT_KW); | 324 | p.eat(T![mut]); |
325 | REF_EXPR | 325 | REF_EXPR |
326 | } | 326 | } |
327 | // test unary_expr | 327 | // test unary_expr |
@@ -330,14 +330,14 @@ fn lhs( | |||
330 | // !!true; | 330 | // !!true; |
331 | // --1; | 331 | // --1; |
332 | // } | 332 | // } |
333 | STAR | EXCL | MINUS => { | 333 | T![*] | T![!] | T![-] => { |
334 | m = p.start(); | 334 | m = p.start(); |
335 | p.bump(); | 335 | p.bump(); |
336 | PREFIX_EXPR | 336 | PREFIX_EXPR |
337 | } | 337 | } |
338 | // test full_range_expr | 338 | // test full_range_expr |
339 | // fn foo() { xs[..]; } | 339 | // fn foo() { xs[..]; } |
340 | DOTDOT | DOTDOTEQ => { | 340 | T![..] | T![..=] => { |
341 | m = p.start(); | 341 | m = p.start(); |
342 | p.bump(); | 342 | p.bump(); |
343 | if p.at_ts(EXPR_FIRST) { | 343 | if p.at_ts(EXPR_FIRST) { |
@@ -375,21 +375,21 @@ fn postfix_expr( | |||
375 | // [] => {} | 375 | // [] => {} |
376 | // } | 376 | // } |
377 | // } | 377 | // } |
378 | L_PAREN if allow_calls => call_expr(p, lhs), | 378 | T!['('] if allow_calls => call_expr(p, lhs), |
379 | L_BRACK if allow_calls => index_expr(p, lhs), | 379 | T!['['] if allow_calls => index_expr(p, lhs), |
380 | DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => { | 380 | T![.] if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::]) => { |
381 | method_call_expr(p, lhs) | 381 | method_call_expr(p, lhs) |
382 | } | 382 | } |
383 | DOT => field_expr(p, lhs), | 383 | T![.] => field_expr(p, lhs), |
384 | // test postfix_range | 384 | // test postfix_range |
385 | // fn foo() { let x = 1..; } | 385 | // fn foo() { let x = 1..; } |
386 | DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => { | 386 | T![..] | T![..=] if !EXPR_FIRST.contains(p.nth(1)) => { |
387 | let m = lhs.precede(p); | 387 | let m = lhs.precede(p); |
388 | p.bump(); | 388 | p.bump(); |
389 | m.complete(p, RANGE_EXPR) | 389 | m.complete(p, RANGE_EXPR) |
390 | } | 390 | } |
391 | QUESTION => try_expr(p, lhs), | 391 | T![?] => try_expr(p, lhs), |
392 | AS_KW => cast_expr(p, lhs), | 392 | T![as] => cast_expr(p, lhs), |
393 | _ => break, | 393 | _ => break, |
394 | }; | 394 | }; |
395 | allow_calls = true | 395 | allow_calls = true |
@@ -405,7 +405,7 @@ fn postfix_expr( | |||
405 | // f(<Foo as Trait>::func()); | 405 | // f(<Foo as Trait>::func()); |
406 | // } | 406 | // } |
407 | fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 407 | fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
408 | assert!(p.at(L_PAREN)); | 408 | assert!(p.at(T!['('])); |
409 | let m = lhs.precede(p); | 409 | let m = lhs.precede(p); |
410 | arg_list(p); | 410 | arg_list(p); |
411 | m.complete(p, CALL_EXPR) | 411 | m.complete(p, CALL_EXPR) |
@@ -416,11 +416,11 @@ fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
416 | // x[1][2]; | 416 | // x[1][2]; |
417 | // } | 417 | // } |
418 | fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 418 | fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
419 | assert!(p.at(L_BRACK)); | 419 | assert!(p.at(T!['['])); |
420 | let m = lhs.precede(p); | 420 | let m = lhs.precede(p); |
421 | p.bump(); | 421 | p.bump(); |
422 | expr(p); | 422 | expr(p); |
423 | p.expect(R_BRACK); | 423 | p.expect(T![']']); |
424 | m.complete(p, INDEX_EXPR) | 424 | m.complete(p, INDEX_EXPR) |
425 | } | 425 | } |
426 | 426 | ||
@@ -430,12 +430,12 @@ fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
430 | // y.bar::<T>(1, 2,); | 430 | // y.bar::<T>(1, 2,); |
431 | // } | 431 | // } |
432 | fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 432 | fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
433 | assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)); | 433 | assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth(2) == T![::])); |
434 | let m = lhs.precede(p); | 434 | let m = lhs.precede(p); |
435 | p.bump(); | 435 | p.bump(); |
436 | name_ref(p); | 436 | name_ref(p); |
437 | type_args::opt_type_arg_list(p, true); | 437 | type_args::opt_type_arg_list(p, true); |
438 | if p.at(L_PAREN) { | 438 | if p.at(T!['(']) { |
439 | arg_list(p); | 439 | arg_list(p); |
440 | } | 440 | } |
441 | m.complete(p, METHOD_CALL_EXPR) | 441 | m.complete(p, METHOD_CALL_EXPR) |
@@ -455,7 +455,7 @@ fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
455 | // x.0x01; | 455 | // x.0x01; |
456 | // } | 456 | // } |
457 | fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 457 | fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
458 | assert!(p.at(DOT)); | 458 | assert!(p.at(T![.])); |
459 | let m = lhs.precede(p); | 459 | let m = lhs.precede(p); |
460 | p.bump(); | 460 | p.bump(); |
461 | if p.at(IDENT) { | 461 | if p.at(IDENT) { |
@@ -463,7 +463,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
463 | } else if p.at(INT_NUMBER) { | 463 | } else if p.at(INT_NUMBER) { |
464 | p.bump(); | 464 | p.bump(); |
465 | } else if p.at(FLOAT_NUMBER) { | 465 | } else if p.at(FLOAT_NUMBER) { |
466 | // FIXME: How to recover and instead parse INT + DOT? | 466 | // FIXME: How to recover and instead parse INT + T![.]? |
467 | p.bump(); | 467 | p.bump(); |
468 | } else { | 468 | } else { |
469 | p.error("expected field name or number") | 469 | p.error("expected field name or number") |
@@ -476,7 +476,7 @@ fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
476 | // x?; | 476 | // x?; |
477 | // } | 477 | // } |
478 | fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 478 | fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
479 | assert!(p.at(QUESTION)); | 479 | assert!(p.at(T![?])); |
480 | let m = lhs.precede(p); | 480 | let m = lhs.precede(p); |
481 | p.bump(); | 481 | p.bump(); |
482 | m.complete(p, TRY_EXPR) | 482 | m.complete(p, TRY_EXPR) |
@@ -490,7 +490,7 @@ fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
490 | // 0x36 as u8 <= 0x37; | 490 | // 0x36 as u8 <= 0x37; |
491 | // } | 491 | // } |
492 | fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | 492 | fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { |
493 | assert!(p.at(AS_KW)); | 493 | assert!(p.at(T![as])); |
494 | let m = lhs.precede(p); | 494 | let m = lhs.precede(p); |
495 | p.bump(); | 495 | p.bump(); |
496 | // Use type_no_bounds(), because cast expressions are not | 496 | // Use type_no_bounds(), because cast expressions are not |
@@ -500,20 +500,20 @@ fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | |||
500 | } | 500 | } |
501 | 501 | ||
502 | fn arg_list(p: &mut Parser) { | 502 | fn arg_list(p: &mut Parser) { |
503 | assert!(p.at(L_PAREN)); | 503 | assert!(p.at(T!['('])); |
504 | let m = p.start(); | 504 | let m = p.start(); |
505 | p.bump(); | 505 | p.bump(); |
506 | while !p.at(R_PAREN) && !p.at(EOF) { | 506 | while !p.at(T![')']) && !p.at(EOF) { |
507 | if !p.at_ts(EXPR_FIRST) { | 507 | if !p.at_ts(EXPR_FIRST) { |
508 | p.error("expected expression"); | 508 | p.error("expected expression"); |
509 | break; | 509 | break; |
510 | } | 510 | } |
511 | expr(p); | 511 | expr(p); |
512 | if !p.at(R_PAREN) && !p.expect(COMMA) { | 512 | if !p.at(T![')']) && !p.expect(T![,]) { |
513 | break; | 513 | break; |
514 | } | 514 | } |
515 | } | 515 | } |
516 | p.eat(R_PAREN); | 516 | p.eat(T![')']); |
517 | m.complete(p, ARG_LIST); | 517 | m.complete(p, ARG_LIST); |
518 | } | 518 | } |
519 | 519 | ||
@@ -525,15 +525,15 @@ fn arg_list(p: &mut Parser) { | |||
525 | // let _ = format!(); | 525 | // let _ = format!(); |
526 | // } | 526 | // } |
527 | fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { | 527 | fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { |
528 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | 528 | assert!(paths::is_path_start(p) || p.at(T![<])); |
529 | let m = p.start(); | 529 | let m = p.start(); |
530 | paths::expr_path(p); | 530 | paths::expr_path(p); |
531 | match p.current() { | 531 | match p.current() { |
532 | L_CURLY if !r.forbid_structs => { | 532 | T!['{'] if !r.forbid_structs => { |
533 | named_field_list(p); | 533 | named_field_list(p); |
534 | (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) | 534 | (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) |
535 | } | 535 | } |
536 | EXCL => { | 536 | T![!] => { |
537 | let block_like = items::macro_call_after_excl(p); | 537 | let block_like = items::macro_call_after_excl(p); |
538 | (m.complete(p, MACRO_CALL), block_like) | 538 | (m.complete(p, MACRO_CALL), block_like) |
539 | } | 539 | } |
@@ -548,35 +548,35 @@ fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { | |||
548 | // S { x, y: 32, ..Default::default() }; | 548 | // S { x, y: 32, ..Default::default() }; |
549 | // } | 549 | // } |
550 | pub(crate) fn named_field_list(p: &mut Parser) { | 550 | pub(crate) fn named_field_list(p: &mut Parser) { |
551 | assert!(p.at(L_CURLY)); | 551 | assert!(p.at(T!['{'])); |
552 | let m = p.start(); | 552 | let m = p.start(); |
553 | p.bump(); | 553 | p.bump(); |
554 | while !p.at(EOF) && !p.at(R_CURLY) { | 554 | while !p.at(EOF) && !p.at(T!['}']) { |
555 | match p.current() { | 555 | match p.current() { |
556 | // test struct_literal_field_with_attr | 556 | // test struct_literal_field_with_attr |
557 | // fn main() { | 557 | // fn main() { |
558 | // S { #[cfg(test)] field: 1 } | 558 | // S { #[cfg(test)] field: 1 } |
559 | // } | 559 | // } |
560 | IDENT | POUND => { | 560 | IDENT | T![#] => { |
561 | let m = p.start(); | 561 | let m = p.start(); |
562 | attributes::outer_attributes(p); | 562 | attributes::outer_attributes(p); |
563 | name_ref(p); | 563 | name_ref(p); |
564 | if p.eat(COLON) { | 564 | if p.eat(T![:]) { |
565 | expr(p); | 565 | expr(p); |
566 | } | 566 | } |
567 | m.complete(p, NAMED_FIELD); | 567 | m.complete(p, NAMED_FIELD); |
568 | } | 568 | } |
569 | DOTDOT => { | 569 | T![..] => { |
570 | p.bump(); | 570 | p.bump(); |
571 | expr(p); | 571 | expr(p); |
572 | } | 572 | } |
573 | L_CURLY => error_block(p, "expected a field"), | 573 | T!['{'] => error_block(p, "expected a field"), |
574 | _ => p.err_and_bump("expected identifier"), | 574 | _ => p.err_and_bump("expected identifier"), |
575 | } | 575 | } |
576 | if !p.at(R_CURLY) { | 576 | if !p.at(T!['}']) { |
577 | p.expect(COMMA); | 577 | p.expect(T![,]); |
578 | } | 578 | } |
579 | } | 579 | } |
580 | p.expect(R_CURLY); | 580 | p.expect(T!['}']); |
581 | m.complete(p, NAMED_FIELD_LIST); | 581 | m.complete(p, NAMED_FIELD_LIST); |
582 | } | 582 | } |
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs index 8dc7e44a9..8b1a1de49 100644 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ b/crates/ra_parser/src/grammar/expressions/atom.rs | |||
@@ -60,29 +60,29 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
60 | if let Some(m) = literal(p) { | 60 | if let Some(m) = literal(p) { |
61 | return Some((m, BlockLike::NotBlock)); | 61 | return Some((m, BlockLike::NotBlock)); |
62 | } | 62 | } |
63 | if paths::is_path_start(p) || p.at(L_ANGLE) { | 63 | if paths::is_path_start(p) || p.at(T![<]) { |
64 | return Some(path_expr(p, r)); | 64 | return Some(path_expr(p, r)); |
65 | } | 65 | } |
66 | let la = p.nth(1); | 66 | let la = p.nth(1); |
67 | let done = match p.current() { | 67 | let done = match p.current() { |
68 | L_PAREN => tuple_expr(p), | 68 | T!['('] => tuple_expr(p), |
69 | L_BRACK => array_expr(p), | 69 | T!['['] => array_expr(p), |
70 | PIPE => lambda_expr(p), | 70 | T![|] => lambda_expr(p), |
71 | MOVE_KW if la == PIPE => lambda_expr(p), | 71 | T![move] if la == T![|] => lambda_expr(p), |
72 | ASYNC_KW if la == PIPE || (la == MOVE_KW && p.nth(2) == PIPE) => lambda_expr(p), | 72 | T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => lambda_expr(p), |
73 | IF_KW => if_expr(p), | 73 | T![if] => if_expr(p), |
74 | 74 | ||
75 | LOOP_KW => loop_expr(p, None), | 75 | T![loop] => loop_expr(p, None), |
76 | FOR_KW => for_expr(p, None), | 76 | T![for] => for_expr(p, None), |
77 | WHILE_KW => while_expr(p, None), | 77 | T![while] => while_expr(p, None), |
78 | LIFETIME if la == COLON => { | 78 | LIFETIME if la == T![:] => { |
79 | let m = p.start(); | 79 | let m = p.start(); |
80 | label(p); | 80 | label(p); |
81 | match p.current() { | 81 | match p.current() { |
82 | LOOP_KW => loop_expr(p, Some(m)), | 82 | T![loop] => loop_expr(p, Some(m)), |
83 | FOR_KW => for_expr(p, Some(m)), | 83 | T![for] => for_expr(p, Some(m)), |
84 | WHILE_KW => while_expr(p, Some(m)), | 84 | T![while] => while_expr(p, Some(m)), |
85 | L_CURLY => block_expr(p, Some(m)), | 85 | T!['{'] => block_expr(p, Some(m)), |
86 | _ => { | 86 | _ => { |
87 | // test_err misplaced_label_err | 87 | // test_err misplaced_label_err |
88 | // fn main() { | 88 | // fn main() { |
@@ -94,22 +94,22 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
94 | } | 94 | } |
95 | } | 95 | } |
96 | } | 96 | } |
97 | ASYNC_KW if la == L_CURLY || (la == MOVE_KW && p.nth(2) == L_CURLY) => { | 97 | T![async] if la == T!['{'] || (la == T![move] && p.nth(2) == T!['{']) => { |
98 | let m = p.start(); | 98 | let m = p.start(); |
99 | p.bump(); | 99 | p.bump(); |
100 | p.eat(MOVE_KW); | 100 | p.eat(T![move]); |
101 | block_expr(p, Some(m)) | 101 | block_expr(p, Some(m)) |
102 | } | 102 | } |
103 | MATCH_KW => match_expr(p), | 103 | T![match] => match_expr(p), |
104 | UNSAFE_KW if la == L_CURLY => { | 104 | T![unsafe] if la == T!['{'] => { |
105 | let m = p.start(); | 105 | let m = p.start(); |
106 | p.bump(); | 106 | p.bump(); |
107 | block_expr(p, Some(m)) | 107 | block_expr(p, Some(m)) |
108 | } | 108 | } |
109 | L_CURLY => block_expr(p, None), | 109 | T!['{'] => block_expr(p, None), |
110 | RETURN_KW => return_expr(p), | 110 | T![return] => return_expr(p), |
111 | CONTINUE_KW => continue_expr(p), | 111 | T![continue] => continue_expr(p), |
112 | BREAK_KW => break_expr(p, r), | 112 | T![break] => break_expr(p, r), |
113 | _ => { | 113 | _ => { |
114 | p.err_recover("expected expression", EXPR_RECOVERY_SET); | 114 | p.err_recover("expected expression", EXPR_RECOVERY_SET); |
115 | return None; | 115 | return None; |
@@ -129,25 +129,25 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
129 | // (1,); | 129 | // (1,); |
130 | // } | 130 | // } |
131 | fn tuple_expr(p: &mut Parser) -> CompletedMarker { | 131 | fn tuple_expr(p: &mut Parser) -> CompletedMarker { |
132 | assert!(p.at(L_PAREN)); | 132 | assert!(p.at(T!['('])); |
133 | let m = p.start(); | 133 | let m = p.start(); |
134 | p.expect(L_PAREN); | 134 | p.expect(T!['(']); |
135 | 135 | ||
136 | let mut saw_comma = false; | 136 | let mut saw_comma = false; |
137 | let mut saw_expr = false; | 137 | let mut saw_expr = false; |
138 | while !p.at(EOF) && !p.at(R_PAREN) { | 138 | while !p.at(EOF) && !p.at(T![')']) { |
139 | saw_expr = true; | 139 | saw_expr = true; |
140 | if !p.at_ts(EXPR_FIRST) { | 140 | if !p.at_ts(EXPR_FIRST) { |
141 | p.error("expected expression"); | 141 | p.error("expected expression"); |
142 | break; | 142 | break; |
143 | } | 143 | } |
144 | expr(p); | 144 | expr(p); |
145 | if !p.at(R_PAREN) { | 145 | if !p.at(T![')']) { |
146 | saw_comma = true; | 146 | saw_comma = true; |
147 | p.expect(COMMA); | 147 | p.expect(T![,]); |
148 | } | 148 | } |
149 | } | 149 | } |
150 | p.expect(R_PAREN); | 150 | p.expect(T![')']); |
151 | m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) | 151 | m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) |
152 | } | 152 | } |
153 | 153 | ||
@@ -159,21 +159,21 @@ fn tuple_expr(p: &mut Parser) -> CompletedMarker { | |||
159 | // [1; 2]; | 159 | // [1; 2]; |
160 | // } | 160 | // } |
161 | fn array_expr(p: &mut Parser) -> CompletedMarker { | 161 | fn array_expr(p: &mut Parser) -> CompletedMarker { |
162 | assert!(p.at(L_BRACK)); | 162 | assert!(p.at(T!['['])); |
163 | let m = p.start(); | 163 | let m = p.start(); |
164 | p.bump(); | 164 | p.bump(); |
165 | if p.eat(R_BRACK) { | 165 | if p.eat(T![']']) { |
166 | return m.complete(p, ARRAY_EXPR); | 166 | return m.complete(p, ARRAY_EXPR); |
167 | } | 167 | } |
168 | expr(p); | 168 | expr(p); |
169 | if p.eat(SEMI) { | 169 | if p.eat(T![;]) { |
170 | expr(p); | 170 | expr(p); |
171 | p.expect(R_BRACK); | 171 | p.expect(T![']']); |
172 | return m.complete(p, ARRAY_EXPR); | 172 | return m.complete(p, ARRAY_EXPR); |
173 | } | 173 | } |
174 | while !p.at(EOF) && !p.at(R_BRACK) { | 174 | while !p.at(EOF) && !p.at(T![']']) { |
175 | p.expect(COMMA); | 175 | p.expect(T![,]); |
176 | if p.at(R_BRACK) { | 176 | if p.at(T![']']) { |
177 | break; | 177 | break; |
178 | } | 178 | } |
179 | if !p.at_ts(EXPR_FIRST) { | 179 | if !p.at_ts(EXPR_FIRST) { |
@@ -182,7 +182,7 @@ fn array_expr(p: &mut Parser) -> CompletedMarker { | |||
182 | } | 182 | } |
183 | expr(p); | 183 | expr(p); |
184 | } | 184 | } |
185 | p.expect(R_BRACK); | 185 | p.expect(T![']']); |
186 | m.complete(p, ARRAY_EXPR) | 186 | m.complete(p, ARRAY_EXPR) |
187 | } | 187 | } |
188 | 188 | ||
@@ -198,17 +198,17 @@ fn array_expr(p: &mut Parser) -> CompletedMarker { | |||
198 | // } | 198 | // } |
199 | fn lambda_expr(p: &mut Parser) -> CompletedMarker { | 199 | fn lambda_expr(p: &mut Parser) -> CompletedMarker { |
200 | assert!( | 200 | assert!( |
201 | p.at(PIPE) | 201 | p.at(T![|]) |
202 | || (p.at(MOVE_KW) && p.nth(1) == PIPE) | 202 | || (p.at(T![move]) && p.nth(1) == T![|]) |
203 | || (p.at(ASYNC_KW) && p.nth(1) == PIPE) | 203 | || (p.at(T![async]) && p.nth(1) == T![|]) |
204 | || (p.at(ASYNC_KW) && p.nth(1) == MOVE_KW && p.nth(2) == PIPE) | 204 | || (p.at(T![async]) && p.nth(1) == T![move] && p.nth(2) == T![|]) |
205 | ); | 205 | ); |
206 | let m = p.start(); | 206 | let m = p.start(); |
207 | p.eat(ASYNC_KW); | 207 | p.eat(T![async]); |
208 | p.eat(MOVE_KW); | 208 | p.eat(T![move]); |
209 | params::param_list_opt_types(p); | 209 | params::param_list_opt_types(p); |
210 | if opt_fn_ret_type(p) { | 210 | if opt_fn_ret_type(p) { |
211 | if !p.at(L_CURLY) { | 211 | if !p.at(T!['{']) { |
212 | p.error("expected `{`"); | 212 | p.error("expected `{`"); |
213 | } | 213 | } |
214 | } | 214 | } |
@@ -224,14 +224,14 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker { | |||
224 | // if S {}; | 224 | // if S {}; |
225 | // } | 225 | // } |
226 | fn if_expr(p: &mut Parser) -> CompletedMarker { | 226 | fn if_expr(p: &mut Parser) -> CompletedMarker { |
227 | assert!(p.at(IF_KW)); | 227 | assert!(p.at(T![if])); |
228 | let m = p.start(); | 228 | let m = p.start(); |
229 | p.bump(); | 229 | p.bump(); |
230 | cond(p); | 230 | cond(p); |
231 | block(p); | 231 | block(p); |
232 | if p.at(ELSE_KW) { | 232 | if p.at(T![else]) { |
233 | p.bump(); | 233 | p.bump(); |
234 | if p.at(IF_KW) { | 234 | if p.at(T![if]) { |
235 | if_expr(p); | 235 | if_expr(p); |
236 | } else { | 236 | } else { |
237 | block(p); | 237 | block(p); |
@@ -247,7 +247,7 @@ fn if_expr(p: &mut Parser) -> CompletedMarker { | |||
247 | // 'c: for x in () {} | 247 | // 'c: for x in () {} |
248 | // } | 248 | // } |
249 | fn label(p: &mut Parser) { | 249 | fn label(p: &mut Parser) { |
250 | assert!(p.at(LIFETIME) && p.nth(1) == COLON); | 250 | assert!(p.at(LIFETIME) && p.nth(1) == T![:]); |
251 | let m = p.start(); | 251 | let m = p.start(); |
252 | p.bump(); | 252 | p.bump(); |
253 | p.bump(); | 253 | p.bump(); |
@@ -259,7 +259,7 @@ fn label(p: &mut Parser) { | |||
259 | // loop {}; | 259 | // loop {}; |
260 | // } | 260 | // } |
261 | fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 261 | fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
262 | assert!(p.at(LOOP_KW)); | 262 | assert!(p.at(T![loop])); |
263 | let m = m.unwrap_or_else(|| p.start()); | 263 | let m = m.unwrap_or_else(|| p.start()); |
264 | p.bump(); | 264 | p.bump(); |
265 | block(p); | 265 | block(p); |
@@ -272,7 +272,7 @@ fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
272 | // while let Some(x) = it.next() {}; | 272 | // while let Some(x) = it.next() {}; |
273 | // } | 273 | // } |
274 | fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 274 | fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
275 | assert!(p.at(WHILE_KW)); | 275 | assert!(p.at(T![while])); |
276 | let m = m.unwrap_or_else(|| p.start()); | 276 | let m = m.unwrap_or_else(|| p.start()); |
277 | p.bump(); | 277 | p.bump(); |
278 | cond(p); | 278 | cond(p); |
@@ -285,11 +285,11 @@ fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
285 | // for x in [] {}; | 285 | // for x in [] {}; |
286 | // } | 286 | // } |
287 | fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 287 | fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
288 | assert!(p.at(FOR_KW)); | 288 | assert!(p.at(T![for])); |
289 | let m = m.unwrap_or_else(|| p.start()); | 289 | let m = m.unwrap_or_else(|| p.start()); |
290 | p.bump(); | 290 | p.bump(); |
291 | patterns::pattern(p); | 291 | patterns::pattern(p); |
292 | p.expect(IN_KW); | 292 | p.expect(T![in]); |
293 | expr_no_struct(p); | 293 | expr_no_struct(p); |
294 | block(p); | 294 | block(p); |
295 | m.complete(p, FOR_EXPR) | 295 | m.complete(p, FOR_EXPR) |
@@ -305,9 +305,9 @@ fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
305 | // } | 305 | // } |
306 | fn cond(p: &mut Parser) { | 306 | fn cond(p: &mut Parser) { |
307 | let m = p.start(); | 307 | let m = p.start(); |
308 | if p.eat(LET_KW) { | 308 | if p.eat(T![let]) { |
309 | patterns::pattern_list(p); | 309 | patterns::pattern_list(p); |
310 | p.expect(EQ); | 310 | p.expect(T![=]); |
311 | } | 311 | } |
312 | expr_no_struct(p); | 312 | expr_no_struct(p); |
313 | m.complete(p, CONDITION); | 313 | m.complete(p, CONDITION); |
@@ -319,11 +319,11 @@ fn cond(p: &mut Parser) { | |||
319 | // match S {}; | 319 | // match S {}; |
320 | // } | 320 | // } |
321 | fn match_expr(p: &mut Parser) -> CompletedMarker { | 321 | fn match_expr(p: &mut Parser) -> CompletedMarker { |
322 | assert!(p.at(MATCH_KW)); | 322 | assert!(p.at(T![match])); |
323 | let m = p.start(); | 323 | let m = p.start(); |
324 | p.bump(); | 324 | p.bump(); |
325 | expr_no_struct(p); | 325 | expr_no_struct(p); |
326 | if p.at(L_CURLY) { | 326 | if p.at(T!['{']) { |
327 | match_arm_list(p); | 327 | match_arm_list(p); |
328 | } else { | 328 | } else { |
329 | p.error("expected `{`") | 329 | p.error("expected `{`") |
@@ -332,9 +332,9 @@ fn match_expr(p: &mut Parser) -> CompletedMarker { | |||
332 | } | 332 | } |
333 | 333 | ||
334 | pub(crate) fn match_arm_list(p: &mut Parser) { | 334 | pub(crate) fn match_arm_list(p: &mut Parser) { |
335 | assert!(p.at(L_CURLY)); | 335 | assert!(p.at(T!['{'])); |
336 | let m = p.start(); | 336 | let m = p.start(); |
337 | p.eat(L_CURLY); | 337 | p.eat(T!['{']); |
338 | 338 | ||
339 | // test match_arms_inner_attribute | 339 | // test match_arms_inner_attribute |
340 | // fn foo() { | 340 | // fn foo() { |
@@ -347,8 +347,8 @@ pub(crate) fn match_arm_list(p: &mut Parser) { | |||
347 | // } | 347 | // } |
348 | attributes::inner_attributes(p); | 348 | attributes::inner_attributes(p); |
349 | 349 | ||
350 | while !p.at(EOF) && !p.at(R_CURLY) { | 350 | while !p.at(EOF) && !p.at(T!['}']) { |
351 | if p.at(L_CURLY) { | 351 | if p.at(T!['{']) { |
352 | error_block(p, "expected match arm"); | 352 | error_block(p, "expected match arm"); |
353 | continue; | 353 | continue; |
354 | } | 354 | } |
@@ -362,12 +362,12 @@ pub(crate) fn match_arm_list(p: &mut Parser) { | |||
362 | // } | 362 | // } |
363 | // } | 363 | // } |
364 | if match_arm(p).is_block() { | 364 | if match_arm(p).is_block() { |
365 | p.eat(COMMA); | 365 | p.eat(T![,]); |
366 | } else if !p.at(R_CURLY) { | 366 | } else if !p.at(T!['}']) { |
367 | p.expect(COMMA); | 367 | p.expect(T![,]); |
368 | } | 368 | } |
369 | } | 369 | } |
370 | p.expect(R_CURLY); | 370 | p.expect(T!['}']); |
371 | m.complete(p, MATCH_ARM_LIST); | 371 | m.complete(p, MATCH_ARM_LIST); |
372 | } | 372 | } |
373 | 373 | ||
@@ -399,10 +399,10 @@ fn match_arm(p: &mut Parser) -> BlockLike { | |||
399 | attributes::outer_attributes(p); | 399 | attributes::outer_attributes(p); |
400 | 400 | ||
401 | patterns::pattern_list_r(p, TokenSet::empty()); | 401 | patterns::pattern_list_r(p, TokenSet::empty()); |
402 | if p.at(IF_KW) { | 402 | if p.at(T![if]) { |
403 | match_guard(p); | 403 | match_guard(p); |
404 | } | 404 | } |
405 | p.expect(FAT_ARROW); | 405 | p.expect(T![=>]); |
406 | let blocklike = expr_stmt(p).1; | 406 | let blocklike = expr_stmt(p).1; |
407 | m.complete(p, MATCH_ARM); | 407 | m.complete(p, MATCH_ARM); |
408 | blocklike | 408 | blocklike |
@@ -415,7 +415,7 @@ fn match_arm(p: &mut Parser) -> BlockLike { | |||
415 | // } | 415 | // } |
416 | // } | 416 | // } |
417 | fn match_guard(p: &mut Parser) -> CompletedMarker { | 417 | fn match_guard(p: &mut Parser) -> CompletedMarker { |
418 | assert!(p.at(IF_KW)); | 418 | assert!(p.at(T![if])); |
419 | let m = p.start(); | 419 | let m = p.start(); |
420 | p.bump(); | 420 | p.bump(); |
421 | expr(p); | 421 | expr(p); |
@@ -429,7 +429,7 @@ fn match_guard(p: &mut Parser) -> CompletedMarker { | |||
429 | // 'label: {}; | 429 | // 'label: {}; |
430 | // } | 430 | // } |
431 | fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 431 | fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
432 | assert!(p.at(L_CURLY)); | 432 | assert!(p.at(T!['{'])); |
433 | let m = m.unwrap_or_else(|| p.start()); | 433 | let m = m.unwrap_or_else(|| p.start()); |
434 | block(p); | 434 | block(p); |
435 | m.complete(p, BLOCK_EXPR) | 435 | m.complete(p, BLOCK_EXPR) |
@@ -441,7 +441,7 @@ fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | |||
441 | // return 92; | 441 | // return 92; |
442 | // } | 442 | // } |
443 | fn return_expr(p: &mut Parser) -> CompletedMarker { | 443 | fn return_expr(p: &mut Parser) -> CompletedMarker { |
444 | assert!(p.at(RETURN_KW)); | 444 | assert!(p.at(T![return])); |
445 | let m = p.start(); | 445 | let m = p.start(); |
446 | p.bump(); | 446 | p.bump(); |
447 | if p.at_ts(EXPR_FIRST) { | 447 | if p.at_ts(EXPR_FIRST) { |
@@ -458,7 +458,7 @@ fn return_expr(p: &mut Parser) -> CompletedMarker { | |||
458 | // } | 458 | // } |
459 | // } | 459 | // } |
460 | fn continue_expr(p: &mut Parser) -> CompletedMarker { | 460 | fn continue_expr(p: &mut Parser) -> CompletedMarker { |
461 | assert!(p.at(CONTINUE_KW)); | 461 | assert!(p.at(T![continue])); |
462 | let m = p.start(); | 462 | let m = p.start(); |
463 | p.bump(); | 463 | p.bump(); |
464 | p.eat(LIFETIME); | 464 | p.eat(LIFETIME); |
@@ -475,7 +475,7 @@ fn continue_expr(p: &mut Parser) -> CompletedMarker { | |||
475 | // } | 475 | // } |
476 | // } | 476 | // } |
477 | fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { | 477 | fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { |
478 | assert!(p.at(BREAK_KW)); | 478 | assert!(p.at(T![break])); |
479 | let m = p.start(); | 479 | let m = p.start(); |
480 | p.bump(); | 480 | p.bump(); |
481 | p.eat(LIFETIME); | 481 | p.eat(LIFETIME); |
@@ -486,7 +486,7 @@ fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { | |||
486 | // for i in break {} | 486 | // for i in break {} |
487 | // match break {} | 487 | // match break {} |
488 | // } | 488 | // } |
489 | if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(L_CURLY)) { | 489 | if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) { |
490 | expr(p); | 490 | expr(p); |
491 | } | 491 | } |
492 | m.complete(p, BREAK_EXPR) | 492 | m.complete(p, BREAK_EXPR) |
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index 97f8122a9..6728e395f 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs | |||
@@ -19,7 +19,7 @@ use super::*; | |||
19 | // struct S; | 19 | // struct S; |
20 | pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { | 20 | pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { |
21 | attributes::inner_attributes(p); | 21 | attributes::inner_attributes(p); |
22 | while !p.at(EOF) && !(stop_on_r_curly && p.at(R_CURLY)) { | 22 | while !p.at(EOF) && !(stop_on_r_curly && p.at(T!['}'])) { |
23 | item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) | 23 | item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) |
24 | } | 24 | } |
25 | } | 25 | } |
@@ -45,20 +45,20 @@ pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemF | |||
45 | match macro_call(p) { | 45 | match macro_call(p) { |
46 | BlockLike::Block => (), | 46 | BlockLike::Block => (), |
47 | BlockLike::NotBlock => { | 47 | BlockLike::NotBlock => { |
48 | p.expect(SEMI); | 48 | p.expect(T![;]); |
49 | } | 49 | } |
50 | } | 50 | } |
51 | m.complete(p, MACRO_CALL); | 51 | m.complete(p, MACRO_CALL); |
52 | } else { | 52 | } else { |
53 | m.abandon(p); | 53 | m.abandon(p); |
54 | if p.at(L_CURLY) { | 54 | if p.at(T!['{']) { |
55 | error_block(p, "expected an item"); | 55 | error_block(p, "expected an item"); |
56 | } else if p.at(R_CURLY) && !stop_on_r_curly { | 56 | } else if p.at(T!['}']) && !stop_on_r_curly { |
57 | let e = p.start(); | 57 | let e = p.start(); |
58 | p.error("unmatched `}`"); | 58 | p.error("unmatched `}`"); |
59 | p.bump(); | 59 | p.bump(); |
60 | e.complete(p, ERROR); | 60 | e.complete(p, ERROR); |
61 | } else if !p.at(EOF) && !p.at(R_CURLY) { | 61 | } else if !p.at(EOF) && !p.at(T!['}']) { |
62 | p.err_and_bump("expected an item"); | 62 | p.err_and_bump("expected an item"); |
63 | } else { | 63 | } else { |
64 | p.error("expected an item"); | 64 | p.error("expected an item"); |
@@ -79,32 +79,32 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
79 | let mut has_mods = false; | 79 | let mut has_mods = false; |
80 | 80 | ||
81 | // modifiers | 81 | // modifiers |
82 | has_mods |= p.eat(CONST_KW); | 82 | has_mods |= p.eat(T![const]); |
83 | 83 | ||
84 | // test_err unsafe_block_in_mod | 84 | // test_err unsafe_block_in_mod |
85 | // fn foo(){} unsafe { } fn bar(){} | 85 | // fn foo(){} unsafe { } fn bar(){} |
86 | if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY { | 86 | if p.at(T![unsafe]) && p.nth(1) != T!['{'] { |
87 | p.eat(UNSAFE_KW); | 87 | p.eat(T![unsafe]); |
88 | has_mods = true; | 88 | has_mods = true; |
89 | } | 89 | } |
90 | 90 | ||
91 | // test_err async_without_semicolon | 91 | // test_err async_without_semicolon |
92 | // fn foo() { let _ = async {} } | 92 | // fn foo() { let _ = async {} } |
93 | if p.at(ASYNC_KW) && p.nth(1) != L_CURLY && p.nth(1) != MOVE_KW && p.nth(1) != PIPE { | 93 | if p.at(T![async]) && p.nth(1) != T!['{'] && p.nth(1) != T![move] && p.nth(1) != T![|] { |
94 | p.eat(ASYNC_KW); | 94 | p.eat(T![async]); |
95 | has_mods = true; | 95 | has_mods = true; |
96 | } | 96 | } |
97 | 97 | ||
98 | if p.at(EXTERN_KW) { | 98 | if p.at(T![extern]) { |
99 | has_mods = true; | 99 | has_mods = true; |
100 | abi(p); | 100 | abi(p); |
101 | } | 101 | } |
102 | if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW { | 102 | if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == T![trait] { |
103 | p.bump_remap(AUTO_KW); | 103 | p.bump_remap(T![auto]); |
104 | has_mods = true; | 104 | has_mods = true; |
105 | } | 105 | } |
106 | if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW { | 106 | if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == T![impl ] { |
107 | p.bump_remap(DEFAULT_KW); | 107 | p.bump_remap(T![default]); |
108 | has_mods = true; | 108 | has_mods = true; |
109 | } | 109 | } |
110 | 110 | ||
@@ -135,7 +135,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
135 | // test_err wrong_order_fns | 135 | // test_err wrong_order_fns |
136 | // async unsafe fn foo() {} | 136 | // async unsafe fn foo() {} |
137 | // unsafe const fn bar() {} | 137 | // unsafe const fn bar() {} |
138 | FN_KW => { | 138 | T![fn] => { |
139 | fn_def(p, flavor); | 139 | fn_def(p, flavor); |
140 | m.complete(p, FN_DEF); | 140 | m.complete(p, FN_DEF); |
141 | } | 141 | } |
@@ -148,7 +148,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
148 | 148 | ||
149 | // test unsafe_auto_trait | 149 | // test unsafe_auto_trait |
150 | // unsafe auto trait T {} | 150 | // unsafe auto trait T {} |
151 | TRAIT_KW => { | 151 | T![trait] => { |
152 | traits::trait_def(p); | 152 | traits::trait_def(p); |
153 | m.complete(p, TRAIT_DEF); | 153 | m.complete(p, TRAIT_DEF); |
154 | } | 154 | } |
@@ -161,7 +161,7 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
161 | 161 | ||
162 | // test unsafe_default_impl | 162 | // test unsafe_default_impl |
163 | // unsafe default impl Foo {} | 163 | // unsafe default impl Foo {} |
164 | IMPL_KW => { | 164 | T![impl ] => { |
165 | traits::impl_block(p); | 165 | traits::impl_block(p); |
166 | m.complete(p, IMPL_BLOCK); | 166 | m.complete(p, IMPL_BLOCK); |
167 | } | 167 | } |
@@ -186,10 +186,10 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
186 | match p.current() { | 186 | match p.current() { |
187 | // test extern_crate | 187 | // test extern_crate |
188 | // extern crate foo; | 188 | // extern crate foo; |
189 | EXTERN_KW if la == CRATE_KW => extern_crate_item(p, m), | 189 | T![extern] if la == T![crate] => extern_crate_item(p, m), |
190 | TYPE_KW => type_def(p, m), | 190 | T![type] => type_def(p, m), |
191 | MOD_KW => mod_item(p, m), | 191 | T![mod] => mod_item(p, m), |
192 | STRUCT_KW => { | 192 | T![struct] => { |
193 | // test struct_items | 193 | // test struct_items |
194 | // struct Foo; | 194 | // struct Foo; |
195 | // struct Foo {} | 195 | // struct Foo {} |
@@ -199,7 +199,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
199 | // a: i32, | 199 | // a: i32, |
200 | // b: f32, | 200 | // b: f32, |
201 | // } | 201 | // } |
202 | nominal::struct_def(p, m, STRUCT_KW); | 202 | nominal::struct_def(p, m, T![struct]); |
203 | } | 203 | } |
204 | IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { | 204 | IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { |
205 | // test union_items | 205 | // test union_items |
@@ -208,16 +208,16 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
208 | // a: i32, | 208 | // a: i32, |
209 | // b: f32, | 209 | // b: f32, |
210 | // } | 210 | // } |
211 | nominal::struct_def(p, m, UNION_KW); | 211 | nominal::struct_def(p, m, T![union]); |
212 | } | 212 | } |
213 | ENUM_KW => nominal::enum_def(p, m), | 213 | T![enum] => nominal::enum_def(p, m), |
214 | USE_KW => use_item::use_item(p, m), | 214 | T![use] => use_item::use_item(p, m), |
215 | CONST_KW if (la == IDENT || la == MUT_KW) => consts::const_def(p, m), | 215 | T![const] if (la == IDENT || la == T![mut]) => consts::const_def(p, m), |
216 | STATIC_KW => consts::static_def(p, m), | 216 | T![static] => consts::static_def(p, m), |
217 | // test extern_block | 217 | // test extern_block |
218 | // extern {} | 218 | // extern {} |
219 | EXTERN_KW | 219 | T![extern] |
220 | if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) => | 220 | if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) => |
221 | { | 221 | { |
222 | abi(p); | 222 | abi(p); |
223 | extern_item_list(p); | 223 | extern_item_list(p); |
@@ -225,7 +225,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
225 | } | 225 | } |
226 | _ => return Err(m), | 226 | _ => return Err(m), |
227 | }; | 227 | }; |
228 | if p.at(SEMI) { | 228 | if p.at(T![;]) { |
229 | p.err_and_bump( | 229 | p.err_and_bump( |
230 | "expected item, found `;`\n\ | 230 | "expected item, found `;`\n\ |
231 | consider removing this semicolon", | 231 | consider removing this semicolon", |
@@ -235,27 +235,27 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> { | |||
235 | } | 235 | } |
236 | 236 | ||
237 | fn extern_crate_item(p: &mut Parser, m: Marker) { | 237 | fn extern_crate_item(p: &mut Parser, m: Marker) { |
238 | assert!(p.at(EXTERN_KW)); | 238 | assert!(p.at(T![extern])); |
239 | p.bump(); | 239 | p.bump(); |
240 | assert!(p.at(CRATE_KW)); | 240 | assert!(p.at(T![crate])); |
241 | p.bump(); | 241 | p.bump(); |
242 | name_ref(p); | 242 | name_ref(p); |
243 | opt_alias(p); | 243 | opt_alias(p); |
244 | p.expect(SEMI); | 244 | p.expect(T![;]); |
245 | m.complete(p, EXTERN_CRATE_ITEM); | 245 | m.complete(p, EXTERN_CRATE_ITEM); |
246 | } | 246 | } |
247 | 247 | ||
248 | pub(crate) fn extern_item_list(p: &mut Parser) { | 248 | pub(crate) fn extern_item_list(p: &mut Parser) { |
249 | assert!(p.at(L_CURLY)); | 249 | assert!(p.at(T!['{'])); |
250 | let m = p.start(); | 250 | let m = p.start(); |
251 | p.bump(); | 251 | p.bump(); |
252 | mod_contents(p, true); | 252 | mod_contents(p, true); |
253 | p.expect(R_CURLY); | 253 | p.expect(T!['}']); |
254 | m.complete(p, EXTERN_ITEM_LIST); | 254 | m.complete(p, EXTERN_ITEM_LIST); |
255 | } | 255 | } |
256 | 256 | ||
257 | fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | 257 | fn fn_def(p: &mut Parser, flavor: ItemFlavor) { |
258 | assert!(p.at(FN_KW)); | 258 | assert!(p.at(T![fn])); |
259 | p.bump(); | 259 | p.bump(); |
260 | 260 | ||
261 | name_r(p, ITEM_RECOVERY_SET); | 261 | name_r(p, ITEM_RECOVERY_SET); |
@@ -263,7 +263,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | |||
263 | // fn foo<T: Clone + Copy>(){} | 263 | // fn foo<T: Clone + Copy>(){} |
264 | type_params::opt_type_param_list(p); | 264 | type_params::opt_type_param_list(p); |
265 | 265 | ||
266 | if p.at(L_PAREN) { | 266 | if p.at(T!['(']) { |
267 | match flavor { | 267 | match flavor { |
268 | ItemFlavor::Mod => params::param_list(p), | 268 | ItemFlavor::Mod => params::param_list(p), |
269 | ItemFlavor::Trait => params::param_list_opt_patterns(p), | 269 | ItemFlavor::Trait => params::param_list_opt_patterns(p), |
@@ -282,7 +282,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | |||
282 | 282 | ||
283 | // test fn_decl | 283 | // test fn_decl |
284 | // trait T { fn foo(); } | 284 | // trait T { fn foo(); } |
285 | if p.at(SEMI) { | 285 | if p.at(T![;]) { |
286 | p.bump(); | 286 | p.bump(); |
287 | } else { | 287 | } else { |
288 | expressions::block(p) | 288 | expressions::block(p) |
@@ -292,7 +292,7 @@ fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | |||
292 | // test type_item | 292 | // test type_item |
293 | // type Foo = Bar; | 293 | // type Foo = Bar; |
294 | fn type_def(p: &mut Parser, m: Marker) { | 294 | fn type_def(p: &mut Parser, m: Marker) { |
295 | assert!(p.at(TYPE_KW)); | 295 | assert!(p.at(T![type])); |
296 | p.bump(); | 296 | p.bump(); |
297 | 297 | ||
298 | name(p); | 298 | name(p); |
@@ -301,7 +301,7 @@ fn type_def(p: &mut Parser, m: Marker) { | |||
301 | // type Result<T> = (); | 301 | // type Result<T> = (); |
302 | type_params::opt_type_param_list(p); | 302 | type_params::opt_type_param_list(p); |
303 | 303 | ||
304 | if p.at(COLON) { | 304 | if p.at(T![:]) { |
305 | type_params::bounds(p); | 305 | type_params::bounds(p); |
306 | } | 306 | } |
307 | 307 | ||
@@ -309,32 +309,32 @@ fn type_def(p: &mut Parser, m: Marker) { | |||
309 | // type Foo where Foo: Copy = (); | 309 | // type Foo where Foo: Copy = (); |
310 | type_params::opt_where_clause(p); | 310 | type_params::opt_where_clause(p); |
311 | 311 | ||
312 | if p.eat(EQ) { | 312 | if p.eat(T![=]) { |
313 | types::type_(p); | 313 | types::type_(p); |
314 | } | 314 | } |
315 | p.expect(SEMI); | 315 | p.expect(T![;]); |
316 | m.complete(p, TYPE_ALIAS_DEF); | 316 | m.complete(p, TYPE_ALIAS_DEF); |
317 | } | 317 | } |
318 | 318 | ||
319 | pub(crate) fn mod_item(p: &mut Parser, m: Marker) { | 319 | pub(crate) fn mod_item(p: &mut Parser, m: Marker) { |
320 | assert!(p.at(MOD_KW)); | 320 | assert!(p.at(T![mod])); |
321 | p.bump(); | 321 | p.bump(); |
322 | 322 | ||
323 | name(p); | 323 | name(p); |
324 | if p.at(L_CURLY) { | 324 | if p.at(T!['{']) { |
325 | mod_item_list(p); | 325 | mod_item_list(p); |
326 | } else if !p.eat(SEMI) { | 326 | } else if !p.eat(T![;]) { |
327 | p.error("expected `;` or `{`"); | 327 | p.error("expected `;` or `{`"); |
328 | } | 328 | } |
329 | m.complete(p, MODULE); | 329 | m.complete(p, MODULE); |
330 | } | 330 | } |
331 | 331 | ||
332 | pub(crate) fn mod_item_list(p: &mut Parser) { | 332 | pub(crate) fn mod_item_list(p: &mut Parser) { |
333 | assert!(p.at(L_CURLY)); | 333 | assert!(p.at(T!['{'])); |
334 | let m = p.start(); | 334 | let m = p.start(); |
335 | p.bump(); | 335 | p.bump(); |
336 | mod_contents(p, true); | 336 | mod_contents(p, true); |
337 | p.expect(R_CURLY); | 337 | p.expect(T!['}']); |
338 | m.complete(p, ITEM_LIST); | 338 | m.complete(p, ITEM_LIST); |
339 | } | 339 | } |
340 | 340 | ||
@@ -345,16 +345,16 @@ fn macro_call(p: &mut Parser) -> BlockLike { | |||
345 | } | 345 | } |
346 | 346 | ||
347 | pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { | 347 | pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { |
348 | p.expect(EXCL); | 348 | p.expect(T![!]); |
349 | if p.at(IDENT) { | 349 | if p.at(IDENT) { |
350 | name(p); | 350 | name(p); |
351 | } | 351 | } |
352 | match p.current() { | 352 | match p.current() { |
353 | L_CURLY => { | 353 | T!['{'] => { |
354 | token_tree(p); | 354 | token_tree(p); |
355 | BlockLike::Block | 355 | BlockLike::Block |
356 | } | 356 | } |
357 | L_PAREN | L_BRACK => { | 357 | T!['('] | T!['['] => { |
358 | token_tree(p); | 358 | token_tree(p); |
359 | BlockLike::NotBlock | 359 | BlockLike::NotBlock |
360 | } | 360 | } |
@@ -367,22 +367,22 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { | |||
367 | 367 | ||
368 | pub(crate) fn token_tree(p: &mut Parser) { | 368 | pub(crate) fn token_tree(p: &mut Parser) { |
369 | let closing_paren_kind = match p.current() { | 369 | let closing_paren_kind = match p.current() { |
370 | L_CURLY => R_CURLY, | 370 | T!['{'] => T!['}'], |
371 | L_PAREN => R_PAREN, | 371 | T!['('] => T![')'], |
372 | L_BRACK => R_BRACK, | 372 | T!['['] => T![']'], |
373 | _ => unreachable!(), | 373 | _ => unreachable!(), |
374 | }; | 374 | }; |
375 | let m = p.start(); | 375 | let m = p.start(); |
376 | p.bump(); | 376 | p.bump(); |
377 | while !p.at(EOF) && !p.at(closing_paren_kind) { | 377 | while !p.at(EOF) && !p.at(closing_paren_kind) { |
378 | match p.current() { | 378 | match p.current() { |
379 | L_CURLY | L_PAREN | L_BRACK => token_tree(p), | 379 | T!['{'] | T!['('] | T!['['] => token_tree(p), |
380 | R_CURLY => { | 380 | T!['}'] => { |
381 | p.error("unmatched `}`"); | 381 | p.error("unmatched `}`"); |
382 | m.complete(p, TOKEN_TREE); | 382 | m.complete(p, TOKEN_TREE); |
383 | return; | 383 | return; |
384 | } | 384 | } |
385 | R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"), | 385 | T![')'] | T![']'] => p.err_and_bump("unmatched brace"), |
386 | _ => p.bump_raw(), | 386 | _ => p.bump_raw(), |
387 | } | 387 | } |
388 | } | 388 | } |
diff --git a/crates/ra_parser/src/grammar/items/consts.rs b/crates/ra_parser/src/grammar/items/consts.rs index 1f802246f..b4908ebba 100644 --- a/crates/ra_parser/src/grammar/items/consts.rs +++ b/crates/ra_parser/src/grammar/items/consts.rs | |||
@@ -1,22 +1,22 @@ | |||
1 | use super::*; | 1 | use super::*; |
2 | 2 | ||
3 | pub(super) fn static_def(p: &mut Parser, m: Marker) { | 3 | pub(super) fn static_def(p: &mut Parser, m: Marker) { |
4 | const_or_static(p, m, STATIC_KW, STATIC_DEF) | 4 | const_or_static(p, m, T![static], STATIC_DEF) |
5 | } | 5 | } |
6 | 6 | ||
7 | pub(super) fn const_def(p: &mut Parser, m: Marker) { | 7 | pub(super) fn const_def(p: &mut Parser, m: Marker) { |
8 | const_or_static(p, m, CONST_KW, CONST_DEF) | 8 | const_or_static(p, m, T![const], CONST_DEF) |
9 | } | 9 | } |
10 | 10 | ||
11 | fn const_or_static(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { | 11 | fn const_or_static(p: &mut Parser, m: Marker, kw: SyntaxKind, def: SyntaxKind) { |
12 | assert!(p.at(kw)); | 12 | assert!(p.at(kw)); |
13 | p.bump(); | 13 | p.bump(); |
14 | p.eat(MUT_KW); // FIXME: validator to forbid const mut | 14 | p.eat(T![mut]); // FIXME: validator to forbid const mut |
15 | name(p); | 15 | name(p); |
16 | types::ascription(p); | 16 | types::ascription(p); |
17 | if p.eat(EQ) { | 17 | if p.eat(T![=]) { |
18 | expressions::expr(p); | 18 | expressions::expr(p); |
19 | } | 19 | } |
20 | p.expect(SEMI); | 20 | p.expect(T![;]); |
21 | m.complete(p, def); | 21 | m.complete(p, def); |
22 | } | 22 | } |
diff --git a/crates/ra_parser/src/grammar/items/nominal.rs b/crates/ra_parser/src/grammar/items/nominal.rs index e93bd76b8..bd4edab89 100644 --- a/crates/ra_parser/src/grammar/items/nominal.rs +++ b/crates/ra_parser/src/grammar/items/nominal.rs | |||
@@ -1,38 +1,38 @@ | |||
1 | use super::*; | 1 | use super::*; |
2 | 2 | ||
3 | pub(super) fn struct_def(p: &mut Parser, m: Marker, kind: SyntaxKind) { | 3 | pub(super) fn struct_def(p: &mut Parser, m: Marker, kind: SyntaxKind) { |
4 | assert!(p.at(STRUCT_KW) || p.at_contextual_kw("union")); | 4 | assert!(p.at(T![struct]) || p.at_contextual_kw("union")); |
5 | p.bump_remap(kind); | 5 | p.bump_remap(kind); |
6 | 6 | ||
7 | name_r(p, ITEM_RECOVERY_SET); | 7 | name_r(p, ITEM_RECOVERY_SET); |
8 | type_params::opt_type_param_list(p); | 8 | type_params::opt_type_param_list(p); |
9 | match p.current() { | 9 | match p.current() { |
10 | WHERE_KW => { | 10 | T![where] => { |
11 | type_params::opt_where_clause(p); | 11 | type_params::opt_where_clause(p); |
12 | match p.current() { | 12 | match p.current() { |
13 | SEMI => { | 13 | T![;] => { |
14 | p.bump(); | 14 | p.bump(); |
15 | } | 15 | } |
16 | L_CURLY => named_field_def_list(p), | 16 | T!['{'] => named_field_def_list(p), |
17 | _ => { | 17 | _ => { |
18 | //FIXME: special case `(` error message | 18 | //FIXME: special case `(` error message |
19 | p.error("expected `;` or `{`"); | 19 | p.error("expected `;` or `{`"); |
20 | } | 20 | } |
21 | } | 21 | } |
22 | } | 22 | } |
23 | SEMI if kind == STRUCT_KW => { | 23 | T![;] if kind == T![struct] => { |
24 | p.bump(); | 24 | p.bump(); |
25 | } | 25 | } |
26 | L_CURLY => named_field_def_list(p), | 26 | T!['{'] => named_field_def_list(p), |
27 | L_PAREN if kind == STRUCT_KW => { | 27 | T!['('] if kind == T![struct] => { |
28 | pos_field_def_list(p); | 28 | pos_field_def_list(p); |
29 | // test tuple_struct_where | 29 | // test tuple_struct_where |
30 | // struct Test<T>(T) where T: Clone; | 30 | // struct Test<T>(T) where T: Clone; |
31 | // struct Test<T>(T); | 31 | // struct Test<T>(T); |
32 | type_params::opt_where_clause(p); | 32 | type_params::opt_where_clause(p); |
33 | p.expect(SEMI); | 33 | p.expect(T![;]); |
34 | } | 34 | } |
35 | _ if kind == STRUCT_KW => { | 35 | _ if kind == T![struct] => { |
36 | p.error("expected `;`, `{`, or `(`"); | 36 | p.error("expected `;`, `{`, or `(`"); |
37 | } | 37 | } |
38 | _ => { | 38 | _ => { |
@@ -43,12 +43,12 @@ pub(super) fn struct_def(p: &mut Parser, m: Marker, kind: SyntaxKind) { | |||
43 | } | 43 | } |
44 | 44 | ||
45 | pub(super) fn enum_def(p: &mut Parser, m: Marker) { | 45 | pub(super) fn enum_def(p: &mut Parser, m: Marker) { |
46 | assert!(p.at(ENUM_KW)); | 46 | assert!(p.at(T![enum])); |
47 | p.bump(); | 47 | p.bump(); |
48 | name_r(p, ITEM_RECOVERY_SET); | 48 | name_r(p, ITEM_RECOVERY_SET); |
49 | type_params::opt_type_param_list(p); | 49 | type_params::opt_type_param_list(p); |
50 | type_params::opt_where_clause(p); | 50 | type_params::opt_where_clause(p); |
51 | if p.at(L_CURLY) { | 51 | if p.at(T!['{']) { |
52 | enum_variant_list(p); | 52 | enum_variant_list(p); |
53 | } else { | 53 | } else { |
54 | p.error("expected `{`") | 54 | p.error("expected `{`") |
@@ -57,11 +57,11 @@ pub(super) fn enum_def(p: &mut Parser, m: Marker) { | |||
57 | } | 57 | } |
58 | 58 | ||
59 | pub(crate) fn enum_variant_list(p: &mut Parser) { | 59 | pub(crate) fn enum_variant_list(p: &mut Parser) { |
60 | assert!(p.at(L_CURLY)); | 60 | assert!(p.at(T!['{'])); |
61 | let m = p.start(); | 61 | let m = p.start(); |
62 | p.bump(); | 62 | p.bump(); |
63 | while !p.at(EOF) && !p.at(R_CURLY) { | 63 | while !p.at(EOF) && !p.at(T!['}']) { |
64 | if p.at(L_CURLY) { | 64 | if p.at(T!['{']) { |
65 | error_block(p, "expected enum variant"); | 65 | error_block(p, "expected enum variant"); |
66 | continue; | 66 | continue; |
67 | } | 67 | } |
@@ -70,9 +70,9 @@ pub(crate) fn enum_variant_list(p: &mut Parser) { | |||
70 | if p.at(IDENT) { | 70 | if p.at(IDENT) { |
71 | name(p); | 71 | name(p); |
72 | match p.current() { | 72 | match p.current() { |
73 | L_CURLY => named_field_def_list(p), | 73 | T!['{'] => named_field_def_list(p), |
74 | L_PAREN => pos_field_def_list(p), | 74 | T!['('] => pos_field_def_list(p), |
75 | EQ => { | 75 | T![=] => { |
76 | p.bump(); | 76 | p.bump(); |
77 | expressions::expr(p); | 77 | expressions::expr(p); |
78 | } | 78 | } |
@@ -83,29 +83,29 @@ pub(crate) fn enum_variant_list(p: &mut Parser) { | |||
83 | var.abandon(p); | 83 | var.abandon(p); |
84 | p.err_and_bump("expected enum variant"); | 84 | p.err_and_bump("expected enum variant"); |
85 | } | 85 | } |
86 | if !p.at(R_CURLY) { | 86 | if !p.at(T!['}']) { |
87 | p.expect(COMMA); | 87 | p.expect(T![,]); |
88 | } | 88 | } |
89 | } | 89 | } |
90 | p.expect(R_CURLY); | 90 | p.expect(T!['}']); |
91 | m.complete(p, ENUM_VARIANT_LIST); | 91 | m.complete(p, ENUM_VARIANT_LIST); |
92 | } | 92 | } |
93 | 93 | ||
94 | pub(crate) fn named_field_def_list(p: &mut Parser) { | 94 | pub(crate) fn named_field_def_list(p: &mut Parser) { |
95 | assert!(p.at(L_CURLY)); | 95 | assert!(p.at(T!['{'])); |
96 | let m = p.start(); | 96 | let m = p.start(); |
97 | p.bump(); | 97 | p.bump(); |
98 | while !p.at(R_CURLY) && !p.at(EOF) { | 98 | while !p.at(T!['}']) && !p.at(EOF) { |
99 | if p.at(L_CURLY) { | 99 | if p.at(T!['{']) { |
100 | error_block(p, "expected field"); | 100 | error_block(p, "expected field"); |
101 | continue; | 101 | continue; |
102 | } | 102 | } |
103 | named_field_def(p); | 103 | named_field_def(p); |
104 | if !p.at(R_CURLY) { | 104 | if !p.at(T!['}']) { |
105 | p.expect(COMMA); | 105 | p.expect(T![,]); |
106 | } | 106 | } |
107 | } | 107 | } |
108 | p.expect(R_CURLY); | 108 | p.expect(T!['}']); |
109 | m.complete(p, NAMED_FIELD_DEF_LIST); | 109 | m.complete(p, NAMED_FIELD_DEF_LIST); |
110 | 110 | ||
111 | fn named_field_def(p: &mut Parser) { | 111 | fn named_field_def(p: &mut Parser) { |
@@ -119,7 +119,7 @@ pub(crate) fn named_field_def_list(p: &mut Parser) { | |||
119 | opt_visibility(p); | 119 | opt_visibility(p); |
120 | if p.at(IDENT) { | 120 | if p.at(IDENT) { |
121 | name(p); | 121 | name(p); |
122 | p.expect(COLON); | 122 | p.expect(T![:]); |
123 | types::type_(p); | 123 | types::type_(p); |
124 | m.complete(p, NAMED_FIELD_DEF); | 124 | m.complete(p, NAMED_FIELD_DEF); |
125 | } else { | 125 | } else { |
@@ -130,12 +130,12 @@ pub(crate) fn named_field_def_list(p: &mut Parser) { | |||
130 | } | 130 | } |
131 | 131 | ||
132 | fn pos_field_def_list(p: &mut Parser) { | 132 | fn pos_field_def_list(p: &mut Parser) { |
133 | assert!(p.at(L_PAREN)); | 133 | assert!(p.at(T!['('])); |
134 | let m = p.start(); | 134 | let m = p.start(); |
135 | if !p.expect(L_PAREN) { | 135 | if !p.expect(T!['(']) { |
136 | return; | 136 | return; |
137 | } | 137 | } |
138 | while !p.at(R_PAREN) && !p.at(EOF) { | 138 | while !p.at(T![')']) && !p.at(EOF) { |
139 | let m = p.start(); | 139 | let m = p.start(); |
140 | // test pos_field_attrs | 140 | // test pos_field_attrs |
141 | // struct S ( | 141 | // struct S ( |
@@ -156,10 +156,10 @@ fn pos_field_def_list(p: &mut Parser) { | |||
156 | types::type_(p); | 156 | types::type_(p); |
157 | m.complete(p, POS_FIELD_DEF); | 157 | m.complete(p, POS_FIELD_DEF); |
158 | 158 | ||
159 | if !p.at(R_PAREN) { | 159 | if !p.at(T![')']) { |
160 | p.expect(COMMA); | 160 | p.expect(T![,]); |
161 | } | 161 | } |
162 | } | 162 | } |
163 | p.expect(R_PAREN); | 163 | p.expect(T![')']); |
164 | m.complete(p, POS_FIELD_DEF_LIST); | 164 | m.complete(p, POS_FIELD_DEF_LIST); |
165 | } | 165 | } |
diff --git a/crates/ra_parser/src/grammar/items/traits.rs b/crates/ra_parser/src/grammar/items/traits.rs index d03a6be0d..09ab3bfd4 100644 --- a/crates/ra_parser/src/grammar/items/traits.rs +++ b/crates/ra_parser/src/grammar/items/traits.rs | |||
@@ -4,15 +4,15 @@ use super::*; | |||
4 | // trait T<U>: Hash + Clone where U: Copy {} | 4 | // trait T<U>: Hash + Clone where U: Copy {} |
5 | // trait X<U: Debug + Display>: Hash + Clone where U: Copy {} | 5 | // trait X<U: Debug + Display>: Hash + Clone where U: Copy {} |
6 | pub(super) fn trait_def(p: &mut Parser) { | 6 | pub(super) fn trait_def(p: &mut Parser) { |
7 | assert!(p.at(TRAIT_KW)); | 7 | assert!(p.at(T![trait])); |
8 | p.bump(); | 8 | p.bump(); |
9 | name_r(p, ITEM_RECOVERY_SET); | 9 | name_r(p, ITEM_RECOVERY_SET); |
10 | type_params::opt_type_param_list(p); | 10 | type_params::opt_type_param_list(p); |
11 | if p.at(COLON) { | 11 | if p.at(T![:]) { |
12 | type_params::bounds(p); | 12 | type_params::bounds(p); |
13 | } | 13 | } |
14 | type_params::opt_where_clause(p); | 14 | type_params::opt_where_clause(p); |
15 | if p.at(L_CURLY) { | 15 | if p.at(T!['{']) { |
16 | trait_item_list(p); | 16 | trait_item_list(p); |
17 | } else { | 17 | } else { |
18 | p.error("expected `{`"); | 18 | p.error("expected `{`"); |
@@ -27,24 +27,24 @@ pub(super) fn trait_def(p: &mut Parser) { | |||
27 | // fn bar(&self); | 27 | // fn bar(&self); |
28 | // } | 28 | // } |
29 | pub(crate) fn trait_item_list(p: &mut Parser) { | 29 | pub(crate) fn trait_item_list(p: &mut Parser) { |
30 | assert!(p.at(L_CURLY)); | 30 | assert!(p.at(T!['{'])); |
31 | let m = p.start(); | 31 | let m = p.start(); |
32 | p.bump(); | 32 | p.bump(); |
33 | while !p.at(EOF) && !p.at(R_CURLY) { | 33 | while !p.at(EOF) && !p.at(T!['}']) { |
34 | if p.at(L_CURLY) { | 34 | if p.at(T!['{']) { |
35 | error_block(p, "expected an item"); | 35 | error_block(p, "expected an item"); |
36 | continue; | 36 | continue; |
37 | } | 37 | } |
38 | item_or_macro(p, true, ItemFlavor::Trait); | 38 | item_or_macro(p, true, ItemFlavor::Trait); |
39 | } | 39 | } |
40 | p.expect(R_CURLY); | 40 | p.expect(T!['}']); |
41 | m.complete(p, ITEM_LIST); | 41 | m.complete(p, ITEM_LIST); |
42 | } | 42 | } |
43 | 43 | ||
44 | // test impl_block | 44 | // test impl_block |
45 | // impl Foo {} | 45 | // impl Foo {} |
46 | pub(super) fn impl_block(p: &mut Parser) { | 46 | pub(super) fn impl_block(p: &mut Parser) { |
47 | assert!(p.at(IMPL_KW)); | 47 | assert!(p.at(T![impl ])); |
48 | p.bump(); | 48 | p.bump(); |
49 | if choose_type_params_over_qpath(p) { | 49 | if choose_type_params_over_qpath(p) { |
50 | type_params::opt_type_param_list(p); | 50 | type_params::opt_type_param_list(p); |
@@ -55,13 +55,13 @@ pub(super) fn impl_block(p: &mut Parser) { | |||
55 | 55 | ||
56 | // test impl_block_neg | 56 | // test impl_block_neg |
57 | // impl !Send for X {} | 57 | // impl !Send for X {} |
58 | p.eat(EXCL); | 58 | p.eat(T![!]); |
59 | impl_type(p); | 59 | impl_type(p); |
60 | if p.eat(FOR_KW) { | 60 | if p.eat(T![for]) { |
61 | impl_type(p); | 61 | impl_type(p); |
62 | } | 62 | } |
63 | type_params::opt_where_clause(p); | 63 | type_params::opt_where_clause(p); |
64 | if p.at(L_CURLY) { | 64 | if p.at(T!['{']) { |
65 | impl_item_list(p); | 65 | impl_item_list(p); |
66 | } else { | 66 | } else { |
67 | p.error("expected `{`"); | 67 | p.error("expected `{`"); |
@@ -76,7 +76,7 @@ pub(super) fn impl_block(p: &mut Parser) { | |||
76 | // fn bar(&self) {} | 76 | // fn bar(&self) {} |
77 | // } | 77 | // } |
78 | pub(crate) fn impl_item_list(p: &mut Parser) { | 78 | pub(crate) fn impl_item_list(p: &mut Parser) { |
79 | assert!(p.at(L_CURLY)); | 79 | assert!(p.at(T!['{'])); |
80 | let m = p.start(); | 80 | let m = p.start(); |
81 | p.bump(); | 81 | p.bump(); |
82 | // test impl_inner_attributes | 82 | // test impl_inner_attributes |
@@ -87,14 +87,14 @@ pub(crate) fn impl_item_list(p: &mut Parser) { | |||
87 | // } | 87 | // } |
88 | attributes::inner_attributes(p); | 88 | attributes::inner_attributes(p); |
89 | 89 | ||
90 | while !p.at(EOF) && !p.at(R_CURLY) { | 90 | while !p.at(EOF) && !p.at(T!['}']) { |
91 | if p.at(L_CURLY) { | 91 | if p.at(T!['{']) { |
92 | error_block(p, "expected an item"); | 92 | error_block(p, "expected an item"); |
93 | continue; | 93 | continue; |
94 | } | 94 | } |
95 | item_or_macro(p, true, ItemFlavor::Mod); | 95 | item_or_macro(p, true, ItemFlavor::Mod); |
96 | } | 96 | } |
97 | p.expect(R_CURLY); | 97 | p.expect(T!['}']); |
98 | m.complete(p, ITEM_LIST); | 98 | m.complete(p, ITEM_LIST); |
99 | } | 99 | } |
100 | 100 | ||
@@ -114,14 +114,14 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool { | |||
114 | // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`) | 114 | // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`) |
115 | // because this is what is almost always expected in practice, qualified paths in impls | 115 | // because this is what is almost always expected in practice, qualified paths in impls |
116 | // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment. | 116 | // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment. |
117 | if !p.at(L_ANGLE) { | 117 | if !p.at(T![<]) { |
118 | return false; | 118 | return false; |
119 | } | 119 | } |
120 | if p.nth(1) == POUND || p.nth(1) == R_ANGLE { | 120 | if p.nth(1) == T![#] || p.nth(1) == T![>] { |
121 | return true; | 121 | return true; |
122 | } | 122 | } |
123 | (p.nth(1) == LIFETIME || p.nth(1) == IDENT) | 123 | (p.nth(1) == LIFETIME || p.nth(1) == IDENT) |
124 | && (p.nth(2) == R_ANGLE || p.nth(2) == COMMA || p.nth(2) == COLON || p.nth(2) == EQ) | 124 | && (p.nth(2) == T![>] || p.nth(2) == T![,] || p.nth(2) == T![:] || p.nth(2) == T![=]) |
125 | } | 125 | } |
126 | 126 | ||
127 | // test_err impl_type | 127 | // test_err impl_type |
@@ -130,7 +130,7 @@ fn choose_type_params_over_qpath(p: &Parser) -> bool { | |||
130 | // impl impl NotType {} | 130 | // impl impl NotType {} |
131 | // impl Trait2 for impl NotType {} | 131 | // impl Trait2 for impl NotType {} |
132 | pub(crate) fn impl_type(p: &mut Parser) { | 132 | pub(crate) fn impl_type(p: &mut Parser) { |
133 | if p.at(IMPL_KW) { | 133 | if p.at(T![impl ]) { |
134 | p.error("expected trait or type"); | 134 | p.error("expected trait or type"); |
135 | return; | 135 | return; |
136 | } | 136 | } |
diff --git a/crates/ra_parser/src/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs index 908493789..c3a0b4410 100644 --- a/crates/ra_parser/src/grammar/items/use_item.rs +++ b/crates/ra_parser/src/grammar/items/use_item.rs | |||
@@ -1,10 +1,10 @@ | |||
1 | use super::*; | 1 | use super::*; |
2 | 2 | ||
3 | pub(super) fn use_item(p: &mut Parser, m: Marker) { | 3 | pub(super) fn use_item(p: &mut Parser, m: Marker) { |
4 | assert!(p.at(USE_KW)); | 4 | assert!(p.at(T![use])); |
5 | p.bump(); | 5 | p.bump(); |
6 | use_tree(p); | 6 | use_tree(p); |
7 | p.expect(SEMI); | 7 | p.expect(T![;]); |
8 | m.complete(p, USE_ITEM); | 8 | m.complete(p, USE_ITEM); |
9 | } | 9 | } |
10 | 10 | ||
@@ -28,8 +28,8 @@ fn use_tree(p: &mut Parser) { | |||
28 | // use ::*; | 28 | // use ::*; |
29 | // use some::path::{*}; | 29 | // use some::path::{*}; |
30 | // use some::path::{::*}; | 30 | // use some::path::{::*}; |
31 | (STAR, _) => p.bump(), | 31 | (T![*], _) => p.bump(), |
32 | (COLONCOLON, STAR) => { | 32 | (T![::], T![*]) => { |
33 | // Parse `use ::*;`, which imports all from the crate root in Rust 2015 | 33 | // Parse `use ::*;`, which imports all from the crate root in Rust 2015 |
34 | // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`) | 34 | // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`) |
35 | // but still parses and errors later: ('crate root in paths can only be used in start position') | 35 | // but still parses and errors later: ('crate root in paths can only be used in start position') |
@@ -47,8 +47,8 @@ fn use_tree(p: &mut Parser) { | |||
47 | // use {path::from::root}; // Rust 2015 | 47 | // use {path::from::root}; // Rust 2015 |
48 | // use ::{some::arbritrary::path}; // Rust 2015 | 48 | // use ::{some::arbritrary::path}; // Rust 2015 |
49 | // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nesting | 49 | // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nesting |
50 | (L_CURLY, _) | (COLONCOLON, L_CURLY) => { | 50 | (T!['{'], _) | (T![::], T!['{']) => { |
51 | if p.at(COLONCOLON) { | 51 | if p.at(T![::]) { |
52 | p.bump(); | 52 | p.bump(); |
53 | } | 53 | } |
54 | use_tree_list(p); | 54 | use_tree_list(p); |
@@ -68,7 +68,7 @@ fn use_tree(p: &mut Parser) { | |||
68 | _ if paths::is_path_start(p) => { | 68 | _ if paths::is_path_start(p) => { |
69 | paths::use_path(p); | 69 | paths::use_path(p); |
70 | match p.current() { | 70 | match p.current() { |
71 | AS_KW => { | 71 | T![as] => { |
72 | // test use_alias | 72 | // test use_alias |
73 | // use some::path as some_name; | 73 | // use some::path as some_name; |
74 | // use some::{ | 74 | // use some::{ |
@@ -80,16 +80,16 @@ fn use_tree(p: &mut Parser) { | |||
80 | // use Trait as _; | 80 | // use Trait as _; |
81 | opt_alias(p); | 81 | opt_alias(p); |
82 | } | 82 | } |
83 | COLONCOLON => { | 83 | T![::] => { |
84 | p.bump(); | 84 | p.bump(); |
85 | match p.current() { | 85 | match p.current() { |
86 | STAR => { | 86 | T![*] => { |
87 | p.bump(); | 87 | p.bump(); |
88 | } | 88 | } |
89 | // test use_tree_list_after_path | 89 | // test use_tree_list_after_path |
90 | // use crate::{Item}; | 90 | // use crate::{Item}; |
91 | // use self::{Item}; | 91 | // use self::{Item}; |
92 | L_CURLY => use_tree_list(p), | 92 | T!['{'] => use_tree_list(p), |
93 | _ => { | 93 | _ => { |
94 | // is this unreachable? | 94 | // is this unreachable? |
95 | p.error("expected `{` or `*`"); | 95 | p.error("expected `{` or `*`"); |
@@ -109,15 +109,15 @@ fn use_tree(p: &mut Parser) { | |||
109 | } | 109 | } |
110 | 110 | ||
111 | pub(crate) fn use_tree_list(p: &mut Parser) { | 111 | pub(crate) fn use_tree_list(p: &mut Parser) { |
112 | assert!(p.at(L_CURLY)); | 112 | assert!(p.at(T!['{'])); |
113 | let m = p.start(); | 113 | let m = p.start(); |
114 | p.bump(); | 114 | p.bump(); |
115 | while !p.at(EOF) && !p.at(R_CURLY) { | 115 | while !p.at(EOF) && !p.at(T!['}']) { |
116 | use_tree(p); | 116 | use_tree(p); |
117 | if !p.at(R_CURLY) { | 117 | if !p.at(T!['}']) { |
118 | p.expect(COMMA); | 118 | p.expect(T![,]); |
119 | } | 119 | } |
120 | } | 120 | } |
121 | p.expect(R_CURLY); | 121 | p.expect(T!['}']); |
122 | m.complete(p, USE_TREE_LIST); | 122 | m.complete(p, USE_TREE_LIST); |
123 | } | 123 | } |
diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs index 3d3bd4cc1..723b56343 100644 --- a/crates/ra_parser/src/grammar/params.rs +++ b/crates/ra_parser/src/grammar/params.rs | |||
@@ -36,27 +36,27 @@ impl Flavor { | |||
36 | } | 36 | } |
37 | 37 | ||
38 | fn list_(p: &mut Parser, flavor: Flavor) { | 38 | fn list_(p: &mut Parser, flavor: Flavor) { |
39 | let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) }; | 39 | let (bra, ket) = if flavor.type_required() { (T!['('], T![')']) } else { (T![|], T![|]) }; |
40 | assert!(p.at(bra)); | 40 | assert!(p.at(bra)); |
41 | let m = p.start(); | 41 | let m = p.start(); |
42 | p.bump(); | 42 | p.bump(); |
43 | if flavor.type_required() { | 43 | if flavor.type_required() { |
44 | opt_self_param(p); | 44 | opt_self_param(p); |
45 | } | 45 | } |
46 | while !p.at(EOF) && !p.at(ket) && !(flavor.type_required() && p.at(DOTDOTDOT)) { | 46 | while !p.at(EOF) && !p.at(ket) && !(flavor.type_required() && p.at(T![...])) { |
47 | if !p.at_ts(VALUE_PARAMETER_FIRST) { | 47 | if !p.at_ts(VALUE_PARAMETER_FIRST) { |
48 | p.error("expected value parameter"); | 48 | p.error("expected value parameter"); |
49 | break; | 49 | break; |
50 | } | 50 | } |
51 | value_parameter(p, flavor); | 51 | value_parameter(p, flavor); |
52 | if !p.at(ket) { | 52 | if !p.at(ket) { |
53 | p.expect(COMMA); | 53 | p.expect(T![,]); |
54 | } | 54 | } |
55 | } | 55 | } |
56 | // test param_list_vararg | 56 | // test param_list_vararg |
57 | // extern "C" { fn printf(format: *const i8, ...) -> i32; } | 57 | // extern "C" { fn printf(format: *const i8, ...) -> i32; } |
58 | if flavor.type_required() { | 58 | if flavor.type_required() { |
59 | p.eat(DOTDOTDOT); | 59 | p.eat(T![...]); |
60 | } | 60 | } |
61 | p.expect(ket); | 61 | p.expect(ket); |
62 | m.complete(p, PARAM_LIST); | 62 | m.complete(p, PARAM_LIST); |
@@ -69,7 +69,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) { | |||
69 | match flavor { | 69 | match flavor { |
70 | Flavor::OptionalType | Flavor::Normal => { | 70 | Flavor::OptionalType | Flavor::Normal => { |
71 | patterns::pattern(p); | 71 | patterns::pattern(p); |
72 | if p.at(COLON) || flavor.type_required() { | 72 | if p.at(T![:]) || flavor.type_required() { |
73 | types::ascription(p) | 73 | types::ascription(p) |
74 | } | 74 | } |
75 | } | 75 | } |
@@ -85,10 +85,10 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) { | |||
85 | // trait Foo { | 85 | // trait Foo { |
86 | // fn bar(_: u64, mut x: i32); | 86 | // fn bar(_: u64, mut x: i32); |
87 | // } | 87 | // } |
88 | if (la0 == IDENT || la0 == UNDERSCORE) && la1 == COLON | 88 | if (la0 == IDENT || la0 == T![_]) && la1 == T![:] |
89 | || la0 == MUT_KW && la1 == IDENT && la2 == COLON | 89 | || la0 == T![mut] && la1 == IDENT && la2 == T![:] |
90 | || la0 == AMP && la1 == IDENT && la2 == COLON | 90 | || la0 == T![&] && la1 == IDENT && la2 == T![:] |
91 | || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON | 91 | || la0 == T![&] && la1 == T![mut] && la2 == IDENT && la3 == T![:] |
92 | { | 92 | { |
93 | patterns::pattern(p); | 93 | patterns::pattern(p); |
94 | types::ascription(p); | 94 | types::ascription(p); |
@@ -110,16 +110,16 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) { | |||
110 | // } | 110 | // } |
111 | fn opt_self_param(p: &mut Parser) { | 111 | fn opt_self_param(p: &mut Parser) { |
112 | let m; | 112 | let m; |
113 | if p.at(SELF_KW) || p.at(MUT_KW) && p.nth(1) == SELF_KW { | 113 | if p.at(T![self]) || p.at(T![mut]) && p.nth(1) == T![self] { |
114 | m = p.start(); | 114 | m = p.start(); |
115 | p.eat(MUT_KW); | 115 | p.eat(T![mut]); |
116 | p.eat(SELF_KW); | 116 | p.eat(T![self]); |
117 | // test arb_self_types | 117 | // test arb_self_types |
118 | // impl S { | 118 | // impl S { |
119 | // fn a(self: &Self) {} | 119 | // fn a(self: &Self) {} |
120 | // fn b(mut self: Box<Self>) {} | 120 | // fn b(mut self: Box<Self>) {} |
121 | // } | 121 | // } |
122 | if p.at(COLON) { | 122 | if p.at(T![:]) { |
123 | types::ascription(p); | 123 | types::ascription(p); |
124 | } | 124 | } |
125 | } else { | 125 | } else { |
@@ -127,10 +127,10 @@ fn opt_self_param(p: &mut Parser) { | |||
127 | let la2 = p.nth(2); | 127 | let la2 = p.nth(2); |
128 | let la3 = p.nth(3); | 128 | let la3 = p.nth(3); |
129 | let n_toks = match (p.current(), la1, la2, la3) { | 129 | let n_toks = match (p.current(), la1, la2, la3) { |
130 | (AMP, SELF_KW, _, _) => 2, | 130 | (T![&], T![self], _, _) => 2, |
131 | (AMP, MUT_KW, SELF_KW, _) => 3, | 131 | (T![&], T![mut], T![self], _) => 3, |
132 | (AMP, LIFETIME, SELF_KW, _) => 3, | 132 | (T![&], LIFETIME, T![self], _) => 3, |
133 | (AMP, LIFETIME, MUT_KW, SELF_KW) => 4, | 133 | (T![&], LIFETIME, T![mut], T![self]) => 4, |
134 | _ => return, | 134 | _ => return, |
135 | }; | 135 | }; |
136 | m = p.start(); | 136 | m = p.start(); |
@@ -139,7 +139,7 @@ fn opt_self_param(p: &mut Parser) { | |||
139 | } | 139 | } |
140 | } | 140 | } |
141 | m.complete(p, SELF_PARAM); | 141 | m.complete(p, SELF_PARAM); |
142 | if !p.at(R_PAREN) { | 142 | if !p.at(T![')']) { |
143 | p.expect(COMMA); | 143 | p.expect(T![,]); |
144 | } | 144 | } |
145 | } | 145 | } |
diff --git a/crates/ra_parser/src/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs index 33a11886c..3537b0da1 100644 --- a/crates/ra_parser/src/grammar/paths.rs +++ b/crates/ra_parser/src/grammar/paths.rs | |||
@@ -5,7 +5,7 @@ pub(super) const PATH_FIRST: TokenSet = | |||
5 | 5 | ||
6 | pub(super) fn is_path_start(p: &Parser) -> bool { | 6 | pub(super) fn is_path_start(p: &Parser) -> bool { |
7 | match p.current() { | 7 | match p.current() { |
8 | IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true, | 8 | IDENT | T![self] | T![super] | T![crate] | T![::] => true, |
9 | _ => false, | 9 | _ => false, |
10 | } | 10 | } |
11 | } | 11 | } |
@@ -35,10 +35,10 @@ fn path(p: &mut Parser, mode: Mode) { | |||
35 | let mut qual = path.complete(p, PATH); | 35 | let mut qual = path.complete(p, PATH); |
36 | loop { | 36 | loop { |
37 | let use_tree = match p.nth(1) { | 37 | let use_tree = match p.nth(1) { |
38 | STAR | L_CURLY => true, | 38 | T![*] | T!['{'] => true, |
39 | _ => false, | 39 | _ => false, |
40 | }; | 40 | }; |
41 | if p.at(COLONCOLON) && !use_tree { | 41 | if p.at(T![::]) && !use_tree { |
42 | let path = qual.precede(p); | 42 | let path = qual.precede(p); |
43 | p.bump(); | 43 | p.bump(); |
44 | path_segment(p, mode, false); | 44 | path_segment(p, mode, false); |
@@ -55,19 +55,19 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) { | |||
55 | // test qual_paths | 55 | // test qual_paths |
56 | // type X = <A as B>::Output; | 56 | // type X = <A as B>::Output; |
57 | // fn foo() { <usize as Default>::default(); } | 57 | // fn foo() { <usize as Default>::default(); } |
58 | if first && p.eat(L_ANGLE) { | 58 | if first && p.eat(T![<]) { |
59 | types::type_(p); | 59 | types::type_(p); |
60 | if p.eat(AS_KW) { | 60 | if p.eat(T![as]) { |
61 | if is_path_start(p) { | 61 | if is_path_start(p) { |
62 | types::path_type(p); | 62 | types::path_type(p); |
63 | } else { | 63 | } else { |
64 | p.error("expected a trait"); | 64 | p.error("expected a trait"); |
65 | } | 65 | } |
66 | } | 66 | } |
67 | p.expect(R_ANGLE); | 67 | p.expect(T![>]); |
68 | } else { | 68 | } else { |
69 | if first { | 69 | if first { |
70 | p.eat(COLONCOLON); | 70 | p.eat(T![::]); |
71 | } | 71 | } |
72 | match p.current() { | 72 | match p.current() { |
73 | IDENT => { | 73 | IDENT => { |
@@ -76,7 +76,7 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) { | |||
76 | } | 76 | } |
77 | // test crate_path | 77 | // test crate_path |
78 | // use crate::foo; | 78 | // use crate::foo; |
79 | SELF_KW | SUPER_KW | CRATE_KW => p.bump(), | 79 | T![self] | T![super] | T![crate] => p.bump(), |
80 | _ => { | 80 | _ => { |
81 | p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); | 81 | p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); |
82 | } | 82 | } |
@@ -91,7 +91,7 @@ fn opt_path_type_args(p: &mut Parser, mode: Mode) { | |||
91 | Mode::Type => { | 91 | Mode::Type => { |
92 | // test path_fn_trait_args | 92 | // test path_fn_trait_args |
93 | // type F = Box<Fn(x: i32) -> ()>; | 93 | // type F = Box<Fn(x: i32) -> ()>; |
94 | if p.at(L_PAREN) { | 94 | if p.at(T!['(']) { |
95 | params::param_list_opt_patterns(p); | 95 | params::param_list_opt_patterns(p); |
96 | opt_fn_ret_type(p); | 96 | opt_fn_ret_type(p); |
97 | } else { | 97 | } else { |
diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs index 12dd22bde..16ae9da63 100644 --- a/crates/ra_parser/src/grammar/patterns.rs +++ b/crates/ra_parser/src/grammar/patterns.rs | |||
@@ -16,10 +16,10 @@ pub(super) fn pattern_list(p: &mut Parser) { | |||
16 | /// Parses a pattern list separated by pipes `|` | 16 | /// Parses a pattern list separated by pipes `|` |
17 | /// using the given `recovery_set` | 17 | /// using the given `recovery_set` |
18 | pub(super) fn pattern_list_r(p: &mut Parser, recovery_set: TokenSet) { | 18 | pub(super) fn pattern_list_r(p: &mut Parser, recovery_set: TokenSet) { |
19 | p.eat(PIPE); | 19 | p.eat(T![|]); |
20 | pattern_r(p, recovery_set); | 20 | pattern_r(p, recovery_set); |
21 | 21 | ||
22 | while p.eat(PIPE) { | 22 | while p.eat(T![|]) { |
23 | pattern_r(p, recovery_set); | 23 | pattern_r(p, recovery_set); |
24 | } | 24 | } |
25 | } | 25 | } |
@@ -34,7 +34,7 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { | |||
34 | // 200 .. 301=> (), | 34 | // 200 .. 301=> (), |
35 | // } | 35 | // } |
36 | // } | 36 | // } |
37 | if p.at(DOTDOTDOT) || p.at(DOTDOTEQ) || p.at(DOTDOT) { | 37 | if p.at(T![...]) || p.at(T![..=]) || p.at(T![..]) { |
38 | let m = lhs.precede(p); | 38 | let m = lhs.precede(p); |
39 | p.bump(); | 39 | p.bump(); |
40 | atom_pat(p, recovery_set); | 40 | atom_pat(p, recovery_set); |
@@ -44,7 +44,7 @@ pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { | |||
44 | // fn main() { | 44 | // fn main() { |
45 | // let m!(x) = 0; | 45 | // let m!(x) = 0; |
46 | // } | 46 | // } |
47 | else if lhs.kind() == PATH_PAT && p.at(EXCL) { | 47 | else if lhs.kind() == PATH_PAT && p.at(T![!]) { |
48 | let m = lhs.precede(p); | 48 | let m = lhs.precede(p); |
49 | items::macro_call_after_excl(p); | 49 | items::macro_call_after_excl(p); |
50 | m.complete(p, MACRO_CALL); | 50 | m.complete(p, MACRO_CALL); |
@@ -58,9 +58,9 @@ const PAT_RECOVERY_SET: TokenSet = | |||
58 | fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { | 58 | fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { |
59 | let la0 = p.nth(0); | 59 | let la0 = p.nth(0); |
60 | let la1 = p.nth(1); | 60 | let la1 = p.nth(1); |
61 | if la0 == REF_KW | 61 | if la0 == T![ref] |
62 | || la0 == MUT_KW | 62 | || la0 == T![mut] |
63 | || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY || la1 == EXCL)) | 63 | || (la0 == IDENT && !(la1 == T![::] || la1 == T!['('] || la1 == T!['{'] || la1 == T![!])) |
64 | { | 64 | { |
65 | return Some(bind_pat(p, true)); | 65 | return Some(bind_pat(p, true)); |
66 | } | 66 | } |
@@ -73,10 +73,10 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { | |||
73 | } | 73 | } |
74 | 74 | ||
75 | let m = match la0 { | 75 | let m = match la0 { |
76 | UNDERSCORE => placeholder_pat(p), | 76 | T![_] => placeholder_pat(p), |
77 | AMP => ref_pat(p), | 77 | T![&] => ref_pat(p), |
78 | L_PAREN => tuple_pat(p), | 78 | T!['('] => tuple_pat(p), |
79 | L_BRACK => slice_pat(p), | 79 | T!['['] => slice_pat(p), |
80 | _ => { | 80 | _ => { |
81 | p.err_recover("expected pattern", recovery_set); | 81 | p.err_recover("expected pattern", recovery_set); |
82 | return None; | 82 | return None; |
@@ -86,7 +86,7 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { | |||
86 | } | 86 | } |
87 | 87 | ||
88 | fn is_literal_pat_start(p: &mut Parser) -> bool { | 88 | fn is_literal_pat_start(p: &mut Parser) -> bool { |
89 | p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) | 89 | p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) |
90 | || p.at_ts(expressions::LITERAL_FIRST) | 90 | || p.at_ts(expressions::LITERAL_FIRST) |
91 | } | 91 | } |
92 | 92 | ||
@@ -102,7 +102,7 @@ fn is_literal_pat_start(p: &mut Parser) -> bool { | |||
102 | fn literal_pat(p: &mut Parser) -> CompletedMarker { | 102 | fn literal_pat(p: &mut Parser) -> CompletedMarker { |
103 | assert!(is_literal_pat_start(p)); | 103 | assert!(is_literal_pat_start(p)); |
104 | let m = p.start(); | 104 | let m = p.start(); |
105 | if p.at(MINUS) { | 105 | if p.at(T![-]) { |
106 | p.bump(); | 106 | p.bump(); |
107 | } | 107 | } |
108 | expressions::literal(p); | 108 | expressions::literal(p); |
@@ -121,11 +121,11 @@ fn path_pat(p: &mut Parser) -> CompletedMarker { | |||
121 | let m = p.start(); | 121 | let m = p.start(); |
122 | paths::expr_path(p); | 122 | paths::expr_path(p); |
123 | let kind = match p.current() { | 123 | let kind = match p.current() { |
124 | L_PAREN => { | 124 | T!['('] => { |
125 | tuple_pat_fields(p); | 125 | tuple_pat_fields(p); |
126 | TUPLE_STRUCT_PAT | 126 | TUPLE_STRUCT_PAT |
127 | } | 127 | } |
128 | L_CURLY => { | 128 | T!['{'] => { |
129 | field_pat_list(p); | 129 | field_pat_list(p); |
130 | STRUCT_PAT | 130 | STRUCT_PAT |
131 | } | 131 | } |
@@ -142,10 +142,10 @@ fn path_pat(p: &mut Parser) -> CompletedMarker { | |||
142 | // let S(_, .. , x) = (); | 142 | // let S(_, .. , x) = (); |
143 | // } | 143 | // } |
144 | fn tuple_pat_fields(p: &mut Parser) { | 144 | fn tuple_pat_fields(p: &mut Parser) { |
145 | assert!(p.at(L_PAREN)); | 145 | assert!(p.at(T!['('])); |
146 | p.bump(); | 146 | p.bump(); |
147 | pat_list(p, R_PAREN); | 147 | pat_list(p, T![')']); |
148 | p.expect(R_PAREN); | 148 | p.expect(T![')']); |
149 | } | 149 | } |
150 | 150 | ||
151 | // test field_pat_list | 151 | // test field_pat_list |
@@ -156,29 +156,29 @@ fn tuple_pat_fields(p: &mut Parser) { | |||
156 | // let S { h: _, } = (); | 156 | // let S { h: _, } = (); |
157 | // } | 157 | // } |
158 | fn field_pat_list(p: &mut Parser) { | 158 | fn field_pat_list(p: &mut Parser) { |
159 | assert!(p.at(L_CURLY)); | 159 | assert!(p.at(T!['{'])); |
160 | let m = p.start(); | 160 | let m = p.start(); |
161 | p.bump(); | 161 | p.bump(); |
162 | while !p.at(EOF) && !p.at(R_CURLY) { | 162 | while !p.at(EOF) && !p.at(T!['}']) { |
163 | match p.current() { | 163 | match p.current() { |
164 | DOTDOT => p.bump(), | 164 | T![..] => p.bump(), |
165 | IDENT if p.nth(1) == COLON => field_pat(p), | 165 | IDENT if p.nth(1) == T![:] => field_pat(p), |
166 | L_CURLY => error_block(p, "expected ident"), | 166 | T!['{'] => error_block(p, "expected ident"), |
167 | _ => { | 167 | _ => { |
168 | bind_pat(p, false); | 168 | bind_pat(p, false); |
169 | } | 169 | } |
170 | } | 170 | } |
171 | if !p.at(R_CURLY) { | 171 | if !p.at(T!['}']) { |
172 | p.expect(COMMA); | 172 | p.expect(T![,]); |
173 | } | 173 | } |
174 | } | 174 | } |
175 | p.expect(R_CURLY); | 175 | p.expect(T!['}']); |
176 | m.complete(p, FIELD_PAT_LIST); | 176 | m.complete(p, FIELD_PAT_LIST); |
177 | } | 177 | } |
178 | 178 | ||
179 | fn field_pat(p: &mut Parser) { | 179 | fn field_pat(p: &mut Parser) { |
180 | assert!(p.at(IDENT)); | 180 | assert!(p.at(IDENT)); |
181 | assert!(p.nth(1) == COLON); | 181 | assert!(p.nth(1) == T![:]); |
182 | 182 | ||
183 | let m = p.start(); | 183 | let m = p.start(); |
184 | name(p); | 184 | name(p); |
@@ -190,7 +190,7 @@ fn field_pat(p: &mut Parser) { | |||
190 | // test placeholder_pat | 190 | // test placeholder_pat |
191 | // fn main() { let _ = (); } | 191 | // fn main() { let _ = (); } |
192 | fn placeholder_pat(p: &mut Parser) -> CompletedMarker { | 192 | fn placeholder_pat(p: &mut Parser) -> CompletedMarker { |
193 | assert!(p.at(UNDERSCORE)); | 193 | assert!(p.at(T![_])); |
194 | let m = p.start(); | 194 | let m = p.start(); |
195 | p.bump(); | 195 | p.bump(); |
196 | m.complete(p, PLACEHOLDER_PAT) | 196 | m.complete(p, PLACEHOLDER_PAT) |
@@ -202,10 +202,10 @@ fn placeholder_pat(p: &mut Parser) -> CompletedMarker { | |||
202 | // let &mut b = (); | 202 | // let &mut b = (); |
203 | // } | 203 | // } |
204 | fn ref_pat(p: &mut Parser) -> CompletedMarker { | 204 | fn ref_pat(p: &mut Parser) -> CompletedMarker { |
205 | assert!(p.at(AMP)); | 205 | assert!(p.at(T![&])); |
206 | let m = p.start(); | 206 | let m = p.start(); |
207 | p.bump(); | 207 | p.bump(); |
208 | p.eat(MUT_KW); | 208 | p.eat(T![mut]); |
209 | pattern(p); | 209 | pattern(p); |
210 | m.complete(p, REF_PAT) | 210 | m.complete(p, REF_PAT) |
211 | } | 211 | } |
@@ -215,7 +215,7 @@ fn ref_pat(p: &mut Parser) -> CompletedMarker { | |||
215 | // let (a, b, ..) = (); | 215 | // let (a, b, ..) = (); |
216 | // } | 216 | // } |
217 | fn tuple_pat(p: &mut Parser) -> CompletedMarker { | 217 | fn tuple_pat(p: &mut Parser) -> CompletedMarker { |
218 | assert!(p.at(L_PAREN)); | 218 | assert!(p.at(T!['('])); |
219 | let m = p.start(); | 219 | let m = p.start(); |
220 | tuple_pat_fields(p); | 220 | tuple_pat_fields(p); |
221 | m.complete(p, TUPLE_PAT) | 221 | m.complete(p, TUPLE_PAT) |
@@ -226,18 +226,18 @@ fn tuple_pat(p: &mut Parser) -> CompletedMarker { | |||
226 | // let [a, b, ..] = []; | 226 | // let [a, b, ..] = []; |
227 | // } | 227 | // } |
228 | fn slice_pat(p: &mut Parser) -> CompletedMarker { | 228 | fn slice_pat(p: &mut Parser) -> CompletedMarker { |
229 | assert!(p.at(L_BRACK)); | 229 | assert!(p.at(T!['['])); |
230 | let m = p.start(); | 230 | let m = p.start(); |
231 | p.bump(); | 231 | p.bump(); |
232 | pat_list(p, R_BRACK); | 232 | pat_list(p, T![']']); |
233 | p.expect(R_BRACK); | 233 | p.expect(T![']']); |
234 | m.complete(p, SLICE_PAT) | 234 | m.complete(p, SLICE_PAT) |
235 | } | 235 | } |
236 | 236 | ||
237 | fn pat_list(p: &mut Parser, ket: SyntaxKind) { | 237 | fn pat_list(p: &mut Parser, ket: SyntaxKind) { |
238 | while !p.at(EOF) && !p.at(ket) { | 238 | while !p.at(EOF) && !p.at(ket) { |
239 | match p.current() { | 239 | match p.current() { |
240 | DOTDOT => p.bump(), | 240 | T![..] => p.bump(), |
241 | _ => { | 241 | _ => { |
242 | if !p.at_ts(PATTERN_FIRST) { | 242 | if !p.at_ts(PATTERN_FIRST) { |
243 | p.error("expected a pattern"); | 243 | p.error("expected a pattern"); |
@@ -247,7 +247,7 @@ fn pat_list(p: &mut Parser, ket: SyntaxKind) { | |||
247 | } | 247 | } |
248 | } | 248 | } |
249 | if !p.at(ket) { | 249 | if !p.at(ket) { |
250 | p.expect(COMMA); | 250 | p.expect(T![,]); |
251 | } | 251 | } |
252 | } | 252 | } |
253 | } | 253 | } |
@@ -263,10 +263,10 @@ fn pat_list(p: &mut Parser, ket: SyntaxKind) { | |||
263 | // } | 263 | // } |
264 | fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { | 264 | fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { |
265 | let m = p.start(); | 265 | let m = p.start(); |
266 | p.eat(REF_KW); | 266 | p.eat(T![ref]); |
267 | p.eat(MUT_KW); | 267 | p.eat(T![mut]); |
268 | name(p); | 268 | name(p); |
269 | if with_at && p.eat(AT) { | 269 | if with_at && p.eat(T![@]) { |
270 | pattern(p); | 270 | pattern(p); |
271 | } | 271 | } |
272 | m.complete(p, BIND_PAT) | 272 | m.complete(p, BIND_PAT) |
diff --git a/crates/ra_parser/src/grammar/type_args.rs b/crates/ra_parser/src/grammar/type_args.rs index 684976b99..f391b63db 100644 --- a/crates/ra_parser/src/grammar/type_args.rs +++ b/crates/ra_parser/src/grammar/type_args.rs | |||
@@ -3,26 +3,26 @@ use super::*; | |||
3 | pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { | 3 | pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { |
4 | let m; | 4 | let m; |
5 | match (colon_colon_required, p.nth(0), p.nth(1)) { | 5 | match (colon_colon_required, p.nth(0), p.nth(1)) { |
6 | (_, COLONCOLON, L_ANGLE) => { | 6 | (_, T![::], T![<]) => { |
7 | m = p.start(); | 7 | m = p.start(); |
8 | p.bump(); | 8 | p.bump(); |
9 | p.bump(); | 9 | p.bump(); |
10 | } | 10 | } |
11 | (false, L_ANGLE, EQ) => return, | 11 | (false, T![<], T![=]) => return, |
12 | (false, L_ANGLE, _) => { | 12 | (false, T![<], _) => { |
13 | m = p.start(); | 13 | m = p.start(); |
14 | p.bump(); | 14 | p.bump(); |
15 | } | 15 | } |
16 | _ => return, | 16 | _ => return, |
17 | }; | 17 | }; |
18 | 18 | ||
19 | while !p.at(EOF) && !p.at(R_ANGLE) { | 19 | while !p.at(EOF) && !p.at(T![>]) { |
20 | type_arg(p); | 20 | type_arg(p); |
21 | if !p.at(R_ANGLE) && !p.expect(COMMA) { | 21 | if !p.at(T![>]) && !p.expect(T![,]) { |
22 | break; | 22 | break; |
23 | } | 23 | } |
24 | } | 24 | } |
25 | p.expect(R_ANGLE); | 25 | p.expect(T![>]); |
26 | m.complete(p, TYPE_ARG_LIST); | 26 | m.complete(p, TYPE_ARG_LIST); |
27 | } | 27 | } |
28 | 28 | ||
@@ -35,7 +35,7 @@ fn type_arg(p: &mut Parser) { | |||
35 | p.bump(); | 35 | p.bump(); |
36 | m.complete(p, LIFETIME_ARG); | 36 | m.complete(p, LIFETIME_ARG); |
37 | } | 37 | } |
38 | IDENT if p.nth(1) == EQ => { | 38 | IDENT if p.nth(1) == T![=] => { |
39 | name_ref(p); | 39 | name_ref(p); |
40 | p.bump(); | 40 | p.bump(); |
41 | types::type_(p); | 41 | types::type_(p); |
diff --git a/crates/ra_parser/src/grammar/type_params.rs b/crates/ra_parser/src/grammar/type_params.rs index 07d9b0792..4bbfed780 100644 --- a/crates/ra_parser/src/grammar/type_params.rs +++ b/crates/ra_parser/src/grammar/type_params.rs | |||
@@ -1,18 +1,18 @@ | |||
1 | use super::*; | 1 | use super::*; |
2 | 2 | ||
3 | pub(super) fn opt_type_param_list(p: &mut Parser) { | 3 | pub(super) fn opt_type_param_list(p: &mut Parser) { |
4 | if !p.at(L_ANGLE) { | 4 | if !p.at(T![<]) { |
5 | return; | 5 | return; |
6 | } | 6 | } |
7 | type_param_list(p); | 7 | type_param_list(p); |
8 | } | 8 | } |
9 | 9 | ||
10 | fn type_param_list(p: &mut Parser) { | 10 | fn type_param_list(p: &mut Parser) { |
11 | assert!(p.at(L_ANGLE)); | 11 | assert!(p.at(T![<])); |
12 | let m = p.start(); | 12 | let m = p.start(); |
13 | p.bump(); | 13 | p.bump(); |
14 | 14 | ||
15 | while !p.at(EOF) && !p.at(R_ANGLE) { | 15 | while !p.at(EOF) && !p.at(T![>]) { |
16 | let m = p.start(); | 16 | let m = p.start(); |
17 | 17 | ||
18 | // test generic_lifetime_type_attribute | 18 | // test generic_lifetime_type_attribute |
@@ -28,18 +28,18 @@ fn type_param_list(p: &mut Parser) { | |||
28 | p.err_and_bump("expected type parameter") | 28 | p.err_and_bump("expected type parameter") |
29 | } | 29 | } |
30 | } | 30 | } |
31 | if !p.at(R_ANGLE) && !p.expect(COMMA) { | 31 | if !p.at(T![>]) && !p.expect(T![,]) { |
32 | break; | 32 | break; |
33 | } | 33 | } |
34 | } | 34 | } |
35 | p.expect(R_ANGLE); | 35 | p.expect(T![>]); |
36 | m.complete(p, TYPE_PARAM_LIST); | 36 | m.complete(p, TYPE_PARAM_LIST); |
37 | } | 37 | } |
38 | 38 | ||
39 | fn lifetime_param(p: &mut Parser, m: Marker) { | 39 | fn lifetime_param(p: &mut Parser, m: Marker) { |
40 | assert!(p.at(LIFETIME)); | 40 | assert!(p.at(LIFETIME)); |
41 | p.bump(); | 41 | p.bump(); |
42 | if p.at(COLON) { | 42 | if p.at(T![:]) { |
43 | lifetime_bounds(p); | 43 | lifetime_bounds(p); |
44 | } | 44 | } |
45 | m.complete(p, LIFETIME_PARAM); | 45 | m.complete(p, LIFETIME_PARAM); |
@@ -48,12 +48,12 @@ fn lifetime_param(p: &mut Parser, m: Marker) { | |||
48 | fn type_param(p: &mut Parser, m: Marker) { | 48 | fn type_param(p: &mut Parser, m: Marker) { |
49 | assert!(p.at(IDENT)); | 49 | assert!(p.at(IDENT)); |
50 | name(p); | 50 | name(p); |
51 | if p.at(COLON) { | 51 | if p.at(T![:]) { |
52 | bounds(p); | 52 | bounds(p); |
53 | } | 53 | } |
54 | // test type_param_default | 54 | // test type_param_default |
55 | // struct S<T = i32>; | 55 | // struct S<T = i32>; |
56 | if p.at(EQ) { | 56 | if p.at(T![=]) { |
57 | p.bump(); | 57 | p.bump(); |
58 | types::type_(p) | 58 | types::type_(p) |
59 | } | 59 | } |
@@ -63,17 +63,17 @@ fn type_param(p: &mut Parser, m: Marker) { | |||
63 | // test type_param_bounds | 63 | // test type_param_bounds |
64 | // struct S<T: 'a + ?Sized + (Copy)>; | 64 | // struct S<T: 'a + ?Sized + (Copy)>; |
65 | pub(super) fn bounds(p: &mut Parser) { | 65 | pub(super) fn bounds(p: &mut Parser) { |
66 | assert!(p.at(COLON)); | 66 | assert!(p.at(T![:])); |
67 | p.bump(); | 67 | p.bump(); |
68 | bounds_without_colon(p); | 68 | bounds_without_colon(p); |
69 | } | 69 | } |
70 | 70 | ||
71 | fn lifetime_bounds(p: &mut Parser) { | 71 | fn lifetime_bounds(p: &mut Parser) { |
72 | assert!(p.at(COLON)); | 72 | assert!(p.at(T![:])); |
73 | p.bump(); | 73 | p.bump(); |
74 | while p.at(LIFETIME) { | 74 | while p.at(LIFETIME) { |
75 | p.bump(); | 75 | p.bump(); |
76 | if !p.eat(PLUS) { | 76 | if !p.eat(T![+]) { |
77 | break; | 77 | break; |
78 | } | 78 | } |
79 | } | 79 | } |
@@ -81,7 +81,7 @@ fn lifetime_bounds(p: &mut Parser) { | |||
81 | 81 | ||
82 | pub(super) fn bounds_without_colon_m(p: &mut Parser, marker: Marker) -> CompletedMarker { | 82 | pub(super) fn bounds_without_colon_m(p: &mut Parser, marker: Marker) -> CompletedMarker { |
83 | while type_bound(p) { | 83 | while type_bound(p) { |
84 | if !p.eat(PLUS) { | 84 | if !p.eat(T![+]) { |
85 | break; | 85 | break; |
86 | } | 86 | } |
87 | } | 87 | } |
@@ -96,11 +96,11 @@ pub(super) fn bounds_without_colon(p: &mut Parser) { | |||
96 | 96 | ||
97 | fn type_bound(p: &mut Parser) -> bool { | 97 | fn type_bound(p: &mut Parser) -> bool { |
98 | let m = p.start(); | 98 | let m = p.start(); |
99 | let has_paren = p.eat(L_PAREN); | 99 | let has_paren = p.eat(T!['(']); |
100 | p.eat(QUESTION); | 100 | p.eat(T![?]); |
101 | match p.current() { | 101 | match p.current() { |
102 | LIFETIME => p.bump(), | 102 | LIFETIME => p.bump(), |
103 | FOR_KW => types::for_type(p), | 103 | T![for] => types::for_type(p), |
104 | _ if paths::is_path_start(p) => types::path_type_(p, false), | 104 | _ if paths::is_path_start(p) => types::path_type_(p, false), |
105 | _ => { | 105 | _ => { |
106 | m.abandon(p); | 106 | m.abandon(p); |
@@ -108,7 +108,7 @@ fn type_bound(p: &mut Parser) -> bool { | |||
108 | } | 108 | } |
109 | } | 109 | } |
110 | if has_paren { | 110 | if has_paren { |
111 | p.expect(R_PAREN); | 111 | p.expect(T![')']); |
112 | } | 112 | } |
113 | m.complete(p, TYPE_BOUND); | 113 | m.complete(p, TYPE_BOUND); |
114 | 114 | ||
@@ -124,7 +124,7 @@ fn type_bound(p: &mut Parser) -> bool { | |||
124 | // <T as Iterator>::Item: 'a | 124 | // <T as Iterator>::Item: 'a |
125 | // {} | 125 | // {} |
126 | pub(super) fn opt_where_clause(p: &mut Parser) { | 126 | pub(super) fn opt_where_clause(p: &mut Parser) { |
127 | if !p.at(WHERE_KW) { | 127 | if !p.at(T![where]) { |
128 | return; | 128 | return; |
129 | } | 129 | } |
130 | let m = p.start(); | 130 | let m = p.start(); |
@@ -133,7 +133,7 @@ pub(super) fn opt_where_clause(p: &mut Parser) { | |||
133 | while is_where_predicate(p) { | 133 | while is_where_predicate(p) { |
134 | where_predicate(p); | 134 | where_predicate(p); |
135 | 135 | ||
136 | let comma = p.eat(COMMA); | 136 | let comma = p.eat(T![,]); |
137 | 137 | ||
138 | if is_where_clause_end(p) { | 138 | if is_where_clause_end(p) { |
139 | break; | 139 | break; |
@@ -150,13 +150,13 @@ pub(super) fn opt_where_clause(p: &mut Parser) { | |||
150 | fn is_where_predicate(p: &mut Parser) -> bool { | 150 | fn is_where_predicate(p: &mut Parser) -> bool { |
151 | match p.current() { | 151 | match p.current() { |
152 | LIFETIME => true, | 152 | LIFETIME => true, |
153 | IMPL_KW => false, | 153 | T![impl ] => false, |
154 | token => types::TYPE_FIRST.contains(token), | 154 | token => types::TYPE_FIRST.contains(token), |
155 | } | 155 | } |
156 | } | 156 | } |
157 | 157 | ||
158 | fn is_where_clause_end(p: &mut Parser) -> bool { | 158 | fn is_where_clause_end(p: &mut Parser) -> bool { |
159 | p.current() == L_CURLY || p.current() == SEMI || p.current() == EQ | 159 | p.current() == T!['{'] || p.current() == T![;] || p.current() == T![=] |
160 | } | 160 | } |
161 | 161 | ||
162 | fn where_predicate(p: &mut Parser) { | 162 | fn where_predicate(p: &mut Parser) { |
@@ -164,13 +164,13 @@ fn where_predicate(p: &mut Parser) { | |||
164 | match p.current() { | 164 | match p.current() { |
165 | LIFETIME => { | 165 | LIFETIME => { |
166 | p.bump(); | 166 | p.bump(); |
167 | if p.at(COLON) { | 167 | if p.at(T![:]) { |
168 | bounds(p); | 168 | bounds(p); |
169 | } else { | 169 | } else { |
170 | p.error("expected colon"); | 170 | p.error("expected colon"); |
171 | } | 171 | } |
172 | } | 172 | } |
173 | IMPL_KW => { | 173 | T![impl ] => { |
174 | p.error("expected lifetime or type"); | 174 | p.error("expected lifetime or type"); |
175 | } | 175 | } |
176 | _ => { | 176 | _ => { |
@@ -181,7 +181,7 @@ fn where_predicate(p: &mut Parser) { | |||
181 | // { } | 181 | // { } |
182 | types::type_(p); | 182 | types::type_(p); |
183 | 183 | ||
184 | if p.at(COLON) { | 184 | if p.at(T![:]) { |
185 | bounds(p); | 185 | bounds(p); |
186 | } else { | 186 | } else { |
187 | p.error("expected colon"); | 187 | p.error("expected colon"); |
diff --git a/crates/ra_parser/src/grammar/types.rs b/crates/ra_parser/src/grammar/types.rs index 686c80f3c..438e3ab0e 100644 --- a/crates/ra_parser/src/grammar/types.rs +++ b/crates/ra_parser/src/grammar/types.rs | |||
@@ -17,18 +17,18 @@ pub(super) fn type_no_bounds(p: &mut Parser) { | |||
17 | 17 | ||
18 | fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { | 18 | fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { |
19 | match p.current() { | 19 | match p.current() { |
20 | L_PAREN => paren_or_tuple_type(p), | 20 | T!['('] => paren_or_tuple_type(p), |
21 | EXCL => never_type(p), | 21 | T![!] => never_type(p), |
22 | STAR => pointer_type(p), | 22 | T![*] => pointer_type(p), |
23 | L_BRACK => array_or_slice_type(p), | 23 | T!['['] => array_or_slice_type(p), |
24 | AMP => reference_type(p), | 24 | T![&] => reference_type(p), |
25 | UNDERSCORE => placeholder_type(p), | 25 | T![_] => placeholder_type(p), |
26 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | 26 | T![fn] | T![unsafe] | T![extern] => fn_pointer_type(p), |
27 | FOR_KW => for_type(p), | 27 | T![for] => for_type(p), |
28 | IMPL_KW => impl_trait_type(p), | 28 | T![impl ] => impl_trait_type(p), |
29 | DYN_KW => dyn_trait_type(p), | 29 | T![dyn ] => dyn_trait_type(p), |
30 | // Some path types are not allowed to have bounds (no plus) | 30 | // Some path types are not allowed to have bounds (no plus) |
31 | L_ANGLE => path_type_(p, allow_bounds), | 31 | T![<] => path_type_(p, allow_bounds), |
32 | _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds), | 32 | _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds), |
33 | _ => { | 33 | _ => { |
34 | p.err_recover("expected type", TYPE_RECOVERY_SET); | 34 | p.err_recover("expected type", TYPE_RECOVERY_SET); |
@@ -37,27 +37,27 @@ fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { | |||
37 | } | 37 | } |
38 | 38 | ||
39 | pub(super) fn ascription(p: &mut Parser) { | 39 | pub(super) fn ascription(p: &mut Parser) { |
40 | p.expect(COLON); | 40 | p.expect(T![:]); |
41 | type_(p) | 41 | type_(p) |
42 | } | 42 | } |
43 | 43 | ||
44 | fn paren_or_tuple_type(p: &mut Parser) { | 44 | fn paren_or_tuple_type(p: &mut Parser) { |
45 | assert!(p.at(L_PAREN)); | 45 | assert!(p.at(T!['('])); |
46 | let m = p.start(); | 46 | let m = p.start(); |
47 | p.bump(); | 47 | p.bump(); |
48 | let mut n_types: u32 = 0; | 48 | let mut n_types: u32 = 0; |
49 | let mut trailing_comma: bool = false; | 49 | let mut trailing_comma: bool = false; |
50 | while !p.at(EOF) && !p.at(R_PAREN) { | 50 | while !p.at(EOF) && !p.at(T![')']) { |
51 | n_types += 1; | 51 | n_types += 1; |
52 | type_(p); | 52 | type_(p); |
53 | if p.eat(COMMA) { | 53 | if p.eat(T![,]) { |
54 | trailing_comma = true; | 54 | trailing_comma = true; |
55 | } else { | 55 | } else { |
56 | trailing_comma = false; | 56 | trailing_comma = false; |
57 | break; | 57 | break; |
58 | } | 58 | } |
59 | } | 59 | } |
60 | p.expect(R_PAREN); | 60 | p.expect(T![')']); |
61 | 61 | ||
62 | let kind = if n_types == 1 && !trailing_comma { | 62 | let kind = if n_types == 1 && !trailing_comma { |
63 | // test paren_type | 63 | // test paren_type |
@@ -77,14 +77,14 @@ fn paren_or_tuple_type(p: &mut Parser) { | |||
77 | // test never_type | 77 | // test never_type |
78 | // type Never = !; | 78 | // type Never = !; |
79 | fn never_type(p: &mut Parser) { | 79 | fn never_type(p: &mut Parser) { |
80 | assert!(p.at(EXCL)); | 80 | assert!(p.at(T![!])); |
81 | let m = p.start(); | 81 | let m = p.start(); |
82 | p.bump(); | 82 | p.bump(); |
83 | m.complete(p, NEVER_TYPE); | 83 | m.complete(p, NEVER_TYPE); |
84 | } | 84 | } |
85 | 85 | ||
86 | fn pointer_type(p: &mut Parser) { | 86 | fn pointer_type(p: &mut Parser) { |
87 | assert!(p.at(STAR)); | 87 | assert!(p.at(T![*])); |
88 | let m = p.start(); | 88 | let m = p.start(); |
89 | p.bump(); | 89 | p.bump(); |
90 | 90 | ||
@@ -92,7 +92,7 @@ fn pointer_type(p: &mut Parser) { | |||
92 | // test pointer_type_mut | 92 | // test pointer_type_mut |
93 | // type M = *mut (); | 93 | // type M = *mut (); |
94 | // type C = *mut (); | 94 | // type C = *mut (); |
95 | MUT_KW | CONST_KW => p.bump(), | 95 | T![mut] | T![const] => p.bump(), |
96 | _ => { | 96 | _ => { |
97 | // test_err pointer_type_no_mutability | 97 | // test_err pointer_type_no_mutability |
98 | // type T = *(); | 98 | // type T = *(); |
@@ -108,7 +108,7 @@ fn pointer_type(p: &mut Parser) { | |||
108 | } | 108 | } |
109 | 109 | ||
110 | fn array_or_slice_type(p: &mut Parser) { | 110 | fn array_or_slice_type(p: &mut Parser) { |
111 | assert!(p.at(L_BRACK)); | 111 | assert!(p.at(T!['['])); |
112 | let m = p.start(); | 112 | let m = p.start(); |
113 | p.bump(); | 113 | p.bump(); |
114 | 114 | ||
@@ -116,17 +116,17 @@ fn array_or_slice_type(p: &mut Parser) { | |||
116 | let kind = match p.current() { | 116 | let kind = match p.current() { |
117 | // test slice_type | 117 | // test slice_type |
118 | // type T = [()]; | 118 | // type T = [()]; |
119 | R_BRACK => { | 119 | T![']'] => { |
120 | p.bump(); | 120 | p.bump(); |
121 | SLICE_TYPE | 121 | SLICE_TYPE |
122 | } | 122 | } |
123 | 123 | ||
124 | // test array_type | 124 | // test array_type |
125 | // type T = [(); 92]; | 125 | // type T = [(); 92]; |
126 | SEMI => { | 126 | T![;] => { |
127 | p.bump(); | 127 | p.bump(); |
128 | expressions::expr(p); | 128 | expressions::expr(p); |
129 | p.expect(R_BRACK); | 129 | p.expect(T![']']); |
130 | ARRAY_TYPE | 130 | ARRAY_TYPE |
131 | } | 131 | } |
132 | // test_err array_type_missing_semi | 132 | // test_err array_type_missing_semi |
@@ -144,11 +144,11 @@ fn array_or_slice_type(p: &mut Parser) { | |||
144 | // type B = &'static (); | 144 | // type B = &'static (); |
145 | // type C = &mut (); | 145 | // type C = &mut (); |
146 | fn reference_type(p: &mut Parser) { | 146 | fn reference_type(p: &mut Parser) { |
147 | assert!(p.at(AMP)); | 147 | assert!(p.at(T![&])); |
148 | let m = p.start(); | 148 | let m = p.start(); |
149 | p.bump(); | 149 | p.bump(); |
150 | p.eat(LIFETIME); | 150 | p.eat(LIFETIME); |
151 | p.eat(MUT_KW); | 151 | p.eat(T![mut]); |
152 | type_no_bounds(p); | 152 | type_no_bounds(p); |
153 | m.complete(p, REFERENCE_TYPE); | 153 | m.complete(p, REFERENCE_TYPE); |
154 | } | 154 | } |
@@ -156,7 +156,7 @@ fn reference_type(p: &mut Parser) { | |||
156 | // test placeholder_type | 156 | // test placeholder_type |
157 | // type Placeholder = _; | 157 | // type Placeholder = _; |
158 | fn placeholder_type(p: &mut Parser) { | 158 | fn placeholder_type(p: &mut Parser) { |
159 | assert!(p.at(UNDERSCORE)); | 159 | assert!(p.at(T![_])); |
160 | let m = p.start(); | 160 | let m = p.start(); |
161 | p.bump(); | 161 | p.bump(); |
162 | m.complete(p, PLACEHOLDER_TYPE); | 162 | m.complete(p, PLACEHOLDER_TYPE); |
@@ -169,18 +169,18 @@ fn placeholder_type(p: &mut Parser) { | |||
169 | // type D = extern "C" fn ( u8 , ... ) -> u8; | 169 | // type D = extern "C" fn ( u8 , ... ) -> u8; |
170 | fn fn_pointer_type(p: &mut Parser) { | 170 | fn fn_pointer_type(p: &mut Parser) { |
171 | let m = p.start(); | 171 | let m = p.start(); |
172 | p.eat(UNSAFE_KW); | 172 | p.eat(T![unsafe]); |
173 | if p.at(EXTERN_KW) { | 173 | if p.at(T![extern]) { |
174 | abi(p); | 174 | abi(p); |
175 | } | 175 | } |
176 | // test_err fn_pointer_type_missing_fn | 176 | // test_err fn_pointer_type_missing_fn |
177 | // type F = unsafe (); | 177 | // type F = unsafe (); |
178 | if !p.eat(FN_KW) { | 178 | if !p.eat(T![fn]) { |
179 | m.abandon(p); | 179 | m.abandon(p); |
180 | p.error("expected `fn`"); | 180 | p.error("expected `fn`"); |
181 | return; | 181 | return; |
182 | } | 182 | } |
183 | if p.at(L_PAREN) { | 183 | if p.at(T!['(']) { |
184 | params::param_list_opt_patterns(p); | 184 | params::param_list_opt_patterns(p); |
185 | } else { | 185 | } else { |
186 | p.error("expected parameters") | 186 | p.error("expected parameters") |
@@ -192,9 +192,9 @@ fn fn_pointer_type(p: &mut Parser) { | |||
192 | } | 192 | } |
193 | 193 | ||
194 | pub(super) fn for_binder(p: &mut Parser) { | 194 | pub(super) fn for_binder(p: &mut Parser) { |
195 | assert!(p.at(FOR_KW)); | 195 | assert!(p.at(T![for])); |
196 | p.bump(); | 196 | p.bump(); |
197 | if p.at(L_ANGLE) { | 197 | if p.at(T![<]) { |
198 | type_params::opt_type_param_list(p); | 198 | type_params::opt_type_param_list(p); |
199 | } else { | 199 | } else { |
200 | p.error("expected `<`"); | 200 | p.error("expected `<`"); |
@@ -206,12 +206,12 @@ pub(super) fn for_binder(p: &mut Parser) { | |||
206 | // fn foo<T>(_t: &T) where for<'a> &'a T: Iterator {} | 206 | // fn foo<T>(_t: &T) where for<'a> &'a T: Iterator {} |
207 | // fn bar<T>(_t: &T) where for<'a> &'a mut T: Iterator {} | 207 | // fn bar<T>(_t: &T) where for<'a> &'a mut T: Iterator {} |
208 | pub(super) fn for_type(p: &mut Parser) { | 208 | pub(super) fn for_type(p: &mut Parser) { |
209 | assert!(p.at(FOR_KW)); | 209 | assert!(p.at(T![for])); |
210 | let m = p.start(); | 210 | let m = p.start(); |
211 | for_binder(p); | 211 | for_binder(p); |
212 | match p.current() { | 212 | match p.current() { |
213 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | 213 | T![fn] | T![unsafe] | T![extern] => fn_pointer_type(p), |
214 | AMP => reference_type(p), | 214 | T![&] => reference_type(p), |
215 | _ if paths::is_path_start(p) => path_type_(p, false), | 215 | _ if paths::is_path_start(p) => path_type_(p, false), |
216 | _ => p.error("expected a path"), | 216 | _ => p.error("expected a path"), |
217 | } | 217 | } |
@@ -221,7 +221,7 @@ pub(super) fn for_type(p: &mut Parser) { | |||
221 | // test impl_trait_type | 221 | // test impl_trait_type |
222 | // type A = impl Iterator<Item=Foo<'a>> + 'a; | 222 | // type A = impl Iterator<Item=Foo<'a>> + 'a; |
223 | fn impl_trait_type(p: &mut Parser) { | 223 | fn impl_trait_type(p: &mut Parser) { |
224 | assert!(p.at(IMPL_KW)); | 224 | assert!(p.at(T![impl ])); |
225 | let m = p.start(); | 225 | let m = p.start(); |
226 | p.bump(); | 226 | p.bump(); |
227 | type_params::bounds_without_colon(p); | 227 | type_params::bounds_without_colon(p); |
@@ -231,7 +231,7 @@ fn impl_trait_type(p: &mut Parser) { | |||
231 | // test dyn_trait_type | 231 | // test dyn_trait_type |
232 | // type A = dyn Iterator<Item=Foo<'a>> + 'a; | 232 | // type A = dyn Iterator<Item=Foo<'a>> + 'a; |
233 | fn dyn_trait_type(p: &mut Parser) { | 233 | fn dyn_trait_type(p: &mut Parser) { |
234 | assert!(p.at(DYN_KW)); | 234 | assert!(p.at(T![dyn ])); |
235 | let m = p.start(); | 235 | let m = p.start(); |
236 | p.bump(); | 236 | p.bump(); |
237 | type_params::bounds_without_colon(p); | 237 | type_params::bounds_without_colon(p); |
@@ -251,11 +251,11 @@ pub(super) fn path_type(p: &mut Parser) { | |||
251 | // type A = foo!(); | 251 | // type A = foo!(); |
252 | // type B = crate::foo!(); | 252 | // type B = crate::foo!(); |
253 | fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { | 253 | fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { |
254 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | 254 | assert!(paths::is_path_start(p) || p.at(T![<])); |
255 | let m = p.start(); | 255 | let m = p.start(); |
256 | paths::type_path(p); | 256 | paths::type_path(p); |
257 | 257 | ||
258 | let kind = if p.at(EXCL) { | 258 | let kind = if p.at(T![!]) { |
259 | items::macro_call_after_excl(p); | 259 | items::macro_call_after_excl(p); |
260 | MACRO_CALL | 260 | MACRO_CALL |
261 | } else { | 261 | } else { |
@@ -270,7 +270,7 @@ fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { | |||
270 | } | 270 | } |
271 | 271 | ||
272 | pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { | 272 | pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { |
273 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | 273 | assert!(paths::is_path_start(p) || p.at(T![<])); |
274 | let m = p.start(); | 274 | let m = p.start(); |
275 | paths::type_path(p); | 275 | paths::type_path(p); |
276 | 276 | ||
@@ -286,7 +286,7 @@ pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { | |||
286 | /// This turns a parsed PATH_TYPE optionally into a DYN_TRAIT_TYPE | 286 | /// This turns a parsed PATH_TYPE optionally into a DYN_TRAIT_TYPE |
287 | /// with a TYPE_BOUND_LIST | 287 | /// with a TYPE_BOUND_LIST |
288 | fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: CompletedMarker) { | 288 | fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: CompletedMarker) { |
289 | if !p.at(PLUS) { | 289 | if !p.at(T![+]) { |
290 | return; | 290 | return; |
291 | } | 291 | } |
292 | 292 | ||
@@ -298,7 +298,7 @@ fn opt_path_type_bounds_as_dyn_trait_type(p: &mut Parser, path_type_marker: Comp | |||
298 | 298 | ||
299 | // This gets consumed here so it gets properly set | 299 | // This gets consumed here so it gets properly set |
300 | // in the TYPE_BOUND_LIST | 300 | // in the TYPE_BOUND_LIST |
301 | p.eat(PLUS); | 301 | p.eat(T![+]); |
302 | 302 | ||
303 | // Parse rest of the bounds into the TYPE_BOUND_LIST | 303 | // Parse rest of the bounds into the TYPE_BOUND_LIST |
304 | let m = type_params::bounds_without_colon_m(p, m); | 304 | let m = type_params::bounds_without_colon_m(p, m); |
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs index 8eff930db..4434dfb09 100644 --- a/crates/ra_parser/src/parser.rs +++ b/crates/ra_parser/src/parser.rs | |||
@@ -6,6 +6,7 @@ use crate::{ | |||
6 | SyntaxKind::{self, ERROR, EOF, TOMBSTONE}, | 6 | SyntaxKind::{self, ERROR, EOF, TOMBSTONE}, |
7 | TokenSource, ParseError, TokenSet, | 7 | TokenSource, ParseError, TokenSet, |
8 | event::Event, | 8 | event::Event, |
9 | T | ||
9 | }; | 10 | }; |
10 | 11 | ||
11 | /// `Parser` struct provides the low-level API for | 12 | /// `Parser` struct provides the low-level API for |
@@ -155,10 +156,10 @@ impl<'t> Parser<'t> { | |||
155 | 156 | ||
156 | // Handle parser composites | 157 | // Handle parser composites |
157 | match kind { | 158 | match kind { |
158 | DOTDOTDOT | DOTDOTEQ => { | 159 | T![...] | T![..=] => { |
159 | self.bump_compound(kind, 3); | 160 | self.bump_compound(kind, 3); |
160 | } | 161 | } |
161 | DOTDOT | COLONCOLON | EQEQ | FAT_ARROW | NEQ | THIN_ARROW => { | 162 | T![..] | T![::] | T![==] | T![=>] | T![!=] | T![->] => { |
162 | self.bump_compound(kind, 2); | 163 | self.bump_compound(kind, 2); |
163 | } | 164 | } |
164 | _ => { | 165 | _ => { |
@@ -223,7 +224,7 @@ impl<'t> Parser<'t> { | |||
223 | 224 | ||
224 | /// Create an error node and consume the next token. | 225 | /// Create an error node and consume the next token. |
225 | pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { | 226 | pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { |
226 | if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) { | 227 | if self.at(T!['{']) || self.at(T!['}']) || self.at_ts(recovery) { |
227 | self.error(message); | 228 | self.error(message); |
228 | } else { | 229 | } else { |
229 | let m = self.start(); | 230 | let m = self.start(); |
@@ -253,19 +254,17 @@ impl<'t> Parser<'t> { | |||
253 | let jn2 = self.token_source.is_token_joint_to_next(self.token_pos + n + 1); | 254 | let jn2 = self.token_source.is_token_joint_to_next(self.token_pos + n + 1); |
254 | let la3 = self.token_source.token_kind(self.token_pos + n + 2); | 255 | let la3 = self.token_source.token_kind(self.token_pos + n + 2); |
255 | 256 | ||
256 | use SyntaxKind::*; | ||
257 | |||
258 | match kind { | 257 | match kind { |
259 | DOT if jn1 && la2 == DOT && jn2 && la3 == DOT => Some((DOTDOTDOT, 3)), | 258 | T![.] if jn1 && la2 == T![.] && jn2 && la3 == T![.] => Some((T![...], 3)), |
260 | DOT if jn1 && la2 == DOT && la3 == EQ => Some((DOTDOTEQ, 3)), | 259 | T![.] if jn1 && la2 == T![.] && la3 == T![=] => Some((T![..=], 3)), |
261 | DOT if jn1 && la2 == DOT => Some((DOTDOT, 2)), | 260 | T![.] if jn1 && la2 == T![.] => Some((T![..], 2)), |
262 | 261 | ||
263 | COLON if jn1 && la2 == COLON => Some((COLONCOLON, 2)), | 262 | T![:] if jn1 && la2 == T![:] => Some((T![::], 2)), |
264 | EQ if jn1 && la2 == EQ => Some((EQEQ, 2)), | 263 | T![=] if jn1 && la2 == T![=] => Some((T![==], 2)), |
265 | EQ if jn1 && la2 == R_ANGLE => Some((FAT_ARROW, 2)), | 264 | T![=] if jn1 && la2 == T![>] => Some((T![=>], 2)), |
266 | 265 | ||
267 | EXCL if la2 == EQ => Some((NEQ, 2)), | 266 | T![!] if la2 == T![=] => Some((T![!=], 2)), |
268 | MINUS if la2 == R_ANGLE => Some((THIN_ARROW, 2)), | 267 | T![-] if la2 == T![>] => Some((T![->], 2)), |
269 | _ => None, | 268 | _ => None, |
270 | } | 269 | } |
271 | } | 270 | } |
diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs index 1a08cc6eb..d7926bd91 100644 --- a/crates/ra_parser/src/syntax_kind/generated.rs +++ b/crates/ra_parser/src/syntax_kind/generated.rs | |||
@@ -245,8 +245,8 @@ use self::SyntaxKind::*; | |||
245 | macro_rules! T { | 245 | macro_rules! T { |
246 | (;) => { $crate::SyntaxKind::SEMI }; | 246 | (;) => { $crate::SyntaxKind::SEMI }; |
247 | (,) => { $crate::SyntaxKind::COMMA }; | 247 | (,) => { $crate::SyntaxKind::COMMA }; |
248 | (() => { $crate::SyntaxKind::L_PAREN }; | 248 | ('(') => { $crate::SyntaxKind::L_PAREN }; |
249 | ()) => { $crate::SyntaxKind::R_PAREN }; | 249 | (')') => { $crate::SyntaxKind::R_PAREN }; |
250 | ('{') => { $crate::SyntaxKind::L_CURLY }; | 250 | ('{') => { $crate::SyntaxKind::L_CURLY }; |
251 | ('}') => { $crate::SyntaxKind::R_CURLY }; | 251 | ('}') => { $crate::SyntaxKind::R_CURLY }; |
252 | ('[') => { $crate::SyntaxKind::L_BRACK }; | 252 | ('[') => { $crate::SyntaxKind::L_BRACK }; |
diff --git a/crates/ra_parser/src/syntax_kind/generated.rs.tera b/crates/ra_parser/src/syntax_kind/generated.rs.tera index ccb8ca4ba..f5abbec4b 100644 --- a/crates/ra_parser/src/syntax_kind/generated.rs.tera +++ b/crates/ra_parser/src/syntax_kind/generated.rs.tera | |||
@@ -36,7 +36,7 @@ use self::SyntaxKind::*; | |||
36 | #[macro_export] | 36 | #[macro_export] |
37 | macro_rules! T { | 37 | macro_rules! T { |
38 | {%- for t in concat(a=single_byte_tokens, b=multi_byte_tokens) %} | 38 | {%- for t in concat(a=single_byte_tokens, b=multi_byte_tokens) %} |
39 | {%- if t.0 == '{' or t.0 == '}' or t.0 == '[' or t.0 == ']' %} | 39 | {%- if t.0 == '{' or t.0 == '}' or t.0 == '[' or t.0 == ']' or t.0 == '(' or t.0 == ')' %} |
40 | ('{{t.0}}') => { $crate::SyntaxKind::{{t.1}} }; | 40 | ('{{t.0}}') => { $crate::SyntaxKind::{{t.1}} }; |
41 | {%- else %} | 41 | {%- else %} |
42 | ({{t.0}}) => { $crate::SyntaxKind::{{t.1}} }; | 42 | ({{t.0}}) => { $crate::SyntaxKind::{{t.1}} }; |
diff --git a/crates/ra_prof/Cargo.toml b/crates/ra_prof/Cargo.toml index 5f23e865c..efcce3d65 100644 --- a/crates/ra_prof/Cargo.toml +++ b/crates/ra_prof/Cargo.toml | |||
@@ -7,3 +7,4 @@ publish = false | |||
7 | 7 | ||
8 | [dependencies] | 8 | [dependencies] |
9 | once_cell = "0.2.0" | 9 | once_cell = "0.2.0" |
10 | itertools = "0.8.0" | ||
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs index e56446c9f..031405879 100644 --- a/crates/ra_prof/src/lib.rs +++ b/crates/ra_prof/src/lib.rs | |||
@@ -9,6 +9,7 @@ use std::{ | |||
9 | }; | 9 | }; |
10 | 10 | ||
11 | use once_cell::sync::Lazy; | 11 | use once_cell::sync::Lazy; |
12 | use itertools::Itertools; | ||
12 | 13 | ||
13 | /// Set profiling filter. It specifies descriptions allowed to profile. | 14 | /// Set profiling filter. It specifies descriptions allowed to profile. |
14 | /// This is helpful when call stack has too many nested profiling scopes. | 15 | /// This is helpful when call stack has too many nested profiling scopes. |
@@ -179,8 +180,9 @@ impl Drop for Profiler { | |||
179 | stack.messages.push(Message { level, duration, message }); | 180 | stack.messages.push(Message { level, duration, message }); |
180 | if level == 0 { | 181 | if level == 0 { |
181 | let stdout = stderr(); | 182 | let stdout = stderr(); |
182 | if duration >= stack.filter_data.longer_than { | 183 | let longer_than = stack.filter_data.longer_than; |
183 | print(0, &stack.messages, &mut stdout.lock()); | 184 | if duration >= longer_than { |
185 | print(0, &stack.messages, &mut stdout.lock(), longer_than); | ||
184 | } | 186 | } |
185 | stack.messages.clear(); | 187 | stack.messages.clear(); |
186 | } | 188 | } |
@@ -191,19 +193,37 @@ impl Drop for Profiler { | |||
191 | } | 193 | } |
192 | } | 194 | } |
193 | 195 | ||
194 | fn print(lvl: usize, msgs: &[Message], out: &mut impl Write) { | 196 | fn print(lvl: usize, msgs: &[Message], out: &mut impl Write, longer_than: Duration) { |
195 | let mut last = 0; | 197 | let mut last = 0; |
196 | let indent = repeat(" ").take(lvl + 1).collect::<String>(); | 198 | let indent = repeat(" ").take(lvl + 1).collect::<String>(); |
197 | for (i, &Message { level: l, duration: dur, message: ref msg }) in msgs.iter().enumerate() { | 199 | // We output hierarchy for long calls, but sum up all short calls |
198 | if l != lvl { | 200 | let mut short = Vec::new(); |
201 | for (i, &Message { level, duration, message: ref msg }) in msgs.iter().enumerate() { | ||
202 | if level != lvl { | ||
199 | continue; | 203 | continue; |
200 | } | 204 | } |
201 | writeln!(out, "{} {:6}ms - {}", indent, dur.as_millis(), msg) | 205 | if duration >= longer_than { |
202 | .expect("printing profiling info to stdout"); | 206 | writeln!(out, "{} {:6}ms - {}", indent, duration.as_millis(), msg) |
207 | .expect("printing profiling info to stdout"); | ||
208 | |||
209 | print(lvl + 1, &msgs[last..i], out, longer_than); | ||
210 | } else { | ||
211 | short.push((msg, duration)) | ||
212 | } | ||
203 | 213 | ||
204 | print(lvl + 1, &msgs[last..i], out); | ||
205 | last = i; | 214 | last = i; |
206 | } | 215 | } |
216 | short.sort_by_key(|(msg, _time)| *msg); | ||
217 | for (msg, entires) in short.iter().group_by(|(msg, _time)| msg).into_iter() { | ||
218 | let mut count = 0; | ||
219 | let mut total_duration = Duration::default(); | ||
220 | entires.for_each(|(_msg, time)| { | ||
221 | count += 1; | ||
222 | total_duration += *time; | ||
223 | }); | ||
224 | writeln!(out, "{} {:6}ms - {} ({} calls)", indent, total_duration.as_millis(), msg, count) | ||
225 | .expect("printing profiling info to stdout"); | ||
226 | } | ||
207 | } | 227 | } |
208 | 228 | ||
209 | #[cfg(test)] | 229 | #[cfg(test)] |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index 1f3981f5a..082bc5253 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -17,8 +17,8 @@ rowan = "0.5.0" | |||
17 | 17 | ||
18 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here | 18 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here |
19 | # to reduce number of compilations | 19 | # to reduce number of compilations |
20 | text_unit = { version = "0.1.6", features = ["serde"] } | 20 | text_unit = { version = "0.1.8", features = ["serde"] } |
21 | smol_str = { version = "0.1.9", features = ["serde"] } | 21 | smol_str = { version = "0.1.11", features = ["serde"] } |
22 | 22 | ||
23 | ra_text_edit = { path = "../ra_text_edit" } | 23 | ra_text_edit = { path = "../ra_text_edit" } |
24 | ra_parser = { path = "../ra_parser" } | 24 | ra_parser = { path = "../ra_parser" } |
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 9484c3b9b..17763809d 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs | |||
@@ -3,7 +3,8 @@ | |||
3 | use crate::{ | 3 | use crate::{ |
4 | SyntaxToken, SyntaxElement, SmolStr, | 4 | SyntaxToken, SyntaxElement, SmolStr, |
5 | ast::{self, AstNode, AstChildren, children, child_opt}, | 5 | ast::{self, AstNode, AstChildren, children, child_opt}, |
6 | SyntaxKind::* | 6 | SyntaxKind::*, |
7 | T | ||
7 | }; | 8 | }; |
8 | 9 | ||
9 | #[derive(Debug, Clone, PartialEq, Eq)] | 10 | #[derive(Debug, Clone, PartialEq, Eq)] |
@@ -34,7 +35,7 @@ impl ast::IfExpr { | |||
34 | 35 | ||
35 | impl ast::RefExpr { | 36 | impl ast::RefExpr { |
36 | pub fn is_mut(&self) -> bool { | 37 | pub fn is_mut(&self) -> bool { |
37 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) | 38 | self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) |
38 | } | 39 | } |
39 | } | 40 | } |
40 | 41 | ||
@@ -51,9 +52,9 @@ pub enum PrefixOp { | |||
51 | impl ast::PrefixExpr { | 52 | impl ast::PrefixExpr { |
52 | pub fn op_kind(&self) -> Option<PrefixOp> { | 53 | pub fn op_kind(&self) -> Option<PrefixOp> { |
53 | match self.op_token()?.kind() { | 54 | match self.op_token()?.kind() { |
54 | STAR => Some(PrefixOp::Deref), | 55 | T![*] => Some(PrefixOp::Deref), |
55 | EXCL => Some(PrefixOp::Not), | 56 | T![!] => Some(PrefixOp::Not), |
56 | MINUS => Some(PrefixOp::Neg), | 57 | T![-] => Some(PrefixOp::Neg), |
57 | _ => None, | 58 | _ => None, |
58 | } | 59 | } |
59 | } | 60 | } |
@@ -133,37 +134,37 @@ impl ast::BinExpr { | |||
133 | fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { | 134 | fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { |
134 | self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| { | 135 | self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| { |
135 | match c.kind() { | 136 | match c.kind() { |
136 | PIPEPIPE => Some((c, BinOp::BooleanOr)), | 137 | T![||] => Some((c, BinOp::BooleanOr)), |
137 | AMPAMP => Some((c, BinOp::BooleanAnd)), | 138 | T![&&] => Some((c, BinOp::BooleanAnd)), |
138 | EQEQ => Some((c, BinOp::EqualityTest)), | 139 | T![==] => Some((c, BinOp::EqualityTest)), |
139 | NEQ => Some((c, BinOp::NegatedEqualityTest)), | 140 | T![!=] => Some((c, BinOp::NegatedEqualityTest)), |
140 | LTEQ => Some((c, BinOp::LesserEqualTest)), | 141 | T![<=] => Some((c, BinOp::LesserEqualTest)), |
141 | GTEQ => Some((c, BinOp::GreaterEqualTest)), | 142 | T![>=] => Some((c, BinOp::GreaterEqualTest)), |
142 | L_ANGLE => Some((c, BinOp::LesserTest)), | 143 | T![<] => Some((c, BinOp::LesserTest)), |
143 | R_ANGLE => Some((c, BinOp::GreaterTest)), | 144 | T![>] => Some((c, BinOp::GreaterTest)), |
144 | PLUS => Some((c, BinOp::Addition)), | 145 | T![+] => Some((c, BinOp::Addition)), |
145 | STAR => Some((c, BinOp::Multiplication)), | 146 | T![*] => Some((c, BinOp::Multiplication)), |
146 | MINUS => Some((c, BinOp::Subtraction)), | 147 | T![-] => Some((c, BinOp::Subtraction)), |
147 | SLASH => Some((c, BinOp::Division)), | 148 | T![/] => Some((c, BinOp::Division)), |
148 | PERCENT => Some((c, BinOp::Remainder)), | 149 | T![%] => Some((c, BinOp::Remainder)), |
149 | SHL => Some((c, BinOp::LeftShift)), | 150 | T![<<] => Some((c, BinOp::LeftShift)), |
150 | SHR => Some((c, BinOp::RightShift)), | 151 | T![>>] => Some((c, BinOp::RightShift)), |
151 | CARET => Some((c, BinOp::BitwiseXor)), | 152 | T![^] => Some((c, BinOp::BitwiseXor)), |
152 | PIPE => Some((c, BinOp::BitwiseOr)), | 153 | T![|] => Some((c, BinOp::BitwiseOr)), |
153 | AMP => Some((c, BinOp::BitwiseAnd)), | 154 | T![&] => Some((c, BinOp::BitwiseAnd)), |
154 | DOTDOT => Some((c, BinOp::RangeRightOpen)), | 155 | T![..] => Some((c, BinOp::RangeRightOpen)), |
155 | DOTDOTEQ => Some((c, BinOp::RangeRightClosed)), | 156 | T![..=] => Some((c, BinOp::RangeRightClosed)), |
156 | EQ => Some((c, BinOp::Assignment)), | 157 | T![=] => Some((c, BinOp::Assignment)), |
157 | PLUSEQ => Some((c, BinOp::AddAssign)), | 158 | T![+=] => Some((c, BinOp::AddAssign)), |
158 | SLASHEQ => Some((c, BinOp::DivAssign)), | 159 | T![/=] => Some((c, BinOp::DivAssign)), |
159 | STAREQ => Some((c, BinOp::MulAssign)), | 160 | T![*=] => Some((c, BinOp::MulAssign)), |
160 | PERCENTEQ => Some((c, BinOp::RemAssign)), | 161 | T![%=] => Some((c, BinOp::RemAssign)), |
161 | SHREQ => Some((c, BinOp::ShrAssign)), | 162 | T![>>=] => Some((c, BinOp::ShrAssign)), |
162 | SHLEQ => Some((c, BinOp::ShlAssign)), | 163 | T![<<=] => Some((c, BinOp::ShlAssign)), |
163 | MINUSEQ => Some((c, BinOp::SubAssign)), | 164 | T![-=] => Some((c, BinOp::SubAssign)), |
164 | PIPEEQ => Some((c, BinOp::BitOrAssign)), | 165 | T![|=] => Some((c, BinOp::BitOrAssign)), |
165 | AMPEQ => Some((c, BinOp::BitAndAssign)), | 166 | T![&=] => Some((c, BinOp::BitAndAssign)), |
166 | CARETEQ => Some((c, BinOp::BitXorAssign)), | 167 | T![^=] => Some((c, BinOp::BitXorAssign)), |
167 | _ => None, | 168 | _ => None, |
168 | } | 169 | } |
169 | }) | 170 | }) |
@@ -211,7 +212,7 @@ impl ast::ArrayExpr { | |||
211 | } | 212 | } |
212 | 213 | ||
213 | fn is_repeat(&self) -> bool { | 214 | fn is_repeat(&self) -> bool { |
214 | self.syntax().children_with_tokens().any(|it| it.kind() == SEMI) | 215 | self.syntax().children_with_tokens().any(|it| it.kind() == T![;]) |
215 | } | 216 | } |
216 | } | 217 | } |
217 | 218 | ||
@@ -258,7 +259,7 @@ impl ast::Literal { | |||
258 | LiteralKind::FloatNumber { suffix: suffix } | 259 | LiteralKind::FloatNumber { suffix: suffix } |
259 | } | 260 | } |
260 | STRING | RAW_STRING => LiteralKind::String, | 261 | STRING | RAW_STRING => LiteralKind::String, |
261 | TRUE_KW | FALSE_KW => LiteralKind::Bool, | 262 | T![true] | T![false] => LiteralKind::Bool, |
262 | BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, | 263 | BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, |
263 | CHAR => LiteralKind::Char, | 264 | CHAR => LiteralKind::Char, |
264 | BYTE => LiteralKind::Byte, | 265 | BYTE => LiteralKind::Byte, |
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs index f3466c585..e4c99784c 100644 --- a/crates/ra_syntax/src/ast/extensions.rs +++ b/crates/ra_syntax/src/ast/extensions.rs | |||
@@ -3,7 +3,12 @@ | |||
3 | 3 | ||
4 | use itertools::Itertools; | 4 | use itertools::Itertools; |
5 | 5 | ||
6 | use crate::{SmolStr, SyntaxToken, ast::{self, AstNode, children, child_opt}, SyntaxKind::*, SyntaxElement}; | 6 | use crate::{ |
7 | SmolStr, SyntaxToken, | ||
8 | ast::{self, AstNode, children, child_opt}, | ||
9 | SyntaxKind::*, | ||
10 | SyntaxElement, T, | ||
11 | }; | ||
7 | use ra_parser::SyntaxKind; | 12 | use ra_parser::SyntaxKind; |
8 | 13 | ||
9 | impl ast::Name { | 14 | impl ast::Name { |
@@ -32,7 +37,7 @@ impl ast::Attr { | |||
32 | Some(prev) => prev, | 37 | Some(prev) => prev, |
33 | }; | 38 | }; |
34 | 39 | ||
35 | prev.kind() == EXCL | 40 | prev.kind() == T![!] |
36 | } | 41 | } |
37 | 42 | ||
38 | pub fn as_atom(&self) -> Option<SmolStr> { | 43 | pub fn as_atom(&self) -> Option<SmolStr> { |
@@ -102,9 +107,9 @@ impl ast::PathSegment { | |||
102 | PathSegmentKind::Name(name_ref) | 107 | PathSegmentKind::Name(name_ref) |
103 | } else { | 108 | } else { |
104 | match self.syntax().first_child_or_token()?.kind() { | 109 | match self.syntax().first_child_or_token()?.kind() { |
105 | SELF_KW => PathSegmentKind::SelfKw, | 110 | T![self] => PathSegmentKind::SelfKw, |
106 | SUPER_KW => PathSegmentKind::SuperKw, | 111 | T![super] => PathSegmentKind::SuperKw, |
107 | CRATE_KW => PathSegmentKind::CrateKw, | 112 | T![crate] => PathSegmentKind::CrateKw, |
108 | _ => return None, | 113 | _ => return None, |
109 | } | 114 | } |
110 | }; | 115 | }; |
@@ -113,7 +118,7 @@ impl ast::PathSegment { | |||
113 | 118 | ||
114 | pub fn has_colon_colon(&self) -> bool { | 119 | pub fn has_colon_colon(&self) -> bool { |
115 | match self.syntax.first_child_or_token().map(|s| s.kind()) { | 120 | match self.syntax.first_child_or_token().map(|s| s.kind()) { |
116 | Some(COLONCOLON) => true, | 121 | Some(T![::]) => true, |
117 | _ => false, | 122 | _ => false, |
118 | } | 123 | } |
119 | } | 124 | } |
@@ -129,14 +134,14 @@ impl ast::Module { | |||
129 | pub fn has_semi(&self) -> bool { | 134 | pub fn has_semi(&self) -> bool { |
130 | match self.syntax().last_child_or_token() { | 135 | match self.syntax().last_child_or_token() { |
131 | None => false, | 136 | None => false, |
132 | Some(node) => node.kind() == SEMI, | 137 | Some(node) => node.kind() == T![;], |
133 | } | 138 | } |
134 | } | 139 | } |
135 | } | 140 | } |
136 | 141 | ||
137 | impl ast::UseTree { | 142 | impl ast::UseTree { |
138 | pub fn has_star(&self) -> bool { | 143 | pub fn has_star(&self) -> bool { |
139 | self.syntax().children_with_tokens().any(|it| it.kind() == STAR) | 144 | self.syntax().children_with_tokens().any(|it| it.kind() == T![*]) |
140 | } | 145 | } |
141 | } | 146 | } |
142 | 147 | ||
@@ -172,7 +177,7 @@ impl ast::ImplBlock { | |||
172 | } | 177 | } |
173 | 178 | ||
174 | pub fn is_negative(&self) -> bool { | 179 | pub fn is_negative(&self) -> bool { |
175 | self.syntax().children_with_tokens().any(|t| t.kind() == EXCL) | 180 | self.syntax().children_with_tokens().any(|t| t.kind() == T![!]) |
176 | } | 181 | } |
177 | } | 182 | } |
178 | 183 | ||
@@ -196,6 +201,17 @@ impl StructKind<'_> { | |||
196 | } | 201 | } |
197 | 202 | ||
198 | impl ast::StructDef { | 203 | impl ast::StructDef { |
204 | pub fn is_union(&self) -> bool { | ||
205 | for child in self.syntax().children_with_tokens() { | ||
206 | match child.kind() { | ||
207 | T![struct] => return false, | ||
208 | T![union] => return true, | ||
209 | _ => (), | ||
210 | } | ||
211 | } | ||
212 | false | ||
213 | } | ||
214 | |||
199 | pub fn kind(&self) -> StructKind { | 215 | pub fn kind(&self) -> StructKind { |
200 | StructKind::from_node(self) | 216 | StructKind::from_node(self) |
201 | } | 217 | } |
@@ -219,7 +235,7 @@ impl ast::FnDef { | |||
219 | self.syntax() | 235 | self.syntax() |
220 | .last_child_or_token() | 236 | .last_child_or_token() |
221 | .and_then(|it| it.as_token()) | 237 | .and_then(|it| it.as_token()) |
222 | .filter(|it| it.kind() == SEMI) | 238 | .filter(|it| it.kind() == T![;]) |
223 | } | 239 | } |
224 | } | 240 | } |
225 | 241 | ||
@@ -227,7 +243,7 @@ impl ast::LetStmt { | |||
227 | pub fn has_semi(&self) -> bool { | 243 | pub fn has_semi(&self) -> bool { |
228 | match self.syntax().last_child_or_token() { | 244 | match self.syntax().last_child_or_token() { |
229 | None => false, | 245 | None => false, |
230 | Some(node) => node.kind() == SEMI, | 246 | Some(node) => node.kind() == T![;], |
231 | } | 247 | } |
232 | } | 248 | } |
233 | } | 249 | } |
@@ -236,7 +252,7 @@ impl ast::ExprStmt { | |||
236 | pub fn has_semi(&self) -> bool { | 252 | pub fn has_semi(&self) -> bool { |
237 | match self.syntax().last_child_or_token() { | 253 | match self.syntax().last_child_or_token() { |
238 | None => false, | 254 | None => false, |
239 | Some(node) => node.kind() == SEMI, | 255 | Some(node) => node.kind() == T![;], |
240 | } | 256 | } |
241 | } | 257 | } |
242 | } | 258 | } |
@@ -270,29 +286,29 @@ impl ast::FieldExpr { | |||
270 | 286 | ||
271 | impl ast::RefPat { | 287 | impl ast::RefPat { |
272 | pub fn is_mut(&self) -> bool { | 288 | pub fn is_mut(&self) -> bool { |
273 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) | 289 | self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) |
274 | } | 290 | } |
275 | } | 291 | } |
276 | 292 | ||
277 | impl ast::BindPat { | 293 | impl ast::BindPat { |
278 | pub fn is_mutable(&self) -> bool { | 294 | pub fn is_mutable(&self) -> bool { |
279 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) | 295 | self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) |
280 | } | 296 | } |
281 | 297 | ||
282 | pub fn is_ref(&self) -> bool { | 298 | pub fn is_ref(&self) -> bool { |
283 | self.syntax().children_with_tokens().any(|n| n.kind() == REF_KW) | 299 | self.syntax().children_with_tokens().any(|n| n.kind() == T![ref]) |
284 | } | 300 | } |
285 | } | 301 | } |
286 | 302 | ||
287 | impl ast::PointerType { | 303 | impl ast::PointerType { |
288 | pub fn is_mut(&self) -> bool { | 304 | pub fn is_mut(&self) -> bool { |
289 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) | 305 | self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) |
290 | } | 306 | } |
291 | } | 307 | } |
292 | 308 | ||
293 | impl ast::ReferenceType { | 309 | impl ast::ReferenceType { |
294 | pub fn is_mut(&self) -> bool { | 310 | pub fn is_mut(&self) -> bool { |
295 | self.syntax().children_with_tokens().any(|n| n.kind() == MUT_KW) | 311 | self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) |
296 | } | 312 | } |
297 | } | 313 | } |
298 | 314 | ||
@@ -311,19 +327,19 @@ impl ast::SelfParam { | |||
311 | self.syntax() | 327 | self.syntax() |
312 | .children_with_tokens() | 328 | .children_with_tokens() |
313 | .filter_map(|it| it.as_token()) | 329 | .filter_map(|it| it.as_token()) |
314 | .find(|it| it.kind() == SELF_KW) | 330 | .find(|it| it.kind() == T![self]) |
315 | .expect("invalid tree: self param must have self") | 331 | .expect("invalid tree: self param must have self") |
316 | } | 332 | } |
317 | 333 | ||
318 | pub fn kind(&self) -> SelfParamKind { | 334 | pub fn kind(&self) -> SelfParamKind { |
319 | let borrowed = self.syntax().children_with_tokens().any(|n| n.kind() == AMP); | 335 | let borrowed = self.syntax().children_with_tokens().any(|n| n.kind() == T![&]); |
320 | if borrowed { | 336 | if borrowed { |
321 | // check for a `mut` coming after the & -- `mut &self` != `&mut self` | 337 | // check for a `mut` coming after the & -- `mut &self` != `&mut self` |
322 | if self | 338 | if self |
323 | .syntax() | 339 | .syntax() |
324 | .children_with_tokens() | 340 | .children_with_tokens() |
325 | .skip_while(|n| n.kind() != AMP) | 341 | .skip_while(|n| n.kind() != T![&]) |
326 | .any(|n| n.kind() == MUT_KW) | 342 | .any(|n| n.kind() == T![mut]) |
327 | { | 343 | { |
328 | SelfParamKind::MutRef | 344 | SelfParamKind::MutRef |
329 | } else { | 345 | } else { |
@@ -355,6 +371,6 @@ impl ast::WherePred { | |||
355 | 371 | ||
356 | impl ast::TraitDef { | 372 | impl ast::TraitDef { |
357 | pub fn is_auto(&self) -> bool { | 373 | pub fn is_auto(&self) -> bool { |
358 | self.syntax().children_with_tokens().any(|t| t.kind() == AUTO_KW) | 374 | self.syntax().children_with_tokens().any(|t| t.kind() == T![auto]) |
359 | } | 375 | } |
360 | } | 376 | } |
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 65c65d6aa..0ceabc203 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -179,10 +179,7 @@ fn api_walkthrough() { | |||
179 | 179 | ||
180 | // There's a bunch of traversal methods on `SyntaxNode`: | 180 | // There's a bunch of traversal methods on `SyntaxNode`: |
181 | assert_eq!(expr_syntax.parent(), Some(block.syntax())); | 181 | assert_eq!(expr_syntax.parent(), Some(block.syntax())); |
182 | assert_eq!( | 182 | assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{'])); |
183 | block.syntax().first_child_or_token().map(|it| it.kind()), | ||
184 | Some(SyntaxKind::L_CURLY) | ||
185 | ); | ||
186 | assert_eq!( | 183 | assert_eq!( |
187 | expr_syntax.next_sibling_or_token().map(|it| it.kind()), | 184 | expr_syntax.next_sibling_or_token().map(|it| it.kind()), |
188 | Some(SyntaxKind::WHITESPACE) | 185 | Some(SyntaxKind::WHITESPACE) |
@@ -191,9 +188,7 @@ fn api_walkthrough() { | |||
191 | // As well as some iterator helpers: | 188 | // As well as some iterator helpers: |
192 | let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); | 189 | let f = expr_syntax.ancestors().find_map(ast::FnDef::cast); |
193 | assert_eq!(f, Some(&*func)); | 190 | assert_eq!(f, Some(&*func)); |
194 | assert!(expr_syntax | 191 | assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}'])); |
195 | .siblings_with_tokens(Direction::Next) | ||
196 | .any(|it| it.kind() == SyntaxKind::R_CURLY)); | ||
197 | assert_eq!( | 192 | assert_eq!( |
198 | expr_syntax.descendants_with_tokens().count(), | 193 | expr_syntax.descendants_with_tokens().count(), |
199 | 8, // 5 tokens `1`, ` `, `+`, ` `, `!` | 194 | 8, // 5 tokens `1`, ` `, `+`, ` `, `!` |
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index a3791b503..6eb96f03d 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
@@ -7,6 +7,7 @@ mod strings; | |||
7 | use crate::{ | 7 | use crate::{ |
8 | SyntaxKind::{self, *}, | 8 | SyntaxKind::{self, *}, |
9 | TextUnit, | 9 | TextUnit, |
10 | T, | ||
10 | }; | 11 | }; |
11 | 12 | ||
12 | use self::{ | 13 | use self::{ |
@@ -90,16 +91,16 @@ fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind { | |||
90 | match c { | 91 | match c { |
91 | // Possiblily multi-byte tokens, | 92 | // Possiblily multi-byte tokens, |
92 | // but we only produce single byte token now | 93 | // but we only produce single byte token now |
93 | // DOTDOTDOT, DOTDOT, DOTDOTEQ, DOT | 94 | // T![...], T![..], T![..=], T![.] |
94 | '.' => return DOT, | 95 | '.' => return T![.], |
95 | // COLONCOLON COLON | 96 | // T![::] T![:] |
96 | ':' => return COLON, | 97 | ':' => return T![:], |
97 | // EQEQ FATARROW EQ | 98 | // T![==] FATARROW T![=] |
98 | '=' => return EQ, | 99 | '=' => return T![=], |
99 | // NEQ EXCL | 100 | // T![!=] T![!] |
100 | '!' => return EXCL, | 101 | '!' => return T![!], |
101 | // THIN_ARROW MINUS | 102 | // T![->] T![-] |
102 | '-' => return MINUS, | 103 | '-' => return T![-], |
103 | 104 | ||
104 | // If the character is an ident start not followed by another single | 105 | // If the character is an ident start not followed by another single |
105 | // quote, then this is a lifetime name: | 106 | // quote, then this is a lifetime name: |
@@ -148,8 +149,8 @@ fn scan_ident(c: char, ptr: &mut Ptr) -> SyntaxKind { | |||
148 | ptr.bump(); | 149 | ptr.bump(); |
149 | true | 150 | true |
150 | } | 151 | } |
151 | ('_', None) => return UNDERSCORE, | 152 | ('_', None) => return T![_], |
152 | ('_', Some(c)) if !is_ident_continue(c) => return UNDERSCORE, | 153 | ('_', Some(c)) if !is_ident_continue(c) => return T![_], |
153 | _ => false, | 154 | _ => false, |
154 | }; | 155 | }; |
155 | ptr.bump_while(is_ident_continue); | 156 | ptr.bump_while(is_ident_continue); |
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 434f850d1..6de02a15a 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -17,7 +17,8 @@ use crate::{ | |||
17 | text_token_source::TextTokenSource, | 17 | text_token_source::TextTokenSource, |
18 | text_tree_sink::TextTreeSink, | 18 | text_tree_sink::TextTreeSink, |
19 | lexer::{tokenize, Token}, | 19 | lexer::{tokenize, Token}, |
20 | } | 20 | }, |
21 | T, | ||
21 | }; | 22 | }; |
22 | 23 | ||
23 | pub(crate) fn incremental_reparse( | 24 | pub(crate) fn incremental_reparse( |
@@ -122,16 +123,16 @@ fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxN | |||
122 | 123 | ||
123 | fn is_balanced(tokens: &[Token]) -> bool { | 124 | fn is_balanced(tokens: &[Token]) -> bool { |
124 | if tokens.is_empty() | 125 | if tokens.is_empty() |
125 | || tokens.first().unwrap().kind != L_CURLY | 126 | || tokens.first().unwrap().kind != T!['{'] |
126 | || tokens.last().unwrap().kind != R_CURLY | 127 | || tokens.last().unwrap().kind != T!['}'] |
127 | { | 128 | { |
128 | return false; | 129 | return false; |
129 | } | 130 | } |
130 | let mut balance = 0usize; | 131 | let mut balance = 0usize; |
131 | for t in &tokens[1..tokens.len() - 1] { | 132 | for t in &tokens[1..tokens.len() - 1] { |
132 | match t.kind { | 133 | match t.kind { |
133 | L_CURLY => balance += 1, | 134 | T!['{'] => balance += 1, |
134 | R_CURLY => { | 135 | T!['}'] => { |
135 | balance = match balance.checked_sub(1) { | 136 | balance = match balance.checked_sub(1) { |
136 | Some(b) => b, | 137 | Some(b) => b, |
137 | None => return false, | 138 | None => return false, |
diff --git a/crates/ra_syntax/src/syntax_text.rs b/crates/ra_syntax/src/syntax_text.rs index 6bb2ff461..b013164c4 100644 --- a/crates/ra_syntax/src/syntax_text.rs +++ b/crates/ra_syntax/src/syntax_text.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use std::{fmt, ops}; | 1 | use std::{fmt, ops::{self, Bound}}; |
2 | 2 | ||
3 | use crate::{SyntaxNode, TextRange, TextUnit, SyntaxElement}; | 3 | use crate::{SyntaxNode, TextRange, TextUnit, SyntaxElement}; |
4 | 4 | ||
@@ -54,10 +54,31 @@ impl<'a> SyntaxText<'a> { | |||
54 | self.range.len() | 54 | self.range.len() |
55 | } | 55 | } |
56 | 56 | ||
57 | pub fn slice(&self, range: impl SyntaxTextSlice) -> SyntaxText<'a> { | 57 | /// NB, the offsets here are absolute, and this probably doesn't make sense! |
58 | let range = range.restrict(self.range).unwrap_or_else(|| { | 58 | pub fn slice(&self, range: impl ops::RangeBounds<TextUnit>) -> SyntaxText<'a> { |
59 | panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range) | 59 | let start = match range.start_bound() { |
60 | }); | 60 | Bound::Included(b) => *b, |
61 | Bound::Excluded(b) => *b + TextUnit::from(1u32), | ||
62 | Bound::Unbounded => self.range.start(), | ||
63 | }; | ||
64 | let end = match range.end_bound() { | ||
65 | Bound::Included(b) => *b + TextUnit::from(1u32), | ||
66 | Bound::Excluded(b) => *b, | ||
67 | Bound::Unbounded => self.range.end(), | ||
68 | }; | ||
69 | assert!( | ||
70 | start <= end, | ||
71 | "invalid slice, range: {:?}, slice: {:?}", | ||
72 | self.range, | ||
73 | (range.start_bound(), range.end_bound()), | ||
74 | ); | ||
75 | let range = TextRange::from_to(start, end); | ||
76 | assert!( | ||
77 | range.is_subrange(&self.range), | ||
78 | "invalid slice, range: {:?}, slice: {:?}", | ||
79 | self.range, | ||
80 | range, | ||
81 | ); | ||
61 | SyntaxText { node: self.node, range } | 82 | SyntaxText { node: self.node, range } |
62 | } | 83 | } |
63 | 84 | ||
@@ -88,40 +109,6 @@ impl<'a> fmt::Display for SyntaxText<'a> { | |||
88 | } | 109 | } |
89 | } | 110 | } |
90 | 111 | ||
91 | pub trait SyntaxTextSlice: fmt::Debug { | ||
92 | fn restrict(&self, range: TextRange) -> Option<TextRange>; | ||
93 | } | ||
94 | |||
95 | impl SyntaxTextSlice for TextRange { | ||
96 | fn restrict(&self, range: TextRange) -> Option<TextRange> { | ||
97 | self.intersection(&range) | ||
98 | } | ||
99 | } | ||
100 | |||
101 | impl SyntaxTextSlice for ops::RangeTo<TextUnit> { | ||
102 | fn restrict(&self, range: TextRange) -> Option<TextRange> { | ||
103 | if !range.contains_inclusive(self.end) { | ||
104 | return None; | ||
105 | } | ||
106 | Some(TextRange::from_to(range.start(), self.end)) | ||
107 | } | ||
108 | } | ||
109 | |||
110 | impl SyntaxTextSlice for ops::RangeFrom<TextUnit> { | ||
111 | fn restrict(&self, range: TextRange) -> Option<TextRange> { | ||
112 | if !range.contains_inclusive(self.start) { | ||
113 | return None; | ||
114 | } | ||
115 | Some(TextRange::from_to(self.start, range.end())) | ||
116 | } | ||
117 | } | ||
118 | |||
119 | impl SyntaxTextSlice for ops::Range<TextUnit> { | ||
120 | fn restrict(&self, range: TextRange) -> Option<TextRange> { | ||
121 | TextRange::from_to(self.start, self.end).restrict(range) | ||
122 | } | ||
123 | } | ||
124 | |||
125 | impl From<SyntaxText<'_>> for String { | 112 | impl From<SyntaxText<'_>> for String { |
126 | fn from(text: SyntaxText) -> String { | 113 | fn from(text: SyntaxText) -> String { |
127 | text.to_string() | 114 | text.to_string() |
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 11a1fb4a7..b53900a4b 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -5,9 +5,10 @@ mod field_expr; | |||
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | SourceFile, SyntaxError, AstNode, SyntaxNode, TextUnit, | 7 | SourceFile, SyntaxError, AstNode, SyntaxNode, TextUnit, |
8 | SyntaxKind::{L_CURLY, R_CURLY, BYTE, BYTE_STRING, STRING, CHAR}, | 8 | SyntaxKind::{BYTE, BYTE_STRING, STRING, CHAR}, |
9 | ast, | 9 | ast, |
10 | algo::visit::{visitor_ctx, VisitorCtx}, | 10 | algo::visit::{visitor_ctx, VisitorCtx}, |
11 | T, | ||
11 | }; | 12 | }; |
12 | 13 | ||
13 | pub(crate) use unescape::EscapeError; | 14 | pub(crate) use unescape::EscapeError; |
@@ -83,8 +84,8 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) { | |||
83 | let mut stack = Vec::new(); | 84 | let mut stack = Vec::new(); |
84 | for node in root.descendants() { | 85 | for node in root.descendants() { |
85 | match node.kind() { | 86 | match node.kind() { |
86 | L_CURLY => stack.push(node), | 87 | T!['{'] => stack.push(node), |
87 | R_CURLY => { | 88 | T!['}'] => { |
88 | if let Some(pair) = stack.pop() { | 89 | if let Some(pair) = stack.pop() { |
89 | assert_eq!( | 90 | assert_eq!( |
90 | node.parent(), | 91 | node.parent(), |