Diffstat (limited to 'crates/ra_syntax/src/parsing')
28 files changed, 4839 insertions, 0 deletions
diff --git a/crates/ra_syntax/src/parsing/builder.rs b/crates/ra_syntax/src/parsing/builder.rs
new file mode 100644
index 000000000..9090c60c2
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/builder.rs
@@ -0,0 +1,42 @@
1 | use crate::{ | ||
2 | parsing::parser_impl::Sink, | ||
3 | syntax_node::{GreenNode, RaTypes}, | ||
4 | SmolStr, SyntaxKind, SyntaxError, | ||
5 | }; | ||
6 | |||
7 | use rowan::GreenNodeBuilder; | ||
8 | |||
9 | pub(crate) struct GreenBuilder { | ||
10 | errors: Vec<SyntaxError>, | ||
11 | inner: GreenNodeBuilder<RaTypes>, | ||
12 | } | ||
13 | |||
14 | impl GreenBuilder { | ||
15 | pub(crate) fn new() -> GreenBuilder { | ||
16 | GreenBuilder { errors: Vec::new(), inner: GreenNodeBuilder::new() } | ||
17 | } | ||
18 | } | ||
19 | |||
20 | impl Sink for GreenBuilder { | ||
21 | type Tree = (GreenNode, Vec<SyntaxError>); | ||
22 | |||
23 | fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) { | ||
24 | self.inner.leaf(kind, text); | ||
25 | } | ||
26 | |||
27 | fn start_branch(&mut self, kind: SyntaxKind) { | ||
28 | self.inner.start_internal(kind) | ||
29 | } | ||
30 | |||
31 | fn finish_branch(&mut self) { | ||
32 | self.inner.finish_internal(); | ||
33 | } | ||
34 | |||
35 | fn error(&mut self, error: SyntaxError) { | ||
36 | self.errors.push(error) | ||
37 | } | ||
38 | |||
39 | fn finish(self) -> (GreenNode, Vec<SyntaxError>) { | ||
40 | (self.inner.finish(), self.errors) | ||
41 | } | ||
42 | } | ||
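The `GreenBuilder` above implements the `Sink` trait from `parsing/parser_impl` (the trait definition itself is not part of this diff). A minimal, self-contained sketch of how such a sink gets driven — the toy trait and driver below are stand-ins; only the method names and the nested start/leaf/finish calling order come from the code above:

// Simplified stand-in for the Sink trait; the real one uses SyntaxKind,
// SmolStr and SyntaxError instead of plain strings.
trait Sink {
    type Tree;
    fn leaf(&mut self, kind: &'static str, text: &str);
    fn start_branch(&mut self, kind: &'static str);
    fn finish_branch(&mut self);
    fn error(&mut self, error: String);
    fn finish(self) -> Self::Tree;
}

// Toy sink that just records the events it receives, in order.
struct TraceSink {
    events: Vec<String>,
}

impl Sink for TraceSink {
    type Tree = Vec<String>;
    fn leaf(&mut self, kind: &'static str, text: &str) {
        self.events.push(format!("leaf {} {:?}", kind, text));
    }
    fn start_branch(&mut self, kind: &'static str) {
        self.events.push(format!("start {}", kind));
    }
    fn finish_branch(&mut self) {
        self.events.push("finish".to_string());
    }
    fn error(&mut self, error: String) {
        self.events.push(format!("error: {}", error));
    }
    fn finish(self) -> Vec<String> {
        self.events
    }
}

fn main() {
    // The event-processing code calls the sink in strictly nested order,
    // mirroring the tree shape: here "fn foo" becomes
    // SOURCE_FILE(FN_DEF(FN_KW, WHITESPACE, NAME(IDENT))).
    let mut sink = TraceSink { events: Vec::new() };
    sink.start_branch("SOURCE_FILE");
    sink.start_branch("FN_DEF");
    sink.leaf("FN_KW", "fn");
    sink.leaf("WHITESPACE", " ");
    sink.start_branch("NAME");
    sink.leaf("IDENT", "foo");
    sink.finish_branch();
    sink.finish_branch();
    sink.finish_branch();
    for event in sink.finish() {
        println!("{}", event);
    }
}
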
diff --git a/crates/ra_syntax/src/parsing/grammar.rs b/crates/ra_syntax/src/parsing/grammar.rs
new file mode 100644
index 000000000..bcdcd9f57
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar.rs
@@ -0,0 +1,204 @@
1 | //! This is the actual "grammar" of the Rust language. | ||
2 | //! | ||
3 | //! Each function in this module and its children corresponds | ||
4 | //! to a production of the formal grammar. Submodules roughly | ||
5 | //! correspond to different *areas* of the grammar. By convention, | ||
6 | //! each submodule starts with `use super::*` import and exports | ||
7 | //! "public" productions via `pub(super)`. | ||
8 | //! | ||
9 | //! See docs for `Parser` to learn about the API available to the grammar, | ||
10 | //! and see docs for `Event` to learn how this actually manages to | ||
11 | //! produce parse trees. | ||
12 | //! | ||
13 | //! Code in this module also contains inline tests, which start with | ||
14 | //! a `// test name-of-the-test` comment and look like this: | ||
15 | //! | ||
16 | //! ``` | ||
17 | //! // test function_with_zero_parameters | ||
18 | //! // fn foo() {} | ||
19 | //! ``` | ||
20 | //! | ||
21 | //! After adding a new inline-test, run `cargo collect-tests` to extract | ||
22 | //! it as a standalone test fixture into `tests/data/parser/inline`, and | ||
23 | //! run `cargo test` once to create the "gold" value. | ||
24 | //! | ||
25 | //! Coding convention: rules like `where_clause` always produce either a | ||
26 | //! node or an error; rules like `opt_where_clause` may produce nothing. | ||
27 | //! Non-opt rules typically start with `assert!(p.at(FIRST_TOKEN))`; the | ||
28 | //! caller is responsible for branching on the first token. | ||
29 | mod attributes; | ||
30 | mod expressions; | ||
31 | mod items; | ||
32 | mod params; | ||
33 | mod paths; | ||
34 | mod patterns; | ||
35 | mod type_args; | ||
36 | mod type_params; | ||
37 | mod types; | ||
38 | |||
39 | use crate::{ | ||
40 | SyntaxNode, | ||
41 | SyntaxKind::{self, *}, | ||
42 | parsing::{ | ||
43 | token_set::TokenSet, | ||
44 | parser_api::{CompletedMarker, Marker, Parser} | ||
45 | }, | ||
46 | }; | ||
47 | |||
48 | pub(super) fn root(p: &mut Parser) { | ||
49 | let m = p.start(); | ||
50 | p.eat(SHEBANG); | ||
51 | items::mod_contents(p, false); | ||
52 | m.complete(p, SOURCE_FILE); | ||
53 | } | ||
54 | |||
55 | pub(super) fn reparser(node: &SyntaxNode) -> Option<fn(&mut Parser)> { | ||
56 | let res = match node.kind() { | ||
57 | BLOCK => expressions::block, | ||
58 | NAMED_FIELD_DEF_LIST => items::named_field_def_list, | ||
59 | NAMED_FIELD_LIST => items::named_field_list, | ||
60 | ENUM_VARIANT_LIST => items::enum_variant_list, | ||
61 | MATCH_ARM_LIST => items::match_arm_list, | ||
62 | USE_TREE_LIST => items::use_tree_list, | ||
63 | EXTERN_ITEM_LIST => items::extern_item_list, | ||
64 | TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => items::token_tree, | ||
65 | ITEM_LIST => { | ||
66 | let parent = node.parent().unwrap(); | ||
67 | match parent.kind() { | ||
68 | IMPL_BLOCK => items::impl_item_list, | ||
69 | TRAIT_DEF => items::trait_item_list, | ||
70 | MODULE => items::mod_item_list, | ||
71 | _ => return None, | ||
72 | } | ||
73 | } | ||
74 | _ => return None, | ||
75 | }; | ||
76 | Some(res) | ||
77 | } | ||
78 | |||
79 | #[derive(Clone, Copy, PartialEq, Eq)] | ||
80 | enum BlockLike { | ||
81 | Block, | ||
82 | NotBlock, | ||
83 | } | ||
84 | |||
85 | impl BlockLike { | ||
86 | fn is_block(self) -> bool { | ||
87 | self == BlockLike::Block | ||
88 | } | ||
89 | } | ||
90 | |||
91 | fn opt_visibility(p: &mut Parser) { | ||
92 | match p.current() { | ||
93 | PUB_KW => { | ||
94 | let m = p.start(); | ||
95 | p.bump(); | ||
96 | if p.at(L_PAREN) { | ||
97 | match p.nth(1) { | ||
98 | // test crate_visibility | ||
99 | // pub(crate) struct S; | ||
100 | // pub(self) struct S; | ||
101 | // pub(super) struct S; | ||
102 | // pub(in some::path) struct S; | ||
103 | CRATE_KW | SELF_KW | SUPER_KW => { | ||
104 | p.bump(); | ||
105 | p.bump(); | ||
106 | p.expect(R_PAREN); | ||
107 | } | ||
108 | IN_KW => { | ||
109 | p.bump(); | ||
110 | p.bump(); | ||
111 | paths::use_path(p); | ||
112 | p.expect(R_PAREN); | ||
113 | } | ||
114 | _ => (), | ||
115 | } | ||
116 | } | ||
117 | m.complete(p, VISIBILITY); | ||
118 | } | ||
119 | // test crate_keyword_vis | ||
120 | // crate fn main() { } | ||
121 | CRATE_KW => { | ||
122 | let m = p.start(); | ||
123 | p.bump(); | ||
124 | m.complete(p, VISIBILITY); | ||
125 | } | ||
126 | _ => (), | ||
127 | } | ||
128 | } | ||
129 | |||
130 | fn opt_alias(p: &mut Parser) { | ||
131 | if p.at(AS_KW) { | ||
132 | let m = p.start(); | ||
133 | p.bump(); | ||
134 | name(p); | ||
135 | m.complete(p, ALIAS); | ||
136 | } | ||
137 | } | ||
138 | |||
139 | fn abi(p: &mut Parser) { | ||
140 | assert!(p.at(EXTERN_KW)); | ||
141 | let abi = p.start(); | ||
142 | p.bump(); | ||
143 | match p.current() { | ||
144 | STRING | RAW_STRING => p.bump(), | ||
145 | _ => (), | ||
146 | } | ||
147 | abi.complete(p, ABI); | ||
148 | } | ||
149 | |||
150 | fn opt_fn_ret_type(p: &mut Parser) -> bool { | ||
151 | if p.at(THIN_ARROW) { | ||
152 | let m = p.start(); | ||
153 | p.bump(); | ||
154 | types::type_(p); | ||
155 | m.complete(p, RET_TYPE); | ||
156 | true | ||
157 | } else { | ||
158 | false | ||
159 | } | ||
160 | } | ||
161 | |||
162 | fn name_r(p: &mut Parser, recovery: TokenSet) { | ||
163 | if p.at(IDENT) { | ||
164 | let m = p.start(); | ||
165 | p.bump(); | ||
166 | m.complete(p, NAME); | ||
167 | } else { | ||
168 | p.err_recover("expected a name", recovery); | ||
169 | } | ||
170 | } | ||
171 | |||
172 | fn name(p: &mut Parser) { | ||
173 | name_r(p, TokenSet::empty()) | ||
174 | } | ||
175 | |||
176 | fn name_ref(p: &mut Parser) { | ||
177 | if p.at(IDENT) { | ||
178 | let m = p.start(); | ||
179 | p.bump(); | ||
180 | m.complete(p, NAME_REF); | ||
181 | } else { | ||
182 | p.err_and_bump("expected identifier"); | ||
183 | } | ||
184 | } | ||
185 | |||
186 | fn error_block(p: &mut Parser, message: &str) { | ||
187 | go(p, Some(message)); | ||
188 | fn go(p: &mut Parser, message: Option<&str>) { | ||
189 | assert!(p.at(L_CURLY)); | ||
190 | let m = p.start(); | ||
191 | if let Some(message) = message { | ||
192 | p.error(message); | ||
193 | } | ||
194 | p.bump(); | ||
195 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
196 | match p.current() { | ||
197 | L_CURLY => go(p, None), | ||
198 | _ => p.bump(), | ||
199 | } | ||
200 | } | ||
201 | p.eat(R_CURLY); | ||
202 | m.complete(p, ERROR); | ||
203 | } | ||
204 | } | ||
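The module docs above spell out the naming convention: non-`opt_` rules assert their first token and leave branching to the caller, while `opt_` rules may produce nothing. A self-contained sketch of that shape against a toy parser (the real `Parser` API is richer; this only illustrates the convention):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok {
    WhereKw,
    Ident,
    Eof,
}

struct Parser {
    tokens: Vec<Tok>,
    pos: usize,
}

impl Parser {
    fn current(&self) -> Tok {
        *self.tokens.get(self.pos).unwrap_or(&Tok::Eof)
    }
    fn at(&self, t: Tok) -> bool {
        self.current() == t
    }
    fn bump(&mut self) {
        self.pos += 1;
    }
}

// Non-opt rule: the caller has already checked for WHERE_KW, so we assert it.
fn where_clause(p: &mut Parser) {
    assert!(p.at(Tok::WhereKw));
    p.bump();
    while p.at(Tok::Ident) {
        p.bump();
    }
}

// Opt rule: quietly produces nothing when the clause is absent.
fn opt_where_clause(p: &mut Parser) {
    if !p.at(Tok::WhereKw) {
        return;
    }
    where_clause(p);
}

fn main() {
    let mut p = Parser { tokens: vec![Tok::WhereKw, Tok::Ident], pos: 0 };
    opt_where_clause(&mut p);
    assert_eq!(p.pos, 2);
}
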
diff --git a/crates/ra_syntax/src/parsing/grammar/attributes.rs b/crates/ra_syntax/src/parsing/grammar/attributes.rs
new file mode 100644
index 000000000..cd30e8a45
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/attributes.rs
@@ -0,0 +1,31 @@
1 | use super::*; | ||
2 | |||
3 | pub(super) fn inner_attributes(p: &mut Parser) { | ||
4 | while p.current() == POUND && p.nth(1) == EXCL { | ||
5 | attribute(p, true) | ||
6 | } | ||
7 | } | ||
8 | |||
9 | pub(super) fn outer_attributes(p: &mut Parser) { | ||
10 | while p.at(POUND) { | ||
11 | attribute(p, false) | ||
12 | } | ||
13 | } | ||
14 | |||
15 | fn attribute(p: &mut Parser, inner: bool) { | ||
16 | let attr = p.start(); | ||
17 | assert!(p.at(POUND)); | ||
18 | p.bump(); | ||
19 | |||
20 | if inner { | ||
21 | assert!(p.at(EXCL)); | ||
22 | p.bump(); | ||
23 | } | ||
24 | |||
25 | if p.at(L_BRACK) { | ||
26 | items::token_tree(p); | ||
27 | } else { | ||
28 | p.error("expected `[`"); | ||
29 | } | ||
30 | attr.complete(p, ATTR); | ||
31 | } | ||
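For orientation: `inner_attributes` consumes `#![...]` forms (POUND followed by EXCL), `outer_attributes` consumes `#[...]` forms, and the bracketed body is handed to `items::token_tree`. In ordinary Rust source the two shapes look like this:

// Inner attribute: applies to the enclosing module/crate; this is the
// POUND + EXCL shape that inner_attributes looks for.
#![allow(dead_code)]

// Outer attribute: applies to the item that follows it.
#[derive(Debug)]
struct S;

fn main() {}
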
diff --git a/crates/ra_syntax/src/parsing/grammar/expressions.rs b/crates/ra_syntax/src/parsing/grammar/expressions.rs
new file mode 100644
index 000000000..d5a4f4d7b
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/expressions.rs
@@ -0,0 +1,473 @@
1 | mod atom; | ||
2 | |||
3 | pub(crate) use self::atom::match_arm_list; | ||
4 | pub(super) use self::atom::{literal, LITERAL_FIRST}; | ||
5 | use super::*; | ||
6 | |||
7 | const EXPR_FIRST: TokenSet = LHS_FIRST; | ||
8 | |||
9 | pub(super) fn expr(p: &mut Parser) -> BlockLike { | ||
10 | let r = Restrictions { forbid_structs: false, prefer_stmt: false }; | ||
11 | expr_bp(p, r, 1) | ||
12 | } | ||
13 | |||
14 | pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike { | ||
15 | let r = Restrictions { forbid_structs: false, prefer_stmt: true }; | ||
16 | expr_bp(p, r, 1) | ||
17 | } | ||
18 | |||
19 | fn expr_no_struct(p: &mut Parser) { | ||
20 | let r = Restrictions { forbid_structs: true, prefer_stmt: false }; | ||
21 | expr_bp(p, r, 1); | ||
22 | } | ||
23 | |||
24 | // test block | ||
25 | // fn a() {} | ||
26 | // fn b() { let _ = 1; } | ||
27 | // fn c() { 1; 2; } | ||
28 | // fn d() { 1; 2 } | ||
29 | pub(crate) fn block(p: &mut Parser) { | ||
30 | if !p.at(L_CURLY) { | ||
31 | p.error("expected a block"); | ||
32 | return; | ||
33 | } | ||
34 | let m = p.start(); | ||
35 | p.bump(); | ||
36 | // This is checked by a validator | ||
37 | attributes::inner_attributes(p); | ||
38 | |||
39 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
40 | match p.current() { | ||
41 | // test nocontentexpr | ||
42 | // fn foo(){ | ||
43 | // ;;;some_expr();;;;{;;;};;;;Ok(()) | ||
44 | // } | ||
45 | SEMI => p.bump(), | ||
46 | _ => { | ||
47 | // test block_items | ||
48 | // fn a() { fn b() {} } | ||
49 | let m = p.start(); | ||
50 | let has_attrs = p.at(POUND); | ||
51 | attributes::outer_attributes(p); | ||
52 | if p.at(LET_KW) { | ||
53 | let_stmt(p, m); | ||
54 | } else { | ||
55 | match items::maybe_item(p, items::ItemFlavor::Mod) { | ||
56 | items::MaybeItem::Item(kind) => { | ||
57 | m.complete(p, kind); | ||
58 | } | ||
59 | items::MaybeItem::Modifiers => { | ||
60 | m.abandon(p); | ||
61 | p.error("expected an item"); | ||
62 | } | ||
63 | // test pub_expr | ||
64 | // fn foo() { pub 92; } //FIXME | ||
65 | items::MaybeItem::None => { | ||
66 | if has_attrs { | ||
67 | m.abandon(p); | ||
68 | p.error( | ||
69 | "expected a let statement or an item after attributes in block", | ||
70 | ); | ||
71 | } else { | ||
72 | let is_blocklike = expressions::expr_stmt(p) == BlockLike::Block; | ||
73 | if p.at(R_CURLY) { | ||
74 | m.abandon(p); | ||
75 | } else { | ||
76 | // test no_semi_after_block | ||
77 | // fn foo() { | ||
78 | // if true {} | ||
79 | // loop {} | ||
80 | // match () {} | ||
81 | // while true {} | ||
82 | // for _ in () {} | ||
83 | // {} | ||
84 | // {} | ||
85 | // macro_rules! test { | ||
86 | // () => {} | ||
87 | // } | ||
88 | // test!{} | ||
89 | // } | ||
90 | if is_blocklike { | ||
91 | p.eat(SEMI); | ||
92 | } else { | ||
93 | p.expect(SEMI); | ||
94 | } | ||
95 | m.complete(p, EXPR_STMT); | ||
96 | } | ||
97 | } | ||
98 | } | ||
99 | } | ||
100 | } | ||
101 | } | ||
102 | } | ||
103 | } | ||
104 | p.expect(R_CURLY); | ||
105 | m.complete(p, BLOCK); | ||
106 | |||
107 | // test let_stmt | ||
108 | // fn foo() { | ||
109 | // let a; | ||
110 | // let b: i32; | ||
111 | // let c = 92; | ||
112 | // let d: i32 = 92; | ||
113 | // } | ||
114 | fn let_stmt(p: &mut Parser, m: Marker) { | ||
115 | assert!(p.at(LET_KW)); | ||
116 | p.bump(); | ||
117 | patterns::pattern(p); | ||
118 | if p.at(COLON) { | ||
119 | types::ascription(p); | ||
120 | } | ||
121 | if p.eat(EQ) { | ||
122 | expressions::expr(p); | ||
123 | } | ||
124 | p.expect(SEMI); | ||
125 | m.complete(p, LET_STMT); | ||
126 | } | ||
127 | } | ||
128 | |||
129 | #[derive(Clone, Copy)] | ||
130 | struct Restrictions { | ||
131 | forbid_structs: bool, | ||
132 | prefer_stmt: bool, | ||
133 | } | ||
134 | |||
135 | enum Op { | ||
136 | Simple, | ||
137 | Composite(SyntaxKind, u8), | ||
138 | } | ||
139 | |||
140 | fn current_op(p: &Parser) -> (u8, Op) { | ||
141 | if let Some(t) = p.current3() { | ||
142 | match t { | ||
143 | (L_ANGLE, L_ANGLE, EQ) => return (1, Op::Composite(SHLEQ, 3)), | ||
144 | (R_ANGLE, R_ANGLE, EQ) => return (1, Op::Composite(SHREQ, 3)), | ||
145 | _ => (), | ||
146 | } | ||
147 | } | ||
148 | |||
149 | if let Some(t) = p.current2() { | ||
150 | match t { | ||
151 | (PLUS, EQ) => return (1, Op::Composite(PLUSEQ, 2)), | ||
152 | (MINUS, EQ) => return (1, Op::Composite(MINUSEQ, 2)), | ||
153 | (STAR, EQ) => return (1, Op::Composite(STAREQ, 2)), | ||
154 | (SLASH, EQ) => return (1, Op::Composite(SLASHEQ, 2)), | ||
155 | (PIPE, EQ) => return (1, Op::Composite(PIPEEQ, 2)), | ||
156 | (AMP, EQ) => return (1, Op::Composite(AMPEQ, 2)), | ||
157 | (CARET, EQ) => return (1, Op::Composite(CARETEQ, 2)), | ||
158 | (PIPE, PIPE) => return (3, Op::Composite(PIPEPIPE, 2)), | ||
159 | (AMP, AMP) => return (4, Op::Composite(AMPAMP, 2)), | ||
160 | (L_ANGLE, EQ) => return (5, Op::Composite(LTEQ, 2)), | ||
161 | (R_ANGLE, EQ) => return (5, Op::Composite(GTEQ, 2)), | ||
162 | (L_ANGLE, L_ANGLE) => return (9, Op::Composite(SHL, 2)), | ||
163 | (R_ANGLE, R_ANGLE) => return (9, Op::Composite(SHR, 2)), | ||
164 | _ => (), | ||
165 | } | ||
166 | } | ||
167 | |||
168 | let bp = match p.current() { | ||
169 | EQ => 1, | ||
170 | DOTDOT | DOTDOTEQ => 2, | ||
171 | EQEQ | NEQ | L_ANGLE | R_ANGLE => 5, | ||
172 | PIPE => 6, | ||
173 | CARET => 7, | ||
174 | AMP => 8, | ||
175 | MINUS | PLUS => 10, | ||
176 | STAR | SLASH | PERCENT => 11, | ||
177 | _ => 0, | ||
178 | }; | ||
179 | (bp, Op::Simple) | ||
180 | } | ||
181 | |||
182 | // Parses expression with binding power of at least bp. | ||
183 | fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike { | ||
184 | let mut lhs = match lhs(p, r) { | ||
185 | Some((lhs, blocklike)) => { | ||
186 | // test stmt_bin_expr_ambiguity | ||
187 | // fn foo() { | ||
188 | // let _ = {1} & 2; | ||
189 | // {1} &2; | ||
190 | // } | ||
191 | if r.prefer_stmt && blocklike.is_block() { | ||
192 | return BlockLike::Block; | ||
193 | } | ||
194 | lhs | ||
195 | } | ||
196 | None => return BlockLike::NotBlock, | ||
197 | }; | ||
198 | |||
199 | loop { | ||
200 | let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ; | ||
201 | let (op_bp, op) = current_op(p); | ||
202 | if op_bp < bp { | ||
203 | break; | ||
204 | } | ||
205 | let m = lhs.precede(p); | ||
206 | match op { | ||
207 | Op::Simple => p.bump(), | ||
208 | Op::Composite(kind, n) => { | ||
209 | p.bump_compound(kind, n); | ||
210 | } | ||
211 | } | ||
212 | expr_bp(p, r, op_bp + 1); | ||
213 | lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); | ||
214 | } | ||
215 | BlockLike::NotBlock | ||
216 | } | ||
217 | |||
218 | const LHS_FIRST: TokenSet = | ||
219 | atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]); | ||
220 | |||
221 | fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { | ||
222 | let m; | ||
223 | let kind = match p.current() { | ||
224 | // test ref_expr | ||
225 | // fn foo() { | ||
226 | // let _ = &1; | ||
227 | // let _ = &mut &f(); | ||
228 | // } | ||
229 | AMP => { | ||
230 | m = p.start(); | ||
231 | p.bump(); | ||
232 | p.eat(MUT_KW); | ||
233 | REF_EXPR | ||
234 | } | ||
235 | // test unary_expr | ||
236 | // fn foo() { | ||
237 | // **&1; | ||
238 | // !!true; | ||
239 | // --1; | ||
240 | // } | ||
241 | STAR | EXCL | MINUS => { | ||
242 | m = p.start(); | ||
243 | p.bump(); | ||
244 | PREFIX_EXPR | ||
245 | } | ||
246 | // test full_range_expr | ||
247 | // fn foo() { xs[..]; } | ||
248 | DOTDOT | DOTDOTEQ => { | ||
249 | m = p.start(); | ||
250 | p.bump(); | ||
251 | if p.at_ts(EXPR_FIRST) { | ||
252 | expr_bp(p, r, 2); | ||
253 | } | ||
254 | return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock)); | ||
255 | } | ||
256 | _ => { | ||
257 | let (lhs, blocklike) = atom::atom_expr(p, r)?; | ||
258 | return Some(( | ||
259 | postfix_expr(p, lhs, !(r.prefer_stmt && blocklike.is_block())), | ||
260 | blocklike, | ||
261 | )); | ||
262 | } | ||
263 | }; | ||
264 | expr_bp(p, r, 255); | ||
265 | Some((m.complete(p, kind), BlockLike::NotBlock)) | ||
266 | } | ||
267 | |||
268 | fn postfix_expr( | ||
269 | p: &mut Parser, | ||
270 | mut lhs: CompletedMarker, | ||
271 | // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple | ||
272 | // E.g. `while true {break}();` is parsed as | ||
273 | // `while true {break}; ();` | ||
274 | mut allow_calls: bool, | ||
275 | ) -> CompletedMarker { | ||
276 | loop { | ||
277 | lhs = match p.current() { | ||
278 | // test stmt_postfix_expr_ambiguity | ||
279 | // fn foo() { | ||
280 | // match () { | ||
281 | // _ => {} | ||
282 | // () => {} | ||
283 | // [] => {} | ||
284 | // } | ||
285 | // } | ||
286 | L_PAREN if allow_calls => call_expr(p, lhs), | ||
287 | L_BRACK if allow_calls => index_expr(p, lhs), | ||
288 | DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => { | ||
289 | method_call_expr(p, lhs) | ||
290 | } | ||
291 | DOT => field_expr(p, lhs), | ||
292 | // test postfix_range | ||
293 | // fn foo() { let x = 1..; } | ||
294 | DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => { | ||
295 | let m = lhs.precede(p); | ||
296 | p.bump(); | ||
297 | m.complete(p, RANGE_EXPR) | ||
298 | } | ||
299 | QUESTION => try_expr(p, lhs), | ||
300 | AS_KW => cast_expr(p, lhs), | ||
301 | _ => break, | ||
302 | }; | ||
303 | allow_calls = true | ||
304 | } | ||
305 | lhs | ||
306 | } | ||
307 | |||
308 | // test call_expr | ||
309 | // fn foo() { | ||
310 | // let _ = f(); | ||
311 | // let _ = f()(1)(1, 2,); | ||
312 | // let _ = f(<Foo>::func()); | ||
313 | // f(<Foo as Trait>::func()); | ||
314 | // } | ||
315 | fn call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
316 | assert!(p.at(L_PAREN)); | ||
317 | let m = lhs.precede(p); | ||
318 | arg_list(p); | ||
319 | m.complete(p, CALL_EXPR) | ||
320 | } | ||
321 | |||
322 | // test index_expr | ||
323 | // fn foo() { | ||
324 | // x[1][2]; | ||
325 | // } | ||
326 | fn index_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
327 | assert!(p.at(L_BRACK)); | ||
328 | let m = lhs.precede(p); | ||
329 | p.bump(); | ||
330 | expr(p); | ||
331 | p.expect(R_BRACK); | ||
332 | m.complete(p, INDEX_EXPR) | ||
333 | } | ||
334 | |||
335 | // test method_call_expr | ||
336 | // fn foo() { | ||
337 | // x.foo(); | ||
338 | // y.bar::<T>(1, 2,); | ||
339 | // } | ||
340 | fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
341 | assert!(p.at(DOT) && p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON)); | ||
342 | let m = lhs.precede(p); | ||
343 | p.bump(); | ||
344 | name_ref(p); | ||
345 | type_args::opt_type_arg_list(p, true); | ||
346 | if p.at(L_PAREN) { | ||
347 | arg_list(p); | ||
348 | } | ||
349 | m.complete(p, METHOD_CALL_EXPR) | ||
350 | } | ||
351 | |||
352 | // test field_expr | ||
353 | // fn foo() { | ||
354 | // x.foo; | ||
355 | // x.0.bar; | ||
356 | // } | ||
357 | fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
358 | assert!(p.at(DOT)); | ||
359 | let m = lhs.precede(p); | ||
360 | p.bump(); | ||
361 | if p.at(IDENT) { | ||
362 | name_ref(p) | ||
363 | } else if p.at(INT_NUMBER) { | ||
364 | p.bump() | ||
365 | } else { | ||
366 | p.error("expected field name or number") | ||
367 | } | ||
368 | m.complete(p, FIELD_EXPR) | ||
369 | } | ||
370 | |||
371 | // test try_expr | ||
372 | // fn foo() { | ||
373 | // x?; | ||
374 | // } | ||
375 | fn try_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
376 | assert!(p.at(QUESTION)); | ||
377 | let m = lhs.precede(p); | ||
378 | p.bump(); | ||
379 | m.complete(p, TRY_EXPR) | ||
380 | } | ||
381 | |||
382 | // test cast_expr | ||
383 | // fn foo() { | ||
384 | // 82 as i32; | ||
385 | // 81 as i8 + 1; | ||
386 | // 79 as i16 - 1; | ||
387 | // } | ||
388 | fn cast_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { | ||
389 | assert!(p.at(AS_KW)); | ||
390 | let m = lhs.precede(p); | ||
391 | p.bump(); | ||
392 | // Use type_no_bounds(), because cast expressions are not | ||
393 | // allowed to have bounds. | ||
394 | types::type_no_bounds(p); | ||
395 | m.complete(p, CAST_EXPR) | ||
396 | } | ||
397 | |||
398 | fn arg_list(p: &mut Parser) { | ||
399 | assert!(p.at(L_PAREN)); | ||
400 | let m = p.start(); | ||
401 | p.bump(); | ||
402 | while !p.at(R_PAREN) && !p.at(EOF) { | ||
403 | if !p.at_ts(EXPR_FIRST) { | ||
404 | p.error("expected expression"); | ||
405 | break; | ||
406 | } | ||
407 | expr(p); | ||
408 | if !p.at(R_PAREN) && !p.expect(COMMA) { | ||
409 | break; | ||
410 | } | ||
411 | } | ||
412 | p.eat(R_PAREN); | ||
413 | m.complete(p, ARG_LIST); | ||
414 | } | ||
415 | |||
416 | // test path_expr | ||
417 | // fn foo() { | ||
418 | // let _ = a; | ||
419 | // let _ = a::b; | ||
420 | // let _ = ::a::<b>; | ||
421 | // let _ = format!(); | ||
422 | // } | ||
423 | fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, BlockLike) { | ||
424 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | ||
425 | let m = p.start(); | ||
426 | paths::expr_path(p); | ||
427 | match p.current() { | ||
428 | L_CURLY if !r.forbid_structs => { | ||
429 | named_field_list(p); | ||
430 | (m.complete(p, STRUCT_LIT), BlockLike::NotBlock) | ||
431 | } | ||
432 | EXCL => { | ||
433 | let block_like = items::macro_call_after_excl(p); | ||
434 | return (m.complete(p, MACRO_CALL), block_like); | ||
435 | } | ||
436 | _ => (m.complete(p, PATH_EXPR), BlockLike::NotBlock), | ||
437 | } | ||
438 | } | ||
439 | |||
440 | // test struct_lit | ||
441 | // fn foo() { | ||
442 | // S {}; | ||
443 | // S { x, y: 32, }; | ||
444 | // S { x, y: 32, ..Default::default() }; | ||
445 | // } | ||
446 | pub(crate) fn named_field_list(p: &mut Parser) { | ||
447 | assert!(p.at(L_CURLY)); | ||
448 | let m = p.start(); | ||
449 | p.bump(); | ||
450 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
451 | match p.current() { | ||
452 | IDENT => { | ||
453 | let m = p.start(); | ||
454 | name_ref(p); | ||
455 | if p.eat(COLON) { | ||
456 | expr(p); | ||
457 | } | ||
458 | m.complete(p, NAMED_FIELD); | ||
459 | } | ||
460 | DOTDOT => { | ||
461 | p.bump(); | ||
462 | expr(p); | ||
463 | } | ||
464 | L_CURLY => error_block(p, "expected a field"), | ||
465 | _ => p.err_and_bump("expected identifier"), | ||
466 | } | ||
467 | if !p.at(R_CURLY) { | ||
468 | p.expect(COMMA); | ||
469 | } | ||
470 | } | ||
471 | p.expect(R_CURLY); | ||
472 | m.complete(p, NAMED_FIELD_LIST); | ||
473 | } | ||
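`current_op` and `expr_bp` form a Pratt parser: an operator continues the current expression only if its binding power is at least `bp`, and the recursive call passes `op_bp + 1`, which makes the binary operators left-associative. A minimal, self-contained sketch of the same loop with toy tokens — the binding powers 10 for `+` and 11 for `*` come from the table above, everything else here is illustrative:

#[derive(Clone, Copy, PartialEq)]
enum Tok {
    Num(i64),
    Plus,
    Star,
    Eof,
}

struct P {
    toks: Vec<Tok>,
    pos: usize,
}

impl P {
    fn current(&self) -> Tok {
        *self.toks.get(self.pos).unwrap_or(&Tok::Eof)
    }
    fn bump(&mut self) -> Tok {
        let t = self.current();
        self.pos += 1;
        t
    }
}

fn op_bp(t: Tok) -> u8 {
    match t {
        Tok::Plus => 10,
        Tok::Star => 11,
        _ => 0, // 0 never satisfies `op_bp >= bp`, so the loop below stops
    }
}

fn expr_bp(p: &mut P, bp: u8) -> String {
    // lhs: an atom (just numbers in this toy)
    let mut lhs = match p.bump() {
        Tok::Num(n) => n.to_string(),
        _ => panic!("expected a number"),
    };
    loop {
        let cur = p.current();
        let cur_bp = op_bp(cur);
        if cur_bp < bp {
            break;
        }
        p.bump();
        // cur_bp + 1, as in expr_bp above, makes the operator left-associative
        let rhs = expr_bp(p, cur_bp + 1);
        let op = if cur == Tok::Plus { "+" } else { "*" };
        lhs = format!("({} {} {})", lhs, op, rhs);
    }
    lhs
}

fn main() {
    // 1 + 2 * 3  parses as  (1 + (2 * 3))  because `*` binds tighter (11 > 10).
    let toks = vec![Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::Star, Tok::Num(3)];
    let mut p = P { toks, pos: 0 };
    assert_eq!(expr_bp(&mut p, 1), "(1 + (2 * 3))");
}
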
diff --git a/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs b/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs
new file mode 100644
index 000000000..e74305b6a
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/expressions/atom.rs
@@ -0,0 +1,475 @@
1 | use super::*; | ||
2 | |||
3 | // test expr_literals | ||
4 | // fn foo() { | ||
5 | // let _ = true; | ||
6 | // let _ = false; | ||
7 | // let _ = 1; | ||
8 | // let _ = 2.0; | ||
9 | // let _ = b'a'; | ||
10 | // let _ = 'b'; | ||
11 | // let _ = "c"; | ||
12 | // let _ = r"d"; | ||
13 | // let _ = b"e"; | ||
14 | // let _ = br"f"; | ||
15 | // } | ||
16 | pub(crate) const LITERAL_FIRST: TokenSet = token_set![ | ||
17 | TRUE_KW, | ||
18 | FALSE_KW, | ||
19 | INT_NUMBER, | ||
20 | FLOAT_NUMBER, | ||
21 | BYTE, | ||
22 | CHAR, | ||
23 | STRING, | ||
24 | RAW_STRING, | ||
25 | BYTE_STRING, | ||
26 | RAW_BYTE_STRING | ||
27 | ]; | ||
28 | |||
29 | pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> { | ||
30 | if !p.at_ts(LITERAL_FIRST) { | ||
31 | return None; | ||
32 | } | ||
33 | let m = p.start(); | ||
34 | p.bump(); | ||
35 | Some(m.complete(p, LITERAL)) | ||
36 | } | ||
37 | |||
38 | // E.g. after the `break` in `if break {}`, this should not match | ||
39 | pub(super) const ATOM_EXPR_FIRST: TokenSet = | ||
40 | LITERAL_FIRST.union(paths::PATH_FIRST).union(token_set![ | ||
41 | L_PAREN, | ||
42 | L_CURLY, | ||
43 | L_BRACK, | ||
44 | PIPE, | ||
45 | MOVE_KW, | ||
46 | IF_KW, | ||
47 | WHILE_KW, | ||
48 | MATCH_KW, | ||
49 | UNSAFE_KW, | ||
50 | RETURN_KW, | ||
51 | BREAK_KW, | ||
52 | CONTINUE_KW, | ||
53 | LIFETIME, | ||
54 | ]); | ||
55 | |||
56 | const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW]; | ||
57 | |||
58 | pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { | ||
59 | if let Some(m) = literal(p) { | ||
60 | return Some((m, BlockLike::NotBlock)); | ||
61 | } | ||
62 | if paths::is_path_start(p) || p.at(L_ANGLE) { | ||
63 | return Some(path_expr(p, r)); | ||
64 | } | ||
65 | let la = p.nth(1); | ||
66 | let done = match p.current() { | ||
67 | L_PAREN => tuple_expr(p), | ||
68 | L_BRACK => array_expr(p), | ||
69 | PIPE => lambda_expr(p), | ||
70 | MOVE_KW if la == PIPE => lambda_expr(p), | ||
71 | IF_KW => if_expr(p), | ||
72 | |||
73 | LOOP_KW => loop_expr(p, None), | ||
74 | FOR_KW => for_expr(p, None), | ||
75 | WHILE_KW => while_expr(p, None), | ||
76 | LIFETIME if la == COLON => { | ||
77 | let m = p.start(); | ||
78 | label(p); | ||
79 | match p.current() { | ||
80 | LOOP_KW => loop_expr(p, Some(m)), | ||
81 | FOR_KW => for_expr(p, Some(m)), | ||
82 | WHILE_KW => while_expr(p, Some(m)), | ||
83 | L_CURLY => block_expr(p, Some(m)), | ||
84 | _ => { | ||
85 | // test_err misplaced_label_err | ||
86 | // fn main() { | ||
87 | // 'loop: impl | ||
88 | // } | ||
89 | p.error("expected a loop"); | ||
90 | m.complete(p, ERROR); | ||
91 | return None; | ||
92 | } | ||
93 | } | ||
94 | } | ||
95 | |||
96 | MATCH_KW => match_expr(p), | ||
97 | UNSAFE_KW if la == L_CURLY => { | ||
98 | let m = p.start(); | ||
99 | p.bump(); | ||
100 | block_expr(p, Some(m)) | ||
101 | } | ||
102 | L_CURLY => block_expr(p, None), | ||
103 | RETURN_KW => return_expr(p), | ||
104 | CONTINUE_KW => continue_expr(p), | ||
105 | BREAK_KW => break_expr(p, r), | ||
106 | _ => { | ||
107 | p.err_recover("expected expression", EXPR_RECOVERY_SET); | ||
108 | return None; | ||
109 | } | ||
110 | }; | ||
111 | let blocklike = match done.kind() { | ||
112 | IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR => BlockLike::Block, | ||
113 | _ => BlockLike::NotBlock, | ||
114 | }; | ||
115 | Some((done, blocklike)) | ||
116 | } | ||
117 | |||
118 | // test tuple_expr | ||
119 | // fn foo() { | ||
120 | // (); | ||
121 | // (1); | ||
122 | // (1,); | ||
123 | // } | ||
124 | fn tuple_expr(p: &mut Parser) -> CompletedMarker { | ||
125 | assert!(p.at(L_PAREN)); | ||
126 | let m = p.start(); | ||
127 | p.expect(L_PAREN); | ||
128 | |||
129 | let mut saw_comma = false; | ||
130 | let mut saw_expr = false; | ||
131 | while !p.at(EOF) && !p.at(R_PAREN) { | ||
132 | saw_expr = true; | ||
133 | if !p.at_ts(EXPR_FIRST) { | ||
134 | p.error("expected expression"); | ||
135 | break; | ||
136 | } | ||
137 | expr(p); | ||
138 | if !p.at(R_PAREN) { | ||
139 | saw_comma = true; | ||
140 | p.expect(COMMA); | ||
141 | } | ||
142 | } | ||
143 | p.expect(R_PAREN); | ||
144 | m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR }) | ||
145 | } | ||
146 | |||
147 | // test array_expr | ||
148 | // fn foo() { | ||
149 | // []; | ||
150 | // [1]; | ||
151 | // [1, 2,]; | ||
152 | // [1; 2]; | ||
153 | // } | ||
154 | fn array_expr(p: &mut Parser) -> CompletedMarker { | ||
155 | assert!(p.at(L_BRACK)); | ||
156 | let m = p.start(); | ||
157 | p.bump(); | ||
158 | if p.eat(R_BRACK) { | ||
159 | return m.complete(p, ARRAY_EXPR); | ||
160 | } | ||
161 | expr(p); | ||
162 | if p.eat(SEMI) { | ||
163 | expr(p); | ||
164 | p.expect(R_BRACK); | ||
165 | return m.complete(p, ARRAY_EXPR); | ||
166 | } | ||
167 | while !p.at(EOF) && !p.at(R_BRACK) { | ||
168 | p.expect(COMMA); | ||
169 | if p.at(R_BRACK) { | ||
170 | break; | ||
171 | } | ||
172 | if !p.at_ts(EXPR_FIRST) { | ||
173 | p.error("expected expression"); | ||
174 | break; | ||
175 | } | ||
176 | expr(p); | ||
177 | } | ||
178 | p.expect(R_BRACK); | ||
179 | m.complete(p, ARRAY_EXPR) | ||
180 | } | ||
181 | |||
182 | // test lambda_expr | ||
183 | // fn foo() { | ||
184 | // || (); | ||
185 | // || -> i32 { 92 }; | ||
186 | // |x| x; | ||
187 | // move |x: i32,| x; | ||
188 | // } | ||
189 | fn lambda_expr(p: &mut Parser) -> CompletedMarker { | ||
190 | assert!(p.at(PIPE) || (p.at(MOVE_KW) && p.nth(1) == PIPE)); | ||
191 | let m = p.start(); | ||
192 | p.eat(MOVE_KW); | ||
193 | params::param_list_opt_types(p); | ||
194 | if opt_fn_ret_type(p) { | ||
195 | if !p.at(L_CURLY) { | ||
196 | p.error("expected `{`"); | ||
197 | } | ||
198 | } | ||
199 | expr(p); | ||
200 | m.complete(p, LAMBDA_EXPR) | ||
201 | } | ||
202 | |||
203 | // test if_expr | ||
204 | // fn foo() { | ||
205 | // if true {}; | ||
206 | // if true {} else {}; | ||
207 | // if true {} else if false {} else {}; | ||
208 | // if S {}; | ||
209 | // } | ||
210 | fn if_expr(p: &mut Parser) -> CompletedMarker { | ||
211 | assert!(p.at(IF_KW)); | ||
212 | let m = p.start(); | ||
213 | p.bump(); | ||
214 | cond(p); | ||
215 | block(p); | ||
216 | if p.at(ELSE_KW) { | ||
217 | p.bump(); | ||
218 | if p.at(IF_KW) { | ||
219 | if_expr(p); | ||
220 | } else { | ||
221 | block(p); | ||
222 | } | ||
223 | } | ||
224 | m.complete(p, IF_EXPR) | ||
225 | } | ||
226 | |||
227 | // test label | ||
228 | // fn foo() { | ||
229 | // 'a: loop {} | ||
230 | // 'b: while true {} | ||
231 | // 'c: for x in () {} | ||
232 | // } | ||
233 | fn label(p: &mut Parser) { | ||
234 | assert!(p.at(LIFETIME) && p.nth(1) == COLON); | ||
235 | let m = p.start(); | ||
236 | p.bump(); | ||
237 | p.bump(); | ||
238 | m.complete(p, LABEL); | ||
239 | } | ||
240 | |||
241 | // test loop_expr | ||
242 | // fn foo() { | ||
243 | // loop {}; | ||
244 | // } | ||
245 | fn loop_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
246 | assert!(p.at(LOOP_KW)); | ||
247 | let m = m.unwrap_or_else(|| p.start()); | ||
248 | p.bump(); | ||
249 | block(p); | ||
250 | m.complete(p, LOOP_EXPR) | ||
251 | } | ||
252 | |||
253 | // test while_expr | ||
254 | // fn foo() { | ||
255 | // while true {}; | ||
256 | // while let Some(x) = it.next() {}; | ||
257 | // } | ||
258 | fn while_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
259 | assert!(p.at(WHILE_KW)); | ||
260 | let m = m.unwrap_or_else(|| p.start()); | ||
261 | p.bump(); | ||
262 | cond(p); | ||
263 | block(p); | ||
264 | m.complete(p, WHILE_EXPR) | ||
265 | } | ||
266 | |||
267 | // test for_expr | ||
268 | // fn foo() { | ||
269 | // for x in [] {}; | ||
270 | // } | ||
271 | fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
272 | assert!(p.at(FOR_KW)); | ||
273 | let m = m.unwrap_or_else(|| p.start()); | ||
274 | p.bump(); | ||
275 | patterns::pattern(p); | ||
276 | p.expect(IN_KW); | ||
277 | expr_no_struct(p); | ||
278 | block(p); | ||
279 | m.complete(p, FOR_EXPR) | ||
280 | } | ||
281 | |||
282 | // test cond | ||
283 | // fn foo() { if let Some(_) = None {} } | ||
284 | fn cond(p: &mut Parser) { | ||
285 | let m = p.start(); | ||
286 | if p.eat(LET_KW) { | ||
287 | patterns::pattern(p); | ||
288 | p.expect(EQ); | ||
289 | } | ||
290 | expr_no_struct(p); | ||
291 | m.complete(p, CONDITION); | ||
292 | } | ||
293 | |||
294 | // test match_expr | ||
295 | // fn foo() { | ||
296 | // match () { }; | ||
297 | // match S {}; | ||
298 | // } | ||
299 | fn match_expr(p: &mut Parser) -> CompletedMarker { | ||
300 | assert!(p.at(MATCH_KW)); | ||
301 | let m = p.start(); | ||
302 | p.bump(); | ||
303 | expr_no_struct(p); | ||
304 | if p.at(L_CURLY) { | ||
305 | match_arm_list(p); | ||
306 | } else { | ||
307 | p.error("expected `{`") | ||
308 | } | ||
309 | m.complete(p, MATCH_EXPR) | ||
310 | } | ||
311 | |||
312 | pub(crate) fn match_arm_list(p: &mut Parser) { | ||
313 | assert!(p.at(L_CURLY)); | ||
314 | let m = p.start(); | ||
315 | p.eat(L_CURLY); | ||
316 | |||
317 | // test match_arms_inner_attribute | ||
318 | // fn foo() { | ||
319 | // match () { | ||
320 | // #![doc("Inner attribute")] | ||
321 | // #![doc("Can be")] | ||
322 | // #![doc("Stacked")] | ||
323 | // _ => (), | ||
324 | // } | ||
325 | // } | ||
326 | attributes::inner_attributes(p); | ||
327 | |||
328 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
329 | if p.at(L_CURLY) { | ||
330 | error_block(p, "expected match arm"); | ||
331 | continue; | ||
332 | } | ||
333 | |||
334 | // test match_arms_outer_attributes | ||
335 | // fn foo() { | ||
336 | // match () { | ||
337 | // #[cfg(feature = "some")] | ||
338 | // _ => (), | ||
339 | // #[cfg(feature = "other")] | ||
340 | // _ => (), | ||
341 | // #[cfg(feature = "many")] | ||
342 | // #[cfg(feature = "attributes")] | ||
343 | // #[cfg(feature = "before")] | ||
344 | // _ => (), | ||
345 | // } | ||
346 | // } | ||
347 | attributes::outer_attributes(p); | ||
348 | |||
349 | // test match_arms_commas | ||
350 | // fn foo() { | ||
351 | // match () { | ||
352 | // _ => (), | ||
353 | // _ => {} | ||
354 | // _ => () | ||
355 | // } | ||
356 | // } | ||
357 | if match_arm(p).is_block() { | ||
358 | p.eat(COMMA); | ||
359 | } else if !p.at(R_CURLY) { | ||
360 | p.expect(COMMA); | ||
361 | } | ||
362 | } | ||
363 | p.expect(R_CURLY); | ||
364 | m.complete(p, MATCH_ARM_LIST); | ||
365 | } | ||
366 | |||
367 | // test match_arm | ||
368 | // fn foo() { | ||
369 | // match () { | ||
370 | // _ => (), | ||
371 | // _ if Test > Test{field: 0} => (), | ||
372 | // X | Y if Z => (), | ||
373 | // | X | Y if Z => (), | ||
374 | // | X => (), | ||
375 | // }; | ||
376 | // } | ||
377 | fn match_arm(p: &mut Parser) -> BlockLike { | ||
378 | let m = p.start(); | ||
379 | p.eat(PIPE); | ||
380 | patterns::pattern_r(p, TokenSet::empty()); | ||
381 | while p.eat(PIPE) { | ||
382 | patterns::pattern(p); | ||
383 | } | ||
384 | if p.at(IF_KW) { | ||
385 | match_guard(p); | ||
386 | } | ||
387 | p.expect(FAT_ARROW); | ||
388 | let ret = expr_stmt(p); | ||
389 | m.complete(p, MATCH_ARM); | ||
390 | ret | ||
391 | } | ||
392 | |||
393 | // test match_guard | ||
394 | // fn foo() { | ||
395 | // match () { | ||
396 | // _ if foo => (), | ||
397 | // } | ||
398 | // } | ||
399 | fn match_guard(p: &mut Parser) -> CompletedMarker { | ||
400 | assert!(p.at(IF_KW)); | ||
401 | let m = p.start(); | ||
402 | p.bump(); | ||
403 | expr(p); | ||
404 | m.complete(p, MATCH_GUARD) | ||
405 | } | ||
406 | |||
407 | // test block_expr | ||
408 | // fn foo() { | ||
409 | // {}; | ||
410 | // unsafe {}; | ||
411 | // 'label: {}; | ||
412 | // } | ||
413 | fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | ||
414 | assert!(p.at(L_CURLY)); | ||
415 | let m = m.unwrap_or_else(|| p.start()); | ||
416 | block(p); | ||
417 | m.complete(p, BLOCK_EXPR) | ||
418 | } | ||
419 | |||
420 | // test return_expr | ||
421 | // fn foo() { | ||
422 | // return; | ||
423 | // return 92; | ||
424 | // } | ||
425 | fn return_expr(p: &mut Parser) -> CompletedMarker { | ||
426 | assert!(p.at(RETURN_KW)); | ||
427 | let m = p.start(); | ||
428 | p.bump(); | ||
429 | if p.at_ts(EXPR_FIRST) { | ||
430 | expr(p); | ||
431 | } | ||
432 | m.complete(p, RETURN_EXPR) | ||
433 | } | ||
434 | |||
435 | // test continue_expr | ||
436 | // fn foo() { | ||
437 | // loop { | ||
438 | // continue; | ||
439 | // continue 'l; | ||
440 | // } | ||
441 | // } | ||
442 | fn continue_expr(p: &mut Parser) -> CompletedMarker { | ||
443 | assert!(p.at(CONTINUE_KW)); | ||
444 | let m = p.start(); | ||
445 | p.bump(); | ||
446 | p.eat(LIFETIME); | ||
447 | m.complete(p, CONTINUE_EXPR) | ||
448 | } | ||
449 | |||
450 | // test break_expr | ||
451 | // fn foo() { | ||
452 | // loop { | ||
453 | // break; | ||
454 | // break 'l; | ||
455 | // break 92; | ||
456 | // break 'l 92; | ||
457 | // } | ||
458 | // } | ||
459 | fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { | ||
460 | assert!(p.at(BREAK_KW)); | ||
461 | let m = p.start(); | ||
462 | p.bump(); | ||
463 | p.eat(LIFETIME); | ||
464 | // test break_ambiguity | ||
465 | // fn foo(){ | ||
466 | // if break {} | ||
467 | // while break {} | ||
468 | // for i in break {} | ||
469 | // match break {} | ||
470 | // } | ||
471 | if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(L_CURLY)) { | ||
472 | expr(p); | ||
473 | } | ||
474 | m.complete(p, BREAK_EXPR) | ||
475 | } | ||
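Several of the atoms above (`if_expr`, `while_expr`, `for_expr`, `match_expr`) parse their head expression via `expr_no_struct`, i.e. with `forbid_structs` set, because a struct literal there would swallow the block that follows (this is what the `// if S {};` test pins down). In ordinary Rust that is why a struct literal in condition position has to be parenthesized:

struct S {
    f: i32,
}

fn main() {
    let s = S { f: 0 };
    // Without the parentheses, `S { ... }` would be read as the condition `S`
    // followed by the if-block, exactly the ambiguity forbid_structs avoids.
    if (S { f: 1 }).f > s.f {
        println!("bigger");
    }
}
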
diff --git a/crates/ra_syntax/src/parsing/grammar/items.rs b/crates/ra_syntax/src/parsing/grammar/items.rs
new file mode 100644
index 000000000..4b962c1f3
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/items.rs
@@ -0,0 +1,392 @@
1 | mod consts; | ||
2 | mod nominal; | ||
3 | mod traits; | ||
4 | mod use_item; | ||
5 | |||
6 | pub(crate) use self::{ | ||
7 | expressions::{match_arm_list, named_field_list}, | ||
8 | nominal::{enum_variant_list, named_field_def_list}, | ||
9 | traits::{impl_item_list, trait_item_list}, | ||
10 | use_item::use_tree_list, | ||
11 | }; | ||
12 | use super::*; | ||
13 | |||
14 | // test mod_contents | ||
15 | // fn foo() {} | ||
16 | // macro_rules! foo {} | ||
17 | // foo::bar!(); | ||
18 | // super::baz! {} | ||
19 | // struct S; | ||
20 | pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { | ||
21 | attributes::inner_attributes(p); | ||
22 | while !p.at(EOF) && !(stop_on_r_curly && p.at(R_CURLY)) { | ||
23 | item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) | ||
24 | } | ||
25 | } | ||
26 | |||
27 | pub(super) enum ItemFlavor { | ||
28 | Mod, | ||
29 | Trait, | ||
30 | } | ||
31 | |||
32 | pub(super) const ITEM_RECOVERY_SET: TokenSet = token_set![ | ||
33 | FN_KW, STRUCT_KW, ENUM_KW, IMPL_KW, TRAIT_KW, CONST_KW, STATIC_KW, LET_KW, MOD_KW, PUB_KW, | ||
34 | CRATE_KW | ||
35 | ]; | ||
36 | |||
37 | pub(super) fn item_or_macro(p: &mut Parser, stop_on_r_curly: bool, flavor: ItemFlavor) { | ||
38 | let m = p.start(); | ||
39 | attributes::outer_attributes(p); | ||
40 | match maybe_item(p, flavor) { | ||
41 | MaybeItem::Item(kind) => { | ||
42 | m.complete(p, kind); | ||
43 | } | ||
44 | MaybeItem::None => { | ||
45 | if paths::is_path_start(p) { | ||
46 | match macro_call(p) { | ||
47 | BlockLike::Block => (), | ||
48 | BlockLike::NotBlock => { | ||
49 | p.expect(SEMI); | ||
50 | } | ||
51 | } | ||
52 | m.complete(p, MACRO_CALL); | ||
53 | } else { | ||
54 | m.abandon(p); | ||
55 | if p.at(L_CURLY) { | ||
56 | error_block(p, "expected an item"); | ||
57 | } else if p.at(R_CURLY) && !stop_on_r_curly { | ||
58 | let e = p.start(); | ||
59 | p.error("unmatched `}`"); | ||
60 | p.bump(); | ||
61 | e.complete(p, ERROR); | ||
62 | } else if !p.at(EOF) && !p.at(R_CURLY) { | ||
63 | p.err_and_bump("expected an item"); | ||
64 | } else { | ||
65 | p.error("expected an item"); | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | MaybeItem::Modifiers => { | ||
70 | p.error("expected fn, trait or impl"); | ||
71 | m.complete(p, ERROR); | ||
72 | } | ||
73 | } | ||
74 | } | ||
75 | |||
76 | pub(super) enum MaybeItem { | ||
77 | None, | ||
78 | Item(SyntaxKind), | ||
79 | Modifiers, | ||
80 | } | ||
81 | |||
82 | pub(super) fn maybe_item(p: &mut Parser, flavor: ItemFlavor) -> MaybeItem { | ||
83 | opt_visibility(p); | ||
84 | if let Some(kind) = items_without_modifiers(p) { | ||
85 | return MaybeItem::Item(kind); | ||
86 | } | ||
87 | |||
88 | let mut has_mods = false; | ||
89 | // modifiers | ||
90 | has_mods |= p.eat(CONST_KW); | ||
91 | |||
92 | // test_err unsafe_block_in_mod | ||
93 | // fn foo(){} unsafe { } fn bar(){} | ||
94 | if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY { | ||
95 | p.eat(UNSAFE_KW); | ||
96 | has_mods = true; | ||
97 | } | ||
98 | if p.at(EXTERN_KW) { | ||
99 | has_mods = true; | ||
100 | abi(p); | ||
101 | } | ||
102 | if p.at(IDENT) && p.at_contextual_kw("auto") && p.nth(1) == TRAIT_KW { | ||
103 | p.bump_remap(AUTO_KW); | ||
104 | has_mods = true; | ||
105 | } | ||
106 | if p.at(IDENT) && p.at_contextual_kw("default") && p.nth(1) == IMPL_KW { | ||
107 | p.bump_remap(DEFAULT_KW); | ||
108 | has_mods = true; | ||
109 | } | ||
110 | |||
111 | // items | ||
112 | let kind = match p.current() { | ||
113 | // test extern_fn | ||
114 | // extern fn foo() {} | ||
115 | |||
116 | // test const_fn | ||
117 | // const fn foo() {} | ||
118 | |||
119 | // test const_unsafe_fn | ||
120 | // const unsafe fn foo() {} | ||
121 | |||
122 | // test unsafe_extern_fn | ||
123 | // unsafe extern "C" fn foo() {} | ||
124 | |||
125 | // test unsafe_fn | ||
126 | // unsafe fn foo() {} | ||
127 | FN_KW => { | ||
128 | fn_def(p, flavor); | ||
129 | FN_DEF | ||
130 | } | ||
131 | |||
132 | // test unsafe_trait | ||
133 | // unsafe trait T {} | ||
134 | |||
135 | // test auto_trait | ||
136 | // auto trait T {} | ||
137 | |||
138 | // test unsafe_auto_trait | ||
139 | // unsafe auto trait T {} | ||
140 | TRAIT_KW => { | ||
141 | traits::trait_def(p); | ||
142 | TRAIT_DEF | ||
143 | } | ||
144 | |||
145 | // test unsafe_impl | ||
146 | // unsafe impl Foo {} | ||
147 | |||
148 | // test default_impl | ||
149 | // default impl Foo {} | ||
150 | |||
151 | // test unsafe_default_impl | ||
152 | // unsafe default impl Foo {} | ||
153 | IMPL_KW => { | ||
154 | traits::impl_block(p); | ||
155 | IMPL_BLOCK | ||
156 | } | ||
157 | _ => { | ||
158 | return if has_mods { MaybeItem::Modifiers } else { MaybeItem::None }; | ||
159 | } | ||
160 | }; | ||
161 | |||
162 | MaybeItem::Item(kind) | ||
163 | } | ||
164 | |||
165 | fn items_without_modifiers(p: &mut Parser) -> Option<SyntaxKind> { | ||
166 | let la = p.nth(1); | ||
167 | let kind = match p.current() { | ||
168 | // test extern_crate | ||
169 | // extern crate foo; | ||
170 | EXTERN_KW if la == CRATE_KW => { | ||
171 | extern_crate_item(p); | ||
172 | EXTERN_CRATE_ITEM | ||
173 | } | ||
174 | TYPE_KW => { | ||
175 | type_def(p); | ||
176 | TYPE_DEF | ||
177 | } | ||
178 | MOD_KW => { | ||
179 | mod_item(p); | ||
180 | MODULE | ||
181 | } | ||
182 | STRUCT_KW => { | ||
183 | // test struct_items | ||
184 | // struct Foo; | ||
185 | // struct Foo {} | ||
186 | // struct Foo(); | ||
187 | // struct Foo(String, usize); | ||
188 | // struct Foo { | ||
189 | // a: i32, | ||
190 | // b: f32, | ||
191 | // } | ||
192 | nominal::struct_def(p, STRUCT_KW); | ||
193 | if p.at(SEMI) { | ||
194 | p.err_and_bump( | ||
195 | "expected item, found `;`\n\ | ||
196 | consider removing this semicolon", | ||
197 | ); | ||
198 | } | ||
199 | STRUCT_DEF | ||
200 | } | ||
201 | IDENT if p.at_contextual_kw("union") && p.nth(1) == IDENT => { | ||
202 | // test union_items | ||
203 | // union Foo {} | ||
204 | // union Foo { | ||
205 | // a: i32, | ||
206 | // b: f32, | ||
207 | // } | ||
208 | nominal::struct_def(p, UNION_KW); | ||
209 | STRUCT_DEF | ||
210 | } | ||
211 | ENUM_KW => { | ||
212 | nominal::enum_def(p); | ||
213 | ENUM_DEF | ||
214 | } | ||
215 | USE_KW => { | ||
216 | use_item::use_item(p); | ||
217 | USE_ITEM | ||
218 | } | ||
219 | CONST_KW if (la == IDENT || la == MUT_KW) => { | ||
220 | consts::const_def(p); | ||
221 | CONST_DEF | ||
222 | } | ||
223 | STATIC_KW => { | ||
224 | consts::static_def(p); | ||
225 | STATIC_DEF | ||
226 | } | ||
227 | // test extern_block | ||
228 | // extern {} | ||
229 | EXTERN_KW | ||
230 | if la == L_CURLY || ((la == STRING || la == RAW_STRING) && p.nth(2) == L_CURLY) => | ||
231 | { | ||
232 | abi(p); | ||
233 | extern_item_list(p); | ||
234 | EXTERN_BLOCK | ||
235 | } | ||
236 | _ => return None, | ||
237 | }; | ||
238 | Some(kind) | ||
239 | } | ||
240 | |||
241 | fn extern_crate_item(p: &mut Parser) { | ||
242 | assert!(p.at(EXTERN_KW)); | ||
243 | p.bump(); | ||
244 | assert!(p.at(CRATE_KW)); | ||
245 | p.bump(); | ||
246 | name_ref(p); | ||
247 | opt_alias(p); | ||
248 | p.expect(SEMI); | ||
249 | } | ||
250 | |||
251 | pub(crate) fn extern_item_list(p: &mut Parser) { | ||
252 | assert!(p.at(L_CURLY)); | ||
253 | let m = p.start(); | ||
254 | p.bump(); | ||
255 | mod_contents(p, true); | ||
256 | p.expect(R_CURLY); | ||
257 | m.complete(p, EXTERN_ITEM_LIST); | ||
258 | } | ||
259 | |||
260 | fn fn_def(p: &mut Parser, flavor: ItemFlavor) { | ||
261 | assert!(p.at(FN_KW)); | ||
262 | p.bump(); | ||
263 | |||
264 | name_r(p, ITEM_RECOVERY_SET); | ||
265 | // test function_type_params | ||
266 | // fn foo<T: Clone + Copy>(){} | ||
267 | type_params::opt_type_param_list(p); | ||
268 | |||
269 | if p.at(L_PAREN) { | ||
270 | match flavor { | ||
271 | ItemFlavor::Mod => params::param_list(p), | ||
272 | ItemFlavor::Trait => params::param_list_opt_patterns(p), | ||
273 | } | ||
274 | } else { | ||
275 | p.error("expected function arguments"); | ||
276 | } | ||
277 | // test function_ret_type | ||
278 | // fn foo() {} | ||
279 | // fn bar() -> () {} | ||
280 | opt_fn_ret_type(p); | ||
281 | |||
282 | // test function_where_clause | ||
283 | // fn foo<T>() where T: Copy {} | ||
284 | type_params::opt_where_clause(p); | ||
285 | |||
286 | // test fn_decl | ||
287 | // trait T { fn foo(); } | ||
288 | if p.at(SEMI) { | ||
289 | p.bump(); | ||
290 | } else { | ||
291 | expressions::block(p) | ||
292 | } | ||
293 | } | ||
294 | |||
295 | // test type_item | ||
296 | // type Foo = Bar; | ||
297 | fn type_def(p: &mut Parser) { | ||
298 | assert!(p.at(TYPE_KW)); | ||
299 | p.bump(); | ||
300 | |||
301 | name(p); | ||
302 | |||
303 | // test type_item_type_params | ||
304 | // type Result<T> = (); | ||
305 | type_params::opt_type_param_list(p); | ||
306 | |||
307 | if p.at(COLON) { | ||
308 | type_params::bounds(p); | ||
309 | } | ||
310 | |||
311 | // test type_item_where_clause | ||
312 | // type Foo where Foo: Copy = (); | ||
313 | type_params::opt_where_clause(p); | ||
314 | |||
315 | if p.eat(EQ) { | ||
316 | types::type_(p); | ||
317 | } | ||
318 | p.expect(SEMI); | ||
319 | } | ||
320 | |||
321 | pub(crate) fn mod_item(p: &mut Parser) { | ||
322 | assert!(p.at(MOD_KW)); | ||
323 | p.bump(); | ||
324 | |||
325 | name(p); | ||
326 | if p.at(L_CURLY) { | ||
327 | mod_item_list(p); | ||
328 | } else if !p.eat(SEMI) { | ||
329 | p.error("expected `;` or `{`"); | ||
330 | } | ||
331 | } | ||
332 | |||
333 | pub(crate) fn mod_item_list(p: &mut Parser) { | ||
334 | assert!(p.at(L_CURLY)); | ||
335 | let m = p.start(); | ||
336 | p.bump(); | ||
337 | mod_contents(p, true); | ||
338 | p.expect(R_CURLY); | ||
339 | m.complete(p, ITEM_LIST); | ||
340 | } | ||
341 | |||
342 | fn macro_call(p: &mut Parser) -> BlockLike { | ||
343 | assert!(paths::is_path_start(p)); | ||
344 | paths::use_path(p); | ||
345 | macro_call_after_excl(p) | ||
346 | } | ||
347 | |||
348 | pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { | ||
349 | p.expect(EXCL); | ||
350 | if p.at(IDENT) { | ||
351 | name(p); | ||
352 | } | ||
353 | match p.current() { | ||
354 | L_CURLY => { | ||
355 | token_tree(p); | ||
356 | BlockLike::Block | ||
357 | } | ||
358 | L_PAREN | L_BRACK => { | ||
359 | token_tree(p); | ||
360 | BlockLike::NotBlock | ||
361 | } | ||
362 | _ => { | ||
363 | p.error("expected `{`, `[`, `(`"); | ||
364 | BlockLike::NotBlock | ||
365 | } | ||
366 | } | ||
367 | } | ||
368 | |||
369 | pub(crate) fn token_tree(p: &mut Parser) { | ||
370 | let closing_paren_kind = match p.current() { | ||
371 | L_CURLY => R_CURLY, | ||
372 | L_PAREN => R_PAREN, | ||
373 | L_BRACK => R_BRACK, | ||
374 | _ => unreachable!(), | ||
375 | }; | ||
376 | let m = p.start(); | ||
377 | p.bump(); | ||
378 | while !p.at(EOF) && !p.at(closing_paren_kind) { | ||
379 | match p.current() { | ||
380 | L_CURLY | L_PAREN | L_BRACK => token_tree(p), | ||
381 | R_CURLY => { | ||
382 | p.error("unmatched `}`"); | ||
383 | m.complete(p, TOKEN_TREE); | ||
384 | return; | ||
385 | } | ||
386 | R_PAREN | R_BRACK => p.err_and_bump("unmatched brace"), | ||
387 | _ => p.bump(), | ||
388 | } | ||
389 | } | ||
390 | p.expect(closing_paren_kind); | ||
391 | m.complete(p, TOKEN_TREE); | ||
392 | } | ||
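`maybe_item` and `items_without_modifiers` treat `auto`, `default` and `union` as contextual keywords: they arrive as ordinary IDENT tokens and are remapped with `bump_remap` only when the lookahead makes the keyword reading unambiguous (for `union`, only when the next token is another IDENT). Plain Rust showing both readings of `union`:

// Keyword reading: `union` followed by an identifier starts a union definition.
union U {
    a: i32,
    b: f32,
}

// Identifier reading: `union` is still a perfectly good function name.
fn union(x: i32) -> i32 {
    x
}

fn main() {
    let u = U { a: 1 };
    let _ = union(unsafe { u.a });
}
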
diff --git a/crates/ra_syntax/src/parsing/grammar/items/consts.rs b/crates/ra_syntax/src/parsing/grammar/items/consts.rs
new file mode 100644
index 000000000..5a5852f83
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/items/consts.rs
@@ -0,0 +1,21 @@
1 | use super::*; | ||
2 | |||
3 | pub(super) fn static_def(p: &mut Parser) { | ||
4 | const_or_static(p, STATIC_KW) | ||
5 | } | ||
6 | |||
7 | pub(super) fn const_def(p: &mut Parser) { | ||
8 | const_or_static(p, CONST_KW) | ||
9 | } | ||
10 | |||
11 | fn const_or_static(p: &mut Parser, kw: SyntaxKind) { | ||
12 | assert!(p.at(kw)); | ||
13 | p.bump(); | ||
14 | p.eat(MUT_KW); // TODO: validator to forbid const mut | ||
15 | name(p); | ||
16 | types::ascription(p); | ||
17 | if p.eat(EQ) { | ||
18 | expressions::expr(p); | ||
19 | } | ||
20 | p.expect(SEMI); | ||
21 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/items/nominal.rs b/crates/ra_syntax/src/parsing/grammar/items/nominal.rs
new file mode 100644
index 000000000..ff9b38f9c
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/items/nominal.rs
@@ -0,0 +1,168 @@
1 | use super::*; | ||
2 | |||
3 | pub(super) fn struct_def(p: &mut Parser, kind: SyntaxKind) { | ||
4 | assert!(p.at(STRUCT_KW) || p.at_contextual_kw("union")); | ||
5 | p.bump_remap(kind); | ||
6 | |||
7 | name_r(p, ITEM_RECOVERY_SET); | ||
8 | type_params::opt_type_param_list(p); | ||
9 | match p.current() { | ||
10 | WHERE_KW => { | ||
11 | type_params::opt_where_clause(p); | ||
12 | match p.current() { | ||
13 | SEMI => { | ||
14 | p.bump(); | ||
15 | return; | ||
16 | } | ||
17 | L_CURLY => named_field_def_list(p), | ||
18 | _ => { | ||
19 | //TODO: special case `(` error message | ||
20 | p.error("expected `;` or `{`"); | ||
21 | return; | ||
22 | } | ||
23 | } | ||
24 | } | ||
25 | SEMI if kind == STRUCT_KW => { | ||
26 | p.bump(); | ||
27 | return; | ||
28 | } | ||
29 | L_CURLY => named_field_def_list(p), | ||
30 | L_PAREN if kind == STRUCT_KW => { | ||
31 | pos_field_def_list(p); | ||
32 | // test tuple_struct_where | ||
33 | // struct Test<T>(T) where T: Clone; | ||
34 | // struct Test<T>(T); | ||
35 | type_params::opt_where_clause(p); | ||
36 | p.expect(SEMI); | ||
37 | } | ||
38 | _ if kind == STRUCT_KW => { | ||
39 | p.error("expected `;`, `{`, or `(`"); | ||
40 | return; | ||
41 | } | ||
42 | _ => { | ||
43 | p.error("expected `{`"); | ||
44 | return; | ||
45 | } | ||
46 | } | ||
47 | } | ||
48 | |||
49 | pub(super) fn enum_def(p: &mut Parser) { | ||
50 | assert!(p.at(ENUM_KW)); | ||
51 | p.bump(); | ||
52 | name_r(p, ITEM_RECOVERY_SET); | ||
53 | type_params::opt_type_param_list(p); | ||
54 | type_params::opt_where_clause(p); | ||
55 | if p.at(L_CURLY) { | ||
56 | enum_variant_list(p); | ||
57 | } else { | ||
58 | p.error("expected `{`") | ||
59 | } | ||
60 | } | ||
61 | |||
62 | pub(crate) fn enum_variant_list(p: &mut Parser) { | ||
63 | assert!(p.at(L_CURLY)); | ||
64 | let m = p.start(); | ||
65 | p.bump(); | ||
66 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
67 | if p.at(L_CURLY) { | ||
68 | error_block(p, "expected enum variant"); | ||
69 | continue; | ||
70 | } | ||
71 | let var = p.start(); | ||
72 | attributes::outer_attributes(p); | ||
73 | if p.at(IDENT) { | ||
74 | name(p); | ||
75 | match p.current() { | ||
76 | L_CURLY => named_field_def_list(p), | ||
77 | L_PAREN => pos_field_def_list(p), | ||
78 | EQ => { | ||
79 | p.bump(); | ||
80 | expressions::expr(p); | ||
81 | } | ||
82 | _ => (), | ||
83 | } | ||
84 | var.complete(p, ENUM_VARIANT); | ||
85 | } else { | ||
86 | var.abandon(p); | ||
87 | p.err_and_bump("expected enum variant"); | ||
88 | } | ||
89 | if !p.at(R_CURLY) { | ||
90 | p.expect(COMMA); | ||
91 | } | ||
92 | } | ||
93 | p.expect(R_CURLY); | ||
94 | m.complete(p, ENUM_VARIANT_LIST); | ||
95 | } | ||
96 | |||
97 | pub(crate) fn named_field_def_list(p: &mut Parser) { | ||
98 | assert!(p.at(L_CURLY)); | ||
99 | let m = p.start(); | ||
100 | p.bump(); | ||
101 | while !p.at(R_CURLY) && !p.at(EOF) { | ||
102 | if p.at(L_CURLY) { | ||
103 | error_block(p, "expected field"); | ||
104 | continue; | ||
105 | } | ||
106 | named_field_def(p); | ||
107 | if !p.at(R_CURLY) { | ||
108 | p.expect(COMMA); | ||
109 | } | ||
110 | } | ||
111 | p.expect(R_CURLY); | ||
112 | m.complete(p, NAMED_FIELD_DEF_LIST); | ||
113 | |||
114 | fn named_field_def(p: &mut Parser) { | ||
115 | let m = p.start(); | ||
116 | // test field_attrs | ||
117 | // struct S { | ||
118 | // #[serde(with = "url_serde")] | ||
119 | // pub uri: Uri, | ||
120 | // } | ||
121 | attributes::outer_attributes(p); | ||
122 | opt_visibility(p); | ||
123 | if p.at(IDENT) { | ||
124 | name(p); | ||
125 | p.expect(COLON); | ||
126 | types::type_(p); | ||
127 | m.complete(p, NAMED_FIELD_DEF); | ||
128 | } else { | ||
129 | m.abandon(p); | ||
130 | p.err_and_bump("expected field declaration"); | ||
131 | } | ||
132 | } | ||
133 | } | ||
134 | |||
135 | fn pos_field_def_list(p: &mut Parser) { | ||
136 | assert!(p.at(L_PAREN)); | ||
137 | let m = p.start(); | ||
138 | if !p.expect(L_PAREN) { | ||
139 | return; | ||
140 | } | ||
141 | while !p.at(R_PAREN) && !p.at(EOF) { | ||
142 | let m = p.start(); | ||
143 | // test pos_field_attrs | ||
144 | // struct S ( | ||
145 | // #[serde(with = "url_serde")] | ||
146 | // pub Uri, | ||
147 | // ); | ||
148 | // | ||
149 | // enum S { | ||
150 | // Uri(#[serde(with = "url_serde")] Uri), | ||
151 | // } | ||
152 | attributes::outer_attributes(p); | ||
153 | opt_visibility(p); | ||
154 | if !p.at_ts(types::TYPE_FIRST) { | ||
155 | p.error("expected a type"); | ||
156 | m.complete(p, ERROR); | ||
157 | break; | ||
158 | } | ||
159 | types::type_(p); | ||
160 | m.complete(p, POS_FIELD_DEF); | ||
161 | |||
162 | if !p.at(R_PAREN) { | ||
163 | p.expect(COMMA); | ||
164 | } | ||
165 | } | ||
166 | p.expect(R_PAREN); | ||
167 | m.complete(p, POS_FIELD_DEF_LIST); | ||
168 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/items/traits.rs b/crates/ra_syntax/src/parsing/grammar/items/traits.rs
new file mode 100644
index 000000000..d5a8ccd98
--- /dev/null
+++ b/crates/ra_syntax/src/parsing/grammar/items/traits.rs
@@ -0,0 +1,137 @@
1 | use super::*; | ||
2 | |||
3 | // test trait_item | ||
4 | // trait T<U>: Hash + Clone where U: Copy {} | ||
5 | pub(super) fn trait_def(p: &mut Parser) { | ||
6 | assert!(p.at(TRAIT_KW)); | ||
7 | p.bump(); | ||
8 | name_r(p, ITEM_RECOVERY_SET); | ||
9 | type_params::opt_type_param_list(p); | ||
10 | if p.at(COLON) { | ||
11 | type_params::bounds(p); | ||
12 | } | ||
13 | type_params::opt_where_clause(p); | ||
14 | if p.at(L_CURLY) { | ||
15 | trait_item_list(p); | ||
16 | } else { | ||
17 | p.error("expected `{`"); | ||
18 | } | ||
19 | } | ||
20 | |||
21 | // test trait_item_list | ||
22 | // impl F { | ||
23 | // type A: Clone; | ||
24 | // const B: i32; | ||
25 | // fn foo() {} | ||
26 | // fn bar(&self); | ||
27 | // } | ||
28 | pub(crate) fn trait_item_list(p: &mut Parser) { | ||
29 | assert!(p.at(L_CURLY)); | ||
30 | let m = p.start(); | ||
31 | p.bump(); | ||
32 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
33 | if p.at(L_CURLY) { | ||
34 | error_block(p, "expected an item"); | ||
35 | continue; | ||
36 | } | ||
37 | item_or_macro(p, true, ItemFlavor::Trait); | ||
38 | } | ||
39 | p.expect(R_CURLY); | ||
40 | m.complete(p, ITEM_LIST); | ||
41 | } | ||
42 | |||
43 | // test impl_block | ||
44 | // impl Foo {} | ||
45 | pub(super) fn impl_block(p: &mut Parser) { | ||
46 | assert!(p.at(IMPL_KW)); | ||
47 | p.bump(); | ||
48 | if choose_type_params_over_qpath(p) { | ||
49 | type_params::opt_type_param_list(p); | ||
50 | } | ||
51 | |||
52 | // TODO: never type | ||
53 | // impl ! {} | ||
54 | |||
55 | // test impl_block_neg | ||
56 | // impl !Send for X {} | ||
57 | p.eat(EXCL); | ||
58 | impl_type(p); | ||
59 | if p.eat(FOR_KW) { | ||
60 | impl_type(p); | ||
61 | } | ||
62 | type_params::opt_where_clause(p); | ||
63 | if p.at(L_CURLY) { | ||
64 | impl_item_list(p); | ||
65 | } else { | ||
66 | p.error("expected `{`"); | ||
67 | } | ||
68 | } | ||
69 | |||
70 | // test impl_item_list | ||
71 | // impl F { | ||
72 | // type A = i32; | ||
73 | // const B: i32 = 92; | ||
74 | // fn foo() {} | ||
75 | // fn bar(&self) {} | ||
76 | // } | ||
77 | pub(crate) fn impl_item_list(p: &mut Parser) { | ||
78 | assert!(p.at(L_CURLY)); | ||
79 | let m = p.start(); | ||
80 | p.bump(); | ||
81 | // test impl_inner_attributes | ||
82 | // enum F{} | ||
83 | // impl F { | ||
84 | // //! This is a doc comment | ||
85 | // #![doc("This is also a doc comment")] | ||
86 | // } | ||
87 | attributes::inner_attributes(p); | ||
88 | |||
89 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
90 | if p.at(L_CURLY) { | ||
91 | error_block(p, "expected an item"); | ||
92 | continue; | ||
93 | } | ||
94 | item_or_macro(p, true, ItemFlavor::Mod); | ||
95 | } | ||
96 | p.expect(R_CURLY); | ||
97 | m.complete(p, ITEM_LIST); | ||
98 | } | ||
99 | |||
100 | fn choose_type_params_over_qpath(p: &Parser) -> bool { | ||
101 | // There's an ambiguity between generic parameters and qualified paths in impls. | ||
102 | // If we see `<` it may start both, so we have to inspect some following tokens. | ||
103 | // The following combinations can only start generics, | ||
104 | // but not qualified paths (with one exception): | ||
105 | // `<` `>` - empty generic parameters | ||
106 | // `<` `#` - generic parameters with attributes | ||
107 | // `<` (LIFETIME|IDENT) `>` - single generic parameter | ||
108 | // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list | ||
109 | // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds | ||
110 | // `<` (LIFETIME|IDENT) `=` - generic parameter with a default | ||
111 | // The only truly ambiguous case is | ||
112 | // `<` IDENT `>` `::` IDENT ... | ||
113 | // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`) | ||
114 | // because this is what is almost always expected in practice; qualified paths in impls | ||
115 | // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment. | ||
116 | if !p.at(L_ANGLE) { | ||
117 | return false; | ||
118 | } | ||
119 | if p.nth(1) == POUND || p.nth(1) == R_ANGLE { | ||
120 | return true; | ||
121 | } | ||
122 | (p.nth(1) == LIFETIME || p.nth(1) == IDENT) | ||
123 | && (p.nth(2) == R_ANGLE || p.nth(2) == COMMA || p.nth(2) == COLON || p.nth(2) == EQ) | ||
124 | } | ||
125 | |||
126 | // test_err impl_type | ||
127 | // impl Type {} | ||
128 | // impl Trait1 for T {} | ||
129 | // impl impl NotType {} | ||
130 | // impl Trait2 for impl NotType {} | ||
131 | pub(crate) fn impl_type(p: &mut Parser) { | ||
132 | if p.at(IMPL_KW) { | ||
133 | p.error("expected trait or type"); | ||
134 | return; | ||
135 | } | ||
136 | types::type_(p); | ||
137 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/items/use_item.rs b/crates/ra_syntax/src/parsing/grammar/items/use_item.rs new file mode 100644 index 000000000..5111d37eb --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/items/use_item.rs | |||
@@ -0,0 +1,121 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn use_item(p: &mut Parser) { | ||
4 | assert!(p.at(USE_KW)); | ||
5 | p.bump(); | ||
6 | use_tree(p); | ||
7 | p.expect(SEMI); | ||
8 | } | ||
9 | |||
10 | /// Parse a use 'tree', such as `some::path` in `use some::path;` | ||
11 | /// Note that this is called both by `use_item` and `use_tree_list`, | ||
12 | /// so it handles both `some::path::{inner::path}` and `inner::path` in | ||
13 | /// `use some::path::{inner::path};` | ||
14 | fn use_tree(p: &mut Parser) { | ||
15 | let la = p.nth(1); | ||
16 | let m = p.start(); | ||
17 | match (p.current(), la) { | ||
18 | // Finish the use_tree for cases of e.g. | ||
19 | // `use some::path::{self, *};` or `use *;` | ||
20 | // This does not handle cases such as `use some::path::*` | ||
21 | // N.B. in Rust 2015 `use *;` imports all from crate root | ||
22 | // however in Rust 2018 `use *;` errors: ('cannot glob-import all possible crates') | ||
23 | // TODO: Add this error (if not out of scope) | ||
24 | |||
25 | // test use_star | ||
26 | // use *; | ||
27 | // use ::*; | ||
28 | // use some::path::{*}; | ||
29 | // use some::path::{::*}; | ||
30 | (STAR, _) => p.bump(), | ||
31 | (COLONCOLON, STAR) => { | ||
32 | // Parse `use ::*;`, which imports all from the crate root in Rust 2015 | ||
33 | // This is invalid inside a use_tree_list, (e.g. `use some::path::{::*}`) | ||
34 | // but still parses and errors later: ('crate root in paths can only be used in start position') | ||
35 | // TODO: Add this error (if not out of scope) | ||
36 | // In Rust 2018, it is always invalid (see above) | ||
37 | p.bump(); | ||
38 | p.bump(); | ||
39 | } | ||
40 | // Open a use tree list | ||
41 | // Handles cases such as `use {some::path};` or `{inner::path}` in | ||
42 | // `use some::path::{{inner::path}, other::path}` | ||
43 | |||
44 | // test use_tree_list | ||
45 | // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) | ||
46 | // use {path::from::root}; // Rust 2015 | ||
47 | // use ::{some::arbritrary::path}; // Rust 2015 | ||
48 | // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nesting | ||
49 | (L_CURLY, _) | (COLONCOLON, L_CURLY) => { | ||
50 | if p.at(COLONCOLON) { | ||
51 | p.bump(); | ||
52 | } | ||
53 | use_tree_list(p); | ||
54 | } | ||
55 | // Parse a 'standard' path. | ||
56 | // Also handles aliases (e.g. `use something as something_else`) | ||
57 | |||
58 | // test use_path | ||
59 | // use ::crate_name; // Rust 2018 - All flavours | ||
60 | // use crate_name; // Rust 2018 - Anchored paths | ||
61 | // use item_in_scope_or_crate_name; // Rust 2018 - Uniform Paths | ||
62 | // | ||
63 | // use self::module::Item; | ||
64 | // use crate::Item; | ||
65 | // use self::some::Struct; | ||
66 | // use crate_name::some_item; | ||
67 | _ if paths::is_path_start(p) => { | ||
68 | paths::use_path(p); | ||
69 | match p.current() { | ||
70 | AS_KW => { | ||
71 | // test use_alias | ||
72 | // use some::path as some_name; | ||
73 | // use some::{ | ||
74 | // other::path as some_other_name, | ||
75 | // different::path as different_name, | ||
76 | // yet::another::path, | ||
77 | // running::out::of::synonyms::for_::different::* | ||
78 | // }; | ||
79 | opt_alias(p); | ||
80 | } | ||
81 | COLONCOLON => { | ||
82 | p.bump(); | ||
83 | match p.current() { | ||
84 | STAR => { | ||
85 | p.bump(); | ||
86 | } | ||
87 | // test use_tree_list_after_path | ||
88 | // use crate::{Item}; | ||
89 | // use self::{Item}; | ||
90 | L_CURLY => use_tree_list(p), | ||
91 | _ => { | ||
92 | // is this unreachable? | ||
93 | p.error("expected `{` or `*`"); | ||
94 | } | ||
95 | } | ||
96 | } | ||
97 | _ => (), | ||
98 | } | ||
99 | } | ||
100 | _ => { | ||
101 | m.abandon(p); | ||
102 | p.err_and_bump("expected one of `*`, `::`, `{`, `self`, `super` or an identifier"); | ||
103 | return; | ||
104 | } | ||
105 | } | ||
106 | m.complete(p, USE_TREE); | ||
107 | } | ||
108 | |||
109 | pub(crate) fn use_tree_list(p: &mut Parser) { | ||
110 | assert!(p.at(L_CURLY)); | ||
111 | let m = p.start(); | ||
112 | p.bump(); | ||
113 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
114 | use_tree(p); | ||
115 | if !p.at(R_CURLY) { | ||
116 | p.expect(COMMA); | ||
117 | } | ||
118 | } | ||
119 | p.expect(R_CURLY); | ||
120 | m.complete(p, USE_TREE_LIST); | ||
121 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/params.rs b/crates/ra_syntax/src/parsing/grammar/params.rs new file mode 100644 index 000000000..185386569 --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/params.rs | |||
@@ -0,0 +1,139 @@ | |||
1 | use super::*; | ||
2 | |||
3 | // test param_list | ||
4 | // fn a() {} | ||
5 | // fn b(x: i32) {} | ||
6 | // fn c(x: i32, ) {} | ||
7 | // fn d(x: i32, y: ()) {} | ||
8 | pub(super) fn param_list(p: &mut Parser) { | ||
9 | list_(p, Flavor::Normal) | ||
10 | } | ||
11 | |||
12 | // test param_list_opt_patterns | ||
13 | // fn foo<F: FnMut(&mut Foo<'a>)>(){} | ||
14 | pub(super) fn param_list_opt_patterns(p: &mut Parser) { | ||
15 | list_(p, Flavor::OptionalPattern) | ||
16 | } | ||
17 | |||
18 | pub(super) fn param_list_opt_types(p: &mut Parser) { | ||
19 | list_(p, Flavor::OptionalType) | ||
20 | } | ||
21 | |||
22 | #[derive(Clone, Copy, Eq, PartialEq)] | ||
23 | enum Flavor { | ||
24 | OptionalType, | ||
25 | OptionalPattern, | ||
26 | Normal, | ||
27 | } | ||
28 | |||
29 | impl Flavor { | ||
30 | fn type_required(self) -> bool { | ||
31 | match self { | ||
32 | Flavor::OptionalType => false, | ||
33 | _ => true, | ||
34 | } | ||
35 | } | ||
36 | } | ||
37 | |||
38 | fn list_(p: &mut Parser, flavor: Flavor) { | ||
39 | let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) }; | ||
40 | assert!(p.at(bra)); | ||
41 | let m = p.start(); | ||
42 | p.bump(); | ||
43 | if flavor.type_required() { | ||
44 | opt_self_param(p); | ||
45 | } | ||
46 | while !p.at(EOF) && !p.at(ket) { | ||
47 | if !p.at_ts(VALUE_PARAMETER_FIRST) { | ||
48 | p.error("expected value parameter"); | ||
49 | break; | ||
50 | } | ||
51 | value_parameter(p, flavor); | ||
52 | if !p.at(ket) { | ||
53 | p.expect(COMMA); | ||
54 | } | ||
55 | } | ||
56 | p.expect(ket); | ||
57 | m.complete(p, PARAM_LIST); | ||
58 | } | ||
59 | |||
60 | const VALUE_PARAMETER_FIRST: TokenSet = patterns::PATTERN_FIRST.union(types::TYPE_FIRST); | ||
61 | |||
62 | fn value_parameter(p: &mut Parser, flavor: Flavor) { | ||
63 | let m = p.start(); | ||
64 | match flavor { | ||
65 | Flavor::OptionalType | Flavor::Normal => { | ||
66 | patterns::pattern(p); | ||
67 | if p.at(COLON) || flavor.type_required() { | ||
68 | types::ascription(p) | ||
69 | } | ||
70 | } | ||
71 | // test value_parameters_no_patterns | ||
72 | // type F = Box<Fn(a: i32, &b: &i32, &mut c: &i32, ())>; | ||
73 | Flavor::OptionalPattern => { | ||
74 | let la0 = p.current(); | ||
75 | let la1 = p.nth(1); | ||
76 | let la2 = p.nth(2); | ||
77 | let la3 = p.nth(3); | ||
78 | |||
79 | // test trait_fn_placeholder_parameter | ||
80 | // trait Foo { | ||
81 | // fn bar(_: u64); | ||
82 | // } | ||
83 | if (la0 == IDENT || la0 == UNDERSCORE) && la1 == COLON | ||
84 | || la0 == AMP && la1 == IDENT && la2 == COLON | ||
85 | || la0 == AMP && la1 == MUT_KW && la2 == IDENT && la3 == COLON | ||
86 | { | ||
87 | patterns::pattern(p); | ||
88 | types::ascription(p); | ||
89 | } else { | ||
90 | types::type_(p); | ||
91 | } | ||
92 | } | ||
93 | } | ||
94 | m.complete(p, PARAM); | ||
95 | } | ||
96 | |||
97 | // test self_param | ||
98 | // impl S { | ||
99 | // fn a(self) {} | ||
100 | // fn b(&self,) {} | ||
101 | // fn c(&'a self,) {} | ||
102 | // fn d(&'a mut self, x: i32) {} | ||
103 | // fn e(mut self) {} | ||
104 | // } | ||
105 | fn opt_self_param(p: &mut Parser) { | ||
106 | let m; | ||
107 | if p.at(SELF_KW) || p.at(MUT_KW) && p.nth(1) == SELF_KW { | ||
108 | m = p.start(); | ||
109 | p.eat(MUT_KW); | ||
110 | p.eat(SELF_KW); | ||
111 | // test arb_self_types | ||
112 | // impl S { | ||
113 | // fn a(self: &Self) {} | ||
114 | // fn b(mut self: Box<Self>) {} | ||
115 | // } | ||
116 | if p.at(COLON) { | ||
117 | types::ascription(p); | ||
118 | } | ||
119 | } else { | ||
120 | let la1 = p.nth(1); | ||
121 | let la2 = p.nth(2); | ||
122 | let la3 = p.nth(3); | ||
123 | let n_toks = match (p.current(), la1, la2, la3) { | ||
124 | (AMP, SELF_KW, _, _) => 2, | ||
125 | (AMP, MUT_KW, SELF_KW, _) => 3, | ||
126 | (AMP, LIFETIME, SELF_KW, _) => 3, | ||
127 | (AMP, LIFETIME, MUT_KW, SELF_KW) => 4, | ||
128 | _ => return, | ||
129 | }; | ||
130 | m = p.start(); | ||
131 | for _ in 0..n_toks { | ||
132 | p.bump(); | ||
133 | } | ||
134 | } | ||
135 | m.complete(p, SELF_PARAM); | ||
136 | if !p.at(R_PAREN) { | ||
137 | p.expect(COMMA); | ||
138 | } | ||
139 | } | ||
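The three `Flavor` values correspond to three surface forms of parameter lists: `fn` items, closure-style PIPE-delimited lists, and parenthesized `Fn`-trait sugar. A hedged illustration in plain Rust (not from the patch; `flavor_examples`, `normal`, `add`, and `Callback` are invented names):

    fn flavor_examples() {
        // Flavor::Normal: `fn` items take a pattern plus a type ascription per parameter.
        fn normal(x: i32, (a, b): (u8, u8)) {}
        // Flavor::OptionalType: PIPE-delimited closure parameters may omit the type.
        let add = |x, y| x + y;
        let _ = add(1, 2);
        // Flavor::OptionalPattern: parenthesized Fn-trait sugar usually carries types only.
        type Callback = Box<dyn Fn(i32, &str) -> bool>;
        normal(1, (2, 3));
    }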
diff --git a/crates/ra_syntax/src/parsing/grammar/paths.rs b/crates/ra_syntax/src/parsing/grammar/paths.rs new file mode 100644 index 000000000..33a11886c --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/paths.rs | |||
@@ -0,0 +1,103 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) const PATH_FIRST: TokenSet = | ||
4 | token_set![IDENT, SELF_KW, SUPER_KW, CRATE_KW, COLONCOLON, L_ANGLE]; | ||
5 | |||
6 | pub(super) fn is_path_start(p: &Parser) -> bool { | ||
7 | match p.current() { | ||
8 | IDENT | SELF_KW | SUPER_KW | CRATE_KW | COLONCOLON => true, | ||
9 | _ => false, | ||
10 | } | ||
11 | } | ||
12 | |||
13 | pub(super) fn use_path(p: &mut Parser) { | ||
14 | path(p, Mode::Use) | ||
15 | } | ||
16 | |||
17 | pub(super) fn type_path(p: &mut Parser) { | ||
18 | path(p, Mode::Type) | ||
19 | } | ||
20 | |||
21 | pub(super) fn expr_path(p: &mut Parser) { | ||
22 | path(p, Mode::Expr) | ||
23 | } | ||
24 | |||
25 | #[derive(Clone, Copy, Eq, PartialEq)] | ||
26 | enum Mode { | ||
27 | Use, | ||
28 | Type, | ||
29 | Expr, | ||
30 | } | ||
31 | |||
32 | fn path(p: &mut Parser, mode: Mode) { | ||
33 | let path = p.start(); | ||
34 | path_segment(p, mode, true); | ||
35 | let mut qual = path.complete(p, PATH); | ||
36 | loop { | ||
37 | let use_tree = match p.nth(1) { | ||
38 | STAR | L_CURLY => true, | ||
39 | _ => false, | ||
40 | }; | ||
41 | if p.at(COLONCOLON) && !use_tree { | ||
42 | let path = qual.precede(p); | ||
43 | p.bump(); | ||
44 | path_segment(p, mode, false); | ||
45 | let path = path.complete(p, PATH); | ||
46 | qual = path; | ||
47 | } else { | ||
48 | break; | ||
49 | } | ||
50 | } | ||
51 | } | ||
52 | |||
53 | fn path_segment(p: &mut Parser, mode: Mode, first: bool) { | ||
54 | let m = p.start(); | ||
55 | // test qual_paths | ||
56 | // type X = <A as B>::Output; | ||
57 | // fn foo() { <usize as Default>::default(); } | ||
58 | if first && p.eat(L_ANGLE) { | ||
59 | types::type_(p); | ||
60 | if p.eat(AS_KW) { | ||
61 | if is_path_start(p) { | ||
62 | types::path_type(p); | ||
63 | } else { | ||
64 | p.error("expected a trait"); | ||
65 | } | ||
66 | } | ||
67 | p.expect(R_ANGLE); | ||
68 | } else { | ||
69 | if first { | ||
70 | p.eat(COLONCOLON); | ||
71 | } | ||
72 | match p.current() { | ||
73 | IDENT => { | ||
74 | name_ref(p); | ||
75 | opt_path_type_args(p, mode); | ||
76 | } | ||
77 | // test crate_path | ||
78 | // use crate::foo; | ||
79 | SELF_KW | SUPER_KW | CRATE_KW => p.bump(), | ||
80 | _ => { | ||
81 | p.err_recover("expected identifier", items::ITEM_RECOVERY_SET); | ||
82 | } | ||
83 | }; | ||
84 | } | ||
85 | m.complete(p, PATH_SEGMENT); | ||
86 | } | ||
87 | |||
88 | fn opt_path_type_args(p: &mut Parser, mode: Mode) { | ||
89 | match mode { | ||
90 | Mode::Use => return, | ||
91 | Mode::Type => { | ||
92 | // test path_fn_trait_args | ||
93 | // type F = Box<Fn(x: i32) -> ()>; | ||
94 | if p.at(L_PAREN) { | ||
95 | params::param_list_opt_patterns(p); | ||
96 | opt_fn_ret_type(p); | ||
97 | } else { | ||
98 | type_args::opt_type_arg_list(p, false) | ||
99 | } | ||
100 | } | ||
101 | Mode::Expr => type_args::opt_type_arg_list(p, true), | ||
102 | } | ||
103 | } | ||
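The `Mode` distinction matters mostly for type arguments: in type position a bare `<...>` is allowed, while in expression position the turbofish `::<...>` is required, which is why `Mode::Expr` passes `true` to `opt_type_arg_list`. A small illustration in ordinary Rust (not from the patch; `path_mode_examples` is an invented name):

    fn path_mode_examples() {
        let _v: Vec<i32> = Vec::new();  // type position: `Vec<i32>`, no `::` before `<`
        let _w = Vec::<i32>::new();     // expression position: turbofish required
    }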
diff --git a/crates/ra_syntax/src/parsing/grammar/patterns.rs b/crates/ra_syntax/src/parsing/grammar/patterns.rs new file mode 100644 index 000000000..9d7da639d --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/patterns.rs | |||
@@ -0,0 +1,248 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST | ||
4 | .union(paths::PATH_FIRST) | ||
5 | .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE]); | ||
6 | |||
7 | pub(super) fn pattern(p: &mut Parser) { | ||
8 | pattern_r(p, PAT_RECOVERY_SET) | ||
9 | } | ||
10 | |||
11 | pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { | ||
12 | if let Some(lhs) = atom_pat(p, recovery_set) { | ||
13 | // test range_pat | ||
14 | // fn main() { | ||
15 | // match 92 { | ||
16 | // 0 ... 100 => (), | ||
17 | // 101 ..= 200 => (), | ||
18 | // 200 .. 301=> (), | ||
19 | // } | ||
20 | // } | ||
21 | if p.at(DOTDOTDOT) || p.at(DOTDOTEQ) || p.at(DOTDOT) { | ||
22 | let m = lhs.precede(p); | ||
23 | p.bump(); | ||
24 | atom_pat(p, recovery_set); | ||
25 | m.complete(p, RANGE_PAT); | ||
26 | } | ||
27 | } | ||
28 | } | ||
29 | |||
30 | const PAT_RECOVERY_SET: TokenSet = | ||
31 | token_set![LET_KW, IF_KW, WHILE_KW, LOOP_KW, MATCH_KW, R_PAREN, COMMA]; | ||
32 | |||
33 | fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> { | ||
34 | let la0 = p.nth(0); | ||
35 | let la1 = p.nth(1); | ||
36 | if la0 == REF_KW | ||
37 | || la0 == MUT_KW | ||
38 | || (la0 == IDENT && !(la1 == COLONCOLON || la1 == L_PAREN || la1 == L_CURLY)) | ||
39 | { | ||
40 | return Some(bind_pat(p, true)); | ||
41 | } | ||
42 | if paths::is_path_start(p) { | ||
43 | return Some(path_pat(p)); | ||
44 | } | ||
45 | |||
46 | if is_literal_pat_start(p) { | ||
47 | return Some(literal_pat(p)); | ||
48 | } | ||
49 | |||
50 | let m = match la0 { | ||
51 | UNDERSCORE => placeholder_pat(p), | ||
52 | AMP => ref_pat(p), | ||
53 | L_PAREN => tuple_pat(p), | ||
54 | L_BRACK => slice_pat(p), | ||
55 | _ => { | ||
56 | p.err_recover("expected pattern", recovery_set); | ||
57 | return None; | ||
58 | } | ||
59 | }; | ||
60 | Some(m) | ||
61 | } | ||
62 | |||
63 | fn is_literal_pat_start(p: &mut Parser) -> bool { | ||
64 | p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) | ||
65 | || p.at_ts(expressions::LITERAL_FIRST) | ||
66 | } | ||
67 | |||
68 | // test literal_pattern | ||
69 | // fn main() { | ||
70 | // match () { | ||
71 | // -1 => (), | ||
72 | // 92 => (), | ||
73 | // 'c' => (), | ||
74 | // "hello" => (), | ||
75 | // } | ||
76 | // } | ||
77 | fn literal_pat(p: &mut Parser) -> CompletedMarker { | ||
78 | assert!(is_literal_pat_start(p)); | ||
79 | let m = p.start(); | ||
80 | if p.at(MINUS) { | ||
81 | p.bump(); | ||
82 | } | ||
83 | expressions::literal(p); | ||
84 | m.complete(p, LITERAL_PAT) | ||
85 | } | ||
86 | |||
87 | // test path_part | ||
88 | // fn foo() { | ||
89 | // let foo::Bar = (); | ||
90 | // let ::Bar = (); | ||
91 | // let Bar { .. } = (); | ||
92 | // let Bar(..) = (); | ||
93 | // } | ||
94 | fn path_pat(p: &mut Parser) -> CompletedMarker { | ||
95 | assert!(paths::is_path_start(p)); | ||
96 | let m = p.start(); | ||
97 | paths::expr_path(p); | ||
98 | let kind = match p.current() { | ||
99 | L_PAREN => { | ||
100 | tuple_pat_fields(p); | ||
101 | TUPLE_STRUCT_PAT | ||
102 | } | ||
103 | L_CURLY => { | ||
104 | field_pat_list(p); | ||
105 | STRUCT_PAT | ||
106 | } | ||
107 | _ => PATH_PAT, | ||
108 | }; | ||
109 | m.complete(p, kind) | ||
110 | } | ||
111 | |||
112 | // test tuple_pat_fields | ||
113 | // fn foo() { | ||
114 | // let S() = (); | ||
115 | // let S(_) = (); | ||
116 | // let S(_,) = (); | ||
117 | // let S(_, .. , x) = (); | ||
118 | // } | ||
119 | fn tuple_pat_fields(p: &mut Parser) { | ||
120 | assert!(p.at(L_PAREN)); | ||
121 | p.bump(); | ||
122 | pat_list(p, R_PAREN); | ||
123 | p.expect(R_PAREN); | ||
124 | } | ||
125 | |||
126 | // test field_pat_list | ||
127 | // fn foo() { | ||
128 | // let S {} = (); | ||
129 | // let S { f, ref mut g } = (); | ||
130 | // let S { h: _, ..} = (); | ||
131 | // let S { h: _, } = (); | ||
132 | // } | ||
133 | fn field_pat_list(p: &mut Parser) { | ||
134 | assert!(p.at(L_CURLY)); | ||
135 | let m = p.start(); | ||
136 | p.bump(); | ||
137 | while !p.at(EOF) && !p.at(R_CURLY) { | ||
138 | match p.current() { | ||
139 | DOTDOT => p.bump(), | ||
140 | IDENT if p.nth(1) == COLON => field_pat(p), | ||
141 | L_CURLY => error_block(p, "expected ident"), | ||
142 | _ => { | ||
143 | bind_pat(p, false); | ||
144 | } | ||
145 | } | ||
146 | if !p.at(R_CURLY) { | ||
147 | p.expect(COMMA); | ||
148 | } | ||
149 | } | ||
150 | p.expect(R_CURLY); | ||
151 | m.complete(p, FIELD_PAT_LIST); | ||
152 | } | ||
153 | |||
154 | fn field_pat(p: &mut Parser) { | ||
155 | assert!(p.at(IDENT)); | ||
156 | assert!(p.nth(1) == COLON); | ||
157 | |||
158 | let m = p.start(); | ||
159 | name(p); | ||
160 | p.bump(); | ||
161 | pattern(p); | ||
162 | m.complete(p, FIELD_PAT); | ||
163 | } | ||
164 | |||
165 | // test placeholder_pat | ||
166 | // fn main() { let _ = (); } | ||
167 | fn placeholder_pat(p: &mut Parser) -> CompletedMarker { | ||
168 | assert!(p.at(UNDERSCORE)); | ||
169 | let m = p.start(); | ||
170 | p.bump(); | ||
171 | m.complete(p, PLACEHOLDER_PAT) | ||
172 | } | ||
173 | |||
174 | // test ref_pat | ||
175 | // fn main() { | ||
176 | // let &a = (); | ||
177 | // let &mut b = (); | ||
178 | // } | ||
179 | fn ref_pat(p: &mut Parser) -> CompletedMarker { | ||
180 | assert!(p.at(AMP)); | ||
181 | let m = p.start(); | ||
182 | p.bump(); | ||
183 | p.eat(MUT_KW); | ||
184 | pattern(p); | ||
185 | m.complete(p, REF_PAT) | ||
186 | } | ||
187 | |||
188 | // test tuple_pat | ||
189 | // fn main() { | ||
190 | // let (a, b, ..) = (); | ||
191 | // } | ||
192 | fn tuple_pat(p: &mut Parser) -> CompletedMarker { | ||
193 | assert!(p.at(L_PAREN)); | ||
194 | let m = p.start(); | ||
195 | tuple_pat_fields(p); | ||
196 | m.complete(p, TUPLE_PAT) | ||
197 | } | ||
198 | |||
199 | // test slice_pat | ||
200 | // fn main() { | ||
201 | // let [a, b, ..] = []; | ||
202 | // } | ||
203 | fn slice_pat(p: &mut Parser) -> CompletedMarker { | ||
204 | assert!(p.at(L_BRACK)); | ||
205 | let m = p.start(); | ||
206 | p.bump(); | ||
207 | pat_list(p, R_BRACK); | ||
208 | p.expect(R_BRACK); | ||
209 | m.complete(p, SLICE_PAT) | ||
210 | } | ||
211 | |||
212 | fn pat_list(p: &mut Parser, ket: SyntaxKind) { | ||
213 | while !p.at(EOF) && !p.at(ket) { | ||
214 | match p.current() { | ||
215 | DOTDOT => p.bump(), | ||
216 | _ => { | ||
217 | if !p.at_ts(PATTERN_FIRST) { | ||
218 | p.error("expected a pattern"); | ||
219 | break; | ||
220 | } | ||
221 | pattern(p) | ||
222 | } | ||
223 | } | ||
224 | if !p.at(ket) { | ||
225 | p.expect(COMMA); | ||
226 | } | ||
227 | } | ||
228 | } | ||
229 | |||
230 | // test bind_pat | ||
231 | // fn main() { | ||
232 | // let a = (); | ||
233 | // let mut b = (); | ||
234 | // let ref c = (); | ||
235 | // let ref mut d = (); | ||
236 | // let e @ _ = (); | ||
237 | // let ref mut f @ g @ _ = (); | ||
238 | // } | ||
239 | fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker { | ||
240 | let m = p.start(); | ||
241 | p.eat(REF_KW); | ||
242 | p.eat(MUT_KW); | ||
243 | name(p); | ||
244 | if with_at && p.eat(AT) { | ||
245 | pattern(p); | ||
246 | } | ||
247 | m.complete(p, BIND_PAT) | ||
248 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/type_args.rs b/crates/ra_syntax/src/parsing/grammar/type_args.rs new file mode 100644 index 000000000..f889419c5 --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/type_args.rs | |||
@@ -0,0 +1,48 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn opt_type_arg_list(p: &mut Parser, colon_colon_required: bool) { | ||
4 | let m; | ||
5 | match (colon_colon_required, p.nth(0), p.nth(1)) { | ||
6 | (_, COLONCOLON, L_ANGLE) => { | ||
7 | m = p.start(); | ||
8 | p.bump(); | ||
9 | p.bump(); | ||
10 | } | ||
11 | (false, L_ANGLE, _) => { | ||
12 | m = p.start(); | ||
13 | p.bump(); | ||
14 | } | ||
15 | _ => return, | ||
16 | }; | ||
17 | |||
18 | while !p.at(EOF) && !p.at(R_ANGLE) { | ||
19 | type_arg(p); | ||
20 | if !p.at(R_ANGLE) && !p.expect(COMMA) { | ||
21 | break; | ||
22 | } | ||
23 | } | ||
24 | p.expect(R_ANGLE); | ||
25 | m.complete(p, TYPE_ARG_LIST); | ||
26 | } | ||
27 | |||
28 | // test type_arg | ||
29 | // type A = B<'static, i32, Item=u64>; | ||
30 | fn type_arg(p: &mut Parser) { | ||
31 | let m = p.start(); | ||
32 | match p.current() { | ||
33 | LIFETIME => { | ||
34 | p.bump(); | ||
35 | m.complete(p, LIFETIME_ARG); | ||
36 | } | ||
37 | IDENT if p.nth(1) == EQ => { | ||
38 | name_ref(p); | ||
39 | p.bump(); | ||
40 | types::type_(p); | ||
41 | m.complete(p, ASSOC_TYPE_ARG); | ||
42 | } | ||
43 | _ => { | ||
44 | types::type_(p); | ||
45 | m.complete(p, TYPE_ARG); | ||
46 | } | ||
47 | } | ||
48 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/type_params.rs b/crates/ra_syntax/src/parsing/grammar/type_params.rs new file mode 100644 index 000000000..40f998682 --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/type_params.rs | |||
@@ -0,0 +1,175 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) fn opt_type_param_list(p: &mut Parser) { | ||
4 | if !p.at(L_ANGLE) { | ||
5 | return; | ||
6 | } | ||
7 | type_param_list(p); | ||
8 | } | ||
9 | |||
10 | fn type_param_list(p: &mut Parser) { | ||
11 | assert!(p.at(L_ANGLE)); | ||
12 | let m = p.start(); | ||
13 | p.bump(); | ||
14 | |||
15 | while !p.at(EOF) && !p.at(R_ANGLE) { | ||
16 | let m = p.start(); | ||
17 | |||
18 | // test generic_lifetime_type_attribute | ||
19 | // fn foo<#[derive(Lifetime)] 'a, #[derive(Type)] T>(_: &'a T) { | ||
20 | // } | ||
21 | attributes::outer_attributes(p); | ||
22 | |||
23 | match p.current() { | ||
24 | LIFETIME => lifetime_param(p, m), | ||
25 | IDENT => type_param(p, m), | ||
26 | _ => { | ||
27 | m.abandon(p); | ||
28 | p.err_and_bump("expected type parameter") | ||
29 | } | ||
30 | } | ||
31 | if !p.at(R_ANGLE) && !p.expect(COMMA) { | ||
32 | break; | ||
33 | } | ||
34 | } | ||
35 | p.expect(R_ANGLE); | ||
36 | m.complete(p, TYPE_PARAM_LIST); | ||
37 | } | ||
38 | |||
39 | fn lifetime_param(p: &mut Parser, m: Marker) { | ||
40 | assert!(p.at(LIFETIME)); | ||
41 | p.bump(); | ||
42 | if p.at(COLON) { | ||
43 | lifetime_bounds(p); | ||
44 | } | ||
45 | m.complete(p, LIFETIME_PARAM); | ||
46 | } | ||
47 | |||
48 | fn type_param(p: &mut Parser, m: Marker) { | ||
49 | assert!(p.at(IDENT)); | ||
50 | name(p); | ||
51 | if p.at(COLON) { | ||
52 | bounds(p); | ||
53 | } | ||
54 | // test type_param_default | ||
55 | // struct S<T = i32>; | ||
56 | if p.at(EQ) { | ||
57 | p.bump(); | ||
58 | types::type_(p) | ||
59 | } | ||
60 | m.complete(p, TYPE_PARAM); | ||
61 | } | ||
62 | |||
63 | // test type_param_bounds | ||
64 | // struct S<T: 'a + ?Sized + (Copy)>; | ||
65 | pub(super) fn bounds(p: &mut Parser) { | ||
66 | assert!(p.at(COLON)); | ||
67 | p.bump(); | ||
68 | bounds_without_colon(p); | ||
69 | } | ||
70 | |||
71 | fn lifetime_bounds(p: &mut Parser) { | ||
72 | assert!(p.at(COLON)); | ||
73 | p.bump(); | ||
74 | while p.at(LIFETIME) { | ||
75 | p.bump(); | ||
76 | if !p.eat(PLUS) { | ||
77 | break; | ||
78 | } | ||
79 | } | ||
80 | } | ||
81 | |||
82 | pub(super) fn bounds_without_colon(p: &mut Parser) { | ||
83 | loop { | ||
84 | let has_paren = p.eat(L_PAREN); | ||
85 | p.eat(QUESTION); | ||
86 | match p.current() { | ||
87 | LIFETIME => p.bump(), | ||
88 | FOR_KW => types::for_type(p), | ||
89 | _ if paths::is_path_start(p) => types::path_type(p), | ||
90 | _ => break, | ||
91 | } | ||
92 | if has_paren { | ||
93 | p.expect(R_PAREN); | ||
94 | } | ||
95 | if !p.eat(PLUS) { | ||
96 | break; | ||
97 | } | ||
98 | } | ||
99 | } | ||
100 | |||
101 | // test where_clause | ||
102 | // fn foo() | ||
103 | // where | ||
104 | // 'a: 'b + 'c, | ||
105 | // T: Clone + Copy + 'static, | ||
106 | // Iterator::Item: 'a, | ||
107 | // <T as Iterator>::Item: 'a | ||
108 | // {} | ||
109 | pub(super) fn opt_where_clause(p: &mut Parser) { | ||
110 | if !p.at(WHERE_KW) { | ||
111 | return; | ||
112 | } | ||
113 | let m = p.start(); | ||
114 | p.bump(); | ||
115 | |||
116 | while is_where_predicate(p) { | ||
117 | where_predicate(p); | ||
118 | |||
119 | let comma = p.eat(COMMA); | ||
120 | |||
121 | if is_where_clause_end(p) { | ||
122 | break; | ||
123 | } | ||
124 | |||
125 | if !comma { | ||
126 | p.error("expected comma"); | ||
127 | } | ||
128 | } | ||
129 | |||
130 | m.complete(p, WHERE_CLAUSE); | ||
131 | } | ||
132 | |||
133 | fn is_where_predicate(p: &mut Parser) -> bool { | ||
134 | match p.current() { | ||
135 | LIFETIME => true, | ||
136 | IMPL_KW => false, | ||
137 | token => types::TYPE_FIRST.contains(token), | ||
138 | } | ||
139 | } | ||
140 | |||
141 | fn is_where_clause_end(p: &mut Parser) -> bool { | ||
142 | p.current() == L_CURLY || p.current() == SEMI || p.current() == EQ | ||
143 | } | ||
144 | |||
145 | fn where_predicate(p: &mut Parser) { | ||
146 | let m = p.start(); | ||
147 | match p.current() { | ||
148 | LIFETIME => { | ||
149 | p.bump(); | ||
150 | if p.at(COLON) { | ||
151 | lifetime_bounds(p); | ||
152 | } else { | ||
153 | p.error("expected colon"); | ||
154 | } | ||
155 | } | ||
156 | IMPL_KW => { | ||
157 | p.error("expected lifetime or type"); | ||
158 | } | ||
159 | _ => { | ||
160 | // test where_pred_for | ||
161 | // fn test<F>() | ||
162 | // where | ||
163 | // for<'a> F: Fn(&'a str) | ||
164 | // { } | ||
165 | types::type_(p); | ||
166 | |||
167 | if p.at(COLON) { | ||
168 | bounds(p); | ||
169 | } else { | ||
170 | p.error("expected colon"); | ||
171 | } | ||
172 | } | ||
173 | } | ||
174 | m.complete(p, WHERE_PRED); | ||
175 | } | ||
diff --git a/crates/ra_syntax/src/parsing/grammar/types.rs b/crates/ra_syntax/src/parsing/grammar/types.rs new file mode 100644 index 000000000..adc189a29 --- /dev/null +++ b/crates/ra_syntax/src/parsing/grammar/types.rs | |||
@@ -0,0 +1,278 @@ | |||
1 | use super::*; | ||
2 | |||
3 | pub(super) const TYPE_FIRST: TokenSet = paths::PATH_FIRST.union(token_set![ | ||
4 | L_PAREN, EXCL, STAR, L_BRACK, AMP, UNDERSCORE, FN_KW, UNSAFE_KW, EXTERN_KW, FOR_KW, IMPL_KW, | ||
5 | DYN_KW, L_ANGLE, | ||
6 | ]); | ||
7 | |||
8 | const TYPE_RECOVERY_SET: TokenSet = token_set![R_PAREN, COMMA]; | ||
9 | |||
10 | pub(super) fn type_(p: &mut Parser) { | ||
11 | type_with_bounds_cond(p, true); | ||
12 | } | ||
13 | |||
14 | pub(super) fn type_no_bounds(p: &mut Parser) { | ||
15 | type_with_bounds_cond(p, false); | ||
16 | } | ||
17 | |||
18 | fn type_with_bounds_cond(p: &mut Parser, allow_bounds: bool) { | ||
19 | match p.current() { | ||
20 | L_PAREN => paren_or_tuple_type(p), | ||
21 | EXCL => never_type(p), | ||
22 | STAR => pointer_type(p), | ||
23 | L_BRACK => array_or_slice_type(p), | ||
24 | AMP => reference_type(p), | ||
25 | UNDERSCORE => placeholder_type(p), | ||
26 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | ||
27 | FOR_KW => for_type(p), | ||
28 | IMPL_KW => impl_trait_type(p), | ||
29 | DYN_KW => dyn_trait_type(p), | ||
30 | // Some path types are not allowed to have bounds (no plus) | ||
31 | L_ANGLE => path_type_(p, allow_bounds), | ||
32 | _ if paths::is_path_start(p) => path_or_macro_type_(p, allow_bounds), | ||
33 | _ => { | ||
34 | p.err_recover("expected type", TYPE_RECOVERY_SET); | ||
35 | } | ||
36 | } | ||
37 | } | ||
38 | |||
39 | pub(super) fn ascription(p: &mut Parser) { | ||
40 | p.expect(COLON); | ||
41 | type_(p) | ||
42 | } | ||
43 | |||
44 | fn paren_or_tuple_type(p: &mut Parser) { | ||
45 | assert!(p.at(L_PAREN)); | ||
46 | let m = p.start(); | ||
47 | p.bump(); | ||
48 | let mut n_types: u32 = 0; | ||
49 | let mut trailing_comma: bool = false; | ||
50 | while !p.at(EOF) && !p.at(R_PAREN) { | ||
51 | n_types += 1; | ||
52 | type_(p); | ||
53 | if p.eat(COMMA) { | ||
54 | trailing_comma = true; | ||
55 | } else { | ||
56 | trailing_comma = false; | ||
57 | break; | ||
58 | } | ||
59 | } | ||
60 | p.expect(R_PAREN); | ||
61 | |||
62 | let kind = if n_types == 1 && !trailing_comma { | ||
63 | // test paren_type | ||
64 | // type T = (i32); | ||
65 | PAREN_TYPE | ||
66 | } else { | ||
67 | // test unit_type | ||
68 | // type T = (); | ||
69 | |||
70 | // test singleton_tuple_type | ||
71 | // type T = (i32,); | ||
72 | TUPLE_TYPE | ||
73 | }; | ||
74 | m.complete(p, kind); | ||
75 | } | ||
76 | |||
77 | // test never_type | ||
78 | // type Never = !; | ||
79 | fn never_type(p: &mut Parser) { | ||
80 | assert!(p.at(EXCL)); | ||
81 | let m = p.start(); | ||
82 | p.bump(); | ||
83 | m.complete(p, NEVER_TYPE); | ||
84 | } | ||
85 | |||
86 | fn pointer_type(p: &mut Parser) { | ||
87 | assert!(p.at(STAR)); | ||
88 | let m = p.start(); | ||
89 | p.bump(); | ||
90 | |||
91 | match p.current() { | ||
92 | // test pointer_type_mut | ||
93 | // type M = *mut (); | ||
94 | // type C = *const (); | ||
95 | MUT_KW | CONST_KW => p.bump(), | ||
96 | _ => { | ||
97 | // test_err pointer_type_no_mutability | ||
98 | // type T = *(); | ||
99 | p.error( | ||
100 | "expected mut or const in raw pointer type \ | ||
101 | (use `*mut T` or `*const T` as appropriate)", | ||
102 | ); | ||
103 | } | ||
104 | }; | ||
105 | |||
106 | type_no_bounds(p); | ||
107 | m.complete(p, POINTER_TYPE); | ||
108 | } | ||
109 | |||
110 | fn array_or_slice_type(p: &mut Parser) { | ||
111 | assert!(p.at(L_BRACK)); | ||
112 | let m = p.start(); | ||
113 | p.bump(); | ||
114 | |||
115 | type_(p); | ||
116 | let kind = match p.current() { | ||
117 | // test slice_type | ||
118 | // type T = [()]; | ||
119 | R_BRACK => { | ||
120 | p.bump(); | ||
121 | SLICE_TYPE | ||
122 | } | ||
123 | |||
124 | // test array_type | ||
125 | // type T = [(); 92]; | ||
126 | SEMI => { | ||
127 | p.bump(); | ||
128 | expressions::expr(p); | ||
129 | p.expect(R_BRACK); | ||
130 | ARRAY_TYPE | ||
131 | } | ||
132 | // test_err array_type_missing_semi | ||
133 | // type T = [() 92]; | ||
134 | _ => { | ||
135 | p.error("expected `;` or `]`"); | ||
136 | SLICE_TYPE | ||
137 | } | ||
138 | }; | ||
139 | m.complete(p, kind); | ||
140 | } | ||
141 | |||
142 | // test reference_type | ||
143 | // type A = &(); | ||
144 | // type B = &'static (); | ||
145 | // type C = &mut (); | ||
146 | fn reference_type(p: &mut Parser) { | ||
147 | assert!(p.at(AMP)); | ||
148 | let m = p.start(); | ||
149 | p.bump(); | ||
150 | p.eat(LIFETIME); | ||
151 | p.eat(MUT_KW); | ||
152 | type_no_bounds(p); | ||
153 | m.complete(p, REFERENCE_TYPE); | ||
154 | } | ||
155 | |||
156 | // test placeholder_type | ||
157 | // type Placeholder = _; | ||
158 | fn placeholder_type(p: &mut Parser) { | ||
159 | assert!(p.at(UNDERSCORE)); | ||
160 | let m = p.start(); | ||
161 | p.bump(); | ||
162 | m.complete(p, PLACEHOLDER_TYPE); | ||
163 | } | ||
164 | |||
165 | // test fn_pointer_type | ||
166 | // type A = fn(); | ||
167 | // type B = unsafe fn(); | ||
168 | // type C = unsafe extern "C" fn(); | ||
169 | fn fn_pointer_type(p: &mut Parser) { | ||
170 | let m = p.start(); | ||
171 | p.eat(UNSAFE_KW); | ||
172 | if p.at(EXTERN_KW) { | ||
173 | abi(p); | ||
174 | } | ||
175 | // test_err fn_pointer_type_missing_fn | ||
176 | // type F = unsafe (); | ||
177 | if !p.eat(FN_KW) { | ||
178 | m.abandon(p); | ||
179 | p.error("expected `fn`"); | ||
180 | return; | ||
181 | } | ||
182 | if p.at(L_PAREN) { | ||
183 | params::param_list_opt_patterns(p); | ||
184 | } else { | ||
185 | p.error("expected parameters") | ||
186 | } | ||
187 | // test fn_pointer_type_with_ret | ||
188 | // type F = fn() -> (); | ||
189 | opt_fn_ret_type(p); | ||
190 | m.complete(p, FN_POINTER_TYPE); | ||
191 | } | ||
192 | |||
193 | pub(super) fn for_binder(p: &mut Parser) { | ||
194 | assert!(p.at(FOR_KW)); | ||
195 | p.bump(); | ||
196 | if p.at(L_ANGLE) { | ||
197 | type_params::opt_type_param_list(p); | ||
198 | } else { | ||
199 | p.error("expected `<`"); | ||
200 | } | ||
201 | } | ||
202 | |||
203 | // test for_type | ||
204 | // type A = for<'a> fn() -> (); | ||
205 | pub(super) fn for_type(p: &mut Parser) { | ||
206 | assert!(p.at(FOR_KW)); | ||
207 | let m = p.start(); | ||
208 | for_binder(p); | ||
209 | match p.current() { | ||
210 | FN_KW | UNSAFE_KW | EXTERN_KW => fn_pointer_type(p), | ||
211 | _ if paths::is_path_start(p) => path_type_(p, false), | ||
212 | _ => p.error("expected a path"), | ||
213 | } | ||
214 | m.complete(p, FOR_TYPE); | ||
215 | } | ||
216 | |||
217 | // test impl_trait_type | ||
218 | // type A = impl Iterator<Item=Foo<'a>> + 'a; | ||
219 | fn impl_trait_type(p: &mut Parser) { | ||
220 | assert!(p.at(IMPL_KW)); | ||
221 | let m = p.start(); | ||
222 | p.bump(); | ||
223 | type_params::bounds_without_colon(p); | ||
224 | m.complete(p, IMPL_TRAIT_TYPE); | ||
225 | } | ||
226 | |||
227 | // test dyn_trait_type | ||
228 | // type A = dyn Iterator<Item=Foo<'a>> + 'a; | ||
229 | fn dyn_trait_type(p: &mut Parser) { | ||
230 | assert!(p.at(DYN_KW)); | ||
231 | let m = p.start(); | ||
232 | p.bump(); | ||
233 | type_params::bounds_without_colon(p); | ||
234 | m.complete(p, DYN_TRAIT_TYPE); | ||
235 | } | ||
236 | |||
237 | // test path_type | ||
238 | // type A = Foo; | ||
239 | // type B = ::Foo; | ||
240 | // type C = self::Foo; | ||
241 | // type D = super::Foo; | ||
242 | pub(super) fn path_type(p: &mut Parser) { | ||
243 | path_type_(p, true) | ||
244 | } | ||
245 | |||
246 | // test macro_call_type | ||
247 | // type A = foo!(); | ||
248 | // type B = crate::foo!(); | ||
249 | fn path_or_macro_type_(p: &mut Parser, allow_bounds: bool) { | ||
250 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | ||
251 | let m = p.start(); | ||
252 | paths::type_path(p); | ||
253 | |||
254 | let kind = if p.at(EXCL) { | ||
255 | items::macro_call_after_excl(p); | ||
256 | MACRO_CALL | ||
257 | } else { | ||
258 | PATH_TYPE | ||
259 | }; | ||
260 | |||
261 | if allow_bounds && p.eat(PLUS) { | ||
262 | type_params::bounds_without_colon(p); | ||
263 | } | ||
264 | |||
265 | m.complete(p, kind); | ||
266 | } | ||
267 | |||
268 | pub(super) fn path_type_(p: &mut Parser, allow_bounds: bool) { | ||
269 | assert!(paths::is_path_start(p) || p.at(L_ANGLE)); | ||
270 | let m = p.start(); | ||
271 | paths::type_path(p); | ||
272 | // test path_type_with_bounds | ||
273 | // fn foo() -> Box<T + 'f> {} | ||
274 | if allow_bounds && p.eat(PLUS) { | ||
275 | type_params::bounds_without_colon(p); | ||
276 | } | ||
277 | m.complete(p, PATH_TYPE); | ||
278 | } | ||
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs new file mode 100644 index 000000000..f9362120e --- /dev/null +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
@@ -0,0 +1,215 @@ | |||
1 | mod classes; | ||
2 | mod comments; | ||
3 | mod numbers; | ||
4 | mod ptr; | ||
5 | mod strings; | ||
6 | |||
7 | use crate::{ | ||
8 | SyntaxKind::{self, *}, | ||
9 | TextUnit, | ||
10 | }; | ||
11 | |||
12 | use self::{ | ||
13 | classes::*, | ||
14 | comments::{scan_comment, scan_shebang}, | ||
15 | numbers::scan_number, | ||
16 | ptr::Ptr, | ||
17 | strings::{ | ||
18 | is_string_literal_start, scan_byte_char_or_string, scan_char, scan_raw_string, scan_string, | ||
19 | }, | ||
20 | }; | ||
21 | |||
22 | /// A token of Rust source. | ||
23 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] | ||
24 | pub struct Token { | ||
25 | /// The kind of token. | ||
26 | pub kind: SyntaxKind, | ||
27 | /// The length of the token. | ||
28 | pub len: TextUnit, | ||
29 | } | ||
30 | |||
31 | /// Break a string up into its component tokens | ||
32 | pub fn tokenize(text: &str) -> Vec<Token> { | ||
33 | let mut text = text; | ||
34 | let mut acc = Vec::new(); | ||
35 | while !text.is_empty() { | ||
36 | let token = next_token(text); | ||
37 | acc.push(token); | ||
38 | let len: u32 = token.len.into(); | ||
39 | text = &text[len as usize..]; | ||
40 | } | ||
41 | acc | ||
42 | } | ||
43 | |||
44 | /// Get the next token from a string | ||
45 | pub fn next_token(text: &str) -> Token { | ||
46 | assert!(!text.is_empty()); | ||
47 | let mut ptr = Ptr::new(text); | ||
48 | let c = ptr.bump().unwrap(); | ||
49 | let kind = next_token_inner(c, &mut ptr); | ||
50 | let len = ptr.into_len(); | ||
51 | Token { kind, len } | ||
52 | } | ||
53 | |||
54 | fn next_token_inner(c: char, ptr: &mut Ptr) -> SyntaxKind { | ||
55 | if is_whitespace(c) { | ||
56 | ptr.bump_while(is_whitespace); | ||
57 | return WHITESPACE; | ||
58 | } | ||
59 | |||
60 | match c { | ||
61 | '#' => { | ||
62 | if scan_shebang(ptr) { | ||
63 | return SHEBANG; | ||
64 | } | ||
65 | } | ||
66 | '/' => { | ||
67 | if let Some(kind) = scan_comment(ptr) { | ||
68 | return kind; | ||
69 | } | ||
70 | } | ||
71 | _ => (), | ||
72 | } | ||
73 | |||
74 | let ident_start = is_ident_start(c) && !is_string_literal_start(c, ptr.current(), ptr.nth(1)); | ||
75 | if ident_start { | ||
76 | return scan_ident(c, ptr); | ||
77 | } | ||
78 | |||
79 | if is_dec_digit(c) { | ||
80 | let kind = scan_number(c, ptr); | ||
81 | scan_literal_suffix(ptr); | ||
82 | return kind; | ||
83 | } | ||
84 | |||
85 | // One-byte tokens. | ||
86 | if let Some(kind) = SyntaxKind::from_char(c) { | ||
87 | return kind; | ||
88 | } | ||
89 | |||
90 | match c { | ||
91 | // Multi-byte tokens. | ||
92 | '.' => { | ||
93 | return match (ptr.current(), ptr.nth(1)) { | ||
94 | (Some('.'), Some('.')) => { | ||
95 | ptr.bump(); | ||
96 | ptr.bump(); | ||
97 | DOTDOTDOT | ||
98 | } | ||
99 | (Some('.'), Some('=')) => { | ||
100 | ptr.bump(); | ||
101 | ptr.bump(); | ||
102 | DOTDOTEQ | ||
103 | } | ||
104 | (Some('.'), _) => { | ||
105 | ptr.bump(); | ||
106 | DOTDOT | ||
107 | } | ||
108 | _ => DOT, | ||
109 | }; | ||
110 | } | ||
111 | ':' => { | ||
112 | return match ptr.current() { | ||
113 | Some(':') => { | ||
114 | ptr.bump(); | ||
115 | COLONCOLON | ||
116 | } | ||
117 | _ => COLON, | ||
118 | }; | ||
119 | } | ||
120 | '=' => { | ||
121 | return match ptr.current() { | ||
122 | Some('=') => { | ||
123 | ptr.bump(); | ||
124 | EQEQ | ||
125 | } | ||
126 | Some('>') => { | ||
127 | ptr.bump(); | ||
128 | FAT_ARROW | ||
129 | } | ||
130 | _ => EQ, | ||
131 | }; | ||
132 | } | ||
133 | '!' => { | ||
134 | return match ptr.current() { | ||
135 | Some('=') => { | ||
136 | ptr.bump(); | ||
137 | NEQ | ||
138 | } | ||
139 | _ => EXCL, | ||
140 | }; | ||
141 | } | ||
142 | '-' => { | ||
143 | return if ptr.at('>') { | ||
144 | ptr.bump(); | ||
145 | THIN_ARROW | ||
146 | } else { | ||
147 | MINUS | ||
148 | }; | ||
149 | } | ||
150 | |||
151 | // If the character is an ident start not followed by another single | ||
152 | // quote, then this is a lifetime name: | ||
153 | '\'' => { | ||
154 | return if ptr.at_p(is_ident_start) && !ptr.at_str("''") { | ||
155 | ptr.bump(); | ||
156 | while ptr.at_p(is_ident_continue) { | ||
157 | ptr.bump(); | ||
158 | } | ||
159 | // lifetimes shouldn't end with a single quote | ||
160 | // if we find one, then this is an invalid character literal | ||
161 | if ptr.at('\'') { | ||
162 | ptr.bump(); | ||
163 | return CHAR; | ||
164 | } | ||
165 | LIFETIME | ||
166 | } else { | ||
167 | scan_char(ptr); | ||
168 | scan_literal_suffix(ptr); | ||
169 | CHAR | ||
170 | }; | ||
171 | } | ||
172 | 'b' => { | ||
173 | let kind = scan_byte_char_or_string(ptr); | ||
174 | scan_literal_suffix(ptr); | ||
175 | return kind; | ||
176 | } | ||
177 | '"' => { | ||
178 | scan_string(ptr); | ||
179 | scan_literal_suffix(ptr); | ||
180 | return STRING; | ||
181 | } | ||
182 | 'r' => { | ||
183 | scan_raw_string(ptr); | ||
184 | scan_literal_suffix(ptr); | ||
185 | return RAW_STRING; | ||
186 | } | ||
187 | _ => (), | ||
188 | } | ||
189 | ERROR | ||
190 | } | ||
191 | |||
192 | fn scan_ident(c: char, ptr: &mut Ptr) -> SyntaxKind { | ||
193 | let is_raw = match (c, ptr.current()) { | ||
194 | ('r', Some('#')) => { | ||
195 | ptr.bump(); | ||
196 | true | ||
197 | } | ||
198 | ('_', Some(c)) if !is_ident_continue(c) => return UNDERSCORE, | ||
199 | _ => false, | ||
200 | }; | ||
201 | ptr.bump_while(is_ident_continue); | ||
202 | if !is_raw { | ||
203 | if let Some(kind) = SyntaxKind::from_keyword(ptr.current_token_text()) { | ||
204 | return kind; | ||
205 | } | ||
206 | } | ||
207 | IDENT | ||
208 | } | ||
209 | |||
210 | fn scan_literal_suffix(ptr: &mut Ptr) { | ||
211 | if ptr.at_p(is_ident_start) { | ||
212 | ptr.bump(); | ||
213 | } | ||
214 | ptr.bump_while(is_ident_continue); | ||
215 | } | ||
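A minimal sketch of driving the lexer, assuming code inside `ra_syntax` where the `parsing::lexer` module is visible. `tokenize`, `Token`, and the `TextUnit`-to-`u32` conversion are taken from the file above; `lex_demo` and the snippet text are invented for illustration:

    use crate::parsing::lexer::{tokenize, Token};

    fn lex_demo() {
        let text = "fn foo() -> i32 { 92 }";
        let tokens: Vec<Token> = tokenize(text);
        // Tokens are contiguous and cover the whole input; whitespace and
        // comments are ordinary tokens at this stage, nothing is skipped.
        let mut total: usize = 0;
        for t in &tokens {
            let len: u32 = t.len.into();
            total += len as usize;
        }
        assert_eq!(total, text.len());
    }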
diff --git a/crates/ra_syntax/src/parsing/lexer/classes.rs b/crates/ra_syntax/src/parsing/lexer/classes.rs new file mode 100644 index 000000000..4235d2648 --- /dev/null +++ b/crates/ra_syntax/src/parsing/lexer/classes.rs | |||
@@ -0,0 +1,26 @@ | |||
1 | use unicode_xid::UnicodeXID; | ||
2 | |||
3 | pub fn is_ident_start(c: char) -> bool { | ||
4 | (c >= 'a' && c <= 'z') | ||
5 | || (c >= 'A' && c <= 'Z') | ||
6 | || c == '_' | ||
7 | || (c > '\x7f' && UnicodeXID::is_xid_start(c)) | ||
8 | } | ||
9 | |||
10 | pub fn is_ident_continue(c: char) -> bool { | ||
11 | (c >= 'a' && c <= 'z') | ||
12 | || (c >= 'A' && c <= 'Z') | ||
13 | || (c >= '0' && c <= '9') | ||
14 | || c == '_' | ||
15 | || (c > '\x7f' && UnicodeXID::is_xid_continue(c)) | ||
16 | } | ||
17 | |||
18 | pub fn is_whitespace(c: char) -> bool { | ||
19 | //FIXME: use is_pattern_whitespace | ||
20 | //https://github.com/behnam/rust-unic/issues/192 | ||
21 | c.is_whitespace() | ||
22 | } | ||
23 | |||
24 | pub fn is_dec_digit(c: char) -> bool { | ||
25 | '0' <= c && c <= '9' | ||
26 | } | ||
diff --git a/crates/ra_syntax/src/parsing/lexer/comments.rs b/crates/ra_syntax/src/parsing/lexer/comments.rs new file mode 100644 index 000000000..8bbbe659b --- /dev/null +++ b/crates/ra_syntax/src/parsing/lexer/comments.rs | |||
@@ -0,0 +1,57 @@ | |||
1 | use crate::parsing::lexer::ptr::Ptr; | ||
2 | |||
3 | use crate::SyntaxKind::{self, *}; | ||
4 | |||
5 | pub(crate) fn scan_shebang(ptr: &mut Ptr) -> bool { | ||
6 | if ptr.at_str("!/") { | ||
7 | ptr.bump(); | ||
8 | ptr.bump(); | ||
9 | bump_until_eol(ptr); | ||
10 | true | ||
11 | } else { | ||
12 | false | ||
13 | } | ||
14 | } | ||
15 | |||
16 | fn scan_block_comment(ptr: &mut Ptr) -> Option<SyntaxKind> { | ||
17 | if ptr.at('*') { | ||
18 | ptr.bump(); | ||
19 | let mut depth: u32 = 1; | ||
20 | while depth > 0 { | ||
21 | if ptr.at_str("*/") { | ||
22 | depth -= 1; | ||
23 | ptr.bump(); | ||
24 | ptr.bump(); | ||
25 | } else if ptr.at_str("/*") { | ||
26 | depth += 1; | ||
27 | ptr.bump(); | ||
28 | ptr.bump(); | ||
29 | } else if ptr.bump().is_none() { | ||
30 | break; | ||
31 | } | ||
32 | } | ||
33 | Some(COMMENT) | ||
34 | } else { | ||
35 | None | ||
36 | } | ||
37 | } | ||
38 | |||
39 | pub(crate) fn scan_comment(ptr: &mut Ptr) -> Option<SyntaxKind> { | ||
40 | if ptr.at('/') { | ||
41 | bump_until_eol(ptr); | ||
42 | Some(COMMENT) | ||
43 | } else { | ||
44 | scan_block_comment(ptr) | ||
45 | } | ||
46 | } | ||
47 | |||
48 | fn bump_until_eol(ptr: &mut Ptr) { | ||
49 | loop { | ||
50 | if ptr.at('\n') || ptr.at_str("\r\n") { | ||
51 | return; | ||
52 | } | ||
53 | if ptr.bump().is_none() { | ||
54 | break; | ||
55 | } | ||
56 | } | ||
57 | } | ||
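One detail worth a concrete example: `scan_block_comment` keeps a depth counter because Rust block comments nest, and `scan_shebang` fires on a `#!/` sequence such as a script-style first line. The snippet below is ordinary Rust, not part of the patch (`comment_examples` is an invented name):

    fn comment_examples() {
        // a line comment: scan_comment bumps to the end of the line
        /* a block comment */
        /* outer /* nested, still inside the same COMMENT token */ and back out */
        let _ = "a first line like `#!/usr/bin/env run-cargo-script` lexes as SHEBANG";
    }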
diff --git a/crates/ra_syntax/src/parsing/lexer/numbers.rs b/crates/ra_syntax/src/parsing/lexer/numbers.rs new file mode 100644 index 000000000..7f6abe1d5 --- /dev/null +++ b/crates/ra_syntax/src/parsing/lexer/numbers.rs | |||
@@ -0,0 +1,69 @@ | |||
1 | use crate::parsing::lexer::{ | ||
2 | ptr::Ptr, | ||
3 | classes::*, | ||
4 | }; | ||
5 | |||
6 | use crate::SyntaxKind::{self, *}; | ||
7 | |||
8 | pub(crate) fn scan_number(c: char, ptr: &mut Ptr) -> SyntaxKind { | ||
9 | if c == '0' { | ||
10 | match ptr.current().unwrap_or('\0') { | ||
11 | 'b' | 'o' => { | ||
12 | ptr.bump(); | ||
13 | scan_digits(ptr, false); | ||
14 | } | ||
15 | 'x' => { | ||
16 | ptr.bump(); | ||
17 | scan_digits(ptr, true); | ||
18 | } | ||
19 | '0'...'9' | '_' | '.' | 'e' | 'E' => { | ||
20 | scan_digits(ptr, true); | ||
21 | } | ||
22 | _ => return INT_NUMBER, | ||
23 | } | ||
24 | } else { | ||
25 | scan_digits(ptr, false); | ||
26 | } | ||
27 | |||
28 | // might be a float, but don't be greedy if this is actually an | ||
29 | // integer literal followed by field/method access or a range pattern | ||
30 | // (`0..2` and `12.foo()`) | ||
31 | if ptr.at('.') && !(ptr.at_str("..") || ptr.nth_is_p(1, is_ident_start)) { | ||
32 | // might have stuff after the ., and if it does, it needs to start | ||
33 | // with a number | ||
34 | ptr.bump(); | ||
35 | scan_digits(ptr, false); | ||
36 | scan_float_exponent(ptr); | ||
37 | return FLOAT_NUMBER; | ||
38 | } | ||
39 | // it might be a float if it has an exponent | ||
40 | if ptr.at('e') || ptr.at('E') { | ||
41 | scan_float_exponent(ptr); | ||
42 | return FLOAT_NUMBER; | ||
43 | } | ||
44 | INT_NUMBER | ||
45 | } | ||
46 | |||
47 | fn scan_digits(ptr: &mut Ptr, allow_hex: bool) { | ||
48 | while let Some(c) = ptr.current() { | ||
49 | match c { | ||
50 | '_' | '0'...'9' => { | ||
51 | ptr.bump(); | ||
52 | } | ||
53 | 'a'...'f' | 'A'...'F' if allow_hex => { | ||
54 | ptr.bump(); | ||
55 | } | ||
56 | _ => return, | ||
57 | } | ||
58 | } | ||
59 | } | ||
60 | |||
61 | fn scan_float_exponent(ptr: &mut Ptr) { | ||
62 | if ptr.at('e') || ptr.at('E') { | ||
63 | ptr.bump(); | ||
64 | if ptr.at('-') || ptr.at('+') { | ||
65 | ptr.bump(); | ||
66 | } | ||
67 | scan_digits(ptr, false); | ||
68 | } | ||
69 | } | ||
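To make the greediness rules concrete, here are ordinary Rust literals (illustrative, not part of the patch; `number_examples` is an invented name) exercising the branches of `scan_number`:

    fn number_examples() {
        let _binary = 0b1010;         // '0' then 'b': digits scanned without hex letters
        let _hex    = 0xFF_u32;       // '0' then 'x': hex digits (the suffix is scanned separately)
        let _float  = 1_000.5e-3;     // '.' followed by a digit, plus an exponent
        let _range  = 0..2;           // `0..` stays INT_NUMBER DOTDOT, not a float
        let _call   = 12.to_string(); // `12.` followed by an ident start stays an integer
    }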
diff --git a/crates/ra_syntax/src/parsing/lexer/ptr.rs b/crates/ra_syntax/src/parsing/lexer/ptr.rs new file mode 100644 index 000000000..c341c4176 --- /dev/null +++ b/crates/ra_syntax/src/parsing/lexer/ptr.rs | |||
@@ -0,0 +1,162 @@ | |||
1 | use crate::TextUnit; | ||
2 | |||
3 | use std::str::Chars; | ||
4 | |||
5 | /// A simple view into the characters of a string. | ||
6 | pub(crate) struct Ptr<'s> { | ||
7 | text: &'s str, | ||
8 | len: TextUnit, | ||
9 | } | ||
10 | |||
11 | impl<'s> Ptr<'s> { | ||
12 | /// Creates a new `Ptr` from a string. | ||
13 | pub fn new(text: &'s str) -> Ptr<'s> { | ||
14 | Ptr { text, len: 0.into() } | ||
15 | } | ||
16 | |||
17 | /// Gets the length of the text consumed so far, i.e. the length of the current token. | ||
18 | pub fn into_len(self) -> TextUnit { | ||
19 | self.len | ||
20 | } | ||
21 | |||
22 | /// Gets the current character, if one exists. | ||
23 | pub fn current(&self) -> Option<char> { | ||
24 | self.chars().next() | ||
25 | } | ||
26 | |||
27 | /// Gets the nth character from the current. | ||
28 | /// For example, 0 will return the current character, 1 will return the next, etc. | ||
29 | pub fn nth(&self, n: u32) -> Option<char> { | ||
30 | self.chars().nth(n as usize) | ||
31 | } | ||
32 | |||
33 | /// Checks whether the current character is `c`. | ||
34 | pub fn at(&self, c: char) -> bool { | ||
35 | self.current() == Some(c) | ||
36 | } | ||
37 | |||
38 | /// Checks whether the next characters match `s`. | ||
39 | pub fn at_str(&self, s: &str) -> bool { | ||
40 | let chars = self.chars(); | ||
41 | chars.as_str().starts_with(s) | ||
42 | } | ||
43 | |||
44 | /// Checks whether the current character satisfies the predicate `p`. | ||
45 | pub fn at_p<P: Fn(char) -> bool>(&self, p: P) -> bool { | ||
46 | self.current().map(p) == Some(true) | ||
47 | } | ||
48 | |||
49 | /// Checks whether the nth character satisfies the predicate `p`. | ||
50 | pub fn nth_is_p<P: Fn(char) -> bool>(&self, n: u32, p: P) -> bool { | ||
51 | self.nth(n).map(p) == Some(true) | ||
52 | } | ||
53 | |||
54 | /// Moves to the next character. | ||
55 | pub fn bump(&mut self) -> Option<char> { | ||
56 | let ch = self.chars().next()?; | ||
57 | self.len += TextUnit::of_char(ch); | ||
58 | Some(ch) | ||
59 | } | ||
60 | |||
61 | /// Moves to the next character as long as `pred` is satisfied. | ||
62 | pub fn bump_while<F: Fn(char) -> bool>(&mut self, pred: F) { | ||
63 | loop { | ||
64 | match self.current() { | ||
65 | Some(c) if pred(c) => { | ||
66 | self.bump(); | ||
67 | } | ||
68 | _ => return, | ||
69 | } | ||
70 | } | ||
71 | } | ||
72 | |||
73 | /// Returns the text up to the current point. | ||
74 | pub fn current_token_text(&self) -> &str { | ||
75 | let len: u32 = self.len.into(); | ||
76 | &self.text[..len as usize] | ||
77 | } | ||
78 | |||
79 | /// Returns an iterator over the remaining characters. | ||
80 | fn chars(&self) -> Chars { | ||
81 | let len: u32 = self.len.into(); | ||
82 | self.text[len as usize..].chars() | ||
83 | } | ||
84 | } | ||
85 | |||
86 | #[cfg(test)] | ||
87 | mod tests { | ||
88 | use super::*; | ||
89 | |||
90 | #[test] | ||
91 | fn test_current() { | ||
92 | let ptr = Ptr::new("test"); | ||
93 | assert_eq!(ptr.current(), Some('t')); | ||
94 | } | ||
95 | |||
96 | #[test] | ||
97 | fn test_nth() { | ||
98 | let ptr = Ptr::new("test"); | ||
99 | assert_eq!(ptr.nth(0), Some('t')); | ||
100 | assert_eq!(ptr.nth(1), Some('e')); | ||
101 | assert_eq!(ptr.nth(2), Some('s')); | ||
102 | assert_eq!(ptr.nth(3), Some('t')); | ||
103 | assert_eq!(ptr.nth(4), None); | ||
104 | } | ||
105 | |||
106 | #[test] | ||
107 | fn test_at() { | ||
108 | let ptr = Ptr::new("test"); | ||
109 | assert!(ptr.at('t')); | ||
110 | assert!(!ptr.at('a')); | ||
111 | } | ||
112 | |||
113 | #[test] | ||
114 | fn test_at_str() { | ||
115 | let ptr = Ptr::new("test"); | ||
116 | assert!(ptr.at_str("t")); | ||
117 | assert!(ptr.at_str("te")); | ||
118 | assert!(ptr.at_str("test")); | ||
119 | assert!(!ptr.at_str("tests")); | ||
120 | assert!(!ptr.at_str("rust")); | ||
121 | } | ||
122 | |||
123 | #[test] | ||
124 | fn test_at_p() { | ||
125 | let ptr = Ptr::new("test"); | ||
126 | assert!(ptr.at_p(|c| c == 't')); | ||
127 | assert!(!ptr.at_p(|c| c == 'e')); | ||
128 | } | ||
129 | |||
130 | #[test] | ||
131 | fn test_nth_is_p() { | ||
132 | let ptr = Ptr::new("test"); | ||
133 | assert!(ptr.nth_is_p(0, |c| c == 't')); | ||
134 | assert!(!ptr.nth_is_p(1, |c| c == 't')); | ||
135 | assert!(ptr.nth_is_p(3, |c| c == 't')); | ||
136 | assert!(!ptr.nth_is_p(150, |c| c == 't')); | ||
137 | } | ||
138 | |||
139 | #[test] | ||
140 | fn test_bump() { | ||
141 | let mut ptr = Ptr::new("test"); | ||
142 | assert_eq!(ptr.current(), Some('t')); | ||
143 | ptr.bump(); | ||
144 | assert_eq!(ptr.current(), Some('e')); | ||
145 | ptr.bump(); | ||
146 | assert_eq!(ptr.current(), Some('s')); | ||
147 | ptr.bump(); | ||
148 | assert_eq!(ptr.current(), Some('t')); | ||
149 | ptr.bump(); | ||
150 | assert_eq!(ptr.current(), None); | ||
151 | ptr.bump(); | ||
152 | assert_eq!(ptr.current(), None); | ||
153 | } | ||
154 | |||
155 | #[test] | ||
156 | fn test_bump_while() { | ||
157 | let mut ptr = Ptr::new("test"); | ||
158 | assert_eq!(ptr.current(), Some('t')); | ||
159 | ptr.bump_while(|c| c != 's'); | ||
160 | assert_eq!(ptr.current(), Some('s')); | ||
161 | } | ||
162 | } | ||
diff --git a/crates/ra_syntax/src/parsing/lexer/strings.rs b/crates/ra_syntax/src/parsing/lexer/strings.rs new file mode 100644 index 000000000..f74acff9e --- /dev/null +++ b/crates/ra_syntax/src/parsing/lexer/strings.rs | |||
@@ -0,0 +1,112 @@ | |||
1 | use crate::{ | ||
2 | parsing::lexer::ptr::Ptr, | ||
3 | SyntaxKind::{self, *}, | ||
4 | }; | ||
5 | |||
6 | pub(crate) fn is_string_literal_start(c: char, c1: Option<char>, c2: Option<char>) -> bool { | ||
7 | match (c, c1, c2) { | ||
8 | ('r', Some('"'), _) | ||
9 | | ('r', Some('#'), Some('"')) | ||
10 | | ('r', Some('#'), Some('#')) | ||
11 | | ('b', Some('"'), _) | ||
12 | | ('b', Some('\''), _) | ||
13 | | ('b', Some('r'), Some('"')) | ||
14 | | ('b', Some('r'), Some('#')) => true, | ||
15 | _ => false, | ||
16 | } | ||
17 | } | ||
18 | |||
19 | pub(crate) fn scan_char(ptr: &mut Ptr) { | ||
20 | while let Some(c) = ptr.current() { | ||
21 | match c { | ||
22 | '\\' => { | ||
23 | ptr.bump(); | ||
24 | if ptr.at('\\') || ptr.at('\'') { | ||
25 | ptr.bump(); | ||
26 | } | ||
27 | } | ||
28 | '\'' => { | ||
29 | ptr.bump(); | ||
30 | return; | ||
31 | } | ||
32 | '\n' => return, | ||
33 | _ => { | ||
34 | ptr.bump(); | ||
35 | } | ||
36 | } | ||
37 | } | ||
38 | } | ||
39 | |||
40 | pub(crate) fn scan_byte_char_or_string(ptr: &mut Ptr) -> SyntaxKind { | ||
41 | // unwrapping and the non-exhaustive match are ok | ||
42 | // because of string_literal_start | ||
43 | let c = ptr.bump().unwrap(); | ||
44 | match c { | ||
45 | '\'' => { | ||
46 | scan_byte(ptr); | ||
47 | BYTE | ||
48 | } | ||
49 | '"' => { | ||
50 | scan_byte_string(ptr); | ||
51 | BYTE_STRING | ||
52 | } | ||
53 | 'r' => { | ||
54 | scan_raw_string(ptr); | ||
55 | RAW_BYTE_STRING | ||
56 | } | ||
57 | _ => unreachable!(), | ||
58 | } | ||
59 | } | ||
60 | |||
61 | pub(crate) fn scan_string(ptr: &mut Ptr) { | ||
62 | while let Some(c) = ptr.current() { | ||
63 | match c { | ||
64 | '\\' => { | ||
65 | ptr.bump(); | ||
66 | if ptr.at('\\') || ptr.at('"') { | ||
67 | ptr.bump(); | ||
68 | } | ||
69 | } | ||
70 | '"' => { | ||
71 | ptr.bump(); | ||
72 | return; | ||
73 | } | ||
74 | _ => { | ||
75 | ptr.bump(); | ||
76 | } | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | |||
81 | pub(crate) fn scan_raw_string(ptr: &mut Ptr) { | ||
82 | let mut hashes = 0; | ||
83 | while ptr.at('#') { | ||
84 | hashes += 1; | ||
85 | ptr.bump(); | ||
86 | } | ||
87 | if !ptr.at('"') { | ||
88 | return; | ||
89 | } | ||
90 | ptr.bump(); | ||
91 | |||
92 | while let Some(c) = ptr.bump() { | ||
93 | if c == '"' { | ||
94 | let mut hashes_left = hashes; | ||
95 | while ptr.at('#') && hashes_left > 0 { | ||
96 | hashes_left -= 1; | ||
97 | ptr.bump(); | ||
98 | } | ||
99 | if hashes_left == 0 { | ||
100 | return; | ||
101 | } | ||
102 | } | ||
103 | } | ||
104 | } | ||
105 | |||
106 | fn scan_byte(ptr: &mut Ptr) { | ||
107 | scan_char(ptr) | ||
108 | } | ||
109 | |||
110 | fn scan_byte_string(ptr: &mut Ptr) { | ||
111 | scan_string(ptr) | ||
112 | } | ||
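The subtle part of this file is the hash counting in `scan_raw_string`: the scanner records how many `#` follow the `r`, and only a `"` followed by at least that many `#` terminates the literal. Below is a sketch of that behaviour as a would-be unit test; it is not part of the commit, and it assumes the sibling `ptr` module stays reachable from a test submodule here just as it is from the top of this file.

```rust
#[cfg(test)]
mod raw_string_tests {
    use super::scan_raw_string;
    use crate::parsing::lexer::ptr::Ptr;

    #[test]
    fn raw_string_closes_only_on_matching_hash_count() {
        // The caller (`scan_byte_char_or_string` or the main lexer loop)
        // has already consumed the leading `r`, so `Ptr` starts at the hashes.
        let mut ptr = Ptr::new(r####"##"a "# is still inside"##rest"####);
        scan_raw_string(&mut ptr);
        // `"#` inside the literal does not close it (only one hash follows);
        // scanning stops right after the real terminator `"##`.
        assert_eq!(ptr.current(), Some('r'));
    }
}
```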
diff --git a/crates/ra_syntax/src/parsing/parser_api.rs b/crates/ra_syntax/src/parsing/parser_api.rs new file mode 100644 index 000000000..781c407de --- /dev/null +++ b/crates/ra_syntax/src/parsing/parser_api.rs | |||
@@ -0,0 +1,195 @@ | |||
1 | use drop_bomb::DropBomb; | ||
2 | |||
3 | use crate::{ | ||
4 | SyntaxKind::{self, ERROR}, | ||
5 | parsing::{ | ||
6 | token_set::TokenSet, | ||
7 | parser_impl::ParserImpl | ||
8 | }, | ||
9 | }; | ||
10 | |||
11 | /// The `Parser` struct provides the low-level API for | ||
12 | /// navigating through the stream of tokens and | ||
13 | /// constructing the parse tree. The actual parsing | ||
14 | /// happens in the `grammar` module. | ||
15 | /// | ||
16 | /// However, the result of this `Parser` is not a real | ||
17 | /// tree, but rather a flat stream of events of the form | ||
18 | /// "start expression, consume number literal, | ||
19 | /// finish expression". See `Event` docs for more. | ||
20 | pub(crate) struct Parser<'t>(pub(super) ParserImpl<'t>); | ||
21 | |||
22 | impl<'t> Parser<'t> { | ||
23 | /// Returns the kind of the current token. | ||
24 | /// If the parser has already reached the end of input, | ||
25 | /// the special `EOF` kind is returned. | ||
26 | pub(crate) fn current(&self) -> SyntaxKind { | ||
27 | self.nth(0) | ||
28 | } | ||
29 | |||
30 | /// Returns the kinds of the current two tokens, if they are not separated | ||
31 | /// by trivia. | ||
32 | /// | ||
33 | /// Useful for parsing things like `>>`. | ||
34 | pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> { | ||
35 | self.0.current2() | ||
36 | } | ||
37 | |||
38 | /// Returns the kinds of the current three tokens, if they are not separated | ||
39 | /// by trivia. | ||
40 | /// | ||
41 | /// Useful for parsing things like `=>>`. | ||
42 | pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> { | ||
43 | self.0.current3() | ||
44 | } | ||
45 | |||
46 | /// Lookahead operation: returns the kind of the token | ||
47 | /// `n` tokens ahead. | ||
48 | pub(crate) fn nth(&self, n: u32) -> SyntaxKind { | ||
49 | self.0.nth(n) | ||
50 | } | ||
51 | |||
52 | /// Checks if the current token is `kind`. | ||
53 | pub(crate) fn at(&self, kind: SyntaxKind) -> bool { | ||
54 | self.current() == kind | ||
55 | } | ||
56 | |||
57 | /// Checks if the current token is in `kinds`. | ||
58 | pub(crate) fn at_ts(&self, kinds: TokenSet) -> bool { | ||
59 | kinds.contains(self.current()) | ||
60 | } | ||
61 | |||
62 | /// Checks if the current token is a contextual keyword with text `t`. | ||
63 | pub(crate) fn at_contextual_kw(&self, t: &str) -> bool { | ||
64 | self.0.at_kw(t) | ||
65 | } | ||
66 | |||
67 | /// Starts a new node in the syntax tree. All nodes and tokens | ||
68 | /// consumed between the `start` and the corresponding `Marker::complete` | ||
69 | /// belong to the same node. | ||
70 | pub(crate) fn start(&mut self) -> Marker { | ||
71 | Marker::new(self.0.start()) | ||
72 | } | ||
73 | |||
74 | /// Advances the parser by one token unconditionally. | ||
75 | pub(crate) fn bump(&mut self) { | ||
76 | self.0.bump(); | ||
77 | } | ||
78 | |||
79 | /// Advances the parser by one token, remapping its kind. | ||
80 | /// This is useful for creating contextual keywords from | ||
81 | /// identifiers. For example, the lexer produces a `union` | ||
82 | /// *identifier* token, but the parser remaps it to the | ||
83 | /// `union` keyword, and the keyword is what ends up in the | ||
84 | /// final tree. | ||
85 | pub(crate) fn bump_remap(&mut self, kind: SyntaxKind) { | ||
86 | self.0.bump_remap(kind); | ||
87 | } | ||
88 | |||
89 | /// Advances the parser by `n` tokens, remapping them to a single `kind` token. | ||
90 | /// This is useful for creating compound tokens from parts. For | ||
91 | /// example, a `<<` token is two consecutive remapped `<` tokens. | ||
92 | pub(crate) fn bump_compound(&mut self, kind: SyntaxKind, n: u8) { | ||
93 | self.0.bump_compound(kind, n); | ||
94 | } | ||
95 | |||
96 | /// Emit an error with the given `message`. | ||
97 | /// TODO: this should be much more fancy and support | ||
98 | /// structured errors with spans and notes, like rustc | ||
99 | /// does. | ||
100 | pub(crate) fn error<T: Into<String>>(&mut self, message: T) { | ||
101 | self.0.error(message.into()) | ||
102 | } | ||
103 | |||
104 | /// Consume the next token if `kind` matches. | ||
105 | pub(crate) fn eat(&mut self, kind: SyntaxKind) -> bool { | ||
106 | if !self.at(kind) { | ||
107 | return false; | ||
108 | } | ||
109 | self.bump(); | ||
110 | true | ||
111 | } | ||
112 | |||
113 | /// Consume the next token if it is `kind` or emit an error | ||
114 | /// otherwise. | ||
115 | pub(crate) fn expect(&mut self, kind: SyntaxKind) -> bool { | ||
116 | if self.eat(kind) { | ||
117 | return true; | ||
118 | } | ||
119 | self.error(format!("expected {:?}", kind)); | ||
120 | false | ||
121 | } | ||
122 | |||
123 | /// Create an error node and consume the next token. | ||
124 | pub(crate) fn err_and_bump(&mut self, message: &str) { | ||
125 | self.err_recover(message, TokenSet::empty()); | ||
126 | } | ||
127 | |||
128 | /// Create an error node and consume the next token, unless it is a curly brace or in the `recovery` set. | ||
129 | pub(crate) fn err_recover(&mut self, message: &str, recovery: TokenSet) { | ||
130 | if self.at(SyntaxKind::L_CURLY) || self.at(SyntaxKind::R_CURLY) || self.at_ts(recovery) { | ||
131 | self.error(message); | ||
132 | } else { | ||
133 | let m = self.start(); | ||
134 | self.error(message); | ||
135 | self.bump(); | ||
136 | m.complete(self, ERROR); | ||
137 | }; | ||
138 | } | ||
139 | } | ||
140 | |||
141 | /// See `Parser::start`. | ||
142 | pub(crate) struct Marker { | ||
143 | pos: u32, | ||
144 | bomb: DropBomb, | ||
145 | } | ||
146 | |||
147 | impl Marker { | ||
148 | fn new(pos: u32) -> Marker { | ||
149 | Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") } | ||
150 | } | ||
151 | |||
152 | /// Finishes the syntax tree node and assigns `kind` to it, | ||
153 | /// returning a `CompletedMarker` for possible future | ||
154 | /// operations like `.precede()` to deal with `forward_parent`. | ||
155 | pub(crate) fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker { | ||
156 | self.bomb.defuse(); | ||
157 | p.0.complete(self.pos, kind); | ||
158 | CompletedMarker::new(self.pos, kind) | ||
159 | } | ||
160 | |||
161 | /// Abandons the syntax tree node. All its children | ||
162 | /// are attached to its parent instead. | ||
163 | pub(crate) fn abandon(mut self, p: &mut Parser) { | ||
164 | self.bomb.defuse(); | ||
165 | p.0.abandon(self.pos); | ||
166 | } | ||
167 | } | ||
168 | |||
169 | pub(crate) struct CompletedMarker(u32, SyntaxKind); | ||
170 | |||
171 | impl CompletedMarker { | ||
172 | fn new(pos: u32, kind: SyntaxKind) -> Self { | ||
173 | CompletedMarker(pos, kind) | ||
174 | } | ||
175 | |||
176 | /// This method allows creating a new node which starts | ||
177 | /// *before* the current one. That is, the parser could start | ||
178 | /// node `A`, complete it, and then, after parsing the | ||
179 | /// whole `A`, decide that it should have started some node | ||
180 | /// `B` before starting `A`. `precede` allows doing exactly | ||
181 | /// that. See also docs about `forward_parent` in `Event::Start`. | ||
182 | /// | ||
183 | /// Given completed events `[START, FINISH]` and the corresponding | ||
184 | /// `CompletedMarker(pos: 0, _)`, `precede` appends a new `START` | ||
185 | /// event, giving `[START, FINISH, NEWSTART]`, and marks `NEWSTART` | ||
186 | /// as `START`'s parent by storing the relative distance from `START` | ||
187 | /// to `NEWSTART` in `forward_parent` (2 in this case). | ||
188 | pub(crate) fn precede(self, p: &mut Parser) -> Marker { | ||
189 | Marker::new(p.0.precede(self.0)) | ||
190 | } | ||
191 | |||
192 | pub(crate) fn kind(&self) -> SyntaxKind { | ||
193 | self.1 | ||
194 | } | ||
195 | } | ||
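To make the `Marker` workflow above concrete, here is a sketch of how a grammar rule could use `start`, `complete`, and `precede` to build a left-associated binary expression. It is illustrative only: the helper names (`atom`, `plus_expr`) are invented, and the real expression grammar in `grammar::expressions` is considerably more involved.

```rust
use crate::{
    parsing::parser_api::{CompletedMarker, Parser},
    SyntaxKind::{BIN_EXPR, LITERAL, PLUS},
};

fn atom(p: &mut Parser) -> CompletedMarker {
    let m = p.start();
    p.bump(); // assume the current token is a literal
    m.complete(p, LITERAL)
}

fn plus_expr(p: &mut Parser) {
    let mut lhs = atom(p);
    while p.at(PLUS) {
        // `lhs` is already finished; `precede` retroactively opens a
        // BIN_EXPR node that will contain it.
        let m = lhs.precede(p);
        p.bump(); // consume `+`
        atom(p); // right-hand side
        lhs = m.complete(p, BIN_EXPR);
    }
}
```

Each `start`/`complete` pair turns into a `Start`/`Finish` event; `precede` is the only way to introduce a parent after the fact, which is exactly the `forward_parent` mechanism described in `parser_impl::event`.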
diff --git a/crates/ra_syntax/src/parsing/parser_impl.rs b/crates/ra_syntax/src/parsing/parser_impl.rs new file mode 100644 index 000000000..8cce1ab01 --- /dev/null +++ b/crates/ra_syntax/src/parsing/parser_impl.rs | |||
@@ -0,0 +1,200 @@ | |||
1 | mod event; | ||
2 | mod input; | ||
3 | |||
4 | use std::cell::Cell; | ||
5 | |||
6 | use crate::{ | ||
7 | SmolStr, | ||
8 | syntax_error::{ParseError, SyntaxError}, | ||
9 | parsing::{ | ||
10 | lexer::Token, | ||
11 | parser_api::Parser, | ||
12 | parser_impl::{ | ||
13 | event::{Event, EventProcessor}, | ||
14 | input::{InputPosition, ParserInput}, | ||
15 | }, | ||
16 | }, | ||
17 | }; | ||
18 | |||
19 | use crate::SyntaxKind::{self, EOF, TOMBSTONE}; | ||
20 | |||
21 | pub(super) trait Sink { | ||
22 | type Tree; | ||
23 | |||
24 | /// Adds a new leaf to the current branch. | ||
25 | fn leaf(&mut self, kind: SyntaxKind, text: SmolStr); | ||
26 | |||
27 | /// Start a new branch and make it current. | ||
28 | fn start_branch(&mut self, kind: SyntaxKind); | ||
29 | |||
30 | /// Finish the current branch and restore the previous | ||
31 | /// branch as current. | ||
32 | fn finish_branch(&mut self); | ||
33 | |||
34 | fn error(&mut self, error: SyntaxError); | ||
35 | |||
36 | /// Complete tree building. Make sure that | ||
37 | /// `start_branch` and `finish_branch` calls | ||
38 | /// are paired! | ||
39 | fn finish(self) -> Self::Tree; | ||
40 | } | ||
41 | |||
42 | /// Parse a sequence of tokens into a syntax tree, building it through the given `Sink`. | ||
43 | pub(super) fn parse_with<S: Sink>( | ||
44 | sink: S, | ||
45 | text: &str, | ||
46 | tokens: &[Token], | ||
47 | parser: fn(&mut Parser), | ||
48 | ) -> S::Tree { | ||
49 | let mut events = { | ||
50 | let input = input::ParserInput::new(text, tokens); | ||
51 | let parser_impl = ParserImpl::new(&input); | ||
52 | let mut parser_api = Parser(parser_impl); | ||
53 | parser(&mut parser_api); | ||
54 | parser_api.0.into_events() | ||
55 | }; | ||
56 | EventProcessor::new(sink, text, tokens, &mut events).process().finish() | ||
57 | } | ||
58 | |||
59 | /// Implementation details of `Parser`, extracted | ||
60 | /// to a separate struct in order not to pollute | ||
61 | /// the public API of the `Parser`. | ||
62 | pub(super) struct ParserImpl<'t> { | ||
63 | parser_input: &'t ParserInput<'t>, | ||
64 | pos: InputPosition, | ||
65 | events: Vec<Event>, | ||
66 | steps: Cell<u32>, | ||
67 | } | ||
68 | |||
69 | impl<'t> ParserImpl<'t> { | ||
70 | fn new(inp: &'t ParserInput<'t>) -> ParserImpl<'t> { | ||
71 | ParserImpl { | ||
72 | parser_input: inp, | ||
73 | pos: InputPosition::new(), | ||
74 | events: Vec::new(), | ||
75 | steps: Cell::new(0), | ||
76 | } | ||
77 | } | ||
78 | |||
79 | fn into_events(self) -> Vec<Event> { | ||
80 | assert_eq!(self.nth(0), EOF); | ||
81 | self.events | ||
82 | } | ||
83 | |||
84 | pub(super) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> { | ||
85 | let c1 = self.parser_input.kind(self.pos); | ||
86 | let c2 = self.parser_input.kind(self.pos + 1); | ||
87 | if self.parser_input.token_start_at(self.pos + 1) | ||
88 | == self.parser_input.token_start_at(self.pos) + self.parser_input.token_len(self.pos) | ||
89 | { | ||
90 | Some((c1, c2)) | ||
91 | } else { | ||
92 | None | ||
93 | } | ||
94 | } | ||
95 | |||
96 | pub(super) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> { | ||
97 | let c1 = self.parser_input.kind(self.pos); | ||
98 | let c2 = self.parser_input.kind(self.pos + 1); | ||
99 | let c3 = self.parser_input.kind(self.pos + 2); | ||
100 | if self.parser_input.token_start_at(self.pos + 1) | ||
101 | == self.parser_input.token_start_at(self.pos) + self.parser_input.token_len(self.pos) | ||
102 | && self.parser_input.token_start_at(self.pos + 2) | ||
103 | == self.parser_input.token_start_at(self.pos + 1) | ||
104 | + self.parser_input.token_len(self.pos + 1) | ||
105 | { | ||
106 | Some((c1, c2, c3)) | ||
107 | } else { | ||
108 | None | ||
109 | } | ||
110 | } | ||
111 | |||
112 | /// Get the syntax kind of the nth token. | ||
113 | pub(super) fn nth(&self, n: u32) -> SyntaxKind { | ||
114 | let steps = self.steps.get(); | ||
115 | assert!(steps <= 10_000_000, "the parser seems stuck"); | ||
116 | self.steps.set(steps + 1); | ||
117 | |||
118 | self.parser_input.kind(self.pos + n) | ||
119 | } | ||
120 | |||
121 | pub(super) fn at_kw(&self, t: &str) -> bool { | ||
122 | self.parser_input.token_text(self.pos) == t | ||
123 | } | ||
124 | |||
125 | /// Start a new node: append a tombstone `Start` event and return its position. | ||
126 | pub(super) fn start(&mut self) -> u32 { | ||
127 | let pos = self.events.len() as u32; | ||
128 | self.push_event(Event::tombstone()); | ||
129 | pos | ||
130 | } | ||
131 | |||
132 | /// Advances the parser by one token unconditionally. | ||
133 | pub(super) fn bump(&mut self) { | ||
134 | let kind = self.nth(0); | ||
135 | if kind == EOF { | ||
136 | return; | ||
137 | } | ||
138 | self.do_bump(kind, 1); | ||
139 | } | ||
140 | |||
141 | pub(super) fn bump_remap(&mut self, kind: SyntaxKind) { | ||
142 | if self.nth(0) == EOF { | ||
143 | // TODO: panic!? | ||
144 | return; | ||
145 | } | ||
146 | self.do_bump(kind, 1); | ||
147 | } | ||
148 | |||
149 | pub(super) fn bump_compound(&mut self, kind: SyntaxKind, n: u8) { | ||
150 | self.do_bump(kind, n); | ||
151 | } | ||
152 | |||
153 | fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { | ||
154 | self.pos += u32::from(n_raw_tokens); | ||
155 | self.push_event(Event::Token { kind, n_raw_tokens }); | ||
156 | } | ||
157 | |||
158 | /// Append one Error event to the back of events. | ||
159 | pub(super) fn error(&mut self, msg: String) { | ||
160 | self.push_event(Event::Error { msg: ParseError(msg) }) | ||
161 | } | ||
162 | |||
163 | /// Complete the `Start` event at `pos` by assigning `kind` to it and appending a `Finish` event. | ||
164 | pub(super) fn complete(&mut self, pos: u32, kind: SyntaxKind) { | ||
165 | match self.events[pos as usize] { | ||
166 | Event::Start { kind: ref mut slot, .. } => { | ||
167 | *slot = kind; | ||
168 | } | ||
169 | _ => unreachable!(), | ||
170 | } | ||
171 | self.push_event(Event::Finish); | ||
172 | } | ||
173 | |||
174 | /// Abandon the node: drop the dummy `Start` event if it is the last one; otherwise leave it as a tombstone. | ||
175 | pub(super) fn abandon(&mut self, pos: u32) { | ||
176 | let idx = pos as usize; | ||
177 | if idx == self.events.len() - 1 { | ||
178 | match self.events.pop() { | ||
179 | Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (), | ||
180 | _ => unreachable!(), | ||
181 | } | ||
182 | } | ||
183 | } | ||
184 | |||
185 | /// Save the relative distance of a completed event to its forward_parent. | ||
186 | pub(super) fn precede(&mut self, pos: u32) -> u32 { | ||
187 | let new_pos = self.start(); | ||
188 | match self.events[pos as usize] { | ||
189 | Event::Start { ref mut forward_parent, .. } => { | ||
190 | *forward_parent = Some(new_pos - pos); | ||
191 | } | ||
192 | _ => unreachable!(), | ||
193 | } | ||
194 | new_pos | ||
195 | } | ||
196 | |||
197 | fn push_event(&mut self, event: Event) { | ||
198 | self.events.push(event) | ||
199 | } | ||
200 | } | ||
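Because `parse_with` is generic over `Sink`, the event stream can be materialised into things other than a green tree. The sketch below, a sink that renders an indented text dump, does not exist in the crate and only illustrates the decoupling; error handling is deliberately elided.

```rust
use crate::{
    SmolStr, SyntaxKind,
    syntax_error::SyntaxError,
    parsing::parser_impl::Sink,
};

struct TextDumpSink {
    indent: usize,
    buf: String,
}

impl Sink for TextDumpSink {
    type Tree = String;

    fn leaf(&mut self, kind: SyntaxKind, text: SmolStr) {
        self.buf
            .push_str(&format!("{:width$}{:?} {:?}\n", "", kind, text, width = self.indent));
    }

    fn start_branch(&mut self, kind: SyntaxKind) {
        self.buf.push_str(&format!("{:width$}{:?}\n", "", kind, width = self.indent));
        self.indent += 2;
    }

    fn finish_branch(&mut self) {
        self.indent -= 2;
    }

    fn error(&mut self, _error: SyntaxError) {
        // A real sink would record the error; elided in this sketch.
    }

    fn finish(self) -> String {
        self.buf
    }
}
```

Passing `TextDumpSink { indent: 0, buf: String::new() }` to `parse_with` alongside a grammar entry point would yield the text dump instead of a `(GreenNode, Vec<SyntaxError>)` pair.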
diff --git a/crates/ra_syntax/src/parsing/parser_impl/event.rs b/crates/ra_syntax/src/parsing/parser_impl/event.rs new file mode 100644 index 000000000..2ddbdd34d --- /dev/null +++ b/crates/ra_syntax/src/parsing/parser_impl/event.rs | |||
@@ -0,0 +1,254 @@ | |||
1 | //! This module provides a way to construct a `File`. | ||
2 | //! It is intended to be completely decoupled from the | ||
3 | //! parser, so that the tree representation | ||
4 | //! and the parser algorithm can evolve independently. | ||
5 | //! | ||
6 | //! The `Sink` trait is the bridge between the parser and the | ||
7 | //! tree builder: the parser produces a stream of events like | ||
8 | //! `start node`, `finish node`, and a `Sink` implementation | ||
9 | //! (such as `GreenBuilder`) converts this stream to a real tree. | ||
10 | use std::mem; | ||
11 | |||
12 | use crate::{ | ||
13 | SmolStr, | ||
14 | SyntaxKind::{self, *}, | ||
15 | TextRange, TextUnit, | ||
16 | syntax_error::{ | ||
17 | ParseError, | ||
18 | SyntaxError, | ||
19 | SyntaxErrorKind, | ||
20 | }, | ||
21 | parsing::{ | ||
22 | lexer::Token, | ||
23 | parser_impl::Sink, | ||
24 | }, | ||
25 | }; | ||
26 | |||
27 | /// `Parser` produces a flat list of `Event`s. | ||
28 | /// They are converted to a tree-structure in | ||
29 | /// a separate pass, via `EventProcessor`. | ||
30 | #[derive(Debug)] | ||
31 | pub(crate) enum Event { | ||
32 | /// This event signifies the start of the node. | ||
33 | /// It should be either abandoned (in which case the | ||
34 | /// `kind` is `TOMBSTONE`, and the event is ignored), | ||
35 | /// or completed via a `Finish` event. | ||
36 | /// | ||
37 | /// All tokens between a `Start` and a `Finish` would | ||
38 | /// become the children of the respective node. | ||
39 | /// | ||
40 | /// For left-recursive syntactic constructs, the parser produces | ||
41 | /// a child node before it sees a parent. `forward_parent` | ||
42 | /// saves the relative offset of the current event's parent. | ||
43 | /// | ||
44 | /// Consider this path | ||
45 | /// | ||
46 | /// foo::bar | ||
47 | /// | ||
48 | /// The events for it would look like this: | ||
49 | /// | ||
50 | /// | ||
51 | /// START(PATH) IDENT('foo') FINISH START(PATH) COLONCOLON IDENT('bar') FINISH | ||
52 | ///       |                          /\ | ||
53 | ///       |                          | | ||
54 | ///       +------forward-parent------+ | ||
55 | /// | ||
56 | /// And the tree would look like this | ||
57 | /// | ||
58 | ///     +--PATH---------+ | ||
59 | ///     |   |           | | ||
60 | ///     |   |           | | ||
61 | ///     |  '::'       'bar' | ||
62 | ///     | | ||
63 | ///    PATH | ||
64 | ///     | | ||
65 | ///   'foo' | ||
66 | /// | ||
67 | /// See also `CompletedMarker::precede`. | ||
68 | Start { | ||
69 | kind: SyntaxKind, | ||
70 | forward_parent: Option<u32>, | ||
71 | }, | ||
72 | |||
73 | /// Complete the previous `Start` event | ||
74 | Finish, | ||
75 | |||
76 | /// Produce a single leaf-element. | ||
77 | /// `n_raw_tokens` is used to glue complex contextual tokens. | ||
78 | /// For example, the lexer tokenizes `>>` as `>`, `>`, and | ||
79 | /// `n_raw_tokens = 2` is used to produce a single `>>`. | ||
80 | Token { | ||
81 | kind: SyntaxKind, | ||
82 | n_raw_tokens: u8, | ||
83 | }, | ||
84 | |||
85 | Error { | ||
86 | msg: ParseError, | ||
87 | }, | ||
88 | } | ||
89 | |||
90 | impl Event { | ||
91 | pub(crate) fn tombstone() -> Self { | ||
92 | Event::Start { kind: TOMBSTONE, forward_parent: None } | ||
93 | } | ||
94 | } | ||
95 | |||
96 | pub(super) struct EventProcessor<'a, S: Sink> { | ||
97 | sink: S, | ||
98 | text_pos: TextUnit, | ||
99 | text: &'a str, | ||
100 | token_pos: usize, | ||
101 | tokens: &'a [Token], | ||
102 | events: &'a mut [Event], | ||
103 | } | ||
104 | |||
105 | impl<'a, S: Sink> EventProcessor<'a, S> { | ||
106 | pub(super) fn new( | ||
107 | sink: S, | ||
108 | text: &'a str, | ||
109 | tokens: &'a [Token], | ||
110 | events: &'a mut [Event], | ||
111 | ) -> EventProcessor<'a, S> { | ||
112 | EventProcessor { sink, text_pos: 0.into(), text, token_pos: 0, tokens, events } | ||
113 | } | ||
114 | |||
115 | /// Generate the syntax tree by replaying the events into the sink. | ||
116 | pub(super) fn process(mut self) -> S { | ||
117 | let mut forward_parents = Vec::new(); | ||
118 | |||
119 | for i in 0..self.events.len() { | ||
120 | match mem::replace(&mut self.events[i], Event::tombstone()) { | ||
121 | Event::Start { kind: TOMBSTONE, .. } => (), | ||
122 | |||
123 | Event::Start { kind, forward_parent } => { | ||
124 | // For events `[A, B, C]`, where B is A's forward_parent and C is B's | ||
125 | // forward_parent, the tree nesting is `C > B > A`, so the `Start`s must | ||
126 | // be emitted to the sink in reverse order: first `C`, then `B`, then `A`. | ||
127 | |||
128 | // append `A` into parents. | ||
129 | forward_parents.push(kind); | ||
130 | let mut idx = i; | ||
131 | let mut fp = forward_parent; | ||
132 | while let Some(fwd) = fp { | ||
133 | idx += fwd as usize; | ||
134 | // append `A`'s forward_parent `B` | ||
135 | fp = match mem::replace(&mut self.events[idx], Event::tombstone()) { | ||
136 | Event::Start { kind, forward_parent } => { | ||
137 | forward_parents.push(kind); | ||
138 | forward_parent | ||
139 | } | ||
140 | _ => unreachable!(), | ||
141 | }; | ||
142 | // append `B`'s forward_parent `C` in the next stage. | ||
143 | } | ||
144 | |||
145 | for kind in forward_parents.drain(..).rev() { | ||
146 | self.start(kind); | ||
147 | } | ||
148 | } | ||
149 | Event::Finish => { | ||
150 | let is_last = i == self.events.len() - 1; | ||
151 | self.finish(is_last); | ||
152 | } | ||
153 | Event::Token { kind, n_raw_tokens } => { | ||
154 | self.eat_trivias(); | ||
155 | let n_raw_tokens = n_raw_tokens as usize; | ||
156 | let len = self.tokens[self.token_pos..self.token_pos + n_raw_tokens] | ||
157 | .iter() | ||
158 | .map(|it| it.len) | ||
159 | .sum::<TextUnit>(); | ||
160 | self.leaf(kind, len, n_raw_tokens); | ||
161 | } | ||
162 | Event::Error { msg } => self | ||
163 | .sink | ||
164 | .error(SyntaxError::new(SyntaxErrorKind::ParseError(msg), self.text_pos)), | ||
165 | } | ||
166 | } | ||
167 | self.sink | ||
168 | } | ||
169 | |||
170 | /// Start a branch for `kind`, deciding how much leading trivia (comments/whitespace) attaches to it. | ||
171 | fn start(&mut self, kind: SyntaxKind) { | ||
172 | if kind == SOURCE_FILE { | ||
173 | self.sink.start_branch(kind); | ||
174 | return; | ||
175 | } | ||
176 | let n_trivias = | ||
177 | self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count(); | ||
178 | let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias]; | ||
179 | let mut trivia_end = | ||
180 | self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>(); | ||
181 | |||
182 | let n_attached_trivias = { | ||
183 | let leading_trivias = leading_trivias.iter().rev().map(|it| { | ||
184 | let next_end = trivia_end - it.len; | ||
185 | let range = TextRange::from_to(next_end, trivia_end); | ||
186 | trivia_end = next_end; | ||
187 | (it.kind, &self.text[range]) | ||
188 | }); | ||
189 | n_attached_trivias(kind, leading_trivias) | ||
190 | }; | ||
191 | self.eat_n_trivias(n_trivias - n_attached_trivias); | ||
192 | self.sink.start_branch(kind); | ||
193 | self.eat_n_trivias(n_attached_trivias); | ||
194 | } | ||
195 | |||
196 | fn finish(&mut self, is_last: bool) { | ||
197 | if is_last { | ||
198 | self.eat_trivias() | ||
199 | } | ||
200 | self.sink.finish_branch(); | ||
201 | } | ||
202 | |||
203 | fn eat_trivias(&mut self) { | ||
204 | while let Some(&token) = self.tokens.get(self.token_pos) { | ||
205 | if !token.kind.is_trivia() { | ||
206 | break; | ||
207 | } | ||
208 | self.leaf(token.kind, token.len, 1); | ||
209 | } | ||
210 | } | ||
211 | |||
212 | fn eat_n_trivias(&mut self, n: usize) { | ||
213 | for _ in 0..n { | ||
214 | let token = self.tokens[self.token_pos]; | ||
215 | assert!(token.kind.is_trivia()); | ||
216 | self.leaf(token.kind, token.len, 1); | ||
217 | } | ||
218 | } | ||
219 | |||
220 | fn leaf(&mut self, kind: SyntaxKind, len: TextUnit, n_tokens: usize) { | ||
221 | let range = TextRange::offset_len(self.text_pos, len); | ||
222 | let text: SmolStr = self.text[range].into(); | ||
223 | self.text_pos += len; | ||
224 | self.token_pos += n_tokens; | ||
225 | self.sink.leaf(kind, text); | ||
226 | } | ||
227 | } | ||
228 | |||
229 | fn n_attached_trivias<'a>( | ||
230 | kind: SyntaxKind, | ||
231 | trivias: impl Iterator<Item = (SyntaxKind, &'a str)>, | ||
232 | ) -> usize { | ||
233 | match kind { | ||
234 | CONST_DEF | TYPE_DEF | STRUCT_DEF | ENUM_DEF | ENUM_VARIANT | FN_DEF | TRAIT_DEF | ||
235 | | MODULE | NAMED_FIELD_DEF => { | ||
236 | let mut res = 0; | ||
237 | for (i, (kind, text)) in trivias.enumerate() { | ||
238 | match kind { | ||
239 | WHITESPACE => { | ||
240 | if text.contains("\n\n") { | ||
241 | break; | ||
242 | } | ||
243 | } | ||
244 | COMMENT => { | ||
245 | res = i + 1; | ||
246 | } | ||
247 | _ => (), | ||
248 | } | ||
249 | } | ||
250 | res | ||
251 | } | ||
252 | _ => 0, | ||
253 | } | ||
254 | } | ||
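To tie the `forward_parent` docs to concrete values: for `foo::bar` the parser records the inner `PATH` first and only later discovers the outer one, so the inner `Start` carries an offset pointing at the outer `Start`. The sketch below (a hypothetical test-only module; nothing in the crate builds event vectors by hand like this) spells out that stream:

```rust
#[cfg(test)]
mod forward_parent_example {
    use super::Event;
    use crate::SyntaxKind::{COLONCOLON, IDENT, PATH};

    // The flat stream for `foo::bar`, matching the diagram in the docs:
    // the first Start (inner PATH) points 3 events ahead to the outer
    // Start, so EventProcessor emits "start outer PATH" before
    // "start inner PATH", even though the events were recorded inner-first.
    #[allow(dead_code)]
    fn events_for_foo_colon_colon_bar() -> Vec<Event> {
        vec![
            Event::Start { kind: PATH, forward_parent: Some(3) },
            Event::Token { kind: IDENT, n_raw_tokens: 1 }, // 'foo'
            Event::Finish,
            Event::Start { kind: PATH, forward_parent: None },
            Event::Token { kind: COLONCOLON, n_raw_tokens: 1 }, // '::'
            Event::Token { kind: IDENT, n_raw_tokens: 1 }, // 'bar'
            Event::Finish,
        ]
    }
}
```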
diff --git a/crates/ra_syntax/src/parsing/parser_impl/input.rs b/crates/ra_syntax/src/parsing/parser_impl/input.rs new file mode 100644 index 000000000..275d94918 --- /dev/null +++ b/crates/ra_syntax/src/parsing/parser_impl/input.rs | |||
@@ -0,0 +1,104 @@ | |||
1 | use crate::{ | ||
2 | SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit, | ||
3 | parsing::lexer::Token, | ||
4 | }; | ||
5 | |||
6 | use std::ops::{Add, AddAssign}; | ||
7 | |||
8 | pub(crate) struct ParserInput<'t> { | ||
9 | text: &'t str, | ||
10 | /// start position of each token (except whitespace and comments) | ||
11 | /// ```non-rust | ||
12 | /// struct Foo; | ||
13 | /// ^------^--- | ||
14 | /// |      |  ^- | ||
15 | /// 0      7  10 | ||
16 | /// ``` | ||
17 | /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]` | ||
18 | start_offsets: Vec<TextUnit>, | ||
19 | /// non-whitespace/comment tokens | ||
20 | /// ```non-rust | ||
21 | /// struct Foo {} | ||
22 | /// ^^^^^^ ^^^ ^^ | ||
23 | /// ``` | ||
24 | /// tokens: `[struct, Foo, {, }]` | ||
25 | tokens: Vec<Token>, | ||
26 | } | ||
27 | |||
28 | impl<'t> ParserInput<'t> { | ||
29 | /// Generate input from tokens (excluding comments and whitespace). | ||
30 | pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> { | ||
31 | let mut tokens = Vec::new(); | ||
32 | let mut start_offsets = Vec::new(); | ||
33 | let mut len = 0.into(); | ||
34 | for &token in raw_tokens.iter() { | ||
35 | if !token.kind.is_trivia() { | ||
36 | tokens.push(token); | ||
37 | start_offsets.push(len); | ||
38 | } | ||
39 | len += token.len; | ||
40 | } | ||
41 | |||
42 | ParserInput { text, start_offsets, tokens } | ||
43 | } | ||
44 | |||
45 | /// Get the syntax kind of the token at the given input position. | ||
46 | pub fn kind(&self, pos: InputPosition) -> SyntaxKind { | ||
47 | let idx = pos.0 as usize; | ||
48 | if !(idx < self.tokens.len()) { | ||
49 | return EOF; | ||
50 | } | ||
51 | self.tokens[idx].kind | ||
52 | } | ||
53 | |||
54 | /// Get the length of the token at the given input position. | ||
55 | pub fn token_len(&self, pos: InputPosition) -> TextUnit { | ||
56 | let idx = pos.0 as usize; | ||
57 | if !(idx < self.tokens.len()) { | ||
58 | return 0.into(); | ||
59 | } | ||
60 | self.tokens[idx].len | ||
61 | } | ||
62 | |||
63 | /// Get the start position of the token at the given input position. | ||
64 | pub fn token_start_at(&self, pos: InputPosition) -> TextUnit { | ||
65 | let idx = pos.0 as usize; | ||
66 | if !(idx < self.tokens.len()) { | ||
67 | return 0.into(); | ||
68 | } | ||
69 | self.start_offsets[idx] | ||
70 | } | ||
71 | |||
72 | /// Get the raw text of the token at the given input position. | ||
73 | pub fn token_text(&self, pos: InputPosition) -> &'t str { | ||
74 | let idx = pos.0 as usize; | ||
75 | if !(idx < self.tokens.len()) { | ||
76 | return ""; | ||
77 | } | ||
78 | let range = TextRange::offset_len(self.start_offsets[idx], self.tokens[idx].len); | ||
79 | &self.text[range] | ||
80 | } | ||
81 | } | ||
82 | |||
83 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)] | ||
84 | pub(crate) struct InputPosition(u32); | ||
85 | |||
86 | impl InputPosition { | ||
87 | pub fn new() -> Self { | ||
88 | InputPosition(0) | ||
89 | } | ||
90 | } | ||
91 | |||
92 | impl Add<u32> for InputPosition { | ||
93 | type Output = InputPosition; | ||
94 | |||
95 | fn add(self, rhs: u32) -> InputPosition { | ||
96 | InputPosition(self.0 + rhs) | ||
97 | } | ||
98 | } | ||
99 | |||
100 | impl AddAssign<u32> for InputPosition { | ||
101 | fn add_assign(&mut self, rhs: u32) { | ||
102 | self.0 += rhs | ||
103 | } | ||
104 | } | ||
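Continuing the `struct Foo;` example from the doc comments above, the sketch below shows the values `ParserInput` ends up exposing. It is written as a test module that could live in this file but is not part of the commit; it assumes `tokenize` produces keyword kinds such as `STRUCT_KW` directly.

```rust
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{parsing::lexer::tokenize, SyntaxKind::{EOF, IDENT, STRUCT_KW}};

    #[test]
    fn input_skips_trivia_and_records_offsets() {
        let text = "struct Foo;";
        let raw_tokens = tokenize(text);
        let input = ParserInput::new(text, &raw_tokens);
        let pos = InputPosition::new();

        // The whitespace token is dropped, so position 1 is already `Foo`.
        assert_eq!(input.kind(pos), STRUCT_KW);
        assert_eq!(input.kind(pos + 1), IDENT);
        assert_eq!(input.token_text(pos + 1), "Foo");
        assert_eq!(input.token_start_at(pos + 2), 10.into());
        // Out-of-bounds positions degrade to EOF instead of panicking.
        assert_eq!(input.kind(pos + 3), EOF);
    }
}
```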
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs new file mode 100644 index 000000000..edf3fa291 --- /dev/null +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -0,0 +1,349 @@ | |||
1 | use crate::{ | ||
2 | SyntaxKind::*, TextRange, TextUnit, | ||
3 | algo, | ||
4 | syntax_node::{GreenNode, SyntaxNode}, | ||
5 | syntax_error::SyntaxError, | ||
6 | parsing::{ | ||
7 | grammar, | ||
8 | parser_impl, | ||
9 | builder::GreenBuilder, | ||
10 | parser_api::Parser, | ||
11 | lexer::{tokenize, Token}, | ||
12 | } | ||
13 | }; | ||
14 | |||
15 | use ra_text_edit::AtomTextEdit; | ||
16 | |||
17 | pub(crate) fn incremental_reparse( | ||
18 | node: &SyntaxNode, | ||
19 | edit: &AtomTextEdit, | ||
20 | errors: Vec<SyntaxError>, | ||
21 | ) -> Option<(GreenNode, Vec<SyntaxError>)> { | ||
22 | let (node, green, new_errors) = | ||
23 | reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?; | ||
24 | let green_root = node.replace_with(green); | ||
25 | let errors = merge_errors(errors, new_errors, node, edit); | ||
26 | Some((green_root, errors)) | ||
27 | } | ||
28 | |||
29 | fn reparse_leaf<'node>( | ||
30 | node: &'node SyntaxNode, | ||
31 | edit: &AtomTextEdit, | ||
32 | ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> { | ||
33 | let node = algo::find_covering_node(node, edit.delete); | ||
34 | match node.kind() { | ||
35 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { | ||
36 | let text = get_text_after_edit(node, &edit); | ||
37 | let tokens = tokenize(&text); | ||
38 | let token = match tokens[..] { | ||
39 | [token] if token.kind == node.kind() => token, | ||
40 | _ => return None, | ||
41 | }; | ||
42 | |||
43 | if token.kind == IDENT && is_contextual_kw(&text) { | ||
44 | return None; | ||
45 | } | ||
46 | |||
47 | let green = GreenNode::new_leaf(node.kind(), text.into()); | ||
48 | let new_errors = vec![]; | ||
49 | Some((node, green, new_errors)) | ||
50 | } | ||
51 | _ => None, | ||
52 | } | ||
53 | } | ||
54 | |||
55 | fn reparse_block<'node>( | ||
56 | node: &'node SyntaxNode, | ||
57 | edit: &AtomTextEdit, | ||
58 | ) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> { | ||
59 | let (node, reparser) = find_reparsable_node(node, edit.delete)?; | ||
60 | let text = get_text_after_edit(node, &edit); | ||
61 | let tokens = tokenize(&text); | ||
62 | if !is_balanced(&tokens) { | ||
63 | return None; | ||
64 | } | ||
65 | let (green, new_errors) = | ||
66 | parser_impl::parse_with(GreenBuilder::new(), &text, &tokens, reparser); | ||
67 | Some((node, green, new_errors)) | ||
68 | } | ||
69 | |||
70 | fn get_text_after_edit(node: &SyntaxNode, edit: &AtomTextEdit) -> String { | ||
71 | let edit = AtomTextEdit::replace(edit.delete - node.range().start(), edit.insert.clone()); | ||
72 | edit.apply(node.text().to_string()) | ||
73 | } | ||
74 | |||
75 | fn is_contextual_kw(text: &str) -> bool { | ||
76 | match text { | ||
77 | "auto" | "default" | "union" => true, | ||
78 | _ => false, | ||
79 | } | ||
80 | } | ||
81 | |||
82 | fn find_reparsable_node( | ||
83 | node: &SyntaxNode, | ||
84 | range: TextRange, | ||
85 | ) -> Option<(&SyntaxNode, fn(&mut Parser))> { | ||
86 | let node = algo::find_covering_node(node, range); | ||
87 | node.ancestors().find_map(|node| grammar::reparser(node).map(|r| (node, r))) | ||
88 | } | ||
89 | |||
90 | fn is_balanced(tokens: &[Token]) -> bool { | ||
91 | if tokens.is_empty() | ||
92 | || tokens.first().unwrap().kind != L_CURLY | ||
93 | || tokens.last().unwrap().kind != R_CURLY | ||
94 | { | ||
95 | return false; | ||
96 | } | ||
97 | let mut balance = 0usize; | ||
98 | for t in tokens.iter() { | ||
99 | match t.kind { | ||
100 | L_CURLY => balance += 1, | ||
101 | R_CURLY => { | ||
102 | balance = match balance.checked_sub(1) { | ||
103 | Some(b) => b, | ||
104 | None => return false, | ||
105 | } | ||
106 | } | ||
107 | _ => (), | ||
108 | } | ||
109 | } | ||
110 | balance == 0 | ||
111 | } | ||
112 | |||
113 | fn merge_errors( | ||
114 | old_errors: Vec<SyntaxError>, | ||
115 | new_errors: Vec<SyntaxError>, | ||
116 | old_node: &SyntaxNode, | ||
117 | edit: &AtomTextEdit, | ||
118 | ) -> Vec<SyntaxError> { | ||
119 | let mut res = Vec::new(); | ||
120 | for e in old_errors { | ||
121 | if e.offset() <= old_node.range().start() { | ||
122 | res.push(e) | ||
123 | } else if e.offset() >= old_node.range().end() { | ||
124 | res.push(e.add_offset(TextUnit::of_str(&edit.insert) - edit.delete.len())); | ||
125 | } | ||
126 | } | ||
127 | for e in new_errors { | ||
128 | res.push(e.add_offset(old_node.range().start())); | ||
129 | } | ||
130 | res | ||
131 | } | ||
132 | |||
133 | #[cfg(test)] | ||
134 | mod tests { | ||
135 | use test_utils::{extract_range, assert_eq_text}; | ||
136 | |||
137 | use crate::{SourceFile, AstNode, utils::dump_tree}; | ||
138 | use super::*; | ||
139 | |||
140 | fn do_check<F>(before: &str, replace_with: &str, reparser: F) | ||
141 | where | ||
142 | for<'a> F: Fn( | ||
143 | &'a SyntaxNode, | ||
144 | &AtomTextEdit, | ||
145 | ) -> Option<(&'a SyntaxNode, GreenNode, Vec<SyntaxError>)>, | ||
146 | { | ||
147 | let (range, before) = extract_range(before); | ||
148 | let edit = AtomTextEdit::replace(range, replace_with.to_owned()); | ||
149 | let after = edit.apply(before.clone()); | ||
150 | |||
151 | let fully_reparsed = SourceFile::parse(&after); | ||
152 | let incrementally_reparsed = { | ||
153 | let f = SourceFile::parse(&before); | ||
154 | let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; | ||
155 | let (node, green, new_errors) = | ||
156 | reparser(f.syntax(), &edit).expect("cannot incrementally reparse"); | ||
157 | let green_root = node.replace_with(green); | ||
158 | let errors = super::merge_errors(f.errors(), new_errors, node, &edit); | ||
159 | SourceFile::new(green_root, errors) | ||
160 | }; | ||
161 | |||
162 | assert_eq_text!( | ||
163 | &dump_tree(fully_reparsed.syntax()), | ||
164 | &dump_tree(incrementally_reparsed.syntax()), | ||
165 | ) | ||
166 | } | ||
167 | |||
168 | #[test] | ||
169 | fn reparse_block_tests() { | ||
170 | let do_check = |before, replace_to| do_check(before, replace_to, reparse_block); | ||
171 | |||
172 | do_check( | ||
173 | r" | ||
174 | fn foo() { | ||
175 | let x = foo + <|>bar<|> | ||
176 | } | ||
177 | ", | ||
178 | "baz", | ||
179 | ); | ||
180 | do_check( | ||
181 | r" | ||
182 | fn foo() { | ||
183 | let x = foo<|> + bar<|> | ||
184 | } | ||
185 | ", | ||
186 | "baz", | ||
187 | ); | ||
188 | do_check( | ||
189 | r" | ||
190 | struct Foo { | ||
191 | f: foo<|><|> | ||
192 | } | ||
193 | ", | ||
194 | ",\n g: (),", | ||
195 | ); | ||
196 | do_check( | ||
197 | r" | ||
198 | fn foo { | ||
199 | let; | ||
200 | 1 + 1; | ||
201 | <|>92<|>; | ||
202 | } | ||
203 | ", | ||
204 | "62", | ||
205 | ); | ||
206 | do_check( | ||
207 | r" | ||
208 | mod foo { | ||
209 | fn <|><|> | ||
210 | } | ||
211 | ", | ||
212 | "bar", | ||
213 | ); | ||
214 | do_check( | ||
215 | r" | ||
216 | trait Foo { | ||
217 | type <|>Foo<|>; | ||
218 | } | ||
219 | ", | ||
220 | "Output", | ||
221 | ); | ||
222 | do_check( | ||
223 | r" | ||
224 | impl IntoIterator<Item=i32> for Foo { | ||
225 | f<|><|> | ||
226 | } | ||
227 | ", | ||
228 | "n next(", | ||
229 | ); | ||
230 | do_check( | ||
231 | r" | ||
232 | use a::b::{foo,<|>,bar<|>}; | ||
233 | ", | ||
234 | "baz", | ||
235 | ); | ||
236 | do_check( | ||
237 | r" | ||
238 | pub enum A { | ||
239 | Foo<|><|> | ||
240 | } | ||
241 | ", | ||
242 | "\nBar;\n", | ||
243 | ); | ||
244 | do_check( | ||
245 | r" | ||
246 | foo!{a, b<|><|> d} | ||
247 | ", | ||
248 | ", c[3]", | ||
249 | ); | ||
250 | do_check( | ||
251 | r" | ||
252 | fn foo() { | ||
253 | vec![<|><|>] | ||
254 | } | ||
255 | ", | ||
256 | "123", | ||
257 | ); | ||
258 | do_check( | ||
259 | r" | ||
260 | extern { | ||
261 | fn<|>;<|> | ||
262 | } | ||
263 | ", | ||
264 | " exit(code: c_int)", | ||
265 | ); | ||
266 | } | ||
267 | |||
268 | #[test] | ||
269 | fn reparse_leaf_tests() { | ||
270 | let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf); | ||
271 | |||
272 | do_check( | ||
273 | r"<|><|> | ||
274 | fn foo() -> i32 { 1 } | ||
275 | ", | ||
276 | "\n\n\n \n", | ||
277 | ); | ||
278 | do_check( | ||
279 | r" | ||
280 | fn foo() -> <|><|> {} | ||
281 | ", | ||
282 | " \n", | ||
283 | ); | ||
284 | do_check( | ||
285 | r" | ||
286 | fn <|>foo<|>() -> i32 { 1 } | ||
287 | ", | ||
288 | "bar", | ||
289 | ); | ||
290 | do_check( | ||
291 | r" | ||
292 | fn foo<|><|>foo() { } | ||
293 | ", | ||
294 | "bar", | ||
295 | ); | ||
296 | do_check( | ||
297 | r" | ||
298 | fn foo /* <|><|> */ () {} | ||
299 | ", | ||
300 | "some comment", | ||
301 | ); | ||
302 | do_check( | ||
303 | r" | ||
304 | fn baz <|><|> () {} | ||
305 | ", | ||
306 | " \t\t\n\n", | ||
307 | ); | ||
308 | do_check( | ||
309 | r" | ||
310 | fn baz <|><|> () {} | ||
311 | ", | ||
312 | " \t\t\n\n", | ||
313 | ); | ||
314 | do_check( | ||
315 | r" | ||
316 | /// foo <|><|>omment | ||
317 | mod { } | ||
318 | ", | ||
319 | "c", | ||
320 | ); | ||
321 | do_check( | ||
322 | r#" | ||
323 | fn -> &str { "Hello<|><|>" } | ||
324 | "#, | ||
325 | ", world", | ||
326 | ); | ||
327 | do_check( | ||
328 | r#" | ||
329 | fn -> &str { // "Hello<|><|>" | ||
330 | "#, | ||
331 | ", world", | ||
332 | ); | ||
333 | do_check( | ||
334 | r##" | ||
335 | fn -> &str { r#"Hello<|><|>"# | ||
336 | "##, | ||
337 | ", world", | ||
338 | ); | ||
339 | do_check( | ||
340 | r" | ||
341 | #[derive(<|>Copy<|>)] | ||
342 | enum Foo { | ||
343 | |||
344 | } | ||
345 | ", | ||
346 | "Clone", | ||
347 | ); | ||
348 | } | ||
349 | } | ||
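For orientation, `incremental_reparse` is meant to be driven by whoever owns the previous tree and the edit, falling back to a full parse when neither the leaf nor the block path applies. A sketch of such a caller follows; the function name and its placement are assumptions, since the real wiring lives outside this file.

```rust
use ra_text_edit::AtomTextEdit;

use crate::{AstNode, SourceFile, parsing::reparsing::incremental_reparse};

// Hypothetical caller: try the cheap incremental path first, otherwise
// reparse the whole file from the edited text.
fn apply_edit(file: &SourceFile, edit: &AtomTextEdit) -> SourceFile {
    let old_errors = file.errors();
    match incremental_reparse(file.syntax(), edit, old_errors) {
        Some((green, errors)) => SourceFile::new(green, errors),
        None => SourceFile::parse(&edit.apply(file.syntax().text().to_string())),
    }
}
```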
diff --git a/crates/ra_syntax/src/parsing/token_set.rs b/crates/ra_syntax/src/parsing/token_set.rs new file mode 100644 index 000000000..5719fe5a2 --- /dev/null +++ b/crates/ra_syntax/src/parsing/token_set.rs | |||
@@ -0,0 +1,41 @@ | |||
1 | use crate::SyntaxKind; | ||
2 | |||
3 | #[derive(Clone, Copy)] | ||
4 | pub(crate) struct TokenSet(u128); | ||
5 | |||
6 | impl TokenSet { | ||
7 | pub(crate) const fn empty() -> TokenSet { | ||
8 | TokenSet(0) | ||
9 | } | ||
10 | |||
11 | pub(crate) const fn singleton(kind: SyntaxKind) -> TokenSet { | ||
12 | TokenSet(mask(kind)) | ||
13 | } | ||
14 | |||
15 | pub(crate) const fn union(self, other: TokenSet) -> TokenSet { | ||
16 | TokenSet(self.0 | other.0) | ||
17 | } | ||
18 | |||
19 | pub(crate) fn contains(&self, kind: SyntaxKind) -> bool { | ||
20 | self.0 & mask(kind) != 0 | ||
21 | } | ||
22 | } | ||
23 | |||
24 | const fn mask(kind: SyntaxKind) -> u128 { | ||
25 | 1u128 << (kind as usize) | ||
26 | } | ||
27 | |||
28 | #[macro_export] | ||
29 | macro_rules! token_set { | ||
30 | ($($t:ident),*) => { TokenSet::empty()$(.union(TokenSet::singleton($t)))* }; | ||
31 | ($($t:ident),* ,) => { token_set!($($t),*) }; | ||
32 | } | ||
33 | |||
34 | #[test] | ||
35 | fn token_set_works_for_tokens() { | ||
36 | use crate::SyntaxKind::*; | ||
37 | let ts = token_set! { EOF, SHEBANG }; | ||
38 | assert!(ts.contains(EOF)); | ||
39 | assert!(ts.contains(SHEBANG)); | ||
40 | assert!(!ts.contains(PLUS)); | ||
41 | } | ||
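In the grammar, `TokenSet`s are typically built once as `const` FIRST or recovery sets and then queried through `Parser::at_ts`. Below is a sketch of that pattern; the constant name and its contents are made up for the example, and the `use crate::token_set;` import of the exported macro is one possible way to bring it into scope.

```rust
use crate::{
    token_set,
    parsing::{parser_api::Parser, token_set::TokenSet},
    SyntaxKind::{COLONCOLON, IDENT, SELF_KW, SUPER_KW},
};

// A hypothetical FIRST set, computed entirely at compile time: each
// `union` is a single bitwise OR over the underlying `u128` masks.
const PATH_FIRST: TokenSet = token_set![IDENT, COLONCOLON, SELF_KW, SUPER_KW];

fn opt_path(p: &mut Parser) {
    // One AND-and-compare instead of a chain of `p.at(..)` checks.
    if p.at_ts(PATH_FIRST) {
        // paths::path(p) would be called here in the real grammar.
    }
}
```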