author     Edwin Cheng <[email protected]>    2019-04-13 11:38:31 +0100
committer  Edwin Cheng <[email protected]>    2019-04-14 04:42:20 +0100
commit     6646d49f238bb92d55fcb4900830f19faa2994a5
tree       1a7091b6e104abedcc086e99f560f36010aefe5f /crates/ra_parser
parent     f66300ccd1e6ef05b633cda06c87f913d1c91a1e
Fix bug and add expr, pat, ty matcher
Diffstat (limited to 'crates/ra_parser')

 -rw-r--r--  crates/ra_parser/src/grammar.rs              | 12
 -rw-r--r--  crates/ra_parser/src/grammar/expressions.rs  | 50
 -rw-r--r--  crates/ra_parser/src/grammar/patterns.rs     |  2
 -rw-r--r--  crates/ra_parser/src/lib.rs                  | 33
 -rw-r--r--  crates/ra_parser/src/parser.rs               | 92

5 files changed, 159 insertions, 30 deletions
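Context for the diffs below (my reading of the change, not text taken from the commit): `ra_parser` gains entry points for parsing single macro-matcher fragments (`expr`, `ty`, `pat`), and the expression grammar learns to respect the `L_DOLLAR`/`R_DOLLAR` tokens that presumably delimit substituted fragments in macro-expanded token streams. The grouping problem those delimiters encode can be shown with plain arithmetic; the names and values here are invented for illustration:

```rust
fn main() {
    // `$a * 3` with `$a := 1 + 2`; conceptually the expanded stream is
    //     L_DOLLAR  1  +  2  R_DOLLAR  *  3
    let grouped = (1 + 2) * 3; // the grouping the delimiters demand
    let naive = 1 + 2 * 3;     // what plain operator precedence would give
    assert_ne!(grouped, naive);
    println!("grouped = {grouped}, naive = {naive}"); // grouped = 9, naive = 7
}
```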
```diff
diff --git a/crates/ra_parser/src/grammar.rs b/crates/ra_parser/src/grammar.rs
index c5f510e6b..5a7a55141 100644
--- a/crates/ra_parser/src/grammar.rs
+++ b/crates/ra_parser/src/grammar.rs
@@ -53,6 +53,18 @@ pub(crate) fn path(p: &mut Parser) {
     paths::type_path(p);
 }
 
+pub(crate) fn expr(p: &mut Parser) {
+    expressions::expr(p);
+}
+
+pub(crate) fn type_(p: &mut Parser) {
+    types::type_(p)
+}
+
+pub(crate) fn pattern(p: &mut Parser) {
+    patterns::pattern(p)
+}
+
 pub(crate) fn reparser(
     node: SyntaxKind,
     first_child: Option<SyntaxKind>,
```
```diff
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs
index 9b38b0a31..295577325 100644
--- a/crates/ra_parser/src/grammar/expressions.rs
+++ b/crates/ra_parser/src/grammar/expressions.rs
@@ -8,17 +8,20 @@ const EXPR_FIRST: TokenSet = LHS_FIRST;
 
 pub(super) fn expr(p: &mut Parser) -> BlockLike {
     let r = Restrictions { forbid_structs: false, prefer_stmt: false };
-    expr_bp(p, r, 1).1
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl).1
 }
 
 pub(super) fn expr_stmt(p: &mut Parser) -> (Option<CompletedMarker>, BlockLike) {
     let r = Restrictions { forbid_structs: false, prefer_stmt: true };
-    expr_bp(p, r, 1)
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl)
 }
 
 fn expr_no_struct(p: &mut Parser) {
     let r = Restrictions { forbid_structs: true, prefer_stmt: false };
-    expr_bp(p, r, 1);
+    let mut dollar_lvl = 0;
+    expr_bp(p, r, 1, &mut dollar_lvl);
 }
 
 // test block
@@ -206,8 +209,23 @@ fn current_op(p: &Parser) -> (u8, Op) {
 }
 
 // Parses expression with binding power of at least bp.
-fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
-    let mut lhs = match lhs(p, r) {
+fn expr_bp(
+    p: &mut Parser,
+    r: Restrictions,
+    mut bp: u8,
+    dollar_lvl: &mut usize,
+) -> (Option<CompletedMarker>, BlockLike) {
+    // `newly_dollar_open` is a flag indicating that a dollar was just closed after the lhs, e.g.
+    // `$1$ + a`
+    // We use this flag to skip handling it.
+    let mut newly_dollar_open = false;
+
+    if p.at_l_dollar() {
+        *dollar_lvl += p.eat_l_dollars();
+        newly_dollar_open = true;
+    }
+
+    let mut lhs = match lhs(p, r, dollar_lvl) {
         Some((lhs, blocklike)) => {
             // test stmt_bin_expr_ambiguity
             // fn foo() {
@@ -223,6 +241,15 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
     };
 
     loop {
+        if *dollar_lvl > 0 && p.at_r_dollar() {
+            *dollar_lvl -= p.eat_r_dollars(*dollar_lvl);
+            if !newly_dollar_open {
+                // We "pump" bp to give it the highest priority
+                bp = 255;
+            }
+            newly_dollar_open = false;
+        }
+
         let is_range = p.current() == DOTDOT || p.current() == DOTDOTEQ;
         let (op_bp, op) = current_op(p);
         if op_bp < bp {
@@ -235,7 +262,8 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
                 p.bump_compound(kind, n);
             }
         }
-        expr_bp(p, r, op_bp + 1);
+
+        expr_bp(p, r, op_bp + 1, dollar_lvl);
         lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
     }
     (Some(lhs), BlockLike::NotBlock)
@@ -244,7 +272,11 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
 const LHS_FIRST: TokenSet =
     atom::ATOM_EXPR_FIRST.union(token_set![AMP, STAR, EXCL, DOTDOT, DOTDOTEQ, MINUS]);
 
-fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> {
+fn lhs(
+    p: &mut Parser,
+    r: Restrictions,
+    dollar_lvl: &mut usize,
+) -> Option<(CompletedMarker, BlockLike)> {
     let m;
     let kind = match p.current() {
         // test ref_expr
@@ -275,7 +307,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             m = p.start();
             p.bump();
             if p.at_ts(EXPR_FIRST) {
-                expr_bp(p, r, 2);
+                expr_bp(p, r, 2, dollar_lvl);
             }
             return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
         }
@@ -287,7 +319,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             ));
         }
     };
-    expr_bp(p, r, 255);
+    expr_bp(p, r, 255, dollar_lvl);
     Some((m.complete(p, kind), BlockLike::NotBlock))
 }
 
```
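Below is a minimal, self-contained sketch of the `dollar_lvl` / `newly_dollar_open` / bp-"pumping" control flow added to `expr_bp` above; it is not `ra_parser` code. The token kinds, binding powers, and the `i64` evaluator are invented stand-ins, and eating the closing delimiter is reduced to a single token; only the delimiter handling mirrors the diff.

```rust
// Standalone sketch of the dollar-delimiter handling in a Pratt parser.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Tok { LDollar, RDollar, Num(i64), Plus, Star, Eof }

struct P { toks: Vec<Tok>, pos: usize }

impl P {
    fn cur(&self) -> Tok { *self.toks.get(self.pos).unwrap_or(&Tok::Eof) }
    fn bump(&mut self) -> Tok { let t = self.cur(); self.pos += 1; t }
}

// Binding powers: `*` binds tighter than `+`; 0 means "not an operator".
fn op_bp(t: Tok) -> u8 {
    match t { Tok::Plus => 10, Tok::Star => 20, _ => 0 }
}

fn expr_bp(p: &mut P, mut bp: u8, dollar_lvl: &mut usize) -> i64 {
    // A fragment delimiter may open right at the start, e.g. `$1 + 2$ * 3`.
    let mut newly_dollar_open = false;
    while p.cur() == Tok::LDollar {
        p.bump();
        *dollar_lvl += 1;
        newly_dollar_open = true;
    }

    let mut lhs = match p.bump() {
        Tok::Num(n) => n,
        t => panic!("expected a number, got {:?}", t),
    };

    loop {
        // Closing a fragment that did not open in this call: raise bp to the
        // maximum so no operator can extend the expression past the boundary.
        if *dollar_lvl > 0 && p.cur() == Tok::RDollar {
            p.bump();
            *dollar_lvl -= 1;
            if !newly_dollar_open {
                bp = 255;
            }
            newly_dollar_open = false;
        }

        let cur_bp = op_bp(p.cur());
        if cur_bp == 0 || cur_bp < bp {
            return lhs;
        }
        let op = p.bump();
        let rhs = expr_bp(p, cur_bp + 1, dollar_lvl);
        lhs = match op {
            Tok::Plus => lhs + rhs,
            Tok::Star => lhs * rhs,
            _ => unreachable!(),
        };
    }
}

fn main() {
    // `$a * 3` with `$a := 1 + 2`, flattened to a token stream:
    let toks = vec![
        Tok::LDollar, Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::RDollar,
        Tok::Star, Tok::Num(3),
    ];
    let mut p = P { toks, pos: 0 };
    let mut dollar_lvl = 0;
    // Prints 9, i.e. (1 + 2) * 3; ignoring the delimiters would give 7.
    println!("{}", expr_bp(&mut p, 1, &mut dollar_lvl));
}
```

Running it prints 9, i.e. `(1 + 2) * 3`: the recursive call that parses the right operand of `+` reaches the closing `R_DOLLAR`, pumps its binding power to 255, and therefore refuses to consume the following `*`, so the delimited fragment stays grouped as one operand. Without the pumping it would come out as `1 + (2 * 3)`.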
```diff
diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs
index 9a307559b..03fa9b71e 100644
--- a/crates/ra_parser/src/grammar/patterns.rs
+++ b/crates/ra_parser/src/grammar/patterns.rs
@@ -5,7 +5,7 @@ pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
     .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE, MINUS]);
 
 pub(super) fn pattern(p: &mut Parser) {
-    pattern_r(p, PAT_RECOVERY_SET)
+    pattern_r(p, PAT_RECOVERY_SET);
 }
 
 /// Parses a pattern list separated by pipes `|`
```
```diff
diff --git a/crates/ra_parser/src/lib.rs b/crates/ra_parser/src/lib.rs
index 3ceeeebd7..56755c394 100644
--- a/crates/ra_parser/src/lib.rs
+++ b/crates/ra_parser/src/lib.rs
@@ -53,20 +53,39 @@ pub trait TreeSink {
     fn error(&mut self, error: ParseError);
 }
 
-/// Parse given tokens into the given sink as a rust file.
-pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+fn parse_from_tokens<F>(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink, f: F)
+where
+    F: FnOnce(&mut parser::Parser),
+{
     let mut p = parser::Parser::new(token_source);
-    grammar::root(&mut p);
+    f(&mut p);
     let events = p.finish();
     event::process(tree_sink, events);
 }
 
+/// Parse given tokens into the given sink as a rust file.
+pub fn parse(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::root);
+}
+
 /// Parse given tokens into the given sink as a path
 pub fn parse_path(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
-    let mut p = parser::Parser::new(token_source);
-    grammar::path(&mut p);
-    let events = p.finish();
-    event::process(tree_sink, events);
+    parse_from_tokens(token_source, tree_sink, grammar::path);
+}
+
+/// Parse given tokens into the given sink as an expression
+pub fn parse_expr(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::expr);
+}
+
+/// Parse given tokens into the given sink as a ty
+pub fn parse_ty(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::type_);
+}
+
+/// Parse given tokens into the given sink as a pattern
+pub fn parse_pat(token_source: &dyn TokenSource, tree_sink: &mut dyn TreeSink) {
+    parse_from_tokens(token_source, tree_sink, grammar::pattern);
 }
 
 /// A parsing function for a specific braced-block.
```
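The lib.rs change is an "extract the common driver" refactoring: every public entry point builds a `Parser`, runs one grammar rule, and feeds the resulting events to the sink, so that shared shape moves into `parse_from_tokens`, generic over `FnOnce(&mut parser::Parser)`. Here is a standalone sketch of the same pattern with toy types; the `Parser`, the string events, and the grammar rules below are invented, only the shape of the refactoring is the point.

```rust
// Toy model of the "one generic driver, many thin entry points" refactoring.
struct Parser { events: Vec<String> }

impl Parser {
    fn new() -> Parser { Parser { events: Vec::new() } }
    fn finish(self) -> Vec<String> { self.events }
}

mod grammar {
    use super::Parser;
    // All grammar rules share the signature `fn(&mut Parser)`.
    pub fn root(p: &mut Parser) { p.events.push("ROOT".into()); }
    pub fn expr(p: &mut Parser) { p.events.push("EXPR".into()); }
    pub fn pattern(p: &mut Parser) { p.events.push("PAT".into()); }
}

// The shared driver: build a parser, run one rule, drain the events.
fn parse_from_tokens<F>(f: F) -> Vec<String>
where
    F: FnOnce(&mut Parser),
{
    let mut p = Parser::new();
    f(&mut p);
    p.finish()
}

// Thin wrappers, mirroring `parse`, `parse_expr`, `parse_pat`.
fn parse() -> Vec<String> { parse_from_tokens(grammar::root) }
fn parse_expr() -> Vec<String> { parse_from_tokens(grammar::expr) }
fn parse_pat() -> Vec<String> { parse_from_tokens(grammar::pattern) }

fn main() {
    assert_eq!(parse(), vec!["ROOT"]);
    assert_eq!(parse_expr(), vec!["EXPR"]);
    assert_eq!(parse_pat(), vec!["PAT"]);
    println!("all entry points share one driver");
}
```

The design point worth noting is that `grammar::root`, `grammar::expr`, `grammar::type_` and `grammar::pattern` all have the signature `fn(&mut Parser)` (which is exactly what the grammar.rs hunk above adds), so they can be passed directly as the closure argument.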
```diff
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs
index 3cb57ed9c..71f1f8b30 100644
--- a/crates/ra_parser/src/parser.rs
+++ b/crates/ra_parser/src/parser.rs
@@ -45,8 +45,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `>>`.
     pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.token_pos);
-        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        let c1 = self.nth(0);
+        let c2 = self.nth(1);
+
         if self.token_source.is_token_joint_to_next(self.token_pos) {
             Some((c1, c2))
         } else {
@@ -59,9 +60,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `=>>`.
     pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.token_pos);
-        let c2 = self.token_source.token_kind(self.token_pos + 1);
-        let c3 = self.token_source.token_kind(self.token_pos + 2);
+        let c1 = self.nth(0);
+        let c2 = self.nth(1);
+        let c3 = self.nth(2);
         if self.token_source.is_token_joint_to_next(self.token_pos)
             && self.token_source.is_token_joint_to_next(self.token_pos + 1)
         {
@@ -77,7 +78,23 @@ impl<'t> Parser<'t> {
         let steps = self.steps.get();
         assert!(steps <= 10_000_000, "the parser seems stuck");
         self.steps.set(steps + 1);
-        self.token_source.token_kind(self.token_pos + n)
+
+        // Dollar tokens (L_DOLLAR/R_DOLLAR) can appear in between the real tokens;
+        // the following code skips over them.
+        let mut non_dollars_count = 0;
+        let mut i = 0;
+
+        loop {
+            let kind = self.token_source.token_kind(self.token_pos + i);
+            i += 1;
+
+            match kind {
+                EOF => return EOF,
+                SyntaxKind::L_DOLLAR | SyntaxKind::R_DOLLAR => {}
+                _ if non_dollars_count == n => return kind,
+                _ => non_dollars_count += 1,
+            }
+        }
     }
 
     /// Checks if the current token is `kind`.
@@ -99,8 +116,6 @@ impl<'t> Parser<'t> {
     /// consumed between the `start` and the corresponding `Marker::complete`
     /// belong to the same node.
    pub(crate) fn start(&mut self) -> Marker {
-        self.eat_dollars();
-
         let pos = self.events.len() as u32;
         self.push_event(Event::tombstone());
         Marker::new(pos)
@@ -185,7 +200,6 @@ impl<'t> Parser<'t> {
         self.eat_dollars();
         self.token_pos += usize::from(n_raw_tokens);
         self.push_event(Event::Token { kind, n_raw_tokens });
-        self.eat_dollars();
     }
 
     fn push_event(&mut self, event: Event) {
@@ -193,12 +207,64 @@ impl<'t> Parser<'t> {
     }
 
     fn eat_dollars(&mut self) {
-        while self.nth(0) == SyntaxKind::L_DOLLAR || self.nth(0) == SyntaxKind::R_DOLLAR {
-            let kind = self.nth(0);
-            self.token_pos += 1;
-            self.push_event(Event::Token { kind, n_raw_tokens: 1 });
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::L_DOLLAR | k @ SyntaxKind::R_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                }
+                _ => {
+                    return;
+                }
+            }
+        }
+    }
+
+    pub(crate) fn eat_l_dollars(&mut self) -> usize {
+        let mut ate_count = 0;
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::L_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                    ate_count += 1;
+                }
+                _ => {
+                    return ate_count;
+                }
+            }
         }
     }
+
+    pub(crate) fn eat_r_dollars(&mut self, max_count: usize) -> usize {
+        let mut ate_count = 0;
+        loop {
+            match self.token_source.token_kind(self.token_pos) {
+                k @ SyntaxKind::R_DOLLAR => {
+                    self.token_pos += 1;
+                    self.push_event(Event::Token { kind: k, n_raw_tokens: 1 });
+                    ate_count += 1;
+
+                    if max_count >= ate_count {
+                        return ate_count;
+                    }
+                }
+                _ => {
+                    return ate_count;
+                }
+            }
+        }
+    }
+
+    pub(crate) fn at_l_dollar(&self) -> bool {
+        let kind = self.token_source.token_kind(self.token_pos);
+        (kind == SyntaxKind::L_DOLLAR)
+    }
+
+    pub(crate) fn at_r_dollar(&self) -> bool {
+        let kind = self.token_source.token_kind(self.token_pos);
+        (kind == SyntaxKind::R_DOLLAR)
+    }
 }
 
 /// See `Parser::start`.
```
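Finally, a standalone sketch of the new lookahead behaviour of `Parser::nth` from the hunk above: lookahead counts only non-delimiter tokens and silently steps over `L_DOLLAR`/`R_DOLLAR`, so grammar code that peeks ahead never observes them; they are only consumed explicitly through `eat_dollars`, `eat_l_dollars` and `eat_r_dollars`. The token kinds and the `token_kind` helper below are invented stand-ins for the real `TokenSource`.

```rust
// Standalone sketch of delimiter-skipping lookahead.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Kind { LDollar, RDollar, Ident, Plus, Eof }

fn token_kind(tokens: &[Kind], pos: usize) -> Kind {
    *tokens.get(pos).unwrap_or(&Kind::Eof)
}

/// Returns the kind of the n-th *non-delimiter* token at or after `pos`.
fn nth(tokens: &[Kind], pos: usize, n: usize) -> Kind {
    let mut non_dollars_count = 0;
    let mut i = 0;
    loop {
        let kind = token_kind(tokens, pos + i);
        i += 1;
        match kind {
            Kind::Eof => return Kind::Eof,
            Kind::LDollar | Kind::RDollar => {} // invisible to lookahead
            _ if non_dollars_count == n => return kind,
            _ => non_dollars_count += 1,
        }
    }
}

fn main() {
    // Flattened fragments: `$a$ + $b$`  ->  L$ Ident R$ + L$ Ident R$
    let toks = [
        Kind::LDollar, Kind::Ident, Kind::RDollar,
        Kind::Plus,
        Kind::LDollar, Kind::Ident, Kind::RDollar,
    ];
    assert_eq!(nth(&toks, 0, 0), Kind::Ident);
    assert_eq!(nth(&toks, 0, 1), Kind::Plus);
    assert_eq!(nth(&toks, 0, 2), Kind::Ident);
    assert_eq!(nth(&toks, 0, 3), Kind::Eof);
    println!("delimiters are skipped during lookahead");
}
```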