diff options
author | bors[bot] <26634292+bors[bot]@users.noreply.github.com> | 2020-04-18 18:56:54 +0100 |
---|---|---|
committer | GitHub <[email protected]> | 2020-04-18 18:56:54 +0100 |
commit | 98819d89199c5138cc5018b036b0ec5d3fade77e (patch) | |
tree | 77b293ab5eb3a99f70082efe9cb66784f71f3343 /crates/ra_mbe/src/syntax_bridge.rs | |
parent | 84e3304a9bf0d68e30d58b1e37a6db2e9ec97525 (diff) | |
parent | 72bba9882889b2e20fd91e3c6c3a97debbbe6543 (diff) |
Merge #4029
4029: Fix various proc-macro bugs r=matklad a=edwin0cheng
This PR does the following things:
1. Fixed #4001 by splitting the `LIFETIME` lexer token into two mbe tokens. This is because a rustc token stream expects `LIFETIME` as a combination of punct and ident, whereas RA's `tt::TokenTree` previously treated it as a single `Ident`.
2. Fixed #4003, by skipping `proc-macro` for completion. It is because currently we don't have `AstNode` for `proc-macro`. We would need to redesign how to implement `HasSource` for `proc-macro`.
3. Fixed a bug in how empty `TokenStream`s are merged in `proc-macro-srv`, so that no L_DOLLAR or R_DOLLAR tokens are emitted accidentally.
Co-authored-by: Edwin Cheng <[email protected]>
Diffstat (limited to 'crates/ra_mbe/src/syntax_bridge.rs')
-rw-r--r-- | crates/ra_mbe/src/syntax_bridge.rs | 37 |
1 files changed, 32 insertions, 5 deletions
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 31e9b22e7..70899bc5d 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -271,7 +271,7 @@ struct RawConvertor<'a> { | |||
271 | inner: std::slice::Iter<'a, RawToken>, | 271 | inner: std::slice::Iter<'a, RawToken>, |
272 | } | 272 | } |
273 | 273 | ||
274 | trait SrcToken { | 274 | trait SrcToken: std::fmt::Debug { |
275 | fn kind(&self) -> SyntaxKind; | 275 | fn kind(&self) -> SyntaxKind; |
276 | 276 | ||
277 | fn to_char(&self) -> Option<char>; | 277 | fn to_char(&self) -> Option<char>; |
@@ -361,8 +361,12 @@ trait TokenConvertor { | |||
361 | Some(next) if next.kind().is_punct() => tt::Spacing::Joint, | 361 | Some(next) if next.kind().is_punct() => tt::Spacing::Joint, |
362 | _ => tt::Spacing::Alone, | 362 | _ => tt::Spacing::Alone, |
363 | }; | 363 | }; |
364 | let char = token.to_char().expect("Token from lexer must be single char"); | 364 | let char = match token.to_char() { |
365 | 365 | Some(c) => c, | |
366 | None => { | ||
367 | panic!("Token from lexer must be single char: token = {:#?}", token); | ||
368 | } | ||
369 | }; | ||
366 | tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() | 370 | tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() |
367 | } | 371 | } |
368 | } else { | 372 | } else { |
@@ -373,9 +377,28 @@ trait TokenConvertor { | |||
373 | } | 377 | } |
374 | let leaf: tt::Leaf = match k { | 378 | let leaf: tt::Leaf = match k { |
375 | T![true] | T![false] => make_leaf!(Literal), | 379 | T![true] | T![false] => make_leaf!(Literal), |
376 | IDENT | LIFETIME => make_leaf!(Ident), | 380 | IDENT => make_leaf!(Ident), |
377 | k if k.is_keyword() => make_leaf!(Ident), | 381 | k if k.is_keyword() => make_leaf!(Ident), |
378 | k if k.is_literal() => make_leaf!(Literal), | 382 | k if k.is_literal() => make_leaf!(Literal), |
383 | LIFETIME => { | ||
384 | let char_unit = TextUnit::from_usize(1); | ||
385 | let r = TextRange::offset_len(range.start(), char_unit); | ||
386 | let apostrophe = tt::Leaf::from(tt::Punct { | ||
387 | char: '\'', | ||
388 | spacing: tt::Spacing::Joint, | ||
389 | id: self.id_alloc().alloc(r), | ||
390 | }); | ||
391 | result.push(apostrophe.into()); | ||
392 | |||
393 | let r = | ||
394 | TextRange::offset_len(range.start() + char_unit, range.len() - char_unit); | ||
395 | let ident = tt::Leaf::from(tt::Ident { | ||
396 | text: SmolStr::new(&token.to_text()[1..]), | ||
397 | id: self.id_alloc().alloc(r), | ||
398 | }); | ||
399 | result.push(ident.into()); | ||
400 | return; | ||
401 | } | ||
379 | _ => return, | 402 | _ => return, |
380 | }; | 403 | }; |
381 | 404 | ||
@@ -455,6 +478,7 @@ impl Convertor { | |||
455 | } | 478 | } |
456 | } | 479 | } |
457 | 480 | ||
481 | #[derive(Debug)] | ||
458 | enum SynToken { | 482 | enum SynToken { |
459 | Ordiniary(SyntaxToken), | 483 | Ordiniary(SyntaxToken), |
460 | Punch(SyntaxToken, TextUnit), | 484 | Punch(SyntaxToken, TextUnit), |
@@ -592,11 +616,14 @@ fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr { | |||
592 | } | 616 | } |
593 | 617 | ||
594 | impl<'a> TreeSink for TtTreeSink<'a> { | 618 | impl<'a> TreeSink for TtTreeSink<'a> { |
595 | fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { | 619 | fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { |
596 | if kind == L_DOLLAR || kind == R_DOLLAR { | 620 | if kind == L_DOLLAR || kind == R_DOLLAR { |
597 | self.cursor = self.cursor.bump_subtree(); | 621 | self.cursor = self.cursor.bump_subtree(); |
598 | return; | 622 | return; |
599 | } | 623 | } |
624 | if kind == LIFETIME { | ||
625 | n_tokens = 2; | ||
626 | } | ||
600 | 627 | ||
601 | let mut last = self.cursor; | 628 | let mut last = self.cursor; |
602 | for _ in 0..n_tokens { | 629 | for _ in 0..n_tokens { |