Diffstat (limited to 'crates/mbe/src/syntax_bridge.rs')
-rw-r--r--  crates/mbe/src/syntax_bridge.rs  51
1 file changed, 25 insertions(+), 26 deletions(-)
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 265c0d63d..671036e1c 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -70,15 +70,12 @@ pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
-    let tmp;
-    let tokens = match tt {
-        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
-        _ => {
-            tmp = [tt.clone().into()];
-            &tmp[..]
+    let buffer = match tt {
+        tt::Subtree { delimiter: None, token_trees } => {
+            TokenBuffer::from_tokens(token_trees.as_slice())
         }
+        _ => TokenBuffer::from_subtree(tt),
     };
-    let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
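Note on the hunk above: the old code built a one-element temporary array so that both cases could flow through TokenBuffer::new over a token slice; the new constructors absorb that distinction. A minimal sketch of the calling pattern, assuming the tt::Subtree and lifetime-parameterized TokenBuffer types from this crate (the helper name buffer_for is made up for illustration):

    // Sketch only: chooses the right TokenBuffer constructor for a fragment.
    // `buffer_for` is a hypothetical helper, not part of the crate.
    fn buffer_for(tt: &tt::Subtree) -> TokenBuffer<'_> {
        match tt {
            // An undelimited subtree is just a flat list of tokens.
            tt::Subtree { delimiter: None, token_trees } => {
                TokenBuffer::from_tokens(token_trees.as_slice())
            }
            // A delimited subtree is wrapped whole; no temporary array needed.
            _ => TokenBuffer::from_subtree(tt),
        }
    }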
@@ -414,7 +411,7 @@ trait TokenConvertor
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }
 
-impl<'a> SrcToken for (RawToken, &'a str) {
+impl<'a> SrcToken for (&'a RawToken, &'a str) {
     fn kind(&self) -> SyntaxKind {
         self.0.kind
     }
@@ -431,7 +428,7 @@ impl<'a> SrcToken for (RawToken, &'a str) {
 impl RawConvertor<'_> {}
 
 impl<'a> TokenConvertor for RawConvertor<'a> {
-    type Token = (RawToken, &'a str);
+    type Token = (&'a RawToken, &'a str);
 
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
         convert_doc_comment(&doc_comment(token.1))
@@ -442,11 +439,11 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
         let range = TextRange::at(self.offset, token.len);
         self.offset += token.len;
 
-        Some(((*token, &self.text[range]), range))
+        Some(((token, &self.text[range]), range))
     }
 
     fn peek(&self) -> Option<Self::Token> {
-        let token = self.inner.as_slice().get(0).cloned();
+        let token = self.inner.as_slice().get(0);
 
         token.map(|it| {
             let range = TextRange::at(self.offset, it.len);
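Worth noting on the peek change above: because Self::Token now holds &'a RawToken rather than an owned RawToken, slice::get(0) already yields the right Option<&RawToken>, and the previous .cloned() copy disappears. A tiny illustration of the difference, using a made-up RawToken stand-in:

    #[derive(Clone, Copy)]
    struct RawToken { len: u32 }

    // Old shape: copy the token out of the slice.
    fn peek_by_value(tokens: &[RawToken]) -> Option<RawToken> {
        tokens.get(0).cloned()
    }

    // New shape: borrow it in place, as the converter now does.
    fn peek_by_ref(tokens: &[RawToken]) -> Option<&RawToken> {
        tokens.get(0)
    }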
@@ -601,17 +598,16 @@ impl<'a> TtTreeSink<'a> {
     }
 }
 
-fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
+fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> &'static str {
     let texts = match d {
         Some(tt::DelimiterKind::Parenthesis) => "()",
         Some(tt::DelimiterKind::Brace) => "{}",
         Some(tt::DelimiterKind::Bracket) => "[]",
-        None => return "".into(),
+        None => return "",
     };
 
     let idx = closing as usize;
-    let text = &texts[idx..texts.len() - (1 - idx)];
-    text.into()
+    &texts[idx..texts.len() - (1 - idx)]
 }
 
 impl<'a> TreeSink for TtTreeSink<'a> {
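The slice arithmetic in delim_to_str reads one byte out of a two-character literal: with closing == false the index is 0 and the slice is texts[0..1] (the opening delimiter); with closing == true it is texts[1..2] (the closing one). Because the literals are 'static, the sub-slice can be returned directly, which is what lets the signature drop SmolStr. A standalone sketch of the same arithmetic (the function name and test are illustrative):

    fn delim_char(texts: &'static str, closing: bool) -> &'static str {
        // texts is a two-byte ASCII pair such as "()", "{}" or "[]".
        let idx = closing as usize; // 0 for the opening char, 1 for the closing one
        &texts[idx..texts.len() - (1 - idx)]
    }

    #[test]
    fn picks_the_right_half() {
        assert_eq!(delim_char("()", false), "(");
        assert_eq!(delim_char("()", true), ")");
    }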
@@ -626,29 +622,32 @@ impl<'a> TreeSink for TtTreeSink<'a> {
 
         let mut last = self.cursor;
         for _ in 0..n_tokens {
+            let tmp_str: SmolStr;
             if self.cursor.eof() {
                 break;
             }
             last = self.cursor;
-            let text: SmolStr = match self.cursor.token_tree() {
-                Some(tt::TokenTree::Leaf(leaf)) => {
+            let text: &str = match self.cursor.token_tree() {
+                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                     // Mark the range if needed
                     let (text, id) = match leaf {
-                        tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
+                        tt::Leaf::Ident(ident) => (&ident.text, ident.id),
                         tt::Leaf::Punct(punct) => {
                             assert!(punct.char.is_ascii());
                             let char = &(punct.char as u8);
-                            let text = std::str::from_utf8(std::slice::from_ref(char)).unwrap();
-                            (SmolStr::new_inline(text), punct.id)
+                            tmp_str = SmolStr::new_inline(
+                                std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
+                            );
+                            (&tmp_str, punct.id)
                         }
-                        tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
+                        tt::Leaf::Literal(lit) => (&lit.text, lit.id),
                     };
                     let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
                     self.token_map.insert(id, range);
                     self.cursor = self.cursor.bump();
                     text
                 }
-                Some(tt::TokenTree::Subtree(subtree)) => {
+                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                     self.cursor = self.cursor.subtree().unwrap();
                     if let Some(id) = subtree.delimiter.map(|it| it.id) {
                         self.open_delims.insert(id, self.text_pos);
@@ -672,7 +671,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 }
             };
             self.buf += &text;
-            self.text_pos += TextSize::of(text.as_str());
+            self.text_pos += TextSize::of(text);
         }
 
         let text = SmolStr::new(self.buf.as_str());
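The tmp_str lines in the hunks above use a standard Rust idiom: declare an uninitialized local before the match, assign it only in the arm that needs owned storage, and borrow from it, so every arm can produce a plain &str and the per-token SmolStr clones go away. A minimal self-contained sketch of the pattern (names are illustrative, not from the crate):

    // `owned` is assigned only in the arm that must allocate, yet the borrow
    // outlives the match because the local is declared outside it.
    fn describe(n: i32) -> usize {
        let owned: String;
        let text: &str = match n {
            0 => "zero",               // borrow of a 'static literal, no allocation
            _ => {
                owned = n.to_string(); // allocate only when needed
                &owned
            }
        };
        text.len()
    }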
@@ -682,8 +681,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         // Add whitespace between adjoint puncts
         let next = last.bump();
         if let (
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
         ) = (last.token_tree(), next.token_tree())
         {
             // Note: We always assume the semi-colon would be the last token in
@@ -742,7 +741,7 @@ mod tests {
         )
         .expand_tt("literals!(foo);");
         let tts = &[expansion.into()];
-        let buffer = tt::buffer::TokenBuffer::new(tts);
+        let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
         let mut tt_src = SubtreeTokenSource::new(&buffer);
         let mut tokens = vec![];
         while tt_src.current().kind != EOF {