author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2021-01-04 18:02:54 +0000
committer  GitHub <[email protected]>  2021-01-04 18:02:54 +0000
commit     0708bfeb7270923be5a2059ad5b99de183e667ba (patch)
tree       bb7ef6ad070b09e0cd5738a7a7a6f1b430009e8e /crates
parent     c96b4eec957e53802817384ca3c58d834f06d6db (diff)
parent     d387bfdc4abc47ff62f1dffe7d41ce2bb11e7a00 (diff)
Merge #7159

7159: Refactor mbe to reduce clone and copying r=edwin0cheng a=edwin0cheng

bors r+

Co-authored-by: Edwin Cheng <[email protected]>
Diffstat (limited to 'crates')
-rw-r--r--  crates/hir_expand/src/proc_macro.rs     |  10
-rw-r--r--  crates/mbe/src/mbe_expander/matcher.rs  |   6
-rw-r--r--  crates/mbe/src/subtree_source.rs        | 152
-rw-r--r--  crates/mbe/src/syntax_bridge.rs         |  51
-rw-r--r--  crates/tt/src/buffer.rs                 | 105
5 files changed, 172 insertions, 152 deletions
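
This merge replaces TokenBuffer::new with explicit from_tokens/from_subtree constructors and
changes Cursor::token_tree to hand out a borrowing TokenTreeRef instead of &TokenTree, so a
token tree is only cloned when a caller explicitly asks for it via cloned(). A minimal usage
sketch of the new cursor API follows (not part of this commit; the helper function is
hypothetical, the tt items are the ones touched in the diff below):

    use tt::buffer::{TokenBuffer, TokenTreeRef};

    // Walk the top level of a token stream, inspecting leaves by reference and
    // cloning a tree only where an owned copy is actually needed.
    fn count_puncts_and_copy(tokens: &[tt::TokenTree]) -> (usize, Vec<tt::TokenTree>) {
        let buffer = TokenBuffer::from_tokens(tokens);
        let mut cursor = buffer.begin();
        let mut puncts = 0;
        let mut owned = Vec::new();

        while !cursor.eof() {
            match cursor.token_tree() {
                // Leaves are handed out by reference; nothing is cloned here.
                Some(TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)) => puncts += 1,
                // cloned() materialises an owned tt::TokenTree on demand.
                Some(other) => owned.push(other.cloned()),
                None => {}
            }
            cursor = cursor.bump();
        }
        (puncts, owned)
    }
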
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs
index 7c77f6ce0..1923daca5 100644
--- a/crates/hir_expand/src/proc_macro.rs
+++ b/crates/hir_expand/src/proc_macro.rs
@@ -58,7 +58,7 @@ impl ProcMacroExpander {
 }
 
 fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
-    if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() {
         if punct.char == c {
             *cursor = cursor.bump();
             return true;
@@ -68,7 +68,7 @@ fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
 }
 
 fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
-    if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() {
         if Some(kind) == subtree.delimiter_kind() {
             *cursor = cursor.bump_subtree();
             return true;
@@ -78,7 +78,7 @@ fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
 }
 
 fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
-    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() {
         if t == ident.text.as_str() {
             *cursor = cursor.bump();
             return true;
@@ -88,7 +88,7 @@ fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
 }
 
 fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
-    let buffer = TokenBuffer::new(&tt.token_trees);
+    let buffer = TokenBuffer::from_tokens(&tt.token_trees);
     let mut p = buffer.begin();
     let mut result = tt::Subtree::default();
 
@@ -106,7 +106,7 @@ fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
             }
         }
 
-        result.token_trees.push(curr.token_tree()?.clone());
+        result.token_trees.push(curr.token_tree()?.cloned());
         p = curr.bump();
     }
 
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs
index ab5f87c48..fdc8844ce 100644
--- a/crates/mbe/src/mbe_expander/matcher.rs
+++ b/crates/mbe/src/mbe_expander/matcher.rs
@@ -309,7 +309,7 @@ impl<'a> TtIter<'a> {
             }
         }
 
-        let buffer = TokenBuffer::new(&self.inner.as_slice());
+        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
 
@@ -336,11 +336,11 @@ impl<'a> TtIter<'a> {
             err = Some(err!("no tokens consumed"));
         }
         let res = match res.len() {
-            1 => Some(res[0].clone()),
+            1 => Some(res[0].cloned()),
             0 => None,
             _ => Some(tt::TokenTree::Subtree(tt::Subtree {
                 delimiter: None,
-                token_trees: res.into_iter().cloned().collect(),
+                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
             })),
         };
         ExpandResult { value: res, err }
diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs
index d10d4b70e..d7433bd35 100644
--- a/crates/mbe/src/subtree_source.rs
+++ b/crates/mbe/src/subtree_source.rs
@@ -1,129 +1,104 @@
 //! FIXME: write short doc here
 
 use parser::{Token, TokenSource};
-use std::cell::{Cell, Ref, RefCell};
 use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
-use tt::buffer::{Cursor, TokenBuffer};
+use tt::buffer::TokenBuffer;
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 struct TtToken {
-    kind: SyntaxKind,
-    is_joint_to_next: bool,
+    tt: Token,
     text: SmolStr,
 }
 
-pub(crate) struct SubtreeTokenSource<'a> {
-    cached_cursor: Cell<Cursor<'a>>,
-    cached: RefCell<Vec<Option<TtToken>>>,
+pub(crate) struct SubtreeTokenSource {
+    cached: Vec<TtToken>,
     curr: (Token, usize),
 }
 
-impl<'a> SubtreeTokenSource<'a> {
+impl<'a> SubtreeTokenSource {
     // Helper function used in test
     #[cfg(test)]
     pub(crate) fn text(&self) -> SmolStr {
-        match *self.get(self.curr.1) {
+        match self.cached.get(self.curr.1) {
             Some(ref tt) => tt.text.clone(),
             _ => SmolStr::new(""),
         }
     }
 }
 
-impl<'a> SubtreeTokenSource<'a> {
-    pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
-        let cursor = buffer.begin();
+impl<'a> SubtreeTokenSource {
+    pub(crate) fn new(buffer: &TokenBuffer) -> SubtreeTokenSource {
+        let mut current = buffer.begin();
+        let mut cached = Vec::with_capacity(100);
 
-        let mut res = SubtreeTokenSource {
-            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
-            cached_cursor: Cell::new(cursor),
-            cached: RefCell::new(Vec::with_capacity(10)),
-        };
-        res.curr = (res.mk_token(0), 0);
-        res
-    }
+        while !current.eof() {
+            let cursor = current;
+            let tt = cursor.token_tree();
 
-    fn mk_token(&self, pos: usize) -> Token {
-        match *self.get(pos) {
-            Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
-            None => Token { kind: EOF, is_jointed_to_next: false },
-        }
-    }
-
-    fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
-        fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
-            let tkn = c.token_tree();
-
-            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
+            // Check if it is lifetime
+            if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
                 if punct.char == '\'' {
-                    let next = c.bump();
-                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
-                        let res_cursor = next.bump();
-                        let text = SmolStr::new("'".to_string() + &ident.to_string());
-
-                        return Some((res_cursor, text));
+                    let next = cursor.bump();
+                    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) =
+                        next.token_tree()
+                    {
+                        let text = SmolStr::new("'".to_string() + &ident.text);
+                        cached.push(TtToken {
+                            tt: Token { kind: LIFETIME_IDENT, is_jointed_to_next: false },
+                            text,
+                        });
+                        current = next.bump();
+                        continue;
                     } else {
                         panic!("Next token must be ident : {:#?}", next.token_tree());
                     }
                 }
             }
 
-            None
-        }
-
-        if pos < self.cached.borrow().len() {
-            return Ref::map(self.cached.borrow(), |c| &c[pos]);
-        }
-
-        {
-            let mut cached = self.cached.borrow_mut();
-            while pos >= cached.len() {
-                let cursor = self.cached_cursor.get();
-                if cursor.eof() {
-                    cached.push(None);
-                    continue;
+            current = match tt {
+                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+                    cached.push(convert_leaf(&leaf));
+                    cursor.bump()
                 }
-
-                if let Some((curr, text)) = is_lifetime(cursor) {
-                    cached.push(Some(TtToken {
-                        kind: LIFETIME_IDENT,
-                        is_joint_to_next: false,
-                        text,
-                    }));
-                    self.cached_cursor.set(curr);
-                    continue;
+                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+                    cached.push(convert_delim(subtree.delimiter_kind(), false));
+                    cursor.subtree().unwrap()
                 }
-
-                match cursor.token_tree() {
-                    Some(tt::TokenTree::Leaf(leaf)) => {
-                        cached.push(Some(convert_leaf(&leaf)));
-                        self.cached_cursor.set(cursor.bump());
-                    }
-                    Some(tt::TokenTree::Subtree(subtree)) => {
-                        self.cached_cursor.set(cursor.subtree().unwrap());
-                        cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
-                    }
-                    None => {
-                        if let Some(subtree) = cursor.end() {
-                            cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
-                            self.cached_cursor.set(cursor.bump());
-                        }
+                None => {
+                    if let Some(subtree) = cursor.end() {
+                        cached.push(convert_delim(subtree.delimiter_kind(), true));
+                        cursor.bump()
+                    } else {
+                        continue;
                     }
                 }
-            }
+            };
         }
 
-        Ref::map(self.cached.borrow(), |c| &c[pos])
+        let mut res = SubtreeTokenSource {
+            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
+            cached,
+        };
+        res.curr = (res.token(0), 0);
+        res
+    }
+
+    fn token(&self, pos: usize) -> Token {
+        match self.cached.get(pos) {
+            Some(it) => it.tt,
+            None => Token { kind: EOF, is_jointed_to_next: false },
+        }
     }
 }
 
-impl<'a> TokenSource for SubtreeTokenSource<'a> {
+impl<'a> TokenSource for SubtreeTokenSource {
     fn current(&self) -> Token {
         self.curr.0
     }
 
     /// Lookahead n token
     fn lookahead_nth(&self, n: usize) -> Token {
-        self.mk_token(self.curr.1 + n)
+        self.token(self.curr.1 + n)
     }
 
     /// bump cursor to next token
@@ -131,13 +106,12 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
         if self.current().kind == EOF {
             return;
         }
-
-        self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1);
+        self.curr = (self.token(self.curr.1 + 1), self.curr.1 + 1);
     }
 
     /// Is the current token a specified keyword?
     fn is_keyword(&self, kw: &str) -> bool {
-        match *self.get(self.curr.1) {
+        match self.cached.get(self.curr.1) {
             Some(ref t) => t.text == *kw,
             _ => false,
         }
@@ -155,7 +129,7 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
     let idx = closing as usize;
     let kind = kinds[idx];
     let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" };
-    TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) }
+    TtToken { tt: Token { kind, is_jointed_to_next: false }, text: SmolStr::new(text) }
 }
 
 fn convert_literal(l: &tt::Literal) -> TtToken {
@@ -169,7 +143,7 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
         })
         .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
 
-    TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
+    TtToken { tt: Token { kind, is_jointed_to_next: false }, text: l.text.clone() }
 }
 
 fn convert_ident(ident: &tt::Ident) -> TtToken {
@@ -180,7 +154,7 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
         _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
     };
 
-    TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
+    TtToken { tt: Token { kind, is_jointed_to_next: false }, text: ident.text.clone() }
 }
 
 fn convert_punct(p: tt::Punct) -> TtToken {
@@ -194,7 +168,7 @@ fn convert_punct(p: tt::Punct) -> TtToken {
         let s: &str = p.char.encode_utf8(&mut buf);
         SmolStr::new(s)
     };
-    TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
+    TtToken { tt: Token { kind, is_jointed_to_next: p.spacing == tt::Spacing::Joint }, text }
 }
 
 fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
@@ -208,6 +182,7 @@ fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
 #[cfg(test)]
 mod tests {
     use super::{convert_literal, TtToken};
+    use parser::Token;
     use syntax::{SmolStr, SyntaxKind};
 
     #[test]
@@ -218,8 +193,7 @@ mod tests {
                 text: SmolStr::new("-42.0")
             }),
             TtToken {
-                kind: SyntaxKind::FLOAT_NUMBER,
-                is_joint_to_next: false,
+                tt: Token { kind: SyntaxKind::FLOAT_NUMBER, is_jointed_to_next: false },
                 text: SmolStr::new("-42.0")
             }
         );
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 265c0d63d..671036e1c 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -70,15 +70,12 @@ pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
-    let tmp;
-    let tokens = match tt {
-        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
-        _ => {
-            tmp = [tt.clone().into()];
-            &tmp[..]
+    let buffer = match tt {
+        tt::Subtree { delimiter: None, token_trees } => {
+            TokenBuffer::from_tokens(token_trees.as_slice())
         }
+        _ => TokenBuffer::from_subtree(tt),
     };
-    let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
@@ -414,7 +411,7 @@ trait TokenConvertor {
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }
 
-impl<'a> SrcToken for (RawToken, &'a str) {
+impl<'a> SrcToken for (&'a RawToken, &'a str) {
     fn kind(&self) -> SyntaxKind {
         self.0.kind
     }
@@ -431,7 +428,7 @@ impl<'a> SrcToken for (RawToken, &'a str) {
 impl RawConvertor<'_> {}
 
 impl<'a> TokenConvertor for RawConvertor<'a> {
-    type Token = (RawToken, &'a str);
+    type Token = (&'a RawToken, &'a str);
 
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
         convert_doc_comment(&doc_comment(token.1))
@@ -442,11 +439,11 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
         let range = TextRange::at(self.offset, token.len);
         self.offset += token.len;
 
-        Some(((*token, &self.text[range]), range))
+        Some(((token, &self.text[range]), range))
     }
 
     fn peek(&self) -> Option<Self::Token> {
-        let token = self.inner.as_slice().get(0).cloned();
+        let token = self.inner.as_slice().get(0);
 
         token.map(|it| {
             let range = TextRange::at(self.offset, it.len);
@@ -601,17 +598,16 @@ impl<'a> TtTreeSink<'a> {
     }
 }
 
-fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
+fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> &'static str {
     let texts = match d {
         Some(tt::DelimiterKind::Parenthesis) => "()",
         Some(tt::DelimiterKind::Brace) => "{}",
         Some(tt::DelimiterKind::Bracket) => "[]",
-        None => return "".into(),
+        None => return "",
     };
 
     let idx = closing as usize;
-    let text = &texts[idx..texts.len() - (1 - idx)];
-    text.into()
+    &texts[idx..texts.len() - (1 - idx)]
 }
 
 impl<'a> TreeSink for TtTreeSink<'a> {
@@ -626,29 +622,32 @@ impl<'a> TreeSink for TtTreeSink<'a> {
 
         let mut last = self.cursor;
         for _ in 0..n_tokens {
+            let tmp_str: SmolStr;
             if self.cursor.eof() {
                 break;
             }
             last = self.cursor;
-            let text: SmolStr = match self.cursor.token_tree() {
-                Some(tt::TokenTree::Leaf(leaf)) => {
+            let text: &str = match self.cursor.token_tree() {
+                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                     // Mark the range if needed
                     let (text, id) = match leaf {
-                        tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
+                        tt::Leaf::Ident(ident) => (&ident.text, ident.id),
                         tt::Leaf::Punct(punct) => {
                             assert!(punct.char.is_ascii());
                             let char = &(punct.char as u8);
-                            let text = std::str::from_utf8(std::slice::from_ref(char)).unwrap();
-                            (SmolStr::new_inline(text), punct.id)
+                            tmp_str = SmolStr::new_inline(
+                                std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
+                            );
+                            (&tmp_str, punct.id)
                         }
-                        tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
+                        tt::Leaf::Literal(lit) => (&lit.text, lit.id),
                     };
                     let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
                     self.token_map.insert(id, range);
                     self.cursor = self.cursor.bump();
                     text
                 }
-                Some(tt::TokenTree::Subtree(subtree)) => {
+                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                     self.cursor = self.cursor.subtree().unwrap();
                     if let Some(id) = subtree.delimiter.map(|it| it.id) {
                         self.open_delims.insert(id, self.text_pos);
@@ -672,7 +671,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 }
             };
             self.buf += &text;
-            self.text_pos += TextSize::of(text.as_str());
+            self.text_pos += TextSize::of(text);
         }
 
         let text = SmolStr::new(self.buf.as_str());
@@ -682,8 +681,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         // Add whitespace between adjoint puncts
         let next = last.bump();
         if let (
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
         ) = (last.token_tree(), next.token_tree())
         {
             // Note: We always assume the semi-colon would be the last token in
@@ -742,7 +741,7 @@ mod tests {
         )
         .expand_tt("literals!(foo);");
         let tts = &[expansion.into()];
-        let buffer = tt::buffer::TokenBuffer::new(tts);
+        let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
         let mut tt_src = SubtreeTokenSource::new(&buffer);
         let mut tokens = vec![];
         while tt_src.current().kind != EOF {
diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs
index 02c771f70..3606c887d 100644
--- a/crates/tt/src/buffer.rs
+++ b/crates/tt/src/buffer.rs
@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use crate::{Subtree, TokenTree};
+use crate::{Leaf, Subtree, TokenTree};
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 struct EntryId(usize);
@@ -13,7 +13,7 @@ struct EntryPtr(EntryId, usize);
 #[derive(Debug)]
 enum Entry<'t> {
     // Mimicking types from proc-macro.
-    Subtree(&'t TokenTree, EntryId),
+    Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
     Leaf(&'t TokenTree),
     // End entries contain a pointer to the entry from the containing
     // token tree, or None if this is the outermost level.
@@ -27,37 +27,64 @@ pub struct TokenBuffer<'t> {
     buffers: Vec<Box<[Entry<'t>]>>,
 }
 
-impl<'t> TokenBuffer<'t> {
-    pub fn new(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
-        let mut buffers = vec![];
-
-        let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
-        assert_eq!(idx, 0);
-
-        TokenBuffer { buffers }
-    }
+trait TokenList<'a> {
+    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
+}
 
-    fn new_inner(
-        tokens: &'t [TokenTree],
-        buffers: &mut Vec<Box<[Entry<'t>]>>,
-        next: Option<EntryPtr>,
-    ) -> usize {
+impl<'a> TokenList<'a> for &'a [TokenTree] {
+    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
         // Must contain everything in tokens and then the Entry::End
-        let start_capacity = tokens.len() + 1;
+        let start_capacity = self.len() + 1;
         let mut entries = Vec::with_capacity(start_capacity);
         let mut children = vec![];
-
-        for (idx, tt) in tokens.iter().enumerate() {
+        for (idx, tt) in self.iter().enumerate() {
             match tt {
                 TokenTree::Leaf(_) => {
                     entries.push(Entry::Leaf(tt));
                 }
                 TokenTree::Subtree(subtree) => {
                     entries.push(Entry::End(None));
-                    children.push((idx, (subtree, tt)));
+                    children.push((idx, (subtree, Some(tt))));
                 }
             }
         }
+        (children, entries)
+    }
+}
+
+impl<'a> TokenList<'a> for &'a Subtree {
+    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+        // Must contain everything in tokens and then the Entry::End
+        let mut entries = vec![];
+        let mut children = vec![];
+        entries.push(Entry::End(None));
+        children.push((0usize, (*self, None)));
+        (children, entries)
+    }
+}
+
+impl<'t> TokenBuffer<'t> {
+    pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
+        Self::new(tokens)
+    }
+
+    pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
+        Self::new(subtree)
+    }
+
+    fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
+        let mut buffers = vec![];
+        let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
+        assert_eq!(idx, 0);
+        TokenBuffer { buffers }
+    }
+
+    fn new_inner<T: TokenList<'t>>(
+        tokens: T,
+        buffers: &mut Vec<Box<[Entry<'t>]>>,
+        next: Option<EntryPtr>,
+    ) -> usize {
+        let (children, mut entries) = tokens.entries();
 
         entries.push(Entry::End(next));
         let res = buffers.len();
@@ -65,11 +92,11 @@ impl<'t> TokenBuffer<'t> {
 
         for (child_idx, (subtree, tt)) in children {
             let idx = TokenBuffer::new_inner(
-                &subtree.token_trees,
+                subtree.token_trees.as_slice(),
                 buffers,
                 Some(EntryPtr(EntryId(res), child_idx + 1)),
             );
-            buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, EntryId(idx));
+            buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
         }
 
         res
@@ -87,6 +114,24 @@ impl<'t> TokenBuffer<'t> {
     }
 }
 
+#[derive(Debug)]
+pub enum TokenTreeRef<'a> {
+    Subtree(&'a Subtree, Option<&'a TokenTree>),
+    Leaf(&'a Leaf, &'a TokenTree),
+}
+
+impl<'a> TokenTreeRef<'a> {
+    pub fn cloned(&self) -> TokenTree {
+        match &self {
+            TokenTreeRef::Subtree(subtree, tt) => match tt {
+                Some(it) => (*it).clone(),
+                None => (*subtree).clone().into(),
+            },
+            TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
+        }
+    }
+}
+
 /// A safe version of `Cursor` from `syn` crate https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125
 #[derive(Copy, Clone, Debug)]
 pub struct Cursor<'a> {
@@ -114,12 +159,11 @@ impl<'a> Cursor<'a> {
         match self.entry() {
             Some(Entry::End(Some(ptr))) => {
                 let idx = ptr.1;
-                if let Some(Entry::Subtree(TokenTree::Subtree(subtree), _)) =
+                if let Some(Entry::Subtree(_, subtree, _)) =
                     self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
                 {
                     return Some(subtree);
                 }
-
                 None
             }
             _ => None,
@@ -134,7 +178,7 @@ impl<'a> Cursor<'a> {
     /// a cursor into that subtree
     pub fn subtree(self) -> Option<Cursor<'a>> {
         match self.entry() {
-            Some(Entry::Subtree(_, entry_id)) => {
+            Some(Entry::Subtree(_, _, entry_id)) => {
                 Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
             }
             _ => None,
@@ -142,10 +186,13 @@ impl<'a> Cursor<'a> {
     }
 
     /// If the cursor is pointing at a `TokenTree`, returns it
-    pub fn token_tree(self) -> Option<&'a TokenTree> {
+    pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
         match self.entry() {
-            Some(Entry::Leaf(tt)) => Some(tt),
-            Some(Entry::Subtree(tt, _)) => Some(tt),
+            Some(Entry::Leaf(tt)) => match tt {
+                TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
+                TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
+            },
+            Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
             Some(Entry::End(_)) => None,
             None => None,
         }
@@ -172,7 +219,7 @@ impl<'a> Cursor<'a> {
     /// a cursor into that subtree
     pub fn bump_subtree(self) -> Cursor<'a> {
         match self.entry() {
-            Some(Entry::Subtree(_, _)) => self.subtree().unwrap(),
+            Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
             _ => self.bump(),
         }
     }
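
Note on the two constructors added in crates/tt/src/buffer.rs above: from_tokens borrows a slice
of top-level token trees, while from_subtree wraps a whole delimited subtree, which is what lets
token_tree_to_syntax_node drop the old "tmp = [tt.clone().into()]" copy. A hedged sketch of
choosing between them (hypothetical helper, same assumptions as the sketch near the top):

    use tt::buffer::TokenBuffer;

    fn buffer_for(tt: &tt::Subtree) -> TokenBuffer<'_> {
        match tt {
            // An undelimited subtree can lend out its token slice directly.
            tt::Subtree { delimiter: None, token_trees } => {
                TokenBuffer::from_tokens(token_trees.as_slice())
            }
            // A delimited subtree is wrapped as a whole; nothing is cloned.
            _ => TokenBuffer::from_subtree(tt),
        }
    }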