Diffstat (limited to 'crates/ra_mbe/src')
 crates/ra_mbe/src/subtree_parser.rs |  42
 crates/ra_mbe/src/subtree_source.rs | 139
 crates/ra_mbe/src/syntax_bridge.rs  | 109
 3 files changed, 134 insertions(+), 156 deletions(-)
diff --git a/crates/ra_mbe/src/subtree_parser.rs b/crates/ra_mbe/src/subtree_parser.rs
index 9cc989b23..1f12e42ef 100644
--- a/crates/ra_mbe/src/subtree_parser.rs
+++ b/crates/ra_mbe/src/subtree_parser.rs
@@ -2,16 +2,38 @@ use crate::subtree_source::SubtreeTokenSource;
 
 use ra_parser::{TokenSource, TreeSink};
 use ra_syntax::{SyntaxKind};
-use tt::buffer::TokenBuffer;
+use tt::buffer::{TokenBuffer, Cursor};
 
-struct OffsetTokenSink {
-    token_pos: usize,
+struct OffsetTokenSink<'a> {
+    cursor: Cursor<'a>,
     error: bool,
 }
 
-impl TreeSink for OffsetTokenSink {
+impl<'a> OffsetTokenSink<'a> {
+    pub fn collect(&self, begin: Cursor<'a>) -> Vec<tt::TokenTree> {
+        if !self.cursor.is_root() {
+            return vec![];
+        }
+
+        let mut curr = begin;
+        let mut res = vec![];
+
+        while self.cursor != curr {
+            if let Some(token) = curr.token_tree() {
+                res.push(token);
+            }
+            curr = curr.bump();
+        }
+
+        res
+    }
+}
+
+impl<'a> TreeSink for OffsetTokenSink<'a> {
     fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
-        self.token_pos += n_tokens as usize;
+        for _ in 0..n_tokens {
+            self.cursor = self.cursor.bump_subtree();
+        }
     }
     fn start_node(&mut self, _kind: SyntaxKind) {}
     fn finish_node(&mut self) {}
@@ -72,23 +94,21 @@ impl<'a> Parser<'a> {
     {
         let buffer = TokenBuffer::new(&self.subtree.token_trees[*self.cur_pos..]);
         let mut src = SubtreeTokenSource::new(&buffer);
-        let mut sink = OffsetTokenSink { token_pos: 0, error: false };
+        let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
 
         f(&mut src, &mut sink);
 
-        let r = self.finish(sink.token_pos, &mut src);
+        let r = self.finish(buffer.begin(), &mut sink);
         if sink.error {
             return None;
         }
         r
     }
 
-    fn finish(self, parsed_token: usize, src: &mut SubtreeTokenSource) -> Option<tt::TokenTree> {
-        let res = src.bump_n(parsed_token);
+    fn finish(self, begin: Cursor, sink: &mut OffsetTokenSink) -> Option<tt::TokenTree> {
+        let res = sink.collect(begin);
         *self.cur_pos += res.len();
 
-        let res: Vec<_> = res.into_iter().collect();
-
         match res.len() {
             0 => None,
             1 => Some(res[0].clone()),
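Note on subtree_parser.rs: the sink no longer counts parsed tokens with a bare usize; it carries a Cursor into the TokenBuffer and advances it with bump_subtree as the parser reports tokens. collect(begin) then re-walks the buffer from the starting cursor up to wherever the sink stopped and returns the consumed top-level token trees, or nothing if the parser stopped inside a nested subtree. A minimal sketch of that walk, assuming only the Cursor operations used in this patch (token_tree, bump, equality) and that token_tree() yields an owned tt::TokenTree, as the patch's own collect does:

    // Sketch, not part of the patch: gather the token trees between two
    // cursors into the same TokenBuffer.
    fn trees_between<'a>(begin: Cursor<'a>, stop: Cursor<'a>) -> Vec<tt::TokenTree> {
        let mut curr = begin;
        let mut res = Vec::new();
        while curr != stop {
            if let Some(tree) = curr.token_tree() {
                res.push(tree);
            }
            curr = curr.bump();
        }
        res
    }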
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index c4f79f38a..7647e16d8 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -1,13 +1,8 @@
 use ra_parser::{TokenSource, Token};
 use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T};
 use std::cell::{RefCell, Cell};
-use std::sync::Arc;
 use tt::buffer::{TokenBuffer, Cursor};
 
-pub(crate) trait Querier {
-    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool);
-}
-
 #[derive(Debug, Clone, Eq, PartialEq)]
 struct TtToken {
     pub kind: SyntaxKind,
@@ -15,20 +10,40 @@ struct TtToken {
     pub text: SmolStr,
 }
 
-// A wrapper class for ref cell
-#[derive(Debug)]
-pub(crate) struct SubtreeWalk<'a> {
-    start: Cursor<'a>,
-    cursor: Cell<Cursor<'a>>,
+pub(crate) struct SubtreeTokenSource<'a> {
+    cached_cursor: Cell<Cursor<'a>>,
     cached: RefCell<Vec<Option<TtToken>>>,
+    curr: (Token, usize),
+}
+
+impl<'a> SubtreeTokenSource<'a> {
+    // Helper function used in test
+    #[cfg(test)]
+    pub fn text(&self) -> SmolStr {
+        match self.get(self.curr.1) {
+            Some(tt) => tt.text,
+            _ => SmolStr::new(""),
+        }
+    }
 }
 
-impl<'a> SubtreeWalk<'a> {
-    fn new(cursor: Cursor<'a>) -> Self {
-        SubtreeWalk {
-            start: cursor,
-            cursor: Cell::new(cursor),
+impl<'a> SubtreeTokenSource<'a> {
+    pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
+        let cursor = buffer.begin();
+
+        let mut res = SubtreeTokenSource {
+            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
+            cached_cursor: Cell::new(cursor),
             cached: RefCell::new(Vec::with_capacity(10)),
+        };
+        res.curr = (res.mk_token(0), 0);
+        res
+    }
+
+    fn mk_token(&self, pos: usize) -> Token {
+        match self.get(pos) {
+            Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
+            None => Token { kind: EOF, is_jointed_to_next: false },
         }
     }
 
@@ -39,7 +54,7 @@ impl<'a> SubtreeWalk<'a> {
         }
 
         while pos >= cached.len() {
-            let cursor = self.cursor.get();
+            let cursor = self.cached_cursor.get();
             if cursor.eof() {
                 cached.push(None);
                 continue;
@@ -48,16 +63,16 @@ impl<'a> SubtreeWalk<'a> {
             match cursor.token_tree() {
                 Some(tt::TokenTree::Leaf(leaf)) => {
                     cached.push(Some(convert_leaf(&leaf)));
-                    self.cursor.set(cursor.bump());
+                    self.cached_cursor.set(cursor.bump());
                 }
                 Some(tt::TokenTree::Subtree(subtree)) => {
-                    self.cursor.set(cursor.subtree().unwrap());
+                    self.cached_cursor.set(cursor.subtree().unwrap());
                     cached.push(Some(convert_delim(subtree.delimiter, false)));
                 }
                 None => {
                     if let Some(subtree) = cursor.end() {
                         cached.push(Some(convert_delim(subtree.delimiter, true)));
-                        self.cursor.set(cursor.bump());
+                        self.cached_cursor.set(cursor.bump());
                     }
                 }
             }
@@ -65,88 +80,6 @@ impl<'a> SubtreeWalk<'a> {
 
         return cached[pos].clone();
     }
-
-    fn collect_token_trees(&self, n: usize) -> Vec<tt::TokenTree> {
-        let mut res = vec![];
-
-        let mut pos = 0;
-        let mut cursor = self.start;
-        let mut level = 0;
-
-        while pos < n {
-            if cursor.eof() {
-                break;
-            }
-
-            match cursor.token_tree() {
-                Some(tt::TokenTree::Leaf(leaf)) => {
-                    if level == 0 {
-                        res.push(leaf.into());
-                    }
-                    cursor = cursor.bump();
-                    pos += 1;
-                }
-                Some(tt::TokenTree::Subtree(subtree)) => {
-                    if level == 0 {
-                        res.push(subtree.into());
-                    }
-                    pos += 1;
-                    level += 1;
-                    cursor = cursor.subtree().unwrap();
-                }
-
-                None => {
-                    if let Some(_) = cursor.end() {
-                        level -= 1;
-                        pos += 1;
-                        cursor = cursor.bump();
-                    }
-                }
-            }
-        }
-
-        res
-    }
-}
-
-impl<'a> Querier for SubtreeWalk<'a> {
-    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool) {
-        self.get(uidx)
-            .map(|tkn| (tkn.kind, tkn.text, tkn.is_joint_to_next))
-            .unwrap_or_else(|| (SyntaxKind::EOF, "".into(), false))
-    }
-}
-
-pub(crate) struct SubtreeTokenSource<'a> {
-    walker: Arc<SubtreeWalk<'a>>,
-    curr: (Token, usize),
-}
-
-impl<'a> SubtreeTokenSource<'a> {
-    pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
-        let mut res = SubtreeTokenSource {
-            walker: Arc::new(SubtreeWalk::new(buffer.begin())),
-            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
-        };
-        res.curr = (res.mk_token(0), 0);
-        res
-    }
-
-    pub fn querier(&self) -> Arc<SubtreeWalk<'a>> {
-        self.walker.clone()
-    }
-
-    pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> {
-        let res = self.walker.collect_token_trees(parsed_tokens);
-        res
-    }
-
-    fn mk_token(&self, pos: usize) -> Token {
-        match self.walker.get(pos) {
-            Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
-            None => Token { kind: EOF, is_jointed_to_next: false },
-        }
-    }
 }
 
 impl<'a> TokenSource for SubtreeTokenSource<'a> {
@@ -165,12 +98,12 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
             return;
         }
 
-        self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1)
+        self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1);
    }
 
     /// Is the current token a specified keyword?
     fn is_keyword(&self, kw: &str) -> bool {
-        match self.walker.get(self.curr.1) {
+        match self.get(self.curr.1) {
             Some(t) => t.text == *kw,
             _ => false,
         }
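Note on subtree_source.rs: the Arc<SubtreeWalk> wrapper and the Querier trait are gone; SubtreeTokenSource now holds the walking cursor (cached_cursor) and the memo table (cached) directly, and callers that previously went through querier() read tokens from the source itself. The lazy conversion is unchanged: get(pos) extends the RefCell-backed cache until the requested position is filled. A self-contained illustration of that interior-mutability pattern (generic stand-in types, not the ones in this crate):

    use std::cell::RefCell;

    // get() takes &self but can still grow the cache, because the Vec sits
    // behind a RefCell; each position is produced at most once.
    struct LazyCache<T: Clone> {
        produce: Box<dyn Fn(usize) -> Option<T>>,
        cached: RefCell<Vec<Option<T>>>,
    }

    impl<T: Clone> LazyCache<T> {
        fn get(&self, pos: usize) -> Option<T> {
            let mut cached = self.cached.borrow_mut();
            while pos >= cached.len() {
                let next_pos = cached.len();
                let item = (self.produce)(next_pos);
                cached.push(item);
            }
            cached[pos].clone()
        }
    }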
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 0a7e50c4e..c0a3fec35 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -3,8 +3,8 @@ use ra_syntax::{
     AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
     ast, SyntaxKind::*, TextUnit, T
 };
-
-use crate::subtree_source::{SubtreeTokenSource, Querier};
+use tt::buffer::{TokenBuffer, Cursor};
+use crate::subtree_source::{SubtreeTokenSource};
 use crate::ExpandError;
 
 /// Maps `tt::TokenId` to the relative range of the original token.
@@ -49,10 +49,9 @@ fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<TreeArc<Syntax
 where
     F: Fn(&mut ra_parser::TokenSource, &mut ra_parser::TreeSink),
 {
-    let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]);
+    let buffer = TokenBuffer::new(&[tt.clone().into()]);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let querier = token_source.querier();
-    let mut tree_sink = TtTreeSink::new(querier.as_ref());
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
     f(&mut token_source, &mut tree_sink);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
@@ -259,11 +258,10 @@ fn convert_tt(
     Some(res)
 }
 
-struct TtTreeSink<'a, Q: Querier> {
+struct TtTreeSink<'a> {
     buf: String,
-    src_querier: &'a Q,
+    cursor: Cursor<'a>,
     text_pos: TextUnit,
-    token_pos: usize,
     inner: SyntaxTreeBuilder,
 
     // Number of roots
@@ -271,52 +269,75 @@ struct TtTreeSink<'a, Q: Querier> {
     roots: smallvec::SmallVec<[usize; 1]>,
 }
 
-impl<'a, Q: Querier> TtTreeSink<'a, Q> {
-    fn new(src_querier: &'a Q) -> Self {
+impl<'a> TtTreeSink<'a> {
+    fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
-            src_querier,
+            cursor,
             text_pos: 0.into(),
-            token_pos: 0,
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
         }
     }
 }
 
-fn is_delimiter(kind: SyntaxKind) -> bool {
-    match kind {
-        T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true,
-        _ => false,
-    }
+fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
+    let texts = match d {
+        tt::Delimiter::Parenthesis => "()",
+        tt::Delimiter::Brace => "{}",
+        tt::Delimiter::Bracket => "[]",
+        tt::Delimiter::None => "",
+    };
+
+    let idx = closing as usize;
+    let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" };
+    text.into()
 }
 
-impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
+impl<'a> TreeSink for TtTreeSink<'a> {
     fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
         if kind == L_DOLLAR || kind == R_DOLLAR {
-            self.token_pos += n_tokens as usize;
+            self.cursor = self.cursor.bump_subtree();
             return;
         }
 
         for _ in 0..n_tokens {
-            self.buf += &self.src_querier.token(self.token_pos).1;
-            self.token_pos += 1;
+            if self.cursor.eof() {
+                break;
+            }
+
+            match self.cursor.token_tree() {
+                Some(tt::TokenTree::Leaf(leaf)) => {
+                    self.cursor = self.cursor.bump();
+                    self.buf += &format!("{}", leaf);
+                }
+                Some(tt::TokenTree::Subtree(subtree)) => {
+                    self.cursor = self.cursor.subtree().unwrap();
+                    self.buf += &delim_to_str(subtree.delimiter, false);
+                }
+                None => {
+                    if let Some(parent) = self.cursor.end() {
+                        self.cursor = self.cursor.bump();
+                        self.buf += &delim_to_str(parent.delimiter, true);
+                    }
+                }
+            };
         }
+
         self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
         self.inner.token(kind, text);
 
-        // Add a white space between tokens, only if both are not delimiters
-        if !is_delimiter(kind) {
-            let (last_kind, _, last_joint_to_next) = self.src_querier.token(self.token_pos - 1);
-            if !last_joint_to_next && last_kind.is_punct() {
-                let (cur_kind, _, _) = self.src_querier.token(self.token_pos);
-                if !is_delimiter(cur_kind) {
-                    if cur_kind.is_punct() {
-                        self.inner.token(WHITESPACE, " ".into());
-                    }
-                }
+        // Add whitespace between adjoint puncts
+        let next = self.cursor.bump();
+        if let (
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+        ) = (self.cursor.token_tree(), next.token_tree())
+        {
+            if curr.spacing == tt::Spacing::Alone {
+                self.inner.token(WHITESPACE, " ".into());
             }
         }
     }
@@ -344,6 +365,7 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
 mod tests {
     use super::*;
     use crate::tests::{expand, create_rules};
+    use ra_parser::TokenSource;
 
     #[test]
     fn convert_tt_token_source() {
@@ -363,24 +385,27 @@ mod tests {
         );
         let expansion = expand(&rules, "literals!(foo)");
         let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]);
-        let tt_src = SubtreeTokenSource::new(&buffer);
-
-        let query = tt_src.querier();
+        let mut tt_src = SubtreeTokenSource::new(&buffer);
+        let mut tokens = vec![];
+        while tt_src.current().kind != EOF {
+            tokens.push((tt_src.current().kind, tt_src.text()));
+            tt_src.bump();
+        }
 
         // [${]
         // [let] [a] [=] ['c'] [;]
-        assert_eq!(query.token(2 + 3).1, "'c'");
-        assert_eq!(query.token(2 + 3).0, CHAR);
+        assert_eq!(tokens[2 + 3].1, "'c'");
+        assert_eq!(tokens[2 + 3].0, CHAR);
         // [let] [c] [=] [1000] [;]
-        assert_eq!(query.token(2 + 5 + 3).1, "1000");
-        assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER);
+        assert_eq!(tokens[2 + 5 + 3].1, "1000");
+        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
         // [let] [f] [=] [12E+99_f64] [;]
-        assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64");
-        assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER);
+        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
+        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);
 
         // [let] [s] [=] ["rust1"] [;]
-        assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\"");
-        assert_eq!(query.token(2 + 15 + 3).0, STRING);
+        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
+        assert_eq!(tokens[2 + 15 + 3].0, STRING);
     }
 
     #[test]
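Note on syntax_bridge.rs: TtTreeSink likewise drops the index-based Querier lookups and walks the same TokenBuffer with its own Cursor, printing leaves directly and turning subtree delimiters into text via delim_to_str. A space is now emitted only between two neighbouring punct tokens whose spacing is Alone, replacing the old is_delimiter/is_punct checks. The slicing in delim_to_str picks the opening or closing character out of a two-character string; a standalone check of just that index arithmetic (hypothetical helper, not part of the patch):

    // For a two-character "open close" pair, [idx .. len - (1 - idx)] selects
    // the first char when idx == 0 (opening) and the second when idx == 1
    // (closing); the empty string maps to the empty delimiter.
    fn delim_char(pair: &str, closing: bool) -> &str {
        let idx = closing as usize;
        if pair.is_empty() { "" } else { &pair[idx..pair.len() - (1 - idx)] }
    }

    fn main() {
        assert_eq!(delim_char("()", false), "(");
        assert_eq!(delim_char("()", true), ")");
        assert_eq!(delim_char("{}", true), "}");
        assert_eq!(delim_char("[]", false), "[");
        assert_eq!(delim_char("", true), "");
    }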