author     Edwin Cheng <[email protected]>   2019-05-27 15:56:21 +0100
committer  Edwin Cheng <[email protected]>   2019-05-27 16:20:43 +0100
commit     d833ded3b4a12280f3e63e182ca7268a17cff90d (patch)
tree       2e9fb66389163fa1924f0e5e8819fda968eee032
parent     0d1c6076073c73f57340e256dc25da9d37311ef0 (diff)
Remove Querier and SubtreeWalk
-rw-r--r--  crates/ra_mbe/src/subtree_source.rs |  86
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  | 110
2 files changed, 104 insertions, 92 deletions
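
Context for the change: the old design exposed tokens through an index-based Querier lookup on a shared SubtreeWalk, while the new design walks the token buffer directly with a cursor held by each consumer. Below is a minimal standalone sketch of the two access patterns; Tok, VecQuerier and TokCursor are simplified stand-ins for illustration, not the real ra_mbe/tt types.

    #[derive(Clone)]
    struct Tok {
        text: String,
    }

    // Old shape: an index-based lookup behind a trait, shared between consumers.
    trait Querier {
        fn token(&self, idx: usize) -> Option<Tok>;
    }

    struct VecQuerier(Vec<Tok>);

    impl Querier for VecQuerier {
        fn token(&self, idx: usize) -> Option<Tok> {
            self.0.get(idx).cloned()
        }
    }

    // New shape: each consumer owns a cursor and advances it itself.
    struct TokCursor<'a> {
        toks: &'a [Tok],
        pos: usize,
    }

    impl<'a> TokCursor<'a> {
        fn current(&self) -> Option<&Tok> {
            self.toks.get(self.pos)
        }
        fn bump(&mut self) {
            self.pos += 1;
        }
    }

    fn main() {
        let toks = vec![Tok { text: "let".into() }, Tok { text: "a".into() }];

        // Old: callers track a position and ask the querier for each index.
        let q = VecQuerier(toks.clone());
        assert_eq!(q.token(1).unwrap().text, "a");

        // New: callers hold a cursor and bump it as tokens are consumed.
        let mut c = TokCursor { toks: &toks, pos: 0 };
        c.bump();
        assert_eq!(c.current().unwrap().text, "a");
    }
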
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index c4f79f38a..2ba0b1601 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -1,13 +1,8 @@
 use ra_parser::{TokenSource, Token};
 use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T};
 use std::cell::{RefCell, Cell};
-use std::sync::Arc;
 use tt::buffer::{TokenBuffer, Cursor};
 
-pub(crate) trait Querier {
-    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool);
-}
-
 #[derive(Debug, Clone, Eq, PartialEq)]
 struct TtToken {
     pub kind: SyntaxKind,
@@ -15,20 +10,47 @@ struct TtToken {
     pub text: SmolStr,
 }
 
-// A wrapper class for ref cell
-#[derive(Debug)]
-pub(crate) struct SubtreeWalk<'a> {
+pub(crate) struct SubtreeTokenSource<'a> {
     start: Cursor<'a>,
     cursor: Cell<Cursor<'a>>,
     cached: RefCell<Vec<Option<TtToken>>>,
+    curr: (Token, usize),
+}
+
+impl<'a> SubtreeTokenSource<'a> {
+    // Helper function used in test
+    #[allow(unused)]
+    pub fn text(&self) -> SmolStr {
+        match self.get(self.curr.1) {
+            Some(tt) => tt.text,
+            _ => SmolStr::new(""),
+        }
+    }
 }
 
-impl<'a> SubtreeWalk<'a> {
-    fn new(cursor: Cursor<'a>) -> Self {
-        SubtreeWalk {
+impl<'a> SubtreeTokenSource<'a> {
+    pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
+        let cursor = buffer.begin();
+
+        let mut res = SubtreeTokenSource {
+            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
             start: cursor,
             cursor: Cell::new(cursor),
             cached: RefCell::new(Vec::with_capacity(10)),
+        };
+        res.curr = (res.mk_token(0), 0);
+        res
+    }
+
+    pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> {
+        let res = self.collect_token_trees(parsed_tokens);
+        res
+    }
+
+    fn mk_token(&self, pos: usize) -> Token {
+        match self.get(pos) {
+            Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
+            None => Token { kind: EOF, is_jointed_to_next: false },
+        }
+    }
 
@@ -109,46 +131,6 @@ impl<'a> SubtreeWalk<'a> {
     }
 }
 
-impl<'a> Querier for SubtreeWalk<'a> {
-    fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool) {
-        self.get(uidx)
-            .map(|tkn| (tkn.kind, tkn.text, tkn.is_joint_to_next))
-            .unwrap_or_else(|| (SyntaxKind::EOF, "".into(), false))
-    }
-}
-
-pub(crate) struct SubtreeTokenSource<'a> {
-    walker: Arc<SubtreeWalk<'a>>,
-    curr: (Token, usize),
-}
-
-impl<'a> SubtreeTokenSource<'a> {
-    pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
-        let mut res = SubtreeTokenSource {
-            walker: Arc::new(SubtreeWalk::new(buffer.begin())),
-            curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
-        };
-        res.curr = (res.mk_token(0), 0);
-        res
-    }
-
-    pub fn querier(&self) -> Arc<SubtreeWalk<'a>> {
-        self.walker.clone()
-    }
-
-    pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> {
-        let res = self.walker.collect_token_trees(parsed_tokens);
-        res
-    }
-
-    fn mk_token(&self, pos: usize) -> Token {
-        match self.walker.get(pos) {
-            Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
-            None => Token { kind: EOF, is_jointed_to_next: false },
-        }
-    }
-}
-
 impl<'a> TokenSource for SubtreeTokenSource<'a> {
     fn current(&self) -> Token {
         self.curr.0
@@ -170,7 +152,7 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
 
     /// Is the current token a specified keyword?
     fn is_keyword(&self, kw: &str) -> bool {
-        match self.walker.get(self.curr.1) {
+        match self.get(self.curr.1) {
             Some(t) => t.text == *kw,
             _ => false,
         }
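
A note on the reshaped SubtreeTokenSource above: new first builds the value with a placeholder curr and then seeds it from position 0, because mk_token needs &self; mk_token itself maps a missing position to an explicit EOF token rather than panicking. A rough standalone sketch of that construct-then-seed pattern (the Kind enum and SimpleTokenSource are simplified stand-ins, not the real ra_parser/ra_mbe types):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Kind {
        Ident,
        Punct,
        Eof,
    }

    struct SimpleTokenSource {
        tokens: Vec<(Kind, &'static str)>,
        // Cached (token, position), mirroring `curr: (Token, usize)`.
        curr: ((Kind, &'static str), usize),
    }

    impl SimpleTokenSource {
        fn new(tokens: Vec<(Kind, &'static str)>) -> SimpleTokenSource {
            // Build with a placeholder first, then seed `curr` from position 0,
            // because computing the first token needs `&self`.
            let mut res = SimpleTokenSource { tokens, curr: ((Kind::Eof, ""), 0) };
            res.curr = (res.mk_token(0), 0);
            res
        }

        // Missing positions collapse to an explicit EOF token instead of panicking.
        fn mk_token(&self, pos: usize) -> (Kind, &'static str) {
            self.tokens.get(pos).copied().unwrap_or((Kind::Eof, ""))
        }

        fn current(&self) -> (Kind, &'static str) {
            self.curr.0
        }

        fn bump(&mut self) {
            let next = self.curr.1 + 1;
            self.curr = (self.mk_token(next), next);
        }
    }

    fn main() {
        let mut src =
            SimpleTokenSource::new(vec![(Kind::Ident, "let"), (Kind::Ident, "a"), (Kind::Punct, ";")]);

        assert_eq!(src.current(), (Kind::Ident, "let")); // seeded at construction
        src.bump();
        assert_eq!(src.current(), (Kind::Ident, "a"));
        src.bump();
        src.bump();
        assert_eq!(src.current().0, Kind::Eof); // reads past the end collapse to EOF
    }
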
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 0a7e50c4e..058f65ffd 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -3,8 +3,9 @@ use ra_syntax::{
     AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
     ast, SyntaxKind::*, TextUnit, T
 };
+use tt::buffer::Cursor;
 
-use crate::subtree_source::{SubtreeTokenSource, Querier};
+use crate::subtree_source::{SubtreeTokenSource};
 use crate::ExpandError;
 
 /// Maps `tt::TokenId` to the relative range of the original token.
@@ -51,8 +52,7 @@ where
 {
     let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let querier = token_source.querier();
-    let mut tree_sink = TtTreeSink::new(querier.as_ref());
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
     f(&mut token_source, &mut tree_sink);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
@@ -259,11 +259,10 @@ fn convert_tt(
     Some(res)
 }
 
-struct TtTreeSink<'a, Q: Querier> {
+struct TtTreeSink<'a> {
     buf: String,
-    src_querier: &'a Q,
+    cursor: Cursor<'a>,
     text_pos: TextUnit,
-    token_pos: usize,
     inner: SyntaxTreeBuilder,
 
     // Number of roots
@@ -271,52 +270,79 @@ struct TtTreeSink<'a, Q: Querier> {
     roots: smallvec::SmallVec<[usize; 1]>,
 }
 
-impl<'a, Q: Querier> TtTreeSink<'a, Q> {
-    fn new(src_querier: &'a Q) -> Self {
+impl<'a> TtTreeSink<'a> {
+    fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
-            src_querier,
+            cursor,
             text_pos: 0.into(),
-            token_pos: 0,
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
         }
     }
 }
 
-fn is_delimiter(kind: SyntaxKind) -> bool {
-    match kind {
-        T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true,
-        _ => false,
-    }
+fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
+    let texts = match d {
+        tt::Delimiter::Parenthesis => "()",
+        tt::Delimiter::Brace => "{}",
+        tt::Delimiter::Bracket => "[]",
+        tt::Delimiter::None => "",
+    };
+
+    let idx = closing as usize;
+    let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" };
+    text.into()
 }
 
-impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
+impl<'a> TreeSink for TtTreeSink<'a> {
     fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
         if kind == L_DOLLAR || kind == R_DOLLAR {
-            self.token_pos += n_tokens as usize;
+            if let Some(_) = self.cursor.end() {
+                self.cursor = self.cursor.bump();
+            } else {
+                self.cursor = self.cursor.subtree().unwrap();
+            }
             return;
         }
 
         for _ in 0..n_tokens {
-            self.buf += &self.src_querier.token(self.token_pos).1;
-            self.token_pos += 1;
+            if self.cursor.eof() {
+                break;
+            }
+
+            match self.cursor.token_tree() {
+                Some(tt::TokenTree::Leaf(leaf)) => {
+                    self.cursor = self.cursor.bump();
+                    self.buf += &format!("{}", leaf);
+                }
+                Some(tt::TokenTree::Subtree(subtree)) => {
+                    self.cursor = self.cursor.subtree().unwrap();
+                    self.buf += &delim_to_str(subtree.delimiter, false);
+                }
+                None => {
+                    if let Some(parent) = self.cursor.end() {
+                        self.cursor = self.cursor.bump();
+                        self.buf += &delim_to_str(parent.delimiter, true);
+                    }
+                }
+            };
         }
+
         self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
         self.inner.token(kind, text);
 
-        // Add a white space between tokens, only if both are not delimiters
-        if !is_delimiter(kind) {
-            let (last_kind, _, last_joint_to_next) = self.src_querier.token(self.token_pos - 1);
-            if !last_joint_to_next && last_kind.is_punct() {
-                let (cur_kind, _, _) = self.src_querier.token(self.token_pos);
-                if !is_delimiter(cur_kind) {
-                    if cur_kind.is_punct() {
-                        self.inner.token(WHITESPACE, " ".into());
-                    }
-                }
+        // Add whitespace between adjoint puncts
+        let next = self.cursor.bump();
+        if let (
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+        ) = (self.cursor.token_tree(), next.token_tree())
+        {
+            if curr.spacing == tt::Spacing::Alone {
+                self.inner.token(WHITESPACE, " ".into());
             }
         }
     }
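
Two details of the new sink are easy to miss: delim_to_str selects the opening or closing half of a two-character delimiter string by slicing, and a space is only emitted between two adjacent punct leaves when the first one's spacing is Alone. A standalone sketch of both rules, using a simplified Delim enum and plain tuples as stand-ins for the real tt types:

    #[derive(Clone, Copy)]
    enum Delim {
        Paren,
        Brace,
        Bracket,
        None,
    }

    // Same slicing trick as the new `delim_to_str`: index 0 keeps the opening
    // character, index 1 keeps the closing one, and the empty case stays empty.
    fn delim_to_str(d: Delim, closing: bool) -> &'static str {
        let texts = match d {
            Delim::Paren => "()",
            Delim::Brace => "{}",
            Delim::Bracket => "[]",
            Delim::None => "",
        };
        let idx = closing as usize;
        if texts.is_empty() {
            ""
        } else {
            &texts[idx..texts.len() - (1 - idx)]
        }
    }

    #[derive(Clone, Copy, PartialEq)]
    enum Spacing {
        Alone,
        Joint,
    }

    // The whitespace rule: a space goes between two adjacent punct tokens only
    // when the first one's spacing is `Alone` (so `=>` stays joint).
    fn needs_space(curr: Option<(char, Spacing)>, next: Option<(char, Spacing)>) -> bool {
        match (curr, next) {
            (Some((_, spacing)), Some(_)) => spacing == Spacing::Alone,
            _ => false,
        }
    }

    fn main() {
        assert_eq!(delim_to_str(Delim::Paren, false), "(");
        assert_eq!(delim_to_str(Delim::Paren, true), ")");
        assert_eq!(delim_to_str(Delim::Brace, false), "{");
        assert_eq!(delim_to_str(Delim::Bracket, true), "]");
        assert_eq!(delim_to_str(Delim::None, true), "");

        assert!(needs_space(Some(('+', Spacing::Alone)), Some(('-', Spacing::Alone))));
        assert!(!needs_space(Some(('=', Spacing::Joint)), Some(('>', Spacing::Alone))));
        assert!(!needs_space(Some((';', Spacing::Alone)), None));
    }
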
@@ -344,6 +370,7 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
 mod tests {
     use super::*;
     use crate::tests::{expand, create_rules};
+    use ra_parser::TokenSource;
 
     #[test]
     fn convert_tt_token_source() {
@@ -363,24 +390,27 @@ mod tests {
         );
         let expansion = expand(&rules, "literals!(foo)");
         let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]);
-        let tt_src = SubtreeTokenSource::new(&buffer);
-
-        let query = tt_src.querier();
+        let mut tt_src = SubtreeTokenSource::new(&buffer);
+        let mut tokens = vec![];
+        while tt_src.current().kind != EOF {
+            tokens.push((tt_src.current().kind, tt_src.text()));
+            tt_src.bump();
+        }
 
         // [${]
         // [let] [a] [=] ['c'] [;]
-        assert_eq!(query.token(2 + 3).1, "'c'");
-        assert_eq!(query.token(2 + 3).0, CHAR);
+        assert_eq!(tokens[2 + 3].1, "'c'");
+        assert_eq!(tokens[2 + 3].0, CHAR);
         // [let] [c] [=] [1000] [;]
-        assert_eq!(query.token(2 + 5 + 3).1, "1000");
-        assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER);
+        assert_eq!(tokens[2 + 5 + 3].1, "1000");
+        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
         // [let] [f] [=] [12E+99_f64] [;]
-        assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64");
-        assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER);
+        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
+        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);
 
         // [let] [s] [=] ["rust1"] [;]
-        assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\"");
-        assert_eq!(query.token(2 + 15 + 3).0, STRING);
+        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
+        assert_eq!(tokens[2 + 15 + 3].0, STRING);
     }
 
     #[test]
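
The rewritten test above no longer asks a Querier for token N; it drains the source through current/bump until EOF and then indexes the collected vector. A rough standalone sketch of that drain-then-index pattern (SliceSource and the string kinds are illustrative stand-ins, not the real SubtreeTokenSource):

    // Any source that exposes current/bump and an end-of-input signal can be drained like this.
    trait TokenSourceLike {
        fn current(&self) -> Option<(&'static str, &'static str)>; // (kind, text), None at EOF
        fn bump(&mut self);
    }

    struct SliceSource<'a> {
        toks: &'a [(&'static str, &'static str)],
        pos: usize,
    }

    impl<'a> TokenSourceLike for SliceSource<'a> {
        fn current(&self) -> Option<(&'static str, &'static str)> {
            self.toks.get(self.pos).copied()
        }
        fn bump(&mut self) {
            self.pos += 1;
        }
    }

    // Collect every token up front so assertions can index by position.
    fn drain(src: &mut impl TokenSourceLike) -> Vec<(&'static str, &'static str)> {
        let mut tokens = vec![];
        while let Some(tok) = src.current() {
            tokens.push(tok);
            src.bump();
        }
        tokens
    }

    fn main() {
        // Token stream for `let a = 'c';` (kinds are illustrative strings).
        let toks = [("kw", "let"), ("ident", "a"), ("punct", "="), ("char", "'c'"), ("punct", ";")];
        let mut src = SliceSource { toks: &toks, pos: 0 };
        let tokens = drain(&mut src);

        // Index into the drained vector, as the updated test does with `tokens[2 + 3]`.
        assert_eq!(tokens[3], ("char", "'c'"));
        assert_eq!(tokens.len(), 5);
    }
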