about summary refs log tree commit diff
path: root/crates/ra_mbe/src/syntax_bridge.rs
diff options
context:
space:
mode:
authorEdwin Cheng <[email protected]>2019-05-27 15:56:21 +0100
committerEdwin Cheng <[email protected]>2019-05-27 16:20:43 +0100
commitd833ded3b4a12280f3e63e182ca7268a17cff90d (patch)
tree2e9fb66389163fa1924f0e5e8819fda968eee032 /crates/ra_mbe/src/syntax_bridge.rs
parent0d1c6076073c73f57340e256dc25da9d37311ef0 (diff)
Remove Queier and SubtreeWalk
Diffstat (limited to 'crates/ra_mbe/src/syntax_bridge.rs')
-rw-r--r-- crates/ra_mbe/src/syntax_bridge.rs | 110
1 file changed, 70 insertions(+), 40 deletions(-)
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 0a7e50c4e..058f65ffd 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -3,8 +3,9 @@ use ra_syntax::{
3 AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, 3 AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
4 ast, SyntaxKind::*, TextUnit, T 4 ast, SyntaxKind::*, TextUnit, T
5}; 5};
6use tt::buffer::Cursor;
6 7
7use crate::subtree_source::{SubtreeTokenSource, Querier}; 8use crate::subtree_source::{SubtreeTokenSource};
8use crate::ExpandError; 9use crate::ExpandError;
9 10
10/// Maps `tt::TokenId` to the relative range of the original token. 11/// Maps `tt::TokenId` to the relative range of the original token.
@@ -51,8 +52,7 @@ where
51{ 52{
52 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); 53 let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]);
53 let mut token_source = SubtreeTokenSource::new(&buffer); 54 let mut token_source = SubtreeTokenSource::new(&buffer);
54 let querier = token_source.querier(); 55 let mut tree_sink = TtTreeSink::new(buffer.begin());
55 let mut tree_sink = TtTreeSink::new(querier.as_ref());
56 f(&mut token_source, &mut tree_sink); 56 f(&mut token_source, &mut tree_sink);
57 if tree_sink.roots.len() != 1 { 57 if tree_sink.roots.len() != 1 {
58 return Err(ExpandError::ConversionError); 58 return Err(ExpandError::ConversionError);
@@ -259,11 +259,10 @@ fn convert_tt(
259 Some(res) 259 Some(res)
260} 260}
261 261
262struct TtTreeSink<'a, Q: Querier> { 262struct TtTreeSink<'a> {
263 buf: String, 263 buf: String,
264 src_querier: &'a Q, 264 cursor: Cursor<'a>,
265 text_pos: TextUnit, 265 text_pos: TextUnit,
266 token_pos: usize,
267 inner: SyntaxTreeBuilder, 266 inner: SyntaxTreeBuilder,
268 267
269 // Number of roots 268 // Number of roots
@@ -271,52 +270,79 @@ struct TtTreeSink<'a, Q: Querier> {
271 roots: smallvec::SmallVec<[usize; 1]>, 270 roots: smallvec::SmallVec<[usize; 1]>,
272} 271}
273 272
274impl<'a, Q: Querier> TtTreeSink<'a, Q> { 273impl<'a> TtTreeSink<'a> {
275 fn new(src_querier: &'a Q) -> Self { 274 fn new(cursor: Cursor<'a>) -> Self {
276 TtTreeSink { 275 TtTreeSink {
277 buf: String::new(), 276 buf: String::new(),
278 src_querier, 277 cursor,
279 text_pos: 0.into(), 278 text_pos: 0.into(),
280 token_pos: 0,
281 inner: SyntaxTreeBuilder::default(), 279 inner: SyntaxTreeBuilder::default(),
282 roots: smallvec::SmallVec::new(), 280 roots: smallvec::SmallVec::new(),
283 } 281 }
284 } 282 }
285} 283}
286 284
287fn is_delimiter(kind: SyntaxKind) -> bool { 285fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
288 match kind { 286 let texts = match d {
289 T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true, 287 tt::Delimiter::Parenthesis => "()",
290 _ => false, 288 tt::Delimiter::Brace => "{}",
291 } 289 tt::Delimiter::Bracket => "[]",
290 tt::Delimiter::None => "",
291 };
292
293 let idx = closing as usize;
294 let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" };
295 text.into()
292} 296}
293 297
294impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { 298impl<'a> TreeSink for TtTreeSink<'a> {
295 fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { 299 fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
296 if kind == L_DOLLAR || kind == R_DOLLAR { 300 if kind == L_DOLLAR || kind == R_DOLLAR {
297 self.token_pos += n_tokens as usize; 301 if let Some(_) = self.cursor.end() {
302 self.cursor = self.cursor.bump();
303 } else {
304 self.cursor = self.cursor.subtree().unwrap();
305 }
298 return; 306 return;
299 } 307 }
300 308
301 for _ in 0..n_tokens { 309 for _ in 0..n_tokens {
302 self.buf += &self.src_querier.token(self.token_pos).1; 310 if self.cursor.eof() {
303 self.token_pos += 1; 311 break;
312 }
313
314 match self.cursor.token_tree() {
315 Some(tt::TokenTree::Leaf(leaf)) => {
316 self.cursor = self.cursor.bump();
317 self.buf += &format!("{}", leaf);
318 }
319 Some(tt::TokenTree::Subtree(subtree)) => {
320 self.cursor = self.cursor.subtree().unwrap();
321 self.buf += &delim_to_str(subtree.delimiter, false);
322 }
323 None => {
324 if let Some(parent) = self.cursor.end() {
325 self.cursor = self.cursor.bump();
326 self.buf += &delim_to_str(parent.delimiter, true);
327 }
328 }
329 };
304 } 330 }
331
305 self.text_pos += TextUnit::of_str(&self.buf); 332 self.text_pos += TextUnit::of_str(&self.buf);
306 let text = SmolStr::new(self.buf.as_str()); 333 let text = SmolStr::new(self.buf.as_str());
307 self.buf.clear(); 334 self.buf.clear();
308 self.inner.token(kind, text); 335 self.inner.token(kind, text);
309 336
310 // Add a white space between tokens, only if both are not delimiters 337 // Add whitespace between adjoint puncts
311 if !is_delimiter(kind) { 338 let next = self.cursor.bump();
312 let (last_kind, _, last_joint_to_next) = self.src_querier.token(self.token_pos - 1); 339 if let (
313 if !last_joint_to_next && last_kind.is_punct() { 340 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
314 let (cur_kind, _, _) = self.src_querier.token(self.token_pos); 341 Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
315 if !is_delimiter(cur_kind) { 342 ) = (self.cursor.token_tree(), next.token_tree())
316 if cur_kind.is_punct() { 343 {
317 self.inner.token(WHITESPACE, " ".into()); 344 if curr.spacing == tt::Spacing::Alone {
318 } 345 self.inner.token(WHITESPACE, " ".into());
319 }
320 } 346 }
321 } 347 }
322 } 348 }
@@ -344,6 +370,7 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
344mod tests { 370mod tests {
345 use super::*; 371 use super::*;
346 use crate::tests::{expand, create_rules}; 372 use crate::tests::{expand, create_rules};
373 use ra_parser::TokenSource;
347 374
348 #[test] 375 #[test]
349 fn convert_tt_token_source() { 376 fn convert_tt_token_source() {
@@ -363,24 +390,27 @@ mod tests {
363 ); 390 );
364 let expansion = expand(&rules, "literals!(foo)"); 391 let expansion = expand(&rules, "literals!(foo)");
365 let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]); 392 let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]);
366 let tt_src = SubtreeTokenSource::new(&buffer); 393 let mut tt_src = SubtreeTokenSource::new(&buffer);
367 394 let mut tokens = vec![];
368 let query = tt_src.querier(); 395 while tt_src.current().kind != EOF {
396 tokens.push((tt_src.current().kind, tt_src.text()));
397 tt_src.bump();
398 }
369 399
370 // [${] 400 // [${]
371 // [let] [a] [=] ['c'] [;] 401 // [let] [a] [=] ['c'] [;]
372 assert_eq!(query.token(2 + 3).1, "'c'"); 402 assert_eq!(tokens[2 + 3].1, "'c'");
373 assert_eq!(query.token(2 + 3).0, CHAR); 403 assert_eq!(tokens[2 + 3].0, CHAR);
374 // [let] [c] [=] [1000] [;] 404 // [let] [c] [=] [1000] [;]
375 assert_eq!(query.token(2 + 5 + 3).1, "1000"); 405 assert_eq!(tokens[2 + 5 + 3].1, "1000");
376 assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER); 406 assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
377 // [let] [f] [=] [12E+99_f64] [;] 407 // [let] [f] [=] [12E+99_f64] [;]
378 assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64"); 408 assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
379 assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER); 409 assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);
380 410
381 // [let] [s] [=] ["rust1"] [;] 411 // [let] [s] [=] ["rust1"] [;]
382 assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\""); 412 assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
383 assert_eq!(query.token(2 + 15 + 3).0, STRING); 413 assert_eq!(tokens[2 + 15 + 3].0, STRING);
384 } 414 }
385 415
386 #[test] 416 #[test]