Diffstat (limited to 'crates/ra_mbe/src/syntax_bridge.rs')
-rw-r--r--   crates/ra_mbe/src/syntax_bridge.rs   109
1 file changed, 67 insertions, 42 deletions
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 0a7e50c4e..c0a3fec35 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -3,8 +3,8 @@ use ra_syntax::{
     AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
     ast, SyntaxKind::*, TextUnit, T
 };
-
-use crate::subtree_source::{SubtreeTokenSource, Querier};
+use tt::buffer::{TokenBuffer, Cursor};
+use crate::subtree_source::{SubtreeTokenSource};
 use crate::ExpandError;

 /// Maps `tt::TokenId` to the relative range of the original token.
@@ -49,10 +49,9 @@ fn token_tree_to_syntax_node<F>(tt: &tt::Subtree, f: F) -> Result<TreeArc<Syntax
 where
     F: Fn(&mut ra_parser::TokenSource, &mut ra_parser::TreeSink),
 {
-    let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]);
+    let buffer = TokenBuffer::new(&[tt.clone().into()]);
     let mut token_source = SubtreeTokenSource::new(&buffer);
-    let querier = token_source.querier();
-    let mut tree_sink = TtTreeSink::new(querier.as_ref());
+    let mut tree_sink = TtTreeSink::new(buffer.begin());
     f(&mut token_source, &mut tree_sink);
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
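The point of this hunk: the sink no longer borrows a `Querier` from the token source and looks tokens up by index; it owns a `Cursor` into the same `TokenBuffer` the source reads from. A brief annotated restatement of the new wiring (an editorial sketch, not the full function; comments are mine, not part of the patch):

    // One TokenBuffer backs both sides: the source the parser pulls tokens
    // from and the sink that re-serializes the parser's output.
    let buffer = TokenBuffer::new(&[tt.clone().into()]);
    let mut token_source = SubtreeTokenSource::new(&buffer);
    // `buffer.begin()` hands the sink a Cursor<'_> positioned at the first
    // token; the sink advances its own cursor as the parser reports tokens,
    // so the two walks stay in step without a shared position index.
    let mut tree_sink = TtTreeSink::new(buffer.begin());
    f(&mut token_source, &mut tree_sink);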
@@ -259,11 +258,10 @@ fn convert_tt(
     Some(res)
 }

-struct TtTreeSink<'a, Q: Querier> {
+struct TtTreeSink<'a> {
     buf: String,
-    src_querier: &'a Q,
+    cursor: Cursor<'a>,
     text_pos: TextUnit,
-    token_pos: usize,
     inner: SyntaxTreeBuilder,

     // Number of roots
@@ -271,52 +269,75 @@ struct TtTreeSink<'a, Q: Querier> {
     roots: smallvec::SmallVec<[usize; 1]>,
 }

-impl<'a, Q: Querier> TtTreeSink<'a, Q> {
-    fn new(src_querier: &'a Q) -> Self {
+impl<'a> TtTreeSink<'a> {
+    fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
-            src_querier,
+            cursor,
             text_pos: 0.into(),
-            token_pos: 0,
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
         }
     }
 }

-fn is_delimiter(kind: SyntaxKind) -> bool {
-    match kind {
-        T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true,
-        _ => false,
-    }
+fn delim_to_str(d: tt::Delimiter, closing: bool) -> SmolStr {
+    let texts = match d {
+        tt::Delimiter::Parenthesis => "()",
+        tt::Delimiter::Brace => "{}",
+        tt::Delimiter::Bracket => "[]",
+        tt::Delimiter::None => "",
+    };
+
+    let idx = closing as usize;
+    let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" };
+    text.into()
 }

-impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
+impl<'a> TreeSink for TtTreeSink<'a> {
     fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
         if kind == L_DOLLAR || kind == R_DOLLAR {
-            self.token_pos += n_tokens as usize;
+            self.cursor = self.cursor.bump_subtree();
             return;
         }

         for _ in 0..n_tokens {
-            self.buf += &self.src_querier.token(self.token_pos).1;
-            self.token_pos += 1;
+            if self.cursor.eof() {
+                break;
+            }
+
+            match self.cursor.token_tree() {
+                Some(tt::TokenTree::Leaf(leaf)) => {
+                    self.cursor = self.cursor.bump();
+                    self.buf += &format!("{}", leaf);
+                }
+                Some(tt::TokenTree::Subtree(subtree)) => {
+                    self.cursor = self.cursor.subtree().unwrap();
+                    self.buf += &delim_to_str(subtree.delimiter, false);
+                }
+                None => {
+                    if let Some(parent) = self.cursor.end() {
+                        self.cursor = self.cursor.bump();
+                        self.buf += &delim_to_str(parent.delimiter, true);
+                    }
+                }
+            };
         }
+
         self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
         self.inner.token(kind, text);

-        // Add a white space between tokens, only if both are not delimiters
-        if !is_delimiter(kind) {
-            let (last_kind, _, last_joint_to_next) = self.src_querier.token(self.token_pos - 1);
-            if !last_joint_to_next && last_kind.is_punct() {
-                let (cur_kind, _, _) = self.src_querier.token(self.token_pos);
-                if !is_delimiter(cur_kind) {
-                    if cur_kind.is_punct() {
-                        self.inner.token(WHITESPACE, " ".into());
-                    }
-                }
-            }
+        // Add whitespace between adjoint puncts
+        let next = self.cursor.bump();
+        if let (
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+        ) = (self.cursor.token_tree(), next.token_tree())
+        {
+            if curr.spacing == tt::Spacing::Alone {
+                self.inner.token(WHITESPACE, " ".into());
+            }
         }
     }
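Two details in this hunk are worth spelling out. The cursor walk in `token()` distinguishes three cases: a leaf is printed and the cursor bumped past it; a subtree start descends via `cursor.subtree()` and emits the opening delimiter; `None` means the current subtree is exhausted, so the closing delimiter of the parent is emitted and the cursor steps out. The other detail is the slice arithmetic in `delim_to_str`. Below is a standalone demonstration of the same trick, using a hypothetical `pick` helper that is not part of the patch: `closing as usize` is 0 for the opening character and 1 for the closing one, so the slice keeps exactly one character of the two-character pair.

    // Hypothetical helper mirroring the index trick in delim_to_str.
    fn pick(pair: &str, closing: bool) -> &str {
        let idx = closing as usize;
        if !pair.is_empty() {
            // idx = 0 keeps pair[0..1] (opening), idx = 1 keeps pair[1..2] (closing).
            &pair[idx..pair.len() - (1 - idx)]
        } else {
            ""
        }
    }

    fn main() {
        assert_eq!(pick("()", false), "(");
        assert_eq!(pick("()", true), ")");
        assert_eq!(pick("{}", true), "}");
        assert_eq!(pick("", true), ""); // the Delimiter::None case, guarded by the emptiness check
    }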
@@ -344,6 +365,7 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
 mod tests {
     use super::*;
     use crate::tests::{expand, create_rules};
+    use ra_parser::TokenSource;

     #[test]
     fn convert_tt_token_source() {
@@ -363,24 +385,27 @@ mod tests {
         );
         let expansion = expand(&rules, "literals!(foo)");
         let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]);
-        let tt_src = SubtreeTokenSource::new(&buffer);
-
-        let query = tt_src.querier();
+        let mut tt_src = SubtreeTokenSource::new(&buffer);
+        let mut tokens = vec![];
+        while tt_src.current().kind != EOF {
+            tokens.push((tt_src.current().kind, tt_src.text()));
+            tt_src.bump();
+        }

         // [${]
         // [let] [a] [=] ['c'] [;]
-        assert_eq!(query.token(2 + 3).1, "'c'");
-        assert_eq!(query.token(2 + 3).0, CHAR);
+        assert_eq!(tokens[2 + 3].1, "'c'");
+        assert_eq!(tokens[2 + 3].0, CHAR);
         // [let] [c] [=] [1000] [;]
-        assert_eq!(query.token(2 + 5 + 3).1, "1000");
-        assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER);
+        assert_eq!(tokens[2 + 5 + 3].1, "1000");
+        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
         // [let] [f] [=] [12E+99_f64] [;]
-        assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64");
-        assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER);
+        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
+        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);

         // [let] [s] [=] ["rust1"] [;]
-        assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\"");
-        assert_eq!(query.token(2 + 15 + 3).0, STRING);
+        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
+        assert_eq!(tokens[2 + 15 + 3].0, STRING);
     }

     #[test]
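The rewritten test no longer asks a `Querier` for token N; it drains the `SubtreeTokenSource` through the `ra_parser::TokenSource` methods (hence the new `use ra_parser::TokenSource;` import) into a flat `tokens` vector and indexes that instead. The index arithmetic is easiest to read against the bracketed comments already in the test; the breakdown below is editorial, and the leading 2 appears to account for the two tokens rendered as `[${]`:

    // [${]                           -> indices 0, 1
    // [let] [a] [=] ['c'] [;]        -> indices 2..=6
    // tokens[2 + 3]      = tokens[5]  -> 'c'         (skip `let a =`)
    // [let] [c] [=] [1000] [;]       -> indices 7..=11
    // tokens[2 + 5 + 3]  = tokens[10] -> 1000        (one 5-token statement, then `let c =`)
    // tokens[2 + 10 + 3] = tokens[15] -> 12E+99_f64  (two statements, then `let f =`)
    // tokens[2 + 15 + 3] = tokens[20] -> "rust1"     (three statements, then `let s =`)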