Diffstat (limited to 'crates/ra_mbe')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 359
-rw-r--r--  crates/ra_mbe/src/tests.rs         |  62
2 files changed, 250 insertions, 171 deletions
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index fcb73fbc7..e3cde9eed 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -3,12 +3,11 @@
 use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
     ast::{self, make::tokens::doc_comment},
-    tokenize, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind,
+    tokenize, AstToken, Parse, SmolStr, SyntaxKind,
     SyntaxKind::*,
-    SyntaxNode, SyntaxTreeBuilder, TextRange, TextUnit, Token, T,
+    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextUnit, Token as RawToken, T,
 };
 use rustc_hash::FxHashMap;
-use std::iter::successors;
 use tt::buffer::{Cursor, TokenBuffer};
 
 use crate::subtree_source::SubtreeTokenSource;
@@ -50,10 +49,8 @@ pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenM
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
     let global_offset = node.text_range().start();
-    let mut c = Convertor {
-        id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
-    };
-    let subtree = c.go(node)?;
+    let mut c = Convertor::new(node, global_offset);
+    let subtree = c.go()?;
     Some((subtree, c.id_alloc.map))
 }
 
@@ -152,6 +149,10 @@ impl TokenMap {
             }
         }
     }
+
+    fn remove_delim(&mut self, token_id: tt::TokenId) {
+        self.entries.retain(|(tid, _)| *tid != token_id);
+    }
 }
 
 /// Returns the textual content of a doc comment block as a quoted string
@@ -237,25 +238,26 @@ impl TokenIdAlloc {
         token_id
     }
 
-    fn delim(&mut self, open_abs_range: TextRange, close_abs_range: TextRange) -> tt::TokenId {
-        let open_relative_range = open_abs_range - self.global_offset;
-        let close_relative_range = close_abs_range - self.global_offset;
-        let token_id = tt::TokenId(self.next_id);
-        self.next_id += 1;
-
-        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
-        token_id
-    }
-
     fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
-        self.map.insert_delim(token_id, open_abs_range, open_abs_range);
+        self.map.insert_delim(
+            token_id,
+            open_abs_range - self.global_offset,
+            open_abs_range - self.global_offset,
+        );
         token_id
     }
 
-    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: TextRange) {
-        self.map.update_close_delim(id, close_abs_range);
+    fn close_delim(&mut self, id: tt::TokenId, close_abs_range: Option<TextRange>) {
+        match close_abs_range {
+            None => {
+                self.map.remove_delim(id);
+            }
+            Some(close) => {
+                self.map.update_close_delim(id, close - self.global_offset);
+            }
+        }
     }
 }
 
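Note: the close_delim signature change above is what makes the error recovery later in this diff possible. Passing None means "this open delimiter was never closed", and the provisional map entry is dropped instead of updated. A minimal standalone sketch of that Option-based close (hypothetical Map type, not the real TokenMap):

    // Sketch only: the real TokenMap stores TokenTextRange entries keyed by
    // tt::TokenId; this models just the Option-based close logic.
    struct Map {
        entries: Vec<(u32, (u32, Option<u32>))>, // (token id, (open offset, close offset))
    }

    impl Map {
        fn close_delim(&mut self, id: u32, close: Option<u32>) {
            match close {
                // The delimiter never closed: forget the provisional entry.
                None => self.entries.retain(|(tid, _)| *tid != id),
                // Normal path: record where the delimiter closed.
                Some(c) => {
                    if let Some((_, range)) = self.entries.iter_mut().find(|(tid, _)| *tid == id) {
                        range.1 = Some(c);
                    }
                }
            }
        }
    }

    fn main() {
        let mut map = Map { entries: vec![(0, (5, None)), (1, (9, None))] };
        map.close_delim(0, Some(12)); // "(" opened at 5, ")" found at 12
        map.close_delim(1, None);     // opener at 9 never closed: entry dropped
        assert_eq!(map.entries, vec![(0, (5, Some(12)))]);
    }
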
@@ -264,10 +266,20 @@ struct RawConvertor<'a> {
     text: &'a str,
     offset: TextUnit,
     id_alloc: TokenIdAlloc,
-    inner: std::slice::Iter<'a, Token>,
+    inner: std::slice::Iter<'a, RawToken>,
 }
 
-impl RawConvertor<'_> {
+trait SrcToken {
+    fn kind(&self) -> SyntaxKind;
+
+    fn to_char(&self) -> Option<char>;
+
+    fn to_text(&self) -> SmolStr;
+}
+
+trait TokenConvertor {
+    type Token: SrcToken;
+
     fn go(&mut self) -> Option<tt::Subtree> {
         let mut subtree = tt::Subtree::default();
         subtree.delimiter = None;
@@ -285,33 +297,22 @@ impl RawConvertor<'_> {
         Some(subtree)
     }
 
-    fn bump(&mut self) -> Option<(Token, TextRange)> {
-        let token = self.inner.next()?;
-        let range = TextRange::offset_len(self.offset, token.len);
-        self.offset += token.len;
-        Some((*token, range))
-    }
-
-    fn peek(&self) -> Option<Token> {
-        self.inner.as_slice().get(0).cloned()
-    }
-
     fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
         let (token, range) = match self.bump() {
             None => return,
             Some(it) => it,
         };
 
-        let k: SyntaxKind = token.kind;
+        let k: SyntaxKind = token.kind();
         if k == COMMENT {
-            let node = doc_comment(&self.text[range]);
-            if let Some(tokens) = convert_doc_comment(&node) {
+            if let Some(tokens) = self.convert_doc_comment(&token) {
                 result.extend(tokens);
             }
             return;
         }
 
         result.push(if k.is_punct() {
+            assert_eq!(range.len().to_usize(), 1);
             let delim = match k {
                 T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
                 T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
@@ -321,40 +322,51 @@ impl RawConvertor<'_> {
 
         if let Some((kind, closed)) = delim {
             let mut subtree = tt::Subtree::default();
-            let id = self.id_alloc.open_delim(range);
+            let id = self.id_alloc().open_delim(range);
             subtree.delimiter = Some(tt::Delimiter { kind, id });
 
-            while self.peek().map(|it| it.kind != closed).unwrap_or(false) {
+            while self.peek().map(|it| it.kind() != closed).unwrap_or(false) {
                 self.collect_leaf(&mut subtree.token_trees);
             }
             let last_range = match self.bump() {
-                None => return,
+                None => {
+                    // For error resilience, we insert a char punct for the opening delim here
+                    self.id_alloc().close_delim(id, None);
+                    let leaf: tt::Leaf = tt::Punct {
+                        id: self.id_alloc().alloc(range),
+                        char: token.to_char().unwrap(),
+                        spacing: tt::Spacing::Alone,
+                    }
+                    .into();
+                    result.push(leaf.into());
+                    result.extend(subtree.token_trees);
+                    return;
+                }
                 Some(it) => it.1,
             };
-            self.id_alloc.close_delim(id, last_range);
+            self.id_alloc().close_delim(id, Some(last_range));
             subtree.into()
         } else {
             let spacing = match self.peek() {
                 Some(next)
-                    if next.kind.is_trivia()
-                        || next.kind == T!['[']
-                        || next.kind == T!['{']
-                        || next.kind == T!['('] =>
+                    if next.kind().is_trivia()
+                        || next.kind() == T!['[']
+                        || next.kind() == T!['{']
+                        || next.kind() == T!['('] =>
                 {
                     tt::Spacing::Alone
                 }
-                Some(next) if next.kind.is_punct() => tt::Spacing::Joint,
+                Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
                 _ => tt::Spacing::Alone,
             };
-            let char =
-                self.text[range].chars().next().expect("Token from lexer must be single char");
+            let char = token.to_char().expect("Token from lexer must be single char");
 
-            tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc.alloc(range) }).into()
+            tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
         }
     } else {
         macro_rules! make_leaf {
             ($i:ident) => {
-                tt::$i { id: self.id_alloc.alloc(range), text: self.text[range].into() }.into()
+                tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into()
             };
         }
         let leaf: tt::Leaf = match k {
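Note: the new None arm above is the error-resilience path. When input ends before the matching close token, the collected children are not discarded: the opener is demoted to a plain punct leaf and the children are spliced into the parent. A flattened sketch of that fallback (hypothetical names; strings stand in for token trees):

    // Sketch only: models the shape of the None arm, not the real tt types.
    fn finish_delimited(
        opener: char,
        children: Vec<String>,
        found_close: bool,
        result: &mut Vec<String>,
    ) {
        if found_close {
            // Normal path: the children become a delimited subtree.
            result.push(format!("subtree '{}' {:?}", opener, children));
        } else {
            // Error resilience: emit the opener as a lone punct, then splice
            // the children into the parent instead of forming a subtree.
            result.push(format!("punct '{}'", opener));
            result.extend(children);
        }
    }

    fn main() {
        let mut result = Vec::new();
        finish_delimited('(', vec!["ident a".to_string()], false, &mut result);
        assert_eq!(result, ["punct '('", "ident a"]);
    }
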
@@ -368,133 +380,168 @@ impl RawConvertor<'_> {
             leaf.into()
         });
     }
+
+    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
+
+    fn peek(&self) -> Option<Self::Token>;
+
+    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+}
+
+impl<'a> SrcToken for (RawToken, &'a str) {
+    fn kind(&self) -> SyntaxKind {
+        self.0.kind
+    }
+
+    fn to_char(&self) -> Option<char> {
+        self.1.chars().next()
+    }
+
+    fn to_text(&self) -> SmolStr {
+        self.1.into()
+    }
+}
+
+impl RawConvertor<'_> {}
+
+impl<'a> TokenConvertor for RawConvertor<'a> {
+    type Token = (RawToken, &'a str);
+
+    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+        convert_doc_comment(&doc_comment(token.1))
+    }
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+        let token = self.inner.next()?;
+        let range = TextRange::offset_len(self.offset, token.len);
+        self.offset += token.len;
+
+        Some(((*token, &self.text[range]), range))
+    }
+
+    fn peek(&self) -> Option<Self::Token> {
+        let token = self.inner.as_slice().get(0).cloned();
+
+        token.map(|it| {
+            let range = TextRange::offset_len(self.offset, it.len);
+            (it, &self.text[range])
+        })
+    }
+
+    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+        &mut self.id_alloc
+    }
 }
 
-// FIXME: There are some duplicate logic between RawConvertor and Convertor
-// It would be nice to refactor to converting SyntaxNode to ra_parser::Token and thus
-// use RawConvertor directly. But performance-wise it may not be a good idea ?
 struct Convertor {
     id_alloc: TokenIdAlloc,
+    current: Option<SyntaxToken>,
+    range: TextRange,
+    punct_offset: Option<(SyntaxToken, TextUnit)>,
 }
 
 impl Convertor {
-    fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
-        // This tree is empty
-        if tt.first_child_or_token().is_none() {
-            return Some(tt::Subtree { token_trees: vec![], delimiter: None });
+    fn new(node: &SyntaxNode, global_offset: TextUnit) -> Convertor {
+        Convertor {
+            id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
+            current: node.first_token(),
+            range: node.text_range(),
+            punct_offset: None,
         }
+    }
+}
 
-        let first_child = tt.first_child_or_token()?;
-        let last_child = tt.last_child_or_token()?;
+enum SynToken {
+    Ordiniary(SyntaxToken),
+    Punch(SyntaxToken, TextUnit),
+}
 
-        // ignore trivial first_child and last_child
-        let first_child = successors(Some(first_child), |it| {
-            if it.kind().is_trivia() {
-                it.next_sibling_or_token()
-            } else {
-                None
-            }
-        })
-        .last()
-        .unwrap();
-        if first_child.kind().is_trivia() {
-            return Some(tt::Subtree { token_trees: vec![], delimiter: None });
+impl SynToken {
+    fn token(&self) -> &SyntaxToken {
+        match self {
+            SynToken::Ordiniary(it) => it,
+            SynToken::Punch(it, _) => it,
         }
+    }
+}
 
-        let last_child = successors(Some(last_child), |it| {
-            if it.kind().is_trivia() {
-                it.prev_sibling_or_token()
-            } else {
-                None
+impl SrcToken for SynToken {
+    fn kind(&self) -> SyntaxKind {
+        self.token().kind()
+    }
+    fn to_char(&self) -> Option<char> {
+        match self {
+            SynToken::Ordiniary(_) => None,
+            SynToken::Punch(it, i) => it.text().chars().nth(i.to_usize()),
+        }
+    }
+    fn to_text(&self) -> SmolStr {
+        self.token().text().clone()
+    }
+}
+
+impl TokenConvertor for Convertor {
+    type Token = SynToken;
+    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+        convert_doc_comment(token.token())
+    }
+
+    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+        if let Some((punct, offset)) = self.punct_offset.clone() {
+            if offset.to_usize() + 1 < punct.text().len() {
+                let offset = offset + TextUnit::from_usize(1);
+                let range = punct.text_range();
+                self.punct_offset = Some((punct.clone(), offset));
+                let range = TextRange::offset_len(range.start() + offset, TextUnit::from_usize(1));
+                return Some((SynToken::Punch(punct, offset), range));
             }
-        })
-        .last()
-        .unwrap();
-
-        let (delimiter_kind, skip_first) = match (first_child.kind(), last_child.kind()) {
-            (T!['('], T![')']) => (Some(tt::DelimiterKind::Parenthesis), true),
-            (T!['{'], T!['}']) => (Some(tt::DelimiterKind::Brace), true),
-            (T!['['], T![']']) => (Some(tt::DelimiterKind::Bracket), true),
-            _ => (None, false),
-        };
-        let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
-            kind,
-            id: self.id_alloc.delim(first_child.text_range(), last_child.text_range()),
-        });
+        }
+
+        let curr = self.current.clone()?;
+        if !curr.text_range().is_subrange(&self.range) {
+            return None;
+        }
+        self.current = curr.next_token();
+
+        let token = if curr.kind().is_punct() {
+            let range = curr.text_range();
+            let range = TextRange::offset_len(range.start(), TextUnit::from_usize(1));
+            self.punct_offset = Some((curr.clone(), TextUnit::from_usize(0)));
+            (SynToken::Punch(curr, TextUnit::from_usize(0)), range)
+        } else {
+            self.punct_offset = None;
+            let range = curr.text_range();
+            (SynToken::Ordiniary(curr), range)
+        };
 
-        let mut token_trees = Vec::new();
-        let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
+        Some(token)
+    }
 
-        while let Some(child) = child_iter.next() {
-            if skip_first && (child == first_child || child == last_child) {
-                continue;
+    fn peek(&self) -> Option<Self::Token> {
+        if let Some((punct, mut offset)) = self.punct_offset.clone() {
+            offset = offset + TextUnit::from_usize(1);
+            if offset.to_usize() < punct.text().len() {
+                return Some(SynToken::Punch(punct, offset));
            }
+        }
 
-            match child {
-                NodeOrToken::Token(token) => {
-                    if let Some(doc_tokens) = convert_doc_comment(&token) {
-                        token_trees.extend(doc_tokens);
-                    } else if token.kind().is_trivia() {
-                        continue;
-                    } else if token.kind().is_punct() {
-                        // we need to pull apart joined punctuation tokens
-                        let last_spacing = match child_iter.peek() {
-                            Some(NodeOrToken::Token(token)) => {
-                                if token.kind().is_punct() {
-                                    tt::Spacing::Joint
-                                } else {
-                                    tt::Spacing::Alone
-                                }
-                            }
-                            _ => tt::Spacing::Alone,
-                        };
-                        let spacing_iter = std::iter::repeat(tt::Spacing::Joint)
-                            .take(token.text().len() - 1)
-                            .chain(std::iter::once(last_spacing));
-                        for (char, spacing) in token.text().chars().zip(spacing_iter) {
-                            token_trees.push(
-                                tt::Leaf::from(tt::Punct {
-                                    char,
-                                    spacing,
-                                    id: self.id_alloc.alloc(token.text_range()),
-                                })
-                                .into(),
-                            );
-                        }
-                    } else {
-                        macro_rules! make_leaf {
-                            ($i:ident) => {
-                                tt::$i {
-                                    id: self.id_alloc.alloc(token.text_range()),
-                                    text: token.text().clone(),
-                                }
-                                .into()
-                            };
-                        }
-
-                        let child: tt::Leaf = match token.kind() {
-                            T![true] | T![false] => make_leaf!(Literal),
-                            IDENT | LIFETIME => make_leaf!(Ident),
-                            k if k.is_keyword() => make_leaf!(Ident),
-                            k if k.is_literal() => make_leaf!(Literal),
-                            _ => return None,
-                        };
-                        token_trees.push(child.into());
-                    }
-                }
-                NodeOrToken::Node(node) => {
-                    let child_subtree = self.go(&node)?;
-                    if child_subtree.delimiter.is_none() && node.kind() != SyntaxKind::TOKEN_TREE {
-                        token_trees.extend(child_subtree.token_trees);
-                    } else {
-                        token_trees.push(child_subtree.into());
-                    }
-                }
-            };
+        let curr = self.current.clone()?;
+        if !curr.text_range().is_subrange(&self.range) {
+            return None;
         }
 
-        let res = tt::Subtree { delimiter, token_trees };
-        Some(res)
+        let token = if curr.kind().is_punct() {
+            SynToken::Punch(curr, TextUnit::from_usize(0))
+        } else {
+            SynToken::Ordiniary(curr)
+        };
+        Some(token)
+    }
+
+    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+        &mut self.id_alloc
    }
 }
 
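Note: taken together, the syntax_bridge.rs changes keep a single conversion loop and hide the token source behind two traits: SrcToken says what any source token can answer (kind, char, text), and TokenConvertor owns the shared go/collect_leaf driver, while RawConvertor (lexer tokens plus source text) and Convertor (a SyntaxToken cursor) only supply bump/peek/convert_doc_comment. A minimal standalone sketch of the pattern (plain Rust, hypothetical SliceConvertor; not the rust-analyzer types):

    // What a converter needs to ask of any token, regardless of its source.
    trait SrcToken {
        fn is_punct(&self) -> bool;
        fn to_text(&self) -> String;
    }

    // The shared conversion loop; only bump() differs per source.
    trait TokenConvertor {
        type Token: SrcToken;

        fn bump(&mut self) -> Option<Self::Token>;

        // Default method playing the role of go/collect_leaf: drive the
        // source to exhaustion and collect a flat description of its tokens.
        fn go(&mut self) -> Vec<String> {
            let mut out = Vec::new();
            while let Some(token) = self.bump() {
                let tag = if token.is_punct() { "punct" } else { "ident" };
                out.push(format!("{}: {}", tag, token.to_text()));
            }
            out
        }
    }

    // One concrete source: tokens borrowed from a pre-split slice.
    struct SliceConvertor<'a> {
        inner: std::slice::Iter<'a, &'a str>,
    }

    impl<'a> SrcToken for &'a str {
        fn is_punct(&self) -> bool {
            self.len() == 1 && !self.chars().next().unwrap().is_alphanumeric()
        }
        fn to_text(&self) -> String {
            self.to_string()
        }
    }

    impl<'a> TokenConvertor for SliceConvertor<'a> {
        type Token = &'a str;
        fn bump(&mut self) -> Option<Self::Token> {
            self.inner.next().copied()
        }
    }

    fn main() {
        let tokens = ["foo", "+", "bar"];
        let mut c = SliceConvertor { inner: tokens.iter() };
        assert_eq!(c.go(), ["ident: foo", "punct: +", "ident: bar"]);
    }
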
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 44f381938..a7fcea0ac 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -427,22 +427,28 @@ MACRO_ITEMS@[0; 40)
     );
 }
 
-#[test]
-fn test_expand_literals_to_token_tree() {
-    fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
-        if let tt::TokenTree::Subtree(subtree) = tt {
-            return &subtree;
-        }
-        unreachable!("It is not a subtree");
+fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
+    if let tt::TokenTree::Subtree(subtree) = tt {
+        return &subtree;
     }
+    unreachable!("It is not a subtree");
+}
+fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
+    if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
+        return lit;
+    }
+    unreachable!("It is not a literal");
+}
 
-    fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
-        if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
-            return lit;
-        }
-        unreachable!("It is not a literal");
+fn to_punct(tt: &tt::TokenTree) -> &tt::Punct {
+    if let tt::TokenTree::Leaf(tt::Leaf::Punct(lit)) = tt {
+        return lit;
     }
+    unreachable!("It is not a Punct");
+}
 
+#[test]
+fn test_expand_literals_to_token_tree() {
     let expansion = parse_macro(
         r#"
         macro_rules! literals {
@@ -471,6 +477,22 @@ fn test_expand_literals_to_token_tree() {
 }
 
 #[test]
+fn test_attr_to_token_tree() {
+    let expansion = parse_to_token_tree_by_syntax(
+        r#"
+            #[derive(Copy)]
+            struct Foo;
+            "#,
+    );
+
+    assert_eq!(to_punct(&expansion.token_trees[0]).char, '#');
+    assert_eq!(
+        to_subtree(&expansion.token_trees[1]).delimiter_kind(),
+        Some(tt::DelimiterKind::Bracket)
+    );
+}
+
+#[test]
 fn test_two_idents() {
     parse_macro(
         r#"
@@ -1427,8 +1449,8 @@ impl MacroFixture {
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (invocation_tt, _) =
-            ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap())
+            .ok_or_else(|| ExpandError::ConversionError)?;
 
         self.rules.expand(&invocation_tt).result()
     }
@@ -1517,6 +1539,16 @@ pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture {
     MacroFixture { rules }
 }
 
+pub(crate) fn parse_to_token_tree_by_syntax(ra_fixture: &str) -> tt::Subtree {
+    let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
+    let tt = syntax_node_to_token_tree(source_file.syntax()).unwrap().0;
+
+    let parsed = parse_to_token_tree(ra_fixture).unwrap().0;
+    assert_eq!(tt, parsed);
+
+    parsed
+}
+
 fn debug_dump_ignore_spaces(node: &ra_syntax::SyntaxNode) -> String {
     let mut level = 0;
     let mut buf = String::new();
@@ -1662,5 +1694,5 @@ fn test_expand_bad_literal() {
         macro_rules! foo { ($i:literal) => {}; }
         "#,
     )
-    .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".to_string()));
+    .assert_expand_err(r#"foo!(&k");"#, &ExpandError::BindingError("".into()));
 }
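
Note: the Punch variant threading through bump/peek above exists because a single syntax token such as => must expand to one single-character punct leaf per character, each with its own one-character range. A standalone sketch of that splitting (hypothetical split_punct helper, not the real API):

    // Sketch only: mirrors how Convertor::bump re-yields one SyntaxToken per
    // character, giving each sub-token a one-character range.
    fn split_punct(text: &str, token_start: usize) -> Vec<(char, (usize, usize))> {
        text.chars()
            .enumerate()
            // (char, (range start, range end)), matching
            // TextRange::offset_len(start + offset, 1) in bump() above.
            .map(|(i, c)| (c, (token_start + i, token_start + i + 1)))
            .collect()
    }

    fn main() {
        // A "=>" token starting at offset 10 becomes '=' at [10; 11) and '>' at [11; 12).
        assert_eq!(split_punct("=>", 10), vec![('=', (10, 11)), ('>', (11, 12))]);
    }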