diff options
Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs |  69
-rw-r--r--  crates/ra_mbe/src/tests.rs         | 408
2 files changed, 237 insertions(+), 240 deletions(-)
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 2b4390eb2..bb28acfd9 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -5,7 +5,7 @@ use ra_syntax::{ | |||
5 | ast::{self, make::tokens::doc_comment}, | 5 | ast::{self, make::tokens::doc_comment}, |
6 | tokenize, AstToken, Parse, SmolStr, SyntaxKind, | 6 | tokenize, AstToken, Parse, SmolStr, SyntaxKind, |
7 | SyntaxKind::*, | 7 | SyntaxKind::*, |
8 | SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextUnit, Token as RawToken, T, | 8 | SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T, |
9 | }; | 9 | }; |
10 | use rustc_hash::FxHashMap; | 10 | use rustc_hash::FxHashMap; |
11 | use tt::buffer::{Cursor, TokenBuffer}; | 11 | use tt::buffer::{Cursor, TokenBuffer}; |
@@ -99,11 +99,11 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> { | |||
99 | 99 | ||
100 | let mut conv = RawConvertor { | 100 | let mut conv = RawConvertor { |
101 | text, | 101 | text, |
102 | offset: TextUnit::default(), | 102 | offset: TextSize::default(), |
103 | inner: tokens.iter(), | 103 | inner: tokens.iter(), |
104 | id_alloc: TokenIdAlloc { | 104 | id_alloc: TokenIdAlloc { |
105 | map: Default::default(), | 105 | map: Default::default(), |
106 | global_offset: TextUnit::default(), | 106 | global_offset: TextSize::default(), |
107 | next_id: 0, | 107 | next_id: 0, |
108 | }, | 108 | }, |
109 | }; | 109 | }; |
@@ -227,7 +227,7 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr | |||
227 | 227 | ||
228 | struct TokenIdAlloc { | 228 | struct TokenIdAlloc { |
229 | map: TokenMap, | 229 | map: TokenMap, |
230 | global_offset: TextUnit, | 230 | global_offset: TextSize, |
231 | next_id: u32, | 231 | next_id: u32, |
232 | } | 232 | } |
233 | 233 | ||
@@ -266,7 +266,7 @@ impl TokenIdAlloc { | |||
266 | /// A Raw Token (straightly from lexer) convertor | 266 | /// A Raw Token (straightly from lexer) convertor |
267 | struct RawConvertor<'a> { | 267 | struct RawConvertor<'a> { |
268 | text: &'a str, | 268 | text: &'a str, |
269 | offset: TextUnit, | 269 | offset: TextSize, |
270 | id_alloc: TokenIdAlloc, | 270 | id_alloc: TokenIdAlloc, |
271 | inner: std::slice::Iter<'a, RawToken>, | 271 | inner: std::slice::Iter<'a, RawToken>, |
272 | } | 272 | } |
@@ -314,7 +314,7 @@ trait TokenConvertor { | |||
314 | } | 314 | } |
315 | 315 | ||
316 | result.push(if k.is_punct() { | 316 | result.push(if k.is_punct() { |
317 | assert_eq!(range.len().to_usize(), 1); | 317 | assert_eq!(range.len(), TextSize::of('.')); |
318 | let delim = match k { | 318 | let delim = match k { |
319 | T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), | 319 | T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), |
320 | T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), | 320 | T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), |
@@ -381,8 +381,8 @@ trait TokenConvertor { | |||
381 | k if k.is_keyword() => make_leaf!(Ident), | 381 | k if k.is_keyword() => make_leaf!(Ident), |
382 | k if k.is_literal() => make_leaf!(Literal), | 382 | k if k.is_literal() => make_leaf!(Literal), |
383 | LIFETIME => { | 383 | LIFETIME => { |
384 | let char_unit = TextUnit::from_usize(1); | 384 | let char_unit = TextSize::of('\''); |
385 | let r = TextRange::offset_len(range.start(), char_unit); | 385 | let r = TextRange::at(range.start(), char_unit); |
386 | let apostrophe = tt::Leaf::from(tt::Punct { | 386 | let apostrophe = tt::Leaf::from(tt::Punct { |
387 | char: '\'', | 387 | char: '\'', |
388 | spacing: tt::Spacing::Joint, | 388 | spacing: tt::Spacing::Joint, |
@@ -390,8 +390,7 @@ trait TokenConvertor { | |||
390 | }); | 390 | }); |
391 | result.push(apostrophe.into()); | 391 | result.push(apostrophe.into()); |
392 | 392 | ||
393 | let r = | 393 | let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); |
394 | TextRange::offset_len(range.start() + char_unit, range.len() - char_unit); | ||
395 | let ident = tt::Leaf::from(tt::Ident { | 394 | let ident = tt::Leaf::from(tt::Ident { |
396 | text: SmolStr::new(&token.to_text()[1..]), | 395 | text: SmolStr::new(&token.to_text()[1..]), |
397 | id: self.id_alloc().alloc(r), | 396 | id: self.id_alloc().alloc(r), |
@@ -440,7 +439,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> { | |||
440 | 439 | ||
441 | fn bump(&mut self) -> Option<(Self::Token, TextRange)> { | 440 | fn bump(&mut self) -> Option<(Self::Token, TextRange)> { |
442 | let token = self.inner.next()?; | 441 | let token = self.inner.next()?; |
443 | let range = TextRange::offset_len(self.offset, token.len); | 442 | let range = TextRange::at(self.offset, token.len); |
444 | self.offset += token.len; | 443 | self.offset += token.len; |
445 | 444 | ||
446 | Some(((*token, &self.text[range]), range)) | 445 | Some(((*token, &self.text[range]), range)) |
@@ -450,7 +449,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> { | |||
450 | let token = self.inner.as_slice().get(0).cloned(); | 449 | let token = self.inner.as_slice().get(0).cloned(); |
451 | 450 | ||
452 | token.map(|it| { | 451 | token.map(|it| { |
453 | let range = TextRange::offset_len(self.offset, it.len); | 452 | let range = TextRange::at(self.offset, it.len); |
454 | (it, &self.text[range]) | 453 | (it, &self.text[range]) |
455 | }) | 454 | }) |
456 | } | 455 | } |
@@ -464,11 +463,11 @@ struct Convertor { | |||
464 | id_alloc: TokenIdAlloc, | 463 | id_alloc: TokenIdAlloc, |
465 | current: Option<SyntaxToken>, | 464 | current: Option<SyntaxToken>, |
466 | range: TextRange, | 465 | range: TextRange, |
467 | punct_offset: Option<(SyntaxToken, TextUnit)>, | 466 | punct_offset: Option<(SyntaxToken, TextSize)>, |
468 | } | 467 | } |
469 | 468 | ||
470 | impl Convertor { | 469 | impl Convertor { |
471 | fn new(node: &SyntaxNode, global_offset: TextUnit) -> Convertor { | 470 | fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor { |
472 | Convertor { | 471 | Convertor { |
473 | id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, | 472 | id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, |
474 | current: node.first_token(), | 473 | current: node.first_token(), |
@@ -481,7 +480,7 @@ impl Convertor { | |||
481 | #[derive(Debug)] | 480 | #[derive(Debug)] |
482 | enum SynToken { | 481 | enum SynToken { |
483 | Ordiniary(SyntaxToken), | 482 | Ordiniary(SyntaxToken), |
484 | Punch(SyntaxToken, TextUnit), | 483 | Punch(SyntaxToken, TextSize), |
485 | } | 484 | } |
486 | 485 | ||
487 | impl SynToken { | 486 | impl SynToken { |
@@ -500,7 +499,7 @@ impl SrcToken for SynToken { | |||
500 | fn to_char(&self) -> Option<char> { | 499 | fn to_char(&self) -> Option<char> { |
501 | match self { | 500 | match self { |
502 | SynToken::Ordiniary(_) => None, | 501 | SynToken::Ordiniary(_) => None, |
503 | SynToken::Punch(it, i) => it.text().chars().nth(i.to_usize()), | 502 | SynToken::Punch(it, i) => it.text().chars().nth((*i).into()), |
504 | } | 503 | } |
505 | } | 504 | } |
506 | fn to_text(&self) -> SmolStr { | 505 | fn to_text(&self) -> SmolStr { |
@@ -516,26 +515,26 @@ impl TokenConvertor for Convertor { | |||
516 | 515 | ||
517 | fn bump(&mut self) -> Option<(Self::Token, TextRange)> { | 516 | fn bump(&mut self) -> Option<(Self::Token, TextRange)> { |
518 | if let Some((punct, offset)) = self.punct_offset.clone() { | 517 | if let Some((punct, offset)) = self.punct_offset.clone() { |
519 | if offset.to_usize() + 1 < punct.text().len() { | 518 | if usize::from(offset) + 1 < punct.text().len() { |
520 | let offset = offset + TextUnit::from_usize(1); | 519 | let offset = offset + TextSize::of('.'); |
521 | let range = punct.text_range(); | 520 | let range = punct.text_range(); |
522 | self.punct_offset = Some((punct.clone(), offset)); | 521 | self.punct_offset = Some((punct.clone(), offset)); |
523 | let range = TextRange::offset_len(range.start() + offset, TextUnit::from_usize(1)); | 522 | let range = TextRange::at(range.start() + offset, TextSize::of('.')); |
524 | return Some((SynToken::Punch(punct, offset), range)); | 523 | return Some((SynToken::Punch(punct, offset), range)); |
525 | } | 524 | } |
526 | } | 525 | } |
527 | 526 | ||
528 | let curr = self.current.clone()?; | 527 | let curr = self.current.clone()?; |
529 | if !curr.text_range().is_subrange(&self.range) { | 528 | if !&self.range.contains_range(curr.text_range()) { |
530 | return None; | 529 | return None; |
531 | } | 530 | } |
532 | self.current = curr.next_token(); | 531 | self.current = curr.next_token(); |
533 | 532 | ||
534 | let token = if curr.kind().is_punct() { | 533 | let token = if curr.kind().is_punct() { |
535 | let range = curr.text_range(); | 534 | let range = curr.text_range(); |
536 | let range = TextRange::offset_len(range.start(), TextUnit::from_usize(1)); | 535 | let range = TextRange::at(range.start(), TextSize::of('.')); |
537 | self.punct_offset = Some((curr.clone(), TextUnit::from_usize(0))); | 536 | self.punct_offset = Some((curr.clone(), 0.into())); |
538 | (SynToken::Punch(curr, TextUnit::from_usize(0)), range) | 537 | (SynToken::Punch(curr, 0.into()), range) |
539 | } else { | 538 | } else { |
540 | self.punct_offset = None; | 539 | self.punct_offset = None; |
541 | let range = curr.text_range(); | 540 | let range = curr.text_range(); |
@@ -547,19 +546,19 @@ impl TokenConvertor for Convertor { | |||
547 | 546 | ||
548 | fn peek(&self) -> Option<Self::Token> { | 547 | fn peek(&self) -> Option<Self::Token> { |
549 | if let Some((punct, mut offset)) = self.punct_offset.clone() { | 548 | if let Some((punct, mut offset)) = self.punct_offset.clone() { |
550 | offset = offset + TextUnit::from_usize(1); | 549 | offset = offset + TextSize::of('.'); |
551 | if offset.to_usize() < punct.text().len() { | 550 | if usize::from(offset) < punct.text().len() { |
552 | return Some(SynToken::Punch(punct, offset)); | 551 | return Some(SynToken::Punch(punct, offset)); |
553 | } | 552 | } |
554 | } | 553 | } |
555 | 554 | ||
556 | let curr = self.current.clone()?; | 555 | let curr = self.current.clone()?; |
557 | if !curr.text_range().is_subrange(&self.range) { | 556 | if !self.range.contains_range(curr.text_range()) { |
558 | return None; | 557 | return None; |
559 | } | 558 | } |
560 | 559 | ||
561 | let token = if curr.kind().is_punct() { | 560 | let token = if curr.kind().is_punct() { |
562 | SynToken::Punch(curr, TextUnit::from_usize(0)) | 561 | SynToken::Punch(curr, 0.into()) |
563 | } else { | 562 | } else { |
564 | SynToken::Ordiniary(curr) | 563 | SynToken::Ordiniary(curr) |
565 | }; | 564 | }; |
@@ -574,8 +573,8 @@ impl TokenConvertor for Convertor { | |||
574 | struct TtTreeSink<'a> { | 573 | struct TtTreeSink<'a> { |
575 | buf: String, | 574 | buf: String, |
576 | cursor: Cursor<'a>, | 575 | cursor: Cursor<'a>, |
577 | open_delims: FxHashMap<tt::TokenId, TextUnit>, | 576 | open_delims: FxHashMap<tt::TokenId, TextSize>, |
578 | text_pos: TextUnit, | 577 | text_pos: TextSize, |
579 | inner: SyntaxTreeBuilder, | 578 | inner: SyntaxTreeBuilder, |
580 | token_map: TokenMap, | 579 | token_map: TokenMap, |
581 | 580 | ||
@@ -641,7 +640,7 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
641 | } | 640 | } |
642 | tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), | 641 | tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), |
643 | }; | 642 | }; |
644 | let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text)); | 643 | let range = TextRange::at(self.text_pos, TextSize::of(text.as_str())); |
645 | self.token_map.insert(id, range); | 644 | self.token_map.insert(id, range); |
646 | self.cursor = self.cursor.bump(); | 645 | self.cursor = self.cursor.bump(); |
647 | text | 646 | text |
@@ -658,10 +657,8 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
658 | self.cursor = self.cursor.bump(); | 657 | self.cursor = self.cursor.bump(); |
659 | if let Some(id) = parent.delimiter.map(|it| it.id) { | 658 | if let Some(id) = parent.delimiter.map(|it| it.id) { |
660 | if let Some(open_delim) = self.open_delims.get(&id) { | 659 | if let Some(open_delim) = self.open_delims.get(&id) { |
661 | let open_range = | 660 | let open_range = TextRange::at(*open_delim, TextSize::of('(')); |
662 | TextRange::offset_len(*open_delim, TextUnit::from_usize(1)); | 661 | let close_range = TextRange::at(self.text_pos, TextSize::of('(')); |
663 | let close_range = | ||
664 | TextRange::offset_len(self.text_pos, TextUnit::from_usize(1)); | ||
665 | self.token_map.insert_delim(id, open_range, close_range); | 662 | self.token_map.insert_delim(id, open_range, close_range); |
666 | } | 663 | } |
667 | } | 664 | } |
@@ -672,7 +669,7 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
672 | } | 669 | } |
673 | }; | 670 | }; |
674 | self.buf += &text; | 671 | self.buf += &text; |
675 | self.text_pos += TextUnit::of_str(&text); | 672 | self.text_pos += TextSize::of(text.as_str()); |
676 | } | 673 | } |
677 | 674 | ||
678 | let text = SmolStr::new(self.buf.as_str()); | 675 | let text = SmolStr::new(self.buf.as_str()); |
@@ -690,7 +687,7 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
690 | // other parts of RA such that we don't add whitespace here. | 687 | // other parts of RA such that we don't add whitespace here. |
691 | if curr.spacing == tt::Spacing::Alone && curr.char != ';' { | 688 | if curr.spacing == tt::Spacing::Alone && curr.char != ';' { |
692 | self.inner.token(WHITESPACE, " ".into()); | 689 | self.inner.token(WHITESPACE, " ".into()); |
693 | self.text_pos += TextUnit::of_char(' '); | 690 | self.text_pos += TextSize::of(' '); |
694 | } | 691 | } |
695 | } | 692 | } |
696 | } | 693 | } |
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 76f571502..7ceec7752 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -257,30 +257,30 @@ fn test_expr_order() { | |||
257 | let dump = format!("{:#?}", expanded); | 257 | let dump = format!("{:#?}", expanded); |
258 | assert_eq_text!( | 258 | assert_eq_text!( |
259 | dump.trim(), | 259 | dump.trim(), |
260 | r#"MACRO_ITEMS@[0; 15) | 260 | r#"MACRO_ITEMS@0..15 |
261 | FN_DEF@[0; 15) | 261 | FN_DEF@0..15 |
262 | FN_KW@[0; 2) "fn" | 262 | FN_KW@0..2 "fn" |
263 | NAME@[2; 5) | 263 | NAME@2..5 |
264 | IDENT@[2; 5) "bar" | 264 | IDENT@2..5 "bar" |
265 | PARAM_LIST@[5; 7) | 265 | PARAM_LIST@5..7 |
266 | L_PAREN@[5; 6) "(" | 266 | L_PAREN@5..6 "(" |
267 | R_PAREN@[6; 7) ")" | 267 | R_PAREN@6..7 ")" |
268 | BLOCK_EXPR@[7; 15) | 268 | BLOCK_EXPR@7..15 |
269 | BLOCK@[7; 15) | 269 | BLOCK@7..15 |
270 | L_CURLY@[7; 8) "{" | 270 | L_CURLY@7..8 "{" |
271 | EXPR_STMT@[8; 14) | 271 | EXPR_STMT@8..14 |
272 | BIN_EXPR@[8; 13) | 272 | BIN_EXPR@8..13 |
273 | BIN_EXPR@[8; 11) | 273 | BIN_EXPR@8..11 |
274 | LITERAL@[8; 9) | 274 | LITERAL@8..9 |
275 | INT_NUMBER@[8; 9) "1" | 275 | INT_NUMBER@8..9 "1" |
276 | PLUS@[9; 10) "+" | 276 | PLUS@9..10 "+" |
277 | LITERAL@[10; 11) | 277 | LITERAL@10..11 |
278 | INT_NUMBER@[10; 11) "1" | 278 | INT_NUMBER@10..11 "1" |
279 | STAR@[11; 12) "*" | 279 | STAR@11..12 "*" |
280 | LITERAL@[12; 13) | 280 | LITERAL@12..13 |
281 | INT_NUMBER@[12; 13) "2" | 281 | INT_NUMBER@12..13 "2" |
282 | SEMICOLON@[13; 14) ";" | 282 | SEMICOLON@13..14 ";" |
283 | R_CURLY@[14; 15) "}""#, | 283 | R_CURLY@14..15 "}""#, |
284 | ); | 284 | ); |
285 | } | 285 | } |
286 | 286 | ||
@@ -490,39 +490,39 @@ fn test_expand_to_item_list() { | |||
490 | assert_eq!( | 490 | assert_eq!( |
491 | format!("{:#?}", tree).trim(), | 491 | format!("{:#?}", tree).trim(), |
492 | r#" | 492 | r#" |
493 | MACRO_ITEMS@[0; 40) | 493 | MACRO_ITEMS@0..40 |
494 | STRUCT_DEF@[0; 20) | 494 | STRUCT_DEF@0..20 |
495 | STRUCT_KW@[0; 6) "struct" | 495 | STRUCT_KW@0..6 "struct" |
496 | NAME@[6; 9) | 496 | NAME@6..9 |
497 | IDENT@[6; 9) "Foo" | 497 | IDENT@6..9 "Foo" |
498 | RECORD_FIELD_DEF_LIST@[9; 20) | 498 | RECORD_FIELD_DEF_LIST@9..20 |
499 | L_CURLY@[9; 10) "{" | 499 | L_CURLY@9..10 "{" |
500 | RECORD_FIELD_DEF@[10; 19) | 500 | RECORD_FIELD_DEF@10..19 |
501 | NAME@[10; 15) | 501 | NAME@10..15 |
502 | IDENT@[10; 15) "field" | 502 | IDENT@10..15 "field" |
503 | COLON@[15; 16) ":" | 503 | COLON@15..16 ":" |
504 | PATH_TYPE@[16; 19) | 504 | PATH_TYPE@16..19 |
505 | PATH@[16; 19) | 505 | PATH@16..19 |
506 | PATH_SEGMENT@[16; 19) | 506 | PATH_SEGMENT@16..19 |
507 | NAME_REF@[16; 19) | 507 | NAME_REF@16..19 |
508 | IDENT@[16; 19) "u32" | 508 | IDENT@16..19 "u32" |
509 | R_CURLY@[19; 20) "}" | 509 | R_CURLY@19..20 "}" |
510 | STRUCT_DEF@[20; 40) | 510 | STRUCT_DEF@20..40 |
511 | STRUCT_KW@[20; 26) "struct" | 511 | STRUCT_KW@20..26 "struct" |
512 | NAME@[26; 29) | 512 | NAME@26..29 |
513 | IDENT@[26; 29) "Bar" | 513 | IDENT@26..29 "Bar" |
514 | RECORD_FIELD_DEF_LIST@[29; 40) | 514 | RECORD_FIELD_DEF_LIST@29..40 |
515 | L_CURLY@[29; 30) "{" | 515 | L_CURLY@29..30 "{" |
516 | RECORD_FIELD_DEF@[30; 39) | 516 | RECORD_FIELD_DEF@30..39 |
517 | NAME@[30; 35) | 517 | NAME@30..35 |
518 | IDENT@[30; 35) "field" | 518 | IDENT@30..35 "field" |
519 | COLON@[35; 36) ":" | 519 | COLON@35..36 ":" |
520 | PATH_TYPE@[36; 39) | 520 | PATH_TYPE@36..39 |
521 | PATH@[36; 39) | 521 | PATH@36..39 |
522 | PATH_SEGMENT@[36; 39) | 522 | PATH_SEGMENT@36..39 |
523 | NAME_REF@[36; 39) | 523 | NAME_REF@36..39 |
524 | IDENT@[36; 39) "u32" | 524 | IDENT@36..39 "u32" |
525 | R_CURLY@[39; 40) "}""# | 525 | R_CURLY@39..40 "}""# |
526 | .trim() | 526 | .trim() |
527 | ); | 527 | ); |
528 | } | 528 | } |
@@ -623,37 +623,37 @@ fn test_tt_to_stmts() { | |||
623 | 623 | ||
624 | assert_eq!( | 624 | assert_eq!( |
625 | format!("{:#?}", stmts).trim(), | 625 | format!("{:#?}", stmts).trim(), |
626 | r#"MACRO_STMTS@[0; 15) | 626 | r#"MACRO_STMTS@0..15 |
627 | LET_STMT@[0; 7) | 627 | LET_STMT@0..7 |
628 | LET_KW@[0; 3) "let" | 628 | LET_KW@0..3 "let" |
629 | BIND_PAT@[3; 4) | 629 | BIND_PAT@3..4 |
630 | NAME@[3; 4) | 630 | NAME@3..4 |
631 | IDENT@[3; 4) "a" | 631 | IDENT@3..4 "a" |
632 | EQ@[4; 5) "=" | 632 | EQ@4..5 "=" |
633 | LITERAL@[5; 6) | 633 | LITERAL@5..6 |
634 | INT_NUMBER@[5; 6) "0" | 634 | INT_NUMBER@5..6 "0" |
635 | SEMICOLON@[6; 7) ";" | 635 | SEMICOLON@6..7 ";" |
636 | EXPR_STMT@[7; 14) | 636 | EXPR_STMT@7..14 |
637 | BIN_EXPR@[7; 13) | 637 | BIN_EXPR@7..13 |
638 | PATH_EXPR@[7; 8) | 638 | PATH_EXPR@7..8 |
639 | PATH@[7; 8) | 639 | PATH@7..8 |
640 | PATH_SEGMENT@[7; 8) | 640 | PATH_SEGMENT@7..8 |
641 | NAME_REF@[7; 8) | 641 | NAME_REF@7..8 |
642 | IDENT@[7; 8) "a" | 642 | IDENT@7..8 "a" |
643 | EQ@[8; 9) "=" | 643 | EQ@8..9 "=" |
644 | BIN_EXPR@[9; 13) | 644 | BIN_EXPR@9..13 |
645 | LITERAL@[9; 11) | 645 | LITERAL@9..11 |
646 | INT_NUMBER@[9; 11) "10" | 646 | INT_NUMBER@9..11 "10" |
647 | PLUS@[11; 12) "+" | 647 | PLUS@11..12 "+" |
648 | LITERAL@[12; 13) | 648 | LITERAL@12..13 |
649 | INT_NUMBER@[12; 13) "1" | 649 | INT_NUMBER@12..13 "1" |
650 | SEMICOLON@[13; 14) ";" | 650 | SEMICOLON@13..14 ";" |
651 | EXPR_STMT@[14; 15) | 651 | EXPR_STMT@14..15 |
652 | PATH_EXPR@[14; 15) | 652 | PATH_EXPR@14..15 |
653 | PATH@[14; 15) | 653 | PATH@14..15 |
654 | PATH_SEGMENT@[14; 15) | 654 | PATH_SEGMENT@14..15 |
655 | NAME_REF@[14; 15) | 655 | NAME_REF@14..15 |
656 | IDENT@[14; 15) "a""#, | 656 | IDENT@14..15 "a""#, |
657 | ); | 657 | ); |
658 | } | 658 | } |
659 | 659 | ||
@@ -974,20 +974,20 @@ fn test_tt_composite2() { | |||
974 | let res = format!("{:#?}", &node); | 974 | let res = format!("{:#?}", &node); |
975 | assert_eq_text!( | 975 | assert_eq_text!( |
976 | res.trim(), | 976 | res.trim(), |
977 | r###"MACRO_ITEMS@[0; 10) | 977 | r###"MACRO_ITEMS@0..10 |
978 | MACRO_CALL@[0; 10) | 978 | MACRO_CALL@0..10 |
979 | PATH@[0; 3) | 979 | PATH@0..3 |
980 | PATH_SEGMENT@[0; 3) | 980 | PATH_SEGMENT@0..3 |
981 | NAME_REF@[0; 3) | 981 | NAME_REF@0..3 |
982 | IDENT@[0; 3) "abs" | 982 | IDENT@0..3 "abs" |
983 | BANG@[3; 4) "!" | 983 | BANG@3..4 "!" |
984 | TOKEN_TREE@[4; 10) | 984 | TOKEN_TREE@4..10 |
985 | L_PAREN@[4; 5) "(" | 985 | L_PAREN@4..5 "(" |
986 | EQ@[5; 6) "=" | 986 | EQ@5..6 "=" |
987 | R_ANGLE@[6; 7) ">" | 987 | R_ANGLE@6..7 ">" |
988 | WHITESPACE@[7; 8) " " | 988 | WHITESPACE@7..8 " " |
989 | POUND@[8; 9) "#" | 989 | POUND@8..9 "#" |
990 | R_PAREN@[9; 10) ")""### | 990 | R_PAREN@9..10 ")""### |
991 | ); | 991 | ); |
992 | } | 992 | } |
993 | 993 | ||
@@ -1113,69 +1113,69 @@ fn test_vec() { | |||
1113 | 1113 | ||
1114 | assert_eq!( | 1114 | assert_eq!( |
1115 | format!("{:#?}", tree).trim(), | 1115 | format!("{:#?}", tree).trim(), |
1116 | r#"BLOCK_EXPR@[0; 45) | 1116 | r#"BLOCK_EXPR@0..45 |
1117 | BLOCK@[0; 45) | 1117 | BLOCK@0..45 |
1118 | L_CURLY@[0; 1) "{" | 1118 | L_CURLY@0..1 "{" |
1119 | LET_STMT@[1; 20) | 1119 | LET_STMT@1..20 |
1120 | LET_KW@[1; 4) "let" | 1120 | LET_KW@1..4 "let" |
1121 | BIND_PAT@[4; 8) | 1121 | BIND_PAT@4..8 |
1122 | MUT_KW@[4; 7) "mut" | 1122 | MUT_KW@4..7 "mut" |
1123 | NAME@[7; 8) | 1123 | NAME@7..8 |
1124 | IDENT@[7; 8) "v" | 1124 | IDENT@7..8 "v" |
1125 | EQ@[8; 9) "=" | 1125 | EQ@8..9 "=" |
1126 | CALL_EXPR@[9; 19) | 1126 | CALL_EXPR@9..19 |
1127 | PATH_EXPR@[9; 17) | 1127 | PATH_EXPR@9..17 |
1128 | PATH@[9; 17) | 1128 | PATH@9..17 |
1129 | PATH@[9; 12) | 1129 | PATH@9..12 |
1130 | PATH_SEGMENT@[9; 12) | 1130 | PATH_SEGMENT@9..12 |
1131 | NAME_REF@[9; 12) | 1131 | NAME_REF@9..12 |
1132 | IDENT@[9; 12) "Vec" | 1132 | IDENT@9..12 "Vec" |
1133 | COLON2@[12; 14) "::" | 1133 | COLON2@12..14 "::" |
1134 | PATH_SEGMENT@[14; 17) | 1134 | PATH_SEGMENT@14..17 |
1135 | NAME_REF@[14; 17) | 1135 | NAME_REF@14..17 |
1136 | IDENT@[14; 17) "new" | 1136 | IDENT@14..17 "new" |
1137 | ARG_LIST@[17; 19) | 1137 | ARG_LIST@17..19 |
1138 | L_PAREN@[17; 18) "(" | 1138 | L_PAREN@17..18 "(" |
1139 | R_PAREN@[18; 19) ")" | 1139 | R_PAREN@18..19 ")" |
1140 | SEMICOLON@[19; 20) ";" | 1140 | SEMICOLON@19..20 ";" |
1141 | EXPR_STMT@[20; 33) | 1141 | EXPR_STMT@20..33 |
1142 | METHOD_CALL_EXPR@[20; 32) | 1142 | METHOD_CALL_EXPR@20..32 |
1143 | PATH_EXPR@[20; 21) | 1143 | PATH_EXPR@20..21 |
1144 | PATH@[20; 21) | 1144 | PATH@20..21 |
1145 | PATH_SEGMENT@[20; 21) | 1145 | PATH_SEGMENT@20..21 |
1146 | NAME_REF@[20; 21) | 1146 | NAME_REF@20..21 |
1147 | IDENT@[20; 21) "v" | 1147 | IDENT@20..21 "v" |
1148 | DOT@[21; 22) "." | 1148 | DOT@21..22 "." |
1149 | NAME_REF@[22; 26) | 1149 | NAME_REF@22..26 |
1150 | IDENT@[22; 26) "push" | 1150 | IDENT@22..26 "push" |
1151 | ARG_LIST@[26; 32) | 1151 | ARG_LIST@26..32 |
1152 | L_PAREN@[26; 27) "(" | 1152 | L_PAREN@26..27 "(" |
1153 | LITERAL@[27; 31) | 1153 | LITERAL@27..31 |
1154 | INT_NUMBER@[27; 31) "1u32" | 1154 | INT_NUMBER@27..31 "1u32" |
1155 | R_PAREN@[31; 32) ")" | 1155 | R_PAREN@31..32 ")" |
1156 | SEMICOLON@[32; 33) ";" | 1156 | SEMICOLON@32..33 ";" |
1157 | EXPR_STMT@[33; 43) | 1157 | EXPR_STMT@33..43 |
1158 | METHOD_CALL_EXPR@[33; 42) | 1158 | METHOD_CALL_EXPR@33..42 |
1159 | PATH_EXPR@[33; 34) | 1159 | PATH_EXPR@33..34 |
1160 | PATH@[33; 34) | 1160 | PATH@33..34 |
1161 | PATH_SEGMENT@[33; 34) | 1161 | PATH_SEGMENT@33..34 |
1162 | NAME_REF@[33; 34) | 1162 | NAME_REF@33..34 |
1163 | IDENT@[33; 34) "v" | 1163 | IDENT@33..34 "v" |
1164 | DOT@[34; 35) "." | 1164 | DOT@34..35 "." |
1165 | NAME_REF@[35; 39) | 1165 | NAME_REF@35..39 |
1166 | IDENT@[35; 39) "push" | 1166 | IDENT@35..39 "push" |
1167 | ARG_LIST@[39; 42) | 1167 | ARG_LIST@39..42 |
1168 | L_PAREN@[39; 40) "(" | 1168 | L_PAREN@39..40 "(" |
1169 | LITERAL@[40; 41) | 1169 | LITERAL@40..41 |
1170 | INT_NUMBER@[40; 41) "2" | 1170 | INT_NUMBER@40..41 "2" |
1171 | R_PAREN@[41; 42) ")" | 1171 | R_PAREN@41..42 ")" |
1172 | SEMICOLON@[42; 43) ";" | 1172 | SEMICOLON@42..43 ";" |
1173 | PATH_EXPR@[43; 44) | 1173 | PATH_EXPR@43..44 |
1174 | PATH@[43; 44) | 1174 | PATH@43..44 |
1175 | PATH_SEGMENT@[43; 44) | 1175 | PATH_SEGMENT@43..44 |
1176 | NAME_REF@[43; 44) | 1176 | NAME_REF@43..44 |
1177 | IDENT@[43; 44) "v" | 1177 | IDENT@43..44 "v" |
1178 | R_CURLY@[44; 45) "}""# | 1178 | R_CURLY@44..45 "}""# |
1179 | ); | 1179 | ); |
1180 | } | 1180 | } |
1181 | 1181 | ||
@@ -1798,45 +1798,45 @@ fn test_no_space_after_semi_colon() { | |||
1798 | let dump = format!("{:#?}", expanded); | 1798 | let dump = format!("{:#?}", expanded); |
1799 | assert_eq_text!( | 1799 | assert_eq_text!( |
1800 | dump.trim(), | 1800 | dump.trim(), |
1801 | r###"MACRO_ITEMS@[0; 52) | 1801 | r###"MACRO_ITEMS@0..52 |
1802 | MODULE@[0; 26) | 1802 | MODULE@0..26 |
1803 | ATTR@[0; 21) | 1803 | ATTR@0..21 |
1804 | POUND@[0; 1) "#" | 1804 | POUND@0..1 "#" |
1805 | L_BRACK@[1; 2) "[" | 1805 | L_BRACK@1..2 "[" |
1806 | PATH@[2; 5) | 1806 | PATH@2..5 |
1807 | PATH_SEGMENT@[2; 5) | 1807 | PATH_SEGMENT@2..5 |
1808 | NAME_REF@[2; 5) | 1808 | NAME_REF@2..5 |
1809 | IDENT@[2; 5) "cfg" | 1809 | IDENT@2..5 "cfg" |
1810 | TOKEN_TREE@[5; 20) | 1810 | TOKEN_TREE@5..20 |
1811 | L_PAREN@[5; 6) "(" | 1811 | L_PAREN@5..6 "(" |
1812 | IDENT@[6; 13) "feature" | 1812 | IDENT@6..13 "feature" |
1813 | EQ@[13; 14) "=" | 1813 | EQ@13..14 "=" |
1814 | STRING@[14; 19) "\"std\"" | 1814 | STRING@14..19 "\"std\"" |
1815 | R_PAREN@[19; 20) ")" | 1815 | R_PAREN@19..20 ")" |
1816 | R_BRACK@[20; 21) "]" | 1816 | R_BRACK@20..21 "]" |
1817 | MOD_KW@[21; 24) "mod" | 1817 | MOD_KW@21..24 "mod" |
1818 | NAME@[24; 25) | 1818 | NAME@24..25 |
1819 | IDENT@[24; 25) "m" | 1819 | IDENT@24..25 "m" |
1820 | SEMICOLON@[25; 26) ";" | 1820 | SEMICOLON@25..26 ";" |
1821 | MODULE@[26; 52) | 1821 | MODULE@26..52 |
1822 | ATTR@[26; 47) | 1822 | ATTR@26..47 |
1823 | POUND@[26; 27) "#" | 1823 | POUND@26..27 "#" |
1824 | L_BRACK@[27; 28) "[" | 1824 | L_BRACK@27..28 "[" |
1825 | PATH@[28; 31) | 1825 | PATH@28..31 |
1826 | PATH_SEGMENT@[28; 31) | 1826 | PATH_SEGMENT@28..31 |
1827 | NAME_REF@[28; 31) | 1827 | NAME_REF@28..31 |
1828 | IDENT@[28; 31) "cfg" | 1828 | IDENT@28..31 "cfg" |
1829 | TOKEN_TREE@[31; 46) | 1829 | TOKEN_TREE@31..46 |
1830 | L_PAREN@[31; 32) "(" | 1830 | L_PAREN@31..32 "(" |
1831 | IDENT@[32; 39) "feature" | 1831 | IDENT@32..39 "feature" |
1832 | EQ@[39; 40) "=" | 1832 | EQ@39..40 "=" |
1833 | STRING@[40; 45) "\"std\"" | 1833 | STRING@40..45 "\"std\"" |
1834 | R_PAREN@[45; 46) ")" | 1834 | R_PAREN@45..46 ")" |
1835 | R_BRACK@[46; 47) "]" | 1835 | R_BRACK@46..47 "]" |
1836 | MOD_KW@[47; 50) "mod" | 1836 | MOD_KW@47..50 "mod" |
1837 | NAME@[50; 51) | 1837 | NAME@50..51 |
1838 | IDENT@[50; 51) "f" | 1838 | IDENT@50..51 "f" |
1839 | SEMICOLON@[51; 52) ";""###, | 1839 | SEMICOLON@51..52 ";""###, |
1840 | ); | 1840 | ); |
1841 | } | 1841 | } |
1842 | 1842 | ||