about summary refs log tree commit diff
path: root/crates/ra_mbe/src
diff options
context:
space:
mode:
authorAleksey Kladov <[email protected]>2020-04-24 22:40:41 +0100
committerAleksey Kladov <[email protected]>2020-04-25 10:59:18 +0100
commitb1d5817dd18b7b5fc102a63b084b1ee7ff4f9996 (patch)
treee5d136c5ba4a6ba96aeeb423e6e3f64ca7cea3f9 /crates/ra_mbe/src
parent27a7718880d93f55f905da606d108d3b3c682ab4 (diff)
Convert code to text-size
Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r--crates/ra_mbe/src/syntax_bridge.rs69
1 file changed, 33 insertions, 36 deletions
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 2b4390eb2..fa9787266 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -5,7 +5,7 @@ use ra_syntax::{
5 ast::{self, make::tokens::doc_comment}, 5 ast::{self, make::tokens::doc_comment},
6 tokenize, AstToken, Parse, SmolStr, SyntaxKind, 6 tokenize, AstToken, Parse, SmolStr, SyntaxKind,
7 SyntaxKind::*, 7 SyntaxKind::*,
8 SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextUnit, Token as RawToken, T, 8 SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, T,
9}; 9};
10use rustc_hash::FxHashMap; 10use rustc_hash::FxHashMap;
11use tt::buffer::{Cursor, TokenBuffer}; 11use tt::buffer::{Cursor, TokenBuffer};
@@ -99,11 +99,11 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
99 99
100 let mut conv = RawConvertor { 100 let mut conv = RawConvertor {
101 text, 101 text,
102 offset: TextUnit::default(), 102 offset: TextSize::default(),
103 inner: tokens.iter(), 103 inner: tokens.iter(),
104 id_alloc: TokenIdAlloc { 104 id_alloc: TokenIdAlloc {
105 map: Default::default(), 105 map: Default::default(),
106 global_offset: TextUnit::default(), 106 global_offset: TextSize::default(),
107 next_id: 0, 107 next_id: 0,
108 }, 108 },
109 }; 109 };
@@ -227,7 +227,7 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
227 227
228struct TokenIdAlloc { 228struct TokenIdAlloc {
229 map: TokenMap, 229 map: TokenMap,
230 global_offset: TextUnit, 230 global_offset: TextSize,
231 next_id: u32, 231 next_id: u32,
232} 232}
233 233
@@ -266,7 +266,7 @@ impl TokenIdAlloc {
266/// A Raw Token (straightly from lexer) convertor 266/// A Raw Token (straightly from lexer) convertor
267struct RawConvertor<'a> { 267struct RawConvertor<'a> {
268 text: &'a str, 268 text: &'a str,
269 offset: TextUnit, 269 offset: TextSize,
270 id_alloc: TokenIdAlloc, 270 id_alloc: TokenIdAlloc,
271 inner: std::slice::Iter<'a, RawToken>, 271 inner: std::slice::Iter<'a, RawToken>,
272} 272}
@@ -314,7 +314,7 @@ trait TokenConvertor {
314 } 314 }
315 315
316 result.push(if k.is_punct() { 316 result.push(if k.is_punct() {
317 assert_eq!(range.len().to_usize(), 1); 317 assert_eq!(range.len(), TextSize::of('.'));
318 let delim = match k { 318 let delim = match k {
319 T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), 319 T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
320 T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), 320 T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
@@ -381,8 +381,8 @@ trait TokenConvertor {
381 k if k.is_keyword() => make_leaf!(Ident), 381 k if k.is_keyword() => make_leaf!(Ident),
382 k if k.is_literal() => make_leaf!(Literal), 382 k if k.is_literal() => make_leaf!(Literal),
383 LIFETIME => { 383 LIFETIME => {
384 let char_unit = TextUnit::from_usize(1); 384 let char_unit = TextSize::of('\'');
385 let r = TextRange::offset_len(range.start(), char_unit); 385 let r = TextRange::at(range.start(), char_unit);
386 let apostrophe = tt::Leaf::from(tt::Punct { 386 let apostrophe = tt::Leaf::from(tt::Punct {
387 char: '\'', 387 char: '\'',
388 spacing: tt::Spacing::Joint, 388 spacing: tt::Spacing::Joint,
@@ -390,8 +390,7 @@ trait TokenConvertor {
390 }); 390 });
391 result.push(apostrophe.into()); 391 result.push(apostrophe.into());
392 392
393 let r = 393 let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
394 TextRange::offset_len(range.start() + char_unit, range.len() - char_unit);
395 let ident = tt::Leaf::from(tt::Ident { 394 let ident = tt::Leaf::from(tt::Ident {
396 text: SmolStr::new(&token.to_text()[1..]), 395 text: SmolStr::new(&token.to_text()[1..]),
397 id: self.id_alloc().alloc(r), 396 id: self.id_alloc().alloc(r),
@@ -440,7 +439,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
440 439
441 fn bump(&mut self) -> Option<(Self::Token, TextRange)> { 440 fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
442 let token = self.inner.next()?; 441 let token = self.inner.next()?;
443 let range = TextRange::offset_len(self.offset, token.len); 442 let range = TextRange::at(self.offset, token.len);
444 self.offset += token.len; 443 self.offset += token.len;
445 444
446 Some(((*token, &self.text[range]), range)) 445 Some(((*token, &self.text[range]), range))
@@ -450,7 +449,7 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
450 let token = self.inner.as_slice().get(0).cloned(); 449 let token = self.inner.as_slice().get(0).cloned();
451 450
452 token.map(|it| { 451 token.map(|it| {
453 let range = TextRange::offset_len(self.offset, it.len); 452 let range = TextRange::at(self.offset, it.len);
454 (it, &self.text[range]) 453 (it, &self.text[range])
455 }) 454 })
456 } 455 }
@@ -464,11 +463,11 @@ struct Convertor {
464 id_alloc: TokenIdAlloc, 463 id_alloc: TokenIdAlloc,
465 current: Option<SyntaxToken>, 464 current: Option<SyntaxToken>,
466 range: TextRange, 465 range: TextRange,
467 punct_offset: Option<(SyntaxToken, TextUnit)>, 466 punct_offset: Option<(SyntaxToken, TextSize)>,
468} 467}
469 468
470impl Convertor { 469impl Convertor {
471 fn new(node: &SyntaxNode, global_offset: TextUnit) -> Convertor { 470 fn new(node: &SyntaxNode, global_offset: TextSize) -> Convertor {
472 Convertor { 471 Convertor {
473 id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } }, 472 id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
474 current: node.first_token(), 473 current: node.first_token(),
@@ -481,7 +480,7 @@ impl Convertor {
481#[derive(Debug)] 480#[derive(Debug)]
482enum SynToken { 481enum SynToken {
483 Ordiniary(SyntaxToken), 482 Ordiniary(SyntaxToken),
484 Punch(SyntaxToken, TextUnit), 483 Punch(SyntaxToken, TextSize),
485} 484}
486 485
487impl SynToken { 486impl SynToken {
@@ -500,7 +499,7 @@ impl SrcToken for SynToken {
500 fn to_char(&self) -> Option<char> { 499 fn to_char(&self) -> Option<char> {
501 match self { 500 match self {
502 SynToken::Ordiniary(_) => None, 501 SynToken::Ordiniary(_) => None,
503 SynToken::Punch(it, i) => it.text().chars().nth(i.to_usize()), 502 SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
504 } 503 }
505 } 504 }
506 fn to_text(&self) -> SmolStr { 505 fn to_text(&self) -> SmolStr {
@@ -516,26 +515,26 @@ impl TokenConvertor for Convertor {
516 515
517 fn bump(&mut self) -> Option<(Self::Token, TextRange)> { 516 fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
518 if let Some((punct, offset)) = self.punct_offset.clone() { 517 if let Some((punct, offset)) = self.punct_offset.clone() {
519 if offset.to_usize() + 1 < punct.text().len() { 518 if usize::from(offset) + 1 < punct.text().len() {
520 let offset = offset + TextUnit::from_usize(1); 519 let offset = offset + TextSize::from_usize(1);
521 let range = punct.text_range(); 520 let range = punct.text_range();
522 self.punct_offset = Some((punct.clone(), offset)); 521 self.punct_offset = Some((punct.clone(), offset));
523 let range = TextRange::offset_len(range.start() + offset, TextUnit::from_usize(1)); 522 let range = TextRange::at(range.start() + offset, TextSize::of('.'));
524 return Some((SynToken::Punch(punct, offset), range)); 523 return Some((SynToken::Punch(punct, offset), range));
525 } 524 }
526 } 525 }
527 526
528 let curr = self.current.clone()?; 527 let curr = self.current.clone()?;
529 if !curr.text_range().is_subrange(&self.range) { 528 if !&self.range.contains_range(curr.text_range()) {
530 return None; 529 return None;
531 } 530 }
532 self.current = curr.next_token(); 531 self.current = curr.next_token();
533 532
534 let token = if curr.kind().is_punct() { 533 let token = if curr.kind().is_punct() {
535 let range = curr.text_range(); 534 let range = curr.text_range();
536 let range = TextRange::offset_len(range.start(), TextUnit::from_usize(1)); 535 let range = TextRange::at(range.start(), TextSize::from_usize(1));
537 self.punct_offset = Some((curr.clone(), TextUnit::from_usize(0))); 536 self.punct_offset = Some((curr.clone(), TextSize::from_usize(0)));
538 (SynToken::Punch(curr, TextUnit::from_usize(0)), range) 537 (SynToken::Punch(curr, TextSize::from_usize(0)), range)
539 } else { 538 } else {
540 self.punct_offset = None; 539 self.punct_offset = None;
541 let range = curr.text_range(); 540 let range = curr.text_range();
@@ -547,19 +546,19 @@ impl TokenConvertor for Convertor {
547 546
548 fn peek(&self) -> Option<Self::Token> { 547 fn peek(&self) -> Option<Self::Token> {
549 if let Some((punct, mut offset)) = self.punct_offset.clone() { 548 if let Some((punct, mut offset)) = self.punct_offset.clone() {
550 offset = offset + TextUnit::from_usize(1); 549 offset = offset + TextSize::from_usize(1);
551 if offset.to_usize() < punct.text().len() { 550 if usize::from(offset) < punct.text().len() {
552 return Some(SynToken::Punch(punct, offset)); 551 return Some(SynToken::Punch(punct, offset));
553 } 552 }
554 } 553 }
555 554
556 let curr = self.current.clone()?; 555 let curr = self.current.clone()?;
557 if !curr.text_range().is_subrange(&self.range) { 556 if !self.range.contains_range(curr.text_range()) {
558 return None; 557 return None;
559 } 558 }
560 559
561 let token = if curr.kind().is_punct() { 560 let token = if curr.kind().is_punct() {
562 SynToken::Punch(curr, TextUnit::from_usize(0)) 561 SynToken::Punch(curr, TextSize::from_usize(0))
563 } else { 562 } else {
564 SynToken::Ordiniary(curr) 563 SynToken::Ordiniary(curr)
565 }; 564 };
@@ -574,8 +573,8 @@ impl TokenConvertor for Convertor {
574struct TtTreeSink<'a> { 573struct TtTreeSink<'a> {
575 buf: String, 574 buf: String,
576 cursor: Cursor<'a>, 575 cursor: Cursor<'a>,
577 open_delims: FxHashMap<tt::TokenId, TextUnit>, 576 open_delims: FxHashMap<tt::TokenId, TextSize>,
578 text_pos: TextUnit, 577 text_pos: TextSize,
579 inner: SyntaxTreeBuilder, 578 inner: SyntaxTreeBuilder,
580 token_map: TokenMap, 579 token_map: TokenMap,
581 580
@@ -641,7 +640,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
641 } 640 }
642 tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id), 641 tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
643 }; 642 };
644 let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text)); 643 let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
645 self.token_map.insert(id, range); 644 self.token_map.insert(id, range);
646 self.cursor = self.cursor.bump(); 645 self.cursor = self.cursor.bump();
647 text 646 text
@@ -658,10 +657,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
658 self.cursor = self.cursor.bump(); 657 self.cursor = self.cursor.bump();
659 if let Some(id) = parent.delimiter.map(|it| it.id) { 658 if let Some(id) = parent.delimiter.map(|it| it.id) {
660 if let Some(open_delim) = self.open_delims.get(&id) { 659 if let Some(open_delim) = self.open_delims.get(&id) {
661 let open_range = 660 let open_range = TextRange::at(*open_delim, TextSize::of('('));
662 TextRange::offset_len(*open_delim, TextUnit::from_usize(1)); 661 let close_range = TextRange::at(self.text_pos, TextSize::of('('));
663 let close_range =
664 TextRange::offset_len(self.text_pos, TextUnit::from_usize(1));
665 self.token_map.insert_delim(id, open_range, close_range); 662 self.token_map.insert_delim(id, open_range, close_range);
666 } 663 }
667 } 664 }
@@ -672,7 +669,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
672 } 669 }
673 }; 670 };
674 self.buf += &text; 671 self.buf += &text;
675 self.text_pos += TextUnit::of_str(&text); 672 self.text_pos += TextSize::of(text.as_str());
676 } 673 }
677 674
678 let text = SmolStr::new(self.buf.as_str()); 675 let text = SmolStr::new(self.buf.as_str());
@@ -690,7 +687,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
690 // other parts of RA such that we don't add whitespace here. 687 // other parts of RA such that we don't add whitespace here.
691 if curr.spacing == tt::Spacing::Alone && curr.char != ';' { 688 if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
692 self.inner.token(WHITESPACE, " ".into()); 689 self.inner.token(WHITESPACE, " ".into());
693 self.text_pos += TextUnit::of_char(' '); 690 self.text_pos += TextSize::of(' ');
694 } 691 }
695 } 692 }
696 } 693 }