From fcb1eef3232c3fc673bf5f98595708e108c3950c Mon Sep 17 00:00:00 2001
From: Edwin Cheng
Date: Sat, 25 May 2019 20:31:53 +0800
Subject: Change TokenSource to iteration based

---
 crates/ra_syntax/src/parsing.rs                   |  4 +-
 crates/ra_syntax/src/parsing/reparsing.rs         |  4 +-
 crates/ra_syntax/src/parsing/text_token_source.rs | 47 ++++++++++++++++-------
 3 files changed, 38 insertions(+), 17 deletions(-)

(limited to 'crates/ra_syntax')

diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs
index 15d69c5ab..4c1fa6c4f 100644
--- a/crates/ra_syntax/src/parsing.rs
+++ b/crates/ra_syntax/src/parsing.rs
@@ -17,8 +17,8 @@ pub(crate) use self::reparsing::incremental_reparse;
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let tokens = tokenize(&text);
-    let token_source = text_token_source::TextTokenSource::new(text, &tokens);
+    let mut token_source = text_token_source::TextTokenSource::new(text, &tokens);
     let mut tree_sink = text_tree_sink::TextTreeSink::new(text, &tokens);
-    ra_parser::parse(&token_source, &mut tree_sink);
+    ra_parser::parse(&mut token_source, &mut tree_sink);
     tree_sink.finish()
 }
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index 6de02a15a..3b6687f61 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -85,9 +85,9 @@ fn reparse_block<'node>(
     if !is_balanced(&tokens) {
         return None;
     }
-    let token_source = TextTokenSource::new(&text, &tokens);
+    let mut token_source = TextTokenSource::new(&text, &tokens);
     let mut tree_sink = TextTreeSink::new(&text, &tokens);
-    reparser.parse(&token_source, &mut tree_sink);
+    reparser.parse(&mut token_source, &mut tree_sink);
     let (green, new_errors) = tree_sink.finish();
     Some((node.replace_with(green), new_errors, node.range()))
 }
diff --git a/crates/ra_syntax/src/parsing/text_token_source.rs b/crates/ra_syntax/src/parsing/text_token_source.rs
index a6277f66f..71d2947f7 100644
--- a/crates/ra_syntax/src/parsing/text_token_source.rs
+++ b/crates/ra_syntax/src/parsing/text_token_source.rs
@@ -1,7 +1,8 @@
 use ra_parser::TokenSource;
+use ra_parser::Token as PToken;
 
 use crate::{
-    SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
+    SyntaxKind::EOF, TextRange, TextUnit,
     parsing::lexer::Token,
 };
 
@@ -23,31 +24,50 @@ pub(crate) struct TextTokenSource<'t> {
     /// ```
     /// tokens: `[struct, Foo, {, }]`
     tokens: Vec<Token>,
+
+    /// Current token and position
+    curr: (PToken, usize),
 }
 
 impl<'t> TokenSource for TextTokenSource<'t> {
-    fn token_kind(&self, pos: usize) -> SyntaxKind {
-        if !(pos < self.tokens.len()) {
-            return EOF;
-        }
-        self.tokens[pos].kind
+    fn current(&self) -> PToken {
+        return self.curr.0;
     }
-    fn is_token_joint_to_next(&self, pos: usize) -> bool {
-        if !(pos + 1 < self.tokens.len()) {
-            return true;
+
+    fn lookahead_nth(&self, n: usize) -> PToken {
+        mk_token(self.curr.1 + n, &self.start_offsets, &self.tokens)
+    }
+
+    fn bump(&mut self) {
+        if self.curr.0.kind == EOF {
+            return;
         }
-        self.start_offsets[pos] + self.tokens[pos].len == self.start_offsets[pos + 1]
+
+        let pos = self.curr.1 + 1;
+        self.curr = (mk_token(pos, &self.start_offsets, &self.tokens), pos);
     }
-    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
+
+    fn is_keyword(&self, kw: &str) -> bool {
+        let pos = self.curr.1;
         if !(pos < self.tokens.len()) {
            return false;
        }
        let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
-
        self.text[range] == *kw
    }
 }
 
+fn mk_token(pos: usize, start_offsets: &[TextUnit], tokens: &[Token]) -> PToken {
+    let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF);
+    let is_jointed_to_next = if pos + 1 < start_offsets.len() {
+        start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1]
+    } else {
+        false
+    };
+
+    PToken { kind, is_jointed_to_next }
+}
+
 impl<'t> TextTokenSource<'t> {
     /// Generate input from tokens(expect comment and whitespace).
     pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
@@ -62,6 +82,7 @@ impl<'t> TextTokenSource<'t> {
             len += token.len;
         }
 
-        TextTokenSource { text, start_offsets, tokens }
+        let first = mk_token(0, &start_offsets, &tokens);
+        TextTokenSource { text, start_offsets, tokens, curr: (first, 0) }
     }
 }
-- 
cgit v1.2.3
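
Note on the new API: after this change the parser no longer queries a TokenSource by arbitrary index. It reads `current()`, peeks with `lookahead_nth(n)`, and advances with `bump()`, while each `Token` carries `is_jointed_to_next` so adjacent punctuation (e.g. `=` `=`) can be glued into one token. The real `TokenSource` trait and `Token` struct live in `ra_parser`, which is outside this path-filtered diff, so the sketch below only infers the cursor-style shape from the `impl` above. It uses made-up stand-in types (`MiniKind`, `MiniToken`, `VecTokenSource`) and deliberately omits `is_keyword` and text handling; it is an illustration, not rust-analyzer code.

    // Stand-in token kind and token; NOT the ra_parser types.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum MiniKind {
        Ident,
        Eq,
        EqEq, // what two jointed `=` tokens get glued into
        Eof,
    }

    #[derive(Clone, Copy, Debug)]
    struct MiniToken {
        kind: MiniKind,
        // `true` when no whitespace separates this token from the next,
        // mirroring `is_jointed_to_next` in the patch above.
        is_jointed_to_next: bool,
    }

    // Cursor-style source, inferred shape only (is_keyword omitted).
    trait MiniTokenSource {
        fn current(&self) -> MiniToken;
        fn lookahead_nth(&self, n: usize) -> MiniToken;
        fn bump(&mut self);
    }

    const EOF_TOKEN: MiniToken =
        MiniToken { kind: MiniKind::Eof, is_jointed_to_next: false };

    // Vector-backed source with an explicit cursor, analogous to the
    // `curr: (PToken, usize)` field added to `TextTokenSource`.
    struct VecTokenSource {
        tokens: Vec<MiniToken>,
        pos: usize,
    }

    impl MiniTokenSource for VecTokenSource {
        fn current(&self) -> MiniToken {
            self.lookahead_nth(0)
        }
        fn lookahead_nth(&self, n: usize) -> MiniToken {
            // Out-of-range lookahead yields EOF, like `mk_token` above.
            self.tokens.get(self.pos + n).copied().unwrap_or(EOF_TOKEN)
        }
        fn bump(&mut self) {
            if self.current().kind != MiniKind::Eof {
                self.pos += 1;
            }
        }
    }

    fn main() {
        // Token stream for `a == b`: the two `=` tokens are jointed.
        let mut src = VecTokenSource {
            tokens: vec![
                MiniToken { kind: MiniKind::Ident, is_jointed_to_next: false },
                MiniToken { kind: MiniKind::Eq, is_jointed_to_next: true },
                MiniToken { kind: MiniKind::Eq, is_jointed_to_next: false },
                MiniToken { kind: MiniKind::Ident, is_jointed_to_next: false },
            ],
            pos: 0,
        };

        // A consumer never passes a position around: it asks for `current()`,
        // peeks with `lookahead_nth(1)` to glue `= =` into `==`, and `bump()`s.
        while src.current().kind != MiniKind::Eof {
            let tok = src.current();
            if tok.kind == MiniKind::Eq
                && tok.is_jointed_to_next
                && src.lookahead_nth(1).kind == MiniKind::Eq
            {
                src.bump();
                src.bump();
                println!("{:?}", MiniKind::EqEq);
            } else {
                src.bump();
                println!("{:?}", tok.kind);
            }
        }
    }

Compared with the old index-based `token_kind(pos)` / `is_token_joint_to_next(pos)` calls, keeping the cursor inside the source means the parser threads no position of its own; lookahead and token gluing still work through `lookahead_nth` and `is_jointed_to_next`, which is what the `curr` field and `mk_token` helper in this patch provide for the text-backed source.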