about summary refs log tree commit diff
path: root/crates/ra_syntax/src/parsing
diff options
context:
space:
mode:
authorEdwin Cheng <[email protected]>2019-05-25 13:31:53 +0100
committerEdwin Cheng <[email protected]>2019-05-25 13:41:03 +0100
commitfcb1eef3232c3fc673bf5f98595708e108c3950c (patch)
tree326a3961c5ca99cb50a12fb94f3398116af4a2f1 /crates/ra_syntax/src/parsing
parentef00b5af1c7a7a7cac685eff661a10252825d84a (diff)
Change TokenSource to iteration based
Diffstat (limited to 'crates/ra_syntax/src/parsing')
-rw-r--r--  crates/ra_syntax/src/parsing/reparsing.rs          |  4
-rw-r--r--  crates/ra_syntax/src/parsing/text_token_source.rs  | 47
2 files changed, 36 insertions, 15 deletions
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index 6de02a15a..3b6687f61 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -85,9 +85,9 @@ fn reparse_block<'node>(
85 if !is_balanced(&tokens) { 85 if !is_balanced(&tokens) {
86 return None; 86 return None;
87 } 87 }
88 let token_source = TextTokenSource::new(&text, &tokens); 88 let mut token_source = TextTokenSource::new(&text, &tokens);
89 let mut tree_sink = TextTreeSink::new(&text, &tokens); 89 let mut tree_sink = TextTreeSink::new(&text, &tokens);
90 reparser.parse(&token_source, &mut tree_sink); 90 reparser.parse(&mut token_source, &mut tree_sink);
91 let (green, new_errors) = tree_sink.finish(); 91 let (green, new_errors) = tree_sink.finish();
92 Some((node.replace_with(green), new_errors, node.range())) 92 Some((node.replace_with(green), new_errors, node.range()))
93} 93}
diff --git a/crates/ra_syntax/src/parsing/text_token_source.rs b/crates/ra_syntax/src/parsing/text_token_source.rs
index a6277f66f..71d2947f7 100644
--- a/crates/ra_syntax/src/parsing/text_token_source.rs
+++ b/crates/ra_syntax/src/parsing/text_token_source.rs
@@ -1,7 +1,8 @@
1use ra_parser::TokenSource; 1use ra_parser::TokenSource;
2use ra_parser::Token as PToken;
2 3
3use crate::{ 4use crate::{
4 SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit, 5 SyntaxKind::EOF, TextRange, TextUnit,
5 parsing::lexer::Token, 6 parsing::lexer::Token,
6}; 7};
7 8
@@ -23,31 +24,50 @@ pub(crate) struct TextTokenSource<'t> {
23 /// ``` 24 /// ```
24 /// tokens: `[struct, Foo, {, }]` 25 /// tokens: `[struct, Foo, {, }]`
25 tokens: Vec<Token>, 26 tokens: Vec<Token>,
27
28 /// Current token and position
29 curr: (PToken, usize),
26} 30}
27 31
28impl<'t> TokenSource for TextTokenSource<'t> { 32impl<'t> TokenSource for TextTokenSource<'t> {
29 fn token_kind(&self, pos: usize) -> SyntaxKind { 33 fn current(&self) -> PToken {
30 if !(pos < self.tokens.len()) { 34 return self.curr.0;
31 return EOF;
32 }
33 self.tokens[pos].kind
34 } 35 }
35 fn is_token_joint_to_next(&self, pos: usize) -> bool { 36
36 if !(pos + 1 < self.tokens.len()) { 37 fn lookahead_nth(&self, n: usize) -> PToken {
37 return true; 38 mk_token(self.curr.1 + n, &self.start_offsets, &self.tokens)
39 }
40
41 fn bump(&mut self) {
42 if self.curr.0.kind == EOF {
43 return;
38 } 44 }
39 self.start_offsets[pos] + self.tokens[pos].len == self.start_offsets[pos + 1] 45
46 let pos = self.curr.1 + 1;
47 self.curr = (mk_token(pos, &self.start_offsets, &self.tokens), pos);
40 } 48 }
41 fn is_keyword(&self, pos: usize, kw: &str) -> bool { 49
50 fn is_keyword(&self, kw: &str) -> bool {
51 let pos = self.curr.1;
42 if !(pos < self.tokens.len()) { 52 if !(pos < self.tokens.len()) {
43 return false; 53 return false;
44 } 54 }
45 let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len); 55 let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
46
47 self.text[range] == *kw 56 self.text[range] == *kw
48 } 57 }
49} 58}
50 59
60fn mk_token(pos: usize, start_offsets: &[TextUnit], tokens: &[Token]) -> PToken {
61 let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF);
62 let is_jointed_to_next = if pos + 1 < start_offsets.len() {
63 start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1]
64 } else {
65 false
66 };
67
68 PToken { kind, is_jointed_to_next }
69}
70
51impl<'t> TextTokenSource<'t> { 71impl<'t> TextTokenSource<'t> {
52 /// Generate input from tokens(expect comment and whitespace). 72 /// Generate input from tokens(expect comment and whitespace).
53 pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { 73 pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
@@ -62,6 +82,7 @@ impl<'t> TextTokenSource<'t> {
62 len += token.len; 82 len += token.len;
63 } 83 }
64 84
65 TextTokenSource { text, start_offsets, tokens } 85 let first = mk_token(0, &start_offsets, &tokens);
86 TextTokenSource { text, start_offsets, tokens, curr: (first, 0) }
66 } 87 }
67} 88}