Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_syntax/src/parsing.rs            | 23
-rw-r--r--  crates/ra_syntax/src/parsing/input.rs      | 24
-rw-r--r--  crates/ra_syntax/src/parsing/parser_api.rs | 30
3 files changed, 28 insertions, 49 deletions
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs
index f74c365d5..5de6ff8c1 100644
--- a/crates/ra_syntax/src/parsing.rs
+++ b/crates/ra_syntax/src/parsing.rs
@@ -69,24 +69,7 @@ trait TreeSink {
 ///
 /// Hopefully this will allow us to treat text and token trees in the same way!
 trait TokenSource {
-    fn token_kind(&self, pos: TokenPos) -> SyntaxKind;
-    fn is_token_joint_to_next(&self, pos: TokenPos) -> bool;
-    fn is_keyword(&self, pos: TokenPos, kw: &str) -> bool;
+    fn token_kind(&self, pos: usize) -> SyntaxKind;
+    fn is_token_joint_to_next(&self, pos: usize) -> bool;
+    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
 }
-
-#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Default)]
-pub(crate) struct TokenPos(pub u32);
-
-impl std::ops::Add<u32> for TokenPos {
-    type Output = TokenPos;
-
-    fn add(self, rhs: u32) -> TokenPos {
-        TokenPos(self.0 + rhs)
-    }
-}
-
-impl std::ops::AddAssign<u32> for TokenPos {
-    fn add_assign(&mut self, rhs: u32) {
-        self.0 += rhs
-    }
-}
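
With TokenPos gone, implementors of TokenSource take plain usize indices. As a rough sketch only (the MockTokenSource type and the stand-in Kind enum below are hypothetical, not types from ra_syntax), an implementation over a pre-lexed token slice could look like this:

// Rough sketch, for illustration only: MockTokenSource and Kind are hypothetical
// stand-ins, not part of ra_syntax.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum Kind { Ident, Whitespace, Eof }

trait TokenSource {
    fn token_kind(&self, pos: usize) -> Kind;
    fn is_token_joint_to_next(&self, pos: usize) -> bool;
    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
}

// A toy source over pre-lexed (kind, text) pairs; out-of-range positions read
// as Eof, mirroring how ParserInput returns EOF past the end.
struct MockTokenSource<'a> {
    tokens: &'a [(Kind, &'a str)],
}

impl<'a> TokenSource for MockTokenSource<'a> {
    fn token_kind(&self, pos: usize) -> Kind {
        self.tokens.get(pos).map_or(Kind::Eof, |t| t.0)
    }
    fn is_token_joint_to_next(&self, pos: usize) -> bool {
        // Toy definition of joint-ness: the next token exists and is not whitespace.
        match self.tokens.get(pos + 1) {
            Some(next) => next.0 != Kind::Whitespace,
            None => true,
        }
    }
    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
        self.tokens.get(pos).map_or(false, |t| t.1 == kw)
    }
}
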
diff --git a/crates/ra_syntax/src/parsing/input.rs b/crates/ra_syntax/src/parsing/input.rs
index 0f1810df5..96c03bb11 100644
--- a/crates/ra_syntax/src/parsing/input.rs
+++ b/crates/ra_syntax/src/parsing/input.rs
@@ -1,33 +1,29 @@
 use crate::{
     SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
     parsing::{
-        TokenPos, TokenSource,
+        TokenSource,
         lexer::Token,
     },
 };
 
 impl<'t> TokenSource for ParserInput<'t> {
-    fn token_kind(&self, pos: TokenPos) -> SyntaxKind {
-        let idx = pos.0 as usize;
-        if !(idx < self.tokens.len()) {
+    fn token_kind(&self, pos: usize) -> SyntaxKind {
+        if !(pos < self.tokens.len()) {
             return EOF;
         }
-        self.tokens[idx].kind
+        self.tokens[pos].kind
     }
-    fn is_token_joint_to_next(&self, pos: TokenPos) -> bool {
-        let idx_curr = pos.0 as usize;
-        let idx_next = pos.0 as usize + 1;
-        if !(idx_next < self.tokens.len()) {
+    fn is_token_joint_to_next(&self, pos: usize) -> bool {
+        if !(pos + 1 < self.tokens.len()) {
             return true;
         }
-        self.start_offsets[idx_curr] + self.tokens[idx_curr].len == self.start_offsets[idx_next]
+        self.start_offsets[pos] + self.tokens[pos].len == self.start_offsets[pos + 1]
     }
-    fn is_keyword(&self, pos: TokenPos, kw: &str) -> bool {
-        let idx = pos.0 as usize;
-        if !(idx < self.tokens.len()) {
+    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
+        if !(pos < self.tokens.len()) {
             return false;
         }
-        let range = TextRange::offset_len(self.start_offsets[idx], self.tokens[idx].len);
+        let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
 
         self.text[range] == *kw
     }
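
The joint-ness check above is pure offset arithmetic: two tokens are joint exactly when the first one ends where the next one starts, with no trivia between them. A minimal sketch of the same check, using plain u32 offsets instead of the crate's TextUnit (the function name here is illustrative):

// Same check as ParserInput::is_token_joint_to_next, but over plain u32 offsets.
fn is_joint(start_offsets: &[u32], lens: &[u32], pos: usize) -> bool {
    if pos + 1 >= start_offsets.len() {
        return true; // past-the-end positions are treated as joint, as in the diff
    }
    start_offsets[pos] + lens[pos] == start_offsets[pos + 1]
}

fn main() {
    // ">>" lexes as two `>` tokens at offsets 0 and 1: joint.
    assert!(is_joint(&[0, 1], &[1, 1], 0));
    // "> >" puts the second `>` at offset 2, leaving a gap for the space: not joint.
    assert!(!is_joint(&[0, 2], &[1, 1], 0));
}
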
diff --git a/crates/ra_syntax/src/parsing/parser_api.rs b/crates/ra_syntax/src/parsing/parser_api.rs
index 99f6183a4..988fcb518 100644
--- a/crates/ra_syntax/src/parsing/parser_api.rs
+++ b/crates/ra_syntax/src/parsing/parser_api.rs
@@ -6,7 +6,7 @@ use crate::{
     syntax_error::ParseError,
     SyntaxKind::{self, ERROR, EOF, TOMBSTONE},
     parsing::{
-        TokenSource, TokenPos,
+        TokenSource,
         token_set::TokenSet,
         event::Event,
     },
@@ -23,14 +23,14 @@ use crate::{
 /// finish expression". See `Event` docs for more.
 pub(crate) struct Parser<'t> {
     token_source: &'t dyn TokenSource,
-    pos: TokenPos,
+    token_pos: usize,
     events: Vec<Event>,
     steps: Cell<u32>,
 }
 
 impl<'t> Parser<'t> {
     pub(super) fn new(token_source: &'t dyn TokenSource) -> Parser<'t> {
-        Parser { token_source, pos: TokenPos::default(), events: Vec::new(), steps: Cell::new(0) }
+        Parser { token_source, token_pos: 0, events: Vec::new(), steps: Cell::new(0) }
     }
 
     pub(crate) fn finish(self) -> Vec<Event> {
@@ -49,9 +49,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `>>`.
     pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.pos);
-        let c2 = self.token_source.token_kind(self.pos + 1);
-        if self.token_source.is_token_joint_to_next(self.pos) {
+        let c1 = self.token_source.token_kind(self.token_pos);
+        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        if self.token_source.is_token_joint_to_next(self.token_pos) {
             Some((c1, c2))
         } else {
             None
@@ -63,11 +63,11 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `=>>`.
     pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.pos);
-        let c2 = self.token_source.token_kind(self.pos + 1);
-        let c3 = self.token_source.token_kind(self.pos + 2);
-        if self.token_source.is_token_joint_to_next(self.pos)
-            && self.token_source.is_token_joint_to_next(self.pos + 1)
+        let c1 = self.token_source.token_kind(self.token_pos);
+        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        let c3 = self.token_source.token_kind(self.token_pos + 2);
+        if self.token_source.is_token_joint_to_next(self.token_pos)
+            && self.token_source.is_token_joint_to_next(self.token_pos + 1)
         {
             Some((c1, c2, c3))
         } else {
@@ -77,11 +77,11 @@ impl<'t> Parser<'t> {
 
     /// Lookahead operation: returns the kind of the next nth
     /// token.
-    pub(crate) fn nth(&self, n: u32) -> SyntaxKind {
+    pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
         let steps = self.steps.get();
         assert!(steps <= 10_000_000, "the parser seems stuck");
         self.steps.set(steps + 1);
-        self.token_source.token_kind(self.pos + n)
+        self.token_source.token_kind(self.token_pos + n)
     }
 
     /// Checks if the current token is `kind`.
@@ -96,7 +96,7 @@ impl<'t> Parser<'t> {
 
     /// Checks if the current token is contextual keyword with text `t`.
     pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool {
-        self.token_source.is_keyword(self.pos, kw)
+        self.token_source.is_keyword(self.token_pos, kw)
     }
 
     /// Starts a new node in the syntax tree. All nodes and tokens
@@ -184,7 +184,7 @@ impl<'t> Parser<'t> {
     }
 
     fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
-        self.pos += u32::from(n_raw_tokens);
+        self.token_pos += usize::from(n_raw_tokens);
         self.push_event(Event::Token { kind, n_raw_tokens });
     }
 
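
Downstream, the only arithmetic the parser still performs on its position is the bump in do_bump, which now needs nothing beyond usize::from on the raw token count. A self-contained sketch of that bookkeeping (the Cursor type is illustrative, not the crate's Parser):

// Illustrative only: a stripped-down cursor with the same position bookkeeping
// as Parser after the change. token_pos is a plain usize.
struct Cursor {
    token_pos: usize,
}

impl Cursor {
    fn do_bump(&mut self, n_raw_tokens: u8) {
        // u8 -> usize is lossless, so usize::from suffices; the Add/AddAssign
        // impls that TokenPos carried are no longer needed.
        self.token_pos += usize::from(n_raw_tokens);
    }
}

fn main() {
    let mut c = Cursor { token_pos: 0 };
    c.do_bump(1); // an ordinary single-token bump
    c.do_bump(2); // a composite token such as `>>` built from two raw tokens
    assert_eq!(c.token_pos, 3);
}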