author     Aleksey Kladov <[email protected]>   2019-02-23 13:07:29 +0000
committer  Aleksey Kladov <[email protected]>   2019-02-23 13:07:29 +0000
commit     0c1cb981820c55127c3c09d93868814a1df98246 (patch)
tree       edb870567bf099c5fa5edc5b89bcab410a34e5e0 /crates/ra_syntax/src/parsing
parent     e5fb33a94618d45051192d99e859bdd44c3daa36 (diff)
rename
Diffstat (limited to 'crates/ra_syntax/src/parsing')
-rw-r--r-- | crates/ra_syntax/src/parsing/reparsing.rs | 8
-rw-r--r-- | crates/ra_syntax/src/parsing/text_token_source.rs (renamed from crates/ra_syntax/src/parsing/input.rs) | 10
-rw-r--r-- | crates/ra_syntax/src/parsing/text_tree_sink.rs (renamed from crates/ra_syntax/src/parsing/builder.rs) | 12
3 files changed, 15 insertions, 15 deletions
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs
index 19d8adcfb..ba77a3b6c 100644
--- a/crates/ra_syntax/src/parsing/reparsing.rs
+++ b/crates/ra_syntax/src/parsing/reparsing.rs
@@ -14,8 +14,8 @@ use crate::{
     algo,
     syntax_node::{GreenNode, SyntaxNode},
     parsing::{
-        input::ParserInput,
-        builder::TreeBuilder,
+        text_token_source::TextTokenSource,
+        text_tree_sink::TextTreeSink,
         lexer::{tokenize, Token},
     }
 };
@@ -68,8 +68,8 @@ fn reparse_block<'node>(
     if !is_balanced(&tokens) {
         return None;
     }
-    let token_source = ParserInput::new(&text, &tokens);
-    let mut tree_sink = TreeBuilder::new(&text, &tokens);
+    let token_source = TextTokenSource::new(&text, &tokens);
+    let mut tree_sink = TextTreeSink::new(&text, &tokens);
     reparser.parse(&token_source, &mut tree_sink);
     let (green, new_errors) = tree_sink.finish();
     Some((node, green, new_errors))
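
For orientation, the hunk above already contains the whole reparse flow; the sketch below only restates it with comments. The wrapper function name and the exact `Reparser`, `GreenNode`, and `SyntaxError` signatures are assumptions for illustration, not part of this commit.

```rust
// Minimal sketch of the reparse flow after the rename, based on the hunk above.
// `Reparser`, `GreenNode` and `SyntaxError` are assumed from the surrounding
// crate; only the TextTokenSource/TextTreeSink calls appear in this diff.
fn reparse_sketch(
    text: &str,
    tokens: &[Token],
    reparser: Reparser,
) -> (GreenNode, Vec<SyntaxError>) {
    // Wrap the lexed tokens into the parser-facing token source...
    let token_source = TextTokenSource::new(text, tokens);
    // ...and into the sink that rebuilds the tree and re-attaches trivia.
    let mut tree_sink = TextTreeSink::new(text, tokens);
    reparser.parse(&token_source, &mut tree_sink);
    // The sink hands back the new green node plus any collected syntax errors.
    tree_sink.finish()
}
```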
diff --git a/crates/ra_syntax/src/parsing/input.rs b/crates/ra_syntax/src/parsing/text_token_source.rs
index 31c6a3b9b..a6277f66f 100644
--- a/crates/ra_syntax/src/parsing/input.rs
+++ b/crates/ra_syntax/src/parsing/text_token_source.rs
@@ -5,7 +5,7 @@ use crate::{
     parsing::lexer::Token,
 };
 
-pub(crate) struct ParserInput<'t> {
+pub(crate) struct TextTokenSource<'t> {
     text: &'t str,
     /// start position of each token(expect whitespace and comment)
     /// ```non-rust
@@ -25,7 +25,7 @@ pub(crate) struct ParserInput<'t> {
     tokens: Vec<Token>,
 }
 
-impl<'t> TokenSource for ParserInput<'t> {
+impl<'t> TokenSource for TextTokenSource<'t> {
     fn token_kind(&self, pos: usize) -> SyntaxKind {
         if !(pos < self.tokens.len()) {
             return EOF;
@@ -48,9 +48,9 @@ impl<'t> TokenSource for ParserInput<'t> {
     }
 }
 
-impl<'t> ParserInput<'t> {
+impl<'t> TextTokenSource<'t> {
     /// Generate input from tokens(expect comment and whitespace).
-    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> ParserInput<'t> {
+    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
         let mut tokens = Vec::new();
         let mut start_offsets = Vec::new();
         let mut len = 0.into();
@@ -62,6 +62,6 @@ impl<'t> ParserInput<'t> {
             len += token.len;
         }
 
-        ParserInput { text, start_offsets, tokens }
+        TextTokenSource { text, start_offsets, tokens }
     }
 }
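
As a usage note, the renamed token source filters trivia in `new` and answers `token_kind` queries by position, falling back to `EOF` past the end (both visible in the hunks above). A hypothetical, minimal in-crate sketch, assuming `tokenize` from the crate's lexer module and the `TokenSource` trait plus `EOF` in scope:

```rust
// Hypothetical usage of TextTokenSource, restricted to calls visible in this
// diff; `tokenize` and the bare `EOF` import are assumptions.
fn token_source_sketch(text: &str) {
    let tokens: Vec<Token> = tokenize(text);          // raw tokens, trivia included
    let source = TextTokenSource::new(text, &tokens); // trivia filtered out in `new`
    let first = source.token_kind(0);                 // kind of the first significant token
    let past_end = source.token_kind(tokens.len());   // out of range -> EOF, not a panic
    assert_eq!(past_end, EOF);
    let _ = first;
}
```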
diff --git a/crates/ra_syntax/src/parsing/builder.rs b/crates/ra_syntax/src/parsing/text_tree_sink.rs
index cfe3139b8..8c1d78deb 100644
--- a/crates/ra_syntax/src/parsing/builder.rs
+++ b/crates/ra_syntax/src/parsing/text_tree_sink.rs
@@ -12,8 +12,8 @@ use crate::{
 
 /// Bridges the parser with our specific syntax tree representation.
 ///
-/// `TreeBuilder` also handles attachment of trivia (whitespace) to nodes.
-pub(crate) struct TreeBuilder<'a> {
+/// `TextTreeSink` also handles attachment of trivia (whitespace) to nodes.
+pub(crate) struct TextTreeSink<'a> {
     text: &'a str,
     tokens: &'a [Token],
     text_pos: TextUnit,
@@ -29,7 +29,7 @@ enum State {
     PendingFinish,
 }
 
-impl<'a> TreeSink for TreeBuilder<'a> {
+impl<'a> TreeSink for TextTreeSink<'a> {
     fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
         match mem::replace(&mut self.state, State::Normal) {
             State::PendingStart => unreachable!(),
@@ -91,9 +91,9 @@ impl<'a> TreeSink for TreeBuilder<'a> {
     }
 }
 
-impl<'a> TreeBuilder<'a> {
-    pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> TreeBuilder<'a> {
-        TreeBuilder {
+impl<'a> TextTreeSink<'a> {
+    pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> TextTreeSink<'a> {
+        TextTreeSink {
             text,
             tokens,
             text_pos: 0.into(),