author     Jonas Schievink <[email protected]>  2021-05-24 17:43:42 +0100
committer  Jonas Schievink <[email protected]>  2021-05-24 17:43:42 +0100
commit     27bf62b70eeb6f4cb620be5630c4c4506be3539f
tree       4630dfb5f82b533f013a706aed14a31e8721a899
parent     01f8d40c5cb28854091d2081b7aa607ad9902930
Move `TokenMap` to its own file
-rw-r--r--   crates/mbe/src/lib.rs            4
-rw-r--r--   crates/mbe/src/syntax_bridge.rs  83
-rw-r--r--   crates/mbe/src/token_map.rs      83
3 files changed, 89 insertions, 81 deletions
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3af5bc18b..b7aa64713 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -14,6 +14,7 @@ mod tests;
 
 #[cfg(test)]
 mod benchmark;
+mod token_map;
 
 use std::fmt;
 
@@ -65,8 +66,9 @@ impl fmt::Display for ExpandError {
 
 pub use crate::syntax_bridge::{
     ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-    token_tree_to_syntax_node, TokenMap,
+    token_tree_to_syntax_node,
 };
+pub use crate::token_map::TokenMap;
 
 /// This struct contains AST for a single `macro_rules` definition. What might
 /// be very confusing is that AST has almost exactly the same shape as
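
The re-export keeps the public path stable: downstream crates continue to reach the type as `mbe::TokenMap`; only the defining module changes. A minimal caller-side sketch, assuming a hypothetical downstream helper (only `syntax_node_to_token_tree` and `token_by_range` come from this diff; everything else is illustrative):

    use mbe::TokenMap;
    use syntax::{SyntaxNode, TextRange};

    // Hypothetical helper: map a source range (relative to `node`) back to the
    // token id allocated during conversion. Name and inputs are made up.
    fn find_token_id(node: &SyntaxNode, relative_range: TextRange) -> Option<tt::TokenId> {
        let (_subtree, map): (tt::Subtree, TokenMap) = mbe::syntax_node_to_token_tree(node);
        map.token_by_range(relative_range)
    }
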
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b13168bd3..b11172caf 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,36 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::ExpandError;
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
+use crate::{ExpandError, TokenMap};
 
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum TokenTextRange {
-    Token(TextRange),
-    Delimiter(TextRange),
-}
-
-impl TokenTextRange {
-    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
-    }
-}
-
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-}
-
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset);
     let subtree = c.go();
-    c.id_alloc.map.entries.shrink_to_fit();
+    c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
 }
 
@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }
 
-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
-    }
-
-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        Some(range)
-    }
-
-    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
-    }
-
-    fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
-    }
-
-    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
-    }
-
-    fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
-    }
-}
-
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> {
     }
 
     fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
-        self.token_map.entries.shrink_to_fit();
+        self.token_map.shrink_to_fit();
        (self.inner.finish(), self.token_map)
     }
 }
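
Both `shrink_to_fit` hunks in syntax_bridge.rs are the same mechanical edit: `entries` is private to the new module, so the call sites go through a crate-visible wrapper instead of touching the field. A reduced sketch of that pattern, using a stand-in `Entry` type rather than the real `(tt::TokenId, TokenTextRange)` tuple:

    // Stand-in types; not code from this commit.
    pub struct Entry;

    #[derive(Default)]
    pub struct Map {
        // Private field: callers outside the defining module can no longer
        // write `map.entries.shrink_to_fit()` directly.
        entries: Vec<Entry>,
    }

    impl Map {
        // Crate-visible wrapper that the relocated `TokenMap` now exposes.
        pub(crate) fn shrink_to_fit(&mut self) {
            self.entries.shrink_to_fit();
        }
    }
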
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
new file mode 100644
index 000000000..58c9f5aa5
--- /dev/null
+++ b/crates/mbe/src/token_map.rs
@@ -0,0 +1,83 @@
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
+                _ => None,
+            },
+        }
+    }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+pub struct TokenMap {
+    /// Maps `tt::TokenId` to the *relative* source range.
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
+}
+
+impl TokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
+            }
+        })?;
+        Some(token_id)
+    }
+
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        Some(range)
+    }
+
+    pub(crate) fn shrink_to_fit(&mut self) {
+        self.entries.shrink_to_fit();
+    }
+
+    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    }
+
+    pub(crate) fn insert_delim(
+        &mut self,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
+    ) -> usize {
+        let res = self.entries.len();
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+        res
+    }
+
+    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
+        }
+    }
+
+    pub(crate) fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidentally quadratic
+        self.entries.remove(idx);
+    }
+}
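
For the `Delimiter` case, the stored range covers the whole group from the opening to the closing bracket, and `by_kind` slices out the one-character range of whichever side the caller asks for; `token_by_range` derives the same two slices when matching. A small sketch of that arithmetic with made-up offsets (nothing here is from the commit except the two expressions being demonstrated):

    use syntax::{TextRange, TextSize};

    fn delimiter_sides() {
        // Suppose a `{ ... }` group whose covering range, relative to the
        // macro input, is 10..20: `{` occupies 10..11 and `}` occupies 19..20.
        let cover = TextRange::new(TextSize::from(10), TextSize::from(20));

        // The two slices computed by `by_kind` / `token_by_range` above:
        let open = TextRange::at(cover.start(), 1.into());
        let close = TextRange::at(cover.end() - TextSize::of('}'), 1.into());

        assert_eq!(open, TextRange::new(10.into(), 11.into()));
        assert_eq!(close, TextRange::new(19.into(), 20.into()));
    }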