author     Edwin Cheng <[email protected]>    2021-01-19 11:26:53 +0000
committer  Edwin Cheng <[email protected]>    2021-01-19 11:26:53 +0000
commit     7b3e960d9465d6ea4bc051336456ad285779b8f5 (patch)
tree       cdedef9efe798bce9e997bf96863c862cd2afb93 /crates/mbe/src/syntax_bridge.rs
parent     8935106b395edf3a923b1be6f039a115a9f67144 (diff)
Reduce TokenMap size
Diffstat (limited to 'crates/mbe/src/syntax_bridge.rs')
-rw-r--r--  crates/mbe/src/syntax_bridge.rs  26
1 file changed, 16 insertions(+), 10 deletions(-)
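
The commit shrinks every TokenMap entry by making TokenTextRange::Delimiter carry a single TextRange covering both delimiters, rather than separate open and close ranges. Below is a minimal, self-contained sketch of the size effect, using a hypothetical stand-in Range type in place of text-size's TextRange (two u32 offsets); the exact byte counts depend on layout, only the ordering matters:

    // Sketch only: `Range` stands in for text_size::TextRange (two u32 offsets).
    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    struct Range {
        start: u32,
        end: u32,
    }

    // Before this commit: the Delimiter variant carried the open and close ranges.
    #[allow(dead_code)]
    enum Before {
        Token(Range),
        Delimiter(Range, Range),
    }

    // After this commit: a single range covering both delimiters.
    #[allow(dead_code)]
    enum After {
        Token(Range),
        Delimiter(Range),
    }

    fn main() {
        println!("before: {} bytes per range", std::mem::size_of::<Before>());
        println!("after:  {} bytes per range", std::mem::size_of::<After>());
        assert!(std::mem::size_of::<After>() < std::mem::size_of::<Before>());
    }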
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 2a41d8167..51002e7b8 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -16,16 +16,18 @@ use crate::ExpandError;
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum TokenTextRange {
     Token(TextRange),
-    Delimiter(TextRange, TextRange),
+    Delimiter(TextRange),
 }
 
 impl TokenTextRange {
     pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
         match self {
             TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(open, close) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(open),
-                T!['}'] | T![')'] | T![']'] => Some(close),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
                 _ => None,
             },
         }
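
Because every delimiter token is exactly one byte, by_kind can rebuild the open and close ranges from the covering range alone: the open delimiter is the first byte of the cover, the close delimiter the last. A quick sketch of that reconstruction, assuming the text-size crate and made-up offsets:

    use text_size::{TextRange, TextSize};

    fn main() {
        // Made-up group "{ foo }" starting at offset 10: the cover spans both braces.
        let cover = TextRange::new(TextSize::from(10), TextSize::from(17));

        // Open delimiter: the first byte of the cover.
        let open = TextRange::at(cover.start(), 1.into());
        // Close delimiter: the last byte (all delimiters are one byte wide).
        let close = TextRange::at(cover.end() - TextSize::of('}'), 1.into());

        assert_eq!(open, TextRange::new(TextSize::from(10), TextSize::from(11)));
        assert_eq!(close, TextRange::new(TextSize::from(16), TextSize::from(17)));
    }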
@@ -114,8 +116,10 @@ impl TokenMap
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
             TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(open, close) => {
-                *open == relative_range || *close == relative_range
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
             }
         })?;
         Some(token_id)
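
The same reconstruction happens inside token_by_range, so a lookup by either delimiter's range still matches the stored entry. A small sketch with a hypothetical delimiter_matches helper mirroring the new Delimiter arm (again assuming the text-size crate):

    use text_size::{TextRange, TextSize};

    // Hypothetical helper mirroring the new Delimiter arm in token_by_range.
    fn delimiter_matches(cover: TextRange, relative_range: TextRange) -> bool {
        let open = TextRange::at(cover.start(), 1.into());
        let close = TextRange::at(cover.end() - TextSize::of('}'), 1.into());
        open == relative_range || close == relative_range
    }

    fn main() {
        // Made-up offsets for a group spanning 4..12.
        let cover = TextRange::new(4.into(), 12.into());
        assert!(delimiter_matches(cover, TextRange::new(4.into(), 5.into())));   // open
        assert!(delimiter_matches(cover, TextRange::new(11.into(), 12.into()))); // close
        assert!(!delimiter_matches(cover, TextRange::new(5.into(), 11.into()))); // interior
    }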
@@ -137,15 +141,17 @@ impl TokenMap
         close_relative_range: TextRange,
     ) -> usize {
         let res = self.entries.len();
-        self.entries
-            .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
         res
     }
 
     fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
         let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim, _) = token_text_range {
-            *token_text_range = TokenTextRange::Delimiter(*dim, close_relative_range);
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
         }
     }
 
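
On the write side, insert_delimiter folds the open and close ranges into one covering range with TextRange::cover, and update_close_delim widens the stored cover when the close range is supplied later. A minimal sketch of that merging, assuming the text-size crate and made-up offsets:

    use text_size::{TextRange, TextSize};

    fn main() {
        // insert_delimiter: both ranges are known up front; store only their cover.
        let open_relative_range = TextRange::new(TextSize::from(0), TextSize::from(1));
        let close_relative_range = TextRange::new(TextSize::from(7), TextSize::from(8));
        let cover = open_relative_range.cover(close_relative_range);
        assert_eq!(cover, TextRange::new(TextSize::from(0), TextSize::from(8)));

        // update_close_delim: widening with a later close range is another cover();
        // the start (the open delimiter) stays put, only the end moves.
        let later_close = TextRange::new(TextSize::from(11), TextSize::from(12));
        let updated = cover.cover(later_close);
        assert_eq!(updated, TextRange::new(TextSize::from(0), TextSize::from(12)));
    }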