author | Jonas Schievink <[email protected]> | 2021-06-21 21:57:54 +0100 |
---|---|---|
committer | Jonas Schievink <[email protected]> | 2021-06-22 23:19:54 +0100 |
commit | 6504c3c32a9795173fc0c9c94befe7f5d0e7fe9e (patch) | |
tree | 4738998b71657a0f39f5be4724b0e34a81ea4138 /crates/mbe/src/syntax_bridge.rs | |
parent | 38da41ea6e58255223686104b3fbca27392d8162 (diff) |
Move subtree collection out of `TokenConvertor`
Diffstat (limited to 'crates/mbe/src/syntax_bridge.rs')
-rw-r--r-- | crates/mbe/src/syntax_bridge.rs | 248 |
1 file changed, 124 insertions, 124 deletions
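For orientation, the shape of the change is: the defaulted `go` and `collect_leaf` methods are removed from the `TokenConvertor` trait and replaced by a free `convert_tokens` function that is generic over the trait, so the syntax-node converter and the raw-text converter share a single implementation while the trait shrinks to its primitive operations (`bump`, `peek`, `id_alloc`, `convert_doc_comment`). The sketch below shows only that pattern; the types are toy stand-ins, not the crate's real `tt::Subtree`, `TokenMap`, or id-allocation machinery.

```rust
// Toy stand-ins for illustration only; the real trait and token types in
// `syntax_bridge.rs` are richer (ids, ranges, delimiters, doc comments).
#[derive(Debug)]
enum TokenTree {
    Leaf(char),
}

trait TokenConvertor {
    fn peek(&self) -> Option<char>;
    fn bump(&mut self) -> Option<char>;
}

// Before this commit the equivalent logic lived in a defaulted trait method
// (`go`); now it is a free function, generic over any converter.
fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> Vec<TokenTree> {
    let mut token_trees = Vec::new();
    while conv.peek().is_some() {
        if let Some(c) = conv.bump() {
            token_trees.push(TokenTree::Leaf(c));
        }
    }
    token_trees
}

// A converter over raw text, loosely analogous to the one behind
// `parse_to_token_tree`.
struct RawConvertor {
    chars: Vec<char>,
    pos: usize,
}

impl TokenConvertor for RawConvertor {
    fn peek(&self) -> Option<char> {
        self.chars.get(self.pos).copied()
    }
    fn bump(&mut self) -> Option<char> {
        let c = self.peek()?;
        self.pos += 1;
        Some(c)
    }
}

fn main() {
    let mut conv = RawConvertor { chars: "fn main".chars().collect(), pos: 0 };
    // Mirrors the new call sites in the diff: `let subtree = convert_tokens(&mut conv);`
    let tokens = convert_tokens(&mut conv);
    println!("{:?}", tokens);
}
```

Both call sites in the diff follow this pattern: `syntax_node_to_token_tree` and `parse_to_token_tree` construct their converter and then call `convert_tokens(&mut conv)` instead of `conv.go()`.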
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 7526bd8e6..adf5a56ec 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -24,7 +24,7 @@ pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> (tt::Subtree, TokenMap) {
24 | pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { | 24 | pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { |
25 | let global_offset = node.text_range().start(); | 25 | let global_offset = node.text_range().start(); |
26 | let mut c = Convertor::new(node, global_offset); | 26 | let mut c = Convertor::new(node, global_offset); |
27 | let subtree = c.go(); | 27 | let subtree = convert_tokens(&mut c); |
28 | c.id_alloc.map.shrink_to_fit(); | 28 | c.id_alloc.map.shrink_to_fit(); |
29 | (subtree, c.id_alloc.map) | 29 | (subtree, c.id_alloc.map) |
30 | } | 30 | } |
@@ -80,7 +80,7 @@ pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
80 | }, | 80 | }, |
81 | }; | 81 | }; |
82 | 82 | ||
83 | let subtree = conv.go(); | 83 | let subtree = convert_tokens(&mut conv); |
84 | Some((subtree, conv.id_alloc.map)) | 84 | Some((subtree, conv.id_alloc.map)) |
85 | } | 85 | } |
86 | 86 | ||
@@ -121,6 +121,128 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
121 | res | 121 | res |
122 | } | 122 | } |
123 | 123 | ||
124 | fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree { | ||
125 | let mut subtree = tt::Subtree { delimiter: None, ..Default::default() }; | ||
126 | while conv.peek().is_some() { | ||
127 | collect_leaf(conv, &mut subtree.token_trees); | ||
128 | } | ||
129 | if subtree.token_trees.len() == 1 { | ||
130 | if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] { | ||
131 | return first.clone(); | ||
132 | } | ||
133 | } | ||
134 | return subtree; | ||
135 | |||
136 | fn collect_leaf<C: TokenConvertor>(conv: &mut C, result: &mut Vec<tt::TokenTree>) { | ||
137 | let (token, range) = match conv.bump() { | ||
138 | None => return, | ||
139 | Some(it) => it, | ||
140 | }; | ||
141 | |||
142 | let k: SyntaxKind = token.kind(); | ||
143 | if k == COMMENT { | ||
144 | if let Some(tokens) = conv.convert_doc_comment(&token) { | ||
145 | result.extend(tokens); | ||
146 | } | ||
147 | return; | ||
148 | } | ||
149 | |||
150 | result.push(if k.is_punct() && k != UNDERSCORE { | ||
151 | assert_eq!(range.len(), TextSize::of('.')); | ||
152 | let delim = match k { | ||
153 | T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), | ||
154 | T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), | ||
155 | T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])), | ||
156 | _ => None, | ||
157 | }; | ||
158 | |||
159 | if let Some((kind, closed)) = delim { | ||
160 | let mut subtree = tt::Subtree::default(); | ||
161 | let (id, idx) = conv.id_alloc().open_delim(range); | ||
162 | subtree.delimiter = Some(tt::Delimiter { id, kind }); | ||
163 | |||
164 | while conv.peek().map_or(false, |it| it.kind() != closed) { | ||
165 | collect_leaf(conv, &mut subtree.token_trees); | ||
166 | } | ||
167 | let last_range = match conv.bump() { | ||
168 | None => { | ||
169 | // For error resilience, we insert a char punct for the opening delim here | ||
170 | conv.id_alloc().close_delim(idx, None); | ||
171 | let leaf: tt::Leaf = tt::Punct { | ||
172 | id: conv.id_alloc().alloc(range), | ||
173 | char: token.to_char().unwrap(), | ||
174 | spacing: tt::Spacing::Alone, | ||
175 | } | ||
176 | .into(); | ||
177 | result.push(leaf.into()); | ||
178 | result.extend(subtree.token_trees); | ||
179 | return; | ||
180 | } | ||
181 | Some(it) => it.1, | ||
182 | }; | ||
183 | conv.id_alloc().close_delim(idx, Some(last_range)); | ||
184 | subtree.into() | ||
185 | } else { | ||
186 | let spacing = match conv.peek() { | ||
187 | Some(next) | ||
188 | if next.kind().is_trivia() | ||
189 | || next.kind() == T!['['] | ||
190 | || next.kind() == T!['{'] | ||
191 | || next.kind() == T!['('] => | ||
192 | { | ||
193 | tt::Spacing::Alone | ||
194 | } | ||
195 | Some(next) if next.kind().is_punct() && next.kind() != UNDERSCORE => { | ||
196 | tt::Spacing::Joint | ||
197 | } | ||
198 | _ => tt::Spacing::Alone, | ||
199 | }; | ||
200 | let char = match token.to_char() { | ||
201 | Some(c) => c, | ||
202 | None => { | ||
203 | panic!("Token from lexer must be single char: token = {:#?}", token); | ||
204 | } | ||
205 | }; | ||
206 | tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into() | ||
207 | } | ||
208 | } else { | ||
209 | macro_rules! make_leaf { | ||
210 | ($i:ident) => { | ||
211 | tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text() }.into() | ||
212 | }; | ||
213 | } | ||
214 | let leaf: tt::Leaf = match k { | ||
215 | T![true] | T![false] => make_leaf!(Ident), | ||
216 | IDENT => make_leaf!(Ident), | ||
217 | UNDERSCORE => make_leaf!(Ident), | ||
218 | k if k.is_keyword() => make_leaf!(Ident), | ||
219 | k if k.is_literal() => make_leaf!(Literal), | ||
220 | LIFETIME_IDENT => { | ||
221 | let char_unit = TextSize::of('\''); | ||
222 | let r = TextRange::at(range.start(), char_unit); | ||
223 | let apostrophe = tt::Leaf::from(tt::Punct { | ||
224 | char: '\'', | ||
225 | spacing: tt::Spacing::Joint, | ||
226 | id: conv.id_alloc().alloc(r), | ||
227 | }); | ||
228 | result.push(apostrophe.into()); | ||
229 | |||
230 | let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); | ||
231 | let ident = tt::Leaf::from(tt::Ident { | ||
232 | text: SmolStr::new(&token.to_text()[1..]), | ||
233 | id: conv.id_alloc().alloc(r), | ||
234 | }); | ||
235 | result.push(ident.into()); | ||
236 | return; | ||
237 | } | ||
238 | _ => return, | ||
239 | }; | ||
240 | |||
241 | leaf.into() | ||
242 | }); | ||
243 | } | ||
244 | } | ||
245 | |||
124 | /// Returns the textual content of a doc comment block as a quoted string | 246 | /// Returns the textual content of a doc comment block as a quoted string |
125 | /// That is, strips leading `///` (or `/**`, etc) | 247 | /// That is, strips leading `///` (or `/**`, etc) |
126 | /// and strips the ending `*/` | 248 | /// and strips the ending `*/` |
@@ -242,128 +364,6 @@ trait SrcToken: std::fmt::Debug {
242 | trait TokenConvertor { | 364 | trait TokenConvertor { |
243 | type Token: SrcToken; | 365 | type Token: SrcToken; |
244 | 366 | ||
245 | fn go(&mut self) -> tt::Subtree { | ||
246 | let mut subtree = tt::Subtree { delimiter: None, ..Default::default() }; | ||
247 | while self.peek().is_some() { | ||
248 | self.collect_leaf(&mut subtree.token_trees); | ||
249 | } | ||
250 | if subtree.token_trees.len() == 1 { | ||
251 | if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] { | ||
252 | return first.clone(); | ||
253 | } | ||
254 | } | ||
255 | subtree | ||
256 | } | ||
257 | |||
258 | fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) { | ||
259 | let (token, range) = match self.bump() { | ||
260 | None => return, | ||
261 | Some(it) => it, | ||
262 | }; | ||
263 | |||
264 | let k: SyntaxKind = token.kind(); | ||
265 | if k == COMMENT { | ||
266 | if let Some(tokens) = self.convert_doc_comment(&token) { | ||
267 | result.extend(tokens); | ||
268 | } | ||
269 | return; | ||
270 | } | ||
271 | |||
272 | result.push(if k.is_punct() && k != UNDERSCORE { | ||
273 | assert_eq!(range.len(), TextSize::of('.')); | ||
274 | let delim = match k { | ||
275 | T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])), | ||
276 | T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])), | ||
277 | T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])), | ||
278 | _ => None, | ||
279 | }; | ||
280 | |||
281 | if let Some((kind, closed)) = delim { | ||
282 | let mut subtree = tt::Subtree::default(); | ||
283 | let (id, idx) = self.id_alloc().open_delim(range); | ||
284 | subtree.delimiter = Some(tt::Delimiter { id, kind }); | ||
285 | |||
286 | while self.peek().map_or(false, |it| it.kind() != closed) { | ||
287 | self.collect_leaf(&mut subtree.token_trees); | ||
288 | } | ||
289 | let last_range = match self.bump() { | ||
290 | None => { | ||
291 | // For error resilience, we insert a char punct for the opening delim here | ||
292 | self.id_alloc().close_delim(idx, None); | ||
293 | let leaf: tt::Leaf = tt::Punct { | ||
294 | id: self.id_alloc().alloc(range), | ||
295 | char: token.to_char().unwrap(), | ||
296 | spacing: tt::Spacing::Alone, | ||
297 | } | ||
298 | .into(); | ||
299 | result.push(leaf.into()); | ||
300 | result.extend(subtree.token_trees); | ||
301 | return; | ||
302 | } | ||
303 | Some(it) => it.1, | ||
304 | }; | ||
305 | self.id_alloc().close_delim(idx, Some(last_range)); | ||
306 | subtree.into() | ||
307 | } else { | ||
308 | let spacing = match self.peek() { | ||
309 | Some(next) | ||
310 | if next.kind().is_trivia() | ||
311 | || next.kind() == T!['['] | ||
312 | || next.kind() == T!['{'] | ||
313 | || next.kind() == T!['('] => | ||
314 | { | ||
315 | tt::Spacing::Alone | ||
316 | } | ||
317 | Some(next) if next.kind().is_punct() && next.kind() != UNDERSCORE => { | ||
318 | tt::Spacing::Joint | ||
319 | } | ||
320 | _ => tt::Spacing::Alone, | ||
321 | }; | ||
322 | let char = match token.to_char() { | ||
323 | Some(c) => c, | ||
324 | None => { | ||
325 | panic!("Token from lexer must be single char: token = {:#?}", token); | ||
326 | } | ||
327 | }; | ||
328 | tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() | ||
329 | } | ||
330 | } else { | ||
331 | macro_rules! make_leaf { | ||
332 | ($i:ident) => { | ||
333 | tt::$i { id: self.id_alloc().alloc(range), text: token.to_text() }.into() | ||
334 | }; | ||
335 | } | ||
336 | let leaf: tt::Leaf = match k { | ||
337 | T![true] | T![false] => make_leaf!(Ident), | ||
338 | IDENT => make_leaf!(Ident), | ||
339 | UNDERSCORE => make_leaf!(Ident), | ||
340 | k if k.is_keyword() => make_leaf!(Ident), | ||
341 | k if k.is_literal() => make_leaf!(Literal), | ||
342 | LIFETIME_IDENT => { | ||
343 | let char_unit = TextSize::of('\''); | ||
344 | let r = TextRange::at(range.start(), char_unit); | ||
345 | let apostrophe = tt::Leaf::from(tt::Punct { | ||
346 | char: '\'', | ||
347 | spacing: tt::Spacing::Joint, | ||
348 | id: self.id_alloc().alloc(r), | ||
349 | }); | ||
350 | result.push(apostrophe.into()); | ||
351 | |||
352 | let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); | ||
353 | let ident = tt::Leaf::from(tt::Ident { | ||
354 | text: SmolStr::new(&token.to_text()[1..]), | ||
355 | id: self.id_alloc().alloc(r), | ||
356 | }); | ||
357 | result.push(ident.into()); | ||
358 | return; | ||
359 | } | ||
360 | _ => return, | ||
361 | }; | ||
362 | |||
363 | leaf.into() | ||
364 | }); | ||
365 | } | ||
366 | |||
367 | fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>; | 367 | fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>; |
368 | 368 | ||
369 | fn bump(&mut self) -> Option<(Self::Token, TextRange)>; | 369 | fn bump(&mut self) -> Option<(Self::Token, TextRange)>; |
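The doc comment kept at new lines 246-248 describes the job behind `convert_doc_comment`: take the text of a doc comment, strip the leading `///` (or `/**`, etc.) and the trailing `*/`, and hand the remainder back as a quoted string. A rough sketch of just that stripping step, assuming plain string input; the function name and edge-case handling here are illustrative, not the crate's actual helper:

```rust
// Illustrative only: strip doc-comment markers and return the body quoted,
// as the retained doc comment in this diff describes. rust-analyzer's real
// code handles more variants and escaping rules than this sketch.
fn doc_comment_text(comment: &str) -> String {
    let body = comment
        .trim_start_matches("///")
        .trim_start_matches("//!")
        .trim_start_matches("/**")
        .trim_end_matches("*/")
        .trim();
    // Quote the body so it can be spliced into a `#[doc = "..."]`-style token tree.
    format!("{:?}", body)
}

fn main() {
    assert_eq!(doc_comment_text("/// a doc line"), "\"a doc line\"");
    assert_eq!(doc_comment_text("/** block doc */"), "\"block doc\"");
    println!("ok");
}
```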