Diffstat (limited to 'crates/ra_mbe')
-rw-r--r--  crates/ra_mbe/src/mbe_expander/matcher.rs |   2
-rw-r--r--  crates/ra_mbe/src/subtree_source.rs       |   8
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs        | 144
-rw-r--r--  crates/ra_mbe/src/tests.rs                |  16
4 files changed, 113 insertions, 57 deletions
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs
index 3f5136478..c67ae4110 100644
--- a/crates/ra_mbe/src/mbe_expander/matcher.rs
+++ b/crates/ra_mbe/src/mbe_expander/matcher.rs
@@ -106,7 +106,7 @@ fn match_subtree(
             }
             Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
                 let rhs = src.expect_subtree().map_err(|()| err!("expected subtree"))?;
-                if lhs.delimiter != rhs.delimiter {
+                if lhs.delimiter.map(|it| it.kind) != rhs.delimiter.map(|it| it.kind) {
                     bail!("mismatched delimiter")
                 }
                 let mut src = TtIter::new(rhs);
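Note: the change above relies on `tt::Delimiter` becoming a struct carrying a `kind` plus a source-map `id` (see the `tt::Delimiter { kind, id }` constructor in syntax_bridge.rs further down). A minimal stand-in sketch of that shape, using only names visible in this diff and not the real `tt` crate source, shows why the matcher now compares kinds instead of whole delimiters:

// Stand-in types mirroring what the diff implies about `tt`.
// Assumption: the real crate derives at least Copy/PartialEq on these.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TokenId(u32);

impl TokenId {
    fn unspecified() -> TokenId {
        TokenId(u32::MAX)
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DelimiterKind {
    Parenthesis,
    Brace,
    Bracket,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Delimiter {
    kind: DelimiterKind,
    id: TokenId,
}

fn delimiters_match(lhs: Option<Delimiter>, rhs: Option<Delimiter>) -> bool {
    // Compare kinds only, ignoring the source-map ids, exactly like the
    // `lhs.delimiter.map(|it| it.kind) != rhs.delimiter.map(|it| it.kind)` check above.
    lhs.map(|it| it.kind) == rhs.map(|it| it.kind)
}

fn main() {
    let a = Some(Delimiter { kind: DelimiterKind::Brace, id: TokenId(1) });
    let b = Some(Delimiter { kind: DelimiterKind::Brace, id: TokenId::unspecified() });
    assert!(delimiters_match(a, b));     // same kind, different ids
    assert!(!delimiters_match(a, None)); // delimited vs. undelimited subtree
}

With ids attached, two otherwise identical delimiters are no longer structurally equal, so comparing `kind` keeps delimiter matching id-agnostic.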
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 061e9f20b..5a03a372a 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -115,10 +115,10 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
 }
 
 fn convert_delim(d: Option<tt::Delimiter>, closing: bool) -> TtToken {
-    let (kinds, texts) = match d {
-        Some(tt::Delimiter::Parenthesis) => ([T!['('], T![')']], "()"),
-        Some(tt::Delimiter::Brace) => ([T!['{'], T!['}']], "{}"),
-        Some(tt::Delimiter::Bracket) => ([T!['['], T![']']], "[]"),
+    let (kinds, texts) = match d.map(|it| it.kind) {
+        Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"),
+        Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"),
+        Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"),
         None => ([L_DOLLAR, R_DOLLAR], ""),
     };
 
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8f65ff125..a85bb058b 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -5,6 +5,7 @@ use ra_syntax::{
     ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
+use rustc_hash::FxHashMap;
 use std::iter::successors;
 use tt::buffer::{Cursor, TokenBuffer};
 
@@ -83,6 +84,15 @@ impl TokenMap {
     fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
         self.entries.push((token_id, relative_range));
     }
+
+    fn insert_delim(
+        &mut self,
+        _token_id: tt::TokenId,
+        _open_relative_range: TextRange,
+        _close_relative_range: TextRange,
+    ) {
+        // FIXME: Add entries for delimiter
+    }
 }
 
 /// Returns the textual content of a doc comment block as a quoted string
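The new `TokenMap::insert_delim` is intentionally a no-op in this commit (the FIXME above). Purely as an illustration of where this could go, a delimiter entry might record both the open and the close range under a single id; the sketch below uses hypothetical names and `Range<u32>` in place of `TextRange`, and is not part of this change:

// Illustrative only: one possible way to store delimiter ranges once the
// FIXME above is addressed. Names and layout are hypothetical.
use std::ops::Range;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TokenId(u32);

#[derive(Clone, PartialEq, Eq, Debug)]
enum TokenTextRange {
    Token(Range<u32>),
    Delimiter { open: Range<u32>, close: Range<u32> },
}

#[derive(Default)]
struct TokenMap {
    entries: Vec<(TokenId, TokenTextRange)>,
}

impl TokenMap {
    fn insert(&mut self, token_id: TokenId, relative_range: Range<u32>) {
        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
    }

    // Record both sides of a delimiter pair under one id.
    fn insert_delim(&mut self, token_id: TokenId, open: Range<u32>, close: Range<u32>) {
        self.entries.push((token_id, TokenTextRange::Delimiter { open, close }));
    }

    fn range_by_token(&self, token_id: TokenId) -> Option<&TokenTextRange> {
        self.entries.iter().find(|(id, _)| *id == token_id).map(|(_, range)| range)
    }
}

fn main() {
    let mut map = TokenMap::default();
    map.insert(TokenId(0), 1..4);
    map.insert_delim(TokenId(1), 0..1, 10..11);
    assert_eq!(
        map.range_by_token(TokenId(1)),
        Some(&TokenTextRange::Delimiter { open: 0..1, close: 10..11 })
    );
}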
@@ -121,7 +131,10 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
         token_trees.push(mk_punct('!'));
     }
     token_trees.push(tt::TokenTree::from(tt::Subtree {
-        delimiter: Some(tt::Delimiter::Bracket),
+        delimiter: Some(tt::Delimiter {
+            kind: tt::DelimiterKind::Bracket,
+            id: tt::TokenId::unspecified(),
+        }),
         token_trees: meta_tkns,
     }));
 
@@ -190,12 +203,16 @@ impl Convertor {
         .last()
         .unwrap();
 
-        let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
-            (T!['('], T![')']) => (Some(tt::Delimiter::Parenthesis), true),
-            (T!['{'], T!['}']) => (Some(tt::Delimiter::Brace), true),
-            (T!['['], T![']']) => (Some(tt::Delimiter::Bracket), true),
+        let (delimiter_kind, skip_first) = match (first_child.kind(), last_child.kind()) {
+            (T!['('], T![')']) => (Some(tt::DelimiterKind::Parenthesis), true),
+            (T!['{'], T!['}']) => (Some(tt::DelimiterKind::Brace), true),
+            (T!['['], T![']']) => (Some(tt::DelimiterKind::Bracket), true),
             _ => (None, false),
         };
+        let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
+            kind,
+            id: self.alloc_delim(first_child.text_range(), last_child.text_range()),
+        });
 
         let mut token_trees = Vec::new();
         let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
@@ -232,25 +249,31 @@ impl Convertor {
                                 .push(tt::Leaf::from(tt::Punct { char, spacing, id }).into());
                         }
                     } else {
-                        let child: tt::TokenTree = if token.kind() == T![true]
-                            || token.kind() == T![false]
-                        {
-                            let id = self.alloc(token.text_range());
-                            tt::Leaf::from(tt::Literal { text: token.text().clone(), id }).into()
-                        } else if token.kind().is_keyword()
-                            || token.kind() == IDENT
-                            || token.kind() == LIFETIME
-                        {
-                            let id = self.alloc(token.text_range());
-                            let text = token.text().clone();
-                            tt::Leaf::from(tt::Ident { text, id }).into()
-                        } else if token.kind().is_literal() {
-                            let id = self.alloc(token.text_range());
-                            tt::Leaf::from(tt::Literal { text: token.text().clone(), id }).into()
-                        } else {
-                            return None;
+                        let child: tt::Leaf = match token.kind() {
+                            T![true] | T![false] => {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Literal { text, id }.into()
+                            }
+                            IDENT | LIFETIME => {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Ident { text, id }.into()
+                            }
+                            k if k.is_keyword() => {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Ident { text, id }.into()
+                            }
+                            k if k.is_literal() => {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Literal { text, id }.into()
+                            }
+                            _ => return None,
                         };
-                        token_trees.push(child);
+
+                        token_trees.push(child.into());
                     }
                 }
                 NodeOrToken::Node(node) => {
@@ -275,11 +298,26 @@ impl Convertor {
         self.map.insert(token_id, relative_range);
         token_id
     }
+
+    fn alloc_delim(
+        &mut self,
+        open_abs_range: TextRange,
+        close_abs_range: TextRange,
+    ) -> tt::TokenId {
+        let open_relative_range = open_abs_range - self.global_offset;
+        let close_relative_range = close_abs_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+
+        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
+        token_id
+    }
 }
 
 struct TtTreeSink<'a> {
     buf: String,
     cursor: Cursor<'a>,
+    open_delims: FxHashMap<tt::TokenId, TextUnit>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
     token_map: TokenMap,
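Worth noting: `alloc_delim` draws from the same `next_id` counter as `alloc`, so a delimiter pair now consumes one token id just like any ordinary token. A minimal model of that scheme (a stand-in `Convertor`, not the real struct from syntax_bridge.rs) makes the id shift in the test update further down easy to predict:

// Minimal model of the shared id counter behind `alloc` and `alloc_delim`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TokenId(u32);

#[derive(Default)]
struct Convertor {
    next_id: u32,
}

impl Convertor {
    fn alloc(&mut self) -> TokenId {
        let id = TokenId(self.next_id);
        self.next_id += 1;
        id
    }

    // A delimiter pair such as "(" .. ")" is covered by a single id.
    fn alloc_delim(&mut self) -> TokenId {
        self.alloc()
    }
}

fn main() {
    let mut conv = Convertor::default();
    let paren_pair = conv.alloc_delim(); // the "(" .. ")" pair
    let ident = conv.alloc();            // e.g. `baz`
    assert_eq!(paren_pair, TokenId(0));
    assert_eq!(ident, TokenId(1));
}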
@@ -294,6 +332,7 @@ impl<'a> TtTreeSink<'a> {
         TtTreeSink {
             buf: String::new(),
             cursor,
+            open_delims: FxHashMap::default(),
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
@@ -307,10 +346,10 @@ impl<'a> TtTreeSink<'a> {
 }
 
 fn delim_to_str(d: Option<tt::Delimiter>, closing: bool) -> SmolStr {
-    let texts = match d {
-        Some(tt::Delimiter::Parenthesis) => "()",
-        Some(tt::Delimiter::Brace) => "{}",
-        Some(tt::Delimiter::Bracket) => "[]",
+    let texts = match d.map(|it| it.kind) {
+        Some(tt::DelimiterKind::Parenthesis) => "()",
+        Some(tt::DelimiterKind::Brace) => "{}",
+        Some(tt::DelimiterKind::Bracket) => "[]",
         None => return "".into(),
     };
 
@@ -331,34 +370,49 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 break;
             }
 
-            match self.cursor.token_tree() {
+            let text: Option<SmolStr> = match self.cursor.token_tree() {
                 Some(tt::TokenTree::Leaf(leaf)) => {
                     // Mark the range if needed
-                    if let tt::Leaf::Ident(ident) = leaf {
-                        if kind == IDENT {
-                            let range =
-                                TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
-                            self.token_map.insert(ident.id, range);
-                        }
-                    }
-
+                    let id = match leaf {
+                        tt::Leaf::Ident(ident) => ident.id,
+                        tt::Leaf::Punct(punct) => punct.id,
+                        tt::Leaf::Literal(lit) => lit.id,
+                    };
+                    let text = SmolStr::new(format!("{}", leaf));
+                    let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text));
+                    self.token_map.insert(id, range);
                     self.cursor = self.cursor.bump();
-                    self.buf += &format!("{}", leaf);
+                    Some(text)
                 }
                 Some(tt::TokenTree::Subtree(subtree)) => {
                     self.cursor = self.cursor.subtree().unwrap();
-                    self.buf += &delim_to_str(subtree.delimiter, false);
-                }
-                None => {
-                    if let Some(parent) = self.cursor.end() {
-                        self.cursor = self.cursor.bump();
-                        self.buf += &delim_to_str(parent.delimiter, true);
+                    if let Some(id) = subtree.delimiter.map(|it| it.id) {
+                        self.open_delims.insert(id, self.text_pos);
                     }
+                    Some(delim_to_str(subtree.delimiter, false))
                 }
+                None => self.cursor.end().and_then(|parent| {
+                    self.cursor = self.cursor.bump();
+                    if let Some(id) = parent.delimiter.map(|it| it.id) {
+                        if let Some(open_delim) = self.open_delims.get(&id) {
+                            let open_range =
+                                TextRange::offset_len(*open_delim, TextUnit::from_usize(1));
+                            let close_range =
+                                TextRange::offset_len(self.text_pos, TextUnit::from_usize(1));
+                            self.token_map.insert_delim(id, open_range, close_range);
+                        }
+                    }
+
+                    Some(delim_to_str(parent.delimiter, true))
+                }),
             };
+
+            if let Some(text) = text {
+                self.buf += &text;
+                self.text_pos += TextUnit::of_str(&text);
+            }
         }
 
-        self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
         self.inner.token(kind, text);
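The delimiter bookkeeping in `token` works by remembering the text position of each opening delimiter under its id (`open_delims`) and pairing it with the current position once the matching close is emitted. A stripped-down sketch of that pattern, with stand-in types and made-up `emit_*` entry points rather than the real `TtTreeSink`:

// Stripped-down model of the open_delims bookkeeping in TtTreeSink::token.
use std::collections::HashMap;

type TokenId = u32;
type TextUnit = u32;

#[derive(Default)]
struct DelimTracker {
    text_pos: TextUnit,
    open_delims: HashMap<TokenId, TextUnit>,
    // (id, open range, close range), each delimiter char being one unit wide
    recorded: Vec<(TokenId, (TextUnit, TextUnit), (TextUnit, TextUnit))>,
}

impl DelimTracker {
    fn emit_text(&mut self, text: &str) {
        self.text_pos += text.len() as TextUnit;
    }

    // Cursor descends into a subtree: remember where the opening delimiter starts.
    fn emit_open(&mut self, id: TokenId, text: &str) {
        self.open_delims.insert(id, self.text_pos);
        self.emit_text(text);
    }

    // Cursor leaves a subtree: pair the remembered open position with the
    // current position of the closing delimiter.
    fn emit_close(&mut self, id: TokenId, text: &str) {
        if let Some(&open_pos) = self.open_delims.get(&id) {
            self.recorded
                .push((id, (open_pos, open_pos + 1), (self.text_pos, self.text_pos + 1)));
        }
        self.emit_text(text);
    }
}

fn main() {
    let mut sink = DelimTracker::default();
    sink.emit_open(0, "(");  // "("
    sink.emit_text("baz");   // "(baz"
    sink.emit_close(0, ")"); // "(baz)"
    assert_eq!(sink.recorded, vec![(0, (0, 1), (4, 5))]);
}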
@@ -504,7 +558,7 @@ mod tests {
         let token_tree = ast::TokenTree::cast(token_tree).unwrap();
         let tt = ast_to_token_tree(&token_tree).unwrap().0;
 
-        assert_eq!(tt.delimiter, Some(tt::Delimiter::Brace));
+        assert_eq!(tt.delimiter.map(|it| it.kind), Some(tt::DelimiterKind::Brace));
     }
 
     #[test]
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 70e65bc74..6bcfedcac 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -77,13 +77,15 @@ macro_rules! foobar {
     }
 
     assert_eq!(expansion.token_trees.len(), 3);
-    // ($e:ident) => { foo bar $e }
-    // 0123 45 6 7 89
-    assert_eq!(get_id(&expansion.token_trees[0]), Some(6));
-    assert_eq!(get_id(&expansion.token_trees[1]), Some(7));
+    // {($e:ident) => { foo bar $e }}
+    // 012345 67 8 9 T 12
+    assert_eq!(get_id(&expansion.token_trees[0]), Some(9));
+    assert_eq!(get_id(&expansion.token_trees[1]), Some(10));
 
-    // So baz should be 10
-    assert_eq!(get_id(&expansion.token_trees[2]), Some(10));
+    // The input args of macro call include parentheses:
+    // (baz)
+    // So baz should be 12+1+1
+    assert_eq!(get_id(&expansion.token_trees[2]), Some(14));
 }
 
 #[test]
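One reading of the updated expectations (an interpretation, not text from the commit): with delimiters now taking ids of their own, the tokens of the definition `{($e:ident) => { foo bar $e }}` occupy ids 0 through 12, which puts `foo` at 9 and `bar` at 10; in the call `foobar!(baz)` the argument parentheses claim the next id (12 + 1), so `baz` lands on 12 + 1 + 1 = 14.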