Diffstat (limited to 'crates/ra_mbe/src')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  34
1 file changed, 22 insertions(+), 12 deletions(-)
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 1de399fee..0fbcb2f66 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -2,7 +2,7 @@
 
 use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use std::iter::successors;
@@ -20,7 +20,7 @@ pub struct TokenMap {
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
-pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
+pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> {
     syntax_node_to_token_tree(ast.syntax())
 }
 
@@ -208,13 +208,8 @@ impl Convertor {
             } else if token.kind().is_trivia() {
                 continue;
             } else if token.kind().is_punct() {
-                assert!(
-                    token.text().len() == 1,
-                    "Input ast::token punct must be single char."
-                );
-                let char = token.text().chars().next().unwrap();
-
-                let spacing = match child_iter.peek() {
+                // we need to pull apart joined punctuation tokens
+                let last_spacing = match child_iter.peek() {
                     Some(NodeOrToken::Token(token)) => {
                         if token.kind().is_punct() {
                             tt::Spacing::Joint
@@ -224,8 +219,12 @@ impl Convertor {
                    }
                    _ => tt::Spacing::Alone,
                };
-
-                token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                let spacing_iter = std::iter::repeat(tt::Spacing::Joint)
+                    .take(token.text().len() - 1)
+                    .chain(std::iter::once(last_spacing));
+                for (char, spacing) in token.text().chars().zip(spacing_iter) {
+                    token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                }
            } else {
                let child: tt::TokenTree =
                    if token.kind() == T![true] || token.kind() == T![false] {
@@ -389,7 +388,10 @@ mod tests {
    use super::*;
    use crate::tests::{create_rules, expand};
    use ra_parser::TokenSource;
-    use ra_syntax::algo::{insert_children, InsertPosition};
+    use ra_syntax::{
+        algo::{insert_children, InsertPosition},
+        ast::AstNode,
+    };
 
    #[test]
    fn convert_tt_token_source() {
@@ -491,4 +493,12 @@ mod tests {
 
        assert_eq!(tt.delimiter, tt::Delimiter::Brace);
    }
+
+    #[test]
+    fn test_token_tree_multi_char_punct() {
+        let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
+        let struct_def = source_file.syntax().descendants().find_map(ast::StructDef::cast).unwrap();
+        let tt = ast_to_token_tree(&struct_def).unwrap().0;
+        token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
+    }
 }
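
For context, a minimal standalone sketch of the splitting scheme the new code uses: a multi-character punctuation token such as "::" is pulled apart into single-character puncts, where every char but the last gets Joint spacing and the last one takes the spacing derived from peeking at the following token. The Spacing and Punct types below are simplified stand-ins for illustration, not the real tt crate API.

// Sketch only: simplified stand-ins for tt::Spacing / tt::Punct.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

#[derive(Debug, PartialEq)]
struct Punct {
    char: char,
    spacing: Spacing,
}

/// Split a multi-char punctuation token (e.g. "::") into single-char puncts.
/// All chars but the last are Joint; the last one gets `last_spacing`, which
/// the caller derives from the next sibling token. Assumes non-empty ASCII
/// punctuation text, as in the original code.
fn split_punct(text: &str, last_spacing: Spacing) -> Vec<Punct> {
    let spacing_iter = std::iter::repeat(Spacing::Joint)
        .take(text.len() - 1)
        .chain(std::iter::once(last_spacing));
    text.chars()
        .zip(spacing_iter)
        .map(|(char, spacing)| Punct { char, spacing })
        .collect()
}

fn main() {
    // "::" followed by an identifier: first ':' joined to the second, second ':' alone.
    let puncts = split_punct("::", Spacing::Alone);
    assert_eq!(
        puncts,
        vec![
            Punct { char: ':', spacing: Spacing::Joint },
            Punct { char: ':', spacing: Spacing::Alone },
        ]
    );
    println!("{:?}", puncts);
}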