about summary refs log tree commit diff
path: root/crates/ra_mbe/src/syntax_bridge.rs
diff options
context:
space:
mode:
Diffstat (limited to 'crates/ra_mbe/src/syntax_bridge.rs')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs  47
1 file changed, 31 insertions, 16 deletions
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 19c17bd55..28ded7870 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -22,6 +22,14 @@ pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)
22 Some((tt, token_map)) 22 Some((tt, token_map))
23} 23}
24 24
25/// Convert the syntax node to a `TokenTree` (what macro
26/// will consume).
27pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
28 let mut token_map = TokenMap::default();
29 let tt = convert_tt(&mut token_map, node.range().start(), node)?;
30 Some((tt, token_map))
31}
32
25/// Parses the token tree (result of macro expansion) as a sequence of items 33/// Parses the token tree (result of macro expansion) as a sequence of items
26pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> { 34pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> {
27 let token_source = SubtreeTokenSource::new(tt); 35 let token_source = SubtreeTokenSource::new(tt);
@@ -51,15 +59,17 @@ fn convert_tt(
51) -> Option<tt::Subtree> { 59) -> Option<tt::Subtree> {
52 let first_child = tt.first_child_or_token()?; 60 let first_child = tt.first_child_or_token()?;
53 let last_child = tt.last_child_or_token()?; 61 let last_child = tt.last_child_or_token()?;
54 let delimiter = match (first_child.kind(), last_child.kind()) { 62 let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
55 (L_PAREN, R_PAREN) => tt::Delimiter::Parenthesis, 63 (L_PAREN, R_PAREN) => (tt::Delimiter::Parenthesis, true),
56 (L_CURLY, R_CURLY) => tt::Delimiter::Brace, 64 (L_CURLY, R_CURLY) => (tt::Delimiter::Brace, true),
57 (L_BRACK, R_BRACK) => tt::Delimiter::Bracket, 65 (L_BRACK, R_BRACK) => (tt::Delimiter::Bracket, true),
58 _ => return None, 66 _ => (tt::Delimiter::None, false),
59 }; 67 };
68
60 let mut token_trees = Vec::new(); 69 let mut token_trees = Vec::new();
61 for child in tt.children_with_tokens().skip(1) { 70 for child in tt.children_with_tokens().skip(skip_first as usize) {
62 if child == first_child || child == last_child || child.kind().is_trivia() { 71 if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia()
72 {
63 continue; 73 continue;
64 } 74 }
65 match child { 75 match child {
@@ -127,6 +137,11 @@ impl<'a, Q: Querier> TtTreeSink<'a, Q> {
127 137
128impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { 138impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> {
129 fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { 139 fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
140 if kind == L_DOLLAR || kind == R_DOLLAR {
141 self.token_pos += n_tokens as usize;
142 return;
143 }
144
130 for _ in 0..n_tokens { 145 for _ in 0..n_tokens {
131 self.buf += &self.src_querier.token(self.token_pos).1; 146 self.buf += &self.src_querier.token(self.token_pos).1;
132 self.token_pos += 1; 147 self.token_pos += 1;
@@ -176,19 +191,19 @@ mod tests {
176 191
177 let query = tt_src.querier(); 192 let query = tt_src.querier();
178 193
179 // [{] 194 // [${]
180 // [let] [a] [=] ['c'] [;] 195 // [let] [a] [=] ['c'] [;]
181 assert_eq!(query.token(1 + 3).1, "'c'"); 196 assert_eq!(query.token(2 + 3).1, "'c'");
182 assert_eq!(query.token(1 + 3).0, CHAR); 197 assert_eq!(query.token(2 + 3).0, CHAR);
183 // [let] [c] [=] [1000] [;] 198 // [let] [c] [=] [1000] [;]
184 assert_eq!(query.token(1 + 5 + 3).1, "1000"); 199 assert_eq!(query.token(2 + 5 + 3).1, "1000");
185 assert_eq!(query.token(1 + 5 + 3).0, INT_NUMBER); 200 assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER);
186 // [let] [f] [=] [12E+99_f64] [;] 201 // [let] [f] [=] [12E+99_f64] [;]
187 assert_eq!(query.token(1 + 10 + 3).1, "12E+99_f64"); 202 assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64");
188 assert_eq!(query.token(1 + 10 + 3).0, FLOAT_NUMBER); 203 assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER);
189 204
190 // [let] [s] [=] ["rust1"] [;] 205 // [let] [s] [=] ["rust1"] [;]
191 assert_eq!(query.token(1 + 15 + 3).1, "\"rust1\""); 206 assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\"");
192 assert_eq!(query.token(1 + 15 + 3).0, STRING); 207 assert_eq!(query.token(2 + 15 + 3).0, STRING);
193 } 208 }
194} 209}