author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2019-12-05 20:00:20 +0000
committer  GitHub <[email protected]>    2019-12-05 20:00:20 +0000
commit     6e10a9f57815ad865a570816436adfdf0de1cdf0 (patch)
tree       410c416bfe9daa05743ce8c49418c20df28dd625    /crates/ra_mbe/src/syntax_bridge.rs
parent     217a6fa4a387dbfe6ac725b6dba2f15d6532679f (diff)
parent     10697041c1c72ddbe27c41912e691656be6ccce4 (diff)
Merge #2479
2479: Add expansion infrastructure for derive macros r=matklad a=flodiebold

I thought I'd experiment a bit with attribute macro/derive expansion, and here's what I've got so far. It has dummy implementations of the Copy / Clone derives, to show that the approach works; it doesn't add any attribute macro support, but I think that fits into the architecture.

Basically, during raw item collection, we look at the attributes and generate macro calls for them if necessary. Currently I only do this for derives, and just add the derive macro calls as separate calls next to the item. I think for derives, it's important that they don't obscure the actual item, since they can't actually change it (e.g. sending the item token tree through macro expansion unnecessarily might make completion within it more complicated). Attribute macros would have to be recognized at that stage and replace the item (i.e., the raw item collector will just emit an attribute macro call, and not the item). I think when we implement this, we should try to recognize known inert attributes, so that we don't do macro expansion unnecessarily; anything that isn't known needs to be treated as a possible attribute macro call (since the raw item collector can't resolve the macro yet).

There's basically no name resolution for attribute macros implemented, I just hardcoded the built-in derives. In the future, the built-ins should work within the normal name resolution infrastructure; the problem there is that the builtin stubs in `std` use macros 2.0, which we don't support yet (and adding support is outside the scope of this).

One aspect that I don't really have a solution for, but I don't know how important it is, is removing the attribute itself from its input. I'm pretty sure rustc leaves out the attribute macro from the input, but to do that, we'd have to create a completely new syntax node. I guess we could do it when / after converting to a token tree.

Co-authored-by: Florian Diebold <[email protected]>
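As a rough illustration of the raw-item-collection step described above, here is a self-contained toy model (all types and names below are simplified stand-ins for illustration, not the code added by this PR): each item's attributes are scanned, and one pseudo macro call per derive is recorded next to the item rather than in place of it.

    // Toy model of "generate macro calls for derives during raw item collection".
    #[derive(Debug)]
    enum RawItem {
        Item { name: String },
        MacroCall { derive: String, target: String },
    }

    fn collect(item_name: &str, attrs: &[&str]) -> Vec<RawItem> {
        // The item itself is always kept; derives never replace it.
        let mut out = vec![RawItem::Item { name: item_name.to_string() }];
        for attr in attrs {
            // Only derives are handled here; an attribute macro would instead
            // cause the collector to emit a single MacroCall and drop the item.
            if let Some(list) = attr.strip_prefix("derive(").and_then(|s| s.strip_suffix(')')) {
                for d in list.split(',').map(str::trim) {
                    out.push(RawItem::MacroCall { derive: d.to_string(), target: item_name.to_string() });
                }
            }
        }
        out
    }

    fn main() {
        // `#[derive(Copy, Clone)] struct Foo;` yields the item plus two derive calls.
        for raw in collect("Foo", &["derive(Copy, Clone)"]) {
            println!("{:?}", raw);
        }
    }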
Diffstat (limited to 'crates/ra_mbe/src/syntax_bridge.rs')
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 44
1 file changed, 30 insertions(+), 14 deletions(-)
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 1de399fee..66c1f0337 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -2,7 +2,7 @@
 
 use ra_parser::{FragmentKind, ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use std::iter::successors;
@@ -20,7 +20,7 @@ pub struct TokenMap {
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
-pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
+pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenMap)> {
     syntax_node_to_token_tree(ast.syntax())
 }
 
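Since the parameter is relaxed from `&ast::TokenTree` to `&impl ast::AstNode`, any syntax node, e.g. a whole struct definition, can now be handed to the bridge, which is what the derive infrastructure needs as input. A sketch of such a caller, mirroring the new test at the end of this diff (the helper `struct_to_subtree` is made up for illustration and is not part of this change):

    use ra_syntax::{ast, ast::AstNode};

    // Hypothetical helper: convert the first struct found in `text` into the
    // token tree a derive expansion would receive.
    fn struct_to_subtree(text: &str) -> Option<tt::Subtree> {
        // `Parse::ok()` returns a Result; as in the test below, errors are not expected here.
        let file = ast::SourceFile::parse(text).ok().ok()?;
        let strukt = file.syntax().descendants().find_map(ast::StructDef::cast)?;
        // Previously only an ast::TokenTree was accepted by ast_to_token_tree.
        Some(ast_to_token_tree(&strukt)?.0)
    }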
@@ -208,13 +208,8 @@ impl Convertor {
                     } else if token.kind().is_trivia() {
                         continue;
                     } else if token.kind().is_punct() {
-                        assert!(
-                            token.text().len() == 1,
-                            "Input ast::token punct must be single char."
-                        );
-                        let char = token.text().chars().next().unwrap();
-
-                        let spacing = match child_iter.peek() {
+                        // we need to pull apart joined punctuation tokens
+                        let last_spacing = match child_iter.peek() {
                             Some(NodeOrToken::Token(token)) => {
                                 if token.kind().is_punct() {
                                     tt::Spacing::Joint
@@ -224,8 +219,12 @@ impl Convertor {
                             }
                             _ => tt::Spacing::Alone,
                         };
-
-                        token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                        let spacing_iter = std::iter::repeat(tt::Spacing::Joint)
+                            .take(token.text().len() - 1)
+                            .chain(std::iter::once(last_spacing));
+                        for (char, spacing) in token.text().chars().zip(spacing_iter) {
+                            token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                        }
                     } else {
                         let child: tt::TokenTree =
                             if token.kind() == T![true] || token.kind() == T![false] {
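These two hunks replace the old single-character assertion: a multi-character punct token such as `::` or `->` is now pulled apart into one `tt::Punct` per character, with every character but the last marked `Joint` and the last one taking the spacing obtained by peeking at the next token. A standalone sketch of just that spacing calculation, using a local `Spacing` enum instead of the real `tt::Spacing`:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Spacing {
        Alone,
        Joint,
    }

    // Split a punct token's text into (char, spacing) pairs: every char but the
    // last is Joint; the last one takes the spacing towards the following token.
    fn split_punct(text: &str, last_spacing: Spacing) -> Vec<(char, Spacing)> {
        let spacing_iter = std::iter::repeat(Spacing::Joint)
            .take(text.len() - 1)
            .chain(std::iter::once(last_spacing));
        text.chars().zip(spacing_iter).collect()
    }

    fn main() {
        // `::` followed by an identifier: ':' Joint, then ':' Alone.
        assert_eq!(
            split_punct("::", Spacing::Alone),
            vec![(':', Spacing::Joint), (':', Spacing::Alone)]
        );
        // A single-char punct just keeps the peeked spacing.
        assert_eq!(split_punct("+", Spacing::Joint), vec![('+', Spacing::Joint)]);
    }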
@@ -246,8 +245,14 @@ impl Convertor {
                     }
                 }
                 NodeOrToken::Node(node) => {
-                    let child = self.go(&node)?.into();
-                    token_trees.push(child);
+                    let child_subtree = self.go(&node)?;
+                    if child_subtree.delimiter == tt::Delimiter::None
+                        && node.kind() != SyntaxKind::TOKEN_TREE
+                    {
+                        token_trees.extend(child_subtree.token_trees);
+                    } else {
+                        token_trees.push(child_subtree.into());
+                    }
                 }
             };
         }
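The other behavioural change: when a child node converts to a subtree with no delimiter (and the node is not itself a TOKEN_TREE), its tokens are now spliced into the parent's token list instead of being wrapped in an extra level of nesting. A toy model of that splice-or-push decision, with simplified local types standing in for the real `tt` crate:

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Delimiter {
        None,
        Brace,
    }

    #[derive(Debug)]
    enum TokenTree {
        Leaf(String),
        Subtree(Subtree),
    }

    #[derive(Debug)]
    struct Subtree {
        delimiter: Delimiter,
        token_trees: Vec<TokenTree>,
    }

    // Mirrors the new branch above: an undelimited child subtree is flattened into
    // the parent's token list; a delimited one stays as a single nested entry.
    // (The real code additionally keeps TOKEN_TREE nodes nested even without a delimiter.)
    fn push_child(parent: &mut Vec<TokenTree>, child: Subtree) {
        if child.delimiter == Delimiter::None {
            parent.extend(child.token_trees);
        } else {
            parent.push(TokenTree::Subtree(child));
        }
    }

    fn main() {
        let mut parent = Vec::new();
        let flat = Subtree {
            delimiter: Delimiter::None,
            token_trees: vec![TokenTree::Leaf("a".into()), TokenTree::Leaf("b".into())],
        };
        push_child(&mut parent, flat);
        assert_eq!(parent.len(), 2); // spliced flat, not wrapped in a subtree

        let braced = Subtree {
            delimiter: Delimiter::Brace,
            token_trees: vec![TokenTree::Leaf("c".into())],
        };
        push_child(&mut parent, braced);
        assert_eq!(parent.len(), 3); // the braced child stays as one nested subtree
        println!("{:?}", parent);
    }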
@@ -389,7 +394,10 @@ mod tests {
     use super::*;
     use crate::tests::{create_rules, expand};
     use ra_parser::TokenSource;
-    use ra_syntax::algo::{insert_children, InsertPosition};
+    use ra_syntax::{
+        algo::{insert_children, InsertPosition},
+        ast::AstNode,
+    };
 
     #[test]
     fn convert_tt_token_source() {
@@ -491,4 +499,12 @@ mod tests {
 
         assert_eq!(tt.delimiter, tt::Delimiter::Brace);
     }
+
+    #[test]
+    fn test_token_tree_multi_char_punct() {
+        let source_file = ast::SourceFile::parse("struct Foo { a: x::Y }").ok().unwrap();
+        let struct_def = source_file.syntax().descendants().find_map(ast::StructDef::cast).unwrap();
+        let tt = ast_to_token_tree(&struct_def).unwrap().0;
+        token_tree_to_syntax_node(&tt, FragmentKind::Item).unwrap();
+    }
 }