Diffstat (limited to 'crates/ra_mbe/src')
 -rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 73
 -rw-r--r--  crates/ra_mbe/src/tests.rs         | 25
 2 files changed, 95 insertions, 3 deletions
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 3521b382a..73a0780da 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -118,6 +118,69 @@ impl TokenMap {
     }
 }
 
+/// Returns the textual content of a doc comment block as a quoted string.
+/// That is, strips the leading `///` (or `/**`, etc.)
+/// and strips the trailing `*/`,
+/// and then quotes the text, which is needed to convert it to a `tt::Literal`.
+fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
+    use ast::AstToken;
+
+    let prefix_len = comment.prefix().len();
+    let mut text = &comment.text()[prefix_len..];
+
+    // Remove the trailing "*/"
+    if comment.kind().shape == ast::CommentShape::Block {
+        text = &text[0..text.len() - 2];
+    }
+
+    // Quote the string
+    // Note that `tt::Literal` expects an escaped string
+    let text = format!("{:?}", text.escape_default().to_string());
+    text.into()
+}
+
+fn convert_doc_comment<'a>(token: &ra_syntax::SyntaxToken<'a>) -> Option<Vec<tt::TokenTree>> {
+    use ast::AstToken;
+    let comment = ast::Comment::cast(*token)?;
+    let doc = comment.kind().doc?;
+
+    // Make `doc = "\" Comments\""`
+    let mut meta_tkns = Vec::new();
+    meta_tkns.push(mk_ident("doc"));
+    meta_tkns.push(mk_punct('='));
+    meta_tkns.push(mk_doc_literal(&comment));
+
+    // Make `#[...]` (or `#![...]` for inner doc comments)
+    let mut token_trees = Vec::new();
+    token_trees.push(mk_punct('#'));
+    if let ast::CommentPlacement::Inner = doc {
+        token_trees.push(mk_punct('!'));
+    }
+    token_trees.push(tt::TokenTree::from(tt::Subtree::from(
+        tt::Subtree { delimiter: tt::Delimiter::Bracket, token_trees: meta_tkns }.into(),
+    )));
+
+    return Some(token_trees);
+
+    // Helper functions
+    fn mk_ident(s: &str) -> tt::TokenTree {
+        tt::TokenTree::from(tt::Leaf::from(tt::Ident {
+            text: s.into(),
+            id: tt::TokenId::unspecified(),
+        }))
+    }
+
+    fn mk_punct(c: char) -> tt::TokenTree {
+        tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone }))
+    }
+
+    fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
+        let lit = tt::Literal { text: doc_comment_text(comment) };
+
+        tt::TokenTree::from(tt::Leaf::from(lit))
+    }
+}
+
 fn convert_tt(
     token_map: &mut TokenMap,
     global_offset: TextUnit,
@@ -141,13 +204,17 @@ fn convert_tt(
     let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
 
     while let Some(child) = child_iter.next() {
-        if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia()
-        {
+        if skip_first && (child == first_child || child == last_child) {
             continue;
         }
+
         match child {
             SyntaxElement::Token(token) => {
-                if token.kind().is_punct() {
+                if let Some(doc_tokens) = convert_doc_comment(&token) {
+                    token_trees.extend(doc_tokens);
+                } else if token.kind().is_trivia() {
+                    continue;
+                } else if token.kind().is_punct() {
                     assert!(token.text().len() == 1, "Input ast::token punct must be single char.");
                     let char = token.text().chars().next().unwrap();
 
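For context, the quoting done by `doc_comment_text` above can be reproduced with the standard library alone. The following standalone sketch is illustrative and not part of the patch; the helper name `doc_text` and the sample inputs are invented for this example. It shows that a `///` line becomes the quoted doc string `" Hello"`, and a `/** ... */` block loses its trailing `*/` before being escaped and quoted.

// Standalone sketch, not part of the patch: mimics the stripping and quoting
// that `doc_comment_text` performs, using only the standard library.
fn doc_text(prefix: &str, raw: &str, is_block: bool) -> String {
    // Strip the leading `///` / `/**` prefix.
    let mut text = &raw[prefix.len()..];
    // Strip the trailing `*/` of a block comment.
    if is_block {
        text = &text[..text.len() - 2];
    }
    // Escape, then quote via `{:?}`, matching the patch's use of
    // `escape_default` followed by `format!("{:?}", ...)`.
    format!("{:?}", text.escape_default().to_string())
}

fn main() {
    // `/// Hello` ends up as the attribute `#[doc = " Hello"]`.
    assert_eq!(doc_text("///", "/// Hello", false), "\" Hello\"");
    // `/** Hi */` ends up as `#[doc = " Hi "]`.
    assert_eq!(doc_text("/**", "/** Hi */", true), "\" Hi \"");
}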
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index bd5a44240..c487bbbd4 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -868,6 +868,31 @@ fn test_meta() {
 }
 
 #[test]
+fn test_meta_doc_comments() {
+    let rules = create_rules(
+        r#"
+        macro_rules! foo {
+            ($(#[$ i:meta])+) => (
+                $(#[$ i])+
+                fn bar() {}
+            )
+        }
+"#,
+    );
+    assert_expansion(
+        MacroKind::Items,
+        &rules,
+        r#"foo! {
+            /// Single Line Doc 1
+            /**
+                MultiLines Doc
+            */
+        }"#,
+        "# [doc = \" Single Line Doc 1\"] # [doc = \" \\\\n MultiLines Doc\\\\n \"] fn bar () {}",
+    );
+}
+
 #[test]
 fn test_tt_block() {
     let rules = create_rules(
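The expected expansion in `test_meta_doc_comments` is valid item syntax because a `///` doc comment is sugar for a `#[doc = "..."]` attribute. A minimal standalone illustration follows; the function name `bar` mirrors the test, and `main` is added only so the snippet compiles on its own.

// Illustrative only: the `#[doc = "..."]` attribute form produced by the
// expansion is equivalent to writing a `///` doc comment on the item.
#[doc = " Single Line Doc 1"]
fn bar() {}

fn main() {
    bar();
}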