diff options
Diffstat (limited to 'crates/hir_expand/src')
-rw-r--r-- | crates/hir_expand/src/builtin_derive.rs | 4 | ||||
-rw-r--r-- | crates/hir_expand/src/builtin_macro.rs | 5 | ||||
-rw-r--r-- | crates/hir_expand/src/db.rs | 469 | ||||
-rw-r--r-- | crates/hir_expand/src/eager.rs | 7 | ||||
-rw-r--r-- | crates/hir_expand/src/hygiene.rs | 73 | ||||
-rw-r--r-- | crates/hir_expand/src/input.rs | 94 | ||||
-rw-r--r-- | crates/hir_expand/src/lib.rs | 106 | ||||
-rw-r--r-- | crates/hir_expand/src/proc_macro.rs | 102 | ||||
-rw-r--r-- | crates/hir_expand/src/quote.rs | 4 |
9 files changed, 455 insertions, 409 deletions
diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs index 537c03028..b6a6d602f 100644 --- a/crates/hir_expand/src/builtin_derive.rs +++ b/crates/hir_expand/src/builtin_derive.rs | |||
@@ -269,7 +269,7 @@ mod tests { | |||
269 | use expect_test::{expect, Expect}; | 269 | use expect_test::{expect, Expect}; |
270 | use name::AsName; | 270 | use name::AsName; |
271 | 271 | ||
272 | use crate::{test_db::TestDB, AstId, AttrId, MacroCallId, MacroCallKind, MacroCallLoc}; | 272 | use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc}; |
273 | 273 | ||
274 | use super::*; | 274 | use super::*; |
275 | 275 | ||
@@ -320,7 +320,7 @@ $0 | |||
320 | kind: MacroCallKind::Derive { | 320 | kind: MacroCallKind::Derive { |
321 | ast_id, | 321 | ast_id, |
322 | derive_name: name.to_string(), | 322 | derive_name: name.to_string(), |
323 | derive_attr: AttrId(0), | 323 | derive_attr_index: 0, |
324 | }, | 324 | }, |
325 | }; | 325 | }; |
326 | 326 | ||
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs index 179de61f9..af9802144 100644 --- a/crates/hir_expand/src/builtin_macro.rs +++ b/crates/hir_expand/src/builtin_macro.rs | |||
@@ -578,6 +578,7 @@ mod tests { | |||
578 | krate, | 578 | krate, |
579 | kind: MacroCallKind::FnLike { | 579 | kind: MacroCallKind::FnLike { |
580 | ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(¯o_call)), | 580 | ast_id: AstId::new(file_id.into(), ast_id_map.ast_id(¯o_call)), |
581 | fragment: FragmentKind::Expr, | ||
581 | }, | 582 | }, |
582 | }; | 583 | }; |
583 | 584 | ||
@@ -788,9 +789,9 @@ mod tests { | |||
788 | r##" | 789 | r##" |
789 | #[rustc_builtin_macro] | 790 | #[rustc_builtin_macro] |
790 | macro_rules! concat {} | 791 | macro_rules! concat {} |
791 | concat!("foo", "r", 0, r#"bar"#, false); | 792 | concat!("foo", "r", 0, r#"bar"#, "\n", false); |
792 | "##, | 793 | "##, |
793 | expect![[r#""foor0barfalse""#]], | 794 | expect![[r#""foor0bar\nfalse""#]], |
794 | ); | 795 | ); |
795 | } | 796 | } |
796 | } | 797 | } |
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 1e4b0cc19..9fa419fcf 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs | |||
@@ -3,20 +3,18 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use base_db::{salsa, SourceDatabase}; | 5 | use base_db::{salsa, SourceDatabase}; |
6 | use mbe::{ExpandError, ExpandResult, MacroDef, MacroRules}; | 6 | use mbe::{ExpandError, ExpandResult}; |
7 | use parser::FragmentKind; | 7 | use parser::FragmentKind; |
8 | use syntax::{ | 8 | use syntax::{ |
9 | algo::diff, | 9 | algo::diff, |
10 | ast::{MacroStmts, NameOwner}, | 10 | ast::{self, NameOwner}, |
11 | AstNode, GreenNode, Parse, | 11 | AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, |
12 | SyntaxKind::*, | ||
13 | SyntaxNode, | ||
14 | }; | 12 | }; |
15 | 13 | ||
16 | use crate::{ | 14 | use crate::{ |
17 | ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinDeriveExpander, BuiltinFnLikeExpander, | 15 | ast_id_map::AstIdMap, hygiene::HygieneFrame, input::process_macro_input, BuiltinDeriveExpander, |
18 | EagerCallLoc, EagerMacroId, HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, | 16 | BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, HirFileId, HirFileIdRepr, LazyMacroId, |
19 | MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, | 17 | MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, |
20 | }; | 18 | }; |
21 | 19 | ||
22 | /// Total limit on the number of tokens produced by any macro invocation. | 20 | /// Total limit on the number of tokens produced by any macro invocation. |
@@ -27,23 +25,28 @@ const TOKEN_LIMIT: usize = 524288; | |||
27 | 25 | ||
28 | #[derive(Debug, Clone, Eq, PartialEq)] | 26 | #[derive(Debug, Clone, Eq, PartialEq)] |
29 | pub enum TokenExpander { | 27 | pub enum TokenExpander { |
30 | MacroRules(mbe::MacroRules), | 28 | /// Old-style `macro_rules`. |
31 | MacroDef(mbe::MacroDef), | 29 | MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap }, |
30 | /// AKA macros 2.0. | ||
31 | MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap }, | ||
32 | /// Stuff like `line!` and `file!`. | ||
32 | Builtin(BuiltinFnLikeExpander), | 33 | Builtin(BuiltinFnLikeExpander), |
34 | /// `derive(Copy)` and such. | ||
33 | BuiltinDerive(BuiltinDeriveExpander), | 35 | BuiltinDerive(BuiltinDeriveExpander), |
36 | /// The thing we love the most here in rust-analyzer -- procedural macros. | ||
34 | ProcMacro(ProcMacroExpander), | 37 | ProcMacro(ProcMacroExpander), |
35 | } | 38 | } |
36 | 39 | ||
37 | impl TokenExpander { | 40 | impl TokenExpander { |
38 | pub fn expand( | 41 | fn expand( |
39 | &self, | 42 | &self, |
40 | db: &dyn AstDatabase, | 43 | db: &dyn AstDatabase, |
41 | id: LazyMacroId, | 44 | id: LazyMacroId, |
42 | tt: &tt::Subtree, | 45 | tt: &tt::Subtree, |
43 | ) -> mbe::ExpandResult<tt::Subtree> { | 46 | ) -> mbe::ExpandResult<tt::Subtree> { |
44 | match self { | 47 | match self { |
45 | TokenExpander::MacroRules(it) => it.expand(tt), | 48 | TokenExpander::MacroRules { mac, .. } => mac.expand(tt), |
46 | TokenExpander::MacroDef(it) => it.expand(tt), | 49 | TokenExpander::MacroDef { mac, .. } => mac.expand(tt), |
47 | TokenExpander::Builtin(it) => it.expand(db, id, tt), | 50 | TokenExpander::Builtin(it) => it.expand(db, id, tt), |
48 | // FIXME switch these to ExpandResult as well | 51 | // FIXME switch these to ExpandResult as well |
49 | TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), | 52 | TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(), |
@@ -56,23 +59,23 @@ impl TokenExpander { | |||
56 | } | 59 | } |
57 | } | 60 | } |
58 | 61 | ||
59 | pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { | 62 | pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { |
60 | match self { | 63 | match self { |
61 | TokenExpander::MacroRules(it) => it.map_id_down(id), | 64 | TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id), |
62 | TokenExpander::MacroDef(it) => it.map_id_down(id), | 65 | TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id), |
63 | TokenExpander::Builtin(..) => id, | 66 | TokenExpander::Builtin(..) |
64 | TokenExpander::BuiltinDerive(..) => id, | 67 | | TokenExpander::BuiltinDerive(..) |
65 | TokenExpander::ProcMacro(..) => id, | 68 | | TokenExpander::ProcMacro(..) => id, |
66 | } | 69 | } |
67 | } | 70 | } |
68 | 71 | ||
69 | pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { | 72 | pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { |
70 | match self { | 73 | match self { |
71 | TokenExpander::MacroRules(it) => it.map_id_up(id), | 74 | TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id), |
72 | TokenExpander::MacroDef(it) => it.map_id_up(id), | 75 | TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id), |
73 | TokenExpander::Builtin(..) => (id, mbe::Origin::Call), | 76 | TokenExpander::Builtin(..) |
74 | TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call), | 77 | | TokenExpander::BuiltinDerive(..) |
75 | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), | 78 | | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), |
76 | } | 79 | } |
77 | } | 80 | } |
78 | } | 81 | } |
@@ -82,28 +85,48 @@ impl TokenExpander { | |||
82 | pub trait AstDatabase: SourceDatabase { | 85 | pub trait AstDatabase: SourceDatabase { |
83 | fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; | 86 | fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; |
84 | 87 | ||
88 | /// Main public API -- parses a hir file, not caring whether it's a real | ||
89 | /// file or a macro expansion. | ||
85 | #[salsa::transparent] | 90 | #[salsa::transparent] |
86 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; | 91 | fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>; |
87 | 92 | /// Implementation for the macro case. | |
88 | #[salsa::interned] | ||
89 | fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId; | ||
90 | fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>; | ||
91 | #[salsa::transparent] | ||
92 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; | ||
93 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>; | ||
94 | fn parse_macro_expansion( | 93 | fn parse_macro_expansion( |
95 | &self, | 94 | &self, |
96 | macro_file: MacroFile, | 95 | macro_file: MacroFile, |
97 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; | 96 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>; |
98 | fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>; | ||
99 | |||
100 | /// Firewall query that returns the error from the `macro_expand` query. | ||
101 | fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>; | ||
102 | 97 | ||
98 | /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the | ||
99 | /// reason why we use salsa at all. | ||
100 | /// | ||
101 | /// We encode macro definitions into ids of macro calls, this is what allows us | ||
102 | /// to be incremental. | ||
103 | #[salsa::interned] | ||
104 | fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId; | ||
105 | /// Certain built-in macros are eager (`format!(concat!("file: ", file!(), "{}"), 92)`). | ||
106 | /// For them, we actually want to encode the whole token tree as an argument. | ||
103 | #[salsa::interned] | 107 | #[salsa::interned] |
104 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; | 108 | fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; |
105 | 109 | ||
110 | /// Lowers syntactic macro call to a token tree representation. | ||
111 | #[salsa::transparent] | ||
112 | fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>; | ||
113 | /// Extracts syntax node, corresponding to a macro call. That's a firewall | ||
114 | /// query, only typing in the macro call itself changes the returned | ||
115 | /// subtree. | ||
116 | fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>; | ||
117 | /// Gets the expander for this macro. This compiles declarative macros, and | ||
118 | /// just fetches procedural ones. | ||
119 | fn macro_def(&self, id: MacroDefId) -> Option<Arc<TokenExpander>>; | ||
120 | |||
121 | /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory) | ||
122 | fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>; | ||
123 | /// Special case of the previous query for procedural macros. We can't LRU | ||
124 | /// proc macros, since they are not deterministic in general, and | ||
125 | /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng | ||
126 | /// heroically debugged this once! | ||
106 | fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; | 127 | fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>; |
128 | /// Firewall query that returns the error from the `macro_expand` query. | ||
129 | fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>; | ||
107 | 130 | ||
108 | fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>; | 131 | fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>; |
109 | } | 132 | } |
@@ -115,36 +138,160 @@ pub trait AstDatabase: SourceDatabase { | |||
115 | pub fn expand_hypothetical( | 138 | pub fn expand_hypothetical( |
116 | db: &dyn AstDatabase, | 139 | db: &dyn AstDatabase, |
117 | actual_macro_call: MacroCallId, | 140 | actual_macro_call: MacroCallId, |
118 | hypothetical_args: &syntax::ast::TokenTree, | 141 | hypothetical_args: &ast::TokenTree, |
119 | token_to_map: syntax::SyntaxToken, | 142 | token_to_map: SyntaxToken, |
120 | ) -> Option<(SyntaxNode, syntax::SyntaxToken)> { | 143 | ) -> Option<(SyntaxNode, SyntaxToken)> { |
121 | let macro_file = MacroFile { macro_call_id: actual_macro_call }; | ||
122 | let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()); | 144 | let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()); |
123 | let range = | 145 | let range = |
124 | token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; | 146 | token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?; |
125 | let token_id = tmap_1.token_by_range(range)?; | 147 | let token_id = tmap_1.token_by_range(range)?; |
126 | let macro_def = expander(db, actual_macro_call)?; | 148 | |
149 | let lazy_id = match actual_macro_call { | ||
150 | MacroCallId::LazyMacro(id) => id, | ||
151 | MacroCallId::EagerMacro(_) => return None, | ||
152 | }; | ||
153 | |||
154 | let macro_def = { | ||
155 | let loc = db.lookup_intern_macro(lazy_id); | ||
156 | db.macro_def(loc.def)? | ||
157 | }; | ||
158 | |||
159 | let hypothetical_expansion = macro_def.expand(db, lazy_id, &tt); | ||
160 | |||
161 | let fragment_kind = macro_fragment_kind(db, actual_macro_call); | ||
162 | |||
127 | let (node, tmap_2) = | 163 | let (node, tmap_2) = |
128 | parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1)))).value?; | 164 | mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?; |
129 | let token_id = macro_def.0.map_id_down(token_id); | 165 | |
166 | let token_id = macro_def.map_id_down(token_id); | ||
130 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; | 167 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; |
131 | let token = node.syntax_node().covering_element(range).into_token()?; | 168 | let token = node.syntax_node().covering_element(range).into_token()?; |
132 | Some((node.syntax_node(), token)) | 169 | Some((node.syntax_node(), token)) |
133 | } | 170 | } |
134 | 171 | ||
135 | fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | 172 | fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { |
136 | let map = | 173 | let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default(); |
137 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); | ||
138 | Arc::new(map) | 174 | Arc::new(map) |
139 | } | 175 | } |
140 | 176 | ||
141 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | 177 | fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { |
178 | match file_id.0 { | ||
179 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | ||
180 | HirFileIdRepr::MacroFile(macro_file) => { | ||
181 | db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node()) | ||
182 | } | ||
183 | } | ||
184 | } | ||
185 | |||
186 | fn parse_macro_expansion( | ||
187 | db: &dyn AstDatabase, | ||
188 | macro_file: MacroFile, | ||
189 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { | ||
190 | let _p = profile::span("parse_macro_expansion"); | ||
191 | let result = db.macro_expand(macro_file.macro_call_id); | ||
192 | |||
193 | if let Some(err) = &result.err { | ||
194 | // Note: | ||
195 | // The final goal is to make all parse_macro calls succeed, | ||
196 | // such that the following log is never reached. | ||
197 | match macro_file.macro_call_id { | ||
198 | MacroCallId::LazyMacro(id) => { | ||
199 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
200 | let node = loc.kind.node(db); | ||
201 | |||
202 | // collect parent information for warning log | ||
203 | let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { | ||
204 | it.file_id.call_node(db) | ||
205 | }) | ||
206 | .map(|n| format!("{:#}", n.value)) | ||
207 | .collect::<Vec<_>>() | ||
208 | .join("\n"); | ||
209 | |||
210 | log::warn!( | ||
211 | "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}", | ||
212 | err, | ||
213 | node.value, | ||
214 | parents | ||
215 | ); | ||
216 | } | ||
217 | _ => { | ||
218 | log::warn!("fail on macro_parse: (reason: {:?})", err); | ||
219 | } | ||
220 | } | ||
221 | } | ||
222 | let tt = match result.value { | ||
223 | Some(tt) => tt, | ||
224 | None => return ExpandResult { value: None, err: result.err }, | ||
225 | }; | ||
226 | |||
227 | let fragment_kind = macro_fragment_kind(db, macro_file.macro_call_id); | ||
228 | |||
229 | log::debug!("expanded = {}", tt.as_debug_string()); | ||
230 | log::debug!("kind = {:?}", fragment_kind); | ||
231 | |||
232 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { | ||
233 | Ok(it) => it, | ||
234 | Err(err) => { | ||
235 | log::debug!( | ||
236 | "failed to parse expanstion to {:?} = {}", | ||
237 | fragment_kind, | ||
238 | tt.as_debug_string() | ||
239 | ); | ||
240 | return ExpandResult::only_err(err); | ||
241 | } | ||
242 | }; | ||
243 | |||
244 | match result.err { | ||
245 | Some(err) => { | ||
246 | // Safety check for recursive identity macro. | ||
247 | let node = parse.syntax_node(); | ||
248 | let file: HirFileId = macro_file.into(); | ||
249 | let call_node = match file.call_node(db) { | ||
250 | Some(it) => it, | ||
251 | None => { | ||
252 | return ExpandResult::only_err(err); | ||
253 | } | ||
254 | }; | ||
255 | if is_self_replicating(&node, &call_node.value) { | ||
256 | return ExpandResult::only_err(err); | ||
257 | } else { | ||
258 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) } | ||
259 | } | ||
260 | } | ||
261 | None => { | ||
262 | log::debug!("parse = {:?}", parse.syntax_node().kind()); | ||
263 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None } | ||
264 | } | ||
265 | } | ||
266 | } | ||
267 | |||
268 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
269 | let arg = db.macro_arg_text(id)?; | ||
270 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg)); | ||
271 | Some(Arc::new((tt, tmap))) | ||
272 | } | ||
273 | |||
274 | fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | ||
275 | let id = match id { | ||
276 | MacroCallId::LazyMacro(id) => id, | ||
277 | MacroCallId::EagerMacro(_id) => { | ||
278 | // FIXME: support macro_arg for eager macro | ||
279 | return None; | ||
280 | } | ||
281 | }; | ||
282 | let loc = db.lookup_intern_macro(id); | ||
283 | let arg = loc.kind.arg(db)?; | ||
284 | let arg = process_macro_input(db, arg, id); | ||
285 | Some(arg.green().into()) | ||
286 | } | ||
287 | |||
288 | fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>> { | ||
142 | match id.kind { | 289 | match id.kind { |
143 | MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) { | 290 | MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) { |
144 | syntax::ast::Macro::MacroRules(macro_rules) => { | 291 | ast::Macro::MacroRules(macro_rules) => { |
145 | let arg = macro_rules.token_tree()?; | 292 | let arg = macro_rules.token_tree()?; |
146 | let (tt, tmap) = mbe::ast_to_token_tree(&arg); | 293 | let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg); |
147 | let rules = match MacroRules::parse(&tt) { | 294 | let mac = match mbe::MacroRules::parse(&tt) { |
148 | Ok(it) => it, | 295 | Ok(it) => it, |
149 | Err(err) => { | 296 | Err(err) => { |
150 | let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default(); | 297 | let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default(); |
@@ -152,12 +299,12 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, | |||
152 | return None; | 299 | return None; |
153 | } | 300 | } |
154 | }; | 301 | }; |
155 | Some(Arc::new((TokenExpander::MacroRules(rules), tmap))) | 302 | Some(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map })) |
156 | } | 303 | } |
157 | syntax::ast::Macro::MacroDef(macro_def) => { | 304 | ast::Macro::MacroDef(macro_def) => { |
158 | let arg = macro_def.body()?; | 305 | let arg = macro_def.body()?; |
159 | let (tt, tmap) = mbe::ast_to_token_tree(&arg); | 306 | let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg); |
160 | let rules = match MacroDef::parse(&tt) { | 307 | let mac = match mbe::MacroDef::parse(&tt) { |
161 | Ok(it) => it, | 308 | Ok(it) => it, |
162 | Err(err) => { | 309 | Err(err) => { |
163 | let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default(); | 310 | let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default(); |
@@ -165,41 +312,18 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, | |||
165 | return None; | 312 | return None; |
166 | } | 313 | } |
167 | }; | 314 | }; |
168 | Some(Arc::new((TokenExpander::MacroDef(rules), tmap))) | 315 | Some(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map })) |
169 | } | 316 | } |
170 | }, | 317 | }, |
171 | MacroDefKind::BuiltIn(expander, _) => { | 318 | MacroDefKind::BuiltIn(expander, _) => Some(Arc::new(TokenExpander::Builtin(expander))), |
172 | Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default()))) | ||
173 | } | ||
174 | MacroDefKind::BuiltInDerive(expander, _) => { | 319 | MacroDefKind::BuiltInDerive(expander, _) => { |
175 | Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default()))) | 320 | Some(Arc::new(TokenExpander::BuiltinDerive(expander))) |
176 | } | 321 | } |
177 | MacroDefKind::BuiltInEager(..) => None, | 322 | MacroDefKind::BuiltInEager(..) => None, |
178 | MacroDefKind::ProcMacro(expander, ..) => { | 323 | MacroDefKind::ProcMacro(expander, ..) => Some(Arc::new(TokenExpander::ProcMacro(expander))), |
179 | Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default()))) | ||
180 | } | ||
181 | } | 324 | } |
182 | } | 325 | } |
183 | 326 | ||
184 | fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | ||
185 | let id = match id { | ||
186 | MacroCallId::LazyMacro(id) => id, | ||
187 | MacroCallId::EagerMacro(_id) => { | ||
188 | // FIXME: support macro_arg for eager macro | ||
189 | return None; | ||
190 | } | ||
191 | }; | ||
192 | let loc = db.lookup_intern_macro(id); | ||
193 | let arg = loc.kind.arg(db)?; | ||
194 | Some(arg.green()) | ||
195 | } | ||
196 | |||
197 | fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
198 | let arg = db.macro_arg_text(id)?; | ||
199 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg)); | ||
200 | Some(Arc::new((tt, tmap))) | ||
201 | } | ||
202 | |||
203 | fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { | 327 | fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> { |
204 | macro_expand_with_arg(db, id, None) | 328 | macro_expand_with_arg(db, id, None) |
205 | } | 329 | } |
@@ -208,19 +332,6 @@ fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<E | |||
208 | db.macro_expand(macro_call).err | 332 | db.macro_expand(macro_call).err |
209 | } | 333 | } |
210 | 334 | ||
211 | fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | ||
212 | let lazy_id = match id { | ||
213 | MacroCallId::LazyMacro(id) => id, | ||
214 | MacroCallId::EagerMacro(_id) => { | ||
215 | return None; | ||
216 | } | ||
217 | }; | ||
218 | |||
219 | let loc = db.lookup_intern_macro(lazy_id); | ||
220 | let macro_rules = db.macro_def(loc.def)?; | ||
221 | Some(macro_rules) | ||
222 | } | ||
223 | |||
224 | fn macro_expand_with_arg( | 335 | fn macro_expand_with_arg( |
225 | db: &dyn AstDatabase, | 336 | db: &dyn AstDatabase, |
226 | id: MacroCallId, | 337 | id: MacroCallId, |
@@ -254,7 +365,7 @@ fn macro_expand_with_arg( | |||
254 | Some(it) => it, | 365 | Some(it) => it, |
255 | None => return ExpandResult::str_err("Fail to find macro definition".into()), | 366 | None => return ExpandResult::str_err("Fail to find macro definition".into()), |
256 | }; | 367 | }; |
257 | let ExpandResult { value: tt, err } = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); | 368 | let ExpandResult { value: tt, err } = macro_rules.expand(db, lazy_id, ¯o_arg.0); |
258 | // Set a hard limit for the expanded tt | 369 | // Set a hard limit for the expanded tt |
259 | let count = tt.count(); | 370 | let count = tt.count(); |
260 | if count > TOKEN_LIMIT { | 371 | if count > TOKEN_LIMIT { |
@@ -294,116 +405,11 @@ fn expand_proc_macro( | |||
294 | expander.expand(db, loc.krate, ¯o_arg.0) | 405 | expander.expand(db, loc.krate, ¯o_arg.0) |
295 | } | 406 | } |
296 | 407 | ||
297 | fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { | ||
298 | match file_id.0 { | ||
299 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | ||
300 | HirFileIdRepr::MacroFile(macro_file) => { | ||
301 | db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node()) | ||
302 | } | ||
303 | } | ||
304 | } | ||
305 | |||
306 | fn parse_macro_expansion( | ||
307 | db: &dyn AstDatabase, | ||
308 | macro_file: MacroFile, | ||
309 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { | ||
310 | parse_macro_with_arg(db, macro_file, None) | ||
311 | } | ||
312 | |||
313 | fn parse_macro_with_arg( | ||
314 | db: &dyn AstDatabase, | ||
315 | macro_file: MacroFile, | ||
316 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | ||
317 | ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> { | ||
318 | let macro_call_id = macro_file.macro_call_id; | ||
319 | let result = if let Some(arg) = arg { | ||
320 | macro_expand_with_arg(db, macro_call_id, Some(arg)) | ||
321 | } else { | ||
322 | db.macro_expand(macro_call_id) | ||
323 | }; | ||
324 | |||
325 | let _p = profile::span("parse_macro_expansion"); | ||
326 | |||
327 | if let Some(err) = &result.err { | ||
328 | // Note: | ||
329 | // The final goal we would like to make all parse_macro success, | ||
330 | // such that the following log will not call anyway. | ||
331 | match macro_call_id { | ||
332 | MacroCallId::LazyMacro(id) => { | ||
333 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
334 | let node = loc.kind.node(db); | ||
335 | |||
336 | // collect parent information for warning log | ||
337 | let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| { | ||
338 | it.file_id.call_node(db) | ||
339 | }) | ||
340 | .map(|n| format!("{:#}", n.value)) | ||
341 | .collect::<Vec<_>>() | ||
342 | .join("\n"); | ||
343 | |||
344 | log::warn!( | ||
345 | "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}", | ||
346 | err, | ||
347 | node.value, | ||
348 | parents | ||
349 | ); | ||
350 | } | ||
351 | _ => { | ||
352 | log::warn!("fail on macro_parse: (reason: {:?})", err); | ||
353 | } | ||
354 | } | ||
355 | } | ||
356 | let tt = match result.value { | ||
357 | Some(tt) => tt, | ||
358 | None => return ExpandResult { value: None, err: result.err }, | ||
359 | }; | ||
360 | |||
361 | let fragment_kind = to_fragment_kind(db, macro_call_id); | ||
362 | |||
363 | log::debug!("expanded = {}", tt.as_debug_string()); | ||
364 | log::debug!("kind = {:?}", fragment_kind); | ||
365 | |||
366 | let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) { | ||
367 | Ok(it) => it, | ||
368 | Err(err) => { | ||
369 | log::debug!( | ||
370 | "failed to parse expanstion to {:?} = {}", | ||
371 | fragment_kind, | ||
372 | tt.as_debug_string() | ||
373 | ); | ||
374 | return ExpandResult::only_err(err); | ||
375 | } | ||
376 | }; | ||
377 | |||
378 | match result.err { | ||
379 | Some(err) => { | ||
380 | // Safety check for recursive identity macro. | ||
381 | let node = parse.syntax_node(); | ||
382 | let file: HirFileId = macro_file.into(); | ||
383 | let call_node = match file.call_node(db) { | ||
384 | Some(it) => it, | ||
385 | None => { | ||
386 | return ExpandResult::only_err(err); | ||
387 | } | ||
388 | }; | ||
389 | if is_self_replicating(&node, &call_node.value) { | ||
390 | return ExpandResult::only_err(err); | ||
391 | } else { | ||
392 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) } | ||
393 | } | ||
394 | } | ||
395 | None => { | ||
396 | log::debug!("parse = {:?}", parse.syntax_node().kind()); | ||
397 | ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None } | ||
398 | } | ||
399 | } | ||
400 | } | ||
401 | |||
402 | fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool { | 408 | fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool { |
403 | if diff(from, to).is_empty() { | 409 | if diff(from, to).is_empty() { |
404 | return true; | 410 | return true; |
405 | } | 411 | } |
406 | if let Some(stmts) = MacroStmts::cast(from.clone()) { | 412 | if let Some(stmts) = ast::MacroStmts::cast(from.clone()) { |
407 | if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) { | 413 | if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) { |
408 | return true; | 414 | return true; |
409 | } | 415 | } |
@@ -420,62 +426,15 @@ fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> | |||
420 | Arc::new(HygieneFrame::new(db, file_id)) | 426 | Arc::new(HygieneFrame::new(db, file_id)) |
421 | } | 427 | } |
422 | 428 | ||
423 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. | 429 | fn macro_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { |
424 | /// FIXME: Not completed | 430 | match id { |
425 | fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { | 431 | MacroCallId::LazyMacro(id) => { |
426 | let lazy_id = match id { | 432 | let loc: MacroCallLoc = db.lookup_intern_macro(id); |
427 | MacroCallId::LazyMacro(id) => id, | 433 | loc.kind.fragment_kind() |
428 | MacroCallId::EagerMacro(id) => { | ||
429 | return db.lookup_intern_eager_expansion(id).fragment; | ||
430 | } | ||
431 | }; | ||
432 | let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value; | ||
433 | |||
434 | let parent = match syn.parent() { | ||
435 | Some(it) => it, | ||
436 | None => return FragmentKind::Statements, | ||
437 | }; | ||
438 | |||
439 | match parent.kind() { | ||
440 | MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, | ||
441 | MACRO_STMTS => FragmentKind::Statements, | ||
442 | MACRO_PAT => FragmentKind::Pattern, | ||
443 | MACRO_TYPE => FragmentKind::Type, | ||
444 | ITEM_LIST => FragmentKind::Items, | ||
445 | LET_STMT => { | ||
446 | // FIXME: Handle LHS Pattern | ||
447 | FragmentKind::Expr | ||
448 | } | 434 | } |
449 | EXPR_STMT => FragmentKind::Statements, | 435 | MacroCallId::EagerMacro(id) => { |
450 | BLOCK_EXPR => FragmentKind::Statements, | 436 | let loc: EagerCallLoc = db.lookup_intern_eager_expansion(id); |
451 | ARG_LIST => FragmentKind::Expr, | 437 | loc.fragment |
452 | TRY_EXPR => FragmentKind::Expr, | ||
453 | TUPLE_EXPR => FragmentKind::Expr, | ||
454 | PAREN_EXPR => FragmentKind::Expr, | ||
455 | ARRAY_EXPR => FragmentKind::Expr, | ||
456 | FOR_EXPR => FragmentKind::Expr, | ||
457 | PATH_EXPR => FragmentKind::Expr, | ||
458 | CLOSURE_EXPR => FragmentKind::Expr, | ||
459 | CONDITION => FragmentKind::Expr, | ||
460 | BREAK_EXPR => FragmentKind::Expr, | ||
461 | RETURN_EXPR => FragmentKind::Expr, | ||
462 | MATCH_EXPR => FragmentKind::Expr, | ||
463 | MATCH_ARM => FragmentKind::Expr, | ||
464 | MATCH_GUARD => FragmentKind::Expr, | ||
465 | RECORD_EXPR_FIELD => FragmentKind::Expr, | ||
466 | CALL_EXPR => FragmentKind::Expr, | ||
467 | INDEX_EXPR => FragmentKind::Expr, | ||
468 | METHOD_CALL_EXPR => FragmentKind::Expr, | ||
469 | FIELD_EXPR => FragmentKind::Expr, | ||
470 | AWAIT_EXPR => FragmentKind::Expr, | ||
471 | CAST_EXPR => FragmentKind::Expr, | ||
472 | REF_EXPR => FragmentKind::Expr, | ||
473 | PREFIX_EXPR => FragmentKind::Expr, | ||
474 | RANGE_EXPR => FragmentKind::Expr, | ||
475 | BIN_EXPR => FragmentKind::Expr, | ||
476 | _ => { | ||
477 | // Unknown , Just guess it is `Items` | ||
478 | FragmentKind::Items | ||
479 | } | 438 | } |
480 | } | 439 | } |
481 | } | 440 | } |
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs index f12132f84..85491fe8b 100644 --- a/crates/hir_expand/src/eager.rs +++ b/crates/hir_expand/src/eager.rs | |||
@@ -175,8 +175,13 @@ fn lazy_expand( | |||
175 | ) -> ExpandResult<Option<InFile<SyntaxNode>>> { | 175 | ) -> ExpandResult<Option<InFile<SyntaxNode>>> { |
176 | let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); | 176 | let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); |
177 | 177 | ||
178 | let fragment = crate::to_fragment_kind(¯o_call.value); | ||
178 | let id: MacroCallId = def | 179 | let id: MacroCallId = def |
179 | .as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id) }) | 180 | .as_lazy_macro( |
181 | db, | ||
182 | krate, | ||
183 | MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), fragment }, | ||
184 | ) | ||
180 | .into(); | 185 | .into(); |
181 | 186 | ||
182 | let err = db.macro_expand_error(id); | 187 | let err = db.macro_expand_error(id); |
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index 779725629..aca69e35a 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs | |||
@@ -5,6 +5,7 @@ | |||
5 | use std::sync::Arc; | 5 | use std::sync::Arc; |
6 | 6 | ||
7 | use base_db::CrateId; | 7 | use base_db::CrateId; |
8 | use db::TokenExpander; | ||
8 | use either::Either; | 9 | use either::Either; |
9 | use mbe::Origin; | 10 | use mbe::Origin; |
10 | use parser::SyntaxKind; | 11 | use parser::SyntaxKind; |
@@ -31,10 +32,14 @@ impl Hygiene { | |||
31 | } | 32 | } |
32 | 33 | ||
33 | // FIXME: this should just return name | 34 | // FIXME: this should just return name |
34 | pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> { | 35 | pub fn name_ref_to_name( |
36 | &self, | ||
37 | db: &dyn AstDatabase, | ||
38 | name_ref: ast::NameRef, | ||
39 | ) -> Either<Name, CrateId> { | ||
35 | if let Some(frames) = &self.frames { | 40 | if let Some(frames) = &self.frames { |
36 | if name_ref.text() == "$crate" { | 41 | if name_ref.text() == "$crate" { |
37 | if let Some(krate) = frames.root_crate(name_ref.syntax()) { | 42 | if let Some(krate) = frames.root_crate(db, name_ref.syntax()) { |
38 | return Either::Right(krate); | 43 | return Either::Right(krate); |
39 | } | 44 | } |
40 | } | 45 | } |
@@ -43,15 +48,19 @@ impl Hygiene { | |||
43 | Either::Left(name_ref.as_name()) | 48 | Either::Left(name_ref.as_name()) |
44 | } | 49 | } |
45 | 50 | ||
46 | pub fn local_inner_macros(&self, path: ast::Path) -> Option<CrateId> { | 51 | pub fn local_inner_macros(&self, db: &dyn AstDatabase, path: ast::Path) -> Option<CrateId> { |
47 | let mut token = path.syntax().first_token()?.text_range(); | 52 | let mut token = path.syntax().first_token()?.text_range(); |
48 | let frames = self.frames.as_ref()?; | 53 | let frames = self.frames.as_ref()?; |
49 | let mut current = frames.0.clone(); | 54 | let mut current = frames.0.clone(); |
50 | 55 | ||
51 | loop { | 56 | loop { |
52 | let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(token)?; | 57 | let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?; |
53 | if origin == Origin::Def { | 58 | if origin == Origin::Def { |
54 | return if current.local_inner { frames.root_crate(path.syntax()) } else { None }; | 59 | return if current.local_inner { |
60 | frames.root_crate(db, path.syntax()) | ||
61 | } else { | ||
62 | None | ||
63 | }; | ||
55 | } | 64 | } |
56 | current = current.call_site.as_ref()?.clone(); | 65 | current = current.call_site.as_ref()?.clone(); |
57 | token = mapped.value; | 66 | token = mapped.value; |
@@ -81,13 +90,13 @@ impl HygieneFrames { | |||
81 | HygieneFrames(Arc::new(HygieneFrame::new(db, file_id))) | 90 | HygieneFrames(Arc::new(HygieneFrame::new(db, file_id))) |
82 | } | 91 | } |
83 | 92 | ||
84 | fn root_crate(&self, node: &SyntaxNode) -> Option<CrateId> { | 93 | fn root_crate(&self, db: &dyn AstDatabase, node: &SyntaxNode) -> Option<CrateId> { |
85 | let mut token = node.first_token()?.text_range(); | 94 | let mut token = node.first_token()?.text_range(); |
86 | let mut result = self.0.krate; | 95 | let mut result = self.0.krate; |
87 | let mut current = self.0.clone(); | 96 | let mut current = self.0.clone(); |
88 | 97 | ||
89 | while let Some((mapped, origin)) = | 98 | while let Some((mapped, origin)) = |
90 | current.expansion.as_ref().and_then(|it| it.map_ident_up(token)) | 99 | current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token)) |
91 | { | 100 | { |
92 | result = current.krate; | 101 | result = current.krate; |
93 | 102 | ||
@@ -111,26 +120,41 @@ impl HygieneFrames { | |||
111 | 120 | ||
112 | #[derive(Debug, Clone, PartialEq, Eq)] | 121 | #[derive(Debug, Clone, PartialEq, Eq)] |
113 | struct HygieneInfo { | 122 | struct HygieneInfo { |
114 | arg_start: InFile<TextSize>, | 123 | file: MacroFile, |
115 | /// The `macro_rules!` arguments. | 124 | /// The `macro_rules!` arguments. |
116 | def_start: Option<InFile<TextSize>>, | 125 | def_start: Option<InFile<TextSize>>, |
117 | 126 | ||
118 | macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, | 127 | macro_def: Arc<TokenExpander>, |
119 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, | 128 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, |
120 | exp_map: Arc<mbe::TokenMap>, | 129 | exp_map: Arc<mbe::TokenMap>, |
121 | } | 130 | } |
122 | 131 | ||
123 | impl HygieneInfo { | 132 | impl HygieneInfo { |
124 | fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)> { | 133 | fn map_ident_up( |
134 | &self, | ||
135 | db: &dyn AstDatabase, | ||
136 | token: TextRange, | ||
137 | ) -> Option<(InFile<TextRange>, Origin)> { | ||
125 | let token_id = self.exp_map.token_by_range(token)?; | 138 | let token_id = self.exp_map.token_by_range(token)?; |
126 | 139 | ||
127 | let (token_id, origin) = self.macro_def.0.map_id_up(token_id); | 140 | let (token_id, origin) = self.macro_def.map_id_up(token_id); |
128 | let (token_map, tt) = match origin { | 141 | let (token_map, tt) = match origin { |
129 | mbe::Origin::Call => (&self.macro_arg.1, self.arg_start), | 142 | mbe::Origin::Call => { |
130 | mbe::Origin::Def => ( | 143 | let call_id = match self.file.macro_call_id { |
131 | &self.macro_def.1, | 144 | MacroCallId::LazyMacro(lazy) => lazy, |
132 | *self.def_start.as_ref().expect("`Origin::Def` used with non-`macro_rules!` macro"), | 145 | MacroCallId::EagerMacro(_) => unreachable!(), |
133 | ), | 146 | }; |
147 | let loc: MacroCallLoc = db.lookup_intern_macro(call_id); | ||
148 | let arg_start = loc.kind.arg(db)?.text_range().start(); | ||
149 | (&self.macro_arg.1, InFile::new(loc.kind.file_id(), arg_start)) | ||
150 | } | ||
151 | mbe::Origin::Def => match (&*self.macro_def, self.def_start) { | ||
152 | (TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt)) | ||
153 | | (TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt)) => { | ||
154 | (def_site_token_map, tt) | ||
155 | } | ||
156 | _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), | ||
157 | }, | ||
134 | }; | 158 | }; |
135 | 159 | ||
136 | let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; | 160 | let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; |
@@ -143,8 +167,6 @@ fn make_hygiene_info( | |||
143 | macro_file: MacroFile, | 167 | macro_file: MacroFile, |
144 | loc: &MacroCallLoc, | 168 | loc: &MacroCallLoc, |
145 | ) -> Option<HygieneInfo> { | 169 | ) -> Option<HygieneInfo> { |
146 | let arg_tt = loc.kind.arg(db)?; | ||
147 | |||
148 | let def_offset = loc.def.ast_id().left().and_then(|id| { | 170 | let def_offset = loc.def.ast_id().left().and_then(|id| { |
149 | let def_tt = match id.to_node(db) { | 171 | let def_tt = match id.to_node(db) { |
150 | ast::Macro::MacroRules(mac) => mac.token_tree()?.syntax().text_range().start(), | 172 | ast::Macro::MacroRules(mac) => mac.token_tree()?.syntax().text_range().start(), |
@@ -157,13 +179,7 @@ fn make_hygiene_info( | |||
157 | let (_, exp_map) = db.parse_macro_expansion(macro_file).value?; | 179 | let (_, exp_map) = db.parse_macro_expansion(macro_file).value?; |
158 | let macro_arg = db.macro_arg(macro_file.macro_call_id)?; | 180 | let macro_arg = db.macro_arg(macro_file.macro_call_id)?; |
159 | 181 | ||
160 | Some(HygieneInfo { | 182 | Some(HygieneInfo { file: macro_file, def_start: def_offset, macro_arg, macro_def, exp_map }) |
161 | arg_start: InFile::new(loc.kind.file_id(), arg_tt.text_range().start()), | ||
162 | def_start: def_offset, | ||
163 | macro_arg, | ||
164 | macro_def, | ||
165 | exp_map, | ||
166 | }) | ||
167 | } | 183 | } |
168 | 184 | ||
169 | impl HygieneFrame { | 185 | impl HygieneFrame { |
@@ -174,7 +190,8 @@ impl HygieneFrame { | |||
174 | MacroCallId::EagerMacro(_id) => (None, None, false), | 190 | MacroCallId::EagerMacro(_id) => (None, None, false), |
175 | MacroCallId::LazyMacro(id) => { | 191 | MacroCallId::LazyMacro(id) => { |
176 | let loc = db.lookup_intern_macro(id); | 192 | let loc = db.lookup_intern_macro(id); |
177 | let info = make_hygiene_info(db, macro_file, &loc); | 193 | let info = make_hygiene_info(db, macro_file, &loc) |
194 | .map(|info| (loc.kind.file_id(), info)); | ||
178 | match loc.def.kind { | 195 | match loc.def.kind { |
179 | MacroDefKind::Declarative(_) => { | 196 | MacroDefKind::Declarative(_) => { |
180 | (info, Some(loc.def.krate), loc.def.local_inner) | 197 | (info, Some(loc.def.krate), loc.def.local_inner) |
@@ -188,7 +205,7 @@ impl HygieneFrame { | |||
188 | }, | 205 | }, |
189 | }; | 206 | }; |
190 | 207 | ||
191 | let info = match info { | 208 | let (calling_file, info) = match info { |
192 | None => { | 209 | None => { |
193 | return HygieneFrame { | 210 | return HygieneFrame { |
194 | expansion: None, | 211 | expansion: None, |
@@ -202,7 +219,7 @@ impl HygieneFrame { | |||
202 | }; | 219 | }; |
203 | 220 | ||
204 | let def_site = info.def_start.map(|it| db.hygiene_frame(it.file_id)); | 221 | let def_site = info.def_start.map(|it| db.hygiene_frame(it.file_id)); |
205 | let call_site = Some(db.hygiene_frame(info.arg_start.file_id)); | 222 | let call_site = Some(db.hygiene_frame(calling_file)); |
206 | 223 | ||
207 | HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site } | 224 | HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site } |
208 | } | 225 | } |
diff --git a/crates/hir_expand/src/input.rs b/crates/hir_expand/src/input.rs new file mode 100644 index 000000000..112216859 --- /dev/null +++ b/crates/hir_expand/src/input.rs | |||
@@ -0,0 +1,94 @@ | |||
1 | //! Macro input conditioning. | ||
2 | |||
3 | use syntax::{ | ||
4 | ast::{self, AttrsOwner}, | ||
5 | AstNode, SyntaxNode, | ||
6 | }; | ||
7 | |||
8 | use crate::{ | ||
9 | db::AstDatabase, | ||
10 | name::{name, AsName}, | ||
11 | LazyMacroId, MacroCallKind, MacroCallLoc, | ||
12 | }; | ||
13 | |||
14 | pub(crate) fn process_macro_input( | ||
15 | db: &dyn AstDatabase, | ||
16 | node: SyntaxNode, | ||
17 | id: LazyMacroId, | ||
18 | ) -> SyntaxNode { | ||
19 | let loc: MacroCallLoc = db.lookup_intern_macro(id); | ||
20 | |||
21 | match loc.kind { | ||
22 | MacroCallKind::FnLike { .. } => node, | ||
23 | MacroCallKind::Derive { derive_attr_index, .. } => { | ||
24 | let item = match ast::Item::cast(node.clone()) { | ||
25 | Some(item) => item, | ||
26 | None => return node, | ||
27 | }; | ||
28 | |||
29 | remove_derives_up_to(item, derive_attr_index as usize).syntax().clone() | ||
30 | } | ||
31 | } | ||
32 | } | ||
33 | |||
34 | /// Removes `#[derive]` attributes from `item`, up to `attr_index`. | ||
35 | fn remove_derives_up_to(item: ast::Item, attr_index: usize) -> ast::Item { | ||
36 | let item = item.clone_for_update(); | ||
37 | for attr in item.attrs().take(attr_index + 1) { | ||
38 | if let Some(name) = | ||
39 | attr.path().and_then(|path| path.as_single_segment()).and_then(|seg| seg.name_ref()) | ||
40 | { | ||
41 | if name.as_name() == name![derive] { | ||
42 | attr.syntax().detach(); | ||
43 | } | ||
44 | } | ||
45 | } | ||
46 | item | ||
47 | } | ||
48 | |||
49 | #[cfg(test)] | ||
50 | mod tests { | ||
51 | use base_db::fixture::WithFixture; | ||
52 | use base_db::SourceDatabase; | ||
53 | use expect_test::{expect, Expect}; | ||
54 | |||
55 | use crate::test_db::TestDB; | ||
56 | |||
57 | use super::*; | ||
58 | |||
59 | fn test_remove_derives_up_to(attr: usize, ra_fixture: &str, expect: Expect) { | ||
60 | let (db, file_id) = TestDB::with_single_file(&ra_fixture); | ||
61 | let parsed = db.parse(file_id); | ||
62 | |||
63 | let mut items: Vec<_> = | ||
64 | parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect(); | ||
65 | assert_eq!(items.len(), 1); | ||
66 | |||
67 | let item = remove_derives_up_to(items.pop().unwrap(), attr); | ||
68 | expect.assert_eq(&item.to_string()); | ||
69 | } | ||
70 | |||
71 | #[test] | ||
72 | fn remove_derive() { | ||
73 | test_remove_derives_up_to( | ||
74 | 2, | ||
75 | r#" | ||
76 | #[allow(unused)] | ||
77 | #[derive(Copy)] | ||
78 | #[derive(Hello)] | ||
79 | #[derive(Clone)] | ||
80 | struct A { | ||
81 | bar: u32 | ||
82 | } | ||
83 | "#, | ||
84 | expect![[r#" | ||
85 | #[allow(unused)] | ||
86 | |||
87 | |||
88 | #[derive(Clone)] | ||
89 | struct A { | ||
90 | bar: u32 | ||
91 | }"#]], | ||
92 | ); | ||
93 | } | ||
94 | } | ||
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index a0e6aec62..5df11856e 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs | |||
@@ -14,9 +14,12 @@ pub mod builtin_macro; | |||
14 | pub mod proc_macro; | 14 | pub mod proc_macro; |
15 | pub mod quote; | 15 | pub mod quote; |
16 | pub mod eager; | 16 | pub mod eager; |
17 | mod input; | ||
17 | 18 | ||
18 | use either::Either; | 19 | use either::Either; |
20 | |||
19 | pub use mbe::{ExpandError, ExpandResult}; | 21 | pub use mbe::{ExpandError, ExpandResult}; |
22 | pub use parser::FragmentKind; | ||
20 | 23 | ||
21 | use std::hash::Hash; | 24 | use std::hash::Hash; |
22 | use std::sync::Arc; | 25 | use std::sync::Arc; |
@@ -290,13 +293,21 @@ pub struct MacroCallLoc { | |||
290 | 293 | ||
291 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 294 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
292 | pub enum MacroCallKind { | 295 | pub enum MacroCallKind { |
293 | FnLike { ast_id: AstId<ast::MacroCall> }, | 296 | FnLike { |
294 | Derive { ast_id: AstId<ast::Item>, derive_name: String, derive_attr: AttrId }, | 297 | ast_id: AstId<ast::MacroCall>, |
298 | fragment: FragmentKind, | ||
299 | }, | ||
300 | Derive { | ||
301 | ast_id: AstId<ast::Item>, | ||
302 | derive_name: String, | ||
303 | /// Syntactical index of the invoking `#[derive]` attribute. | ||
304 | /// | ||
305 | /// Outer attributes are counted first, then inner attributes. This does not support | ||
306 | /// out-of-line modules, which may have attributes spread across 2 files! | ||
307 | derive_attr_index: u32, | ||
308 | }, | ||
295 | } | 309 | } |
296 | 310 | ||
297 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
298 | pub struct AttrId(pub u32); | ||
299 | |||
300 | impl MacroCallKind { | 311 | impl MacroCallKind { |
301 | fn file_id(&self) -> HirFileId { | 312 | fn file_id(&self) -> HirFileId { |
302 | match self { | 313 | match self { |
@@ -324,6 +335,13 @@ impl MacroCallKind { | |||
324 | MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()), | 335 | MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()), |
325 | } | 336 | } |
326 | } | 337 | } |
338 | |||
339 | fn fragment_kind(&self) -> FragmentKind { | ||
340 | match self { | ||
341 | MacroCallKind::FnLike { fragment, .. } => *fragment, | ||
342 | MacroCallKind::Derive { .. } => FragmentKind::Items, | ||
343 | } | ||
344 | } | ||
327 | } | 345 | } |
328 | 346 | ||
329 | impl MacroCallId { | 347 | impl MacroCallId { |
@@ -351,13 +369,12 @@ pub struct ExpansionInfo { | |||
351 | /// The `macro_rules!` arguments. | 369 | /// The `macro_rules!` arguments. |
352 | def: Option<InFile<ast::TokenTree>>, | 370 | def: Option<InFile<ast::TokenTree>>, |
353 | 371 | ||
354 | macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>, | 372 | macro_def: Arc<db::TokenExpander>, |
355 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, | 373 | macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>, |
356 | exp_map: Arc<mbe::TokenMap>, | 374 | exp_map: Arc<mbe::TokenMap>, |
357 | } | 375 | } |
358 | 376 | ||
359 | pub use mbe::Origin; | 377 | pub use mbe::Origin; |
360 | use parser::FragmentKind; | ||
361 | 378 | ||
362 | impl ExpansionInfo { | 379 | impl ExpansionInfo { |
363 | pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { | 380 | pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { |
@@ -368,7 +385,7 @@ impl ExpansionInfo { | |||
368 | assert_eq!(token.file_id, self.arg.file_id); | 385 | assert_eq!(token.file_id, self.arg.file_id); |
369 | let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; | 386 | let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; |
370 | let token_id = self.macro_arg.1.token_by_range(range)?; | 387 | let token_id = self.macro_arg.1.token_by_range(range)?; |
371 | let token_id = self.macro_def.0.map_id_down(token_id); | 388 | let token_id = self.macro_def.map_id_down(token_id); |
372 | 389 | ||
373 | let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 390 | let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; |
374 | 391 | ||
@@ -383,17 +400,16 @@ impl ExpansionInfo { | |||
383 | ) -> Option<(InFile<SyntaxToken>, Origin)> { | 400 | ) -> Option<(InFile<SyntaxToken>, Origin)> { |
384 | let token_id = self.exp_map.token_by_range(token.value.text_range())?; | 401 | let token_id = self.exp_map.token_by_range(token.value.text_range())?; |
385 | 402 | ||
386 | let (token_id, origin) = self.macro_def.0.map_id_up(token_id); | 403 | let (token_id, origin) = self.macro_def.map_id_up(token_id); |
387 | let (token_map, tt) = match origin { | 404 | let (token_map, tt) = match origin { |
388 | mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), | 405 | mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), |
389 | mbe::Origin::Def => ( | 406 | mbe::Origin::Def => match (&*self.macro_def, self.def.as_ref()) { |
390 | &self.macro_def.1, | 407 | (db::TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt)) |
391 | self.def | 408 | | (db::TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt)) => { |
392 | .as_ref() | 409 | (def_site_token_map, tt.as_ref().map(|tt| tt.syntax().clone())) |
393 | .expect("`Origin::Def` used with non-`macro_rules!` macro") | 410 | } |
394 | .as_ref() | 411 | _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), |
395 | .map(|tt| tt.syntax().clone()), | 412 | }, |
396 | ), | ||
397 | }; | 413 | }; |
398 | 414 | ||
399 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; | 415 | let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; |
@@ -563,3 +579,59 @@ impl<N: AstNode> InFile<N> { | |||
563 | self.with_value(self.value.syntax()) | 579 | self.with_value(self.value.syntax()) |
564 | } | 580 | } |
565 | } | 581 | } |
582 | |||
583 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. | ||
584 | /// FIXME: Not completed | ||
585 | pub fn to_fragment_kind(call: &ast::MacroCall) -> FragmentKind { | ||
586 | use syntax::SyntaxKind::*; | ||
587 | |||
588 | let syn = call.syntax(); | ||
589 | |||
590 | let parent = match syn.parent() { | ||
591 | Some(it) => it, | ||
592 | None => return FragmentKind::Statements, | ||
593 | }; | ||
594 | |||
595 | match parent.kind() { | ||
596 | MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, | ||
597 | MACRO_STMTS => FragmentKind::Statements, | ||
598 | MACRO_PAT => FragmentKind::Pattern, | ||
599 | MACRO_TYPE => FragmentKind::Type, | ||
600 | ITEM_LIST => FragmentKind::Items, | ||
601 | LET_STMT => { | ||
602 | // FIXME: Handle LHS Pattern | ||
603 | FragmentKind::Expr | ||
604 | } | ||
605 | EXPR_STMT => FragmentKind::Statements, | ||
606 | BLOCK_EXPR => FragmentKind::Statements, | ||
607 | ARG_LIST => FragmentKind::Expr, | ||
608 | TRY_EXPR => FragmentKind::Expr, | ||
609 | TUPLE_EXPR => FragmentKind::Expr, | ||
610 | PAREN_EXPR => FragmentKind::Expr, | ||
611 | ARRAY_EXPR => FragmentKind::Expr, | ||
612 | FOR_EXPR => FragmentKind::Expr, | ||
613 | PATH_EXPR => FragmentKind::Expr, | ||
614 | CLOSURE_EXPR => FragmentKind::Expr, | ||
615 | CONDITION => FragmentKind::Expr, | ||
616 | BREAK_EXPR => FragmentKind::Expr, | ||
617 | RETURN_EXPR => FragmentKind::Expr, | ||
618 | MATCH_EXPR => FragmentKind::Expr, | ||
619 | MATCH_ARM => FragmentKind::Expr, | ||
620 | MATCH_GUARD => FragmentKind::Expr, | ||
621 | RECORD_EXPR_FIELD => FragmentKind::Expr, | ||
622 | CALL_EXPR => FragmentKind::Expr, | ||
623 | INDEX_EXPR => FragmentKind::Expr, | ||
624 | METHOD_CALL_EXPR => FragmentKind::Expr, | ||
625 | FIELD_EXPR => FragmentKind::Expr, | ||
626 | AWAIT_EXPR => FragmentKind::Expr, | ||
627 | CAST_EXPR => FragmentKind::Expr, | ||
628 | REF_EXPR => FragmentKind::Expr, | ||
629 | PREFIX_EXPR => FragmentKind::Expr, | ||
630 | RANGE_EXPR => FragmentKind::Expr, | ||
631 | BIN_EXPR => FragmentKind::Expr, | ||
632 | _ => { | ||
633 | // Unknown , Just guess it is `Items` | ||
634 | FragmentKind::Items | ||
635 | } | ||
636 | } | ||
637 | } | ||
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs index 75e950816..d5643393a 100644 --- a/crates/hir_expand/src/proc_macro.rs +++ b/crates/hir_expand/src/proc_macro.rs | |||
@@ -2,7 +2,6 @@ | |||
2 | 2 | ||
3 | use crate::db::AstDatabase; | 3 | use crate::db::AstDatabase; |
4 | use base_db::{CrateId, ProcMacroId}; | 4 | use base_db::{CrateId, ProcMacroId}; |
5 | use tt::buffer::{Cursor, TokenBuffer}; | ||
6 | 5 | ||
7 | #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] | 6 | #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] |
8 | pub struct ProcMacroExpander { | 7 | pub struct ProcMacroExpander { |
@@ -44,9 +43,6 @@ impl ProcMacroExpander { | |||
44 | .clone() | 43 | .clone() |
45 | .ok_or_else(|| err!("No derive macro found."))?; | 44 | .ok_or_else(|| err!("No derive macro found."))?; |
46 | 45 | ||
47 | let tt = remove_derive_attrs(tt) | ||
48 | .ok_or_else(|| err!("Fail to remove derive for custom derive"))?; | ||
49 | |||
50 | // Proc macros have access to the environment variables of the invoking crate. | 46 | // Proc macros have access to the environment variables of the invoking crate. |
51 | let env = &krate_graph[calling_crate].env; | 47 | let env = &krate_graph[calling_crate].env; |
52 | 48 | ||
@@ -56,101 +52,3 @@ impl ProcMacroExpander { | |||
56 | } | 52 | } |
57 | } | 53 | } |
58 | } | 54 | } |
59 | |||
60 | fn eat_punct(cursor: &mut Cursor, c: char) -> bool { | ||
61 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() { | ||
62 | if punct.char == c { | ||
63 | *cursor = cursor.bump(); | ||
64 | return true; | ||
65 | } | ||
66 | } | ||
67 | false | ||
68 | } | ||
69 | |||
70 | fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { | ||
71 | if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() { | ||
72 | if Some(kind) == subtree.delimiter_kind() { | ||
73 | *cursor = cursor.bump_subtree(); | ||
74 | return true; | ||
75 | } | ||
76 | } | ||
77 | false | ||
78 | } | ||
79 | |||
80 | fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { | ||
81 | if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() { | ||
82 | if t == ident.text.as_str() { | ||
83 | *cursor = cursor.bump(); | ||
84 | return true; | ||
85 | } | ||
86 | } | ||
87 | false | ||
88 | } | ||
89 | |||
90 | fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> { | ||
91 | let buffer = TokenBuffer::from_tokens(&tt.token_trees); | ||
92 | let mut p = buffer.begin(); | ||
93 | let mut result = tt::Subtree::default(); | ||
94 | |||
95 | while !p.eof() { | ||
96 | let curr = p; | ||
97 | |||
98 | if eat_punct(&mut p, '#') { | ||
99 | eat_punct(&mut p, '!'); | ||
100 | let parent = p; | ||
101 | if eat_subtree(&mut p, tt::DelimiterKind::Bracket) { | ||
102 | if eat_ident(&mut p, "derive") { | ||
103 | p = parent.bump(); | ||
104 | continue; | ||
105 | } | ||
106 | } | ||
107 | } | ||
108 | |||
109 | result.token_trees.push(curr.token_tree()?.cloned()); | ||
110 | p = curr.bump(); | ||
111 | } | ||
112 | |||
113 | Some(result) | ||
114 | } | ||
115 | |||
116 | #[cfg(test)] | ||
117 | mod tests { | ||
118 | use super::*; | ||
119 | use test_utils::assert_eq_text; | ||
120 | |||
121 | #[test] | ||
122 | fn test_remove_derive_attrs() { | ||
123 | let tt = mbe::parse_to_token_tree( | ||
124 | r#" | ||
125 | #[allow(unused)] | ||
126 | #[derive(Copy)] | ||
127 | #[derive(Hello)] | ||
128 | struct A { | ||
129 | bar: u32 | ||
130 | } | ||
131 | "#, | ||
132 | ) | ||
133 | .unwrap() | ||
134 | .0; | ||
135 | let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap()); | ||
136 | |||
137 | assert_eq_text!( | ||
138 | r#" | ||
139 | SUBTREE $ | ||
140 | PUNCH # [alone] 0 | ||
141 | SUBTREE [] 1 | ||
142 | IDENT allow 2 | ||
143 | SUBTREE () 3 | ||
144 | IDENT unused 4 | ||
145 | IDENT struct 15 | ||
146 | IDENT A 16 | ||
147 | SUBTREE {} 17 | ||
148 | IDENT bar 18 | ||
149 | PUNCH : [alone] 19 | ||
150 | IDENT u32 20 | ||
151 | "# | ||
152 | .trim(), | ||
153 | &result | ||
154 | ); | ||
155 | } | ||
156 | } | ||
diff --git a/crates/hir_expand/src/quote.rs b/crates/hir_expand/src/quote.rs index c82487ef0..230a59964 100644 --- a/crates/hir_expand/src/quote.rs +++ b/crates/hir_expand/src/quote.rs | |||
@@ -196,8 +196,8 @@ impl_to_to_tokentrees! { | |||
196 | tt::Literal => self { self }; | 196 | tt::Literal => self { self }; |
197 | tt::Ident => self { self }; | 197 | tt::Ident => self { self }; |
198 | tt::Punct => self { self }; | 198 | tt::Punct => self { self }; |
199 | &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}; | 199 | &str => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}}; |
200 | String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}} | 200 | String => self { tt::Literal{text: format!("\"{}\"", self.escape_debug()).into(), id: tt::TokenId::unspecified()}} |
201 | } | 201 | } |
202 | 202 | ||
203 | #[cfg(test)] | 203 | #[cfg(test)] |