-rw-r--r--  crates/hir_expand/src/db.rs      | 368
-rw-r--r--  crates/hir_expand/src/hygiene.rs |  16
-rw-r--r--  crates/hir_expand/src/lib.rs     |  21
-rw-r--r--  docs/dev/style.md                |  33
4 files changed, 238 insertions, 200 deletions
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 1389e30db..3e9abd8a1 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -28,9 +28,9 @@ const TOKEN_LIMIT: usize = 524288;
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
     /// Old-style `macro_rules`.
-    MacroRules(mbe::MacroRules),
+    MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap },
     /// AKA macros 2.0.
-    MacroDef(mbe::MacroDef),
+    MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap },
     /// Stuff like `line!` and `file!`.
     Builtin(BuiltinFnLikeExpander),
     /// `derive(Copy)` and such.
@@ -47,8 +47,8 @@ impl TokenExpander {
         tt: &tt::Subtree,
     ) -> mbe::ExpandResult<tt::Subtree> {
         match self {
-            TokenExpander::MacroRules(it) => it.expand(tt),
-            TokenExpander::MacroDef(it) => it.expand(tt),
+            TokenExpander::MacroRules { mac, .. } => mac.expand(tt),
+            TokenExpander::MacroDef { mac, .. } => mac.expand(tt),
             TokenExpander::Builtin(it) => it.expand(db, id, tt),
             // FIXME switch these to ExpandResult as well
             TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
@@ -63,21 +63,21 @@ impl TokenExpander {
 
     pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
-            TokenExpander::MacroRules(it) => it.map_id_down(id),
-            TokenExpander::MacroDef(it) => it.map_id_down(id),
-            TokenExpander::Builtin(..) => id,
-            TokenExpander::BuiltinDerive(..) => id,
-            TokenExpander::ProcMacro(..) => id,
+            TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id),
+            TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id),
+            TokenExpander::Builtin(..)
+            | TokenExpander::BuiltinDerive(..)
+            | TokenExpander::ProcMacro(..) => id,
         }
     }
 
     pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
-            TokenExpander::MacroRules(it) => it.map_id_up(id),
-            TokenExpander::MacroDef(it) => it.map_id_up(id),
-            TokenExpander::Builtin(..) => (id, mbe::Origin::Call),
-            TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call),
-            TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
+            TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id),
+            TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id),
+            TokenExpander::Builtin(..)
+            | TokenExpander::BuiltinDerive(..)
+            | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
         }
     }
 }
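The two mapping functions above push token ids down into an expansion and pull them back up, tagging each recovered id with whether it originated at the macro call site or at the macro definition site. The following self-contained sketch shows the shape of that round trip; the offset-based id split is purely illustrative and is not mbe's actual scheme.

```rust
// Conceptual sketch (not mbe's real id scheme): why map_id_up returns an Origin.
// Ids for tokens coming from the macro *definition* and from the macro *call*
// must stay distinguishable inside the expansion, so they can be mapped back
// to the right file. Here the two id spaces are separated by a constant offset.

#[derive(Debug, PartialEq)]
enum Origin {
    Call,
    Def,
}

const DEF_SITE_BASE: u32 = 1 << 31;

fn map_id_down(call_site_id: u32) -> u32 {
    // Ids handed in from the call site pass through unchanged.
    call_site_id
}

fn map_id_up(id_in_expansion: u32) -> (u32, Origin) {
    if id_in_expansion >= DEF_SITE_BASE {
        (id_in_expansion - DEF_SITE_BASE, Origin::Def)
    } else {
        (id_in_expansion, Origin::Call)
    }
}

fn main() {
    assert_eq!(map_id_up(map_id_down(7)), (7, Origin::Call));
    assert_eq!(map_id_up(DEF_SITE_BASE + 3), (3, Origin::Def));
}
```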
@@ -87,28 +87,48 @@ impl TokenExpander {
 pub trait AstDatabase: SourceDatabase {
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
 
+    /// Main public API -- parsis a hir file, not caring whether it's a real
+    /// file or a macro expansion.
     #[salsa::transparent]
     fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
-
-    #[salsa::interned]
-    fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
-    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
-    #[salsa::transparent]
-    fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
-    fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
+    /// Implementation for the macro case.
     fn parse_macro_expansion(
         &self,
         macro_file: MacroFile,
     ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
-    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
-
-    /// Firewall query that returns the error from the `macro_expand` query.
-    fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
 
+    /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
+    /// reason why we use salsa at all.
+    ///
+    /// We encode macro definitions into ids of macro calls, this what allows us
+    /// to be incremental.
+    #[salsa::interned]
+    fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
+    /// Certain built-in macros are eager (`format!(concat!("file: ", file!(), "{}"")), 92`).
+    /// For them, we actually want to encode the whole token tree as an argument.
     #[salsa::interned]
     fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
 
+    /// Lowers syntactic macro call to a token tree representation.
+    #[salsa::transparent]
+    fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
+    /// Extracts syntax node, corresponding to a macro call. That's a firewall
+    /// query, only typing in the macro call itself changes the returned
+    /// subtree.
+    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
+    /// Gets the expander for this macro. This compiles declarative macros, and
+    /// just fetches procedural ones.
+    fn macro_def(&self, id: MacroDefId) -> Option<Arc<TokenExpander>>;
+
+    /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory)
+    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
+    /// Special case of the previous query for procedural macros. We can't LRU
+    /// proc macros, since they are not deterministic in general, and
+    /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
+    /// heroically debugged this once!
     fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>;
+    /// Firewall query that returns the error from the `macro_expand` query.
+    fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
 
     fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
 }
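The new doc comments call `macro_arg_text` and `macro_expand_error` "firewall" queries: small queries whose results change rarely, so the expensive queries behind them are not invalidated by unrelated edits. As a rough, self-contained illustration of that idea outside salsa (plain memoization, all names hypothetical):

```rust
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash)]
struct CallId(u32);

struct Db {
    /// Full source text per call site -- changes on every keystroke.
    sources: HashMap<CallId, String>,
    /// Expensive results keyed by the *firewall* output, not by the raw source.
    expansion_cache: HashMap<String, String>,
}

impl Db {
    /// "Firewall" query: extract only the text of the macro call itself.
    fn macro_arg_text(&self, id: &CallId) -> Option<String> {
        let src = self.sources.get(id)?;
        // Pretend the argument is whatever sits between `!(` and the last `)`.
        let start = src.find("!(")? + 2;
        let end = src.rfind(')')?;
        Some(src[start..end].to_string())
    }

    /// Expensive query: reruns only when the firewall output actually changed.
    fn macro_expand(&mut self, id: &CallId) -> Option<String> {
        let arg = self.macro_arg_text(id)?;
        if let Some(hit) = self.expansion_cache.get(&arg) {
            return Some(hit.clone());
        }
        let expanded = format!("expanded({})", arg); // stand-in for real expansion
        self.expansion_cache.insert(arg, expanded.clone());
        Some(expanded)
    }
}

fn main() {
    let mut db = Db { sources: HashMap::new(), expansion_cache: HashMap::new() };
    let id = CallId(0);
    db.sources.insert(id.clone(), "fn main() { foo!(1 + 1); }".into());
    assert_eq!(db.macro_expand(&id).as_deref(), Some("expanded(1 + 1)"));

    // Editing code *around* the call leaves the firewall output unchanged,
    // so the expensive expansion is served from cache.
    db.sources.insert(id.clone(), "fn main() { let x = 0; foo!(1 + 1); }".into());
    assert_eq!(db.macro_expand(&id).as_deref(), Some("expanded(1 + 1)"));
}
```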
@@ -123,33 +143,156 @@ pub fn expand_hypothetical(
     hypothetical_args: &ast::TokenTree,
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
-    let macro_file = MacroFile { macro_call_id: actual_macro_call };
     let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
     let range =
         token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
     let token_id = tmap_1.token_by_range(range)?;
-    let macro_def = expander(db, actual_macro_call)?;
+
+    let lazy_id = match actual_macro_call {
+        MacroCallId::LazyMacro(id) => id,
+        MacroCallId::EagerMacro(_) => return None,
+    };
+
+    let macro_def = {
+        let loc = db.lookup_intern_macro(lazy_id);
+        db.macro_def(loc.def)?
+    };
+
+    let hypothetical_expansion = macro_def.expand(db, lazy_id, &tt);
+
+    let fragment_kind = to_fragment_kind(db, actual_macro_call);
+
     let (node, tmap_2) =
-        parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1)))).value?;
-    let token_id = macro_def.0.map_id_down(token_id);
+        mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?;
+
+    let token_id = macro_def.map_id_down(token_id);
     let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
     let token = node.syntax_node().covering_element(range).into_token()?;
     Some((node.syntax_node(), token))
 }
 
 fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
-    let map =
-        db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
+    let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
     Arc::new(map)
 }
 
-fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
+fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
+    match file_id.0 {
+        HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
+        HirFileIdRepr::MacroFile(macro_file) => {
+            db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
+        }
+    }
+}
+
+fn parse_macro_expansion(
+    db: &dyn AstDatabase,
+    macro_file: MacroFile,
+) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
+    let _p = profile::span("parse_macro_expansion");
+    let result = db.macro_expand(macro_file.macro_call_id);
+
+    if let Some(err) = &result.err {
+        // Note:
+        // The final goal we would like to make all parse_macro success,
+        // such that the following log will not call anyway.
+        match macro_file.macro_call_id {
+            MacroCallId::LazyMacro(id) => {
+                let loc: MacroCallLoc = db.lookup_intern_macro(id);
+                let node = loc.kind.node(db);
+
+                // collect parent information for warning log
+                let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
+                    it.file_id.call_node(db)
+                })
+                .map(|n| format!("{:#}", n.value))
+                .collect::<Vec<_>>()
+                .join("\n");
+
+                log::warn!(
+                    "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
+                    err,
+                    node.value,
+                    parents
+                );
+            }
+            _ => {
+                log::warn!("fail on macro_parse: (reason: {:?})", err);
+            }
+        }
+    }
+    let tt = match result.value {
+        Some(tt) => tt,
+        None => return ExpandResult { value: None, err: result.err },
+    };
+
+    let fragment_kind = to_fragment_kind(db, macro_file.macro_call_id);
+
+    log::debug!("expanded = {}", tt.as_debug_string());
+    log::debug!("kind = {:?}", fragment_kind);
+
+    let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
+        Ok(it) => it,
+        Err(err) => {
+            log::debug!(
+                "failed to parse expanstion to {:?} = {}",
+                fragment_kind,
+                tt.as_debug_string()
+            );
+            return ExpandResult::only_err(err);
+        }
+    };
+
+    match result.err {
+        Some(err) => {
+            // Safety check for recursive identity macro.
+            let node = parse.syntax_node();
+            let file: HirFileId = macro_file.into();
+            let call_node = match file.call_node(db) {
+                Some(it) => it,
+                None => {
+                    return ExpandResult::only_err(err);
+                }
+            };
+            if is_self_replicating(&node, &call_node.value) {
+                return ExpandResult::only_err(err);
+            } else {
+                ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
+            }
+        }
+        None => {
+            log::debug!("parse = {:?}", parse.syntax_node().kind());
+            ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
+        }
+    }
+}
+
+fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
+    let arg = db.macro_arg_text(id)?;
+    let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
+    Some(Arc::new((tt, tmap)))
+}
+
+fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
+    let id = match id {
+        MacroCallId::LazyMacro(id) => id,
+        MacroCallId::EagerMacro(_id) => {
+            // FIXME: support macro_arg for eager macro
+            return None;
+        }
+    };
+    let loc = db.lookup_intern_macro(id);
+    let arg = loc.kind.arg(db)?;
+    Some(arg.green())
+}
+
+fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>> {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
             ast::Macro::MacroRules(macro_rules) => {
                 let arg = macro_rules.token_tree()?;
-                let (tt, tmap) = mbe::ast_to_token_tree(&arg);
-                let rules = match mbe::MacroRules::parse(&tt) {
+                let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg);
+                let mac = match mbe::MacroRules::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
                         let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
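The rewritten `expand_hypothetical` above leans on the token-map round trip: a text range in the hypothetical argument is resolved to a token id, the id survives expansion, and the expansion's own token map converts it back into a range in the new syntax tree. A stripped-down sketch of that bookkeeping follows; the `TokenMap` here is a hypothetical stand-in for the real `mbe` type, and the hygiene remapping done by `map_id_down` is omitted.

```rust
use std::collections::HashMap;
use std::ops::Range;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TokenId(u32);

/// Maps source ranges to token ids and back; one such map exists for the
/// macro argument and another for the expansion output.
#[derive(Default)]
struct TokenMap {
    by_range: HashMap<(usize, usize), TokenId>,
    by_id: HashMap<TokenId, (usize, usize)>,
}

impl TokenMap {
    fn insert(&mut self, range: Range<usize>, id: TokenId) {
        self.by_range.insert((range.start, range.end), id);
        self.by_id.insert(id, (range.start, range.end));
    }
    fn token_by_range(&self, range: Range<usize>) -> Option<TokenId> {
        self.by_range.get(&(range.start, range.end)).copied()
    }
    fn range_by_token(&self, id: TokenId) -> Option<Range<usize>> {
        self.by_id.get(&id).map(|&(s, e)| s..e)
    }
}

fn main() {
    // tmap_1: built while lowering the (hypothetical) argument to a token tree.
    let mut tmap_1 = TokenMap::default();
    tmap_1.insert(4..7, TokenId(0)); // e.g. an identifier inside the call's argument

    // tmap_2: built while parsing the expansion back into a syntax tree.
    let mut tmap_2 = TokenMap::default();
    tmap_2.insert(12..15, TokenId(0)); // the same token, at its new position

    // The round trip: range in the call -> id -> range in the expansion.
    let id = tmap_1.token_by_range(4..7).unwrap();
    let range_in_expansion = tmap_2.range_by_token(id).unwrap();
    assert_eq!(range_in_expansion, 12..15);
}
```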
@@ -157,12 +300,12 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
                         return None;
                     }
                 };
-                Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
+                Some(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map }))
             }
             ast::Macro::MacroDef(macro_def) => {
                 let arg = macro_def.body()?;
-                let (tt, tmap) = mbe::ast_to_token_tree(&arg);
-                let rules = match mbe::MacroDef::parse(&tt) {
+                let (tt, def_site_token_map) = mbe::ast_to_token_tree(&arg);
+                let mac = match mbe::MacroDef::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
                         let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
@@ -170,41 +313,18 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
                         return None;
                     }
                 };
-                Some(Arc::new((TokenExpander::MacroDef(rules), tmap)))
+                Some(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map }))
             }
         },
-        MacroDefKind::BuiltIn(expander, _) => {
-            Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
-        }
+        MacroDefKind::BuiltIn(expander, _) => Some(Arc::new(TokenExpander::Builtin(expander))),
         MacroDefKind::BuiltInDerive(expander, _) => {
-            Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default())))
+            Some(Arc::new(TokenExpander::BuiltinDerive(expander)))
         }
         MacroDefKind::BuiltInEager(..) => None,
-        MacroDefKind::ProcMacro(expander, ..) => {
-            Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default())))
-        }
+        MacroDefKind::ProcMacro(expander, ..) => Some(Arc::new(TokenExpander::ProcMacro(expander))),
     }
 }
 
-fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
-    let id = match id {
-        MacroCallId::LazyMacro(id) => id,
-        MacroCallId::EagerMacro(_id) => {
-            // FIXME: support macro_arg for eager macro
-            return None;
-        }
-    };
-    let loc = db.lookup_intern_macro(id);
-    let arg = loc.kind.arg(db)?;
-    Some(arg.green())
-}
-
-fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
-    let arg = db.macro_arg_text(id)?;
-    let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
-    Some(Arc::new((tt, tmap)))
-}
-
 fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
     macro_expand_with_arg(db, id, None)
 }
@@ -213,19 +333,6 @@ fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<E
     db.macro_expand(macro_call).err
 }
 
-fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
-    let lazy_id = match id {
-        MacroCallId::LazyMacro(id) => id,
-        MacroCallId::EagerMacro(_id) => {
-            return None;
-        }
-    };
-
-    let loc = db.lookup_intern_macro(lazy_id);
-    let macro_rules = db.macro_def(loc.def)?;
-    Some(macro_rules)
-}
-
 fn macro_expand_with_arg(
     db: &dyn AstDatabase,
     id: MacroCallId,
@@ -259,7 +366,7 @@ fn macro_expand_with_arg(
         Some(it) => it,
         None => return ExpandResult::str_err("Fail to find macro definition".into()),
     };
-    let ExpandResult { value: tt, err } = macro_rules.0.expand(db, lazy_id, &macro_arg.0);
+    let ExpandResult { value: tt, err } = macro_rules.expand(db, lazy_id, &macro_arg.0);
     // Set a hard limit for the expanded tt
     let count = tt.count();
     if count > TOKEN_LIMIT {
@@ -299,111 +406,6 @@ fn expand_proc_macro(
     expander.expand(db, loc.krate, &macro_arg.0)
 }
 
-fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
-    match file_id.0 {
-        HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
-        HirFileIdRepr::MacroFile(macro_file) => {
-            db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
-        }
-    }
-}
-
-fn parse_macro_expansion(
-    db: &dyn AstDatabase,
-    macro_file: MacroFile,
-) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
-    parse_macro_with_arg(db, macro_file, None)
-}
-
-fn parse_macro_with_arg(
-    db: &dyn AstDatabase,
-    macro_file: MacroFile,
-    arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
-) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
-    let macro_call_id = macro_file.macro_call_id;
-    let result = if let Some(arg) = arg {
-        macro_expand_with_arg(db, macro_call_id, Some(arg))
-    } else {
-        db.macro_expand(macro_call_id)
-    };
-
-    let _p = profile::span("parse_macro_expansion");
-
-    if let Some(err) = &result.err {
-        // Note:
-        // The final goal we would like to make all parse_macro success,
-        // such that the following log will not call anyway.
-        match macro_call_id {
-            MacroCallId::LazyMacro(id) => {
-                let loc: MacroCallLoc = db.lookup_intern_macro(id);
-                let node = loc.kind.node(db);
-
-                // collect parent information for warning log
-                let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
-                    it.file_id.call_node(db)
-                })
-                .map(|n| format!("{:#}", n.value))
-                .collect::<Vec<_>>()
-                .join("\n");
-
-                log::warn!(
-                    "fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
-                    err,
-                    node.value,
-                    parents
-                );
-            }
-            _ => {
-                log::warn!("fail on macro_parse: (reason: {:?})", err);
-            }
-        }
-    }
-    let tt = match result.value {
-        Some(tt) => tt,
-        None => return ExpandResult { value: None, err: result.err },
-    };
-
-    let fragment_kind = to_fragment_kind(db, macro_call_id);
-
-    log::debug!("expanded = {}", tt.as_debug_string());
-    log::debug!("kind = {:?}", fragment_kind);
-
-    let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
-        Ok(it) => it,
-        Err(err) => {
-            log::debug!(
-                "failed to parse expanstion to {:?} = {}",
-                fragment_kind,
-                tt.as_debug_string()
-            );
-            return ExpandResult::only_err(err);
-        }
-    };
-
-    match result.err {
-        Some(err) => {
-            // Safety check for recursive identity macro.
-            let node = parse.syntax_node();
-            let file: HirFileId = macro_file.into();
-            let call_node = match file.call_node(db) {
-                Some(it) => it,
-                None => {
-                    return ExpandResult::only_err(err);
-                }
-            };
-            if is_self_replicating(&node, &call_node.value) {
-                return ExpandResult::only_err(err);
-            } else {
-                ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
-            }
-        }
-        None => {
-            log::debug!("parse = {:?}", parse.syntax_node().kind());
-            ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
-        }
-    }
-}
-
 fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
     if diff(from, to).is_empty() {
         return true;
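Taken together, the db.rs changes replace the `Arc<(TokenExpander, mbe::TokenMap)>` pairing with struct variants that carry their definition-site token map only where one actually exists. A condensed sketch of the before/after shape, using stand-in types rather than the real definitions:

```rust
struct MacroRules; // stand-ins for mbe::MacroRules, mbe::MacroDef, mbe::TokenMap
struct MacroDef;
struct TokenMap;

// Before: every expander dragged a TokenMap along, even built-ins that have none.
#[allow(dead_code)]
enum OldExpander {
    MacroRules(MacroRules),
    MacroDef(MacroDef),
    Builtin,
}
#[allow(dead_code)]
type OldDef = (OldExpander, TokenMap); // built-ins stored a meaningless default map

// After: the map lives only where it is meaningful, and callers must say which
// variant they expect instead of blindly reaching for `.1`.
#[allow(dead_code)]
enum NewExpander {
    MacroRules { mac: MacroRules, def_site_token_map: TokenMap },
    MacroDef { mac: MacroDef, def_site_token_map: TokenMap },
    Builtin,
}

fn def_site_token_map(expander: &NewExpander) -> Option<&TokenMap> {
    match expander {
        NewExpander::MacroRules { def_site_token_map, .. }
        | NewExpander::MacroDef { def_site_token_map, .. } => Some(def_site_token_map),
        NewExpander::Builtin => None,
    }
}

fn main() {
    let declarative =
        NewExpander::MacroRules { mac: MacroRules, def_site_token_map: TokenMap };
    assert!(def_site_token_map(&declarative).is_some());
    assert!(def_site_token_map(&NewExpander::Builtin).is_none());
}
```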
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index 779725629..ed61ebca3 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -5,6 +5,7 @@
 use std::sync::Arc;
 
 use base_db::CrateId;
+use db::TokenExpander;
 use either::Either;
 use mbe::Origin;
 use parser::SyntaxKind;
@@ -115,7 +116,7 @@ struct HygieneInfo {
     /// The `macro_rules!` arguments.
     def_start: Option<InFile<TextSize>>,
 
-    macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
+    macro_def: Arc<TokenExpander>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
     exp_map: Arc<mbe::TokenMap>,
 }
@@ -124,13 +125,16 @@ impl HygieneInfo {
     fn map_ident_up(&self, token: TextRange) -> Option<(InFile<TextRange>, Origin)> {
         let token_id = self.exp_map.token_by_range(token)?;
 
-        let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
+        let (token_id, origin) = self.macro_def.map_id_up(token_id);
         let (token_map, tt) = match origin {
             mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
-            mbe::Origin::Def => (
-                &self.macro_def.1,
-                *self.def_start.as_ref().expect("`Origin::Def` used with non-`macro_rules!` macro"),
-            ),
+            mbe::Origin::Def => match (&*self.macro_def, self.def_start) {
+                (TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt))
+                | (TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt)) => {
+                    (def_site_token_map, tt)
+                }
+                _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+            },
         };
 
         let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?;
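The new `map_ident_up` arm matches the expander and the optional definition site together, so the old `.expect(...)` becomes a single pattern that only panics in the genuinely impossible case. A minimal sketch of that style, with simplified types (`Vec<u32>` stands in for the def-site token map):

```rust
#[allow(dead_code)]
enum Expander {
    Declarative { def_site_token_map: Vec<u32> },
    Builtin,
}

/// Returns the def-site map and definition start, or panics if definition-site
/// info is requested for a macro kind that has none.
fn def_site_info(expander: &Expander, def_start: Option<usize>) -> (&[u32], usize) {
    match (expander, def_start) {
        (Expander::Declarative { def_site_token_map }, Some(start)) => {
            (def_site_token_map.as_slice(), start)
        }
        _ => panic!("definition-site info requested for a non-declarative macro"),
    }
}

fn main() {
    let exp = Expander::Declarative { def_site_token_map: vec![1, 2, 3] };
    let (map, start) = def_site_info(&exp, Some(10));
    assert_eq!((map.len(), start), (3, 10));
}
```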
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index a0e6aec62..0402640de 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -351,7 +351,7 @@ pub struct ExpansionInfo {
     /// The `macro_rules!` arguments.
     def: Option<InFile<ast::TokenTree>>,
 
-    macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
+    macro_def: Arc<db::TokenExpander>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
     exp_map: Arc<mbe::TokenMap>,
 }
@@ -368,7 +368,7 @@ impl ExpansionInfo {
         assert_eq!(token.file_id, self.arg.file_id);
         let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
         let token_id = self.macro_arg.1.token_by_range(range)?;
-        let token_id = self.macro_def.0.map_id_down(token_id);
+        let token_id = self.macro_def.map_id_down(token_id);
 
         let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
 
@@ -383,17 +383,16 @@
     ) -> Option<(InFile<SyntaxToken>, Origin)> {
         let token_id = self.exp_map.token_by_range(token.value.text_range())?;
 
-        let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
+        let (token_id, origin) = self.macro_def.map_id_up(token_id);
         let (token_map, tt) = match origin {
             mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
-            mbe::Origin::Def => (
-                &self.macro_def.1,
-                self.def
-                    .as_ref()
-                    .expect("`Origin::Def` used with non-`macro_rules!` macro")
-                    .as_ref()
-                    .map(|tt| tt.syntax().clone()),
-            ),
+            mbe::Origin::Def => match (&*self.macro_def, self.def.as_ref()) {
+                (db::TokenExpander::MacroRules { def_site_token_map, .. }, Some(tt))
+                | (db::TokenExpander::MacroDef { def_site_token_map, .. }, Some(tt)) => {
+                    (def_site_token_map, tt.as_ref().map(|tt| tt.syntax().clone()))
+                }
+                _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+            },
         };
 
         let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
diff --git a/docs/dev/style.md b/docs/dev/style.md
index 6ab60b50e..00de7a711 100644
--- a/docs/dev/style.md
+++ b/docs/dev/style.md
@@ -449,6 +449,39 @@ fn query_all(name: String, case_sensitive: bool) -> Vec<Item> { ... }
 fn query_first(name: String, case_sensitive: bool) -> Option<Item> { ... }
 ```
 
+## Prefer Separate Functions Over Parameters
+
+If a function has a `bool` or an `Option` parameter, and it is always called with `true`, `false`, `Some` and `None` literals, split the function in two.
+
+```rust
+// GOOD
+fn caller_a() {
+    foo()
+}
+
+fn caller_b() {
+    foo_with_bar(Bar::new())
+}
+
+fn foo() { ... }
+fn foo_with_bar(bar: Bar) { ... }
+
+// BAD
+fn caller_a() {
+    foo(None)
+}
+
+fn caller_b() {
+    foo(Some(Bar::new()))
+}
+
+fn foo(bar: Option<Bar>) { ... }
+```
+
+**Rationale:** more often than not, such functions display "`false sharing`" -- they have additional `if` branching inside for two different cases.
+Splitting the two different control flows into two functions simplifies each path, and remove cross-dependencies between the two paths.
+If there's common code between `foo` and `foo_with_bar`, extract *that* into a common helper.
+
 ## Avoid Monomorphization
 
 Avoid making a lot of code type parametric, *especially* on the boundaries between crates.
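One way to read the last sentence of the new style-guide section: the two public entry points can stay tiny while the shared work lives in a private helper. An illustrative sketch of that shape (not taken from the style guide itself):

```rust
struct Bar;

pub fn foo() {
    foo_impl(None)
}

pub fn foo_with_bar(bar: Bar) {
    foo_impl(Some(bar))
}

// The Option is now an implementation detail of the helper rather than part of
// the public signature that every caller has to spell out.
fn foo_impl(bar: Option<Bar>) {
    // shared setup/teardown goes here
    if bar.is_some() {
        // bar-specific work goes here
    }
}

fn main() {
    foo();
    foo_with_bar(Bar);
}
```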