Diffstat (limited to 'crates/hir_expand/src')
-rw-r--r--  crates/hir_expand/src/builtin_macro.rs |  29
-rw-r--r--  crates/hir_expand/src/db.rs            | 117
-rw-r--r--  crates/hir_expand/src/eager.rs         |   4
-rw-r--r--  crates/hir_expand/src/hygiene.rs       |   2
-rw-r--r--  crates/hir_expand/src/lib.rs           |   2
-rw-r--r--  crates/hir_expand/src/name.rs          |  15
-rw-r--r--  crates/hir_expand/src/quote.rs         |   1
7 files changed, 120 insertions, 50 deletions
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 4d52904b9..a7d0f5b1f 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -1,10 +1,10 @@
 //! Builtin macro
 use crate::{
     db::AstDatabase, name, quote, AstId, CrateId, EagerMacroId, LazyMacroId, MacroCallId,
-    MacroDefId, MacroDefKind, TextSize,
+    MacroCallLoc, MacroDefId, MacroDefKind, TextSize,
 };
 
-use base_db::{AnchoredPath, FileId};
+use base_db::{AnchoredPath, Edition, FileId};
 use cfg::CfgExpr;
 use either::Either;
 use mbe::{parse_exprs_with_sep, parse_to_token_tree, ExpandResult};
@@ -111,6 +111,8 @@ register_builtin! {
     (llvm_asm, LlvmAsm) => asm_expand,
     (asm, Asm) => asm_expand,
     (cfg, Cfg) => cfg_expand,
+    (core_panic, CorePanic) => panic_expand,
+    (std_panic, StdPanic) => panic_expand,
 
     EAGER:
     (compile_error, CompileError) => compile_error_expand,
@@ -284,6 +286,25 @@ fn cfg_expand(
     ExpandResult::ok(expanded)
 }
 
+fn panic_expand(
+    db: &dyn AstDatabase,
+    id: LazyMacroId,
+    tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+    let loc: MacroCallLoc = db.lookup_intern_macro(id);
+    // Expand to a macro call `$crate::panic::panic_{edition}`
+    let krate = tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() };
+    let mut call = if db.crate_graph()[loc.krate].edition == Edition::Edition2021 {
+        quote!(#krate::panic::panic_2021!)
+    } else {
+        quote!(#krate::panic::panic_2015!)
+    };
+
+    // Pass the original arguments
+    call.token_trees.push(tt::TokenTree::Subtree(tt.clone()));
+    ExpandResult::ok(call)
+}
+
 fn unquote_str(lit: &tt::Literal) -> Option<String> {
     let lit = ast::make::tokens::literal(&lit.to_string());
     let token = ast::String::cast(lit)?;
@@ -469,7 +490,7 @@ fn env_expand(
         // unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
         if key == "OUT_DIR" {
             err = Some(mbe::ExpandError::Other(
-                r#"`OUT_DIR` not set, enable "load out dirs from check" to fix"#.into(),
+                r#"`OUT_DIR` not set, enable "run build scripts" to fix"#.into(),
             ));
         }
 
@@ -563,7 +584,7 @@ mod tests {
         };
 
         let args = macro_call.token_tree().unwrap();
-        let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0;
+        let parsed_args = mbe::ast_to_token_tree(&args).0;
         let call_id = AstId::new(file_id.into(), ast_id_map.ast_id(&macro_call));
 
         let arg_id = db.intern_eager_expansion({
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index fc73e435b..10fe60821 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -3,9 +3,15 @@
 use std::sync::Arc;
 
 use base_db::{salsa, SourceDatabase};
-use mbe::{ExpandError, ExpandResult, MacroRules};
+use mbe::{ExpandError, ExpandResult, MacroDef, MacroRules};
 use parser::FragmentKind;
-use syntax::{algo::diff, ast::NameOwner, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
+use syntax::{
+    algo::diff,
+    ast::{MacroStmts, NameOwner},
+    AstNode, GreenNode, Parse,
+    SyntaxKind::*,
+    SyntaxNode,
+};
 
 use crate::{
     ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinDeriveExpander, BuiltinFnLikeExpander,
@@ -22,6 +28,7 @@ const TOKEN_LIMIT: usize = 524288;
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
     MacroRules(mbe::MacroRules),
+    MacroDef(mbe::MacroDef),
     Builtin(BuiltinFnLikeExpander),
     BuiltinDerive(BuiltinDeriveExpander),
     ProcMacro(ProcMacroExpander),
@@ -36,6 +43,7 @@ impl TokenExpander {
     ) -> mbe::ExpandResult<tt::Subtree> {
         match self {
             TokenExpander::MacroRules(it) => it.expand(tt),
+            TokenExpander::MacroDef(it) => it.expand(tt),
             TokenExpander::Builtin(it) => it.expand(db, id, tt),
             // FIXME switch these to ExpandResult as well
             TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
@@ -51,6 +59,7 @@ impl TokenExpander {
     pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
             TokenExpander::MacroRules(it) => it.map_id_down(id),
+            TokenExpander::MacroDef(it) => it.map_id_down(id),
             TokenExpander::Builtin(..) => id,
             TokenExpander::BuiltinDerive(..) => id,
             TokenExpander::ProcMacro(..) => id,
@@ -60,6 +69,7 @@ impl TokenExpander {
     pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
             TokenExpander::MacroRules(it) => it.map_id_up(id),
+            TokenExpander::MacroDef(it) => it.map_id_up(id),
             TokenExpander::Builtin(..) => (id, mbe::Origin::Call),
             TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call),
             TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
@@ -109,7 +119,7 @@ pub fn expand_hypothetical(
     token_to_map: syntax::SyntaxToken,
 ) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
     let macro_file = MacroFile { macro_call_id: actual_macro_call };
-    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
+    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
     let range =
         token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
     let token_id = tmap_1.token_by_range(range)?;
@@ -130,26 +140,34 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
 
 fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
     match id.kind {
-        MacroDefKind::Declarative(ast_id) => {
-            let macro_rules = match ast_id.to_node(db) {
-                syntax::ast::Macro::MacroRules(mac) => mac,
-                syntax::ast::Macro::MacroDef(_) => return None,
-            };
-            let arg = macro_rules.token_tree()?;
-            let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
-                log::warn!("fail on macro_def to token tree: {:#?}", arg);
-                None
-            })?;
-            let rules = match MacroRules::parse(&tt) {
-                Ok(it) => it,
-                Err(err) => {
-                    let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
-                    log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
-                    return None;
-                }
-            };
-            Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
-        }
+        MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
+            syntax::ast::Macro::MacroRules(macro_rules) => {
+                let arg = macro_rules.token_tree()?;
+                let (tt, tmap) = mbe::ast_to_token_tree(&arg);
+                let rules = match MacroRules::parse(&tt) {
+                    Ok(it) => it,
+                    Err(err) => {
+                        let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
+                        log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
+                        return None;
+                    }
+                };
+                Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
+            }
+            syntax::ast::Macro::MacroDef(macro_def) => {
+                let arg = macro_def.body()?;
+                let (tt, tmap) = mbe::ast_to_token_tree(&arg);
+                let rules = match MacroDef::parse(&tt) {
+                    Ok(it) => it,
+                    Err(err) => {
+                        let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
+                        log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
+                        return None;
+                    }
+                };
+                Some(Arc::new((TokenExpander::MacroDef(rules), tmap)))
+            }
+        },
         MacroDefKind::BuiltIn(expander, _) => {
             Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
         }
@@ -178,7 +196,7 @@ fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
 
 fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
     let arg = db.macro_arg_text(id)?;
-    let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?;
+    let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
     Some(Arc::new((tt, tmap)))
 }
 
@@ -340,13 +358,19 @@ fn parse_macro_with_arg(
         None => return ExpandResult { value: None, err: result.err },
     };
 
-    log::debug!("expanded = {}", tt.as_debug_string());
-
     let fragment_kind = to_fragment_kind(db, macro_call_id);
 
+    log::debug!("expanded = {}", tt.as_debug_string());
+    log::debug!("kind = {:?}", fragment_kind);
+
     let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
         Ok(it) => it,
         Err(err) => {
+            log::debug!(
+                "failed to parse expanstion to {:?} = {}",
+                fragment_kind,
+                tt.as_debug_string()
+            );
             return ExpandResult::only_err(err);
         }
     };
@@ -362,15 +386,34 @@ fn parse_macro_with_arg(
                     return ExpandResult::only_err(err);
                 }
             };
-
-            if !diff(&node, &call_node.value).is_empty() {
-                ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
-            } else {
+            if is_self_replicating(&node, &call_node.value) {
                 return ExpandResult::only_err(err);
+            } else {
+                ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
+            }
+        }
+        None => {
+            log::debug!("parse = {:?}", parse.syntax_node().kind());
+            ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
+        }
+    }
+}
+
+fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
+    if diff(from, to).is_empty() {
+        return true;
+    }
+    if let Some(stmts) = MacroStmts::cast(from.clone()) {
+        if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
+            return true;
+        }
+        if let Some(expr) = stmts.expr() {
+            if diff(expr.syntax(), to).is_empty() {
+                return true;
             }
         }
-        None => ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None },
     }
+    false
 }
 
 fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
@@ -390,21 +433,15 @@ fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
 
     let parent = match syn.parent() {
         Some(it) => it,
-        None => {
-            // FIXME:
-            // If it is root, which means the parent HirFile
-            // MacroKindFile must be non-items
-            // return expr now.
-            return FragmentKind::Expr;
-        }
+        None => return FragmentKind::Statements,
     };
 
     match parent.kind() {
         MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
-        MACRO_STMTS => FragmentKind::Statement,
+        MACRO_STMTS => FragmentKind::Statements,
         ITEM_LIST => FragmentKind::Items,
         LET_STMT => {
-            // FIXME: Handle Pattern
+            // FIXME: Handle LHS Pattern
             FragmentKind::Expr
         }
         EXPR_STMT => FragmentKind::Statements,
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs
index 9eedc8461..9705526fa 100644
--- a/crates/hir_expand/src/eager.rs
+++ b/crates/hir_expand/src/eager.rs
@@ -106,7 +106,7 @@ pub fn expand_eager_macro(
     mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
 ) -> Result<EagerMacroId, ErrorEmitted> {
     let parsed_args = diagnostic_sink.option_with(
-        || Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?)?.0),
+        || Some(mbe::ast_to_token_tree(&macro_call.value.token_tree()?).0),
         || err("malformed macro invocation"),
     )?;
 
@@ -161,7 +161,7 @@ pub fn expand_eager_macro(
 }
 
 fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> {
-    let mut subtree = mbe::syntax_node_to_token_tree(node)?.0;
+    let mut subtree = mbe::syntax_node_to_token_tree(node).0;
     subtree.delimiter = None;
     Some(subtree)
 }
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs
index 0e0f7214a..779725629 100644
--- a/crates/hir_expand/src/hygiene.rs
+++ b/crates/hir_expand/src/hygiene.rs
@@ -148,7 +148,7 @@ fn make_hygiene_info(
     let def_offset = loc.def.ast_id().left().and_then(|id| {
         let def_tt = match id.to_node(db) {
             ast::Macro::MacroRules(mac) => mac.token_tree()?.syntax().text_range().start(),
-            ast::Macro::MacroDef(_) => return None,
+            ast::Macro::MacroDef(mac) => mac.body()?.syntax().text_range().start(),
         };
         Some(InFile::new(id.file_id, def_tt))
     });
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index b8045fda9..3e332ee47 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -151,7 +151,7 @@ impl HirFileId {
         let def = loc.def.ast_id().left().and_then(|id| {
             let def_tt = match id.to_node(db) {
                 ast::Macro::MacroRules(mac) => mac.token_tree()?,
-                ast::Macro::MacroDef(_) => return None,
+                ast::Macro::MacroDef(mac) => mac.body()?,
             };
             Some(InFile::new(id.file_id, def_tt))
         });
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index 0aeea48d5..a0f8766b0 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -55,6 +55,15 @@ impl Name {
         }
     }
 
+    /// A fake name for things missing in the source code.
+    ///
+    /// For example, `impl Foo for {}` should be treated as a trait impl for a
+    /// type with a missing name. Similarly, `struct S { : u32 }` should have a
+    /// single field with a missing name.
+    ///
+    /// Ideally, we want a `gensym` semantics for missing names -- each missing
+    /// name is equal only to itself. It's not clear how to implement this in
+    /// salsa though, so we punt on that bit for a moment.
     pub fn missing() -> Name {
         Name::new_text("[missing name]".into())
     }
@@ -75,14 +84,14 @@ impl AsName for ast::NameRef {
     fn as_name(&self) -> Name {
         match self.as_tuple_field() {
             Some(idx) => Name::new_tuple_field(idx),
-            None => Name::resolve(self.text()),
+            None => Name::resolve(&self.text()),
         }
     }
 }
 
 impl AsName for ast::Name {
     fn as_name(&self) -> Name {
-        Name::resolve(self.text())
+        Name::resolve(&self.text())
     }
 }
 
@@ -199,6 +208,8 @@ pub mod known {
         line,
         module_path,
         assert,
+        core_panic,
+        std_panic,
         stringify,
         concat,
         include,
diff --git a/crates/hir_expand/src/quote.rs b/crates/hir_expand/src/quote.rs
index 08bc5aa49..c82487ef0 100644
--- a/crates/hir_expand/src/quote.rs
+++ b/crates/hir_expand/src/quote.rs
@@ -104,6 +104,7 @@ macro_rules! __quote {
     ( . ) => {$crate::__quote!(@PUNCT '.')};
     ( < ) => {$crate::__quote!(@PUNCT '<')};
     ( > ) => {$crate::__quote!(@PUNCT '>')};
+    ( ! ) => {$crate::__quote!(@PUNCT '!')};
 
     ( $first:tt $($tail:tt)+ ) => {
         {