Diffstat (limited to 'crates/hir_expand')
-rw-r--r--   crates/hir_expand/src/db.rs   33
1 file changed, 19 insertions, 14 deletions
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index 1e4b0cc19..1389e30db 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -3,14 +3,14 @@
 use std::sync::Arc;
 
 use base_db::{salsa, SourceDatabase};
-use mbe::{ExpandError, ExpandResult, MacroDef, MacroRules};
+use mbe::{ExpandError, ExpandResult};
 use parser::FragmentKind;
 use syntax::{
     algo::diff,
-    ast::{MacroStmts, NameOwner},
+    ast::{self, NameOwner},
     AstNode, GreenNode, Parse,
     SyntaxKind::*,
-    SyntaxNode,
+    SyntaxNode, SyntaxToken,
 };
 
 use crate::{
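
Note: the import hunk above drops the direct `MacroDef`/`MacroRules` imports in favour of qualifying them as `mbe::MacroDef`/`mbe::MacroRules` at their use sites, and switches to `ast::{self, NameOwner}` plus `SyntaxToken` so later signatures can use the shorter `ast::...`/`SyntaxToken` paths. A minimal, self-contained sketch of the `use module::{self, ...}` pattern; module and type names here are stand-ins, not the real syntax crate:

mod syntax {
    pub mod ast {
        // Stand-in for an AST node type such as `ast::TokenTree`.
        pub struct TokenTree;
    }
    // Stand-in for `syntax::SyntaxToken`.
    pub struct SyntaxToken;
}

// `ast::{self}` imports the `ast` module itself, so signatures can say
// `ast::TokenTree` and plain `SyntaxToken` rather than the fully qualified
// `syntax::ast::TokenTree` / `syntax::SyntaxToken` spellings used before.
use self::syntax::{ast::{self}, SyntaxToken};

fn takes_short_paths(_args: &ast::TokenTree, _tok: SyntaxToken) {}

fn main() {
    takes_short_paths(&ast::TokenTree, SyntaxToken);
}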
@@ -27,15 +27,20 @@ const TOKEN_LIMIT: usize = 524288;
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
+    /// Old-style `macro_rules`.
     MacroRules(mbe::MacroRules),
+    /// AKA macros 2.0.
     MacroDef(mbe::MacroDef),
+    /// Stuff like `line!` and `file!`.
     Builtin(BuiltinFnLikeExpander),
+    /// `derive(Copy)` and such.
     BuiltinDerive(BuiltinDeriveExpander),
+    /// The thing we love the most here in rust-analyzer -- procedural macros.
     ProcMacro(ProcMacroExpander),
 }
 
 impl TokenExpander {
-    pub fn expand(
+    fn expand(
         &self,
         db: &dyn AstDatabase,
         id: LazyMacroId,
@@ -56,7 +61,7 @@ impl TokenExpander {
         }
     }
 
-    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+    pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
             TokenExpander::MacroRules(it) => it.map_id_down(id),
             TokenExpander::MacroDef(it) => it.map_id_down(id),
@@ -66,7 +71,7 @@ impl TokenExpander {
         }
     }
 
-    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+    pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
         match self {
             TokenExpander::MacroRules(it) => it.map_id_up(id),
             TokenExpander::MacroDef(it) => it.map_id_up(id),
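
The hunks above also tighten visibility: `expand` becomes private to the module, while the token-id mapping helpers become `pub(crate)`. A small, self-contained illustration of what those visibility levels mean; the names below are hypothetical and not taken from rust-analyzer:

mod expander {
    pub struct Expander;

    impl Expander {
        // Private: callable only from inside the `expander` module,
        // like `fn expand` after this change.
        fn expand(&self) -> &'static str {
            "expanded"
        }

        // Crate-visible: callable from anywhere in the same crate, but not
        // from downstream crates, like `map_id_down`/`map_id_up` here.
        pub(crate) fn map_id_down(&self, id: u32) -> u32 {
            id
        }

        // Public entry point that uses the private method internally.
        pub fn run(&self) -> &'static str {
            self.expand()
        }
    }
}

fn main() {
    let e = expander::Expander;
    println!("{}", e.run());          // ok: `run` is `pub`
    println!("{}", e.map_id_down(7)); // ok: same crate
    // e.expand();                    // error: private to `expander`
}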
@@ -115,9 +120,9 @@ pub trait AstDatabase: SourceDatabase {
 pub fn expand_hypothetical(
     db: &dyn AstDatabase,
     actual_macro_call: MacroCallId,
-    hypothetical_args: &syntax::ast::TokenTree,
-    token_to_map: syntax::SyntaxToken,
-) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
+    hypothetical_args: &ast::TokenTree,
+    token_to_map: SyntaxToken,
+) -> Option<(SyntaxNode, SyntaxToken)> {
     let macro_file = MacroFile { macro_call_id: actual_macro_call };
     let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax());
     let range =
@@ -141,10 +146,10 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
 fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
-            syntax::ast::Macro::MacroRules(macro_rules) => {
+            ast::Macro::MacroRules(macro_rules) => {
                 let arg = macro_rules.token_tree()?;
                 let (tt, tmap) = mbe::ast_to_token_tree(&arg);
-                let rules = match MacroRules::parse(&tt) {
+                let rules = match mbe::MacroRules::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
                         let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
@@ -154,10 +159,10 @@ fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<(TokenExpander,
                 };
                 Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
             }
-            syntax::ast::Macro::MacroDef(macro_def) => {
+            ast::Macro::MacroDef(macro_def) => {
                 let arg = macro_def.body()?;
                 let (tt, tmap) = mbe::ast_to_token_tree(&arg);
-                let rules = match MacroDef::parse(&tt) {
+                let rules = match mbe::MacroDef::parse(&tt) {
                     Ok(it) => it,
                     Err(err) => {
                         let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
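
Both declarative branches above follow the same shape: lower the AST to a token tree, parse it with the now crate-qualified `mbe::MacroRules::parse` / `mbe::MacroDef::parse`, and on failure grab the macro's name, presumably for a diagnostic, before bailing out. A self-contained sketch of that `match Ok`-or-early-return idiom, using hypothetical names unrelated to the real mbe API:

fn double(input: &str) -> Option<i32> {
    // Take the parsed value on success, mirroring the `Ok(it) => it` arm above;
    // the real `Err` arm computes the macro name, presumably to report the
    // error before returning early.
    let value = match input.trim().parse::<i32>() {
        Ok(it) => it,
        Err(err) => {
            eprintln!("failed to parse {:?}: {}", input, err);
            return None;
        }
    };
    Some(value * 2)
}

fn main() {
    assert_eq!(double(" 21 "), Some(42));
    assert_eq!(double("not a number"), None);
}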
@@ -403,7 +408,7 @@ fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
     if diff(from, to).is_empty() {
         return true;
     }
-    if let Some(stmts) = MacroStmts::cast(from.clone()) {
+    if let Some(stmts) = ast::MacroStmts::cast(from.clone()) {
         if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
             return true;
         }
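
For reference, roughly how `is_self_replicating` reads in db.rs after this hunk; everything except the trailing `false` is visible above, and that fallthrough is implied by the `-> bool` signature and the early `return true` paths (this uses `diff`, `ast`, and `SyntaxNode` from the file's own imports):

fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
    // The expansion is identical to the input.
    if diff(from, to).is_empty() {
        return true;
    }
    // Otherwise, check whether any single statement of the expansion
    // matches the input.
    if let Some(stmts) = ast::MacroStmts::cast(from.clone()) {
        if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
            return true;
        }
    }
    false
}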