diff options
author | bors[bot] <26634292+bors[bot]@users.noreply.github.com> | 2020-08-13 15:31:49 +0100 |
---|---|---|
committer | GitHub <[email protected]> | 2020-08-13 15:31:49 +0100 |
commit | e9926948ca267932ccc1341388bfd1b3fa88a001 (patch) | |
tree | cc4b797cb39a40b59e9e3d37178e8a1907f12358 /crates/hir_expand | |
parent | 902f74c2697cc2a50de9067845814a2a852fccfd (diff) | |
parent | 50f8c1ebf23f634b68529603a917e3feeda457fa (diff) |
Merge #5747
5747: Rename crate r=matklad a=matklad
bors r+
🤖
Co-authored-by: Aleksey Kladov <[email protected]>
Diffstat (limited to 'crates/hir_expand')
-rw-r--r-- | crates/hir_expand/Cargo.toml | 23 | ||||
-rw-r--r-- | crates/hir_expand/src/ast_id_map.rs | 119 | ||||
-rw-r--r-- | crates/hir_expand/src/builtin_derive.rs | 361 | ||||
-rw-r--r-- | crates/hir_expand/src/builtin_macro.rs | 649 | ||||
-rw-r--r-- | crates/hir_expand/src/db.rs | 403 | ||||
-rw-r--r-- | crates/hir_expand/src/diagnostics.rs | 95 | ||||
-rw-r--r-- | crates/hir_expand/src/eager.rs | 144 | ||||
-rw-r--r-- | crates/hir_expand/src/hygiene.rs | 66 | ||||
-rw-r--r-- | crates/hir_expand/src/lib.rs | 453 | ||||
-rw-r--r-- | crates/hir_expand/src/name.rs | 230 | ||||
-rw-r--r-- | crates/hir_expand/src/proc_macro.rs | 143 | ||||
-rw-r--r-- | crates/hir_expand/src/quote.rs | 282 | ||||
-rw-r--r-- | crates/hir_expand/src/test_db.rs | 49 |
13 files changed, 3017 insertions, 0 deletions
diff --git a/crates/hir_expand/Cargo.toml b/crates/hir_expand/Cargo.toml new file mode 100644 index 000000000..1c4699291 --- /dev/null +++ b/crates/hir_expand/Cargo.toml | |||
@@ -0,0 +1,23 @@ | |||
1 | [package] | ||
2 | name = "hir_expand" | ||
3 | version = "0.0.0" | ||
4 | license = "MIT OR Apache-2.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | edition = "2018" | ||
7 | |||
8 | [lib] | ||
9 | doctest = false | ||
10 | |||
11 | [dependencies] | ||
12 | log = "0.4.8" | ||
13 | either = "1.5.3" | ||
14 | rustc-hash = "1.0.0" | ||
15 | |||
16 | arena = { path = "../arena" } | ||
17 | base_db = { path = "../base_db" } | ||
18 | syntax = { path = "../syntax" } | ||
19 | parser = { path = "../parser" } | ||
20 | profile = { path = "../profile" } | ||
21 | tt = { path = "../tt" } | ||
22 | mbe = { path = "../mbe" } | ||
23 | test_utils = { path = "../test_utils"} | ||
diff --git a/crates/hir_expand/src/ast_id_map.rs b/crates/hir_expand/src/ast_id_map.rs new file mode 100644 index 000000000..f63629b30 --- /dev/null +++ b/crates/hir_expand/src/ast_id_map.rs | |||
@@ -0,0 +1,119 @@ | |||
1 | //! `AstIdMap` allows to create stable IDs for "large" syntax nodes like items | ||
2 | //! and macro calls. | ||
3 | //! | ||
4 | //! Specifically, it enumerates all items in a file and uses position of an | ||
5 | //! item as an ID. That way, id's don't change unless the set of items itself | ||
6 | //! changes. | ||
7 | |||
8 | use std::{ | ||
9 | any::type_name, | ||
10 | fmt, | ||
11 | hash::{Hash, Hasher}, | ||
12 | marker::PhantomData, | ||
13 | }; | ||
14 | |||
15 | use arena::{Arena, Idx}; | ||
16 | use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; | ||
17 | |||
/// `AstId` points to an AST node in a specific file.
///
/// The `_ty` marker ties the type-erased id back to a concrete node type `N`
/// without owning an `N`; `fn() -> N` carries the type parameter only.
pub struct FileAstId<N: AstNode> {
    raw: ErasedFileAstId,
    _ty: PhantomData<fn() -> N>,
}

// NOTE(review): Clone/Copy/PartialEq/Eq/Hash are written by hand rather than
// derived — presumably so no `N: Clone` (etc.) bound is required on the node
// type; only `raw` participates in all of them.
impl<N: AstNode> Clone for FileAstId<N> {
    fn clone(&self) -> FileAstId<N> {
        *self
    }
}
impl<N: AstNode> Copy for FileAstId<N> {}

impl<N: AstNode> PartialEq for FileAstId<N> {
    fn eq(&self, other: &Self) -> bool {
        self.raw == other.raw
    }
}
impl<N: AstNode> Eq for FileAstId<N> {}
impl<N: AstNode> Hash for FileAstId<N> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.raw.hash(hasher);
    }
}

impl<N: AstNode> fmt::Debug for FileAstId<N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
    }
}

impl<N: AstNode> FileAstId<N> {
    // Can't make this a From implementation because of coherence
    /// Re-brands the same erased id as node type `M` (e.g. a concrete item
    /// kind upcast to `ast::Item`). The raw index is unchanged.
    pub fn upcast<M: AstNode>(self) -> FileAstId<M>
    where
        N: Into<M>,
    {
        FileAstId { raw: self.raw, _ty: PhantomData }
    }
}
58 | |||
// The erased form of an id: just an index into the per-file arena of node pointers.
type ErasedFileAstId = Idx<SyntaxNodePtr>;

/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct AstIdMap {
    arena: Arena<SyntaxNodePtr>,
}
66 | |||
impl AstIdMap {
    /// Builds the map for a whole file; `node` must be the file's root node
    /// (asserted below).
    pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
        assert!(node.parent().is_none());
        let mut res = AstIdMap { arena: Arena::default() };
        // By walking the tree in breadth-first order we make sure that parents
        // get lower ids than children. That is, adding a new child does not
        // change parent's id. This means that, say, adding a new function to a
        // trait does not change ids of top-level items, which helps caching.
        bfs(node, |it| {
            if let Some(module_item) = ast::Item::cast(it) {
                res.alloc(module_item.syntax());
            }
        });
        res
    }

    /// Returns the typed id of `item`. Panics (via `erased_ast_id`) if `item`
    /// was not allocated when this map was built.
    pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
        let raw = self.erased_ast_id(item.syntax());
        FileAstId { raw, _ty: PhantomData }
    }
    // Linear scan over the arena — O(n) per lookup; panics with a dump of the
    // arena contents when the node is unknown (a caller bug).
    fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
        let ptr = SyntaxNodePtr::new(item);
        match self.arena.iter().find(|(_id, i)| **i == ptr) {
            Some((it, _)) => it,
            None => panic!(
                "Can't find {:?} in AstIdMap:\n{:?}",
                item,
                self.arena.iter().map(|(_id, i)| i).collect::<Vec<_>>(),
            ),
        }
    }

    /// Resolves an id back to a typed pointer to the node.
    pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
        self.arena[id.raw].clone().cast::<N>().unwrap()
    }

    fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
        self.arena.alloc(SyntaxNodePtr::new(item))
    }
}
107 | |||
108 | /// Walks the subtree in bfs order, calling `f` for each node. | ||
109 | fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { | ||
110 | let mut curr_layer = vec![node.clone()]; | ||
111 | let mut next_layer = vec![]; | ||
112 | while !curr_layer.is_empty() { | ||
113 | curr_layer.drain(..).for_each(|node| { | ||
114 | next_layer.extend(node.children()); | ||
115 | f(node); | ||
116 | }); | ||
117 | std::mem::swap(&mut curr_layer, &mut next_layer); | ||
118 | } | ||
119 | } | ||
diff --git a/crates/hir_expand/src/builtin_derive.rs b/crates/hir_expand/src/builtin_derive.rs new file mode 100644 index 000000000..988a60d56 --- /dev/null +++ b/crates/hir_expand/src/builtin_derive.rs | |||
@@ -0,0 +1,361 @@ | |||
1 | //! Builtin derives. | ||
2 | |||
3 | use log::debug; | ||
4 | |||
5 | use parser::FragmentKind; | ||
6 | use syntax::{ | ||
7 | ast::{self, AstNode, GenericParamsOwner, ModuleItemOwner, NameOwner}, | ||
8 | match_ast, | ||
9 | }; | ||
10 | |||
11 | use crate::{db::AstDatabase, name, quote, LazyMacroId, MacroDefId, MacroDefKind}; | ||
12 | |||
// Generates, from the trait => expander table below:
// * the `BuiltinDeriveExpander` enum (one variant per builtin derive),
// * its dispatching `expand` method, and
// * `find_builtin_derive`, mapping a trait name to a `MacroDefId`.
macro_rules! register_builtin {
    ( $($trait:ident => $expand:ident),* ) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
        pub enum BuiltinDeriveExpander {
            $($trait),*
        }

        impl BuiltinDeriveExpander {
            /// Dispatches to the expander function registered for this variant.
            pub fn expand(
                &self,
                db: &dyn AstDatabase,
                id: LazyMacroId,
                tt: &tt::Subtree,
            ) -> Result<tt::Subtree, mbe::ExpandError> {
                let expander = match *self {
                    $( BuiltinDeriveExpander::$trait => $expand, )*
                };
                expander(db, id, tt)
            }
        }

        /// Looks up a builtin derive by its trait name. Builtin derives carry
        /// neither a crate nor an ast id (`krate: None, ast_id: None`).
        pub fn find_builtin_derive(ident: &name::Name) -> Option<MacroDefId> {
            let kind = match ident {
                $( id if id == &name::name![$trait] => BuiltinDeriveExpander::$trait, )*
                _ => return None,
            };

            Some(MacroDefId { krate: None, ast_id: None, kind: MacroDefKind::BuiltInDerive(kind), local_inner: false })
        }
    };
}
44 | |||
// The full set of builtin derives and their expander functions.
register_builtin! {
    Copy => copy_expand,
    Clone => clone_expand,
    Default => default_expand,
    Debug => debug_expand,
    Hash => hash_expand,
    Ord => ord_expand,
    PartialOrd => partial_ord_expand,
    Eq => eq_expand,
    PartialEq => partial_eq_expand
}
56 | |||
/// The minimal information about a derive target needed to emit an impl:
/// its name and its number of type parameters.
struct BasicAdtInfo {
    name: tt::Ident,
    type_params: usize,
}
61 | |||
/// Parses the token tree handed to a derive (the annotated item) back into
/// syntax and extracts the ADT's name token and type-parameter count.
///
/// Only `type_params()` are counted — lifetimes (and anything else in the
/// generic param list) are ignored.
fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, FragmentKind::Items)?; // FragmentKind::Items doesn't parse attrs?
    let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
        debug!("derive node didn't parse");
        mbe::ExpandError::UnexpectedToken
    })?;
    // Exactly one item is expected: the struct/enum/union the derive sits on.
    let item = macro_items.items().next().ok_or_else(|| {
        debug!("no module item parsed");
        mbe::ExpandError::NoMatchingRule
    })?;
    let node = item.syntax();
    let (name, params) = match_ast! {
        match node {
            ast::Struct(it) => (it.name(), it.generic_param_list()),
            ast::Enum(it) => (it.name(), it.generic_param_list()),
            ast::Union(it) => (it.name(), it.generic_param_list()),
            _ => {
                debug!("unexpected node is {:?}", node);
                return Err(mbe::ExpandError::ConversionError)
            },
        }
    };
    let name = name.ok_or_else(|| {
        debug!("parsed item has no name");
        mbe::ExpandError::NoMatchingRule
    })?;
    // Map the name back to its original token id so hygiene/goto keep working.
    let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| {
        debug!("name token not found");
        mbe::ExpandError::ConversionError
    })?;
    let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
    let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
    Ok(BasicAdtInfo { name: name_token, type_params })
}
96 | |||
97 | fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> { | ||
98 | let mut result = Vec::<tt::TokenTree>::new(); | ||
99 | result.push( | ||
100 | tt::Leaf::Punct(tt::Punct { | ||
101 | char: '<', | ||
102 | spacing: tt::Spacing::Alone, | ||
103 | id: tt::TokenId::unspecified(), | ||
104 | }) | ||
105 | .into(), | ||
106 | ); | ||
107 | for i in 0..n { | ||
108 | if i > 0 { | ||
109 | result.push( | ||
110 | tt::Leaf::Punct(tt::Punct { | ||
111 | char: ',', | ||
112 | spacing: tt::Spacing::Alone, | ||
113 | id: tt::TokenId::unspecified(), | ||
114 | }) | ||
115 | .into(), | ||
116 | ); | ||
117 | } | ||
118 | result.push( | ||
119 | tt::Leaf::Ident(tt::Ident { | ||
120 | id: tt::TokenId::unspecified(), | ||
121 | text: format!("T{}", i).into(), | ||
122 | }) | ||
123 | .into(), | ||
124 | ); | ||
125 | result.extend(bound.iter().cloned()); | ||
126 | } | ||
127 | result.push( | ||
128 | tt::Leaf::Punct(tt::Punct { | ||
129 | char: '>', | ||
130 | spacing: tt::Spacing::Alone, | ||
131 | id: tt::TokenId::unspecified(), | ||
132 | }) | ||
133 | .into(), | ||
134 | ); | ||
135 | result | ||
136 | } | ||
137 | |||
/// Expands a "marker-style" derive to an empty impl:
/// `impl<T0: Trait, …> Trait for Name<T0, …> {}`.
///
/// All of the target's type parameters are renamed `T0..Tn` and each gets
/// `trait_path` itself as its only bound.
fn expand_simple_derive(
    tt: &tt::Subtree,
    trait_path: tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let info = parse_adt(tt)?;
    let name = info.name;
    let trait_path_clone = trait_path.token_trees.clone();
    let bound = (quote! { : ##trait_path_clone }).token_trees;
    let type_params = make_type_args(info.type_params, bound);
    let type_args = make_type_args(info.type_params, Vec::new());
    let trait_path = trait_path.token_trees;
    let expanded = quote! {
        impl ##type_params ##trait_path for #name ##type_args {}
    };
    Ok(expanded)
}
154 | |||
/// Returns the path prefix (`core` or `crate`) through which the expansion
/// should reach the standard traits, for the crate containing the macro call.
fn find_builtin_crate(db: &dyn AstDatabase, id: LazyMacroId) -> tt::TokenTree {
    // FIXME: make hygiene works for builtin derive macro
    // such that $crate can be used here.
    let cg = db.crate_graph();
    let krate = db.lookup_intern_macro(id).krate;

    // XXX
    // All crates except core itself should have a dependency on core,
    // We detect `core` by seeing whether it doesn't have such a dependency.
    let tt = if cg[krate].dependencies.iter().any(|dep| &*dep.name == "core") {
        quote! { core }
    } else {
        quote! { crate }
    };

    tt.token_trees[0].clone()
}
172 | |||
// Each `*_expand` below resolves the builtin-crate prefix and delegates to
// `expand_simple_derive` with the fully-qualified path of its trait. They all
// share the same shape; only the trait path differs.

fn copy_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::marker::Copy })
}

fn clone_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::clone::Clone })
}

fn default_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::default::Default })
}

fn debug_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::fmt::Debug })
}

fn hash_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::hash::Hash })
}

fn eq_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::cmp::Eq })
}

fn partial_eq_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::cmp::PartialEq })
}

fn ord_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::cmp::Ord })
}

fn partial_ord_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let krate = find_builtin_crate(db, id);
    expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd })
}
253 | |||
#[cfg(test)]
mod tests {
    use base_db::{fixture::WithFixture, CrateId, SourceDatabase};
    use name::{known, Name};

    use crate::{test_db::TestDB, AstId, MacroCallId, MacroCallKind, MacroCallLoc};

    use super::*;

    /// Applies the builtin derive `name` to the item in `s` inside a two-crate
    /// fixture (`main` depending on `core`) and returns the expansion text.
    fn expand_builtin_derive(s: &str, name: Name) -> String {
        let def = find_builtin_derive(&name).unwrap();
        let fixture = format!(
            r#"//- /main.rs crate:main deps:core
<|>
{}
//- /lib.rs crate:core
// empty
"#,
            s
        );

        let (db, file_pos) = TestDB::with_position(&fixture);
        let file_id = file_pos.file_id;
        let parsed = db.parse(file_id);
        let items: Vec<_> =
            parsed.syntax_node().descendants().filter_map(ast::Item::cast).collect();

        let ast_id_map = db.ast_id_map(file_id.into());

        // The first (and only) item in the fixture is the derive target.
        let attr_id = AstId::new(file_id.into(), ast_id_map.ast_id(&items[0]));

        let loc = MacroCallLoc {
            def,
            krate: CrateId(0),
            kind: MacroCallKind::Attr(attr_id, name.to_string()),
        };

        let id: MacroCallId = db.intern_macro(loc).into();
        let parsed = db.parse_or_expand(id.as_file()).unwrap();

        // FIXME text() for syntax nodes parsed from token tree looks weird
        // because there's no whitespace, see below
        parsed.text().to_string()
    }

    #[test]
    fn test_copy_expand_simple() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Copy)]
        struct Foo;
"#,
            known::Copy,
        );

        assert_eq!(expanded, "impl< >core::marker::CopyforFoo< >{}");
    }

    #[test]
    fn test_copy_expand_with_type_params() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Copy)]
        struct Foo<A, B>;
"#,
            known::Copy,
        );

        assert_eq!(
            expanded,
            "impl<T0:core::marker::Copy,T1:core::marker::Copy>core::marker::CopyforFoo<T0,T1>{}"
        );
    }

    #[test]
    fn test_copy_expand_with_lifetimes() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Copy)]
        struct Foo<A, B, 'a, 'b>;
"#,
            known::Copy,
        );

        // We currently just ignore lifetimes

        assert_eq!(
            expanded,
            "impl<T0:core::marker::Copy,T1:core::marker::Copy>core::marker::CopyforFoo<T0,T1>{}"
        );
    }

    #[test]
    fn test_clone_expand() {
        let expanded = expand_builtin_derive(
            r#"
        #[derive(Clone)]
        struct Foo<A, B>;
"#,
            known::Clone,
        );

        assert_eq!(
            expanded,
            "impl<T0:core::clone::Clone,T1:core::clone::Clone>core::clone::CloneforFoo<T0,T1>{}"
        );
    }
}
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs new file mode 100644 index 000000000..86918b626 --- /dev/null +++ b/crates/hir_expand/src/builtin_macro.rs | |||
@@ -0,0 +1,649 @@ | |||
1 | //! Builtin macro | ||
2 | use crate::{ | ||
3 | db::AstDatabase, name, quote, AstId, CrateId, EagerMacroId, LazyMacroId, MacroCallId, | ||
4 | MacroDefId, MacroDefKind, TextSize, | ||
5 | }; | ||
6 | |||
7 | use base_db::FileId; | ||
8 | use either::Either; | ||
9 | use mbe::parse_to_token_tree; | ||
10 | use parser::FragmentKind; | ||
11 | use syntax::ast::{self, AstToken, HasStringValue}; | ||
12 | |||
// Generates, from the two tables below:
// * `BuiltinFnLikeExpander` (lazy) and `EagerExpander` (eager) enums,
// * their dispatching `expand` methods (eager expansion also yields the
//   `FragmentKind` the result should be parsed as), and
// * `find_by_name`, mapping a macro name to whichever expander handles it.
macro_rules! register_builtin {
    ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
        pub enum BuiltinFnLikeExpander {
            $($kind),*
        }

        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
        pub enum EagerExpander {
            $($e_kind),*
        }

        impl BuiltinFnLikeExpander {
            /// Dispatches to the lazy expander registered for this variant.
            pub fn expand(
                &self,
                db: &dyn AstDatabase,
                id: LazyMacroId,
                tt: &tt::Subtree,
            ) -> Result<tt::Subtree, mbe::ExpandError> {
                let expander = match *self {
                    $( BuiltinFnLikeExpander::$kind => $expand, )*
                };
                expander(db, id, tt)
            }
        }

        impl EagerExpander {
            /// Dispatches to the eager expander registered for this variant.
            pub fn expand(
                &self,
                db: &dyn AstDatabase,
                arg_id: EagerMacroId,
                tt: &tt::Subtree,
            ) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
                let expander = match *self {
                    $( EagerExpander::$e_kind => $e_expand, )*
                };
                expander(db,arg_id,tt)
            }
        }

        fn find_by_name(ident: &name::Name) -> Option<Either<BuiltinFnLikeExpander, EagerExpander>> {
            match ident {
                $( id if id == &name::name![$name] => Some(Either::Left(BuiltinFnLikeExpander::$kind)), )*
                $( id if id == &name::name![$e_name] => Some(Either::Right(EagerExpander::$e_kind)), )*
                _ => return None,
            }
        }
    };
}
62 | |||
63 | pub fn find_builtin_macro( | ||
64 | ident: &name::Name, | ||
65 | krate: CrateId, | ||
66 | ast_id: AstId<ast::MacroCall>, | ||
67 | ) -> Option<MacroDefId> { | ||
68 | let kind = find_by_name(ident)?; | ||
69 | |||
70 | match kind { | ||
71 | Either::Left(kind) => Some(MacroDefId { | ||
72 | krate: Some(krate), | ||
73 | ast_id: Some(ast_id), | ||
74 | kind: MacroDefKind::BuiltIn(kind), | ||
75 | local_inner: false, | ||
76 | }), | ||
77 | Either::Right(kind) => Some(MacroDefId { | ||
78 | krate: Some(krate), | ||
79 | ast_id: Some(ast_id), | ||
80 | kind: MacroDefKind::BuiltInEager(kind), | ||
81 | local_inner: false, | ||
82 | }), | ||
83 | } | ||
84 | } | ||
85 | |||
// The table of builtin macros handled here, split into lazy (fn-like) and
// eager expanders.
register_builtin! {
    LAZY:
    (column, Column) => column_expand,
    (compile_error, CompileError) => compile_error_expand,
    (file, File) => file_expand,
    (line, Line) => line_expand,
    (assert, Assert) => assert_expand,
    (stringify, Stringify) => stringify_expand,
    (format_args, FormatArgs) => format_args_expand,
    // format_args_nl only differs in that it adds a newline in the end,
    // so we use the same stub expansion for now
    (format_args_nl, FormatArgsNl) => format_args_expand,

    EAGER:
    (concat, Concat) => concat_expand,
    (include, Include) => include_expand,
    (include_bytes, IncludeBytes) => include_bytes_expand,
    (include_str, IncludeStr) => include_str_expand,
    (env, Env) => env_expand,
    (option_env, OptionEnv) => option_env_expand
}
107 | |||
/// `line!()` — always expands to `0`.
fn line_expand(
    _db: &dyn AstDatabase,
    _id: LazyMacroId,
    _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // dummy implementation for type-checking purposes
    let line_num = 0;
    let expanded = quote! {
        #line_num
    };

    Ok(expanded)
}
121 | |||
/// `stringify!(…)` — re-emits the macro's argument text as a string literal.
///
/// NOTE(review): the slice below strips exactly one char off each end of the
/// argument text, i.e. it assumes `(`/`)`-style one-char delimiters — confirm
/// behavior for `[]`/`{}` invocations.
fn stringify_expand(
    db: &dyn AstDatabase,
    id: LazyMacroId,
    _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let loc = db.lookup_intern_macro(id);

    let macro_content = {
        let arg = loc.kind.arg(db).ok_or_else(|| mbe::ExpandError::UnexpectedToken)?;
        let macro_args = arg;
        let text = macro_args.text();
        let without_parens = TextSize::of('(')..text.len() - TextSize::of(')');
        text.slice(without_parens).to_string()
    };

    let expanded = quote! {
        #macro_content
    };

    Ok(expanded)
}
143 | |||
/// `column!()` — always expands to `0`.
fn column_expand(
    _db: &dyn AstDatabase,
    _id: LazyMacroId,
    _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // dummy implementation for type-checking purposes
    let col_num = 0;
    let expanded = quote! {
        #col_num
    };

    Ok(expanded)
}
157 | |||
/// `assert!(…)` — a hacky expansion for goto-def and hover.
fn assert_expand(
    _db: &dyn AstDatabase,
    _id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // A hacky implementation for goto def and hover
    // We expand `assert!(cond, arg1, arg2)` to
    // ```
    // { { (&(cond), &(arg1), &(arg2),); } }
    // ```,
    // which is wrong but useful.

    // Split the argument token stream on top-level commas.
    let mut args = Vec::new();
    let mut current = Vec::new();
    for tt in tt.token_trees.iter().cloned() {
        match tt {
            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
                args.push(current);
                current = Vec::new();
            }
            _ => {
                current.push(tt);
            }
        }
    }
    if !current.is_empty() {
        args.push(current);
    }

    // Wrap each argument (including the condition) in `&(…)` so it is used
    // by-reference in the dummy tuple expression.
    let arg_tts = args.into_iter().flat_map(|arg| {
        quote! { &(##arg), }
    }.token_trees).collect::<Vec<_>>();

    let expanded = quote! {
        { { (##arg_tts); } }
    };
    Ok(expanded)
}
196 | |||
/// `file!()` — always expands to the empty string literal.
fn file_expand(
    _db: &dyn AstDatabase,
    _id: LazyMacroId,
    _tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // FIXME: RA purposefully lacks knowledge of absolute file names
    // so just return "".
    let file_name = "";

    let expanded = quote! {
        #file_name
    };

    Ok(expanded)
}
212 | |||
/// `compile_error!("…")` — expands to a diverging `loop { … }` so the call
/// site type-checks as `!`.
///
/// The `contains('"')` test is a rough check that the single literal argument
/// really is a string literal; anything else is rejected.
fn compile_error_expand(
    _db: &dyn AstDatabase,
    _id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    if tt.count() == 1 {
        if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] {
            let s = it.text.as_str();
            if s.contains('"') {
                return Ok(quote! { loop { #it }});
            }
        };
    }

    Err(mbe::ExpandError::BindingError("Must be a string".into()))
}
229 | |||
/// `format_args!(…)` — approximate expansion good enough for type inference.
fn format_args_expand(
    _db: &dyn AstDatabase,
    _id: LazyMacroId,
    tt: &tt::Subtree,
) -> Result<tt::Subtree, mbe::ExpandError> {
    // We expand `format_args!("", a1, a2)` to
    // ```
    // std::fmt::Arguments::new_v1(&[], &[
    //     std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt),
    //     std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt),
    // ])
    // ```,
    // which is still not really correct, but close enough for now

    // Split the argument token stream on top-level commas.
    let mut args = Vec::new();
    let mut current = Vec::new();
    for tt in tt.token_trees.iter().cloned() {
        match tt {
            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => {
                args.push(current);
                current = Vec::new();
            }
            _ => {
                current.push(tt);
            }
        }
    }
    if !current.is_empty() {
        args.push(current);
    }
    if args.is_empty() {
        return Err(mbe::ExpandError::NoMatchingRule);
    }
    // The format string itself is dropped; only the value arguments survive.
    let _format_string = args.remove(0);
    let arg_tts = args.into_iter().flat_map(|arg| {
        quote! { std::fmt::ArgumentV1::new(&(##arg), std::fmt::Display::fmt), }
    }.token_trees).collect::<Vec<_>>();
    let expanded = quote! {
        std::fmt::Arguments::new_v1(&[], &[##arg_tts])
    };
    Ok(expanded)
}
271 | |||
272 | fn unquote_str(lit: &tt::Literal) -> Option<String> { | ||
273 | let lit = ast::make::tokens::literal(&lit.to_string()); | ||
274 | let token = ast::String::cast(lit)?; | ||
275 | token.value().map(|it| it.into_owned()) | ||
276 | } | ||
277 | |||
/// `concat!("a", "b", …)` — eagerly concatenates string literals.
///
/// Expects a literal at every even position and a `,` at every odd position;
/// anything else is `UnexpectedToken`.
fn concat_expand(
    _db: &dyn AstDatabase,
    _arg_id: EagerMacroId,
    tt: &tt::Subtree,
) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
    let mut text = String::new();
    for (i, t) in tt.token_trees.iter().enumerate() {
        match t {
            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) if i % 2 == 0 => {
                text += &unquote_str(&it).ok_or_else(|| mbe::ExpandError::ConversionError)?;
            }
            tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
            _ => return Err(mbe::ExpandError::UnexpectedToken),
        }
    }

    Ok((quote!(#text), FragmentKind::Expr))
}
296 | |||
297 | fn relative_file( | ||
298 | db: &dyn AstDatabase, | ||
299 | call_id: MacroCallId, | ||
300 | path: &str, | ||
301 | allow_recursion: bool, | ||
302 | ) -> Option<FileId> { | ||
303 | let call_site = call_id.as_file().original_file(db); | ||
304 | let res = db.resolve_path(call_site, path)?; | ||
305 | // Prevent include itself | ||
306 | if res == call_site && !allow_recursion { | ||
307 | None | ||
308 | } else { | ||
309 | Some(res) | ||
310 | } | ||
311 | } | ||
312 | |||
313 | fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> { | ||
314 | tt.token_trees | ||
315 | .get(0) | ||
316 | .and_then(|tt| match tt { | ||
317 | tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it), | ||
318 | _ => None, | ||
319 | }) | ||
320 | .ok_or_else(|| mbe::ExpandError::ConversionError) | ||
321 | } | ||
322 | |||
/// `include!("…")` — eagerly reads and tokenizes the referenced file.
/// Self-inclusion is rejected via `relative_file(…, false)`.
fn include_expand(
    db: &dyn AstDatabase,
    arg_id: EagerMacroId,
    tt: &tt::Subtree,
) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
    let path = parse_string(tt)?;
    let file_id = relative_file(db, arg_id.into(), &path, false)
        .ok_or_else(|| mbe::ExpandError::ConversionError)?;

    // FIXME:
    // Handle include as expression
    let res = parse_to_token_tree(&db.file_text(file_id))
        .ok_or_else(|| mbe::ExpandError::ConversionError)?
        .0;

    Ok((res, FragmentKind::Items))
}
340 | |||
/// `include_bytes!("…")` — always expands to an empty byte-string literal;
/// the path is only validated, never read.
fn include_bytes_expand(
    _db: &dyn AstDatabase,
    _arg_id: EagerMacroId,
    tt: &tt::Subtree,
) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
    let _path = parse_string(tt)?;

    // FIXME: actually read the file here if the user asked for macro expansion
    let res = tt::Subtree {
        delimiter: None,
        token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
            text: r#"b"""#.into(),
            id: tt::TokenId::unspecified(),
        }))],
    };
    Ok((res, FragmentKind::Expr))
}
358 | |||
/// `include_str!("…")` — expands to the referenced file's text, or to `""`
/// when the path cannot be resolved.
fn include_str_expand(
    db: &dyn AstDatabase,
    arg_id: EagerMacroId,
    tt: &tt::Subtree,
) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
    let path = parse_string(tt)?;

    // FIXME: we're not able to read excluded files (which is most of them because
    // it's unusual to `include_str!` a Rust file), but we can return an empty string.
    // Ideally, we'd be able to offer a precise expansion if the user asks for macro
    // expansion.
    let file_id = match relative_file(db, arg_id.into(), &path, true) {
        Some(file_id) => file_id,
        None => {
            return Ok((quote!(""), FragmentKind::Expr));
        }
    };

    let text = db.file_text(file_id);
    let text = &*text;

    Ok((quote!(#text), FragmentKind::Expr))
}
382 | |||
383 | fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Option<String> { | ||
384 | let krate = db.lookup_intern_eager_expansion(arg_id).krate; | ||
385 | db.crate_graph()[krate].env.get(key) | ||
386 | } | ||
387 | |||
/// Expands `env!("KEY")` to the value of `KEY` in the crate's environment,
/// or to a dummy placeholder string when the variable is unknown.
fn env_expand(
    db: &dyn AstDatabase,
    arg_id: EagerMacroId,
    tt: &tt::Subtree,
) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
    let key = parse_string(tt)?;

    // FIXME:
    // If the environment variable is not defined, rustc emits a compilation
    // error; we might do the same once everything else is fully supported.
    // For now, return a dummy string to aid type inference. The dummy cannot
    // be an empty string, because then
    // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` would become
    // `include!("/foo.rs")`, which might loop infinitely.
    let s = get_env_inner(db, arg_id, &key).unwrap_or_else(|| "__RA_UNIMPLEMENTED__".to_string());
    let expanded = quote! { #s };

    Ok((expanded, FragmentKind::Expr))
}
407 | |||
408 | fn option_env_expand( | ||
409 | db: &dyn AstDatabase, | ||
410 | arg_id: EagerMacroId, | ||
411 | tt: &tt::Subtree, | ||
412 | ) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> { | ||
413 | let key = parse_string(tt)?; | ||
414 | let expanded = match get_env_inner(db, arg_id, &key) { | ||
415 | None => quote! { std::option::Option::None::<&str> }, | ||
416 | Some(s) => quote! { std::option::Some(#s) }, | ||
417 | }; | ||
418 | |||
419 | Ok((expanded, FragmentKind::Expr)) | ||
420 | } | ||
421 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        name::AsName, test_db::TestDB, AstNode, EagerCallLoc, MacroCallId, MacroCallKind,
        MacroCallLoc,
    };
    use base_db::{fixture::WithFixture, SourceDatabase};
    use std::sync::Arc;
    use syntax::ast::NameOwner;

    /// Test helper. The fixture must contain exactly two macro calls: a
    /// `macro_rules!` definition of a builtin (first) and an invocation of it
    /// (second). Returns the stringified expansion of the invocation.
    fn expand_builtin_macro(ra_fixture: &str) -> String {
        let (db, file_id) = TestDB::with_single_file(&ra_fixture);
        let parsed = db.parse(file_id);
        let macro_calls: Vec<_> =
            parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect();

        let ast_id_map = db.ast_id_map(file_id.into());

        // Resolve the builtin expander via the name of the defining macro.
        let expander = find_by_name(&macro_calls[0].name().unwrap().as_name()).unwrap();

        let krate = CrateId(0);
        let file_id = match expander {
            // Lazy builtin: intern a regular macro call, expanded on demand.
            Either::Left(expander) => {
                // the first one should be a macro_rules
                let def = MacroDefId {
                    krate: Some(CrateId(0)),
                    ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
                    kind: MacroDefKind::BuiltIn(expander),
                    local_inner: false,
                };

                let loc = MacroCallLoc {
                    def,
                    krate,
                    kind: MacroCallKind::FnLike(AstId::new(
                        file_id.into(),
                        ast_id_map.ast_id(&macro_calls[1]),
                    )),
                };

                let id: MacroCallId = db.intern_macro(loc).into();
                id.as_file()
            }
            // Eager builtin: expand the argument subtree up-front and intern
            // the already-expanded result.
            Either::Right(expander) => {
                // the first one should be a macro_rules
                let def = MacroDefId {
                    krate: Some(krate),
                    ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
                    kind: MacroDefKind::BuiltInEager(expander),
                    local_inner: false,
                };

                let args = macro_calls[1].token_tree().unwrap();
                let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0;

                let arg_id = db.intern_eager_expansion({
                    EagerCallLoc {
                        def,
                        fragment: FragmentKind::Expr,
                        subtree: Arc::new(parsed_args.clone()),
                        krate,
                        file_id: file_id.into(),
                    }
                });

                let (subtree, fragment) = expander.expand(&db, arg_id, &parsed_args).unwrap();
                let eager = EagerCallLoc {
                    def,
                    fragment,
                    subtree: Arc::new(subtree),
                    krate,
                    file_id: file_id.into(),
                };

                let id: MacroCallId = db.intern_eager_expansion(eager).into();
                id.as_file()
            }
        };

        db.parse_or_expand(file_id).unwrap().to_string()
    }

    // `column!` expands to a dummy `0` (no real column tracking yet).
    #[test]
    fn test_column_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! column {() => {}}
        column!()
        "#,
        );

        assert_eq!(expanded, "0");
    }

    // `line!` expands to a dummy `0` (no real line tracking yet).
    #[test]
    fn test_line_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! line {() => {}}
        line!()
        "#,
        );

        assert_eq!(expanded, "0");
    }

    #[test]
    fn test_stringify_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! stringify {() => {}}
        stringify!(a b c)
        "#,
        );

        assert_eq!(expanded, "\"a b c\"");
    }

    // Unknown env vars expand to the placeholder, not a hard error.
    #[test]
    fn test_env_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! env {() => {}}
        env!("TEST_ENV_VAR")
        "#,
        );

        assert_eq!(expanded, "\"__RA_UNIMPLEMENTED__\"");
    }

    #[test]
    fn test_option_env_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! option_env {() => {}}
        option_env!("TEST_ENV_VAR")
        "#,
        );

        assert_eq!(expanded, "std::option::Option::None:: < &str>");
    }

    // `file!` currently expands to a dummy empty string.
    #[test]
    fn test_file_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! file {() => {}}
        file!()
        "#,
        );

        assert_eq!(expanded, "\"\"");
    }

    #[test]
    fn test_assert_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! assert {
            ($cond:expr) => ({ /* compiler built-in */ });
            ($cond:expr, $($args:tt)*) => ({ /* compiler built-in */ })
        }
        assert!(true, "{} {:?}", arg1(a, b, c), arg2);
        "#,
        );

        assert_eq!(expanded, "{{(&(true), &(\"{} {:?}\"), &(arg1(a,b,c)), &(arg2),);}}");
    }

    #[test]
    fn test_compile_error_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! compile_error {
            ($msg:expr) => ({ /* compiler built-in */ });
            ($msg:expr,) => ({ /* compiler built-in */ })
        }
        compile_error!("error!");
        "#,
        );

        assert_eq!(expanded, r#"loop{"error!"}"#);
    }

    #[test]
    fn test_format_args_expand() {
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! format_args {
            ($fmt:expr) => ({ /* compiler built-in */ });
            ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
        }
        format_args!("{} {:?}", arg1(a, b, c), arg2);
        "#,
        );

        assert_eq!(
            expanded,
            r#"std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a,b,c)),std::fmt::Display::fmt),std::fmt::ArgumentV1::new(&(arg2),std::fmt::Display::fmt),])"#
        );
    }

    #[test]
    fn test_include_bytes_expand() {
        // NOTE(review): the invocation in the fixture below lacks a `!` —
        // presumably it should be `include_bytes!("foo")`; confirm this still
        // exercises the eager-expansion path as intended.
        let expanded = expand_builtin_macro(
            r#"
        #[rustc_builtin_macro]
        macro_rules! include_bytes {
            ($file:expr) => {{ /* compiler built-in */ }};
            ($file:expr,) => {{ /* compiler built-in */ }};
        }
        include_bytes("foo");
        "#,
        );

        assert_eq!(expanded, r#"b"""#);
    }
}
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs new file mode 100644 index 000000000..dcc038bcd --- /dev/null +++ b/crates/hir_expand/src/db.rs | |||
@@ -0,0 +1,403 @@ | |||
1 | //! Defines database & queries for macro expansion. | ||
2 | |||
3 | use std::sync::Arc; | ||
4 | |||
5 | use base_db::{salsa, SourceDatabase}; | ||
6 | use mbe::{ExpandResult, MacroRules}; | ||
7 | use parser::FragmentKind; | ||
8 | use syntax::{algo::diff, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode}; | ||
9 | |||
10 | use crate::{ | ||
11 | ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId, | ||
12 | HirFileId, HirFileIdRepr, LazyMacroId, MacroCallId, MacroCallLoc, MacroDefId, MacroDefKind, | ||
13 | MacroFile, ProcMacroExpander, | ||
14 | }; | ||
15 | |||
/// A macro-expansion engine: knows how to turn an argument [`tt::Subtree`]
/// into the expanded token tree for one kind of macro definition.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
    /// An ordinary declarative `macro_rules!` macro.
    MacroRules(mbe::MacroRules),
    /// A builtin function-like macro (e.g. `line!`, `stringify!`).
    Builtin(BuiltinFnLikeExpander),
    /// A builtin derive (e.g. `#[derive(Clone)]`).
    BuiltinDerive(BuiltinDeriveExpander),
    /// A procedural macro run out-of-process; results are cached in salsa.
    ProcMacro(ProcMacroExpander),
}
23 | |||
impl TokenExpander {
    /// Expands the argument token tree `tt` for the macro call `id`,
    /// dispatching to the engine behind this expander.
    pub fn expand(
        &self,
        db: &dyn AstDatabase,
        id: LazyMacroId,
        tt: &tt::Subtree,
    ) -> mbe::ExpandResult<tt::Subtree> {
        match self {
            TokenExpander::MacroRules(it) => it.expand(tt),
            // FIXME switch these to ExpandResult as well
            TokenExpander::Builtin(it) => it.expand(db, id, tt).into(),
            TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
            TokenExpander::ProcMacro(_) => {
                // We store the result in the salsa db to prevent
                // non-deterministic behavior in some proc-macro implementations.
                // See #4315 for details
                db.expand_proc_macro(id.into()).into()
            }
        }
    }

    /// Maps a token id from the macro definition down into the expansion.
    /// Only `macro_rules!` macros carry a real token map; all other
    /// expanders pass ids through unchanged.
    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
        match self {
            TokenExpander::MacroRules(it) => it.map_id_down(id),
            TokenExpander::Builtin(..) => id,
            TokenExpander::BuiltinDerive(..) => id,
            TokenExpander::ProcMacro(..) => id,
        }
    }

    /// Maps a token id from the expansion back up, also reporting whether the
    /// token originated in the macro call or the macro definition.
    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
        match self {
            TokenExpander::MacroRules(it) => it.map_id_up(id),
            TokenExpander::Builtin(..) => (id, mbe::Origin::Call),
            TokenExpander::BuiltinDerive(..) => (id, mbe::Origin::Call),
            TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
        }
    }
}
63 | |||
// FIXME: rename to ExpandDatabase
/// Salsa queries for macro expansion, layered on top of [`SourceDatabase`].
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: SourceDatabase {
    /// Map from AST nodes of a file (or expansion) to stable ids.
    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

    /// Syntax tree of either an on-disk file or a macro expansion.
    #[salsa::transparent]
    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;

    /// Interns a lazy macro call so it can be referred to by id.
    #[salsa::interned]
    fn intern_macro(&self, macro_call: MacroCallLoc) -> LazyMacroId;
    /// Green tree of a macro call's argument token tree.
    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
    /// The argument lowered to a `tt::Subtree`, plus its token map.
    #[salsa::transparent]
    fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
    /// The expander for a macro definition, plus the definition-side token map.
    fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
    /// Parses the expansion of `macro_file` into a syntax tree.
    fn parse_macro(&self, macro_file: MacroFile)
    -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
    /// Expands a macro call to a token tree; the `String` is an error message, if any.
    fn macro_expand(&self, macro_call: MacroCallId) -> (Option<Arc<tt::Subtree>>, Option<String>);

    /// Interns an eagerly-expanded macro call.
    #[salsa::interned]
    fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;

    /// Runs (and caches) a proc-macro expansion; cached so non-deterministic
    /// proc-macro implementations cannot destabilize salsa (see #4315).
    fn expand_proc_macro(&self, call: MacroCallId) -> Result<tt::Subtree, mbe::ExpandError>;
}
87 | |||
/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` mapped down into the expansion, with the mapped
/// token returned.
pub fn expand_hypothetical(
    db: &dyn AstDatabase,
    actual_macro_call: MacroCallId,
    hypothetical_args: &syntax::ast::TokenTree,
    token_to_map: syntax::SyntaxToken,
) -> Option<(SyntaxNode, syntax::SyntaxToken)> {
    let macro_file = MacroFile { macro_call_id: actual_macro_call };
    // Lower the hypothetical argument list, keeping the range↔token-id map so
    // `token_to_map` can be traced through the expansion.
    let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
    // Range of the token of interest, relative to the argument list start.
    let range =
        token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
    let token_id = tmap_1.token_by_range(range)?;
    let macro_def = expander(db, actual_macro_call)?;
    // Expand with the substituted argument instead of the real one.
    let (node, tmap_2) =
        parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?;
    // Follow the token: call-site id → definition-side id → range in the expansion.
    let token_id = macro_def.0.map_id_down(token_id);
    let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
    let token = syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?;
    Some((node.syntax_node(), token))
}
111 | |||
112 | pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { | ||
113 | let map = | ||
114 | db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); | ||
115 | Arc::new(map) | ||
116 | } | ||
117 | |||
/// Query implementation for [`AstDatabase::macro_def`]: builds the
/// [`TokenExpander`] for a macro definition, together with the
/// definition-side token map (non-trivial only for `macro_rules!`).
///
/// Returns `None` if the definition cannot be lowered or parsed, and for
/// eager builtins, which are handled by the eager-expansion path instead.
pub(crate) fn macro_def(
    db: &dyn AstDatabase,
    id: MacroDefId,
) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
    match id.kind {
        MacroDefKind::Declarative => {
            let macro_call = id.ast_id?.to_node(db);
            let arg = macro_call.token_tree()?;
            let (tt, tmap) = mbe::ast_to_token_tree(&arg).or_else(|| {
                log::warn!("fail on macro_def to token tree: {:#?}", arg);
                None
            })?;
            let rules = match MacroRules::parse(&tt) {
                Ok(it) => it,
                Err(err) => {
                    log::warn!("fail on macro_def parse: error: {:#?} {:#?}", err, tt);
                    return None;
                }
            };
            Some(Arc::new((TokenExpander::MacroRules(rules), tmap)))
        }
        // Builtins have no definition-side tokens, so their token map is empty.
        MacroDefKind::BuiltIn(expander) => {
            Some(Arc::new((TokenExpander::Builtin(expander), mbe::TokenMap::default())))
        }
        MacroDefKind::BuiltInDerive(expander) => {
            Some(Arc::new((TokenExpander::BuiltinDerive(expander), mbe::TokenMap::default())))
        }
        // Eager builtins are expanded up-front, not via this query.
        MacroDefKind::BuiltInEager(_) => None,
        MacroDefKind::CustomDerive(expander) => {
            Some(Arc::new((TokenExpander::ProcMacro(expander), mbe::TokenMap::default())))
        }
    }
}
151 | |||
152 | pub(crate) fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> { | ||
153 | let id = match id { | ||
154 | MacroCallId::LazyMacro(id) => id, | ||
155 | MacroCallId::EagerMacro(_id) => { | ||
156 | // FIXME: support macro_arg for eager macro | ||
157 | return None; | ||
158 | } | ||
159 | }; | ||
160 | let loc = db.lookup_intern_macro(id); | ||
161 | let arg = loc.kind.arg(db)?; | ||
162 | Some(arg.green().clone()) | ||
163 | } | ||
164 | |||
165 | pub(crate) fn macro_arg( | ||
166 | db: &dyn AstDatabase, | ||
167 | id: MacroCallId, | ||
168 | ) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> { | ||
169 | let arg = db.macro_arg_text(id)?; | ||
170 | let (tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg))?; | ||
171 | Some(Arc::new((tt, tmap))) | ||
172 | } | ||
173 | |||
/// Query implementation for [`AstDatabase::macro_expand`]: expansion with the
/// call's real argument (no hypothetical substitution).
pub(crate) fn macro_expand(
    db: &dyn AstDatabase,
    id: MacroCallId,
) -> (Option<Arc<tt::Subtree>>, Option<String>) {
    macro_expand_with_arg(db, id, None)
}
180 | |||
181 | fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> { | ||
182 | let lazy_id = match id { | ||
183 | MacroCallId::LazyMacro(id) => id, | ||
184 | MacroCallId::EagerMacro(_id) => { | ||
185 | return None; | ||
186 | } | ||
187 | }; | ||
188 | |||
189 | let loc = db.lookup_intern_macro(lazy_id); | ||
190 | let macro_rules = db.macro_def(loc.def)?; | ||
191 | Some(macro_rules) | ||
192 | } | ||
193 | |||
194 | fn macro_expand_with_arg( | ||
195 | db: &dyn AstDatabase, | ||
196 | id: MacroCallId, | ||
197 | arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>, | ||
198 | ) -> (Option<Arc<tt::Subtree>>, Option<String>) { | ||
199 | let lazy_id = match id { | ||
200 | MacroCallId::LazyMacro(id) => id, | ||
201 | MacroCallId::EagerMacro(id) => { | ||
202 | if arg.is_some() { | ||
203 | return ( | ||
204 | None, | ||
205 | Some("hypothetical macro expansion not implemented for eager macro".to_owned()), | ||
206 | ); | ||
207 | } else { | ||
208 | return (Some(db.lookup_intern_eager_expansion(id).subtree), None); | ||
209 | } | ||
210 | } | ||
211 | }; | ||
212 | |||
213 | let loc = db.lookup_intern_macro(lazy_id); | ||
214 | let macro_arg = match arg.or_else(|| db.macro_arg(id)) { | ||
215 | Some(it) => it, | ||
216 | None => return (None, Some("Fail to args in to tt::TokenTree".into())), | ||
217 | }; | ||
218 | |||
219 | let macro_rules = match db.macro_def(loc.def) { | ||
220 | Some(it) => it, | ||
221 | None => return (None, Some("Fail to find macro definition".into())), | ||
222 | }; | ||
223 | let ExpandResult(tt, err) = macro_rules.0.expand(db, lazy_id, ¯o_arg.0); | ||
224 | // Set a hard limit for the expanded tt | ||
225 | let count = tt.count(); | ||
226 | if count > 65536 { | ||
227 | return (None, Some(format!("Total tokens count exceed limit : count = {}", count))); | ||
228 | } | ||
229 | (Some(Arc::new(tt)), err.map(|e| format!("{:?}", e))) | ||
230 | } | ||
231 | |||
/// Query implementation for [`AstDatabase::expand_proc_macro`]: runs a
/// procedural macro on the call's argument token tree.
///
/// # Panics
/// `unreachable!` if `id` is an eager macro or the definition is not a
/// proc-macro — callers must only route proc-macro calls here.
pub(crate) fn expand_proc_macro(
    db: &dyn AstDatabase,
    id: MacroCallId,
) -> Result<tt::Subtree, mbe::ExpandError> {
    let lazy_id = match id {
        MacroCallId::LazyMacro(id) => id,
        MacroCallId::EagerMacro(_) => unreachable!(),
    };

    let loc = db.lookup_intern_macro(lazy_id);
    let macro_arg = match db.macro_arg(id) {
        Some(it) => it,
        None => {
            return Err(
                tt::ExpansionError::Unknown("No arguments for proc-macro".to_string()).into()
            )
        }
    };

    let expander = match loc.def.kind {
        MacroDefKind::CustomDerive(expander) => expander,
        _ => unreachable!(),
    };

    expander.expand(db, lazy_id, &macro_arg.0)
}
258 | |||
259 | pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> { | ||
260 | match file_id.0 { | ||
261 | HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), | ||
262 | HirFileIdRepr::MacroFile(macro_file) => { | ||
263 | db.parse_macro(macro_file).map(|(it, _)| it.syntax_node()) | ||
264 | } | ||
265 | } | ||
266 | } | ||
267 | |||
/// Query implementation for [`AstDatabase::parse_macro`]: parses the macro
/// expansion produced from the call's real argument.
pub(crate) fn parse_macro(
    db: &dyn AstDatabase,
    macro_file: MacroFile,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
    parse_macro_with_arg(db, macro_file, None)
}
274 | |||
/// Expands and parses a macro file, optionally substituting `arg` for the
/// macro's real argument (hypothetical expansion).
pub fn parse_macro_with_arg(
    db: &dyn AstDatabase,
    macro_file: MacroFile,
    arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
    let _p = profile::span("parse_macro_query");

    let macro_call_id = macro_file.macro_call_id;
    let (tt, err) = if let Some(arg) = arg {
        macro_expand_with_arg(db, macro_call_id, Some(arg))
    } else {
        db.macro_expand(macro_call_id)
    };
    if let Some(err) = &err {
        // Note:
        // Ideally every expansion would succeed and this warning would never
        // fire; until then, log enough context to debug the failure.
        match macro_call_id {
            MacroCallId::LazyMacro(id) => {
                let loc: MacroCallLoc = db.lookup_intern_macro(id);
                let node = loc.kind.node(db);

                // collect parent information for warning log
                let parents = std::iter::successors(loc.kind.file_id().call_node(db), |it| {
                    it.file_id.call_node(db)
                })
                .map(|n| format!("{:#}", n.value))
                .collect::<Vec<_>>()
                .join("\n");

                log::warn!(
                    "fail on macro_parse: (reason: {} macro_call: {:#}) parents: {}",
                    err,
                    node.value,
                    parents
                );
            }
            _ => {
                log::warn!("fail on macro_parse: (reason: {})", err);
            }
        }
    };
    let tt = tt?;

    let fragment_kind = to_fragment_kind(db, macro_call_id);

    let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;

    if err.is_none() {
        Some((parse, Arc::new(rev_token_map)))
    } else {
        // FIXME:
        // In future, we should propagate the actual error with recovery information
        // instead of ignore the error here.

        // Sanity check for a recursive identity macro: if the (errored)
        // expansion is identical to the call itself, drop it to avoid
        // infinite re-expansion.
        let node = parse.syntax_node();
        let file: HirFileId = macro_file.into();
        let call_node = file.call_node(db)?;

        if !diff(&node, &call_node.value).is_empty() {
            Some((parse, Arc::new(rev_token_map)))
        } else {
            None
        }
    }
}
342 | |||
343 | /// Given a `MacroCallId`, return what `FragmentKind` it belongs to. | ||
344 | /// FIXME: Not completed | ||
345 | fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind { | ||
346 | let lazy_id = match id { | ||
347 | MacroCallId::LazyMacro(id) => id, | ||
348 | MacroCallId::EagerMacro(id) => { | ||
349 | return db.lookup_intern_eager_expansion(id).fragment; | ||
350 | } | ||
351 | }; | ||
352 | let syn = db.lookup_intern_macro(lazy_id).kind.node(db).value; | ||
353 | |||
354 | let parent = match syn.parent() { | ||
355 | Some(it) => it, | ||
356 | None => { | ||
357 | // FIXME: | ||
358 | // If it is root, which means the parent HirFile | ||
359 | // MacroKindFile must be non-items | ||
360 | // return expr now. | ||
361 | return FragmentKind::Expr; | ||
362 | } | ||
363 | }; | ||
364 | |||
365 | match parent.kind() { | ||
366 | MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items, | ||
367 | ITEM_LIST => FragmentKind::Items, | ||
368 | LET_STMT => { | ||
369 | // FIXME: Handle Pattern | ||
370 | FragmentKind::Expr | ||
371 | } | ||
372 | // FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that | ||
373 | EXPR_STMT | BLOCK_EXPR => FragmentKind::Expr, | ||
374 | ARG_LIST => FragmentKind::Expr, | ||
375 | TRY_EXPR => FragmentKind::Expr, | ||
376 | TUPLE_EXPR => FragmentKind::Expr, | ||
377 | PAREN_EXPR => FragmentKind::Expr, | ||
378 | |||
379 | FOR_EXPR => FragmentKind::Expr, | ||
380 | PATH_EXPR => FragmentKind::Expr, | ||
381 | CLOSURE_EXPR => FragmentKind::Expr, | ||
382 | CONDITION => FragmentKind::Expr, | ||
383 | BREAK_EXPR => FragmentKind::Expr, | ||
384 | RETURN_EXPR => FragmentKind::Expr, | ||
385 | MATCH_EXPR => FragmentKind::Expr, | ||
386 | MATCH_ARM => FragmentKind::Expr, | ||
387 | MATCH_GUARD => FragmentKind::Expr, | ||
388 | RECORD_EXPR_FIELD => FragmentKind::Expr, | ||
389 | CALL_EXPR => FragmentKind::Expr, | ||
390 | INDEX_EXPR => FragmentKind::Expr, | ||
391 | METHOD_CALL_EXPR => FragmentKind::Expr, | ||
392 | AWAIT_EXPR => FragmentKind::Expr, | ||
393 | CAST_EXPR => FragmentKind::Expr, | ||
394 | REF_EXPR => FragmentKind::Expr, | ||
395 | PREFIX_EXPR => FragmentKind::Expr, | ||
396 | RANGE_EXPR => FragmentKind::Expr, | ||
397 | BIN_EXPR => FragmentKind::Expr, | ||
398 | _ => { | ||
399 | // Unknown , Just guess it is `Items` | ||
400 | FragmentKind::Items | ||
401 | } | ||
402 | } | ||
403 | } | ||
diff --git a/crates/hir_expand/src/diagnostics.rs b/crates/hir_expand/src/diagnostics.rs new file mode 100644 index 000000000..59d35debe --- /dev/null +++ b/crates/hir_expand/src/diagnostics.rs | |||
@@ -0,0 +1,95 @@ | |||
1 | //! Semantic errors and warnings. | ||
2 | //! | ||
3 | //! The `Diagnostic` trait defines a trait object which can represent any | ||
4 | //! diagnostic. | ||
5 | //! | ||
6 | //! `DiagnosticSink` struct is used as an emitter for diagnostic. When creating | ||
7 | //! a `DiagnosticSink`, you supply a callback which can react to a `dyn | ||
//! Diagnostic` or to any concrete diagnostic (downcasting is used internally).
9 | //! | ||
10 | //! Because diagnostics store file offsets, it's a bad idea to store them | ||
//! directly in salsa. For this reason, every hir subsystem defines its own
12 | //! strongly-typed closed set of diagnostics which use hir ids internally, are | ||
13 | //! stored in salsa and do *not* implement the `Diagnostic` trait. Instead, a | ||
14 | //! subsystem provides a separate, non-query-based API which can walk all stored | ||
15 | //! values and transform them into instances of `Diagnostic`. | ||
16 | |||
17 | use std::{any::Any, fmt}; | ||
18 | |||
19 | use syntax::SyntaxNodePtr; | ||
20 | |||
21 | use crate::InFile; | ||
22 | |||
/// A single semantic error or warning, locatable in a source file.
///
/// Implementations are concrete per-subsystem diagnostic structs; consumers
/// recover the concrete type by downcasting via [`Diagnostic::as_any`].
pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
    /// Human-readable message describing the problem.
    fn message(&self) -> String;
    /// Used in highlighting and related purposes
    fn display_source(&self) -> InFile<SyntaxNodePtr>;
    /// Upcast used for downcasting to the concrete diagnostic type.
    fn as_any(&self) -> &(dyn Any + Send + 'static);
    /// Whether this diagnostic comes from an experimental check; `false` by default.
    fn is_experimental(&self) -> bool {
        false
    }
}
32 | |||
/// Emitter for diagnostics: each pushed diagnostic passes through `filters`,
/// then goes to the first typed callback that claims it, falling back to
/// `default_callback`.
pub struct DiagnosticSink<'a> {
    // Type-specific handlers; `Ok(())` means "handled, stop", `Err(())` means "not my type".
    callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
    // Predicates; a diagnostic rejected by any filter is dropped entirely.
    filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
    // Receives every diagnostic no typed callback handled.
    default_callback: Box<dyn FnMut(&dyn Diagnostic) + 'a>,
}
38 | |||
39 | impl<'a> DiagnosticSink<'a> { | ||
40 | pub fn push(&mut self, d: impl Diagnostic) { | ||
41 | let d: &dyn Diagnostic = &d; | ||
42 | self._push(d); | ||
43 | } | ||
44 | |||
45 | fn _push(&mut self, d: &dyn Diagnostic) { | ||
46 | for filter in &mut self.filters { | ||
47 | if !filter(d) { | ||
48 | return; | ||
49 | } | ||
50 | } | ||
51 | for cb in &mut self.callbacks { | ||
52 | match cb(d) { | ||
53 | Ok(()) => return, | ||
54 | Err(()) => (), | ||
55 | } | ||
56 | } | ||
57 | (self.default_callback)(d) | ||
58 | } | ||
59 | } | ||
60 | |||
/// Builder for [`DiagnosticSink`]: accumulates filters and typed callbacks,
/// then `build` supplies the default callback.
pub struct DiagnosticSinkBuilder<'a> {
    // Typed handlers accumulated via `on`.
    callbacks: Vec<Box<dyn FnMut(&dyn Diagnostic) -> Result<(), ()> + 'a>>,
    // Predicates accumulated via `filter`.
    filters: Vec<Box<dyn FnMut(&dyn Diagnostic) -> bool + 'a>>,
}
65 | |||
66 | impl<'a> DiagnosticSinkBuilder<'a> { | ||
67 | pub fn new() -> Self { | ||
68 | Self { callbacks: Vec::new(), filters: Vec::new() } | ||
69 | } | ||
70 | |||
71 | pub fn filter<F: FnMut(&dyn Diagnostic) -> bool + 'a>(mut self, cb: F) -> Self { | ||
72 | self.filters.push(Box::new(cb)); | ||
73 | self | ||
74 | } | ||
75 | |||
76 | pub fn on<D: Diagnostic, F: FnMut(&D) + 'a>(mut self, mut cb: F) -> Self { | ||
77 | let cb = move |diag: &dyn Diagnostic| match diag.as_any().downcast_ref::<D>() { | ||
78 | Some(d) => { | ||
79 | cb(d); | ||
80 | Ok(()) | ||
81 | } | ||
82 | None => Err(()), | ||
83 | }; | ||
84 | self.callbacks.push(Box::new(cb)); | ||
85 | self | ||
86 | } | ||
87 | |||
88 | pub fn build<F: FnMut(&dyn Diagnostic) + 'a>(self, default_callback: F) -> DiagnosticSink<'a> { | ||
89 | DiagnosticSink { | ||
90 | callbacks: self.callbacks, | ||
91 | filters: self.filters, | ||
92 | default_callback: Box::new(default_callback), | ||
93 | } | ||
94 | } | ||
95 | } | ||
diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs new file mode 100644 index 000000000..10c45646f --- /dev/null +++ b/crates/hir_expand/src/eager.rs | |||
@@ -0,0 +1,144 @@ | |||
1 | //! Eager expansion related utils | ||
2 | //! | ||
3 | //! Here is a dump of a discussion from Vadim Petrochenkov about Eager Expansion and | ||
//! its name resolution:
5 | //! | ||
6 | //! > Eagerly expanded macros (and also macros eagerly expanded by eagerly expanded macros, | ||
7 | //! > which actually happens in practice too!) are resolved at the location of the "root" macro | ||
8 | //! > that performs the eager expansion on its arguments. | ||
9 | //! > If some name cannot be resolved at the eager expansion time it's considered unresolved, | ||
10 | //! > even if becomes available later (e.g. from a glob import or other macro). | ||
11 | //! | ||
12 | //! > Eagerly expanded macros don't add anything to the module structure of the crate and | ||
13 | //! > don't build any speculative module structures, i.e. they are expanded in a "flat" | ||
14 | //! > way even if tokens in them look like modules. | ||
15 | //! | ||
16 | //! > In other words, it kinda works for simple cases for which it was originally intended, | ||
17 | //! > and we need to live with it because it's available on stable and widely relied upon. | ||
18 | //! | ||
19 | //! | ||
20 | //! See the full discussion : https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros | ||
21 | |||
22 | use crate::{ | ||
23 | ast::{self, AstNode}, | ||
24 | db::AstDatabase, | ||
25 | EagerCallLoc, EagerMacroId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, | ||
26 | }; | ||
27 | |||
28 | use base_db::CrateId; | ||
29 | use parser::FragmentKind; | ||
30 | use std::sync::Arc; | ||
31 | use syntax::{algo::SyntaxRewriter, SyntaxNode}; | ||
32 | |||
/// Eagerly expands `macro_call`, a call to a built-in eager macro such as
/// `concat!` or `env!`: the argument token tree is itself macro-expanded
/// first, and only then handed to the eager expander. Returns the interned id
/// of the finished expansion, or `None` if any step fails or `def` turns out
/// not to be an eager macro.
pub fn expand_eager_macro(
    db: &dyn AstDatabase,
    krate: CrateId,
    macro_call: InFile<ast::MacroCall>,
    def: MacroDefId,
    resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
) -> Option<EagerMacroId> {
    let args = macro_call.value.token_tree()?;
    let parsed_args = mbe::ast_to_token_tree(&args)?.0;

    // Note:
    // When `lazy_expand` is called, its *parent* file must already exist.
    // Here we store an eager macro id for the argument expanded subtree
    // for that purpose.
    let arg_id = db.intern_eager_expansion({
        EagerCallLoc {
            def,
            fragment: FragmentKind::Expr,
            subtree: Arc::new(parsed_args.clone()),
            krate,
            file_id: macro_call.file_id,
        }
    });
    let arg_file_id: MacroCallId = arg_id.into();

    // Expand any macro calls *inside* the argument before feeding it to the
    // eager expander.
    let parsed_args = mbe::token_tree_to_syntax_node(&parsed_args, FragmentKind::Expr).ok()?.0;
    let result = eager_macro_recur(
        db,
        InFile::new(arg_file_id.as_file(), parsed_args.syntax_node()),
        krate,
        resolver,
    )?;
    let subtree = to_subtree(&result)?;

    if let MacroDefKind::BuiltInEager(eager) = def.kind {
        let (subtree, fragment) = eager.expand(db, arg_id, &subtree).ok()?;
        let eager = EagerCallLoc {
            def,
            fragment,
            subtree: Arc::new(subtree),
            krate,
            file_id: macro_call.file_id,
        };

        Some(db.intern_eager_expansion(eager))
    } else {
        None
    }
}
82 | |||
83 | fn to_subtree(node: &SyntaxNode) -> Option<tt::Subtree> { | ||
84 | let mut subtree = mbe::syntax_node_to_token_tree(node)?.0; | ||
85 | subtree.delimiter = None; | ||
86 | Some(subtree) | ||
87 | } | ||
88 | |||
89 | fn lazy_expand( | ||
90 | db: &dyn AstDatabase, | ||
91 | def: &MacroDefId, | ||
92 | macro_call: InFile<ast::MacroCall>, | ||
93 | krate: CrateId, | ||
94 | ) -> Option<InFile<SyntaxNode>> { | ||
95 | let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); | ||
96 | |||
97 | let id: MacroCallId = | ||
98 | def.as_lazy_macro(db, krate, MacroCallKind::FnLike(macro_call.with_value(ast_id))).into(); | ||
99 | |||
100 | db.parse_or_expand(id.as_file()).map(|node| InFile::new(id.as_file(), node)) | ||
101 | } | ||
102 | |||
/// Walks `curr` and replaces every macro call found in it with its expansion,
/// recursing into the expansions themselves. Eager macros are expanded
/// eagerly; everything else goes through the lazy path.
fn eager_macro_recur(
    db: &dyn AstDatabase,
    curr: InFile<SyntaxNode>,
    krate: CrateId,
    macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
) -> Option<SyntaxNode> {
    let original = curr.value.clone();

    let children = curr.value.descendants().filter_map(ast::MacroCall::cast);
    let mut rewriter = SyntaxRewriter::default();

    // Collect replacement
    for child in children {
        let def: MacroDefId = macro_resolver(child.path()?)?;
        let insert = match def.kind {
            MacroDefKind::BuiltInEager(_) => {
                let id: MacroCallId = expand_eager_macro(
                    db,
                    krate,
                    curr.with_value(child.clone()),
                    def,
                    macro_resolver,
                )?
                .into();
                db.parse_or_expand(id.as_file())?
            }
            MacroDefKind::Declarative
            | MacroDefKind::BuiltIn(_)
            | MacroDefKind::BuiltInDerive(_)
            | MacroDefKind::CustomDerive(_) => {
                let expanded = lazy_expand(db, &def, curr.with_value(child.clone()), krate)?;
                // replace macro inside the lazy expansion as well
                eager_macro_recur(db, expanded, krate, macro_resolver)?
            }
        };

        rewriter.replace(child.syntax(), &insert);
    }

    // Apply all collected replacements in one pass over the original tree.
    let res = rewriter.rewrite(&original);
    Some(res)
}
diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs new file mode 100644 index 000000000..845e9cbc1 --- /dev/null +++ b/crates/hir_expand/src/hygiene.rs | |||
@@ -0,0 +1,66 @@ | |||
1 | //! This modules handles hygiene information. | ||
2 | //! | ||
3 | //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at | ||
4 | //! this moment, this is horribly incomplete and handles only `$crate`. | ||
5 | use base_db::CrateId; | ||
6 | use either::Either; | ||
7 | use syntax::ast; | ||
8 | |||
9 | use crate::{ | ||
10 | db::AstDatabase, | ||
11 | name::{AsName, Name}, | ||
12 | HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind, | ||
13 | }; | ||
14 | |||
/// Hygiene info for a syntax tree, derived from the macro expansion (if any)
/// that produced it. Currently only enough to resolve `$crate`.
#[derive(Clone, Debug)]
pub struct Hygiene {
    // This is what `$crate` expands to
    def_crate: Option<CrateId>,

    // Indicate this is a local inner macro
    local_inner: bool,
}
23 | |||
24 | impl Hygiene { | ||
25 | pub fn new(db: &dyn AstDatabase, file_id: HirFileId) -> Hygiene { | ||
26 | let (def_crate, local_inner) = match file_id.0 { | ||
27 | HirFileIdRepr::FileId(_) => (None, false), | ||
28 | HirFileIdRepr::MacroFile(macro_file) => match macro_file.macro_call_id { | ||
29 | MacroCallId::LazyMacro(id) => { | ||
30 | let loc = db.lookup_intern_macro(id); | ||
31 | match loc.def.kind { | ||
32 | MacroDefKind::Declarative => (loc.def.krate, loc.def.local_inner), | ||
33 | MacroDefKind::BuiltIn(_) => (None, false), | ||
34 | MacroDefKind::BuiltInDerive(_) => (None, false), | ||
35 | MacroDefKind::BuiltInEager(_) => (None, false), | ||
36 | MacroDefKind::CustomDerive(_) => (None, false), | ||
37 | } | ||
38 | } | ||
39 | MacroCallId::EagerMacro(_id) => (None, false), | ||
40 | }, | ||
41 | }; | ||
42 | Hygiene { def_crate, local_inner } | ||
43 | } | ||
44 | |||
45 | pub fn new_unhygienic() -> Hygiene { | ||
46 | Hygiene { def_crate: None, local_inner: false } | ||
47 | } | ||
48 | |||
49 | // FIXME: this should just return name | ||
50 | pub fn name_ref_to_name(&self, name_ref: ast::NameRef) -> Either<Name, CrateId> { | ||
51 | if let Some(def_crate) = self.def_crate { | ||
52 | if name_ref.text() == "$crate" { | ||
53 | return Either::Right(def_crate); | ||
54 | } | ||
55 | } | ||
56 | Either::Left(name_ref.as_name()) | ||
57 | } | ||
58 | |||
59 | pub fn local_inner_macros(&self) -> Option<CrateId> { | ||
60 | if self.local_inner { | ||
61 | self.def_crate | ||
62 | } else { | ||
63 | None | ||
64 | } | ||
65 | } | ||
66 | } | ||
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs new file mode 100644 index 000000000..2be15e841 --- /dev/null +++ b/crates/hir_expand/src/lib.rs | |||
@@ -0,0 +1,453 @@ | |||
1 | //! `hir_expand` deals with macro expansion. | ||
2 | //! | ||
3 | //! Specifically, it implements a concept of `MacroFile` -- a file whose syntax | ||
4 | //! tree originates not from the text of some `FileId`, but from some macro | ||
5 | //! expansion. | ||
6 | |||
7 | pub mod db; | ||
8 | pub mod ast_id_map; | ||
9 | pub mod name; | ||
10 | pub mod hygiene; | ||
11 | pub mod diagnostics; | ||
12 | pub mod builtin_derive; | ||
13 | pub mod builtin_macro; | ||
14 | pub mod proc_macro; | ||
15 | pub mod quote; | ||
16 | pub mod eager; | ||
17 | |||
18 | use std::hash::Hash; | ||
19 | use std::sync::Arc; | ||
20 | |||
21 | use base_db::{impl_intern_key, salsa, CrateId, FileId}; | ||
22 | use syntax::{ | ||
23 | algo, | ||
24 | ast::{self, AstNode}, | ||
25 | SyntaxNode, SyntaxToken, TextSize, | ||
26 | }; | ||
27 | |||
28 | use crate::ast_id_map::FileAstId; | ||
29 | use crate::builtin_derive::BuiltinDeriveExpander; | ||
30 | use crate::builtin_macro::{BuiltinFnLikeExpander, EagerExpander}; | ||
31 | use crate::proc_macro::ProcMacroExpander; | ||
32 | |||
33 | #[cfg(test)] | ||
34 | mod test_db; | ||
35 | |||
36 | /// Input to the analyzer is a set of files, where each file is identified by | ||
37 | /// `FileId` and contains source code. However, another source of source code in | ||
38 | /// Rust are macros: each macro can be thought of as producing a "temporary | ||
39 | /// file". To assign an id to such a file, we use the id of the macro call that | ||
40 | /// produced the file. So, a `HirFileId` is either a `FileId` (source code | ||
41 | /// written by user), or a `MacroCallId` (source code produced by macro). | ||
42 | /// | ||
43 | /// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file | ||
44 | /// containing the call plus the offset of the macro call in the file. Note that | ||
45 | /// this is a recursive definition! However, the size_of of `HirFileId` is | ||
46 | /// finite (because everything bottoms out at the real `FileId`) and small | ||
47 | /// (`MacroCallId` uses the location interning. You can check details here: | ||
48 | /// https://en.wikipedia.org/wiki/String_interning). | ||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct HirFileId(HirFileIdRepr);

// Private representation: either a real on-disk file or a macro expansion.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum HirFileIdRepr {
    FileId(FileId),
    MacroFile(MacroFile),
}

impl From<FileId> for HirFileId {
    fn from(id: FileId) -> Self {
        HirFileId(HirFileIdRepr::FileId(id))
    }
}

impl From<MacroFile> for HirFileId {
    fn from(id: MacroFile) -> Self {
        HirFileId(HirFileIdRepr::MacroFile(id))
    }
}
69 | |||
impl HirFileId {
    /// For macro-expansion files, returns the file original source file the
    /// expansion originated from.
    pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
        match self.0 {
            HirFileIdRepr::FileId(file_id) => file_id,
            HirFileIdRepr::MacroFile(macro_file) => {
                let file_id = match macro_file.macro_call_id {
                    MacroCallId::LazyMacro(id) => {
                        let loc = db.lookup_intern_macro(id);
                        loc.kind.file_id()
                    }
                    MacroCallId::EagerMacro(id) => {
                        let loc = db.lookup_intern_eager_expansion(id);
                        loc.file_id
                    }
                };
                // The containing file may itself be a macro file, so recurse
                // until we bottom out at a real `FileId`.
                file_id.original_file(db)
            }
        }
    }

    /// Returns how many macro expansions deep this file is (0 for a real file).
    pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
        let mut level = 0;
        let mut curr = self;
        // Walk up the chain of containing files, counting macro hops.
        while let HirFileIdRepr::MacroFile(macro_file) = curr.0 {
            level += 1;
            curr = match macro_file.macro_call_id {
                MacroCallId::LazyMacro(id) => {
                    let loc = db.lookup_intern_macro(id);
                    loc.kind.file_id()
                }
                MacroCallId::EagerMacro(id) => {
                    let loc = db.lookup_intern_eager_expansion(id);
                    loc.file_id
                }
            };
        }
        level
    }

    /// If this is a macro call, returns the syntax node of the call.
    pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
        match self.0 {
            HirFileIdRepr::FileId(_) => None,
            HirFileIdRepr::MacroFile(macro_file) => {
                let lazy_id = match macro_file.macro_call_id {
                    MacroCallId::LazyMacro(id) => id,
                    MacroCallId::EagerMacro(_id) => {
                        // FIXME: handle call node for eager macro
                        return None;
                    }
                };
                let loc = db.lookup_intern_macro(lazy_id);
                Some(loc.kind.node(db))
            }
        }
    }

    /// Return expansion information if it is a macro-expansion file
    pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
        match self.0 {
            HirFileIdRepr::FileId(_) => None,
            HirFileIdRepr::MacroFile(macro_file) => {
                let lazy_id = match macro_file.macro_call_id {
                    MacroCallId::LazyMacro(id) => id,
                    MacroCallId::EagerMacro(_id) => {
                        // FIXME: handle expansion_info for eager macro
                        return None;
                    }
                };
                let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id);

                // Token tree of the call's argument and of the definition;
                // None-propagation here means "no expansion info available".
                let arg_tt = loc.kind.arg(db)?;
                let def_tt = loc.def.ast_id?.to_node(db).token_tree()?;

                let macro_def = db.macro_def(loc.def)?;
                let (parse, exp_map) = db.parse_macro(macro_file)?;
                let macro_arg = db.macro_arg(macro_file.macro_call_id)?;

                Some(ExpansionInfo {
                    expanded: InFile::new(self, parse.syntax_node()),
                    arg: InFile::new(loc.kind.file_id(), arg_tt),
                    def: InFile::new(loc.def.ast_id?.file_id, def_tt),
                    macro_arg,
                    macro_def,
                    exp_map,
                })
            }
        }
    }

    /// Indicate it is macro file generated for builtin derive
    pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Item>> {
        match self.0 {
            HirFileIdRepr::FileId(_) => None,
            HirFileIdRepr::MacroFile(macro_file) => {
                let lazy_id = match macro_file.macro_call_id {
                    MacroCallId::LazyMacro(id) => id,
                    MacroCallId::EagerMacro(_id) => {
                        return None;
                    }
                };
                let loc: MacroCallLoc = db.lookup_intern_macro(lazy_id);
                // Only calls whose definition is a builtin derive qualify;
                // the returned item is the one the derive is attached to.
                let item = match loc.def.kind {
                    MacroDefKind::BuiltInDerive(_) => loc.kind.node(db),
                    _ => return None,
                };
                Some(item.with_value(ast::Item::cast(item.value.clone())?))
            }
        }
    }
}
183 | |||
/// A virtual file representing the output of exactly one macro call.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFile {
    macro_call_id: MacroCallId,
}

/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroCallId {
    LazyMacro(LazyMacroId),
    EagerMacro(EagerMacroId),
}

/// Interned id of a lazily-expanded macro call (see `MacroCallLoc`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct LazyMacroId(salsa::InternId);
impl_intern_key!(LazyMacroId);

/// Interned id of an eagerly-expanded macro call (see `EagerCallLoc`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EagerMacroId(salsa::InternId);
impl_intern_key!(EagerMacroId);

impl From<LazyMacroId> for MacroCallId {
    fn from(it: LazyMacroId) -> Self {
        MacroCallId::LazyMacro(it)
    }
}
impl From<EagerMacroId> for MacroCallId {
    fn from(it: EagerMacroId) -> Self {
        MacroCallId::EagerMacro(it)
    }
}
215 | |||
/// Identity of a macro definition: defining crate, syntax location, and how
/// to expand it.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroDefId {
    // FIXME: krate and ast_id are currently optional because we don't have a
    // definition location for built-in derives. There is one, though: the
    // standard library defines them. The problem is that it uses the new
    // `macro` syntax for this, which we don't support yet. As soon as we do
    // (which will probably require touching this code), we can instead use
    // that (and also remove the hacks for resolving built-in derives).
    pub krate: Option<CrateId>,
    pub ast_id: Option<AstId<ast::MacroCall>>,
    pub kind: MacroDefKind,

    // Feeds hygiene's `local_inner_macros`; presumably set for macros exported
    // with `#[macro_export(local_inner_macros)]` — TODO confirm at the def site.
    pub local_inner: bool,
}

impl MacroDefId {
    /// Interns a lazy call to this macro, producing the id under which the
    /// expansion can later be requested from the database.
    pub fn as_lazy_macro(
        self,
        db: &dyn db::AstDatabase,
        krate: CrateId,
        kind: MacroCallKind,
    ) -> LazyMacroId {
        db.intern_macro(MacroCallLoc { def: self, krate, kind })
    }
}
241 | |||
/// How a macro definition expands: which expander implementation to use.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroDefKind {
    // An ordinary `macro_rules!` macro.
    Declarative,
    BuiltIn(BuiltinFnLikeExpander),
    // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
    BuiltInDerive(BuiltinDeriveExpander),
    BuiltInEager(EagerExpander),
    CustomDerive(ProcMacroExpander),
}

/// A particular lazy macro call: which macro, from which crate, called how.
/// This is the interned payload behind `LazyMacroId`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MacroCallLoc {
    pub(crate) def: MacroDefId,
    pub(crate) krate: CrateId,
    pub(crate) kind: MacroCallKind,
}

/// The syntactic shape of a macro call.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum MacroCallKind {
    // A function-like invocation: `m!(...)`.
    FnLike(AstId<ast::MacroCall>),
    // An attribute macro on an item; presumably the String is the attribute
    // name — TODO confirm against the construction site.
    Attr(AstId<ast::Item>, String),
}
264 | |||
265 | impl MacroCallKind { | ||
266 | fn file_id(&self) -> HirFileId { | ||
267 | match self { | ||
268 | MacroCallKind::FnLike(ast_id) => ast_id.file_id, | ||
269 | MacroCallKind::Attr(ast_id, _) => ast_id.file_id, | ||
270 | } | ||
271 | } | ||
272 | |||
273 | fn node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> { | ||
274 | match self { | ||
275 | MacroCallKind::FnLike(ast_id) => ast_id.with_value(ast_id.to_node(db).syntax().clone()), | ||
276 | MacroCallKind::Attr(ast_id, _) => { | ||
277 | ast_id.with_value(ast_id.to_node(db).syntax().clone()) | ||
278 | } | ||
279 | } | ||
280 | } | ||
281 | |||
282 | fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> { | ||
283 | match self { | ||
284 | MacroCallKind::FnLike(ast_id) => { | ||
285 | Some(ast_id.to_node(db).token_tree()?.syntax().clone()) | ||
286 | } | ||
287 | MacroCallKind::Attr(ast_id, _) => Some(ast_id.to_node(db).syntax().clone()), | ||
288 | } | ||
289 | } | ||
290 | } | ||
291 | |||
292 | impl MacroCallId { | ||
293 | pub fn as_file(self) -> HirFileId { | ||
294 | MacroFile { macro_call_id: self }.into() | ||
295 | } | ||
296 | } | ||
297 | |||
/// A fully determined eager macro expansion: the definition, the already
/// expanded argument subtree, and where the call happened. Interned payload
/// behind `EagerMacroId`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct EagerCallLoc {
    pub(crate) def: MacroDefId,
    // How the expansion output should be parsed (expression, item, ...).
    pub(crate) fragment: FragmentKind,
    pub(crate) subtree: Arc<tt::Subtree>,
    pub(crate) krate: CrateId,
    pub(crate) file_id: HirFileId,
}

/// ExpansionInfo mainly describes how to map text range between src and expanded macro
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ExpansionInfo {
    expanded: InFile<SyntaxNode>,
    // The call's argument node, in the calling file.
    arg: InFile<SyntaxNode>,
    // The macro definition's token tree.
    def: InFile<ast::TokenTree>,

    macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
    exp_map: Arc<mbe::TokenMap>,
}

pub use mbe::Origin;
use parser::FragmentKind;
321 | |||
impl ExpansionInfo {
    /// The syntax node of the macro call this expansion came from.
    pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
        Some(self.arg.with_value(self.arg.value.parent()?))
    }

    /// Maps a token in the macro call's argument down to the corresponding
    /// token inside the expansion.
    pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<SyntaxToken>> {
        assert_eq!(token.file_id, self.arg.file_id);
        // Token-map ranges are relative to the argument node, so rebase the
        // token's absolute range first.
        let range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
        let token_id = self.macro_arg.1.token_by_range(range)?;
        let token_id = self.macro_def.0.map_id_down(token_id);

        let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;

        let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;

        Some(self.expanded.with_value(token))
    }

    /// Maps a token in the expansion back up to either the call site or the
    /// macro definition, reporting which one via `Origin`.
    pub fn map_token_up(
        &self,
        token: InFile<&SyntaxToken>,
    ) -> Option<(InFile<SyntaxToken>, Origin)> {
        let token_id = self.exp_map.token_by_range(token.value.text_range())?;

        let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
        // Pick the token map and tree matching where the token came from.
        let (token_map, tt) = match origin {
            mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
            mbe::Origin::Def => {
                (&self.macro_def.1, self.def.as_ref().map(|tt| tt.syntax().clone()))
            }
        };

        // Ranges in the map are relative to the tree, so rebase back to
        // absolute coordinates before searching.
        let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
        let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
            .into_token()?;
        Some((tt.with_value(token), origin))
    }
}
360 | |||
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
// FIXME: isn't this just a `Source<FileAstId<N>>` ?
pub type AstId<N> = InFile<FileAstId<N>>;

impl<N: AstNode> AstId<N> {
    /// Re-parses (or re-expands) the file and returns the pointed-to node.
    ///
    /// Panics if the file cannot be parsed or expanded.
    pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
        let root = db.parse_or_expand(self.file_id).unwrap();
        db.ast_id_map(self.file_id).get(self.value).to_node(&root)
    }
}
373 | |||
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFile<T> {
    // The file (real or macro-expansion) the value belongs to.
    pub file_id: HirFileId,
    pub value: T,
}
386 | |||
387 | impl<T> InFile<T> { | ||
388 | pub fn new(file_id: HirFileId, value: T) -> InFile<T> { | ||
389 | InFile { file_id, value } | ||
390 | } | ||
391 | |||
392 | // Similarly, naming here is stupid... | ||
393 | pub fn with_value<U>(&self, value: U) -> InFile<U> { | ||
394 | InFile::new(self.file_id, value) | ||
395 | } | ||
396 | |||
397 | pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> { | ||
398 | InFile::new(self.file_id, f(self.value)) | ||
399 | } | ||
400 | pub fn as_ref(&self) -> InFile<&T> { | ||
401 | self.with_value(&self.value) | ||
402 | } | ||
403 | pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode { | ||
404 | db.parse_or_expand(self.file_id).expect("source created from invalid file") | ||
405 | } | ||
406 | } | ||
407 | |||
408 | impl<T: Clone> InFile<&T> { | ||
409 | pub fn cloned(&self) -> InFile<T> { | ||
410 | self.with_value(self.value.clone()) | ||
411 | } | ||
412 | } | ||
413 | |||
414 | impl<T> InFile<Option<T>> { | ||
415 | pub fn transpose(self) -> Option<InFile<T>> { | ||
416 | let value = self.value?; | ||
417 | Some(InFile::new(self.file_id, value)) | ||
418 | } | ||
419 | } | ||
420 | |||
impl InFile<SyntaxNode> {
    /// Iterates this node's ancestors; when the top of a file is reached,
    /// continues with the macro call that produced that file (if any).
    pub fn ancestors_with_macros(
        self,
        db: &dyn db::AstDatabase,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
        std::iter::successors(Some(self), move |node| match node.value.parent() {
            Some(parent) => Some(node.with_value(parent)),
            None => {
                // No syntactic parent: jump out of the expansion to the call
                // site, or stop if this is a real file.
                let parent_node = node.file_id.call_node(db)?;
                Some(parent_node)
            }
        })
    }
}

impl InFile<SyntaxToken> {
    /// Like `InFile<SyntaxNode>::ancestors_with_macros`, starting from the
    /// token's parent node.
    pub fn ancestors_with_macros(
        self,
        db: &dyn db::AstDatabase,
    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
        self.map(|it| it.parent()).ancestors_with_macros(db)
    }
}

impl<N: AstNode> InFile<N> {
    /// All descendants of this node castable to `T`, each paired with the file id.
    pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
        self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
    }

    /// The underlying syntax node, paired with the file id.
    pub fn syntax(&self) -> InFile<&SyntaxNode> {
        self.with_value(self.value.syntax())
    }
}
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs new file mode 100644 index 000000000..49841c7a1 --- /dev/null +++ b/crates/hir_expand/src/name.rs | |||
@@ -0,0 +1,230 @@ | |||
//! Defines `Name`, hir's string-based representation of identifiers and
//! field names, plus conversions from various syntax node/token types.
2 | |||
3 | use std::fmt; | ||
4 | |||
5 | use syntax::{ast, SmolStr}; | ||
6 | |||
/// `Name` is a wrapper around string, which is used in hir for both references
/// and declarations. In theory, names should also carry hygiene info, but we are
/// not there yet!
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Name(Repr);

/// Internal representation: either real text, or a positional tuple field
/// (`.0`, `.1`, ...) stored as an index to avoid allocating a string.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
enum Repr {
    Text(SmolStr),
    TupleField(usize),
}
18 | |||
19 | impl fmt::Display for Name { | ||
20 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
21 | match &self.0 { | ||
22 | Repr::Text(text) => fmt::Display::fmt(&text, f), | ||
23 | Repr::TupleField(idx) => fmt::Display::fmt(&idx, f), | ||
24 | } | ||
25 | } | ||
26 | } | ||
27 | |||
28 | impl Name { | ||
29 | /// Note: this is private to make creating name from random string hard. | ||
30 | /// Hopefully, this should allow us to integrate hygiene cleaner in the | ||
31 | /// future, and to switch to interned representation of names. | ||
32 | const fn new_text(text: SmolStr) -> Name { | ||
33 | Name(Repr::Text(text)) | ||
34 | } | ||
35 | |||
36 | pub fn new_tuple_field(idx: usize) -> Name { | ||
37 | Name(Repr::TupleField(idx)) | ||
38 | } | ||
39 | |||
40 | pub fn new_lifetime(lt: &syntax::SyntaxToken) -> Name { | ||
41 | assert!(lt.kind() == syntax::SyntaxKind::LIFETIME); | ||
42 | Name(Repr::Text(lt.text().clone())) | ||
43 | } | ||
44 | |||
45 | /// Shortcut to create inline plain text name | ||
46 | const fn new_inline_ascii(text: &[u8]) -> Name { | ||
47 | Name::new_text(SmolStr::new_inline_from_ascii(text.len(), text)) | ||
48 | } | ||
49 | |||
50 | /// Resolve a name from the text of token. | ||
51 | fn resolve(raw_text: &SmolStr) -> Name { | ||
52 | let raw_start = "r#"; | ||
53 | if raw_text.as_str().starts_with(raw_start) { | ||
54 | Name::new_text(SmolStr::new(&raw_text[raw_start.len()..])) | ||
55 | } else { | ||
56 | Name::new_text(raw_text.clone()) | ||
57 | } | ||
58 | } | ||
59 | |||
60 | pub fn missing() -> Name { | ||
61 | Name::new_text("[missing name]".into()) | ||
62 | } | ||
63 | |||
64 | pub fn as_tuple_index(&self) -> Option<usize> { | ||
65 | match self.0 { | ||
66 | Repr::TupleField(idx) => Some(idx), | ||
67 | _ => None, | ||
68 | } | ||
69 | } | ||
70 | } | ||
71 | |||
/// Conversion of various syntax node/token types into a hir `Name`.
pub trait AsName {
    fn as_name(&self) -> Name;
}

impl AsName for ast::NameRef {
    fn as_name(&self) -> Name {
        // Numeric references like `.0` become tuple-field names, not text.
        match self.as_tuple_field() {
            Some(idx) => Name::new_tuple_field(idx),
            None => Name::resolve(self.text()),
        }
    }
}

impl AsName for ast::Name {
    fn as_name(&self) -> Name {
        Name::resolve(self.text())
    }
}

impl AsName for ast::NameOrNameRef {
    fn as_name(&self) -> Name {
        match self {
            ast::NameOrNameRef::Name(it) => it.as_name(),
            ast::NameOrNameRef::NameRef(it) => it.as_name(),
        }
    }
}

impl AsName for tt::Ident {
    fn as_name(&self) -> Name {
        Name::resolve(&self.text)
    }
}

impl AsName for ast::FieldKind {
    fn as_name(&self) -> Name {
        match self {
            ast::FieldKind::Name(nr) => nr.as_name(),
            ast::FieldKind::Index(idx) => {
                // NOTE(review): a malformed index token silently becomes
                // field 0 — presumably error recovery; confirm callers agree.
                let idx = idx.text().parse::<usize>().unwrap_or(0);
                Name::new_tuple_field(idx)
            }
        }
    }
}

impl AsName for base_db::Dependency {
    fn as_name(&self) -> Name {
        Name::new_text(SmolStr::new(&*self.name))
    }
}
123 | |||
/// Pre-built constants for commonly used names (primitives, well-known path
/// segments, built-in macros and derives), plus the `name!` shorthand macro.
pub mod known {
    // Generates one `pub const $ident: Name` per listed identifier, using the
    // identifier's own spelling as the name text.
    macro_rules! known_names {
        ($($ident:ident),* $(,)?) => {
            $(
                #[allow(bad_style)]
                pub const $ident: super::Name =
                    super::Name::new_inline_ascii(stringify!($ident).as_bytes());
            )*
        };
    }

    known_names!(
        // Primitives
        isize,
        i8,
        i16,
        i32,
        i64,
        i128,
        usize,
        u8,
        u16,
        u32,
        u64,
        u128,
        f32,
        f64,
        bool,
        char,
        str,
        // Special names
        macro_rules,
        doc,
        // Components of known path (value or mod name)
        std,
        core,
        alloc,
        iter,
        ops,
        future,
        result,
        boxed,
        // Components of known path (type name)
        IntoIterator,
        Item,
        Try,
        Ok,
        Future,
        Result,
        Output,
        Target,
        Box,
        RangeFrom,
        RangeFull,
        RangeInclusive,
        RangeToInclusive,
        RangeTo,
        Range,
        Neg,
        Not,
        Index,
        // Builtin macros
        file,
        column,
        compile_error,
        line,
        assert,
        stringify,
        concat,
        include,
        include_bytes,
        include_str,
        format_args,
        format_args_nl,
        env,
        option_env,
        // Builtin derives
        Copy,
        Clone,
        Default,
        Debug,
        Hash,
        Ord,
        PartialOrd,
        Eq,
        PartialEq,
    );

    // self/Self cannot be used as an identifier
    pub const SELF_PARAM: super::Name = super::Name::new_inline_ascii(b"self");
    pub const SELF_TYPE: super::Name = super::Name::new_inline_ascii(b"Self");

    /// `name![foo]` expands to the pre-built constant for `foo`, with special
    /// cases for `self` and `Self` (which are not valid identifiers).
    #[macro_export]
    macro_rules! name {
        (self) => {
            $crate::name::known::SELF_PARAM
        };
        (Self) => {
            $crate::name::known::SELF_TYPE
        };
        ($ident:ident) => {
            $crate::name::known::$ident
        };
    }
}
229 | |||
230 | pub use crate::name; | ||
diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs new file mode 100644 index 000000000..80255ea32 --- /dev/null +++ b/crates/hir_expand/src/proc_macro.rs | |||
@@ -0,0 +1,143 @@ | |||
1 | //! Proc Macro Expander stub | ||
2 | |||
3 | use crate::{db::AstDatabase, LazyMacroId}; | ||
4 | use base_db::{CrateId, ProcMacroId}; | ||
5 | use tt::buffer::{Cursor, TokenBuffer}; | ||
6 | |||
/// Expander for a single procedural macro, identified by the crate that
/// defines it and its index within that crate's proc-macro list.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ProcMacroExpander {
    krate: CrateId,             // crate that defines the proc macro
    proc_macro_id: ProcMacroId, // index into that crate's `proc_macro` list
}
12 | |||
// Helper for constructing an `mbe::ExpandError::ProcMacroError` from a
// format string, with or without format arguments.
macro_rules! err {
    ($fmt:literal, $($tt:tt),*) => {
        mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown(format!($fmt, $($tt),*)))
    };
    ($fmt:literal) => {
        mbe::ExpandError::ProcMacroError(tt::ExpansionError::Unknown($fmt.to_string()))
    }
}
21 | |||
22 | impl ProcMacroExpander { | ||
23 | pub fn new(krate: CrateId, proc_macro_id: ProcMacroId) -> ProcMacroExpander { | ||
24 | ProcMacroExpander { krate, proc_macro_id } | ||
25 | } | ||
26 | |||
27 | pub fn expand( | ||
28 | self, | ||
29 | db: &dyn AstDatabase, | ||
30 | _id: LazyMacroId, | ||
31 | tt: &tt::Subtree, | ||
32 | ) -> Result<tt::Subtree, mbe::ExpandError> { | ||
33 | let krate_graph = db.crate_graph(); | ||
34 | let proc_macro = krate_graph[self.krate] | ||
35 | .proc_macro | ||
36 | .get(self.proc_macro_id.0 as usize) | ||
37 | .clone() | ||
38 | .ok_or_else(|| err!("No derive macro found."))?; | ||
39 | |||
40 | let tt = remove_derive_attrs(tt) | ||
41 | .ok_or_else(|| err!("Fail to remove derive for custom derive"))?; | ||
42 | |||
43 | proc_macro.expander.expand(&tt, None).map_err(mbe::ExpandError::from) | ||
44 | } | ||
45 | } | ||
46 | |||
47 | fn eat_punct(cursor: &mut Cursor, c: char) -> bool { | ||
48 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() { | ||
49 | if punct.char == c { | ||
50 | *cursor = cursor.bump(); | ||
51 | return true; | ||
52 | } | ||
53 | } | ||
54 | false | ||
55 | } | ||
56 | |||
57 | fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool { | ||
58 | if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() { | ||
59 | if Some(kind) == subtree.delimiter_kind() { | ||
60 | *cursor = cursor.bump_subtree(); | ||
61 | return true; | ||
62 | } | ||
63 | } | ||
64 | false | ||
65 | } | ||
66 | |||
67 | fn eat_ident(cursor: &mut Cursor, t: &str) -> bool { | ||
68 | if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() { | ||
69 | if t == ident.text.as_str() { | ||
70 | *cursor = cursor.bump(); | ||
71 | return true; | ||
72 | } | ||
73 | } | ||
74 | false | ||
75 | } | ||
76 | |||
/// Returns a copy of `tt` with all top-level `#[derive(..)]` attributes removed.
///
/// Walks the top-level token trees; whenever `#` (optionally followed by `!`)
/// and a bracketed subtree whose first token is the identifier `derive` are
/// found, the entire attribute is skipped.  Every other token tree is copied
/// into the result unchanged.  Returns `None` if a token tree cannot be read
/// at the current cursor position.
fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
    let buffer = TokenBuffer::new(&tt.token_trees);
    let mut p = buffer.begin();
    let mut result = tt::Subtree::default();

    while !p.eof() {
        // Remember the start so we can either skip or copy the whole run.
        let curr = p;

        if eat_punct(&mut p, '#') {
            // Also tolerate inner-attribute syntax `#![derive(..)]`.
            eat_punct(&mut p, '!');
            let parent = p;
            if eat_subtree(&mut p, tt::DelimiterKind::Bracket) {
                if eat_ident(&mut p, "derive") {
                    // It is a derive attribute: jump past the whole
                    // `[...]` subtree and drop everything matched so far.
                    p = parent.bump();
                    continue;
                }
            }
        }

        // Not a derive attribute: keep this token tree verbatim and advance.
        result.token_trees.push(curr.token_tree()?.clone());
        p = curr.bump();
    }

    Some(result)
}
102 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use test_utils::assert_eq_text;

    #[test]
    fn test_remove_derive_attrs() {
        // NOTE(review): the token ids in the expected output appear to be
        // sequential token indices — the gap (4 -> 15) covers the two
        // stripped `#[derive(..)]` attributes; confirm against
        // `mbe::parse_to_token_tree`.
        let tt = mbe::parse_to_token_tree(
            r#"
#[allow(unused)]
#[derive(Copy)]
#[derive(Hello)]
struct A {
bar: u32
}
"#,
        )
        .unwrap()
        .0;
        let result = format!("{:#?}", remove_derive_attrs(&tt).unwrap());

        // Only the non-derive attribute and the item itself survive.
        assert_eq_text!(
            &result,
            r#"
SUBTREE $
PUNCH # [alone] 0
SUBTREE [] 1
IDENT allow 2
SUBTREE () 3
IDENT unused 4
IDENT struct 15
IDENT A 16
SUBTREE {} 17
IDENT bar 18
PUNCH : [alone] 19
IDENT u32 20
"#
            .trim()
        );
    }
}
diff --git a/crates/hir_expand/src/quote.rs b/crates/hir_expand/src/quote.rs new file mode 100644 index 000000000..219bc2097 --- /dev/null +++ b/crates/hir_expand/src/quote.rs | |||
@@ -0,0 +1,282 @@ | |||
1 | //! A simplified version of quote-crate like quasi quote macro | ||
2 | |||
// A helper quasi-quote macro: each rule produces either a `Vec<tt::TokenTree>`
// or a `tt::Subtree`; the `IntoTt` trait normalizes between the two forms.
// FIXME:
// 1. Not all puncts are handled
// 2. #()* pattern repetition not supported now
//    * But we can do it manually, see `test_quote_derive_copy_hack`
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
    () => {
        Vec::<tt::TokenTree>::new()
    };

    // Internal rule: wrap the quoted tokens in a subtree with the given
    // delimiter kind (`Brace` / `Bracket` / `Parenthesis`).
    ( @SUBTREE $delim:ident $($tt:tt)* ) => {
        {
            let children = $crate::__quote!($($tt)*);
            tt::Subtree {
                delimiter: Some(tt::Delimiter {
                    kind: tt::DelimiterKind::$delim,
                    id: tt::TokenId::unspecified(),
                }),
                token_trees: $crate::quote::IntoTt::to_tokens(children),
            }
        }
    };

    // Internal rule: a single stand-alone punctuation token.
    ( @PUNCT $first:literal ) => {
        {
            vec![
                tt::Leaf::Punct(tt::Punct {
                    char: $first,
                    spacing: tt::Spacing::Alone,
                    id: tt::TokenId::unspecified(),
                }).into()
            ]
        }
    };

    // Internal rule: a two-character punctuation (e.g. `->`); the first
    // character is marked `Joint` so the pair stays glued together.
    ( @PUNCT $first:literal, $sec:literal ) => {
        {
            vec![
                tt::Leaf::Punct(tt::Punct {
                    char: $first,
                    spacing: tt::Spacing::Joint,
                    id: tt::TokenId::unspecified(),
                }).into(),
                tt::Leaf::Punct(tt::Punct {
                    char: $sec,
                    spacing: tt::Spacing::Alone,
                    id: tt::TokenId::unspecified(),
                }).into()
            ]
        }
    };

    // hash variable: `#var` interpolates `var` via `ToTokenTree`.
    ( # $first:ident $($tail:tt)* ) => {
        {
            let token = $crate::quote::ToTokenTree::to_token($first);
            let mut tokens = vec![token.into()];
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
            tokens.append(&mut tail_tokens);
            tokens
        }
    };

    // `##var` splices every element of an iterator of convertible items.
    ( ## $first:ident $($tail:tt)* ) => {
        {
            let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
            tokens.append(&mut tail_tokens);
            tokens
        }
    };

    // Brace
    ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
    // Bracket
    ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
    // Parenthesis
    ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };

    // Literal
    ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
    // Ident
    ( $tt:ident ) => {
        vec![ {
            tt::Leaf::Ident(tt::Ident {
                text: stringify!($tt).into(),
                id: tt::TokenId::unspecified(),
            }).into()
        }]
    };

    // Puncts
    // FIXME: Not all puncts are handled
    ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
    ( & ) => {$crate::__quote!(@PUNCT '&')};
    ( , ) => {$crate::__quote!(@PUNCT ',')};
    ( : ) => {$crate::__quote!(@PUNCT ':')};
    ( ; ) => {$crate::__quote!(@PUNCT ';')};
    ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
    ( . ) => {$crate::__quote!(@PUNCT '.')};
    ( < ) => {$crate::__quote!(@PUNCT '<')};
    ( > ) => {$crate::__quote!(@PUNCT '>')};

    // Fallback: quote the first token, recurse on the rest, concatenate.
    ( $first:tt $($tail:tt)+ ) => {
        {
            let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));

            tokens.append(&mut tail_tokens);
            tokens
        }
    };
}
118 | |||
/// Quasi-quotation macro: builds a `tt::Subtree` from the given tokens.
///
/// FIXME: this should probably be implemented as a proc-macro.
#[macro_export]
macro_rules! quote {
    ( $($tt:tt)* ) => {
        $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
    }
}
127 | |||
/// Normalizes the two intermediate results produced by `__quote!` — a flat
/// token list or a subtree — into whichever form the caller needs.
pub(crate) trait IntoTt {
    /// Converts the value into a single subtree.
    fn to_subtree(self) -> tt::Subtree;
    /// Converts the value into a flat list of token trees.
    fn to_tokens(self) -> Vec<tt::TokenTree>;
}
132 | |||
133 | impl IntoTt for Vec<tt::TokenTree> { | ||
134 | fn to_subtree(self) -> tt::Subtree { | ||
135 | tt::Subtree { delimiter: None, token_trees: self } | ||
136 | } | ||
137 | |||
138 | fn to_tokens(self) -> Vec<tt::TokenTree> { | ||
139 | self | ||
140 | } | ||
141 | } | ||
142 | |||
143 | impl IntoTt for tt::Subtree { | ||
144 | fn to_subtree(self) -> tt::Subtree { | ||
145 | self | ||
146 | } | ||
147 | |||
148 | fn to_tokens(self) -> Vec<tt::TokenTree> { | ||
149 | vec![tt::TokenTree::Subtree(self)] | ||
150 | } | ||
151 | } | ||
152 | |||
/// Types that can be interpolated into a quote via `#var` / `##var`.
pub(crate) trait ToTokenTree {
    fn to_token(self) -> tt::TokenTree;
}
156 | |||
// A token tree interpolates as itself.
impl ToTokenTree for tt::TokenTree {
    fn to_token(self) -> tt::TokenTree {
        self
    }
}

// A subtree interpolates as a single token tree wrapping it.
impl ToTokenTree for tt::Subtree {
    fn to_token(self) -> tt::TokenTree {
        self.into()
    }
}
168 | |||
// Generates `ToTokenTree` impls for both `$ty` and `&$ty`: `$this` names the
// receiver and `$im` is a block producing something convertible to `tt::Leaf`.
macro_rules! impl_to_to_tokentrees {
    ($($ty:ty => $this:ident $im:block);*) => {
        $(
            impl ToTokenTree for $ty {
                fn to_token($this) -> tt::TokenTree {
                    let leaf: tt::Leaf = $im.into();
                    leaf.into()
                }
            }

            // Borrowed version: clones the result of `$im` before conversion.
            impl ToTokenTree for &$ty {
                fn to_token($this) -> tt::TokenTree {
                    let leaf: tt::Leaf = $im.clone().into();
                    leaf.into()
                }
            }
        )*
    }
}
188 | |||
// Leaf-convertible types: integers become literals, strings become quoted
// (debug-escaped) literals, and the `tt` leaf types pass through unchanged.
impl_to_to_tokentrees! {
    u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
    usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
    i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
    tt::Leaf => self { self };
    tt::Literal => self { self };
    tt::Ident => self { self };
    tt::Punct => self { self };
    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
}
200 | |||
#[cfg(test)]
mod tests {
    #[test]
    fn test_quote_delimiters() {
        // Each delimiter kind round-trips through the `@SUBTREE` rule.
        assert_eq!(quote!({}).to_string(), "{}");
        assert_eq!(quote!(()).to_string(), "()");
        assert_eq!(quote!([]).to_string(), "[]");
    }

    #[test]
    fn test_quote_idents() {
        assert_eq!(quote!(32).to_string(), "32");
        assert_eq!(quote!(struct).to_string(), "struct");
    }

    #[test]
    fn test_quote_hash_simple_literal() {
        let a = 20;
        assert_eq!(quote!(#a).to_string(), "20");
        // Interpolated strings come out quoted (and escaped).
        let s: String = "hello".into();
        assert_eq!(quote!(#s).to_string(), "\"hello\"");
    }

    // Helper: identifier leaf with an unspecified token id.
    fn mk_ident(name: &str) -> tt::Ident {
        tt::Ident { text: name.into(), id: tt::TokenId::unspecified() }
    }

    #[test]
    fn test_quote_hash_token_tree() {
        let a = mk_ident("hello");

        let quoted = quote!(#a);
        assert_eq!(quoted.to_string(), "hello");
        let t = format!("{:?}", quoted);
        // 4294967295 == u32::MAX is the "unspecified" token id.
        assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
    }

    #[test]
    fn test_quote_simple_derive_copy() {
        let name = mk_ident("Foo");

        let quoted = quote! {
            impl Clone for #name {
                fn clone(&self) -> Self {
                    Self {}
                }
            }
        };

        assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {}}}");
    }

    #[test]
    fn test_quote_derive_copy_hack() {
        // Workaround for the missing `#()*` repetition support: build the
        // repeated field list manually and splice it in as one subtree.
        // Assume the given struct is:
        // struct Foo {
        //     name: String,
        //     id: u32,
        // }
        let struct_name = mk_ident("Foo");
        let fields = [mk_ident("name"), mk_ident("id")];
        let fields = fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees).flatten();

        let list = tt::Subtree {
            delimiter: Some(tt::Delimiter {
                kind: tt::DelimiterKind::Brace,
                id: tt::TokenId::unspecified(),
            }),
            token_trees: fields.collect(),
        };

        let quoted = quote! {
            impl Clone for #struct_name {
                fn clone(&self) -> Self {
                    Self #list
                }
            }
        };

        assert_eq!(quoted.to_string(), "impl Clone for Foo {fn clone (& self) -> Self {Self {name : self . name . clone () , id : self . id . clone () ,}}}");
    }
}
diff --git a/crates/hir_expand/src/test_db.rs b/crates/hir_expand/src/test_db.rs new file mode 100644 index 000000000..86a5d867e --- /dev/null +++ b/crates/hir_expand/src/test_db.rs | |||
@@ -0,0 +1,49 @@ | |||
1 | //! Database used for testing `hir_expand`. | ||
2 | |||
3 | use std::{ | ||
4 | fmt, panic, | ||
5 | sync::{Arc, Mutex}, | ||
6 | }; | ||
7 | |||
8 | use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate}; | ||
9 | use rustc_hash::FxHashSet; | ||
10 | |||
/// In-memory database for `hir_expand` tests, implementing the source and
/// AST query groups.
#[salsa::database(
    base_db::SourceDatabaseExtStorage,
    base_db::SourceDatabaseStorage,
    crate::db::AstDatabaseStorage
)]
#[derive(Default)]
pub struct TestDB {
    storage: salsa::Storage<TestDB>,
    // When `Some`, salsa events are recorded here (see `salsa_event` below).
    events: Mutex<Option<Vec<salsa::Event>>>,
}
21 | |||
22 | impl fmt::Debug for TestDB { | ||
23 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
24 | f.debug_struct("TestDB").finish() | ||
25 | } | ||
26 | } | ||
27 | |||
28 | impl salsa::Database for TestDB { | ||
29 | fn salsa_event(&self, event: salsa::Event) { | ||
30 | let mut events = self.events.lock().unwrap(); | ||
31 | if let Some(events) = &mut *events { | ||
32 | events.push(event); | ||
33 | } | ||
34 | } | ||
35 | } | ||
36 | |||
// Marker impl so `&TestDB` can cross `catch_unwind` boundaries in tests.
impl panic::RefUnwindSafe for TestDB {}
38 | |||
// Delegate all file-loading queries to `base_db`'s default
// `FileLoaderDelegate` implementation.
impl FileLoader for TestDB {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(anchor, path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}