Diffstat (limited to 'crates/ra_hir')
-rw-r--r--   crates/ra_hir/Cargo.toml                 |  17
-rw-r--r--   crates/ra_hir/src/arena.rs               |  66
-rw-r--r--   crates/ra_hir/src/db.rs                  |  66
-rw-r--r--   crates/ra_hir/src/function/mod.rs        | 190
-rw-r--r--   crates/ra_hir/src/function/scope.rs      | 447
-rw-r--r--   crates/ra_hir/src/lib.rs                 | 141
-rw-r--r--   crates/ra_hir/src/mock.rs                | 172
-rw-r--r--   crates/ra_hir/src/module/imp.rs          | 194
-rw-r--r--   crates/ra_hir/src/module/mod.rs          | 373
-rw-r--r--   crates/ra_hir/src/module/nameres.rs      | 434
-rw-r--r--   crates/ra_hir/src/path.rs                | 148
-rw-r--r--   crates/ra_hir/src/query_definitions.rs   | 154
12 files changed, 2402 insertions, 0 deletions
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
new file mode 100644
index 000000000..1b9e148b2
--- /dev/null
+++ b/crates/ra_hir/Cargo.toml
@@ -0,0 +1,17 @@
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_hir" | ||
4 | version = "0.1.0" | ||
5 | authors = ["Aleksey Kladov <[email protected]>"] | ||
6 | |||
7 | [dependencies] | ||
8 | log = "0.4.5" | ||
9 | relative-path = "0.4.0" | ||
10 | salsa = "0.8.0" | ||
11 | rustc-hash = "1.0" | ||
12 | parking_lot = "0.6.4" | ||
13 | id-arena = "2.0" | ||
14 | ra_syntax = { path = "../ra_syntax" } | ||
15 | ra_editor = { path = "../ra_editor" } | ||
16 | ra_db = { path = "../ra_db" } | ||
17 | test_utils = { path = "../test_utils" } | ||
diff --git a/crates/ra_hir/src/arena.rs b/crates/ra_hir/src/arena.rs
new file mode 100644
index 000000000..d4f9d9cb9
--- /dev/null
+++ b/crates/ra_hir/src/arena.rs
@@ -0,0 +1,66 @@
1 | //! A simple id-based arena, similar to https://github.com/fitzgen/id-arena. | ||
2 | //! We use our own version for more compact ids and to allow inherent impls | ||
3 | //! on Ids. | ||
4 | |||
5 | use std::{ | ||
6 | fmt, | ||
7 | hash::{Hash, Hasher}, | ||
8 | marker::PhantomData, | ||
9 | }; | ||
10 | |||
11 | pub struct Id<T> { | ||
12 | idx: u32, | ||
13 | _ty: PhantomData<fn() -> T>, | ||
14 | } | ||
15 | |||
16 | impl<T> fmt::Debug for Id<T> { | ||
17 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
18 | f.debug_tuple("Id").field(&self.idx).finish() | ||
19 | } | ||
20 | } | ||
21 | impl<T> Copy for Id<T> {} | ||
22 | impl<T> Clone for Id<T> { | ||
23 | fn clone(&self) -> Id<T> { | ||
24 | *self | ||
25 | } | ||
26 | } | ||
27 | |||
28 | impl<T> PartialEq for Id<T> { | ||
29 | fn eq(&self, other: &Id<T>) -> bool { | ||
30 | self.idx == other.idx | ||
31 | } | ||
32 | } | ||
33 | |||
34 | impl<T> Eq for Id<T> {} | ||
35 | |||
36 | impl<T> Hash for Id<T> { | ||
37 | fn hash<H: Hasher>(&self, h: &mut H) { | ||
38 | self.idx.hash(h); | ||
39 | } | ||
40 | } | ||
41 | |||
42 | #[derive(Debug, PartialEq, Eq)] | ||
43 | pub(crate) struct ArenaBehavior<T> { | ||
44 | _ty: PhantomData<T>, | ||
45 | } | ||
46 | |||
47 | impl<T> id_arena::ArenaBehavior for ArenaBehavior<T> { | ||
48 | type Id = Id<T>; | ||
49 | fn new_arena_id() -> u32 { | ||
50 | 0 | ||
51 | } | ||
52 | fn new_id(_arena_id: u32, index: usize) -> Id<T> { | ||
53 | Id { | ||
54 | idx: index as u32, | ||
55 | _ty: PhantomData, | ||
56 | } | ||
57 | } | ||
58 | fn index(id: Id<T>) -> usize { | ||
59 | id.idx as usize | ||
60 | } | ||
61 | fn arena_id(_id: Id<T>) -> u32 { | ||
62 | 0 | ||
63 | } | ||
64 | } | ||
65 | |||
66 | pub(crate) type Arena<T> = id_arena::Arena<T, ArenaBehavior<T>>; | ||
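
To orient readers of the rest of this diff, here is a minimal usage sketch of the `Arena`/`Id` pair defined above. The `Thing` payload type is invented for the example; `Default`, `alloc`, and indexing are the operations the other files in this commit actually exercise.

    // Illustration only; `Thing` is a hypothetical payload type.
    struct Thing {
        name: String,
    }

    fn arena_usage() {
        let mut arena: Arena<Thing> = Arena::default();
        // `alloc` returns a small, Copy `Id<Thing>` instead of a reference,
        // so ids can be stored freely in other data structures.
        let id: Id<Thing> = arena.alloc(Thing { name: "foo".to_string() });
        // An id indexes back into the arena to recover the stored value.
        assert_eq!(arena[id].name, "foo");
    }
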
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
new file mode 100644
index 000000000..2f01bae6d
--- /dev/null
+++ b/crates/ra_hir/src/db.rs
@@ -0,0 +1,66 @@
1 | use std::sync::Arc; | ||
2 | |||
3 | use ra_syntax::{ | ||
4 | SyntaxNode, | ||
5 | ast::FnDefNode, | ||
6 | }; | ||
7 | use ra_db::{SourceRootId, LocationIntener, SyntaxDatabase, FileId, Cancelable}; | ||
8 | |||
9 | use crate::{ | ||
10 | DefLoc, DefId, FnId, | ||
11 | SourceFileItems, SourceItemId, | ||
12 | query_definitions, | ||
13 | FnScopes, | ||
14 | module::{ModuleId, ModuleTree, ModuleSource, | ||
15 | nameres::{ItemMap, InputModuleItems}}, | ||
16 | }; | ||
17 | |||
18 | salsa::query_group! { | ||
19 | |||
20 | pub trait HirDatabase: SyntaxDatabase | ||
21 | + AsRef<LocationIntener<DefLoc, DefId>> | ||
22 | + AsRef<LocationIntener<SourceItemId, FnId>> | ||
23 | { | ||
24 | fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> { | ||
25 | type FnScopesQuery; | ||
26 | use fn query_definitions::fn_scopes; | ||
27 | } | ||
28 | fn fn_syntax(fn_id: FnId) -> FnDefNode { | ||
29 | type FnSyntaxQuery; | ||
30 | // Don't retain syntax trees in memory | ||
31 | storage dependencies; | ||
32 | use fn query_definitions::fn_syntax; | ||
33 | } | ||
34 | |||
35 | fn file_items(file_id: FileId) -> Arc<SourceFileItems> { | ||
36 | type SourceFileItemsQuery; | ||
37 | storage dependencies; | ||
38 | use fn query_definitions::file_items; | ||
39 | } | ||
40 | |||
41 | fn file_item(source_item_id: SourceItemId) -> SyntaxNode { | ||
42 | type FileItemQuery; | ||
43 | storage dependencies; | ||
44 | use fn query_definitions::file_item; | ||
45 | } | ||
46 | |||
47 | fn submodules(source: ModuleSource) -> Cancelable<Arc<Vec<crate::module::imp::Submodule>>> { | ||
48 | type SubmodulesQuery; | ||
49 | use fn query_definitions::submodules; | ||
50 | } | ||
51 | |||
52 | fn input_module_items(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<InputModuleItems>> { | ||
53 | type InputModuleItemsQuery; | ||
54 | use fn query_definitions::input_module_items; | ||
55 | } | ||
56 | fn item_map(source_root_id: SourceRootId) -> Cancelable<Arc<ItemMap>> { | ||
57 | type ItemMapQuery; | ||
58 | use fn query_definitions::item_map; | ||
59 | } | ||
60 | fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> { | ||
61 | type ModuleTreeQuery; | ||
62 | use fn crate::module::imp::module_tree; | ||
63 | } | ||
64 | } | ||
65 | |||
66 | } | ||
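
A sketch of how the queries declared above are consumed elsewhere in this commit (for example in function/mod.rs and module/mod.rs): `db` is any type implementing `HirDatabase`, salsa memoizes each call, and cancelable queries are propagated with `?`.

    use std::sync::Arc;

    fn query_usage(
        db: &impl HirDatabase,
        fn_id: FnId,
        source_root: SourceRootId,
    ) -> Cancelable<()> {
        // Plain memoized query: recomputed only when its inputs change.
        let scopes: Arc<FnScopes> = db.fn_scopes(fn_id);
        // Cancelable query: may return Err(..) if a newer change cancels the computation.
        let tree: Arc<ModuleTree> = db.module_tree(source_root)?;
        let _ = (scopes, tree);
        Ok(())
    }
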
diff --git a/crates/ra_hir/src/function/mod.rs b/crates/ra_hir/src/function/mod.rs
new file mode 100644
index 000000000..c8af2e54f
--- /dev/null
+++ b/crates/ra_hir/src/function/mod.rs
@@ -0,0 +1,190 @@
1 | mod scope; | ||
2 | |||
3 | use std::{ | ||
4 | cmp::{max, min}, | ||
5 | sync::Arc, | ||
6 | }; | ||
7 | |||
8 | use ra_syntax::{ | ||
9 | TextRange, TextUnit, SyntaxNodeRef, | ||
10 | ast::{self, AstNode, DocCommentsOwner, NameOwner}, | ||
11 | }; | ||
12 | use ra_db::FileId; | ||
13 | |||
14 | use crate::{ | ||
15 | FnId, HirDatabase, SourceItemId, | ||
16 | }; | ||
17 | |||
18 | pub use self::scope::FnScopes; | ||
19 | |||
20 | impl FnId { | ||
21 | pub fn get(db: &impl HirDatabase, file_id: FileId, fn_def: ast::FnDef) -> FnId { | ||
22 | let file_items = db.file_items(file_id); | ||
23 | let item_id = file_items.id_of(fn_def.syntax()); | ||
24 | let item_id = SourceItemId { file_id, item_id }; | ||
25 | FnId::from_loc(db, &item_id) | ||
26 | } | ||
27 | } | ||
28 | |||
29 | pub struct Function { | ||
30 | fn_id: FnId, | ||
31 | } | ||
32 | |||
33 | impl Function { | ||
34 | pub fn guess_from_source( | ||
35 | db: &impl HirDatabase, | ||
36 | file_id: FileId, | ||
37 | fn_def: ast::FnDef, | ||
38 | ) -> Function { | ||
39 | let fn_id = FnId::get(db, file_id, fn_def); | ||
40 | Function { fn_id } | ||
41 | } | ||
42 | |||
43 | pub fn guess_for_name_ref( | ||
44 | db: &impl HirDatabase, | ||
45 | file_id: FileId, | ||
46 | name_ref: ast::NameRef, | ||
47 | ) -> Option<Function> { | ||
48 | Function::guess_for_node(db, file_id, name_ref.syntax()) | ||
49 | } | ||
50 | |||
51 | pub fn guess_for_bind_pat( | ||
52 | db: &impl HirDatabase, | ||
53 | file_id: FileId, | ||
54 | bind_pat: ast::BindPat, | ||
55 | ) -> Option<Function> { | ||
56 | Function::guess_for_node(db, file_id, bind_pat.syntax()) | ||
57 | } | ||
58 | |||
59 | fn guess_for_node( | ||
60 | db: &impl HirDatabase, | ||
61 | file_id: FileId, | ||
62 | node: SyntaxNodeRef, | ||
63 | ) -> Option<Function> { | ||
64 | let fn_def = node.ancestors().find_map(ast::FnDef::cast)?; | ||
65 | let res = Function::guess_from_source(db, file_id, fn_def); | ||
66 | Some(res) | ||
67 | } | ||
68 | |||
69 | pub fn scope(&self, db: &impl HirDatabase) -> Arc<FnScopes> { | ||
70 | db.fn_scopes(self.fn_id) | ||
71 | } | ||
72 | |||
73 | pub fn signature_info(&self, db: &impl HirDatabase) -> Option<FnSignatureInfo> { | ||
74 | let syntax = db.fn_syntax(self.fn_id); | ||
75 | FnSignatureInfo::new(syntax.borrowed()) | ||
76 | } | ||
77 | } | ||
78 | |||
79 | #[derive(Debug, Clone)] | ||
80 | pub struct FnSignatureInfo { | ||
81 | pub name: String, | ||
82 | pub label: String, | ||
83 | pub ret_type: Option<String>, | ||
84 | pub params: Vec<String>, | ||
85 | pub doc: Option<String>, | ||
86 | } | ||
87 | |||
88 | impl FnSignatureInfo { | ||
89 | fn new(node: ast::FnDef) -> Option<Self> { | ||
90 | let name = node.name()?.text().to_string(); | ||
91 | |||
92 | let mut doc = None; | ||
93 | |||
94 | // Strip the body out for the label. | ||
95 | let mut label: String = if let Some(body) = node.body() { | ||
96 | let body_range = body.syntax().range(); | ||
97 | let label: String = node | ||
98 | .syntax() | ||
99 | .children() | ||
100 | .filter(|child| !child.range().is_subrange(&body_range)) | ||
101 | .map(|node| node.text().to_string()) | ||
102 | .collect(); | ||
103 | label | ||
104 | } else { | ||
105 | node.syntax().text().to_string() | ||
106 | }; | ||
107 | |||
108 | if let Some((comment_range, docs)) = FnSignatureInfo::extract_doc_comments(node) { | ||
109 | let comment_range = comment_range | ||
110 | .checked_sub(node.syntax().range().start()) | ||
111 | .unwrap(); | ||
112 | let start = comment_range.start().to_usize(); | ||
113 | let end = comment_range.end().to_usize(); | ||
114 | |||
115 | // Remove the comment from the label | ||
116 | label.replace_range(start..end, ""); | ||
117 | |||
118 | // Massage markdown | ||
119 | let mut processed_lines = Vec::new(); | ||
120 | let mut in_code_block = false; | ||
121 | for line in docs.lines() { | ||
122 | if line.starts_with("```") { | ||
123 | in_code_block = !in_code_block; | ||
124 | } | ||
125 | |||
126 | let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { | ||
127 | "```rust".into() | ||
128 | } else { | ||
129 | line.to_string() | ||
130 | }; | ||
131 | |||
132 | processed_lines.push(line); | ||
133 | } | ||
134 | |||
135 | if !processed_lines.is_empty() { | ||
136 | doc = Some(processed_lines.join("\n")); | ||
137 | } | ||
138 | } | ||
139 | |||
140 | let params = FnSignatureInfo::param_list(node); | ||
141 | let ret_type = node.ret_type().map(|r| r.syntax().text().to_string()); | ||
142 | |||
143 | Some(FnSignatureInfo { | ||
144 | name, | ||
145 | ret_type, | ||
146 | params, | ||
147 | label: label.trim().to_owned(), | ||
148 | doc, | ||
149 | }) | ||
150 | } | ||
151 | |||
152 | fn extract_doc_comments(node: ast::FnDef) -> Option<(TextRange, String)> { | ||
153 | if node.doc_comments().count() == 0 { | ||
154 | return None; | ||
155 | } | ||
156 | |||
157 | let comment_text = node.doc_comment_text(); | ||
158 | |||
159 | let (begin, end) = node | ||
160 | .doc_comments() | ||
161 | .map(|comment| comment.syntax().range()) | ||
162 | .map(|range| (range.start().to_usize(), range.end().to_usize())) | ||
163 | .fold((std::usize::MAX, std::usize::MIN), |acc, range| { | ||
164 | (min(acc.0, range.0), max(acc.1, range.1)) | ||
165 | }); | ||
166 | |||
167 | let range = TextRange::from_to(TextUnit::from_usize(begin), TextUnit::from_usize(end)); | ||
168 | |||
169 | Some((range, comment_text)) | ||
170 | } | ||
171 | |||
172 | fn param_list(node: ast::FnDef) -> Vec<String> { | ||
173 | let mut res = vec![]; | ||
174 | if let Some(param_list) = node.param_list() { | ||
175 | if let Some(self_param) = param_list.self_param() { | ||
176 | res.push(self_param.syntax().text().to_string()) | ||
177 | } | ||
178 | |||
179 | // Maybe use param.pat here? See if we can just extract the name? | ||
180 | //res.extend(param_list.params().map(|p| p.syntax().text().to_string())); | ||
181 | res.extend( | ||
182 | param_list | ||
183 | .params() | ||
184 | .filter_map(|p| p.pat()) | ||
185 | .map(|pat| pat.syntax().text().to_string()), | ||
186 | ); | ||
187 | } | ||
188 | res | ||
189 | } | ||
190 | } | ||
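
As a small example of how the pieces in this file fit together, the following sketch (all names come from the code above; `db` is any `HirDatabase` implementation) produces a one-line signature string for a function definition:

    fn signature_help(
        db: &impl HirDatabase,
        file_id: FileId,
        fn_def: ast::FnDef,
    ) -> Option<String> {
        let function = Function::guess_from_source(db, file_id, fn_def);
        let info = function.signature_info(db)?;
        // `info.label` already has the body and doc comments stripped; here we
        // just combine the extracted name and parameter list.
        Some(format!("{}({})", info.name, info.params.join(", ")))
    }
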
diff --git a/crates/ra_hir/src/function/scope.rs b/crates/ra_hir/src/function/scope.rs
new file mode 100644
index 000000000..863453291
--- /dev/null
+++ b/crates/ra_hir/src/function/scope.rs
@@ -0,0 +1,447 @@
1 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
2 | |||
3 | use ra_syntax::{ | ||
4 | AstNode, SmolStr, SyntaxNodeRef, TextRange, | ||
5 | algo::generate, | ||
6 | ast::{self, ArgListOwner, LoopBodyOwner, NameOwner}, | ||
7 | }; | ||
8 | use ra_db::LocalSyntaxPtr; | ||
9 | |||
10 | use crate::{ | ||
11 | arena::{Arena, Id}, | ||
12 | }; | ||
13 | |||
14 | pub(crate) type ScopeId = Id<ScopeData>; | ||
15 | |||
16 | #[derive(Debug, PartialEq, Eq)] | ||
17 | pub struct FnScopes { | ||
18 | pub self_param: Option<LocalSyntaxPtr>, | ||
19 | scopes: Arena<ScopeData>, | ||
20 | scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>, | ||
21 | } | ||
22 | |||
23 | #[derive(Debug, PartialEq, Eq)] | ||
24 | pub struct ScopeEntry { | ||
25 | name: SmolStr, | ||
26 | ptr: LocalSyntaxPtr, | ||
27 | } | ||
28 | |||
29 | #[derive(Debug, PartialEq, Eq)] | ||
30 | pub struct ScopeData { | ||
31 | parent: Option<ScopeId>, | ||
32 | entries: Vec<ScopeEntry>, | ||
33 | } | ||
34 | |||
35 | impl FnScopes { | ||
36 | pub fn new(fn_def: ast::FnDef) -> FnScopes { | ||
37 | let mut scopes = FnScopes { | ||
38 | self_param: fn_def | ||
39 | .param_list() | ||
40 | .and_then(|it| it.self_param()) | ||
41 | .map(|it| LocalSyntaxPtr::new(it.syntax())), | ||
42 | scopes: Arena::default(), | ||
43 | scope_for: FxHashMap::default(), | ||
44 | }; | ||
45 | let root = scopes.root_scope(); | ||
46 | scopes.add_params_bindings(root, fn_def.param_list()); | ||
47 | if let Some(body) = fn_def.body() { | ||
48 | compute_block_scopes(body, &mut scopes, root) | ||
49 | } | ||
50 | scopes | ||
51 | } | ||
52 | pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] { | ||
53 | &self.scopes[scope].entries | ||
54 | } | ||
55 | pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a { | ||
56 | generate(self.scope_for(node), move |&scope| { | ||
57 | self.scopes[scope].parent | ||
58 | }) | ||
59 | } | ||
60 | pub fn resolve_local_name<'a>(&'a self, name_ref: ast::NameRef) -> Option<&'a ScopeEntry> { | ||
61 | let mut shadowed = FxHashSet::default(); | ||
62 | let ret = self | ||
63 | .scope_chain(name_ref.syntax()) | ||
64 | .flat_map(|scope| self.entries(scope).iter()) | ||
65 | .filter(|entry| shadowed.insert(entry.name())) | ||
66 | .filter(|entry| entry.name() == &name_ref.text()) | ||
67 | .nth(0); | ||
68 | ret | ||
69 | } | ||
70 | |||
71 | pub fn find_all_refs(&self, pat: ast::BindPat) -> Vec<ReferenceDescriptor> { | ||
72 | let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); | ||
73 | let name_ptr = LocalSyntaxPtr::new(pat.syntax()); | ||
74 | let refs: Vec<_> = fn_def | ||
75 | .syntax() | ||
76 | .descendants() | ||
77 | .filter_map(ast::NameRef::cast) | ||
78 | .filter(|name_ref| match self.resolve_local_name(*name_ref) { | ||
79 | None => false, | ||
80 | Some(entry) => entry.ptr() == name_ptr, | ||
81 | }) | ||
82 | .map(|name_ref| ReferenceDescriptor { | ||
83 | name: name_ref.syntax().text().to_string(), | ||
84 | range: name_ref.syntax().range(), | ||
85 | }) | ||
86 | .collect(); | ||
87 | |||
88 | refs | ||
89 | } | ||
90 | |||
91 | fn root_scope(&mut self) -> ScopeId { | ||
92 | self.scopes.alloc(ScopeData { | ||
93 | parent: None, | ||
94 | entries: vec![], | ||
95 | }) | ||
96 | } | ||
97 | fn new_scope(&mut self, parent: ScopeId) -> ScopeId { | ||
98 | self.scopes.alloc(ScopeData { | ||
99 | parent: Some(parent), | ||
100 | entries: vec![], | ||
101 | }) | ||
102 | } | ||
103 | fn add_bindings(&mut self, scope: ScopeId, pat: ast::Pat) { | ||
104 | let entries = pat | ||
105 | .syntax() | ||
106 | .descendants() | ||
107 | .filter_map(ast::BindPat::cast) | ||
108 | .filter_map(ScopeEntry::new); | ||
109 | self.scopes[scope].entries.extend(entries); | ||
110 | } | ||
111 | fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) { | ||
112 | params | ||
113 | .into_iter() | ||
114 | .flat_map(|it| it.params()) | ||
115 | .filter_map(|it| it.pat()) | ||
116 | .for_each(|it| self.add_bindings(scope, it)); | ||
117 | } | ||
118 | fn set_scope(&mut self, node: SyntaxNodeRef, scope: ScopeId) { | ||
119 | self.scope_for.insert(LocalSyntaxPtr::new(node), scope); | ||
120 | } | ||
121 | fn scope_for(&self, node: SyntaxNodeRef) -> Option<ScopeId> { | ||
122 | node.ancestors() | ||
123 | .map(LocalSyntaxPtr::new) | ||
124 | .filter_map(|it| self.scope_for.get(&it).map(|&scope| scope)) | ||
125 | .next() | ||
126 | } | ||
127 | } | ||
128 | |||
129 | impl ScopeEntry { | ||
130 | fn new(pat: ast::BindPat) -> Option<ScopeEntry> { | ||
131 | let name = pat.name()?; | ||
132 | let res = ScopeEntry { | ||
133 | name: name.text(), | ||
134 | ptr: LocalSyntaxPtr::new(pat.syntax()), | ||
135 | }; | ||
136 | Some(res) | ||
137 | } | ||
138 | pub fn name(&self) -> &SmolStr { | ||
139 | &self.name | ||
140 | } | ||
141 | pub fn ptr(&self) -> LocalSyntaxPtr { | ||
142 | self.ptr | ||
143 | } | ||
144 | } | ||
145 | |||
146 | fn compute_block_scopes(block: ast::Block, scopes: &mut FnScopes, mut scope: ScopeId) { | ||
147 | for stmt in block.statements() { | ||
148 | match stmt { | ||
149 | ast::Stmt::LetStmt(stmt) => { | ||
150 | if let Some(expr) = stmt.initializer() { | ||
151 | scopes.set_scope(expr.syntax(), scope); | ||
152 | compute_expr_scopes(expr, scopes, scope); | ||
153 | } | ||
154 | scope = scopes.new_scope(scope); | ||
155 | if let Some(pat) = stmt.pat() { | ||
156 | scopes.add_bindings(scope, pat); | ||
157 | } | ||
158 | } | ||
159 | ast::Stmt::ExprStmt(expr_stmt) => { | ||
160 | if let Some(expr) = expr_stmt.expr() { | ||
161 | scopes.set_scope(expr.syntax(), scope); | ||
162 | compute_expr_scopes(expr, scopes, scope); | ||
163 | } | ||
164 | } | ||
165 | } | ||
166 | } | ||
167 | if let Some(expr) = block.expr() { | ||
168 | scopes.set_scope(expr.syntax(), scope); | ||
169 | compute_expr_scopes(expr, scopes, scope); | ||
170 | } | ||
171 | } | ||
172 | |||
173 | fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) { | ||
174 | match expr { | ||
175 | ast::Expr::IfExpr(e) => { | ||
176 | let cond_scope = e | ||
177 | .condition() | ||
178 | .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); | ||
179 | if let Some(block) = e.then_branch() { | ||
180 | compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); | ||
181 | } | ||
182 | if let Some(block) = e.else_branch() { | ||
183 | compute_block_scopes(block, scopes, scope); | ||
184 | } | ||
185 | } | ||
186 | ast::Expr::BlockExpr(e) => { | ||
187 | if let Some(block) = e.block() { | ||
188 | compute_block_scopes(block, scopes, scope); | ||
189 | } | ||
190 | } | ||
191 | ast::Expr::LoopExpr(e) => { | ||
192 | if let Some(block) = e.loop_body() { | ||
193 | compute_block_scopes(block, scopes, scope); | ||
194 | } | ||
195 | } | ||
196 | ast::Expr::WhileExpr(e) => { | ||
197 | let cond_scope = e | ||
198 | .condition() | ||
199 | .and_then(|cond| compute_cond_scopes(cond, scopes, scope)); | ||
200 | if let Some(block) = e.loop_body() { | ||
201 | compute_block_scopes(block, scopes, cond_scope.unwrap_or(scope)); | ||
202 | } | ||
203 | } | ||
204 | ast::Expr::ForExpr(e) => { | ||
205 | if let Some(expr) = e.iterable() { | ||
206 | compute_expr_scopes(expr, scopes, scope); | ||
207 | } | ||
208 | let mut scope = scope; | ||
209 | if let Some(pat) = e.pat() { | ||
210 | scope = scopes.new_scope(scope); | ||
211 | scopes.add_bindings(scope, pat); | ||
212 | } | ||
213 | if let Some(block) = e.loop_body() { | ||
214 | compute_block_scopes(block, scopes, scope); | ||
215 | } | ||
216 | } | ||
217 | ast::Expr::LambdaExpr(e) => { | ||
218 | let scope = scopes.new_scope(scope); | ||
219 | scopes.add_params_bindings(scope, e.param_list()); | ||
220 | if let Some(body) = e.body() { | ||
221 | scopes.set_scope(body.syntax(), scope); | ||
222 | compute_expr_scopes(body, scopes, scope); | ||
223 | } | ||
224 | } | ||
225 | ast::Expr::CallExpr(e) => { | ||
226 | compute_call_scopes(e.expr(), e.arg_list(), scopes, scope); | ||
227 | } | ||
228 | ast::Expr::MethodCallExpr(e) => { | ||
229 | compute_call_scopes(e.expr(), e.arg_list(), scopes, scope); | ||
230 | } | ||
231 | ast::Expr::MatchExpr(e) => { | ||
232 | if let Some(expr) = e.expr() { | ||
233 | compute_expr_scopes(expr, scopes, scope); | ||
234 | } | ||
235 | for arm in e.match_arm_list().into_iter().flat_map(|it| it.arms()) { | ||
236 | let scope = scopes.new_scope(scope); | ||
237 | for pat in arm.pats() { | ||
238 | scopes.add_bindings(scope, pat); | ||
239 | } | ||
240 | if let Some(expr) = arm.expr() { | ||
241 | compute_expr_scopes(expr, scopes, scope); | ||
242 | } | ||
243 | } | ||
244 | } | ||
245 | _ => expr | ||
246 | .syntax() | ||
247 | .children() | ||
248 | .filter_map(ast::Expr::cast) | ||
249 | .for_each(|expr| compute_expr_scopes(expr, scopes, scope)), | ||
250 | }; | ||
251 | |||
252 | fn compute_call_scopes( | ||
253 | receiver: Option<ast::Expr>, | ||
254 | arg_list: Option<ast::ArgList>, | ||
255 | scopes: &mut FnScopes, | ||
256 | scope: ScopeId, | ||
257 | ) { | ||
258 | arg_list | ||
259 | .into_iter() | ||
260 | .flat_map(|it| it.args()) | ||
261 | .chain(receiver) | ||
262 | .for_each(|expr| compute_expr_scopes(expr, scopes, scope)); | ||
263 | } | ||
264 | |||
265 | fn compute_cond_scopes( | ||
266 | cond: ast::Condition, | ||
267 | scopes: &mut FnScopes, | ||
268 | scope: ScopeId, | ||
269 | ) -> Option<ScopeId> { | ||
270 | if let Some(expr) = cond.expr() { | ||
271 | compute_expr_scopes(expr, scopes, scope); | ||
272 | } | ||
273 | if let Some(pat) = cond.pat() { | ||
274 | let s = scopes.new_scope(scope); | ||
275 | scopes.add_bindings(s, pat); | ||
276 | Some(s) | ||
277 | } else { | ||
278 | None | ||
279 | } | ||
280 | } | ||
281 | } | ||
282 | |||
283 | #[derive(Debug)] | ||
284 | pub struct ReferenceDescriptor { | ||
285 | pub range: TextRange, | ||
286 | pub name: String, | ||
287 | } | ||
288 | |||
289 | #[cfg(test)] | ||
290 | mod tests { | ||
291 | use ra_editor::find_node_at_offset; | ||
292 | use ra_syntax::SourceFileNode; | ||
293 | use test_utils::extract_offset; | ||
294 | |||
295 | use super::*; | ||
296 | |||
297 | fn do_check(code: &str, expected: &[&str]) { | ||
298 | let (off, code) = extract_offset(code); | ||
299 | let code = { | ||
300 | let mut buf = String::new(); | ||
301 | let off = u32::from(off) as usize; | ||
302 | buf.push_str(&code[..off]); | ||
303 | buf.push_str("marker"); | ||
304 | buf.push_str(&code[off..]); | ||
305 | buf | ||
306 | }; | ||
307 | let file = SourceFileNode::parse(&code); | ||
308 | let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); | ||
309 | let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); | ||
310 | let scopes = FnScopes::new(fn_def); | ||
311 | let actual = scopes | ||
312 | .scope_chain(marker.syntax()) | ||
313 | .flat_map(|scope| scopes.entries(scope)) | ||
314 | .map(|it| it.name()) | ||
315 | .collect::<Vec<_>>(); | ||
316 | assert_eq!(actual.as_slice(), expected); | ||
317 | } | ||
318 | |||
319 | #[test] | ||
320 | fn test_lambda_scope() { | ||
321 | do_check( | ||
322 | r" | ||
323 | fn quux(foo: i32) { | ||
324 | let f = |bar, baz: i32| { | ||
325 | <|> | ||
326 | }; | ||
327 | }", | ||
328 | &["bar", "baz", "foo"], | ||
329 | ); | ||
330 | } | ||
331 | |||
332 | #[test] | ||
333 | fn test_call_scope() { | ||
334 | do_check( | ||
335 | r" | ||
336 | fn quux() { | ||
337 | f(|x| <|> ); | ||
338 | }", | ||
339 | &["x"], | ||
340 | ); | ||
341 | } | ||
342 | |||
343 | #[test] | ||
344 | fn test_metod_call_scope() { | ||
345 | do_check( | ||
346 | r" | ||
347 | fn quux() { | ||
348 | z.f(|x| <|> ); | ||
349 | }", | ||
350 | &["x"], | ||
351 | ); | ||
352 | } | ||
353 | |||
354 | #[test] | ||
355 | fn test_loop_scope() { | ||
356 | do_check( | ||
357 | r" | ||
358 | fn quux() { | ||
359 | loop { | ||
360 | let x = (); | ||
361 | <|> | ||
362 | }; | ||
363 | }", | ||
364 | &["x"], | ||
365 | ); | ||
366 | } | ||
367 | |||
368 | #[test] | ||
369 | fn test_match() { | ||
370 | do_check( | ||
371 | r" | ||
372 | fn quux() { | ||
373 | match () { | ||
374 | Some(x) => { | ||
375 | <|> | ||
376 | } | ||
377 | }; | ||
378 | }", | ||
379 | &["x"], | ||
380 | ); | ||
381 | } | ||
382 | |||
383 | #[test] | ||
384 | fn test_shadow_variable() { | ||
385 | do_check( | ||
386 | r" | ||
387 | fn foo(x: String) { | ||
388 | let x : &str = &x<|>; | ||
389 | }", | ||
390 | &["x"], | ||
391 | ); | ||
392 | } | ||
393 | |||
394 | fn do_check_local_name(code: &str, expected_offset: u32) { | ||
395 | let (off, code) = extract_offset(code); | ||
396 | let file = SourceFileNode::parse(&code); | ||
397 | let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap(); | ||
398 | let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); | ||
399 | |||
400 | let scopes = FnScopes::new(fn_def); | ||
401 | |||
402 | let local_name_entry = scopes.resolve_local_name(name_ref).unwrap(); | ||
403 | let local_name = local_name_entry.ptr().resolve(&file); | ||
404 | let expected_name = | ||
405 | find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap(); | ||
406 | assert_eq!(local_name.range(), expected_name.syntax().range()); | ||
407 | } | ||
408 | |||
409 | #[test] | ||
410 | fn test_resolve_local_name() { | ||
411 | do_check_local_name( | ||
412 | r#" | ||
413 | fn foo(x: i32, y: u32) { | ||
414 | { | ||
415 | let z = x * 2; | ||
416 | } | ||
417 | { | ||
418 | let t = x<|> * 3; | ||
419 | } | ||
420 | }"#, | ||
421 | 21, | ||
422 | ); | ||
423 | } | ||
424 | |||
425 | #[test] | ||
426 | fn test_resolve_local_name_declaration() { | ||
427 | do_check_local_name( | ||
428 | r#" | ||
429 | fn foo(x: String) { | ||
430 | let x : &str = &x<|>; | ||
431 | }"#, | ||
432 | 21, | ||
433 | ); | ||
434 | } | ||
435 | |||
436 | #[test] | ||
437 | fn test_resolve_local_name_shadow() { | ||
438 | do_check_local_name( | ||
439 | r" | ||
440 | fn foo(x: String) { | ||
441 | let x : &str = &x; | ||
442 | x<|> | ||
443 | }", | ||
444 | 46, | ||
445 | ); | ||
446 | } | ||
447 | } | ||
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
new file mode 100644
index 000000000..e7b6a81f4
--- /dev/null
+++ b/crates/ra_hir/src/lib.rs
@@ -0,0 +1,141 @@
1 | //! HIR (previously known as descriptors) provides a high-level OO access to Rust | ||
2 | //! code. | ||
3 | //! | ||
4 | //! The principal difference between HIR and syntax trees is that HIR is bound | ||
5 | //! to a particular crate instance. That is, it has cfg flags and features | ||
6 | //! applied. So, the relation between syntax and HIR is many-to-one. | ||
7 | |||
8 | macro_rules! ctry { | ||
9 | ($expr:expr) => { | ||
10 | match $expr { | ||
11 | None => return Ok(None), | ||
12 | Some(it) => it, | ||
13 | } | ||
14 | }; | ||
15 | } | ||
16 | |||
17 | pub mod db; | ||
18 | #[cfg(test)] | ||
19 | mod mock; | ||
20 | mod query_definitions; | ||
21 | mod function; | ||
22 | mod module; | ||
23 | mod path; | ||
24 | mod arena; | ||
25 | |||
26 | use std::ops::Index; | ||
27 | |||
28 | use ra_syntax::{SyntaxNodeRef, SyntaxNode}; | ||
29 | use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable}; | ||
30 | |||
31 | use crate::{ | ||
32 | db::HirDatabase, | ||
33 | arena::{Arena, Id}, | ||
34 | }; | ||
35 | |||
36 | pub use self::{ | ||
37 | path::{Path, PathKind}, | ||
38 | module::{Module, ModuleId, Problem, nameres::ItemMap}, | ||
39 | function::{Function, FnScopes}, | ||
40 | }; | ||
41 | |||
42 | pub use self::function::FnSignatureInfo; | ||
43 | |||
44 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
45 | pub struct FnId(u32); | ||
46 | ra_db::impl_numeric_id!(FnId); | ||
47 | |||
48 | impl FnId { | ||
49 | pub fn from_loc( | ||
50 | db: &impl AsRef<LocationIntener<SourceItemId, FnId>>, | ||
51 | loc: &SourceItemId, | ||
52 | ) -> FnId { | ||
53 | db.as_ref().loc2id(loc) | ||
54 | } | ||
55 | pub fn loc(self, db: &impl AsRef<LocationIntener<SourceItemId, FnId>>) -> SourceItemId { | ||
56 | db.as_ref().id2loc(self) | ||
57 | } | ||
58 | } | ||
59 | |||
60 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
61 | pub struct DefId(u32); | ||
62 | ra_db::impl_numeric_id!(DefId); | ||
63 | |||
64 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] | ||
65 | pub enum DefLoc { | ||
66 | Module { | ||
67 | id: ModuleId, | ||
68 | source_root: SourceRootId, | ||
69 | }, | ||
70 | Item { | ||
71 | source_item_id: SourceItemId, | ||
72 | }, | ||
73 | } | ||
74 | |||
75 | impl DefId { | ||
76 | pub fn loc(self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefLoc { | ||
77 | db.as_ref().id2loc(self) | ||
78 | } | ||
79 | } | ||
80 | |||
81 | impl DefLoc { | ||
82 | pub fn id(&self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefId { | ||
83 | db.as_ref().loc2id(&self) | ||
84 | } | ||
85 | } | ||
86 | |||
87 | pub enum Def { | ||
88 | Module(Module), | ||
89 | Item, | ||
90 | } | ||
91 | |||
92 | impl DefId { | ||
93 | pub fn resolve(self, db: &impl HirDatabase) -> Cancelable<Def> { | ||
94 | let loc = self.loc(db); | ||
95 | let res = match loc { | ||
96 | DefLoc::Module { id, source_root } => { | ||
97 | let descr = Module::new(db, source_root, id)?; | ||
98 | Def::Module(descr) | ||
99 | } | ||
100 | DefLoc::Item { .. } => Def::Item, | ||
101 | }; | ||
102 | Ok(res) | ||
103 | } | ||
104 | } | ||
105 | |||
106 | /// Identifier of an item within a specific file. This is stable over reparses, so | ||
107 | /// it's OK to use it as a salsa key/value. | ||
108 | pub(crate) type SourceFileItemId = Id<SyntaxNode>; | ||
109 | |||
110 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | ||
111 | pub struct SourceItemId { | ||
112 | file_id: FileId, | ||
113 | item_id: SourceFileItemId, | ||
114 | } | ||
115 | |||
116 | /// Maps items' `SyntaxNode`s to `SourceFileItemId`s and back. | ||
117 | #[derive(Debug, PartialEq, Eq, Default)] | ||
118 | pub struct SourceFileItems { | ||
119 | arena: Arena<SyntaxNode>, | ||
120 | } | ||
121 | |||
122 | impl SourceFileItems { | ||
123 | fn alloc(&mut self, item: SyntaxNode) -> SourceFileItemId { | ||
124 | self.arena.alloc(item) | ||
125 | } | ||
126 | pub fn id_of(&self, item: SyntaxNodeRef) -> SourceFileItemId { | ||
127 | let (id, _item) = self | ||
128 | .arena | ||
129 | .iter() | ||
130 | .find(|(_id, i)| i.borrowed() == item) | ||
131 | .unwrap(); | ||
132 | id | ||
133 | } | ||
134 | } | ||
135 | |||
136 | impl Index<SourceFileItemId> for SourceFileItems { | ||
137 | type Output = SyntaxNode; | ||
138 | fn index(&self, idx: SourceFileItemId) -> &SyntaxNode { | ||
139 | &self.arena[idx] | ||
140 | } | ||
141 | } | ||
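
The `FnId` and `DefId` types above follow the same interning pattern. A minimal sketch of the location-to-id round-trip, using only the `AsRef<LocationIntener<..>>` bound already required by the impls above:

    fn interning_roundtrip(db: &impl AsRef<LocationIntener<DefLoc, DefId>>, loc: DefLoc) {
        // Intern a location, obtaining a small copyable id...
        let id: DefId = loc.id(db);
        // ...and resolve the id back to the location it stands for.
        let back: DefLoc = id.loc(db);
        assert_eq!(loc, back);
    }
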
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
new file mode 100644
index 000000000..8e256b89f
--- /dev/null
+++ b/crates/ra_hir/src/mock.rs
@@ -0,0 +1,172 @@
1 | use std::sync::Arc; | ||
2 | |||
3 | use parking_lot::Mutex; | ||
4 | use salsa::{self, Database}; | ||
5 | use ra_db::{LocationIntener, BaseDatabase, FilePosition, mock::FileMap, FileId, WORKSPACE}; | ||
6 | use relative_path::RelativePathBuf; | ||
7 | use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset}; | ||
8 | |||
9 | use crate::{db, DefId, DefLoc, FnId, SourceItemId}; | ||
10 | |||
11 | #[derive(Debug)] | ||
12 | pub(crate) struct MockDatabase { | ||
13 | events: Mutex<Option<Vec<salsa::Event<MockDatabase>>>>, | ||
14 | runtime: salsa::Runtime<MockDatabase>, | ||
15 | id_maps: Arc<IdMaps>, | ||
16 | } | ||
17 | |||
18 | impl MockDatabase { | ||
19 | pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) { | ||
20 | let mut db = MockDatabase::default(); | ||
21 | |||
22 | let mut position = None; | ||
23 | let mut file_map = FileMap::default(); | ||
24 | for entry in parse_fixture(fixture) { | ||
25 | if entry.text.contains(CURSOR_MARKER) { | ||
26 | assert!( | ||
27 | position.is_none(), | ||
28 | "only one marker (<|>) per fixture is allowed" | ||
29 | ); | ||
30 | position = Some(db.add_file_with_position(&mut file_map, &entry.meta, &entry.text)); | ||
31 | } else { | ||
32 | db.add_file(&mut file_map, &entry.meta, &entry.text); | ||
33 | } | ||
34 | } | ||
35 | let position = position.expect("expected a marker (<|>)"); | ||
36 | let source_root = file_map.into_source_root(); | ||
37 | db.query_mut(ra_db::SourceRootQuery) | ||
38 | .set(WORKSPACE, Arc::new(source_root)); | ||
39 | (db, position) | ||
40 | } | ||
41 | |||
42 | fn add_file(&mut self, file_map: &mut FileMap, path: &str, text: &str) -> FileId { | ||
43 | assert!(path.starts_with('/')); | ||
44 | let path = RelativePathBuf::from_path(&path[1..]).unwrap(); | ||
45 | |||
46 | let file_id = file_map.add(path); | ||
47 | let text = Arc::new(text.to_string()); | ||
48 | self.query_mut(ra_db::FileTextQuery).set(file_id, text); | ||
49 | self.query_mut(ra_db::FileSourceRootQuery) | ||
50 | .set(file_id, WORKSPACE); | ||
51 | file_id | ||
52 | } | ||
53 | |||
54 | fn add_file_with_position( | ||
55 | &mut self, | ||
56 | file_map: &mut FileMap, | ||
57 | path: &str, | ||
58 | text: &str, | ||
59 | ) -> FilePosition { | ||
60 | let (offset, text) = extract_offset(text); | ||
61 | let file_id = self.add_file(file_map, path, &text); | ||
62 | FilePosition { file_id, offset } | ||
63 | } | ||
64 | } | ||
65 | |||
66 | #[derive(Debug, Default)] | ||
67 | struct IdMaps { | ||
68 | fns: LocationIntener<SourceItemId, FnId>, | ||
69 | defs: LocationIntener<DefLoc, DefId>, | ||
70 | } | ||
71 | |||
72 | impl salsa::Database for MockDatabase { | ||
73 | fn salsa_runtime(&self) -> &salsa::Runtime<MockDatabase> { | ||
74 | &self.runtime | ||
75 | } | ||
76 | |||
77 | fn salsa_event(&self, event: impl Fn() -> salsa::Event<MockDatabase>) { | ||
78 | let mut events = self.events.lock(); | ||
79 | if let Some(events) = &mut *events { | ||
80 | events.push(event()); | ||
81 | } | ||
82 | } | ||
83 | } | ||
84 | |||
85 | impl Default for MockDatabase { | ||
86 | fn default() -> MockDatabase { | ||
87 | let mut db = MockDatabase { | ||
88 | events: Default::default(), | ||
89 | runtime: salsa::Runtime::default(), | ||
90 | id_maps: Default::default(), | ||
91 | }; | ||
92 | db.query_mut(ra_db::SourceRootQuery) | ||
93 | .set(ra_db::WORKSPACE, Default::default()); | ||
94 | db.query_mut(ra_db::CrateGraphQuery) | ||
95 | .set((), Default::default()); | ||
96 | db.query_mut(ra_db::LibrariesQuery) | ||
97 | .set((), Default::default()); | ||
98 | db | ||
99 | } | ||
100 | } | ||
101 | |||
102 | impl salsa::ParallelDatabase for MockDatabase { | ||
103 | fn snapshot(&self) -> salsa::Snapshot<MockDatabase> { | ||
104 | salsa::Snapshot::new(MockDatabase { | ||
105 | events: Default::default(), | ||
106 | runtime: self.runtime.snapshot(self), | ||
107 | id_maps: self.id_maps.clone(), | ||
108 | }) | ||
109 | } | ||
110 | } | ||
111 | |||
112 | impl BaseDatabase for MockDatabase {} | ||
113 | |||
114 | impl AsRef<LocationIntener<DefLoc, DefId>> for MockDatabase { | ||
115 | fn as_ref(&self) -> &LocationIntener<DefLoc, DefId> { | ||
116 | &self.id_maps.defs | ||
117 | } | ||
118 | } | ||
119 | |||
120 | impl AsRef<LocationIntener<SourceItemId, FnId>> for MockDatabase { | ||
121 | fn as_ref(&self) -> &LocationIntener<SourceItemId, FnId> { | ||
122 | &self.id_maps.fns | ||
123 | } | ||
124 | } | ||
125 | |||
126 | impl MockDatabase { | ||
127 | pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<MockDatabase>> { | ||
128 | *self.events.lock() = Some(Vec::new()); | ||
129 | f(); | ||
130 | let events = self.events.lock().take().unwrap(); | ||
131 | events | ||
132 | } | ||
133 | |||
134 | pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> { | ||
135 | let events = self.log(f); | ||
136 | events | ||
137 | .into_iter() | ||
138 | .filter_map(|e| match e.kind { | ||
139 | // This is pretty horrible, but `Debug` is the only way to inspect | ||
140 | // QueryDescriptor at the moment. | ||
141 | salsa::EventKind::WillExecute { descriptor } => Some(format!("{:?}", descriptor)), | ||
142 | _ => None, | ||
143 | }) | ||
144 | .collect() | ||
145 | } | ||
146 | } | ||
147 | |||
148 | salsa::database_storage! { | ||
149 | pub(crate) struct MockDatabaseStorage for MockDatabase { | ||
150 | impl ra_db::FilesDatabase { | ||
151 | fn file_text() for ra_db::FileTextQuery; | ||
152 | fn file_source_root() for ra_db::FileSourceRootQuery; | ||
153 | fn source_root() for ra_db::SourceRootQuery; | ||
154 | fn libraries() for ra_db::LibrariesQuery; | ||
155 | fn crate_graph() for ra_db::CrateGraphQuery; | ||
156 | } | ||
157 | impl ra_db::SyntaxDatabase { | ||
158 | fn source_file() for ra_db::SourceFileQuery; | ||
159 | fn file_lines() for ra_db::FileLinesQuery; | ||
160 | } | ||
161 | impl db::HirDatabase { | ||
162 | fn module_tree() for db::ModuleTreeQuery; | ||
163 | fn fn_scopes() for db::FnScopesQuery; | ||
164 | fn file_items() for db::SourceFileItemsQuery; | ||
165 | fn file_item() for db::FileItemQuery; | ||
166 | fn input_module_items() for db::InputModuleItemsQuery; | ||
167 | fn item_map() for db::ItemMapQuery; | ||
168 | fn fn_syntax() for db::FnSyntaxQuery; | ||
169 | fn submodules() for db::SubmodulesQuery; | ||
170 | } | ||
171 | } | ||
172 | } | ||
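
A sketch of how a test is expected to drive this mock database. The `//- /path` fixture header is an assumption about the `test_utils::parse_fixture` format (not shown in this diff); the `<|>` cursor marker is the one asserted on above.

    #[test]
    fn resolves_something_at_cursor() {
        let (db, position) = MockDatabase::with_position(
            "
            //- /lib.rs
            mod foo;
            fn f() { <|> }
            ",
        );
        // `db` implements HirDatabase, so `position` can be handed to HIR
        // entry points such as Module::guess_from_position(&db, position).
        let _ = (db, position);
    }
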
diff --git a/crates/ra_hir/src/module/imp.rs b/crates/ra_hir/src/module/imp.rs
new file mode 100644
index 000000000..76ea129a7
--- /dev/null
+++ b/crates/ra_hir/src/module/imp.rs
@@ -0,0 +1,194 @@
1 | use std::sync::Arc; | ||
2 | |||
3 | use ra_syntax::{ | ||
4 | ast::{self, NameOwner}, | ||
5 | SmolStr, | ||
6 | }; | ||
7 | use relative_path::RelativePathBuf; | ||
8 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
9 | use ra_db::{SourceRoot, SourceRootId, FileResolverImp, Cancelable, FileId,}; | ||
10 | |||
11 | use crate::{ | ||
12 | HirDatabase, | ||
13 | }; | ||
14 | |||
15 | use super::{ | ||
16 | LinkData, LinkId, ModuleData, ModuleId, ModuleSource, | ||
17 | ModuleTree, Problem, | ||
18 | }; | ||
19 | |||
20 | #[derive(Clone, Hash, PartialEq, Eq, Debug)] | ||
21 | pub enum Submodule { | ||
22 | Declaration(SmolStr), | ||
23 | Definition(SmolStr, ModuleSource), | ||
24 | } | ||
25 | |||
26 | impl Submodule { | ||
27 | fn name(&self) -> &SmolStr { | ||
28 | match self { | ||
29 | Submodule::Declaration(name) => name, | ||
30 | Submodule::Definition(name, _) => name, | ||
31 | } | ||
32 | } | ||
33 | } | ||
34 | |||
35 | pub(crate) fn modules<'a>( | ||
36 | root: impl ast::ModuleItemOwner<'a>, | ||
37 | ) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> { | ||
38 | root.items() | ||
39 | .filter_map(|item| match item { | ||
40 | ast::ModuleItem::Module(m) => Some(m), | ||
41 | _ => None, | ||
42 | }) | ||
43 | .filter_map(|module| { | ||
44 | let name = module.name()?.text(); | ||
45 | Some((name, module)) | ||
46 | }) | ||
47 | } | ||
48 | |||
49 | pub(crate) fn module_tree( | ||
50 | db: &impl HirDatabase, | ||
51 | source_root: SourceRootId, | ||
52 | ) -> Cancelable<Arc<ModuleTree>> { | ||
53 | db.check_canceled()?; | ||
54 | let res = create_module_tree(db, source_root)?; | ||
55 | Ok(Arc::new(res)) | ||
56 | } | ||
57 | |||
58 | fn create_module_tree<'a>( | ||
59 | db: &impl HirDatabase, | ||
60 | source_root: SourceRootId, | ||
61 | ) -> Cancelable<ModuleTree> { | ||
62 | let mut tree = ModuleTree::default(); | ||
63 | |||
64 | let mut roots = FxHashMap::default(); | ||
65 | let mut visited = FxHashSet::default(); | ||
66 | |||
67 | let source_root = db.source_root(source_root); | ||
68 | for &file_id in source_root.files.iter() { | ||
69 | let source = ModuleSource::SourceFile(file_id); | ||
70 | if visited.contains(&source) { | ||
71 | continue; // TODO: use explicit crate_roots here | ||
72 | } | ||
73 | assert!(!roots.contains_key(&file_id)); | ||
74 | let module_id = build_subtree( | ||
75 | db, | ||
76 | &source_root, | ||
77 | &mut tree, | ||
78 | &mut visited, | ||
79 | &mut roots, | ||
80 | None, | ||
81 | source, | ||
82 | )?; | ||
83 | roots.insert(file_id, module_id); | ||
84 | } | ||
85 | Ok(tree) | ||
86 | } | ||
87 | |||
88 | fn build_subtree( | ||
89 | db: &impl HirDatabase, | ||
90 | source_root: &SourceRoot, | ||
91 | tree: &mut ModuleTree, | ||
92 | visited: &mut FxHashSet<ModuleSource>, | ||
93 | roots: &mut FxHashMap<FileId, ModuleId>, | ||
94 | parent: Option<LinkId>, | ||
95 | source: ModuleSource, | ||
96 | ) -> Cancelable<ModuleId> { | ||
97 | visited.insert(source); | ||
98 | let id = tree.push_mod(ModuleData { | ||
99 | source, | ||
100 | parent, | ||
101 | children: Vec::new(), | ||
102 | }); | ||
103 | for sub in db.submodules(source)?.iter() { | ||
104 | let link = tree.push_link(LinkData { | ||
105 | name: sub.name().clone(), | ||
106 | owner: id, | ||
107 | points_to: Vec::new(), | ||
108 | problem: None, | ||
109 | }); | ||
110 | |||
111 | let (points_to, problem) = match sub { | ||
112 | Submodule::Declaration(name) => { | ||
113 | let (points_to, problem) = | ||
114 | resolve_submodule(source, &name, &source_root.file_resolver); | ||
115 | let points_to = points_to | ||
116 | .into_iter() | ||
117 | .map(|file_id| match roots.remove(&file_id) { | ||
118 | Some(module_id) => { | ||
119 | tree.mods[module_id].parent = Some(link); | ||
120 | Ok(module_id) | ||
121 | } | ||
122 | None => build_subtree( | ||
123 | db, | ||
124 | source_root, | ||
125 | tree, | ||
126 | visited, | ||
127 | roots, | ||
128 | Some(link), | ||
129 | ModuleSource::SourceFile(file_id), | ||
130 | ), | ||
131 | }) | ||
132 | .collect::<Cancelable<Vec<_>>>()?; | ||
133 | (points_to, problem) | ||
134 | } | ||
135 | Submodule::Definition(_name, submodule_source) => { | ||
136 | let points_to = build_subtree( | ||
137 | db, | ||
138 | source_root, | ||
139 | tree, | ||
140 | visited, | ||
141 | roots, | ||
142 | Some(link), | ||
143 | *submodule_source, | ||
144 | )?; | ||
145 | (vec![points_to], None) | ||
146 | } | ||
147 | }; | ||
148 | |||
149 | tree.links[link].points_to = points_to; | ||
150 | tree.links[link].problem = problem; | ||
151 | } | ||
152 | Ok(id) | ||
153 | } | ||
154 | |||
155 | fn resolve_submodule( | ||
156 | source: ModuleSource, | ||
157 | name: &SmolStr, | ||
158 | file_resolver: &FileResolverImp, | ||
159 | ) -> (Vec<FileId>, Option<Problem>) { | ||
160 | let file_id = match source { | ||
161 | ModuleSource::SourceFile(it) => it, | ||
162 | ModuleSource::Module(..) => { | ||
163 | // TODO | ||
164 | return (Vec::new(), None); | ||
165 | } | ||
166 | }; | ||
167 | let mod_name = file_resolver.file_stem(file_id); | ||
168 | let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main"; | ||
169 | |||
170 | let file_mod = RelativePathBuf::from(format!("../{}.rs", name)); | ||
171 | let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name)); | ||
172 | let points_to: Vec<FileId>; | ||
173 | let problem: Option<Problem>; | ||
174 | if is_dir_owner { | ||
175 | points_to = [&file_mod, &dir_mod] | ||
176 | .iter() | ||
177 | .filter_map(|path| file_resolver.resolve(file_id, path)) | ||
178 | .collect(); | ||
179 | problem = if points_to.is_empty() { | ||
180 | Some(Problem::UnresolvedModule { | ||
181 | candidate: file_mod, | ||
182 | }) | ||
183 | } else { | ||
184 | None | ||
185 | } | ||
186 | } else { | ||
187 | points_to = Vec::new(); | ||
188 | problem = Some(Problem::NotDirOwner { | ||
189 | move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)), | ||
190 | candidate: file_mod, | ||
191 | }); | ||
192 | } | ||
193 | (points_to, problem) | ||
194 | } | ||
diff --git a/crates/ra_hir/src/module/mod.rs b/crates/ra_hir/src/module/mod.rs
new file mode 100644
index 000000000..3ae83d8cb
--- /dev/null
+++ b/crates/ra_hir/src/module/mod.rs
@@ -0,0 +1,373 @@
1 | pub(super) mod imp; | ||
2 | pub(super) mod nameres; | ||
3 | |||
4 | use std::sync::Arc; | ||
5 | |||
6 | use ra_editor::find_node_at_offset; | ||
7 | |||
8 | use ra_syntax::{ | ||
9 | algo::generate, | ||
10 | ast::{self, AstNode, NameOwner}, | ||
11 | SmolStr, SyntaxNode, | ||
12 | }; | ||
13 | use ra_db::{SourceRootId, FileId, FilePosition, Cancelable}; | ||
14 | use relative_path::RelativePathBuf; | ||
15 | |||
16 | use crate::{ | ||
17 | DefLoc, DefId, Path, PathKind, HirDatabase, SourceItemId, | ||
18 | arena::{Arena, Id}, | ||
19 | }; | ||
20 | |||
21 | pub use self::nameres::ModuleScope; | ||
22 | |||
23 | /// `Module` is the API entry point to get all the information | ||
24 | /// about a particular module. | ||
25 | #[derive(Debug, Clone)] | ||
26 | pub struct Module { | ||
27 | tree: Arc<ModuleTree>, | ||
28 | source_root_id: SourceRootId, | ||
29 | module_id: ModuleId, | ||
30 | } | ||
31 | |||
32 | impl Module { | ||
33 | /// Lookup `Module` by `FileId`. Note that this is an inherently | ||
34 | /// lossy transformation: in general, a single source might correspond to | ||
35 | /// several modules. | ||
36 | pub fn guess_from_file_id( | ||
37 | db: &impl HirDatabase, | ||
38 | file_id: FileId, | ||
39 | ) -> Cancelable<Option<Module>> { | ||
40 | Module::guess_from_source(db, file_id, ModuleSource::SourceFile(file_id)) | ||
41 | } | ||
42 | |||
43 | /// Lookup `Module` by position in the source code. Note that this | ||
44 | /// is an inherently lossy transformation: in general, a single source might | ||
45 | /// correspond to several modules. | ||
46 | pub fn guess_from_position( | ||
47 | db: &impl HirDatabase, | ||
48 | position: FilePosition, | ||
49 | ) -> Cancelable<Option<Module>> { | ||
50 | let file = db.source_file(position.file_id); | ||
51 | let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) | ||
52 | { | ||
53 | Some(m) if !m.has_semi() => ModuleSource::new_inline(db, position.file_id, m), | ||
54 | _ => ModuleSource::SourceFile(position.file_id), | ||
55 | }; | ||
56 | Module::guess_from_source(db, position.file_id, module_source) | ||
57 | } | ||
58 | |||
59 | fn guess_from_source( | ||
60 | db: &impl HirDatabase, | ||
61 | file_id: FileId, | ||
62 | module_source: ModuleSource, | ||
63 | ) -> Cancelable<Option<Module>> { | ||
64 | let source_root_id = db.file_source_root(file_id); | ||
65 | let module_tree = db.module_tree(source_root_id)?; | ||
66 | |||
67 | let res = match module_tree.any_module_for_source(module_source) { | ||
68 | None => None, | ||
69 | Some(module_id) => Some(Module { | ||
70 | tree: module_tree, | ||
71 | source_root_id, | ||
72 | module_id, | ||
73 | }), | ||
74 | }; | ||
75 | Ok(res) | ||
76 | } | ||
77 | |||
78 | pub(super) fn new( | ||
79 | db: &impl HirDatabase, | ||
80 | source_root_id: SourceRootId, | ||
81 | module_id: ModuleId, | ||
82 | ) -> Cancelable<Module> { | ||
83 | let module_tree = db.module_tree(source_root_id)?; | ||
84 | let res = Module { | ||
85 | tree: module_tree, | ||
86 | source_root_id, | ||
87 | module_id, | ||
88 | }; | ||
89 | Ok(res) | ||
90 | } | ||
91 | |||
92 | /// Returns the `mod foo;` or `mod foo {}` node which declared this module. | ||
93 | /// Returns `None` for the root module. | ||
94 | pub fn parent_link_source(&self, db: &impl HirDatabase) -> Option<(FileId, ast::ModuleNode)> { | ||
95 | let link = self.module_id.parent_link(&self.tree)?; | ||
96 | let file_id = link.owner(&self.tree).source(&self.tree).file_id(); | ||
97 | let src = link.bind_source(&self.tree, db); | ||
98 | Some((file_id, src)) | ||
99 | } | ||
100 | |||
101 | pub fn source(&self) -> ModuleSource { | ||
102 | self.module_id.source(&self.tree) | ||
103 | } | ||
104 | |||
105 | /// Parent module. Returns `None` if this is a root module. | ||
106 | pub fn parent(&self) -> Option<Module> { | ||
107 | let parent_id = self.module_id.parent(&self.tree)?; | ||
108 | Some(Module { | ||
109 | module_id: parent_id, | ||
110 | ..self.clone() | ||
111 | }) | ||
112 | } | ||
113 | |||
114 | /// The root of the tree this module is part of | ||
115 | pub fn crate_root(&self) -> Module { | ||
116 | let root_id = self.module_id.crate_root(&self.tree); | ||
117 | Module { | ||
118 | module_id: root_id, | ||
119 | ..self.clone() | ||
120 | } | ||
121 | } | ||
122 | |||
123 | /// `name` is `None` for the crate's root module | ||
124 | pub fn name(&self) -> Option<SmolStr> { | ||
125 | let link = self.module_id.parent_link(&self.tree)?; | ||
126 | Some(link.name(&self.tree)) | ||
127 | } | ||
128 | |||
129 | pub fn def_id(&self, db: &impl HirDatabase) -> DefId { | ||
130 | let def_loc = DefLoc::Module { | ||
131 | id: self.module_id, | ||
132 | source_root: self.source_root_id, | ||
133 | }; | ||
134 | def_loc.id(db) | ||
135 | } | ||
136 | |||
137 | /// Finds a child module with the specified name. | ||
138 | pub fn child(&self, name: &str) -> Option<Module> { | ||
139 | let child_id = self.module_id.child(&self.tree, name)?; | ||
140 | Some(Module { | ||
141 | module_id: child_id, | ||
142 | ..self.clone() | ||
143 | }) | ||
144 | } | ||
145 | |||
146 | /// Returns a `ModuleScope`: a set of items, visible in this module. | ||
147 | pub fn scope(&self, db: &impl HirDatabase) -> Cancelable<ModuleScope> { | ||
148 | let item_map = db.item_map(self.source_root_id)?; | ||
149 | let res = item_map.per_module[&self.module_id].clone(); | ||
150 | Ok(res) | ||
151 | } | ||
152 | |||
153 | pub fn resolve_path(&self, db: &impl HirDatabase, path: Path) -> Cancelable<Option<DefId>> { | ||
154 | let mut curr = match path.kind { | ||
155 | PathKind::Crate => self.crate_root(), | ||
156 | PathKind::Self_ | PathKind::Plain => self.clone(), | ||
157 | PathKind::Super => ctry!(self.parent()), | ||
158 | } | ||
159 | .def_id(db); | ||
160 | |||
161 | let segments = path.segments; | ||
162 | for name in segments.iter() { | ||
163 | let module = match curr.loc(db) { | ||
164 | DefLoc::Module { id, source_root } => Module::new(db, source_root, id)?, | ||
165 | _ => return Ok(None), | ||
166 | }; | ||
167 | let scope = module.scope(db)?; | ||
168 | curr = ctry!(ctry!(scope.get(&name)).def_id); | ||
169 | } | ||
170 | Ok(Some(curr)) | ||
171 | } | ||
172 | |||
173 | pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> { | ||
174 | self.module_id.problems(&self.tree, db) | ||
175 | } | ||
176 | } | ||
177 | |||
178 | /// Physically, Rust source is organized as a set of files, but logically it is | ||
179 | /// organized as a tree of modules. Usually, a single file corresponds to a | ||
180 | /// single module, but this is not necessarily the case. | ||
181 | /// | ||
182 | /// `ModuleTree` encapsulates the logic of transitioning from the fuzzy world of files | ||
183 | /// (which can have multiple parents) to the precise world of modules (which | ||
184 | /// always have one parent). | ||
185 | #[derive(Default, Debug, PartialEq, Eq)] | ||
186 | pub struct ModuleTree { | ||
187 | mods: Arena<ModuleData>, | ||
188 | links: Arena<LinkData>, | ||
189 | } | ||
190 | |||
191 | impl ModuleTree { | ||
192 | pub(crate) fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a { | ||
193 | self.mods.iter().map(|(id, _)| id) | ||
194 | } | ||
195 | |||
196 | fn modules_for_source(&self, source: ModuleSource) -> Vec<ModuleId> { | ||
197 | self.mods | ||
198 | .iter() | ||
199 | .filter(|(_idx, it)| it.source == source) | ||
200 | .map(|(idx, _)| idx) | ||
201 | .collect() | ||
202 | } | ||
203 | |||
204 | fn any_module_for_source(&self, source: ModuleSource) -> Option<ModuleId> { | ||
205 | self.modules_for_source(source).pop() | ||
206 | } | ||
207 | } | ||
208 | |||
209 | /// `ModuleSource` is the syntax tree element that produced this module: | ||
210 | /// either a file, or an inline module. | ||
211 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] | ||
212 | pub enum ModuleSource { | ||
213 | SourceFile(FileId), | ||
214 | Module(SourceItemId), | ||
215 | } | ||
216 | |||
217 | /// An owned syntax node for a module. Unlike `ModuleSource`, | ||
218 | /// this holds onto the AST for the whole file. | ||
219 | pub(crate) enum ModuleSourceNode { | ||
220 | SourceFile(ast::SourceFileNode), | ||
221 | Module(ast::ModuleNode), | ||
222 | } | ||
223 | |||
224 | pub type ModuleId = Id<ModuleData>; | ||
225 | type LinkId = Id<LinkData>; | ||
226 | |||
227 | #[derive(Clone, Debug, Hash, PartialEq, Eq)] | ||
228 | pub enum Problem { | ||
229 | UnresolvedModule { | ||
230 | candidate: RelativePathBuf, | ||
231 | }, | ||
232 | NotDirOwner { | ||
233 | move_to: RelativePathBuf, | ||
234 | candidate: RelativePathBuf, | ||
235 | }, | ||
236 | } | ||
237 | |||
238 | impl ModuleId { | ||
239 | pub(crate) fn source(self, tree: &ModuleTree) -> ModuleSource { | ||
240 | tree.mods[self].source | ||
241 | } | ||
242 | fn parent_link(self, tree: &ModuleTree) -> Option<LinkId> { | ||
243 | tree.mods[self].parent | ||
244 | } | ||
245 | fn parent(self, tree: &ModuleTree) -> Option<ModuleId> { | ||
246 | let link = self.parent_link(tree)?; | ||
247 | Some(tree.links[link].owner) | ||
248 | } | ||
249 | fn crate_root(self, tree: &ModuleTree) -> ModuleId { | ||
250 | generate(Some(self), move |it| it.parent(tree)) | ||
251 | .last() | ||
252 | .unwrap() | ||
253 | } | ||
254 | fn child(self, tree: &ModuleTree, name: &str) -> Option<ModuleId> { | ||
255 | let link = tree.mods[self] | ||
256 | .children | ||
257 | .iter() | ||
258 | .map(|&it| &tree.links[it]) | ||
259 | .find(|it| it.name == name)?; | ||
260 | Some(*link.points_to.first()?) | ||
261 | } | ||
262 | fn children<'a>(self, tree: &'a ModuleTree) -> impl Iterator<Item = (SmolStr, ModuleId)> + 'a { | ||
263 | tree.mods[self].children.iter().filter_map(move |&it| { | ||
264 | let link = &tree.links[it]; | ||
265 | let module = *link.points_to.first()?; | ||
266 | Some((link.name.clone(), module)) | ||
267 | }) | ||
268 | } | ||
269 | fn problems(self, tree: &ModuleTree, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> { | ||
270 | tree.mods[self] | ||
271 | .children | ||
272 | .iter() | ||
273 | .filter_map(|&it| { | ||
274 | let p = tree.links[it].problem.clone()?; | ||
275 | let s = it.bind_source(tree, db); | ||
276 | let s = s.borrowed().name().unwrap().syntax().owned(); | ||
277 | Some((s, p)) | ||
278 | }) | ||
279 | .collect() | ||
280 | } | ||
281 | } | ||
282 | |||
283 | impl LinkId { | ||
284 | fn owner(self, tree: &ModuleTree) -> ModuleId { | ||
285 | tree.links[self].owner | ||
286 | } | ||
287 | fn name(self, tree: &ModuleTree) -> SmolStr { | ||
288 | tree.links[self].name.clone() | ||
289 | } | ||
290 | fn bind_source<'a>(self, tree: &ModuleTree, db: &impl HirDatabase) -> ast::ModuleNode { | ||
291 | let owner = self.owner(tree); | ||
292 | match owner.source(tree).resolve(db) { | ||
293 | ModuleSourceNode::SourceFile(root) => { | ||
294 | let ast = imp::modules(root.borrowed()) | ||
295 | .find(|(name, _)| name == &tree.links[self].name) | ||
296 | .unwrap() | ||
297 | .1; | ||
298 | ast.owned() | ||
299 | } | ||
300 | ModuleSourceNode::Module(it) => it, | ||
301 | } | ||
302 | } | ||
303 | } | ||
304 | |||
305 | #[derive(Debug, PartialEq, Eq, Hash)] | ||
306 | pub struct ModuleData { | ||
307 | source: ModuleSource, | ||
308 | parent: Option<LinkId>, | ||
309 | children: Vec<LinkId>, | ||
310 | } | ||
311 | |||
312 | impl ModuleSource { | ||
313 | pub(crate) fn new_inline( | ||
314 | db: &impl HirDatabase, | ||
315 | file_id: FileId, | ||
316 | module: ast::Module, | ||
317 | ) -> ModuleSource { | ||
318 | assert!(!module.has_semi()); | ||
319 | let items = db.file_items(file_id); | ||
320 | let item_id = items.id_of(module.syntax()); | ||
321 | let id = SourceItemId { file_id, item_id }; | ||
322 | ModuleSource::Module(id) | ||
323 | } | ||
324 | |||
325 | pub fn as_file(self) -> Option<FileId> { | ||
326 | match self { | ||
327 | ModuleSource::SourceFile(f) => Some(f), | ||
328 | ModuleSource::Module(..) => None, | ||
329 | } | ||
330 | } | ||
331 | |||
332 | pub fn file_id(self) -> FileId { | ||
333 | match self { | ||
334 | ModuleSource::SourceFile(f) => f, | ||
335 | ModuleSource::Module(source_item_id) => source_item_id.file_id, | ||
336 | } | ||
337 | } | ||
338 | |||
339 | pub(crate) fn resolve(self, db: &impl HirDatabase) -> ModuleSourceNode { | ||
340 | match self { | ||
341 | ModuleSource::SourceFile(file_id) => { | ||
342 | let syntax = db.source_file(file_id); | ||
343 | ModuleSourceNode::SourceFile(syntax.ast().owned()) | ||
344 | } | ||
345 | ModuleSource::Module(item_id) => { | ||
346 | let syntax = db.file_item(item_id); | ||
347 | let syntax = syntax.borrowed(); | ||
348 | let module = ast::Module::cast(syntax).unwrap(); | ||
349 | ModuleSourceNode::Module(module.owned()) | ||
350 | } | ||
351 | } | ||
352 | } | ||
353 | } | ||
354 | |||
355 | #[derive(Hash, Debug, PartialEq, Eq)] | ||
356 | struct LinkData { | ||
357 | owner: ModuleId, | ||
358 | name: SmolStr, | ||
359 | points_to: Vec<ModuleId>, | ||
360 | problem: Option<Problem>, | ||
361 | } | ||
362 | |||
363 | impl ModuleTree { | ||
364 | fn push_mod(&mut self, data: ModuleData) -> ModuleId { | ||
365 | self.mods.alloc(data) | ||
366 | } | ||
367 | fn push_link(&mut self, data: LinkData) -> LinkId { | ||
368 | let owner = data.owner; | ||
369 | let id = self.links.alloc(data); | ||
370 | self.mods[owner].children.push(id); | ||
371 | id | ||
372 | } | ||
373 | } | ||
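A toy, `Vec`-backed version of the `push_mod`/`push_link` wiring above, showing that allocating a link also registers it as a child of its owner module; the types are simplified stand-ins, not the crate's real arena ids:

    #[derive(Default)]
    struct ToyTree {
        mods: Vec<Vec<usize>>,       // per-module list of child link ids
        links: Vec<(usize, String)>, // (owner module, link name)
    }

    impl ToyTree {
        fn push_mod(&mut self) -> usize {
            self.mods.push(Vec::new());
            self.mods.len() - 1
        }
        fn push_link(&mut self, owner: usize, name: &str) -> usize {
            self.links.push((owner, name.to_string()));
            let id = self.links.len() - 1;
            self.mods[owner].push(id);
            id
        }
    }

    fn main() {
        let mut tree = ToyTree::default();
        let root = tree.push_mod();
        let link = tree.push_link(root, "foo");
        assert_eq!(tree.mods[root], vec![link]);
    }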
diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs new file mode 100644 index 000000000..8529e16b3 --- /dev/null +++ b/crates/ra_hir/src/module/nameres.rs | |||
@@ -0,0 +1,434 @@ | |||
1 | //! Name resolution algorithm. The end result of the algorithm is `ItemMap`: a | ||
2 | //! map which maps each module to its scope: the set of items visible in the | ||
3 | //! module. That is, we only resolve imports here; name resolution of item | ||
4 | //! bodies will be done in a separate step. | ||
5 | //! | ||
6 | //! Like rustc, we use an iterative per-crate algorithm: we start with scopes | ||
7 | //! containing only directly defined items, and then iteratively resolve | ||
8 | //! imports. | ||
9 | //! | ||
10 | //! To make this work nicely in IDE scenarios, we place `InputModuleItems` | ||
11 | //! in between raw syntax and name resolution. `InputModuleItems` are computed | ||
12 | //! using only the module's syntax, and consist of all directly defined items plus | ||
13 | //! imports. The plan is to make `InputModuleItems` independent of local | ||
14 | //! modifications (that is, typing inside a function should not change IMIs), | ||
15 | //! such that the results of name resolution can be preserved unless the module | ||
16 | //! structure itself is modified. | ||
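A toy illustration of that plan, under the assumption that only module-level structure matters: two versions of a file that differ only inside a function body yield the same items and imports, so the cached name-resolution result stays valid. The line-based "parser" below is a deliberate simplification, not the crate's syntax tree:

    fn toy_input_items(src: &str) -> Vec<String> {
        src.lines()
            .map(str::trim)
            .filter(|l| l.starts_with("mod ") || l.starts_with("use ") || l.starts_with("fn "))
            // keep only the item "header": everything before a body, if any
            .map(|l| l.split('{').next().unwrap().trim().to_string())
            .collect()
    }

    fn main() {
        let before = "mod foo;\nuse crate::foo::bar::Baz;\nfn f() -> i32 { 1 + 1 }";
        let after = "mod foo;\nuse crate::foo::bar::Baz;\nfn f() -> i32 { 92 }";
        // same top-level structure, so `InputModuleItems` would be unchanged
        assert_eq!(toy_input_items(before), toy_input_items(after));
    }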
17 | use std::{ | ||
18 | sync::Arc, | ||
19 | }; | ||
20 | |||
21 | use rustc_hash::FxHashMap; | ||
22 | use ra_syntax::{ | ||
23 | TextRange, | ||
24 | SmolStr, SyntaxKind::{self, *}, | ||
25 | ast::{self, AstNode} | ||
26 | }; | ||
27 | use ra_db::SourceRootId; | ||
28 | |||
29 | use crate::{ | ||
30 | Cancelable, FileId, | ||
31 | DefId, DefLoc, | ||
32 | SourceItemId, SourceFileItemId, SourceFileItems, | ||
33 | Path, PathKind, | ||
34 | HirDatabase, | ||
35 | module::{ModuleId, ModuleTree}, | ||
36 | }; | ||
37 | |||
38 | /// The item map is the result of name resolution. It contains, for each | ||
39 | /// module, the set of visible items. | ||
40 | #[derive(Default, Debug, PartialEq, Eq)] | ||
41 | pub struct ItemMap { | ||
42 | pub per_module: FxHashMap<ModuleId, ModuleScope>, | ||
43 | } | ||
44 | |||
45 | #[derive(Debug, Default, PartialEq, Eq, Clone)] | ||
46 | pub struct ModuleScope { | ||
47 | items: FxHashMap<SmolStr, Resolution>, | ||
48 | } | ||
49 | |||
50 | impl ModuleScope { | ||
51 | pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a SmolStr, &Resolution)> + 'a { | ||
52 | self.items.iter() | ||
53 | } | ||
54 | pub fn get(&self, name: &SmolStr) -> Option<&Resolution> { | ||
55 | self.items.get(name) | ||
56 | } | ||
57 | } | ||
58 | |||
59 | /// A set of items and imports declared inside a module, without relation to | ||
60 | /// other modules. | ||
61 | /// | ||
62 | /// This stands in between raw syntax and name resolution and allows us to avoid | ||
63 | /// recomputing name res: if `InputModuleItems` are the same, we can avoid | ||
64 | /// running name resolution. | ||
65 | #[derive(Debug, Default, PartialEq, Eq)] | ||
66 | pub struct InputModuleItems { | ||
67 | items: Vec<ModuleItem>, | ||
68 | imports: Vec<Import>, | ||
69 | } | ||
70 | |||
71 | #[derive(Debug, PartialEq, Eq)] | ||
72 | struct ModuleItem { | ||
73 | id: SourceFileItemId, | ||
74 | name: SmolStr, | ||
75 | kind: SyntaxKind, | ||
76 | vis: Vis, | ||
77 | } | ||
78 | |||
79 | #[derive(Debug, PartialEq, Eq)] | ||
80 | enum Vis { | ||
81 | // Priv, | ||
82 | Other, | ||
83 | } | ||
84 | |||
85 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
86 | struct Import { | ||
87 | path: Path, | ||
88 | kind: ImportKind, | ||
89 | } | ||
90 | |||
91 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | ||
92 | pub struct NamedImport { | ||
93 | pub file_item_id: SourceFileItemId, | ||
94 | pub relative_range: TextRange, | ||
95 | } | ||
96 | |||
97 | impl NamedImport { | ||
98 | pub fn range(&self, db: &impl HirDatabase, file_id: FileId) -> TextRange { | ||
99 | let source_item_id = SourceItemId { | ||
100 | file_id, | ||
101 | item_id: self.file_item_id, | ||
102 | }; | ||
103 | let syntax = db.file_item(source_item_id); | ||
104 | let offset = syntax.borrowed().range().start(); | ||
105 | self.relative_range + offset | ||
106 | } | ||
107 | } | ||
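The offset arithmetic in `range` above, in miniature: the stored range is relative to the start of the `use` item, so the absolute range is recovered by adding the item's start offset. Plain integers stand in for `TextRange` and text offsets here:

    fn main() {
        let item_start = 120u32;     // where the `use` item begins in the file
        let relative = (4u32, 7u32); // the imported name's range inside the item
        let absolute = (item_start + relative.0, item_start + relative.1);
        assert_eq!(absolute, (124, 127));
    }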
108 | |||
109 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
110 | enum ImportKind { | ||
111 | Glob, | ||
112 | Named(NamedImport), | ||
113 | } | ||
114 | |||
115 | /// Resolution is basically `DefId` atm, but it should account for stuff like | ||
116 | /// multiple namespaces, ambiguity and errors. | ||
117 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
118 | pub struct Resolution { | ||
119 | /// None for unresolved | ||
120 | pub def_id: Option<DefId>, | ||
121 | /// ident by which this is imported into the local scope. | ||
122 | pub import: Option<NamedImport>, | ||
123 | } | ||
124 | |||
125 | // #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] | ||
126 | // enum Namespace { | ||
127 | // Types, | ||
128 | // Values, | ||
129 | // } | ||
130 | |||
131 | // #[derive(Debug)] | ||
132 | // struct PerNs<T> { | ||
133 | // types: Option<T>, | ||
134 | // values: Option<T>, | ||
135 | // } | ||
136 | |||
137 | impl InputModuleItems { | ||
138 | pub(crate) fn new<'a>( | ||
139 | file_items: &SourceFileItems, | ||
140 | items: impl Iterator<Item = ast::ModuleItem<'a>>, | ||
141 | ) -> InputModuleItems { | ||
142 | let mut res = InputModuleItems::default(); | ||
143 | for item in items { | ||
144 | res.add_item(file_items, item); | ||
145 | } | ||
146 | res | ||
147 | } | ||
148 | |||
149 | fn add_item(&mut self, file_items: &SourceFileItems, item: ast::ModuleItem) -> Option<()> { | ||
150 | match item { | ||
151 | ast::ModuleItem::StructDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
152 | ast::ModuleItem::EnumDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
153 | ast::ModuleItem::FnDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
154 | ast::ModuleItem::TraitDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
155 | ast::ModuleItem::TypeDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
156 | ast::ModuleItem::ImplItem(_) => { | ||
157 | // impls don't define items | ||
158 | } | ||
159 | ast::ModuleItem::UseItem(it) => self.add_use_item(file_items, it), | ||
160 | ast::ModuleItem::ExternCrateItem(_) => { | ||
161 | // TODO | ||
162 | } | ||
163 | ast::ModuleItem::ConstDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
164 | ast::ModuleItem::StaticDef(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
165 | ast::ModuleItem::Module(it) => self.items.push(ModuleItem::new(file_items, it)?), | ||
166 | } | ||
167 | Some(()) | ||
168 | } | ||
169 | |||
170 | fn add_use_item(&mut self, file_items: &SourceFileItems, item: ast::UseItem) { | ||
171 | let file_item_id = file_items.id_of(item.syntax()); | ||
172 | let start_offset = item.syntax().range().start(); | ||
173 | Path::expand_use_item(item, |path, range| { | ||
174 | let kind = match range { | ||
175 | None => ImportKind::Glob, | ||
176 | Some(range) => ImportKind::Named(NamedImport { | ||
177 | file_item_id, | ||
178 | relative_range: range - start_offset, | ||
179 | }), | ||
180 | }; | ||
181 | self.imports.push(Import { kind, path }) | ||
182 | }) | ||
183 | } | ||
184 | } | ||
185 | |||
186 | impl ModuleItem { | ||
187 | fn new<'a>(file_items: &SourceFileItems, item: impl ast::NameOwner<'a>) -> Option<ModuleItem> { | ||
188 | let name = item.name()?.text(); | ||
189 | let kind = item.syntax().kind(); | ||
190 | let vis = Vis::Other; | ||
191 | let id = file_items.id_of(item.syntax()); | ||
192 | let res = ModuleItem { | ||
193 | id, | ||
194 | name, | ||
195 | kind, | ||
196 | vis, | ||
197 | }; | ||
198 | Some(res) | ||
199 | } | ||
200 | } | ||
201 | |||
202 | pub(crate) struct Resolver<'a, DB> { | ||
203 | pub(crate) db: &'a DB, | ||
204 | pub(crate) input: &'a FxHashMap<ModuleId, Arc<InputModuleItems>>, | ||
205 | pub(crate) source_root: SourceRootId, | ||
206 | pub(crate) module_tree: Arc<ModuleTree>, | ||
207 | pub(crate) result: ItemMap, | ||
208 | } | ||
209 | |||
210 | impl<'a, DB> Resolver<'a, DB> | ||
211 | where | ||
212 | DB: HirDatabase, | ||
213 | { | ||
214 | pub(crate) fn resolve(mut self) -> Cancelable<ItemMap> { | ||
215 | for (&module_id, items) in self.input.iter() { | ||
216 | self.populate_module(module_id, items) | ||
217 | } | ||
218 | |||
219 | for &module_id in self.input.keys() { | ||
220 | self.db.check_canceled()?; | ||
221 | self.resolve_imports(module_id); | ||
222 | } | ||
223 | Ok(self.result) | ||
224 | } | ||
225 | |||
226 | fn populate_module(&mut self, module_id: ModuleId, input: &InputModuleItems) { | ||
227 | let file_id = module_id.source(&self.module_tree).file_id(); | ||
228 | |||
229 | let mut module_items = ModuleScope::default(); | ||
230 | |||
231 | for import in input.imports.iter() { | ||
232 | if let Some(name) = import.path.segments.iter().last() { | ||
233 | if let ImportKind::Named(import) = import.kind { | ||
234 | module_items.items.insert( | ||
235 | name.clone(), | ||
236 | Resolution { | ||
237 | def_id: None, | ||
238 | import: Some(import), | ||
239 | }, | ||
240 | ); | ||
241 | } | ||
242 | } | ||
243 | } | ||
244 | |||
245 | for item in input.items.iter() { | ||
246 | if item.kind == MODULE { | ||
247 | // handle submodules separately | ||
248 | continue; | ||
249 | } | ||
250 | let def_loc = DefLoc::Item { | ||
251 | source_item_id: SourceItemId { | ||
252 | file_id, | ||
253 | item_id: item.id, | ||
254 | }, | ||
255 | }; | ||
256 | let def_id = def_loc.id(self.db); | ||
257 | let resolution = Resolution { | ||
258 | def_id: Some(def_id), | ||
259 | import: None, | ||
260 | }; | ||
261 | module_items.items.insert(item.name.clone(), resolution); | ||
262 | } | ||
263 | |||
264 | for (name, mod_id) in module_id.children(&self.module_tree) { | ||
265 | let def_loc = DefLoc::Module { | ||
266 | id: mod_id, | ||
267 | source_root: self.source_root, | ||
268 | }; | ||
269 | let def_id = def_loc.id(self.db); | ||
270 | let resolution = Resolution { | ||
271 | def_id: Some(def_id), | ||
272 | import: None, | ||
273 | }; | ||
274 | module_items.items.insert(name, resolution); | ||
275 | } | ||
276 | |||
277 | self.result.per_module.insert(module_id, module_items); | ||
278 | } | ||
279 | |||
280 | fn resolve_imports(&mut self, module_id: ModuleId) { | ||
281 | for import in self.input[&module_id].imports.iter() { | ||
282 | self.resolve_import(module_id, import); | ||
283 | } | ||
284 | } | ||
285 | |||
286 | fn resolve_import(&mut self, module_id: ModuleId, import: &Import) { | ||
287 | let ptr = match import.kind { | ||
288 | ImportKind::Glob => return, | ||
289 | ImportKind::Named(ptr) => ptr, | ||
290 | }; | ||
291 | |||
292 | let mut curr = match import.path.kind { | ||
293 | // TODO: handle extern crates | ||
294 | PathKind::Plain => return, | ||
295 | PathKind::Self_ => module_id, | ||
296 | PathKind::Super => { | ||
297 | match module_id.parent(&self.module_tree) { | ||
298 | Some(it) => it, | ||
299 | // TODO: error | ||
300 | None => return, | ||
301 | } | ||
302 | } | ||
303 | PathKind::Crate => module_id.crate_root(&self.module_tree), | ||
304 | }; | ||
305 | |||
306 | for (i, name) in import.path.segments.iter().enumerate() { | ||
307 | let is_last = i == import.path.segments.len() - 1; | ||
308 | |||
309 | let def_id = match self.result.per_module[&curr].items.get(name) { | ||
310 | None => return, | ||
311 | Some(res) => match res.def_id { | ||
312 | Some(it) => it, | ||
313 | None => return, | ||
314 | }, | ||
315 | }; | ||
316 | |||
317 | if !is_last { | ||
318 | curr = match def_id.loc(self.db) { | ||
319 | DefLoc::Module { id, .. } => id, | ||
320 | _ => return, | ||
321 | } | ||
322 | } else { | ||
323 | self.update(module_id, |items| { | ||
324 | let res = Resolution { | ||
325 | def_id: Some(def_id), | ||
326 | import: Some(ptr), | ||
327 | }; | ||
328 | items.items.insert(name.clone(), res); | ||
329 | }) | ||
330 | } | ||
331 | } | ||
332 | } | ||
333 | |||
334 | fn update(&mut self, module_id: ModuleId, f: impl FnOnce(&mut ModuleScope)) { | ||
335 | let module_items = self.result.per_module.get_mut(&module_id).unwrap(); | ||
336 | f(module_items) | ||
337 | } | ||
338 | } | ||
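A self-contained miniature of the `resolve_import` loop above: start from the module selected by the path kind, look each segment up in the current module's scope, and only the final segment yields the imported definition. All names and types below are simplified stand-ins:

    use std::collections::HashMap;

    fn main() {
        // crate root defines `foo`; `foo` defines `bar`; `bar` defines `Baz`
        let mut scopes: HashMap<&str, HashMap<&str, &str>> = HashMap::new();
        scopes.insert("crate", HashMap::from([("foo", "mod:foo")]));
        scopes.insert("mod:foo", HashMap::from([("bar", "mod:bar")]));
        scopes.insert("mod:bar", HashMap::from([("Baz", "def:Baz")]));

        // resolve `use crate::foo::bar::Baz`, starting at the crate root
        let segments = ["foo", "bar", "Baz"];
        let mut curr = "crate";
        let mut resolved = None;
        for (i, seg) in segments.iter().enumerate() {
            let is_last = i == segments.len() - 1;
            match scopes[curr].get(seg) {
                None => break, // unresolved; the real resolver may succeed on a later round
                Some(&def) if is_last => resolved = Some(def),
                Some(&def) => curr = def, // descend into the child module
            }
        }
        assert_eq!(resolved, Some("def:Baz"));
    }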
339 | |||
340 | #[cfg(test)] | ||
341 | mod tests { | ||
342 | use std::sync::Arc; | ||
343 | |||
344 | use salsa::Database; | ||
345 | use ra_db::FilesDatabase; | ||
346 | use ra_syntax::SmolStr; | ||
347 | |||
348 | use crate::{ | ||
349 | self as hir, | ||
350 | db::HirDatabase, | ||
351 | mock::MockDatabase, | ||
352 | }; | ||
353 | |||
354 | fn item_map(fixture: &str) -> (Arc<hir::ItemMap>, hir::ModuleId) { | ||
355 | let (db, pos) = MockDatabase::with_position(fixture); | ||
356 | let source_root = db.file_source_root(pos.file_id); | ||
357 | let module = hir::Module::guess_from_position(&db, pos).unwrap().unwrap(); | ||
358 | let module_id = module.module_id; | ||
359 | (db.item_map(source_root).unwrap(), module_id) | ||
360 | } | ||
361 | |||
362 | #[test] | ||
363 | fn test_item_map() { | ||
364 | let (item_map, module_id) = item_map( | ||
365 | " | ||
366 | //- /lib.rs | ||
367 | mod foo; | ||
368 | |||
369 | use crate::foo::bar::Baz; | ||
370 | <|> | ||
371 | |||
372 | //- /foo/mod.rs | ||
373 | pub mod bar; | ||
374 | |||
375 | //- /foo/bar.rs | ||
376 | pub struct Baz; | ||
377 | ", | ||
378 | ); | ||
379 | let name = SmolStr::from("Baz"); | ||
380 | let resolution = &item_map.per_module[&module_id].items[&name]; | ||
381 | assert!(resolution.def_id.is_some()); | ||
382 | } | ||
383 | |||
384 | #[test] | ||
385 | fn typing_inside_a_function_should_not_invalidate_item_map() { | ||
386 | let (mut db, pos) = MockDatabase::with_position( | ||
387 | " | ||
388 | //- /lib.rs | ||
389 | mod foo;<|> | ||
390 | |||
391 | use crate::foo::bar::Baz; | ||
392 | |||
393 | fn foo() -> i32 { | ||
394 | 1 + 1 | ||
395 | } | ||
396 | //- /foo/mod.rs | ||
397 | pub mod bar; | ||
398 | |||
399 | //- /foo/bar.rs | ||
400 | pub struct Baz; | ||
401 | ", | ||
402 | ); | ||
403 | let source_root = db.file_source_root(pos.file_id); | ||
404 | { | ||
405 | let events = db.log_executed(|| { | ||
406 | db.item_map(source_root).unwrap(); | ||
407 | }); | ||
408 | assert!(format!("{:?}", events).contains("item_map")) | ||
409 | } | ||
410 | |||
411 | let new_text = " | ||
412 | mod foo; | ||
413 | |||
414 | use crate::foo::bar::Baz; | ||
415 | |||
416 | fn foo() -> i32 { 92 } | ||
417 | " | ||
418 | .to_string(); | ||
419 | |||
420 | db.query_mut(ra_db::FileTextQuery) | ||
421 | .set(pos.file_id, Arc::new(new_text)); | ||
422 | |||
423 | { | ||
424 | let events = db.log_executed(|| { | ||
425 | db.item_map(source_root).unwrap(); | ||
426 | }); | ||
427 | assert!( | ||
428 | !format!("{:?}", events).contains("_item_map"), | ||
429 | "{:#?}", | ||
430 | events | ||
431 | ) | ||
432 | } | ||
433 | } | ||
434 | } | ||
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs new file mode 100644 index 000000000..4a2e427cd --- /dev/null +++ b/crates/ra_hir/src/path.rs | |||
@@ -0,0 +1,148 @@ | |||
1 | use ra_syntax::{SmolStr, ast, AstNode, TextRange}; | ||
2 | |||
3 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
4 | pub struct Path { | ||
5 | pub kind: PathKind, | ||
6 | pub segments: Vec<SmolStr>, | ||
7 | } | ||
8 | |||
9 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] | ||
10 | pub enum PathKind { | ||
11 | Plain, | ||
12 | Self_, | ||
13 | Super, | ||
14 | Crate, | ||
15 | } | ||
16 | |||
17 | impl Path { | ||
18 | /// Calls `cb` with all paths represented by this use item. | ||
19 | pub fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) { | ||
20 | if let Some(tree) = item.use_tree() { | ||
21 | expand_use_tree(None, tree, &mut cb); | ||
22 | } | ||
23 | } | ||
24 | |||
25 | /// Converts an `ast::Path` to `Path`. Works with use trees. | ||
26 | pub fn from_ast(mut path: ast::Path) -> Option<Path> { | ||
27 | let mut kind = PathKind::Plain; | ||
28 | let mut segments = Vec::new(); | ||
29 | loop { | ||
30 | let segment = path.segment()?; | ||
31 | match segment.kind()? { | ||
32 | ast::PathSegmentKind::Name(name) => segments.push(name.text()), | ||
33 | ast::PathSegmentKind::CrateKw => { | ||
34 | kind = PathKind::Crate; | ||
35 | break; | ||
36 | } | ||
37 | ast::PathSegmentKind::SelfKw => { | ||
38 | kind = PathKind::Self_; | ||
39 | break; | ||
40 | } | ||
41 | ast::PathSegmentKind::SuperKw => { | ||
42 | kind = PathKind::Super; | ||
43 | break; | ||
44 | } | ||
45 | } | ||
46 | path = match qualifier(path) { | ||
47 | Some(it) => it, | ||
48 | None => break, | ||
49 | }; | ||
50 | } | ||
51 | segments.reverse(); | ||
52 | return Some(Path { kind, segments }); | ||
53 | |||
54 | fn qualifier(path: ast::Path) -> Option<ast::Path> { | ||
55 | if let Some(q) = path.qualifier() { | ||
56 | return Some(q); | ||
57 | } | ||
58 | // TODO: this bottom-up traversal is not very precise. | ||
59 | // Should we instead do a top-down analysis, recording results? | ||
60 | let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; | ||
61 | let use_tree = use_tree_list.parent_use_tree(); | ||
62 | use_tree.path() | ||
63 | } | ||
64 | } | ||
65 | |||
66 | /// `true` if this path is a single identifier, like `foo`. | ||
67 | pub fn is_ident(&self) -> bool { | ||
68 | self.kind == PathKind::Plain && self.segments.len() == 1 | ||
69 | } | ||
70 | } | ||
71 | |||
72 | fn expand_use_tree( | ||
73 | prefix: Option<Path>, | ||
74 | tree: ast::UseTree, | ||
75 | cb: &mut impl FnMut(Path, Option<TextRange>), | ||
76 | ) { | ||
77 | if let Some(use_tree_list) = tree.use_tree_list() { | ||
78 | let prefix = match tree.path() { | ||
79 | None => prefix, | ||
80 | Some(path) => match convert_path(prefix, path) { | ||
81 | Some(it) => Some(it), | ||
82 | None => return, // TODO: report errors somewhere | ||
83 | }, | ||
84 | }; | ||
85 | for tree in use_tree_list.use_trees() { | ||
86 | expand_use_tree(prefix.clone(), tree, cb); | ||
87 | } | ||
88 | } else { | ||
89 | if let Some(ast_path) = tree.path() { | ||
90 | if let Some(path) = convert_path(prefix, ast_path) { | ||
91 | let range = if tree.has_star() { | ||
92 | None | ||
93 | } else { | ||
94 | let range = ast_path.segment().unwrap().syntax().range(); | ||
95 | Some(range) | ||
96 | }; | ||
97 | cb(path, range) | ||
98 | } | ||
99 | } | ||
100 | } | ||
101 | } | ||
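A self-contained toy version of the expansion above: `use foo::{bar, baz::*};` should produce one named path `foo::bar` and one glob path `foo::baz`. The `Tree` enum is a simplified stand-in for `ast::UseTree`, and a plain bool stands in for the named-vs-glob distinction:

    enum Tree {
        Path(Vec<&'static str>),            // a leaf path, e.g. `bar`
        Glob(Vec<&'static str>),            // a leaf path followed by `::*`
        List(Vec<&'static str>, Vec<Tree>), // a prefix plus a `{ ... }` list
    }

    fn expand(prefix: &[&'static str], tree: &Tree, cb: &mut impl FnMut(Vec<&'static str>, bool)) {
        match tree {
            Tree::Path(p) => cb([prefix, p.as_slice()].concat(), false),
            Tree::Glob(p) => cb([prefix, p.as_slice()].concat(), true),
            Tree::List(p, children) => {
                let prefix = [prefix, p.as_slice()].concat();
                for child in children {
                    expand(&prefix, child, cb);
                }
            }
        }
    }

    fn main() {
        // use foo::{bar, baz::*};
        let tree = Tree::List(
            vec!["foo"],
            vec![Tree::Path(vec!["bar"]), Tree::Glob(vec!["baz"])],
        );
        let mut out = Vec::new();
        expand(&[], &tree, &mut |path, is_glob| out.push((path.join("::"), is_glob)));
        assert_eq!(
            out,
            vec![("foo::bar".to_string(), false), ("foo::baz".to_string(), true)]
        );
    }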
102 | |||
103 | fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> { | ||
104 | let prefix = if let Some(qual) = path.qualifier() { | ||
105 | Some(convert_path(prefix, qual)?) | ||
106 | } else { | ||
107 | None | ||
108 | }; | ||
109 | let segment = path.segment()?; | ||
110 | let res = match segment.kind()? { | ||
111 | ast::PathSegmentKind::Name(name) => { | ||
112 | let mut res = prefix.unwrap_or_else(|| Path { | ||
113 | kind: PathKind::Plain, | ||
114 | segments: Vec::with_capacity(1), | ||
115 | }); | ||
116 | res.segments.push(name.text()); | ||
117 | res | ||
118 | } | ||
119 | ast::PathSegmentKind::CrateKw => { | ||
120 | if prefix.is_some() { | ||
121 | return None; | ||
122 | } | ||
123 | Path { | ||
124 | kind: PathKind::Crate, | ||
125 | segments: Vec::new(), | ||
126 | } | ||
127 | } | ||
128 | ast::PathSegmentKind::SelfKw => { | ||
129 | if prefix.is_some() { | ||
130 | return None; | ||
131 | } | ||
132 | Path { | ||
133 | kind: PathKind::Self_, | ||
134 | segments: Vec::new(), | ||
135 | } | ||
136 | } | ||
137 | ast::PathSegmentKind::SuperKw => { | ||
138 | if prefix.is_some() { | ||
139 | return None; | ||
140 | } | ||
141 | Path { | ||
142 | kind: PathKind::Super, | ||
143 | segments: Vec::new(), | ||
144 | } | ||
145 | } | ||
146 | }; | ||
147 | Some(res) | ||
148 | } | ||
diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs new file mode 100644 index 000000000..6f602878c --- /dev/null +++ b/crates/ra_hir/src/query_definitions.rs | |||
@@ -0,0 +1,154 @@ | |||
1 | use std::{ | ||
2 | sync::Arc, | ||
3 | time::Instant, | ||
4 | }; | ||
5 | |||
6 | use rustc_hash::FxHashMap; | ||
7 | use ra_syntax::{ | ||
8 | AstNode, SyntaxNode, SmolStr, | ||
9 | ast::{self, FnDef, FnDefNode, NameOwner, ModuleItemOwner} | ||
10 | }; | ||
11 | use ra_db::{SourceRootId, FileId, Cancelable}; | ||
12 | |||
13 | use crate::{ | ||
14 | FnId, | ||
15 | SourceFileItems, SourceItemId, | ||
16 | db::HirDatabase, | ||
17 | function::FnScopes, | ||
18 | module::{ | ||
19 | ModuleSource, ModuleSourceNode, ModuleId, | ||
20 | imp::Submodule, | ||
21 | nameres::{InputModuleItems, ItemMap, Resolver}, | ||
22 | }, | ||
23 | }; | ||
24 | |||
25 | /// Resolve `FnId` to the corresponding `SyntaxNode` | ||
26 | pub(super) fn fn_syntax(db: &impl HirDatabase, fn_id: FnId) -> FnDefNode { | ||
27 | let item_id = fn_id.loc(db); | ||
28 | let syntax = db.file_item(item_id); | ||
29 | FnDef::cast(syntax.borrowed()).unwrap().owned() | ||
30 | } | ||
31 | |||
32 | pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc<FnScopes> { | ||
33 | let syntax = db.fn_syntax(fn_id); | ||
34 | let res = FnScopes::new(syntax.borrowed()); | ||
35 | Arc::new(res) | ||
36 | } | ||
37 | |||
38 | pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> { | ||
39 | let source_file = db.source_file(file_id); | ||
40 | let source_file = source_file.borrowed(); | ||
41 | let mut res = SourceFileItems::default(); | ||
42 | source_file | ||
43 | .syntax() | ||
44 | .descendants() | ||
45 | .filter_map(ast::ModuleItem::cast) | ||
46 | .map(|it| it.syntax().owned()) | ||
47 | .for_each(|it| { | ||
48 | res.alloc(it); | ||
49 | }); | ||
50 | Arc::new(res) | ||
51 | } | ||
52 | |||
53 | pub(super) fn file_item(db: &impl HirDatabase, source_item_id: SourceItemId) -> SyntaxNode { | ||
54 | db.file_items(source_item_id.file_id)[source_item_id.item_id].clone() | ||
55 | } | ||
56 | |||
57 | pub(crate) fn submodules( | ||
58 | db: &impl HirDatabase, | ||
59 | source: ModuleSource, | ||
60 | ) -> Cancelable<Arc<Vec<Submodule>>> { | ||
61 | db.check_canceled()?; | ||
62 | let file_id = source.file_id(); | ||
63 | let submodules = match source.resolve(db) { | ||
64 | ModuleSourceNode::SourceFile(it) => collect_submodules(db, file_id, it.borrowed()), | ||
65 | ModuleSourceNode::Module(it) => it | ||
66 | .borrowed() | ||
67 | .item_list() | ||
68 | .map(|it| collect_submodules(db, file_id, it)) | ||
69 | .unwrap_or_else(Vec::new), | ||
70 | }; | ||
71 | return Ok(Arc::new(submodules)); | ||
72 | |||
73 | fn collect_submodules<'a>( | ||
74 | db: &impl HirDatabase, | ||
75 | file_id: FileId, | ||
76 | root: impl ast::ModuleItemOwner<'a>, | ||
77 | ) -> Vec<Submodule> { | ||
78 | modules(root) | ||
79 | .map(|(name, m)| { | ||
80 | if m.has_semi() { | ||
81 | Submodule::Declaration(name) | ||
82 | } else { | ||
83 | let src = ModuleSource::new_inline(db, file_id, m); | ||
84 | Submodule::Definition(name, src) | ||
85 | } | ||
86 | }) | ||
87 | .collect() | ||
88 | } | ||
89 | } | ||
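For reference, the `has_semi` check above is what separates the two module shapes: `mod other;` (with a semicolon) is a declaration whose body lives in another file and becomes `Submodule::Declaration`, while an inline module carries its body in place and becomes `Submodule::Definition` with a `ModuleSource::new_inline` id. A runnable fragment showing the inline shape (unrelated to the crate's own modules):

    mod inline_example {
        // an inline module: no trailing semicolon, body given in place
        pub fn answer() -> u32 {
            42
        }
    }

    fn main() {
        assert_eq!(inline_example::answer(), 42);
    }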
90 | |||
91 | pub(crate) fn modules<'a>( | ||
92 | root: impl ast::ModuleItemOwner<'a>, | ||
93 | ) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> { | ||
94 | root.items() | ||
95 | .filter_map(|item| match item { | ||
96 | ast::ModuleItem::Module(m) => Some(m), | ||
97 | _ => None, | ||
98 | }) | ||
99 | .filter_map(|module| { | ||
100 | let name = module.name()?.text(); | ||
101 | Some((name, module)) | ||
102 | }) | ||
103 | } | ||
104 | |||
105 | pub(super) fn input_module_items( | ||
106 | db: &impl HirDatabase, | ||
107 | source_root: SourceRootId, | ||
108 | module_id: ModuleId, | ||
109 | ) -> Cancelable<Arc<InputModuleItems>> { | ||
110 | let module_tree = db.module_tree(source_root)?; | ||
111 | let source = module_id.source(&module_tree); | ||
112 | let file_items = db.file_items(source.file_id()); | ||
113 | let res = match source.resolve(db) { | ||
114 | ModuleSourceNode::SourceFile(it) => { | ||
115 | let items = it.borrowed().items(); | ||
116 | InputModuleItems::new(&file_items, items) | ||
117 | } | ||
118 | ModuleSourceNode::Module(it) => { | ||
119 | let items = it | ||
120 | .borrowed() | ||
121 | .item_list() | ||
122 | .into_iter() | ||
123 | .flat_map(|it| it.items()); | ||
124 | InputModuleItems::new(&file_items, items) | ||
125 | } | ||
126 | }; | ||
127 | Ok(Arc::new(res)) | ||
128 | } | ||
129 | |||
130 | pub(super) fn item_map( | ||
131 | db: &impl HirDatabase, | ||
132 | source_root: SourceRootId, | ||
133 | ) -> Cancelable<Arc<ItemMap>> { | ||
134 | let start = Instant::now(); | ||
135 | let module_tree = db.module_tree(source_root)?; | ||
136 | let input = module_tree | ||
137 | .modules() | ||
138 | .map(|id| { | ||
139 | let items = db.input_module_items(source_root, id)?; | ||
140 | Ok((id, items)) | ||
141 | }) | ||
142 | .collect::<Cancelable<FxHashMap<_, _>>>()?; | ||
143 | let resolver = Resolver { | ||
144 | db, | ||
145 | input: &input, | ||
146 | source_root, | ||
147 | module_tree, | ||
148 | result: ItemMap::default(), | ||
149 | }; | ||
150 | let res = resolver.resolve()?; | ||
151 | let elapsed = start.elapsed(); | ||
152 | log::info!("item_map: {:?}", elapsed); | ||
153 | Ok(Arc::new(res)) | ||
154 | } | ||