diff options
author | Dmitry <[email protected]> | 2020-08-14 19:32:05 +0100 |
---|---|---|
committer | Dmitry <[email protected]> | 2020-08-14 19:32:05 +0100 |
commit | 178c3e135a2a249692f7784712492e7884ae0c00 (patch) | |
tree | ac6b769dbf7162150caa0c1624786a4dd79ff3be /crates/ide_db/src | |
parent | 06ff8e6c760ff05f10e868b5d1f9d79e42fbb49c (diff) | |
parent | c2594daf2974dbd4ce3d9b7ec72481764abaceb5 (diff) |
Merge remote-tracking branch 'origin/master'
Diffstat (limited to 'crates/ide_db/src')
-rw-r--r-- | crates/ide_db/src/change.rs | 318 | ||||
-rw-r--r-- | crates/ide_db/src/defs.rs | 348 | ||||
-rw-r--r-- | crates/ide_db/src/imports_locator.rs | 64 | ||||
-rw-r--r-- | crates/ide_db/src/lib.rs | 139 | ||||
-rw-r--r-- | crates/ide_db/src/line_index.rs | 281 | ||||
-rw-r--r-- | crates/ide_db/src/search.rs | 322 | ||||
-rw-r--r-- | crates/ide_db/src/source_change.rs | 59 | ||||
-rw-r--r-- | crates/ide_db/src/symbol_index.rs | 429 | ||||
-rw-r--r-- | crates/ide_db/src/wasm_shims.rs | 19 |
9 files changed, 1979 insertions, 0 deletions
diff --git a/crates/ide_db/src/change.rs b/crates/ide_db/src/change.rs new file mode 100644 index 000000000..8b4fd7ab8 --- /dev/null +++ b/crates/ide_db/src/change.rs | |||
@@ -0,0 +1,318 @@ | |||
1 | //! Defines a unit of change that can applied to a state of IDE to get the next | ||
2 | //! state. Changes are transactional. | ||
3 | |||
4 | use std::{fmt, sync::Arc, time}; | ||
5 | |||
6 | use base_db::{ | ||
7 | salsa::{Database, Durability, SweepStrategy}, | ||
8 | CrateGraph, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, | ||
9 | }; | ||
10 | use profile::{memory_usage, Bytes}; | ||
11 | use rustc_hash::FxHashSet; | ||
12 | |||
13 | use crate::{symbol_index::SymbolsDatabase, RootDatabase}; | ||
14 | |||
/// A transactional set of changes applied to the IDE state in one step.
///
/// Accumulate changes with the setter methods, then apply them atomically
/// via `RootDatabase::apply_change`.
#[derive(Default)]
pub struct AnalysisChange {
    // New full set of source roots; replaces the old set wholesale when present.
    roots: Option<Vec<SourceRoot>>,
    // Changed file contents; `None` marks a removal (see `apply_change`, which
    // can only reset the text to empty, not truly delete the file).
    files_changed: Vec<(FileId, Option<Arc<String>>)>,
    // New crate graph; replaces the old one when present.
    crate_graph: Option<CrateGraph>,
}
21 | |||
22 | impl fmt::Debug for AnalysisChange { | ||
23 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
24 | let mut d = fmt.debug_struct("AnalysisChange"); | ||
25 | if let Some(roots) = &self.roots { | ||
26 | d.field("roots", roots); | ||
27 | } | ||
28 | if !self.files_changed.is_empty() { | ||
29 | d.field("files_changed", &self.files_changed.len()); | ||
30 | } | ||
31 | if self.crate_graph.is_some() { | ||
32 | d.field("crate_graph", &self.crate_graph); | ||
33 | } | ||
34 | d.finish() | ||
35 | } | ||
36 | } | ||
37 | |||
38 | impl AnalysisChange { | ||
39 | pub fn new() -> AnalysisChange { | ||
40 | AnalysisChange::default() | ||
41 | } | ||
42 | |||
43 | pub fn set_roots(&mut self, roots: Vec<SourceRoot>) { | ||
44 | self.roots = Some(roots); | ||
45 | } | ||
46 | |||
47 | pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<String>>) { | ||
48 | self.files_changed.push((file_id, new_text)) | ||
49 | } | ||
50 | |||
51 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | ||
52 | self.crate_graph = Some(graph); | ||
53 | } | ||
54 | } | ||
55 | |||
/// A file added to a source root.
/// NOTE(review): `AddFile`/`RemoveFile`/`RootChange` are private and not
/// referenced anywhere else in this file -- likely dead code left from a
/// refactor; confirm before removing.
#[derive(Debug)]
struct AddFile {
    file_id: FileId,
    path: String,
    text: Arc<String>,
}
62 | |||
/// A file removed from a source root.
/// NOTE(review): private and unused within this file -- likely dead code.
#[derive(Debug)]
struct RemoveFile {
    file_id: FileId,
    path: String,
}
68 | |||
/// Accumulated per-root file additions and removals.
/// NOTE(review): private and unused within this file -- likely dead code.
#[derive(Default)]
struct RootChange {
    added: Vec<AddFile>,
    removed: Vec<RemoveFile>,
}
74 | |||
75 | impl fmt::Debug for RootChange { | ||
76 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
77 | fmt.debug_struct("AnalysisChange") | ||
78 | .field("added", &self.added.len()) | ||
79 | .field("removed", &self.removed.len()) | ||
80 | .finish() | ||
81 | } | ||
82 | } | ||
83 | |||
/// Minimum interval between garbage-collection checks (see `maybe_collect_garbage`).
const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100);
85 | |||
86 | impl RootDatabase { | ||
87 | pub fn request_cancellation(&mut self) { | ||
88 | let _p = profile::span("RootDatabase::request_cancellation"); | ||
89 | self.salsa_runtime_mut().synthetic_write(Durability::LOW); | ||
90 | } | ||
91 | |||
92 | pub fn apply_change(&mut self, change: AnalysisChange) { | ||
93 | let _p = profile::span("RootDatabase::apply_change"); | ||
94 | self.request_cancellation(); | ||
95 | log::info!("apply_change {:?}", change); | ||
96 | if let Some(roots) = change.roots { | ||
97 | let mut local_roots = FxHashSet::default(); | ||
98 | let mut library_roots = FxHashSet::default(); | ||
99 | for (idx, root) in roots.into_iter().enumerate() { | ||
100 | let root_id = SourceRootId(idx as u32); | ||
101 | let durability = durability(&root); | ||
102 | if root.is_library { | ||
103 | library_roots.insert(root_id); | ||
104 | } else { | ||
105 | local_roots.insert(root_id); | ||
106 | } | ||
107 | for file_id in root.iter() { | ||
108 | self.set_file_source_root_with_durability(file_id, root_id, durability); | ||
109 | } | ||
110 | self.set_source_root_with_durability(root_id, Arc::new(root), durability); | ||
111 | } | ||
112 | self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); | ||
113 | self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH); | ||
114 | } | ||
115 | |||
116 | for (file_id, text) in change.files_changed { | ||
117 | let source_root_id = self.file_source_root(file_id); | ||
118 | let source_root = self.source_root(source_root_id); | ||
119 | let durability = durability(&source_root); | ||
120 | // XXX: can't actually remove the file, just reset the text | ||
121 | let text = text.unwrap_or_default(); | ||
122 | self.set_file_text_with_durability(file_id, text, durability) | ||
123 | } | ||
124 | if let Some(crate_graph) = change.crate_graph { | ||
125 | self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) | ||
126 | } | ||
127 | } | ||
128 | |||
129 | pub fn maybe_collect_garbage(&mut self) { | ||
130 | if cfg!(feature = "wasm") { | ||
131 | return; | ||
132 | } | ||
133 | |||
134 | if self.last_gc_check.elapsed() > GC_COOLDOWN { | ||
135 | self.last_gc_check = crate::wasm_shims::Instant::now(); | ||
136 | } | ||
137 | } | ||
138 | |||
139 | pub fn collect_garbage(&mut self) { | ||
140 | if cfg!(feature = "wasm") { | ||
141 | return; | ||
142 | } | ||
143 | |||
144 | let _p = profile::span("RootDatabase::collect_garbage"); | ||
145 | self.last_gc = crate::wasm_shims::Instant::now(); | ||
146 | |||
147 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | ||
148 | |||
149 | base_db::ParseQuery.in_db(self).sweep(sweep); | ||
150 | hir::db::ParseMacroQuery.in_db(self).sweep(sweep); | ||
151 | |||
152 | // Macros do take significant space, but less then the syntax trees | ||
153 | // self.query(hir::db::MacroDefQuery).sweep(sweep); | ||
154 | // self.query(hir::db::MacroArgTextQuery).sweep(sweep); | ||
155 | // self.query(hir::db::MacroExpandQuery).sweep(sweep); | ||
156 | |||
157 | hir::db::AstIdMapQuery.in_db(self).sweep(sweep); | ||
158 | |||
159 | hir::db::BodyWithSourceMapQuery.in_db(self).sweep(sweep); | ||
160 | |||
161 | hir::db::ExprScopesQuery.in_db(self).sweep(sweep); | ||
162 | hir::db::InferQueryQuery.in_db(self).sweep(sweep); | ||
163 | hir::db::BodyQuery.in_db(self).sweep(sweep); | ||
164 | } | ||
165 | |||
166 | // Feature: Memory Usage | ||
167 | // | ||
168 | // Clears rust-analyzer's internal database and prints memory usage statistics. | ||
169 | // | ||
170 | // |=== | ||
171 | // | Editor | Action Name | ||
172 | // | ||
173 | // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)** | ||
174 | // |=== | ||
175 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> { | ||
176 | let mut acc: Vec<(String, Bytes)> = vec![]; | ||
177 | let sweep = SweepStrategy::default().discard_values().sweep_all_revisions(); | ||
178 | macro_rules! sweep_each_query { | ||
179 | ($($q:path)*) => {$( | ||
180 | let before = memory_usage().allocated; | ||
181 | $q.in_db(self).sweep(sweep); | ||
182 | let after = memory_usage().allocated; | ||
183 | let q: $q = Default::default(); | ||
184 | let name = format!("{:?}", q); | ||
185 | acc.push((name, before - after)); | ||
186 | |||
187 | let before = memory_usage().allocated; | ||
188 | $q.in_db(self).sweep(sweep.discard_everything()); | ||
189 | let after = memory_usage().allocated; | ||
190 | let q: $q = Default::default(); | ||
191 | let name = format!("{:?} (deps)", q); | ||
192 | acc.push((name, before - after)); | ||
193 | |||
194 | let before = memory_usage().allocated; | ||
195 | $q.in_db(self).purge(); | ||
196 | let after = memory_usage().allocated; | ||
197 | let q: $q = Default::default(); | ||
198 | let name = format!("{:?} (purge)", q); | ||
199 | acc.push((name, before - after)); | ||
200 | )*} | ||
201 | } | ||
202 | sweep_each_query![ | ||
203 | // SourceDatabase | ||
204 | base_db::ParseQuery | ||
205 | base_db::CrateGraphQuery | ||
206 | |||
207 | // SourceDatabaseExt | ||
208 | base_db::FileTextQuery | ||
209 | base_db::FileSourceRootQuery | ||
210 | base_db::SourceRootQuery | ||
211 | base_db::SourceRootCratesQuery | ||
212 | |||
213 | // AstDatabase | ||
214 | hir::db::AstIdMapQuery | ||
215 | hir::db::MacroArgTextQuery | ||
216 | hir::db::MacroDefQuery | ||
217 | hir::db::ParseMacroQuery | ||
218 | hir::db::MacroExpandQuery | ||
219 | |||
220 | // DefDatabase | ||
221 | hir::db::ItemTreeQuery | ||
222 | hir::db::CrateDefMapQueryQuery | ||
223 | hir::db::StructDataQuery | ||
224 | hir::db::UnionDataQuery | ||
225 | hir::db::EnumDataQuery | ||
226 | hir::db::ImplDataQuery | ||
227 | hir::db::TraitDataQuery | ||
228 | hir::db::TypeAliasDataQuery | ||
229 | hir::db::FunctionDataQuery | ||
230 | hir::db::ConstDataQuery | ||
231 | hir::db::StaticDataQuery | ||
232 | hir::db::BodyWithSourceMapQuery | ||
233 | hir::db::BodyQuery | ||
234 | hir::db::ExprScopesQuery | ||
235 | hir::db::GenericParamsQuery | ||
236 | hir::db::AttrsQuery | ||
237 | hir::db::ModuleLangItemsQuery | ||
238 | hir::db::CrateLangItemsQuery | ||
239 | hir::db::LangItemQuery | ||
240 | hir::db::DocumentationQuery | ||
241 | hir::db::ImportMapQuery | ||
242 | |||
243 | // HirDatabase | ||
244 | hir::db::InferQueryQuery | ||
245 | hir::db::TyQuery | ||
246 | hir::db::ValueTyQuery | ||
247 | hir::db::ImplSelfTyQuery | ||
248 | hir::db::ImplTraitQuery | ||
249 | hir::db::FieldTypesQuery | ||
250 | hir::db::CallableItemSignatureQuery | ||
251 | hir::db::GenericPredicatesForParamQuery | ||
252 | hir::db::GenericPredicatesQuery | ||
253 | hir::db::GenericDefaultsQuery | ||
254 | hir::db::InherentImplsInCrateQuery | ||
255 | hir::db::TraitImplsInCrateQuery | ||
256 | hir::db::TraitImplsInDepsQuery | ||
257 | hir::db::AssociatedTyDataQuery | ||
258 | hir::db::AssociatedTyDataQuery | ||
259 | hir::db::TraitDatumQuery | ||
260 | hir::db::StructDatumQuery | ||
261 | hir::db::ImplDatumQuery | ||
262 | hir::db::FnDefDatumQuery | ||
263 | hir::db::ReturnTypeImplTraitsQuery | ||
264 | hir::db::InternCallableDefQuery | ||
265 | hir::db::InternTypeParamIdQuery | ||
266 | hir::db::InternImplTraitIdQuery | ||
267 | hir::db::InternClosureQuery | ||
268 | hir::db::AssociatedTyValueQuery | ||
269 | hir::db::TraitSolveQuery | ||
270 | |||
271 | // SymbolsDatabase | ||
272 | crate::symbol_index::FileSymbolsQuery | ||
273 | crate::symbol_index::LibrarySymbolsQuery | ||
274 | crate::symbol_index::LocalRootsQuery | ||
275 | crate::symbol_index::LibraryRootsQuery | ||
276 | |||
277 | // LineIndexDatabase | ||
278 | crate::LineIndexQuery | ||
279 | ]; | ||
280 | |||
281 | // To collect interned data, we need to bump the revision counter by performing a synthetic | ||
282 | // write. | ||
283 | // We do this after collecting the non-interned queries to correctly attribute memory used | ||
284 | // by interned data. | ||
285 | self.salsa_runtime_mut().synthetic_write(Durability::HIGH); | ||
286 | |||
287 | sweep_each_query![ | ||
288 | // AstDatabase | ||
289 | hir::db::InternMacroQuery | ||
290 | hir::db::InternEagerExpansionQuery | ||
291 | |||
292 | // InternDatabase | ||
293 | hir::db::InternFunctionQuery | ||
294 | hir::db::InternStructQuery | ||
295 | hir::db::InternUnionQuery | ||
296 | hir::db::InternEnumQuery | ||
297 | hir::db::InternConstQuery | ||
298 | hir::db::InternStaticQuery | ||
299 | hir::db::InternTraitQuery | ||
300 | hir::db::InternTypeAliasQuery | ||
301 | hir::db::InternImplQuery | ||
302 | |||
303 | // HirDatabase | ||
304 | hir::db::InternTypeParamIdQuery | ||
305 | ]; | ||
306 | |||
307 | acc.sort_by_key(|it| std::cmp::Reverse(it.1)); | ||
308 | acc | ||
309 | } | ||
310 | } | ||
311 | |||
312 | fn durability(source_root: &SourceRoot) -> Durability { | ||
313 | if source_root.is_library { | ||
314 | Durability::HIGH | ||
315 | } else { | ||
316 | Durability::LOW | ||
317 | } | ||
318 | } | ||
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs new file mode 100644 index 000000000..0d0affc27 --- /dev/null +++ b/crates/ide_db/src/defs.rs | |||
@@ -0,0 +1,348 @@ | |||
1 | //! `NameDefinition` keeps information about the element we want to search references for. | ||
2 | //! The element is represented by `NameKind`. It's located inside some `container` and | ||
3 | //! has a `visibility`, which defines a search scope. | ||
4 | //! Note that not all of the classified items support the reference search. ||
5 | |||
6 | // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). | ||
7 | |||
8 | use hir::{ | ||
9 | db::HirDatabase, Crate, Field, HasVisibility, ImplDef, Local, MacroDef, Module, ModuleDef, | ||
10 | Name, PathResolution, Semantics, TypeParam, Visibility, | ||
11 | }; | ||
12 | use syntax::{ | ||
13 | ast::{self, AstNode}, | ||
14 | match_ast, SyntaxNode, | ||
15 | }; | ||
16 | |||
17 | use crate::RootDatabase; | ||
18 | |||
// FIXME: a more precise name would probably be `Symbol`?
/// A named thing the IDE can resolve a reference to -- the target of
/// goto-definition, find-references, rename, etc.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Definition {
    Macro(MacroDef),
    Field(Field),
    ModuleDef(ModuleDef),
    /// The `Self` type of an `impl` block.
    SelfType(ImplDef),
    /// A local binding (pattern binding, parameter, ...).
    Local(Local),
    TypeParam(TypeParam),
}
29 | |||
30 | impl Definition { | ||
31 | pub fn module(&self, db: &RootDatabase) -> Option<Module> { | ||
32 | match self { | ||
33 | Definition::Macro(it) => it.module(db), | ||
34 | Definition::Field(it) => Some(it.parent_def(db).module(db)), | ||
35 | Definition::ModuleDef(it) => it.module(db), | ||
36 | Definition::SelfType(it) => Some(it.module(db)), | ||
37 | Definition::Local(it) => Some(it.module(db)), | ||
38 | Definition::TypeParam(it) => Some(it.module(db)), | ||
39 | } | ||
40 | } | ||
41 | |||
42 | pub fn visibility(&self, db: &RootDatabase) -> Option<Visibility> { | ||
43 | match self { | ||
44 | Definition::Macro(_) => None, | ||
45 | Definition::Field(sf) => Some(sf.visibility(db)), | ||
46 | Definition::ModuleDef(def) => def.definition_visibility(db), | ||
47 | Definition::SelfType(_) => None, | ||
48 | Definition::Local(_) => None, | ||
49 | Definition::TypeParam(_) => None, | ||
50 | } | ||
51 | } | ||
52 | |||
53 | pub fn name(&self, db: &RootDatabase) -> Option<Name> { | ||
54 | let name = match self { | ||
55 | Definition::Macro(it) => it.name(db)?, | ||
56 | Definition::Field(it) => it.name(db), | ||
57 | Definition::ModuleDef(def) => match def { | ||
58 | hir::ModuleDef::Module(it) => it.name(db)?, | ||
59 | hir::ModuleDef::Function(it) => it.name(db), | ||
60 | hir::ModuleDef::Adt(def) => match def { | ||
61 | hir::Adt::Struct(it) => it.name(db), | ||
62 | hir::Adt::Union(it) => it.name(db), | ||
63 | hir::Adt::Enum(it) => it.name(db), | ||
64 | }, | ||
65 | hir::ModuleDef::EnumVariant(it) => it.name(db), | ||
66 | hir::ModuleDef::Const(it) => it.name(db)?, | ||
67 | hir::ModuleDef::Static(it) => it.name(db)?, | ||
68 | hir::ModuleDef::Trait(it) => it.name(db), | ||
69 | hir::ModuleDef::TypeAlias(it) => it.name(db), | ||
70 | hir::ModuleDef::BuiltinType(_) => return None, | ||
71 | }, | ||
72 | Definition::SelfType(_) => return None, | ||
73 | Definition::Local(it) => it.name(db)?, | ||
74 | Definition::TypeParam(it) => it.name(db), | ||
75 | }; | ||
76 | Some(name) | ||
77 | } | ||
78 | } | ||
79 | |||
/// The result of classifying an `ast::Name` -- a name *definition* site.
#[derive(Debug)]
pub enum NameClass {
    /// A name introduced by `extern crate` (resolves to the crate's root module).
    ExternCrate(Crate),
    Definition(Definition),
    /// A pattern name that actually resolves to an existing constant, e.g.
    /// `None` in `if let None = Some(82) {}` -- a reference, not a definition.
    ConstReference(Definition),
    /// `Foo { bar }` pattern shorthand: introduces the local `bar` *and*
    /// refers to the field `bar`.
    FieldShorthand {
        local: Local,
        field: Definition,
    },
}
91 | |||
92 | impl NameClass { | ||
93 | pub fn into_definition(self, db: &dyn HirDatabase) -> Option<Definition> { | ||
94 | Some(match self { | ||
95 | NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), | ||
96 | NameClass::Definition(it) => it, | ||
97 | NameClass::ConstReference(_) => return None, | ||
98 | NameClass::FieldShorthand { local, field: _ } => Definition::Local(local), | ||
99 | }) | ||
100 | } | ||
101 | |||
102 | pub fn definition(self, db: &dyn HirDatabase) -> Definition { | ||
103 | match self { | ||
104 | NameClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), | ||
105 | NameClass::Definition(it) | NameClass::ConstReference(it) => it, | ||
106 | NameClass::FieldShorthand { local: _, field } => field, | ||
107 | } | ||
108 | } | ||
109 | } | ||
110 | |||
/// Classifies the `ast::Name` at a *definition* site: determines what kind of
/// definition (local, field, item, macro, ...) the name introduces, based on
/// its parent syntax node.
pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<NameClass> {
    let _p = profile::span("classify_name");

    let parent = name.syntax().parent()?;

    // A binding pattern that resolves to a constant is a *reference*, not a
    // fresh binding (e.g. `None` in `if let None = x {}`).
    if let Some(bind_pat) = ast::IdentPat::cast(parent.clone()) {
        if let Some(def) = sema.resolve_bind_pat_to_const(&bind_pat) {
            return Some(NameClass::ConstReference(Definition::ModuleDef(def)));
        }
    }

    match_ast! {
        match parent {
            ast::Rename(it) => {
                // `use path as name` / `extern crate foo as name`: the new
                // name aliases whatever the renamed path resolves to.
                if let Some(use_tree) = it.syntax().parent().and_then(ast::UseTree::cast) {
                    let path = use_tree.path()?;
                    let path_segment = path.segment()?;
                    let name_ref_class = path_segment
                        .name_ref()
                        // The rename might be from a `self` token, so fallback to the name higher
                        // in the use tree.
                        .or_else(||{
                            if path_segment.self_token().is_none() {
                                return None;
                            }

                            let use_tree = use_tree
                                .syntax()
                                .parent()
                                .as_ref()
                                // Skip over UseTreeList
                                .and_then(SyntaxNode::parent)
                                .and_then(ast::UseTree::cast)?;
                            let path = use_tree.path()?;
                            let path_segment = path.segment()?;
                            path_segment.name_ref()
                        })
                        .and_then(|name_ref| classify_name_ref(sema, &name_ref))?;

                    Some(NameClass::Definition(name_ref_class.definition(sema.db)))
                } else {
                    // Not inside a use tree: must be `extern crate foo as bar`.
                    let extern_crate = it.syntax().parent().and_then(ast::ExternCrate::cast)?;
                    let resolved = sema.resolve_extern_crate(&extern_crate)?;
                    Some(NameClass::ExternCrate(resolved))
                }
            },
            ast::IdentPat(it) => {
                let local = sema.to_def(&it)?;

                // `Foo { bar }` pattern: the name both binds a local and
                // refers to the field -- report the shorthand so tooling can
                // handle both sides.
                if let Some(record_field_pat) = it.syntax().parent().and_then(ast::RecordPatField::cast) {
                    if record_field_pat.name_ref().is_none() {
                        if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) {
                            let field = Definition::Field(field);
                            return Some(NameClass::FieldShorthand { local, field });
                        }
                    }
                }

                Some(NameClass::Definition(Definition::Local(local)))
            },
            ast::RecordField(it) => {
                let field: hir::Field = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::Field(field)))
            },
            ast::Module(it) => {
                let def = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Struct(it) => {
                let def: hir::Struct = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Union(it) => {
                let def: hir::Union = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Enum(it) => {
                let def: hir::Enum = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Trait(it) => {
                let def: hir::Trait = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Static(it) => {
                let def: hir::Static = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Variant(it) => {
                let def: hir::EnumVariant = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Fn(it) => {
                let def: hir::Function = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::Const(it) => {
                let def: hir::Const = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::TypeAlias(it) => {
                let def: hir::TypeAlias = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::ModuleDef(def.into())))
            },
            ast::MacroCall(it) => {
                // A name on a macro *call* is a `macro_rules!` definition.
                let def = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::Macro(def)))
            },
            ast::TypeParam(it) => {
                let def = sema.to_def(&it)?;
                Some(NameClass::Definition(Definition::TypeParam(def)))
            },
            _ => None,
        }
    }
}
227 | |||
/// The result of classifying an `ast::NameRef` -- a name *use* site.
#[derive(Debug)]
pub enum NameRefClass {
    ExternCrate(Crate),
    Definition(Definition),
    /// `Foo { bar }` expression shorthand: refers to the field `bar` while
    /// reading the local of the same name.
    FieldShorthand { local: Local, field: Definition },
}
234 | |||
235 | impl NameRefClass { | ||
236 | pub fn definition(self, db: &dyn HirDatabase) -> Definition { | ||
237 | match self { | ||
238 | NameRefClass::ExternCrate(krate) => Definition::ModuleDef(krate.root_module(db).into()), | ||
239 | NameRefClass::Definition(def) => def, | ||
240 | NameRefClass::FieldShorthand { local, field: _ } => Definition::Local(local), | ||
241 | } | ||
242 | } | ||
243 | } | ||
244 | |||
// Note: we don't have unit-tests for this rather important function.
// It is primarily exercised via goto definition tests in `ide`.
/// Classifies the `ast::NameRef` at a *use* site, trying increasingly general
/// strategies in order: method call, field access, record literal/pattern
/// field, associated-type argument, macro call, plain path, `extern crate`.
pub fn classify_name_ref(
    sema: &Semantics<RootDatabase>,
    name_ref: &ast::NameRef,
) -> Option<NameRefClass> {
    let _p = profile::span("classify_name_ref");

    let parent = name_ref.syntax().parent()?;

    if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
        if let Some(func) = sema.resolve_method_call(&method_call) {
            return Some(NameRefClass::Definition(Definition::ModuleDef(func.into())));
        }
    }

    if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
        if let Some(field) = sema.resolve_field(&field_expr) {
            return Some(NameRefClass::Definition(Definition::Field(field)));
        }
    }

    if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) {
        if let Some((field, local)) = sema.resolve_record_field(&record_field) {
            let field = Definition::Field(field);
            let res = match local {
                None => NameRefClass::Definition(field),
                // Shorthand `Foo { bar }`: also report the local being read.
                Some(local) => NameRefClass::FieldShorthand { field, local },
            };
            return Some(res);
        }
    }

    if let Some(record_field_pat) = ast::RecordPatField::cast(parent.clone()) {
        if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) {
            let field = Definition::Field(field);
            return Some(NameRefClass::Definition(field));
        }
    }

    if ast::AssocTypeArg::cast(parent.clone()).is_some() {
        // `Trait<Assoc = Ty>`
        //        ^^^^^
        // Resolve the trait from the enclosing path, then find its associated
        // type alias with a matching name.
        let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
        let resolved = sema.resolve_path(&path)?;
        if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved {
            if let Some(ty) = tr
                .items(sema.db)
                .iter()
                .filter_map(|assoc| match assoc {
                    hir::AssocItem::TypeAlias(it) => Some(*it),
                    _ => None,
                })
                .find(|alias| alias.name(sema.db).to_string() == **name_ref.text())
            {
                return Some(NameRefClass::Definition(Definition::ModuleDef(
                    ModuleDef::TypeAlias(ty),
                )));
            }
        }
    }

    if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
        if let Some(path) = macro_call.path() {
            if path.qualifier().is_none() {
                // Only use this to resolve single-segment macro calls like `foo!()`. Multi-segment
                // paths are handled below (allowing `log<|>::info!` to resolve to the log crate).
                if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
                    return Some(NameRefClass::Definition(Definition::Macro(macro_def)));
                }
            }
        }
    }

    if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
        if let Some(resolved) = sema.resolve_path(&path) {
            return Some(NameRefClass::Definition(resolved.into()));
        }
    }

    // Last resort: `extern crate foo` -- the name ref is the crate name itself.
    let extern_crate = ast::ExternCrate::cast(parent)?;
    let resolved = sema.resolve_extern_crate(&extern_crate)?;
    Some(NameRefClass::ExternCrate(resolved))
}
329 | |||
330 | impl From<PathResolution> for Definition { | ||
331 | fn from(path_resolution: PathResolution) -> Self { | ||
332 | match path_resolution { | ||
333 | PathResolution::Def(def) => Definition::ModuleDef(def), | ||
334 | PathResolution::AssocItem(item) => { | ||
335 | let def = match item { | ||
336 | hir::AssocItem::Function(it) => it.into(), | ||
337 | hir::AssocItem::Const(it) => it.into(), | ||
338 | hir::AssocItem::TypeAlias(it) => it.into(), | ||
339 | }; | ||
340 | Definition::ModuleDef(def) | ||
341 | } | ||
342 | PathResolution::Local(local) => Definition::Local(local), | ||
343 | PathResolution::TypeParam(par) => Definition::TypeParam(par), | ||
344 | PathResolution::Macro(def) => Definition::Macro(def), | ||
345 | PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def), | ||
346 | } | ||
347 | } | ||
348 | } | ||
diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs new file mode 100644 index 000000000..ed67e3553 --- /dev/null +++ b/crates/ide_db/src/imports_locator.rs | |||
@@ -0,0 +1,64 @@ | |||
1 | //! This module contains an import search functionality that is provided to the assists module. ||
2 | //! Later, this should be moved away to a separate crate that is accessible from the assists module. | ||
3 | |||
4 | use hir::{Crate, MacroDef, ModuleDef, Semantics}; | ||
5 | use syntax::{ast, AstNode, SyntaxKind::NAME}; | ||
6 | |||
7 | use crate::{ | ||
8 | defs::{classify_name, Definition}, | ||
9 | symbol_index::{self, FileSymbol, Query}, | ||
10 | RootDatabase, | ||
11 | }; | ||
12 | use either::Either; | ||
13 | use rustc_hash::FxHashSet; | ||
14 | |||
15 | pub fn find_imports<'a>( | ||
16 | sema: &Semantics<'a, RootDatabase>, | ||
17 | krate: Crate, | ||
18 | name_to_import: &str, | ||
19 | ) -> Vec<Either<ModuleDef, MacroDef>> { | ||
20 | let _p = profile::span("search_for_imports"); | ||
21 | let db = sema.db; | ||
22 | |||
23 | // Query dependencies first. | ||
24 | let mut candidates: FxHashSet<_> = | ||
25 | krate.query_external_importables(db, name_to_import).collect(); | ||
26 | |||
27 | // Query the local crate using the symbol index. | ||
28 | let local_results = { | ||
29 | let mut query = Query::new(name_to_import.to_string()); | ||
30 | query.exact(); | ||
31 | query.limit(40); | ||
32 | symbol_index::crate_symbols(db, krate.into(), query) | ||
33 | }; | ||
34 | |||
35 | candidates.extend( | ||
36 | local_results | ||
37 | .into_iter() | ||
38 | .filter_map(|import_candidate| get_name_definition(sema, &import_candidate)) | ||
39 | .filter_map(|name_definition_to_import| match name_definition_to_import { | ||
40 | Definition::ModuleDef(module_def) => Some(Either::Left(module_def)), | ||
41 | Definition::Macro(macro_def) => Some(Either::Right(macro_def)), | ||
42 | _ => None, | ||
43 | }), | ||
44 | ); | ||
45 | |||
46 | candidates.into_iter().collect() | ||
47 | } | ||
48 | |||
49 | fn get_name_definition<'a>( | ||
50 | sema: &Semantics<'a, RootDatabase>, | ||
51 | import_candidate: &FileSymbol, | ||
52 | ) -> Option<Definition> { | ||
53 | let _p = profile::span("get_name_definition"); | ||
54 | let file_id = import_candidate.file_id; | ||
55 | |||
56 | let candidate_node = import_candidate.ptr.to_node(sema.parse(file_id).syntax()); | ||
57 | let candidate_name_node = if candidate_node.kind() != NAME { | ||
58 | candidate_node.children().find(|it| it.kind() == NAME)? | ||
59 | } else { | ||
60 | candidate_node | ||
61 | }; | ||
62 | let name = ast::Name::cast(candidate_name_node)?; | ||
63 | classify_name(sema, &name)?.into_definition(sema.db) | ||
64 | } | ||
diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs new file mode 100644 index 000000000..fd474cd0f --- /dev/null +++ b/crates/ide_db/src/lib.rs | |||
@@ -0,0 +1,139 @@ | |||
1 | //! This crate defines the core datastructure representing IDE state -- `RootDatabase`. | ||
2 | //! | ||
3 | //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. | ||
4 | |||
5 | pub mod line_index; | ||
6 | pub mod symbol_index; | ||
7 | pub mod change; | ||
8 | pub mod defs; | ||
9 | pub mod search; | ||
10 | pub mod imports_locator; | ||
11 | pub mod source_change; | ||
12 | mod wasm_shims; | ||
13 | |||
14 | use std::{fmt, sync::Arc}; | ||
15 | |||
16 | use base_db::{ | ||
17 | salsa::{self, Durability}, | ||
18 | Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, | ||
19 | Upcast, | ||
20 | }; | ||
21 | use hir::db::{AstDatabase, DefDatabase, HirDatabase}; | ||
22 | use rustc_hash::FxHashSet; | ||
23 | |||
24 | use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase}; | ||
25 | |||
/// The salsa database at the heart of the IDE: it aggregates every query
/// group storage (vfs-level source queries, line indices, the symbol index,
/// and the HIR layers) into one incremental-computation database.
#[salsa::database(
    base_db::SourceDatabaseStorage,
    base_db::SourceDatabaseExtStorage,
    LineIndexDatabaseStorage,
    symbol_index::SymbolsDatabaseStorage,
    hir::db::InternDatabaseStorage,
    hir::db::AstDatabaseStorage,
    hir::db::DefDatabaseStorage,
    hir::db::HirDatabaseStorage
)]
pub struct RootDatabase {
    storage: salsa::Storage<RootDatabase>,
    // Timestamps consulted when deciding whether to sweep salsa caches;
    // `wasm_shims::Instant` provides an `Instant` that also works on wasm.
    pub last_gc: crate::wasm_shims::Instant,
    pub last_gc_check: crate::wasm_shims::Instant,
}
41 | |||
42 | impl fmt::Debug for RootDatabase { | ||
43 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
44 | f.debug_struct("RootDatabase").finish() | ||
45 | } | ||
46 | } | ||
47 | |||
48 | impl Upcast<dyn AstDatabase> for RootDatabase { | ||
49 | fn upcast(&self) -> &(dyn AstDatabase + 'static) { | ||
50 | &*self | ||
51 | } | ||
52 | } | ||
53 | |||
54 | impl Upcast<dyn DefDatabase> for RootDatabase { | ||
55 | fn upcast(&self) -> &(dyn DefDatabase + 'static) { | ||
56 | &*self | ||
57 | } | ||
58 | } | ||
59 | |||
60 | impl Upcast<dyn HirDatabase> for RootDatabase { | ||
61 | fn upcast(&self) -> &(dyn HirDatabase + 'static) { | ||
62 | &*self | ||
63 | } | ||
64 | } | ||
65 | |||
66 | impl FileLoader for RootDatabase { | ||
67 | fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
68 | FileLoaderDelegate(self).file_text(file_id) | ||
69 | } | ||
70 | fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | ||
71 | FileLoaderDelegate(self).resolve_path(anchor, path) | ||
72 | } | ||
73 | fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> { | ||
74 | FileLoaderDelegate(self).relevant_crates(file_id) | ||
75 | } | ||
76 | } | ||
77 | |||
78 | impl salsa::Database for RootDatabase { | ||
79 | fn on_propagated_panic(&self) -> ! { | ||
80 | Canceled::throw() | ||
81 | } | ||
82 | fn salsa_event(&self, event: salsa::Event) { | ||
83 | match event.kind { | ||
84 | salsa::EventKind::DidValidateMemoizedValue { .. } | ||
85 | | salsa::EventKind::WillExecute { .. } => { | ||
86 | self.check_canceled(); | ||
87 | } | ||
88 | _ => (), | ||
89 | } | ||
90 | } | ||
91 | } | ||
92 | |||
93 | impl Default for RootDatabase { | ||
94 | fn default() -> RootDatabase { | ||
95 | RootDatabase::new(None) | ||
96 | } | ||
97 | } | ||
98 | |||
99 | impl RootDatabase { | ||
100 | pub fn new(lru_capacity: Option<usize>) -> RootDatabase { | ||
101 | let mut db = RootDatabase { | ||
102 | storage: salsa::Storage::default(), | ||
103 | last_gc: crate::wasm_shims::Instant::now(), | ||
104 | last_gc_check: crate::wasm_shims::Instant::now(), | ||
105 | }; | ||
106 | db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); | ||
107 | db.set_local_roots_with_durability(Default::default(), Durability::HIGH); | ||
108 | db.set_library_roots_with_durability(Default::default(), Durability::HIGH); | ||
109 | db.update_lru_capacity(lru_capacity); | ||
110 | db | ||
111 | } | ||
112 | |||
113 | pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) { | ||
114 | let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP); | ||
115 | base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | ||
116 | hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | ||
117 | hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity); | ||
118 | } | ||
119 | } | ||
120 | |||
121 | impl salsa::ParallelDatabase for RootDatabase { | ||
122 | fn snapshot(&self) -> salsa::Snapshot<RootDatabase> { | ||
123 | salsa::Snapshot::new(RootDatabase { | ||
124 | storage: self.storage.snapshot(), | ||
125 | last_gc: self.last_gc, | ||
126 | last_gc_check: self.last_gc_check, | ||
127 | }) | ||
128 | } | ||
129 | } | ||
130 | |||
131 | #[salsa::query_group(LineIndexDatabaseStorage)] | ||
132 | pub trait LineIndexDatabase: base_db::SourceDatabase + CheckCanceled { | ||
133 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; | ||
134 | } | ||
135 | |||
136 | fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> { | ||
137 | let text = db.file_text(file_id); | ||
138 | Arc::new(LineIndex::new(&*text)) | ||
139 | } | ||
diff --git a/crates/ide_db/src/line_index.rs b/crates/ide_db/src/line_index.rs new file mode 100644 index 000000000..a381f7fb8 --- /dev/null +++ b/crates/ide_db/src/line_index.rs | |||
@@ -0,0 +1,281 @@ | |||
1 | //! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)` | ||
2 | //! representation. | ||
3 | use std::iter; | ||
4 | |||
5 | use rustc_hash::FxHashMap; | ||
6 | use stdx::partition_point; | ||
7 | use syntax::{TextRange, TextSize}; | ||
8 | |||
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
    /// Offset of the beginning of each line, zero-based; `newlines[0]` is
    /// always 0 and each later entry is the offset just past a `\n`.
    pub(crate) newlines: Vec<TextSize>,
    /// For each line that has any, the non-ASCII characters on that line,
    /// keyed by zero-based line number; used for UTF-8 <-> UTF-16 column
    /// conversion.
    pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
    /// Zero-based
    pub line: u32,
    /// Zero-based, counted in UTF-16 code units
    pub col_utf16: u32,
}

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct Utf16Char {
    /// Start offset of a character inside a line, zero-based (UTF-8 bytes)
    pub(crate) start: TextSize,
    /// End offset of a character inside a line, zero-based (UTF-8 bytes)
    pub(crate) end: TextSize,
}
32 | |||
33 | impl Utf16Char { | ||
34 | /// Returns the length in 8-bit UTF-8 code units. | ||
35 | fn len(&self) -> TextSize { | ||
36 | self.end - self.start | ||
37 | } | ||
38 | |||
39 | /// Returns the length in 16-bit UTF-16 code units. | ||
40 | fn len_utf16(&self) -> usize { | ||
41 | if self.len() == TextSize::from(4) { | ||
42 | 2 | ||
43 | } else { | ||
44 | 1 | ||
45 | } | ||
46 | } | ||
47 | } | ||
48 | |||
49 | impl LineIndex { | ||
50 | pub fn new(text: &str) -> LineIndex { | ||
51 | let mut utf16_lines = FxHashMap::default(); | ||
52 | let mut utf16_chars = Vec::new(); | ||
53 | |||
54 | let mut newlines = vec![0.into()]; | ||
55 | let mut curr_row = 0.into(); | ||
56 | let mut curr_col = 0.into(); | ||
57 | let mut line = 0; | ||
58 | for c in text.chars() { | ||
59 | let c_len = TextSize::of(c); | ||
60 | curr_row += c_len; | ||
61 | if c == '\n' { | ||
62 | newlines.push(curr_row); | ||
63 | |||
64 | // Save any utf-16 characters seen in the previous line | ||
65 | if !utf16_chars.is_empty() { | ||
66 | utf16_lines.insert(line, utf16_chars); | ||
67 | utf16_chars = Vec::new(); | ||
68 | } | ||
69 | |||
70 | // Prepare for processing the next line | ||
71 | curr_col = 0.into(); | ||
72 | line += 1; | ||
73 | continue; | ||
74 | } | ||
75 | |||
76 | if !c.is_ascii() { | ||
77 | utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len }); | ||
78 | } | ||
79 | |||
80 | curr_col += c_len; | ||
81 | } | ||
82 | |||
83 | // Save any utf-16 characters seen in the last line | ||
84 | if !utf16_chars.is_empty() { | ||
85 | utf16_lines.insert(line, utf16_chars); | ||
86 | } | ||
87 | |||
88 | LineIndex { newlines, utf16_lines } | ||
89 | } | ||
90 | |||
91 | pub fn line_col(&self, offset: TextSize) -> LineCol { | ||
92 | let line = partition_point(&self.newlines, |&it| it <= offset) - 1; | ||
93 | let line_start_offset = self.newlines[line]; | ||
94 | let col = offset - line_start_offset; | ||
95 | |||
96 | LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 } | ||
97 | } | ||
98 | |||
99 | pub fn offset(&self, line_col: LineCol) -> TextSize { | ||
100 | //FIXME: return Result | ||
101 | let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16); | ||
102 | self.newlines[line_col.line as usize] + col | ||
103 | } | ||
104 | |||
105 | pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ { | ||
106 | let lo = partition_point(&self.newlines, |&it| it < range.start()); | ||
107 | let hi = partition_point(&self.newlines, |&it| it <= range.end()); | ||
108 | let all = iter::once(range.start()) | ||
109 | .chain(self.newlines[lo..hi].iter().copied()) | ||
110 | .chain(iter::once(range.end())); | ||
111 | |||
112 | all.clone() | ||
113 | .zip(all.skip(1)) | ||
114 | .map(|(lo, hi)| TextRange::new(lo, hi)) | ||
115 | .filter(|it| !it.is_empty()) | ||
116 | } | ||
117 | |||
118 | fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize { | ||
119 | let mut res: usize = col.into(); | ||
120 | if let Some(utf16_chars) = self.utf16_lines.get(&line) { | ||
121 | for c in utf16_chars { | ||
122 | if c.end <= col { | ||
123 | res -= usize::from(c.len()) - c.len_utf16(); | ||
124 | } else { | ||
125 | // From here on, all utf16 characters come *after* the character we are mapping, | ||
126 | // so we don't need to take them into account | ||
127 | break; | ||
128 | } | ||
129 | } | ||
130 | } | ||
131 | res | ||
132 | } | ||
133 | |||
134 | fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize { | ||
135 | if let Some(utf16_chars) = self.utf16_lines.get(&line) { | ||
136 | for c in utf16_chars { | ||
137 | if col > u32::from(c.start) { | ||
138 | col += u32::from(c.len()) - c.len_utf16() as u32; | ||
139 | } else { | ||
140 | // From here on, all utf16 characters come *after* the character we are mapping, | ||
141 | // so we don't need to take them into account | ||
142 | break; | ||
143 | } | ||
144 | } | ||
145 | } | ||
146 | |||
147 | col.into() | ||
148 | } | ||
149 | } | ||
150 | |||
#[cfg(test)]
mod tests {
    use super::*;

    // Basic offset -> (line, col) mapping on pure-ASCII input, with and
    // without a leading newline. Note: some probed offsets lie past the end
    // of the text; `line_col` still answers by extrapolating the last line.
    #[test]
    fn test_line_index() {
        let text = "hello\nworld";
        let index = LineIndex::new(text);
        assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
        assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 });
        assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 });
        assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 });
        assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 });
        assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 });
        assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 });
        assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 });
        assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 });

        let text = "\nhello\nworld";
        let index = LineIndex::new(text);
        assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
        assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 });
        assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 });
        assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 });
        assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 });
    }

    // Sanity-check the UTF-8/UTF-16 length assumptions the index relies on.
    #[test]
    fn test_char_len() {
        assert_eq!('メ'.len_utf8(), 3);
        assert_eq!('メ'.len_utf16(), 1);
    }

    // ASCII-only text must not allocate any utf16 side tables.
    #[test]
    fn test_empty_index() {
        let col_index = LineIndex::new(
            "
const C: char = 'x';
",
        );
        assert_eq!(col_index.utf16_lines.len(), 0);
    }

    // One 3-byte character on line 1; column conversions shift by 2 bytes
    // past it. The final case uses a 4-byte char (surrogate pair in UTF-16).
    #[test]
    fn test_single_char() {
        let col_index = LineIndex::new(
            "
const C: char = 'メ';
",
        );

        assert_eq!(col_index.utf16_lines.len(), 1);
        assert_eq!(col_index.utf16_lines[&1].len(), 1);
        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });

        // UTF-8 to UTF-16, no changes
        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

        // UTF-8 to UTF-16
        assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);

        // UTF-16 to UTF-8, no changes
        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));

        // UTF-16 to UTF-8
        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));

        let col_index = LineIndex::new("a𐐏b");
        assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
    }

    // Two multi-byte characters on the same line; conversions must account
    // for both. Queries on lines without utf16 entries pass through.
    #[test]
    fn test_string() {
        let col_index = LineIndex::new(
            "
const C: char = \"メ メ\";
",
        );

        assert_eq!(col_index.utf16_lines.len(), 1);
        assert_eq!(col_index.utf16_lines[&1].len(), 2);
        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
        assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });

        // UTF-8 to UTF-16
        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

        assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
        assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);

        assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);

        // UTF-16 to UTF-8
        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));

        // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
        assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
        assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24

        assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
    }

    // `lines` splits a range at line boundaries and drops empty pieces
    // (e.g. the empty-text case yields nothing at all).
    #[test]
    fn test_splitlines() {
        fn r(lo: u32, hi: u32) -> TextRange {
            TextRange::new(lo.into(), hi.into())
        }

        let text = "a\nbb\nccc\n";
        let line_index = LineIndex::new(text);

        let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
        let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
        assert_eq!(actual, expected);

        let text = "";
        let line_index = LineIndex::new(text);

        let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
        let expected = vec![];
        assert_eq!(actual, expected);

        let text = "\n";
        let line_index = LineIndex::new(text);

        let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
        let expected = vec![r(0, 1)];
        assert_eq!(actual, expected)
    }
}
diff --git a/crates/ide_db/src/search.rs b/crates/ide_db/src/search.rs new file mode 100644 index 000000000..b9360bf12 --- /dev/null +++ b/crates/ide_db/src/search.rs | |||
@@ -0,0 +1,322 @@ | |||
1 | //! Implementation of find-usages functionality. | ||
2 | //! | ||
3 | //! It is based on the standard ide trick: first, we run a fast text search to | ||
4 | //! get a super-set of matches. Then, we confirm each match using precise | ||
5 | //! name resolution. | ||
6 | |||
7 | use std::{convert::TryInto, mem}; | ||
8 | |||
9 | use base_db::{FileId, FileRange, SourceDatabaseExt}; | ||
10 | use hir::{DefWithBody, HasSource, Module, ModuleSource, Semantics, Visibility}; | ||
11 | use once_cell::unsync::Lazy; | ||
12 | use rustc_hash::FxHashMap; | ||
13 | use syntax::{ast, match_ast, AstNode, TextRange, TextSize}; | ||
14 | |||
15 | use crate::{ | ||
16 | defs::{classify_name_ref, Definition, NameRefClass}, | ||
17 | RootDatabase, | ||
18 | }; | ||
19 | |||
/// A single found usage of a definition.
#[derive(Debug, Clone)]
pub struct Reference {
    pub file_range: FileRange,
    pub kind: ReferenceKind,
    /// Read/write classification; populated only for locals and fields
    /// (see `reference_access`).
    pub access: Option<ReferenceAccess>,
}

/// Syntactic flavor of a reference; consumers can use it to decide how an
/// edit at the reference site must be spelled.
#[derive(Debug, Clone, PartialEq)]
pub enum ReferenceKind {
    FieldShorthandForField,
    FieldShorthandForLocal,
    StructLiteral,
    Other,
}

/// Whether a local/field reference reads or writes the value.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum ReferenceAccess {
    Read,
    Write,
}
40 | |||
/// Generally, `search_scope` returns files that might contain references for the element.
/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependant crates.
/// In some cases, the location of the references is known to within a `TextRange`,
/// e.g. for things like local variables.
pub struct SearchScope {
    // Per-file restriction; `None` means the whole file is in scope.
    entries: FxHashMap<FileId, Option<TextRange>>,
}
48 | |||
49 | impl SearchScope { | ||
50 | fn new(entries: FxHashMap<FileId, Option<TextRange>>) -> SearchScope { | ||
51 | SearchScope { entries } | ||
52 | } | ||
53 | |||
54 | pub fn empty() -> SearchScope { | ||
55 | SearchScope::new(FxHashMap::default()) | ||
56 | } | ||
57 | |||
58 | pub fn single_file(file: FileId) -> SearchScope { | ||
59 | SearchScope::new(std::iter::once((file, None)).collect()) | ||
60 | } | ||
61 | |||
62 | pub fn files(files: &[FileId]) -> SearchScope { | ||
63 | SearchScope::new(files.iter().map(|f| (*f, None)).collect()) | ||
64 | } | ||
65 | |||
66 | pub fn intersection(&self, other: &SearchScope) -> SearchScope { | ||
67 | let (mut small, mut large) = (&self.entries, &other.entries); | ||
68 | if small.len() > large.len() { | ||
69 | mem::swap(&mut small, &mut large) | ||
70 | } | ||
71 | |||
72 | let res = small | ||
73 | .iter() | ||
74 | .filter_map(|(file_id, r1)| { | ||
75 | let r2 = large.get(file_id)?; | ||
76 | let r = intersect_ranges(*r1, *r2)?; | ||
77 | Some((*file_id, r)) | ||
78 | }) | ||
79 | .collect(); | ||
80 | |||
81 | return SearchScope::new(res); | ||
82 | |||
83 | fn intersect_ranges( | ||
84 | r1: Option<TextRange>, | ||
85 | r2: Option<TextRange>, | ||
86 | ) -> Option<Option<TextRange>> { | ||
87 | match (r1, r2) { | ||
88 | (None, r) | (r, None) => Some(r), | ||
89 | (Some(r1), Some(r2)) => { | ||
90 | let r = r1.intersect(r2)?; | ||
91 | Some(Some(r)) | ||
92 | } | ||
93 | } | ||
94 | } | ||
95 | } | ||
96 | } | ||
97 | |||
impl IntoIterator for SearchScope {
    type Item = (FileId, Option<TextRange>);
    type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>;

    // Iterating a scope yields each file together with its optional range
    // restriction (`None` = whole file).
    fn into_iter(self) -> Self::IntoIter {
        self.entries.into_iter()
    }
}
106 | |||
impl Definition {
    /// Computes the set of files (and ranges within them) that could
    /// possibly reference this definition, based on its visibility:
    /// locals -> enclosing body; module-visible -> that module subtree;
    /// `pub` -> whole source root plus all reverse dependencies;
    /// otherwise -> the defining module only.
    fn search_scope(&self, db: &RootDatabase) -> SearchScope {
        let _p = profile::span("search_scope");
        let module = match self.module(db) {
            Some(it) => it,
            None => return SearchScope::empty(),
        };
        let module_src = module.definition_source(db);
        let file_id = module_src.file_id.original_file(db);

        // Locals can only be referenced inside the body that declares them.
        if let Definition::Local(var) = self {
            let range = match var.parent(db) {
                DefWithBody::Function(f) => f.source(db).value.syntax().text_range(),
                DefWithBody::Const(c) => c.source(db).value.syntax().text_range(),
                DefWithBody::Static(s) => s.source(db).value.syntax().text_range(),
            };
            let mut res = FxHashMap::default();
            res.insert(file_id, Some(range));
            return SearchScope::new(res);
        }

        let vis = self.visibility(db);

        // Visible within some module: walk that module and its children.
        if let Some(Visibility::Module(module)) = vis.and_then(|it| it.into()) {
            let module: Module = module.into();
            let mut res = FxHashMap::default();

            let mut to_visit = vec![module];
            let mut is_first = true;
            while let Some(module) = to_visit.pop() {
                let src = module.definition_source(db);
                let file_id = src.file_id.original_file(db);
                match src.value {
                    ModuleSource::Module(m) => {
                        if is_first {
                            // Restrict to the inline `mod { .. }` node itself.
                            let range = Some(m.syntax().text_range());
                            res.insert(file_id, range);
                        } else {
                            // We have already added the enclosing file to the search scope,
                            // so do nothing.
                        }
                    }
                    ModuleSource::SourceFile(_) => {
                        res.insert(file_id, None);
                    }
                };
                is_first = false;
                to_visit.extend(module.children(db));
            }

            return SearchScope::new(res);
        }

        // `pub`: the whole source root plus every reverse dependency's root.
        if let Some(Visibility::Public) = vis {
            let source_root_id = db.file_source_root(file_id);
            let source_root = db.source_root(source_root_id);
            let mut res = source_root.iter().map(|id| (id, None)).collect::<FxHashMap<_, _>>();

            let krate = module.krate();
            for rev_dep in krate.reverse_dependencies(db) {
                let root_file = rev_dep.root_file(db);
                let source_root_id = db.file_source_root(root_file);
                let source_root = db.source_root(source_root_id);
                res.extend(source_root.iter().map(|id| (id, None)));
            }
            return SearchScope::new(res);
        }

        // Fallback: just the defining module (restricted to the inline
        // module node when there is one).
        let mut res = FxHashMap::default();
        let range = match module_src.value {
            ModuleSource::Module(m) => Some(m.syntax().text_range()),
            ModuleSource::SourceFile(_) => None,
        };
        res.insert(file_id, range);
        SearchScope::new(res)
    }

    /// Finds all references to this definition. First narrows candidates
    /// with a plain text search for the definition's name over the search
    /// scope, then confirms each hit with name resolution.
    pub fn find_usages(
        &self,
        sema: &Semantics<RootDatabase>,
        search_scope: Option<SearchScope>,
    ) -> Vec<Reference> {
        let _p = profile::span("Definition::find_usages");

        // The caller-supplied scope can only narrow the computed one.
        let search_scope = {
            let base = self.search_scope(sema.db);
            match search_scope {
                None => base,
                Some(scope) => base.intersection(&scope),
            }
        };

        let name = match self.name(sema.db) {
            None => return Vec::new(),
            Some(it) => it.to_string(),
        };

        let pat = name.as_str();
        let mut refs = vec![];

        for (file_id, search_range) in search_scope {
            let text = sema.db.file_text(file_id);
            let search_range =
                search_range.unwrap_or(TextRange::up_to(TextSize::of(text.as_str())));

            // Parse lazily: files with no textual match never get parsed.
            let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());

            for (idx, _) in text.match_indices(pat) {
                let offset: TextSize = idx.try_into().unwrap();
                if !search_range.contains_inclusive(offset) {
                    continue;
                }

                let name_ref: ast::NameRef =
                    if let Some(name_ref) = sema.find_node_at_offset_with_descend(&tree, offset) {
                        name_ref
                    } else {
                        continue;
                    };

                // Confirm the textual hit semantically.
                match classify_name_ref(&sema, &name_ref) {
                    Some(NameRefClass::Definition(def)) if &def == self => {
                        let kind = if is_record_lit_name_ref(&name_ref)
                            || is_call_expr_name_ref(&name_ref)
                        {
                            ReferenceKind::StructLiteral
                        } else {
                            ReferenceKind::Other
                        };

                        let file_range = sema.original_range(name_ref.syntax());
                        refs.push(Reference {
                            file_range,
                            kind,
                            access: reference_access(&def, &name_ref),
                        });
                    }
                    // `Foo { field }` shorthand references both the field and
                    // the local; record whichever one we are searching for.
                    Some(NameRefClass::FieldShorthand { local, field }) => {
                        match self {
                            Definition::Field(_) if &field == self => refs.push(Reference {
                                file_range: sema.original_range(name_ref.syntax()),
                                kind: ReferenceKind::FieldShorthandForField,
                                access: reference_access(&field, &name_ref),
                            }),
                            Definition::Local(l) if &local == l => refs.push(Reference {
                                file_range: sema.original_range(name_ref.syntax()),
                                kind: ReferenceKind::FieldShorthandForLocal,
                                access: reference_access(&Definition::Local(local), &name_ref),
                            }),

                            _ => {} // not a usage
                        };
                    }
                    _ => {} // not a usage
                }
            }
        }
        refs
    }
}
267 | |||
/// Classifies a local/field reference as `Read` or `Write` by looking for an
/// enclosing assignment whose left-hand side ends at the reference; returns
/// `None` for definitions other than locals and fields.
fn reference_access(def: &Definition, name_ref: &ast::NameRef) -> Option<ReferenceAccess> {
    // Only Locals and Fields have accesses for now.
    match def {
        Definition::Local(_) | Definition::Field(_) => {}
        _ => return None,
    };

    let mode = name_ref.syntax().ancestors().find_map(|node| {
        match_ast! {
            match (node) {
                ast::BinExpr(expr) => {
                    if expr.op_kind()?.is_assignment() {
                        // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals).
                        // FIXME: This is not terribly accurate.
                        if let Some(lhs) = expr.lhs() {
                            if lhs.syntax().text_range().end() == name_ref.syntax().text_range().end() {
                                return Some(ReferenceAccess::Write);
                            }
                        }
                    }
                    Some(ReferenceAccess::Read)
                },
                _ => None
            }
        }
    });

    // Default Locals and Fields to read
    mode.or(Some(ReferenceAccess::Read))
}
298 | |||
299 | fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool { | ||
300 | name_ref | ||
301 | .syntax() | ||
302 | .ancestors() | ||
303 | .find_map(ast::CallExpr::cast) | ||
304 | .and_then(|c| match c.expr()? { | ||
305 | ast::Expr::PathExpr(p) => { | ||
306 | Some(p.path()?.segment()?.name_ref().as_ref() == Some(name_ref)) | ||
307 | } | ||
308 | _ => None, | ||
309 | }) | ||
310 | .unwrap_or(false) | ||
311 | } | ||
312 | |||
313 | fn is_record_lit_name_ref(name_ref: &ast::NameRef) -> bool { | ||
314 | name_ref | ||
315 | .syntax() | ||
316 | .ancestors() | ||
317 | .find_map(ast::RecordExpr::cast) | ||
318 | .and_then(|l| l.path()) | ||
319 | .and_then(|p| p.segment()) | ||
320 | .map(|p| p.name_ref().as_ref() == Some(name_ref)) | ||
321 | .unwrap_or(false) | ||
322 | } | ||
diff --git a/crates/ide_db/src/source_change.rs b/crates/ide_db/src/source_change.rs new file mode 100644 index 000000000..f1590ec66 --- /dev/null +++ b/crates/ide_db/src/source_change.rs | |||
@@ -0,0 +1,59 @@ | |||
1 | //! This module defines types to represent changes to the source code, that flow | ||
2 | //! from the server to the client. | ||
3 | //! | ||
4 | //! It can be viewed as a dual for `AnalysisChange`. | ||
5 | |||
6 | use base_db::FileId; | ||
7 | use text_edit::TextEdit; | ||
8 | |||
/// A collection of edits flowing from server to client: text edits to
/// existing files plus file-system operations.
#[derive(Default, Debug, Clone)]
pub struct SourceChange {
    pub source_file_edits: Vec<SourceFileEdit>,
    pub file_system_edits: Vec<FileSystemEdit>,
    // NOTE(review): presumably flags edits that contain editor snippet
    // syntax the client must interpret — confirm at the consuming site.
    pub is_snippet: bool,
}
15 | |||
impl SourceChange {
    /// Creates a new `SourceChange` from the given source-file and
    /// file-system edits, with `is_snippet` disabled.
    pub fn from_edits(
        source_file_edits: Vec<SourceFileEdit>,
        file_system_edits: Vec<FileSystemEdit>,
    ) -> Self {
        SourceChange { source_file_edits, file_system_edits, is_snippet: false }
    }
}
26 | |||
/// A text edit applied to a single existing file.
#[derive(Debug, Clone)]
pub struct SourceFileEdit {
    pub file_id: FileId,
    pub edit: TextEdit,
}
32 | |||
33 | impl From<SourceFileEdit> for SourceChange { | ||
34 | fn from(edit: SourceFileEdit) -> SourceChange { | ||
35 | vec![edit].into() | ||
36 | } | ||
37 | } | ||
38 | |||
39 | impl From<Vec<SourceFileEdit>> for SourceChange { | ||
40 | fn from(source_file_edits: Vec<SourceFileEdit>) -> SourceChange { | ||
41 | SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false } | ||
42 | } | ||
43 | } | ||
44 | |||
/// A file-system level change.
// NOTE(review): `dst` appears to be a path resolved relative to the
// `anchor` file — confirm against the code that applies these edits.
#[derive(Debug, Clone)]
pub enum FileSystemEdit {
    CreateFile { anchor: FileId, dst: String },
    MoveFile { src: FileId, anchor: FileId, dst: String },
}
50 | |||
51 | impl From<FileSystemEdit> for SourceChange { | ||
52 | fn from(edit: FileSystemEdit) -> SourceChange { | ||
53 | SourceChange { | ||
54 | source_file_edits: Vec::new(), | ||
55 | file_system_edits: vec![edit], | ||
56 | is_snippet: false, | ||
57 | } | ||
58 | } | ||
59 | } | ||
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs new file mode 100644 index 000000000..654df898e --- /dev/null +++ b/crates/ide_db/src/symbol_index.rs | |||
@@ -0,0 +1,429 @@ | |||
1 | //! This module handles fuzzy-searching of functions, structs and other symbols | ||
2 | //! by name across the whole workspace and dependencies. | ||
3 | //! | ||
4 | //! It works by building an incrementally-updated text-search index of all | ||
5 | //! symbols. The backbone of the index is the **awesome** `fst` crate by | ||
6 | //! @BurntSushi. | ||
7 | //! | ||
8 | //! In a nutshell, you give a set of strings to `fst`, and it builds a | ||
9 | //! finite state machine describing this set of strings. The strings which | ||
10 | //! could fuzzy-match a pattern can also be described by a finite state machine. | ||
11 | //! What is freaking cool is that you can now traverse both state machines in | ||
12 | //! lock-step to enumerate the strings which are both in the input set and | ||
13 | //! fuzz-match the query. Or, more formally, given two languages described by | ||
14 | //! FSTs, one can build a product FST which describes the intersection of the | ||
15 | //! languages. | ||
16 | //! | ||
17 | //! `fst` does not support cheap updating of the index, but it supports unioning | ||
18 | //! of state machines. So, to account for changing source code, we build an FST | ||
19 | //! for each library (which is assumed to never change) and an FST for each Rust | ||
20 | //! file in the current workspace, and run a query against the union of all | ||
21 | //! those FSTs. | ||
22 | |||
23 | use std::{ | ||
24 | cmp::Ordering, | ||
25 | fmt, | ||
26 | hash::{Hash, Hasher}, | ||
27 | mem, | ||
28 | sync::Arc, | ||
29 | }; | ||
30 | |||
31 | use base_db::{ | ||
32 | salsa::{self, ParallelDatabase}, | ||
33 | CrateId, FileId, SourceDatabaseExt, SourceRootId, | ||
34 | }; | ||
35 | use fst::{self, Streamer}; | ||
36 | use hir::db::DefDatabase; | ||
37 | use rayon::prelude::*; | ||
38 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
39 | use syntax::{ | ||
40 | ast::{self, NameOwner}, | ||
41 | match_ast, AstNode, Parse, SmolStr, SourceFile, | ||
42 | SyntaxKind::{self, *}, | ||
43 | SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, | ||
44 | }; | ||
45 | |||
46 | use crate::RootDatabase; | ||
47 | |||
/// A symbol-search query, created with [`Query::new`] and refined with the
/// setter methods before being run against one or more `SymbolIndex`es.
#[derive(Debug)]
pub struct Query {
    /// The raw query string, used for the case-sensitive `exact` comparison.
    query: String,
    /// Lowercased copy of `query`; the fuzzy FST search is case-insensitive.
    lowercased: String,
    /// Restrict hits to type-introducing items (structs, enums, traits, ...).
    only_types: bool,
    /// Search library (dependency) symbols instead of workspace symbols.
    libs: bool,
    /// Require the symbol name to equal `query` exactly.
    exact: bool,
    /// Maximum number of results to return.
    limit: usize,
}
57 | |||
58 | impl Query { | ||
59 | pub fn new(query: String) -> Query { | ||
60 | let lowercased = query.to_lowercase(); | ||
61 | Query { | ||
62 | query, | ||
63 | lowercased, | ||
64 | only_types: false, | ||
65 | libs: false, | ||
66 | exact: false, | ||
67 | limit: usize::max_value(), | ||
68 | } | ||
69 | } | ||
70 | |||
71 | pub fn only_types(&mut self) { | ||
72 | self.only_types = true; | ||
73 | } | ||
74 | |||
75 | pub fn libs(&mut self) { | ||
76 | self.libs = true; | ||
77 | } | ||
78 | |||
79 | pub fn exact(&mut self) { | ||
80 | self.exact = true; | ||
81 | } | ||
82 | |||
83 | pub fn limit(&mut self, limit: usize) { | ||
84 | self.limit = limit | ||
85 | } | ||
86 | } | ||
87 | |||
/// Salsa queries for building and caching the symbol indices.
#[salsa::query_group(SymbolsDatabaseStorage)]
pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt {
    /// The symbol index for a single workspace file; recomputed by salsa when
    /// the file's text changes.
    fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>;
    /// One symbol index per library source root, built eagerly over all
    /// library roots (libraries are assumed immutable, see module docs).
    fn library_symbols(&self) -> Arc<FxHashMap<SourceRootId, SymbolIndex>>;
    /// The set of "local" (that is, from the current workspace) roots.
    /// Files in local roots are assumed to change frequently.
    #[salsa::input]
    fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
    /// The set of roots for crates.io libraries.
    /// Files in libraries are assumed to never change.
    #[salsa::input]
    fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
}
101 | |||
102 | fn library_symbols(db: &dyn SymbolsDatabase) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> { | ||
103 | let _p = profile::span("library_symbols"); | ||
104 | |||
105 | let roots = db.library_roots(); | ||
106 | let res = roots | ||
107 | .iter() | ||
108 | .map(|&root_id| { | ||
109 | let root = db.source_root(root_id); | ||
110 | let files = root | ||
111 | .iter() | ||
112 | .map(|it| (it, SourceDatabaseExt::file_text(db, it))) | ||
113 | .collect::<Vec<_>>(); | ||
114 | let symbol_index = SymbolIndex::for_files( | ||
115 | files.into_par_iter().map(|(file, text)| (file, SourceFile::parse(&text))), | ||
116 | ); | ||
117 | (root_id, symbol_index) | ||
118 | }) | ||
119 | .collect(); | ||
120 | Arc::new(res) | ||
121 | } | ||
122 | |||
123 | fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { | ||
124 | db.check_canceled(); | ||
125 | let parse = db.parse(file_id); | ||
126 | |||
127 | let symbols = source_file_to_file_symbols(&parse.tree(), file_id); | ||
128 | |||
129 | // FIXME: add macros here | ||
130 | |||
131 | Arc::new(SymbolIndex::new(symbols)) | ||
132 | } | ||
133 | |||
/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
struct Snap<DB>(DB);
impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
    fn clone(&self) -> Snap<salsa::Snapshot<DB>> {
        // Each rayon worker gets its own forked snapshot of the database.
        Snap(self.0.snapshot())
    }
}
141 | |||
// Feature: Workspace Symbol
//
// Uses fuzzy-search to find types, modules and functions by name across your
// project and dependencies. This is **the** most useful feature, which improves code
// navigation tremendously. It mostly works on top of the built-in LSP
// functionality, however `#` and `*` symbols can be used to narrow down the
// search. Specifically,
//
// - `Foo` searches for `Foo` type in the current workspace
// - `foo#` searches for `foo` function in the current workspace
// - `Foo*` searches for `Foo` type among dependencies, including `stdlib`
// - `foo#*` searches for `foo` function among dependencies
//
// That is, `#` switches from "types" to all symbols, `*` switches from the current
// workspace to dependencies.
//
// |===
// | Editor  | Shortcut
//
// | VS Code | kbd:[Ctrl+T]
// |===
pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
    let _p = profile::span("world_symbols").detail(|| query.query.clone());

    // `tmp1`/`tmp2` exist only to keep the owned index data alive for as long
    // as the `&SymbolIndex` borrows in `buf` are used; a plain `if/else`
    // expression could not extend the owners' lifetimes past its own branches.
    let tmp1;
    let tmp2;
    let buf: Vec<&SymbolIndex> = if query.libs {
        // Library indices are precomputed per source root and cached by salsa.
        tmp1 = db.library_symbols();
        tmp1.values().collect()
    } else {
        // Workspace files change often: collect them and (re)index in
        // parallel, one database snapshot per rayon worker.
        let mut files = Vec::new();
        for &root in db.local_roots().iter() {
            let sr = db.source_root(root);
            files.extend(sr.iter())
        }

        let snap = Snap(db.snapshot());
        tmp2 = files
            .par_iter()
            .map_with(snap, |db, &file_id| db.0.file_symbols(file_id))
            .collect::<Vec<_>>();
        tmp2.iter().map(|it| &**it).collect()
    };
    query.search(&buf)
}
187 | |||
188 | pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<FileSymbol> { | ||
189 | // FIXME(#4842): This now depends on CrateDefMap, why not build the entire symbol index from | ||
190 | // that instead? | ||
191 | |||
192 | let def_map = db.crate_def_map(krate); | ||
193 | let mut files = Vec::new(); | ||
194 | let mut modules = vec![def_map.root]; | ||
195 | while let Some(module) = modules.pop() { | ||
196 | let data = &def_map[module]; | ||
197 | files.extend(data.origin.file_id()); | ||
198 | modules.extend(data.children.values()); | ||
199 | } | ||
200 | |||
201 | let snap = Snap(db.snapshot()); | ||
202 | |||
203 | let buf = files | ||
204 | .par_iter() | ||
205 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) | ||
206 | .collect::<Vec<_>>(); | ||
207 | let buf = buf.iter().map(|it| &**it).collect::<Vec<_>>(); | ||
208 | |||
209 | query.search(&buf) | ||
210 | } | ||
211 | |||
212 | pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec<FileSymbol> { | ||
213 | let name = name_ref.text(); | ||
214 | let mut query = Query::new(name.to_string()); | ||
215 | query.exact(); | ||
216 | query.limit(4); | ||
217 | world_symbols(db, query) | ||
218 | } | ||
219 | |||
/// A searchable index over the symbols of some set of files.
#[derive(Default)]
pub struct SymbolIndex {
    /// All symbols, sorted case-insensitively by name so that entries sharing
    /// a lowercased name occupy one contiguous range (see `SymbolIndex::new`).
    symbols: Vec<FileSymbol>,
    /// FST from lowercased symbol name to the `(start, end)` range of
    /// matching entries in `symbols`, packed into a `u64` value.
    map: fst::Map<Vec<u8>>,
}
225 | |||
impl fmt::Debug for SymbolIndex {
    // Manual impl: dumping every symbol would be noise, and `map` is derived
    // entirely from `symbols` anyway -- just report how many symbols there are.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish()
    }
}
231 | |||
// `map` is a pure function of `symbols` (it is rebuilt from them in
// `SymbolIndex::new`), so equality and hashing only need to look at `symbols`.
impl PartialEq for SymbolIndex {
    fn eq(&self, other: &SymbolIndex) -> bool {
        self.symbols == other.symbols
    }
}

impl Eq for SymbolIndex {}

impl Hash for SymbolIndex {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.symbols.hash(hasher)
    }
}
245 | |||
246 | impl SymbolIndex { | ||
247 | fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex { | ||
248 | fn cmp(lhs: &FileSymbol, rhs: &FileSymbol) -> Ordering { | ||
249 | let lhs_chars = lhs.name.chars().map(|c| c.to_ascii_lowercase()); | ||
250 | let rhs_chars = rhs.name.chars().map(|c| c.to_ascii_lowercase()); | ||
251 | lhs_chars.cmp(rhs_chars) | ||
252 | } | ||
253 | |||
254 | symbols.par_sort_by(cmp); | ||
255 | |||
256 | let mut builder = fst::MapBuilder::memory(); | ||
257 | |||
258 | let mut last_batch_start = 0; | ||
259 | |||
260 | for idx in 0..symbols.len() { | ||
261 | if let Some(next_symbol) = symbols.get(idx + 1) { | ||
262 | if cmp(&symbols[last_batch_start], next_symbol) == Ordering::Equal { | ||
263 | continue; | ||
264 | } | ||
265 | } | ||
266 | |||
267 | let start = last_batch_start; | ||
268 | let end = idx + 1; | ||
269 | last_batch_start = end; | ||
270 | |||
271 | let key = symbols[start].name.as_str().to_ascii_lowercase(); | ||
272 | let value = SymbolIndex::range_to_map_value(start, end); | ||
273 | |||
274 | builder.insert(key, value).unwrap(); | ||
275 | } | ||
276 | |||
277 | let map = fst::Map::new(builder.into_inner().unwrap()).unwrap(); | ||
278 | SymbolIndex { symbols, map } | ||
279 | } | ||
280 | |||
281 | pub fn len(&self) -> usize { | ||
282 | self.symbols.len() | ||
283 | } | ||
284 | |||
285 | pub fn memory_size(&self) -> usize { | ||
286 | self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>() | ||
287 | } | ||
288 | |||
289 | pub(crate) fn for_files( | ||
290 | files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>, | ||
291 | ) -> SymbolIndex { | ||
292 | let symbols = files | ||
293 | .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) | ||
294 | .collect::<Vec<_>>(); | ||
295 | SymbolIndex::new(symbols) | ||
296 | } | ||
297 | |||
298 | fn range_to_map_value(start: usize, end: usize) -> u64 { | ||
299 | debug_assert![start <= (std::u32::MAX as usize)]; | ||
300 | debug_assert![end <= (std::u32::MAX as usize)]; | ||
301 | |||
302 | ((start as u64) << 32) | end as u64 | ||
303 | } | ||
304 | |||
305 | fn map_value_to_range(value: u64) -> (usize, usize) { | ||
306 | let end = value as u32 as usize; | ||
307 | let start = (value >> 32) as usize; | ||
308 | (start, end) | ||
309 | } | ||
310 | } | ||
311 | |||
impl Query {
    /// Runs the query against the given indices, merging their fst maps into
    /// one stream and filtering/limiting the hits.
    pub(crate) fn search(self, indices: &[&SymbolIndex]) -> Vec<FileSymbol> {
        // Fuzzy matching: a `Subsequence` automaton accepts every key that
        // contains the lowercased query as a (not necessarily contiguous)
        // subsequence. Union all per-index searches into a single stream
        // that yields each matched key once.
        let mut op = fst::map::OpBuilder::new();
        for file_symbols in indices.iter() {
            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
            op = op.add(file_symbols.map.search(automaton))
        }
        let mut stream = op.union();
        let mut res = Vec::new();
        while let Some((_, indexed_values)) = stream.next() {
            // One `indexed_value` per index containing the matched key:
            // `.index` identifies the `SymbolIndex`, `.value` encodes the
            // range of its symbols bearing that name.
            for indexed_value in indexed_values {
                let symbol_index = &indices[indexed_value.index];
                let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value);

                for symbol in &symbol_index.symbols[start..end] {
                    if self.only_types && !is_type(symbol.kind) {
                        continue;
                    }
                    if self.exact && symbol.name != self.query {
                        continue;
                    }

                    res.push(symbol.clone());
                    // Stop as soon as the limit is hit; remaining matches are
                    // never materialized.
                    if res.len() >= self.limit {
                        return res;
                    }
                }
            }
        }
        res
    }
}
344 | |||
/// Whether `kind` is a type-introducing item; used by the
/// `Query::only_types` filter.
fn is_type(kind: SyntaxKind) -> bool {
    matches!(kind, STRUCT | ENUM | TRAIT | TYPE_ALIAS)
}
348 | |||
/// The actual data that is stored in the index. It should be as compact as
/// possible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileSymbol {
    /// The file the symbol is defined in.
    pub file_id: FileId,
    /// The symbol's name.
    pub name: SmolStr,
    /// Syntax kind of the defining node (e.g. `STRUCT`, `FN`).
    pub kind: SyntaxKind,
    /// Range of the whole defining node.
    pub range: TextRange,
    /// Pointer back to the defining syntax node.
    pub ptr: SyntaxNodePtr,
    /// Range of just the name token, when known.
    pub name_range: Option<TextRange>,
    /// Name of the innermost enclosing indexed item, if any.
    pub container_name: Option<SmolStr>,
}
361 | |||
362 | fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec<FileSymbol> { | ||
363 | let mut symbols = Vec::new(); | ||
364 | let mut stack = Vec::new(); | ||
365 | |||
366 | for event in source_file.syntax().preorder() { | ||
367 | match event { | ||
368 | WalkEvent::Enter(node) => { | ||
369 | if let Some(mut symbol) = to_file_symbol(&node, file_id) { | ||
370 | symbol.container_name = stack.last().cloned(); | ||
371 | |||
372 | stack.push(symbol.name.clone()); | ||
373 | symbols.push(symbol); | ||
374 | } | ||
375 | } | ||
376 | |||
377 | WalkEvent::Leave(node) => { | ||
378 | if to_symbol(&node).is_some() { | ||
379 | stack.pop(); | ||
380 | } | ||
381 | } | ||
382 | } | ||
383 | } | ||
384 | |||
385 | symbols | ||
386 | } | ||
387 | |||
/// Extracts `(name, pointer, name-range)` from `node`, if it is one of the
/// item kinds we index.
fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
    // Shared extraction for every named-item kind below.
    fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
        let name = node.name()?;
        let name_range = name.syntax().text_range();
        let name = name.text().clone();
        let ptr = SyntaxNodePtr::new(node.syntax());

        Some((name, ptr, name_range))
    }
    match_ast! {
        match node {
            ast::Fn(it) => decl(it),
            ast::Struct(it) => decl(it),
            ast::Enum(it) => decl(it),
            ast::Trait(it) => decl(it),
            ast::Module(it) => decl(it),
            ast::TypeAlias(it) => decl(it),
            ast::Const(it) => decl(it),
            ast::Static(it) => decl(it),
            ast::MacroCall(it) => {
                // Only `macro_rules!` definitions introduce a name; other
                // macro invocations are not symbols.
                if it.is_macro_rules().is_some() {
                    decl(it)
                } else {
                    None
                }
            },
            _ => None,
        }
    }
}
418 | |||
419 | fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> { | ||
420 | to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { | ||
421 | name, | ||
422 | kind: node.kind(), | ||
423 | range: node.text_range(), | ||
424 | ptr, | ||
425 | file_id, | ||
426 | name_range: Some(name_range), | ||
427 | container_name: None, | ||
428 | }) | ||
429 | } | ||
diff --git a/crates/ide_db/src/wasm_shims.rs b/crates/ide_db/src/wasm_shims.rs new file mode 100644 index 000000000..7af9f9d9b --- /dev/null +++ b/crates/ide_db/src/wasm_shims.rs | |||
@@ -0,0 +1,19 @@ | |||
1 | //! A version of `std::time::Instant` that doesn't panic in WASM. | ||
2 | |||
// On native targets the real monotonic clock works -- just re-export it.
#[cfg(not(feature = "wasm"))]
pub use std::time::Instant;

// On WASM, `std::time::Instant::now()` panics (see module docs), so
// substitute a stateless stub with the same surface.
#[cfg(feature = "wasm")]
#[derive(Clone, Copy, Debug)]
pub struct Instant;

#[cfg(feature = "wasm")]
impl Instant {
    /// Stub: there is no clock to read, so the instant carries no state.
    pub fn now() -> Self {
        Self
    }

    /// Stub: always reports zero elapsed time.
    pub fn elapsed(&self) -> std::time::Duration {
        std::time::Duration::new(0, 0)
    }
}