diff options
Diffstat (limited to 'crates/ra_ide_db')
-rw-r--r-- | crates/ra_ide_db/Cargo.toml | 48 | ||||
-rw-r--r-- | crates/ra_ide_db/src/change.rs | 387 | ||||
-rw-r--r-- | crates/ra_ide_db/src/feature_flags.rs | 71 | ||||
-rw-r--r-- | crates/ra_ide_db/src/lib.rs | 138 | ||||
-rw-r--r-- | crates/ra_ide_db/src/line_index.rs | 284 | ||||
-rw-r--r-- | crates/ra_ide_db/src/line_index_utils.rs | 341 | ||||
-rw-r--r-- | crates/ra_ide_db/src/symbol_index.rs | 372 | ||||
-rw-r--r-- | crates/ra_ide_db/src/wasm_shims.rs | 19 |
8 files changed, 1660 insertions, 0 deletions
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml new file mode 100644 index 000000000..1b7905eb3 --- /dev/null +++ b/crates/ra_ide_db/Cargo.toml | |||
@@ -0,0 +1,48 @@ | |||
[package]
edition = "2018"
name = "ra_ide_db"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[features]
# Builds without rayon/time-based GC for webassembly targets.
wasm = []

[dependencies]
either = "1.5"
format-buf = "1.0.0"
indexmap = "1.3.0"
itertools = "0.8.0"
join_to_string = "0.1.3"
log = "0.4.5"
rayon = "1.0.2"
fst = { version = "0.3.1", default-features = false }
rustc-hash = "1.0"
unicase = "2.2.0"
superslice = "1.0.0"
rand = { version = "0.7.0", features = ["small_rng"] }
once_cell = "1.2.0"

ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }
ra_db = { path = "../ra_db" }
ra_cfg = { path = "../ra_cfg" }
ra_fmt = { path = "../ra_fmt" }
ra_prof = { path = "../ra_prof" }
test_utils = { path = "../test_utils" }
ra_assists = { path = "../ra_assists" }

# ra_ide_db should depend only on the top-level `hir` package. If you need
# something from some `hir_xxx` subpackage, reexport the API via `hir`.
hir = { path = "../ra_hir", package = "ra_hir" }

[dev-dependencies]
insta = "0.13.0"

[dev-dependencies.proptest]
version = "0.9.0"
# Disable `fork` feature to allow compiling on webassembly
default-features = false
features = ["std", "bit-set", "break-dead-code"]
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs new file mode 100644 index 000000000..4668784d3 --- /dev/null +++ b/crates/ra_ide_db/src/change.rs | |||
@@ -0,0 +1,387 @@ | |||
1 | //! Defines a unit of change that can be applied to a state of IDE to get the next | ||
2 | //! state. Changes are transactional. | ||
3 | |||
4 | use std::{fmt, sync::Arc, time}; | ||
5 | |||
6 | use ra_db::{ | ||
7 | salsa::{Database, Durability, SweepStrategy}, | ||
8 | CrateGraph, CrateId, FileId, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot, | ||
9 | SourceRootId, | ||
10 | }; | ||
11 | use ra_prof::{memory_usage, profile, Bytes}; | ||
12 | use ra_syntax::SourceFile; | ||
13 | #[cfg(not(feature = "wasm"))] | ||
14 | use rayon::prelude::*; | ||
15 | use rustc_hash::FxHashMap; | ||
16 | |||
17 | use crate::{ | ||
18 | symbol_index::{SymbolIndex, SymbolsDatabase}, | ||
19 | DebugData, RootDatabase, | ||
20 | }; | ||
21 | |||
/// A transactional set of updates to the IDE state: new source roots,
/// added/removed/edited files, prebuilt library data, and an optional
/// replacement crate graph.
#[derive(Default)]
pub struct AnalysisChange {
    // Roots added since the last change; the bool is `true` for local
    // (workspace) roots, `false` for library roots (see `apply_change`).
    new_roots: Vec<(SourceRootId, bool)>,
    // Per-root file additions/removals.
    roots_changed: FxHashMap<SourceRootId, RootChange>,
    // Files whose full text was replaced.
    files_changed: Vec<(FileId, Arc<String>)>,
    // Fully analyzed external libraries with prebuilt symbol indices.
    libraries_added: Vec<LibraryData>,
    // When `Some`, replaces the whole crate graph.
    crate_graph: Option<CrateGraph>,
    // Human-readable names used only for debug output.
    debug_data: DebugData,
}
31 | |||
32 | impl fmt::Debug for AnalysisChange { | ||
33 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
34 | let mut d = fmt.debug_struct("AnalysisChange"); | ||
35 | if !self.new_roots.is_empty() { | ||
36 | d.field("new_roots", &self.new_roots); | ||
37 | } | ||
38 | if !self.roots_changed.is_empty() { | ||
39 | d.field("roots_changed", &self.roots_changed); | ||
40 | } | ||
41 | if !self.files_changed.is_empty() { | ||
42 | d.field("files_changed", &self.files_changed.len()); | ||
43 | } | ||
44 | if !self.libraries_added.is_empty() { | ||
45 | d.field("libraries_added", &self.libraries_added.len()); | ||
46 | } | ||
47 | if !self.crate_graph.is_none() { | ||
48 | d.field("crate_graph", &self.crate_graph); | ||
49 | } | ||
50 | d.finish() | ||
51 | } | ||
52 | } | ||
53 | |||
impl AnalysisChange {
    /// Creates an empty change set.
    pub fn new() -> AnalysisChange {
        AnalysisChange::default()
    }

    /// Registers a brand-new source root; `is_local` distinguishes
    /// workspace roots from library roots (see `apply_change`).
    pub fn add_root(&mut self, root_id: SourceRootId, is_local: bool) {
        self.new_roots.push((root_id, is_local));
    }

    /// Records the addition of `file_id` at `path` (relative to the root)
    /// with the given initial text.
    pub fn add_file(
        &mut self,
        root_id: SourceRootId,
        file_id: FileId,
        path: RelativePathBuf,
        text: Arc<String>,
    ) {
        let file = AddFile { file_id, path, text };
        self.roots_changed.entry(root_id).or_default().added.push(file);
    }

    /// Records a full-text replacement of an existing file.
    pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) {
        self.files_changed.push((file_id, new_text))
    }

    /// Records the removal of a file from its root.
    pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) {
        let file = RemoveFile { file_id, path };
        self.roots_changed.entry(root_id).or_default().removed.push(file);
    }

    /// Adds a prebuilt library (files plus a ready-made symbol index).
    pub fn add_library(&mut self, data: LibraryData) {
        self.libraries_added.push(data)
    }

    /// Replaces the crate graph wholesale.
    pub fn set_crate_graph(&mut self, graph: CrateGraph) {
        self.crate_graph = Some(graph);
    }

    /// Attaches a human-readable crate name, used only in debug output.
    pub fn set_debug_crate_name(&mut self, crate_id: CrateId, name: String) {
        self.debug_data.crate_names.insert(crate_id, name);
    }

    /// Attaches a human-readable root path, used only in debug output.
    pub fn set_debug_root_path(&mut self, source_root_id: SourceRootId, path: String) {
        self.debug_data.root_paths.insert(source_root_id, path);
    }
}
99 | |||
/// A file added to a source root.
#[derive(Debug)]
struct AddFile {
    file_id: FileId,
    // Path relative to the source root.
    path: RelativePathBuf,
    text: Arc<String>,
}

/// A file removed from a source root.
#[derive(Debug)]
struct RemoveFile {
    file_id: FileId,
    path: RelativePathBuf,
}

/// Added/removed files for a single source root.
#[derive(Default)]
struct RootChange {
    added: Vec<AddFile>,
    removed: Vec<RemoveFile>,
}
118 | |||
119 | impl fmt::Debug for RootChange { | ||
120 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | ||
121 | fmt.debug_struct("AnalysisChange") | ||
122 | .field("added", &self.added.len()) | ||
123 | .field("removed", &self.removed.len()) | ||
124 | .finish() | ||
125 | } | ||
126 | } | ||
127 | |||
/// A fully loaded library: its root, the file contents, and a symbol index
/// built ahead of time (see `LibraryData::prepare`).
pub struct LibraryData {
    root_id: SourceRootId,
    root_change: RootChange,
    symbol_index: SymbolIndex,
}

impl fmt::Debug for LibraryData {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Summarize the symbol index by its size; dumping it whole would be
        // enormous.
        f.debug_struct("LibraryData")
            .field("root_id", &self.root_id)
            .field("root_change", &self.root_change)
            .field("n_symbols", &self.symbol_index.len())
            .finish()
    }
}
143 | |||
144 | impl LibraryData { | ||
145 | pub fn prepare( | ||
146 | root_id: SourceRootId, | ||
147 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | ||
148 | ) -> LibraryData { | ||
149 | let _p = profile("LibraryData::prepare"); | ||
150 | |||
151 | #[cfg(not(feature = "wasm"))] | ||
152 | let iter = files.par_iter(); | ||
153 | #[cfg(feature = "wasm")] | ||
154 | let iter = files.iter(); | ||
155 | |||
156 | let symbol_index = SymbolIndex::for_files(iter.map(|(file_id, _, text)| { | ||
157 | let parse = SourceFile::parse(text); | ||
158 | (*file_id, parse) | ||
159 | })); | ||
160 | let mut root_change = RootChange::default(); | ||
161 | root_change.added = files | ||
162 | .into_iter() | ||
163 | .map(|(file_id, path, text)| AddFile { file_id, path, text }) | ||
164 | .collect(); | ||
165 | LibraryData { root_id, root_change, symbol_index } | ||
166 | } | ||
167 | } | ||
168 | |||
// Minimum pause between consecutive garbage-collection checks.
const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100);

impl RootDatabase {
    /// Cancels in-flight queries by bumping the revision counter with a
    /// synthetic low-durability write.
    pub fn request_cancellation(&mut self) {
        let _p = profile("RootDatabase::request_cancellation");
        self.salsa_runtime_mut().synthetic_write(Durability::LOW);
    }

    /// Transactionally applies `change`: new roots first, then per-root file
    /// changes, single-file text edits, prebuilt libraries, and finally the
    /// crate graph. Inputs are written with a durability derived from the
    /// owning root: library inputs are HIGH (rarely change), local ones LOW.
    pub fn apply_change(&mut self, change: AnalysisChange) {
        let _p = profile("RootDatabase::apply_change");
        self.request_cancellation();
        log::info!("apply_change {:?}", change);
        if !change.new_roots.is_empty() {
            let mut local_roots = Vec::clone(&self.local_roots());
            for (root_id, is_local) in change.new_roots {
                let root =
                    if is_local { SourceRoot::new_local() } else { SourceRoot::new_library() };
                let durability = durability(&root);
                self.set_source_root_with_durability(root_id, Arc::new(root), durability);
                if is_local {
                    local_roots.push(root_id);
                }
            }
            self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
        }

        for (root_id, root_change) in change.roots_changed {
            self.apply_root_change(root_id, root_change);
        }
        for (file_id, text) in change.files_changed {
            // Durability follows the file's root: library files are HIGH.
            let source_root_id = self.file_source_root(file_id);
            let source_root = self.source_root(source_root_id);
            let durability = durability(&source_root);
            self.set_file_text_with_durability(file_id, text, durability)
        }
        if !change.libraries_added.is_empty() {
            let mut libraries = Vec::clone(&self.library_roots());
            for library in change.libraries_added {
                libraries.push(library.root_id);
                self.set_source_root_with_durability(
                    library.root_id,
                    Arc::new(SourceRoot::new_library()),
                    Durability::HIGH,
                );
                // The symbol index was prebuilt in `LibraryData::prepare`.
                self.set_library_symbols_with_durability(
                    library.root_id,
                    Arc::new(library.symbol_index),
                    Durability::HIGH,
                );
                self.apply_root_change(library.root_id, library.root_change);
            }
            self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH);
        }
        if let Some(crate_graph) = change.crate_graph {
            self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH)
        }

        Arc::make_mut(&mut self.debug_data).merge(change.debug_data)
    }

    /// Applies one root's added/removed files and rewrites the root's file
    /// map accordingly. Removed files get empty text rather than being
    /// deleted, since salsa inputs cannot be un-set.
    fn apply_root_change(&mut self, root_id: SourceRootId, root_change: RootChange) {
        let mut source_root = SourceRoot::clone(&self.source_root(root_id));
        let durability = durability(&source_root);
        for add_file in root_change.added {
            self.set_file_text_with_durability(add_file.file_id, add_file.text, durability);
            self.set_file_relative_path_with_durability(
                add_file.file_id,
                add_file.path.clone(),
                durability,
            );
            self.set_file_source_root_with_durability(add_file.file_id, root_id, durability);
            source_root.insert_file(add_file.path, add_file.file_id);
        }
        for remove_file in root_change.removed {
            self.set_file_text_with_durability(remove_file.file_id, Default::default(), durability);
            source_root.remove_file(&remove_file.path);
        }
        self.set_source_root_with_durability(root_id, Arc::new(source_root), durability);
    }

    /// Rate-limited GC check; a no-op on wasm where `Instant` is shimmed.
    ///
    /// NOTE(review): this only refreshes `last_gc_check` once the cooldown
    /// has elapsed and never calls `collect_garbage` -- confirm that actual
    /// collection is intentionally left to explicit callers.
    pub fn maybe_collect_garbage(&mut self) {
        if cfg!(feature = "wasm") {
            return;
        }

        if self.last_gc_check.elapsed() > GC_COOLDOWN {
            self.last_gc_check = crate::wasm_shims::Instant::now();
        }
    }

    /// Frees memory by sweeping memoized values (keeping dependency info)
    /// for the queries that dominate memory usage -- chiefly syntax trees.
    pub fn collect_garbage(&mut self) {
        if cfg!(feature = "wasm") {
            return;
        }

        let _p = profile("RootDatabase::collect_garbage");
        self.last_gc = crate::wasm_shims::Instant::now();

        // Drop cached values across *all* revisions, but keep the
        // dependency graph so queries can be revalidated cheaply.
        let sweep = SweepStrategy::default().discard_values().sweep_all_revisions();

        self.query(ra_db::ParseQuery).sweep(sweep);
        self.query(hir::db::ParseMacroQuery).sweep(sweep);

        // Macros do take significant space, but less than the syntax trees
        // self.query(hir::db::MacroDefQuery).sweep(sweep);
        // self.query(hir::db::MacroArgQuery).sweep(sweep);
        // self.query(hir::db::MacroExpandQuery).sweep(sweep);

        self.query(hir::db::AstIdMapQuery).sweep(sweep);

        self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);

        self.query(hir::db::ExprScopesQuery).sweep(sweep);
        self.query(hir::db::DoInferQuery).sweep(sweep);
        self.query(hir::db::BodyQuery).sweep(sweep);
    }

    /// Estimates per-query memory usage by sweeping each query and measuring
    /// the drop in allocated bytes, sorted largest-first. The first sweep
    /// discards only memoized values; the second discards everything, so its
    /// delta approximates the dependency-tracking overhead ("(deps)" rows).
    pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
        let mut acc: Vec<(String, Bytes)> = vec![];
        let sweep = SweepStrategy::default().discard_values().sweep_all_revisions();
        macro_rules! sweep_each_query {
            ($($q:path)*) => {$(
                let before = memory_usage().allocated;
                self.query($q).sweep(sweep);
                let after = memory_usage().allocated;
                let q: $q = Default::default();
                let name = format!("{:?}", q);
                acc.push((name, before - after));

                let before = memory_usage().allocated;
                self.query($q).sweep(sweep.discard_everything());
                let after = memory_usage().allocated;
                let q: $q = Default::default();
                let name = format!("{:?} (deps)", q);
                acc.push((name, before - after));
            )*}
        }
        sweep_each_query![
            // SourceDatabase
            ra_db::ParseQuery
            ra_db::SourceRootCratesQuery

            // AstDatabase
            hir::db::AstIdMapQuery
            hir::db::InternMacroQuery
            hir::db::MacroArgQuery
            hir::db::MacroDefQuery
            hir::db::ParseMacroQuery
            hir::db::MacroExpandQuery

            // DefDatabase
            hir::db::RawItemsQuery
            hir::db::ComputeCrateDefMapQuery
            hir::db::StructDataQuery
            hir::db::UnionDataQuery
            hir::db::EnumDataQuery
            hir::db::ImplDataQuery
            hir::db::TraitDataQuery
            hir::db::TypeAliasDataQuery
            hir::db::FunctionDataQuery
            hir::db::ConstDataQuery
            hir::db::StaticDataQuery
            hir::db::BodyWithSourceMapQuery
            hir::db::BodyQuery
            hir::db::ExprScopesQuery
            hir::db::GenericParamsQuery
            hir::db::AttrsQuery
            hir::db::ModuleLangItemsQuery
            hir::db::CrateLangItemsQuery
            hir::db::LangItemQuery
            hir::db::DocumentationQuery

            // InternDatabase
            hir::db::InternFunctionQuery
            hir::db::InternStructQuery
            hir::db::InternUnionQuery
            hir::db::InternEnumQuery
            hir::db::InternConstQuery
            hir::db::InternStaticQuery
            hir::db::InternTraitQuery
            hir::db::InternTypeAliasQuery
            hir::db::InternImplQuery

            // HirDatabase
            hir::db::DoInferQuery
            hir::db::TyQuery
            hir::db::ValueTyQuery
            hir::db::ImplSelfTyQuery
            hir::db::ImplTraitQuery
            hir::db::FieldTypesQuery
            hir::db::CallableItemSignatureQuery
            hir::db::GenericPredicatesForParamQuery
            hir::db::GenericPredicatesQuery
            hir::db::GenericDefaultsQuery
            hir::db::ImplsInCrateQuery
            hir::db::ImplsForTraitQuery
            hir::db::TraitSolverQuery
            hir::db::InternTypeCtorQuery
            hir::db::InternChalkImplQuery
            hir::db::InternAssocTyValueQuery
            hir::db::AssociatedTyDataQuery
            hir::db::AssociatedTyValueQuery
            hir::db::TraitSolveQuery
            hir::db::TraitDatumQuery
            hir::db::StructDatumQuery
            hir::db::ImplDatumQuery
        ];
        acc.sort_by_key(|it| std::cmp::Reverse(it.1));
        acc
    }
}
380 | |||
381 | fn durability(source_root: &SourceRoot) -> Durability { | ||
382 | if source_root.is_library { | ||
383 | Durability::HIGH | ||
384 | } else { | ||
385 | Durability::LOW | ||
386 | } | ||
387 | } | ||
diff --git a/crates/ra_ide_db/src/feature_flags.rs b/crates/ra_ide_db/src/feature_flags.rs new file mode 100644 index 000000000..1b3cabf4d --- /dev/null +++ b/crates/ra_ide_db/src/feature_flags.rs | |||
@@ -0,0 +1,71 @@ | |||
1 | //! See docs for `FeatureFlags`. | ||
2 | |||
3 | use rustc_hash::FxHashMap; | ||
4 | |||
/// Feature flags hold fine-grained toggles for all *user-visible* features of
/// rust-analyzer.
///
/// These exist so that users are able to disable any annoying feature (and,
/// with many users and many features, some features are bound to be annoying
/// for some users).
///
/// Note that we purposefully use run-time checked strings, and not something
/// checked at compile time, to keep things simple and flexible.
///
/// Also note that, at the moment, `FeatureFlags` also stores features for
/// `ra_lsp_server`. This should be a benign layering violation.
#[derive(Debug)]
pub struct FeatureFlags {
    // Flag name -> enabled? Names are validated by `check_flag_name`.
    flags: FxHashMap<String, bool>,
}
21 | |||
22 | impl FeatureFlags { | ||
23 | fn new(flags: &[(&str, bool)]) -> FeatureFlags { | ||
24 | let flags = flags | ||
25 | .iter() | ||
26 | .map(|&(name, value)| { | ||
27 | check_flag_name(name); | ||
28 | (name.to_string(), value) | ||
29 | }) | ||
30 | .collect(); | ||
31 | FeatureFlags { flags } | ||
32 | } | ||
33 | |||
34 | pub fn set(&mut self, flag: &str, value: bool) -> Result<(), ()> { | ||
35 | match self.flags.get_mut(flag) { | ||
36 | None => Err(()), | ||
37 | Some(slot) => { | ||
38 | *slot = value; | ||
39 | Ok(()) | ||
40 | } | ||
41 | } | ||
42 | } | ||
43 | |||
44 | pub fn get(&self, flag: &str) -> bool { | ||
45 | match self.flags.get(flag) { | ||
46 | None => panic!("unknown flag: {:?}", flag), | ||
47 | Some(value) => *value, | ||
48 | } | ||
49 | } | ||
50 | } | ||
51 | |||
52 | impl Default for FeatureFlags { | ||
53 | fn default() -> FeatureFlags { | ||
54 | FeatureFlags::new(&[ | ||
55 | ("lsp.diagnostics", true), | ||
56 | ("completion.insertion.add-call-parenthesis", true), | ||
57 | ("completion.enable-postfix", true), | ||
58 | ("notifications.workspace-loaded", true), | ||
59 | ("notifications.cargo-toml-not-found", true), | ||
60 | ]) | ||
61 | } | ||
62 | } | ||
63 | |||
/// Enforces the flag naming convention: lowercase ascii letters separated
/// only by `-` and `.`. Panics on any other byte -- a bad name is a
/// programmer error, not a user error.
fn check_flag_name(flag: &str) {
    let is_valid = |b: u8| match b {
        b'a'..=b'z' | b'-' | b'.' => true,
        _ => false,
    };
    if !flag.bytes().all(is_valid) {
        panic!("flag name does not match conventions: {:?}", flag);
    }
}
diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs new file mode 100644 index 000000000..e922d1e5f --- /dev/null +++ b/crates/ra_ide_db/src/lib.rs | |||
@@ -0,0 +1,138 @@ | |||
1 | //! This crate defines the core datastructure representing IDE state -- `RootDatabase`. | ||
2 | //! | ||
3 | //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. | ||
4 | |||
5 | pub mod line_index; | ||
6 | pub mod line_index_utils; | ||
7 | pub mod feature_flags; | ||
8 | pub mod symbol_index; | ||
9 | pub mod change; | ||
10 | mod wasm_shims; | ||
11 | |||
12 | use std::sync::Arc; | ||
13 | |||
14 | use ra_db::{ | ||
15 | salsa::{self, Database, Durability}, | ||
16 | Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath, | ||
17 | SourceDatabase, SourceRootId, | ||
18 | }; | ||
19 | use rustc_hash::FxHashMap; | ||
20 | |||
21 | use crate::{feature_flags::FeatureFlags, line_index::LineIndex, symbol_index::SymbolsDatabase}; | ||
22 | |||
/// The concrete salsa database for the IDE: aggregates storage for all
/// query groups (source files, line indices, symbol search, and the `hir`
/// intern/ast/def/type-inference layers).
#[salsa::database(
    ra_db::SourceDatabaseStorage,
    ra_db::SourceDatabaseExtStorage,
    LineIndexDatabaseStorage,
    symbol_index::SymbolsDatabaseStorage,
    hir::db::InternDatabaseStorage,
    hir::db::AstDatabaseStorage,
    hir::db::DefDatabaseStorage,
    hir::db::HirDatabaseStorage
)]
#[derive(Debug)]
pub struct RootDatabase {
    runtime: salsa::Runtime<RootDatabase>,
    pub feature_flags: Arc<FeatureFlags>,
    pub(crate) debug_data: Arc<DebugData>,
    // Timestamps driving garbage collection (see `change.rs`): when we last
    // collected, and when we last checked whether to collect.
    pub last_gc: crate::wasm_shims::Instant,
    pub last_gc_check: crate::wasm_shims::Instant,
}
41 | |||
// File access is pure delegation: `FileLoaderDelegate` implements the
// actual lookups on top of the salsa inputs.
impl FileLoader for RootDatabase {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_relative_path(
        &self,
        anchor: FileId,
        relative_path: &RelativePath,
    ) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}
57 | |||
impl salsa::Database for RootDatabase {
    fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> {
        &self.runtime
    }
    fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
        &mut self.runtime
    }
    // A panic propagated from another thread is surfaced as cancellation.
    fn on_propagated_panic(&self) -> ! {
        Canceled::throw()
    }
    // Poll for pending cancellation whenever salsa is about to validate or
    // (re)compute something, so long-running queries unwind promptly.
    fn salsa_event(&self, event: impl Fn() -> salsa::Event<RootDatabase>) {
        match event().kind {
            salsa::EventKind::DidValidateMemoizedValue { .. }
            | salsa::EventKind::WillExecute { .. } => {
                self.check_canceled();
            }
            _ => (),
        }
    }
}
78 | |||
impl Default for RootDatabase {
    /// Default LRU capacity and default feature flags.
    fn default() -> RootDatabase {
        RootDatabase::new(None, FeatureFlags::default())
    }
}
84 | |||
impl RootDatabase {
    /// Creates a fresh database with the given parse-cache capacity
    /// (defaulting to `ra_db::DEFAULT_LRU_CAP`) and feature flags.
    pub fn new(lru_capacity: Option<usize>, feature_flags: FeatureFlags) -> RootDatabase {
        let mut db = RootDatabase {
            runtime: salsa::Runtime::default(),
            last_gc: crate::wasm_shims::Instant::now(),
            last_gc_check: crate::wasm_shims::Instant::now(),
            feature_flags: Arc::new(feature_flags),
            debug_data: Default::default(),
        };
        // Seed the top-level inputs; these change rarely, hence HIGH durability.
        db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
        db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
        db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
        // Bound the memory held by parse trees and macro expansions.
        let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP);
        db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_capacity);
        db.query_mut(hir::db::ParseMacroQuery).set_lru_capacity(lru_capacity);
        db.query_mut(hir::db::MacroExpandQuery).set_lru_capacity(lru_capacity);
        db
    }
}
104 | |||
impl salsa::ParallelDatabase for RootDatabase {
    /// Creates a read-only snapshot for use on another thread: the salsa
    /// runtime is forked, `Arc` fields are shared, timestamps are copied.
    fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
        salsa::Snapshot::new(RootDatabase {
            runtime: self.runtime.snapshot(self),
            last_gc: self.last_gc,
            last_gc_check: self.last_gc_check,
            feature_flags: Arc::clone(&self.feature_flags),
            debug_data: Arc::clone(&self.debug_data),
        })
    }
}
116 | |||
/// Salsa query group for line-index computation (offset <-> line/column).
#[salsa::query_group(LineIndexDatabaseStorage)]
pub trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled {
    fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
}

// Derived-query implementation: recomputed whenever the file text changes.
fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
    let text = db.file_text(file_id);
    Arc::new(LineIndex::new(&*text))
}
126 | |||
127 | #[derive(Debug, Default, Clone)] | ||
128 | pub(crate) struct DebugData { | ||
129 | pub(crate) root_paths: FxHashMap<SourceRootId, String>, | ||
130 | pub(crate) crate_names: FxHashMap<CrateId, String>, | ||
131 | } | ||
132 | |||
133 | impl DebugData { | ||
134 | pub(crate) fn merge(&mut self, other: DebugData) { | ||
135 | self.root_paths.extend(other.root_paths.into_iter()); | ||
136 | self.crate_names.extend(other.crate_names.into_iter()); | ||
137 | } | ||
138 | } | ||
diff --git a/crates/ra_ide_db/src/line_index.rs b/crates/ra_ide_db/src/line_index.rs new file mode 100644 index 000000000..452c87ac5 --- /dev/null +++ b/crates/ra_ide_db/src/line_index.rs | |||
@@ -0,0 +1,284 @@ | |||
1 | //! `LineIndex` maps flat `TextUnit` offsets into `(Line, Column)` | ||
2 | //! representation. | ||
3 | |||
4 | use ra_syntax::TextUnit; | ||
5 | use rustc_hash::FxHashMap; | ||
6 | use superslice::Ext; | ||
7 | |||
/// Maps flat `TextUnit` offsets to `(line, column)` pairs and back.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
    // Start offset of every line; `newlines[0]` is always 0.
    pub(crate) newlines: Vec<TextUnit>,
    // For each line containing multi-byte (non-ASCII) characters, the utf-8
    // ranges of those characters; pure-ASCII lines have no entry.
    pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
    /// Zero-based
    pub line: u32,
    /// Zero-based utf-16 column within the line.
    pub col_utf16: u32,
}

/// A character that is more than one byte long in utf-8; `start`/`end` are
/// utf-8 offsets within its line.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct Utf16Char {
    pub(crate) start: TextUnit,
    pub(crate) end: TextUnit,
}

impl Utf16Char {
    /// Length of the character in utf-8 bytes.
    fn len(&self) -> TextUnit {
        self.end - self.start
    }
}
33 | |||
impl LineIndex {
    /// Builds the index in a single pass over `text`: records the start
    /// offset of every line plus, per line, the utf-8 ranges of multi-byte
    /// characters (needed for utf-8 <-> utf-16 column conversion).
    pub fn new(text: &str) -> LineIndex {
        let mut utf16_lines = FxHashMap::default();
        let mut utf16_chars = Vec::new();

        let mut newlines = vec![0.into()];
        // Running absolute offset into `text` (despite the name, a flat
        // offset, not a row number).
        let mut curr_row = 0.into();
        // Utf-8 column within the current line.
        let mut curr_col = 0.into();
        let mut line = 0;
        for c in text.chars() {
            curr_row += TextUnit::of_char(c);
            if c == '\n' {
                newlines.push(curr_row);

                // Save any utf-16 characters seen in the previous line
                if !utf16_chars.is_empty() {
                    utf16_lines.insert(line, utf16_chars);
                    utf16_chars = Vec::new();
                }

                // Prepare for processing the next line
                curr_col = 0.into();
                line += 1;
                continue;
            }

            // Multi-byte characters will need correction when converting
            // between utf-8 and utf-16 columns; remember where they sit.
            let char_len = TextUnit::of_char(c);
            if char_len.to_usize() > 1 {
                utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + char_len });
            }

            curr_col += char_len;
        }

        // Save any utf-16 characters seen in the last line
        if !utf16_chars.is_empty() {
            utf16_lines.insert(line, utf16_chars);
        }

        LineIndex { newlines, utf16_lines }
    }

    /// Converts a flat offset into zero-based (line, utf-16 column).
    pub fn line_col(&self, offset: TextUnit) -> LineCol {
        // `upper_bound` finds the first line start *after* `offset`; the
        // line containing `offset` is the one just before it.
        let line = self.newlines.upper_bound(&offset) - 1;
        let line_start_offset = self.newlines[line];
        let col = offset - line_start_offset;

        LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 }
    }

    /// Converts zero-based (line, utf-16 column) back into a flat offset.
    pub fn offset(&self, line_col: LineCol) -> TextUnit {
        //FIXME: return Result
        let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16);
        self.newlines[line_col.line as usize] + col
    }

    // Shrinks a utf-8 column to utf-16 by subtracting, for every multi-byte
    // char ending at or before `col`, the difference between its utf-8 and
    // utf-16 widths.
    //
    // NOTE(review): `c.len() - 1` assumes every multi-byte char is a single
    // utf-16 code unit; supplementary-plane chars (e.g. emoji) occupy two
    // utf-16 units -- confirm whether those are expected here.
    fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
            let mut correction = TextUnit::from_usize(0);
            for c in utf16_chars {
                if col >= c.end {
                    correction += c.len() - TextUnit::from_usize(1);
                } else {
                    // From here on, all utf16 characters come *after* the character we are mapping,
                    // so we don't need to take them into account
                    break;
                }
            }

            col -= correction;
        }

        col.to_usize()
    }

    // Widens a utf-16 column to utf-8 by walking the line's multi-byte chars
    // and expanding `col` for each one that starts before the progressively
    // corrected column.
    fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit {
        let mut col: TextUnit = col.into();
        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
            for c in utf16_chars {
                if col >= c.start {
                    col += c.len() - TextUnit::from_usize(1);
                } else {
                    // From here on, all utf16 characters come *after* the character we are mapping,
                    // so we don't need to take them into account
                    break;
                }
            }
        }

        col
    }
}
126 | |||
#[cfg(test)]
/// Simple reference implementation to use in proptests: walks the text one
/// character at a time, counting lines and utf-16 columns. An offset that
/// lands inside a multibyte char is treated as the start of that char.
pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol {
    let mut line = 0;
    let mut col_utf16 = 0;
    for (i, c) in text.char_indices() {
        if i + c.len_utf8() > offset.to_usize() {
            break;
        }
        if c == '\n' {
            line += 1;
            col_utf16 = 0;
        } else {
            col_utf16 += 1;
        }
    }
    LineCol { line, col_utf16 }
}
146 | |||
#[cfg(test)]
mod test_line_index {
    use super::*;
    use proptest::{prelude::*, proptest};
    use ra_text_edit::test_utils::{arb_offset, arb_text};

    // Spot-checks line/column conversion on small fixed inputs, including
    // offsets past the end of the text.
    #[test]
    fn test_line_index() {
        let text = "hello\nworld";
        let index = LineIndex::new(text);
        assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
        assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 });
        assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 });
        assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 });
        assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 });
        assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 });
        assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 });
        assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 });
        assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 });

        let text = "\nhello\nworld";
        let index = LineIndex::new(text);
        assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
        assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 });
        assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 });
        assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 });
        assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 });
    }

    // Strategy producing a text together with a valid offset into it.
    fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> {
        arb_text().prop_flat_map(|text| (arb_offset(&text), Just(text))).boxed()
    }

    // NOTE(review): this duplicates (and shadows) the module-level
    // `to_line_col` reference implementation above -- consider keeping only
    // one copy.
    fn to_line_col(text: &str, offset: TextUnit) -> LineCol {
        let mut res = LineCol { line: 0, col_utf16: 0 };
        for (i, c) in text.char_indices() {
            if i + c.len_utf8() > offset.to_usize() {
                // if it's an invalid offset, inside a multibyte char
                // return as if it was at the start of the char
                break;
            }
            if c == '\n' {
                res.line += 1;
                res.col_utf16 = 0;
            } else {
                res.col_utf16 += 1;
            }
        }
        res
    }

    proptest! {
        #[test]
        fn test_line_index_proptest((offset, text) in arb_text_with_offset()) {
            // The fast index must agree with the naive reference impl.
            let expected = to_line_col(&text, offset);
            let line_index = LineIndex::new(&text);
            let actual = line_index.line_col(offset);

            assert_eq!(actual, expected);
        }
    }
}
209 | |||
#[cfg(test)]
mod test_utf8_utf16_conv {
    use super::*;

    // Sanity-check the encoded lengths the assertions below rely on:
    // 'メ' is 3 UTF-8 bytes but a single UTF-16 code unit.
    #[test]
    fn test_char_len() {
        assert_eq!('メ'.len_utf8(), 3);
        assert_eq!('メ'.len_utf16(), 1);
    }

    // An ASCII-only file needs no UTF-16 adjustment table at all.
    #[test]
    fn test_empty_index() {
        let col_index = LineIndex::new(
            "
const C: char = 'x';
",
        );
        assert_eq!(col_index.utf16_lines.len(), 0);
    }

    // One multibyte char on line 1: the index records its byte range, and
    // column conversions shift only past that char.
    #[test]
    fn test_single_char() {
        let col_index = LineIndex::new(
            "
const C: char = 'メ';
",
        );

        assert_eq!(col_index.utf16_lines.len(), 1);
        assert_eq!(col_index.utf16_lines[&1].len(), 1);
        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });

        // UTF-8 to UTF-16, no changes
        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

        // UTF-8 to UTF-16
        assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);

        // UTF-16 to UTF-8, no changes
        // (use `from_usize` consistently with `test_string` below)
        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15));

        // UTF-16 to UTF-8
        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(21));
    }

    // Two multibyte chars on line 1: each contributes its own adjustment,
    // and lines without multibyte chars (line 2) are unaffected.
    #[test]
    fn test_string() {
        let col_index = LineIndex::new(
            "
const C: char = \"メ メ\";
",
        );

        assert_eq!(col_index.utf16_lines.len(), 1);
        assert_eq!(col_index.utf16_lines[&1].len(), 2);
        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
        assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });

        // UTF-8 to UTF-16
        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

        assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
        assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);

        // use assert_eq! (not `assert!(a == b)`) for a useful failure message
        assert_eq!(col_index.utf8_to_utf16_col(2, 15.into()), 15);

        // UTF-16 to UTF-8
        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15));

        assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20));
        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23));

        assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
    }
}
diff --git a/crates/ra_ide_db/src/line_index_utils.rs b/crates/ra_ide_db/src/line_index_utils.rs new file mode 100644 index 000000000..435b06511 --- /dev/null +++ b/crates/ra_ide_db/src/line_index_utils.rs | |||
@@ -0,0 +1,341 @@ | |||
1 | //! Code actions can specify desirable final position of the cursor. | ||
2 | //! | ||
3 | //! The position is specified as a `TextUnit` in the final file. We need to send | ||
4 | //! it in `(Line, Column)` coordinate though. However, we only have a LineIndex | ||
5 | //! for a file pre-edit! | ||
6 | //! | ||
7 | //! Code in this module applies this "to (Line, Column) after edit" | ||
8 | //! transformation. | ||
9 | |||
10 | use ra_syntax::{TextRange, TextUnit}; | ||
11 | use ra_text_edit::{AtomTextEdit, TextEdit}; | ||
12 | |||
13 | use crate::line_index::{LineCol, LineIndex, Utf16Char}; | ||
14 | |||
/// Maps `offset` — a position in the file *after* `text_edit` is applied —
/// to a `(line, column)` pair, using only the pre-edit `line_index`.
///
/// The pre-edit index's steps (newlines and multi-byte chars) are rebased
/// through the edit's accumulated byte delta and folded into a
/// `RunningLineCol` until the first step lying past `offset`.
pub fn translate_offset_with_edit(
    line_index: &LineIndex,
    offset: TextUnit,
    text_edit: &TextEdit,
) -> LineCol {
    let mut state = Edits::from_text_edit(&text_edit);

    let mut res = RunningLineCol::new();

    // NOTE: this macro `return`s from the *enclosing function* once a step
    // past `offset` is seen; otherwise it folds the step into `res`.
    macro_rules! test_step {
        ($x:ident) => {
            match &$x {
                Step::Newline(n) => {
                    if offset < *n {
                        return res.to_line_col(offset);
                    } else {
                        res.add_line(*n);
                    }
                }
                Step::Utf16Char(x) => {
                    if offset < x.end() {
                        // if the offset is inside a multibyte char it's invalid
                        // clamp it to the start of the char
                        let clamp = offset.min(x.start());
                        return res.to_line_col(clamp);
                    } else {
                        res.adjust_col(*x);
                    }
                }
            }
        };
    }

    // Walk the original index's steps, splicing in steps contributed by the
    // edit (replacing or preceding each original step) as we go.
    for orig_step in LineIndexStepIter::from(line_index) {
        loop {
            let translated_step = state.translate_step(&orig_step);
            match state.next_steps(&translated_step) {
                NextSteps::Use => {
                    test_step!(translated_step);
                    break;
                }
                NextSteps::ReplaceMany(ns) => {
                    for n in ns {
                        test_step!(n);
                    }
                    break;
                }
                NextSteps::AddMany(ns) => {
                    // the inserted steps come *before* the current original
                    // step, so no `break`: re-examine the same `orig_step`
                    // against the next edit.
                    for n in ns {
                        test_step!(n);
                    }
                }
            }
        }
    }

    // Drain edits whose inserted text lies past the last original step.
    loop {
        match state.next_inserted_steps() {
            None => break,
            Some(ns) => {
                for n in ns {
                    test_step!(n);
                }
            }
        }
    }

    res.to_line_col(offset)
}
84 | |||
/// A single column-affecting event, in ascending offset order: either the
/// position just *after* a newline, or the byte range of a character whose
/// UTF-8 encoding is longer than one byte.
#[derive(Debug, Clone)]
enum Step {
    Newline(TextUnit),
    Utf16Char(TextRange),
}
90 | |||
/// Iterator over the `Step`s recorded in a `LineIndex`.
#[derive(Debug)]
struct LineIndexStepIter<'a> {
    line_index: &'a LineIndex,
    // index into `line_index.newlines` of the next newline to yield
    next_newline_idx: usize,
    // multi-byte chars of the current line, paired with that line's start
    // offset (the stored `Utf16Char` ranges are line-relative)
    utf16_chars: Option<(TextUnit, std::slice::Iter<'a, Utf16Char>)>,
}
97 | |||
98 | impl LineIndexStepIter<'_> { | ||
99 | fn from(line_index: &LineIndex) -> LineIndexStepIter { | ||
100 | let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None }; | ||
101 | // skip first newline since it's not real | ||
102 | x.next(); | ||
103 | x | ||
104 | } | ||
105 | } | ||
106 | |||
impl Iterator for LineIndexStepIter<'_> {
    type Item = Step;
    fn next(&mut self) -> Option<Step> {
        // First drain the multi-byte chars of the current line...
        self.utf16_chars
            .as_mut()
            .and_then(|(newline, x)| {
                let x = x.next()?;
                // stored ranges are line-relative; rebase to absolute offsets
                Some(Step::Utf16Char(TextRange::from_to(*newline + x.start, *newline + x.end)))
            })
            .or_else(|| {
                // ...then emit the next newline, stashing that line's
                // multi-byte chars (if any) for subsequent calls.
                let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?;
                self.utf16_chars = self
                    .line_index
                    .utf16_lines
                    .get(&(self.next_newline_idx as u32))
                    .map(|x| (next_newline, x.iter()));
                self.next_newline_idx += 1;
                Some(Step::Newline(next_newline))
            })
    }
}
128 | |||
/// Produces `Step`s by scanning a string slice, rebasing them by the
/// slice's absolute position in the file.
#[derive(Debug)]
struct OffsetStepIter<'a> {
    text: &'a str,
    // absolute offset of the start of the remaining `text`
    offset: TextUnit,
}
134 | |||
135 | impl Iterator for OffsetStepIter<'_> { | ||
136 | type Item = Step; | ||
137 | fn next(&mut self) -> Option<Step> { | ||
138 | let (next, next_offset) = self | ||
139 | .text | ||
140 | .char_indices() | ||
141 | .filter_map(|(i, c)| { | ||
142 | if c == '\n' { | ||
143 | let next_offset = self.offset + TextUnit::from_usize(i + 1); | ||
144 | let next = Step::Newline(next_offset); | ||
145 | Some((next, next_offset)) | ||
146 | } else { | ||
147 | let char_len = TextUnit::of_char(c); | ||
148 | if char_len.to_usize() > 1 { | ||
149 | let start = self.offset + TextUnit::from_usize(i); | ||
150 | let end = start + char_len; | ||
151 | let next = Step::Utf16Char(TextRange::from_to(start, end)); | ||
152 | let next_offset = end; | ||
153 | Some((next, next_offset)) | ||
154 | } else { | ||
155 | None | ||
156 | } | ||
157 | } | ||
158 | }) | ||
159 | .next()?; | ||
160 | let next_idx = (next_offset - self.offset).to_usize(); | ||
161 | self.text = &self.text[next_idx..]; | ||
162 | self.offset = next_offset; | ||
163 | Some(next) | ||
164 | } | ||
165 | } | ||
166 | |||
/// What `Edits::next_steps` decided to do with one translated original step.
#[derive(Debug)]
enum NextSteps<'a> {
    // the step is unaffected by the current edit; use it as-is
    Use,
    // the step falls inside the current edit's deleted range: substitute
    // the steps of the inserted text instead
    ReplaceMany(OffsetStepIter<'a>),
    // the current edit ends before the step: emit the inserted text's steps
    // first, then the caller re-examines the same step (no `break`)
    AddMany(OffsetStepIter<'a>),
}
173 | |||
/// One atom edit whose deleted range has been rebased into post-edit
/// coordinates, plus the length delta it contributes
/// (`insert.len() - delete.len()`).
#[derive(Debug)]
struct TranslatedEdit<'a> {
    delete: TextRange,
    insert: &'a str,
    diff: i64,
}
180 | |||
/// Cursor over a `TextEdit`'s atoms, tracking the net byte delta of the
/// edits consumed so far.
struct Edits<'a> {
    // atoms not yet reached, in order
    edits: &'a [AtomTextEdit],
    // the edit currently being spliced in, if any
    current: Option<TranslatedEdit<'a>>,
    // net length change of all edits *before* `current`
    acc_diff: i64,
}
186 | |||
impl<'a> Edits<'a> {
    /// Positions the cursor on the first atom of `text_edit`.
    fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> {
        let mut x = Edits { edits: text_edit.as_atoms(), current: None, acc_diff: 0 };
        x.advance_edit();
        x
    }
    /// Moves to the next atom, folding the finished edit's length delta into
    /// `acc_diff` and rebasing the new edit's deleted range through it.
    fn advance_edit(&mut self) {
        self.acc_diff += self.current.as_ref().map_or(0, |x| x.diff);
        match self.edits.split_first() {
            Some((next, rest)) => {
                let delete = self.translate_range(next.delete);
                let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64;
                self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff });
                self.edits = rest;
            }
            None => {
                self.current = None;
            }
        }
    }

    /// Steps of the current edit's inserted text (starting at its deletion
    /// point), then advances; `None` once all edits are consumed.
    fn next_inserted_steps(&mut self) -> Option<OffsetStepIter<'a>> {
        let cur = self.current.as_ref()?;
        let res = Some(OffsetStepIter { offset: cur.delete.start(), text: &cur.insert });
        self.advance_edit();
        res
    }

    /// Decides how `step` (already translated) relates to the current edit;
    /// see `NextSteps` for the three outcomes.
    fn next_steps(&mut self, step: &Step) -> NextSteps {
        let step_pos = match *step {
            Step::Newline(n) => n,
            Step::Utf16Char(r) => r.end(),
        };
        match &mut self.current {
            Some(edit) => {
                if step_pos <= edit.delete.start() {
                    // step lies entirely before the edit
                    NextSteps::Use
                } else if step_pos <= edit.delete.end() {
                    // step lies inside the deleted range
                    let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert };
                    // empty slice to avoid returning steps again
                    edit.insert = &edit.insert[edit.insert.len()..];
                    NextSteps::ReplaceMany(iter)
                } else {
                    // edit lies entirely before the step
                    let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert };
                    // empty slice to avoid returning steps again
                    edit.insert = &edit.insert[edit.insert.len()..];
                    self.advance_edit();
                    NextSteps::AddMany(iter)
                }
            }
            None => NextSteps::Use,
        }
    }

    /// Rebases a pre-edit range into post-edit coordinates by `acc_diff`.
    fn translate_range(&self, range: TextRange) -> TextRange {
        if self.acc_diff == 0 {
            range
        } else {
            let start = self.translate(range.start());
            let end = self.translate(range.end());
            TextRange::from_to(start, end)
        }
    }

    /// Rebases a pre-edit offset into post-edit coordinates by `acc_diff`.
    fn translate(&self, x: TextUnit) -> TextUnit {
        if self.acc_diff == 0 {
            x
        } else {
            TextUnit::from((x.to_usize() as i64 + self.acc_diff) as u32)
        }
    }

    /// `translate`/`translate_range`, lifted to a whole `Step`.
    fn translate_step(&self, x: &Step) -> Step {
        if self.acc_diff == 0 {
            x.clone()
        } else {
            match *x {
                Step::Newline(n) => Step::Newline(self.translate(n)),
                Step::Utf16Char(r) => Step::Utf16Char(self.translate_range(r)),
            }
        }
    }
}
270 | |||
/// Incrementally-built (line, column) position: lines counted so far, the
/// offset just past the last newline, and the surplus bytes contributed by
/// multi-byte chars on the current line.
#[derive(Debug)]
struct RunningLineCol {
    line: u32,
    last_newline: TextUnit,
    col_adjust: TextUnit,
}
277 | |||
278 | impl RunningLineCol { | ||
279 | fn new() -> RunningLineCol { | ||
280 | RunningLineCol { line: 0, last_newline: TextUnit::from(0), col_adjust: TextUnit::from(0) } | ||
281 | } | ||
282 | |||
283 | fn to_line_col(&self, offset: TextUnit) -> LineCol { | ||
284 | LineCol { | ||
285 | line: self.line, | ||
286 | col_utf16: ((offset - self.last_newline) - self.col_adjust).into(), | ||
287 | } | ||
288 | } | ||
289 | |||
290 | fn add_line(&mut self, newline: TextUnit) { | ||
291 | self.line += 1; | ||
292 | self.last_newline = newline; | ||
293 | self.col_adjust = TextUnit::from(0); | ||
294 | } | ||
295 | |||
296 | fn adjust_col(&mut self, range: TextRange) { | ||
297 | self.col_adjust += range.len() - TextUnit::from(1); | ||
298 | } | ||
299 | } | ||
300 | |||
#[cfg(test)]
mod test {
    use proptest::{prelude::*, proptest};
    use ra_text_edit::test_utils::{arb_offset, arb_text_with_edit};
    use ra_text_edit::TextEdit;

    use crate::line_index;

    use super::*;

    // A random (text, edit) pair, the text after applying the edit, and a
    // valid offset into that edited text.
    #[derive(Debug)]
    struct ArbTextWithEditAndOffset {
        text: String,
        edit: TextEdit,
        edited_text: String,
        offset: TextUnit,
    }

    fn arb_text_with_edit_and_offset() -> BoxedStrategy<ArbTextWithEditAndOffset> {
        arb_text_with_edit()
            .prop_flat_map(|x| {
                let edited_text = x.edit.apply(&x.text);
                let arb_offset = arb_offset(&edited_text);
                (Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| {
                    ArbTextWithEditAndOffset { text: x.text, edit: x.edit, edited_text, offset }
                })
            })
            .boxed()
    }

    proptest! {
        // Translating through the *pre-edit* index must agree with a direct
        // linear scan of the *post-edit* text.
        #[test]
        fn test_translate_offset_with_edit(x in arb_text_with_edit_and_offset()) {
            let expected = line_index::to_line_col(&x.edited_text, x.offset);
            let line_index = LineIndex::new(&x.text);
            let actual = translate_offset_with_edit(&line_index, x.offset, &x.edit);

            assert_eq!(actual, expected);
        }
    }
}
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs new file mode 100644 index 000000000..64ddf2f95 --- /dev/null +++ b/crates/ra_ide_db/src/symbol_index.rs | |||
@@ -0,0 +1,372 @@ | |||
1 | //! This module handles fuzzy-searching of functions, structs and other symbols | ||
2 | //! by name across the whole workspace and dependencies. | ||
3 | //! | ||
4 | //! It works by building an incrementally-updated text-search index of all | ||
5 | //! symbols. The backbone of the index is the **awesome** `fst` crate by | ||
6 | //! @BurntSushi. | ||
7 | //! | ||
8 | //! In a nutshell, you give a set of strings to `fst`, and it builds a | ||
9 | //! finite state machine describing this set of strings. The strings which | ||
10 | //! could fuzzy-match a pattern can also be described by a finite state machine. | ||
11 | //! What is freaking cool is that you can now traverse both state machines in | ||
12 | //! lock-step to enumerate the strings which are both in the input set and | ||
13 | //! fuzz-match the query. Or, more formally, given two languages described by | ||
14 | //! FSTs, one can build a product FST which describes the intersection of the | ||
15 | //! languages. | ||
16 | //! | ||
17 | //! `fst` does not support cheap updating of the index, but it supports unioning | ||
18 | //! of state machines. So, to account for changing source code, we build an FST | ||
19 | //! for each library (which is assumed to never change) and an FST for each Rust | ||
20 | //! file in the current workspace, and run a query against the union of all | ||
21 | //! those FSTs. | ||
22 | |||
23 | use std::{ | ||
24 | fmt, | ||
25 | hash::{Hash, Hasher}, | ||
26 | mem, | ||
27 | sync::Arc, | ||
28 | }; | ||
29 | |||
30 | use fst::{self, Streamer}; | ||
31 | use ra_db::{ | ||
32 | salsa::{self, ParallelDatabase}, | ||
33 | FileId, SourceDatabaseExt, SourceRootId, | ||
34 | }; | ||
35 | use ra_syntax::{ | ||
36 | ast::{self, NameOwner}, | ||
37 | match_ast, AstNode, Parse, SmolStr, SourceFile, | ||
38 | SyntaxKind::{self, *}, | ||
39 | SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, | ||
40 | }; | ||
41 | #[cfg(not(feature = "wasm"))] | ||
42 | use rayon::prelude::*; | ||
43 | |||
44 | use crate::RootDatabase; | ||
45 | |||
/// A symbol-search query plus its filtering options. Built with [`Query::new`]
/// and refined via the setter methods before being run.
#[derive(Debug)]
pub struct Query {
    query: String,
    lowercased: String,
    only_types: bool,
    libs: bool,
    exact: bool,
    limit: usize,
}

impl Query {
    /// Creates a query with every filter off and no result limit.
    pub fn new(query: String) -> Query {
        Query {
            lowercased: query.to_lowercase(),
            query,
            only_types: false,
            libs: false,
            exact: false,
            limit: usize::max_value(),
        }
    }

    /// Restrict results to type-declaring symbols (structs, enums, ...).
    pub fn only_types(&mut self) {
        self.only_types = true;
    }

    /// Search library roots instead of the local workspace.
    pub fn libs(&mut self) {
        self.libs = true;
    }

    /// Require exact name matches rather than fuzzy subsequence matches.
    pub fn exact(&mut self) {
        self.exact = true;
    }

    /// Cap the number of returned symbols at `limit`.
    pub fn limit(&mut self, limit: usize) {
        self.limit = limit;
    }
}
85 | |||
/// Salsa queries backing symbol search.
#[salsa::query_group(SymbolsDatabaseStorage)]
pub trait SymbolsDatabase: hir::db::HirDatabase {
    /// Symbol index for a single file, derived from its parse tree.
    fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>;
    /// Prebuilt index for a whole library root; set externally as an input.
    #[salsa::input]
    fn library_symbols(&self, id: SourceRootId) -> Arc<SymbolIndex>;
    /// The set of "local" (that is, from the current workspace) roots.
    /// Files in local roots are assumed to change frequently.
    #[salsa::input]
    fn local_roots(&self) -> Arc<Vec<SourceRootId>>;
    /// The set of roots for crates.io libraries.
    /// Files in libraries are assumed to never change.
    #[salsa::input]
    fn library_roots(&self) -> Arc<Vec<SourceRootId>>;
}
100 | |||
/// Computes the `SymbolIndex` for one file by parsing it and collecting
/// symbol declarations from the syntax tree.
fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
    // cooperatively check for query cancellation before doing any work
    db.check_canceled();
    let parse = db.parse(file_id);

    let symbols = source_file_to_file_symbols(&parse.tree(), file_id);

    // FIXME: add macros here

    Arc::new(SymbolIndex::new(symbols))
}
111 | |||
/// Runs `query` against either all library indices or all workspace-file
/// indices, depending on `query.libs`, and returns the matching symbols.
pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
    /// Need to wrap Snapshot to provide `Clone` impl for `map_with`
    struct Snap(salsa::Snapshot<RootDatabase>);
    impl Clone for Snap {
        fn clone(&self) -> Snap {
            Snap(self.0.snapshot())
        }
    }

    let buf: Vec<Arc<SymbolIndex>> = if query.libs {
        // gather the prebuilt per-library indices — in parallel via rayon
        // on non-wasm builds, sequentially otherwise
        let snap = Snap(db.snapshot());
        #[cfg(not(feature = "wasm"))]
        let buf = db
            .library_roots()
            .par_iter()
            .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id))
            .collect();

        #[cfg(feature = "wasm")]
        let buf = db.library_roots().iter().map(|&lib_id| snap.0.library_symbols(lib_id)).collect();

        buf
    } else {
        // gather per-file indices for every file in every local source root
        let mut files = Vec::new();
        for &root in db.local_roots().iter() {
            let sr = db.source_root(root);
            files.extend(sr.walk())
        }

        let snap = Snap(db.snapshot());
        #[cfg(not(feature = "wasm"))]
        let buf =
            files.par_iter().map_with(snap, |db, &file_id| db.0.file_symbols(file_id)).collect();

        #[cfg(feature = "wasm")]
        let buf = files.iter().map(|&file_id| snap.0.file_symbols(file_id)).collect();

        buf
    };
    query.search(&buf)
}
153 | |||
154 | pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec<FileSymbol> { | ||
155 | let name = name_ref.text(); | ||
156 | let mut query = Query::new(name.to_string()); | ||
157 | query.exact(); | ||
158 | query.limit(4); | ||
159 | world_symbols(db, query) | ||
160 | } | ||
161 | |||
/// Searchable index over a set of `FileSymbol`s: the symbols sorted by
/// case-insensitive name, plus an `fst` map from each lowercased name to the
/// range of symbols carrying it.
#[derive(Default)]
pub struct SymbolIndex {
    symbols: Vec<FileSymbol>,
    map: fst::Map,
}
167 | |||
impl fmt::Debug for SymbolIndex {
    // the full symbol list is too noisy to print; show only its length
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SymbolIndex").field("n_symbols", &self.symbols.len()).finish()
    }
}
173 | |||
// Equality and hashing consider the symbol list alone; the fst `map` is
// built deterministically from it in `SymbolIndex::new`, so it carries no
// extra information.
impl PartialEq for SymbolIndex {
    fn eq(&self, other: &SymbolIndex) -> bool {
        self.symbols == other.symbols
    }
}

impl Eq for SymbolIndex {}

impl Hash for SymbolIndex {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.symbols.hash(hasher)
    }
}
187 | |||
impl SymbolIndex {
    /// Sorts `symbols` by ASCII-case-insensitive name and builds an fst map
    /// from each distinct lowercased name to the `[start, end)` range of
    /// symbols sharing it.
    fn new(mut symbols: Vec<FileSymbol>) -> SymbolIndex {
        fn cmp_key<'a>(s1: &'a FileSymbol) -> impl Ord + 'a {
            unicase::Ascii::new(s1.name.as_str())
        }
        #[cfg(not(feature = "wasm"))]
        symbols.par_sort_by(|s1, s2| cmp_key(s1).cmp(&cmp_key(s2)));

        #[cfg(feature = "wasm")]
        symbols.sort_by(|s1, s2| cmp_key(s1).cmp(&cmp_key(s2)));

        let mut builder = fst::MapBuilder::memory();

        let mut last_batch_start = 0;

        for idx in 0..symbols.len() {
            // extend the batch while the *next* symbol shares the current
            // batch's case-insensitive name; `get(idx + 1)` is `None` on the
            // last iteration, which forces the final batch to flush
            if symbols.get(last_batch_start).map(cmp_key) == symbols.get(idx + 1).map(cmp_key) {
                continue;
            }

            let start = last_batch_start;
            let end = idx + 1;
            last_batch_start = end;

            let key = symbols[start].name.as_str().to_lowercase();
            let value = SymbolIndex::range_to_map_value(start, end);

            builder.insert(key, value).unwrap();
        }

        let map = fst::Map::from_bytes(builder.into_inner().unwrap()).unwrap();
        SymbolIndex { symbols, map }
    }

    pub fn len(&self) -> usize {
        self.symbols.len()
    }

    /// Approximate memory footprint: fst bytes plus the symbol vector.
    pub fn memory_size(&self) -> usize {
        self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>()
    }

    /// Builds one combined index from many parsed files (parallel version).
    #[cfg(not(feature = "wasm"))]
    pub(crate) fn for_files(
        files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>,
    ) -> SymbolIndex {
        let symbols = files
            .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id))
            .collect::<Vec<_>>();
        SymbolIndex::new(symbols)
    }

    /// Builds one combined index from many parsed files (sequential version
    /// for wasm, where rayon is not used).
    #[cfg(feature = "wasm")]
    pub(crate) fn for_files(
        files: impl Iterator<Item = (FileId, Parse<ast::SourceFile>)>,
    ) -> SymbolIndex {
        let symbols = files
            .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id))
            .collect::<Vec<_>>();
        SymbolIndex::new(symbols)
    }

    /// Packs a `[start, end)` symbol range into one fst value:
    /// `start` in the high 32 bits, `end` in the low 32 bits.
    fn range_to_map_value(start: usize, end: usize) -> u64 {
        debug_assert![start <= (std::u32::MAX as usize)];
        debug_assert![end <= (std::u32::MAX as usize)];

        ((start as u64) << 32) | end as u64
    }

    /// Inverse of `range_to_map_value`.
    fn map_value_to_range(value: u64) -> (usize, usize) {
        let end = value as u32 as usize;
        let start = (value >> 32) as usize;
        (start, end)
    }
}
263 | |||
impl Query {
    /// Runs this query against `indices`: unions the per-index fst searches
    /// and applies the `only_types`/`exact`/`limit` filters.
    pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> {
        let mut op = fst::map::OpBuilder::new();
        for file_symbols in indices.iter() {
            // fuzzy matching: the lowercased query must be a subsequence of
            // the (lowercased) indexed name
            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
            op = op.add(file_symbols.map.search(automaton))
        }
        let mut stream = op.union();
        let mut res = Vec::new();
        while let Some((_, indexed_values)) = stream.next() {
            // NOTE: the limit is only checked between fst keys, so one key's
            // whole batch may push `res` slightly past `self.limit`
            if res.len() >= self.limit {
                break;
            }
            for indexed_value in indexed_values {
                let symbol_index = &indices[indexed_value.index];
                // each fst value encodes a range of equally-named symbols
                let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value);

                for symbol in &symbol_index.symbols[start..end] {
                    if self.only_types && !is_type(symbol.ptr.kind()) {
                        continue;
                    }
                    if self.exact && symbol.name != self.query {
                        continue;
                    }
                    res.push(symbol.clone());
                }
            }
        }
        res
    }
}
295 | |||
/// Whether `kind` declares a type-level symbol; used by the
/// `Query::only_types` filter.
fn is_type(kind: SyntaxKind) -> bool {
    match kind {
        STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => true,
        _ => false,
    }
}
302 | |||
/// The actual data that is stored in the index. It should be as compact as
/// possible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileSymbol {
    /// File in which the symbol is declared.
    pub file_id: FileId,
    /// Declared name — what queries are matched against.
    pub name: SmolStr,
    /// Pointer to the declaring syntax node.
    pub ptr: SyntaxNodePtr,
    /// Range of the name token itself, when known.
    pub name_range: Option<TextRange>,
    /// Name of the nearest enclosing symbol, if any.
    pub container_name: Option<SmolStr>,
}
313 | |||
314 | fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec<FileSymbol> { | ||
315 | let mut symbols = Vec::new(); | ||
316 | let mut stack = Vec::new(); | ||
317 | |||
318 | for event in source_file.syntax().preorder() { | ||
319 | match event { | ||
320 | WalkEvent::Enter(node) => { | ||
321 | if let Some(mut symbol) = to_file_symbol(&node, file_id) { | ||
322 | symbol.container_name = stack.last().cloned(); | ||
323 | |||
324 | stack.push(symbol.name.clone()); | ||
325 | symbols.push(symbol); | ||
326 | } | ||
327 | } | ||
328 | |||
329 | WalkEvent::Leave(node) => { | ||
330 | if to_symbol(&node).is_some() { | ||
331 | stack.pop(); | ||
332 | } | ||
333 | } | ||
334 | } | ||
335 | } | ||
336 | |||
337 | symbols | ||
338 | } | ||
339 | |||
/// Extracts `(name, node pointer, name range)` from a syntax node that
/// declares a named item; `None` for every other node kind.
fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
    // shared extraction for any node kind implementing `NameOwner`
    fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
        let name = node.name()?;
        let name_range = name.syntax().text_range();
        let name = name.text().clone();
        let ptr = SyntaxNodePtr::new(node.syntax());

        Some((name, ptr, name_range))
    }
    match_ast! {
        match node {
            ast::FnDef(it) => { decl(it) },
            ast::StructDef(it) => { decl(it) },
            ast::EnumDef(it) => { decl(it) },
            ast::TraitDef(it) => { decl(it) },
            ast::Module(it) => { decl(it) },
            ast::TypeAliasDef(it) => { decl(it) },
            ast::ConstDef(it) => { decl(it) },
            ast::StaticDef(it) => { decl(it) },
            _ => None,
        }
    }
}
363 | |||
364 | fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> { | ||
365 | to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol { | ||
366 | name, | ||
367 | ptr, | ||
368 | file_id, | ||
369 | name_range: Some(name_range), | ||
370 | container_name: None, | ||
371 | }) | ||
372 | } | ||
diff --git a/crates/ra_ide_db/src/wasm_shims.rs b/crates/ra_ide_db/src/wasm_shims.rs new file mode 100644 index 000000000..7af9f9d9b --- /dev/null +++ b/crates/ra_ide_db/src/wasm_shims.rs | |||
@@ -0,0 +1,19 @@ | |||
1 | //! A version of `std::time::Instant` that doesn't panic in WASM. | ||
2 | |||
3 | #[cfg(not(feature = "wasm"))] | ||
4 | pub use std::time::Instant; | ||
5 | |||
#[cfg(feature = "wasm")]
#[derive(Clone, Copy, Debug)]
pub struct Instant;

#[cfg(feature = "wasm")]
impl Instant {
    /// Shim "now": a unit value, since this build has no monotonic clock.
    pub fn now() -> Instant {
        Instant
    }

    /// Always reports zero elapsed time.
    pub fn elapsed(&self) -> std::time::Duration {
        std::time::Duration::from_secs(0)
    }
}