Diffstat (limited to 'crates/ra_analysis/src')
-rw-r--r--  crates/ra_analysis/src/db.rs           | 117
-rw-r--r--  crates/ra_analysis/src/db/imp.rs       | 153
-rw-r--r--  crates/ra_analysis/src/db/mod.rs       |  85
-rw-r--r--  crates/ra_analysis/src/descriptors.rs  |  68
-rw-r--r--  crates/ra_analysis/src/imp.rs          | 169
-rw-r--r--  crates/ra_analysis/src/lib.rs          |  21
-rw-r--r--  crates/ra_analysis/src/module_map.rs   | 165
-rw-r--r--  crates/ra_analysis/src/queries.rs      |  39
-rw-r--r--  crates/ra_analysis/src/roots.rs        |  72
-rw-r--r--  crates/ra_analysis/src/symbol_index.rs |  11
10 files changed, 405 insertions, 495 deletions
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
new file mode 100644
index 000000000..c69577233
--- /dev/null
+++ b/crates/ra_analysis/src/db.rs
@@ -0,0 +1,117 @@
+use std::{
+    fmt,
+    sync::Arc,
+    hash::{Hash, Hasher},
+};
+use salsa;
+use rustc_hash::FxHashSet;
+use ra_syntax::File;
+use ra_editor::{LineIndex};
+use crate::{
+    symbol_index::SymbolIndex,
+    module_map::{ModulesDatabase, ModuleTreeQuery, ModuleDescriptorQuery},
+    FileId, FileResolverImp,
+};
+
+#[derive(Default)]
+pub(crate) struct RootDatabase {
+    runtime: salsa::runtime::Runtime<RootDatabase>,
+}
+
+impl fmt::Debug for RootDatabase {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.write_str("RootDatabase { ... }")
+    }
+}
+
+impl salsa::Database for RootDatabase {
+    fn salsa_runtime(&self) -> &salsa::runtime::Runtime<RootDatabase> {
+        &self.runtime
+    }
+}
+
+impl salsa::ParallelDatabase for RootDatabase {
+    fn fork(&self) -> Self {
+        RootDatabase {
+            runtime: self.runtime.fork(),
+        }
+    }
+}
+
+impl Clone for RootDatabase {
+    fn clone(&self) -> RootDatabase {
+        salsa::ParallelDatabase::fork(self)
+    }
+}
+
+salsa::database_storage! {
+    pub(crate) struct RootDatabaseStorage for RootDatabase {
+        impl FilesDatabase {
+            fn file_text() for FileTextQuery;
+            fn file_set() for FileSetQuery;
+        }
+        impl SyntaxDatabase {
+            fn file_syntax() for FileSyntaxQuery;
+            fn file_lines() for FileLinesQuery;
+            fn file_symbols() for FileSymbolsQuery;
+        }
+        impl ModulesDatabase {
+            fn module_tree() for ModuleTreeQuery;
+            fn module_descriptor() for ModuleDescriptorQuery;
+        }
+    }
+}
+
+salsa::query_group! {
+    pub(crate) trait FilesDatabase: salsa::Database {
+        fn file_text(file_id: FileId) -> Arc<String> {
+            type FileTextQuery;
+            storage input;
+        }
+        fn file_set(key: ()) -> Arc<FileSet> {
+            type FileSetQuery;
+            storage input;
+        }
+    }
+}
+
+#[derive(Default, Debug, PartialEq, Eq)]
+pub(crate) struct FileSet {
+    pub(crate) files: FxHashSet<FileId>,
+    pub(crate) resolver: FileResolverImp,
+}
+
+impl Hash for FileSet {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        let mut files = self.files.iter().cloned().collect::<Vec<_>>();
+        files.sort();
+        files.hash(hasher);
+    }
+}
+
+salsa::query_group! {
+    pub(crate) trait SyntaxDatabase: FilesDatabase {
+        fn file_syntax(file_id: FileId) -> File {
+            type FileSyntaxQuery;
+        }
+        fn file_lines(file_id: FileId) -> Arc<LineIndex> {
+            type FileLinesQuery;
+        }
+        fn file_symbols(file_id: FileId) -> Arc<SymbolIndex> {
+            type FileSymbolsQuery;
+        }
+    }
+}
+
+fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> File {
+    let text = db.file_text(file_id);
+    File::parse(&*text)
+}
+fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
+    let text = db.file_text(file_id);
+    Arc::new(LineIndex::new(&*text))
+}
+fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
+    let syntax = db.file_syntax(file_id);
+    Arc::new(SymbolIndex::for_file(file_id, syntax))
+}
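The FileSet::Hash impl in db.rs sorts the file ids before hashing because FxHashSet iterates in an unspecified order, and two equal sets must hash identically. A minimal, self-contained sketch of the same idea using only std types (not the crate's actual code):

    use std::collections::HashSet;
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Order-independent hashing: collect the set, sort, then hash the sorted Vec.
    fn hash_file_set(files: &HashSet<u32>) -> u64 {
        let mut files: Vec<u32> = files.iter().cloned().collect();
        files.sort();
        let mut hasher = DefaultHasher::new();
        files.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        let a: HashSet<u32> = [3, 1, 2].iter().cloned().collect();
        let b: HashSet<u32> = [2, 3, 1].iter().cloned().collect();
        assert_eq!(hash_file_set(&a), hash_file_set(&b));
    }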
diff --git a/crates/ra_analysis/src/db/imp.rs b/crates/ra_analysis/src/db/imp.rs
deleted file mode 100644
index 36f6cf290..000000000
--- a/crates/ra_analysis/src/db/imp.rs
+++ /dev/null
@@ -1,153 +0,0 @@
1use std::{
2 sync::Arc,
3 any::Any,
4 hash::{Hash, Hasher},
5 collections::hash_map::{DefaultHasher},
6 iter,
7};
8use rustc_hash::FxHashMap;
9use salsa;
10use {FileId, imp::FileResolverImp};
11use super::{State, Query, QueryCtx};
12
13pub(super) type Data = Arc<Any + Send + Sync + 'static>;
14
15#[derive(Debug)]
16pub(super) struct Db {
17 names: Arc<FxHashMap<salsa::QueryTypeId, &'static str>>,
18 pub(super) imp: salsa::Db<State, Data>,
19}
20
21impl Db {
22 pub(super) fn new(mut reg: QueryRegistry) -> Db {
23 let config = reg.config.take().unwrap();
24 Db {
25 names: Arc::new(reg.names),
26 imp: salsa::Db::new(config, State::default())
27 }
28 }
29 pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
30 let names = self.names.clone();
31 let mut invalidations = salsa::Invalidations::new();
32 invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
33 if resolver_changed {
34 invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
35 } else {
36 invalidations.invalidate(FILE_SET, iter::empty());
37 }
38 let imp = self.imp.with_ground_data(
39 new_state,
40 invalidations,
41 );
42 Db { names, imp }
43 }
44 pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
45 ctx.trace().into_iter().map(|it| self.names[&it]).collect()
46 }
47}
48
49pub(crate) trait EvalQuery {
50 type Params;
51 type Output;
52 fn query_type(&self) -> salsa::QueryTypeId;
53 fn f(&self) -> salsa::QueryFn<State, Data>;
54 fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
55}
56
57impl<T, R> EvalQuery for Query<T, R>
58where
59 T: Hash + Send + Sync + 'static,
60 R: Hash + Send + Sync + 'static,
61{
62 type Params = T;
63 type Output = R;
64 fn query_type(&self) -> salsa::QueryTypeId {
65 salsa::QueryTypeId(self.0)
66 }
67 fn f(&self) -> salsa::QueryFn<State, Data> {
68 let f = self.1;
69 Box::new(move |ctx, data| {
70 let ctx = QueryCtx { imp: ctx };
71 let data: &T = data.downcast_ref().unwrap();
72 let res = f(ctx, data);
73 let h = hash(&res);
74 (Arc::new(res), salsa::OutputFingerprint(h))
75 })
76 }
77 fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
78 let query_id = salsa::QueryId(
79 self.query_type(),
80 salsa::InputFingerprint(hash(&params)),
81 );
82 let res = ctx.imp.get(query_id, Arc::new(params));
83 res.downcast().unwrap()
84 }
85}
86
87pub(super) struct QueryRegistry {
88 config: Option<salsa::QueryConfig<State, Data>>,
89 names: FxHashMap<salsa::QueryTypeId, &'static str>,
90}
91
92impl QueryRegistry {
93 pub(super) fn new() -> QueryRegistry {
94 let mut config = salsa::QueryConfig::<State, Data>::new();
95 config = config.with_ground_query(
96 FILE_TEXT, Box::new(|state, params| {
97 let file_id: &FileId = params.downcast_ref().unwrap();
98 let res = state.file_map[file_id].clone();
99 let fingerprint = salsa::OutputFingerprint(hash(&res));
100 (res, fingerprint)
101 })
102 );
103 config = config.with_ground_query(
104 FILE_SET, Box::new(|state, _params| {
105 let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
106 let hash = hash(&file_ids);
107 let file_resolver = state.file_resolver.clone();
108 let res = (file_ids, file_resolver);
109 let fingerprint = salsa::OutputFingerprint(hash);
110 (Arc::new(res), fingerprint)
111 })
112 );
113 let mut names = FxHashMap::default();
114 names.insert(FILE_TEXT, "FILE_TEXT");
115 names.insert(FILE_SET, "FILE_SET");
116 QueryRegistry { config: Some(config), names }
117 }
118 pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
119 let id = q.query_type();
120 let prev = self.names.insert(id, name);
121 assert!(prev.is_none(), "duplicate query: {:?}", id);
122 let config = self.config.take().unwrap();
123 let config = config.with_query(id, q.f());
124 self.config= Some(config);
125 }
126}
127
128fn hash<T: Hash>(x: &T) -> u64 {
129 let mut hasher = DefaultHasher::new();
130 x.hash(&mut hasher);
131 hasher.finish()
132}
133
134const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
135pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
136 let query_id = salsa::QueryId(
137 FILE_TEXT,
138 salsa::InputFingerprint(hash(&file_id)),
139 );
140 let res = ctx.imp.get(query_id, Arc::new(file_id));
141 res.downcast().unwrap()
142}
143
144const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
145pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
146 let query_id = salsa::QueryId(
147 FILE_SET,
148 salsa::InputFingerprint(hash(&())),
149 );
150 let res = ctx.imp.get(query_id, Arc::new(()));
151 res.downcast().unwrap()
152}
153
diff --git a/crates/ra_analysis/src/db/mod.rs b/crates/ra_analysis/src/db/mod.rs
deleted file mode 100644
index 22769d112..000000000
--- a/crates/ra_analysis/src/db/mod.rs
+++ /dev/null
@@ -1,85 +0,0 @@
1mod imp;
2
3use std::{
4 sync::Arc,
5};
6use im;
7use salsa;
8use {FileId, imp::FileResolverImp};
9
10#[derive(Debug, Default, Clone)]
11pub(crate) struct State {
12 pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
13 pub(crate) file_resolver: FileResolverImp
14}
15
16#[derive(Debug)]
17pub(crate) struct Db {
18 imp: imp::Db,
19}
20
21#[derive(Clone, Copy)]
22pub(crate) struct QueryCtx<'a> {
23 imp: &'a salsa::QueryCtx<State, imp::Data>,
24}
25
26pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
27
28pub(crate) struct QueryRegistry {
29 imp: imp::QueryRegistry,
30}
31
32impl Default for Db {
33 fn default() -> Db {
34 Db::new()
35 }
36}
37
38impl Db {
39 pub(crate) fn new() -> Db {
40 let reg = QueryRegistry::new();
41 Db { imp: imp::Db::new(reg.imp) }
42 }
43 pub(crate) fn state(&self) -> &State {
44 self.imp.imp.ground_data()
45 }
46 pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
47 Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
48 }
49 pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
50 let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
51 f(ctx)
52 }
53 #[allow(unused)]
54 pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
55 let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
56 let res = f(ctx);
57 let trace = self.imp.extract_trace(ctx.imp);
58 (res, trace)
59 }
60}
61
62impl<'a> QueryCtx<'a> {
63 pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
64 q.get(self, params)
65 }
66}
67
68pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
69 imp::file_text(ctx, file_id)
70}
71
72pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
73 imp::file_set(ctx)
74}
75impl QueryRegistry {
76 fn new() -> QueryRegistry {
77 let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
78 ::queries::register_queries(&mut reg);
79 ::module_map::register_queries(&mut reg);
80 reg
81 }
82 pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
83 self.imp.add(q, name)
84 }
85}
diff --git a/crates/ra_analysis/src/descriptors.rs b/crates/ra_analysis/src/descriptors.rs
index 0731b5572..8d9f38ca5 100644
--- a/crates/ra_analysis/src/descriptors.rs
+++ b/crates/ra_analysis/src/descriptors.rs
@@ -4,14 +4,15 @@ use std::{
 use relative_path::RelativePathBuf;
 use ra_syntax::{
     SmolStr,
-    ast::{self, NameOwner},
+    ast::{self, NameOwner, AstNode},
+    text_utils::is_subrange
 };
-use {
+use crate::{
     FileId,
     imp::FileResolverImp,
 };
 
-#[derive(Debug, Hash)]
+#[derive(Debug, PartialEq, Eq, Hash)]
 pub struct ModuleDescriptor {
     pub submodules: Vec<Submodule>
 }
@@ -42,7 +43,7 @@ pub struct Submodule {
     pub name: SmolStr,
 }
 
-#[derive(Hash, Debug)]
+#[derive(Debug, PartialEq, Eq, Hash)]
 pub(crate) struct ModuleTreeDescriptor {
     nodes: Vec<NodeData>,
     links: Vec<LinkData>,
@@ -51,7 +52,7 @@ pub(crate) struct ModuleTreeDescriptor {
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 struct Node(usize);
-#[derive(Hash, Debug)]
+#[derive(Hash, Debug, PartialEq, Eq)]
 struct NodeData {
     file_id: FileId,
     links: Vec<Link>,
@@ -60,7 +61,7 @@ struct NodeData {
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub(crate) struct Link(usize);
-#[derive(Hash, Debug)]
+#[derive(Hash, Debug, PartialEq, Eq)]
 struct LinkData {
     owner: Node,
     name: SmolStr,
@@ -69,7 +70,7 @@ struct LinkData {
 }
 
 
-#[derive(Clone, Debug, Hash)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub enum Problem {
     UnresolvedModule {
         candidate: RelativePathBuf,
@@ -218,3 +219,56 @@ fn resolve_submodule(
     }
     (points_to, problem)
 }
+
+#[derive(Debug, Clone)]
+pub struct FnDescriptor {
+    pub name: String,
+    pub label : String,
+    pub ret_type: Option<String>,
+    pub params: Vec<String>,
+}
+
+impl FnDescriptor {
+    pub fn new(node: ast::FnDef) -> Option<Self> {
+        let name = node.name()?.text().to_string();
+
+        // Strip the body out for the label.
+        let label : String = if let Some(body) = node.body() {
+            let body_range = body.syntax().range();
+            let label : String = node.syntax().children()
+                .filter(|child| !is_subrange(body_range, child.range()))
+                .map(|node| node.text().to_string())
+                .collect();
+            label
+        } else {
+            node.syntax().text().to_string()
+        };
+
+        let params = FnDescriptor::param_list(node);
+        let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
+
+        Some(FnDescriptor {
+            name,
+            ret_type,
+            params,
+            label
+        })
+    }
+
+    fn param_list(node: ast::FnDef) -> Vec<String> {
+        let mut res = vec![];
+        if let Some(param_list) = node.param_list() {
+            if let Some(self_param) = param_list.self_param() {
+                res.push(self_param.syntax().text().to_string())
+            }
+
+            // Maybe use param.pat here? See if we can just extract the name?
+            //res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
+            res.extend(param_list.params()
+                .filter_map(|p| p.pat())
+                .map(|pat| pat.syntax().text().to_string())
+            );
+        }
+        res
+    }
+}
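FnDescriptor::new builds the signature label by dropping every child node whose range falls inside the function body. A rough, self-contained sketch of that idea on plain strings and byte ranges (hypothetical helper, not the crate's API):

    // Keep everything outside the body's byte range; the real code filters
    // syntax-tree children with is_subrange instead of slicing strings.
    fn strip_body(full: &str, body_start: usize, body_end: usize) -> String {
        let mut label = String::new();
        label.push_str(&full[..body_start]);
        label.push_str(&full[body_end..]);
        label.trim_end().to_string()
    }

    fn main() {
        let src = "fn add(a: i32, b: i32) -> i32 { a + b }";
        let body_start = src.find('{').unwrap();
        assert_eq!(strip_body(src, body_start, src.len()), "fn add(a: i32, b: i32) -> i32");
    }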
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index 47bc0032b..5efcaeca0 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -1,8 +1,8 @@
 use std::{
     sync::{
         Arc,
-        atomic::{AtomicBool, Ordering::SeqCst},
     },
+    hash::{Hash, Hasher},
     fmt,
     collections::VecDeque,
     iter,
@@ -12,24 +12,38 @@ use relative_path::RelativePath;
 use rustc_hash::FxHashSet;
 use ra_editor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit, resolve_local_name};
 use ra_syntax::{
-    TextUnit, TextRange, SmolStr, File, AstNode,
+    TextUnit, TextRange, SmolStr, File, AstNode, SyntaxNodeRef,
     SyntaxKind::*,
-    ast::{self, NameOwner},
+    ast::{self, NameOwner, ArgListOwner, Expr},
 };
 
-use {
+use crate::{
     FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
     JobToken, CrateGraph, CrateId,
     roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
-    descriptors::{ModuleTreeDescriptor, Problem},
+    descriptors::{FnDescriptor, ModuleTreeDescriptor, Problem},
 };
 
-
 #[derive(Clone, Debug)]
 pub(crate) struct FileResolverImp {
     inner: Arc<FileResolver>
 }
 
+impl PartialEq for FileResolverImp {
+    fn eq(&self, other: &FileResolverImp) -> bool {
+        self.inner() == other.inner()
+    }
+}
+
+impl Eq for FileResolverImp {
+}
+
+impl Hash for FileResolverImp {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        self.inner().hash(hasher);
+    }
+}
+
 impl FileResolverImp {
     pub(crate) fn new(inner: Arc<FileResolver>) -> FileResolverImp {
         FileResolverImp { inner }
@@ -40,6 +54,9 @@ impl FileResolverImp {
     pub(crate) fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
         self.inner.resolve(file_id, path)
     }
+    fn inner(&self) -> *const FileResolver {
+        &*self.inner
+    }
 }
 
 impl Default for FileResolverImp {
@@ -60,29 +77,27 @@ impl Default for FileResolverImp {
 
 #[derive(Debug)]
 pub(crate) struct AnalysisHostImpl {
-    data: Arc<WorldData>
+    data: WorldData
 }
 
 impl AnalysisHostImpl {
     pub fn new() -> AnalysisHostImpl {
         AnalysisHostImpl {
-            data: Arc::new(WorldData::default()),
+            data: WorldData::default(),
         }
     }
     pub fn analysis(&self) -> AnalysisImpl {
         AnalysisImpl {
-            needs_reindex: AtomicBool::new(false),
             data: self.data.clone(),
         }
     }
     pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
-        let data = self.data_mut();
-        data.root = Arc::new(data.root.apply_changes(changes, None));
+        self.data_mut()
+            .root.apply_changes(changes, None);
     }
     pub fn set_file_resolver(&mut self, resolver: FileResolverImp) {
-        let data = self.data_mut();
-        data.file_resolver = resolver.clone();
-        data.root = Arc::new(data.root.apply_changes(&mut iter::empty(), Some(resolver)));
+        self.data_mut()
+            .root.apply_changes(&mut iter::empty(), Some(resolver));
     }
     pub fn set_crate_graph(&mut self, graph: CrateGraph) {
         let mut visited = FxHashSet::default();
@@ -97,34 +112,24 @@ impl AnalysisHostImpl {
         self.data_mut().libs.push(Arc::new(root));
     }
     fn data_mut(&mut self) -> &mut WorldData {
-        Arc::make_mut(&mut self.data)
+        &mut self.data
     }
 }
 
 pub(crate) struct AnalysisImpl {
-    needs_reindex: AtomicBool,
-    data: Arc<WorldData>,
+    data: WorldData,
 }
 
 impl fmt::Debug for AnalysisImpl {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        (&*self.data).fmt(f)
-    }
-}
-
-impl Clone for AnalysisImpl {
-    fn clone(&self) -> AnalysisImpl {
-        AnalysisImpl {
-            needs_reindex: AtomicBool::new(self.needs_reindex.load(SeqCst)),
-            data: Arc::clone(&self.data),
-        }
+        self.data.fmt(f)
     }
 }
 
 impl AnalysisImpl {
     fn root(&self, file_id: FileId) -> &SourceRoot {
         if self.data.root.contains(file_id) {
-            return &*self.data.root;
+            return &self.data.root;
         }
         &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
     }
@@ -306,6 +311,68 @@ impl AnalysisImpl {
             .collect()
     }
 
+    pub fn resolve_callable(&self, file_id: FileId, offset: TextUnit, token: &JobToken)
+        -> Option<(FnDescriptor, Option<usize>)> {
+
+        let root = self.root(file_id);
+        let file = root.syntax(file_id);
+        let syntax = file.syntax();
+
+        // Find the calling expression and it's NameRef
+        let calling_node = FnCallNode::with_node(syntax, offset)?;
+        let name_ref = calling_node.name_ref()?;
+
+        // Resolve the function's NameRef (NOTE: this isn't entirely accurate).
+        let file_symbols = self.index_resolve(name_ref, token);
+        for (_, fs) in file_symbols {
+            if fs.kind == FN_DEF {
+                if let Some(fn_def) = find_node_at_offset(syntax, fs.node_range.start()) {
+                    if let Some(descriptor) = FnDescriptor::new(fn_def) {
+                        // If we have a calling expression let's find which argument we are on
+                        let mut current_parameter = None;
+
+                        let num_params = descriptor.params.len();
+                        let has_self = fn_def.param_list()
+                            .and_then(|l| l.self_param())
+                            .is_some();
+
+                        if num_params == 1 {
+                            if !has_self {
+                                current_parameter = Some(1);
+                            }
+                        } else if num_params > 1 {
+                            // Count how many parameters into the call we are.
+                            // TODO: This is best effort for now and should be fixed at some point.
+                            // It may be better to see where we are in the arg_list and then check
+                            // where offset is in that list (or beyond).
+                            // Revisit this after we get documentation comments in.
+                            if let Some(ref arg_list) = calling_node.arg_list() {
+                                let start = arg_list.syntax().range().start();
+
+                                let range_search = TextRange::from_to(start, offset);
+                                let mut commas: usize = arg_list.syntax().text()
+                                    .slice(range_search).to_string()
+                                    .matches(",")
+                                    .count();
+
+                                // If we have a method call eat the first param since it's just self.
+                                if has_self {
+                                    commas = commas + 1;
+                                }
+
+                                current_parameter = Some(commas);
+                            }
+                        }
+
+                        return Some((descriptor, current_parameter));
+                    }
+                }
+            }
+        }
+
+        None
+    }
+
     fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
         let name = name_ref.text();
         let mut query = Query::new(name.to_string());
@@ -325,9 +392,8 @@ impl AnalysisImpl {
 
 #[derive(Default, Clone, Debug)]
 struct WorldData {
-    file_resolver: FileResolverImp,
     crate_graph: CrateGraph,
-    root: Arc<WritableSourceRoot>,
+    root: WritableSourceRoot,
     libs: Vec<Arc<ReadonlySourceRoot>>,
 }
 
@@ -355,3 +421,46 @@ impl CrateGraph {
         Some(crate_id)
     }
 }
+
+enum FnCallNode<'a> {
+    CallExpr(ast::CallExpr<'a>),
+    MethodCallExpr(ast::MethodCallExpr<'a>)
+}
+
+impl<'a> FnCallNode<'a> {
+    pub fn with_node(syntax: SyntaxNodeRef, offset: TextUnit) -> Option<FnCallNode> {
+        if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
+            return Some(FnCallNode::CallExpr(expr));
+        }
+        if let Some(expr) = find_node_at_offset::<ast::MethodCallExpr>(syntax, offset) {
+            return Some(FnCallNode::MethodCallExpr(expr));
+        }
+        None
+    }
+
+    pub fn name_ref(&self) -> Option<ast::NameRef> {
+        match *self {
+            FnCallNode::CallExpr(call_expr) => {
+                Some(match call_expr.expr()? {
+                    Expr::PathExpr(path_expr) => {
+                        path_expr.path()?.segment()?.name_ref()?
+                    },
+                    _ => return None
+                })
+            },
+
+            FnCallNode::MethodCallExpr(call_expr) => {
+                call_expr.syntax().children()
+                    .filter_map(ast::NameRef::cast)
+                    .nth(0)
+            }
+        }
+    }
+
+    pub fn arg_list(&self) -> Option<ast::ArgList> {
+        match *self {
+            FnCallNode::CallExpr(expr) => expr.arg_list(),
+            FnCallNode::MethodCallExpr(expr) => expr.arg_list()
+        }
+    }
+}
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 849fd93e4..d8b355a81 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -19,7 +19,6 @@ mod imp;
 mod job;
 mod roots;
 mod db;
-mod queries;
 mod descriptors;
 
 use std::{
@@ -29,15 +28,18 @@ use std::{
 
 use relative_path::{RelativePath, RelativePathBuf};
 use ra_syntax::{File, TextRange, TextUnit, AtomEdit};
-use imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
 use rustc_hash::FxHashMap;
+use crate::imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
 
 pub use ra_editor::{
     StructureNode, LineIndex, FileSymbol,
     Runnable, RunnableKind, HighlightedRange, CompletionItem,
     Fold, FoldKind
 };
-pub use job::{JobToken, JobHandle};
+pub use crate::{
+    job::{JobToken, JobHandle},
+    descriptors::FnDescriptor,
+};
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct FileId(pub u32);
@@ -159,7 +161,7 @@ impl Query {
     }
 }
 
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct Analysis {
     imp: AnalysisImpl
 }
@@ -236,6 +238,11 @@ impl Analysis {
         let file = self.imp.file_syntax(file_id);
         ra_editor::folding_ranges(&file)
     }
+
+    pub fn resolve_callable(&self, file_id: FileId, offset: TextUnit, token: &JobToken)
+        -> Option<(FnDescriptor, Option<usize>)> {
+        self.imp.resolve_callable(file_id, offset, token)
+    }
 }
 
 #[derive(Debug)]
@@ -250,3 +257,9 @@ impl LibraryData {
         LibraryData { root }
     }
 }
+
+#[test]
+fn analysis_is_send() {
+    fn is_send<T: Send>() {}
+    is_send::<Analysis>();
+}
diff --git a/crates/ra_analysis/src/module_map.rs b/crates/ra_analysis/src/module_map.rs
index a21f55fff..c1799e3d4 100644
--- a/crates/ra_analysis/src/module_map.rs
+++ b/crates/ra_analysis/src/module_map.rs
@@ -1,157 +1,34 @@
 use std::sync::Arc;
-use {
+use crate::{
     FileId,
-    db::{
-        Query, QueryRegistry, QueryCtx,
-        file_set
-    },
-    queries::file_syntax,
+    db::{SyntaxDatabase},
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
 };
 
-pub(crate) fn register_queries(reg: &mut QueryRegistry) {
-    reg.add(MODULE_DESCR, "MODULE_DESCR");
-    reg.add(MODULE_TREE, "MODULE_TREE");
-}
-
-pub(crate) fn module_tree(ctx: QueryCtx) -> Arc<ModuleTreeDescriptor> {
-    ctx.get(MODULE_TREE, ())
-}
-
-const MODULE_DESCR: Query<FileId, ModuleDescriptor> = Query(30, |ctx, &file_id| {
-    let file = file_syntax(ctx, file_id);
-    ModuleDescriptor::new(file.ast())
-});
-
-const MODULE_TREE: Query<(), ModuleTreeDescriptor> = Query(31, |ctx, _| {
-    let file_set = file_set(ctx);
-    let mut files = Vec::new();
-    for &file_id in file_set.0.iter() {
-        let module_descr = ctx.get(MODULE_DESCR, file_id);
-        files.push((file_id, module_descr));
-    }
-    ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.1)
-});
-
-#[cfg(test)]
-mod tests {
-    use std::collections::HashMap;
-    use im;
-    use relative_path::{RelativePath, RelativePathBuf};
-    use {
-        db::{Db},
-        imp::FileResolverImp,
-        FileId, FileResolver,
-    };
-    use super::*;
-
-    #[derive(Debug)]
-    struct FileMap(im::HashMap<FileId, RelativePathBuf>);
-
-    impl FileResolver for FileMap {
-        fn file_stem(&self, file_id: FileId) -> String {
-            self.0[&file_id].file_stem().unwrap().to_string()
+salsa::query_group! {
+    pub(crate) trait ModulesDatabase: SyntaxDatabase {
+        fn module_tree(key: ()) -> Arc<ModuleTreeDescriptor> {
+            type ModuleTreeQuery;
         }
-        fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
-            let path = self.0[&file_id].join(rel).normalize();
-            self.0.iter()
-                .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
-                .next()
+        fn module_descriptor(file_id: FileId) -> Arc<ModuleDescriptor> {
+            type ModuleDescriptorQuery;
         }
     }
+}
 
-    struct Fixture {
-        next_file_id: u32,
-        fm: im::HashMap<FileId, RelativePathBuf>,
-        db: Db,
-    }
-
-    impl Fixture {
-        fn new() -> Fixture {
-            Fixture {
-                next_file_id: 1,
-                fm: im::HashMap::new(),
-                db: Db::new(),
-            }
-        }
-        fn add_file(&mut self, path: &str, text: &str) -> FileId {
-            assert!(path.starts_with("/"));
-            let file_id = FileId(self.next_file_id);
-            self.next_file_id += 1;
-            self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
-            let mut new_state = self.db.state().clone();
-            new_state.file_map.insert(file_id, Arc::new(text.to_string()));
-            new_state.file_resolver = FileResolverImp::new(
-                Arc::new(FileMap(self.fm.clone()))
-            );
-            self.db = self.db.with_changes(new_state, &[file_id], true);
-            file_id
-        }
-        fn remove_file(&mut self, file_id: FileId) {
-            self.fm.remove(&file_id);
-            let mut new_state = self.db.state().clone();
-            new_state.file_map.remove(&file_id);
-            new_state.file_resolver = FileResolverImp::new(
-                Arc::new(FileMap(self.fm.clone()))
-            );
-            self.db = self.db.with_changes(new_state, &[file_id], true);
-        }
-        fn change_file(&mut self, file_id: FileId, new_text: &str) {
-            let mut new_state = self.db.state().clone();
-            new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
-            self.db = self.db.with_changes(new_state, &[file_id], false);
-        }
-        fn check_parent_modules(
-            &self,
-            file_id: FileId,
-            expected: &[FileId],
-            queries: &[(&'static str, u64)]
-        ) {
-            let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
-            let actual = tree.parent_modules(file_id)
-                .into_iter()
-                .map(|link| link.owner(&tree))
-                .collect::<Vec<_>>();
-            assert_eq!(actual.as_slice(), expected);
-            let mut counts = HashMap::new();
-            events.into_iter()
-                .for_each(|event| *counts.entry(event).or_insert(0) += 1);
-            for &(query_id, expected_count) in queries.iter() {
-                let actual_count = *counts.get(&query_id).unwrap_or(&0);
-                assert_eq!(
-                    actual_count,
-                    expected_count,
-                    "counts for {} differ",
-                    query_id,
-                )
-            }
-
-        }
-    }
-
-    #[test]
-    fn test_parent_module() {
-        let mut f = Fixture::new();
-        let foo = f.add_file("/foo.rs", "");
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-        let lib = f.add_file("/lib.rs", "mod foo;");
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
-
-        f.change_file(lib, "");
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-        f.change_file(lib, "mod foo;");
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-
-        f.change_file(lib, "mod bar;");
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
 
-        f.change_file(lib, "mod foo;");
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
+fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Arc<ModuleDescriptor> {
+    let file = db.file_syntax(file_id);
+    Arc::new(ModuleDescriptor::new(file.ast()))
+}
 
-        f.remove_file(lib);
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
+fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
+    let file_set = db.file_set(());
+    let mut files = Vec::new();
+    for &file_id in file_set.files.iter() {
+        let module_descr = db.module_descriptor(file_id);
+        files.push((file_id, module_descr));
     }
+    let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
+    Arc::new(res)
 }
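The new ModulesDatabase group splits the work in two: module_descriptor is per-file, and module_tree folds all descriptors together, so editing one file only re-runs the per-file step. A salsa-free sketch of the same shape (toy parsing, not the crate's real descriptors):

    use std::collections::BTreeMap;

    // Per-file step: which submodules does this file declare?
    fn module_descriptor(text: &str) -> Vec<String> {
        text.lines()
            .filter_map(|l| l.trim().strip_prefix("mod "))
            .map(|rest| rest.trim_end_matches(';').to_string())
            .collect()
    }

    // Whole-project step: fold every per-file descriptor into one view.
    fn module_tree(files: &BTreeMap<u32, String>) -> BTreeMap<u32, Vec<String>> {
        files.iter().map(|(&id, text)| (id, module_descriptor(text))).collect()
    }

    fn main() {
        let mut files = BTreeMap::new();
        files.insert(1, "mod foo;\nfn main() {}".to_string());
        assert_eq!(module_tree(&files)[&1], vec!["foo".to_string()]);
    }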
diff --git a/crates/ra_analysis/src/queries.rs b/crates/ra_analysis/src/queries.rs
deleted file mode 100644
index 062a2f420..000000000
--- a/crates/ra_analysis/src/queries.rs
+++ /dev/null
@@ -1,39 +0,0 @@
1use std::sync::Arc;
2use ra_syntax::File;
3use ra_editor::LineIndex;
4use {
5 FileId,
6 db::{Query, QueryCtx, QueryRegistry},
7 symbol_index::SymbolIndex,
8};
9
10pub(crate) use db::{file_text, file_set};
11
12pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
13 (&*ctx.get(FILE_SYNTAX, file_id)).clone()
14}
15pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
16 ctx.get(FILE_LINES, file_id)
17}
18pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
19 ctx.get(FILE_SYMBOLS, file_id)
20}
21
22const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
23 let text = file_text(ctx, *file_id);
24 File::parse(&*text)
25});
26const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
27 let text = file_text(ctx, *file_id);
28 LineIndex::new(&*text)
29});
30const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
31 let syntax = file_syntax(ctx, *file_id);
32 SymbolIndex::for_file(*file_id, syntax)
33});
34
35pub(crate) fn register_queries(reg: &mut QueryRegistry) {
36 reg.add(FILE_SYNTAX, "FILE_SYNTAX");
37 reg.add(FILE_LINES, "FILE_LINES");
38 reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
39}
diff --git a/crates/ra_analysis/src/roots.rs b/crates/ra_analysis/src/roots.rs
index 32a8c5bd0..76bcecd38 100644
--- a/crates/ra_analysis/src/roots.rs
+++ b/crates/ra_analysis/src/roots.rs
@@ -5,16 +5,18 @@ use std::{
 
 use once_cell::sync::OnceCell;
 use rayon::prelude::*;
-use rustc_hash::FxHashMap;
+use salsa::Database;
+use rustc_hash::{FxHashMap, FxHashSet};
 use ra_editor::LineIndex;
 use ra_syntax::File;
 
-use {
+use crate::{
     FileId,
     imp::FileResolverImp,
     symbol_index::SymbolIndex,
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
-    db::Db,
+    db::{self, FilesDatabase, SyntaxDatabase},
+    module_map::ModulesDatabase,
 };
 
 pub(crate) trait SourceRoot {
@@ -25,62 +27,68 @@ pub(crate) trait SourceRoot {
     fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
 }
 
-#[derive(Default, Debug)]
+#[derive(Default, Debug, Clone)]
 pub(crate) struct WritableSourceRoot {
-    db: Db,
+    db: db::RootDatabase,
 }
 
 impl WritableSourceRoot {
     pub fn apply_changes(
-        &self,
+        &mut self,
         changes: &mut dyn Iterator<Item=(FileId, Option<String>)>,
         file_resolver: Option<FileResolverImp>,
-    ) -> WritableSourceRoot {
-        let resolver_changed = file_resolver.is_some();
-        let mut changed_files = Vec::new();
-        let mut new_state = self.db.state().clone();
-
+    ) {
+        let mut changed = FxHashSet::default();
+        let mut removed = FxHashSet::default();
         for (file_id, text) in changes {
-            changed_files.push(file_id);
             match text {
-                Some(text) => {
-                    new_state.file_map.insert(file_id, Arc::new(text));
-                },
                 None => {
-                    new_state.file_map.remove(&file_id);
+                    removed.insert(file_id);
+                }
+                Some(text) => {
+                    self.db.query(db::FileTextQuery)
+                        .set(file_id, Arc::new(text));
+                    changed.insert(file_id);
                 }
             }
         }
-        if let Some(file_resolver) = file_resolver {
-            new_state.file_resolver = file_resolver
-        }
-        WritableSourceRoot {
-            db: self.db.with_changes(new_state, &changed_files, resolver_changed)
+        let file_set = self.db.file_set(());
+        let mut files: FxHashSet<FileId> = file_set
+            .files
+            .clone();
+        for file_id in removed {
+            files.remove(&file_id);
         }
+        files.extend(changed);
+        let resolver = file_resolver.unwrap_or_else(|| file_set.resolver.clone());
+        self.db.query(db::FileSetQuery)
+            .set((), Arc::new(db::FileSet { files, resolver }));
     }
 }
 
 impl SourceRoot for WritableSourceRoot {
     fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
-        self.db.make_query(::module_map::module_tree)
+        self.db.module_tree(())
     }
-
     fn contains(&self, file_id: FileId) -> bool {
-        self.db.state().file_map.contains_key(&file_id)
+        self.db.file_set(())
+            .files
+            .contains(&file_id)
     }
     fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
-        self.db.make_query(|ctx| ::queries::file_lines(ctx, file_id))
+        self.db.file_lines(file_id)
     }
     fn syntax(&self, file_id: FileId) -> File {
-        self.db.make_query(|ctx| ::queries::file_syntax(ctx, file_id))
+        self.db.file_syntax(file_id)
     }
     fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
-        self.db.make_query(|ctx| {
-            let file_set = ::queries::file_set(ctx);
-            let syms = file_set.0.iter()
-                .map(|file_id| ::queries::file_symbols(ctx, *file_id));
-            acc.extend(syms);
-        });
+        let db = &self.db;
+        let symbols = db.file_set(());
+        let symbols = symbols
+            .files
+            .iter()
+            .map(|&file_id| db.file_symbols(file_id));
+        acc.extend(symbols);
     }
 }
 
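apply_changes rebuilds the FileSet input from the previous one: removed files drop out, changed files are (re)inserted, and the resolver is replaced only when a new one is supplied. The set arithmetic in isolation (std types, illustrative names):

    use std::collections::HashSet;

    fn next_file_set(current: &HashSet<u32>, changed: &HashSet<u32>, removed: &HashSet<u32>) -> HashSet<u32> {
        let mut files = current.clone();
        for id in removed {
            files.remove(id);                  // deleted files leave the set
        }
        files.extend(changed.iter().cloned()); // new or edited files are present
        files
    }

    fn main() {
        let current: HashSet<u32> = [1, 2, 3].iter().cloned().collect();
        let changed: HashSet<u32> = [3, 4].iter().cloned().collect();
        let removed: HashSet<u32> = [2].iter().cloned().collect();
        let expected: HashSet<u32> = [1, 3, 4].iter().cloned().collect();
        assert_eq!(next_file_set(&current, &changed, &removed), expected);
    }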
diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs
index ffbb6a29f..54672fde4 100644
--- a/crates/ra_analysis/src/symbol_index.rs
+++ b/crates/ra_analysis/src/symbol_index.rs
@@ -9,7 +9,7 @@ use ra_syntax::{
 };
 use fst::{self, Streamer};
 use rayon::prelude::*;
-use {Query, FileId, JobToken};
+use crate::{Query, FileId, JobToken};
 
 #[derive(Debug)]
 pub(crate) struct SymbolIndex {
@@ -17,6 +17,15 @@ pub(crate) struct SymbolIndex {
     map: fst::Map,
 }
 
+impl PartialEq for SymbolIndex {
+    fn eq(&self, other: &SymbolIndex) -> bool {
+        self.symbols == other.symbols
+    }
+}
+
+impl Eq for SymbolIndex {
+}
+
 impl Hash for SymbolIndex {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         self.symbols.hash(hasher)