-rw-r--r--  crates/libanalysis/Cargo.toml               2
-rw-r--r--  crates/libanalysis/src/db/imp.rs          152
-rw-r--r--  crates/libanalysis/src/db/mod.rs           85
-rw-r--r--  crates/libanalysis/src/descriptors.rs     220
-rw-r--r--  crates/libanalysis/src/imp.rs             146
-rw-r--r--  crates/libanalysis/src/lib.rs              23
-rw-r--r--  crates/libanalysis/src/module_map.rs      379
-rw-r--r--  crates/libanalysis/src/queries.rs          39
-rw-r--r--  crates/libanalysis/src/roots.rs           166
-rw-r--r--  crates/libanalysis/src/symbol_index.rs     12
-rw-r--r--  crates/libanalysis/tests/tests.rs          35
-rw-r--r--  crates/libeditor/src/line_index.rs          2
-rw-r--r--  crates/libeditor/src/symbols.rs             2
-rw-r--r--  crates/libsyntax2/src/lib.rs                2
-rw-r--r--  crates/salsa/Cargo.toml                     8
-rw-r--r--  crates/salsa/src/lib.rs                   293
-rw-r--r--  crates/salsa/tests/integration.rs         170
-rw-r--r--  crates/server/Cargo.toml                    2
18 files changed, 1285 insertions(+), 453 deletions(-)
diff --git a/crates/libanalysis/Cargo.toml b/crates/libanalysis/Cargo.toml
index 4d565e95f..88f29d7c8 100644
--- a/crates/libanalysis/Cargo.toml
+++ b/crates/libanalysis/Cargo.toml
@@ -11,8 +11,10 @@ parking_lot = "0.6.3"
 once_cell = "0.1.5"
 rayon = "1.0.2"
 fst = "0.3.1"
+im = "12.0.0"
 libsyntax2 = { path = "../libsyntax2" }
 libeditor = { path = "../libeditor" }
+salsa = { path = "../salsa" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }
diff --git a/crates/libanalysis/src/db/imp.rs b/crates/libanalysis/src/db/imp.rs
new file mode 100644
index 000000000..f26be1046
--- /dev/null
+++ b/crates/libanalysis/src/db/imp.rs
@@ -0,0 +1,152 @@
1use std::{
2 sync::Arc,
3 any::Any,
4 hash::{Hash, Hasher},
5 collections::hash_map::{DefaultHasher, HashMap},
6 iter,
7};
8use salsa;
9use {FileId, imp::FileResolverImp};
10use super::{State, Query, QueryCtx};
11
12pub(super) type Data = Arc<Any + Send + Sync + 'static>;
13
14#[derive(Debug)]
15pub(super) struct Db {
16 names: Arc<HashMap<salsa::QueryTypeId, &'static str>>,
17 pub(super) imp: salsa::Db<State, Data>,
18}
19
20impl Db {
21 pub(super) fn new(mut reg: QueryRegistry) -> Db {
22 let config = reg.config.take().unwrap();
23 Db {
24 names: Arc::new(reg.names),
25 imp: salsa::Db::new(config, State::default())
26 }
27 }
28 pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
29 let names = self.names.clone();
30 let mut invalidations = salsa::Invalidations::new();
31 invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
32 if resolver_changed {
33 invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
34 } else {
35 invalidations.invalidate(FILE_SET, iter::empty());
36 }
37 let imp = self.imp.with_ground_data(
38 new_state,
39 invalidations,
40 );
41 Db { names, imp }
42 }
43 pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
44 ctx.trace().into_iter().map(|it| self.names[&it]).collect()
45 }
46}
47
48pub(crate) trait EvalQuery {
49 type Params;
50 type Output;
51 fn query_type(&self) -> salsa::QueryTypeId;
52 fn f(&self) -> salsa::QueryFn<State, Data>;
53 fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
54}
55
56impl<T, R> EvalQuery for Query<T, R>
57where
58 T: Hash + Send + Sync + 'static,
59 R: Hash + Send + Sync + 'static,
60{
61 type Params = T;
62 type Output = R;
63 fn query_type(&self) -> salsa::QueryTypeId {
64 salsa::QueryTypeId(self.0)
65 }
66 fn f(&self) -> salsa::QueryFn<State, Data> {
67 let f = self.1;
68 Box::new(move |ctx, data| {
69 let ctx = QueryCtx { imp: ctx };
70 let data: &T = data.downcast_ref().unwrap();
71 let res = f(ctx, data);
72 let h = hash(&res);
73 (Arc::new(res), salsa::OutputFingerprint(h))
74 })
75 }
76 fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
77 let query_id = salsa::QueryId(
78 self.query_type(),
79 salsa::InputFingerprint(hash(&params)),
80 );
81 let res = ctx.imp.get(query_id, Arc::new(params));
82 res.downcast().unwrap()
83 }
84}
85
86pub(super) struct QueryRegistry {
87 config: Option<salsa::QueryConfig<State, Data>>,
88 names: HashMap<salsa::QueryTypeId, &'static str>,
89}
90
91impl QueryRegistry {
92 pub(super) fn new() -> QueryRegistry {
93 let mut config = salsa::QueryConfig::<State, Data>::new();
94 config = config.with_ground_query(
95 FILE_TEXT, Box::new(|state, params| {
96 let file_id: &FileId = params.downcast_ref().unwrap();
97 let res = state.file_map[file_id].clone();
98 let fingerprint = salsa::OutputFingerprint(hash(&res));
99 (res, fingerprint)
100 })
101 );
102 config = config.with_ground_query(
103 FILE_SET, Box::new(|state, _params| {
104 let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
105 let hash = hash(&file_ids);
106 let file_resolver = state.file_resolver.clone();
107 let res = (file_ids, file_resolver);
108 let fingerprint = salsa::OutputFingerprint(hash);
109 (Arc::new(res), fingerprint)
110 })
111 );
112 let mut names = HashMap::new();
113 names.insert(FILE_TEXT, "FILE_TEXT");
114 names.insert(FILE_SET, "FILE_SET");
115 QueryRegistry { config: Some(config), names }
116 }
117 pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
118 let id = q.query_type();
119 let prev = self.names.insert(id, name);
120 assert!(prev.is_none(), "duplicate query: {:?}", id);
121 let config = self.config.take().unwrap();
122 let config = config.with_query(id, q.f());
123 self.config= Some(config);
124 }
125}
126
127fn hash<T: Hash>(x: &T) -> u64 {
128 let mut hasher = DefaultHasher::new();
129 x.hash(&mut hasher);
130 hasher.finish()
131}
132
133const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
134pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
135 let query_id = salsa::QueryId(
136 FILE_TEXT,
137 salsa::InputFingerprint(hash(&file_id)),
138 );
139 let res = ctx.imp.get(query_id, Arc::new(file_id));
140 res.downcast().unwrap()
141}
142
143const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
144pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
145 let query_id = salsa::QueryId(
146 FILE_SET,
147 salsa::InputFingerprint(hash(&())),
148 );
149 let res = ctx.imp.get(query_id, Arc::new(()));
150 res.downcast().unwrap()
151}
152
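
Both fingerprint newtypes used above (salsa::InputFingerprint and salsa::OutputFingerprint) wrap a u64 produced by the `hash` helper at the bottom of this file. A self-contained sketch of that step (not part of the commit, standard library only): equal values hash equally, so a recomputed query whose output fingerprint is unchanged does not invalidate its dependents.

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    fn fingerprint<T: Hash>(x: &T) -> u64 {
        let mut hasher = DefaultHasher::new();
        x.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        // Same value, same fingerprint: a dependent query can be validated
        // without rerunning it.
        assert_eq!(fingerprint(&("main.rs", 1u32)), fingerprint(&("main.rs", 1u32)));
        // Different value, (almost certainly) different fingerprint, so
        // dependents get recomputed.
        assert_ne!(fingerprint(&"mod foo;"), fingerprint(&"mod bar;"));
    }
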
diff --git a/crates/libanalysis/src/db/mod.rs b/crates/libanalysis/src/db/mod.rs
new file mode 100644
index 000000000..22769d112
--- /dev/null
+++ b/crates/libanalysis/src/db/mod.rs
@@ -0,0 +1,85 @@
1mod imp;
2
3use std::{
4 sync::Arc,
5};
6use im;
7use salsa;
8use {FileId, imp::FileResolverImp};
9
10#[derive(Debug, Default, Clone)]
11pub(crate) struct State {
12 pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
13 pub(crate) file_resolver: FileResolverImp
14}
15
16#[derive(Debug)]
17pub(crate) struct Db {
18 imp: imp::Db,
19}
20
21#[derive(Clone, Copy)]
22pub(crate) struct QueryCtx<'a> {
23 imp: &'a salsa::QueryCtx<State, imp::Data>,
24}
25
26pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
27
28pub(crate) struct QueryRegistry {
29 imp: imp::QueryRegistry,
30}
31
32impl Default for Db {
33 fn default() -> Db {
34 Db::new()
35 }
36}
37
38impl Db {
39 pub(crate) fn new() -> Db {
40 let reg = QueryRegistry::new();
41 Db { imp: imp::Db::new(reg.imp) }
42 }
43 pub(crate) fn state(&self) -> &State {
44 self.imp.imp.ground_data()
45 }
46 pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
47 Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
48 }
49 pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
50 let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
51 f(ctx)
52 }
53 #[allow(unused)]
54 pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
55 let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
56 let res = f(ctx);
57 let trace = self.imp.extract_trace(ctx.imp);
58 (res, trace)
59 }
60}
61
62impl<'a> QueryCtx<'a> {
63 pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
64 q.get(self, params)
65 }
66}
67
68pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
69 imp::file_text(ctx, file_id)
70}
71
72pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
73 imp::file_set(ctx)
74}
75impl QueryRegistry {
76 fn new() -> QueryRegistry {
77 let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
78 ::queries::register_queries(&mut reg);
79 ::module_map::register_queries(&mut reg);
80 reg
81 }
82 pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
83 self.imp.add(q, name)
84 }
85}
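
The split between db/mod.rs and db/imp.rs is a typed facade over an untyped store: results are stored as `Arc<Any + Send + Sync>` keyed by a query-type id plus a parameter fingerprint, and the generic `get` downcasts on the way out. A rough, self-contained sketch of that pattern (hypothetical names, no dependency tracking or invalidation):

    use std::any::Any;
    use std::collections::hash_map::DefaultHasher;
    use std::collections::HashMap;
    use std::hash::{Hash, Hasher};
    use std::sync::Arc;

    // A typed query descriptor: numeric id plus compute function, loosely
    // mirroring `Query(pub u16, pub fn(QueryCtx, &T) -> R)` above.
    struct Query<T, R>(u16, fn(&T) -> R);

    #[derive(Default)]
    struct Cache {
        // The store itself is untyped, like `Data = Arc<Any + Send + Sync>`.
        memo: HashMap<(u16, u64), Arc<dyn Any + Send + Sync>>,
    }

    impl Cache {
        fn get<T: Hash, R: Send + Sync + 'static>(&mut self, q: &Query<T, R>, params: T) -> Arc<R> {
            let key = (q.0, hash(&params));
            let entry = self
                .memo
                .entry(key)
                .or_insert_with(|| Arc::new((q.1)(&params)) as Arc<dyn Any + Send + Sync>);
            // The typed wrapper downcasts on the way out, like
            // `res.downcast().unwrap()` in db/imp.rs.
            Arc::clone(entry).downcast::<R>().unwrap()
        }
    }

    fn hash<T: Hash>(x: &T) -> u64 {
        let mut h = DefaultHasher::new();
        x.hash(&mut h);
        h.finish()
    }

    const LINE_COUNT: Query<String, usize> = Query(1, |text: &String| text.lines().count());

    fn main() {
        let mut cache = Cache::default();
        assert_eq!(*cache.get(&LINE_COUNT, "mod foo;\nmod bar;".to_string()), 2);
        // A second call with equal params hits the memo table instead of recomputing.
        assert_eq!(*cache.get(&LINE_COUNT, "mod foo;\nmod bar;".to_string()), 2);
    }
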
diff --git a/crates/libanalysis/src/descriptors.rs b/crates/libanalysis/src/descriptors.rs
new file mode 100644
index 000000000..93a4158e4
--- /dev/null
+++ b/crates/libanalysis/src/descriptors.rs
@@ -0,0 +1,220 @@
1use std::{
2 collections::BTreeMap,
3};
4use relative_path::RelativePathBuf;
5use libsyntax2::{
6 SmolStr,
7 ast::{self, NameOwner},
8};
9use {
10 FileId,
11 imp::FileResolverImp,
12};
13
14#[derive(Debug, Hash)]
15pub struct ModuleDescriptor {
16 pub submodules: Vec<Submodule>
17}
18
19impl ModuleDescriptor {
20 pub fn new(root: ast::Root) -> ModuleDescriptor {
21 let submodules = modules(root)
22 .map(|(name, _)| Submodule { name })
23 .collect();
24
25 ModuleDescriptor { submodules } }
26}
27
28fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item=(SmolStr, ast::Module<'a>)> {
29 root
30 .modules()
31 .filter_map(|module| {
32 let name = module.name()?.text();
33 if !module.has_semi() {
34 return None;
35 }
36 Some((name, module))
37 })
38}
39
40#[derive(Clone, Hash, PartialEq, Eq, Debug)]
41pub struct Submodule {
42 pub name: SmolStr,
43}
44
45#[derive(Hash, Debug)]
46pub(crate) struct ModuleTreeDescriptor {
47 nodes: Vec<NodeData>,
48 links: Vec<LinkData>,
49 file_id2node: BTreeMap<FileId, Node>,
50}
51
52#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
53struct Node(usize);
54#[derive(Hash, Debug)]
55struct NodeData {
56 file_id: FileId,
57 links: Vec<Link>,
58 parents: Vec<Link>
59}
60
61#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
62pub(crate) struct Link(usize);
63#[derive(Hash, Debug)]
64struct LinkData {
65 owner: Node,
66 name: SmolStr,
67 points_to: Vec<Node>,
68 problem: Option<Problem>,
69}
70
71
72#[derive(Clone, Debug, Hash)]
73pub enum Problem {
74 UnresolvedModule {
75 candidate: RelativePathBuf,
76 },
77 NotDirOwner {
78 move_to: RelativePathBuf,
79 candidate: RelativePathBuf,
80 }
81}
82
83impl ModuleTreeDescriptor {
84 pub(crate) fn new<'a>(
85 files: impl Iterator<Item=(FileId, &'a ModuleDescriptor)> + Clone,
86 file_resolver: &FileResolverImp,
87 ) -> ModuleTreeDescriptor {
88 let mut file_id2node = BTreeMap::new();
89 let mut nodes: Vec<NodeData> = files.clone().enumerate()
90 .map(|(idx, (file_id, _))| {
91 file_id2node.insert(file_id, Node(idx));
92 NodeData {
93 file_id,
94 links: Vec::new(),
95 parents: Vec::new(),
96 }
97 })
98 .collect();
99 let mut links = Vec::new();
100
101 for (idx, (file_id, descr)) in files.enumerate() {
102 let owner = Node(idx);
103 for sub in descr.submodules.iter() {
104 let link = Link(links.len());
105 nodes[owner.0].links.push(link);
106 let (points_to, problem) = resolve_submodule(file_id, &sub.name, file_resolver);
107 let points_to = points_to
108 .into_iter()
109 .map(|file_id| {
110 let node = file_id2node[&file_id];
111 nodes[node.0].parents.push(link);
112 node
113 })
114 .collect();
115
116 links.push(LinkData {
117 owner,
118 name: sub.name.clone(),
119 points_to,
120 problem,
121 })
122
123 }
124 }
125
126 ModuleTreeDescriptor {
127 nodes, links, file_id2node
128 }
129 }
130
131 pub(crate) fn parent_modules(&self, file_id: FileId) -> Vec<Link> {
132 let node = self.file_id2node[&file_id];
133 self.node(node)
134 .parents
135 .clone()
136 }
137 pub(crate) fn child_module_by_name(&self, file_id: FileId, name: &str) -> Vec<FileId> {
138 let node = self.file_id2node[&file_id];
139 self.node(node)
140 .links
141 .iter()
142 .filter(|it| it.name(self) == name)
143 .flat_map(|link| link.points_to(self).iter().map(|&node| self.node(node).file_id))
144 .collect()
145 }
146 pub(crate) fn problems<'a, 'b>(&'b self, file_id: FileId, root: ast::Root<'a>) -> Vec<(ast::Name<'a>, &'b Problem)> {
147 let node = self.file_id2node[&file_id];
148 self.node(node)
149 .links
150 .iter()
151 .filter_map(|&link| {
152 let problem = self.link(link).problem.as_ref()?;
153 let name = link.bind_source(self, root).name()?;
154 Some((name, problem))
155 })
156 .collect()
157 }
158
159 fn node(&self, node: Node) -> &NodeData {
160 &self.nodes[node.0]
161 }
162 fn link(&self, link: Link) -> &LinkData {
163 &self.links[link.0]
164 }
165}
166
167impl Link {
168 pub(crate) fn name(self, tree: &ModuleTreeDescriptor) -> SmolStr {
169 tree.link(self).name.clone()
170 }
171 pub(crate) fn owner(self, tree: &ModuleTreeDescriptor) -> FileId {
172 let owner = tree.link(self).owner;
173 tree.node(owner).file_id
174 }
175 fn points_to(self, tree: &ModuleTreeDescriptor) -> &[Node] {
176 &tree.link(self).points_to
177 }
178 pub(crate) fn bind_source<'a>(self, tree: &ModuleTreeDescriptor, root: ast::Root<'a>) -> ast::Module<'a> {
179 modules(root)
180 .filter(|(name, _)| name == &tree.link(self).name)
181 .next()
182 .unwrap()
183 .1
184 }
185}
186
187
188fn resolve_submodule(
189 file_id: FileId,
190 name: &SmolStr,
191 file_resolver: &FileResolverImp
192) -> (Vec<FileId>, Option<Problem>) {
193 let mod_name = file_resolver.file_stem(file_id);
194 let is_dir_owner =
195 mod_name == "mod" || mod_name == "lib" || mod_name == "main";
196
197 let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
198 let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
199 let points_to: Vec<FileId>;
200 let problem: Option<Problem>;
201 if is_dir_owner {
202 points_to = [&file_mod, &dir_mod].iter()
203 .filter_map(|path| file_resolver.resolve(file_id, path))
204 .collect();
205 problem = if points_to.is_empty() {
206 Some(Problem::UnresolvedModule {
207 candidate: file_mod,
208 })
209 } else {
210 None
211 }
212 } else {
213 points_to = Vec::new();
214 problem = Some(Problem::NotDirOwner {
215 move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)),
216 candidate: file_mod,
217 });
218 }
219 (points_to, problem)
220}
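
resolve_submodule encodes the usual 2015-edition layout rule: only mod.rs, lib.rs and main.rs own their directory, and for those files `mod foo;` may resolve to a sibling foo.rs or to foo/mod.rs; any other declaring file is reported as Problem::NotDirOwner. A toy, standalone version of just the candidate-path part (not from the commit; the real code asks FileResolverImp which candidate actually exists):

    // Given the stem of the declaring file and the declared module name,
    // list the paths a hypothetical file resolver would be asked about.
    fn submodule_candidates(declaring_stem: &str, name: &str) -> Vec<String> {
        let is_dir_owner =
            declaring_stem == "mod" || declaring_stem == "lib" || declaring_stem == "main";
        if is_dir_owner {
            // `mod foo;` in lib.rs/main.rs/mod.rs may live at a sibling
            // foo.rs or at foo/mod.rs.
            vec![format!("../{}.rs", name), format!("../{}/mod.rs", name)]
        } else {
            // Other files are not directory owners; the real code reports
            // Problem::NotDirOwner with a suggested move instead.
            Vec::new()
        }
    }

    fn main() {
        assert_eq!(
            submodule_candidates("lib", "foo"),
            vec!["../foo.rs".to_string(), "../foo/mod.rs".to_string()]
        );
        assert!(submodule_candidates("foo", "bar").is_empty());
    }
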
diff --git a/crates/libanalysis/src/imp.rs b/crates/libanalysis/src/imp.rs
index 3e65ee14a..6f3191fe7 100644
--- a/crates/libanalysis/src/imp.rs
+++ b/crates/libanalysis/src/imp.rs
@@ -5,6 +5,7 @@ use std::{
5 }, 5 },
6 fmt, 6 fmt,
7 collections::{HashSet, VecDeque}, 7 collections::{HashSet, VecDeque},
8 iter,
8}; 9};
9 10
10use relative_path::RelativePath; 11use relative_path::RelativePath;
@@ -18,8 +19,8 @@ use libsyntax2::{
18use { 19use {
19 FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit, 20 FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
20 JobToken, CrateGraph, CrateId, 21 JobToken, CrateGraph, CrateId,
21 module_map::{ModuleMap, Problem},
22 roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot}, 22 roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
23 descriptors::{ModuleTreeDescriptor, Problem},
23}; 24};
24 25
25 26
@@ -75,14 +76,12 @@ impl AnalysisHostImpl {
75 } 76 }
76 pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) { 77 pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
77 let data = self.data_mut(); 78 let data = self.data_mut();
78 for (file_id, text) in changes { 79 data.root = Arc::new(data.root.apply_changes(changes, None));
79 data.root.update(file_id, text);
80 }
81 } 80 }
82 pub fn set_file_resolver(&mut self, resolver: FileResolverImp) { 81 pub fn set_file_resolver(&mut self, resolver: FileResolverImp) {
83 let data = self.data_mut(); 82 let data = self.data_mut();
84 data.file_resolver = resolver.clone(); 83 data.file_resolver = resolver.clone();
85 data.root.set_file_resolver(resolver); 84 data.root = Arc::new(data.root.apply_changes(&mut iter::empty(), Some(resolver)));
86 } 85 }
87 pub fn set_crate_graph(&mut self, graph: CrateGraph) { 86 pub fn set_crate_graph(&mut self, graph: CrateGraph) {
88 let mut visited = HashSet::new(); 87 let mut visited = HashSet::new();
@@ -124,18 +123,17 @@ impl Clone for AnalysisImpl {
124impl AnalysisImpl { 123impl AnalysisImpl {
125 fn root(&self, file_id: FileId) -> &SourceRoot { 124 fn root(&self, file_id: FileId) -> &SourceRoot {
126 if self.data.root.contains(file_id) { 125 if self.data.root.contains(file_id) {
127 return &self.data.root; 126 return &*self.data.root;
128 } 127 }
129 &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap() 128 &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
130 } 129 }
131 pub fn file_syntax(&self, file_id: FileId) -> &File { 130 pub fn file_syntax(&self, file_id: FileId) -> File {
132 self.root(file_id).syntax(file_id) 131 self.root(file_id).syntax(file_id)
133 } 132 }
134 pub fn file_line_index(&self, file_id: FileId) -> &LineIndex { 133 pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
135 self.root(file_id).lines(file_id) 134 self.root(file_id).lines(file_id)
136 } 135 }
137 pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> { 136 pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
138 self.reindex();
139 let mut buf = Vec::new(); 137 let mut buf = Vec::new();
140 if query.libs { 138 if query.libs {
141 self.data.libs.iter() 139 self.data.libs.iter()
@@ -148,25 +146,24 @@ impl AnalysisImpl {
148 } 146 }
149 pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> { 147 pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
150 let root = self.root(file_id); 148 let root = self.root(file_id);
151 let module_map = root.module_map(); 149 let module_tree = root.module_tree();
152 let id = module_map.file2module(file_id); 150 module_tree.parent_modules(file_id)
153 module_map 151 .iter()
154 .parent_modules(id, &|file_id| root.syntax(file_id)) 152 .map(|link| {
155 .into_iter() 153 let file_id = link.owner(&module_tree);
156 .map(|(id, name, node)| { 154 let syntax = root.syntax(file_id);
157 let id = module_map.module2file(id); 155 let decl = link.bind_source(&module_tree, syntax.ast());
158 let sym = FileSymbol { 156 let sym = FileSymbol {
159 name, 157 name: link.name(&module_tree),
160 node_range: node.range(), 158 node_range: decl.syntax().range(),
161 kind: MODULE, 159 kind: MODULE,
162 }; 160 };
163 (id, sym) 161 (file_id, sym)
164 }) 162 })
165 .collect() 163 .collect()
166 } 164 }
167
168 pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> { 165 pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
169 let module_map = self.root(file_id).module_map(); 166 let module_tree = self.root(file_id).module_tree();
170 let crate_graph = &self.data.crate_graph; 167 let crate_graph = &self.data.crate_graph;
171 let mut res = Vec::new(); 168 let mut res = Vec::new();
172 let mut work = VecDeque::new(); 169 let mut work = VecDeque::new();
@@ -177,11 +174,10 @@ impl AnalysisImpl {
177 res.push(crate_id); 174 res.push(crate_id);
178 continue; 175 continue;
179 } 176 }
180 let mid = module_map.file2module(id); 177 let parents = module_tree
181 let parents = module_map 178 .parent_modules(id)
182 .parent_module_ids(mid, &|file_id| self.file_syntax(file_id))
183 .into_iter() 179 .into_iter()
184 .map(|id| module_map.module2file(id)) 180 .map(|link| link.owner(&module_tree))
185 .filter(|&id| visited.insert(id)); 181 .filter(|&id| visited.insert(id));
186 work.extend(parents); 182 work.extend(parents);
187 } 183 }
@@ -197,7 +193,7 @@ impl AnalysisImpl {
197 token: &JobToken, 193 token: &JobToken,
198 ) -> Vec<(FileId, FileSymbol)> { 194 ) -> Vec<(FileId, FileSymbol)> {
199 let root = self.root(file_id); 195 let root = self.root(file_id);
200 let module_map = root.module_map(); 196 let module_tree = root.module_tree();
201 let file = root.syntax(file_id); 197 let file = root.syntax(file_id);
202 let syntax = file.syntax(); 198 let syntax = file.syntax();
203 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) { 199 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
@@ -206,7 +202,7 @@ impl AnalysisImpl {
206 if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) { 202 if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
207 if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { 203 if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
208 if module.has_semi() { 204 if module.has_semi() {
209 let file_ids = self.resolve_module(module_map, file_id, module); 205 let file_ids = self.resolve_module(&*module_tree, file_id, module);
210 206
211 let res = file_ids.into_iter().map(|id| { 207 let res = file_ids.into_iter().map(|id| {
212 let name = module.name() 208 let name = module.name()
@@ -229,7 +225,7 @@ impl AnalysisImpl {
229 225
230 pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> { 226 pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
231 let root = self.root(file_id); 227 let root = self.root(file_id);
232 let module_map = root.module_map(); 228 let module_tree = root.module_tree();
233 let syntax = root.syntax(file_id); 229 let syntax = root.syntax(file_id);
234 230
235 let mut res = libeditor::diagnostics(&syntax) 231 let mut res = libeditor::diagnostics(&syntax)
@@ -237,47 +233,43 @@ impl AnalysisImpl {
237 .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None }) 233 .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
238 .collect::<Vec<_>>(); 234 .collect::<Vec<_>>();
239 235
240 module_map.problems( 236 for (name_node, problem) in module_tree.problems(file_id, syntax.ast()) {
241 file_id, 237 let diag = match problem {
242 &|file_id| self.file_syntax(file_id), 238 Problem::UnresolvedModule { candidate } => {
243 |name_node, problem| { 239 let create_file = FileSystemEdit::CreateFile {
244 let diag = match problem { 240 anchor: file_id,
245 Problem::UnresolvedModule { candidate } => { 241 path: candidate.clone(),
246 let create_file = FileSystemEdit::CreateFile { 242 };
247 anchor: file_id, 243 let fix = SourceChange {
248 path: candidate.clone(), 244 label: "create module".to_string(),
249 }; 245 source_file_edits: Vec::new(),
250 let fix = SourceChange { 246 file_system_edits: vec![create_file],
251 label: "create module".to_string(), 247 cursor_position: None,
252 source_file_edits: Vec::new(), 248 };
253 file_system_edits: vec![create_file], 249 Diagnostic {
254 cursor_position: None, 250 range: name_node.syntax().range(),
255 }; 251 message: "unresolved module".to_string(),
256 Diagnostic { 252 fix: Some(fix),
257 range: name_node.syntax().range(),
258 message: "unresolved module".to_string(),
259 fix: Some(fix),
260 }
261 } 253 }
262 Problem::NotDirOwner { move_to, candidate } => { 254 }
263 let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() }; 255 Problem::NotDirOwner { move_to, candidate } => {
264 let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) }; 256 let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() };
265 let fix = SourceChange { 257 let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) };
266 label: "move file and create module".to_string(), 258 let fix = SourceChange {
267 source_file_edits: Vec::new(), 259 label: "move file and create module".to_string(),
268 file_system_edits: vec![move_file, create_file], 260 source_file_edits: Vec::new(),
269 cursor_position: None, 261 file_system_edits: vec![move_file, create_file],
270 }; 262 cursor_position: None,
271 Diagnostic { 263 };
272 range: name_node.syntax().range(), 264 Diagnostic {
273 message: "can't declare module at this location".to_string(), 265 range: name_node.syntax().range(),
274 fix: Some(fix), 266 message: "can't declare module at this location".to_string(),
275 } 267 fix: Some(fix),
276 } 268 }
277 }; 269 }
278 res.push(diag) 270 };
279 } 271 res.push(diag)
280 ); 272 }
281 res 273 res
282 } 274 }
283 275
@@ -307,26 +299,12 @@ impl AnalysisImpl {
307 self.world_symbols(query, token) 299 self.world_symbols(query, token)
308 } 300 }
309 301
310 fn resolve_module(&self, module_map: &ModuleMap, file_id: FileId, module: ast::Module) -> Vec<FileId> { 302 fn resolve_module(&self, module_tree: &ModuleTreeDescriptor, file_id: FileId, module: ast::Module) -> Vec<FileId> {
311 let name = match module.name() { 303 let name = match module.name() {
312 Some(name) => name.text(), 304 Some(name) => name.text(),
313 None => return Vec::new(), 305 None => return Vec::new(),
314 }; 306 };
315 let id = module_map.file2module(file_id); 307 module_tree.child_module_by_name(file_id, name.as_str())
316 module_map
317 .child_module_by_name(
318 id, name.as_str(),
319 &|file_id| self.file_syntax(file_id),
320 )
321 .into_iter()
322 .map(|id| module_map.module2file(id))
323 .collect()
324 }
325
326 fn reindex(&self) {
327 if self.needs_reindex.compare_and_swap(true, false, SeqCst) {
328 self.data.root.reindex();
329 }
330 } 308 }
331} 309}
332 310
@@ -334,7 +312,7 @@ impl AnalysisImpl {
334struct WorldData { 312struct WorldData {
335 file_resolver: FileResolverImp, 313 file_resolver: FileResolverImp,
336 crate_graph: CrateGraph, 314 crate_graph: CrateGraph,
337 root: WritableSourceRoot, 315 root: Arc<WritableSourceRoot>,
338 libs: Vec<Arc<ReadonlySourceRoot>>, 316 libs: Vec<Arc<ReadonlySourceRoot>>,
339} 317}
340 318
diff --git a/crates/libanalysis/src/lib.rs b/crates/libanalysis/src/lib.rs
index 80cde079f..b4b7a6a30 100644
--- a/crates/libanalysis/src/lib.rs
+++ b/crates/libanalysis/src/lib.rs
@@ -9,12 +9,17 @@ extern crate rayon;
 extern crate relative_path;
 #[macro_use]
 extern crate crossbeam_channel;
+extern crate im;
+extern crate salsa;
 
 mod symbol_index;
 mod module_map;
 mod imp;
 mod job;
 mod roots;
+mod db;
+mod queries;
+mod descriptors;
 
 use std::{
     sync::Arc,
@@ -161,8 +166,8 @@ impl Analysis {
     pub fn file_syntax(&self, file_id: FileId) -> File {
         self.imp.file_syntax(file_id).clone()
     }
-    pub fn file_line_index(&self, file_id: FileId) -> LineIndex {
-        self.imp.file_line_index(file_id).clone()
+    pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
+        self.imp.file_line_index(file_id)
     }
     pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
         libeditor::extend_selection(file, range).unwrap_or(range)
@@ -172,19 +177,19 @@ impl Analysis {
     }
     pub fn syntax_tree(&self, file_id: FileId) -> String {
         let file = self.imp.file_syntax(file_id);
-        libeditor::syntax_tree(file)
+        libeditor::syntax_tree(&file)
     }
     pub fn join_lines(&self, file_id: FileId, range: TextRange) -> SourceChange {
         let file = self.imp.file_syntax(file_id);
-        SourceChange::from_local_edit(file_id, "join lines", libeditor::join_lines(file, range))
+        SourceChange::from_local_edit(file_id, "join lines", libeditor::join_lines(&file, range))
     }
     pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> {
         let file = self.imp.file_syntax(file_id);
-        Some(SourceChange::from_local_edit(file_id, "add semicolon", libeditor::on_eq_typed(file, offset)?))
+        Some(SourceChange::from_local_edit(file_id, "add semicolon", libeditor::on_eq_typed(&file, offset)?))
     }
     pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
         let file = self.imp.file_syntax(file_id);
-        libeditor::file_structure(file)
+        libeditor::file_structure(&file)
     }
     pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
         self.imp.world_symbols(query, token)
@@ -203,15 +208,15 @@ impl Analysis {
     }
     pub fn runnables(&self, file_id: FileId) -> Vec<Runnable> {
         let file = self.imp.file_syntax(file_id);
-        libeditor::runnables(file)
+        libeditor::runnables(&file)
     }
     pub fn highlight(&self, file_id: FileId) -> Vec<HighlightedRange> {
         let file = self.imp.file_syntax(file_id);
-        libeditor::highlight(file)
+        libeditor::highlight(&file)
     }
     pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Option<Vec<CompletionItem>> {
         let file = self.imp.file_syntax(file_id);
-        libeditor::scope_completion(file, offset)
+        libeditor::scope_completion(&file, offset)
     }
     pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
         self.imp.assists(file_id, range)
diff --git a/crates/libanalysis/src/module_map.rs b/crates/libanalysis/src/module_map.rs
index 9acebd6e2..a21f55fff 100644
--- a/crates/libanalysis/src/module_map.rs
+++ b/crates/libanalysis/src/module_map.rs
@@ -1,274 +1,157 @@
1use relative_path::RelativePathBuf; 1use std::sync::Arc;
2use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard}; 2use {
3use libsyntax2::{ 3 FileId,
4 File, 4 db::{
5 ast::{self, AstNode, NameOwner}, 5 Query, QueryRegistry, QueryCtx,
6 SyntaxNode, SmolStr, 6 file_set
7 },
8 queries::file_syntax,
9 descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
7}; 10};
8use {FileId, imp::FileResolverImp};
9
10type SyntaxProvider<'a> = dyn Fn(FileId) -> &'a File + 'a;
11
12#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
13pub struct ModuleId(FileId);
14 11
15#[derive(Debug, Default)] 12pub(crate) fn register_queries(reg: &mut QueryRegistry) {
16pub struct ModuleMap { 13 reg.add(MODULE_DESCR, "MODULE_DESCR");
17 state: RwLock<State>, 14 reg.add(MODULE_TREE, "MODULE_TREE");
18} 15}
19 16
20#[derive(Debug, Clone, Copy, PartialEq, Eq)] 17pub(crate) fn module_tree(ctx: QueryCtx) -> Arc<ModuleTreeDescriptor> {
21pub enum ChangeKind { 18 ctx.get(MODULE_TREE, ())
22 Delete, Insert, Update
23}
24
25impl Clone for ModuleMap {
26 fn clone(&self) -> ModuleMap {
27 let state = self.state.read().clone();
28 ModuleMap { state: RwLock::new(state) }
29 }
30} 19}
31 20
32#[derive(Clone, Debug, Default)] 21const MODULE_DESCR: Query<FileId, ModuleDescriptor> = Query(30, |ctx, &file_id| {
33struct State { 22 let file = file_syntax(ctx, file_id);
34 file_resolver: FileResolverImp, 23 ModuleDescriptor::new(file.ast())
35 changes: Vec<(FileId, ChangeKind)>, 24});
36 links: Vec<Link>, 25
37} 26const MODULE_TREE: Query<(), ModuleTreeDescriptor> = Query(31, |ctx, _| {
38 27 let file_set = file_set(ctx);
39#[derive(Clone, Debug)] 28 let mut files = Vec::new();
40struct Link { 29 for &file_id in file_set.0.iter() {
41 owner: ModuleId, 30 let module_descr = ctx.get(MODULE_DESCR, file_id);
42 syntax: SyntaxNode, 31 files.push((file_id, module_descr));
43 points_to: Vec<ModuleId>, 32 }
44 problem: Option<Problem>, 33 ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.1)
45} 34});
46 35
47#[derive(Clone, Debug)] 36#[cfg(test)]
48pub enum Problem { 37mod tests {
49 UnresolvedModule { 38 use std::collections::HashMap;
50 candidate: RelativePathBuf, 39 use im;
51 }, 40 use relative_path::{RelativePath, RelativePathBuf};
52 NotDirOwner { 41 use {
53 move_to: RelativePathBuf, 42 db::{Db},
54 candidate: RelativePathBuf, 43 imp::FileResolverImp,
55 } 44 FileId, FileResolver,
56} 45 };
57 46 use super::*;
58impl ModuleMap { 47
59 pub fn new() -> ModuleMap { 48 #[derive(Debug)]
60 Default::default() 49 struct FileMap(im::HashMap<FileId, RelativePathBuf>);
61 } 50
62 pub fn update_file(&mut self, file_id: FileId, change_kind: ChangeKind) { 51 impl FileResolver for FileMap {
63 self.state.get_mut().changes.push((file_id, change_kind)); 52 fn file_stem(&self, file_id: FileId) -> String {
64 } 53 self.0[&file_id].file_stem().unwrap().to_string()
65 pub(crate) fn set_file_resolver(&mut self, file_resolver: FileResolverImp) { 54 }
66 self.state.get_mut().file_resolver = file_resolver; 55 fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
67 } 56 let path = self.0[&file_id].join(rel).normalize();
68 pub fn module2file(&self, m: ModuleId) -> FileId { 57 self.0.iter()
69 m.0 58 .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
70 } 59 .next()
71 pub fn file2module(&self, file_id: FileId) -> ModuleId { 60 }
72 ModuleId(file_id)
73 }
74 pub fn child_module_by_name<'a>(
75 &self,
76 parent_mod: ModuleId,
77 child_mod: &str,
78 syntax_provider: &SyntaxProvider,
79 ) -> Vec<ModuleId> {
80 self.links(syntax_provider)
81 .links
82 .iter()
83 .filter(|link| link.owner == parent_mod)
84 .filter(|link| link.name() == child_mod)
85 .filter_map(|it| it.points_to.first())
86 .map(|&it| it)
87 .collect()
88 }
89
90 pub fn parent_modules(
91 &self,
92 m: ModuleId,
93 syntax_provider: &SyntaxProvider,
94 ) -> Vec<(ModuleId, SmolStr, SyntaxNode)> {
95 let mut res = Vec::new();
96 self.for_each_parent_link(m, syntax_provider, |link| {
97 res.push(
98 (link.owner, link.name().clone(), link.syntax.clone())
99 )
100 });
101 res
102 }
103
104 pub fn parent_module_ids(
105 &self,
106 m: ModuleId,
107 syntax_provider: &SyntaxProvider,
108 ) -> Vec<ModuleId> {
109 let mut res = Vec::new();
110 self.for_each_parent_link(m, syntax_provider, |link| res.push(link.owner));
111 res
112 }
113
114 fn for_each_parent_link(
115 &self,
116 m: ModuleId,
117 syntax_provider: &SyntaxProvider,
118 f: impl FnMut(&Link)
119 ) {
120 self.links(syntax_provider)
121 .links
122 .iter()
123 .filter(move |link| link.points_to.iter().any(|&it| it == m))
124 .for_each(f)
125 } 61 }
126 62
127 pub fn problems( 63 struct Fixture {
128 &self, 64 next_file_id: u32,
129 file: FileId, 65 fm: im::HashMap<FileId, RelativePathBuf>,
130 syntax_provider: &SyntaxProvider, 66 db: Db,
131 mut cb: impl FnMut(ast::Name, &Problem),
132 ) {
133 let module = self.file2module(file);
134 let links = self.links(syntax_provider);
135 links
136 .links
137 .iter()
138 .filter(|link| link.owner == module)
139 .filter_map(|link| {
140 let problem = link.problem.as_ref()?;
141 Some((link, problem))
142 })
143 .for_each(|(link, problem)| cb(link.name_node(), problem))
144 } 67 }
145 68
146 fn links( 69 impl Fixture {
147 &self, 70 fn new() -> Fixture {
148 syntax_provider: &SyntaxProvider, 71 Fixture {
149 ) -> RwLockReadGuard<State> { 72 next_file_id: 1,
150 { 73 fm: im::HashMap::new(),
151 let guard = self.state.read(); 74 db: Db::new(),
152 if guard.changes.is_empty() {
153 return guard;
154 } 75 }
155 } 76 }
156 let mut guard = self.state.write(); 77 fn add_file(&mut self, path: &str, text: &str) -> FileId {
157 if !guard.changes.is_empty() { 78 assert!(path.starts_with("/"));
158 guard.apply_changes(syntax_provider); 79 let file_id = FileId(self.next_file_id);
80 self.next_file_id += 1;
81 self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
82 let mut new_state = self.db.state().clone();
83 new_state.file_map.insert(file_id, Arc::new(text.to_string()));
84 new_state.file_resolver = FileResolverImp::new(
85 Arc::new(FileMap(self.fm.clone()))
86 );
87 self.db = self.db.with_changes(new_state, &[file_id], true);
88 file_id
159 } 89 }
160 assert!(guard.changes.is_empty()); 90 fn remove_file(&mut self, file_id: FileId) {
161 RwLockWriteGuard::downgrade(guard) 91 self.fm.remove(&file_id);
162 } 92 let mut new_state = self.db.state().clone();
163} 93 new_state.file_map.remove(&file_id);
164 94 new_state.file_resolver = FileResolverImp::new(
165impl State { 95 Arc::new(FileMap(self.fm.clone()))
166 pub fn apply_changes( 96 );
167 &mut self, 97 self.db = self.db.with_changes(new_state, &[file_id], true);
168 syntax_provider: &SyntaxProvider,
169 ) {
170 let mut reresolve = false;
171 for (file_id, kind) in self.changes.drain(..) {
172 let mod_id = ModuleId(file_id);
173 self.links.retain(|link| link.owner != mod_id);
174 match kind {
175 ChangeKind::Delete => {
176 for link in self.links.iter_mut() {
177 link.points_to.retain(|&x| x != mod_id);
178 }
179 }
180 ChangeKind::Insert => {
181 let file = syntax_provider(file_id);
182 self.links.extend(
183 file
184 .ast()
185 .modules()
186 .filter_map(|it| Link::new(mod_id, it))
187 );
188 reresolve = true;
189 }
190 ChangeKind::Update => {
191 let file = syntax_provider(file_id);
192 let resolver = &self.file_resolver;
193 self.links.extend(
194 file
195 .ast()
196 .modules()
197 .filter_map(|it| Link::new(mod_id, it))
198 .map(|mut link| {
199 link.resolve(resolver);
200 link
201 })
202 );
203 }
204 }
205 } 98 }
206 if reresolve { 99 fn change_file(&mut self, file_id: FileId, new_text: &str) {
207 for link in self.links.iter_mut() { 100 let mut new_state = self.db.state().clone();
208 link.resolve(&self.file_resolver) 101 new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
209 } 102 self.db = self.db.with_changes(new_state, &[file_id], false);
210 } 103 }
211 } 104 fn check_parent_modules(
212} 105 &self,
106 file_id: FileId,
107 expected: &[FileId],
108 queries: &[(&'static str, u64)]
109 ) {
110 let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
111 let actual = tree.parent_modules(file_id)
112 .into_iter()
113 .map(|link| link.owner(&tree))
114 .collect::<Vec<_>>();
115 assert_eq!(actual.as_slice(), expected);
116 let mut counts = HashMap::new();
117 events.into_iter()
118 .for_each(|event| *counts.entry(event).or_insert(0) += 1);
119 for &(query_id, expected_count) in queries.iter() {
120 let actual_count = *counts.get(&query_id).unwrap_or(&0);
121 assert_eq!(
122 actual_count,
123 expected_count,
124 "counts for {} differ",
125 query_id,
126 )
127 }
213 128
214impl Link {
215 fn new(owner: ModuleId, module: ast::Module) -> Option<Link> {
216 if module.name().is_none() {
217 return None;
218 } 129 }
219 let link = Link {
220 owner,
221 syntax: module.syntax().owned(),
222 points_to: Vec::new(),
223 problem: None,
224 };
225 Some(link)
226 } 130 }
227 131
228 fn name(&self) -> SmolStr { 132 #[test]
229 self.name_node().text() 133 fn test_parent_module() {
230 } 134 let mut f = Fixture::new();
135 let foo = f.add_file("/foo.rs", "");
136 f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
231 137
232 fn name_node(&self) -> ast::Name { 138 let lib = f.add_file("/lib.rs", "mod foo;");
233 self.ast().name().unwrap() 139 f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
234 } 140 f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
235 141
236 fn ast(&self) -> ast::Module { 142 f.change_file(lib, "");
237 ast::Module::cast(self.syntax.borrowed()) 143 f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
238 .unwrap()
239 }
240 144
241 fn resolve(&mut self, file_resolver: &FileResolverImp) { 145 f.change_file(lib, "mod foo;");
242 if !self.ast().has_semi() { 146 f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
243 self.problem = None;
244 self.points_to = Vec::new();
245 return;
246 }
247 147
248 let mod_name = file_resolver.file_stem(self.owner.0); 148 f.change_file(lib, "mod bar;");
249 let is_dir_owner = 149 f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
250 mod_name == "mod" || mod_name == "lib" || mod_name == "main";
251 150
252 let file_mod = RelativePathBuf::from(format!("../{}.rs", self.name())); 151 f.change_file(lib, "mod foo;");
253 let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", self.name())); 152 f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
254 if is_dir_owner { 153
255 self.points_to = [&file_mod, &dir_mod].iter() 154 f.remove_file(lib);
256 .filter_map(|path| file_resolver.resolve(self.owner.0, path)) 155 f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
257 .map(ModuleId)
258 .collect();
259 self.problem = if self.points_to.is_empty() {
260 Some(Problem::UnresolvedModule {
261 candidate: file_mod,
262 })
263 } else {
264 None
265 }
266 } else {
267 self.points_to = Vec::new();
268 self.problem = Some(Problem::NotDirOwner {
269 move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)),
270 candidate: file_mod,
271 });
272 }
273 } 156 }
274} 157}
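
The tests above assert not only the computed parent modules but how many times MODULE_DESCR ran, which is how incremental reuse is checked: editing lib.rs re-parses one descriptor, while a repeated query over unchanged files should hit the cache. A stripped-down, standalone illustration of that trace-and-count idea (hand-rolled memo table, hypothetical names; not the crate's API):

    use std::cell::RefCell;
    use std::collections::HashMap;

    struct Traced {
        memo: RefCell<HashMap<String, usize>>,
        executed: RefCell<Vec<&'static str>>,
    }

    impl Traced {
        fn new() -> Traced {
            Traced { memo: RefCell::new(HashMap::new()), executed: RefCell::new(Vec::new()) }
        }
        fn line_count(&self, text: &str) -> usize {
            if let Some(&n) = self.memo.borrow().get(text) {
                return n; // cache hit: no trace event recorded
            }
            self.executed.borrow_mut().push("LINE_COUNT");
            let n = text.lines().count();
            self.memo.borrow_mut().insert(text.to_string(), n);
            n
        }
        fn take_trace(&self) -> Vec<&'static str> {
            std::mem::replace(&mut *self.executed.borrow_mut(), Vec::new())
        }
    }

    fn main() {
        let db = Traced::new();
        assert_eq!(db.line_count("mod foo;\n"), 1);
        assert_eq!(db.take_trace(), vec!["LINE_COUNT"]); // computed once
        assert_eq!(db.line_count("mod foo;\n"), 1);
        assert!(db.take_trace().is_empty()); // cached: nothing recomputed
    }
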
diff --git a/crates/libanalysis/src/queries.rs b/crates/libanalysis/src/queries.rs
new file mode 100644
index 000000000..0b60316e6
--- /dev/null
+++ b/crates/libanalysis/src/queries.rs
@@ -0,0 +1,39 @@
1use std::sync::Arc;
2use libsyntax2::File;
3use libeditor::LineIndex;
4use {
5 FileId,
6 db::{Query, QueryCtx, QueryRegistry},
7 symbol_index::SymbolIndex,
8};
9
10pub(crate) use db::{file_text, file_set};
11
12pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
13 (&*ctx.get(FILE_SYNTAX, file_id)).clone()
14}
15pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
16 ctx.get(FILE_LINES, file_id)
17}
18pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
19 ctx.get(FILE_SYMBOLS, file_id)
20}
21
22const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
23 let text = file_text(ctx, *file_id);
24 File::parse(&*text)
25});
26const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
27 let text = file_text(ctx, *file_id);
28 LineIndex::new(&*text)
29});
30const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
31 let syntax = file_syntax(ctx, *file_id);
32 SymbolIndex::for_file(*file_id, syntax)
33});
34
35pub(crate) fn register_queries(reg: &mut QueryRegistry) {
36 reg.add(FILE_SYNTAX, "FILE_SYNTAX");
37 reg.add(FILE_LINES, "FILE_LINES");
38 reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
39}
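
FILE_LINES and FILE_SYMBOLS are derived queries: each depends only on FILE_TEXT (directly or via FILE_SYNTAX), so an edit to one file invalidates only that file's line index and symbols. A rough standalone sketch of the kind of data a line-index query produces (the real libeditor::LineIndex is richer), and of why equal text yields an identical, reusable result:

    fn newline_offsets(text: &str) -> Vec<usize> {
        text.char_indices()
            .filter(|&(_, c)| c == '\n')
            .map(|(i, _)| i + 1) // offset where the next line starts
            .collect()
    }

    fn main() {
        let text = "mod foo;\nmod bar;\n";
        assert_eq!(newline_offsets(text), vec![9, 18]);
        // Same text, same index: a query whose output fingerprint is
        // unchanged does not force its dependents to rerun.
        assert_eq!(newline_offsets(text), newline_offsets("mod foo;\nmod bar;\n"));
    }
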
diff --git a/crates/libanalysis/src/roots.rs b/crates/libanalysis/src/roots.rs
index 629a697c5..191d0d821 100644
--- a/crates/libanalysis/src/roots.rs
+++ b/crates/libanalysis/src/roots.rs
@@ -1,6 +1,5 @@
1use std::{ 1use std::{
2 collections::HashMap, 2 collections::HashMap,
3 time::Instant,
4 sync::Arc, 3 sync::Arc,
5 panic, 4 panic,
6}; 5};
@@ -13,94 +12,82 @@ use libsyntax2::File;
13use { 12use {
14 FileId, 13 FileId,
15 imp::FileResolverImp, 14 imp::FileResolverImp,
16 module_map::{ModuleMap, ChangeKind},
17 symbol_index::SymbolIndex, 15 symbol_index::SymbolIndex,
16 descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
17 db::Db,
18}; 18};
19 19
20pub(crate) trait SourceRoot { 20pub(crate) trait SourceRoot {
21 fn contains(&self, file_id: FileId) -> bool; 21 fn contains(&self, file_id: FileId) -> bool;
22 fn module_map(&self) -> &ModuleMap; 22 fn module_tree(&self) -> Arc<ModuleTreeDescriptor>;
23 fn lines(&self, file_id: FileId) -> &LineIndex; 23 fn lines(&self, file_id: FileId) -> Arc<LineIndex>;
24 fn syntax(&self, file_id: FileId) -> &File; 24 fn syntax(&self, file_id: FileId) -> File;
25 fn symbols<'a>(&'a self, acc: &mut Vec<&'a SymbolIndex>); 25 fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
26} 26}
27 27
28#[derive(Clone, Default, Debug)] 28#[derive(Default, Debug)]
29pub(crate) struct WritableSourceRoot { 29pub(crate) struct WritableSourceRoot {
30 file_map: HashMap<FileId, Arc<(FileData, OnceCell<SymbolIndex>)>>, 30 db: Db,
31 module_map: ModuleMap,
32} 31}
33 32
34impl WritableSourceRoot { 33impl WritableSourceRoot {
35 pub fn update(&mut self, file_id: FileId, text: Option<String>) { 34 pub fn apply_changes(
36 let change_kind = if self.file_map.remove(&file_id).is_some() { 35 &self,
37 if text.is_some() { 36 changes: &mut dyn Iterator<Item=(FileId, Option<String>)>,
38 ChangeKind::Update 37 file_resolver: Option<FileResolverImp>,
39 } else { 38 ) -> WritableSourceRoot {
40 ChangeKind::Delete 39 let resolver_changed = file_resolver.is_some();
40 let mut changed_files = Vec::new();
41 let mut new_state = self.db.state().clone();
42
43 for (file_id, text) in changes {
44 changed_files.push(file_id);
45 match text {
46 Some(text) => {
47 new_state.file_map.insert(file_id, Arc::new(text));
48 },
49 None => {
50 new_state.file_map.remove(&file_id);
51 }
41 } 52 }
42 } else {
43 ChangeKind::Insert
44 };
45 self.module_map.update_file(file_id, change_kind);
46 self.file_map.remove(&file_id);
47 if let Some(text) = text {
48 let file_data = FileData::new(text);
49 self.file_map.insert(file_id, Arc::new((file_data, Default::default())));
50 } 53 }
51 } 54 if let Some(file_resolver) = file_resolver {
52 pub fn set_file_resolver(&mut self, file_resolver: FileResolverImp) { 55 new_state.file_resolver = file_resolver
53 self.module_map.set_file_resolver(file_resolver) 56 }
54 } 57 WritableSourceRoot {
55 pub fn reindex(&self) { 58 db: self.db.with_changes(new_state, &changed_files, resolver_changed)
56 let now = Instant::now();
57 self.file_map
58 .par_iter()
59 .for_each(|(&file_id, data)| {
60 symbols(file_id, data);
61 });
62 info!("parallel indexing took {:?}", now.elapsed());
63
64 }
65 fn data(&self, file_id: FileId) -> &FileData {
66 match self.file_map.get(&file_id) {
67 Some(data) => &data.0,
68 None => panic!("unknown file: {:?}", file_id),
69 } 59 }
70 } 60 }
71} 61}
72 62
73impl SourceRoot for WritableSourceRoot { 63impl SourceRoot for WritableSourceRoot {
74 fn contains(&self, file_id: FileId) -> bool { 64 fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
75 self.file_map.contains_key(&file_id) 65 self.db.make_query(::module_map::module_tree)
76 } 66 }
77 fn module_map(&self) -> &ModuleMap { 67
78 &self.module_map 68 fn contains(&self, file_id: FileId) -> bool {
69 self.db.state().file_map.contains_key(&file_id)
79 } 70 }
80 fn lines(&self, file_id: FileId) -> &LineIndex { 71 fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
81 self.data(file_id).lines() 72 self.db.make_query(|ctx| ::queries::file_lines(ctx, file_id))
82 } 73 }
83 fn syntax(&self, file_id: FileId) -> &File { 74 fn syntax(&self, file_id: FileId) -> File {
84 self.data(file_id).syntax() 75 self.db.make_query(|ctx| ::queries::file_syntax(ctx, file_id))
85 } 76 }
86 fn symbols<'a>(&'a self, acc: &mut Vec<&'a SymbolIndex>) { 77 fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
87 acc.extend( 78 self.db.make_query(|ctx| {
88 self.file_map 79 let file_set = ::queries::file_set(ctx);
89 .iter() 80 let syms = file_set.0.iter()
90 .map(|(&file_id, data)| symbols(file_id, data)) 81 .map(|file_id| ::queries::file_symbols(ctx, *file_id));
91 ) 82 acc.extend(syms);
83 });
92 } 84 }
93} 85}
94 86
95fn symbols(file_id: FileId, (data, symbols): &(FileData, OnceCell<SymbolIndex>)) -> &SymbolIndex {
96 let syntax = data.syntax_transient();
97 symbols.get_or_init(|| SymbolIndex::for_file(file_id, syntax))
98}
99
100#[derive(Debug)] 87#[derive(Debug)]
101struct FileData { 88struct FileData {
102 text: String, 89 text: String,
103 lines: OnceCell<LineIndex>, 90 lines: OnceCell<Arc<LineIndex>>,
104 syntax: OnceCell<File>, 91 syntax: OnceCell<File>,
105} 92}
106 93
@@ -112,8 +99,8 @@ impl FileData {
112 lines: OnceCell::new(), 99 lines: OnceCell::new(),
113 } 100 }
114 } 101 }
115 fn lines(&self) -> &LineIndex { 102 fn lines(&self) -> &Arc<LineIndex> {
116 self.lines.get_or_init(|| LineIndex::new(&self.text)) 103 self.lines.get_or_init(|| Arc::new(LineIndex::new(&self.text)))
117 } 104 }
118 fn syntax(&self) -> &File { 105 fn syntax(&self) -> &File {
119 let text = &self.text; 106 let text = &self.text;
@@ -126,40 +113,41 @@ impl FileData {
126 } 113 }
127 } 114 }
128 } 115 }
129 fn syntax_transient(&self) -> File {
130 self.syntax.get().map(|s| s.clone())
131 .unwrap_or_else(|| File::parse(&self.text))
132 }
133} 116}
134 117
135#[derive(Debug)] 118#[derive(Debug)]
136pub(crate) struct ReadonlySourceRoot { 119pub(crate) struct ReadonlySourceRoot {
137 symbol_index: SymbolIndex, 120 symbol_index: Arc<SymbolIndex>,
138 file_map: HashMap<FileId, FileData>, 121 file_map: HashMap<FileId, FileData>,
139 module_map: ModuleMap, 122 module_tree: Arc<ModuleTreeDescriptor>,
140} 123}
141 124
142impl ReadonlySourceRoot { 125impl ReadonlySourceRoot {
143 pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot { 126 pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot {
144 let mut module_map = ModuleMap::new(); 127 let modules = files.par_iter()
145 module_map.set_file_resolver(file_resolver); 128 .map(|(file_id, text)| {
146 let symbol_index = SymbolIndex::for_files( 129 let syntax = File::parse(text);
147 files.par_iter().map(|(file_id, text)| { 130 let mod_descr = ModuleDescriptor::new(syntax.ast());
148 (*file_id, File::parse(text)) 131 (*file_id, syntax, mod_descr)
149 }) 132 })
133 .collect::<Vec<_>>();
134 let module_tree = ModuleTreeDescriptor::new(
135 modules.iter().map(|it| (it.0, &it.2)),
136 &file_resolver,
137 );
138
139 let symbol_index = SymbolIndex::for_files(
140 modules.par_iter().map(|it| (it.0, it.1.clone()))
150 ); 141 );
151 let file_map: HashMap<FileId, FileData> = files 142 let file_map: HashMap<FileId, FileData> = files
152 .into_iter() 143 .into_iter()
153 .map(|(id, text)| { 144 .map(|(id, text)| (id, FileData::new(text)))
154 module_map.update_file(id, ChangeKind::Insert);
155 (id, FileData::new(text))
156 })
157 .collect(); 145 .collect();
158 146
159 ReadonlySourceRoot { 147 ReadonlySourceRoot {
160 symbol_index, 148 symbol_index: Arc::new(symbol_index),
161 file_map, 149 file_map,
162 module_map, 150 module_tree: Arc::new(module_tree),
163 } 151 }
164 } 152 }
165 153
@@ -172,19 +160,19 @@ impl ReadonlySourceRoot {
172} 160}
173 161
174impl SourceRoot for ReadonlySourceRoot { 162impl SourceRoot for ReadonlySourceRoot {
163 fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
164 Arc::clone(&self.module_tree)
165 }
175 fn contains(&self, file_id: FileId) -> bool { 166 fn contains(&self, file_id: FileId) -> bool {
176 self.file_map.contains_key(&file_id) 167 self.file_map.contains_key(&file_id)
177 } 168 }
178 fn module_map(&self) -> &ModuleMap { 169 fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
179 &self.module_map 170 Arc::clone(self.data(file_id).lines())
180 }
181 fn lines(&self, file_id: FileId) -> &LineIndex {
182 self.data(file_id).lines()
183 } 171 }
184 fn syntax(&self, file_id: FileId) -> &File { 172 fn syntax(&self, file_id: FileId) -> File {
185 self.data(file_id).syntax() 173 self.data(file_id).syntax().clone()
186 } 174 }
187 fn symbols<'a>(&'a self, acc: &mut Vec<&'a SymbolIndex>) { 175 fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) {
188 acc.push(&self.symbol_index) 176 acc.push(Arc::clone(&self.symbol_index))
189 } 177 }
190} 178}
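
WritableSourceRoot::apply_changes now takes &self and returns a new root, so AnalysisImpl can keep handing out the old Arc'd snapshot while the next one is built. A standalone sketch of that copy-then-modify style (std HashMap here, so the clone is O(n); the commit uses im::HashMap to make the copy cheap):

    use std::collections::HashMap;
    use std::sync::Arc;

    #[derive(Clone, Default)]
    struct Snapshot {
        file_map: HashMap<u32, Arc<String>>,
    }

    impl Snapshot {
        // Build a new snapshot instead of mutating in place, mirroring the
        // shape of apply_changes above.
        fn apply_changes(&self, changes: &[(u32, Option<&str>)]) -> Snapshot {
            let mut new_state = self.clone();
            for &(file_id, text) in changes {
                match text {
                    Some(text) => {
                        new_state.file_map.insert(file_id, Arc::new(text.to_string()));
                    }
                    None => {
                        new_state.file_map.remove(&file_id);
                    }
                }
            }
            new_state
        }
    }

    fn main() {
        let v1 = Snapshot::default().apply_changes(&[(1, Some("mod foo;"))]);
        let v2 = v1.apply_changes(&[(1, None)]);
        // The old snapshot is untouched by later edits.
        assert!(v1.file_map.contains_key(&1));
        assert!(!v2.file_map.contains_key(&1));
    }
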
diff --git a/crates/libanalysis/src/symbol_index.rs b/crates/libanalysis/src/symbol_index.rs
index 4c93761aa..d22187ac0 100644
--- a/crates/libanalysis/src/symbol_index.rs
+++ b/crates/libanalysis/src/symbol_index.rs
@@ -1,3 +1,7 @@
+use std::{
+    sync::Arc,
+    hash::{Hash, Hasher},
+};
 use libeditor::{FileSymbol, file_symbols};
 use libsyntax2::{
     File,
@@ -13,6 +17,12 @@ pub(crate) struct SymbolIndex {
     map: fst::Map,
 }
 
+impl Hash for SymbolIndex {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        self.symbols.hash(hasher)
+    }
+}
+
 impl SymbolIndex {
     pub(crate) fn for_files(files: impl ParallelIterator<Item=(FileId, File)>) -> SymbolIndex {
         let mut symbols = files
@@ -43,7 +53,7 @@ impl SymbolIndex {
 impl Query {
     pub(crate) fn search(
         self,
-        indices: &[&SymbolIndex],
+        indices: &[Arc<SymbolIndex>],
         token: &JobToken,
     ) -> Vec<(FileId, FileSymbol)> {
 
diff --git a/crates/libanalysis/tests/tests.rs b/crates/libanalysis/tests/tests.rs
index 00efe059c..547f85958 100644
--- a/crates/libanalysis/tests/tests.rs
+++ b/crates/libanalysis/tests/tests.rs
@@ -14,24 +14,6 @@ use test_utils::assert_eq_dbg;
 #[derive(Debug)]
 struct FileMap(Vec<(FileId, RelativePathBuf)>);
 
-fn analysis_host(files: &'static [(&'static str, &'static str)]) -> AnalysisHost {
-    let mut host = AnalysisHost::new();
-    let mut file_map = Vec::new();
-    for (id, &(path, contents)) in files.iter().enumerate() {
-        let file_id = FileId((id + 1) as u32);
-        assert!(path.starts_with('/'));
-        let path = RelativePathBuf::from_path(&path[1..]).unwrap();
-        host.change_file(file_id, Some(contents.to_string()));
-        file_map.push((file_id, path));
-    }
-    host.set_file_resolver(Arc::new(FileMap(file_map)));
-    host
-}
-
-fn analysis(files: &'static [(&'static str, &'static str)]) -> Analysis {
-    analysis_host(files).analysis()
-}
-
 impl FileMap {
     fn iter<'a>(&'a self) -> impl Iterator<Item=(FileId, &'a RelativePath)> + 'a {
         self.0.iter().map(|(id, path)| (*id, path.as_relative_path()))
@@ -56,6 +38,23 @@ impl FileResolver for FileMap {
     }
 }
 
+fn analysis_host(files: &'static [(&'static str, &'static str)]) -> AnalysisHost {
+    let mut host = AnalysisHost::new();
+    let mut file_map = Vec::new();
+    for (id, &(path, contents)) in files.iter().enumerate() {
+        let file_id = FileId((id + 1) as u32);
+        assert!(path.starts_with('/'));
+        let path = RelativePathBuf::from_path(&path[1..]).unwrap();
+        host.change_file(file_id, Some(contents.to_string()));
+        file_map.push((file_id, path));
+    }
+    host.set_file_resolver(Arc::new(FileMap(file_map)));
+    host
+}
+
+fn analysis(files: &'static [(&'static str, &'static str)]) -> Analysis {
+    analysis_host(files).analysis()
+}
 
 #[test]
 fn test_resolve_module() {
diff --git a/crates/libeditor/src/line_index.rs b/crates/libeditor/src/line_index.rs
index 801726aa5..9cd8da3a8 100644
--- a/crates/libeditor/src/line_index.rs
+++ b/crates/libeditor/src/line_index.rs
@@ -1,7 +1,7 @@
 use superslice::Ext;
 use ::TextUnit;
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Hash)]
 pub struct LineIndex {
     newlines: Vec<TextUnit>,
 }
diff --git a/crates/libeditor/src/symbols.rs b/crates/libeditor/src/symbols.rs
index 28b86c004..2f9cc9233 100644
--- a/crates/libeditor/src/symbols.rs
+++ b/crates/libeditor/src/symbols.rs
@@ -17,7 +17,7 @@ pub struct StructureNode {
     pub kind: SyntaxKind,
 }
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Hash)]
 pub struct FileSymbol {
     pub name: SmolStr,
     pub node_range: TextRange,
diff --git a/crates/libsyntax2/src/lib.rs b/crates/libsyntax2/src/lib.rs
index 886195660..eb271762e 100644
--- a/crates/libsyntax2/src/lib.rs
+++ b/crates/libsyntax2/src/lib.rs
@@ -61,7 +61,7 @@ use {
61 yellow::{GreenNode, SyntaxRoot}, 61 yellow::{GreenNode, SyntaxRoot},
62}; 62};
63 63
64#[derive(Clone, Debug)] 64#[derive(Clone, Debug, Hash)]
65pub struct File { 65pub struct File {
66 root: SyntaxNode 66 root: SyntaxNode
67} 67}
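
The `Hash` derives added above to `LineIndex`, `FileSymbol`, and `File` are what let libanalysis turn query outputs into the 64-bit fingerprints used by the new salsa crate: a dependent query is re-run only if the fingerprint of something it read has changed. A minimal sketch of that idea (the `fingerprint` helper name is illustrative; the salsa integration test further down does the same thing in its `o_print`/`i_print` helpers):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Reduce any hashable query output to the 64-bit value wrapped by
    // salsa::OutputFingerprint. Equal fingerprints mean "output unchanged",
    // so queries that depend on it do not need to be re-executed.
    fn fingerprint<T: Hash>(value: &T) -> u64 {
        let mut hasher = DefaultHasher::new();
        value.hash(&mut hasher);
        hasher.finish()
    }
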
diff --git a/crates/salsa/Cargo.toml b/crates/salsa/Cargo.toml
new file mode 100644
index 000000000..9eb83234f
--- /dev/null
+++ b/crates/salsa/Cargo.toml
@@ -0,0 +1,8 @@
1[package]
2name = "salsa"
3version = "0.1.0"
4authors = ["Aleksey Kladov <[email protected]>"]
5
6[dependencies]
7parking_lot = "0.6.3"
8im = "12.0.0"
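
The `im` dependency here provides the persistent hash map used for the memo graph in src/lib.rs below: cloning it shares structure instead of copying every entry, which is what keeps `with_ground_data`'s per-change snapshot of the whole graph cheap.
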
diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs
new file mode 100644
index 000000000..35deed374
--- /dev/null
+++ b/crates/salsa/src/lib.rs
@@ -0,0 +1,293 @@
1extern crate im;
2extern crate parking_lot;
3
4use std::{
5 sync::Arc,
6 collections::{HashSet, HashMap},
7 cell::RefCell,
8};
9use parking_lot::Mutex;
10
11pub type GroundQueryFn<T, D> = Box<Fn(&T, &D) -> (D, OutputFingerprint) + Send + Sync + 'static>;
12pub type QueryFn<T, D> = Box<Fn(&QueryCtx<T, D>, &D) -> (D, OutputFingerprint) + Send + Sync + 'static>;
13
14#[derive(Debug)]
15pub struct Db<T, D> {
16 db: Arc<DbState<T, D>>,
17 query_config: Arc<QueryConfig<T, D>>,
18}
19
20pub struct QueryConfig<T, D> {
21 ground_fn: HashMap<QueryTypeId, GroundQueryFn<T, D>>,
22 query_fn: HashMap<QueryTypeId, QueryFn<T, D>>,
23}
24
25impl<T, D> ::std::fmt::Debug for QueryConfig<T, D> {
26 fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
27 ::std::fmt::Display::fmt("QueryConfig { ... }", f)
28 }
29}
30
31#[derive(Debug)]
32struct DbState<T, D> {
33 ground_data: T,
34 gen: Gen,
35 graph: Mutex<im::HashMap<QueryId, (Gen, Arc<QueryRecord<D>>)>>,
36}
37
38#[derive(Debug)]
39struct QueryRecord<D> {
40 params: D,
41 output: D,
42 output_fingerprint: OutputFingerprint,
43 deps: Vec<(QueryId, OutputFingerprint)>,
44}
45
46impl<T, D> DbState<T, D> {
47 fn record(
48 &self,
49 query_id: QueryId,
50 params: D,
51 output: D,
52 output_fingerprint: OutputFingerprint,
53 deps: Vec<(QueryId, OutputFingerprint)>,
54 ) {
55 let gen = self.gen;
56 let record = QueryRecord {
57 params,
58 output,
59 output_fingerprint,
60 deps,
61 };
62 self.graph.lock().insert(query_id, (gen, Arc::new(record)));
63 }
64}
65
66impl<T, D> QueryConfig<T, D> {
67 pub fn new() -> Self {
68 QueryConfig {
69 ground_fn: HashMap::new(),
70 query_fn: HashMap::new(),
71 }
72 }
73 pub fn with_ground_query(
74 mut self,
75 query_type: QueryTypeId,
76 query_fn: GroundQueryFn<T, D>
77 ) -> Self {
78 let prev = self.ground_fn.insert(query_type, query_fn);
79 assert!(prev.is_none());
80 self
81 }
82 pub fn with_query(
83 mut self,
84 query_type: QueryTypeId,
85 query_fn: QueryFn<T, D>,
86 ) -> Self {
87 let prev = self.query_fn.insert(query_type, query_fn);
88 assert!(prev.is_none());
89 self
90 }
91}
92
93pub struct QueryCtx<T, D> {
94 db: Arc<DbState<T, D>>,
95 query_config: Arc<QueryConfig<T, D>>,
96 stack: RefCell<Vec<Vec<(QueryId, OutputFingerprint)>>>,
97 executed: RefCell<Vec<QueryTypeId>>,
98}
99
100impl<T, D> QueryCtx<T, D>
101where
102 D: Clone
103{
104 fn new(db: &Db<T, D>) -> QueryCtx<T, D> {
105 QueryCtx {
106 db: Arc::clone(&db.db),
107 query_config: Arc::clone(&db.query_config),
108 stack: RefCell::new(vec![Vec::new()]),
109 executed: RefCell::new(Vec::new()),
110 }
111 }
112 pub fn get(
113 &self,
114 query_id: QueryId,
115 params: D,
116 ) -> D {
117 let (res, output_fingerprint) = self.get_inner(query_id, params);
118 self.record_dep(query_id, output_fingerprint);
119 res
120 }
121 pub fn trace(&self) -> Vec<QueryTypeId> {
122 ::std::mem::replace(&mut *self.executed.borrow_mut(), Vec::new())
123 }
124
125 fn get_inner(
126 &self,
127 query_id: QueryId,
128 params: D,
129 ) -> (D, OutputFingerprint) {
130 let (gen, record) = {
131 let guard = self.db.graph.lock();
132 match guard.get(&query_id).map(|it| it.clone()){
133 None => {
134 drop(guard);
135 return self.force(query_id, params);
136 },
137 Some(it) => it,
138 }
139 };
140 if gen == self.db.gen {
141 return (record.output.clone(), record.output_fingerprint)
142 }
143 if self.query_config.ground_fn.contains_key(&query_id.0) {
144 let (invalidated, record) = {
145 let guard = self.db.graph.lock();
146 let (gen, ref record) = guard[&query_id];
147 (gen == INVALIDATED, record.clone())
148 };
149 if invalidated {
150 return self.force(query_id, params);
151 } else {
152 return (record.output.clone(), record.output_fingerprint);
153 }
154 }
155 for (dep_query_id, prev_fingerprint) in record.deps.iter().cloned() {
156 let dep_params: D = {
157 let guard = self.db.graph.lock();
158 guard[&dep_query_id]
159 .1
160 .params
161 .clone()
162 };
163 if prev_fingerprint != self.get_inner(dep_query_id, dep_params).1 {
164 return self.force(query_id, params)
165 }
166 }
167 let gen = self.db.gen;
168 {
169 let mut guard = self.db.graph.lock();
170 guard[&query_id].0 = gen;
171 }
172 (record.output.clone(), record.output_fingerprint)
173 }
174 fn force(
175 &self,
176 query_id: QueryId,
177 params: D,
178 ) -> (D, OutputFingerprint) {
179 self.executed.borrow_mut().push(query_id.0);
180 self.stack.borrow_mut().push(Vec::new());
181
182 let (res, output_fingerprint) = if let Some(f) = self.query_config.ground_fn.get(&query_id.0) {
183 f(&self.db.ground_data, &params)
184 } else if let Some(f) = self.query_config.query_fn.get(&query_id.0) {
185 f(self, &params)
186 } else {
187 panic!("unknown query type: {:?}", query_id.0);
188 };
189
190 let res: D = res.into();
191
192 let deps = self.stack.borrow_mut().pop().unwrap();
193 self.db.record(query_id, params, res.clone(), output_fingerprint, deps);
194 (res, output_fingerprint)
195 }
196 fn record_dep(
197 &self,
198 query_id: QueryId,
199 output_fingerprint: OutputFingerprint,
200 ) -> () {
201 let mut stack = self.stack.borrow_mut();
202 let deps = stack.last_mut().unwrap();
203 deps.push((query_id, output_fingerprint))
204 }
205}
206
207pub struct Invalidations {
208 types: HashSet<QueryTypeId>,
209 ids: Vec<QueryId>,
210}
211
212impl Invalidations {
213 pub fn new() -> Invalidations {
214 Invalidations {
215 types: HashSet::new(),
216 ids: Vec::new(),
217 }
218 }
219 pub fn invalidate(
220 &mut self,
221 query_type: QueryTypeId,
222 params: impl Iterator<Item=InputFingerprint>,
223 ) {
224 self.types.insert(query_type);
225 self.ids.extend(params.map(|it| QueryId(query_type, it)))
226 }
227}
228
229impl<T, D> Db<T, D>
230where
231 D: Clone
232{
233 pub fn new(query_config: QueryConfig<T, D>, ground_data: T) -> Db<T, D> {
234 Db {
235 db: Arc::new(DbState { ground_data, gen: Gen(0), graph: Default::default() }),
236 query_config: Arc::new(query_config),
237 }
238 }
239 pub fn ground_data(&self) -> &T {
240 &self.db.ground_data
241 }
242 pub fn with_ground_data(
243 &self,
244 ground_data: T,
245 invalidations: Invalidations,
246 ) -> Db<T, D> {
247 for id in self.query_config.ground_fn.keys() {
248 assert!(
249 invalidations.types.contains(id),
250 "all ground queries must be invalidated"
251 );
252 }
253
254 let gen = Gen(self.db.gen.0 + 1);
255 let mut graph = self.db.graph.lock().clone();
256 for id in invalidations.ids {
257 if let Some((gen, _)) = graph.get_mut(&id) {
258 *gen = INVALIDATED;
259 }
260 }
261 let graph = Mutex::new(graph);
262 Db {
263 db: Arc::new(DbState { ground_data, gen, graph }),
264 query_config: Arc::clone(&self.query_config)
265 }
266 }
267 pub fn query_ctx(&self) -> QueryCtx<T, D> {
268 QueryCtx::new(self)
269 }
270 pub fn get(
271 &self,
272 query_id: QueryId,
273 params: D,
274 ) -> (D, Vec<QueryTypeId>) {
275 let ctx = self.query_ctx();
276 let res = ctx.get(query_id, params.into());
277 let executed = ::std::mem::replace(&mut *ctx.executed.borrow_mut(), Vec::new());
278 (res, executed)
279 }
280}
281
282#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
283struct Gen(u64);
284const INVALIDATED: Gen = Gen(!0);
285#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
286pub struct InputFingerprint(pub u64);
287#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
288pub struct OutputFingerprint(pub u64);
289#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
290pub struct QueryTypeId(pub u16);
291#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
292pub struct QueryId(pub QueryTypeId, pub InputFingerprint);
293
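
The flow implemented above: ground queries read the ground data directly and are re-run only when explicitly invalidated; derived queries run against a `QueryCtx` that records the `(QueryId, OutputFingerprint)` pairs they touch, and in a later generation a derived query is reused as long as every recorded dependency still produces the same output fingerprint, otherwise it is forced. A minimal usage sketch against this API (the `DOUBLE` query, its closure, and the literal fingerprints are illustrative, not part of the commit; normally the fingerprints are hashes, as in the integration test below):

    extern crate salsa;

    use salsa::{Db, InputFingerprint, OutputFingerprint, QueryConfig, QueryId, QueryTypeId};

    // One ground query that doubles the ground data and adds its parameter.
    const DOUBLE: QueryTypeId = QueryTypeId(1);

    fn main() {
        let config = QueryConfig::<u64, u64>::new()
            .with_ground_query(DOUBLE, Box::new(|ground, params| {
                let out = *ground * 2 + *params;
                // For a plain u64 the value itself can stand in for its hash.
                (out, OutputFingerprint(out))
            }));
        let db = Db::new(config, 10u64);
        // InputFingerprint(1) stands in for a hash of the parameter.
        let (value, trace) = db.get(QueryId(DOUBLE, InputFingerprint(1)), 1);
        assert_eq!(value, 21);
        assert_eq!(trace.len(), 1); // the query actually executed
        // The same request again is answered from the memo graph: nothing runs.
        let (value, trace) = db.get(QueryId(DOUBLE, InputFingerprint(1)), 1);
        assert_eq!(value, 21);
        assert_eq!(trace.len(), 0);
    }
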
diff --git a/crates/salsa/tests/integration.rs b/crates/salsa/tests/integration.rs
new file mode 100644
index 000000000..aed9219be
--- /dev/null
+++ b/crates/salsa/tests/integration.rs
@@ -0,0 +1,170 @@
1extern crate salsa;
2use std::{
3 iter::once,
4 sync::Arc,
5 collections::hash_map::{HashMap, DefaultHasher},
6 any::Any,
7 hash::{Hash, Hasher},
8};
9
10type State = HashMap<u32, String>;
11type Data = Arc<Any + Send + Sync + 'static>;
12const GET_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(1);
13const GET_FILES: salsa::QueryTypeId = salsa::QueryTypeId(2);
14const FILE_NEWLINES: salsa::QueryTypeId = salsa::QueryTypeId(3);
15const TOTAL_NEWLINES: salsa::QueryTypeId = salsa::QueryTypeId(4);
16
17fn mk_ground_query<T, R>(
18 state: &State,
19 params: &Data,
20 f: fn(&State, &T) -> R,
21) -> (Data, salsa::OutputFingerprint)
22where
23 T: 'static,
24 R: Hash + Send + Sync + 'static,
25{
26 let params = params.downcast_ref().unwrap();
27 let result = f(state, params);
28 let fingerprint = o_print(&result);
29 (Arc::new(result), fingerprint)
30}
31
32fn get<T, R>(db: &salsa::Db<State, Data>, query_type: salsa::QueryTypeId, param: T) -> (Arc<R>, Vec<salsa::QueryTypeId>)
33where
34 T: Hash + Send + Sync + 'static,
35 R: Send + Sync + 'static,
36{
37 let i_print = i_print(&param);
38 let param = Arc::new(param);
39 let (res, trace) = db.get(salsa::QueryId(query_type, i_print), param);
40 (res.downcast().unwrap(), trace)
41}
42
43struct QueryCtx<'a>(&'a salsa::QueryCtx<State, Data>);
44
45impl<'a> QueryCtx<'a> {
46 fn get_text(&self, id: u32) -> Arc<String> {
47 let i_print = i_print(&id);
48 let text = self.0.get(salsa::QueryId(GET_TEXT, i_print), Arc::new(id));
49 text.downcast().unwrap()
50 }
51 fn get_files(&self) -> Arc<Vec<u32>> {
52 let i_print = i_print(&());
53 let files = self.0.get(salsa::QueryId(GET_FILES, i_print), Arc::new(()));
54 let res = files.downcast().unwrap();
55 res
56 }
57 fn get_n_lines(&self, id: u32) -> usize {
58 let i_print = i_print(&id);
59 let n_lines = self.0.get(salsa::QueryId(FILE_NEWLINES, i_print), Arc::new(id));
60 *n_lines.downcast().unwrap()
61 }
62}
63
64fn mk_query<T, R>(
65 query_ctx: &salsa::QueryCtx<State, Data>,
66 params: &Data,
67 f: fn(QueryCtx, &T) -> R,
68) -> (Data, salsa::OutputFingerprint)
69where
70 T: 'static,
71 R: Hash + Send + Sync + 'static,
72{
73 let params: &T = params.downcast_ref().unwrap();
74 let query_ctx = QueryCtx(query_ctx);
75 let result = f(query_ctx, params);
76 let fingerprint = o_print(&result);
77 (Arc::new(result), fingerprint)
78}
79
80fn mk_queries() -> salsa::QueryConfig<State, Data> {
81 salsa::QueryConfig::<State, Data>::new()
82 .with_ground_query(GET_TEXT, Box::new(|state, id| {
83 mk_ground_query::<u32, String>(state, id, |state, id| state[id].clone())
84 }))
85 .with_ground_query(GET_FILES, Box::new(|state, id| {
86 mk_ground_query::<(), Vec<u32>>(state, id, |state, &()| state.keys().cloned().collect())
87 }))
88 .with_query(FILE_NEWLINES, Box::new(|query_ctx, id| {
89 mk_query(query_ctx, id, |query_ctx, &id| {
90 let text = query_ctx.get_text(id);
91 text.lines().count()
92 })
93 }))
94 .with_query(TOTAL_NEWLINES, Box::new(|query_ctx, id| {
95 mk_query(query_ctx, id, |query_ctx, &()| {
96 let mut total = 0;
97 for &id in query_ctx.get_files().iter() {
98 total += query_ctx.get_n_lines(id)
99 }
100 total
101 })
102 }))
103}
104
105#[test]
106fn test_number_of_lines() {
107 let mut state = State::new();
108 let db = salsa::Db::new(mk_queries(), state.clone());
109 let (newlines, trace) = get::<(), usize>(&db, TOTAL_NEWLINES, ());
110 assert_eq!(*newlines, 0);
111 assert_eq!(trace.len(), 2);
112 let (newlines, trace) = get::<(), usize>(&db, TOTAL_NEWLINES, ());
113 assert_eq!(*newlines, 0);
114 assert_eq!(trace.len(), 0);
115
116 state.insert(1, "hello\nworld".to_string());
117 let mut inv = salsa::Invalidations::new();
118 inv.invalidate(GET_TEXT, once(i_print(&1u32)));
119 inv.invalidate(GET_FILES, once(i_print(&())));
120 let db = db.with_ground_data(state.clone(), inv);
121 let (newlines, trace) = get::<(), usize>(&db, TOTAL_NEWLINES, ());
122 assert_eq!(*newlines, 2);
123 assert_eq!(trace.len(), 4);
124
125 state.insert(2, "spam\neggs".to_string());
126 let mut inv = salsa::Invalidations::new();
127 inv.invalidate(GET_TEXT, once(i_print(&2u32)));
128 inv.invalidate(GET_FILES, once(i_print(&())));
129 let db = db.with_ground_data(state.clone(), inv);
130 let (newlines, trace) = get::<(), usize>(&db, TOTAL_NEWLINES, ());
131 assert_eq!(*newlines, 4);
132 assert_eq!(trace.len(), 4);
133
134 let mut invs = vec![];
135 for i in 0..10 {
136 let id = i + 10;
137 invs.push(i_print(&id));
138 state.insert(id, "spam".to_string());
139 }
140 let mut inv = salsa::Invalidations::new();
141 inv.invalidate(GET_TEXT, invs.into_iter());
142 inv.invalidate(GET_FILES, once(i_print(&())));
143 let db = db.with_ground_data(state.clone(), inv);
144 let (newlines, trace) = get::<(), usize>(&db, TOTAL_NEWLINES, ());
145 assert_eq!(*newlines, 14);
146 assert_eq!(trace.len(), 22);
147
148 state.insert(15, String::new());
149 let mut inv = salsa::Invalidations::new();
150 inv.invalidate(GET_TEXT, once(i_print(&15u32)));
151 inv.invalidate(GET_FILES, once(i_print(&())));
152 let db = db.with_ground_data(state.clone(), inv);
153 let (newlines, trace) = get::<(), usize>(&db, TOTAL_NEWLINES, ());
154 assert_eq!(*newlines, 13);
155 assert_eq!(trace.len(), 4);
156}
157
158fn o_print<T: Hash>(x: &T) -> salsa::OutputFingerprint {
159 let mut hasher = DefaultHasher::new();
160 x.hash(&mut hasher);
161 let hash = hasher.finish();
162 salsa::OutputFingerprint(hash)
163}
164
165fn i_print<T: Hash>(x: &T) -> salsa::InputFingerprint {
166 let mut hasher = DefaultHasher::new();
167 x.hash(&mut hasher);
168 let hash = hasher.finish();
169 salsa::InputFingerprint(hash)
170}
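
Reading the trace assertions in test_number_of_lines: after a single-file edit only four queries execute (the changed file's GET_TEXT, GET_FILES, that file's FILE_NEWLINES, and TOTAL_NEWLINES), while adding ten files costs 2 + 2*10 = 22 executions; everything else is served from the memo graph. The counts follow str::lines, so the final edit, which replaces file 15's one-line "spam" with an empty string, drops the total from 14 to 13.
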
diff --git a/crates/server/Cargo.toml b/crates/server/Cargo.toml
index 9aeea9a9b..fc20730b8 100644
--- a/crates/server/Cargo.toml
+++ b/crates/server/Cargo.toml
@@ -17,7 +17,7 @@ log = "0.4.3"
17url_serde = "0.2.0" 17url_serde = "0.2.0"
18languageserver-types = "0.49.0" 18languageserver-types = "0.49.0"
19walkdir = "2.2.0" 19walkdir = "2.2.0"
20im = { version = "11.0.1", features = ["arc"] } 20im = "12.0.0"
21cargo_metadata = "0.6.0" 21cargo_metadata = "0.6.0"
22text_unit = { version = "0.1.2", features = ["serde"] } 22text_unit = { version = "0.1.2", features = ["serde"] }
23smol_str = { version = "0.1.5", features = ["serde"] } 23smol_str = { version = "0.1.5", features = ["serde"] }