aboutsummaryrefslogtreecommitdiff
path: root/crates/libanalysis/src
diff options
context:
space:
mode:
Diffstat (limited to 'crates/libanalysis/src')
-rw-r--r--crates/libanalysis/src/db/imp.rs152
-rw-r--r--crates/libanalysis/src/db/mod.rs85
-rw-r--r--crates/libanalysis/src/descriptors.rs220
-rw-r--r--crates/libanalysis/src/imp.rs342
-rw-r--r--crates/libanalysis/src/job.rs49
-rw-r--r--crates/libanalysis/src/lib.rs240
-rw-r--r--crates/libanalysis/src/module_map.rs157
-rw-r--r--crates/libanalysis/src/queries.rs39
-rw-r--r--crates/libanalysis/src/roots.rs178
-rw-r--r--crates/libanalysis/src/symbol_index.rs94
10 files changed, 0 insertions, 1556 deletions
diff --git a/crates/libanalysis/src/db/imp.rs b/crates/libanalysis/src/db/imp.rs
deleted file mode 100644
index f26be1046..000000000
--- a/crates/libanalysis/src/db/imp.rs
+++ /dev/null
@@ -1,152 +0,0 @@
1use std::{
2 sync::Arc,
3 any::Any,
4 hash::{Hash, Hasher},
5 collections::hash_map::{DefaultHasher, HashMap},
6 iter,
7};
8use salsa;
9use {FileId, imp::FileResolverImp};
10use super::{State, Query, QueryCtx};
11
12pub(super) type Data = Arc<Any + Send + Sync + 'static>;
13
14#[derive(Debug)]
15pub(super) struct Db {
16 names: Arc<HashMap<salsa::QueryTypeId, &'static str>>,
17 pub(super) imp: salsa::Db<State, Data>,
18}
19
20impl Db {
21 pub(super) fn new(mut reg: QueryRegistry) -> Db {
22 let config = reg.config.take().unwrap();
23 Db {
24 names: Arc::new(reg.names),
25 imp: salsa::Db::new(config, State::default())
26 }
27 }
28 pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
29 let names = self.names.clone();
30 let mut invalidations = salsa::Invalidations::new();
31 invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
32 if resolver_changed {
33 invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
34 } else {
35 invalidations.invalidate(FILE_SET, iter::empty());
36 }
37 let imp = self.imp.with_ground_data(
38 new_state,
39 invalidations,
40 );
41 Db { names, imp }
42 }
43 pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
44 ctx.trace().into_iter().map(|it| self.names[&it]).collect()
45 }
46}
47
48pub(crate) trait EvalQuery {
49 type Params;
50 type Output;
51 fn query_type(&self) -> salsa::QueryTypeId;
52 fn f(&self) -> salsa::QueryFn<State, Data>;
53 fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
54}
55
56impl<T, R> EvalQuery for Query<T, R>
57where
58 T: Hash + Send + Sync + 'static,
59 R: Hash + Send + Sync + 'static,
60{
61 type Params = T;
62 type Output = R;
63 fn query_type(&self) -> salsa::QueryTypeId {
64 salsa::QueryTypeId(self.0)
65 }
66 fn f(&self) -> salsa::QueryFn<State, Data> {
67 let f = self.1;
68 Box::new(move |ctx, data| {
69 let ctx = QueryCtx { imp: ctx };
70 let data: &T = data.downcast_ref().unwrap();
71 let res = f(ctx, data);
72 let h = hash(&res);
73 (Arc::new(res), salsa::OutputFingerprint(h))
74 })
75 }
76 fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
77 let query_id = salsa::QueryId(
78 self.query_type(),
79 salsa::InputFingerprint(hash(&params)),
80 );
81 let res = ctx.imp.get(query_id, Arc::new(params));
82 res.downcast().unwrap()
83 }
84}
85
86pub(super) struct QueryRegistry {
87 config: Option<salsa::QueryConfig<State, Data>>,
88 names: HashMap<salsa::QueryTypeId, &'static str>,
89}
90
91impl QueryRegistry {
92 pub(super) fn new() -> QueryRegistry {
93 let mut config = salsa::QueryConfig::<State, Data>::new();
94 config = config.with_ground_query(
95 FILE_TEXT, Box::new(|state, params| {
96 let file_id: &FileId = params.downcast_ref().unwrap();
97 let res = state.file_map[file_id].clone();
98 let fingerprint = salsa::OutputFingerprint(hash(&res));
99 (res, fingerprint)
100 })
101 );
102 config = config.with_ground_query(
103 FILE_SET, Box::new(|state, _params| {
104 let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
105 let hash = hash(&file_ids);
106 let file_resolver = state.file_resolver.clone();
107 let res = (file_ids, file_resolver);
108 let fingerprint = salsa::OutputFingerprint(hash);
109 (Arc::new(res), fingerprint)
110 })
111 );
112 let mut names = HashMap::new();
113 names.insert(FILE_TEXT, "FILE_TEXT");
114 names.insert(FILE_SET, "FILE_SET");
115 QueryRegistry { config: Some(config), names }
116 }
117 pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
118 let id = q.query_type();
119 let prev = self.names.insert(id, name);
120 assert!(prev.is_none(), "duplicate query: {:?}", id);
121 let config = self.config.take().unwrap();
122 let config = config.with_query(id, q.f());
123 self.config= Some(config);
124 }
125}
126
127fn hash<T: Hash>(x: &T) -> u64 {
128 let mut hasher = DefaultHasher::new();
129 x.hash(&mut hasher);
130 hasher.finish()
131}
132
133const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
134pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
135 let query_id = salsa::QueryId(
136 FILE_TEXT,
137 salsa::InputFingerprint(hash(&file_id)),
138 );
139 let res = ctx.imp.get(query_id, Arc::new(file_id));
140 res.downcast().unwrap()
141}
142
143const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
144pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
145 let query_id = salsa::QueryId(
146 FILE_SET,
147 salsa::InputFingerprint(hash(&())),
148 );
149 let res = ctx.imp.get(query_id, Arc::new(()));
150 res.downcast().unwrap()
151}
152
diff --git a/crates/libanalysis/src/db/mod.rs b/crates/libanalysis/src/db/mod.rs
deleted file mode 100644
index 22769d112..000000000
--- a/crates/libanalysis/src/db/mod.rs
+++ /dev/null
@@ -1,85 +0,0 @@
1mod imp;
2
3use std::{
4 sync::Arc,
5};
6use im;
7use salsa;
8use {FileId, imp::FileResolverImp};
9
10#[derive(Debug, Default, Clone)]
11pub(crate) struct State {
12 pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
13 pub(crate) file_resolver: FileResolverImp
14}
15
16#[derive(Debug)]
17pub(crate) struct Db {
18 imp: imp::Db,
19}
20
21#[derive(Clone, Copy)]
22pub(crate) struct QueryCtx<'a> {
23 imp: &'a salsa::QueryCtx<State, imp::Data>,
24}
25
26pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
27
28pub(crate) struct QueryRegistry {
29 imp: imp::QueryRegistry,
30}
31
32impl Default for Db {
33 fn default() -> Db {
34 Db::new()
35 }
36}
37
38impl Db {
39 pub(crate) fn new() -> Db {
40 let reg = QueryRegistry::new();
41 Db { imp: imp::Db::new(reg.imp) }
42 }
43 pub(crate) fn state(&self) -> &State {
44 self.imp.imp.ground_data()
45 }
46 pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
47 Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
48 }
49 pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
50 let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
51 f(ctx)
52 }
53 #[allow(unused)]
54 pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
55 let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
56 let res = f(ctx);
57 let trace = self.imp.extract_trace(ctx.imp);
58 (res, trace)
59 }
60}
61
62impl<'a> QueryCtx<'a> {
63 pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
64 q.get(self, params)
65 }
66}
67
68pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
69 imp::file_text(ctx, file_id)
70}
71
72pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
73 imp::file_set(ctx)
74}
75impl QueryRegistry {
76 fn new() -> QueryRegistry {
77 let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
78 ::queries::register_queries(&mut reg);
79 ::module_map::register_queries(&mut reg);
80 reg
81 }
82 pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
83 self.imp.add(q, name)
84 }
85}
diff --git a/crates/libanalysis/src/descriptors.rs b/crates/libanalysis/src/descriptors.rs
deleted file mode 100644
index 93a4158e4..000000000
--- a/crates/libanalysis/src/descriptors.rs
+++ /dev/null
@@ -1,220 +0,0 @@
1use std::{
2 collections::BTreeMap,
3};
4use relative_path::RelativePathBuf;
5use libsyntax2::{
6 SmolStr,
7 ast::{self, NameOwner},
8};
9use {
10 FileId,
11 imp::FileResolverImp,
12};
13
14#[derive(Debug, Hash)]
15pub struct ModuleDescriptor {
16 pub submodules: Vec<Submodule>
17}
18
19impl ModuleDescriptor {
20 pub fn new(root: ast::Root) -> ModuleDescriptor {
21 let submodules = modules(root)
22 .map(|(name, _)| Submodule { name })
23 .collect();
24
25 ModuleDescriptor { submodules } }
26}
27
28fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item=(SmolStr, ast::Module<'a>)> {
29 root
30 .modules()
31 .filter_map(|module| {
32 let name = module.name()?.text();
33 if !module.has_semi() {
34 return None;
35 }
36 Some((name, module))
37 })
38}
39
40#[derive(Clone, Hash, PartialEq, Eq, Debug)]
41pub struct Submodule {
42 pub name: SmolStr,
43}
44
45#[derive(Hash, Debug)]
46pub(crate) struct ModuleTreeDescriptor {
47 nodes: Vec<NodeData>,
48 links: Vec<LinkData>,
49 file_id2node: BTreeMap<FileId, Node>,
50}
51
52#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
53struct Node(usize);
54#[derive(Hash, Debug)]
55struct NodeData {
56 file_id: FileId,
57 links: Vec<Link>,
58 parents: Vec<Link>
59}
60
61#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
62pub(crate) struct Link(usize);
63#[derive(Hash, Debug)]
64struct LinkData {
65 owner: Node,
66 name: SmolStr,
67 points_to: Vec<Node>,
68 problem: Option<Problem>,
69}
70
71
72#[derive(Clone, Debug, Hash)]
73pub enum Problem {
74 UnresolvedModule {
75 candidate: RelativePathBuf,
76 },
77 NotDirOwner {
78 move_to: RelativePathBuf,
79 candidate: RelativePathBuf,
80 }
81}
82
83impl ModuleTreeDescriptor {
84 pub(crate) fn new<'a>(
85 files: impl Iterator<Item=(FileId, &'a ModuleDescriptor)> + Clone,
86 file_resolver: &FileResolverImp,
87 ) -> ModuleTreeDescriptor {
88 let mut file_id2node = BTreeMap::new();
89 let mut nodes: Vec<NodeData> = files.clone().enumerate()
90 .map(|(idx, (file_id, _))| {
91 file_id2node.insert(file_id, Node(idx));
92 NodeData {
93 file_id,
94 links: Vec::new(),
95 parents: Vec::new(),
96 }
97 })
98 .collect();
99 let mut links = Vec::new();
100
101 for (idx, (file_id, descr)) in files.enumerate() {
102 let owner = Node(idx);
103 for sub in descr.submodules.iter() {
104 let link = Link(links.len());
105 nodes[owner.0].links.push(link);
106 let (points_to, problem) = resolve_submodule(file_id, &sub.name, file_resolver);
107 let points_to = points_to
108 .into_iter()
109 .map(|file_id| {
110 let node = file_id2node[&file_id];
111 nodes[node.0].parents.push(link);
112 node
113 })
114 .collect();
115
116 links.push(LinkData {
117 owner,
118 name: sub.name.clone(),
119 points_to,
120 problem,
121 })
122
123 }
124 }
125
126 ModuleTreeDescriptor {
127 nodes, links, file_id2node
128 }
129 }
130
131 pub(crate) fn parent_modules(&self, file_id: FileId) -> Vec<Link> {
132 let node = self.file_id2node[&file_id];
133 self.node(node)
134 .parents
135 .clone()
136 }
137 pub(crate) fn child_module_by_name(&self, file_id: FileId, name: &str) -> Vec<FileId> {
138 let node = self.file_id2node[&file_id];
139 self.node(node)
140 .links
141 .iter()
142 .filter(|it| it.name(self) == name)
143 .flat_map(|link| link.points_to(self).iter().map(|&node| self.node(node).file_id))
144 .collect()
145 }
146 pub(crate) fn problems<'a, 'b>(&'b self, file_id: FileId, root: ast::Root<'a>) -> Vec<(ast::Name<'a>, &'b Problem)> {
147 let node = self.file_id2node[&file_id];
148 self.node(node)
149 .links
150 .iter()
151 .filter_map(|&link| {
152 let problem = self.link(link).problem.as_ref()?;
153 let name = link.bind_source(self, root).name()?;
154 Some((name, problem))
155 })
156 .collect()
157 }
158
159 fn node(&self, node: Node) -> &NodeData {
160 &self.nodes[node.0]
161 }
162 fn link(&self, link: Link) -> &LinkData {
163 &self.links[link.0]
164 }
165}
166
167impl Link {
168 pub(crate) fn name(self, tree: &ModuleTreeDescriptor) -> SmolStr {
169 tree.link(self).name.clone()
170 }
171 pub(crate) fn owner(self, tree: &ModuleTreeDescriptor) -> FileId {
172 let owner = tree.link(self).owner;
173 tree.node(owner).file_id
174 }
175 fn points_to(self, tree: &ModuleTreeDescriptor) -> &[Node] {
176 &tree.link(self).points_to
177 }
178 pub(crate) fn bind_source<'a>(self, tree: &ModuleTreeDescriptor, root: ast::Root<'a>) -> ast::Module<'a> {
179 modules(root)
180 .filter(|(name, _)| name == &tree.link(self).name)
181 .next()
182 .unwrap()
183 .1
184 }
185}
186
187
188fn resolve_submodule(
189 file_id: FileId,
190 name: &SmolStr,
191 file_resolver: &FileResolverImp
192) -> (Vec<FileId>, Option<Problem>) {
193 let mod_name = file_resolver.file_stem(file_id);
194 let is_dir_owner =
195 mod_name == "mod" || mod_name == "lib" || mod_name == "main";
196
197 let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
198 let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
199 let points_to: Vec<FileId>;
200 let problem: Option<Problem>;
201 if is_dir_owner {
202 points_to = [&file_mod, &dir_mod].iter()
203 .filter_map(|path| file_resolver.resolve(file_id, path))
204 .collect();
205 problem = if points_to.is_empty() {
206 Some(Problem::UnresolvedModule {
207 candidate: file_mod,
208 })
209 } else {
210 None
211 }
212 } else {
213 points_to = Vec::new();
214 problem = Some(Problem::NotDirOwner {
215 move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)),
216 candidate: file_mod,
217 });
218 }
219 (points_to, problem)
220}
diff --git a/crates/libanalysis/src/imp.rs b/crates/libanalysis/src/imp.rs
deleted file mode 100644
index 6f3191fe7..000000000
--- a/crates/libanalysis/src/imp.rs
+++ /dev/null
@@ -1,342 +0,0 @@
1use std::{
2 sync::{
3 Arc,
4 atomic::{AtomicBool, Ordering::SeqCst},
5 },
6 fmt,
7 collections::{HashSet, VecDeque},
8 iter,
9};
10
11use relative_path::RelativePath;
12use libeditor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit};
13use libsyntax2::{
14 TextUnit, TextRange, SmolStr, File, AstNode,
15 SyntaxKind::*,
16 ast::{self, NameOwner},
17};
18
19use {
20 FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
21 JobToken, CrateGraph, CrateId,
22 roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
23 descriptors::{ModuleTreeDescriptor, Problem},
24};
25
26
27#[derive(Clone, Debug)]
28pub(crate) struct FileResolverImp {
29 inner: Arc<FileResolver>
30}
31
32impl FileResolverImp {
33 pub(crate) fn new(inner: Arc<FileResolver>) -> FileResolverImp {
34 FileResolverImp { inner }
35 }
36 pub(crate) fn file_stem(&self, file_id: FileId) -> String {
37 self.inner.file_stem(file_id)
38 }
39 pub(crate) fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
40 self.inner.resolve(file_id, path)
41 }
42}
43
44impl Default for FileResolverImp {
45 fn default() -> FileResolverImp {
46 #[derive(Debug)]
47 struct DummyResolver;
48 impl FileResolver for DummyResolver {
49 fn file_stem(&self, _file_: FileId) -> String {
50 panic!("file resolver not set")
51 }
52 fn resolve(&self, _file_id: FileId, _path: &::relative_path::RelativePath) -> Option<FileId> {
53 panic!("file resolver not set")
54 }
55 }
56 FileResolverImp { inner: Arc::new(DummyResolver) }
57 }
58}
59
60#[derive(Debug)]
61pub(crate) struct AnalysisHostImpl {
62 data: Arc<WorldData>
63}
64
65impl AnalysisHostImpl {
66 pub fn new() -> AnalysisHostImpl {
67 AnalysisHostImpl {
68 data: Arc::new(WorldData::default()),
69 }
70 }
71 pub fn analysis(&self) -> AnalysisImpl {
72 AnalysisImpl {
73 needs_reindex: AtomicBool::new(false),
74 data: self.data.clone(),
75 }
76 }
77 pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
78 let data = self.data_mut();
79 data.root = Arc::new(data.root.apply_changes(changes, None));
80 }
81 pub fn set_file_resolver(&mut self, resolver: FileResolverImp) {
82 let data = self.data_mut();
83 data.file_resolver = resolver.clone();
84 data.root = Arc::new(data.root.apply_changes(&mut iter::empty(), Some(resolver)));
85 }
86 pub fn set_crate_graph(&mut self, graph: CrateGraph) {
87 let mut visited = HashSet::new();
88 for &file_id in graph.crate_roots.values() {
89 if !visited.insert(file_id) {
90 panic!("duplicate crate root: {:?}", file_id);
91 }
92 }
93 self.data_mut().crate_graph = graph;
94 }
95 pub fn add_library(&mut self, root: ReadonlySourceRoot) {
96 self.data_mut().libs.push(Arc::new(root));
97 }
98 fn data_mut(&mut self) -> &mut WorldData {
99 Arc::make_mut(&mut self.data)
100 }
101}
102
103pub(crate) struct AnalysisImpl {
104 needs_reindex: AtomicBool,
105 data: Arc<WorldData>,
106}
107
108impl fmt::Debug for AnalysisImpl {
109 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
110 (&*self.data).fmt(f)
111 }
112}
113
114impl Clone for AnalysisImpl {
115 fn clone(&self) -> AnalysisImpl {
116 AnalysisImpl {
117 needs_reindex: AtomicBool::new(self.needs_reindex.load(SeqCst)),
118 data: Arc::clone(&self.data),
119 }
120 }
121}
122
123impl AnalysisImpl {
124 fn root(&self, file_id: FileId) -> &SourceRoot {
125 if self.data.root.contains(file_id) {
126 return &*self.data.root;
127 }
128 &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
129 }
130 pub fn file_syntax(&self, file_id: FileId) -> File {
131 self.root(file_id).syntax(file_id)
132 }
133 pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
134 self.root(file_id).lines(file_id)
135 }
136 pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
137 let mut buf = Vec::new();
138 if query.libs {
139 self.data.libs.iter()
140 .for_each(|it| it.symbols(&mut buf));
141 } else {
142 self.data.root.symbols(&mut buf);
143 }
144 query.search(&buf, token)
145
146 }
147 pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
148 let root = self.root(file_id);
149 let module_tree = root.module_tree();
150 module_tree.parent_modules(file_id)
151 .iter()
152 .map(|link| {
153 let file_id = link.owner(&module_tree);
154 let syntax = root.syntax(file_id);
155 let decl = link.bind_source(&module_tree, syntax.ast());
156 let sym = FileSymbol {
157 name: link.name(&module_tree),
158 node_range: decl.syntax().range(),
159 kind: MODULE,
160 };
161 (file_id, sym)
162 })
163 .collect()
164 }
165 pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
166 let module_tree = self.root(file_id).module_tree();
167 let crate_graph = &self.data.crate_graph;
168 let mut res = Vec::new();
169 let mut work = VecDeque::new();
170 work.push_back(file_id);
171 let mut visited = HashSet::new();
172 while let Some(id) = work.pop_front() {
173 if let Some(crate_id) = crate_graph.crate_id_for_crate_root(id) {
174 res.push(crate_id);
175 continue;
176 }
177 let parents = module_tree
178 .parent_modules(id)
179 .into_iter()
180 .map(|link| link.owner(&module_tree))
181 .filter(|&id| visited.insert(id));
182 work.extend(parents);
183 }
184 res
185 }
186 pub fn crate_root(&self, crate_id: CrateId) -> FileId {
187 self.data.crate_graph.crate_roots[&crate_id]
188 }
189 pub fn approximately_resolve_symbol(
190 &self,
191 file_id: FileId,
192 offset: TextUnit,
193 token: &JobToken,
194 ) -> Vec<(FileId, FileSymbol)> {
195 let root = self.root(file_id);
196 let module_tree = root.module_tree();
197 let file = root.syntax(file_id);
198 let syntax = file.syntax();
199 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
200 return self.index_resolve(name_ref, token);
201 }
202 if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
203 if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
204 if module.has_semi() {
205 let file_ids = self.resolve_module(&*module_tree, file_id, module);
206
207 let res = file_ids.into_iter().map(|id| {
208 let name = module.name()
209 .map(|n| n.text())
210 .unwrap_or_else(|| SmolStr::new(""));
211 let symbol = FileSymbol {
212 name,
213 node_range: TextRange::offset_len(0.into(), 0.into()),
214 kind: MODULE,
215 };
216 (id, symbol)
217 }).collect();
218
219 return res;
220 }
221 }
222 }
223 vec![]
224 }
225
226 pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
227 let root = self.root(file_id);
228 let module_tree = root.module_tree();
229 let syntax = root.syntax(file_id);
230
231 let mut res = libeditor::diagnostics(&syntax)
232 .into_iter()
233 .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
234 .collect::<Vec<_>>();
235
236 for (name_node, problem) in module_tree.problems(file_id, syntax.ast()) {
237 let diag = match problem {
238 Problem::UnresolvedModule { candidate } => {
239 let create_file = FileSystemEdit::CreateFile {
240 anchor: file_id,
241 path: candidate.clone(),
242 };
243 let fix = SourceChange {
244 label: "create module".to_string(),
245 source_file_edits: Vec::new(),
246 file_system_edits: vec![create_file],
247 cursor_position: None,
248 };
249 Diagnostic {
250 range: name_node.syntax().range(),
251 message: "unresolved module".to_string(),
252 fix: Some(fix),
253 }
254 }
255 Problem::NotDirOwner { move_to, candidate } => {
256 let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() };
257 let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) };
258 let fix = SourceChange {
259 label: "move file and create module".to_string(),
260 source_file_edits: Vec::new(),
261 file_system_edits: vec![move_file, create_file],
262 cursor_position: None,
263 };
264 Diagnostic {
265 range: name_node.syntax().range(),
266 message: "can't declare module at this location".to_string(),
267 fix: Some(fix),
268 }
269 }
270 };
271 res.push(diag)
272 }
273 res
274 }
275
276 pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
277 let file = self.file_syntax(file_id);
278 let offset = range.start();
279 let actions = vec![
280 ("flip comma", libeditor::flip_comma(&file, offset).map(|f| f())),
281 ("add `#[derive]`", libeditor::add_derive(&file, offset).map(|f| f())),
282 ("add impl", libeditor::add_impl(&file, offset).map(|f| f())),
283 ("introduce variable", libeditor::introduce_variable(&file, range).map(|f| f())),
284 ];
285 actions.into_iter()
286 .filter_map(|(name, local_edit)| {
287 Some(SourceChange::from_local_edit(
288 file_id, name, local_edit?,
289 ))
290 })
291 .collect()
292 }
293
294 fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
295 let name = name_ref.text();
296 let mut query = Query::new(name.to_string());
297 query.exact();
298 query.limit(4);
299 self.world_symbols(query, token)
300 }
301
302 fn resolve_module(&self, module_tree: &ModuleTreeDescriptor, file_id: FileId, module: ast::Module) -> Vec<FileId> {
303 let name = match module.name() {
304 Some(name) => name.text(),
305 None => return Vec::new(),
306 };
307 module_tree.child_module_by_name(file_id, name.as_str())
308 }
309}
310
311#[derive(Default, Clone, Debug)]
312struct WorldData {
313 file_resolver: FileResolverImp,
314 crate_graph: CrateGraph,
315 root: Arc<WritableSourceRoot>,
316 libs: Vec<Arc<ReadonlySourceRoot>>,
317}
318
319impl SourceChange {
320 pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange {
321 let file_edit = SourceFileEdit {
322 file_id,
323 edits: edit.edit.into_atoms(),
324 };
325 SourceChange {
326 label: label.to_string(),
327 source_file_edits: vec![file_edit],
328 file_system_edits: vec![],
329 cursor_position: edit.cursor_position
330 .map(|offset| Position { offset, file_id })
331 }
332 }
333}
334
335impl CrateGraph {
336 fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
337 let (&crate_id, _) = self.crate_roots
338 .iter()
339 .find(|(_crate_id, &root_id)| root_id == file_id)?;
340 Some(crate_id)
341 }
342}
diff --git a/crates/libanalysis/src/job.rs b/crates/libanalysis/src/job.rs
deleted file mode 100644
index ea1652a26..000000000
--- a/crates/libanalysis/src/job.rs
+++ /dev/null
@@ -1,49 +0,0 @@
1use crossbeam_channel::{bounded, Receiver, Sender};
2
3pub struct JobHandle {
4 job_alive: Receiver<Never>,
5 _job_canceled: Sender<Never>,
6}
7
8pub struct JobToken {
9 _job_alive: Sender<Never>,
10 job_canceled: Receiver<Never>,
11}
12
13impl JobHandle {
14 pub fn new() -> (JobHandle, JobToken) {
15 let (sender_alive, receiver_alive) = bounded(0);
16 let (sender_canceled, receiver_canceled) = bounded(0);
17 let token = JobToken { _job_alive: sender_alive, job_canceled: receiver_canceled };
18 let handle = JobHandle { job_alive: receiver_alive, _job_canceled: sender_canceled };
19 (handle, token)
20 }
21 pub fn has_completed(&self) -> bool {
22 is_closed(&self.job_alive)
23 }
24 pub fn cancel(self) {
25 }
26}
27
28impl JobToken {
29 pub fn is_canceled(&self) -> bool {
30 is_closed(&self.job_canceled)
31 }
32}
33
34
35// We don't actually send messages through the channels,
36// and instead just check if the channel is closed,
37// so we use uninhabited enum as a message type
38enum Never {}
39
40/// Nonblocking
41fn is_closed(chan: &Receiver<Never>) -> bool {
42 select! {
43 recv(chan, msg) => match msg {
44 None => true,
45 Some(never) => match never {}
46 }
47 default => false,
48 }
49}
diff --git a/crates/libanalysis/src/lib.rs b/crates/libanalysis/src/lib.rs
deleted file mode 100644
index b4b7a6a30..000000000
--- a/crates/libanalysis/src/lib.rs
+++ /dev/null
@@ -1,240 +0,0 @@
1extern crate parking_lot;
2#[macro_use]
3extern crate log;
4extern crate once_cell;
5extern crate libsyntax2;
6extern crate libeditor;
7extern crate fst;
8extern crate rayon;
9extern crate relative_path;
10#[macro_use]
11extern crate crossbeam_channel;
12extern crate im;
13extern crate salsa;
14
15mod symbol_index;
16mod module_map;
17mod imp;
18mod job;
19mod roots;
20mod db;
21mod queries;
22mod descriptors;
23
24use std::{
25 sync::Arc,
26 collections::HashMap,
27 fmt::Debug,
28};
29
30use relative_path::{RelativePath, RelativePathBuf};
31use libsyntax2::{File, TextRange, TextUnit, AtomEdit};
32use imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
33
34pub use libeditor::{
35 StructureNode, LineIndex, FileSymbol,
36 Runnable, RunnableKind, HighlightedRange, CompletionItem,
37};
38pub use job::{JobToken, JobHandle};
39
40#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
41pub struct FileId(pub u32);
42
43#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
44pub struct CrateId(pub u32);
45
46#[derive(Debug, Clone, Default)]
47pub struct CrateGraph {
48 pub crate_roots: HashMap<CrateId, FileId>,
49}
50
51pub trait FileResolver: Debug + Send + Sync + 'static {
52 fn file_stem(&self, file_id: FileId) -> String;
53 fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>;
54}
55
56#[derive(Debug)]
57pub struct AnalysisHost {
58 imp: AnalysisHostImpl
59}
60
61impl AnalysisHost {
62 pub fn new() -> AnalysisHost {
63 AnalysisHost { imp: AnalysisHostImpl::new() }
64 }
65 pub fn analysis(&self) -> Analysis {
66 Analysis { imp: self.imp.analysis() }
67 }
68 pub fn change_file(&mut self, file_id: FileId, text: Option<String>) {
69 self.change_files(::std::iter::once((file_id, text)));
70 }
71 pub fn change_files(&mut self, mut changes: impl Iterator<Item=(FileId, Option<String>)>) {
72 self.imp.change_files(&mut changes)
73 }
74 pub fn set_file_resolver(&mut self, resolver: Arc<FileResolver>) {
75 self.imp.set_file_resolver(FileResolverImp::new(resolver));
76 }
77 pub fn set_crate_graph(&mut self, graph: CrateGraph) {
78 self.imp.set_crate_graph(graph)
79 }
80 pub fn add_library(&mut self, data: LibraryData) {
81 self.imp.add_library(data.root)
82 }
83}
84
85#[derive(Debug)]
86pub struct SourceChange {
87 pub label: String,
88 pub source_file_edits: Vec<SourceFileEdit>,
89 pub file_system_edits: Vec<FileSystemEdit>,
90 pub cursor_position: Option<Position>,
91}
92
93#[derive(Debug)]
94pub struct Position {
95 pub file_id: FileId,
96 pub offset: TextUnit,
97}
98
99#[derive(Debug)]
100pub struct SourceFileEdit {
101 pub file_id: FileId,
102 pub edits: Vec<AtomEdit>,
103}
104
105#[derive(Debug)]
106pub enum FileSystemEdit {
107 CreateFile {
108 anchor: FileId,
109 path: RelativePathBuf,
110 },
111 MoveFile {
112 file: FileId,
113 path: RelativePathBuf,
114 }
115}
116
117#[derive(Debug)]
118pub struct Diagnostic {
119 pub message: String,
120 pub range: TextRange,
121 pub fix: Option<SourceChange>,
122}
123
124#[derive(Debug)]
125pub struct Query {
126 query: String,
127 lowercased: String,
128 only_types: bool,
129 libs: bool,
130 exact: bool,
131 limit: usize,
132}
133
134impl Query {
135 pub fn new(query: String) -> Query {
136 let lowercased = query.to_lowercase();
137 Query {
138 query,
139 lowercased,
140 only_types: false,
141 libs: false,
142 exact: false,
143 limit: usize::max_value()
144 }
145 }
146 pub fn only_types(&mut self) {
147 self.only_types = true;
148 }
149 pub fn libs(&mut self) {
150 self.libs = true;
151 }
152 pub fn exact(&mut self) {
153 self.exact = true;
154 }
155 pub fn limit(&mut self, limit: usize) {
156 self.limit = limit
157 }
158}
159
160#[derive(Clone, Debug)]
161pub struct Analysis {
162 imp: AnalysisImpl
163}
164
165impl Analysis {
166 pub fn file_syntax(&self, file_id: FileId) -> File {
167 self.imp.file_syntax(file_id).clone()
168 }
169 pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
170 self.imp.file_line_index(file_id)
171 }
172 pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
173 libeditor::extend_selection(file, range).unwrap_or(range)
174 }
175 pub fn matching_brace(&self, file: &File, offset: TextUnit) -> Option<TextUnit> {
176 libeditor::matching_brace(file, offset)
177 }
178 pub fn syntax_tree(&self, file_id: FileId) -> String {
179 let file = self.imp.file_syntax(file_id);
180 libeditor::syntax_tree(&file)
181 }
182 pub fn join_lines(&self, file_id: FileId, range: TextRange) -> SourceChange {
183 let file = self.imp.file_syntax(file_id);
184 SourceChange::from_local_edit(file_id, "join lines", libeditor::join_lines(&file, range))
185 }
186 pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> {
187 let file = self.imp.file_syntax(file_id);
188 Some(SourceChange::from_local_edit(file_id, "add semicolon", libeditor::on_eq_typed(&file, offset)?))
189 }
190 pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
191 let file = self.imp.file_syntax(file_id);
192 libeditor::file_structure(&file)
193 }
194 pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
195 self.imp.world_symbols(query, token)
196 }
197 pub fn approximately_resolve_symbol(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
198 self.imp.approximately_resolve_symbol(file_id, offset, token)
199 }
200 pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
201 self.imp.parent_module(file_id)
202 }
203 pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
204 self.imp.crate_for(file_id)
205 }
206 pub fn crate_root(&self, crate_id: CrateId) -> FileId {
207 self.imp.crate_root(crate_id)
208 }
209 pub fn runnables(&self, file_id: FileId) -> Vec<Runnable> {
210 let file = self.imp.file_syntax(file_id);
211 libeditor::runnables(&file)
212 }
213 pub fn highlight(&self, file_id: FileId) -> Vec<HighlightedRange> {
214 let file = self.imp.file_syntax(file_id);
215 libeditor::highlight(&file)
216 }
217 pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Option<Vec<CompletionItem>> {
218 let file = self.imp.file_syntax(file_id);
219 libeditor::scope_completion(&file, offset)
220 }
221 pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
222 self.imp.assists(file_id, range)
223 }
224 pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
225 self.imp.diagnostics(file_id)
226 }
227}
228
229#[derive(Debug)]
230pub struct LibraryData {
231 root: roots::ReadonlySourceRoot
232}
233
234impl LibraryData {
235 pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData {
236 let file_resolver = FileResolverImp::new(file_resolver);
237 let root = roots::ReadonlySourceRoot::new(files, file_resolver);
238 LibraryData { root }
239 }
240}
diff --git a/crates/libanalysis/src/module_map.rs b/crates/libanalysis/src/module_map.rs
deleted file mode 100644
index a21f55fff..000000000
--- a/crates/libanalysis/src/module_map.rs
+++ /dev/null
@@ -1,157 +0,0 @@
1use std::sync::Arc;
2use {
3 FileId,
4 db::{
5 Query, QueryRegistry, QueryCtx,
6 file_set
7 },
8 queries::file_syntax,
9 descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
10};
11
/// Registers the module-map queries with the query database, attaching the
/// human-readable names used by tracing and by the incrementality tests.
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(MODULE_DESCR, "MODULE_DESCR");
    reg.add(MODULE_TREE, "MODULE_TREE");
}
16
/// Returns the (cached) module tree for the whole file set; the `()` key
/// means there is exactly one such tree per database state.
pub(crate) fn module_tree(ctx: QueryCtx) -> Arc<ModuleTreeDescriptor> {
    ctx.get(MODULE_TREE, ())
}
20
// Per-file query: parse the file and extract its module-level structure
// (`mod` declarations etc.). The numeric id (30) must be unique among all
// registered queries.
const MODULE_DESCR: Query<FileId, ModuleDescriptor> = Query(30, |ctx, &file_id| {
    let file = file_syntax(ctx, file_id);
    ModuleDescriptor::new(file.ast())
});
25
26const MODULE_TREE: Query<(), ModuleTreeDescriptor> = Query(31, |ctx, _| {
27 let file_set = file_set(ctx);
28 let mut files = Vec::new();
29 for &file_id in file_set.0.iter() {
30 let module_descr = ctx.get(MODULE_DESCR, file_id);
31 files.push((file_id, module_descr));
32 }
33 ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.1)
34});
35
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use im;
    use relative_path::{RelativePath, RelativePathBuf};
    use {
        db::{Db},
        imp::FileResolverImp,
        FileId, FileResolver,
    };
    use super::*;

    /// In-memory `FileResolver` backed by a map from file id to its path.
    /// `im::HashMap` makes cloning cheap, so every change can snapshot the
    /// whole map into a fresh resolver.
    #[derive(Debug)]
    struct FileMap(im::HashMap<FileId, RelativePathBuf>);

    impl FileResolver for FileMap {
        fn file_stem(&self, file_id: FileId) -> String {
            self.0[&file_id].file_stem().unwrap().to_string()
        }
        fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
            // Resolve `rel` against `file_id`'s path, then linearly scan for
            // the file whose normalized path matches.
            let path = self.0[&file_id].join(rel).normalize();
            self.0.iter()
                .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
                .next()
        }
    }

    /// Test harness: a tiny virtual file system plus the query database,
    /// mirroring how the real analysis host applies change sets.
    struct Fixture {
        next_file_id: u32,
        fm: im::HashMap<FileId, RelativePathBuf>,
        db: Db,
    }

    impl Fixture {
        fn new() -> Fixture {
            Fixture {
                next_file_id: 1,
                fm: im::HashMap::new(),
                db: Db::new(),
            }
        }
        /// Adds a file at absolute `path` with `text`, returning its fresh id.
        fn add_file(&mut self, path: &str, text: &str) -> FileId {
            assert!(path.starts_with("/"));
            let file_id = FileId(self.next_file_id);
            self.next_file_id += 1;
            self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
            let mut new_state = self.db.state().clone();
            new_state.file_map.insert(file_id, Arc::new(text.to_string()));
            new_state.file_resolver = FileResolverImp::new(
                Arc::new(FileMap(self.fm.clone()))
            );
            // `true`: the file set changed, so resolver-dependent queries
            // (notably MODULE_TREE) must be invalidated too.
            self.db = self.db.with_changes(new_state, &[file_id], true);
            file_id
        }
        /// Removes a file; also a file-set change, hence `true` below.
        fn remove_file(&mut self, file_id: FileId) {
            self.fm.remove(&file_id);
            let mut new_state = self.db.state().clone();
            new_state.file_map.remove(&file_id);
            new_state.file_resolver = FileResolverImp::new(
                Arc::new(FileMap(self.fm.clone()))
            );
            self.db = self.db.with_changes(new_state, &[file_id], true);
        }
        /// Replaces a file's text without touching the file set or resolver.
        fn change_file(&mut self, file_id: FileId, new_text: &str) {
            let mut new_state = self.db.state().clone();
            new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
            self.db = self.db.with_changes(new_state, &[file_id], false);
        }
        /// Asserts both the computed parent modules of `file_id` AND how many
        /// times each named query re-executed while computing the module
        /// tree — i.e. it checks incrementality, not just the result.
        fn check_parent_modules(
            &self,
            file_id: FileId,
            expected: &[FileId],
            queries: &[(&'static str, u64)]
        ) {
            let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
            let actual = tree.parent_modules(file_id)
                .into_iter()
                .map(|link| link.owner(&tree))
                .collect::<Vec<_>>();
            assert_eq!(actual.as_slice(), expected);
            // Tally execution events per query name, then compare against
            // the expected re-run counts.
            let mut counts = HashMap::new();
            events.into_iter()
                .for_each(|event| *counts.entry(event).or_insert(0) += 1);
            for &(query_id, expected_count) in queries.iter() {
                let actual_count = *counts.get(&query_id).unwrap_or(&0);
                assert_eq!(
                    actual_count,
                    expected_count,
                    "counts for {} differ",
                    query_id,
                )
            }

        }
    }

    #[test]
    fn test_parent_module() {
        let mut f = Fixture::new();
        let foo = f.add_file("/foo.rs", "");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);

        // Adding lib.rs re-describes only the new file…
        let lib = f.add_file("/lib.rs", "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
        // …and with no changes at all, everything is served from cache.
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);

        f.change_file(lib, "");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);

        f.change_file(lib, "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);

        f.change_file(lib, "mod bar;");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);

        f.change_file(lib, "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);

        // Removing lib.rs invalidates the tree, but foo.rs itself is
        // untouched, so its descriptor is not recomputed.
        f.remove_file(lib);
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
    }
}
diff --git a/crates/libanalysis/src/queries.rs b/crates/libanalysis/src/queries.rs
deleted file mode 100644
index 0b60316e6..000000000
--- a/crates/libanalysis/src/queries.rs
+++ /dev/null
@@ -1,39 +0,0 @@
1use std::sync::Arc;
2use libsyntax2::File;
3use libeditor::LineIndex;
4use {
5 FileId,
6 db::{Query, QueryCtx, QueryRegistry},
7 symbol_index::SymbolIndex,
8};
9
10pub(crate) use db::{file_text, file_set};
11
/// Returns the parsed syntax tree for `file_id`. The cached `Arc<File>` is
/// dereferenced and cloned per call — presumably `File` clones are cheap
/// (internally shared); confirm in libsyntax2.
pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
    (&*ctx.get(FILE_SYNTAX, file_id)).clone()
}
/// Line index (offset <-> line/column conversion) for `file_id`, cached per
/// file text.
pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
    ctx.get(FILE_LINES, file_id)
}
/// Per-file symbol index for `file_id`, cached and recomputed only when the
/// file's syntax changes.
pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
    ctx.get(FILE_SYMBOLS, file_id)
}
21
// Query id 16: parse a file's text into a syntax tree. Ids must be unique
// across every registered query.
const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    File::parse(&*text)
});
// Query id 17: build the line index from a file's text.
const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    LineIndex::new(&*text)
});
// Query id 18: build a single-file symbol index from the parsed syntax.
// Depends on FILE_SYNTAX, so it is invalidated together with it.
const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
    let syntax = file_syntax(ctx, *file_id);
    SymbolIndex::for_file(*file_id, syntax)
});
34
/// Registers the basic per-file queries with the database, with the names
/// used by tracing and tests.
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(FILE_SYNTAX, "FILE_SYNTAX");
    reg.add(FILE_LINES, "FILE_LINES");
    reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
}
diff --git a/crates/libanalysis/src/roots.rs b/crates/libanalysis/src/roots.rs
deleted file mode 100644
index 191d0d821..000000000
--- a/crates/libanalysis/src/roots.rs
+++ /dev/null
@@ -1,178 +0,0 @@
1use std::{
2 collections::HashMap,
3 sync::Arc,
4 panic,
5};
6
7use once_cell::sync::OnceCell;
8use rayon::prelude::*;
9use libeditor::LineIndex;
10use libsyntax2::File;
11
12use {
13 FileId,
14 imp::FileResolverImp,
15 symbol_index::SymbolIndex,
16 descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
17 db::Db,
18};
19
/// A self-contained unit of analysis: either the mutable workspace
/// (`WritableSourceRoot`) or an immutable library (`ReadonlySourceRoot`).
/// Callers go through this trait and need not know which kind of root a
/// file lives in.
pub(crate) trait SourceRoot {
    /// Does this root own `file_id`?
    fn contains(&self, file_id: FileId) -> bool;
    /// Module tree built from every file in this root.
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor>;
    /// Line index for position conversions in `file_id`.
    fn lines(&self, file_id: FileId) -> Arc<LineIndex>;
    /// Parsed syntax tree of `file_id`.
    fn syntax(&self, file_id: FileId) -> File;
    /// Appends this root's symbol indices to `acc` (for global search).
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
}
27
/// The mutable ("workspace") source root: all per-file data lives in the
/// incremental query database and is recomputed on demand after edits.
#[derive(Default, Debug)]
pub(crate) struct WritableSourceRoot {
    db: Db,
}
32
33impl WritableSourceRoot {
34 pub fn apply_changes(
35 &self,
36 changes: &mut dyn Iterator<Item=(FileId, Option<String>)>,
37 file_resolver: Option<FileResolverImp>,
38 ) -> WritableSourceRoot {
39 let resolver_changed = file_resolver.is_some();
40 let mut changed_files = Vec::new();
41 let mut new_state = self.db.state().clone();
42
43 for (file_id, text) in changes {
44 changed_files.push(file_id);
45 match text {
46 Some(text) => {
47 new_state.file_map.insert(file_id, Arc::new(text));
48 },
49 None => {
50 new_state.file_map.remove(&file_id);
51 }
52 }
53 }
54 if let Some(file_resolver) = file_resolver {
55 new_state.file_resolver = file_resolver
56 }
57 WritableSourceRoot {
58 db: self.db.with_changes(new_state, &changed_files, resolver_changed)
59 }
60 }
61}
62
63impl SourceRoot for WritableSourceRoot {
64 fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
65 self.db.make_query(::module_map::module_tree)
66 }
67
68 fn contains(&self, file_id: FileId) -> bool {
69 self.db.state().file_map.contains_key(&file_id)
70 }
71 fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
72 self.db.make_query(|ctx| ::queries::file_lines(ctx, file_id))
73 }
74 fn syntax(&self, file_id: FileId) -> File {
75 self.db.make_query(|ctx| ::queries::file_syntax(ctx, file_id))
76 }
77 fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
78 self.db.make_query(|ctx| {
79 let file_set = ::queries::file_set(ctx);
80 let syms = file_set.0.iter()
81 .map(|file_id| ::queries::file_symbols(ctx, *file_id));
82 acc.extend(syms);
83 });
84 }
85}
86
/// Immutable per-file data for a read-only root: the text plus lazily
/// computed — and then cached forever — line index and syntax tree.
#[derive(Debug)]
struct FileData {
    text: String,
    // Both caches start empty and are filled at most once (`OnceCell`).
    lines: OnceCell<Arc<LineIndex>>,
    syntax: OnceCell<File>,
}
93
94impl FileData {
95 fn new(text: String) -> FileData {
96 FileData {
97 text,
98 syntax: OnceCell::new(),
99 lines: OnceCell::new(),
100 }
101 }
102 fn lines(&self) -> &Arc<LineIndex> {
103 self.lines.get_or_init(|| Arc::new(LineIndex::new(&self.text)))
104 }
105 fn syntax(&self) -> &File {
106 let text = &self.text;
107 let syntax = &self.syntax;
108 match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
109 Ok(file) => file,
110 Err(err) => {
111 error!("Parser paniced on:\n------\n{}\n------\n", text);
112 panic::resume_unwind(err)
113 }
114 }
115 }
116}
117
/// Fully pre-computed, immutable source root for a library: parse results,
/// module tree and symbol index are all built once in `new` and never
/// invalidated afterwards.
#[derive(Debug)]
pub(crate) struct ReadonlySourceRoot {
    // One shared index covering every file in the library.
    symbol_index: Arc<SymbolIndex>,
    file_map: HashMap<FileId, FileData>,
    module_tree: Arc<ModuleTreeDescriptor>,
}
124
125impl ReadonlySourceRoot {
126 pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot {
127 let modules = files.par_iter()
128 .map(|(file_id, text)| {
129 let syntax = File::parse(text);
130 let mod_descr = ModuleDescriptor::new(syntax.ast());
131 (*file_id, syntax, mod_descr)
132 })
133 .collect::<Vec<_>>();
134 let module_tree = ModuleTreeDescriptor::new(
135 modules.iter().map(|it| (it.0, &it.2)),
136 &file_resolver,
137 );
138
139 let symbol_index = SymbolIndex::for_files(
140 modules.par_iter().map(|it| (it.0, it.1.clone()))
141 );
142 let file_map: HashMap<FileId, FileData> = files
143 .into_iter()
144 .map(|(id, text)| (id, FileData::new(text)))
145 .collect();
146
147 ReadonlySourceRoot {
148 symbol_index: Arc::new(symbol_index),
149 file_map,
150 module_tree: Arc::new(module_tree),
151 }
152 }
153
154 fn data(&self, file_id: FileId) -> &FileData {
155 match self.file_map.get(&file_id) {
156 Some(data) => data,
157 None => panic!("unknown file: {:?}", file_id),
158 }
159 }
160}
161
/// Everything is pre-computed, so every method is a cheap lookup or clone.
impl SourceRoot for ReadonlySourceRoot {
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
        Arc::clone(&self.module_tree)
    }
    fn contains(&self, file_id: FileId) -> bool {
        self.file_map.contains_key(&file_id)
    }
    fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
        Arc::clone(self.data(file_id).lines())
    }
    fn syntax(&self, file_id: FileId) -> File {
        self.data(file_id).syntax().clone()
    }
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) {
        // The whole library shares one pre-built index.
        acc.push(Arc::clone(&self.symbol_index))
    }
}
diff --git a/crates/libanalysis/src/symbol_index.rs b/crates/libanalysis/src/symbol_index.rs
deleted file mode 100644
index d22187ac0..000000000
--- a/crates/libanalysis/src/symbol_index.rs
+++ /dev/null
@@ -1,94 +0,0 @@
1use std::{
2 sync::Arc,
3 hash::{Hash, Hasher},
4};
5use libeditor::{FileSymbol, file_symbols};
6use libsyntax2::{
7 File,
8 SyntaxKind::{self, *},
9};
10use fst::{self, Streamer};
11use rayon::prelude::*;
12use {Query, FileId, JobToken};
13
/// Case-insensitive, fuzzy-searchable index of file symbols: `map` maps a
/// lowercased symbol name to a position in the parallel `symbols` vector.
#[derive(Debug)]
pub(crate) struct SymbolIndex {
    symbols: Vec<(FileId, FileSymbol)>,
    map: fst::Map,
}
19
impl Hash for SymbolIndex {
    /// Hashes only the symbol list: `map` is built deterministically from
    /// `symbols`, so including it would add cost without information.
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        self.symbols.hash(hasher)
    }
}
25
impl SymbolIndex {
    /// Builds one index over the symbols of many files, in parallel.
    pub(crate) fn for_files(files: impl ParallelIterator<Item=(FileId, File)>) -> SymbolIndex {
        // Key each symbol by its lowercased name: the search is
        // case-insensitive.
        let mut symbols = files
            .flat_map(|(file_id, file)| {
                file_symbols(&file)
                    .into_iter()
                    .map(move |symbol| {
                        (symbol.name.as_str().to_lowercase(), (file_id, symbol))
                    })
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();
        // `fst::Map::from_iter` requires keys in sorted order…
        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
        // …and unique keys. NOTE(review): this keeps only the FIRST symbol
        // per lowercased name — other symbols sharing a name are dropped
        // from the index. Presumably an accepted limitation; confirm.
        symbols.dedup_by(|s1, s2| s1.0 == s2.0);
        let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
            symbols.into_iter().unzip();
        // Each name's fst value is its position in the `symbols` vector.
        let map = fst::Map::from_iter(
            names.into_iter().zip(0u64..)
        ).unwrap();
        SymbolIndex { symbols, map }
    }

    /// Convenience wrapper: builds an index for a single file.
    pub(crate) fn for_file(file_id: FileId, file: File) -> SymbolIndex {
        SymbolIndex::for_files(::rayon::iter::once((file_id, file)))
    }
}
52
impl Query {
    /// Runs this query against several symbol indices at once, merging the
    /// per-index fst searches into one streamed union.
    pub(crate) fn search(
        self,
        indices: &[Arc<SymbolIndex>],
        token: &JobToken,
    ) -> Vec<(FileId, FileSymbol)> {

        // One subsequence ("fuzzy") automaton per index; the union streams
        // matching keys from all indices in sorted order.
        let mut op = fst::map::OpBuilder::new();
        for file_symbols in indices.iter() {
            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
            op = op.add(file_symbols.map.search(automaton))
        }
        let mut stream = op.union();
        let mut res = Vec::new();
        while let Some((_, indexed_values)) = stream.next() {
            // Early exit on enough results or client-side cancellation;
            // checked once per key, so the limit may be slightly overshot.
            if res.len() >= self.limit || token.is_canceled() {
                break;
            }
            for indexed_value in indexed_values {
                // `index` identifies which input index matched; `value` is
                // the position stored in that index's fst map.
                let file_symbols = &indices[indexed_value.index];
                let idx = indexed_value.value as usize;

                let (file_id, symbol) = &file_symbols.symbols[idx];
                // Post-filters: restrict to type definitions and/or exact
                // (case-sensitive) name matches when the query asks for it.
                if self.only_types && !is_type(symbol.kind) {
                    continue;
                }
                if self.exact && symbol.name != self.query {
                    continue;
                }
                res.push((*file_id, symbol.clone()));
            }
        }
        res
    }
}
88
89fn is_type(kind: SyntaxKind) -> bool {
90 match kind {
91 STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true,
92 _ => false,
93 }
94}