diff options
Diffstat (limited to 'crates/ra_analysis/src')
-rw-r--r-- | crates/ra_analysis/src/db/imp.rs | 152 | ||||
-rw-r--r-- | crates/ra_analysis/src/db/mod.rs | 85 | ||||
-rw-r--r-- | crates/ra_analysis/src/descriptors.rs | 220 | ||||
-rw-r--r-- | crates/ra_analysis/src/imp.rs | 342 | ||||
-rw-r--r-- | crates/ra_analysis/src/job.rs | 49 | ||||
-rw-r--r-- | crates/ra_analysis/src/lib.rs | 240 | ||||
-rw-r--r-- | crates/ra_analysis/src/module_map.rs | 157 | ||||
-rw-r--r-- | crates/ra_analysis/src/queries.rs | 39 | ||||
-rw-r--r-- | crates/ra_analysis/src/roots.rs | 178 | ||||
-rw-r--r-- | crates/ra_analysis/src/symbol_index.rs | 94 |
10 files changed, 1556 insertions, 0 deletions
diff --git a/crates/ra_analysis/src/db/imp.rs b/crates/ra_analysis/src/db/imp.rs new file mode 100644 index 000000000..f26be1046 --- /dev/null +++ b/crates/ra_analysis/src/db/imp.rs | |||
@@ -0,0 +1,152 @@ | |||
1 | use std::{ | ||
2 | sync::Arc, | ||
3 | any::Any, | ||
4 | hash::{Hash, Hasher}, | ||
5 | collections::hash_map::{DefaultHasher, HashMap}, | ||
6 | iter, | ||
7 | }; | ||
8 | use salsa; | ||
9 | use {FileId, imp::FileResolverImp}; | ||
10 | use super::{State, Query, QueryCtx}; | ||
11 | |||
12 | pub(super) type Data = Arc<Any + Send + Sync + 'static>; | ||
13 | |||
14 | #[derive(Debug)] | ||
15 | pub(super) struct Db { | ||
16 | names: Arc<HashMap<salsa::QueryTypeId, &'static str>>, | ||
17 | pub(super) imp: salsa::Db<State, Data>, | ||
18 | } | ||
19 | |||
20 | impl Db { | ||
21 | pub(super) fn new(mut reg: QueryRegistry) -> Db { | ||
22 | let config = reg.config.take().unwrap(); | ||
23 | Db { | ||
24 | names: Arc::new(reg.names), | ||
25 | imp: salsa::Db::new(config, State::default()) | ||
26 | } | ||
27 | } | ||
28 | pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db { | ||
29 | let names = self.names.clone(); | ||
30 | let mut invalidations = salsa::Invalidations::new(); | ||
31 | invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint)); | ||
32 | if resolver_changed { | ||
33 | invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&())))); | ||
34 | } else { | ||
35 | invalidations.invalidate(FILE_SET, iter::empty()); | ||
36 | } | ||
37 | let imp = self.imp.with_ground_data( | ||
38 | new_state, | ||
39 | invalidations, | ||
40 | ); | ||
41 | Db { names, imp } | ||
42 | } | ||
43 | pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> { | ||
44 | ctx.trace().into_iter().map(|it| self.names[&it]).collect() | ||
45 | } | ||
46 | } | ||
47 | |||
48 | pub(crate) trait EvalQuery { | ||
49 | type Params; | ||
50 | type Output; | ||
51 | fn query_type(&self) -> salsa::QueryTypeId; | ||
52 | fn f(&self) -> salsa::QueryFn<State, Data>; | ||
53 | fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>; | ||
54 | } | ||
55 | |||
56 | impl<T, R> EvalQuery for Query<T, R> | ||
57 | where | ||
58 | T: Hash + Send + Sync + 'static, | ||
59 | R: Hash + Send + Sync + 'static, | ||
60 | { | ||
61 | type Params = T; | ||
62 | type Output = R; | ||
63 | fn query_type(&self) -> salsa::QueryTypeId { | ||
64 | salsa::QueryTypeId(self.0) | ||
65 | } | ||
66 | fn f(&self) -> salsa::QueryFn<State, Data> { | ||
67 | let f = self.1; | ||
68 | Box::new(move |ctx, data| { | ||
69 | let ctx = QueryCtx { imp: ctx }; | ||
70 | let data: &T = data.downcast_ref().unwrap(); | ||
71 | let res = f(ctx, data); | ||
72 | let h = hash(&res); | ||
73 | (Arc::new(res), salsa::OutputFingerprint(h)) | ||
74 | }) | ||
75 | } | ||
76 | fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> { | ||
77 | let query_id = salsa::QueryId( | ||
78 | self.query_type(), | ||
79 | salsa::InputFingerprint(hash(¶ms)), | ||
80 | ); | ||
81 | let res = ctx.imp.get(query_id, Arc::new(params)); | ||
82 | res.downcast().unwrap() | ||
83 | } | ||
84 | } | ||
85 | |||
86 | pub(super) struct QueryRegistry { | ||
87 | config: Option<salsa::QueryConfig<State, Data>>, | ||
88 | names: HashMap<salsa::QueryTypeId, &'static str>, | ||
89 | } | ||
90 | |||
91 | impl QueryRegistry { | ||
92 | pub(super) fn new() -> QueryRegistry { | ||
93 | let mut config = salsa::QueryConfig::<State, Data>::new(); | ||
94 | config = config.with_ground_query( | ||
95 | FILE_TEXT, Box::new(|state, params| { | ||
96 | let file_id: &FileId = params.downcast_ref().unwrap(); | ||
97 | let res = state.file_map[file_id].clone(); | ||
98 | let fingerprint = salsa::OutputFingerprint(hash(&res)); | ||
99 | (res, fingerprint) | ||
100 | }) | ||
101 | ); | ||
102 | config = config.with_ground_query( | ||
103 | FILE_SET, Box::new(|state, _params| { | ||
104 | let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect(); | ||
105 | let hash = hash(&file_ids); | ||
106 | let file_resolver = state.file_resolver.clone(); | ||
107 | let res = (file_ids, file_resolver); | ||
108 | let fingerprint = salsa::OutputFingerprint(hash); | ||
109 | (Arc::new(res), fingerprint) | ||
110 | }) | ||
111 | ); | ||
112 | let mut names = HashMap::new(); | ||
113 | names.insert(FILE_TEXT, "FILE_TEXT"); | ||
114 | names.insert(FILE_SET, "FILE_SET"); | ||
115 | QueryRegistry { config: Some(config), names } | ||
116 | } | ||
117 | pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) { | ||
118 | let id = q.query_type(); | ||
119 | let prev = self.names.insert(id, name); | ||
120 | assert!(prev.is_none(), "duplicate query: {:?}", id); | ||
121 | let config = self.config.take().unwrap(); | ||
122 | let config = config.with_query(id, q.f()); | ||
123 | self.config= Some(config); | ||
124 | } | ||
125 | } | ||
126 | |||
127 | fn hash<T: Hash>(x: &T) -> u64 { | ||
128 | let mut hasher = DefaultHasher::new(); | ||
129 | x.hash(&mut hasher); | ||
130 | hasher.finish() | ||
131 | } | ||
132 | |||
133 | const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0); | ||
134 | pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> { | ||
135 | let query_id = salsa::QueryId( | ||
136 | FILE_TEXT, | ||
137 | salsa::InputFingerprint(hash(&file_id)), | ||
138 | ); | ||
139 | let res = ctx.imp.get(query_id, Arc::new(file_id)); | ||
140 | res.downcast().unwrap() | ||
141 | } | ||
142 | |||
143 | const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1); | ||
144 | pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> { | ||
145 | let query_id = salsa::QueryId( | ||
146 | FILE_SET, | ||
147 | salsa::InputFingerprint(hash(&())), | ||
148 | ); | ||
149 | let res = ctx.imp.get(query_id, Arc::new(())); | ||
150 | res.downcast().unwrap() | ||
151 | } | ||
152 | |||
diff --git a/crates/ra_analysis/src/db/mod.rs b/crates/ra_analysis/src/db/mod.rs new file mode 100644 index 000000000..22769d112 --- /dev/null +++ b/crates/ra_analysis/src/db/mod.rs | |||
@@ -0,0 +1,85 @@ | |||
1 | mod imp; | ||
2 | |||
3 | use std::{ | ||
4 | sync::Arc, | ||
5 | }; | ||
6 | use im; | ||
7 | use salsa; | ||
8 | use {FileId, imp::FileResolverImp}; | ||
9 | |||
10 | #[derive(Debug, Default, Clone)] | ||
11 | pub(crate) struct State { | ||
12 | pub(crate) file_map: im::HashMap<FileId, Arc<String>>, | ||
13 | pub(crate) file_resolver: FileResolverImp | ||
14 | } | ||
15 | |||
16 | #[derive(Debug)] | ||
17 | pub(crate) struct Db { | ||
18 | imp: imp::Db, | ||
19 | } | ||
20 | |||
21 | #[derive(Clone, Copy)] | ||
22 | pub(crate) struct QueryCtx<'a> { | ||
23 | imp: &'a salsa::QueryCtx<State, imp::Data>, | ||
24 | } | ||
25 | |||
26 | pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R); | ||
27 | |||
28 | pub(crate) struct QueryRegistry { | ||
29 | imp: imp::QueryRegistry, | ||
30 | } | ||
31 | |||
32 | impl Default for Db { | ||
33 | fn default() -> Db { | ||
34 | Db::new() | ||
35 | } | ||
36 | } | ||
37 | |||
38 | impl Db { | ||
39 | pub(crate) fn new() -> Db { | ||
40 | let reg = QueryRegistry::new(); | ||
41 | Db { imp: imp::Db::new(reg.imp) } | ||
42 | } | ||
43 | pub(crate) fn state(&self) -> &State { | ||
44 | self.imp.imp.ground_data() | ||
45 | } | ||
46 | pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db { | ||
47 | Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) } | ||
48 | } | ||
49 | pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R { | ||
50 | let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() }; | ||
51 | f(ctx) | ||
52 | } | ||
53 | #[allow(unused)] | ||
54 | pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) { | ||
55 | let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() }; | ||
56 | let res = f(ctx); | ||
57 | let trace = self.imp.extract_trace(ctx.imp); | ||
58 | (res, trace) | ||
59 | } | ||
60 | } | ||
61 | |||
62 | impl<'a> QueryCtx<'a> { | ||
63 | pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> { | ||
64 | q.get(self, params) | ||
65 | } | ||
66 | } | ||
67 | |||
68 | pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> { | ||
69 | imp::file_text(ctx, file_id) | ||
70 | } | ||
71 | |||
72 | pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> { | ||
73 | imp::file_set(ctx) | ||
74 | } | ||
75 | impl QueryRegistry { | ||
76 | fn new() -> QueryRegistry { | ||
77 | let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() }; | ||
78 | ::queries::register_queries(&mut reg); | ||
79 | ::module_map::register_queries(&mut reg); | ||
80 | reg | ||
81 | } | ||
82 | pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) { | ||
83 | self.imp.add(q, name) | ||
84 | } | ||
85 | } | ||
diff --git a/crates/ra_analysis/src/descriptors.rs b/crates/ra_analysis/src/descriptors.rs new file mode 100644 index 000000000..0731b5572 --- /dev/null +++ b/crates/ra_analysis/src/descriptors.rs | |||
@@ -0,0 +1,220 @@ | |||
1 | use std::{ | ||
2 | collections::BTreeMap, | ||
3 | }; | ||
4 | use relative_path::RelativePathBuf; | ||
5 | use ra_syntax::{ | ||
6 | SmolStr, | ||
7 | ast::{self, NameOwner}, | ||
8 | }; | ||
9 | use { | ||
10 | FileId, | ||
11 | imp::FileResolverImp, | ||
12 | }; | ||
13 | |||
14 | #[derive(Debug, Hash)] | ||
15 | pub struct ModuleDescriptor { | ||
16 | pub submodules: Vec<Submodule> | ||
17 | } | ||
18 | |||
19 | impl ModuleDescriptor { | ||
20 | pub fn new(root: ast::Root) -> ModuleDescriptor { | ||
21 | let submodules = modules(root) | ||
22 | .map(|(name, _)| Submodule { name }) | ||
23 | .collect(); | ||
24 | |||
25 | ModuleDescriptor { submodules } } | ||
26 | } | ||
27 | |||
28 | fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item=(SmolStr, ast::Module<'a>)> { | ||
29 | root | ||
30 | .modules() | ||
31 | .filter_map(|module| { | ||
32 | let name = module.name()?.text(); | ||
33 | if !module.has_semi() { | ||
34 | return None; | ||
35 | } | ||
36 | Some((name, module)) | ||
37 | }) | ||
38 | } | ||
39 | |||
40 | #[derive(Clone, Hash, PartialEq, Eq, Debug)] | ||
41 | pub struct Submodule { | ||
42 | pub name: SmolStr, | ||
43 | } | ||
44 | |||
45 | #[derive(Hash, Debug)] | ||
46 | pub(crate) struct ModuleTreeDescriptor { | ||
47 | nodes: Vec<NodeData>, | ||
48 | links: Vec<LinkData>, | ||
49 | file_id2node: BTreeMap<FileId, Node>, | ||
50 | } | ||
51 | |||
52 | #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] | ||
53 | struct Node(usize); | ||
54 | #[derive(Hash, Debug)] | ||
55 | struct NodeData { | ||
56 | file_id: FileId, | ||
57 | links: Vec<Link>, | ||
58 | parents: Vec<Link> | ||
59 | } | ||
60 | |||
61 | #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] | ||
62 | pub(crate) struct Link(usize); | ||
63 | #[derive(Hash, Debug)] | ||
64 | struct LinkData { | ||
65 | owner: Node, | ||
66 | name: SmolStr, | ||
67 | points_to: Vec<Node>, | ||
68 | problem: Option<Problem>, | ||
69 | } | ||
70 | |||
71 | |||
72 | #[derive(Clone, Debug, Hash)] | ||
73 | pub enum Problem { | ||
74 | UnresolvedModule { | ||
75 | candidate: RelativePathBuf, | ||
76 | }, | ||
77 | NotDirOwner { | ||
78 | move_to: RelativePathBuf, | ||
79 | candidate: RelativePathBuf, | ||
80 | } | ||
81 | } | ||
82 | |||
83 | impl ModuleTreeDescriptor { | ||
84 | pub(crate) fn new<'a>( | ||
85 | files: impl Iterator<Item=(FileId, &'a ModuleDescriptor)> + Clone, | ||
86 | file_resolver: &FileResolverImp, | ||
87 | ) -> ModuleTreeDescriptor { | ||
88 | let mut file_id2node = BTreeMap::new(); | ||
89 | let mut nodes: Vec<NodeData> = files.clone().enumerate() | ||
90 | .map(|(idx, (file_id, _))| { | ||
91 | file_id2node.insert(file_id, Node(idx)); | ||
92 | NodeData { | ||
93 | file_id, | ||
94 | links: Vec::new(), | ||
95 | parents: Vec::new(), | ||
96 | } | ||
97 | }) | ||
98 | .collect(); | ||
99 | let mut links = Vec::new(); | ||
100 | |||
101 | for (idx, (file_id, descr)) in files.enumerate() { | ||
102 | let owner = Node(idx); | ||
103 | for sub in descr.submodules.iter() { | ||
104 | let link = Link(links.len()); | ||
105 | nodes[owner.0].links.push(link); | ||
106 | let (points_to, problem) = resolve_submodule(file_id, &sub.name, file_resolver); | ||
107 | let points_to = points_to | ||
108 | .into_iter() | ||
109 | .map(|file_id| { | ||
110 | let node = file_id2node[&file_id]; | ||
111 | nodes[node.0].parents.push(link); | ||
112 | node | ||
113 | }) | ||
114 | .collect(); | ||
115 | |||
116 | links.push(LinkData { | ||
117 | owner, | ||
118 | name: sub.name.clone(), | ||
119 | points_to, | ||
120 | problem, | ||
121 | }) | ||
122 | |||
123 | } | ||
124 | } | ||
125 | |||
126 | ModuleTreeDescriptor { | ||
127 | nodes, links, file_id2node | ||
128 | } | ||
129 | } | ||
130 | |||
131 | pub(crate) fn parent_modules(&self, file_id: FileId) -> Vec<Link> { | ||
132 | let node = self.file_id2node[&file_id]; | ||
133 | self.node(node) | ||
134 | .parents | ||
135 | .clone() | ||
136 | } | ||
137 | pub(crate) fn child_module_by_name(&self, file_id: FileId, name: &str) -> Vec<FileId> { | ||
138 | let node = self.file_id2node[&file_id]; | ||
139 | self.node(node) | ||
140 | .links | ||
141 | .iter() | ||
142 | .filter(|it| it.name(self) == name) | ||
143 | .flat_map(|link| link.points_to(self).iter().map(|&node| self.node(node).file_id)) | ||
144 | .collect() | ||
145 | } | ||
146 | pub(crate) fn problems<'a, 'b>(&'b self, file_id: FileId, root: ast::Root<'a>) -> Vec<(ast::Name<'a>, &'b Problem)> { | ||
147 | let node = self.file_id2node[&file_id]; | ||
148 | self.node(node) | ||
149 | .links | ||
150 | .iter() | ||
151 | .filter_map(|&link| { | ||
152 | let problem = self.link(link).problem.as_ref()?; | ||
153 | let name = link.bind_source(self, root).name()?; | ||
154 | Some((name, problem)) | ||
155 | }) | ||
156 | .collect() | ||
157 | } | ||
158 | |||
159 | fn node(&self, node: Node) -> &NodeData { | ||
160 | &self.nodes[node.0] | ||
161 | } | ||
162 | fn link(&self, link: Link) -> &LinkData { | ||
163 | &self.links[link.0] | ||
164 | } | ||
165 | } | ||
166 | |||
167 | impl Link { | ||
168 | pub(crate) fn name(self, tree: &ModuleTreeDescriptor) -> SmolStr { | ||
169 | tree.link(self).name.clone() | ||
170 | } | ||
171 | pub(crate) fn owner(self, tree: &ModuleTreeDescriptor) -> FileId { | ||
172 | let owner = tree.link(self).owner; | ||
173 | tree.node(owner).file_id | ||
174 | } | ||
175 | fn points_to(self, tree: &ModuleTreeDescriptor) -> &[Node] { | ||
176 | &tree.link(self).points_to | ||
177 | } | ||
178 | pub(crate) fn bind_source<'a>(self, tree: &ModuleTreeDescriptor, root: ast::Root<'a>) -> ast::Module<'a> { | ||
179 | modules(root) | ||
180 | .filter(|(name, _)| name == &tree.link(self).name) | ||
181 | .next() | ||
182 | .unwrap() | ||
183 | .1 | ||
184 | } | ||
185 | } | ||
186 | |||
187 | |||
188 | fn resolve_submodule( | ||
189 | file_id: FileId, | ||
190 | name: &SmolStr, | ||
191 | file_resolver: &FileResolverImp | ||
192 | ) -> (Vec<FileId>, Option<Problem>) { | ||
193 | let mod_name = file_resolver.file_stem(file_id); | ||
194 | let is_dir_owner = | ||
195 | mod_name == "mod" || mod_name == "lib" || mod_name == "main"; | ||
196 | |||
197 | let file_mod = RelativePathBuf::from(format!("../{}.rs", name)); | ||
198 | let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name)); | ||
199 | let points_to: Vec<FileId>; | ||
200 | let problem: Option<Problem>; | ||
201 | if is_dir_owner { | ||
202 | points_to = [&file_mod, &dir_mod].iter() | ||
203 | .filter_map(|path| file_resolver.resolve(file_id, path)) | ||
204 | .collect(); | ||
205 | problem = if points_to.is_empty() { | ||
206 | Some(Problem::UnresolvedModule { | ||
207 | candidate: file_mod, | ||
208 | }) | ||
209 | } else { | ||
210 | None | ||
211 | } | ||
212 | } else { | ||
213 | points_to = Vec::new(); | ||
214 | problem = Some(Problem::NotDirOwner { | ||
215 | move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)), | ||
216 | candidate: file_mod, | ||
217 | }); | ||
218 | } | ||
219 | (points_to, problem) | ||
220 | } | ||
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs new file mode 100644 index 000000000..90184a4b9 --- /dev/null +++ b/crates/ra_analysis/src/imp.rs | |||
@@ -0,0 +1,342 @@ | |||
1 | use std::{ | ||
2 | sync::{ | ||
3 | Arc, | ||
4 | atomic::{AtomicBool, Ordering::SeqCst}, | ||
5 | }, | ||
6 | fmt, | ||
7 | collections::{HashSet, VecDeque}, | ||
8 | iter, | ||
9 | }; | ||
10 | |||
11 | use relative_path::RelativePath; | ||
12 | use ra_editor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit}; | ||
13 | use ra_syntax::{ | ||
14 | TextUnit, TextRange, SmolStr, File, AstNode, | ||
15 | SyntaxKind::*, | ||
16 | ast::{self, NameOwner}, | ||
17 | }; | ||
18 | |||
19 | use { | ||
20 | FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit, | ||
21 | JobToken, CrateGraph, CrateId, | ||
22 | roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot}, | ||
23 | descriptors::{ModuleTreeDescriptor, Problem}, | ||
24 | }; | ||
25 | |||
26 | |||
27 | #[derive(Clone, Debug)] | ||
28 | pub(crate) struct FileResolverImp { | ||
29 | inner: Arc<FileResolver> | ||
30 | } | ||
31 | |||
32 | impl FileResolverImp { | ||
33 | pub(crate) fn new(inner: Arc<FileResolver>) -> FileResolverImp { | ||
34 | FileResolverImp { inner } | ||
35 | } | ||
36 | pub(crate) fn file_stem(&self, file_id: FileId) -> String { | ||
37 | self.inner.file_stem(file_id) | ||
38 | } | ||
39 | pub(crate) fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> { | ||
40 | self.inner.resolve(file_id, path) | ||
41 | } | ||
42 | } | ||
43 | |||
44 | impl Default for FileResolverImp { | ||
45 | fn default() -> FileResolverImp { | ||
46 | #[derive(Debug)] | ||
47 | struct DummyResolver; | ||
48 | impl FileResolver for DummyResolver { | ||
49 | fn file_stem(&self, _file_: FileId) -> String { | ||
50 | panic!("file resolver not set") | ||
51 | } | ||
52 | fn resolve(&self, _file_id: FileId, _path: &::relative_path::RelativePath) -> Option<FileId> { | ||
53 | panic!("file resolver not set") | ||
54 | } | ||
55 | } | ||
56 | FileResolverImp { inner: Arc::new(DummyResolver) } | ||
57 | } | ||
58 | } | ||
59 | |||
60 | #[derive(Debug)] | ||
61 | pub(crate) struct AnalysisHostImpl { | ||
62 | data: Arc<WorldData> | ||
63 | } | ||
64 | |||
65 | impl AnalysisHostImpl { | ||
66 | pub fn new() -> AnalysisHostImpl { | ||
67 | AnalysisHostImpl { | ||
68 | data: Arc::new(WorldData::default()), | ||
69 | } | ||
70 | } | ||
71 | pub fn analysis(&self) -> AnalysisImpl { | ||
72 | AnalysisImpl { | ||
73 | needs_reindex: AtomicBool::new(false), | ||
74 | data: self.data.clone(), | ||
75 | } | ||
76 | } | ||
77 | pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) { | ||
78 | let data = self.data_mut(); | ||
79 | data.root = Arc::new(data.root.apply_changes(changes, None)); | ||
80 | } | ||
81 | pub fn set_file_resolver(&mut self, resolver: FileResolverImp) { | ||
82 | let data = self.data_mut(); | ||
83 | data.file_resolver = resolver.clone(); | ||
84 | data.root = Arc::new(data.root.apply_changes(&mut iter::empty(), Some(resolver))); | ||
85 | } | ||
86 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | ||
87 | let mut visited = HashSet::new(); | ||
88 | for &file_id in graph.crate_roots.values() { | ||
89 | if !visited.insert(file_id) { | ||
90 | panic!("duplicate crate root: {:?}", file_id); | ||
91 | } | ||
92 | } | ||
93 | self.data_mut().crate_graph = graph; | ||
94 | } | ||
95 | pub fn add_library(&mut self, root: ReadonlySourceRoot) { | ||
96 | self.data_mut().libs.push(Arc::new(root)); | ||
97 | } | ||
98 | fn data_mut(&mut self) -> &mut WorldData { | ||
99 | Arc::make_mut(&mut self.data) | ||
100 | } | ||
101 | } | ||
102 | |||
103 | pub(crate) struct AnalysisImpl { | ||
104 | needs_reindex: AtomicBool, | ||
105 | data: Arc<WorldData>, | ||
106 | } | ||
107 | |||
108 | impl fmt::Debug for AnalysisImpl { | ||
109 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
110 | (&*self.data).fmt(f) | ||
111 | } | ||
112 | } | ||
113 | |||
114 | impl Clone for AnalysisImpl { | ||
115 | fn clone(&self) -> AnalysisImpl { | ||
116 | AnalysisImpl { | ||
117 | needs_reindex: AtomicBool::new(self.needs_reindex.load(SeqCst)), | ||
118 | data: Arc::clone(&self.data), | ||
119 | } | ||
120 | } | ||
121 | } | ||
122 | |||
123 | impl AnalysisImpl { | ||
124 | fn root(&self, file_id: FileId) -> &SourceRoot { | ||
125 | if self.data.root.contains(file_id) { | ||
126 | return &*self.data.root; | ||
127 | } | ||
128 | &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap() | ||
129 | } | ||
130 | pub fn file_syntax(&self, file_id: FileId) -> File { | ||
131 | self.root(file_id).syntax(file_id) | ||
132 | } | ||
133 | pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> { | ||
134 | self.root(file_id).lines(file_id) | ||
135 | } | ||
136 | pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> { | ||
137 | let mut buf = Vec::new(); | ||
138 | if query.libs { | ||
139 | self.data.libs.iter() | ||
140 | .for_each(|it| it.symbols(&mut buf)); | ||
141 | } else { | ||
142 | self.data.root.symbols(&mut buf); | ||
143 | } | ||
144 | query.search(&buf, token) | ||
145 | |||
146 | } | ||
147 | pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> { | ||
148 | let root = self.root(file_id); | ||
149 | let module_tree = root.module_tree(); | ||
150 | module_tree.parent_modules(file_id) | ||
151 | .iter() | ||
152 | .map(|link| { | ||
153 | let file_id = link.owner(&module_tree); | ||
154 | let syntax = root.syntax(file_id); | ||
155 | let decl = link.bind_source(&module_tree, syntax.ast()); | ||
156 | let sym = FileSymbol { | ||
157 | name: link.name(&module_tree), | ||
158 | node_range: decl.syntax().range(), | ||
159 | kind: MODULE, | ||
160 | }; | ||
161 | (file_id, sym) | ||
162 | }) | ||
163 | .collect() | ||
164 | } | ||
165 | pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> { | ||
166 | let module_tree = self.root(file_id).module_tree(); | ||
167 | let crate_graph = &self.data.crate_graph; | ||
168 | let mut res = Vec::new(); | ||
169 | let mut work = VecDeque::new(); | ||
170 | work.push_back(file_id); | ||
171 | let mut visited = HashSet::new(); | ||
172 | while let Some(id) = work.pop_front() { | ||
173 | if let Some(crate_id) = crate_graph.crate_id_for_crate_root(id) { | ||
174 | res.push(crate_id); | ||
175 | continue; | ||
176 | } | ||
177 | let parents = module_tree | ||
178 | .parent_modules(id) | ||
179 | .into_iter() | ||
180 | .map(|link| link.owner(&module_tree)) | ||
181 | .filter(|&id| visited.insert(id)); | ||
182 | work.extend(parents); | ||
183 | } | ||
184 | res | ||
185 | } | ||
186 | pub fn crate_root(&self, crate_id: CrateId) -> FileId { | ||
187 | self.data.crate_graph.crate_roots[&crate_id] | ||
188 | } | ||
189 | pub fn approximately_resolve_symbol( | ||
190 | &self, | ||
191 | file_id: FileId, | ||
192 | offset: TextUnit, | ||
193 | token: &JobToken, | ||
194 | ) -> Vec<(FileId, FileSymbol)> { | ||
195 | let root = self.root(file_id); | ||
196 | let module_tree = root.module_tree(); | ||
197 | let file = root.syntax(file_id); | ||
198 | let syntax = file.syntax(); | ||
199 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) { | ||
200 | return self.index_resolve(name_ref, token); | ||
201 | } | ||
202 | if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) { | ||
203 | if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) { | ||
204 | if module.has_semi() { | ||
205 | let file_ids = self.resolve_module(&*module_tree, file_id, module); | ||
206 | |||
207 | let res = file_ids.into_iter().map(|id| { | ||
208 | let name = module.name() | ||
209 | .map(|n| n.text()) | ||
210 | .unwrap_or_else(|| SmolStr::new("")); | ||
211 | let symbol = FileSymbol { | ||
212 | name, | ||
213 | node_range: TextRange::offset_len(0.into(), 0.into()), | ||
214 | kind: MODULE, | ||
215 | }; | ||
216 | (id, symbol) | ||
217 | }).collect(); | ||
218 | |||
219 | return res; | ||
220 | } | ||
221 | } | ||
222 | } | ||
223 | vec![] | ||
224 | } | ||
225 | |||
226 | pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> { | ||
227 | let root = self.root(file_id); | ||
228 | let module_tree = root.module_tree(); | ||
229 | let syntax = root.syntax(file_id); | ||
230 | |||
231 | let mut res = ra_editor::diagnostics(&syntax) | ||
232 | .into_iter() | ||
233 | .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None }) | ||
234 | .collect::<Vec<_>>(); | ||
235 | |||
236 | for (name_node, problem) in module_tree.problems(file_id, syntax.ast()) { | ||
237 | let diag = match problem { | ||
238 | Problem::UnresolvedModule { candidate } => { | ||
239 | let create_file = FileSystemEdit::CreateFile { | ||
240 | anchor: file_id, | ||
241 | path: candidate.clone(), | ||
242 | }; | ||
243 | let fix = SourceChange { | ||
244 | label: "create module".to_string(), | ||
245 | source_file_edits: Vec::new(), | ||
246 | file_system_edits: vec![create_file], | ||
247 | cursor_position: None, | ||
248 | }; | ||
249 | Diagnostic { | ||
250 | range: name_node.syntax().range(), | ||
251 | message: "unresolved module".to_string(), | ||
252 | fix: Some(fix), | ||
253 | } | ||
254 | } | ||
255 | Problem::NotDirOwner { move_to, candidate } => { | ||
256 | let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() }; | ||
257 | let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) }; | ||
258 | let fix = SourceChange { | ||
259 | label: "move file and create module".to_string(), | ||
260 | source_file_edits: Vec::new(), | ||
261 | file_system_edits: vec![move_file, create_file], | ||
262 | cursor_position: None, | ||
263 | }; | ||
264 | Diagnostic { | ||
265 | range: name_node.syntax().range(), | ||
266 | message: "can't declare module at this location".to_string(), | ||
267 | fix: Some(fix), | ||
268 | } | ||
269 | } | ||
270 | }; | ||
271 | res.push(diag) | ||
272 | } | ||
273 | res | ||
274 | } | ||
275 | |||
276 | pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> { | ||
277 | let file = self.file_syntax(file_id); | ||
278 | let offset = range.start(); | ||
279 | let actions = vec![ | ||
280 | ("flip comma", ra_editor::flip_comma(&file, offset).map(|f| f())), | ||
281 | ("add `#[derive]`", ra_editor::add_derive(&file, offset).map(|f| f())), | ||
282 | ("add impl", ra_editor::add_impl(&file, offset).map(|f| f())), | ||
283 | ("introduce variable", ra_editor::introduce_variable(&file, range).map(|f| f())), | ||
284 | ]; | ||
285 | actions.into_iter() | ||
286 | .filter_map(|(name, local_edit)| { | ||
287 | Some(SourceChange::from_local_edit( | ||
288 | file_id, name, local_edit?, | ||
289 | )) | ||
290 | }) | ||
291 | .collect() | ||
292 | } | ||
293 | |||
294 | fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> { | ||
295 | let name = name_ref.text(); | ||
296 | let mut query = Query::new(name.to_string()); | ||
297 | query.exact(); | ||
298 | query.limit(4); | ||
299 | self.world_symbols(query, token) | ||
300 | } | ||
301 | |||
302 | fn resolve_module(&self, module_tree: &ModuleTreeDescriptor, file_id: FileId, module: ast::Module) -> Vec<FileId> { | ||
303 | let name = match module.name() { | ||
304 | Some(name) => name.text(), | ||
305 | None => return Vec::new(), | ||
306 | }; | ||
307 | module_tree.child_module_by_name(file_id, name.as_str()) | ||
308 | } | ||
309 | } | ||
310 | |||
311 | #[derive(Default, Clone, Debug)] | ||
312 | struct WorldData { | ||
313 | file_resolver: FileResolverImp, | ||
314 | crate_graph: CrateGraph, | ||
315 | root: Arc<WritableSourceRoot>, | ||
316 | libs: Vec<Arc<ReadonlySourceRoot>>, | ||
317 | } | ||
318 | |||
319 | impl SourceChange { | ||
320 | pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange { | ||
321 | let file_edit = SourceFileEdit { | ||
322 | file_id, | ||
323 | edits: edit.edit.into_atoms(), | ||
324 | }; | ||
325 | SourceChange { | ||
326 | label: label.to_string(), | ||
327 | source_file_edits: vec![file_edit], | ||
328 | file_system_edits: vec![], | ||
329 | cursor_position: edit.cursor_position | ||
330 | .map(|offset| Position { offset, file_id }) | ||
331 | } | ||
332 | } | ||
333 | } | ||
334 | |||
335 | impl CrateGraph { | ||
336 | fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> { | ||
337 | let (&crate_id, _) = self.crate_roots | ||
338 | .iter() | ||
339 | .find(|(_crate_id, &root_id)| root_id == file_id)?; | ||
340 | Some(crate_id) | ||
341 | } | ||
342 | } | ||
diff --git a/crates/ra_analysis/src/job.rs b/crates/ra_analysis/src/job.rs new file mode 100644 index 000000000..ea1652a26 --- /dev/null +++ b/crates/ra_analysis/src/job.rs | |||
@@ -0,0 +1,49 @@ | |||
1 | use crossbeam_channel::{bounded, Receiver, Sender}; | ||
2 | |||
3 | pub struct JobHandle { | ||
4 | job_alive: Receiver<Never>, | ||
5 | _job_canceled: Sender<Never>, | ||
6 | } | ||
7 | |||
8 | pub struct JobToken { | ||
9 | _job_alive: Sender<Never>, | ||
10 | job_canceled: Receiver<Never>, | ||
11 | } | ||
12 | |||
13 | impl JobHandle { | ||
14 | pub fn new() -> (JobHandle, JobToken) { | ||
15 | let (sender_alive, receiver_alive) = bounded(0); | ||
16 | let (sender_canceled, receiver_canceled) = bounded(0); | ||
17 | let token = JobToken { _job_alive: sender_alive, job_canceled: receiver_canceled }; | ||
18 | let handle = JobHandle { job_alive: receiver_alive, _job_canceled: sender_canceled }; | ||
19 | (handle, token) | ||
20 | } | ||
21 | pub fn has_completed(&self) -> bool { | ||
22 | is_closed(&self.job_alive) | ||
23 | } | ||
24 | pub fn cancel(self) { | ||
25 | } | ||
26 | } | ||
27 | |||
28 | impl JobToken { | ||
29 | pub fn is_canceled(&self) -> bool { | ||
30 | is_closed(&self.job_canceled) | ||
31 | } | ||
32 | } | ||
33 | |||
34 | |||
35 | // We don't actually send messages through the channels, | ||
36 | // and instead just check if the channel is closed, | ||
37 | // so we use uninhabited enum as a message type | ||
38 | enum Never {} | ||
39 | |||
40 | /// Nonblocking | ||
41 | fn is_closed(chan: &Receiver<Never>) -> bool { | ||
42 | select! { | ||
43 | recv(chan, msg) => match msg { | ||
44 | None => true, | ||
45 | Some(never) => match never {} | ||
46 | } | ||
47 | default => false, | ||
48 | } | ||
49 | } | ||
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs new file mode 100644 index 000000000..4da55ab26 --- /dev/null +++ b/crates/ra_analysis/src/lib.rs | |||
@@ -0,0 +1,240 @@ | |||
1 | extern crate parking_lot; | ||
2 | #[macro_use] | ||
3 | extern crate log; | ||
4 | extern crate once_cell; | ||
5 | extern crate ra_syntax; | ||
6 | extern crate ra_editor; | ||
7 | extern crate fst; | ||
8 | extern crate rayon; | ||
9 | extern crate relative_path; | ||
10 | #[macro_use] | ||
11 | extern crate crossbeam_channel; | ||
12 | extern crate im; | ||
13 | extern crate salsa; | ||
14 | |||
15 | mod symbol_index; | ||
16 | mod module_map; | ||
17 | mod imp; | ||
18 | mod job; | ||
19 | mod roots; | ||
20 | mod db; | ||
21 | mod queries; | ||
22 | mod descriptors; | ||
23 | |||
24 | use std::{ | ||
25 | sync::Arc, | ||
26 | collections::HashMap, | ||
27 | fmt::Debug, | ||
28 | }; | ||
29 | |||
30 | use relative_path::{RelativePath, RelativePathBuf}; | ||
31 | use ra_syntax::{File, TextRange, TextUnit, AtomEdit}; | ||
32 | use imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp}; | ||
33 | |||
34 | pub use ra_editor::{ | ||
35 | StructureNode, LineIndex, FileSymbol, | ||
36 | Runnable, RunnableKind, HighlightedRange, CompletionItem, | ||
37 | }; | ||
38 | pub use job::{JobToken, JobHandle}; | ||
39 | |||
/// Opaque identifier of a file known to the analysis host.
/// Newtype over `u32`; the mapping to paths lives in the `FileResolver`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32);

/// Opaque identifier of a crate in the `CrateGraph`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateId(pub u32);

/// Maps each crate to the file id of its root module (e.g. lib.rs/main.rs).
#[derive(Debug, Clone, Default)]
pub struct CrateGraph {
    pub crate_roots: HashMap<CrateId, FileId>,
}

/// Client-provided view of the file system: path-like queries are answered
/// in terms of `FileId`s, so the analysis never touches real paths.
pub trait FileResolver: Debug + Send + Sync + 'static {
    /// File name without the extension (e.g. "foo" for "foo.rs").
    fn file_stem(&self, file_id: FileId) -> String;
    /// Resolves `path` relative to `file_id`'s location, if the target exists.
    fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>;
}

/// Mutable entry point: owns the analysis state and hands out
/// read-only `Analysis` snapshots.
#[derive(Debug)]
pub struct AnalysisHost {
    imp: AnalysisHostImpl
}
60 | |||
61 | impl AnalysisHost { | ||
62 | pub fn new() -> AnalysisHost { | ||
63 | AnalysisHost { imp: AnalysisHostImpl::new() } | ||
64 | } | ||
65 | pub fn analysis(&self) -> Analysis { | ||
66 | Analysis { imp: self.imp.analysis() } | ||
67 | } | ||
68 | pub fn change_file(&mut self, file_id: FileId, text: Option<String>) { | ||
69 | self.change_files(::std::iter::once((file_id, text))); | ||
70 | } | ||
71 | pub fn change_files(&mut self, mut changes: impl Iterator<Item=(FileId, Option<String>)>) { | ||
72 | self.imp.change_files(&mut changes) | ||
73 | } | ||
74 | pub fn set_file_resolver(&mut self, resolver: Arc<FileResolver>) { | ||
75 | self.imp.set_file_resolver(FileResolverImp::new(resolver)); | ||
76 | } | ||
77 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | ||
78 | self.imp.set_crate_graph(graph) | ||
79 | } | ||
80 | pub fn add_library(&mut self, data: LibraryData) { | ||
81 | self.imp.add_library(data.root) | ||
82 | } | ||
83 | } | ||
84 | |||
/// An edit the IDE proposes to apply, possibly spanning several files
/// and the file system itself.
#[derive(Debug)]
pub struct SourceChange {
    /// Human-readable description shown in the UI (e.g. "join lines").
    pub label: String,
    /// In-buffer text edits, grouped per file.
    pub source_file_edits: Vec<SourceFileEdit>,
    /// File creations/moves that accompany the text edits.
    pub file_system_edits: Vec<FileSystemEdit>,
    /// Where to place the cursor after applying the change, if relevant.
    pub cursor_position: Option<Position>,
}

/// A point in a file, as a byte offset into its text.
#[derive(Debug)]
pub struct Position {
    pub file_id: FileId,
    pub offset: TextUnit,
}

/// Text edits for one file.
#[derive(Debug)]
pub struct SourceFileEdit {
    pub file_id: FileId,
    pub edits: Vec<AtomEdit>,
}

/// A file-system-level operation; paths are relative to an existing file.
#[derive(Debug)]
pub enum FileSystemEdit {
    CreateFile {
        /// Existing file the new path is resolved against.
        anchor: FileId,
        path: RelativePathBuf,
    },
    MoveFile {
        file: FileId,
        path: RelativePathBuf,
    }
}

/// A diagnostic message with its range and an optional quick fix.
#[derive(Debug)]
pub struct Diagnostic {
    pub message: String,
    pub range: TextRange,
    pub fix: Option<SourceChange>,
}
123 | |||
/// A workspace-symbol search query; built with `Query::new` and refined
/// via the setter methods, consumed by `Query::search`.
#[derive(Debug)]
pub struct Query {
    /// The raw query string, used for exact-match comparison.
    query: String,
    /// Lowercased copy, used for case-insensitive fuzzy matching.
    lowercased: String,
    /// Restrict results to type-introducing symbols (struct/enum/trait/type).
    only_types: bool,
    /// Include symbols from library (read-only) roots.
    libs: bool,
    /// Require an exact (case-sensitive) name match.
    exact: bool,
    /// Maximum number of results to return.
    limit: usize,
}
133 | |||
134 | impl Query { | ||
135 | pub fn new(query: String) -> Query { | ||
136 | let lowercased = query.to_lowercase(); | ||
137 | Query { | ||
138 | query, | ||
139 | lowercased, | ||
140 | only_types: false, | ||
141 | libs: false, | ||
142 | exact: false, | ||
143 | limit: usize::max_value() | ||
144 | } | ||
145 | } | ||
146 | pub fn only_types(&mut self) { | ||
147 | self.only_types = true; | ||
148 | } | ||
149 | pub fn libs(&mut self) { | ||
150 | self.libs = true; | ||
151 | } | ||
152 | pub fn exact(&mut self) { | ||
153 | self.exact = true; | ||
154 | } | ||
155 | pub fn limit(&mut self, limit: usize) { | ||
156 | self.limit = limit | ||
157 | } | ||
158 | } | ||
159 | |||
/// A cheap-to-clone, read-only snapshot of the analysis state.
#[derive(Clone, Debug)]
pub struct Analysis {
    imp: AnalysisImpl
}

impl Analysis {
    /// Parsed syntax tree for the file.
    pub fn file_syntax(&self, file_id: FileId) -> File {
        self.imp.file_syntax(file_id).clone()
    }
    /// Line/column ↔ offset conversion table for the file.
    pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
        self.imp.file_line_index(file_id)
    }
    /// Grows the selection to the next syntactic node; returns the input
    /// range unchanged when no larger node exists.
    pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
        ra_editor::extend_selection(file, range).unwrap_or(range)
    }
    /// Offset of the brace matching the one at `offset`, if any.
    pub fn matching_brace(&self, file: &File, offset: TextUnit) -> Option<TextUnit> {
        ra_editor::matching_brace(file, offset)
    }
    /// Debug rendering of the file's syntax tree.
    pub fn syntax_tree(&self, file_id: FileId) -> String {
        let file = self.imp.file_syntax(file_id);
        ra_editor::syntax_tree(&file)
    }
    /// "Join lines" edit for the given range.
    pub fn join_lines(&self, file_id: FileId, range: TextRange) -> SourceChange {
        let file = self.imp.file_syntax(file_id);
        SourceChange::from_local_edit(file_id, "join lines", ra_editor::join_lines(&file, range))
    }
    /// On-typing assist for `=`: may produce an edit (e.g. inserting `;`).
    pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> {
        let file = self.imp.file_syntax(file_id);
        Some(SourceChange::from_local_edit(file_id, "add semicolon", ra_editor::on_eq_typed(&file, offset)?))
    }
    /// Outline of the file (functions, types, ...), for breadcrumbs/outline views.
    pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::file_structure(&file)
    }
    /// Workspace-wide symbol search; cooperatively cancellable via `token`.
    pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        self.imp.world_symbols(query, token)
    }
    /// Best-effort go-to-definition for the symbol at `offset`.
    pub fn approximately_resolve_symbol(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        self.imp.approximately_resolve_symbol(file_id, offset, token)
    }
    /// The module(s) that declare the file via `mod` items.
    pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
        self.imp.parent_module(file_id)
    }
    /// The crate(s) whose module tree contains the file.
    pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
        self.imp.crate_for(file_id)
    }
    /// Root file of the crate (e.g. its lib.rs/main.rs).
    pub fn crate_root(&self, crate_id: CrateId) -> FileId {
        self.imp.crate_root(crate_id)
    }
    /// Runnable items (tests, main, ...) in the file.
    pub fn runnables(&self, file_id: FileId) -> Vec<Runnable> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::runnables(&file)
    }
    /// Syntax-based semantic highlighting ranges.
    pub fn highlight(&self, file_id: FileId) -> Vec<HighlightedRange> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::highlight(&file)
    }
    /// Scope-based completions at `offset`; `None` when no completion
    /// context is found there.
    pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Option<Vec<CompletionItem>> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::scope_completion(&file, offset)
    }
    /// Code-action style assists applicable to the range.
    pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
        self.imp.assists(file_id, range)
    }
    /// Diagnostics (with optional fixes) for the file.
    pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
        self.imp.diagnostics(file_id)
    }
}
228 | |||
/// A fully indexed, immutable library root, built off-thread with
/// `prepare` and then handed to `AnalysisHost::add_library`.
#[derive(Debug)]
pub struct LibraryData {
    root: roots::ReadonlySourceRoot
}

impl LibraryData {
    /// Parses and indexes `files` eagerly (this is the expensive part and
    /// can run on a background thread), producing a ready-to-add library.
    pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData {
        let file_resolver = FileResolverImp::new(file_resolver);
        let root = roots::ReadonlySourceRoot::new(files, file_resolver);
        LibraryData { root }
    }
}
diff --git a/crates/ra_analysis/src/module_map.rs b/crates/ra_analysis/src/module_map.rs new file mode 100644 index 000000000..a21f55fff --- /dev/null +++ b/crates/ra_analysis/src/module_map.rs | |||
@@ -0,0 +1,157 @@ | |||
1 | use std::sync::Arc; | ||
2 | use { | ||
3 | FileId, | ||
4 | db::{ | ||
5 | Query, QueryRegistry, QueryCtx, | ||
6 | file_set | ||
7 | }, | ||
8 | queries::file_syntax, | ||
9 | descriptors::{ModuleDescriptor, ModuleTreeDescriptor}, | ||
10 | }; | ||
11 | |||
/// Registers the module-map queries with the database, associating each
/// query id with a stable debug name (used by query tracing in tests).
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(MODULE_DESCR, "MODULE_DESCR");
    reg.add(MODULE_TREE, "MODULE_TREE");
}
16 | |||
/// Convenience accessor for the memoized whole-workspace module tree.
pub(crate) fn module_tree(ctx: QueryCtx) -> Arc<ModuleTreeDescriptor> {
    ctx.get(MODULE_TREE, ())
}

/// Per-file module descriptor (the `mod` declarations of one file),
/// derived from that file's syntax tree. Query id 30.
const MODULE_DESCR: Query<FileId, ModuleDescriptor> = Query(30, |ctx, &file_id| {
    let file = file_syntax(ctx, file_id);
    ModuleDescriptor::new(file.ast())
});
25 | |||
26 | const MODULE_TREE: Query<(), ModuleTreeDescriptor> = Query(31, |ctx, _| { | ||
27 | let file_set = file_set(ctx); | ||
28 | let mut files = Vec::new(); | ||
29 | for &file_id in file_set.0.iter() { | ||
30 | let module_descr = ctx.get(MODULE_DESCR, file_id); | ||
31 | files.push((file_id, module_descr)); | ||
32 | } | ||
33 | ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.1) | ||
34 | }); | ||
35 | |||
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use im;
    use relative_path::{RelativePath, RelativePathBuf};
    use {
        db::{Db},
        imp::FileResolverImp,
        FileId, FileResolver,
    };
    use super::*;

    /// In-memory `FileResolver` backed by a persistent id → path map.
    #[derive(Debug)]
    struct FileMap(im::HashMap<FileId, RelativePathBuf>);

    impl FileResolver for FileMap {
        fn file_stem(&self, file_id: FileId) -> String {
            self.0[&file_id].file_stem().unwrap().to_string()
        }
        fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
            // Resolve `rel` against the directory of `file_id`, then look up
            // which known file has exactly that normalized path.
            let path = self.0[&file_id].join(rel).normalize();
            self.0.iter()
                .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
                .next()
        }
    }

    /// Test harness: a `Db` plus the mutable file map that feeds it.
    struct Fixture {
        // Next fresh `FileId`; ids start at 1 and are never reused.
        next_file_id: u32,
        fm: im::HashMap<FileId, RelativePathBuf>,
        db: Db,
    }

    impl Fixture {
        fn new() -> Fixture {
            Fixture {
                next_file_id: 1,
                fm: im::HashMap::new(),
                db: Db::new(),
            }
        }
        /// Adds a file at an absolute-looking `path` ("/foo.rs") and returns
        /// its id; rebuilds the resolver, so resolver-dependent queries are
        /// invalidated (`resolver_changed = true`).
        fn add_file(&mut self, path: &str, text: &str) -> FileId {
            assert!(path.starts_with("/"));
            let file_id = FileId(self.next_file_id);
            self.next_file_id += 1;
            self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
            let mut new_state = self.db.state().clone();
            new_state.file_map.insert(file_id, Arc::new(text.to_string()));
            new_state.file_resolver = FileResolverImp::new(
                Arc::new(FileMap(self.fm.clone()))
            );
            self.db = self.db.with_changes(new_state, &[file_id], true);
            file_id
        }
        /// Removes a file; also rebuilds the resolver.
        fn remove_file(&mut self, file_id: FileId) {
            self.fm.remove(&file_id);
            let mut new_state = self.db.state().clone();
            new_state.file_map.remove(&file_id);
            new_state.file_resolver = FileResolverImp::new(
                Arc::new(FileMap(self.fm.clone()))
            );
            self.db = self.db.with_changes(new_state, &[file_id], true);
        }
        /// Changes only a file's text; the resolver is untouched
        /// (`resolver_changed = false`), exercising finer-grained invalidation.
        fn change_file(&mut self, file_id: FileId, new_text: &str) {
            let mut new_state = self.db.state().clone();
            new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
            self.db = self.db.with_changes(new_state, &[file_id], false);
        }
        /// Asserts both the parent modules of `file_id` and, via query
        /// tracing, how many times each named query was (re)executed —
        /// this is what verifies incrementality.
        fn check_parent_modules(
            &self,
            file_id: FileId,
            expected: &[FileId],
            queries: &[(&'static str, u64)]
        ) {
            let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
            let actual = tree.parent_modules(file_id)
                .into_iter()
                .map(|link| link.owner(&tree))
                .collect::<Vec<_>>();
            assert_eq!(actual.as_slice(), expected);
            // Tally execution events per query name.
            let mut counts = HashMap::new();
            events.into_iter()
                .for_each(|event| *counts.entry(event).or_insert(0) += 1);
            for &(query_id, expected_count) in queries.iter() {
                let actual_count = *counts.get(&query_id).unwrap_or(&0);
                assert_eq!(
                    actual_count,
                    expected_count,
                    "counts for {} differ",
                    query_id,
                )
            }

        }
    }

    #[test]
    fn test_parent_module() {
        let mut f = Fixture::new();
        let foo = f.add_file("/foo.rs", "");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);

        // Adding lib.rs makes it foo's parent; its descriptor is computed once.
        let lib = f.add_file("/lib.rs", "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
        // Re-querying with no changes recomputes nothing.
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);

        // Each text change recomputes exactly the changed file's descriptor.
        f.change_file(lib, "");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);

        f.change_file(lib, "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);

        f.change_file(lib, "mod bar;");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);

        f.change_file(lib, "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);

        // Removing a file triggers no descriptor recomputation.
        f.remove_file(lib);
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
    }
}
diff --git a/crates/ra_analysis/src/queries.rs b/crates/ra_analysis/src/queries.rs new file mode 100644 index 000000000..062a2f420 --- /dev/null +++ b/crates/ra_analysis/src/queries.rs | |||
@@ -0,0 +1,39 @@ | |||
1 | use std::sync::Arc; | ||
2 | use ra_syntax::File; | ||
3 | use ra_editor::LineIndex; | ||
4 | use { | ||
5 | FileId, | ||
6 | db::{Query, QueryCtx, QueryRegistry}, | ||
7 | symbol_index::SymbolIndex, | ||
8 | }; | ||
9 | |||
10 | pub(crate) use db::{file_text, file_set}; | ||
11 | |||
/// Memoized parse of a file's text; clones the cached `File` out of the Arc.
pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
    (&*ctx.get(FILE_SYNTAX, file_id)).clone()
}
/// Memoized line index (offset ↔ line/column) for a file.
pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
    ctx.get(FILE_LINES, file_id)
}
/// Memoized symbol index for a single file.
pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
    ctx.get(FILE_SYMBOLS, file_id)
}
21 | |||
// Query definitions: each pairs a unique numeric query id with a pure
// function of the inputs it reads through `ctx`.

/// Parse tree from file text. Query id 16.
const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    File::parse(&*text)
});
/// Line index from file text. Query id 17.
const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    LineIndex::new(&*text)
});
/// Symbol index from the parse tree. Query id 18.
const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
    let syntax = file_syntax(ctx, *file_id);
    SymbolIndex::for_file(*file_id, syntax)
});
34 | |||
/// Registers the basic per-file queries with the database, associating
/// each query id with a stable debug name (used by query tracing).
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(FILE_SYNTAX, "FILE_SYNTAX");
    reg.add(FILE_LINES, "FILE_LINES");
    reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
}
diff --git a/crates/ra_analysis/src/roots.rs b/crates/ra_analysis/src/roots.rs new file mode 100644 index 000000000..1835a9b25 --- /dev/null +++ b/crates/ra_analysis/src/roots.rs | |||
@@ -0,0 +1,178 @@ | |||
1 | use std::{ | ||
2 | collections::HashMap, | ||
3 | sync::Arc, | ||
4 | panic, | ||
5 | }; | ||
6 | |||
7 | use once_cell::sync::OnceCell; | ||
8 | use rayon::prelude::*; | ||
9 | use ra_editor::LineIndex; | ||
10 | use ra_syntax::File; | ||
11 | |||
12 | use { | ||
13 | FileId, | ||
14 | imp::FileResolverImp, | ||
15 | symbol_index::SymbolIndex, | ||
16 | descriptors::{ModuleDescriptor, ModuleTreeDescriptor}, | ||
17 | db::Db, | ||
18 | }; | ||
19 | |||
/// Common interface over a set of source files, implemented both by the
/// incrementally updated workspace root and by frozen library roots.
pub(crate) trait SourceRoot {
    /// Whether this root owns `file_id`.
    fn contains(&self, file_id: FileId) -> bool;
    /// Module tree covering all files in this root.
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor>;
    /// Line index for one file.
    fn lines(&self, file_id: FileId) -> Arc<LineIndex>;
    /// Parse tree for one file.
    fn syntax(&self, file_id: FileId) -> File;
    /// Appends this root's symbol indices to `acc` (one or many).
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
}

/// The mutable workspace root: all derived data is computed on demand
/// and incrementally invalidated through the query database.
#[derive(Default, Debug)]
pub(crate) struct WritableSourceRoot {
    db: Db,
}
32 | |||
33 | impl WritableSourceRoot { | ||
34 | pub fn apply_changes( | ||
35 | &self, | ||
36 | changes: &mut dyn Iterator<Item=(FileId, Option<String>)>, | ||
37 | file_resolver: Option<FileResolverImp>, | ||
38 | ) -> WritableSourceRoot { | ||
39 | let resolver_changed = file_resolver.is_some(); | ||
40 | let mut changed_files = Vec::new(); | ||
41 | let mut new_state = self.db.state().clone(); | ||
42 | |||
43 | for (file_id, text) in changes { | ||
44 | changed_files.push(file_id); | ||
45 | match text { | ||
46 | Some(text) => { | ||
47 | new_state.file_map.insert(file_id, Arc::new(text)); | ||
48 | }, | ||
49 | None => { | ||
50 | new_state.file_map.remove(&file_id); | ||
51 | } | ||
52 | } | ||
53 | } | ||
54 | if let Some(file_resolver) = file_resolver { | ||
55 | new_state.file_resolver = file_resolver | ||
56 | } | ||
57 | WritableSourceRoot { | ||
58 | db: self.db.with_changes(new_state, &changed_files, resolver_changed) | ||
59 | } | ||
60 | } | ||
61 | } | ||
62 | |||
impl SourceRoot for WritableSourceRoot {
    /// Module tree via the memoized MODULE_TREE query.
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
        self.db.make_query(::module_map::module_tree)
    }

    fn contains(&self, file_id: FileId) -> bool {
        self.db.state().file_map.contains_key(&file_id)
    }
    fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
        self.db.make_query(|ctx| ::queries::file_lines(ctx, file_id))
    }
    fn syntax(&self, file_id: FileId) -> File {
        self.db.make_query(|ctx| ::queries::file_syntax(ctx, file_id))
    }
    /// Collects the per-file symbol index of every file in the workspace.
    fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
        self.db.make_query(|ctx| {
            let file_set = ::queries::file_set(ctx);
            let syms = file_set.0.iter()
                .map(|file_id| ::queries::file_symbols(ctx, *file_id));
            acc.extend(syms);
        });
    }
}
86 | |||
/// One file of a read-only root: the text plus lazily-computed,
/// cached line index and parse tree.
#[derive(Debug)]
struct FileData {
    text: String,
    lines: OnceCell<Arc<LineIndex>>,
    syntax: OnceCell<File>,
}
93 | |||
94 | impl FileData { | ||
95 | fn new(text: String) -> FileData { | ||
96 | FileData { | ||
97 | text, | ||
98 | syntax: OnceCell::new(), | ||
99 | lines: OnceCell::new(), | ||
100 | } | ||
101 | } | ||
102 | fn lines(&self) -> &Arc<LineIndex> { | ||
103 | self.lines.get_or_init(|| Arc::new(LineIndex::new(&self.text))) | ||
104 | } | ||
105 | fn syntax(&self) -> &File { | ||
106 | let text = &self.text; | ||
107 | let syntax = &self.syntax; | ||
108 | match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) { | ||
109 | Ok(file) => file, | ||
110 | Err(err) => { | ||
111 | error!("Parser paniced on:\n------\n{}\n------\n", text); | ||
112 | panic::resume_unwind(err) | ||
113 | } | ||
114 | } | ||
115 | } | ||
116 | } | ||
117 | |||
/// A frozen library root: the module tree and symbol index are computed
/// eagerly at construction and never change afterwards.
#[derive(Debug)]
pub(crate) struct ReadonlySourceRoot {
    symbol_index: Arc<SymbolIndex>,
    file_map: HashMap<FileId, FileData>,
    module_tree: Arc<ModuleTreeDescriptor>,
}
124 | |||
125 | impl ReadonlySourceRoot { | ||
126 | pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot { | ||
127 | let modules = files.par_iter() | ||
128 | .map(|(file_id, text)| { | ||
129 | let syntax = File::parse(text); | ||
130 | let mod_descr = ModuleDescriptor::new(syntax.ast()); | ||
131 | (*file_id, syntax, mod_descr) | ||
132 | }) | ||
133 | .collect::<Vec<_>>(); | ||
134 | let module_tree = ModuleTreeDescriptor::new( | ||
135 | modules.iter().map(|it| (it.0, &it.2)), | ||
136 | &file_resolver, | ||
137 | ); | ||
138 | |||
139 | let symbol_index = SymbolIndex::for_files( | ||
140 | modules.par_iter().map(|it| (it.0, it.1.clone())) | ||
141 | ); | ||
142 | let file_map: HashMap<FileId, FileData> = files | ||
143 | .into_iter() | ||
144 | .map(|(id, text)| (id, FileData::new(text))) | ||
145 | .collect(); | ||
146 | |||
147 | ReadonlySourceRoot { | ||
148 | symbol_index: Arc::new(symbol_index), | ||
149 | file_map, | ||
150 | module_tree: Arc::new(module_tree), | ||
151 | } | ||
152 | } | ||
153 | |||
154 | fn data(&self, file_id: FileId) -> &FileData { | ||
155 | match self.file_map.get(&file_id) { | ||
156 | Some(data) => data, | ||
157 | None => panic!("unknown file: {:?}", file_id), | ||
158 | } | ||
159 | } | ||
160 | } | ||
161 | |||
162 | impl SourceRoot for ReadonlySourceRoot { | ||
163 | fn module_tree(&self) -> Arc<ModuleTreeDescriptor> { | ||
164 | Arc::clone(&self.module_tree) | ||
165 | } | ||
166 | fn contains(&self, file_id: FileId) -> bool { | ||
167 | self.file_map.contains_key(&file_id) | ||
168 | } | ||
169 | fn lines(&self, file_id: FileId) -> Arc<LineIndex> { | ||
170 | Arc::clone(self.data(file_id).lines()) | ||
171 | } | ||
172 | fn syntax(&self, file_id: FileId) -> File { | ||
173 | self.data(file_id).syntax().clone() | ||
174 | } | ||
175 | fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) { | ||
176 | acc.push(Arc::clone(&self.symbol_index)) | ||
177 | } | ||
178 | } | ||
diff --git a/crates/ra_analysis/src/symbol_index.rs b/crates/ra_analysis/src/symbol_index.rs new file mode 100644 index 000000000..ffbb6a29f --- /dev/null +++ b/crates/ra_analysis/src/symbol_index.rs | |||
@@ -0,0 +1,94 @@ | |||
1 | use std::{ | ||
2 | sync::Arc, | ||
3 | hash::{Hash, Hasher}, | ||
4 | }; | ||
5 | use ra_editor::{FileSymbol, file_symbols}; | ||
6 | use ra_syntax::{ | ||
7 | File, | ||
8 | SyntaxKind::{self, *}, | ||
9 | }; | ||
10 | use fst::{self, Streamer}; | ||
11 | use rayon::prelude::*; | ||
12 | use {Query, FileId, JobToken}; | ||
13 | |||
/// Fuzzy-searchable index of file symbols.
///
/// `map` maps each lowercased symbol name to its position in `symbols`,
/// enabling subsequence search over names via fst automata.
#[derive(Debug)]
pub(crate) struct SymbolIndex {
    symbols: Vec<(FileId, FileSymbol)>,
    map: fst::Map,
}

impl Hash for SymbolIndex {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        // `map` is fully determined by `symbols` (see `for_files`),
        // so hashing the symbols alone is sufficient.
        self.symbols.hash(hasher)
    }
}
25 | |||
impl SymbolIndex {
    /// Builds an index over all symbols of the given files, extracting
    /// symbols from each file in parallel.
    pub(crate) fn for_files(files: impl ParallelIterator<Item=(FileId, File)>) -> SymbolIndex {
        // Pair each symbol with its lowercased name, the search key.
        let mut symbols = files
            .flat_map(|(file_id, file)| {
                file_symbols(&file)
                    .into_iter()
                    .map(move |symbol| {
                        (symbol.name.as_str().to_lowercase(), (file_id, symbol))
                    })
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();
        // Sort by key and drop duplicates: fst map construction needs
        // sorted, unique keys. NOTE(review): dedup also discards distinct
        // symbols that share a lowercased name — confirm this is intended.
        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
        symbols.dedup_by(|s1, s2| s1.0 == s2.0);
        let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
            symbols.into_iter().unzip();
        // Map each name to its index in `symbols` (0, 1, 2, ...).
        let map = fst::Map::from_iter(
            names.into_iter().zip(0u64..)
        ).unwrap();
        SymbolIndex { symbols, map }
    }

    /// Single-file convenience wrapper around `for_files`.
    pub(crate) fn for_file(file_id: FileId, file: File) -> SymbolIndex {
        SymbolIndex::for_files(::rayon::iter::once((file_id, file)))
    }
}
52 | |||
impl Query {
    /// Runs the query against the given indices, returning up to
    /// `self.limit` matches; checks `token` for cooperative cancellation.
    pub(crate) fn search(
        self,
        indices: &[Arc<SymbolIndex>],
        token: &JobToken,
    ) -> Vec<(FileId, FileSymbol)> {

        // Union the subsequence-automaton search over every index so the
        // merged stream yields each matching name once, in sorted order.
        let mut op = fst::map::OpBuilder::new();
        for file_symbols in indices.iter() {
            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
            op = op.add(file_symbols.map.search(automaton))
        }
        let mut stream = op.union();
        let mut res = Vec::new();
        while let Some((_, indexed_values)) = stream.next() {
            // Stop on limit or cancellation; checked per matched name.
            if res.len() >= self.limit || token.is_canceled() {
                break;
            }
            // One name may match in several indices; `index` identifies the
            // source index, `value` the symbol's position inside it.
            for indexed_value in indexed_values {
                let file_symbols = &indices[indexed_value.index];
                let idx = indexed_value.value as usize;

                let (file_id, symbol) = &file_symbols.symbols[idx];
                // Post-filters that the fuzzy automaton cannot express.
                if self.only_types && !is_type(symbol.kind) {
                    continue;
                }
                if self.exact && symbol.name != self.query {
                    continue;
                }
                res.push((*file_id, symbol.clone()));
            }
        }
        res
    }
}
88 | |||
89 | fn is_type(kind: SyntaxKind) -> bool { | ||
90 | match kind { | ||
91 | STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true, | ||
92 | _ => false, | ||
93 | } | ||
94 | } | ||