author     Aleksey Kladov <[email protected]>   2018-10-15 19:56:01 +0100
committer  Aleksey Kladov <[email protected]>   2018-10-15 19:56:01 +0100
commit     ee69fddf02b2c8d4b73f9412831f5fcc4fa931a1
tree       c5694c8b25580219237764d5e8d852dacfcaea84  /crates/ra_analysis/src
parent     7503c5528f20f7404bac6f70bcf2120edb54dc52
Drop dead code
Diffstat (limited to 'crates/ra_analysis/src')

 -rw-r--r--  crates/ra_analysis/src/db.rs (renamed from crates/ra_analysis/src/db/mod.rs) |  86
 -rw-r--r--  crates/ra_analysis/src/db/imp.rs                                             | 153
 -rw-r--r--  crates/ra_analysis/src/module_map.rs                                         | 123
 -rw-r--r--  crates/ra_analysis/src/queries.rs                                            |  39
4 files changed, 0 insertions, 401 deletions
diff --git a/crates/ra_analysis/src/db/mod.rs b/crates/ra_analysis/src/db.rs
index 081510daa..0773edcc1 100644
--- a/crates/ra_analysis/src/db/mod.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -101,89 +101,3 @@ fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
     let syntax = db.file_syntax(file_id);
     Arc::new(SymbolIndex::for_file(file_id, syntax))
 }
-
-// mod imp;
-
-// use std::{
-//     sync::Arc,
-// };
-// use im;
-// use salsa;
-// use {FileId, imp::FileResolverImp};
-
-// #[derive(Debug, Default, Clone)]
-// pub(crate) struct State {
-//     pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
-//     pub(crate) file_resolver: FileResolverImp
-// }
-
-// #[derive(Debug)]
-// pub(crate) struct Db {
-//     imp: imp::Db,
-// }
-
-// #[derive(Clone, Copy)]
-// pub(crate) struct QueryCtx<'a> {
-//     imp: &'a salsa::QueryCtx<State, imp::Data>,
-// }
-
-// pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
-
-// pub(crate) struct QueryRegistry {
-//     imp: imp::QueryRegistry,
-// }
-
-// impl Default for Db {
-//     fn default() -> Db {
-//         Db::new()
-//     }
-// }
-
-// impl Db {
-//     pub(crate) fn new() -> Db {
-//         let reg = QueryRegistry::new();
-//         Db { imp: imp::Db::new(reg.imp) }
-//     }
-//     pub(crate) fn state(&self) -> &State {
-//         self.imp.imp.ground_data()
-//     }
-//     pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
-//         Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
-//     }
-//     pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
-//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
-//         f(ctx)
-//     }
-//     #[allow(unused)]
-//     pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
-//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
-//         let res = f(ctx);
-//         let trace = self.imp.extract_trace(ctx.imp);
-//         (res, trace)
-//     }
-// }
-
-// impl<'a> QueryCtx<'a> {
-//     pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
-//         q.get(self, params)
-//     }
-// }
-
-// pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
-//     imp::file_text(ctx, file_id)
-// }
-
-// pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
-//     imp::file_set(ctx)
-// }
-// impl QueryRegistry {
-//     fn new() -> QueryRegistry {
-//         let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
-//         ::queries::register_queries(&mut reg);
-//         ::module_map::register_queries(&mut reg);
-//         reg
-//     }
-//     pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
-//         self.imp.add(q, name)
-//     }
-// }
diff --git a/crates/ra_analysis/src/db/imp.rs b/crates/ra_analysis/src/db/imp.rs
deleted file mode 100644
index 7669b6184..000000000
--- a/crates/ra_analysis/src/db/imp.rs
+++ /dev/null
@@ -1,153 +0,0 @@
-use std::{
-    sync::Arc,
-    any::Any,
-    hash::{Hash, Hasher},
-    collections::hash_map::{DefaultHasher},
-    iter,
-};
-use rustc_hash::FxHashMap;
-use salsa;
-use crate::{FileId, imp::FileResolverImp};
-use super::{State, Query, QueryCtx};
-
-pub(super) type Data = Arc<Any + Send + Sync + 'static>;
-
-#[derive(Debug)]
-pub(super) struct Db {
-    names: Arc<FxHashMap<salsa::QueryTypeId, &'static str>>,
-    pub(super) imp: salsa::Db<State, Data>,
-}
-
-impl Db {
-    pub(super) fn new(mut reg: QueryRegistry) -> Db {
-        let config = reg.config.take().unwrap();
-        Db {
-            names: Arc::new(reg.names),
-            imp: salsa::Db::new(config, State::default())
-        }
-    }
-    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
-        let names = self.names.clone();
-        let mut invalidations = salsa::Invalidations::new();
-        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
-        if resolver_changed {
-            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
-        } else {
-            invalidations.invalidate(FILE_SET, iter::empty());
-        }
-        let imp = self.imp.with_ground_data(
-            new_state,
-            invalidations,
-        );
-        Db { names, imp }
-    }
-    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
-        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
-    }
-}
-
-pub(crate) trait EvalQuery {
-    type Params;
-    type Output;
-    fn query_type(&self) -> salsa::QueryTypeId;
-    fn f(&self) -> salsa::QueryFn<State, Data>;
-    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output>;
-}
-
-impl<T, R> EvalQuery for Query<T, R>
-where
-    T: Hash + Send + Sync + 'static,
-    R: Hash + Send + Sync + 'static,
-{
-    type Params = T;
-    type Output = R;
-    fn query_type(&self) -> salsa::QueryTypeId {
-        salsa::QueryTypeId(self.0)
-    }
-    fn f(&self) -> salsa::QueryFn<State, Data> {
-        let f = self.1;
-        Box::new(move |ctx, data| {
-            let ctx = QueryCtx { imp: ctx };
-            let data: &T = data.downcast_ref().unwrap();
-            let res = f(ctx, data);
-            let h = hash(&res);
-            (Arc::new(res), salsa::OutputFingerprint(h))
-        })
-    }
-    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
-        let query_id = salsa::QueryId(
-            self.query_type(),
-            salsa::InputFingerprint(hash(&params)),
-        );
-        let res = ctx.imp.get(query_id, Arc::new(params));
-        res.downcast().unwrap()
-    }
-}
-
-pub(super) struct QueryRegistry {
-    config: Option<salsa::QueryConfig<State, Data>>,
-    names: FxHashMap<salsa::QueryTypeId, &'static str>,
-}
-
-impl QueryRegistry {
-    pub(super) fn new() -> QueryRegistry {
-        let mut config = salsa::QueryConfig::<State, Data>::new();
-        config = config.with_ground_query(
-            FILE_TEXT, Box::new(|state, params| {
-                let file_id: &FileId = params.downcast_ref().unwrap();
-                let res = state.file_map[file_id].clone();
-                let fingerprint = salsa::OutputFingerprint(hash(&res));
-                (res, fingerprint)
-            })
-        );
-        config = config.with_ground_query(
-            FILE_SET, Box::new(|state, _params| {
-                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
-                let hash = hash(&file_ids);
-                let file_resolver = state.file_resolver.clone();
-                let res = (file_ids, file_resolver);
-                let fingerprint = salsa::OutputFingerprint(hash);
-                (Arc::new(res), fingerprint)
-            })
-        );
-        let mut names = FxHashMap::default();
-        names.insert(FILE_TEXT, "FILE_TEXT");
-        names.insert(FILE_SET, "FILE_SET");
-        QueryRegistry { config: Some(config), names }
-    }
-    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
-        let id = q.query_type();
-        let prev = self.names.insert(id, name);
-        assert!(prev.is_none(), "duplicate query: {:?}", id);
-        let config = self.config.take().unwrap();
-        let config = config.with_query(id, q.f());
-        self.config = Some(config);
-    }
-}
-
-fn hash<T: Hash>(x: &T) -> u64 {
-    let mut hasher = DefaultHasher::new();
-    x.hash(&mut hasher);
-    hasher.finish()
-}
-
-const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
-pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
-    let query_id = salsa::QueryId(
-        FILE_TEXT,
-        salsa::InputFingerprint(hash(&file_id)),
-    );
-    let res = ctx.imp.get(query_id, Arc::new(file_id));
-    res.downcast().unwrap()
-}
-
-const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
-pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
-    let query_id = salsa::QueryId(
-        FILE_SET,
-        salsa::InputFingerprint(hash(&())),
-    );
-    let res = ctx.imp.get(query_id, Arc::new(()));
-    res.downcast().unwrap()
-}
-
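Note on the deleted db/imp.rs: it was a hand-rolled memoization layer over an early salsa prototype. Every query got a numeric QueryTypeId, inputs and outputs were reduced to 64-bit fingerprints with DefaultHasher, and Db::with_changes invalidated the ground queries (FILE_TEXT, FILE_SET) whose inputs had changed. The sketch below illustrates only that fingerprint-keyed caching idea in isolation; it does not reproduce the old salsa prototype API, and names such as QueryCache and get_or_compute are invented for this illustration.

// Minimal sketch of fingerprint-keyed query caching, assuming nothing beyond
// the standard library. Entries are keyed by (query id, input fingerprint),
// mirroring how the removed code built salsa::QueryId(QueryTypeId, InputFingerprint).
use std::collections::hash_map::{DefaultHasher, HashMap};
use std::hash::{Hash, Hasher};
use std::sync::Arc;

fn fingerprint<T: Hash>(x: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    hasher.finish()
}

struct QueryCache<V> {
    entries: HashMap<(u16, u64), Arc<V>>,
}

impl<V> QueryCache<V> {
    fn new() -> Self {
        QueryCache { entries: HashMap::new() }
    }

    // Recompute only when this (query, input) pair has not been cached yet.
    fn get_or_compute<K: Hash>(
        &mut self,
        query_type: u16,
        input: &K,
        compute: impl FnOnce(&K) -> V,
    ) -> Arc<V> {
        let key = (query_type, fingerprint(input));
        self.entries
            .entry(key)
            .or_insert_with(|| Arc::new(compute(input)))
            .clone()
    }

    // Drop cached outputs for the given query, analogous to
    // `invalidations.invalidate(FILE_TEXT, changed_files...)` in the removed code.
    fn invalidate(&mut self, query_type: u16, input_fingerprints: &[u64]) {
        for &fp in input_fingerprints {
            self.entries.remove(&(query_type, fp));
        }
    }
}

fn main() {
    const FILE_TEXT: u16 = 0;
    let mut cache = QueryCache::new();
    let text = cache.get_or_compute(FILE_TEXT, &1u32, |_| "fn main() {}".to_string());
    println!("{}", text);
    cache.invalidate(FILE_TEXT, &[fingerprint(&1u32)]);
}

As in the removed code, invalidation only needs the fingerprint of a changed input, never the cached value itself.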
diff --git a/crates/ra_analysis/src/module_map.rs b/crates/ra_analysis/src/module_map.rs
index 95a770ae7..c1799e3d4 100644
--- a/crates/ra_analysis/src/module_map.rs
+++ b/crates/ra_analysis/src/module_map.rs
@@ -32,126 +32,3 @@ fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
     let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
     Arc::new(res)
 }
-
-// #[cfg(test)]
-// mod tests {
-//     use std::collections::HashMap;
-//     use im;
-//     use relative_path::{RelativePath, RelativePathBuf};
-//     use {
-//         db::{Db},
-//         imp::FileResolverImp,
-//         FileId, FileResolver,
-//     };
-//     use super::*;
-
-//     #[derive(Debug)]
-//     struct FileMap(im::HashMap<FileId, RelativePathBuf>);
-
-//     impl FileResolver for FileMap {
-//         fn file_stem(&self, file_id: FileId) -> String {
-//             self.0[&file_id].file_stem().unwrap().to_string()
-//         }
-//         fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
-//             let path = self.0[&file_id].join(rel).normalize();
-//             self.0.iter()
-//                 .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
-//                 .next()
-//         }
-//     }
-
-//     struct Fixture {
-//         next_file_id: u32,
-//         fm: im::HashMap<FileId, RelativePathBuf>,
-//         db: Db,
-//     }
-
-//     impl Fixture {
-//         fn new() -> Fixture {
-//             Fixture {
-//                 next_file_id: 1,
-//                 fm: im::HashMap::new(),
-//                 db: Db::new(),
-//             }
-//         }
-//         fn add_file(&mut self, path: &str, text: &str) -> FileId {
-//             assert!(path.starts_with("/"));
-//             let file_id = FileId(self.next_file_id);
-//             self.next_file_id += 1;
-//             self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
-//             let mut new_state = self.db.state().clone();
-//             new_state.file_map.insert(file_id, Arc::new(text.to_string()));
-//             new_state.file_resolver = FileResolverImp::new(
-//                 Arc::new(FileMap(self.fm.clone()))
-//             );
-//             self.db = self.db.with_changes(new_state, &[file_id], true);
-//             file_id
-//         }
-//         fn remove_file(&mut self, file_id: FileId) {
-//             self.fm.remove(&file_id);
-//             let mut new_state = self.db.state().clone();
-//             new_state.file_map.remove(&file_id);
-//             new_state.file_resolver = FileResolverImp::new(
-//                 Arc::new(FileMap(self.fm.clone()))
-//             );
-//             self.db = self.db.with_changes(new_state, &[file_id], true);
-//         }
-//         fn change_file(&mut self, file_id: FileId, new_text: &str) {
-//             let mut new_state = self.db.state().clone();
-//             new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
-//             self.db = self.db.with_changes(new_state, &[file_id], false);
-//         }
-//         fn check_parent_modules(
-//             &self,
-//             file_id: FileId,
-//             expected: &[FileId],
-//             queries: &[(&'static str, u64)]
-//         ) {
-//             let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
-//             let actual = tree.parent_modules(file_id)
-//                 .into_iter()
-//                 .map(|link| link.owner(&tree))
-//                 .collect::<Vec<_>>();
-//             assert_eq!(actual.as_slice(), expected);
-//             let mut counts = HashMap::new();
-//             events.into_iter()
-//                 .for_each(|event| *counts.entry(event).or_insert(0) += 1);
-//             for &(query_id, expected_count) in queries.iter() {
-//                 let actual_count = *counts.get(&query_id).unwrap_or(&0);
-//                 assert_eq!(
-//                     actual_count,
-//                     expected_count,
-//                     "counts for {} differ",
-//                     query_id,
-//                 )
-//             }
-
-//         }
-//     }
-
-//     #[test]
-//     fn test_parent_module() {
-//         let mut f = Fixture::new();
-//         let foo = f.add_file("/foo.rs", "");
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-//         let lib = f.add_file("/lib.rs", "mod foo;");
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
-
-//         f.change_file(lib, "");
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-//         f.change_file(lib, "mod foo;");
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-
-//         f.change_file(lib, "mod bar;");
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-//         f.change_file(lib, "mod foo;");
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-
-//         f.remove_file(lib);
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
-//     }
-// }
diff --git a/crates/ra_analysis/src/queries.rs b/crates/ra_analysis/src/queries.rs
deleted file mode 100644
index 613bf1e61..000000000
--- a/crates/ra_analysis/src/queries.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-use std::sync::Arc;
-use ra_syntax::File;
-use ra_editor::LineIndex;
-use crate::{
-    FileId,
-    db::{Query, QueryCtx, QueryRegistry},
-    symbol_index::SymbolIndex,
-};
-
-pub(crate) use crate::db::{file_text, file_set};
-
-pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
-    (&*ctx.get(FILE_SYNTAX, file_id)).clone()
-}
-pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
-    ctx.get(FILE_LINES, file_id)
-}
-pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
-    ctx.get(FILE_SYMBOLS, file_id)
-}
-
-const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
-    let text = file_text(ctx, *file_id);
-    File::parse(&*text)
-});
-const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
-    let text = file_text(ctx, *file_id);
-    LineIndex::new(&*text)
-});
-const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
-    let syntax = file_syntax(ctx, *file_id);
-    SymbolIndex::for_file(*file_id, syntax)
-});
-
-pub(crate) fn register_queries(reg: &mut QueryRegistry) {
-    reg.add(FILE_SYNTAX, "FILE_SYNTAX");
-    reg.add(FILE_LINES, "FILE_LINES");
-    reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
-}
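Note on the deleted queries.rs: it was the last piece of the manual registry, with derived queries defined as const Query values carrying hand-assigned ids (16, 17, 18) that had to be registered by name in register_queries. The surviving style is visible in the retained context lines above, `fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId)` and `fn module_tree(db: &impl ModulesDatabase, ...)`: a derived query is simply a function over a database trait. The sketch below shows that shape with simplified stand-ins; SyntaxDatabase here has a single method, SingleFileDb and line_count are invented for illustration, and no caching is shown since that is the database's concern, not the query's.

// Minimal sketch of the database-trait query style, under the assumptions above.
use std::sync::Arc;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct FileId(u32);

// Stand-in for a database trait: it hands out inputs (file text), and derived
// queries are written as free functions over `&impl SyntaxDatabase`.
trait SyntaxDatabase {
    fn file_text(&self, file_id: FileId) -> Arc<String>;
}

// A derived query: computed purely from what the database returns, so it never
// needs a registry entry or a hand-assigned query id.
fn line_count(db: &impl SyntaxDatabase, file_id: FileId) -> usize {
    db.file_text(file_id).lines().count()
}

// A trivial in-memory database, for illustration only.
struct SingleFileDb {
    text: Arc<String>,
}

impl SyntaxDatabase for SingleFileDb {
    fn file_text(&self, _file_id: FileId) -> Arc<String> {
        self.text.clone()
    }
}

fn main() {
    let db = SingleFileDb { text: Arc::new("mod foo;\nmod bar;\n".to_string()) };
    println!("{}", line_count(&db, FileId(1))); // prints 2
}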