From d59413c895e7b49ed2ad01be35871e417a57a43c Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 15 Sep 2018 17:21:47 +0300
Subject: yet another db api

---
 crates/libanalysis/src/db/imp.rs            | 155 ++++++++++++++++++++
 crates/libanalysis/src/db/mod.rs            | 214 ++++++++--------------------
 crates/libanalysis/src/db/queries.rs        |  43 ------
 crates/libanalysis/src/module_map_db/mod.rs | 108 +++++++-------
 4 files changed, 265 insertions(+), 255 deletions(-)
 create mode 100644 crates/libanalysis/src/db/imp.rs
 delete mode 100644 crates/libanalysis/src/db/queries.rs

diff --git a/crates/libanalysis/src/db/imp.rs b/crates/libanalysis/src/db/imp.rs
new file mode 100644
index 000000000..1b4ee5cf3
--- /dev/null
+++ b/crates/libanalysis/src/db/imp.rs
@@ -0,0 +1,155 @@
+use std::{
+    sync::Arc,
+    any::Any,
+    hash::{Hash, Hasher},
+    collections::hash_map::{DefaultHasher, HashMap},
+    iter,
+};
+use salsa;
+use {FileId, imp::FileResolverImp};
+use super::{State, Query, QueryCtx};
+
+pub(super) type Data = Arc<Any + Send + Sync + 'static>;
+
+#[derive(Debug)]
+pub(super) struct Db {
+    names: Arc<HashMap<salsa::QueryTypeId, &'static str>>,
+    pub(super) imp: salsa::Db<State, Data>,
+}
+
+impl Db {
+    pub(super) fn new(mut reg: QueryRegistry) -> Db {
+        let config = reg.config.take().unwrap();
+        Db {
+            names: Arc::new(reg.names),
+            imp: salsa::Db::new(config, State::default())
+        }
+    }
+    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
+        let names = self.names.clone();
+        let mut invalidations = salsa::Invalidations::new();
+        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
+        if resolver_changed {
+            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
+        } else {
+            invalidations.invalidate(FILE_SET, iter::empty());
+        }
+        let imp = self.imp.with_ground_data(
+            new_state,
+            invalidations,
+        );
+        Db { names, imp }
+    }
+    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
+        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
+    }
+}
+
+pub(crate) trait EvalQuery {
+    type Params;
+    type Output;
+    fn query_type(&self) -> salsa::QueryTypeId;
+    fn f(&self) -> salsa::QueryFn<State, Data>;
+    fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
+}
+
+impl<T, R> EvalQuery for Query<T, R>
+where
+    T: Hash + Send + Sync + 'static,
+    R: Hash + Send + Sync + 'static,
+{
+    type Params = T;
+    type Output = R;
+    fn query_type(&self) -> salsa::QueryTypeId {
+        salsa::QueryTypeId(self.0)
+    }
+    fn f(&self) -> salsa::QueryFn<State, Data> {
+        let f = self.1;
+        Box::new(move |ctx, data| {
+            let ctx = QueryCtx { imp: ctx };
+            let data: &T = data.downcast_ref().unwrap();
+            let res = f(ctx, data);
+            let h = hash(&res);
+            (Arc::new(res), salsa::OutputFingerprint(h))
+        })
+    }
+    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
+        let query_id = salsa::QueryId(
+            self.query_type(),
+            salsa::InputFingerprint(hash(&params)),
+        );
+        let res = ctx.imp.get(query_id, Arc::new(params));
+        res.downcast().unwrap()
+    }
+}
+
+pub(super) struct QueryRegistry {
+    config: Option<salsa::QueryConfig<State, Data>>,
+    names: HashMap<salsa::QueryTypeId, &'static str>,
+}
+
+impl QueryRegistry {
+    pub(super) fn new() -> QueryRegistry {
+        let mut config = salsa::QueryConfig::<State, Data>::new();
+        config = config.with_ground_query(
+            FILE_TEXT, Box::new(|state, params| {
+                let file_id: &FileId = params.downcast_ref().unwrap();
+                let res = state.file_map[file_id].clone();
+                let fingerprint = salsa::OutputFingerprint(hash(&res));
+                (res, fingerprint)
+            })
+        );
+        config = config.with_ground_query(
+            FILE_SET, Box::new(|state, _params| {
+                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
+                let hash = hash(&file_ids);
+                let file_resolver = state.file_resolver.clone();
+                let res = (file_ids, file_resolver);
+                let fingerprint = salsa::OutputFingerprint(hash);
+                (Arc::new(res), fingerprint)
+            })
+        );
+        let mut names = HashMap::new();
+        names.insert(FILE_TEXT, "FILE_TEXT");
+        names.insert(FILE_SET, "FILE_SET");
+        QueryRegistry { config: Some(config), names }
+    }
+    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
+        let id = q.query_type();
+        let prev = self.names.insert(id, name);
+        assert!(prev.is_none(), "duplicate query: {:?}", id);
+        let config = self.config.take().unwrap();
+        let config = config.with_query(id, q.f());
+        self.config = Some(config);
+    }
+    pub(super) fn finish(mut self) -> salsa::QueryConfig<State, Data> {
+        self.config.take().unwrap()
+    }
+}
+
+fn hash<T: Hash>(x: &T) -> u64 {
+    let mut hasher = DefaultHasher::new();
+    x.hash(&mut hasher);
+    hasher.finish()
+}
+
+const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
+pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
+    let query_id = salsa::QueryId(
+        FILE_TEXT,
+        salsa::InputFingerprint(hash(&file_id)),
+    );
+    let res = ctx.imp.get(query_id, Arc::new(file_id));
+    res.downcast().unwrap()
+}
+
+const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
+pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
+    let query_id = salsa::QueryId(
+        FILE_SET,
+        salsa::InputFingerprint(hash(&())),
+    );
+    let res = ctx.imp.get(query_id, Arc::new(()));
+    res.downcast().unwrap()
+}
+
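
For orientation, this is roughly how a derived query is meant to be declared and registered against the machinery above: a Query<T, R> pairs a numeric id with a plain function, and QueryRegistry::add stores both the boxed evaluation function and a human-readable name that extract_trace later reports. The sketch below is an illustration only and is not part of the patch; LINE_COUNT, its id 99, and register_example are made-up names.

    // Hypothetical derived query: count the lines of a file.
    // Params and output must be Hash + Send + Sync + 'static (see EvalQuery above).
    const LINE_COUNT: Query<FileId, usize> = Query(99, |ctx, &file_id| {
        let text = file_text(ctx, file_id);
        text.lines().count()
    });

    fn register_example(reg: &mut QueryRegistry) {
        // the string is what extract_trace() reports when the query executes
        reg.add(LINE_COUNT, "LINE_COUNT");
    }
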
diff --git a/crates/libanalysis/src/db/mod.rs b/crates/libanalysis/src/db/mod.rs
index 38ba40273..a775b5f75 100644
--- a/crates/libanalysis/src/db/mod.rs
+++ b/crates/libanalysis/src/db/mod.rs
@@ -1,195 +1,99 @@
-mod queries;
+mod imp;
 
 use std::{
-    hash::{Hash},
     sync::Arc,
-    fmt::Debug,
-    any::Any,
-    iter,
 };
 
 use im;
 use salsa;
-use {
-    FileId,
-    imp::{FileResolverImp},
-};
-
+use {FileId, imp::FileResolverImp};
 
-#[derive(Clone, Default)]
+#[derive(Debug, Default, Clone)]
 pub(crate) struct State {
-    pub(crate) resolver: FileResolverImp,
-    pub(crate) file_map: im::HashMap<FileId, Arc<str>>,
-}
-
-type Data = Arc<Any + Send + Sync>;
-
-pub(crate) struct QueryCtx<'a> {
-    inner: &'a salsa::QueryCtx<State, Data>
+    pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
+    pub(crate) file_resolver: FileResolverImp
 }
 
+#[derive(Debug)]
 pub(crate) struct Db {
-    inner: salsa::Db<State, Data>
+    imp: imp::Db,
 }
 
-struct GroundQuery<T, R> {
-    id: u16,
-    f: fn(&State, &T) -> R,
-    h: fn(&R) -> u64,
+#[derive(Clone, Copy)]
+pub(crate) struct QueryCtx<'a> {
+    imp: &'a salsa::QueryCtx<State, imp::Data>,
 }
 
-pub(crate) struct Query<T, R> {
-    pub(crate) id: u16,
-    pub(crate) f: fn(QueryCtx, &T) -> R,
+pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
+
+pub(crate) struct QueryRegistry {
+    imp: imp::QueryRegistry,
 }
 
 impl Db {
     pub(crate) fn new() -> Db {
-        let state = Default::default();
-        Db { inner: salsa::Db::new(query_config(), state) }
+        let reg = QueryRegistry::new();
+        Db { imp: imp::Db::new(reg.imp) }
     }
     pub(crate) fn state(&self) -> &State {
-        self.inner.ground_data()
+        self.imp.imp.ground_data()
     }
-    pub(crate) fn with_state(
-        &self,
-        new_state: State,
-        updated_files: &[FileId],
-        file_set_changed: bool,
-    ) -> Db {
-        let mut inv = salsa::Invalidations::new();
-        if file_set_changed {
-            inv.invalidate(
-                salsa::QueryTypeId(queries::FILE_SET.id),
-                iter::once(salsa::InputFingerprint(hash(&()))),
-            );
-        } else {
-            inv.invalidate(
-                salsa::QueryTypeId(queries::FILE_SET.id),
-                iter::empty(),
-            );
-        }
-        inv.invalidate(
-            salsa::QueryTypeId(queries::FILE_TEXT.id),
-            updated_files.iter().map(hash).map(salsa::InputFingerprint),
-        );
-        Db { inner: self.inner.with_ground_data(new_state, inv) }
+    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
+        Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
     }
-    pub(crate) fn get<T, R>(&self, q: Query<T, R>, params: T) -> (Arc<R>, Vec<u16>)
-    where
-        T: Hash + Send + Sync + 'static,
-        R: Send + Sync + 'static,
-    {
-        let query_id = salsa::QueryId(
-            salsa::QueryTypeId(q.id),
-            salsa::InputFingerprint(hash(&params)),
-        );
-        let params = Arc::new(params);
-        let (res, events) = self.inner.get(query_id, params);
-        let res = res.downcast().unwrap();
-        let events = events.into_iter().map(|it| it.0).collect();
-        (res, events)
+    pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
+        let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
+        f(ctx)
+    }
+    pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
+        let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
+        let res = f(ctx);
+        let trace = self.imp.extract_trace(ctx.imp);
+        (res, trace)
     }
-
 }
 
 impl<'a> QueryCtx<'a> {
-    fn get_g<T, R>(&self, q: GroundQuery<T, R>, params: T) -> Arc<R>
-    where
-        T: Hash + Send + Sync + 'static,
-        R: Send + Sync + 'static,
-    {
-        let query_id = salsa::QueryId(
-            salsa::QueryTypeId(q.id),
-            salsa::InputFingerprint(hash(&params)),
-        );
-        let res = self.inner.get(query_id, Arc::new(params));
-        res.downcast().unwrap()
-    }
-    pub(crate) fn get<T, R>(&self, q: Query<T, R>, params: T) -> Arc<R>
-    where
-        T: Hash + Send + Sync + 'static,
-        R: Send + Sync + 'static,
-    {
-        let query_id = salsa::QueryId(
-            salsa::QueryTypeId(q.id),
-            salsa::InputFingerprint(hash(&params)),
-        );
-        let res = self.inner.get(query_id, Arc::new(params));
-        res.downcast().unwrap()
+    pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
+        q.get(self, params)
     }
 }
 
-fn query_config() -> salsa::QueryConfig<State, Data> {
-    let mut res = salsa::QueryConfig::new();
-    let queries: Vec<BoxedGroundQuery> = vec![
-        queries::FILE_TEXT.into(),
-        queries::FILE_SET.into(),
-    ];
-    for q in queries {
-        res = res.with_ground_query(q.query_type, q.f)
-    }
-    let mut queries: Vec<BoxedQuery> = vec![
-        queries::FILE_SYNTAX.into(),
-    ];
-    ::module_map_db::queries(&mut queries);
-    for q in queries {
-        res = res.with_query(q.query_type, q.f);
-    }
-    res
+pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
+    imp::file_text(ctx, file_id)
 }
 
-struct BoxedGroundQuery {
-    query_type: salsa::QueryTypeId,
-    f: Box<Fn(&State, &Data) -> (Data, salsa::OutputFingerprint) + Send + Sync + 'static>,
+pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
+    imp::file_set(ctx)
 }
+pub(crate) use self::queries::file_syntax;
 
-impl<T, R> From<GroundQuery<T, R>> for BoxedGroundQuery
-where
-    T: Send + Sync + 'static,
-    R: Send + Sync + 'static,
-{
-    fn from(q: GroundQuery<T, R>) -> BoxedGroundQuery
-    {
-        BoxedGroundQuery {
-            query_type: salsa::QueryTypeId(q.id),
-            f: Box::new(move |state, data| {
-                let data: &T = data.downcast_ref().unwrap();
-                let res = (q.f)(state, data);
-                let h = (q.h)(&res);
-                (Arc::new(res), salsa::OutputFingerprint(h))
-            })
-        }
-    }
-}
+mod queries {
+    use libsyntax2::File;
+    use {FileId};
+    use super::{Query, QueryCtx, QueryRegistry, file_text};
 
-pub(crate) struct BoxedQuery {
-    query_type: salsa::QueryTypeId,
-    f: Box<Fn(&salsa::QueryCtx<State, Data>, &Data) -> (Data, salsa::OutputFingerprint) + Send + Sync + 'static>,
-}
+    pub(crate) fn register_queries(reg: &mut QueryRegistry) {
+        reg.add(FILE_SYNTAX, "FILE_SYNTAX")
+    }
 
-impl<T, R> From<Query<T, R>> for BoxedQuery
-where
-    T: Hash + Send + Sync + 'static,
-    R: Hash + Send + Sync + 'static,
-{
-    fn from(q: Query<T, R>) -> BoxedQuery
-    {
-        BoxedQuery {
-            query_type: salsa::QueryTypeId(q.id),
-            f: Box::new(move |ctx, data| {
-                let ctx = QueryCtx { inner: ctx };
-                let data: &T = data.downcast_ref().unwrap();
-                let res = (q.f)(ctx, data);
-                let h = hash(&res);
-                (Arc::new(res), salsa::OutputFingerprint(h))
-            })
-        }
+    pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
+        (&*ctx.get(FILE_SYNTAX, file_id)).clone()
     }
+
+    pub(super) const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
+        let text = file_text(ctx, *file_id);
+        File::parse(&*text)
+    });
 }
 
-fn hash<T: Hash>(x: &T) -> u64 {
-    use std::hash::Hasher;
-    let mut hasher = ::std::collections::hash_map::DefaultHasher::new();
-    ::std::hash::Hash::hash(x, &mut hasher);
-    hasher.finish()
+impl QueryRegistry {
+    fn new() -> QueryRegistry {
+        let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
+        queries::register_queries(&mut reg);
+        ::module_map_db::register_queries(&mut reg);
+        reg
+    }
+    pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
+        self.imp.add(q, name)
+    }
 }
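
At the call site, consumers now go through make_query or trace_query instead of the old Db::get. A minimal caller-side sketch (hypothetical helper, not part of the patch; File is libsyntax2's parse tree type):

    // Hypothetical helper: parse a file and report which queries actually ran,
    // using the names registered in QueryRegistry (e.g. "FILE_SYNTAX").
    fn parse_traced(db: &Db, file_id: FileId) -> (libsyntax2::File, Vec<&'static str>) {
        db.trace_query(|ctx| file_syntax(ctx, file_id))
    }
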
diff --git a/crates/libanalysis/src/db/queries.rs b/crates/libanalysis/src/db/queries.rs
deleted file mode 100644
index 2d4aac6e9..000000000
--- a/crates/libanalysis/src/db/queries.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-use std::sync::Arc;
-use libsyntax2::{File};
-use {
-    FileId, FileResolverImp,
-    db::{Query, GroundQuery, QueryCtx, hash},
-};
-
-
-impl<'a> QueryCtx<'a> {
-    pub(crate) fn file_set(&self) -> Arc<(Vec<FileId>, FileResolverImp)> {
-        self.get_g(FILE_SET, ())
-    }
-    pub(crate) fn file_text(&self, file_id: FileId) -> Arc<str> {
-        Arc::clone(&*self.get_g(FILE_TEXT, file_id))
-    }
-    pub(crate) fn file_syntax(&self, file_id: FileId) -> File {
-        (&*self.get(FILE_SYNTAX, file_id)).clone()
-    }
-}
-
-pub(super) const FILE_TEXT: GroundQuery<FileId, Arc<str>> = GroundQuery {
-    id: 10,
-    f: |state, id| state.file_map[&id].clone(),
-    h: hash,
-};
-
-pub(super) const FILE_SET: GroundQuery<(), (Vec<FileId>, FileResolverImp)> = GroundQuery {
-    id: 11,
-    f: |state, &()| {
-        let files = state.file_map.keys().cloned().collect();
-        let resolver = state.resolver.clone();
-        (files, resolver)
-    },
-    h: |(files, _)| hash(files),
-};
-
-pub(super) const FILE_SYNTAX: Query<FileId, File> = Query {
-    id: 20,
-    f: |ctx, file_id: &FileId| {
-        let text = ctx.file_text(*file_id);
-        File::parse(&*text)
-    }
-};
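
The removal is mechanical: the same queries survive, but numeric-id struct literals give way to the tuple form plus name-based registration. Side by side, using FILE_SYNTAX as it appears elsewhere in this patch (nothing new is introduced here, only the two declaration styles juxtaposed):

    // before (db/queries.rs, deleted above):
    // pub(super) const FILE_SYNTAX: Query<FileId, File> = Query {
    //     id: 20,
    //     f: |ctx, file_id: &FileId| {
    //         let text = ctx.file_text(*file_id);
    //         File::parse(&*text)
    //     }
    // };

    // after (db/mod.rs, added above), registered via reg.add(FILE_SYNTAX, "FILE_SYNTAX"):
    pub(super) const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
        let text = file_text(ctx, *file_id);
        File::parse(&*text)
    });
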
diff --git a/crates/libanalysis/src/module_map_db/mod.rs b/crates/libanalysis/src/module_map_db/mod.rs
index 02ac06c5f..5560e4a34 100644
--- a/crates/libanalysis/src/module_map_db/mod.rs
+++ b/crates/libanalysis/src/module_map_db/mod.rs
@@ -4,15 +4,16 @@ use std::sync::Arc;
 use {
     FileId,
     db::{
-        BoxedQuery, Query, QueryCtx
+        Query, QueryRegistry, QueryCtx,
+        file_syntax, file_set
     },
     module_map::resolve_submodule,
 };
 
-pub(crate) fn queries(acc: &mut Vec<BoxedQuery>) {
-    acc.push(MODULE_DESCR.into());
-    acc.push(RESOLVE_SUBMODULE.into());
-    acc.push(PARENT_MODULE.into());
+pub(crate) fn register_queries(reg: &mut QueryRegistry) {
+    reg.add(MODULE_DESCR, "MODULE_DESCR");
+    reg.add(RESOLVE_SUBMODULE, "RESOLVE_SUBMODULE");
+    reg.add(PARENT_MODULE, "PARENT_MODULE");
 }
 
 impl<'a> QueryCtx<'a> {
@@ -24,41 +25,32 @@ impl<'a> QueryCtx<'a> {
     }
 }
 
-pub(crate) const MODULE_DESCR: Query<FileId, descr::ModuleDescr> = Query {
-    id: 30,
-    f: |ctx, &file_id| {
-        let file = ctx.file_syntax(file_id);
-        descr::ModuleDescr::new(file.ast())
-    }
-};
-
-pub(crate) const RESOLVE_SUBMODULE: Query<(FileId, descr::Submodule), Vec<FileId>> = Query {
-    id: 31,
-    f: |ctx, params| {
-        let files = ctx.file_set();
-        resolve_submodule(params.0, &params.1.name, &files.1).0
-    }
-};
-
-pub(crate) const PARENT_MODULE: Query<FileId, Vec<FileId>> = Query {
-    id: 40,
-    f: |ctx, file_id| {
-        let files = ctx.file_set();
-        let res = files.0.iter()
-            .map(|&parent_id| (parent_id, ctx.module_descr(parent_id)))
-            .filter(|(parent_id, descr)| {
-                descr.submodules.iter()
-                    .any(|subm| {
-                        ctx.resolve_submodule(*parent_id, subm.clone())
-                            .iter()
-                            .any(|it| it == file_id)
-                    })
-            })
-            .map(|(id, _)| id)
-            .collect();
-        res
-    }
-};
+const MODULE_DESCR: Query<FileId, descr::ModuleDescr> = Query(30, |ctx, &file_id| {
+    let file = file_syntax(ctx, file_id);
+    descr::ModuleDescr::new(file.ast())
+});
+
+const RESOLVE_SUBMODULE: Query<(FileId, descr::Submodule), Vec<FileId>> = Query(31, |ctx, params| {
+    let files = file_set(ctx);
+    resolve_submodule(params.0, &params.1.name, &files.1).0
+});
+
+const PARENT_MODULE: Query<FileId, Vec<FileId>> = Query(40, |ctx, file_id| {
+    let files = file_set(ctx);
+    let res = files.0.iter()
+        .map(|&parent_id| (parent_id, ctx.module_descr(parent_id)))
+        .filter(|(parent_id, descr)| {
+            descr.submodules.iter()
+                .any(|subm| {
+                    ctx.resolve_submodule(*parent_id, subm.clone())
+                        .iter()
+                        .any(|it| it == file_id)
+                })
+        })
+        .map(|(id, _)| id)
+        .collect();
+    res
+});
 
 #[cfg(test)]
 mod tests {
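
The tests below drive these queries through Db::with_changes; its third argument says whether the set of files (and hence FILE_SET) changed, not just some file's text. A hypothetical helper, mirroring the fixture's change_file below and not part of the patch, shows the intended call shape:

    // Edit one file's text without touching the set of files: only FILE_TEXT's
    // fingerprint for that file is invalidated, FILE_SET stays valid.
    fn edit_file(db: &db::Db, file_id: FileId, new_text: &str) -> db::Db {
        let mut state = db.state().clone();
        state.file_map.insert(file_id, Arc::new(new_text.to_string()));
        db.with_changes(state, &[file_id], false)
    }
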
@@ -107,34 +99,36 @@ mod tests {
         self.next_file_id += 1;
         self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
         let mut new_state = self.db.state().clone();
-        new_state.file_map.insert(file_id, text.to_string().into_boxed_str().into());
-        new_state.resolver = FileResolverImp::new(
+        new_state.file_map.insert(file_id, Arc::new(text.to_string()));
+        new_state.file_resolver = FileResolverImp::new(
             Arc::new(FileMap(self.fm.clone()))
         );
-        self.db = self.db.with_state(new_state, &[file_id], true);
+        self.db = self.db.with_changes(new_state, &[file_id], true);
         file_id
     }
     fn remove_file(&mut self, file_id: FileId) {
         self.fm.remove(&file_id);
         let mut new_state = self.db.state().clone();
         new_state.file_map.remove(&file_id);
-        new_state.resolver = FileResolverImp::new(
+        new_state.file_resolver = FileResolverImp::new(
             Arc::new(FileMap(self.fm.clone()))
         );
-        self.db = self.db.with_state(new_state, &[file_id], true);
+        self.db = self.db.with_changes(new_state, &[file_id], true);
     }
     fn change_file(&mut self, file_id: FileId, new_text: &str) {
         let mut new_state = self.db.state().clone();
-        new_state.file_map.insert(file_id, new_text.to_string().into_boxed_str().into());
-        self.db = self.db.with_state(new_state, &[file_id], false);
+        new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
+        self.db = self.db.with_changes(new_state, &[file_id], false);
     }
     fn check_parent_modules(
         &self,
         file_id: FileId,
         expected: &[FileId],
-        queries: &[(u16, u64)]
+        queries: &[(&'static str, u64)]
     ) {
-        let (actual, events) = self.db.get(PARENT_MODULE, file_id);
+        let (actual, events) = self.db.trace_query(|ctx| {
+            ctx.get(PARENT_MODULE, file_id)
+        });
         assert_eq!(actual.as_slice(), expected);
         let mut counts = HashMap::new();
         events.into_iter()
&[("MODULE_DESCR", 1)]); f.change_file(lib, "mod foo;"); - f.check_parent_modules(foo, &[lib], &[(MODULE_DESCR.id, 1)]); + f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]); f.change_file(lib, "mod bar;"); - f.check_parent_modules(foo, &[], &[(MODULE_DESCR.id, 1)]); + f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]); f.change_file(lib, "mod foo;"); - f.check_parent_modules(foo, &[lib], &[(MODULE_DESCR.id, 1)]); + f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]); f.remove_file(lib); - f.check_parent_modules(foo, &[], &[(MODULE_DESCR.id, 0)]); + f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]); } } -- cgit v1.2.3