From d783371b862c15d6d402d68e8edce4e37d63796c Mon Sep 17 00:00:00 2001
From: Aleksey Kladov <aleksey.kladov@gmail.com>
Date: Mon, 8 Oct 2018 13:18:47 +0300
Subject: migrate module map to salsa

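Replace the hand-rolled Query/QueryRegistry machinery in module_map.rs with a
salsa query group: ModuleTreeQuery and ModuleDescriptorQuery are declared via
salsa::query_group! and registered in salsa::database_storage! in db/mod.rs.
WritableSourceRoot now answers module_tree() through the database instead of
unimplemented!(), roots.rs always refreshes the FileSet input (reusing the old
resolver when none is supplied), and the descriptor types derive PartialEq/Eq
so salsa can compare query results. The old fixture-based tests are commented
out until they are ported to the new database.

A minimal sketch of the new query surface (the declarations are taken verbatim
from module_map.rs below; the parent_modules_of helper is illustrative only,
modelled on the commented-out test code):

    salsa::query_group! {
        pub(crate) trait ModulesDatabase: SyntaxDatabase {
            // Derived query: whole-project module tree, keyed by unit.
            fn module_tree(key: ()) -> Arc<ModuleTreeDescriptor> {
                type ModuleTreeQuery;
            }
            // Derived query: per-file module descriptor.
            fn module_descriptor(file_id: FileId) -> Arc<ModuleDescriptor> {
                type ModuleDescriptorQuery;
            }
        }
    }

    // Any database implementing ModulesDatabase gets memoized access:
    fn parent_modules_of(db: &impl ModulesDatabase, file_id: FileId) -> Vec<FileId> {
        let tree = db.module_tree(());
        tree.parent_modules(file_id)
            .into_iter()
            .map(|link| link.owner(&tree))
            .collect()
    }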
---
 crates/ra_analysis/src/db/mod.rs      |   7 +-
 crates/ra_analysis/src/descriptors.rs |  10 +-
 crates/ra_analysis/src/lib.rs         |   2 +-
 crates/ra_analysis/src/module_map.rs  | 286 +++++++++++++++++-----------------
 crates/ra_analysis/src/roots.rs       |  32 ++--
 5 files changed, 172 insertions(+), 165 deletions(-)

(limited to 'crates/ra_analysis/src')

diff --git a/crates/ra_analysis/src/db/mod.rs b/crates/ra_analysis/src/db/mod.rs
index 4b3e0fc90..c54d50252 100644
--- a/crates/ra_analysis/src/db/mod.rs
+++ b/crates/ra_analysis/src/db/mod.rs
@@ -9,7 +9,8 @@ use ra_syntax::File;
 use ra_editor::{LineIndex};
 use crate::{
     symbol_index::SymbolIndex,
-    FileId, FileResolverImp
+    module_map::{ModulesDatabase, ModuleTreeQuery, ModuleDescriptorQuery},
+    FileId, FileResolverImp,
 };
 
 #[derive(Default)]
@@ -40,6 +41,10 @@ salsa::database_storage! {
             fn file_lines() for FileLinesQuery;
             fn file_symbols() for FileSymbolsQuery;
         }
+        impl ModulesDatabase {
+            fn module_tree() for ModuleTreeQuery;
+            fn module_descriptor() for ModuleDescriptorQuery;
+        }
     }
 }
 
diff --git a/crates/ra_analysis/src/descriptors.rs b/crates/ra_analysis/src/descriptors.rs
index f26dac875..8d9f38ca5 100644
--- a/crates/ra_analysis/src/descriptors.rs
+++ b/crates/ra_analysis/src/descriptors.rs
@@ -12,7 +12,7 @@ use crate::{
     imp::FileResolverImp,
 };
 
-#[derive(Debug, Hash)]
+#[derive(Debug, PartialEq, Eq, Hash)]
 pub struct ModuleDescriptor {
     pub submodules: Vec<Submodule>
 }
@@ -43,7 +43,7 @@ pub struct Submodule {
     pub name: SmolStr,
 }
 
-#[derive(Hash, Debug)]
+#[derive(Debug, PartialEq, Eq, Hash)]
 pub(crate) struct ModuleTreeDescriptor {
     nodes: Vec<NodeData>,
     links: Vec<LinkData>,
@@ -52,7 +52,7 @@ pub(crate) struct ModuleTreeDescriptor {
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 struct Node(usize);
-#[derive(Hash, Debug)]
+#[derive(Hash, Debug, PartialEq, Eq)]
 struct NodeData {
     file_id: FileId,
     links: Vec<Link>,
@@ -61,7 +61,7 @@ struct NodeData {
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 pub(crate) struct Link(usize);
-#[derive(Hash, Debug)]
+#[derive(Hash, Debug, PartialEq, Eq)]
 struct LinkData {
     owner: Node,
     name: SmolStr,
@@ -70,7 +70,7 @@ struct LinkData {
 }
 
 
-#[derive(Clone, Debug, Hash)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
 pub enum Problem {
     UnresolvedModule {
         candidate: RelativePathBuf,
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 4c1ccdeaf..d49132513 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -14,7 +14,7 @@ extern crate salsa;
 extern crate rustc_hash;
 
 mod symbol_index;
-// mod module_map;
+mod module_map;
 mod imp;
 mod job;
 mod roots;
diff --git a/crates/ra_analysis/src/module_map.rs b/crates/ra_analysis/src/module_map.rs
index c77c5cec6..95a770ae7 100644
--- a/crates/ra_analysis/src/module_map.rs
+++ b/crates/ra_analysis/src/module_map.rs
@@ -1,157 +1,157 @@
 use std::sync::Arc;
 use crate::{
     FileId,
-    db::{
-        Query, QueryRegistry, QueryCtx,
-        file_set
-    },
-    queries::file_syntax,
+    db::{SyntaxDatabase},
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
 };
 
-pub(crate) fn register_queries(reg: &mut QueryRegistry) {
-    reg.add(MODULE_DESCR, "MODULE_DESCR");
-    reg.add(MODULE_TREE, "MODULE_TREE");
-}
-
-pub(crate) fn module_tree(ctx: QueryCtx) -> Arc<ModuleTreeDescriptor> {
-    ctx.get(MODULE_TREE, ())
-}
-
-const MODULE_DESCR: Query<FileId, ModuleDescriptor> = Query(30, |ctx, &file_id| {
-    let file = file_syntax(ctx, file_id);
-    ModuleDescriptor::new(file.ast())
-});
-
-const MODULE_TREE: Query<(), ModuleTreeDescriptor> = Query(31, |ctx, _| {
-    let file_set = file_set(ctx);
-    let mut files = Vec::new();
-    for &file_id in file_set.0.iter() {
-        let module_descr = ctx.get(MODULE_DESCR, file_id);
-        files.push((file_id, module_descr));
-    }
-    ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.1)
-});
-
-#[cfg(test)]
-mod tests {
-    use std::collections::HashMap;
-    use im;
-    use relative_path::{RelativePath, RelativePathBuf};
-    use crate::{
-        db::{Db},
-        imp::FileResolverImp,
-        FileId, FileResolver,
-    };
-    use super::*;
-
-    #[derive(Debug)]
-    struct FileMap(im::HashMap<FileId, RelativePathBuf>);
-
-    impl FileResolver for FileMap {
-        fn file_stem(&self, file_id: FileId) -> String {
-            self.0[&file_id].file_stem().unwrap().to_string()
+salsa::query_group! {
+    pub(crate) trait ModulesDatabase: SyntaxDatabase {
+        fn module_tree(key: ()) -> Arc<ModuleTreeDescriptor> {
+            type ModuleTreeQuery;
         }
-        fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
-            let path = self.0[&file_id].join(rel).normalize();
-            self.0.iter()
-                .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
-                .next()
+        fn module_descriptor(file_id: FileId) -> Arc<ModuleDescriptor> {
+            type ModuleDescriptorQuery;
         }
     }
+}
 
-    struct Fixture {
-        next_file_id: u32,
-        fm: im::HashMap<FileId, RelativePathBuf>,
-        db: Db,
-    }
-
-    impl Fixture {
-        fn new() -> Fixture {
-            Fixture {
-                next_file_id: 1,
-                fm: im::HashMap::new(),
-                db: Db::new(),
-            }
-        }
-        fn add_file(&mut self, path: &str, text: &str) -> FileId {
-            assert!(path.starts_with("/"));
-            let file_id = FileId(self.next_file_id);
-            self.next_file_id += 1;
-            self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
-            let mut new_state = self.db.state().clone();
-            new_state.file_map.insert(file_id, Arc::new(text.to_string()));
-            new_state.file_resolver = FileResolverImp::new(
-                Arc::new(FileMap(self.fm.clone()))
-            );
-            self.db = self.db.with_changes(new_state, &[file_id], true);
-            file_id
-        }
-        fn remove_file(&mut self, file_id: FileId) {
-            self.fm.remove(&file_id);
-            let mut new_state = self.db.state().clone();
-            new_state.file_map.remove(&file_id);
-            new_state.file_resolver = FileResolverImp::new(
-                Arc::new(FileMap(self.fm.clone()))
-            );
-            self.db = self.db.with_changes(new_state, &[file_id], true);
-        }
-        fn change_file(&mut self, file_id: FileId, new_text: &str) {
-            let mut new_state = self.db.state().clone();
-            new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
-            self.db = self.db.with_changes(new_state, &[file_id], false);
-        }
-        fn check_parent_modules(
-            &self,
-            file_id: FileId,
-            expected: &[FileId],
-            queries: &[(&'static str, u64)]
-        ) {
-            let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
-            let actual = tree.parent_modules(file_id)
-                .into_iter()
-                .map(|link| link.owner(&tree))
-                .collect::<Vec<_>>();
-            assert_eq!(actual.as_slice(), expected);
-            let mut counts = HashMap::new();
-            events.into_iter()
-               .for_each(|event| *counts.entry(event).or_insert(0) += 1);
-            for &(query_id, expected_count) in queries.iter() {
-                let actual_count = *counts.get(&query_id).unwrap_or(&0);
-                assert_eq!(
-                    actual_count,
-                    expected_count,
-                    "counts for {} differ",
-                    query_id,
-                )
-            }
-
-        }
-    }
-
-    #[test]
-    fn test_parent_module() {
-        let mut f = Fixture::new();
-        let foo = f.add_file("/foo.rs", "");
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-        let lib = f.add_file("/lib.rs", "mod foo;");
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
-
-        f.change_file(lib, "");
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-        f.change_file(lib, "mod foo;");
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-
-        f.change_file(lib, "mod bar;");
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
 
-        f.change_file(lib, "mod foo;");
-        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
+fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Arc<ModuleDescriptor> {
+    let file = db.file_syntax(file_id);
+    Arc::new(ModuleDescriptor::new(file.ast()))
+}
 
-        f.remove_file(lib);
-        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
+fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
+    let file_set = db.file_set(());
+    let mut files = Vec::new();
+    for &file_id in file_set.files.iter() {
+        let module_descr = db.module_descriptor(file_id);
+        files.push((file_id, module_descr));
     }
+    let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
+    Arc::new(res)
 }
+
+// #[cfg(test)]
+// mod tests {
+//     use std::collections::HashMap;
+//     use im;
+//     use relative_path::{RelativePath, RelativePathBuf};
+//     use {
+//         db::{Db},
+//         imp::FileResolverImp,
+//         FileId, FileResolver,
+//     };
+//     use super::*;
+
+//     #[derive(Debug)]
+//     struct FileMap(im::HashMap<FileId, RelativePathBuf>);
+
+//     impl FileResolver for FileMap {
+//         fn file_stem(&self, file_id: FileId) -> String {
+//             self.0[&file_id].file_stem().unwrap().to_string()
+//         }
+//         fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
+//             let path = self.0[&file_id].join(rel).normalize();
+//             self.0.iter()
+//                 .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
+//                 .next()
+//         }
+//     }
+
+//     struct Fixture {
+//         next_file_id: u32,
+//         fm: im::HashMap<FileId, RelativePathBuf>,
+//         db: Db,
+//     }
+
+//     impl Fixture {
+//         fn new() -> Fixture {
+//             Fixture {
+//                 next_file_id: 1,
+//                 fm: im::HashMap::new(),
+//                 db: Db::new(),
+//             }
+//         }
+//         fn add_file(&mut self, path: &str, text: &str) -> FileId {
+//             assert!(path.starts_with("/"));
+//             let file_id = FileId(self.next_file_id);
+//             self.next_file_id += 1;
+//             self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
+//             let mut new_state = self.db.state().clone();
+//             new_state.file_map.insert(file_id, Arc::new(text.to_string()));
+//             new_state.file_resolver = FileResolverImp::new(
+//                 Arc::new(FileMap(self.fm.clone()))
+//             );
+//             self.db = self.db.with_changes(new_state, &[file_id], true);
+//             file_id
+//         }
+//         fn remove_file(&mut self, file_id: FileId) {
+//             self.fm.remove(&file_id);
+//             let mut new_state = self.db.state().clone();
+//             new_state.file_map.remove(&file_id);
+//             new_state.file_resolver = FileResolverImp::new(
+//                 Arc::new(FileMap(self.fm.clone()))
+//             );
+//             self.db = self.db.with_changes(new_state, &[file_id], true);
+//         }
+//         fn change_file(&mut self, file_id: FileId, new_text: &str) {
+//             let mut new_state = self.db.state().clone();
+//             new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
+//             self.db = self.db.with_changes(new_state, &[file_id], false);
+//         }
+//         fn check_parent_modules(
+//             &self,
+//             file_id: FileId,
+//             expected: &[FileId],
+//             queries: &[(&'static str, u64)]
+//         ) {
+//             let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
+//             let actual = tree.parent_modules(file_id)
+//                 .into_iter()
+//                 .map(|link| link.owner(&tree))
+//                 .collect::<Vec<_>>();
+//             assert_eq!(actual.as_slice(), expected);
+//             let mut counts = HashMap::new();
+//             events.into_iter()
+//                .for_each(|event| *counts.entry(event).or_insert(0) += 1);
+//             for &(query_id, expected_count) in queries.iter() {
+//                 let actual_count = *counts.get(&query_id).unwrap_or(&0);
+//                 assert_eq!(
+//                     actual_count,
+//                     expected_count,
+//                     "counts for {} differ",
+//                     query_id,
+//                 )
+//             }
+
+//         }
+//     }
+
+//     #[test]
+//     fn test_parent_module() {
+//         let mut f = Fixture::new();
+//         let foo = f.add_file("/foo.rs", "");
+//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
+
+//         let lib = f.add_file("/lib.rs", "mod foo;");
+//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
+//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
+
+//         f.change_file(lib, "");
+//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
+
+//         f.change_file(lib, "mod foo;");
+//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
+
+//         f.change_file(lib, "mod bar;");
+//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
+
+//         f.change_file(lib, "mod foo;");
+//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
+
+//         f.remove_file(lib);
+//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
+//     }
+// }
diff --git a/crates/ra_analysis/src/roots.rs b/crates/ra_analysis/src/roots.rs
index 7a7d1169e..908f49b0a 100644
--- a/crates/ra_analysis/src/roots.rs
+++ b/crates/ra_analysis/src/roots.rs
@@ -16,7 +16,8 @@ use crate::{
     imp::FileResolverImp,
     symbol_index::SymbolIndex,
     descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
-    db::{self, FilesDatabase, SyntaxDatabase}
+    db::{self, FilesDatabase, SyntaxDatabase},
+    module_map::ModulesDatabase,
 };
 
 pub(crate) trait SourceRoot {
@@ -53,17 +54,17 @@ impl WritableSourceRoot {
                 }
             }
         }
-        if let Some(resolver) = file_resolver {
-            let mut files: HashSet<FileId> = db.file_set(())
-                .files
-                .clone();
-            for file_id in removed {
-                files.remove(&file_id);
-            }
-            files.extend(changed);
-            db.query(db::FileSetQuery)
-                .set((), Arc::new(db::FileSet { files, resolver }))
+        let file_set = db.file_set(());
+        let mut files: HashSet<FileId> = file_set
+            .files
+            .clone();
+        for file_id in removed {
+            files.remove(&file_id);
         }
+        files.extend(changed);
+        let resolver = file_resolver.unwrap_or_else(|| file_set.resolver.clone());
+        db.query(db::FileSetQuery)
+            .set((), Arc::new(db::FileSet { files, resolver }));
         // TODO: reconcile salsa's API with our needs
         // https://github.com/salsa-rs/salsa/issues/12
         self.clone()
@@ -72,12 +73,13 @@ impl WritableSourceRoot {
 
 impl SourceRoot for WritableSourceRoot {
     fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
-        unimplemented!()
-        //self.db.make_query(::module_map::module_tree)
+        self.db.read().module_tree(())
     }
-
     fn contains(&self, file_id: FileId) -> bool {
-        self.db.read().file_set(()).files.contains(&file_id)
+        let db = self.db.read();
+        let files = &db.file_set(()).files;
+        eprintln!("files = {:?}", files);
+        files.contains(&file_id)
     }
     fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
         self.db.read().file_lines(file_id)
-- 
cgit v1.2.3