author     bors[bot] <bors[bot]@users.noreply.github.com>   2019-06-16 09:08:10 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>   2019-06-16 09:08:10 +0100
commit     e6fbff3246cdd3278ff1c376d5abfc1d579f86c2 (patch)
tree       736052286d9c0d8d06798165590bdf145a12d783 /crates/ra_batch
parent     ce9ea0939a1ae94a83d56ddafc7aeb757dcda776 (diff)
parent     b0be4207d04b65580e7af10cb256ddd5d9ca006d (diff)
Merge #1406
1406: reuse AnalysisHost in batch analysis r=matklad a=matklad

We do some custom setup in `AnalysisHost`, like setting up the LRU size. I figure it's a good idea not to duplicate this work in batch analysis, *if* we want to keep batch and non-batch close.

Long-term, I see value in keeping batch a separate, lighter-weight thing. However, because we now use batch to measure performance, keeping them closer makes more sense.

I'd also like to add the ability to get completions via batch analysis, and that will require ra_ide_api as well.

@flodiebold was there a reason why we didn't start with this approach from the start?

Co-authored-by: Aleksey Kladov <[email protected]>
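In short, the batch loader stops maintaining its own salsa database and instead builds an `AnalysisHost`, describing the whole workspace as a single `AnalysisChange`. A minimal sketch of that flow, using only the `ra_ide_api` calls that appear in the diff below; the variables `crate_graph`, `source_root_id`, `is_local`, `file_id`, `path`, and `text` stand in for the values the VFS scan produces:

    use ra_ide_api::{AnalysisHost, AnalysisChange};

    // Honour the same RA_LRU_CAP override as the non-batch path; None means
    // the host falls back to its default LRU capacity.
    let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
    let mut host = AnalysisHost::new(lru_cap);

    // Collect the whole workspace into one change set...
    let mut change = AnalysisChange::new();
    change.set_crate_graph(crate_graph);
    change.add_root(source_root_id, is_local);
    change.add_file(source_root_id, file_id, path, text);

    // ...and apply it once the first VFS scan is complete.
    host.apply_change(change);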
Diffstat (limited to 'crates/ra_batch')
 crates/ra_batch/Cargo.toml (-rw-r--r--) |   1
 crates/ra_batch/src/lib.rs (-rw-r--r--) | 164
 2 files changed, 71 insertions(+), 94 deletions(-)
diff --git a/crates/ra_batch/Cargo.toml b/crates/ra_batch/Cargo.toml
index 8bf085bbf..7d8837fc3 100644
--- a/crates/ra_batch/Cargo.toml
+++ b/crates/ra_batch/Cargo.toml
@@ -11,6 +11,7 @@ rustc-hash = "1.0"
11ra_vfs = "0.2.0" 11ra_vfs = "0.2.0"
12ra_syntax = { path = "../ra_syntax" } 12ra_syntax = { path = "../ra_syntax" }
13ra_db = { path = "../ra_db" } 13ra_db = { path = "../ra_db" }
14ra_ide_api = { path = "../ra_ide_api" }
14ra_hir = { path = "../ra_hir" } 15ra_hir = { path = "../ra_hir" }
15ra_project_model = { path = "../ra_project_model" } 16ra_project_model = { path = "../ra_project_model" }
16 17
diff --git a/crates/ra_batch/src/lib.rs b/crates/ra_batch/src/lib.rs
index 96b32d9fe..fa244e86c 100644
--- a/crates/ra_batch/src/lib.rs
+++ b/crates/ra_batch/src/lib.rs
@@ -1,36 +1,19 @@
 mod vfs_filter;
 
-use std::{sync::Arc, path::Path, collections::HashSet, error::Error};
+use std::{path::Path, collections::HashSet, error::Error};
 
 use rustc_hash::FxHashMap;
 
 use ra_db::{
-    CrateGraph, FileId, SourceRoot, SourceRootId, SourceDatabase, salsa::{self, Database},
+    CrateGraph, FileId, SourceRootId,
 };
-use ra_hir::db;
+use ra_ide_api::{AnalysisHost, AnalysisChange};
 use ra_project_model::ProjectWorkspace;
 use ra_vfs::{Vfs, VfsChange};
 use vfs_filter::IncludeRustFiles;
 
 type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
 
-#[salsa::database(
-    ra_db::SourceDatabaseStorage,
-    db::AstDatabaseStorage,
-    db::DefDatabaseStorage,
-    db::HirDatabaseStorage
-)]
-#[derive(Debug)]
-pub struct BatchDatabase {
-    runtime: salsa::Runtime<BatchDatabase>,
-}
-
-impl salsa::Database for BatchDatabase {
-    fn salsa_runtime(&self) -> &salsa::Runtime<BatchDatabase> {
-        &self.runtime
-    }
-}
-
 fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId {
     FileId(f.0)
 }
@@ -38,86 +21,79 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
     SourceRootId(r.0)
 }
 
-impl BatchDatabase {
-    pub fn load(crate_graph: CrateGraph, vfs: &mut Vfs) -> BatchDatabase {
-        let mut db = BatchDatabase { runtime: salsa::Runtime::default() };
-        let lru_cap = std::env::var("RA_LRU_CAP")
-            .ok()
-            .and_then(|it| it.parse::<usize>().ok())
-            .unwrap_or(ra_db::DEFAULT_LRU_CAP);
-        db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_cap);
-        db.query_mut(ra_hir::db::ParseMacroQuery).set_lru_capacity(lru_cap);
-        db.set_crate_graph(Arc::new(crate_graph));
-
-        // wait until Vfs has loaded all roots
-        let receiver = vfs.task_receiver().clone();
-        let mut roots_loaded = HashSet::new();
-        for task in receiver {
-            vfs.handle_task(task);
-            let mut done = false;
-            for change in vfs.commit_changes() {
-                match change {
-                    VfsChange::AddRoot { root, files } => {
-                        let source_root_id = vfs_root_to_id(root);
-                        log::debug!(
-                            "loaded source root {:?} with path {:?}",
-                            source_root_id,
-                            vfs.root2path(root)
-                        );
-                        let mut file_map = FxHashMap::default();
-                        for (vfs_file, path, text) in files {
-                            let file_id = vfs_file_to_id(vfs_file);
-                            db.set_file_text(file_id, text);
-                            db.set_file_relative_path(file_id, path.clone());
-                            db.set_file_source_root(file_id, source_root_id);
-                            file_map.insert(path, file_id);
-                        }
-                        let source_root = SourceRoot { files: file_map };
-                        db.set_source_root(source_root_id, Arc::new(source_root));
-                        roots_loaded.insert(source_root_id);
-                        if roots_loaded.len() == vfs.n_roots() {
-                            done = true;
-                        }
-                    }
-                    VfsChange::AddFile { .. }
-                    | VfsChange::RemoveFile { .. }
-                    | VfsChange::ChangeFile { .. } => {
-                        // We just need the first scan, so just ignore these
-                    }
-                }
-            }
-            if done {
-                break;
-            }
-        }
-
-        db
-    }
-
-    pub fn load_cargo(root: impl AsRef<Path>) -> Result<(BatchDatabase, Vec<SourceRootId>)> {
-        let root = std::env::current_dir()?.join(root);
-        let ws = ProjectWorkspace::discover(root.as_ref())?;
-        let mut roots = Vec::new();
-        roots.push(IncludeRustFiles::member(root.clone()));
-        roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
-        let (mut vfs, roots) = Vfs::new(roots);
-        let mut load = |path: &Path| {
-            let vfs_file = vfs.load(path);
-            log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
-            vfs_file.map(vfs_file_to_id)
-        };
-        let crate_graph = ws.to_crate_graph(&mut load);
-        log::debug!("crate graph: {:?}", crate_graph);
-
-        let local_roots = roots
-            .into_iter()
-            .filter(|r| vfs.root2path(*r).starts_with(&root))
-            .map(vfs_root_to_id)
-            .collect();
-
-        let db = BatchDatabase::load(crate_graph, &mut vfs);
-        Ok((db, local_roots))
-    }
-}
-
+pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, Vec<SourceRootId>)> {
+    let root = std::env::current_dir()?.join(root);
+    let ws = ProjectWorkspace::discover(root.as_ref())?;
+    let mut roots = Vec::new();
+    roots.push(IncludeRustFiles::member(root.clone()));
+    roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
+    let (mut vfs, roots) = Vfs::new(roots);
+    let crate_graph = ws.to_crate_graph(&mut |path: &Path| {
+        let vfs_file = vfs.load(path);
+        log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
+        vfs_file.map(vfs_file_to_id)
+    });
+    log::debug!("crate graph: {:?}", crate_graph);
+
+    let local_roots = roots
+        .into_iter()
+        .filter(|r| vfs.root2path(*r).starts_with(&root))
+        .map(vfs_root_to_id)
+        .collect();
+
+    let host = load(root.as_path(), crate_graph, &mut vfs);
+    Ok((host, local_roots))
+}
+
+pub fn load(project_root: &Path, crate_graph: CrateGraph, vfs: &mut Vfs) -> AnalysisHost {
+    let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+    let mut host = AnalysisHost::new(lru_cap);
+    let mut analysis_change = AnalysisChange::new();
+    analysis_change.set_crate_graph(crate_graph);
+
+    // wait until Vfs has loaded all roots
+    let receiver = vfs.task_receiver().clone();
+    let mut roots_loaded = HashSet::new();
+    for task in receiver {
+        vfs.handle_task(task);
+        let mut done = false;
+        for change in vfs.commit_changes() {
+            match change {
+                VfsChange::AddRoot { root, files } => {
+                    let is_local = vfs.root2path(root).starts_with(&project_root);
+                    let source_root_id = vfs_root_to_id(root);
+                    log::debug!(
+                        "loaded source root {:?} with path {:?}",
+                        source_root_id,
+                        vfs.root2path(root)
+                    );
+                    analysis_change.add_root(source_root_id, is_local);
+
+                    let mut file_map = FxHashMap::default();
+                    for (vfs_file, path, text) in files {
+                        let file_id = vfs_file_to_id(vfs_file);
+                        analysis_change.add_file(source_root_id, file_id, path.clone(), text);
+                        file_map.insert(path, file_id);
+                    }
+                    roots_loaded.insert(source_root_id);
+                    if roots_loaded.len() == vfs.n_roots() {
+                        done = true;
+                    }
+                }
+                VfsChange::AddFile { .. }
+                | VfsChange::RemoveFile { .. }
+                | VfsChange::ChangeFile { .. } => {
+                    // We just need the first scan, so just ignore these
+                }
+            }
+        }
+        if done {
+            break;
+        }
+    }
+
+    host.apply_change(analysis_change);
+    host
+}
+
 #[cfg(test)]
@@ -128,10 +104,10 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
         let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let (db, roots) = BatchDatabase::load_cargo(path).unwrap();
+        let (host, roots) = load_cargo(path).unwrap();
         let mut n_crates = 0;
         for root in roots {
-            for _krate in Crate::source_root_crates(&db, root) {
+            for _krate in Crate::source_root_crates(host.raw_database(), root) {
                 n_crates += 1;
             }
         }
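From a consumer's point of view, the entry point is now the free function `load_cargo` rather than `BatchDatabase::load_cargo`, and analysis data is reached through the returned `AnalysisHost`. A hypothetical downstream caller might look like the sketch below; only `load_cargo`, `raw_database()`, and `Crate::source_root_crates` come from this diff and its test, while `count_crates` and the `ra_hir::Crate` import path are illustrative assumptions:

    use std::path::Path;

    use ra_batch::load_cargo;
    use ra_hir::Crate;

    fn count_crates(workspace: &Path) -> Result<usize, Box<dyn std::error::Error + Send + Sync>> {
        // Discover the cargo workspace, load it through the VFS, and get back
        // an AnalysisHost plus the ids of the local source roots.
        let (host, local_roots) = load_cargo(workspace)?;
        let db = host.raw_database();
        let mut n_crates = 0;
        for root in local_roots {
            for _krate in Crate::source_root_crates(db, root) {
                n_crates += 1;
            }
        }
        Ok(n_crates)
    }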