author     bors[bot] <bors[bot]@users.noreply.github.com>    2019-06-16 09:08:10 +0100
committer  bors[bot] <bors[bot]@users.noreply.github.com>    2019-06-16 09:08:10 +0100
commit     e6fbff3246cdd3278ff1c376d5abfc1d579f86c2 (patch)
tree       736052286d9c0d8d06798165590bdf145a12d783 /crates
parent     ce9ea0939a1ae94a83d56ddafc7aeb757dcda776 (diff)
parent     b0be4207d04b65580e7af10cb256ddd5d9ca006d (diff)
Merge #1406
1406: reuse AnalysisHost in batch analysis r=matklad a=matklad
We do some custom setup in `AnalysisHost`, like configuring the LRU cache size. I figure it's a good idea not to duplicate this work in batch analysis, *if* we want to keep batch and non-batch close.
Long-term, I see value in keeping batch a separate, lighter-weight thing. However, since we now use batch to measure performance, keeping them closer makes more sense.
I'd also like to add the ability to get completions via batch analysis, and that will require ra_ide_api as well.
@flodiebold, was there a reason we didn't start with this approach in the first place?
Co-authored-by: Aleksey Kladov <[email protected]>
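
For a sense of how the reworked API is meant to be consumed, here is a minimal sketch mirroring the new test and the `ra_cli` change below; the function name, the `project_path` argument, and the returned count are illustrative only:

```rust
use std::{error::Error, path::Path};

use ra_hir::Crate;

// Count the crates in a cargo workspace using the shared AnalysisHost.
fn count_crates(project_path: &Path) -> Result<usize, Box<dyn Error + Send + Sync>> {
    // load_cargo discovers the workspace, loads it through Vfs, and returns a
    // fully initialised AnalysisHost plus the IDs of the local source roots.
    let (host, roots) = ra_batch::load_cargo(project_path)?;
    // raw_database() exposes the underlying database for read-only HIR queries,
    // exactly as analysis_stats.rs does below.
    let db = host.raw_database();
    let mut n_crates = 0;
    for root in roots {
        for _krate in Crate::source_root_crates(db, root) {
            n_crates += 1;
        }
    }
    Ok(n_crates)
}
```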
Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_batch/Cargo.toml           |   1
-rw-r--r--  crates/ra_batch/src/lib.rs           | 164
-rw-r--r--  crates/ra_cli/src/analysis_stats.rs  |  26
-rw-r--r--  crates/ra_ide_api/src/lib.rs         |   3
-rw-r--r--  crates/ra_lsp_server/src/main.rs     |   2
5 files changed, 88 insertions, 108 deletions
diff --git a/crates/ra_batch/Cargo.toml b/crates/ra_batch/Cargo.toml
index 8bf085bbf..7d8837fc3 100644
--- a/crates/ra_batch/Cargo.toml
+++ b/crates/ra_batch/Cargo.toml
@@ -11,6 +11,7 @@ rustc-hash = "1.0"
 ra_vfs = "0.2.0"
 ra_syntax = { path = "../ra_syntax" }
 ra_db = { path = "../ra_db" }
+ra_ide_api = { path = "../ra_ide_api" }
 ra_hir = { path = "../ra_hir" }
 ra_project_model = { path = "../ra_project_model" }
 
diff --git a/crates/ra_batch/src/lib.rs b/crates/ra_batch/src/lib.rs
index 96b32d9fe..fa244e86c 100644
--- a/crates/ra_batch/src/lib.rs
+++ b/crates/ra_batch/src/lib.rs
@@ -1,36 +1,19 @@
 mod vfs_filter;
 
-use std::{sync::Arc, path::Path, collections::HashSet, error::Error};
+use std::{path::Path, collections::HashSet, error::Error};
 
 use rustc_hash::FxHashMap;
 
 use ra_db::{
-    CrateGraph, FileId, SourceRoot, SourceRootId, SourceDatabase, salsa::{self, Database},
+    CrateGraph, FileId, SourceRootId,
 };
-use ra_hir::db;
+use ra_ide_api::{AnalysisHost, AnalysisChange};
 use ra_project_model::ProjectWorkspace;
 use ra_vfs::{Vfs, VfsChange};
 use vfs_filter::IncludeRustFiles;
 
 type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
 
-#[salsa::database(
-    ra_db::SourceDatabaseStorage,
-    db::AstDatabaseStorage,
-    db::DefDatabaseStorage,
-    db::HirDatabaseStorage
-)]
-#[derive(Debug)]
-pub struct BatchDatabase {
-    runtime: salsa::Runtime<BatchDatabase>,
-}
-
-impl salsa::Database for BatchDatabase {
-    fn salsa_runtime(&self) -> &salsa::Runtime<BatchDatabase> {
-        &self.runtime
-    }
-}
-
 fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId {
     FileId(f.0)
 }
@@ -38,86 +21,79 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
     SourceRootId(r.0)
 }
 
-impl BatchDatabase {
-    pub fn load(crate_graph: CrateGraph, vfs: &mut Vfs) -> BatchDatabase {
-        let mut db = BatchDatabase { runtime: salsa::Runtime::default() };
-        let lru_cap = std::env::var("RA_LRU_CAP")
-            .ok()
-            .and_then(|it| it.parse::<usize>().ok())
-            .unwrap_or(ra_db::DEFAULT_LRU_CAP);
-        db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_cap);
-        db.query_mut(ra_hir::db::ParseMacroQuery).set_lru_capacity(lru_cap);
-        db.set_crate_graph(Arc::new(crate_graph));
-
-        // wait until Vfs has loaded all roots
-        let receiver = vfs.task_receiver().clone();
-        let mut roots_loaded = HashSet::new();
-        for task in receiver {
-            vfs.handle_task(task);
-            let mut done = false;
-            for change in vfs.commit_changes() {
-                match change {
-                    VfsChange::AddRoot { root, files } => {
-                        let source_root_id = vfs_root_to_id(root);
-                        log::debug!(
-                            "loaded source root {:?} with path {:?}",
-                            source_root_id,
-                            vfs.root2path(root)
-                        );
-                        let mut file_map = FxHashMap::default();
-                        for (vfs_file, path, text) in files {
-                            let file_id = vfs_file_to_id(vfs_file);
-                            db.set_file_text(file_id, text);
-                            db.set_file_relative_path(file_id, path.clone());
-                            db.set_file_source_root(file_id, source_root_id);
-                            file_map.insert(path, file_id);
-                        }
-                        let source_root = SourceRoot { files: file_map };
-                        db.set_source_root(source_root_id, Arc::new(source_root));
-                        roots_loaded.insert(source_root_id);
-                        if roots_loaded.len() == vfs.n_roots() {
-                            done = true;
-                        }
-                    }
-                    VfsChange::AddFile { .. }
-                    | VfsChange::RemoveFile { .. }
-                    | VfsChange::ChangeFile { .. } => {
-                        // We just need the first scan, so just ignore these
-                    }
-                }
-            }
-            if done {
-                break;
-            }
-        }
-
-        db
-    }
-
-    pub fn load_cargo(root: impl AsRef<Path>) -> Result<(BatchDatabase, Vec<SourceRootId>)> {
-        let root = std::env::current_dir()?.join(root);
-        let ws = ProjectWorkspace::discover(root.as_ref())?;
-        let mut roots = Vec::new();
-        roots.push(IncludeRustFiles::member(root.clone()));
-        roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
-        let (mut vfs, roots) = Vfs::new(roots);
-        let mut load = |path: &Path| {
-            let vfs_file = vfs.load(path);
-            log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
-            vfs_file.map(vfs_file_to_id)
-        };
-        let crate_graph = ws.to_crate_graph(&mut load);
-        log::debug!("crate graph: {:?}", crate_graph);
-
-        let local_roots = roots
-            .into_iter()
-            .filter(|r| vfs.root2path(*r).starts_with(&root))
-            .map(vfs_root_to_id)
-            .collect();
-
-        let db = BatchDatabase::load(crate_graph, &mut vfs);
-        Ok((db, local_roots))
-    }
+pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, Vec<SourceRootId>)> {
+    let root = std::env::current_dir()?.join(root);
+    let ws = ProjectWorkspace::discover(root.as_ref())?;
+    let mut roots = Vec::new();
+    roots.push(IncludeRustFiles::member(root.clone()));
+    roots.extend(IncludeRustFiles::from_roots(ws.to_roots()));
+    let (mut vfs, roots) = Vfs::new(roots);
+    let crate_graph = ws.to_crate_graph(&mut |path: &Path| {
+        let vfs_file = vfs.load(path);
+        log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
+        vfs_file.map(vfs_file_to_id)
+    });
+    log::debug!("crate graph: {:?}", crate_graph);
+
+    let local_roots = roots
+        .into_iter()
+        .filter(|r| vfs.root2path(*r).starts_with(&root))
+        .map(vfs_root_to_id)
+        .collect();
+
+    let host = load(root.as_path(), crate_graph, &mut vfs);
+    Ok((host, local_roots))
+}
+
+pub fn load(project_root: &Path, crate_graph: CrateGraph, vfs: &mut Vfs) -> AnalysisHost {
+    let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+    let mut host = AnalysisHost::new(lru_cap);
+    let mut analysis_change = AnalysisChange::new();
+    analysis_change.set_crate_graph(crate_graph);
+
+    // wait until Vfs has loaded all roots
+    let receiver = vfs.task_receiver().clone();
+    let mut roots_loaded = HashSet::new();
+    for task in receiver {
+        vfs.handle_task(task);
+        let mut done = false;
+        for change in vfs.commit_changes() {
+            match change {
+                VfsChange::AddRoot { root, files } => {
+                    let is_local = vfs.root2path(root).starts_with(&project_root);
+                    let source_root_id = vfs_root_to_id(root);
+                    log::debug!(
+                        "loaded source root {:?} with path {:?}",
+                        source_root_id,
+                        vfs.root2path(root)
+                    );
+                    analysis_change.add_root(source_root_id, is_local);
+
+                    let mut file_map = FxHashMap::default();
+                    for (vfs_file, path, text) in files {
+                        let file_id = vfs_file_to_id(vfs_file);
+                        analysis_change.add_file(source_root_id, file_id, path.clone(), text);
+                        file_map.insert(path, file_id);
+                    }
+                    roots_loaded.insert(source_root_id);
+                    if roots_loaded.len() == vfs.n_roots() {
+                        done = true;
+                    }
+                }
+                VfsChange::AddFile { .. }
+                | VfsChange::RemoveFile { .. }
+                | VfsChange::ChangeFile { .. } => {
+                    // We just need the first scan, so just ignore these
+                }
+            }
+        }
+        if done {
+            break;
+        }
+    }
+
+    host.apply_change(analysis_change);
+    host
 }
 
 #[cfg(test)]
@@ -128,10 +104,10 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
        let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let (db, roots) = BatchDatabase::load_cargo(path).unwrap();
+        let (host, roots) = load_cargo(path).unwrap();
         let mut n_crates = 0;
         for root in roots {
-            for _krate in Crate::source_root_crates(&db, root) {
+            for _krate in Crate::source_root_crates(host.raw_database(), root) {
                 n_crates += 1;
             }
         }
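
One concrete piece of the "custom setup" the PR description mentions is the LRU cache size: the old `BatchDatabase::load` read `RA_LRU_CAP` and fell back to `ra_db::DEFAULT_LRU_CAP` itself, while the new `load` simply forwards an `Option<usize>` and leaves the default to `AnalysisHost::new`. A small sketch of that convention, under the assumption that `AnalysisHost::new(None)` applies its own default capacity:

```rust
use ra_ide_api::AnalysisHost;

// RA_LRU_CAP is the same environment variable the batch loader reads; when it
// is unset or fails to parse, the host is constructed with its default LRU size.
fn analysis_host_from_env() -> AnalysisHost {
    let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
    AnalysisHost::new(lru_cap)
}
```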
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index c19976bd2..d76c37d84 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -1,7 +1,6 @@
 use std::{collections::HashSet, time::Instant, fmt::Write};
 
 use ra_db::SourceDatabase;
-use ra_batch::BatchDatabase;
 use ra_hir::{Crate, ModuleDef, Ty, ImplItem, HasSource};
 use ra_syntax::AstNode;
 
@@ -9,16 +8,17 @@ use crate::Result;
 
 pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let db_load_time = Instant::now();
-    let (db, roots) = BatchDatabase::load_cargo(path)?;
+    let (host, roots) = ra_batch::load_cargo(path.as_ref())?;
+    let db = host.raw_database();
     println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed());
     let analysis_time = Instant::now();
     let mut num_crates = 0;
     let mut visited_modules = HashSet::new();
     let mut visit_queue = Vec::new();
     for root in roots {
-        for krate in Crate::source_root_crates(&db, root) {
+        for krate in Crate::source_root_crates(db, root) {
             num_crates += 1;
-            let module = krate.root_module(&db).expect("crate in source root without root module");
+            let module = krate.root_module(db).expect("crate in source root without root module");
             visit_queue.push(module);
         }
     }
@@ -27,17 +27,17 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let mut funcs = Vec::new();
     while let Some(module) = visit_queue.pop() {
         if visited_modules.insert(module) {
-            visit_queue.extend(module.children(&db));
+            visit_queue.extend(module.children(db));
 
-            for decl in module.declarations(&db) {
+            for decl in module.declarations(db) {
                 num_decls += 1;
                 if let ModuleDef::Function(f) = decl {
                     funcs.push(f);
                 }
             }
 
-            for impl_block in module.impl_blocks(&db) {
-                for item in impl_block.items(&db) {
+            for impl_block in module.impl_blocks(db) {
+                for item in impl_block.items(db) {
                     num_decls += 1;
                     if let ImplItem::Method(f) = item {
                         funcs.push(f);
@@ -61,11 +61,11 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
     let mut num_exprs_unknown = 0;
     let mut num_exprs_partially_unknown = 0;
     for f in funcs {
-        let name = f.name(&db);
+        let name = f.name(db);
         let mut msg = format!("processing: {}", name);
         if verbose {
-            let src = f.source(&db);
-            let original_file = src.file_id.original_file(&db);
+            let src = f.source(db);
+            let original_file = src.file_id.original_file(db);
             let path = db.file_relative_path(original_file);
             let syntax_range = src.ast.syntax().range();
             write!(msg, " ({:?} {})", path, syntax_range).unwrap();
@@ -76,8 +76,8 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> {
                 continue;
             }
         }
-        let body = f.body(&db);
-        let inference_result = f.infer(&db);
+        let body = f.body(db);
+        let inference_result = f.infer(db);
         for (expr_id, _) in body.exprs() {
             let ty = &inference_result[expr_id];
             num_exprs += 1;
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs
index 8741e736f..a68c5e2a5 100644
--- a/crates/ra_ide_api/src/lib.rs
+++ b/crates/ra_ide_api/src/lib.rs
@@ -276,6 +276,9 @@ impl AnalysisHost {
     pub fn collect_garbage(&mut self) {
         self.db.collect_garbage();
     }
+    pub fn raw_database(&self) -> &impl hir::db::HirDatabase {
+        &self.db
+    }
 }
 
 /// Analysis is a snapshot of a world state at a moment in time. It is the main
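
Because `raw_database()` returns `&impl hir::db::HirDatabase` rather than a concrete type, batch consumers can feed it into any code that is generic over the HIR database, without gaining mutable access to the host. A hypothetical helper illustrating that shape (the function itself is not part of this change):

```rust
use ra_hir::{db::HirDatabase, Module};

// Walk a module tree using only the HirDatabase trait, so it accepts whatever
// AnalysisHost::raw_database() returns.
fn count_modules(db: &impl HirDatabase, root: Module) -> usize {
    let mut queue = vec![root];
    let mut n = 0;
    while let Some(module) = queue.pop() {
        n += 1;
        queue.extend(module.children(db));
    }
    n
}
```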
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs
index 7ed35c24a..4aadb5ea8 100644
--- a/crates/ra_lsp_server/src/main.rs
+++ b/crates/ra_lsp_server/src/main.rs
@@ -17,7 +17,7 @@ fn main() -> Result<()> {
         Err(_) => ra_prof::Filter::disabled(),
     });
     log::info!("lifecycle: server started");
-    match ::std::panic::catch_unwind(main_inner) {
+    match std::panic::catch_unwind(main_inner) {
         Ok(res) => {
             log::info!("lifecycle: terminating process with {:?}", res);
             res