Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_db/Cargo.toml | 2
-rw-r--r--  crates/ra_db/src/lib.rs | 7
-rw-r--r--  crates/ra_hir_def/src/data.rs | 2
-rw-r--r--  crates/ra_hir_def/src/test_db.rs | 28
-rw-r--r--  crates/ra_hir_expand/src/test_db.rs | 24
-rw-r--r--  crates/ra_hir_ty/src/db.rs | 1
-rw-r--r--  crates/ra_hir_ty/src/display.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/infer/expr.rs | 6
-rw-r--r--  crates/ra_hir_ty/src/lib.rs | 2
-rw-r--r--  crates/ra_hir_ty/src/lower.rs | 5
-rw-r--r--  crates/ra_hir_ty/src/test_db.rs | 31
-rw-r--r--  crates/ra_hir_ty/src/tests.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk/tls.rs | 2
-rw-r--r--  crates/ra_ide/src/status.rs | 15
-rw-r--r--  crates/ra_ide_db/src/change.rs | 20
-rw-r--r--  crates/ra_ide_db/src/lib.rs | 35
-rw-r--r--  crates/ra_ide_db/src/symbol_index.rs | 8
-rw-r--r--  crates/ra_syntax/src/ast/edit.rs | 8
-rw-r--r--  crates/test_utils/src/fixture.rs | 2
19 files changed, 92 insertions, 114 deletions
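The commit moves rust-analyzer onto salsa's dynamic-databases branch: the hand-written `salsa::Runtime<DB>` field and the `salsa_runtime`/`salsa_runtime_mut` accessors are replaced by a generated `salsa::Storage<DB>`, `salsa_event` now receives an owned `salsa::Event`, and `Debug` is implemented by hand because the storage type is not `Debug`. A minimal sketch of the resulting database boilerplate, using an illustrative `ExampleDatabase`/`ExampleGroup` rather than any type touched in this diff:

use std::fmt;

#[salsa::query_group(ExampleGroupStorage)]
trait ExampleGroup {
    #[salsa::input]
    fn input_value(&self, key: u32) -> u32;
}

#[salsa::database(ExampleGroupStorage)]
#[derive(Default)]
struct ExampleDatabase {
    // Replaces the old `runtime: salsa::Runtime<ExampleDatabase>` field.
    storage: salsa::Storage<ExampleDatabase>,
}

impl salsa::Database for ExampleDatabase {
    // Only event reporting is left to the implementor; the event arrives by
    // value instead of behind an `impl Fn() -> salsa::Event<DB>` closure.
    fn salsa_event(&self, _event: salsa::Event) {}
}

// `salsa::Storage` does not implement `Debug`, hence the manual impl mirrored
// by every database in this diff.
impl fmt::Debug for ExampleDatabase {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("ExampleDatabase").finish()
    }
}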
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml
index 372fb242b..b2d481dfb 100644
--- a/crates/ra_db/Cargo.toml
+++ b/crates/ra_db/Cargo.toml
@@ -8,7 +8,7 @@ authors = ["rust-analyzer developers"]
 doctest = false
 
 [dependencies]
-salsa = "0.14.1"
+salsa = { git = "https://github.com/nikomatsakis/salsa", branch = "dynamic-databases-rfc" }
 relative-path = "1.0.0"
 rustc-hash = "1.1.0"
 
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 1ddacc1f6..590efffa4 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -113,7 +113,7 @@ pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
     fn crate_graph(&self) -> Arc<CrateGraph>;
 }
 
-fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
     let _p = profile("parse_query").detail(|| format!("{:?}", file_id));
     let text = db.file_text(file_id);
     SourceFile::parse(&*text)
@@ -136,10 +136,7 @@ pub trait SourceDatabaseExt: SourceDatabase {
     fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
 }
 
-fn source_root_crates(
-    db: &(impl SourceDatabaseExt + SourceDatabase),
-    id: SourceRootId,
-) -> Arc<FxHashSet<CrateId>> {
+fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
     let graph = db.crate_graph();
     let res = graph
         .iter()
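With queries dispatched dynamically, the free functions backing derived queries take the group as a trait object (`&dyn SourceDatabase`, `&dyn SourceDatabaseExt`, and so on) instead of `&impl Trait`, which is what the two hunks above change. A sketch of the convention with a hypothetical `LineCount` group layered on `ra_db::SourceDatabase`; it is not part of this commit:

use ra_db::{FileId, SourceDatabase};

#[salsa::query_group(LineCountStorage)]
trait LineCount: SourceDatabase {
    fn line_count(&self, file_id: FileId) -> usize;
}

// The implementing function receives `&dyn LineCount`; super-trait queries such
// as `file_text` remain callable through the trait object.
fn line_count(db: &dyn LineCount, file_id: FileId) -> usize {
    db.file_text(file_id).lines().count()
}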
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs
index 282ade2a3..aa335f1e3 100644
--- a/crates/ra_hir_def/src/data.rs
+++ b/crates/ra_hir_def/src/data.rs
@@ -31,7 +31,7 @@ pub struct FunctionData {
 }
 
 impl FunctionData {
-    pub(crate) fn fn_data_query(db: &impl DefDatabase, func: FunctionId) -> Arc<FunctionData> {
+    pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
         let loc = func.lookup(db);
         let item_tree = db.item_tree(loc.id.file_id);
         let func = &item_tree[loc.id.value];
diff --git a/crates/ra_hir_def/src/test_db.rs b/crates/ra_hir_def/src/test_db.rs
index 4581d8745..339f819b8 100644
--- a/crates/ra_hir_def/src/test_db.rs
+++ b/crates/ra_hir_def/src/test_db.rs
@@ -1,7 +1,7 @@
 //! Database used for testing `hir_def`.
 
 use std::{
-    panic,
+    fmt, panic,
     sync::{Arc, Mutex},
 };
 
@@ -18,10 +18,10 @@ use crate::db::DefDatabase;
     crate::db::InternDatabaseStorage,
     crate::db::DefDatabaseStorage
 )]
-#[derive(Debug, Default)]
+#[derive(Default)]
 pub struct TestDB {
-    runtime: salsa::Runtime<TestDB>,
-    events: Mutex<Option<Vec<salsa::Event<TestDB>>>>,
+    storage: salsa::Storage<TestDB>,
+    events: Mutex<Option<Vec<salsa::Event>>>,
 }
 
 impl Upcast<dyn AstDatabase> for TestDB {
@@ -37,20 +37,20 @@ impl Upcast<dyn DefDatabase> for TestDB {
 }
 
 impl salsa::Database for TestDB {
-    fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
-        &self.runtime
-    }
-    fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
-        &mut self.runtime
-    }
-    fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
+    fn salsa_event(&self, event: salsa::Event) {
         let mut events = self.events.lock().unwrap();
         if let Some(events) = &mut *events {
-            events.push(event());
+            events.push(event);
         }
     }
 }
 
+impl fmt::Debug for TestDB {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TestDB").finish()
+    }
+}
+
 impl panic::RefUnwindSafe for TestDB {}
 
 impl FileLoader for TestDB {
@@ -78,7 +78,7 @@ impl TestDB {
         panic!("Can't find module for file")
     }
 
-    pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<TestDB>> {
+    pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
         *self.events.lock().unwrap() = Some(Vec::new());
         f();
         self.events.lock().unwrap().take().unwrap()
@@ -92,7 +92,7 @@ impl TestDB {
                 // This pretty horrible, but `Debug` is the only way to inspect
                 // QueryDescriptor at the moment.
                 salsa::EventKind::WillExecute { database_key } => {
-                    Some(format!("{:?}", database_key))
+                    Some(format!("{:?}", database_key.debug(self)))
                 }
                 _ => None,
             })
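One consequence visible in the last hunk above: `WillExecute` now carries a `DatabaseKeyIndex`, an opaque index that only becomes human-readable when rendered through the database, hence `database_key.debug(self)`. A usage sketch built on the `TestDB::log` method shown above; the helper name `executed_query_names` is made up:

fn executed_query_names(db: &TestDB, f: impl FnOnce()) -> Vec<String> {
    db.log(f)
        .into_iter()
        .filter_map(|event| match event.kind {
            // `database_key` is just an index; `.debug(db)` borrows the database
            // to print which query and key it refers to.
            salsa::EventKind::WillExecute { database_key } => {
                Some(format!("{:?}", database_key.debug(db)))
            }
            _ => None,
        })
        .collect()
}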
diff --git a/crates/ra_hir_expand/src/test_db.rs b/crates/ra_hir_expand/src/test_db.rs
index 09fc18c36..332fa556f 100644
--- a/crates/ra_hir_expand/src/test_db.rs
+++ b/crates/ra_hir_expand/src/test_db.rs
@@ -1,7 +1,7 @@
 //! Database used for testing `hir_expand`.
 
 use std::{
-    panic,
+    fmt, panic,
     sync::{Arc, Mutex},
 };
 
@@ -13,25 +13,23 @@ use rustc_hash::FxHashSet;
     ra_db::SourceDatabaseStorage,
     crate::db::AstDatabaseStorage
 )]
-#[derive(Debug, Default)]
+#[derive(Default)]
 pub struct TestDB {
-    runtime: salsa::Runtime<TestDB>,
-    events: Mutex<Option<Vec<salsa::Event<TestDB>>>>,
+    storage: salsa::Storage<TestDB>,
+    events: Mutex<Option<Vec<salsa::Event>>>,
 }
 
-impl salsa::Database for TestDB {
-    fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
-        &self.runtime
-    }
-
-    fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
-        &mut self.runtime
+impl fmt::Debug for TestDB {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TestDB").finish()
     }
+}
 
-    fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
+impl salsa::Database for TestDB {
+    fn salsa_event(&self, event: salsa::Event) {
         let mut events = self.events.lock().unwrap();
         if let Some(events) = &mut *events {
-            events.push(event());
+            events.push(event);
         }
     }
 }
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs
index dc06c0ee7..84afe0484 100644
--- a/crates/ra_hir_ty/src/db.rs
+++ b/crates/ra_hir_ty/src/db.rs
@@ -19,7 +19,6 @@ use crate::{
 use hir_expand::name::Name;
 
 #[salsa::query_group(HirDatabaseStorage)]
-#[salsa::requires(salsa::Database)]
 pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     #[salsa::invoke(infer_wait)]
     #[salsa::transparent]
diff --git a/crates/ra_hir_ty/src/display.rs b/crates/ra_hir_ty/src/display.rs
index 23cea1a2a..ac68c5661 100644
--- a/crates/ra_hir_ty/src/display.rs
+++ b/crates/ra_hir_ty/src/display.rs
@@ -369,7 +369,7 @@ impl HirDisplay for ApplicationTy {
                 let data = (*datas)
                     .as_ref()
                     .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
-                data.clone().subst(&self.parameters)
+                data.subst(&self.parameters)
             }
         };
         write!(f, "impl ")?;
@@ -456,7 +456,7 @@ impl HirDisplay for Ty {
                 let data = (*datas)
                     .as_ref()
                     .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
-                data.clone().subst(&opaque_ty.parameters)
+                data.subst(&opaque_ty.parameters)
             }
         };
         write!(f, "impl ")?;
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs
index 22884522a..06baac2a9 100644
--- a/crates/ra_hir_ty/src/infer/expr.rs
+++ b/crates/ra_hir_ty/src/infer/expr.rs
@@ -85,10 +85,8 @@ impl<'a> InferenceContext<'a> {
             ctor: TypeCtor::Tuple { cardinality: num_args as u16 },
             parameters,
         });
-        let substs = Substs::build_for_generics(&generic_params)
-            .push(ty.clone())
-            .push(arg_ty.clone())
-            .build();
+        let substs =
+            Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build();
 
         let trait_env = Arc::clone(&self.trait_env);
         let implements_fn_trait =
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index 7f3f5e771..c12bed4af 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -891,7 +891,7 @@ impl Ty {
                     let data = (*it)
                         .as_ref()
                         .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
-                    data.clone().subst(&opaque_ty.parameters)
+                    data.subst(&opaque_ty.parameters)
                 })
             }
         };
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs
index 3dc154e92..3af8d55a1 100644
--- a/crates/ra_hir_ty/src/lower.rs
+++ b/crates/ra_hir_ty/src/lower.rs
@@ -720,8 +720,7 @@ fn assoc_type_bindings_from_type_bound<'a>(
         None => return SmallVec::<[GenericPredicate; 1]>::new(),
         Some(t) => t,
     };
-    let projection_ty =
-        ProjectionTy { associated_ty, parameters: super_trait_ref.substs.clone() };
+    let projection_ty = ProjectionTy { associated_ty, parameters: super_trait_ref.substs };
     let mut preds = SmallVec::with_capacity(
         binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
     );
@@ -1216,7 +1215,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<
 }
 
 pub(crate) fn return_type_impl_traits(
-    db: &impl HirDatabase,
+    db: &dyn HirDatabase,
     def: hir_def::FunctionId,
 ) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
     // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
diff --git a/crates/ra_hir_ty/src/test_db.rs b/crates/ra_hir_ty/src/test_db.rs
index fddf0604d..dc447955f 100644
--- a/crates/ra_hir_ty/src/test_db.rs
+++ b/crates/ra_hir_ty/src/test_db.rs
@@ -1,7 +1,7 @@
 //! Database used for testing `hir`.
 
 use std::{
-    panic,
+    fmt, panic,
     sync::{Arc, Mutex},
 };
 
@@ -26,10 +26,15 @@ use crate::{
     hir_def::db::DefDatabaseStorage,
     crate::db::HirDatabaseStorage
 )]
-#[derive(Debug, Default)]
+#[derive(Default)]
 pub struct TestDB {
-    events: Mutex<Option<Vec<salsa::Event<TestDB>>>>,
-    runtime: salsa::Runtime<TestDB>,
+    storage: salsa::Storage<TestDB>,
+    events: Mutex<Option<Vec<salsa::Event>>>,
+}
+impl fmt::Debug for TestDB {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("TestDB").finish()
+    }
 }
 
 impl Upcast<dyn AstDatabase> for TestDB {
@@ -45,18 +50,10 @@ impl Upcast<dyn DefDatabase> for TestDB {
 }
 
 impl salsa::Database for TestDB {
-    fn salsa_runtime(&self) -> &salsa::Runtime<TestDB> {
-        &self.runtime
-    }
-
-    fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
-        &mut self.runtime
-    }
-
-    fn salsa_event(&self, event: impl Fn() -> salsa::Event<TestDB>) {
+    fn salsa_event(&self, event: salsa::Event) {
         let mut events = self.events.lock().unwrap();
         if let Some(events) = &mut *events {
-            events.push(event());
+            events.push(event);
         }
     }
 }
@@ -64,8 +61,8 @@ impl salsa::Database for TestDB {
 impl salsa::ParallelDatabase for TestDB {
     fn snapshot(&self) -> salsa::Snapshot<TestDB> {
         salsa::Snapshot::new(TestDB {
+            storage: self.storage.snapshot(),
             events: Default::default(),
-            runtime: self.runtime.snapshot(self),
         })
     }
 }
@@ -182,7 +179,7 @@ impl TestDB {
 }
 
 impl TestDB {
-    pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event<TestDB>> {
+    pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
         *self.events.lock().unwrap() = Some(Vec::new());
         f();
         self.events.lock().unwrap().take().unwrap()
@@ -196,7 +193,7 @@ impl TestDB {
                 // This pretty horrible, but `Debug` is the only way to inspect
                 // QueryDescriptor at the moment.
                 salsa::EventKind::WillExecute { database_key } => {
-                    Some(format!("{:?}", database_key))
+                    Some(format!("{:?}", database_key.debug(self)))
                 }
                 _ => None,
            })
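Because the runtime now lives inside the storage, `snapshot` is simply `self.storage.snapshot()` and no longer needs the old `self.runtime.snapshot(self)` dance. A usage sketch, assuming the `TestDB` above with `ra_db::SourceDatabase` in scope; the helper is illustrative and not code from this commit:

fn parse_on_worker(db: &TestDB, file_id: ra_db::FileId) -> std::thread::JoinHandle<()> {
    // Take a consistent, read-only view of the database for another thread.
    let snap = salsa::ParallelDatabase::snapshot(db);
    std::thread::spawn(move || {
        // `Snapshot<TestDB>` derefs to `TestDB`, so queries run as usual while
        // the original handle stays free for edits on the calling thread.
        let _parse = snap.parse(file_id);
    })
}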
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs
index eeac34d14..69f2d7667 100644
--- a/crates/ra_hir_ty/src/tests.rs
+++ b/crates/ra_hir_ty/src/tests.rs
@@ -21,7 +21,7 @@ use hir_def::{
 };
 use hir_expand::{db::AstDatabase, InFile};
 use insta::assert_snapshot;
-use ra_db::{fixture::WithFixture, salsa::Database, FileRange, SourceDatabase};
+use ra_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt};
 use ra_syntax::{
     algo,
     ast::{self, AstNode},
@@ -317,7 +317,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
     "
     .to_string();
 
-    db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));
+    db.set_file_text(pos.file_id, Arc::new(new_text));
 
     {
         let events = db.log_executed(|| {
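`db.set_file_text(...)` works because `file_text` is a `#[salsa::input]` query and salsa generates a `set_<query>` method for every input, so the old `query_mut(ra_db::FileTextQuery).set(...)` detour is no longer needed. A trimmed illustration of the mechanism, not the real `ra_db` definitions:

#[salsa::query_group(FilesStorage)]
trait Files {
    #[salsa::input]
    fn file_text(&self, file_id: u32) -> std::sync::Arc<String>;

    fn file_len(&self, file_id: u32) -> usize;
}

fn file_len(db: &dyn Files, file_id: u32) -> usize {
    db.file_text(file_id).len()
}

// Callers mutate the input through the generated setter; only queries that
// depend on that (file_id) key are invalidated:
//     db.set_file_text(1, Arc::new("fn main() {}".to_string()));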
diff --git a/crates/ra_hir_ty/src/traits/chalk/tls.rs b/crates/ra_hir_ty/src/traits/chalk/tls.rs
index 556af7098..e6a9d3211 100644
--- a/crates/ra_hir_ty/src/traits/chalk/tls.rs
+++ b/crates/ra_hir_ty/src/traits/chalk/tls.rs
@@ -10,7 +10,7 @@ use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId};
 
 pub use unsafe_tls::{set_current_program, with_current_program};
 
-pub struct DebugContext<'a>(&'a (dyn HirDatabase + 'a));
+pub struct DebugContext<'a>(&'a dyn HirDatabase);
 
 impl DebugContext<'_> {
     pub fn debug_struct_id(
diff --git a/crates/ra_ide/src/status.rs b/crates/ra_ide/src/status.rs
index 45411b357..08e6f69cb 100644
--- a/crates/ra_ide/src/status.rs
+++ b/crates/ra_ide/src/status.rs
@@ -2,10 +2,7 @@ use std::{fmt, iter::FromIterator, sync::Arc};
 
 use hir::MacroFile;
 use ra_db::{
-    salsa::{
-        debug::{DebugQueryTable, TableEntry},
-        Database,
-    },
+    salsa::debug::{DebugQueryTable, TableEntry},
     FileTextQuery, SourceRootId,
 };
 use ra_ide_db::{
@@ -14,15 +11,15 @@ use ra_ide_db::{
 };
 use ra_prof::{memory_usage, Bytes};
 use ra_syntax::{ast, Parse, SyntaxNode};
+use rustc_hash::FxHashMap;
 
 use crate::FileId;
-use rustc_hash::FxHashMap;
 
 fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
-    db.query(ra_db::ParseQuery).entries::<SyntaxTreeStats>()
+    ra_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>()
 }
 fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
-    db.query(hir::db::ParseMacroQuery).entries::<SyntaxTreeStats>()
+    hir::db::ParseMacroQuery.in_db(db).entries::<SyntaxTreeStats>()
 }
 
 // Feature: Status
@@ -35,10 +32,10 @@ fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
 // | VS Code | **Rust Analyzer: Status**
 // |===
 pub(crate) fn status(db: &RootDatabase) -> String {
-    let files_stats = db.query(FileTextQuery).entries::<FilesStats>();
+    let files_stats = FileTextQuery.in_db(db).entries::<FilesStats>();
     let syntax_tree_stats = syntax_tree_stats(db);
     let macro_syntax_tree_stats = macro_syntax_tree_stats(db);
-    let symbols_stats = db.query(LibrarySymbolsQuery).entries::<LibrarySymbolsStats>();
+    let symbols_stats = LibrarySymbolsQuery.in_db(db).entries::<LibrarySymbolsStats>();
     format!(
         "{}\n{}\n{}\n{} (macros)\n\n\nmemory:\n{}\ngc {:?} seconds ago",
         files_stats,
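Query tables are now reached from the generated query type rather than from the database: `db.query(Q)` becomes `Q.in_db(db)` and `db.query_mut(Q)` becomes `Q.in_db_mut(db)`, as in the hunks above and in change.rs below. A small sketch against `RootDatabase`; extra trait imports beyond the `DebugQueryTable` shown above may be needed:

use ra_db::salsa::debug::DebugQueryTable;
use ra_ide_db::RootDatabase;

// Read-only access: count the memoized entries of the `parse` query.
fn parse_entry_count(db: &RootDatabase) -> usize {
    ra_db::ParseQuery.in_db(db).entries::<Vec<_>>().len()
}

// Mutable access: per-query knobs such as the LRU capacity.
fn set_parse_lru(db: &mut RootDatabase, cap: usize) {
    ra_db::ParseQuery.in_db_mut(db).set_lru_capacity(cap);
}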
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs
index 2504d7a33..d8da3f949 100644
--- a/crates/ra_ide_db/src/change.rs
+++ b/crates/ra_ide_db/src/change.rs
@@ -147,21 +147,21 @@ impl RootDatabase {
 
         let sweep = SweepStrategy::default().discard_values().sweep_all_revisions();
 
-        self.query(ra_db::ParseQuery).sweep(sweep);
-        self.query(hir::db::ParseMacroQuery).sweep(sweep);
+        ra_db::ParseQuery.in_db(self).sweep(sweep);
+        hir::db::ParseMacroQuery.in_db(self).sweep(sweep);
 
         // Macros do take significant space, but less then the syntax trees
         // self.query(hir::db::MacroDefQuery).sweep(sweep);
         // self.query(hir::db::MacroArgQuery).sweep(sweep);
         // self.query(hir::db::MacroExpandQuery).sweep(sweep);
 
-        self.query(hir::db::AstIdMapQuery).sweep(sweep);
+        hir::db::AstIdMapQuery.in_db(self).sweep(sweep);
 
-        self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
+        hir::db::BodyWithSourceMapQuery.in_db(self).sweep(sweep);
 
-        self.query(hir::db::ExprScopesQuery).sweep(sweep);
-        self.query(hir::db::InferQueryQuery).sweep(sweep);
-        self.query(hir::db::BodyQuery).sweep(sweep);
+        hir::db::ExprScopesQuery.in_db(self).sweep(sweep);
+        hir::db::InferQueryQuery.in_db(self).sweep(sweep);
+        hir::db::BodyQuery.in_db(self).sweep(sweep);
     }
 
     pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
@@ -170,14 +170,14 @@ impl RootDatabase {
         macro_rules! sweep_each_query {
             ($($q:path)*) => {$(
                 let before = memory_usage().allocated;
-                self.query($q).sweep(sweep);
+                $q.in_db(self).sweep(sweep);
                 let after = memory_usage().allocated;
                 let q: $q = Default::default();
                 let name = format!("{:?}", q);
                 acc.push((name, before - after));
 
                 let before = memory_usage().allocated;
-                self.query($q).sweep(sweep.discard_everything());
+                $q.in_db(self).sweep(sweep.discard_everything());
                 let after = memory_usage().allocated;
                 let q: $q = Default::default();
                 let name = format!("{:?} (deps)", q);
@@ -252,7 +252,7 @@ impl RootDatabase {
         // write.
         // We do this after collecting the non-interned queries to correctly attribute memory used
         // by interned data.
-        self.runtime.synthetic_write(Durability::HIGH);
+        self.salsa_runtime_mut().synthetic_write(Durability::HIGH);
 
         sweep_each_query![
             // AstDatabase
diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs
index c78071ad6..6900cac73 100644
--- a/crates/ra_ide_db/src/lib.rs
+++ b/crates/ra_ide_db/src/lib.rs
@@ -11,11 +11,11 @@ pub mod imports_locator;
 pub mod source_change;
 mod wasm_shims;
 
-use std::sync::Arc;
+use std::{fmt, sync::Arc};
 
 use hir::db::{AstDatabase, DefDatabase, HirDatabase};
 use ra_db::{
-    salsa::{self, Database, Durability},
+    salsa::{self, Durability},
     Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase,
     Upcast,
 };
@@ -33,13 +33,18 @@ use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
     hir::db::DefDatabaseStorage,
     hir::db::HirDatabaseStorage
 )]
-#[derive(Debug)]
 pub struct RootDatabase {
-    runtime: salsa::Runtime<RootDatabase>,
+    storage: salsa::Storage<RootDatabase>,
     pub last_gc: crate::wasm_shims::Instant,
     pub last_gc_check: crate::wasm_shims::Instant,
 }
 
+impl fmt::Debug for RootDatabase {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RootDatabase").finish()
+    }
+}
+
 impl Upcast<dyn AstDatabase> for RootDatabase {
     fn upcast(&self) -> &(dyn AstDatabase + 'static) {
         &*self
@@ -71,17 +76,11 @@ impl FileLoader for RootDatabase {
 }
 
 impl salsa::Database for RootDatabase {
-    fn salsa_runtime(&self) -> &salsa::Runtime<RootDatabase> {
-        &self.runtime
-    }
-    fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
-        &mut self.runtime
-    }
     fn on_propagated_panic(&self) -> ! {
         Canceled::throw()
     }
-    fn salsa_event(&self, event: impl Fn() -> salsa::Event<RootDatabase>) {
-        match event().kind {
+    fn salsa_event(&self, event: salsa::Event) {
+        match event.kind {
             salsa::EventKind::DidValidateMemoizedValue { .. }
             | salsa::EventKind::WillExecute { .. } => {
                 self.check_canceled();
@@ -100,7 +99,7 @@ impl Default for RootDatabase {
 impl RootDatabase {
     pub fn new(lru_capacity: Option<usize>) -> RootDatabase {
         let mut db = RootDatabase {
-            runtime: salsa::Runtime::default(),
+            storage: salsa::Storage::default(),
             last_gc: crate::wasm_shims::Instant::now(),
             last_gc_check: crate::wasm_shims::Instant::now(),
         };
@@ -113,16 +112,16 @@ impl RootDatabase {
 
     pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
         let lru_capacity = lru_capacity.unwrap_or(ra_db::DEFAULT_LRU_CAP);
-        self.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_capacity);
-        self.query_mut(hir::db::ParseMacroQuery).set_lru_capacity(lru_capacity);
-        self.query_mut(hir::db::MacroExpandQuery).set_lru_capacity(lru_capacity);
+        ra_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        hir::db::ParseMacroQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
     }
 }
 
 impl salsa::ParallelDatabase for RootDatabase {
     fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
         salsa::Snapshot::new(RootDatabase {
-            runtime: self.runtime.snapshot(self),
+            storage: self.storage.snapshot(),
             last_gc: self.last_gc,
             last_gc_check: self.last_gc_check,
         })
@@ -134,7 +133,7 @@ pub trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled {
     fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
 }
 
-fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
+fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
     let text = db.file_text(file_id);
     Arc::new(LineIndex::new(&*text))
 }
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs
index 5a09e7d1d..131e2a128 100644
--- a/crates/ra_ide_db/src/symbol_index.rs
+++ b/crates/ra_ide_db/src/symbol_index.rs
@@ -87,7 +87,7 @@ impl Query {
 }
 
 #[salsa::query_group(SymbolsDatabaseStorage)]
-pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDatabase {
+pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt {
     fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>;
     fn library_symbols(&self) -> Arc<FxHashMap<SourceRootId, SymbolIndex>>;
     /// The set of "local" (that is, from the current workspace) roots.
@@ -100,9 +100,7 @@ pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDa
     fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
 }
 
-fn library_symbols(
-    db: &(impl SymbolsDatabase + ParallelDatabase),
-) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> {
+fn library_symbols(db: &dyn SymbolsDatabase) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> {
     let _p = profile("library_symbols");
 
     let roots = db.library_roots();
@@ -123,7 +121,7 @@ fn library_symbols(
     Arc::new(res)
 }
 
-fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
+fn file_symbols(db: &dyn SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
     db.check_canceled();
     let parse = db.parse(file_id);
 
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 2ef173a03..940c30c7f 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -299,12 +299,8 @@ impl ast::UseTree {
             Some(it) => it,
             None => return self.clone(),
         };
-        let use_tree = make::use_tree(
-            suffix.clone(),
-            self.use_tree_list(),
-            self.alias(),
-            self.star_token().is_some(),
-        );
+        let use_tree =
+            make::use_tree(suffix, self.use_tree_list(), self.alias(), self.star_token().is_some());
         let nested = make::use_tree_list(iter::once(use_tree));
         return make::use_tree(prefix.clone(), Some(nested), None, false);
 
diff --git a/crates/test_utils/src/fixture.rs b/crates/test_utils/src/fixture.rs
index fad8f7e2c..ed764046b 100644
--- a/crates/test_utils/src/fixture.rs
+++ b/crates/test_utils/src/fixture.rs
@@ -62,7 +62,7 @@ impl Fixture {
         let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
 
         let path = components[0].to_string();
-        assert!(path.starts_with("/"));
+        assert!(path.starts_with('/'));
 
         let mut krate = None;
         let mut deps = Vec::new();