author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>   2021-04-03 01:09:41 +0100
committer  GitHub <[email protected]>                                 2021-04-03 01:09:41 +0100
commit     eb264fb81963d9ec08b2797818073e8ae2993a41 (patch)
tree       35822d50eee5433a236b2281db15e3fbefeb9deb /crates/hir_def/src
parent     8e3e13f3a41b311c82fa5859c5bfebbbcd82cad4 (diff)
parent     85757be59aa401f250cadb50a4f6d75ffb526249 (diff)
Merge #8303
8303: Allow interning strings r=jonas-schievink a=jonas-schievink
We don't use it yet, that's a bit more complicated.
bors r+
Co-authored-by: Jonas Schievink <[email protected]>
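As context for the diff below: the new `Interned<str>` constructor and its pointer-based equality are not exercised anywhere yet, per the message above. A minimal, hypothetical usage sketch (not part of this commit), assuming it lives inside `hir_def` where `crate::intern::Interned` is visible and that the blanket `AsRef` impl shown in the diff applies to `str` through the `impl_internable!` expansion:

use crate::intern::Interned;

// Hypothetical usage sketch, not part of this commit.
fn intern_str_example() {
    let a = Interned::new_str("hello");
    let b = Interned::new_str("hello");
    let c = Interned::new_str("world");

    // Equal contents intern to the same Arc<str>, so the pointer-based
    // PartialEq for Interned<str> treats them as equal.
    assert!(a == b);
    assert!(a != c);

    // The underlying string is reachable through the blanket AsRef impl.
    assert!(a.as_ref() == "hello");
}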
Diffstat (limited to 'crates/hir_def/src')
-rw-r--r--  crates/hir_def/src/intern.rs | 55
1 file changed, 46 insertions, 9 deletions
diff --git a/crates/hir_def/src/intern.rs b/crates/hir_def/src/intern.rs
index bc0307dbc..d163f633f 100644
--- a/crates/hir_def/src/intern.rs
+++ b/crates/hir_def/src/intern.rs
@@ -3,17 +3,20 @@
 //! Eventually this should probably be replaced with salsa-based interning.
 
 use std::{
+    collections::HashMap,
     fmt::{self, Debug},
     hash::{BuildHasherDefault, Hash},
     ops::Deref,
     sync::Arc,
 };
 
-use dashmap::{DashMap, SharedValue};
+use dashmap::{lock::RwLockWriteGuard, DashMap, SharedValue};
 use once_cell::sync::OnceCell;
 use rustc_hash::FxHasher;
 
 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
+type Guard<T> =
+    RwLockWriteGuard<'static, HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>>;
 
 #[derive(Hash)]
 pub struct Interned<T: Internable + ?Sized> {
@@ -22,10 +25,22 @@ pub struct Interned<T: Internable + ?Sized> {
 
 impl<T: Internable> Interned<T> {
     pub fn new(obj: T) -> Self {
+        match Interned::lookup(&obj) {
+            Ok(this) => this,
+            Err(shard) => {
+                let arc = Arc::new(obj);
+                Self::alloc(arc, shard)
+            }
+        }
+    }
+}
+
+impl<T: Internable + ?Sized> Interned<T> {
+    fn lookup(obj: &T) -> Result<Self, Guard<T>> {
         let storage = T::storage().get();
-        let shard_idx = storage.determine_map(&obj);
+        let shard_idx = storage.determine_map(obj);
         let shard = &storage.shards()[shard_idx];
-        let mut shard = shard.write();
+        let shard = shard.write();
 
         // Atomically,
         // - check if `obj` is already in the map
@@ -34,13 +49,15 @@ impl<T: Internable> Interned<T> {
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
 
-        // FIXME: avoid double lookup by using raw entry API (once stable, or when hashbrown can be
-        // plugged into dashmap)
-        if let Some((arc, _)) = shard.get_key_value(&obj) {
-            return Self { arc: arc.clone() };
+        // FIXME: avoid double lookup/hashing by using raw entry API (once stable, or when
+        // hashbrown can be plugged into dashmap)
+        match shard.get_key_value(obj) {
+            Some((arc, _)) => Ok(Self { arc: arc.clone() }),
+            None => Err(shard),
         }
+    }
 
-        let arc = Arc::new(obj);
+    fn alloc(arc: Arc<T>, mut shard: Guard<T>) -> Self {
         let arc2 = arc.clone();
 
         shard.insert(arc2, SharedValue::new(()));
@@ -49,6 +66,18 @@ impl<T: Internable> Interned<T> {
     }
 }
 
+impl Interned<str> {
+    pub fn new_str(s: &str) -> Self {
+        match Interned::lookup(s) {
+            Ok(this) => this,
+            Err(shard) => {
+                let arc = Arc::<str>::from(s);
+                Self::alloc(arc, shard)
+            }
+        }
+    }
+}
+
 impl<T: Internable + ?Sized> Drop for Interned<T> {
     #[inline]
     fn drop(&mut self) {
@@ -98,6 +127,14 @@ impl<T: Internable> PartialEq for Interned<T> {
 
 impl<T: Internable> Eq for Interned<T> {}
 
+impl PartialEq for Interned<str> {
+    fn eq(&self, other: &Self) -> bool {
+        Arc::ptr_eq(&self.arc, &other.arc)
+    }
+}
+
+impl Eq for Interned<str> {}
+
 impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
     #[inline]
     fn as_ref(&self) -> &T {
@@ -157,4 +194,4 @@ macro_rules! impl_internable {
     )+ };
 }
 
-impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath);
+impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath, str);
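For readers unfamiliar with dashmap's raw API (the file already depends on it through `determine_map`, `shards`, and `SharedValue`), the lookup/alloc split above is a check-then-insert performed while one shard of the DashMap stays write-locked, which is what makes the operation atomic with respect to other threads. Below is a condensed, self-contained sketch of that pattern with hypothetical names, using dashmap's default hasher rather than the crate's `FxHasher` setup; like the real code, it still hashes the key more than once, which is what the raw-entry FIXME in the diff refers to.

use std::sync::Arc;

use dashmap::{DashMap, SharedValue};

// Condensed, hypothetical sketch of the shard-locking pattern used by
// lookup/alloc above; not part of the commit. The shard stays write-locked
// across the lookup and the insert, so no other thread can intern a
// duplicate in between.
fn intern_str(map: &DashMap<Arc<str>, ()>, s: &str) -> Arc<str> {
    // Pick the shard the key would hash into, then lock it for writing.
    let shard_idx = map.determine_map(s);
    let mut shard = map.shards()[shard_idx].write();

    // Already interned: hand out a clone of the existing Arc.
    if let Some((existing, _)) = shard.get_key_value(s) {
        return existing.clone();
    }

    // Not present: allocate and insert while still holding the lock.
    let arc: Arc<str> = Arc::from(s);
    shard.insert(arc.clone(), SharedValue::new(()));
    arc
}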