diff options
Diffstat (limited to 'crates/hir_def/src/intern.rs')
-rw-r--r-- | crates/hir_def/src/intern.rs | 157 |
1 files changed, 157 insertions, 0 deletions
diff --git a/crates/hir_def/src/intern.rs b/crates/hir_def/src/intern.rs new file mode 100644 index 000000000..28ec72cff --- /dev/null +++ b/crates/hir_def/src/intern.rs | |||
@@ -0,0 +1,157 @@ | |||
1 | //! Global `Arc`-based object interning infrastructure. | ||
2 | //! | ||
3 | //! Eventually this should probably be replaced with salsa-based interning. | ||
4 | |||
5 | use std::{ | ||
6 | fmt::{self, Debug}, | ||
7 | hash::{BuildHasherDefault, Hash}, | ||
8 | ops::Deref, | ||
9 | sync::Arc, | ||
10 | }; | ||
11 | |||
12 | use dashmap::{DashMap, SharedValue}; | ||
13 | use once_cell::sync::OnceCell; | ||
14 | use rustc_hash::FxHasher; | ||
15 | |||
// The interner is a concurrent hash *set* of `Arc<T>`s: the `()` value
// carries no data. Hashing uses the fast `FxHasher` rather than the
// DoS-resistant default.
type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
17 | |||
/// A handle to an interned value: a thin wrapper around the single shared
/// `Arc<T>` stored in `T`'s global intern map.
pub struct Interned<T: Internable> {
    arc: Arc<T>,
}
21 | |||
impl<T: Internable> Interned<T> {
    /// Interns `obj`: returns a handle to the unique shared `Arc<T>` equal to
    /// `obj`, inserting it into the global map if it was not interned yet.
    pub fn new(obj: T) -> Self {
        let storage = T::storage().get();
        // Resolve the dashmap shard for `obj` manually so that its lock can
        // be held across the whole lookup-or-insert sequence below.
        let shard_idx = storage.determine_map(&obj);
        let shard = &storage.shards()[shard_idx];
        // Upgradeable read lock: concurrent lookups stay unblocked, and it
        // can later be upgraded to a write lock without being released.
        let shard = shard.upgradeable_read();

        // Atomically,
        // - check if `obj` is already in the map
        // - if so, clone its `Arc` and return it
        // - if not, box it up, insert it, and return a clone
        // This needs to be atomic (locking the shard) to avoid races with other threads, which
        // could insert the same object between us looking it up and inserting it.

        // FIXME: avoid double lookup by using raw entry API (once stable, or when hashbrown can be
        // plugged into dashmap)
        if let Some((arc, _)) = shard.get_key_value(&obj) {
            return Self { arc: arc.clone() };
        }

        let arc = Arc::new(obj);
        let arc2 = arc.clone();

        {
            // Upgrade only for the insert; the write lock is dropped at the
            // end of this scope, before we return.
            let mut shard = shard.upgrade();
            shard.insert(arc2, SharedValue::new(()));
        }

        Self { arc }
    }
}
53 | |||
impl<T: Internable> Drop for Interned<T> {
    fn drop(&mut self) {
        // When the last external `Interned` handle is dropped, remove the
        // object from the global map so it does not leak.
        if Arc::strong_count(&self.arc) == 2 {
            // Only `self` and the global map point to the object.
            // NOTE: this count is a racy snapshot taken without the lock —
            // another thread may intern a fresh copy right after we read it,
            // hence the re-check under the shard write lock below.

            let storage = T::storage().get();
            let shard_idx = storage.determine_map(&self.arc);
            let shard = &storage.shards()[shard_idx];
            // Write lock: no other thread can intern or remove this value
            // while we decide whether to delete it.
            let mut shard = shard.write();

            // FIXME: avoid double lookup
            let (arc, _) =
                shard.get_key_value(&self.arc).expect("interned value removed prematurely");

            if Arc::strong_count(arc) != 2 {
                // Another thread has interned another copy
                return;
            }

            shard.remove(&self.arc);

            // Shrink the backing storage if the shard is less than 50% occupied.
            if shard.len() * 2 < shard.capacity() {
                shard.shrink_to_fit();
            }
        }
    }
}
83 | |||
84 | /// Compares interned `Ref`s using pointer equality. | ||
85 | impl<T: Internable> PartialEq for Interned<T> { | ||
86 | #[inline] | ||
87 | fn eq(&self, other: &Self) -> bool { | ||
88 | Arc::ptr_eq(&self.arc, &other.arc) | ||
89 | } | ||
90 | } | ||
91 | |||
92 | impl<T: Internable> Eq for Interned<T> {} | ||
93 | |||
94 | impl<T: Internable> AsRef<T> for Interned<T> { | ||
95 | #[inline] | ||
96 | fn as_ref(&self) -> &T { | ||
97 | &self.arc | ||
98 | } | ||
99 | } | ||
100 | |||
101 | impl<T: Internable> Deref for Interned<T> { | ||
102 | type Target = T; | ||
103 | |||
104 | #[inline] | ||
105 | fn deref(&self) -> &Self::Target { | ||
106 | &self.arc | ||
107 | } | ||
108 | } | ||
109 | |||
110 | impl<T: Internable> Clone for Interned<T> { | ||
111 | fn clone(&self) -> Self { | ||
112 | Self { arc: self.arc.clone() } | ||
113 | } | ||
114 | } | ||
115 | |||
116 | impl<T: Debug + Internable> Debug for Interned<T> { | ||
117 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
118 | (*self.arc).fmt(f) | ||
119 | } | ||
120 | } | ||
121 | |||
/// Lazily-initialized global map holding every currently-interned `T`.
///
/// One `static` instance exists per interned type (see `impl_internable!`);
/// `Interned::new` and the `Drop` impl both go through it.
pub struct InternStorage<T> {
    // `OnceCell` lets `new` be a `const fn` usable in a `static`, while the
    // `DashMap` itself is only allocated on first use.
    map: OnceCell<InternMap<T>>,
}
125 | |||
126 | impl<T> InternStorage<T> { | ||
127 | pub const fn new() -> Self { | ||
128 | Self { map: OnceCell::new() } | ||
129 | } | ||
130 | } | ||
131 | |||
132 | impl<T: Internable> InternStorage<T> { | ||
133 | fn get(&self) -> &InternMap<T> { | ||
134 | self.map.get_or_init(DashMap::default) | ||
135 | } | ||
136 | } | ||
137 | |||
/// Types that can be interned through `Interned<T>`.
///
/// `Hash + Eq` are required for the map lookup, and `'static` because values
/// live in a global map. Implement via the `impl_internable!` macro below.
pub trait Internable: Hash + Eq + Sized + 'static {
    /// The global storage holding all interned values of `Self`.
    fn storage() -> &'static InternStorage<Self>;
}
141 | |||
// region:`Internable` implementations

/// Implements `Internable` for each listed type by giving it its own
/// dedicated `static` `InternStorage`.
macro_rules! impl_internable {
    ( $($t:ty),+ $(,)? ) => { $(
        impl Internable for $t {
            fn storage() -> &'static InternStorage<Self> {
                // One global map per interned type.
                static STORAGE: InternStorage<$t> = InternStorage::new();
                &STORAGE
            }
        }
    )+ };
}
154 | |||
// The set of types currently interned through this module.
impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef);

// endregion