path: root/crates/hir_def/src/intern.rs
author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2021-04-02 17:43:16 +0100
committer GitHub <[email protected]>    2021-04-02 17:43:16 +0100
commit    f4d56989b657b15aec6675cf1ba697e3f87eb088 (patch)
tree      23e4d8265444257f2e4018a003659a711fde0414 /crates/hir_def/src/intern.rs
parent    9bcdbefc7b657f34704439d068113180b14359dc (diff)
parent    6e227b80a7686a7ea5bc039d54c307fda29c99ba (diff)
Merge #8284
8284: Reduce memory usage by using global `Arc`-based interning r=jonas-schievink a=jonas-schievink

This saves around 50 mb when running `analysis-stats` on r-a itself. Not a lot, but this infra can be easily reused to intern more stuff.

Co-authored-by: Jonas Schievink <[email protected]>
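To make the idea concrete, here is a minimal, self-contained sketch of `Arc`-based interning (illustration only, not code from this merge; the `Interner` type and `intern` method are hypothetical names):

use std::collections::HashMap;
use std::hash::Hash;
use std::sync::{Arc, Mutex};

// Hypothetical single-map interner: equal values share one `Arc`,
// so holding many logical copies costs a single allocation.
struct Interner<T: Eq + Hash> {
    map: Mutex<HashMap<Arc<T>, ()>>,
}

impl<T: Eq + Hash> Interner<T> {
    fn new() -> Self {
        Self { map: Mutex::new(HashMap::new()) }
    }

    fn intern(&self, value: T) -> Arc<T> {
        // Holding the lock across lookup and insert keeps the operation atomic.
        let mut map = self.map.lock().unwrap();
        // Reuse the existing allocation if an equal value was interned before.
        if let Some((existing, _)) = map.get_key_value(&value) {
            return Arc::clone(existing);
        }
        let arc = Arc::new(value);
        map.insert(Arc::clone(&arc), ());
        arc
    }
}

fn main() {
    let interner = Interner::new();
    let a = interner.intern("core::option::Option".to_string());
    let b = interner.intern("core::option::Option".to_string());
    // Both handles point at the same allocation.
    assert!(Arc::ptr_eq(&a, &b));
}

The actual implementation below additionally shards the map (via `dashmap`) and removes an entry when the last handle to it is dropped.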
Diffstat (limited to 'crates/hir_def/src/intern.rs')
-rw-r--r--    crates/hir_def/src/intern.rs    163
1 file changed, 163 insertions(+), 0 deletions(-)
diff --git a/crates/hir_def/src/intern.rs b/crates/hir_def/src/intern.rs
new file mode 100644
index 000000000..cc0b5d350
--- /dev/null
+++ b/crates/hir_def/src/intern.rs
@@ -0,0 +1,163 @@
//! Global `Arc`-based object interning infrastructure.
//!
//! Eventually this should probably be replaced with salsa-based interning.

use std::{
    fmt::{self, Debug},
    hash::{BuildHasherDefault, Hash},
    ops::Deref,
    sync::Arc,
};

use dashmap::{DashMap, SharedValue};
use once_cell::sync::OnceCell;
use rustc_hash::FxHasher;

type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;

#[derive(Hash)]
pub struct Interned<T: Internable + ?Sized> {
    arc: Arc<T>,
}

impl<T: Internable> Interned<T> {
    pub fn new(obj: T) -> Self {
        let storage = T::storage().get();
        let shard_idx = storage.determine_map(&obj);
        let shard = &storage.shards()[shard_idx];
        let shard = shard.upgradeable_read();

        // Atomically,
        // - check if `obj` is already in the map
        // - if so, clone its `Arc` and return it
        // - if not, box it up, insert it, and return a clone
        // This needs to be atomic (locking the shard) to avoid races with other threads, which
        // could insert the same object between us looking it up and inserting it.

        // FIXME: avoid double lookup by using raw entry API (once stable, or when hashbrown can be
        // plugged into dashmap)
        if let Some((arc, _)) = shard.get_key_value(&obj) {
            return Self { arc: arc.clone() };
        }

        let arc = Arc::new(obj);
        let arc2 = arc.clone();

        {
            let mut shard = shard.upgrade();
            shard.insert(arc2, SharedValue::new(()));
        }

        Self { arc }
    }
}

impl<T: Internable + ?Sized> Drop for Interned<T> {
    #[inline]
    fn drop(&mut self) {
        // When the last `Interned` handle is dropped, remove the object from the global map.
        if Arc::strong_count(&self.arc) == 2 {
            // Only `self` and the global map point to the object.

            self.drop_slow();
        }
    }
}

impl<T: Internable + ?Sized> Interned<T> {
    #[cold]
    fn drop_slow(&mut self) {
        let storage = T::storage().get();
        let shard_idx = storage.determine_map(&self.arc);
        let shard = &storage.shards()[shard_idx];
        let mut shard = shard.write();

        // FIXME: avoid double lookup
        let (arc, _) = shard.get_key_value(&self.arc).expect("interned value removed prematurely");

        if Arc::strong_count(arc) != 2 {
            // Another thread has interned another copy
            return;
        }

        shard.remove(&self.arc);

        // Shrink the backing storage if the shard is less than 50% occupied.
        if shard.len() * 2 < shard.capacity() {
            shard.shrink_to_fit();
        }
    }
}

/// Compares interned values using pointer equality.
impl<T: Internable> PartialEq for Interned<T> {
    // NOTE: No `?Sized` because `ptr_eq` doesn't work right with trait objects.

    #[inline]
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.arc, &other.arc)
    }
}

impl<T: Internable> Eq for Interned<T> {}

impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
    #[inline]
    fn as_ref(&self) -> &T {
        &self.arc
    }
}

impl<T: Internable + ?Sized> Deref for Interned<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.arc
    }
}

impl<T: Internable + ?Sized> Clone for Interned<T> {
    fn clone(&self) -> Self {
        Self { arc: self.arc.clone() }
    }
}

impl<T: Debug + Internable + ?Sized> Debug for Interned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (*self.arc).fmt(f)
    }
}

pub struct InternStorage<T: ?Sized> {
    map: OnceCell<InternMap<T>>,
}

impl<T: ?Sized> InternStorage<T> {
    pub const fn new() -> Self {
        Self { map: OnceCell::new() }
    }
}

impl<T: Internable + ?Sized> InternStorage<T> {
    fn get(&self) -> &InternMap<T> {
        self.map.get_or_init(DashMap::default)
    }
}

pub trait Internable: Hash + Eq + 'static {
    fn storage() -> &'static InternStorage<Self>;
}

macro_rules! impl_internable {
    ( $($t:path),+ $(,)? ) => { $(
        impl Internable for $t {
            fn storage() -> &'static InternStorage<Self> {
                static STORAGE: InternStorage<$t> = InternStorage::new();
                &STORAGE
            }
        }
    )+ };
}

impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath);
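For orientation, a hedged usage sketch of this API. The `Name` type and `example` function are hypothetical, and the sketch assumes it lives in this module, since `impl_internable!` is not exported:

// Hypothetical internable type; anything `Hash + Eq + 'static` can be made internable.
#[derive(Hash, PartialEq, Eq)]
struct Name(String);

impl_internable!(Name);

fn example() {
    let a = Interned::new(Name("Foo".to_string()));
    let b = Interned::new(Name("Foo".to_string()));
    // Equal values are deduplicated into one `Arc`, so equality is a pointer check.
    assert!(a == b);
    // `Deref` exposes the underlying value.
    assert_eq!(a.0, "Foo");
    // Dropping the last handle removes the entry from the global map.
}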