author     Jonas Schievink <[email protected]>  2021-04-03 00:00:45 +0100
committer  Jonas Schievink <[email protected]>  2021-04-03 00:00:45 +0100
commit     85757be59aa401f250cadb50a4f6d75ffb526249
tree       35822d50eee5433a236b2281db15e3fbefeb9deb
parent     8e3e13f3a41b311c82fa5859c5bfebbbcd82cad4
Allow interning strings
-rw-r--r--  crates/hir_def/src/intern.rs  |  55
1 file changed, 46 insertions(+), 9 deletions(-)
diff --git a/crates/hir_def/src/intern.rs b/crates/hir_def/src/intern.rs
index bc0307dbc..d163f633f 100644
--- a/crates/hir_def/src/intern.rs
+++ b/crates/hir_def/src/intern.rs
@@ -3,17 +3,20 @@
 //! Eventually this should probably be replaced with salsa-based interning.
 
 use std::{
+    collections::HashMap,
     fmt::{self, Debug},
     hash::{BuildHasherDefault, Hash},
     ops::Deref,
     sync::Arc,
 };
 
-use dashmap::{DashMap, SharedValue};
+use dashmap::{lock::RwLockWriteGuard, DashMap, SharedValue};
 use once_cell::sync::OnceCell;
 use rustc_hash::FxHasher;
 
 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
+type Guard<T> =
+    RwLockWriteGuard<'static, HashMap<Arc<T>, SharedValue<()>, BuildHasherDefault<FxHasher>>>;
 
 #[derive(Hash)]
 pub struct Interned<T: Internable + ?Sized> {
@@ -22,10 +25,22 @@ pub struct Interned<T: Internable + ?Sized> {
 
 impl<T: Internable> Interned<T> {
     pub fn new(obj: T) -> Self {
+        match Interned::lookup(&obj) {
+            Ok(this) => this,
+            Err(shard) => {
+                let arc = Arc::new(obj);
+                Self::alloc(arc, shard)
+            }
+        }
+    }
+}
+
+impl<T: Internable + ?Sized> Interned<T> {
+    fn lookup(obj: &T) -> Result<Self, Guard<T>> {
         let storage = T::storage().get();
-        let shard_idx = storage.determine_map(&obj);
+        let shard_idx = storage.determine_map(obj);
         let shard = &storage.shards()[shard_idx];
-        let mut shard = shard.write();
+        let shard = shard.write();
 
         // Atomically,
         // - check if `obj` is already in the map
@@ -34,13 +49,15 @@ impl<T: Internable> Interned<T> {
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
        // insert the same object between us looking it up and inserting it.
 
-        // FIXME: avoid double lookup by using raw entry API (once stable, or when hashbrown can be
-        // plugged into dashmap)
-        if let Some((arc, _)) = shard.get_key_value(&obj) {
-            return Self { arc: arc.clone() };
+        // FIXME: avoid double lookup/hashing by using raw entry API (once stable, or when
+        // hashbrown can be plugged into dashmap)
+        match shard.get_key_value(obj) {
+            Some((arc, _)) => Ok(Self { arc: arc.clone() }),
+            None => Err(shard),
         }
+    }
 
-        let arc = Arc::new(obj);
+    fn alloc(arc: Arc<T>, mut shard: Guard<T>) -> Self {
         let arc2 = arc.clone();
 
         shard.insert(arc2, SharedValue::new(()));
@@ -49,6 +66,18 @@ impl<T: Internable> Interned<T> {
     }
 }
 
+impl Interned<str> {
+    pub fn new_str(s: &str) -> Self {
+        match Interned::lookup(s) {
+            Ok(this) => this,
+            Err(shard) => {
+                let arc = Arc::<str>::from(s);
+                Self::alloc(arc, shard)
+            }
+        }
+    }
+}
+
 impl<T: Internable + ?Sized> Drop for Interned<T> {
     #[inline]
     fn drop(&mut self) {
@@ -98,6 +127,14 @@ impl<T: Internable> PartialEq for Interned<T> {
 
 impl<T: Internable> Eq for Interned<T> {}
 
+impl PartialEq for Interned<str> {
+    fn eq(&self, other: &Self) -> bool {
+        Arc::ptr_eq(&self.arc, &other.arc)
+    }
+}
+
+impl Eq for Interned<str> {}
+
 impl<T: Internable + ?Sized> AsRef<T> for Interned<T> {
     #[inline]
     fn as_ref(&self) -> &T {
@@ -157,4 +194,4 @@ macro_rules! impl_internable {
     )+ };
 }
 
-impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath);
+impl_internable!(crate::type_ref::TypeRef, crate::type_ref::TraitRef, crate::path::ModPath, str);
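Note (not part of the commit): the change splits interning into a generic lookup step, which returns either the existing Arc or the still-locked shard on a miss, and an alloc step that inserts under that same lock; that split is what allows the unsized str case, where the value is allocated with Arc::<str>::from instead of Arc::new. The snippet below is a minimal, std-only sketch of that lookup-or-insert pattern under stated assumptions, not rust-analyzer code: it swaps dashmap's sharded map for a single Mutex<HashSet<Arc<str>>>, and the names storage and intern_str are illustrative only.

    use std::{
        collections::HashSet,
        sync::{Arc, Mutex, OnceLock},
    };

    // Hypothetical global storage standing in for `InternMap<str>` in the diff.
    fn storage() -> &'static Mutex<HashSet<Arc<str>>> {
        static MAP: OnceLock<Mutex<HashSet<Arc<str>>>> = OnceLock::new();
        MAP.get_or_init(|| Mutex::new(HashSet::new()))
    }

    // Lookup-or-insert under one lock, mirroring `lookup` + `alloc` above.
    fn intern_str(s: &str) -> Arc<str> {
        let mut map = storage().lock().unwrap();
        if let Some(arc) = map.get(s) {
            // Hit: hand out a clone of the existing allocation.
            return arc.clone();
        }
        // Miss: allocate the string once and insert it while still holding the lock,
        // so no other thread can insert the same string in between.
        let arc: Arc<str> = Arc::from(s);
        map.insert(arc.clone());
        arc
    }

    fn main() {
        let a = intern_str("hello");
        let b = intern_str("hello");
        // Both calls return the same allocation, so equality can be a pointer
        // comparison, analogous to the `PartialEq for Interned<str>` impl above.
        assert!(Arc::ptr_eq(&a, &b));
    }

Unlike the real implementation, this sketch takes one process-wide lock per call instead of a per-shard write lock, so it only illustrates the semantics, not the concurrency behavior.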