pub use super::{Allocator, BSTScatterKeyExtractor, BSTScatterTableHeapAllocator, KeyStrategy};

use super::{EntryType, Iter, SentinelPtr};
use core::alloc::Layout;
use core::mem::MaybeUninit;
use core::ptr;

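/// Scatter-table hash map underlying `BSTHashMap`: keys hash to a "home" slot
/// in a power-of-two slot array, and colliding entries are chained through
/// per-entry `next` pointers terminated by a shared sentinel. The `#[repr(C)]`
/// layout mirrors the native `BSTScatterTable` (see the offset assertions
/// below).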
#[repr(C)]
#[derive(Debug)]
pub struct BSTScatterTable<S, A>
where
    S: KeyStrategy,
    A: Allocator,
{
    pad00: u64,
    pad08: u32,
    /// Total slot count; zero, or a power of two once allocated.
    pub(crate) capacity: u32,
    /// Number of slots not currently occupied.
    pub(crate) free: u32,
    /// Roving cursor from which `get_free_entry` resumes probing.
    pub(crate) good: u32,
    /// Shared end-of-chain marker.
    pub(crate) sentinel: Option<SentinelPtr<EntryType<S::Pair>>>,
    pub(crate) allocator: A,
}
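
// Compile-time proof that the field offsets and total size match the expected
// native `BSTScatterTable` layout.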
const _: () = {
    type TestHashMap =
        BSTScatterTable<BSTScatterKeyExtractor<i32, i32>, BSTScatterTableHeapAllocator>;

    assert!(core::mem::offset_of!(TestHashMap, pad00) == 0x00);
    assert!(core::mem::offset_of!(TestHashMap, pad08) == 0x08);
    assert!(core::mem::offset_of!(TestHashMap, capacity) == 0x0C);
    assert!(core::mem::offset_of!(TestHashMap, free) == 0x10);
    assert!(core::mem::offset_of!(TestHashMap, good) == 0x14);
    assert!(core::mem::offset_of!(TestHashMap, sentinel) == 0x18);
    assert!(core::mem::offset_of!(TestHashMap, allocator) == 0x20);

    assert!(core::mem::size_of::<TestHashMap>() == 0x30);
};

impl<S, A> Default for BSTScatterTable<S, A>
where
    S: KeyStrategy,
    A: Allocator + Default,
{
    fn default() -> Self {
        Self {
            pad00: 0,
            pad08: 0,
            capacity: 0,
            free: 0,
            good: 0,
            sentinel: Some(SentinelPtr::sentinel()),
            allocator: A::default(),
        }
    }
}

impl<S, A> BSTScatterTable<S, A>
where
    S: KeyStrategy<Key: PartialEq>,
    A: Allocator,
{
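    /// Inserts `value`, returning an iterator over the affected entry and a
    /// flag that is `false` when the key was already present.
    ///
    /// A minimal usage sketch (hedged: it assumes the `i32`/`i32` extractor's
    /// `Pair` behaves like a plain `(key, value)` tuple):
    ///
    /// ```ignore
    /// let mut map: BSTScatterTable<
    ///     BSTScatterKeyExtractor<i32, i32>,
    ///     BSTScatterTableHeapAllocator,
    /// > = BSTScatterTable::default();
    /// let (_iter, inserted) = map.insert((42, 7));
    /// assert!(inserted);
    /// ```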
    #[inline]
    pub fn insert(&mut self, value: S::Pair) -> (Iter<S::Pair>, bool) {
        self.do_insert(value)
    }

    #[inline]
    pub fn insert_range<I>(&mut self, iter: I)
    where
        I: IntoIterator<Item = Option<S::Pair>>,
    {
        let iter = iter.into_iter();
        // Reserve room for the iterator's lower size bound up front to limit
        // rehashing during the loop.
        let (lower, _) = iter.size_hint();
        self.reserve(self.len() + lower as u32);

        for value in iter.flatten() {
            let _ = self.insert(value);
        }
    }

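    // Insertion strategy (coalesced chaining): each key has a home slot from
    // `get_entry_for`. If the home slot holds an entry of the same chain, the
    // new pair is spliced into that chain via a free slot. If it holds an
    // entry displaced from another chain, that entry is evicted to a free
    // slot first so the new pair can claim its home slot.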
    fn do_insert(&mut self, pair: S::Pair) -> (Iter<S::Pair>, bool) {
        // Existing key: report the entry without inserting.
        if let Some(iter) = self.find(S::get_key(&pair)) {
            return (iter, false);
        }

        if self.free == 0 {
            self.reserve(self.capacity + 1);
            assert!(self.free > 0);
        }

        // Claim one slot up front; `get_free_entry` accounts for this.
        self.free -= 1;
        let entry = self.get_entry_for(S::get_key(&pair));

        if entry.is_some_and(|entry| unsafe {
            entry.as_non_sentinel_ref().is_some_and(|entry| entry.is_occupied())
        }) {
            let free = self.get_free_entry();

            // The home slot the current occupant itself hashes to.
            let would_ve = entry
                .as_ref()
                .and_then(|e| unsafe { e.as_non_sentinel_ref() })
                .map(|e| S::get_key(&e.value_data))
                .and_then(|key| self.get_entry_for(key));

            if would_ve == entry {
                // The occupant belongs here: splice the new pair into its
                // chain through the free slot.
                let prev_next =
                    core::mem::replace(unsafe { &mut entry.unwrap().as_mut().next }, free);
                unsafe { free.unwrap().as_mut().push(pair, prev_next) };
                return (Iter::new(free), true);
            }

            // The occupant was displaced from another chain: locate its
            // predecessor in that chain.
            let mut prev = would_ve;
            while {
                let prev_ref = prev.and_then(|prev| unsafe { prev.as_non_sentinel_ref() });
                let next = prev_ref.and_then(|prev| prev.next);
                next != entry
            } {
                prev = prev.and_then(|prev| unsafe { prev.as_non_sentinel_ref() }?.next);
            }

            // Evict the occupant into the free slot (carrying its chain tail
            // along) and repoint the predecessor at the new location.
            unsafe {
                if let (Some(mut free_ptr), Some(mut entry_ptr)) = (free, entry) {
                    let displaced_next = entry_ptr.as_mut().next.take();
                    if let Some(displaced) = entry_ptr.as_mut().steal() {
                        free_ptr.as_mut().push(displaced, displaced_next);
                    }
                }
                prev.unwrap().as_mut().next = free;
            }
        }

        // The home slot is empty (or was just vacated): start a new chain.
        unsafe { entry.unwrap().as_mut().push(pair, Some(SentinelPtr::sentinel())) };
        (Iter::new(entry), true)
    }

    fn find<'a>(&self, key: &S::Key) -> Option<Iter<'a, S::Pair>> {
        if self.is_empty() {
            return None;
        }

        let mut current_entry = self.get_entry_for(key)?;

        while let Some(entry_ref) = unsafe { current_entry.as_non_sentinel_ref() } {
            if !entry_ref.is_occupied() {
                break;
            }

            let current_key = S::get_key(&entry_ref.value_data);
            if current_key == key {
                return Some(entry_ref.iter());
            }

            current_entry = entry_ref.next?;
        }

        None
    }

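    // A key's home slot is `hash & (capacity - 1)`, which is why `capacity`
    // must remain a power of two.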
    fn get_entry_for(&self, key: &S::Key) -> Option<SentinelPtr<EntryType<S::Pair>>> {
        assert!(self.get_entry_start().is_some());

        let hash = S::hash(key);
        let idx = hash & (self.capacity - 1);

        let entries = self.get_entry_start()?;
        unsafe { Some(entries.add(idx as usize)) }
    }

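    // `good` is a roving cursor: the search for a free slot resumes where the
    // previous one left off rather than rescanning from slot 0.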
    fn get_free_entry(&mut self) -> Option<SentinelPtr<EntryType<S::Pair>>> {
        // NOTE: `do_insert` has already decremented `self.free` for the slot
        // being claimed, so the counter may legitimately be 0 here; what must
        // hold is that at least one slot is still physically unoccupied,
        // otherwise the probe below would never terminate.
        assert!(self.get_entry_start().is_some());
        assert!(self.capacity.is_power_of_two());
        debug_assert!(self.entries().is_some_and(|e| unsafe {
            e.as_non_sentinel_ref().is_some_and(|e| e.iter().any(|e| !e.is_occupied()))
        }));

        let entries = self.get_entry_start()?;

        // Linear probe from the `good` cursor, wrapping via the mask.
        while unsafe { entries.add(self.good as usize).as_non_sentinel_ref() }?.is_occupied() {
            self.good = (self.good + 1) & (self.capacity - 1);
        }

        Some(unsafe { entries.add(self.good as usize) })
    }

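    // Growth rounds the requested capacity up to a power of two and rehashes
    // every live entry, either in place (when the allocator reuses the same
    // buffer) or into a freshly allocated one.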
    fn reserve(&mut self, new_capacity: u32) {
        if new_capacity <= self.capacity {
            return;
        }

        let old_capacity = self.capacity;
        let old_entries = self.get_entry_start();

        let (new_capacity, new_entries) = {
            let min = A::min_size();
            assert!((1..=u8::MAX as u32).contains(&min), "Allocator::min_size() must be in 1..=255");
            let new_capacity = new_capacity.max(min);
            if new_capacity > (1 << 31) {
                panic!("BSTScatterTable: buffer grew too large");
            }
            // `get_entry_for` and `get_free_entry` mask with `capacity - 1`,
            // so the capacity must always be a power of two.
            let new_capacity = new_capacity.next_power_of_two();

            unsafe { self.allocate_entries(new_capacity) }.map_or_else(
                || panic!("BSTScatterTable: allocation failed"),
                |entries| (new_capacity, entries),
            )
        };

        // In-place growth: the allocator handed back the same buffer, so only
        // the new tail needs zeroing, and every live entry must be rehashed
        // into the wider table.
        if old_entries.is_some_and(|old| old.addr() == new_entries.addr()) {
            unsafe {
                // `write_bytes` counts elements of the pointee type, not
                // bytes, so the element count must not be scaled by the size.
                core::ptr::write_bytes(
                    old_entries.unwrap().add(old_capacity as usize).as_ptr(),
                    0,
                    (new_capacity - old_capacity) as usize,
                );
            }

            let mut todo = Vec::with_capacity(self.len() as usize);
            for i in 0..old_capacity {
                if let Some(entry) =
                    unsafe { old_entries.unwrap().add(i as usize).as_non_sentinel_mut() }
                {
                    if entry.is_occupied() {
                        todo.push(entry.steal());
                    }
                };
            }

            self.capacity = new_capacity;
            self.free = new_capacity;
            self.good = 0;

            self.insert_range(todo);
            return;
        }

        // Fresh buffer: publish it, move every occupied entry over, then
        // release the old allocation.
        // SAFETY: zeroed entries are valid "empty" entries, so shedding the
        // `MaybeUninit` wrapper here is sound.
        #[allow(clippy::missing_transmute_annotations)]
        self.set_entries(unsafe { core::mem::transmute(new_entries) });
        self.capacity = new_capacity;
        self.free = new_capacity;
        self.good = 0;

        if let Some(old_entries) = old_entries {
            unsafe {
                for i in 0..old_capacity {
                    let Some(entry) = old_entries.add(i as usize).as_non_sentinel_mut() else {
                        continue;
                    };
                    if entry.is_occupied() {
                        if let Some(pair) = entry.steal() {
                            self.insert(pair);
                        }
                    }
                }

                ptr::drop_in_place(core::slice::from_raw_parts_mut(
                    old_entries.as_ptr(),
                    old_capacity as usize,
                ));
                self.deallocate_entries(old_entries, old_capacity);
            }
        }
    }
}

impl<S, A> BSTScatterTable<S, A>
where
    S: KeyStrategy,
    A: Allocator,
{
    #[allow(clippy::type_complexity)]
    unsafe fn allocate_entries(
        &mut self,
        count: u32,
    ) -> Option<SentinelPtr<[MaybeUninit<EntryType<S::Pair>>]>> {
        let ptr = unsafe { self.allocator.allocate_zeroed(Self::new_entries_layout(count)).ok() }?;
        Some(SentinelPtr::slice_from_raw_parts(ptr.cast(), count as usize))
    }

    unsafe fn deallocate_entries(&mut self, ptr: SentinelPtr<EntryType<S::Pair>>, capacity: u32) {
        let layout = Self::new_entries_layout(capacity);
        unsafe {
            self.allocator.deallocate(ptr.as_non_null().cast(), layout);
        };
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn len(&self) -> u32 {
        self.capacity - self.free
    }

    pub fn get_entry_start(&self) -> Option<SentinelPtr<EntryType<S::Pair>>> {
        let base_ptr: *mut EntryType<S::Pair> = self.allocator.get_entries().cast();
        SentinelPtr::new(base_ptr)
    }

    pub(crate) fn clear(&mut self) {
        if self.is_empty() {
            return;
        }
        self.for_each_valid_entry_mut(|entry| {
            entry.destroy();
        });
        self.free = self.capacity;
        self.good = 0;

        debug_assert!(self.is_empty());
    }

    fn entries(&self) -> Option<SentinelPtr<[EntryType<S::Pair>]>> {
        let ptr = self.get_entry_start()?.as_non_null();
        Some(SentinelPtr::slice_from_raw_parts(ptr, self.capacity as usize))
    }

    /// Visits every occupied entry by scanning the slot array directly.
    /// (Following `next` chains from slot 0 would only ever visit the first
    /// chain, so destruction would leak the rest of the table.)
    fn for_each_valid_entry_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut EntryType<S::Pair>),
    {
        let Some(entries) = self.get_entry_start() else {
            return;
        };

        for i in 0..self.capacity {
            if let Some(entry) = unsafe { entries.add(i as usize).as_non_sentinel_mut() } {
                if entry.is_occupied() {
                    f(entry);
                }
            }
        }
    }

    fn set_entries(&mut self, entries: SentinelPtr<[EntryType<S::Pair>]>) {
        self.allocator.set_entries(entries.as_ptr().cast());
    }

    fn new_entries_layout(capacity: u32) -> Layout {
        Layout::array::<EntryType<S::Pair>>(capacity as usize).expect("[BSTHashMap] valid Layout")
    }

    fn free_resources(&mut self) {
        if self.capacity > 0 {
            if let Some(entries) = self.get_entry_start() {
                self.for_each_valid_entry_mut(|entry| {
                    entry.destroy();
                });
                unsafe { self.deallocate_entries(entries, self.capacity) };
                self.allocator.set_entries(ptr::null_mut());
            };
        }

        self.capacity = 0;
        self.free = 0;
        self.good = 0;
    }
}

impl<S, A> Drop for BSTScatterTable<S, A>
where
    S: KeyStrategy,
    A: Allocator,
{
    fn drop(&mut self) {
        self.free_resources();
    }
}
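
// A smoke-test sketch added for illustration (not from the original source).
// It assumes `BSTScatterTableHeapAllocator: Default` and that the i32/i32
// extractor's `S::Pair` behaves like a plain `(i32, i32)` tuple; adjust the
// pair construction if the crate's actual `Pair` type differs.
#[cfg(test)]
mod insert_smoke_sketch {
    use super::*;

    type TestMap =
        BSTScatterTable<BSTScatterKeyExtractor<i32, i32>, BSTScatterTableHeapAllocator>;

    #[test]
    fn insert_then_reinsert_same_key() {
        let mut map = TestMap::default();

        // Hypothetical pair literal: assumes `S::Pair = (i32, i32)`.
        let (_iter, inserted) = map.insert((42, 7));
        assert!(inserted);
        assert_eq!(map.len(), 1);

        // Re-inserting an existing key reports `false` and leaves `len` alone.
        let (_iter, inserted_again) = map.insert((42, 8));
        assert!(!inserted_again);
        assert_eq!(map.len(), 1);
    }
}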