commonlibsse_ng/re/n/NiTMapBase.rs
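//! Chained hash map mirroring the C++ `NiTMapBase`: each bucket holds a singly
//! linked list of `NiTMapItem` nodes, and hashing, key comparison, node
//! allocation, and teardown are dispatched through an explicit vtable
//! (`NiTMapBaseVtbl`).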

use core::ptr;

#[repr(C)]
pub struct NiTMapItem<K, V> {
    pub(crate) next: *mut NiTMapItem<K, V>,
    /// C++ `first`
    pub(crate) key: K,
    /// C++ `second`
    pub(crate) value: V,
}
const _: () = assert!(core::mem::size_of::<NiTMapItem<u32, u64>>() == 0x18);

#[repr(C)]
pub struct NiTMapBase<K, V, A> {
    pub(crate) vtable: *const NiTMapBaseVtbl<K, V, A>,
    pub(crate) capacity: u32,
    pub(crate) pad0c: u32,
    pub(crate) data: *mut *mut NiTMapItem<K, V>,
    pub(crate) allocator: AntiBloatAllocator<A>,
}

impl<K, V, A> NiTMapBase<K, V, A> {
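    /// Creates an empty map header that dispatches through `vptr`, with the
    /// default capacity of 37 buckets and a null (not yet allocated) bucket array.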
    #[inline]
    pub const fn new(vptr: *const NiTMapBaseVtbl<K, V, A>) -> Self {
        Self {
            vtable: vptr,
            capacity: 37,
            pad0c: 0,
            data: ptr::null_mut(),
            allocator: AntiBloatAllocator::new(),
        }
    }

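    /// Creates an empty map header with the given bucket count; the vtable and
    /// bucket array are left null and must be set before use.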
    #[inline]
    pub const fn with_capacity(capacity: u32) -> Self {
        Self {
            vtable: ptr::null(),
            capacity,
            pad0c: 0,
            data: ptr::null_mut(),
            allocator: AntiBloatAllocator::new(),
        }
    }

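    /// Frees every stored item through the vtable's `clear_value`/`free_value`
    /// hooks and resets the element count to zero; the bucket array itself is kept.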
    pub fn clear(&mut self) {
        // The bucket array may not have been allocated yet (e.g. right after `new`).
        if self.data.is_null() {
            self.allocator.size = 0;
            return;
        }

        for i in 0..self.capacity {
            let current_data_ptr = unsafe { self.data.add(i as usize) };

            while let Some(elem) = unsafe { (*current_data_ptr).as_mut() } {
                unsafe { *current_data_ptr = elem.next };
                self.clear_value(elem);
                self.free_value(elem);
            }
        }

        self.allocator.size = 0;
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.allocator.size as usize
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    fn vtable(&self) -> &NiTMapBaseVtbl<K, V, A> {
        debug_assert!(!self.vtable.is_null(), "NiTMapBase vtable pointer must not be null");
        unsafe { &*self.vtable }
    }

    #[inline]
    pub(crate) fn hash_function(&self, key: K) -> u32 {
        (self.vtable().hash_function)(self, key)
    }

    #[inline]
    pub(crate) fn key_eq(&self, lhs: K, rhs: K) -> bool {
        (self.vtable().key_eq)(self, lhs, rhs)
    }

    #[inline]
    fn assign_value(&self, value: *mut NiTMapItem<K, V>, key: K, mapped: V) {
        (self.vtable().assign_value)(self, value, key, mapped);
    }

    #[inline]
    fn clear_value(&mut self, value: *mut NiTMapItem<K, V>) {
        (self.vtable().clear_value)(self, value);
    }

    #[inline]
    fn malloc_value(&mut self) -> *mut NiTMapItem<K, V> {
        (self.vtable().malloc_value)(self)
    }

    #[inline]
    fn free_value(&mut self, value: *mut NiTMapItem<K, V>) {
        (self.vtable().free_value)(self, value);
    }
}

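// Keys are passed by value through the vtable hooks, hence the `K: Copy` bound on
// the operations below.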
impl<K, V, A> NiTMapBase<K, V, A>
where
    K: Copy,
{
    /// C++ `insert_or_assign`
    ///
    /// # Panics
    ///
    /// Panics if the vtable's `malloc_value` returns a null pointer.
    pub fn insert(&mut self, key: K, value: V) -> bool {
        let index = self.hash_function(key) as usize;

        let mut current_item = unsafe { *self.data.add(index) };
        while let Some(item_) = unsafe { current_item.as_mut() } {
            if self.key_eq(key, item_.key) {
                item_.value = value;
                return false;
            }
            current_item = item_.next;
        }

        current_item = self.malloc_value();

        assert!(!current_item.is_null(), "malloc_value must not return a null pointer");
        self.assign_value(current_item, key, value);

        // Prepend the new node to the head of the bucket's chain.
        let prev_item_ptr = unsafe { self.data.add(index) };
        unsafe { (*current_item).next = *prev_item_ptr };
        unsafe { *prev_item_ptr = current_item };
        self.allocator.size += 1;

        true
    }

    /// Removes the entry for `key` and returns its value if one was present.
    ///
    /// - C++ `erase`
    pub fn remove(&mut self, key: &K) -> Option<V> {
        let index = self.hash_function(*key);
        let current_item = unsafe { &mut *self.data.add(index as usize) };

        let mut prev: *mut NiTMapItem<K, V> = ptr::null_mut();
        let mut item_ptr = unsafe { current_item.as_mut() };

        while let Some(item) = item_ptr {
            if self.key_eq(*key, item.key) {
                if prev.is_null() {
                    *current_item = item.next;
                } else {
                    unsafe { (*prev).next = item.next };
                }

                // Take ownership of value
                let value = unsafe {
                    // Safety: we're taking out the value, and will drop the rest properly.
                    ptr::read(&item.value)
                };

                self.remove_value(item);
                return Some(value);
            }
            prev = item;
            item_ptr = unsafe { item.next.as_mut() };
        }

        None
    }

    fn remove_value(&mut self, value: *mut NiTMapItem<K, V>) {
        self.clear_value(value);
        self.free_value(value);
        self.allocator.size -= 1;
    }

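    /// Returns a shared reference to the value stored for `key`, if any.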
    pub fn get(&self, key: &K) -> Option<&V> {
        let key = *key;
        let hash = self.hash_function(key);
        let index = (hash % self.capacity) as usize;

        let mut current = unsafe { *self.data.add(index) };
        while let Some(node) = unsafe { current.as_ref() } {
            if self.key_eq(node.key, key) {
                return Some(&node.value);
            }
            current = node.next;
        }

        None
    }

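    /// Returns a mutable reference to the value stored for `key`, if any.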
    pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
        let key = *key;
        let hash = self.hash_function(key);
        let index = (hash % self.capacity) as usize;

        let mut current = unsafe { *self.data.add(index) };
        while let Some(node) = unsafe { current.as_mut() } {
            if self.key_eq(node.key, key) {
                return Some(&mut node.value);
            }
            current = node.next;
        }

        None
    }
}

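/// Stand-in for the C++ allocator policy: it carries only the element count plus
/// a zero-sized marker for the allocator type parameter, adding no per-map bloat.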
#[repr(C)]
pub(crate) struct AntiBloatAllocator<A> {
    pub(crate) size: u32,
    alloc_marker: core::marker::PhantomData<A>,
}
impl<A> AntiBloatAllocator<A> {
    #[inline]
    pub const fn new() -> Self {
        Self { size: 0, alloc_marker: core::marker::PhantomData }
    }
}

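/// Function table mirroring the virtual calls of the C++ `NiTMapBase`; the hex
/// comments give each entry's slot index and (where known) its default body.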
#[repr(C)]
#[allow(non_snake_case, clippy::type_complexity)]
pub struct NiTMapBaseVtbl<K, V, A> {
    pub CxxDrop: fn(this: *mut NiTMapBase<K, V, A>), // 0x00 - destructor slot

    pub hash_function: fn(this: *const NiTMapBase<K, V, A>, key: K) -> u32, // 0x01 - { return a_key % _capacity; }
    pub key_eq: fn(this: *const NiTMapBase<K, V, A>, lhs: K, rhs: K) -> bool, // 0x02 - { return _stricmp(a_lhs, a_rhs) == 0; }
    pub assign_value:
        fn(this: *const NiTMapBase<K, V, A>, value: *mut NiTMapItem<K, V>, key: K, mapped: V), // 0x03 - { a_value->key = a_key; a_value->mapped = a_mapped; }
    pub clear_value: fn(this: *mut NiTMapBase<K, V, A>, value: *mut NiTMapItem<K, V>), // 0x04 - { return; }
    pub malloc_value: fn(this: *mut NiTMapBase<K, V, A>) -> *mut NiTMapItem<K, V>,     // 0x05
    pub free_value: fn(this: *mut NiTMapBase<K, V, A>, value: *mut NiTMapItem<K, V>),  // 0x06
}
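
// A minimal usage sketch, not part of the original bindings: it assumes `std` is
// available in test builds and wires a hand-written `NiTMapBaseVtbl` backed by
// `Box` allocation to exercise `insert`, `get`, `remove`, and `clear`. The
// `DummyAlloc` tag, the helper functions, and the manual bucket-array setup are
// illustrative assumptions; in the engine these maps are constructed and owned by
// native code.
#[cfg(test)]
mod tests {
    use core::ptr;

    use super::*;

    /// Illustrative allocator tag; only used as the `A` type parameter.
    struct DummyAlloc;

    type Map = NiTMapBase<u32, u64, DummyAlloc>;
    type Item = NiTMapItem<u32, u64>;

    fn hash(this: *const Map, key: u32) -> u32 {
        // Mirrors the documented default: `a_key % _capacity`.
        key % unsafe { (*this).capacity }
    }
    fn key_eq(_this: *const Map, lhs: u32, rhs: u32) -> bool {
        lhs == rhs
    }
    fn assign_value(_this: *const Map, item: *mut Item, key: u32, mapped: u64) {
        unsafe {
            (*item).key = key;
            (*item).value = mapped;
        }
    }
    fn clear_value(_this: *mut Map, _item: *mut Item) {}
    fn malloc_value(_this: *mut Map) -> *mut Item {
        Box::into_raw(Box::new(Item { next: ptr::null_mut(), key: 0, value: 0 }))
    }
    fn free_value(_this: *mut Map, item: *mut Item) {
        drop(unsafe { Box::from_raw(item) });
    }
    fn cxx_drop(_this: *mut Map) {}

    static VTBL: NiTMapBaseVtbl<u32, u64, DummyAlloc> = NiTMapBaseVtbl {
        CxxDrop: cxx_drop,
        hash_function: hash,
        key_eq,
        assign_value,
        clear_value,
        malloc_value,
        free_value,
    };

    #[test]
    fn insert_get_remove_clear() {
        let mut map = Map::new(&VTBL);

        // `new` leaves `data` null, so allocate the bucket array by hand and keep it
        // alive for the duration of the test.
        let mut buckets = vec![ptr::null_mut::<Item>(); map.capacity as usize].into_boxed_slice();
        map.data = buckets.as_mut_ptr();

        assert!(map.insert(7, 700));
        assert!(map.insert(44, 4400)); // 44 % 37 == 7: chains onto the same bucket.
        assert!(!map.insert(7, 701)); // Existing key: the value is overwritten.
        assert_eq!(map.len(), 2);

        assert_eq!(map.get(&7), Some(&701));
        assert_eq!(map.get(&44), Some(&4400));
        assert_eq!(map.remove(&7), Some(701));
        assert_eq!(map.get(&7), None);

        map.clear();
        assert!(map.is_empty());
    }
}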