use core::{
    alloc::Layout,
    marker::PhantomData,
    mem::{ManuallyDrop, MaybeUninit},
    ops::{Index, IndexMut, Range, RangeBounds},
    ptr::{self, NonNull, drop_in_place},
};
use std::alloc::handle_alloc_error;

use std_fork::alloc::SelflessAllocator;
use stdx::ptr::{ConstNonNull, Unique};

use crate::re::MemoryManager::TESGlobalAlloc;
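/// A small-size-optimized dynamic array: up to `N` elements are stored
/// inline, and storage spills to an allocation from `A` once the inline
/// capacity is exceeded.
///
/// A minimal usage sketch (illustrative only; assumes an allocator backend
/// is available, e.g. the `stdx::alloc::Global` used by the tests below):
///
/// ```ignore
/// let mut array: BSTSmallArray<u32, 4, Global> = BSTSmallArray::new();
/// array.push(10);
/// array.push(20);
/// assert_eq!(array.len(), 2);
/// assert_eq!(array[0], 10);
/// ```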
#[repr(C)]
pub struct BSTSmallArray<T, const N: usize = 1, A = TESGlobalAlloc>
where
    A: SelflessAllocator,
{
    capacity: u32,
    storage_type: StorageType_CEnum,
    data: RawBSTSmallArray<T, N>,
    size: u32,
    alloc: PhantomData<A>,
}
const _: () = assert!(core::mem::size_of::<BSTSmallArray<(), 10>>() == 0x18);

#[commonlibsse_ng_derive_internal::ffi_enum]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
pub enum StorageType {
    Heap = 0,
    Inline = 1,
}

#[repr(C)]
union RawBSTSmallArray<T, const N: usize> {
    heap: Option<Unique<T>>,
    inline: ManuallyDrop<MaybeUninit<[T; N]>>,
}

impl<T, const N: usize> RawBSTSmallArray<T, N> {
    #[inline]
    const fn new() -> Self {
        Self::new_inline(MaybeUninit::uninit())
    }

    #[inline]
    const fn new_inline(inline: MaybeUninit<[T; N]>) -> Self {
        Self { inline: ManuallyDrop::new(inline) }
    }

    #[inline]
    const fn new_heap(ptr: NonNull<T>) -> Self {
        Self { heap: Some(unsafe { Unique::new_unchecked(ptr.as_ptr()) }) }
    }
}

const _: () = {
    const SIZE: usize = core::mem::size_of::<RawBSTSmallArray<u32, 4>>();
    assert!(SIZE == 0x10);
};
impl<T, const N: usize, A> BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    #[inline]
    pub const fn new() -> Self {
        Self {
            data: RawBSTSmallArray::new(),
            capacity: N as u32,
            storage_type: StorageType_CEnum::from_enum(StorageType::Inline),
            size: 0,
            alloc: PhantomData,
        }
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.size as usize
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn capacity(&self) -> usize {
        self.capacity as usize
    }

    #[inline]
    pub fn shrink_to_fit(&mut self) {
        self.change_capacity(self.size);
    }

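    /// Appends `value` to the end of the array, growing the storage
    /// (inline -> heap) when the capacity is reached.
    ///
    /// Sketch of the expected behavior (illustrative, not a doctest;
    /// assumes the test-only `Global` allocator backend):
    ///
    /// ```ignore
    /// let mut array: BSTSmallArray<i32, 2, Global> = BSTSmallArray::new();
    /// array.push(1);
    /// array.push(2);
    /// array.push(3); // exceeds N == 2, storage moves to the heap
    /// assert_eq!(array.len(), 3);
    /// ```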
    #[inline]
    pub fn push(&mut self, value: T) {
        let size = self.size;
        if size == self.capacity {
            self.grow();
        }
        unsafe {
            if let Some(ptr) = self.as_non_null_ptr() {
                ptr.add(size as usize).write(value);
            }
        }

        self.size += 1;
    }

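    /// Removes and returns the last element, or `None` if the array is empty.
    ///
    /// Sketch (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let mut array: BSTSmallArray<i32, 4, Global> = BSTSmallArray::new();
    /// array.push(1);
    /// assert_eq!(array.pop(), Some(1));
    /// assert_eq!(array.pop(), None);
    /// ```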
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        let len = self.len();
        if len == 0 {
            None
        } else {
            self.size -= 1;
            unsafe { Some(ptr::read(self.as_non_null_ptr()?.add(len - 1).as_ptr())) }
        }
    }

    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len() {
            return unsafe { Some(self.as_const_non_null_ptr()?.add(index).as_ref()) };
        }
        None
    }

    #[inline]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len() {
            return unsafe { Some(self.as_non_null_ptr()?.add(index).as_mut()) };
        }
        None
    }

    #[inline]
    pub fn clear(&mut self) {
        if let Some(non_null) = self.as_non_null_ptr() {
            for i in 0..self.len() {
                unsafe {
                    ptr::drop_in_place(non_null.add(i).as_ptr());
                }
            }
        }

        self.size = 0;
    }

    #[inline]
    #[allow(clippy::missing_const_for_fn)]
    pub fn as_non_null_ptr(&mut self) -> Option<NonNull<T>> {
        match self.storage_type() {
            StorageType::Heap => unsafe {
                self.data.heap.map(|ptr| NonNull::new_unchecked(ptr.as_ptr()))
            },
            StorageType::Inline => {
                let ptr = unsafe { (*self.data.inline).as_mut_ptr() }.cast::<T>();
                Some(unsafe { NonNull::new_unchecked(ptr) })
            }
        }
    }

    #[inline]
    pub fn as_const_non_null_ptr(&self) -> Option<ConstNonNull<T>> {
        match self.storage_type() {
            StorageType::Heap => unsafe { self.data.heap.map(ConstNonNull::from_unique) },
            StorageType::Inline => {
                let ptr = (unsafe { self.data.inline.as_ptr() }).cast::<T>();
                Some(unsafe { ConstNonNull::new_unchecked(ptr) })
            }
        }
    }

    #[inline]
    pub fn contains(&self, value: &T) -> bool
    where
        T: PartialEq,
    {
        for i in 0..self.len() {
            if let Some(item) = self.get(i) {
                if item == value {
                    return true;
                }
            }
        }
        false
    }

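    /// Keeps only the elements for which `f` returns `true`, dropping the
    /// rest in place and compacting the survivors to the front.
    ///
    /// Sketch (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let mut array: BSTSmallArray<i32, 8, Global> = BSTSmallArray::new();
    /// array.extend([1, 2, 3, 4]);
    /// array.retain(|&x| x % 2 == 0);
    /// assert_eq!(array.as_slice(), &[2, 4]);
    /// ```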
    #[inline]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut retained = 0;

        for i in 0..self.len() {
            let src = match self.as_non_null_ptr() {
                Some(elem) => unsafe { elem.add(i) },
                None => continue,
            };

            if f(unsafe { src.as_ref() }) {
                if retained != i {
                    unsafe {
                        let dst = self.as_non_null_ptr().unwrap().add(retained).as_ptr();
                        ptr::copy_nonoverlapping(src.as_ptr(), dst, 1);
                    }
                }
                retained += 1;
            } else {
                unsafe { ptr::drop_in_place(src.as_ptr()) };
            }
        }

        self.size = retained as u32;
    }

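    /// Resizes the array to `new_size`: grows by pushing clones of `value`,
    /// or shrinks by dropping the trailing elements.
    ///
    /// Sketch (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let mut array: BSTSmallArray<i32, 8, Global> = BSTSmallArray::new();
    /// array.resize(3, 7);
    /// assert_eq!(array.as_slice(), &[7, 7, 7]);
    /// array.resize(1, 0);
    /// assert_eq!(array.as_slice(), &[7]);
    /// ```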
    #[inline]
    pub fn resize(&mut self, new_size: usize, value: T)
    where
        T: Clone,
    {
        let prev_size = self.len();
        if new_size > prev_size {
            for _ in prev_size..new_size {
                self.push(value.clone());
            }
        } else {
            for i in new_size..prev_size {
                if let Some(src) = self.as_non_null_ptr() {
                    unsafe { ptr::drop_in_place(src.add(i).as_ptr()) };
                }
            }
        }
        self.size = new_size as u32;
    }

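    /// Removes the elements in `range` and returns an iterator over them.
    /// Elements after the range are shifted back down when the returned
    /// iterator is dropped.
    ///
    /// Sketch (illustrative, not a doctest; see `test_drain` below):
    ///
    /// ```ignore
    /// let mut array: BSTSmallArray<i32, 8, Global> = BSTSmallArray::new();
    /// array.extend([1, 2, 3, 4]);
    /// let drained: Vec<i32> = array.drain(1..3).collect();
    /// assert_eq!(drained, vec![2, 3]);
    /// assert_eq!(array.as_slice(), &[1, 4]);
    /// ```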
    #[inline]
    pub fn drain<R>(&mut self, range: R) -> BSTDrain<'_, T, N, A>
    where
        R: RangeBounds<usize>,
    {
        let len = self.len();
        let Range { start, end } = stdx::slice::range(range, ..len);
        debug_assert!(start <= end);
        debug_assert!(end <= len);

        self.size = start as u32;

        let iter = match self.as_non_null_ptr() {
            Some(src) => unsafe {
                core::slice::from_raw_parts(src.add(start).as_ptr(), end - start)
            },
            None => &[],
        }
        .iter();

        BSTDrain {
            iter,
            tail_start: end,
            tail_len: len - end,
            array: unsafe { NonNull::new_unchecked(self as *mut Self) },
        }
    }

    #[inline]
    pub fn as_slice(&self) -> &[T] {
        let len = self.len();
        match self.as_const_non_null_ptr() {
            Some(src) if len > 0 => unsafe { core::slice::from_raw_parts(src.as_ptr(), len) },
            _ => &[],
        }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = self.len();
        match self.as_non_null_ptr() {
            Some(src) if len > 0 => unsafe { core::slice::from_raw_parts_mut(src.as_ptr(), len) },
            _ => &mut [],
        }
    }

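    /// Returns a borrowing iterator over the elements.
    ///
    /// Sketch (illustrative, not a doctest):
    ///
    /// ```ignore
    /// let mut array: BSTSmallArray<i32, 4, Global> = BSTSmallArray::new();
    /// array.extend([1, 2, 3]);
    /// let sum: i32 = array.iter().sum();
    /// assert_eq!(sum, 6);
    /// ```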
    #[inline]
    pub const fn iter(&self) -> BSTSmallArrayIterator<'_, T, N, A> {
        BSTSmallArrayIterator { array: self, index: 0 }
    }

    fn grow(&mut self) {
        const MIN_CAPACITY: u32 = 4;
        const GROWTH_FACTOR: u32 = 2;

        let old_capacity = self.capacity;
        let new_capacity =
            if old_capacity < MIN_CAPACITY { MIN_CAPACITY } else { old_capacity * GROWTH_FACTOR };
        self.change_capacity(new_capacity);
    }

    fn change_capacity(&mut self, new_capacity: u32) {
        if new_capacity == 0 {
            return;
        }

        let storage_type = self.storage_type();

        if new_capacity <= (N as u32) && storage_type == StorageType::Inline {
            return;
        }

        let copy_count = core::cmp::min(self.size, new_capacity) as usize;

        let new_layout = Self::new_layout(new_capacity);
        let new_data = A::allocate(new_layout)
            .map_or_else(|_| handle_alloc_error(new_layout), |data| data.cast::<T>());

        unsafe {
            let dst = new_data.as_ptr();

            match storage_type {
                StorageType::Inline => {
                    let src = self.data.inline.as_ptr().cast::<T>();
                    ptr::copy_nonoverlapping(src, dst, copy_count);
                }
                StorageType::Heap => {
                    if let Some(old_ptr) = self.data.heap {
                        ptr::copy_nonoverlapping(old_ptr.as_ptr(), dst, copy_count);
                        A::deallocate(old_ptr.as_non_null_ptr().cast::<u8>(), self.layout());
                    }
                }
            }
        }

        self.data = RawBSTSmallArray::new_heap(new_data);
        self.storage_type = StorageType::Heap.into();
        self.capacity = new_capacity;
    }

    fn new_layout(n: u32) -> Layout {
        Layout::array::<T>(n as usize)
            .expect("BSTSmallArray: allocation size must not exceed isize::MAX")
    }

    fn layout(&self) -> Layout {
        Self::new_layout(self.capacity)
    }

    const fn storage_type(&self) -> StorageType {
        match self.storage_type.to_enum() {
            Some(value) => value,
            None => StorageType::Inline,
        }
    }
}

impl<T, const N: usize, A> Drop for BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    fn drop(&mut self) {
        unsafe {
            match self.storage_type() {
                StorageType::Heap => {
                    if let Some(ptr) = self.data.heap {
                        let ptr = ptr.as_non_null_ptr();
                        for i in 0..self.size as usize {
                            drop_in_place(ptr.add(i).as_ptr());
                        }
                        A::deallocate(ptr.cast(), self.layout());
                    }
                }
                StorageType::Inline => {
                    debug_assert!(
                        self.size <= N as u32,
                        "[BSTSmallArray] size exceeds the inline capacity; implementation bug."
                    );
                    for i in self.as_mut_slice() {
                        drop_in_place(i);
                    }
                }
            }
        }
    }
}

impl<T, const N: usize, A> Index<usize> for BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    type Output = T;

    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        assert!(index < self.len(), "Index out of bounds");
        unsafe { self.as_const_non_null_ptr().unwrap().add(index).as_ref() }
    }
}

impl<T, const N: usize, A> IndexMut<usize> for BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        assert!(index < self.len(), "Index out of bounds");
        unsafe { self.as_non_null_ptr().unwrap().add(index).as_mut() }
    }
}

pub struct BSTDrain<'a, T, const N: usize, A>
where
    A: SelflessAllocator,
{
    tail_start: usize,
    tail_len: usize,
    iter: core::slice::Iter<'a, T>,
    array: NonNull<BSTSmallArray<T, N, A>>,
}

impl<T, const N: usize, A> Iterator for BSTDrain<'_, T, N, A>
where
    A: SelflessAllocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next().map(|item| unsafe { ptr::read(item) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<T, const N: usize, A> DoubleEndedIterator for BSTDrain<'_, T, N, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back().map(|item| unsafe { ptr::read(item) })
    }
}

impl<T, const N: usize, A> ExactSizeIterator for BSTDrain<'_, T, N, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn len(&self) -> usize {
        self.iter.len()
    }
}

impl<T, const N: usize, A: SelflessAllocator> Drop for BSTDrain<'_, T, N, A> {
    fn drop(&mut self) {
        if core::mem::needs_drop::<T>() {
            self.for_each(drop);
        }

        if self.tail_len > 0 {
            unsafe {
                let array = self.array.as_mut();

                let start = array.len();
                let tail = self.tail_start;
                if let Some(ptr) = array.as_non_null_ptr() {
                    if tail != start {
                        let src = ptr.add(tail).as_ptr();
                        let dst = ptr.add(start).as_ptr();
                        ptr::copy(src, dst, self.tail_len);
                    }
                }
                array.size = (start + self.tail_len) as u32;
            }
        }
    }
}

pub struct BSTSmallArrayIterator<'a, T, const N: usize, A>
where
    A: SelflessAllocator,
{
    array: &'a BSTSmallArray<T, N, A>,
    index: usize,
}

impl<'a, T, const N: usize, A> Iterator for BSTSmallArrayIterator<'a, T, N, A>
where
    A: SelflessAllocator,
{
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            let item = unsafe { self.array.as_const_non_null_ptr()?.add(self.index).as_ref() };
            self.index += 1;
            Some(item)
        } else {
            None
        }
    }
}

pub struct BSTSmallArrayIntoIterator<T, const N: usize, A>
where
    A: SelflessAllocator,
{
    array: BSTSmallArray<T, N, A>,
    index: usize,
}

impl<T, const N: usize, A> Iterator for BSTSmallArrayIntoIterator<T, N, A>
where
    A: SelflessAllocator,
{
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            let item = unsafe { ptr::read(self.array.as_non_null_ptr()?.add(self.index).as_ptr()) };
            self.index += 1;
            Some(item)
        } else {
            None
        }
    }
}

impl<T, const N: usize, A> IntoIterator for BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    type Item = T;
    type IntoIter = BSTSmallArrayIntoIterator<T, N, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BSTSmallArrayIntoIterator { array: self, index: 0 }
    }
}

impl<'a, T, const N: usize, A> IntoIterator for &'a BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    type Item = &'a T;
    type IntoIter = BSTSmallArrayIterator<'a, T, N, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BSTSmallArrayIterator { array: self, index: 0 }
    }
}

pub struct BSTSmallArrayIterMut<'a, T, const N: usize, A>
where
    A: SelflessAllocator,
{
    array: &'a mut BSTSmallArray<T, N, A>,
    index: usize,
}

impl<'a, T, const N: usize, A> Iterator for BSTSmallArrayIterMut<'a, T, N, A>
where
    A: SelflessAllocator,
{
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.array.len() {
            unsafe {
                let mut ptr = self.array.as_non_null_ptr()?.add(self.index);
                self.index += 1;
                Some(ptr.as_mut())
            }
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.array.len();
        (len, Some(len))
    }
}

impl<'a, T, const N: usize, A> IntoIterator for &'a mut BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    type Item = &'a mut T;
    type IntoIter = BSTSmallArrayIterMut<'a, T, N, A>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        BSTSmallArrayIterMut { array: self, index: 0 }
    }
}

impl<T, const N: usize, A> Extend<T> for BSTSmallArray<T, N, A>
where
    A: SelflessAllocator,
{
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for elem in iter {
            self.push(elem);
        }
    }
}

impl<T, const N: usize, A> core::fmt::Debug for BSTSmallArray<T, N, A>
where
    T: core::fmt::Debug,
    A: SelflessAllocator,
{
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_list().entries(self.as_slice()).finish()
    }
}

impl<T, const N: usize> Default for BSTSmallArray<T, N> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<T: Clone, const N: usize, A> Clone for BSTSmallArray<T, N, A>
where
    A: SelflessAllocator + Clone,
{
    fn clone(&self) -> Self {
        // Start from an *empty* array with matching capacity and storage
        // kind, then push clones of each element; `push` advances `size`
        // itself, so `size` must start at 0 here.
        let mut new = if self.capacity as usize > N {
            let layout = Self::new_layout(self.capacity);
            let new_ptr = A::allocate(layout).map_or_else(
                |_| handle_alloc_error(layout),
                |p| unsafe { NonNull::new_unchecked(p.cast::<T>().as_ptr()) },
            );

            Self {
                data: RawBSTSmallArray::new_heap(new_ptr),
                capacity: self.capacity,
                storage_type: StorageType::Heap.into(),
                size: 0,
                alloc: PhantomData,
            }
        } else {
            Self {
                data: RawBSTSmallArray::new(),
                capacity: self.capacity,
                storage_type: StorageType::Inline.into(),
                size: 0,
                alloc: PhantomData,
            }
        };

        for i in 0..self.size as usize {
            new.push(self[i].clone());
        }

        new
    }
}

impl<T, const N: usize, A> PartialEq for BSTSmallArray<T, N, A>
where
    T: PartialEq,
    A: SelflessAllocator,
{
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T, const N: usize, A> PartialEq<Vec<T>> for BSTSmallArray<T, N, A>
where
    T: PartialEq,
    A: SelflessAllocator,
{
    #[inline]
    fn eq(&self, other: &Vec<T>) -> bool {
        self.as_slice() == *other
    }
}

impl<T, const N: usize, A> PartialEq<&[T]> for BSTSmallArray<T, N, A>
where
    T: PartialEq,
    A: SelflessAllocator,
{
    #[inline]
    fn eq(&self, other: &&[T]) -> bool {
        self.as_slice() == *other
    }
}

impl<T, const N: usize, A> Eq for BSTSmallArray<T, N, A>
where
    T: Eq,
    A: SelflessAllocator,
{
}

impl<T, const N: usize, A> PartialOrd for BSTSmallArray<T, N, A>
where
    T: PartialOrd,
    A: SelflessAllocator,
{
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T, const N: usize, A> Ord for BSTSmallArray<T, N, A>
where
    T: Ord,
    A: SelflessAllocator,
{
    #[inline]
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T, const N: usize, A> core::hash::Hash for BSTSmallArray<T, N, A>
where
    T: core::hash::Hash,
    A: SelflessAllocator,
{
    #[inline]
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state);
    }
}

#[cfg(test)]
mod tests {
    use stdx::alloc::Global;

    use super::BSTSmallArray as BSTSmallArray_;

    type BSTSmallArray<T, const N: usize = 10, A = Global> = BSTSmallArray_<T, N, A>;

    #[test]
    fn test_drain() {
        let mut array = BSTSmallArray::<_, 10>::new();
        array.push(1);
        array.push(2);
        array.push(3);
        array.push(4);
        array.push(5);

        let drained = array.drain(1..3);
        assert_eq!(drained.collect::<Vec<_>>(), vec![2, 3]);
        assert_eq!(array.len(), 3);
        assert_eq!(array[0], 1);
        assert_eq!(array[1], 4);
        assert_eq!(array[2], 5);
    }
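
    // Added example: push/pop round-trip across the inline -> heap
    // transition (N == 2 forces a grow on the third push).
    #[test]
    fn test_push_pop_growth() {
        let mut array = BSTSmallArray::<_, 2>::new();
        array.push(1);
        array.push(2);
        assert_eq!(array.capacity(), 2);

        array.push(3); // exceeds the inline capacity; storage moves to the heap
        assert!(array.capacity() > 2);
        assert_eq!(array.len(), 3);

        assert_eq!(array.pop(), Some(3));
        assert_eq!(array.pop(), Some(2));
        assert_eq!(array.pop(), Some(1));
        assert_eq!(array.pop(), None);
    }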

    #[test]
    fn test_shrink_to_fit_on_stack() {
        let mut array = BSTSmallArray::<_, 10>::new();
        array.push(1);
        array.push(2);
        array.push(3);
        array.push(4);
        array.push(5);

        assert_eq!(array.len(), 5);
        assert_eq!(array.capacity(), 10);

        array.shrink_to_fit();

        assert_eq!(array.len(), 5);
        // Inline storage is never reallocated, so the capacity stays at N.
        assert_eq!(array.capacity(), 10);
    }
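
    // Added example: retain keeps matching elements in order; resize grows
    // with clones of the given value and shrinks by dropping the tail.
    #[test]
    fn test_retain_and_resize() {
        let mut array = BSTSmallArray::<_, 10>::new();
        array.extend([1, 2, 3, 4, 5]);

        array.retain(|&x| x % 2 == 1);
        assert_eq!(array.as_slice(), &[1, 3, 5]);

        array.resize(5, 9);
        assert_eq!(array.as_slice(), &[1, 3, 5, 9, 9]);

        array.resize(2, 0);
        assert_eq!(array.as_slice(), &[1, 3]);
    }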

    #[test]
    fn test_shrink_to_fit_on_heap() {
        let mut array = BSTSmallArray::<_, 4>::new();
        array.push(1);
        array.push(2);
        array.push(3);
        array.push(4);
        array.push(5);

        assert_eq!(array.len(), 5);
        assert_eq!(array.capacity(), 4 * 2);

        array.shrink_to_fit();

        assert_eq!(array.len(), 5);
        assert_eq!(array.capacity(), 5);
    }
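
    // Added example: a clone must compare equal to its source and own its
    // elements independently. (Assumes `Global` implements `Clone`, as
    // `std`'s allocator does.)
    #[test]
    fn test_clone_eq() {
        let mut array = BSTSmallArray::<_, 4>::new();
        array.extend([1, 2, 3, 4, 5]); // spills to the heap (N == 4)

        let cloned = array.clone();
        assert_eq!(cloned, array);
        assert_eq!(cloned.len(), 5);

        array.push(6);
        assert_eq!(cloned.len(), 5); // the clone is unaffected
    }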
}