// stdx/alloc/impls/global.rs

1// Unstable Rust code
2//
3// SPDX-FileCopyrightText: (c) The Rust Project Contributors
4// SPDX-License-Identifier: Apache-2.0 OR MIT
5// - https://github.com/rust-lang/rust/blob/master/LICENSE-MIT
6use core::ptr;
7use core::{alloc::Layout, hint, ptr::NonNull};
8use std::alloc::{alloc, alloc_zeroed, dealloc, realloc};
9
10use crate::alloc::allocator::non_null_empty_slice;
11use crate::alloc::{AllocError, Allocator};
12
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// `Global` is a zero-sized unit struct: it carries no state of its own,
/// so constructing it (`Global`) is free.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](std::alloc::alloc)
pub struct Global;
22
impl Global {
    /// Shared implementation of [`Allocator::allocate`] and
    /// [`Allocator::allocate_zeroed`].
    ///
    /// Returns a pointer to a slice of exactly `layout.size()` bytes,
    /// zero-filled when `zeroed` is true. A zero-sized `layout` never touches
    /// the global allocator; a dangling, well-aligned empty slice is returned
    /// instead (via `non_null_empty_slice`).
    ///
    /// # Errors
    ///
    /// Returns [`AllocError`] when the underlying allocator returns null.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[allow(clippy::unused_self)]
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized requests must not be passed to the global allocator.
            0 => Ok(non_null_empty_slice(layout)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of [`Allocator::grow`] and
    /// [`Allocator::grow_zeroed`]; when `zeroed` is true, the newly added
    /// bytes (`old_layout.size()..new_layout.size()`) are zero-filled.
    ///
    /// # Safety
    ///
    /// Same contract as [`Allocator::grow`]: `ptr` must denote a block
    /// currently allocated via this allocator and fitting `old_layout`, and
    /// `new_layout.size()` must be greater than or equal to
    /// `old_layout.size()`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // Growing a zero-sized block is just a fresh allocation: nothing
            // was actually allocated, so there is nothing to copy or free.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero, because it is greater than or equal to
            // `old_size` (as required by the safety conditions above), and `old_size`
            // is non-zero in this arm. Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                // `realloc` preserves the old contents but leaves the tail
                // uninitialized; zero only the newly added bytes.
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // Alignment changed, so `realloc` cannot be used: allocate, copy, free.
            //
            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.cast().as_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
87
// SAFETY: all methods forward to the registered global allocator via the
// `std::alloc` free functions, which provide blocks that satisfy the
// requested `Layout` and remain valid until deallocated.
unsafe impl Allocator for Global {
    // Allocate an uninitialized block fitting `layout`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    // Allocate a zero-filled block fitting `layout`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    // Free `ptr`. Zero-sized layouts are a no-op, mirroring `alloc_impl`,
    // which never calls the global allocator for them.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY: `layout` is non-zero in size,
            // other conditions must be upheld by the caller
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    // Grow without zeroing the added bytes; see `grow_impl`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    // Grow and zero-fill the added bytes; see `grow_impl`.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    // Shrink the block to `new_layout`. Mirrors `grow_impl`'s three cases:
    // shrink-to-zero frees, same alignment uses `realloc`, and an alignment
    // change falls back to allocate-copy-free.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero bytes frees the block and returns a dangling,
            // well-aligned empty slice (same representation `alloc_impl` uses).
            // SAFETY: conditions must be upheld by the caller
            0 => {
                unsafe { self.deallocate(ptr, old_layout) };
                Ok(non_null_empty_slice(new_layout))
            }

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.cast().as_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}