stdx/ptr/const_non_null.rs

#![allow(clippy::missing_safety_doc)]
use crate::ptr::Unique;
use core::cmp::Ordering;
use core::mem::MaybeUninit;
use core::num::NonZero;
use core::ptr::NonNull;
use core::{fmt, hash, mem, ptr};

/// `*const T` but non-zero and [covariant].
///
/// This is often the correct thing to use when building data structures using
/// raw pointers, but is ultimately more dangerous to use because of its additional
/// properties. If you're not sure if you should use `ConstNonNull<T>`, just use `*const T`!
///
/// Unlike `*const T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<ConstNonNull<T>>` has the same size as `*const T`.
/// However, the pointer may still dangle if it isn't dereferenced.
///
/// Like `*const T` (and unlike `*mut T`), `ConstNonNull<T>` is covariant over `T`. This makes it
/// possible to use `ConstNonNull<T>` when building covariant types, but introduces the
/// risk of unsoundness if used in a type that shouldn't actually be covariant.
/// (The opposite choice was made for `*mut T` even though technically the unsoundness
/// could only be caused by calling unsafe functions.)
///
/// Covariance is correct for most safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
/// and `LinkedList`. This is the case because they provide a public API that follows the
/// normal shared XOR mutable rules of Rust.
///
/// If your type cannot safely be covariant, you must ensure it contains some
/// additional field to provide invariance. Often this field will be a [`PhantomData`]
/// type like `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`, as sketched below.
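///
/// For example, a minimal sketch of such an invariant wrapper (the `Invariant` struct
/// is a hypothetical illustration, not a type defined in this crate):
///
/// ```
/// use core::cell::Cell;
/// use core::marker::PhantomData;
/// use core::ptr::NonNull;
///
/// struct Invariant<T> {
///     ptr: NonNull<T>,
///     // `Cell<T>` is invariant over `T`, so this zero-sized marker makes the
///     // whole struct invariant as well.
///     _marker: PhantomData<Cell<T>>,
/// }
/// ```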
///
/// Notice that `ConstNonNull<T>` has a `From` instance for `&T`. However, this does
/// not change the fact that mutating through a (pointer derived from a) shared
/// reference is undefined behavior unless the mutation happens inside an
/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
/// reference. When using this `From` instance without an `UnsafeCell<T>`,
/// it is your responsibility to ensure that the pointer is never used for mutation
/// (for example after casting it back to a mutable pointer).
///
/// # Representation
///
/// Thanks to the [null pointer optimization],
/// `ConstNonNull<T>` and `Option<ConstNonNull<T>>`
/// are guaranteed to have the same size and alignment, just like
/// `NonNull<T>` and `Option<NonNull<T>>`:
///
/// ```
/// # use std::mem::{size_of, align_of};
/// use std::ptr::NonNull;
///
/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
///
/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
/// ```
///
/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
/// [`PhantomData`]: core::marker::PhantomData
/// [`UnsafeCell<T>`]: core::cell::UnsafeCell
/// [null pointer optimization]: core::option#representation
#[repr(transparent)]
pub struct ConstNonNull<T: ?Sized> {
    // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
    // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
    pointer: NonNull<T>,
}

impl<T: Sized> ConstNonNull<T> {
    /// Creates a new `ConstNonNull` that is dangling, but well-aligned.
    ///
    /// This is useful for initializing types which lazily allocate, like
    /// `Vec::new` does.
    ///
    /// Note that the pointer value may potentially represent a valid pointer to
    /// a `T`, which means this must not be used as a "not yet initialized"
    /// sentinel value. Types that lazily allocate must track initialization by
    /// some other means.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let ptr = NonNull::<u32>::dangling();
    /// // Important: don't try to access the value of `ptr` without
    /// // initializing it first! The pointer is not null but isn't valid either!
    /// ```
    #[must_use]
    #[inline]
    pub const fn dangling() -> Self {
        ConstNonNull { pointer: NonNull::dangling() }
    }

    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
    /// that the value be initialized.
    ///
    /// For the mutable counterpart see [`NonNull::as_uninit_mut`].
    ///
    /// [`as_ref`]: ConstNonNull::as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](core::ptr#pointer-to-reference-conversion).
    /// Note that because the created reference is to `MaybeUninit<T>`, the
    /// source pointer can point to uninitialized memory.
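    ///
    /// # Examples
    ///
    /// An illustrative sketch (marked `ignore` because `ConstNonNull` is local to this
    /// crate and not visible to doctests):
    ///
    /// ```ignore
    /// use core::mem::MaybeUninit;
    ///
    /// let mut storage = MaybeUninit::<u32>::uninit();
    /// // Pointer to the (possibly uninitialized) storage; no `&u32` is created yet.
    /// let ptr = ConstNonNull::new(storage.as_mut_ptr()).unwrap();
    /// // `as_uninit_ref` only requires the pointer to be convertible to a reference,
    /// // not that the pointee is initialized.
    /// let uninit: &MaybeUninit<u32> = unsafe { ptr.as_uninit_ref() };
    /// ```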
    #[inline]
    #[must_use]
    pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        unsafe { &*self.cast().as_ptr() }
    }

    /// Creates a new `ConstNonNull` from a `Unique`.
    pub const fn from_unique(unique: Unique<T>) -> Self {
        Self { pointer: unique.as_non_null_ptr() }
    }

    /// Creates a new `ConstNonNull` from a `NonNull<T>`.
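    ///
    /// # Examples
    ///
    /// An illustrative sketch (marked `ignore` because `ConstNonNull` is local to this crate):
    ///
    /// ```ignore
    /// use core::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::from(&mut x);
    /// // Drop the ability to mutate through the pointer by going const.
    /// let const_ptr = ConstNonNull::from_non_null(ptr);
    /// assert_eq!(unsafe { *const_ptr.as_ptr() }, 0);
    /// ```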
    pub const fn from_non_null(pointer: NonNull<T>) -> Self {
        Self { pointer }
    }
}

impl<T: ?Sized> ConstNonNull<T> {
    /// Creates a new `ConstNonNull`.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
    /// ```
    ///
    /// *Incorrect* usage of this function:
    ///
    /// ```rust,no_run
    /// use std::ptr::NonNull;
    ///
    /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
    /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
    /// ```
    #[inline]
    pub const unsafe fn new_unchecked(ptr: *const T) -> Self {
        // SAFETY: the caller must guarantee that `ptr` is non-null.
        debug_assert!(!ptr.is_null(), "ConstNonNull::new_unchecked called with null pointer");
        unsafe { ConstNonNull { pointer: NonNull::new_unchecked(ptr.cast_mut()) } }
    }

    /// Creates a new `ConstNonNull` if `ptr` is non-null.
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
    ///
    /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
    ///     unreachable!();
    /// }
    /// ```
    #[inline]
    #[allow(clippy::not_unsafe_ptr_arg_deref)]
    pub const fn new(ptr: *mut T) -> Option<Self> {
        if !ptr.is_null() {
            // SAFETY: The pointer is already checked and is not null
            Some(unsafe { Self::new_unchecked(ptr) })
        } else {
            None
        }
    }

    /// Converts a reference to a `ConstNonNull` pointer.
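    ///
    /// # Examples
    ///
    /// An illustrative sketch (marked `ignore` because `ConstNonNull` is local to this crate):
    ///
    /// ```ignore
    /// let x = 7u32;
    /// let ptr = ConstNonNull::from_ref(&x);
    /// assert_eq!(unsafe { *ptr.as_ptr() }, 7);
    /// ```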
    #[inline]
    pub const fn from_ref(r: &T) -> Self {
        // SAFETY: A reference cannot be null.
        Self { pointer: unsafe { NonNull::new_unchecked((r as *const T).cast_mut()) } }
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// For more details, see the equivalent method on a raw pointer, `pointer::addr`.
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
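    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// let x = 1u32;
    /// let ptr = ConstNonNull::from_ref(&x);
    /// // The address of a non-null pointer is never zero, so it fits in a `NonZero<usize>`.
    /// assert_eq!(ptr.addr().get(), ptr.as_ptr() as usize);
    /// ```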
    #[must_use]
    #[inline]
    pub fn addr(self) -> NonZero<usize> {
        // SAFETY: The pointer is guaranteed by the type to be non-null,
        // meaning that the address will be non-zero.
        unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
    }

    /// Creates a new pointer with the given address and the [provenance][core::ptr#provenance] of
    /// `self`.
    ///
    /// For more details, see the equivalent method on a raw pointer, `pointer::with_addr`.
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
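    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// let a = [1u32, 2, 3];
    /// // Derive the pointer from the whole array so its provenance covers every element.
    /// let base: ConstNonNull<u32> = ConstNonNull::from_ref(&a).cast();
    /// // Re-address the pointer to the third element while keeping `base`'s provenance.
    /// let third = base.with_addr(base.addr().checked_add(2 * core::mem::size_of::<u32>()).unwrap());
    /// assert_eq!(unsafe { *third.as_ptr() }, 3);
    /// ```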
    #[must_use]
    #[inline]
    pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
        unsafe { ConstNonNull::new_unchecked(self.as_ptr().with_addr(addr.get())) }
    }

    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
    /// [provenance][core::ptr#provenance] of `self`.
    ///
    /// For more details, see the equivalent method on a raw pointer, `pointer::map_addr`.
    ///
    /// This is a [Strict Provenance][core::ptr#strict-provenance] API.
    #[must_use]
    #[inline]
    pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Acquires the underlying `*const` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
    ///
    /// let x_value = unsafe { *ptr.as_ptr() };
    /// assert_eq!(x_value, 0);
    /// ```
    #[must_use]
    #[inline(always)]
    pub const fn as_ptr(self) -> *const T {
        // This is a transmute for the same reasons as `NonZero::get`.

        // SAFETY: `ConstNonNull` is `repr(transparent)` over `NonNull<T>`, which is
        // `repr(transparent)` over `*const T`, so transitively we can transmute
        // our `ConstNonNull` to a `*const T` directly.
        unsafe { mem::transmute::<Self, *const T>(self) }
    }

    /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// For the mutable counterpart see [`NonNull::as_mut`].
    ///
    /// [`as_uninit_ref`]: ConstNonNull::as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](core::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
    ///
    /// let ref_x = unsafe { ptr.as_ref() };
    /// println!("{ref_x}");
    /// ```
    #[must_use]
    #[inline(always)]
    pub const unsafe fn as_ref<'a>(&self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        unsafe { self.pointer.as_ref() }
    }

    /// Casts to a pointer of another type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 0u32;
    /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
    ///
    /// let casted_ptr = ptr.cast::<i8>();
    /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
    /// ```
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub const fn cast<U>(self) -> ConstNonNull<U> {
        // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
        ConstNonNull { pointer: self.pointer.cast() }
    }

    /// Adds an offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocated object]: core::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut s = [1, 2, 3];
    /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
    ///
    /// unsafe {
    ///     println!("{}", ptr.offset(1).read());
    ///     println!("{}", ptr.offset(2).read());
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn offset(self, count: isize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
        unsafe { ConstNonNull { pointer: self.pointer.offset(count) } }
    }

    /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocated object]: core::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let s: &str = "123";
    /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
    ///
    /// unsafe {
    ///     println!("{}", ptr.add(1).read() as char);
    ///     println!("{}", ptr.add(2).read() as char);
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        // Additionally safety contract of `offset` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.
        unsafe { ConstNonNull { pointer: self.pointer.add(count) } }
    }

    /// Adds an offset in bytes to a pointer (convenience for `.byte_offset(count as isize)`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`add`][ConstNonNull::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// # Safety
    ///
    /// This has the same safety requirements as [`add`][ConstNonNull::add], with the
    /// offset measured in bytes rather than in units of `T`.
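    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// let a = [1u16, 2, 3];
    /// let ptr = ConstNonNull::from_ref(&a).cast::<u16>();
    /// // Step forward by one element's worth of bytes.
    /// let second = unsafe { ptr.byte_add(core::mem::size_of::<u16>()) };
    /// assert_eq!(unsafe { *second.as_ptr() }, 2);
    /// ```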
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
        // safety contract.
        // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
        // to an allocation, there can't be an allocation at null, thus it's safe to construct
        // `NonNull`.
        unsafe { ConstNonNull { pointer: self.pointer.byte_add(count) } }
    }

    /// Subtracts an offset from a pointer (convenience for
    /// `.offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// [allocated object]: core::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
    ///     println!("{}", end.sub(1).read() as char);
    ///     println!("{}", end.sub(2).read() as char);
    /// }
    /// ```
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    #[must_use = "returns a new pointer rather than modifying its argument"]
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
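        // SAFETY: the caller must uphold the safety contract for `sub`.
        // Additionally safety contract of `sub` guarantees that the resulting pointer is
        // pointing to an allocation, there can't be an allocation at null, thus it's safe to
        // construct `NonNull`.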
        unsafe { ConstNonNull { pointer: self.pointer.sub(count) } }
    }

    /// Subtracts an offset in bytes from a pointer (convenience for
    /// `.byte_offset((count as isize).wrapping_neg())`).
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`sub`][ConstNonNull::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// # Safety
    ///
    /// This has the same safety requirements as [`sub`][ConstNonNull::sub], with the
    /// offset measured in bytes rather than in units of `T`.
    #[must_use]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
        // safety contract.
        // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
        // to an allocation, there can't be an allocation at null, thus it's safe to construct
        // `NonNull`.
        unsafe { ConstNonNull { pointer: self.pointer.byte_sub(count) } }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `mem::size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (mem::size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be *derived from* a pointer to the same [allocated object], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / mem::size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: core::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let a = [0; 5];
    /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
    /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// use std::ptr::NonNull;
    ///
    /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
    /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
    /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let diff_plus_1 = diff.wrapping_add(1);
    /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
    /// assert_eq!(ptr2.addr(), ptr2_other.addr());
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    ///
    /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
    /// ```
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: ConstNonNull<T>) -> isize
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][ConstNonNull::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
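    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// let a = [0u32; 4];
    /// let base = ConstNonNull::from_ref(&a).cast::<u32>();
    /// let third = unsafe { base.add(2) };
    /// // The distance is reported in bytes, not elements.
    /// assert_eq!(unsafe { third.byte_offset_from(base) }, 8);
    /// ```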
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: ConstNonNull<U>) -> isize {
        // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
        unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
    }

    // N.B. `wrapping_offset`, `wrapping_add`, etc are not implemented because they can wrap to null

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: core::ptr::read()
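    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// let x = 12u32;
    /// let ptr = ConstNonNull::from_ref(&x);
    /// // Bitwise copy of the pointee; `x` itself is left untouched.
    /// assert_eq!(unsafe { ptr.read() }, 12);
    /// ```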
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { ptr::read(self.as_ptr()) }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: core::ptr::read_volatile()
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { ptr::read_volatile(self.as_ptr()) }
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// Unlike `read`, the pointer may be unaligned.
    ///
    /// See [`ptr::read_unaligned`] for safety concerns and examples.
    ///
    /// [`ptr::read_unaligned`]: core::ptr::read_unaligned()
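    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// #[repr(packed)]
    /// struct Packed {
    ///     a: u8,
    ///     b: u32, // very likely misaligned because of `a`
    /// }
    ///
    /// let p = Packed { a: 1, b: 0xdead_beef };
    /// // Taking a reference to a packed field is not allowed; use a raw pointer instead.
    /// let field = core::ptr::addr_of!(p.b);
    /// let ptr = unsafe { ConstNonNull::new_unchecked(field) };
    /// assert_eq!(unsafe { ptr.read_unaligned() }, 0xdead_beef);
    /// ```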
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { ptr::read_unaligned(self.as_ptr()) }
    }

    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: core::ptr::copy()
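    ///
    /// # Examples
    ///
    /// Illustrative sketch (not compiled as a doctest, since `ConstNonNull` is crate-local):
    ///
    /// ```ignore
    /// use core::ptr::NonNull;
    ///
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// let src_ptr = ConstNonNull::from_ref(&src).cast::<u8>();
    /// let dst_ptr = NonNull::new(dst.as_mut_ptr()).unwrap();
    /// // Copy three elements from the const source into the mutable destination.
    /// unsafe { src_ptr.copy_to(dst_ptr, 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```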
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
    }

    /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: core::ptr::copy_nonoverlapping()
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// use std::mem::align_of;
    /// use std::ptr::NonNull;
    ///
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[inline]
    #[must_use]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        self.pointer.align_offset(align)
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = NonNull::<AlignedI32>::from(&data);
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
    /// ```
    #[inline]
    #[must_use]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.as_ptr().is_aligned()
    }
}

impl<T> ConstNonNull<[T]> {
    /// Creates a non-null raw slice from a thin pointer and a length.
    ///
    /// The `len` argument is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, but dereferencing the return value is unsafe.
    /// See the documentation of [`slice::from_raw_parts`](core::slice::from_raw_parts) for slice safety requirements.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// // create a slice pointer when starting out with a pointer to the first element
    /// let mut x = [5, 6, 7];
    /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
    /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
    /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
    /// ```
    ///
    /// (Note that this example artificially demonstrates a use of this method,
    /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
    #[must_use]
    #[inline]
    pub const fn slice_from_raw_parts(data: ConstNonNull<T>, len: usize) -> Self {
        // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
        Self { pointer: NonNull::slice_from_raw_parts(data.pointer, len) }
    }

    /// Returns the length of a non-null raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
    /// because the pointer does not have a valid address.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[must_use]
    #[inline]
    pub const fn len(self) -> usize {
        self.as_ptr().len()
    }

    /// Returns `true` if the non-null raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[must_use]
    #[inline]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a non-null pointer to the slice's buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr::NonNull;
    ///
    /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
    /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
    /// ```
    #[inline]
    #[must_use]
    pub const fn as_non_null_ptr(self) -> ConstNonNull<T> {
        self.cast()
    }
}

impl<T: ?Sized> Clone for ConstNonNull<T> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Copy for ConstNonNull<T> {}

impl<T: ?Sized> fmt::Debug for ConstNonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

impl<T: ?Sized> fmt::Pointer for ConstNonNull<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

impl<T: ?Sized> Eq for ConstNonNull<T> {}

impl<T: ?Sized> PartialEq for ConstNonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &Self) -> bool {
        self.as_ptr() == other.as_ptr()
    }
}

impl<T: ?Sized> Ord for ConstNonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_ptr().cmp(&other.as_ptr())
    }
}

#[allow(clippy::non_canonical_partial_ord_impl)]
impl<T: ?Sized> PartialOrd for ConstNonNull<T> {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_ptr().partial_cmp(&other.as_ptr())
    }
}

impl<T: ?Sized> hash::Hash for ConstNonNull<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_ptr().hash(state)
    }
}

impl<T: ?Sized> From<Unique<T>> for ConstNonNull<T> {
    #[inline]
    fn from(unique: Unique<T>) -> Self {
        Self { pointer: unique.as_non_null_ptr() }
    }
}

impl<T: ?Sized> From<&T> for ConstNonNull<T> {
    /// Converts a `&T` to a `ConstNonNull<T>`.
    ///
    /// This conversion is safe and infallible since references cannot be null.
    #[inline]
    fn from(r: &T) -> Self {
        ConstNonNull::from_ref(r)
    }
}