core/ptr/non_null.rs
1use crate::clone::TrivialClone;
2use crate::cmp::Ordering;
3use crate::marker::{Destruct, PointeeSized, Unsize};
4use crate::mem::{MaybeUninit, SizedTypeProperties};
5use crate::num::NonZero;
6use crate::ops::{CoerceUnsized, DispatchFromDyn};
7use crate::pin::PinCoerceUnsized;
8use crate::ptr::Unique;
9use crate::slice::{self, SliceIndex};
10use crate::ub_checks::assert_unsafe_precondition;
11use crate::{fmt, hash, intrinsics, mem, ptr};
12
13/// `*mut T` but non-zero and [covariant].
14///
15/// This is often the correct thing to use when building data structures using
16/// raw pointers, but is ultimately more dangerous to use because of its additional
17/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
18///
19/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
20/// is never dereferenced. This is so that enums may use this forbidden value
21/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
22/// However the pointer may still dangle if it isn't dereferenced.
23///
24/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. This is usually the correct
25/// choice for most data structures and safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`,
26/// and `LinkedList`.
27///
28/// In rare cases, if your type exposes a way to mutate the value of `T` through a `NonNull<T>`,
29/// and you need to prevent unsoundness from variance (for example, if `T` could be a reference
30/// with a shorter lifetime), you should add a field to make your type invariant, such as
31/// `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
32///
33/// Example of a type that must be invariant:
/// ```rust
/// use std::cell::Cell;
/// use std::marker::PhantomData;
/// struct Invariant<T> {
///     ptr: std::ptr::NonNull<T>,
///     _invariant: PhantomData<Cell<T>>,
/// }
/// ```
42///
43/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
44/// not change the fact that mutating through a (pointer derived from a) shared
45/// reference is undefined behavior unless the mutation happens inside an
46/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
47/// reference. When using this `From` instance without an `UnsafeCell<T>`,
48/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
49/// is never used for mutation.
50///
51/// # Representation
52///
53/// Thanks to the [null pointer optimization],
54/// `NonNull<T>` and `Option<NonNull<T>>`
55/// are guaranteed to have the same size and alignment:
56///
57/// ```
58/// use std::ptr::NonNull;
59///
60/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
61/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
62///
63/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
64/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
65/// ```
66///
67/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
68/// [`PhantomData`]: crate::marker::PhantomData
69/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
70/// [null pointer optimization]: crate::option#representation
71#[stable(feature = "nonnull", since = "1.25.0")]
72#[repr(transparent)]
73#[rustc_layout_scalar_valid_range_start(1)]
74#[rustc_nonnull_optimization_guaranteed]
75#[rustc_diagnostic_item = "NonNull"]
76pub struct NonNull<T: PointeeSized> {
77 // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to
78 // this is banned by <https://github.com/rust-lang/compiler-team/issues/807>.
79 pointer: *const T,
80}
81
82/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
83// N.B., this impl is unnecessary, but should provide better error messages.
84#[stable(feature = "nonnull", since = "1.25.0")]
85impl<T: PointeeSized> !Send for NonNull<T> {}
86
87/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
88// N.B., this impl is unnecessary, but should provide better error messages.
89#[stable(feature = "nonnull", since = "1.25.0")]
90impl<T: PointeeSized> !Sync for NonNull<T> {}
91
92impl<T: Sized> NonNull<T> {
93 /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance].
94 ///
95 /// For more details, see the equivalent method on a raw pointer, [`ptr::without_provenance_mut`].
96 ///
97 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
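    ///
    /// # Examples
    ///
    /// A minimal sketch; the address `0x100` is arbitrary and the resulting pointer
    /// must not be dereferenced:
    ///
    /// ```
    /// use std::num::NonZero;
    /// use std::ptr::NonNull;
    ///
    /// let ptr = NonNull::<u8>::without_provenance(NonZero::new(0x100).unwrap());
    /// assert_eq!(ptr.addr().get(), 0x100);
    /// ```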
98 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
99 #[rustc_const_stable(feature = "nonnull_provenance", since = "1.89.0")]
100 #[must_use]
101 #[inline]
102 pub const fn without_provenance(addr: NonZero<usize>) -> Self {
103 let pointer = crate::ptr::without_provenance(addr.get());
104 // SAFETY: we know `addr` is non-zero.
105 unsafe { NonNull { pointer } }
106 }
107
108 /// Creates a new `NonNull` that is dangling, but well-aligned.
109 ///
110 /// This is useful for initializing types which lazily allocate, like
111 /// `Vec::new` does.
112 ///
    /// Note that the address of the returned pointer may coincide with that of
    /// a valid pointer, which means it must not be used as a
    /// "not yet initialized" sentinel value.
116 /// Types that lazily allocate must track initialization by some other means.
117 ///
118 /// # Examples
119 ///
120 /// ```
121 /// use std::ptr::NonNull;
122 ///
123 /// let ptr = NonNull::<u32>::dangling();
124 /// // Important: don't try to access the value of `ptr` without
125 /// // initializing it first! The pointer is not null but isn't valid either!
126 /// ```
127 #[stable(feature = "nonnull", since = "1.25.0")]
128 #[rustc_const_stable(feature = "const_nonnull_dangling", since = "1.36.0")]
129 #[must_use]
130 #[inline]
131 pub const fn dangling() -> Self {
132 let align = crate::ptr::Alignment::of::<T>();
133 NonNull::without_provenance(align.as_nonzero())
134 }
135
136 /// Converts an address back to a mutable pointer, picking up some previously 'exposed'
137 /// [provenance][crate::ptr#provenance].
138 ///
139 /// For more details, see the equivalent method on a raw pointer, [`ptr::with_exposed_provenance_mut`].
140 ///
141 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
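    ///
    /// # Examples
    ///
    /// One possible round trip through [`expose_provenance`][NonNull::expose_provenance],
    /// sketched for a local `u32`:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut x = 42u32;
    /// let ptr = NonNull::from(&mut x);
    /// // Expose the provenance and keep only the address around.
    /// let addr = ptr.expose_provenance();
    /// // Later, reconstruct a usable pointer from that address.
    /// let ptr_again = NonNull::<u32>::with_exposed_provenance(addr);
    /// assert_eq!(unsafe { ptr_again.read() }, 42);
    /// ```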
142 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
143 #[inline]
144 pub fn with_exposed_provenance(addr: NonZero<usize>) -> Self {
145 // SAFETY: we know `addr` is non-zero.
146 unsafe {
147 let ptr = crate::ptr::with_exposed_provenance_mut(addr.get());
148 NonNull::new_unchecked(ptr)
149 }
150 }
151
    /// Returns a shared reference to the value. In contrast to [`as_ref`], this does not require
    /// the value to be initialized.
154 ///
155 /// For the mutable counterpart see [`as_uninit_mut`].
156 ///
157 /// [`as_ref`]: NonNull::as_ref
158 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
159 ///
160 /// # Safety
161 ///
162 /// When calling this method, you have to ensure that
163 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
164 /// Note that because the created reference is to `MaybeUninit<T>`, the
165 /// source pointer can point to uninitialized memory.
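    ///
    /// # Examples
    ///
    /// A minimal sketch using stack storage that is never initialized:
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut storage = MaybeUninit::<u32>::uninit();
    /// // A `NonNull<u32>` pointing at (possibly uninitialized) storage for a `u32`.
    /// let ptr: NonNull<u32> = NonNull::from(&mut storage).cast();
    /// // SAFETY: `ptr` was derived from a live reference, so it is convertible to a reference.
    /// let r: &MaybeUninit<u32> = unsafe { ptr.as_uninit_ref() };
    /// // The reference can be handed around without the value ever being read.
    /// assert_eq!(r.as_ptr(), ptr.as_ptr().cast_const());
    /// ```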
166 #[inline]
167 #[must_use]
168 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
169 pub const unsafe fn as_uninit_ref<'a>(self) -> &'a MaybeUninit<T> {
170 // SAFETY: the caller must guarantee that `self` meets all the
171 // requirements for a reference.
172 unsafe { &*self.cast().as_ptr() }
173 }
174
    /// Returns a unique reference to the value. In contrast to [`as_mut`], this does not require
    /// the value to be initialized.
177 ///
178 /// For the shared counterpart see [`as_uninit_ref`].
179 ///
180 /// [`as_mut`]: NonNull::as_mut
181 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
182 ///
183 /// # Safety
184 ///
185 /// When calling this method, you have to ensure that
186 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
187 /// Note that because the created reference is to `MaybeUninit<T>`, the
188 /// source pointer can point to uninitialized memory.
189 #[inline]
190 #[must_use]
191 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
192 pub const unsafe fn as_uninit_mut<'a>(self) -> &'a mut MaybeUninit<T> {
193 // SAFETY: the caller must guarantee that `self` meets all the
194 // requirements for a reference.
195 unsafe { &mut *self.cast().as_ptr() }
196 }
197
198 /// Casts from a pointer-to-`T` to a pointer-to-`[T; N]`.
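    ///
    /// # Examples
    ///
    /// An illustrative sketch, assuming the pointee really is the first of `N`
    /// consecutive elements:
    ///
    /// ```
    /// #![feature(ptr_cast_array)]
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [1u8, 2, 3, 4];
    /// // A thin pointer to the first element, derived from the whole array.
    /// let first: NonNull<u8> = NonNull::from(&mut buf).cast();
    /// let as_array: NonNull<[u8; 4]> = first.cast_array::<4>();
    /// // SAFETY: the pointer really does point at 4 consecutive, initialized `u8`s.
    /// assert_eq!(unsafe { as_array.read() }, [1, 2, 3, 4]);
    /// ```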
199 #[inline]
200 #[unstable(feature = "ptr_cast_array", issue = "144514")]
201 pub const fn cast_array<const N: usize>(self) -> NonNull<[T; N]> {
202 self.cast()
203 }
204}
205
206impl<T: PointeeSized> NonNull<T> {
207 /// Creates a new `NonNull`.
208 ///
209 /// # Safety
210 ///
211 /// `ptr` must be non-null.
212 ///
213 /// # Examples
214 ///
215 /// ```
216 /// use std::ptr::NonNull;
217 ///
218 /// let mut x = 0u32;
219 /// let ptr = unsafe { NonNull::new_unchecked(&mut x as *mut _) };
220 /// ```
221 ///
222 /// *Incorrect* usage of this function:
223 ///
224 /// ```rust,no_run
225 /// use std::ptr::NonNull;
226 ///
227 /// // NEVER DO THAT!!! This is undefined behavior. ⚠️
228 /// let ptr = unsafe { NonNull::<u32>::new_unchecked(std::ptr::null_mut()) };
229 /// ```
230 #[stable(feature = "nonnull", since = "1.25.0")]
231 #[rustc_const_stable(feature = "const_nonnull_new_unchecked", since = "1.25.0")]
232 #[inline]
233 #[track_caller]
234 pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
235 // SAFETY: the caller must guarantee that `ptr` is non-null.
236 unsafe {
237 assert_unsafe_precondition!(
238 check_language_ub,
239 "NonNull::new_unchecked requires that the pointer is non-null",
240 (ptr: *mut () = ptr as *mut ()) => !ptr.is_null()
241 );
242 NonNull { pointer: ptr as _ }
243 }
244 }
245
246 /// Creates a new `NonNull` if `ptr` is non-null.
247 ///
248 /// # Panics during const evaluation
249 ///
250 /// This method will panic during const evaluation if the pointer cannot be
251 /// determined to be null or not. See [`is_null`] for more information.
252 ///
253 /// [`is_null`]: ../primitive.pointer.html#method.is_null-1
254 ///
255 /// # Examples
256 ///
257 /// ```
258 /// use std::ptr::NonNull;
259 ///
260 /// let mut x = 0u32;
261 /// let ptr = NonNull::<u32>::new(&mut x as *mut _).expect("ptr is null!");
262 ///
263 /// if let Some(ptr) = NonNull::<u32>::new(std::ptr::null_mut()) {
264 /// unreachable!();
265 /// }
266 /// ```
267 #[stable(feature = "nonnull", since = "1.25.0")]
268 #[rustc_const_stable(feature = "const_nonnull_new", since = "1.85.0")]
269 #[inline]
270 pub const fn new(ptr: *mut T) -> Option<Self> {
271 if !ptr.is_null() {
272 // SAFETY: The pointer is already checked and is not null
273 Some(unsafe { Self::new_unchecked(ptr) })
274 } else {
275 None
276 }
277 }
278
279 /// Converts a reference to a `NonNull` pointer.
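    ///
    /// # Examples
    ///
    /// A small sketch; reads through the resulting pointer are fine, mutation is not:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 5u32;
    /// let ptr = NonNull::from_ref(&x);
    /// // SAFETY: `ptr` comes from a live shared reference, so reading through it is sound.
    /// assert_eq!(unsafe { ptr.read() }, 5);
    /// ```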
280 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
281 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
282 #[inline]
283 pub const fn from_ref(r: &T) -> Self {
284 // SAFETY: A reference cannot be null.
285 unsafe { NonNull { pointer: r as *const T } }
286 }
287
288 /// Converts a mutable reference to a `NonNull` pointer.
289 #[stable(feature = "non_null_from_ref", since = "1.89.0")]
290 #[rustc_const_stable(feature = "non_null_from_ref", since = "1.89.0")]
291 #[inline]
292 pub const fn from_mut(r: &mut T) -> Self {
293 // SAFETY: A mutable reference cannot be null.
294 unsafe { NonNull { pointer: r as *mut T } }
295 }
296
297 /// Performs the same functionality as [`std::ptr::from_raw_parts`], except that a
298 /// `NonNull` pointer is returned, as opposed to a raw `*const` pointer.
299 ///
300 /// See the documentation of [`std::ptr::from_raw_parts`] for more details.
301 ///
302 /// [`std::ptr::from_raw_parts`]: crate::ptr::from_raw_parts
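    ///
    /// # Examples
    ///
    /// A sketch of rebuilding a slice pointer from a thin data pointer and a length:
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    /// use std::ptr::NonNull;
    ///
    /// let mut data = [1i32, 2, 3];
    /// let thin: NonNull<()> = NonNull::from(&mut data).cast();
    /// let slice: NonNull<[i32]> = NonNull::from_raw_parts(thin, 3);
    /// // SAFETY: the reconstructed pointer covers exactly the original array.
    /// assert_eq!(unsafe { slice.as_ref() }, &[1, 2, 3]);
    /// ```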
303 #[unstable(feature = "ptr_metadata", issue = "81513")]
304 #[inline]
305 pub const fn from_raw_parts(
306 data_pointer: NonNull<impl super::Thin>,
307 metadata: <T as super::Pointee>::Metadata,
308 ) -> NonNull<T> {
        // SAFETY: The result of `ptr::from_raw_parts_mut` is non-null because `data_pointer` is.
310 unsafe {
311 NonNull::new_unchecked(super::from_raw_parts_mut(data_pointer.as_ptr(), metadata))
312 }
313 }
314
315 /// Decompose a (possibly wide) pointer into its data pointer and metadata components.
316 ///
317 /// The pointer can be later reconstructed with [`NonNull::from_raw_parts`].
318 #[unstable(feature = "ptr_metadata", issue = "81513")]
319 #[must_use = "this returns the result of the operation, \
320 without modifying the original"]
321 #[inline]
322 pub const fn to_raw_parts(self) -> (NonNull<()>, <T as super::Pointee>::Metadata) {
323 (self.cast(), super::metadata(self.as_ptr()))
324 }
325
326 /// Gets the "address" portion of the pointer.
327 ///
328 /// For more details, see the equivalent method on a raw pointer, [`pointer::addr`].
329 ///
330 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
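    ///
    /// # Examples
    ///
    /// A quick sketch showing that this is the same address the raw pointer reports,
    /// just wrapped in a `NonZero<usize>`:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let x = 7u8;
    /// let ptr = NonNull::from_ref(&x);
    /// assert_eq!(ptr.addr().get(), ptr.as_ptr().addr());
    /// ```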
331 #[must_use]
332 #[inline]
333 #[stable(feature = "strict_provenance", since = "1.84.0")]
334 pub fn addr(self) -> NonZero<usize> {
335 // SAFETY: The pointer is guaranteed by the type to be non-null,
336 // meaning that the address will be non-zero.
337 unsafe { NonZero::new_unchecked(self.as_ptr().addr()) }
338 }
339
340 /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
341 /// [`with_exposed_provenance`][NonNull::with_exposed_provenance] and returns the "address" portion.
342 ///
343 /// For more details, see the equivalent method on a raw pointer, [`pointer::expose_provenance`].
344 ///
345 /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
346 #[stable(feature = "nonnull_provenance", since = "1.89.0")]
347 pub fn expose_provenance(self) -> NonZero<usize> {
348 // SAFETY: The pointer is guaranteed by the type to be non-null,
349 // meaning that the address will be non-zero.
350 unsafe { NonZero::new_unchecked(self.as_ptr().expose_provenance()) }
351 }
352
353 /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
354 /// `self`.
355 ///
356 /// For more details, see the equivalent method on a raw pointer, [`pointer::with_addr`].
357 ///
358 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
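    ///
    /// # Examples
    ///
    /// A sketch that moves a pointer one byte forward while keeping its provenance:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [0u8; 8];
    /// let ptr: NonNull<u8> = NonNull::from(&mut buf).cast();
    /// let next = ptr.with_addr(ptr.addr().checked_add(1).unwrap());
    /// assert_eq!(next.addr().get(), ptr.addr().get() + 1);
    /// ```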
359 #[must_use]
360 #[inline]
361 #[stable(feature = "strict_provenance", since = "1.84.0")]
362 pub fn with_addr(self, addr: NonZero<usize>) -> Self {
        // SAFETY: The result of `pointer::with_addr` is non-null because `addr` is guaranteed to be non-zero.
364 unsafe { NonNull::new_unchecked(self.as_ptr().with_addr(addr.get()) as *mut _) }
365 }
366
367 /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
368 /// [provenance][crate::ptr#provenance] of `self`.
369 ///
370 /// For more details, see the equivalent method on a raw pointer, [`pointer::map_addr`].
371 ///
372 /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
373 #[must_use]
374 #[inline]
375 #[stable(feature = "strict_provenance", since = "1.84.0")]
376 pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
377 self.with_addr(f(self.addr()))
378 }
379
380 /// Acquires the underlying `*mut` pointer.
381 ///
382 /// # Examples
383 ///
384 /// ```
385 /// use std::ptr::NonNull;
386 ///
387 /// let mut x = 0u32;
388 /// let ptr = NonNull::new(&mut x).expect("ptr is null!");
389 ///
390 /// let x_value = unsafe { *ptr.as_ptr() };
391 /// assert_eq!(x_value, 0);
392 ///
393 /// unsafe { *ptr.as_ptr() += 2; }
394 /// let x_value = unsafe { *ptr.as_ptr() };
395 /// assert_eq!(x_value, 2);
396 /// ```
397 #[stable(feature = "nonnull", since = "1.25.0")]
398 #[rustc_const_stable(feature = "const_nonnull_as_ptr", since = "1.32.0")]
399 #[rustc_never_returns_null_ptr]
400 #[must_use]
401 #[inline(always)]
402 pub const fn as_ptr(self) -> *mut T {
403 // This is a transmute for the same reasons as `NonZero::get`.
404
405 // SAFETY: `NonNull` is `transparent` over a `*const T`, and `*const T`
406 // and `*mut T` have the same layout, so transitively we can transmute
407 // our `NonNull` to a `*mut T` directly.
408 unsafe { mem::transmute::<Self, *mut T>(self) }
409 }
410
411 /// Returns a shared reference to the value. If the value may be uninitialized, [`as_uninit_ref`]
412 /// must be used instead.
413 ///
414 /// For the mutable counterpart see [`as_mut`].
415 ///
416 /// [`as_uninit_ref`]: NonNull::as_uninit_ref
417 /// [`as_mut`]: NonNull::as_mut
418 ///
419 /// # Safety
420 ///
421 /// When calling this method, you have to ensure that
422 /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
423 ///
424 /// # Examples
425 ///
426 /// ```
427 /// use std::ptr::NonNull;
428 ///
429 /// let mut x = 0u32;
430 /// let ptr = NonNull::new(&mut x as *mut _).expect("ptr is null!");
431 ///
432 /// let ref_x = unsafe { ptr.as_ref() };
433 /// println!("{ref_x}");
434 /// ```
435 ///
436 /// [the module documentation]: crate::ptr#safety
437 #[stable(feature = "nonnull", since = "1.25.0")]
438 #[rustc_const_stable(feature = "const_nonnull_as_ref", since = "1.73.0")]
439 #[must_use]
440 #[inline(always)]
441 pub const unsafe fn as_ref<'a>(&self) -> &'a T {
442 // SAFETY: the caller must guarantee that `self` meets all the
443 // requirements for a reference.
444 // `cast_const` avoids a mutable raw pointer deref.
445 unsafe { &*self.as_ptr().cast_const() }
446 }
447
448 /// Returns a unique reference to the value. If the value may be uninitialized, [`as_uninit_mut`]
449 /// must be used instead.
450 ///
451 /// For the shared counterpart see [`as_ref`].
452 ///
453 /// [`as_uninit_mut`]: NonNull::as_uninit_mut
454 /// [`as_ref`]: NonNull::as_ref
455 ///
456 /// # Safety
457 ///
458 /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
461 ///
462 /// ```
463 /// use std::ptr::NonNull;
464 ///
465 /// let mut x = 0u32;
466 /// let mut ptr = NonNull::new(&mut x).expect("null pointer");
467 ///
468 /// let x_ref = unsafe { ptr.as_mut() };
469 /// assert_eq!(*x_ref, 0);
470 /// *x_ref += 2;
471 /// assert_eq!(*x_ref, 2);
472 /// ```
473 ///
474 /// [the module documentation]: crate::ptr#safety
475 #[stable(feature = "nonnull", since = "1.25.0")]
476 #[rustc_const_stable(feature = "const_ptr_as_ref", since = "1.83.0")]
477 #[must_use]
478 #[inline(always)]
479 pub const unsafe fn as_mut<'a>(&mut self) -> &'a mut T {
480 // SAFETY: the caller must guarantee that `self` meets all the
481 // requirements for a mutable reference.
482 unsafe { &mut *self.as_ptr() }
483 }
484
485 /// Casts to a pointer of another type.
486 ///
487 /// # Examples
488 ///
489 /// ```
490 /// use std::ptr::NonNull;
491 ///
492 /// let mut x = 0u32;
493 /// let ptr = NonNull::new(&mut x as *mut _).expect("null pointer");
494 ///
495 /// let casted_ptr = ptr.cast::<i8>();
496 /// let raw_ptr: *mut i8 = casted_ptr.as_ptr();
497 /// ```
498 #[stable(feature = "nonnull_cast", since = "1.27.0")]
499 #[rustc_const_stable(feature = "const_nonnull_cast", since = "1.36.0")]
500 #[must_use = "this returns the result of the operation, \
501 without modifying the original"]
502 #[inline]
503 pub const fn cast<U>(self) -> NonNull<U> {
504 // SAFETY: `self` is a `NonNull` pointer which is necessarily non-null
505 unsafe { NonNull { pointer: self.as_ptr() as *mut U } }
506 }
507
508 /// Try to cast to a pointer of another type by checking alignment.
509 ///
510 /// If the pointer is properly aligned to the target type, it will be
511 /// cast to the target type. Otherwise, `None` is returned.
512 ///
513 /// # Examples
514 ///
515 /// ```rust
516 /// #![feature(pointer_try_cast_aligned)]
517 /// use std::ptr::NonNull;
518 ///
519 /// let mut x = 0u64;
520 ///
521 /// let aligned = NonNull::from_mut(&mut x);
522 /// let unaligned = unsafe { aligned.byte_add(1) };
523 ///
524 /// assert!(aligned.try_cast_aligned::<u32>().is_some());
525 /// assert!(unaligned.try_cast_aligned::<u32>().is_none());
526 /// ```
527 #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
528 #[must_use = "this returns the result of the operation, \
529 without modifying the original"]
530 #[inline]
531 pub fn try_cast_aligned<U>(self) -> Option<NonNull<U>> {
532 if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
533 }
534
535 /// Adds an offset to a pointer.
536 ///
537 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
538 /// offset of `3 * size_of::<T>()` bytes.
539 ///
540 /// # Safety
541 ///
542 /// If any of the following conditions are violated, the result is Undefined Behavior:
543 ///
544 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
545 ///
546 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
547 /// [allocation], and the entire memory range between `self` and the result must be in
548 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
549 /// of the address space.
550 ///
551 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
552 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
553 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
554 /// safe.
555 ///
556 /// [allocation]: crate::ptr#allocation
557 ///
558 /// # Examples
559 ///
560 /// ```
561 /// use std::ptr::NonNull;
562 ///
563 /// let mut s = [1, 2, 3];
564 /// let ptr: NonNull<u32> = NonNull::new(s.as_mut_ptr()).unwrap();
565 ///
566 /// unsafe {
567 /// println!("{}", ptr.offset(1).read());
568 /// println!("{}", ptr.offset(2).read());
569 /// }
570 /// ```
571 #[inline(always)]
572 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
573 #[must_use = "returns a new pointer rather than modifying its argument"]
574 #[stable(feature = "non_null_convenience", since = "1.80.0")]
575 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
576 pub const unsafe fn offset(self, count: isize) -> Self
577 where
578 T: Sized,
579 {
580 // SAFETY: the caller must uphold the safety contract for `offset`.
581 // Additionally safety contract of `offset` guarantees that the resulting pointer is
582 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
583 // construct `NonNull`.
584 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
585 }
586
587 /// Calculates the offset from a pointer in bytes.
588 ///
589 /// `count` is in units of **bytes**.
590 ///
591 /// This is purely a convenience for casting to a `u8` pointer and
592 /// using [offset][pointer::offset] on it. See that method for documentation
593 /// and safety requirements.
594 ///
595 /// For non-`Sized` pointees this operation changes only the data pointer,
596 /// leaving the metadata untouched.
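    ///
    /// # Examples
    ///
    /// A small sketch stepping over a `u16` by counting bytes:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut pair = [0u16, 0xABCD];
    /// let ptr: NonNull<u16> = NonNull::from(&mut pair).cast();
    /// // Step forward by two *bytes*, i.e. one `u16` element.
    /// let second = unsafe { ptr.byte_offset(2) };
    /// assert_eq!(unsafe { second.read() }, 0xABCD);
    /// ```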
597 #[must_use]
598 #[inline(always)]
599 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
600 #[stable(feature = "non_null_convenience", since = "1.80.0")]
601 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
602 pub const unsafe fn byte_offset(self, count: isize) -> Self {
603 // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
604 // the same safety contract.
605 // Additionally safety contract of `offset` guarantees that the resulting pointer is
606 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
607 // construct `NonNull`.
608 unsafe { NonNull { pointer: self.as_ptr().byte_offset(count) } }
609 }
610
611 /// Adds an offset to a pointer (convenience for `.offset(count as isize)`).
612 ///
613 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
614 /// offset of `3 * size_of::<T>()` bytes.
615 ///
616 /// # Safety
617 ///
618 /// If any of the following conditions are violated, the result is Undefined Behavior:
619 ///
620 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
621 ///
622 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
623 /// [allocation], and the entire memory range between `self` and the result must be in
624 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
625 /// of the address space.
626 ///
627 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
628 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
629 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
630 /// safe.
631 ///
632 /// [allocation]: crate::ptr#allocation
633 ///
634 /// # Examples
635 ///
636 /// ```
637 /// use std::ptr::NonNull;
638 ///
639 /// let s: &str = "123";
640 /// let ptr: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap();
641 ///
642 /// unsafe {
643 /// println!("{}", ptr.add(1).read() as char);
644 /// println!("{}", ptr.add(2).read() as char);
645 /// }
646 /// ```
647 #[inline(always)]
648 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
649 #[must_use = "returns a new pointer rather than modifying its argument"]
650 #[stable(feature = "non_null_convenience", since = "1.80.0")]
651 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
652 pub const unsafe fn add(self, count: usize) -> Self
653 where
654 T: Sized,
655 {
656 // SAFETY: the caller must uphold the safety contract for `offset`.
657 // Additionally safety contract of `offset` guarantees that the resulting pointer is
658 // pointing to an allocation, there can't be an allocation at null, thus it's safe to
659 // construct `NonNull`.
660 unsafe { NonNull { pointer: intrinsics::offset(self.as_ptr(), count) } }
661 }
662
663 /// Calculates the offset from a pointer in bytes (convenience for `.byte_offset(count as isize)`).
664 ///
665 /// `count` is in units of bytes.
666 ///
667 /// This is purely a convenience for casting to a `u8` pointer and
668 /// using [`add`][NonNull::add] on it. See that method for documentation
669 /// and safety requirements.
670 ///
671 /// For non-`Sized` pointees this operation changes only the data pointer,
672 /// leaving the metadata untouched.
673 #[must_use]
674 #[inline(always)]
675 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
676 #[stable(feature = "non_null_convenience", since = "1.80.0")]
677 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
678 pub const unsafe fn byte_add(self, count: usize) -> Self {
679 // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
680 // safety contract.
681 // Additionally safety contract of `add` guarantees that the resulting pointer is pointing
682 // to an allocation, there can't be an allocation at null, thus it's safe to construct
683 // `NonNull`.
684 unsafe { NonNull { pointer: self.as_ptr().byte_add(count) } }
685 }
686
687 /// Subtracts an offset from a pointer (convenience for
688 /// `.offset((count as isize).wrapping_neg())`).
689 ///
690 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
691 /// offset of `3 * size_of::<T>()` bytes.
692 ///
693 /// # Safety
694 ///
695 /// If any of the following conditions are violated, the result is Undefined Behavior:
696 ///
697 /// * The computed offset, `count * size_of::<T>()` bytes, must not overflow `isize`.
698 ///
699 /// * If the computed offset is non-zero, then `self` must be derived from a pointer to some
700 /// [allocation], and the entire memory range between `self` and the result must be in
701 /// bounds of that allocation. In particular, this range must not "wrap around" the edge
702 /// of the address space.
703 ///
704 /// Allocations can never be larger than `isize::MAX` bytes, so if the computed offset
705 /// stays in bounds of the allocation, it is guaranteed to satisfy the first requirement.
706 /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
707 /// safe.
708 ///
709 /// [allocation]: crate::ptr#allocation
710 ///
711 /// # Examples
712 ///
713 /// ```
714 /// use std::ptr::NonNull;
715 ///
716 /// let s: &str = "123";
717 ///
718 /// unsafe {
719 /// let end: NonNull<u8> = NonNull::new(s.as_ptr().cast_mut()).unwrap().add(3);
720 /// println!("{}", end.sub(1).read() as char);
721 /// println!("{}", end.sub(2).read() as char);
722 /// }
723 /// ```
724 #[inline(always)]
725 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
726 #[must_use = "returns a new pointer rather than modifying its argument"]
727 #[stable(feature = "non_null_convenience", since = "1.80.0")]
728 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
729 pub const unsafe fn sub(self, count: usize) -> Self
730 where
731 T: Sized,
732 {
733 if T::IS_ZST {
734 // Pointer arithmetic does nothing when the pointee is a ZST.
735 self
736 } else {
737 // SAFETY: the caller must uphold the safety contract for `offset`.
738 // Because the pointee is *not* a ZST, that means that `count` is
739 // at most `isize::MAX`, and thus the negation cannot overflow.
740 unsafe { self.offset((count as isize).unchecked_neg()) }
741 }
742 }
743
744 /// Calculates the offset from a pointer in bytes (convenience for
745 /// `.byte_offset((count as isize).wrapping_neg())`).
746 ///
747 /// `count` is in units of bytes.
748 ///
749 /// This is purely a convenience for casting to a `u8` pointer and
750 /// using [`sub`][NonNull::sub] on it. See that method for documentation
751 /// and safety requirements.
752 ///
753 /// For non-`Sized` pointees this operation changes only the data pointer,
754 /// leaving the metadata untouched.
755 #[must_use]
756 #[inline(always)]
757 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
758 #[stable(feature = "non_null_convenience", since = "1.80.0")]
759 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
760 pub const unsafe fn byte_sub(self, count: usize) -> Self {
761 // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same
762 // safety contract.
763 // Additionally safety contract of `sub` guarantees that the resulting pointer is pointing
764 // to an allocation, there can't be an allocation at null, thus it's safe to construct
765 // `NonNull`.
766 unsafe { NonNull { pointer: self.as_ptr().byte_sub(count) } }
767 }
768
769 /// Calculates the distance between two pointers within the same allocation. The returned value is in
770 /// units of T: the distance in bytes divided by `size_of::<T>()`.
771 ///
772 /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
773 /// except that it has a lot more opportunities for UB, in exchange for the compiler
774 /// better understanding what you are doing.
775 ///
776 /// The primary motivation of this method is for computing the `len` of an array/slice
777 /// of `T` that you are currently representing as a "start" and "end" pointer
778 /// (and "end" is "one past the end" of the array).
779 /// In that case, `end.offset_from(start)` gets you the length of the array.
780 ///
    /// All of the following safety requirements are trivially satisfied for this use case.
782 ///
783 /// [`offset`]: #method.offset
784 ///
785 /// # Safety
786 ///
787 /// If any of the following conditions are violated, the result is Undefined Behavior:
788 ///
789 /// * `self` and `origin` must either
790 ///
791 /// * point to the same address, or
792 /// * both be *derived from* a pointer to the same [allocation], and the memory range between
    ///   the two pointers must be in bounds of that allocation. (See below for an example.)
794 ///
795 /// * The distance between the pointers, in bytes, must be an exact multiple
796 /// of the size of `T`.
797 ///
798 /// As a consequence, the absolute distance between the pointers, in bytes, computed on
799 /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
800 /// implied by the in-bounds requirement, and the fact that no allocation can be larger
801 /// than `isize::MAX` bytes.
802 ///
803 /// The requirement for pointers to be derived from the same allocation is primarily
804 /// needed for `const`-compatibility: the distance between pointers into *different* allocated
805 /// objects is not known at compile-time. However, the requirement also exists at
806 /// runtime and may be exploited by optimizations. If you wish to compute the difference between
807 /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
808 /// origin as isize) / size_of::<T>()`.
809 // FIXME: recommend `addr()` instead of `as usize` once that is stable.
810 ///
811 /// [`add`]: #method.add
812 /// [allocation]: crate::ptr#allocation
813 ///
814 /// # Panics
815 ///
816 /// This function panics if `T` is a Zero-Sized Type ("ZST").
817 ///
818 /// # Examples
819 ///
820 /// Basic usage:
821 ///
822 /// ```
823 /// use std::ptr::NonNull;
824 ///
825 /// let a = [0; 5];
826 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
827 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
828 /// unsafe {
829 /// assert_eq!(ptr2.offset_from(ptr1), 2);
830 /// assert_eq!(ptr1.offset_from(ptr2), -2);
831 /// assert_eq!(ptr1.offset(2), ptr2);
832 /// assert_eq!(ptr2.offset(-2), ptr1);
833 /// }
834 /// ```
835 ///
836 /// *Incorrect* usage:
837 ///
838 /// ```rust,no_run
839 /// use std::ptr::NonNull;
840 ///
841 /// let ptr1 = NonNull::new(Box::into_raw(Box::new(0u8))).unwrap();
842 /// let ptr2 = NonNull::new(Box::into_raw(Box::new(1u8))).unwrap();
843 /// let diff = (ptr2.addr().get() as isize).wrapping_sub(ptr1.addr().get() as isize);
844 /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
845 /// let diff_plus_1 = diff.wrapping_add(1);
846 /// let ptr2_other = NonNull::new(ptr1.as_ptr().wrapping_byte_offset(diff_plus_1)).unwrap();
847 /// assert_eq!(ptr2.addr(), ptr2_other.addr());
848 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
849 /// // computing their offset is undefined behavior, even though
850 /// // they point to addresses that are in-bounds of the same object!
851 ///
852 /// let one = unsafe { ptr2_other.offset_from(ptr2) }; // Undefined Behavior! ⚠️
853 /// ```
854 #[inline]
855 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
856 #[stable(feature = "non_null_convenience", since = "1.80.0")]
857 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
858 pub const unsafe fn offset_from(self, origin: NonNull<T>) -> isize
859 where
860 T: Sized,
861 {
862 // SAFETY: the caller must uphold the safety contract for `offset_from`.
863 unsafe { self.as_ptr().offset_from(origin.as_ptr()) }
864 }
865
866 /// Calculates the distance between two pointers within the same allocation. The returned value is in
867 /// units of **bytes**.
868 ///
869 /// This is purely a convenience for casting to a `u8` pointer and
870 /// using [`offset_from`][NonNull::offset_from] on it. See that method for
871 /// documentation and safety requirements.
872 ///
873 /// For non-`Sized` pointees this operation considers only the data pointers,
874 /// ignoring the metadata.
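    ///
    /// # Examples
    ///
    /// A sketch comparing the byte distance against the element distance:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let arr = [0u32; 4];
    /// let first = NonNull::from_ref(&arr).cast::<u32>();
    /// // SAFETY: both pointers are derived from, and in bounds of, the same array.
    /// let last = unsafe { first.add(3) };
    /// assert_eq!(unsafe { last.byte_offset_from(first) }, 3 * size_of::<u32>() as isize);
    /// ```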
875 #[inline(always)]
876 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
877 #[stable(feature = "non_null_convenience", since = "1.80.0")]
878 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
879 pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
880 // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
881 unsafe { self.as_ptr().byte_offset_from(origin.as_ptr()) }
882 }
883
    // N.B. `wrapping_offset`, `wrapping_add`, etc. are not implemented because they can wrap to null.
885
886 /// Calculates the distance between two pointers within the same allocation, *where it's known that
887 /// `self` is equal to or greater than `origin`*. The returned value is in
888 /// units of T: the distance in bytes is divided by `size_of::<T>()`.
889 ///
890 /// This computes the same value that [`offset_from`](#method.offset_from)
891 /// would compute, but with the added precondition that the offset is
892 /// guaranteed to be non-negative. This method is equivalent to
893 /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
894 /// but it provides slightly more information to the optimizer, which can
895 /// sometimes allow it to optimize slightly better with some backends.
896 ///
    /// This method can be thought of as recovering the `count` that was passed
898 /// to [`add`](#method.add) (or, with the parameters in the other order,
899 /// to [`sub`](#method.sub)). The following are all equivalent, assuming
900 /// that their safety preconditions are met:
901 /// ```rust
902 /// # unsafe fn blah(ptr: std::ptr::NonNull<u32>, origin: std::ptr::NonNull<u32>, count: usize) -> bool { unsafe {
903 /// ptr.offset_from_unsigned(origin) == count
904 /// # &&
905 /// origin.add(count) == ptr
906 /// # &&
907 /// ptr.sub(count) == origin
908 /// # } }
909 /// ```
910 ///
911 /// # Safety
912 ///
913 /// - The distance between the pointers must be non-negative (`self >= origin`)
914 ///
915 /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
916 /// apply to this method as well; see it for the full details.
917 ///
918 /// Importantly, despite the return type of this method being able to represent
919 /// a larger offset, it's still *not permitted* to pass pointers which differ
920 /// by more than `isize::MAX` *bytes*. As such, the result of this method will
921 /// always be less than or equal to `isize::MAX as usize`.
922 ///
923 /// # Panics
924 ///
925 /// This function panics if `T` is a Zero-Sized Type ("ZST").
926 ///
927 /// # Examples
928 ///
929 /// ```
930 /// use std::ptr::NonNull;
931 ///
932 /// let a = [0; 5];
933 /// let ptr1: NonNull<u32> = NonNull::from(&a[1]);
934 /// let ptr2: NonNull<u32> = NonNull::from(&a[3]);
935 /// unsafe {
936 /// assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
937 /// assert_eq!(ptr1.add(2), ptr2);
938 /// assert_eq!(ptr2.sub(2), ptr1);
939 /// assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
940 /// }
941 ///
942 /// // This would be incorrect, as the pointers are not correctly ordered:
943 /// // ptr1.offset_from_unsigned(ptr2)
944 /// ```
945 #[inline]
946 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
947 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
948 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
949 pub const unsafe fn offset_from_unsigned(self, subtracted: NonNull<T>) -> usize
950 where
951 T: Sized,
952 {
953 // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
954 unsafe { self.as_ptr().offset_from_unsigned(subtracted.as_ptr()) }
955 }
956
957 /// Calculates the distance between two pointers within the same allocation, *where it's known that
958 /// `self` is equal to or greater than `origin`*. The returned value is in
959 /// units of **bytes**.
960 ///
961 /// This is purely a convenience for casting to a `u8` pointer and
962 /// using [`offset_from_unsigned`][NonNull::offset_from_unsigned] on it.
963 /// See that method for documentation and safety requirements.
964 ///
965 /// For non-`Sized` pointees this operation considers only the data pointers,
966 /// ignoring the metadata.
967 #[inline(always)]
968 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
969 #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
970 #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
971 pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: NonNull<U>) -> usize {
972 // SAFETY: the caller must uphold the safety contract for `byte_offset_from_unsigned`.
973 unsafe { self.as_ptr().byte_offset_from_unsigned(origin.as_ptr()) }
974 }
975
976 /// Reads the value from `self` without moving it. This leaves the
977 /// memory in `self` unchanged.
978 ///
979 /// See [`ptr::read`] for safety concerns and examples.
980 ///
981 /// [`ptr::read`]: crate::ptr::read()
982 #[inline]
983 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
984 #[stable(feature = "non_null_convenience", since = "1.80.0")]
985 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
986 pub const unsafe fn read(self) -> T
987 where
988 T: Sized,
989 {
990 // SAFETY: the caller must uphold the safety contract for `read`.
991 unsafe { ptr::read(self.as_ptr()) }
992 }
993
994 /// Performs a volatile read of the value from `self` without moving it. This
995 /// leaves the memory in `self` unchanged.
996 ///
997 /// Volatile operations are intended to act on I/O memory, and are guaranteed
998 /// to not be elided or reordered by the compiler across other volatile
999 /// operations.
1000 ///
1001 /// See [`ptr::read_volatile`] for safety concerns and examples.
1002 ///
1003 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
1004 #[inline]
1005 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1006 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1007 pub unsafe fn read_volatile(self) -> T
1008 where
1009 T: Sized,
1010 {
1011 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
1012 unsafe { ptr::read_volatile(self.as_ptr()) }
1013 }
1014
1015 /// Reads the value from `self` without moving it. This leaves the
1016 /// memory in `self` unchanged.
1017 ///
1018 /// Unlike `read`, the pointer may be unaligned.
1019 ///
1020 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1021 ///
1022 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
1023 #[inline]
1024 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1025 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1026 #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
1027 pub const unsafe fn read_unaligned(self) -> T
1028 where
1029 T: Sized,
1030 {
1031 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
1032 unsafe { ptr::read_unaligned(self.as_ptr()) }
1033 }
1034
1035 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1036 /// and destination may overlap.
1037 ///
1038 /// NOTE: this has the *same* argument order as [`ptr::copy`].
1039 ///
1040 /// See [`ptr::copy`] for safety concerns and examples.
1041 ///
1042 /// [`ptr::copy`]: crate::ptr::copy()
1043 #[inline(always)]
1044 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1045 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1046 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1047 pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
1048 where
1049 T: Sized,
1050 {
1051 // SAFETY: the caller must uphold the safety contract for `copy`.
1052 unsafe { ptr::copy(self.as_ptr(), dest.as_ptr(), count) }
1053 }
1054
1055 /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
1056 /// and destination may *not* overlap.
1057 ///
1058 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
1059 ///
1060 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1061 ///
1062 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1063 #[inline(always)]
1064 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1065 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1066 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1067 pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
1068 where
1069 T: Sized,
1070 {
1071 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1072 unsafe { ptr::copy_nonoverlapping(self.as_ptr(), dest.as_ptr(), count) }
1073 }
1074
1075 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1076 /// and destination may overlap.
1077 ///
1078 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
1079 ///
1080 /// See [`ptr::copy`] for safety concerns and examples.
1081 ///
1082 /// [`ptr::copy`]: crate::ptr::copy()
1083 #[inline(always)]
1084 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1085 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1086 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1087 pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
1088 where
1089 T: Sized,
1090 {
1091 // SAFETY: the caller must uphold the safety contract for `copy`.
1092 unsafe { ptr::copy(src.as_ptr(), self.as_ptr(), count) }
1093 }
1094
1095 /// Copies `count * size_of::<T>()` bytes from `src` to `self`. The source
1096 /// and destination may *not* overlap.
1097 ///
1098 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
1099 ///
1100 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
1101 ///
1102 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
1103 #[inline(always)]
1104 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1105 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1106 #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
1107 pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
1108 where
1109 T: Sized,
1110 {
1111 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
1112 unsafe { ptr::copy_nonoverlapping(src.as_ptr(), self.as_ptr(), count) }
1113 }
1114
1115 /// Executes the destructor (if any) of the pointed-to value.
1116 ///
1117 /// See [`ptr::drop_in_place`] for safety concerns and examples.
1118 ///
1119 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
1120 #[inline(always)]
1121 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1122 #[rustc_const_unstable(feature = "const_drop_in_place", issue = "109342")]
1123 pub const unsafe fn drop_in_place(self)
1124 where
1125 T: [const] Destruct,
1126 {
1127 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
1128 unsafe { ptr::drop_in_place(self.as_ptr()) }
1129 }
1130
1131 /// Overwrites a memory location with the given value without reading or
1132 /// dropping the old value.
1133 ///
1134 /// See [`ptr::write`] for safety concerns and examples.
1135 ///
1136 /// [`ptr::write`]: crate::ptr::write()
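    ///
    /// # Examples
    ///
    /// A minimal sketch initializing a `MaybeUninit` slot through the pointer:
    ///
    /// ```
    /// use std::mem::MaybeUninit;
    /// use std::ptr::NonNull;
    ///
    /// let mut slot = MaybeUninit::<u32>::uninit();
    /// let ptr: NonNull<u32> = NonNull::from(&mut slot).cast();
    /// // SAFETY: the location is valid for writes; the old contents are never read or dropped.
    /// unsafe { ptr.write(42) };
    /// assert_eq!(unsafe { ptr.read() }, 42);
    /// ```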
1137 #[inline(always)]
1138 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1139 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1140 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1141 pub const unsafe fn write(self, val: T)
1142 where
1143 T: Sized,
1144 {
1145 // SAFETY: the caller must uphold the safety contract for `write`.
1146 unsafe { ptr::write(self.as_ptr(), val) }
1147 }
1148
1149 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1150 /// bytes of memory starting at `self` to `val`.
1151 ///
1152 /// See [`ptr::write_bytes`] for safety concerns and examples.
1153 ///
1154 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
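    ///
    /// # Examples
    ///
    /// A sketch zeroing a small byte buffer:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut buf = [0xAAu8; 4];
    /// let len = buf.len();
    /// let ptr: NonNull<u8> = NonNull::from(&mut buf).cast();
    /// // SAFETY: `ptr` is valid for writes of `len` bytes.
    /// unsafe { ptr.write_bytes(0, len) };
    /// assert_eq!(buf, [0u8; 4]);
    /// ```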
1155 #[inline(always)]
1156 #[doc(alias = "memset")]
1157 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1158 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1159 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1160 pub const unsafe fn write_bytes(self, val: u8, count: usize)
1161 where
1162 T: Sized,
1163 {
1164 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
1165 unsafe { ptr::write_bytes(self.as_ptr(), val, count) }
1166 }
1167
1168 /// Performs a volatile write of a memory location with the given value without
1169 /// reading or dropping the old value.
1170 ///
1171 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1172 /// to not be elided or reordered by the compiler across other volatile
1173 /// operations.
1174 ///
1175 /// See [`ptr::write_volatile`] for safety concerns and examples.
1176 ///
1177 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1178 #[inline(always)]
1179 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1180 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1181 pub unsafe fn write_volatile(self, val: T)
1182 where
1183 T: Sized,
1184 {
1185 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
1186 unsafe { ptr::write_volatile(self.as_ptr(), val) }
1187 }
1188
1189 /// Overwrites a memory location with the given value without reading or
1190 /// dropping the old value.
1191 ///
1192 /// Unlike `write`, the pointer may be unaligned.
1193 ///
1194 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1195 ///
1196 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1197 #[inline(always)]
1198 #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
1199 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1200 #[rustc_const_stable(feature = "const_ptr_write", since = "1.83.0")]
1201 pub const unsafe fn write_unaligned(self, val: T)
1202 where
1203 T: Sized,
1204 {
1205 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
1206 unsafe { ptr::write_unaligned(self.as_ptr(), val) }
1207 }
1208
1209 /// Replaces the value at `self` with `src`, returning the old
1210 /// value, without dropping either.
1211 ///
1212 /// See [`ptr::replace`] for safety concerns and examples.
1213 ///
1214 /// [`ptr::replace`]: crate::ptr::replace()
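    ///
    /// # Examples
    ///
    /// A short sketch swapping in a new `String` and recovering the old one:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut s = String::from("old");
    /// let ptr = NonNull::from(&mut s);
    /// // SAFETY: `ptr` points to a valid, initialized `String`.
    /// let old = unsafe { ptr.replace(String::from("new")) };
    /// assert_eq!(old, "old");
    /// assert_eq!(s, "new");
    /// ```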
1215 #[inline(always)]
1216 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1217 #[rustc_const_stable(feature = "const_inherent_ptr_replace", since = "1.88.0")]
1218 pub const unsafe fn replace(self, src: T) -> T
1219 where
1220 T: Sized,
1221 {
1222 // SAFETY: the caller must uphold the safety contract for `replace`.
1223 unsafe { ptr::replace(self.as_ptr(), src) }
1224 }
1225
1226 /// Swaps the values at two mutable locations of the same type, without
1227 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1228 /// otherwise equivalent.
1229 ///
1230 /// See [`ptr::swap`] for safety concerns and examples.
1231 ///
1232 /// [`ptr::swap`]: crate::ptr::swap()
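    ///
    /// # Examples
    ///
    /// A minimal sketch with two distinct locals:
    ///
    /// ```
    /// use std::ptr::NonNull;
    ///
    /// let mut a = 1u8;
    /// let mut b = 2u8;
    /// let pa = NonNull::from(&mut a);
    /// let pb = NonNull::from(&mut b);
    /// // SAFETY: both pointers are valid for reads and writes.
    /// unsafe { pa.swap(pb) };
    /// assert_eq!((a, b), (2, 1));
    /// ```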
1233 #[inline(always)]
1234 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1235 #[rustc_const_stable(feature = "const_swap", since = "1.85.0")]
1236 pub const unsafe fn swap(self, with: NonNull<T>)
1237 where
1238 T: Sized,
1239 {
1240 // SAFETY: the caller must uphold the safety contract for `swap`.
1241 unsafe { ptr::swap(self.as_ptr(), with.as_ptr()) }
1242 }
1243
1244 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1245 /// `align`.
1246 ///
1247 /// If it is not possible to align the pointer, the implementation returns
1248 /// `usize::MAX`.
1249 ///
1250 /// The offset is expressed in number of `T` elements, and not bytes.
1251 ///
1252 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1253 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1254 /// the returned offset is correct in all terms other than alignment.
1255 ///
1256 /// When this is called during compile-time evaluation (which is unstable), the implementation
1257 /// may return `usize::MAX` in cases where that can never happen at runtime. This is because the
1258 /// actual alignment of pointers is not known yet during compile-time, so an offset with
1259 /// guaranteed alignment can sometimes not be computed. For example, a buffer declared as `[u8;
1260 /// N]` might be allocated at an odd or an even address, but at compile-time this is not yet
1261 /// known, so the execution has to be correct for either choice. It is therefore impossible to
1262 /// find an offset that is guaranteed to be 2-aligned. (This behavior is subject to change, as usual
1263 /// for unstable APIs.)
1264 ///
1265 /// # Panics
1266 ///
1267 /// The function panics if `align` is not a power-of-two.
1268 ///
1269 /// # Examples
1270 ///
1271 /// Accessing adjacent `u8` as `u16`
1272 ///
1273 /// ```
1274 /// use std::ptr::NonNull;
1275 ///
1276 /// # unsafe {
1277 /// let x = [5_u8, 6, 7, 8, 9];
1278 /// let ptr = NonNull::new(x.as_ptr() as *mut u8).unwrap();
1279 /// let offset = ptr.align_offset(align_of::<u16>());
1280 ///
1281 /// if offset < x.len() - 1 {
1282 /// let u16_ptr = ptr.add(offset).cast::<u16>();
1283 /// assert!(u16_ptr.read() == u16::from_ne_bytes([5, 6]) || u16_ptr.read() == u16::from_ne_bytes([6, 7]));
1284 /// } else {
1285 /// // while the pointer can be aligned via `offset`, it would point
1286 /// // outside the allocation
1287 /// }
1288 /// # }
1289 /// ```
1290 #[inline]
1291 #[must_use]
1292 #[stable(feature = "non_null_convenience", since = "1.80.0")]
1293 pub fn align_offset(self, align: usize) -> usize
1294 where
1295 T: Sized,
1296 {
1297 if !align.is_power_of_two() {
1298 panic!("align_offset: align is not a power-of-two");
1299 }

        // SAFETY: `align` has been checked to be a power of 2 above.
        unsafe { ptr::align_offset(self.as_ptr(), align) }
    }
1306
1307 /// Returns whether the pointer is properly aligned for `T`.
1308 ///
1309 /// # Examples
1310 ///
1311 /// ```
1312 /// use std::ptr::NonNull;
1313 ///
1314 /// // On some platforms, the alignment of i32 is less than 4.
1315 /// #[repr(align(4))]
1316 /// struct AlignedI32(i32);
1317 ///
1318 /// let data = AlignedI32(42);
1319 /// let ptr = NonNull::<AlignedI32>::from(&data);
1320 ///
1321 /// assert!(ptr.is_aligned());
1322 /// assert!(!NonNull::new(ptr.as_ptr().wrapping_byte_add(1)).unwrap().is_aligned());
1323 /// ```
1324 #[inline]
1325 #[must_use]
1326 #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
1327 pub fn is_aligned(self) -> bool
1328 where
1329 T: Sized,
1330 {
1331 self.as_ptr().is_aligned()
1332 }
1333
1334 /// Returns whether the pointer is aligned to `align`.
1335 ///
1336 /// For non-`Sized` pointees this operation considers only the data pointer,
1337 /// ignoring the metadata.
1338 ///
1339 /// # Panics
1340 ///
1341 /// The function panics if `align` is not a power-of-two (this includes 0).
1342 ///
1343 /// # Examples
1344 ///
1345 /// ```
1346 /// #![feature(pointer_is_aligned_to)]
1347 ///
1348 /// // On some platforms, the alignment of i32 is less than 4.
1349 /// #[repr(align(4))]
1350 /// struct AlignedI32(i32);
1351 ///
1352 /// let data = AlignedI32(42);
1353 /// let ptr = &data as *const AlignedI32;
1354 ///
1355 /// assert!(ptr.is_aligned_to(1));
1356 /// assert!(ptr.is_aligned_to(2));
1357 /// assert!(ptr.is_aligned_to(4));
1358 ///
1359 /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
1360 /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
1361 ///
1362 /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
1363 /// ```
1364 #[inline]
1365 #[must_use]
1366 #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
1367 pub fn is_aligned_to(self, align: usize) -> bool {
1368 self.as_ptr().is_aligned_to(align)
1369 }
1370}
1371
1372impl<T> NonNull<T> {
1373 /// Casts from a type to its maybe-uninitialized version.
1374 #[must_use]
1375 #[inline(always)]
1376 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1377 pub const fn cast_uninit(self) -> NonNull<MaybeUninit<T>> {
1378 self.cast()
1379 }
1380}
1381impl<T> NonNull<MaybeUninit<T>> {
1382 /// Casts from a maybe-uninitialized type to its initialized version.
1383 ///
1384 /// This is always safe, since UB can only occur if the pointer is read
    /// The cast itself is always safe; UB can only occur if the resulting
    /// pointer is read before the value has been initialized.
1387 #[inline(always)]
1388 #[unstable(feature = "cast_maybe_uninit", issue = "145036")]
1389 pub const fn cast_init(self) -> NonNull<T> {
1390 self.cast()
1391 }
1392}
1393
1394impl<T> NonNull<[T]> {
1395 /// Creates a non-null raw slice from a thin pointer and a length.
1396 ///
1397 /// The `len` argument is the number of **elements**, not the number of bytes.
1398 ///
1399 /// This function is safe, but dereferencing the return value is unsafe.
1400 /// See the documentation of [`slice::from_raw_parts`] for slice safety requirements.
1401 ///
1402 /// # Examples
1403 ///
1404 /// ```rust
1405 /// use std::ptr::NonNull;
1406 ///
1407 /// // create a slice pointer when starting out with a pointer to the first element
1408 /// let mut x = [5, 6, 7];
1409 /// let nonnull_pointer = NonNull::new(x.as_mut_ptr()).unwrap();
1410 /// let slice = NonNull::slice_from_raw_parts(nonnull_pointer, 3);
1411 /// assert_eq!(unsafe { slice.as_ref()[2] }, 7);
1412 /// ```
1413 ///
1414 /// (Note that this example artificially demonstrates a use of this method,
1415 /// but `let slice = NonNull::from(&x[..]);` would be a better way to write code like this.)
1416 #[stable(feature = "nonnull_slice_from_raw_parts", since = "1.70.0")]
1417 #[rustc_const_stable(feature = "const_slice_from_raw_parts_mut", since = "1.83.0")]
1418 #[must_use]
1419 #[inline]
1420 pub const fn slice_from_raw_parts(data: NonNull<T>, len: usize) -> Self {
1421 // SAFETY: `data` is a `NonNull` pointer which is necessarily non-null
1422 unsafe { Self::new_unchecked(super::slice_from_raw_parts_mut(data.as_ptr(), len)) }
1423 }
1424
1425 /// Returns the length of a non-null raw slice.
1426 ///
1427 /// The returned value is the number of **elements**, not the number of bytes.
1428 ///
1429 /// This function is safe, even when the non-null raw slice cannot be dereferenced to a slice
1430 /// because the pointer does not have a valid address.
1431 ///
1432 /// # Examples
1433 ///
1434 /// ```rust
1435 /// use std::ptr::NonNull;
1436 ///
1437 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1438 /// assert_eq!(slice.len(), 3);
1439 /// ```
1440 #[stable(feature = "slice_ptr_len_nonnull", since = "1.63.0")]
1441 #[rustc_const_stable(feature = "const_slice_ptr_len_nonnull", since = "1.63.0")]
1442 #[must_use]
1443 #[inline]
1444 pub const fn len(self) -> usize {
1445 self.as_ptr().len()
1446 }
1447
1448 /// Returns `true` if the non-null raw slice has a length of 0.
1449 ///
1450 /// # Examples
1451 ///
1452 /// ```rust
1453 /// use std::ptr::NonNull;
1454 ///
1455 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1456 /// assert!(!slice.is_empty());
1457 /// ```
1458 #[stable(feature = "slice_ptr_is_empty_nonnull", since = "1.79.0")]
1459 #[rustc_const_stable(feature = "const_slice_ptr_is_empty_nonnull", since = "1.79.0")]
1460 #[must_use]
1461 #[inline]
1462 pub const fn is_empty(self) -> bool {
1463 self.len() == 0
1464 }
1465
1466 /// Returns a non-null pointer to the slice's buffer.
1467 ///
1468 /// # Examples
1469 ///
1470 /// ```rust
1471 /// #![feature(slice_ptr_get)]
1472 /// use std::ptr::NonNull;
1473 ///
1474 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1475 /// assert_eq!(slice.as_non_null_ptr(), NonNull::<i8>::dangling());
1476 /// ```
1477 #[inline]
1478 #[must_use]
1479 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1480 pub const fn as_non_null_ptr(self) -> NonNull<T> {
1481 self.cast()
1482 }
1483
1484 /// Returns a raw pointer to the slice's buffer.
1485 ///
1486 /// # Examples
1487 ///
1488 /// ```rust
1489 /// #![feature(slice_ptr_get)]
1490 /// use std::ptr::NonNull;
1491 ///
1492 /// let slice: NonNull<[i8]> = NonNull::slice_from_raw_parts(NonNull::dangling(), 3);
1493 /// assert_eq!(slice.as_mut_ptr(), NonNull::<i8>::dangling().as_ptr());
1494 /// ```
1495 #[inline]
1496 #[must_use]
1497 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1498 #[rustc_never_returns_null_ptr]
1499 pub const fn as_mut_ptr(self) -> *mut T {
1500 self.as_non_null_ptr().as_ptr()
1501 }
1502
/// Returns a shared reference to a slice of possibly uninitialized values. In contrast to
/// [`as_ref`], this does not require the values to be initialized.
1505 ///
1506 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1507 ///
1508 /// [`as_ref`]: NonNull::as_ref
1509 /// [`as_uninit_slice_mut`]: NonNull::as_uninit_slice_mut
1510 ///
1511 /// # Safety
1512 ///
1513 /// When calling this method, you have to ensure that all of the following is true:
1514 ///
1515 /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
1516 /// and it must be properly aligned. This means in particular:
1517 ///
1518 /// * The entire memory range of this slice must be contained within a single allocation!
1519 /// Slices can never span across multiple allocations.
1520 ///
1521 /// * The pointer must be aligned even for zero-length slices. One
1522 /// reason for this is that enum layout optimizations may rely on references
1523 /// (including slices of any length) being aligned and non-null to distinguish
1524 /// them from other data. You can obtain a pointer that is usable as `data`
1525 /// for zero-length slices using [`NonNull::dangling()`].
1526 ///
1527 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1528 /// See the safety documentation of [`pointer::offset`].
1529 ///
1530 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1531 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1532 /// In particular, while this reference exists, the memory the pointer points to must
1533 /// not get mutated (except inside `UnsafeCell`).
1534 ///
1535 /// This applies even if the result of this method is unused!
1536 ///
1537 /// See also [`slice::from_raw_parts`].
1538 ///
1539 /// [valid]: crate::ptr#safety
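///
/// # Examples
///
/// A minimal sketch mirroring the one for [`as_uninit_slice_mut`], assuming the
/// unstable `allocator_api` and `ptr_as_uninit` features:
///
/// ```rust
/// #![feature(allocator_api, ptr_as_uninit)]
///
/// use std::alloc::{Allocator, Layout, Global};
/// use std::mem::MaybeUninit;
/// use std::ptr::NonNull;
///
/// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
/// // This is safe as `memory` is valid for reads for `memory.len()` many bytes
/// // and nothing else accesses the allocation while the reference exists.
/// let slice: &[MaybeUninit<u8>] = unsafe { memory.as_uninit_slice() };
/// assert!(slice.len() >= 32);
/// # // Prevent leaks for Miri.
/// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
/// # Ok::<_, std::alloc::AllocError>(())
/// ```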
1540 #[inline]
1541 #[must_use]
1542 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1543 pub const unsafe fn as_uninit_slice<'a>(self) -> &'a [MaybeUninit<T>] {
1544 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1545 unsafe { slice::from_raw_parts(self.cast().as_ptr(), self.len()) }
1546 }
1547
/// Returns a unique reference to a slice of possibly uninitialized values. In contrast to
/// [`as_mut`], this does not require the values to be initialized.
1550 ///
1551 /// For the shared counterpart see [`as_uninit_slice`].
1552 ///
1553 /// [`as_mut`]: NonNull::as_mut
1554 /// [`as_uninit_slice`]: NonNull::as_uninit_slice
1555 ///
1556 /// # Safety
1557 ///
1558 /// When calling this method, you have to ensure that all of the following is true:
1559 ///
1560 /// * The pointer must be [valid] for reads and writes for `ptr.len() * size_of::<T>()`
1561 /// many bytes, and it must be properly aligned. This means in particular:
1562 ///
1563 /// * The entire memory range of this slice must be contained within a single allocation!
1564 /// Slices can never span across multiple allocations.
1565 ///
1566 /// * The pointer must be aligned even for zero-length slices. One
1567 /// reason for this is that enum layout optimizations may rely on references
1568 /// (including slices of any length) being aligned and non-null to distinguish
1569 /// them from other data. You can obtain a pointer that is usable as `data`
1570 /// for zero-length slices using [`NonNull::dangling()`].
1571 ///
1572 /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1573 /// See the safety documentation of [`pointer::offset`].
1574 ///
1575 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1576 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1577 /// In particular, while this reference exists, the memory the pointer points to must
1578 /// not get accessed (read or written) through any other pointer.
1579 ///
1580 /// This applies even if the result of this method is unused!
1581 ///
1582 /// See also [`slice::from_raw_parts_mut`].
1583 ///
1584 /// [valid]: crate::ptr#safety
1585 ///
1586 /// # Examples
1587 ///
1588 /// ```rust
1589 /// #![feature(allocator_api, ptr_as_uninit)]
1590 ///
1591 /// use std::alloc::{Allocator, Layout, Global};
1592 /// use std::mem::MaybeUninit;
1593 /// use std::ptr::NonNull;
1594 ///
1595 /// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
1596 /// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
1597 /// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
1598 /// # #[allow(unused_variables)]
1599 /// let slice: &mut [MaybeUninit<u8>] = unsafe { memory.as_uninit_slice_mut() };
1600 /// # // Prevent leaks for Miri.
1601 /// # unsafe { Global.deallocate(memory.cast(), Layout::new::<[u8; 32]>()); }
1602 /// # Ok::<_, std::alloc::AllocError>(())
1603 /// ```
1604 #[inline]
1605 #[must_use]
1606 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1607 pub const unsafe fn as_uninit_slice_mut<'a>(self) -> &'a mut [MaybeUninit<T>] {
1608 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1609 unsafe { slice::from_raw_parts_mut(self.cast().as_ptr(), self.len()) }
1610 }
1611
1612 /// Returns a raw pointer to an element or subslice, without doing bounds
1613 /// checking.
1614 ///
1615 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1616 /// is *[undefined behavior]* even if the resulting pointer is not used.
1617 ///
1618 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1619 ///
1620 /// # Examples
1621 ///
1622 /// ```
1623 /// #![feature(slice_ptr_get)]
1624 /// use std::ptr::NonNull;
1625 ///
1626 /// let x = &mut [1, 2, 4];
1627 /// let x = NonNull::slice_from_raw_parts(NonNull::new(x.as_mut_ptr()).unwrap(), x.len());
1628 ///
1629 /// unsafe {
1630 /// assert_eq!(x.get_unchecked_mut(1).as_ptr(), x.as_non_null_ptr().as_ptr().add(1));
1631 /// }
1632 /// ```
1633 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1634 #[rustc_const_unstable(feature = "const_index", issue = "143775")]
1635 #[inline]
1636 pub const unsafe fn get_unchecked_mut<I>(self, index: I) -> NonNull<I::Output>
1637 where
1638 I: [const] SliceIndex<[T]>,
1639 {
1640 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1641 // As a consequence, the resulting pointer cannot be null.
1642 unsafe { NonNull::new_unchecked(self.as_ptr().get_unchecked_mut(index)) }
1643 }
1644}
1645
1646#[stable(feature = "nonnull", since = "1.25.0")]
1647impl<T: PointeeSized> Clone for NonNull<T> {
1648 #[inline(always)]
1649 fn clone(&self) -> Self {
1650 *self
1651 }
1652}
1653
1654#[stable(feature = "nonnull", since = "1.25.0")]
1655impl<T: PointeeSized> Copy for NonNull<T> {}
1656
1657#[doc(hidden)]
1658#[unstable(feature = "trivial_clone", issue = "none")]
unsafe impl<T: PointeeSized> TrivialClone for NonNull<T> {}
1660
1661#[unstable(feature = "coerce_unsized", issue = "18598")]
1662impl<T: PointeeSized, U: PointeeSized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1663
1664#[unstable(feature = "dispatch_from_dyn", issue = "none")]
1665impl<T: PointeeSized, U: PointeeSized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> {}
1666
1667#[stable(feature = "pin", since = "1.33.0")]
1668unsafe impl<T: PointeeSized> PinCoerceUnsized for NonNull<T> {}
1669
1670#[stable(feature = "nonnull", since = "1.25.0")]
1671impl<T: PointeeSized> fmt::Debug for NonNull<T> {
1672 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1673 fmt::Pointer::fmt(&self.as_ptr(), f)
1674 }
1675}
1676
1677#[stable(feature = "nonnull", since = "1.25.0")]
1678impl<T: PointeeSized> fmt::Pointer for NonNull<T> {
1679 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1680 fmt::Pointer::fmt(&self.as_ptr(), f)
1681 }
1682}
1683
1684#[stable(feature = "nonnull", since = "1.25.0")]
1685impl<T: PointeeSized> Eq for NonNull<T> {}
1686
1687#[stable(feature = "nonnull", since = "1.25.0")]
1688impl<T: PointeeSized> PartialEq for NonNull<T> {
1689 #[inline]
1690 #[allow(ambiguous_wide_pointer_comparisons)]
1691 fn eq(&self, other: &Self) -> bool {
1692 self.as_ptr() == other.as_ptr()
1693 }
1694}
1695
1696#[stable(feature = "nonnull", since = "1.25.0")]
1697impl<T: PointeeSized> Ord for NonNull<T> {
1698 #[inline]
1699 #[allow(ambiguous_wide_pointer_comparisons)]
1700 fn cmp(&self, other: &Self) -> Ordering {
1701 self.as_ptr().cmp(&other.as_ptr())
1702 }
1703}
1704
1705#[stable(feature = "nonnull", since = "1.25.0")]
1706impl<T: PointeeSized> PartialOrd for NonNull<T> {
1707 #[inline]
1708 #[allow(ambiguous_wide_pointer_comparisons)]
1709 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1710 self.as_ptr().partial_cmp(&other.as_ptr())
1711 }
1712}
1713
1714#[stable(feature = "nonnull", since = "1.25.0")]
1715impl<T: PointeeSized> hash::Hash for NonNull<T> {
1716 #[inline]
1717 fn hash<H: hash::Hasher>(&self, state: &mut H) {
1718 self.as_ptr().hash(state)
1719 }
1720}
1721
1722#[unstable(feature = "ptr_internals", issue = "none")]
1723#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
1724impl<T: PointeeSized> const From<Unique<T>> for NonNull<T> {
1725 #[inline]
1726 fn from(unique: Unique<T>) -> Self {
1727 unique.as_non_null_ptr()
1728 }
1729}
1730
1731#[stable(feature = "nonnull", since = "1.25.0")]
1732#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
1733impl<T: PointeeSized> const From<&mut T> for NonNull<T> {
1734 /// Converts a `&mut T` to a `NonNull<T>`.
1735 ///
1736 /// This conversion is safe and infallible since references cannot be null.
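///
/// # Examples
///
/// A small sketch of the conversion, writing and reading through the pointer:
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut x = 0u32;
/// let ptr: NonNull<u32> = NonNull::from(&mut x);
/// // SAFETY: `ptr` was just created from a live mutable reference and nothing
/// // else accesses `x` while it is used.
/// unsafe { ptr.write(5) };
/// assert_eq!(unsafe { ptr.read() }, 5);
/// ```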
1737 #[inline]
1738 fn from(r: &mut T) -> Self {
1739 NonNull::from_mut(r)
1740 }
1741}
1742
1743#[stable(feature = "nonnull", since = "1.25.0")]
1744#[rustc_const_unstable(feature = "const_convert", issue = "143773")]
1745impl<T: PointeeSized> const From<&T> for NonNull<T> {
1746 /// Converts a `&T` to a `NonNull<T>`.
1747 ///
1748 /// This conversion is safe and infallible since references cannot be null.
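///
/// # Examples
///
/// A small sketch of the conversion, reading through the pointer:
///
/// ```
/// use std::ptr::NonNull;
///
/// let x = 5u32;
/// let ptr: NonNull<u32> = NonNull::from(&x);
/// // Reading through the pointer is fine; mutating through a pointer derived
/// // from a shared reference would be undefined behavior.
/// assert_eq!(unsafe { ptr.read() }, 5);
/// ```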
1749 #[inline]
1750 fn from(r: &T) -> Self {
1751 NonNull::from_ref(r)
1752 }
1753}