
alloc/rc.rs

1//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
2//! Counted'.
3//!
4//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
5//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
6//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
7//! given allocation is destroyed, the value stored in that allocation (often
8//! referred to as "inner value") is also dropped.
9//!
10//! Shared references in Rust disallow mutation by default, and [`Rc`]
11//! is no exception: you cannot generally obtain a mutable reference to
12//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
13//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
14//! inside an `Rc`][mutability].
15//!
16//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
17//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
18//! does not implement [`Send`]. As a result, the Rust compiler
19//! will check *at compile time* that you are not sending [`Rc`]s between
20//! threads. If you need multi-threaded, atomic reference counting, use
21//! [`sync::Arc`][arc].
22//!
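//! For example, the following does not compile, because an `Rc` cannot be moved
//! into another thread:
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // `Rc<i32>` does not implement `Send`, so it cannot cross a thread boundary.
//! thread::spawn(move || {
//!     println!("{}", rc);
//! });
//! ```
//!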
23//! The [`downgrade`][downgrade] method can be used to create a non-owning
24//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
25//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
26//! already been dropped. In other words, `Weak` pointers do not keep the value
27//! inside the allocation alive; however, they *do* keep the allocation
28//! (the backing store for the inner value) alive.
29//!
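//! For example, upgrading a `Weak` pointer succeeds while a strong pointer is
//! still alive, and fails afterwards:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new(5);
//! let weak = Rc::downgrade(&strong);
//! // A strong pointer still exists, so `upgrade` returns `Some`.
//! assert_eq!(weak.upgrade().as_deref(), Some(&5));
//! drop(strong);
//! // The last strong pointer is gone: the value has been dropped and
//! // `upgrade` now returns `None`.
//! assert!(weak.upgrade().is_none());
//! ```
//!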
30//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
31//! [`Weak`] is used to break cycles. For example, a tree could have strong
32//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
33//! children back to their parents.
34//!
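//! A sketch of such a tree shape (the node type and field names here are
//! illustrative, not part of any API):
//!
//! ```
//! use std::cell::RefCell;
//! use std::rc::{Rc, Weak};
//!
//! struct Node {
//!     // Strong pointers from a parent to its children keep the children alive.
//!     children: RefCell<Vec<Rc<Node>>>,
//!     // A weak pointer back to the parent breaks the cycle.
//!     parent: RefCell<Weak<Node>>,
//! }
//! ```
//!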
35//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
36//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
37//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
38//! functions, called using [fully qualified syntax]:
39//!
40//! ```
41//! use std::rc::Rc;
42//!
43//! let my_rc = Rc::new(());
44//! let my_weak = Rc::downgrade(&my_rc);
45//! ```
46//!
47//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
48//! fully qualified syntax. Some people prefer to use fully qualified syntax,
49//! while others prefer using method-call syntax.
50//!
51//! ```
52//! use std::rc::Rc;
53//!
54//! let rc = Rc::new(());
55//! // Method-call syntax
56//! let rc2 = rc.clone();
57//! // Fully qualified syntax
58//! let rc3 = Rc::clone(&rc);
59//! ```
60//!
61//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
62//! already been dropped.
63//!
64//! # Cloning references
65//!
66//! Creating a new reference to the same allocation as an existing reference counted pointer
67//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
68//!
69//! ```
70//! use std::rc::Rc;
71//!
72//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
73//! // The two syntaxes below are equivalent.
74//! let a = foo.clone();
75//! let b = Rc::clone(&foo);
76//! // a and b both point to the same memory location as foo.
77//! ```
78//!
79//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
80//! the meaning of the code. In the example above, this syntax makes it easier to see that
81//! this code is creating a new reference rather than copying the whole content of `foo`.
82//!
83//! # Examples
84//!
85//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
86//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
87//! unique ownership, because more than one gadget may belong to the same
88//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
89//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
90//!
91//! ```
92//! use std::rc::Rc;
93//!
94//! struct Owner {
95//!     name: String,
96//!     // ...other fields
97//! }
98//!
99//! struct Gadget {
100//!     id: i32,
101//!     owner: Rc<Owner>,
102//!     // ...other fields
103//! }
104//!
105//! fn main() {
106//!     // Create a reference-counted `Owner`.
107//!     let gadget_owner: Rc<Owner> = Rc::new(
108//!         Owner {
109//!             name: "Gadget Man".to_string(),
110//!         }
111//!     );
112//!
113//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
114//!     // gives us a new pointer to the same `Owner` allocation, incrementing
115//!     // the reference count in the process.
116//!     let gadget1 = Gadget {
117//!         id: 1,
118//!         owner: Rc::clone(&gadget_owner),
119//!     };
120//!     let gadget2 = Gadget {
121//!         id: 2,
122//!         owner: Rc::clone(&gadget_owner),
123//!     };
124//!
125//!     // Dispose of our local variable `gadget_owner`.
126//!     drop(gadget_owner);
127//!
128//!     // Despite dropping `gadget_owner`, we're still able to print out the name
129//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
130//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
131//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
132//!     // live. The field projection `gadget1.owner.name` works because
133//!     // `Rc<Owner>` automatically dereferences to `Owner`.
134//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
135//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
136//!
137//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
138//!     // with them the last counted references to our `Owner`. Gadget Man now
139//!     // gets destroyed as well.
140//! }
141//! ```
142//!
143//! If our requirements change, and we also need to be able to traverse from
144//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
145//! to `Gadget` introduces a cycle. This means that their
146//! reference counts can never reach 0, and the allocations will never be destroyed:
147//! a memory leak. In order to get around this, we can use [`Weak`]
148//! pointers.
149//!
150//! Rust actually makes it somewhat difficult to produce this loop in the first
151//! place. In order to end up with two values that point at each other, one of
152//! them needs to be mutable. This is difficult because [`Rc`] enforces
153//! memory safety by only giving out shared references to the value it wraps,
154//! and these don't allow direct mutation. We need to wrap the part of the
155//! value we wish to mutate in a [`RefCell`], which provides *interior
156//! mutability*: a method to achieve mutability through a shared reference.
157//! [`RefCell`] enforces Rust's borrowing rules at runtime.
158//!
159//! ```
160//! use std::rc::Rc;
161//! use std::rc::Weak;
162//! use std::cell::RefCell;
163//!
164//! struct Owner {
165//!     name: String,
166//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
167//!     // ...other fields
168//! }
169//!
170//! struct Gadget {
171//!     id: i32,
172//!     owner: Rc<Owner>,
173//!     // ...other fields
174//! }
175//!
176//! fn main() {
177//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
178//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
179//!     // a shared reference.
180//!     let gadget_owner: Rc<Owner> = Rc::new(
181//!         Owner {
182//!             name: "Gadget Man".to_string(),
183//!             gadgets: RefCell::new(vec![]),
184//!         }
185//!     );
186//!
187//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
188//!     let gadget1 = Rc::new(
189//!         Gadget {
190//!             id: 1,
191//!             owner: Rc::clone(&gadget_owner),
192//!         }
193//!     );
194//!     let gadget2 = Rc::new(
195//!         Gadget {
196//!             id: 2,
197//!             owner: Rc::clone(&gadget_owner),
198//!         }
199//!     );
200//!
201//!     // Add the `Gadget`s to their `Owner`.
202//!     {
203//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
204//!         gadgets.push(Rc::downgrade(&gadget1));
205//!         gadgets.push(Rc::downgrade(&gadget2));
206//!
207//!         // `RefCell` dynamic borrow ends here.
208//!     }
209//!
210//!     // Iterate over our `Gadget`s, printing their details out.
211//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
212//!
213//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
214//!         // guarantee the allocation still exists, we need to call
215//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
216//!         //
217//!         // In this case we know the allocation still exists, so we simply
218//!         // `unwrap` the `Option`. In a more complicated program, you might
219//!         // need graceful error handling for a `None` result.
220//!
221//!         let gadget = gadget_weak.upgrade().unwrap();
222//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
223//!     }
224//!
225//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
226//!     // are destroyed. There are now no strong (`Rc`) pointers to the
227//!     // gadgets, so they are destroyed. This zeroes the reference count on
228//!     // Gadget Man, so he gets destroyed as well.
229//! }
230//! ```
231//!
232//! [clone]: Clone::clone
233//! [`Cell`]: core::cell::Cell
234//! [`RefCell`]: core::cell::RefCell
235//! [arc]: crate::sync::Arc
236//! [`Deref`]: core::ops::Deref
237//! [downgrade]: Rc::downgrade
238//! [upgrade]: Weak::upgrade
239//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
240//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
241
242#![stable(feature = "rust1", since = "1.0.0")]
243
244use core::any::Any;
245use core::cell::{Cell, CloneFromCell};
246#[cfg(not(no_global_oom_handling))]
247use core::clone::CloneToUninit;
248use core::clone::UseCloned;
249use core::cmp::Ordering;
250use core::hash::{Hash, Hasher};
251use core::intrinsics::abort;
252#[cfg(not(no_global_oom_handling))]
253use core::iter;
254use core::marker::{PhantomData, Unsize};
255use core::mem::{self, ManuallyDrop, align_of_val_raw};
256use core::num::NonZeroUsize;
257use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
258#[cfg(not(no_global_oom_handling))]
259use core::ops::{Residual, Try};
260use core::panic::{RefUnwindSafe, UnwindSafe};
261#[cfg(not(no_global_oom_handling))]
262use core::pin::Pin;
263use core::pin::PinCoerceUnsized;
264use core::ptr::{self, NonNull, drop_in_place};
265#[cfg(not(no_global_oom_handling))]
266use core::slice::from_raw_parts_mut;
267use core::{borrow, fmt, hint};
268
269#[cfg(not(no_global_oom_handling))]
270use crate::alloc::handle_alloc_error;
271use crate::alloc::{AllocError, Allocator, Global, Layout};
272use crate::borrow::{Cow, ToOwned};
273use crate::boxed::Box;
274#[cfg(not(no_global_oom_handling))]
275use crate::string::String;
276#[cfg(not(no_global_oom_handling))]
277use crate::vec::Vec;
278
279// This is repr(C) to future-proof against possible field-reordering, which
280// would interfere with otherwise safe [into|from]_raw() of transmutable
281// inner types.
282// repr(align(2)) (forcing alignment to at least 2) is required because usize
283// has 1-byte alignment on AVR.
284#[repr(C, align(2))]
285struct RcInner<T: ?Sized> {
286    strong: Cell<usize>,
287    weak: Cell<usize>,
288    value: T,
289}
290
291/// Calculate layout for `RcInner<T>` using the inner value's layout
292fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
293    // Calculate layout using the given value layout.
294    // Previously, layout was calculated on the expression
295    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
296    // reference (see #54908).
297    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
298}
299
300/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
301/// Counted'.
302///
303/// See the [module-level documentation](./index.html) for more details.
304///
305/// The inherent methods of `Rc` are all associated functions, which means
306/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
307/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
308///
309/// [get_mut]: Rc::get_mut
310#[doc(search_unbox)]
311#[rustc_diagnostic_item = "Rc"]
312#[stable(feature = "rust1", since = "1.0.0")]
313#[rustc_insignificant_dtor]
314pub struct Rc<
315    T: ?Sized,
316    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
317> {
318    ptr: NonNull<RcInner<T>>,
319    phantom: PhantomData<RcInner<T>>,
320    alloc: A,
321}
322
323#[stable(feature = "rust1", since = "1.0.0")]
324impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
325
326// Note that this negative impl isn't strictly necessary for correctness,
327// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
328// However, given how important `Rc`'s `!Sync`-ness is,
329// having an explicit negative impl is nice for documentation purposes
330// and results in nicer error messages.
331#[stable(feature = "rust1", since = "1.0.0")]
332impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
333
334#[stable(feature = "catch_unwind", since = "1.9.0")]
335impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
336#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
337impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
338
339#[unstable(feature = "coerce_unsized", issue = "18598")]
340impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
341
342#[unstable(feature = "dispatch_from_dyn", issue = "none")]
343impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
344
345// SAFETY: `Rc::clone` doesn't access any `Cell`s which could contain the `Rc` being cloned.
346#[unstable(feature = "cell_get_cloned", issue = "145329")]
347unsafe impl<T: ?Sized> CloneFromCell for Rc<T> {}
348
349impl<T: ?Sized> Rc<T> {
350    #[inline]
351    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
352        unsafe { Self::from_inner_in(ptr, Global) }
353    }
354
355    #[inline]
356    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
357        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
358    }
359}
360
361impl<T: ?Sized, A: Allocator> Rc<T, A> {
362    #[inline(always)]
363    fn inner(&self) -> &RcInner<T> {
364        // This unsafety is ok because while this Rc is alive we're guaranteed
365        // that the inner pointer is valid.
366        unsafe { self.ptr.as_ref() }
367    }
368
369    #[inline]
370    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
371        let this = mem::ManuallyDrop::new(this);
372        (this.ptr, unsafe { ptr::read(&this.alloc) })
373    }
374
375    #[inline]
376    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
377        Self { ptr, phantom: PhantomData, alloc }
378    }
379
380    #[inline]
381    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
382        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
383    }
384
385    // Non-inlined part of `drop`.
386    #[inline(never)]
387    unsafe fn drop_slow(&mut self) {
388        // Reconstruct the "strong weak" pointer and drop it when this
389        // variable goes out of scope. This ensures that the memory is
390        // deallocated even if the destructor of `T` panics.
391        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
392
393        // Destroy the contained object.
394        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
395        unsafe {
396            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
397        }
398    }
399}
400
401impl<T> Rc<T> {
402    /// Constructs a new `Rc<T>`.
403    ///
404    /// # Examples
405    ///
406    /// ```
407    /// use std::rc::Rc;
408    ///
409    /// let five = Rc::new(5);
410    /// ```
411    #[cfg(not(no_global_oom_handling))]
412    #[stable(feature = "rust1", since = "1.0.0")]
413    pub fn new(value: T) -> Rc<T> {
414        // There is an implicit weak pointer owned by all the strong
415        // pointers, which ensures that the weak destructor never frees
416        // the allocation while the strong destructor is running, even
417        // if the weak pointer is stored inside the strong one.
418        unsafe {
419            Self::from_inner(
420                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
421                    .into(),
422            )
423        }
424    }
425
426    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
427    /// to allow you to construct a `T` which holds a weak pointer to itself.
428    ///
429    /// Generally, a structure circularly referencing itself, either directly or
430    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
431    /// Using this function, you get access to the weak pointer during the
432    /// initialization of `T`, before the `Rc<T>` is created, such that you can
433    /// clone and store it inside the `T`.
434    ///
435    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
436    /// then calls your closure, giving it a `Weak<T>` to this allocation,
437    /// and only afterwards completes the construction of the `Rc<T>` by placing
438    /// the `T` returned from your closure into the allocation.
439    ///
440    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
441    /// returns, calling [`upgrade`] on the weak reference inside your closure will
442    /// fail and result in a `None` value.
443    ///
444    /// # Panics
445    ///
446    /// If `data_fn` panics, the panic is propagated to the caller, and the
447    /// temporary [`Weak<T>`] is dropped normally.
448    ///
449    /// # Examples
450    ///
451    /// ```
452    /// # #![allow(dead_code)]
453    /// use std::rc::{Rc, Weak};
454    ///
455    /// struct Gadget {
456    ///     me: Weak<Gadget>,
457    /// }
458    ///
459    /// impl Gadget {
460    ///     /// Constructs a reference counted Gadget.
461    ///     fn new() -> Rc<Self> {
462    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
463    ///         // `Rc` we're constructing.
464    ///         Rc::new_cyclic(|me| {
465    ///             // Create the actual struct here.
466    ///             Gadget { me: me.clone() }
467    ///         })
468    ///     }
469    ///
470    ///     /// Returns a reference counted pointer to Self.
471    ///     fn me(&self) -> Rc<Self> {
472    ///         self.me.upgrade().unwrap()
473    ///     }
474    /// }
475    /// ```
476    /// [`upgrade`]: Weak::upgrade
477    #[cfg(not(no_global_oom_handling))]
478    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
479    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
480    where
481        F: FnOnce(&Weak<T>) -> T,
482    {
483        Self::new_cyclic_in(data_fn, Global)
484    }
485
486    /// Constructs a new `Rc` with uninitialized contents.
487    ///
488    /// # Examples
489    ///
490    /// ```
491    /// use std::rc::Rc;
492    ///
493    /// let mut five = Rc::<u32>::new_uninit();
494    ///
495    /// // Deferred initialization:
496    /// Rc::get_mut(&mut five).unwrap().write(5);
497    ///
498    /// let five = unsafe { five.assume_init() };
499    ///
500    /// assert_eq!(*five, 5)
501    /// ```
502    #[cfg(not(no_global_oom_handling))]
503    #[stable(feature = "new_uninit", since = "1.82.0")]
504    #[must_use]
505    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
506        unsafe {
507            Rc::from_ptr(Rc::allocate_for_layout(
508                Layout::new::<T>(),
509                |layout| Global.allocate(layout),
510                <*mut u8>::cast,
511            ))
512        }
513    }
514
515    /// Constructs a new `Rc` with uninitialized contents, with the memory
516    /// being filled with `0` bytes.
517    ///
518    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
519    /// incorrect usage of this method.
520    ///
521    /// # Examples
522    ///
523    /// ```
524    /// use std::rc::Rc;
525    ///
526    /// let zero = Rc::<u32>::new_zeroed();
527    /// let zero = unsafe { zero.assume_init() };
528    ///
529    /// assert_eq!(*zero, 0)
530    /// ```
531    ///
532    /// [zeroed]: mem::MaybeUninit::zeroed
533    #[cfg(not(no_global_oom_handling))]
534    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
535    #[must_use]
536    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
537        unsafe {
538            Rc::from_ptr(Rc::allocate_for_layout(
539                Layout::new::<T>(),
540                |layout| Global.allocate_zeroed(layout),
541                <*mut u8>::cast,
542            ))
543        }
544    }
545
546    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
547    ///
548    /// # Examples
549    ///
550    /// ```
551    /// #![feature(allocator_api)]
552    /// use std::rc::Rc;
553    ///
554    /// let five = Rc::try_new(5);
555    /// # Ok::<(), std::alloc::AllocError>(())
556    /// ```
557    #[unstable(feature = "allocator_api", issue = "32838")]
558    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
559        // There is an implicit weak pointer owned by all the strong
560        // pointers, which ensures that the weak destructor never frees
561        // the allocation while the strong destructor is running, even
562        // if the weak pointer is stored inside the strong one.
563        unsafe {
564            Ok(Self::from_inner(
565                Box::leak(Box::try_new(RcInner {
566                    strong: Cell::new(1),
567                    weak: Cell::new(1),
568                    value,
569                })?)
570                .into(),
571            ))
572        }
573    }
574
575    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
576    ///
577    /// # Examples
578    ///
579    /// ```
580    /// #![feature(allocator_api)]
581    ///
582    /// use std::rc::Rc;
583    ///
584    /// let mut five = Rc::<u32>::try_new_uninit()?;
585    ///
586    /// // Deferred initialization:
587    /// Rc::get_mut(&mut five).unwrap().write(5);
588    ///
589    /// let five = unsafe { five.assume_init() };
590    ///
591    /// assert_eq!(*five, 5);
592    /// # Ok::<(), std::alloc::AllocError>(())
593    /// ```
594    #[unstable(feature = "allocator_api", issue = "32838")]
595    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
596        unsafe {
597            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
598                Layout::new::<T>(),
599                |layout| Global.allocate(layout),
600                <*mut u8>::cast,
601            )?))
602        }
603    }
604
605    /// Constructs a new `Rc` with uninitialized contents, with the memory
606    /// being filled with `0` bytes, returning an error if the allocation fails
607    ///
608    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
609    /// incorrect usage of this method.
610    ///
611    /// # Examples
612    ///
613    /// ```
614    /// #![feature(allocator_api)]
615    ///
616    /// use std::rc::Rc;
617    ///
618    /// let zero = Rc::<u32>::try_new_zeroed()?;
619    /// let zero = unsafe { zero.assume_init() };
620    ///
621    /// assert_eq!(*zero, 0);
622    /// # Ok::<(), std::alloc::AllocError>(())
623    /// ```
624    ///
625    /// [zeroed]: mem::MaybeUninit::zeroed
626    #[unstable(feature = "allocator_api", issue = "32838")]
627    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
628        unsafe {
629            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
630                Layout::new::<T>(),
631                |layout| Global.allocate_zeroed(layout),
632                <*mut u8>::cast,
633            )?))
634        }
635    }
636    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
637    /// `value` will be pinned in memory and unable to be moved.
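    ///
    /// # Examples
    ///
    /// A pinned `Rc` still dereferences to its value:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let pinned = Rc::pin(5);
    /// assert_eq!(*pinned, 5);
    /// ```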
638    #[cfg(not(no_global_oom_handling))]
639    #[stable(feature = "pin", since = "1.33.0")]
640    #[must_use]
641    pub fn pin(value: T) -> Pin<Rc<T>> {
642        unsafe { Pin::new_unchecked(Rc::new(value)) }
643    }
644
645    /// Maps the value in an `Rc`, reusing the allocation if possible.
646    ///
647    /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
648    /// an `Rc`.
649    ///
650    /// Note: this is an associated function, which means that you have
651    /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
652    /// is so that there is no conflict with a method on the inner type.
653    ///
654    /// # Examples
655    ///
656    /// ```
657    /// #![feature(smart_pointer_try_map)]
658    ///
659    /// use std::rc::Rc;
660    ///
661    /// let r = Rc::new(7);
662    /// let new = Rc::map(r, |i| i + 7);
663    /// assert_eq!(*new, 14);
664    /// ```
665    #[cfg(not(no_global_oom_handling))]
666    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
667    pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
668        if size_of::<T>() == size_of::<U>()
669            && align_of::<T>() == align_of::<U>()
670            && Rc::is_unique(&this)
671        {
672            unsafe {
673                let ptr = Rc::into_raw(this);
674                let value = ptr.read();
675                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
676
677                Rc::get_mut_unchecked(&mut allocation).write(f(&value));
678                allocation.assume_init()
679            }
680        } else {
681            Rc::new(f(&*this))
682        }
683    }
684
685    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
686    ///
687    /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
688    /// result is returned, also in an `Rc`.
689    ///
690    /// Note: this is an associated function, which means that you have
691    /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
692    /// is so that there is no conflict with a method on the inner type.
693    ///
694    /// # Examples
695    ///
696    /// ```
697    /// #![feature(smart_pointer_try_map)]
698    ///
699    /// use std::rc::Rc;
700    ///
701    /// let b = Rc::new(7);
702    /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
703    /// assert_eq!(*new, 7);
704    /// ```
705    #[cfg(not(no_global_oom_handling))]
706    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
707    pub fn try_map<R>(
708        this: Self,
709        f: impl FnOnce(&T) -> R,
710    ) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
711    where
712        R: Try,
713        R::Residual: Residual<Rc<R::Output>>,
714    {
715        if size_of::<T>() == size_of::<R::Output>()
716            && align_of::<T>() == align_of::<R::Output>()
717            && Rc::is_unique(&this)
718        {
719            unsafe {
720                let ptr = Rc::into_raw(this);
721                let value = ptr.read();
722                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
723
724                Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
725                try { allocation.assume_init() }
726            }
727        } else {
728            try { Rc::new(f(&*this)?) }
729        }
730    }
731}
732
733impl<T, A: Allocator> Rc<T, A> {
734    /// Constructs a new `Rc` in the provided allocator.
735    ///
736    /// # Examples
737    ///
738    /// ```
739    /// #![feature(allocator_api)]
740    /// use std::rc::Rc;
741    /// use std::alloc::System;
742    ///
743    /// let five = Rc::new_in(5, System);
744    /// ```
745    #[cfg(not(no_global_oom_handling))]
746    #[unstable(feature = "allocator_api", issue = "32838")]
747    #[inline]
748    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
749        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
750        // That would make code size bigger.
751        match Self::try_new_in(value, alloc) {
752            Ok(m) => m,
753            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
754        }
755    }
756
757    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
758    ///
759    /// # Examples
760    ///
761    /// ```
762    /// #![feature(get_mut_unchecked)]
763    /// #![feature(allocator_api)]
764    ///
765    /// use std::rc::Rc;
766    /// use std::alloc::System;
767    ///
768    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
769    ///
770    /// let five = unsafe {
771    ///     // Deferred initialization:
772    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
773    ///
774    ///     five.assume_init()
775    /// };
776    ///
777    /// assert_eq!(*five, 5)
778    /// ```
779    #[cfg(not(no_global_oom_handling))]
780    #[unstable(feature = "allocator_api", issue = "32838")]
781    #[inline]
782    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
783        unsafe {
784            Rc::from_ptr_in(
785                Rc::allocate_for_layout(
786                    Layout::new::<T>(),
787                    |layout| alloc.allocate(layout),
788                    <*mut u8>::cast,
789                ),
790                alloc,
791            )
792        }
793    }
794
795    /// Constructs a new `Rc` with uninitialized contents, with the memory
796    /// being filled with `0` bytes, in the provided allocator.
797    ///
798    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
799    /// incorrect usage of this method.
800    ///
801    /// # Examples
802    ///
803    /// ```
804    /// #![feature(allocator_api)]
805    ///
806    /// use std::rc::Rc;
807    /// use std::alloc::System;
808    ///
809    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
810    /// let zero = unsafe { zero.assume_init() };
811    ///
812    /// assert_eq!(*zero, 0)
813    /// ```
814    ///
815    /// [zeroed]: mem::MaybeUninit::zeroed
816    #[cfg(not(no_global_oom_handling))]
817    #[unstable(feature = "allocator_api", issue = "32838")]
818    #[inline]
819    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
820        unsafe {
821            Rc::from_ptr_in(
822                Rc::allocate_for_layout(
823                    Layout::new::<T>(),
824                    |layout| alloc.allocate_zeroed(layout),
825                    <*mut u8>::cast,
826                ),
827                alloc,
828            )
829        }
830    }
831
832    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
833    /// to allow you to construct a `T` which holds a weak pointer to itself.
834    ///
835    /// Generally, a structure circularly referencing itself, either directly or
836    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
837    /// Using this function, you get access to the weak pointer during the
838    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
839    /// clone and store it inside the `T`.
840    ///
841    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
842    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
843    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
844    /// the `T` returned from your closure into the allocation.
845    ///
846    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
847    /// returns, calling [`upgrade`] on the weak reference inside your closure will
848    /// fail and result in a `None` value.
849    ///
850    /// # Panics
851    ///
852    /// If `data_fn` panics, the panic is propagated to the caller, and the
853    /// temporary [`Weak<T, A>`] is dropped normally.
854    ///
855    /// # Examples
856    ///
857    /// See [`new_cyclic`].
858    ///
859    /// [`new_cyclic`]: Rc::new_cyclic
860    /// [`upgrade`]: Weak::upgrade
861    #[cfg(not(no_global_oom_handling))]
862    #[unstable(feature = "allocator_api", issue = "32838")]
863    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
864    where
865        F: FnOnce(&Weak<T, A>) -> T,
866    {
867        // Construct the inner in the "uninitialized" state with a single
868        // weak reference.
869        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
870            RcInner {
871                strong: Cell::new(0),
872                weak: Cell::new(1),
873                value: mem::MaybeUninit::<T>::uninit(),
874            },
875            alloc,
876        ));
877        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
878        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();
879
880        let weak = Weak { ptr: init_ptr, alloc };
881
882        // It's important we don't give up ownership of the weak pointer, or
883        // else the memory might be freed by the time `data_fn` returns. If
884        // we really wanted to pass ownership, we could create an additional
885        // weak pointer for ourselves, but this would result in additional
886        // updates to the weak reference count which might not be necessary
887        // otherwise.
888        let data = data_fn(&weak);
889
890        let strong = unsafe {
891            let inner = init_ptr.as_ptr();
892            ptr::write(&raw mut (*inner).value, data);
893
894            let prev_value = (*inner).strong.get();
895            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
896            (*inner).strong.set(1);
897
898            // Strong references should collectively own a shared weak reference,
899            // so don't run the destructor for our old weak reference.
900            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
901            // and forgetting the weak reference.
902            let alloc = weak.into_raw_with_allocator().1;
903
904            Rc::from_inner_in(init_ptr, alloc)
905        };
906
907        strong
908    }
909
910    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
911    /// fails
912    ///
913    /// # Examples
914    ///
915    /// ```
916    /// #![feature(allocator_api)]
917    /// use std::rc::Rc;
918    /// use std::alloc::System;
919    ///
920    /// let five = Rc::try_new_in(5, System);
921    /// # Ok::<(), std::alloc::AllocError>(())
922    /// ```
923    #[unstable(feature = "allocator_api", issue = "32838")]
924    #[inline]
925    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
926        // There is an implicit weak pointer owned by all the strong
927        // pointers, which ensures that the weak destructor never frees
928        // the allocation while the strong destructor is running, even
929        // if the weak pointer is stored inside the strong one.
930        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
931            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
932            alloc,
933        )?);
934        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
935    }
936
937    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
938    /// error if the allocation fails
939    ///
940    /// # Examples
941    ///
942    /// ```
943    /// #![feature(allocator_api)]
944    /// #![feature(get_mut_unchecked)]
945    ///
946    /// use std::rc::Rc;
947    /// use std::alloc::System;
948    ///
949    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
950    ///
951    /// let five = unsafe {
952    ///     // Deferred initialization:
953    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
954    ///
955    ///     five.assume_init()
956    /// };
957    ///
958    /// assert_eq!(*five, 5);
959    /// # Ok::<(), std::alloc::AllocError>(())
960    /// ```
961    #[unstable(feature = "allocator_api", issue = "32838")]
962    #[inline]
963    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
964        unsafe {
965            Ok(Rc::from_ptr_in(
966                Rc::try_allocate_for_layout(
967                    Layout::new::<T>(),
968                    |layout| alloc.allocate(layout),
969                    <*mut u8>::cast,
970                )?,
971                alloc,
972            ))
973        }
974    }
975
976    /// Constructs a new `Rc` with uninitialized contents, with the memory
977    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
978    /// fails
979    ///
980    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
981    /// incorrect usage of this method.
982    ///
983    /// # Examples
984    ///
985    /// ```
986    /// #![feature(allocator_api)]
987    ///
988    /// use std::rc::Rc;
989    /// use std::alloc::System;
990    ///
991    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
992    /// let zero = unsafe { zero.assume_init() };
993    ///
994    /// assert_eq!(*zero, 0);
995    /// # Ok::<(), std::alloc::AllocError>(())
996    /// ```
997    ///
998    /// [zeroed]: mem::MaybeUninit::zeroed
999    #[unstable(feature = "allocator_api", issue = "32838")]
1000    #[inline]
1001    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
1002        unsafe {
1003            Ok(Rc::from_ptr_in(
1004                Rc::try_allocate_for_layout(
1005                    Layout::new::<T>(),
1006                    |layout| alloc.allocate_zeroed(layout),
1007                    <*mut u8>::cast,
1008                )?,
1009                alloc,
1010            ))
1011        }
1012    }
1013
1014    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
1015    /// `value` will be pinned in memory and unable to be moved.
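    ///
    /// # Examples
    ///
    /// A brief sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let pinned = Rc::pin_in(5, System);
    /// assert_eq!(*pinned, 5);
    /// ```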
1016    #[cfg(not(no_global_oom_handling))]
1017    #[unstable(feature = "allocator_api", issue = "32838")]
1018    #[inline]
1019    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
1020    where
1021        A: 'static,
1022    {
1023        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
1024    }
1025
1026    /// Returns the inner value, if the `Rc` has exactly one strong reference.
1027    ///
1028    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
1029    /// passed in.
1030    ///
1031    /// This will succeed even if there are outstanding weak references.
1032    ///
1033    /// # Examples
1034    ///
1035    /// ```
1036    /// use std::rc::Rc;
1037    ///
1038    /// let x = Rc::new(3);
1039    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
1040    ///
1041    /// let x = Rc::new(4);
1042    /// let _y = Rc::clone(&x);
1043    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
1044    /// ```
1045    #[inline]
1046    #[stable(feature = "rc_unique", since = "1.4.0")]
1047    pub fn try_unwrap(this: Self) -> Result<T, Self> {
1048        if Rc::strong_count(&this) == 1 {
1049            let this = ManuallyDrop::new(this);
1050
1051            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
1052            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
1053
1054            // Indicate to Weaks that they can't be promoted by decrementing
1055            // the strong count, and then remove the implicit "strong weak"
1056            // pointer while also handling drop logic by just crafting a
1057            // fake Weak.
1058            this.inner().dec_strong();
1059            let _weak = Weak { ptr: this.ptr, alloc };
1060            Ok(val)
1061        } else {
1062            Err(this)
1063        }
1064    }
1065
1066    /// Returns the inner value, if the `Rc` has exactly one strong reference.
1067    ///
1068    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
1069    ///
1070    /// This will succeed even if there are outstanding weak references.
1071    ///
1072    /// If `Rc::into_inner` is called on every clone of this `Rc`,
1073    /// it is guaranteed that exactly one of the calls returns the inner value.
1074    /// This means in particular that the inner value is not dropped.
1075    ///
1076    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
1077    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
1078    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
1079    /// (Note that the same kind of equivalence does **not** hold true for
1080    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
1081    ///
1082    /// # Examples
1083    ///
1084    /// ```
1085    /// use std::rc::Rc;
1086    ///
1087    /// let x = Rc::new(3);
1088    /// assert_eq!(Rc::into_inner(x), Some(3));
1089    ///
1090    /// let x = Rc::new(4);
1091    /// let y = Rc::clone(&x);
1092    ///
1093    /// assert_eq!(Rc::into_inner(y), None);
1094    /// assert_eq!(Rc::into_inner(x), Some(4));
1095    /// ```
1096    #[inline]
1097    #[stable(feature = "rc_into_inner", since = "1.70.0")]
1098    pub fn into_inner(this: Self) -> Option<T> {
1099        Rc::try_unwrap(this).ok()
1100    }
1101}
1102
1103impl<T> Rc<[T]> {
1104    /// Constructs a new reference-counted slice with uninitialized contents.
1105    ///
1106    /// # Examples
1107    ///
1108    /// ```
1109    /// use std::rc::Rc;
1110    ///
1111    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1112    ///
1113    /// // Deferred initialization:
1114    /// let data = Rc::get_mut(&mut values).unwrap();
1115    /// data[0].write(1);
1116    /// data[1].write(2);
1117    /// data[2].write(3);
1118    ///
1119    /// let values = unsafe { values.assume_init() };
1120    ///
1121    /// assert_eq!(*values, [1, 2, 3])
1122    /// ```
1123    #[cfg(not(no_global_oom_handling))]
1124    #[stable(feature = "new_uninit", since = "1.82.0")]
1125    #[must_use]
1126    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1127        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
1128    }
1129
1130    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1131    /// filled with `0` bytes.
1132    ///
1133    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1134    /// incorrect usage of this method.
1135    ///
1136    /// # Examples
1137    ///
1138    /// ```
1139    /// use std::rc::Rc;
1140    ///
1141    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
1142    /// let values = unsafe { values.assume_init() };
1143    ///
1144    /// assert_eq!(*values, [0, 0, 0])
1145    /// ```
1146    ///
1147    /// [zeroed]: mem::MaybeUninit::zeroed
1148    #[cfg(not(no_global_oom_handling))]
1149    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
1150    #[must_use]
1151    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1152        unsafe {
1153            Rc::from_ptr(Rc::allocate_for_layout(
1154                Layout::array::<T>(len).unwrap(),
1155                |layout| Global.allocate_zeroed(layout),
1156                |mem| {
1157                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1158                        as *mut RcInner<[mem::MaybeUninit<T>]>
1159                },
1160            ))
1161        }
1162    }
1163
1164    /// Converts the reference-counted slice into a reference-counted array.
1165    ///
1166    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
1167    ///
1168    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
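    ///
    /// # Examples
    ///
    /// A basic round-trip from a slice to an array:
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// // A length mismatch returns `None`.
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// assert!(slice.into_array::<4>().is_none());
    /// ```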
1169    #[unstable(feature = "slice_as_array", issue = "133508")]
1170    #[inline]
1171    #[must_use]
1172    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
1173        if self.len() == N {
1174            let ptr = Self::into_raw(self) as *const [T; N];
1175
1176            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
1177            let me = unsafe { Rc::from_raw(ptr) };
1178            Some(me)
1179        } else {
1180            None
1181        }
1182    }
1183}
1184
1185impl<T, A: Allocator> Rc<[T], A> {
1186    /// Constructs a new reference-counted slice with uninitialized contents.
1187    ///
1188    /// # Examples
1189    ///
1190    /// ```
1191    /// #![feature(get_mut_unchecked)]
1192    /// #![feature(allocator_api)]
1193    ///
1194    /// use std::rc::Rc;
1195    /// use std::alloc::System;
1196    ///
1197    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
1198    ///
1199    /// let values = unsafe {
1200    ///     // Deferred initialization:
1201    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
1202    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
1203    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
1204    ///
1205    ///     values.assume_init()
1206    /// };
1207    ///
1208    /// assert_eq!(*values, [1, 2, 3])
1209    /// ```
1210    #[cfg(not(no_global_oom_handling))]
1211    #[unstable(feature = "allocator_api", issue = "32838")]
1212    #[inline]
1213    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1214        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
1215    }
1216
1217    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1218    /// filled with `0` bytes.
1219    ///
1220    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1221    /// incorrect usage of this method.
1222    ///
1223    /// # Examples
1224    ///
1225    /// ```
1226    /// #![feature(allocator_api)]
1227    ///
1228    /// use std::rc::Rc;
1229    /// use std::alloc::System;
1230    ///
1231    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
1232    /// let values = unsafe { values.assume_init() };
1233    ///
1234    /// assert_eq!(*values, [0, 0, 0])
1235    /// ```
1236    ///
1237    /// [zeroed]: mem::MaybeUninit::zeroed
1238    #[cfg(not(no_global_oom_handling))]
1239    #[unstable(feature = "allocator_api", issue = "32838")]
1240    #[inline]
1241    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1242        unsafe {
1243            Rc::from_ptr_in(
1244                Rc::allocate_for_layout(
1245                    Layout::array::<T>(len).unwrap(),
1246                    |layout| alloc.allocate_zeroed(layout),
1247                    |mem| {
1248                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1249                            as *mut RcInner<[mem::MaybeUninit<T>]>
1250                    },
1251                ),
1252                alloc,
1253            )
1254        }
1255    }
1256}
1257
1258impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
1259    /// Converts to `Rc<T>`.
1260    ///
1261    /// # Safety
1262    ///
1263    /// As with [`MaybeUninit::assume_init`],
1264    /// it is up to the caller to guarantee that the inner value
1265    /// really is in an initialized state.
1266    /// Calling this when the content is not yet fully initialized
1267    /// causes immediate undefined behavior.
1268    ///
1269    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1270    ///
1271    /// # Examples
1272    ///
1273    /// ```
1274    /// use std::rc::Rc;
1275    ///
1276    /// let mut five = Rc::<u32>::new_uninit();
1277    ///
1278    /// // Deferred initialization:
1279    /// Rc::get_mut(&mut five).unwrap().write(5);
1280    ///
1281    /// let five = unsafe { five.assume_init() };
1282    ///
1283    /// assert_eq!(*five, 5)
1284    /// ```
1285    #[stable(feature = "new_uninit", since = "1.82.0")]
1286    #[inline]
1287    pub unsafe fn assume_init(self) -> Rc<T, A> {
1288        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1289        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
1290    }
1291}
1292
1293impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
1294    /// Converts to `Rc<[T]>`.
1295    ///
1296    /// # Safety
1297    ///
1298    /// As with [`MaybeUninit::assume_init`],
1299    /// it is up to the caller to guarantee that the inner value
1300    /// really is in an initialized state.
1301    /// Calling this when the content is not yet fully initialized
1302    /// causes immediate undefined behavior.
1303    ///
1304    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1305    ///
1306    /// # Examples
1307    ///
1308    /// ```
1309    /// use std::rc::Rc;
1310    ///
1311    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1312    ///
1313    /// // Deferred initialization:
1314    /// let data = Rc::get_mut(&mut values).unwrap();
1315    /// data[0].write(1);
1316    /// data[1].write(2);
1317    /// data[2].write(3);
1318    ///
1319    /// let values = unsafe { values.assume_init() };
1320    ///
1321    /// assert_eq!(*values, [1, 2, 3])
1322    /// ```
1323    #[stable(feature = "new_uninit", since = "1.82.0")]
1324    #[inline]
1325    pub unsafe fn assume_init(self) -> Rc<[T], A> {
1326        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1327        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
1328    }
1329}
1330
1331impl<T: ?Sized> Rc<T> {
1332    /// Constructs an `Rc<T>` from a raw pointer.
1333    ///
1334    /// The raw pointer must have been previously returned by a call to
1335    /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
1336    ///
1337    /// * If `U` is sized, it must have the same size and alignment as `T`. This
1338    ///   is trivially true if `U` is `T`.
1339    /// * If `U` is unsized, its data pointer must have the same size and
1340    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
1341    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1342    ///   coercion].
1343    ///
1344    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1345    /// and alignment, this is basically like transmuting references of
1346    /// different types. See [`mem::transmute`][transmute] for more information
1347    /// on what restrictions apply in this case.
1348    ///
1349    /// The raw pointer must point to a block of memory allocated by the global allocator
1350    ///
1351    /// The user of `from_raw` has to make sure a specific value of `T` is only
1352    /// dropped once.
1353    ///
1354    /// This function is unsafe because improper use may lead to memory unsafety,
1355    /// even if the returned `Rc<T>` is never accessed.
1356    ///
1357    /// [into_raw]: Rc::into_raw
1358    /// [transmute]: core::mem::transmute
1359    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1360    ///
1361    /// # Examples
1362    ///
1363    /// ```
1364    /// use std::rc::Rc;
1365    ///
1366    /// let x = Rc::new("hello".to_owned());
1367    /// let x_ptr = Rc::into_raw(x);
1368    ///
1369    /// unsafe {
1370    ///     // Convert back to an `Rc` to prevent leak.
1371    ///     let x = Rc::from_raw(x_ptr);
1372    ///     assert_eq!(&*x, "hello");
1373    ///
1374    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1375    /// }
1376    ///
1377    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1378    /// ```
1379    ///
1380    /// Convert a slice back into its original array:
1381    ///
1382    /// ```
1383    /// use std::rc::Rc;
1384    ///
1385    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
1386    /// let x_ptr: *const [u32] = Rc::into_raw(x);
1387    ///
1388    /// unsafe {
1389    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
1390    ///     assert_eq!(&*x, &[1, 2, 3]);
1391    /// }
1392    /// ```
1393    #[inline]
1394    #[stable(feature = "rc_raw", since = "1.17.0")]
1395    pub unsafe fn from_raw(ptr: *const T) -> Self {
1396        unsafe { Self::from_raw_in(ptr, Global) }
1397    }
1398
1399    /// Consumes the `Rc`, returning the wrapped pointer.
1400    ///
1401    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1402    /// [`Rc::from_raw`].
1403    ///
1404    /// # Examples
1405    ///
1406    /// ```
1407    /// use std::rc::Rc;
1408    ///
1409    /// let x = Rc::new("hello".to_owned());
1410    /// let x_ptr = Rc::into_raw(x);
1411    /// assert_eq!(unsafe { &*x_ptr }, "hello");
1412    /// # // Prevent leaks for Miri.
1413    /// # drop(unsafe { Rc::from_raw(x_ptr) });
1414    /// ```
1415    #[must_use = "losing the pointer will leak memory"]
1416    #[stable(feature = "rc_raw", since = "1.17.0")]
1417    #[rustc_never_returns_null_ptr]
1418    pub fn into_raw(this: Self) -> *const T {
1419        let this = ManuallyDrop::new(this);
1420        Self::as_ptr(&*this)
1421    }
1422
1423    /// Increments the strong reference count on the `Rc<T>` associated with the
1424    /// provided pointer by one.
1425    ///
1426    /// # Safety
1427    ///
1428    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1429    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1430    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1431    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1432    /// allocated by the global allocator.
1433    ///
1434    /// [from_raw_in]: Rc::from_raw_in
1435    ///
1436    /// # Examples
1437    ///
1438    /// ```
1439    /// use std::rc::Rc;
1440    ///
1441    /// let five = Rc::new(5);
1442    ///
1443    /// unsafe {
1444    ///     let ptr = Rc::into_raw(five);
1445    ///     Rc::increment_strong_count(ptr);
1446    ///
1447    ///     let five = Rc::from_raw(ptr);
1448    ///     assert_eq!(2, Rc::strong_count(&five));
1449    /// #   // Prevent leaks for Miri.
1450    /// #   Rc::decrement_strong_count(ptr);
1451    /// }
1452    /// ```
1453    #[inline]
1454    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1455    pub unsafe fn increment_strong_count(ptr: *const T) {
1456        unsafe { Self::increment_strong_count_in(ptr, Global) }
1457    }
1458
1459    /// Decrements the strong reference count on the `Rc<T>` associated with the
1460    /// provided pointer by one.
1461    ///
1462    /// # Safety
1463    ///
1464    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1465    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1466    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1467    /// least 1) when invoking this method, and `ptr` must point to a block of memory
1468    /// allocated by the global allocator. This method can be used to release the final `Rc` and
1469    /// backing storage, but **should not** be called after the final `Rc` has been released.
1470    ///
1471    /// [from_raw_in]: Rc::from_raw_in
1472    ///
1473    /// # Examples
1474    ///
1475    /// ```
1476    /// use std::rc::Rc;
1477    ///
1478    /// let five = Rc::new(5);
1479    ///
1480    /// unsafe {
1481    ///     let ptr = Rc::into_raw(five);
1482    ///     Rc::increment_strong_count(ptr);
1483    ///
1484    ///     let five = Rc::from_raw(ptr);
1485    ///     assert_eq!(2, Rc::strong_count(&five));
1486    ///     Rc::decrement_strong_count(ptr);
1487    ///     assert_eq!(1, Rc::strong_count(&five));
1488    /// }
1489    /// ```
1490    #[inline]
1491    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1492    pub unsafe fn decrement_strong_count(ptr: *const T) {
1493        unsafe { Self::decrement_strong_count_in(ptr, Global) }
1494    }
1495}
1496
1497impl<T: ?Sized, A: Allocator> Rc<T, A> {
1498    /// Returns a reference to the underlying allocator.
1499    ///
1500    /// Note: this is an associated function, which means that you have
1501    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
1502    /// is so that there is no conflict with a method on the inner type.
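    ///
    /// # Examples
    ///
    /// A minimal sketch (the `System` allocator is used purely for illustration):
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let r = Rc::new_in(5, System);
    /// let _alloc: &System = Rc::allocator(&r);
    /// ```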
1503    #[inline]
1504    #[unstable(feature = "allocator_api", issue = "32838")]
1505    pub fn allocator(this: &Self) -> &A {
1506        &this.alloc
1507    }
1508
1509    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
1510    ///
1511    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1512    /// [`Rc::from_raw_in`].
1513    ///
1514    /// # Examples
1515    ///
1516    /// ```
1517    /// #![feature(allocator_api)]
1518    /// use std::rc::Rc;
1519    /// use std::alloc::System;
1520    ///
1521    /// let x = Rc::new_in("hello".to_owned(), System);
1522    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1523    /// assert_eq!(unsafe { &*ptr }, "hello");
1524    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1525    /// assert_eq!(&*x, "hello");
1526    /// ```
1527    #[must_use = "losing the pointer will leak memory"]
1528    #[unstable(feature = "allocator_api", issue = "32838")]
1529    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1530        let this = mem::ManuallyDrop::new(this);
1531        let ptr = Self::as_ptr(&this);
1532        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1533        let alloc = unsafe { ptr::read(&this.alloc) };
1534        (ptr, alloc)
1535    }
1536
1537    /// Provides a raw pointer to the data.
1538    ///
1539    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
1540    /// for as long as there is at least one strong (`Rc`) reference to the allocation.
1541    ///
1542    /// # Examples
1543    ///
1544    /// ```
1545    /// use std::rc::Rc;
1546    ///
1547    /// let x = Rc::new(0);
1548    /// let y = Rc::clone(&x);
1549    /// let x_ptr = Rc::as_ptr(&x);
1550    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
1551    /// assert_eq!(unsafe { *x_ptr }, 0);
1552    /// ```
1553    #[stable(feature = "weak_into_raw", since = "1.45.0")]
1554    #[rustc_never_returns_null_ptr]
1555    pub fn as_ptr(this: &Self) -> *const T {
1556        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
1557
1558        // SAFETY: This cannot go through Deref::deref or Rc::inner because
1559        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
1560        // write through the pointer after the Rc is recovered through `from_raw`.
1561        unsafe { &raw mut (*ptr).value }
1562    }
1563
1564    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
1565    ///
1566    /// The raw pointer must have been previously returned by a call to [`Rc<U,
1567    /// A>::into_raw`][into_raw] with the following requirements:
1568    ///
1569    /// * If `U` is sized, it must have the same size and alignment as `T`. This
1570    ///   is trivially true if `U` is `T`.
1571    /// * If `U` is unsized, its data pointer must have the same size and
1572    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
1573    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1574    ///   coercion].
1575    ///
1576    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1577    /// and alignment, this is basically like transmuting references of
1578    /// different types. See [`mem::transmute`][transmute] for more information
1579    /// on what restrictions apply in this case.
1580    ///
1581    /// The raw pointer must point to a block of memory allocated by `alloc`.
1582    ///
1583    /// The user of `from_raw_in` has to make sure a specific value of `T` is only
1584    /// dropped once.
1585    ///
1586    /// This function is unsafe because improper use may lead to memory unsafety,
1587    /// even if the returned `Rc<T>` is never accessed.
1588    ///
1589    /// [into_raw]: Rc::into_raw
1590    /// [transmute]: core::mem::transmute
1591    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1592    ///
1593    /// # Examples
1594    ///
1595    /// ```
1596    /// #![feature(allocator_api)]
1597    ///
1598    /// use std::rc::Rc;
1599    /// use std::alloc::System;
1600    ///
1601    /// let x = Rc::new_in("hello".to_owned(), System);
1602    /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
1603    ///
1604    /// unsafe {
1605    ///     // Convert back to an `Rc` to prevent leak.
1606    ///     let x = Rc::from_raw_in(x_ptr, System);
1607    ///     assert_eq!(&*x, "hello");
1608    ///
1609    ///     // Further calls to `Rc::from_raw_in(x_ptr, System)` would be memory-unsafe.
1610    /// }
1611    ///
1612    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1613    /// ```
1614    ///
1615    /// Convert a slice back into its original array:
1616    ///
1617    /// ```
1618    /// #![feature(allocator_api)]
1619    ///
1620    /// use std::rc::Rc;
1621    /// use std::alloc::System;
1622    ///
1623    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
1624    /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
1625    ///
1626    /// unsafe {
1627    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
1628    ///     assert_eq!(&*x, &[1, 2, 3]);
1629    /// }
1630    /// ```
1631    #[unstable(feature = "allocator_api", issue = "32838")]
1632    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
1633        let offset = unsafe { data_offset(ptr) };
1634
1635        // Reverse the offset to find the original RcInner.
1636        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
1637
1638        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
1639    }
1640
1641    /// Creates a new [`Weak`] pointer to this allocation.
1642    ///
1643    /// # Examples
1644    ///
1645    /// ```
1646    /// use std::rc::Rc;
1647    ///
1648    /// let five = Rc::new(5);
1649    ///
1650    /// let weak_five = Rc::downgrade(&five);
1651    /// ```
1652    #[must_use = "this returns a new `Weak` pointer, \
1653                  without modifying the original `Rc`"]
1654    #[stable(feature = "rc_weak", since = "1.4.0")]
1655    pub fn downgrade(this: &Self) -> Weak<T, A>
1656    where
1657        A: Clone,
1658    {
1659        this.inner().inc_weak();
1660        // Make sure we do not create a dangling Weak
1661        debug_assert!(!is_dangling(this.ptr.as_ptr()));
1662        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1663    }
1664
1665    /// Gets the number of [`Weak`] pointers to this allocation.
1666    ///
1667    /// # Examples
1668    ///
1669    /// ```
1670    /// use std::rc::Rc;
1671    ///
1672    /// let five = Rc::new(5);
1673    /// let _weak_five = Rc::downgrade(&five);
1674    ///
1675    /// assert_eq!(1, Rc::weak_count(&five));
1676    /// ```
1677    #[inline]
1678    #[stable(feature = "rc_counts", since = "1.15.0")]
1679    pub fn weak_count(this: &Self) -> usize {
1680        this.inner().weak() - 1
1681    }
1682
1683    /// Gets the number of strong (`Rc`) pointers to this allocation.
1684    ///
1685    /// # Examples
1686    ///
1687    /// ```
1688    /// use std::rc::Rc;
1689    ///
1690    /// let five = Rc::new(5);
1691    /// let _also_five = Rc::clone(&five);
1692    ///
1693    /// assert_eq!(2, Rc::strong_count(&five));
1694    /// ```
1695    #[inline]
1696    #[stable(feature = "rc_counts", since = "1.15.0")]
1697    pub fn strong_count(this: &Self) -> usize {
1698        this.inner().strong()
1699    }
1700
1701    /// Increments the strong reference count on the `Rc<T>` associated with the
1702    /// provided pointer by one.
1703    ///
1704    /// # Safety
1705    ///
1706    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1707    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1708    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1709    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1710    /// allocated by `alloc`.
1711    ///
1712    /// [from_raw_in]: Rc::from_raw_in
1713    ///
1714    /// # Examples
1715    ///
1716    /// ```
1717    /// #![feature(allocator_api)]
1718    ///
1719    /// use std::rc::Rc;
1720    /// use std::alloc::System;
1721    ///
1722    /// let five = Rc::new_in(5, System);
1723    ///
1724    /// unsafe {
1725    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1726    ///     Rc::increment_strong_count_in(ptr, System);
1727    ///
1728    ///     let five = Rc::from_raw_in(ptr, System);
1729    ///     assert_eq!(2, Rc::strong_count(&five));
1730    /// #   // Prevent leaks for Miri.
1731    /// #   Rc::decrement_strong_count_in(ptr, System);
1732    /// }
1733    /// ```
1734    #[inline]
1735    #[unstable(feature = "allocator_api", issue = "32838")]
1736    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
1737    where
1738        A: Clone,
1739    {
1740        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
1741        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
1742        // Now increase refcount, but don't drop new refcount either
1743        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
1744    }
1745
1746    /// Decrements the strong reference count on the `Rc<T>` associated with the
1747    /// provided pointer by one.
1748    ///
1749    /// # Safety
1750    ///
1751    /// The pointer must have been obtained through `Rc::into_raw`and must satisfy the
1752    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1753    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1754    /// least 1) when invoking this method, and `ptr` must point to a block of memory
1755    /// allocated by `alloc`. This method can be used to release the final `Rc` and
1756    /// backing storage, but **should not** be called after the final `Rc` has been released.
1757    ///
1758    /// [from_raw_in]: Rc::from_raw_in
1759    ///
1760    /// # Examples
1761    ///
1762    /// ```
1763    /// #![feature(allocator_api)]
1764    ///
1765    /// use std::rc::Rc;
1766    /// use std::alloc::System;
1767    ///
1768    /// let five = Rc::new_in(5, System);
1769    ///
1770    /// unsafe {
1771    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1772    ///     Rc::increment_strong_count_in(ptr, System);
1773    ///
1774    ///     let five = Rc::from_raw_in(ptr, System);
1775    ///     assert_eq!(2, Rc::strong_count(&five));
1776    ///     Rc::decrement_strong_count_in(ptr, System);
1777    ///     assert_eq!(1, Rc::strong_count(&five));
1778    /// }
1779    /// ```
1780    #[inline]
1781    #[unstable(feature = "allocator_api", issue = "32838")]
1782    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1783        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1784    }
1785
1786    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1787    /// this allocation.
1788    #[inline]
1789    fn is_unique(this: &Self) -> bool {
1790        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1791    }
1792
1793    /// Returns a mutable reference into the given `Rc`, if there are
1794    /// no other `Rc` or [`Weak`] pointers to the same allocation.
1795    ///
1796    /// Returns [`None`] otherwise, because it is not safe to
1797    /// mutate a shared value.
1798    ///
1799    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1800    /// the inner value when there are other `Rc` pointers.
1801    ///
1802    /// [make_mut]: Rc::make_mut
1803    /// [clone]: Clone::clone
1804    ///
1805    /// # Examples
1806    ///
1807    /// ```
1808    /// use std::rc::Rc;
1809    ///
1810    /// let mut x = Rc::new(3);
1811    /// *Rc::get_mut(&mut x).unwrap() = 4;
1812    /// assert_eq!(*x, 4);
1813    ///
1814    /// let _y = Rc::clone(&x);
1815    /// assert!(Rc::get_mut(&mut x).is_none());
1816    /// ```
1817    #[inline]
1818    #[stable(feature = "rc_unique", since = "1.4.0")]
1819    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1820        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1821    }
1822
1823    /// Returns a mutable reference into the given `Rc`,
1824    /// without any check.
1825    ///
1826    /// See also [`get_mut`], which is safe and does appropriate checks.
1827    ///
1828    /// [`get_mut`]: Rc::get_mut
1829    ///
1830    /// # Safety
1831    ///
1832    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1833    /// they must not be dereferenced or have active borrows for the duration
1834    /// of the returned borrow, and their inner type must be exactly the same as the
1835    /// inner type of this Rc (including lifetimes). This is trivially the case if no
1836    /// such pointers exist, for example immediately after `Rc::new`.
1837    ///
1838    /// # Examples
1839    ///
1840    /// ```
1841    /// #![feature(get_mut_unchecked)]
1842    ///
1843    /// use std::rc::Rc;
1844    ///
1845    /// let mut x = Rc::new(String::new());
1846    /// unsafe {
1847    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
1848    /// }
1849    /// assert_eq!(*x, "foo");
1850    /// ```
1851    /// Other `Rc` pointers to the same allocation must be to the same type.
1852    /// ```no_run
1853    /// #![feature(get_mut_unchecked)]
1854    ///
1855    /// use std::rc::Rc;
1856    ///
1857    /// let x: Rc<str> = Rc::from("Hello, world!");
1858    /// let mut y: Rc<[u8]> = x.clone().into();
1859    /// unsafe {
1860    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
1861    ///     Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1862    /// }
1863    /// println!("{}", &*x); // Invalid UTF-8 in a str
1864    /// ```
1865    /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1866    /// ```no_run
1867    /// #![feature(get_mut_unchecked)]
1868    ///
1869    /// use std::rc::Rc;
1870    ///
1871    /// let x: Rc<&str> = Rc::new("Hello, world!");
1872    /// {
1873    ///     let s = String::from("Oh, no!");
1874    ///     let mut y: Rc<&str> = x.clone();
1875    ///     unsafe {
1876    ///         // this is Undefined Behavior, because x's inner type
1877    ///         // is &'long str, not &'short str
1878    ///         *Rc::get_mut_unchecked(&mut y) = &s;
1879    ///     }
1880    /// }
1881    /// println!("{}", &*x); // Use-after-free
1882    /// ```
1883    #[inline]
1884    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1885    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1886        // We are careful to *not* create a reference covering the "count" fields, as
1887        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1888        unsafe { &mut (*this.ptr.as_ptr()).value }
1889    }
1890
1891    #[inline]
1892    #[stable(feature = "ptr_eq", since = "1.17.0")]
1893    /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
1894    /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1895    ///
1896    /// # Examples
1897    ///
1898    /// ```
1899    /// use std::rc::Rc;
1900    ///
1901    /// let five = Rc::new(5);
1902    /// let same_five = Rc::clone(&five);
1903    /// let other_five = Rc::new(5);
1904    ///
1905    /// assert!(Rc::ptr_eq(&five, &same_five));
1906    /// assert!(!Rc::ptr_eq(&five, &other_five));
1907    /// ```
1908    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1909        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1910    }
1911}
1912
1913#[cfg(not(no_global_oom_handling))]
1914impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1915    /// Makes a mutable reference into the given `Rc`.
1916    ///
1917    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1918    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
1919    /// referred to as clone-on-write.
1920    ///
1921    /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1922    /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1923    /// be cloned.
1924    ///
1925    /// See also [`get_mut`], which will fail rather than cloning the inner value
1926    /// or disassociating [`Weak`] pointers.
1927    ///
1928    /// [`clone`]: Clone::clone
1929    /// [`get_mut`]: Rc::get_mut
1930    ///
1931    /// # Examples
1932    ///
1933    /// ```
1934    /// use std::rc::Rc;
1935    ///
1936    /// let mut data = Rc::new(5);
1937    ///
1938    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
1939    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1940    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
1941    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
1942    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
1943    ///
1944    /// // Now `data` and `other_data` point to different allocations.
1945    /// assert_eq!(*data, 8);
1946    /// assert_eq!(*other_data, 12);
1947    /// ```
1948    ///
1949    /// [`Weak`] pointers will be disassociated:
1950    ///
1951    /// ```
1952    /// use std::rc::Rc;
1953    ///
1954    /// let mut data = Rc::new(75);
1955    /// let weak = Rc::downgrade(&data);
1956    ///
1957    /// assert!(75 == *data);
1958    /// assert!(75 == *weak.upgrade().unwrap());
1959    ///
1960    /// *Rc::make_mut(&mut data) += 1;
1961    ///
1962    /// assert!(76 == *data);
1963    /// assert!(weak.upgrade().is_none());
1964    /// ```
1965    #[inline]
1966    #[stable(feature = "rc_unique", since = "1.4.0")]
1967    pub fn make_mut(this: &mut Self) -> &mut T {
1968        let size_of_val = size_of_val::<T>(&**this);
1969
1970        if Rc::strong_count(this) != 1 {
1971            // Gotta clone the data, there are other Rcs.
1972
1973            let this_data_ref: &T = &**this;
1974            // `in_progress` drops the allocation if we panic before finishing initializing it.
1975            let mut in_progress: UniqueRcUninit<T, A> =
1976                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1977
1978            // Initialize with clone of this.
1979            let initialized_clone = unsafe {
1980                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1981                this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1982                // Cast type of pointer, now that it is initialized.
1983                in_progress.into_rc()
1984            };
1985
1986            // Replace `this` with newly constructed Rc.
1987            *this = initialized_clone;
1988        } else if Rc::weak_count(this) != 0 {
1989            // Can just steal the data, all that's left is Weaks
1990
1991            // We don't need panic-protection like the above branch does, but we might as well
1992            // use the same mechanism.
1993            let mut in_progress: UniqueRcUninit<T, A> =
1994                UniqueRcUninit::new(&**this, this.alloc.clone());
1995            unsafe {
1996                // Initialize `in_progress` with move of **this.
1997                // We have to express this in terms of bytes because `T: ?Sized`; there is no
1998                // operation that just copies a value based on its `size_of_val()`.
1999                ptr::copy_nonoverlapping(
2000                    ptr::from_ref(&**this).cast::<u8>(),
2001                    in_progress.data_ptr().cast::<u8>(),
2002                    size_of_val,
2003                );
2004
2005                this.inner().dec_strong();
2006                // Remove implicit strong-weak ref (no need to craft a fake
2007                // Weak here -- we know other Weaks can clean up for us)
2008                this.inner().dec_weak();
2009                // Replace `this` with newly constructed Rc that has the moved data.
2010                ptr::write(this, in_progress.into_rc());
2011            }
2012        }
2013        // This unsafety is ok because we're guaranteed that the pointer
2014        // returned is the *only* pointer that will ever be returned to T. Our
2015        // reference count is guaranteed to be 1 at this point, and we required
2016        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
2017        // reference to the allocation.
2018        unsafe { &mut this.ptr.as_mut().value }
2019    }
2020}
2021
2022impl<T: Clone, A: Allocator> Rc<T, A> {
2023    /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
2024    /// clone.
2025    ///
2026    /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
2027    /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
2028    ///
2029    /// # Examples
2030    ///
2031    /// ```
2032    /// # use std::{ptr, rc::Rc};
2033    /// let inner = String::from("test");
2034    /// let ptr = inner.as_ptr();
2035    ///
2036    /// let rc = Rc::new(inner);
2037    /// let inner = Rc::unwrap_or_clone(rc);
2038    /// // The inner value was not cloned
2039    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2040    ///
2041    /// let rc = Rc::new(inner);
2042    /// let rc2 = rc.clone();
2043    /// let inner = Rc::unwrap_or_clone(rc);
2044    /// // Because there were 2 references, we had to clone the inner value.
2045    /// assert!(!ptr::eq(ptr, inner.as_ptr()));
2046    /// // `rc2` is the last reference, so when we unwrap it we get back
2047    /// // the original `String`.
2048    /// let inner = Rc::unwrap_or_clone(rc2);
2049    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2050    /// ```
2051    #[inline]
2052    #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
2053    pub fn unwrap_or_clone(this: Self) -> T {
2054        Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
2055    }
2056}
2057
2058impl<A: Allocator> Rc<dyn Any, A> {
2059    /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
2060    ///
2061    /// # Examples
2062    ///
2063    /// ```
2064    /// use std::any::Any;
2065    /// use std::rc::Rc;
2066    ///
2067    /// fn print_if_string(value: Rc<dyn Any>) {
2068    ///     if let Ok(string) = value.downcast::<String>() {
2069    ///         println!("String ({}): {}", string.len(), string);
2070    ///     }
2071    /// }
2072    ///
2073    /// let my_string = "Hello World".to_string();
2074    /// print_if_string(Rc::new(my_string));
2075    /// print_if_string(Rc::new(0i8));
2076    /// ```
2077    #[inline]
2078    #[stable(feature = "rc_downcast", since = "1.29.0")]
2079    pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
2080        if (*self).is::<T>() {
2081            unsafe {
2082                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2083                Ok(Rc::from_inner_in(ptr.cast(), alloc))
2084            }
2085        } else {
2086            Err(self)
2087        }
2088    }
2089
2090    /// Downcasts the `Rc<dyn Any>` to a concrete type.
2091    ///
2092    /// For a safe alternative see [`downcast`].
2093    ///
2094    /// # Examples
2095    ///
2096    /// ```
2097    /// #![feature(downcast_unchecked)]
2098    ///
2099    /// use std::any::Any;
2100    /// use std::rc::Rc;
2101    ///
2102    /// let x: Rc<dyn Any> = Rc::new(1_usize);
2103    ///
2104    /// unsafe {
2105    ///     assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2106    /// }
2107    /// ```
2108    ///
2109    /// # Safety
2110    ///
2111    /// The contained value must be of type `T`. Calling this method
2112    /// with the incorrect type is *undefined behavior*.
2113    ///
2114    ///
2115    /// [`downcast`]: Self::downcast
2116    #[inline]
2117    #[unstable(feature = "downcast_unchecked", issue = "90850")]
2118    pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2119        unsafe {
2120            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2121            Rc::from_inner_in(ptr.cast(), alloc)
2122        }
2123    }
2124}
2125
2126impl<T: ?Sized> Rc<T> {
2127    /// Allocates an `RcInner<T>` with sufficient space for
2128    /// a possibly-unsized inner value where the value has the layout provided.
2129    ///
2130    /// The function `mem_to_rc_inner` is called with the data pointer
2131    /// and must return a (potentially fat) pointer to the `RcInner<T>`.
2132    #[cfg(not(no_global_oom_handling))]
2133    unsafe fn allocate_for_layout(
2134        value_layout: Layout,
2135        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2136        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2137    ) -> *mut RcInner<T> {
2138        let layout = rc_inner_layout_for_value_layout(value_layout);
2139        unsafe {
2140            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2141                .unwrap_or_else(|_| handle_alloc_error(layout))
2142        }
2143    }
2144
2145    /// Allocates an `RcInner<T>` with sufficient space for
2146    /// a possibly-unsized inner value where the value has the layout provided,
2147    /// returning an error if allocation fails.
2148    ///
2149    /// The function `mem_to_rc_inner` is called with the data pointer
2150    /// and must return a (potentially fat) pointer to the `RcInner<T>`.
2151    #[inline]
2152    unsafe fn try_allocate_for_layout(
2153        value_layout: Layout,
2154        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2155        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2156    ) -> Result<*mut RcInner<T>, AllocError> {
2157        let layout = rc_inner_layout_for_value_layout(value_layout);
2158
2159        // Allocate for the layout.
2160        let ptr = allocate(layout)?;
2161
2162        // Initialize the RcInner
2163        let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2164        unsafe {
2165            debug_assert_eq!(Layout::for_value_raw(inner), layout);
2166
2167            (&raw mut (*inner).strong).write(Cell::new(1));
2168            (&raw mut (*inner).weak).write(Cell::new(1));
2169        }
2170
2171        Ok(inner)
2172    }
2173}
2174
2175impl<T: ?Sized, A: Allocator> Rc<T, A> {
2176    /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2177    #[cfg(not(no_global_oom_handling))]
2178    unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2179        // Allocate for the `RcInner<T>` using the given value.
2180        unsafe {
2181            Rc::<T>::allocate_for_layout(
2182                Layout::for_value_raw(ptr),
2183                |layout| alloc.allocate(layout),
2184                |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2185            )
2186        }
2187    }
2188
2189    #[cfg(not(no_global_oom_handling))]
2190    fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2191        unsafe {
2192            let value_size = size_of_val(&*src);
2193            let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2194
2195            // Copy value as bytes
2196            ptr::copy_nonoverlapping(
2197                (&raw const *src) as *const u8,
2198                (&raw mut (*ptr).value) as *mut u8,
2199                value_size,
2200            );
2201
2202            // Free the allocation without dropping its contents
2203            let (bptr, alloc) = Box::into_raw_with_allocator(src);
2204            let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2205            drop(src);
2206
2207            Self::from_ptr_in(ptr, alloc)
2208        }
2209    }
2210}
2211
2212impl<T> Rc<[T]> {
2213    /// Allocates an `RcInner<[T]>` with the given length.
2214    #[cfg(not(no_global_oom_handling))]
2215    unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2216        unsafe {
2217            Self::allocate_for_layout(
2218                Layout::array::<T>(len).unwrap(),
2219                |layout| Global.allocate(layout),
2220                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2221            )
2222        }
2223    }
2224
2225    /// Copy elements from slice into newly allocated `Rc<[T]>`
2226    ///
2227    /// Unsafe because the caller must either take ownership or bind `T: Copy`.
2228    #[cfg(not(no_global_oom_handling))]
2229    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2230        unsafe {
2231            let ptr = Self::allocate_for_slice(v.len());
2232            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2233            Self::from_ptr(ptr)
2234        }
2235    }
2236
2237    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2238    ///
2239    /// Behavior is undefined should the size be wrong.
2240    #[cfg(not(no_global_oom_handling))]
2241    unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2242        // Panic guard while cloning T elements.
2243        // In the event of a panic, elements that have been written
2244        // into the new RcInner will be dropped, then the memory freed.
2245        struct Guard<T> {
2246            mem: NonNull<u8>,
2247            elems: *mut T,
2248            layout: Layout,
2249            n_elems: usize,
2250        }
2251
2252        impl<T> Drop for Guard<T> {
2253            fn drop(&mut self) {
2254                unsafe {
2255                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
2256                    ptr::drop_in_place(slice);
2257
2258                    Global.deallocate(self.mem, self.layout);
2259                }
2260            }
2261        }
2262
2263        unsafe {
2264            let ptr = Self::allocate_for_slice(len);
2265
2266            let mem = ptr as *mut _ as *mut u8;
2267            let layout = Layout::for_value_raw(ptr);
2268
2269            // Pointer to first element
2270            let elems = (&raw mut (*ptr).value) as *mut T;
2271
2272            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2273
2274            for (i, item) in iter.enumerate() {
2275                ptr::write(elems.add(i), item);
2276                guard.n_elems += 1;
2277            }
2278
2279            // All clear. Forget the guard so it doesn't free the new RcInner.
2280            mem::forget(guard);
2281
2282            Self::from_ptr(ptr)
2283        }
2284    }
2285}
2286
2287impl<T, A: Allocator> Rc<[T], A> {
2288    /// Allocates an `RcInner<[T]>` with the given length.
2289    #[inline]
2290    #[cfg(not(no_global_oom_handling))]
2291    unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2292        unsafe {
2293            Rc::<[T]>::allocate_for_layout(
2294                Layout::array::<T>(len).unwrap(),
2295                |layout| alloc.allocate(layout),
2296                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2297            )
2298        }
2299    }
2300}
2301
2302#[cfg(not(no_global_oom_handling))]
2303/// Specialization trait used for `From<&[T]>`.
2304trait RcFromSlice<T> {
2305    fn from_slice(slice: &[T]) -> Self;
2306}
2307
2308#[cfg(not(no_global_oom_handling))]
2309impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2310    #[inline]
2311    default fn from_slice(v: &[T]) -> Self {
2312        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2313    }
2314}
2315
2316#[cfg(not(no_global_oom_handling))]
2317impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2318    #[inline]
2319    fn from_slice(v: &[T]) -> Self {
2320        unsafe { Rc::copy_from_slice(v) }
2321    }
2322}
2323
2324#[stable(feature = "rust1", since = "1.0.0")]
2325impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2326    type Target = T;
2327
2328    #[inline(always)]
2329    fn deref(&self) -> &T {
2330        &self.inner().value
2331    }
2332}
2333
2334#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2335unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2336
2337//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2338#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2339unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2340
2341#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2342unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2343
2344#[unstable(feature = "deref_pure_trait", issue = "87121")]
2345unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2346
2347//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2348#[unstable(feature = "deref_pure_trait", issue = "87121")]
2349unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2350
2351#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2352impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2353
2354#[stable(feature = "rust1", since = "1.0.0")]
2355unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2356    /// Drops the `Rc`.
2357    ///
2358    /// This will decrement the strong reference count. If the strong reference
2359    /// count reaches zero then the only other references (if any) are
2360    /// [`Weak`], so we `drop` the inner value.
2361    ///
2362    /// # Examples
2363    ///
2364    /// ```
2365    /// use std::rc::Rc;
2366    ///
2367    /// struct Foo;
2368    ///
2369    /// impl Drop for Foo {
2370    ///     fn drop(&mut self) {
2371    ///         println!("dropped!");
2372    ///     }
2373    /// }
2374    ///
2375    /// let foo  = Rc::new(Foo);
2376    /// let foo2 = Rc::clone(&foo);
2377    ///
2378    /// drop(foo);    // Doesn't print anything
2379    /// drop(foo2);   // Prints "dropped!"
2380    /// ```
2381    #[inline]
2382    fn drop(&mut self) {
2383        unsafe {
2384            self.inner().dec_strong();
2385            if self.inner().strong() == 0 {
2386                self.drop_slow();
2387            }
2388        }
2389    }
2390}
2391
2392#[stable(feature = "rust1", since = "1.0.0")]
2393impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2394    /// Makes a clone of the `Rc` pointer.
2395    ///
2396    /// This creates another pointer to the same allocation, increasing the
2397    /// strong reference count.
2398    ///
2399    /// # Examples
2400    ///
2401    /// ```
2402    /// use std::rc::Rc;
2403    ///
2404    /// let five = Rc::new(5);
2405    ///
2406    /// let _ = Rc::clone(&five);
2407    /// ```
2408    #[inline]
2409    fn clone(&self) -> Self {
2410        unsafe {
2411            self.inner().inc_strong();
2412            Self::from_inner_in(self.ptr, self.alloc.clone())
2413        }
2414    }
2415}
2416
2417#[unstable(feature = "ergonomic_clones", issue = "132290")]
2418impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2419
2420#[cfg(not(no_global_oom_handling))]
2421#[stable(feature = "rust1", since = "1.0.0")]
2422impl<T: Default> Default for Rc<T> {
2423    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2424    ///
2425    /// # Examples
2426    ///
2427    /// ```
2428    /// use std::rc::Rc;
2429    ///
2430    /// let x: Rc<i32> = Default::default();
2431    /// assert_eq!(*x, 0);
2432    /// ```
2433    #[inline]
2434    fn default() -> Self {
2435        unsafe {
2436            Self::from_inner(
2437                Box::leak(Box::write(
2438                    Box::new_uninit(),
2439                    RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2440                ))
2441                .into(),
2442            )
2443        }
2444    }
2445}
2446
2447#[cfg(not(no_global_oom_handling))]
2448#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2449impl Default for Rc<str> {
2450    /// Creates an empty `str` inside an `Rc`.
2451    ///
2452    /// This may or may not share an allocation with other Rcs on the same thread.
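    ///
    /// # Examples
    ///
    /// A minimal sketch of the resulting value:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let s: Rc<str> = Default::default();
    /// assert_eq!(&*s, "");
    /// ```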
2453    #[inline]
2454    fn default() -> Self {
2455        let rc = Rc::<[u8]>::default();
2456        // `[u8]` has the same layout as `str`.
2457        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2458    }
2459}
2460
2461#[cfg(not(no_global_oom_handling))]
2462#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2463impl<T> Default for Rc<[T]> {
2464    /// Creates an empty `[T]` inside an `Rc`.
2465    ///
2466    /// This may or may not share an allocation with other Rcs on the same thread.
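    ///
    /// # Examples
    ///
    /// A minimal sketch of the resulting value:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let v: Rc<[i32]> = Default::default();
    /// assert!(v.is_empty());
    /// ```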
2467    #[inline]
2468    fn default() -> Self {
2469        let arr: [T; 0] = [];
2470        Rc::from(arr)
2471    }
2472}
2473
2474#[cfg(not(no_global_oom_handling))]
2475#[stable(feature = "pin_default_impls", since = "1.91.0")]
2476impl<T> Default for Pin<Rc<T>>
2477where
2478    T: ?Sized,
2479    Rc<T>: Default,
2480{
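    /// Creates a pinned `Rc<T>` holding the `Default` value for `T`.
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::pin::Pin;
    /// use std::rc::Rc;
    ///
    /// let x: Pin<Rc<i32>> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```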
2481    #[inline]
2482    fn default() -> Self {
2483        unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2484    }
2485}
2486
2487#[stable(feature = "rust1", since = "1.0.0")]
2488trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2489    fn eq(&self, other: &Rc<T, A>) -> bool;
2490    fn ne(&self, other: &Rc<T, A>) -> bool;
2491}
2492
2493#[stable(feature = "rust1", since = "1.0.0")]
2494impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2495    #[inline]
2496    default fn eq(&self, other: &Rc<T, A>) -> bool {
2497        **self == **other
2498    }
2499
2500    #[inline]
2501    default fn ne(&self, other: &Rc<T, A>) -> bool {
2502        **self != **other
2503    }
2504}
2505
2506// Hack to allow specializing on `Eq` even though `Eq` has a method.
2507#[rustc_unsafe_specialization_marker]
2508pub(crate) trait MarkerEq: PartialEq<Self> {}
2509
2510impl<T: Eq> MarkerEq for T {}
2511
2512/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2513/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
2514/// store large values that are slow to clone but also heavy to check for equality, causing this
2515/// cost to pay off more easily. It's also more likely to have two `Rc` clones that point to
2516/// the same value than two `&T`s.
2517///
2518/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2519#[stable(feature = "rust1", since = "1.0.0")]
2520impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2521    #[inline]
2522    fn eq(&self, other: &Rc<T, A>) -> bool {
2523        Rc::ptr_eq(self, other) || **self == **other
2524    }
2525
2526    #[inline]
2527    fn ne(&self, other: &Rc<T, A>) -> bool {
2528        !Rc::ptr_eq(self, other) && **self != **other
2529    }
2530}
2531
2532#[stable(feature = "rust1", since = "1.0.0")]
2533impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2534    /// Equality for two `Rc`s.
2535    ///
2536    /// Two `Rc`s are equal if their inner values are equal, even if they are
2537    /// stored in different allocations.
2538    ///
2539    /// If `T` also implements `Eq` (implying reflexivity of equality),
2540    /// two `Rc`s that point to the same allocation are
2541    /// always equal.
2542    ///
2543    /// # Examples
2544    ///
2545    /// ```
2546    /// use std::rc::Rc;
2547    ///
2548    /// let five = Rc::new(5);
2549    ///
2550    /// assert!(five == Rc::new(5));
2551    /// ```
2552    #[inline]
2553    fn eq(&self, other: &Rc<T, A>) -> bool {
2554        RcEqIdent::eq(self, other)
2555    }
2556
2557    /// Inequality for two `Rc`s.
2558    ///
2559    /// Two `Rc`s are not equal if their inner values are not equal.
2560    ///
2561    /// If `T` also implements `Eq` (implying reflexivity of equality),
2562    /// two `Rc`s that point to the same allocation are
2563    /// never unequal.
2564    ///
2565    /// # Examples
2566    ///
2567    /// ```
2568    /// use std::rc::Rc;
2569    ///
2570    /// let five = Rc::new(5);
2571    ///
2572    /// assert!(five != Rc::new(6));
2573    /// ```
2574    #[inline]
2575    fn ne(&self, other: &Rc<T, A>) -> bool {
2576        RcEqIdent::ne(self, other)
2577    }
2578}
2579
2580#[stable(feature = "rust1", since = "1.0.0")]
2581impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2582
2583#[stable(feature = "rust1", since = "1.0.0")]
2584impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2585    /// Partial comparison for two `Rc`s.
2586    ///
2587    /// The two are compared by calling `partial_cmp()` on their inner values.
2588    ///
2589    /// # Examples
2590    ///
2591    /// ```
2592    /// use std::rc::Rc;
2593    /// use std::cmp::Ordering;
2594    ///
2595    /// let five = Rc::new(5);
2596    ///
2597    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2598    /// ```
2599    #[inline(always)]
2600    fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2601        (**self).partial_cmp(&**other)
2602    }
2603
2604    /// Less-than comparison for two `Rc`s.
2605    ///
2606    /// The two are compared by calling `<` on their inner values.
2607    ///
2608    /// # Examples
2609    ///
2610    /// ```
2611    /// use std::rc::Rc;
2612    ///
2613    /// let five = Rc::new(5);
2614    ///
2615    /// assert!(five < Rc::new(6));
2616    /// ```
2617    #[inline(always)]
2618    fn lt(&self, other: &Rc<T, A>) -> bool {
2619        **self < **other
2620    }
2621
2622    /// 'Less than or equal to' comparison for two `Rc`s.
2623    ///
2624    /// The two are compared by calling `<=` on their inner values.
2625    ///
2626    /// # Examples
2627    ///
2628    /// ```
2629    /// use std::rc::Rc;
2630    ///
2631    /// let five = Rc::new(5);
2632    ///
2633    /// assert!(five <= Rc::new(5));
2634    /// ```
2635    #[inline(always)]
2636    fn le(&self, other: &Rc<T, A>) -> bool {
2637        **self <= **other
2638    }
2639
2640    /// Greater-than comparison for two `Rc`s.
2641    ///
2642    /// The two are compared by calling `>` on their inner values.
2643    ///
2644    /// # Examples
2645    ///
2646    /// ```
2647    /// use std::rc::Rc;
2648    ///
2649    /// let five = Rc::new(5);
2650    ///
2651    /// assert!(five > Rc::new(4));
2652    /// ```
2653    #[inline(always)]
2654    fn gt(&self, other: &Rc<T, A>) -> bool {
2655        **self > **other
2656    }
2657
2658    /// 'Greater than or equal to' comparison for two `Rc`s.
2659    ///
2660    /// The two are compared by calling `>=` on their inner values.
2661    ///
2662    /// # Examples
2663    ///
2664    /// ```
2665    /// use std::rc::Rc;
2666    ///
2667    /// let five = Rc::new(5);
2668    ///
2669    /// assert!(five >= Rc::new(5));
2670    /// ```
2671    #[inline(always)]
2672    fn ge(&self, other: &Rc<T, A>) -> bool {
2673        **self >= **other
2674    }
2675}
2676
2677#[stable(feature = "rust1", since = "1.0.0")]
2678impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2679    /// Comparison for two `Rc`s.
2680    ///
2681    /// The two are compared by calling `cmp()` on their inner values.
2682    ///
2683    /// # Examples
2684    ///
2685    /// ```
2686    /// use std::rc::Rc;
2687    /// use std::cmp::Ordering;
2688    ///
2689    /// let five = Rc::new(5);
2690    ///
2691    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2692    /// ```
2693    #[inline]
2694    fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2695        (**self).cmp(&**other)
2696    }
2697}
2698
2699#[stable(feature = "rust1", since = "1.0.0")]
2700impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2701    fn hash<H: Hasher>(&self, state: &mut H) {
2702        (**self).hash(state);
2703    }
2704}
2705
2706#[stable(feature = "rust1", since = "1.0.0")]
2707impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2708    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2709        fmt::Display::fmt(&**self, f)
2710    }
2711}
2712
2713#[stable(feature = "rust1", since = "1.0.0")]
2714impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2715    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2716        fmt::Debug::fmt(&**self, f)
2717    }
2718}
2719
2720#[stable(feature = "rust1", since = "1.0.0")]
2721impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2722    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2723        fmt::Pointer::fmt(&(&raw const **self), f)
2724    }
2725}
2726
2727#[cfg(not(no_global_oom_handling))]
2728#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2729impl<T> From<T> for Rc<T> {
2730    /// Converts a generic type `T` into an `Rc<T>`.
2731    ///
2732    /// The conversion allocates on the heap and moves `t`
2733    /// from the stack into it.
2734    ///
2735    /// # Example
2736    /// ```rust
2737    /// # use std::rc::Rc;
2738    /// let x = 5;
2739    /// let rc = Rc::new(5);
2740    ///
2741    /// assert_eq!(Rc::from(x), rc);
2742    /// ```
2743    fn from(t: T) -> Self {
2744        Rc::new(t)
2745    }
2746}
2747
2748#[cfg(not(no_global_oom_handling))]
2749#[stable(feature = "shared_from_array", since = "1.74.0")]
2750impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2751    /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2752    ///
2753    /// The conversion moves the array into a newly allocated `Rc`.
2754    ///
2755    /// # Example
2756    ///
2757    /// ```
2758    /// # use std::rc::Rc;
2759    /// let original: [i32; 3] = [1, 2, 3];
2760    /// let shared: Rc<[i32]> = Rc::from(original);
2761    /// assert_eq!(&[1, 2, 3], &shared[..]);
2762    /// ```
2763    #[inline]
2764    fn from(v: [T; N]) -> Rc<[T]> {
2765        Rc::<[T; N]>::from(v)
2766    }
2767}
2768
2769#[cfg(not(no_global_oom_handling))]
2770#[stable(feature = "shared_from_slice", since = "1.21.0")]
2771impl<T: Clone> From<&[T]> for Rc<[T]> {
2772    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2773    ///
2774    /// # Example
2775    ///
2776    /// ```
2777    /// # use std::rc::Rc;
2778    /// let original: &[i32] = &[1, 2, 3];
2779    /// let shared: Rc<[i32]> = Rc::from(original);
2780    /// assert_eq!(&[1, 2, 3], &shared[..]);
2781    /// ```
2782    #[inline]
2783    fn from(v: &[T]) -> Rc<[T]> {
2784        <Self as RcFromSlice<T>>::from_slice(v)
2785    }
2786}
2787
2788#[cfg(not(no_global_oom_handling))]
2789#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2790impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2791    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2792    ///
2793    /// # Example
2794    ///
2795    /// ```
2796    /// # use std::rc::Rc;
2797    /// let mut original = [1, 2, 3];
2798    /// let original: &mut [i32] = &mut original;
2799    /// let shared: Rc<[i32]> = Rc::from(original);
2800    /// assert_eq!(&[1, 2, 3], &shared[..]);
2801    /// ```
2802    #[inline]
2803    fn from(v: &mut [T]) -> Rc<[T]> {
2804        Rc::from(&*v)
2805    }
2806}
2807
2808#[cfg(not(no_global_oom_handling))]
2809#[stable(feature = "shared_from_slice", since = "1.21.0")]
2810impl From<&str> for Rc<str> {
2811    /// Allocates a reference-counted string slice and copies `v` into it.
2812    ///
2813    /// # Example
2814    ///
2815    /// ```
2816    /// # use std::rc::Rc;
2817    /// let shared: Rc<str> = Rc::from("statue");
2818    /// assert_eq!("statue", &shared[..]);
2819    /// ```
2820    #[inline]
2821    fn from(v: &str) -> Rc<str> {
2822        let rc = Rc::<[u8]>::from(v.as_bytes());
2823        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2824    }
2825}
2826
2827#[cfg(not(no_global_oom_handling))]
2828#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2829impl From<&mut str> for Rc<str> {
2830    /// Allocates a reference-counted string slice and copies `v` into it.
2831    ///
2832    /// # Example
2833    ///
2834    /// ```
2835    /// # use std::rc::Rc;
2836    /// let mut original = String::from("statue");
2837    /// let original: &mut str = &mut original;
2838    /// let shared: Rc<str> = Rc::from(original);
2839    /// assert_eq!("statue", &shared[..]);
2840    /// ```
2841    #[inline]
2842    fn from(v: &mut str) -> Rc<str> {
2843        Rc::from(&*v)
2844    }
2845}
2846
2847#[cfg(not(no_global_oom_handling))]
2848#[stable(feature = "shared_from_slice", since = "1.21.0")]
2849impl From<String> for Rc<str> {
2850    /// Allocates a reference-counted string slice and copies `v` into it.
2851    ///
2852    /// # Example
2853    ///
2854    /// ```
2855    /// # use std::rc::Rc;
2856    /// let original: String = "statue".to_owned();
2857    /// let shared: Rc<str> = Rc::from(original);
2858    /// assert_eq!("statue", &shared[..]);
2859    /// ```
2860    #[inline]
2861    fn from(v: String) -> Rc<str> {
2862        Rc::from(&v[..])
2863    }
2864}
2865
2866#[cfg(not(no_global_oom_handling))]
2867#[stable(feature = "shared_from_slice", since = "1.21.0")]
2868impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
2869    /// Moves a boxed object to a new, reference-counted allocation.
2870    ///
2871    /// # Example
2872    ///
2873    /// ```
2874    /// # use std::rc::Rc;
2875    /// let original: Box<i32> = Box::new(1);
2876    /// let shared: Rc<i32> = Rc::from(original);
2877    /// assert_eq!(1, *shared);
2878    /// ```
2879    #[inline]
2880    fn from(v: Box<T, A>) -> Rc<T, A> {
2881        Rc::from_box_in(v)
2882    }
2883}
2884
2885#[cfg(not(no_global_oom_handling))]
2886#[stable(feature = "shared_from_slice", since = "1.21.0")]
2887impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2888    /// Allocates a reference-counted slice and moves `v`'s items into it.
2889    ///
2890    /// # Example
2891    ///
2892    /// ```
2893    /// # use std::rc::Rc;
2894    /// let unique: Vec<i32> = vec![1, 2, 3];
2895    /// let shared: Rc<[i32]> = Rc::from(unique);
2896    /// assert_eq!(&[1, 2, 3], &shared[..]);
2897    /// ```
2898    #[inline]
2899    fn from(v: Vec<T, A>) -> Rc<[T], A> {
2900        unsafe {
2901            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2902
2903            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2904            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2905
2906            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2907            // without dropping its contents or the allocator
2908            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2909
2910            Self::from_ptr_in(rc_ptr, alloc)
2911        }
2912    }
2913}
2914
2915#[stable(feature = "shared_from_cow", since = "1.45.0")]
2916impl<'a, B> From<Cow<'a, B>> for Rc<B>
2917where
2918    B: ToOwned + ?Sized,
2919    Rc<B>: From<&'a B> + From<B::Owned>,
2920{
2921    /// Creates a reference-counted pointer from a clone-on-write pointer by
2922    /// copying its content.
2923    ///
2924    /// # Example
2925    ///
2926    /// ```rust
2927    /// # use std::rc::Rc;
2928    /// # use std::borrow::Cow;
2929    /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2930    /// let shared: Rc<str> = Rc::from(cow);
2931    /// assert_eq!("eggplant", &shared[..]);
2932    /// ```
2933    #[inline]
2934    fn from(cow: Cow<'a, B>) -> Rc<B> {
2935        match cow {
2936            Cow::Borrowed(s) => Rc::from(s),
2937            Cow::Owned(s) => Rc::from(s),
2938        }
2939    }
2940}
2941
2942#[stable(feature = "shared_from_str", since = "1.62.0")]
2943impl From<Rc<str>> for Rc<[u8]> {
2944    /// Converts a reference-counted string slice into a byte slice.
2945    ///
2946    /// # Example
2947    ///
2948    /// ```
2949    /// # use std::rc::Rc;
2950    /// let string: Rc<str> = Rc::from("eggplant");
2951    /// let bytes: Rc<[u8]> = Rc::from(string);
2952    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2953    /// ```
2954    #[inline]
2955    fn from(rc: Rc<str>) -> Self {
2956        // SAFETY: `str` has the same layout as `[u8]`.
2957        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2958    }
2959}
2960
2961#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2962impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2963    type Error = Rc<[T], A>;
2964
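    /// Attempts to convert a reference-counted slice into a reference-counted
    /// array, returning the original `Rc<[T], A>` as the error if the length
    /// is not exactly `N`.
    ///
    /// # Examples
    ///
    /// A minimal sketch of the length check:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[i32; 3]> = slice.try_into().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// let too_short: Rc<[i32]> = Rc::from([1, 2]);
    /// assert!(Rc::<[i32; 3]>::try_from(too_short).is_err());
    /// ```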
2965    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2966        if boxed_slice.len() == N {
2967            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2968            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2969        } else {
2970            Err(boxed_slice)
2971        }
2972    }
2973}
2974
2975#[cfg(not(no_global_oom_handling))]
2976#[stable(feature = "shared_from_iter", since = "1.37.0")]
2977impl<T> FromIterator<T> for Rc<[T]> {
2978    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2979    ///
2980    /// # Performance characteristics
2981    ///
2982    /// ## The general case
2983    ///
2984    /// In the general case, collecting into `Rc<[T]>` is done by first
2985    /// collecting into a `Vec<T>`. That is, when writing the following:
2986    ///
2987    /// ```rust
2988    /// # use std::rc::Rc;
2989    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2990    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2991    /// ```
2992    ///
2993    /// this behaves as if we wrote:
2994    ///
2995    /// ```rust
2996    /// # use std::rc::Rc;
2997    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2998    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
2999    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
3000    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
3001    /// ```
3002    ///
3003    /// This will allocate as many times as needed for constructing the `Vec<T>`
3004    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
3005    ///
3006    /// ## Iterators of known length
3007    ///
3008    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
3009    /// a single allocation will be made for the `Rc<[T]>`. For example:
3010    ///
3011    /// ```rust
3012    /// # use std::rc::Rc;
3013    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
3014    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
3015    /// ```
3016    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
3017        ToRcSlice::to_rc_slice(iter.into_iter())
3018    }
3019}
3020
3021/// Specialization trait used for collecting into `Rc<[T]>`.
3022#[cfg(not(no_global_oom_handling))]
3023trait ToRcSlice<T>: Iterator<Item = T> + Sized {
3024    fn to_rc_slice(self) -> Rc<[T]>;
3025}
3026
3027#[cfg(not(no_global_oom_handling))]
3028impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
3029    default fn to_rc_slice(self) -> Rc<[T]> {
3030        self.collect::<Vec<T>>().into()
3031    }
3032}
3033
3034#[cfg(not(no_global_oom_handling))]
3035impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
3036    fn to_rc_slice(self) -> Rc<[T]> {
3037        // This is the case for a `TrustedLen` iterator.
3038        let (low, high) = self.size_hint();
3039        if let Some(high) = high {
3040            debug_assert_eq!(
3041                low,
3042                high,
3043                "TrustedLen iterator's size hint is not exact: {:?}",
3044                (low, high)
3045            );
3046
3047            unsafe {
3048                // SAFETY: We need the iterator to have an exact length, which the
                // `TrustedLen` contract guarantees and the `debug_assert_eq!` above checks.
3049                Rc::from_iter_exact(self, low)
3050            }
3051        } else {
3052            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
3053            // length exceeding `usize::MAX`.
3054            // The default implementation would collect into a vec which would panic.
3055            // Thus we panic here immediately without invoking `Vec` code.
3056            panic!("capacity overflow");
3057        }
3058    }
3059}
3060
3061/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
3062/// managed allocation.
3063///
3064/// The allocation is accessed by calling [`upgrade`] on the `Weak`
3065/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
3066///
3067/// Since a `Weak` reference does not count towards ownership, it will not
3068/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
3069/// guarantees about the value still being present. Thus it may return [`None`]
3070/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
3071/// itself (the backing store) from being deallocated.
3072///
3073/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
3074/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
3075/// prevent circular references between [`Rc`] pointers, since mutual owning references
3076/// would never allow either [`Rc`] to be dropped. For example, a tree could
3077/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
3078/// pointers from children back to their parents.
3079///
3080/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
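///
/// For example, a minimal sketch (using hypothetical `Parent` and `Child` types) of the tree
/// shape described above, with strong parent-to-child links and weak child-to-parent links:
///
/// ```
/// use std::cell::RefCell;
/// use std::rc::{Rc, Weak};
///
/// struct Parent {
///     children: RefCell<Vec<Rc<Child>>>,
/// }
///
/// struct Child {
///     parent: RefCell<Weak<Parent>>,
/// }
///
/// let parent = Rc::new(Parent { children: RefCell::new(Vec::new()) });
/// let child = Rc::new(Child { parent: RefCell::new(Rc::downgrade(&parent)) });
/// parent.children.borrow_mut().push(Rc::clone(&child));
///
/// // The child can reach its parent without keeping the parent's value alive.
/// assert!(child.parent.borrow().upgrade().is_some());
/// ```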
3081///
3082/// [`upgrade`]: Weak::upgrade
3083#[stable(feature = "rc_weak", since = "1.4.0")]
3084#[rustc_diagnostic_item = "RcWeak"]
3085pub struct Weak<
3086    T: ?Sized,
3087    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3088> {
3089    // This is a `NonNull` to allow optimizing the size of this type in enums,
3090    // but it is not necessarily a valid pointer.
3091    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
3092    // to allocate space on the heap. That's not a value a real pointer
3093    // will ever have because RcInner has alignment at least 2.
3094    ptr: NonNull<RcInner<T>>,
3095    alloc: A,
3096}
3097
3098#[stable(feature = "rc_weak", since = "1.4.0")]
3099impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3100#[stable(feature = "rc_weak", since = "1.4.0")]
3101impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3102
3103#[unstable(feature = "coerce_unsized", issue = "18598")]
3104impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3105
3106#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3107impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3108
3109// SAFETY: `Weak::clone` doesn't access any `Cell`s which could contain the `Weak` being cloned.
3110#[unstable(feature = "cell_get_cloned", issue = "145329")]
3111unsafe impl<T: ?Sized> CloneFromCell for Weak<T> {}
3112
3113impl<T> Weak<T> {
3114    /// Constructs a new `Weak<T>`, without allocating any memory.
3115    /// Calling [`upgrade`] on the return value always gives [`None`].
3116    ///
3117    /// [`upgrade`]: Weak::upgrade
3118    ///
3119    /// # Examples
3120    ///
3121    /// ```
3122    /// use std::rc::Weak;
3123    ///
3124    /// let empty: Weak<i64> = Weak::new();
3125    /// assert!(empty.upgrade().is_none());
3126    /// ```
3127    #[inline]
3128    #[stable(feature = "downgraded_weak", since = "1.10.0")]
3129    #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3130    #[must_use]
3131    pub const fn new() -> Weak<T> {
3132        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3133    }
3134}
3135
3136impl<T, A: Allocator> Weak<T, A> {
3137    /// Constructs a new `Weak<T>` associated with the provided allocator, without allocating
3138    /// any memory.
3139    /// Calling [`upgrade`] on the return value always gives [`None`].
3140    ///
3141    /// [`upgrade`]: Weak::upgrade
3142    ///
3143    /// # Examples
3144    ///
3145    /// ```
3146    /// #![feature(allocator_api)]
3147    /// use std::{alloc::System, rc::Weak};
3148    /// let empty: Weak<i64, System> = Weak::new_in(System);
3149    /// assert!(empty.upgrade().is_none());
3150    /// ```
3151    #[inline]
3152    #[unstable(feature = "allocator_api", issue = "32838")]
3153    pub fn new_in(alloc: A) -> Weak<T, A> {
3154        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3155    }
3156}
3157
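/// Returns `true` if `ptr` carries the `usize::MAX` sentinel address that `Weak::new` uses in
/// place of a real allocation, i.e. there is no `RcInner` behind this pointer.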
3158pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3159    (ptr.cast::<()>()).addr() == usize::MAX
3160}
3161
3162/// Helper type to allow accessing the reference counts without
3163/// making any assertions about the data field.
3164struct WeakInner<'a> {
3165    weak: &'a Cell<usize>,
3166    strong: &'a Cell<usize>,
3167}
3168
3169impl<T: ?Sized> Weak<T> {
3170    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3171    ///
3172    /// This can be used to safely get a strong reference (by calling [`upgrade`] later)
3173    /// or to release the weak reference by dropping the `Weak<T>`.
3174    ///
3175    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3176    /// as these don't own anything; the method still works on them).
3177    ///
3178    /// # Safety
3179    ///
3180    /// The pointer must have originated from [`into_raw`] and must still own its potential
3181    /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3182    ///
3183    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3184    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3185    /// count is not modified by this operation) and therefore it must be paired with a previous
3186    /// call to [`into_raw`].
3187    ///
3188    /// # Examples
3189    ///
3190    /// ```
3191    /// use std::rc::{Rc, Weak};
3192    ///
3193    /// let strong = Rc::new("hello".to_owned());
3194    ///
3195    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3196    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3197    ///
3198    /// assert_eq!(2, Rc::weak_count(&strong));
3199    ///
3200    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3201    /// assert_eq!(1, Rc::weak_count(&strong));
3202    ///
3203    /// drop(strong);
3204    ///
3205    /// // Decrement the last weak count.
3206    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3207    /// ```
3208    ///
3209    /// [`into_raw`]: Weak::into_raw
3210    /// [`upgrade`]: Weak::upgrade
3211    /// [`new`]: Weak::new
3212    #[inline]
3213    #[stable(feature = "weak_into_raw", since = "1.45.0")]
3214    pub unsafe fn from_raw(ptr: *const T) -> Self {
3215        unsafe { Self::from_raw_in(ptr, Global) }
3216    }
3217
3218    /// Consumes the `Weak<T>` and turns it into a raw pointer.
3219    ///
3220    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3221    /// one weak reference (the weak count is not modified by this operation). It can be turned
3222    /// back into the `Weak<T>` with [`from_raw`].
3223    ///
3224    /// The same restrictions on accessing the target of the pointer as with
3225    /// [`as_ptr`] apply.
3226    ///
3227    /// # Examples
3228    ///
3229    /// ```
3230    /// use std::rc::{Rc, Weak};
3231    ///
3232    /// let strong = Rc::new("hello".to_owned());
3233    /// let weak = Rc::downgrade(&strong);
3234    /// let raw = weak.into_raw();
3235    ///
3236    /// assert_eq!(1, Rc::weak_count(&strong));
3237    /// assert_eq!("hello", unsafe { &*raw });
3238    ///
3239    /// drop(unsafe { Weak::from_raw(raw) });
3240    /// assert_eq!(0, Rc::weak_count(&strong));
3241    /// ```
3242    ///
3243    /// [`from_raw`]: Weak::from_raw
3244    /// [`as_ptr`]: Weak::as_ptr
3245    #[must_use = "losing the pointer will leak memory"]
3246    #[stable(feature = "weak_into_raw", since = "1.45.0")]
3247    pub fn into_raw(self) -> *const T {
3248        mem::ManuallyDrop::new(self).as_ptr()
3249    }
3250}
3251
3252impl<T: ?Sized, A: Allocator> Weak<T, A> {
3253    /// Returns a reference to the underlying allocator.
3254    #[inline]
3255    #[unstable(feature = "allocator_api", issue = "32838")]
3256    pub fn allocator(&self) -> &A {
3257        &self.alloc
3258    }
3259
3260    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3261    ///
3262    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3263    /// unaligned or even [`null`] otherwise.
3264    ///
3265    /// # Examples
3266    ///
3267    /// ```
3268    /// use std::rc::Rc;
3269    /// use std::ptr;
3270    ///
3271    /// let strong = Rc::new("hello".to_owned());
3272    /// let weak = Rc::downgrade(&strong);
3273    /// // Both point to the same object
3274    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3275    /// // The strong here keeps it alive, so we can still access the object.
3276    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3277    ///
3278    /// drop(strong);
3279    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3280    /// // undefined behavior.
3281    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3282    /// ```
3283    ///
3284    /// [`null`]: ptr::null
3285    #[must_use]
3286    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3287    pub fn as_ptr(&self) -> *const T {
3288        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3289
3290        if is_dangling(ptr) {
3291            // If the pointer is dangling, we return the sentinel directly. This cannot be
3292            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3293            ptr as *const T
3294        } else {
3295            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3296            // The payload may be dropped at this point, and we have to maintain provenance,
3297            // so use raw pointer manipulation.
3298            unsafe { &raw mut (*ptr).value }
3299        }
3300    }
3301
3302    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3303    ///
3304    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3305    /// one weak reference (the weak count is not modified by this operation). It can be turned
3306    /// back into the `Weak<T>` with [`from_raw_in`].
3307    ///
3308    /// The same restrictions on accessing the target of the pointer as with
3309    /// [`as_ptr`] apply.
3310    ///
3311    /// # Examples
3312    ///
3313    /// ```
3314    /// #![feature(allocator_api)]
3315    /// use std::rc::{Rc, Weak};
3316    /// use std::alloc::System;
3317    ///
3318    /// let strong = Rc::new_in("hello".to_owned(), System);
3319    /// let weak = Rc::downgrade(&strong);
3320    /// let (raw, alloc) = weak.into_raw_with_allocator();
3321    ///
3322    /// assert_eq!(1, Rc::weak_count(&strong));
3323    /// assert_eq!("hello", unsafe { &*raw });
3324    ///
3325    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3326    /// assert_eq!(0, Rc::weak_count(&strong));
3327    /// ```
3328    ///
3329    /// [`from_raw_in`]: Weak::from_raw_in
3330    /// [`as_ptr`]: Weak::as_ptr
3331    #[must_use = "losing the pointer will leak memory"]
3332    #[inline]
3333    #[unstable(feature = "allocator_api", issue = "32838")]
3334    pub fn into_raw_with_allocator(self) -> (*const T, A) {
3335        let this = mem::ManuallyDrop::new(self);
3336        let result = this.as_ptr();
3337        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3338        let alloc = unsafe { ptr::read(&this.alloc) };
3339        (result, alloc)
3340    }
3341
3342    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3343    ///
3344    /// This can be used to safely get a strong reference (by calling [`upgrade`] later)
3345    /// or to release the weak reference by dropping the `Weak<T>`.
3346    ///
3347    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3348    /// as these don't own anything; the method still works on them).
3349    ///
3350    /// # Safety
3351    ///
3352    /// The pointer must have originated from [`into_raw`] and must still own its potential
3353    /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3354    ///
3355    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3356    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3357    /// count is not modified by this operation) and therefore it must be paired with a previous
3358    /// call to [`into_raw`].
3359    ///
3360    /// # Examples
3361    ///
3362    /// ```
3363    /// use std::rc::{Rc, Weak};
3364    ///
3365    /// let strong = Rc::new("hello".to_owned());
3366    ///
3367    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3368    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3369    ///
3370    /// assert_eq!(2, Rc::weak_count(&strong));
3371    ///
3372    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3373    /// assert_eq!(1, Rc::weak_count(&strong));
3374    ///
3375    /// drop(strong);
3376    ///
3377    /// // Decrement the last weak count.
3378    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3379    /// ```
3380    ///
3381    /// [`into_raw`]: Weak::into_raw
3382    /// [`upgrade`]: Weak::upgrade
3383    /// [`new`]: Weak::new
3384    #[inline]
3385    #[unstable(feature = "allocator_api", issue = "32838")]
3386    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3387        // See Weak::as_ptr for context on how the input pointer is derived.
3388
3389        let ptr = if is_dangling(ptr) {
3390            // This is a dangling Weak.
3391            ptr as *mut RcInner<T>
3392        } else {
3393            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3394            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3395            let offset = unsafe { data_offset(ptr) };
3396            // Thus, we reverse the offset to get the whole RcInner.
3397            // SAFETY: the pointer originated from a Weak, so this offset is safe.
3398            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3399        };
3400
3401        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3402        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3403    }
3404
3405    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3406    /// dropping of the inner value if successful.
3407    ///
3408    /// Returns [`None`] if the inner value has since been dropped.
3409    ///
3410    /// # Examples
3411    ///
3412    /// ```
3413    /// use std::rc::Rc;
3414    ///
3415    /// let five = Rc::new(5);
3416    ///
3417    /// let weak_five = Rc::downgrade(&five);
3418    ///
3419    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3420    /// assert!(strong_five.is_some());
3421    ///
3422    /// // Destroy all strong pointers.
3423    /// drop(strong_five);
3424    /// drop(five);
3425    ///
3426    /// assert!(weak_five.upgrade().is_none());
3427    /// ```
3428    #[must_use = "this returns a new `Rc`, \
3429                  without modifying the original weak pointer"]
3430    #[stable(feature = "rc_weak", since = "1.4.0")]
3431    pub fn upgrade(&self) -> Option<Rc<T, A>>
3432    where
3433        A: Clone,
3434    {
3435        let inner = self.inner()?;
3436
3437        if inner.strong() == 0 {
3438            None
3439        } else {
3440            unsafe {
3441                inner.inc_strong();
3442                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3443            }
3444        }
3445    }
3446
3447    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3448    ///
3449    /// If `self` was created using [`Weak::new`], this will return 0.
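    ///
    /// # Examples
    ///
    /// A minimal sketch of how the count tracks the remaining strong pointers:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// drop(five);
    /// assert_eq!(0, weak_five.strong_count());
    /// ```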
3450    #[must_use]
3451    #[stable(feature = "weak_counts", since = "1.41.0")]
3452    pub fn strong_count(&self) -> usize {
3453        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3454    }
3455
3456    /// Gets the number of `Weak` pointers pointing to this allocation.
3457    ///
3458    /// If no strong pointers remain, this will return zero.
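    ///
    /// # Examples
    ///
    /// A minimal sketch; note that the count includes the `Weak` this is called on:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// let also_five = weak_five.clone();
    /// assert_eq!(2, also_five.weak_count());
    ///
    /// drop(five);
    /// // No strong pointers remain, so zero is reported.
    /// assert_eq!(0, weak_five.weak_count());
    /// ```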
3459    #[must_use]
3460    #[stable(feature = "weak_counts", since = "1.41.0")]
3461    pub fn weak_count(&self) -> usize {
3462        if let Some(inner) = self.inner() {
3463            if inner.strong() > 0 {
3464                inner.weak() - 1 // subtract the implicit weak ptr
3465            } else {
3466                0
3467            }
3468        } else {
3469            0
3470        }
3471    }
3472
3473    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
3474    /// (i.e., when this `Weak` was created by `Weak::new`).
3475    #[inline]
3476    fn inner(&self) -> Option<WeakInner<'_>> {
3477        if is_dangling(self.ptr.as_ptr()) {
3478            None
3479        } else {
3480            // We are careful to *not* create a reference covering the "data" field, as
3481            // the field may be mutated concurrently (for example, if the last `Rc`
3482            // is dropped, the data field will be dropped in-place).
3483            Some(unsafe {
3484                let ptr = self.ptr.as_ptr();
3485                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3486            })
3487        }
3488    }
3489
3490    /// Returns `true` if the two `Weak`s point to the same allocation, in a vein similar to
3491    /// [`ptr::eq`], or if both don't point to any allocation (because they were created with
3492    /// `Weak::new()`). However, this function ignores the metadata of `dyn Trait` pointers.
3493    ///
3494    /// # Notes
3495    ///
3496    /// Since this compares pointers, two `Weak`s created by `Weak::new()` will compare
3497    /// equal to each other, even though they don't point to any allocation.
3498    ///
3499    /// # Examples
3500    ///
3501    /// ```
3502    /// use std::rc::Rc;
3503    ///
3504    /// let first_rc = Rc::new(5);
3505    /// let first = Rc::downgrade(&first_rc);
3506    /// let second = Rc::downgrade(&first_rc);
3507    ///
3508    /// assert!(first.ptr_eq(&second));
3509    ///
3510    /// let third_rc = Rc::new(5);
3511    /// let third = Rc::downgrade(&third_rc);
3512    ///
3513    /// assert!(!first.ptr_eq(&third));
3514    /// ```
3515    ///
3516    /// Comparing `Weak::new`.
3517    ///
3518    /// ```
3519    /// use std::rc::{Rc, Weak};
3520    ///
3521    /// let first = Weak::new();
3522    /// let second = Weak::new();
3523    /// assert!(first.ptr_eq(&second));
3524    ///
3525    /// let third_rc = Rc::new(());
3526    /// let third = Rc::downgrade(&third_rc);
3527    /// assert!(!first.ptr_eq(&third));
3528    /// ```
3529    #[inline]
3530    #[must_use]
3531    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3532    pub fn ptr_eq(&self, other: &Self) -> bool {
3533        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3534    }
3535}
3536
3537#[stable(feature = "rc_weak", since = "1.4.0")]
3538unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3539    /// Drops the `Weak` pointer.
3540    ///
3541    /// # Examples
3542    ///
3543    /// ```
3544    /// use std::rc::{Rc, Weak};
3545    ///
3546    /// struct Foo;
3547    ///
3548    /// impl Drop for Foo {
3549    ///     fn drop(&mut self) {
3550    ///         println!("dropped!");
3551    ///     }
3552    /// }
3553    ///
3554    /// let foo = Rc::new(Foo);
3555    /// let weak_foo = Rc::downgrade(&foo);
3556    /// let other_weak_foo = Weak::clone(&weak_foo);
3557    ///
3558    /// drop(weak_foo);   // Doesn't print anything
3559    /// drop(foo);        // Prints "dropped!"
3560    ///
3561    /// assert!(other_weak_foo.upgrade().is_none());
3562    /// ```
3563    fn drop(&mut self) {
3564        let inner = if let Some(inner) = self.inner() { inner } else { return };
3565
3566        inner.dec_weak();
3567        // the weak count starts at 1, and will only go to zero if all
3568        // the strong pointers have disappeared.
3569        if inner.weak() == 0 {
3570            unsafe {
3571                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3572            }
3573        }
3574    }
3575}
3576
3577#[stable(feature = "rc_weak", since = "1.4.0")]
3578impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3579    /// Makes a clone of the `Weak` pointer that points to the same allocation.
3580    ///
3581    /// # Examples
3582    ///
3583    /// ```
3584    /// use std::rc::{Rc, Weak};
3585    ///
3586    /// let weak_five = Rc::downgrade(&Rc::new(5));
3587    ///
3588    /// let _ = Weak::clone(&weak_five);
3589    /// ```
3590    #[inline]
3591    fn clone(&self) -> Weak<T, A> {
3592        if let Some(inner) = self.inner() {
3593            inner.inc_weak()
3594        }
3595        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3596    }
3597}
3598
3599#[unstable(feature = "ergonomic_clones", issue = "132290")]
3600impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3601
3602#[stable(feature = "rc_weak", since = "1.4.0")]
3603impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3604    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3605        write!(f, "(Weak)")
3606    }
3607}
3608
3609#[stable(feature = "downgraded_weak", since = "1.10.0")]
3610impl<T> Default for Weak<T> {
3611    /// Constructs a new `Weak<T>`, without allocating any memory.
3612    /// Calling [`upgrade`] on the return value always gives [`None`].
3613    ///
3614    /// [`upgrade`]: Weak::upgrade
3615    ///
3616    /// # Examples
3617    ///
3618    /// ```
3619    /// use std::rc::Weak;
3620    ///
3621    /// let empty: Weak<i64> = Default::default();
3622    /// assert!(empty.upgrade().is_none());
3623    /// ```
3624    fn default() -> Weak<T> {
3625        Weak::new()
3626    }
3627}
3628
3629// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3630// is not decremented, meaning the ref-count can overflow, and then you can
3631// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3632// unsound. We abort because this is such a degenerate scenario that we don't
3633// care about what happens -- no real program should ever experience this.
3634//
3635// This should have negligible overhead since you don't actually need to
3636// clone these much in Rust thanks to ownership and move-semantics.
3637
3638#[doc(hidden)]
3639trait RcInnerPtr {
3640    fn weak_ref(&self) -> &Cell<usize>;
3641    fn strong_ref(&self) -> &Cell<usize>;
3642
3643    #[inline]
3644    fn strong(&self) -> usize {
3645        self.strong_ref().get()
3646    }
3647
3648    #[inline]
3649    fn inc_strong(&self) {
3650        let strong = self.strong();
3651
3652        // We insert an `assume` here to hint LLVM at an otherwise
3653        // missed optimization.
3654        // SAFETY: The reference count will never be zero when this is
3655        // called.
3656        unsafe {
3657            hint::assert_unchecked(strong != 0);
3658        }
3659
3660        let strong = strong.wrapping_add(1);
3661        self.strong_ref().set(strong);
3662
3663        // We want to abort on overflow instead of dropping the value.
3664        // Checking for overflow after the store instead of before
3665        // allows for slightly better code generation.
3666        if core::intrinsics::unlikely(strong == 0) {
3667            abort();
3668        }
3669    }
3670
3671    #[inline]
3672    fn dec_strong(&self) {
3673        self.strong_ref().set(self.strong() - 1);
3674    }
3675
3676    #[inline]
3677    fn weak(&self) -> usize {
3678        self.weak_ref().get()
3679    }
3680
3681    #[inline]
3682    fn inc_weak(&self) {
3683        let weak = self.weak();
3684
3685        // We insert an `assume` here to hint LLVM at an otherwise
3686        // missed optimization.
3687        // SAFETY: The reference count will never be zero when this is
3688        // called.
3689        unsafe {
3690            hint::assert_unchecked(weak != 0);
3691        }
3692
3693        let weak = weak.wrapping_add(1);
3694        self.weak_ref().set(weak);
3695
3696        // We want to abort on overflow instead of dropping the value.
3697        // Checking for overflow after the store instead of before
3698        // allows for slightly better code generation.
3699        if core::intrinsics::unlikely(weak == 0) {
3700            abort();
3701        }
3702    }
3703
3704    #[inline]
3705    fn dec_weak(&self) {
3706        self.weak_ref().set(self.weak() - 1);
3707    }
3708}
3709
3710impl<T: ?Sized> RcInnerPtr for RcInner<T> {
3711    #[inline(always)]
3712    fn weak_ref(&self) -> &Cell<usize> {
3713        &self.weak
3714    }
3715
3716    #[inline(always)]
3717    fn strong_ref(&self) -> &Cell<usize> {
3718        &self.strong
3719    }
3720}
3721
3722impl<'a> RcInnerPtr for WeakInner<'a> {
3723    #[inline(always)]
3724    fn weak_ref(&self) -> &Cell<usize> {
3725        self.weak
3726    }
3727
3728    #[inline(always)]
3729    fn strong_ref(&self) -> &Cell<usize> {
3730        self.strong
3731    }
3732}
3733
3734#[stable(feature = "rust1", since = "1.0.0")]
3735impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3736    fn borrow(&self) -> &T {
3737        &**self
3738    }
3739}
3740
3741#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3742impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3743    fn as_ref(&self) -> &T {
3744        &**self
3745    }
3746}
3747
3748#[stable(feature = "pin", since = "1.33.0")]
3749impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3750
3751/// Gets the offset within an `RcInner` for the payload behind a pointer.
3752///
3753/// # Safety
3754///
3755/// The pointer must point to (and have valid metadata for) a previously
3756/// valid instance of T, but the T is allowed to be dropped.
3757unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3758    // Align the unsized value to the end of the RcInner.
3759    // Because RcInner is repr(C), it will always be the last field in memory.
3760    // SAFETY: since the only unsized types possible are slices, trait objects,
3761    // and extern types, the input safety requirement is currently enough to
3762    // satisfy the requirements of align_of_val_raw; this is an implementation
3763    // detail of the language that must not be relied upon outside of std.
3764    unsafe { data_offset_align(align_of_val_raw(ptr)) }
3765}
3766
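// A minimal worked example (assuming a 64-bit target): `RcInner<()>` is just the two `usize`
// reference counts, i.e. 16 bytes with alignment 8, so a payload with alignment at most 8 starts
// at offset 16, while a payload with alignment 32 is padded out to start at offset 32.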
3767#[inline]
3768fn data_offset_align(align: usize) -> usize {
3769    let layout = Layout::new::<RcInner<()>>();
3770    layout.size() + layout.padding_needed_for(align)
3771}
3772
3773/// A uniquely owned [`Rc`].
3774///
3775/// This represents an `Rc` that is known to be uniquely owned -- that is, to have exactly one strong
3776/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3777/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3778///
3779/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
3780/// use case is to have an object be mutable during its initialization phase but then have it become
3781/// immutable and converted to a normal `Rc`.
3782///
3783/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3784///
3785/// ```
3786/// #![feature(unique_rc_arc)]
3787/// use std::rc::{Rc, Weak, UniqueRc};
3788///
3789/// struct Gadget {
3790///     #[allow(dead_code)]
3791///     me: Weak<Gadget>,
3792/// }
3793///
3794/// fn create_gadget() -> Option<Rc<Gadget>> {
3795///     let mut rc = UniqueRc::new(Gadget {
3796///         me: Weak::new(),
3797///     });
3798///     rc.me = UniqueRc::downgrade(&rc);
3799///     Some(UniqueRc::into_rc(rc))
3800/// }
3801///
3802/// create_gadget().unwrap();
3803/// ```
3804///
3805/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3806/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3807/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3808/// including fallible or async constructors.
3809#[unstable(feature = "unique_rc_arc", issue = "112566")]
3810pub struct UniqueRc<
3811    T: ?Sized,
3812    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3813> {
3814    ptr: NonNull<RcInner<T>>,
3815    // Define the ownership of `RcInner<T>` for drop-check
3816    _marker: PhantomData<RcInner<T>>,
3817    // Invariance is necessary for soundness: once other `Weak`
3818    // references exist, we already have a form of shared mutability!
3819    _marker2: PhantomData<*mut T>,
3820    alloc: A,
3821}
3822
3823// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3824// but having an explicit negative impl is nice for documentation purposes
3825// and results in nicer error messages.
3826#[unstable(feature = "unique_rc_arc", issue = "112566")]
3827impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3828
3829// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3830// but having an explicit negative impl is nice for documentation purposes
3831// and results in nicer error messages.
3832#[unstable(feature = "unique_rc_arc", issue = "112566")]
3833impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3834
3835#[unstable(feature = "unique_rc_arc", issue = "112566")]
3836impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
3837    for UniqueRc<T, A>
3838{
3839}
3840
3841//#[unstable(feature = "unique_rc_arc", issue = "112566")]
3842#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3843impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3844
3845#[unstable(feature = "unique_rc_arc", issue = "112566")]
3846impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3847    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3848        fmt::Display::fmt(&**self, f)
3849    }
3850}
3851
3852#[unstable(feature = "unique_rc_arc", issue = "112566")]
3853impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3854    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3855        fmt::Debug::fmt(&**self, f)
3856    }
3857}
3858
3859#[unstable(feature = "unique_rc_arc", issue = "112566")]
3860impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
3861    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3862        fmt::Pointer::fmt(&(&raw const **self), f)
3863    }
3864}
3865
3866#[unstable(feature = "unique_rc_arc", issue = "112566")]
3867impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
3868    fn borrow(&self) -> &T {
3869        &**self
3870    }
3871}
3872
3873#[unstable(feature = "unique_rc_arc", issue = "112566")]
3874impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
3875    fn borrow_mut(&mut self) -> &mut T {
3876        &mut **self
3877    }
3878}
3879
3880#[unstable(feature = "unique_rc_arc", issue = "112566")]
3881impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
3882    fn as_ref(&self) -> &T {
3883        &**self
3884    }
3885}
3886
3887#[unstable(feature = "unique_rc_arc", issue = "112566")]
3888impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
3889    fn as_mut(&mut self) -> &mut T {
3890        &mut **self
3891    }
3892}
3893
3894#[unstable(feature = "unique_rc_arc", issue = "112566")]
3895impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3896
3897#[unstable(feature = "unique_rc_arc", issue = "112566")]
3898impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3899    /// Equality for two `UniqueRc`s.
3900    ///
3901    /// Two `UniqueRc`s are equal if their inner values are equal.
3902    ///
3903    /// # Examples
3904    ///
3905    /// ```
3906    /// #![feature(unique_rc_arc)]
3907    /// use std::rc::UniqueRc;
3908    ///
3909    /// let five = UniqueRc::new(5);
3910    ///
3911    /// assert!(five == UniqueRc::new(5));
3912    /// ```
3913    #[inline]
3914    fn eq(&self, other: &Self) -> bool {
3915        PartialEq::eq(&**self, &**other)
3916    }
3917
3918    /// Inequality for two `UniqueRc`s.
3919    ///
3920    /// Two `UniqueRc`s are not equal if their inner values are not equal.
3921    ///
3922    /// # Examples
3923    ///
3924    /// ```
3925    /// #![feature(unique_rc_arc)]
3926    /// use std::rc::UniqueRc;
3927    ///
3928    /// let five = UniqueRc::new(5);
3929    ///
3930    /// assert!(five != UniqueRc::new(6));
3931    /// ```
3932    #[inline]
3933    fn ne(&self, other: &Self) -> bool {
3934        PartialEq::ne(&**self, &**other)
3935    }
3936}
3937
3938#[unstable(feature = "unique_rc_arc", issue = "112566")]
3939impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
3940    /// Partial comparison for two `UniqueRc`s.
3941    ///
3942    /// The two are compared by calling `partial_cmp()` on their inner values.
3943    ///
3944    /// # Examples
3945    ///
3946    /// ```
3947    /// #![feature(unique_rc_arc)]
3948    /// use std::rc::UniqueRc;
3949    /// use std::cmp::Ordering;
3950    ///
3951    /// let five = UniqueRc::new(5);
3952    ///
3953    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
3954    /// ```
3955    #[inline(always)]
3956    fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
3957        (**self).partial_cmp(&**other)
3958    }
3959
3960    /// Less-than comparison for two `UniqueRc`s.
3961    ///
3962    /// The two are compared by calling `<` on their inner values.
3963    ///
3964    /// # Examples
3965    ///
3966    /// ```
3967    /// #![feature(unique_rc_arc)]
3968    /// use std::rc::UniqueRc;
3969    ///
3970    /// let five = UniqueRc::new(5);
3971    ///
3972    /// assert!(five < UniqueRc::new(6));
3973    /// ```
3974    #[inline(always)]
3975    fn lt(&self, other: &UniqueRc<T, A>) -> bool {
3976        **self < **other
3977    }
3978
3979    /// 'Less than or equal to' comparison for two `UniqueRc`s.
3980    ///
3981    /// The two are compared by calling `<=` on their inner values.
3982    ///
3983    /// # Examples
3984    ///
3985    /// ```
3986    /// #![feature(unique_rc_arc)]
3987    /// use std::rc::UniqueRc;
3988    ///
3989    /// let five = UniqueRc::new(5);
3990    ///
3991    /// assert!(five <= UniqueRc::new(5));
3992    /// ```
3993    #[inline(always)]
3994    fn le(&self, other: &UniqueRc<T, A>) -> bool {
3995        **self <= **other
3996    }
3997
3998    /// Greater-than comparison for two `UniqueRc`s.
3999    ///
4000    /// The two are compared by calling `>` on their inner values.
4001    ///
4002    /// # Examples
4003    ///
4004    /// ```
4005    /// #![feature(unique_rc_arc)]
4006    /// use std::rc::UniqueRc;
4007    ///
4008    /// let five = UniqueRc::new(5);
4009    ///
4010    /// assert!(five > UniqueRc::new(4));
4011    /// ```
4012    #[inline(always)]
4013    fn gt(&self, other: &UniqueRc<T, A>) -> bool {
4014        **self > **other
4015    }
4016
4017    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
4018    ///
4019    /// The two are compared by calling `>=` on their inner values.
4020    ///
4021    /// # Examples
4022    ///
4023    /// ```
4024    /// #![feature(unique_rc_arc)]
4025    /// use std::rc::UniqueRc;
4026    ///
4027    /// let five = UniqueRc::new(5);
4028    ///
4029    /// assert!(five >= UniqueRc::new(5));
4030    /// ```
4031    #[inline(always)]
4032    fn ge(&self, other: &UniqueRc<T, A>) -> bool {
4033        **self >= **other
4034    }
4035}
4036
4037#[unstable(feature = "unique_rc_arc", issue = "112566")]
4038impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
4039    /// Comparison for two `UniqueRc`s.
4040    ///
4041    /// The two are compared by calling `cmp()` on their inner values.
4042    ///
4043    /// # Examples
4044    ///
4045    /// ```
4046    /// #![feature(unique_rc_arc)]
4047    /// use std::rc::UniqueRc;
4048    /// use std::cmp::Ordering;
4049    ///
4050    /// let five = UniqueRc::new(5);
4051    ///
4052    /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
4053    /// ```
4054    #[inline]
4055    fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
4056        (**self).cmp(&**other)
4057    }
4058}
4059
4060#[unstable(feature = "unique_rc_arc", issue = "112566")]
4061impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
4062
4063#[unstable(feature = "unique_rc_arc", issue = "112566")]
4064impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
4065    fn hash<H: Hasher>(&self, state: &mut H) {
4066        (**self).hash(state);
4067    }
4068}
4069
4070// Depends on A = Global
4071impl<T> UniqueRc<T> {
4072    /// Creates a new `UniqueRc`.
4073    ///
4074    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
4075    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
4076    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
4077    /// point to the new [`Rc`].
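    ///
    /// # Examples
    ///
    /// A minimal sketch of the initialize-then-freeze pattern:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let mut unique = UniqueRc::new(Vec::new());
    /// unique.push(1); // mutate freely while uniquely owned
    ///
    /// let weak = UniqueRc::downgrade(&unique);
    /// assert!(weak.upgrade().is_none()); // not yet converted into an `Rc`
    ///
    /// let shared = UniqueRc::into_rc(unique);
    /// assert_eq!(weak.upgrade().as_deref(), Some(&vec![1]));
    /// assert_eq!(*shared, vec![1]);
    /// ```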
4078    #[cfg(not(no_global_oom_handling))]
4079    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4080    pub fn new(value: T) -> Self {
4081        Self::new_in(value, Global)
4082    }
4083
4084    /// Maps the value in a `UniqueRc`, reusing the allocation if possible.
4085    ///
4086    /// `f` is called on the value in the `UniqueRc` (taking it by value), and the result is
4087    /// returned, also in a `UniqueRc`.
4088    ///
4089    /// Note: this is an associated function, which means that you have
4090    /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
4091    /// is so that there is no conflict with a method on the inner type.
4092    ///
4093    /// # Examples
4094    ///
4095    /// ```
4096    /// #![feature(smart_pointer_try_map)]
4097    /// #![feature(unique_rc_arc)]
4098    ///
4099    /// use std::rc::UniqueRc;
4100    ///
4101    /// let r = UniqueRc::new(7);
4102    /// let new = UniqueRc::map(r, |i| i + 7);
4103    /// assert_eq!(*new, 14);
4104    /// ```
4105    #[cfg(not(no_global_oom_handling))]
4106    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
4107    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
4108        if size_of::<T>() == size_of::<U>()
4109            && align_of::<T>() == align_of::<U>()
4110            && UniqueRc::weak_count(&this) == 0
4111        {
4112            unsafe {
4113                let ptr = UniqueRc::into_raw(this);
4114                let value = ptr.read();
4115                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
4116
4117                allocation.write(f(value));
4118                allocation.assume_init()
4119            }
4120        } else {
4121            UniqueRc::new(f(UniqueRc::unwrap(this)))
4122        }
4123    }
4124
4125    /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
4126    ///
4127    /// `f` is called on the value in the `UniqueRc` (taking it by value), and if the operation
4128    /// succeeds, the result is returned, also in a `UniqueRc`.
4129    ///
4130    /// Note: this is an associated function, which means that you have
4131    /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
4132    /// is so that there is no conflict with a method on the inner type.
4133    ///
4134    /// # Examples
4135    ///
4136    /// ```
4137    /// #![feature(smart_pointer_try_map)]
4138    /// #![feature(unique_rc_arc)]
4139    ///
4140    /// use std::rc::UniqueRc;
4141    ///
4142    /// let b = UniqueRc::new(7);
4143    /// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
4144    /// assert_eq!(*new, 7);
4145    /// ```
4146    #[cfg(not(no_global_oom_handling))]
4147    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
4148    pub fn try_map<R>(
4149        this: Self,
4150        f: impl FnOnce(T) -> R,
4151    ) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
4152    where
4153        R: Try,
4154        R::Residual: Residual<UniqueRc<R::Output>>,
4155    {
4156        if size_of::<T>() == size_of::<R::Output>()
4157            && align_of::<T>() == align_of::<R::Output>()
4158            && UniqueRc::weak_count(&this) == 0
4159        {
4160            unsafe {
4161                let ptr = UniqueRc::into_raw(this);
4162                let value = ptr.read();
4163                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
4164
4165                allocation.write(f(value)?);
4166                try { allocation.assume_init() }
4167            }
4168        } else {
4169            try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
4170        }
4171    }
4172
4173    #[cfg(not(no_global_oom_handling))]
4174    fn unwrap(this: Self) -> T {
4175        let this = ManuallyDrop::new(this);
4176        let val: T = unsafe { ptr::read(&**this) };
4177
4178        let _weak = Weak { ptr: this.ptr, alloc: Global };
4179
4180        val
4181    }
4182}
4183
4184impl<T: ?Sized> UniqueRc<T> {
4185    #[cfg(not(no_global_oom_handling))]
4186    unsafe fn from_raw(ptr: *const T) -> Self {
4187        let offset = unsafe { data_offset(ptr) };
4188
4189        // Reverse the offset to find the original RcInner.
4190        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
4191
4192        Self {
4193            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
4194            _marker: PhantomData,
4195            _marker2: PhantomData,
4196            alloc: Global,
4197        }
4198    }
4199
4200    #[cfg(not(no_global_oom_handling))]
4201    fn into_raw(this: Self) -> *const T {
4202        let this = ManuallyDrop::new(this);
4203        Self::as_ptr(&*this)
4204    }
4205}
4206
4207impl<T, A: Allocator> UniqueRc<T, A> {
4208    /// Creates a new `UniqueRc` in the provided allocator.
4209    ///
4210    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
4211    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
4212    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
4213    /// point to the new [`Rc`].
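    ///
    /// # Examples
    ///
    /// A minimal sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(unique_rc_arc, allocator_api)]
    /// use std::alloc::System;
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new_in(5, System);
    /// assert_eq!(*unique, 5);
    /// ```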
4214    #[cfg(not(no_global_oom_handling))]
4215    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4216    pub fn new_in(value: T, alloc: A) -> Self {
4217        let (ptr, alloc) = Box::into_unique(Box::new_in(
4218            RcInner {
4219                strong: Cell::new(0),
4220                // Keep one weak reference so that, even if all the weak pointers that are created
4221                // are dropped, the UniqueRc still stays valid.
4222                weak: Cell::new(1),
4223                value,
4224            },
4225            alloc,
4226        ));
4227        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4228    }
4229}
4230
4231impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
4232    /// Converts the `UniqueRc` into a regular [`Rc`].
4233    ///
4234    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the value held
4235    /// by the `UniqueRc` passed to `into_rc`.
4236    ///
4237    /// Any weak references created before this method is called can now be upgraded to strong
4238    /// references.
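    ///
    /// # Examples
    ///
    /// A minimal sketch of a weak reference becoming upgradeable after the conversion:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*weak.upgrade().unwrap(), 5);
    /// assert_eq!(*rc, 5);
    /// ```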
4239    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4240    pub fn into_rc(this: Self) -> Rc<T, A> {
4241        let mut this = ManuallyDrop::new(this);
4242
4243        // Move the allocator out.
4244        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
4245        // a `ManuallyDrop`.
4246        let alloc: A = unsafe { ptr::read(&this.alloc) };
4247
4248        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4249        unsafe {
4250            // Convert our weak reference into a strong reference
4251            this.ptr.as_mut().strong.set(1);
4252            Rc::from_inner_in(this.ptr, alloc)
4253        }
4254    }
4255
4256    #[cfg(not(no_global_oom_handling))]
4257    fn weak_count(this: &Self) -> usize {
4258        this.inner().weak() - 1
4259    }
4260
4261    #[cfg(not(no_global_oom_handling))]
4262    fn inner(&self) -> &RcInner<T> {
4263        // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
4264        unsafe { self.ptr.as_ref() }
4265    }
4266
4267    #[cfg(not(no_global_oom_handling))]
4268    fn as_ptr(this: &Self) -> *const T {
4269        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
4270
4271        // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
4272        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
4273        // write through the pointer after the Rc is recovered through `from_raw`.
4274        unsafe { &raw mut (*ptr).value }
4275    }
4276
4277    #[inline]
4278    #[cfg(not(no_global_oom_handling))]
4279    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
4280        let this = mem::ManuallyDrop::new(this);
4281        (this.ptr, unsafe { ptr::read(&this.alloc) })
4282    }
4283
4284    #[inline]
4285    #[cfg(not(no_global_oom_handling))]
4286    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
4287        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
4288    }
4289}
4290
4291impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
4292    /// Creates a new weak reference to the `UniqueRc`.
4293    ///
4294    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
4295    /// to a [`Rc`] using [`UniqueRc::into_rc`].
4296    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4297    pub fn downgrade(this: &Self) -> Weak<T, A> {
4298        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
4299        // one strong reference before converting to a regular Rc.
4300        unsafe {
4301            this.ptr.as_ref().inc_weak();
4302        }
4303        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
4304    }
4305}
4306
4307#[cfg(not(no_global_oom_handling))]
4308impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
4309    unsafe fn assume_init(self) -> UniqueRc<T, A> {
4310        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
4311        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
4312    }
4313}
4314
4315#[unstable(feature = "unique_rc_arc", issue = "112566")]
4316impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4317    type Target = T;
4318
4319    fn deref(&self) -> &T {
4320        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4321        unsafe { &self.ptr.as_ref().value }
4322    }
4323}
4324
4325#[unstable(feature = "unique_rc_arc", issue = "112566")]
4326impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4327    fn deref_mut(&mut self) -> &mut T {
4328        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
4329        // have unique ownership and therefore it's safe to make a mutable reference because
4330        // `UniqueRc` owns the only strong reference to itself.
4331        unsafe { &mut (*self.ptr.as_ptr()).value }
4332    }
4333}
4334
4335#[unstable(feature = "unique_rc_arc", issue = "112566")]
4336unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
4337    fn drop(&mut self) {
4338        unsafe {
4339            // destroy the contained object
4340            drop_in_place(DerefMut::deref_mut(self));
4341
4342            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
4343            self.ptr.as_ref().dec_weak();
4344
4345            if self.ptr.as_ref().weak() == 0 {
4346                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
4347            }
4348        }
4349    }
4350}
4351
4352/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
4353/// but will deallocate it (without dropping the value) when dropped.
4354///
4355/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
4356/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
4357/// which `MaybeUninit` does not.
4358#[cfg(not(no_global_oom_handling))]
4359struct UniqueRcUninit<T: ?Sized, A: Allocator> {
4360    ptr: NonNull<RcInner<T>>,
4361    layout_for_value: Layout,
4362    alloc: Option<A>,
4363}
4364
4365#[cfg(not(no_global_oom_handling))]
4366impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
4367    /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it.
4368    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
4369        let layout = Layout::for_value(for_value);
4370        let ptr = unsafe {
4371            Rc::allocate_for_layout(
4372                layout,
4373                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
4374                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
4375            )
4376        };
4377        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
4378    }
4379
4380    /// Returns the pointer to be written into to initialize the [`Rc`].
4381    fn data_ptr(&mut self) -> *mut T {
4382        let offset = data_offset_align(self.layout_for_value.align());
4383        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
4384    }
4385
4386    /// Upgrade this into a normal [`Rc`].
4387    ///
4388    /// # Safety
4389    ///
4390    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
4391    unsafe fn into_rc(self) -> Rc<T, A> {
4392        let mut this = ManuallyDrop::new(self);
4393        let ptr = this.ptr;
4394        let alloc = this.alloc.take().unwrap();
4395
4396        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
4397        // for having initialized the data.
4398        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
4399    }
4400}
4401
4402#[cfg(not(no_global_oom_handling))]
4403impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
4404    fn drop(&mut self) {
4405        // SAFETY:
4406        // * new() produced a pointer safe to deallocate.
4407        // * We own the pointer unless into_rc() was called, which forgets us.
4408        unsafe {
4409            self.alloc.take().unwrap().deallocate(
4410                self.ptr.cast(),
4411                rc_inner_layout_for_value_layout(self.layout_for_value),
4412            );
4413        }
4414    }
4415}